| _id (string, 64) | repository (string, 6-84) | name (string, 4-110) | content (string, 0-248k) | license (null) | download_url (string, 89-454) | language (7 classes) | comments (string, 0-74.6k) | code (string, 0-248k) |
---|---|---|---|---|---|---|---|---|
18e5980ef1c89842c82550ae8528efa4312e5a8c5849027124138f4b6245c545 | simmone/racket-simple-qr | rotate-ratio.rkt | #lang racket
(require "../../../../share/func.rkt")
(provide (contract-out
[write-report-rotate-ratio (-> number? path-string? void?)]
))
(define (write-report-rotate-ratio rotate_ratio express_path)
(let* ([scrbl_dir (build-path express_path "rotate-ratio")]
[scrbl_file (build-path scrbl_dir "rotate-ratio.scrbl")])
(make-directory* scrbl_dir)
(with-output-to-file
scrbl_file
(lambda ()
(printf "#lang scribble/base\n\n")
(printf "@title{Rotate Ratio}\n\n")
(printf "from finder pattern center points, calcuate the rotate ratio needed.\n")
(printf "@section{Ratio:~a}\n" rotate_ratio)
))))
| null | https://raw.githubusercontent.com/simmone/racket-simple-qr/904f1491bc521badeafeabd0d7d7e97e3d0ee958/simple-qr/read/lib/express/rotate-ratio/rotate-ratio.rkt | racket | #lang racket
(require "../../../../share/func.rkt")
(provide (contract-out
[write-report-rotate-ratio (-> number? path-string? void?)]
))
(define (write-report-rotate-ratio rotate_ratio express_path)
(let* ([scrbl_dir (build-path express_path "rotate-ratio")]
[scrbl_file (build-path scrbl_dir "rotate-ratio.scrbl")])
(make-directory* scrbl_dir)
(with-output-to-file
scrbl_file
(lambda ()
(printf "#lang scribble/base\n\n")
(printf "@title{Rotate Ratio}\n\n")
(printf "from finder pattern center points, calcuate the rotate ratio needed.\n")
(printf "@section{Ratio:~a}\n" rotate_ratio)
))))
|
|
42acaabe763238fc07f20c8ad0b14774145cb98fff84cb63ef032e155add2c79 | goldfirere/th-desugar | ReifyTypeSigs.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
#if __GLASGOW_HASKELL__ >= 809
{-# LANGUAGE StandaloneKindSignatures #-}
#endif
module ReifyTypeSigs where
#if __GLASGOW_HASKELL__ >= 809
import Data.Kind
import Data.Proxy
#endif
import Language.Haskell.TH.Desugar
import Language.Haskell.TH.Syntax hiding (Type)
import Splices (eqTH)
test_reify_kind_sigs :: [Bool]
test_reify_kind_sigs =
$(do kind_sig_decls <-
[d|
#if __GLASGOW_HASKELL__ >= 809
type A1 :: forall k. k -> Type
data A1 a
type A2 :: k -> Type
type A2 a = a
type A3 :: forall k. k -> Type
type family A3
type A4 :: forall k. k -> Type
data family A4 a
type A5 :: k -> Type
type family A5 a where
A5 a = a
type A6 :: forall (k :: Bool) -> Proxy k -> Constraint
class A6 a b where
type A7 a c
#endif
|]
let test_reify_kind :: DsMonad q
=> (Int, DKind) -> q Bool
test_reify_kind (i, expected_kind) = do
actual_kind <- dsReifyType $ mkName $ "A" ++ show i
return $ Just expected_kind `eqTH` actual_kind
kind_sig_decl_bools <-
withLocalDeclarations kind_sig_decls $
traverse test_reify_kind $
[]
#if __GLASGOW_HASKELL__ >= 809
++
let k = mkName "k"
typeKind = DConT typeKindName
boolKind = DConT ''Bool
k_to_type = DArrowT `DAppT` DVarT k `DAppT` typeKind
forall_k_invis_k_to_type =
DForallT (DForallInvis [DPlainTV k SpecifiedSpec]) k_to_type in
[ (1, forall_k_invis_k_to_type)
, (2, k_to_type)
, (3, forall_k_invis_k_to_type)
, (4, forall_k_invis_k_to_type)
, (5, k_to_type)
, (6, DForallT (DForallVis [DKindedTV k () boolKind]) $
DArrowT `DAppT` (DConT ''Proxy `DAppT` DVarT k)
`DAppT` DConT ''Constraint)
, (7, DArrowT `DAppT` boolKind `DAppT`
(DArrowT `DAppT` typeKind `DAppT` typeKind))
]
#endif
lift kind_sig_decl_bools)
| null | https://raw.githubusercontent.com/goldfirere/th-desugar/d87ea5678dfd6726a0776db71468dbd97dda2985/Test/ReifyTypeSigs.hs | haskell | # LANGUAGE RankNTypes # | {-# LANGUAGE CPP #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TemplateHaskell #-}
#if __GLASGOW_HASKELL__ >= 809
{-# LANGUAGE StandaloneKindSignatures #-}
#endif
module ReifyTypeSigs where
#if __GLASGOW_HASKELL__ >= 809
import Data.Kind
import Data.Proxy
#endif
import Language.Haskell.TH.Desugar
import Language.Haskell.TH.Syntax hiding (Type)
import Splices (eqTH)
test_reify_kind_sigs :: [Bool]
test_reify_kind_sigs =
$(do kind_sig_decls <-
[d|
#if __GLASGOW_HASKELL__ >= 809
type A1 :: forall k. k -> Type
data A1 a
type A2 :: k -> Type
type A2 a = a
type A3 :: forall k. k -> Type
type family A3
type A4 :: forall k. k -> Type
data family A4 a
type A5 :: k -> Type
type family A5 a where
A5 a = a
type A6 :: forall (k :: Bool) -> Proxy k -> Constraint
class A6 a b where
type A7 a c
#endif
|]
let test_reify_kind :: DsMonad q
=> (Int, DKind) -> q Bool
test_reify_kind (i, expected_kind) = do
actual_kind <- dsReifyType $ mkName $ "A" ++ show i
return $ Just expected_kind `eqTH` actual_kind
kind_sig_decl_bools <-
withLocalDeclarations kind_sig_decls $
traverse test_reify_kind $
[]
#if __GLASGOW_HASKELL__ >= 809
++
let k = mkName "k"
typeKind = DConT typeKindName
boolKind = DConT ''Bool
k_to_type = DArrowT `DAppT` DVarT k `DAppT` typeKind
forall_k_invis_k_to_type =
DForallT (DForallInvis [DPlainTV k SpecifiedSpec]) k_to_type in
[ (1, forall_k_invis_k_to_type)
, (2, k_to_type)
, (3, forall_k_invis_k_to_type)
, (4, forall_k_invis_k_to_type)
, (5, k_to_type)
, (6, DForallT (DForallVis [DKindedTV k () boolKind]) $
DArrowT `DAppT` (DConT ''Proxy `DAppT` DVarT k)
`DAppT` DConT ''Constraint)
, (7, DArrowT `DAppT` boolKind `DAppT`
(DArrowT `DAppT` typeKind `DAppT` typeKind))
]
#endif
lift kind_sig_decl_bools)
|
2f743be35291f7a7e4e207baa89bc45e833f2784ec0e042806477686d3280091 | ten0s/syntaxerl | syntaxerl_yrl.erl | -module(syntaxerl_yrl).
-author("Dmitry Klionsky <>").
-behaviour(syntaxerl).
-export([
check_syntax/3,
output_error/1,
output_warning/1
]).
-include("check_syntax_spec.hrl").
%% ===================================================================
%% API
%% ===================================================================
check_syntax(FileName, _BaseFileName, _Debug) ->
case yecc:file(FileName, [{report, true}, {return, true}]) of
{ok, ScannerFile} ->
file:delete(ScannerFile),
{ok, []};
{ok, ScannerFile, Warnings} ->
file:delete(ScannerFile),
{ok, syntaxerl_format:format_warnings(?MODULE, Warnings)};
{error, Errors, Warnings} ->
case syntaxerl_format:format_errors(?MODULE, Errors) of
[] ->
{ok, syntaxerl_format:format_warnings(?MODULE, Warnings)};
Errors2 ->
{error, Errors2 ++ syntaxerl_format:format_warnings(?MODULE, Warnings)}
end
end.
output_error(_) -> true.
output_warning(_) -> true.
| null | https://raw.githubusercontent.com/ten0s/syntaxerl/fc5113c68f0ce0c6ef43216e2b3483b090c6dd86/src/syntaxerl_yrl.erl | erlang | ===================================================================
API
=================================================================== | -module(syntaxerl_yrl).
-author("Dmitry Klionsky <>").
-behaviour(syntaxerl).
-export([
check_syntax/3,
output_error/1,
output_warning/1
]).
-include("check_syntax_spec.hrl").
check_syntax(FileName, _BaseFileName, _Debug) ->
case yecc:file(FileName, [{report, true}, {return, true}]) of
{ok, ScannerFile} ->
file:delete(ScannerFile),
{ok, []};
{ok, ScannerFile, Warnings} ->
file:delete(ScannerFile),
{ok, syntaxerl_format:format_warnings(?MODULE, Warnings)};
{error, Errors, Warnings} ->
case syntaxerl_format:format_errors(?MODULE, Errors) of
[] ->
{ok, syntaxerl_format:format_warnings(?MODULE, Warnings)};
Errors2 ->
{error, Errors2 ++ syntaxerl_format:format_warnings(?MODULE, Warnings)}
end
end.
output_error(_) -> true.
output_warning(_) -> true.
|
e1607a8f7aef35337a7c44c11ad3a27205fa10adfe6598b601dec8c6bbabd496 | AlexKnauth/reprovide-lang | info.rkt | #lang info
(define collection 'multi)
(define implies
'("reprovide-lang-lib"
))
(define deps
'("base"
"reprovide-lang-lib"
))
(define build-deps
'("rackunit-lib"
"scribble-lib"
"racket-doc"
))
| null | https://raw.githubusercontent.com/AlexKnauth/reprovide-lang/8c3c912d200eec45d22fc267d151fb1939471b49/reprovide-lang/info.rkt | racket | #lang info
(define collection 'multi)
(define implies
'("reprovide-lang-lib"
))
(define deps
'("base"
"reprovide-lang-lib"
))
(define build-deps
'("rackunit-lib"
"scribble-lib"
"racket-doc"
))
|
|
1dd5e6e127e45be2ec6a462de213ecbeb9bb69e4d7a6c6b8fb9f5bc57e1af31e | c-cube/iter | Iter.mli | * Simple and Efficient Iterators .
The iterators are designed to allow easy transfer ( mappings ) between data
structures , without defining [ n^2 ] conversions between the [ n ] types . The
implementation relies on the assumption that an iterator can be iterated
on as many times as needed ; this choice allows for high performance
of many combinators . However , for transient iterators , the { ! persistent }
function is provided , storing elements of a transient iterator
in memory ; the iterator can then be used several times ( See further ) .
Note that some combinators also return iterators ( e.g. { ! group } ) . The
transformation is computed on the fly every time one iterates over
the resulting iterator . If a transformation performs heavy computation ,
{ ! persistent } can also be used as intermediate storage .
Most functions are { b lazy } , i.e. they do not actually use their arguments
until their result is iterated on . For instance , if one calls { ! map }
on an iterator , one gets a new iterator , but nothing else happens until
this new iterator is used ( by folding or iterating on it ) .
If an iterator is built from an iteration function that is { b repeatable }
( i.e. calling it several times always iterates on the same set of
elements , for instance List.iter or Map.iter ) , then
the resulting { ! t } object is also repeatable . For { b one - time iter functions }
such as iteration on a file descriptor or a { ! Seq } ,
the { ! persistent } function can be used to iterate and store elements in
a memory structure ; the result is an iterator that iterates on the elements
of this memory structure , cheaply and repeatably .
The iterators are designed to allow easy transfer (mappings) between data
structures, without defining [n^2] conversions between the [n] types. The
implementation relies on the assumption that an iterator can be iterated
on as many times as needed; this choice allows for high performance
of many combinators. However, for transient iterators, the {!persistent}
function is provided, storing elements of a transient iterator
in memory; the iterator can then be used several times (See further).
Note that some combinators also return iterators (e.g. {!group}). The
transformation is computed on the fly every time one iterates over
the resulting iterator. If a transformation performs heavy computation,
{!persistent} can also be used as intermediate storage.
Most functions are {b lazy}, i.e. they do not actually use their arguments
until their result is iterated on. For instance, if one calls {!map}
on an iterator, one gets a new iterator, but nothing else happens until
this new iterator is used (by folding or iterating on it).
If an iterator is built from an iteration function that is {b repeatable}
(i.e. calling it several times always iterates on the same set of
elements, for instance List.iter or Map.iter), then
the resulting {!t} object is also repeatable. For {b one-time iter functions}
such as iteration on a file descriptor or a {!Seq},
the {!persistent} function can be used to iterate and store elements in
a memory structure; the result is an iterator that iterates on the elements
of this memory structure, cheaply and repeatably.
*)
type +'a t = ('a -> unit) -> unit
(** An iterator of values of type ['a]. If you give it a function ['a -> unit]
it will be applied to every element of the iterator successively. *)
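(* A minimal usage sketch (illustrative; not part of the original interface):
   an ['a t] is literally a function that pushes elements to a callback, so one
   can be written by hand and consumed with [fold]. Names are illustrative.
   {[
     let my_iter : int Iter.t = fun yield -> yield 1; yield 2; yield 3
     let total = Iter.fold ( + ) 0 my_iter   (* = 6 *)
   ]} *)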
type +'a iter = 'a t
type 'a equal = 'a -> 'a -> bool
type 'a hash = 'a -> int
(** {1 Creation} *)
val from_iter : (('a -> unit) -> unit) -> 'a t
(** Build an iterator from an iter function *)
val from_labelled_iter : (f:('a -> unit) -> unit) -> 'a t
(** Build an iterator from a labelled iter function
@since 1.2 *)
val from_fun : (unit -> 'a option) -> 'a t
(** Call the function repeatedly until it returns None. This
iterator is transient, use {!persistent} if needed! *)
val empty : 'a t
(** Empty iterator. It contains no element. *)
val singleton : 'a -> 'a t
(** Singleton iterator, with exactly one element. *)
val doubleton : 'a -> 'a -> 'a t
(** Iterator with exactly two elements *)
val init : (int -> 'a) -> 'a t
(** [init f] is the infinite iterator [f 0; f 1; f 2; …].
@since 0.9 *)
val cons : 'a -> 'a t -> 'a t
(** [cons x l] yields [x], then yields from [l].
Same as [append (singleton x) l].
Caution: it is advised not to build long iterators out of [cons],
because it's inefficient. Each additional [cons x i] adds one
layer of function call per item traversed in [i]. *)
val snoc : 'a t -> 'a -> 'a t
(** Same as {!cons} but yields the element after iterating on [l]. *)
val return : 'a -> 'a t
(** Synonym to {!singleton} *)
val pure : 'a -> 'a t
(** Synonym to {!singleton} *)
val repeat : 'a -> 'a t
(** Infinite iterator of the same element. You may want to look
at {!take} and the likes if you iterate on it. *)
val iterate : ('a -> 'a) -> 'a -> 'a t
(** [iterate f x] is the infinite iterator [x, f(x), f(f(x)), ...] *)
val forever : (unit -> 'b) -> 'b t
(** Iterator that calls the given function to produce elements.
The iterator may be transient (depending on the function), and definitely
is infinite. You may want to use {!take} and {!persistent}. *)
val cycle : 'a t -> 'a t
(** Cycle forever through the given iterator. Assume the given iterator can
be traversed any amount of times (not transient). This yields an
infinite iterator, you should use something like {!take} not to loop
forever. *)
val unfoldr : ('b -> ('a * 'b) option) -> 'b -> 'a t
(** [unfoldr f b] will apply [f] to [b]. If it
yields [Some (x,b')] then [x] is returned
and unfoldr recurses with [b']. *)
val scan : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b t
(** Iterator of intermediate results *)
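(* A minimal usage sketch (illustrative; not part of the original interface):
   [unfoldr] grows an iterator from a seed and [scan] exposes running results.
   Values are illustrative.
   {[
     (* 5, 4, 3, 2, 1 *)
     let countdown = Iter.unfoldr (fun n -> if n = 0 then None else Some (n, n - 1)) 5
     (* 0, 1, 3, 6, 10: running sums of 1..4, starting from the initial acc *)
     let running_sums = Iter.scan ( + ) 0 Iter.(1 -- 4)
     let _ = (Iter.to_list countdown, Iter.to_list running_sums)
   ]} *)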
(** {1 Consumption} *)
val iter : ('a -> unit) -> 'a t -> unit
(** Consume the iterator, passing all its arguments to the function.
Basically [iter f seq] is just [seq f]. *)
val iteri : (int -> 'a -> unit) -> 'a t -> unit
(** Iterate on elements and their index in the iterator *)
val for_each : 'a t -> ('a -> unit) -> unit
(** Consume the iterator, passing all its arguments to the function.
[for_each seq f] is the same as [iter f seq], i.e., [iter] with
arguments reversed.
@since 1.4 *)
val for_eachi : 'a t -> (int -> 'a -> unit) -> unit
(** Iterate on elements and their index in the iterator.
[for_eachi seq f] is the same as [iteri f seq], i.e., [iteri] with
arguments reversed.
@since 1.4 *)
val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
(** Fold over elements of the iterator, consuming it *)
val foldi : ('a -> int -> 'b -> 'a) -> 'a -> 'b t -> 'a
(** Fold over elements of the iterator and their index, consuming it *)
val fold_map : ('acc -> 'a -> 'acc * 'b) -> 'acc -> 'a t -> 'b t
(** [fold_map f acc l] is like {!map}, but it carries some state as in
{!fold}. The state is not returned, it is just used to thread some
information to the map function.
@since 0.9 *)
val fold_filter_map : ('acc -> 'a -> 'acc * 'b option) -> 'acc -> 'a t -> 'b t
(** [fold_filter_map f acc l] is a {!fold_map}-like function, but the
function can choose to skip an element by returning [None].
@since 0.9 *)
val map : ('a -> 'b) -> 'a t -> 'b t
(** Map objects of the iterator into other elements, lazily *)
val mapi : (int -> 'a -> 'b) -> 'a t -> 'b t
(** Map objects, along with their index in the iterator *)
val map_by_2 : ('a -> 'a -> 'a) -> 'a t -> 'a t
(** Map objects two by two, lazily.
The last element is kept in the iterator if the count is odd.
@since 0.7 *)
val for_all : ('a -> bool) -> 'a t -> bool
(** Do all elements satisfy the predicate? *)
val exists : ('a -> bool) -> 'a t -> bool
(** Exists there some element satisfying the predicate? *)
val mem : ?eq:('a -> 'a -> bool) -> 'a -> 'a t -> bool
(** Is the value a member of the iterator?
@param eq the equality predicate to use (default [(=)])
@since 0.5 *)
val find : ('a -> 'b option) -> 'a t -> 'b option
(** Find the first element on which the function doesn't return [None]
@since 0.5 *)
val find_map : ('a -> 'b option) -> 'a t -> 'b option
(** Alias to {!find}
@since 0.10 *)
val findi : (int -> 'a -> 'b option) -> 'a t -> 'b option
(** Indexed version of {!find}
@since 0.9 *)
val find_mapi : (int -> 'a -> 'b option) -> 'a t -> 'b option
(** Alias to {!findi}
@since 0.10 *)
val find_pred : ('a -> bool) -> 'a t -> 'a option
(** [find_pred p l] finds the first element of [l] that satisfies [p],
or returns [None] if no element satisfies [p]
@since 0.9 *)
val find_pred_exn : ('a -> bool) -> 'a t -> 'a
(** Unsafe version of {!find_pred}
@raise Not_found if no such element is found
@since 0.9 *)
val length : 'a t -> int
(** How long is the iterator? Forces the iterator. *)
val is_empty : 'a t -> bool
(** Is the iterator empty? Forces the iterator. *)
(** {1 Transformation} *)
val filter : ('a -> bool) -> 'a t -> 'a t
(** Filter on elements of the iterator *)
val append : 'a t -> 'a t -> 'a t
(** Append two iterators. Iterating on the result is like iterating
on the first, then on the second. *)
val append_l : 'a t list -> 'a t
(** Append iterators. Iterating on the result is like iterating
on each iterator of the list in order.
@since 0.11 *)
val concat : 'a t t -> 'a t
(** Concatenate an iterator of iterators into one iterator. *)
val flatten : 'a t t -> 'a t
(** Alias for {!concat} *)
val flat_map : ('a -> 'b t) -> 'a t -> 'b t
(** Monadic bind. Intuitively, it applies the function to every
element of the initial iterator, and calls {!concat}.
Formerly [flatMap]
@since 0.5 *)
val flat_map_l : ('a -> 'b list) -> 'a t -> 'b t
(** Convenience function combining {!flat_map} and {!of_list}
@since 0.9 *)
val seq_list : 'a t list -> 'a list t
(** [seq_list l] returns all the ways to pick one element in each sub-iterator
in [l]. Assumes the sub-iterators can be iterated on several times.
@since 0.11 *)
val seq_list_map : ('a -> 'b t) -> 'a list -> 'b list t
(** [seq_list_map f l] maps [f] over every element of [l],
then calls {!seq_list}
@since 0.11 *)
val filter_map : ('a -> 'b option) -> 'a t -> 'b t
(** Map and only keep non-[None] elements
Formerly [fmap]
@since 0.5 *)
val filter_mapi : (int -> 'a -> 'b option) -> 'a t -> 'b t
(** Map with indices, and only keep non-[None] elements
@since 0.11 *)
val filter_count : ('a -> bool) -> 'a t -> int
(** Count how many elements satisfy the given predicate
@since 1.0 *)
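(* A minimal usage sketch (illustrative; not part of the original interface):
   [flat_map] maps each element to a sub-iterator and concatenates the results.
   Data is illustrative.
   {[
     (* (1,1) (2,1) (2,2) (3,1) (3,2) (3,3) *)
     let pairs =
       Iter.(1 -- 3 |> flat_map (fun i -> 1 -- i |> map (fun j -> (i, j))))
     let _ = Iter.to_list pairs
   ]} *)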
val intersperse : 'a -> 'a t -> 'a t
(** Insert the single element between every element of the iterator *)
val keep_some : 'a option t -> 'a t
(** [keep_some l] retains only elements of the form [Some x].
Same as [filter_map (fun x->x)]
@since 1.0 *)
val keep_ok : ('a, _) Result.result t -> 'a t
(** [keep_ok l] retains only elements of the form [Ok x].
@since 1.0 *)
val keep_error : (_, 'e) Result.result t -> 'e t
(** [keep_error l] retains only elements of the form [Error x].
@since 1.0 *)
(** {1 Caching} *)
val persistent : 'a t -> 'a t
(** Iterate on the iterator, storing elements in an efficient internal structure..
The resulting iterator can be iterated on as many times as needed.
{b Note}: calling persistent on an already persistent iterator
will still make a new copy of the iterator! *)
val persistent_lazy : 'a t -> 'a t
(** Lazy version of {!persistent}. When calling [persistent_lazy s],
a new iterator [s'] is immediately returned (without actually consuming
[s]) in constant time; the first time [s'] is iterated on,
it also consumes [s] and caches its content into an inner data
structure that will back [s'] for future iterations.
{b warning}: on the first traversal of [s'], if the traversal
is interrupted prematurely ({!take}, etc.) then [s'] will not be
memorized, and the next call to [s'] will traverse [s] again. *)
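(* A minimal usage sketch (illustrative; not part of the original interface):
   [persistent] caches a transient iterator so it can safely be traversed
   several times. The generator below is purely illustrative.
   {[
     let gen = let r = ref 0 in fun () -> incr r; if !r > 3 then None else Some !r
     let cached = Iter.persistent (Iter.of_gen_once gen)
     let n = Iter.length cached            (* 3 *)
     let total = Iter.fold ( + ) 0 cached  (* 6; the second traversal is fine *)
   ]} *)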
(** {1 Misc} *)
val sort : ?cmp:('a -> 'a -> int) -> 'a t -> 'a t
(** Sort the iterator. Eager, O(n) ram and O(n ln(n)) time.
It iterates on elements of the argument iterator immediately,
before it sorts them. *)
val sort_uniq : ?cmp:('a -> 'a -> int) -> 'a t -> 'a t
(** Sort the iterator and remove duplicates. Eager, same as [sort] *)
val sorted : ?cmp:('a -> 'a -> int) -> 'a t -> bool
(** Checks whether the iterator is sorted. Eager, same as {!sort}.
@since 0.9 *)
val group_succ_by : ?eq:('a -> 'a -> bool) -> 'a t -> 'a list t
(** Group equal consecutive elements. Linear time.
Formerly synonym to [group].
{b note}: Order of items in each list is unspecified.
@since 0.6 *)
val group_by : ?hash:('a -> int) -> ?eq:('a -> 'a -> bool) -> 'a t -> 'a list t
(** Group equal elements, disregarding their order of appearance.
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
{b note}: Order of items in each list is unspecified.
@since 0.6 *)
val count : ?hash:('a -> int) -> ?eq:('a -> 'a -> bool) -> 'a t -> ('a * int) t
(** Map each distinct element to its number of occurrences in the whole seq.
Similar to [group_by seq |> map (fun l->List.hd l, List.length l)]
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
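(* A minimal usage sketch (illustrative; not part of the original interface):
   tallying occurrences with [count]. The word list is illustrative.
   {[
     let words = Iter.of_list [ "a"; "b"; "a"; "c"; "a"; "b" ]
     (* e.g. [("a", 3); ("b", 2); ("c", 1)], in an unspecified order *)
     let tally = Iter.count words |> Iter.to_list
   ]} *)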
val uniq : ?eq:('a -> 'a -> bool) -> 'a t -> 'a t
(** Remove consecutive duplicate elements. Basically this is
like [fun seq -> map List.hd (group seq)]. *)
val product : 'a t -> 'b t -> ('a * 'b) t
(** Cartesian product of iterators. When calling [product a b],
the caller {b MUST} ensure that [b] can be traversed as many times
as required (several times), possibly by calling {!persistent} on it
beforehand. *)
val diagonal_l : 'a list -> ('a * 'a) t
(** All pairs of distinct positions of the list. [diagonal l] will
return the iterator of all [List.nth i l, List.nth j l] if [i < j].
@since 0.9 *)
val diagonal : 'a t -> ('a * 'a) t
(** All pairs of distinct positions of the iterator.
Iterates only once on the iterator, which must be finite.
@since 0.9 *)
val join : join_row:('a -> 'b -> 'c option) -> 'a t -> 'b t -> 'c t
(** [join ~join_row a b] combines every element of [a] with every
element of [b] using [join_row]. If [join_row] returns None, then
the two elements do not combine. Assume that [b] allows for multiple
iterations. *)
val join_by :
?eq:'key equal ->
?hash:'key hash ->
('a -> 'key) ->
('b -> 'key) ->
merge:('key -> 'a -> 'b -> 'c option) ->
'a t ->
'b t ->
'c t
(** [join_by key1 key2 ~merge] is a binary operation
that takes two iterators [a] and [b], projects their
elements resp. with [key1] and [key2], and combine
values [(x,y)] from [(a,b)] with the same [key]
using [merge]. If [merge] returns [None], the combination
of values is discarded.
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
val join_all_by :
?eq:'key equal ->
?hash:'key hash ->
('a -> 'key) ->
('b -> 'key) ->
merge:('key -> 'a list -> 'b list -> 'c option) ->
'a t ->
'b t ->
'c t
(** [join_all_by key1 key2 ~merge] is a binary operation
that takes two iterators [a] and [b], projects their
elements resp. with [key1] and [key2], and, for each key [k]
occurring in at least one of them:
- compute the list [l1] of elements of [a] that map to [k]
- compute the list [l2] of elements of [b] that map to [k]
- call [merge k l1 l2]. If [merge] returns [None], the combination
of values is discarded, otherwise it returns [Some c]
and [c] is inserted in the result.
@since 0.10 *)
val group_join_by :
?eq:'a equal ->
?hash:'a hash ->
('b -> 'a) ->
'a t ->
'b t ->
('a * 'b list) t
(** [group_join_by key2] associates to every element [x] of
the first iterator, all the elements [y] of the second
iterator such that [eq x (key y)]. Elements of the first
iterator without corresponding values in the second one
are mapped to [[]]
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
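(* A minimal usage sketch (illustrative; not part of the original interface):
   joining two iterators on a shared key with [join_by]. The data is illustrative.
   {[
     let people = Iter.of_list [ (1, "alice"); (2, "bob") ]
     let scores = Iter.of_list [ (1, 10); (1, 7); (2, 3) ]
     (* ("alice", 10); ("alice", 7); ("bob", 3), in an unspecified order *)
     let joined =
       Iter.join_by fst fst
         ~merge:(fun _key (_, name) (_, score) -> Some (name, score))
         people scores
       |> Iter.to_list
   ]} *)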
(** {2 Set-like} *)
val inter : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Intersection of two collections. Each element will occur at most once
in the result. Eager.
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
(*$=
  [2;4;5;6] (inter (1--6) (cons 2 (4--10)) |> sort |> to_list)
  [] (inter (0--5) (6--10) |> to_list)
*)
val union : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Union of two collections. Each element will occur at most once
in the result. Eager.
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
(*$=
  [2;4;5;6] (union (4--6) (cons 2 (4--5)) |> sort |> to_list)
*)
val diff : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Set difference. Eager.
@since 0.10 *)
(*$=
  [1;2;8;9;10] (diff (1--10) (3--7) |> to_list)
*)
val subset : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> bool
(** [subset a b] returns [true] if all elements of [a] belong to [b]. Eager.
precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
@since 0.10 *)
(*$T
  subset (2 -- 4) (1 -- 4)
  not (subset (1 -- 4) (2 -- 10))
*)
(** {2 Arithmetic} *)
val max : ?lt:('a -> 'a -> bool) -> 'a t -> 'a option
(** Max element of the iterator, using the given comparison function.
@return None if the iterator is empty, Some [m] where [m] is the maximal
element otherwise *)
val max_exn : ?lt:('a -> 'a -> bool) -> 'a t -> 'a
(** Unsafe version of {!max}
@raise Not_found if the iterator is empty
@since 0.10 *)
val min : ?lt:('a -> 'a -> bool) -> 'a t -> 'a option
(** Min element of the iterator, using the given comparison function.
see {!max} for more details. *)
val min_exn : ?lt:('a -> 'a -> bool) -> 'a t -> 'a
(** Unsafe version of {!min}
@raise Not_found if the iterator is empty
@since 0.10 *)
val sum : int t -> int
(** Sum of elements
@since 0.11 *)
val sumf : float t -> float
(** Sum of elements, using Kahan summation
@since 0.11 *)
(** {2 List-like} *)
val head : 'a t -> 'a option
(** First element, if any, otherwise [None]
@since 0.5.1 *)
val head_exn : 'a t -> 'a
(** First element, if any, fails
@raise Invalid_argument if the iterator is empty
@since 0.5.1 *)
val take : int -> 'a t -> 'a t
(** Take at most [n] elements from the iterator. Works on infinite
iterators. *)
val take_while : ('a -> bool) -> 'a t -> 'a t
(** Take elements while they satisfy the predicate, then stops iterating.
Will work on an infinite iterator [s] if the predicate is false for at
least one element of [s]. *)
val fold_while : ('a -> 'b -> 'a * [ `Stop | `Continue ]) -> 'a -> 'b t -> 'a
(** Folds over elements of the iterator, stopping early if the accumulator
returns [('a, `Stop)]
@since 0.5.5 *)
val drop : int -> 'a t -> 'a t
(** Drop the [n] first elements of the iterator. Lazy. *)
val drop_while : ('a -> bool) -> 'a t -> 'a t
(** Predicate version of {!drop} *)
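(* A minimal usage sketch (illustrative; not part of the original interface):
   early exit with [fold_while] (declared above), summing until the total
   exceeds 10.
   {[
     let bounded_sum =
       Iter.fold_while
         (fun acc x ->
           let acc = acc + x in
           if acc > 10 then (acc, `Stop) else (acc, `Continue))
         0
         Iter.(1 -- 100)
     (* bounded_sum = 15 *)
   ]} *)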
val rev : 'a t -> 'a t
(** Reverse the iterator. O(n) memory and time, needs the
iterator to be finite. The result is persistent and does
not depend on the input being repeatable. *)
val zip_i : 'a t -> (int * 'a) t
(** Zip elements of the iterator with their index in the iterator.
@since 1.0 Changed type to just give an iterator of pairs *)
(** {2 Pair iterators} *)
val fold2 : ('c -> 'a -> 'b -> 'c) -> 'c -> ('a * 'b) t -> 'c
val iter2 : ('a -> 'b -> unit) -> ('a * 'b) t -> unit
val map2 : ('a -> 'b -> 'c) -> ('a * 'b) t -> 'c t
val map2_2 : ('a -> 'b -> 'c) -> ('a -> 'b -> 'd) -> ('a * 'b) t -> ('c * 'd) t
(** [map2_2 f g seq2] maps each [x, y] of seq2 into [f x y, g x y] *)
(** {1 Data structures converters} *)
val to_list : 'a t -> 'a list
(** Convert the iterator into a list. Preserves order of elements.
This function is tail-recursive, but consumes 2*n memory.
If order doesn't matter to you, consider {!to_rev_list}. *)
val to_rev_list : 'a t -> 'a list
(** Get the list of the reversed iterator (more efficient than {!to_list}) *)
val of_list : 'a list -> 'a t
val on_list : ('a t -> 'b t) -> 'a list -> 'b list
(** [on_list f l] is equivalent to [to_list @@ f @@ of_list l].
@since 0.5.2 *)
val pair_with_idx : 'a t -> (int * 'a) t
(** Similar to {!zip_i} but returns a normal iterator of tuples
@since 0.11 *)
val to_opt : 'a t -> 'a option
(** Alias to {!head}
@since 0.5.1 *)
val to_array : 'a t -> 'a array
(** Convert to an array. Currently not very efficient because
an intermediate list is used. *)
val of_array : 'a array -> 'a t
val of_array_i : 'a array -> (int * 'a) t
(** Elements of the array, with their index *)
val array_slice : 'a array -> int -> int -> 'a t
(** [array_slice a i j] Iterator of elements whose indexes range
from [i] to [j] *)
val of_opt : 'a option -> 'a t
(** Iterate on 0 or 1 values.
@since 0.5.1 *)
val of_seq : 'a Seq.t -> 'a t
(** Iterator of elements of a {!Seq.t}.
@since 1.5 *)
val to_seq_persistent : 'a t -> 'a Seq.t
(** Convert to a {!Seq}. Linear in memory and time (a copy is made in memory).
This does not work on infinite iterators.
@since 1.5 *)
val to_stack : 'a Stack.t -> 'a t -> unit
(** Push elements of the iterator on the stack *)
val of_stack : 'a Stack.t -> 'a t
(** Iterator of elements of the stack (same order as [Stack.iter]) *)
val to_queue : 'a Queue.t -> 'a t -> unit
(** Push elements of the iterator into the queue *)
val of_queue : 'a Queue.t -> 'a t
(** Iterator of elements contained in the queue, FIFO order *)
val hashtbl_add : ('a, 'b) Hashtbl.t -> ('a * 'b) t -> unit
(** Add elements of the iterator to the hashtable, with
Hashtbl.add *)
val hashtbl_replace : ('a, 'b) Hashtbl.t -> ('a * 'b) t -> unit
(** Add elements of the iterator to the hashtable, with
Hashtbl.replace (erases conflicting bindings) *)
val to_hashtbl : ('a * 'b) t -> ('a, 'b) Hashtbl.t
(** Build a hashtable from an iterator of key/value pairs *)
val of_hashtbl : ('a, 'b) Hashtbl.t -> ('a * 'b) t
(** Iterator of key/value pairs from the hashtable *)
val hashtbl_keys : ('a, 'b) Hashtbl.t -> 'a t
val hashtbl_values : ('a, 'b) Hashtbl.t -> 'b t
val of_str : string -> char t
val to_str : char t -> string
val concat_str : string t -> string
(** Concatenate strings together, eagerly.
Also see {!intersperse} to add a separator.
@since 0.5 *)
exception OneShotSequence
(** Raised when the user tries to iterate several times on
a transient iterator *)
val of_in_channel : in_channel -> char t
(** Iterates on characters of the input (can block when one
iterates over the iterator). If you need to iterate
several times on this iterator, use {!persistent}.
@raise OneShotIterator when used more than once. *)
val to_buffer : char t -> Buffer.t -> unit
(** Copy content of the iterator into the buffer *)
val int_range : start:int -> stop:int -> int t
(** Iterator on integers in [start...stop] by steps 1. Also see
{!(--)} for an infix version. *)
val int_range_dec : start:int -> stop:int -> int t
(** Iterator on decreasing integers in [stop...start] by steps -1.
See {!(--^)} for an infix version *)
val int_range_by : step:int -> int -> int -> int t
(** [int_range_by ~step i j] is the range starting at [i], including [j],
where the difference between successive elements is [step].
use a negative [step] for a decreasing iterator.
@raise Invalid_argument if [step=0] *)
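(* A minimal usage sketch (illustrative; not part of the original interface):
   stepped ranges with [int_range_by]. Values are illustrative.
   {[
     let evens = Iter.int_range_by ~step:2 0 10      (* 0, 2, 4, 6, 8, 10 *)
     let down = Iter.int_range_by ~step:(-3) 9 0     (* 9, 6, 3, 0 *)
     let _ = (Iter.to_list evens, Iter.to_list down)
   ]} *)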
val bools : bool t
(** Iterates on [true] and [false]
@since 0.7 *)
val of_set : (module Set.S with type elt = 'a and type t = 'b) -> 'b -> 'a t
(** Convert the given set to an iterator. The set module must be provided. *)
val to_set : (module Set.S with type elt = 'a and type t = 'b) -> 'a t -> 'b
(** Convert the iterator to a set, given the proper set module *)
type 'a gen = unit -> 'a option
val of_gen : 'a gen -> 'a t
(** Traverse eagerly the generator and build an iterator from it *)
val of_gen_once : 'a gen -> 'a t
(** One shot iterator using this generator.
It must not be traversed twice.
@since 1.5 *)
val to_gen : 'a t -> 'a gen
(** Make the iterator persistent (O(n)) and then iterate on it. Eager. *)
(** {2 Sets} *)
module Set : sig
module type S = sig
include Set.S
val of_iter : elt iter -> t
val to_iter : t -> elt iter
val to_list : t -> elt list
val of_list : elt list -> t
val of_seq : elt iter -> t
(** @deprecated use {!of_iter} instead *)
val to_seq : t -> elt iter
(** @deprecated use {!to_iter} instead *)
end
(** Create an enriched Set module from the given one *)
module Adapt (X : Set.S) : S with type elt = X.elt and type t = X.t
(** Functor to build an extended Set module from an ordered type *)
module Make (X : Set.OrderedType) : S with type elt = X.t
end
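(* A minimal usage sketch (illustrative; not part of the original interface):
   building an iterator-aware set module with [Make] and filling it from an
   iterator. Names are illustrative.
   {[
     module IntSet = Iter.Set.Make (Int)
     let s = IntSet.of_iter (Iter.of_list [ 3; 1; 3; 2 ])
     let elements = IntSet.to_list s   (* [1; 2; 3] *)
   ]} *)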
(** {2 Maps} *)
module Map : sig
module type S = sig
include Map.S
val to_iter : 'a t -> (key * 'a) iter
val of_iter : (key * 'a) iter -> 'a t
val keys : 'a t -> key iter
val values : 'a t -> 'a iter
val to_list : 'a t -> (key * 'a) list
val of_list : (key * 'a) list -> 'a t
val to_seq : 'a t -> (key * 'a) iter
(** @deprecated use {!to_iter} instead *)
val of_seq : (key * 'a) iter -> 'a t
(** @deprecated use {!of_iter} instead *)
end
(** Adapt a pre-existing Map module to make it iterator-aware *)
module Adapt (M : Map.S) : S with type key = M.key and type 'a t = 'a M.t
(** Create an enriched Map module, with iterator-aware functions *)
module Make (V : Map.OrderedType) : S with type key = V.t
end
(** {1 Random iterators} *)
val random_int : int -> int t
(** Infinite iterator of random integers between 0 and
the given higher bound (see Random.int) *)
val random_bool : bool t
(** Infinite iterator of random bool values *)
val random_float : float -> float t
val random_array : 'a array -> 'a t
(** Iterator of choices of an element in the array *)
val random_list : 'a list -> 'a t
(** Infinite iterator of random elements of the list. Basically the
same as {!random_array}. *)
val shuffle : 'a t -> 'a t
(** [shuffle seq] returns a perfect shuffle of [seq].
Uses O(length seq) memory and time. Eager.
@since 0.7 *)
val shuffle_buffer : int -> 'a t -> 'a t
(** [shuffle_buffer n seq] returns an iterator of elements of [seq] in random
order. The shuffling is *not* uniform. Uses O(n) memory.
The first [n] elements of the iterator are consumed immediately. The
rest is consumed lazily.
@since 0.7 *)
(** {2 Sampling} *)
val sample : int -> 'a t -> 'a array
(** [sample n seq] returns [n] samples of [seq], with uniform probability.
It will consume the iterator and use O(n) memory.
It returns an array of size [min (length seq) n].
@since 0.7 *)
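(* A minimal usage sketch (illustrative; not part of the original interface):
   uniform sampling with [sample]. Sizes and results are illustrative.
   {[
     let three = Iter.sample 3 Iter.(1 -- 1000)   (* e.g. [|421; 87; 903|] *)
     let all_four = Iter.sample 10 Iter.(1 -- 4)  (* only 4 elements available *)
   ]} *)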
(** {1 Infix functions} *)
module Infix : sig
val ( -- ) : int -> int -> int t
(** [a -- b] is the range of integers from [a] to [b], both included,
in increasing order. It will therefore be empty if [a > b]. *)
val ( --^ ) : int -> int -> int t
(** [a --^ b] is the range of integers from [b] to [a], both included,
in decreasing order (starts from [a]).
It will therefore be empty if [a < b]. *)
val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t
(** Monadic bind (infix version of {!flat_map})
@since 0.5 *)
val ( >|= ) : 'a t -> ('a -> 'b) -> 'b t
(** Infix version of {!map}
@since 0.5 *)
val ( <*> ) : ('a -> 'b) t -> 'a t -> 'b t
(** Applicative operator (product+application)
@since 0.5 *)
val ( <+> ) : 'a t -> 'a t -> 'a t
(** Concatenation of iterators
@since 0.5 *)
end
include module type of Infix
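(* A minimal usage sketch (illustrative; not part of the original interface):
   the operators above are available directly on [Iter] thanks to the [include].
   {[
     let sum_of_squares = Iter.(1 -- 10 >|= (fun x -> x * x) |> fold ( + ) 0)
     (* sum_of_squares = 385 *)
   ]} *)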
(** {1 Pretty printing} *)
val pp_seq :
?sep:string ->
(Format.formatter -> 'a -> unit) ->
Format.formatter ->
'a t ->
unit
(** Pretty print an iterator of ['a], using the given pretty printer
to print each elements. An optional separator string can be provided. *)
val pp_buf : ?sep:string -> (Buffer.t -> 'a -> unit) -> Buffer.t -> 'a t -> unit
(** Print into a buffer *)
val to_string : ?sep:string -> ('a -> string) -> 'a t -> string
(** Print into a string *)
(** Basic IO
Very basic interface to manipulate files as iterator of chunks/lines. The
iterators take care of opening and closing files properly; every time
one iterates over an iterator, the file is opened/closed again.
Example: copy a file ["a"] into file ["b"], removing blank lines:
{[
Iterator.(IO.lines_of "a" |> filter (fun l-> l<> "") |> IO.write_lines "b");;
]}
By chunks of [4096] bytes:
{[
Iterator.IO.(chunks_of ~size:4096 "a" |> write_to "b");;
]}
Read the lines of a file into a list:
{[
Iterator.IO.lines_of "a" |> Iterator.to_list
]}
@since 0.5.1 *)
module IO : sig
val lines_of : ?mode:int -> ?flags:open_flag list -> string -> string t
(** [lines_of filename] reads all lines of the given file. It raises the
same exception as would opening the file and read from it, except
from [End_of_file] (which is caught). The file is {b always} properly
closed.
Every time the iterator is iterated on, the file is opened again, so
different iterations might return different results
@param mode default [0o644]
@param flags default: [[Open_rdonly]] *)
val chunks_of :
?mode:int -> ?flags:open_flag list -> ?size:int -> string -> string t
(** Read chunks of the given [size] from the file. The last chunk might be
smaller. Behaves like {!lines_of} regarding errors and options.
Every time the iterator is iterated on, the file is opened again, so
different iterations might return different results *)
val write_to :
?mode:int -> ?flags:open_flag list -> string -> string t -> unit
(** [write_to filename seq] writes all strings from [seq] into the given
file. It takes care of opening and closing the file.
@param mode default [0o644]
@param flags used by [open_out_gen]. Default: [[Open_creat;Open_wronly]]. *)
val write_bytes_to :
?mode:int -> ?flags:open_flag list -> string -> Bytes.t t -> unit
(** @since 0.5.4 *)
val write_lines :
?mode:int -> ?flags:open_flag list -> string -> string t -> unit
(** Same as {!write_to}, but intercales ['\n'] between each string *)
val write_bytes_lines :
?mode:int -> ?flags:open_flag list -> string -> Bytes.t t -> unit
(** @since 0.5.4 *)
end
| null | https://raw.githubusercontent.com/c-cube/iter/29ece562e2a1d4e62ca286ffd5af356851ab5bf5/src/Iter.mli | ocaml | * An iterator of values of type ['a]. If you give it a function ['a -> unit]
it will be applied to every element of the iterator successively.
* {1 Creation}
* Build an iterator from a iter function
* Call the function repeatedly until it returns None. This
iterator is transient, use {!persistent} if needed!
* Empty iterator. It contains no element.
* Same as {!cons} but yields the element after iterating on [l].
* Infinite iterator of the same element. You may want to look
at {!take} and the likes if you iterate on it.
* [iterate f x] is the infinite iterator [x, f(x), f(f(x)), ...]
* Iterator that calls the given function to produce elements.
The iterator may be transient (depending on the function), and definitely
is infinite. You may want to use {!take} and {!persistent}.
* Cycle forever through the given iterator. Assume the given iterator can
be traversed any amount of times (not transient). This yields an
infinite iterator, you should use something like {!take} not to loop
forever.
* [unfoldr f b] will apply [f] to [b]. If it
yields [Some (x,b')] then [x] is returned
and unfoldr recurses with [b'].
* Iterator of intermediate results
* Consume the iterator, passing all its arguments to the function.
Basically [iter f seq] is just [seq f].
* Iterate on elements and their index in the iterator
* Fold over elements of the iterator, consuming it
* Fold over elements of the iterator and their index, consuming it
* Map objects of the iterator into other elements, lazily
* Map objects, along with their index in the iterator
* Do all elements satisfy the predicate?
* Exists there some element satisfying the predicate?
* How long is the iterator? Forces the iterator.
* Is the iterator empty? Forces the iterator.
* {1 Transformation}
* Filter on elements of the iterator
* Insert the single element between every element of the iterator
* {1 Caching}
* Iterate on the iterator, storing elements in an efficient internal structure..
The resulting iterator can be iterated on as many times as needed.
{b Note}: calling persistent on an already persistent iterator
will still make a new copy of the iterator!
* {1 Misc}
* Sort the iterator. Eager, O(n) ram and O(n ln(n)) time.
It iterates on elements of the argument iterator immediately,
before it sorts them.
* Sort the iterator and remove duplicates. Eager, same as [sort]
* Remove consecutive duplicate elements. Basically this is
like [fun seq -> map List.hd (group seq)].
* Take at most [n] elements from the iterator. Works on infinite
iterators.
* Predicate version of {!drop}
* Reverse the iterator. O(n) memory and time, needs the
iterator to be finite. The result is persistent and does
not depend on the input being repeatable.
* [map2_2 f g seq2] maps each [x, y] of seq2 into [f x y, g x y]
* Get the list of the reversed iterator (more efficient than {!to_list})
* Convert to an array. Currently not very efficient because
an intermediate list is used.
* Elements of the array, with their index
* [array_slice a i j] Iterator of elements whose indexes range
from [i] to [j]
* Push elements of the iterator on the stack
* Push elements of the iterator into the queue
* Iterator of elements contained in the queue, FIFO order
* Add elements of the iterator to the hashtable, with
Hashtbl.add
* Add elements of the iterator to the hashtable, with
Hashtbl.replace (erases conflicting bindings)
* Build a hashtable from an iterator of key/value pairs
* Iterator of key/value pairs from the hashtable
* Raised when the user tries to iterate several times on
a transient iterator
* Copy content of the iterator into the buffer
* Iterator on decreasing integers in [stop...start] by steps -1.
See {!(--^)} for an infix version
* [int_range_by ~step i j] is the range starting at [i], including [j],
where the difference between successive elements is [step].
use a negative [step] for a decreasing iterator.
@raise Invalid_argument if [step=0]
* Convert the given set to an iterator. The set module must be provided.
* Convert the iterator to a set, given the proper set module
* Traverse eagerly the generator and build an iterator from it
* Make the iterator persistent (O(n)) and then iterate on it. Eager.
* @deprecated use {!of_iter} instead
* @deprecated use {!to_iter} instead
* Functor to build an extended Set module from an ordered type
* {2 Maps}
* @deprecated use {!to_iter} instead
* @deprecated use {!of_iter} instead
* Adapt a pre-existing Map module to make it iterator-aware
* Create an enriched Map module, with iterator-aware functions
* Infinite iterator of random integers between 0 and
the given higher bound (see Random.int)
* Infinite iterator of random bool values
* Iterator of choices of an element in the array
* Infinite iterator of random elements of the list. Basically the
same as {!random_array}.
* {2 Sampling}
* [a -- b] is the range of integers from [a] to [b], both included,
in increasing order. It will therefore be empty if [a > b].
* [a --^ b] is the range of integers from [b] to [a], both included,
in decreasing order (starts from [a]).
It will therefore be empty if [a < b].
* Pretty print an iterator of ['a], using the given pretty printer
to print each elements. An optional separator string can be provided.
* Print into a buffer
* Print into a string
* [write_to filename seq] writes all strings from [seq] into the given
file. It takes care of opening and closing the file.
@param mode default [0o644]
@param flags used by [open_out_gen]. Default: [[Open_creat;Open_wronly]].
* @since 0.5.4
* Same as {!write_to}, but intercales ['\n'] between each string
* @since 0.5.4 | * Simple and Efficient Iterators .
The iterators are designed to allow easy transfer ( mappings ) between data
structures , without defining [ n^2 ] conversions between the [ n ] types . The
implementation relies on the assumption that an iterator can be iterated
on as many times as needed ; this choice allows for high performance
of many combinators . However , for transient iterators , the { ! persistent }
function is provided , storing elements of a transient iterator
in memory ; the iterator can then be used several times ( See further ) .
Note that some combinators also return iterators ( e.g. { ! group } ) . The
transformation is computed on the fly every time one iterates over
the resulting iterator . If a transformation performs heavy computation ,
{ ! persistent } can also be used as intermediate storage .
Most functions are { b lazy } , i.e. they do not actually use their arguments
until their result is iterated on . For instance , if one calls { ! map }
on an iterator , one gets a new iterator , but nothing else happens until
this new iterator is used ( by folding or iterating on it ) .
If an iterator is built from an iteration function that is { b repeatable }
( i.e. calling it several times always iterates on the same set of
elements , for instance List.iter or Map.iter ) , then
the resulting { ! t } object is also repeatable . For { b one - time iter functions }
such as iteration on a file descriptor or a { ! Seq } ,
the { ! persistent } function can be used to iterate and store elements in
a memory structure ; the result is an iterator that iterates on the elements
of this memory structure , cheaply and repeatably .
The iterators are designed to allow easy transfer (mappings) between data
structures, without defining [n^2] conversions between the [n] types. The
implementation relies on the assumption that an iterator can be iterated
on as many times as needed; this choice allows for high performance
of many combinators. However, for transient iterators, the {!persistent}
function is provided, storing elements of a transient iterator
in memory; the iterator can then be used several times (See further).
Note that some combinators also return iterators (e.g. {!group}). The
transformation is computed on the fly every time one iterates over
the resulting iterator. If a transformation performs heavy computation,
{!persistent} can also be used as intermediate storage.
Most functions are {b lazy}, i.e. they do not actually use their arguments
until their result is iterated on. For instance, if one calls {!map}
on an iterator, one gets a new iterator, but nothing else happens until
this new iterator is used (by folding or iterating on it).
If an iterator is built from an iteration function that is {b repeatable}
(i.e. calling it several times always iterates on the same set of
elements, for instance List.iter or Map.iter), then
the resulting {!t} object is also repeatable. For {b one-time iter functions}
such as iteration on a file descriptor or a {!Seq},
the {!persistent} function can be used to iterate and store elements in
a memory structure; the result is an iterator that iterates on the elements
of this memory structure, cheaply and repeatably.
*)
type +'a t = ('a -> unit) -> unit
type +'a iter = 'a t
type 'a equal = 'a -> 'a -> bool
type 'a hash = 'a -> int
val from_iter : (('a -> unit) -> unit) -> 'a t
val from_labelled_iter : (f:('a -> unit) -> unit) -> 'a t
* Build an iterator from a labelled iter function
@since 1.2
@since 1.2 *)
val from_fun : (unit -> 'a option) -> 'a t
val empty : 'a t
val singleton : 'a -> 'a t
* iterator , with exactly one element .
val doubleton : 'a -> 'a -> 'a t
* Iterator with exactly two elements
val init : (int -> 'a) -> 'a t
* [ init f ] is the infinite iterator [ f 0 ; f 1 ; f 2 ; … ] .
@since 0.9
@since 0.9 *)
val cons : 'a -> 'a t -> 'a t
* [ cons x l ] yields [ x ] , then yields from [ l ] .
Same as [ append ( singleton x ) l ] .
Caution : it is advised not to build long iterators out of [ cons ] ,
because it 's inefficient . Each additional [ cons x i ] adds one
layer of function call per item traversed in [ i ] .
Same as [append (singleton x) l].
Caution: it is advised not to build long iterators out of [cons],
because it's inefficient. Each additional [cons x i] adds one
layer of function call per item traversed in [i]. *)
val snoc : 'a t -> 'a -> 'a t
val return : 'a -> 'a t
* Synonym to { ! }
val pure : 'a -> 'a t
* Synonym to { ! }
val repeat : 'a -> 'a t
val iterate : ('a -> 'a) -> 'a -> 'a t
val forever : (unit -> 'b) -> 'b t
val cycle : 'a t -> 'a t
val unfoldr : ('b -> ('a * 'b) option) -> 'b -> 'a t
val scan : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b t
* { 1 Consumption }
val iter : ('a -> unit) -> 'a t -> unit
val iteri : (int -> 'a -> unit) -> 'a t -> unit
val for_each : 'a t -> ('a -> unit) -> unit
* Consume the iterator , passing all its arguments to the function .
[ for_each seq f ] is the same as [ iter f seq ] , i.e. , [ iter ] with
arguments reversed .
@since 1.4
[for_each seq f] is the same as [iter f seq], i.e., [iter] with
arguments reversed.
@since 1.4 *)
val for_eachi : 'a t -> (int -> 'a -> unit) -> unit
* Iterate on elements and their index in the iterator .
[ for_eachi seq f ] is the same as [ iteri f seq ] , i.e. , [ iteri ] with
arguments reversed .
@since 1.4
[for_eachi seq f] is the same as [iteri f seq], i.e., [iteri] with
arguments reversed.
@since 1.4 *)
val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val foldi : ('a -> int -> 'b -> 'a) -> 'a -> 'b t -> 'a
val fold_map : ('acc -> 'a -> 'acc * 'b) -> 'acc -> 'a t -> 'b t
* [ fold_map f acc l ] is like { ! map } , but it carries some state as in
{ ! fold } . The state is not returned , it is just used to thread some
information to the map function .
@since 0.9
{!fold}. The state is not returned, it is just used to thread some
information to the map function.
@since 0.9 *)
val fold_filter_map : ('acc -> 'a -> 'acc * 'b option) -> 'acc -> 'a t -> 'b t
* [ fold_filter_map f acc l ] is a { ! fold_map}-like function , but the
function can choose to skip an element by retuning [ None ] .
@since 0.9
function can choose to skip an element by retuning [None].
@since 0.9 *)
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (int -> 'a -> 'b) -> 'a t -> 'b t
val map_by_2 : ('a -> 'a -> 'a) -> 'a t -> 'a t
(** Map objects two by two. Lazily.
    The last element is kept in the iterator if the count is odd.
    @since 0.7 *)
val for_all : ('a -> bool) -> 'a t -> bool
val exists : ('a -> bool) -> 'a t -> bool
val mem : ?eq:('a -> 'a -> bool) -> 'a -> 'a t -> bool
(** Is the value a member of the iterator?
    @param eq the equality predicate to use (default [(=)])
    @since 0.5 *)
val find : ('a -> 'b option) -> 'a t -> 'b option
(** Find the first element on which the function doesn't return [None]
    @since 0.5 *)
val find_map : ('a -> 'b option) -> 'a t -> 'b option
(** Alias to {!find}
    @since 0.10 *)
val findi : (int -> 'a -> 'b option) -> 'a t -> 'b option
(** Indexed version of {!find}
    @since 0.9 *)
val find_mapi : (int -> 'a -> 'b option) -> 'a t -> 'b option
(** Alias to {!findi}
    @since 0.10 *)
val find_pred : ('a -> bool) -> 'a t -> 'a option
(** [find_pred p l] finds the first element of [l] that satisfies [p],
    or returns [None] if no element satisfies [p]
    @since 0.9 *)
val find_pred_exn : ('a -> bool) -> 'a t -> 'a
(** Unsafe version of {!find_pred}
    @raise Not_found if no such element is found
    @since 0.9 *)
val length : 'a t -> int
val is_empty : 'a t -> bool
val filter : ('a -> bool) -> 'a t -> 'a t
val append : 'a t -> 'a t -> 'a t
(** Append two iterators. Iterating on the result is like iterating
    on the first, then on the second. *)
val append_l : 'a t list -> 'a t
(** Append iterators. Iterating on the result is like iterating
    on each iterator of the list in order.
    @since 0.11 *)
val concat : 'a t t -> 'a t
(** Concatenate an iterator of iterators into one iterator. *)
val flatten : 'a t t -> 'a t
(** Alias for {!concat} *)
val flat_map : ('a -> 'b t) -> 'a t -> 'b t
(** Monadic bind. Intuitively, it applies the function to every
    element of the initial iterator, and calls {!concat}.
    Formerly [flatMap]
    @since 0.5 *)
val flat_map_l : ('a -> 'b list) -> 'a t -> 'b t
(** Convenience function combining {!flat_map} and {!of_list}
    @since 0.9 *)
val seq_list : 'a t list -> 'a list t
(** [seq_list l] returns all the ways to pick one element in each sub-iterator
    in [l]. Assumes the sub-iterators can be iterated on several times.
    @since 0.11 *)
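(* Illustrative sketch (not part of the original interface): [seq_list]
   enumerates one choice per sub-iterator, e.g.
   {[
     Iter.seq_list [Iter.of_list [1; 2]; Iter.of_list [10; 20]]
     |> Iter.to_list
     (* = [[1; 10]; [1; 20]; [2; 10]; [2; 20]] (order shown is indicative) *)
   ]} *)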
val seq_list_map : ('a -> 'b t) -> 'a list -> 'b list t
(** [seq_list_map f l] maps [f] over every element of [l],
    then calls {!seq_list}
    @since 0.11 *)
val filter_map : ('a -> 'b option) -> 'a t -> 'b t
(** Map and only keep non-[None] elements
    Formerly [fmap]
    @since 0.5 *)
val filter_mapi : (int -> 'a -> 'b option) -> 'a t -> 'b t
(** Map with indices, and only keep non-[None] elements
    @since 0.11 *)
val filter_count : ('a -> bool) -> 'a t -> int
(** Count how many elements satisfy the given predicate
    @since 1.0 *)
val intersperse : 'a -> 'a t -> 'a t
val keep_some : 'a option t -> 'a t
(** [filter_some l] retains only elements of the form [Some x].
    Same as [filter_map (fun x->x)]
    @since 1.0 *)
val keep_ok : ('a, _) Result.result t -> 'a t
(** [keep_ok l] retains only elements of the form [Ok x].
    @since 1.0 *)
val keep_error : (_, 'e) Result.result t -> 'e t
(** [keep_error l] retains only elements of the form [Error x].
    @since 1.0 *)
val persistent : 'a t -> 'a t
val persistent_lazy : 'a t -> 'a t
(** Lazy version of {!persistent}. When calling [persistent_lazy s],
    a new iterator [s'] is immediately returned (without actually consuming
    [s]) in constant time; the first time [s'] is iterated on,
    it also consumes [s] and caches its content into an inner data
    structure that will back [s'] for future iterations.
    {b warning}: on the first traversal of [s'], if the traversal
    is interrupted prematurely ({!take}, etc.) then [s'] will not be
    memorized, and the next call to [s'] will traverse [s] again. *)
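(* Illustrative sketch (not part of the original interface): with a one-shot
   source, [persistent_lazy] lets later traversals hit the cache instead of
   the source, provided the first traversal ran to completion:
   {[
     let it = Iter.persistent_lazy (Iter.of_gen one_shot_gen) in
     let n  = Iter.length it in    (* consumes the generator once *)
     let n' = Iter.length it in    (* served from the cached copy *)
     assert (n = n')
   ]}
   [one_shot_gen] is a hypothetical ['a gen] value. *)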
val sort : ?cmp:('a -> 'a -> int) -> 'a t -> 'a t
val sort_uniq : ?cmp:('a -> 'a -> int) -> 'a t -> 'a t
val sorted : ?cmp:('a -> 'a -> int) -> 'a t -> bool
(** Checks whether the iterator is sorted. Eager, same as {!sort}.
    @since 0.9 *)
val group_succ_by : ?eq:('a -> 'a -> bool) -> 'a t -> 'a list t
(** Group equal consecutive elements. Linear time.
    Formerly synonym to [group].
    {b note}: Order of items in each list is unspecified.
    @since 0.6 *)
val group_by : ?hash:('a -> int) -> ?eq:('a -> 'a -> bool) -> 'a t -> 'a list t
(** Group equal elements, disregarding their order of appearance.
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    {b note}: Order of items in each list is unspecified.
    @since 0.6 *)
val count : ?hash:('a -> int) -> ?eq:('a -> 'a -> bool) -> 'a t -> ('a * int) t
(** Map each distinct element to its number of occurrences in the whole seq.
    Similar to [group_by seq |> map (fun l->List.hd l, List.length l)]
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
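(* Illustrative sketch (not part of the original interface):
   {[
     Iter.count (Iter.of_list ["a"; "b"; "a"]) |> Iter.to_list
     (* = [("a", 2); ("b", 1)] up to ordering, which is unspecified *)
   ]} *)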
val uniq : ?eq:('a -> 'a -> bool) -> 'a t -> 'a t
val product : 'a t -> 'b t -> ('a * 'b) t
(** Cartesian product of iterators. When calling [product a b],
    the caller {b MUST} ensure that [b] can be traversed as many times
    as required (several times), possibly by calling {!persistent} on it
    beforehand. *)
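(* Illustrative sketch (not part of the original interface): make the second
   iterator re-traversable before taking the product:
   {[
     let b = Iter.persistent one_shot_b in
     Iter.product (Iter.of_list [1; 2]) b
   ]}
   [one_shot_b] stands for any iterator that cannot be replayed on its own. *)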
val diagonal_l : 'a list -> ('a * 'a) t
(** All pairs of distinct positions of the list. [diagonal l] will
    return the iterator of all [List.nth i l, List.nth j l] if [i < j].
    @since 0.9 *)
val diagonal : 'a t -> ('a * 'a) t
(** All pairs of distinct positions of the iterator.
    Iterates only once on the iterator, which must be finite.
    @since 0.9 *)
val join : join_row:('a -> 'b -> 'c option) -> 'a t -> 'b t -> 'c t
(** [join ~join_row a b] combines every element of [a] with every
    element of [b] using [join_row]. If [join_row] returns None, then
    the two elements do not combine. Assume that [b] allows for multiple
    iterations. *)
val join_by :
?eq:'key equal ->
?hash:'key hash ->
('a -> 'key) ->
('b -> 'key) ->
merge:('key -> 'a -> 'b -> 'c option) ->
'a t ->
'b t ->
'c t
(** [join key1 key2 ~merge] is a binary operation
    that takes two iterators [a] and [b], projects their
    elements resp. with [key1] and [key2], and combine
    values [(x,y)] from [(a,b)] with the same [key]
    using [merge]. If [merge] returns [None], the combination
    of values is discarded.
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
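(* Illustrative sketch (not part of the original interface): join two keyed
   iterators on a shared key; every name below is hypothetical:
   {[
     Iter.join_by fst fst
       ~merge:(fun _key (_, person) (_, city) -> Some (person, city))
       people cities
   ]}
   where [people : (id * string) Iter.t] and [cities : (id * string) Iter.t]. *)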
val join_all_by :
?eq:'key equal ->
?hash:'key hash ->
('a -> 'key) ->
('b -> 'key) ->
merge:('key -> 'a list -> 'b list -> 'c option) ->
'a t ->
'b t ->
'c t
(** [join_all_by key1 key2 ~merge] is a binary operation
    that takes two iterators [a] and [b], projects their
    elements resp. with [key1] and [key2], and, for each key [k]
    occurring in at least one of them:
    - compute the list [l1] of elements of [a] that map to [k]
    - compute the list [l2] of elements of [b] that map to [k]
    - call [merge k l1 l2]. If [merge] returns [None], the combination
      of values is discarded, otherwise it returns [Some c]
      and [c] is inserted in the result.
    @since 0.10 *)
val group_join_by :
?eq:'a equal ->
?hash:'a hash ->
('b -> 'a) ->
'a t ->
'b t ->
('a * 'b list) t
(** [group_join_by key2] associates to every element [x] of
    the first iterator, all the elements [y] of the second
    iterator such that [eq x (key y)]. Elements of the first
    iterators without corresponding values in the second one
    are mapped to [[]]
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
(** {2 Set-like} *)
val inter : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Intersection of two collections. Each element will occur at most once
    in the result. Eager.
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
(*$=
  [2;4;5;6] (inter (1--6) (cons 2 (4--10)) |> sort |> to_list)
  [] (inter (0--5) (6--10) |> to_list)
*)
val union : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Union of two collections. Each element will occur at most once
    in the result. Eager.
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
(*$=
  [2;4;5;6] (union (4--6) (cons 2 (4--5)) |> sort |> to_list)
*)
val diff : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> 'a t
(** Set difference. Eager.
    @since 0.10 *)
(*$=
  [1;2;8;9;10] (diff (1--10) (3--7) |> to_list)
*)
val subset : ?eq:'a equal -> ?hash:'a hash -> 'a t -> 'a t -> bool
(** [subset a b] returns [true] if all elements of [a] belong to [b]. Eager.
    precondition: for any [x] and [y], if [eq x y] then [hash x=hash y] must hold.
    @since 0.10 *)
(*$T
  subset (2 -- 4) (1 -- 4)
  not (subset (1 -- 4) (2 -- 10))
*)
(** {2 Arithmetic} *)
val max : ?lt:('a -> 'a -> bool) -> 'a t -> 'a option
(** Max element of the iterator, using the given comparison function.
    @return None if the iterator is empty, Some [m] where [m] is the maximal
      element otherwise *)
val max_exn : ?lt:('a -> 'a -> bool) -> 'a t -> 'a
(** Unsafe version of {!max}
    @raise Not_found if the iterator is empty
    @since 0.10 *)
val min : ?lt:('a -> 'a -> bool) -> 'a t -> 'a option
(** Min element of the iterator, using the given comparison function.
    see {!max} for more details. *)
val min_exn : ?lt:('a -> 'a -> bool) -> 'a t -> 'a
(** Unsafe version of {!min}
    @raise Not_found if the iterator is empty
    @since 0.10 *)
val sum : int t -> int
(** Sum of elements
    @since 0.11 *)
val sumf : float t -> float
(** Sum of elements, using Kahan summation
    @since 0.11 *)
(** {2 List-like} *)
val head : 'a t -> 'a option
(** First element, if any, otherwise [None]
    @since 0.5.1 *)
val head_exn : 'a t -> 'a
(** First element, if any, fails
    @raise Invalid_argument if the iterator is empty
    @since 0.5.1 *)
val take : int -> 'a t -> 'a t
val take_while : ('a -> bool) -> 'a t -> 'a t
(** Take elements while they satisfy the predicate, then stops iterating.
    Will work on an infinite iterator [s] if the predicate is false for at
    least one element of [s]. *)
val fold_while : ('a -> 'b -> 'a * [ `Stop | `Continue ]) -> 'a -> 'b t -> 'a
(** Folds over elements of the iterator, stopping early if the accumulator
    returns [('a, `Stop)]
    @since 0.5.5 *)
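(* Illustrative sketch (not part of the original interface): sum elements but
   stop as soon as the running total reaches 10:
   {[
     Iter.fold_while
       (fun acc x ->
          let acc = acc + x in
          if acc >= 10 then acc, `Stop else acc, `Continue)
       0 (Iter.of_list [4; 5; 6; 7])
     (* = 15: the element 6 pushes the total past 10, so 7 is never seen *)
   ]} *)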
val drop : int -> 'a t -> 'a t
(** Drop the [n] first elements of the iterator. Lazy. *)
val drop_while : ('a -> bool) -> 'a t -> 'a t
val rev : 'a t -> 'a t
val zip_i : 'a t -> (int * 'a) t
(** Zip elements of the iterator with their index in the iterator.
    @since 1.0 Changed type to just give an iterator of pairs *)
(** {2 Pair iterators} *)
val fold2 : ('c -> 'a -> 'b -> 'c) -> 'c -> ('a * 'b) t -> 'c
val iter2 : ('a -> 'b -> unit) -> ('a * 'b) t -> unit
val map2 : ('a -> 'b -> 'c) -> ('a * 'b) t -> 'c t
val map2_2 : ('a -> 'b -> 'c) -> ('a -> 'b -> 'd) -> ('a * 'b) t -> ('c * 'd) t
(** {1 Data structures converters} *)
val to_list : 'a t -> 'a list
(** Convert the iterator into a list. Preserves order of elements.
    This function is tail-recursive, but consumes 2*n memory.
    If order doesn't matter to you, consider {!to_rev_list}. *)
val to_rev_list : 'a t -> 'a list
val of_list : 'a list -> 'a t
val on_list : ('a t -> 'b t) -> 'a list -> 'b list
(** [on_list f l] is equivalent to [to_list @@ f @@ of_list l].
    @since 0.5.2 *)
val pair_with_idx : 'a t -> (int * 'a) t
(** Similar to {!zip_i} but returns a normal iterator of tuples
    @since 0.11 *)
val to_opt : 'a t -> 'a option
(** Alias to {!head}
    @since 0.5.1 *)
val to_array : 'a t -> 'a array
val of_array : 'a array -> 'a t
val of_array_i : 'a array -> (int * 'a) t
val array_slice : 'a array -> int -> int -> 'a t
val of_opt : 'a option -> 'a t
(** Iterate on 0 or 1 values.
    @since 0.5.1 *)
val of_seq : 'a Seq.t -> 'a t
(** Iterator of elements of a {!Seq.t}.
    @since 1.5 *)
val to_seq_persistent : 'a t -> 'a Seq.t
(** Convert to a {!Seq}. Linear in memory and time (a copy is made in memory).
    This does not work on infinite iterators.
    @since 1.5 *)
val to_stack : 'a Stack.t -> 'a t -> unit
val of_stack : 'a Stack.t -> 'a t
(** Iterator of elements of the stack (same order as [Stack.iter]) *)
val to_queue : 'a Queue.t -> 'a t -> unit
val of_queue : 'a Queue.t -> 'a t
val hashtbl_add : ('a, 'b) Hashtbl.t -> ('a * 'b) t -> unit
val hashtbl_replace : ('a, 'b) Hashtbl.t -> ('a * 'b) t -> unit
val to_hashtbl : ('a * 'b) t -> ('a, 'b) Hashtbl.t
val of_hashtbl : ('a, 'b) Hashtbl.t -> ('a * 'b) t
val hashtbl_keys : ('a, 'b) Hashtbl.t -> 'a t
val hashtbl_values : ('a, 'b) Hashtbl.t -> 'b t
val of_str : string -> char t
val to_str : char t -> string
val concat_str : string t -> string
(** Concatenate strings together, eagerly.
    Also see {!intersperse} to add a separator.
    @since 0.5 *)
exception OneShotSequence
val of_in_channel : in_channel -> char t
(** Iterates on characters of the input (can block when one
    iterates over the iterator). If you need to iterate
    several times on this iterator, use {!persistent}.
    @raise OneShotIterator when used more than once. *)
val to_buffer : char t -> Buffer.t -> unit
val int_range : start:int -> stop:int -> int t
(** Iterator on integers in [start...stop] by steps 1. Also see
    {!(--)} for an infix version. *)
val int_range_dec : start:int -> stop:int -> int t
val int_range_by : step:int -> int -> int -> int t
val bools : bool t
(** Iterates on [true] and [false]
    @since 0.7 *)
val of_set : (module Set.S with type elt = 'a and type t = 'b) -> 'b -> 'a t
val to_set : (module Set.S with type elt = 'a and type t = 'b) -> 'a t -> 'b
type 'a gen = unit -> 'a option
val of_gen : 'a gen -> 'a t
val of_gen_once : 'a gen -> 'a t
(** One shot iterator using this generator.
    It must not be traversed twice.
    @since 1.5 *)
val to_gen : 'a t -> 'a gen
(** {2 Sets} *)
module Set : sig
module type S = sig
include Set.S
val of_iter : elt iter -> t
val to_iter : t -> elt iter
val to_list : t -> elt list
val of_list : elt list -> t
val of_seq : elt iter -> t
val to_seq : t -> elt iter
end
(** Create an enriched Set module from the given one *)
module Adapt (X : Set.S) : S with type elt = X.elt and type t = X.t
module Make (X : Set.OrderedType) : S with type elt = X.t
end
module Map : sig
module type S = sig
include Map.S
val to_iter : 'a t -> (key * 'a) iter
val of_iter : (key * 'a) iter -> 'a t
val keys : 'a t -> key iter
val values : 'a t -> 'a iter
val to_list : 'a t -> (key * 'a) list
val of_list : (key * 'a) list -> 'a t
val to_seq : 'a t -> (key * 'a) iter
val of_seq : (key * 'a) iter -> 'a t
end
module Adapt (M : Map.S) : S with type key = M.key and type 'a t = 'a M.t
module Make (V : Map.OrderedType) : S with type key = V.t
end
(** {1 Random iterators} *)
val random_int : int -> int t
val random_bool : bool t
val random_float : float -> float t
val random_array : 'a array -> 'a t
val random_list : 'a list -> 'a t
val shuffle : 'a t -> 'a t
(** [shuffle seq] returns a perfect shuffle of [seq].
    Uses O(length seq) memory and time. Eager.
    @since 0.7 *)
val shuffle_buffer : int -> 'a t -> 'a t
(** [shuffle_buffer n seq] returns an iterator of element of [seq] in random
    order. The shuffling is *not* uniform. Uses O(n) memory.
    The first [n] elements of the iterator are consumed immediately. The
    rest is consumed lazily.
    @since 0.7 *)
val sample : int -> 'a t -> 'a array
(** [sample n seq] returns k samples of [seq], with uniform probability.
    It will consume the iterator and use O(n) memory.
    It returns an array of size [min (length seq) n].
    @since 0.7 *)
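(* Illustrative sketch (not part of the original interface): draw at most 5
   uniform samples from a finite iterator:
   {[
     let picks = Iter.sample 5 (Iter.of_list big_list) in
     Array.length picks   (* = min 5 (List.length big_list) *)
   ]}
   [big_list] is a hypothetical list of candidates. *)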
(** {1 Infix functions} *)
module Infix : sig
val ( -- ) : int -> int -> int t
val ( --^ ) : int -> int -> int t
val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t
(** Monadic bind (infix version of {!flat_map})
    @since 0.5 *)
val ( >|= ) : 'a t -> ('a -> 'b) -> 'b t
(** Infix version of {!map}
    @since 0.5 *)
val ( <*> ) : ('a -> 'b) t -> 'a t -> 'b t
(** Applicative operator (product+application)
    @since 0.5 *)
val ( <+> ) : 'a t -> 'a t -> 'a t
(** Concatenation of iterators
    @since 0.5 *)
end
include module type of Infix
(** {1 Pretty printing} *)
val pp_seq :
?sep:string ->
(Format.formatter -> 'a -> unit) ->
Format.formatter ->
'a t ->
unit
val pp_buf : ?sep:string -> (Buffer.t -> 'a -> unit) -> Buffer.t -> 'a t -> unit
val to_string : ?sep:string -> ('a -> string) -> 'a t -> string
(** Basic IO
    Very basic interface to manipulate files as iterator of chunks/lines. The
    iterators take care of opening and closing files properly; every time
    one iterates over an iterator, the file is opened/closed again.
    Example: copy a file ["a"] into file ["b"], removing blank lines:
    {[
      Iterator.(IO.lines_of "a" |> filter (fun l-> l<> "") |> IO.write_lines "b");;
    ]}
    By chunks of [4096] bytes:
    {[
      Iterator.IO.(chunks_of ~size:4096 "a" |> write_to "b");;
    ]}
    Read the lines of a file into a list:
    {[
      Iterator.IO.lines "a" |> Iterator.to_list
    ]}
    @since 0.5.1 *)
module IO : sig
val lines_of : ?mode:int -> ?flags:open_flag list -> string -> string t
(** [lines_of filename] reads all lines of the given file. It raises the
    same exception as would opening the file and read from it, except
    from [End_of_file] (which is caught). The file is {b always} properly
    closed.
    Every time the iterator is iterated on, the file is opened again, so
    different iterations might return different results
    @param mode default [0o644]
    @param flags default: [[Open_rdonly]] *)
val chunks_of :
?mode:int -> ?flags:open_flag list -> ?size:int -> string -> string t
(** Read chunks of the given [size] from the file. The last chunk might be
    smaller. Behaves like {!lines_of} regarding errors and options.
    Every time the iterator is iterated on, the file is opened again, so
    different iterations might return different results *)
val write_to :
?mode:int -> ?flags:open_flag list -> string -> string t -> unit
val write_bytes_to :
?mode:int -> ?flags:open_flag list -> string -> Bytes.t t -> unit
val write_lines :
?mode:int -> ?flags:open_flag list -> string -> string t -> unit
val write_bytes_lines :
?mode:int -> ?flags:open_flag list -> string -> Bytes.t t -> unit
end
|
d5fc2b415a142f18e903e0d293107c3db65cbcf01a8cce9efa95ede64b4785d8 | racket/math | utils.rkt | #lang typed/racket/base
(require racket/performance-hint
racket/string
racket/fixnum
math/base
"matrix-types.rkt"
"../unsafe.rkt"
"../array/array-struct.rkt"
"../vector/vector-mutate.rkt")
(provide (all-defined-out))
(: format-matrices/error ((Listof (Array Any)) -> String))
(define (format-matrices/error as)
(string-join (map (λ: ([a : (Array Any)]) (format "~e" a)) as)))
(: matrix-shapes (Symbol (Matrix Any) (Matrix Any) * -> (Values Index Index)))
(define (matrix-shapes name arr . brrs)
(define-values (m n) (matrix-shape arr))
(unless (andmap (λ: ([brr : (Matrix Any)])
(define-values (bm bn) (matrix-shape brr))
(and (= bm m) (= bn n)))
brrs)
(error name
"matrices must have the same shape; given ~a"
(format-matrices/error (cons arr brrs))))
(values m n))
(: matrix-multiply-shape ((Matrix Any) (Matrix Any) -> (Values Index Index Index)))
(define (matrix-multiply-shape arr brr)
(define-values (ad0 ad1) (matrix-shape arr))
(define-values (bd0 bd1) (matrix-shape brr))
(unless (= ad1 bd0)
(error 'matrix-multiply
"1st argument column size and 2nd argument row size are not equal; given ~e and ~e"
arr brr))
(values ad0 ad1 bd1))
(: ensure-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-matrix name a)
(if (matrix? a) a (raise-argument-error name "matrix?" a)))
(: ensure-row-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-row-matrix name a)
(if (row-matrix? a) a (raise-argument-error name "row-matrix?" a)))
(: ensure-col-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-col-matrix name a)
(if (col-matrix? a) a (raise-argument-error name "col-matrix?" a)))
(: sort/key (All (A B) (case-> ((Listof A) (B B -> Boolean) (A -> B) -> (Listof A))
((Listof A) (B B -> Boolean) (A -> B) Boolean -> (Listof A)))))
;; Sometimes necessary because TR can't do inference with keyword arguments yet
(define (sort/key lst lt? key [cache-keys? #f])
((inst sort A B) lst lt? #:key key #:cache-keys? cache-keys?))
(: unsafe-vector2d-ref (All (A) ((Vectorof (Vectorof A)) Index Index -> A)))
(define (unsafe-vector2d-ref vss i j)
(unsafe-vector-ref (unsafe-vector-ref vss i) j))
;; Note: this accepts +nan.0
(define nonnegative?
(λ: ([x : Real]) (not (x . < . 0))))
(define number-rational?
(λ: ([z : Number])
(cond [(real? z) (rational? z)]
[else (and (rational? (real-part z))
(rational? (imag-part z)))])))
(: find-partial-pivot
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index -> (Values Index Flonum))
((Vectorof (Vectorof Real)) Index Index Index -> (Values Index Real))
((Vectorof (Vectorof Float-Complex)) Index Index Index -> (Values Index Float-Complex))
((Vectorof (Vectorof Number)) Index Index Index -> (Values Index Number))))
;; Find the element with maximum magnitude in a column
(define (find-partial-pivot rows m i j)
(define l (fx+ i 1))
(define pivot (unsafe-vector2d-ref rows i j))
(define mag-pivot (magnitude pivot))
(let loop ([#{l : Nonnegative-Fixnum} l] [#{p : Index} i] [pivot pivot] [mag-pivot mag-pivot])
(cond [(l . fx< . m)
(define new-pivot (unsafe-vector2d-ref rows l j))
(define mag-new-pivot (magnitude new-pivot))
(cond [(mag-new-pivot . > . mag-pivot) (loop (fx+ l 1) l new-pivot mag-new-pivot)]
[else (loop (fx+ l 1) p pivot mag-pivot)])]
[else (values p pivot)])))
(: find-first-pivot
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index -> (Values Index Flonum))
((Vectorof (Vectorof Real)) Index Index Index -> (Values Index Real))
((Vectorof (Vectorof Float-Complex)) Index Index Index -> (Values Index Float-Complex))
((Vectorof (Vectorof Number)) Index Index Index -> (Values Index Number))))
;; Find the first nonzero element in a column
(define (find-first-pivot rows m i j)
(define pivot (unsafe-vector2d-ref rows i j))
(if ((magnitude pivot) . > . 0)
(values i pivot)
(let loop ([#{l : Nonnegative-Fixnum} (fx+ i 1)])
(cond [(l . fx< . m)
(define pivot (unsafe-vector2d-ref rows l j))
(if ((magnitude pivot) . > . 0) (values l pivot) (loop (fx+ l 1)))]
[else
(values i pivot)]))))
(: elim-rows!
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index Flonum Nonnegative-Fixnum -> Void)
((Vectorof (Vectorof Real)) Index Index Index Real Nonnegative-Fixnum -> Void)
((Vectorof (Vectorof Float-Complex)) Index Index Index Float-Complex Nonnegative-Fixnum
-> Void)
((Vectorof (Vectorof Number)) Index Index Index Number Nonnegative-Fixnum -> Void)))
(define (elim-rows! rows m i j pivot start)
(define row_i (unsafe-vector-ref rows i))
(let loop ([#{l : Nonnegative-Fixnum} start])
(when (l . fx< . m)
(unless (l . fx= . i)
(define row_l (unsafe-vector-ref rows l))
(define x_lj (unsafe-vector-ref row_l j))
(unless (= x_lj 0)
(vector-scaled-add! row_l row_i (* -1 (/ x_lj pivot)) j)
;; Make sure the element below the pivot is zero
(unsafe-vector-set! row_l j (- x_lj x_lj))))
(loop (fx+ l 1)))))
(begin-encourage-inline
(: call/ns (All (A) ((-> (Matrix A)) -> (Matrix A))))
(define (call/ns thnk)
(array-default-strict
(parameterize ([array-strictness #f])
(thnk))))
) ; begin-encourage-inline
(: make-thread-local-box (All (A) (A -> (-> (Boxof A)))))
(define (make-thread-local-box contents)
(let: ([val : (Thread-Cellof (U #f (Boxof A))) (make-thread-cell #f)])
(λ () (or (thread-cell-ref val)
(let: ([v : (Boxof A) (box contents)])
(thread-cell-set! val v)
v)))))
(: one (case-> (Flonum -> Nonnegative-Flonum)
(Real -> (U 1 Nonnegative-Flonum))
(Float-Complex -> Nonnegative-Flonum)
(Number -> (U 1 Nonnegative-Flonum))))
(define (one x)
(cond [(flonum? x) 1.0]
[(real? x) 1]
[(float-complex? x) 1.0]
[else 1]))
(: zero (case-> (Flonum -> Flonum-Positive-Zero)
(Real -> (U 0 Flonum-Positive-Zero))
(Float-Complex -> Flonum-Positive-Zero)
(Number -> (U 0 Flonum-Positive-Zero))))
(define (zero x)
(cond [(flonum? x) 0.0]
[(real? x) 0]
[(float-complex? x) 0.0]
[else 0]))
(: one* (case-> (Flonum -> Nonnegative-Flonum)
(Real -> (U 1 Nonnegative-Flonum))
(Float-Complex -> Float-Complex)
(Number -> (U 1 Nonnegative-Flonum Float-Complex))))
(define (one* x)
(cond [(flonum? x) 1.0]
[(real? x) 1]
[(float-complex? x) 1.0+0.0i]
[else 1]))
(: zero* (case-> (Flonum -> Flonum-Positive-Zero)
(Real -> (U 0 Flonum-Positive-Zero))
(Float-Complex -> Float-Complex)
(Number -> (U 0 Flonum-Positive-Zero Float-Complex))))
(define (zero* x)
(cond [(flonum? x) 0.0]
[(real? x) 0]
[(float-complex? x) 0.0+0.0i]
[else 0]))
| null | https://raw.githubusercontent.com/racket/math/dcd2ea1893dc5b45b26c8312997917a15fcd1c4a/math-lib/math/private/matrix/utils.rkt | racket | Sometimes necessary because TR can't do inference with keyword arguments yet
Note: this accepts +nan.0
Find the element with maximum magnitude in a column
begin-encourage-inline | #lang typed/racket/base
(require racket/performance-hint
racket/string
racket/fixnum
math/base
"matrix-types.rkt"
"../unsafe.rkt"
"../array/array-struct.rkt"
"../vector/vector-mutate.rkt")
(provide (all-defined-out))
(: format-matrices/error ((Listof (Array Any)) -> String))
(define (format-matrices/error as)
(string-join (map (λ: ([a : (Array Any)]) (format "~e" a)) as)))
(: matrix-shapes (Symbol (Matrix Any) (Matrix Any) * -> (Values Index Index)))
(define (matrix-shapes name arr . brrs)
(define-values (m n) (matrix-shape arr))
(unless (andmap (λ: ([brr : (Matrix Any)])
(define-values (bm bn) (matrix-shape brr))
(and (= bm m) (= bn n)))
brrs)
(error name
"matrices must have the same shape; given ~a"
(format-matrices/error (cons arr brrs))))
(values m n))
(: matrix-multiply-shape ((Matrix Any) (Matrix Any) -> (Values Index Index Index)))
(define (matrix-multiply-shape arr brr)
(define-values (ad0 ad1) (matrix-shape arr))
(define-values (bd0 bd1) (matrix-shape brr))
(unless (= ad1 bd0)
(error 'matrix-multiply
"1st argument column size and 2nd argument row size are not equal; given ~e and ~e"
arr brr))
(values ad0 ad1 bd1))
(: ensure-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-matrix name a)
(if (matrix? a) a (raise-argument-error name "matrix?" a)))
(: ensure-row-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-row-matrix name a)
(if (row-matrix? a) a (raise-argument-error name "row-matrix?" a)))
(: ensure-col-matrix (All (A) Symbol (Array A) -> (Array A)))
(define (ensure-col-matrix name a)
(if (col-matrix? a) a (raise-argument-error name "col-matrix?" a)))
(: sort/key (All (A B) (case-> ((Listof A) (B B -> Boolean) (A -> B) -> (Listof A))
((Listof A) (B B -> Boolean) (A -> B) Boolean -> (Listof A)))))
(define (sort/key lst lt? key [cache-keys? #f])
((inst sort A B) lst lt? #:key key #:cache-keys? cache-keys?))
(: unsafe-vector2d-ref (All (A) ((Vectorof (Vectorof A)) Index Index -> A)))
(define (unsafe-vector2d-ref vss i j)
(unsafe-vector-ref (unsafe-vector-ref vss i) j))
(define nonnegative?
(λ: ([x : Real]) (not (x . < . 0))))
(define number-rational?
(λ: ([z : Number])
(cond [(real? z) (rational? z)]
[else (and (rational? (real-part z))
(rational? (imag-part z)))])))
(: find-partial-pivot
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index -> (Values Index Flonum))
((Vectorof (Vectorof Real)) Index Index Index -> (Values Index Real))
((Vectorof (Vectorof Float-Complex)) Index Index Index -> (Values Index Float-Complex))
((Vectorof (Vectorof Number)) Index Index Index -> (Values Index Number))))
(define (find-partial-pivot rows m i j)
(define l (fx+ i 1))
(define pivot (unsafe-vector2d-ref rows i j))
(define mag-pivot (magnitude pivot))
(let loop ([#{l : Nonnegative-Fixnum} l] [#{p : Index} i] [pivot pivot] [mag-pivot mag-pivot])
(cond [(l . fx< . m)
(define new-pivot (unsafe-vector2d-ref rows l j))
(define mag-new-pivot (magnitude new-pivot))
(cond [(mag-new-pivot . > . mag-pivot) (loop (fx+ l 1) l new-pivot mag-new-pivot)]
[else (loop (fx+ l 1) p pivot mag-pivot)])]
[else (values p pivot)])))
(: find-first-pivot
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index -> (Values Index Flonum))
((Vectorof (Vectorof Real)) Index Index Index -> (Values Index Real))
((Vectorof (Vectorof Float-Complex)) Index Index Index -> (Values Index Float-Complex))
((Vectorof (Vectorof Number)) Index Index Index -> (Values Index Number))))
;; Find the first nonzero element in a column
(define (find-first-pivot rows m i j)
(define pivot (unsafe-vector2d-ref rows i j))
(if ((magnitude pivot) . > . 0)
(values i pivot)
(let loop ([#{l : Nonnegative-Fixnum} (fx+ i 1)])
(cond [(l . fx< . m)
(define pivot (unsafe-vector2d-ref rows l j))
(if ((magnitude pivot) . > . 0) (values l pivot) (loop (fx+ l 1)))]
[else
(values i pivot)]))))
(: elim-rows!
(case-> ((Vectorof (Vectorof Flonum)) Index Index Index Flonum Nonnegative-Fixnum -> Void)
((Vectorof (Vectorof Real)) Index Index Index Real Nonnegative-Fixnum -> Void)
((Vectorof (Vectorof Float-Complex)) Index Index Index Float-Complex Nonnegative-Fixnum
-> Void)
((Vectorof (Vectorof Number)) Index Index Index Number Nonnegative-Fixnum -> Void)))
(define (elim-rows! rows m i j pivot start)
(define row_i (unsafe-vector-ref rows i))
(let loop ([#{l : Nonnegative-Fixnum} start])
(when (l . fx< . m)
(unless (l . fx= . i)
(define row_l (unsafe-vector-ref rows l))
(define x_lj (unsafe-vector-ref row_l j))
(unless (= x_lj 0)
(vector-scaled-add! row_l row_i (* -1 (/ x_lj pivot)) j)
;; Make sure the element below the pivot is zero
(unsafe-vector-set! row_l j (- x_lj x_lj))))
(loop (fx+ l 1)))))
(begin-encourage-inline
(: call/ns (All (A) ((-> (Matrix A)) -> (Matrix A))))
(define (call/ns thnk)
(array-default-strict
(parameterize ([array-strictness #f])
(thnk))))
)
(: make-thread-local-box (All (A) (A -> (-> (Boxof A)))))
(define (make-thread-local-box contents)
(let: ([val : (Thread-Cellof (U #f (Boxof A))) (make-thread-cell #f)])
(λ () (or (thread-cell-ref val)
(let: ([v : (Boxof A) (box contents)])
(thread-cell-set! val v)
v)))))
(: one (case-> (Flonum -> Nonnegative-Flonum)
(Real -> (U 1 Nonnegative-Flonum))
(Float-Complex -> Nonnegative-Flonum)
(Number -> (U 1 Nonnegative-Flonum))))
(define (one x)
(cond [(flonum? x) 1.0]
[(real? x) 1]
[(float-complex? x) 1.0]
[else 1]))
(: zero (case-> (Flonum -> Flonum-Positive-Zero)
(Real -> (U 0 Flonum-Positive-Zero))
(Float-Complex -> Flonum-Positive-Zero)
(Number -> (U 0 Flonum-Positive-Zero))))
(define (zero x)
(cond [(flonum? x) 0.0]
[(real? x) 0]
[(float-complex? x) 0.0]
[else 0]))
(: one* (case-> (Flonum -> Nonnegative-Flonum)
(Real -> (U 1 Nonnegative-Flonum))
(Float-Complex -> Float-Complex)
(Number -> (U 1 Nonnegative-Flonum Float-Complex))))
(define (one* x)
(cond [(flonum? x) 1.0]
[(real? x) 1]
[(float-complex? x) 1.0+0.0i]
[else 1]))
(: zero* (case-> (Flonum -> Flonum-Positive-Zero)
(Real -> (U 0 Flonum-Positive-Zero))
(Float-Complex -> Float-Complex)
(Number -> (U 0 Flonum-Positive-Zero Float-Complex))))
(define (zero* x)
(cond [(flonum? x) 0.0]
[(real? x) 0]
[(float-complex? x) 0.0+0.0i]
[else 0]))
|
b11d1d9ffa0f34e51c937dd6545f0fdc53dcd2193aed0656ddf2b5176361b4c1 | LexiFi/dead_code_analyzer | inc_val.mli | ./examples/advanced/inc_val.mli:1: x
./examples/advanced/inc_val.mli:2: y
| null | https://raw.githubusercontent.com/LexiFi/dead_code_analyzer/c44dc2ea5ccb13df2145e9316e21c39f09dad506/check/classic/examples/advanced/inc_val.mli | ocaml | ./examples/advanced/inc_val.mli:1: x
./examples/advanced/inc_val.mli:2: y
|
|
6fc3663f5e10b5daed9589721b7b8d64b7d91433a6b4b314b653cf393b7142bb | janestreet/core_bench | basic_tests.ml | open Core
open Core_bench
let get_float () = if Random.bool () then 10.0 else 10.0
let get_int () = Random.int 200000
let get_int64 () = if Random.bool () then Int64.of_int 10 else 10L
let scale t mul = Stdlib.int_of_float (Stdlib.float_of_int t *. mul)
let t1 = Bench.Test.create ~name:"Id" (fun () -> ())
let t2 =
let n = get_int () in
let fl = get_float () in
Bench.Test.create ~name:"integer scaling" (fun () -> ignore (scale n fl))
;;
let t3 =
Bench.Test.create
~name:"Int64.bits_of_float"
(let fl = get_float () in
fun () -> ignore (Int64.bits_of_float fl))
;;
let t4 =
Bench.Test.create
~name:"Int64.float_of_bits"
(let fl = get_int64 () in
fun () -> ignore (Int64.float_of_bits fl))
;;
let t5 =
let f1 = Random.float 1.0 in
let f2 = Random.float 1.0 in
Bench.Test.create ~name:"Float.*" (fun () -> ignore (f1 *. f2))
;;
let t6 =
let f1 = Random.int 5000 in
let f2 = Random.int 5000 in
Bench.Test.create ~name:"Int.*" (fun () -> ignore (f1 * f2))
;;
let tests = [ t1; t2; t3; t4; t5; t6 ]
let command = Bench.make_command tests
| null | https://raw.githubusercontent.com/janestreet/core_bench/d3bf745cdaf8a0b169c5e464584a35aeb5b08291/test/basic_tests.ml | ocaml | open Core
open Core_bench
let get_float () = if Random.bool () then 10.0 else 10.0
let get_int () = Random.int 200000
let get_int64 () = if Random.bool () then Int64.of_int 10 else 10L
let scale t mul = Stdlib.int_of_float (Stdlib.float_of_int t *. mul)
let t1 = Bench.Test.create ~name:"Id" (fun () -> ())
let t2 =
let n = get_int () in
let fl = get_float () in
Bench.Test.create ~name:"integer scaling" (fun () -> ignore (scale n fl))
;;
let t3 =
Bench.Test.create
~name:"Int64.bits_of_float"
(let fl = get_float () in
fun () -> ignore (Int64.bits_of_float fl))
;;
let t4 =
Bench.Test.create
~name:"Int64.float_of_bits"
(let fl = get_int64 () in
fun () -> ignore (Int64.float_of_bits fl))
;;
let t5 =
let f1 = Random.float 1.0 in
let f2 = Random.float 1.0 in
Bench.Test.create ~name:"Float.*" (fun () -> ignore (f1 *. f2))
;;
let t6 =
let f1 = Random.int 5000 in
let f2 = Random.int 5000 in
Bench.Test.create ~name:"Int.*" (fun () -> ignore (f1 * f2))
;;
let tests = [ t1; t2; t3; t4; t5; t6 ]
let command = Bench.make_command tests
|
|
0df1122c6da042643ff75be79b93d0b87f62ca39ce593ec3c0a509a1c3e9c2a7 | pascal-knodel/haskell-craft | E'6'37.hs | --
--
--
-----------------
Exercise 6.37 .
-----------------
--
--
--
module E'6'37 where
import B'C'6 ( Image )
import E'6'36 ( superimposeImage )
import Pictures
(
height
, width
)
-- "How would you use Image superimposition to give analogues of above and beside for Images?"
--
-- above  : First image above second image, at the second coordinates.
-- beside : First image beside second image, at the second coordinates.
imageAbove, imageBeside :: Image -> Image -> Image
imageAbove (top , _) ( bottom , (bottomX , bottomY) )
= superimposeImage ( top , ( bottomX , bottomY + ( toInteger (height top) ) ) )
( bottom , ( bottomX , bottomY ) )
imageBeside (left , _) ( right , (rightX , rightY) )
= superimposeImage ( left , ( rightX - ( toInteger (width left) ) , rightY ) )
( right , ( rightX , rightY ) )
-- ... other ways of using different Image superimpositions,
-- coordinate transformations and picture positions exist.
| null | https://raw.githubusercontent.com/pascal-knodel/haskell-craft/c03d6eb857abd8b4785b6de075b094ec3653c968/Chapter%206/E'6'37.hs | haskell |
---------------
---------------
"How would you use Image superimposition to give analogues of above and beside for Images?"
... other ways of using different Image superimpositions,
coordinate transformations and picture positions exist. | Exercise 6.37 .
module E'6'37 where
import B'C'6 ( Image )
import E'6'36 ( superimposeImage )
import Pictures
(
height
, width
)
above : First image above second image , at the second coordinates .
beside : First image beside second image , at the second coordinates .
imageAbove, imageBeside :: Image -> Image -> Image
imageAbove (top , _) ( bottom , (bottomX , bottomY) )
= superimposeImage ( top , ( bottomX , bottomY + ( toInteger (height top) ) ) )
( bottom , ( bottomX , bottomY ) )
imageBeside (left , _) ( right , (rightX , rightY) )
= superimposeImage ( left , ( rightX - ( toInteger (width left) ) , rightY ) )
( right , ( rightX , rightY ) )
|
b87dea8e77209b867a0ebf100ff61610eca64223313323783b70941bf90848da | paurkedal/inhca | inhca_tools.server.ml | Copyright ( C ) 2014 - -2016 Petter A. Urkedal < >
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
open Eliom_content
open Printf
let ignore_cv (x : unit Eliom_client_value.t) = ignore x
module F = struct
let page ~title contents =
(Eliom_tools.F.html ~title ~css:[["inhca.css"]]
(Html.F.body (Html.F.h1 [Html.F.txt title] :: contents)))
let send_error ~code msg =
let hdr = sprintf "Error %d" code in
Eliom_registration.Html.send ~code
(Eliom_tools.F.html ~title:hdr ~css:[["inhca.css"]]
Html.F.(body [h1 [txt hdr]; p [txt msg]]))
let send_page ?code ~title contents =
let title =
(match code with
| None -> title
| Some code -> sprintf "Error %d, %s" code title) in
Eliom_registration.Html.send ?code (page ~title contents)
end
let http_error code msg =
Lwt.fail (Ocsigen_http_frame.Http_error.Http_exception (code, Some msg, None))
let http_error_f code fmt = ksprintf (http_error code) fmt
let authorize_admin () =
match Inhca_config.auth_http_header_cp#get with
| None -> Lwt.return_unit
| Some h ->
let ri = Eliom_request_info.get_ri () in
let frame = Ocsigen_extensions.Ocsigen_request_info.http_frame ri in
let%lwt user =
try Lwt.return (Ocsigen_headers.find h frame)
with Not_found -> http_error 500 "Missing authentication header." in
if List.mem user Inhca_config.auth_admins_cp#get
then Lwt_log.info_f "Authorized %s." user
else http_error 403 "Admin access required."
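(* Illustrative usage sketch (hypothetical handler, not part of this module):
   a service handler can guard privileged work by sequencing [authorize_admin]
   first; it resolves to unit on success and fails with an HTTP error
   exception (403/500) otherwise:
     let admin_handler () _get _post =
       let%lwt () = authorize_admin () in
       run_privileged_action ()
   [admin_handler] and [run_privileged_action] are placeholder names. *)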
module Local_log (Spec : sig val section_name : string end) = struct
let section = Lwt_log.Section.make Spec.section_name
let debug = Lwt_log.debug ~section
let debug_f = Lwt_log.debug_f ~section
let ign_debug = Lwt_log.ign_debug ~section
let ign_debug_f = Lwt_log.ign_debug_f ~section
let info = Lwt_log.info ~section
let info_f = Lwt_log.info_f ~section
let ign_info = Lwt_log.ign_info ~section
let ign_info_f = Lwt_log.ign_info_f ~section
let notice = Lwt_log.notice ~section
let notice_f = Lwt_log.notice_f ~section
let ign_notice = Lwt_log.ign_notice ~section
let ign_notice_f = Lwt_log.ign_notice_f ~section
let warning = Lwt_log.warning ~section
let warning_f = Lwt_log.warning_f ~section
let ign_warning = Lwt_log.ign_warning ~section
let ign_warning_f = Lwt_log.ign_warning_f ~section
let error = Lwt_log.debug ~section
let error_f = Lwt_log.debug_f ~section
let ign_error = Lwt_log.ign_debug ~section
let ign_error_f = Lwt_log.ign_error_f ~section
let fatal = Lwt_log.debug ~section
let fatal_f = Lwt_log.debug_f ~section
let ign_fatal = Lwt_log.ign_debug ~section
let ign_fatal_f = Lwt_log.ign_fatal_f ~section
end
| null | https://raw.githubusercontent.com/paurkedal/inhca/c2cc4abce931684fb17ac88169822178956f18e3/web/inhca_tools.server.ml | ocaml | Copyright ( C ) 2014 - -2016 Petter A. Urkedal < >
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
open Eliom_content
open Printf
let ignore_cv (x : unit Eliom_client_value.t) = ignore x
module F = struct
let page ~title contents =
(Eliom_tools.F.html ~title ~css:[["inhca.css"]]
(Html.F.body (Html.F.h1 [Html.F.txt title] :: contents)))
let send_error ~code msg =
let hdr = sprintf "Error %d" code in
Eliom_registration.Html.send ~code
(Eliom_tools.F.html ~title:hdr ~css:[["inhca.css"]]
Html.F.(body [h1 [txt hdr]; p [txt msg]]))
let send_page ?code ~title contents =
let title =
(match code with
| None -> title
| Some code -> sprintf "Error %d, %s" code title) in
Eliom_registration.Html.send ?code (page ~title contents)
end
let http_error code msg =
Lwt.fail (Ocsigen_http_frame.Http_error.Http_exception (code, Some msg, None))
let http_error_f code fmt = ksprintf (http_error code) fmt
let authorize_admin () =
match Inhca_config.auth_http_header_cp#get with
| None -> Lwt.return_unit
| Some h ->
let ri = Eliom_request_info.get_ri () in
let frame = Ocsigen_extensions.Ocsigen_request_info.http_frame ri in
let%lwt user =
try Lwt.return (Ocsigen_headers.find h frame)
with Not_found -> http_error 500 "Missing authentication header." in
if List.mem user Inhca_config.auth_admins_cp#get
then Lwt_log.info_f "Authorized %s." user
else http_error 403 "Admin access required."
module Local_log (Spec : sig val section_name : string end) = struct
let section = Lwt_log.Section.make Spec.section_name
let debug = Lwt_log.debug ~section
let debug_f = Lwt_log.debug_f ~section
let ign_debug = Lwt_log.ign_debug ~section
let ign_debug_f = Lwt_log.ign_debug_f ~section
let info = Lwt_log.info ~section
let info_f = Lwt_log.info_f ~section
let ign_info = Lwt_log.ign_info ~section
let ign_info_f = Lwt_log.ign_info_f ~section
let notice = Lwt_log.notice ~section
let notice_f = Lwt_log.notice_f ~section
let ign_notice = Lwt_log.ign_notice ~section
let ign_notice_f = Lwt_log.ign_notice_f ~section
let warning = Lwt_log.warning ~section
let warning_f = Lwt_log.warning_f ~section
let ign_warning = Lwt_log.ign_warning ~section
let ign_warning_f = Lwt_log.ign_warning_f ~section
let error = Lwt_log.debug ~section
let error_f = Lwt_log.debug_f ~section
let ign_error = Lwt_log.ign_debug ~section
let ign_error_f = Lwt_log.ign_error_f ~section
let fatal = Lwt_log.debug ~section
let fatal_f = Lwt_log.debug_f ~section
let ign_fatal = Lwt_log.ign_debug ~section
let ign_fatal_f = Lwt_log.ign_fatal_f ~section
end
|
|
82f53dabc865f811bb91a8ae9ef032fd24f45427b3da753051ff5411e6002c9a | technion/ct_advisor | domain_parse.erl | -module(domain_parse).
-export([cert_domain_list/1]).
% A list of certificates with a list of names
-spec cert_domain_list([any()]) -> 'ok'.
cert_domain_list(Domains) ->
lager:info("Domain list for review: ~p" ,[Domains]),
lists:foreach(fun(X) -> per_cert_domains(X) end, Domains).
% A list of domains for an individual certificate
-spec per_cert_domains([{'dNSName', _}]) -> 'ok'.
per_cert_domains(DomainsID) ->
ID = proplists:lookup(serial, DomainsID),
Domains = proplists:delete(serial, DomainsID),
case lists:flatten(lookup_name_list(Domains)) of
[] ->
ok;
Alerts ->
ct_mail_alert:send_alert(Alerts, Domains, ID),
ok
end.
% For a given domain name - check if it's registered in the database
-spec lookup_name_list([{atom(), _}]) -> [[] | {_, _}].
lookup_name_list([]) ->
[];
lookup_name_list([{dNSName, Name}|Tail]) ->
{ok, _Columns, Rows} = pgapp:equery("SELECT email FROM registrations "
"WHERE ($1 LIKE concat('%.',domain) or $1 = domain) AND active =1",
[Name]),
Match = case Rows of
[{User}] ->
{Name, binary_to_list(User)};
_ ->
[]
end,
[Match|lookup_name_list(Tail)];
lookup_name_list([{_, _Name}|Tail]) ->
% There are other types of subject names - see test suite
[lookup_name_list(Tail)].
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-include("test_constants.hrl").
lookup_fixture_test_() ->
{setup, fun connect/0, fun teardown/1, fun lookup_name_listi/0}.
connect() ->
application:ensure_all_started(pgapp),
db_connect:db_connect(),
ok.
teardown(_C) ->
application:stop(pgapp).
lookup_name_listi() ->
% using lists:flatten/1 because it is always called this way
?assertEqual(lists:flatten(lookup_name_list([])), []),
?assertEqual([], lists:flatten(lookup_name_list(?TEST_NONDNS_DOMAINS))),
?assertEqual([{"lolware.net",""},
{"www.lolware.net",""}],
lists:flatten(lookup_name_list(?TEST_LOOKUP_DOMAINS))).
-endif.
| null | https://raw.githubusercontent.com/technion/ct_advisor/d5f3120c468e4203caefbe57f1c64fa4b7017613/apps/ct_advisor/src/domain_parse.erl | erlang | A list of certificates with a list of names
A list of domains for an individual certificate
using lists:flatten/1 because it is always called this way | -module(domain_parse).
-export([cert_domain_list/1]).
-spec cert_domain_list([any()]) -> 'ok'.
cert_domain_list(Domains) ->
lager:info("Domain list for review: ~p" ,[Domains]),
lists:foreach(fun(X) -> per_cert_domains(X) end, Domains).
-spec per_cert_domains([{'dNSName', _}]) -> 'ok'.
per_cert_domains(DomainsID) ->
ID = proplists:lookup(serial, DomainsID),
Domains = proplists:delete(serial, DomainsID),
case lists:flatten(lookup_name_list(Domains)) of
[] ->
ok;
Alerts ->
ct_mail_alert:send_alert(Alerts, Domains, ID),
ok
end.
For a given domain name - check if it 's registered in the datbaase
-spec lookup_name_list([{atom(), _}]) -> [[] | {_, _}].
lookup_name_list([]) ->
[];
lookup_name_list([{dNSName, Name}|Tail]) ->
{ok, _Columns, Rows} = pgapp:equery("SELECT email FROM registrations "
"WHERE ($1 LIKE concat('%.',domain) or $1 = domain) AND active =1",
[Name]),
Match = case Rows of
[{User}] ->
{Name, binary_to_list(User)};
_ ->
[]
end,
[Match|lookup_name_list(Tail)];
lookup_name_list([{_, _Name}|Tail]) ->
: There are other types of subject names - see test suite
[lookup_name_list(Tail)].
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-include("test_constants.hrl").
lookup_fixture_test_() ->
{setup, fun connect/0, fun teardown/1, fun lookup_name_listi/0}.
connect() ->
application:ensure_all_started(pgapp),
db_connect:db_connect(),
ok.
teardown(_C) ->
application:stop(pgapp).
lookup_name_listi() ->
?assertEqual(lists:flatten(lookup_name_list([])), []),
?assertEqual([], lists:flatten(lookup_name_list(?TEST_NONDNS_DOMAINS))),
?assertEqual([{"lolware.net",""},
{"www.lolware.net",""}],
lists:flatten(lookup_name_list(?TEST_LOOKUP_DOMAINS))).
-endif.
|
fc9d0c53de679bd2697838ed95accf5f866e3d03f651bddc1abcc0521829bab4 | finnishtransportagency/harja | yha_yhteiset.clj | (ns harja.palvelin.integraatiot.yha.yha-yhteiset
"Sisältää YHA:n päällystykseen ja paikkaukseen liittyviä yhteisiä toimintoja.")
(defn yha-otsikot
"Muotoilee YHA:n api-keyn headerin"
[api-key json?]
(merge {"Content-Type" (if json?
"application/json"
"text/xml; charset=utf-8")}
(when api-key {"x-api-key" api-key})))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/222ecd9907e2a34364f8bf7bb4a1035939b039f6/src/clj/harja/palvelin/integraatiot/yha/yha_yhteiset.clj | clojure | (ns harja.palvelin.integraatiot.yha.yha-yhteiset
"Sisältää YHA:n päällystykseen ja paikkaukseen liittyviä yhteisiä toimintoja.")
(defn yha-otsikot
"Muotoilee YHA:n api-keyn headerin"
[api-key json?]
(merge {"Content-Type" (if json?
"application/json"
"text/xml; charset=utf-8")}
(when api-key {"x-api-key" api-key})))
|
|
1010c58418ea2f48a3fbbd410ad949797fdf06cf5879a5d3958a3b43e7de5ecd | elastic/eui-cljs | icon_app_pipeline.cljs | (ns eui.icon-app-pipeline
(:require ["@elastic/eui/lib/components/icon/assets/app_pipeline.js" :as eui]))
(def appPipeline eui/icon)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/icon_app_pipeline.cljs | clojure | (ns eui.icon-app-pipeline
(:require ["@elastic/eui/lib/components/icon/assets/app_pipeline.js" :as eui]))
(def appPipeline eui/icon)
|
|
8113635c8cf317f2fc3f54952e4005a59bac0a18bd84cb320dde9950562c41ad | wlitwin/graphv | extraStubs.ml | open GlTypes
type vertex_array_object = int
let uniform_buffer_offset_alignment : enum = 0x8A34
let uniform_buffer : buffer_target = 0x8A11
let unpack_row_length : pixel_store_param = 0x0CF2
let unpack_skip_rows : pixel_store_param = 0x0CF3
let unpack_skip_pixels : pixel_store_param = 0x0CF4
let red : pixel_format = 0x1903
let r8 : pixel_format = 0x8229
let null_vao : vertex_array_object = 0
let dynamic_draw : buffer_usage = 0x88E8
external buffer_data_null : buffer_target -> int -> buffer_usage -> unit = "gles_buffer_data_null"[@@noalloc]
external buffer_sub_data : buffer_target -> (*offset*) int -> (*size*) int -> float_buffer -> unit = "gles_buffer_sub_data"[@@noalloc]
external uniform4fv_offset : [`vec4] uniform_location -> float_buffer -> int -> int -> unit = "gles_uniform4fv_offset"[@@noalloc]
external get_integer : enum -> int = "gles_get_integer"[@@noalloc]
external uniform_block_binding : program -> int -> int -> unit = "gles_uniform_block_binding"[@@noalloc]
external bind_buffer_range : enum -> int -> int -> int -> int -> unit = "gles_bind_buffer_range"[@@noalloc]
external create_vertex_array_object : unit -> vertex_array_object = "gles_create_vertex_array_object"[@@noalloc]
external bind_vertex_array_object : vertex_array_object -> unit = "gles_bind_vertex_array_object"[@@noalloc]
type locs = {
frag : [`vec4] uniform_location;
tex : int uniform_location;
view_size : [`vec2] uniform_location;
vert_buf : buffer_id;
}
| null | https://raw.githubusercontent.com/wlitwin/graphv/d0a09575c5ff5ee3727c222dd6130d22e4cf62d9/src/native3/extraStubs.ml | ocaml | offset
size | open GlTypes
type vertex_array_object = int
let uniform_buffer_offset_alignment : enum = 0x8A34
let uniform_buffer : buffer_target = 0x8A11
let unpack_row_length : pixel_store_param = 0x0CF2
let unpack_skip_rows : pixel_store_param = 0x0CF3
let unpack_skip_pixels : pixel_store_param = 0x0CF4
let red : pixel_format = 0x1903
let r8 : pixel_format = 0x8229
let null_vao : vertex_array_object = 0
let dynamic_draw : buffer_usage = 0x88E8
external buffer_data_null : buffer_target -> int -> buffer_usage -> unit = "gles_buffer_data_null"[@@noalloc]
external uniform4fv_offset : [`vec4] uniform_location -> float_buffer -> int -> int -> unit = "gles_uniform4fv_offset"[@@noalloc]
external get_integer : enum -> int = "gles_get_integer"[@@noalloc]
external uniform_block_binding : program -> int -> int -> unit = "gles_uniform_block_binding"[@@noalloc]
external bind_buffer_range : enum -> int -> int -> int -> int -> unit = "gles_bind_buffer_range"[@@noalloc]
external create_vertex_array_object : unit -> vertex_array_object = "gles_create_vertex_array_object"[@@noalloc]
external bind_vertex_array_object : vertex_array_object -> unit = "gles_bind_vertex_array_object"[@@noalloc]
type locs = {
frag : [`vec4] uniform_location;
tex : int uniform_location;
view_size : [`vec2] uniform_location;
vert_buf : buffer_id;
}
|
358fb11a5755d7b4554c78a7ad2d1c82d6dd169dbacca9db7aec5f1706f74ab9 | puppetlabs/puppetserver | jruby_puppet_testutils.clj | (ns puppetlabs.services.jruby.jruby-puppet-testutils
(:require [puppetlabs.services.jruby.jruby-puppet-core :as jruby-puppet-core]
[puppetlabs.services.jruby-pool-manager.jruby-core :as jruby-core]
[puppetlabs.services.jruby.jruby-puppet-schemas :as jruby-puppet-schemas]
[puppetlabs.trapperkeeper.app :as tk-app]
[puppetlabs.trapperkeeper.services :as tk-service]
[schema.core :as schema]
[puppetlabs.services.jruby-pool-manager.jruby-schemas :as jruby-schemas]
[puppetlabs.services.jruby.puppet-environments :as puppet-env]
[puppetlabs.services.protocols.pool-manager :as pool-manager-protocol]
[puppetlabs.services.jruby-pool-manager.impl.jruby-pool-manager-core :as jruby-pool-manager-core]
[puppetlabs.services.jruby-pool-manager.impl.jruby-internal :as jruby-internal]
[puppetlabs.trapperkeeper.services.metrics.metrics-service :as metrics]
[puppetlabs.services.jruby.jruby-puppet-service :as jruby-puppet]
[puppetlabs.services.puppet-profiler.puppet-profiler-service :as profiler]
[puppetlabs.trapperkeeper.services.webserver.jetty9-service :as jetty9-service]
[puppetlabs.services.jruby-pool-manager.jruby-pool-manager-service :as jruby-pool-manager]
[puppetlabs.trapperkeeper.services.scheduler.scheduler-service :as scheduler-service]
[puppetlabs.trapperkeeper.services.status.status-service :as status-service]
[puppetlabs.trapperkeeper.services.webrouting.webrouting-service :as webrouting-service])
(:import (clojure.lang IFn)
(com.puppetlabs.jruby_utils.jruby ScriptingContainer)
(puppetlabs.services.jruby_pool_manager.jruby_schemas JRubyInstance)
(com.puppetlabs.puppetserver JRubyPuppet JRubyPuppetResponse)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Constants
(def ruby-load-path ["./ruby/puppet/lib" "./ruby/facter/lib" "./ruby/hiera/lib"])
(def gem-home "./target/jruby-gem-home")
(def gem-path "./target/jruby-gem-home:./target/vendored-jruby-gems")
(def conf-dir "./target/server-conf")
(def code-dir "./target/server-code")
(def var-dir "./target/server-var")
(def run-dir "./target/server-var/run")
(def log-dir "./target/server-var/log")
(def multithreaded
(= "true" (System/getenv "MULTITHREADED")))
(def jruby-service-and-dependencies
[jruby-puppet/jruby-puppet-pooled-service
profiler/puppet-profiler-service
jruby-pool-manager/jruby-pool-manager-service
metrics/metrics-service
scheduler-service/scheduler-service
status-service/status-service
jetty9-service/jetty9-service
webrouting-service/webrouting-service])
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Schemas
(def JRubyPuppetTKConfig
"Schema combining JRubyPuppetConfig and JRubyConfig.
This represents what would be in a real TK configuration's jruby-puppet section,
so we remove some things from the JRubyConfig:
- remove :max-borrows-per-instance (keep :max-requests-per-instance)
- remove :lifecycle"
(-> jruby-puppet-schemas/JRubyPuppetConfig
(merge jruby-schemas/JRubyConfig)
(dissoc :max-borrows-per-instance
:lifecycle)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; JRubyPuppet Test util functions
(defn borrow-instance
"Borrows an instance from the JRubyPuppet interpreter pool. If there are no
interpreters left in the pool then the operation blocks until there is one
available. A timeout (integer measured in milliseconds) can be configured
which will either return an interpreter if one is available within the
timeout length, or will return nil after the timeout expires if no
interpreters are available. This timeout defaults to 1200000 milliseconds.
`reason` is an identifier (usually a map) describing the reason for borrowing the
JRuby instance. It may be used for metrics and logging purposes."
[jruby-puppet-service reason]
(let [{:keys [pool-context]} (tk-service/service-context jruby-puppet-service)
event-callbacks (jruby-core/get-event-callbacks pool-context)]
(jruby-core/borrow-from-pool-with-timeout pool-context reason event-callbacks)))
(defn return-instance
"Returns the JRubyPuppet interpreter back to the pool.
`reason` is an identifier (usually a map) describing the reason for borrowing the
JRuby instance. It may be used for metrics and logging purposes, so for
best results it should be set to the same value as it was set during the
`borrow-instance` call."
[jruby-puppet-service jruby-instance reason]
(let [pool-context (:pool-context (tk-service/service-context jruby-puppet-service))
event-callbacks (jruby-core/get-event-callbacks pool-context)]
(jruby-core/return-to-pool pool-context jruby-instance reason event-callbacks)))
(schema/defn create-mock-scripting-container :- ScriptingContainer
[]
(reify ScriptingContainer
(terminate [_])))
(schema/defn ^:always-validate
create-mock-pool-instance :- JRubyInstance
[mock-jruby-instance-creator-fn :- IFn
pool :- jruby-schemas/pool-queue-type
id :- schema/Int
config :- jruby-schemas/JRubyConfig]
(let [instance (jruby-schemas/map->JRubyInstance
{:id id
:internal {:pool pool
:max-borrows (:max-borrows-per-instance config)
:initial-borrows nil
:state (atom {:borrow-count 0})}
:scripting-container (create-mock-scripting-container)})
modified-instance (merge instance {:jruby-puppet (mock-jruby-instance-creator-fn)
:environment-registry (puppet-env/environment-registry)})]
(.register pool modified-instance)
modified-instance))
(defn jruby-puppet-tk-config
"Create a JRubyPuppet pool config with the given pool config. Suitable for use
in bootstrapping trapperkeeper (in other words, returns a representation of the
config that matches what would be read directly from the config files on disk,
as opposed to a version that has been processed and transformed to comply
with the JRubyPuppetConfig schema)."
[pool-config]
{:product {:name "puppetserver"
:update-server-url ":11111"}
:jruby-puppet pool-config
:http-client {}
:metrics {:server-id "localhost"}
:authorization {:version 1
:rules [{:match-request {:path "/" :type "path"}
:allow "*"
:sort-order 1
:name "allow all"}]}
:webserver {:host "localhost"}
:web-router-service {:puppetlabs.trapperkeeper.services.status.status-service/status-service
"/status"}})
(schema/defn ^:always-validate
jruby-puppet-config :- JRubyPuppetTKConfig
"Create a JRubyPuppetConfig for testing. The optional map argument `options` may
contain a map, which, if present, will be merged into the final JRubyPuppetConfig
map. (This function differs from `jruby-puppet-tk-config` in
that it returns a map that complies with the JRubyPuppetConfig schema, which
differs slightly from the raw format that would be read from config files
on disk.)"
([]
(let [combined-configs
(merge (jruby-puppet-core/initialize-puppet-config
{}
{:server-conf-dir conf-dir
:server-code-dir code-dir
:server-var-dir var-dir
:server-run-dir run-dir
:server-log-dir log-dir}
false)
(jruby-core/initialize-config {:ruby-load-path ruby-load-path
:gem-home gem-home
:gem-path gem-path
:multithreaded multithreaded}))
max-requests-per-instance (:max-borrows-per-instance combined-configs)
updated-config (-> combined-configs
(assoc :max-requests-per-instance max-requests-per-instance)
(dissoc :max-borrows-per-instance))]
(dissoc updated-config :lifecycle)))
([options]
(merge (jruby-puppet-config) options)))
(defn drain-pool
"Drains the JRubyPuppet pool and returns each instance in a vector."
[pool-context size]
(mapv (fn [_] (jruby-core/borrow-from-pool pool-context :test [])) (range size)))
(defn fill-drained-pool
"Returns a list of JRubyPuppet instances back to their pool."
[pool-context instance-list]
(doseq [instance instance-list]
(jruby-core/return-to-pool pool-context instance :test [])))
(defn reduce-over-jrubies!
"Takes a JRuby pool and size, and a function f from integer
to string. For each JRuby instance in the pool, f will be called, passing in
an integer offset into the jruby array (0..size), and f is expected to return
a string containing a script to run against the jruby instance.
Returns a vector containing the results of executing the scripts against the
JRuby instances."
[pool-context size f]
(let [jrubies (drain-pool pool-context size)
result (reduce
(fn [acc jruby-offset]
(let [sc (:scripting-container (nth jrubies jruby-offset))
script (f jruby-offset)
result (.runScriptlet sc script)]
(conj acc result)))
[]
(range size))]
(fill-drained-pool pool-context jrubies)
result))
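;; Illustrative sketch (not part of the original file): evaluates a trivial Ruby
;; scriptlet on each of two pooled JRuby instances and collects the results.
;; `pool-context` is assumed to come from a running JRubyPuppetService.
(comment
  (reduce-over-jrubies! pool-context 2 (fn [i] (str "1 + " i)))
  ;; => vector of the two scriptlet results, e.g. [1 2]
  )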
(defn wait-for-jrubies
"Wait for all jrubies to land in the JRubyPuppetService's pool"
[app]
(let [pool-context (-> app
(tk-app/get-service :JRubyPuppetService)
tk-service/service-context
:pool-context)
num-jrubies (-> pool-context
jruby-core/get-pool-state
:size)]
(while (< (count (jruby-core/registered-instances pool-context))
num-jrubies)
(Thread/sleep 100))))
(schema/defn ^:always-validate
mock-puppet-config-settings :- {schema/Str schema/Any}
"Return a map of settings that mock the settings that core Ruby Puppet
would return via a call to JRubyPuppet.getSetting()."
[jruby-puppet-config :- {:server-conf-dir schema/Str
:server-code-dir schema/Str
schema/Keyword schema/Any}]
(let [certname "localhost"
confdir (:server-conf-dir jruby-puppet-config)
ssldir (str confdir "/ssl")
certdir (str ssldir "/certs")
cadir (str confdir "/ca")
private-key-dir (str ssldir "/private_keys")]
{"allow_duplicate_certs" false
"autosign" true
"keylength" 2048
"cadir" cadir
"cacert" (str cadir "/ca_crt.pem")
"ca_name" (str "Puppet CA: " certname)
"cacrl" (str cadir "/ca_crl.pem")
"cakey" (str cadir "/ca_key.pem")
"capub" (str cadir "/ca_pub.pem")
"ca_ttl" 157680000
"certdir" certdir
"certname" certname
"cert_inventory" (str cadir "/inventory.txt")
"codedir" (:server-code-dir jruby-puppet-config)
"csr_attributes" (str confdir "/csr_attributes.yaml")
"csrdir" (str cadir "/requests")
"dns_alt_names" ""
"hostcert" (str certdir "/" certname ".pem")
"hostcrl" (str ssldir "/crl.pem")
"hostprivkey" (str private-key-dir "/" certname ".pem")
"hostpubkey" (str ssldir "/public_keys/" certname ".pem")
"localcacert" (str certdir "/ca.pem")
"manage_internal_file_permissions" true
"privatekeydir" private-key-dir
"requestdir" (str ssldir "/certificate_requests")
"serial" (str cadir "/serial")
"signeddir" (str cadir "/signed")
"ssl_client_header" "HTTP_X_CLIENT_DN"
"ssl_client_verify_header" "HTTP_X_CLIENT_VERIFY"
"trusted_oid_mapping_file" (str confdir
"/custom_trusted_oid_mapping.yaml")}))
(schema/defn ^:always-validate
create-mock-jruby-puppet :- JRubyPuppet
"Create a 'mock' JRubyPuppet instance which returns fixed values for settings
and puppet version and a hard-coded HTTP 200 response for any requests it
handles."
([config :- {schema/Keyword schema/Any}]
(create-mock-jruby-puppet
(fn [_]
(throw (UnsupportedOperationException. "Mock handleRequest not defined")))
config))
([handle-request-fn :- IFn
config :- {schema/Keyword schema/Any}]
(let [puppet-config (merge
(mock-puppet-config-settings (:jruby-puppet config))
(:puppet config))]
(reify JRubyPuppet
(getSetting [_ setting]
(let [value (get puppet-config setting :not-found)]
(if (= value :not-found)
(throw (IllegalArgumentException.
(str "Setting not in mock-puppet-config-settings "
"requested: " setting ". Add an appropriate value "
"to the map to correct this problem.")))
value)))
(puppetVersion [_]
"1.2.3")
(handleRequest [_ request]
(handle-request-fn request))
(terminate [_])))))
(schema/defn ^:always-validate
create-mock-jruby-puppet-fn-with-handle-response-params :- IFn
"Return a function which, when invoked, will create a mock JRubyPuppet
instance. Supplied arguments - 'status-code', 'response-body',
'response-content-type', and 'puppet-version' are returned in the
'JRubyPuppetResponse' that the JRubyPuppet instance's .handleRequest
method returns when invoked. The default value for 'response-content-type',
if not supplied, is 'text/plain'. The default value for 'puppet-version',
if not supplied, is '1.2.3'."
([status-code :- schema/Int
response-body :- schema/Str]
(create-mock-jruby-puppet-fn-with-handle-response-params status-code
response-body
"text/plain"))
([status-code :- schema/Int
response-body :- schema/Str
response-content-type :- schema/Str]
(create-mock-jruby-puppet-fn-with-handle-response-params status-code
response-body
response-content-type
"1.2.3"))
([status-code :- schema/Int
response-body :- schema/Str
response-content-type :- schema/Str
puppet-version :- schema/Str]
(partial create-mock-jruby-puppet
(fn [_]
(JRubyPuppetResponse.
(Integer. status-code)
response-body
response-content-type
puppet-version)))))
(schema/defn ^:always-validate
create-mock-pool :- jruby-schemas/PoolContext
"Create a 'mock' JRuby pool. The pool is filled with the number 'mock'
JRubyPuppet instances specified in the jruby-config. The supplied
'mock-jruby-puppet-fn' is invoked for each instance to be created for the
pool and is expected to return an object of type 'JRubyPuppet'."
[jruby-config :- jruby-schemas/JRubyConfig
mock-jruby-puppet-fn :- IFn]
;; The implementation of this function is based on and very similar to
;; `prime-pool!` in `puppetlabs.services.jruby-pool-manager.impl.jruby-agents`
;; from the jruby-utils library.
(let [pool-context (jruby-pool-manager-core/create-pool-context jruby-config)
pool (jruby-internal/get-pool pool-context)
count (.remainingCapacity pool)]
(dotimes [i count]
(let [id (inc i)]
(create-mock-pool-instance
mock-jruby-puppet-fn
pool
id
jruby-config)))
pool-context))
(schema/defn ^:always-validate
mock-jruby-pool-manager-service
:- (schema/protocol tk-service/ServiceDefinition)
"Create a 'mock' JRubyPoolManagerService, with a create-pool function
which returns a 'mock' JRuby pool when called. The supplied
'mock-jruby-puppet-fn' is invoked for each instance to be created for the pool
and is expected to return an object of type 'JRubyPuppet'. The 'config'
option is passed back as an argument to 'mock-jruby-puppet-fn', if supplied."
([config :- {schema/Keyword schema/Any}]
(mock-jruby-pool-manager-service config
create-mock-jruby-puppet))
([config :- {schema/Keyword schema/Any}
mock-jruby-puppet-fn :- IFn]
(tk-service/service
pool-manager-protocol/PoolManagerService
[]
(create-pool
[this jruby-config]
(create-mock-pool jruby-config (partial mock-jruby-puppet-fn config))))))
(defn add-mock-jruby-pool-manager-service
([services config]
(add-mock-jruby-pool-manager-service services config create-mock-jruby-puppet))
([services config mock-jruby-puppet-fn]
(->> services
(remove #(= :PoolManagerService (tk-service/service-def-id %)))
vec
(cons (mock-jruby-pool-manager-service
config
mock-jruby-puppet-fn)))))
(defn jruby-service-and-dependencies-with-mocking
[config]
(add-mock-jruby-pool-manager-service
jruby-service-and-dependencies
config))
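;; Illustrative sketch (not part of the original file): a test would normally
;; build a config, swap the real pool manager for the mock one, and boot
;; trapperkeeper with the resulting service list. `bootstrap-with` stands in for
;; whatever helper the test suite uses to start an app and is an assumption here.
(comment
  (let [config   (jruby-puppet-tk-config (jruby-puppet-config {:max-active-instances 1}))
        services (jruby-service-and-dependencies-with-mocking config)]
    ;; (bootstrap-with services config)
    (map tk-service/service-def-id services))
  )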
| null | https://raw.githubusercontent.com/puppetlabs/puppetserver/2d6ca01b4b72716ca543b606f752261b969e401b/test/unit/puppetlabs/services/jruby/jruby_puppet_testutils.clj | clojure |
Constants
The implementation of this function is based on and very similar to
`prime-pool!` in `puppetlabs.services.jruby-pool-manager.impl.jruby-agents`
from the jruby-utils library. | (ns puppetlabs.services.jruby.jruby-puppet-testutils
(:require [puppetlabs.services.jruby.jruby-puppet-core :as jruby-puppet-core]
[puppetlabs.services.jruby-pool-manager.jruby-core :as jruby-core]
[puppetlabs.services.jruby.jruby-puppet-schemas :as jruby-puppet-schemas]
[puppetlabs.trapperkeeper.app :as tk-app]
[puppetlabs.trapperkeeper.services :as tk-service]
[schema.core :as schema]
[puppetlabs.services.jruby-pool-manager.jruby-schemas :as jruby-schemas]
[puppetlabs.services.jruby.puppet-environments :as puppet-env]
[puppetlabs.services.protocols.pool-manager :as pool-manager-protocol]
[puppetlabs.services.jruby-pool-manager.impl.jruby-pool-manager-core :as jruby-pool-manager-core]
[puppetlabs.services.jruby-pool-manager.impl.jruby-internal :as jruby-internal]
[puppetlabs.trapperkeeper.services.metrics.metrics-service :as metrics]
[puppetlabs.services.jruby.jruby-puppet-service :as jruby-puppet]
[puppetlabs.services.puppet-profiler.puppet-profiler-service :as profiler]
[puppetlabs.trapperkeeper.services.webserver.jetty9-service :as jetty9-service]
[puppetlabs.services.jruby-pool-manager.jruby-pool-manager-service :as jruby-pool-manager]
[puppetlabs.trapperkeeper.services.scheduler.scheduler-service :as scheduler-service]
[puppetlabs.trapperkeeper.services.status.status-service :as status-service]
[puppetlabs.trapperkeeper.services.webrouting.webrouting-service :as webrouting-service])
(:import (clojure.lang IFn)
(com.puppetlabs.jruby_utils.jruby ScriptingContainer)
(puppetlabs.services.jruby_pool_manager.jruby_schemas JRubyInstance)
(com.puppetlabs.puppetserver JRubyPuppet JRubyPuppetResponse)))
(def ruby-load-path ["./ruby/puppet/lib" "./ruby/facter/lib" "./ruby/hiera/lib"])
(def gem-home "./target/jruby-gem-home")
(def gem-path "./target/jruby-gem-home:./target/vendored-jruby-gems")
(def conf-dir "./target/server-conf")
(def code-dir "./target/server-code")
(def var-dir "./target/server-var")
(def run-dir "./target/server-var/run")
(def log-dir "./target/server-var/log")
(def multithreaded
(= "true" (System/getenv "MULTITHREADED")))
(def jruby-service-and-dependencies
[jruby-puppet/jruby-puppet-pooled-service
profiler/puppet-profiler-service
jruby-pool-manager/jruby-pool-manager-service
metrics/metrics-service
scheduler-service/scheduler-service
status-service/status-service
jetty9-service/jetty9-service
webrouting-service/webrouting-service])
;; Schemas
(def JRubyPuppetTKConfig
"Schema combining JRubyPuppetConfig and JRubyConfig.
This represents what would be in a real TK configuration's jruby-puppet section,
so we remove some things from the JRubyConfig:
- remove :max-borrows-per-instance (keep :max-requests-per-instance)
- remove :lifecycle"
(-> jruby-puppet-schemas/JRubyPuppetConfig
(merge jruby-schemas/JRubyConfig)
(dissoc :max-borrows-per-instance
:lifecycle)))
;; JRubyPuppet Test util functions
(defn borrow-instance
"Borrows an instance from the JRubyPuppet interpreter pool. If there are no
interpreters left in the pool then the operation blocks until there is one
available. A timeout (integer measured in milliseconds) can be configured
which will either return an interpreter if one is available within the
timeout length, or will return nil after the timeout expires if no
interpreters are available. This timeout defaults to 1200000 milliseconds.
`reason` is an identifier (usually a map) describing the reason for borrowing the
JRuby instance. It may be used for metrics and logging purposes."
[jruby-puppet-service reason]
(let [{:keys [pool-context]} (tk-service/service-context jruby-puppet-service)
event-callbacks (jruby-core/get-event-callbacks pool-context)]
(jruby-core/borrow-from-pool-with-timeout pool-context reason event-callbacks)))
(defn return-instance
"Returns the JRubyPuppet interpreter back to the pool.
`reason` is an identifier (usually a map) describing the reason for borrowing the
JRuby instance. It may be used for metrics and logging purposes, so for
best results it should be set to the same value as it was set during the
`borrow-instance` call."
[jruby-puppet-service jruby-instance reason]
(let [pool-context (:pool-context (tk-service/service-context jruby-puppet-service))
event-callbacks (jruby-core/get-event-callbacks pool-context)]
(jruby-core/return-to-pool pool-context jruby-instance reason event-callbacks)))
(schema/defn create-mock-scripting-container :- ScriptingContainer
[]
(reify ScriptingContainer
(terminate [_])))
(schema/defn ^:always-validate
create-mock-pool-instance :- JRubyInstance
[mock-jruby-instance-creator-fn :- IFn
pool :- jruby-schemas/pool-queue-type
id :- schema/Int
config :- jruby-schemas/JRubyConfig]
(let [instance (jruby-schemas/map->JRubyInstance
{:id id
:internal {:pool pool
:max-borrows (:max-borrows-per-instance config)
:initial-borrows nil
:state (atom {:borrow-count 0})}
:scripting-container (create-mock-scripting-container)})
modified-instance (merge instance {:jruby-puppet (mock-jruby-instance-creator-fn)
:environment-registry (puppet-env/environment-registry)})]
(.register pool modified-instance)
modified-instance))
(defn jruby-puppet-tk-config
"Create a JRubyPuppet pool config with the given pool config. Suitable for use
in bootstrapping trapperkeeper (in other words, returns a representation of the
config that matches what would be read directly from the config files on disk,
as opposed to a version that has been processed and transformed to comply
with the JRubyPuppetConfig schema)."
[pool-config]
{:product {:name "puppetserver"
:update-server-url ":11111"}
:jruby-puppet pool-config
:http-client {}
:metrics {:server-id "localhost"}
:authorization {:version 1
:rules [{:match-request {:path "/" :type "path"}
:allow "*"
:sort-order 1
:name "allow all"}]}
:webserver {:host "localhost"}
:web-router-service {:puppetlabs.trapperkeeper.services.status.status-service/status-service
"/status"}})
(schema/defn ^:always-validate
jruby-puppet-config :- JRubyPuppetTKConfig
"Create a JRubyPuppetConfig for testing. The optional map argument `options` may
contain a map, which, if present, will be merged into the final JRubyPuppetConfig
map. (This function differs from `jruby-puppet-tk-config` in
that it returns a map that complies with the JRubyPuppetConfig schema, which
differs slightly from the raw format that would be read from config files
on disk.)"
([]
(let [combined-configs
(merge (jruby-puppet-core/initialize-puppet-config
{}
{:server-conf-dir conf-dir
:server-code-dir code-dir
:server-var-dir var-dir
:server-run-dir run-dir
:server-log-dir log-dir}
false)
(jruby-core/initialize-config {:ruby-load-path ruby-load-path
:gem-home gem-home
:gem-path gem-path
:multithreaded multithreaded}))
max-requests-per-instance (:max-borrows-per-instance combined-configs)
updated-config (-> combined-configs
(assoc :max-requests-per-instance max-requests-per-instance)
(dissoc :max-borrows-per-instance))]
(dissoc updated-config :lifecycle)))
([options]
(merge (jruby-puppet-config) options)))
(defn drain-pool
"Drains the JRubyPuppet pool and returns each instance in a vector."
[pool-context size]
(mapv (fn [_] (jruby-core/borrow-from-pool pool-context :test [])) (range size)))
(defn fill-drained-pool
"Returns a list of JRubyPuppet instances back to their pool."
[pool-context instance-list]
(doseq [instance instance-list]
(jruby-core/return-to-pool pool-context instance :test [])))
(defn reduce-over-jrubies!
"Takes a JRuby pool and size, and a function f from integer
to string. For each JRuby instance in the pool, f will be called, passing in
an integer offset into the jruby array (0..size), and f is expected to return
a string containing a script to run against the jruby instance.
Returns a vector containing the results of executing the scripts against the
JRuby instances."
[pool-context size f]
(let [jrubies (drain-pool pool-context size)
result (reduce
(fn [acc jruby-offset]
(let [sc (:scripting-container (nth jrubies jruby-offset))
script (f jruby-offset)
result (.runScriptlet sc script)]
(conj acc result)))
[]
(range size))]
(fill-drained-pool pool-context jrubies)
result))
(defn wait-for-jrubies
"Wait for all jrubies to land in the JRubyPuppetService's pool"
[app]
(let [pool-context (-> app
(tk-app/get-service :JRubyPuppetService)
tk-service/service-context
:pool-context)
num-jrubies (-> pool-context
jruby-core/get-pool-state
:size)]
(while (< (count (jruby-core/registered-instances pool-context))
num-jrubies)
(Thread/sleep 100))))
(schema/defn ^:always-validate
mock-puppet-config-settings :- {schema/Str schema/Any}
"Return a map of settings that mock the settings that core Ruby Puppet
would return via a call to JRubyPuppet.getSetting()."
[jruby-puppet-config :- {:server-conf-dir schema/Str
:server-code-dir schema/Str
schema/Keyword schema/Any}]
(let [certname "localhost"
confdir (:server-conf-dir jruby-puppet-config)
ssldir (str confdir "/ssl")
certdir (str ssldir "/certs")
cadir (str confdir "/ca")
private-key-dir (str ssldir "/private_keys")]
{"allow_duplicate_certs" false
"autosign" true
"keylength" 2048
"cadir" cadir
"cacert" (str cadir "/ca_crt.pem")
"ca_name" (str "Puppet CA: " certname)
"cacrl" (str cadir "/ca_crl.pem")
"cakey" (str cadir "/ca_key.pem")
"capub" (str cadir "/ca_pub.pem")
"ca_ttl" 157680000
"certdir" certdir
"certname" certname
"cert_inventory" (str cadir "/inventory.txt")
"codedir" (:server-code-dir jruby-puppet-config)
"csr_attributes" (str confdir "/csr_attributes.yaml")
"csrdir" (str cadir "/requests")
"dns_alt_names" ""
"hostcert" (str certdir "/" certname ".pem")
"hostcrl" (str ssldir "/crl.pem")
"hostprivkey" (str private-key-dir "/" certname ".pem")
"hostpubkey" (str ssldir "/public_keys/" certname ".pem")
"localcacert" (str certdir "/ca.pem")
"manage_internal_file_permissions" true
"privatekeydir" private-key-dir
"requestdir" (str ssldir "/certificate_requests")
"serial" (str cadir "/serial")
"signeddir" (str cadir "/signed")
"ssl_client_header" "HTTP_X_CLIENT_DN"
"ssl_client_verify_header" "HTTP_X_CLIENT_VERIFY"
"trusted_oid_mapping_file" (str confdir
"/custom_trusted_oid_mapping.yaml")}))
(schema/defn ^:always-validate
create-mock-jruby-puppet :- JRubyPuppet
"Create a 'mock' JRubyPuppet instance which returns fixed values for settings
and puppet version and a hard-coded HTTP 200 response for any requests it
handles."
([config :- {schema/Keyword schema/Any}]
(create-mock-jruby-puppet
(fn [_]
(throw (UnsupportedOperationException. "Mock handleRequest not defined")))
config))
([handle-request-fn :- IFn
config :- {schema/Keyword schema/Any}]
(let [puppet-config (merge
(mock-puppet-config-settings (:jruby-puppet config))
(:puppet config))]
(reify JRubyPuppet
(getSetting [_ setting]
(let [value (get puppet-config setting :not-found)]
(if (= value :not-found)
(throw (IllegalArgumentException.
(str "Setting not in mock-puppet-config-settings "
"requested: " setting ". Add an appropriate value "
"to the map to correct this problem.")))
value)))
(puppetVersion [_]
"1.2.3")
(handleRequest [_ request]
(handle-request-fn request))
(terminate [_])))))
(schema/defn ^:always-validate
create-mock-jruby-puppet-fn-with-handle-response-params :- IFn
"Return a function which, when invoked, will create a mock JRubyPuppet
instance. Supplied arguments - 'status-code', 'response-body',
'response-content-type', and 'puppet-version' are returned in the
'JRubyPuppetResponse' that the JRubyPuppet instance's .handleRequest
method returns when invoked. The default value for 'response-content-type',
if not supplied, is 'text/plain'. The default value for 'puppet-version',
if not supplied, is '1.2.3'."
([status-code :- schema/Int
response-body :- schema/Str]
(create-mock-jruby-puppet-fn-with-handle-response-params status-code
response-body
"text/plain"))
([status-code :- schema/Int
response-body :- schema/Str
response-content-type :- schema/Str]
(create-mock-jruby-puppet-fn-with-handle-response-params status-code
response-body
response-content-type
"1.2.3"))
([status-code :- schema/Int
response-body :- schema/Str
response-content-type :- schema/Str
puppet-version :- schema/Str]
(partial create-mock-jruby-puppet
(fn [_]
(JRubyPuppetResponse.
(Integer. status-code)
response-body
response-content-type
puppet-version)))))
(schema/defn ^:always-validate
create-mock-pool :- jruby-schemas/PoolContext
"Create a 'mock' JRuby pool. The pool is filled with the number 'mock'
JRubyPuppet instances specified in the jruby-config. The supplied
'mock-jruby-puppet-fn' is invoked for each instance to be created for the
pool and is expected to return an object of type 'JRubyPuppet'."
[jruby-config :- jruby-schemas/JRubyConfig
mock-jruby-puppet-fn :- IFn]
(let [pool-context (jruby-pool-manager-core/create-pool-context jruby-config)
pool (jruby-internal/get-pool pool-context)
count (.remainingCapacity pool)]
(dotimes [i count]
(let [id (inc i)]
(create-mock-pool-instance
mock-jruby-puppet-fn
pool
id
jruby-config)))
pool-context))
(schema/defn ^:always-validate
mock-jruby-pool-manager-service
:- (schema/protocol tk-service/ServiceDefinition)
"Create a 'mock' JRubyPoolManagerService, with a create-pool function
which returns a 'mock' JRuby pool when called. The supplied
'mock-jruby-puppet-fn' is invoked for each instance to be created for the pool
and is expected to return an object of type 'JRubyPuppet'. The 'config'
option is passed back as an argument to 'mock-jruby-puppet-fn', if supplied."
([config :- {schema/Keyword schema/Any}]
(mock-jruby-pool-manager-service config
create-mock-jruby-puppet))
([config :- {schema/Keyword schema/Any}
mock-jruby-puppet-fn :- IFn]
(tk-service/service
pool-manager-protocol/PoolManagerService
[]
(create-pool
[this jruby-config]
(create-mock-pool jruby-config (partial mock-jruby-puppet-fn config))))))
(defn add-mock-jruby-pool-manager-service
([services config]
(add-mock-jruby-pool-manager-service services config create-mock-jruby-puppet))
([services config mock-jruby-puppet-fn]
(->> services
(remove #(= :PoolManagerService (tk-service/service-def-id %)))
vec
(cons (mock-jruby-pool-manager-service
config
mock-jruby-puppet-fn)))))
(defn jruby-service-and-dependencies-with-mocking
[config]
(add-mock-jruby-pool-manager-service
jruby-service-and-dependencies
config))
|
d1fca57ceb29ed7df34205d5efc65cbffddcbfd80968d75b12cab965f70ac281 | maybevoid/casimir | Base.hs | {-|
Module : Casimir.Base
Base module that defines the basic datatypes for implicit-effects.
-}
module Casimir.Base
( Effect
, EffOps (..)
, EffFunctor (..)
, ImplicitOps (..)
, Eff
, EffConstraint
, NoEff
, NoOp (..)
, Lift (..)
, MaybeLift (..)
, LiftMonoid (..)
, LiftFunctor (..)
, FreeLift (..)
, HigherLift (..)
, type (∪)
, type (~>)
, NoConstraint
, Union
, UnionOps (..)
, ContraLift (..)
, (∪)
, leftOps
, rightOps
, joinContraLift
, identityContraLift
)
where
import Casimir.Base.EffOps
import Casimir.Base.Effect
import Casimir.Base.EffFunctor
import Casimir.Base.Implicit
import Casimir.Base.Union
import Casimir.Base.NoOp
import Casimir.Base.Lift
import Casimir.Base.ContraLift
| null | https://raw.githubusercontent.com/maybevoid/casimir/ebbfa403739d6f258e6ac6793549006a0e8bff42/casimir/src/lib/Casimir/Base.hs | haskell | |
Module : Casimir.Base
Base module that defines the basic datatypes for implicit-effects.
-}
module Casimir.Base
( Effect
, EffOps (..)
, EffFunctor (..)
, ImplicitOps (..)
, Eff
, EffConstraint
, NoEff
, NoOp (..)
, Lift (..)
, MaybeLift (..)
, LiftMonoid (..)
, LiftFunctor (..)
, FreeLift (..)
, HigherLift (..)
, type (∪)
, type (~>)
, NoConstraint
, Union
, UnionOps (..)
, ContraLift (..)
, (∪)
, leftOps
, rightOps
, joinContraLift
, identityContraLift
)
where
import Casimir.Base.EffOps
import Casimir.Base.Effect
import Casimir.Base.EffFunctor
import Casimir.Base.Implicit
import Casimir.Base.Union
import Casimir.Base.NoOp
import Casimir.Base.Lift
import Casimir.Base.ContraLift
|
|
d710ef1eb11d41c70cad6fec4820fd0c72d03f9f4b828021658fea7817256b96 | jeromesimeon/Galax | function_analysis.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
(* Copyright 2001-2007. *)
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
(* $Id: function_analysis.mli,v 1.5 2007/02/01 22:08:52 simeon Exp $ *)
(* Module: Function_analysis
Description:
This module implements the path analysis for built-in functions.
*)
(* Type for the kind of function *)
type function_kind =
(* NOTE: This needs to be documented!! - *)
| UsedReturnSimple
| UsedReturnSubtree
| ReturnsPaths
| ReturnsDefault
(* Returns the kind of function *)
val get_fun_analysis_type : Namespace_names.rqname -> function_kind
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/projection/function_analysis.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Function_analysis
Description:
This module implements the path analysis for built-in functions.
Type for the kind of function
Returns the kind of function | Copyright 2001 - 2007 .
(* $Id: function_analysis.mli,v 1.5 2007/02/01 22:08:52 simeon Exp $ *)
type function_kind =
(* NOTE: This needs to be documented!! - *)
| UsedReturnSimple
| UsedReturnSubtree
| ReturnsPaths
| ReturnsDefault
val get_fun_analysis_type : Namespace_names.rqname -> function_kind
|
7ed97bbc26fe425a62c58b7af79620e12d0707ea9c21ca9cae71dd01829b14ef | fluree/ledger | validation.clj | (ns fluree.db.ledger.transact.validation
(:require [fluree.db.util.async :refer [<? <?? go-try channel?] :as async-util]
[fluree.db.dbfunctions.core :as dbfunctions]
[fluree.db.query.range :as query-range]
[fluree.db.constants :as const]
[clojure.core.async :as async]
[fluree.db.util.core :as util]
[fluree.db.dbproto :as dbproto]
[fluree.db.ledger.transact.schema :as tx-schema]
[fluree.db.ledger.transact.tx-meta :as tx-meta]
[fluree.db.util.log :as log]
[fluree.db.permissions-validate :as perm-validate]
[fluree.db.flake :as flake]
[fluree.db.api :as fdb]
[fluree.db.ledger.transact.tempid :as tempid])
(:import (fluree.db.flake Flake)))
(set! *warn-on-reflection* true)
;;; functions to validate transactions
(defn queue-validation-fn
"Queues a validation function to be run once we have a db-after completed.
There are a few basic types of validations functions, some of which must
also queue flakes into tx-state that will be inputs to the function
- unique - no flake queuing necessary - just run at end of transaction
- predicate - no flake queueing necessary as run once per flake
- predicate-tx - must queue all the predicate flakes of this type which may be
across multiple transaction items. flake-or-flakes will be a single flake.
As this is executed once per predicate, the function might already be
queued, and if so 'f' will be nil
- collection - must queue all the collection flakes per sid, as they may be across
multiple transactions (but usually will not be). flake-or-flakes will be a sequence"
[fn-type {:keys [validate-fn]} f flakes pid-or-sid]
(case fn-type
:unique (swap! validate-fn update :queue conj f)
:predicate (swap! validate-fn update :queue conj f)
:predicate-tx (swap! validate-fn (fn [validate-data]
(cond-> validate-data
f (update :queue conj f)
true (update-in [:tx-spec pid-or-sid] into flakes))))
:collection (swap! validate-fn (fn [validate-data]
(let [existing-flakes (get-in validate-data [:c-spec pid-or-sid])]
(cond-> validate-data
true (assoc-in [:c-spec pid-or-sid] (into existing-flakes flakes))
;; if flakes exist, we already queued a fn for this subject, don't want multiple
(empty? existing-flakes) (update :queue conj f)))))))
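;; Illustrative sketch (not part of the original file): queueing a :unique check
;; just conj's the function onto the :queue key of the validate-fn atom; the atom
;; shape below mirrors the keys this namespace reads back later.
(comment
  (let [tx-state {:validate-fn (atom {:queue [] :cache {} :tx-spec {} :c-spec {}})}]
    (queue-validation-fn :unique tx-state (fn [_tx-state] true) nil nil)
    (count (:queue @(:validate-fn tx-state))))
  ;; => 1
  )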
;; TODO - this can be done per schema change, not per transaction, to make even more efficient.
;; TODO - calls to this now block to solve resource contention issues - can remove the promise as no longer do in background
(defn build-function
"Builds a function based on a function subject-id (or a list of function subject ids) and
delivers the executable function to the provided promise.
Promise is used here to cache this work so it is only done once per transaction.
fn-type should be:
- predSpec - for predicate Spec
- "
[fn-subjects db promise fn-type]
(async/go
(try
(let [fn-str (->> fn-subjects ;; combine multiple functions with an (and ...) wrapper
(map #(query-range/index-range db :spot = [% const/$_fn:code]))
async/merge
(async/into [])
(<?)
(map #(if (util/exception? %) (throw %) (.-o ^Flake (first %))))
dbfunctions/combine-fns)
fn-parsed (<? (dbfunctions/parse-and-wrap-fn db fn-str fn-type))]
(deliver promise fn-parsed))
(catch Exception e (deliver promise e)))))
(defn resolve-function
"Returns promise for function that will get used. Tries to get one from cache
and if not already processed then will create a new promise channel.
Resistant to race conditions which are unlikely, but possible."
[fn-subjects db validate-fn-atom fn-type]
(or (get-in @validate-fn-atom [:cache fn-subjects])
(let [p (promise)]
(swap! validate-fn-atom update :cache
(fn [fn-cache]
(if (get fn-cache fn-subjects)
fn-cache ;; something put a promise in here while we were checking, just return
(do (<?? (build-function fn-subjects db p fn-type))
(assoc fn-cache fn-subjects p)))))
;; return whatever promise was in the cache - either one we just created or existing one if a race condition existed
(get-in @validate-fn-atom [:cache fn-subjects]))))
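;; Illustrative sketch (not part of the original file): the promise doubles as a
;; per-transaction cache, so repeated lookups for the same spec function subjects
;; return the very same function promise. The subject-id vector is made up, and
;; `db-root` / `validate-fn` stand for the corresponding tx-state values.
(comment
  (let [p1 (resolve-function [12345] db-root validate-fn "predSpec")
        p2 (resolve-function [12345] db-root validate-fn "predSpec")]
    (identical? p1 p2))
  ;; => true
  )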
(defn- async-response-wrapper
"Wraps a core.async response coming off a port with a formatted response."
[port response-fn]
(async/pipe port (async/chan 1 (map response-fn))))
(defn update-tx-spent-fuel
"Executing functions should consume fuel. Adds fuel to the master fuel atom."
[fuel spent]
(swap! fuel (fn [fuel] (-> fuel
(update :credits - spent)
(update :spent + spent))))
(when (neg? (get @fuel :credits))
(throw (ex-info "Transaction unable to complete, all allocated fuel has been exhausted."
{:status 400
:error :db/insufficient-fuel}))))
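;; Illustrative sketch (not part of the original file): spending fuel moves
;; :credits down and :spent up by the same amount; driving :credits negative
;; throws the insufficient-fuel exception above.
(comment
  (let [fuel (atom {:credits 10 :spent 0})]
    (update-tx-spent-fuel fuel 4)
    @fuel)
  ;; => {:credits 6, :spent 4}
  )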
;; unique: true special handling
;; unique values, in specific situations, require validation after the transaction is finalized to make sure
;; (a) if there is an existing unique match (pred+object), it can be OK so long as the existing match
;; is deleted somewhere within the transaction
;; (b) any tempids used for unique:true might have resolved to existing subjects (via ':upsert true') which
;; needs to happen after any such resolution happens
(defn queue-check-unique-match-retracted
"if there is an existing unique match (pred+object), it can be OK so long as the existing match
is deleted in the transaction, which we can validate once the transaction is complete by looking
for the specific retraction flake we expect to see - else throw."
[existing-flake _id pred-info object tx-state]
(let [f (fn [{:keys [flakes t]}]
(let [check-flake (flake/flip-flake existing-flake t)
retracted? (contains? flakes check-flake)]
(if retracted?
true
(throw (ex-info (str "Unique predicate " (pred-info :name) " with value: "
object " matched an existing subject.")
{:status 400
:error :db/invalid-tx
:cause [_id (pred-info :name) object]
:conflict [(.-s ^Flake existing-flake)]})))))]
(queue-validation-fn :unique tx-state f nil nil)))
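;; Illustrative sketch (not part of the original file): the queued check passes
;; only when the transaction's final flake set contains the retraction of the
;; existing match, i.e. the same flake flipped to a retraction at this tx's t.
;; `existing-flake`, `t` and `flakes` stand for the values the queued fn sees.
(comment
  (let [retraction (flake/flip-flake existing-flake t)]
    (contains? flakes retraction))
  ;; truthy => the conflicting assertion is retracted in this tx, so it is allowed
  )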
(defn queue-check-unique-tempid-still-unique
"if there is an existing unique match (pred+object), it can be OK so long as the existing match
is deleted in the transaction, which we can validate once the transaction is complete by looking
for the specific retraction flake we expect to see - else throw.
- tempid - the tempid object value that is supposed to be unique
- _id - the _id of the transaction item that caused this validation to be run, might be a tempid
- pred-info - pred-info function allows getting different information about this predicate
- tx-state - transaction state, which is also passed in as the only argument in the final validation fn"
[tempid _id pred-info tx-state]
(let [f (fn [{:keys [tempids db-after]}]
(go-try
(let [tempid-sid (get @tempids tempid)
_id* (if (tempid/TempId? _id)
(get @tempids _id)
_id)
matches (<? (query-range/index-range @db-after :post = [(pred-info :id) tempid-sid]))
matches-n (count matches)]
;; should be a single match, whose .-s is the final _id of the transacted flake
(if (not= 1 matches-n)
(throw (ex-info (str "Unique predicate " (pred-info :name) " with a tempid value: "
(:user-string tempid) " resolved to subject: " tempid-sid
", which is not unique.")
{:status 400 :error :db/invalid-tx :tempid tempid}))
;; one match as expected... extra check here as match .-s should always equal the _id*,
;; else something strange happened
(or (= _id* (.-s ^Flake (first matches)))
(throw (ex-info (str "Unique predicate resolved to mis-matched subject.")
{:status 500
:error :db/unexpected-error
:cause [(:user-string tempid) (pred-info :name)]
:conflict (vec (first matches))})))))))]
(queue-validation-fn :unique tx-state f nil nil)))
;; predicate specs
(defn- pred-spec-response
"Returns a true for a valid spec response, or an exception (but does not throw) for an invalid one.
If response is an exception, wraps exception message."
[specDoc predicate-name ^Flake flake response]
(cond
;; some error in processing happened, don't allow transaction but communicate internal error
(util/exception? response)
(ex-info (str "Internal execution error for predicate spec: " (ex-message response) ". "
"Predicate spec failed for predicate: " predicate-name "." (when specDoc (str " " specDoc)))
{:status 400
:error :db/predicate-spec
:cause (vec flake)
:specDoc specDoc
:ex-message (ex-message response)
:ex-data (ex-data response)})
;; any value, spec succeeded - allow transaction
response
true
;; non truthy value, spec failed - do not allow transaction
:else
(ex-info (str "Predicate spec failed for predicate: " predicate-name "." (when specDoc (str " " specDoc)))
{:status 400
:error :db/predicate-spec
:cause (vec flake)})))
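;; Illustrative sketch (not part of the original file): how the three kinds of
;; spec results are mapped; `some-flake` is a placeholder for the flake under test.
(comment
  (pred-spec-response "must be > 0" "person/age" some-flake true)                ;; => true
  (pred-spec-response "must be > 0" "person/age" some-flake false)               ;; => ex-info (spec failed)
  (pred-spec-response "must be > 0" "person/age" some-flake (ex-info "boom" {})) ;; => ex-info (internal error)
  )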
(defn run-predicate-spec
[fn-promise ^Flake flake predicate-name specDoc {:keys [fuel t auth db-after]}]
(let [sid (.-s flake)
pid (.-p flake)
o (.-o flake)]
(try
(let [fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @fn-promise
_ (log/debug "predicate spec fn:" f)
res (f {:db @db-after
:sid sid
:pid pid
:o o
:flakes [flake]
:auth_id auth
:state fuel-atom
:t t})]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(if (async-util/channel? res)
(async-response-wrapper res (partial pred-spec-response specDoc predicate-name flake))
(pred-spec-response specDoc predicate-name flake res)))
(catch Exception e (pred-spec-response specDoc predicate-name flake e)))))
(defn queue-pred-spec
"Flakes param flows through, queues spec for flake"
[flakes flake pred-info {:keys [validate-fn db-root] :as tx-state}]
(let [fn-sids (pred-info :spec)
specDoc (pred-info :specDoc)
predicate-name (pred-info :name)
fn-promise (resolve-function fn-sids db-root validate-fn "predSpec")
pred-spec-fn (-> run-predicate-spec
(partial fn-promise flake predicate-name specDoc)
(with-meta {:type :predicate-spec
:predicate predicate-name
:target flake
:fn-sid fn-sids
:doc specDoc}))]
(queue-validation-fn :predicate tx-state pred-spec-fn nil nil)
flakes))
;; predicate tx-spec
(defn- pred-tx-spec-response
[pred-name flakes tx-spec-doc response]
(cond
(util/exception? response)
(ex-info (str "Internal execution error for predicate txSpec: " (.getMessage ^Exception response) ". "
"Predicate txSpec failed for: " pred-name "." (when tx-spec-doc (str " " tx-spec-doc)))
{:status 400
:error :db/predicate-tx-spec
:cause flakes
:ex-message (ex-message response)
:ex-data (ex-data response)})
response
true
:else
(ex-info (str "Predicate txSpec failed for: " pred-name "." (when tx-spec-doc (str " " tx-spec-doc)))
{:status 400
:error :db/predicate-tx-spec
:cause flakes})))
(defn run-predicate-tx-spec
"This function is designed to be called with a (partial pid pred-name txSpecDoc) and
returns a function whose only argument is tx-state, which can be used to get the final
list of predicate flakes affected by this predicate."
[pid pred-tx-fn pred-name tx-spec-doc {:keys [db-root auth instant fuel validate-fn t]}]
(try
(let [pid-flakes (get-in @validate-fn [:tx-spec pid])
fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @pred-tx-fn
res (f {:db db-root
:pid pid
:instant instant
:flakes pid-flakes
:auth_id auth
:state fuel-atom
:t t})]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(if (async-util/channel? res)
(async-response-wrapper res (partial pred-tx-spec-response pred-name pid-flakes tx-spec-doc))
(pred-tx-spec-response pred-name pid-flakes tx-spec-doc res)))
(catch Exception e (pred-tx-spec-response pred-name (get-in @validate-fn [:tx-spec pid]) tx-spec-doc e))))
(defn- build-predicate-tx-spec-fn
"When a predicate-tx-spec function hasn't already been queued for a particular predicate,
do so and place the function into the validating function queue for processing."
[pred-info db]
(let [pred-tx-fn (promise)
pred-name (pred-info :name)
tx-spec-doc (pred-info :txSpecDoc)
pid (pred-info :id)
fn-sids (pred-info :txSpec)
queue-fn (-> run-predicate-tx-spec
(partial pid pred-tx-fn pred-name tx-spec-doc)
(with-meta {:type :predicate-tx-spec
:target pred-name
:fn-sids fn-sids}))]
;; kick off building function, will put realized function into pred-tx-fn promise
(<?? (build-function fn-sids db pred-tx-fn "predSpec"))
;; return function
queue-fn))
(defn queue-predicate-tx-spec
"Passes 'flakes' through function untouched, but queues predicate spec for
execution once db-after is resolved.
Predicates that have a txSpec defined need to run once for all flakes with the
same predicate as inputs.
Queuing a flake here adds it to a map by predicate. We also kick off resolving
the txSpec function in the background if not already done, so it can be ready
when the transaction is completed to run the validation.
For each predicate that requires a txSpec function to be run, we store
a two-tuple of the function (as a promise) and a list of flakes for that predicate
that must be validated."
[flakes predicate-flakes pred-info {:keys [validate-fn db-root] :as tx-state}]
(let [pid (pred-info :id)
tx-spec-fn (when (empty? (get-in @validate-fn [:tx-spec pid]))
;; first time called (no existing flakes for this tx-spec), generate and queue fn also
(build-predicate-tx-spec-fn pred-info db-root))]
(queue-validation-fn :predicate-tx tx-state tx-spec-fn predicate-flakes pid)
flakes))
;; collection specs
(defn- collection-spec-response
[flakes collection c-spec-doc response]
(cond
(util/exception? response)
(ex-info (str "Internal execution error for collection spec: " (.getMessage ^Exception response) ". "
"Collection spec failed for: " collection "." (when c-spec-doc (str " " c-spec-doc)))
{:status 400
:error :db/collection-spec
:flakes flakes
:cause response})
response
true
:else
(ex-info (str "Collection spec failed for: " collection "." (when c-spec-doc (str " " c-spec-doc)))
{:status 400
:error :db/collection-spec
:flakes flakes})))
(defn run-collection-spec
"Runs a collection spec. Will only execute collection spec if there are still flakes for
the subject that exist."
[collection sid c-spec-fn c-spec-doc {:keys [db-after instant validate-fn auth t fuel]}]
(async/go
(try
(let [subject-flakes (get-in @validate-fn [:c-spec sid])
has-adds? (some #(.-op ^Flake %) subject-flakes) ;; stop at first `true` .-op
deleted? (or (false? has-adds?) ; has-adds? is nil when not found
(empty? (<? (query-range/index-range @db-after :spot = [sid]))))]
(if deleted?
true
(let [fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @c-spec-fn
res (f {:db @db-after
:instant instant
:sid sid
:flakes subject-flakes
:auth_id auth
:t t
:state fuel-atom})
res* (if (channel? res) (async/<! res) res)]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(collection-spec-response subject-flakes collection c-spec-doc res*))))
(catch Exception e (collection-spec-response (get-in @validate-fn [:c-spec sid]) collection c-spec-doc e)))))
(defn queue-collection-spec
[collection c-spec-fn-ids {:keys [validate-fn db-root] :as tx-state} subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
c-spec-fn (resolve-function c-spec-fn-ids db-root validate-fn "collectionSpec")
c-spec-doc (or (dbproto/-c-prop db-root :specDoc collection) collection) ;; use collection name as default specDoc
execute-fn (-> run-collection-spec
(partial collection sid c-spec-fn c-spec-doc)
(with-meta {:type :collection-spec
:target sid
:fn-sid c-spec-fn-ids
:doc c-spec-doc}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
(defn queue-predicate-collection-spec
[tx-state subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
execute-fn (-> tx-schema/validate-schema-predicate
(partial sid)
(with-meta {:type :collection-spec
:target sid
:fn :validate-schema-predicate}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
(defn queue-tx-meta-collection-spec
[tx-state subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
execute-fn (-> tx-meta/valid-tx-meta?
(partial sid)
(with-meta {:type :collection-spec
:target sid
:fn :validate-tx-meta}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
(defn check-collection-specs
"If a collection spec is needed, register it for processing the subject's flakes."
[collection {:keys [db-root] :as tx-state} subject-flakes]
(let [c-spec-fn-ids (dbproto/-c-prop db-root :spec collection)]
(when c-spec-fn-ids
(queue-collection-spec collection c-spec-fn-ids tx-state subject-flakes))
;; schema changes and user-specified _tx require internal custom validations
(cond
(= "_predicate" collection)
(queue-predicate-collection-spec tx-state subject-flakes)
(= "_tx" collection)
(queue-tx-meta-collection-spec tx-state subject-flakes)
(= "_collection" collection)
(tx-schema/validate-collection-name subject-flakes)
:else nil)
subject-flakes))
;; Permissions
(defn permissions
"Validates transaction based on the state of the new database.
Exceptions here should throw: caught by go-try."
[db-before candidate-db flakes]
(go-try
(let [tx-permissions (:permissions db-before)
no-filter? (true? (:root? tx-permissions))]
(if no-filter?
;; everything allowed, just return
true
;; go through each statement and check
(loop [[^Flake flake & r] flakes]
(when (> (.-s flake) const/$maxSystemPredicates)
(when-not (if (.-op flake)
(<? (perm-validate/allow-flake? candidate-db flake tx-permissions))
(<? (perm-validate/allow-flake? db-before flake tx-permissions)))
(throw (ex-info (format "Insufficient permissions for predicate: %s within collection: %s."
(dbproto/-p-prop db-before :name (.-p flake))
(dbproto/-c-prop db-before :name (flake/sid->cid (.-s flake))))
{:status 400
:error :db/tx-permission}))))
(if r
(recur r)
true))))))
(defn run-permissions-checks
[all-flakes {:keys [db-before db-after]} parallelism]
(go-try
(let [db-after @db-after
queue-ch (async/chan parallelism)
result-ch (async/chan parallelism)
tx-permissions (:permissions db-before)
af (fn [^Flake flake res-chan]
(async/go
(try
(let [fn-res (if (.-op flake)
(async/<! (perm-validate/allow-flake? db-after flake tx-permissions))
(async/<! (perm-validate/allow-flake? db-before flake tx-permissions)))
res (if fn-res
fn-res ;; any value means valid
(ex-info (format "Insufficient permissions for predicate: %s within collection: %s."
(dbproto/-p-prop db-before :name (.-p flake))
(dbproto/-c-prop db-before :name (flake/sid->cid (.-s flake))))
{:status 400
:error :db/write-permission
:cause (vec flake)}))]
(async/put! res-chan res)
(async/close! res-chan))
(catch Exception e (async/put! res-chan e)
(async/close! res-chan)))))]
(->> all-flakes
(filter (fn [^Flake flake] (> (.-s flake) const/$maxSystemPredicates))) ;; skip all system predicates
(async/onto-chan! queue-ch))
(async/pipeline-async parallelism result-ch af queue-ch)
(loop [errors []]
(let [next-res (async/<! result-ch)]
(cond
;; no more functions, complete - queue-ch closed as queue was exhausted
(nil? next-res)
(->> errors
(map #(let [ex (ex-data %)]
(when (= 500 (:status ex))
(log/error % "Unexpected validation error in transaction! Flakes:" all-flakes))
(assoc ex :message (ex-message %))))
(not-empty))
(util/exception? next-res)
(recur (conj errors next-res))
;; anything else, all good - keep going
:else (recur errors)))))))
;; dependencies
(defn tx-deps-check
"A transaction can optionally include a list of dependent transactions.
Returns true if dependency check is successful, throws exception if there
is an error.
Exceptions here should throw: catch by go-try."
[db {:keys [deps] :as tx-map}]
(go-try
(let [res (->> deps
(reduce-kv (fn [query-acc key dep]
(-> query-acc
(update :selectOne conj (str "?error" key))
(update :where conj [(str "?tx" key) "_tx/id" dep])
(update :optional conj [(str "?tx" key) "_tx/error" (str "?error" key)])))
{:selectOne [] :where [] :optional []})
(fdb/query-async (go-try db))
<?)]
(if (and (seq res) (every? nil? res))
true
(throw (ex-info (str "One or more of the dependencies for this transaction failed: " deps)
{:status 400 :error :db/invalid-dependency}))))))
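;; Illustrative sketch (not part of the original file): for deps ["abc" "def"]
;; the reduce-kv above builds roughly this query before running it with
;; fdb/query-async; the check passes only if every ?errorN comes back nil.
(comment
  {:selectOne ["?error0" "?error1"]
   :where     [["?tx0" "_tx/id" "abc"] ["?tx1" "_tx/id" "def"]]
   :optional  [["?tx0" "_tx/error" "?error0"] ["?tx1" "_tx/error" "?error1"]]}
  )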
;; Runtime
(defn run-queued-specs
"Runs validation functions in parallel according to parallelism. Will return
'true' if all functions pass (or if there were no functions to process)
validate-fn is an atom that contains:
- queue
- cache
- tx-spec
- c-spec"
[all-flakes {:keys [validate-fn] :as tx-state} parallelism]
(go-try
(let [{:keys [queue]} @validate-fn]
(when (not-empty queue) ;; if nothing in queue, return
(let [tx-state* (assoc tx-state :flakes all-flakes)
queue-ch (async/chan parallelism)
result-ch (async/chan parallelism)
af (fn [f res-chan]
(async/go
(let [fn-result (try (f tx-state*)
(catch Exception e e))]
(async/>! res-chan
(if (channel? fn-result)
(async/<! fn-result)
fn-result))
(async/close! res-chan))))]
;; kicks off process to push queue onto queue-ch
(async/onto-chan! queue-ch queue)
;; start executing functions, pushing results to result-ch. result-ch will close once queue-ch closes
(async/pipeline-async parallelism result-ch af queue-ch)
;; read results, for now we collection all errors
(loop [errors []]
(let [next-res (async/<! result-ch)]
(cond
;; no more functions, complete - queue-ch closed as queue was exhausted
(nil? next-res)
(->> errors
(map #(let [ex (ex-data %)]
(when (= 500 (:status ex))
(log/error % "Unexpected validation error in transaction! Flakes:" all-flakes))
(assoc ex :message (ex-message %))))
(not-empty))
(util/exception? next-res)
(recur (conj errors next-res))
;; anything else, all good - keep going
:else (recur errors)))))))))
| null | https://raw.githubusercontent.com/fluree/ledger/796ea08d2a78f7ebca35301fcb91c4159f68c5c2/src/fluree/db/ledger/transact/validation.clj | clojure | functions to validate transactions
if flakes exist, we already queued a fn for this subject, don't want multiple
TODO - calls to this now block to solve resource contention issues - can remove the promise as no longer do in background
combine multiple functions with an (and ...) wrapper
something put a promise in here while we were checking, just return
return whatever promise was in the cache - either one we just created or existing one if a race condition existed
unique: true special handling
unique values, in specific situations, require validation after the transaction is finalized to make sure
(a) if there is an existing unique match (pred+object), it can be OK so long as the existing match
is deleted somewhere within the transaction
(b) any tempids used for unique:true might have resolved to existing subjects (via ':upsert true') which
needs to happen after any such resolution happens
should be a single match, whose .-s is the final _id of the transacted flake
else something strange happened
predicate specs
some error in processing happened, don't allow transaction but communicate internal error
non truthy value, spec failed - do not allow transaction
return function
collection specs
has-adds? is nil when not found
use collection name as default specDoc
schema changes and user-specified _tx require internal custom validations
Permissions
everything allowed, just return
go through each statement and check
skip all system predicates
no more functions, complete - queue-ch closed as queue was exhausted
anything else, all good - keep going
dependencies
Runtime
if nothing in queue, return
kicks off process to push queue onto queue-ch
start executing functions, pushing results to result-ch. result-ch will close once queue-ch closes
read results, for now we collect all errors
no more functions, complete - queue-ch closed as queue was exhausted
anything else, all good - keep going | (ns fluree.db.ledger.transact.validation
(:require [fluree.db.util.async :refer [<? <?? go-try channel?] :as async-util]
[fluree.db.dbfunctions.core :as dbfunctions]
[fluree.db.query.range :as query-range]
[fluree.db.constants :as const]
[clojure.core.async :as async]
[fluree.db.util.core :as util]
[fluree.db.dbproto :as dbproto]
[fluree.db.ledger.transact.schema :as tx-schema]
[fluree.db.ledger.transact.tx-meta :as tx-meta]
[fluree.db.util.log :as log]
[fluree.db.permissions-validate :as perm-validate]
[fluree.db.flake :as flake]
[fluree.db.api :as fdb]
[fluree.db.ledger.transact.tempid :as tempid])
(:import (fluree.db.flake Flake)))
(set! *warn-on-reflection* true)
(defn queue-validation-fn
"Queues a validation function to be run once we have a db-after completed.
There are a few basic types of validations functions, some of which must
also queue flakes into tx-state that will be inputs to the function
- unique - no flake queuing necessary - just run at end of transaction
- predicate - no flake queueing necessary as run once per flake
- predicate-tx - must queue all the predicate flakes of this type which may be
across multiple transaction items. flake-or-flakes will be a single flake.
As this is executed once per predicate, the function might already be
queued, and if so 'f' will be nil
- collection - must queue all the collection flakes per sid, as they may be across
multiple transactions (but usually will not be). flake-or-flakes will be a sequence"
[fn-type {:keys [validate-fn]} f flakes pid-or-sid]
(case fn-type
:unique (swap! validate-fn update :queue conj f)
:predicate (swap! validate-fn update :queue conj f)
:predicate-tx (swap! validate-fn (fn [validate-data]
(cond-> validate-data
f (update :queue conj f)
true (update-in [:tx-spec pid-or-sid] into flakes))))
:collection (swap! validate-fn (fn [validate-data]
(let [existing-flakes (get-in validate-data [:c-spec pid-or-sid])]
(cond-> validate-data
true (assoc-in [:c-spec pid-or-sid] (into existing-flakes flakes))
(empty? existing-flakes) (update :queue conj f)))))))
;; TODO - this can be done per schema change, not per transaction, to make even more efficient.
(defn build-function
"Builds a function based on a function subject-id (or a list of function subject ids) and
delivers the executable function to the provided promise.
Promise is used here to cache this work so it is only done once per transaction.
fn-type should be:
- predSpec - for predicate Spec
- "
[fn-subjects db promise fn-type]
(async/go
(try
(let [fn-str (->> fn-subjects
(map #(query-range/index-range db :spot = [% const/$_fn:code]))
async/merge
(async/into [])
(<?)
(map #(if (util/exception? %) (throw %) (.-o ^Flake (first %))))
dbfunctions/combine-fns)
fn-parsed (<? (dbfunctions/parse-and-wrap-fn db fn-str fn-type))]
(deliver promise fn-parsed))
(catch Exception e (deliver promise e)))))
(defn resolve-function
"Returns promise for function that will get used. Tries to get one from cache
and if not already processed then will create a new promise channel.
Resistant to race conditions which are unlikely, but possible."
[fn-subjects db validate-fn-atom fn-type]
(or (get-in @validate-fn-atom [:cache fn-subjects])
(let [p (promise)]
(swap! validate-fn-atom update :cache
(fn [fn-cache]
(if (get fn-cache fn-subjects)
fn-cache ;; something put a promise in here while we were checking, just return
(do (<?? (build-function fn-subjects db p fn-type))
(assoc fn-cache fn-subjects p)))))
(get-in @validate-fn-atom [:cache fn-subjects]))))
(defn- async-response-wrapper
"Wraps a core.async response coming off a port with a formatted response."
[port response-fn]
(async/pipe port (async/chan 1 (map response-fn))))
(defn update-tx-spent-fuel
"Executing functions should consume fuel. Adds fuel to the master fuel atom."
[fuel spent]
(swap! fuel (fn [fuel] (-> fuel
(update :credits - spent)
(update :spent + spent))))
(when (neg? (get @fuel :credits))
(throw (ex-info "Transaction unable to complete, all allocated fuel has been exhausted."
{:status 400
:error :db/insufficient-fuel}))))
(defn queue-check-unique-match-retracted
"if there is an existing unique match (pred+object), it can be OK so long as the existing match
is deleted in the transaction, which we can validate once the transaction is complete by looking
for the specific retraction flake we expect to see - else throw."
[existing-flake _id pred-info object tx-state]
(let [f (fn [{:keys [flakes t]}]
(let [check-flake (flake/flip-flake existing-flake t)
retracted? (contains? flakes check-flake)]
(if retracted?
true
(throw (ex-info (str "Unique predicate " (pred-info :name) " with value: "
object " matched an existing subject.")
{:status 400
:error :db/invalid-tx
:cause [_id (pred-info :name) object]
:conflict [(.-s ^Flake existing-flake)]})))))]
(queue-validation-fn :unique tx-state f nil nil)))
(defn queue-check-unique-tempid-still-unique
"if there is an existing unique match (pred+object), it can be OK so long as the existing match
is deleted in the transaction, which we can validate once the transaction is complete by looking
for the specific retraction flake we expect to see - else throw.
- tempid - the tempid object value that is supposed to be unique
- _id - the _id of the transaction item that caused this validation to be run, might be a tempid
- pred-info - pred-info function allows getting different information about this predicate
- tx-state - transaction state, which is also passed in as the only argument in the final validation fn"
[tempid _id pred-info tx-state]
(let [f (fn [{:keys [tempids db-after]}]
(go-try
(let [tempid-sid (get @tempids tempid)
_id* (if (tempid/TempId? _id)
(get @tempids _id)
_id)
matches (<? (query-range/index-range @db-after :post = [(pred-info :id) tempid-sid]))
matches-n (count matches)]
(if (not= 1 matches-n)
(throw (ex-info (str "Unique predicate " (pred-info :name) " with a tempid value: "
(:user-string tempid) " resolved to subject: " tempid-sid
", which is not unique.")
{:status 400 :error :db/invalid-tx :tempid tempid}))
;; one match as expected... extra check here as match .-s should always equal the _id*,
;; else something strange happened
(or (= _id* (.-s ^Flake (first matches)))
(throw (ex-info (str "Unique predicate resolved to mis-matched subject.")
{:status 500
:error :db/unexpected-error
:cause [(:user-string tempid) (pred-info :name)]
:conflict (vec (first matches))})))))))]
(queue-validation-fn :unique tx-state f nil nil)))
(defn- pred-spec-response
"Returns a true for a valid spec response, or an exception (but does not throw) for an invalid one.
If response is an exception, wraps exception message."
[specDoc predicate-name ^Flake flake response]
(cond
(util/exception? response)
(ex-info (str "Internal execution error for predicate spec: " (ex-message response) ". "
"Predicate spec failed for predicate: " predicate-name "." (when specDoc (str " " specDoc)))
{:status 400
:error :db/predicate-spec
:cause (vec flake)
:specDoc specDoc
:ex-message (ex-message response)
:ex-data (ex-data response)})
;; any value, spec succeeded - allow transaction
response
true
:else
(ex-info (str "Predicate spec failed for predicate: " predicate-name "." (when specDoc (str " " specDoc)))
{:status 400
:error :db/predicate-spec
:cause (vec flake)})))
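;; Runs a single predicate spec fn against one flake, charging spent fuel to the tx; channel results are wrapped with pred-spec-response.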
(defn run-predicate-spec
[fn-promise ^Flake flake predicate-name specDoc {:keys [fuel t auth db-after]}]
(let [sid (.-s flake)
pid (.-p flake)
o (.-o flake)]
(try
(let [fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @fn-promise
_ (log/debug "predicate spec fn:" f)
res (f {:db @db-after
:sid sid
:pid pid
:o o
:flakes [flake]
:auth_id auth
:state fuel-atom
:t t})]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(if (async-util/channel? res)
(async-response-wrapper res (partial pred-spec-response specDoc predicate-name flake))
(pred-spec-response specDoc predicate-name flake res)))
(catch Exception e (pred-spec-response specDoc predicate-name flake e)))))
(defn queue-pred-spec
"Flakes param flows through, queues spec for flake"
[flakes flake pred-info {:keys [validate-fn db-root] :as tx-state}]
(let [fn-sids (pred-info :spec)
specDoc (pred-info :specDoc)
predicate-name (pred-info :name)
fn-promise (resolve-function fn-sids db-root validate-fn "predSpec")
pred-spec-fn (-> run-predicate-spec
(partial fn-promise flake predicate-name specDoc)
(with-meta {:type :predicate-spec
:predicate predicate-name
:target flake
:fn-sid fn-sids
:doc specDoc}))]
(queue-validation-fn :predicate tx-state pred-spec-fn nil nil)
flakes))
;; predicate tx-spec
(defn- pred-tx-spec-response
[pred-name flakes tx-spec-doc response]
(cond
(util/exception? response)
(ex-info (str "Internal execution error for predicate txSpec: " (.getMessage ^Exception response) ". "
"Predicate txSpec failed for: " pred-name "." (when tx-spec-doc (str " " tx-spec-doc)))
{:status 400
:error :db/predicate-tx-spec
:cause flakes
:ex-message (ex-message response)
:ex-data (ex-data response)})
response
true
:else
(ex-info (str "Predicate txSpec failed for: " pred-name "." (when tx-spec-doc (str " " tx-spec-doc)))
{:status 400
:error :db/predicate-tx-spec
:cause flakes})))
(defn run-predicate-tx-spec
"This function is designed to be called with a (partial pid pred-name txSpecDoc) and
returns a function whose only argument is tx-state, which can be used to get the final
list of predicate flakes affected by this predicate."
[pid pred-tx-fn pred-name tx-spec-doc {:keys [db-root auth instant fuel validate-fn t]}]
(try
(let [pid-flakes (get-in @validate-fn [:tx-spec pid])
fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @pred-tx-fn
res (f {:db db-root
:pid pid
:instant instant
:flakes pid-flakes
:auth_id auth
:state fuel-atom
:t t})]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(if (async-util/channel? res)
(async-response-wrapper res (partial pred-tx-spec-response pred-name pid-flakes tx-spec-doc))
(pred-tx-spec-response pred-name pid-flakes tx-spec-doc res)))
(catch Exception e (pred-tx-spec-response pred-name (get-in @validate-fn [:tx-spec pid]) tx-spec-doc e))))
(defn- build-predicate-tx-spec-fn
"When a predicate-tx-spec function hasn't already been queued for a particular predicate,
do so and place the function into the validating function queue for processing."
[pred-info db]
(let [pred-tx-fn (promise)
pred-name (pred-info :name)
tx-spec-doc (pred-info :txSpecDoc)
pid (pred-info :id)
fn-sids (pred-info :txSpec)
queue-fn (-> run-predicate-tx-spec
(partial pid pred-tx-fn pred-name tx-spec-doc)
(with-meta {:type :predicate-tx-spec
:target pred-name
:fn-sids fn-sids}))]
;; kick off building function, will put realized function into pred-tx-fn promise
(<?? (build-function fn-sids db pred-tx-fn "predSpec"))
queue-fn))
(defn queue-predicate-tx-spec
"Passes 'flakes' through function untouched, but queues predicate spec for
execution once db-after is resolved.
Predicates that have a txSpec defined need to run once for all flakes with the
same predicate as inputs.
Queuing a flake here adds it to a map by predicate. We also kick off resolving
the txSpec function in the background if not already done, so it can be ready
when the transaction is completed to run the validation.
For each predicate that requires a txSpec function to be run, we store
a two-tuple of the function (as a promise) and a list of flakes for that predicate
that must be validated."
[flakes predicate-flakes pred-info {:keys [validate-fn db-root] :as tx-state}]
(let [pid (pred-info :id)
tx-spec-fn (when (empty? (get-in @validate-fn [:tx-spec pid]))
;; first time called (no existing flakes for this tx-spec), generate and queue fn also
(build-predicate-tx-spec-fn pred-info db-root))]
(queue-validation-fn :predicate-tx tx-state tx-spec-fn predicate-flakes pid)
flakes))
(defn- collection-spec-response
[flakes collection c-spec-doc response]
(cond
(util/exception? response)
(ex-info (str "Internal execution error for collection spec: " (.getMessage ^Exception response) ". "
"Collection spec failed for: " collection "." (when c-spec-doc (str " " c-spec-doc)))
{:status 400
:error :db/collection-spec
:flakes flakes
:cause response})
response
true
:else
(ex-info (str "Collection spec failed for: " collection "." (when c-spec-doc (str " " c-spec-doc)))
{:status 400
:error :db/collection-spec
:flakes flakes})))
(defn run-collection-spec
"Runs a collection spec. Will only execute collection spec if there are still flakes for
the subject that exist."
[collection sid c-spec-fn c-spec-doc {:keys [db-after instant validate-fn auth t fuel]}]
(async/go
(try
(let [subject-flakes (get-in @validate-fn [:c-spec sid])
deleted? (and (not (some #(.-op ^Flake %) subject-flakes)) ;; stop at first `true` .-op
(empty? (<? (query-range/index-range @db-after :spot = [sid]))))]
(if deleted?
true
(let [fuel-atom (atom {:stack []
:credits (:credits @fuel)
:spent 0})
f @c-spec-fn
res (f {:db @db-after
:instant instant
:sid sid
:flakes subject-flakes
:auth_id auth
:t t
:state fuel-atom})
res* (if (channel? res) (async/<! res) res)]
;; update main tx fuel count with the fuel spent to execute this tx function
(update-tx-spent-fuel fuel (:spent @fuel-atom))
(collection-spec-response subject-flakes collection c-spec-doc res*))))
(catch Exception e (collection-spec-response (get-in @validate-fn [:c-spec sid]) collection c-spec-doc e)))))
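;; Queues the collection spec fn for this subject; flakes for the same subject accumulate under :c-spec in tx-state.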
(defn queue-collection-spec
[collection c-spec-fn-ids {:keys [validate-fn db-root] :as tx-state} subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
c-spec-doc (dbproto/-c-prop db-root :specDoc collection)
c-spec-fn (resolve-function c-spec-fn-ids db-root validate-fn "collectionSpec")
execute-fn (-> run-collection-spec
(partial collection sid c-spec-fn c-spec-doc)
(with-meta {:type :collection-spec
:target sid
:fn-sid c-spec-fn-ids
:doc c-spec-doc}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
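;; _predicate subjects additionally run the built-in schema predicate validation as a collection spec.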
(defn queue-predicate-collection-spec
[tx-state subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
execute-fn (-> tx-schema/validate-schema-predicate
(partial sid)
(with-meta {:type :collection-spec
:target sid
:fn :validate-schema-predicate}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
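;; _tx subjects run tx-meta validation as a collection spec.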
(defn queue-tx-meta-collection-spec
[tx-state subject-flakes]
(let [sid (.-s ^Flake (first subject-flakes))
execute-fn (-> tx-meta/valid-tx-meta?
(partial sid)
(with-meta {:type :collection-spec
:target sid
:fn :validate-tx-meta}))]
(queue-validation-fn :collection tx-state execute-fn subject-flakes sid)))
(defn check-collection-specs
"If a collection spec is needed, register it for processing the subject's flakes."
[collection {:keys [db-root] :as tx-state} subject-flakes]
(let [c-spec-fn-ids (dbproto/-c-prop db-root :spec collection)]
(when c-spec-fn-ids
(queue-collection-spec collection c-spec-fn-ids tx-state subject-flakes))
(cond
(= "_predicate" collection)
(queue-predicate-collection-spec tx-state subject-flakes)
(= "_tx" collection)
(queue-tx-meta-collection-spec tx-state subject-flakes)
(= "_collection" collection)
(tx-schema/validate-collection-name subject-flakes)
:else nil)
subject-flakes))
(defn permissions
"Validates transaction based on the state of the new database.
Exceptions here should throw: caught by go-try."
[db-before candidate-db flakes]
(go-try
(let [tx-permissions (:permissions db-before)
no-filter? (true? (:root? tx-permissions))]
(if no-filter?
true
(loop [[^Flake flake & r] flakes]
(when (> (.-s flake) const/$maxSystemPredicates)
(when-not (if (.-op flake)
(<? (perm-validate/allow-flake? candidate-db flake tx-permissions))
(<? (perm-validate/allow-flake? db-before flake tx-permissions)))
(throw (ex-info (format "Insufficient permissions for predicate: %s within collection: %s."
(dbproto/-p-prop db-before :name (.-p flake))
(dbproto/-c-prop db-before :name (flake/sid->cid (.-s flake))))
{:status 400
:error :db/tx-permission}))))
(if r
(recur r)
true))))))
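;; Checks each flake against tx permissions in parallel; additions are validated against db-after, retractions against db-before.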
(defn run-permissions-checks
[all-flakes {:keys [db-before db-after]} parallelism]
(go-try
(let [db-after @db-after
queue-ch (async/chan parallelism)
result-ch (async/chan parallelism)
tx-permissions (:permissions db-before)
af (fn [^Flake flake res-chan]
(async/go
(try
(let [fn-res (if (.-op flake)
(async/<! (perm-validate/allow-flake? db-after flake tx-permissions))
(async/<! (perm-validate/allow-flake? db-before flake tx-permissions)))
res (if fn-res
true ;; any value means valid
(ex-info (format "Insufficient permissions for predicate: %s within collection: %s."
(dbproto/-p-prop db-before :name (.-p flake))
(dbproto/-c-prop db-before :name (flake/sid->cid (.-s flake))))
{:status 400
:error :db/write-permission
:cause (vec flake)}))]
(async/put! res-chan res)
(async/close! res-chan))
(catch Exception e (async/put! res-chan e)
(async/close! res-chan)))))]
(->> all-flakes
(async/onto-chan! queue-ch))
(async/pipeline-async parallelism result-ch af queue-ch)
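;; drain result-ch, collecting permission errors; a nil result means the work queue is exhausted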
(loop [errors []]
(let [next-res (async/<! result-ch)]
(cond
(nil? next-res)
(->> errors
(map #(let [ex (ex-data %)]
(when (= 500 (:status ex))
(log/error % "Unexpected validation error in transaction! Flakes:" all-flakes))
(assoc ex :message (ex-message %))))
(not-empty))
(util/exception? next-res)
(recur (conj errors next-res))
:else (recur errors)))))))
(defn tx-deps-check
"A transaction can optionally include a list of dependent transactions.
Returns true if dependency check is successful, throws exception if there
is an error.
Exceptions here should throw: catch by go-try."
[db {:keys [deps] :as tx-map}]
(go-try
(let [res (->> deps
(reduce-kv (fn [query-acc key dep]
(-> query-acc
(update :selectOne conj (str "?error" key))
(update :where conj [(str "?tx" key) "_tx/id" dep])
(update :optional conj [(str "?tx" key) "_tx/error" (str "?error" key)])))
{:selectOne [] :where [] :optional []})
(fdb/query-async (go-try db))
<?)]
(if (and (seq res) (every? nil? res))
true
(throw (ex-info (str "One or more of the dependencies for this transaction failed: " deps)
{:status 400 :error :db/invalid-dependency}))))))
(defn run-queued-specs
"Runs validation functions in parallel according to parallelism. Will return
'true' if all functions pass (or if there were no functions to process)
validate-fn is an atom that contains:
- queue
- cache
- tx-spec
- c-spec"
[all-flakes {:keys [validate-fn] :as tx-state} parallelism]
(go-try
(let [{:keys [queue]} @validate-fn]
(when (not-empty queue)
(let [tx-state* (assoc tx-state :flakes all-flakes)
queue-ch (async/chan parallelism)
result-ch (async/chan parallelism)
af (fn [f res-chan]
(async/go
(let [fn-result (try (f tx-state*)
(catch Exception e e))]
(async/>! res-chan
(if (channel? fn-result)
(async/<! fn-result)
fn-result))
(async/close! res-chan))))]
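;; push the queued validation fns onto queue-ch and run them with bounded parallelism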
(async/onto-chan! queue-ch queue)
(async/pipeline-async parallelism result-ch af queue-ch)
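;; collect any validation errors from result-ch; a nil result means all fns have completed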
(loop [errors []]
(let [next-res (async/<! result-ch)]
(cond
(nil? next-res)
(->> errors
(map #(let [ex (ex-data %)]
(when (= 500 (:status ex))
(log/error % "Unexpected validation error in transaction! Flakes:" all-flakes))
(assoc ex :message (ex-message %))))
(not-empty))
(util/exception? next-res)
(recur (conj errors next-res))
:else (recur errors)))))))))
|
43243119ee87e5a9880d29abc3ff19fd88537291d7ff0a32cb81075af710a649 | tz-wrapped/tezos-btc | Parser.hs | {- SPDX-FileCopyrightText: 2019 Bitcoin Suisse
-
- SPDX-License-Identifier: LicenseRef-MIT-BitcoinSuisse
-}
{-# LANGUAGE ApplicativeDo #-}
module Client.Parser
( ClientArgs(..)
, ClientArgsRaw(..)
, DeployContractOptions (..)
, clientArgParser
, parseContractAddressFromOutput
) where
import Data.Char (isAlpha, isDigit)
import Fmt (pretty)
import Options.Applicative (help, long, metavar, option, short, str, switch)
import Options.Applicative qualified as Opt
import Text.Megaparsec qualified as P (Parsec, customFailure, many, parse, satisfy, skipManyTill)
import Text.Megaparsec.Char (newline, printChar, space)
import Text.Megaparsec.Char.Lexer (symbol)
import Text.Megaparsec.Error (ParseErrorBundle, ShowErrorComponent(..))
import Lorentz.Contracts.Multisig
import Morley.CLI (addressOrAliasOption, mutezOption, someAddressOrAliasOption)
import Morley.Client.Parser (clientConfigParser)
import Morley.Tezos.Address
import Morley.Tezos.Address.Alias (ContractAddressOrAlias, SomeAddressOrAlias)
import Morley.Tezos.Crypto (PublicKey, Signature, parsePublicKey, parseSignature)
import Morley.Util.CLI
import Morley.Util.Named
import CLI.Parser
import Client.Types
import Lorentz.Contracts.TZBTC.Common.Types
clientArgParser :: Opt.Parser ClientArgs
clientArgParser =
ClientArgs
<$> clientConfigParser
<*> clientArgRawParser
<*> (#userOverride <:!> userOption)
<*> (#multisigOverride <:!> multisigOption)
<*> (#contractOverride <:!> contractOverride)
<*> (#fee <:!> explictFee)
<*> dryRunSwitch
where
multisigOption = optional $ addressOrAliasOption Nothing (#name :! "multisig-addr")
(#help :! "The multisig contract address/alias to use")
contractOverride = optional $ addressOrAliasOption Nothing (#name :! "contract-addr")
(#help :! "The tzbtc contract address/alias to use")
userOption = optional $ addressOrAliasOption Nothing (#name :! "user")
(#help :! "User to send operations as")
explictFee =
optional $ mutezOption
Nothing
(#name :! "fee")
(#help :! "Fee that is going to be used for the transaction. \
\By default fee will be computed automatically."
)
dryRunSwitch =
switch (long "dry-run" <>
help "Dry run command to ensure correctness of the arguments")
clientArgRawParser :: Opt.Parser ClientArgsRaw
clientArgRawParser = Opt.hsubparser $
mintCmd <> burnCmd <> transferCmd <> approveCmd
<> getAllowanceCmd <> getBalanceCmd <> addOperatorCmd
<> removeOperatorCmd <> pauseCmd <> unpauseCmd
<> setRedeemAddressCmd <> transferOwnershipCmd <> acceptOwnershipCmd
<> getTotalSupplyCmd <> getTotalMintedCmd <> getTotalBurnedCmd
<> getOwnerCmd <> getTokenMetadataCmd <> getRedeemAddressCmd
<> getOperatorsCmd <> getOpDescriptionCmd
<> getBytesToSignCmd <> getTotalBurnedCmd
<> addSignatureCmd <> signPackageCmd <> callMultisigCmd
<> deployCmd
<> deployMultisigCmd
<> showConfigCmd
where
multisigOption :: Opt.Parser (Maybe FilePath)
multisigOption =
Opt.optional $ Opt.strOption $ mconcat
[ long "multisig-package"
, short 'm'
, metavar "FILEPATH"
, help "Create package for multisig transaction and write it to the given file"
]
mintCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
mintCmd =
(mkCommandParser
"mint"
(CmdMint <$> addrOrAliasOption "to" "Address to mint to" <*>
natOption "value" "Amount to mint" <*> multisigOption)
"Mint tokens for an account")
showConfigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
showConfigCmd =
(mkCommandParser
"config"
(pure CmdShowConfig)
"Show active configuration")
burnCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
burnCmd =
(mkCommandParser
"burn"
(CmdBurn <$> burnParamsParser <*> multisigOption)
"Burn tokens from an account")
transferCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
transferCmd =
(mkCommandParser
"transfer"
(CmdTransfer <$>
addrOrAliasOption "from" "Address to transfer from" <*>
addrOrAliasOption "to" "Address to transfer to" <*>
natOption "value" "Amount to transfer"
)
"Transfer tokens from one account to another")
approveCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
approveCmd =
(mkCommandParser
"approve"
(CmdApprove <$>
addrOrAliasOption "spender" "Address of the spender" <*>
natOption "value" "Amount to approve"
)
"Approve transfer of tokens from one account to another")
getAllowanceCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getAllowanceCmd =
(mkCommandParser
"getAllowance"
(CmdGetAllowance <$>
((,) <$> addrOrAliasOption "owner" "Address of the owner" <*>
addrOrAliasOption "spender" "Address of the spender") <*>
callbackParser
)
"Get allowance for an account")
getBalanceCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getBalanceCmd =
(mkCommandParser
"getBalance"
(CmdGetBalance <$>
addrOrAliasOption "address" "Address of the owner" <*>
callbackParser
)
"Get balance for an account")
addOperatorCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
addOperatorCmd =
(mkCommandParser
"addOperator"
(CmdAddOperator <$>
addrOrAliasOption "operator" "Address of the operator" <*>
multisigOption
)
"Add an operator")
removeOperatorCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
removeOperatorCmd =
(mkCommandParser
"removeOperator"
(CmdRemoveOperator <$>
addrOrAliasOption "operator" "Address of the operator" <*>
multisigOption
)
"Remove an operator")
pauseCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
pauseCmd =
(mkCommandParser
"pause"
(CmdPause <$> multisigOption)
"Pause the contract")
unpauseCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
unpauseCmd =
(mkCommandParser
"unpause"
(CmdUnpause <$> multisigOption)
"Unpause the contract")
setRedeemAddressCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
setRedeemAddressCmd =
(mkCommandParser
"setRedeemAddress"
(CmdSetRedeemAddress <$>
addrOrAliasArg "Redeem address" <*>
multisigOption
)
"Set redeem address")
transferOwnershipCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
transferOwnershipCmd =
(mkCommandParser
"transferOwnership"
(CmdTransferOwnership <$>
addrOrAliasArg "new-owner" <*>
multisigOption
)
"Transfer ownership")
acceptOwnershipCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
acceptOwnershipCmd =
(mkCommandParser
"acceptOwnership"
(CmdAcceptOwnership () <$> multisigOption)
"Accept ownership")
getTotalSupplyCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalSupplyCmd =
(mkCommandParser
"getTotalSupply"
(CmdGetTotalSupply <$> callbackParser)
"Get total supply")
getTotalMintedCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalMintedCmd =
(mkCommandParser
"getTotalMinted"
(CmdGetTotalMinted <$> callbackParser)
"Get amount of minted tokens")
getTotalBurnedCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalBurnedCmd =
(mkCommandParser "getTotalBurned"
(CmdGetTotalBurned <$> callbackParser)
"Get amount of burned tokens")
getOwnerCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOwnerCmd =
(mkCommandParser "getOwner"
(CmdGetOwner <$> callbackParser)
"Get current contract owner")
getTokenMetadataCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTokenMetadataCmd =
(mkCommandParser "getTokenMetadata"
(CmdGetTokenMetadata <$> callbackParser)
"Get the token metadata")
getRedeemAddressCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getRedeemAddressCmd =
(mkCommandParser "getRedeemAddress"
(CmdGetRedeemAddress <$> callbackParser)
"Get the redeem address code")
getOperatorsCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOperatorsCmd =
(mkCommandParser "getOperators" (pure CmdGetOperators) "Get list of contract operators")
getOpDescriptionCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOpDescriptionCmd =
mkCommandParser
"getOpDescription"
(CmdGetOpDescription <$> namedFilePathOption "package" "Package filepath")
"Get operation description from given multisig package"
getBytesToSignCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getBytesToSignCmd =
mkCommandParser
"getBytesToSign"
(CmdGetBytesToSign <$> namedFilePathOption "package" "Package filepath")
"Get bytes that need to be signed from given multisig package"
addSignatureCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
addSignatureCmd =
mkCommandParser
"addSignature"
(CmdAddSignature <$> publicKeyOption <*> signatureOption <*>
namedFilePathOption "package" "Package filepath"
)
"Add signature assosiated with the given public key to the given package"
signPackageCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
signPackageCmd =
mkCommandParser
"signPackage"
(CmdSignPackage <$> namedFilePathOption "package" "Package filepath"
)
"Sign given multisig package using secret key from `tezos-client` \
\associated with the user alias from ClientConfig and add signature \
\to the package."
callMultisigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
callMultisigCmd =
mkCommandParser
"callMultisig"
(CmdCallMultisig <$>
nonEmptyParser (namedFilePathOption "package" "Package filepath")
)
"Call multisig contract with the given packages"
deployCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
deployCmd =
mkCommandParser
"deployTzbtcContract"
(CmdDeployContract
<$> (#owner <:!> mbAddrOrAliasOption "owner" "Address of the owner")
<*> deployContractOptions)
"Deploy TZBTC contract to the chain"
where
deployContractOptions :: Opt.Parser DeployContractOptions
deployContractOptions = asum
[ Opt.hsubparser $ mconcat
[ mkCommandParser "v1"
(DeployContractV1 <$> deployContractOptionsV1)
"Deploy V1 version of the contract."
, mkCommandParser "v2"
(DeployContractV2 <$> deployContractOptionsV2)
"Deploy V2 version of the contract."
]
, DeployContractV1 <$> deployContractOptionsV1
]
deployContractOptionsV1 :: Opt.Parser DeployContractOptionsV1
deployContractOptionsV1 = do
dcoRedeem <- addrOrAliasOption "redeem" "Redeem address"
dcoTokenMetadata <- parseSingleTokenMetadata
pure DeployContractOptionsV1 {..}
deployContractOptionsV2 :: Opt.Parser DeployContractOptionsV2
deployContractOptionsV2 =
DeployContractOptionsV2 <$> deployContractOptionsV1
deployMultisigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
deployMultisigCmd =
mkCommandParser
"deployMultisigContract"
(CmdDeployMultisigContract <$>
(Threshold <$> natOption "threshold" "Specialized multisig threshold") <*>
(Keys <$> many publicKeyOption) <*>
customErrorsFlag
)
"Deploy specialized multisig contract to the chain"
where
customErrorsFlag = switch
(long "use-custom-errors" <>
help "By default specialized multisig contract fails with 'unit' in all error cases.\n\
\This flag will deploy the custom version of specialized multisig\n\
\contract with human-readable string errors.")
callbackParser :: Opt.Parser (Maybe ContractAddressOrAlias)
callbackParser = optional $
addressOrAliasOption Nothing (#name :! "callback") (#help :! "Callback address")
addrOrAliasOption :: String -> String -> Opt.Parser SomeAddressOrAlias
addrOrAliasOption name hInfo =
someAddressOrAliasOption Nothing (#name :! name) (#help :! hInfo)
mbAddrOrAliasOption :: String -> String -> Opt.Parser (Maybe SomeAddressOrAlias)
mbAddrOrAliasOption = optional ... addrOrAliasOption
addrOrAliasArg :: String -> Opt.Parser SomeAddressOrAlias
addrOrAliasArg hInfo = mkCLArgumentParser Nothing (#help :! hInfo)
natOption :: String -> String -> Opt.Parser Natural
natOption name hInfo = mkCLOptionParser Nothing (#name :! name) (#help :! hInfo)
burnParamsParser :: Opt.Parser BurnParams
burnParamsParser = namedParser Nothing "Amount to burn"
signatureOption :: Opt.Parser Signature
signatureOption = option (eitherReader parseSignatureDo) $ mconcat
[ long "signature", metavar "SIGNATURE"]
parseSignatureDo :: String -> Either String Signature
parseSignatureDo sig =
either (Left . mappend "Failed to parse signature: " . pretty) Right $
parseSignature $ toText sig
publicKeyOption :: Opt.Parser PublicKey
publicKeyOption = option (eitherReader parsePublicKeyDo) $ mconcat
[ long "public-key", metavar "PUBLIC KEY", help "Address public key"]
parsePublicKeyDo :: String -> Either String PublicKey
parsePublicKeyDo pk =
either (Left . mappend "Failed to parse public key: " . pretty) Right $
parsePublicKey $ toText pk
namedFilePathOption :: String -> String -> Opt.Parser FilePath
namedFilePathOption name hInfo = option str $
mconcat [long name, metavar "FILEPATH", help hInfo]
nonEmptyParser :: Opt.Parser a -> Opt.Parser (NonEmpty a)
nonEmptyParser p = (:|) <$> p <*> many p
-- | Tezos-client output parsers
data OutputParseError = OutputParseError Text Text
deriving stock (Eq, Show, Ord)
instance ShowErrorComponent OutputParseError where
showErrorComponent (OutputParseError name err) = toString $
"Failed to parse " <> name <> ": " <> err
type Parser = P.Parsec OutputParseError Text
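-- Base58 alphabet check: digits and letters, excluding '0', 'O', 'I' and 'l'.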
isBase58Char :: Char -> Bool
isBase58Char c =
(isDigit c && c /= '0') || (isAlpha c && c /= 'O' && c /= 'I' && c /= 'l')
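-- Skips tezos-client output until the "Contract address:" line and parses the base58 contract address that follows.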
tzbtcClientAddressParser :: Parser ContractAddress
tzbtcClientAddressParser = do
P.skipManyTill (printChar <|> newline) $ do
void $ symbol space "Contract address:"
rawAddr <- P.many (P.satisfy isBase58Char)
case parseKindedAddress (fromString rawAddr) of
Left err -> P.customFailure $ OutputParseError "address" $ pretty err
Right addr -> return addr
parseContractAddressFromOutput
:: Text -> Either (ParseErrorBundle Text OutputParseError) ContractAddress
parseContractAddressFromOutput output = P.parse tzbtcClientAddressParser "" output
| null | https://raw.githubusercontent.com/tz-wrapped/tezos-btc/c26e3cf4e00bd53121c15929407a859b57c74527/src/Client/Parser.hs | haskell | | Tezos-client output parsers | SPDX - FileCopyrightText : 2019 Bitcoin Suisse
-
- SPDX-License-Identifier: LicenseRef-MIT-BitcoinSuisse
-
- SPDX-License-Identifier: LicenseRef-MIT-BitcoinSuisse
-}
{-# LANGUAGE ApplicativeDo #-}
module Client.Parser
( ClientArgs(..)
, ClientArgsRaw(..)
, DeployContractOptions (..)
, clientArgParser
, parseContractAddressFromOutput
) where
import Data.Char (isAlpha, isDigit)
import Fmt (pretty)
import Options.Applicative (help, long, metavar, option, short, str, switch)
import Options.Applicative qualified as Opt
import Text.Megaparsec qualified as P (Parsec, customFailure, many, parse, satisfy, skipManyTill)
import Text.Megaparsec.Char (newline, printChar, space)
import Text.Megaparsec.Char.Lexer (symbol)
import Text.Megaparsec.Error (ParseErrorBundle, ShowErrorComponent(..))
import Lorentz.Contracts.Multisig
import Morley.CLI (addressOrAliasOption, mutezOption, someAddressOrAliasOption)
import Morley.Client.Parser (clientConfigParser)
import Morley.Tezos.Address
import Morley.Tezos.Address.Alias (ContractAddressOrAlias, SomeAddressOrAlias)
import Morley.Tezos.Crypto (PublicKey, Signature, parsePublicKey, parseSignature)
import Morley.Util.CLI
import Morley.Util.Named
import CLI.Parser
import Client.Types
import Lorentz.Contracts.TZBTC.Common.Types
clientArgParser :: Opt.Parser ClientArgs
clientArgParser =
ClientArgs
<$> clientConfigParser
<*> clientArgRawParser
<*> (#userOverride <:!> userOption)
<*> (#multisigOverride <:!> multisigOption)
<*> (#contractOverride <:!> contractOverride)
<*> (#fee <:!> explictFee)
<*> dryRunSwitch
where
multisigOption = optional $ addressOrAliasOption Nothing (#name :! "multisig-addr")
(#help :! "The multisig contract address/alias to use")
contractOverride = optional $ addressOrAliasOption Nothing (#name :! "contract-addr")
(#help :! "The tzbtc contract address/alias to use")
userOption = optional $ addressOrAliasOption Nothing (#name :! "user")
(#help :! "User to send operations as")
explictFee =
optional $ mutezOption
Nothing
(#name :! "fee")
(#help :! "Fee that is going to be used for the transaction. \
\By default fee will be computed automatically."
)
dryRunSwitch =
switch (long "dry-run" <>
help "Dry run command to ensure correctness of the arguments")
clientArgRawParser :: Opt.Parser ClientArgsRaw
clientArgRawParser = Opt.hsubparser $
mintCmd <> burnCmd <> transferCmd <> approveCmd
<> getAllowanceCmd <> getBalanceCmd <> addOperatorCmd
<> removeOperatorCmd <> pauseCmd <> unpauseCmd
<> setRedeemAddressCmd <> transferOwnershipCmd <> acceptOwnershipCmd
<> getTotalSupplyCmd <> getTotalMintedCmd <> getTotalBurnedCmd
<> getOwnerCmd <> getTokenMetadataCmd <> getRedeemAddressCmd
<> getOperatorsCmd <> getOpDescriptionCmd
<> getBytesToSignCmd <> getTotalBurnedCmd
<> addSignatureCmd <> signPackageCmd <> callMultisigCmd
<> deployCmd
<> deployMultisigCmd
<> showConfigCmd
where
multisigOption :: Opt.Parser (Maybe FilePath)
multisigOption =
Opt.optional $ Opt.strOption $ mconcat
[ long "multisig-package"
, short 'm'
, metavar "FILEPATH"
, help "Create package for multisig transaction and write it to the given file"
]
mintCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
mintCmd =
(mkCommandParser
"mint"
(CmdMint <$> addrOrAliasOption "to" "Address to mint to" <*>
natOption "value" "Amount to mint" <*> multisigOption)
"Mint tokens for an account")
showConfigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
showConfigCmd =
(mkCommandParser
"config"
(pure CmdShowConfig)
"Show active configuration")
burnCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
burnCmd =
(mkCommandParser
"burn"
(CmdBurn <$> burnParamsParser <*> multisigOption)
"Burn tokens from an account")
transferCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
transferCmd =
(mkCommandParser
"transfer"
(CmdTransfer <$>
addrOrAliasOption "from" "Address to transfer from" <*>
addrOrAliasOption "to" "Address to transfer to" <*>
natOption "value" "Amount to transfer"
)
"Transfer tokens from one account to another")
approveCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
approveCmd =
(mkCommandParser
"approve"
(CmdApprove <$>
addrOrAliasOption "spender" "Address of the spender" <*>
natOption "value" "Amount to approve"
)
"Approve transfer of tokens from one account to another")
getAllowanceCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getAllowanceCmd =
(mkCommandParser
"getAllowance"
(CmdGetAllowance <$>
((,) <$> addrOrAliasOption "owner" "Address of the owner" <*>
addrOrAliasOption "spender" "Address of the spender") <*>
callbackParser
)
"Get allowance for an account")
getBalanceCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getBalanceCmd =
(mkCommandParser
"getBalance"
(CmdGetBalance <$>
addrOrAliasOption "address" "Address of the owner" <*>
callbackParser
)
"Get balance for an account")
addOperatorCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
addOperatorCmd =
(mkCommandParser
"addOperator"
(CmdAddOperator <$>
addrOrAliasOption "operator" "Address of the operator" <*>
multisigOption
)
"Add an operator")
removeOperatorCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
removeOperatorCmd =
(mkCommandParser
"removeOperator"
(CmdRemoveOperator <$>
addrOrAliasOption "operator" "Address of the operator" <*>
multisigOption
)
"Remove an operator")
pauseCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
pauseCmd =
(mkCommandParser
"pause"
(CmdPause <$> multisigOption)
"Pause the contract")
unpauseCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
unpauseCmd =
(mkCommandParser
"unpause"
(CmdUnpause <$> multisigOption)
"Unpause the contract")
setRedeemAddressCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
setRedeemAddressCmd =
(mkCommandParser
"setRedeemAddress"
(CmdSetRedeemAddress <$>
addrOrAliasArg "Redeem address" <*>
multisigOption
)
"Set redeem address")
transferOwnershipCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
transferOwnershipCmd =
(mkCommandParser
"transferOwnership"
(CmdTransferOwnership <$>
addrOrAliasArg "new-owner" <*>
multisigOption
)
"Transfer ownership")
acceptOwnershipCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
acceptOwnershipCmd =
(mkCommandParser
"acceptOwnership"
(CmdAcceptOwnership () <$> multisigOption)
"Accept ownership")
getTotalSupplyCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalSupplyCmd =
(mkCommandParser
"getTotalSupply"
(CmdGetTotalSupply <$> callbackParser)
"Get total supply")
getTotalMintedCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalMintedCmd =
(mkCommandParser
"getTotalMinted"
(CmdGetTotalMinted <$> callbackParser)
"Get amount of minted tokens")
getTotalBurnedCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTotalBurnedCmd =
(mkCommandParser "getTotalBurned"
(CmdGetTotalBurned <$> callbackParser)
"Get amount of burned tokens")
getOwnerCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOwnerCmd =
(mkCommandParser "getOwner"
(CmdGetOwner <$> callbackParser)
"Get current contract owner")
getTokenMetadataCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getTokenMetadataCmd =
(mkCommandParser "getTokenMetadata"
(CmdGetTokenMetadata <$> callbackParser)
"Get the token metadata")
getRedeemAddressCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getRedeemAddressCmd =
(mkCommandParser "getRedeemAddress"
(CmdGetRedeemAddress <$> callbackParser)
"Get the redeem address code")
getOperatorsCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOperatorsCmd =
(mkCommandParser "getOperators" (pure CmdGetOperators) "Get list of contract operators")
getOpDescriptionCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getOpDescriptionCmd =
mkCommandParser
"getOpDescription"
(CmdGetOpDescription <$> namedFilePathOption "package" "Package filepath")
"Get operation description from given multisig package"
getBytesToSignCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
getBytesToSignCmd =
mkCommandParser
"getBytesToSign"
(CmdGetBytesToSign <$> namedFilePathOption "package" "Package filepath")
"Get bytes that need to be signed from given multisig package"
addSignatureCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
addSignatureCmd =
mkCommandParser
"addSignature"
(CmdAddSignature <$> publicKeyOption <*> signatureOption <*>
namedFilePathOption "package" "Package filepath"
)
"Add signature assosiated with the given public key to the given package"
signPackageCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
signPackageCmd =
mkCommandParser
"signPackage"
(CmdSignPackage <$> namedFilePathOption "package" "Package filepath"
)
"Sign given multisig package using secret key from `tezos-client` \
\associated with the user alias from ClientConfig and add signature \
\to the package."
callMultisigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
callMultisigCmd =
mkCommandParser
"callMultisig"
(CmdCallMultisig <$>
nonEmptyParser (namedFilePathOption "package" "Package filepath")
)
"Call multisig contract with the given packages"
deployCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
deployCmd =
mkCommandParser
"deployTzbtcContract"
(CmdDeployContract
<$> (#owner <:!> mbAddrOrAliasOption "owner" "Address of the owner")
<*> deployContractOptions)
"Deploy TZBTC contract to the chain"
where
deployContractOptions :: Opt.Parser DeployContractOptions
deployContractOptions = asum
[ Opt.hsubparser $ mconcat
[ mkCommandParser "v1"
(DeployContractV1 <$> deployContractOptionsV1)
"Deploy V1 version of the contract."
, mkCommandParser "v2"
(DeployContractV2 <$> deployContractOptionsV2)
"Deploy V2 version of the contract."
]
, DeployContractV1 <$> deployContractOptionsV1
]
deployContractOptionsV1 :: Opt.Parser DeployContractOptionsV1
deployContractOptionsV1 = do
dcoRedeem <- addrOrAliasOption "redeem" "Redeem address"
dcoTokenMetadata <- parseSingleTokenMetadata
pure DeployContractOptionsV1 {..}
deployContractOptionsV2 :: Opt.Parser DeployContractOptionsV2
deployContractOptionsV2 =
DeployContractOptionsV2 <$> deployContractOptionsV1
deployMultisigCmd :: Opt.Mod Opt.CommandFields ClientArgsRaw
deployMultisigCmd =
mkCommandParser
"deployMultisigContract"
(CmdDeployMultisigContract <$>
(Threshold <$> natOption "threshold" "Specialized multisig threshold") <*>
(Keys <$> many publicKeyOption) <*>
customErrorsFlag
)
"Deploy specialized multisig contract to the chain"
where
customErrorsFlag = switch
(long "use-custom-errors" <>
help "By default specialized multisig contract fails with 'unit' in all error cases.\n\
\This flag will deploy the custom version of specialized multisig\n\
\contract with human-readable string errors.")
callbackParser :: Opt.Parser (Maybe ContractAddressOrAlias)
callbackParser = optional $
addressOrAliasOption Nothing (#name :! "callback") (#help :! "Callback address")
addrOrAliasOption :: String -> String -> Opt.Parser SomeAddressOrAlias
addrOrAliasOption name hInfo =
someAddressOrAliasOption Nothing (#name :! name) (#help :! hInfo)
mbAddrOrAliasOption :: String -> String -> Opt.Parser (Maybe SomeAddressOrAlias)
mbAddrOrAliasOption = optional ... addrOrAliasOption
addrOrAliasArg :: String -> Opt.Parser SomeAddressOrAlias
addrOrAliasArg hInfo = mkCLArgumentParser Nothing (#help :! hInfo)
natOption :: String -> String -> Opt.Parser Natural
natOption name hInfo = mkCLOptionParser Nothing (#name :! name) (#help :! hInfo)
burnParamsParser :: Opt.Parser BurnParams
burnParamsParser = namedParser Nothing "Amount to burn"
signatureOption :: Opt.Parser Signature
signatureOption = option (eitherReader parseSignatureDo) $ mconcat
[ long "signature", metavar "SIGNATURE"]
parseSignatureDo :: String -> Either String Signature
parseSignatureDo sig =
either (Left . mappend "Failed to parse signature: " . pretty) Right $
parseSignature $ toText sig
publicKeyOption :: Opt.Parser PublicKey
publicKeyOption = option (eitherReader parsePublicKeyDo) $ mconcat
[ long "public-key", metavar "PUBLIC KEY", help "Address public key"]
parsePublicKeyDo :: String -> Either String PublicKey
parsePublicKeyDo pk =
either (Left . mappend "Failed to parse public key: " . pretty) Right $
parsePublicKey $ toText pk
namedFilePathOption :: String -> String -> Opt.Parser FilePath
namedFilePathOption name hInfo = option str $
mconcat [long name, metavar "FILEPATH", help hInfo]
nonEmptyParser :: Opt.Parser a -> Opt.Parser (NonEmpty a)
nonEmptyParser p = (:|) <$> p <*> many p
data OutputParseError = OutputParseError Text Text
deriving stock (Eq, Show, Ord)
instance ShowErrorComponent OutputParseError where
showErrorComponent (OutputParseError name err) = toString $
"Failed to parse " <> name <> ": " <> err
type Parser = P.Parsec OutputParseError Text
isBase58Char :: Char -> Bool
isBase58Char c =
(isDigit c && c /= '0') || (isAlpha c && c /= 'O' && c /= 'I' && c /= 'l')
tzbtcClientAddressParser :: Parser ContractAddress
tzbtcClientAddressParser = do
P.skipManyTill (printChar <|> newline) $ do
void $ symbol space "Contract address:"
rawAddr <- P.many (P.satisfy isBase58Char)
case parseKindedAddress (fromString rawAddr) of
Left err -> P.customFailure $ OutputParseError "address" $ pretty err
Right addr -> return addr
parseContractAddressFromOutput
:: Text -> Either (ParseErrorBundle Text OutputParseError) ContractAddress
parseContractAddressFromOutput output = P.parse tzbtcClientAddressParser "" output
|
730b593b3563bd7e415940fe509979177db9fe233cec7c2f10bbd57f2d49db61 | facebook/flow | timeout.mli |
(*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* Helpers for handling timeout, in particular input timeout. *)
type t
(* The function `with_timeout` executes 'do_' for at most 'timeout'
seconds. If the `timeout` is reached, the `on_timeout` is executed
if available, otherwise the `Timeout` exception is raised.
On Unix platform, this function is based on `SIGALRM`. On Windows
platform, this is based on the equivalent of `select`. Hence, this
module exports variants of the basic input functions, adding a
`timeout` parameter to them. It should correspond to the parameter of the
`do_` function.
For a `do_` function based only on computation (and not I/O), you
should call the `check_timeout` function on a regular
basis. Otherwise, on Windows, the timeout will never be detected.
On Unix, the function `check_timeout` is a no-op.
On Unix, the type `in_channel` is in fact an alias for
`Stdlib.in_channel`.
*)
val with_timeout : timeout:int -> on_timeout:(unit -> 'a) -> do_:(t -> 'a) -> 'a
val check_timeout : t -> unit
type in_channel
val open_in : string -> in_channel
val close_in : in_channel -> unit
val close_in_noerr : in_channel -> unit
val in_channel_of_descr : Unix.file_descr -> in_channel
val descr_of_in_channel : in_channel -> Unix.file_descr
val select :
?timeout:t ->
Unix.file_descr list ->
Unix.file_descr list ->
Unix.file_descr list ->
float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
val input : ?timeout:t -> in_channel -> bytes -> int -> int -> int
val really_input : ?timeout:t -> in_channel -> bytes -> int -> int -> unit
val input_char : ?timeout:t -> in_channel -> char
val input_line : ?timeout:t -> in_channel -> string
val input_value : ?timeout:t -> in_channel -> 'a
val open_process : string -> string array -> in_channel * out_channel
val open_process_in : string -> string array -> in_channel
val close_process_in : in_channel -> Unix.process_status
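(* Spawns the given command and passes its channels to [reader], calling [on_timeout] if [timeout] seconds elapse first. *)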
val read_process :
timeout:int ->
on_timeout:(unit -> 'a) ->
reader:(t -> in_channel -> out_channel -> 'a) ->
string ->
string array ->
'a
val open_connection : ?timeout:t -> Unix.sockaddr -> in_channel * out_channel
val read_connection :
timeout:int ->
on_timeout:(unit -> 'a) ->
reader:(t -> in_channel -> out_channel -> 'a) ->
Unix.sockaddr ->
'a
val shutdown_connection : in_channel -> unit
(* Some silly people like to catch all exceptions. This means they need to explicitly detect and
* reraise the timeout exn. *)
val is_timeout_exn : t -> exn -> bool
| null | https://raw.githubusercontent.com/facebook/flow/52e59c7a9dea8556e7caf0be2b2c5c2e310b5b65/src/hack_forked/utils/sys/timeout.mli | ocaml | Helpers for handling timeout, in particular input timeout.
Some silly people like to catch all exceptions. This means they need to explicitly detect and
* reraise the timeout exn. |
(*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type t
(* The function `with_timeout` executes 'do_' for at most 'timeout'
seconds. If the `timeout` is reached, the `on_timeout` is executed
if available, otherwise the `Timeout` exception is raised.
On Unix platform, this function is based on `SIGALRM`. On Windows
platform, this is based on the equivalent of `select`. Hence, this
module exports variants of the basic input functions, adding a
`timeout` parameter to them. It should correspond to the parameter of the
`do_` function.
For a `do_` function based only on computation (and not I/O), you
should call the `check_timeout` function on a regular
basis. Otherwise, on Windows, the timeout will never be detected.
On Unix, the function `check_timeout` is a no-op.
On Unix, the type `in_channel` is in fact an alias for
`Stdlib.in_channel`.
*)
val with_timeout : timeout:int -> on_timeout:(unit -> 'a) -> do_:(t -> 'a) -> 'a
val check_timeout : t -> unit
type in_channel
val open_in : string -> in_channel
val close_in : in_channel -> unit
val close_in_noerr : in_channel -> unit
val in_channel_of_descr : Unix.file_descr -> in_channel
val descr_of_in_channel : in_channel -> Unix.file_descr
val select :
?timeout:t ->
Unix.file_descr list ->
Unix.file_descr list ->
Unix.file_descr list ->
float ->
Unix.file_descr list * Unix.file_descr list * Unix.file_descr list
val input : ?timeout:t -> in_channel -> bytes -> int -> int -> int
val really_input : ?timeout:t -> in_channel -> bytes -> int -> int -> unit
val input_char : ?timeout:t -> in_channel -> char
val input_line : ?timeout:t -> in_channel -> string
val input_value : ?timeout:t -> in_channel -> 'a
val open_process : string -> string array -> in_channel * out_channel
val open_process_in : string -> string array -> in_channel
val close_process_in : in_channel -> Unix.process_status
val read_process :
timeout:int ->
on_timeout:(unit -> 'a) ->
reader:(t -> in_channel -> out_channel -> 'a) ->
string ->
string array ->
'a
val open_connection : ?timeout:t -> Unix.sockaddr -> in_channel * out_channel
val read_connection :
timeout:int ->
on_timeout:(unit -> 'a) ->
reader:(t -> in_channel -> out_channel -> 'a) ->
Unix.sockaddr ->
'a
val shutdown_connection : in_channel -> unit
val is_timeout_exn : t -> exn -> bool
|
a6b8946d20bd998fa792deb0e760755e596f43dfbad607fdc0602a36a7dfdd58 | tek/ribosome | PromptMode.hs | module Ribosome.Menu.Prompt.Data.PromptMode where
data PromptMode =
Insert
|
Normal
deriving stock (Eq, Show, Ord)
| null | https://raw.githubusercontent.com/tek/ribosome/8ab6fed463a10cddb8531dc333552707a60c7c96/packages/menu/lib/Ribosome/Menu/Prompt/Data/PromptMode.hs | haskell | module Ribosome.Menu.Prompt.Data.PromptMode where
data PromptMode =
Insert
|
Normal
deriving stock (Eq, Show, Ord)
|
|
afa9de96f48b047af33533cb85a7f112917a23114b215c045bc44a3de2c2abb4 | clojure-interop/java-jdk | StAXResult.clj | (ns javax.xml.transform.stax.StAXResult
"Acts as a holder for an XML Result in the
form of a StAX writer,i.e.
XMLStreamWriter or XMLEventWriter.
StAXResult can be used in all cases that accept
a Result, e.g. Transformer,
Validator which accept
Result as input."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.transform.stax StAXResult]))
(defn ->st-ax-result
"Constructor.
Creates a new instance of a StAXResult
by supplying an XMLEventWriter.
XMLEventWriter must be a
non-null reference.
xml-event-writer - XMLEventWriter used to create this StAXResult. - `javax.xml.stream.XMLEventWriter`
throws: java.lang.IllegalArgumentException - If xmlEventWriter == null."
(^StAXResult [^javax.xml.stream.XMLEventWriter xml-event-writer]
(new StAXResult xml-event-writer)))
(def *-feature
"Static Constant.
If TransformerFactory.getFeature(String name)
returns true when passed this value as an argument,
the Transformer supports Result output of this type.
type: java.lang.String"
StAXResult/FEATURE)
(defn get-xml-event-writer
"Get the XMLEventWriter used by this
StAXResult.
XMLEventWriter will be null
if this StAXResult was created with a
XMLStreamWriter.
returns: XMLEventWriter used by this
StAXResult. - `javax.xml.stream.XMLEventWriter`"
(^javax.xml.stream.XMLEventWriter [^StAXResult this]
(-> this (.getXMLEventWriter))))
(defn get-xml-stream-writer
"Get the XMLStreamWriter used by this
StAXResult.
XMLStreamWriter will be null
if this StAXResult was created with a
XMLEventWriter.
returns: XMLStreamWriter used by this
StAXResult. - `javax.xml.stream.XMLStreamWriter`"
(^javax.xml.stream.XMLStreamWriter [^StAXResult this]
(-> this (.getXMLStreamWriter))))
(defn set-system-id
"In the context of a StAXResult, it is not appropriate
to explicitly set the system identifier.
The XMLEventWriter or XMLStreamWriter
used to construct this StAXResult determines the
system identifier of the XML result.
An UnsupportedOperationException is always
thrown by this method.
system-id - Ignored. - `java.lang.String`
throws: java.lang.UnsupportedOperationException - Is always thrown by this method."
([^StAXResult this ^java.lang.String system-id]
(-> this (.setSystemId system-id))))
(defn get-system-id
"The returned system identifier is always null.
returns: The returned system identifier is always null. - `java.lang.String`"
(^java.lang.String [^StAXResult this]
(-> this (.getSystemId))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.xml/src/javax/xml/transform/stax/StAXResult.clj | clojure | (ns javax.xml.transform.stax.StAXResult
"Acts as a holder for an XML Result in the
form of a StAX writer,i.e.
XMLStreamWriter or XMLEventWriter.
StAXResult can be used in all cases that accept
a Result, e.g. Transformer,
Validator which accept
Result as input."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.transform.stax StAXResult]))
(defn ->st-ax-result
"Constructor.
Creates a new instance of a StAXResult
by supplying an XMLEventWriter.
XMLEventWriter must be a
non-null reference.
xml-event-writer - XMLEventWriter used to create this StAXResult. - `javax.xml.stream.XMLEventWriter`
throws: java.lang.IllegalArgumentException - If xmlEventWriter == null."
(^StAXResult [^javax.xml.stream.XMLEventWriter xml-event-writer]
(new StAXResult xml-event-writer)))
(def *-feature
"Static Constant.
If TransformerFactory.getFeature(String name)
returns true when passed this value as an argument,
the Transformer supports Result output of this type.
type: java.lang.String"
StAXResult/FEATURE)
(defn get-xml-event-writer
"Get the XMLEventWriter used by this
StAXResult.
XMLEventWriter will be null
if this StAXResult was created with a
XMLStreamWriter.
returns: XMLEventWriter used by this
StAXResult. - `javax.xml.stream.XMLEventWriter`"
(^javax.xml.stream.XMLEventWriter [^StAXResult this]
(-> this (.getXMLEventWriter))))
(defn get-xml-stream-writer
"Get the XMLStreamWriter used by this
StAXResult.
XMLStreamWriter will be null
if this StAXResult was created with a
XMLEventWriter.
returns: XMLStreamWriter used by this
StAXResult. - `javax.xml.stream.XMLStreamWriter`"
(^javax.xml.stream.XMLStreamWriter [^StAXResult this]
(-> this (.getXMLStreamWriter))))
(defn set-system-id
"In the context of a StAXResult, it is not appropriate
to explicitly set the system identifier.
The XMLEventWriter or XMLStreamWriter
used to construct this StAXResult determines the
system identifier of the XML result.
An UnsupportedOperationException is always
thrown by this method.
system-id - Ignored. - `java.lang.String`
throws: java.lang.UnsupportedOperationException - Is always thrown by this method."
([^StAXResult this ^java.lang.String system-id]
(-> this (.setSystemId system-id))))
(defn get-system-id
"The returned system identifier is always null.
returns: The returned system identifier is always null. - `java.lang.String`"
(^java.lang.String [^StAXResult this]
(-> this (.getSystemId))))
|
|
f190f52fd34a13a3d6de9260b520b5761d1a4730352a2d45913db02ed2194600 | BranchTaken/Hemlock | test_of_real_to_real.ml | open! Basis.Rudiments
open! Basis
open I256
let test () =
let rec test_rs rs = begin
match rs with
| [] -> ()
| r :: rs' -> begin
let x = of_real r in
File.Fmt.stdout
|> Fmt.fmt "of_real "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt "; to_real -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex (to_real x)
|> Fmt.fmt "\n"
|> ignore;
test_rs rs'
end
end in
let rs = [
-1.;
0.;
0x1.1p-1;
1.;
0x1.f_ffff_ffff_ffffp48;
0x1.f_ffff_ffff_ffffp52;
0x1.f_ffff_ffff_ffffp56;
0x1.f_ffff_ffff_ffffp127;
0x1.f_ffff_ffff_ffffp128;
0x1.f_ffff_ffff_ffffp132;
0x1.f_ffff_ffff_ffffp254;
0x1.f_ffff_ffff_ffffp255;
0x1.f_ffff_ffff_ffffp256;
0x1.f_ffff_ffff_ffffp260;
0x1p253;
0x1p254;
0x1p255;
] in
test_rs rs;
File.Fmt.stdout |> Fmt.fmt "\n" |> ignore;
let rec test_xs xs = begin
match xs with
| [] -> ()
| x :: xs' -> begin
let r = to_real x in
File.Fmt.stdout
|> Fmt.fmt "to_real "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt "; of_real -> "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true (of_real r)
|> Fmt.fmt "\n"
|> ignore;
test_xs xs'
end
end in
let two = one + one in
let xs = [
zero;
one;
two;
min_value / two;
min_value;
max_value;
] in
test_xs xs
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/i256/test_of_real_to_real.ml | ocaml | open! Basis.Rudiments
open! Basis
open I256
let test () =
let rec test_rs rs = begin
match rs with
| [] -> ()
| r :: rs' -> begin
let x = of_real r in
File.Fmt.stdout
|> Fmt.fmt "of_real "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt "; to_real -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex (to_real x)
|> Fmt.fmt "\n"
|> ignore;
test_rs rs'
end
end in
let rs = [
-1.;
0.;
0x1.1p-1;
1.;
0x1.f_ffff_ffff_ffffp48;
0x1.f_ffff_ffff_ffffp52;
0x1.f_ffff_ffff_ffffp56;
0x1.f_ffff_ffff_ffffp127;
0x1.f_ffff_ffff_ffffp128;
0x1.f_ffff_ffff_ffffp132;
0x1.f_ffff_ffff_ffffp254;
0x1.f_ffff_ffff_ffffp255;
0x1.f_ffff_ffff_ffffp256;
0x1.f_ffff_ffff_ffffp260;
0x1p253;
0x1p254;
0x1p255;
] in
test_rs rs;
File.Fmt.stdout |> Fmt.fmt "\n" |> ignore;
let rec test_xs xs = begin
match xs with
| [] -> ()
| x :: xs' -> begin
let r = to_real x in
File.Fmt.stdout
|> Fmt.fmt "to_real "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt "; of_real -> "
|> fmt ~alt:true ~zpad:true ~width:64L ~radix:Radix.Hex ~pretty:true (of_real r)
|> Fmt.fmt "\n"
|> ignore;
test_xs xs'
end
end in
let two = one + one in
let xs = [
zero;
one;
two;
min_value / two;
min_value;
max_value;
] in
test_xs xs
let _ = test ()
|
|
a8ef177dc0295326ec451dd0389b49ae2b9b5df287c299b6f5329e7c7b591ad6 | fused-effects/fused-effects | Parser.hs | {-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Parser
( example
) where
import Control.Algebra
import Control.Carrier.Cut.Church
import Control.Carrier.NonDet.Church
import Control.Carrier.State.Strict
import Control.Monad (replicateM)
import Data.Char
import Data.Kind (Type)
import Data.List (intercalate)
import Hedgehog
import qualified Hedgehog.Function as Fn
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Utils
example :: TestTree
example = testGroup "parser"
[ testGroup "parse"
[ testProperty "returns pure values at the end of input" . property $ do
a <- forAll genFactor
run (runNonDetA (parse "" (pure a))) === [a]
, testProperty "fails if input remains" . property $ do
c <- forAll Gen.alphaNum
cs <- forAll (Gen.string (Range.linear 0 10) Gen.alphaNum)
a <- forAll genFactor
run (runNonDetA (parse (c:cs) (pure a))) === []
]
, testGroup "satisfy"
[ testProperty "matches with a predicate" . property $ do
c <- forAll Gen.alphaNum
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c] (satisfy f))) === [c | f c]
, testProperty "fails at end of input" . property $ do
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse "" (satisfy f))) === []
, testProperty "fails if input remains" . property $ do
(c1, c2) <- forAll ((,) <$> Gen.alphaNum <*> Gen.alphaNum)
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c1, c2] (satisfy f))) === []
, testProperty "consumes input" . property $ do
c1 <- forAll Gen.alphaNum
c2 <- forAll Gen.alphaNum
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c1, c2] ((,) <$> satisfy f <*> satisfy f))) === [(c1, c2) | f c1, f c2]
]
, testGroup "factor"
[ testProperty "matches positive integers" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) factor)) === [abs a]
, testProperty "matches parenthesized expressions" . property $ do
as <- forAll (Gen.sized (arbNested genFactor))
run (runCutA (parse ('(' : intercalate "+" (intercalate "*" . map (show . abs) . (1:) <$> [0]:as) ++ ")") factor)) === [sum (map (product . map abs) as)]
]
, testGroup "term"
[ testProperty "matches factors" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) term)) === [abs a]
, testProperty "matches multiplication" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "*" (show . abs <$> 1:as)) term)) === [product (map abs as)]
]
, testGroup "expr"
[ testProperty "matches factors" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) expr)) === [abs a]
, testProperty "matches multiplication" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "*" (show . abs <$> 1:as)) expr)) === [product (map abs as)]
, testProperty "matches addition" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "+" (show . abs <$> 0:as)) expr)) === [sum (map abs as)]
, testProperty "respects order of operations" . property $ do
as <- forAll (Gen.sized (arbNested (Gen.integral (Range.linear 0 100))))
run (runCutA (parse (intercalate "+" (intercalate "*" . map (show . abs) . (1:) <$> [0]:as)) expr)) === [sum (map (product . map abs) as)]
]
]
where
arbNested :: Gen a -> Range.Size -> Gen [[a]]
arbNested _ 0 = pure []
arbNested g n = do
m <- Gen.integral (Range.linear 0 10)
let n' = n `div` (m + 1)
replicateM (Range.unSize m) (Gen.list (Range.singleton (Range.unSize n')) g)
predicate = Fn.fn Gen.bool
genFactor = Gen.integral (Range.linear 0 100)
genFactors = Gen.list (Range.linear 0 10) genFactor
data Symbol (m :: Type -> Type) k where
Satisfy :: (Char -> Bool) -> Symbol m Char
satisfy :: Has Symbol sig m => (Char -> Bool) -> m Char
satisfy p = send (Satisfy p)
char :: Has Symbol sig m => Char -> m Char
char = satisfy . (==)
digit :: Has Symbol sig m => m Char
digit = satisfy isDigit
parens :: Has Symbol sig m => m a -> m a
parens m = char '(' *> m <* char ')'
parse :: (Alternative m, Monad m) => String -> ParseC m a -> m a
parse input = (>>= exhaustive) . runState input . runParseC
where exhaustive ("", a) = pure a
exhaustive _ = empty
newtype ParseC m a = ParseC { runParseC :: StateC String m a }
deriving (Alternative, Applicative, Functor, Monad)
instance (Alternative m, Algebra sig m) => Algebra (Symbol :+: sig) (ParseC m) where
alg hdl sig ctx = case sig of
L (Satisfy p) -> do
input <- ParseC get
case input of
c:cs | p c -> c <$ ctx <$ ParseC (put cs)
_ -> empty
R other -> ParseC (alg (runParseC . hdl) (R other) ctx)
  {-# INLINE alg #-}
expr :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
expr = do
i <- term
call ((i +) <$ char '+' <* cut <*> expr
<|> pure i)
term :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
term = do
i <- factor
call ((i *) <$ char '*' <* cut <*> term
<|> pure i)
factor :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
factor
= read <$> some digit
<|> parens expr
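-- A minimal sketch of invoking the parser directly, mirroring the pattern used in
-- the properties above; `demoParse` is an illustrative name, everything else is
-- defined in this module or in fused-effects.
demoParse :: [Int]
demoParse = run (runCutA (parse "1+2*3" expr))
-- expected: [7], because multiplication is parsed below addition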
| null | https://raw.githubusercontent.com/fused-effects/fused-effects/9790ed4fb2cbaaf8933ce0eb42d7c55bc38f12ac/examples/Parser.hs | haskell | # LANGUAGE DeriveTraversable #
# LANGUAGE GADTs # | # LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Parser
( example
) where
import Control.Algebra
import Control.Carrier.Cut.Church
import Control.Carrier.NonDet.Church
import Control.Carrier.State.Strict
import Control.Monad (replicateM)
import Data.Char
import Data.Kind (Type)
import Data.List (intercalate)
import Hedgehog
import qualified Hedgehog.Function as Fn
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Utils
example :: TestTree
example = testGroup "parser"
[ testGroup "parse"
[ testProperty "returns pure values at the end of input" . property $ do
a <- forAll genFactor
run (runNonDetA (parse "" (pure a))) === [a]
, testProperty "fails if input remains" . property $ do
c <- forAll Gen.alphaNum
cs <- forAll (Gen.string (Range.linear 0 10) Gen.alphaNum)
a <- forAll genFactor
run (runNonDetA (parse (c:cs) (pure a))) === []
]
, testGroup "satisfy"
[ testProperty "matches with a predicate" . property $ do
c <- forAll Gen.alphaNum
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c] (satisfy f))) === [c | f c]
, testProperty "fails at end of input" . property $ do
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse "" (satisfy f))) === []
, testProperty "fails if input remains" . property $ do
(c1, c2) <- forAll ((,) <$> Gen.alphaNum <*> Gen.alphaNum)
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c1, c2] (satisfy f))) === []
, testProperty "consumes input" . property $ do
c1 <- forAll Gen.alphaNum
c2 <- forAll Gen.alphaNum
f <- (. ord) <$> Fn.forAllFn predicate
run (runNonDetA (parse [c1, c2] ((,) <$> satisfy f <*> satisfy f))) === [(c1, c2) | f c1, f c2]
]
, testGroup "factor"
[ testProperty "matches positive integers" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) factor)) === [abs a]
, testProperty "matches parenthesized expressions" . property $ do
as <- forAll (Gen.sized (arbNested genFactor))
run (runCutA (parse ('(' : intercalate "+" (intercalate "*" . map (show . abs) . (1:) <$> [0]:as) ++ ")") factor)) === [sum (map (product . map abs) as)]
]
, testGroup "term"
[ testProperty "matches factors" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) term)) === [abs a]
, testProperty "matches multiplication" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "*" (show . abs <$> 1:as)) term)) === [product (map abs as)]
]
, testGroup "expr"
[ testProperty "matches factors" . property $ do
a <- forAll genFactor
run (runCutA (parse (show (abs a)) expr)) === [abs a]
, testProperty "matches multiplication" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "*" (show . abs <$> 1:as)) expr)) === [product (map abs as)]
, testProperty "matches addition" . property $ do
as <- forAll genFactors
run (runCutA (parse (intercalate "+" (show . abs <$> 0:as)) expr)) === [sum (map abs as)]
, testProperty "respects order of operations" . property $ do
as <- forAll (Gen.sized (arbNested (Gen.integral (Range.linear 0 100))))
run (runCutA (parse (intercalate "+" (intercalate "*" . map (show . abs) . (1:) <$> [0]:as)) expr)) === [sum (map (product . map abs) as)]
]
]
where
arbNested :: Gen a -> Range.Size -> Gen [[a]]
arbNested _ 0 = pure []
arbNested g n = do
m <- Gen.integral (Range.linear 0 10)
let n' = n `div` (m + 1)
replicateM (Range.unSize m) (Gen.list (Range.singleton (Range.unSize n')) g)
predicate = Fn.fn Gen.bool
genFactor = Gen.integral (Range.linear 0 100)
genFactors = Gen.list (Range.linear 0 10) genFactor
data Symbol (m :: Type -> Type) k where
Satisfy :: (Char -> Bool) -> Symbol m Char
satisfy :: Has Symbol sig m => (Char -> Bool) -> m Char
satisfy p = send (Satisfy p)
char :: Has Symbol sig m => Char -> m Char
char = satisfy . (==)
digit :: Has Symbol sig m => m Char
digit = satisfy isDigit
parens :: Has Symbol sig m => m a -> m a
parens m = char '(' *> m <* char ')'
parse :: (Alternative m, Monad m) => String -> ParseC m a -> m a
parse input = (>>= exhaustive) . runState input . runParseC
where exhaustive ("", a) = pure a
exhaustive _ = empty
newtype ParseC m a = ParseC { runParseC :: StateC String m a }
deriving (Alternative, Applicative, Functor, Monad)
instance (Alternative m, Algebra sig m) => Algebra (Symbol :+: sig) (ParseC m) where
alg hdl sig ctx = case sig of
L (Satisfy p) -> do
input <- ParseC get
case input of
c:cs | p c -> c <$ ctx <$ ParseC (put cs)
_ -> empty
R other -> ParseC (alg (runParseC . hdl) (R other) ctx)
# INLINE alg #
expr :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
expr = do
i <- term
call ((i +) <$ char '+' <* cut <*> expr
<|> pure i)
term :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
term = do
i <- factor
call ((i *) <$ char '*' <* cut <*> term
<|> pure i)
factor :: (Alternative m, Has Cut sig m, Has Symbol sig m) => m Int
factor
= read <$> some digit
<|> parens expr
|
feaecf216af51468fc3598fe0d026986bd6738351bc0581a64cb67aeaf34a687 | GaloisInc/json | Parallel.hs | -----------------------------------------------------------------------------
-- |
-- Module : Test.QuickCheck.Parallel
Copyright : ( c ) 2006
-- License : BSD-style (see the file LICENSE)
--
Maintainer :
-- Stability : experimental
Portability : non - portable ( uses Control . Exception , Control . Concurrent )
--
-- A parallel batch driver for running QuickCheck on threaded or SMP systems.
-- See the /Example.hs/ file for a complete overview.
--
module Parallel (
pRun,
pDet,
pNon
) where
import Test.QuickCheck
import Data.List
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Exception hiding (evaluate)
import System.Random
import System.IO (hFlush,stdout)
import Text.Printf
type Name = String
type Depth = Int
type Test = (Name, Depth -> IO String)
-- | Run a list of QuickCheck properties in parallel chunks, using
-- 'n' threads (first argument), and test to a depth of 'd'
-- (second argument). Compile your application with '-threaded' and run
-- with the SMP runtime's '-N4' (or however many OS threads you want to
-- donate), for best results.
--
-- > import Test.QuickCheck.Parallel
-- >
-- > do n <- getArgs >>= readIO . head
-- >    pRun n 1000 [ ("sort1", pDet prop_sort1) ]
--
-- Will run 'n' threads over the property list, to depth 1000.
--
pRun :: Int -> Int -> [Test] -> IO ()
pRun n depth tests = do
chan <- newChan
ps <- getChanContents chan
work <- newMVar tests
forM_ [1..n] $ forkIO . thread work chan
let wait xs i
| i >= n = return () -- done
| otherwise = case xs of
Nothing : xs -> wait xs $! i+1
Just s : xs -> putStr s >> hFlush stdout >> wait xs i
wait ps 0
where
thread :: MVar [Test] -> Chan (Maybe String) -> Int -> IO ()
thread work chan me = loop
where
loop = do
job <- modifyMVar work $ \jobs -> return $ case jobs of
[] -> ([], Nothing)
(j:js) -> (js, Just j)
case job of
Nothing -> writeChan chan Nothing -- done
Just (name,prop) -> do
v <- prop depth
writeChan chan . Just $ printf "%d: %-25s: %s" me name v
loop
-- | Wrap a property, and run it on a deterministic set of data
pDet :: Testable a => a -> Int -> IO String
pDet a n = mycheck Det defaultConfig
{ configMaxTest = n
, configEvery = \n args -> unlines args } a
-- | Wrap a property, and run it on a non-deterministic set of data
pNon :: Testable a => a -> Int -> IO String
pNon a n = mycheck NonDet defaultConfig
{ configMaxTest = n
, configEvery = \n args -> unlines args } a
data Mode = Det | NonDet
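-- A minimal driver sketch for pRun, assuming a caller-supplied property;
-- `exampleMain` and `prop_reverse` are illustrative names, not part of the API.
exampleMain :: IO ()
exampleMain =
  let prop_reverse xs = reverse (reverse xs) == (xs :: [Int])
  in pRun 4 500 [ ("reverse.reverse", pDet prop_reverse)
                , ("reverse.reverse/nondet", pNon prop_reverse) ]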
------------------------------------------------------------------------
mycheck :: Testable a => Mode -> Config -> a -> IO String
mycheck Det config a = do
let rnd = mkStdGen 99 -- deterministic
mytests config (evaluate a) rnd 0 0 []
mycheck NonDet config a = do
rnd <- newStdGen -- different each run
mytests config (evaluate a) rnd 0 0 []
mytests :: Config -> Gen Result -> StdGen -> Int -> Int -> [[String]] -> IO String
mytests config gen rnd0 ntest nfail stamps
| ntest == configMaxTest config = do done "OK," ntest stamps
| nfail == configMaxFail config = do done "Arguments exhausted after" ntest stamps
| otherwise = do
case ok result of
Nothing ->
mytests config gen rnd1 ntest (nfail+1) stamps
Just True ->
mytests config gen rnd1 (ntest+1) nfail (stamp result:stamps)
Just False ->
return ( "Falsifiable after "
++ show ntest
++ " tests:\n"
++ unlines (arguments result)
)
where
result = generate (configSize config ntest) rnd2 gen
(rnd1,rnd2) = split rnd0
done :: String -> Int -> [[String]] -> IO String
done mesg ntest stamps =
return ( mesg ++ " " ++ show ntest ++ " tests" ++ table )
where
table = display
. map entry
. reverse
. sort
. map pairLength
. group
. sort
. filter (not . null)
$ stamps
display [] = ".\n"
display [x] = " (" ++ x ++ ").\n"
display xs = ".\n" ++ unlines (map (++ ".") xs)
pairLength xss@(xs:_) = (length xss, xs)
entry (n, xs) = percentage n ntest
++ " "
++ concat (intersperse ", " xs)
percentage n m = show ((100 * n) `div` m) ++ "%"
forM_ = flip mapM_
| null | https://raw.githubusercontent.com/GaloisInc/json/329bbce8fd68a04a5377f48102c37d28160b246b/tests/Parallel.hs | haskell | ---------------------------------------------------------------------------
|
Module : Test.QuickCheck.Parallel
License : BSD-style (see the file LICENSE)
Stability : experimental
See the /Example.hs/ file for a complete overview.
with the SMP runtime's '-N4' (or however many OS threads you want to
donate), for best results.
> import Test.QuickCheck.Parallel
>
> do n <- getArgs >>= readIO . head
done
done
| Wrap a property, and run it on a deterministic set of data
| Wrap a property, and run it on a non-deterministic set of data
----------------------------------------------------------------------
deterministic
different each run | Copyright : ( c ) 2006
Maintainer :
Portability : non - portable ( uses Control . Exception , Control . Concurrent )
A parallel batch driver for running QuickCheck on threaded or SMP systems .
module Parallel (
pRun,
pDet,
pNon
) where
import Test.QuickCheck
import Data.List
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Exception hiding (evaluate)
import System.Random
import System.IO (hFlush,stdout)
import Text.Printf
type Name = String
type Depth = Int
type Test = (Name, Depth -> IO String)
| Run a list of QuickCheck properties in parallel chunks , using
' n ' threads ( first argument ) , and test to a depth of 'd '
( second argument ) . Compile your application with ' -threaded ' and run
> pRun n 1000 [ ( " sort1 " , ) ]
Will run ' n ' threads over the property list , to depth 1000 .
pRun :: Int -> Int -> [Test] -> IO ()
pRun n depth tests = do
chan <- newChan
ps <- getChanContents chan
work <- newMVar tests
forM_ [1..n] $ forkIO . thread work chan
let wait xs i
| otherwise = case xs of
Nothing : xs -> wait xs $! i+1
Just s : xs -> putStr s >> hFlush stdout >> wait xs i
wait ps 0
where
thread :: MVar [Test] -> Chan (Maybe String) -> Int -> IO ()
thread work chan me = loop
where
loop = do
job <- modifyMVar work $ \jobs -> return $ case jobs of
[] -> ([], Nothing)
(j:js) -> (js, Just j)
case job of
Just (name,prop) -> do
v <- prop depth
writeChan chan . Just $ printf "%d: %-25s: %s" me name v
loop
pDet :: Testable a => a -> Int -> IO String
pDet a n = mycheck Det defaultConfig
{ configMaxTest = n
, configEvery = \n args -> unlines args } a
pNon :: Testable a => a -> Int -> IO String
pNon a n = mycheck NonDet defaultConfig
{ configMaxTest = n
, configEvery = \n args -> unlines args } a
data Mode = Det | NonDet
mycheck :: Testable a => Mode -> Config -> a -> IO String
mycheck Det config a = do
mytests config (evaluate a) rnd 0 0 []
mycheck NonDet config a = do
mytests config (evaluate a) rnd 0 0 []
mytests :: Config -> Gen Result -> StdGen -> Int -> Int -> [[String]] -> IO String
mytests config gen rnd0 ntest nfail stamps
| ntest == configMaxTest config = do done "OK," ntest stamps
| nfail == configMaxFail config = do done "Arguments exhausted after" ntest stamps
| otherwise = do
case ok result of
Nothing ->
mytests config gen rnd1 ntest (nfail+1) stamps
Just True ->
mytests config gen rnd1 (ntest+1) nfail (stamp result:stamps)
Just False ->
return ( "Falsifiable after "
++ show ntest
++ " tests:\n"
++ unlines (arguments result)
)
where
result = generate (configSize config ntest) rnd2 gen
(rnd1,rnd2) = split rnd0
done :: String -> Int -> [[String]] -> IO String
done mesg ntest stamps =
return ( mesg ++ " " ++ show ntest ++ " tests" ++ table )
where
table = display
. map entry
. reverse
. sort
. map pairLength
. group
. sort
. filter (not . null)
$ stamps
display [] = ".\n"
display [x] = " (" ++ x ++ ").\n"
display xs = ".\n" ++ unlines (map (++ ".") xs)
pairLength xss@(xs:_) = (length xss, xs)
entry (n, xs) = percentage n ntest
++ " "
++ concat (intersperse ", " xs)
percentage n m = show ((100 * n) `div` m) ++ "%"
forM_ = flip mapM_
|
e1228e71d03281a6717fa253915ed658bdf1b3730c32d1e92ad31229bf9a7fbd | tonyvanriet/clj-slack-client | rtm_receive.clj | (ns clj-slack-client.rtm-receive
(:gen-class)
(:require
[clj-slack-client.team-state :as state]))
(defn dispatch-handle-event [event pass-event-to-host] (:type event))
(defmulti handle-event #'dispatch-handle-event)
(defmethod handle-event "message"
[event pass-event-to-host]
(pass-event-to-host event))
(defmethod handle-event "channel_joined"
[event pass-event-to-host]
(state/channel-joined (:channel event))
(pass-event-to-host event))
(defmethod handle-event :default
[event pass-event-to-host]
(pass-event-to-host event))
| null | https://raw.githubusercontent.com/tonyvanriet/clj-slack-client/6783f003ab93adae057890421622eb5e61ab033d/src/clj_slack_client/rtm_receive.clj | clojure | (ns clj-slack-client.rtm-receive
(:gen-class)
(:require
[clj-slack-client.team-state :as state]))
(defn dispatch-handle-event [event pass-event-to-host] (:type event))
(defmulti handle-event #'dispatch-handle-event)
(defmethod handle-event "message"
[event pass-event-to-host]
(pass-event-to-host event))
(defmethod handle-event "channel_joined"
[event pass-event-to-host]
(state/channel-joined (:channel event))
(pass-event-to-host event))
(defmethod handle-event :default
[event pass-event-to-host]
(pass-event-to-host event))
|
|
4c5dd1c16c0e5709820b8ab67f58233a869b90c6e0dd5ac2959424b4f9282dfa | ndmitchell/weeder | Cabal.hs | {-# LANGUAGE ViewPatterns, RecordWildCards #-}
module Cabal(
Cabal(..), CabalSection(..), CabalSectionType,
parseCabal,
selectCabalFile,
selectHiFiles
) where
import System.IO.Extra
import System.Directory.Extra
import System.FilePath
import qualified Data.HashMap.Strict as Map
import Util
import Data.Char
import Data.Maybe
import Data.List.Extra
import Data.Tuple.Extra
import Data.Either.Extra
import Data.Semigroup
import Prelude
selectCabalFile :: FilePath -> IO FilePath
selectCabalFile dir = do
xs <- listFiles dir
case filter ((==) ".cabal" . takeExtension) xs of
[x] -> return x
_ -> fail $ "Didn't find exactly 1 cabal file in " ++ dir
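-- A minimal sketch of how the helper above combines with parseCabal (defined
-- below); the "." working directory is just an illustrative argument.
loadCabalHere :: IO Cabal
loadCabalHere = selectCabalFile "." >>= parseCabal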
-- | Return the (exposed Hi files, internal Hi files, not found)
selectHiFiles :: FilePath -> Map.HashMap FilePathEq a -> CabalSection -> ([a], [a], [ModuleName])
selectHiFiles distDir his sect@CabalSection{..} = (external, internal, bad1++bad2)
where
(bad1, external) = partitionEithers $
[findHi his sect $ Left cabalMainIs | cabalMainIs /= ""] ++
[findHi his sect $ Right x | x <- cabalExposedModules]
(bad2, internal) = partitionEithers
[findHi his sect $ Right x | x <- filter (not . isPathsModule) cabalOtherModules]
findHi :: Map.HashMap FilePathEq a -> CabalSection -> Either FilePath ModuleName -> Either ModuleName a
findHi his cabal@CabalSection{..} name =
-- error $ show (poss, Map.keys his)
maybe (Left mname) Right $ firstJust (`Map.lookup` his) poss
where
mname = either takeFileName id name
poss = map filePathEq $ possibleHi distDir cabalSourceDirs cabalSectionType $ either (return . dropExtension) (splitOn ".") name
-- | This code is fragile and keeps going wrong, should probably try a less "guess everything"
-- and a more refined filter and test.
possibleHi :: FilePath -> [FilePath] -> CabalSectionType -> [String] -> [FilePath]
possibleHi distDir sourceDirs sectionType components =
[ joinPath (root : x : components) <.> "dump-hi"
| extra <- [".",distDir]
, root <- concat [["build" </> extra </> x </> (x ++ "-tmp")
,"build" </> extra </> x </> x
,"build" </> extra </> x </> (x ++ "-tmp") </> distDir </> "build" </> x </> (x ++ "-tmp")]
| Just x <- [cabalSectionTypeName sectionType]] ++
["build", "build" </> distDir </> "build"]
, x <- sourceDirs ++ ["."]]
data Cabal = Cabal
{cabalName :: PackageName
,cabalSections :: [CabalSection]
} deriving Show
instance Semigroup Cabal where
Cabal x1 x2 <> Cabal y1 y2 = Cabal (x1?:y1) (x2++y2)
instance Monoid Cabal where
mempty = Cabal "" []
mappend = (<>)
data CabalSectionType = Library (Maybe String) | Executable String | TestSuite String | Benchmark String
deriving (Eq,Ord)
cabalSectionTypeName :: CabalSectionType -> Maybe String
cabalSectionTypeName (Library x) = x
cabalSectionTypeName (Executable x) = Just x
cabalSectionTypeName (TestSuite x) = Just x
cabalSectionTypeName (Benchmark x) = Just x
instance Show CabalSectionType where
show (Library Nothing) = "library"
show (Library (Just x)) = "library:" ++ x
show (Executable x) = "exe:" ++ x
show (TestSuite x) = "test:" ++ x
show (Benchmark x) = "bench:" ++ x
instance Read CabalSectionType where
readsPrec _ "library" = [(Library Nothing,"")]
readsPrec _ x
| Just x <- stripPrefix "exe:" x = [(Executable x, "")]
| Just x <- stripPrefix "test:" x = [(TestSuite x, "")]
| Just x <- stripPrefix "bench:" x = [(Benchmark x, "")]
| Just x <- stripPrefix "library:" x = [(Library (Just x), "")]
readsPrec _ _ = []
data CabalSection = CabalSection
{cabalSectionType :: CabalSectionType
,cabalMainIs :: FilePath
,cabalExposedModules :: [ModuleName]
,cabalOtherModules :: [ModuleName]
,cabalSourceDirs :: [FilePath]
,cabalPackages :: [PackageName]
} deriving Show
instance Semigroup CabalSection where
CabalSection x1 x2 x3 x4 x5 x6 <> CabalSection y1 y2 y3 y4 y5 y6 =
CabalSection x1 (x2?:y2) (x3<>y3) (x4<>y4) (x5<>y5) (x6<>y6)
instance Monoid CabalSection where
mempty = CabalSection (Library Nothing) "" [] [] [] []
mappend = (<>)
parseCabal :: FilePath -> IO Cabal
parseCabal = fmap parseTop . readFile'
parseTop = mconcatMap f . parseHanging . filter (not . isComment) . lines
where
isComment = isPrefixOf "--" . trimStart
keyName = (lower *** fst . word1) . word1
f (keyName -> (key, name), xs) = case key of
"name:" -> mempty{cabalName=name}
"library" -> case name of
"" -> mempty{cabalSections=[parseSection (Library Nothing) xs]}
x -> mempty{cabalSections=[parseSection (Library (Just x)) xs]}
"executable" -> mempty{cabalSections=[parseSection (Executable name) xs]}
"test-suite" -> mempty{cabalSections=[parseSection (TestSuite name) xs]}
"benchmark" -> mempty{cabalSections=[parseSection (Benchmark name) xs]}
_ -> mempty
parseSection typ xs = mempty{cabalSectionType=typ} <> parse xs
where
parse = mconcatMap f . parseHanging
keyValues (x,xs) = let (x1,x2) = word1 x in (lower x1, trimEqual $ filter (not . null) $ x2:xs)
trimEqual xs = map (drop n) xs
where n = minimum $ 0 : map (length . takeWhile isSpace) xs
listSplit = concatMap (wordsBy (`elem` " ,"))
isPackageNameChar x = isAlphaNum x || x == '-'
parsePackage = dropSuffix "-any" . takeWhile isPackageNameChar . trim
f (keyValues -> (k,vs)) = case k of
"if" -> parse vs
"else" -> parse vs
"build-depends:" -> mempty{cabalPackages = map parsePackage . splitOn "," $ unwords vs}
"hs-source-dirs:" -> mempty{cabalSourceDirs=listSplit vs}
"exposed-modules:" -> mempty{cabalExposedModules=listSplit vs}
"other-modules:" -> mempty{cabalOtherModules=listSplit vs}
"main-is:" -> mempty{cabalMainIs=headDef "" vs}
_ -> mempty
| null | https://raw.githubusercontent.com/ndmitchell/weeder/3bc7ee09de6faf34cd60a0f4554aa1baf36f25e8/src/Cabal.hs | haskell | # LANGUAGE ViewPatterns, RecordWildCards #
| Return the (exposed Hi files, internal Hi files, not found)
error $ show (poss, Map.keys his)
| This code is fragile and keeps going wrong, should probably try a less "guess everything"
and a more refined filter and test. |
module Cabal(
Cabal(..), CabalSection(..), CabalSectionType,
parseCabal,
selectCabalFile,
selectHiFiles
) where
import System.IO.Extra
import System.Directory.Extra
import System.FilePath
import qualified Data.HashMap.Strict as Map
import Util
import Data.Char
import Data.Maybe
import Data.List.Extra
import Data.Tuple.Extra
import Data.Either.Extra
import Data.Semigroup
import Prelude
selectCabalFile :: FilePath -> IO FilePath
selectCabalFile dir = do
xs <- listFiles dir
case filter ((==) ".cabal" . takeExtension) xs of
[x] -> return x
_ -> fail $ "Didn't find exactly 1 cabal file in " ++ dir
selectHiFiles :: FilePath -> Map.HashMap FilePathEq a -> CabalSection -> ([a], [a], [ModuleName])
selectHiFiles distDir his sect@CabalSection{..} = (external, internal, bad1++bad2)
where
(bad1, external) = partitionEithers $
[findHi his sect $ Left cabalMainIs | cabalMainIs /= ""] ++
[findHi his sect $ Right x | x <- cabalExposedModules]
(bad2, internal) = partitionEithers
[findHi his sect $ Right x | x <- filter (not . isPathsModule) cabalOtherModules]
findHi :: Map.HashMap FilePathEq a -> CabalSection -> Either FilePath ModuleName -> Either ModuleName a
findHi his cabal@CabalSection{..} name =
maybe (Left mname) Right $ firstJust (`Map.lookup` his) poss
where
mname = either takeFileName id name
poss = map filePathEq $ possibleHi distDir cabalSourceDirs cabalSectionType $ either (return . dropExtension) (splitOn ".") name
possibleHi :: FilePath -> [FilePath] -> CabalSectionType -> [String] -> [FilePath]
possibleHi distDir sourceDirs sectionType components =
[ joinPath (root : x : components) <.> "dump-hi"
| extra <- [".",distDir]
, root <- concat [["build" </> extra </> x </> (x ++ "-tmp")
,"build" </> extra </> x </> x
,"build" </> extra </> x </> (x ++ "-tmp") </> distDir </> "build" </> x </> (x ++ "-tmp")]
| Just x <- [cabalSectionTypeName sectionType]] ++
["build", "build" </> distDir </> "build"]
, x <- sourceDirs ++ ["."]]
data Cabal = Cabal
{cabalName :: PackageName
,cabalSections :: [CabalSection]
} deriving Show
instance Semigroup Cabal where
Cabal x1 x2 <> Cabal y1 y2 = Cabal (x1?:y1) (x2++y2)
instance Monoid Cabal where
mempty = Cabal "" []
mappend = (<>)
data CabalSectionType = Library (Maybe String) | Executable String | TestSuite String | Benchmark String
deriving (Eq,Ord)
cabalSectionTypeName :: CabalSectionType -> Maybe String
cabalSectionTypeName (Library x) = x
cabalSectionTypeName (Executable x) = Just x
cabalSectionTypeName (TestSuite x) = Just x
cabalSectionTypeName (Benchmark x) = Just x
instance Show CabalSectionType where
show (Library Nothing) = "library"
show (Library (Just x)) = "library:" ++ x
show (Executable x) = "exe:" ++ x
show (TestSuite x) = "test:" ++ x
show (Benchmark x) = "bench:" ++ x
instance Read CabalSectionType where
readsPrec _ "library" = [(Library Nothing,"")]
readsPrec _ x
| Just x <- stripPrefix "exe:" x = [(Executable x, "")]
| Just x <- stripPrefix "test:" x = [(TestSuite x, "")]
| Just x <- stripPrefix "bench:" x = [(Benchmark x, "")]
| Just x <- stripPrefix "library:" x = [(Library (Just x), "")]
readsPrec _ _ = []
data CabalSection = CabalSection
{cabalSectionType :: CabalSectionType
,cabalMainIs :: FilePath
,cabalExposedModules :: [ModuleName]
,cabalOtherModules :: [ModuleName]
,cabalSourceDirs :: [FilePath]
,cabalPackages :: [PackageName]
} deriving Show
instance Semigroup CabalSection where
CabalSection x1 x2 x3 x4 x5 x6 <> CabalSection y1 y2 y3 y4 y5 y6 =
CabalSection x1 (x2?:y2) (x3<>y3) (x4<>y4) (x5<>y5) (x6<>y6)
instance Monoid CabalSection where
mempty = CabalSection (Library Nothing) "" [] [] [] []
mappend = (<>)
parseCabal :: FilePath -> IO Cabal
parseCabal = fmap parseTop . readFile'
parseTop = mconcatMap f . parseHanging . filter (not . isComment) . lines
where
isComment = isPrefixOf "--" . trimStart
keyName = (lower *** fst . word1) . word1
f (keyName -> (key, name), xs) = case key of
"name:" -> mempty{cabalName=name}
"library" -> case name of
"" -> mempty{cabalSections=[parseSection (Library Nothing) xs]}
x -> mempty{cabalSections=[parseSection (Library (Just x)) xs]}
"executable" -> mempty{cabalSections=[parseSection (Executable name) xs]}
"test-suite" -> mempty{cabalSections=[parseSection (TestSuite name) xs]}
"benchmark" -> mempty{cabalSections=[parseSection (Benchmark name) xs]}
_ -> mempty
parseSection typ xs = mempty{cabalSectionType=typ} <> parse xs
where
parse = mconcatMap f . parseHanging
keyValues (x,xs) = let (x1,x2) = word1 x in (lower x1, trimEqual $ filter (not . null) $ x2:xs)
trimEqual xs = map (drop n) xs
where n = minimum $ 0 : map (length . takeWhile isSpace) xs
listSplit = concatMap (wordsBy (`elem` " ,"))
isPackageNameChar x = isAlphaNum x || x == '-'
parsePackage = dropSuffix "-any" . takeWhile isPackageNameChar . trim
f (keyValues -> (k,vs)) = case k of
"if" -> parse vs
"else" -> parse vs
"build-depends:" -> mempty{cabalPackages = map parsePackage . splitOn "," $ unwords vs}
"hs-source-dirs:" -> mempty{cabalSourceDirs=listSplit vs}
"exposed-modules:" -> mempty{cabalExposedModules=listSplit vs}
"other-modules:" -> mempty{cabalOtherModules=listSplit vs}
"main-is:" -> mempty{cabalMainIs=headDef "" vs}
_ -> mempty
|
90b475d93dbef881cb6673e00c5c4217b79abc4e80a91f061f5072be14b614f9 | ocaml-flambda/ocaml-jst | asmlink.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)
(* *)
(*   Copyright 1996 Institut National de Recherche en Informatique et     *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Link a set of .cmx/.o files and produce an executable or a plugin *)
open Misc
open Format
val link: ppf_dump:formatter -> string list -> string -> unit
val link_shared: ppf_dump:formatter -> string list -> string -> unit
val call_linker_shared: string list -> string -> unit
val reset : unit -> unit
val check_consistency: filepath -> Cmx_format.unit_infos -> Digest.t -> unit
val extract_crc_interfaces: unit -> Import_info.t list
val extract_crc_implementations: unit -> Import_info.t list
type error =
| File_not_found of filepath
| Not_an_object_file of filepath
| Missing_implementations of (Compilation_unit.t * string list) list
| Inconsistent_interface of Compilation_unit.Name.t * filepath * filepath
| Inconsistent_implementation of Compilation_unit.t * filepath * filepath
| Assembler_error of filepath
| Linking_error of int
| Multiple_definition of Compilation_unit.Name.t * filepath * filepath
| Missing_cmx of filepath * Compilation_unit.t
exception Error of error
val report_error: formatter -> error -> unit
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/cc63992838e9c38833bbdf10cc8f70c6c9d077bc/asmcomp/asmlink.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Link a set of .cmx/.o files and produce an executable or a plugin | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Format
val link: ppf_dump:formatter -> string list -> string -> unit
val link_shared: ppf_dump:formatter -> string list -> string -> unit
val call_linker_shared: string list -> string -> unit
val reset : unit -> unit
val check_consistency: filepath -> Cmx_format.unit_infos -> Digest.t -> unit
val extract_crc_interfaces: unit -> Import_info.t list
val extract_crc_implementations: unit -> Import_info.t list
type error =
| File_not_found of filepath
| Not_an_object_file of filepath
| Missing_implementations of (Compilation_unit.t * string list) list
| Inconsistent_interface of Compilation_unit.Name.t * filepath * filepath
| Inconsistent_implementation of Compilation_unit.t * filepath * filepath
| Assembler_error of filepath
| Linking_error of int
| Multiple_definition of Compilation_unit.Name.t * filepath * filepath
| Missing_cmx of filepath * Compilation_unit.t
exception Error of error
val report_error: formatter -> error -> unit
|
7a86163e08e673c4b952f3ce47b028abf0b8bea7ab912959b1684af8e3514469 | lspitzner/brittany | Floating.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE NoImplicitPrelude #-}
module Language.Haskell.Brittany.Internal.Transformations.Floating where
import qualified Data.Generics.Uniplate.Direct as Uniplate
import qualified GHC.OldList as List
import Language.Haskell.Brittany.Internal.Prelude
import Language.Haskell.Brittany.Internal.PreludeUtils
import Language.Haskell.Brittany.Internal.Types
import Language.Haskell.Brittany.Internal.Utils
-- note that this is not total, and cannot be with that exact signature.
mergeIndents :: BrIndent -> BrIndent -> BrIndent
mergeIndents BrIndentNone x = x
mergeIndents x BrIndentNone = x
mergeIndents (BrIndentSpecial i) (BrIndentSpecial j) =
BrIndentSpecial (max i j)
mergeIndents _ _ = error "mergeIndents"
transformSimplifyFloating :: BriDoc -> BriDoc
transformSimplifyFloating = stepBO .> stepFull
-- note that semantically, stepFull is completely sufficient.
-- but the bottom-up switch-to-top-down-on-match transformation has much
-- better complexity.
  -- UPDATE: by now, stepBO does more than stepFull; for semantic equivalence
-- the push/pop cases would need to be copied over
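  -- An illustrative instance of the rewrites below, using the same constructors
  -- (`sig`, `a`, `b` are placeholders):
  --   BDAddBaseY (BrIndentSpecial 2) (BDCols sig [a, b])
  --     ==> BDCols sig [a, BDAddBaseY (BrIndentSpecial 2) b]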
where
descendPrior = transformDownMay $ \case
-- prior floating in
BDAnnotationPrior annKey1 (BDPar ind line indented) ->
Just $ BDPar ind (BDAnnotationPrior annKey1 line) indented
BDAnnotationPrior annKey1 (BDSeq (l : lr)) ->
Just $ BDSeq (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDLines (l : lr)) ->
Just $ BDLines (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDCols sig (l : lr)) ->
Just $ BDCols sig (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationPrior annKey1 x
BDAnnotationPrior annKey1 (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationPrior annKey1 x
_ -> Nothing
descendRest = transformDownMay $ \case
-- post floating in
BDAnnotationRest annKey1 (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationRest annKey1 indented
BDAnnotationRest annKey1 (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationRest annKey1 $ List.last cols]
BDAnnotationRest annKey1 (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationRest annKey1 x
BDAnnotationRest annKey1 (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationRest annKey1 x
_ -> Nothing
descendKW = transformDownMay $ \case
-- post floating in
BDAnnotationKW annKey1 kw (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationKW annKey1 kw indented
BDAnnotationKW annKey1 kw (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationKW annKey1 kw $ List.last list]
BDAnnotationKW annKey1 kw (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationKW annKey1 kw $ List.last list]
BDAnnotationKW annKey1 kw (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationKW annKey1 kw $ List.last cols]
BDAnnotationKW annKey1 kw (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationKW annKey1 kw x
BDAnnotationKW annKey1 kw (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationKW annKey1 kw x
_ -> Nothing
descendBYPush = transformDownMay $ \case
BDBaseYPushCur (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (BDBaseYPushCur (List.head cols) : List.tail cols)
BDBaseYPushCur (BDDebug s x) -> Just $ BDDebug s (BDBaseYPushCur x)
_ -> Nothing
descendBYPop = transformDownMay $ \case
BDBaseYPop (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (List.init cols ++ [BDBaseYPop (List.last cols)])
BDBaseYPop (BDDebug s x) -> Just $ BDDebug s (BDBaseYPop x)
_ -> Nothing
descendILPush = transformDownMay $ \case
BDIndentLevelPushCur (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (BDIndentLevelPushCur (List.head cols) : List.tail cols)
BDIndentLevelPushCur (BDDebug s x) ->
Just $ BDDebug s (BDIndentLevelPushCur x)
_ -> Nothing
descendILPop = transformDownMay $ \case
BDIndentLevelPop (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (List.init cols ++ [BDIndentLevelPop (List.last cols)])
BDIndentLevelPop (BDDebug s x) -> Just $ BDDebug s (BDIndentLevelPop x)
_ -> Nothing
descendAddB = transformDownMay $ \case
BDAddBaseY BrIndentNone x -> Just x
    -- AddIndent floats into Lines.
BDAddBaseY indent (BDLines lines) ->
Just $ BDLines $ BDAddBaseY indent <$> lines
    -- AddIndent floats into last column
BDAddBaseY indent (BDCols sig cols) ->
Just $ BDCols sig $ List.init cols ++ [BDAddBaseY indent $ List.last cols]
    -- merge AddIndent and Par
BDAddBaseY ind1 (BDPar ind2 line indented) ->
Just $ BDPar (mergeIndents ind1 ind2) line indented
BDAddBaseY ind (BDAnnotationPrior annKey1 x) ->
Just $ BDAnnotationPrior annKey1 (BDAddBaseY ind x)
BDAddBaseY ind (BDAnnotationRest annKey1 x) ->
Just $ BDAnnotationRest annKey1 (BDAddBaseY ind x)
BDAddBaseY ind (BDAnnotationKW annKey1 kw x) ->
Just $ BDAnnotationKW annKey1 kw (BDAddBaseY ind x)
BDAddBaseY ind (BDSeq list) ->
Just $ BDSeq $ List.init list ++ [BDAddBaseY ind (List.last list)]
BDAddBaseY _ lit@BDLit{} -> Just $ lit
BDAddBaseY ind (BDBaseYPushCur x) ->
Just $ BDBaseYPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDBaseYPop x) -> Just $ BDBaseYPop (BDAddBaseY ind x)
BDAddBaseY ind (BDDebug s x) -> Just $ BDDebug s (BDAddBaseY ind x)
BDAddBaseY ind (BDIndentLevelPop x) ->
Just $ BDIndentLevelPop (BDAddBaseY ind x)
BDAddBaseY ind (BDIndentLevelPushCur x) ->
Just $ BDIndentLevelPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDEnsureIndent ind2 x) ->
Just $ BDEnsureIndent (mergeIndents ind ind2) x
_ -> Nothing
stepBO :: BriDoc -> BriDoc
  stepBO = -- traceFunctionWith "stepBO" (show . briDocToDocWithAnns) (show . briDocToDocWithAnns) $
transformUp f
where
f = \case
x@BDAnnotationPrior{} -> descendPrior x
x@BDAnnotationKW{} -> descendKW x
x@BDAnnotationRest{} -> descendRest x
x@BDAddBaseY{} -> descendAddB x
x@BDBaseYPushCur{} -> descendBYPush x
x@BDBaseYPop{} -> descendBYPop x
x@BDIndentLevelPushCur{} -> descendILPush x
x@BDIndentLevelPop{} -> descendILPop x
x -> x
stepFull = -- traceFunctionWith "stepFull" (show . briDocToDocWithAnns) (show . briDocToDocWithAnns) $
Uniplate.rewrite $ \case
BDAddBaseY BrIndentNone x -> Just $ x
      -- AddIndent floats into Lines.
BDAddBaseY indent (BDLines lines) ->
Just $ BDLines $ BDAddBaseY indent <$> lines
      -- AddIndent floats into last column
BDAddBaseY indent (BDCols sig cols) ->
Just $ BDCols sig $ List.init cols ++ [BDAddBaseY indent $ List.last cols]
BDAddBaseY ind (BDSeq list) ->
Just $ BDSeq $ List.init list ++ [BDAddBaseY ind (List.last list)]
      -- merge AddIndent and Par
BDAddBaseY ind1 (BDPar ind2 line indented) ->
Just $ BDPar (mergeIndents ind1 ind2) line indented
BDAddBaseY _ lit@BDLit{} -> Just $ lit
BDAddBaseY ind (BDBaseYPushCur x) ->
Just $ BDBaseYPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDBaseYPop x) -> Just $ BDBaseYPop (BDAddBaseY ind x)
-- prior floating in
BDAnnotationPrior annKey1 (BDPar ind line indented) ->
Just $ BDPar ind (BDAnnotationPrior annKey1 line) indented
BDAnnotationPrior annKey1 (BDSeq (l : lr)) ->
Just $ BDSeq ((BDAnnotationPrior annKey1 l) : lr)
BDAnnotationPrior annKey1 (BDLines (l : lr)) ->
Just $ BDLines ((BDAnnotationPrior annKey1 l) : lr)
BDAnnotationPrior annKey1 (BDCols sig (l : lr)) ->
Just $ BDCols sig ((BDAnnotationPrior annKey1 l) : lr)
      -- EnsureIndent float-in
-- BDEnsureIndent indent (BDCols sig (col:colr)) ->
-- Just $ BDCols sig (BDEnsureIndent indent col : (BDAddBaseY indent <$> colr))
-- not sure if the following rule is necessary; tests currently are
-- unaffected.
      -- BDEnsureIndent indent (BDLines lines) ->
-- Just $ BDLines $ BDEnsureIndent indent <$> lines
-- post floating in
BDAnnotationRest annKey1 (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationRest annKey1 indented
BDAnnotationRest annKey1 (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationRest annKey1 $ List.last cols]
_ -> Nothing
| null | https://raw.githubusercontent.com/lspitzner/brittany/7399b7538835411727e025e1480ea96b5496416c/source/library/Language/Haskell/Brittany/Internal/Transformations/Floating.hs | haskell | note that this is not total, and cannot be with that exact signature.
note that semantically, stepFull is completely sufficient.
but the bottom-up switch-to-top-down-on-match transformation has much
better complexity.
the push/pop cases would need to be copied over
prior floating in
post floating in
post floating in
traceFunctionWith "stepFull" (show . briDocToDocWithAnns) (show . briDocToDocWithAnns) $
prior floating in
BDEnsureIndent indent (BDCols sig (col:colr)) ->
Just $ BDCols sig (BDEnsureIndent indent col : (BDAddBaseY indent <$> colr))
not sure if the following rule is necessary; tests currently are
unaffected.
Just $ BDLines $ BDEnsureIndent indent <$> lines
post floating in | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
module Language.Haskell.Brittany.Internal.Transformations.Floating where
import qualified Data.Generics.Uniplate.Direct as Uniplate
import qualified GHC.OldList as List
import Language.Haskell.Brittany.Internal.Prelude
import Language.Haskell.Brittany.Internal.PreludeUtils
import Language.Haskell.Brittany.Internal.Types
import Language.Haskell.Brittany.Internal.Utils
mergeIndents :: BrIndent -> BrIndent -> BrIndent
mergeIndents BrIndentNone x = x
mergeIndents x BrIndentNone = x
mergeIndents (BrIndentSpecial i) (BrIndentSpecial j) =
BrIndentSpecial (max i j)
mergeIndents _ _ = error "mergeIndents"
transformSimplifyFloating :: BriDoc -> BriDoc
transformSimplifyFloating = stepBO .> stepFull
  -- UPDATE: by now, stepBO does more than stepFull; for semantic equivalence the push/pop cases would need to be copied over
where
descendPrior = transformDownMay $ \case
BDAnnotationPrior annKey1 (BDPar ind line indented) ->
Just $ BDPar ind (BDAnnotationPrior annKey1 line) indented
BDAnnotationPrior annKey1 (BDSeq (l : lr)) ->
Just $ BDSeq (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDLines (l : lr)) ->
Just $ BDLines (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDCols sig (l : lr)) ->
Just $ BDCols sig (BDAnnotationPrior annKey1 l : lr)
BDAnnotationPrior annKey1 (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationPrior annKey1 x
BDAnnotationPrior annKey1 (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationPrior annKey1 x
_ -> Nothing
descendRest = transformDownMay $ \case
BDAnnotationRest annKey1 (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationRest annKey1 indented
BDAnnotationRest annKey1 (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationRest annKey1 $ List.last cols]
BDAnnotationRest annKey1 (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationRest annKey1 x
BDAnnotationRest annKey1 (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationRest annKey1 x
_ -> Nothing
descendKW = transformDownMay $ \case
BDAnnotationKW annKey1 kw (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationKW annKey1 kw indented
BDAnnotationKW annKey1 kw (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationKW annKey1 kw $ List.last list]
BDAnnotationKW annKey1 kw (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationKW annKey1 kw $ List.last list]
BDAnnotationKW annKey1 kw (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationKW annKey1 kw $ List.last cols]
BDAnnotationKW annKey1 kw (BDAddBaseY indent x) ->
Just $ BDAddBaseY indent $ BDAnnotationKW annKey1 kw x
BDAnnotationKW annKey1 kw (BDDebug s x) ->
Just $ BDDebug s $ BDAnnotationKW annKey1 kw x
_ -> Nothing
descendBYPush = transformDownMay $ \case
BDBaseYPushCur (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (BDBaseYPushCur (List.head cols) : List.tail cols)
BDBaseYPushCur (BDDebug s x) -> Just $ BDDebug s (BDBaseYPushCur x)
_ -> Nothing
descendBYPop = transformDownMay $ \case
BDBaseYPop (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (List.init cols ++ [BDBaseYPop (List.last cols)])
BDBaseYPop (BDDebug s x) -> Just $ BDDebug s (BDBaseYPop x)
_ -> Nothing
descendILPush = transformDownMay $ \case
BDIndentLevelPushCur (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (BDIndentLevelPushCur (List.head cols) : List.tail cols)
BDIndentLevelPushCur (BDDebug s x) ->
Just $ BDDebug s (BDIndentLevelPushCur x)
_ -> Nothing
descendILPop = transformDownMay $ \case
BDIndentLevelPop (BDCols sig cols@(_ : _)) ->
Just $ BDCols sig (List.init cols ++ [BDIndentLevelPop (List.last cols)])
BDIndentLevelPop (BDDebug s x) -> Just $ BDDebug s (BDIndentLevelPop x)
_ -> Nothing
descendAddB = transformDownMay $ \case
BDAddBaseY BrIndentNone x -> Just x
    -- AddIndent floats into Lines.
BDAddBaseY indent (BDLines lines) ->
Just $ BDLines $ BDAddBaseY indent <$> lines
    -- AddIndent floats into last column
BDAddBaseY indent (BDCols sig cols) ->
Just $ BDCols sig $ List.init cols ++ [BDAddBaseY indent $ List.last cols]
    -- merge AddIndent and Par
BDAddBaseY ind1 (BDPar ind2 line indented) ->
Just $ BDPar (mergeIndents ind1 ind2) line indented
BDAddBaseY ind (BDAnnotationPrior annKey1 x) ->
Just $ BDAnnotationPrior annKey1 (BDAddBaseY ind x)
BDAddBaseY ind (BDAnnotationRest annKey1 x) ->
Just $ BDAnnotationRest annKey1 (BDAddBaseY ind x)
BDAddBaseY ind (BDAnnotationKW annKey1 kw x) ->
Just $ BDAnnotationKW annKey1 kw (BDAddBaseY ind x)
BDAddBaseY ind (BDSeq list) ->
Just $ BDSeq $ List.init list ++ [BDAddBaseY ind (List.last list)]
BDAddBaseY _ lit@BDLit{} -> Just $ lit
BDAddBaseY ind (BDBaseYPushCur x) ->
Just $ BDBaseYPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDBaseYPop x) -> Just $ BDBaseYPop (BDAddBaseY ind x)
BDAddBaseY ind (BDDebug s x) -> Just $ BDDebug s (BDAddBaseY ind x)
BDAddBaseY ind (BDIndentLevelPop x) ->
Just $ BDIndentLevelPop (BDAddBaseY ind x)
BDAddBaseY ind (BDIndentLevelPushCur x) ->
Just $ BDIndentLevelPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDEnsureIndent ind2 x) ->
Just $ BDEnsureIndent (mergeIndents ind ind2) x
_ -> Nothing
stepBO :: BriDoc -> BriDoc
  stepBO = -- traceFunctionWith "stepBO" (show . briDocToDocWithAnns) (show . briDocToDocWithAnns) $
transformUp f
where
f = \case
x@BDAnnotationPrior{} -> descendPrior x
x@BDAnnotationKW{} -> descendKW x
x@BDAnnotationRest{} -> descendRest x
x@BDAddBaseY{} -> descendAddB x
x@BDBaseYPushCur{} -> descendBYPush x
x@BDBaseYPop{} -> descendBYPop x
x@BDIndentLevelPushCur{} -> descendILPush x
x@BDIndentLevelPop{} -> descendILPop x
x -> x
  stepFull =
    Uniplate.rewrite $ \case
BDAddBaseY BrIndentNone x -> Just $ x
      -- AddIndent floats into Lines.
BDAddBaseY indent (BDLines lines) ->
Just $ BDLines $ BDAddBaseY indent <$> lines
      -- AddIndent floats into last column
BDAddBaseY indent (BDCols sig cols) ->
Just $ BDCols sig $ List.init cols ++ [BDAddBaseY indent $ List.last cols]
BDAddBaseY ind (BDSeq list) ->
Just $ BDSeq $ List.init list ++ [BDAddBaseY ind (List.last list)]
      -- merge AddIndent and Par
BDAddBaseY ind1 (BDPar ind2 line indented) ->
Just $ BDPar (mergeIndents ind1 ind2) line indented
BDAddBaseY _ lit@BDLit{} -> Just $ lit
BDAddBaseY ind (BDBaseYPushCur x) ->
Just $ BDBaseYPushCur (BDAddBaseY ind x)
BDAddBaseY ind (BDBaseYPop x) -> Just $ BDBaseYPop (BDAddBaseY ind x)
BDAnnotationPrior annKey1 (BDPar ind line indented) ->
Just $ BDPar ind (BDAnnotationPrior annKey1 line) indented
BDAnnotationPrior annKey1 (BDSeq (l : lr)) ->
Just $ BDSeq ((BDAnnotationPrior annKey1 l) : lr)
BDAnnotationPrior annKey1 (BDLines (l : lr)) ->
Just $ BDLines ((BDAnnotationPrior annKey1 l) : lr)
BDAnnotationPrior annKey1 (BDCols sig (l : lr)) ->
Just $ BDCols sig ((BDAnnotationPrior annKey1 l) : lr)
      -- EnsureIndent float-in
      -- BDEnsureIndent indent (BDLines lines) ->
BDAnnotationRest annKey1 (BDPar ind line indented) ->
Just $ BDPar ind line $ BDAnnotationRest annKey1 indented
BDAnnotationRest annKey1 (BDSeq list) ->
Just
$ BDSeq
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDLines list) ->
Just
$ BDLines
$ List.init list
++ [BDAnnotationRest annKey1 $ List.last list]
BDAnnotationRest annKey1 (BDCols sig cols) ->
Just
$ BDCols sig
$ List.init cols
++ [BDAnnotationRest annKey1 $ List.last cols]
_ -> Nothing
|
25df044d82974fd483284151f350e17937313c19a6b6295b2b43672005f0e0c0 | degree9/enterprise | async.cljs | (ns degree9.async
(:require-macros degree9.async))
| null | https://raw.githubusercontent.com/degree9/enterprise/36e4f242c18b1dde54d5a15c668b17dc800c01ff/src/degree9/async.cljs | clojure | (ns degree9.async
(:require-macros degree9.async))
|
|
db9f82dc466634d7b9eb65ad5a7c1804aac3a0d192aa6468441f0e0354e9998f | jebberjeb/viz.cljc | image.cljs | (ns viz.image
(:require [vizjs]))
(defn image
[dot-string]
(js/Viz dot-string))
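;; A minimal usage sketch; the DOT string is illustrative only.
(comment
  (image "digraph G { a -> b }"))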
| null | https://raw.githubusercontent.com/jebberjeb/viz.cljc/0194d6510aa06de03577f8589faf80d7116afe0f/src/viz/image.cljs | clojure | (ns viz.image
(:require [vizjs]))
(defn image
[dot-string]
(js/Viz dot-string))
|
|
f19621cd19c86d518ef21036c4e06ba3e3007a62ed057ca86d7b02e1ee9c8cff | spurious/sagittarius-scheme-mirror | private.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;;  dbm/private.scm - abstract base DBM class library
;;;
;;;   Copyright (c) 2010-2013  < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; to hide internal macro...
(library (dbm private)
(export <dbm> <dbm-meta>
%dbm-k2s %dbm-s2k %dbm-v2s %dbm-s2v)
(import (rnrs) (clos user))
(define-class <dbm-meta> (<class>) ())
(define-class <dbm> ()
((path :init-keyword :path)
(rw-mode :init-keyword :rw-mode :init-form :write)
(file-mode :init-keyword :file-mode :init-keyword #o664)
(key-convert :init-keyword :key-convert :init-form #f)
(value-convert :init-keyword :value-convert :init-form #f)
;; internal. set up by dbm-open
k2s s2k v2s s2v)
:metaclass <dbm-meta>)
;; Macros & procedures that can be used by implementation modules
(define-syntax %dbm-k2s
(syntax-rules ()
((_ self key) ((slot-ref self 'k2s) key))))
(define-syntax %dbm-s2k
(syntax-rules ()
((_ self key) ((slot-ref self 's2k) key))))
(define-syntax %dbm-v2s
(syntax-rules ()
((_ self key) ((slot-ref self 'v2s) key))))
(define-syntax %dbm-s2v
(syntax-rules ()
((_ self key) ((slot-ref self 's2v) key))))
) | null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/dbm/private.scm | scheme | coding : utf-8 ; -*-
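;; Illustrative sketch (not part of the original file): a concrete backend
;; importing (dbm private) is expected to run keys and values through the
;; cached converters before touching its storage; the names below are
;; hypothetical.
;;
;; (define (my-dbm-put! dbm key value)
;;   (let ((k (%dbm-k2s dbm key))
;;         (v (%dbm-v2s dbm value)))
;;     (my-backend-store! dbm k v)))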
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
to hide internal macro... | dbm / private.scm - abstract base DBM class library
Copyright ( c ) 2010 - 2013 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(library (dbm private)
(export <dbm> <dbm-meta>
%dbm-k2s %dbm-s2k %dbm-v2s %dbm-s2v)
(import (rnrs) (clos user))
(define-class <dbm-meta> (<class>) ())
(define-class <dbm> ()
((path :init-keyword :path)
(rw-mode :init-keyword :rw-mode :init-form :write)
(file-mode :init-keyword :file-mode :init-keyword #o664)
(key-convert :init-keyword :key-convert :init-form #f)
(value-convert :init-keyword :value-convert :init-form #f)
internal . set up by dbm - open
k2s s2k v2s s2v)
:metaclass <dbm-meta>)
Macros & procedures that can be used by implementation modules
(define-syntax %dbm-k2s
(syntax-rules ()
((_ self key) ((slot-ref self 'k2s) key))))
(define-syntax %dbm-s2k
(syntax-rules ()
((_ self key) ((slot-ref self 's2k) key))))
(define-syntax %dbm-v2s
(syntax-rules ()
((_ self key) ((slot-ref self 'v2s) key))))
(define-syntax %dbm-s2v
(syntax-rules ()
((_ self key) ((slot-ref self 's2v) key))))
) |
a6d5a2e4f5c6fb9dc615793d594707086c1dc9fd26852b625167fb216b38ad65 | reborg/clojure-essential-reference | 2.clj | < 1 >
< 2 >
{:initial-count 3}
(meta (with-meta (with-meta [1 2 3] {:a 1}) {:a 2})) ; <3>
{ : a 2 }
< 4 >
;; nil
< 5 >
ClassCastException clojure.lang . Atom can not be cast to clojure.lang . | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/OtherFunctions/VarsandNamespaces/meta%2Cwith-meta%2Cvary-meta%2Calter-meta!andreset-meta!/2.clj | clojure | <3>
nil | < 1 >
< 2 >
{:initial-count 3}
{ : a 2 }
< 4 >
< 5 >
ClassCastException clojure.lang . Atom can not be cast to clojure.lang . |
348ba7188c803d8f0ddf66ca0712d0712f9f4a45de53cf114a2ac81a7701d618 | jabber-at/ejabberd | mod_proxy65_sql.erl | %%%-------------------------------------------------------------------
%%% @author < >
%%% Created : 30 Mar 2017 by < >
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2018   ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License along
%%% with this program; if not, write to the Free Software Foundation, Inc.,
%%% 51 Franklin Street, Fifth Floor, Boston, USA.
%%%
%%%-------------------------------------------------------------------
-module(mod_proxy65_sql).
-behaviour(mod_proxy65).
-compile([{parse_transform, ejabberd_sql_pt}]).
%% API
-export([init/0, register_stream/2, unregister_stream/1, activate_stream/4]).
-include("logger.hrl").
-include("ejabberd_sql_pt.hrl").
%%%===================================================================
%%% API
%%%===================================================================
init() ->
NodeS = erlang:atom_to_binary(node(), latin1),
?DEBUG("Cleaning SQL 'proxy65' table...", []),
case ejabberd_sql:sql_query(
ejabberd_config:get_myname(),
?SQL("delete from proxy65 where "
"node_i=%(NodeS)s or node_t=%(NodeS)s")) of
{updated, _} ->
ok;
Err ->
?ERROR_MSG("failed to clean 'proxy65' table: ~p", [Err]),
Err
end.
register_stream(SID, Pid) ->
PidS = misc:encode_pid(Pid),
NodeS = erlang:atom_to_binary(node(Pid), latin1),
F = fun() ->
case ejabberd_sql:sql_query_t(
?SQL("update proxy65 set pid_i=%(PidS)s, "
"node_i=%(NodeS)s where sid=%(SID)s")) of
{updated, 1} ->
ok;
_ ->
ejabberd_sql:sql_query_t(
?SQL("insert into proxy65"
"(sid, pid_t, node_t, pid_i, node_i, jid_i) "
"values (%(SID)s, %(PidS)s, %(NodeS)s, '', '', '')"))
end
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, _} ->
ok;
{aborted, Reason} ->
{error, Reason}
end.
unregister_stream(SID) ->
F = fun() ->
ejabberd_sql:sql_query_t(
?SQL("delete from proxy65 where sid=%(SID)s"))
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, _} ->
ok;
{aborted, Reason} ->
{error, Reason}
end.
activate_stream(SID, IJID, MaxConnections, _Node) ->
F = fun() ->
case ejabberd_sql:sql_query_t(
?SQL("select @(pid_t)s, @(node_t)s, @(pid_i)s, "
"@(node_i)s, @(jid_i)s from proxy65 where "
"sid=%(SID)s")) of
{selected, [{TPidS, TNodeS, IPidS, INodeS, <<"">>}]}
when IPidS /= <<"">> ->
try {misc:decode_pid(TPidS, TNodeS),
misc:decode_pid(IPidS, INodeS)} of
{TPid, IPid} ->
case ejabberd_sql:sql_query_t(
?SQL("update proxy65 set jid_i=%(IJID)s "
"where sid=%(SID)s")) of
{updated, 1} when is_integer(MaxConnections) ->
case ejabberd_sql:sql_query_t(
?SQL("select @(count(*))d from proxy65 "
"where jid_i=%(IJID)s")) of
{selected, [{Num}]} when Num > MaxConnections ->
ejabberd_sql:abort({limit, IPid, TPid});
{selected, _} ->
{ok, IPid, TPid};
Err ->
ejabberd_sql:abort(Err)
end;
{updated, _} ->
{ok, IPid, TPid};
Err ->
ejabberd_sql:abort(Err)
end
catch _:{bad_node, _} ->
{error, notfound}
end;
{selected, [{_, _, _, _, JID}]} when JID /= <<"">> ->
{error, conflict};
{selected, _} ->
{error, notfound};
Err ->
ejabberd_sql:abort(Err)
end
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, Result} ->
Result;
{aborted, {limit, _, _} = Limit} ->
{error, Limit};
{aborted, Reason} ->
{error, Reason}
end.
%%%===================================================================
%%% Internal functions
%%%===================================================================
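%% Illustrative sketch (not part of the original module): the proxy65
%% frontend is expected to drive this backend roughly as follows; the
%% variable names are hypothetical.
%%
%%   ok = mod_proxy65_sql:init(),
%%   ok = mod_proxy65_sql:register_stream(SID, TargetPid),
%%   ok = mod_proxy65_sql:register_stream(SID, InitiatorPid),
%%   {ok, IPid, TPid} =
%%       mod_proxy65_sql:activate_stream(SID, InitiatorJID, MaxConns, node()).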
| null | https://raw.githubusercontent.com/jabber-at/ejabberd/7bfec36856eaa4df21b26e879d3ba90285bad1aa/src/mod_proxy65_sql.erl | erlang | -------------------------------------------------------------------
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
===================================================================
=================================================================== | @author < >
Created : 30 Mar 2017 by < >
ejabberd , Copyright ( C ) 2002 - 2018 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
-module(mod_proxy65_sql).
-behaviour(mod_proxy65).
-compile([{parse_transform, ejabberd_sql_pt}]).
-export([init/0, register_stream/2, unregister_stream/1, activate_stream/4]).
-include("logger.hrl").
-include("ejabberd_sql_pt.hrl").
init() ->
NodeS = erlang:atom_to_binary(node(), latin1),
?DEBUG("Cleaning SQL 'proxy65' table...", []),
case ejabberd_sql:sql_query(
ejabberd_config:get_myname(),
?SQL("delete from proxy65 where "
"node_i=%(NodeS)s or node_t=%(NodeS)s")) of
{updated, _} ->
ok;
Err ->
?ERROR_MSG("failed to clean 'proxy65' table: ~p", [Err]),
Err
end.
register_stream(SID, Pid) ->
PidS = misc:encode_pid(Pid),
NodeS = erlang:atom_to_binary(node(Pid), latin1),
F = fun() ->
case ejabberd_sql:sql_query_t(
?SQL("update proxy65 set pid_i=%(PidS)s, "
"node_i=%(NodeS)s where sid=%(SID)s")) of
{updated, 1} ->
ok;
_ ->
ejabberd_sql:sql_query_t(
?SQL("insert into proxy65"
"(sid, pid_t, node_t, pid_i, node_i, jid_i) "
"values (%(SID)s, %(PidS)s, %(NodeS)s, '', '', '')"))
end
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, _} ->
ok;
{aborted, Reason} ->
{error, Reason}
end.
unregister_stream(SID) ->
F = fun() ->
ejabberd_sql:sql_query_t(
?SQL("delete from proxy65 where sid=%(SID)s"))
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, _} ->
ok;
{aborted, Reason} ->
{error, Reason}
end.
activate_stream(SID, IJID, MaxConnections, _Node) ->
F = fun() ->
case ejabberd_sql:sql_query_t(
?SQL("select @(pid_t)s, @(node_t)s, @(pid_i)s, "
"@(node_i)s, @(jid_i)s from proxy65 where "
"sid=%(SID)s")) of
{selected, [{TPidS, TNodeS, IPidS, INodeS, <<"">>}]}
when IPidS /= <<"">> ->
try {misc:decode_pid(TPidS, TNodeS),
misc:decode_pid(IPidS, INodeS)} of
{TPid, IPid} ->
case ejabberd_sql:sql_query_t(
?SQL("update proxy65 set jid_i=%(IJID)s "
"where sid=%(SID)s")) of
{updated, 1} when is_integer(MaxConnections) ->
case ejabberd_sql:sql_query_t(
?SQL("select @(count(*))d from proxy65 "
"where jid_i=%(IJID)s")) of
{selected, [{Num}]} when Num > MaxConnections ->
ejabberd_sql:abort({limit, IPid, TPid});
{selected, _} ->
{ok, IPid, TPid};
Err ->
ejabberd_sql:abort(Err)
end;
{updated, _} ->
{ok, IPid, TPid};
Err ->
ejabberd_sql:abort(Err)
end
catch _:{bad_node, _} ->
{error, notfound}
end;
{selected, [{_, _, _, _, JID}]} when JID /= <<"">> ->
{error, conflict};
{selected, _} ->
{error, notfound};
Err ->
ejabberd_sql:abort(Err)
end
end,
case ejabberd_sql:sql_transaction(ejabberd_config:get_myname(), F) of
{atomic, Result} ->
Result;
{aborted, {limit, _, _} = Limit} ->
{error, Limit};
{aborted, Reason} ->
{error, Reason}
end.
Internal functions
|
9d45863d909c56a507dbadcc06f9c4ae47163b5da72afe33e8dcc6670c8e198e | dbuenzli/rel | rel_sqlite3.ml | ---------------------------------------------------------------------------
(*---------------------------------------------------------------------------
   Copyright (c) 2020 The rel programmers. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
  ---------------------------------------------------------------------------*)
open Rel
(* Circular doubly linked list *)
module Clist = struct
type 'a t =
{ mutable v : 'a option; (* None is for the root. *)
mutable prev : 'a t; (* on root this points to last element. *)
mutable next : 'a t; } (* on root this points to the first element. *)
let root () = let rec root = { v = None; next = root; prev = root } in root
let make_first root n =
n.next.prev <- n.prev; n.prev.next <- n.next;
n.next <- root.next; n.prev <- root;
root.next.prev <- n; root.next <- n
let add_first root d =
let n = { v = Some d; prev = root; next = root.next } in
root.next.prev <- n; root.next <- n; n
let drop_last root =
let last = root.prev in
root.prev <- last.prev; last.prev.next <- root; last.v
end
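(* Illustrative sketch (not part of the original module): [Clist] is used
   below as the recency list behind the statement cache. A hypothetical
   standalone use would look like:

     let l = Clist.root () in
     let n1 = Clist.add_first l 1 in
     ignore (Clist.add_first l 2);
     Clist.make_first l n1;           (* 1 becomes the most recent *)
     assert (Clist.drop_last l = Some 2)
*)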
(* Key-value map with access to lru binding. *)
module Lru_map = struct
type ('a, 'b) t =
{ map : ('a, ('a * 'b) Clist.t) Hashtbl.t;
recent : ('a * 'b) Clist.t; } (* root, last is lru, next is mru. *)
let create ?random size =
{ map = Hashtbl.create ?random size; recent = Clist.root () }
let[@inline] get_value n = snd (Option.get n.Clist.v)
let length c = Hashtbl.length c.map
let find k c = match Hashtbl.find_opt c.map k with
| None -> None
| Some n -> Clist.make_first c.recent n; Some (get_value n)
let add k v c = match Hashtbl.find_opt c.map k with
| Some n -> n.v <- Some (k, v); Clist.make_first c.recent n
| None -> let n = Clist.add_first c.recent (k, v) in Hashtbl.replace c.map k n
let lru c = c.recent.prev.Clist.v
let drop_lru c = match Clist.drop_last c.recent with
| None -> None | Some (k, _) as v -> Hashtbl.remove c.map k; v
let iter f c = Hashtbl.iter (fun k n -> f k (get_value n)) c.map
end
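(* Illustrative sketch (not part of the original module): [Lru_map] backs
   the prepared statement cache further down. A hypothetical direct use:

     let c = Lru_map.create 2 in
     Lru_map.add "a" 1 c; Lru_map.add "b" 2 c;
     ignore (Lru_map.find "a" c);     (* "a" becomes the most recent *)
     assert (Lru_map.drop_lru c = Some ("b", 2))
*)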
(* Thin bindings to SQLite3 *)
module Tsqlite3 = struct
external version_number : unit -> int = "ocaml_rel_sqlite3_version_number"
let version () =
let v = version_number () and s = string_of_int in
let mmaj = 1000000 and mmin = 1000 in
let maj = v / mmaj and min = (v mod mmaj) / mmin in
let patch = (v mod mmaj) mod mmin in
String.concat "." [s maj; s min; s patch]
(* Errors, note that our open' sets the connection to always return extended
error code. *)
type rc = int (* N.B. sqlite defines these as int32 but they are small
                 so that should work on 32-bit platforms too. *)
external errstr : rc -> string = "ocaml_rel_sqlite3_errstr"
(* Database connection *)
type mode = Read | Read_write | Read_write_create | Memory
type mutex = No | Full
type t (* Boxed pointer to sqlite3 struct *)
external _open' :
string -> uri:bool -> mode:mode -> mutex:mutex -> vfs:string ->
(t, rc) result = "ocaml_rel_sqlite3_open"
let open'
?(vfs = "") ?(uri = true) ?(mutex = Full) ?(mode = Read_write_create) f
=
_open' ~vfs ~uri ~mode ~mutex f
external close : t -> rc = "ocaml_rel_sqlite3_close"
external extended_errcode : t -> int = "ocaml_rel_sqlite3_extended_errcode"
external errmsg : t -> string = "ocaml_rel_sqlite3_errmsg"
external busy_timeout : t -> int -> rc = "ocaml_rel_sqlite3_busy_timeout"
external changes : t -> int = "ocaml_rel_sqlite3_changes"
external last_insert_rowid : t -> int64 =
"ocaml_rel_sqlite3_last_insert_rowid"
(* Queries *)
external exec : t -> string -> rc = "ocaml_rel_sqlite3_exec"
(* Prepared statements *)
type stmt (* Boxed pointer to sqlite3_stmt struct *)
external stmt_errmsg : stmt -> string =
"ocaml_rel_sqlite3_stmt_errmsg"
external prepare : t -> string -> (stmt, rc) result =
"ocaml_rel_sqlite3_prepare"
external finalize : stmt -> rc =
"ocaml_rel_sqlite3_finalize"
external reset : stmt -> rc =
"ocaml_rel_sqlite3_reset"
external step : stmt -> rc =
"ocaml_rel_sqlite3_step"
external column_count : stmt -> int =
"ocaml_rel_sqlite3_column_count"
external bind_parameter_count : stmt -> int =
"ocaml_rel_sqlite3_bind_paramater_count"
external bind_null : stmt -> int -> rc =
"ocaml_rel_sqlite3_bind_null"
external bind_bool : stmt -> int -> bool -> rc =
"ocaml_rel_sqlite3_bind_bool"
external bind_int : stmt -> int -> int -> rc =
"ocaml_rel_sqlite3_bind_int"
external bind_int64 : stmt -> int -> int64 -> rc =
"ocaml_rel_sqlite3_bind_int64"
external bind_double : stmt -> int -> float -> rc =
"ocaml_rel_sqlite3_bind_double"
external bind_text : stmt -> int -> string -> rc =
"ocaml_rel_sqlite3_bind_text"
external bind_blob : stmt -> int -> string -> rc =
"ocaml_rel_sqlite3_bind_blob"
external clear_bindings : stmt -> rc =
"ocaml_rel_sqlite3_clear_bindings"
external column_is_null : stmt -> int -> bool =
"ocaml_rel_sqlite3_column_is_null"
external column_bool : stmt -> int -> bool =
"ocaml_rel_sqlite3_column_bool"
external column_int : stmt -> int -> int =
"ocaml_rel_sqlite3_column_int"
external column_int64 : stmt -> int -> int64 =
"ocaml_rel_sqlite3_column_int64"
external column_double : stmt -> int -> float =
"ocaml_rel_sqlite3_column_double"
external column_text : stmt -> int -> string =
"ocaml_rel_sqlite3_column_text"
external column_blob : stmt -> int -> string =
"ocaml_rel_sqlite3_column_blob"
end
(* Errors *)
module Error = struct
(* Result codes *)
type code = Tsqlite3.rc
let code_to_string = Tsqlite3.errstr
(* Errors *)
type t = { code : code; message : string }
let v code message = { code; message }
let code e = e.code
let message e = e.message
(* See *)
let abort_rollback = 516
let busy_recovery = 261
let busy_snapshot = 517
let busy_timeout = 773
let cantopen_convpath = 1038
let cantopen_dirtywal = 1294
let cantopen_fullpath = 782
let cantopen_isdir = 526
let cantopen_notempdir = 270
let cantopen_symlink = 1550
let constraint_check = 275
let constraint_commithook = 531
let constraint_foreignkey = 787
let constraint_function = 1043
let constraint_notnull = 1299
let constraint_pinned = 2835
let constraint_primarykey = 1555
let constraint_rowid = 2579
let constraint_trigger = 1811
let constraint_unique = 2067
let constraint_vtab = 2323
let corrupt_index = 779
let corrupt_sequence = 523
let corrupt_vtab = 267
let error_missing_collseq = 257
let error_retry = 513
let error_snapshot = 769
let ioerr_access = 3338
let ioerr_auth = 7178
let ioerr_begin_atomic = 7434
let ioerr_blocked = 2826
let ioerr_checkreservedlock = 3594
let ioerr_close = 4106
let ioerr_commit_atomic = 7690
let ioerr_convpath = 6666
let ioerr_data = 8202
let ioerr_delete = 2570
let ioerr_delete_noent = 5898
let ioerr_dir_close = 4362
let ioerr_dir_fsync = 1290
let ioerr_fstat = 1802
let ioerr_fsync = 1034
let ioerr_gettemppath = 6410
let ioerr_lock = 3850
let ioerr_mmap = 6154
let ioerr_nomem = 3082
let ioerr_rdlock = 2314
let ioerr_read = 266
let ioerr_rollback_atomic = 7946
let ioerr_seek = 5642
let ioerr_shmlock = 5130
let ioerr_shmmap = 5386
let ioerr_shmopen = 4618
let ioerr_shmsize = 4874
let ioerr_short_read = 522
let ioerr_truncate = 1546
let ioerr_unlock = 2058
let ioerr_vnode = 6922
let ioerr_write = 778
let locked_sharedcache = 262
let locked_vtab = 518
let notice_recover_rollback = 539
let notice_recover_wal = 283
let ok_load_permanently = 256
let readonly_cantinit = 1288
let readonly_cantlock = 520
let readonly_dbmoved = 1032
let readonly_directory = 1544
let readonly_recovery = 264
let readonly_rollback = 776
let warning_autoindex = 284
end
type error = Error.t
let string_error r = Result.map_error Error.message r
let db_error rc db = Error.v rc (Tsqlite3.errmsg db)
let strf = Printf.sprintf
(* Library configuration and information. *)
let version = Tsqlite3.version
(* Low-level statement interface. *)
module Stmt' = struct
(* These functions throw exceptions. *)
let stmt_error rc st = Error.v rc (Tsqlite3.stmt_errmsg st)
let stmt_error_mismatch ~expected:e ~given:g =
let msg = strf "SQL statement has %d variables, only %d were given." e g in
Error.v 1 msg
let stmt_error_var idx rc st =
let msg = strf "var %d: %s" idx (Tsqlite3.stmt_errmsg st) in
Error.v rc msg
let stmt_error_var_encode idx typ err =
let msg = strf "var %d encode %s: %s" idx typ err in
Error.v 1 msg
let col_error_decode idx typ err =
let msg = strf "column %d decode %s: %s" idx typ err in
Error.v 1 msg
exception Error of Error.t
let error err = raise (Error err)
type t =
{ stmt : Tsqlite3.stmt;
col_count : int;
mutable finalized : bool; }
type 'r step = t * 'r Rel_sql.Stmt.t
let validate s = if s.finalized then invalid_arg "finalized statement" else ()
let finalize s = match Tsqlite3.finalize s.stmt with
| 0 -> s.finalized <- true | rc -> error (stmt_error rc s.stmt)
let finalize_noerr s = try finalize s with Failure _ -> ()
let prepare db sql = match Tsqlite3.prepare db sql with
| Error rc -> error (db_error rc db)
| Ok stmt ->
let col_count = Tsqlite3.column_count stmt in
let finalized = false in
{ stmt; col_count; finalized }
let rec bind_arg st idx (Rel_sql.Stmt.Arg (t, v)) = match t with
| Type.Bool -> Tsqlite3.bind_bool st idx v
| Type.Int -> Tsqlite3.bind_int st idx v
| Type.Int64 -> Tsqlite3.bind_int64 st idx v
| Type.Float -> Tsqlite3.bind_double st idx v
| Type.Text -> Tsqlite3.bind_text st idx v
| Type.Blob -> Tsqlite3.bind_blob st idx v
| Type.Option t ->
(match v with
| None -> Tsqlite3.bind_null st idx
| Some v -> bind_arg st idx (Rel_sql.Stmt.Arg (t, v)))
| Type.Coded c ->
(match Type.Coded.enc c v with
| Ok v -> bind_arg st idx (Rel_sql.Stmt.Arg (Type.Coded.repr c, v))
| Error e -> error (stmt_error_var_encode idx (Type.Coded.name c) e))
| _ -> Type.invalid_unknown ()
let bind_args st args =
let rec loop idx st = function
| [] ->
let expected = Tsqlite3.bind_parameter_count st in
let given = idx - 1 in
if expected = given then () else
error (stmt_error_mismatch ~expected ~given)
| arg :: args ->
match bind_arg st idx arg with
| 0 -> loop (idx + 1) st args
| rc -> error (stmt_error_var idx rc st)
in
loop 1 st args
let bind s st =
validate s;
match Tsqlite3.reset s.stmt with
| 0 -> bind_args s.stmt (List.rev (Rel_sql.Stmt.rev_args st))
| rc -> error (stmt_error rc s.stmt)
let rec unpack_col_type : type r c. Tsqlite3.stmt -> int -> c Type.t -> c =
fun s i t -> match t with
| Type.Bool -> Tsqlite3.column_bool s i
| Type.Int -> Tsqlite3.column_int s i
| Type.Int64 -> Tsqlite3.column_int64 s i
| Type.Float -> Tsqlite3.column_double s i
| Type.Text -> Tsqlite3.column_text s i
| Type.Blob -> Tsqlite3.column_blob s i
| Type.Option t ->
if Tsqlite3.column_is_null s i then None else Some (unpack_col_type s i t)
| Type.Coded c ->
let v = unpack_col_type s i (Type.Coded.repr c) in
(match Type.Coded.dec c v with
| Ok v -> v
| Error e -> error (col_error_decode i (Type.Coded.name c) e))
| _ -> Type.invalid_unknown ()
let unpack_col : type r c. Tsqlite3.stmt -> int -> (r, c) Col.t -> c =
fun s i c -> unpack_col_type s i (Col.type' c)
let unpack_row : type r. t -> r Rel_sql.Stmt.t -> r = fun s st ->
let rec cols :
type r a. Tsqlite3.stmt -> int -> (r, a) Rel.Row.Private.prod' -> a
=
fun s idx r -> match r with
| Unit f -> f
| Prod (cs, c) ->
let f = cols s (idx - 1) cs in
f (unpack_col s idx c)
| Cat (cs, _, row) ->
let f =
cols s (idx - Row.col_count (Rel.Row.Private.prod_to_prod row)) cs
in
let v = cols s idx row in
f v
in
let row = Rel.Row.Private.prod_of_prod (Rel_sql.Stmt.result st) in
cols s.stmt (s.col_count - 1) row
let stop s =
(* N.B. we need to reset otherwise things like VACUUM think queries
are still going on. *)
ignore (Tsqlite3.clear_bindings s.stmt);
ignore (Tsqlite3.reset s.stmt)
let step s st = match Tsqlite3.step s.stmt with
SQLITE_DONE
| 100 (* SQLITE_ROW *) -> Some (unpack_row s st)
| rc -> let err = stmt_error rc s.stmt in stop s; error err
let fold s st f acc =
let rec loop s st f acc = match Tsqlite3.step s.stmt with
| 100 (* SQLITE_ROW *) -> loop s st f (f (unpack_row s st) acc)
SQLITE_DONE
| rc -> let err = stmt_error rc s.stmt in stop s; error err
in
loop s st f acc
let first s st =
let r = step s st in
stop s; r
let exec s = match Tsqlite3.step s.stmt with
| 100 | 101 (* SQLITE_{ROW,DONE} *) -> stop s
| rc -> let err = stmt_error rc s.stmt in stop s; error err
end
(* Database connection *)
type t =
{ db : Tsqlite3.t;
mutable stmt_cache_size : int;
mutable stmt_cache : (string, Stmt'.t) Lru_map.t;
mutable closed : bool; }
module Cache = struct
let create size = Lru_map.create ~random:true size
let clear db =
let drop _ st = Stmt'.finalize_noerr st in
Lru_map.iter drop db.stmt_cache;
db.stmt_cache <- create db.stmt_cache_size
let drop db ~count =
let rec loop db count =
if count <= 0 then () else
match Lru_map.drop_lru db.stmt_cache with
| None -> ()
| Some (_, st) -> Stmt'.finalize_noerr st; loop db (count - 1)
in
loop db count
let size db = db.stmt_cache_size
let set_size db size = db.stmt_cache_size <- size; clear db
let find db sql = Lru_map.find sql db.stmt_cache
let add db sql s =
let count = Lru_map.length db.stmt_cache - db.stmt_cache_size + 1 in
drop db ~count;
Lru_map.add sql s db.stmt_cache
let stmt db sql = match find db sql with
| Some s -> s
| None -> let s = Stmt'.prepare db.db sql in add db sql s; s
end
type mode = Tsqlite3.mode = Read | Read_write | Read_write_create | Memory
type mutex = Tsqlite3.mutex = No | Full
let[@inline] validate db =
if db.closed then invalid_arg "connection closed" else ()
let open'
?(foreign_keys = true) ?(stmt_cache_size = 10) ?vfs ?uri ?mutex ?mode f
=
match Tsqlite3.open' ?vfs ?uri ?mode ?mutex f with
| Error rc -> Error (Error.v rc (Error.code_to_string rc))
| Ok db ->
let foreign_keys = strf "PRAGMA foreign_keys = %b" foreign_keys in
let rc = Tsqlite3.exec db foreign_keys in
if rc <> 0 then Error (Error.v rc (Error.code_to_string rc)) else
let stmt_cache = Cache.create stmt_cache_size in
Ok { db; stmt_cache_size; stmt_cache; closed = false }
let close db =
validate db;
Cache.clear db;
match Tsqlite3.close db.db with 0 -> Ok () | rc -> Error (db_error rc db.db)
let busy_timeout_ms db dur =
validate db;
match Tsqlite3.busy_timeout db.db dur with
| 0 -> Ok () | rc -> Error (db_error rc db.db)
let changes db = validate db; Tsqlite3.changes db.db
let last_insert_rowid db = validate db; Tsqlite3.last_insert_rowid db.db
let stmt_cache_size = Cache.size
let set_stmt_cache_size = Cache.set_size
let clear_stmt_cache = Cache.clear
(* SQL execution *)
let exec_sql db sql =
validate db;
match Tsqlite3.exec db.db sql with
| 0 -> Ok () | rc -> Error (db_error rc db.db)
let fold db st f acc =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.fold s st f acc)
with
| Stmt'.Error e -> Error e
let first db st =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.first s st)
with
| Stmt'.Error e -> Error e
let exec db st =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.exec s)
with
| Stmt'.Error e -> Error e
type transaction_kind = [ `Deferred | `Immediate | `Exclusive ]
let with_transaction kind db f =
validate db;
let kind = match kind with
| `Deferred -> "DEFERRED"
| `Immediate -> "IMMEDIATE"
| `Exclusive -> "EXCLUSIVE"
in
let start () = Tsqlite3.exec db.db (strf "BEGIN %s TRANSACTION" kind) in
let commit () = Tsqlite3.exec db.db "COMMIT TRANSACTION" in
let abort_noerr () = ignore (Tsqlite3.exec db.db "ROLLBACK TRANSACTION") in
match start () with
| rc when rc <> 0 -> Error (db_error rc db.db)
| _0 ->
match f db with
| exception exn ->
let bt = Printexc.get_raw_backtrace () in
abort_noerr ();
Printexc.raise_with_backtrace exn bt
| Error _ as e ->
abort_noerr (); Ok e
| Ok _ as v ->
match commit () with
| rc when rc <> 0 -> abort_noerr (); Error (db_error rc db.db)
| _0 -> Ok v
let explain ?(query_plan = false) db st =
validate db;
try
let explain = if query_plan then "EXPLAIN QUERY PLAN " else "EXPLAIN " in
(* Maybe we should skip the cache. *)
let src = explain ^ Rel_sql.Stmt.src st in
let rev_args = Rel_sql.Stmt.rev_args st in
let result = Row.(t1 (text "explanation")) in
let st = Rel_sql.Stmt.v src ~rev_args ~result in
let s = Cache.stmt db src in (* XXX skip the cache ? *)
Stmt'.bind s st;
let lines = List.rev (Stmt'.fold s st List.cons []) in
Ok (String.concat "\n" lines)
with
| Stmt'.Error e -> Error e
(* Statements *)
module Stmt = struct
type db = t
type t = Stmt'.t
type 'a step = 'a Stmt'.step
let create db sql =
validate db;
try Ok (Stmt'.prepare db.db sql) with
| Stmt'.Error e -> Error e
let start s sb = try (Stmt'.bind s sb; Ok (s, sb)) with
| Stmt'.Error e -> Error e
let step (s, st) = try Ok (Stmt'.step s st) with
| Stmt'.Error e -> Error e
let finalize s = try Ok (Stmt'.finalize s) with
| Stmt'.Error e -> Error e
end
(* SQL *)
module Dialect = struct
let kind = "sqlite3"
let sqlid = Rel_sql.Syntax.id
let sqlid_in_schema = Rel_sql.Syntax.id_in_schema
let rec insert_columns ~ignore:ign i rev_cols rev_vars rev_args cols =
let ignore c =
List.exists (fun (Rel.Col.V i) -> Rel.Col.equal_name i c) ign
in
match cols with
| [] ->
let cols = List.rev rev_cols and vars = List.rev rev_vars in
i, String.concat ", " cols, String.concat ", " vars, rev_args
| Rel.Col.Value (col, _) :: cols when ignore col ->
insert_columns ~ignore:ign i rev_cols rev_vars rev_args cols
| Rel.Col.Value (col, v) :: cols ->
let c = sqlid (Rel.Col.name col) in
let var = "?" ^ string_of_int i in
let arg = Rel_sql.Stmt.Arg (Col.type' col, v) in
insert_columns ~ignore:ign (i + 1)
(c :: rev_cols) (var :: rev_vars) (arg :: rev_args) cols
let insert_into_cols ?schema ?(ignore = []) t cols =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let i, cols, vars, rev_args = insert_columns ~ignore 1 [] [] [] cols in
let sql = ["INSERT INTO "; table; " ("; cols; ")\nVALUES ("; vars; ")"] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
let rec bind_columns ~sep i rev_cols rev_args = function
| [] -> i, String.concat sep (List.rev rev_cols), rev_args
| Rel.Col.Value (col, v) :: cols ->
let col_name c = sqlid (Rel.Col.name col)in
let set_col = String.concat "" [col_name col; " = ?"; string_of_int i] in
let arg = Rel_sql.Stmt.Arg (Col.type' col, v) in
bind_columns ~sep (i + 1) (set_col :: rev_cols) (arg :: rev_args) cols
let update ?schema t ~set:cols ~where =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let i, columns, rev_args = bind_columns ~sep:", " 1 [] [] cols in
let _, where, rev_args = bind_columns ~sep:" AND " i [] rev_args where in
let sql = ["UPDATE "; table; " SET "; columns; " WHERE "; where ] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
let delete_from ?schema t ~where =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let _, where, rev_args = bind_columns ~sep:" AND " 1 [] [] where in
let sql = ["DELETE FROM "; table; " WHERE "; where ] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
(* Data definition statements *)
let ext c dir = match c with None -> "" | Some () -> dir
let if_exists_ext c = ext c " IF EXISTS"
let if_not_exists_ext c = ext c " IF NOT EXISTS"
let col_id c = sqlid (Col.name' c)
let pp_strf = Format.asprintf
let pp_comma ppf () = Format.fprintf ppf ",@ "
let pp_col_name ppf c = Format.pp_print_string ppf (col_id c)
let pp_col_names ppf cs =
(Format.pp_print_list ~pp_sep:pp_comma pp_col_name) ppf cs
let err_kind s ~kind = strf "%S: not a %s literal" s kind
let bool_to_literal = function true -> "TRUE" | false -> "FALSE"
let bool_of_literal = function
| "0" | "TRUE" -> Ok true | "1" | "FALSE" -> Ok false
| s -> Error (err_kind s ~kind:"bool")
let int_to_literal = Int.to_string
let int_of_literal s = match int_of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"int")
let int64_to_literal = Int64.to_string
let int64_of_literal s = match Int64.of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"int64")
let float_to_literal = Float.to_string
let float_of_literal s = match Float.of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"float")
let text_to_literal v = Rel_sql.Syntax.string_to_literal v
let text_of_literal s = Rel_sql.Syntax.string_of_literal s
let blob_to_literal s =
let lower_hex_digit n =
let n = n land 0xF in
Char.unsafe_chr (if n < 10 then 0x30 + n else 0x57 + n)
in
let rec loop max s i h k = match i > max with
| true -> Bytes.unsafe_to_string h
| false ->
let byte = Char.code s.[i] in
Bytes.set h k (lower_hex_digit (byte lsr 4));
Bytes.set h (k + 1) (lower_hex_digit byte);
loop max s (i + 1) h (k + 2)
in
let len = String.length s in
let h = Bytes.create (2 * len + 3) in
Bytes.set h 0 'x';
Bytes.set h 1 '\'';
Bytes.set h (Bytes.length h - 1) '\'';
loop (len - 1) s 0 h 2
let blob_of_literal s =
try
let hex_value s i = match s.[i] with
| '0' .. '9' as c -> Char.code c - 0x30
| 'A' .. 'F' as c -> 10 + (Char.code c - 0x41)
| 'a' .. 'f' as c -> 10 + (Char.code c - 0x61)
| _ -> failwith (strf "%S:%d: Not an ASCII hexadecimal digit" s i)
in
let len = String.length s in
let hex_len = len - 3 in
if len < 3 || not (s.[0] = 'x' || s.[0] = 'X') || s.[1] <> '\'' ||
s.[len - 1] <> '\''
then failwith (strf "%S: Not a blob literal (missing x or ')" s)
else if (hex_len mod 2) <> 0
then failwith (strf "%S:%d: Missing final hex digit" s (len - 2))
else
let rec loop max b i h k = match i > max with
| true -> Ok (Bytes.unsafe_to_string b)
| false ->
let hi = hex_value h k and lo = hex_value h (k + 1) in
Bytes.set b i (Char.chr @@ (hi lsl 4) lor lo);
loop max b (i + 1) h (k + 2)
in
let b_len = hex_len / 2 in
let b = Bytes.create b_len in
loop (b_len - 1) b 0 s 2
with Failure e -> Error e
let rec type_of_type : type a. a Type.t -> string * bool = function
N.B. if we create databases in strict mode we can no longer
distinguish between the first three types .
distinguish between the first three types. *)
| Type.Bool -> "BOOL", true (* not null *)
| Type.Int -> "INTEGER", true
| Type.Int64 -> "BIGINT", true
| Type.Float -> "REAL", true
| Type.Text -> "TEXT", true
| Type.Blob -> "BLOB", true
| Type.Option t -> fst (type_of_type t), false
| Type.Coded c -> type_of_type (Type.Coded.repr c)
| _ -> Type.invalid_unknown ()
let rec const_of_literal : type a. a Type.t -> string -> (a, string) result =
fun t s -> match t with
| Type.Bool -> bool_of_literal s
| Type.Int -> int_of_literal s
| Type.Int64 -> int64_of_literal s
| Type.Float -> float_of_literal s
| Type.Text -> text_of_literal s
| Type.Blob -> blob_of_literal s
| Type.Option t ->
if String.uppercase_ascii s = "NULL"
then Ok None
else Result.map Option.some (const_of_literal t s)
| Type.Coded c ->
begin match const_of_literal (Type.Coded.repr c) s with
| Ok v -> Rel.Type.Coded.dec c v
| Error e -> Error (strf "%s literal: %s" (Type.Coded.name c) e)
end
| _ -> Rel.Type.invalid_unknown ()
FIXME streamline with Rel_query , this should be part of dialect .
let rec const_to_literal : type a. a Rel.Type.t -> a -> string =
fun t v -> match t with
| Type.Bool -> bool_to_literal v
| Type.Int -> int_to_literal v
| Type.Int64 -> int64_to_literal v
| Type.Float -> float_to_literal v
| Type.Text -> text_to_literal v
| Type.Blob -> blob_to_literal v
| Type.Option t ->
(match v with None -> "NULL" | Some v -> const_to_literal t v)
| Type.Coded c ->
(match Rel.Type.Coded.enc c v with
| Ok v -> const_to_literal (Rel.Type.Coded.repr c) v
| Error e ->
let name = Rel.Type.Coded.name c in
invalid_arg (strf "invalid %s constant %s" name e))
| _ -> Rel.Type.invalid_unknown ()
let col_def (Col.V col) =
let name = sqlid (Col.name col) in
let type' = Rel.Col.type' col in
let typ, not_null = type_of_type type' in
let not_null = if not_null then " NOT NULL" else "" in
let default = match Col.default col with
| None -> ""
| Some (`Expr expr) -> strf " DEFAULT (%s)" expr
| Some (`Value v) -> strf " DEFAULT %s" (const_to_literal type' v)
in
strf "%s %s%s%s" name typ not_null default
let foreign_key ?schema t fk =
let parent fk =
let name, cs = match Table.Foreign_key.parent fk with
| Parent (`Self, cs) -> Table.name t, pp_strf "@[<h>%a@]" pp_col_names cs
| Parent (`Table t, cs) ->
Table.name t, pp_strf "@[<h>%a@]" pp_col_names cs
in
let name = sqlid_in_schema ?schema name in
strf " REFERENCES %s (%s)" name cs
in
let action act a = match a with
| None -> "" | Some a ->
strf " %s %s" act (Rel_sql.Syntax.foreign_key_action_keyword a)
in
pp_strf "FOREIGN KEY (@[<h>%a@])%s%s%s"
pp_col_names (Table.Foreign_key.cols fk)
(parent fk)
(action "ON UPDATE" (Table.Foreign_key.on_update fk))
(action "ON DELETE" (Table.Foreign_key.on_delete fk))
let unique_key k =
pp_strf "UNIQUE (@[<h>%a@])" pp_col_names (Table.Unique_key.cols k)
let create_table ?schema ?if_not_exists t =
let if_not_exists = if_not_exists_ext if_not_exists in
let name = Table.name t in
let name = sqlid_in_schema ?schema name in
let cols = List.map col_def (Table.cols t) in
let uniques = List.map unique_key (Table.unique_keys t) in
let primary_key = match Table.primary_key t with
| None -> []
| Some pk -> [pp_strf "PRIMARY KEY (@[<h>%a@])" pp_col_names pk]
in
let fks = List.map (foreign_key ?schema t) (Table.foreign_keys t) in
let defs = cols @ primary_key @ uniques @ fks in
let sql =
Would be nice to create tables in STRICT mode but then we can no
longer distinguish between bool , int and int64
longer distinguish between bool, int and int64 *)
pp_strf "@[<v2>CREATE TABLE%s %s (@,%a@]@,);"
if_not_exists name
(Format.pp_print_list ~pp_sep:pp_comma Format.pp_print_string) defs
in
Rel_sql.Stmt.(func sql @@ unit)
let create_index ?schema ?if_not_exists t i =
let pp_index_col ppf c =
let name = sqlid (Col.name' c) in
Format.fprintf ppf "%s" name
let ord = match Rel_sql . Index . c with
| None - > " "
| Some o - > " " ^ Rel_sql . Index . in
Format.fprintf ppf " % s%s " name ord
let ord = match Rel_sql.Index.Col.sort_order c with
| None -> ""
| Some o -> " " ^ Rel_sql.Index.Col.sort_order_to_kwd o
in
Format.fprintf ppf "%s%s" name ord
*)
in
let unique = if Table.Index.unique i then " UNIQUE" else "" in
let if_not_exists = if_not_exists_ext if_not_exists in
let name = Table.Index.get_name ~table_name:(Table.name t) i in
let name = sqlid_in_schema ?schema name in
let table_name = sqlid_in_schema ?schema (Table.name t) in
let cols = Table.Index.cols i in
let sql =
pp_strf "@[<v2>CREATE%s INDEX%s %s ON %s @[<1>(%a)@];@]"
unique if_not_exists name table_name
(Format.pp_print_list ~pp_sep:pp_comma pp_index_col) cols
in
Rel_sql.Stmt.(func sql @@ unit)
let drop_table ?schema ?if_exists t =
let if_exists = if_exists_ext if_exists in
let name = sqlid_in_schema ?schema (Table.name t) in
let sql = strf "DROP TABLE%s %s;" if_exists name in
Rel_sql.Stmt.(func sql @@ unit)
let drop_index ?schema ?if_exists t i =
let if_exists = if_exists_ext if_exists in
let name = Table.Index.get_name ~table_name:(Table.name t) i in
let name = sqlid_in_schema ?schema name in
let sql = strf "DROP INDEX%s %s;" if_exists name in
Rel_sql.Stmt.(func sql @@ unit)
let insert_or_action = function
| `Abort -> " OR ABORT" | `Fail -> " OR FAIL" | `Ignore -> " OR IGNORE"
| `Replace -> " OR REPLACE" | `Rollback -> " OR ROLLBACK"
let insert_into ?or_action ?schema ?(ignore = []) t =
let ignore c =
List.exists (fun (Rel.Col.V i) -> Rel.Col.equal_name i c) ignore
in
let rec loop :
type r a.
(r, a) Rel.Row.Private.prod' ->
r Rel.Col.v list * (r -> unit Rel_sql.Stmt.t) Rel_sql.Stmt.func
= function
| Unit _ -> [], Rel_sql.Stmt.nop (Rel_sql.Stmt.ret_rev Rel.Row.empty)
| Prod (r, c) ->
let ns, f = loop r in
if ignore c then ns, f else (Rel.Col.V c :: ns, Rel_sql.Stmt.col c f)
| Cat (r, proj', row) -> failwith "TODO"
in
let cs, f = loop (Rel.Row.Private.prod_of_prod (Rel.Table.row t)) in
let cs = List.rev cs in
let vars = List.mapi (fun i _ -> "?" ^ string_of_int (i + 1)) cs in
let or_action = Option.fold ~none:"" ~some:insert_or_action or_action in
let sql =
let pp_vars ppf vs =
Format.pp_open_hbox ppf ();
Format.pp_print_list ~pp_sep:pp_comma Format.pp_print_string ppf vs;
Format.pp_close_box ppf ()
in
let name = sqlid_in_schema ?schema (Rel.Table.name t) in
pp_strf "@[<v>INSERT%s INTO %s (@[<v>%a@])@,VALUES (%a)@]"
or_action name pp_col_names cs pp_vars vars
in
Rel_sql.Stmt.func sql f
(* Schema alterations, see *)
let new_columns cs =
let add_new_col acc = function
| Table.Add_column_after (c, _) -> c :: acc | _ -> acc
in
List.fold_left add_new_col [] cs
let stmt fmt =
Format.kasprintf (fun sql -> Rel_sql.Stmt.(func sql unit)) fmt
let table_changes_stmts ?schema acc t cs =
let tmp = Table.with_name t ("_rel_" ^ Table.name t) in
let t_id = sqlid (Table.name t) in
let t_sid = sqlid_in_schema ?schema (Table.name t) in
let tmp_sid = sqlid_in_schema ?schema (Table.name tmp) in
let acc = stmt
"-- Alter table %s\nPRAGMA foreign_keys = OFF;" (Table.name t) ::
acc
in
let acc = create_table ?schema tmp :: acc in
let acc =
let cols = Table.cols ~ignore:(new_columns cs) t in
stmt
"@[<v>INSERT INTO %s (@[%a@])@, SELECT @[%a@]@, FROM %s WHERE true;@]"
tmp_sid pp_col_names cols pp_col_names cols t_sid :: acc
in
let acc = stmt "DROP TABLE %s;" t_sid :: acc in
let acc = stmt "ALTER TABLE %s RENAME TO %s;" tmp_sid t_id :: acc in
let acc =
let add acc i = create_index ?schema t i :: acc in
List.fold_left add acc (Table.indices t)
in
let acc =
let schema = match schema with None -> "" | Some i -> sqlid i ^ "." in
stmt "PRAGMA %sforeign_key_check (%s);" schema t_id ::
stmt "PRAGMA %sintegrity_check (%s);" schema t_id :: acc
in
(* problem: client maybe had it off *)
stmt "PRAGMA foreign_keys = ON;\n" :: acc
let schema_changes ?schema (cs : Schema.change list) =
let add acc = function
| Schema.Alter_table (t, cs) -> table_changes_stmts ?schema acc t cs
| Create_table t ->
let is = List.map (create_index ?schema t) (Table.indices t) in
List.rev_append is (create_table ?schema t :: acc)
| Drop_table t -> stmt "DROP TABLE %s;" (sqlid_in_schema ?schema t) :: acc
| Rename_column (t, (src, dst)) ->
let t = sqlid_in_schema ?schema t in
stmt "ALTER TABLE %s RENAME COLUMN %s TO %s;" t src dst :: acc
| Rename_table (src, dst) ->
let src = sqlid_in_schema ?schema src in
stmt "ALTER TABLE %s RENAME TO %s;" src (sqlid dst) :: acc
in
let stmts = List.fold_left add [] cs in
List.rev stmts
end
let dialect = (module Dialect : Rel_sql.DIALECT)
(* Schema derivation *)
let string_subrange ?(first = 0) ?last s =
let max = String.length s - 1 in
let last = match last with
| None -> max
| Some l when l > max -> max
| Some l -> l
in
let first = if first < 0 then 0 else first in
if first > last then "" else String.sub s first (last - first + 1)
let ( let* ) = Result.bind
let never _ = assert false
let dummy_col name = Col.V (Col.v name Type.Int never)
let err_col tname cname fmt = strf ("Column %s.%s: " ^^ fmt) tname cname
let err_ignoring_default tname cname fmt =
err_col tname cname ("ignoring default: " ^^ fmt)
let err_null_not_null tname cname =
err_ignoring_default tname cname "NULL default on NOT NULL column"
let col_type tname cname not_null default type' =
let some c = Some (Col.V c) in
let parse_default type' s =
if s = "" then None else
match Dialect.const_of_literal type' s with
| Error _ -> Some (`Expr s) | Ok v -> Some (`Value v)
in
match not_null with
| true ->
let default = parse_default type' default in
some (Col.v ?default cname type' never)
| false ->
let type' = Type.(Option type') in
let default = parse_default type' default in
some (Col.v ?default cname type' never)
let col_spec tname cname type' not_null default issues =
match String.uppercase_ascii type' with
| "BOOL" | "BOOLEAN" ->
col_type tname cname not_null default Type.Bool, issues
| "INT" | "INTEGER" | "TINYINT" | "SMALLINT" | "MEDIUMINT" |"INT2" | "INT8" ->
col_type tname cname not_null default Type.Int, issues
| "BIGINT" | "UNSIGNED BIG INT" ->
col_type tname cname not_null default Type.Int64, issues
| "REAL" | "DOUBLE" | "DOUBLE PRECISION" | "FLOAT" | "NUMERIC" ->
col_type tname cname not_null default Type.Float, issues
| "TEXT" | "CLOB" ->
col_type tname cname not_null default Type.Text, issues
| "BLOB" | "" ->
col_type tname cname not_null default Type.Blob, issues
| "DATETIME" | "DATE" ->
col_type tname cname not_null default Type.Float, issues
| s ->
let err_drop s =
err_col tname cname "dropping : cannot parse type '%s'" type'
in
match String.index s '(' with
| exception Not_found -> None, (err_drop s :: issues)
| i ->
match string_subrange ~last:(i - 1) s with
| "CHARACTER" | "VARCHAR" | "VARYING CHARACTER"
| "NCHAR" | "NATIVE CHARACTER" |"NVARCHAR" ->
col_type tname cname not_null default Type.Text, issues
| "DECIMAL" | "NUMERIC" ->
col_type tname cname not_null default Type.Float, issues
| _ -> None, (err_drop s :: issues)
let table_cols db name issues =
let rec cols pk cs issues = function
| [] ->
let pk = match List.map snd (List.sort compare pk) with
| [] -> None | cols -> Some cols
in
Ok (cs, pk, issues)
| (_order, cname, type', not_null, default, pk_index) :: specs ->
let c, issues = col_spec name cname type' not_null default issues in
match c with
| None -> cols pk cs issues specs
| Some c ->
let pk = if Int.equal pk_index 0 then pk else (pk_index, c) :: pk in
cols pk (c :: cs) issues specs
in
let stmt =
let sql = "SELECT * FROM pragma_table_info (?)" in
let spec = Rel.Row.(t6 (int "cid") (text "name") (text "type")
(bool "notnull") (text "dflt_value") (int "pk")) in
Rel_sql.Stmt.(func sql (text @-> (ret spec)))
in
let* specs = fold db (stmt name) List.cons [] in
cols [] [] issues specs
let table_foreign_keys db name issues =
let fk_action tname id when' issues s = match String.uppercase_ascii s with
| "" | "NO ACTION" -> None, issues
| "CASCADE" -> Some (`Cascade), issues
| "SET DEFAULT" -> Some (`Set_default), issues
| "SET NULL" -> Some (`Set_null), issues
| "RESTRICT" -> Some (`Restrict), issues
| act ->
let e =
strf "Table %s: foreign key %d: %s: dropping unkown action %S"
tname id when' act
in
None, (e :: issues)
in
let rec fks acc issues = function
| [] -> Ok (List.rev acc, issues)
| (id, _seq, table, from, to', on_update, on_delete, _match') :: l ->
let rec get_cols child parent = function
| (id', _, _, from, to', _, _, _) :: l when Int.equal id id' ->
get_cols (dummy_col from :: child) (dummy_col to' :: parent) l
| l -> List.rev child, List.rev parent, l
in
let child, parent, l = get_cols [dummy_col from] [dummy_col to'] l in
let on_update, issues = fk_action name id "ON UPDATE" issues on_update in
let on_delete, issues = fk_action name id "ON DELETE" issues on_delete in
let fk =
let parent = match table = name with
| true -> Table.Foreign_key.Parent (`Self, parent)
| false ->
Table.Foreign_key.Parent (`Table (Table.v table Row.empty), parent)
in
Table.Foreign_key.v ?on_delete ?on_update ~cols:child ~parent:parent ()
in
fks (fk :: acc) issues l
in
let stmt =
let sql = "SELECT * FROM pragma_foreign_key_list (?) ORDER BY id, seq;" in
let row id seq table from to' on_update on_delete match' =
(id, seq, table, from, to', on_update, on_delete, match')
in
let fk_part =
Rel.Row.(unit row * (int "id") * (int "seq") * (text "table") *
(text "from") * (text "to") * (text "on_update") *
(text "on_delete") * (text "match")) in
Rel_sql.Stmt.(func sql (text @-> (ret fk_part)))
in
let* fk_parts = fold db (stmt name) List.cons [] in
fks [] issues fk_parts (* No List.rev, seems ids are in rev source order. *)
let index_cols db name =
let col (_, _, name) = dummy_col name in
let stmt =
let sql = "SELECT * FROM pragma_index_info (?) ORDER BY seqno" in
let icol = Rel.Row.(t3 (int "seqno") (int "cid") (text "name")) in
Rel_sql.Stmt.(func sql (text @-> (ret icol)))
in
let* cols = fold db (stmt name) List.cons [] in
Ok (List.rev_map col cols)
let table_indices db tname =
let rec indices is us = function
| [] -> Ok (List.rev is, List.rev us)
| (_seq, name, unique, origin, _partial) :: specs ->
let* cols = index_cols db name in
let name =
if name = Rel.Table.Index.auto_name ~table_name:tname cols
then None
else Some name
in
match origin with
| "c" -> indices (Rel.Table.index ?name ~unique cols :: is) us specs
| "u" -> indices is (Rel.Table.unique_key cols :: us) specs
| _ -> indices is us specs
in
let stmt =
let sql = "SELECT * FROM pragma_index_list (?) ORDER BY seq" in
let spec =
Rel.Row.(t5 (int "seq") (text "name") (bool "unique")
(text "origin") (bool "partial"))
in
Rel_sql.Stmt.(func sql (text @-> (ret spec)))
in
let* specs = fold db (stmt tname) List.cons [] in
indices [] [] specs (* No List.rev, seems ids are in rev source order. *)
let table db name issues =
let* cols, primary_key, issues = table_cols db name issues in
let row = Rel.Row.Private.row_of_cols cols in
let* indices, unique_keys = table_indices db name in
let* foreign_keys, issues = table_foreign_keys db name issues in
Ok (Rel.Table.v name row ?primary_key ~unique_keys ~foreign_keys ~indices,
issues)
let rec tables db ts issues = function
| [] -> Ok (List.rev ts, List.rev issues)
| (name, _sql) :: names ->
let* t, issues = table db name issues in
tables db ((Table.V t) :: ts) issues names
let table_names db =
let stmt =
let sql = "SELECT t.name, t.sql FROM sqlite_master AS t \
WHERE t.type = 'table' AND t.name NOT LIKE 'sqlite_%'" in
let cols = Rel.Row.(t2 (text "name") (text "sql")) in
Rel_sql.Stmt.(func sql (ret cols))
in
let* names = fold db stmt List.cons [] in
Ok (List.rev names)
let schema_of_db ?schema:name db =
let* names = table_names db in
let* tables, issues = tables db [] [] names in
Ok (Rel.Schema.v ?name ~tables (), issues)
---------------------------------------------------------------------------
Copyright ( c ) 2020 The rel programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2020 The rel programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/rel/004a5582fe88c2dead0ddaf7a6adbe8da5956bde/src/rel_sqlite3.ml | ocaml | Circular doubly linked list
None is for the root.
on root this points to last element.
Key-value map with access to lru binding.
Errors, note that our open' sets the connection to always return extended
error code.
Database connection
Boxed pointer to sqlite3 struct
Errors
Result codes
Errors
See
Low-level statement interface.
These functions throw exceptions.
N.B. we need to reset otherwise things like VACUUM think queries
are still going on.
SQLITE_ROW
SQLITE_ROW
SQLITE_{ROW,DONE}
Database connection
SQL execution
Maybe we should skip the cache.
XXX skip the cache ?
Statements
SQL
Data definition statements
not null
Schema alterations, see
problem: client maybe had it off
Schema derivation
No List.rev, seems ids are in rev source order.
No List.rev, seems ids are in rev source order. | ---------------------------------------------------------------------------
Copyright ( c ) 2020 The rel programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2020 The rel programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Rel
module Clist = struct
type 'a t =
on root this points to the first element .
let root () = let rec root = { v = None; next = root; prev = root } in root
let make_first root n =
n.next.prev <- n.prev; n.prev.next <- n.next;
n.next <- root.next; n.prev <- root;
root.next.prev <- n; root.next <- n
let add_first root d =
let n = { v = Some d; prev = root; next = root.next } in
root.next.prev <- n; root.next <- n; n
let drop_last root =
let last = root.prev in
root.prev <- last.prev; last.prev.next <- root; last.v
end
module Lru_map = struct
type ('a, 'b) t =
{ map : ('a, ('a * 'b) Clist.t) Hashtbl.t;
root , last is lru , next is mru .
let create ?random size =
{ map = Hashtbl.create ?random size; recent = Clist.root () }
let[@inline] get_value n = snd (Option.get n.Clist.v)
let length c = Hashtbl.length c.map
let find k c = match Hashtbl.find_opt c.map k with
| None -> None
| Some n -> Clist.make_first c.recent n; Some (get_value n)
let add k v c = match Hashtbl.find_opt c.map k with
| Some n -> n.v <- Some (k, v); Clist.make_first c.recent n
| None -> let n = Clist.add_first c.recent (k, v) in Hashtbl.replace c.map k n
let lru c = c.recent.prev.Clist.v
let drop_lru c = match Clist.drop_last c.recent with
| None -> None | Some (k, _) as v -> Hashtbl.remove c.map k; v
let iter f c = Hashtbl.iter (fun k n -> f k (get_value n)) c.map
end
Thin bindings to SQLite3
module Tsqlite3 = struct
external version_number : unit -> int = "ocaml_rel_sqlite3_version_number"
let version () =
let v = version_number () and s = string_of_int in
let mmaj = 1000000 and mmin = 1000 in
let maj = v / mmaj and min = (v mod mmaj) / mmin in
let patch = (v mod mmaj) mod mmin in
String.concat "." [s maj; s min; s patch]
N.B. sqlite defines these as int32 but they are small
so that should work on 32 - bit platforms too .
so that should work on 32-bit platforms too. *)
external errstr : rc -> string = "ocaml_rel_sqlite3_errstr"
type mode = Read | Read_write | Read_write_create | Memory
type mutex = No | Full
external _open' :
string -> uri:bool -> mode:mode -> mutex:mutex -> vfs:string ->
(t, rc) result = "ocaml_rel_sqlite3_open"
let open'
?(vfs = "") ?(uri = true) ?(mutex = Full) ?(mode = Read_write_create) f
=
_open' ~vfs ~uri ~mode ~mutex f
external close : t -> rc = "ocaml_rel_sqlite3_close"
external extended_errcode : t -> int = "ocaml_rel_sqlite3_extended_errcode"
external errmsg : t -> string = "ocaml_rel_sqlite3_errmsg"
external busy_timeout : t -> int -> rc = "ocaml_rel_sqlite3_busy_timeout"
external changes : t -> int = "ocaml_rel_sqlite3_changes"
external last_insert_rowid : t -> int64 =
"ocaml_rel_sqlite3_last_insert_rowid"
Queries
external exec : t -> string -> rc = "ocaml_rel_sqlite3_exec"
Pepared statements
Boxed pointer to sqlite3_stmt struct
external stmt_errmsg : stmt -> string =
"ocaml_rel_sqlite3_stmt_errmsg"
external prepare : t -> string -> (stmt, rc) result =
"ocaml_rel_sqlite3_prepare"
external finalize : stmt -> rc =
"ocaml_rel_sqlite3_finalize"
external reset : stmt -> rc =
"ocaml_rel_sqlite3_reset"
external step : stmt -> rc =
"ocaml_rel_sqlite3_step"
external column_count : stmt -> int =
"ocaml_rel_sqlite3_column_count"
external bind_parameter_count : stmt -> int =
"ocaml_rel_sqlite3_bind_paramater_count"
external bind_null : stmt -> int -> rc =
"ocaml_rel_sqlite3_bind_null"
external bind_bool : stmt -> int -> bool -> rc =
"ocaml_rel_sqlite3_bind_bool"
external bind_int : stmt -> int -> int -> rc =
"ocaml_rel_sqlite3_bind_int"
external bind_int64 : stmt -> int -> int64 -> rc =
"ocaml_rel_sqlite3_bind_int64"
external bind_double : stmt -> int -> float -> rc =
"ocaml_rel_sqlite3_bind_double"
external bind_text : stmt -> int -> string -> rc =
"ocaml_rel_sqlite3_bind_text"
external bind_blob : stmt -> int -> string -> rc =
"ocaml_rel_sqlite3_bind_blob"
external clear_bindings : stmt -> rc =
"ocaml_rel_sqlite3_clear_bindings"
external column_is_null : stmt -> int -> bool =
"ocaml_rel_sqlite3_column_is_null"
external column_bool : stmt -> int -> bool =
"ocaml_rel_sqlite3_column_bool"
external column_int : stmt -> int -> int =
"ocaml_rel_sqlite3_column_int"
external column_int64 : stmt -> int -> int64 =
"ocaml_rel_sqlite3_column_int64"
external column_double : stmt -> int -> float =
"ocaml_rel_sqlite3_column_double"
external column_text : stmt -> int -> string =
"ocaml_rel_sqlite3_column_text"
external column_blob : stmt -> int -> string =
"ocaml_rel_sqlite3_column_blob"
end
module Error = struct
type code = Tsqlite3.rc
let code_to_string = Tsqlite3.errstr
type t = { code : code; message : string }
let v code message = { code; message }
let code e = e.code
let message e = e.message
let abort_rollback = 516
let busy_recovery = 261
let busy_snapshot = 517
let busy_timeout = 773
let cantopen_convpath = 1038
let cantopen_dirtywal = 1294
let cantopen_fullpath = 782
let cantopen_isdir = 526
let cantopen_notempdir = 270
let cantopen_symlink = 1550
let constraint_check = 275
let constraint_commithook = 531
let constraint_foreignkey = 787
let constraint_function = 1043
let constraint_notnull = 1299
let constraint_pinned = 2835
let constraint_primarykey = 1555
let constraint_rowid = 2579
let constraint_trigger = 1811
let constraint_unique = 2067
let constraint_vtab = 2323
let corrupt_index = 779
let corrupt_sequence = 523
let corrupt_vtab = 267
let error_missing_collseq = 257
let error_retry = 513
let error_snapshot = 769
let ioerr_access = 3338
let ioerr_auth = 7178
let ioerr_begin_atomic = 7434
let ioerr_blocked = 2826
let ioerr_checkreservedlock = 3594
let ioerr_close = 4106
let ioerr_commit_atomic = 7690
let ioerr_convpath = 6666
let ioerr_data = 8202
let ioerr_delete = 2570
let ioerr_delete_noent = 5898
let ioerr_dir_close = 4362
let ioerr_dir_fsync = 1290
let ioerr_fstat = 1802
let ioerr_fsync = 1034
let ioerr_gettemppath = 6410
let ioerr_lock = 3850
let ioerr_mmap = 6154
let ioerr_nomem = 3082
let ioerr_rdlock = 2314
let ioerr_read = 266
let ioerr_rollback_atomic = 7946
let ioerr_seek = 5642
let ioerr_shmlock = 5130
let ioerr_shmmap = 5386
let ioerr_shmopen = 4618
let ioerr_shmsize = 4874
let ioerr_short_read = 522
let ioerr_truncate = 1546
let ioerr_unlock = 2058
let ioerr_vnode = 6922
let ioerr_write = 778
let locked_sharedcache = 262
let locked_vtab = 518
let notice_recover_rollback = 539
let notice_recover_wal = 283
let ok_load_permanently = 256
let readonly_cantinit = 1288
let readonly_cantlock = 520
let readonly_dbmoved = 1032
let readonly_directory = 1544
let readonly_recovery = 264
let readonly_rollback = 776
let warning_autoindex = 284
end
type error = Error.t
let string_error r = Result.map_error Error.message r
let db_error rc db = Error.v rc (Tsqlite3.errmsg db)
let strf = Printf.sprintf
(* Library configuration and information. *)
let version = Tsqlite3.version
module Stmt' = struct
let stmt_error rc st = Error.v rc (Tsqlite3.stmt_errmsg st)
let stmt_error_mismatch ~expected:e ~given:g =
let msg = strf "SQL statement has %d variables, only %d were given." e g in
Error.v 1 msg
let stmt_error_var idx rc st =
let msg = strf "var %d: %s" idx (Tsqlite3.stmt_errmsg st) in
Error.v rc msg
let stmt_error_var_encode idx typ err =
let msg = strf "var %d encode %s: %s" idx typ err in
Error.v 1 msg
let col_error_decode idx typ err =
let msg = strf "column %d decode %s: %s" idx typ err in
Error.v 1 msg
exception Error of Error.t
let error err = raise (Error err)
type t =
{ stmt : Tsqlite3.stmt;
col_count : int;
mutable finalized : bool; }
type 'r step = t * 'r Rel_sql.Stmt.t
let validate s = if s.finalized then invalid_arg "finalized statement" else ()
let finalize s = match Tsqlite3.finalize s.stmt with
| 0 -> s.finalized <- true | rc -> error (stmt_error rc s.stmt)
  let finalize_noerr s = try finalize s with Error _ -> ()
let prepare db sql = match Tsqlite3.prepare db sql with
| Error rc -> error (db_error rc db)
| Ok stmt ->
let col_count = Tsqlite3.column_count stmt in
let finalized = false in
{ stmt; col_count; finalized }
let rec bind_arg st idx (Rel_sql.Stmt.Arg (t, v)) = match t with
| Type.Bool -> Tsqlite3.bind_bool st idx v
| Type.Int -> Tsqlite3.bind_int st idx v
| Type.Int64 -> Tsqlite3.bind_int64 st idx v
| Type.Float -> Tsqlite3.bind_double st idx v
| Type.Text -> Tsqlite3.bind_text st idx v
| Type.Blob -> Tsqlite3.bind_blob st idx v
| Type.Option t ->
(match v with
| None -> Tsqlite3.bind_null st idx
| Some v -> bind_arg st idx (Rel_sql.Stmt.Arg (t, v)))
| Type.Coded c ->
(match Type.Coded.enc c v with
| Ok v -> bind_arg st idx (Rel_sql.Stmt.Arg (Type.Coded.repr c, v))
| Error e -> error (stmt_error_var_encode idx (Type.Coded.name c) e))
| _ -> Type.invalid_unknown ()
let bind_args st args =
let rec loop idx st = function
| [] ->
let expected = Tsqlite3.bind_parameter_count st in
let given = idx - 1 in
if expected = given then () else
error (stmt_error_mismatch ~expected ~given)
| arg :: args ->
match bind_arg st idx arg with
| 0 -> loop (idx + 1) st args
| rc -> error (stmt_error_var idx rc st)
in
loop 1 st args
let bind s st =
validate s;
match Tsqlite3.reset s.stmt with
| 0 -> bind_args s.stmt (List.rev (Rel_sql.Stmt.rev_args st))
| rc -> error (stmt_error rc s.stmt)
let rec unpack_col_type : type r c. Tsqlite3.stmt -> int -> c Type.t -> c =
fun s i t -> match t with
| Type.Bool -> Tsqlite3.column_bool s i
| Type.Int -> Tsqlite3.column_int s i
| Type.Int64 -> Tsqlite3.column_int64 s i
| Type.Float -> Tsqlite3.column_double s i
| Type.Text -> Tsqlite3.column_text s i
| Type.Blob -> Tsqlite3.column_blob s i
| Type.Option t ->
if Tsqlite3.column_is_null s i then None else Some (unpack_col_type s i t)
| Type.Coded c ->
let v = unpack_col_type s i (Type.Coded.repr c) in
(match Type.Coded.dec c v with
| Ok v -> v
| Error e -> error (col_error_decode i (Type.Coded.name c) e))
| _ -> Type.invalid_unknown ()
let unpack_col : type r c. Tsqlite3.stmt -> int -> (r, c) Col.t -> c =
fun s i c -> unpack_col_type s i (Col.type' c)
let unpack_row : type r. t -> r Rel_sql.Stmt.t -> r = fun s st ->
let rec cols :
type r a. Tsqlite3.stmt -> int -> (r, a) Rel.Row.Private.prod' -> a
=
fun s idx r -> match r with
| Unit f -> f
| Prod (cs, c) ->
let f = cols s (idx - 1) cs in
f (unpack_col s idx c)
| Cat (cs, _, row) ->
let f =
cols s (idx - Row.col_count (Rel.Row.Private.prod_to_prod row)) cs
in
let v = cols s idx row in
f v
in
let row = Rel.Row.Private.prod_of_prod (Rel_sql.Stmt.result st) in
cols s.stmt (s.col_count - 1) row
let stop s =
ignore (Tsqlite3.clear_bindings s.stmt);
ignore (Tsqlite3.reset s.stmt)
let step s st = match Tsqlite3.step s.stmt with
  | 101 (* SQLITE_DONE *) -> stop s; None
  | 100 (* SQLITE_ROW *) -> Some (unpack_row s st)
| rc -> let err = stmt_error rc s.stmt in stop s; error err
let fold s st f acc =
let rec loop s st f acc = match Tsqlite3.step s.stmt with
    | 100 (* SQLITE_ROW *) -> loop s st f (f (unpack_row s st) acc)
    | 101 (* SQLITE_DONE *) -> stop s; acc
| rc -> let err = stmt_error rc s.stmt in stop s; error err
in
loop s st f acc
let first s st =
let r = step s st in
stop s; r
let exec s = match Tsqlite3.step s.stmt with
  | 100 | 101 (* SQLITE_ROW | SQLITE_DONE *) -> stop s
  | rc -> let err = stmt_error rc s.stmt in stop s; error err
end
type t =
{ db : Tsqlite3.t;
mutable stmt_cache_size : int;
mutable stmt_cache : (string, Stmt'.t) Lru_map.t;
mutable closed : bool; }
module Cache = struct
let create size = Lru_map.create ~random:true size
let clear db =
let drop _ st = Stmt'.finalize_noerr st in
Lru_map.iter drop db.stmt_cache;
db.stmt_cache <- create db.stmt_cache_size
let drop db ~count =
let rec loop db count =
if count <= 0 then () else
match Lru_map.drop_lru db.stmt_cache with
| None -> ()
| Some (_, st) -> Stmt'.finalize_noerr st; loop db (count - 1)
in
loop db count
let size db = db.stmt_cache_size
let set_size db size = db.stmt_cache_size <- size; clear db
let find db sql = Lru_map.find sql db.stmt_cache
let add db sql s =
let count = Lru_map.length db.stmt_cache - db.stmt_cache_size + 1 in
drop db ~count;
Lru_map.add sql s db.stmt_cache
let stmt db sql = match find db sql with
| Some s -> s
| None -> let s = Stmt'.prepare db.db sql in add db sql s; s
end
type mode = Tsqlite3.mode = Read | Read_write | Read_write_create | Memory
type mutex = Tsqlite3.mutex = No | Full
let[@inline] validate db =
if db.closed then invalid_arg "connection closed" else ()
let open'
?(foreign_keys = true) ?(stmt_cache_size = 10) ?vfs ?uri ?mutex ?mode f
=
match Tsqlite3.open' ?vfs ?uri ?mode ?mutex f with
| Error rc -> Error (Error.v rc (Error.code_to_string rc))
| Ok db ->
let foreign_keys = strf "PRAGMA foreign_keys = %b" foreign_keys in
let rc = Tsqlite3.exec db foreign_keys in
if rc <> 0 then Error (Error.v rc (Error.code_to_string rc)) else
let stmt_cache = Cache.create stmt_cache_size in
Ok { db; stmt_cache_size; stmt_cache; closed = false }
let close db =
validate db;
Cache.clear db;
match Tsqlite3.close db.db with 0 -> Ok () | rc -> Error (db_error rc db.db)
let busy_timeout_ms db dur =
validate db;
match Tsqlite3.busy_timeout db.db dur with
| 0 -> Ok () | rc -> Error (db_error rc db.db)
let changes db = validate db; Tsqlite3.changes db.db
let last_insert_rowid db = validate db; Tsqlite3.last_insert_rowid db.db
let stmt_cache_size = Cache.size
let set_stmt_cache_size = Cache.set_size
let clear_stmt_cache = Cache.clear
let exec_sql db sql =
validate db;
match Tsqlite3.exec db.db sql with
| 0 -> Ok () | rc -> Error (db_error rc db.db)
let fold db st f acc =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.fold s st f acc)
with
| Stmt'.Error e -> Error e
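(* Illustrative sketch, not part of the original code: collecting every row of
   an arbitrary statement [st] into a list with [fold]; the most recently
   fetched row ends up first. *)
let _example_all_rows db st = fold db st List.cons []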
let first db st =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.first s st)
with
| Stmt'.Error e -> Error e
let exec db st =
validate db;
try
let s = Cache.stmt db (Rel_sql.Stmt.src st) in
Stmt'.bind s st; Ok (Stmt'.exec s)
with
| Stmt'.Error e -> Error e
type transaction_kind = [ `Deferred | `Immediate | `Exclusive ]
let with_transaction kind db f =
validate db;
let kind = match kind with
| `Deferred -> "DEFERRED"
| `Immediate -> "IMMEDIATE"
| `Exclusive -> "EXCLUSIVE"
in
let start () = Tsqlite3.exec db.db (strf "BEGIN %s TRANSACTION" kind) in
let commit () = Tsqlite3.exec db.db "COMMIT TRANSACTION" in
let abort_noerr () = ignore (Tsqlite3.exec db.db "ROLLBACK TRANSACTION") in
match start () with
| rc when rc <> 0 -> Error (db_error rc db.db)
| _0 ->
match f db with
| exception exn ->
let bt = Printexc.get_raw_backtrace () in
abort_noerr ();
Printexc.raise_with_backtrace exn bt
| Error _ as e ->
abort_noerr (); Ok e
| Ok _ as v ->
match commit () with
| rc when rc <> 0 -> abort_noerr (); Error (db_error rc db.db)
| _0 -> Ok v
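(* Illustrative sketch, not part of the original code: a typical use of
   [with_transaction]; [ins] stands for any [unit Rel_sql.Stmt.t]. *)
let _example_transaction db ins =
  with_transaction `Immediate db @@ fun db -> exec db ins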
let explain ?(query_plan = false) db st =
validate db;
try
let explain = if query_plan then "EXPLAIN QUERY PLAN " else "EXPLAIN " in
let src = explain ^ Rel_sql.Stmt.src st in
let rev_args = Rel_sql.Stmt.rev_args st in
let result = Row.(t1 (text "explanation")) in
    let st = Rel_sql.Stmt.v src ~rev_args ~result in
    let s = Cache.stmt db src in
    Stmt'.bind s st;
let lines = List.rev (Stmt'.fold s st List.cons []) in
Ok (String.concat "\n" lines)
with
| Stmt'.Error e -> Error e
module Stmt = struct
type db = t
type t = Stmt'.t
type 'a step = 'a Stmt'.step
let create db sql =
validate db;
try Ok (Stmt'.prepare db.db sql) with
| Stmt'.Error e -> Error e
let start s sb = try (Stmt'.bind s sb; Ok (s, sb)) with
| Stmt'.Error e -> Error e
let step (s, st) = try Ok (Stmt'.step s st) with
| Stmt'.Error e -> Error e
let finalize s = try Ok (Stmt'.finalize s) with
| Stmt'.Error e -> Error e
end
module Dialect = struct
let kind = "sqlite3"
let sqlid = Rel_sql.Syntax.id
let sqlid_in_schema = Rel_sql.Syntax.id_in_schema
let rec insert_columns ~ignore:ign i rev_cols rev_vars rev_args cols =
let ignore c =
List.exists (fun (Rel.Col.V i) -> Rel.Col.equal_name i c) ign
in
match cols with
| [] ->
let cols = List.rev rev_cols and vars = List.rev rev_vars in
i, String.concat ", " cols, String.concat ", " vars, rev_args
| Rel.Col.Value (col, _) :: cols when ignore col ->
insert_columns ~ignore:ign i rev_cols rev_vars rev_args cols
| Rel.Col.Value (col, v) :: cols ->
let c = sqlid (Rel.Col.name col) in
let var = "?" ^ string_of_int i in
let arg = Rel_sql.Stmt.Arg (Col.type' col, v) in
insert_columns ~ignore:ign (i + 1)
(c :: rev_cols) (var :: rev_vars) (arg :: rev_args) cols
let insert_into_cols ?schema ?(ignore = []) t cols =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let i, cols, vars, rev_args = insert_columns ~ignore 1 [] [] [] cols in
let sql = ["INSERT INTO "; table; " ("; cols; ")\nVALUES ("; vars; ")"] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
let rec bind_columns ~sep i rev_cols rev_args = function
| [] -> i, String.concat sep (List.rev rev_cols), rev_args
| Rel.Col.Value (col, v) :: cols ->
      let col_name c = sqlid (Rel.Col.name c) in
let set_col = String.concat "" [col_name col; " = ?"; string_of_int i] in
let arg = Rel_sql.Stmt.Arg (Col.type' col, v) in
bind_columns ~sep (i + 1) (set_col :: rev_cols) (arg :: rev_args) cols
let update ?schema t ~set:cols ~where =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let i, columns, rev_args = bind_columns ~sep:", " 1 [] [] cols in
let _, where, rev_args = bind_columns ~sep:" AND " i [] rev_args where in
let sql = ["UPDATE "; table; " SET "; columns; " WHERE "; where ] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
let delete_from ?schema t ~where =
let table = sqlid_in_schema ?schema (Rel.Table.name t) in
let _, where, rev_args = bind_columns ~sep:" AND " 1 [] [] where in
let sql = ["DELETE FROM "; table; " WHERE "; where ] in
let sql = String.concat "" sql in
Rel_sql.Stmt.v sql ~rev_args ~result:Rel.Row.empty
let ext c dir = match c with None -> "" | Some () -> dir
let if_exists_ext c = ext c " IF EXISTS"
let if_not_exists_ext c = ext c " IF NOT EXISTS"
let col_id c = sqlid (Col.name' c)
let pp_strf = Format.asprintf
let pp_comma ppf () = Format.fprintf ppf ",@ "
let pp_col_name ppf c = Format.pp_print_string ppf (col_id c)
let pp_col_names ppf cs =
(Format.pp_print_list ~pp_sep:pp_comma pp_col_name) ppf cs
let err_kind s ~kind = strf "%S: not a %s literal" s kind
let bool_to_literal = function true -> "TRUE" | false -> "FALSE"
let bool_of_literal = function
| "0" | "TRUE" -> Ok true | "1" | "FALSE" -> Ok false
| s -> Error (err_kind s ~kind:"bool")
let int_to_literal = Int.to_string
let int_of_literal s = match int_of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"int")
let int64_to_literal = Int64.to_string
let int64_of_literal s = match Int64.of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"int64")
let float_to_literal = Float.to_string
let float_of_literal s = match Float.of_string_opt s with
| Some i -> Ok i | None -> Error (err_kind s ~kind:"float")
let text_to_literal v = Rel_sql.Syntax.string_to_literal v
let text_of_literal s = Rel_sql.Syntax.string_of_literal s
let blob_to_literal s =
let lower_hex_digit n =
let n = n land 0xF in
Char.unsafe_chr (if n < 10 then 0x30 + n else 0x57 + n)
in
let rec loop max s i h k = match i > max with
| true -> Bytes.unsafe_to_string h
| false ->
let byte = Char.code s.[i] in
Bytes.set h k (lower_hex_digit (byte lsr 4));
Bytes.set h (k + 1) (lower_hex_digit byte);
loop max s (i + 1) h (k + 2)
in
let len = String.length s in
let h = Bytes.create (2 * len + 3) in
Bytes.set h 0 'x';
Bytes.set h 1 '\'';
Bytes.set h (Bytes.length h - 1) '\'';
loop (len - 1) s 0 h 2
let blob_of_literal s =
try
let hex_value s i = match s.[i] with
| '0' .. '9' as c -> Char.code c - 0x30
| 'A' .. 'F' as c -> 10 + (Char.code c - 0x41)
| 'a' .. 'f' as c -> 10 + (Char.code c - 0x61)
| _ -> failwith (strf "%S:%d: Not an ASCII hexadecimal digit" s i)
in
let len = String.length s in
let hex_len = len - 3 in
if len < 3 || not (s.[0] = 'x' || s.[0] = 'X') || s.[1] <> '\'' ||
s.[len - 1] <> '\''
then failwith (strf "%S: Not a blob literal (missing x or ')" s)
else if (hex_len mod 2) <> 0
then failwith (strf "%S:%d: Missing final hex digit" s (len - 2))
else
let rec loop max b i h k = match i > max with
| true -> Ok (Bytes.unsafe_to_string b)
| false ->
let hi = hex_value h k and lo = hex_value h (k + 1) in
Bytes.set b i (Char.chr @@ (hi lsl 4) lor lo);
loop max b (i + 1) h (k + 2)
in
let b_len = hex_len / 2 in
let b = Bytes.create b_len in
loop (b_len - 1) b 0 s 2
with Failure e -> Error e
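  (* Illustrative sketch, not part of the original code: blob literals
     round-trip through the two functions above, e.g. "ab" <-> x'6162'. *)
  let _example_blob_roundtrip () =
    let lit = blob_to_literal "ab" in   (* "x'6162'" *)
    blob_of_literal lit                 (* Ok "ab" *)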
let rec type_of_type : type a. a Type.t -> string * bool = function
  | Type.Bool -> "BOOL", true (* N.B. if we create databases in strict mode
                                 we can no longer distinguish between the
                                 first three types. *)
| Type.Int -> "INTEGER", true
| Type.Int64 -> "BIGINT", true
| Type.Float -> "REAL", true
| Type.Text -> "TEXT", true
| Type.Blob -> "BLOB", true
| Type.Option t -> fst (type_of_type t), false
| Type.Coded c -> type_of_type (Type.Coded.repr c)
| _ -> Type.invalid_unknown ()
let rec const_of_literal : type a. a Type.t -> string -> (a, string) result =
fun t s -> match t with
| Type.Bool -> bool_of_literal s
| Type.Int -> int_of_literal s
| Type.Int64 -> int64_of_literal s
| Type.Float -> float_of_literal s
| Type.Text -> text_of_literal s
| Type.Blob -> blob_of_literal s
| Type.Option t ->
if String.uppercase_ascii s = "NULL"
then Ok None
else Result.map Option.some (const_of_literal t s)
| Type.Coded c ->
begin match const_of_literal (Type.Coded.repr c) s with
| Ok v -> Rel.Type.Coded.dec c v
| Error e -> Error (strf "%s literal: %s" (Type.Coded.name c) e)
end
| _ -> Rel.Type.invalid_unknown ()
  (* FIXME streamline with Rel_query, this should be part of dialect. *)
let rec const_to_literal : type a. a Rel.Type.t -> a -> string =
fun t v -> match t with
| Type.Bool -> bool_to_literal v
| Type.Int -> int_to_literal v
| Type.Int64 -> int64_to_literal v
| Type.Float -> float_to_literal v
| Type.Text -> text_to_literal v
| Type.Blob -> blob_to_literal v
| Type.Option t ->
(match v with None -> "NULL" | Some v -> const_to_literal t v)
| Type.Coded c ->
(match Rel.Type.Coded.enc c v with
| Ok v -> const_to_literal (Rel.Type.Coded.repr c) v
| Error e ->
let name = Rel.Type.Coded.name c in
invalid_arg (strf "invalid %s constant %s" name e))
| _ -> Rel.Type.invalid_unknown ()
let col_def (Col.V col) =
let name = sqlid (Col.name col) in
let type' = Rel.Col.type' col in
let typ, not_null = type_of_type type' in
let not_null = if not_null then " NOT NULL" else "" in
let default = match Col.default col with
| None -> ""
| Some (`Expr expr) -> strf " DEFAULT (%s)" expr
| Some (`Value v) -> strf " DEFAULT %s" (const_to_literal type' v)
in
strf "%s %s%s%s" name typ not_null default
let foreign_key ?schema t fk =
let parent fk =
let name, cs = match Table.Foreign_key.parent fk with
| Parent (`Self, cs) -> Table.name t, pp_strf "@[<h>%a@]" pp_col_names cs
| Parent (`Table t, cs) ->
Table.name t, pp_strf "@[<h>%a@]" pp_col_names cs
in
let name = sqlid_in_schema ?schema name in
strf " REFERENCES %s (%s)" name cs
in
let action act a = match a with
| None -> "" | Some a ->
strf " %s %s" act (Rel_sql.Syntax.foreign_key_action_keyword a)
in
pp_strf "FOREIGN KEY (@[<h>%a@])%s%s%s"
pp_col_names (Table.Foreign_key.cols fk)
(parent fk)
(action "ON UPDATE" (Table.Foreign_key.on_update fk))
(action "ON DELETE" (Table.Foreign_key.on_delete fk))
let unique_key k =
pp_strf "UNIQUE (@[<h>%a@])" pp_col_names (Table.Unique_key.cols k)
let create_table ?schema ?if_not_exists t =
let if_not_exists = if_not_exists_ext if_not_exists in
let name = Table.name t in
let name = sqlid_in_schema ?schema name in
let cols = List.map col_def (Table.cols t) in
let uniques = List.map unique_key (Table.unique_keys t) in
let primary_key = match Table.primary_key t with
| None -> []
| Some pk -> [pp_strf "PRIMARY KEY (@[<h>%a@])" pp_col_names pk]
in
let fks = List.map (foreign_key ?schema t) (Table.foreign_keys t) in
let defs = cols @ primary_key @ uniques @ fks in
let sql =
      (* Would be nice to create tables in STRICT mode but then we can no
         longer distinguish between bool, int and int64 *)
pp_strf "@[<v2>CREATE TABLE%s %s (@,%a@]@,);"
if_not_exists name
(Format.pp_print_list ~pp_sep:pp_comma Format.pp_print_string) defs
in
Rel_sql.Stmt.(func sql @@ unit)
let create_index ?schema ?if_not_exists t i =
let pp_index_col ppf c =
let name = sqlid (Col.name' c) in
Format.fprintf ppf "%s" name
      (*
      let ord = match Rel_sql.Index.Col.sort_order c with
      | None -> ""
      | Some o -> " " ^ Rel_sql.Index.Col.sort_order_to_kwd o
      in
      Format.fprintf ppf "%s%s" name ord
      *)
in
let unique = if Table.Index.unique i then " UNIQUE" else "" in
let if_not_exists = if_not_exists_ext if_not_exists in
let name = Table.Index.get_name ~table_name:(Table.name t) i in
let name = sqlid_in_schema ?schema name in
let table_name = sqlid_in_schema ?schema (Table.name t) in
let cols = Table.Index.cols i in
let sql =
pp_strf "@[<v2>CREATE%s INDEX%s %s ON %s @[<1>(%a)@];@]"
unique if_not_exists name table_name
(Format.pp_print_list ~pp_sep:pp_comma pp_index_col) cols
in
Rel_sql.Stmt.(func sql @@ unit)
let drop_table ?schema ?if_exists t =
let if_exists = if_exists_ext if_exists in
let name = sqlid_in_schema ?schema (Table.name t) in
let sql = strf "DROP TABLE%s %s;" if_exists name in
Rel_sql.Stmt.(func sql @@ unit)
let drop_index ?schema ?if_exists t i =
let if_exists = if_exists_ext if_exists in
let name = Table.Index.get_name ~table_name:(Table.name t) i in
let name = sqlid_in_schema ?schema name in
let sql = strf "DROP INDEX%s %s;" if_exists name in
Rel_sql.Stmt.(func sql @@ unit)
let insert_or_action = function
| `Abort -> " OR ABORT" | `Fail -> " OR FAIL" | `Ignore -> " OR IGNORE"
| `Replace -> " OR REPLACE" | `Rollback -> " OR ROLLBACK"
let insert_into ?or_action ?schema ?(ignore = []) t =
let ignore c =
List.exists (fun (Rel.Col.V i) -> Rel.Col.equal_name i c) ignore
in
let rec loop :
type r a.
(r, a) Rel.Row.Private.prod' ->
r Rel.Col.v list * (r -> unit Rel_sql.Stmt.t) Rel_sql.Stmt.func
= function
| Unit _ -> [], Rel_sql.Stmt.nop (Rel_sql.Stmt.ret_rev Rel.Row.empty)
| Prod (r, c) ->
let ns, f = loop r in
if ignore c then ns, f else (Rel.Col.V c :: ns, Rel_sql.Stmt.col c f)
| Cat (r, proj', row) -> failwith "TODO"
in
let cs, f = loop (Rel.Row.Private.prod_of_prod (Rel.Table.row t)) in
let cs = List.rev cs in
let vars = List.mapi (fun i _ -> "?" ^ string_of_int (i + 1)) cs in
let or_action = Option.fold ~none:"" ~some:insert_or_action or_action in
let sql =
let pp_vars ppf vs =
Format.pp_open_hbox ppf ();
Format.pp_print_list ~pp_sep:pp_comma Format.pp_print_string ppf vs;
Format.pp_close_box ppf ()
in
let name = sqlid_in_schema ?schema (Rel.Table.name t) in
pp_strf "@[<v>INSERT%s INTO %s (@[<v>%a@])@,VALUES (%a)@]"
or_action name pp_col_names cs pp_vars vars
in
Rel_sql.Stmt.func sql f
let new_columns cs =
let add_new_col acc = function
| Table.Add_column_after (c, _) -> c :: acc | _ -> acc
in
List.fold_left add_new_col [] cs
let stmt fmt =
Format.kasprintf (fun sql -> Rel_sql.Stmt.(func sql unit)) fmt
let table_changes_stmts ?schema acc t cs =
let tmp = Table.with_name t ("_rel_" ^ Table.name t) in
let t_id = sqlid (Table.name t) in
let t_sid = sqlid_in_schema ?schema (Table.name t) in
let tmp_sid = sqlid_in_schema ?schema (Table.name tmp) in
let acc = stmt
"-- Alter table %s\nPRAGMA foreign_keys = OFF;" (Table.name t) ::
acc
in
let acc = create_table ?schema tmp :: acc in
let acc =
let cols = Table.cols ~ignore:(new_columns cs) t in
stmt
"@[<v>INSERT INTO %s (@[%a@])@, SELECT @[%a@]@, FROM %s WHERE true;@]"
tmp_sid pp_col_names cols pp_col_names cols t_sid :: acc
in
let acc = stmt "DROP TABLE %s;" t_sid :: acc in
let acc = stmt "ALTER TABLE %s RENAME TO %s;" tmp_sid t_id :: acc in
let acc =
let add acc i = create_index ?schema t i :: acc in
List.fold_left add acc (Table.indices t)
in
let acc =
let schema = match schema with None -> "" | Some i -> sqlid i ^ "." in
stmt "PRAGMA %sforeign_key_check (%s);" schema t_id ::
stmt "PRAGMA %sintegrity_check (%s);" schema t_id :: acc
in
stmt "PRAGMA foreign_keys = ON;\n" :: acc
let schema_changes ?schema (cs : Schema.change list) =
let add acc = function
| Schema.Alter_table (t, cs) -> table_changes_stmts ?schema acc t cs
| Create_table t ->
let is = List.map (create_index ?schema t) (Table.indices t) in
List.rev_append is (create_table ?schema t :: acc)
| Drop_table t -> stmt "DROP TABLE %s;" (sqlid_in_schema ?schema t) :: acc
| Rename_column (t, (src, dst)) ->
let t = sqlid_in_schema ?schema t in
stmt "ALTER TABLE %s RENAME COLUMN %s TO %s;" t src dst :: acc
| Rename_table (src, dst) ->
let src = sqlid_in_schema ?schema src in
stmt "ALTER TABLE %s RENAME TO %s;" src (sqlid dst) :: acc
in
let stmts = List.fold_left add [] cs in
List.rev stmts
end
let dialect = (module Dialect : Rel_sql.DIALECT)
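(* Illustrative sketch, not part of the original code: opening an in-memory
   database and running one raw SQL statement; the SQL text is an example
   value only. *)
let _example_open_and_exec () =
  match open' ~mode:Memory ":memory:" with
  | Error e -> Error e
  | Ok db ->
      match exec_sql db "CREATE TABLE t (x INTEGER);" with
      | Error _ as e -> ignore (close db); e
      | Ok () -> close db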
let string_subrange ?(first = 0) ?last s =
let max = String.length s - 1 in
let last = match last with
| None -> max
| Some l when l > max -> max
| Some l -> l
in
let first = if first < 0 then 0 else first in
if first > last then "" else String.sub s first (last - first + 1)
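(* Illustrative sketch, not part of the original code. *)
let _example_subrange = string_subrange ~first:1 ~last:3 "abcdef"  (* "bcd" *)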
let ( let* ) = Result.bind
let never _ = assert false
let dummy_col name = Col.V (Col.v name Type.Int never)
let err_col tname cname fmt = strf ("Column %s.%s: " ^^ fmt) tname cname
let err_ignoring_default tname cname fmt =
err_col tname cname ("ignoring default: " ^^ fmt)
let err_null_not_null tname cname =
err_ignoring_default tname cname "NULL default on NOT NULL column"
let col_type tname cname not_null default type' =
let some c = Some (Col.V c) in
let parse_default type' s =
if s = "" then None else
match Dialect.const_of_literal type' s with
| Error _ -> Some (`Expr s) | Ok v -> Some (`Value v)
in
match not_null with
| true ->
let default = parse_default type' default in
some (Col.v ?default cname type' never)
| false ->
let type' = Type.(Option type') in
let default = parse_default type' default in
some (Col.v ?default cname type' never)
let col_spec tname cname type' not_null default issues =
match String.uppercase_ascii type' with
| "BOOL" | "BOOLEAN" ->
col_type tname cname not_null default Type.Bool, issues
| "INT" | "INTEGER" | "TINYINT" | "SMALLINT" | "MEDIUMINT" |"INT2" | "INT8" ->
col_type tname cname not_null default Type.Int, issues
| "BIGINT" | "UNSIGNED BIG INT" ->
col_type tname cname not_null default Type.Int64, issues
| "REAL" | "DOUBLE" | "DOUBLE PRECISION" | "FLOAT" | "NUMERIC" ->
col_type tname cname not_null default Type.Float, issues
| "TEXT" | "CLOB" ->
col_type tname cname not_null default Type.Text, issues
| "BLOB" | "" ->
col_type tname cname not_null default Type.Blob, issues
| "DATETIME" | "DATE" ->
col_type tname cname not_null default Type.Float, issues
| s ->
let err_drop s =
err_col tname cname "dropping : cannot parse type '%s'" type'
in
match String.index s '(' with
| exception Not_found -> None, (err_drop s :: issues)
| i ->
match string_subrange ~last:(i - 1) s with
| "CHARACTER" | "VARCHAR" | "VARYING CHARACTER"
| "NCHAR" | "NATIVE CHARACTER" |"NVARCHAR" ->
col_type tname cname not_null default Type.Text, issues
| "DECIMAL" | "NUMERIC" ->
col_type tname cname not_null default Type.Float, issues
| _ -> None, (err_drop s :: issues)
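(* Illustrative sketch, not part of the original code: "VARCHAR(10)" is read
   back as a text column; the table and column names are made up. *)
let _example_col_spec = col_spec "t" "name" "VARCHAR(10)" true "" []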
let table_cols db name issues =
let rec cols pk cs issues = function
| [] ->
let pk = match List.map snd (List.sort compare pk) with
| [] -> None | cols -> Some cols
in
Ok (cs, pk, issues)
| (_order, cname, type', not_null, default, pk_index) :: specs ->
let c, issues = col_spec name cname type' not_null default issues in
match c with
| None -> cols pk cs issues specs
| Some c ->
let pk = if Int.equal pk_index 0 then pk else (pk_index, c) :: pk in
cols pk (c :: cs) issues specs
in
let stmt =
let sql = "SELECT * FROM pragma_table_info (?)" in
let spec = Rel.Row.(t6 (int "cid") (text "name") (text "type")
(bool "notnull") (text "dflt_value") (int "pk")) in
Rel_sql.Stmt.(func sql (text @-> (ret spec)))
in
let* specs = fold db (stmt name) List.cons [] in
cols [] [] issues specs
let table_foreign_keys db name issues =
let fk_action tname id when' issues s = match String.uppercase_ascii s with
| "" | "NO ACTION" -> None, issues
| "CASCADE" -> Some (`Cascade), issues
| "SET DEFAULT" -> Some (`Set_default), issues
| "SET NULL" -> Some (`Set_null), issues
| "RESTRICT" -> Some (`Restrict), issues
| act ->
let e =
strf "Table %s: foreign key %d: %s: dropping unkown action %S"
tname id when' act
in
None, (e :: issues)
in
let rec fks acc issues = function
| [] -> Ok (List.rev acc, issues)
| (id, _seq, table, from, to', on_update, on_delete, _match') :: l ->
let rec get_cols child parent = function
| (id', _, _, from, to', _, _, _) :: l when Int.equal id id' ->
get_cols (dummy_col from :: child) (dummy_col to' :: parent) l
| l -> List.rev child, List.rev parent, l
in
let child, parent, l = get_cols [dummy_col from] [dummy_col to'] l in
let on_update, issues = fk_action name id "ON UPDATE" issues on_update in
let on_delete, issues = fk_action name id "ON DELETE" issues on_delete in
let fk =
let parent = match table = name with
| true -> Table.Foreign_key.Parent (`Self, parent)
| false ->
Table.Foreign_key.Parent (`Table (Table.v table Row.empty), parent)
in
Table.Foreign_key.v ?on_delete ?on_update ~cols:child ~parent:parent ()
in
fks (fk :: acc) issues l
in
let stmt =
let sql = "SELECT * FROM pragma_foreign_key_list (?) ORDER BY id, seq;" in
let row id seq table from to' on_update on_delete match' =
(id, seq, table, from, to', on_update, on_delete, match')
in
let fk_part =
Rel.Row.(unit row * (int "id") * (int "seq") * (text "table") *
(text "from") * (text "to") * (text "on_update") *
(text "on_delete") * (text "match")) in
Rel_sql.Stmt.(func sql (text @-> (ret fk_part)))
in
  let* fk_parts = fold db (stmt name) List.cons [] in
  fks [] issues (List.rev fk_parts)
let index_cols db name =
let col (_, _, name) = dummy_col name in
let stmt =
let sql = "SELECT * FROM pragma_index_info (?) ORDER BY seqno" in
let icol = Rel.Row.(t3 (int "seqno") (int "cid") (text "name")) in
Rel_sql.Stmt.(func sql (text @-> (ret icol)))
in
let* cols = fold db (stmt name) List.cons [] in
Ok (List.rev_map col cols)
let table_indices db tname =
let rec indices is us = function
| [] -> Ok (List.rev is, List.rev us)
| (_seq, name, unique, origin, _partial) :: specs ->
let* cols = index_cols db name in
let name =
if name = Rel.Table.Index.auto_name ~table_name:tname cols
then None
else Some name
in
match origin with
| "c" -> indices (Rel.Table.index ?name ~unique cols :: is) us specs
| "u" -> indices is (Rel.Table.unique_key cols :: us) specs
| _ -> indices is us specs
in
let stmt =
let sql = "SELECT * FROM pragma_index_list (?) ORDER BY seq" in
let spec =
Rel.Row.(t5 (int "seq") (text "name") (bool "unique")
(text "origin") (bool "partial"))
in
Rel_sql.Stmt.(func sql (text @-> (ret spec)))
in
  let* specs = fold db (stmt tname) List.cons [] in
  indices [] [] (List.rev specs)
let table db name issues =
let* cols, primary_key, issues = table_cols db name issues in
let row = Rel.Row.Private.row_of_cols cols in
let* indices, unique_keys = table_indices db name in
let* foreign_keys, issues = table_foreign_keys db name issues in
Ok (Rel.Table.v name row ?primary_key ~unique_keys ~foreign_keys ~indices,
issues)
let rec tables db ts issues = function
| [] -> Ok (List.rev ts, List.rev issues)
| (name, _sql) :: names ->
let* t, issues = table db name issues in
tables db ((Table.V t) :: ts) issues names
let table_names db =
let stmt =
let sql = "SELECT t.name, t.sql FROM sqlite_master AS t \
WHERE t.type = 'table' AND t.name NOT LIKE 'sqlite_%'" in
let cols = Rel.Row.(t2 (text "name") (text "sql")) in
Rel_sql.Stmt.(func sql (ret cols))
in
let* names = fold db stmt List.cons [] in
Ok (List.rev names)
let schema_of_db ?schema:name db =
let* names = table_names db in
let* tables, issues = tables db [] [] names in
Ok (Rel.Schema.v ?name ~tables (), issues)
(*---------------------------------------------------------------------------
   Copyright (c) 2020 The rel programmers

   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.

   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
|
b31a23ea49973781369c1c9b27e0c14a6ad0ab8e310b97245e8bb07b34d3594d | gregtatcam/imaplet-lwt | account.ml |
(*
 * Copyright (c) 2013-2014 Gregory Tsipenyuk <>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)
open Lwt
open Imaplet_types
open Utils
type acct_config = {
  acct_data_store : [`Irmin | `Workdir | `Maildir | `Mailbox | `Gitl]; (* type of storage, irmin/maildir/workdir/gitl supported *)
acct_encrypt : bool; (* encrypt messages, default true *)
acct_compress : bool; (* compress messages, but not attachments, default true *)
acct_compress_attach : bool; (* compress attachments, default false *)
acct_compress_repo : int option; (* compress repo, default None *)
acct_auth_required: bool; (* require user authentication, priv key encrypted with password, default true *)
  acct_maildir_parse : bool; (* parse message into MIME parts when in maildir storage format, default true *)
  acct_single_store : bool; (* single-store attachments in irmin and workdir format, default true *)
  acct_hybrid : bool; (* hybrid of irmin and workdir store (store should be set to irmin), default false *)
}
(**
CAPABILITY IMAP4rev1 LITERAL+ SASL-IR LOGIN-REFERRALS ID ENABLE IDLE SORT
SORT=DISPLAY THREAD=REFERENCES THREAD=REFS THREAD=ORDEREDSUBJECT MULTIAPPEND
URL-PARTIAL CATENATE UNSELECT CHILDREN NAMESPACE UIDPLUS LIST-EXTENDED
I18NLEVEL=1 CONDSTORE QRESYNC ESEARCH ESORT SEARCHRES WITHIN CONTEXT=SEARCH
LIST-STATUS SPECIAL-USE BINARY MOVE
**)
(** users file:
* dovecot:{PLAIN}dovecot:/Users/dovecot:/var/mail/dovecot
**)
let get_bool v n =
if n >= (String.length v) then
false
else
match String.get v n with
| 't' -> true
| _ -> false
let get_level v =
if String.length v = 3 then (
match String.get v 2 with
| '-' -> None
| x -> Some (int_of_char x - 48)
) else
None
exception InvalidStoreType
let get_store = function
| "irmin" -> `Irmin
| "workdir" -> `Workdir
| "maildir" -> `Maildir
| "mailbox" -> `Mailbox
| "gitl" -> `Gitl
| _ -> raise InvalidStoreType
let get_config buff =
if Str.string_match (Str.regexp
".*:\\(gitl\\|irmin\\|workdir\\|maildir\\|mailbox\\):\\(a[tf]\\):\\(e[tf]\\):\\(c[tf][tf][-0-9]?\\):\\(s[tf]\\):\\(h[tf]\\):\\(m[tf]\\)$") buff 0 then (
Some {acct_data_store = get_store (Str.matched_group 1 buff);
acct_auth_required = get_bool (Str.matched_group 2 buff) 1;
acct_encrypt = get_bool (Str.matched_group 3 buff) 1;
acct_compress = get_bool (Str.matched_group 4 buff) 1;
acct_compress_attach = get_bool (Str.matched_group 4 buff) 2;
acct_compress_repo = get_level (Str.matched_group 4 buff);
acct_single_store = get_bool (Str.matched_group 5 buff) 1;
acct_hybrid = get_bool (Str.matched_group 6 buff) 1;
acct_maildir_parse = get_bool (Str.matched_group 7 buff) 1;}
) else
None
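(* Illustrative sketch, not part of the original code: a users-file entry in
   the format matched above; the user, password and paths are made up. *)
let _example_config =
  get_config "dovecot:{PLAIN}dovecot:/Users/dovecot:/var/mail/dovecot:irmin:at:et:ctf-:st:hf:mt"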
let parse_users buff user password =
try
let _ = Str.search_forward (Str.regexp
"^\\([^:]+\\):{\\([^}]+\\)}\\([^:]+\\):") buff 0 in
let u = Str.matched_group 1 buff in
let p = Str.matched_group 3 buff in
let t = Str.matched_group 2 buff in
let p =
if t = "PLAIN" then
p = password
else if t = "SHA1" then
p = (Imap_crypto.get_hash ~hash:`Sha1 password)
else if t = "SHA256" then
p = (Imap_crypto.get_hash ~hash:`Sha256 password)
else
false
in
if u = user && p then (
(true,get_config buff)
) else
(false,None)
with _ ->
(false,None)
let b64decode b64 =
  (* let b64 = Str.global_replace (Str.regexp "=$") "" b64 in *)
match Nocrypto.Base64.decode (Cstruct.of_string b64) with
| Some buff -> Cstruct.to_string buff
| None -> assert(false);;
let parse_user_b64 b64 =
let buff = b64decode b64 in (** need to log this if it fails **)
let r1 = Str.regexp "^\\([^\\]+\\)\000\\([^\\]+\\)\000\\([^\\]+\\)$" in
let r2 = Str.regexp "^\000\\([^\\]+\\)\000\\([^\\]+\\)$" in
if Str.string_match r1 buff 0 then (
let u1 = Str.matched_group 1 buff in
let u2 = Str.matched_group 2 buff in
let p = Str.matched_group 3 buff in
if u1 = u2 then
Some (u1,p)
else
None
) else if Str.string_match r2 buff 0 then (
let u = Str.matched_group 1 buff in
let p = Str.matched_group 2 buff in
Some (u,p)
) else
None
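(* Illustrative sketch, not part of the original code: the SASL PLAIN message
   "\000alice\000secret", base64-encoded, parses back to Some ("alice", "secret");
   the user and password are placeholders. *)
let _example_plain_b64 =
  let msg = Cstruct.of_string "\000alice\000secret" in
  parse_user_b64 (Cstruct.to_string (Nocrypto.Base64.encode msg))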
let match_user line user =
try
Str.search_forward (Str.regexp_case_fold ("^" ^ user ^ ":")) line 0 = 0
with Not_found -> false
let rec read_users r user password =
Lwt_io.read_line_opt r >>=
function
| Some res ->
if match_user res user then (
if password = None then
return (true,get_config res)
else (
let (res,config) = parse_users res user (option_value_exn password) in
return (res,config)
)
) else
read_users r user password
| None -> return (false,None)
(** have to make users configurable **)
let authenticate_user ?(b64=false) ?(users=Install.users_path) user ?password () =
let (user,password) =
if b64 = false then
(user,password)
else (
match password with
| None -> (b64decode user, None)
| Some p -> (b64decode user, Some (b64decode p))
)
in
Lwt_io.with_file ~mode:Lwt_io.Input users (fun r ->
read_users r user password) >>= fun (res,config) ->
return (user,password,res,config)
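(* Illustrative sketch, not part of the original code: checking a user and
   password against the users file; "alice"/"secret" are placeholders. *)
let _example_authenticate () =
  authenticate_user "alice" ~password:"secret" () >>= fun (_user, _pass, ok, _config) ->
  return ok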
let auth_user user password resp_ok resp_no =
authenticate_user user ~password () >>= fun (_,_,res,config) ->
if res then
return (`Ok (Resp_Ok
(None,Utils.formated_capability(Configuration.auth_capability)), user, password, config))
else
return (`Error (Resp_No (None,resp_no)))
let plain_auth text =
match (parse_user_b64 text) with
| Some (u,p) ->
authenticate_user u ?password:(Some p) ()
| None -> return ("",None,false,None)
let _plain_auth text =
match (parse_user_b64 text) with
| Some (u,p) -> auth_user u p "AUTHENTICATE" "PASSWORD"
| None -> return (`Error (Resp_No (None,"PASSWORD")))
(** TBD authenticate plain against users file **)
let authenticate auth_type text =
match auth_type with
| Auth_Plain -> _plain_auth text
| _ -> return (`Error (Resp_No (None,"Authentication Type")))
(** TBD **)
let login user password = auth_user user password "LOGIN" "PASSWORD"
| null | https://raw.githubusercontent.com/gregtatcam/imaplet-lwt/d7b51253e79cffa97e98ab899ed833cd7cb44bb6/lib/commands/account.ml | ocaml | encrypt messages, default true
compress messages, but not attachments, default true
compress attachments, default false
compress repo, default None
require user authentication, priv key encrypted with password, default true
*
CAPABILITY IMAP4rev1 LITERAL+ SASL-IR LOGIN-REFERRALS ID ENABLE IDLE SORT
SORT=DISPLAY THREAD=REFERENCES THREAD=REFS THREAD=ORDEREDSUBJECT MULTIAPPEND
URL-PARTIAL CATENATE UNSELECT CHILDREN NAMESPACE UIDPLUS LIST-EXTENDED
I18NLEVEL=1 CONDSTORE QRESYNC ESEARCH ESORT SEARCHRES WITHIN CONTEXT=SEARCH
LIST-STATUS SPECIAL-USE BINARY MOVE
*
* users file:
* dovecot:{PLAIN}dovecot:/Users/dovecot:/var/mail/dovecot
*
* need to log this if it fails *
* have to make users configurable *
* TBD authenticate plain against users file *
* TBD * |
* Copyright ( c ) 2013 - 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2014 Gregory Tsipenyuk <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Lwt
open Imaplet_types
open Utils
type acct_config = {
type of
storage , irmin / maildir / workdir / supported
storage, irmin/maildir/workdir/gitl supported *)
parse message into MIME parts when in maildir storage format , default true
single - store attachments in irmin and workdir format , default true
hybrid of irmin and workdir store ( store should be set to irmin , default false
}
let get_bool v n =
if n >= (String.length v) then
false
else
match String.get v n with
| 't' -> true
| _ -> false
let get_level v =
if String.length v = 3 then (
match String.get v 2 with
| '-' -> None
| x -> Some (int_of_char x - 48)
) else
None
exception InvalidStoreType
let get_store = function
| "irmin" -> `Irmin
| "workdir" -> `Workdir
| "maildir" -> `Maildir
| "mailbox" -> `Mailbox
| "gitl" -> `Gitl
| _ -> raise InvalidStoreType
let get_config buff =
if Str.string_match (Str.regexp
".*:\\(gitl\\|irmin\\|workdir\\|maildir\\|mailbox\\):\\(a[tf]\\):\\(e[tf]\\):\\(c[tf][tf][-0-9]?\\):\\(s[tf]\\):\\(h[tf]\\):\\(m[tf]\\)$") buff 0 then (
Some {acct_data_store = get_store (Str.matched_group 1 buff);
acct_auth_required = get_bool (Str.matched_group 2 buff) 1;
acct_encrypt = get_bool (Str.matched_group 3 buff) 1;
acct_compress = get_bool (Str.matched_group 4 buff) 1;
acct_compress_attach = get_bool (Str.matched_group 4 buff) 2;
acct_compress_repo = get_level (Str.matched_group 4 buff);
acct_single_store = get_bool (Str.matched_group 5 buff) 1;
acct_hybrid = get_bool (Str.matched_group 6 buff) 1;
acct_maildir_parse = get_bool (Str.matched_group 7 buff) 1;}
) else
None
let parse_users buff user password =
try
let _ = Str.search_forward (Str.regexp
"^\\([^:]+\\):{\\([^}]+\\)}\\([^:]+\\):") buff 0 in
let u = Str.matched_group 1 buff in
let p = Str.matched_group 3 buff in
let t = Str.matched_group 2 buff in
let p =
if t = "PLAIN" then
p = password
else if t = "SHA1" then
p = (Imap_crypto.get_hash ~hash:`Sha1 password)
else if t = "SHA256" then
p = (Imap_crypto.get_hash ~hash:`Sha256 password)
else
false
in
if u = user && p then (
(true,get_config buff)
) else
(false,None)
with _ ->
(false,None)
let b64decode b64 =
let = Str.global_replace ( Str.regexp " = $ " ) " " b64 in
match Nocrypto.Base64.decode (Cstruct.of_string b64) with
| Some buff -> Cstruct.to_string buff
| None -> assert(false);;
let parse_user_b64 b64 =
let r1 = Str.regexp "^\\([^\\]+\\)\000\\([^\\]+\\)\000\\([^\\]+\\)$" in
let r2 = Str.regexp "^\000\\([^\\]+\\)\000\\([^\\]+\\)$" in
if Str.string_match r1 buff 0 then (
let u1 = Str.matched_group 1 buff in
let u2 = Str.matched_group 2 buff in
let p = Str.matched_group 3 buff in
if u1 = u2 then
Some (u1,p)
else
None
) else if Str.string_match r2 buff 0 then (
let u = Str.matched_group 1 buff in
let p = Str.matched_group 2 buff in
Some (u,p)
) else
None
let match_user line user =
try
Str.search_forward (Str.regexp_case_fold ("^" ^ user ^ ":")) line 0 = 0
with Not_found -> false
let rec read_users r user password =
Lwt_io.read_line_opt r >>=
function
| Some res ->
if match_user res user then (
if password = None then
return (true,get_config res)
else (
let (res,config) = parse_users res user (option_value_exn password) in
return (res,config)
)
) else
read_users r user password
| None -> return (false,None)
let authenticate_user ?(b64=false) ?(users=Install.users_path) user ?password () =
let (user,password) =
if b64 = false then
(user,password)
else (
match password with
| None -> (b64decode user, None)
| Some p -> (b64decode user, Some (b64decode p))
)
in
Lwt_io.with_file ~mode:Lwt_io.Input users (fun r ->
read_users r user password) >>= fun (res,config) ->
return (user,password,res,config)
let auth_user user password resp_ok resp_no =
authenticate_user user ~password () >>= fun (_,_,res,config) ->
if res then
return (`Ok (Resp_Ok
(None,Utils.formated_capability(Configuration.auth_capability)), user, password, config))
else
return (`Error (Resp_No (None,resp_no)))
let plain_auth text =
match (parse_user_b64 text) with
| Some (u,p) ->
authenticate_user u ?password:(Some p) ()
| None -> return ("",None,false,None)
let _plain_auth text =
match (parse_user_b64 text) with
| Some (u,p) -> auth_user u p "AUTHENTICATE" "PASSWORD"
| None -> return (`Error (Resp_No (None,"PASSWORD")))
let authenticate auth_type text =
match auth_type with
| Auth_Plain -> _plain_auth text
| _ -> return (`Error (Resp_No (None,"Authentication Type")))
let login user password = auth_user user password "LOGIN" "PASSWORD"
|
7fd0ff4e9e364c3eea7f8495b71f76b95fe1a6de9ccb554e8da88eb647c4bde2 | sebsheep/elm2node | Localizer.hs | # OPTIONS_GHC -Wall #
{-# LANGUAGE OverloadedStrings #-}
module Reporting.Render.Type.Localizer
( Localizer
, toDoc
, toChars
, empty
, fromNames
, fromModule
)
where
import qualified Data.Map as Map
import qualified Data.Name as Name
import qualified Data.Set as Set
import qualified AST.Source as Src
import qualified Elm.ModuleName as ModuleName
import Reporting.Doc ((<>))
import qualified Reporting.Doc as D
import qualified Reporting.Annotation as A
-- LOCALIZER
newtype Localizer =
Localizer (Map.Map Name.Name Import)
data Import =
Import
{ _alias :: Maybe Name.Name
, _exposing :: Exposing
}
data Exposing
= All
| Only (Set.Set Name.Name)
empty :: Localizer
empty =
Localizer Map.empty
-- LOCALIZE
toDoc :: Localizer -> ModuleName.Canonical -> Name.Name -> D.Doc
toDoc localizer home name =
D.fromChars (toChars localizer home name)
toChars :: Localizer -> ModuleName.Canonical -> Name.Name -> String
toChars (Localizer localizer) moduleName@(ModuleName.Canonical _ home) name =
case Map.lookup home localizer of
Nothing ->
Name.toChars home <> "." <> Name.toChars name
Just (Import alias exposing) ->
case exposing of
All ->
Name.toChars name
Only set ->
if Set.member name set then
Name.toChars name
else if name == Name.list && moduleName == ModuleName.list then
"List"
else
Name.toChars (maybe home id alias) <> "." <> Name.toChars name
-- FROM NAMES
fromNames :: Map.Map Name.Name a -> Localizer
fromNames names =
Localizer $ Map.map (\_ -> Import Nothing All) names
-- FROM MODULE
fromModule :: Src.Module -> Localizer
fromModule modul@(Src.Module _ _ _ imports _ _ _ _ _) =
Localizer $ Map.fromList $
(Src.getName modul, Import Nothing All) : map toPair imports
toPair :: Src.Import -> (Name.Name, Import)
toPair (Src.Import (A.At _ name) alias exposing) =
( name
, Import alias (toExposing exposing)
)
toExposing :: Src.Exposing -> Exposing
toExposing exposing =
case exposing of
Src.Open ->
All
Src.Explicit exposedList ->
Only (foldr addType Set.empty exposedList)
addType :: Src.Exposed -> Set.Set Name.Name -> Set.Set Name.Name
addType exposed types =
case exposed of
Src.Lower _ -> types
Src.Upper (A.At _ name) _ -> Set.insert name types
Src.Operator _ _ -> types
| null | https://raw.githubusercontent.com/sebsheep/elm2node/602a64f48e39edcdfa6d99793cc2827b677d650d/compiler/src/Reporting/Render/Type/Localizer.hs | haskell | # LANGUAGE OverloadedStrings #
FROM NAMES
FROM MODULE | # OPTIONS_GHC -Wall #
module Reporting.Render.Type.Localizer
( Localizer
, toDoc
, toChars
, empty
, fromNames
, fromModule
)
where
import qualified Data.Map as Map
import qualified Data.Name as Name
import qualified Data.Set as Set
import qualified AST.Source as Src
import qualified Elm.ModuleName as ModuleName
import Reporting.Doc ((<>))
import qualified Reporting.Doc as D
import qualified Reporting.Annotation as A
LOCALIZER
newtype Localizer =
Localizer (Map.Map Name.Name Import)
data Import =
Import
{ _alias :: Maybe Name.Name
, _exposing :: Exposing
}
data Exposing
= All
| Only (Set.Set Name.Name)
empty :: Localizer
empty =
Localizer Map.empty
LOCALIZE
toDoc :: Localizer -> ModuleName.Canonical -> Name.Name -> D.Doc
toDoc localizer home name =
D.fromChars (toChars localizer home name)
toChars :: Localizer -> ModuleName.Canonical -> Name.Name -> String
toChars (Localizer localizer) moduleName@(ModuleName.Canonical _ home) name =
case Map.lookup home localizer of
Nothing ->
Name.toChars home <> "." <> Name.toChars name
Just (Import alias exposing) ->
case exposing of
All ->
Name.toChars name
Only set ->
if Set.member name set then
Name.toChars name
else if name == Name.list && moduleName == ModuleName.list then
"List"
else
Name.toChars (maybe home id alias) <> "." <> Name.toChars name
fromNames :: Map.Map Name.Name a -> Localizer
fromNames names =
Localizer $ Map.map (\_ -> Import Nothing All) names
fromModule :: Src.Module -> Localizer
fromModule modul@(Src.Module _ _ _ imports _ _ _ _ _) =
Localizer $ Map.fromList $
(Src.getName modul, Import Nothing All) : map toPair imports
toPair :: Src.Import -> (Name.Name, Import)
toPair (Src.Import (A.At _ name) alias exposing) =
( name
, Import alias (toExposing exposing)
)
toExposing :: Src.Exposing -> Exposing
toExposing exposing =
case exposing of
Src.Open ->
All
Src.Explicit exposedList ->
Only (foldr addType Set.empty exposedList)
addType :: Src.Exposed -> Set.Set Name.Name -> Set.Set Name.Name
addType exposed types =
case exposed of
Src.Lower _ -> types
Src.Upper (A.At _ name) _ -> Set.insert name types
Src.Operator _ _ -> types
|
d9582eaa47ba84105764cee401328f254aaf11ab26011c8dc618c1b5fe605e3f | gstew5/snarkl | Peano.hs | # LANGUAGE RebindableSyntax
, #
, DataKinds
#-}
module Peano where
import Prelude hiding
( (>>)
, (>>=)
, (+)
, (-)
, (*)
, (/)
, (&&)
, return
, fromRational
, negate
)
import SyntaxMonad
import Syntax
import TExpr
type TF = 'TFSum ('TFConst 'TUnit) 'TFId
type TNat = 'TMu TF
nat_zero :: Comp TNat
nat_zero
= do { x <- inl unit
; roll x
}
nat_succ :: TExp TNat Rational -> Comp TNat
nat_succ n
= do { x <- inr n
; roll x
}
nat_eq :: Int
-> TExp TNat Rational
-> TExp TNat Rational
-> Comp 'TBool
nat_eq level n m
| level > 0
= do { n' <- unroll n
; m' <- unroll m
; case_sum
(const $ case_sum (const $ return true) (const $ return false) m')
(\n'' -> case_sum
(const $ return false)
(\m'' -> nat_eq (dec level) n'' m'')
m')
n'
}
| otherwise
= return false
nat_of_int :: Int -> Comp TNat
nat_of_int 0 = nat_zero
nat_of_int n
= do { x <- nat_of_int (dec n)
; nat_succ x
}
| null | https://raw.githubusercontent.com/gstew5/snarkl/d6ce72b13e370d2965bb226f28a1135269e7c198/src/examples/Peano.hs | haskell | # LANGUAGE RebindableSyntax
, #
, DataKinds
#-}
module Peano where
import Prelude hiding
( (>>)
, (>>=)
, (+)
, (-)
, (*)
, (/)
, (&&)
, return
, fromRational
, negate
)
import SyntaxMonad
import Syntax
import TExpr
type TF = 'TFSum ('TFConst 'TUnit) 'TFId
type TNat = 'TMu TF
nat_zero :: Comp TNat
nat_zero
= do { x <- inl unit
; roll x
}
nat_succ :: TExp TNat Rational -> Comp TNat
nat_succ n
= do { x <- inr n
; roll x
}
nat_eq :: Int
-> TExp TNat Rational
-> TExp TNat Rational
-> Comp 'TBool
nat_eq level n m
| level > 0
= do { n' <- unroll n
; m' <- unroll m
; case_sum
(const $ case_sum (const $ return true) (const $ return false) m')
(\n'' -> case_sum
(const $ return false)
(\m'' -> nat_eq (dec level) n'' m'')
m')
n'
}
| otherwise
= return false
nat_of_int :: Int -> Comp TNat
nat_of_int 0 = nat_zero
nat_of_int n
= do { x <- nat_of_int (dec n)
; nat_succ x
}
|
|
d9b7fa3bb1e67432ea483cba73e7b3ef8f84f44e1868226715ae628c2214b178 | imdea-software/leap | test_yicesparser.ml |
(***********************************************************************)
(* *)
LEAP
(* *)
, IMDEA Software Institute
(* *)
(* *)
Copyright 2011 IMDEA Software Institute
(* *)
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
(* You may obtain a copy of the License at *)
(* *)
(* -2.0 *)
(* *)
(* Unless required by applicable law or agreed to in writing, *)
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND ,
(* either express or implied. *)
(* See the License for the specific language governing permissions *)
(* and limitations under the License. *)
(* *)
(***********************************************************************)
let _ =
let filename = Sys.argv.(1) in
print_endline ("Going to parse file: " ^ filename);
let input_channel = Pervasives.open_in filename in
let model = (YicesModelParser.generic_model YicesModelLexer.norm)
(Lexing.from_channel input_channel) in
print_endline "Parsing done...";
print_endline (GenericModel.model_to_str model);
print_endline "Done"
| null | https://raw.githubusercontent.com/imdea-software/leap/5f946163c0f80ff9162db605a75b7ce2e27926ef/src/tests/test_yicesparser.ml | ocaml | *********************************************************************
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
either express or implied.
See the License for the specific language governing permissions
and limitations under the License.
********************************************************************* |
LEAP
, IMDEA Software Institute
Copyright 2011 IMDEA Software Institute
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND ,
let _ =
let filename = Sys.argv.(1) in
print_endline ("Going to parse file: " ^ filename);
let input_channel = Pervasives.open_in filename in
let model = (YicesModelParser.generic_model YicesModelLexer.norm)
(Lexing.from_channel input_channel) in
print_endline "Parsing done...";
print_endline (GenericModel.model_to_str model);
print_endline "Done"
|
bd125d933b72b81360deba2b4e165ab4e5a1c0807c60b7843e1b36b3ca33fc51 | potatosalad/erlang-jose | jose_SUITE.erl | %% -*- mode: erlang; tab-width: 4; indent-tabs-mode: 1; st-rulers: [70] -*-
%% vim: ts=4 sw=4 ft=erlang noet
-module(jose_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("jose_public_key.hrl").
-include_lib("public_key/include/public_key.hrl").
-include("jose.hrl").
%% ct.
-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
%% Tests.
-export([jose_cfrg_curves_a_1/1]).
-export([jose_cfrg_curves_a_2/1]).
-export([jose_cfrg_curves_a_3/1]).
-export([jose_cfrg_curves_a_4/1]).
-export([jose_cfrg_curves_a_5/1]).
-export([jose_cfrg_curves_a_6/1]).
-export([jose_cfrg_curves_a_7/1]).
-export([jose_ecdh_1pu_a/1]).
-export([jwe_a_1/1]).
-export([jwe_a_2/1]).
-export([jwe_a_3/1]).
-export([jwk_c/1]).
-export([jwk_rsa_multi/1]).
-export([jws_a_1/1]).
-export([jws_a_2/1]).
-export([jws_a_3/1]).
-export([jws_a_4/1]).
-export([jws_a_5/1]).
-export([rfc7520_5_9/1]).
%% Macros.
-define(tv_ok(T, M, F, A, E),
case erlang:apply(M, F, A) of
E ->
ok;
T ->
ct:fail({{M, F, A}, {expected, E}, {got, T}})
end).
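%% ?tv_ok/5 applies M:F to the argument list A and succeeds when the result
%% matches the expected test vector E; any other result T fails the common_test
%% case with {{M, F, A}, {expected, E}, {got, T}}. Hypothetical usage:
%%   ?tv_ok(T, lists, reverse, [[1, 2, 3]], [3, 2, 1])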
all() ->
[
{group, jose_cfrg_curves},
{group, jose_ecdh_1pu},
{group, jose_jwe},
{group, jose_jwk},
{group, jose_jws},
{group, rfc7520}
].
groups() ->
[
{jose_cfrg_curves, [parallel], [
jose_cfrg_curves_a_1,
jose_cfrg_curves_a_2,
jose_cfrg_curves_a_3,
jose_cfrg_curves_a_4,
jose_cfrg_curves_a_5,
jose_cfrg_curves_a_6,
jose_cfrg_curves_a_7
]},
{jose_ecdh_1pu, [parallel], [
jose_ecdh_1pu_a
]},
{jose_jwe, [parallel], [
jwe_a_1,
jwe_a_2,
jwe_a_3
]},
{jose_jwk, [parallel], [
jwk_c,
jwk_rsa_multi
]},
{jose_jws, [parallel], [
jws_a_1,
jws_a_2,
jws_a_3,
jws_a_4,
jws_a_5
]},
{rfc7520, [parallel], [
rfc7520_5_9
]}
].
init_per_suite(Config) ->
application:set_env(jose, crypto_fallback, true),
application:set_env(jose, unsecured_signing, true),
_ = application:ensure_all_started(jose),
Config.
end_per_suite(_Config) ->
_ = application:stop(jose),
ok.
init_per_group(G=jose_cfrg_curves, Config) ->
{ok, A1} = file:consult(data_file("jose_cfrg_curves/a.1.config", Config)),
{ok, A3} = file:consult(data_file("jose_cfrg_curves/a.3.config", Config)),
{ok, A4} = file:consult(data_file("jose_cfrg_curves/a.4.config", Config)),
{ok, A5} = file:consult(data_file("jose_cfrg_curves/a.5.config", Config)),
{ok, A6} = file:consult(data_file("jose_cfrg_curves/a.6.config", Config)),
{ok, A7} = file:consult(data_file("jose_cfrg_curves/a.7.config", Config)),
[{jose_cfrg_curves_a_1, A1}, {jose_cfrg_curves_a_3, A3},
{jose_cfrg_curves_a_4, A4}, {jose_cfrg_curves_a_5, A5},
{jose_cfrg_curves_a_6, A6}, {jose_cfrg_curves_a_7, A7} | jose_ct:start(G, Config)];
init_per_group(G=jose_ecdh_1pu, Config) ->
{ok, A} = file:consult(data_file("jose_ecdh_1pu/a.config", Config)),
[{jose_ecdh_1pu_a, A} | jose_ct:start(G, Config)];
init_per_group(G=jose_jwe, Config) ->
{ok, A1} = file:consult(data_file("jwe/a.1.config", Config)),
{ok, A2} = file:consult(data_file("jwe/a.2.config", Config)),
{ok, A3} = file:consult(data_file("jwe/a.3.config", Config)),
[{jwe_a_1, A1}, {jwe_a_2, A2}, {jwe_a_3, A3} | jose_ct:start(G, Config)];
init_per_group(G=jose_jwk, Config) ->
{ok, C} = file:consult(data_file("jwk/c.config", Config)),
[{jwk_c, C} | jose_ct:start(G, Config)];
init_per_group(G=jose_jws, Config) ->
{ok, A1} = file:consult(data_file("jws/a.1.config", Config)),
{ok, A2} = file:consult(data_file("jws/a.2.config", Config)),
{ok, A3} = file:consult(data_file("jws/a.3.config", Config)),
{ok, A4} = file:consult(data_file("jws/a.4.config", Config)),
{ok, A5} = file:consult(data_file("jws/a.5.config", Config)),
[{jws_a_1, A1}, {jws_a_2, A2}, {jws_a_3, A3}, {jws_a_4, A4}, {jws_a_5, A5} | jose_ct:start(G, Config)];
init_per_group(G=rfc7520, Config) ->
{ok, V_5_9} = file:consult(data_file("rfc7520/5.9.config", Config)),
[{rfc7520_5_9, V_5_9} | jose_ct:start(G, Config)];
init_per_group(Group, Config) ->
jose_ct:start(Group, Config).
end_per_group(_Group, Config) ->
jose_ct:stop(Config),
ok.
%%====================================================================
%% Tests
%%====================================================================
% CFRG ECDH and signatures in JOSE
% A.1. Ed25519 private key
% [-ietf-jose-cfrg-curves-00#appendix-A.1]
jose_cfrg_curves_a_1(Config) ->
C = ?config(jose_cfrg_curves_a_1, Config),
% A.1
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_1_Secret = hex:hex_to_bin(?config("a.1.secret", C)),
A_1_PK = hex:hex_to_bin(?config("a.1.pk", C)),
% A_1_SK = << A_1_Secret/binary, A_1_PK/binary >>,
{_, #'jose_EdDSA25519PrivateKey'{
publicKey=#'jose_EdDSA25519PublicKey'{publicKey=A_1_PK},
privateKey=A_1_Secret
}} = jose_jwk:to_key(A_1_JWK),
{_, #'jose_EdDSA25519PublicKey'{publicKey=A_1_PK}} = jose_jwk:to_public_key(A_1_JWK),
ok.
% CFRG ECDH and signatures in JOSE
% A.2. Ed25519 public key
% [-ietf-jose-cfrg-curves-00#appendix-A.2]
jose_cfrg_curves_a_2(Config) ->
% A.1
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
% A.2
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
A_2_JWK = jose_jwk:to_public(A_1_JWK),
ok.
% CFRG ECDH and signatures in JOSE
% A.3. JWK thumbprint canonicalization
% [-ietf-jose-cfrg-curves-00#appendix-A.3]
jose_cfrg_curves_a_3(Config) ->
C = ?config(jose_cfrg_curves_a_3, Config),
% A.1
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
% A.2
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
% A.3
A_3_JWK = jose_jwk:from_binary(?config("a.3.jwk+json", C)),
A_3_THUMBPRINT_HEX = ?config("a.3.thumbprint+hex", C),
A_3_THUMBPRINT = jose_jwa_base64url:encode(hex:hex_to_bin(A_3_THUMBPRINT_HEX)),
A_3_THUMBPRINT = ?config("a.3.thumbprint+b64", C),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_1_JWK),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_2_JWK),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_3_JWK),
ok.
% CFRG ECDH and signatures in JOSE
% A.4. Ed25519 Signing
% [-ietf-jose-cfrg-curves-00#appendix-A.4]
jose_cfrg_curves_a_4(Config) ->
C = ?config(jose_cfrg_curves_a_4, Config),
% A.1
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
% A.4
A_4_PROTECTED = ?config("a.4.jws+json", C),
A_4_JWS = jose_jws:from_binary(A_4_PROTECTED),
A_4_JWS_B64 = ?config("a.4.jws+b64", C),
A_4_TXT = ?config("a.4.txt", C),
A_4_TXT_B64 = ?config("a.4.txt+b64", C),
A_4_SIGNINGINPUT = ?config("a.4.signing-input", C),
A_4_SIG = hex:hex_to_bin(?config("a.4.sig+hex", C)),
A_4_SIG_B64 = ?config("a.4.sig+b64", C),
A_4_SIG_COMPACT = ?config("a.4.sig+compact", C),
A_4_TXT_B64 = jose_jwa_base64url:encode(A_4_TXT),
A_4_SIGNINGINPUT = << A_4_JWS_B64/binary, $., A_4_TXT_B64/binary >>,
A_4_SIGNINGINPUT = jose_jws:signing_input(A_4_TXT, A_4_JWS),
%% Forcing the Protected header to be A_4_PROTECTED
A_4_MAP=#{
<<"signature">> := A_4_SIG_B64
} = force_sign(A_1_JWK, A_4_TXT, A_4_PROTECTED, A_4_JWS),
A_4_SIG = jose_jwa_base64url:decode(A_4_SIG_B64),
{_, A_4_SIG_COMPACT} = jose_jws:compact(A_4_MAP),
ok.
% CFRG ECDH and signatures in JOSE
% A.5. Ed25519 Validation
% [-ietf-jose-cfrg-curves-00#appendix-A.5]
jose_cfrg_curves_a_5(Config) ->
C = ?config(jose_cfrg_curves_a_5, Config),
% A.1
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
% A.2
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
% A.4
A_5_SIG_COMPACT = ?config("a.5.sig+compact", C),
A_5_JWS = jose_jws:from_binary(?config("a.5.jws+json", C)),
A_5_PAYLOAD_DATA = ?config("a.5.txt", C),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(A_1_JWK, A_5_SIG_COMPACT),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(A_2_JWK, A_5_SIG_COMPACT),
ok.
% CFRG ECDH and signatures in JOSE
% A.6. ECDH-ES with X25519
% [-ietf-jose-cfrg-curves-00#appendix-A.6]
jose_cfrg_curves_a_6(Config) ->
C = ?config(jose_cfrg_curves_a_6, Config),
% A.6
A_6_BOB_JWK = jose_jwk:from_binary(?config("a.6.bob-jwk+json", C)),
A_6_BOB_Secret = hex:hex_to_bin(?config("a.6.bob-secret+hex", C)),
A_6_BOB_PK = hex:hex_to_bin(?config("a.6.bob-pk+hex", C)),
A_6_EPK_Secret = hex:hex_to_bin(?config("a.6.epk-secret+hex", C)),
A_6_EPK_PK = hex:hex_to_bin(?config("a.6.epk-pk+hex", C)),
A_6_EPK_JWK = jose_jwk:from_binary(?config("a.6.epk-jwk+json", C)),
A_6_PROTECTED = ?config("a.6.jwe+json", C),
A_6_JWE = jose_jwe:from_binary(A_6_PROTECTED),
A_6_Z = hex:hex_to_bin(?config("a.6.z+hex", C)),
A_6_BOB_SK = << A_6_BOB_Secret/binary, A_6_BOB_PK/binary >>,
A_6_EPK_SK = << A_6_EPK_Secret/binary, A_6_EPK_PK/binary >>,
A_6_BOB_S_JWK = jose_jwk:from_okp({'X25519', A_6_BOB_SK}),
A_6_EPK_S_JWK = jose_jwk:from_okp({'X25519', A_6_EPK_SK}),
{_, #'jose_X25519PrivateKey'{
publicKey=#'jose_X25519PublicKey'{publicKey=A_6_BOB_PK},
privateKey=A_6_BOB_Secret
}} = jose_jwk:to_key(A_6_BOB_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_BOB_PK}} = jose_jwk:to_public_key(A_6_BOB_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_BOB_PK}} = jose_jwk:to_key(A_6_BOB_JWK),
{_, #'jose_X25519PrivateKey'{
publicKey=#'jose_X25519PublicKey'{publicKey=A_6_EPK_PK},
privateKey=A_6_EPK_Secret
}} = jose_jwk:to_key(A_6_EPK_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_EPK_PK}} = jose_jwk:to_public_key(A_6_EPK_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_EPK_PK}} = jose_jwk:to_key(A_6_EPK_JWK),
A_6_Z = jose_jwk:shared_secret(A_6_BOB_JWK, A_6_EPK_S_JWK),
A_6_Z = jose_jwk:shared_secret(A_6_EPK_JWK, A_6_BOB_S_JWK),
A_6_TEXT = <<"Example of X25519 encryption">>,
{_, A_6_ENC_MAP} = jose_jwe:block_encrypt({A_6_BOB_JWK, A_6_EPK_S_JWK}, A_6_TEXT, A_6_JWE),
{_, A_6_ENC_COMPACT} = jose_jwe:compact(A_6_ENC_MAP),
{A_6_TEXT, A_6_JWE} = jose_jwe:block_decrypt(A_6_BOB_S_JWK, A_6_ENC_MAP),
{A_6_TEXT, A_6_JWE} = jose_jwe:block_decrypt(A_6_BOB_S_JWK, A_6_ENC_COMPACT),
ok.
% CFRG ECDH and signatures in JOSE
% A.7. ECDH-ES with X448
% [-ietf-jose-cfrg-curves-00#appendix-A.7]
jose_cfrg_curves_a_7(Config) ->
C = ?config(jose_cfrg_curves_a_7, Config),
% A.7
A_7_BOB_JWK = jose_jwk:from_binary(?config("a.7.bob-jwk+json", C)),
A_7_BOB_Secret = hex:hex_to_bin(?config("a.7.bob-secret+hex", C)),
A_7_BOB_PK = hex:hex_to_bin(?config("a.7.bob-pk+hex", C)),
A_7_EPK_Secret = hex:hex_to_bin(?config("a.7.epk-secret+hex", C)),
A_7_EPK_PK = hex:hex_to_bin(?config("a.7.epk-pk+hex", C)),
A_7_EPK_JWK = jose_jwk:from_binary(?config("a.7.epk-jwk+json", C)),
A_7_PROTECTED = ?config("a.7.jwe+json", C),
A_7_JWE = jose_jwe:from_binary(A_7_PROTECTED),
A_7_Z = hex:hex_to_bin(?config("a.7.z+hex", C)),
A_7_BOB_SK = << A_7_BOB_Secret/binary, A_7_BOB_PK/binary >>,
A_7_EPK_SK = << A_7_EPK_Secret/binary, A_7_EPK_PK/binary >>,
A_7_BOB_S_JWK = jose_jwk:from_okp({'X448', A_7_BOB_SK}),
A_7_EPK_S_JWK = jose_jwk:from_okp({'X448', A_7_EPK_SK}),
{_, #'jose_X448PrivateKey'{
publicKey=#'jose_X448PublicKey'{publicKey=A_7_BOB_PK},
privateKey=A_7_BOB_Secret
}} = jose_jwk:to_key(A_7_BOB_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_BOB_PK}} = jose_jwk:to_public_key(A_7_BOB_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_BOB_PK}} = jose_jwk:to_key(A_7_BOB_JWK),
{_, #'jose_X448PrivateKey'{
publicKey=#'jose_X448PublicKey'{publicKey=A_7_EPK_PK},
privateKey=A_7_EPK_Secret
}} = jose_jwk:to_key(A_7_EPK_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_EPK_PK}} = jose_jwk:to_public_key(A_7_EPK_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_EPK_PK}} = jose_jwk:to_key(A_7_EPK_JWK),
A_7_Z = jose_jwk:shared_secret(A_7_BOB_JWK, A_7_EPK_S_JWK),
A_7_Z = jose_jwk:shared_secret(A_7_EPK_JWK, A_7_BOB_S_JWK),
A_7_TEXT = <<"Example of X448 encryption">>,
{_, A_7_ENC_MAP} = jose_jwe:block_encrypt({A_7_BOB_JWK, A_7_EPK_S_JWK}, A_7_TEXT, A_7_JWE),
{_, A_7_ENC_COMPACT} = jose_jwe:compact(A_7_ENC_MAP),
{A_7_TEXT, A_7_JWE} = jose_jwe:block_decrypt(A_7_BOB_S_JWK, A_7_ENC_MAP),
{A_7_TEXT, A_7_JWE} = jose_jwe:block_decrypt(A_7_BOB_S_JWK, A_7_ENC_COMPACT),
ok.
% Public Key Authenticated Encryption for JOSE: ECDH-1PU
% A. Example ECDH-1PU Key Agreement Computation with A256GCM
% [-madden-jose-ecdh-1pu-04#appendix-A]
jose_ecdh_1pu_a(Config) ->
C = ?config(jose_ecdh_1pu_a, Config),
A_USSK_JWK = jose_jwk:from_binary(?config("a.ussk.jwk+json", C)),
A_VSSK_JWK = jose_jwk:from_binary(?config("a.vssk.jwk+json", C)),
A_UESK_JWK = jose_jwk:from_binary(?config("a.uesk.jwk+json", C)),
A_JWE = jose_jwe:from_binary(?config("a.jwe+json", C)),
A_ZE = hex:hex_to_bin(?config("a.ze+hex", C)),
A_ZS = hex:hex_to_bin(?config("a.zs+hex", C)),
A_Z = hex:hex_to_bin(?config("a.z+hex", C)),
A_CEK = hex:hex_to_bin(?config("a.cek+hex", C)),
A_ZE = jose_jwk:shared_secret(A_VSSK_JWK, A_UESK_JWK),
A_ZS = jose_jwk:shared_secret(A_VSSK_JWK, A_USSK_JWK),
A_ZS = jose_jwk:shared_secret(A_USSK_JWK, A_VSSK_JWK),
A_Z = <<A_ZE/binary, A_ZS/binary>>,
{A_CEK, _} = jose_jwe:next_cek({A_VSSK_JWK, A_USSK_JWK, A_UESK_JWK}, A_JWE),
A_CEK = jose_jwe:key_decrypt({A_USSK_JWK, A_VSSK_JWK, A_UESK_JWK}, <<>>, A_JWE),
ok.
% JSON Web Encryption (JWE)
% A.1. Example using RSAES-OAEP and AES GCM
% [#appendix-A.1]
jwe_a_1(Config) ->
C = ?config(jwe_a_1, Config),
% A.1
A_1_TXT = ?config("a.1.txt", C),
% A.1.1
A_1_1_JWE_DATA = ?config("a.1.1.jwe+json", C),
A_1_1_JWE_MAP = jose:decode(A_1_1_JWE_DATA),
A_1_1_JWE = jose_jwe:from_binary(A_1_1_JWE_DATA),
{_, A_1_1_JWE_MAP} = jose_jwe:to_map(A_1_1_JWE),
A_1_1_JWE_DATA_B64 = ?config("a.1.1.jwe+json.b64", C),
A_1_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_1_1_JWE))),
% A.1.2
A_1_2_CEK = ?config("a.1.2.cek", C),
% A.1.3
A_1_3_JWK_DATA = ?config("a.1.3.jwk+json", C),
A_1_3_JWK_MAP = jose:decode(A_1_3_JWK_DATA),
A_1_3_JWK = jose_jwk:from_binary(A_1_3_JWK_DATA),
{_, A_1_3_JWK_MAP} = jose_jwk:to_map(A_1_3_JWK),
A_1_3_CEK_ENCRYPTED = ?config("a.1.3.cek.encrypted", C),
A_1_3_CEK_ENCRYPTED_B64 = ?config("a.1.3.cek.encrypted.b64", C),
A_1_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_1_3_CEK_ENCRYPTED),
% A.1.4
A_1_4_IV = ?config("a.1.4.iv", C),
A_1_4_IV_B64 = ?config("a.1.4.iv.b64", C),
A_1_4_IV_B64 = jose_jwa_base64url:encode(A_1_4_IV),
% A.1.5
A_1_5_AAD = ?config("a.1.5.aad", C),
A_1_1_JWE_DATA_B64 = A_1_5_AAD,
% A.1.6
A_1_6_CIPHER = ?config("a.1.6.txt.cipher", C),
A_1_6_TAG = ?config("a.1.6.txt.tag", C),
A_1_6_CIPHER_B64 = ?config("a.1.6.txt.cipher.b64", C),
A_1_6_TAG_B64 = ?config("a.1.6.txt.tag.b64", C),
A_1_6_CIPHER = jose_jwa_base64url:decode(A_1_6_CIPHER_B64),
A_1_6_TAG = jose_jwa_base64url:decode(A_1_6_TAG_B64),
% A.1.7
A_1_7_COMPACT = ?config("a.1.7.jwe+compact", C),
{A_1_TXT, A_1_1_JWE} = jose_jwe:block_decrypt(A_1_3_JWK, A_1_7_COMPACT),
%% Roundtrip test
A_1_7_MAP = jose_jwe:block_encrypt(A_1_3_JWK, A_1_TXT, A_1_2_CEK, A_1_4_IV, A_1_1_JWE),
{A_1_TXT, A_1_1_JWE} = jose_jwe:block_decrypt(A_1_3_JWK, A_1_7_MAP),
ok.
% JSON Web Encryption (JWE)
% A.2. Example using RSAES-PKCS1-v1_5 and AES_128_CBC_HMAC_SHA_256
% [#appendix-A.2]
jwe_a_2(Config) ->
C = ?config(jwe_a_2, Config),
% A.2
A_2_TXT = ?config("a.2.txt", C),
% A.2.1
A_2_1_JWE_DATA = ?config("a.2.1.jwe+json", C),
A_2_1_JWE_MAP = jose:decode(A_2_1_JWE_DATA),
A_2_1_JWE = jose_jwe:from_binary(A_2_1_JWE_DATA),
{_, A_2_1_JWE_MAP} = jose_jwe:to_map(A_2_1_JWE),
A_2_1_JWE_DATA_B64 = ?config("a.2.1.jwe+json.b64", C),
A_2_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_2_1_JWE))),
% A.2.2
A_2_2_CEK = ?config("a.2.2.cek", C),
% A.2.3
A_2_3_JWK_DATA = ?config("a.2.3.jwk+json", C),
A_2_3_JWK_MAP = jose:decode(A_2_3_JWK_DATA),
A_2_3_JWK = jose_jwk:from_binary(A_2_3_JWK_DATA),
{_, A_2_3_JWK_MAP} = jose_jwk:to_map(A_2_3_JWK),
A_2_3_CEK_ENCRYPTED = ?config("a.2.3.cek.encrypted", C),
A_2_3_CEK_ENCRYPTED_B64 = ?config("a.2.3.cek.encrypted.b64", C),
A_2_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_2_3_CEK_ENCRYPTED),
% A.2.4
A_2_4_IV = ?config("a.2.4.iv", C),
A_2_4_IV_B64 = ?config("a.2.4.iv.b64", C),
A_2_4_IV_B64 = jose_jwa_base64url:encode(A_2_4_IV),
% A.2.5
A_2_5_AAD = ?config("a.2.5.aad", C),
A_2_1_JWE_DATA_B64 = A_2_5_AAD,
% A.2.6
A_2_6_CIPHER = ?config("a.2.6.txt.cipher", C),
A_2_6_TAG = ?config("a.2.6.txt.tag", C),
A_2_6_CIPHER_B64 = ?config("a.2.6.txt.cipher.b64", C),
A_2_6_TAG_B64 = ?config("a.2.6.txt.tag.b64", C),
A_2_6_CIPHER = jose_jwa_base64url:decode(A_2_6_CIPHER_B64),
A_2_6_TAG = jose_jwa_base64url:decode(A_2_6_TAG_B64),
% A.2.7
A_2_7_COMPACT = ?config("a.2.7.jwe+compact", C),
{A_2_TXT, A_2_1_JWE} = jose_jwe:block_decrypt(A_2_3_JWK, A_2_7_COMPACT),
%% Roundtrip test
A_2_7_MAP = jose_jwe:block_encrypt(A_2_3_JWK, A_2_TXT, A_2_2_CEK, A_2_4_IV, A_2_1_JWE),
{A_2_TXT, A_2_1_JWE} = jose_jwe:block_decrypt(A_2_3_JWK, A_2_7_MAP),
ok.
% JSON Web Encryption (JWE)
% A.3. Example Using AES Key Wrap and AES_128_CBC_HMAC_SHA_256
% [#appendix-A.3]
jwe_a_3(Config) ->
C = ?config(jwe_a_3, Config),
% A.3
A_3_TXT = ?config("a.3.txt", C),
% A.3.1
A_3_1_JWE_DATA = ?config("a.3.1.jwe+json", C),
A_3_1_JWE_MAP = jose:decode(A_3_1_JWE_DATA),
A_3_1_JWE = jose_jwe:from_binary(A_3_1_JWE_DATA),
{_, A_3_1_JWE_MAP} = jose_jwe:to_map(A_3_1_JWE),
A_3_1_JWE_DATA_B64 = ?config("a.3.1.jwe+json.b64", C),
A_3_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_3_1_JWE))),
% A.3.2
A_3_2_CEK = ?config("a.3.2.cek", C),
% A.3.3
A_3_3_JWK_DATA = ?config("a.3.3.jwk+json", C),
A_3_3_JWK_MAP = jose:decode(A_3_3_JWK_DATA),
A_3_3_JWK = jose_jwk:from_binary(A_3_3_JWK_DATA),
{_, A_3_3_JWK_MAP} = jose_jwk:to_map(A_3_3_JWK),
A_3_3_CEK_ENCRYPTED = ?config("a.3.3.cek.encrypted", C),
A_3_3_CEK_ENCRYPTED_B64 = ?config("a.3.3.cek.encrypted.b64", C),
A_3_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_3_3_CEK_ENCRYPTED),
% A.3.4
A_3_4_IV = ?config("a.3.4.iv", C),
A_3_4_IV_B64 = ?config("a.3.4.iv.b64", C),
A_3_4_IV_B64 = jose_jwa_base64url:encode(A_3_4_IV),
% A.3.5
A_3_5_AAD = ?config("a.3.5.aad", C),
A_3_1_JWE_DATA_B64 = A_3_5_AAD,
% A.3.6
A_3_6_CIPHER = ?config("a.3.6.txt.cipher", C),
A_3_6_TAG = ?config("a.3.6.txt.tag", C),
A_3_6_CIPHER_B64 = ?config("a.3.6.txt.cipher.b64", C),
A_3_6_TAG_B64 = ?config("a.3.6.txt.tag.b64", C),
A_3_6_CIPHER = jose_jwa_base64url:decode(A_3_6_CIPHER_B64),
A_3_6_TAG = jose_jwa_base64url:decode(A_3_6_TAG_B64),
% A.3.7
A_3_7_COMPACT = ?config("a.3.7.jwe+compact", C),
{A_3_TXT, A_3_1_JWE} = jose_jwe:block_decrypt(A_3_3_JWK, A_3_7_COMPACT),
%% Roundtrip test
A_3_7_MAP = jose_jwe:block_encrypt(A_3_3_JWK, A_3_TXT, A_3_2_CEK, A_3_4_IV, A_3_1_JWE),
{A_3_TXT, A_3_1_JWE} = jose_jwe:block_decrypt(A_3_3_JWK, A_3_7_MAP),
ok.
% JSON Web Key (JWK)
% Appendix C. Example Encrypted RSA Private Key
% [#appendix-C]
jwk_c(Config) ->
C = ?config(jwk_c, Config),
% C.1
C_1_JSON_DATA = ?config("c.1.jwk+json", C),
C_1_JSON = jose:decode(C_1_JSON_DATA),
C_1_JWK = jose_jwk:from_file(data_file("jwk/c.1.jwk+json", Config)),
{_, C_1_JSON} = jose_jwk:to_map(C_1_JWK),
% C.2
C_2_JSON_DATA = ?config("c.2.jwe+json", C),
C_2_JSON = jose:decode(C_2_JSON_DATA),
C_2_JWE = jose_jwe:from_file(data_file("jwk/c.2.jwe+json", Config)),
{_, C_2_JSON} = jose_jwe:to_map(C_2_JWE),
C_2_B64_DATA = ?config("c.2.b64", C),
C_2_B64_DATA = jose_jwa_base64url:encode(C_2_JSON_DATA),
% C.3
C_3_CEK = ?config("c.3.cek", C),
% C.4
C_4_TXT = ?config("c.4.txt", C),
C_4_SALT = ?config("c.4.salt", C),
C_4_SALT = << (maps:get(<<"alg">>, C_2_JSON))/binary, 0, (jose_jwa_base64url:decode(maps:get(<<"p2s">>, C_2_JSON)))/binary >>,
C_4_DKEY = ?config("c.4.derivedkey", C),
{ok, C_4_DKEY} = jose_jwa_pkcs5:pbkdf2({hmac, sha256}, C_4_TXT, C_4_SALT, maps:get(<<"p2c">>, C_2_JSON), 16),
% C.5
C_5_EKEY = ?config("c.5.encryptedkey", C),
{C_5_EKEY, _} = jose_jwe:key_encrypt(C_4_TXT, C_3_CEK, C_2_JWE),
% C.6
C_6_IV = ?config("c.6.iv", C),
% C.7
C_7_AAD = ?config("c.7.aad", C),
C_7_AAD = C_2_JSON_DATA,
% C.8
C_8_CIPHER_TXT = ?config("c.8.ciphertxt", C),
C_8_CIPHER_TAG = ?config("c.8.ciphertag", C),
%% Forcing the AAD data to be C_7_AAD
C_8_ENC_MAP=#{
<<"ciphertext">> := C_8_CIPHER_TXT_B64,
<<"tag">> := C_8_CIPHER_TAG_B64
} = force_block_encrypt(C_4_TXT, C_1_JSON_DATA, C_3_CEK, C_6_IV, C_7_AAD, C_2_JWE),
C_8_CIPHER_TXT = jose_jwa_base64url:decode(C_8_CIPHER_TXT_B64),
C_8_CIPHER_TAG = jose_jwa_base64url:decode(C_8_CIPHER_TAG_B64),
% C.9
C_9_DATA = ?config("c.9.jwe+txt", C),
{_, C_9_DATA} = jose_jwe:compact(C_8_ENC_MAP),
%% Make sure decryption also works
{C_1_JSON_DATA, _} = jose_jwe:block_decrypt(C_4_TXT, C_9_DATA),
%% Encrypt and Decrypt
{_, C_1_JWK} = jose_jwk:from_map(C_4_TXT, jose_jwk:to_map(C_4_TXT, C_2_JWE, C_1_JWK)),
ok.
jwk_rsa_multi(Config) ->
JWK = jose_jwk:from_pem_file(data_file("rsa-multi.pem", Config)),
PlainText = <<"I've Got a Lovely Bunch of Coconuts">>,
Encrypted = jose_jwk:block_encrypt(PlainText, JWK),
CompactEncrypted = jose_jwe:compact(Encrypted),
{PlainText, _} = jose_jwk:block_decrypt(Encrypted, JWK),
{PlainText, _} = jose_jwk:block_decrypt(CompactEncrypted, JWK),
Message = <<"Secret Message">>,
Signed = jose_jwk:sign(Message, JWK),
CompactSigned = jose_jws:compact(Signed),
{true, Message, _} = jose_jwk:verify(Signed, JWK),
{true, Message, _} = jose_jwk:verify(CompactSigned, JWK),
{_, Map} = jose_jwk:to_map(JWK),
JWK = jose_jwk:from_map(Map),
Password = <<"My Passphrase">>,
PEM = element(2, jose_jwk:to_pem(JWK)),
EncryptedPEM = element(2, jose_jwk:to_pem(Password, JWK)),
JWK = jose_jwk:from_pem(PEM),
JWK = jose_jwk:from_pem(Password, EncryptedPEM),
JWK = jose_jwk:from_pem(jose_jwk:to_pem(JWK)),
JWK = jose_jwk:from_pem(Password, jose_jwk:to_pem(Password, JWK)),
{_, JWK} = jose_jwk:from_binary(Password, jose_jwk:to_binary(Password, JWK)),
{_, JWK} = jose_jwk:from_binary(Password, jose_jwe:compact(jose_jwk:to_map(Password, JWK))),
ok.
% JSON Web Signature (JWS)
% Appendix A.1. Example Using HMAC SHA-256
% [#appendix-A.1]
jws_a_1(Config) ->
C = ?config(jws_a_1, Config),
% A.1.1
A_1_1_JSON_DATA = ?config("a.1.1.jws+json", C),
A_1_1_JSON = jose:decode(A_1_1_JSON_DATA),
A_1_1_JWS = jose_jws:from_file(data_file("jws/a.1.1.jws+json", Config)),
{_, A_1_1_JSON} = jose_jws:to_map(A_1_1_JWS),
A_1_1_B64_DATA = ?config("a.1.1.b64", C),
A_1_1_B64_DATA = jose_jwa_base64url:encode(A_1_1_JSON_DATA),
A_1_1_PAYLOAD_DATA = ?config("a.1.1.payload", C),
A_1_1_B64_PAYLOAD_DATA = ?config("a.1.1.payload-b64", C),
A_1_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_1_1_PAYLOAD_DATA),
A_1_1_SIGNING_INPUT_DATA = ?config("a.1.1.signing-input", C),
A_1_1_SIGNING_INPUT_DATA = << A_1_1_B64_DATA/binary, $., A_1_1_B64_PAYLOAD_DATA/binary >>,
A_1_1_JWK = jose_jwk:from_file(data_file("jws/a.1.1.jwk+json", Config)),
A_1_1_B64_SIGNATURE_DATA = ?config("a.1.1.signature-b64", C),
%% Forcing the Protected header to be A_1_1_JSON_DATA
A_1_1_MAP=#{
<<"signature">> := A_1_1_B64_SIGNATURE_DATA
} = force_sign(A_1_1_JWK, A_1_1_PAYLOAD_DATA, A_1_1_JSON_DATA, A_1_1_JWS),
A_1_1_COMPACT_DATA = ?config("a.1.1.compact", C),
{_, A_1_1_COMPACT_DATA} = jose_jws:compact(A_1_1_MAP),
% A.1.2
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jws:verify(A_1_1_JWK, A_1_1_MAP),
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jws:verify(A_1_1_JWK, A_1_1_COMPACT_DATA),
%% Sign and Verify
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_1_1_PAYLOAD_DATA, A_1_1_JWS, A_1_1_JWK), A_1_1_JWK),
ok.
% JSON Web Signature (JWS)
% Appendix A.2. Example Using RSASSA-PKCS1-v1_5 SHA-256
% [#appendix-A.2]
jws_a_2(Config) ->
C = ?config(jws_a_2, Config),
% A.2.1
A_2_1_JSON_DATA = ?config("a.2.1.jws+json", C),
A_2_1_JSON = jose:decode(A_2_1_JSON_DATA),
A_2_1_JWS = jose_jws:from_file(data_file("jws/a.2.1.jws+json", Config)),
{_, A_2_1_JSON} = jose_jws:to_map(A_2_1_JWS),
A_2_1_B64_DATA = ?config("a.2.1.b64", C),
A_2_1_B64_DATA = jose_jwa_base64url:encode(A_2_1_JSON_DATA),
A_2_1_PAYLOAD_DATA = ?config("a.2.1.payload", C),
A_2_1_B64_PAYLOAD_DATA = ?config("a.2.1.payload-b64", C),
A_2_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_2_1_PAYLOAD_DATA),
A_2_1_SIGNING_INPUT_DATA = ?config("a.2.1.signing-input", C),
A_2_1_SIGNING_INPUT_DATA = << A_2_1_B64_DATA/binary, $., A_2_1_B64_PAYLOAD_DATA/binary >>,
A_2_1_JWK = jose_jwk:from_file(data_file("jws/a.2.1.jwk+json", Config)),
A_2_1_B64_SIGNATURE_DATA = ?config("a.2.1.signature-b64", C),
%% Forcing the Protected header to be A_2_1_JSON_DATA
A_2_1_MAP=#{
<<"signature">> := A_2_1_B64_SIGNATURE_DATA
} = force_sign(A_2_1_JWK, A_2_1_PAYLOAD_DATA, A_2_1_JSON_DATA, A_2_1_JWS),
A_2_1_COMPACT_DATA = ?config("a.2.1.compact", C),
{_, A_2_1_COMPACT_DATA} = jose_jws:compact(A_2_1_MAP),
% A.2.2
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jws:verify(A_2_1_JWK, A_2_1_MAP),
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jws:verify(A_2_1_JWK, A_2_1_COMPACT_DATA),
%% Sign and Verify
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_2_1_PAYLOAD_DATA, A_2_1_JWS, A_2_1_JWK), A_2_1_JWK),
ok.
% JSON Web Signature (JWS)
% Appendix A.3. Example Using P-256 SHA-256
% #appendix-A.3
jws_a_3(Config) ->
C = ?config(jws_a_3, Config),
% A.3.1
A_3_1_JSON_DATA = ?config("a.3.1.jws+json", C),
A_3_1_JSON = jose:decode(A_3_1_JSON_DATA),
A_3_1_JWS = jose_jws:from_file(data_file("jws/a.3.1.jws+json", Config)),
{_, A_3_1_JSON} = jose_jws:to_map(A_3_1_JWS),
A_3_1_B64_DATA = ?config("a.3.1.b64", C),
A_3_1_B64_DATA = jose_jwa_base64url:encode(A_3_1_JSON_DATA),
A_3_1_PAYLOAD_DATA = ?config("a.3.1.payload", C),
A_3_1_B64_PAYLOAD_DATA = ?config("a.3.1.payload-b64", C),
A_3_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_3_1_PAYLOAD_DATA),
A_3_1_SIGNING_INPUT_DATA = ?config("a.3.1.signing-input", C),
A_3_1_SIGNING_INPUT_DATA = << A_3_1_B64_DATA/binary, $., A_3_1_B64_PAYLOAD_DATA/binary >>,
A_3_1_JWK = jose_jwk:from_file(data_file("jws/a.3.1.jwk+json", Config)),
A_3_1_B64_SIGNATURE_DATA = ?config("a.3.1.signature-b64", C),
%% Forcing the Protected header to be A_3_1_JSON_DATA
A_3_1_MAP=#{
<<"signature">> := A_3_1_B64_SIGNATURE_DATA_ALT
} = force_sign(A_3_1_JWK, A_3_1_PAYLOAD_DATA, A_3_1_JSON_DATA, A_3_1_JWS),
%% ECDSA produces non-matching signatures
true = (A_3_1_B64_SIGNATURE_DATA =/= A_3_1_B64_SIGNATURE_DATA_ALT),
A_3_1_COMPACT_DATA = ?config("a.3.1.compact", C),
{_, A_3_1_COMPACT_DATA} = jose_jws:compact(A_3_1_MAP#{ <<"signature">> => A_3_1_B64_SIGNATURE_DATA }),
% A.3.2
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jws:verify(A_3_1_JWK, A_3_1_MAP),
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jws:verify(A_3_1_JWK, A_3_1_COMPACT_DATA),
%% Sign and Verify
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_3_1_PAYLOAD_DATA, A_3_1_JWS, A_3_1_JWK), A_3_1_JWK),
ok.
% JSON Web Signature (JWS)
% Appendix A.4. Example Using ECDSA P-521 SHA-512
% #appendix-A.4
jws_a_4(Config) ->
C = ?config(jws_a_4, Config),
% A.4.1
A_4_1_JSON_DATA = ?config("a.4.1.jws+json", C),
A_4_1_JSON = jose:decode(A_4_1_JSON_DATA),
A_4_1_JWS = jose_jws:from_file(data_file("jws/a.4.1.jws+json", Config)),
{_, A_4_1_JSON} = jose_jws:to_map(A_4_1_JWS),
A_4_1_B64_DATA = ?config("a.4.1.b64", C),
A_4_1_B64_DATA = jose_jwa_base64url:encode(A_4_1_JSON_DATA),
A_4_1_PAYLOAD_DATA = ?config("a.4.1.payload", C),
A_4_1_B64_PAYLOAD_DATA = ?config("a.4.1.payload-b64", C),
A_4_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_4_1_PAYLOAD_DATA),
A_4_1_SIGNING_INPUT_DATA = ?config("a.4.1.signing-input", C),
A_4_1_SIGNING_INPUT_DATA = << A_4_1_B64_DATA/binary, $., A_4_1_B64_PAYLOAD_DATA/binary >>,
A_4_1_JWK = jose_jwk:from_file(data_file("jws/a.4.1.jwk+json", Config)),
A_4_1_B64_SIGNATURE_DATA = ?config("a.4.1.signature-b64", C),
%% Forcing the Protected header to be A_4_1_JSON_DATA
A_4_1_MAP=#{
<<"signature">> := A_4_1_B64_SIGNATURE_DATA_ALT
} = force_sign(A_4_1_JWK, A_4_1_PAYLOAD_DATA, A_4_1_JSON_DATA, A_4_1_JWS),
%% ECDSA produces non-matching signatures
true = (A_4_1_B64_SIGNATURE_DATA =/= A_4_1_B64_SIGNATURE_DATA_ALT),
A_4_1_COMPACT_DATA = ?config("a.4.1.compact", C),
{_, A_4_1_COMPACT_DATA} = jose_jws:compact(A_4_1_MAP#{ <<"signature">> => A_4_1_B64_SIGNATURE_DATA }),
% A.4.2
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jws:verify(A_4_1_JWK, A_4_1_MAP),
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jws:verify(A_4_1_JWK, A_4_1_COMPACT_DATA),
%% Sign and Verify
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_4_1_PAYLOAD_DATA, A_4_1_JWS, A_4_1_JWK), A_4_1_JWK),
ok.
% JSON Web Signature (JWS)
% Appendix A.5. Example Unsecured JWS
% #appendix-A.5
jws_a_5(Config) ->
C = ?config(jws_a_5, Config),
% A.5
A_5_JSON_DATA = ?config("a.5.jws+json", C),
A_5_JSON = jose:decode(A_5_JSON_DATA),
A_5_JWS = jose_jws:from_file(data_file("jws/a.5.jws+json", Config)),
{_, A_5_JSON} = jose_jws:to_map(A_5_JWS),
A_5_B64_DATA = ?config("a.5.b64", C),
A_5_B64_DATA = jose_jwa_base64url:encode(A_5_JSON_DATA),
A_5_PAYLOAD_DATA = ?config("a.5.payload", C),
A_5_B64_PAYLOAD_DATA = ?config("a.5.payload-b64", C),
A_5_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_5_PAYLOAD_DATA),
A_5_SIGNING_INPUT_DATA = ?config("a.5.signing-input", C),
A_5_SIGNING_INPUT_DATA = << A_5_B64_DATA/binary, $., A_5_B64_PAYLOAD_DATA/binary >>,
%% Forcing the Protected header to be A_5_JSON_DATA
A_5_MAP=#{
<<"signature">> := <<>>
} = force_sign(none, A_5_PAYLOAD_DATA, A_5_JSON_DATA, A_5_JWS),
A_5_COMPACT_DATA = ?config("a.5.compact", C),
{_, A_5_COMPACT_DATA} = jose_jws:compact(A_5_MAP),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, A_5_MAP),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, A_5_COMPACT_DATA),
%% Sign and Verify
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, jose_jws:sign(none, A_5_PAYLOAD_DATA, A_5_JWS)),
ok.
% Examples of Protecting Content Using JSON Object Signing and Encryption (JOSE)
% 5.9. Compressed Content
% #section-5.9
rfc7520_5_9(Config) ->
C = ?config(rfc7520_5_9, Config),
% 5.9.1
V_5_9_1_PLAIN_TEXT = ?config("figure.72", C),
V_5_9_1_JWK = jose_jwk:from_binary(?config("figure.151", C)),
% 5.9.2
V_5_9_2_COMPRESSED_PLAIN_TEXT = ?config("figure.162", C),
V_5_9_1_PLAIN_TEXT = jose_jwe_zip:uncompress(jose_jwa_base64url:decode(V_5_9_2_COMPRESSED_PLAIN_TEXT), zlib),
V_5_9_2_COMPRESSED_PLAIN_TEXT = jose_jwa_base64url:encode(jose_jwe_zip:compress(V_5_9_1_PLAIN_TEXT, zlib)),
V_5_9_2_CEK = ?config("figure.163", C),
V_5_9_2_IV = ?config("figure.164", C),
% 5.9.3
V_5_9_3_ENCRYPTED_KEY = ?config("figure.165", C),
{ALG, _} = jose_jwe_alg_aes_kw:from_map(#{<<"alg">> => <<"A128KW">>}),
V_5_9_3_ENCRYPTED_KEY = jose_jwa_base64url:encode(element(1, jose_jwe_alg_aes_kw:key_encrypt(V_5_9_1_JWK, jose_jwa_base64url:decode(V_5_9_2_CEK), ALG))),
V_5_9_2_CEK = jose_jwa_base64url:encode(jose_jwe_alg_aes_kw:key_decrypt(V_5_9_1_JWK, {undefined, undefined, jose_jwa_base64url:decode(V_5_9_3_ENCRYPTED_KEY)}, ALG)),
% 5.9.4
V_5_9_4_JWE = jose_jwe:from_binary(?config("figure.166", C)),
V_5_9_4_JWE_PROTECTED = ?config("figure.167", C),
V_5_9_4_JWE = jose_jwe:from_binary(jose_jwa_base64url:decode(V_5_9_4_JWE_PROTECTED)),
V_5_9_4_CIPHER_TEXT = ?config("figure.168", C),
V_5_9_4_CIPHER_TAG = ?config("figure.169", C),
% 5.9.5
V_5_9_5_JWE_COMPACT = ?config("figure.170", C),
V_5_9_5_JWE_MAP = jose:decode(?config("figure.172", C)),
V_5_9_4_CIPHER_TEXT = maps:get(<<"ciphertext">>, V_5_9_5_JWE_MAP),
V_5_9_4_CIPHER_TAG = maps:get(<<"tag">>, V_5_9_5_JWE_MAP),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, V_5_9_5_JWE_COMPACT),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, V_5_9_5_JWE_MAP),
%% Roundtrip test
{_, CIPHER_TEXT} = jose_jwe:compact(jose_jwe:block_encrypt(V_5_9_1_JWK, V_5_9_1_PLAIN_TEXT, jose_jwa_base64url:decode(V_5_9_2_CEK), jose_jwa_base64url:decode(V_5_9_2_IV), V_5_9_4_JWE)),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, CIPHER_TEXT),
ok.
%%%-------------------------------------------------------------------
%%% Internal functions
%%%-------------------------------------------------------------------
%% @private
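%% Builds a JWE general JSON map with a caller-supplied protected header: the
%% CEK is wrapped with the key management algorithm, and the (optionally
%% compressed) plaintext is block-encrypted using the base64url-encoded
%% protected header as the additional authenticated data, so the tests can
%% reproduce the exact example vectors instead of re-deriving a header.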
force_block_encrypt(Key, PlainText, CEK, IV, OverrideProtected, JWE=#jose_jwe{alg={ALGModule, ALG}, enc={ENCModule, ENC}}) ->
{EncryptedKey, _} = ALGModule:key_encrypt(Key, CEK, ALG),
Protected = jose_jwa_base64url:encode(OverrideProtected),
{CipherText, CipherTag} = ENCModule:block_encrypt({Protected, maybe_compress(PlainText, JWE)}, CEK, IV, ENC),
#{
<<"protected">> => Protected,
<<"encrypted_key">> => jose_jwa_base64url:encode(EncryptedKey),
<<"iv">> => jose_jwa_base64url:encode(IV),
<<"ciphertext">> => jose_jwa_base64url:encode(CipherText),
<<"tag">> => jose_jwa_base64url:encode(CipherTag)
}.
%% @private
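%% Signs PlainText against a caller-supplied protected header: the signing
%% input is base64url(Protected) ++ "." ++ base64url(Payload), signed directly
%% via the signature algorithm module and returned as a general JWS JSON map.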
force_sign(Key, PlainText, OverrideProtected, #jose_jws{alg={ALGModule, ALG}}) ->
Protected = jose_jwa_base64url:encode(OverrideProtected),
Payload = jose_jwa_base64url:encode(PlainText),
Message = << Protected/binary, $., Payload/binary >>,
Signature = jose_jwa_base64url:encode(ALGModule:sign(Key, Message, ALG)),
#{
<<"payload">> => Payload,
<<"protected">> => Protected,
<<"signature">> => Signature
}.
%% @private
data_file(File, Config) ->
filename:join([?config(data_dir, Config), File]).
%% @private
maybe_compress(PlainText, #jose_jwe{zip={Module, ZIP}}) ->
Module:compress(PlainText, ZIP);
maybe_compress(PlainText, _) ->
PlainText.
| null | https://raw.githubusercontent.com/potatosalad/erlang-jose/291dbb86fb5e5c71a8395b3f68c16f31f8bd06db/test/jose_SUITE.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
ct.
Tests.
====================================================================
Tests
====================================================================
A.1. Ed25519 private key
[-ietf-jose-cfrg-curves-00#appendix-A.1]
A.1
A_1_SK = << A_1_Secret/binary, A_1_PK/binary >>,
A.2. Ed25519 public key
[-ietf-jose-cfrg-curves-00#appendix-A.2]
A.1
A.2
[-ietf-jose-cfrg-curves-00#appendix-A.3]
A.1
A.2
A.3
A.4. Ed25519 Signing
[-ietf-jose-cfrg-curves-00#appendix-A.4]
A.1
A.4
Forcing the Protected header to be A_4_PROTECTED
A.5. Ed25519 Validation
[-ietf-jose-cfrg-curves-00#appendix-A.5]
A.1
A.2
A.4
[-ietf-jose-cfrg-curves-00#appendix-A.6]
A.6
[-ietf-jose-cfrg-curves-00#appendix-A.7]
A.7
[-madden-jose-ecdh-1pu-04#appendix-A]
JSON Web Encryption (JWE)
[#appendix-A.1]
A.1
A.1.1
A.1.2
A.1.3
A.1.4
A.1.5
A.1.7
JSON Web Encryption (JWE)
[#appendix-A.2]
A.2
A.2.1
A.2.2
A.2.3
A.2.4
A.2.5
A.2.6
A.2.7
JSON Web Encryption (JWE)
[#appendix-A.3]
A.3
A.3.1
A.3.2
A.3.4
A.3.5
A.3.6
A.3.7
Appendix C. Example Encrypted RSA Private Key
[#appendix-C]
C.1
C.2
C.3
C.5
C.6
C.7
C.8
Make sure decryption also works
[#appendix-A.1]
A.1.1
A.1.2
Sign and Verify
[#appendix-A.2]
A.2.1
Forcing the Protected header to be A_2_1_JSON_DATA
A.2.2
Sign and Verify
#appendix-A.3
A.3.1
Forcing the Protected header to be A_3_1_JSON_DATA
A.3.2
Sign and Verify
#appendix-A.4
A.4.1
Forcing the Protected header to be A_4_1_JSON_DATA
Sign and Verify
Appendix A.5. Example Unsecured JWS
#appendix-A.5
A.5
Sign and Verify
#section-5.9
5.9.5
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode: erlang; tab-width: 4; indent-tabs-mode: 1; st-rulers: [70] -*-
-module(jose_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("jose_public_key.hrl").
-include_lib("public_key/include/public_key.hrl").
-include("jose.hrl").
-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
-export([jose_cfrg_curves_a_1/1]).
-export([jose_cfrg_curves_a_2/1]).
-export([jose_cfrg_curves_a_3/1]).
-export([jose_cfrg_curves_a_4/1]).
-export([jose_cfrg_curves_a_5/1]).
-export([jose_cfrg_curves_a_6/1]).
-export([jose_cfrg_curves_a_7/1]).
-export([jose_ecdh_1pu_a/1]).
-export([jwe_a_1/1]).
-export([jwe_a_2/1]).
-export([jwe_a_3/1]).
-export([jwk_c/1]).
-export([jwk_rsa_multi/1]).
-export([jws_a_1/1]).
-export([jws_a_2/1]).
-export([jws_a_3/1]).
-export([jws_a_4/1]).
-export([jws_a_5/1]).
-export([rfc7520_5_9/1]).
Macros.
-define(tv_ok(T, M, F, A, E),
case erlang:apply(M, F, A) of
E ->
ok;
T ->
ct:fail({{M, F, A}, {expected, E}, {got, T}})
end).
all() ->
[
{group, jose_cfrg_curves},
{group, jose_ecdh_1pu},
{group, jose_jwe},
{group, jose_jwk},
{group, jose_jws},
{group, rfc7520}
].
groups() ->
[
{jose_cfrg_curves, [parallel], [
jose_cfrg_curves_a_1,
jose_cfrg_curves_a_2,
jose_cfrg_curves_a_3,
jose_cfrg_curves_a_4,
jose_cfrg_curves_a_5,
jose_cfrg_curves_a_6,
jose_cfrg_curves_a_7
]},
{jose_ecdh_1pu, [parallel], [
jose_ecdh_1pu_a
]},
{jose_jwe, [parallel], [
jwe_a_1,
jwe_a_2,
jwe_a_3
]},
{jose_jwk, [parallel], [
jwk_c,
jwk_rsa_multi
]},
{jose_jws, [parallel], [
jws_a_1,
jws_a_2,
jws_a_3,
jws_a_4,
jws_a_5
]},
{rfc7520, [parallel], [
rfc7520_5_9
]}
].
init_per_suite(Config) ->
application:set_env(jose, crypto_fallback, true),
application:set_env(jose, unsecured_signing, true),
_ = application:ensure_all_started(jose),
Config.
end_per_suite(_Config) ->
_ = application:stop(jose),
ok.
init_per_group(G=jose_cfrg_curves, Config) ->
{ok, A1} = file:consult(data_file("jose_cfrg_curves/a.1.config", Config)),
{ok, A3} = file:consult(data_file("jose_cfrg_curves/a.3.config", Config)),
{ok, A4} = file:consult(data_file("jose_cfrg_curves/a.4.config", Config)),
{ok, A5} = file:consult(data_file("jose_cfrg_curves/a.5.config", Config)),
{ok, A6} = file:consult(data_file("jose_cfrg_curves/a.6.config", Config)),
{ok, A7} = file:consult(data_file("jose_cfrg_curves/a.7.config", Config)),
[{jose_cfrg_curves_a_1, A1}, {jose_cfrg_curves_a_3, A3},
{jose_cfrg_curves_a_4, A4}, {jose_cfrg_curves_a_5, A5},
{jose_cfrg_curves_a_6, A6}, {jose_cfrg_curves_a_7, A7} | jose_ct:start(G, Config)];
init_per_group(G=jose_ecdh_1pu, Config) ->
{ok, A} = file:consult(data_file("jose_ecdh_1pu/a.config", Config)),
[{jose_ecdh_1pu_a, A} | jose_ct:start(G, Config)];
init_per_group(G=jose_jwe, Config) ->
{ok, A1} = file:consult(data_file("jwe/a.1.config", Config)),
{ok, A2} = file:consult(data_file("jwe/a.2.config", Config)),
{ok, A3} = file:consult(data_file("jwe/a.3.config", Config)),
[{jwe_a_1, A1}, {jwe_a_2, A2}, {jwe_a_3, A3} | jose_ct:start(G, Config)];
init_per_group(G=jose_jwk, Config) ->
{ok, C} = file:consult(data_file("jwk/c.config", Config)),
[{jwk_c, C} | jose_ct:start(G, Config)];
init_per_group(G=jose_jws, Config) ->
{ok, A1} = file:consult(data_file("jws/a.1.config", Config)),
{ok, A2} = file:consult(data_file("jws/a.2.config", Config)),
{ok, A3} = file:consult(data_file("jws/a.3.config", Config)),
{ok, A4} = file:consult(data_file("jws/a.4.config", Config)),
{ok, A5} = file:consult(data_file("jws/a.5.config", Config)),
[{jws_a_1, A1}, {jws_a_2, A2}, {jws_a_3, A3}, {jws_a_4, A4}, {jws_a_5, A5} | jose_ct:start(G, Config)];
init_per_group(G=rfc7520, Config) ->
{ok, V_5_9} = file:consult(data_file("rfc7520/5.9.config", Config)),
[{rfc7520_5_9, V_5_9} | jose_ct:start(G, Config)];
init_per_group(Group, Config) ->
jose_ct:start(Group, Config).
end_per_group(_Group, Config) ->
jose_ct:stop(Config),
ok.
CFRG ECDH and signatures in JOSE
jose_cfrg_curves_a_1(Config) ->
C = ?config(jose_cfrg_curves_a_1, Config),
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_1_Secret = hex:hex_to_bin(?config("a.1.secret", C)),
A_1_PK = hex:hex_to_bin(?config("a.1.pk", C)),
{_, #'jose_EdDSA25519PrivateKey'{
publicKey=#'jose_EdDSA25519PublicKey'{publicKey=A_1_PK},
privateKey=A_1_Secret
}} = jose_jwk:to_key(A_1_JWK),
{_, #'jose_EdDSA25519PublicKey'{publicKey=A_1_PK}} = jose_jwk:to_public_key(A_1_JWK),
ok.
CFRG ECDH and signatures in JOSE
jose_cfrg_curves_a_2(Config) ->
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
A_2_JWK = jose_jwk:to_public(A_1_JWK),
ok.
CFRG ECDH and signatures in JOSE
A.3. JWK thumbprint canonicalization
jose_cfrg_curves_a_3(Config) ->
C = ?config(jose_cfrg_curves_a_3, Config),
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
A_3_JWK = jose_jwk:from_binary(?config("a.3.jwk+json", C)),
A_3_THUMBPRINT_HEX = ?config("a.3.thumbprint+hex", C),
A_3_THUMBPRINT = jose_jwa_base64url:encode(hex:hex_to_bin(A_3_THUMBPRINT_HEX)),
A_3_THUMBPRINT = ?config("a.3.thumbprint+b64", C),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_1_JWK),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_2_JWK),
A_3_THUMBPRINT = jose_jwk:thumbprint(A_3_JWK),
ok.
CFRG ECDH and signatures in JOSE
jose_cfrg_curves_a_4(Config) ->
C = ?config(jose_cfrg_curves_a_4, Config),
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_4_PROTECTED = ?config("a.4.jws+json", C),
A_4_JWS = jose_jws:from_binary(A_4_PROTECTED),
A_4_JWS_B64 = ?config("a.4.jws+b64", C),
A_4_TXT = ?config("a.4.txt", C),
A_4_TXT_B64 = ?config("a.4.txt+b64", C),
A_4_SIGNINGINPUT = ?config("a.4.signing-input", C),
A_4_SIG = hex:hex_to_bin(?config("a.4.sig+hex", C)),
A_4_SIG_B64 = ?config("a.4.sig+b64", C),
A_4_SIG_COMPACT = ?config("a.4.sig+compact", C),
A_4_TXT_B64 = jose_jwa_base64url:encode(A_4_TXT),
A_4_SIGNINGINPUT = << A_4_JWS_B64/binary, $., A_4_TXT_B64/binary >>,
A_4_SIGNINGINPUT = jose_jws:signing_input(A_4_TXT, A_4_JWS),
A_4_MAP=#{
<<"signature">> := A_4_SIG_B64
} = force_sign(A_1_JWK, A_4_TXT, A_4_PROTECTED, A_4_JWS),
A_4_SIG = jose_jwa_base64url:decode(A_4_SIG_B64),
{_, A_4_SIG_COMPACT} = jose_jws:compact(A_4_MAP),
ok.
CFRG ECDH and signatures in JOSE
jose_cfrg_curves_a_5(Config) ->
C = ?config(jose_cfrg_curves_a_5, Config),
A_1_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.1.jwk+json", Config)),
A_2_JWK = jose_jwk:from_file(data_file("jose_cfrg_curves/a.2.jwk+json", Config)),
A_5_SIG_COMPACT = ?config("a.5.sig+compact", C),
A_5_JWS = jose_jws:from_binary(?config("a.5.jws+json", C)),
A_5_PAYLOAD_DATA = ?config("a.5.txt", C),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(A_1_JWK, A_5_SIG_COMPACT),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(A_2_JWK, A_5_SIG_COMPACT),
ok.
CFRG ECDH and signatures in JOSE
A.6. ECDH-ES with X25519
jose_cfrg_curves_a_6(Config) ->
C = ?config(jose_cfrg_curves_a_6, Config),
A_6_BOB_JWK = jose_jwk:from_binary(?config("a.6.bob-jwk+json", C)),
A_6_BOB_Secret = hex:hex_to_bin(?config("a.6.bob-secret+hex", C)),
A_6_BOB_PK = hex:hex_to_bin(?config("a.6.bob-pk+hex", C)),
A_6_EPK_Secret = hex:hex_to_bin(?config("a.6.epk-secret+hex", C)),
A_6_EPK_PK = hex:hex_to_bin(?config("a.6.epk-pk+hex", C)),
A_6_EPK_JWK = jose_jwk:from_binary(?config("a.6.epk-jwk+json", C)),
A_6_PROTECTED = ?config("a.6.jwe+json", C),
A_6_JWE = jose_jwe:from_binary(A_6_PROTECTED),
A_6_Z = hex:hex_to_bin(?config("a.6.z+hex", C)),
A_6_BOB_SK = << A_6_BOB_Secret/binary, A_6_BOB_PK/binary >>,
A_6_EPK_SK = << A_6_EPK_Secret/binary, A_6_EPK_PK/binary >>,
A_6_BOB_S_JWK = jose_jwk:from_okp({'X25519', A_6_BOB_SK}),
A_6_EPK_S_JWK = jose_jwk:from_okp({'X25519', A_6_EPK_SK}),
{_, #'jose_X25519PrivateKey'{
publicKey=#'jose_X25519PublicKey'{publicKey=A_6_BOB_PK},
privateKey=A_6_BOB_Secret
}} = jose_jwk:to_key(A_6_BOB_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_BOB_PK}} = jose_jwk:to_public_key(A_6_BOB_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_BOB_PK}} = jose_jwk:to_key(A_6_BOB_JWK),
{_, #'jose_X25519PrivateKey'{
publicKey=#'jose_X25519PublicKey'{publicKey=A_6_EPK_PK},
privateKey=A_6_EPK_Secret
}} = jose_jwk:to_key(A_6_EPK_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_EPK_PK}} = jose_jwk:to_public_key(A_6_EPK_S_JWK),
{_, #'jose_X25519PublicKey'{publicKey=A_6_EPK_PK}} = jose_jwk:to_key(A_6_EPK_JWK),
A_6_Z = jose_jwk:shared_secret(A_6_BOB_JWK, A_6_EPK_S_JWK),
A_6_Z = jose_jwk:shared_secret(A_6_EPK_JWK, A_6_BOB_S_JWK),
A_6_TEXT = <<"Example of X25519 encryption">>,
{_, A_6_ENC_MAP} = jose_jwe:block_encrypt({A_6_BOB_JWK, A_6_EPK_S_JWK}, A_6_TEXT, A_6_JWE),
{_, A_6_ENC_COMPACT} = jose_jwe:compact(A_6_ENC_MAP),
{A_6_TEXT, A_6_JWE} = jose_jwe:block_decrypt(A_6_BOB_S_JWK, A_6_ENC_MAP),
{A_6_TEXT, A_6_JWE} = jose_jwe:block_decrypt(A_6_BOB_S_JWK, A_6_ENC_COMPACT),
ok.
CFRG ECDH and signatures in JOSE
A.7. ECDH-ES with X448
jose_cfrg_curves_a_7(Config) ->
C = ?config(jose_cfrg_curves_a_7, Config),
A_7_BOB_JWK = jose_jwk:from_binary(?config("a.7.bob-jwk+json", C)),
A_7_BOB_Secret = hex:hex_to_bin(?config("a.7.bob-secret+hex", C)),
A_7_BOB_PK = hex:hex_to_bin(?config("a.7.bob-pk+hex", C)),
A_7_EPK_Secret = hex:hex_to_bin(?config("a.7.epk-secret+hex", C)),
A_7_EPK_PK = hex:hex_to_bin(?config("a.7.epk-pk+hex", C)),
A_7_EPK_JWK = jose_jwk:from_binary(?config("a.7.epk-jwk+json", C)),
A_7_PROTECTED = ?config("a.7.jwe+json", C),
A_7_JWE = jose_jwe:from_binary(A_7_PROTECTED),
A_7_Z = hex:hex_to_bin(?config("a.7.z+hex", C)),
A_7_BOB_SK = << A_7_BOB_Secret/binary, A_7_BOB_PK/binary >>,
A_7_EPK_SK = << A_7_EPK_Secret/binary, A_7_EPK_PK/binary >>,
A_7_BOB_S_JWK = jose_jwk:from_okp({'X448', A_7_BOB_SK}),
A_7_EPK_S_JWK = jose_jwk:from_okp({'X448', A_7_EPK_SK}),
{_, #'jose_X448PrivateKey'{
publicKey=#'jose_X448PublicKey'{publicKey=A_7_BOB_PK},
privateKey=A_7_BOB_Secret
}} = jose_jwk:to_key(A_7_BOB_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_BOB_PK}} = jose_jwk:to_public_key(A_7_BOB_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_BOB_PK}} = jose_jwk:to_key(A_7_BOB_JWK),
{_, #'jose_X448PrivateKey'{
publicKey=#'jose_X448PublicKey'{publicKey=A_7_EPK_PK},
privateKey=A_7_EPK_Secret
}} = jose_jwk:to_key(A_7_EPK_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_EPK_PK}} = jose_jwk:to_public_key(A_7_EPK_S_JWK),
{_, #'jose_X448PublicKey'{publicKey=A_7_EPK_PK}} = jose_jwk:to_key(A_7_EPK_JWK),
A_7_Z = jose_jwk:shared_secret(A_7_BOB_JWK, A_7_EPK_S_JWK),
A_7_Z = jose_jwk:shared_secret(A_7_EPK_JWK, A_7_BOB_S_JWK),
A_7_TEXT = <<"Example of X448 encryption">>,
{_, A_7_ENC_MAP} = jose_jwe:block_encrypt({A_7_BOB_JWK, A_7_EPK_S_JWK}, A_7_TEXT, A_7_JWE),
{_, A_7_ENC_COMPACT} = jose_jwe:compact(A_7_ENC_MAP),
{A_7_TEXT, A_7_JWE} = jose_jwe:block_decrypt(A_7_BOB_S_JWK, A_7_ENC_MAP),
{A_7_TEXT, A_7_JWE} = jose_jwe:block_decrypt(A_7_BOB_S_JWK, A_7_ENC_COMPACT),
ok.
Public Key Authenticated Encryption for JOSE: ECDH-1PU
A. Example ECDH-1PU Key Agreement Computation with A256GCM
jose_ecdh_1pu_a(Config) ->
C = ?config(jose_ecdh_1pu_a, Config),
A_USSK_JWK = jose_jwk:from_binary(?config("a.ussk.jwk+json", C)),
A_VSSK_JWK = jose_jwk:from_binary(?config("a.vssk.jwk+json", C)),
A_UESK_JWK = jose_jwk:from_binary(?config("a.uesk.jwk+json", C)),
A_JWE = jose_jwe:from_binary(?config("a.jwe+json", C)),
A_ZE = hex:hex_to_bin(?config("a.ze+hex", C)),
A_ZS = hex:hex_to_bin(?config("a.zs+hex", C)),
A_Z = hex:hex_to_bin(?config("a.z+hex", C)),
A_CEK = hex:hex_to_bin(?config("a.cek+hex", C)),
A_ZE = jose_jwk:shared_secret(A_VSSK_JWK, A_UESK_JWK),
A_ZS = jose_jwk:shared_secret(A_VSSK_JWK, A_USSK_JWK),
A_ZS = jose_jwk:shared_secret(A_USSK_JWK, A_VSSK_JWK),
A_Z = <<A_ZE/binary, A_ZS/binary>>,
{A_CEK, _} = jose_jwe:next_cek({A_VSSK_JWK, A_USSK_JWK, A_UESK_JWK}, A_JWE),
A_CEK = jose_jwe:key_decrypt({A_USSK_JWK, A_VSSK_JWK, A_UESK_JWK}, <<>>, A_JWE),
ok.
A.1. Example using RSAES-OAEP and AES GCM
jwe_a_1(Config) ->
C = ?config(jwe_a_1, Config),
A_1_TXT = ?config("a.1.txt", C),
A_1_1_JWE_DATA = ?config("a.1.1.jwe+json", C),
A_1_1_JWE_MAP = jose:decode(A_1_1_JWE_DATA),
A_1_1_JWE = jose_jwe:from_binary(A_1_1_JWE_DATA),
{_, A_1_1_JWE_MAP} = jose_jwe:to_map(A_1_1_JWE),
A_1_1_JWE_DATA_B64 = ?config("a.1.1.jwe+json.b64", C),
A_1_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_1_1_JWE))),
A_1_2_CEK = ?config("a.1.2.cek", C),
A_1_3_JWK_DATA = ?config("a.1.3.jwk+json", C),
A_1_3_JWK_MAP = jose:decode(A_1_3_JWK_DATA),
A_1_3_JWK = jose_jwk:from_binary(A_1_3_JWK_DATA),
{_, A_1_3_JWK_MAP} = jose_jwk:to_map(A_1_3_JWK),
A_1_3_CEK_ENCRYPTED = ?config("a.1.3.cek.encrypted", C),
A_1_3_CEK_ENCRYPTED_B64 = ?config("a.1.3.cek.encrypted.b64", C),
A_1_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_1_3_CEK_ENCRYPTED),
A_1_4_IV = ?config("a.1.4.iv", C),
A_1_4_IV_B64 = ?config("a.1.4.iv.b64", C),
A_1_4_IV_B64 = jose_jwa_base64url:encode(A_1_4_IV),
A_1_5_AAD = ?config("a.1.5.aad", C),
A_1_1_JWE_DATA_B64 = A_1_5_AAD,
A.1.6
A_1_6_CIPHER = ?config("a.1.6.txt.cipher", C),
A_1_6_TAG = ?config("a.1.6.txt.tag", C),
A_1_6_CIPHER_B64 = ?config("a.1.6.txt.cipher.b64", C),
A_1_6_TAG_B64 = ?config("a.1.6.txt.tag.b64", C),
A_1_6_CIPHER = jose_jwa_base64url:decode(A_1_6_CIPHER_B64),
A_1_6_TAG = jose_jwa_base64url:decode(A_1_6_TAG_B64),
A_1_7_COMPACT = ?config("a.1.7.jwe+compact", C),
{A_1_TXT, A_1_1_JWE} = jose_jwe:block_decrypt(A_1_3_JWK, A_1_7_COMPACT),
Roundtrip test
A_1_7_MAP = jose_jwe:block_encrypt(A_1_3_JWK, A_1_TXT, A_1_2_CEK, A_1_4_IV, A_1_1_JWE),
{A_1_TXT, A_1_1_JWE} = jose_jwe:block_decrypt(A_1_3_JWK, A_1_7_MAP),
ok.
A.2. Example using RSAES-PKCS1-v1_5 and AES_128_CBC_HMAC_SHA_256
jwe_a_2(Config) ->
C = ?config(jwe_a_2, Config),
A_2_TXT = ?config("a.2.txt", C),
A_2_1_JWE_DATA = ?config("a.2.1.jwe+json", C),
A_2_1_JWE_MAP = jose:decode(A_2_1_JWE_DATA),
A_2_1_JWE = jose_jwe:from_binary(A_2_1_JWE_DATA),
{_, A_2_1_JWE_MAP} = jose_jwe:to_map(A_2_1_JWE),
A_2_1_JWE_DATA_B64 = ?config("a.2.1.jwe+json.b64", C),
A_2_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_2_1_JWE))),
A_2_2_CEK = ?config("a.2.2.cek", C),
A_2_3_JWK_DATA = ?config("a.2.3.jwk+json", C),
A_2_3_JWK_MAP = jose:decode(A_2_3_JWK_DATA),
A_2_3_JWK = jose_jwk:from_binary(A_2_3_JWK_DATA),
{_, A_2_3_JWK_MAP} = jose_jwk:to_map(A_2_3_JWK),
A_2_3_CEK_ENCRYPTED = ?config("a.2.3.cek.encrypted", C),
A_2_3_CEK_ENCRYPTED_B64 = ?config("a.2.3.cek.encrypted.b64", C),
A_2_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_2_3_CEK_ENCRYPTED),
A_2_4_IV = ?config("a.2.4.iv", C),
A_2_4_IV_B64 = ?config("a.2.4.iv.b64", C),
A_2_4_IV_B64 = jose_jwa_base64url:encode(A_2_4_IV),
A_2_5_AAD = ?config("a.2.5.aad", C),
A_2_1_JWE_DATA_B64 = A_2_5_AAD,
A_2_6_CIPHER = ?config("a.2.6.txt.cipher", C),
A_2_6_TAG = ?config("a.2.6.txt.tag", C),
A_2_6_CIPHER_B64 = ?config("a.2.6.txt.cipher.b64", C),
A_2_6_TAG_B64 = ?config("a.2.6.txt.tag.b64", C),
A_2_6_CIPHER = jose_jwa_base64url:decode(A_2_6_CIPHER_B64),
A_2_6_TAG = jose_jwa_base64url:decode(A_2_6_TAG_B64),
A_2_7_COMPACT = ?config("a.2.7.jwe+compact", C),
{A_2_TXT, A_2_1_JWE} = jose_jwe:block_decrypt(A_2_3_JWK, A_2_7_COMPACT),
Roundtrip test
A_2_7_MAP = jose_jwe:block_encrypt(A_2_3_JWK, A_2_TXT, A_2_2_CEK, A_2_4_IV, A_2_1_JWE),
{A_2_TXT, A_2_1_JWE} = jose_jwe:block_decrypt(A_2_3_JWK, A_2_7_MAP),
ok.
A.3. Example Using AES Key Wrap and AES_128_CBC_HMAC_SHA_256
jwe_a_3(Config) ->
C = ?config(jwe_a_3, Config),
A_3_TXT = ?config("a.3.txt", C),
A_3_1_JWE_DATA = ?config("a.3.1.jwe+json", C),
A_3_1_JWE_MAP = jose:decode(A_3_1_JWE_DATA),
A_3_1_JWE = jose_jwe:from_binary(A_3_1_JWE_DATA),
{_, A_3_1_JWE_MAP} = jose_jwe:to_map(A_3_1_JWE),
A_3_1_JWE_DATA_B64 = ?config("a.3.1.jwe+json.b64", C),
A_3_1_JWE_DATA_B64 = jose_jwa_base64url:encode(element(2, jose_jwe:to_binary(A_3_1_JWE))),
A_3_2_CEK = ?config("a.3.2.cek", C),
A.3.3
A_3_3_JWK_DATA = ?config("a.3.3.jwk+json", C),
A_3_3_JWK_MAP = jose:decode(A_3_3_JWK_DATA),
A_3_3_JWK = jose_jwk:from_binary(A_3_3_JWK_DATA),
{_, A_3_3_JWK_MAP} = jose_jwk:to_map(A_3_3_JWK),
A_3_3_CEK_ENCRYPTED = ?config("a.3.3.cek.encrypted", C),
A_3_3_CEK_ENCRYPTED_B64 = ?config("a.3.3.cek.encrypted.b64", C),
A_3_3_CEK_ENCRYPTED_B64 = jose_jwa_base64url:encode(A_3_3_CEK_ENCRYPTED),
A_3_4_IV = ?config("a.3.4.iv", C),
A_3_4_IV_B64 = ?config("a.3.4.iv.b64", C),
A_3_4_IV_B64 = jose_jwa_base64url:encode(A_3_4_IV),
A_3_5_AAD = ?config("a.3.5.aad", C),
A_3_1_JWE_DATA_B64 = A_3_5_AAD,
A_3_6_CIPHER = ?config("a.3.6.txt.cipher", C),
A_3_6_TAG = ?config("a.3.6.txt.tag", C),
A_3_6_CIPHER_B64 = ?config("a.3.6.txt.cipher.b64", C),
A_3_6_TAG_B64 = ?config("a.3.6.txt.tag.b64", C),
A_3_6_CIPHER = jose_jwa_base64url:decode(A_3_6_CIPHER_B64),
A_3_6_TAG = jose_jwa_base64url:decode(A_3_6_TAG_B64),
A_3_7_COMPACT = ?config("a.3.7.jwe+compact", C),
{A_3_TXT, A_3_1_JWE} = jose_jwe:block_decrypt(A_3_3_JWK, A_3_7_COMPACT),
Roundtrip test
A_3_7_MAP = jose_jwe:block_encrypt(A_3_3_JWK, A_3_TXT, A_3_2_CEK, A_3_4_IV, A_3_1_JWE),
{A_3_TXT, A_3_1_JWE} = jose_jwe:block_decrypt(A_3_3_JWK, A_3_7_MAP),
ok.
JSON Web Key (JWK)
jwk_c(Config) ->
C = ?config(jwk_c, Config),
C_1_JSON_DATA = ?config("c.1.jwk+json", C),
C_1_JSON = jose:decode(C_1_JSON_DATA),
C_1_JWK = jose_jwk:from_file(data_file("jwk/c.1.jwk+json", Config)),
{_, C_1_JSON} = jose_jwk:to_map(C_1_JWK),
C_2_JSON_DATA = ?config("c.2.jwe+json", C),
C_2_JSON = jose:decode(C_2_JSON_DATA),
C_2_JWE = jose_jwe:from_file(data_file("jwk/c.2.jwe+json", Config)),
{_, C_2_JSON} = jose_jwe:to_map(C_2_JWE),
C_2_B64_DATA = ?config("c.2.b64", C),
C_2_B64_DATA = jose_jwa_base64url:encode(C_2_JSON_DATA),
C_3_CEK = ?config("c.3.cek", C),
C.4
C_4_TXT = ?config("c.4.txt", C),
C_4_SALT = ?config("c.4.salt", C),
C_4_SALT = << (maps:get(<<"alg">>, C_2_JSON))/binary, 0, (jose_jwa_base64url:decode(maps:get(<<"p2s">>, C_2_JSON)))/binary >>,
C_4_DKEY = ?config("c.4.derivedkey", C),
{ok, C_4_DKEY} = jose_jwa_pkcs5:pbkdf2({hmac, sha256}, C_4_TXT, C_4_SALT, maps:get(<<"p2c">>, C_2_JSON), 16),
C_5_EKEY = ?config("c.5.encryptedkey", C),
{C_5_EKEY, _} = jose_jwe:key_encrypt(C_4_TXT, C_3_CEK, C_2_JWE),
C_6_IV = ?config("c.6.iv", C),
C_7_AAD = ?config("c.7.aad", C),
C_7_AAD = C_2_JSON_DATA,
C_8_CIPHER_TXT = ?config("c.8.ciphertxt", C),
C_8_CIPHER_TAG = ?config("c.8.ciphertag", C),
Forcing the AAD data to be C_7_AAD
C_8_ENC_MAP=#{
<<"ciphertext">> := C_8_CIPHER_TXT_B64,
<<"tag">> := C_8_CIPHER_TAG_B64
} = force_block_encrypt(C_4_TXT, C_1_JSON_DATA, C_3_CEK, C_6_IV, C_7_AAD, C_2_JWE),
C_8_CIPHER_TXT = jose_jwa_base64url:decode(C_8_CIPHER_TXT_B64),
C_8_CIPHER_TAG = jose_jwa_base64url:decode(C_8_CIPHER_TAG_B64),
C.9
C_9_DATA = ?config("c.9.jwe+txt", C),
{_, C_9_DATA} = jose_jwe:compact(C_8_ENC_MAP),
{C_1_JSON_DATA, _} = jose_jwe:block_decrypt(C_4_TXT, C_9_DATA),
Encrypt and Decrypt
{_, C_1_JWK} = jose_jwk:from_map(C_4_TXT, jose_jwk:to_map(C_4_TXT, C_2_JWE, C_1_JWK)),
ok.
jwk_rsa_multi(Config) ->
JWK = jose_jwk:from_pem_file(data_file("rsa-multi.pem", Config)),
PlainText = <<"I've Got a Lovely Bunch of Coconuts">>,
Encrypted = jose_jwk:block_encrypt(PlainText, JWK),
CompactEncrypted = jose_jwe:compact(Encrypted),
{PlainText, _} = jose_jwk:block_decrypt(Encrypted, JWK),
{PlainText, _} = jose_jwk:block_decrypt(CompactEncrypted, JWK),
Message = <<"Secret Message">>,
Signed = jose_jwk:sign(Message, JWK),
CompactSigned = jose_jws:compact(Signed),
{true, Message, _} = jose_jwk:verify(Signed, JWK),
{true, Message, _} = jose_jwk:verify(CompactSigned, JWK),
{_, Map} = jose_jwk:to_map(JWK),
JWK = jose_jwk:from_map(Map),
Password = <<"My Passphrase">>,
PEM = element(2, jose_jwk:to_pem(JWK)),
EncryptedPEM = element(2, jose_jwk:to_pem(Password, JWK)),
JWK = jose_jwk:from_pem(PEM),
JWK = jose_jwk:from_pem(Password, EncryptedPEM),
JWK = jose_jwk:from_pem(jose_jwk:to_pem(JWK)),
JWK = jose_jwk:from_pem(Password, jose_jwk:to_pem(Password, JWK)),
{_, JWK} = jose_jwk:from_binary(Password, jose_jwk:to_binary(Password, JWK)),
{_, JWK} = jose_jwk:from_binary(Password, jose_jwe:compact(jose_jwk:to_map(Password, JWK))),
ok.
JSON Web Signature (JWS)
Appendix A.1. Example Using HMAC SHA-256
jws_a_1(Config) ->
C = ?config(jws_a_1, Config),
A_1_1_JSON_DATA = ?config("a.1.1.jws+json", C),
A_1_1_JSON = jose:decode(A_1_1_JSON_DATA),
A_1_1_JWS = jose_jws:from_file(data_file("jws/a.1.1.jws+json", Config)),
{_, A_1_1_JSON} = jose_jws:to_map(A_1_1_JWS),
A_1_1_B64_DATA = ?config("a.1.1.b64", C),
A_1_1_B64_DATA = jose_jwa_base64url:encode(A_1_1_JSON_DATA),
A_1_1_PAYLOAD_DATA = ?config("a.1.1.payload", C),
A_1_1_B64_PAYLOAD_DATA = ?config("a.1.1.payload-b64", C),
A_1_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_1_1_PAYLOAD_DATA),
A_1_1_SIGNING_INPUT_DATA = ?config("a.1.1.signing-input", C),
A_1_1_SIGNING_INPUT_DATA = << A_1_1_B64_DATA/binary, $., A_1_1_B64_PAYLOAD_DATA/binary >>,
A_1_1_JWK = jose_jwk:from_file(data_file("jws/a.1.1.jwk+json", Config)),
A_1_1_B64_SIGNATURE_DATA = ?config("a.1.1.signature-b64", C),
Forcing the Protected header to be A_1_1_JSON_DATA
A_1_1_MAP=#{
<<"signature">> := A_1_1_B64_SIGNATURE_DATA
} = force_sign(A_1_1_JWK, A_1_1_PAYLOAD_DATA, A_1_1_JSON_DATA, A_1_1_JWS),
A_1_1_COMPACT_DATA = ?config("a.1.1.compact", C),
{_, A_1_1_COMPACT_DATA} = jose_jws:compact(A_1_1_MAP),
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jws:verify(A_1_1_JWK, A_1_1_MAP),
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jws:verify(A_1_1_JWK, A_1_1_COMPACT_DATA),
{true, A_1_1_PAYLOAD_DATA, A_1_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_1_1_PAYLOAD_DATA, A_1_1_JWS, A_1_1_JWK), A_1_1_JWK),
ok.
JSON Web Signature (JWS)
Appendix A.2. Example Using RSASSA-PKCS1-v1_5 SHA-256
jws_a_2(Config) ->
C = ?config(jws_a_2, Config),
A_2_1_JSON_DATA = ?config("a.2.1.jws+json", C),
A_2_1_JSON = jose:decode(A_2_1_JSON_DATA),
A_2_1_JWS = jose_jws:from_file(data_file("jws/a.2.1.jws+json", Config)),
{_, A_2_1_JSON} = jose_jws:to_map(A_2_1_JWS),
A_2_1_B64_DATA = ?config("a.2.1.b64", C),
A_2_1_B64_DATA = jose_jwa_base64url:encode(A_2_1_JSON_DATA),
A_2_1_PAYLOAD_DATA = ?config("a.2.1.payload", C),
A_2_1_B64_PAYLOAD_DATA = ?config("a.2.1.payload-b64", C),
A_2_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_2_1_PAYLOAD_DATA),
A_2_1_SIGNING_INPUT_DATA = ?config("a.2.1.signing-input", C),
A_2_1_SIGNING_INPUT_DATA = << A_2_1_B64_DATA/binary, $., A_2_1_B64_PAYLOAD_DATA/binary >>,
A_2_1_JWK = jose_jwk:from_file(data_file("jws/a.2.1.jwk+json", Config)),
A_2_1_B64_SIGNATURE_DATA = ?config("a.2.1.signature-b64", C),
A_2_1_MAP=#{
<<"signature">> := A_2_1_B64_SIGNATURE_DATA
} = force_sign(A_2_1_JWK, A_2_1_PAYLOAD_DATA, A_2_1_JSON_DATA, A_2_1_JWS),
A_2_1_COMPACT_DATA = ?config("a.2.1.compact", C),
{_, A_2_1_COMPACT_DATA} = jose_jws:compact(A_2_1_MAP),
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jws:verify(A_2_1_JWK, A_2_1_MAP),
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jws:verify(A_2_1_JWK, A_2_1_COMPACT_DATA),
{true, A_2_1_PAYLOAD_DATA, A_2_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_2_1_PAYLOAD_DATA, A_2_1_JWS, A_2_1_JWK), A_2_1_JWK),
ok.
%% JSON Web Signature (JWS)
%% Appendix A.3. Example Using ECDSA P-256 SHA-256
jws_a_3(Config) ->
C = ?config(jws_a_3, Config),
A_3_1_JSON_DATA = ?config("a.3.1.jws+json", C),
A_3_1_JSON = jose:decode(A_3_1_JSON_DATA),
A_3_1_JWS = jose_jws:from_file(data_file("jws/a.3.1.jws+json", Config)),
{_, A_3_1_JSON} = jose_jws:to_map(A_3_1_JWS),
A_3_1_B64_DATA = ?config("a.3.1.b64", C),
A_3_1_B64_DATA = jose_jwa_base64url:encode(A_3_1_JSON_DATA),
A_3_1_PAYLOAD_DATA = ?config("a.3.1.payload", C),
A_3_1_B64_PAYLOAD_DATA = ?config("a.3.1.payload-b64", C),
A_3_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_3_1_PAYLOAD_DATA),
A_3_1_SIGNING_INPUT_DATA = ?config("a.3.1.signing-input", C),
A_3_1_SIGNING_INPUT_DATA = << A_3_1_B64_DATA/binary, $., A_3_1_B64_PAYLOAD_DATA/binary >>,
A_3_1_JWK = jose_jwk:from_file(data_file("jws/a.3.1.jwk+json", Config)),
A_3_1_B64_SIGNATURE_DATA = ?config("a.3.1.signature-b64", C),
A_3_1_MAP=#{
<<"signature">> := A_3_1_B64_SIGNATURE_DATA_ALT
} = force_sign(A_3_1_JWK, A_3_1_PAYLOAD_DATA, A_3_1_JSON_DATA, A_3_1_JWS),
%% ECDSA produces non-matching signatures
true = (A_3_1_B64_SIGNATURE_DATA =/= A_3_1_B64_SIGNATURE_DATA_ALT),
A_3_1_COMPACT_DATA = ?config("a.3.1.compact", C),
{_, A_3_1_COMPACT_DATA} = jose_jws:compact(A_3_1_MAP#{ <<"signature">> => A_3_1_B64_SIGNATURE_DATA }),
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jws:verify(A_3_1_JWK, A_3_1_MAP),
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jws:verify(A_3_1_JWK, A_3_1_COMPACT_DATA),
{true, A_3_1_PAYLOAD_DATA, A_3_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_3_1_PAYLOAD_DATA, A_3_1_JWS, A_3_1_JWK), A_3_1_JWK),
ok.
%% JSON Web Signature (JWS)
%% Appendix A.4. Example Using ECDSA P-521 SHA-512
jws_a_4(Config) ->
C = ?config(jws_a_4, Config),
A_4_1_JSON_DATA = ?config("a.4.1.jws+json", C),
A_4_1_JSON = jose:decode(A_4_1_JSON_DATA),
A_4_1_JWS = jose_jws:from_file(data_file("jws/a.4.1.jws+json", Config)),
{_, A_4_1_JSON} = jose_jws:to_map(A_4_1_JWS),
A_4_1_B64_DATA = ?config("a.4.1.b64", C),
A_4_1_B64_DATA = jose_jwa_base64url:encode(A_4_1_JSON_DATA),
A_4_1_PAYLOAD_DATA = ?config("a.4.1.payload", C),
A_4_1_B64_PAYLOAD_DATA = ?config("a.4.1.payload-b64", C),
A_4_1_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_4_1_PAYLOAD_DATA),
A_4_1_SIGNING_INPUT_DATA = ?config("a.4.1.signing-input", C),
A_4_1_SIGNING_INPUT_DATA = << A_4_1_B64_DATA/binary, $., A_4_1_B64_PAYLOAD_DATA/binary >>,
A_4_1_JWK = jose_jwk:from_file(data_file("jws/a.4.1.jwk+json", Config)),
A_4_1_B64_SIGNATURE_DATA = ?config("a.4.1.signature-b64", C),
A_4_1_MAP=#{
<<"signature">> := A_4_1_B64_SIGNATURE_DATA_ALT
} = force_sign(A_4_1_JWK, A_4_1_PAYLOAD_DATA, A_4_1_JSON_DATA, A_4_1_JWS),
%% ECDSA produces non-matching signatures
true = (A_4_1_B64_SIGNATURE_DATA =/= A_4_1_B64_SIGNATURE_DATA_ALT),
A_4_1_COMPACT_DATA = ?config("a.4.1.compact", C),
{_, A_4_1_COMPACT_DATA} = jose_jws:compact(A_4_1_MAP#{ <<"signature">> => A_4_1_B64_SIGNATURE_DATA }),
%% A.4.2
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jws:verify(A_4_1_JWK, A_4_1_MAP),
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jws:verify(A_4_1_JWK, A_4_1_COMPACT_DATA),
{true, A_4_1_PAYLOAD_DATA, A_4_1_JWS} = jose_jwk:verify(jose_jwk:sign(A_4_1_PAYLOAD_DATA, A_4_1_JWS, A_4_1_JWK), A_4_1_JWK),
ok.
%% JSON Web Signature (JWS)
jws_a_5(Config) ->
C = ?config(jws_a_5, Config),
A_5_JSON_DATA = ?config("a.5.jws+json", C),
A_5_JSON = jose:decode(A_5_JSON_DATA),
A_5_JWS = jose_jws:from_file(data_file("jws/a.5.jws+json", Config)),
{_, A_5_JSON} = jose_jws:to_map(A_5_JWS),
A_5_B64_DATA = ?config("a.5.b64", C),
A_5_B64_DATA = jose_jwa_base64url:encode(A_5_JSON_DATA),
A_5_PAYLOAD_DATA = ?config("a.5.payload", C),
A_5_B64_PAYLOAD_DATA = ?config("a.5.payload-b64", C),
A_5_B64_PAYLOAD_DATA = jose_jwa_base64url:encode(A_5_PAYLOAD_DATA),
A_5_SIGNING_INPUT_DATA = ?config("a.5.signing-input", C),
A_5_SIGNING_INPUT_DATA = << A_5_B64_DATA/binary, $., A_5_B64_PAYLOAD_DATA/binary >>,
%% Forcing the Protected header to be A_5_JSON_DATA
A_5_MAP=#{
<<"signature">> := <<>>
} = force_sign(none, A_5_PAYLOAD_DATA, A_5_JSON_DATA, A_5_JWS),
A_5_COMPACT_DATA = ?config("a.5.compact", C),
{_, A_5_COMPACT_DATA} = jose_jws:compact(A_5_MAP),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, A_5_MAP),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, A_5_COMPACT_DATA),
{true, A_5_PAYLOAD_DATA, A_5_JWS} = jose_jws:verify(none, jose_jws:sign(none, A_5_PAYLOAD_DATA, A_5_JWS)),
ok.
%% Examples of Protecting Content Using JSON Object Signing and Encryption (JOSE)
%% 5.9. Compressed Content
rfc7520_5_9(Config) ->
C = ?config(rfc7520_5_9, Config),
%% 5.9.1
V_5_9_1_PLAIN_TEXT = ?config("figure.72", C),
V_5_9_1_JWK = jose_jwk:from_binary(?config("figure.151", C)),
%% 5.9.2
V_5_9_2_COMPRESSED_PLAIN_TEXT = ?config("figure.162", C),
V_5_9_1_PLAIN_TEXT = jose_jwe_zip:uncompress(jose_jwa_base64url:decode(V_5_9_2_COMPRESSED_PLAIN_TEXT), zlib),
V_5_9_2_COMPRESSED_PLAIN_TEXT = jose_jwa_base64url:encode(jose_jwe_zip:compress(V_5_9_1_PLAIN_TEXT, zlib)),
V_5_9_2_CEK = ?config("figure.163", C),
V_5_9_2_IV = ?config("figure.164", C),
%% 5.9.3
V_5_9_3_ENCRYPTED_KEY = ?config("figure.165", C),
{ALG, _} = jose_jwe_alg_aes_kw:from_map(#{<<"alg">> => <<"A128KW">>}),
V_5_9_3_ENCRYPTED_KEY = jose_jwa_base64url:encode(element(1, jose_jwe_alg_aes_kw:key_encrypt(V_5_9_1_JWK, jose_jwa_base64url:decode(V_5_9_2_CEK), ALG))),
V_5_9_2_CEK = jose_jwa_base64url:encode(jose_jwe_alg_aes_kw:key_decrypt(V_5_9_1_JWK, {undefined, undefined, jose_jwa_base64url:decode(V_5_9_3_ENCRYPTED_KEY)}, ALG)),
%% 5.9.4
V_5_9_4_JWE = jose_jwe:from_binary(?config("figure.166", C)),
V_5_9_4_JWE_PROTECTED = ?config("figure.167", C),
V_5_9_4_JWE = jose_jwe:from_binary(jose_jwa_base64url:decode(V_5_9_4_JWE_PROTECTED)),
V_5_9_4_CIPHER_TEXT = ?config("figure.168", C),
V_5_9_4_CIPHER_TAG = ?config("figure.169", C),
V_5_9_5_JWE_COMPACT = ?config("figure.170", C),
V_5_9_5_JWE_MAP = jose:decode(?config("figure.172", C)),
V_5_9_4_CIPHER_TEXT = maps:get(<<"ciphertext">>, V_5_9_5_JWE_MAP),
V_5_9_4_CIPHER_TAG = maps:get(<<"tag">>, V_5_9_5_JWE_MAP),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, V_5_9_5_JWE_COMPACT),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, V_5_9_5_JWE_MAP),
%% Roundtrip test
{_, CIPHER_TEXT} = jose_jwe:compact(jose_jwe:block_encrypt(V_5_9_1_JWK, V_5_9_1_PLAIN_TEXT, jose_jwa_base64url:decode(V_5_9_2_CEK), jose_jwa_base64url:decode(V_5_9_2_IV), V_5_9_4_JWE)),
{V_5_9_1_PLAIN_TEXT, V_5_9_4_JWE} = jose_jwe:block_decrypt(V_5_9_1_JWK, CIPHER_TEXT),
ok.
%% Internal functions
%% @private
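%% Encrypts PlainText much like jose_jwe:block_encrypt/5, but forces the
%% protected header to be exactly OverrideProtected and uses the caller-supplied
%% CEK and IV, so ciphertext and tag can be compared byte-for-byte against the
%% spec's test vectors. Illustrative sketch (variable names hypothetical); the
%% returned map decrypts as usual:
%%   EncMap = force_block_encrypt(JWK, PlainText, CEK, IV, ProtectedJSON, JWE),
%%   {PlainText, _} = jose_jwe:block_decrypt(JWK, EncMap).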
force_block_encrypt(Key, PlainText, CEK, IV, OverrideProtected, JWE=#jose_jwe{alg={ALGModule, ALG}, enc={ENCModule, ENC}}) ->
{EncryptedKey, _} = ALGModule:key_encrypt(Key, CEK, ALG),
Protected = jose_jwa_base64url:encode(OverrideProtected),
{CipherText, CipherTag} = ENCModule:block_encrypt({Protected, maybe_compress(PlainText, JWE)}, CEK, IV, ENC),
#{
<<"protected">> => Protected,
<<"encrypted_key">> => jose_jwa_base64url:encode(EncryptedKey),
<<"iv">> => jose_jwa_base64url:encode(IV),
<<"ciphertext">> => jose_jwa_base64url:encode(CipherText),
<<"tag">> => jose_jwa_base64url:encode(CipherTag)
}.
%% @private
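%% Signs PlainText much like jose_jws:sign/3, but forces the protected header
%% to be exactly OverrideProtected (the raw JSON from the spec), so the
%% resulting signature can be compared byte-for-byte when the algorithm is
%% deterministic. Illustrative sketch (variable names hypothetical); the
%% returned map verifies as usual:
%%   Signed = force_sign(JWK, Payload, ProtectedJSON, JWS),
%%   {true, Payload, _} = jose_jws:verify(JWK, Signed).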
force_sign(Key, PlainText, OverrideProtected, #jose_jws{alg={ALGModule, ALG}}) ->
Protected = jose_jwa_base64url:encode(OverrideProtected),
Payload = jose_jwa_base64url:encode(PlainText),
Message = << Protected/binary, $., Payload/binary >>,
Signature = jose_jwa_base64url:encode(ALGModule:sign(Key, Message, ALG)),
#{
<<"payload">> => Payload,
<<"protected">> => Protected,
<<"signature">> => Signature
}.
%% @private
data_file(File, Config) ->
filename:join([?config(data_dir, Config), File]).
%% @private
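%% Applies the JWE "zip" compression (e.g. zlib) to PlainText when the JWE
%% declares one; otherwise returns PlainText unchanged.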
maybe_compress(PlainText, #jose_jwe{zip={Module, ZIP}}) ->
Module:compress(PlainText, ZIP);
maybe_compress(PlainText, _) ->
PlainText.
|
6d30f80b6ab8ed5690a47c42fbd8ba27eb485bbd72e45a7f1939370445f0acaf | racket/typed-racket | parse-type-tests.rkt | #lang racket/base
(require "test-utils.rkt"
"evaluator.rkt"
(for-syntax
racket/base
racket/dict
racket/set
racket/list
syntax/parse
typed-racket/base-env/base-structs
typed-racket/env/tvar-env
typed-racket/env/type-alias-env
typed-racket/env/mvar-env
typed-racket/utils/tc-utils
typed-racket/private/parse-type
typed-racket/rep/type-rep
typed-racket/rep/values-rep
typed-racket/types/numeric-tower
typed-racket/types/resolve
typed-racket/types/prop-ops
(submod typed-racket/base-env/base-types initialize)
(rename-in typed-racket/types/abbrev
[Un t:Un] [-> t:->] [->* t:->*]))
(only-in typed-racket/typed-racket do-standard-inits)
typed-racket/base-env/base-types
typed-racket/base-env/base-types-extra
typed-racket/base-env/colon
;; needed for parsing case-lambda/case-> types
(only-in typed-racket/base-env/prims-lambda case-lambda)
(prefix-in un: (only-in racket/class init init-field field augment))
(only-in typed/racket/class init init-field field augment)
(only-in racket/unit import export init-depend)
rackunit)
(provide tests)
(gen-test-main)
(define mutated-var #f)
(define not-mutated-var #f)
(define x #'x)
(define y #'y)
(define z #'z)
(begin-for-syntax
(do-standard-inits)
(register-mutated-var #'mutated-var))
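;; `pt-test` checks a single parse at phase 1: (pt-test TYPE-STX EXPECTED-TYPE)
;; parses TYPE-STX with `parse-type` and compares the result against
;; EXPECTED-TYPE, while (pt-test FAIL TYPE-STX #:msg RX) expects parsing to
;; raise a syntax error (optionally matching RX). Illustrative use, mirroring
;; the cases below: (pt-test (Listof Number) (make-Listof N))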
(define-syntax (pt-test stx)
(syntax-parse stx
[(_ (~datum FAIL) ty-stx:expr
(~optional tvar-env:expr #:defaults [(tvar-env #'initial-tvar-env)])
(~optional (~seq #:msg msg*:expr) #:defaults [(msg* #'#f)]))
(quasisyntax/loc stx
(test-case #,(format "~a" (syntax->datum #'ty-stx))
(define msg msg*)
(define actual-message
(phase1-phase0-eval
(with-handlers ([exn:fail:syntax? (lambda (exn) #`#,(exn-message exn))])
(parameterize ([current-tvars tvar-env]
[delay-errors? #f])
(parse-type (quote-syntax ty-stx)))
#'#f)))
(unless actual-message
(fail-check "No syntax error when parsing type."))
(when msg
(unless (regexp-match? msg actual-message)
(with-check-info (['expected msg] ['actual actual-message])
(fail-check "parse-type raised the wrong error message"))))))]
[(_ ty-stx:expr ty-val:expr
(~optional tvar-env:expr #:defaults [(tvar-env #'initial-tvar-env)]))
(quasisyntax/loc
stx
(test-case #,(format "~a" (syntax->datum #'ty-stx))
(define-values (expected actual same?)
(phase1-phase0-eval
(parameterize ([current-tvars tvar-env]
[delay-errors? #f])
(define expected ty-val)
(define actual (parse-type (quote-syntax ty-stx)))
#`(values #,expected #,actual #,(equal? actual expected)))))
(unless same?
(with-check-info (['expected expected] ['actual actual])
(fail-check "Unequal types")))))]))
(define-syntax pt-tests
(syntax-rules ()
[(_ nm [elems ...] ...)
(test-suite nm
(pt-test elems ...) ...)]))
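;; `pt-tests` wraps a suite name and any number of bracketed cases into a
;; single rackunit test-suite, turning each case into a `pt-test`.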
(define-for-syntax N -Number)
(define-for-syntax B -Boolean)
(define-for-syntax Sym -Symbol)
(define tests
(pt-tests
"parse-type tests"
[FAIL UNBOUND]
[FAIL List]
[FAIL (All (A) (List -> Boolean))]
[Number N]
[Any Univ]
[(List Number String) (-Tuple (list N -String))]
[(All (Number) Number) (-poly (a) a)]
[(Number . Number) (-pair N N)]
[(Listof Boolean) (make-Listof B)]
[(Vectorof (Listof Symbol)) (make-Vector (make-Listof Sym))]
[(Immutable-Vectorof (Listof Symbol)) (make-Immutable-Vector (make-Listof Sym))]
[(Mutable-Vectorof (Listof Symbol)) (make-Mutable-Vector (make-Listof Sym))]
[(Vector Symbol String) (-vec* Sym -String)]
[(Immutable-Vector Symbol String) (make-Immutable-HeterogeneousVector (list Sym -String))]
[(Mutable-Vector Symbol String) (make-Mutable-HeterogeneousVector (list Sym -String))]
[(pred Number) (make-pred-ty N)]
[(-> (values Number Boolean Number)) (t:-> (-values (list N B N)))]
[(Number -> Number) (t:-> N N)]
[(All (A) Number -> Number) (-poly (a) (t:-> N N))]
[(All (A) Number -> Number -> Number) (-poly (a) (t:-> N (t:-> N N)))]
[(All (A) Number -> Number -> Number -> Number)
(-poly (a) (t:-> N (t:-> N (t:-> N N))))]
[FAIL (All (A) -> Number Number)]
[FAIL (All (A) Listof Any)]
[(All (A) (Number -> Number)) (-poly (a) (t:-> N N))]
[(All (A) (-> Number Number)) (-poly (a) (t:-> N N))]
[(All (A) A -> A) (-poly (a) (t:-> a a))]
[(All (A) A → A) (-poly (a) (t:-> a a))]
[FAIL (All (A) → A A)]
[(All (A) (A -> A)) (-poly (a) (t:-> a a))]
[(All (A) (-> A A)) (-poly (a) (t:-> a a))]
[FAIL (All (A) -> Integer -> Integer -> Integer)]
;; requires transformer time stuff that doesn't work
#;[(Refinement even?) (make-Refinement #'even?)]
[(Number Number Number Boolean -> Number) (N N N B . t:-> . N)]
[(-> Number Number Number Boolean Number) (N N N B . t:-> . N)]
[(Number Number Number * -> Boolean) ((list N N) N . t:->* . B)]
[(-> Number Number Number * Boolean) ((list N N) N . t:->* . B)]
;[((. Number) -> Number) (->* (list) N N)] ;; not legal syntax
[(U Number Boolean) (t:Un N B)]
[(Union Number Boolean) (t:Un N B)]
[(U Number Boolean Number) (t:Un N B)]
[(U Number Boolean 1) (t:Un N B)]
[(All (a) (Listof a)) (-poly (a) (make-Listof a))]
[(All (a ...) (a ... a -> Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(All (a ...) (-> a ... a Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(∀ (a) (Listof a)) (-poly (a) (make-Listof a))]
[(∀ (a ...) (a ... a -> Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(∀ (a ...) (-> a ... a Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(All (a ...) (a ... -> Number))
(-polydots (a) ((list) [a a] . ->... . N))]
[(All (a ...) (-> a ... Number))
(-polydots (a) ((list) [a a] . ->... . N))]
[(All (a ...) (-> (values a ...)))
(-polydots (a) (t:-> (make-ValuesDots (list) a 'a)))]
[(-> Number AnyValues) (t:-> N ManyUniv)]
[(Rec x (U Null (Pair String (Pair Number x))))
(-mu x (t:Un -Null (-pair -String (-pair -Number x))))]
;; PR 14554, non-productive recursive type
[FAIL (Rec x (All (A #:row) x))]
[FAIL (Rec x (All (A) x))]
[FAIL (Rec x x)]
[FAIL (Rec x (U x Number))]
[FAIL ((Listof Number) Number) #:msg "bad syntax in type application: only an identifier"]
[(case-lambda (Number -> Boolean) (Number Number -> Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Number -> Boolean) (Number Number -> Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Number -> Boolean) (-> Number Number Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Boolean -> Boolean)
(-> Boolean Boolean Boolean)
(-> Boolean String * Boolean)
(->* (Boolean) #:rest String Boolean)
(->* (Boolean) #:rest-star (String Symbol) Boolean)
(->* (Boolean) (Boolean) #:rest-star (String Symbol) Boolean)
(->* (Boolean Boolean) #:rest-star (String Symbol) Boolean))
(make-Fun
(remove-duplicates
(list (-Arrow (list -Boolean) -Boolean)
(-Arrow (list -Boolean -Boolean) -Boolean)
(-Arrow (list -Boolean) #:rest -String -Boolean)
(-Arrow (list -Boolean)
#:rest (make-Rest (list -String -Symbol))
-Boolean)
(-Arrow (list -Boolean -Boolean)
#:rest (make-Rest (list -String -Symbol))
-Boolean))))]
[1 (-val 1)]
[#t (-val #t)]
[#f (-val #f)]
["foo" (-val "foo")]
['(1 2 3) (-Tuple (map -val '(1 2 3)))]
[(Listof Number) (make-Listof N)]
[FAIL (-> Listof Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Pairof Listof Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (-> Number Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Any -> Boolean : Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Listof -> Listof : Listof)
#:msg "expected a valid type not a type constructor"]
[a (-v a) (dict-set initial-tvar-env 'a (-v a))]
[(Any -> Boolean : Number) (make-pred-ty -Number)]
[(-> Any Boolean : Number) (make-pred-ty -Number)]
[(Any -> Boolean : #:+ (Number @ 0) #:- (! Number @ 0))
(make-pred-ty -Number)]
[(-> Any Boolean : #:+ (Number @ 0) #:- (! Number @ 0))
(make-pred-ty -Number)]
[(Any -> Boolean : #:+ (! Number @ 0) #:- (Number @ 0))
(t:->* (list Univ) -Boolean : (-PS (-not-type 0 -Number) (-is-type 0 -Number)))]
[(-> Any Boolean : #:+ (! Number @ 0) #:- (Number @ 0))
(t:->* (list Univ) -Boolean : (-PS (-not-type 0 -Number) (-is-type 0 -Number)))]
[(-> Any (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0)))
(t:-> Univ
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -Number) (-not-type (cons 1 0) -Number))))]
[(-> Any Any (-> Any Boolean : #:+ (Number @ 1 1) #:- (! Number @ 1 1)))
(t:-> Univ Univ
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 1) -Number) (-not-type (cons 1 1) -Number))))]
[(-> Any #:foo Any (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0)))
(->key Univ #:foo Univ #t
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -Number) (-not-type (cons 1 0) -Number))))]
[(All (a b) (-> (-> a Any : #:+ b) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS (-is-type 0 b) -tt)) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:+ (! b)) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS (-not-type 0 b) -tt)) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:- b) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS -tt (-is-type 0 b))) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:- (! b)) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS -tt (-not-type 0 b))) (-lst a) (-lst b)))]
[(Number -> Number -> Number)
(t:-> -Number (t:-> -Number -Number))]
[(-> Number (-> Number Number))
(t:-> -Number (t:-> -Number -Number))]
[(Integer -> (All (X) (X -> X)))
(t:-> -Integer (-poly (x) (t:-> x x)))]
[(-> Integer (All (X) (-> X X)))
(t:-> -Integer (-poly (x) (t:-> x x)))]
[FAIL -> #:msg "incorrect use of -> type constructor"]
[FAIL (Any -> Any #:object 0) #:msg "expected the identifier `:'"]
[FAIL (-> Any Any #:+ (String @ x)) #:msg "expected the identifier `:'"]
[FAIL (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0))
#:msg "Index 1 used in"]
[FAIL (-> Any (-> Any Boolean : #:+ (Number @ 1 1) #:- (! Number @ 1 1)))
#:msg "larger than argument length"]
[(Any -> Boolean : #:+ (Symbol @ not-mutated-var))
(t:-> Univ -Boolean : (-PS (-is-type (-id-path #'not-mutated-var) -Symbol) -tt))]
[FAIL (Any -> Boolean : #:+ (Symbol @ mutated-var))
#:msg "may not reference identifiers that are mutated"]
[(Any -> Boolean : #:+ (! Symbol @ not-mutated-var))
(t:-> Univ -Boolean : (-PS (-not-type (-id-path #'not-mutated-var) -Symbol) -tt))]
[FAIL (Any -> Boolean : #:+ (! Symbol @ mutated-var))
#:msg "may not reference identifiers that are mutated"]
[FAIL (Any -> Boolean : #:+ (String @ unbound))
#:msg "may not reference identifiers that are unbound"]
;; ->* types
[(->* (String Symbol) Void)
(make-Fun (list (-Arrow (list -String -Symbol) -Void)))]
[(->* () (String) #:rest Symbol Void)
(make-Fun (list (-Arrow (list) -Void)
(-Arrow (list -String)
#:rest -Symbol
-Void)))]
[(->* (Number) (String) #:rest Symbol Void)
(make-Fun (list (-Arrow (list -Number) -Void)
(-Arrow (list -Number -String)
#:rest -Symbol
-Void)))]
[(->* (Number) (String Void) #:rest Symbol Any)
(make-Fun (list (-Arrow (list -Number) Univ)
(-Arrow (list -Number -String) Univ)
(-Arrow (list -Number -String -Void)
#:rest -Symbol
Univ)))]
[(->* (String Symbol) (String) Void)
(->opt -String -Symbol [-String] -Void)]
[(->* (String Symbol) (String Symbol) Void)
(->opt -String -Symbol [-String -Symbol] -Void)]
[(->* (String Symbol) (String) (values Void String))
(->opt -String -Symbol [-String] (-values (list -Void -String)))]
[(->* (String Symbol) (String) #:rest Symbol Void)
(->optkey -String -Symbol [-String] #:rest -Symbol -Void)]
[(All (a) (->* (a Symbol) (String) #:rest Symbol Void))
(-poly (a) (->optkey a -Symbol [-String] #:rest -Symbol -Void))]
[(->* (Integer) (String #:foo Integer String) Void)
(->optkey -Integer [-String -String] #:foo -Integer #f -Void)]
[(->* (Integer) (String #:foo Integer) Void)
(->optkey -Integer [-String] #:foo -Integer #f -Void)]
[(->* (Integer) (#:foo Integer String) Void)
(->optkey -Integer [-String] #:foo -Integer #f -Void)]
[(->* (Integer #:bar Integer) (String) Void)
(->optkey -Integer [-String] #:bar -Integer #t -Void)]
[(->* (#:bar Integer Integer) (String) Void)
(->optkey -Integer [-String] #:bar -Integer #t -Void)]
[(->* (Integer #:bar Integer) (String #:foo Integer) Void)
(->optkey -Integer [-String] #:bar -Integer #t #:foo -Integer #f -Void)]
[(->* (#:bar Integer Integer) (#:foo Integer String) Void)
(->optkey -Integer [-String] #:bar -Integer #t #:foo -Integer #f -Void)]
[(->* (Any (-> Any Boolean : #:+ (String @ 1 0))) Void)
(t:-> Univ (t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -String) -tt))
-Void)]
[FAIL (->* (Any (-> Any Boolean : #:+ (String @ 2 0))) Void)
#:msg "Index 2 used in"]
[(Opaque foo?) (make-Opaque #'foo?)]
;; PR 14122
[FAIL (Opaque 3)]
;; struct types
[(Struct-Type arity-at-least) (make-StructType (resolve -Arity-At-Least))]
[FAIL (Struct-Type Integer)]
[FAIL (Struct-Type foo)]
[Struct-TypeTop -StructTypeTop]
;; keyword function types
[(#:a String -> String)
(->optkey [] #:a -String #t -String)]
[([#:a String] -> String)
(->optkey [] #:a -String #f -String)]
[(#:a String #:b String -> String)
(->optkey [] #:a -String #t #:b -String #t -String)]
[([#:a String] #:b String -> String)
(->optkey [] #:a -String #f #:b -String #t -String)]
[(#:a String [#:b String] -> String)
(->optkey [] #:a -String #t #:b -String #f -String)]
[(String #:a String -> String)
(->optkey -String [] #:a -String #t -String)]
[(String #:a String String * -> String)
(->optkey -String [] #:rest -String #:a -String #t -String)]
[(String [#:a String] String * -> String)
(->optkey -String [] #:rest -String #:a -String #f -String)]
;; #:rest-star tests
[(->* () #:rest-star () String)
(->optkey () -String)]
[(->* () (Symbol) #:rest-star (String Symbol) String)
(->optkey (-Symbol) #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* () #:rest-star (String) String)
(->optkey () #:rest (make-Rest (list -String)) -String)]
[(->* () #:rest-star (String Symbol) String)
(->optkey () #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) #:rest-star (String Symbol) String)
(->optkey -String () #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) (Symbol) #:rest-star (String Symbol) String)
(->optkey -String (-Symbol) #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) (Symbol) #:rest-star () String)
(->optkey -String (-Symbol) -String)]
[FAIL (->* (String) #:rest-star Number String)]
[FAIL (->* (String) (Symbol) #:rest-star Number String)]
[FAIL (->* (String) (Symbol) #:rest-star (Not-A-Real-Type-Should-Fail) String)]
;;; Prefab structs
[(Prefab foo String) (-prefab 'foo -String)]
[FAIL (Prefab (foo 0) String)]
;;; Struct Type Properties
[(Struct-Property Number) (-struct-property -Number #f)]
[(Struct-Property (-> Number Number)) (-struct-property (t:-> -Number -Number) #f)]
[(Struct-Property (-> Self Number)) (-struct-property (t:-> -Self -Number) #f)]
[FAIL (-> Self Number)]
[(Some (X) (-> Number (-> X Number) : X)) (-some (X) (t:-> -Number (t:-> X -Number) : (-PS (-is-type 0 X) (-not-type 0 X))))]
[(-> Number (Some (X) (-> X Number) : #:+ X)) (t:-> -Number (-some-res (X) (t:-> X -Number) : #:+ X))]
;;; Classes
[(Class) (-class)]
[(Class (init [x Number] [y Number]))
(-class #:init ([x -Number #f] [y -Number #f]))]
[(Class (un:init [x Number] [y Number]))
(-class #:init ([x -Number #f] [y -Number #f]))]
[(Class (init [x Number] [y Number #:optional]))
(-class #:init ([x -Number #f] [y -Number #t]))]
[(Class (init [x Number]) (init-field [y Number]))
(-class #:init ([x -Number #f]) #:init-field ([y -Number #f]))]
[(Class [m (Number -> Number)])
(-class #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (init [x Number]))
(-class #:init ([x -Number #f]) #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (field [x Number]))
(-class #:field ([x -Number]) #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (un:field [x Number]))
(-class #:field ([x -Number]) #:method ([m (t:-> N N)]))]
[(Class (augment [m (Number -> Number)]))
(-class #:augment ([m (t:-> N N)]))]
[(Class (un:augment [m (Number -> Number)]))
(-class #:augment ([m (t:-> N N)]))]
[(Class (augment [m (Number -> Number)]) (field [x Number]))
(-class #:augment ([m (t:-> N N)]) #:field ([x -Number]))]
[(Class (augment [m (-> Number)]) [m (-> Number)])
(-class #:method ([m (t:-> N)]) #:augment ([m (t:-> N)]))]
[FAIL (Class foobar)]
[FAIL (Class [x UNBOUND])]
[FAIL (Class [x Number #:random-keyword])]
[FAIL (Class (random-clause [x Number]))]
[FAIL (Class [m Number])]
[FAIL (Class (augment [m Number]))]
;; test duplicates
[FAIL (Class [x Number] [x Number])]
[FAIL (Class (init [x Number]) (init [x Number]))]
[FAIL (Class (init [x Number]) (init-field [x Number]))]
[FAIL (Class (field [x Number]) (init-field [x Number]))]
[FAIL (Class (augment [m (-> Number)] [m (-> Number)]))]
[FAIL (Class (augment [m (-> Number)]) (augment [m (-> Number)]))]
[FAIL (Class [m (-> Number)] [m (-> Number)])]
;; test #:row-var
[(All (r #:row) (Class #:row-var r))
(make-PolyRow (list 'r)
(-class #:row (make-F 'r))
(list null null null null))]
[FAIL (All (r #:row) (Class #:implements (Class #:row-var r)))]
[FAIL (All (r #:row) (Class #:implements (Class) #:row-var r))]
[FAIL (Class #:row-var 5)]
[FAIL (Class #:row-var (list 3))]
[FAIL (Class #:row-var x)]
[FAIL (Class #:implements (Class #:row-var r) #:row-var x)]
[FAIL (Class #:implements (Class #:row-var r) #:row-var r)]
[FAIL (All (r #:row)
(All (x #:row)
(Class #:implements (Class #:row-var r) #:row-var x)))]
[FAIL (All (r #:row) (Class #:implements (Class #:row-var r) #:row-var r))]
;; Test #:implements, some of these used to work but now they have to
;; refer to type aliases. Testing actual type aliases is hard here though.
[FAIL (Class #:implements (Class [m (Number -> Number)]) (field [x Number]))]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [n (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [m (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements (Class (init [x Integer]) [m (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements Number)]
[FAIL (Class #:implements Number [m (Number -> Number)])]
[FAIL (Class #:implements (Class [m (Number -> Number)]) [m String])]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [m (String -> String)])
(field [x Number]))]
[FAIL (Class #:implements (Class (augment [m (Number -> Number)]))
#:implements (Class (augment [m (String -> String)]))
(field [x Number]))]
[FAIL (Class #:implements (Class (augment [m (Number -> Number)]))
(augment [m (-> Number)]))]
;; Test Object types
[(Object) (-object)]
[(Object [m (Number -> Number)])
(-object #:method ([m (t:-> N N)]))]
[(Object [m (Number -> Number)] (field [f Number]))
(-object #:method ([m (t:-> N N)]) #:field ([f N]))]
[FAIL (Object foobar)]
[FAIL (Object [x UNBOUND])]
[FAIL (Object [x Number #:random-keyword])]
[FAIL (Object (random-clause [x Number]))]
[FAIL (Object [x Number] [x Number])]
[FAIL (Object (field [x Number]) (field [x Number]))]
[FAIL (Object [x Number] [x Number])]
[FAIL (Object [m Number])]
;; Test row polymorphic types
[(All (r #:row) ((Class #:row-var r) -> (Class #:row-var r)))
(-polyrow (r) (list null null null null)
(t:-> (-class #:row r) (-class #:row r)))]
[(Listof (All (r #:row) ((Class #:row-var r) -> (Class #:row-var r))))
(-lst (-polyrow (r) (list null null null null)
(t:-> (-class #:row r) (-class #:row r))))]
[(All (r #:row (init x y z) (field f) m n)
((Class #:row-var r) -> (Class #:row-var r)))
(-polyrow (r) (list '(x y z) '(f) '(m n) '())
(t:-> (-class #:row r) (-class #:row r)))]
;; Class types cannot use a row variable that doesn't constrain
;; all of its members to be absent in the row
[FAIL (All (r #:row (init x))
((Class #:row-var r (init y)) -> (Class #:row-var r)))]
[FAIL (All (r #:row (init x y z) (field f) m n)
((Class #:row-var r a b c) -> (Class #:row-var r)))]
;; parsing tests for Unit types
;; These are only simple tests because checking types
;; with signatures requires interaction with the Signature
;; environment. Additionally, more complex tests of Unit
;; type parsing happen in unit-tests and integration tests as well
[(Unit (import) (export) (init-depend) String)
(make-Unit null null null (-values (list -String)))]
[(Unit (import) (export) String)
(make-Unit null null null (-values (list -String)))]
[(Unit (import) (export) (init-depend))
(make-Unit null null null (-values (list -Void)))]
[(Unit (import) (export))
(make-Unit null null null (-values (list -Void)))]
[UnitTop -UnitTop]
[FAIL (Unit (export) String)]
[FAIL (Unit (import) String)]
[FAIL (Unit (init-depend) String)]
[FAIL (Unit (import bad) (export) String)]
[FAIL (Unit (import) (export bad) String)]
[(Sequenceof Any Any) (-seq Univ Univ)]
;; GH issue #314
[FAIL ~> #:msg "unbound"]
;; intersections
[(∩) Univ]
[(∩ Any) Univ]
[(∩ String Symbol) -Bottom]
[(Intersection String Symbol) -Bottom]
[(∩ (-> Number Number) (-> String String))
(-unsafe-intersect (t:-> -String -String)
(t:-> -Number -Number))]
;; refinements
;; top/bot
[(Refine [x : Number] Top) -Number]
[(Refine [x : Number] Bot) -Bottom]
;; simplify props about subject
[(Refine [x : Any] (: x String)) -String]
[(Refine [x : Integer] (: x Integer)) -Int]
[(Refine [x : Integer] (: x Symbol)) -Bottom]
;; refinements w/ inequalities
[(Refine [val : Integer] (<= val 42))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [vec : (Vectorof Any)] (<= (vector-length vec) 42))
(-refine/fresh x (-vec Univ) (-leq (-lexp (-vec-len-of (-id-path x)))
(-lexp 42)))]
[(Refine [p : (Pairof Integer Integer)] (<= (car p) (cdr p)))
(-refine/fresh p (-pair -Int -Int) (-leq (-lexp (-car-of (-id-path p)))
(-lexp (-cdr-of (-id-path p)))))]
[(Refine [x : Integer] (<= (* 2 x) 42))
(-refine/fresh x -Int (-leq (-lexp (list 2 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 x) 42))
(-refine/fresh x -Int (-leq (-lexp 1 x)
(-lexp 42)))]
[(Refine [x : Integer] (<= (- 1 x) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (-lexp (list -1 x)))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 (* 3 x)) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 3 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 (* 3 x) (* 2 x)) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 5 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= 42 (+ 1 (* 3 x) (* 2 x))))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp 1 (list 5 x))))]
[(Refine [x : Integer] (<= 42 (* 2 x)))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp (list 2 x))))]
[(Refine [x : Integer] (<= 42 (+ 1 x)))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp 1 x)))]
[(Refine [x : Integer] (<= x 42))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [x : Integer] (< x 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 1 x))
(-lexp 42)))]
[(Refine [x : Integer] (>= x 42))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp x)))]
[(Refine [x : Integer] (>= x 42))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp x)))]
[(Refine [x : Integer] (> x 42))
(-refine/fresh x -Int (-leq (-lexp 43)
(-lexp x)))]
[(Refine [n : Integer] (<= (- (+ n n) (* 1 (+ n)))
(+ 2 (- 80 (* 2 (+ 9 9 (+) (-) 2))))))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
;; id shadowing
[(Refine [x : Any] (: x (Refine [x : Integer] (<= x 42))))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
;; refinements w/ equality
[(Refine [x : Integer] (= x 42))
(-refine/fresh x -Int (-and (-leq (-lexp x) (-lexp 42))
(-leq (-lexp 42) (-lexp x))))]
;; other arbitrary propositions in refinements
[(Refine [x : Integer] (and (<= x 42)
(<= 0 x)))
(-refine/fresh x -Int (-and (-leq (-lexp x) (-lexp 42))
(-leq (-lexp 0) (-lexp x))))]
[(Refine [x : String] (and (: z Symbol)
(! y String)))
(-refine/fresh x -String (-and (-is-type #'z -Symbol)
(-not-type #'y -String)))]
[(Refine [x : String] (or (: z Symbol)
(: y String)))
(-refine/fresh x -String (-or (-is-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (unless (: z Symbol)
(: y String)))
(-refine/fresh x -String (-or (-is-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (or (not (: y String))
(: z Symbol)))
(-refine/fresh x -String (-or (-not-type #'y -String)
(-is-type #'z -Symbol)))]
[(Refine [x : Any] (if (: x String) (! y String) (: z Symbol)))
(-refine/fresh x Univ (-or (-and (-is-type x -String) (-not-type #'y -String))
(-and (-not-type x -String) (-is-type #'z -Symbol))))]
[(Refine [x : String] (when (: z Symbol) (: y String)))
(-refine/fresh x -String (-or (-not-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (when (not (not (: z Symbol)))
(: y String)))
(-refine/fresh x -String (-or (-not-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : (Refine [x : Integer] (<= 42 x))] (<= x 42))
(-refine/fresh z -Int (-and (-leq (-lexp 42) (-lexp z))
(-leq (-lexp z) (-lexp 42))))]
;; fail for unbound identifiers
[FAIL (Refine [x : String] (: r Symbol))]
[FAIL (Refine [x String] (: x Symbol))]
[FAIL (Refine [x y : String] (: x Symbol))]
[FAIL (Refine [x : String] (: r Symbol) (: r Symbol))]
;; fail for bad path element usage
[FAIL (Refine [p : Integer] (<= (car p) 42))]
[FAIL (Refine [p : Integer] (<= (cdr p) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (car (car p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (car (cdr p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (cdr (car p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (cdr (cdr p)) 42))]
[FAIL (Refine [vec : Any] (<= (vector-length vec) 42))]
;; fail for bad linear expression (i.e. not an integer)
[FAIL (Refine [q : Any] (<= q 42))]
[FAIL (Refine [q : Any] (<= 42 q))]
[FAIL (Refine [q : Any] (< q 42))]
[FAIL (Refine [q : Any] (< 42 q))]
[FAIL (Refine [q : Any] (>= 42 q))]
[FAIL (Refine [q : Any] (>= q 42))]
[FAIL (Refine [q : Any] (> q 42))]
[FAIL (Refine [q : Any] (> 42 q))]
[FAIL (Refine [q : Any] (= 42 q))]
[FAIL (Refine [q : Any] (= q 42))]
[FAIL (Refine [q : Any] (<= (+ 1 q) 42))]
[FAIL (Refine [q : Any] (<= 42 (+ 1 q)))]
[FAIL (Refine [q : Any] (<= (* 2 q) 42))]
[FAIL (Refine [q : Any] (<= 42 (* 2 q)))]
[FAIL (Refine [q : Any] (<= (+ 1 (* 2 q)) 42))]
[FAIL (Refine [q : Any] (<= 42 (+ 1 (* 2 q))))]
;; id shadowing & bad linear expression
[FAIL (Refine [x : Integer] (: x (Refine [x : Any] (<= 42 x))))]
;; dependent function syntax tests!
;; - - - - - - - - - - - - - - - - - - - -
;; no deps, no dep type!
[(-> ([v : (Vectorof Any)])
Any)
(t:-> (-vec Univ) Univ)]
[(-> ([v : (Vectorof Any)]
[i : Integer])
Any)
(t:-> (-vec Univ) -Int Univ)]
;; if only dep range, still a DFun (if the type is dep)
[(-> ([x : Integer]
[y : Integer])
(Refine [res : Integer] (<= res (+ x y))))
(make-DepFun
(list -Int -Int)
-tt
(-values
(-refine/fresh res -Int
(-leq (-lexp (-id-path (cons 0 0)))
(-lexp (-id-path (cons 1 0))
(-id-path (cons 1 1)))))))]
;; simple dep latent props/object (no dep type, no DFun)
[(-> ([x : Any])
Boolean
#:+ (: x Integer))
(t:-> Univ -Boolean : (-PS (-is-type (cons 0 0) -Int) -tt))]
[(-> ([x : Any])
Boolean
#:- (! x Integer))
(t:-> Univ -Boolean : (-PS -tt (-not-type (cons 0 0) -Int)))]
[(-> ([x : Any])
Boolean
#:+ (: x Integer)
#:- (! x Integer))
(t:-> Univ -Boolean : (-PS (-is-type (cons 0 0) -Int)
(-not-type (cons 0 0) -Int)))]
[(-> ([x : Any]
[y : Any])
Boolean
#:+ (: x Integer)
#:- (: y Integer)
#:object x)
(t:-> Univ Univ -Boolean
: (-PS (-is-type (cons 0 0) -Int)
(-is-type (cons 0 1) -Int))
: (-id-path (cons 0 0)))]
;; simple dependencies
[(-> ([v : (Vectorof Any)]
[i : (v) (Refine [n : Integer] (<= n (vector-length v)))])
Any)
(make-DepFun (list (-vec Univ)
(-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path (cons 1 0)))))))
-tt
(-values Univ))]
[(-> ([v : (Vectorof Any)]
[i : (v) (Refine [n : Integer] (<= n (vector-length v)))])
Any)
(dep-> ([x : (-vec Univ)]
[y : (-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path x)))))])
Univ)]
[(-> ([i : (v) (Refine [n : Integer] (<= n (vector-length v)))]
[v : (Vectorof Any)])
Any)
(make-DepFun (list (-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path (cons 1 1))))))
(-vec Univ))
-tt
(-values Univ))]
[(-> ([x : Integer]
[y : (z) (Refine [n : Integer] (<= n z))]
[z : (x) (Refine [n : Integer] (<= n x))])
Any)
(dep-> ([x : -Int]
[y : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp z)))]
[z : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))])
Univ)]
[(-> ([x : Integer]
[y : (z) (Refine [n : Integer] (<= n z))]
[z : (x) (Refine [n : Integer] (<= n x))])
Any)
(make-DepFun
(list -Int
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp (-id-path (cons 1 2)))))
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp (-id-path (cons 1 0))))))
-tt
(-values Univ))]
[(-> ([w : (y) (Refine [n : Integer] (<= n y))]
[x : Integer]
[y : (z x) (Refine [n : Integer] (<= n (+ x z)))]
[z : (x) (Refine [n : Integer] (<= n x))])
(Refine [n : Integer] (<= n (+ w x y z))))
(make-DepFun
(list (-refine/fresh n -Int (-leq (-lexp n) (-lexp (-id-path (cons 1 2)))))
-Int
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 1))
(-id-path (cons 1 3)))))
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 1))))))
-tt
(-values
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 0))
(-id-path (cons 1 1))
(-id-path (cons 1 2))
(-id-path (cons 1 3)))))))]
[(-> ([w : (y) (Refine [n : Integer] (<= n y))]
[x : Integer]
[y : (z x) (Refine [n : Integer] (<= n (+ x z)))]
[z : (x) (Refine [n : Integer] (<= n x))])
(Refine [n : Integer] (<= n (+ w x y z))))
(dep-> ([w : (-refine/fresh n -Int (-leq (-lexp n) (-lexp y)))]
[x : -Int]
[y : (-refine/fresh n -Int (-leq (-lexp n)
(-lexp x z)))]
[z : (-refine/fresh n -Int (-leq (-lexp n)
(-lexp x)))])
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp w x y z))))]
;; #:pre condition
[(-> ([w : Integer]
[x : Integer]
[y : Integer]
[z : Integer])
#:pre (w x y z)
(and (<= w y)
(<= y (+ x z))
(<= z x))
(Refine [n : Integer] (<= n (+ w x y z))))
(dep-> ([w : -Int]
[x : -Int]
[y : -Int]
[z : -Int])
#:pre (-and (-leq (-lexp w) (-lexp y))
(-leq (-lexp y) (-lexp x z))
(-leq (-lexp z) (-lexp x)))
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp w x y z))))]
;; shadowing
[(-> ([x : Integer]
[y : (x) (Refine [n : Integer] (<= n x))]
[z : (x y) (Refine [y : Integer] (<= y x))])
Any)
(dep-> ([x : -Int]
[y : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))]
[z : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))])
Univ)]
;; shadowing (and thus not really dependent in this case)
[(-> ([x : Any]
[z : (x) (Refine [x : Integer] (<= x 42))])
Any)
(t:->
Univ
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp 42)))
Univ)]
;; shadowing
[(-> ([x : Any]
[y : Any])
(Refine [x : Integer] (<= x 42)))
(t:-> Univ Univ (-refine/fresh res -Int (-leq (-lexp res) (-lexp 42))))]
;; duplicate ids
[FAIL (-> ([x : Integer]
[x : Integer])
Integer)]
;; duplicate dependencies
[FAIL (-> ([x : (y y) Integer]
[y : Integer])
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x x y)
(<= x y)
Integer)]
;; listing self in dependency list
[FAIL (-> ([x : (x y) Integer]
[y : Integer])
Integer)]
;; missing colon
[FAIL (-> ([x : Integer]
[y Integer])
Integer)]
;; unbound ids
[FAIL (-> ([x : (Refine [n : Integer] (= n this-is-an-unbound-identifier))]
[y : Integer])
Integer)]
[FAIL (-> ([x : (this-is-an-unbound-identifier)
(Refine [n : Integer] (= n this-is-an-unbound-identifier))]
[y : Integer])
Integer)]
[FAIL (-> ([x : (Refine [n : Integer] (= n fun-arg))]
[fun-arg : Integer])
Integer)]
[FAIL (-> ([x : (z) (Refine [n : Integer] (= n fun-arg))]
[fun-arg : Integer]
[z : Integer])
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x y)
(and (<= x y)
(<= x this-is-an-unbound-identifier))
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x y this-is-an-unbound-identifier)
(and (<= x y)
(<= x this-is-an-unbound-identifier))
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
(Refine [n : Integer] (= n this-is-an-unbound-identifier)))]
;; cyclic dependencies
[FAIL (-> ([x : (y) (Refine [n : Integer] (= n y))]
[y : (x) (Refine [n : Integer] (= n x))])
Integer)]
[FAIL (-> ([x : (y) (Refine [n : Integer] (= n y))]
[y : (z) (Refine [n : Integer] (= n z))]
[y : (x) (Refine [n : Integer] (= n x))])
Integer)]
;; shadowing w/ bad types
[FAIL (-> ([x : Integer]
[z : (x) (Refine [x : Univ] (<= x 42))])
Any)]
[FAIL (-> ([x : Integer]
[y : Integer])
(Refine [x : Univ] (<= x 42)))]))
;; FIXME - add tests for parse-values-type, parse-tc-results
| null | https://raw.githubusercontent.com/racket/typed-racket/8b7bd594c66e53beba3d2568ca5ecb28f61c6d96/typed-racket-test/unit-tests/parse-type-tests.rkt | racket | needed for parsing case-lambda/case-> types
requires transformer time stuff that doesn't work
[(Refinement even?) (make-Refinement #'even?)]
[((. Number) -> Number) (->* (list) N N)] ;; not legal syntax
PR 14554, non-productive recursive type
->* types
struct types
keyword function types
#:rest-star tests
Prefab structs
Struct Type Properties
Classes
test duplicates
test #:row-var
Test #:implements, some of these used to work but now they have to
refer to type aliases. Testing actual type aliases is hard here though.
Test Object types
Test row polymorphic types
Class types cannot use a row variable that doesn't constrain
all of its members to be absent in the row
parsing tests for Unit types
These are only simple tests because checking types
environment. Additionally, more complex tests of Unit
type parsing happens in unit-tests and integrations tests as well
intersections
refinements
top/bot
simplify props about subject
refinements w/ inequalities
id shadowing
refinements w/ equality
fail for unbound identifiers
fail for bad path element usage
fail for bad linear expression (i.e. not an integer)
id shadowing & bad linear expression
dependent function syntax tests!
- - - - - - - - - - - - - - - - - - - -
no deps, no dep type!
if only dep range, still a DFun (if the type is dep)
simple dep latent props/object (no dep type, no DFun)
simple dependencies
#:pre condition
shadowing
shadowing (and thus not really dependent in this case)
shadowing
duplicate ids
duplicate dependencies
listing self in dependency list
missing colon
unbound ids
cyclic dependencies
shadowing w/ bad types | #lang racket/base
(require "test-utils.rkt"
"evaluator.rkt"
(for-syntax
racket/base
racket/dict
racket/set
racket/list
syntax/parse
typed-racket/base-env/base-structs
typed-racket/env/tvar-env
typed-racket/env/type-alias-env
typed-racket/env/mvar-env
typed-racket/utils/tc-utils
typed-racket/private/parse-type
typed-racket/rep/type-rep
typed-racket/rep/values-rep
typed-racket/types/numeric-tower
typed-racket/types/resolve
typed-racket/types/prop-ops
(submod typed-racket/base-env/base-types initialize)
(rename-in typed-racket/types/abbrev
[Un t:Un] [-> t:->] [->* t:->*]))
(only-in typed-racket/typed-racket do-standard-inits)
typed-racket/base-env/base-types
typed-racket/base-env/base-types-extra
typed-racket/base-env/colon
(only-in typed-racket/base-env/prims-lambda case-lambda)
(prefix-in un: (only-in racket/class init init-field field augment))
(only-in typed/racket/class init init-field field augment)
(only-in racket/unit import export init-depend)
rackunit)
(provide tests)
(gen-test-main)
(define mutated-var #f)
(define not-mutated-var #f)
(define x #'x)
(define y #'y)
(define z #'z)
(begin-for-syntax
(do-standard-inits)
(register-mutated-var #'mutated-var))
(define-syntax (pt-test stx)
(syntax-parse stx
[(_ (~datum FAIL) ty-stx:expr
(~optional tvar-env:expr #:defaults [(tvar-env #'initial-tvar-env)])
(~optional (~seq #:msg msg*:expr) #:defaults [(msg* #'#f)]))
(quasisyntax/loc stx
(test-case #,(format "~a" (syntax->datum #'ty-stx))
(define msg msg*)
(define actual-message
(phase1-phase0-eval
(with-handlers ([exn:fail:syntax? (lambda (exn) #`#,(exn-message exn))])
(parameterize ([current-tvars tvar-env]
[delay-errors? #f])
(parse-type (quote-syntax ty-stx)))
#'#f)))
(unless actual-message
(fail-check "No syntax error when parsing type."))
(when msg
(unless (regexp-match? msg actual-message)
(with-check-info (['expected msg] ['actual actual-message])
(fail-check "parse-type raised the wrong error message"))))))]
[(_ ty-stx:expr ty-val:expr
(~optional tvar-env:expr #:defaults [(tvar-env #'initial-tvar-env)]))
(quasisyntax/loc
stx
(test-case #,(format "~a" (syntax->datum #'ty-stx))
(define-values (expected actual same?)
(phase1-phase0-eval
(parameterize ([current-tvars tvar-env]
[delay-errors? #f])
(define expected ty-val)
(define actual (parse-type (quote-syntax ty-stx)))
#`(values #,expected #,actual #,(equal? actual expected)))))
(unless same?
(with-check-info (['expected expected] ['actual actual])
(fail-check "Unequal types")))))]))
(define-syntax pt-tests
(syntax-rules ()
[(_ nm [elems ...] ...)
(test-suite nm
(pt-test elems ...) ...)]))
(define-for-syntax N -Number)
(define-for-syntax B -Boolean)
(define-for-syntax Sym -Symbol)
(define tests
(pt-tests
"parse-type tests"
[FAIL UNBOUND]
[FAIL List]
[FAIL (All (A) (List -> Boolean))]
[Number N]
[Any Univ]
[(List Number String) (-Tuple (list N -String))]
[(All (Number) Number) (-poly (a) a)]
[(Number . Number) (-pair N N)]
[(Listof Boolean) (make-Listof B)]
[(Vectorof (Listof Symbol)) (make-Vector (make-Listof Sym))]
[(Immutable-Vectorof (Listof Symbol)) (make-Immutable-Vector (make-Listof Sym))]
[(Mutable-Vectorof (Listof Symbol)) (make-Mutable-Vector (make-Listof Sym))]
[(Vector Symbol String) (-vec* Sym -String)]
[(Immutable-Vector Symbol String) (make-Immutable-HeterogeneousVector (list Sym -String))]
[(Mutable-Vector Symbol String) (make-Mutable-HeterogeneousVector (list Sym -String))]
[(pred Number) (make-pred-ty N)]
[(-> (values Number Boolean Number)) (t:-> (-values (list N B N)))]
[(Number -> Number) (t:-> N N)]
[(All (A) Number -> Number) (-poly (a) (t:-> N N))]
[(All (A) Number -> Number -> Number) (-poly (a) (t:-> N (t:-> N N)))]
[(All (A) Number -> Number -> Number -> Number)
(-poly (a) (t:-> N (t:-> N (t:-> N N))))]
[FAIL (All (A) -> Number Number)]
[FAIL (All (A) Listof Any)]
[(All (A) (Number -> Number)) (-poly (a) (t:-> N N))]
[(All (A) (-> Number Number)) (-poly (a) (t:-> N N))]
[(All (A) A -> A) (-poly (a) (t:-> a a))]
[(All (A) A → A) (-poly (a) (t:-> a a))]
[FAIL (All (A) → A A)]
[(All (A) (A -> A)) (-poly (a) (t:-> a a))]
[(All (A) (-> A A)) (-poly (a) (t:-> a a))]
[FAIL (All (A) -> Integer -> Integer -> Integer)]
[(Number Number Number Boolean -> Number) (N N N B . t:-> . N)]
[(-> Number Number Number Boolean Number) (N N N B . t:-> . N)]
[(Number Number Number * -> Boolean) ((list N N) N . t:->* . B)]
[(-> Number Number Number * Boolean) ((list N N) N . t:->* . B)]
[(U Number Boolean) (t:Un N B)]
[(Union Number Boolean) (t:Un N B)]
[(U Number Boolean Number) (t:Un N B)]
[(U Number Boolean 1) (t:Un N B)]
[(All (a) (Listof a)) (-poly (a) (make-Listof a))]
[(All (a ...) (a ... a -> Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(All (a ...) (-> a ... a Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(∀ (a) (Listof a)) (-poly (a) (make-Listof a))]
[(∀ (a ...) (a ... a -> Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(∀ (a ...) (-> a ... a Integer)) (-polydots (a) ( (list) (a a) . ->... . -Integer))]
[(All (a ...) (a ... -> Number))
(-polydots (a) ((list) [a a] . ->... . N))]
[(All (a ...) (-> a ... Number))
(-polydots (a) ((list) [a a] . ->... . N))]
[(All (a ...) (-> (values a ...)))
(-polydots (a) (t:-> (make-ValuesDots (list) a 'a)))]
[(-> Number AnyValues) (t:-> N ManyUniv)]
[(Rec x (U Null (Pair String (Pair Number x))))
(-mu x (t:Un -Null (-pair -String (-pair -Number x))))]
[FAIL (Rec x (All (A #:row) x))]
[FAIL (Rec x (All (A) x))]
[FAIL (Rec x x)]
[FAIL (Rec x (U x Number))]
[FAIL ((Listof Number) Number) #:msg "bad syntax in type application: only an identifier"]
[(case-lambda (Number -> Boolean) (Number Number -> Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Number -> Boolean) (Number Number -> Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Number -> Boolean) (-> Number Number Number)) (cl-> [(N) B]
[(N N) N])]
[(case-> (Boolean -> Boolean)
(-> Boolean Boolean Boolean)
(-> Boolean String * Boolean)
(->* (Boolean) #:rest String Boolean)
(->* (Boolean) #:rest-star (String Symbol) Boolean)
(->* (Boolean) (Boolean) #:rest-star (String Symbol) Boolean)
(->* (Boolean Boolean) #:rest-star (String Symbol) Boolean))
(make-Fun
(remove-duplicates
(list (-Arrow (list -Boolean) -Boolean)
(-Arrow (list -Boolean -Boolean) -Boolean)
(-Arrow (list -Boolean) #:rest -String -Boolean)
(-Arrow (list -Boolean)
#:rest (make-Rest (list -String -Symbol))
-Boolean)
(-Arrow (list -Boolean -Boolean)
#:rest (make-Rest (list -String -Symbol))
-Boolean))))]
[1 (-val 1)]
[#t (-val #t)]
[#f (-val #f)]
["foo" (-val "foo")]
['(1 2 3) (-Tuple (map -val '(1 2 3)))]
[(Listof Number) (make-Listof N)]
[FAIL (-> Listof Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Pairof Listof Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (-> Number Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Any -> Boolean : Listof)
#:msg "expected a valid type not a type constructor"]
[FAIL (Listof -> Listof : Listof)
#:msg "expected a valid type not a type constructor"]
[a (-v a) (dict-set initial-tvar-env 'a (-v a))]
[(Any -> Boolean : Number) (make-pred-ty -Number)]
[(-> Any Boolean : Number) (make-pred-ty -Number)]
[(Any -> Boolean : #:+ (Number @ 0) #:- (! Number @ 0))
(make-pred-ty -Number)]
[(-> Any Boolean : #:+ (Number @ 0) #:- (! Number @ 0))
(make-pred-ty -Number)]
[(Any -> Boolean : #:+ (! Number @ 0) #:- (Number @ 0))
(t:->* (list Univ) -Boolean : (-PS (-not-type 0 -Number) (-is-type 0 -Number)))]
[(-> Any Boolean : #:+ (! Number @ 0) #:- (Number @ 0))
(t:->* (list Univ) -Boolean : (-PS (-not-type 0 -Number) (-is-type 0 -Number)))]
[(-> Any (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0)))
(t:-> Univ
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -Number) (-not-type (cons 1 0) -Number))))]
[(-> Any Any (-> Any Boolean : #:+ (Number @ 1 1) #:- (! Number @ 1 1)))
(t:-> Univ Univ
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 1) -Number) (-not-type (cons 1 1) -Number))))]
[(-> Any #:foo Any (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0)))
(->key Univ #:foo Univ #t
(t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -Number) (-not-type (cons 1 0) -Number))))]
[(All (a b) (-> (-> a Any : #:+ b) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS (-is-type 0 b) -tt)) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:+ (! b)) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS (-not-type 0 b) -tt)) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:- b) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS -tt (-is-type 0 b))) (-lst a) (-lst b)))]
[(All (a b) (-> (-> a Any : #:- (! b)) (Listof a) (Listof b)))
(-poly (a b) (t:-> (asym-pred a Univ (-PS -tt (-not-type 0 b))) (-lst a) (-lst b)))]
[(Number -> Number -> Number)
(t:-> -Number (t:-> -Number -Number))]
[(-> Number (-> Number Number))
(t:-> -Number (t:-> -Number -Number))]
[(Integer -> (All (X) (X -> X)))
(t:-> -Integer (-poly (x) (t:-> x x)))]
[(-> Integer (All (X) (-> X X)))
(t:-> -Integer (-poly (x) (t:-> x x)))]
[FAIL -> #:msg "incorrect use of -> type constructor"]
[FAIL (Any -> Any #:object 0) #:msg "expected the identifier `:'"]
[FAIL (-> Any Any #:+ (String @ x)) #:msg "expected the identifier `:'"]
[FAIL (-> Any Boolean : #:+ (Number @ 1 0) #:- (! Number @ 1 0))
#:msg "Index 1 used in"]
[FAIL (-> Any (-> Any Boolean : #:+ (Number @ 1 1) #:- (! Number @ 1 1)))
#:msg "larger than argument length"]
[(Any -> Boolean : #:+ (Symbol @ not-mutated-var))
(t:-> Univ -Boolean : (-PS (-is-type (-id-path #'not-mutated-var) -Symbol) -tt))]
[FAIL (Any -> Boolean : #:+ (Symbol @ mutated-var))
#:msg "may not reference identifiers that are mutated"]
[(Any -> Boolean : #:+ (! Symbol @ not-mutated-var))
(t:-> Univ -Boolean : (-PS (-not-type (-id-path #'not-mutated-var) -Symbol) -tt))]
[FAIL (Any -> Boolean : #:+ (! Symbol @ mutated-var))
#:msg "may not reference identifiers that are mutated"]
[FAIL (Any -> Boolean : #:+ (String @ unbound))
#:msg "may not reference identifiers that are unbound"]
[(->* (String Symbol) Void)
(make-Fun (list (-Arrow (list -String -Symbol) -Void)))]
[(->* () (String) #:rest Symbol Void)
(make-Fun (list (-Arrow (list) -Void)
(-Arrow (list -String)
#:rest -Symbol
-Void)))]
[(->* (Number) (String) #:rest Symbol Void)
(make-Fun (list (-Arrow (list -Number) -Void)
(-Arrow (list -Number -String)
#:rest -Symbol
-Void)))]
[(->* (Number) (String Void) #:rest Symbol Any)
(make-Fun (list (-Arrow (list -Number) Univ)
(-Arrow (list -Number -String) Univ)
(-Arrow (list -Number -String -Void)
#:rest -Symbol
Univ)))]
[(->* (String Symbol) (String) Void)
(->opt -String -Symbol [-String] -Void)]
[(->* (String Symbol) (String Symbol) Void)
(->opt -String -Symbol [-String -Symbol] -Void)]
[(->* (String Symbol) (String) (values Void String))
(->opt -String -Symbol [-String] (-values (list -Void -String)))]
[(->* (String Symbol) (String) #:rest Symbol Void)
(->optkey -String -Symbol [-String] #:rest -Symbol -Void)]
[(All (a) (->* (a Symbol) (String) #:rest Symbol Void))
(-poly (a) (->optkey a -Symbol [-String] #:rest -Symbol -Void))]
[(->* (Integer) (String #:foo Integer String) Void)
(->optkey -Integer [-String -String] #:foo -Integer #f -Void)]
[(->* (Integer) (String #:foo Integer) Void)
(->optkey -Integer [-String] #:foo -Integer #f -Void)]
[(->* (Integer) (#:foo Integer String) Void)
(->optkey -Integer [-String] #:foo -Integer #f -Void)]
[(->* (Integer #:bar Integer) (String) Void)
(->optkey -Integer [-String] #:bar -Integer #t -Void)]
[(->* (#:bar Integer Integer) (String) Void)
(->optkey -Integer [-String] #:bar -Integer #t -Void)]
[(->* (Integer #:bar Integer) (String #:foo Integer) Void)
(->optkey -Integer [-String] #:bar -Integer #t #:foo -Integer #f -Void)]
[(->* (#:bar Integer Integer) (#:foo Integer String) Void)
(->optkey -Integer [-String] #:bar -Integer #t #:foo -Integer #f -Void)]
[(->* (Any (-> Any Boolean : #:+ (String @ 1 0))) Void)
(t:-> Univ (t:->* (list Univ) -Boolean : (-PS (-is-type (cons 1 0) -String) -tt))
-Void)]
[FAIL (->* (Any (-> Any Boolean : #:+ (String @ 2 0))) Void)
#:msg "Index 2 used in"]
[(Opaque foo?) (make-Opaque #'foo?)]
PR 14122
[FAIL (Opaque 3)]
[(Struct-Type arity-at-least) (make-StructType (resolve -Arity-At-Least))]
[FAIL (Struct-Type Integer)]
[FAIL (Struct-Type foo)]
[Struct-TypeTop -StructTypeTop]
[(#:a String -> String)
(->optkey [] #:a -String #t -String)]
[([#:a String] -> String)
(->optkey [] #:a -String #f -String)]
[(#:a String #:b String -> String)
(->optkey [] #:a -String #t #:b -String #t -String)]
[([#:a String] #:b String -> String)
(->optkey [] #:a -String #f #:b -String #t -String)]
[(#:a String [#:b String] -> String)
(->optkey [] #:a -String #t #:b -String #f -String)]
[(String #:a String -> String)
(->optkey -String [] #:a -String #t -String)]
[(String #:a String String * -> String)
(->optkey -String [] #:rest -String #:a -String #t -String)]
[(String [#:a String] String * -> String)
(->optkey -String [] #:rest -String #:a -String #f -String)]
[(->* () #:rest-star () String)
(->optkey () -String)]
[(->* () (Symbol) #:rest-star (String Symbol) String)
(->optkey (-Symbol) #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* () #:rest-star (String) String)
(->optkey () #:rest (make-Rest (list -String)) -String)]
[(->* () #:rest-star (String Symbol) String)
(->optkey () #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) #:rest-star (String Symbol) String)
(->optkey -String () #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) (Symbol) #:rest-star (String Symbol) String)
(->optkey -String (-Symbol) #:rest (make-Rest (list -String -Symbol)) -String)]
[(->* (String) (Symbol) #:rest-star () String)
(->optkey -String (-Symbol) -String)]
[FAIL (->* (String) #:rest-star Number String)]
[FAIL (->* (String) (Symbol) #:rest-star Number String)]
[FAIL (->* (String) (Symbol) #:rest-star (Not-A-Real-Type-Should-Fail) String)]
[(Prefab foo String) (-prefab 'foo -String)]
[FAIL (Prefab (foo 0) String)]
[(Struct-Property Number) (-struct-property -Number #f)]
[(Struct-Property (-> Number Number)) (-struct-property (t:-> -Number -Number) #f)]
[(Struct-Property (-> Self Number)) (-struct-property (t:-> -Self -Number) #f)]
[FAIL (-> Self Number)]
[(Some (X) (-> Number (-> X Number) : X)) (-some (X) (t:-> -Number (t:-> X -Number) : (-PS (-is-type 0 X) (-not-type 0 X))))]
[(-> Number (Some (X) (-> X Number) : #:+ X)) (t:-> -Number (-some-res (X) (t:-> X -Number) : #:+ X))]
[(Class) (-class)]
[(Class (init [x Number] [y Number]))
(-class #:init ([x -Number #f] [y -Number #f]))]
[(Class (un:init [x Number] [y Number]))
(-class #:init ([x -Number #f] [y -Number #f]))]
[(Class (init [x Number] [y Number #:optional]))
(-class #:init ([x -Number #f] [y -Number #t]))]
[(Class (init [x Number]) (init-field [y Number]))
(-class #:init ([x -Number #f]) #:init-field ([y -Number #f]))]
[(Class [m (Number -> Number)])
(-class #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (init [x Number]))
(-class #:init ([x -Number #f]) #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (field [x Number]))
(-class #:field ([x -Number]) #:method ([m (t:-> N N)]))]
[(Class [m (Number -> Number)] (un:field [x Number]))
(-class #:field ([x -Number]) #:method ([m (t:-> N N)]))]
[(Class (augment [m (Number -> Number)]))
(-class #:augment ([m (t:-> N N)]))]
[(Class (un:augment [m (Number -> Number)]))
(-class #:augment ([m (t:-> N N)]))]
[(Class (augment [m (Number -> Number)]) (field [x Number]))
(-class #:augment ([m (t:-> N N)]) #:field ([x -Number]))]
[(Class (augment [m (-> Number)]) [m (-> Number)])
(-class #:method ([m (t:-> N)]) #:augment ([m (t:-> N)]))]
[FAIL (Class foobar)]
[FAIL (Class [x UNBOUND])]
[FAIL (Class [x Number #:random-keyword])]
[FAIL (Class (random-clause [x Number]))]
[FAIL (Class [m Number])]
[FAIL (Class (augment [m Number]))]
[FAIL (Class [x Number] [x Number])]
[FAIL (Class (init [x Number]) (init [x Number]))]
[FAIL (Class (init [x Number]) (init-field [x Number]))]
[FAIL (Class (field [x Number]) (init-field [x Number]))]
[FAIL (Class (augment [m (-> Number)] [m (-> Number)]))]
[FAIL (Class (augment [m (-> Number)]) (augment [m (-> Number)]))]
[FAIL (Class [m (-> Number)] [m (-> Number)])]
[(All (r #:row) (Class #:row-var r))
(make-PolyRow (list 'r)
(-class #:row (make-F 'r))
(list null null null null))]
[FAIL (All (r #:row) (Class #:implements (Class #:row-var r)))]
[FAIL (All (r #:row) (Class #:implements (Class) #:row-var r))]
[FAIL (Class #:row-var 5)]
[FAIL (Class #:row-var (list 3))]
[FAIL (Class #:row-var x)]
[FAIL (Class #:implements (Class #:row-var r) #:row-var x)]
[FAIL (Class #:implements (Class #:row-var r) #:row-var r)]
[FAIL (All (r #:row)
(All (x #:row)
(Class #:implements (Class #:row-var r) #:row-var x)))]
[FAIL (All (r #:row) (Class #:implements (Class #:row-var r) #:row-var r))]
[FAIL (Class #:implements (Class [m (Number -> Number)]) (field [x Number]))]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [n (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [m (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements (Class (init [x Integer]) [m (Number -> Number)])
(field [x Number]))]
[FAIL (Class #:implements Number)]
[FAIL (Class #:implements Number [m (Number -> Number)])]
[FAIL (Class #:implements (Class [m (Number -> Number)]) [m String])]
[FAIL (Class #:implements (Class [m (Number -> Number)])
#:implements (Class [m (String -> String)])
(field [x Number]))]
[FAIL (Class #:implements (Class (augment [m (Number -> Number)]))
#:implements (Class (augment [m (String -> String)]))
(field [x Number]))]
[FAIL (Class #:implements (Class (augment [m (Number -> Number)]))
(augment [m (-> Number)]))]
[(Object) (-object)]
[(Object [m (Number -> Number)])
(-object #:method ([m (t:-> N N)]))]
[(Object [m (Number -> Number)] (field [f Number]))
(-object #:method ([m (t:-> N N)]) #:field ([f N]))]
[FAIL (Object foobar)]
[FAIL (Object [x UNBOUND])]
[FAIL (Object [x Number #:random-keyword])]
[FAIL (Object (random-clause [x Number]))]
[FAIL (Object [x Number] [x Number])]
[FAIL (Object (field [x Number]) (field [x Number]))]
[FAIL (Object [x Number] [x Number])]
[FAIL (Object [m Number])]
[(All (r #:row) ((Class #:row-var r) -> (Class #:row-var r)))
(-polyrow (r) (list null null null null)
(t:-> (-class #:row r) (-class #:row r)))]
[(Listof (All (r #:row) ((Class #:row-var r) -> (Class #:row-var r))))
(-lst (-polyrow (r) (list null null null null)
(t:-> (-class #:row r) (-class #:row r))))]
[(All (r #:row (init x y z) (field f) m n)
((Class #:row-var r) -> (Class #:row-var r)))
(-polyrow (r) (list '(x y z) '(f) '(m n) '())
(t:-> (-class #:row r) (-class #:row r)))]
[FAIL (All (r #:row (init x))
((Class #:row-var r (init y)) -> (Class #:row-var r)))]
[FAIL (All (r #:row (init x y z) (field f) m n)
((Class #:row-var r a b c) -> (Class #:row-var r)))]
with signatures requires interaction with the Signature
[(Unit (import) (export) (init-depend) String)
(make-Unit null null null (-values (list -String)))]
[(Unit (import) (export) String)
(make-Unit null null null (-values (list -String)))]
[(Unit (import) (export) (init-depend))
(make-Unit null null null (-values (list -Void)))]
[(Unit (import) (export))
(make-Unit null null null (-values (list -Void)))]
[UnitTop -UnitTop]
[FAIL (Unit (export) String)]
[FAIL (Unit (import) String)]
[FAIL (Unit (init-depend) String)]
[FAIL (Unit (import bad) (export) String)]
[FAIL (Unit (import) (export bad) String)]
[(Sequenceof Any Any) (-seq Univ Univ)]
GH issue # 314
[FAIL ~> #:msg "unbound"]
[(∩) Univ]
[(∩ Any) Univ]
[(∩ String Symbol) -Bottom]
[(Intersection String Symbol) -Bottom]
[(∩ (-> Number Number) (-> String String))
(-unsafe-intersect (t:-> -String -String)
(t:-> -Number -Number))]
[(Refine [x : Number] Top) -Number]
[(Refine [x : Number] Bot) -Bottom]
[(Refine [x : Any] (: x String)) -String]
[(Refine [x : Integer] (: x Integer)) -Int]
[(Refine [x : Integer] (: x Symbol)) -Bottom]
[(Refine [val : Integer] (<= val 42))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [vec : (Vectorof Any)] (<= (vector-length vec) 42))
(-refine/fresh x (-vec Univ) (-leq (-lexp (-vec-len-of (-id-path x)))
(-lexp 42)))]
[(Refine [p : (Pairof Integer Integer)] (<= (car p) (cdr p)))
(-refine/fresh p (-pair -Int -Int) (-leq (-lexp (-car-of (-id-path p)))
(-lexp (-cdr-of (-id-path p)))))]
[(Refine [x : Integer] (<= (* 2 x) 42))
(-refine/fresh x -Int (-leq (-lexp (list 2 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 x) 42))
(-refine/fresh x -Int (-leq (-lexp 1 x)
(-lexp 42)))]
[(Refine [x : Integer] (<= (- 1 x) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (-lexp (list -1 x)))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 (* 3 x)) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 3 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= (+ 1 (* 3 x) (* 2 x)) 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 5 x))
(-lexp 42)))]
[(Refine [x : Integer] (<= 42 (+ 1 (* 3 x) (* 2 x))))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp 1 (list 5 x))))]
[(Refine [x : Integer] (<= 42 (* 2 x)))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp (list 2 x))))]
[(Refine [x : Integer] (<= 42 (+ 1 x)))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp 1 x)))]
[(Refine [x : Integer] (<= x 42))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [x : Integer] (< x 42))
(-refine/fresh x -Int (-leq (-lexp 1 (list 1 x))
(-lexp 42)))]
[(Refine [x : Integer] (>= x 42))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp x)))]
[(Refine [x : Integer] (>= x 42))
(-refine/fresh x -Int (-leq (-lexp 42)
(-lexp x)))]
[(Refine [x : Integer] (> x 42))
(-refine/fresh x -Int (-leq (-lexp 43)
(-lexp x)))]
[(Refine [n : Integer] (<= (- (+ n n) (* 1 (+ n)))
(+ 2 (- 80 (* 2 (+ 9 9 (+) (-) 2))))))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [x : Any] (: x (Refine [x : Integer] (<= x 42))))
(-refine/fresh x -Int (-leq (-lexp x)
(-lexp 42)))]
[(Refine [x : Integer] (= x 42))
(-refine/fresh x -Int (-and (-leq (-lexp x) (-lexp 42))
(-leq (-lexp 42) (-lexp x))))]
other abritrary propositions in refinements
[(Refine [x : Integer] (and (<= x 42)
(<= 0 x)))
(-refine/fresh x -Int (-and (-leq (-lexp x) (-lexp 42))
(-leq (-lexp 0) (-lexp x))))]
[(Refine [x : String] (and (: z Symbol)
(! y String)))
(-refine/fresh x -String (-and (-is-type #'z -Symbol)
(-not-type #'y -String)))]
[(Refine [x : String] (or (: z Symbol)
(: y String)))
(-refine/fresh x -String (-or (-is-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (unless (: z Symbol)
(: y String)))
(-refine/fresh x -String (-or (-is-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (or (not (: y String))
(: z Symbol)))
(-refine/fresh x -String (-or (-not-type #'y -String)
(-is-type #'z -Symbol)))]
[(Refine [x : Any] (if (: x String) (! y String) (: z Symbol)))
(-refine/fresh x Univ (-or (-and (-is-type x -String) (-not-type #'y -String))
(-and (-not-type x -String) (-is-type #'z -Symbol))))]
[(Refine [x : String] (when (: z Symbol) (: y String)))
(-refine/fresh x -String (-or (-not-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : String] (when (not (not (: z Symbol)))
(: y String)))
(-refine/fresh x -String (-or (-not-type #'z -Symbol)
(-is-type #'y -String)))]
[(Refine [x : (Refine [x : Integer] (<= 42 x))] (<= x 42))
(-refine/fresh z -Int (-and (-leq (-lexp 42) (-lexp z))
(-leq (-lexp z) (-lexp 42))))]
[FAIL (Refine [x : String] (: r Symbol))]
[FAIL (Refine [x String] (: x Symbol))]
[FAIL (Refine [x y : String] (: x Symbol))]
[FAIL (Refine [x : String] (: r Symbol) (: r Symbol))]
[FAIL (Refine [p : Integer] (<= (car p) 42))]
[FAIL (Refine [p : Integer] (<= (cdr p) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (car (car p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (car (cdr p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (cdr (car p)) 42))]
[FAIL (Refine [p : (Pairof Integer Integer)] (<= (cdr (cdr p)) 42))]
[FAIL (Refine [vec : Any] (<= (vector-length vec) 42))]
[FAIL (Refine [q : Any] (<= q 42))]
[FAIL (Refine [q : Any] (<= 42 q))]
[FAIL (Refine [q : Any] (< q 42))]
[FAIL (Refine [q : Any] (< 42 q))]
[FAIL (Refine [q : Any] (>= 42 q))]
[FAIL (Refine [q : Any] (>= q 42))]
[FAIL (Refine [q : Any] (> q 42))]
[FAIL (Refine [q : Any] (> 42 q))]
[FAIL (Refine [q : Any] (= 42 q))]
[FAIL (Refine [q : Any] (= q 42))]
[FAIL (Refine [q : Any] (<= (+ 1 q) 42))]
[FAIL (Refine [q : Any] (<= 42 (+ 1 q)))]
[FAIL (Refine [q : Any] (<= (* 2 q) 42))]
[FAIL (Refine [q : Any] (<= 42 (* 2 q)))]
[FAIL (Refine [q : Any] (<= (+ 1 (* 2 q)) 42))]
[FAIL (Refine [q : Any] (<= 42 (+ 1 (* 2 q))))]
[FAIL (Refine [x : Integer] (: x (Refine [x : Any] (<= 42 x))))]
[(-> ([v : (Vectorof Any)])
Any)
(t:-> (-vec Univ) Univ)]
[(-> ([v : (Vectorof Any)]
[i : Integer])
Any)
(t:-> (-vec Univ) -Int Univ)]
[(-> ([x : Integer]
[y : Integer])
(Refine [res : Integer] (<= res (+ x y))))
(make-DepFun
(list -Int -Int)
-tt
(-values
(-refine/fresh res -Int
(-leq (-lexp (-id-path (cons 0 0)))
(-lexp (-id-path (cons 1 0))
(-id-path (cons 1 1)))))))]
[(-> ([x : Any])
Boolean
#:+ (: x Integer))
(t:-> Univ -Boolean : (-PS (-is-type (cons 0 0) -Int) -tt))]
[(-> ([x : Any])
Boolean
#:- (! x Integer))
(t:-> Univ -Boolean : (-PS -tt (-not-type (cons 0 0) -Int)))]
[(-> ([x : Any])
Boolean
#:+ (: x Integer)
#:- (! x Integer))
(t:-> Univ -Boolean : (-PS (-is-type (cons 0 0) -Int)
(-not-type (cons 0 0) -Int)))]
[(-> ([x : Any]
[y : Any])
Boolean
#:+ (: x Integer)
#:- (: y Integer)
#:object x)
(t:-> Univ Univ -Boolean
: (-PS (-is-type (cons 0 0) -Int)
(-is-type (cons 0 1) -Int))
: (-id-path (cons 0 0)))]
[(-> ([v : (Vectorof Any)]
[i : (v) (Refine [n : Integer] (<= n (vector-length v)))])
Any)
(make-DepFun (list (-vec Univ)
(-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path (cons 1 0)))))))
-tt
(-values Univ))]
[(-> ([v : (Vectorof Any)]
[i : (v) (Refine [n : Integer] (<= n (vector-length v)))])
Any)
(dep-> ([x : (-vec Univ)]
[y : (-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path x)))))])
Univ)]
[(-> ([i : (v) (Refine [n : Integer] (<= n (vector-length v)))]
[v : (Vectorof Any)])
Any)
(make-DepFun (list (-refine/fresh n -Int
(-leq (-lexp (-id-path n))
(-lexp (-vec-len-of (-id-path (cons 1 1))))))
(-vec Univ))
-tt
(-values Univ))]
[(-> ([x : Integer]
[y : (z) (Refine [n : Integer] (<= n z))]
[z : (x) (Refine [n : Integer] (<= n x))])
Any)
(dep-> ([x : -Int]
[y : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp z)))]
[z : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))])
Univ)]
[(-> ([x : Integer]
[y : (z) (Refine [n : Integer] (<= n z))]
[z : (x) (Refine [n : Integer] (<= n x))])
Any)
(make-DepFun
(list -Int
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp (-id-path (cons 1 2)))))
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp (-id-path (cons 1 0))))))
-tt
(-values Univ))]
[(-> ([w : (y) (Refine [n : Integer] (<= n y))]
[x : Integer]
[y : (z x) (Refine [n : Integer] (<= n (+ x z)))]
[z : (x) (Refine [n : Integer] (<= n x))])
(Refine [n : Integer] (<= n (+ w x y z))))
(make-DepFun
(list (-refine/fresh n -Int (-leq (-lexp n) (-lexp (-id-path (cons 1 2)))))
-Int
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 1))
(-id-path (cons 1 3)))))
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 1))))))
-tt
(-values
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp (-id-path (cons 1 0))
(-id-path (cons 1 1))
(-id-path (cons 1 2))
(-id-path (cons 1 3)))))))]
[(-> ([w : (y) (Refine [n : Integer] (<= n y))]
[x : Integer]
[y : (z x) (Refine [n : Integer] (<= n (+ x z)))]
[z : (x) (Refine [n : Integer] (<= n x))])
(Refine [n : Integer] (<= n (+ w x y z))))
(dep-> ([w : (-refine/fresh n -Int (-leq (-lexp n) (-lexp y)))]
[x : -Int]
[y : (-refine/fresh n -Int (-leq (-lexp n)
(-lexp x z)))]
[z : (-refine/fresh n -Int (-leq (-lexp n)
(-lexp x)))])
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp w x y z))))]
[(-> ([w : Integer]
[x : Integer]
[y : Integer]
[z : Integer])
#:pre (w x y z)
(and (<= w y)
(<= y (+ x z))
(<= z x))
(Refine [n : Integer] (<= n (+ w x y z))))
(dep-> ([w : -Int]
[x : -Int]
[y : -Int]
[z : -Int])
#:pre (-and (-leq (-lexp w) (-lexp y))
(-leq (-lexp y) (-lexp x z))
(-leq (-lexp z) (-lexp x)))
(-refine/fresh n -Int (-leq (-lexp n)
(-lexp w x y z))))]
[(-> ([x : Integer]
[y : (x) (Refine [n : Integer] (<= n x))]
[z : (x y) (Refine [y : Integer] (<= y x))])
Any)
(dep-> ([x : -Int]
[y : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))]
[z : (-refine/fresh n -Int
(-leq (-lexp n)
(-lexp x)))])
Univ)]
[(-> ([x : Any]
[z : (x) (Refine [x : Integer] (<= x 42))])
Any)
(t:->
Univ
(-refine/fresh n -Int
(-leq (-lexp n)
(-lexp 42)))
Univ)]
[(-> ([x : Any]
[y : Any])
(Refine [x : Integer] (<= x 42)))
(t:-> Univ Univ (-refine/fresh res -Int (-leq (-lexp res) (-lexp 42))))]
[FAIL (-> ([x : Integer]
[x : Integer])
Integer)]
[FAIL (-> ([x : (y y) Integer]
[y : Integer])
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x x y)
(<= x y)
Integer)]
[FAIL (-> ([x : (x y) Integer]
[y : Integer])
Integer)]
[FAIL (-> ([x : Integer]
[y Integer])
Integer)]
[FAIL (-> ([x : (Refine [n : Integer] (= n this-is-an-unbound-identifier))]
[y : Integer])
Integer)]
[FAIL (-> ([x : (this-is-an-unbound-identifier)
(Refine [n : Integer] (= n this-is-an-unbound-identifier))]
[y : Integer])
Integer)]
[FAIL (-> ([x : (Refine [n : Integer] (= n fun-arg))]
[fun-arg : Integer])
Integer)]
[FAIL (-> ([x : (z) (Refine [n : Integer] (= n fun-arg))]
[fun-arg : Integer]
[z : Integer])
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x y)
(and (<= x y)
(<= x this-is-an-unbound-identifier))
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
#:pre (x y this-is-an-unbound-identifier)
(and (<= x y)
(<= x this-is-an-unbound-identifier))
Integer)]
[FAIL (-> ([x : Integer]
[y : Integer])
(Refine [n : Integer] (= n this-is-an-unbound-identifier)))]
[FAIL (-> ([x : (y) (Refine [n : Integer] (= n y))]
[y : (x) (Refine [n : Integer] (= n x))])
Integer)]
[FAIL (-> ([x : (y) (Refine [n : Integer] (= n y))]
[y : (z) (Refine [n : Integer] (= n z))]
[y : (x) (Refine [n : Integer] (= n x))])
Integer)]
[FAIL (-> ([x : Integer]
[z : (x) (Refine [x : Univ] (<= x 42))])
Any)]
[FAIL (-> ([x : Integer]
[y : Integer])
(Refine [x : Univ] (<= x 42)))]))
FIXME - add tests for parse - values - type , parse - tc - results
|
b2a1b8307c56f2092c568d0e80a119da960694154f7c8e5977a03852d9dec522 | Daniel-Diaz/HaTeX | Syntax.hs |
# LANGUAGE CPP , DeriveDataTypeable , DeriveGeneric #
-- | LaTeX syntax description in the definition of the 'LaTeX' datatype.
-- If you want to add new commands or environments not defined in
the library , import this module and use ' LaTeX ' data constructors .
module Text.LaTeX.Base.Syntax
( -- * @LaTeX@ datatype
Measure (..)
, MathType (..)
, LaTeX (..)
, TeXArg (..)
, (<>), between
-- * Escaping reserved characters
, protectString
, protectText
-- * Syntax analysis
, matchCommand
, lookForCommand
, matchEnv
, lookForEnv
, texmap
, texmapM
-- ** Utils
, getBody
, getPreamble
) where
import Data.Text (Text,pack)
import qualified Data.Text
import qualified Data.Semigroup as Semigroup
import Data.String
import Control.Applicative
import Control.Monad (replicateM)
import Data.Functor.Identity (runIdentity)
import Data.Data (Data)
import Data.Typeable
import Test.QuickCheck
import Data.Hashable
import GHC.Generics (Generic)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid
#endif
| Measure units defined in LaTeX. Use ' CustomMeasure ' to use commands like ' ' .
-- For instance:
--
> rule Nothing ( CustomMeasure linewidth ) ( Pt 2 )
--
This will create a black box ( see ' rule ' ) as wide as the text and two points tall .
--
data Measure =
^ A point is 1/72.27 inch , that means about 0.0138 inch or 0.3515 mm .
^ .
| Cm Double -- ^ Centimeter.
| In Double -- ^ Inch.
^ The height of an \"x\ " in the current font .
^ The width of an " in the current font .
^ You can introduce a ' LaTeX ' expression as a measure .
deriving (Data, Eq, Generic, Show, Typeable)
-- | Different types of syntax for mathematical expressions.
data MathType = Parentheses | Square | Dollar | DoubleDollar
deriving (Data, Eq, Generic, Show, Typeable)
-- | Type of @LaTeX@ blocks.
data LaTeX =
TeXRaw Text -- ^ Raw text.
| TeXComm String [TeXArg] -- ^ Constructor for commands.
First argument is the name of the command .
Second , its arguments .
| TeXCommS String -- ^ Constructor for commands with no arguments.
-- When rendering, no space or @{}@ will be added at
-- the end.
| TeXEnv String [TeXArg] LaTeX -- ^ Constructor for environments.
First argument is the name of the environment .
Second , its arguments .
-- Third, its content.
| TeXMath MathType LaTeX -- ^ Mathematical expressions.
| TeXLineBreak (Maybe Measure) Bool -- ^ Line break command.
| TeXBraces LaTeX -- ^ A expression between braces.
| TeXComment Text -- ^ Comments.
^ Sequencing of ' LaTeX ' expressions .
-- Use '<>' preferably.
| TeXEmpty -- ^ An empty block.
-- /Neutral element/ of '<>'.
deriving (Data, Eq, Generic, Show, Typeable)
| An argument for a ' LaTeX ' command or environment .
data TeXArg =
FixArg LaTeX -- ^ Fixed argument.
| OptArg LaTeX -- ^ Optional argument.
| MOptArg [LaTeX] -- ^ Multiple optional argument.
^ An argument enclosed between @\<@ and @\>@.
| MSymArg [LaTeX] -- ^ Version of 'SymArg' with multiple options.
^ An argument enclosed between @(@ and @)@.
^ Version of ' ParArg ' with multiple options .
deriving (Data, Eq, Generic, Show, Typeable)
Monoid instance for ' LaTeX ' .
| Method ' mappend ' is strict in both arguments ( except in the case when the first argument is ' TeXEmpty ' ) .
instance Monoid LaTeX where
mempty = TeXEmpty
mappend TeXEmpty x = x
mappend x TeXEmpty = x
-- This equation is to make 'mappend' associative.
mappend (TeXSeq x y) z = TeXSeq x $ mappend y z
--
mappend x y = TeXSeq x y
instance Semigroup.Semigroup LaTeX where
(<>) = mappend
-- | Calling 'between' @c l1 l2@ puts @c@ between @l1@ and @l2@ and
-- appends them.
--
-- > between c l1 l2 = l1 <> c <> l2
between :: Monoid m => m -> m -> m -> m
between c l1 l2 = l1 <> c <> l2
| Method ' fromString ' escapes LaTeX reserved characters using ' protectString ' .
instance IsString LaTeX where
fromString = TeXRaw . fromString . protectString
-- | Escape LaTeX reserved characters in a 'String'.
protectString :: String -> String
protectString = mconcat . fmap protectChar
-- | Escape LaTeX reserved characters in a 'Text'.
protectText :: Text -> Text
protectText = Data.Text.concatMap (fromString . protectChar)
protectChar :: Char -> String
protectChar '#' = "\\#"
protectChar '$' = "\\$"
protectChar '%' = "\\%"
protectChar '^' = "\\^{}"
protectChar '&' = "\\&"
protectChar '{' = "\\{"
protectChar '}' = "\\}"
protectChar '~' = "\\~{}"
protectChar '\\' = "\\textbackslash{}"
protectChar '_' = "\\_{}"
protectChar x = [x]
-- Syntax analysis
| Look into a ' LaTeX ' syntax tree to find any call to the command with
-- the given name. It returns a list of arguments with which this command
-- is called.
--
> lookForCommand = ( fmap snd . ) . . (= =)
--
-- If the returned list is empty, the command was not found. However,
-- if the list contains empty lists, those are callings to the command
-- with no arguments.
--
-- For example
--
-- > lookForCommand "author" l
--
-- would look for the argument passed to the @\\author@ command in @l@.
lookForCommand :: String -- ^ Name of the command.
-> LaTeX -- ^ LaTeX syntax tree.
-> [[TeXArg]] -- ^ List of arguments passed to the command.
lookForCommand = (fmap snd .) . matchCommand . (==)
| Traverse a ' LaTeX ' syntax tree and returns the commands ( see ' ' and
-- 'TeXCommS') that matches the condition and their arguments in each call.
matchCommand :: (String -> Bool) -> LaTeX -> [(String,[TeXArg])]
matchCommand f (TeXComm str as) =
let xs = concatMap (matchCommandArg f) as
in if f str then (str,as) : xs else xs
matchCommand f (TeXCommS str) = [(str, []) | f str]
matchCommand f (TeXEnv _ as l) =
let xs = concatMap (matchCommandArg f) as
in xs ++ matchCommand f l
matchCommand f (TeXMath _ l) = matchCommand f l
matchCommand f (TeXBraces l) = matchCommand f l
matchCommand f (TeXSeq l1 l2) = matchCommand f l1 ++ matchCommand f l2
matchCommand _ _ = []
matchCommandArg :: (String -> Bool) -> TeXArg -> [(String,[TeXArg])]
matchCommandArg f (OptArg l ) = matchCommand f l
matchCommandArg f (FixArg l ) = matchCommand f l
matchCommandArg f (MOptArg ls) = concatMap (matchCommand f) ls
matchCommandArg f (SymArg l ) = matchCommand f l
matchCommandArg f (MSymArg ls) = concatMap (matchCommand f) ls
matchCommandArg f (ParArg l ) = matchCommand f l
matchCommandArg f (MParArg ls) = concatMap (matchCommand f) ls
-- | Similar to 'lookForCommand', but applied to environments.
-- It returns a list with arguments passed and content of the
-- environment in each call.
--
> lookForEnv = ( fmap ( \(_,as , l ) - > ( as , l ) ) . ) . matchEnv . (= =)
--
lookForEnv :: String -> LaTeX -> [([TeXArg],LaTeX)]
lookForEnv = (fmap (\(_,as,l) -> (as,l)) .) . matchEnv . (==)
| Traverse a ' LaTeX ' syntax tree and returns the environments ( see
-- 'TeXEnv') that matches the condition, their arguments and their content
-- in each call.
matchEnv :: (String -> Bool) -> LaTeX -> [(String,[TeXArg],LaTeX)]
matchEnv f (TeXComm _ as) = concatMap (matchEnvArg f) as
matchEnv f (TeXEnv str as l) =
let xs = concatMap (matchEnvArg f) as
ys = matchEnv f l
zs = xs ++ ys
in if f str then (str,as,l) : zs else zs
matchEnv f (TeXMath _ l) = matchEnv f l
matchEnv f (TeXBraces l) = matchEnv f l
matchEnv f (TeXSeq l1 l2) = matchEnv f l1 ++ matchEnv f l2
matchEnv _ _ = []
matchEnvArg :: (String -> Bool) -> TeXArg -> [(String,[TeXArg],LaTeX)]
matchEnvArg f (OptArg l ) = matchEnv f l
matchEnvArg f (FixArg l ) = matchEnv f l
matchEnvArg f (MOptArg ls) = concatMap (matchEnv f) ls
matchEnvArg f (SymArg l ) = matchEnv f l
matchEnvArg f (MSymArg ls) = concatMap (matchEnv f) ls
matchEnvArg f (ParArg l ) = matchEnv f l
matchEnvArg f (MParArg ls) = concatMap (matchEnv f) ls
-- | The function 'texmap' looks for subexpressions that match a given
-- condition and applies a function to them.
--
-- > texmap c f = runIdentity . texmapM c (pure . f)
texmap :: (LaTeX -> Bool) -- ^ Condition.
-> (LaTeX -> LaTeX) -- ^ Function to apply when the condition matches.
-> LaTeX -> LaTeX
texmap c f = runIdentity . texmapM c (pure . f)
-- | Version of 'texmap' where the function returns values in a 'Monad'.
texmapM :: (Applicative m, Monad m)
=> (LaTeX -> Bool) -- ^ Condition.
-> (LaTeX -> m LaTeX) -- ^ Function to apply when the condition matches.
-> LaTeX -> m LaTeX
texmapM c f = go
where
go l@(TeXComm str as) = if c l then f l else TeXComm str <$> mapM go' as
go l@(TeXEnv str as b) = if c l then f l else TeXEnv str <$> mapM go' as <*> go b
go l@(TeXMath t b) = if c l then f l else TeXMath t <$> go b
go l@(TeXBraces b) = if c l then f l else TeXBraces <$> go b
go l@(TeXSeq l1 l2) = if c l then f l else liftA2 TeXSeq (go l1) (go l2)
go l = if c l then f l else pure l
--
go' (FixArg l ) = FixArg <$> go l
go' (OptArg l ) = OptArg <$> go l
go' (MOptArg ls) = MOptArg <$> mapM go ls
go' (SymArg l ) = SymArg <$> go l
go' (MSymArg ls) = MSymArg <$> mapM go ls
go' (ParArg l ) = ParArg <$> go l
go' (MParArg ls) = MParArg <$> mapM go ls
-- | Extract the content of the 'document' environment, if present.
getBody :: LaTeX -> Maybe LaTeX
getBody l =
case lookForEnv "document" l of
((_,b):_) -> Just b
_ -> Nothing
| Extract the preamble of a ' LaTeX ' document ( everything before the ' document '
-- environment). It could be empty.
getPreamble :: LaTeX -> LaTeX
getPreamble (TeXEnv "document" _ _) = mempty
getPreamble (TeXSeq l1 l2) = getPreamble l1 <> getPreamble l2
getPreamble l = l
---------------------------------------
-- LaTeX Arbitrary instance
arbitraryChar :: Gen Char
arbitraryChar = elements $
['A'..'Z']
++ ['a'..'z']
++ "\n-+*/!\"().,:;'@<>? "
| Utility for the instance of ' LaTeX ' to ' Arbitrary ' .
-- We generate a short sequence of characters and
escape reserved characters with ' protectText ' .
arbitraryRaw :: Gen Text
arbitraryRaw = do
n <- choose (1,20)
protectText . pack <$> replicateM n arbitraryChar
-- | Generator for names of command and environments.
-- We use only alphabetical characters.
arbitraryName :: Gen String
arbitraryName = do
n <- choose (1,10)
replicateM n $ elements $ ['a' .. 'z'] ++ ['A' .. 'Z']
instance Arbitrary Measure where
arbitrary = do
n <- choose (0,5)
let f = [Pt,Mm,Cm,In,Ex,Em] !! n
f <$> arbitrary
instance Arbitrary LaTeX where
arbitrary = arbitraryLaTeX False
arbitraryLaTeX :: Bool -> Gen LaTeX
arbitraryLaTeX inDollar = do
-- We give more chances to 'TeXRaw'.
This results in arbitrary ' LaTeX ' values
-- not getting too large.
n <- choose (0,16 :: Int)
case n of
0 -> if inDollar then arbitraryLaTeX True else pure TeXEmpty
1 -> do m <- choose (0,5)
TeXComm <$> arbitraryName <*> vectorOf m arbitrary
2 -> TeXCommS <$> arbitraryName
3 -> do m <- choose (0,5)
TeXEnv <$> arbitraryName <*> vectorOf m arbitrary <*> arbitrary
4 -> if inDollar
then arbitraryLaTeX True
else do m <- choose (0,3)
let t = [Parentheses,Square,Dollar,DoubleDollar] !! m
TeXMath <$> pure t <*> arbitraryLaTeX (t == Dollar || t == DoubleDollar)
5 -> TeXLineBreak <$> arbitrary <*> arbitrary
6 -> TeXBraces <$> arbitrary
7 -> TeXComment <$> arbitraryRaw
8 -> TeXSeq <$> (if inDollar then arbitraryLaTeX True else arbitrary) <*> arbitrary
_ -> TeXRaw <$> arbitraryRaw
instance Arbitrary TeXArg where
arbitrary = do
n <- choose (0,6 :: Int)
case n of
0 -> OptArg <$> arbitrary
1 -> do m <- choose (1,5)
MOptArg <$> vectorOf m arbitrary
2 -> SymArg <$> arbitrary
3 -> do m <- choose (1,5)
MSymArg <$> vectorOf m arbitrary
4 -> ParArg <$> arbitrary
5 -> do m <- choose (1,5)
MParArg <$> vectorOf m arbitrary
_ -> FixArg <$> arbitrary
instance Hashable Measure
instance Hashable MathType
instance Hashable TeXArg
instance Hashable LaTeX
| null | https://raw.githubusercontent.com/Daniel-Diaz/HaTeX/aae193763157378500ebedc733c913e74f53b060/Text/LaTeX/Base/Syntax.hs | haskell | | LaTeX syntax description in the definition of the 'LaTeX' datatype.
If you want to add new commands or environments not defined in
* @LaTeX@ datatype
* Escaping reserved characters
* Syntax analysis
** Utils
For instance:
^ Centimeter.
^ Inch.
| Different types of syntax for mathematical expressions.
| Type of @LaTeX@ blocks.
^ Raw text.
^ Constructor for commands.
^ Constructor for commands with no arguments.
When rendering, no space or @{}@ will be added at
the end.
^ Constructor for environments.
Third, its content.
^ Mathematical expressions.
^ Line break command.
^ A expression between braces.
^ Comments.
Use '<>' preferably.
^ An empty block.
/Neutral element/ of '<>'.
^ Fixed argument.
^ Optional argument.
^ Multiple optional argument.
^ Version of 'SymArg' with multiple options.
This equation is to make 'mappend' associative.
| Calling 'between' @c l1 l2@ puts @c@ between @l1@ and @l2@ and
appends them.
> between c l1 l2 = l1 <> c <> l2
| Escape LaTeX reserved characters in a 'String'.
| Escape LaTeX reserved characters in a 'Text'.
Syntax analysis
the given name. It returns a list of arguments with which this command
is called.
If the returned list is empty, the command was not found. However,
if the list contains empty lists, those are callings to the command
with no arguments.
For example
> lookForCommand "author" l
would look for the argument passed to the @\\author@ command in @l@.
^ Name of the command.
^ LaTeX syntax tree.
^ List of arguments passed to the command.
'TeXCommS') that matches the condition and their arguments in each call.
| Similar to 'lookForCommand', but applied to environments.
It returns a list with arguments passed and content of the
environment in each call.
'TeXEnv') that matches the condition, their arguments and their content
in each call.
| The function 'texmap' looks for subexpressions that match a given
condition and applies a function to them.
> texmap c f = runIdentity . texmapM c (pure . f)
^ Condition.
^ Function to apply when the condition matches.
| Version of 'texmap' where the function returns values in a 'Monad'.
^ Condition.
^ Function to apply when the condition matches.
| Extract the content of the 'document' environment, if present.
environment). It could be empty.
-------------------------------------
LaTeX Arbitrary instance
We generate a short sequence of characters and
| Generator for names of command and environments.
We use only alphabetical characters.
We give more chances to 'TeXRaw'.
not getting too large. |
# LANGUAGE CPP , DeriveDataTypeable , DeriveGeneric #
the library , import this module and use ' LaTeX ' data constructors .
module Text.LaTeX.Base.Syntax
Measure (..)
, MathType (..)
, LaTeX (..)
, TeXArg (..)
, (<>), between
, protectString
, protectText
, matchCommand
, lookForCommand
, matchEnv
, lookForEnv
, texmap
, texmapM
, getBody
, getPreamble
) where
import Data.Text (Text,pack)
import qualified Data.Text
import qualified Data.Semigroup as Semigroup
import Data.String
import Control.Applicative
import Control.Monad (replicateM)
import Data.Functor.Identity (runIdentity)
import Data.Data (Data)
import Data.Typeable
import Test.QuickCheck
import Data.Hashable
import GHC.Generics (Generic)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid
#endif
| Measure units defined in LaTeX. Use ' CustomMeasure ' to use commands like ' ' .
> rule Nothing ( CustomMeasure linewidth ) ( Pt 2 )
This will create a black box ( see ' rule ' ) as wide as the text and two points tall .
data Measure =
^ A point is 1/72.27 inch , that means about 0.0138 inch or 0.3515 mm .
^ .
^ The height of an \"x\ " in the current font .
^ The width of an " in the current font .
^ You can introduce a ' LaTeX ' expression as a measure .
deriving (Data, Eq, Generic, Show, Typeable)
data MathType = Parentheses | Square | Dollar | DoubleDollar
deriving (Data, Eq, Generic, Show, Typeable)
data LaTeX =
First argument is the name of the command .
Second , its arguments .
First argument is the name of the environment .
Second , its arguments .
^ Sequencing of ' LaTeX ' expressions .
deriving (Data, Eq, Generic, Show, Typeable)
| An argument for a ' LaTeX ' command or environment .
data TeXArg =
^ An argument enclosed between @\<@ and @\>@.
^ An argument enclosed between @(@ and @)@.
^ Version of ' ParArg ' with multiple options .
deriving (Data, Eq, Generic, Show, Typeable)
Monoid instance for ' LaTeX ' .
| Method ' mappend ' is strict in both arguments ( except in the case when the first argument is ' TeXEmpty ' ) .
instance Monoid LaTeX where
mempty = TeXEmpty
mappend TeXEmpty x = x
mappend x TeXEmpty = x
mappend (TeXSeq x y) z = TeXSeq x $ mappend y z
mappend x y = TeXSeq x y
instance Semigroup.Semigroup LaTeX where
(<>) = mappend
between :: Monoid m => m -> m -> m -> m
between c l1 l2 = l1 <> c <> l2
| Method ' fromString ' escapes LaTeX reserved characters using ' protectString ' .
instance IsString LaTeX where
fromString = TeXRaw . fromString . protectString
protectString :: String -> String
protectString = mconcat . fmap protectChar
protectText :: Text -> Text
protectText = Data.Text.concatMap (fromString . protectChar)
protectChar :: Char -> String
protectChar '#' = "\\#"
protectChar '$' = "\\$"
protectChar '%' = "\\%"
protectChar '^' = "\\^{}"
protectChar '&' = "\\&"
protectChar '{' = "\\{"
protectChar '}' = "\\}"
protectChar '~' = "\\~{}"
protectChar '\\' = "\\textbackslash{}"
protectChar '_' = "\\_{}"
protectChar x = [x]
| Look into a ' LaTeX ' syntax tree to find any call to the command with
> lookForCommand = ( fmap snd . ) . . (= =)
lookForCommand = (fmap snd .) . matchCommand . (==)
| Traverse a ' LaTeX ' syntax tree and returns the commands ( see ' ' and
matchCommand :: (String -> Bool) -> LaTeX -> [(String,[TeXArg])]
matchCommand f (TeXComm str as) =
let xs = concatMap (matchCommandArg f) as
in if f str then (str,as) : xs else xs
matchCommand f (TeXCommS str) = [(str, []) | f str]
matchCommand f (TeXEnv _ as l) =
let xs = concatMap (matchCommandArg f) as
in xs ++ matchCommand f l
matchCommand f (TeXMath _ l) = matchCommand f l
matchCommand f (TeXBraces l) = matchCommand f l
matchCommand f (TeXSeq l1 l2) = matchCommand f l1 ++ matchCommand f l2
matchCommand _ _ = []
matchCommandArg :: (String -> Bool) -> TeXArg -> [(String,[TeXArg])]
matchCommandArg f (OptArg l ) = matchCommand f l
matchCommandArg f (FixArg l ) = matchCommand f l
matchCommandArg f (MOptArg ls) = concatMap (matchCommand f) ls
matchCommandArg f (SymArg l ) = matchCommand f l
matchCommandArg f (MSymArg ls) = concatMap (matchCommand f) ls
matchCommandArg f (ParArg l ) = matchCommand f l
matchCommandArg f (MParArg ls) = concatMap (matchCommand f) ls
> lookForEnv = ( fmap ( \(_,as , l ) - > ( as , l ) ) . ) . matchEnv . (= =)
lookForEnv :: String -> LaTeX -> [([TeXArg],LaTeX)]
lookForEnv = (fmap (\(_,as,l) -> (as,l)) .) . matchEnv . (==)
| Traverse a ' LaTeX ' syntax tree and returns the environments ( see
matchEnv :: (String -> Bool) -> LaTeX -> [(String,[TeXArg],LaTeX)]
matchEnv f (TeXComm _ as) = concatMap (matchEnvArg f) as
matchEnv f (TeXEnv str as l) =
let xs = concatMap (matchEnvArg f) as
ys = matchEnv f l
zs = xs ++ ys
in if f str then (str,as,l) : zs else zs
matchEnv f (TeXMath _ l) = matchEnv f l
matchEnv f (TeXBraces l) = matchEnv f l
matchEnv f (TeXSeq l1 l2) = matchEnv f l1 ++ matchEnv f l2
matchEnv _ _ = []
matchEnvArg :: (String -> Bool) -> TeXArg -> [(String,[TeXArg],LaTeX)]
matchEnvArg f (OptArg l ) = matchEnv f l
matchEnvArg f (FixArg l ) = matchEnv f l
matchEnvArg f (MOptArg ls) = concatMap (matchEnv f) ls
matchEnvArg f (SymArg l ) = matchEnv f l
matchEnvArg f (MSymArg ls) = concatMap (matchEnv f) ls
matchEnvArg f (ParArg l ) = matchEnv f l
matchEnvArg f (MParArg ls) = concatMap (matchEnv f) ls
-> LaTeX -> LaTeX
texmap c f = runIdentity . texmapM c (pure . f)
texmapM :: (Applicative m, Monad m)
-> LaTeX -> m LaTeX
texmapM c f = go
where
go l@(TeXComm str as) = if c l then f l else TeXComm str <$> mapM go' as
go l@(TeXEnv str as b) = if c l then f l else TeXEnv str <$> mapM go' as <*> go b
go l@(TeXMath t b) = if c l then f l else TeXMath t <$> go b
go l@(TeXBraces b) = if c l then f l else TeXBraces <$> go b
go l@(TeXSeq l1 l2) = if c l then f l else liftA2 TeXSeq (go l1) (go l2)
go l = if c l then f l else pure l
go' (FixArg l ) = FixArg <$> go l
go' (OptArg l ) = OptArg <$> go l
go' (MOptArg ls) = MOptArg <$> mapM go ls
go' (SymArg l ) = SymArg <$> go l
go' (MSymArg ls) = MSymArg <$> mapM go ls
go' (ParArg l ) = ParArg <$> go l
go' (MParArg ls) = MParArg <$> mapM go ls
getBody :: LaTeX -> Maybe LaTeX
getBody l =
case lookForEnv "document" l of
((_,b):_) -> Just b
_ -> Nothing
| Extract the preamble of a ' LaTeX ' document ( everything before the ' document '
getPreamble :: LaTeX -> LaTeX
getPreamble (TeXEnv "document" _ _) = mempty
getPreamble (TeXSeq l1 l2) = getPreamble l1 <> getPreamble l2
getPreamble l = l
arbitraryChar :: Gen Char
arbitraryChar = elements $
['A'..'Z']
++ ['a'..'z']
++ "\n-+*/!\"().,:;'@<>? "
| Utility for the instance of ' LaTeX ' to ' Arbitrary ' .
escape reserved characters with ' protectText ' .
arbitraryRaw :: Gen Text
arbitraryRaw = do
n <- choose (1,20)
protectText . pack <$> replicateM n arbitraryChar
arbitraryName :: Gen String
arbitraryName = do
n <- choose (1,10)
replicateM n $ elements $ ['a' .. 'z'] ++ ['A' .. 'Z']
instance Arbitrary Measure where
arbitrary = do
n <- choose (0,5)
let f = [Pt,Mm,Cm,In,Ex,Em] !! n
f <$> arbitrary
instance Arbitrary LaTeX where
arbitrary = arbitraryLaTeX False
arbitraryLaTeX :: Bool -> Gen LaTeX
arbitraryLaTeX inDollar = do
This results in arbitrary ' LaTeX ' values
n <- choose (0,16 :: Int)
case n of
0 -> if inDollar then arbitraryLaTeX True else pure TeXEmpty
1 -> do m <- choose (0,5)
TeXComm <$> arbitraryName <*> vectorOf m arbitrary
2 -> TeXCommS <$> arbitraryName
3 -> do m <- choose (0,5)
TeXEnv <$> arbitraryName <*> vectorOf m arbitrary <*> arbitrary
4 -> if inDollar
then arbitraryLaTeX True
else do m <- choose (0,3)
let t = [Parentheses,Square,Dollar,DoubleDollar] !! m
TeXMath <$> pure t <*> arbitraryLaTeX (t == Dollar || t == DoubleDollar)
5 -> TeXLineBreak <$> arbitrary <*> arbitrary
6 -> TeXBraces <$> arbitrary
7 -> TeXComment <$> arbitraryRaw
8 -> TeXSeq <$> (if inDollar then arbitraryLaTeX True else arbitrary) <*> arbitrary
_ -> TeXRaw <$> arbitraryRaw
instance Arbitrary TeXArg where
arbitrary = do
n <- choose (0,6 :: Int)
case n of
0 -> OptArg <$> arbitrary
1 -> do m <- choose (1,5)
MOptArg <$> vectorOf m arbitrary
2 -> SymArg <$> arbitrary
3 -> do m <- choose (1,5)
MSymArg <$> vectorOf m arbitrary
4 -> ParArg <$> arbitrary
5 -> do m <- choose (1,5)
MParArg <$> vectorOf m arbitrary
_ -> FixArg <$> arbitrary
instance Hashable Measure
instance Hashable MathType
instance Hashable TeXArg
instance Hashable LaTeX
|
80ea7ec820ab149fe136d59ce3d7f7dc898b25122923aea1694e0e358c696589 | hidaris/thinking-dumps | tests.rkt | (module tests mzscheme
(provide test-list)
;;;;;;;;;;;;;;;; tests ;;;;;;;;;;;;;;;;
(define test-list
'(
;; simple arithmetic
(positive-const "11" 11)
(negative-const "-33" -33)
(simple-arith-1 "-(44,33)" 11)
;; nested arithmetic
(nested-arith-left "-(-(44,33),22)" -11)
(nested-arith-right "-(55, -(22,11))" 44)
;; simple variables
(test-var-1 "x" 10)
(test-var-2 "-(x,1)" 9)
(test-var-3 "-(1,x)" -9)
;; simple unbound variables
(test-unbound-var-1 "foo" error)
(test-unbound-var-2 "-(x,foo)" error)
;; simple conditionals
(if-true "if zero?(0) then 3 else 4" 3)
(if-false "if zero?(1) then 3 else 4" 4)
;; test dynamic typechecking
(no-bool-to-diff-1 "-(zero?(0),1)" error)
(no-bool-to-diff-2 "-(1,zero?(0))" error)
(no-int-to-if "if 1 then 2 else 3" error)
;; make sure that the test and both arms get evaluated
;; properly.
(if-eval-test-true "if zero?(-(11,11)) then 3 else 4" 3)
(if-eval-test-false "if zero?(-(11, 12)) then 3 else 4" 4)
;; and make sure the other arm doesn't get evaluated.
(if-eval-test-true-2 "if zero?(-(11, 11)) then 3 else foo" 3)
(if-eval-test-false-2 "if zero?(-(11,12)) then foo else 4" 4)
;; simple let
(simple-let-1 "let x = 3 in x" 3)
make sure the body and rhs get evaluated
(eval-let-body "let x = 3 in -(x,1)" 2)
(eval-let-rhs "let x = -(4,1) in -(x,1)" 2)
;; check nested let and shadowing
(simple-nested-let "let x = 3 in let y = 4 in -(x,y)" -1)
(check-shadowing-in-body "let x = 3 in let x = 4 in x" 4)
(check-shadowing-in-rhs "let x = 3 in let x = -(x,1) in x" 2)
;; simple applications
(apply-proc-in-rator-pos "(proc(x) -(x,1) 30)" 29)
(apply-simple-proc "let f = proc (x) -(x,1) in (f 30)" 29)
(let-to-proc-1 "(proc(f)(f 30) proc(x)-(x,1))" 29)
(nested-procs "((proc (x) proc (y) -(x,y) 5) 6)" -1)
(nested-procs2 "let f = proc(x) proc (y) -(x,y) in ((f -(10,5)) 6)"
-1)
(y-combinator-1 "
let fix = proc (f)
let d = proc (x) proc (z) ((f (x x)) z)
in proc (n) ((f (d d)) n)
in let
t4m = proc (f) proc(x) if zero?(x) then 0 else -((f -(x,1)),-4)
in let times4 = (fix t4m)
in (times4 3)" 12)
;; simple letrecs
(simple-letrec-1 "letrec f(x) = -(x,1) in (f 33)" 32)
(simple-letrec-2
"letrec f(x) = if zero?(x) then 0 else -((f -(x,1)), -2) in (f 4)"
8)
(simple-letrec-3
"let m = -5
in letrec f(x) = if zero?(x) then 0 else -((f -(x,1)), m) in (f 4)"
20)
; (fact-of-6 "letrec
fact(x ) = if ) then 1 else * ( x , ( fact ) ) )
in ( fact 6 ) "
720 )
(HO-nested-letrecs
"letrec even(odd) = proc(x) if zero?(x) then 1 else (odd -(x,1))
in letrec odd(x) = if zero?(x) then 0 else ((even odd) -(x,1))
in (odd 13)" 1)
(begin-test-1
"begin 1; 2; 3 end"
3)
(gensym-test-1
"let g = let counter = newref(0)
in proc (dummy) let d = setref(counter, -(deref(counter),-1))
in deref(counter)
in -((g 11),(g 22))"
-1)
(simple-store-test-1 "let x = newref(17) in deref(x)" 17)
(assignment-test-1 "let x = newref(17)
in begin setref(x,27); deref(x) end"
27)
(gensym-test-2
"let g = let counter = newref(0)
in proc (dummy) begin
setref(counter, -(deref(counter),-1));
deref(counter)
end
in -((g 11),(g 22))"
-1)
(even-odd-via-set-1 "
let x = newref(0)
in letrec even(d) = if zero?(deref(x))
then 1
else let d = setref(x, -(deref(x),1))
in (odd d)
odd(d) = if zero?(deref(x))
then 0
else let d = setref(x, -(deref(x),1))
in (even d)
in let d = setref(x,13) in (odd -100)" 1)
(even-odd-via-set-1 "
let x = newref(0)
in letrec even(d) = if zero?(deref(x))
then 1
else let d = setref(x, -(deref(x),1))
in (odd d)
odd(d) = if zero?(deref(x))
then 0
else let d = setref(x, -(deref(x),1))
in (even d)
in let d = setref(x,13) in (odd -100)" 1)
(show-allocation-1 "
let x = newref(22)
in let f = proc (z) let zz = newref(-(z,deref(x))) in deref(zz)
in -((f 66), (f 55))"
11)
(chains-1 "
let x = newref(newref(0))
in begin
setref(deref(x), 11);
deref(deref(x))
end"
11)
))
)
| null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/eopl-solutions/chap4/4-10/explicit-refs/tests.rkt | racket | tests ;;;;;;;;;;;;;;;;
simple arithmetic
nested arithmetic
simple variables
simple unbound variables
simple conditionals
test dynamic typechecking
make sure that the test and both arms get evaluated
properly.
and make sure the other arm doesn't get evaluated.
simple let
check nested let and shadowing
simple applications
simple letrecs
(fact-of-6 "letrec
deref(x) end"
| (module tests mzscheme
(provide test-list)
(define test-list
'(
(positive-const "11" 11)
(negative-const "-33" -33)
(simple-arith-1 "-(44,33)" 11)
(nested-arith-left "-(-(44,33),22)" -11)
(nested-arith-right "-(55, -(22,11))" 44)
(test-var-1 "x" 10)
(test-var-2 "-(x,1)" 9)
(test-var-3 "-(1,x)" -9)
(test-unbound-var-1 "foo" error)
(test-unbound-var-2 "-(x,foo)" error)
(if-true "if zero?(0) then 3 else 4" 3)
(if-false "if zero?(1) then 3 else 4" 4)
(no-bool-to-diff-1 "-(zero?(0),1)" error)
(no-bool-to-diff-2 "-(1,zero?(0))" error)
(no-int-to-if "if 1 then 2 else 3" error)
(if-eval-test-true "if zero?(-(11,11)) then 3 else 4" 3)
(if-eval-test-false "if zero?(-(11, 12)) then 3 else 4" 4)
(if-eval-test-true-2 "if zero?(-(11, 11)) then 3 else foo" 3)
(if-eval-test-false-2 "if zero?(-(11,12)) then foo else 4" 4)
(simple-let-1 "let x = 3 in x" 3)
make sure the body and rhs get evaluated
(eval-let-body "let x = 3 in -(x,1)" 2)
(eval-let-rhs "let x = -(4,1) in -(x,1)" 2)
(simple-nested-let "let x = 3 in let y = 4 in -(x,y)" -1)
(check-shadowing-in-body "let x = 3 in let x = 4 in x" 4)
(check-shadowing-in-rhs "let x = 3 in let x = -(x,1) in x" 2)
(apply-proc-in-rator-pos "(proc(x) -(x,1) 30)" 29)
(apply-simple-proc "let f = proc (x) -(x,1) in (f 30)" 29)
(let-to-proc-1 "(proc(f)(f 30) proc(x)-(x,1))" 29)
(nested-procs "((proc (x) proc (y) -(x,y) 5) 6)" -1)
(nested-procs2 "let f = proc(x) proc (y) -(x,y) in ((f -(10,5)) 6)"
-1)
(y-combinator-1 "
let fix = proc (f)
let d = proc (x) proc (z) ((f (x x)) z)
in proc (n) ((f (d d)) n)
in let
t4m = proc (f) proc(x) if zero?(x) then 0 else -((f -(x,1)),-4)
in let times4 = (fix t4m)
in (times4 3)" 12)
(simple-letrec-1 "letrec f(x) = -(x,1) in (f 33)" 32)
(simple-letrec-2
"letrec f(x) = if zero?(x) then 0 else -((f -(x,1)), -2) in (f 4)"
8)
(simple-letrec-3
"let m = -5
in letrec f(x) = if zero?(x) then 0 else -((f -(x,1)), m) in (f 4)"
20)
fact(x ) = if ) then 1 else * ( x , ( fact ) ) )
in ( fact 6 ) "
720 )
(HO-nested-letrecs
"letrec even(odd) = proc(x) if zero?(x) then 1 else (odd -(x,1))
in letrec odd(x) = if zero?(x) then 0 else ((even odd) -(x,1))
in (odd 13)" 1)
(begin-test-1
"begin 1; 2; 3 end"
3)
(gensym-test-1
"let g = let counter = newref(0)
in proc (dummy) let d = setref(counter, -(deref(counter),-1))
in deref(counter)
in -((g 11),(g 22))"
-1)
(simple-store-test-1 "let x = newref(17) in deref(x)" 17)
(assignment-test-1 "let x = newref(17)
27)
(gensym-test-2
"let g = let counter = newref(0)
in proc (dummy) begin
deref(counter)
end
in -((g 11),(g 22))"
-1)
(even-odd-via-set-1 "
let x = newref(0)
in letrec even(d) = if zero?(deref(x))
then 1
else let d = setref(x, -(deref(x),1))
in (odd d)
odd(d) = if zero?(deref(x))
then 0
else let d = setref(x, -(deref(x),1))
in (even d)
in let d = setref(x,13) in (odd -100)" 1)
(even-odd-via-set-1 "
let x = newref(0)
in letrec even(d) = if zero?(deref(x))
then 1
else let d = setref(x, -(deref(x),1))
in (odd d)
odd(d) = if zero?(deref(x))
then 0
else let d = setref(x, -(deref(x),1))
in (even d)
in let d = setref(x,13) in (odd -100)" 1)
(show-allocation-1 "
let x = newref(22)
in let f = proc (z) let zz = newref(-(z,deref(x))) in deref(zz)
in -((f 66), (f 55))"
11)
(chains-1 "
let x = newref(newref(0))
in begin
deref(deref(x))
end"
11)
))
)
|
fb8613ee0f8f185cd696afb577627e99827e76f0a73fdbb486a0ad97462b3243 | voxoz/emqttd | emqttd_kvs.erl | -module(emqttd_kvs).
-include_lib("kvs/include/metainfo.hrl").
-include("emqttd.hrl").
-export([metainfo/0,tables/0]).
metainfo() -> #schema{name=kvs,tables= tables() }.
tables() -> [ #table{name=mqtt_subproperty,
fields=record_info(fields,mqtt_subproperty),
copy_type=disc_copies, type=set},
#table{name=mqtt_subscription,
fields=record_info(fields,mqtt_subscription),
copy_type=disc_copies, type=bag},
#table{name=mqtt_subscriber,
fields=record_info(fields,mqtt_subscriber),
copy_type=disc_copies, type=bag}
].
| null | https://raw.githubusercontent.com/voxoz/emqttd/2be612e0e7a00a866cd9af350a030966d73fbc09/src/emqttd_kvs.erl | erlang | -module(emqttd_kvs).
-include_lib("kvs/include/metainfo.hrl").
-include("emqttd.hrl").
-export([metainfo/0,tables/0]).
metainfo() -> #schema{name=kvs,tables= tables() }.
tables() -> [ #table{name=mqtt_subproperty,
fields=record_info(fields,mqtt_subproperty),
copy_type=disc_copies, type=set},
#table{name=mqtt_subscription,
fields=record_info(fields,mqtt_subscription),
copy_type=disc_copies, type=bag},
#table{name=mqtt_subscriber,
fields=record_info(fields,mqtt_subscriber),
copy_type=disc_copies, type=bag}
].
|
|
9f2ef2ba6a0677efb5aa837f63f059d980dad01787854c17306276af9943b715 | sondresl/AdventOfCode | Day18.hs | {-# LANGUAGE OverloadedStrings #-}
module Day18 where
import Text.Parsec
( char, digit, string, between, many1, (<?>), (<|>), parse, Parsec )
import Text.Parsec.Expr
( buildExpressionParser, Assoc(AssocLeft), Operator(Infix) )
import Control.Lens (Identity)
type Op = Operator String () Identity Int
type Parser = Parsec String ()
expr1 :: Parser Int
expr1 = buildExpressionParser table1 term1 <?> "expression"
where
term1 = between (char '(') (char ')') expr1 <|> (read <$> many1 digit) <?> "term"
table1 = [ [ binary "*" (*) AssocLeft, binary "+" (+) AssocLeft ] ]
expr2 :: Parser Int
expr2 = buildExpressionParser table2 term2 <?> "expression"
where
term2 = between (char '(') (char ')') expr2 <|> (read <$> many1 digit) <?> "term"
table2 = [ [ binary "+" (+) AssocLeft ]
, [ binary "*" (*) AssocLeft ]
]
binary :: String -> (Int -> Int -> Int) -> Assoc -> Op
binary name f = Infix (f <$ string name)
run :: Parsec String () Int -> [String] -> Int
run e = sum . either (error . show) id . traverse (parse e "")
main :: IO ()
main = do
input <- lines . filter (/= ' ') <$> readFile "../data/day18.in"
print $ run expr1 input
print $ run expr2 input
8929569623593
231235959382961
| null | https://raw.githubusercontent.com/sondresl/AdventOfCode/51525441795417f31b3eb67a690aa5534d1e699b/2020/Haskell/src/Day18.hs | haskell | # LANGUAGE OverloadedStrings # | module Day18 where
import Text.Parsec
( char, digit, string, between, many1, (<?>), (<|>), parse, Parsec )
import Text.Parsec.Expr
( buildExpressionParser, Assoc(AssocLeft), Operator(Infix) )
import Control.Lens (Identity)
type Op = Operator String () Identity Int
type Parser = Parsec String ()
expr1 :: Parser Int
expr1 = buildExpressionParser table1 term1 <?> "expression"
where
term1 = between (char '(') (char ')') expr1 <|> (read <$> many1 digit) <?> "term"
table1 = [ [ binary "*" (*) AssocLeft, binary "+" (+) AssocLeft ] ]
expr2 :: Parser Int
expr2 = buildExpressionParser table2 term2 <?> "expression"
where
term2 = between (char '(') (char ')') expr2 <|> (read <$> many1 digit) <?> "term"
table2 = [ [ binary "+" (+) AssocLeft ]
, [ binary "*" (*) AssocLeft ]
]
binary :: String -> (Int -> Int -> Int) -> Assoc -> Op
binary name f = Infix (f <$ string name)
run :: Parsec String () Int -> [String] -> Int
run e = sum . either (error . show) id . traverse (parse e "")
main :: IO ()
main = do
input <- lines . filter (/= ' ') <$> readFile "../data/day18.in"
print $ run expr1 input
print $ run expr2 input
8929569623593
231235959382961
|
fdaaf7359efd24967206f1663a2d3cbe63ff6479bfc0995ed13ed748281678b9 | fission-codes/fission | Stat.hs | module Network.IPFS.Stat
( getStatRemote
, getSizeRemote
, getSize
, module Network.IPFS.Stat.Types
) where
import Data.ByteString.Lazy.Char8 as CL
import qualified RIO.ByteString.Lazy as Lazy
import qualified RIO.List as List
import qualified Network.IPFS.Internal.UTF8 as UTF8
import Network.IPFS.Local.Class as IPFS
import Network.IPFS.Prelude
import Network.IPFS.Remote.Class as Remote
import Network.IPFS.Get.Error as IPFS.Get
import qualified Network.IPFS.Process.Error as Process
import Network.IPFS.Bytes.Types
import Network.IPFS.Stat.Types
import Network.IPFS.Types as IPFS
getStatRemote :: MonadRemoteIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Stat)
getStatRemote cid =
Remote.ipfsStat cid >>= \case
Right statPayload -> return $ Right statPayload
Left err -> return . Left $ IPFS.Get.WebError err
getSizeRemote :: MonadRemoteIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Bytes)
getSizeRemote cid =
getStatRemote cid >>= \case
Left err ->
return $ Left err
Right Stat {cumulativeSize} ->
case cumulativeSize of
Left err -> return $ Left $ IPFS.Get.SizeError err
Right size -> return $ Right size
getSize :: MonadLocalIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Integer)
getSize cid@(CID hash) = IPFS.runLocal ["object", "stat"] (Lazy.fromStrict <| encodeUtf8 hash) >>= \case
Left err -> case err of
Process.Timeout secs -> return . Left $ TimedOut cid secs
Process.UnknownErr raw -> return . Left . UnknownErr $ UTF8.textShow raw
Right contents ->
case parseSize contents of
Nothing -> return . Left . UnexpectedOutput $ "Could not parse CumulativeSize"
Just (size, _) -> return $ Right size
parseSize :: Lazy.ByteString -> Maybe (Integer, Lazy.ByteString)
parseSize lbs = do
finalLine <- List.lastMaybe $ CL.lines lbs
finalWord <- List.lastMaybe $ CL.words finalLine
CL.readInteger finalWord
| null | https://raw.githubusercontent.com/fission-codes/fission/fb76d255f06ea73187c9b787bd207c3778e1b559/ipfs/library/Network/IPFS/Stat.hs | haskell | module Network.IPFS.Stat
( getStatRemote
, getSizeRemote
, getSize
, module Network.IPFS.Stat.Types
) where
import Data.ByteString.Lazy.Char8 as CL
import qualified RIO.ByteString.Lazy as Lazy
import qualified RIO.List as List
import qualified Network.IPFS.Internal.UTF8 as UTF8
import Network.IPFS.Local.Class as IPFS
import Network.IPFS.Prelude
import Network.IPFS.Remote.Class as Remote
import Network.IPFS.Get.Error as IPFS.Get
import qualified Network.IPFS.Process.Error as Process
import Network.IPFS.Bytes.Types
import Network.IPFS.Stat.Types
import Network.IPFS.Types as IPFS
getStatRemote :: MonadRemoteIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Stat)
getStatRemote cid =
Remote.ipfsStat cid >>= \case
Right statPayload -> return $ Right statPayload
Left err -> return . Left $ IPFS.Get.WebError err
getSizeRemote :: MonadRemoteIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Bytes)
getSizeRemote cid =
getStatRemote cid >>= \case
Left err ->
return $ Left err
Right Stat {cumulativeSize} ->
case cumulativeSize of
Left err -> return $ Left $ IPFS.Get.SizeError err
Right size -> return $ Right size
getSize :: MonadLocalIPFS m => IPFS.CID -> m (Either IPFS.Get.Error Integer)
getSize cid@(CID hash) = IPFS.runLocal ["object", "stat"] (Lazy.fromStrict <| encodeUtf8 hash) >>= \case
Left err -> case err of
Process.Timeout secs -> return . Left $ TimedOut cid secs
Process.UnknownErr raw -> return . Left . UnknownErr $ UTF8.textShow raw
Right contents ->
case parseSize contents of
Nothing -> return . Left . UnexpectedOutput $ "Could not parse CumulativeSize"
Just (size, _) -> return $ Right size
parseSize :: Lazy.ByteString -> Maybe (Integer, Lazy.ByteString)
parseSize lbs = do
finalLine <- List.lastMaybe $ CL.lines lbs
finalWord <- List.lastMaybe $ CL.words finalLine
CL.readInteger finalWord
|
|
8e2071f667e74eab5309ca2d87e5036fcbb8870dc3e05f495bc4d271d92c3eb4 | prg-titech/baccaml | simple2.ml | ;;
let rec interp code pc a =
jit_dipatch (pc = 0) code a;
(* if pc = 0 then test_trace a bytecode else *)
let instr = code.(pc) in
if instr = 0
then (* INCR_A *)
interp code (pc + 1) (a + 1)
else if instr = 1
then (* DECR_A *)
interp code (pc + 1) (a - 1)
else if instr = 10
then (
JUMP
let t = code.(pc + 1) in
interp code t a)
else if instr = 11
then
JUMP_IF
if 0 < a
then (
let t1 = code.(pc + 1) in
interp code t1 a)
else (
let t2 = code.(pc + 2) in
interp code t2 a)
else if instr = 20
then (* HALT *)
a
else (* OTHERS *)
-1
in
let code = Array.make 20 0 in
code.(0) <- 0;
code.(1) <- 11;
code.(2) <- 4;
code.(3) <- 8;
(* then *)
code.(4) <- 0;
code.(5) <- 0;
code.(6) <- 10;
code.(7) <- 12;
(* else *)
code.(8) <- 1;
code.(9) <- 1;
code.(10) <- 10;
code.(11) <- 12;
code.(12) <- 20;
print_int (interp code 0 0)
| null | https://raw.githubusercontent.com/prg-titech/baccaml/a3b95e996a995b5004ca897a4b6419edfee590aa/test/interp_example/simple2.ml | ocaml | if pc = 0 then test_trace a bytecode else
INCR_A
DECR_A
HALT
OTHERS
then
else | ;;
let rec interp code pc a =
jit_dipatch (pc = 0) code a;
let instr = code.(pc) in
if instr = 0
interp code (pc + 1) (a + 1)
else if instr = 1
interp code (pc + 1) (a - 1)
else if instr = 10
then (
JUMP
let t = code.(pc + 1) in
interp code t a)
else if instr = 11
then
JUMP_IF
if 0 < a
then (
let t1 = code.(pc + 1) in
interp code t1 a)
else (
let t2 = code.(pc + 2) in
interp code t2 a)
else if instr = 20
a
-1
in
let code = Array.make 20 0 in
code.(0) <- 0;
code.(1) <- 11;
code.(2) <- 4;
code.(3) <- 8;
code.(4) <- 0;
code.(5) <- 0;
code.(6) <- 10;
code.(7) <- 12;
code.(8) <- 1;
code.(9) <- 1;
code.(10) <- 10;
code.(11) <- 12;
code.(12) <- 20;
print_int (interp code 0 0)
|
9d7569fa1bee361d25a2b1810fa0b123a3307119e491b7492677fdcd7e7ffedc | AndrasKovacs/ELTE-func-lang | Notes12.hs | # language BangPatterns #
module Notes12 where
import Data.List
import Data.Foldable
Haskell fordítás / optimalizálás
--------------------------------------------------------------------------------
Optimalizálás : fordító által
programozó által
-- Haskell-ben:
- GHC jelentősen átalakítja a kódot ,
- ( több transzformálására , mellékhatás - mentes átrendezi a fordító )
példa : C kód , hívunk egy fordítási egységen )
- ránézésre kitalálni , hogy mire fordul adott Haskell kód
- Programozó általi optimalizáláshoz szükséges a fordítás és runtime system ismerete
- GHC sokat javít , de ha , akkor jelentős , a kapott támogatás
a GHC - től pedig .
- Összességében compiler a GHC jó választás az RTS teljesítménye és optimalizációs
( hatékony kis allokációkhoz )
-- Alap futási modell
--------------------------------------------------------------------------------
1 .
egyszerűsítés : Bool ~ 64 bit , vagy 0 vagy 1 ( lustaság miatt : 0 vagy 1 vagy 2 )
f :: Bool -> Bool -> Bool
f b1 b2 = if b1 then b2 else True
szaturált hívás ( egy függvényt az )
pl : f True False ( ugyanaz , mint Java / C )
: closure létrehozása
-- let g = f True
h :: Bool -> Maybe (Bool -> Bool)
closure : | f pointer | 1 | True | ( fv ptr , arg . száma , argumentumok )
else Just not
-- case mf of
Just f - > f False -- ( f top - level ( Bool - > Bool ) , , closure is )
( ha , vizsgálni , )
( ha van összes param , a fv - t , egyébként új closure - t csinálunk )
-- closure létrehozása:
h2 :: Int -> Maybe (Int -> Int)
h2 x = Just (\y -> x + y + x + 100)
data Int = Int # Int #
Int # -- primitív 64 bites Int ( pointer ! maga a szám )
-- data Int = I# Int#
általános notáció : # -es függvények és típusok és literálok " primitívek " ( semmi thunk ! konkrét gépi műveletek )
-- myFun :: Int -> Int -> Int
-- myFun x y = x + y + y -- (inline függvény)
worker - wrapper transformation : , vs lassú
wmyFun :: Int# -> Int# -> Int#
wmyFun x y = x +# y +# y
-- wrapper
myFun :: Int -> Int -> Int
myFun (I# x) (I# y) = I# (wmyFun x y)
-- További "unboxing" példa: (Int, Int) --> stack-allokált Int# Int# pár
függvény definíció / definícióra
fordítás : top - level függvény lesz
( lokális " capture"-t megkapja függvény , mint extra paraméter )
fenti lambda , mint top függvény : = \x y - > x + y + x + 100
Just ( \y - > x + y + x + 100 ) -- > | lam1 ptr | 1 ( ) | x | ( closure )
szuperszaturált hívás : több arg , mint a " hivatalos " kód paraméter
pl : i d not True ( i d : 1 , 2 konkrét paraméter a hívásban )
--
fordító 2 paraméteres top függvényre fordít
h2' :: Int -> Int -> Int
h2' x = if (x == 0) then \y -> x + y + x + 100
else \y -> x + y + x + 90
h2'' :: Int -> Int -> Int
h2'' x y = if (x == 0) then x + y + x + 100
else x + y + x + 90
feltétlenül helyes :
-- f :: Bool -> Bool
\x - > f x : : Bool
Lustaság :
--------------------------------------------------------------------------------
függvényhívás csak , ha a illesztünk
h3 :: Int -> Maybe Int
h3 x = Just (x + x + x + x + x)
thunk : lusta hívás
thunk : | kód ptr | < a végeredménynek > | paraméterek száma | capture ( tömb értékekkel )
-- thunk1: return (1000 + 1000)
--
let foo = ( + ) 1000 1000 --- > | thunk1 | | 0 | [ ]
case foo of -- : extra ág
thunk - > ... ( meghívjuk a ptr - t a tárolt paraméterekkel , és próbáljuk a fennmaradó eseteket )
mutáció : a ) ( max egyszer )
( : thunk - ot kidobja , a végeredménnyel )
-- 0 -> ...
-- _ -> ...
strictness analízis : minél több thunk elkerülni
optimalizálás : strictness annotáció / strict
--------------------------------------------------------------------------------
strict :
data StrictTree a = Leaf !a | Node !(StrictTree a) !(StrictTree a)
-- konstruktorok reprezentációja:
64 bit 64 bit
Leaf 10 --- > | konstruktor tag | ( ptr 10 - re )
Leaf - ben és Node - ban thunk , csak konkrét érték
n : : Int ( thunk )
Leaf n
-- (Leaf létrehozása előtt force-oljuk n-t)
-- case t of
( Leaf n ) - > _ -- tudjuk , hogy n thunk
strict függvény paraméterek : { - # language BangPatterns # - }
f2 :: Int -> Int -> Int
f2 !x !y = y -- f2 hívásnál ! argumentumokat force-oljuk
-- foldl szigorú
-- foldl' :: (b -> a -> b) -> b -> [a] -> b
-- foldl' f b [] = b
-- foldl' f b (a:as) = let !b' = f b a in foldl' f b' as
take' :: Int -> [a] -> [a]
ADT - force - olás :
take' _ !as = []
--------------------------------------------------------------------------------
-- Optimalizáláshoz hasznos: Core output
-- ghc -O2 -ddump-simpl -dsuppress-all -dno-suppress-type-signatures
( GHC Core language )
még : STG -- > Cmm -- > assembly / LLVM
lens / monád trans / template Haskell / adatszerkezetek ( Set , Map , , )
-- monád transzformerek + adatszerkezetek
| null | https://raw.githubusercontent.com/AndrasKovacs/ELTE-func-lang/88d41930999d6056bdd7bfaa85761a527cce4113/2020-21-1/ea/Notes12.hs | haskell | ------------------------------------------------------------------------------
Haskell-ben:
Alap futási modell
------------------------------------------------------------------------------
let g = f True
case mf of
( f top - level ( Bool - > Bool ) , , closure is )
closure létrehozása:
primitív 64 bites Int ( pointer ! maga a szám )
data Int = I# Int#
myFun :: Int -> Int -> Int
myFun x y = x + y + y -- (inline függvény)
wrapper
További "unboxing" példa: (Int, Int) --> stack-allokált Int# Int# pár
> | lam1 ptr | 1 ( ) | x | ( closure )
f :: Bool -> Bool
------------------------------------------------------------------------------
thunk1: return (1000 + 1000)
- > | thunk1 | | 0 | [ ]
: extra ág
0 -> ...
_ -> ...
------------------------------------------------------------------------------
konstruktorok reprezentációja:
- > | konstruktor tag | ( ptr 10 - re )
(Leaf létrehozása előtt force-oljuk n-t)
case t of
tudjuk , hogy n thunk
f2 hívásnál ! argumentumokat force-oljuk
foldl szigorú
foldl' :: (b -> a -> b) -> b -> [a] -> b
foldl' f b [] = b
foldl' f b (a:as) = let !b' = f b a in foldl' f b' as
------------------------------------------------------------------------------
Optimalizáláshoz hasznos: Core output
ghc -O2 -ddump-simpl -dsuppress-all -dno-suppress-type-signatures
> Cmm -- > assembly / LLVM
monád transzformerek + adatszerkezetek | # language BangPatterns #
module Notes12 where
import Data.List
import Data.Foldable
Haskell fordítás / optimalizálás
Optimalizálás : fordító által
programozó által
- GHC jelentősen átalakítja a kódot ,
- ( több transzformálására , mellékhatás - mentes átrendezi a fordító )
példa : C kód , hívunk egy fordítási egységen )
- ránézésre kitalálni , hogy mire fordul adott Haskell kód
- Programozó általi optimalizáláshoz szükséges a fordítás és runtime system ismerete
- GHC sokat javít , de ha , akkor jelentős , a kapott támogatás
a GHC - től pedig .
- Összességében compiler a GHC jó választás az RTS teljesítménye és optimalizációs
( hatékony kis allokációkhoz )
1 .
egyszerűsítés : Bool ~ 64 bit , vagy 0 vagy 1 ( lustaság miatt : 0 vagy 1 vagy 2 )
f :: Bool -> Bool -> Bool
f b1 b2 = if b1 then b2 else True
szaturált hívás ( egy függvényt az )
pl : f True False ( ugyanaz , mint Java / C )
: closure létrehozása
h :: Bool -> Maybe (Bool -> Bool)
closure : | f pointer | 1 | True | ( fv ptr , arg . száma , argumentumok )
else Just not
( ha , vizsgálni , )
( ha van összes param , a fv - t , egyébként új closure - t csinálunk )
h2 :: Int -> Maybe (Int -> Int)
h2 x = Just (\y -> x + y + x + 100)
data Int = Int # Int #
általános notáció : # -es függvények és típusok és literálok " primitívek " ( semmi thunk ! konkrét gépi műveletek )
worker - wrapper transformation : , vs lassú
wmyFun :: Int# -> Int# -> Int#
wmyFun x y = x +# y +# y
myFun :: Int -> Int -> Int
myFun (I# x) (I# y) = I# (wmyFun x y)
függvény definíció / definícióra
fordítás : top - level függvény lesz
( lokális " capture"-t megkapja függvény , mint extra paraméter )
fenti lambda , mint top függvény : = \x y - > x + y + x + 100
szuperszaturált hívás : több arg , mint a " hivatalos " kód paraméter
pl : i d not True ( i d : 1 , 2 konkrét paraméter a hívásban )
fordító 2 paraméteres top függvényre fordít
h2' :: Int -> Int -> Int
h2' x = if (x == 0) then \y -> x + y + x + 100
else \y -> x + y + x + 90
h2'' :: Int -> Int -> Int
h2'' x y = if (x == 0) then x + y + x + 100
else x + y + x + 90
feltétlenül helyes :
\x - > f x : : Bool
Lustaság :
függvényhívás csak , ha a illesztünk
h3 :: Int -> Maybe Int
h3 x = Just (x + x + x + x + x)
thunk : lusta hívás
thunk : | kód ptr | < a végeredménynek > | paraméterek száma | capture ( tömb értékekkel )
thunk - > ... ( meghívjuk a ptr - t a tárolt paraméterekkel , és próbáljuk a fennmaradó eseteket )
mutáció : a ) ( max egyszer )
( : thunk - ot kidobja , a végeredménnyel )
strictness analízis : minél több thunk elkerülni
optimalizálás : strictness annotáció / strict
strict :
data StrictTree a = Leaf !a | Node !(StrictTree a) !(StrictTree a)
64 bit 64 bit
Leaf - ben és Node - ban thunk , csak konkrét érték
n : : Int ( thunk )
Leaf n
strict függvény paraméterek : { - # language BangPatterns # - }
f2 :: Int -> Int -> Int
take' :: Int -> [a] -> [a]
ADT - force - olás :
take' _ !as = []
( GHC Core language )
lens / monád trans / template Haskell / adatszerkezetek ( Set , Map , , )
|
7252930bd9f66275401705c2719860285d22a03558ca4a7b12b6d363d282beb9 | mattmundell/nightshade | build.lisp | Interface to build system .
(in-package "ED")
(defhistory *build-target-history* *build-target-history-pointer* 50)
(defcommand "Build" (p)
"Prompt for and build one of the targets defined in Build.lisp in the
current directory.
With an argument first prompt for a directory to make the directory
current."
(if p (error "FIX p"))
(build:with-build (current-directory)
(build:build-target
(prompt-for-keyword (list build:*targets*)
:default build:*first-target*
:history *build-target-history*
:history-pointer
'*build-target-history-pointer*
:prompt "Target: "
:help "Enter name of target to build."))))
(defcommand "Rebuild" (p)
"Prompt for and build one of the targets defined in Build.lisp in the
current directory, even if the targets are up to date.
With an argument first prompt for a directory to make the directory
current."
(if p (error "FIX p"))
(build:with-build (current-directory)
(build:build-target
(prompt-for-keyword (list build:*targets*)
:default build:*first-target*
:history *build-target-history*
:history-pointer
'*build-target-history-pointer*
:prompt "Target: "
:help "Enter name of target to build.")
:force)))
| null | https://raw.githubusercontent.com/mattmundell/nightshade/7a67f9eac96414355de1463ec251b98237cb4009/src/ed/build.lisp | lisp | Interface to build system .
(in-package "ED")
(defhistory *build-target-history* *build-target-history-pointer* 50)
(defcommand "Build" (p)
"Prompt for and build one of the targets defined in Build.lisp in the
current directory.
With an argument first prompt for a directory to make the directory
current."
(if p (error "FIX p"))
(build:with-build (current-directory)
(build:build-target
(prompt-for-keyword (list build:*targets*)
:default build:*first-target*
:history *build-target-history*
:history-pointer
'*build-target-history-pointer*
:prompt "Target: "
:help "Enter name of target to build."))))
(defcommand "Rebuild" (p)
"Prompt for and build one of the targets defined in Build.lisp in the
current directory, even if the targets are up to date.
With an argument first prompt for a directory to make the directory
current."
(if p (error "FIX p"))
(build:with-build (current-directory)
(build:build-target
(prompt-for-keyword (list build:*targets*)
:default build:*first-target*
:history *build-target-history*
:history-pointer
'*build-target-history-pointer*
:prompt "Target: "
:help "Enter name of target to build.")
:force)))
|
|
2966945fd1e5bdc5bb42cbde7a45c3358c8bdcbde4f00ae89994e17c94bc15c9 | moquist/datomic-schematode | config.clj | (ns datomic-schematode.examples.deli-menu-test.config
(:require [clojure.test :refer :all]
[datomic.api :as d]
[datomic-schematode.examples.deli-menu :as deli-menu]))
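;; Test fixture: creates the example deli-menu Datomic database before running the
;; tests and deletes it afterwards (intended to be registered via clojure.test/use-fixtures).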
(defn with-db [f]
(d/create-database deli-menu/db-url)
(f)
(d/delete-database deli-menu/db-url))
| null | https://raw.githubusercontent.com/moquist/datomic-schematode/b73206fc86bb61bc97ddd5df55f7441bf5fc0447/test/datomic_schematode/examples/deli_menu_test/config.clj | clojure | (ns datomic-schematode.examples.deli-menu-test.config
(:require [clojure.test :refer :all]
[datomic.api :as d]
[datomic-schematode.examples.deli-menu :as deli-menu]))
(defn with-db [f]
(d/create-database deli-menu/db-url)
(f)
(d/delete-database deli-menu/db-url))
|
|
6878cae80b97434d307cb1fa9387506a291fccc5041c6e0dd37bc86175ded0e6 | janegca/htdp2e | Exercise-024-MovieTheatreV2.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Exercise-024-MovieTheatreV2) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")))))
Exercise 24 .
; Determine the potential profit for the following ticket prices:
$ 1 , $ 2 , $ 3 , $ 4 , and $ 5 .
; Which price should the owner of the movie theater choose to maximize his
; profits? Determine the best ticket price down to a dime.
; Constants
(define AVG_ATTENDENCE 120)
(define BASE_TICKET_PRICE 5.0)
(define PEOPLE_SWING 15)
(define PRICE_VARIANCE 0.1)
(define FIXED_COSTS 180.0)
(define VARIABLE_COSTS 0.04)
; # of attendees is based on ticket price
(define (attendees ticket-price)
(- AVG_ATTENDENCE (* (- ticket-price BASE_TICKET_PRICE)
(/ PEOPLE_SWING PRICE_VARIANCE))))
; revenue generated by ticket sales
(define (revenue ticket-price)
(* ticket-price (attendees ticket-price)))
; cost has a fixed part and a variable part based on the number of attendees
(define (cost ticket-price)
(+ FIXED_COSTS (* VARIABLE_COSTS (attendees ticket-price))))
; profit
(define (profit ticket-price)
(- (revenue ticket-price)
(cost ticket-price)))
511.2
937.2
1063.2
(profit 4.0) ; 889.2
415.2
; Below is an alternative version of the same program, given as a single
; function definition of the above program; use it to check the values
; returned by the original.
(define (alt-profit price)
(- (* (+ 120 (* (/ 15 0.1) (- 5.0 price))) price)
(+ 180 (* 0.04 (+ 120 (* (/ 15 0.1) (- 5.0 price)))))))
511.2
937.2
1063.2
(alt-profit 4.0) ; 889.2
415.2
| null | https://raw.githubusercontent.com/janegca/htdp2e/2d50378135edc2b8b1816204021f8763f8b2707b/01-FixedSizeData/Exercise-024-MovieTheatreV2.rkt | racket | about the language level of this file in a form that our tools can easily process.
Determine the potential profit for the following ticket prices:
Which price should the owner of the movie theater choose to maximize his
profits? Determine the best ticket price down to a dime.
Constants
# of attendees is based on ticket price
revenue generated by ticket sales
cost has a fixed part and a variable part based on the number of attendees
profit
889.2
Below is an alternative version of the same program, given as a single
function definition of the above program; use it to check the values
returned by the original.
889.2 | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname Exercise-024-MovieTheatreV2) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")))))
Exercise 24 .
$ 1 , $ 2 , $ 3 , $ 4 , and $ 5 .
(define AVG_ATTENDENCE 120)
(define BASE_TICKET_PRICE 5.0)
(define PEOPLE_SWING 15)
(define PRICE_VARIANCE 0.1)
(define FIXED_COSTS 180.0)
(define VARIABLE_COSTS 0.04)
(define (attendees ticket-price)
(- AVG_ATTENDENCE (* (- ticket-price BASE_TICKET_PRICE)
(/ PEOPLE_SWING PRICE_VARIANCE))))
(define (revenue ticket-price)
(* ticket-price (attendees ticket-price)))
(define (cost ticket-price)
(+ FIXED_COSTS (* VARIABLE_COSTS (attendees ticket-price))))
(define (profit ticket-price)
(- (revenue ticket-price)
(cost ticket-price)))
511.2
937.2
1063.2
415.2
(define (alt-profit price)
(- (* (+ 120 (* (/ 15 0.1) (- 5.0 price))) price)
(+ 180 (* 0.04 (+ 120 (* (/ 15 0.1) (- 5.0 price)))))))
511.2
937.2
1063.2
415.2
|
f5aa91a4de628d40b7e61d9b2bc504c746fd3e8110b03ac7273c1d58c97e19b9 | Tritlo/PropR | Tests.hs | # LANGUAGE NumericUnderscores #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Main where
import Control.Arrow
import Data.Bits (finiteBitSize)
import Data.Default
import Data.Dynamic (Dynamic, fromDynamic)
import Data.List (find, sort)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, isJust, mapMaybe)
import Data.Tree
import Data.Tuple (swap)
import PropR.Configuration
import PropR.Diff
import PropR.Eval
import PropR.Repair
import PropR.Traversals
import PropR.Types
import PropR.Util
import GHC (GhcPs, LHsExpr, noExtField, tm_parsed_module)
import GhcPlugins (GenLocated (L), getLoc, unLoc)
import Test.Tasty
import Test.Tasty.ExpectedFailure
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import TestUtils
import Trace.Hpc.Mix
tests :: TestTree
tests =
testGroup
"Tests"
[ utilTests,
repairTests,
failingPropsTests,
counterExampleTests,
traceTests,
moduleTests,
sanctifyTests
]
-- Helpers
compileParsedCheck :: HasCallStack => CompileConfig -> EExpr -> IO Dynamic
compileParsedCheck cc expr =
runGhc' (cc {holeLvl = 0}) $
dynCompileParsedExpr `reportOnError` expr
runJustParseExpr :: CompileConfig -> RExpr -> IO (LHsExpr GhcPs)
runJustParseExpr cc str = runGhcWithCleanup cc $ justParseExpr cc str
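-- | Property used by the Utils group below: inserting an element at a (wrapped)
-- index with insertAt and then reading that index back yields the inserted element.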
prop_insertAt :: Eq a => Int -> a -> [a] -> Property
prop_insertAt n a as = abs n < length as ==> insertAt n' a as !! n' == a
where
n' = n `mod` length as
utilTests :: TestTree
utilTests =
testProperties
"Utils"
[ ("dropPrefix", property prop_dropsPrefix),
("startsWith", property prop_startsWith),
("insertAt", property (prop_insertAt @Integer))
]
repairTests =
testGroup
"Repair"
[ -- A simple test to see if we can repair (foldl (-) 0) to (foldl (+) 0)
-- in a reasonable amount of time (here 10s)
localOption (mkTimeout 30_000_000) $
testCase "Basic Repair `foldl (-) 0`" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context =
[ "zero = 0 :: Int",
"one = 1 :: Int",
"add = (+) :: Int -> Int -> Int"
]
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
fixes <- repair cc tp
let fixProgs = map (eProgToEProgFix . applyFixToEProg e_prog) fixes
expected `elem` concatMap (map (trim . showUnsafe)) fixProgs @? "Expected repair not found in fixes",
localOption (mkTimeout 20_000_000) $
testCase "GetExprCands finds important candidates" $ do
let wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)",
" ; a_constant_string = \"hello, world!\"}",
" in gcd'"
]
expected =
[ "EFC {\"hello, world!\"}",
"EFC {0}",
"EFC {(gcd' 0 b)}",
"EFC {(gcd' 0)}",
"EFC {0}",
"EFC {0}",
"EFC {(b == 0)}",
"EFC {0}",
"EFC {0}",
"EFC {(if (a > b) then gcd' (a - b) b else gcd' a (b - a))}",
"EFC {(a > b)}",
"EFC {(gcd' (a - b) b)}",
"EFC {(gcd' (a - b))}",
"EFC {(a - b)}",
"EFC {(gcd' a (b - a))}",
"EFC {(gcd' a)}",
"EFC {(b - a)}"
]
expr_cands <- runJustParseExpr (compileConfig tESTCONF) wrong_prog >>= (runGhc' (compileConfig tESTCONF) . getExprFitCands . Left)
map showUnsafe expr_cands @?= expected,
localOption (mkTimeout 60_000_000) $
testCase "Repair `gcd'` with gcd" $ do
let props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
fixes <- map (trim . showUnsafe) <$> (translate (compileConfig tESTCONF) rp >>= repair (compileConfig tESTCONF))
not (null fixes) @? "No fix found"
]
failingPropsTests =
testGroup
"Failing props"
[ localOption (mkTimeout 15_000_000) $
testCase "Failing props for gcd" $ do
let props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate (compileConfig tESTCONF) rp
failed_props <- failingProps (compileConfig tESTCONF) tp
Only the first prop should be failing ( due to an infinite loop )
map showUnsafe failed_props @?= [head props],
localOption (mkTimeout 10_000_000) $
testCase "Only one failing prop" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context =
[ "zero = 0 :: Int",
"one = 1 :: Int",
"add = (+) :: Int -> Int -> Int"
]
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
failed_props <- failingProps cc tp
map showUnsafe failed_props @?= props,
localOption (mkTimeout 30_000_000) $
testCase "Two failing TastyProps" $ do
Just desc@ProbDesc {..} <- describeProblem tESTCONF "tests/cases/TastyTwoFix.hs"
failed_props <- failingProps compConf progProblem
length failed_props @?= 2
]
counterExampleTests =
testGroup
"Counter Examples"
[ localOption (mkTimeout 10_000_000) $
testCase "Only one counter example" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context = []
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Just [counter_example] <- propCounterExample cc tp failed_prop
let expr = "(foldl (-) 0) " ++ counter_example ++ " == sum " ++ counter_example
res <- runJustParseExpr cc expr >>= compileParsedCheck cc
case fromDynamic @Bool res of
Just v -> not v @? "Counter Example is not a counter example!"
Nothing -> error "Incorrect type!!",
localOption (mkTimeout 10_000_000) $
testCase "Multiple examples" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "Int -> Int -> Int"
wrong_prog = "(-)"
props = ["prop_isPlus f a b = f a b == (a + b)"]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example_args <- propCounterExample cc tp failed_prop
let arg_str = unwords counter_example_args
expr = "(-) " ++ arg_str ++ " == (+) " ++ arg_str
res <- runJustParseExpr cc expr >>= compileParsedCheck cc
case fromDynamic @Bool res of
Just v -> not v @? "Counter Example is not a counter example!"
Nothing -> error "Incorrect type!!",
localOption (mkTimeout 15_000_000) $
testCase "No args loop fail" $ do
let cc = (compileConfig tESTCONF)
props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Only the first prop should be failing ( due to an infinite loop )
showUnsafe failed_prop @?= head props
Just counter_example_args <- propCounterExample cc tp failed_prop
null counter_example_args @? "The counter example should not have any arguments!"
]
traceTests =
testGroup
"Trace tests"
[ localOption (mkTimeout 10_000_000) $
testCase "Trace foldl" $ do
let cc = (compileConfig tESTCONF)
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context = []
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example <- propCounterExample cc tp failed_prop
let eprog_fix = eProgToEProgFix e_prog
Just [(texp, Node {subForest = [tree@Node {rootLabel = (tl, tname)}]})] <-
traceTarget cc tp eprog_fix failed_prop counter_example
expr <- runJustParseExpr cc wrong_prog
showUnsafe expr @?= showUnsafe texp
all ((== 1) . snd) (concatMap snd $ flatten tree) @? "All subexpressions should be touched only once!",
localOption (mkTimeout 30_000_000) $
testCase "Trace finds loop" $ do
let cc = (compileConfig tESTCONF)
props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example_args <- propCounterExample cc tp failed_prop
-- We generate the trace
let [(_, e_ty, e_prog')] = e_prog
prog_at_ty = progAtTy e_prog' e_ty
eprog_fix = eProgToEProgFix $ applyFixToEProg e_prog mempty
Just [(texp, res)] <- traceTarget cc tp eprog_fix failed_prop counter_example_args
let eMap = Map.fromList $ map (getLoc &&& showUnsafe) $ flattenExpr texp
trc = map (\(s, r) -> (eMap Map.!? s, r, maximum $ map snd r)) $ flatten res
isXBox (ExpBox _) = True
isXBox _ = False
isInEMapOrNotExpBox (Just _, _, _) = True
isInEMapOrNotExpBox (_, r, _) = not (any (isXBox . fst) r)
isLooper (Just "gcd' 0 b", _, _) = True
isLooper _ = False
loopEntry = find isLooper trc
all isInEMapOrNotExpBox trc @? "All the expressions should be present in the trace!"
isJust loopEntry @? "The loop causing expression should be in the trace"
let Just (_, _, loops) = loopEntry
loops >= 100_000 @? "There should have been a lot of invocations of the loop!"
]
sanctifyTests =
testGroup
"Sanctify tests"
[ localOption (mkTimeout 1_000_000) $
testCase "Sanctify foldl program" $ do
let cc = (compileConfig tESTCONF)
toFix = "tests/cases/BrokenModule.hs"
repair_target = Just "broken"
(cc', _, Just EProb {..}) <- moduleToProb cc toFix repair_target
There are 7 ways to replace parts of the broken function in BrokenModule
-- with holes:
let [(_, _, e_prog')] = e_prog
length (sanctifyExpr noExtField e_prog') @?= 7,
localOption (mkTimeout 1_000_000) $
testCase "Fill foldl program" $ do
let cc = compileConfig tESTCONF
toFix = "tests/cases/BrokenModule.hs"
repair_target = Just "broken"
(cc', _, Just EProb {..}) <- moduleToProb cc toFix repair_target
let [(_, _, e_prog')] = e_prog
(holes, holey) = unzip $ sanctifyExpr noExtField e_prog'
filled = mapMaybe (fillHole undefVar) holey
length filled @?= 7
all (uncurry (==)) (zip holes (map fst filled)) @? "All fillings should match holes!"
]
moduleTests :: TestTree
moduleTests =
testGroup
"Module tests"
[ localOption (mkTimeout 30_000_000) $
testCase "Repair BrokenModule finds correct target" $ do
let toFix = "tests/cases/BrokenModule.hs"
(_, _, Just EProb {..}) <- moduleToProb (compileConfig tESTCONF) toFix Nothing
let [(e_target, _, _)] = e_prog
showUnsafe e_target @?= "broken",
mkSimpleModuleTest 30_000_000 "Repair BrokenModule With Diff" "tests/cases/BrokenModule.hs" (Just "broken"),
mkSimpleModuleTest 240_000_000 "Repair BrokenGCD" "tests/cases/BrokenGCD.hs" (Just "gcd'"),
mkSimpleModuleTest 30_000_000 "Repair MagicConstant" "tests/cases/MagicConstant.hs" Nothing,
mkSimpleModuleTest 10_000_000 "All props pass" "tests/cases/AllPropsPass.hs" Nothing,
mkSimpleModuleTest 5_000_000 "No props" "tests/cases/NoProps.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Unnamed faked" "tests/cases/unnamed.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Main module faked" "tests/cases/mainMod.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Prelude overwrite" "tests/cases/PreludeOverwrite.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Prelude overwrite imports" "tests/cases/PreludeOverwriteImports.hs" Nothing
]
main = defaultMain tests
| null | https://raw.githubusercontent.com/Tritlo/PropR/131abe4e67e9854473d778d4fa71cbb12f757ddd/tests/Tests.hs | haskell | Helpers
A simple test to see if we can repair (foldl (-) 0) to (foldl (+) 0)
We generate the trace
with holes: | # LANGUAGE NumericUnderscores #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Main where
import Control.Arrow
import Data.Bits (finiteBitSize)
import Data.Default
import Data.Dynamic (Dynamic, fromDynamic)
import Data.List (find, sort)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, isJust, mapMaybe)
import Data.Tree
import Data.Tuple (swap)
import PropR.Configuration
import PropR.Diff
import PropR.Eval
import PropR.Repair
import PropR.Traversals
import PropR.Types
import PropR.Util
import GHC (GhcPs, LHsExpr, noExtField, tm_parsed_module)
import GhcPlugins (GenLocated (L), getLoc, unLoc)
import Test.Tasty
import Test.Tasty.ExpectedFailure
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import TestUtils
import Trace.Hpc.Mix
tests :: TestTree
tests =
testGroup
"Tests"
[ utilTests,
repairTests,
failingPropsTests,
counterExampleTests,
traceTests,
moduleTests,
sanctifyTests
]
compileParsedCheck :: HasCallStack => CompileConfig -> EExpr -> IO Dynamic
compileParsedCheck cc expr =
runGhc' (cc {holeLvl = 0}) $
dynCompileParsedExpr `reportOnError` expr
runJustParseExpr :: CompileConfig -> RExpr -> IO (LHsExpr GhcPs)
runJustParseExpr cc str = runGhcWithCleanup cc $ justParseExpr cc str
prop_insertAt :: Eq a => Int -> a -> [a] -> Property
prop_insertAt n a as = abs n < length as ==> insertAt n' a as !! n' == a
where
n' = n `mod` length as
utilTests :: TestTree
utilTests =
testProperties
"Utils"
[ ("dropPrefix", property prop_dropsPrefix),
("startsWith", property prop_startsWith),
("insertAt", property (prop_insertAt @Integer))
]
repairTests =
testGroup
"Repair"
in a reasonable amount of time ( here 10s )
localOption (mkTimeout 30_000_000) $
testCase "Basic Repair `foldl (-) 0`" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context =
[ "zero = 0 :: Int",
"one = 1 :: Int",
"add = (+) :: Int -> Int -> Int"
]
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
fixes <- repair cc tp
let fixProgs = map (eProgToEProgFix . applyFixToEProg e_prog) fixes
expected `elem` concatMap (map (trim . showUnsafe)) fixProgs @? "Expected repair not found in fixes",
localOption (mkTimeout 20_000_000) $
testCase "GetExprCands finds important candidates" $ do
let wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)",
" ; a_constant_string = \"hello, world!\"}",
" in gcd'"
]
expected =
[ "EFC {\"hello, world!\"}",
"EFC {0}",
"EFC {(gcd' 0 b)}",
"EFC {(gcd' 0)}",
"EFC {0}",
"EFC {0}",
"EFC {(b == 0)}",
"EFC {0}",
"EFC {0}",
"EFC {(if (a > b) then gcd' (a - b) b else gcd' a (b - a))}",
"EFC {(a > b)}",
"EFC {(gcd' (a - b) b)}",
"EFC {(gcd' (a - b))}",
"EFC {(a - b)}",
"EFC {(gcd' a (b - a))}",
"EFC {(gcd' a)}",
"EFC {(b - a)}"
]
expr_cands <- runJustParseExpr (compileConfig tESTCONF) wrong_prog >>= (runGhc' (compileConfig tESTCONF) . getExprFitCands . Left)
map showUnsafe expr_cands @?= expected,
localOption (mkTimeout 60_000_000) $
testCase "Repair `gcd'` with gcd" $ do
let props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
fixes <- map (trim . showUnsafe) <$> (translate (compileConfig tESTCONF) rp >>= repair (compileConfig tESTCONF))
not (null fixes) @? "No fix found"
]
failingPropsTests =
testGroup
"Failing props"
[ localOption (mkTimeout 15_000_000) $
testCase "Failing props for gcd" $ do
let props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate (compileConfig tESTCONF) rp
failed_props <- failingProps (compileConfig tESTCONF) tp
Only the first prop should be failing ( due to an infinite loop )
map showUnsafe failed_props @?= [head props],
localOption (mkTimeout 10_000_000) $
testCase "Only one failing prop" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context =
[ "zero = 0 :: Int",
"one = 1 :: Int",
"add = (+) :: Int -> Int -> Int"
]
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
failed_props <- failingProps cc tp
map showUnsafe failed_props @?= props,
localOption (mkTimeout 30_000_000) $
testCase "Two failing TastyProps" $ do
Just desc@ProbDesc {..} <- describeProblem tESTCONF "tests/cases/TastyTwoFix.hs"
failed_props <- failingProps compConf progProblem
length failed_props @?= 2
]
counterExampleTests =
testGroup
"Counter Examples"
[ localOption (mkTimeout 10_000_000) $
testCase "Only one counter example" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context = []
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Just [counter_example] <- propCounterExample cc tp failed_prop
let expr = "(foldl (-) 0) " ++ counter_example ++ " == sum " ++ counter_example
res <- runJustParseExpr cc expr >>= compileParsedCheck cc
case fromDynamic @Bool res of
Just v -> not v @? "Counter Example is not a counter example!"
Nothing -> error "Incorrect type!!",
localOption (mkTimeout 10_000_000) $
testCase "Multiple examples" $ do
let cc =
(compileConfig tESTCONF)
{ packages = ["base", "process", "QuickCheck"],
importStmts = ["import Prelude hiding (id, ($), ($!), asTypeOf)"]
}
ty = "Int -> Int -> Int"
wrong_prog = "(-)"
props = ["prop_isPlus f a b = f a b == (a + b)"]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example_args <- propCounterExample cc tp failed_prop
let arg_str = unwords counter_example_args
expr = "(-) " ++ arg_str ++ " == (+) " ++ arg_str
res <- runJustParseExpr cc expr >>= compileParsedCheck cc
case fromDynamic @Bool res of
Just v -> not v @? "Counter Example is not a counter example!"
Nothing -> error "Incorrect type!!",
localOption (mkTimeout 15_000_000) $
testCase "No args loop fail" $ do
let cc = (compileConfig tESTCONF)
props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp <- translate cc rp
[failed_prop] <- failingProps cc tp
Only the first prop should be failing ( due to an infinite loop )
showUnsafe failed_prop @?= head props
Just counter_example_args <- propCounterExample cc tp failed_prop
null counter_example_args @? "The counter example should not have any arguments!"
]
traceTests =
testGroup
"Trace tests"
[ localOption (mkTimeout 10_000_000) $
testCase "Trace foldl" $ do
let cc = (compileConfig tESTCONF)
ty = "[Int] -> Int"
wrong_prog = "(foldl (-) 0)"
props = ["prop_isSum f xs = f xs == sum xs"]
context = []
expected = "((foldl (+) 0)) :: [Int] -> Int"
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example <- propCounterExample cc tp failed_prop
let eprog_fix = eProgToEProgFix e_prog
Just [(texp, Node {subForest = [tree@Node {rootLabel = (tl, tname)}]})] <-
traceTarget cc tp eprog_fix failed_prop counter_example
expr <- runJustParseExpr cc wrong_prog
showUnsafe expr @?= showUnsafe texp
all ((== 1) . snd) (concatMap snd $ flatten tree) @? "All subexpressions should be touched only once!",
localOption (mkTimeout 30_000_000) $
testCase "Trace finds loop" $ do
let cc = (compileConfig tESTCONF)
props :: [String]
props =
[ "prop_1 f = f 0 55 == 55",
"prop_2 f = f 1071 1029 == 21"
]
ty = "Int -> Int -> Int"
wrong_prog =
unlines
[ "let { gcd' 0 b = gcd' 0 b",
" ; gcd' a b | b == 0 = a",
" ; gcd' a b = if (a > b) then gcd' (a-b) b else gcd' a (b-a)}",
" in gcd'"
]
context = []
rp =
RProb
{ r_target = "",
r_ctxt = context,
r_ty = ty,
r_prog = wrong_prog,
r_props = props
}
setSeedGenSeed tESTSEED
tp@EProb {..} <- translate cc rp
[failed_prop] <- failingProps cc tp
Just counter_example_args <- propCounterExample cc tp failed_prop
let [(_, e_ty, e_prog')] = e_prog
prog_at_ty = progAtTy e_prog' e_ty
eprog_fix = eProgToEProgFix $ applyFixToEProg e_prog mempty
Just [(texp, res)] <- traceTarget cc tp eprog_fix failed_prop counter_example_args
let eMap = Map.fromList $ map (getLoc &&& showUnsafe) $ flattenExpr texp
trc = map (\(s, r) -> (eMap Map.!? s, r, maximum $ map snd r)) $ flatten res
isXBox (ExpBox _) = True
isXBox _ = False
isInEMapOrNotExpBox (Just _, _, _) = True
isInEMapOrNotExpBox (_, r, _) = not (any (isXBox . fst) r)
isLooper (Just "gcd' 0 b", _, _) = True
isLooper _ = False
loopEntry = find isLooper trc
all isInEMapOrNotExpBox trc @? "All the expressions should be present in the trace!"
isJust loopEntry @? "The loop causing expression should be in the trace"
let Just (_, _, loops) = loopEntry
loops >= 100_000 @? "There should have been a lot of invocations of the loop!"
]
sanctifyTests =
testGroup
"Sanctify tests"
[ localOption (mkTimeout 1_000_000) $
testCase "Sanctify foldl program" $ do
let cc = (compileConfig tESTCONF)
toFix = "tests/cases/BrokenModule.hs"
repair_target = Just "broken"
(cc', _, Just EProb {..}) <- moduleToProb cc toFix repair_target
There are 7 ways to replace parts of the broken function in BrokenModule
let [(_, _, e_prog')] = e_prog
length (sanctifyExpr noExtField e_prog') @?= 7,
localOption (mkTimeout 1_000_000) $
testCase "Fill foldl program" $ do
let cc = compileConfig tESTCONF
toFix = "tests/cases/BrokenModule.hs"
repair_target = Just "broken"
(cc', _, Just EProb {..}) <- moduleToProb cc toFix repair_target
let [(_, _, e_prog')] = e_prog
(holes, holey) = unzip $ sanctifyExpr noExtField e_prog'
filled = mapMaybe (fillHole undefVar) holey
length filled @?= 7
all (uncurry (==)) (zip holes (map fst filled)) @? "All fillings should match holes!"
]
moduleTests :: TestTree
moduleTests =
testGroup
"Module tests"
[ localOption (mkTimeout 30_000_000) $
testCase "Repair BrokenModule finds correct target" $ do
let toFix = "tests/cases/BrokenModule.hs"
(_, _, Just EProb {..}) <- moduleToProb (compileConfig tESTCONF) toFix Nothing
let [(e_target, _, _)] = e_prog
showUnsafe e_target @?= "broken",
mkSimpleModuleTest 30_000_000 "Repair BrokenModule With Diff" "tests/cases/BrokenModule.hs" (Just "broken"),
mkSimpleModuleTest 240_000_000 "Repair BrokenGCD" "tests/cases/BrokenGCD.hs" (Just "gcd'"),
mkSimpleModuleTest 30_000_000 "Repair MagicConstant" "tests/cases/MagicConstant.hs" Nothing,
mkSimpleModuleTest 10_000_000 "All props pass" "tests/cases/AllPropsPass.hs" Nothing,
mkSimpleModuleTest 5_000_000 "No props" "tests/cases/NoProps.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Unnamed faked" "tests/cases/unnamed.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Main module faked" "tests/cases/mainMod.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Prelude overwrite" "tests/cases/PreludeOverwrite.hs" Nothing,
mkSimpleModuleTest 30_000_000 "Prelude overwrite imports" "tests/cases/PreludeOverwriteImports.hs" Nothing
]
main = defaultMain tests
|
dca50756f2aaa55e2e6bda3a13eb3948dee38c5edad2ccbfa3e35b5034f58308 | elm-lang/elm-reactor | Socket.hs | # OPTIONS_GHC -Wall #
{-# LANGUAGE OverloadedStrings #-}
module Socket (watchFile) where
import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (SomeException, catch)
import qualified Data.ByteString.Char8 as BS
import qualified Network.WebSockets as WS
import qualified System.FSNotify.Devel as Notify
import qualified System.FSNotify as Notify
watchFile :: FilePath -> WS.PendingConnection -> IO ()
watchFile watchedFile pendingConnection =
do connection <- WS.acceptRequest pendingConnection
Notify.withManager $ \mgmt ->
do stop <- Notify.treeExtAny mgmt "." ".elm" print
tend connection
stop
tend :: WS.Connection -> IO ()
tend connection =
let
pinger :: Integer -> IO a
pinger n =
do threadDelay (5 * 1000 * 1000)
WS.sendPing connection (BS.pack (show n))
pinger (n + 1)
receiver :: IO ()
receiver =
do _ <- WS.receiveDataMessage connection
receiver
shutdown :: SomeException -> IO ()
shutdown _ =
return ()
in
do _pid <- forkIO (receiver `catch` shutdown)
pinger 1 `catch` shutdown
| null | https://raw.githubusercontent.com/elm-lang/elm-reactor/6f15395aa307aaaf287919a326f4ffb31bb5eb4a/src/backend/Socket.hs | haskell | # LANGUAGE OverloadedStrings # | # OPTIONS_GHC -Wall #
module Socket (watchFile) where
import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (SomeException, catch)
import qualified Data.ByteString.Char8 as BS
import qualified Network.WebSockets as WS
import qualified System.FSNotify.Devel as Notify
import qualified System.FSNotify as Notify
watchFile :: FilePath -> WS.PendingConnection -> IO ()
watchFile watchedFile pendingConnection =
do connection <- WS.acceptRequest pendingConnection
Notify.withManager $ \mgmt ->
do stop <- Notify.treeExtAny mgmt "." ".elm" print
tend connection
stop
tend :: WS.Connection -> IO ()
tend connection =
let
pinger :: Integer -> IO a
pinger n =
do threadDelay (5 * 1000 * 1000)
WS.sendPing connection (BS.pack (show n))
pinger (n + 1)
receiver :: IO ()
receiver =
do _ <- WS.receiveDataMessage connection
receiver
shutdown :: SomeException -> IO ()
shutdown _ =
return ()
in
do _pid <- forkIO (receiver `catch` shutdown)
pinger 1 `catch` shutdown
|
a89b5041fa4288a6a23a5c161bfb30d9255c6d4238da835cb5579cc680c97b60 | startalkIM/ejabberd | http_clear_staff.erl | -module(http_clear_staff).
-export([handle/1]).
-include("ejabberd.hrl").
-include("logger.hrl").
-record(muc_online_room,
{name_host = {<<"">>, <<"">>} :: {binary(), binary()} | '$1' |{'_', binary()} | '_', pid = self() :: pid() | '$2' | '_' | '$1'}).
handle(Req) ->
{Method, Req1} = cowboy_req:method(Req),
case Method of
<<"POST">> -> do_handle(Req1);
_ -> http_utils:cowboy_req_reply_json(http_utils:gen_fail_result(1, <<Method/binary, " is not allowed">>), Req1)
end.
do_handle(Req)->
{ok, Body, Req1} = cowboy_req:body(Req),
case rfc4627:decode(Body) of
{ok, {obj, Args},[]} ->
Host = proplists:get_value("host",Args),
Users = proplists:get_value("users",Args),
clear_staff(Host, Users),
?INFO_MSG("the params is ~p~n", [{Host, Users}]),
http_utils:cowboy_req_reply_json(http_utils:gen_success_result(), Req1);
_ ->
http_utils:cowboy_req_reply_json(http_utils:gen_fail_result(1, <<"Josn parse error">>), Req1)
end.
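%% clear_staff/2: for each given user, look up the MUC rooms they belong to and remove
%% them from every room. If the room process is alive it is told to drop the user
%% ({http_del_user, JID}); otherwise the membership rows and the cached ets entry are
%% deleted directly.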
clear_staff(_, []) -> ok;
clear_staff(Host, [User|Users]) ->
case catch ejabberd_sql:sql_query([<<"select username,muc_name from muc_room_users where username = '", User/binary, "';">>]) of
{selected,[<<"username">>,<<"muc_name">>],Res} when is_list(Res) ->
?INFO_MSG("the mucs is ~p~n", [Res]),
lists:foreach(fun([U,M]) ->
case jlib:make_jid(U, Host, <<"">>) of
error -> ?INFO_MSG("Make User Jid Error ~p ~n",[U]);
JID ->
ServerHost = str:concat(<<"conference.">>, Host),
case mod_muc_redis:get_muc_room_pid(M,ServerHost) of
[] ->
?INFO_MSG("the JID is ~p~n", [{JID, ServerHost,U, M}]),
qtalk_public:clear_ets_muc_room_users(M, U, Host),
ejabberd_sql:sql_query([<<"delete from muc_room_users where username = '">>,U,<<"' and muc_name = '">>, M, <<"' and domain = '">>, ServerHost, <<"';">>]),
ejabberd_sql:sql_query(Host, [<<"delete from user_register_mucs where username = '">>,U,<<"' and muc_name = '">>,M,<<"';">>]);
[Muc] ->
?INFO_MSG("Remove dimission User ~p ,Muc ~p ~n",[U,M]),
Muc#muc_online_room.pid ! {http_del_user,JID}
end
end
end, Res);
O -> ?ERROR_MSG("the fail res is ~p~n", [O]), ok
end,
clear_staff(Host, Users).
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/http_clear_staff.erl | erlang | -module(http_clear_staff).
-export([handle/1]).
-include("ejabberd.hrl").
-include("logger.hrl").
-record(muc_online_room,
{name_host = {<<"">>, <<"">>} :: {binary(), binary()} | '$1' |{'_', binary()} | '_', pid = self() :: pid() | '$2' | '_' | '$1'}).
handle(Req) ->
{Method, Req1} = cowboy_req:method(Req),
case Method of
<<"POST">> -> do_handle(Req1);
_ -> http_utils:cowboy_req_reply_json(http_utils:gen_fail_result(1, <<Method/binary, " is not disable">>), Req1)
end.
do_handle(Req)->
{ok, Body, Req1} = cowboy_req:body(Req),
case rfc4627:decode(Body) of
{ok, {obj, Args},[]} ->
Host = proplists:get_value("host",Args),
Users = proplists:get_value("users",Args),
clear_staff(Host, Users),
?INFO_MSG("the params is ~p~n", [{Host, Users}]),
http_utils:cowboy_req_reply_json(http_utils:gen_success_result(), Req1);
_ ->
http_utils:cowboy_req_reply_json(http_utils:gen_fail_result(1, <<"Josn parse error">>), Req1)
end.
clear_staff(_, []) -> ok;
clear_staff(Host, [User|Users]) ->
case catch ejabberd_sql:sql_query([<<"select username,muc_name from muc_room_users where username = '", User/binary, "';">>]) of
{selected,[<<"username">>,<<"muc_name">>],Res} when is_list(Res) ->
?INFO_MSG("the mucs is ~p~n", [Res]),
lists:foreach(fun([U,M]) ->
case jlib:make_jid(U, Host, <<"">>) of
error -> ?INFO_MSG("Make User Jid Error ~p ~n",[U]);
JID ->
ServerHost = str:concat(<<"conference.">>, Host),
case mod_muc_redis:get_muc_room_pid(M,ServerHost) of
[] ->
?INFO_MSG("the JID is ~p~n", [{JID, ServerHost,U, M}]),
qtalk_public:clear_ets_muc_room_users(M, U, Host),
ejabberd_sql:sql_query([<<"delete from muc_room_users where username = '">>,U,<<"' and muc_name = '">>, M, <<"' and domain = '">>, ServerHost, <<"';">>]),
ejabberd_sql:sql_query(Host, [<<"delete from user_register_mucs where username = '">>,U,<<"' and muc_name = '">>,M,<<"';">>]);
[Muc] ->
?INFO_MSG("Remove dimission User ~p ,Muc ~p ~n",[U,M]),
Muc#muc_online_room.pid ! {http_del_user,JID}
end
end
end, Res);
O -> ?ERROR_MSG("the fail res is ~p~n", [O]), ok
end,
clear_staff(Host, Users).
|
|
6ec3647b777372bc11ec27ac2ad39b75477f36683643f3955aaaa74e762d2bed | pavlobaron/ErlangOTPBookSamples | lists1.erl | -module(lists1).
-export([first/1, print/1, sum/1, filter/1]).
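%% Small list exercises: first/1 returns the head of a list, print/1 writes each element
%% on its own line, sum/1 adds the elements with an accumulator and prints the total, and
%% filter/1 prints the even elements in their original order.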
first([H|_]) ->
H.
print([]) -> ok;
print([H|T]) ->
io:format("~p~n", [H]),
print(T).
sum(L) ->
dosum(L, 0).
dosum([], Sum) ->
io:format("Sum: ~p~n", [Sum]);
dosum([H|T], Sum) ->
dosum(T, Sum + H).
filter(L) ->
dofilter(L, []).
dofilter([], L) ->
io:format("Filtered list: ~p~n", [lists:reverse(L)]);
dofilter([H|T], L) when H rem 2 == 0 ->
dofilter(T, [H|L]);
dofilter([_|T], L) ->
dofilter(T, L).
| null | https://raw.githubusercontent.com/pavlobaron/ErlangOTPBookSamples/50094964ad814932760174914490e49618b2b8c2/sprache/lists1.erl | erlang | -module(lists1).
-export([first/1, print/1, sum/1, filter/1]).
first([H|_]) ->
H.
print([]) -> ok;
print([H|T]) ->
io:format("~p~n", [H]),
print(T).
sum(L) ->
dosum(L, 0).
dosum([], Sum) ->
io:format("Sum: ~p~n", [Sum]);
dosum([H|T], Sum) ->
dosum(T, Sum + H).
filter(L) ->
dofilter(L, []).
dofilter([], L) ->
io:format("Filtered list: ~p~n", [lists:reverse(L)]);
dofilter([H|T], L) when H rem 2 == 0 ->
dofilter(T, [H|L]);
dofilter([_|T], L) ->
dofilter(T, L).
|
|
9619ce0f4d58a3844195c09c14632bf9315b57f513a2f1f9186af3ebd4204aef | nuvla/ui | spec.cljs | (ns sixsq.nuvla.ui.apps-component.spec
(:require [clojure.spec.alpha :as s]
[sixsq.nuvla.ui.utils.spec :as spec-utils]))
; create an initial entry for new components
(def defaults {::module-component {::image {}
::ports (sorted-map)
::mounts (sorted-map)
::architectures ["amd64"]}})
; Image
(s/def ::image-name spec-utils/nonblank-string)
(s/def ::repository (s/nilable string?))
(s/def ::registry (s/nilable string?))
(s/def ::tag (s/nilable string?))
(s/def ::image (s/keys :req [::image-name]
:opt [::registry
::tag
::repository]))
; Ports
(s/def ::target-port int?)
(s/def ::published-port (s/nilable int?))
(s/def ::protocol (s/nilable string?))
(s/def ::port (s/keys :req [::target-port]
:opt [::protocol
::published-port]))
(s/def ::ports (s/map-of any? (s/merge ::port)))
; Volumes (mounts)
(s/def ::mount-source spec-utils/nonblank-string)
(s/def ::mount-target spec-utils/nonblank-string)
(s/def ::mount-read-only boolean?)
(s/def ::mount-type #{"bind" "volume"})
(s/def ::mount (s/keys :req [::mount-type
::mount-target]
:opt [::mount-source
::mount-read-only
;::volume-options
]))
(s/def ::mounts (s/map-of any? (s/merge ::mount)))
(s/def ::input-value spec-utils/nonblank-string)
(s/def ::architectures (s/coll-of string? :min-count 1))
; Module
(s/def ::module-component (s/keys :req [::image
::architectures]
:opt [::ports
::mounts
::data-types]))
| null | https://raw.githubusercontent.com/nuvla/ui/239eee3b7661b5f0b84117334d40c645de817c1c/code/src/cljs/sixsq/nuvla/ui/apps_component/spec.cljs | clojure | create an initial entry for new components
Image
Volumes (mounts)
::volume-options
Module | (ns sixsq.nuvla.ui.apps-component.spec
(:require [clojure.spec.alpha :as s]
[sixsq.nuvla.ui.utils.spec :as spec-utils]))
(def defaults {::module-component {::image {}
::ports (sorted-map)
::mounts (sorted-map)
::architectures ["amd64"]}})
(s/def ::image-name spec-utils/nonblank-string)
(s/def ::repository (s/nilable string?))
(s/def ::registry (s/nilable string?))
(s/def ::tag (s/nilable string?))
(s/def ::image (s/keys :req [::image-name]
:opt [::registry
::tag
::repository]))
Ports
(s/def ::target-port int?)
(s/def ::published-port (s/nilable int?))
(s/def ::protocol (s/nilable string?))
(s/def ::port (s/keys :req [::target-port]
:opt [::protocol
::published-port]))
(s/def ::ports (s/map-of any? (s/merge ::port)))
(s/def ::mount-source spec-utils/nonblank-string)
(s/def ::mount-target spec-utils/nonblank-string)
(s/def ::mount-read-only boolean?)
(s/def ::mount-type #{"bind" "volume"})
(s/def ::mount (s/keys :req [::mount-type
::mount-target]
:opt [::mount-source
::mount-read-only
]))
(s/def ::mounts (s/map-of any? (s/merge ::mount)))
(s/def ::input-value spec-utils/nonblank-string)
(s/def ::architectures (s/coll-of string? :min-count 1))
(s/def ::module-component (s/keys :req [::image
::architectures]
:opt [::ports
::mounts
::data-types]))
|
83b33af6765b8db6318ed5a8505476b7fb5cb9cbd2ddf87b1b584b8d75f34a52 | input-output-hk/cardano-wallet-legacy | Scenario.hs | module Test.Integration.Framework.Scenario
( Scenario
) where
import Universum
import Test.Hspec.Core.Spec (Example (..), Result (..),
ResultStatus (..))
-- | A wrapper around 'StateT' for which we define a few instances. The most
-- interesting one is 'Example'.
newtype Scenario s m a = Scenario (StateT s m a)
deriving newtype
( Functor
, Applicative
, Monad
, MonadThrow
, MonadCatch
, MonadFail
, MonadIO
, MonadMask
, MonadState s
)
-- | We emulate 'MonadReader' via the 'MonadState' instance; this way we can
-- relax the constraints for methods that actually just read the state.
instance (Monad m, MonadState s (Scenario s m)) => MonadReader s (Scenario s m) where
ask = get
local f m = do
s <- get
put (f s) *> m <* put s
-- | This gives us the ability to define our spec as `Scenario` instead of just
-- plain `IO`. This way, each scenario runs within a context and has access to
-- a wallet client and a dedicated faucet wallet.
instance Example (Scenario s IO ()) where
type Arg (Scenario s IO ()) = MVar s
evaluateExample (Scenario io) _ action _ =
action runAndPersist >> return (Result "" Success)
where
runAndPersist :: MVar s -> IO ()
runAndPersist mvar = do
let acquire = takeMVar mvar
let release = putMVar mvar
let between = runStateT io >=> (putMVar mvar . snd)
bracketOnError acquire release between
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet-legacy/143e6d0dac0b28b3274600c6c49ec87e42ec9f37/test/integration/Test/Integration/Framework/Scenario.hs | haskell | | A Wrapper around 'StateT' around which we define a few instances. The most
interesting one is 'Example'
| We emulate the 'MonadReader' using the 'MonadState' instance, this way, we
can lower down our constraints for methods that actually just read the state.
| This gives us the ability to define our spec as `Scenario` instead of just
plain `IO`. This way, each scenario runs within a context and has access to
a wallet client and a dedicated faucet wallet. | module Test.Integration.Framework.Scenario
( Scenario
) where
import Universum
import Test.Hspec.Core.Spec (Example (..), Result (..),
ResultStatus (..))
newtype Scenario s m a = Scenario (StateT s m a)
deriving newtype
( Functor
, Applicative
, Monad
, MonadThrow
, MonadCatch
, MonadFail
, MonadIO
, MonadMask
, MonadState s
)
instance (Monad m, MonadState s (Scenario s m)) => MonadReader s (Scenario s m) where
ask = get
local f m = do
s <- get
put (f s) *> m <* put s
instance Example (Scenario s IO ()) where
type Arg (Scenario s IO ()) = MVar s
evaluateExample (Scenario io) _ action _ =
action runAndPersist >> return (Result "" Success)
where
runAndPersist :: MVar s -> IO ()
runAndPersist mvar = do
let acquire = takeMVar mvar
let release = putMVar mvar
let between = runStateT io >=> (putMVar mvar . snd)
bracketOnError acquire release between
|
623908601afe47f224f3e555646de5fbeda3e283823249c9e54bfa021ccae3b3 | Clozure/ccl-tests | loop17.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Thu Nov 21 09:48:38 2002
;;;; Contains: Miscellaneous loop tests
(in-package :cl-test)
;;; Initially and finally take multiple forms,
;;; and execute them in the right order
(deftest loop.17.1
(loop
with x = 0
initially (incf x 1) (incf x (+ x x))
initially (incf x (+ x x x))
until t
finally (incf x 100) (incf x (+ x x))
finally (return x))
336)
(deftest loop.17.2
(loop
with x = 0
until t
initially (incf x 1) (incf x (+ x x))
finally (incf x 100) (incf x (+ x x))
initially (incf x (+ x x x))
finally (return x))
336)
(deftest loop.17.3
(let ((x 0))
(loop
with y = (incf x 1)
initially (incf x 2)
until t
finally (return (values x y))))
3 1)
(deftest loop.17.4
(loop
doing (return 'a)
finally (return 'b))
a)
(deftest loop.17.5
(loop
return 'a
finally (return 'b))
a)
(deftest loop.17.6
(let ((x 0))
(tagbody
(loop
do (go done)
finally (incf x))
done)
x)
0)
(deftest loop.17.7
(let ((x 0))
(catch 'done
(loop
do (throw 'done nil)
finally (incf x)))
x)
0)
(deftest loop.17.8
(loop
for x in '(1 2 3)
collect x
finally (return 'good))
good)
(deftest loop.17.9
(loop
for x in '(1 2 3)
append (list x)
finally (return 'good))
good)
(deftest loop.17.10
(loop
for x in '(1 2 3)
nconc (list x)
finally (return 'good))
good)
(deftest loop.17.11
(loop
for x in '(1 2 3)
count (> x 1)
finally (return 'good))
good)
(deftest loop.17.12
(loop
for x in '(1 2 3)
sum x
finally (return 'good))
good)
(deftest loop.17.13
(loop
for x in '(1 2 3)
maximize x
finally (return 'good))
good)
(deftest loop.17.14
(loop
for x in '(1 2 3)
minimize x
finally (return 'good))
good)
;;; iteration clause grouping
(deftest loop.17.20
(loop
for i from 1 to 5
for j = 0 then (+ j i)
collect j)
(0 2 5 9 14))
(deftest loop.17.21
(loop
for i from 1 to 5
and j = 0 then (+ j i)
collect j)
(0 1 3 6 10))
;;; Test that explicit calls to macroexpand in subforms
;;; are done in the correct environment
(deftest loop.17.22
(macrolet
((%m (z) z))
(loop with x = 0
initially (expand-in-current-env (%m (incf x)))
until t
finally (expand-in-current-env (%m (return x)))))
1)
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/loop17.lsp | lisp | -*- Mode: Lisp -*-
Contains: Miscellaneous loop tests
Initially and finally take multiple forms,
and execute them in the right order
iteration clause grouping
Test that explicit calls to macroexpand in subforms
are done in the correct environment | Author :
Created : Thu Nov 21 09:48:38 2002
(in-package :cl-test)
(deftest loop.17.1
(loop
with x = 0
initially (incf x 1) (incf x (+ x x))
initially (incf x (+ x x x))
until t
finally (incf x 100) (incf x (+ x x))
finally (return x))
336)
(deftest loop.17.2
(loop
with x = 0
until t
initially (incf x 1) (incf x (+ x x))
finally (incf x 100) (incf x (+ x x))
initially (incf x (+ x x x))
finally (return x))
336)
(deftest loop.17.3
(let ((x 0))
(loop
with y = (incf x 1)
initially (incf x 2)
until t
finally (return (values x y))))
3 1)
(deftest loop.17.4
(loop
doing (return 'a)
finally (return 'b))
a)
(deftest loop.17.5
(loop
return 'a
finally (return 'b))
a)
(deftest loop.17.6
(let ((x 0))
(tagbody
(loop
do (go done)
finally (incf x))
done)
x)
0)
(deftest loop.17.7
(let ((x 0))
(catch 'done
(loop
do (throw 'done nil)
finally (incf x)))
x)
0)
(deftest loop.17.8
(loop
for x in '(1 2 3)
collect x
finally (return 'good))
good)
(deftest loop.17.9
(loop
for x in '(1 2 3)
append (list x)
finally (return 'good))
good)
(deftest loop.17.10
(loop
for x in '(1 2 3)
nconc (list x)
finally (return 'good))
good)
(deftest loop.17.11
(loop
for x in '(1 2 3)
count (> x 1)
finally (return 'good))
good)
(deftest loop.17.12
(loop
for x in '(1 2 3)
sum x
finally (return 'good))
good)
(deftest loop.17.13
(loop
for x in '(1 2 3)
maximize x
finally (return 'good))
good)
(deftest loop.17.14
(loop
for x in '(1 2 3)
minimize x
finally (return 'good))
good)
(deftest loop.17.20
(loop
for i from 1 to 5
for j = 0 then (+ j i)
collect j)
(0 2 5 9 14))
(deftest loop.17.21
(loop
for i from 1 to 5
and j = 0 then (+ j i)
collect j)
(0 1 3 6 10))
(deftest loop.17.22
(macrolet
((%m (z) z))
(loop with x = 0
initially (expand-in-current-env (%m (incf x)))
until t
finally (expand-in-current-env (%m (return x)))))
1)
|
53f76552def019c2ba4609b40514b53761993081af28babed1e552b771528c92 | debug-ito/net-spider | SnapshotSpec.hs | module NetSpider.CLI.SnapshotSpec (main,spec) where
import Data.List (isInfixOf)
import NetSpider.Interval (interval, Extended(..))
import NetSpider.Query
( startsFrom, defQuery, timeInterval,
foundNodePolicy, policyAppend,
Query
)
import NetSpider.Timestamp (fromEpochMillisecond)
import qualified Options.Applicative as Opt
import Test.Hspec
import NetSpider.CLI.TestCommon (runP)
import NetSpider.CLI.Snapshot (SnapshotConfig(..), parserSnapshotQuery, makeSnapshotQuery)
defConfig :: SnapshotConfig Int
defConfig =
SnapshotConfig
{ nodeIDReader = Opt.auto,
startsFromAsArguments = False
}
main :: IO ()
main = hspec spec
parseSQ :: SnapshotConfig Int -> [String] -> Either String (Query Int () () ())
parseSQ sconf args = makeSnapshotQuery base_query =<< runP (parserSnapshotQuery sconf) args
where
base_query = (defQuery []) { foundNodePolicy = policyAppend }
spec :: Spec
spec = describe "parserSnapshotQuery" $ do
specify "default" $ do
let (Right got) = parseSQ defConfig []
startsFrom got `shouldBe` []
timeInterval got `shouldBe` interval (NegInf, False) (PosInf, False)
foundNodePolicy got `shouldBe` policyAppend
specify "time-from" $ do
let (Right got) = parseSQ defConfig
["--time-from", "2019-02-19T11:12:00"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1550574720000, True)
(PosInf, False)
specify "time-to with exclusive" $ do
let (Right got) = parseSQ defConfig
["--time-to", "x2017-12-20T19:22:02"]
timeInterval got `shouldBe`
interval (NegInf, False)
(Finite $ fromEpochMillisecond 1513797722000, False)
specify "both time-from and time-to with inclusive" $ do
let (Right got) = parseSQ defConfig
["-f", "i2018-10-11T14:13:33", "-t", "i2018-10-11T14:13:50.332"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1539267213000, True)
(Finite $ fromEpochMillisecond 1539267230332, True)
specify "explicit infinity" $ do
let (Right got) = parseSQ defConfig
["--time-from", "-inf", "--time-to", "+inf"]
timeInterval got `shouldBe` interval (NegInf, True) (PosInf, True)
specify "multiple starts-from" $ do
let (Right got) = parseSQ defConfig
["-s", "10", "-s", "12", "-s", "15"]
startsFrom got `shouldBe` [10,12,15]
let argsConfig = defConfig { startsFromAsArguments = True }
specify "startsFromAsArguments" $ do
let (Right got) = parseSQ argsConfig
["143", "200", "473","21"]
startsFrom got `shouldBe` [143, 200, 473, 21]
specify "startsFromAsArguments - -s still enabled" $ do
let (Right got) = parseSQ argsConfig
["90", "-s", "181"]
startsFrom got `shouldBe` [181, 90]
specify "duration + time-from" $ do
let (Right got) = parseSQ defConfig
["--duration", "3600", "--time-from", "i2019-04-30T19:03:33"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1556651013000, True)
(Finite $ fromEpochMillisecond (1556651013000 + 3600000), False)
specify "duration + time-to" $ do
let (Right got) = parseSQ defConfig
["-d", "600", "--time-to", "x2019-04-30T19:03:33"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond (1556651013000 - 600000), True)
(Finite $ fromEpochMillisecond 1556651013000, False)
specify "duration + time-to + time-from expects error" $ do
let (Left err) = parseSQ defConfig
["-d", "600", "--time-to", "x2019-04-30T19:03:33", "--time-from", "x2019-04-30T17:00:52"]
err `shouldSatisfy` (isInfixOf "all --time-to, --time-from and --duration is not allowed")
specify "duration without time-to or time-from" $ do
let (Left err) = parseSQ defConfig
["-d", "600"]
err `shouldSatisfy` (isInfixOf "--duration only is not allowed")
| null | https://raw.githubusercontent.com/debug-ito/net-spider/82dfbdca1add1edfd54ef36cb1ca5129d528b814/net-spider-cli/test/NetSpider/CLI/SnapshotSpec.hs | haskell | module NetSpider.CLI.SnapshotSpec (main,spec) where
import Data.List (isInfixOf)
import NetSpider.Interval (interval, Extended(..))
import NetSpider.Query
( startsFrom, defQuery, timeInterval,
foundNodePolicy, policyAppend,
Query
)
import NetSpider.Timestamp (fromEpochMillisecond)
import qualified Options.Applicative as Opt
import Test.Hspec
import NetSpider.CLI.TestCommon (runP)
import NetSpider.CLI.Snapshot (SnapshotConfig(..), parserSnapshotQuery, makeSnapshotQuery)
defConfig :: SnapshotConfig Int
defConfig =
SnapshotConfig
{ nodeIDReader = Opt.auto,
startsFromAsArguments = False
}
main :: IO ()
main = hspec spec
parseSQ :: SnapshotConfig Int -> [String] -> Either String (Query Int () () ())
parseSQ sconf args = makeSnapshotQuery base_query =<< runP (parserSnapshotQuery sconf) args
where
base_query = (defQuery []) { foundNodePolicy = policyAppend }
spec :: Spec
spec = describe "parserSnapshotQuery" $ do
specify "default" $ do
let (Right got) = parseSQ defConfig []
startsFrom got `shouldBe` []
timeInterval got `shouldBe` interval (NegInf, False) (PosInf, False)
foundNodePolicy got `shouldBe` policyAppend
specify "time-from" $ do
let (Right got) = parseSQ defConfig
["--time-from", "2019-02-19T11:12:00"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1550574720000, True)
(PosInf, False)
specify "time-to with exclusive" $ do
let (Right got) = parseSQ defConfig
["--time-to", "x2017-12-20T19:22:02"]
timeInterval got `shouldBe`
interval (NegInf, False)
(Finite $ fromEpochMillisecond 1513797722000, False)
specify "both time-from and time-to with inclusive" $ do
let (Right got) = parseSQ defConfig
["-f", "i2018-10-11T14:13:33", "-t", "i2018-10-11T14:13:50.332"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1539267213000, True)
(Finite $ fromEpochMillisecond 1539267230332, True)
specify "explicit infinity" $ do
let (Right got) = parseSQ defConfig
["--time-from", "-inf", "--time-to", "+inf"]
timeInterval got `shouldBe` interval (NegInf, True) (PosInf, True)
specify "multiple starts-from" $ do
let (Right got) = parseSQ defConfig
["-s", "10", "-s", "12", "-s", "15"]
startsFrom got `shouldBe` [10,12,15]
let argsConfig = defConfig { startsFromAsArguments = True }
specify "startsFromAsArguments" $ do
let (Right got) = parseSQ argsConfig
["143", "200", "473","21"]
startsFrom got `shouldBe` [143, 200, 473, 21]
specify "startsFromAsArguments - -s still enabled" $ do
let (Right got) = parseSQ argsConfig
["90", "-s", "181"]
startsFrom got `shouldBe` [181, 90]
specify "duration + time-from" $ do
let (Right got) = parseSQ defConfig
["--duration", "3600", "--time-from", "i2019-04-30T19:03:33"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond 1556651013000, True)
(Finite $ fromEpochMillisecond (1556651013000 + 3600000), False)
specify "duration + time-to" $ do
let (Right got) = parseSQ defConfig
["-d", "600", "--time-to", "x2019-04-30T19:03:33"]
timeInterval got `shouldBe`
interval (Finite $ fromEpochMillisecond (1556651013000 - 600000), True)
(Finite $ fromEpochMillisecond 1556651013000, False)
specify "duration + time-to + time-from expects error" $ do
let (Left err) = parseSQ defConfig
["-d", "600", "--time-to", "x2019-04-30T19:03:33", "--time-from", "x2019-04-30T17:00:52"]
err `shouldSatisfy` (isInfixOf "all --time-to, --time-from and --duration is not allowed")
specify "duration without time-to or time-from" $ do
let (Left err) = parseSQ defConfig
["-d", "600"]
err `shouldSatisfy` (isInfixOf "--duration only is not allowed")
|
|
19657da6ead2a29a35232a05f15dbeb4e3bce0e0833c17d94abbd31bba055452 | ndmitchell/tagsoup | Parser.hs |
module Compiler.Parser(parse) where
import Compiler.Lp
import Compiler.Util
parse :: String -> Program
parse = map parseRule . chunks . filter (not . dull) . lines
where
dull x = all isSpace x || "#" `isPrefixOf` x
chunks = rep (\(x:xs) -> first (x:) $ break (not . isSpace . head) xs)
parseRule :: [String] -> Rule
parseRule [x] = Rule name args $ NoChoice $ parseSeq body
where (name:args,body) = break' "=" $ lexemes x
parseRule (x:ys) = Rule name args $ Choice $ map (parseAlt . lexemes) ys
where (name:args) = lexemes x
parseAlt :: [String] -> (Bind Pat,Seq)
parseAlt (x:"=":y) = (parseBind parsePat x, parseSeq y)
parseSeq :: [String] -> Seq
parseSeq xs = Seq (map (parseBind parseExp) a) (if null b then "res" else uncurly $ head b)
where (a,b) = break ("{" `isPrefixOf`) xs
parseBind :: (String -> a) -> String -> Bind a
parseBind f x | "@" `isPrefixOf` b = Bind (Just a) $ f $ unround $ tail b
| otherwise = Bind Nothing $ f $ unround x
where (a,b) = span isAlpha x
parsePat :: String -> Pat
parsePat "_" = PWildcard
parsePat x@('\"':_) = PLit $ read x
parsePat x = PPrim x
parseExp :: String -> Exp
parseExp x@('\"':_) = Lit $ read x
parseExp x = Prim name $ map parseExp args
where (name:args) = words x
---------------------------------------------------------------------
UTILITIES
break' :: (Show a, Eq a) => a -> [a] -> ([a],[a])
break' x xs | null b = error $ "Parse error, expected " ++ show a ++ " in " ++ unwords (map show xs)
| otherwise = (a, tail b)
where (a,b) = break (== x) xs
lexemes :: String -> [String]
lexemes = f . words
where
f (x:xs) | isJust v = unwords (a++[b]) : f bs
where v = getBracket x
(a,b:bs) = break (fromJust v `elem`) (x:xs)
f (x:xs) = x : f xs
f [] = []
getBracket ('(':xs) = Just ')'
getBracket ('{':xs) = Just '}'
getBracket (_:xs) = getBracket xs
getBracket [] = Nothing
unround ('(':xs) | ")" `isSuffixOf` xs = init xs
unround x = x
uncurly ('{':xs) | "}" `isSuffixOf` xs = init xs
uncurly x = x
| null | https://raw.githubusercontent.com/ndmitchell/tagsoup/e116d3b965e4f149581bd55e45a05661b1113536/dead/parser/Compiler/Parser.hs | haskell | ------------------------------------------------------------------- |
module Compiler.Parser(parse) where
import Compiler.Lp
import Compiler.Util
parse :: String -> Program
parse = map parseRule . chunks . filter (not . dull) . lines
where
dull x = all isSpace x || "#" `isPrefixOf` x
chunks = rep (\(x:xs) -> first (x:) $ break (not . isSpace . head) xs)
parseRule :: [String] -> Rule
parseRule [x] = Rule name args $ NoChoice $ parseSeq body
where (name:args,body) = break' "=" $ lexemes x
parseRule (x:ys) = Rule name args $ Choice $ map (parseAlt . lexemes) ys
where (name:args) = lexemes x
parseAlt :: [String] -> (Bind Pat,Seq)
parseAlt (x:"=":y) = (parseBind parsePat x, parseSeq y)
parseSeq :: [String] -> Seq
parseSeq xs = Seq (map (parseBind parseExp) a) (if null b then "res" else uncurly $ head b)
where (a,b) = break ("{" `isPrefixOf`) xs
parseBind :: (String -> a) -> String -> Bind a
parseBind f x | "@" `isPrefixOf` b = Bind (Just a) $ f $ unround $ tail b
| otherwise = Bind Nothing $ f $ unround x
where (a,b) = span isAlpha x
parsePat :: String -> Pat
parsePat "_" = PWildcard
parsePat x@('\"':_) = PLit $ read x
parsePat x = PPrim x
parseExp :: String -> Exp
parseExp x@('\"':_) = Lit $ read x
parseExp x = Prim name $ map parseExp args
where (name:args) = words x
UTILITIES
break' :: (Show a, Eq a) => a -> [a] -> ([a],[a])
break' x xs | null b = error $ "Parse error, expected " ++ show a ++ " in " ++ unwords (map show xs)
| otherwise = (a, tail b)
where (a,b) = break (== x) xs
lexemes :: String -> [String]
lexemes = f . words
where
f (x:xs) | isJust v = unwords (a++[b]) : f bs
where v = getBracket x
(a,b:bs) = break (fromJust v `elem`) (x:xs)
f (x:xs) = x : f xs
f [] = []
getBracket ('(':xs) = Just ')'
getBracket ('{':xs) = Just '}'
getBracket (_:xs) = getBracket xs
getBracket [] = Nothing
unround ('(':xs) | ")" `isSuffixOf` xs = init xs
unround x = x
uncurly ('{':xs) | "}" `isSuffixOf` xs = init xs
uncurly x = x
|
930b4e66bc06d925f3a2bfa9e23134ef1705b1b6e4763bf8178d389288c43e61 | facebookarchive/pfff | ifdef.ml | let foo x =
match x with
#if XXX
| 1 -> 1
#else
| 1 -> 2
#endif
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/tests/ml/parsing/ifdef.ml | ocaml | let foo x =
match x with
#if XXX
| 1 -> 1
#else
| 1 -> 2
#endif
|
|
2d96258e3602bad5bb5a5eecf9f71af91330092c6373c5e4366ffb914f8a5d75 | tejasbubane/haskell-book-code | try.hs | module Main where
import Control.Exception
import System.Environment (getArgs)
willFail :: Integer -> IO (Either ArithException ())
willFail denom =
try $ print $ div 5 denom
-- Handle the error properly
handleArithException :: Show e => IO (Either e a) -> IO ()
handleArithException failure = do
ex <- failure
case ex of
Left e -> putStrLn $ show e
Right x -> return ()
testDiv :: Integer -> IO ()
testDiv = handleArithException . willFail
main :: IO ()
main = do
args <- getArgs
mapM_ (testDiv . read) args
-- Compile and run passing arguments
-- ./try 1 2 0
| null | https://raw.githubusercontent.com/tejasbubane/haskell-book-code/deaac8ab4db0ae8692d0278826528bb8a746ed82/ch-30/try.hs | haskell | Handle the error properly
Compile and run passing arguments
./try 1 2 0 | module Main where
import Control.Exception
import System.Environment (getArgs)
willFail :: Integer -> IO (Either ArithException ())
willFail denom =
try $ print $ div 5 denom
handleArithException :: Show e => IO (Either e a) -> IO ()
handleArithException failure = do
ex <- failure
case ex of
Left e -> putStrLn $ show e
Right x -> return ()
testDiv :: Integer -> IO ()
testDiv = handleArithException . willFail
main :: IO ()
main = do
args <- getArgs
mapM_ (testDiv . read) args
|
8bb7e1022f850a6cd16e634b6a9dd4efe5de78c1bfebc4ff87462c6e5d866fdd | patricoferris/ocaml-multicore-monorepo | graphql_async.mli | (** GraphQL schema with Async support *)
module Schema : sig
include Graphql_intf.Schema with type 'a Io.t = 'a Async_kernel.Deferred.t
and type 'a Io.Stream.t = 'a Async_kernel.Pipe.Reader.t
and type field_error = string
end
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/ocaml-graphql-server/graphql-async/src/graphql_async.mli | ocaml | * GraphQL schema with Async support | module Schema : sig
include Graphql_intf.Schema with type 'a Io.t = 'a Async_kernel.Deferred.t
and type 'a Io.Stream.t = 'a Async_kernel.Pipe.Reader.t
and type field_error = string
end
|
426ded85868b05de21cca507580e2c6c0431eaa824978ab4c6bf6100ee6e396d | mzp/coq-ide-for-ios | constrextern.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
i $ I d : constrextern.mli 13323 2010 - 07 - 24 15:57:30Z herbelin $ i
(*i*)
open Util
open Names
open Term
open Termops
open Sign
open Environ
open Libnames
open Nametab
open Rawterm
open Pattern
open Topconstr
open Notation
(*i*)
v7->v8 translation
val check_same_type : constr_expr -> constr_expr -> unit
(* Translation of pattern, cases pattern, rawterm and term into syntax
trees for printing *)
val extern_cases_pattern : Idset.t -> cases_pattern -> cases_pattern_expr
val extern_rawconstr : Idset.t -> rawconstr -> constr_expr
val extern_rawtype : Idset.t -> rawconstr -> constr_expr
val extern_constr_pattern : names_context -> constr_pattern -> constr_expr
If [ b = true ] in [ extern_constr b env c ] then the variables in the first
level of quantification clashing with the variables in [ env ] are renamed
level of quantification clashing with the variables in [env] are renamed *)
val extern_constr : bool -> env -> constr -> constr_expr
val extern_constr_in_scope : bool -> scope_name -> env -> constr -> constr_expr
val extern_reference : loc -> Idset.t -> global_reference -> reference
val extern_type : bool -> env -> types -> constr_expr
val extern_sort : sorts -> rawsort
val extern_rel_context : constr option -> env ->
rel_context -> local_binder list
(* Printing options *)
val print_implicits : bool ref
val print_implicits_defensive : bool ref
val print_arguments : bool ref
val print_evar_arguments : bool ref
val print_coercions : bool ref
val print_universes : bool ref
val print_no_symbol : bool ref
val print_projections : bool ref
(* Debug printing options *)
val set_debug_global_reference_printer :
(loc -> global_reference -> reference) -> unit
(* This governs printing of implicit arguments. If [with_implicits] is
on and not [with_arguments] then implicit args are printed prefixed
by "!"; if [with_implicits] and [with_arguments] are both on the
function and not the arguments is prefixed by "!" *)
val with_implicits : ('a -> 'b) -> 'a -> 'b
val with_arguments : ('a -> 'b) -> 'a -> 'b
(* This forces printing of coercions *)
val with_coercions : ('a -> 'b) -> 'a -> 'b
(* This forces printing universe names of Type{.} *)
val with_universes : ('a -> 'b) -> 'a -> 'b
(* This suppresses printing of numeral and symbols *)
val without_symbols : ('a -> 'b) -> 'a -> 'b
(* This prints metas as anonymous holes *)
val with_meta_as_hole : ('a -> 'b) -> 'a -> 'b
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/interp/constrextern.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i
i
Translation of pattern, cases pattern, rawterm and term into syntax
trees for printing
Printing options
Debug printing options
This governs printing of implicit arguments. If [with_implicits] is
on and not [with_arguments] then implicit args are printed prefixed
by "!"; if [with_implicits] and [with_arguments] are both on the
function and not the arguments is prefixed by "!"
This forces printing of coercions
This forces printing universe names of Type{.}
This suppresses printing of numeral and symbols
This prints metas as anonymous holes | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : constrextern.mli 13323 2010 - 07 - 24 15:57:30Z herbelin $ i
open Util
open Names
open Term
open Termops
open Sign
open Environ
open Libnames
open Nametab
open Rawterm
open Pattern
open Topconstr
open Notation
v7->v8 translation
val check_same_type : constr_expr -> constr_expr -> unit
val extern_cases_pattern : Idset.t -> cases_pattern -> cases_pattern_expr
val extern_rawconstr : Idset.t -> rawconstr -> constr_expr
val extern_rawtype : Idset.t -> rawconstr -> constr_expr
val extern_constr_pattern : names_context -> constr_pattern -> constr_expr
If [ b = true ] in [ extern_constr b env c ] then the variables in the first
level of quantification clashing with the variables in [ env ] are renamed
level of quantification clashing with the variables in [env] are renamed *)
val extern_constr : bool -> env -> constr -> constr_expr
val extern_constr_in_scope : bool -> scope_name -> env -> constr -> constr_expr
val extern_reference : loc -> Idset.t -> global_reference -> reference
val extern_type : bool -> env -> types -> constr_expr
val extern_sort : sorts -> rawsort
val extern_rel_context : constr option -> env ->
rel_context -> local_binder list
val print_implicits : bool ref
val print_implicits_defensive : bool ref
val print_arguments : bool ref
val print_evar_arguments : bool ref
val print_coercions : bool ref
val print_universes : bool ref
val print_no_symbol : bool ref
val print_projections : bool ref
val set_debug_global_reference_printer :
(loc -> global_reference -> reference) -> unit
val with_implicits : ('a -> 'b) -> 'a -> 'b
val with_arguments : ('a -> 'b) -> 'a -> 'b
val with_coercions : ('a -> 'b) -> 'a -> 'b
val with_universes : ('a -> 'b) -> 'a -> 'b
val without_symbols : ('a -> 'b) -> 'a -> 'b
val with_meta_as_hole : ('a -> 'b) -> 'a -> 'b
|
74b834f535b3bb3446318f4b3e7f28f2e706f7784fdfc39c7d57d5d904634b2f | kudelskisecurity/scannerl | utils_http.erl | %% utils for parsing http and handling redirection
%%
%% when redirections occurs, you still need to check
%% that the redirection does not point back to your
%% original target and page after some redirections
%% as no internal stack of redirections is kept in here
%%
%% type of result returned:
{ ok , { Code , Headermap , Body } }
HTTP 200 received and a Map containing the header options as well
%% as the body are returned
%% {error, Data}:
%% unable to parse http
{ redirect , { error , empty } , { Code , Headermap , Body } }
Redirection found ( 3XX ) but no value
%% given
{ redirect , { error , cyclic } , { Code , Headermap , Body } }
redirection ( 3XX ) is cyclic
{ redirect , { error , Location } , { Code , Headermap , Body } }
%% redirection error while parsing the location
{ redirect , { ok , { Host , Page } } , { Code , Headermap , Body } }
redirection to Host and Page
{ redirect , { https , { Host , Page } } , { Code , Headermap , Body } }
%% redirection HTTPs on Host and Page
{ http , { Code , Headermap , Body } }
a HTTP code was received that is not 2XX or 3XX
{ other , { Code , Headermap , Body } }
%% Something was received that didn't seem to be HTTP
%%
-module(utils_http).
-author("Adrien Giner - ").
-export([parse_http/3, parse_http/4]).
% parsing defines
-define(HTTP_OK, "2").
-define(HTTP_REDIRECT, "3").
-define(CRLF, "\r\n").
-define(LF, "\n").
-define(HDRFIELDSEP, ":").
-define(PAGE_SEP, "/").
-define(PAGE_RET, "..").
-define(HTTP_LOCATION, "location").
-record(rec, {
code,
page,
host,
headermap,
header,
body,
dbg,
protoline,
payload
}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% API
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% parse an http response from host Host (the entry in the Host: field of the
query ) to page ( for example " / " or " /readme.txt " )
if is set to { Target , I d , } then
% debug will be outputed if needed otherwise set it to {}
parse_http(Host, Page, Payload) ->
parse_http(Host, Page, Payload, {}).
parse_http(Host, Page, Payload, DbgInfo) ->
Resp = fix_crlf(Payload),
case parse_response(Resp, []) of
["", Body] ->
debug(DbgInfo, "this is no HTTP or no header found"),
{other, {"", maps:new(), Body}};
[Header, Body] ->
debug(DbgInfo, "HTTP parse successfully"),
Rec = #rec{host=Host, page=Page, header=Header, body=Body, payload=Payload, dbg=DbgInfo},
match_header(Rec, get_proto_code(Header, []))
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% matchers
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
match_header(Rec, {[], _}) ->
{error, Rec#rec.payload};
match_header(Rec, {Protoline, HeaderFields}) ->
debug(Rec#rec.dbg, io_lib:fwrite("HTTP response: ~p", [Protoline])),
% get a map of the header
Headermap = parse_header(HeaderFields, maps:new(), []),
Nrec = Rec#rec{protoline=Protoline, headermap=Headermap},
match_proto(Nrec).
match_proto(Rec) when length(Rec#rec.protoline) < 2 ->
{other, {lists:concat(Rec#rec.protoline), Rec#rec.header, Rec#rec.body}};
match_proto(Rec) ->
case validate_http_code(lists:nth(2, Rec#rec.protoline)) of
{ok, ?HTTP_OK ++ _ = Code} ->
2XX
{ok, {Code, Rec#rec.headermap, Rec#rec.body}};
{ok, ?HTTP_REDIRECT ++ _ = Code} ->
3XX
NRec = Rec#rec{code = Code},
handle_redirect(NRec);
{ok, Code} ->
{http, {Code, Rec#rec.headermap, Rec#rec.body}};
{error, _Code} ->
% other stuff
{other, {lists:concat(Rec#rec.protoline), Rec#rec.headermap, Rec#rec.body}}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% handler
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% handle redirection
handle_redirect(Rec) ->
Loc = maps:get(?HTTP_LOCATION, Rec#rec.headermap, ""),
debug(Rec#rec.dbg, io_lib:fwrite("<redirection> Header: ~p", [Rec#rec.headermap])),
{redirect, redirect_location(Loc, Rec),
{Rec#rec.code, Rec#rec.headermap, Rec#rec.body}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% debug
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
debug({}, _) ->
ok;
debug({Target, Id, Debugval}, Msg) ->
utils:debug(fpmodules, Msg,
{Target, Id}, Debugval).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% parsing
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% parse each header field and return
% a list with key, value
parse_hdr_field(?HDRFIELDSEP ++ Rest, Acc) ->
[string:strip(lists:reverse(Acc)), string:strip(Rest)];
parse_hdr_field([H|T], Acc) ->
parse_hdr_field(T, [H|Acc]);
parse_hdr_field([], Acc) ->
[lists:reverse(Acc), ""].
% this allows to retrieve the http code
% line from the header
get_proto_code([], _) ->
{[], []};
get_proto_code(?CRLF ++ Rest, Acc) ->
{string:tokens(lists:reverse(Acc), " "), Rest};
get_proto_code([H|T], Acc) ->
get_proto_code(T, [H|Acc]).
% parse the header and isolate each
% option to process
% returns a map of the options
parse_header(?CRLF ++ [], Map, Acc) ->
[H, T] = parse_hdr_field(lists:reverse(Acc), []),
maps:put(normalize_key(H), T, Map);
parse_header(?CRLF ++ Rest, Map, Acc) ->
[H, T] = parse_hdr_field(lists:reverse(Acc), []),
Nmap = maps:put(normalize_key(H), T, Map),
parse_header(Rest, Nmap, []);
parse_header([H|T], Map, Acc) ->
parse_header(T, Map, [H|Acc]);
parse_header([], Map, _Agg) ->
Map.
% only parse header/body if we have HTTP code
parse_response("HTTP" ++ _ = Res, Acc) ->
sub_parse_response(Res, Acc);
parse_response(Else, _Acc) ->
["", Else].
% parse the response and separate the
header and the body separated by two CRLF
sub_parse_response(?CRLF ++ ?CRLF ++ Rest, Acc) ->
[lists:reverse(Acc)++?CRLF, Rest];
sub_parse_response([H|T], Acc) ->
sub_parse_response(T, [H|Acc]);
sub_parse_response([], _Acc) ->
["", ""].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% utils
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% handles @&#($* not following RFC
fix_crlf(Data) ->
case string:str(Data, ?CRLF) of
0 ->
re:replace(Data, ?LF, ?CRLF, [{return,list},global]);
_ ->
Data
end.
% loose validate HTTP code
validate_http_code(Code) ->
try
case list_to_integer(Code) >= 100 andalso list_to_integer(Code) < 600 of
true ->
{ok, Code};
false ->
{error, Code}
end
catch
_:_ ->
{error, Code}
end.
% normalize header option
normalize_key(Key) ->
string:strip(string:to_lower(Key)).
%% RFC2616 (#section-14.30) specifies
%% that the location should be an absolute URI.
however since 2014 the new RFC ( #section-7.1.2 )
%% allows relative and absolute URI
relative - path definition is #section-4.2
redirect_location([], _Rec) ->
% empty redirect
{error, empty};
redirect_location("http://" = Loc, _Rec) ->
{error, Loc};
redirect_location("//" ++ Redir, Rec) ->
% example:
% redirect: //<domain>/
redirect_location("http://" ++ Redir, Rec);
redirect_location("/" ++ _ = Page, Rec) ->
% example:
redirect : /frontend/ .. / home "
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host, eval_redirect_page(Page, Rec#rec.dbg), Rec#rec.dbg);
redirect_location("../" ++ _ = Page, Rec) ->
% example:
% redirect: ../home
% redirect: ../<domain>/asplogin.asp
% redirect: ../staff_online/staff/main/stafflogin.asp?action=start
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host,
eval_redirect_page(Rec#rec.page ++ Page, Rec#rec.dbg), Rec#rec.dbg);
redirect_location("http://" ++ Field, Rec) ->
% now split host and page
Ends = string:right(Field, 1) == ?PAGE_SEP,
Fields = string:tokens(Field, ?PAGE_SEP),
case Fields of
[] ->
{error, empty};
F ->
Host = string:strip(hd(F), right, $.),
Page = ?PAGE_SEP ++ string:join(tl(F), ?PAGE_SEP),
NewPage = complete_page(Page, Ends),
redirect_follow(Rec#rec.host, Rec#rec.page, Host, NewPage, Rec#rec.dbg)
end;
redirect_location("https://" ++ Field, _Rec) ->
% now split host and page
Ends = string:right(Field, 1) == ?PAGE_SEP,
Fields = string:tokens(Field, ?PAGE_SEP),
case Fields of
[] ->
{error, empty};
F ->
Host = string:strip(hd(F), right, $.),
Page = ?PAGE_SEP ++ string:join(tl(F), ?PAGE_SEP),
NewPage = complete_page(Page, Ends),
{https, {Host, NewPage}}
end;
redirect_location(Location, Rec) ->
% complete current page with redirect
NewPage = eval_redirect_page(Rec#rec.page ++ ?PAGE_SEP ++ Location, Rec#rec.dbg),
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host, NewPage, Rec#rec.dbg).
redirect_follow(Curhost, Curpage, NewHost, NewPage, Dbg) ->
case (NewHost == Curhost andalso NewPage == Curpage) of
true ->
debug(Dbg, io_lib:fwrite("cyclic !! ~p/~p => ~p/~p", [Curhost, Curpage, NewHost, NewPage])),
{error, cyclic};
false ->
debug(Dbg, io_lib:fwrite("redir ok to ~p ~p", [NewHost, NewPage])),
{ok, {NewHost, NewPage}}
end.
complete_page(Page, AddSep) ->
Ispresent = string:right(Page, 1) == ?PAGE_SEP,
case AddSep of
true ->
case Ispresent of
true ->
Page;
false ->
Page ++ ?PAGE_SEP
end;
false ->
Page
end.
% returns absolute path from relative path
eval_redirect_list([[]|T], Agg) ->
eval_redirect_list(T, Agg);
eval_redirect_list([?PAGE_RET|T], []) ->
eval_redirect_list(T, []);
eval_redirect_list([?PAGE_RET|T], Agg) ->
eval_redirect_list(T, tl(Agg));
eval_redirect_list([H|T], Agg) ->
eval_redirect_list(T, [H|Agg]);
eval_redirect_list([], []) ->
% no redirection, points to same place
[""];
eval_redirect_list([], Agg) ->
case string:str(hd(Agg), ".") of
0 ->
lists:reverse([[]|Agg]);
_ ->
lists:reverse(Agg)
end.
eval_redirect_page(?PAGE_SEP, _Dbg) ->
?PAGE_SEP;
eval_redirect_page(?PAGE_SEP ++ ?PAGE_SEP ++ Rest, Dbg) ->
eval_redirect_page(?PAGE_SEP ++ Rest, Dbg);
eval_redirect_page(Page, Dbg) ->
debug(Dbg, io_lib:fwrite("redirect to ~p", [Page])),
case string:str(Page, ?PAGE_RET) of
0 ->
Page;
_ ->
debug(Dbg, io_lib:fwrite("eval redirect being: ~p", [string:tokens(Page, ?PAGE_SEP)])),
Ends = string:right(Page, 1) == ?PAGE_SEP,
Res = eval_redirect_list(string:tokens(Page, ?PAGE_SEP), []),
?PAGE_SEP ++ string:join(Res, ?PAGE_SEP) ++ case Ends of true -> ?PAGE_SEP; _ -> "" end
end.
| null | https://raw.githubusercontent.com/kudelskisecurity/scannerl/8133065030d014401c47b2470e67a36e9df81b1e/src/utils/utils_http.erl | erlang | utils for parsing http and handling redirection
when redirections occurs, you still need to check
that the redirection does not point back to your
original target and page after some redirections
as no internal stack of redirections is kept in here
type of result returned:
as the body are returned
{error, Data}:
unable to parse http
given
redirection error while parsing the location
redirection HTTPs on Host and Page
Something was received that didn't seem to be HTTP
parsing defines
API
parse an http response from host Host (the entry in the Host: field of the
debug will be outputed if needed otherwise set it to {}
matchers
get a map of the header
other stuff
handler
handle redirection
debug
parsing
parse each header field and return
a list with key, value
this allows to retrieve the http code
line from the header
parse the header and isolate each
option to process
returns a map of the options
only parse header/body if we have HTTP code
parse the response and separate the
utils
handles @&#($* not following RFC
loose validate HTTP code
normalize header option
RFC2616 (#section-14.30) specifies
that the location should be an absolute URI.
allows relative and absolute URI
empty redirect
example:
redirect: //<domain>/
example:
example:
redirect: ../home
redirect: ../<domain>/asplogin.asp
redirect: ../staff_online/staff/main/stafflogin.asp?action=start
now split host and page
now split host and page
complete current page with redirect
returns absolute path from relative path
no redirection, points to same place | { ok , { Code , Headermap , Body } }
HTTP 200 received and a Map containing the header options as well
{ redirect , { error , empty } , { Code , Headermap , Body } }
Redirection found ( 3XX ) but no value
{ redirect , { error , cyclic } , { Code , Headermap , Body } }
redirection ( 3XX ) is cyclic
{ redirect , { error , Location } , { Code , Headermap , Body } }
{ redirect , { ok , { Host , Page } } , { Code , Headermap , Body } }
redirection to Host and Page
{ redirect , { https , { Host , Page } } , { Code , Headermap , Body } }
{ http , { Code , Headermap , Body } }
a HTTP code was received that is not 2XX or 3XX
{ other , { Code , Headermap , Body } }
-module(utils_http).
-author("Adrien Giner - ").
-export([parse_http/3, parse_http/4]).
-define(HTTP_OK, "2").
-define(HTTP_REDIRECT, "3").
-define(CRLF, "\r\n").
-define(LF, "\n").
-define(HDRFIELDSEP, ":").
-define(PAGE_SEP, "/").
-define(PAGE_RET, "..").
-define(HTTP_LOCATION, "location").
-record(rec, {
code,
page,
host,
headermap,
header,
body,
dbg,
protoline,
payload
}).
query ) to page ( for example " / " or " /readme.txt " )
if is set to { Target , I d , } then
parse_http(Host, Page, Payload) ->
parse_http(Host, Page, Payload, {}).
parse_http(Host, Page, Payload, DbgInfo) ->
Resp = fix_crlf(Payload),
case parse_response(Resp, []) of
["", Body] ->
debug(DbgInfo, "this is no HTTP or no header found"),
{other, {"", maps:new(), Body}};
[Header, Body] ->
debug(DbgInfo, "HTTP parse successfully"),
Rec = #rec{host=Host, page=Page, header=Header, body=Body, payload=Payload, dbg=DbgInfo},
match_header(Rec, get_proto_code(Header, []))
end.
match_header(Rec, {[], _}) ->
{error, Rec#rec.payload};
match_header(Rec, {Protoline, HeaderFields}) ->
debug(Rec#rec.dbg, io_lib:fwrite("HTTP response: ~p", [Protoline])),
Headermap = parse_header(HeaderFields, maps:new(), []),
Nrec = Rec#rec{protoline=Protoline, headermap=Headermap},
match_proto(Nrec).
match_proto(Rec) when length(Rec#rec.protoline) < 2 ->
{other, {lists:concat(Rec#rec.protoline), Rec#rec.header, Rec#rec.body}};
match_proto(Rec) ->
case validate_http_code(lists:nth(2, Rec#rec.protoline)) of
{ok, ?HTTP_OK ++ _ = Code} ->
2XX
{ok, {Code, Rec#rec.headermap, Rec#rec.body}};
{ok, ?HTTP_REDIRECT ++ _ = Code} ->
3XX
NRec = Rec#rec{code = Code},
handle_redirect(NRec);
{ok, Code} ->
{http, {Code, Rec#rec.headermap, Rec#rec.body}};
{error, _Code} ->
{other, {lists:concat(Rec#rec.protoline), Rec#rec.headermap, Rec#rec.body}}
end.
handle_redirect(Rec) ->
Loc = maps:get(?HTTP_LOCATION, Rec#rec.headermap, ""),
debug(Rec#rec.dbg, io_lib:fwrite("<redirection> Header: ~p", [Rec#rec.headermap])),
{redirect, redirect_location(Loc, Rec),
{Rec#rec.code, Rec#rec.headermap, Rec#rec.body}}.
debug({}, _) ->
ok;
debug({Target, Id, Debugval}, Msg) ->
utils:debug(fpmodules, Msg,
{Target, Id}, Debugval).
parse_hdr_field(?HDRFIELDSEP ++ Rest, Acc) ->
[string:strip(lists:reverse(Acc)), string:strip(Rest)];
parse_hdr_field([H|T], Acc) ->
parse_hdr_field(T, [H|Acc]);
parse_hdr_field([], Acc) ->
[lists:reverse(Acc), ""].
get_proto_code([], _) ->
{[], []};
get_proto_code(?CRLF ++ Rest, Acc) ->
{string:tokens(lists:reverse(Acc), " "), Rest};
get_proto_code([H|T], Acc) ->
get_proto_code(T, [H|Acc]).
parse_header(?CRLF ++ [], Map, Acc) ->
[H, T] = parse_hdr_field(lists:reverse(Acc), []),
maps:put(normalize_key(H), T, Map);
parse_header(?CRLF ++ Rest, Map, Acc) ->
[H, T] = parse_hdr_field(lists:reverse(Acc), []),
Nmap = maps:put(normalize_key(H), T, Map),
parse_header(Rest, Nmap, []);
parse_header([H|T], Map, Acc) ->
parse_header(T, Map, [H|Acc]);
parse_header([], Map, _Agg) ->
Map.
parse_response("HTTP" ++ _ = Res, Acc) ->
sub_parse_response(Res, Acc);
parse_response(Else, _Acc) ->
["", Else].
header and the body separated by two CRLF
sub_parse_response(?CRLF ++ ?CRLF ++ Rest, Acc) ->
[lists:reverse(Acc)++?CRLF, Rest];
sub_parse_response([H|T], Acc) ->
sub_parse_response(T, [H|Acc]);
sub_parse_response([], _Acc) ->
["", ""].
fix_crlf(Data) ->
case string:str(Data, ?CRLF) of
0 ->
re:replace(Data, ?LF, ?CRLF, [{return,list},global]);
_ ->
Data
end.
validate_http_code(Code) ->
try
case list_to_integer(Code) >= 100 andalso list_to_integer(Code) < 600 of
true ->
{ok, Code};
false ->
{error, Code}
end
catch
_:_ ->
{error, Code}
end.
normalize_key(Key) ->
string:strip(string:to_lower(Key)).
however since 2014 the new RFC ( #section-7.1.2 )
relative - path definition is #section-4.2
redirect_location([], _Rec) ->
{error, empty};
redirect_location("http://" = Loc, _Rec) ->
{error, Loc};
redirect_location("//" ++ Redir, Rec) ->
redirect_location("http://" ++ Redir, Rec);
redirect_location("/" ++ _ = Page, Rec) ->
redirect : /frontend/ .. / home "
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host, eval_redirect_page(Page, Rec#rec.dbg), Rec#rec.dbg);
redirect_location("../" ++ _ = Page, Rec) ->
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host,
eval_redirect_page(Rec#rec.page ++ Page, Rec#rec.dbg), Rec#rec.dbg);
redirect_location("http://" ++ Field, Rec) ->
Ends = string:right(Field, 1) == ?PAGE_SEP,
Fields = string:tokens(Field, ?PAGE_SEP),
case Fields of
[] ->
{error, empty};
F ->
Host = string:strip(hd(F), right, $.),
Page = ?PAGE_SEP ++ string:join(tl(F), ?PAGE_SEP),
NewPage = complete_page(Page, Ends),
redirect_follow(Rec#rec.host, Rec#rec.page, Host, NewPage, Rec#rec.dbg)
end;
redirect_location("https://" ++ Field, _Rec) ->
Ends = string:right(Field, 1) == ?PAGE_SEP,
Fields = string:tokens(Field, ?PAGE_SEP),
case Fields of
[] ->
{error, empty};
F ->
Host = string:strip(hd(F), right, $.),
Page = ?PAGE_SEP ++ string:join(tl(F), ?PAGE_SEP),
NewPage = complete_page(Page, Ends),
{https, {Host, NewPage}}
end;
redirect_location(Location, Rec) ->
NewPage = eval_redirect_page(Rec#rec.page ++ ?PAGE_SEP ++ Location, Rec#rec.dbg),
redirect_follow(Rec#rec.host, Rec#rec.page, Rec#rec.host, NewPage, Rec#rec.dbg).
redirect_follow(Curhost, Curpage, NewHost, NewPage, Dbg) ->
case (NewHost == Curhost andalso NewPage == Curpage) of
true ->
debug(Dbg, io_lib:fwrite("cyclic !! ~p/~p => ~p/~p", [Curhost, Curpage, NewHost, NewPage])),
{error, cyclic};
false ->
debug(Dbg, io_lib:fwrite("redir ok to ~p ~p", [NewHost, NewPage])),
{ok, {NewHost, NewPage}}
end.
complete_page(Page, AddSep) ->
Ispresent = string:right(Page, 1) == ?PAGE_SEP,
case AddSep of
true ->
case Ispresent of
true ->
Page;
false ->
Page ++ ?PAGE_SEP
end;
false ->
Page
end.
eval_redirect_list([[]|T], Agg) ->
eval_redirect_list(T, Agg);
eval_redirect_list([?PAGE_RET|T], []) ->
eval_redirect_list(T, []);
eval_redirect_list([?PAGE_RET|T], Agg) ->
eval_redirect_list(T, tl(Agg));
eval_redirect_list([H|T], Agg) ->
eval_redirect_list(T, [H|Agg]);
eval_redirect_list([], []) ->
[""];
eval_redirect_list([], Agg) ->
case string:str(hd(Agg), ".") of
0 ->
lists:reverse([[]|Agg]);
_ ->
lists:reverse(Agg)
end.
eval_redirect_page(?PAGE_SEP, _Dbg) ->
?PAGE_SEP;
eval_redirect_page(?PAGE_SEP ++ ?PAGE_SEP ++ Rest, Dbg) ->
eval_redirect_page(?PAGE_SEP ++ Rest, Dbg);
eval_redirect_page(Page, Dbg) ->
debug(Dbg, io_lib:fwrite("redirect to ~p", [Page])),
case string:str(Page, ?PAGE_RET) of
0 ->
Page;
_ ->
debug(Dbg, io_lib:fwrite("eval redirect being: ~p", [string:tokens(Page, ?PAGE_SEP)])),
Ends = string:right(Page, 1) == ?PAGE_SEP,
Res = eval_redirect_list(string:tokens(Page, ?PAGE_SEP), []),
?PAGE_SEP ++ string:join(Res, ?PAGE_SEP) ++ case Ends of true -> ?PAGE_SEP; _ -> "" end
end.
|
bcfadb3f98695192f81a8b867a3af605704845356a406c4a908ce3f5c98bed19 | bhaskara/programmable-reinforcement-learning | array-exit-distribution.lisp | (in-package aed)
(defclass <array-exit-distribution> (<cond-prob-dist>)
((featurizer :reader featurizer :initarg :featurizer :type function
:initform (lambda (omega u) (cons (canonicalize omega) (canonicalize u))))
(key-fn :reader key-fn :type function :initarg :key-fn)
(cond-dists :type (simple-array * 1) :reader cond-dists :writer set-cond-dists :initarg :cond-dists))
(:documentation "A conditional probability distribution for exit distributions in ALisp.
Initargs
:featurizer - Function of two arguments that maps from omega and u to a feature vector that is passed to fn-approx, which returns a probability distribution over exit state. Default is just (canonicalize (cons omega u))
:key-fn - Function from omega, u to a nonnegative integer
:Num-keys - key-fn takes values between 0 and num-keys - 1
"))
(defmethod initialize-instance :after ((d <array-exit-distribution>) &rest args &key key-fn num-keys)
(assert (and key-fn num-keys))
(unless (slot-boundp d 'cond-dists)
(set-cond-dists (make-array num-keys :initial-element nil) d)))
(defmethod cond-dist ((p <array-exit-distribution>) x)
(exit-dist p (car x) (cdr x)))
(defmethod exit-dist ((p <array-exit-distribution>) omega u)
(let* ((v (funcall (featurizer p) omega u))
(ind (funcall (key-fn p) v)))
(assert ind () "Invalid item ~a given to array-exit-dist ~a" v p)
(let ((d (aref (cond-dists p) ind)))
(renormalize d))))
(defmethod clone ((pe <array-exit-distribution>))
(make-instance '<array-exit-distribution> :featurizer (featurizer pe)
:cond-dists (clone (cond-dists pe)) :key-fn (key-fn pe)))
(defmethod update-exit-dist ((p <array-exit-distribution>) omega u new-dist eta)
(let ((d (make-deterministic-dist 'dummy-unknown-state)))
(unless (equal d new-dist)
(let* ((features (funcall (featurizer p) omega u))
(ind (funcall (key-fn p) features))
(cd (cond-dists p)))
(assert ind () "Invalid item ~a given to array exit dist ~a" ind features)
(let ((dist (aref cd ind)))
(setf (aref cd ind)
(if dist
(updatef dist new-dist eta)
(updatef d new-dist eta))))))))
| null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/alisp/rl-functions/array-exit-distribution.lisp | lisp | (in-package aed)
(defclass <array-exit-distribution> (<cond-prob-dist>)
((featurizer :reader featurizer :initarg :featurizer :type function
:initform (lambda (omega u) (cons (canonicalize omega) (canonicalize u))))
(key-fn :reader key-fn :type function :initarg :key-fn)
(cond-dists :type (simple-array * 1) :reader cond-dists :writer set-cond-dists :initarg :cond-dists))
(:documentation "A conditional probability distribution for exit distributions in ALisp.
Initargs
:featurizer - Function of two arguments that maps from omega and u to a feature vector that is passed to fn-approx, which returns a probability distribution over exit state. Default is just (canonicalize (cons omega u))
:key-fn - Function from omega, u to a nonnegative integer
:Num-keys - key-fn takes values between 0 and num-keys - 1
"))
(defmethod initialize-instance :after ((d <array-exit-distribution>) &rest args &key key-fn num-keys)
(assert (and key-fn num-keys))
(unless (slot-boundp d 'cond-dists)
(set-cond-dists (make-array num-keys :initial-element nil) d)))
(defmethod cond-dist ((p <array-exit-distribution>) x)
(exit-dist p (car x) (cdr x)))
(defmethod exit-dist ((p <array-exit-distribution>) omega u)
(let* ((v (funcall (featurizer p) omega u))
(ind (funcall (key-fn p) v)))
(assert ind () "Invalid item ~a given to array-exit-dist ~a" v p)
(let ((d (aref (cond-dists p) ind)))
(renormalize d))))
(defmethod clone ((pe <array-exit-distribution>))
(make-instance '<array-exit-distribution> :featurizer (featurizer pe)
:cond-dists (clone (cond-dists pe)) :key-fn (key-fn pe)))
(defmethod update-exit-dist ((p <array-exit-distribution>) omega u new-dist eta)
(let ((d (make-deterministic-dist 'dummy-unknown-state)))
(unless (equal d new-dist)
(let* ((features (funcall (featurizer p) omega u))
(ind (funcall (key-fn p) features))
(cd (cond-dists p)))
(assert ind () "Invalid item ~a given to array exit dist ~a" ind features)
(let ((dist (aref cd ind)))
(setf (aref cd ind)
(if dist
(updatef dist new-dist eta)
(updatef d new-dist eta))))))))
|
|
46d4a27172e316b3e95fcdd55c5b350208c98f1a84a44f325de81b0f8a25e22c | racket/web-server | stuffer.rkt | #lang racket/base
(require racket/contract
racket/match)
(define-struct stuffer (in out))
(define (stuffer/c dom rng)
(define in (dom . -> . rng))
(define in-proc (contract-late-neg-projection in))
(define out (rng . -> . dom))
(define out-proc (contract-late-neg-projection out))
(make-contract
#:name (build-compound-type-name 'stuffer/c in out)
#:late-neg-projection
(λ (blame)
(define in-app (in-proc blame))
(define out-app (out-proc blame))
(λ (val neg-party)
(unless (stuffer? val)
(raise-blame-error
blame #:missing-party neg-party
val
"expected <stuffer>, given: ~e"
val))
(make-stuffer
(in-app (stuffer-in val) neg-party)
(out-app (stuffer-out val) neg-party))))
#:first-order stuffer?))
(define id-stuffer
(make-stuffer
(lambda (v) v)
(lambda (v) v)))
(define (stuffer-compose g f)
(make-stuffer
(lambda (v)
((stuffer-in g) ((stuffer-in f) v)))
(lambda (v)
((stuffer-out f) ((stuffer-out g) v)))))
(define (stuffer-sequence f g)
(stuffer-compose g f))
(define (stuffer-if c f)
(make-stuffer
(lambda (v)
(if (c v)
(bytes-append #"1" ((stuffer-in f) v))
(bytes-append #"0" v)))
(lambda (tv)
(define tag (subbytes tv 0 1))
(define v (subbytes tv 1))
(if (bytes=? tag #"1")
((stuffer-out f) v)
v))))
(define (stuffer-chain . ss)
(match ss
[(list)
id-stuffer]
[(list-rest f ss)
(cond
[(stuffer? f)
(stuffer-sequence
f (apply stuffer-chain ss))]
[(procedure? f)
(stuffer-if
f (apply stuffer-chain ss))])]))
(define-values (alpha beta gamma) (values any/c any/c any/c))
(provide/contract
[struct stuffer
([in (any/c . -> . any/c)]
[out (any/c . -> . any/c)])]
[stuffer/c (any/c any/c . -> . contract?)]
[id-stuffer (stuffer/c alpha alpha)]
[stuffer-compose ((stuffer/c beta gamma) (stuffer/c alpha beta) . -> . (stuffer/c alpha gamma))]
[stuffer-sequence ((stuffer/c alpha beta) (stuffer/c beta gamma) . -> . (stuffer/c alpha gamma))]
[stuffer-if ((bytes? . -> . boolean?) (stuffer/c bytes? bytes?) . -> . (stuffer/c bytes? bytes?))]
[stuffer-chain (() () #:rest (listof (or/c stuffer? (bytes? . -> . boolean?))) . ->* . stuffer?)])
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-lib/web-server/stuffers/stuffer.rkt | racket | #lang racket/base
(require racket/contract
racket/match)
(define-struct stuffer (in out))
(define (stuffer/c dom rng)
(define in (dom . -> . rng))
(define in-proc (contract-late-neg-projection in))
(define out (rng . -> . dom))
(define out-proc (contract-late-neg-projection out))
(make-contract
#:name (build-compound-type-name 'stuffer/c in out)
#:late-neg-projection
(λ (blame)
(define in-app (in-proc blame))
(define out-app (out-proc blame))
(λ (val neg-party)
(unless (stuffer? val)
(raise-blame-error
blame #:missing-party neg-party
val
"expected <stuffer>, given: ~e"
val))
(make-stuffer
(in-app (stuffer-in val) neg-party)
(out-app (stuffer-out val) neg-party))))
#:first-order stuffer?))
(define id-stuffer
(make-stuffer
(lambda (v) v)
(lambda (v) v)))
(define (stuffer-compose g f)
(make-stuffer
(lambda (v)
((stuffer-in g) ((stuffer-in f) v)))
(lambda (v)
((stuffer-out f) ((stuffer-out g) v)))))
(define (stuffer-sequence f g)
(stuffer-compose g f))
(define (stuffer-if c f)
(make-stuffer
(lambda (v)
(if (c v)
(bytes-append #"1" ((stuffer-in f) v))
(bytes-append #"0" v)))
(lambda (tv)
(define tag (subbytes tv 0 1))
(define v (subbytes tv 1))
(if (bytes=? tag #"1")
((stuffer-out f) v)
v))))
(define (stuffer-chain . ss)
(match ss
[(list)
id-stuffer]
[(list-rest f ss)
(cond
[(stuffer? f)
(stuffer-sequence
f (apply stuffer-chain ss))]
[(procedure? f)
(stuffer-if
f (apply stuffer-chain ss))])]))
(define-values (alpha beta gamma) (values any/c any/c any/c))
(provide/contract
[struct stuffer
([in (any/c . -> . any/c)]
[out (any/c . -> . any/c)])]
[stuffer/c (any/c any/c . -> . contract?)]
[id-stuffer (stuffer/c alpha alpha)]
[stuffer-compose ((stuffer/c beta gamma) (stuffer/c alpha beta) . -> . (stuffer/c alpha gamma))]
[stuffer-sequence ((stuffer/c alpha beta) (stuffer/c beta gamma) . -> . (stuffer/c alpha gamma))]
[stuffer-if ((bytes? . -> . boolean?) (stuffer/c bytes? bytes?) . -> . (stuffer/c bytes? bytes?))]
[stuffer-chain (() () #:rest (listof (or/c stuffer? (bytes? . -> . boolean?))) . ->* . stuffer?)])
|
|
c435eb703c0b54d84a531cdff490b5084611caba060d5c92c08da38e74babb3c | zmyrgel/tursas | fen.clj | (ns tursas.state0x88.fen
(:use (tursas.state0x88 common util movegen move)
(tursas util)))
(def castling-values (list [8 \K] [4 \Q] [2 \k] [1 \q]))
(defn- castling->str
"Converts internal castling representation to string."
[castling]
(let [result (keep (fn [[value letter]]
(when (pos? (bit-and castling value))
letter))
castling-values)]
(if (empty? result)
"-"
(apply str result))))
(defn- castling->value
"Convers string representing castling to
internal castling value."
[s]
(reduce (fn [result [value letter]]
(if (some #(= % letter) s)
(+ result value)
result))
0 castling-values))
(defn- find-king-index
"Seeks king's index from piece-map.
This is only used when generating state from a fen.
Otherwise the king index can be queried from the board directly."
[state player]
(let [piece-map (if (== player white)
(:white-pieces state)
(:black-pieces state))
king (if (== player white)
white-king
black-king)]
(loop [pieces (seq piece-map)]
(cond (empty? pieces) nil
(== (second (first pieces)) king) (ffirst pieces)
:else (recur (rest pieces))))))
(defn- fen-board->0x88board
"Converts FEN notation string to 0x88 board representation."
[s]
(reduce (fn [board [index piece]]
(fill-square board index (piece-value piece)))
(init-game-board)
(seq (zipmap (iterate inc 0)
(mapcat #(concat % (repeat 8 \E))
(->> (expand-digits \E s)
(split-on \/)
reverse))))))
(defn- make-fen-row
"Builds single fen row from given board and row index."
[board row]
(compact-item \E (map #(piece-name (get board (+ row %)))
(range 8))))
(defn- board->fen-board
"Convert the state internal board to fen board representation."
[board]
(apply str
(reduce #(concat %1 '(\/) %2)
(map #(make-fen-row board %)
(reverse (range 0 0x77 0x10))))))
(defn- add-pieces
"Adds all pieces from board to piece-map."
[state]
(letfn [(add-index-if [pred piece-map index]
(if (pred (get (:board state) index))
(assoc piece-map index (get (:board state) index))
piece-map))]
(loop [index 0x77
blacks {}
whites {}]
(cond (= index -1) (-> state
(assoc :white-pieces whites)
(assoc :black-pieces blacks))
(not (board-index? index)) (recur (dec index) blacks whites)
:else (recur (dec index)
(add-index-if black-piece? blacks index)
(add-index-if white-piece? whites index))))))
(defn- add-king-indexes
"Adds king indexes to state."
[state]
(assoc state :board
(let [black-king (find-king-index state black)
white-king (find-king-index state white)]
(-> (:board state)
(update-king-index black-king black)
(update-king-index white-king white)))))
(defn- add-full-moves
"Helper funtion to add full moves to board.
Needed to workaround the byte limitation of the board."
[board moves]
(let [n-moves (int (/ moves 128))]
(-> board
(fill-square full-move-n-store n-moves)
(fill-square full-move-store (- moves (* n-moves 128))))))
(defn parse-fen
"Parses information from given FEN and applies it to given state."
[s state]
(when-let [[board turn cast en-pass half full] (re-seq #"\S+" s)]
(-> (assoc state :board
(-> (fen-board->0x88board board)
(fill-square turn-store (if (= turn "w")
white black))
(fill-square castling-store (castling->value cast))
(fill-square en-passant-store (if (= en-pass "-")
-1
(coord->index en-pass)))
(fill-square half-move-store (Integer/parseInt half))
(add-full-moves (Integer/parseInt full))))
add-pieces
add-king-indexes)))
(defn parse-state
"Returns FEN representation of given game state."
[state]
(let [board (:board state)]
(apply str
(interpose " " (list (board->fen-board board)
(if (== (int (get board turn-store)) white) "w" "b")
(castling->str (int (get board castling-store)))
(let [en-passant (int (get board en-passant-store))]
(if (== en-passant -1)
"-"
(index->coord en-passant)))
(get board half-move-store)
(+ (* (get board full-move-n-store) 127)
(get board full-move-store)))))))
| null | https://raw.githubusercontent.com/zmyrgel/tursas/362551a1861be0728f21b561d8907d0ca04333f7/src/tursas/state0x88/fen.clj | clojure | (ns tursas.state0x88.fen
(:use (tursas.state0x88 common util movegen move)
(tursas util)))
(def castling-values (list [8 \K] [4 \Q] [2 \k] [1 \q]))
(defn- castling->str
"Converts internal castling representation to string."
[castling]
(let [result (keep (fn [[value letter]]
(when (pos? (bit-and castling value))
letter))
castling-values)]
(if (empty? result)
"-"
(apply str result))))
(defn- castling->value
"Convers string representing castling to
internal castling value."
[s]
(reduce (fn [result [value letter]]
(if (some #(= % letter) s)
(+ result value)
result))
0 castling-values))
(defn- find-king-index
"Seeks king's index from piece-map.
This is only used when generating state from a fen.
Otherwise the king index can be queried from the board directly."
[state player]
(let [piece-map (if (== player white)
(:white-pieces state)
(:black-pieces state))
king (if (== player white)
white-king
black-king)]
(loop [pieces (seq piece-map)]
(cond (empty? pieces) nil
(== (second (first pieces)) king) (ffirst pieces)
:else (recur (rest pieces))))))
(defn- fen-board->0x88board
"Converts FEN notation string to 0x88 board representation."
[s]
(reduce (fn [board [index piece]]
(fill-square board index (piece-value piece)))
(init-game-board)
(seq (zipmap (iterate inc 0)
(mapcat #(concat % (repeat 8 \E))
(->> (expand-digits \E s)
(split-on \/)
reverse))))))
(defn- make-fen-row
"Builds single fen row from given board and row index."
[board row]
(compact-item \E (map #(piece-name (get board (+ row %)))
(range 8))))
(defn- board->fen-board
"Convert the state internal board to fen board representation."
[board]
(apply str
(reduce #(concat %1 '(\/) %2)
(map #(make-fen-row board %)
(reverse (range 0 0x77 0x10))))))
(defn- add-pieces
"Adds all pieces from board to piece-map."
[state]
(letfn [(add-index-if [pred piece-map index]
(if (pred (get (:board state) index))
(assoc piece-map index (get (:board state) index))
piece-map))]
(loop [index 0x77
blacks {}
whites {}]
(cond (= index -1) (-> state
(assoc :white-pieces whites)
(assoc :black-pieces blacks))
(not (board-index? index)) (recur (dec index) blacks whites)
:else (recur (dec index)
(add-index-if black-piece? blacks index)
(add-index-if white-piece? whites index))))))
(defn- add-king-indexes
"Adds king indexes to state."
[state]
(assoc state :board
(let [black-king (find-king-index state black)
white-king (find-king-index state white)]
(-> (:board state)
(update-king-index black-king black)
(update-king-index white-king white)))))
(defn- add-full-moves
"Helper funtion to add full moves to board.
Needed to workaround the byte limitation of the board."
[board moves]
(let [n-moves (int (/ moves 128))]
(-> board
(fill-square full-move-n-store n-moves)
(fill-square full-move-store (- moves (* n-moves 128))))))
(defn parse-fen
"Parses information from given FEN and applies it to given state."
[s state]
(when-let [[board turn cast en-pass half full] (re-seq #"\S+" s)]
(-> (assoc state :board
(-> (fen-board->0x88board board)
(fill-square turn-store (if (= turn "w")
white black))
(fill-square castling-store (castling->value cast))
(fill-square en-passant-store (if (= en-pass "-")
-1
(coord->index en-pass)))
(fill-square half-move-store (Integer/parseInt half))
(add-full-moves (Integer/parseInt full))))
add-pieces
add-king-indexes)))
(defn parse-state
"Returns FEN representation of given game state."
[state]
(let [board (:board state)]
(apply str
(interpose " " (list (board->fen-board board)
(if (== (int (get board turn-store)) white) "w" "b")
(castling->str (int (get board castling-store)))
(let [en-passant (int (get board en-passant-store))]
(if (== en-passant -1)
"-"
(index->coord en-passant)))
(get board half-move-store)
(+ (* (get board full-move-n-store) 128)
(get board full-move-store)))))))
|
|
45236d4fa2d2a7d4a11fd928659656f533902916df27a61efa1aef2aa8a3ff8d | dgiot/dgiot | emqx_mod_delayed.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_delayed).
-behaviour(gen_server).
-behaviour(emqx_gen_mod).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-logger_header("[Delayed]").
%% Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
%% emqx_gen_mod callbacks
-export([ load/1
, unload/1
, description/0
]).
-export([ start_link/0
, on_message_publish/1
]).
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-record(delayed_message,
{ key
, msg
}).
-define(TAB, ?MODULE).
-define(SERVER, ?MODULE).
-define(MAX_INTERVAL, 4294967).
%%--------------------------------------------------------------------
%% Mnesia bootstrap
%%--------------------------------------------------------------------
mnesia(boot) ->
ok = ekka_mnesia:create_table(?TAB, [
{type, ordered_set},
{disc_copies, [node()]},
{local_content, true},
{record_name, delayed_message},
{attributes, record_info(fields, delayed_message)}]);
mnesia(copy) ->
ok = ekka_mnesia:copy_table(?TAB, disc_copies).
%%--------------------------------------------------------------------
%% Load/Unload
%%--------------------------------------------------------------------
-spec(load(list()) -> ok).
load(_Env) ->
emqx_mod_sup:start_child(?MODULE, worker),
emqx:hook('message.publish', {?MODULE, on_message_publish, []}).
-spec(unload(list()) -> ok).
unload(_Env) ->
emqx:unhook('message.publish', {?MODULE, on_message_publish}),
emqx_mod_sup:stop_child(?MODULE).
description() ->
"EMQ X Delayed Publish Module".
%%--------------------------------------------------------------------
%% Hooks
%%--------------------------------------------------------------------
on_message_publish(Msg = #message{
id = Id,
topic = <<"$delayed/", Topic/binary>>,
timestamp = Ts
}) ->
[Delay, Topic1] = binary:split(Topic, <<"/">>),
PubAt = case binary_to_integer(Delay) of
Interval when Interval < ?MAX_INTERVAL ->
Interval + erlang:round(Ts / 1000);
Timestamp ->
%% Check malicious timestamp?
case (Timestamp - erlang:round(Ts / 1000)) > ?MAX_INTERVAL of
true -> error(invalid_delayed_timestamp);
false -> Timestamp
end
end,
PubMsg = Msg#message{topic = Topic1},
Headers = PubMsg#message.headers,
ok = store(#delayed_message{key = {PubAt, Id}, msg = PubMsg}),
{stop, PubMsg#message{headers = Headers#{allow_publish => false}}};
on_message_publish(Msg) ->
{ok, Msg}.
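%% Illustrative sketch (comment only, not part of the original module): with
%% the clause above, publishing to <<"$delayed/15/some/topic">> stores the
%% message and republishes it to <<"some/topic">> about 15 seconds later,
%% since 15 < ?MAX_INTERVAL is taken as a relative delay in seconds. A first
%% segment >= ?MAX_INTERVAL (4294967) is instead treated as an absolute Unix
%% timestamp in seconds, and a timestamp more than ?MAX_INTERVAL seconds in
%% the future raises invalid_delayed_timestamp. The topic names here are
%% made-up examples.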
%%--------------------------------------------------------------------
%% Start delayed publish server
%%--------------------------------------------------------------------
-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec(store(#delayed_message{}) -> ok).
store(DelayedMsg) ->
gen_server:call(?SERVER, {store, DelayedMsg}, infinity).
%%--------------------------------------------------------------------
%% gen_server callback
%%--------------------------------------------------------------------
init([]) ->
{ok, ensure_stats_event(
ensure_publish_timer(#{timer => undefined, publish_at => 0}))}.
handle_call({store, DelayedMsg = #delayed_message{key = Key}}, _From, State) ->
ok = mnesia:dirty_write(?TAB, DelayedMsg),
emqx_metrics:inc('messages.delayed'),
{reply, ok, ensure_publish_timer(Key, State)};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
%% Do Publish...
handle_info({timeout, TRef, do_publish}, State = #{timer := TRef}) ->
DeletedKeys = do_publish(mnesia:dirty_first(?TAB), os:system_time(seconds)),
lists:foreach(fun(Key) -> mnesia:dirty_delete(?TAB, Key) end, DeletedKeys),
{noreply, ensure_publish_timer(State#{timer := undefined, publish_at := 0})};
handle_info(stats, State = #{stats_fun := StatsFun}) ->
StatsFun(delayed_count()),
{noreply, State, hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef).
code_change({down, Vsn}, State, _Extra) when Vsn =:= "4.3.0" ->
NState = maps:with([timer, publish_at], State),
{ok, NState};
code_change(Vsn, State, _Extra) when Vsn =:= "4.3.0" ->
NState = ensure_stats_event(State),
{ok, NState}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Ensure the stats
ensure_stats_event(State) ->
StatsFun = emqx_stats:statsfun('delayed.count', 'delayed.max'),
{ok, StatsTimer} = timer:send_interval(timer:seconds(1), stats),
State#{stats_fun => StatsFun, stats_timer => StatsTimer}.
%% Ensure publish timer
ensure_publish_timer(State) ->
ensure_publish_timer(mnesia:dirty_first(?TAB), State).
ensure_publish_timer('$end_of_table', State) ->
State#{timer := undefined, publish_at := 0};
ensure_publish_timer({Ts, _Id}, State = #{timer := undefined}) ->
ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer({Ts, _Id}, State = #{timer := TRef, publish_at := PubAt})
when Ts < PubAt ->
ok = emqx_misc:cancel_timer(TRef),
ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer(_Key, State) ->
State.
ensure_publish_timer(Ts, Now, State) ->
Interval = max(1, Ts - Now),
TRef = emqx_misc:start_timer(timer:seconds(Interval), do_publish),
State#{timer := TRef, publish_at := Now + Interval}.
do_publish(Key, Now) ->
do_publish(Key, Now, []).
%% Do publish
do_publish('$end_of_table', _Now, Acc) ->
Acc;
do_publish({Ts, _Id}, Now, Acc) when Ts > Now ->
Acc;
do_publish(Key = {Ts, _Id}, Now, Acc) when Ts =< Now ->
case mnesia:dirty_read(?TAB, Key) of
[] -> ok;
[#delayed_message{msg = Msg}] ->
emqx_pool:async_submit(fun emqx:publish/1, [Msg])
end,
do_publish(mnesia:dirty_next(?TAB, Key), Now, [Key|Acc]).
-spec(delayed_count() -> non_neg_integer()).
delayed_count() -> mnesia:table_info(?TAB, size).
| null | https://raw.githubusercontent.com/dgiot/dgiot/c9f2f78af71692ba532e4806621b611db2afe0c9/lib-ce/emqx_modules/src/emqx_mod_delayed.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
emqx_gen_mod callbacks
gen_server callbacks
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Load/Unload
--------------------------------------------------------------------
--------------------------------------------------------------------
Hooks
--------------------------------------------------------------------
Check malicious timestamp?
--------------------------------------------------------------------
Start delayed publish server
--------------------------------------------------------------------
--------------------------------------------------------------------
gen_server callback
--------------------------------------------------------------------
Do Publish...
--------------------------------------------------------------------
--------------------------------------------------------------------
Ensure the stats
Ensure publish timer
Do publish | Copyright ( c ) 2020 - 2021 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_mod_delayed).
-behaviour(gen_server).
-behaviour(emqx_gen_mod).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-logger_header("[Delayed]").
%% Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
-export([ load/1
, unload/1
, description/0
]).
-export([ start_link/0
, on_message_publish/1
]).
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-record(delayed_message,
{ key
, msg
}).
-define(TAB, ?MODULE).
-define(SERVER, ?MODULE).
-define(MAX_INTERVAL, 4294967).
%% Mnesia bootstrap
mnesia(boot) ->
ok = ekka_mnesia:create_table(?TAB, [
{type, ordered_set},
{disc_copies, [node()]},
{local_content, true},
{record_name, delayed_message},
{attributes, record_info(fields, delayed_message)}]);
mnesia(copy) ->
ok = ekka_mnesia:copy_table(?TAB, disc_copies).
-spec(load(list()) -> ok).
load(_Env) ->
emqx_mod_sup:start_child(?MODULE, worker),
emqx:hook('message.publish', {?MODULE, on_message_publish, []}).
-spec(unload(list()) -> ok).
unload(_Env) ->
emqx:unhook('message.publish', {?MODULE, on_message_publish}),
emqx_mod_sup:stop_child(?MODULE).
description() ->
"EMQ X Delayed Publish Module".
on_message_publish(Msg = #message{
id = Id,
topic = <<"$delayed/", Topic/binary>>,
timestamp = Ts
}) ->
[Delay, Topic1] = binary:split(Topic, <<"/">>),
PubAt = case binary_to_integer(Delay) of
Interval when Interval < ?MAX_INTERVAL ->
Interval + erlang:round(Ts / 1000);
Timestamp ->
case (Timestamp - erlang:round(Ts / 1000)) > ?MAX_INTERVAL of
true -> error(invalid_delayed_timestamp);
false -> Timestamp
end
end,
PubMsg = Msg#message{topic = Topic1},
Headers = PubMsg#message.headers,
ok = store(#delayed_message{key = {PubAt, Id}, msg = PubMsg}),
{stop, PubMsg#message{headers = Headers#{allow_publish => false}}};
on_message_publish(Msg) ->
{ok, Msg}.
-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec(store(#delayed_message{}) -> ok).
store(DelayedMsg) ->
gen_server:call(?SERVER, {store, DelayedMsg}, infinity).
init([]) ->
{ok, ensure_stats_event(
ensure_publish_timer(#{timer => undefined, publish_at => 0}))}.
handle_call({store, DelayedMsg = #delayed_message{key = Key}}, _From, State) ->
ok = mnesia:dirty_write(?TAB, DelayedMsg),
emqx_metrics:inc('messages.delayed'),
{reply, ok, ensure_publish_timer(Key, State)};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info({timeout, TRef, do_publish}, State = #{timer := TRef}) ->
DeletedKeys = do_publish(mnesia:dirty_first(?TAB), os:system_time(seconds)),
lists:foreach(fun(Key) -> mnesia:dirty_delete(?TAB, Key) end, DeletedKeys),
{noreply, ensure_publish_timer(State#{timer := undefined, publish_at := 0})};
handle_info(stats, State = #{stats_fun := StatsFun}) ->
StatsFun(delayed_count()),
{noreply, State, hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef).
code_change({down, Vsn}, State, _Extra) when Vsn =:= "4.3.0" ->
NState = maps:with([timer, publish_at], State),
{ok, NState};
code_change(Vsn, State, _Extra) when Vsn =:= "4.3.0" ->
NState = ensure_stats_event(State),
{ok, NState}.
%% Internal functions
ensure_stats_event(State) ->
StatsFun = emqx_stats:statsfun('delayed.count', 'delayed.max'),
{ok, StatsTimer} = timer:send_interval(timer:seconds(1), stats),
State#{stats_fun => StatsFun, stats_timer => StatsTimer}.
ensure_publish_timer(State) ->
ensure_publish_timer(mnesia:dirty_first(?TAB), State).
ensure_publish_timer('$end_of_table', State) ->
State#{timer := undefined, publish_at := 0};
ensure_publish_timer({Ts, _Id}, State = #{timer := undefined}) ->
ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer({Ts, _Id}, State = #{timer := TRef, publish_at := PubAt})
when Ts < PubAt ->
ok = emqx_misc:cancel_timer(TRef),
ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer(_Key, State) ->
State.
ensure_publish_timer(Ts, Now, State) ->
Interval = max(1, Ts - Now),
TRef = emqx_misc:start_timer(timer:seconds(Interval), do_publish),
State#{timer := TRef, publish_at := Now + Interval}.
do_publish(Key, Now) ->
do_publish(Key, Now, []).
do_publish('$end_of_table', _Now, Acc) ->
Acc;
do_publish({Ts, _Id}, Now, Acc) when Ts > Now ->
Acc;
do_publish(Key = {Ts, _Id}, Now, Acc) when Ts =< Now ->
case mnesia:dirty_read(?TAB, Key) of
[] -> ok;
[#delayed_message{msg = Msg}] ->
emqx_pool:async_submit(fun emqx:publish/1, [Msg])
end,
do_publish(mnesia:dirty_next(?TAB, Key), Now, [Key|Acc]).
-spec(delayed_count() -> non_neg_integer()).
delayed_count() -> mnesia:table_info(?TAB, size).
|
4b5a66ccd5d7b763993b805e7580e385550206d3b0552bd2774880ec1bfba245 | Clozure/ccl-tests | search-bitvector.lsp | ;-*- Mode: Lisp -*-
;;;; Author:
;;;; Created: Sun Aug 25 13:06:54 2002
;;;; Contains: Tests for SEARCH on bit vectors
(in-package :cl-test)
(compile-and-load "search-aux.lsp")
(deftest search-bitvector.1
(let ((target *searched-bitvector*)
(pat #*0))
(loop for i from 0 to (1- (length target))
for tail = (subseq target i)
always
(let ((pos (search pat tail)))
(search-check pat tail pos))))
t)
(deftest search-bitvector.2
(let ((target *searched-bitvector*)
(pat #*0))
(loop for i from 1 to (length target)
always
(let ((pos (search pat target :end2 i :from-end t)))
(search-check pat target pos :end2 i :from-end t))))
t)
(deftest search-bitvector.3
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target)
unless (search-check pat target pos)
collect pat))
nil)
(deftest search-bitvector.4
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t)
unless (search-check pat target pos :from-end t)
collect pat))
nil)
(deftest search-bitvector.5
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 25 :end2 75)
unless (search-check pat target pos :start2 25 :end2 75)
collect pat))
nil)
(deftest search-bitvector.6
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 25 :end2 75)
unless (search-check pat target pos :from-end t
:start2 25 :end2 75)
collect pat))
nil)
(deftest search-bitvector.7
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 20)
unless (search-check pat target pos :start2 20)
collect pat))
nil)
(deftest search-bitvector.8
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 20)
unless (search-check pat target pos :from-end t
:start2 20)
collect pat))
nil)
(deftest search-bitvector.9
(let ((target *searched-bitvector*))
(loop for pat in (mapcar #'(lambda (x)
(map 'vector
#'(lambda (y)
(sublis '((a . 2) (b . 3)) y))
x))
*pattern-sublists*)
for pos = (search pat target :start2 20 :key #'evenp)
unless (search-check pat target pos :start2 20 :key #'evenp)
collect pat))
nil)
(deftest search-bitvector.10
(let ((target *searched-bitvector*))
(loop for pat in (mapcar #'(lambda (x)
(map 'vector
#'(lambda (y)
(sublis '((a . 2) (b . 3)) y))
x))
*pattern-sublists*)
for pos = (search pat target :from-end t :start2 20 :key 'oddp)
unless (search-check pat target pos :from-end t
:start2 20 :key 'oddp)
collect pat))
nil)
(deftest search-bitvector.11
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 20 :test (complement #'eql))
unless (search-check pat target pos :start2 20
:test (complement #'eql))
collect pat))
nil)
(deftest search-bitvector.12
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 20 :test-not #'eql)
unless (search-check pat target pos :from-end t
:start2 20 :test (complement #'eql))
collect pat))
nil)
(deftest search-bitvector.13
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
when (and (> (length pat) 0)
(let ((pos (search pat target :start1 1
:test (complement #'eql))))
(not (search-check pat target pos
:start1 1
:test (complement #'eql)))))
collect pat))
nil)
(deftest search-bitvector.14
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
when (let ((len (length pat)))
(and (> len 0)
(let ((pos (search pat target :end1 (1- len)
:test (complement #'eql))))
(not (search-check pat target pos
:end1 (1- len)
:test (complement #'eql))))))
collect pat))
nil)
(deftest search-bitvector.15
(let ((a (make-array '(10) :initial-contents '(0 1 1 0 0 0 1 0 1 1)
:fill-pointer 5
:element-type 'bit)))
(values
(search #*0 a)
(search #*0 a :from-end t)
(search #*01 a)
(search #*01 a :from-end t)
(search #*010 a)
(search #*010 a :from-end t)))
0 4 0 0 nil nil)
(deftest search-bitvector.16
(let ((pat (make-array '(3) :initial-contents '(0 1 0)
:fill-pointer 1))
(a #*01100))
(values
(search pat a)
(search pat a :from-end t)
(progn
(setf (fill-pointer pat) 2)
(search pat a))
(search pat a :from-end t)
(progn
(setf (fill-pointer pat) 3)
(search pat a))
(search pat a :from-end t)))
0 4 0 0 nil nil)
;; Order of test, test-not
(deftest search-bitvector.17
(let ((pat #*10)
(target #*000011))
(search pat target :test #'<=))
4)
(deftest search-bitvector.18
(let ((pat #*10)
(target #*000011))
(search pat target :test-not #'>))
4)
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/search-bitvector.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests for SEARCH on bit vectors
Order of test, test-not | Author :
Created : Sun Aug 25 13:06:54 2002
(in-package :cl-test)
(compile-and-load "search-aux.lsp")
(deftest search-bitvector.1
(let ((target *searched-bitvector*)
(pat #*0))
(loop for i from 0 to (1- (length target))
for tail = (subseq target i)
always
(let ((pos (search pat tail)))
(search-check pat tail pos))))
t)
(deftest search-bitvector.2
(let ((target *searched-bitvector*)
(pat #*0))
(loop for i from 1 to (length target)
always
(let ((pos (search pat target :end2 i :from-end t)))
(search-check pat target pos :end2 i :from-end t))))
t)
(deftest search-bitvector.3
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target)
unless (search-check pat target pos)
collect pat))
nil)
(deftest search-bitvector.4
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t)
unless (search-check pat target pos :from-end t)
collect pat))
nil)
(deftest search-bitvector.5
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 25 :end2 75)
unless (search-check pat target pos :start2 25 :end2 75)
collect pat))
nil)
(deftest search-bitvector.6
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 25 :end2 75)
unless (search-check pat target pos :from-end t
:start2 25 :end2 75)
collect pat))
nil)
(deftest search-bitvector.7
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 20)
unless (search-check pat target pos :start2 20)
collect pat))
nil)
(deftest search-bitvector.8
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 20)
unless (search-check pat target pos :from-end t
:start2 20)
collect pat))
nil)
(deftest search-bitvector.9
(let ((target *searched-bitvector*))
(loop for pat in (mapcar #'(lambda (x)
(map 'vector
#'(lambda (y)
(sublis '((a . 2) (b . 3)) y))
x))
*pattern-sublists*)
for pos = (search pat target :start2 20 :key #'evenp)
unless (search-check pat target pos :start2 20 :key #'evenp)
collect pat))
nil)
(deftest search-bitvector.10
(let ((target *searched-bitvector*))
(loop for pat in (mapcar #'(lambda (x)
(map 'vector
#'(lambda (y)
(sublis '((a . 2) (b . 3)) y))
x))
*pattern-sublists*)
for pos = (search pat target :from-end t :start2 20 :key 'oddp)
unless (search-check pat target pos :from-end t
:start2 20 :key 'oddp)
collect pat))
nil)
(deftest search-bitvector.11
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :start2 20 :test (complement #'eql))
unless (search-check pat target pos :start2 20
:test (complement #'eql))
collect pat))
nil)
(deftest search-bitvector.12
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
for pos = (search pat target :from-end t :start2 20 :test-not #'eql)
unless (search-check pat target pos :from-end t
:start2 20 :test (complement #'eql))
collect pat))
nil)
(deftest search-bitvector.13
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
when (and (> (length pat) 0)
(let ((pos (search pat target :start1 1
:test (complement #'eql))))
(not (search-check pat target pos
:start1 1
:test (complement #'eql)))))
collect pat))
nil)
(deftest search-bitvector.14
(let ((target *searched-bitvector*))
(loop for pat in *pattern-subbitvectors*
when (let ((len (length pat)))
(and (> len 0)
(let ((pos (search pat target :end1 (1- len)
:test (complement #'eql))))
(not (search-check pat target pos
:end1 (1- len)
:test (complement #'eql))))))
collect pat))
nil)
(deftest search-bitvector.15
(let ((a (make-array '(10) :initial-contents '(0 1 1 0 0 0 1 0 1 1)
:fill-pointer 5
:element-type 'bit)))
(values
(search #*0 a)
(search #*0 a :from-end t)
(search #*01 a)
(search #*01 a :from-end t)
(search #*010 a)
(search #*010 a :from-end t)))
0 4 0 0 nil nil)
(deftest search-bitvector.16
(let ((pat (make-array '(3) :initial-contents '(0 1 0)
:fill-pointer 1))
(a #*01100))
(values
(search pat a)
(search pat a :from-end t)
(progn
(setf (fill-pointer pat) 2)
(search pat a))
(search pat a :from-end t)
(progn
(setf (fill-pointer pat) 3)
(search pat a))
(search pat a :from-end t)))
0 4 0 0 nil nil)
(deftest search-bitvector.17
(let ((pat #*10)
(target #*000011))
(search pat target :test #'<=))
4)
(deftest search-bitvector.18
(let ((pat #*10)
(target #*000011))
(search pat target :test-not #'>))
4)
|
a9c0b9d9ff37581fa66475a1cecae9d26b43963f71b6119fdd0dc768839c41c4 | qfpl/reflex-workshop | Apply.hs | |
Copyright : ( c ) 2018 , Commonwealth Scientific and Industrial Research Organisation
License : :
Stability : experimental
Portability : non - portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Exercises.Behaviors.Instances.Apply (
applyExercise
) where
import Reflex
applyExercise :: Reflex t
=> Behavior t Int
-> Behavior t Int
-> Behavior t Int
applyExercise bIn1 bIn2 =
pure 0
| null | https://raw.githubusercontent.com/qfpl/reflex-workshop/244ef13fb4b2e884f455eccc50072e98d1668c9e/src/Exercises/Behaviors/Instances/Apply.hs | haskell | |
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Stability : experimental
Portability : non-portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Exercises.Behaviors.Instances.Apply (
applyExercise
) where
import Reflex
applyExercise :: Reflex t
=> Behavior t Int
-> Behavior t Int
-> Behavior t Int
applyExercise bIn1 bIn2 =
pure 0
|
|
ddde715a1714c6fd1ab61bc73c4c32858dadab8d89770c545a859b8e5a1c50fd | tejasbubane/haskell-book-code | Pair.hs | module Pair where
import Test.QuickCheck
data Pair a b = Pair a b deriving (Eq, Show)
pairGen :: (Arbitrary a, Arbitrary b) => Gen (Pair a b)
pairGen = do
a <- arbitrary
b <- arbitrary
return $ Pair a b
instance (Arbitrary a, Arbitrary b) => Arbitrary (Pair a b) where
arbitrary = pairGen
arbPairIntInt :: Gen (Pair Int Int)
arbPairIntInt = pairGen
arbPairIntString :: Gen (Pair Int String)
arbPairIntString = pairGen
main :: IO ()
main = do
sample arbPairIntInt
sample arbPairIntString
| null | https://raw.githubusercontent.com/tejasbubane/haskell-book-code/deaac8ab4db0ae8692d0278826528bb8a746ed82/ch-14/testing/Pair.hs | haskell | module Pair where
import Test.QuickCheck
data Pair a b = Pair a b deriving (Eq, Show)
pairGen :: (Arbitrary a, Arbitrary b) => Gen (Pair a b)
pairGen = do
a <- arbitrary
b <- arbitrary
return $ Pair a b
instance (Arbitrary a, Arbitrary b) => Arbitrary (Pair a b) where
arbitrary = pairGen
arbPairIntInt :: Gen (Pair Int Int)
arbPairIntInt = pairGen
arbPairIntString :: Gen (Pair Int String)
arbPairIntString = pairGen
main :: IO ()
main = do
sample arbPairIntInt
sample arbPairIntString
|
|
ddbd843135a82afdf5e3e2203a5cc755353a355464f2c1adbd0577d20f927553 | bhaskara/programmable-reinforcement-learning | hash-table-array.lisp |
(defpackage hash-table-array
(:documentation "Defines a data type for arrays of hash tables.
Types
-----
hash-table-array
Functions
---------
make-hta
get-val
set-val")
(:nicknames hta)
(:use
utils
cl)
(:export
hash-table-array
make-hta
get-val
set-val))
(in-package hta)
(defstruct (hash-table-array (:conc-name hta-))
key-fn
a)
(defun make-hta (key-fn max-key &key (test #'equalp))
"make-hta KEY-FN MAX-KEY &key (test #'equalp)
Make an array of hash tables using KEY-FN, which maps objects to nonnegative integers. The individual hash tables use TEST as the test."
(make-hash-table-array :key-fn key-fn
:a (loop with a = (make-array (1+ max-key))
for i below (1+ max-key)
do (setf (aref a i) (make-hash-table :test test))
finally (return a))))
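;; Illustrative usage sketch (comment only, added for exposition; the calls
;; below are an example, not part of this package's source). With a key
;; function that maps a list to its length:
;;
;;   (let ((h (make-hta #'length 8)))
;;     (set-val '(a b) h 'two)   ; goes in the bucket for key 2
;;     (get-val '(a b) h))       ; should return TWO
;;
;; get-val and set-val are defined just below; note that set-val takes the
;; object, then the hash-table-array, then the new value.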
(defun get-val (x hta)
"get-val X HTA. Like gethash"
(gethash (canonicalize x) (aref (hta-a hta) (funcall (hta-key-fn hta) x))))
(defun set-val (x hta new-val)
"set-val KEY NEW-VAL HTA. Like (setf (gethash KEY H) NEW-VAL)."
(setf (gethash (canonicalize x) (aref (hta-a hta) (funcall (hta-key-fn hta) x)))
new-val))
| null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/misc/hash-table-array.lisp | lisp |
(defpackage hash-table-array
(:documentation "Defines a data type for arrays of hash tables.
Types
-----
hash-table-array
Functions
---------
make-hta
get-val
set-val")
(:nicknames hta)
(:use
utils
cl)
(:export
hash-table-array
make-hta
get-val
set-val))
(in-package hta)
(defstruct (hash-table-array (:conc-name hta-))
key-fn
a)
(defun make-hta (key-fn max-key &key (test #'equalp))
"make-hta KEY-FN MAX-KEY &key (test #'equalp)
Make an array of hash tables using KEY-FN, which maps objects to nonnegative integers. The individual hash tables use TEST as the test."
(make-hash-table-array :key-fn key-fn
:a (loop with a = (make-array (1+ max-key))
for i below (1+ max-key)
do (setf (aref a i) (make-hash-table :test test))
finally (return a))))
(defun get-val (x hta)
"get-val X HTA. Like gethash"
(gethash (canonicalize x) (aref (hta-a hta) (funcall (hta-key-fn hta) x))))
(defun set-val (x hta new-val)
"set-val KEY NEW-VAL HTA. Like (setf (gethash KEY H) NEW-VAL)."
(setf (gethash (canonicalize x) (aref (hta-a hta) (funcall (hta-key-fn hta) x)))
new-val))
|
|
a975bdb59f22462bf56d20867210086307186f9196e9203cfd58ad0f401a3d53 | anchpop/wise_mans_haskell | goodDo.hs | willWork = do
putStrLn "hello"
two <- pure ((1 + 1) :: Int)
putStrLn . show $ two | null | https://raw.githubusercontent.com/anchpop/wise_mans_haskell/021ca3f3d96ebc0ecf2daf1a802fc33d067e24cc/haskelltests/should_compile/goodDo.hs | haskell | willWork = do
putStrLn "hello"
two <- pure ((1 + 1) :: Int)
putStrLn . show $ two |
|
7259d9a44a86a7ce385f1a2e3901b8c321e7ca4f05a32bed769c3db4eba57f9f | nikita-volkov/rerebase | Class.hs | module Control.Monad.Error.Class
(
module Rebase.Control.Monad.Error.Class
)
where
import Rebase.Control.Monad.Error.Class
| null | https://raw.githubusercontent.com/nikita-volkov/rerebase/25895e6d8b0c515c912c509ad8dd8868780a74b6/library/Control/Monad/Error/Class.hs | haskell | module Control.Monad.Error.Class
(
module Rebase.Control.Monad.Error.Class
)
where
import Rebase.Control.Monad.Error.Class
|
|
ea7dd7d4bb140dd8f532ecca196b437f27e677ffa3593b27c5674aeec5197ee8 | iambrj/imin | select-instructions.rkt | #lang racket
(require "utilities.rkt"
"constants.rkt"
"utils.rkt")
(provide select-instructions)
(define (pmask t [mask 0])
(match t
[`(Vector) mask]
[`(Vector (Vector . ,_))
(bitwise-ior mask 1)]
[`(Vector ,_) mask]
[`(Vector . ((Vector . ,_) . ,rest))
(pmask `(Vector . ,rest) (arithmetic-shift (bitwise-ior mask 1) 1))]
[`(Vector . (,t . ,rest))
(pmask `(Vector . ,rest) (arithmetic-shift mask 1))]
[else (error "Couldn't make pmask for " t)]))
; second argument of cmp cannot be an immediate; generate a move into a temporary register if it is
(define (cmp-tmp-mov a)
(match (si-atm a)
[(Imm a)
(values `(,(Instr 'movq `(,(Imm a) ,(Reg 'rax)))) (Reg 'rax))]
[_ (values '() a)]))
(define (si-atm e)
(match e
[(Int n) (Imm n)]
[(Var v) (Var v)]
[(Bool #f) (Imm 0)]
[(Bool #t) (Imm 1)]
[(GlobalValue g) (Global g)]
[(HasType x _) (si-atm x)]
[else (error "si-atm unhandled case " e)]))
(define (si-stmt e)
(match e
[(Assign (Var v) (HasType e t)) (si-stmt (Assign (Var v) e))]
[(Assign (Var v) (Int i)) `(,(Instr 'movq `(,(Imm i) ,(Var v))))]
[(Assign (Var v) (Bool b)) `(,(Instr 'movq `(,(si-atm (Bool b)) ,(Var v))))]
[(Assign (Var v) (Var u)) `(,(Instr 'movq `(,(Var u) ,(Var v))))]
[(Assign (Var v) (GlobalValue g)) `(,(Instr 'movq `(,(Global g) ,(Var v))))]
[(Assign (Var v) (Void)) `(,(Instr 'movq `(,(Imm 0) ,(Var v))))]
[(Assign (Var v) (Prim 'read '()))
`(,(Callq `read_int 0)
,(Instr 'movq `(,(Reg 'rax) ,(Var v))))]
[(Assign (Var v) (Prim '- `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Var v)))
,(Instr 'negq `(,(Var v)))))]
[(Assign (Var v) (Prim '+ `(,a1 ,a2)))
(let ([a1 (si-atm a1)]
[a2 (si-atm a2)])
(cond
; v = (+ v a2)
[(equal? (Var v) a1) `(,(Instr 'addq `(,(si-atm a2) ,(Var v))))]
; v = (+ a1 v)
[(equal? (Var v) a2) `(,(Instr 'addq `(,(si-atm a1) ,(Var v))))]
; v = (+ a1 a2)
[else `(,(Instr 'movq `(,a1 ,(Var v))) ,(Instr 'addq `(,a2 ,(Var v))))]))]
[(Assign (Var v) (Prim 'not `(,(Var v))))
`(,(Instr 'xorq `(,(Imm 1) ,(Var v))))]
[(Assign (Var v) (Prim 'not `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Var v)))
,(Instr 'xorq `(,(Imm 1) ,(Var v)))))]
[(Assign (Var v) (Prim 'eq? `(,a1 ,a2)))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(Instr 'set `(e ,(Reg 'al)))
,(Instr 'movzbq `(,(Reg 'al) ,(Var v))))))]
[(Assign (Var v) (Prim '< `(,a1 ,a2)))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(Instr 'set `(l ,(Reg 'al)))
,(Instr 'movzbq `(,(Reg 'al) ,(Var v))))))]
[(Assign (Var x) (Prim 'vector-ref `(,v ,(Int n))))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,(Deref 'r11 (* 8 (+ n 1))) ,(Var x))))]
[(Assign (Var x) (Prim 'vector-set! `(,v ,(Int n) ,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,a ,(Deref 'r11 (* 8 (+ n 1)))))
,(Instr 'movq `(,(Imm 0) ,(Var x)))))]
; Invariant : allocate is only called when it is guaranteed that there is
; enough space
; Invariant : allocate can only appear in rhs of a let
[(Assign (Var v) (Allocate len t))
(let ([tag (bitwise-ior 1 ; In FromSpace, not yet copied
(arithmetic-shift len 1) ; size of tuple
(arithmetic-shift (pmask t) 7))])
`(,(Instr 'movq `(,(Global 'free_ptr) ,(Reg 'r11)))
,(Instr 'addq `(,(Imm (* 8 (+ len 1))) ,(Global 'free_ptr)))
,(Instr 'movq `(,(Imm tag) ,(Deref 'r11 0)))
,(Instr 'movq `(,(Reg 'r11) ,(Var v)))))]
[(Assign (Var v) (Collect bytes))
`(,(Instr 'movq `(,(Reg 'r15) ,(Reg 'rdi)))
,(Instr 'movq `(,(Imm bytes) ,(Reg 'rsi)))
,(Callq 'collect 2))]
[(Assign (Var v) (FunRef f))
`(,(Instr 'leaq `(,(FunRef f) ,(Var v))))]
[(Assign (Var v) (Call fun arg*))
(let ([movs (map (lambda (idx)
(Instr 'movq `(,(si-atm (list-ref arg* idx))
,(list-ref param-reg* idx))))
(range 0 (length arg*)))])
(append movs
`(,(IndirectCallq fun (length arg*))
,(Instr 'movq `(,(Reg 'rax) ,(Var v))))))]
[else (error "si-stmt unhandled case : " e)]))
(define (si-tail e c)
(match e
[(Return (Var v)) `(,(Instr 'movq `(,(Var v) ,(Reg 'rax))) ,(Jmp c))]
[(Return (Int i)) `(,(Instr 'movq `(,(Imm i) ,(Reg 'rax))) ,(Jmp c))]
[(Return (Prim 'read '()))
`(,(Callq 'read_int 0)
,(Jmp c))]
[(Return (Prim '- `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Reg 'rax)))
,(Instr 'negq `(,(Reg 'rax)))
,(Jmp c)))]
[(Return (Prim '+ `(,a1 ,a2)))
(let ([a1 (si-atm a1)]
[a2 (si-atm a2)])
`(,(Instr 'movq `(,a1 ,(Reg 'rax)))
,(Instr 'addq `(,a2 ,(Reg 'rax)))
,(Jmp c)))]
[(Return (Prim 'vector-ref `(,v ,(Int n))))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,(Deref 'r11 (* 8 (+ n 1))) ,(Reg 'rax)))
,(Jmp c))]
[(Return (Prim 'vector-set! `(,v ,(Int n) ,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,a ,(Deref 'r11 (* 8 (+ n 1)))))
,(Instr 'movq `(,(Imm 0) ,(Reg 'rax)))
,(Jmp c)))]
[(Seq stmt tail)
(let ([s (si-stmt stmt)]
[t (si-tail tail c)])
(append s t))]
[(Goto l) `(,(Jmp l))]
[(IfStmt (Prim 'eq? `(,a1 ,a2)) (Goto l1) (Goto l2))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(JmpIf 'e l1)
,(Jmp l2))))]
[(IfStmt (Prim '< `(,a1 ,a2)) (Goto l1) (Goto l2))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(JmpIf 'l l1)
,(Jmp l2))))]
; Invariant : grammar restricts to immediate integer references, don't have
; to worry about expressions evaluating to integers
[(IfStmt (Prim 'vector-ref `(,v ,(Int i))) (Goto l1) (Goto l2))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'cmpq `(,(Imm 1) ,(Deref 'r11 (* 8 (+ i 1)))))
,(JmpIf 'e l1)
,(Jmp l2))]
[(TailCall fun arg*)
(let ([movs (map (lambda (idx)
(Instr 'movq `(,(si-atm (list-ref arg* idx))
,(list-ref param-reg* idx))))
(range 0 (length arg*)))])
(append movs
`(,(TailJmp fun (length arg*)))))]
[else (error "si-tail unhandled case : " e)]))
(define (si-def d)
(match d
[(Def name param* rty info label-tail*)
(let* ([param* (map param-name param*)]
[arg-mov* (for/list ([idx (in-range (length param*))])
(Instr 'movq `(,(list-ref param-reg* idx)
,(Var (list-ref param* idx)))))]
[start-label (gen-start-label name)]
[concl-label (gen-concl-label name)]
[start `(,start-label
. ,(Block '() (append arg-mov*
(si-tail (dict-ref label-tail*
start-label)
concl-label))))]
[rest (for/list ([label-tail label-tail*])
(let ([label (car label-tail)]
[tail (cdr label-tail)])
`(,label . ,(Block '() (si-tail tail concl-label)))))])
(Def name param* 'Integer `((num-params . ,(length param*)) . ,info)
`(,start . ,rest)))]))
(define (gen-start-label name)
(string->symbol (string-append (symbol->string name)
"start")))
(define (gen-concl-label name)
(string->symbol (string-append (symbol->string name)
"conclusion")))
; select-instructions : C0 -> pseudo-x86
(define (select-instructions p)
(match p
[(ProgramDefs info def*)
(ProgramDefs info (map si-def def*))]))
| null | https://raw.githubusercontent.com/iambrj/imin/6365961b9d368c1688f0d43881a98c65a9596e0b/select-instructions.rkt | racket | v = (+ v a2)
v = (+ a1 v)
v = (+ a1 a2)
Invariant : allocate is only called when it is guaranteed that there is
enough space
Invariant : allocate can only appear in rhs of a let
size of tuple
Invariant : grammar restricts to immediate integer references, don't have
to worry about expressions evaluating to integers
select-instructions : C0 -> pseudo-x86 | #lang racket
(require "utilities.rkt"
"constants.rkt"
"utils.rkt")
(provide select-instructions)
(define (pmask t [mask 0])
(match t
[`(Vector) mask]
[`(Vector (Vector . ,_))
(bitwise-ior mask 1)]
[`(Vector ,_) mask]
[`(Vector . ((Vector . ,_) . ,rest))
(pmask `(Vector . ,rest) (arithmetic-shift (bitwise-ior mask 1) 1))]
[`(Vector . (,t . ,rest))
(pmask `(Vector . ,rest) (arithmetic-shift mask 1))]
[else (error "Couldn't make pmask for " t)]))
; second argument of cmp cannot be an immediate; generate a move into a temporary register if it is
(define (cmp-tmp-mov a)
(match (si-atm a)
[(Imm a)
(values `(,(Instr 'movq `(,(Imm a) ,(Reg 'rax)))) (Reg 'rax))]
[_ (values '() a)]))
(define (si-atm e)
(match e
[(Int n) (Imm n)]
[(Var v) (Var v)]
[(Bool #f) (Imm 0)]
[(Bool #t) (Imm 1)]
[(GlobalValue g) (Global g)]
[(HasType x _) (si-atm x)]
[else (error "si-atm unhandled case " e)]))
(define (si-stmt e)
(match e
[(Assign (Var v) (HasType e t)) (si-stmt (Assign (Var v) e))]
[(Assign (Var v) (Int i)) `(,(Instr 'movq `(,(Imm i) ,(Var v))))]
[(Assign (Var v) (Bool b)) `(,(Instr 'movq `(,(si-atm (Bool b)) ,(Var v))))]
[(Assign (Var v) (Var u)) `(,(Instr 'movq `(,(Var u) ,(Var v))))]
[(Assign (Var v) (GlobalValue g)) `(,(Instr 'movq `(,(Global g) ,(Var v))))]
[(Assign (Var v) (Void)) `(,(Instr 'movq `(,(Imm 0) ,(Var v))))]
[(Assign (Var v) (Prim 'read '()))
`(,(Callq `read_int 0)
,(Instr 'movq `(,(Reg 'rax) ,(Var v))))]
[(Assign (Var v) (Prim '- `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Var v)))
,(Instr 'negq `(,(Var v)))))]
[(Assign (Var v) (Prim '+ `(,a1 ,a2)))
(let ([a1 (si-atm a1)]
[a2 (si-atm a2)])
(cond
[(equal? (Var v) a1) `(,(Instr 'addq `(,(si-atm a2) ,(Var v))))]
[(equal? (Var v) a2) `(,(Instr 'addq `(,(si-atm a1) ,(Var v))))]
[else `(,(Instr 'movq `(,a1 ,(Var v))) ,(Instr 'addq `(,a2 ,(Var v))))]))]
[(Assign (Var v) (Prim 'not `(,(Var v))))
`(,(Instr 'xorq `(,(Imm 1) ,(Var v))))]
[(Assign (Var v) (Prim 'not `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Var v)))
,(Instr 'xorq `(,(Imm 1) ,(Var v)))))]
[(Assign (Var v) (Prim 'eq? `(,a1 ,a2)))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(Instr 'set `(e ,(Reg 'al)))
,(Instr 'movzbq `(,(Reg 'al) ,(Var v))))))]
[(Assign (Var v) (Prim '< `(,a1 ,a2)))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(Instr 'set `(l ,(Reg 'al)))
,(Instr 'movzbq `(,(Reg 'al) ,(Var v))))))]
[(Assign (Var x) (Prim 'vector-ref `(,v ,(Int n))))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,(Deref 'r11 (* 8 (+ n 1))) ,(Var x))))]
[(Assign (Var x) (Prim 'vector-set! `(,v ,(Int n) ,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,a ,(Deref 'r11 (* 8 (+ n 1)))))
,(Instr 'movq `(,(Imm 0) ,(Var x)))))]
[(Assign (Var v) (Allocate len t))
(let ([tag (bitwise-ior 1
(arithmetic-shift len 1)
(arithmetic-shift (pmask t) 7))])
`(,(Instr 'movq `(,(Global 'free_ptr) ,(Reg 'r11)))
,(Instr 'addq `(,(Imm (* 8 (+ len 1))) ,(Global 'free_ptr)))
,(Instr 'movq `(,(Imm tag) ,(Deref 'r11 0)))
,(Instr 'movq `(,(Reg 'r11) ,(Var v)))))]
[(Assign (Var v) (Collect bytes))
`(,(Instr 'movq `(,(Reg 'r15) ,(Reg 'rdi)))
,(Instr 'movq `(,(Imm bytes) ,(Reg 'rsi)))
,(Callq 'collect 2))]
[(Assign (Var v) (FunRef f))
`(,(Instr 'leaq `(,(FunRef f) ,(Var v))))]
[(Assign (Var v) (Call fun arg*))
(let ([movs (map (lambda (idx)
(Instr 'movq `(,(si-atm (list-ref arg* idx))
,(list-ref param-reg* idx))))
(range 0 (length arg*)))])
(append movs
`(,(IndirectCallq fun (length arg*))
,(Instr 'movq `(,(Reg 'rax) ,(Var v))))))]
[else (error "si-stmt unhandled case : " e)]))
(define (si-tail e c)
(match e
[(Return (Var v)) `(,(Instr 'movq `(,(Var v) ,(Reg 'rax))) ,(Jmp c))]
[(Return (Int i)) `(,(Instr 'movq `(,(Imm i) ,(Reg 'rax))) ,(Jmp c))]
[(Return (Prim 'read '()))
`(,(Callq 'read_int 0)
,(Jmp c))]
[(Return (Prim '- `(,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,a ,(Reg 'rax)))
,(Instr 'negq `(,(Reg 'rax)))
,(Jmp c)))]
[(Return (Prim '+ `(,a1 ,a2)))
(let ([a1 (si-atm a1)]
[a2 (si-atm a2)])
`(,(Instr 'movq `(,a1 ,(Reg 'rax)))
,(Instr 'addq `(,a2 ,(Reg 'rax)))
,(Jmp c)))]
[(Return (Prim 'vector-ref `(,v ,(Int n))))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,(Deref 'r11 (* 8 (+ n 1))) ,(Reg 'rax)))
,(Jmp c))]
[(Return (Prim 'vector-set! `(,v ,(Int n) ,a)))
(let ([a (si-atm a)])
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'movq `(,a ,(Deref 'r11 (* 8 (+ n 1)))))
,(Instr 'movq `(,(Imm 0) ,(Reg 'rax)))
,(Jmp c)))]
[(Seq stmt tail)
(let ([s (si-stmt stmt)]
[t (si-tail tail c)])
(append s t))]
[(Goto l) `(,(Jmp l))]
[(IfStmt (Prim 'eq? `(,a1 ,a2)) (Goto l1) (Goto l2))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(JmpIf 'e l1)
,(Jmp l2))))]
[(IfStmt (Prim '< `(,a1 ,a2)) (Goto l1) (Goto l2))
(let-values ([(tmp a1) (cmp-tmp-mov a1)]
[(a2) (si-atm a2)])
(append tmp
`(,(Instr 'cmpq `(,a2 ,a1))
,(JmpIf 'l l1)
,(Jmp l2))))]
[(IfStmt (Prim 'vector-ref `(,v ,(Int i))) (Goto l1) (Goto l2))
`(,(Instr 'movq `(,v ,(Reg 'r11)))
,(Instr 'cmpq `(,(Imm 1) ,(Deref 'r11 (* 8 (+ i 1)))))
,(JmpIf 'e l1)
,(Jmp l2))]
[(TailCall fun arg*)
(let ([movs (map (lambda (idx)
(Instr 'movq `(,(si-atm (list-ref arg* idx))
,(list-ref param-reg* idx))))
(range 0 (length arg*)))])
(append movs
`(,(TailJmp fun (length arg*)))))]
[else (error "si-tail unhandled case : " e)]))
(define (si-def d)
(match d
[(Def name param* rty info label-tail*)
(let* ([param* (map param-name param*)]
[arg-mov* (for/list ([idx (in-range (length param*))])
(Instr 'movq `(,(list-ref param-reg* idx)
,(Var (list-ref param* idx)))))]
[start-label (gen-start-label name)]
[concl-label (gen-concl-label name)]
[start `(,start-label
. ,(Block '() (append arg-mov*
(si-tail (dict-ref label-tail*
start-label)
concl-label))))]
[rest (for/list ([label-tail label-tail*])
(let ([label (car label-tail)]
[tail (cdr label-tail)])
`(,label . ,(Block '() (si-tail tail concl-label)))))])
(Def name param* 'Integer `((num-params . ,(length param*)) . ,info)
`(,start . ,rest)))]))
(define (gen-start-label name)
(string->symbol (string-append (symbol->string name)
"start")))
(define (gen-concl-label name)
(string->symbol (string-append (symbol->string name)
"conclusion")))
(define (select-instructions p)
(match p
[(ProgramDefs info def*)
(ProgramDefs info (map si-def def*))]))
|
12abbdf5cd882ac2b9f2c456b93097f06bb7e73ca70f6d289323630ced98f81d | MaskRay/CamlFeatherweight | exe.ml | let size_offset =
if Config.word_size = 32 then
20
else
36
let gcsize_offset = 8
let tag_hd hd = Int32.(logand hd 255l |> to_int)
let tag_hd' hd = Int64.(logand hd 255L |> to_int)
let size_hd hd = Int32.(shift_right hd size_offset |> to_int)
let size_hd' hd = Int64.(shift_right hd size_offset |> to_int)
let string_size_hd hd = Int32.(shift_right hd (gcsize_offset+1) |> to_int)
let string_size_hd' hd = Int64.(shift_right hd (gcsize_offset+1) |> to_int)
let no_scan_tag = (1 lsl 8) - 5
let closure_tag = no_scan_tag-1
let abstract_tag = no_scan_tag
let string_tag = no_scan_tag+1
let array_tag = no_scan_tag+2
let double_tag = no_scan_tag+3
let make_header tag size =
Int32.(add (shift_left (of_int size) size_offset) (of_int tag))
let make_header' tag size =
Int64.(add (shift_left (of_int size) size_offset) (of_int tag))
let make_string_header size =
Int32.(add (shift_left (of_int size) (gcsize_offset+1)) (of_int string_tag))
let make_string_header' size =
Int64.(add (shift_left (of_int size) (gcsize_offset+1)) (of_int string_tag))
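(* Illustrative sketch (comment only, added for exposition): with the 32-bit
   word size, size_offset is 20, so [make_header closure_tag 2] packs the size
   above bit 20 and the tag (closure_tag = 250) in the low byte, i.e.
   (2 lsl 20) + 250 as an Int32; [size_hd] and [tag_hd] recover 2 and 250 from
   it. String headers instead store the size above bit gcsize_offset + 1 = 9
   together with string_tag (= 252). *)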
let name_tag tag =
if tag = closure_tag then
"closure"
else if tag = double_tag then
"double"
else if tag = array_tag then
"array"
else if tag = string_tag then
"string"
else
raise @@ Invalid_argument(Printf.sprintf "Unknown tag %d" tag)
let input_bin_int32 ic =
let b0 = input_byte ic |> Int32.of_int in
let b1 = input_byte ic |> Int32.of_int in
let b2 = input_byte ic |> Int32.of_int in
let b3 = input_byte ic |> Int32.of_int in
Int32.(mul b3 256l |> add b2 |> mul 256l |> add b1 |> mul 256l |> add b0)
let input_bin_int ic =
input_bin_int32 ic |> Int32.to_int
let output_bin_int oc i =
output_byte oc (i land 255);
output_byte oc (i lsr 8 land 255);
output_byte oc (i lsr 16 land 255);
output_byte oc (i lsr 24 land 255)
| null | https://raw.githubusercontent.com/MaskRay/CamlFeatherweight/989319a830dcf1ae30a4b4ccefb59f73bf966363/exe.ml | ocaml | let size_offset =
if Config.word_size = 32 then
20
else
36
let gcsize_offset = 8
let tag_hd hd = Int32.(logand hd 255l |> to_int)
let tag_hd' hd = Int64.(logand hd 255L |> to_int)
let size_hd hd = Int32.(shift_right hd size_offset |> to_int)
let size_hd' hd = Int64.(shift_right hd size_offset |> to_int)
let string_size_hd hd = Int32.(shift_right hd (gcsize_offset+1) |> to_int)
let string_size_hd' hd = Int64.(shift_right hd (gcsize_offset+1) |> to_int)
let no_scan_tag = (1 lsl 8) - 5
let closure_tag = no_scan_tag-1
let abstract_tag = no_scan_tag
let string_tag = no_scan_tag+1
let array_tag = no_scan_tag+2
let double_tag = no_scan_tag+3
let make_header tag size =
Int32.(add (shift_left (of_int size) size_offset) (of_int tag))
let make_header' tag size =
Int64.(add (shift_left (of_int size) size_offset) (of_int tag))
let make_string_header size =
Int32.(add (shift_left (of_int size) (gcsize_offset+1)) (of_int string_tag))
let make_string_header' size =
Int64.(add (shift_left (of_int size) (gcsize_offset+1)) (of_int string_tag))
let name_tag tag =
if tag = closure_tag then
"closure"
else if tag = double_tag then
"double"
else if tag = array_tag then
"array"
else if tag = string_tag then
"string"
else
raise @@ Invalid_argument(Printf.sprintf "Unknown tag %d" tag)
let input_bin_int32 ic =
let b0 = input_byte ic |> Int32.of_int in
let b1 = input_byte ic |> Int32.of_int in
let b2 = input_byte ic |> Int32.of_int in
let b3 = input_byte ic |> Int32.of_int in
Int32.(mul b3 256l |> add b2 |> mul 256l |> add b1 |> mul 256l |> add b0)
let input_bin_int ic =
input_bin_int32 ic |> Int32.to_int
let output_bin_int oc i =
output_byte oc (i land 255);
output_byte oc (i lsr 8 land 255);
output_byte oc (i lsr 16 land 255);
output_byte oc (i lsr 24 land 255)
|
|
10a95cd84d221a9662910122b4fedfbe12d079cc7acc0066c4ebaa0e264dacfb | ollef/sixten | LanguageServer.hs | # LANGUAGE DisambiguateRecordFields #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE OverloadedLists #
{-# LANGUAGE OverloadedStrings #-}
module LanguageServer where
import Protolude hiding (state)
import Control.Concurrent.STM as STM
import Control.Lens hiding (unsnoc)
import Data.Default (def)
import qualified Data.HashMap.Lazy as HashMap
import qualified Data.Map as Map
import Data.Text(Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Language.Haskell.LSP.Control as LSP
import qualified Language.Haskell.LSP.Core
import qualified Language.Haskell.LSP.Core as LSP
import qualified Language.Haskell.LSP.Diagnostics as LSP
import qualified Language.Haskell.LSP.Messages as LSP
import qualified Language.Haskell.LSP.Types as LSP
import qualified Language.Haskell.LSP.Types.Lens as LSP
import qualified Language.Haskell.LSP.VFS as LSP
import Text.Parsix.Position
import qualified Yi.Rope as Yi
import Driver
import Driver.Query
import qualified Effect.Context as Context
import Elaboration.TypeOf
import LanguageServer.Hover as Hover
import SourceLoc
import Syntax
import Util
run :: IO ()
run = do
messageQueue <- newTQueueIO
_ <- LSP.run
( \_ -> Right ()
, \lf -> do
_ <- forkIO $ messagePump lf $ atomically $ readTQueue messageQueue
return Nothing
)
(handlers $ atomically . writeTQueue messageQueue)
options
Nothing -- (Just "sixten-lsp.log")
return ()
handlers :: (LSP.FromClientMessage -> IO ()) -> LSP.Handlers
handlers sendMessage = def
{ LSP.initializedHandler = Just $ sendMessage . LSP.NotInitialized
, LSP.hoverHandler = Just $ sendMessage . LSP.ReqHover
, LSP.didOpenTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidOpenTextDocument
, LSP.didSaveTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidSaveTextDocument
, LSP.didChangeTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidChangeTextDocument
, LSP.didCloseTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidCloseTextDocument
}
options :: LSP.Options
options = def
{ Language.Haskell.LSP.Core.textDocumentSync = Just $ LSP.TextDocumentSyncOptions
{ LSP._openClose = Just True
, LSP._change = Just LSP.TdSyncIncremental
, LSP._willSave = Just False
, LSP._willSaveWaitUntil = Just False
, LSP._save = Just $ LSP.SaveOptions $ Just False
}
}
messagePump :: LSP.LspFuncs () -> IO LSP.FromClientMessage -> IO ()
messagePump lf receiveMessage = do
state <- Driver.initialState
forever $ do
message <- receiveMessage
case message of
LSP.NotInitialized _ ->
return ()
LSP.NotDidOpenTextDocument notification -> do
sendNotification lf "messagePump: processing NotDidOpenTextDocument"
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
version = notification ^. LSP.params . LSP.textDocument . LSP.version
fileName = LSP.uriToFilePath document
sendNotification lf $ "fileName = " <> show fileName
sendDiagnostics lf state document $ Just version
LSP.NotDidChangeTextDocument notification -> do
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
version = notification ^. LSP.params . LSP.textDocument . LSP.version
sendNotification lf $ "messagePump:processing NotDidChangeTextDocument: uri=" <> show document
sendDiagnostics lf state document version
LSP.NotDidSaveTextDocument notification -> do
sendNotification lf "messagePump: processing NotDidSaveTextDocument"
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
fileName = LSP.uriToFilePath document
sendNotification lf $ "fileName = " <> show fileName
sendDiagnostics lf state document Nothing
LSP.ReqHover req -> do
sendNotification lf $ "messagePump: HoverRequest: " <> show req
let
LSP.TextDocumentPositionParams
(LSP.TextDocumentIdentifier document)
pos@(LSP.Position line char)
= req ^. LSP.params
sendNotification lf $ shower document
sendNotification lf $ shower pos
(_version, contents) <- fileContents lf document
let
LSP.Uri uriText = document
uriStr = Text.unpack uriText
((types, typeOfErrs), _) <- Driver.incrementallyExecuteVirtualFile state uriStr contents $ do
defs <- fetch CheckAll
runHover $ do
(span, expr) <- hoverDefs (Hover.inside line char)
[ (n, loc, d, t)
| (n, (loc, d, t)) <- concatMap HashMap.toList defs
]
typ <- typeOf' voidArgs expr
ctx <- Context.getContext
return (span, ctx, expr, typ)
sendNotification lf $ "result " <> shower typeOfErrs
let
response = case types of
[] -> Nothing
_ -> do
let Just (_, (range, ctx, expr, typ)) = unsnoc types
Just $ LSP.Hover
{ LSP._contents =
LSP.List
[ LSP.PlainString
$ showWide
$ pretty (traverse Context.prettyVar expr ctx)
<> " : "
<> pretty (traverse Context.prettyVar typ ctx)
]
, LSP._range = Just
$ LSP.Range
{ LSP._start = LSP.Position
(visualRow $ spanStart range)
(visualColumn $ spanStart range)
, LSP._end = LSP.Position
(visualRow $ spanEnd range)
(visualColumn $ spanEnd range)
}
}
LSP.sendFunc lf $ LSP.RspHover $ LSP.makeResponseMessage req response
_ ->
return ()
-------------------------------------------------------------------------------
sendDiagnostics
:: LSP.LspFuncs ()
-> DriverState
-> LSP.Uri
-> LSP.TextDocumentVersion
-> IO ()
sendDiagnostics lf state document version = do
(currentVersion, contents) <- fileContents lf document
case (version, currentVersion) of
(Just v, Just cv)
| v < cv ->
return ()
_ -> do
let
LSP.Uri uriText = document
uriStr = Text.unpack uriText
(_, errors) <- Driver.incrementallyExecuteVirtualFile state uriStr contents $ fetch CheckAll
LSP.publishDiagnosticsFunc lf (length errors) document version
$ LSP.partitionBySource $ uncurry errorToDiagnostic <$> errors
-------------------------------------------------------------------------------
sendNotification :: LSP.LspFuncs () -> Text -> IO ()
sendNotification lf s = LSP.sendFunc lf
$ LSP.NotLogMessage
$ LSP.NotificationMessage "2.0" LSP.WindowLogMessage
$ LSP.LogMessageParams LSP.MtInfo s
diagnosticSource :: LSP.DiagnosticSource
diagnosticSource = "sixten"
sendError
:: LSP.LspFuncs ()
-> LSP.Uri
-> LSP.TextDocumentVersion
-> Error
-> Maybe AbsoluteSourceLoc
-> IO ()
sendError lf uri version e loc =
LSP.publishDiagnosticsFunc lf 10 uri version $
Map.singleton (Just diagnosticSource) [errorToDiagnostic e loc]
errorToDiagnostic :: Error -> Maybe AbsoluteSourceLoc -> LSP.Diagnostic
errorToDiagnostic err loc = LSP.Diagnostic
{ _range = maybe
(LSP.Range (LSP.Position 0 0) (LSP.Position 0 0))
(spanToRange . absoluteSpan)
loc
, _severity = Just LSP.DsError
, _code = Nothing
, _source = Just diagnosticSource
, _message = showWide $ errorSummary err <> "\n" <> errorFootnote err
, _relatedInformation = Nothing
}
spanToRange :: Span -> LSP.Range
spanToRange span = LSP.Range
{ _start = positionToPosition $ spanStart span
, _end = positionToPosition $ spanEnd span
}
positionToPosition :: Position -> LSP.Position
positionToPosition pos = LSP.Position
{ _line = visualRow pos
, _character = visualColumn pos
}
fileContents :: LSP.LspFuncs () -> LSP.Uri -> IO (LSP.TextDocumentVersion, Text)
fileContents lf uri = do
mvf <- LSP.getVirtualFileFunc lf uri
case mvf of
Just (LSP.VirtualFile version rope) -> return (Just version, Yi.toText rope)
Nothing ->
case LSP.uriToFilePath uri of
Just fp ->
(,) Nothing <$> Text.readFile fp
Nothing ->
return (Just 0, "")
| null | https://raw.githubusercontent.com/ollef/sixten/60d46eee20abd62599badea85774a9365c81af45/src/LanguageServer.hs | haskell | # LANGUAGE OverloadedStrings #
(Just "sixten-lsp.log")
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | # LANGUAGE DisambiguateRecordFields #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE OverloadedLists #
module LanguageServer where
import Protolude hiding (state)
import Control.Concurrent.STM as STM
import Control.Lens hiding (unsnoc)
import Data.Default (def)
import qualified Data.HashMap.Lazy as HashMap
import qualified Data.Map as Map
import Data.Text(Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Language.Haskell.LSP.Control as LSP
import qualified Language.Haskell.LSP.Core
import qualified Language.Haskell.LSP.Core as LSP
import qualified Language.Haskell.LSP.Diagnostics as LSP
import qualified Language.Haskell.LSP.Messages as LSP
import qualified Language.Haskell.LSP.Types as LSP
import qualified Language.Haskell.LSP.Types.Lens as LSP
import qualified Language.Haskell.LSP.VFS as LSP
import Text.Parsix.Position
import qualified Yi.Rope as Yi
import Driver
import Driver.Query
import qualified Effect.Context as Context
import Elaboration.TypeOf
import LanguageServer.Hover as Hover
import SourceLoc
import Syntax
import Util
run :: IO ()
run = do
messageQueue <- newTQueueIO
_ <- LSP.run
( \_ -> Right ()
, \lf -> do
_ <- forkIO $ messagePump lf $ atomically $ readTQueue messageQueue
return Nothing
)
(handlers $ atomically . writeTQueue messageQueue)
options
return ()
handlers :: (LSP.FromClientMessage -> IO ()) -> LSP.Handlers
handlers sendMessage = def
{ LSP.initializedHandler = Just $ sendMessage . LSP.NotInitialized
, LSP.hoverHandler = Just $ sendMessage . LSP.ReqHover
, LSP.didOpenTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidOpenTextDocument
, LSP.didSaveTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidSaveTextDocument
, LSP.didChangeTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidChangeTextDocument
, LSP.didCloseTextDocumentNotificationHandler = Just $ sendMessage . LSP.NotDidCloseTextDocument
}
options :: LSP.Options
options = def
{ Language.Haskell.LSP.Core.textDocumentSync = Just $ LSP.TextDocumentSyncOptions
{ LSP._openClose = Just True
, LSP._change = Just LSP.TdSyncIncremental
, LSP._willSave = Just False
, LSP._willSaveWaitUntil = Just False
, LSP._save = Just $ LSP.SaveOptions $ Just False
}
}
messagePump :: LSP.LspFuncs () -> IO LSP.FromClientMessage -> IO ()
messagePump lf receiveMessage = do
state <- Driver.initialState
forever $ do
message <- receiveMessage
case message of
LSP.NotInitialized _ ->
return ()
LSP.NotDidOpenTextDocument notification -> do
sendNotification lf "messagePump: processing NotDidOpenTextDocument"
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
version = notification ^. LSP.params . LSP.textDocument . LSP.version
fileName = LSP.uriToFilePath document
sendNotification lf $ "fileName = " <> show fileName
sendDiagnostics lf state document $ Just version
LSP.NotDidChangeTextDocument notification -> do
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
version = notification ^. LSP.params . LSP.textDocument . LSP.version
sendNotification lf $ "messagePump:processing NotDidChangeTextDocument: uri=" <> show document
sendDiagnostics lf state document version
LSP.NotDidSaveTextDocument notification -> do
sendNotification lf "messagePump: processing NotDidSaveTextDocument"
let
document = notification ^. LSP.params . LSP.textDocument . LSP.uri
fileName = LSP.uriToFilePath document
sendNotification lf $ "fileName = " <> show fileName
sendDiagnostics lf state document Nothing
LSP.ReqHover req -> do
sendNotification lf $ "messagePump: HoverRequest: " <> show req
let
LSP.TextDocumentPositionParams
(LSP.TextDocumentIdentifier document)
pos@(LSP.Position line char)
= req ^. LSP.params
sendNotification lf $ shower document
sendNotification lf $ shower pos
(_version, contents) <- fileContents lf document
let
LSP.Uri uriText = document
uriStr = Text.unpack uriText
((types, typeOfErrs), _) <- Driver.incrementallyExecuteVirtualFile state uriStr contents $ do
defs <- fetch CheckAll
runHover $ do
(span, expr) <- hoverDefs (Hover.inside line char)
[ (n, loc, d, t)
| (n, (loc, d, t)) <- concatMap HashMap.toList defs
]
typ <- typeOf' voidArgs expr
ctx <- Context.getContext
return (span, ctx, expr, typ)
sendNotification lf $ "result " <> shower typeOfErrs
let
response = case types of
[] -> Nothing
_ -> do
let Just (_, (range, ctx, expr, typ)) = unsnoc types
Just $ LSP.Hover
{ LSP._contents =
LSP.List
[ LSP.PlainString
$ showWide
$ pretty (traverse Context.prettyVar expr ctx)
<> " : "
<> pretty (traverse Context.prettyVar typ ctx)
]
, LSP._range = Just
$ LSP.Range
{ LSP._start = LSP.Position
(visualRow $ spanStart range)
(visualColumn $ spanStart range)
, LSP._end = LSP.Position
(visualRow $ spanEnd range)
(visualColumn $ spanEnd range)
}
}
LSP.sendFunc lf $ LSP.RspHover $ LSP.makeResponseMessage req response
_ ->
return ()
sendDiagnostics
:: LSP.LspFuncs ()
-> DriverState
-> LSP.Uri
-> LSP.TextDocumentVersion
-> IO ()
sendDiagnostics lf state document version = do
(currentVersion, contents) <- fileContents lf document
case (version, currentVersion) of
(Just v, Just cv)
| v < cv ->
return ()
_ -> do
let
LSP.Uri uriText = document
uriStr = Text.unpack uriText
(_, errors) <- Driver.incrementallyExecuteVirtualFile state uriStr contents $ fetch CheckAll
LSP.publishDiagnosticsFunc lf (length errors) document version
$ LSP.partitionBySource $ uncurry errorToDiagnostic <$> errors
sendNotification :: LSP.LspFuncs () -> Text -> IO ()
sendNotification lf s = LSP.sendFunc lf
$ LSP.NotLogMessage
$ LSP.NotificationMessage "2.0" LSP.WindowLogMessage
$ LSP.LogMessageParams LSP.MtInfo s
diagnosticSource :: LSP.DiagnosticSource
diagnosticSource = "sixten"
sendError
:: LSP.LspFuncs ()
-> LSP.Uri
-> LSP.TextDocumentVersion
-> Error
-> Maybe AbsoluteSourceLoc
-> IO ()
sendError lf uri version e loc =
LSP.publishDiagnosticsFunc lf 10 uri version $
Map.singleton (Just diagnosticSource) [errorToDiagnostic e loc]
errorToDiagnostic :: Error -> Maybe AbsoluteSourceLoc -> LSP.Diagnostic
errorToDiagnostic err loc = LSP.Diagnostic
{ _range = maybe
(LSP.Range (LSP.Position 0 0) (LSP.Position 0 0))
(spanToRange . absoluteSpan)
loc
, _severity = Just LSP.DsError
, _code = Nothing
, _source = Just diagnosticSource
, _message = showWide $ errorSummary err <> "\n" <> errorFootnote err
, _relatedInformation = Nothing
}
spanToRange :: Span -> LSP.Range
spanToRange span = LSP.Range
{ _start = positionToPosition $ spanStart span
, _end = positionToPosition $ spanEnd span
}
positionToPosition :: Position -> LSP.Position
positionToPosition pos = LSP.Position
{ _line = visualRow pos
, _character = visualColumn pos
}
fileContents :: LSP.LspFuncs () -> LSP.Uri -> IO (LSP.TextDocumentVersion, Text)
fileContents lf uri = do
mvf <- LSP.getVirtualFileFunc lf uri
case mvf of
Just (LSP.VirtualFile version rope) -> return (Just version, Yi.toText rope)
Nothing ->
case LSP.uriToFilePath uri of
Just fp ->
(,) Nothing <$> Text.readFile fp
Nothing ->
return (Just 0, "")
|
8219ae34e25ce47bd01792d4ed87754fc0ab15f8f4a723ff5fd63d16aeb074d9 | senapk/funcional_arcade | Main.hs | fn [] = True
fn [x] = True
fn (y:x:xs) = (y + 1 == x) && fn(x:xs)
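-- fn checks that the list counts up by exactly one at each step; for example
-- (illustrative values, not from the original file):
--   fn [3,4,5] == True,  fn [3,5] == False,  fn [] == True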
main = do
vet <- readLn :: IO [Int]
print $ fn vet | null | https://raw.githubusercontent.com/senapk/funcional_arcade/70fa04b4799d5a8c7e5add39d9f217f38f418600/drafts/sucessores/Main.hs | haskell | fn [] = True
fn [x] = True
fn (y:x:xs) = (y + 1 == x) && fn(x:xs)
main = do
vet <- readLn :: IO [Int]
print $ fn vet |
|
374095d22a123bdf9da2c07165fecfada77188855c7ddc635eaa5341db541e73 | ssor/erlangDemos | hi_server.erl | -module(hi_server).
-behaviour(gen_web_server).
%% API
-export([start_link/1, start_link/2]).
%% gen_web_server callbacks
-export([init/1, get/3, delete/3, put/4, post/4,
head/3, options/4, trace/4, other_methods/4]).
%%%===================================================================
%%% API
start_link(Port) ->
gen_web_server:start_link(?MODULE, Port, []).
start_link(IP, Port) ->
gen_web_server:start_link(?MODULE, IP, Port, []).
%%%===================================================================
%%% gen_web_server callbacks
init([]) ->
{ok, []}.
get({http_request, 'GET', {abs_path, <<"/",Key/bytes>>}, _},
_Head, _UserData) ->
case simple_cache:lookup(Key) of
{ok, Value} ->
gen_web_server:http_reply(200, [], Value);
{error, not_found} ->
gen_web_server:http_reply(404, "Sorry, no such key.")
end.
delete({http_request, 'DELETE', {abs_path, <<"/",Key/bytes>>}, _},
_Head, _UserData) ->
simple_cache:delete(Key),
gen_web_server:http_reply(200).
put({http_request, 'PUT', {abs_path, <<"/",Key/bytes>>}, _},
_Head, Body, _UserData) ->
simple_cache:insert(Key, Body),
gen_web_server:http_reply(200).
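%% Example interaction once this server and simple_cache are running
%% (hypothetical host and port, not taken from this module):
%%   curl -X PUT --data "hello" http://localhost:1156/foo
%%   curl http://localhost:1156/foo            %% -> hello
%%   curl -X DELETE http://localhost:1156/foo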
post(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
head(_Request, _Head, _UserData) ->
gen_web_server:http_reply(501).
options(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
trace(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
other_methods(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
| null | https://raw.githubusercontent.com/ssor/erlangDemos/632cd905be2c4f275f1c1ae15238e711d7bb9147/erlware-Erlang-and-OTP-in-Action-Source/chapter_11/http_interface/src/hi_server.erl | erlang | API
gen_web_server callbacks
===================================================================
API
===================================================================
gen_web_server callbacks | -module(hi_server).
-behaviour(gen_web_server).
-export([start_link/1, start_link/2]).
-export([init/1, get/3, delete/3, put/4, post/4,
head/3, options/4, trace/4, other_methods/4]).
start_link(Port) ->
gen_web_server:start_link(?MODULE, Port, []).
start_link(IP, Port) ->
gen_web_server:start_link(?MODULE, IP, Port, []).
init([]) ->
{ok, []}.
get({http_request, 'GET', {abs_path, <<"/",Key/bytes>>}, _},
_Head, _UserData) ->
case simple_cache:lookup(Key) of
{ok, Value} ->
gen_web_server:http_reply(200, [], Value);
{error, not_found} ->
gen_web_server:http_reply(404, "Sorry, no such key.")
end.
delete({http_request, 'DELETE', {abs_path, <<"/",Key/bytes>>}, _},
_Head, _UserData) ->
simple_cache:delete(Key),
gen_web_server:http_reply(200).
put({http_request, 'PUT', {abs_path, <<"/",Key/bytes>>}, _},
_Head, Body, _UserData) ->
simple_cache:insert(Key, Body),
gen_web_server:http_reply(200).
post(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
head(_Request, _Head, _UserData) ->
gen_web_server:http_reply(501).
options(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
trace(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
other_methods(_Request, _Head, _Body, _UserData) ->
gen_web_server:http_reply(501).
|
ffa31355d318caf954153294d47e2f6b4302b6fdc66ebe8a3faa000d32f19a6f | sondresl/AdventOfCode | Day24.hs | module Day24 where
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Lib
import Linear (V3 (..))
import Text.ParserCombinators.Parsec
type Hex = V3 Integer
data Dir
= NE
| E
| SE
| SW
| W
| NW
deriving (Show, Eq, Ord, Enum, Bounded)
-- -do-i-represent-a-hextile-hex-grid-in-memory
move :: Dir -> Hex -> Hex
move NE = (+ V3 1 0 (-1))
move E = (+ V3 1 (-1) 0)
move SE = (+ V3 0 (-1) 1)
move SW = (+ V3 (-1) 0 1)
move W = (+ V3 (-1) 1 0)
move NW = (+ V3 0 1 (-1))
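-- In these cube coordinates the three components of every offset sum to zero,
-- so opposite moves cancel out; for example:
--   foldl (flip move) (V3 0 0 0) [NE, SW] == V3 0 0 0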
flipHexes :: [[Dir]] -> Set Hex
flipHexes = foldl f Set.empty
where
locatePoint = foldl (flip move) (V3 0 0 0)
f seen (locatePoint -> p) =
if Set.member p seen
then Set.delete p seen
else Set.insert p seen
step :: Set Hex -> Set Hex
step input = stayBlack <> toBlack
where
cellCount = Map.unionsWith (+) . map (freqs . explode) $ Set.toList input
explode p = map (`move` p) [NE ..]
stayBlack =
Map.keysSet
. Map.filter (`elem` [1, 2])
$ Map.restrictKeys cellCount input
toBlack =
Map.keysSet
. Map.filter (== 2)
$ Map.withoutKeys cellCount input
main :: IO ()
main = do
input <- parseInput <$> readFile "../data/day24.in"
print . Set.size . flipHexes $ input
print . Set.size . (!! 100) . iterate step . flipHexes $ input
-- Parsing
parseInput :: String -> [[Dir]]
parseInput = either (error . show) id . traverse (parse dirs "") . lines
where
dirs =
many1
( choice
[ NE <$ try (string "ne")
, NW <$ try (string "nw")
, SE <$ try (string "se")
, SW <$ try (string "sw")
, E <$ string "e"
, W <$ string "w"
]
)
| null | https://raw.githubusercontent.com/sondresl/AdventOfCode/51525441795417f31b3eb67a690aa5534d1e699b/2020/Haskell/src/Day24.hs | haskell | -do-i-represent-a-hextile-hex-grid-in-memory
Parsing | module Day24 where
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Lib
import Linear (V3 (..))
import Text.ParserCombinators.Parsec
type Hex = V3 Integer
data Dir
= NE
| E
| SE
| SW
| W
| NW
deriving (Show, Eq, Ord, Enum, Bounded)
move :: Dir -> Hex -> Hex
move NE = (+ V3 1 0 (-1))
move E = (+ V3 1 (-1) 0)
move SE = (+ V3 0 (-1) 1)
move SW = (+ V3 (-1) 0 1)
move W = (+ V3 (-1) 1 0)
move NW = (+ V3 0 1 (-1))
flipHexes :: [[Dir]] -> Set Hex
flipHexes = foldl f Set.empty
where
locatePoint = foldl (flip move) (V3 0 0 0)
f seen (locatePoint -> p) =
if Set.member p seen
then Set.delete p seen
else Set.insert p seen
step :: Set Hex -> Set Hex
step input = stayBlack <> toBlack
where
cellCount = Map.unionsWith (+) . map (freqs . explode) $ Set.toList input
explode p = map (`move` p) [NE ..]
stayBlack =
Map.keysSet
. Map.filter (`elem` [1, 2])
$ Map.restrictKeys cellCount input
toBlack =
Map.keysSet
. Map.filter (== 2)
$ Map.withoutKeys cellCount input
main :: IO ()
main = do
input <- parseInput <$> readFile "../data/day24.in"
print . Set.size . flipHexes $ input
print . Set.size . (!! 100) . iterate step . flipHexes $ input
parseInput :: String -> [[Dir]]
parseInput = either (error . show) id . traverse (parse dirs "") . lines
where
dirs =
many1
( choice
[ NE <$ try (string "ne")
, NW <$ try (string "nw")
, SE <$ try (string "se")
, SW <$ try (string "sw")
, E <$ string "e"
, W <$ string "w"
]
)
|
b17ed618abe66ad31da5af5320610f2e3955555067f6212a7b4ffe5614b506f8 | artyom-poptsov/guile-ics | guix.scm | guix.scm --- GNU Guix package recipe -*- coding : utf-8 -*-
;;
;; Copyright (C) 2022 < >
;;
;; This file is part of Guile-ICS.
;;
;; This program is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
;;
;; GNU Guix development package. To use as the basis for a development
;; environment, run:
;;
;; guix environment --pure --container -l guix.scm
;;
;; In the new shell, run:
;;
;; autoreconf -vif && ./configure && make check
;;
;;; Code:
(use-modules (guix gexp)
(guix packages)
((guix licenses)
#:prefix license:)
(guix git-download)
(guix build-system gnu)
(gnu packages autotools)
(gnu packages guile)
(gnu packages bash)
(gnu packages base)
(gnu packages gettext)
(gnu packages admin)
(gnu packages guile-xyz)
(gnu packages pkg-config)
(gnu packages tex)
(gnu packages texinfo)
(gnu packages man))
(define %source-dir (dirname (current-filename)))
(define-public guile-ics
(package
(name "guile-ics")
(version "git")
(source (local-file %source-dir
#:recursive? #t
#:select? (git-predicate %source-dir)))
(build-system gnu-build-system)
(native-inputs
(list autoconf
automake
help2man
texinfo
           ;; Gettext brings 'AC_LIB_LINKFLAGS_FROM_LIBS'.
pkg-config
guile-dsv
texlive))
(inputs (list guile-3.0 which))
(propagated-inputs (list guile-lib guile-smc))
    (home-page "https://github.com/artyom-poptsov/guile-ics")
(synopsis "Guile parser library for the iCalendar format")
(description
"Guile-ICS is an iCalendar (RFC5545) format parser library written in
pure Scheme. The library can be used to read and write iCalendar data.
The library is shipped with documentation in Info format and usage examples.")
(license license:gpl3+)))
;; Return the package.
guile-ics
;;; guix.scm ends here.
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-ics/0e6486244840a459355b017521f8b10f6e4ff118/guix.scm | scheme |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
GNU Guix development package. To use as the basis for a development
environment, run:
guix environment --pure --container -l guix.scm
In the new shell, run:
autoreconf -vif && ./configure && make check
Code:
Return the package.
guix.scm ends here. | guix.scm --- GNU Guix package recipe -*- coding : utf-8 -*-
;; Copyright (C) 2022 < >
;; This file is part of Guile-ICS.
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; You should have received a copy of the GNU General Public License
(use-modules (guix gexp)
(guix packages)
((guix licenses)
#:prefix license:)
(guix git-download)
(guix build-system gnu)
(gnu packages autotools)
(gnu packages guile)
(gnu packages bash)
(gnu packages base)
(gnu packages gettext)
(gnu packages admin)
(gnu packages guile-xyz)
(gnu packages pkg-config)
(gnu packages tex)
(gnu packages texinfo)
(gnu packages man))
(define %source-dir (dirname (current-filename)))
(define-public guile-ics
(package
(name "guile-ics")
(version "git")
(source (local-file %source-dir
#:recursive? #t
#:select? (git-predicate %source-dir)))
(build-system gnu-build-system)
(native-inputs
(list autoconf
automake
help2man
texinfo
           ;; Gettext brings 'AC_LIB_LINKFLAGS_FROM_LIBS'.
pkg-config
guile-dsv
texlive))
(inputs (list guile-3.0 which))
(propagated-inputs (list guile-lib guile-smc))
(home-page "-poptsov/guile-ics")
(synopsis "Guile parser library for the iCalendar format")
(description
"Guile-ICS is an iCalendar (RFC5545) format parser library written in
pure Scheme. The library can be used to read and write iCalendar data.
The library is shipped with documentation in Info format and usage examples.")
(license license:gpl3+)))
guile-ics
|
94ed6b56cd790f0e1c125f094c5251431de618bceb6fb583b394f08db6cde08c | mbutterick/beautiful-racket | sample-import.rkt | #lang basic-demo-3
10 import [math/number-theory]
20 print [nth-prime](15)
30 print [prime?](24)
40 import [racket/base]
50 print [max](f(1), f(2), f(5), f(4))
60 def f(x) = x + x | null | https://raw.githubusercontent.com/mbutterick/beautiful-racket/f0e2cb5b325733b3f9cbd554cc7d2bb236af9ee9/beautiful-racket-demo/basic-demo-3/sample-import.rkt | racket | #lang basic-demo-3
10 import [math/number-theory]
20 print [nth-prime](15)
30 print [prime?](24)
40 import [racket/base]
50 print [max](f(1), f(2), f(5), f(4))
60 def f(x) = x + x |
|
43489f8d692bf9ebae34dcb5b1d361493cdd9afc2d658ad90df3c67b0db9e6cd | acw/eve | BlueprintInfo.hs | # LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RecordWildCards #-}
module EVE.Static.Database.BlueprintInfo(
BlueprintDatabase
, BlueprintInfo
)
where
import qualified Data.HashMap.Strict as HM
import Data.Map.Strict(Map)
import qualified Data.Map as M
import Data.Scientific( toRealFloat)
import Data.Text(Text, unpack)
import Data.Yaml(FromJSON(..), Value(..), Parser, (.:))
import EVE.Static.Database.Class(EVEDatabase(..))
import EVE.Static.Database.TypeIds(TypeId)
import EVE.Static.Database.Helpers(fromArray)
import Text.Read(readMaybe)
newtype BlueprintDatabase = BPDB {_blueprintMap :: Map TypeId BlueprintInfo}
deriving (Read, Show)
instance FromJSON BlueprintDatabase where
parseJSON json =
case json of
Object o -> BPDB `fmap` HM.foldrWithKey addBlueprintInfo (return M.empty) o
_ -> fail "Unexpected top-level blueprint list."
where
addBlueprintInfo key value prev =
case readMaybe (unpack key) of
Just bpId ->
do cur <- parseJSON value
(M.insert (fromIntegral (bpId :: Word)) cur) `fmap` prev
Nothing ->
fail "Failed to parse blueprint id (key)."
instance EVEDatabase TypeId BlueprintInfo BlueprintDatabase where
dbRecordCount (BPDB db) = fromIntegral (M.size db)
dbLookup k (BPDB db) = M.lookup k db
dbKeys (BPDB db) = M.keys db
data BlueprintInfo = BlueprintInfo {
_blueprintId :: TypeId
, _blueprintMaxProduction :: Word
, _blueprintActivities :: [BlueprintActivity]
}
deriving (Read, Show)
instance FromJSON BlueprintInfo where
parseJSON obj =
case obj of
Object o ->
do _blueprintId <- o .: "blueprintTypeID"
_blueprintMaxProduction <- o .: "maxProductionLimit"
case HM.lookup "activities" o of
Just (Object acts) ->
do _blueprintActivities <- mapM toActivity (HM.toList acts)
return BlueprintInfo{ .. }
Just _ ->
fail "Non-object activities field?"
Nothing ->
fail "Couldn't find blueprint activities"
_ -> fail "Non-object blueprint info?"
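-- For reference, the YAML this parser accepts looks roughly like the
-- following (illustrative ids and values only, not taken from a real dump):
--
--   683:
--     blueprintTypeID: 683
--     maxProductionLimit: 10
--     activities:
--       copying: {time: 480}
--       manufacturing:
--         materials: [{quantity: 86, typeID: 38}]
--         products: [{quantity: 1, typeID: 603}]
--         skills: [{level: 1, typeID: 3380}]
--         time: 6000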
data BlueprintActivity = ActivityCopy CopyActivity
| ActivityInvention InventionActivity
| ActivityManufacture ManufactureActivity
| ActivityMaterialResearch MaterialResearchActivity
| ActivityTimeResearch TimeResearchActivity
deriving (Read, Show)
toActivity :: (Text, Value) -> Parser BlueprintActivity
toActivity ("copying", v) = ActivityCopy <$> parseJSON v
toActivity ("invention", v) = ActivityInvention <$> parseJSON v
toActivity ("manufacturing", v) = ActivityManufacture <$> parseJSON v
toActivity ("research_material", v) = ActivityMaterialResearch <$> parseJSON v
toActivity ("research_time", v) = ActivityTimeResearch <$> parseJSON v
toActivity (name, _) =
fail ("Unknown blueprint activity: " ++ unpack name)
data CopyActivity = CopyActivity {
_copyTime :: Word
}
deriving (Read, Show)
instance FromJSON CopyActivity where
parseJSON obj =
case obj of
Object o -> CopyActivity `fmap` (o .: "time")
_ -> fail "Non-object copy activity."
data InventionActivity = InventionActivity {
_inventionMaterials :: [(Word, TypeId)]
, _inventionProducts :: [(Double, Word, TypeId)]
, _inventionSkills :: [(Word, TypeId)]
, _inventionTime :: Word
}
deriving (Read, Show)
instance FromJSON InventionActivity where
parseJSON obj =
case obj of
Object o ->
do _inventionMaterials <- fromArray "materials" o parseMaterial
_inventionProducts <- fromArray "products" o parseIProduct
_inventionSkills <- fromArray "skills" o parseSkill
_inventionTime <- o .: "time"
return InventionActivity{ .. }
_ ->
fail "Non-object invention activity?"
parseMaterial :: Value -> Parser (Word, TypeId)
parseMaterial obj =
case obj of
Object o -> do quant <- o .: "quantity"
typeId <- o .: "typeID"
return (quant, typeId)
_ -> fail "Couldn't parse material (it's not an object?)"
parseIProduct :: Value -> Parser (Double, Word, TypeId)
parseIProduct obj =
case obj of
Object o -> do (quant, typeId) <- parseMaterial obj
case HM.lookup "probability" o of
Just (Number x) ->
return (toRealFloat x, quant, typeId)
Just _ ->
fail "Non-numeric probability?"
Nothing ->
return (1.0, quant, typeId)
_ -> fail "Non-object invention product."
parseSkill :: Value -> Parser (Word, TypeId)
parseSkill obj =
case obj of
Object o -> do level <- o .: "level"
typeId <- o .: "typeID"
return (level, typeId)
_ -> fail "Couldn't parse skill (it's not an object?)"
data ManufactureActivity = ManufactureActivity {
_manufactureMaterials :: [(Word, TypeId)]
, _manufactureProducts :: [(Word, TypeId)]
, _manufactureSkills :: [(Word, TypeId)]
, _manufactureTime :: Word
}
deriving (Read, Show)
instance FromJSON ManufactureActivity where
parseJSON obj =
case obj of
Object o ->
do _manufactureMaterials <- fromArray "materials" o parseMaterial
_manufactureProducts <- fromArray "products" o parseMaterial
_manufactureSkills <- fromArray "skills" o parseSkill
_manufactureTime <- o .: "time"
return ManufactureActivity{ .. }
_ ->
fail "Non-object manufacture activity."
data MaterialResearchActivity = MaterialResearchActivity {
_mresearchTime :: Word
}
deriving (Read, Show)
instance FromJSON MaterialResearchActivity where
parseJSON obj =
case obj of
Object o -> MaterialResearchActivity `fmap` (o .: "time")
_ -> fail "Non-object material research activity."
data TimeResearchActivity = TimeResearchActivity {
_tresearchTime :: Word
}
deriving (Read, Show)
instance FromJSON TimeResearchActivity where
parseJSON obj =
case obj of
Object o -> TimeResearchActivity `fmap` (o .: "time")
_ -> fail "Non-object time research activity"
| null | https://raw.githubusercontent.com/acw/eve/95c3a158e181e9708c2f3b5d998512bbc1b06e57/src/EVE/Static/Database/BlueprintInfo.hs | haskell | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE PatternGuards #
# LANGUAGE RecordWildCards #
module EVE.Static.Database.BlueprintInfo(
BlueprintDatabase
, BlueprintInfo
)
where
import qualified Data.HashMap.Strict as HM
import Data.Map.Strict(Map)
import qualified Data.Map as M
import Data.Scientific( toRealFloat)
import Data.Text(Text, unpack)
import Data.Yaml(FromJSON(..), Value(..), Parser, (.:))
import EVE.Static.Database.Class(EVEDatabase(..))
import EVE.Static.Database.TypeIds(TypeId)
import EVE.Static.Database.Helpers(fromArray)
import Text.Read(readMaybe)
newtype BlueprintDatabase = BPDB {_blueprintMap :: Map TypeId BlueprintInfo}
deriving (Read, Show)
instance FromJSON BlueprintDatabase where
parseJSON json =
case json of
Object o -> BPDB `fmap` HM.foldrWithKey addBlueprintInfo (return M.empty) o
_ -> fail "Unexpected top-level blueprint list."
where
addBlueprintInfo key value prev =
case readMaybe (unpack key) of
Just bpId ->
do cur <- parseJSON value
(M.insert (fromIntegral (bpId :: Word)) cur) `fmap` prev
Nothing ->
fail "Failed to parse blueprint id (key)."
instance EVEDatabase TypeId BlueprintInfo BlueprintDatabase where
dbRecordCount (BPDB db) = fromIntegral (M.size db)
dbLookup k (BPDB db) = M.lookup k db
dbKeys (BPDB db) = M.keys db
data BlueprintInfo = BlueprintInfo {
_blueprintId :: TypeId
, _blueprintMaxProduction :: Word
, _blueprintActivities :: [BlueprintActivity]
}
deriving (Read, Show)
instance FromJSON BlueprintInfo where
parseJSON obj =
case obj of
Object o ->
do _blueprintId <- o .: "blueprintTypeID"
_blueprintMaxProduction <- o .: "maxProductionLimit"
case HM.lookup "activities" o of
Just (Object acts) ->
do _blueprintActivities <- mapM toActivity (HM.toList acts)
return BlueprintInfo{ .. }
Just _ ->
fail "Non-object activities field?"
Nothing ->
fail "Couldn't find blueprint activities"
_ -> fail "Non-object blueprint info?"
data BlueprintActivity = ActivityCopy CopyActivity
| ActivityInvention InventionActivity
| ActivityManufacture ManufactureActivity
| ActivityMaterialResearch MaterialResearchActivity
| ActivityTimeResearch TimeResearchActivity
deriving (Read, Show)
toActivity :: (Text, Value) -> Parser BlueprintActivity
toActivity ("copying", v) = ActivityCopy <$> parseJSON v
toActivity ("invention", v) = ActivityInvention <$> parseJSON v
toActivity ("manufacturing", v) = ActivityManufacture <$> parseJSON v
toActivity ("research_material", v) = ActivityMaterialResearch <$> parseJSON v
toActivity ("research_time", v) = ActivityTimeResearch <$> parseJSON v
toActivity (name, _) =
fail ("Unknown blueprint activity: " ++ unpack name)
data CopyActivity = CopyActivity {
_copyTime :: Word
}
deriving (Read, Show)
instance FromJSON CopyActivity where
parseJSON obj =
case obj of
Object o -> CopyActivity `fmap` (o .: "time")
_ -> fail "Non-object copy activity."
data InventionActivity = InventionActivity {
_inventionMaterials :: [(Word, TypeId)]
, _inventionProducts :: [(Double, Word, TypeId)]
, _inventionSkills :: [(Word, TypeId)]
, _inventionTime :: Word
}
deriving (Read, Show)
instance FromJSON InventionActivity where
parseJSON obj =
case obj of
Object o ->
do _inventionMaterials <- fromArray "materials" o parseMaterial
_inventionProducts <- fromArray "products" o parseIProduct
_inventionSkills <- fromArray "skills" o parseSkill
_inventionTime <- o .: "time"
return InventionActivity{ .. }
_ ->
fail "Non-object invention activity?"
parseMaterial :: Value -> Parser (Word, TypeId)
parseMaterial obj =
case obj of
Object o -> do quant <- o .: "quantity"
typeId <- o .: "typeID"
return (quant, typeId)
_ -> fail "Couldn't parse material (it's not an object?)"
parseIProduct :: Value -> Parser (Double, Word, TypeId)
parseIProduct obj =
case obj of
Object o -> do (quant, typeId) <- parseMaterial obj
case HM.lookup "probability" o of
Just (Number x) ->
return (toRealFloat x, quant, typeId)
Just _ ->
fail "Non-numeric probability?"
Nothing ->
return (1.0, quant, typeId)
_ -> fail "Non-object invention product."
parseSkill :: Value -> Parser (Word, TypeId)
parseSkill obj =
case obj of
Object o -> do level <- o .: "level"
typeId <- o .: "typeID"
return (level, typeId)
_ -> fail "Couldn't parse skill (it's not an object?)"
data ManufactureActivity = ManufactureActivity {
_manufactureMaterials :: [(Word, TypeId)]
, _manufactureProducts :: [(Word, TypeId)]
, _manufactureSkills :: [(Word, TypeId)]
, _manufactureTime :: Word
}
deriving (Read, Show)
instance FromJSON ManufactureActivity where
parseJSON obj =
case obj of
Object o ->
do _manufactureMaterials <- fromArray "materials" o parseMaterial
_manufactureProducts <- fromArray "products" o parseMaterial
_manufactureSkills <- fromArray "skills" o parseSkill
_manufactureTime <- o .: "time"
return ManufactureActivity{ .. }
_ ->
fail "Non-object manufacture activity."
data MaterialResearchActivity = MaterialResearchActivity {
_mresearchTime :: Word
}
deriving (Read, Show)
instance FromJSON MaterialResearchActivity where
parseJSON obj =
case obj of
Object o -> MaterialResearchActivity `fmap` (o .: "time")
_ -> fail "Non-object material research activity."
data TimeResearchActivity = TimeResearchActivity {
_tresearchTime :: Word
}
deriving (Read, Show)
instance FromJSON TimeResearchActivity where
parseJSON obj =
case obj of
Object o -> TimeResearchActivity `fmap` (o .: "time")
_ -> fail "Non-object time research activity"
|
|
a5c672590745ef38fc0a358dcbb238a288748ecf9c8c0172b962dbdb07d5f303 | UU-ComputerScience/uhc | ClassRec1.hs | {- ----------------------------------------------------------------------------------------
what : Recursive use of class in constraint
expected: ok
---------------------------------------------------------------------------------------- -}
module ClassRec1 where
class Data a where
gfoldl :: (forall d. Data d => d) -> a
main
= return ()
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/ClassRec1.hs | haskell | ----------------------------------------------------------------------------------------
what : Recursive use of class in constraint
expected: ok
---------------------------------------------------------------------------------------- |
module ClassRec1 where
class Data a where
gfoldl :: (forall d. Data d => d) -> a
main
= return ()
|
7ab01f387c66040921770f2b4d7dd6a83063ecf0c74f482b0c9571e7587e1fab | wwezhuimeng/kazoo | rebar_xref.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
%% rebar: Erlang Build Tools
%%
%% Copyright (c) 2009 ( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%%
%% -------------------------------------------------------------------
%% -------------------------------------------------------------------
%% This module borrows heavily from project as
%% written by < >,
%% <> and others.
%% -------------------------------------------------------------------
-module(rebar_xref).
-include("rebar.hrl").
-export([xref/2]).
%% ===================================================================
%% Public API
%% ===================================================================
xref(Config, _) ->
%% Spin up xref
{ok, _} = xref:start(xref),
ok = xref:set_library_path(xref, code_path(Config)),
xref:set_default(xref, [{warnings,
rebar_config:get(Config, xref_warnings, false)},
{verbose, rebar_config:is_verbose(Config)}]),
{ok, _} = xref:add_directory(xref, "ebin"),
%% Save the code path prior to doing anything
OrigPath = code:get_path(),
true = code:add_path(rebar_utils:ebin_dir()),
%% Get list of xref checks we want to run
XrefChecks = rebar_config:get(Config, xref_checks,
[exports_not_used,
undefined_function_calls]),
%% Look for exports that are unused by anything
ExportsNoWarn =
case lists:member(exports_not_used, XrefChecks) of
true ->
check_exports_not_used();
false ->
true
end,
%% Look for calls to undefined functions
UndefNoWarn =
case lists:member(undefined_function_calls, XrefChecks) of
true ->
check_undefined_function_calls();
false ->
true
end,
%% Run custom queries
QueryChecks = rebar_config:get(Config, xref_queries, []),
QueryNoWarn = lists:all(fun check_query/1, QueryChecks),
%% Restore the original code path
true = code:set_path(OrigPath),
%% Stop xref
stopped = xref:stop(xref),
case lists:member(false, [ExportsNoWarn, UndefNoWarn, QueryNoWarn]) of
true ->
?FAIL;
false ->
ok
end.
%% ===================================================================
%% Internal functions
%% ===================================================================
check_exports_not_used() ->
{ok, UnusedExports0} = xref:analyze(xref, exports_not_used),
UnusedExports = filter_away_ignored(UnusedExports0),
%% Report all the unused functions
display_mfas(UnusedExports, "is unused export (Xref)"),
UnusedExports =:= [].
check_undefined_function_calls() ->
{ok, UndefinedCalls0} = xref:analyze(xref, undefined_function_calls),
UndefinedCalls =
[{find_mfa_source(Caller), format_fa(Caller), format_mfa(Target)}
|| {Caller, Target} <- UndefinedCalls0],
lists:foreach(
fun({{Source, Line}, FunStr, Target}) ->
?CONSOLE("~s:~w: Warning ~s calls undefined function ~s\n",
[Source, Line, FunStr, Target])
end, UndefinedCalls),
UndefinedCalls =:= [].
check_query({Query, Value}) ->
{ok, Answer} = xref:q(xref, Query),
case Answer =:= Value of
false ->
?CONSOLE("Query ~s~n answer ~p~n did not match ~p~n",
[Query, Answer, Value]),
false;
_ ->
true
end.
code_path(Config) ->
    %% Slight hack to ensure that sub_dirs get properly included
%% in code path for xref -- otherwise one gets a lot of undefined
%% functions, even though those functions are present as part
%% of compilation. H/t to @dluna. Long term we should tie more
%% properly into the overall compile code path if possible.
BaseDir = rebar_config:get_xconf(Config, base_dir),
[P || P <- code:get_path() ++
[filename:join(BaseDir, filename:join(SubDir, "ebin"))
|| SubDir <- rebar_config:get(Config, sub_dirs, [])],
filelib:is_dir(P)].
%%
%% Ignore behaviour functions, and explicitly marked functions
%%
filter_away_ignored(UnusedExports) ->
%% Functions can be ignored by using
%% -ignore_xref([{F, A}, ...]).
%% Setup a filter function that builds a list of behaviour callbacks and/or
%% any functions marked to ignore. We then use this list to mask any
%% functions marked as unused exports by xref
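    %% For example (hypothetical module and function names), adding
    %%     -ignore_xref([{internal_dump, 0}]).
    %% to a module keeps xref from reporting internal_dump/0 as an unused
    %% export.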
F = fun(Mod) ->
Attrs = Mod:module_info(attributes),
Ignore = keyall(ignore_xref, Attrs),
Callbacks = [B:behaviour_info(callbacks)
|| B <- keyall(behaviour, Attrs)],
[{Mod, F, A} || {F, A} <- Ignore ++ lists:flatten(Callbacks)]
end,
AttrIgnore =
lists:flatten(
lists:map(F, lists:usort([M || {M, _, _} <- UnusedExports]))),
[X || X <- UnusedExports, not lists:member(X, AttrIgnore)].
keyall(Key, List) ->
lists:flatmap(fun({K, L}) when Key =:= K -> L; (_) -> [] end, List).
display_mfas([], _Message) ->
ok;
display_mfas([{_Mod, Fun, Args} = MFA | Rest], Message) ->
{Source, Line} = find_mfa_source(MFA),
?CONSOLE("~s:~w: Warning: function ~s/~w ~s\n",
[Source, Line, Fun, Args, Message]),
display_mfas(Rest, Message).
format_mfa({M, F, A}) ->
?FMT("~s:~s/~w", [M, F, A]).
format_fa({_M, F, A}) ->
?FMT("~s/~w", [F, A]).
%%
%% Extract an element from a tuple, or undefined if N > tuple size
%%
safe_element(N, Tuple) ->
case catch(element(N, Tuple)) of
{'EXIT', {badarg, _}} ->
undefined;
Value ->
Value
end.
%%
%% Given a MFA, find the file and LOC where it's defined. Note that
%% xref doesn't work if there is no abstract_code, so we can avoid
%% being too paranoid here.
%%
find_mfa_source({M, F, A}) ->
{M, Bin, _} = code:get_object_code(M),
AbstractCode = beam_lib:chunks(Bin, [abstract_code]),
{ok, {M, [{abstract_code, {raw_abstract_v1, Code}}]}} = AbstractCode,
%% Extract the original source filename from the abstract code
[{attribute, 1, file, {Source, _}} | _] = Code,
%% Extract the line number for a given function def
Fn = [E || E <- Code,
safe_element(1, E) == function,
safe_element(3, E) == F,
safe_element(4, E) == A],
case Fn of
[{function, Line, F, _, _}] -> {Source, Line};
%% do not crash if functions are exported, even though they
%% are not in the source.
        %% parameterized modules add new/1 and instance/1 for example.
[] -> {Source, function_not_found}
end.
| null | https://raw.githubusercontent.com/wwezhuimeng/kazoo/06a15811dbf123ae1601dc7a4ced956a7e04f58a/utils/rebar/src/rebar_xref.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
-------------------------------------------------------------------
This module borrows heavily from project as
<> and others.
-------------------------------------------------------------------
===================================================================
Public API
===================================================================
Spin up xref
Save the code path prior to doing anything
Get list of xref checks we want to run
Look for exports that are unused by anything
Look for calls to undefined functions
Run custom queries
Restore the original code path
Stop xref
===================================================================
===================================================================
Report all the unused functions
in code path for xref -- otherwise one gets a lot of undefined
functions, even though those functions are present as part
of compilation. H/t to @dluna. Long term we should tie more
properly into the overall compile code path if possible.
Ignore behaviour functions, and explicitly marked functions
Functions can be ignored by using
-ignore_xref([{F, A}, ...]).
Setup a filter function that builds a list of behaviour callbacks and/or
any functions marked to ignore. We then use this list to mask any
functions marked as unused exports by xref
Extract an element from a tuple, or undefined if N > tuple size
xref doesn't work if there is no abstract_code, so we can avoid
being too paranoid here.
Extract the original source filename from the abstract code
Extract the line number for a given function def
do not crash if functions are exported, even though they
are not in the source. | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% rebar: Erlang Build Tools
%% Copyright (c) 2009 ( )
%% in the Software without restriction, including without limitation the rights
%% copies of the Software, and to permit persons to whom the Software is
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% written by < >,
-module(rebar_xref).
-include("rebar.hrl").
-export([xref/2]).
xref(Config, _) ->
{ok, _} = xref:start(xref),
ok = xref:set_library_path(xref, code_path(Config)),
xref:set_default(xref, [{warnings,
rebar_config:get(Config, xref_warnings, false)},
{verbose, rebar_config:is_verbose(Config)}]),
{ok, _} = xref:add_directory(xref, "ebin"),
OrigPath = code:get_path(),
true = code:add_path(rebar_utils:ebin_dir()),
XrefChecks = rebar_config:get(Config, xref_checks,
[exports_not_used,
undefined_function_calls]),
ExportsNoWarn =
case lists:member(exports_not_used, XrefChecks) of
true ->
check_exports_not_used();
false ->
true
end,
UndefNoWarn =
case lists:member(undefined_function_calls, XrefChecks) of
true ->
check_undefined_function_calls();
false ->
true
end,
QueryChecks = rebar_config:get(Config, xref_queries, []),
QueryNoWarn = lists:all(fun check_query/1, QueryChecks),
true = code:set_path(OrigPath),
stopped = xref:stop(xref),
case lists:member(false, [ExportsNoWarn, UndefNoWarn, QueryNoWarn]) of
true ->
?FAIL;
false ->
ok
end.
%% Internal functions
check_exports_not_used() ->
{ok, UnusedExports0} = xref:analyze(xref, exports_not_used),
UnusedExports = filter_away_ignored(UnusedExports0),
display_mfas(UnusedExports, "is unused export (Xref)"),
UnusedExports =:= [].
check_undefined_function_calls() ->
{ok, UndefinedCalls0} = xref:analyze(xref, undefined_function_calls),
UndefinedCalls =
[{find_mfa_source(Caller), format_fa(Caller), format_mfa(Target)}
|| {Caller, Target} <- UndefinedCalls0],
lists:foreach(
fun({{Source, Line}, FunStr, Target}) ->
?CONSOLE("~s:~w: Warning ~s calls undefined function ~s\n",
[Source, Line, FunStr, Target])
end, UndefinedCalls),
UndefinedCalls =:= [].
check_query({Query, Value}) ->
{ok, Answer} = xref:q(xref, Query),
case Answer =:= Value of
false ->
?CONSOLE("Query ~s~n answer ~p~n did not match ~p~n",
[Query, Answer, Value]),
false;
_ ->
true
end.
code_path(Config) ->
    %% Slight hack to ensure that sub_dirs get properly included
BaseDir = rebar_config:get_xconf(Config, base_dir),
[P || P <- code:get_path() ++
[filename:join(BaseDir, filename:join(SubDir, "ebin"))
|| SubDir <- rebar_config:get(Config, sub_dirs, [])],
filelib:is_dir(P)].
filter_away_ignored(UnusedExports) ->
F = fun(Mod) ->
Attrs = Mod:module_info(attributes),
Ignore = keyall(ignore_xref, Attrs),
Callbacks = [B:behaviour_info(callbacks)
|| B <- keyall(behaviour, Attrs)],
[{Mod, F, A} || {F, A} <- Ignore ++ lists:flatten(Callbacks)]
end,
AttrIgnore =
lists:flatten(
lists:map(F, lists:usort([M || {M, _, _} <- UnusedExports]))),
[X || X <- UnusedExports, not lists:member(X, AttrIgnore)].
keyall(Key, List) ->
lists:flatmap(fun({K, L}) when Key =:= K -> L; (_) -> [] end, List).
display_mfas([], _Message) ->
ok;
display_mfas([{_Mod, Fun, Args} = MFA | Rest], Message) ->
{Source, Line} = find_mfa_source(MFA),
?CONSOLE("~s:~w: Warning: function ~s/~w ~s\n",
[Source, Line, Fun, Args, Message]),
display_mfas(Rest, Message).
format_mfa({M, F, A}) ->
?FMT("~s:~s/~w", [M, F, A]).
format_fa({_M, F, A}) ->
?FMT("~s/~w", [F, A]).
safe_element(N, Tuple) ->
case catch(element(N, Tuple)) of
{'EXIT', {badarg, _}} ->
undefined;
Value ->
Value
end.
%% Given a MFA, find the file and LOC where it's defined. Note that
find_mfa_source({M, F, A}) ->
{M, Bin, _} = code:get_object_code(M),
AbstractCode = beam_lib:chunks(Bin, [abstract_code]),
{ok, {M, [{abstract_code, {raw_abstract_v1, Code}}]}} = AbstractCode,
[{attribute, 1, file, {Source, _}} | _] = Code,
Fn = [E || E <- Code,
safe_element(1, E) == function,
safe_element(3, E) == F,
safe_element(4, E) == A],
case Fn of
[{function, Line, F, _, _}] -> {Source, Line};
        %% parameterized modules add new/1 and instance/1 for example.
[] -> {Source, function_not_found}
end.
|
e2aa6e89166fd9257fe83ab41f1cab99652733ed04dc612762aecc47d053c7d1 | lisp-mirror/clpm | paths.lisp | Definitions for pathnames to CLPM configuration .
;;;;
;;;; This software is part of CLPM. See README.org for more information. See
;;;; LICENSE for license information.
(uiop:define-package #:clpm/config/paths
(:use #:cl)
(:export #:*clpm-config-directories*
#:clpm-config-pathname))
(in-package #:clpm/config/paths)
(defvar *clpm-config-directories* nil
"A list of directory pathnames where configurations can be found.")
(defun system-config-directories ()
"Returns the pathnames to the system-wide default config directories."
(uiop:system-config-pathnames "clpm/"))
(defun user-config-directories ()
"Returns the pathnames to the user's XDG default config directories."
(append
(when (uiop:os-windows-p)
(list (uiop:resolve-absolute-location (list (uiop:get-folder-path :local-appdata)
"clpm"
"config")
:ensure-directory t)))
(uiop:xdg-config-pathnames "clpm/")))
(defparameter *default-clpm-config-directories*
(list 'user-config-directories
'system-config-directories)
"A list of functions to call that generate the default CLPM config directory
pathnames.")
(defun compute-clpm-config-dirs ()
"Compute ~*clpm-config-directories*~ using ~*default-clpm-config-directories*~
and the CLPM_CONFIG_DIRS environment variable. The pathnames from
~*default-clpm-config-directories*~ are spliced in wherever there is an empty
directory in CLPM_CONFIG_DIRS."
(let* ((env-dirs (uiop:getenv-absolute-directories "CLPM_CONFIG_DIRS"))
(nil-cell (member nil env-dirs))
(*default-clpm-config-directories* (reduce #'append (mapcar #'funcall *default-clpm-config-directories*))))
(if env-dirs
(progn
(when nil-cell
(setf (car nil-cell)
(first *default-clpm-config-directories*))
(setf (cdr nil-cell)
(append (rest *default-clpm-config-directories*)
(cdr nil-cell))))
(setf *clpm-config-directories* env-dirs))
(setf *clpm-config-directories* *default-clpm-config-directories*))))
(defun clear-clpm-config-directories ()
"Clear the ~*clpm-config-directories*~ variable."
(setf *clpm-config-directories* nil))
(uiop:register-clear-configuration-hook 'clear-clpm-config-directories)
(uiop:register-image-restore-hook 'compute-clpm-config-dirs)
(defun clpm-config-pathname (x &key (direction :input) ensure-directory)
"Given a list of directories, optionally ending with a file name and type,
~x~ relative to an element of ~*clpm-config-directories*~, return an absolute
pathname. If ~ensure-directory~ is non-NIL, ensures the returned pathname is a
directory. If ~:direction~ is ~:input~ the pathname to an existing file is
returned. If ~:direction~ is ~:output~, ~x~ is taken to be relaitve to the first
directory in ~*clpm-config-directories*~."
(let ((files (mapcar (lambda (defaults)
(uiop:resolve-absolute-location (list* defaults x)
:ensure-directory ensure-directory))
*clpm-config-directories*)))
(uiop:find-preferred-file files :direction direction)))
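;; Example call (hypothetical result; the actual pathname depends on
;; *clpm-config-directories* and on which files exist):
;;   (clpm-config-pathname '("clpm.conf"))
;;   ;; => #P"/home/user/.config/clpm/clpm.conf"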
| null | https://raw.githubusercontent.com/lisp-mirror/clpm/ad9a704fcdd0df5ce30ead106706ab6cc5fb3e5b/clpm/config/paths.lisp | lisp |
LICENSE for license information. | Definitions for pathnames to CLPM configuration .
;;;; This software is part of CLPM. See README.org for more information. See
(uiop:define-package #:clpm/config/paths
(:use #:cl)
(:export #:*clpm-config-directories*
#:clpm-config-pathname))
(in-package #:clpm/config/paths)
(defvar *clpm-config-directories* nil
"A list of directory pathnames where configurations can be found.")
(defun system-config-directories ()
"Returns the pathnames to the system-wide default config directories."
(uiop:system-config-pathnames "clpm/"))
(defun user-config-directories ()
"Returns the pathnames to the user's XDG default config directories."
(append
(when (uiop:os-windows-p)
(list (uiop:resolve-absolute-location (list (uiop:get-folder-path :local-appdata)
"clpm"
"config")
:ensure-directory t)))
(uiop:xdg-config-pathnames "clpm/")))
(defparameter *default-clpm-config-directories*
(list 'user-config-directories
'system-config-directories)
"A list of functions to call that generate the default CLPM config directory
pathnames.")
(defun compute-clpm-config-dirs ()
"Compute ~*clpm-config-directories*~ using ~*default-clpm-config-directories*~
and the CLPM_CONFIG_DIRS environment variable. The pathnames from
~*default-clpm-config-directories*~ are spliced in wherever there is an empty
directory in CLPM_CONFIG_DIRS."
(let* ((env-dirs (uiop:getenv-absolute-directories "CLPM_CONFIG_DIRS"))
(nil-cell (member nil env-dirs))
(*default-clpm-config-directories* (reduce #'append (mapcar #'funcall *default-clpm-config-directories*))))
(if env-dirs
(progn
(when nil-cell
(setf (car nil-cell)
(first *default-clpm-config-directories*))
(setf (cdr nil-cell)
(append (rest *default-clpm-config-directories*)
(cdr nil-cell))))
(setf *clpm-config-directories* env-dirs))
(setf *clpm-config-directories* *default-clpm-config-directories*))))
(defun clear-clpm-config-directories ()
"Clear the ~*clpm-config-directories*~ variable."
(setf *clpm-config-directories* nil))
(uiop:register-clear-configuration-hook 'clear-clpm-config-directories)
(uiop:register-image-restore-hook 'compute-clpm-config-dirs)
(defun clpm-config-pathname (x &key (direction :input) ensure-directory)
"Given a list of directories, optionally ending with a file name and type,
~x~ relative to an element of ~*clpm-config-directories*~, return an absolute
pathname. If ~ensure-directory~ is non-NIL, ensures the returned pathname is a
directory. If ~:direction~ is ~:input~ the pathname to an existing file is
returned. If ~:direction~ is ~:output~, ~x~ is taken to be relaitve to the first
directory in ~*clpm-config-directories*~."
(let ((files (mapcar (lambda (defaults)
(uiop:resolve-absolute-location (list* defaults x)
:ensure-directory ensure-directory))
*clpm-config-directories*)))
(uiop:find-preferred-file files :direction direction)))
|
7a70c7617ebd1ac21ed2df8cfde30c87f20495fe01c33f339575088ef6053782 | advancedtelematic/quickcheck-state-machine-distributed | QuickCheckHelpers.hs | module QuickCheckHelpers
( generateRequests
, shrinkRequests
, generateParallelRequests
, shrinkParallelRequests
, monadicProcess
)
where
import Control.Arrow
(second)
import Control.Distributed.Process
(Process)
import Control.Monad.State
(StateT, evalStateT, get, lift, put, runStateT)
import Data.List
(permutations)
import Test.QuickCheck
(Gen, Property, Testable, choose, ioProperty,
shrinkList, sized, suchThat)
import Test.QuickCheck.Monadic
(PropertyM, monadic)
import Utils
------------------------------------------------------------------------
generateRequestsStateT
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> StateT model Gen [req]
generateRequestsStateT generator preconditions transitions =
go =<< lift (sized (\k -> choose (0, k)))
where
go 0 = return []
go size = do
model <- get
msg <- lift (generator model `suchThat` preconditions model)
put (transitions model (Left msg))
(msg :) <$> go (size - 1)
generateRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen [req]
generateRequests generator preconditions transitions =
evalStateT (generateRequestsStateT generator preconditions transitions)
generateParallelRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen ([req], [req])
generateParallelRequests generator preconditions transitions model = do
(prefix, model') <- runStateT (generateRequestsStateT generator preconditions transitions) model
suffix <- generateParallelSafeRequests generator preconditions transitions model'
return (prefix, suffix)
generateParallelSafeRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen [req]
generateParallelSafeRequests generator preconditions transitions = go []
where
go reqs model = do
req <- generator model `suchThat` preconditions model
let reqs' = req : reqs
if length reqs' <= 6 && parallelSafe preconditions transitions model reqs'
then go reqs' model
else return (reverse reqs)
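-- A candidate suffix is grown only while it stays "parallel safe": see
-- parallelSafe below, which requires the preconditions to hold under every
-- permutation of the block, i.e. under any possible interleaving.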
parallelSafe
:: (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> [req]
-> Bool
parallelSafe preconditions transitions model0
= all (preconditionsHold model0)
. permutations
where
preconditionsHold _ [] = True
preconditionsHold model (req : reqs) = preconditions model req &&
preconditionsHold (transitions model (Left req)) reqs
shrinkRequests
:: (model -> req -> [req])
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> [req] -> [[req]]
shrinkRequests shrinker preconditions transitions model0
= filter (validRequests preconditions transitions model0)
. shrinkList (shrinker model0)
validRequests :: (model -> req -> Bool) -> (model -> Either req resp -> model) -> model -> [req] -> Bool
validRequests preconditions transitions = go
where
go _ [] = True
go model (req : reqs) = preconditions model req &&
go (transitions model (Left req)) reqs
validParallelRequests
:: (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> ([req], [req])
-> Bool
validParallelRequests preconditions transitions model (prefix, suffix)
= validRequests preconditions transitions model prefix
&& parallelSafe preconditions transitions model' suffix
where
model' = foldl transitions model (map Left prefix)
shrinkParallelRequests
:: (model -> req -> [req])
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> ([req], [req]) -> [([req], [req])]
shrinkParallelRequests shrinker preconditions transitions model (prefix, suffix)
= filter (validParallelRequests preconditions transitions model)
[ (prefix', suffix')
| (prefix', suffix') <- shrinkPair (shrinkList (shrinker model)) (prefix, suffix)
]
++
moveSuffixToPrefix
where
pickOneReturnRest :: [a] -> [(a, [a])]
pickOneReturnRest [] = []
pickOneReturnRest (x : xs) = (x, xs) : map (second (x :)) (pickOneReturnRest xs)
moveSuffixToPrefix =
[ (prefix ++ [prefix'], suffix')
| (prefix', suffix') <- pickOneReturnRest suffix
]
monadicProcess :: Testable a => PropertyM Process a -> Property
monadicProcess = monadic (ioProperty . runLocalProcess)
-- | Given shrinkers for the components of a pair we can shrink the pair.
shrinkPair' :: (a -> [a]) -> (b -> [b]) -> ((a, b) -> [(a, b)])
shrinkPair' shrinkerA shrinkerB (x, y) =
[ (x', y) | x' <- shrinkerA x ] ++
[ (x, y') | y' <- shrinkerB y ]
-- | Same as above, but for homogeneous pairs.
shrinkPair :: (a -> [a]) -> ((a, a) -> [(a, a)])
shrinkPair shrinker = shrinkPair' shrinker shrinker
| null | https://raw.githubusercontent.com/advancedtelematic/quickcheck-state-machine-distributed/a24e4202845dc48ed0923548c7211dd8670d5460/src/QuickCheckHelpers.hs | haskell | ----------------------------------------------------------------------
| Given shrinkers for the components of a pair we can shrink the pair.
| Same above, but for homogeneous pairs. | module QuickCheckHelpers
( generateRequests
, shrinkRequests
, generateParallelRequests
, shrinkParallelRequests
, monadicProcess
)
where
import Control.Arrow
(second)
import Control.Distributed.Process
(Process)
import Control.Monad.State
(StateT, evalStateT, get, lift, put, runStateT)
import Data.List
(permutations)
import Test.QuickCheck
(Gen, Property, Testable, choose, ioProperty,
shrinkList, sized, suchThat)
import Test.QuickCheck.Monadic
(PropertyM, monadic)
import Utils
generateRequestsStateT
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> StateT model Gen [req]
generateRequestsStateT generator preconditions transitions =
go =<< lift (sized (\k -> choose (0, k)))
where
go 0 = return []
go size = do
model <- get
msg <- lift (generator model `suchThat` preconditions model)
put (transitions model (Left msg))
(msg :) <$> go (size - 1)
generateRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen [req]
generateRequests generator preconditions transitions =
evalStateT (generateRequestsStateT generator preconditions transitions)
generateParallelRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen ([req], [req])
generateParallelRequests generator preconditions transitions model = do
(prefix, model') <- runStateT (generateRequestsStateT generator preconditions transitions) model
suffix <- generateParallelSafeRequests generator preconditions transitions model'
return (prefix, suffix)
generateParallelSafeRequests
:: (model -> Gen req)
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> Gen [req]
generateParallelSafeRequests generator preconditions transitions = go []
where
go reqs model = do
req <- generator model `suchThat` preconditions model
let reqs' = req : reqs
if length reqs' <= 6 && parallelSafe preconditions transitions model reqs'
then go reqs' model
else return (reverse reqs)
parallelSafe
:: (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> [req]
-> Bool
parallelSafe preconditions transitions model0
= all (preconditionsHold model0)
. permutations
where
preconditionsHold _ [] = True
preconditionsHold model (req : reqs) = preconditions model req &&
preconditionsHold (transitions model (Left req)) reqs
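-- Editorial note (added): a batch is parallel-safe only if *every* interleaving
-- respects the preconditions; e.g. for two hypothetical requests Incr and Reset,
-- both "Incr then Reset" and "Reset then Incr" must pass starting from model0.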
shrinkRequests
:: (model -> req -> [req])
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> [req] -> [[req]]
shrinkRequests shrinker preconditions transitions model0
= filter (validRequests preconditions transitions model0)
. shrinkList (shrinker model0)
validRequests :: (model -> req -> Bool) -> (model -> Either req resp -> model) -> model -> [req] -> Bool
validRequests preconditions transitions = go
where
go _ [] = True
go model (req : reqs) = preconditions model req &&
go (transitions model (Left req)) reqs
validParallelRequests
:: (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> ([req], [req])
-> Bool
validParallelRequests preconditions transitions model (prefix, suffix)
= validRequests preconditions transitions model prefix
&& parallelSafe preconditions transitions model' suffix
where
model' = foldl transitions model (map Left prefix)
shrinkParallelRequests
:: (model -> req -> [req])
-> (model -> req -> Bool)
-> (model -> Either req resp -> model)
-> model
-> ([req], [req]) -> [([req], [req])]
shrinkParallelRequests shrinker preconditions transitions model (prefix, suffix)
= filter (validParallelRequests preconditions transitions model)
[ (prefix', suffix')
| (prefix', suffix') <- shrinkPair (shrinkList (shrinker model)) (prefix, suffix)
]
++
moveSuffixToPrefix
where
pickOneReturnRest :: [a] -> [(a, [a])]
pickOneReturnRest [] = []
pickOneReturnRest (x : xs) = (x, xs) : map (second (x :)) (pickOneReturnRest xs)
moveSuffixToPrefix =
[ (prefix ++ [prefix'], suffix')
| (prefix', suffix') <- pickOneReturnRest suffix
]
monadicProcess :: Testable a => PropertyM Process a -> Property
monadicProcess = monadic (ioProperty . runLocalProcess)
shrinkPair' :: (a -> [a]) -> (b -> [b]) -> ((a, b) -> [(a, b)])
shrinkPair' shrinkerA shrinkerB (x, y) =
[ (x', y) | x' <- shrinkerA x ] ++
[ (x, y') | y' <- shrinkerB y ]
shrinkPair :: (a -> [a]) -> ((a, a) -> [(a, a)])
shrinkPair shrinker = shrinkPair' shrinker shrinker
|
d745ba0288240759cbbdd29c585ec482aae525e3b9ca2147fd74506af5cb0f76 | dybber/fcl | Error.hs | # OPTIONS_GHC -fno - warn - orphans #
module FCL.Error where
import Prelude hiding ((<$>))
import Text.PrettyPrint.Leijen
import FCL.Pretty ()
import FCL.Desugaring (DesugarError(..))
import FCL.Infer.Monad
import FCL.Instantiate (InstantiateError(..))
import FCL.Monomorphization (MonomorphError (..))
import FCL.External.Parser (ParseError)
import qualified FCL.IL.TypeCheck as IL (TypeError(..))
data FCLError = ParseError ParseError
| DesugarError DesugarError
| TypeError TypeError
| MonomorphError MonomorphError
| TypeErrorIL IL.TypeError
ticks :: Doc -> Doc
ticks = enclose (char '`') (char '`')
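-- Editorial note (added): e.g. ticks (text "x") renders as `x`; it is used below
-- to quote identifiers, types and levels inside error messages.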
instance Pretty FCLError where
pretty (ParseError err) = pretty err
pretty (DesugarError err) = pretty err
pretty (TypeError err) = pretty err
pretty (MonomorphError err) = pretty err
pretty (TypeErrorIL err) = pretty err
instance Pretty IL.TypeError where
pretty IL.TypeMismatch = text "Internal error. Type mismatch in IL."
pretty (IL.NotInScope x) = text "Internal error. Variable" <+> ticks (text (show x)) <+> text "not in scope while type checking IL."
instance Pretty ParseError where
pretty err = text (show err)
instance Pretty DesugarError where
pretty (DesugarLevelNotInScope lv) = text "Level variable not in scope:" <+> ticks (pretty lv)
pretty (DesugarTyVarNotInScope tv) = text "Type variable not in scope:" <+> ticks (pretty tv)
pretty DesugarEmptyDo = text "Empty `do`-construct."
pretty DesugarDoFinalExpIsBind = text "Final expression in `do`-construct can not be a bind."
instance Pretty TypeError where
pretty (UnificationError ty0 ty1) =
text "Cannot unify:" <+> ticks (pretty ty0) <+> text "and" <+> ticks (pretty ty1)
pretty (UnexpectedPolymorphicVariable ident) =
text "Unexpected polymorphic variable:" <+> pretty ident
pretty (UnboundVariableError ident) =
text "Not in scope:" <+> ticks (pretty ident)
pretty (UnboundTypeVariableError ident) =
text "Not in scope: type variable" <+> ticks (pretty ident)
pretty (OccursCheckFailed tyvar ty) = text "Occurs check failed." <+> ticks (pretty tyvar) <+> text "found in" <+> ticks (pretty ty)
pretty (LevelUnificationError l1 l2) =
text "Cannot unify levels: " <+> ticks (pretty l1) <+> text "and" <+> ticks (pretty l2)
pretty (LevelOccursCheckFailed lvlvar l) = text "Occurs check failed." <+> ticks (pretty lvlvar) <+> text "found in" <+> ticks (pretty l)
pretty (NotFullyLevelApplied ident) = text "Variable" <+> ticks (pretty ident) <+> text "is not applied to right number of level parameters."
pretty (SignatureMismatch t1 t2) = text "Inferred type" <+> ticks (pretty t1) </> text "does not match signature type" <+> pretty t2
instance Pretty InstantiateError where
pretty (TypeMismatch t1 t2) = text "Type mismatch during instantiation:" <+> ticks (pretty t1)
<+> text "does not match" <+> ticks (pretty t2)
pretty (LevelMismatch l1 l2) = text "Level mismatch during instantiation:" <+> ticks (pretty l1)
<+> text "does not match" <+> ticks (pretty l2)
instance Pretty MonomorphError where
pretty (NotInScope ident) = text "Not in scope:" <+> ticks (pretty ident)
pretty (MonomorphInstantiateError err) = pretty err
pretty (UnexpectedPolyType t) = text "Unexpected polymorphic type " <+> ticks (pretty t) <+> text "found during monomorphization."
pretty (UnexpectedPolyLevel l) = text "Unexpected polymorphic level " <+> ticks (pretty l) <+> text "found during monomorphization."
| null | https://raw.githubusercontent.com/dybber/fcl/e794a4b9d3ab6207fbe89fcddaafe97ae0d379dd/src/FCL/Error.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module FCL.Error where
import Prelude hiding ((<$>))
import Text.PrettyPrint.Leijen
import FCL.Pretty ()
import FCL.Desugaring (DesugarError(..))
import FCL.Infer.Monad
import FCL.Instantiate (InstantiateError(..))
import FCL.Monomorphization (MonomorphError (..))
import FCL.External.Parser (ParseError)
import qualified FCL.IL.TypeCheck as IL (TypeError(..))
data FCLError = ParseError ParseError
| DesugarError DesugarError
| TypeError TypeError
| MonomorphError MonomorphError
| TypeErrorIL IL.TypeError
ticks :: Doc -> Doc
ticks = enclose (char '`') (char '`')
instance Pretty FCLError where
pretty (ParseError err) = pretty err
pretty (DesugarError err) = pretty err
pretty (TypeError err) = pretty err
pretty (MonomorphError err) = pretty err
pretty (TypeErrorIL err) = pretty err
instance Pretty IL.TypeError where
pretty IL.TypeMismatch = text "Internal error. Type mismatch in IL."
pretty (IL.NotInScope x) = text "Internal error. Variable" <+> ticks (text (show x)) <+> text "not in scope while type checking IL."
instance Pretty ParseError where
pretty err = text (show err)
instance Pretty DesugarError where
pretty (DesugarLevelNotInScope lv) = text "Level variable not in scope:" <+> ticks (pretty lv)
pretty (DesugarTyVarNotInScope tv) = text "Type variable not in scope:" <+> ticks (pretty tv)
pretty DesugarEmptyDo = text "Empty `do`-construct."
pretty DesugarDoFinalExpIsBind = text "Final expression in `do`-construct can not be a bind."
instance Pretty TypeError where
pretty (UnificationError ty0 ty1) =
text "Cannot unify:" <+> ticks (pretty ty0) <+> text "and" <+> ticks (pretty ty1)
pretty (UnexpectedPolymorphicVariable ident) =
text "Unexpected polymorphic variable:" <+> pretty ident
pretty (UnboundVariableError ident) =
text "Not in scope:" <+> ticks (pretty ident)
pretty (UnboundTypeVariableError ident) =
text "Not in scope: type variable" <+> ticks (pretty ident)
pretty (OccursCheckFailed tyvar ty) = text "Occurs check failed." <+> ticks (pretty tyvar) <+> text "found in" <+> ticks (pretty ty)
pretty (LevelUnificationError l1 l2) =
text "Cannot unify levels: " <+> ticks (pretty l1) <+> text "and" <+> ticks (pretty l2)
pretty (LevelOccursCheckFailed lvlvar l) = text "Occurs check failed." <+> ticks (pretty lvlvar) <+> text "found in" <+> ticks (pretty l)
pretty (NotFullyLevelApplied ident) = text "Variable" <+> ticks (pretty ident) <+> text "is not applied to right number of level parameters."
pretty (SignatureMismatch t1 t2) = text "Inferred type" <+> ticks (pretty t1) </> text "does not match signature type" <+> pretty t2
instance Pretty InstantiateError where
pretty (TypeMismatch t1 t2) = text "Type mismatch during instantiation:" <+> ticks (pretty t1)
<+> text "does not match" <+> ticks (pretty t2)
pretty (LevelMismatch l1 l2) = text "Level mismatch during instantiation:" <+> ticks (pretty l1)
<+> text "does not match" <+> ticks (pretty l2)
instance Pretty MonomorphError where
pretty (NotInScope ident) = text "Not in scope:" <+> ticks (pretty ident)
pretty (MonomorphInstantiateError err) = pretty err
pretty (UnexpectedPolyType t) = text "Unexpected polymorphic type " <+> ticks (pretty t) <+> text "found during monomorphization."
pretty (UnexpectedPolyLevel l) = text "Unexpected polymorphic level " <+> ticks (pretty l) <+> text "found during monomorphization."
|
|
67f6d43b58837e6c3757b030f48414e00d080aaa2a6241ea08df1d51ce28cd6a | aluuu/spatial_index | rtree_intf.ml | module type Tree_S =
sig
type bb
type 'a t = Empty |
Node of (bb * 'a t) list |
Leaf of (bb * 'a) list
val empty: 'a t
val empty_node: bb * 'a t
val bounding_box_of_nodes: (bb * 'b) list -> bb
val size: 'a t -> int
val partition_by_min_delta: (bb * 'a t) list -> bb -> (bb * 'a t) * (bb * 'a t) list
val quadratic_split: (bb * 'a) list -> (bb * (bb * 'a) list) * (bb * (bb *'a) list)
val insert': ?max_nodes:(int) -> 'a t -> bb -> 'a -> (bb * 'a t) * (bb * 'a t)
val insert: ?max_nodes:(int) -> 'a t -> bb -> 'a -> 'a t
val search: 'a t -> bb -> 'a list
end
module type S =
sig
module Tree: Tree_S
type a
type t
val empty: t
val size: t -> int
val insert: t -> a -> t
val search: t -> Bounding_box.t -> a list
end
module type Rtree_params =
sig
type t
module Bounding_box: Bounding_box_intf.S
val bounding_box: t -> Bounding_box.t
val max_nodes: int
end
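(* Editorial sketch (added, hypothetical instantiation): an Rtree_params instance
could be as simple as indexing bounding boxes themselves, e.g.
module Box_params = struct
type t = Bounding_box.t
module Bounding_box = Bounding_box
let bounding_box b = b
let max_nodes = 8
end
where [Bounding_box] names this library's implementation of [Bounding_box_intf.S]. *)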
| null | https://raw.githubusercontent.com/aluuu/spatial_index/5515acca6dbd7f0b733d7e3272f905df3db3fa58/src/rtree_intf.ml | ocaml | module type Tree_S =
sig
type bb
type 'a t = Empty |
Node of (bb * 'a t) list |
Leaf of (bb * 'a) list
val empty: 'a t
val empty_node: bb * 'a t
val bounding_box_of_nodes: (bb * 'b) list -> bb
val size: 'a t -> int
val partition_by_min_delta: (bb * 'a t) list -> bb -> (bb * 'a t) * (bb * 'a t) list
val quadratic_split: (bb * 'a) list -> (bb * (bb * 'a) list) * (bb * (bb *'a) list)
val insert': ?max_nodes:(int) -> 'a t -> bb -> 'a -> (bb * 'a t) * (bb * 'a t)
val insert: ?max_nodes:(int) -> 'a t -> bb -> 'a -> 'a t
val search: 'a t -> bb -> 'a list
end
module type S =
sig
module Tree: Tree_S
type a
type t
val empty: t
val size: t -> int
val insert: t -> a -> t
val search: t -> Bounding_box.t -> a list
end
module type Rtree_params =
sig
type t
module Bounding_box: Bounding_box_intf.S
val bounding_box: t -> Bounding_box.t
val max_nodes: int
end
|
|
fc6e64d6efc820adabd005700e43fc32cad06d937314b2a0d26fcbbbb5dea19f | BranchTaken/Hemlock | test_get.ml | open! Basis.Rudiments
open! Basis
open String
let test () =
let strs = [
"";
"<_>";
"«»";
"‡";
"𐆗";
] in
List.iter strs ~f:(fun s ->
let rec fn i = begin
match Uns.(i = (B.length s)) with
| true -> ()
| false -> begin
File.Fmt.stdout
|> Basis.Fmt.fmt " "
|> Byte.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (B.get i s)
|> ignore;
fn (Uns.succ i)
end
end in
File.Fmt.stdout
|> Basis.Fmt.fmt "s="
|> pp s
|> Basis.Fmt.fmt ":"
|> ignore;
fn 0L;
File.Fmt.stdout |> Basis.Fmt.fmt "\n" |> ignore
)
let _ = test ()
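(* Editorial note (added): e.g. the double-dagger string above is the three UTF-8
bytes 0xe2 0x80 0xa1, so its line prints those three byte values after the
[s=...:] prefix; the exact byte rendering comes from
[Byte.fmt ~alt:true ~radix:Radix.Hex ~pretty:true]. *)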
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/string/test_get.ml | ocaml | open! Basis.Rudiments
open! Basis
open String
let test () =
let strs = [
"";
"<_>";
"«»";
"‡";
"𐆗";
] in
List.iter strs ~f:(fun s ->
let rec fn i = begin
match Uns.(i = (B.length s)) with
| true -> ()
| false -> begin
File.Fmt.stdout
|> Basis.Fmt.fmt " "
|> Byte.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (B.get i s)
|> ignore;
fn (Uns.succ i)
end
end in
File.Fmt.stdout
|> Basis.Fmt.fmt "s="
|> pp s
|> Basis.Fmt.fmt ":"
|> ignore;
fn 0L;
File.Fmt.stdout |> Basis.Fmt.fmt "\n" |> ignore
)
let _ = test ()
|
|
cd7fd29b2643f12f80a0ab7955a7841e3ff023c4ab9430d23a36a62234e907e0 | mattmundell/nightshade | values.lisp | ;;; The implementation of unknown-values VOPs.
(in-package "MIPS")
(define-vop (reset-stack-pointer)
(:args (ptr :scs (any-reg)))
(:generator 1
(move csp-tn ptr)))
;;; Push some values onto the stack, returning the start and number of values
;;; pushed as results. It is assumed that the Vals are wired to the standard
;;; argument locations. Nvals is the number of values to push.
;;;
;;; The generator cost is pseudo-random. We could get it right by defining a
;;; bogus SC that reflects the costs of the memory-to-memory moves for each
;;; operand, but this seems unworthwhile.
;;;
(define-vop (push-values)
(:args
(vals :more t))
(:results
(start :scs (any-reg))
(count :scs (any-reg)))
(:info nvals)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (descriptor-reg)
:to (:result 0)
:target start)
start-temp)
(:generator 20
(move start-temp csp-tn)
(inst addu csp-tn csp-tn (* nvals word-bytes))
(do ((val vals (tn-ref-across val))
(i 0 (1+ i)))
((null val))
(let ((tn (tn-ref-tn val)))
(sc-case tn
(descriptor-reg
(storew tn start-temp i))
(control-stack
(load-stack-tn temp tn)
(storew temp start-temp i)))))
(move start start-temp)
(inst li count (fixnum nvals))))
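;;; Editorial note (added): e.g. with Nvals = 2 the VOP above bumps CSP-TN by two
;;; words, stores the two argument TNs at Start and Start+4 bytes (4-byte words
;;; on this MIPS backend), and returns Count = (fixnum 2).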
;;; Push a list of values on the stack, returning Start and Count as used in
;;; unknown values continuations.
;;;
(define-vop (values-list)
(:args (arg :scs (descriptor-reg) :target list))
(:arg-types list)
(:policy :fast-safe)
(:results (start :scs (any-reg))
(count :scs (any-reg)))
(:temporary (:scs (descriptor-reg) :type list :from (:argument 0)) list)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (non-descriptor-reg)) ndescr)
(:vop-var vop)
(:save-p :compute-only)
(:generator 0
(move list arg)
(move start csp-tn)
LOOP
(inst beq list null-tn done)
(loadw temp list cons-car-slot list-pointer-type)
(loadw list list cons-cdr-slot list-pointer-type)
(inst addu csp-tn csp-tn word-bytes)
(storew temp csp-tn -1)
(inst and ndescr list lowtag-mask)
(inst xor ndescr list-pointer-type)
(inst beq ndescr zero-tn loop)
(inst nop)
(error-call vop bogus-argument-to-values-list-error list)
DONE
(inst subu count csp-tn start)))
;;; Copy the more arg block to the top of the stack so we can use them
;;; as function arguments.
;;;
(define-vop (%more-arg-values)
(:args (context :scs (descriptor-reg any-reg) :target src)
(skip :scs (any-reg zero immediate))
(num :scs (any-reg) :target count))
(:arg-types * positive-fixnum positive-fixnum)
(:temporary (:sc any-reg :from (:argument 0)) src)
(:temporary (:sc any-reg :from (:argument 2)) dst)
(:temporary (:sc descriptor-reg :from (:argument 1)) temp)
(:results (start :scs (any-reg))
(count :scs (any-reg)))
(:generator 20
(sc-case skip
(zero
(move src context))
(immediate
(inst addu src context (* (tn-value skip) word-bytes)))
(any-reg
(inst addu src context skip)))
(move count num)
(inst beq num zero-tn done)
(inst move start csp-tn)
(inst move dst csp-tn)
(inst addu csp-tn count)
LOOP
(inst lw temp src)
(inst addu src 4)
(inst addu dst 4)
(inst bne dst csp-tn loop)
(inst sw temp dst -4)
DONE))
| null | https://raw.githubusercontent.com/mattmundell/nightshade/d8abd7bd3424b95b70bed599e0cfe033e15299e0/src/compiler/mips/values.lisp | lisp | The implementation of unknown-values VOPs.
Push some values onto the stack, returning the start and number of values
argument locations. Nvals is the number of values to push.
The generator cost is pseudo-random. We could get it right by defining a
operand, but this seems unworthwhile.
Push a list of values on the stack, returning Start and Count as used in
unknown values continuations.
Copy the more arg block to the top of the stack so we can use them
as function arguments.
|
(in-package "MIPS")
(define-vop (reset-stack-pointer)
(:args (ptr :scs (any-reg)))
(:generator 1
(move csp-tn ptr)))
;;; pushed as results. It is assumed that the Vals are wired to the standard
;;; bogus SC that reflects the costs of the memory-to-memory moves for each
(define-vop (push-values)
(:args
(vals :more t))
(:results
(start :scs (any-reg))
(count :scs (any-reg)))
(:info nvals)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (descriptor-reg)
:to (:result 0)
:target start)
start-temp)
(:generator 20
(move start-temp csp-tn)
(inst addu csp-tn csp-tn (* nvals word-bytes))
(do ((val vals (tn-ref-across val))
(i 0 (1+ i)))
((null val))
(let ((tn (tn-ref-tn val)))
(sc-case tn
(descriptor-reg
(storew tn start-temp i))
(control-stack
(load-stack-tn temp tn)
(storew temp start-temp i)))))
(move start start-temp)
(inst li count (fixnum nvals))))
(define-vop (values-list)
(:args (arg :scs (descriptor-reg) :target list))
(:arg-types list)
(:policy :fast-safe)
(:results (start :scs (any-reg))
(count :scs (any-reg)))
(:temporary (:scs (descriptor-reg) :type list :from (:argument 0)) list)
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (non-descriptor-reg)) ndescr)
(:vop-var vop)
(:save-p :compute-only)
(:generator 0
(move list arg)
(move start csp-tn)
LOOP
(inst beq list null-tn done)
(loadw temp list cons-car-slot list-pointer-type)
(loadw list list cons-cdr-slot list-pointer-type)
(inst addu csp-tn csp-tn word-bytes)
(storew temp csp-tn -1)
(inst and ndescr list lowtag-mask)
(inst xor ndescr list-pointer-type)
(inst beq ndescr zero-tn loop)
(inst nop)
(error-call vop bogus-argument-to-values-list-error list)
DONE
(inst subu count csp-tn start)))
(define-vop (%more-arg-values)
(:args (context :scs (descriptor-reg any-reg) :target src)
(skip :scs (any-reg zero immediate))
(num :scs (any-reg) :target count))
(:arg-types * positive-fixnum positive-fixnum)
(:temporary (:sc any-reg :from (:argument 0)) src)
(:temporary (:sc any-reg :from (:argument 2)) dst)
(:temporary (:sc descriptor-reg :from (:argument 1)) temp)
(:results (start :scs (any-reg))
(count :scs (any-reg)))
(:generator 20
(sc-case skip
(zero
(move src context))
(immediate
(inst addu src context (* (tn-value skip) word-bytes)))
(any-reg
(inst addu src context skip)))
(move count num)
(inst beq num zero-tn done)
(inst move start csp-tn)
(inst move dst csp-tn)
(inst addu csp-tn count)
LOOP
(inst lw temp src)
(inst addu src 4)
(inst addu dst 4)
(inst bne dst csp-tn loop)
(inst sw temp dst -4)
DONE))
|
299298e2fbd2653867058ef42129c0aff762cdc393f7c482c0c3013193f417d6 | sneeuwballen/zipperposition | proofState.ml |
(* This file is free software, part of Zipperposition. See file "license" for more details. *)
(** {1 The state of a proof, contains a set of active clauses (processed),
a set of passive clauses (to be processed), and an ordering
that is used for redundancy elimination.} *)
open Logtk
module T = Term
module C = Clause
module S = Subst.FO
module Lit = Literal
module Lits = Literals
module Pos = Position
module PB = Position.Build
module CQ = ClauseQueue
let prof_next_passive = ZProf.make "proofState.next_passive"
(** {2 Set of active clauses} *)
module type S = ProofState_intf.S
module Make(C : Clause.S) : S with module C = C and module Ctx = C.Ctx = struct
module Ctx = C.Ctx
module C = C
module CQueue = ClauseQueue.Make(C)
(* module TermIndex = NPDtree.MakeTerm(C.WithPos) *)
module TermIndex = Fingerprint.Make(C.WithPos)
module UnitIndex =
(* NPDtree *)
Dtree
.Make(struct
type t = T.t * T.t * bool * C.t
type rhs = T.t
let compare (t11,t12,s1,c1) (t21,t22,s2,c2) =
let open CCOrd.Infix in
T.compare t11 t21
<?> (T.compare, t12, t22)
<?> (compare, s1, s2)
<?> (C.compare, c1, c2)
let extract (t1,t2,sign,_) = t1, t2, sign
let priority (_,_,_,c) =
if C.is_oriented_rule c then 2 else 1
end)
module SubsumptionIndex = FV_tree.Make(struct
type t = C.t
let compare = C.compare
let to_lits c = C.to_forms c |> Iter.of_list
let labels c = C.trail c |> Trail.labels
end)
(* XXX: no customization of indexing for now
let _indexes =
let table = Hashtbl.create 2 in
let mk_fingerprint fp =
Fingerprint.mk_index ~cmp:Clauses.compare_clause_pos fp in
Hashtbl.add table "fp" (mk_fingerprint Fingerprint.fp6m);
Hashtbl.add table "fp7m" (mk_fingerprint Fingerprint.fp7m);
Hashtbl.add table "fp16" (mk_fingerprint Fingerprint.fp16);
table
*)
(** {5 Common Interface for Sets} *)
module type CLAUSE_SET = sig
val on_add_clause : C.t Signal.t
(** signal triggered when a clause is added to the set *)
val on_remove_clause : C.t Signal.t
(** signal triggered when a clause is removed from the set *)
val add : C.t Iter.t -> unit
(** Add clauses to the set *)
val remove : C.t Iter.t -> unit
(** Remove clauses from the set *)
end
module MakeClauseSet(X : sig end) = struct
let clauses_ = ref C.ClauseSet.empty
let on_add_clause = Signal.create ()
let on_remove_clause = Signal.create ()
let clauses () = !clauses_
let num_clauses () = C.ClauseSet.cardinal !clauses_
let add seq =
seq
(fun c ->
if not (C.ClauseSet.mem c !clauses_)
then (
clauses_ := C.ClauseSet.add c !clauses_;
Signal.send on_add_clause c
));
()
let remove seq =
seq (fun c ->
if C.ClauseSet.mem c !clauses_
then (
clauses_ := C.ClauseSet.remove c !clauses_;
Signal.send on_remove_clause c
));
()
end
(** {2 Sets} *)
module ActiveSet = MakeClauseSet(struct end)
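(* Editorial sketch (added; [c] stands for a hypothetical clause value):
ActiveSet.add (Iter.of_list [c]);      (* fires ActiveSet.on_add_clause *)
assert (ActiveSet.num_clauses () = 1) *)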
module SimplSet = struct
let on_add_clause = Signal.create ()
let on_remove_clause = Signal.create ()
let add seq =
seq (fun c -> Signal.send on_add_clause c)
let remove seq =
seq (fun c -> Signal.send on_remove_clause c)
end
module PassiveSet = struct
include MakeClauseSet(struct end)
let queue =
let p = ClauseQueue.get_profile () in
CQueue.of_profile p
let next_ () =
if CQueue.is_empty queue
then None
else (
try
let x = CQueue.take_first queue in
Signal.send on_remove_clause x;
clauses_ := C.ClauseSet.remove x !clauses_;
Some x
with Not_found -> None
)
let next () = ZProf.with_prof prof_next_passive next_ ()
let remove seq =
seq (fun c ->
if CQueue.remove queue c then (
Signal.send on_remove_clause c
)
)
let add seq =
seq (fun c ->
if CQueue.add queue c then (
Signal.send on_add_clause c
)
)
let is_passive cl =
CQueue.mem_cl queue cl
let clauses () = C.ClauseSet.of_iter (CQueue.all_clauses queue)
let num_clauses () = CQueue.length queue
end
type stats = int * int * int
(* num passive, num active, num simplification *)
let stats () =
C.ClauseSet.cardinal (ActiveSet.clauses ()),
C.ClauseSet.cardinal (PassiveSet.clauses ()),
0
let pp out state =
let num_active, num_passive, num_simpl = stats state in
Format.fprintf out
"state {%d active clauses; %d passive clauses; \
%d simplification_rules; %a}"
num_active num_passive num_simpl
CQueue.pp PassiveSet.queue
let debug out state =
let num_active, num_passive, num_simpl = stats state in
Format.fprintf out
"@[<v2>state {%d active clauses;@ %d passive clauses;@ \
%d simplification_rules;@ queues@[<hv>%a@] \
@,active:@[<hv>%a@]@,passive:@[<hv>%a@]@,}@]"
num_active num_passive num_simpl
CQueue.pp PassiveSet.queue
C.pp_set (ActiveSet.clauses ())
C.pp_set (PassiveSet.clauses ())
end
| null | https://raw.githubusercontent.com/sneeuwballen/zipperposition/7f1455fbe2e7509907f927649c288141b1a3a247/src/prover/proofState.ml | ocaml | NPDtree
* signal triggered when a clause is added to the set
* signal triggered when a clause is removed from the set
* Add clauses to the set
* Remove clauses from the set
num passive, num active, num simplification |
(* This file is free software, part of Zipperposition. See file "license" for more details. *)
(** {1 The state of a proof, contains a set of active clauses (processed),
a set of passive clauses (to be processed), and an ordering
that is used for redundancy elimination.} *)
open Logtk
module T = Term
module C = Clause
module S = Subst.FO
module Lit = Literal
module Lits = Literals
module Pos = Position
module PB = Position.Build
module CQ = ClauseQueue
let prof_next_passive = ZProf.make "proofState.next_passive"
(** {2 Set of active clauses} *)
module type S = ProofState_intf.S
module Make(C : Clause.S) : S with module C = C and module Ctx = C.Ctx = struct
module Ctx = C.Ctx
module C = C
module CQueue = ClauseQueue.Make(C)
(* module TermIndex = NPDtree.MakeTerm(C.WithPos) *)
module TermIndex = Fingerprint.Make(C.WithPos)
module UnitIndex =
Dtree
.Make(struct
type t = T.t * T.t * bool * C.t
type rhs = T.t
let compare (t11,t12,s1,c1) (t21,t22,s2,c2) =
let open CCOrd.Infix in
T.compare t11 t21
<?> (T.compare, t12, t22)
<?> (compare, s1, s2)
<?> (C.compare, c1, c2)
let extract (t1,t2,sign,_) = t1, t2, sign
let priority (_,_,_,c) =
if C.is_oriented_rule c then 2 else 1
end)
module SubsumptionIndex = FV_tree.Make(struct
type t = C.t
let compare = C.compare
let to_lits c = C.to_forms c |> Iter.of_list
let labels c = C.trail c |> Trail.labels
end)
(* XXX: no customization of indexing for now
let _indexes =
let table = Hashtbl.create 2 in
let mk_fingerprint fp =
Fingerprint.mk_index ~cmp:Clauses.compare_clause_pos fp in
Hashtbl.add table "fp" (mk_fingerprint Fingerprint.fp6m);
Hashtbl.add table "fp7m" (mk_fingerprint Fingerprint.fp7m);
Hashtbl.add table "fp16" (mk_fingerprint Fingerprint.fp16);
table
*)
(** {5 Common Interface for Sets} *)
module type CLAUSE_SET = sig
val on_add_clause : C.t Signal.t
val on_remove_clause : C.t Signal.t
val add : C.t Iter.t -> unit
val remove : C.t Iter.t -> unit
end
module MakeClauseSet(X : sig end) = struct
let clauses_ = ref C.ClauseSet.empty
let on_add_clause = Signal.create ()
let on_remove_clause = Signal.create ()
let clauses () = !clauses_
let num_clauses () = C.ClauseSet.cardinal !clauses_
let add seq =
seq
(fun c ->
if not (C.ClauseSet.mem c !clauses_)
then (
clauses_ := C.ClauseSet.add c !clauses_;
Signal.send on_add_clause c
));
()
let remove seq =
seq (fun c ->
if C.ClauseSet.mem c !clauses_
then (
clauses_ := C.ClauseSet.remove c !clauses_;
Signal.send on_remove_clause c
));
()
end
(** {2 Sets} *)
module ActiveSet = MakeClauseSet(struct end)
module SimplSet = struct
let on_add_clause = Signal.create ()
let on_remove_clause = Signal.create ()
let add seq =
seq (fun c -> Signal.send on_add_clause c)
let remove seq =
seq (fun c -> Signal.send on_remove_clause c)
end
module PassiveSet = struct
include MakeClauseSet(struct end)
let queue =
let p = ClauseQueue.get_profile () in
CQueue.of_profile p
let next_ () =
if CQueue.is_empty queue
then None
else (
try
let x = CQueue.take_first queue in
Signal.send on_remove_clause x;
clauses_ := C.ClauseSet.remove x !clauses_;
Some x
with Not_found -> None
)
let next () = ZProf.with_prof prof_next_passive next_ ()
let remove seq =
seq (fun c ->
if CQueue.remove queue c then (
Signal.send on_remove_clause c
)
)
let add seq =
seq (fun c ->
if CQueue.add queue c then (
Signal.send on_add_clause c
)
)
let is_passive cl =
CQueue.mem_cl queue cl
let clauses () = C.ClauseSet.of_iter (CQueue.all_clauses queue)
let num_clauses () = CQueue.length queue
end
type stats = int * int * int
let stats () =
C.ClauseSet.cardinal (ActiveSet.clauses ()),
C.ClauseSet.cardinal (PassiveSet.clauses ()),
0
let pp out state =
let num_active, num_passive, num_simpl = stats state in
Format.fprintf out
"state {%d active clauses; %d passive clauses; \
%d simplification_rules; %a}"
num_active num_passive num_simpl
CQueue.pp PassiveSet.queue
let debug out state =
let num_active, num_passive, num_simpl = stats state in
Format.fprintf out
"@[<v2>state {%d active clauses;@ %d passive clauses;@ \
%d simplification_rules;@ queues@[<hv>%a@] \
@,active:@[<hv>%a@]@,passive:@[<hv>%a@]@,}@]"
num_active num_passive num_simpl
CQueue.pp PassiveSet.queue
C.pp_set (ActiveSet.clauses ())
C.pp_set (PassiveSet.clauses ())
end
|
5a2fa2ebde0cf0d76c78791513fdda3ec4001268795862353742596370ebf788 | IvanRublev/year_progress_bot | chats.erl | -module(chats).
-behaviour(sumo_doc).
-export([sumo_schema/0, sumo_sleep/1, sumo_wakeup/1]).
-export([new/2]).
new(Id, Date) ->
#{id => Id, notified_at => Date}.
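%% Editorial note (added): e.g. new(<<"12345">>, {{2020,1,1},{0,0,0}}) returns
%% #{id => <<"12345">>, notified_at => {{2020,1,1},{0,0,0}}}; the Id and Date
%% values shown here are hypothetical.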
% sumo_doc behavior
sumo_wakeup(Data) ->
Data.
sumo_sleep(Chat) ->
Chat.
sumo_schema() ->
sumo:new_schema(chats, [
sumo:new_field(id, string, [{length, 17}, not_null, id]),
sumo:new_field(notified_at, datetime, [not_null])
]).
| null | https://raw.githubusercontent.com/IvanRublev/year_progress_bot/c3e85a5598d768933d5fb676c74d92fa8033cf60/apps/year_progress_bot/src/infra/db/chats.erl | erlang | sumo_doc behavior | -module(chats).
-behaviour(sumo_doc).
-export([sumo_schema/0, sumo_sleep/1, sumo_wakeup/1]).
-export([new/2]).
new(Id, Date) ->
#{id => Id, notified_at => Date}.
sumo_wakeup(Data) ->
Data.
sumo_sleep(Chat) ->
Chat.
sumo_schema() ->
sumo:new_schema(chats, [
sumo:new_field(id, string, [{length, 17}, not_null, id]),
sumo:new_field(notified_at, datetime, [not_null])
]).
|
34ccca1b4cfa9cc9ff114e64fc7cad90a92e25a72c6ee504b46f751400f4ca8d | tomcobley/haskell-final-exams | Types.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE TypeSynonymInstances #-}
module Types where
import Data.Bits
type BitVector = Int
data Trie = Leaf [Int] | Node BitVector [SubNode]
deriving (Eq, Show)
data SubNode = Term Int | SubTrie Trie
deriving (Eq, Show)
type Hash = Int
type HashFun = Int -> Hash
empty :: Trie
empty
= Node 0 []
-----------------------------------------------------------------
-- Show function for trees
-- Only needed for displaying bit vectors...
maxDegree :: Int
maxDegree
= 16
showBitVector :: Int -> Int -> String
showBitVector bv 0
= ""
showBitVector bv n
= showBitVector (bv `div` 2) (n - 1) ++ show (bv `mod` 2)
showT :: Trie -> IO ()
showT t
= showT' t 0
showT' :: Trie -> Int -> IO ()
showT' (Leaf vs) indent
= do
putStr (replicate indent ' ')
putStrLn (" " ++ show vs)
showT' (Node bv ts) indent
= do
putStrLn (replicate indent ' ' ++ showBitVector bv maxDegree)
mapM_ (flip showT'' (indent + 2)) ts
showT'' (Term v) indent
= putStrLn (replicate indent ' ' ++ "<" ++ show v ++ ">")
showT'' (SubTrie t) indent
= showT' t indent
| null | https://raw.githubusercontent.com/tomcobley/haskell-final-exams/49807e0da0ee7121d19eacff3ccc5ff5d0e4d4a2/2020-hash-array-mapped-tries/Types.hs | haskell | # LANGUAGE TypeSynonymInstances #
---------------------------------------------------------------
Show function for trees
Only needed for displaying bit vectors... | # LANGUAGE FlexibleInstances #
module Types where
import Data.Bits
type BitVector = Int
data Trie = Leaf [Int] | Node BitVector [SubNode]
deriving (Eq, Show)
data SubNode = Term Int | SubTrie Trie
deriving (Eq, Show)
type Hash = Int
type HashFun = Int -> Hash
empty :: Trie
empty
= Node 0 []
maxDegree :: Int
maxDegree
= 16
showBitVector :: Int -> Int -> String
showBitVector bv 0
= ""
showBitVector bv n
= showBitVector (bv `div` 2) (n - 1) ++ show (bv `mod` 2)
showT :: Trie -> IO ()
showT t
= showT' t 0
showT' :: Trie -> Int -> IO ()
showT' (Leaf vs) indent
= do
putStr (replicate indent ' ')
putStrLn (" " ++ show vs)
showT' (Node bv ts) indent
= do
putStrLn (replicate indent ' ' ++ showBitVector bv maxDegree)
mapM_ (flip showT'' (indent + 2)) ts
showT'' (Term v) indent
= putStrLn (replicate indent ' ' ++ "<" ++ show v ++ ">")
showT'' (SubTrie t) indent
= showT' t indent
|
6fb37fbe7c7b6522b8642ffcba4a806e627abe1b8f91a65d3be84ab55eb5b674 | erlware/relx | rlx_tar.erl | -module(rlx_tar).
-export([make_tar/3,
format_error/1]).
-include("relx.hrl").
-include("rlx_log.hrl").
make_tar(Release, OutputDir, State) ->
Name = rlx_release:name(Release),
Vsn = rlx_release:vsn(Release),
TarFile = filename:join(OutputDir, [Name, "-", Vsn, ".tar.gz"]),
?log_info("Building release tarball ~s...", [filename:basename(TarFile)]),
ExtraFiles = extra_files(Release, OutputDir, State),
Opts = make_tar_opts(ExtraFiles, Release, OutputDir, State),
try systools:make_tar(filename:join([OutputDir, "releases", Vsn, Name]), Opts) of
Result when Result =:= ok orelse (is_tuple(Result) andalso
element(1, Result) =:= ok) ->
maybe_print_warnings(Result),
{ok, State1} = case rlx_state:is_relx_sasl(State) of
true ->
%% we used extra_files to copy in the overlays
%% nothing to do now but rename the tarball to <relname>-<vsn>.tar.gz
file:rename(filename:join(OutputDir, [Name, ".tar.gz"]), TarFile),
{ok, State};
false ->
%% have to manually add the extra files to the tarball
update_tar(ExtraFiles, State, OutputDir, Name, Vsn, rlx_release:erts(Release))
end,
?log_info("Tarball successfully created: ~s",
[rlx_file_utils:print_path(TarFile)]),
{ok, State1};
error ->
erlang:error(?RLX_ERROR({tar_unknown_generation_error, Name, Vsn}));
{error, Module, Errors} ->
erlang:error(?RLX_ERROR({tar_generation_error, Module, Errors}))
catch
_:{badarg, Args} ->
erlang:error(?RLX_ERROR({make_tar, {badarg, Args}}))
end.
%% since we print the warnings already in `rlx_assemble' we just print these as debug logs
maybe_print_warnings({ok, Module, Warnings}) when Warnings =/= [] ->
?log_debug("Warnings generating release:~n~s", [Module:format_warning(Warnings)]);
maybe_print_warnings(_) ->
ok.
format_error({make_tar, {badarg, Args}}) ->
io_lib:format("Unknown args given to systools:make_tar/2: ~p", [Args]);
format_error({tar_unknown_generation_error, Module, Vsn}) ->
io_lib:format("Tarball generation error of ~s ~s", [Module, Vsn]);
format_error({tar_update_error, error, {badmatch, {error, {File, enoent}}}}) ->
io_lib:format("Exception updating contents of release tarball:~n File ~s does not exist", [File]);
format_error({tar_update_error, Type, Exception}) when is_list(Exception) ->
io_lib:format("Exception updating contents of release tarball ~s:~s", [Type, Exception]);
format_error({tar_update_error, Type, Exception}) ->
io_lib:format("Exception updating contents of release tarball ~s:~p", [Type, Exception]);
format_error({tar_generation_error, Module, Errors}) ->
io_lib:format("Tarball generation errors:~n~s", [Module:format_error(Errors)]).
%%
%% list of options to pass to `systools:make_tar'
make_tar_opts(ExtraFiles, Release, OutputDir, State) ->
[{path, [filename:join([OutputDir, "lib", "*", "ebin"])]},
{dirs, app_dirs(State)},
silent,
{outdir, OutputDir}
| lists:flatmap(fun(Fun) ->
Fun(ExtraFiles, Release, OutputDir, State)
end, [fun maybe_include_erts/4,
fun maybe_extra_files/4,
fun maybe_system_libs/4])].
maybe_include_erts(_ExtraFiles, Release, OutputDir, State) ->
case rlx_state:include_erts(State) of
false ->
[];
IncludeErts ->
ErtsVersion = rlx_release:erts(Release),
ErtsDir = filename:join([OutputDir, "erts-" ++ ErtsVersion]),
case filelib:is_dir(ErtsDir) of
true ->
%% systools:make_tar looks for directory erts-vsn in
%% the dir passed to `erts'
[{erts, OutputDir}];
false when IncludeErts =:= true ->
[{erts, code:root_dir()}];
false ->
[{erts, IncludeErts}]
end
end.
maybe_extra_files(ExtraFiles, _Release, _OutputDir, State) ->
case rlx_state:is_relx_sasl(State) of
true ->
%% file tuples for erl_tar:add are the reverse of erl_tar:create so swap them
[{extra_files, [{From, To} || {To, From} <- ExtraFiles]}];
false ->
[]
end.
maybe_system_libs(_ExtraFiles, _Release, _OutputDir, State) ->
case rlx_state:system_libs(State) of
false ->
[{variables, [{"SYSTEM_LIB_DIR", code:lib_dir()}]},
{var_tar, omit}];
_SystemLibDir ->
[]
end.
%% additional files to add to the release tarball that
%% systools:make_tar does not include by default
extra_files(Release, OutputDir, State) ->
Vsn = rlx_release:vsn(Release),
OverlayVars = rlx_overlay:generate_overlay_vars(State, Release),
OverlayFiles = overlay_files(OverlayVars, rlx_state:overlay(State), OutputDir),
ConfigFiles = config_files(Vsn, OutputDir),
StartClean = filename:join(["releases", Vsn, "start_clean.boot"]),
NoDotErlang = filename:join(["releases", Vsn, "no_dot_erlang.boot"]),
OverlayFiles ++ ConfigFiles ++
[{StartClean, filename:join([OutputDir, StartClean])},
{NoDotErlang, filename:join([OutputDir, NoDotErlang])},
{filename:join(["releases", "start_erl.data"]),
filename:join([OutputDir, "releases", "start_erl.data"])},
{"bin", filename:join([OutputDir, "bin"])}
| case filelib:is_file(filename:join([OutputDir, "releases", "RELEASES"])) of
true ->
[{filename:join(["releases", "RELEASES"]),
filename:join([OutputDir, "releases", "RELEASES"])}];
false ->
[]
end].
%% unpack the tarball to a temporary directory and repackage it with
%% the overlays and other files we need to complete the target system
update_tar(ExtraFiles, State, OutputDir, Name, Vsn, ErtsVersion) ->
TempDir = rlx_file_utils:mkdtemp(),
try
update_tar(ExtraFiles, State, TempDir, OutputDir, Name, Vsn, ErtsVersion)
catch
?WITH_STACKTRACE(Type, Exception, Stacktrace)
?log_debug("exception updating tarball ~p:~p stacktrace=~p",
[Type, Exception, Stacktrace]),
erlang:error(?RLX_ERROR({tar_update_error, Type, Exception}))
after
rlx_file_utils:remove(TempDir, [recursive])
end.
%% used to add additional files to the release tarball when using systools
%% before the `extra_files' feature was added to `make_tar'
update_tar(ExtraFiles, State, TempDir, OutputDir, Name, Vsn, ErtsVersion) ->
TarFile = filename:join(OutputDir, [Name, "-", Vsn, ".tar.gz"]),
?log_debug("updating tarball ~s with extra files ~p", [TarFile, ExtraFiles]),
IncludeErts = rlx_state:include_erts(State),
file:rename(filename:join(OutputDir, [Name, ".tar.gz"]), TarFile),
erl_tar:extract(TarFile, [{cwd, TempDir}, compressed]),
ok =
erl_tar:create(TarFile,
[{"releases", filename:join(TempDir, "releases")} |
case IncludeErts of
false ->
[{"lib", filename:join(TempDir, "lib")}];
_ ->
[{"lib", filename:join(TempDir, "lib")},
{"erts-"++ErtsVersion, filename:join(TempDir, "erts-"++ErtsVersion)}]
end]++ExtraFiles, [dereference,compressed]),
?log_debug("update tarball ~s completed", [TarFile]),
{ok, State}.
%% include each of these config files if they exist
config_files(Vsn, OutputDir) ->
VMArgs = {filename:join(["releases", Vsn, "vm.args"]),
filename:join([OutputDir, "releases", Vsn, "vm.args"])},
VMArgsSrc = {filename:join(["releases", Vsn, "vm.args.src"]),
filename:join([OutputDir, "releases", Vsn, "vm.args.src"])},
%% when we drop support for OTP-20 we can require the use of .src files and not deal with .orig
VMArgsOrig = {filename:join(["releases", Vsn, "vm.args.orig"]),
filename:join([OutputDir, "releases", Vsn, "vm.args.orig"])},
SysConfigOrig = {filename:join(["releases", Vsn, "sys.config.orig"]),
filename:join([OutputDir, "releases", Vsn, "sys.config.orig"])},
[{NameInArchive, Filename} || {NameInArchive, Filename} <- [VMArgsSrc, VMArgs, VMArgsOrig, SysConfigOrig],
filelib:is_file(Filename)].
%% convert overlays to a list of {NameInArchive, Filename} tuples to pass to `erl_tar' or `make_tar'
overlay_files(OverlayVars, Overlay, OutputDir) ->
[begin
To = to(O),
File = rlx_overlay:render_string(OverlayVars, To),
{rlx_util:to_string(File), rlx_util:to_string(filename:join(OutputDir, File))}
end || O <- Overlay, filter(O)].
to({link, _, To}) ->
To;
to({copy, From, "./"}) ->
filename:basename(From);
to({copy, From, "."}) ->
filename:basename(From);
to({copy, _, To}) ->
To;
to({mkdir, To}) ->
To;
to({template, _, To}) ->
To.
filter({_, _, "bin/"++_}) ->
false;
filter({link, _, _}) ->
true;
filter({copy, _, _}) ->
true;
filter({mkdir, _}) ->
true;
filter({template, _, _}) ->
true;
filter(_) ->
false.
%% if `include_src' is true then include the `src' and `include' directories of each application
app_dirs(State) ->
case include_src_or_default(State) of
false ->
[];
true ->
[include, src]
end.
%% when running `tar' the default is to exclude src
include_src_or_default(State) ->
case rlx_state:include_src(State) of
undefined ->
false;
IncludeSrc ->
IncludeSrc
end.
| null | https://raw.githubusercontent.com/erlware/relx/16a7972f7679778d9d7f40228b1a20351f1077bd/src/rlx_tar.erl | erlang | we used extra_files to copy in the overlays
nothing to do now but rename the tarball to <relname>-<vsn>.tar.gz
have to manually add the extra files to the tarball
since we print the warnings already in `rlx_assemble' we just print these as debug logs
the dir passed to `erts'
additional files to add to the release tarball that
unpack the tarball to a temporary directory and repackage it with
the overlays and other files we need to complete the target system
used to add additional files to the release tarball when using systools
before the `extra_files' feature was added to `make_tar'
include each of these config files if they exist
when we drop support for OTP-20 we can require the use of .src files and not deal with .orig
if `include_src' is true then include the `src' and `include' directories of each application
when running `tar' the default is to exclude src | -module(rlx_tar).
-export([make_tar/3,
format_error/1]).
-include("relx.hrl").
-include("rlx_log.hrl").
make_tar(Release, OutputDir, State) ->
Name = rlx_release:name(Release),
Vsn = rlx_release:vsn(Release),
TarFile = filename:join(OutputDir, [Name, "-", Vsn, ".tar.gz"]),
?log_info("Building release tarball ~s...", [filename:basename(TarFile)]),
ExtraFiles = extra_files(Release, OutputDir, State),
Opts = make_tar_opts(ExtraFiles, Release, OutputDir, State),
try systools:make_tar(filename:join([OutputDir, "releases", Vsn, Name]), Opts) of
Result when Result =:= ok orelse (is_tuple(Result) andalso
element(1, Result) =:= ok) ->
maybe_print_warnings(Result),
{ok, State1} = case rlx_state:is_relx_sasl(State) of
true ->
file:rename(filename:join(OutputDir, [Name, ".tar.gz"]), TarFile),
{ok, State};
false ->
update_tar(ExtraFiles, State, OutputDir, Name, Vsn, rlx_release:erts(Release))
end,
?log_info("Tarball successfully created: ~s",
[rlx_file_utils:print_path(TarFile)]),
{ok, State1};
error ->
erlang:error(?RLX_ERROR({tar_unknown_generation_error, Name, Vsn}));
{error, Module, Errors} ->
erlang:error(?RLX_ERROR({tar_generation_error, Module, Errors}))
catch
_:{badarg, Args} ->
erlang:error(?RLX_ERROR({make_tar, {badarg, Args}}))
end.
maybe_print_warnings({ok, Module, Warnings}) when Warnings =/= [] ->
?log_debug("Warnings generating release:~n~s", [Module:format_warning(Warnings)]);
maybe_print_warnings(_) ->
ok.
format_error({make_tar, {badarg, Args}}) ->
io_lib:format("Unknown args given to systools:make_tar/2: ~p", [Args]);
format_error({tar_unknown_generation_error, Module, Vsn}) ->
io_lib:format("Tarball generation error of ~s ~s", [Module, Vsn]);
format_error({tar_update_error, error, {badmatch, {error, {File, enoent}}}}) ->
io_lib:format("Exception updating contents of release tarball:~n File ~s does not exist", [File]);
format_error({tar_update_error, Type, Exception}) when is_list(Exception) ->
io_lib:format("Exception updating contents of release tarball ~s:~s", [Type, Exception]);
format_error({tar_update_error, Type, Exception}) ->
io_lib:format("Exception updating contents of release tarball ~s:~p", [Type, Exception]);
format_error({tar_generation_error, Module, Errors}) ->
io_lib:format("Tarball generation errors:~n~s", [Module:format_error(Errors)]).
%% list of options to pass to `systools:make_tar'
make_tar_opts(ExtraFiles, Release, OutputDir, State) ->
[{path, [filename:join([OutputDir, "lib", "*", "ebin"])]},
{dirs, app_dirs(State)},
silent,
{outdir, OutputDir}
| lists:flatmap(fun(Fun) ->
Fun(ExtraFiles, Release, OutputDir, State)
end, [fun maybe_include_erts/4,
fun maybe_extra_files/4,
fun maybe_system_libs/4])].
maybe_include_erts(_ExtraFiles, Release, OutputDir, State) ->
case rlx_state:include_erts(State) of
false ->
[];
IncludeErts ->
ErtsVersion = rlx_release:erts(Release),
ErtsDir = filename:join([OutputDir, "erts-" ++ ErtsVersion]),
case filelib:is_dir(ErtsDir) of
true ->
%% systools:make_tar looks for directory erts-vsn in
[{erts, OutputDir}];
false when IncludeErts =:= true ->
[{erts, code:root_dir()}];
false ->
[{erts, IncludeErts}]
end
end.
maybe_extra_files(ExtraFiles, _Release, _OutputDir, State) ->
case rlx_state:is_relx_sasl(State) of
true ->
%% file tuples for erl_tar:add are the reverse of erl_tar:create so swap them
[{extra_files, [{From, To} || {To, From} <- ExtraFiles]}];
false ->
[]
end.
maybe_system_libs(_ExtraFiles, _Release, _OutputDir, State) ->
case rlx_state:system_libs(State) of
false ->
[{variables, [{"SYSTEM_LIB_DIR", code:lib_dir()}]},
{var_tar, omit}];
_SystemLibDir ->
[]
end.
%% systools:make_tar does not include by default
extra_files(Release, OutputDir, State) ->
Vsn = rlx_release:vsn(Release),
OverlayVars = rlx_overlay:generate_overlay_vars(State, Release),
OverlayFiles = overlay_files(OverlayVars, rlx_state:overlay(State), OutputDir),
ConfigFiles = config_files(Vsn, OutputDir),
StartClean = filename:join(["releases", Vsn, "start_clean.boot"]),
NoDotErlang = filename:join(["releases", Vsn, "no_dot_erlang.boot"]),
OverlayFiles ++ ConfigFiles ++
[{StartClean, filename:join([OutputDir, StartClean])},
{NoDotErlang, filename:join([OutputDir, NoDotErlang])},
{filename:join(["releases", "start_erl.data"]),
filename:join([OutputDir, "releases", "start_erl.data"])},
{"bin", filename:join([OutputDir, "bin"])}
| case filelib:is_file(filename:join([OutputDir, "releases", "RELEASES"])) of
true ->
[{filename:join(["releases", "RELEASES"]),
filename:join([OutputDir, "releases", "RELEASES"])}];
false ->
[]
end].
update_tar(ExtraFiles, State, OutputDir, Name, Vsn, ErtsVersion) ->
TempDir = rlx_file_utils:mkdtemp(),
try
update_tar(ExtraFiles, State, TempDir, OutputDir, Name, Vsn, ErtsVersion)
catch
?WITH_STACKTRACE(Type, Exception, Stacktrace)
?log_debug("exception updating tarball ~p:~p stacktrace=~p",
[Type, Exception, Stacktrace]),
erlang:error(?RLX_ERROR({tar_update_error, Type, Exception}))
after
rlx_file_utils:remove(TempDir, [recursive])
end.
update_tar(ExtraFiles, State, TempDir, OutputDir, Name, Vsn, ErtsVersion) ->
TarFile = filename:join(OutputDir, [Name, "-", Vsn, ".tar.gz"]),
?log_debug("updating tarball ~s with extra files ~p", [TarFile, ExtraFiles]),
IncludeErts = rlx_state:include_erts(State),
file:rename(filename:join(OutputDir, [Name, ".tar.gz"]), TarFile),
erl_tar:extract(TarFile, [{cwd, TempDir}, compressed]),
ok =
erl_tar:create(TarFile,
[{"releases", filename:join(TempDir, "releases")} |
case IncludeErts of
false ->
[{"lib", filename:join(TempDir, "lib")}];
_ ->
[{"lib", filename:join(TempDir, "lib")},
{"erts-"++ErtsVersion, filename:join(TempDir, "erts-"++ErtsVersion)}]
end]++ExtraFiles, [dereference,compressed]),
?log_debug("update tarball ~s completed", [TarFile]),
{ok, State}.
config_files(Vsn, OutputDir) ->
VMArgs = {filename:join(["releases", Vsn, "vm.args"]),
filename:join([OutputDir, "releases", Vsn, "vm.args"])},
VMArgsSrc = {filename:join(["releases", Vsn, "vm.args.src"]),
filename:join([OutputDir, "releases", Vsn, "vm.args.src"])},
VMArgsOrig = {filename:join(["releases", Vsn, "vm.args.orig"]),
filename:join([OutputDir, "releases", Vsn, "vm.args.orig"])},
SysConfigOrig = {filename:join(["releases", Vsn, "sys.config.orig"]),
filename:join([OutputDir, "releases", Vsn, "sys.config.orig"])},
[{NameInArchive, Filename} || {NameInArchive, Filename} <- [VMArgsSrc, VMArgs, VMArgsOrig, SysConfigOrig],
filelib:is_file(Filename)].
%% convert overlays to a list of {NameInArchive, Filename} tuples to pass to `erl_tar' or `make_tar'
overlay_files(OverlayVars, Overlay, OutputDir) ->
[begin
To = to(O),
File = rlx_overlay:render_string(OverlayVars, To),
{rlx_util:to_string(File), rlx_util:to_string(filename:join(OutputDir, File))}
end || O <- Overlay, filter(O)].
to({link, _, To}) ->
To;
to({copy, From, "./"}) ->
filename:basename(From);
to({copy, From, "."}) ->
filename:basename(From);
to({copy, _, To}) ->
To;
to({mkdir, To}) ->
To;
to({template, _, To}) ->
To.
filter({_, _, "bin/"++_}) ->
false;
filter({link, _, _}) ->
true;
filter({copy, _, _}) ->
true;
filter({mkdir, _}) ->
true;
filter({template, _, _}) ->
true;
filter(_) ->
false.
app_dirs(State) ->
case include_src_or_default(State) of
false ->
[];
true ->
[include, src]
end.
include_src_or_default(State) ->
case rlx_state:include_src(State) of
undefined ->
false;
IncludeSrc ->
IncludeSrc
end.
|
b906155131d7987ae004dc85c126941b735e9b7ef1243dac428d445331a3bdd4 | panda-planner-dev/ipc2020-domains | p-07.lisp | (defproblem problem domain
(
(visible waypoint0 waypoint3)
(visible waypoint3 waypoint0)
(visible waypoint0 waypoint5)
(visible waypoint5 waypoint0)
(visible waypoint1 waypoint0)
(visible waypoint0 waypoint1)
(visible waypoint2 waypoint0)
(visible waypoint0 waypoint2)
(visible waypoint2 waypoint1)
(visible waypoint1 waypoint2)
(visible waypoint2 waypoint5)
(visible waypoint5 waypoint2)
(visible waypoint3 waypoint2)
(visible waypoint2 waypoint3)
(visible waypoint4 waypoint0)
(visible waypoint0 waypoint4)
(visible waypoint4 waypoint3)
(visible waypoint3 waypoint4)
(visible waypoint5 waypoint1)
(visible waypoint1 waypoint5)
(visible waypoint5 waypoint4)
(visible waypoint4 waypoint5)
(at_soil_sample waypoint1)
(at_rock_sample waypoint2)
(at_rock_sample waypoint3)
(at_soil_sample waypoint4)
(at_rock_sample waypoint4)
(at_rock_sample waypoint5)
(at_lander general waypoint3)
(channel_free general)
(at rover0 waypoint2)
(available rover0)
(store_of rover0store rover0)
(empty rover0store)
(equipped_for_soil_analysis rover0)
(equipped_for_rock_analysis rover0)
(equipped_for_imaging rover0)
(can_traverse rover0 waypoint2 waypoint0)
(can_traverse rover0 waypoint0 waypoint2)
(can_traverse rover0 waypoint2 waypoint1)
(can_traverse rover0 waypoint1 waypoint2)
(can_traverse rover0 waypoint2 waypoint3)
(can_traverse rover0 waypoint3 waypoint2)
(can_traverse rover0 waypoint2 waypoint5)
(can_traverse rover0 waypoint5 waypoint2)
(can_traverse rover0 waypoint0 waypoint4)
(can_traverse rover0 waypoint4 waypoint0)
(at rover1 waypoint3)
(available rover1)
(store_of rover1store rover1)
(empty rover1store)
(equipped_for_rock_analysis rover1)
(can_traverse rover1 waypoint3 waypoint0)
(can_traverse rover1 waypoint0 waypoint3)
(can_traverse rover1 waypoint3 waypoint2)
(can_traverse rover1 waypoint2 waypoint3)
(can_traverse rover1 waypoint3 waypoint4)
(can_traverse rover1 waypoint4 waypoint3)
(can_traverse rover1 waypoint0 waypoint1)
(can_traverse rover1 waypoint1 waypoint0)
(can_traverse rover1 waypoint0 waypoint5)
(can_traverse rover1 waypoint5 waypoint0)
(at rover2 waypoint4)
(available rover2)
(store_of rover2store rover2)
(empty rover2store)
(equipped_for_soil_analysis rover2)
(equipped_for_rock_analysis rover2)
(equipped_for_imaging rover2)
(can_traverse rover2 waypoint4 waypoint0)
(can_traverse rover2 waypoint0 waypoint4)
(can_traverse rover2 waypoint4 waypoint5)
(can_traverse rover2 waypoint5 waypoint4)
(can_traverse rover2 waypoint0 waypoint1)
(can_traverse rover2 waypoint1 waypoint0)
(can_traverse rover2 waypoint0 waypoint3)
(can_traverse rover2 waypoint3 waypoint0)
(can_traverse rover2 waypoint5 waypoint2)
(can_traverse rover2 waypoint2 waypoint5)
(on_board camera0 rover0)
(calibration_target camera0 objective0)
(supports camera0 colour)
(supports camera0 high_res)
(on_board camera1 rover2)
(calibration_target camera1 objective1)
(supports camera1 high_res)
(visible_from objective0 waypoint0)
(visible_from objective0 waypoint1)
(visible_from objective0 waypoint2)
(visible_from objective0 waypoint3)
(visible_from objective0 waypoint4)
(visible_from objective1 waypoint0)
(visible_from objective1 waypoint1)
(visible_from objective1 waypoint2)
(visible_from objective1 waypoint3)
(type_camera camera0)
(type_camera camera1)
(type_lander general)
(type_mode colour)
(type_mode high_res)
(type_mode low_res)
(type_object camera0)
(type_object camera1)
(type_object colour)
(type_object general)
(type_object high_res)
(type_object low_res)
(type_object objective0)
(type_object objective1)
(type_object rover0)
(type_object rover0store)
(type_object rover1)
(type_object rover1store)
(type_object rover2)
(type_object rover2store)
(type_object waypoint0)
(type_object waypoint1)
(type_object waypoint2)
(type_object waypoint3)
(type_object waypoint4)
(type_object waypoint5)
(type_objective objective0)
(type_objective objective1)
(type_rover rover0)
(type_rover rover1)
(type_rover rover2)
(type_sort_for_camera0 camera0)
(type_sort_for_camera1 camera1)
(type_sort_for_colour colour)
(type_sort_for_general general)
(type_sort_for_high_res high_res)
(type_sort_for_objective0 objective0)
(type_sort_for_objective1 objective1)
(type_sort_for_rover0 rover0)
(type_sort_for_rover0store rover0store)
(type_sort_for_rover1 rover1)
(type_sort_for_rover1store rover1store)
(type_sort_for_rover2 rover2)
(type_sort_for_rover2store rover2store)
(type_sort_for_waypoint0 waypoint0)
(type_sort_for_waypoint1 waypoint1)
(type_sort_for_waypoint2 waypoint2)
(type_sort_for_waypoint3 waypoint3)
(type_sort_for_waypoint4 waypoint4)
(type_sort_for_waypoint5 waypoint5)
(type_store rover0store)
(type_store rover1store)
(type_store rover2store)
(type_waypoint waypoint0)
(type_waypoint waypoint1)
(type_waypoint waypoint2)
(type_waypoint waypoint3)
(type_waypoint waypoint4)
(type_waypoint waypoint5)
)
((__top))
)
| null | https://raw.githubusercontent.com/panda-planner-dev/ipc2020-domains/9adb54325d3df35907adc7115fcc65f0ce5953cc/partial-order/Rover/other/SHOP2/p-07.lisp | lisp | (defproblem problem domain
(
(visible waypoint0 waypoint3)
(visible waypoint3 waypoint0)
(visible waypoint0 waypoint5)
(visible waypoint5 waypoint0)
(visible waypoint1 waypoint0)
(visible waypoint0 waypoint1)
(visible waypoint2 waypoint0)
(visible waypoint0 waypoint2)
(visible waypoint2 waypoint1)
(visible waypoint1 waypoint2)
(visible waypoint2 waypoint5)
(visible waypoint5 waypoint2)
(visible waypoint3 waypoint2)
(visible waypoint2 waypoint3)
(visible waypoint4 waypoint0)
(visible waypoint0 waypoint4)
(visible waypoint4 waypoint3)
(visible waypoint3 waypoint4)
(visible waypoint5 waypoint1)
(visible waypoint1 waypoint5)
(visible waypoint5 waypoint4)
(visible waypoint4 waypoint5)
(at_soil_sample waypoint1)
(at_rock_sample waypoint2)
(at_rock_sample waypoint3)
(at_soil_sample waypoint4)
(at_rock_sample waypoint4)
(at_rock_sample waypoint5)
(at_lander general waypoint3)
(channel_free general)
(at rover0 waypoint2)
(available rover0)
(store_of rover0store rover0)
(empty rover0store)
(equipped_for_soil_analysis rover0)
(equipped_for_rock_analysis rover0)
(equipped_for_imaging rover0)
(can_traverse rover0 waypoint2 waypoint0)
(can_traverse rover0 waypoint0 waypoint2)
(can_traverse rover0 waypoint2 waypoint1)
(can_traverse rover0 waypoint1 waypoint2)
(can_traverse rover0 waypoint2 waypoint3)
(can_traverse rover0 waypoint3 waypoint2)
(can_traverse rover0 waypoint2 waypoint5)
(can_traverse rover0 waypoint5 waypoint2)
(can_traverse rover0 waypoint0 waypoint4)
(can_traverse rover0 waypoint4 waypoint0)
(at rover1 waypoint3)
(available rover1)
(store_of rover1store rover1)
(empty rover1store)
(equipped_for_rock_analysis rover1)
(can_traverse rover1 waypoint3 waypoint0)
(can_traverse rover1 waypoint0 waypoint3)
(can_traverse rover1 waypoint3 waypoint2)
(can_traverse rover1 waypoint2 waypoint3)
(can_traverse rover1 waypoint3 waypoint4)
(can_traverse rover1 waypoint4 waypoint3)
(can_traverse rover1 waypoint0 waypoint1)
(can_traverse rover1 waypoint1 waypoint0)
(can_traverse rover1 waypoint0 waypoint5)
(can_traverse rover1 waypoint5 waypoint0)
(at rover2 waypoint4)
(available rover2)
(store_of rover2store rover2)
(empty rover2store)
(equipped_for_soil_analysis rover2)
(equipped_for_rock_analysis rover2)
(equipped_for_imaging rover2)
(can_traverse rover2 waypoint4 waypoint0)
(can_traverse rover2 waypoint0 waypoint4)
(can_traverse rover2 waypoint4 waypoint5)
(can_traverse rover2 waypoint5 waypoint4)
(can_traverse rover2 waypoint0 waypoint1)
(can_traverse rover2 waypoint1 waypoint0)
(can_traverse rover2 waypoint0 waypoint3)
(can_traverse rover2 waypoint3 waypoint0)
(can_traverse rover2 waypoint5 waypoint2)
(can_traverse rover2 waypoint2 waypoint5)
(on_board camera0 rover0)
(calibration_target camera0 objective0)
(supports camera0 colour)
(supports camera0 high_res)
(on_board camera1 rover2)
(calibration_target camera1 objective1)
(supports camera1 high_res)
(visible_from objective0 waypoint0)
(visible_from objective0 waypoint1)
(visible_from objective0 waypoint2)
(visible_from objective0 waypoint3)
(visible_from objective0 waypoint4)
(visible_from objective1 waypoint0)
(visible_from objective1 waypoint1)
(visible_from objective1 waypoint2)
(visible_from objective1 waypoint3)
(type_camera camera0)
(type_camera camera1)
(type_lander general)
(type_mode colour)
(type_mode high_res)
(type_mode low_res)
(type_object camera0)
(type_object camera1)
(type_object colour)
(type_object general)
(type_object high_res)
(type_object low_res)
(type_object objective0)
(type_object objective1)
(type_object rover0)
(type_object rover0store)
(type_object rover1)
(type_object rover1store)
(type_object rover2)
(type_object rover2store)
(type_object waypoint0)
(type_object waypoint1)
(type_object waypoint2)
(type_object waypoint3)
(type_object waypoint4)
(type_object waypoint5)
(type_objective objective0)
(type_objective objective1)
(type_rover rover0)
(type_rover rover1)
(type_rover rover2)
(type_sort_for_camera0 camera0)
(type_sort_for_camera1 camera1)
(type_sort_for_colour colour)
(type_sort_for_general general)
(type_sort_for_high_res high_res)
(type_sort_for_objective0 objective0)
(type_sort_for_objective1 objective1)
(type_sort_for_rover0 rover0)
(type_sort_for_rover0store rover0store)
(type_sort_for_rover1 rover1)
(type_sort_for_rover1store rover1store)
(type_sort_for_rover2 rover2)
(type_sort_for_rover2store rover2store)
(type_sort_for_waypoint0 waypoint0)
(type_sort_for_waypoint1 waypoint1)
(type_sort_for_waypoint2 waypoint2)
(type_sort_for_waypoint3 waypoint3)
(type_sort_for_waypoint4 waypoint4)
(type_sort_for_waypoint5 waypoint5)
(type_store rover0store)
(type_store rover1store)
(type_store rover2store)
(type_waypoint waypoint0)
(type_waypoint waypoint1)
(type_waypoint waypoint2)
(type_waypoint waypoint3)
(type_waypoint waypoint4)
(type_waypoint waypoint5)
)
((__top))
)
|
|
ff7596609299ddadf3c1a9f7f31e56482c2cfb456c49542b809e32d7ac314f66 | mpenet/commons | string.clj | (ns qbits.commons.string
  (:require [clojure.string :as str]))
(defn camel->dashed
  [s]
  (-> s
      (str/replace #"^[A-Z]+" str/lower-case)
      (str/replace #"_?([A-Z]+)"
                   (comp (partial str "-")
                         str/lower-case second))
      (str/replace #"-|_" "-")))
| null | https://raw.githubusercontent.com/mpenet/commons/a377fed3a5ef00ba26f704e39ca60ad4039870cb/src/clj/qbits/commons/string.clj | clojure | (ns qbits.commons.string
  (:require [clojure.string :as str]))
(defn camel->dashed
  [s]
  (-> s
      (str/replace #"^[A-Z]+" str/lower-case)
      (str/replace #"_?([A-Z]+)"
                   (comp (partial str "-")
                         str/lower-case second))
      (str/replace #"-|_" "-")))
|
|
04247b47f80d4cefaf20c86fa3213d6b8f1a65dff0c43bfc059437044411ee6b | seancorfield/next-jdbc | transaction_test.clj | ;; copyright (c) 2019-2021, all rights reserved
(ns next.jdbc.transaction-test
  "Stub test namespace for transaction handling."
  (:require [clojure.test :refer [deftest is testing use-fixtures]]
            [next.jdbc :as jdbc]
            [next.jdbc.specs :as specs]
            [next.jdbc.test-fixtures :refer [with-test-db db ds column
                                             default-options
                                             derby? mssql? mysql? postgres?]]
            [next.jdbc.transaction :as tx]))
(set! *warn-on-reflection* true)
(use-fixtures :once with-test-db)
(specs/instrument)
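;; A minimal sketch of the kind of test this stub namespace is wired for,
;; assuming (ds) returns the datasource created by the with-test-db fixture
;; and that the test schema has a fruit table; the test body is hypothetical
;; and not part of the original file:
;;   (deftest rollback-on-throw
;;     (is (thrown? clojure.lang.ExceptionInfo
;;                  (jdbc/with-transaction [tx (ds)]
;;                    (jdbc/execute! tx ["insert into fruit (name) values ('x')"])
;;                    (throw (ex-info "boom" {}))))))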
| null | https://raw.githubusercontent.com/seancorfield/next-jdbc/91dda2cdae3f9e897a54fe21edf8467acae8aa0d/test/next/jdbc/transaction_test.clj | clojure | copyright ( c ) 2019 - 2021 , all rights reserved
(ns next.jdbc.transaction-test
  "Stub test namespace for transaction handling."
  (:require [clojure.test :refer [deftest is testing use-fixtures]]
            [next.jdbc :as jdbc]
            [next.jdbc.specs :as specs]
            [next.jdbc.test-fixtures :refer [with-test-db db ds column
                                             default-options
                                             derby? mssql? mysql? postgres?]]
            [next.jdbc.transaction :as tx]))
(set! *warn-on-reflection* true)
(use-fixtures :once with-test-db)
(specs/instrument)
|
|
5793dbd0688d32a3af8db7480c4e04f63559fe20f5b841aa225fbe5d5fa66126 | smallhadroncollider/brok | Options.hs | {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Brok.Parser.Options
    ( options
    ) where
import ClassyPrelude
import Data.Attoparsec.Text
import Brok.Parser.Attoparsec
import Brok.Parser.Links (url)
import Brok.Types.Config
import Brok.Types.Next (Next (..))
data Option
    = Cache (Maybe Integer)
    | Interval Integer
    | Ignore [Text]
    | Files [Text]
    | NoColor
    | CheckCerts
    | OnlyFailures
onlyFailuresP :: Parser Option
onlyFailuresP = lexeme $ string "--only-failures" $> OnlyFailures
noColorP :: Parser Option
noColorP = lexeme $ string "--no-color" $> NoColor
checkCertsP :: Parser Option
checkCertsP = lexeme $ string "--check-certs" $> CheckCerts
noCacheP :: Parser Option
noCacheP = lexeme $ string "--no-cache" $> Cache Nothing
cacheP :: Parser Option
cacheP = lexeme $ Cache . Just <$> (string "--cache" *> char '\n' *> decimal)
intervalP :: Parser Option
intervalP = lexeme $ Interval <$> (string "--interval" *> char '\n' *> decimal)
urlP :: Parser Text
urlP = lexeme url
ignoreP :: Parser Option
ignoreP = lexeme $ Ignore <$> (string "--ignore" *> char '\n' *> many1 urlP)
fileP :: Parser Text
fileP = lexeme $ manyChars (notChar '\n')
optsToConfig :: [Option] -> Config
optsToConfig = foldl' convert defaultConfig
  where
    convert dc (Cache i) = dc {cache = i}
    convert dc (Ignore i) = dc {ignore = i}
    convert dc (Interval i) = dc {interval = i}
    convert dc (Files i) = dc {files = i}
    convert dc NoColor = dc {noColor = True}
    convert dc CheckCerts = dc {checkCerts = True}
    convert dc OnlyFailures = dc {onlyFailures = True}
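-- Illustrative only (not part of the original module): folding a parsed
-- option list into a Config, for example
--   optsToConfig [NoColor, Files ["README.md"]]
-- yields defaultConfig with noColor set to True and files set to ["README.md"].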
arguments :: Parser Config
arguments = do
    opts <-
        many'
            (noCacheP <|> cacheP <|> intervalP <|> ignoreP <|> noColorP <|> checkCertsP <|>
             onlyFailuresP)
    fls <- many1 fileP
    pure . optsToConfig $ opts <> [Files fls]
helpP :: Parser Next
helpP = lexeme $ (string "--help" <|> string "-h") $> Help
versionP :: Parser Next
versionP = lexeme $ (string "--version" <|> string "-v") $> Version
next :: Parser Next
next = helpP <|> versionP <|> (Continue <$> arguments)
-- run parser
options :: [Text] -> Either Text Next
options [] = Left "No files provided"
options content = first tshow $ parseOnly next (unlines content)
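-- Illustrative behaviour of the exported entry point (not part of the
-- original file); both results follow directly from the definitions above:
--   options []          evaluates to Left "No files provided"
--   options ["--help"]  evaluates to Right Help
-- Any argument line that no option parser accepts is treated as a file name,
-- so options ["README.md"] yields a Continue configuration whose files field
-- is ["README.md"].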
| null | https://raw.githubusercontent.com/smallhadroncollider/brok/bf62288d913af5fc694e683cc247f66426025400/src/Brok/Parser/Options.hs | haskell | # LANGUAGE OverloadedStrings #
run parser | # LANGUAGE NoImplicitPrelude #
module Brok.Parser.Options
    ( options
    ) where
import ClassyPrelude
import Data.Attoparsec.Text
import Brok.Parser.Attoparsec
import Brok.Parser.Links (url)
import Brok.Types.Config
import Brok.Types.Next (Next (..))
data Option
    = Cache (Maybe Integer)
    | Interval Integer
    | Ignore [Text]
    | Files [Text]
    | NoColor
    | CheckCerts
    | OnlyFailures
onlyFailuresP :: Parser Option
onlyFailuresP = lexeme $ string "--only-failures" $> OnlyFailures
noColorP :: Parser Option
noColorP = lexeme $ string "--no-color" $> NoColor
checkCertsP :: Parser Option
checkCertsP = lexeme $ string "--check-certs" $> CheckCerts
noCacheP :: Parser Option
noCacheP = lexeme $ string "--no-cache" $> Cache Nothing
cacheP :: Parser Option
cacheP = lexeme $ Cache . Just <$> (string "--cache" *> char '\n' *> decimal)
intervalP :: Parser Option
intervalP = lexeme $ Interval <$> (string "--interval" *> char '\n' *> decimal)
urlP :: Parser Text
urlP = lexeme url
ignoreP :: Parser Option
ignoreP = lexeme $ Ignore <$> (string "--ignore" *> char '\n' *> many1 urlP)
fileP :: Parser Text
fileP = lexeme $ manyChars (notChar '\n')
optsToConfig :: [Option] -> Config
optsToConfig = foldl' convert defaultConfig
  where
    convert dc (Cache i) = dc {cache = i}
    convert dc (Ignore i) = dc {ignore = i}
    convert dc (Interval i) = dc {interval = i}
    convert dc (Files i) = dc {files = i}
    convert dc NoColor = dc {noColor = True}
    convert dc CheckCerts = dc {checkCerts = True}
    convert dc OnlyFailures = dc {onlyFailures = True}
arguments :: Parser Config
arguments = do
    opts <-
        many'
            (noCacheP <|> cacheP <|> intervalP <|> ignoreP <|> noColorP <|> checkCertsP <|>
             onlyFailuresP)
    fls <- many1 fileP
    pure . optsToConfig $ opts <> [Files fls]
helpP :: Parser Next
helpP = lexeme $ (string "--help" <|> string "-h") $> Help
versionP :: Parser Next
versionP = lexeme $ (string "--version" <|> string "-v") $> Version
next :: Parser Next
next = helpP <|> versionP <|> (Continue <$> arguments)
options :: [Text] -> Either Text Next
options [] = Left "No files provided"
options content = first tshow $ parseOnly next (unlines content)
|