_id stringlengths 64-64 | repository stringlengths 6-84 | name stringlengths 4-110 | content stringlengths 0-248k | license null | download_url stringlengths 89-454 | language stringclasses 7 values | comments stringlengths 0-74.6k | code stringlengths 0-248k
---|---|---|---|---|---|---|---|---
504045747aa3bfcbe49cd9dad4f895ee863446ebd790c488b4317f313f863ed6 | qitab/cl-protobufs | lisp-alias-test.lisp | ;;; Copyright 2020 Google LLC
;;;
;;; Use of this source code is governed by an MIT-style
;;; license that can be found in the LICENSE file or at
;;; .
(defpackage #:cl-protobufs.test.alias
(:use #:cl
#:clunit)
(:local-nicknames (#:pb #:cl-protobufs.alias-test)
(#:pi #:cl-protobufs.implementation)
(#:proto #:cl-protobufs))
(:export :run))
(in-package #:cl-protobufs.test.alias)
(defsuite alias-suite (cl-protobufs.test:root-suite))
(defun run (&key use-debugger)
"Run all tests in the test suite.
Parameters
USE-DEBUGGER: On assert failure bring up the debugger."
(clunit:run-suite 'alias-suite :use-debugger use-debugger
:signal-condition-on-fail t))
(defstruct aliased-struct i)
(defconstant +TAG-I+ (pi::make-tag 'proto:int32 1)
"The tag that should be used for Message.I and AliasedMessage.I")
;; Serialization of cl-protobufs-generated class (as opposed to using a lisp_alias FieldOption)
(defun expect-bytes (list array)
(assert-equal (coerce list 'list) (coerce array 'list)))
(deftest serialize-regular (alias-suite)
(let ((obj (pb:make-message :i 99)))
(expect-bytes (list +TAG-I+ 99)
(proto:serialize-to-bytes obj))))
;; Serialization of the aliased (explicit) message
(defun internal-serialize-message (msg buf)
"Serialization function for message.
MSG: The message being serialized.
BUF: The buffer to serialize to."
(let ((i (aliased-struct-i msg))
(size 0))
(incf size (pi::serialize-scalar i 'proto:int32 +TAG-I+ buf))))
#+sbcl
(defun (:protobuf :serialize pb:aliased-message)
(val buf)
(internal-serialize-message val buf))
#-sbcl
(setf (get 'pb:aliased-message :serialize)
(lambda (val buf)
(internal-serialize-message val buf)))
(deftest serialize-aliased (alias-suite)
(let ((struct (make-aliased-struct :i 99)))
(expect-bytes (list +TAG-I+ 99)
(proto:serialize-to-bytes struct 'pb:aliased-message))))
;; Serialization of OuterMessage
(deftest serialize-empty-outer (alias-suite)
(let ((outer (pb:make-outer-message)))
(expect-bytes nil (proto:serialize-to-bytes outer))))
(defconstant +TAG-MESSAGE+ (pi::make-tag 'string 1)
"The tag that should be used for field OuterMessage.Message")
(defconstant +TAG-ALIASED+ (pi::make-tag 'string 2)
"The tag that should be used for field OuterMessage.Aliased")
(deftest serialize-outer-containing-regular (alias-suite)
(let ((outer (pb:make-outer-message
:message (pb:make-message :i 99))))
(expect-bytes (list +TAG-MESSAGE+ 2 +TAG-I+ 99)
(proto:serialize-to-bytes outer))))
(deftest serialize-outer-containing-aliased (alias-suite)
(let ((outer (pb:make-outer-message
:aliased (make-aliased-struct :i 99))))
(expect-bytes (list +TAG-ALIASED+ 2 +TAG-I+ 99)
(proto:serialize-to-bytes outer))))
;; cl-protobufs message metadata
(deftest find-message-for-alias (alias-suite)
(assert-true (proto:find-message-descriptor 'my.dog.has.fleas::aliased-struct))
;; Known bug with ABCL
#-abcl
(assert-eq (proto:find-message-descriptor 'pb:aliased-message)
(proto:find-message-descriptor 'my.dog.has.fleas::aliased-struct)))
| null | https://raw.githubusercontent.com/qitab/cl-protobufs/bf5447065714a9f5b1b6ce6d0512fd99b4105e76/tests/lisp-alias-test.lisp | lisp |
license that can be found in the LICENSE file or at
.
Serialization of the aliased (explicit) message
cl-protobufs message metadata | Copyright 2020 Google LLC
Use of this source code is governed by an MIT - style
(defpackage #:cl-protobufs.test.alias
(:use #:cl
#:clunit)
(:local-nicknames (#:pb #:cl-protobufs.alias-test)
(#:pi #:cl-protobufs.implementation)
(#:proto #:cl-protobufs))
(:export :run))
(in-package #:cl-protobufs.test.alias)
(defsuite alias-suite (cl-protobufs.test:root-suite))
(defun run (&key use-debugger)
"Run all tests in the test suite.
Parameters
USE-DEBUGGER: On assert failure bring up the debugger."
(clunit:run-suite 'alias-suite :use-debugger use-debugger
:signal-condition-on-fail t))
(defstruct aliased-struct i)
(defconstant +TAG-I+ (pi::make-tag 'proto:int32 1)
"The tag that should be used for Message.I and AliasedMessage.I")
Serialization of cl - protobufs - generated class ( as opposed to using a lisp_alias FieldOption )
(defun expect-bytes (list array)
(assert-equal (coerce list 'list) (coerce array 'list)))
(deftest serialize-regular (alias-suite)
(let ((obj (pb:make-message :i 99)))
(expect-bytes (list +TAG-I+ 99)
(proto:serialize-to-bytes obj))))
(defun internal-serialize-message (msg buf)
"Serialization function for message.
MSG: The message being serialized.
BUF: The buffer to serialize to."
(let ((i (aliased-struct-i msg))
(size 0))
(incf size (pi::serialize-scalar i 'proto:int32 +TAG-I+ buf))))
#+sbcl
(defun (:protobuf :serialize pb:aliased-message)
(val buf)
(internal-serialize-message val buf))
#-sbcl
(setf (get 'pb:aliased-message :serialize)
(lambda (val buf)
(internal-serialize-message val buf)))
(deftest serialize-aliased (alias-suite)
(let ((struct (make-aliased-struct :i 99)))
(expect-bytes (list +TAG-I+ 99)
(proto:serialize-to-bytes struct 'pb:aliased-message))))
Serialization of OuterMessage
(deftest serialize-empty-outer (alias-suite)
(let ((outer (pb:make-outer-message)))
(expect-bytes nil (proto:serialize-to-bytes outer))))
(defconstant +TAG-MESSAGE+ (pi::make-tag 'string 1)
"The tag that should be used for field OuterMessage.Message")
(defconstant +TAG-ALIASED+ (pi::make-tag 'string 2)
"The tag that should be used for field OuterMessage.Aliased")
(deftest serialize-outer-containing-regular (alias-suite)
(let ((outer (pb:make-outer-message
:message (pb:make-message :i 99))))
(expect-bytes (list +TAG-MESSAGE+ 2 +TAG-I+ 99)
(proto:serialize-to-bytes outer))))
(deftest serialize-outer-containing-aliased (alias-suite)
(let ((outer (pb:make-outer-message
:aliased (make-aliased-struct :i 99))))
(expect-bytes (list +TAG-ALIASED+ 2 +TAG-I+ 99)
(proto:serialize-to-bytes outer))))
(deftest find-message-for-alias (alias-suite)
(assert-true (proto:find-message-descriptor 'my.dog.has.fleas::aliased-struct))
Known bug with ABCL
#-abcl
(assert-eq (proto:find-message-descriptor 'pb:aliased-message)
(proto:find-message-descriptor 'my.dog.has.fleas::aliased-struct)))
|
3e940ad92d7431fe3ce0c43d34d6920b91c80bfbcead7db6b400fe5c44bac680 | lehins/massiv | Mult.hs | {-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Criterion.Main
import Data.Massiv.Array as A
import Data.Massiv.Bench.Matrix
multArrsAlt :: Array S Ix2 Double -> Array S Ix2 Double -> Array S Ix2 Double
multArrsAlt arr1 arr2
| n1 /= m2 =
error $
"(|*|): Inner array dimensions must agree, but received: "
++ show (size arr1)
++ " and "
++ show (size arr2)
| otherwise =
makeArray (getComp arr1 <> getComp arr2) (Sz (m1 :. n2)) $ \(i :. j) ->
foldlS (+) 0 (A.zipWith (*) (arr1 !> i) (arr2' !> j))
where
Sz2 m1 n1 = size arr1
Sz2 m2 n2 = size arr2
arr2' = computeAs S $ A.transpose arr2
main :: IO ()
main = do
defaultMain
[ let MxM{..} = randomMxM
in bench "multArrsAlt (baseline)" $ whnf (multArrsAlt aMxM) bMxM
, benchMxM (randomMxM :: MxM S Double)
, benchVxM (randomVxM :: VxM S Double)
, benchMxV (randomMxV :: MxV S Double)
]
| null | https://raw.githubusercontent.com/lehins/massiv/67a920d4403f210d0bfdad1acc4bec208d80a588/massiv-bench/bench/Mult.hs | haskell | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
module Main where
import Criterion.Main
import Data.Massiv.Array as A
import Data.Massiv.Bench.Matrix
multArrsAlt :: Array S Ix2 Double -> Array S Ix2 Double -> Array S Ix2 Double
multArrsAlt arr1 arr2
| n1 /= m2 =
error $
"(|*|): Inner array dimensions must agree, but received: "
++ show (size arr1)
++ " and "
++ show (size arr2)
| otherwise =
makeArray (getComp arr1 <> getComp arr2) (Sz (m1 :. n2)) $ \(i :. j) ->
foldlS (+) 0 (A.zipWith (*) (arr1 !> i) (arr2' !> j))
where
Sz2 m1 n1 = size arr1
Sz2 m2 n2 = size arr2
arr2' = computeAs S $ A.transpose arr2
main :: IO ()
main = do
defaultMain
[ let MxM{..} = randomMxM
in bench "multArrsAlt (baseline)" $ whnf (multArrsAlt aMxM) bMxM
, benchMxM (randomMxM :: MxM S Double)
, benchVxM (randomVxM :: VxM S Double)
, benchMxV (randomMxV :: MxV S Double)
]
|
|
9fd0997b7214c9a8c9523e9cc52a089c8b560acc792e083e62b2c983a5a690b5 | racket/rhombus-prototype | class-binding.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre
enforest/syntax-local
(only-in enforest/operator operator-proc)
"srcloc.rkt"
"class-parse.rkt")
"binding.rkt"
"bind-macro.rkt"
(submod "bind-macro.rkt" for-class)
"composite.rkt"
"parens.rkt"
(submod "boolean-pattern.rkt" for-class)
(for-syntax "class-transformer.rkt")
(submod "dot.rkt" for-dot-provider))
(provide (for-syntax build-class-binding-form))
(define-for-syntax (build-class-binding-form super binding-rhs
exposed-internal-id intro
names)
(with-syntax ([(name name-instance name?
constructor-name-fields constructor-public-name-fields super-name-fields
constructor-field-static-infoss constructor-public-field-static-infoss super-field-static-infoss
field-keywords public-field-keywords super-field-keywords)
names])
(define (make-binding-transformer no-super? name-fields static-infoss keywords)
(with-syntax ([(constructor-name-field ...) name-fields]
[(constructor-field-static-infos ...) static-infoss]
[(field-keyword ...) keywords]
[(super-name-field ...) (if no-super? '() #'super-name-fields)]
[(super-field-static-infos ...) (if no-super? '() #'super-field-static-infoss)]
[(super-field-keyword ...) (if no-super? '() #'super-field-keywords)])
#`(binding-transformer
(make-composite-binding-transformer #,(symbol->string (syntax-e #'name))
(quote-syntax name?)
#:static-infos (quote-syntax ((#%dot-provider name-instance)))
(list (quote-syntax super-name-field) ...
(quote-syntax constructor-name-field) ...)
#:keywords '(super-field-keyword ... field-keyword ...)
(list (quote-syntax super-field-static-infos) ...
(quote-syntax constructor-field-static-infos) ...)
#:accessor->info? #t))))
(append
(if exposed-internal-id
(list
#`(define-binding-syntax #,exposed-internal-id
#,(make-binding-transformer #t
#'constructor-name-fields
#'constructor-field-static-infoss
#'field-keywords)))
null)
(cond
[binding-rhs
(list
#`(define-binding-syntax name
(wrap-class-transformer name #,(intro binding-rhs) make-binding-prefix-operator "class")))]
[else
(list
#`(define-binding-syntax name
#,(make-binding-transformer #f
#'constructor-public-name-fields
#'constructor-public-field-static-infoss
#'public-field-keywords)))]))))
(define-for-syntax (make-curried-binding-transformer super-binding-id
constructor-str predicate accessors static-infoss
#:static-infos static-infos
#:keywords keywords)
(define t
(make-composite-binding-transformer constructor-str predicate accessors static-infoss
#:static-infos static-infos
#:keywords keywords
#:accessor->info? #t))
(cond
[super-binding-id
(define p-t (operator-proc
(syntax-local-value* (in-binding-space super-binding-id) binding-prefix-operator-ref)))
(lambda (tail)
(syntax-parse tail
[(form-id p-term (tag::parens g ...) . new-tail)
(define stx (no-srcloc #'(form-id p-term (tag g ...))))
(define-values (p-binding p-tail) (p-t #'(form-id p-term)))
(define-values (binding c-tail) (t #'(form-id (tag g ...)) #f stx))
(values (make-and-binding p-binding binding)
#'new-tail)]))]
[else t]))
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/4e66c1361bdde51c2df9332644800baead49e86f/rhombus/private/class-binding.rkt | racket | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre
enforest/syntax-local
(only-in enforest/operator operator-proc)
"srcloc.rkt"
"class-parse.rkt")
"binding.rkt"
"bind-macro.rkt"
(submod "bind-macro.rkt" for-class)
"composite.rkt"
"parens.rkt"
(submod "boolean-pattern.rkt" for-class)
(for-syntax "class-transformer.rkt")
(submod "dot.rkt" for-dot-provider))
(provide (for-syntax build-class-binding-form))
(define-for-syntax (build-class-binding-form super binding-rhs
exposed-internal-id intro
names)
(with-syntax ([(name name-instance name?
constructor-name-fields constructor-public-name-fields super-name-fields
constructor-field-static-infoss constructor-public-field-static-infoss super-field-static-infoss
field-keywords public-field-keywords super-field-keywords)
names])
(define (make-binding-transformer no-super? name-fields static-infoss keywords)
(with-syntax ([(constructor-name-field ...) name-fields]
[(constructor-field-static-infos ...) static-infoss]
[(field-keyword ...) keywords]
[(super-name-field ...) (if no-super? '() #'super-name-fields)]
[(super-field-static-infos ...) (if no-super? '() #'super-field-static-infoss)]
[(super-field-keyword ...) (if no-super? '() #'super-field-keywords)])
#`(binding-transformer
(make-composite-binding-transformer #,(symbol->string (syntax-e #'name))
(quote-syntax name?)
#:static-infos (quote-syntax ((#%dot-provider name-instance)))
(list (quote-syntax super-name-field) ...
(quote-syntax constructor-name-field) ...)
#:keywords '(super-field-keyword ... field-keyword ...)
(list (quote-syntax super-field-static-infos) ...
(quote-syntax constructor-field-static-infos) ...)
#:accessor->info? #t))))
(append
(if exposed-internal-id
(list
#`(define-binding-syntax #,exposed-internal-id
#,(make-binding-transformer #t
#'constructor-name-fields
#'constructor-field-static-infoss
#'field-keywords)))
null)
(cond
[binding-rhs
(list
#`(define-binding-syntax name
(wrap-class-transformer name #,(intro binding-rhs) make-binding-prefix-operator "class")))]
[else
(list
#`(define-binding-syntax name
#,(make-binding-transformer #f
#'constructor-public-name-fields
#'constructor-public-field-static-infoss
#'public-field-keywords)))]))))
(define-for-syntax (make-curried-binding-transformer super-binding-id
constructor-str predicate accessors static-infoss
#:static-infos static-infos
#:keywords keywords)
(define t
(make-composite-binding-transformer constructor-str predicate accessors static-infoss
#:static-infos static-infos
#:keywords keywords
#:accessor->info? #t))
(cond
[super-binding-id
(define p-t (operator-proc
(syntax-local-value* (in-binding-space super-binding-id) binding-prefix-operator-ref)))
(lambda (tail)
(syntax-parse tail
[(form-id p-term (tag::parens g ...) . new-tail)
(define stx (no-srcloc #'(form-id p-term (tag g ...))))
(define-values (p-binding p-tail) (p-t #'(form-id p-term)))
(define-values (binding c-tail) (t #'(form-id (tag g ...)) #f stx))
(values (make-and-binding p-binding binding)
#'new-tail)]))]
[else t]))
|
|
20fe81884fdfe089f959a05736f0c0237a4597c3afb6e2791e35e7a9d60d6e9b | scalaris-team/scalaris | clocks.erl | %  2016 Zuse Institute Berlin
%   Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
%       http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author < >
%% @doc functions for querying clocks
%% @version $Id$
-module(clocks).
-author('').
-vsn('$Id$').
%-on_load(on_load/0).
-export([init/0,
get_monotonic_clock/0, get_monotonic_clock_res/0,
get_realtime_clock/0, get_realtime_clock_res/0,
get_ptp0_clock/0, get_ptp0_clock_res/0,
get_ptp1_clock/0, get_ptp1_clock_res/0,
get_ptp2_clock/0, get_ptp2_clock_res/0]).
-export([test/0]).
-spec init() -> ok.
init() ->
%% loads the shared library
SoName =
case code:priv_dir(scalaris) of
{error, bad_name} ->
Dir1 = filename:join(
[filename:dirname(code:where_is_file("scalaris.beam")),
"..", priv]),
case filelib:is_dir(Dir1) of
true ->
filename:join(Dir1, ?MODULE);
_ ->
filename:join(["..", priv, ?MODULE])
end;
Dir ->
filename:join(Dir, ?MODULE)
end,
erlang:load_nif(SoName, 0).
-spec get_monotonic_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_monotonic_clock() ->
erlang:nif_error(undef).
-spec get_monotonic_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_monotonic_clock_res() ->
erlang:nif_error(undef).
-spec get_realtime_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_realtime_clock() ->
erlang:nif_error(undef).
-spec get_realtime_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_realtime_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp0_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp0_clock() ->
erlang:nif_error(undef).
-spec get_ptp0_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp0_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp1_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp1_clock() ->
erlang:nif_error(undef).
-spec get_ptp1_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp1_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp2_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp2_clock() ->
erlang:nif_error(undef).
-spec get_ptp2_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp2_clock_res() ->
erlang:nif_error(undef).
-spec test() -> ok.
test() ->
init(),
io:format("monotonic ~p:~p~n", [get_monotonic_clock(), get_monotonic_clock_res()]),
io:format("realtime ~p:~p~n", [get_realtime_clock(), get_realtime_clock_res()]),
io:format("ptp0 ~p:~p~n", [get_ptp0_clock(), get_ptp0_clock_res()]),
io:format("ptp1 ~p:~p~n", [get_ptp1_clock(), get_ptp1_clock_res()]),
io:format("ptp2 ~p:~p~n", [get_ptp2_clock(), get_ptp2_clock_res()]),
ok.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/src/time/clocks.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc functions for querying clocks
@version $Id$
-on_load(on_load/0).
loads the shared library | 2016 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
-module(clocks).
-author('').
-vsn('$Id$').
-export([init/0,
get_monotonic_clock/0, get_monotonic_clock_res/0,
get_realtime_clock/0, get_realtime_clock_res/0,
get_ptp0_clock/0, get_ptp0_clock_res/0,
get_ptp1_clock/0, get_ptp1_clock_res/0,
get_ptp2_clock/0, get_ptp2_clock_res/0]).
-export([test/0]).
-spec init() -> ok.
init() ->
SoName =
case code:priv_dir(scalaris) of
{error, bad_name} ->
Dir1 = filename:join(
[filename:dirname(code:where_is_file("scalaris.beam")),
"..", priv]),
case filelib:is_dir(Dir1) of
true ->
filename:join(Dir1, ?MODULE);
_ ->
filename:join(["..", priv, ?MODULE])
end;
Dir ->
filename:join(Dir, ?MODULE)
end,
erlang:load_nif(SoName, 0).
-spec get_monotonic_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_monotonic_clock() ->
erlang:nif_error(undef).
-spec get_monotonic_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_monotonic_clock_res() ->
erlang:nif_error(undef).
-spec get_realtime_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_realtime_clock() ->
erlang:nif_error(undef).
-spec get_realtime_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_realtime_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp0_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp0_clock() ->
erlang:nif_error(undef).
-spec get_ptp0_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp0_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp1_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp1_clock() ->
erlang:nif_error(undef).
-spec get_ptp1_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp1_clock_res() ->
erlang:nif_error(undef).
-spec get_ptp2_clock() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp2_clock() ->
erlang:nif_error(undef).
-spec get_ptp2_clock_res() -> failed | {non_neg_integer(), non_neg_integer()}.
get_ptp2_clock_res() ->
erlang:nif_error(undef).
-spec test() -> ok.
test() ->
init(),
io:format("monotonic ~p:~p~n", [get_monotonic_clock(), get_monotonic_clock_res()]),
io:format("realtime ~p:~p~n", [get_realtime_clock(), get_realtime_clock_res()]),
io:format("ptp0 ~p:~p~n", [get_ptp0_clock(), get_ptp0_clock_res()]),
io:format("ptp1 ~p:~p~n", [get_ptp1_clock(), get_ptp1_clock_res()]),
io:format("ptp2 ~p:~p~n", [get_ptp2_clock(), get_ptp2_clock_res()]),
ok.
|
893d3a418a6a835a8fb27350ae7a9919496d552d670e93e7970c2c36de0d5979 | lopec/LoPEC | statistician.erl | %%%-------------------------------------------------------------------
%%% @author < >
%%% @author Bjorn "norno" < >
%%% @author Gustav "azariah" Simonsson < >
%%% @author < >
%%% @copyright (C) 2009, < >
%%% @doc
%%%
%%% Collects various statistics about the cluster nodes and jobs put
%%% in the cluster.
%%% <pre>
%%% Cluster global statistics include:
%%% * Jobs executed (also those that are done or cancelled)
%%% * Power consumed (estimation)
%%% * Time spent executing tasks (sum total for all nodes)
%%% * Upload network traffic (total unfortunately, not just ours)
%%% * Download network traffic (ditto)
%%% * Number of tasks processed
%%% * Number of task restarts
%%% * Total amount of diskspace in cluster
%%% * Total amount of diskspace used in cluster
%%% * % of diskspace in cluster that is used
%%% * Total amount of primary memory in cluster
%%% * Total amount of primary memory used in cluster
%%% * % of primary memory used in cluster
%%%</pre>
%%%
%%% @end
%%% Created : 21 Oct 2009 by < >
%%%-------------------------------------------------------------------
-module(statistician).
-include("../include/env.hrl").
-behaviour(gen_server).
%% API functions
-export([start_link/1, update/1, job_finished/1, remove_node/1, stop/0,
get_cluster_stats/1, get_job_stats/2,
get_node_stats/2, get_node_job_stats/3,
get_node_disk_usage/1, get_node_mem_usage/1,
get_user_stats/2, get_cluster_disk_usage/1, get_cluster_mem_usage/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
% How often do slaves flush stats to master, in milliseconds
-define(UPDATE_INTERVAL, 1000).
% Do we want to delete jobs from our tables once finished (debug flag)
-ifdef(no_delete_tables).
-define(DELETE_TABLE(), dont).
-else.
-define(DELETE_TABLE(), delete).
-endif.
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server.
%% <pre>
%% Type:
%% slave - start a slave node statistician. It intermittently flushes
%% collected stats to the master.
%% master - start a master node statistician. It keeps track of node
%% (global) stats as well as job stats.
%% </pre>
%% @spec start_link(Type) -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link(Type) ->
gen_server:start_link({local, ?MODULE}, ?MODULE,
[Type], []).
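%% Usage sketch (illustrative only, not taken from the LoPEC supervisors):
%% the master node would typically run statistician:start_link(master) from
%% its supervisor, and every slave node statistician:start_link(slave), e.g.
%% via a child spec such as
%%   {statistician, {statistician, start_link, [slave]},
%%    permanent, 5000, worker, [statistician]}.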
%%--------------------------------------------------------------------
%% @doc
%% Stops the statistician and all related applications and modules.
%%
%% @spec stop() -> ok
%% @end
%%--------------------------------------------------------------------
stop() ->
gen_server:cast(?MODULE, stop).
%%--------------------------------------------------------------------
%% @doc
%% Returns average disk usage over all nodes.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_cluster_disk_usage(Flag) -> String
%% | {Total::Integer, Percentage::Integer}
%% @end
%%--------------------------------------------------------------------
get_cluster_disk_usage(Flag) ->
gen_server:call(?MODULE,{get_cluster_disk_usage, Flag}).
%%--------------------------------------------------------------------
%% @doc
%% Returns average primary memory usage over all nodes.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_cluster_mem_usage(Flag) -> String
%% | {Total::Integer, Percentage::Integer}
%% @end
%%--------------------------------------------------------------------
get_cluster_mem_usage(Flag) ->
gen_server:call(?MODULE,{get_cluster_mem_usage, Flag}).
%%--------------------------------------------------------------------
%% @doc
%% Returns disk usage on a node.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_node_disk_usage(Flag) -> String
%% | {Total::Integer, Percentage::Integer}
%% @end
%%--------------------------------------------------------------------
get_node_disk_usage(Flag) ->
gen_server:call(?MODULE,{get_node_disk_usage, Flag}).
%%--------------------------------------------------------------------
%% @doc
%% Returns memory usage on a node.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_node_mem_usage(Flag) -> String | {Total::Integer,
%% Percentage::Integer}
%% @end
%%--------------------------------------------------------------------
get_node_mem_usage(Flag) ->
gen_server:call(?MODULE,{get_node_mem_usage, Flag}).
%%--------------------------------------------------------------------
%% @doc
%% Returns stats for the entire cluster.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_cluster_stats(Flag) -> String
%% @end
%%--------------------------------------------------------------------
get_cluster_stats(Flag) ->
gen_server:call(?MODULE,{get_cluster_stats, Flag}).
%%--------------------------------------------------------------------
%% @doc
%% Returns stats for JobId.
%% <pre>
%% Flag:
%% raw - gives internal representation (a list of the total stats)
%% string - gives nicely formatted string with stats for each tasktype
%% </pre>
%% @spec get_job_stats(JobId, Flag) -> String
%% @end
%%--------------------------------------------------------------------
get_job_stats(JobId, raw) ->
gen_server:call(?MODULE, {get_job_stats, JobId, raw});
get_job_stats(JobId, string) ->
Return = gen_server:call(?MODULE,{get_job_stats, JobId, string}),
case Return of
{error, no_such_stats_found} ->
{error, no_such_stats_found};
_Result ->
Return
end.
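%% Illustrative shell usage (the job id below is a made-up example value):
%%   Stats = statistician:get_job_stats(1262304000123456, string),
%%   io:format("~ts~n", [Stats]).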
%%--------------------------------------------------------------------
%% @doc
%% Returns stats for NodeId.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_node_stats(NodeId, Flag) -> String
%% @end
%%--------------------------------------------------------------------
get_node_stats(NodeId, raw) ->
gen_server:call(?MODULE,{get_node_stats, NodeId, raw});
get_node_stats(NodeId, string) ->
Return = gen_server:call(?MODULE,{get_node_stats, NodeId, string}),
case Return of
{error, no_such_node_in_stats} ->
{error, no_such_node_in_stats};
_Result ->
Return
end.
%%--------------------------------------------------------------------
%% @doc
%% Returns stats the node NodeId has for the job JobId, like how many
%% JobId tasks NodeId has worked on, or how long.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_node_job_stats(NodeId, JobId, Flag) -> String
%% @end
%%--------------------------------------------------------------------
get_node_job_stats(NodeId, JobId, raw) ->
gen_server:call(?MODULE,{get_node_job_stats, NodeId, JobId, raw});
get_node_job_stats(NodeId, JobId, string) ->
Return=gen_server:call(?MODULE,{get_node_job_stats, NodeId, JobId, string}),
case Return of
{error, no_such_stats_found} ->
{error, no_such_stats_found};
_Result ->
Return
end.
%%--------------------------------------------------------------------
%% @doc
%% Returns stats for the given user.
%% <pre>
%% Flag:
%% raw - gives internal representation (Tuples, lists, whatnot)
%% string - gives nicely formatted string
%% </pre>
%% @spec get_user_stats(User, Flag) -> String
%% @end
%%--------------------------------------------------------------------
get_user_stats(User, raw) ->
gen_server:call(?MODULE, {get_user_stats, User, raw});
get_user_stats(User, string) ->
Return = gen_server:call(?MODULE, {get_user_stats, User, string}),
case Return of
{error, no_such_user} ->
{error, no_such_user};
_Result ->
Return
end.
%%--------------------------------------------------------------------
%% @doc
%% Updates local (node) ets table with statistics, adding the job and
%% its stats to the table if it doesn't already exist, otherwise
%% updating the existing entry.
%% <pre>
%% The Data variable should look like this tuple:
%% {{NodeId, JobId, TaskType, User},
%%  Power, Time, Upload, Download, NumTasks, Restarts, Disk, Mem}
%% where Disk and Mem are formatted like calls to
%% get_node_disk/mem_usage(raw)</pre>
%%
%% @spec update(Data) -> ok
%% @end
%%--------------------------------------------------------------------
update(Data) ->
gen_server:cast(?MODULE,{update, Data}).
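%% Illustrative call (all figures are made-up; the tuple layout mirrors the
%% {update, Stats} clause of handle_cast/2 below):
%%   statistician:update({{node(), 1262304000123456, map, no_user},
%%                        0.7,          % power, watt seconds
%%                        1.3,          % time executing, seconds
%%                        2048, 4096,   % upload/download, bytes
%%                        1, 0,         % tasks done, restarts
%%                        {102400, 37},                     % disk {total Kb, % used}
%%                        {8254376, 55, {self(), 12345}}}). % mem {total, % used, worst {pid, size}}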
%%--------------------------------------------------------------------
%% @doc
%% Jobs, once finished in our cluster, have their stats dumped to file
%% and their entry cleared out of the ets table. However, we have to
%% wait to make sure that all slaves have sent their stats updates -
%% we hope that waiting two update intervals will be sufficient, but
%% if a node is stalled for more than that long, we're out of luck.
%%
%% This wait is done using timer:send_after/3, which sends a regular
%% Erlang message, meaning we have to use handle_info/2 to catch
%% it. After the message is caught we pass the command on to
%% handle_cast/2, though.
%%
%% @spec job_finished(JobId) -> please_wait_a_few_seconds
%% @end
%%--------------------------------------------------------------------
job_finished(JobId) ->
{ok, _TimerRef} = timer:send_after(?UPDATE_INTERVAL*2, ?MODULE,
{job_finished, JobId}),
please_wait_a_few_seconds.
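%% Illustrative call: whichever process ends the job just fires and forgets,
%%   please_wait_a_few_seconds = statistician:job_finished(JobId),
%% and roughly two update intervals later the collected stats are written to
%% results/<JobId>/stats under the configured cluster root (see the
%% {job_finished, JobId} clause of handle_cast/2).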
%%--------------------------------------------------------------------
%% @doc
%% Remove a node from the global stats. Probably called when a node
%% drops from the cluster for some reason.
%%
%% @spec remove_node(NodeId) -> ok
%% @end
%%--------------------------------------------------------------------
remove_node(NodeId) ->
gen_server:cast(?MODULE, {remove_node, NodeId}).
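%% Illustrative call (the node name is a made-up example), e.g. from whatever
%% monitors cluster membership:
%%   statistician:remove_node('worker1@host'),
%% which dumps the node's stats to results/node_<NodeId>_stats and drops the
%% node from the global table (see the {remove_node, NodeId} cast clause).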
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initiates the server, call with Args as [master] to start master,
%% [slave] to start slave. See start_link.
%%
%% @spec init(Args) -> {ok, State}
%% @end
%%--------------------------------------------------------------------
init([master]) ->
global:register_name(?MODULE, self()),
case os:cmd("uname") -- "\n" of
"Linux" ->
application:start(sasl),
gen_event:delete_handler(error_logger, sasl_report_tty_h, []),
application:start(os_mon),
diskMemHandler:start();
Name ->
chronicler:debug("~w : statistican init called on unsupported OS: ~p~n", [Name]),
ok
end,
ets:new(job_stats_table,
[set, public, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
ets:new(node_stats_table,
[set, private, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
{ok, []};
init([slave]) ->
{ok, _TimerRef} = timer:send_interval(?UPDATE_INTERVAL, flush),
ets:new(job_stats_table,
[set, private, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
% Setting up disk/mem alarm handler
case os:cmd("uname") -- "\n" of
"Linux" ->
application:start(sasl),
gen_event:delete_handler(error_logger, sasl_report_tty_h, []),
application:start(os_mon),
diskMemHandler:start();
Name ->
chronicler:debug("~w : statistican init called on unsupported OS: ~p~n", [Name]),
ok
end,
{ok, []}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_stats/1
%%
%% @spec handle_call({get_cluster_disk_usage, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_cluster_disk_usage, Flag}, _From, State) ->
Reply = gather_cluster_disk_usage(Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_stats/1
%%
%% @spec handle_call({get_cluster_mem_usage, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_cluster_mem_usage, Flag}, _From, State) ->
Reply = gather_cluster_mem_usage(Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_stats/1
%%
%% @spec handle_call({get_cluster_stats, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_cluster_stats, Flag}, _From, State) ->
Reply = gather_cluster_stats(Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see get_node_disk_usage/1
%%
%% @spec handle_call({get_node_disk_usage, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_node_disk_usage, Flag}, _From, State) ->
Reply = gather_node_disk_usage(Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see get_node_mem_usage/1
%%
%% @spec handle_call({get_node_mem_usage, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_node_mem_usage, Flag}, _From, State) ->
Reply = gather_node_mem_usage(Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_job_stats/1
%%
%% @spec handle_call({get_job_stats, JobId, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_job_stats, JobId, Flag}, _From, State) ->
Reply = gather_node_job_stats('_', JobId, Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_stats/1
%%
%% @spec handle_call({get_node_stats, NodeId, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_node_stats, NodeId, Flag}, _From, State) ->
Reply = gather_node_stats(NodeId, Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see node_job_stats/1
%%
%% @spec handle_call({get_node_job_stats, NodeId, JobId, Flag}, From, State)
%% -> {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_node_job_stats, NodeId, JobId, Flag}, _From, State) ->
Reply = gather_node_job_stats(NodeId, JobId, Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Flag = raw | string
%% @see user_stats/1
%%
%% @spec handle_call({get_user_stats, User, Flag}, From, State) ->
%% {reply, Reply, State}
%% @end
%%--------------------------------------------------------------------
handle_call({get_user_stats, User, Flag}, _From, State) ->
Reply = gather_user_stats(User, Flag),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_call(Msg, From, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_call(Msg, From, State) ->
chronicler:debug("~w:Received unexpected handle_call call.~n"
"Message: ~p~n"
"From: ~p~n",
[?MODULE, Msg, From]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% @see update/1
%%
%% @spec handle_cast({update, StatsTuple}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({update, Stats}, State) ->
{{NodeId, JobId, TaskType, Usr},
Power, Time, Upload, Download, NumTasks, Restarts, Disk, Mem} = Stats,
User = case Usr of
no_user ->
dispatcher:get_user_from_job(JobId);
_Whatevah ->
Usr
end,
case ets:lookup(job_stats_table, {NodeId, JobId, TaskType, User}) of
[] ->
ets:insert(job_stats_table, {{NodeId, JobId, TaskType, User},
Power, Time, Upload, Download,
NumTasks, Restarts, Disk, Mem});
[OldStats] ->
{{_,JobId,_,_}, OldPower, OldTime, OldUpload,
OldDownload, OldNumTasks, OldRestarts, _, _} = OldStats,
ets:insert(job_stats_table, {{NodeId,
JobId,
TaskType, User},
Power + OldPower,
Time + OldTime,
Upload + OldUpload,
Download + OldDownload,
NumTasks + OldNumTasks,
Restarts + OldRestarts,
Disk,
Mem})
end,
case ets:info(node_stats_table) of
undefined ->
%only master has node_stats_table defined
table_undefined;
_Other ->
case ets:lookup(node_stats_table, {NodeId}) of
[] ->
ets:insert(node_stats_table, {{NodeId},
[JobId], Power, Time,
Upload, Download,
NumTasks, Restarts,
Disk, Mem});
[OldNodeStats] ->
{{_}, OldNodeJobs, OldNodePower, OldNodeTime, OldNodeUpload,
OldNodeDownload, OldNodeNumTasks, OldNodeRestarts, _, _}
= OldNodeStats,
ets:insert(node_stats_table, {{NodeId},
lists:umerge([JobId], OldNodeJobs),
Power + OldNodePower,
Time + OldNodeTime,
Upload + OldNodeUpload,
Download + OldNodeDownload,
NumTasks + OldNodeNumTasks,
Restarts + OldNodeRestarts,
Disk,
Mem})
end
end,
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% @see stop/0
%%
%% @spec handle_cast(stop, State) -> {stop, normal, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(stop, State) ->
{stop, normal, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%%
%% @spec handle_cast({alarm, Node, Type, Alarm}, State) ->
%%                   {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({alarm, Node, Type, Alarm}, State) ->
chronicler:debug("~w: Alarm at node ~p of type ~p: ~p",
[?MODULE, Node, Type, Alarm]),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Splits the received list and updates the master table with each
%% element in the lists.
%%
%% @spec handle_cast({update_with_list, List}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({update_with_list, List}, State) ->
chronicler:debug("Master received message from a node.~n", []),
lists:foreach(fun (X) -> gen_server:cast(?MODULE, {update, X}) end, List),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% @see job_finished/1
%%
%% @spec handle_cast({job_finished, JobId}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({job_finished, JobId}, State) ->
JobStats = gather_node_job_stats('_', JobId, string),
case ?DELETE_TABLE() of
delete ->
ets:match_delete(job_stats_table,
{{'_', JobId, '_', '_'},
'_','_','_','_','_','_','_','_'});
_Dont ->
ok
end,
{ok, Root} =
configparser:read_config(?CONFIGFILE, cluster_root),
file:write_file(Root ++ "results/" ++
integer_to_list(JobId) ++ "/stats", JobStats),
chronicler:info(JobStats),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% @see remove_node/1
%% We do not delete data from the job stats tables when a node leaves,
%% only from the global stats.
%% Should produce a file: /storage/test/results/node_NodeId_stats
%%
%% @spec handle_cast({remove_node, NodeId}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({remove_node, NodeId}, State) ->
NodeStats = gather_node_stats(NodeId, string),
%note that we do not check if the node exists (or rather, if
%gather_node_stats returns {error, no_such_node_in_stats})
%...because Erlang advocates No Defensive Coding
ets:match_delete(node_stats_table,
{{NodeId},'_','_','_','_','_','_','_',
'_','_'}),
{ok, Root} =
configparser:read_config(?CONFIGFILE, cluster_root),
file:write_file(Root ++ "results/node_" ++
atom_to_list(NodeId) ++ "_stats", NodeStats),
chronicler:info("Node "++atom_to_list(NodeId)
++" disconnected from cluster! Stats:~n"
++NodeStats),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_cast(Msg, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(Msg, State) ->
chronicler:debug("~w:Received unexpected handle_cast call.~n"
"Message: ~p~n",
[?MODULE, Msg]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Sends contents of local stats table to the master stats table,
%% then clears out the local stats table.
%%
%% @spec handle_info(flush, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_info(flush, State) ->
chronicler:debug("Node ~p transmitting stats.~n", [node()]),
StatsList = ets:tab2list(job_stats_table),
gen_server:cast({global, ?MODULE}, {update_with_list, StatsList}),
ets:delete_all_objects(job_stats_table),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% @see job_finished/1
%%
%% @spec handle_info({job_finished, JobId}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_info({job_finished, JobId}, State) ->
gen_server:cast(?MODULE, {job_finished, JobId}),
{noreply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_info(Info, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_info(Info, State) ->
chronicler:debug("~w:Received unexpected handle_info call.~n"
"Info: ~p~n",
[?MODULE, Info]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(normal, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(normal, _State) ->
chronicler:debug("~w:Received normal terminate call.~n"),
application:stop(sasl),
application:stop(os_mon),
diskMemHandler:stop(),
ok;
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(Reason, _State) ->
chronicler:debug("~w:Received terminate call.~n"
"Reason: ~p~n",
[?MODULE, Reason]),
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%% Logs and discards unexpected messages.
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(OldVsn, State, Extra) ->
chronicler:debug("~w:Received unexpected code_change call.~n"
"Old version: ~p~n"
"Extra: ~p~n",
[?MODULE, OldVsn, Extra]),
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Every element in the given stats list is summed up.
%%
%% @spec sum_stats(List, Data) -> Data + List
%%
%% @end
%%--------------------------------------------------------------------
sum_stats([],Data) ->
Data;
sum_stats([H|T], Data) ->
[TempPower,TempTime,TempUpload,TempDownload,TempNumtasks,TempRestarts] = H,
[AccPower,AccTime,AccUpload,AccDownload,AccNumtasks,AccRestarts] = Data,
sum_stats(T, [TempPower + AccPower,
TempTime + AccTime,
TempUpload + AccUpload,
TempDownload + AccDownload,
TempNumtasks + AccNumtasks,
TempRestarts + AccRestarts]).
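%% Illustrative evaluation (made-up numbers), summing one stats row into an
%% accumulator of [Power, Time, Upload, Download, NumTasks, Restarts]:
%%   sum_stats([[1.0, 2.0, 3, 4, 5, 6]], [0.5, 0.5, 1, 1, 1, 1])
%%   %% -> [1.5, 2.5, 4, 5, 6, 7]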
%%--------------------------------------------------------------------
%% @doc
%% Returns the time (in seconds) since the given job was added to
%% the cluster - sort of. It's derived from the JobId, which is in turn
%% based on now() being called when a job is created. So it's not
%% perfectly exact, but should be good enough for human purposes.
%%
%% @spec time_since_job_added(JobId) -> float()
%%
%% @end
%%--------------------------------------------------------------------
time_since_job_added(JobId) ->
TimeList = integer_to_list(JobId),
Then = {list_to_integer(lists:sublist(TimeList, 4)),
list_to_integer(lists:sublist(TimeList, 5, 6)),
list_to_integer(lists:sublist(TimeList, 11, 6))},
timer:now_diff(now(), Then) / 1000000.
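%% Worked example (made-up id): a job created when now() returned
%% {1262, 304000, 123456} gets JobId 1262304000123456; the three sublists
%% above recover {1262, 304000, 123456}, and now_diff/2 divided by 1000000
%% then gives the elapsed time in seconds.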
%%--------------------------------------------------------------------
%% @doc
%% Returns the disk usage stats
%% Flag = raw | string
%%
%% @spec gather_node_disk_usage(Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_node_disk_usage(Flag) ->
F = fun() ->
case os:cmd("uname") -- "\n" of
"Linux" ->
{_Dir, Total, Percentage} = hd(disksup:get_disk_data()),
_Stats = {Total, Percentage};
Name ->
chronicler:debug("~w : disk_usage call on unsupported OS: ~p~n", [Name]),
_Stats = {0,0}
end
end,
{Total, Percentage} = F(),
case Flag of
raw ->
{Total, Percentage};
string ->
io_lib:format("Disk stats for this node:~n"
"-------------------------~n"
"Total disk size (Kb): ~p~n"
"Percentage used: ~p%~n", [Total, Percentage])
end.
%%--------------------------------------------------------------------
%% @doc
%% Returns the memory usage stats
%% Flag = raw | string
%%
%% @spec gather_node_mem_usage(Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_node_mem_usage(Flag) ->
F = fun() ->
case os:cmd("uname") -- "\n" of
"Linux" ->
{Total, Alloc, Worst} = memsup:get_memory_data(),
Percentage = trunc((Alloc / Total) * 100),
_Stats = {Total, Percentage, Worst};
Name ->
chronicler:debug("~w : mem_usage call on unsupported OS: ~p~n", [Name]),
_Stats = {0,0,0}
end
end,
{Total, Percentage, Worst} = F(),
case Flag of
raw ->
{Total, Percentage, Worst};
string ->
{Pid, Size} = Worst,
io_lib:format("Memory stats for this node:~n"
"---------------------------~n"
"Total memory size (Bytes): ~p~n"
"Percentage used: ~p%~n"
"Erlang process ~p using most memory, ~p bytes~n",
[Total, Percentage, Pid, Size])
end.
%%--------------------------------------------------------------------
%% @doc
%% Extracts stats that NodeId has on JobId and returns a formatted
%% string showing these. get_job_stats/1 does not want stats on a
%% specific node, and so passes the atom '_' as NodeId, resulting
%% in a list of nodes that have worked on the job being matched out.
%% Flag = raw | string
%%
%% @spec gather_node_job_stats(NodeId, JobId, Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_node_job_stats(NodeId, JobId, Flag) ->
T = job_stats_table,
% TODO: ets:match is a potential bottleneck
case ets:match(T, {{NodeId, JobId, '_', '_'}, '$1', '_', '_', '_', '_', '_',
'_', '_'}) of
[] ->
{error, no_such_stats_found};
_Other ->
Split = ets:match(T, {{NodeId, JobId, split, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Map = ets:match(T, {{NodeId, JobId, map, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Reduce = ets:match(T, {{NodeId, JobId, reduce, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Finalize = ets:match(T, {{NodeId, JobId, finalize, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Nodes = case NodeId of
'_' -> lists:umerge(
ets:match(T, {{'$1', JobId, '_', '_'},
'_','_','_','_','_','_','_','_'}));
_NodeId -> NodeId
end,
Zeroes = [0.0,0.0,0,0,0,0],
SumSplit = sum_stats(Split, Zeroes),
SumMap = sum_stats(Map, Zeroes),
SumReduce = sum_stats(Reduce, Zeroes),
SumFinal = sum_stats(Finalize, Zeroes),
SumAll = sum_stats([SumSplit, SumMap, SumReduce, SumFinal], Zeroes),
case Flag of
string ->
TimePassed = time_since_job_added(JobId),
SplitStrings = format_task_stats(split, SumSplit),
MapStrings = format_task_stats(map, SumMap),
ReduceStrings = format_task_stats(reduce, SumReduce),
FinalStrings = format_task_stats(finalize, SumFinal),
format_job_stats({JobId, SplitStrings, MapStrings,
ReduceStrings, FinalStrings,
TimePassed, Nodes, SumAll});
raw ->
SumAll
end
end.
%%--------------------------------------------------------------------
%% @doc
%% Extracts statistics about NodeId and returns it as a formatted string.
%% Flag = raw | string
%%
%% @spec gather_node_stats(NodeId, Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_node_stats(NodeId, Flag) ->
T = node_stats_table,
% TODO: ets:match is a potential bottleneck
case ets:lookup(T, {NodeId}) of
[] ->
{error, no_such_node_in_stats};
[NodeStats] ->
case Flag of
raw ->
NodeStats;
string ->
format_node_stats(NodeStats)
end
end.
%%--------------------------------------------------------------------
%% @doc
%% Extracts statistics about a user.
%%
%% @spec gather_user_stats(User, Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_user_stats(User, Flag) ->
T = job_stats_table,
% TODO: ets:match is a potential bottleneck
ABC = ets:match(T, {{'_', '$1', '_', User},
'$2', '$3', '$4', '$5', '$6', '$7', '_', '_'}),
Zeros = {[],0,0,0,0,0,0},
case ABC of
[] ->
{error, no_such_user};
_Stats ->
case Flag of
raw ->
{User, sum_user_stats(ABC, Zeros)};
string ->
format_user_stats({User, sum_user_stats(ABC, Zeros)})
end
end.
sum_user_stats([], Tuple) ->
Tuple;
sum_user_stats([[JobId, S1, S2, S3, S4, S5, S6] | Rest],
{J1, Sa1, Sa2, Sa3, Sa4, Sa5, Sa6}) ->
sum_user_stats(Rest, {lists:usort([JobId | J1]),
S1+Sa1,S2+Sa2,S3+Sa3,S4+Sa4,S5+Sa5,S6+Sa6}).
%%--------------------------------------------------------------------
%% @doc
%% Extracts statistics about the cluster disk usage.
%%
%% @spec gather_cluster_disk_usage(Flag) -> String | ListOfValues
%%
%%
%% @end
%%--------------------------------------------------------------------
gather_cluster_disk_usage(Flag) ->
Nodes = [node()|nodes()],
NodesStats = [gather_node_stats(X, raw)
|| X <- Nodes],
CorrectNodesStats =
lists:filter(fun ({error, _}) -> false; (_) -> true end,
NodesStats),
F = fun({{_NodeId},
_Jobs, _Power, _Time, _Upload, _Download, _Numtasks,
_Restarts,
{DiskTotal, DiskPercentage},
{_MemTotal, _MemPercentage, {_WorstPid, _WorstSize}}}) ->
{DiskTotal, DiskPercentage}
end,
E1 = fun({First, _Second}) -> First end,
E2 = fun({_First, Second}) -> Second end,
DiskUsed = fun({DiskTotal, DiskPercentage}) ->
DiskPercentage*0.01*DiskTotal
end,
ListOfStats = lists:map(F, CorrectNodesStats),
ResultList =
case length(ListOfStats) of
0 ->
[0,0,0,0,0];
Length ->
TotalSize = lists:sum(lists:map(E1, ListOfStats)),
SumPercentage = lists:sum(lists:map(E2, ListOfStats)),
TotalUsed = lists:sum(lists:map(DiskUsed, ListOfStats)),
AveragePercentage = SumPercentage / Length,
AverageSize = TotalSize / Length,
TotalPercentage = case TotalSize of
0 ->
chronicler:debug("Total disk size of cluster was"
"reported as 0 bytes~n"),
0;
_ ->
(TotalUsed / TotalSize) * 100
end,
[TotalSize, TotalUsed, AverageSize,
TotalPercentage, AveragePercentage]
end,
case Flag of
raw ->
[{per_node, CorrectNodesStats}, {collected, ResultList}];
string ->
io_lib:format("Total disk size of nodes: ~p Kb~n"
"Total disk used on nodes: ~p Kb~n"
"Average disk size on nodes: ~p Kb~n"
"Total disk used in cluster: ~p%~n"
"Average disk used on nodes: ~p%~n",
[trunc(X) || X <- ResultList])
end.
%%--------------------------------------------------------------------
%% @doc
%% Extracts statistics about the cluster memory usage.
%%
%% @spec gather_cluster_mem_usage(Flag) -> String::string() |
%% ListOfValues
%% @end
%%--------------------------------------------------------------------
gather_cluster_mem_usage(Flag) ->
Nodes = [node()|nodes()],
NodesStats = [gather_node_stats(X, raw)||
X <- Nodes],
CorrectNodesStats = [X || X <- NodesStats,
X /= {error, no_such_node_in_stats}],
F = fun({{_NodeId},
_Jobs, _Power, _Time, _Upload, _Download, _Numtasks,
_Restarts,
{_DiskTotal, _DiskPercentage},
{MemTotal, MemPercentage, {WorstPid, WorstSize}}}) ->
{MemTotal, MemPercentage, {WorstPid, WorstSize}}
end,
E1 = fun({First, _Second, _Third}) -> First end,
E2 = fun({_First, Second, _Third}) -> Second end,
MemUsed = fun({MemTotal, MemPercentage, _Worst}) ->
MemPercentage*0.01*MemTotal
end,
ListOfStats = lists:map(F, CorrectNodesStats),
ResultList =
case length(ListOfStats) of
0 ->
[0,0,0,0,0];
Length ->
TotalSize = lists:sum(lists:map(E1, ListOfStats)),
SumPercentage = lists:sum(lists:map(E2, ListOfStats)),
TotalUsed = lists:sum(lists:map(MemUsed, ListOfStats)),
TotalPercentage = case TotalSize of
0 ->
chronicler:debug("Total memory size of cluster was"
"reported as 0 bytes~n"),
0;
_ ->
(TotalUsed / TotalSize) * 100
end,
AveragePercentage = SumPercentage / Length,
AverageSize = TotalSize / Length,
[TotalSize, TotalUsed, AverageSize,
TotalPercentage, AveragePercentage]
end,
case Flag of
raw ->
[{per_node, CorrectNodesStats}, {collected, ResultList}];
string ->
io_lib:format("Total primary memory size of nodes: ~p b~n"
"Total primary memory used on nodes: ~p b~n"
"Average primary memory size on nodes: ~p b~n"
"Total primary memory used in cluster: ~p%~n"
"Average primary memory used on nodes: ~p%~n",
[trunc(X) || X <- ResultList])
end.
%%--------------------------------------------------------------------
%% @doc
%% Extracts statistics about the entire cluster and returns it as a
%% formatted string.
%% Flag = raw | string
%%
%% @spec gather_cluster_stats(Flag) -> String
%%
%% @end
%%--------------------------------------------------------------------
gather_cluster_stats(Flag) ->
CollectStuff =
fun ({{Node}, Jobs, Power, Time, Upload, Download, NumTasks, Restarts,
_Disklol, _Memlol},
{Nodes, JobsAcc, PowerAcc, TimeAcc,
UpAcc, DownAcc, TasksAcc, RestartsAcc, Disk, Mem}) ->
{[Node | Nodes], Jobs ++ JobsAcc,
PowerAcc + Power,
TimeAcc + Time,
UpAcc + Upload,
DownAcc + Download,
TasksAcc + NumTasks,
RestartsAcc + Restarts,
Disk,
Mem}
end,
ClusterDiskUsage = gather_cluster_disk_usage(raw),
ClusterMemUsage = gather_cluster_mem_usage(raw),
{Nodes, Jobs, Power, Time, Upload, Download, NumTasks, Restarts,
_Disk, _Mem} =
ets:foldl(CollectStuff, {[], [], 0.0, 0.0, 0,0,0,0,{0,0},{0,0,{0,0}}},
node_stats_table),
Data = {lists:usort(Nodes), lists:usort(Jobs),
Power, Time, Upload, Download, NumTasks, Restarts,
ClusterDiskUsage,
ClusterMemUsage},
case Flag of
raw ->
Data;
string ->
format_cluster_stats(Data)
end.
%%--------------------------------------------------------------------
%% @doc
%% Returns a neatly formatted string for stats of the entire cluster.
%%
%% @spec format_cluster_stats(Data) -> String
%%
%% @end
%%--------------------------------------------------------------------
format_cluster_stats(
{Nodes, Jobs, Power, Time, Upload, Download, Numtasks, Restarts,
[{per_node, WhichNodesDiskStats},
{collected, [TotalDisk, TotalUsedDisk, AverageDisk, TotalUsedDiskP, AverageUsedDiskP]}],
[{per_node, WhichNodesMemStats},
{collected, [TotalMem, TotalUsedMem, AverageMem, TotalUsedMemP, AverageUsedMemP]}]}) ->
io_lib:format(
"The cluster currently has these stats stored:~n"
"------------------------------------------------------------~n"
"Nodes used: ~p~n"
"Jobs worked on: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks total: ~p~n"
"Number of task restarts:~p~n"
"---------------------~n"
"Disk stats from nodes: ~n~p~n"
"Total Disk size: ~p bytes~n"
"Total Disk used: ~p%~n"
"Total Disk used: ~p bytes~n"
"Average Disk size: ~p bytes~n"
"Average Disk used: ~p%~n"
"---------------------~n"
"Memory stats from nodes: ~n~p~n"
"Total Memory size: ~p bytes~n"
"Total Memory used: ~p bytes~n"
"Total Memory used: ~p%~n"
"Average Memory size: ~p bytes~n"
"Average Memory used: ~p%~n",
[Nodes, Jobs, Power / 3600, Time, Upload,
Download, Numtasks, Restarts,
       WhichNodesDiskStats, TotalDisk, TotalUsedDiskP, TotalUsedDisk, AverageDisk, AverageUsedDiskP,
WhichNodesMemStats, TotalMem, TotalUsedMem, TotalUsedMemP, AverageMem, AverageUsedMemP]).
%%--------------------------------------------------------------------
%% @doc
%% Returns a neatly formatted string of the stats of the given node
%%
%% @spec format_node_stats(Data) -> String
%%
%% @end
%%--------------------------------------------------------------------
format_node_stats({{NodeId},
Jobs, Power, Time, Upload, Download, Numtasks, Restarts,
{DiskTotal, DiskPercentage},
{MemTotal, MemPercentage, {WorstPid, WorstSize}}}) ->
io_lib:format(
"Stats for node: ~p~n"
"------------------------------------------------------------~n"
"Jobs worked on by node: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of task restarts:~p~n"
"Disk size: ~p~n"
"Disk used: ~p%~n"
"Primary memory size: ~p~n"
"Primary memory used: ~p%~n"
"Erlang process ~p using most memory, ~p bytes~n",
[NodeId, Jobs, Power / 3600, Time, Upload, Download, Numtasks, Restarts,
DiskTotal, DiskPercentage, MemTotal, MemPercentage, WorstPid, WorstSize]).
%%--------------------------------------------------------------------
%% @doc
%% Returns a neatly formatted string of the stats of the given user
%%
%% @spec format_user_stats(Data) -> String
%%
%% @end
%%--------------------------------------------------------------------
format_user_stats({User, {Jobs, Power, Time, Upload,
Download, Numtasks, Restarts}}) ->
io_lib:format(
"Stats for user: ~p~n"
"------------------------------------------------------------~n"
"Jobs: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of task restarts:~p~n",
[User, Jobs, Power / 3600, Time, Upload,
Download, Numtasks, Restarts]).
%%--------------------------------------------------------------------
%% @doc
%% Returns a neatly formatted string for the given job and its stats
%%
%% @spec format_job_stats(Data) -> String
%%
%% @end
%%--------------------------------------------------------------------
format_job_stats(
{JobId, SplitString, MapString, ReduceString, FinalizeString, TimePassed,
Nodes, [Power, TimeExecuted, Upload, Download, Numtasks, Restarts]}) ->
io_lib:format(
"Stats for job: ~p~n~ts~ts~ts~ts~n"
"------------------------------------------------------------~n"
"Total:~n"
"------------------------------------------------------------~n"
"Nodes that worked on job: ~p~n"
"Time passed: ~.2f seconds~n"
"Execution time: ~.2f seconds~n"
"Power used: ~.2f watt hours~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of restarts: ~p~n",
[JobId, SplitString, MapString, ReduceString, FinalizeString, Nodes,
TimePassed,TimeExecuted, Power / 3600, Upload,
Download, Numtasks, Restarts]).
%%--------------------------------------------------------------------
%% @doc
%% Returns a neatly formatted string for the given task and its stats
%%
%% @spec format_task_stats(TaskType, TaskStats) -> String
%%
%% @end
%%--------------------------------------------------------------------
format_task_stats(TaskType, [Power,Time,Upload,Download,NumTasks,Restarts]) ->
io_lib:format(
"------------------------------------------------------------~n"
"~p~n"
"------------------------------------------------------------~n"
"Power used: ~.2f watt seconds~n"
"Execution time: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of restarts: ~p~n",
[TaskType, Power, Time, Upload, Download, NumTasks, Restarts]).
| null | https://raw.githubusercontent.com/lopec/LoPEC/29a3989c48a60e5990615dea17bad9d24d770f7b/trunk/lib/common/src/statistician.erl | erlang | -------------------------------------------------------------------
@doc
Collects various statistics about the cluster nodes and jobs put
in the cluster.
<pre>
Cluster global statistics include:
* Jobs executed (also those that are done or cancelled)
* Power consumed (estimation)
* Time spent executing tasks (sum total for all nodes)
* Upload network traffic (total unfortunately, not just ours)
* Download network traffic (ditto)
* Number of tasks processed
* Number of task restarts
* Total amount of diskspace in cluster
* Total amount of diskspace used in cluster
* % of diskspace in cluster that is used
* Total amount of primary memory in cluster
* Total amount of primary memory used in cluster
* % of primary memory used in cluster
</pre>
@end
-------------------------------------------------------------------
API functions
gen_server callbacks
Hiow often do slaves flush stats to master, in milliseconds
Do we want to delete jobs from our tables once finished (debug flag)
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server.
<pre>
Type:
slave - start a slave node statistician. It intermittently flushes
collected stats to the master.
master - start a master node statistician. It keeps track of node
(global) stats as well as job stats.
</pre>
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Stops the statistician and all related applications and modules.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns average disk usage over all nodes.
<pre>
Flag:
string - gives nicely formatted string
</pre>
@spec get_cluster_disk_usage(Flag) -> String
| {Total::Integer, Percentage::Integer}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns average primary memory usage over all nodes.
<pre>
Flag:
string - gives nicely formatted string
</pre>
| {Total::Integer, Percentage::Integer}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns disk usage on a node.
<pre>
Flag:
string - gives nicely formatted string
</pre>
| {Total::Integer, Percentage::Integer}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns memory usage on a node.
<pre>
Flag:
string - gives nicely formatted string
</pre>
Percentage::Integer}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns stats for the entire cluster.
<pre>
Flag:
string - gives nicely formatted string
</pre>
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
<pre>
Flag:
raw - gives internal representation (a list of the total stats)
string - gives nicely formatted string with stats for each tasktype
</pre>
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns stats for NodeId.
<pre>
Flag:
string - gives nicely formatted string
</pre>
@spec get_node_stats(NodeId, Flag) -> String
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
<pre>
Flag:
string - gives nicely formatted string
</pre>
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns stats for the given user.
<pre>
Flag:
string - gives nicely formatted string
</pre>
@spec get_user_stats(User, Flag) -> String
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Updates local (node) ets table with statistics, adding the job and
its stats to the table if it doesn't already exist, otherwise
updating the existing entry.
<pre>
The Data variable should look like this tuple:
@spec update(Data) -> ok
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Jobs, once finished in our cluster, have their stats dumped to file
and their entry cleared out of the ets table. However, we have to
wait to make sure that all slaves have sent their stats updates -
if a node is stalled for more than that long, we're out of luck.
it. After the message is catched we pass the command onto
@spec job_finished(JobId) -> please_wait_a_few_seconds
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Remove a node from the global stats. Probably called when a node
drops from the cluster for some reason.
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
[slave] to start slave. See start_link.
@end
--------------------------------------------------------------------
Setting up disk/mem alarm handler
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_stats/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_stats/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_stats/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see get_node_mem_usage/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_job_stats/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_stats/1
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
@see node_job_stats/1
-> {reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Flag = raw | string
{reply, Reply, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
only master has node_stats_table defined
--------------------------------------------------------------------
@doc
@see stop/0
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Splits the received list and updates the master table with each
element in the lists.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@see job_finished/1
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@see remove_node/1
We do not delete data from the job stats tables when a node leaves,
only from the global stats.
Should produce a file: /storage/test/results/node_NodeId_stats
@end
--------------------------------------------------------------------
note that we do not check if the node exists (or rather, if
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Sends contents of local stats table to the master stats table,
then clears out the local stats table.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@see job_finished/1
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
===================================================================
===================================================================
--------------------------------------------------------------------
@doc
Every element in the given stats list is summed up.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns the time (in microseconds) since the given job was added to
based on now() being called when a job is created. So it's not
perfectly exact, but should be good enough for human purposes.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns the disk usage stats
Flag = raw | string
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns the disk usage stats
Flag = raw | string
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
string showing these. get_job_stats/1 does not want stats on a
specific node, and so passes the atom '_' as NodeId, resulting
in a list of nodes that have worked on the job being matched out.
Flag = raw | string
@end
--------------------------------------------------------------------
TODO: ets:match is a potential bottleneck
--------------------------------------------------------------------
@doc
Flag = raw | string
@end
--------------------------------------------------------------------
TODO: ets:match is a potential bottleneck
--------------------------------------------------------------------
@doc
Extracts statistics about a user.
@end
--------------------------------------------------------------------
TODO: ets:match is a potential bottleneck
--------------------------------------------------------------------
@doc
Extracts statistics about the cluster disk usage.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Extracts statistics about the cluster memory usage.
ListOfValues
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Extracts statistics about the entire cluster and returns it as a
formatted string.
Flag = raw | string
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns a neatly formatted string for stats of the entire cluster.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns a neatly formatted string of the stats of the given node
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns a neatly formatted string of the stats of the given user
@spec format_user_stats(Data) -> String
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns a neatly formatted string for the given job and its stats
@spec format_job_stats(Data) -> String
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns a neatly formatted string for the given task and its stats
@end
-------------------------------------------------------------------- | @author < >
@author Bjorn " norno " < >
@author Gustav " azariah " Simonsson < >
@author >
( C ) 2009 , < >
Created : 21 Oct 2009 by < >
-module(statistician).
-include("../include/env.hrl").
-behaviour(gen_server).
-export([start_link/1, update/1, job_finished/1, remove_node/1, stop/0,
get_cluster_stats/1, get_job_stats/2,
get_node_stats/2, get_node_job_stats/3,
get_node_disk_usage/1, get_node_mem_usage/1,
get_user_stats/2, get_cluster_disk_usage/1, get_cluster_mem_usage/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(UPDATE_INTERVAL, 1000).
-ifdef(no_delete_tables).
-define(DELETE_TABLE(), dont).
-else.
-define(DELETE_TABLE(), delete).
-endif.
@spec start_link(Type ) - > { ok , Pid } | ignore | { error , Error }
start_link(Type) ->
gen_server:start_link({local, ?MODULE}, ?MODULE,
[Type], []).
stop ( ) - > ok
stop() ->
gen_server:cast(?MODULE, stop).
raw - gives internal representation ( Tuples , lists , whatnot )
get_cluster_disk_usage(Flag) ->
gen_server:call(?MODULE,{get_cluster_disk_usage, Flag}).
raw - gives internal representation ( Tuples , lists , whatnot )
) - > String
get_cluster_mem_usage(Flag) ->
gen_server:call(?MODULE,{get_cluster_mem_usage, Flag}).
raw - gives internal representation ( Tuples , lists , whatnot )
) - > String
get_node_disk_usage(Flag) ->
gen_server:call(?MODULE,{get_node_disk_usage, Flag}).
raw - gives internal representation ( Tuples , lists , whatnot )
@spec get_node_mem_usage(Flag ) - > String | { Total::Integer ,
get_node_mem_usage(Flag) ->
gen_server:call(?MODULE,{get_node_mem_usage, Flag}).
raw - gives internal representation ( Tuples , lists , whatnot )
) - > String
get_cluster_stats(Flag) ->
gen_server:call(?MODULE,{get_cluster_stats, Flag}).
Returns stats for JobId .
@spec get_job_stats(JobId , Flag ) - > String
get_job_stats(JobId, raw) ->
gen_server:call(?MODULE, {get_job_stats, JobId, raw});
get_job_stats(JobId, string) ->
Return = gen_server:call(?MODULE,{get_job_stats, JobId, string}),
case Return of
{error, no_such_stats_found} ->
{error, no_such_stats_found};
_Result ->
Return
end.
raw - gives internal representation ( Tuples , lists , whatnot )
get_node_stats(NodeId, raw) ->
gen_server:call(?MODULE,{get_node_stats, NodeId, raw});
get_node_stats(NodeId, string) ->
Return = gen_server:call(?MODULE,{get_node_stats, NodeId, string}),
case Return of
{error, no_such_node_in_stats} ->
{error, no_such_node_in_stats};
_Result ->
Return
end.
Returns stats the node NodeId has for the job JobId , like how many
JobId tasks NodeId has worked on , or how long .
raw - gives internal representation ( Tuples , lists , whatnot )
@spec get_node_job_stats(NodeId , JobId , Flag ) - > String
get_node_job_stats(NodeId, JobId, raw) ->
gen_server:call(?MODULE,{get_node_job_stats, NodeId, JobId, raw});
get_node_job_stats(NodeId, JobId, string) ->
Return=gen_server:call(?MODULE,{get_node_job_stats, NodeId, JobId, string}),
case Return of
{error, no_such_stats_found} ->
{error, no_such_stats_found};
_Result ->
Return
end.
raw - gives internal representation ( Tuples , lists , whatnot )
get_user_stats(User, raw) ->
gen_server:call(?MODULE, {get_user_stats, User, raw});
get_user_stats(User, string) ->
Return = gen_server:call(?MODULE, {get_user_stats, User, string}),
case Return of
{error, no_such_user} ->
{error, no_such_user};
_Result ->
Return
end.
{ { NodeId , JobId , TaskType } ,
Power , Time , Upload , Download , NumTasks , Restarts , Disk , Mem }
where Disk and are formatted like calls to
get_node_disk / mem_stats(raw ) < /pre >
update(Data) ->
gen_server:cast(?MODULE,{update, Data}).
we hope that waiting two update intervals will be sufficient , but
This wait is done using timer : send_after/3 , which sends a regular
Erlang message , meaning we have to use handle_info/2 to catch
handle_cast/2 though .
job_finished(JobId) ->
{ok, _TimerRef} = timer:send_after(?UPDATE_INTERVAL*2, ?MODULE,
{job_finished, JobId}),
please_wait_a_few_seconds.
) - > ok
remove_node(NodeId) ->
gen_server:cast(?MODULE, {remove_node, NodeId}).
@private
Initiates the server , call with as [ master ] to start master ,
) - > { ok , State }
init([master]) ->
global:register_name(?MODULE, self()),
case os:cmd("uname") -- "\n" of
"Linux" ->
application:start(sasl),
gen_event:delete_handler(error_logger, sasl_report_tty_h, []),
application:start(os_mon),
diskMemHandler:start();
Name ->
chronicler:debug("~w : statistican init called on unsupported OS: ~p~n", [Name]),
ok
end,
ets:new(job_stats_table,
[set, public, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
ets:new(node_stats_table,
[set, private, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
{ok, []};
init([slave]) ->
{ok, _TimerRef} = timer:send_interval(?UPDATE_INTERVAL, flush),
ets:new(job_stats_table,
[set, private, named_table,
{keypos, 1}, {heir, none},
{write_concurrency, false}]),
case os:cmd("uname") -- "\n" of
"Linux" ->
application:start(sasl),
gen_event:delete_handler(error_logger, sasl_report_tty_h, []),
application:start(os_mon),
diskMemHandler:start();
Name ->
chronicler:debug("~w : statistican init called on unsupported OS: ~p~n", [Name]),
ok
end,
{ok, []}.
@private
, Flag } , From , State ) - >
handle_call({get_cluster_disk_usage, Flag}, _From, State) ->
Reply = gather_cluster_disk_usage(Flag),
{reply, Reply, State};
@private
, Flag } , From , State ) - >
handle_call({get_cluster_mem_usage, Flag}, _From, State) ->
Reply = gather_cluster_mem_usage(Flag),
{reply, Reply, State};
@private
@spec handle_call({get_cluster_stats , Flag } , From , State ) - >
handle_call({get_cluster_stats, Flag}, _From, State) ->
Reply = gather_cluster_stats(Flag),
{reply, Reply, State};
@private
@see
@spec handle_call({get_node_disk_usage , Flag } , From , State ) - >
handle_call({get_node_disk_usage, Flag}, _From, State) ->
Reply = gather_node_disk_usage(Flag),
{reply, Reply, State};
@private
@spec handle_call({get_node_mem_usage , Flag } , From , State ) - >
handle_call({get_node_mem_usage, Flag}, _From, State) ->
Reply = gather_node_mem_usage(Flag),
{reply, Reply, State};
@private
, JobId , Flag } , From , State ) - >
handle_call({get_job_stats, JobId, Flag}, _From, State) ->
Reply = gather_node_job_stats('_', JobId, Flag),
{reply, Reply, State};
@private
, NodeId , Flag } , From , State ) - >
handle_call({get_node_stats, NodeId, Flag}, _From, State) ->
Reply = gather_node_stats(NodeId, Flag),
{reply, Reply, State};
@private
@spec handle_call({get_node_job_stats , NodeId , JobId , Flag } , From , State )
handle_call({get_node_job_stats, NodeId, JobId, Flag}, _From, State) ->
Reply = gather_node_job_stats(NodeId, JobId, Flag),
{reply, Reply, State};
@private
@see user_stats/1
, User , Flag } , From , State ) - >
handle_call({get_user_stats, User, Flag}, _From, State) ->
Reply = gather_user_stats(User, Flag),
{reply, Reply, State};
@private
, From , State ) - > { noreply , State }
handle_call(Msg, From, State) ->
chronicler:debug("~w:Received unexpected handle_call call.~n"
"Message: ~p~n"
"From: ~p~n",
[?MODULE, Msg, From]),
{noreply, State}.
@private
@see update/1
@spec handle_cast({update , StatsTuple } , State ) - > { noreply , State }
handle_cast({update, Stats}, State) ->
{{NodeId, JobId, TaskType, Usr},
Power, Time, Upload, Download, NumTasks, Restarts, Disk, Mem} = Stats,
User = case Usr of
no_user ->
dispatcher:get_user_from_job(JobId);
_Whatevah ->
Usr
end,
case ets:lookup(job_stats_table, {NodeId, JobId, TaskType, User}) of
[] ->
ets:insert(job_stats_table, {{NodeId, JobId, TaskType, User},
Power, Time, Upload, Download,
NumTasks, Restarts, Disk, Mem});
[OldStats] ->
{{_,JobId,_,_}, OldPower, OldTime, OldUpload,
OldDownload, OldNumTasks, OldRestarts, _, _} = OldStats,
ets:insert(job_stats_table, {{NodeId,
JobId,
TaskType, User},
Power + OldPower,
Time + OldTime,
Upload + OldUpload,
Download + OldDownload,
NumTasks + OldNumTasks,
Restarts + OldRestarts,
Disk,
Mem})
end,
case ets:info(node_stats_table) of
undefined ->
table_undefined;
_Other ->
case ets:lookup(node_stats_table, {NodeId}) of
[] ->
ets:insert(node_stats_table, {{NodeId},
[JobId], Power, Time,
Upload, Download,
NumTasks, Restarts,
Disk, Mem});
[OldNodeStats] ->
{{_}, OldNodeJobs, OldNodePower, OldNodeTime, OldNodeUpload,
OldNodeDownload, OldNodeNumTasks, OldNodeRestarts, _, _}
= OldNodeStats,
ets:insert(node_stats_table, {{NodeId},
lists:umerge([JobId], OldNodeJobs),
Power + OldNodePower,
Time + OldNodeTime,
Upload + OldNodeUpload,
Download + OldNodeDownload,
NumTasks + OldNodeNumTasks,
Restarts + OldNodeRestarts,
Disk,
Mem})
end
end,
{noreply, State};
@private
@spec handle_cast(stop , State ) - > { stop , normal , State }
handle_cast(stop, State) ->
{stop, normal, State};
@private
@spec handle_cast({alarm , Node , Type , Alarm } , State ) - >
{ noreply , State }
handle_cast({alarm, Node, Type, Alarm}, State) ->
chronicler:debug("~w: Alarm at node ~p of type ~p: ~p",
[?MODULE, Node, Type, Alarm]),
{noreply, State};
@private
@spec handle_cast({update_with_list , List } , State ) - > { noreply , State }
handle_cast({update_with_list, List}, State) ->
chronicler:debug("Master received message from a node.~n", []),
lists:foreach(fun (X) -> gen_server:cast(?MODULE, {update, X}) end, List),
{noreply, State};
@private
, JobId } , State ) - > { noreply , State }
handle_cast({job_finished, JobId}, State) ->
JobStats = gather_node_job_stats('_', JobId, string),
case ?DELETE_TABLE() of
delete ->
ets:match_delete(job_stats_table,
{{'_', JobId, '_', '_'},
'_','_','_','_','_','_','_','_'});
_Dont ->
ok
end,
{ok, Root} =
configparser:read_config(?CONFIGFILE, cluster_root),
file:write_file(Root ++ "results/" ++
integer_to_list(JobId) ++ "/stats", JobStats),
chronicler:info(JobStats),
{noreply, State};
@private
@spec handle_cast({remove_node , NodeId } , State ) - > { noreply , State }
handle_cast({remove_node, NodeId}, State) ->
NodeStats = gather_node_stats(NodeId, string),
returns { error , } )
... because Erlang advocates No Defensive Coding
ets:match_delete(node_stats_table,
{{NodeId},'_','_','_','_','_','_','_',
'_','_'}),
{ok, Root} =
configparser:read_config(?CONFIGFILE, cluster_root),
file:write_file(Root ++ "results/node_" ++
atom_to_list(NodeId) ++ "_stats", NodeStats),
chronicler:info("Node "++atom_to_list(NodeId)
++" disconnected from cluster! Stats:~n"
++NodeStats),
{noreply, State};
@private
@spec handle_cast(Msg , State ) - > { noreply , State }
handle_cast(Msg, State) ->
chronicler:debug("~w:Received unexpected handle_cast call.~n"
"Message: ~p~n",
[?MODULE, Msg]),
{noreply, State}.
@private
@spec handle_info(flush , State ) - > { noreply , State }
handle_info(flush, State) ->
chronicler:debug("Node ~p transmitting stats.~n", [node()]),
StatsList = ets:tab2list(job_stats_table),
gen_server:cast({global, ?MODULE}, {update_with_list, StatsList}),
ets:delete_all_objects(job_stats_table),
{noreply, State};
@private
handle_info({job_finished , JobId } , State ) - > { noreply , State }
handle_info({job_finished, JobId}, State) ->
gen_server:cast(?MODULE, {job_finished, JobId}),
{noreply, State};
@private
, State ) - > { noreply , State }
handle_info(Info, State) ->
chronicler:debug("~w:Received unexpected handle_info call.~n"
"Info: ~p~n",
[?MODULE, Info]),
{noreply, State}.
@private
with . The return value is ignored .
@spec terminate(normal , State ) - > void ( )
terminate(normal, _State) ->
chronicler:debug("~w:Received normal terminate call.~n"),
application:stop(sasl),
application:stop(os_mon),
diskMemHandler:stop(),
ok;
@private
, State ) - > void ( )
terminate(Reason, _State) ->
chronicler:debug("~w:Received terminate call.~n"
"Reason: ~p~n",
[?MODULE, Reason]),
ok.
@private
, State , Extra ) - > { ok , NewState }
code_change(OldVsn, State, Extra) ->
chronicler:debug("~w:Received unexpected code_change call.~n"
"Old version: ~p~n"
"Extra: ~p~n",
[?MODULE, OldVsn, Extra]),
{ok, State}.
Internal functions
, Data ) - > Data + List
sum_stats([],Data) ->
Data;
sum_stats([H|T], Data) ->
[TempPower,TempTime,TempUpload,TempDownload,TempNumtasks,TempRestarts] = H,
[AccPower,AccTime,AccUpload,AccDownload,AccNumtasks,AccRestarts] = Data,
sum_stats(T, [TempPower + AccPower,
TempTime + AccTime,
TempUpload + AccUpload,
TempDownload + AccDownload,
TempNumtasks + AccNumtasks,
TempRestarts + AccRestarts]).
the cluster - sort of . It 's derived from the JobId , which is in turn
time_since_job_added(JobId ) - > integer
time_since_job_added(JobId) ->
TimeList = integer_to_list(JobId),
Then = {list_to_integer(lists:sublist(TimeList, 4)),
list_to_integer(lists:sublist(TimeList, 5, 6)),
list_to_integer(lists:sublist(TimeList, 11, 6))},
timer:now_diff(now(), Then) / 1000000.
gather_node_disk_usage(Flag ) - > String
gather_node_disk_usage(Flag) ->
F = fun() ->
case os:cmd("uname") -- "\n" of
"Linux" ->
{_Dir, Total, Percentage} = hd(disksup:get_disk_data()),
_Stats = {Total, Percentage};
Name ->
chronicler:debug("~w : disk_usage call on unsupported OS: ~p~n", [Name]),
_Stats = {0,0}
end
end,
{Total, Percentage} = F(),
case Flag of
raw ->
{Total, Percentage};
string ->
io_lib:format("Disk stats for this node:~n"
"-------------------------~n"
"Total disk size (Kb): ~p~n"
"Percentage used: ~p%~n", [Total, Percentage])
end.
) - > String
gather_node_mem_usage(Flag) ->
F = fun() ->
case os:cmd("uname") -- "\n" of
"Linux" ->
{Total, Alloc, Worst} = memsup:get_memory_data(),
Percentage = trunc((Alloc / Total) * 100),
_Stats = {Total, Percentage, Worst};
Name ->
chronicler:debug("~w : mem_usage call on unsupported OS: ~p~n", [Name]),
_Stats = {0,0,0}
end
end,
{Total, Percentage, Worst} = F(),
case Flag of
raw ->
{Total, Percentage, Worst};
string ->
{Pid, Size} = Worst,
io_lib:format("Memory stats for this node:~n"
"---------------------------~n"
"Total memory size (Bytes): ~p~n"
"Percentage used: ~p%~n"
"Erlang process ~p using most memory, ~p bytes~n",
[Total, Percentage, Pid, Size])
end.
Extracts stats that NodeId has on JobId and returns a formatted
, JobId , Flag ) - > String
gather_node_job_stats(NodeId, JobId, Flag) ->
T = job_stats_table,
case ets:match(T, {{NodeId, JobId, '_', '_'}, '$1', '_', '_', '_', '_', '_',
'_', '_'}) of
[] ->
{error, no_such_stats_found};
_Other ->
Split = ets:match(T, {{NodeId, JobId, split, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Map = ets:match(T, {{NodeId, JobId, map, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Reduce = ets:match(T, {{NodeId, JobId, reduce, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Finalize = ets:match(T, {{NodeId, JobId, finalize, '_'},
'$1', '$2', '$3', '$4', '$5', '$6', '_', '_'}),
Nodes = case NodeId of
'_' -> lists:umerge(
ets:match(T, {{'$1', JobId, '_', '_'},
'_','_','_','_','_','_','_','_'}));
_NodeId -> NodeId
end,
Zeroes = [0.0,0.0,0,0,0,0],
SumSplit = sum_stats(Split, Zeroes),
SumMap = sum_stats(Map, Zeroes),
SumReduce = sum_stats(Reduce, Zeroes),
SumFinal = sum_stats(Finalize, Zeroes),
SumAll = sum_stats([SumSplit, SumMap, SumReduce, SumFinal], Zeroes),
case Flag of
string ->
TimePassed = time_since_job_added(JobId),
SplitStrings = format_task_stats(split, SumSplit),
MapStrings = format_task_stats(map, SumMap),
ReduceStrings = format_task_stats(reduce, SumReduce),
FinalStrings = format_task_stats(finalize, SumFinal),
format_job_stats({JobId, SplitStrings, MapStrings,
ReduceStrings, FinalStrings,
TimePassed, Nodes, SumAll});
raw ->
SumAll
end
end.
Extracts statistics about and returns it as a formatted string .
, Flag ) - > String
gather_node_stats(NodeId, Flag) ->
T = node_stats_table,
case ets:lookup(T, {NodeId}) of
[] ->
{error, no_such_node_in_stats};
[NodeStats] ->
case Flag of
raw ->
NodeStats;
string ->
format_node_stats(NodeStats)
end
end.
, Flag ) - > String
gather_user_stats(User, Flag) ->
T = job_stats_table,
ABC = ets:match(T, {{'_', '$1', '_', User},
'$2', '$3', '$4', '$5', '$6', '$7', '_', '_'}),
Zeros = {[],0,0,0,0,0,0},
case ABC of
[] ->
{error, no_such_user};
_Stats ->
case Flag of
raw ->
{User, sum_user_stats(ABC, Zeros)};
string ->
format_user_stats({User, sum_user_stats(ABC, Zeros)})
end
end.
sum_user_stats([], Tuple) ->
Tuple;
sum_user_stats([[JobId, S1, S2, S3, S4, S5, S6] | Rest],
{J1, Sa1, Sa2, Sa3, Sa4, Sa5, Sa6}) ->
sum_user_stats(Rest, {lists:usort([JobId | J1]),
S1+Sa1,S2+Sa2,S3+Sa3,S4+Sa4,S5+Sa5,S6+Sa6}).
) - > String | ListOfValues
gather_cluster_disk_usage(Flag) ->
Nodes = [node()|nodes()],
NodesStats = [gather_node_stats(X, raw)
|| X <- Nodes],
CorrectNodesStats =
lists:filter(fun ({error, _}) -> false; (_) -> true end,
NodesStats),
F = fun({{_NodeId},
_Jobs, _Power, _Time, _Upload, _Download, _Numtasks,
_Restarts,
{DiskTotal, DiskPercentage},
{_MemTotal, _MemPercentage, {_WorstPid, _WorstSize}}}) ->
{DiskTotal, DiskPercentage}
end,
E1 = fun({First, _Second}) -> First end,
E2 = fun({_First, Second}) -> Second end,
DiskUsed = fun({DiskTotal, DiskPercentage}) ->
DiskPercentage*0.01*DiskTotal
end,
ListOfStats = lists:map(F, CorrectNodesStats),
ResultList =
case length(ListOfStats) of
0 ->
[0,0,0,0,0];
Length ->
TotalSize = lists:sum(lists:map(E1, ListOfStats)),
SumPercentage = lists:sum(lists:map(E2, ListOfStats)),
TotalUsed = lists:sum(lists:map(DiskUsed, ListOfStats)),
AveragePercentage = SumPercentage / Length,
AverageSize = TotalSize / Length,
TotalPercentage = case TotalSize of
0 ->
chronicler:debug("Total disk size of cluster was"
"reported as 0 bytes~n"),
0;
_ ->
(TotalUsed / TotalSize) * 100
end,
[TotalSize, TotalUsed, AverageSize,
TotalPercentage, AveragePercentage]
end,
case Flag of
raw ->
[{per_node, CorrectNodesStats}, {collected, ResultList}];
string ->
io_lib:format("Total disk size of nodes: ~p Kb~n"
"Total disk used on nodes: ~p Kb~n"
"Average disk size on nodes: ~p Kb~n"
"Total disk used in cluster: ~p%~n"
"Average disk used on nodes: ~p%~n",
[trunc(X) || X <- ResultList])
end.
) - > String::string ( ) |
gather_cluster_mem_usage(Flag) ->
Nodes = [node()|nodes()],
NodesStats = [gather_node_stats(X, raw)||
X <- Nodes],
CorrectNodesStats = [X || X <- NodesStats,
X /= {error, no_such_node_in_stats}],
F = fun({{_NodeId},
_Jobs, _Power, _Time, _Upload, _Download, _Numtasks,
_Restarts,
{_DiskTotal, _DiskPercentage},
{MemTotal, MemPercentage, {WorstPid, WorstSize}}}) ->
{MemTotal, MemPercentage, {WorstPid, WorstSize}}
end,
E1 = fun({First, _Second, _Third}) -> First end,
E2 = fun({_First, Second, _Third}) -> Second end,
MemUsed = fun({MemTotal, MemPercentage, _Worst}) ->
MemPercentage*0.01*MemTotal
end,
ListOfStats = lists:map(F, CorrectNodesStats),
ResultList =
case length(ListOfStats) of
0 ->
[0,0,0,0,0];
Length ->
TotalSize = lists:sum(lists:map(E1, ListOfStats)),
SumPercentage = lists:sum(lists:map(E2, ListOfStats)),
TotalUsed = lists:sum(lists:map(MemUsed, ListOfStats)),
TotalPercentage = case TotalSize of
0 ->
chronicler:debug("Total memory size of cluster was"
"reported as 0 bytes~n"),
0;
_ ->
(TotalUsed / TotalSize) * 100
end,
AveragePercentage = SumPercentage / Length,
AverageSize = TotalSize / Length,
[TotalSize, TotalUsed, AverageSize,
TotalPercentage, AveragePercentage]
end,
case Flag of
raw ->
[{per_node, CorrectNodesStats}, {collected, ResultList}];
string ->
io_lib:format("Total primary memory size of nodes: ~p b~n"
"Total primary memory used on nodes: ~p b~n"
"Average primary memory size on nodes: ~p b~n"
"Total primary memory used in cluster: ~p%~n"
"Average primary memory used on nodes: ~p%~n",
[trunc(X) || X <- ResultList])
end.
) - > String
gather_cluster_stats(Flag) ->
CollectStuff =
fun ({{Node}, Jobs, Power, Time, Upload, Download, NumTasks, Restarts,
_Disklol, _Memlol},
{Nodes, JobsAcc, PowerAcc, TimeAcc,
UpAcc, DownAcc, TasksAcc, RestartsAcc, Disk, Mem}) ->
{[Node | Nodes], Jobs ++ JobsAcc,
PowerAcc + Power,
TimeAcc + Time,
UpAcc + Upload,
DownAcc + Download,
TasksAcc + NumTasks,
RestartsAcc + Restarts,
Disk,
Mem}
end,
ClusterDiskUsage = gather_cluster_disk_usage(raw),
ClusterMemUsage = gather_cluster_mem_usage(raw),
{Nodes, Jobs, Power, Time, Upload, Download, NumTasks, Restarts,
_Disk, _Mem} =
ets:foldl(CollectStuff, {[], [], 0.0, 0.0, 0,0,0,0,{0,0},{0,0,{0,0}}},
node_stats_table),
Data = {lists:usort(Nodes), lists:usort(Jobs),
Power, Time, Upload, Download, NumTasks, Restarts,
ClusterDiskUsage,
ClusterMemUsage},
case Flag of
raw ->
Data;
string ->
format_cluster_stats(Data)
end.
) - > String
format_cluster_stats(
{Nodes, Jobs, Power, Time, Upload, Download, Numtasks, Restarts,
[{per_node, WhichNodesDiskStats},
{collected, [TotalDisk, TotalUsedDisk, AverageDisk, TotalUsedDiskP, AverageUsedDiskP]}],
[{per_node, WhichNodesMemStats},
{collected, [TotalMem, TotalUsedMem, AverageMem, TotalUsedMemP, AverageUsedMemP]}]}) ->
io_lib:format(
"The cluster currently has these stats stored:~n"
"------------------------------------------------------------~n"
"Nodes used: ~p~n"
"Jobs worked on: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks total: ~p~n"
"Number of task restarts:~p~n"
"---------------------~n"
"Disk stats from nodes: ~n~p~n"
"Total Disk size: ~p bytes~n"
"Total Disk used: ~p%~n"
"Total Disk used: ~p bytes~n"
"Average Disk size: ~p bytes~n"
"Average Disk used: ~p%~n"
"---------------------~n"
"Memory stats from nodes: ~n~p~n"
"Total Memory size: ~p bytes~n"
"Total Memory used: ~p bytes~n"
"Total Memory used: ~p%~n"
"Average Memory size: ~p bytes~n"
"Average Memory used: ~p%~n",
[Nodes, Jobs, Power / 3600, Time, Upload,
Download, Numtasks, Restarts,
WhichNodesDiskStats, TotalDisk, TotalUsedDisk, TotalUsedDiskP, AverageDisk, AverageUsedDiskP,
WhichNodesMemStats, TotalMem, TotalUsedMem, TotalUsedMemP, AverageMem, AverageUsedMemP]).
) - > String
format_node_stats({{NodeId},
Jobs, Power, Time, Upload, Download, Numtasks, Restarts,
{DiskTotal, DiskPercentage},
{MemTotal, MemPercentage, {WorstPid, WorstSize}}}) ->
io_lib:format(
"Stats for node: ~p~n"
"------------------------------------------------------------~n"
"Jobs worked on by node: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of task restarts:~p~n"
"Disk size: ~p~n"
"Disk used: ~p%~n"
"Primary memory size: ~p~n"
"Primary memory used: ~p%~n"
"Erlang process ~p using most memory, ~p bytes~n",
[NodeId, Jobs, Power / 3600, Time, Upload, Download, Numtasks, Restarts,
DiskTotal, DiskPercentage, MemTotal, MemPercentage, WorstPid, WorstSize]).
format_user_stats({User, {Jobs, Power, Time, Upload,
Download, Numtasks, Restarts}}) ->
io_lib:format(
"Stats for user: ~p~n"
"------------------------------------------------------------~n"
"Jobs: ~p~n"
"Power used: ~.2f watt hours~n"
"Time executing: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of task restarts:~p~n",
[User, Jobs, Power / 3600, Time, Upload,
Download, Numtasks, Restarts]).
format_job_stats(
{JobId, SplitString, MapString, ReduceString, FinalizeString, TimePassed,
Nodes, [Power, TimeExecuted, Upload, Download, Numtasks, Restarts]}) ->
io_lib:format(
"Stats for job: ~p~n~ts~ts~ts~ts~n"
"------------------------------------------------------------~n"
"Total:~n"
"------------------------------------------------------------~n"
"Nodes that worked on job: ~p~n"
"Time passed: ~.2f seconds~n"
"Execution time: ~.2f seconds~n"
"Power used: ~.2f watt hours~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of restarts: ~p~n",
[JobId, SplitString, MapString, ReduceString, FinalizeString, Nodes,
TimePassed,TimeExecuted, Power / 3600, Upload,
Download, Numtasks, Restarts]).
@spec format_task_stats(TaskType , TaskStats ) - > String
format_task_stats(TaskType, [Power,Time,Upload,Download,NumTasks,Restarts]) ->
io_lib:format(
"------------------------------------------------------------~n"
"~p~n"
"------------------------------------------------------------~n"
"Power used: ~.2f watt seconds~n"
"Execution time: ~.2f seconds~n"
"Upload: ~p bytes~n"
"Download: ~p bytes~n"
"Number of tasks: ~p~n"
"Number of restarts: ~p~n",
[TaskType, Power, Time, Upload, Download, NumTasks, Restarts]).
|
8341a9a852b0d6945d11266901e6db1692a204086aff41d58093943faf54da8e | sirherrbatka/statistical-learning | mushrooms.lisp | (cl:in-package #:cl-user)
(ql:quickload '(:vellum :vellum-csv :statistical-learning))
(defpackage #:mushrooms-example
(:use #:cl #:statistical-learning.aux-package))
(cl:in-package #:mushrooms-example)
(defvar *data*
(vellum:copy-from :csv (~>> (asdf:system-source-directory :statistical-learning)
(merge-pathnames "examples/mushrooms.data"))
:includes-header-p nil
:columns '((:name class)
(:name cap-shape)
(:name cap-surface)
(:name cap-color)
(:name bruises?)
(:name odor)
(:name gill-attachment)
(:name gill-spacing)
(:name gill-size)
(:name gill-color)
(:name stalk-shape)
(:name stalk-root)
(:name stalk-surface-above-ring)
(:name stalk-surface-below-ring)
(:name stalk-color-above-ring)
(:name stalk-color-below-ring)
(:name veil-type)
(:name veil-color)
(:name ring-number)
(:name ring-type)
(:name spore-print-color)
(:name population)
(:name habitat))))
(defparameter *mushroom-types* 2)
(defparameter *aggregation*
(vellum:aggregate-columns *data*
(cl-ds.alg:to-vector)
:name 'vector))
(defun column-encoder-hash-table (input)
(vellum:with-table (input)
(~> (cl-ds.alg:on-each input
(lambda (_)
(declare (ignore _))
(vellum:rr 0)))
(cl-ds.alg:distinct :test #'equal)
(cl-ds.alg:enumerate :test 'equal))))
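;;; One-hot encode the categorical columns: every distinct value of every
;;; column becomes its own 0/1 attribute in the resulting data matrix.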
(defun encode (table)
(let* ((column-count (vellum:column-count table))
(hash-tables
(iterate
(for i from 0 below column-count)
(collect (column-encoder-hash-table (vellum:select table :columns `(,i))))))
(sizes (serapeum:scan #'+ hash-tables :key #'hash-table-count :initial-value 0))
(total-size (last-elt sizes))
(result (statistical-learning.data:make-data-matrix (vellum:row-count table)
total-size))
(index 0))
(vellum:transform table
(vellum:bind-row ()
(iterate
(for i from 0 below column-count)
(for offset in sizes)
(for hash-table in hash-tables)
(for v = (vellum:rr i))
(for encoded = (gethash v hash-table))
(setf (statistical-learning.data:mref result index (+ offset encoded)) 1.0d0))
(incf index)))
result))
(defparameter *train-data*
(encode (vellum:select *data* :columns (vellum:s (vellum:between :from 'cap-shape)))))
(defparameter *target-data*
(lret ((result (statistical-learning.data:make-data-matrix (vellum:row-count *data*) 1)))
(iterate
(for i from 0 below (vellum:row-count *data*))
(setf (statistical-learning.data:mref result i 0)
(eswitch ((vellum:at *data* i 'class) :test 'equal)
("p" 1.0d0)
("e" 0.0d0))))))
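;;; Per-tree settings: Gini impurity over the two mushroom classes, depth
;;; capped at 5, and a random-attribute splitter that tries 80 candidate
;;; splits per node.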
(defparameter *training-parameters*
(make 'statistical-learning.dt:classification
:optimized-function (sl.opt:gini-impurity 2)
:maximal-depth 5
:minimal-difference 0.0001d0
:minimal-size 10
:parallel t
:splitter (sl.common:lift (make-instance 'sl.tp:random-attribute-splitter)
'sl.tp:random-splitter
:trials-count 80)))
(defparameter *forest-parameters*
(make 'statistical-learning.ensemble:random-forest
:trees-count 100
:parallel t
:weights-calculator (make-instance 'sl.ensemble:dynamic-weights-calculator)
:tree-batch-size 5
:tree-attributes-count 30
:data-points-sampler (make-instance 'sl.ensemble:weights-based-data-points-sampler
:sampling-rate 0.3)
:tree-parameters *training-parameters*))
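;;; 4-fold cross-validation of the forest; accuracy on this data set comes
;;; out around 0.99, as noted next to the PRINT form below.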
(defparameter *confusion-matrix*
(statistical-learning.performance:cross-validation *forest-parameters*
4
*train-data*
*target-data*
:parallel nil))
(print (sl.perf:accuracy *confusion-matrix*)) ; ~0.99
(print (sl.perf:attributes-importance *forest-parameters*
4
*train-data*
*target-data*
:parallel t))
| null | https://raw.githubusercontent.com/sirherrbatka/statistical-learning/dff8230c687f2bfa1097bbbd0f32aa73e2c573b2/examples/mushrooms.lisp | lisp | ~0.99 | (cl:in-package #:cl-user)
(ql:quickload '(:vellum :vellum-csv :statistical-learning))
(defpackage #:mushrooms-example
(:use #:cl #:statistical-learning.aux-package))
(cl:in-package #:mushrooms-example)
(defvar *data*
(vellum:copy-from :csv (~>> (asdf:system-source-directory :statistical-learning)
(merge-pathnames "examples/mushrooms.data"))
:includes-header-p nil
:columns '((:name class)
(:name cap-shape)
(:name cap-surface)
(:name cap-color)
(:name bruises?)
(:name odor)
(:name gill-attachment)
(:name gill-spacing)
(:name gill-size)
(:name gill-color)
(:name stalk-shape)
(:name stalk-root)
(:name stalk-surface-above-ring)
(:name stalk-surface-below-ring)
(:name stalk-color-above-ring)
(:name stalk-color-below-ring)
(:name veil-type)
(:name veil-color)
(:name ring-number)
(:name ring-type)
(:name spore-print-color)
(:name population)
(:name habitat))))
(defparameter *mushroom-types* 2)
(defparameter *aggregation*
(vellum:aggregate-columns *data*
(cl-ds.alg:to-vector)
:name 'vector))
(defun column-encoder-hash-table (input)
(vellum:with-table (input)
(~> (cl-ds.alg:on-each input
(lambda (_)
(declare (ignore _))
(vellum:rr 0)))
(cl-ds.alg:distinct :test #'equal)
(cl-ds.alg:enumerate :test 'equal))))
(defun encode (table)
(let* ((column-count (vellum:column-count table))
(hash-tables
(iterate
(for i from 0 below column-count)
(collect (column-encoder-hash-table (vellum:select table :columns `(,i))))))
(sizes (serapeum:scan #'+ hash-tables :key #'hash-table-count :initial-value 0))
(total-size (last-elt sizes))
(result (statistical-learning.data:make-data-matrix (vellum:row-count table)
total-size))
(index 0))
(vellum:transform table
(vellum:bind-row ()
(iterate
(for i from 0 below column-count)
(for offset in sizes)
(for hash-table in hash-tables)
(for v = (vellum:rr i))
(for encoded = (gethash v hash-table))
(setf (statistical-learning.data:mref result index (+ offset encoded)) 1.0d0))
(incf index)))
result))
(defparameter *train-data*
(encode (vellum:select *data* :columns (vellum:s (vellum:between :from 'cap-shape)))))
(defparameter *target-data*
(lret ((result (statistical-learning.data:make-data-matrix (vellum:row-count *data*) 1)))
(iterate
(for i from 0 below (vellum:row-count *data*))
(setf (statistical-learning.data:mref result i 0)
(eswitch ((vellum:at *data* i 'class) :test 'equal)
("p" 1.0d0)
("e" 0.0d0))))))
(defparameter *training-parameters*
(make 'statistical-learning.dt:classification
:optimized-function (sl.opt:gini-impurity 2)
:maximal-depth 5
:minimal-difference 0.0001d0
:minimal-size 10
:parallel t
:splitter (sl.common:lift (make-instance 'sl.tp:random-attribute-splitter)
'sl.tp:random-splitter
:trials-count 80)))
(defparameter *forest-parameters*
(make 'statistical-learning.ensemble:random-forest
:trees-count 100
:parallel t
:weights-calculator (make-instance 'sl.ensemble:dynamic-weights-calculator)
:tree-batch-size 5
:tree-attributes-count 30
:data-points-sampler (make-instance 'sl.ensemble:weights-based-data-points-sampler
:sampling-rate 0.3)
:tree-parameters *training-parameters*))
(defparameter *confusion-matrix*
(statistical-learning.performance:cross-validation *forest-parameters*
4
*train-data*
*target-data*
:parallel nil))
(print (sl.perf:attributes-importance *forest-parameters*
4
*train-data*
*target-data*
:parallel t))
|
47472ce1a83a1dadfdb3e0b88c17a70a2fb27938c8812c7a3e25b00c566c1d40 | synergistics/chemlambda-hask | Main.hs | module Main where
import qualified ReactionSitesBench as RSB
main = RSB.run
-- main = RSB.run >>= print
| null | https://raw.githubusercontent.com/synergistics/chemlambda-hask/6a51981d88b950490f38559e3e4c5d109da22fc5/chemlambda-chemistry/bench/Main.hs | haskell | main = RSB.run >>= print | module Main where
import qualified ReactionSitesBench as RSB
main = RSB.run
|
fc5fa767b9be154cbcc408c08f43a8a7f15e74cfe26a54e07c234dc82be9113a | sneeuwballen/zipperposition | fo_detector.ml |
This file is free software , part of Zipperposition . See file " license " for more details .
* { 1 Find applied variables in a problem }
open Logtk
open Logtk_parsers
module T = TypedSTerm
(** detect applied variables in a term *)
let rec detect_term t =
match T.view t with
| T.App (f, args) -> T.is_var(f) || CCList.exists detect_term args
| T.AppBuiltin (_, args) -> CCList.exists detect_term args
| T.Bind (_, _, t) -> detect_term t
| _ -> false
(** detect applied variables in a statement *)
let detect_stmt stmt =
match Statement.view stmt with
| Statement.Def _ -> failwith "Not implemented"
| Statement.Rewrite _ -> failwith "Not implemented"
| Statement.Data _ -> failwith "Not implemented"
| Statement.Lemma _ -> failwith "Not implemented"
| Statement.Goal f -> detect_term f
| Statement.NegatedGoal (_,_) -> failwith "Not implemented"
| Statement.Assert f -> detect_term f
| Statement.TyDecl (_, _) -> false
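(** Parse a TPTP file (following includes), infer types, and return true
    iff some statement applies a variable, i.e. the problem is higher-order. *)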
let process file =
let input = Input_format.I_tptp in
let parse = Util_tptp.parse_file ~recursive:true file in
Util.debugf 5 "Parse: %s" (fun k -> k (match parse with | CCResult.Error e -> e | CCResult.Ok _ -> "OK"));
let ast = Iter.map Util_tptp.to_ast (CCResult.get_exn parse) in
let typed_ast = TypeInference.infer_statements ?ctx:None
~on_var:(Input_format.on_var input)
~on_undef:(Input_format.on_undef_id input)
~on_shadow:(Input_format.on_shadow input)
~implicit_ty_args:false ast in
Util.debugf 5 "Parse: %s" (fun k -> k (match typed_ast with | CCResult.Error e -> e | CCResult.Ok _ -> "OK"));
let typed_ast = CCVector.to_list (CCResult.get_exn typed_ast) in
let detected = CCList.exists detect_stmt typed_ast in
detected
let options =
Options.make()
let () =
CCFormat.set_color_default true;
let files = ref [] in
let add_file f = files := f :: !files in
Arg.parse (Arg.align options) add_file "fo-detector [options] [files]";
let number = CCList.fold_left (fun n file ->
let detected = process file in
Format.printf "%s: %b\n" file detected;
if detected then n+1 else n
) 0 !files in
Format.printf "Total HO: %d/%d\n" number (List.length !files);
| null | https://raw.githubusercontent.com/sneeuwballen/zipperposition/7f1455fbe2e7509907f927649c288141b1a3a247/src/tools/fo_detector.ml | ocaml | * encode a term
* encode a statement |
This file is free software , part of Zipperposition . See file " license " for more details .
* { 1 Find applied variables in a problem }
open Logtk
open Logtk_parsers
module T = TypedSTerm
let rec detect_term t =
match T.view t with
| T.App (f, args) -> T.is_var(f) || CCList.exists detect_term args
| T.AppBuiltin (_, args) -> CCList.exists detect_term args
| T.Bind (_, _, t) -> detect_term t
| _ -> false
let detect_stmt stmt =
match Statement.view stmt with
| Statement.Def _ -> failwith "Not implemented"
| Statement.Rewrite _ -> failwith "Not implemented"
| Statement.Data _ -> failwith "Not implemented"
| Statement.Lemma _ -> failwith "Not implemented"
| Statement.Goal f -> detect_term f
| Statement.NegatedGoal (_,_) -> failwith "Not implemented"
| Statement.Assert f -> detect_term f
| Statement.TyDecl (_, _) -> false
let process file =
let input = Input_format.I_tptp in
let parse = Util_tptp.parse_file ~recursive:true file in
Util.debugf 5 "Parse: %s" (fun k -> k (match parse with | CCResult.Error e -> e | CCResult.Ok _ -> "OK"));
let ast = Iter.map Util_tptp.to_ast (CCResult.get_exn parse) in
let typed_ast = TypeInference.infer_statements ?ctx:None
~on_var:(Input_format.on_var input)
~on_undef:(Input_format.on_undef_id input)
~on_shadow:(Input_format.on_shadow input)
~implicit_ty_args:false ast in
Util.debugf 5 "Parse: %s" (fun k -> k (match typed_ast with | CCResult.Error e -> e | CCResult.Ok _ -> "OK"));
let typed_ast = CCVector.to_list (CCResult.get_exn typed_ast) in
let detected = CCList.exists detect_stmt typed_ast in
detected
let options =
Options.make()
let () =
CCFormat.set_color_default true;
let files = ref [] in
let add_file f = files := f :: !files in
Arg.parse (Arg.align options) add_file "fo-detector [options] [files]";
let number = CCList.fold_left (fun n file ->
let detected = process file in
Format.printf "%s: %b\n" file detected;
if detected then n+1 else n
) 0 !files in
Format.printf "Total HO: %d/%d\n" number (List.length !files);
|
02066566c8d5aca15548434d6648e60136e072ca699a0ce0067df4f5f3221a4a | pouyakary/Nota | Main.hs |
module Language.BackEnd.Evaluator.Main where
-- ─── IMPORTS ────────────────────────────────────────────────────────────────────
import Language.BackEnd.Evaluator.Nodes.Assignment
import Language.BackEnd.Evaluator.Nodes.BinaryOperator
import Language.BackEnd.Evaluator.Nodes.FunctionCall
import Language.BackEnd.Evaluator.Nodes.Identifier
import Language.BackEnd.Evaluator.Nodes.Negation
import Language.BackEnd.Evaluator.Nodes.Versus
import Language.BackEnd.Evaluator.Types
import Language.FrontEnd.AST
import Model
-- ─── MASTER EVAL ────────────────────────────────────────────────────────────────
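-- | Top-level evaluation: dispatches on the root node (versus lists,
-- assignments, plain expressions), delegates the actual arithmetic to
-- 'eval', and appends the input string and first result to the model's
-- history.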
masterEval :: AST -> Model -> String -> MasterEvalResult
masterEval ast model inputString =
case ast of
ASTVersus [ ] ->
appendHistoryToModel ( MasterEvalResultRight [0] model ) inputString
ASTVersus parts ->
case evalVersus eval parts model of
Left error ->
Left error
Right result ->
appendHistoryToModel result inputString
ASTAssignment name value ->
case evalAssignment eval ast model of
Left error ->
Left error
Right result ->
appendHistoryToModel result inputString
_ ->
case eval ast model of
Left error ->
Left error
Right result ->
appendHistoryToModel ( MasterEvalResultRight [result] model ) inputString
where
appendHistoryToModel ( MasterEvalResultRight resultValue resultModel ) inputString =
Right $ MasterEvalResultRight resultValue modelWithHistory where
modelWithHistory =
resultModel { history = ( history resultModel ) ++ [ inputString ]
, computedHistory = ( computedHistory resultModel ) ++ [ resultValue !! 0 ]
}
-- ─── MAIN ───────────────────────────────────────────────────────────────────────
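-- | Leaf evaluator for a single expression node: binary operators,
-- identifiers, parentheses, negation, literals and function calls;
-- anything else is reported as an error.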
eval :: LeafEvalSignature
eval astNode model =
case astNode of
ASTBinaryOperator op left right ->
evalBinaryOperator eval ( ASTBinaryOperator op left right ) model
ASTIdentifier _ ->
evalIdentifier astNode model
ASTParenthesis x ->
eval x model
ASTNegation _ ->
evalNegation eval astNode model
ASTNumber x ->
Right x
ASTFunctionCall name args ->
evalFunctionCall eval (ASTFunctionCall name args) model
_ ->
Left $ "Undefined AST Node " ++ show astNode
-- ────────────────────────────────────────────────────────────────────────────────
| null | https://raw.githubusercontent.com/pouyakary/Nota/d5e29eca7ea34d72835a9708977fa33c030393d1/source/Language/BackEnd/Evaluator/Main.hs | haskell | ─── IMPORTS ────────────────────────────────────────────────────────────────────
─── MASTER EVAL ────────────────────────────────────────────────────────────────
─── MAIN ───────────────────────────────────────────────────────────────────────
──────────────────────────────────────────────────────────────────────────────── |
module Language.BackEnd.Evaluator.Main where
import Language.BackEnd.Evaluator.Nodes.Assignment
import Language.BackEnd.Evaluator.Nodes.BinaryOperator
import Language.BackEnd.Evaluator.Nodes.FunctionCall
import Language.BackEnd.Evaluator.Nodes.Identifier
import Language.BackEnd.Evaluator.Nodes.Negation
import Language.BackEnd.Evaluator.Nodes.Versus
import Language.BackEnd.Evaluator.Types
import Language.FrontEnd.AST
import Model
masterEval :: AST -> Model -> String -> MasterEvalResult
masterEval ast model inputString =
case ast of
ASTVersus [ ] ->
appendHistoryToModel ( MasterEvalResultRight [0] model ) inputString
ASTVersus parts ->
case evalVersus eval parts model of
Left error ->
Left error
Right result ->
appendHistoryToModel result inputString
ASTAssignment name value ->
case evalAssignment eval ast model of
Left error ->
Left error
Right result ->
appendHistoryToModel result inputString
_ ->
case eval ast model of
Left error ->
Left error
Right result ->
appendHistoryToModel ( MasterEvalResultRight [result] model ) inputString
where
appendHistoryToModel ( MasterEvalResultRight resultValue resultModel ) inputString =
Right $ MasterEvalResultRight resultValue modelWithHistory where
modelWithHistory =
resultModel { history = ( history resultModel ) ++ [ inputString ]
, computedHistory = ( computedHistory resultModel ) ++ [ resultValue !! 0 ]
}
eval :: LeafEvalSignature
eval astNode model =
case astNode of
ASTBinaryOperator op left right ->
evalBinaryOperator eval ( ASTBinaryOperator op left right ) model
ASTIdentifier _ ->
evalIdentifier astNode model
ASTParenthesis x ->
eval x model
ASTNegation _ ->
evalNegation eval astNode model
ASTNumber x ->
Right x
ASTFunctionCall name args ->
evalFunctionCall eval (ASTFunctionCall name args) model
_ ->
Left $ "Undefined AST Node " ++ show astNode
|
9957ac663104beb0c5c5aacf548885f5284e5ba5bf856bd409825b9bd319c2f8 | nspin/hs-arm | Pattern.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Harm.Types.Pattern
( Atom(..)
, Pattern(..)
, match
) where
import Data.Bits
import Data.Char
import Data.Monoid
import Data.Word
import Control.DeepSeq
import GHC.Generics (Generic)
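-- | An 'Atom' constrains a 32-bit word: the bits selected by 'atom_mask'
-- must equal the corresponding bits of 'atom_spec'.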
data Atom = Atom
{ atom_spec :: Word32
, atom_mask :: Word32
} deriving (Eq, Generic, NFData)
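-- | A 'Pattern' pairs one positive 'Atom' with a list of negative atoms;
-- a word matches when it satisfies the positive atom and none of the
-- negative ones (see 'match').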
data Pattern = Pattern
{ pattern_pos :: Atom
, pattern_neg :: [Atom]
} deriving (Eq, Generic, NFData)
instance Show Atom where
showsPrec p (Atom spec mask) = showParen (p > 10) $
showString "Atom " . showHex32 spec . showChar ' ' . showHex32 mask
instance Show Pattern where
showsPrec p (Pattern pos neg) = showParen (p > 10) $
showString "Pattern " . showsPrec 11 pos . showChar ' ' . showsPrec 11 neg
showHex32 :: Word32 -> ShowS
showHex32 w = foldr (flip (.)) (showString "0x")
(take 8
      (map (showChar . intToDigit . fromIntegral . (.&. 0xf))
(iterate (flip shiftR 4) w)))
match :: Pattern -> Word32 -> Bool
match (Pattern pos neg) w = foldr (&&) (f pos) (map (not . f) neg)
where
f (Atom spec mask) = xor spec w .&. mask == 0
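-- Worked example: an atom that pins the low nibble to zero.
--
-- >>> match (Pattern (Atom 0x0 0xf) []) 0x10
-- True
-- >>> match (Pattern (Atom 0x0 0xf) []) 0x12
-- False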
instance Monoid Atom where
mempty = Atom 0 0
mappend (Atom spec mask) (Atom spec' mask')
| 0 == xor spec spec' .&. mask .&. mask' = Atom (spec .|. spec') (mask .|. mask')
| otherwise = error "Harm.Types.conjunction: conjunction is empty"
instance Monoid Pattern where
mempty = Pattern mempty []
mappend (Pattern pos neg) (Pattern pos' neg') = Pattern (mappend pos pos') (mappend neg neg') | null | https://raw.githubusercontent.com/nspin/hs-arm/8f10870a4afbbba010e78bd98e452ba67adc34e0/harm/harm-types/src/Harm/Types/Pattern.hs | haskell | # LANGUAGE DeriveAnyClass # | # LANGUAGE DeriveGeneric #
module Harm.Types.Pattern
( Atom(..)
, Pattern(..)
, match
) where
import Data.Bits
import Data.Char
import Data.Monoid
import Data.Word
import Control.DeepSeq
import GHC.Generics (Generic)
data Atom = Atom
{ atom_spec :: Word32
, atom_mask :: Word32
} deriving (Eq, Generic, NFData)
data Pattern = Pattern
{ pattern_pos :: Atom
, pattern_neg :: [Atom]
} deriving (Eq, Generic, NFData)
instance Show Atom where
showsPrec p (Atom spec mask) = showParen (p > 10) $
showString "Atom " . showHex32 spec . showChar ' ' . showHex32 mask
instance Show Pattern where
showsPrec p (Pattern pos neg) = showParen (p > 10) $
showString "Pattern " . showsPrec 11 pos . showChar ' ' . showsPrec 11 neg
showHex32 :: Word32 -> ShowS
showHex32 w = foldr (flip (.)) (showString "0x")
(take 8
      (map (showChar . intToDigit . fromIntegral . (.&. 0xf))
(iterate (flip shiftR 4) w)))
match :: Pattern -> Word32 -> Bool
match (Pattern pos neg) w = foldr (&&) (f pos) (map (not . f) neg)
where
f (Atom spec mask) = xor spec w .&. mask == 0
instance Monoid Atom where
mempty = Atom 0 0
mappend (Atom spec mask) (Atom spec' mask')
| 0 == xor spec spec' .&. mask .&. mask' = Atom (spec .|. spec') (mask .|. mask')
| otherwise = error "Harm.Types.conjunction: conjunction is empty"
instance Monoid Pattern where
mempty = Pattern mempty []
mappend (Pattern pos neg) (Pattern pos' neg') = Pattern (mappend pos pos') (mappend neg neg') |
f6beeb81c2a483dd5c07f43f8c4ade5882e65f10846398fdffa2a542218b7d53 | Oblosys/proxima | ProxParser_Generated.hs | module ProxParser_Generated where
import Prelude hiding (Word)
import Common.CommonTypes hiding (Dirty (..), defaultTextColor)
import Presentation.PresLayerTypes
import Presentation.PresLayerUtils
import Evaluation.DocumentEdit
import DocumentEdit_Generated
import DocUtils_Generated
import Evaluation.DocTypes
import DocTypes_Generated
import Presentation.PresentationParsing
import Data.Maybe
----- GENERATED PART STARTS HERE. DO NOT EDIT ON OR BEYOND THIS LINE -----
--------------------------------------------------------------------------
ProxParser type synonym --
--------------------------------------------------------------------------
type ProxParser a = ListParser Document EnrichedDoc Node ClipDoc UserToken a
--------------------------------------------------------------------------
Construct instance --
--------------------------------------------------------------------------
instance Construct Document EnrichedDoc Node ClipDoc UserToken where
construct NoNode = error $ "ProxParser_Generated.construct not defined on NoNode"
construct (Node_RootEnr _ _) = construct_RootEnr
construct (Node_HoleEnrichedDoc _ _) = construct_HoleEnrichedDoc
construct (Node_ParseErrEnrichedDoc _ _) = construct_ParseErrEnrichedDoc
construct (Node_RootDoc _ _) = construct_RootDoc
construct (Node_HoleDocument _ _) = construct_HoleDocument
construct (Node_ParseErrDocument _ _) = construct_ParseErrDocument
construct (Node_FormDoc _ _) = construct_FormDoc
construct (Node_TaskDoc _ _) = construct_TaskDoc
construct (Node_SudokuDoc _ _) = construct_SudokuDoc
construct (Node_TestDoc _ _) = construct_TestDoc
construct (Node_HoleChoiceDoc _ _) = construct_HoleChoiceDoc
construct (Node_ParseErrChoiceDoc _ _) = construct_ParseErrChoiceDoc
construct (Node_Form _ _) = construct_Form
construct (Node_HoleForm _ _) = construct_HoleForm
construct (Node_ParseErrForm _ _) = construct_ParseErrForm
construct (Node_Expense _ _) = construct_Expense
construct (Node_HoleExpense _ _) = construct_HoleExpense
construct (Node_ParseErrExpense _ _) = construct_ParseErrExpense
construct (Node_Currency _ _) = construct_Currency
construct (Node_HoleCurrency _ _) = construct_HoleCurrency
construct (Node_ParseErrCurrency _ _) = construct_ParseErrCurrency
construct (Node_Tasks _ _) = construct_Tasks
construct (Node_HoleTasks _ _) = construct_HoleTasks
construct (Node_ParseErrTasks _ _) = construct_ParseErrTasks
construct (Node_Thing _ _) = construct_Thing
construct (Node_HoleThing _ _) = construct_HoleThing
construct (Node_ParseErrThing _ _) = construct_ParseErrThing
construct (Node_BasicTask _ _) = construct_BasicTask
construct (Node_CompositeTask _ _) = construct_CompositeTask
construct (Node_HoleTask _ _) = construct_HoleTask
construct (Node_ParseErrTask _ _) = construct_ParseErrTask
construct (Node_Description _ _) = construct_Description
construct (Node_HoleDescription _ _) = construct_HoleDescription
construct (Node_ParseErrDescription _ _) = construct_ParseErrDescription
construct (Node_Sudoku _ _) = construct_Sudoku
construct (Node_HoleSudoku _ _) = construct_HoleSudoku
construct (Node_ParseErrSudoku _ _) = construct_ParseErrSudoku
construct (Node_Row _ _) = construct_Row
construct (Node_HoleRow _ _) = construct_HoleRow
construct (Node_ParseErrRow _ _) = construct_ParseErrRow
construct (Node_Field _ _) = construct_Field
construct (Node_HoleField _ _) = construct_HoleField
construct (Node_ParseErrField _ _) = construct_ParseErrField
construct (Node_Test _ _) = construct_Test
construct (Node_HoleTest _ _) = construct_HoleTest
construct (Node_ParseErrTest _ _) = construct_ParseErrTest
construct (Node_StyledText _ _) = construct_StyledText
construct (Node_HoleStyledText _ _) = construct_HoleStyledText
construct (Node_ParseErrStyledText _ _) = construct_ParseErrStyledText
construct (Node_Word _ _) = construct_Word
construct (Node_HoleWord _ _) = construct_HoleWord
construct (Node_ParseErrWord _ _) = construct_ParseErrWord
construct (Node_WordPart _ _) = construct_WordPart
construct (Node_OpenTag _ _) = construct_OpenTag
construct (Node_CloseTag _ _) = construct_CloseTag
construct (Node_HoleWordPart _ _) = construct_HoleWordPart
construct (Node_ParseErrWordPart _ _) = construct_ParseErrWordPart
construct (Node_TextBold _ _) = construct_TextBold
construct (Node_TextItalic _ _) = construct_TextItalic
construct (Node_TextFontSize _ _) = construct_TextFontSize
construct (Node_TextColor _ _) = construct_TextColor
construct (Node_HoleTextStyle _ _) = construct_HoleTextStyle
construct (Node_ParseErrTextStyle _ _) = construct_ParseErrTextStyle
construct (Node_Int_ _ _) = construct_Int_
construct (Node_HoleInt_ _ _) = construct_HoleInt_
construct (Node_ParseErrInt_ _ _) = construct_ParseErrInt_
construct (Node_Float_ _ _) = construct_Float_
construct (Node_HoleFloat_ _ _) = construct_HoleFloat_
construct (Node_ParseErrFloat_ _ _) = construct_ParseErrFloat_
construct (Node_List_Expense _ _) = construct_List_Expense
construct (Node_HoleList_Expense _ _) = construct_HoleList_Expense
construct (Node_ParseErrList_Expense _ _) = construct_ParseErrList_Expense
construct (Node_List_Currency _ _) = construct_List_Currency
construct (Node_HoleList_Currency _ _) = construct_HoleList_Currency
construct (Node_ParseErrList_Currency _ _) = construct_ParseErrList_Currency
construct (Node_List_Thing _ _) = construct_List_Thing
construct (Node_HoleList_Thing _ _) = construct_HoleList_Thing
construct (Node_ParseErrList_Thing _ _) = construct_ParseErrList_Thing
construct (Node_List_Task _ _) = construct_List_Task
construct (Node_HoleList_Task _ _) = construct_HoleList_Task
construct (Node_ParseErrList_Task _ _) = construct_ParseErrList_Task
construct (Node_List_Word _ _) = construct_List_Word
construct (Node_HoleList_Word _ _) = construct_HoleList_Word
construct (Node_ParseErrList_Word _ _) = construct_ParseErrList_Word
construct (Node_List_WordPart _ _) = construct_List_WordPart
construct (Node_HoleList_WordPart _ _) = construct_HoleList_WordPart
construct (Node_ParseErrList_WordPart _ _) = construct_ParseErrList_WordPart
construct_RootEnr tk ~[mClip0] = Clip_EnrichedDoc $ reuseRootEnr [tk] (retrieveArg "RootEnr" "choiceDoc::ChoiceDoc" mClip0)
construct_HoleEnrichedDoc tk ~[] = Clip_EnrichedDoc $ hole
construct_ParseErrEnrichedDoc (StructuralTk _ _ pres _ _) ~[] = Clip_EnrichedDoc $ parseErr (StructuralParseErr pres)
construct_RootDoc tk ~[mClip0] = Clip_Document $ reuseRootDoc [tk] (retrieveArg "RootDoc" "choiceDoc::ChoiceDoc" mClip0)
construct_HoleDocument tk ~[] = Clip_Document $ hole
construct_ParseErrDocument (StructuralTk _ _ pres _ _) ~[] = Clip_Document $ parseErr (StructuralParseErr pres)
construct_FormDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseFormDoc [tk] (retrieveArg "FormDoc" "form::Form" mClip0)
construct_TaskDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseTaskDoc [tk] (retrieveArg "TaskDoc" "tasks::Tasks" mClip0)
construct_SudokuDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseSudokuDoc [tk] (retrieveArg "SudokuDoc" "sudoku::Sudoku" mClip0)
construct_TestDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseTestDoc [tk] (retrieveArg "TestDoc" "test::Test" mClip0)
construct_HoleChoiceDoc tk ~[] = Clip_ChoiceDoc $ hole
construct_ParseErrChoiceDoc (StructuralTk _ _ pres _ _) ~[] = Clip_ChoiceDoc $ parseErr (StructuralParseErr pres)
construct_Form tk ~[mClip0,mClip1,mClip2,mClip3,mClip4] = Clip_Form $ reuseForm [tk] (retrieveArg "Form" "name::Description" mClip0) (retrieveArg "Form" "faculty::Description" mClip1) (retrieveArg "Form" "expenses::List_Expense" mClip2) (retrieveArg "Form" "baseCurrency::Int" mClip3) (retrieveArg "Form" "currencies::List_Currency" mClip4)
construct_HoleForm tk ~[] = Clip_Form $ hole
construct_ParseErrForm (StructuralTk _ _ pres _ _) ~[] = Clip_Form $ parseErr (StructuralParseErr pres)
construct_Expense tk ~[mClip0,mClip1,mClip2] = Clip_Expense $ reuseExpense [tk] (retrieveArg "Expense" "description::Description" mClip0) (retrieveArg "Expense" "amount::Float_" mClip1) (retrieveArg "Expense" "currencyIx::Int" mClip2)
construct_HoleExpense tk ~[] = Clip_Expense $ hole
construct_ParseErrExpense (StructuralTk _ _ pres _ _) ~[] = Clip_Expense $ parseErr (StructuralParseErr pres)
construct_Currency tk ~[mClip0,mClip1] = Clip_Currency $ reuseCurrency [tk] (retrieveArg "Currency" "name::Description" mClip0) (retrieveArg "Currency" "euroRate::Float_" mClip1)
construct_HoleCurrency tk ~[] = Clip_Currency $ hole
construct_ParseErrCurrency (StructuralTk _ _ pres _ _) ~[] = Clip_Currency $ parseErr (StructuralParseErr pres)
construct_Tasks tk ~[mClip0,mClip1,mClip2] = Clip_Tasks $ reuseTasks [tk] (retrieveArg "Tasks" "things::List_Thing" mClip0) (retrieveArg "Tasks" "showCompleted::Bool" mClip1) (retrieveArg "Tasks" "tasks::List_Task" mClip2)
construct_HoleTasks tk ~[] = Clip_Tasks $ hole
construct_ParseErrTasks (StructuralTk _ _ pres _ _) ~[] = Clip_Tasks $ parseErr (StructuralParseErr pres)
construct_Thing tk ~[mClip0] = Clip_Thing $ reuseThing [tk] (retrieveArg "Thing" "size::Int" mClip0)
construct_HoleThing tk ~[] = Clip_Thing $ hole
construct_ParseErrThing (StructuralTk _ _ pres _ _) ~[] = Clip_Thing $ parseErr (StructuralParseErr pres)
construct_BasicTask tk ~[mClip0,mClip1] = Clip_Task $ reuseBasicTask [tk] (retrieveArg "BasicTask" "description::Description" mClip0) (retrieveArg "BasicTask" "completed::Bool" mClip1)
construct_CompositeTask tk ~[mClip0,mClip1,mClip2] = Clip_Task $ reuseCompositeTask [tk] (retrieveArg "CompositeTask" "expanded::Bool" mClip0) (retrieveArg "CompositeTask" "description::Description" mClip1) (retrieveArg "CompositeTask" "subtasks::List_Task" mClip2)
construct_HoleTask tk ~[] = Clip_Task $ hole
construct_ParseErrTask (StructuralTk _ _ pres _ _) ~[] = Clip_Task $ parseErr (StructuralParseErr pres)
construct_Description tk ~[mClip0] = Clip_Description $ reuseDescription [tk] (retrieveArg "Description" "str::String" mClip0)
construct_HoleDescription tk ~[] = Clip_Description $ hole
construct_ParseErrDescription (StructuralTk _ _ pres _ _) ~[] = Clip_Description $ parseErr (StructuralParseErr pres)
construct_Sudoku tk ~[mClip0,mClip1,mClip2,mClip3,mClip4,mClip5,mClip6,mClip7,mClip8] = Clip_Sudoku $ reuseSudoku [tk] (retrieveArg "Sudoku" "r0::Row" mClip0) (retrieveArg "Sudoku" "r1::Row" mClip1) (retrieveArg "Sudoku" "r2::Row" mClip2) (retrieveArg "Sudoku" "r3::Row" mClip3) (retrieveArg "Sudoku" "r4::Row" mClip4) (retrieveArg "Sudoku" "r5::Row" mClip5) (retrieveArg "Sudoku" "r6::Row" mClip6) (retrieveArg "Sudoku" "r7::Row" mClip7) (retrieveArg "Sudoku" "r8::Row" mClip8)
construct_HoleSudoku tk ~[] = Clip_Sudoku $ hole
construct_ParseErrSudoku (StructuralTk _ _ pres _ _) ~[] = Clip_Sudoku $ parseErr (StructuralParseErr pres)
construct_Row tk ~[mClip0,mClip1,mClip2,mClip3,mClip4,mClip5,mClip6,mClip7,mClip8] = Clip_Row $ reuseRow [tk] (retrieveArg "Row" "f0::Field" mClip0) (retrieveArg "Row" "f1::Field" mClip1) (retrieveArg "Row" "f2::Field" mClip2) (retrieveArg "Row" "f3::Field" mClip3) (retrieveArg "Row" "f4::Field" mClip4) (retrieveArg "Row" "f5::Field" mClip5) (retrieveArg "Row" "f6::Field" mClip6) (retrieveArg "Row" "f7::Field" mClip7) (retrieveArg "Row" "f8::Field" mClip8)
construct_HoleRow tk ~[] = Clip_Row $ hole
construct_ParseErrRow (StructuralTk _ _ pres _ _) ~[] = Clip_Row $ parseErr (StructuralParseErr pres)
construct_Field tk ~[mClip0] = Clip_Field $ reuseField [tk] (retrieveArg "Field" "val::Int_" mClip0)
construct_HoleField tk ~[] = Clip_Field $ hole
construct_ParseErrField (StructuralTk _ _ pres _ _) ~[] = Clip_Field $ parseErr (StructuralParseErr pres)
construct_Test tk ~[mClip0] = Clip_Test $ reuseTest [tk] (retrieveArg "Test" "styledText::StyledText" mClip0)
construct_HoleTest tk ~[] = Clip_Test $ hole
construct_ParseErrTest (StructuralTk _ _ pres _ _) ~[] = Clip_Test $ parseErr (StructuralParseErr pres)
construct_StyledText tk ~[mClip0] = Clip_StyledText $ reuseStyledText [tk] (retrieveArg "StyledText" "words::List_Word" mClip0)
construct_HoleStyledText tk ~[] = Clip_StyledText $ hole
construct_ParseErrStyledText (StructuralTk _ _ pres _ _) ~[] = Clip_StyledText $ parseErr (StructuralParseErr pres)
construct_Word tk ~[mClip0] = Clip_Word $ reuseWord [tk] (retrieveArg "Word" "parts::List_WordPart" mClip0)
construct_HoleWord tk ~[] = Clip_Word $ hole
construct_ParseErrWord (StructuralTk _ _ pres _ _) ~[] = Clip_Word $ parseErr (StructuralParseErr pres)
construct_WordPart tk ~[mClip0] = Clip_WordPart $ reuseWordPart [tk] Nothing (retrieveArg "WordPart" "word::String" mClip0)
construct_OpenTag tk ~[mClip0] = Clip_WordPart $ reuseOpenTag [tk] (retrieveArg "OpenTag" "style::TextStyle" mClip0)
construct_CloseTag tk ~[mClip0] = Clip_WordPart $ reuseCloseTag [tk] (retrieveArg "CloseTag" "style::TextStyle" mClip0)
construct_HoleWordPart tk ~[] = Clip_WordPart $ hole
construct_ParseErrWordPart (StructuralTk _ _ pres _ _) ~[] = Clip_WordPart $ parseErr (StructuralParseErr pres)
construct_TextBold tk ~[] = Clip_TextStyle $ reuseTextBold [tk]
construct_TextItalic tk ~[] = Clip_TextStyle $ reuseTextItalic [tk]
construct_TextFontSize tk ~[mClip0] = Clip_TextStyle $ reuseTextFontSize [tk] (retrieveArg "TextFontSize" "s::Int" mClip0)
construct_TextColor tk ~[mClip0,mClip1,mClip2] = Clip_TextStyle $ reuseTextColor [tk] (retrieveArg "TextColor" "r::Int" mClip0) (retrieveArg "TextColor" "g::Int" mClip1) (retrieveArg "TextColor" "b::Int" mClip2)
construct_HoleTextStyle tk ~[] = Clip_TextStyle $ hole
construct_ParseErrTextStyle (StructuralTk _ _ pres _ _) ~[] = Clip_TextStyle $ parseErr (StructuralParseErr pres)
construct_Int_ tk ~[mClip0] = Clip_Int_ $ reuseInt_ [tk] (retrieveArg "Int_" "value::Int" mClip0)
construct_HoleInt_ tk ~[] = Clip_Int_ $ hole
construct_ParseErrInt_ (StructuralTk _ _ pres _ _) ~[] = Clip_Int_ $ parseErr (StructuralParseErr pres)
construct_Float_ tk ~[mClip0] = Clip_Float_ $ reuseFloat_ [tk] (retrieveArg "Float_" "value::Float" mClip0)
construct_HoleFloat_ tk ~[] = Clip_Float_ $ hole
construct_ParseErrFloat_ (StructuralTk _ _ pres _ _) ~[] = Clip_Float_ $ parseErr (StructuralParseErr pres)
construct_List_Expense tk mClips = genericConstruct_List "Expense" toList_Expense mClips
construct_HoleList_Expense tk ~[] = Clip_List_Expense $ hole
construct_ParseErrList_Expense (StructuralTk _ _ pres _ _) ~[] = Clip_List_Expense $ parseErr (StructuralParseErr pres)
construct_List_Currency tk mClips = genericConstruct_List "Currency" toList_Currency mClips
construct_HoleList_Currency tk ~[] = Clip_List_Currency $ hole
construct_ParseErrList_Currency (StructuralTk _ _ pres _ _) ~[] = Clip_List_Currency $ parseErr (StructuralParseErr pres)
construct_List_Thing tk mClips = genericConstruct_List "Thing" toList_Thing mClips
construct_HoleList_Thing tk ~[] = Clip_List_Thing $ hole
construct_ParseErrList_Thing (StructuralTk _ _ pres _ _) ~[] = Clip_List_Thing $ parseErr (StructuralParseErr pres)
construct_List_Task tk mClips = genericConstruct_List "Task" toList_Task mClips
construct_HoleList_Task tk ~[] = Clip_List_Task $ hole
construct_ParseErrList_Task (StructuralTk _ _ pres _ _) ~[] = Clip_List_Task $ parseErr (StructuralParseErr pres)
construct_List_Word tk mClips = genericConstruct_List "Word" toList_Word mClips
construct_HoleList_Word tk ~[] = Clip_List_Word $ hole
construct_ParseErrList_Word (StructuralTk _ _ pres _ _) ~[] = Clip_List_Word $ parseErr (StructuralParseErr pres)
construct_List_WordPart tk mClips = genericConstruct_List "WordPart" toList_WordPart mClips
construct_HoleList_WordPart tk ~[] = Clip_List_WordPart $ hole
construct_ParseErrList_WordPart (StructuralTk _ _ pres _ _) ~[] = Clip_List_WordPart $ parseErr (StructuralParseErr pres)
--------------------------------------------------------------------------
-- reuse functions --
--------------------------------------------------------------------------
reuseRootEnr :: [Token doc enr Node clip token] -> Maybe ChoiceDoc -> EnrichedDoc
reuseRootEnr nodes ma0
= case extractFromTokens extractRootEnr defaultRootEnr nodes of
(RootEnr a0) -> genericReuse1 RootEnr a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseRootEnr"
reuseRootDoc :: [Token doc enr Node clip token] -> Maybe ChoiceDoc -> Document
reuseRootDoc nodes ma0
= case extractFromTokens extractRootDoc defaultRootDoc nodes of
(RootDoc a0) -> genericReuse1 RootDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseRootDoc"
reuseFormDoc :: [Token doc enr Node clip token] -> Maybe Form -> ChoiceDoc
reuseFormDoc nodes ma0
= case extractFromTokens extractFormDoc defaultFormDoc nodes of
(FormDoc a0) -> genericReuse1 FormDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseFormDoc"
reuseTaskDoc :: [Token doc enr Node clip token] -> Maybe Tasks -> ChoiceDoc
reuseTaskDoc nodes ma0
= case extractFromTokens extractTaskDoc defaultTaskDoc nodes of
(TaskDoc a0) -> genericReuse1 TaskDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTaskDoc"
reuseSudokuDoc :: [Token doc enr Node clip token] -> Maybe Sudoku -> ChoiceDoc
reuseSudokuDoc nodes ma0
= case extractFromTokens extractSudokuDoc defaultSudokuDoc nodes of
(SudokuDoc a0) -> genericReuse1 SudokuDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseSudokuDoc"
reuseTestDoc :: [Token doc enr Node clip token] -> Maybe Test -> ChoiceDoc
reuseTestDoc nodes ma0
= case extractFromTokens extractTestDoc defaultTestDoc nodes of
(TestDoc a0) -> genericReuse1 TestDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTestDoc"
reuseForm :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Description -> Maybe List_Expense -> Maybe Int -> Maybe List_Currency -> Form
reuseForm nodes ma0 ma1 ma2 ma3 ma4
= case extractFromTokens extractForm defaultForm nodes of
(Form a0 a1 a2 a3 a4) -> genericReuse5 Form a0 a1 a2 a3 a4 ma0 ma1 ma2 ma3 ma4
_ -> error "Internal error:ProxParser_Generated.reuseForm"
reuseExpense :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Float_ -> Maybe Int -> Expense
reuseExpense nodes ma0 ma1 ma2
= case extractFromTokens extractExpense defaultExpense nodes of
(Expense a0 a1 a2) -> genericReuse3 Expense a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseExpense"
reuseCurrency :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Float_ -> Currency
reuseCurrency nodes ma0 ma1
= case extractFromTokens extractCurrency defaultCurrency nodes of
(Currency a0 a1) -> genericReuse2 Currency a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseCurrency"
reuseTasks :: [Token doc enr Node clip token] -> Maybe List_Thing -> Maybe Bool -> Maybe List_Task -> Tasks
reuseTasks nodes ma0 ma1 ma2
= case extractFromTokens extractTasks defaultTasks nodes of
(Tasks a0 a1 a2) -> genericReuse3 Tasks a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseTasks"
reuseThing :: [Token doc enr Node clip token] -> Maybe Int -> Thing
reuseThing nodes ma0
= case extractFromTokens extractThing defaultThing nodes of
(Thing a0) -> genericReuse1 Thing a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseThing"
reuseBasicTask :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Bool -> Task
reuseBasicTask nodes ma0 ma1
= case extractFromTokens extractBasicTask defaultBasicTask nodes of
(BasicTask a0 a1) -> genericReuse2 BasicTask a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseBasicTask"
reuseCompositeTask :: [Token doc enr Node clip token] -> Maybe Bool -> Maybe Description -> Maybe List_Task -> Task
reuseCompositeTask nodes ma0 ma1 ma2
= case extractFromTokens extractCompositeTask defaultCompositeTask nodes of
(CompositeTask a0 a1 a2) -> genericReuse3 CompositeTask a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseCompositeTask"
reuseDescription :: [Token doc enr Node clip token] -> Maybe String -> Description
reuseDescription nodes ma0
= case extractFromTokens extractDescription defaultDescription nodes of
(Description a0) -> genericReuse1 Description a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseDescription"
reuseSudoku :: [Token doc enr Node clip token] -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Sudoku
reuseSudoku nodes ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
= case extractFromTokens extractSudoku defaultSudoku nodes of
(Sudoku a0 a1 a2 a3 a4 a5 a6 a7 a8) -> genericReuse9 Sudoku a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
_ -> error "Internal error:ProxParser_Generated.reuseSudoku"
reuseRow :: [Token doc enr Node clip token] -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Row
reuseRow nodes ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
= case extractFromTokens extractRow defaultRow nodes of
(Row a0 a1 a2 a3 a4 a5 a6 a7 a8) -> genericReuse9 Row a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
_ -> error "Internal error:ProxParser_Generated.reuseRow"
reuseField :: [Token doc enr Node clip token] -> Maybe Int_ -> Field
reuseField nodes ma0
= case extractFromTokens extractField defaultField nodes of
(Field a0) -> genericReuse1 Field a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseField"
reuseTest :: [Token doc enr Node clip token] -> Maybe StyledText -> Test
reuseTest nodes ma0
= case extractFromTokens extractTest defaultTest nodes of
(Test a0) -> genericReuse1 Test a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTest"
reuseStyledText :: [Token doc enr Node clip token] -> Maybe List_Word -> StyledText
reuseStyledText nodes ma0
= case extractFromTokens extractStyledText defaultStyledText nodes of
(StyledText a0) -> genericReuse1 StyledText a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseStyledText"
reuseWord :: [Token doc enr Node clip token] -> Maybe List_WordPart -> Word
reuseWord nodes ma0
= case extractFromTokens extractWord defaultWord nodes of
(Word a0) -> genericReuse1 Word a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseWord"
reuseWordPart :: [Token doc enr Node clip token] -> Maybe IDP -> Maybe String -> WordPart
reuseWordPart nodes ma0 ma1
= case extractFromTokens extractWordPart defaultWordPart nodes of
(WordPart a0 a1) -> genericReuse2 WordPart a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseWordPart"
reuseOpenTag :: [Token doc enr Node clip token] -> Maybe TextStyle -> WordPart
reuseOpenTag nodes ma0
= case extractFromTokens extractOpenTag defaultOpenTag nodes of
(OpenTag a0) -> genericReuse1 OpenTag a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseOpenTag"
reuseCloseTag :: [Token doc enr Node clip token] -> Maybe TextStyle -> WordPart
reuseCloseTag nodes ma0
= case extractFromTokens extractCloseTag defaultCloseTag nodes of
(CloseTag a0) -> genericReuse1 CloseTag a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseCloseTag"
reuseTextBold :: [Token doc enr Node clip token] -> TextStyle
reuseTextBold nodes
= case extractFromTokens extractTextBold defaultTextBold nodes of
(TextBold) -> genericReuse0 TextBold
_ -> error "Internal error:ProxParser_Generated.reuseTextBold"
reuseTextItalic :: [Token doc enr Node clip token] -> TextStyle
reuseTextItalic nodes
= case extractFromTokens extractTextItalic defaultTextItalic nodes of
(TextItalic) -> genericReuse0 TextItalic
_ -> error "Internal error:ProxParser_Generated.reuseTextItalic"
reuseTextFontSize :: [Token doc enr Node clip token] -> Maybe Int -> TextStyle
reuseTextFontSize nodes ma0
= case extractFromTokens extractTextFontSize defaultTextFontSize nodes of
(TextFontSize a0) -> genericReuse1 TextFontSize a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTextFontSize"
reuseTextColor :: [Token doc enr Node clip token] -> Maybe Int -> Maybe Int -> Maybe Int -> TextStyle
reuseTextColor nodes ma0 ma1 ma2
= case extractFromTokens extractTextColor defaultTextColor nodes of
(TextColor a0 a1 a2) -> genericReuse3 TextColor a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseTextColor"
reuseInt_ :: [Token doc enr Node clip token] -> Maybe Int -> Int_
reuseInt_ nodes ma0
= case extractFromTokens extractInt_ defaultInt_ nodes of
(Int_ a0) -> genericReuse1 Int_ a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseInt_"
reuseFloat_ :: [Token doc enr Node clip token] -> Maybe Float -> Float_
reuseFloat_ nodes ma0
= case extractFromTokens extractFloat_ defaultFloat_ nodes of
(Float_ a0) -> genericReuse1 Float_ a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseFloat_"
reuseList_Expense :: [Token doc enr Node clip token] -> Maybe ConsList_Expense -> List_Expense
reuseList_Expense nodes ma0
= case extractFromTokens extractList_Expense defaultList_Expense nodes of
(List_Expense a0) -> genericReuse1 List_Expense a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Expense"
reuseList_Currency :: [Token doc enr Node clip token] -> Maybe ConsList_Currency -> List_Currency
reuseList_Currency nodes ma0
= case extractFromTokens extractList_Currency defaultList_Currency nodes of
(List_Currency a0) -> genericReuse1 List_Currency a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Currency"
reuseList_Thing :: [Token doc enr Node clip token] -> Maybe ConsList_Thing -> List_Thing
reuseList_Thing nodes ma0
= case extractFromTokens extractList_Thing defaultList_Thing nodes of
(List_Thing a0) -> genericReuse1 List_Thing a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Thing"
reuseList_Task :: [Token doc enr Node clip token] -> Maybe ConsList_Task -> List_Task
reuseList_Task nodes ma0
= case extractFromTokens extractList_Task defaultList_Task nodes of
(List_Task a0) -> genericReuse1 List_Task a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Task"
reuseList_Word :: [Token doc enr Node clip token] -> Maybe ConsList_Word -> List_Word
reuseList_Word nodes ma0
= case extractFromTokens extractList_Word defaultList_Word nodes of
(List_Word a0) -> genericReuse1 List_Word a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Word"
reuseList_WordPart :: [Token doc enr Node clip token] -> Maybe ConsList_WordPart -> List_WordPart
reuseList_WordPart nodes ma0
= case extractFromTokens extractList_WordPart defaultList_WordPart nodes of
(List_WordPart a0) -> genericReuse1 List_WordPart a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_WordPart"
--------------------------------------------------------------------------
-- extract functions --
--------------------------------------------------------------------------
extractRootEnr :: Maybe Node -> Maybe EnrichedDoc
extractRootEnr (Just (Node_RootEnr x@(RootEnr _) _)) = Just x
extractRootEnr _ = Nothing
extractRootDoc :: Maybe Node -> Maybe Document
extractRootDoc (Just (Node_RootDoc x@(RootDoc _) _)) = Just x
extractRootDoc _ = Nothing
extractFormDoc :: Maybe Node -> Maybe ChoiceDoc
extractFormDoc (Just (Node_FormDoc x@(FormDoc _) _)) = Just x
extractFormDoc _ = Nothing
extractTaskDoc :: Maybe Node -> Maybe ChoiceDoc
extractTaskDoc (Just (Node_TaskDoc x@(TaskDoc _) _)) = Just x
extractTaskDoc _ = Nothing
extractSudokuDoc :: Maybe Node -> Maybe ChoiceDoc
extractSudokuDoc (Just (Node_SudokuDoc x@(SudokuDoc _) _)) = Just x
extractSudokuDoc _ = Nothing
extractTestDoc :: Maybe Node -> Maybe ChoiceDoc
extractTestDoc (Just (Node_TestDoc x@(TestDoc _) _)) = Just x
extractTestDoc _ = Nothing
extractForm :: Maybe Node -> Maybe Form
extractForm (Just (Node_Form x@(Form _ _ _ _ _) _)) = Just x
extractForm _ = Nothing
extractExpense :: Maybe Node -> Maybe Expense
extractExpense (Just (Node_Expense x@(Expense _ _ _) _)) = Just x
extractExpense _ = Nothing
extractCurrency :: Maybe Node -> Maybe Currency
extractCurrency (Just (Node_Currency x@(Currency _ _) _)) = Just x
extractCurrency _ = Nothing
extractTasks :: Maybe Node -> Maybe Tasks
extractTasks (Just (Node_Tasks x@(Tasks _ _ _) _)) = Just x
extractTasks _ = Nothing
extractThing :: Maybe Node -> Maybe Thing
extractThing (Just (Node_Thing x@(Thing _) _)) = Just x
extractThing _ = Nothing
extractBasicTask :: Maybe Node -> Maybe Task
extractBasicTask (Just (Node_BasicTask x@(BasicTask _ _) _)) = Just x
extractBasicTask _ = Nothing
extractCompositeTask :: Maybe Node -> Maybe Task
extractCompositeTask (Just (Node_CompositeTask x@(CompositeTask _ _ _) _)) = Just x
extractCompositeTask _ = Nothing
extractDescription :: Maybe Node -> Maybe Description
extractDescription (Just (Node_Description x@(Description _) _)) = Just x
extractDescription _ = Nothing
extractSudoku :: Maybe Node -> Maybe Sudoku
extractSudoku (Just (Node_Sudoku x@(Sudoku _ _ _ _ _ _ _ _ _) _)) = Just x
extractSudoku _ = Nothing
extractRow :: Maybe Node -> Maybe Row
extractRow (Just (Node_Row x@(Row _ _ _ _ _ _ _ _ _) _)) = Just x
extractRow _ = Nothing
extractField :: Maybe Node -> Maybe Field
extractField (Just (Node_Field x@(Field _) _)) = Just x
extractField _ = Nothing
extractTest :: Maybe Node -> Maybe Test
extractTest (Just (Node_Test x@(Test _) _)) = Just x
extractTest _ = Nothing
extractStyledText :: Maybe Node -> Maybe StyledText
extractStyledText (Just (Node_StyledText x@(StyledText _) _)) = Just x
extractStyledText _ = Nothing
extractWord :: Maybe Node -> Maybe Word
extractWord (Just (Node_Word x@(Word _) _)) = Just x
extractWord _ = Nothing
extractWordPart :: Maybe Node -> Maybe WordPart
extractWordPart (Just (Node_WordPart x@(WordPart _ _) _)) = Just x
extractWordPart _ = Nothing
extractOpenTag :: Maybe Node -> Maybe WordPart
extractOpenTag (Just (Node_OpenTag x@(OpenTag _) _)) = Just x
extractOpenTag _ = Nothing
extractCloseTag :: Maybe Node -> Maybe WordPart
extractCloseTag (Just (Node_CloseTag x@(CloseTag _) _)) = Just x
extractCloseTag _ = Nothing
extractTextBold :: Maybe Node -> Maybe TextStyle
extractTextBold (Just (Node_TextBold x@(TextBold) _)) = Just x
extractTextBold _ = Nothing
extractTextItalic :: Maybe Node -> Maybe TextStyle
extractTextItalic (Just (Node_TextItalic x@(TextItalic) _)) = Just x
extractTextItalic _ = Nothing
extractTextFontSize :: Maybe Node -> Maybe TextStyle
extractTextFontSize (Just (Node_TextFontSize x@(TextFontSize _) _)) = Just x
extractTextFontSize _ = Nothing
extractTextColor :: Maybe Node -> Maybe TextStyle
extractTextColor (Just (Node_TextColor x@(TextColor _ _ _) _)) = Just x
extractTextColor _ = Nothing
extractInt_ :: Maybe Node -> Maybe Int_
extractInt_ (Just (Node_Int_ x@(Int_ _) _)) = Just x
extractInt_ _ = Nothing
extractFloat_ :: Maybe Node -> Maybe Float_
extractFloat_ (Just (Node_Float_ x@(Float_ _) _)) = Just x
extractFloat_ _ = Nothing
extractList_Expense :: Maybe Node -> Maybe List_Expense
extractList_Expense (Just (Node_List_Expense x@(List_Expense _) _)) = Just x
extractList_Expense _ = Nothing
extractList_Currency :: Maybe Node -> Maybe List_Currency
extractList_Currency (Just (Node_List_Currency x@(List_Currency _) _)) = Just x
extractList_Currency _ = Nothing
extractList_Thing :: Maybe Node -> Maybe List_Thing
extractList_Thing (Just (Node_List_Thing x@(List_Thing _) _)) = Just x
extractList_Thing _ = Nothing
extractList_Task :: Maybe Node -> Maybe List_Task
extractList_Task (Just (Node_List_Task x@(List_Task _) _)) = Just x
extractList_Task _ = Nothing
extractList_Word :: Maybe Node -> Maybe List_Word
extractList_Word (Just (Node_List_Word x@(List_Word _) _)) = Just x
extractList_Word _ = Nothing
extractList_WordPart :: Maybe Node -> Maybe List_WordPart
extractList_WordPart (Just (Node_List_WordPart x@(List_WordPart _) _)) = Just x
extractList_WordPart _ = Nothing
--------------------------------------------------------------------------
-- default functions --
--------------------------------------------------------------------------
defaultRootEnr :: EnrichedDoc
defaultRootEnr = RootEnr hole
defaultRootDoc :: Document
defaultRootDoc = RootDoc hole
defaultFormDoc :: ChoiceDoc
defaultFormDoc = FormDoc hole
defaultTaskDoc :: ChoiceDoc
defaultTaskDoc = TaskDoc hole
defaultSudokuDoc :: ChoiceDoc
defaultSudokuDoc = SudokuDoc hole
defaultTestDoc :: ChoiceDoc
defaultTestDoc = TestDoc hole
defaultForm :: Form
defaultForm = Form hole hole hole hole hole
defaultExpense :: Expense
defaultExpense = Expense hole hole hole
defaultCurrency :: Currency
defaultCurrency = Currency hole hole
defaultTasks :: Tasks
defaultTasks = Tasks hole hole hole
defaultThing :: Thing
defaultThing = Thing hole
defaultBasicTask :: Task
defaultBasicTask = BasicTask hole hole
defaultCompositeTask :: Task
defaultCompositeTask = CompositeTask hole hole hole
defaultDescription :: Description
defaultDescription = Description hole
defaultSudoku :: Sudoku
defaultSudoku = Sudoku hole hole hole hole hole hole hole hole hole
defaultRow :: Row
defaultRow = Row hole hole hole hole hole hole hole hole hole
defaultField :: Field
defaultField = Field hole
defaultTest :: Test
defaultTest = Test hole
defaultStyledText :: StyledText
defaultStyledText = StyledText hole
defaultWord :: Word
defaultWord = Word hole
defaultWordPart :: WordPart
defaultWordPart = WordPart NoIDP hole
defaultOpenTag :: WordPart
defaultOpenTag = OpenTag hole
defaultCloseTag :: WordPart
defaultCloseTag = CloseTag hole
defaultTextBold :: TextStyle
defaultTextBold = TextBold
defaultTextItalic :: TextStyle
defaultTextItalic = TextItalic
defaultTextFontSize :: TextStyle
defaultTextFontSize = TextFontSize hole
defaultTextColor :: TextStyle
defaultTextColor = TextColor hole hole hole
defaultInt_ :: Int_
defaultInt_ = Int_ hole
defaultFloat_ :: Float_
defaultFloat_ = Float_ hole
defaultList_Expense :: List_Expense
defaultList_Expense = List_Expense Nil_Expense
defaultList_Currency :: List_Currency
defaultList_Currency = List_Currency Nil_Currency
defaultList_Thing :: List_Thing
defaultList_Thing = List_Thing Nil_Thing
defaultList_Task :: List_Task
defaultList_Task = List_Task Nil_Task
defaultList_Word :: List_Word
defaultList_Word = List_Word Nil_Word
defaultList_WordPart :: List_WordPart
defaultList_WordPart = List_WordPart Nil_WordPart
--------------------------------------------------------------------------
-- extractFromTokens --
--------------------------------------------------------------------------
return result of the first extraction application in the list that is not Nothing
extractFromTokens :: (Maybe Node -> Maybe a) -> a -> [Token doc enr Node clip token] -> a
extractFromTokens extr def [] = def
extractFromTokens extr def (t:ts) = maybe (extractFromTokens extr def ts) id (extr (tokenNode t))
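-- For instance, extractFromTokens extractField defaultField tokens yields the
-- first Field stored in the tokens' nodes, or defaultField when none is found.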
--------------------------------------------------------------------------
-- genericReuse functions --
--------------------------------------------------------------------------
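-- Each genericReuseN rebuilds an N-argument constructor application, keeping
-- each old argument unless a replacement (Just) value is supplied for it.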
genericReuse0 :: (r) ->
r
genericReuse0 f =
f
genericReuse1 :: (a0 -> r) ->
a0 ->
Maybe a0 -> r
genericReuse1 f a0 ma0 =
f (maybe a0 id ma0)
genericReuse2 :: (a0 -> a1 -> r) ->
a0 -> a1 ->
Maybe a0 -> Maybe a1 -> r
genericReuse2 f a0 a1 ma0 ma1 =
f (maybe a0 id ma0) (maybe a1 id ma1)
genericReuse3 :: (a0 -> a1 -> a2 -> r) ->
a0 -> a1 -> a2 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> r
genericReuse3 f a0 a1 a2 ma0 ma1 ma2 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2)
genericReuse4 :: (a0 -> a1 -> a2 -> a3 -> r) ->
a0 -> a1 -> a2 -> a3 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> r
genericReuse4 f a0 a1 a2 a3 ma0 ma1 ma2 ma3 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3)
genericReuse5 :: (a0 -> a1 -> a2 -> a3 -> a4 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> r
genericReuse5 f a0 a1 a2 a3 a4 ma0 ma1 ma2 ma3 ma4 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4)
genericReuse6 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> r
genericReuse6 f a0 a1 a2 a3 a4 a5 ma0 ma1 ma2 ma3 ma4 ma5 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5)
genericReuse7 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> r
genericReuse7 f a0 a1 a2 a3 a4 a5 a6 ma0 ma1 ma2 ma3 ma4 ma5 ma6 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6)
genericReuse8 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> Maybe a7 -> r
genericReuse8 f a0 a1 a2 a3 a4 a5 a6 a7 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6) (maybe a7 id ma7)
genericReuse9 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> Maybe a7 -> Maybe a8 -> r
genericReuse9 f a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6) (maybe a7 id ma7) (maybe a8 id ma8)
| null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/multi-editor/src/ProxParser_Generated.hs | haskell | --- GENERATED PART STARTS HERE. DO NOT EDIT ON OR BEYOND THIS LINE -----
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
reuse functions --
------------------------------------------------------------------------
------------------------------------------------------------------------
extract functions --
------------------------------------------------------------------------
------------------------------------------------------------------------
default functions --
------------------------------------------------------------------------
------------------------------------------------------------------------
extractFromTokens --
------------------------------------------------------------------------
------------------------------------------------------------------------
genericReuse functions --
------------------------------------------------------------------------ | module ProxParser_Generated where
import Prelude hiding (Word)
import Common.CommonTypes hiding (Dirty (..), defaultTextColor)
import Presentation.PresLayerTypes
import Presentation.PresLayerUtils
import Evaluation.DocumentEdit
import DocumentEdit_Generated
import DocUtils_Generated
import Evaluation.DocTypes
import DocTypes_Generated
import Presentation.PresentationParsing
import Data.Maybe
type ProxParser a = ListParser Document EnrichedDoc Node ClipDoc UserToken a
instance Construct Document EnrichedDoc Node ClipDoc UserToken where
construct NoNode = error $ "ProxParser_Generated.construct not defined on NoNode"
construct (Node_RootEnr _ _) = construct_RootEnr
construct (Node_HoleEnrichedDoc _ _) = construct_HoleEnrichedDoc
construct (Node_ParseErrEnrichedDoc _ _) = construct_ParseErrEnrichedDoc
construct (Node_RootDoc _ _) = construct_RootDoc
construct (Node_HoleDocument _ _) = construct_HoleDocument
construct (Node_ParseErrDocument _ _) = construct_ParseErrDocument
construct (Node_FormDoc _ _) = construct_FormDoc
construct (Node_TaskDoc _ _) = construct_TaskDoc
construct (Node_SudokuDoc _ _) = construct_SudokuDoc
construct (Node_TestDoc _ _) = construct_TestDoc
construct (Node_HoleChoiceDoc _ _) = construct_HoleChoiceDoc
construct (Node_ParseErrChoiceDoc _ _) = construct_ParseErrChoiceDoc
construct (Node_Form _ _) = construct_Form
construct (Node_HoleForm _ _) = construct_HoleForm
construct (Node_ParseErrForm _ _) = construct_ParseErrForm
construct (Node_Expense _ _) = construct_Expense
construct (Node_HoleExpense _ _) = construct_HoleExpense
construct (Node_ParseErrExpense _ _) = construct_ParseErrExpense
construct (Node_Currency _ _) = construct_Currency
construct (Node_HoleCurrency _ _) = construct_HoleCurrency
construct (Node_ParseErrCurrency _ _) = construct_ParseErrCurrency
construct (Node_Tasks _ _) = construct_Tasks
construct (Node_HoleTasks _ _) = construct_HoleTasks
construct (Node_ParseErrTasks _ _) = construct_ParseErrTasks
construct (Node_Thing _ _) = construct_Thing
construct (Node_HoleThing _ _) = construct_HoleThing
construct (Node_ParseErrThing _ _) = construct_ParseErrThing
construct (Node_BasicTask _ _) = construct_BasicTask
construct (Node_CompositeTask _ _) = construct_CompositeTask
construct (Node_HoleTask _ _) = construct_HoleTask
construct (Node_ParseErrTask _ _) = construct_ParseErrTask
construct (Node_Description _ _) = construct_Description
construct (Node_HoleDescription _ _) = construct_HoleDescription
construct (Node_ParseErrDescription _ _) = construct_ParseErrDescription
construct (Node_Sudoku _ _) = construct_Sudoku
construct (Node_HoleSudoku _ _) = construct_HoleSudoku
construct (Node_ParseErrSudoku _ _) = construct_ParseErrSudoku
construct (Node_Row _ _) = construct_Row
construct (Node_HoleRow _ _) = construct_HoleRow
construct (Node_ParseErrRow _ _) = construct_ParseErrRow
construct (Node_Field _ _) = construct_Field
construct (Node_HoleField _ _) = construct_HoleField
construct (Node_ParseErrField _ _) = construct_ParseErrField
construct (Node_Test _ _) = construct_Test
construct (Node_HoleTest _ _) = construct_HoleTest
construct (Node_ParseErrTest _ _) = construct_ParseErrTest
construct (Node_StyledText _ _) = construct_StyledText
construct (Node_HoleStyledText _ _) = construct_HoleStyledText
construct (Node_ParseErrStyledText _ _) = construct_ParseErrStyledText
construct (Node_Word _ _) = construct_Word
construct (Node_HoleWord _ _) = construct_HoleWord
construct (Node_ParseErrWord _ _) = construct_ParseErrWord
construct (Node_WordPart _ _) = construct_WordPart
construct (Node_OpenTag _ _) = construct_OpenTag
construct (Node_CloseTag _ _) = construct_CloseTag
construct (Node_HoleWordPart _ _) = construct_HoleWordPart
construct (Node_ParseErrWordPart _ _) = construct_ParseErrWordPart
construct (Node_TextBold _ _) = construct_TextBold
construct (Node_TextItalic _ _) = construct_TextItalic
construct (Node_TextFontSize _ _) = construct_TextFontSize
construct (Node_TextColor _ _) = construct_TextColor
construct (Node_HoleTextStyle _ _) = construct_HoleTextStyle
construct (Node_ParseErrTextStyle _ _) = construct_ParseErrTextStyle
construct (Node_Int_ _ _) = construct_Int_
construct (Node_HoleInt_ _ _) = construct_HoleInt_
construct (Node_ParseErrInt_ _ _) = construct_ParseErrInt_
construct (Node_Float_ _ _) = construct_Float_
construct (Node_HoleFloat_ _ _) = construct_HoleFloat_
construct (Node_ParseErrFloat_ _ _) = construct_ParseErrFloat_
construct (Node_List_Expense _ _) = construct_List_Expense
construct (Node_HoleList_Expense _ _) = construct_HoleList_Expense
construct (Node_ParseErrList_Expense _ _) = construct_ParseErrList_Expense
construct (Node_List_Currency _ _) = construct_List_Currency
construct (Node_HoleList_Currency _ _) = construct_HoleList_Currency
construct (Node_ParseErrList_Currency _ _) = construct_ParseErrList_Currency
construct (Node_List_Thing _ _) = construct_List_Thing
construct (Node_HoleList_Thing _ _) = construct_HoleList_Thing
construct (Node_ParseErrList_Thing _ _) = construct_ParseErrList_Thing
construct (Node_List_Task _ _) = construct_List_Task
construct (Node_HoleList_Task _ _) = construct_HoleList_Task
construct (Node_ParseErrList_Task _ _) = construct_ParseErrList_Task
construct (Node_List_Word _ _) = construct_List_Word
construct (Node_HoleList_Word _ _) = construct_HoleList_Word
construct (Node_ParseErrList_Word _ _) = construct_ParseErrList_Word
construct (Node_List_WordPart _ _) = construct_List_WordPart
construct (Node_HoleList_WordPart _ _) = construct_HoleList_WordPart
construct (Node_ParseErrList_WordPart _ _) = construct_ParseErrList_WordPart
construct_RootEnr tk ~[mClip0] = Clip_EnrichedDoc $ reuseRootEnr [tk] (retrieveArg "RootEnr" "choiceDoc::ChoiceDoc" mClip0)
construct_HoleEnrichedDoc tk ~[] = Clip_EnrichedDoc $ hole
construct_ParseErrEnrichedDoc (StructuralTk _ _ pres _ _) ~[] = Clip_EnrichedDoc $ parseErr (StructuralParseErr pres)
construct_RootDoc tk ~[mClip0] = Clip_Document $ reuseRootDoc [tk] (retrieveArg "RootDoc" "choiceDoc::ChoiceDoc" mClip0)
construct_HoleDocument tk ~[] = Clip_Document $ hole
construct_ParseErrDocument (StructuralTk _ _ pres _ _) ~[] = Clip_Document $ parseErr (StructuralParseErr pres)
construct_FormDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseFormDoc [tk] (retrieveArg "FormDoc" "form::Form" mClip0)
construct_TaskDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseTaskDoc [tk] (retrieveArg "TaskDoc" "tasks::Tasks" mClip0)
construct_SudokuDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseSudokuDoc [tk] (retrieveArg "SudokuDoc" "sudoku::Sudoku" mClip0)
construct_TestDoc tk ~[mClip0] = Clip_ChoiceDoc $ reuseTestDoc [tk] (retrieveArg "TestDoc" "test::Test" mClip0)
construct_HoleChoiceDoc tk ~[] = Clip_ChoiceDoc $ hole
construct_ParseErrChoiceDoc (StructuralTk _ _ pres _ _) ~[] = Clip_ChoiceDoc $ parseErr (StructuralParseErr pres)
construct_Form tk ~[mClip0,mClip1,mClip2,mClip3,mClip4] = Clip_Form $ reuseForm [tk] (retrieveArg "Form" "name::Description" mClip0) (retrieveArg "Form" "faculty::Description" mClip1) (retrieveArg "Form" "expenses::List_Expense" mClip2) (retrieveArg "Form" "baseCurrency::Int" mClip3) (retrieveArg "Form" "currencies::List_Currency" mClip4)
construct_HoleForm tk ~[] = Clip_Form $ hole
construct_ParseErrForm (StructuralTk _ _ pres _ _) ~[] = Clip_Form $ parseErr (StructuralParseErr pres)
construct_Expense tk ~[mClip0,mClip1,mClip2] = Clip_Expense $ reuseExpense [tk] (retrieveArg "Expense" "description::Description" mClip0) (retrieveArg "Expense" "amount::Float_" mClip1) (retrieveArg "Expense" "currencyIx::Int" mClip2)
construct_HoleExpense tk ~[] = Clip_Expense $ hole
construct_ParseErrExpense (StructuralTk _ _ pres _ _) ~[] = Clip_Expense $ parseErr (StructuralParseErr pres)
construct_Currency tk ~[mClip0,mClip1] = Clip_Currency $ reuseCurrency [tk] (retrieveArg "Currency" "name::Description" mClip0) (retrieveArg "Currency" "euroRate::Float_" mClip1)
construct_HoleCurrency tk ~[] = Clip_Currency $ hole
construct_ParseErrCurrency (StructuralTk _ _ pres _ _) ~[] = Clip_Currency $ parseErr (StructuralParseErr pres)
construct_Tasks tk ~[mClip0,mClip1,mClip2] = Clip_Tasks $ reuseTasks [tk] (retrieveArg "Tasks" "things::List_Thing" mClip0) (retrieveArg "Tasks" "showCompleted::Bool" mClip1) (retrieveArg "Tasks" "tasks::List_Task" mClip2)
construct_HoleTasks tk ~[] = Clip_Tasks $ hole
construct_ParseErrTasks (StructuralTk _ _ pres _ _) ~[] = Clip_Tasks $ parseErr (StructuralParseErr pres)
construct_Thing tk ~[mClip0] = Clip_Thing $ reuseThing [tk] (retrieveArg "Thing" "size::Int" mClip0)
construct_HoleThing tk ~[] = Clip_Thing $ hole
construct_ParseErrThing (StructuralTk _ _ pres _ _) ~[] = Clip_Thing $ parseErr (StructuralParseErr pres)
construct_BasicTask tk ~[mClip0,mClip1] = Clip_Task $ reuseBasicTask [tk] (retrieveArg "BasicTask" "description::Description" mClip0) (retrieveArg "BasicTask" "completed::Bool" mClip1)
construct_CompositeTask tk ~[mClip0,mClip1,mClip2] = Clip_Task $ reuseCompositeTask [tk] (retrieveArg "CompositeTask" "expanded::Bool" mClip0) (retrieveArg "CompositeTask" "description::Description" mClip1) (retrieveArg "CompositeTask" "subtasks::List_Task" mClip2)
construct_HoleTask tk ~[] = Clip_Task $ hole
construct_ParseErrTask (StructuralTk _ _ pres _ _) ~[] = Clip_Task $ parseErr (StructuralParseErr pres)
construct_Description tk ~[mClip0] = Clip_Description $ reuseDescription [tk] (retrieveArg "Description" "str::String" mClip0)
construct_HoleDescription tk ~[] = Clip_Description $ hole
construct_ParseErrDescription (StructuralTk _ _ pres _ _) ~[] = Clip_Description $ parseErr (StructuralParseErr pres)
construct_Sudoku tk ~[mClip0,mClip1,mClip2,mClip3,mClip4,mClip5,mClip6,mClip7,mClip8] = Clip_Sudoku $ reuseSudoku [tk] (retrieveArg "Sudoku" "r0::Row" mClip0) (retrieveArg "Sudoku" "r1::Row" mClip1) (retrieveArg "Sudoku" "r2::Row" mClip2) (retrieveArg "Sudoku" "r3::Row" mClip3) (retrieveArg "Sudoku" "r4::Row" mClip4) (retrieveArg "Sudoku" "r5::Row" mClip5) (retrieveArg "Sudoku" "r6::Row" mClip6) (retrieveArg "Sudoku" "r7::Row" mClip7) (retrieveArg "Sudoku" "r8::Row" mClip8)
construct_HoleSudoku tk ~[] = Clip_Sudoku $ hole
construct_ParseErrSudoku (StructuralTk _ _ pres _ _) ~[] = Clip_Sudoku $ parseErr (StructuralParseErr pres)
construct_Row tk ~[mClip0,mClip1,mClip2,mClip3,mClip4,mClip5,mClip6,mClip7,mClip8] = Clip_Row $ reuseRow [tk] (retrieveArg "Row" "f0::Field" mClip0) (retrieveArg "Row" "f1::Field" mClip1) (retrieveArg "Row" "f2::Field" mClip2) (retrieveArg "Row" "f3::Field" mClip3) (retrieveArg "Row" "f4::Field" mClip4) (retrieveArg "Row" "f5::Field" mClip5) (retrieveArg "Row" "f6::Field" mClip6) (retrieveArg "Row" "f7::Field" mClip7) (retrieveArg "Row" "f8::Field" mClip8)
construct_HoleRow tk ~[] = Clip_Row $ hole
construct_ParseErrRow (StructuralTk _ _ pres _ _) ~[] = Clip_Row $ parseErr (StructuralParseErr pres)
construct_Field tk ~[mClip0] = Clip_Field $ reuseField [tk] (retrieveArg "Field" "val::Int_" mClip0)
construct_HoleField tk ~[] = Clip_Field $ hole
construct_ParseErrField (StructuralTk _ _ pres _ _) ~[] = Clip_Field $ parseErr (StructuralParseErr pres)
construct_Test tk ~[mClip0] = Clip_Test $ reuseTest [tk] (retrieveArg "Test" "styledText::StyledText" mClip0)
construct_HoleTest tk ~[] = Clip_Test $ hole
construct_ParseErrTest (StructuralTk _ _ pres _ _) ~[] = Clip_Test $ parseErr (StructuralParseErr pres)
construct_StyledText tk ~[mClip0] = Clip_StyledText $ reuseStyledText [tk] (retrieveArg "StyledText" "words::List_Word" mClip0)
construct_HoleStyledText tk ~[] = Clip_StyledText $ hole
construct_ParseErrStyledText (StructuralTk _ _ pres _ _) ~[] = Clip_StyledText $ parseErr (StructuralParseErr pres)
construct_Word tk ~[mClip0] = Clip_Word $ reuseWord [tk] (retrieveArg "Word" "parts::List_WordPart" mClip0)
construct_HoleWord tk ~[] = Clip_Word $ hole
construct_ParseErrWord (StructuralTk _ _ pres _ _) ~[] = Clip_Word $ parseErr (StructuralParseErr pres)
construct_WordPart tk ~[mClip0] = Clip_WordPart $ reuseWordPart [tk] Nothing (retrieveArg "WordPart" "word::String" mClip0)
construct_OpenTag tk ~[mClip0] = Clip_WordPart $ reuseOpenTag [tk] (retrieveArg "OpenTag" "style::TextStyle" mClip0)
construct_CloseTag tk ~[mClip0] = Clip_WordPart $ reuseCloseTag [tk] (retrieveArg "CloseTag" "style::TextStyle" mClip0)
construct_HoleWordPart tk ~[] = Clip_WordPart $ hole
construct_ParseErrWordPart (StructuralTk _ _ pres _ _) ~[] = Clip_WordPart $ parseErr (StructuralParseErr pres)
construct_TextBold tk ~[] = Clip_TextStyle $ reuseTextBold [tk]
construct_TextItalic tk ~[] = Clip_TextStyle $ reuseTextItalic [tk]
construct_TextFontSize tk ~[mClip0] = Clip_TextStyle $ reuseTextFontSize [tk] (retrieveArg "TextFontSize" "s::Int" mClip0)
construct_TextColor tk ~[mClip0,mClip1,mClip2] = Clip_TextStyle $ reuseTextColor [tk] (retrieveArg "TextColor" "r::Int" mClip0) (retrieveArg "TextColor" "g::Int" mClip1) (retrieveArg "TextColor" "b::Int" mClip2)
construct_HoleTextStyle tk ~[] = Clip_TextStyle $ hole
construct_ParseErrTextStyle (StructuralTk _ _ pres _ _) ~[] = Clip_TextStyle $ parseErr (StructuralParseErr pres)
construct_Int_ tk ~[mClip0] = Clip_Int_ $ reuseInt_ [tk] (retrieveArg "Int_" "value::Int" mClip0)
construct_HoleInt_ tk ~[] = Clip_Int_ $ hole
construct_ParseErrInt_ (StructuralTk _ _ pres _ _) ~[] = Clip_Int_ $ parseErr (StructuralParseErr pres)
construct_Float_ tk ~[mClip0] = Clip_Float_ $ reuseFloat_ [tk] (retrieveArg "Float_" "value::Float" mClip0)
construct_HoleFloat_ tk ~[] = Clip_Float_ $ hole
construct_ParseErrFloat_ (StructuralTk _ _ pres _ _) ~[] = Clip_Float_ $ parseErr (StructuralParseErr pres)
construct_List_Expense tk mClips = genericConstruct_List "Expense" toList_Expense mClips
construct_HoleList_Expense tk ~[] = Clip_List_Expense $ hole
construct_ParseErrList_Expense (StructuralTk _ _ pres _ _) ~[] = Clip_List_Expense $ parseErr (StructuralParseErr pres)
construct_List_Currency tk mClips = genericConstruct_List "Currency" toList_Currency mClips
construct_HoleList_Currency tk ~[] = Clip_List_Currency $ hole
construct_ParseErrList_Currency (StructuralTk _ _ pres _ _) ~[] = Clip_List_Currency $ parseErr (StructuralParseErr pres)
construct_List_Thing tk mClips = genericConstruct_List "Thing" toList_Thing mClips
construct_HoleList_Thing tk ~[] = Clip_List_Thing $ hole
construct_ParseErrList_Thing (StructuralTk _ _ pres _ _) ~[] = Clip_List_Thing $ parseErr (StructuralParseErr pres)
construct_List_Task tk mClips = genericConstruct_List "Task" toList_Task mClips
construct_HoleList_Task tk ~[] = Clip_List_Task $ hole
construct_ParseErrList_Task (StructuralTk _ _ pres _ _) ~[] = Clip_List_Task $ parseErr (StructuralParseErr pres)
construct_List_Word tk mClips = genericConstruct_List "Word" toList_Word mClips
construct_HoleList_Word tk ~[] = Clip_List_Word $ hole
construct_ParseErrList_Word (StructuralTk _ _ pres _ _) ~[] = Clip_List_Word $ parseErr (StructuralParseErr pres)
construct_List_WordPart tk mClips = genericConstruct_List "WordPart" toList_WordPart mClips
construct_HoleList_WordPart tk ~[] = Clip_List_WordPart $ hole
construct_ParseErrList_WordPart (StructuralTk _ _ pres _ _) ~[] = Clip_List_WordPart $ parseErr (StructuralParseErr pres)
reuseRootEnr :: [Token doc enr Node clip token] -> Maybe ChoiceDoc -> EnrichedDoc
reuseRootEnr nodes ma0
= case extractFromTokens extractRootEnr defaultRootEnr nodes of
(RootEnr a0) -> genericReuse1 RootEnr a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseRootEnr"
reuseRootDoc :: [Token doc enr Node clip token] -> Maybe ChoiceDoc -> Document
reuseRootDoc nodes ma0
= case extractFromTokens extractRootDoc defaultRootDoc nodes of
(RootDoc a0) -> genericReuse1 RootDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseRootDoc"
reuseFormDoc :: [Token doc enr Node clip token] -> Maybe Form -> ChoiceDoc
reuseFormDoc nodes ma0
= case extractFromTokens extractFormDoc defaultFormDoc nodes of
(FormDoc a0) -> genericReuse1 FormDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseFormDoc"
reuseTaskDoc :: [Token doc enr Node clip token] -> Maybe Tasks -> ChoiceDoc
reuseTaskDoc nodes ma0
= case extractFromTokens extractTaskDoc defaultTaskDoc nodes of
(TaskDoc a0) -> genericReuse1 TaskDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTaskDoc"
reuseSudokuDoc :: [Token doc enr Node clip token] -> Maybe Sudoku -> ChoiceDoc
reuseSudokuDoc nodes ma0
= case extractFromTokens extractSudokuDoc defaultSudokuDoc nodes of
(SudokuDoc a0) -> genericReuse1 SudokuDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseSudokuDoc"
reuseTestDoc :: [Token doc enr Node clip token] -> Maybe Test -> ChoiceDoc
reuseTestDoc nodes ma0
= case extractFromTokens extractTestDoc defaultTestDoc nodes of
(TestDoc a0) -> genericReuse1 TestDoc a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTestDoc"
reuseForm :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Description -> Maybe List_Expense -> Maybe Int -> Maybe List_Currency -> Form
reuseForm nodes ma0 ma1 ma2 ma3 ma4
= case extractFromTokens extractForm defaultForm nodes of
(Form a0 a1 a2 a3 a4) -> genericReuse5 Form a0 a1 a2 a3 a4 ma0 ma1 ma2 ma3 ma4
_ -> error "Internal error:ProxParser_Generated.reuseForm"
reuseExpense :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Float_ -> Maybe Int -> Expense
reuseExpense nodes ma0 ma1 ma2
= case extractFromTokens extractExpense defaultExpense nodes of
(Expense a0 a1 a2) -> genericReuse3 Expense a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseExpense"
reuseCurrency :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Float_ -> Currency
reuseCurrency nodes ma0 ma1
= case extractFromTokens extractCurrency defaultCurrency nodes of
(Currency a0 a1) -> genericReuse2 Currency a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseCurrency"
reuseTasks :: [Token doc enr Node clip token] -> Maybe List_Thing -> Maybe Bool -> Maybe List_Task -> Tasks
reuseTasks nodes ma0 ma1 ma2
= case extractFromTokens extractTasks defaultTasks nodes of
(Tasks a0 a1 a2) -> genericReuse3 Tasks a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseTasks"
reuseThing :: [Token doc enr Node clip token] -> Maybe Int -> Thing
reuseThing nodes ma0
= case extractFromTokens extractThing defaultThing nodes of
(Thing a0) -> genericReuse1 Thing a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseThing"
reuseBasicTask :: [Token doc enr Node clip token] -> Maybe Description -> Maybe Bool -> Task
reuseBasicTask nodes ma0 ma1
= case extractFromTokens extractBasicTask defaultBasicTask nodes of
(BasicTask a0 a1) -> genericReuse2 BasicTask a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseBasicTask"
reuseCompositeTask :: [Token doc enr Node clip token] -> Maybe Bool -> Maybe Description -> Maybe List_Task -> Task
reuseCompositeTask nodes ma0 ma1 ma2
= case extractFromTokens extractCompositeTask defaultCompositeTask nodes of
(CompositeTask a0 a1 a2) -> genericReuse3 CompositeTask a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseCompositeTask"
reuseDescription :: [Token doc enr Node clip token] -> Maybe String -> Description
reuseDescription nodes ma0
= case extractFromTokens extractDescription defaultDescription nodes of
(Description a0) -> genericReuse1 Description a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseDescription"
reuseSudoku :: [Token doc enr Node clip token] -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Maybe Row -> Sudoku
reuseSudoku nodes ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
= case extractFromTokens extractSudoku defaultSudoku nodes of
(Sudoku a0 a1 a2 a3 a4 a5 a6 a7 a8) -> genericReuse9 Sudoku a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
_ -> error "Internal error:ProxParser_Generated.reuseSudoku"
reuseRow :: [Token doc enr Node clip token] -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Maybe Field -> Row
reuseRow nodes ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
= case extractFromTokens extractRow defaultRow nodes of
(Row a0 a1 a2 a3 a4 a5 a6 a7 a8) -> genericReuse9 Row a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8
_ -> error "Internal error:ProxParser_Generated.reuseRow"
reuseField :: [Token doc enr Node clip token] -> Maybe Int_ -> Field
reuseField nodes ma0
= case extractFromTokens extractField defaultField nodes of
(Field a0) -> genericReuse1 Field a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseField"
reuseTest :: [Token doc enr Node clip token] -> Maybe StyledText -> Test
reuseTest nodes ma0
= case extractFromTokens extractTest defaultTest nodes of
(Test a0) -> genericReuse1 Test a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTest"
reuseStyledText :: [Token doc enr Node clip token] -> Maybe List_Word -> StyledText
reuseStyledText nodes ma0
= case extractFromTokens extractStyledText defaultStyledText nodes of
(StyledText a0) -> genericReuse1 StyledText a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseStyledText"
reuseWord :: [Token doc enr Node clip token] -> Maybe List_WordPart -> Word
reuseWord nodes ma0
= case extractFromTokens extractWord defaultWord nodes of
(Word a0) -> genericReuse1 Word a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseWord"
reuseWordPart :: [Token doc enr Node clip token] -> Maybe IDP -> Maybe String -> WordPart
reuseWordPart nodes ma0 ma1
= case extractFromTokens extractWordPart defaultWordPart nodes of
(WordPart a0 a1) -> genericReuse2 WordPart a0 a1 ma0 ma1
_ -> error "Internal error:ProxParser_Generated.reuseWordPart"
reuseOpenTag :: [Token doc enr Node clip token] -> Maybe TextStyle -> WordPart
reuseOpenTag nodes ma0
= case extractFromTokens extractOpenTag defaultOpenTag nodes of
(OpenTag a0) -> genericReuse1 OpenTag a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseOpenTag"
reuseCloseTag :: [Token doc enr Node clip token] -> Maybe TextStyle -> WordPart
reuseCloseTag nodes ma0
= case extractFromTokens extractCloseTag defaultCloseTag nodes of
(CloseTag a0) -> genericReuse1 CloseTag a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseCloseTag"
reuseTextBold :: [Token doc enr Node clip token] -> TextStyle
reuseTextBold nodes
= case extractFromTokens extractTextBold defaultTextBold nodes of
(TextBold) -> genericReuse0 TextBold
_ -> error "Internal error:ProxParser_Generated.reuseTextBold"
reuseTextItalic :: [Token doc enr Node clip token] -> TextStyle
reuseTextItalic nodes
= case extractFromTokens extractTextItalic defaultTextItalic nodes of
(TextItalic) -> genericReuse0 TextItalic
_ -> error "Internal error:ProxParser_Generated.reuseTextItalic"
reuseTextFontSize :: [Token doc enr Node clip token] -> Maybe Int -> TextStyle
reuseTextFontSize nodes ma0
= case extractFromTokens extractTextFontSize defaultTextFontSize nodes of
(TextFontSize a0) -> genericReuse1 TextFontSize a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseTextFontSize"
reuseTextColor :: [Token doc enr Node clip token] -> Maybe Int -> Maybe Int -> Maybe Int -> TextStyle
reuseTextColor nodes ma0 ma1 ma2
= case extractFromTokens extractTextColor defaultTextColor nodes of
(TextColor a0 a1 a2) -> genericReuse3 TextColor a0 a1 a2 ma0 ma1 ma2
_ -> error "Internal error:ProxParser_Generated.reuseTextColor"
reuseInt_ :: [Token doc enr Node clip token] -> Maybe Int -> Int_
reuseInt_ nodes ma0
= case extractFromTokens extractInt_ defaultInt_ nodes of
(Int_ a0) -> genericReuse1 Int_ a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseInt_"
reuseFloat_ :: [Token doc enr Node clip token] -> Maybe Float -> Float_
reuseFloat_ nodes ma0
= case extractFromTokens extractFloat_ defaultFloat_ nodes of
(Float_ a0) -> genericReuse1 Float_ a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseFloat_"
reuseList_Expense :: [Token doc enr Node clip token] -> Maybe ConsList_Expense -> List_Expense
reuseList_Expense nodes ma0
= case extractFromTokens extractList_Expense defaultList_Expense nodes of
(List_Expense a0) -> genericReuse1 List_Expense a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Expense"
reuseList_Currency :: [Token doc enr Node clip token] -> Maybe ConsList_Currency -> List_Currency
reuseList_Currency nodes ma0
= case extractFromTokens extractList_Currency defaultList_Currency nodes of
(List_Currency a0) -> genericReuse1 List_Currency a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Currency"
reuseList_Thing :: [Token doc enr Node clip token] -> Maybe ConsList_Thing -> List_Thing
reuseList_Thing nodes ma0
= case extractFromTokens extractList_Thing defaultList_Thing nodes of
(List_Thing a0) -> genericReuse1 List_Thing a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Thing"
reuseList_Task :: [Token doc enr Node clip token] -> Maybe ConsList_Task -> List_Task
reuseList_Task nodes ma0
= case extractFromTokens extractList_Task defaultList_Task nodes of
(List_Task a0) -> genericReuse1 List_Task a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Task"
reuseList_Word :: [Token doc enr Node clip token] -> Maybe ConsList_Word -> List_Word
reuseList_Word nodes ma0
= case extractFromTokens extractList_Word defaultList_Word nodes of
(List_Word a0) -> genericReuse1 List_Word a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_Word"
reuseList_WordPart :: [Token doc enr Node clip token] -> Maybe ConsList_WordPart -> List_WordPart
reuseList_WordPart nodes ma0
= case extractFromTokens extractList_WordPart defaultList_WordPart nodes of
(List_WordPart a0) -> genericReuse1 List_WordPart a0 ma0
_ -> error "Internal error:ProxParser_Generated.reuseList_WordPart"
extractRootEnr :: Maybe Node -> Maybe EnrichedDoc
extractRootEnr (Just (Node_RootEnr x@(RootEnr _) _)) = Just x
extractRootEnr _ = Nothing
extractRootDoc :: Maybe Node -> Maybe Document
extractRootDoc (Just (Node_RootDoc x@(RootDoc _) _)) = Just x
extractRootDoc _ = Nothing
extractFormDoc :: Maybe Node -> Maybe ChoiceDoc
extractFormDoc (Just (Node_FormDoc x@(FormDoc _) _)) = Just x
extractFormDoc _ = Nothing
extractTaskDoc :: Maybe Node -> Maybe ChoiceDoc
extractTaskDoc (Just (Node_TaskDoc x@(TaskDoc _) _)) = Just x
extractTaskDoc _ = Nothing
extractSudokuDoc :: Maybe Node -> Maybe ChoiceDoc
extractSudokuDoc (Just (Node_SudokuDoc x@(SudokuDoc _) _)) = Just x
extractSudokuDoc _ = Nothing
extractTestDoc :: Maybe Node -> Maybe ChoiceDoc
extractTestDoc (Just (Node_TestDoc x@(TestDoc _) _)) = Just x
extractTestDoc _ = Nothing
extractForm :: Maybe Node -> Maybe Form
extractForm (Just (Node_Form x@(Form _ _ _ _ _) _)) = Just x
extractForm _ = Nothing
extractExpense :: Maybe Node -> Maybe Expense
extractExpense (Just (Node_Expense x@(Expense _ _ _) _)) = Just x
extractExpense _ = Nothing
extractCurrency :: Maybe Node -> Maybe Currency
extractCurrency (Just (Node_Currency x@(Currency _ _) _)) = Just x
extractCurrency _ = Nothing
extractTasks :: Maybe Node -> Maybe Tasks
extractTasks (Just (Node_Tasks x@(Tasks _ _ _) _)) = Just x
extractTasks _ = Nothing
extractThing :: Maybe Node -> Maybe Thing
extractThing (Just (Node_Thing x@(Thing _) _)) = Just x
extractThing _ = Nothing
extractBasicTask :: Maybe Node -> Maybe Task
extractBasicTask (Just (Node_BasicTask x@(BasicTask _ _) _)) = Just x
extractBasicTask _ = Nothing
extractCompositeTask :: Maybe Node -> Maybe Task
extractCompositeTask (Just (Node_CompositeTask x@(CompositeTask _ _ _) _)) = Just x
extractCompositeTask _ = Nothing
extractDescription :: Maybe Node -> Maybe Description
extractDescription (Just (Node_Description x@(Description _) _)) = Just x
extractDescription _ = Nothing
extractSudoku :: Maybe Node -> Maybe Sudoku
extractSudoku (Just (Node_Sudoku x@(Sudoku _ _ _ _ _ _ _ _ _) _)) = Just x
extractSudoku _ = Nothing
extractRow :: Maybe Node -> Maybe Row
extractRow (Just (Node_Row x@(Row _ _ _ _ _ _ _ _ _) _)) = Just x
extractRow _ = Nothing
extractField :: Maybe Node -> Maybe Field
extractField (Just (Node_Field x@(Field _) _)) = Just x
extractField _ = Nothing
extractTest :: Maybe Node -> Maybe Test
extractTest (Just (Node_Test x@(Test _) _)) = Just x
extractTest _ = Nothing
extractStyledText :: Maybe Node -> Maybe StyledText
extractStyledText (Just (Node_StyledText x@(StyledText _) _)) = Just x
extractStyledText _ = Nothing
extractWord :: Maybe Node -> Maybe Word
extractWord (Just (Node_Word x@(Word _) _)) = Just x
extractWord _ = Nothing
extractWordPart :: Maybe Node -> Maybe WordPart
extractWordPart (Just (Node_WordPart x@(WordPart _ _) _)) = Just x
extractWordPart _ = Nothing
extractOpenTag :: Maybe Node -> Maybe WordPart
extractOpenTag (Just (Node_OpenTag x@(OpenTag _) _)) = Just x
extractOpenTag _ = Nothing
extractCloseTag :: Maybe Node -> Maybe WordPart
extractCloseTag (Just (Node_CloseTag x@(CloseTag _) _)) = Just x
extractCloseTag _ = Nothing
extractTextBold :: Maybe Node -> Maybe TextStyle
extractTextBold (Just (Node_TextBold x@(TextBold) _)) = Just x
extractTextBold _ = Nothing
extractTextItalic :: Maybe Node -> Maybe TextStyle
extractTextItalic (Just (Node_TextItalic x@(TextItalic) _)) = Just x
extractTextItalic _ = Nothing
extractTextFontSize :: Maybe Node -> Maybe TextStyle
extractTextFontSize (Just (Node_TextFontSize x@(TextFontSize _) _)) = Just x
extractTextFontSize _ = Nothing
extractTextColor :: Maybe Node -> Maybe TextStyle
extractTextColor (Just (Node_TextColor x@(TextColor _ _ _) _)) = Just x
extractTextColor _ = Nothing
extractInt_ :: Maybe Node -> Maybe Int_
extractInt_ (Just (Node_Int_ x@(Int_ _) _)) = Just x
extractInt_ _ = Nothing
extractFloat_ :: Maybe Node -> Maybe Float_
extractFloat_ (Just (Node_Float_ x@(Float_ _) _)) = Just x
extractFloat_ _ = Nothing
extractList_Expense :: Maybe Node -> Maybe List_Expense
extractList_Expense (Just (Node_List_Expense x@(List_Expense _) _)) = Just x
extractList_Expense _ = Nothing
extractList_Currency :: Maybe Node -> Maybe List_Currency
extractList_Currency (Just (Node_List_Currency x@(List_Currency _) _)) = Just x
extractList_Currency _ = Nothing
extractList_Thing :: Maybe Node -> Maybe List_Thing
extractList_Thing (Just (Node_List_Thing x@(List_Thing _) _)) = Just x
extractList_Thing _ = Nothing
extractList_Task :: Maybe Node -> Maybe List_Task
extractList_Task (Just (Node_List_Task x@(List_Task _) _)) = Just x
extractList_Task _ = Nothing
extractList_Word :: Maybe Node -> Maybe List_Word
extractList_Word (Just (Node_List_Word x@(List_Word _) _)) = Just x
extractList_Word _ = Nothing
extractList_WordPart :: Maybe Node -> Maybe List_WordPart
extractList_WordPart (Just (Node_List_WordPart x@(List_WordPart _) _)) = Just x
extractList_WordPart _ = Nothing
defaultRootEnr :: EnrichedDoc
defaultRootEnr = RootEnr hole
defaultRootDoc :: Document
defaultRootDoc = RootDoc hole
defaultFormDoc :: ChoiceDoc
defaultFormDoc = FormDoc hole
defaultTaskDoc :: ChoiceDoc
defaultTaskDoc = TaskDoc hole
defaultSudokuDoc :: ChoiceDoc
defaultSudokuDoc = SudokuDoc hole
defaultTestDoc :: ChoiceDoc
defaultTestDoc = TestDoc hole
defaultForm :: Form
defaultForm = Form hole hole hole hole hole
defaultExpense :: Expense
defaultExpense = Expense hole hole hole
defaultCurrency :: Currency
defaultCurrency = Currency hole hole
defaultTasks :: Tasks
defaultTasks = Tasks hole hole hole
defaultThing :: Thing
defaultThing = Thing hole
defaultBasicTask :: Task
defaultBasicTask = BasicTask hole hole
defaultCompositeTask :: Task
defaultCompositeTask = CompositeTask hole hole hole
defaultDescription :: Description
defaultDescription = Description hole
defaultSudoku :: Sudoku
defaultSudoku = Sudoku hole hole hole hole hole hole hole hole hole
defaultRow :: Row
defaultRow = Row hole hole hole hole hole hole hole hole hole
defaultField :: Field
defaultField = Field hole
defaultTest :: Test
defaultTest = Test hole
defaultStyledText :: StyledText
defaultStyledText = StyledText hole
defaultWord :: Word
defaultWord = Word hole
defaultWordPart :: WordPart
defaultWordPart = WordPart NoIDP hole
defaultOpenTag :: WordPart
defaultOpenTag = OpenTag hole
defaultCloseTag :: WordPart
defaultCloseTag = CloseTag hole
defaultTextBold :: TextStyle
defaultTextBold = TextBold
defaultTextItalic :: TextStyle
defaultTextItalic = TextItalic
defaultTextFontSize :: TextStyle
defaultTextFontSize = TextFontSize hole
defaultTextColor :: TextStyle
defaultTextColor = TextColor hole hole hole
defaultInt_ :: Int_
defaultInt_ = Int_ hole
defaultFloat_ :: Float_
defaultFloat_ = Float_ hole
defaultList_Expense :: List_Expense
defaultList_Expense = List_Expense Nil_Expense
defaultList_Currency :: List_Currency
defaultList_Currency = List_Currency Nil_Currency
defaultList_Thing :: List_Thing
defaultList_Thing = List_Thing Nil_Thing
defaultList_Task :: List_Task
defaultList_Task = List_Task Nil_Task
defaultList_Word :: List_Word
defaultList_Word = List_Word Nil_Word
defaultList_WordPart :: List_WordPart
defaultList_WordPart = List_WordPart Nil_WordPart
return result of the first extraction application in the list that is not Nothing
extractFromTokens :: (Maybe Node -> Maybe a) -> a -> [Token doc enr Node clip token] -> a
extractFromTokens extr def [] = def
extractFromTokens extr def (t:ts) = maybe (extractFromTokens extr def ts) id (extr (tokenNode t))
genericReuse0 :: (r) ->
r
genericReuse0 f =
f
genericReuse1 :: (a0 -> r) ->
a0 ->
Maybe a0 -> r
genericReuse1 f a0 ma0 =
f (maybe a0 id ma0)
genericReuse2 :: (a0 -> a1 -> r) ->
a0 -> a1 ->
Maybe a0 -> Maybe a1 -> r
genericReuse2 f a0 a1 ma0 ma1 =
f (maybe a0 id ma0) (maybe a1 id ma1)
genericReuse3 :: (a0 -> a1 -> a2 -> r) ->
a0 -> a1 -> a2 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> r
genericReuse3 f a0 a1 a2 ma0 ma1 ma2 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2)
genericReuse4 :: (a0 -> a1 -> a2 -> a3 -> r) ->
a0 -> a1 -> a2 -> a3 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> r
genericReuse4 f a0 a1 a2 a3 ma0 ma1 ma2 ma3 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3)
genericReuse5 :: (a0 -> a1 -> a2 -> a3 -> a4 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> r
genericReuse5 f a0 a1 a2 a3 a4 ma0 ma1 ma2 ma3 ma4 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4)
genericReuse6 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> r
genericReuse6 f a0 a1 a2 a3 a4 a5 ma0 ma1 ma2 ma3 ma4 ma5 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5)
genericReuse7 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> r
genericReuse7 f a0 a1 a2 a3 a4 a5 a6 ma0 ma1 ma2 ma3 ma4 ma5 ma6 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6)
genericReuse8 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> Maybe a7 -> r
genericReuse8 f a0 a1 a2 a3 a4 a5 a6 a7 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6) (maybe a7 id ma7)
genericReuse9 :: (a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> r) ->
a0 -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 ->
Maybe a0 -> Maybe a1 -> Maybe a2 -> Maybe a3 -> Maybe a4 -> Maybe a5 -> Maybe a6 -> Maybe a7 -> Maybe a8 -> r
genericReuse9 f a0 a1 a2 a3 a4 a5 a6 a7 a8 ma0 ma1 ma2 ma3 ma4 ma5 ma6 ma7 ma8 =
f (maybe a0 id ma0) (maybe a1 id ma1) (maybe a2 id ma2) (maybe a3 id ma3) (maybe a4 id ma4) (maybe a5 id ma5) (maybe a6 id ma6) (maybe a7 id ma7) (maybe a8 id ma8)
|
9618205546d8d602c40176d96e62c358e1a2f6062d62a4c6d1b3157aff5d6d6c | junjihashimoto/hasktorch-yolo | ImageSpec.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE ExtendedDefaultRules #
# LANGUAGE FunctionalDependencies #
# LANGUAGE RecordWildCards #
# LANGUAGE NoMonomorphismRestriction #
module ImageSpec (spec, main) where
import Control.Exception.Safe
import Control.Monad.State.Strict
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as M
import Data.Word
import GHC.Exts
import GHC.Generics
import qualified System.IO
import Test.Hspec
import Torch.DType
import Torch.Functional
import qualified Torch.Functional.Internal as I
import Torch.NN
import Torch.Serialize
import Torch.Tensor
import Torch.TensorFactories
import Torch.Typed.NN (HasForward (..))
import Torch.Vision
import Torch.Vision.Darknet.Config
import Torch.Vision.Darknet.Forward
import Torch.Vision.Darknet.Spec
main = hspec spec
spec :: Spec
spec = do
describe "ImageSpec" $ do
it "load" $ do
input_data <- System.IO.withFile "test-data/metrics/input-images.bin" System.IO.ReadMode $ \h -> do
loadBinary h (zeros' [1, 3, 416, 416])
Right (_, raw) <- readImageAsRGB8WithScaling "test-data/metrics/COCO_val2014_000000000164.jpg" 416 416 True
let target = divScalar (255 :: Float) (hwc2chw $ toType Float raw)
asValue (mseLoss input_data target) < (0.00001 :: Float) `shouldBe` True
| null | https://raw.githubusercontent.com/junjihashimoto/hasktorch-yolo/af1d2cd221d393bc52d692712ffc9fd46037f296/test/ImageSpec.hs | haskell | # LANGUAGE DeriveGeneric #
# LANGUAGE ExtendedDefaultRules #
# LANGUAGE FunctionalDependencies #
# LANGUAGE RecordWildCards #
# LANGUAGE NoMonomorphismRestriction #
module ImageSpec (spec, main) where
import Control.Exception.Safe
import Control.Monad.State.Strict
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as M
import Data.Word
import GHC.Exts
import GHC.Generics
import qualified System.IO
import Test.Hspec
import Torch.DType
import Torch.Functional
import qualified Torch.Functional.Internal as I
import Torch.NN
import Torch.Serialize
import Torch.Tensor
import Torch.TensorFactories
import Torch.Typed.NN (HasForward (..))
import Torch.Vision
import Torch.Vision.Darknet.Config
import Torch.Vision.Darknet.Forward
import Torch.Vision.Darknet.Spec
main = hspec spec
spec :: Spec
spec = do
describe "ImageSpec" $ do
it "load" $ do
input_data <- System.IO.withFile "test-data/metrics/input-images.bin" System.IO.ReadMode $ \h -> do
loadBinary h (zeros' [1, 3, 416, 416])
Right (_, raw) <- readImageAsRGB8WithScaling "test-data/metrics/COCO_val2014_000000000164.jpg" 416 416 True
let target = divScalar (255 :: Float) (hwc2chw $ toType Float raw)
asValue (mseLoss input_data target) < (0.00001 :: Float) `shouldBe` True
|
|
1783d46378ef393094c01c47fcdefc4017cbe018f16b8fafe1871457cad46d12 | tail-reversion/elle | base.rkt | #lang elle/private/prebase
{reprovide elle/private/prebase #:exposing-all}
{reprovide elle/private/boolean #:exposing-all}
{reprovide elle/private/char #:exposing-all}
{reprovide elle/private/equality #:exposing-all}
{reprovide elle/private/keyword #:exposing-all}
{reprovide elle/private/number #:exposing-all}
{reprovide elle/private/option #:exposing-all}
{reprovide elle/private/ordering #:exposing-all}
{reprovide elle/private/procedure #:exposing-all}
{reprovide elle/private/result #:exposing-all}
{reprovide elle/private/symbol #:exposing-all}
{reprovide elle/private/text #:exposing-all}
#(module reader syntax/module-reader elle/base
#:wrapper1 call-with-elle-reading-parameterization
(require elle/private/reader))
| null | https://raw.githubusercontent.com/tail-reversion/elle/ed113df48d37c0481d2e6d068635e4699ea33d65/elle-lib/base.rkt | racket | #lang elle/private/prebase
{reprovide elle/private/prebase #:exposing-all}
{reprovide elle/private/boolean #:exposing-all}
{reprovide elle/private/char #:exposing-all}
{reprovide elle/private/equality #:exposing-all}
{reprovide elle/private/keyword #:exposing-all}
{reprovide elle/private/number #:exposing-all}
{reprovide elle/private/option #:exposing-all}
{reprovide elle/private/ordering #:exposing-all}
{reprovide elle/private/procedure #:exposing-all}
{reprovide elle/private/result #:exposing-all}
{reprovide elle/private/symbol #:exposing-all}
{reprovide elle/private/text #:exposing-all}
#(module reader syntax/module-reader elle/base
#:wrapper1 call-with-elle-reading-parameterization
(require elle/private/reader))
|
|
2e88c3954125e417efc0fceab202e7671bd7d0c0f66779ded4366af67192a12a | inaka/canillita | canillita_single_newspaper_handler.erl | %%% @doc GET|PUT|DELETE /newspapers/:id handler
-module(canillita_single_newspaper_handler).
-behaviour(trails_handler).
-include_lib("mixer/include/mixer.hrl").
-mixin([{ sr_single_entity_handler
, [ init/3
, rest_init/2
, allowed_methods/2
, resource_exists/2
, content_types_accepted/2
, content_types_provided/2
, handle_get/2
, handle_put/2
, delete_resource/2
]
}]).
-export([ trails/0 ]).
-spec trails() -> trails:trails().
trails() ->
RequestBody =
#{ name => <<"request body">>
, in => body
, description => <<"request body (as json)">>
, required => true
},
Id =
#{ name => id
, in => path
, description => <<"Newspaper key">>
, required => true
, type => string
},
Metadata =
#{ get =>
#{ tags => ["newspapers"]
, description => "Returns a newspaper"
, produces => ["application/json"]
, parameters => [Id]
}
, put =>
#{ tags => ["newspapers"]
, description => "Updates or creates a new newspaper"
, consumes => ["application/json", "application/json; charset=utf-8"]
, produces => ["application/json"]
, parameters => [RequestBody, Id]
}
, delete =>
#{ tags => ["newspapers"]
, description => "Deletes a newspaper"
, parameters => [Id]
}
},
Path = "/newspapers/:id",
Options = #{path => Path, model => canillita_newspapers, verbose => true},
[trails:trail(Path, ?MODULE, Options, Metadata)].
| null | https://raw.githubusercontent.com/inaka/canillita/56e7fa6b7441a591dbf2396dc85a43cdca6878c6/src/canillita_single_newspaper_handler.erl | erlang | @doc GET|PUT|DELETE /newspapers/:id handler | -module(canillita_single_newspaper_handler).
-behaviour(trails_handler).
-include_lib("mixer/include/mixer.hrl").
-mixin([{ sr_single_entity_handler
, [ init/3
, rest_init/2
, allowed_methods/2
, resource_exists/2
, content_types_accepted/2
, content_types_provided/2
, handle_get/2
, handle_put/2
, delete_resource/2
]
}]).
-export([ trails/0 ]).
-spec trails() -> trails:trails().
trails() ->
RequestBody =
#{ name => <<"request body">>
, in => body
, description => <<"request body (as json)">>
, required => true
},
Id =
#{ name => id
, in => path
, description => <<"Newspaper key">>
, required => true
, type => string
},
Metadata =
#{ get =>
#{ tags => ["newspapers"]
, description => "Returns a newspaper"
, produces => ["application/json"]
, parameters => [Id]
}
, put =>
#{ tags => ["newspapers"]
, description => "Updates or creates a new newspaper"
, consumes => ["application/json", "application/json; charset=utf-8"]
, produces => ["application/json"]
, parameters => [RequestBody, Id]
}
, delete =>
#{ tags => ["newspapers"]
, description => "Deletes a newspaper"
, parameters => [Id]
}
},
Path = "/newspapers/:id",
Options = #{path => Path, model => canillita_newspapers, verbose => true},
[trails:trail(Path, ?MODULE, Options, Metadata)].
|
49f782a1e4731cab5a6e941a5904b22681b0707fe49b2701e86457f04c795df0 | narimiran/AdventOfCode2022 | day23.clj | (ns day23
(:require aoc
[clojure.data.int-map :refer [dense-int-set int-map]]))
(def ^:const S 256)
(def ^:const N (- S))
(def directions
[[(dec N) N (inc N)] ; N
[(dec S) S (inc S)] ; S
[(dec N) -1 (dec S)] ; W
[(inc N) 1 (inc S)]]) ; E
(let [^longs adjacent (long-array (dedupe (reduce concat directions)))
len (alength adjacent)
^longs *nbs* (long-array len)]
(defn get-adjacent! ^longs [^long elf]
(dotimes [i len]
(aset *nbs* i (+ elf (aget adjacent i))))
*nbs*))
(defn find-free-spot [elves ^long elf deltas]
(reduce-kv
(fn [found? i delta]
(let [pos (+ elf ^long delta)]
(cond
(elves pos) (reduced nil)
(= i 1) pos
:else found?)))
nil
deltas))
(defn propose [elves ^long round proposals elf]
(if (aoc/array-none? elves (get-adjacent! elf)) proposals
(let [prop (reduce
(fn [elf ^long i]
(let [n (mod (+ round i) 4)]
(if-let [nb (find-free-spot elves elf (directions n))]
(reduced nb)
elf)))
elf
(range 4))]
(if (proposals prop)
(dissoc! proposals prop)
(assoc! proposals prop elf)))))
(defn move [elves proposals]
(->> proposals
(reduce-kv (fn [elves prop old]
(-> elves
(disj! old)
(conj! prop)))
(transient elves))
persistent!))
(defn play-round [elves round]
(->> elves
(reduce (partial propose elves round) (transient (int-map)))
persistent!
(#(when (seq %) (move elves %)))))
(defn calc-area [elves]
(let [xs (sort (map #(mod % S) elves))
ys (sort (map #(quot % S) elves))]
(* (inc (- (last xs) (first xs)))
(inc (- (last ys) (first ys))))))
(defn part-1 [elves]
(-> (reduce play-round elves (range 10))
calc-area
(- (count elves))))
(defn part-2 [elves]
(reduce
(fn [elves round]
(let [new-elves (play-round elves round)]
(if (nil? new-elves)
(reduced (inc round))
new-elves)))
elves
(iterate inc 0)))
(defn parse-input [input]
(->> input
aoc/read-input
(#(for [[y line] (map-indexed vector %)
[x char] (map-indexed vector line)
:when (= char \#)]
(+ (/ S 2) x
(* S (+ (/ S 2) y)))))
dense-int-set))
(defn solve
([] (solve 23))
([input]
(let [elves (parse-input input)]
[(part-1 elves)
(part-2 elves)])))
(solve)
| null | https://raw.githubusercontent.com/narimiran/AdventOfCode2022/ed7e5674e1c58cc20d5cfabdbbd732ae4b9dde62/clojure/day23.clj | clojure | N
S
W
E | (ns day23
(:require aoc
[clojure.data.int-map :refer [dense-int-set int-map]]))
(def ^:const S 256)
(def ^:const N (- S))
(def directions
(let [^longs adjacent (long-array (dedupe (reduce concat directions)))
len (alength adjacent)
^longs *nbs* (long-array len)]
(defn get-adjacent! ^longs [^long elf]
(dotimes [i len]
(aset *nbs* i (+ elf (aget adjacent i))))
*nbs*))
(defn find-free-spot [elves ^long elf deltas]
(reduce-kv
(fn [found? i delta]
(let [pos (+ elf ^long delta)]
(cond
(elves pos) (reduced nil)
(= i 1) pos
:else found?)))
nil
deltas))
(defn propose [elves ^long round proposals elf]
(if (aoc/array-none? elves (get-adjacent! elf)) proposals
(let [prop (reduce
(fn [elf ^long i]
(let [n (mod (+ round i) 4)]
(if-let [nb (find-free-spot elves elf (directions n))]
(reduced nb)
elf)))
elf
(range 4))]
(if (proposals prop)
(dissoc! proposals prop)
(assoc! proposals prop elf)))))
(defn move [elves proposals]
(->> proposals
(reduce-kv (fn [elves prop old]
(-> elves
(disj! old)
(conj! prop)))
(transient elves))
persistent!))
(defn play-round [elves round]
(->> elves
(reduce (partial propose elves round) (transient (int-map)))
persistent!
(#(when (seq %) (move elves %)))))
(defn calc-area [elves]
(let [xs (sort (map #(mod % S) elves))
ys (sort (map #(quot % S) elves))]
(* (inc (- (last xs) (first xs)))
(inc (- (last ys) (first ys))))))
(defn part-1 [elves]
(-> (reduce play-round elves (range 10))
calc-area
(- (count elves))))
(defn part-2 [elves]
(reduce
(fn [elves round]
(let [new-elves (play-round elves round)]
(if (nil? new-elves)
(reduced (inc round))
new-elves)))
elves
(iterate inc 0)))
(defn parse-input [input]
(->> input
aoc/read-input
(#(for [[y line] (map-indexed vector %)
[x char] (map-indexed vector line)
:when (= char \#)]
(+ (/ S 2) x
(* S (+ (/ S 2) y)))))
dense-int-set))
(defn solve
([] (solve 23))
([input]
(let [elves (parse-input input)]
[(part-1 elves)
(part-2 elves)])))
(solve)
|
86c5ba84d4056895974136dcf23d91a664ebf03182477139a90554412a2e4d4a | victornicolet/parsynt | ParsyntI.ml | *
This file is part of Parsynt .
Author : < >
Parsynt is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with . If not , see < / > .
This file is part of Parsynt.
Author: Victor Nicolet <>
Parsynt is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Parsynt is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Parsynt. If not, see </>.
*)
open Beta
open Conf
open Fn
open Format
open
open Str
open Solve
open Utils
open Utils .
module L = Local
module C = Canalyst
module Proofs
module Cg = Codegen
let debug = ref false
let verbose = ref false
let elapsed_time = ref 0.0
let skip_first_solve = ref false
let skip_all_before_vardisc = ref false
let use_z3 = ref false
( * let exact_fp = ref false
open Conf
open Fn
open Format
open Getopt
open Str
open Solve
open Utils
open Utils.PpTools
module L = Local
module C = Canalyst
module Pf = Proofs
module Cg = Codegen
let debug = ref false
let verbose = ref false
let elapsed_time = ref 0.0
let skip_first_solve = ref false
let skip_all_before_vardisc = ref false
let use_z3 = ref false
(* let exact_fp = ref false *)
let options = [
( 'd', "dump", (set Local.dump_sketch true), None);
( ' e ' , " exact - fp " , ( set exact_fp true ) , None ) ;
( 'f', "debug-func", (set Cil2Func.debug true), None);
( 'g', "debug", (set debug true), None);
( 'i', "incremental", (set Solve.solve_incrementally true), None);
( 'k', "kill-first-solve", (set skip_first_solve true), None);
( 'K', "kill-first-inner", (set skip_all_before_vardisc true), None);
( 'o', "output-folder", None,
Some (fun o_folder -> Config.output_dir := o_folder));
( 's', "debug-sketch", (set Sketch.debug true), None);
( 'v', "verbose", (set verbose true), None);
( 'x', "debug-variable-discovery", (ignore(set VariableDiscovery.debug true);
set SymbExe.debug true), None);
( 'C', "concrete-sketch", (set Sketch.concrete_sketch true), None);
( 'z', "use-z3", (set use_z3 true), None);
('I', "discovery-max-iterations", None,
Some (fun itmax -> VariableDiscovery.max_exec_no := int_of_string itmax))]
let print_inner_result problem inner_funcs () =
List.iter
(fun pb ->
printf "[INFO] Inner function %s,@.\
\tFunction:@.%a@.\
\tJoin:@.%a@.\
\tIdentity state:%a@."
pb.loop_name
FnPretty.pp_fnexpr pb.main_loop_body
FnPretty.pp_fnexpr pb.memless_solution
(ppimap pp_constants) pb.identity_values
)
inner_funcs
(**
Recursively solve the inner loops using different tactics.
@param problem The problem we are currently trying to solve.
@return Some problem if all the inner functions can be paralleized or
made memoryless. None if not.
*)
(* Note : added additional optional depth argument*)
let rec solve_inners (problem : prob_rep) : prob_rep option =
if List.length problem.inner_functions = 0 then
Some problem
else
let solve_inner_problem inpb =
if is_empty_record inpb.memless_solution then
let loc_solve inpb =
let start = Unix.gettimeofday () in
let sol = solve_one ~inner:true (Some problem.scontext) inpb in
let elapsed = Unix.gettimeofday () -. start in
sol, elapsed
in
let sln_inner msol elapsed =
message_info (fun () ->
printf "Inner loop %s, solved in %.3f s." inpb.loop_name elapsed);
msol
in
match loc_solve inpb with
| Some sln, elapsed ->
sln_inner (Some sln) elapsed
| None, elapsed->
let inpb' =
try
Canalyst.find_new_variables inpb
with VariableDiscovery.VariableDiscoveryError s as e ->
eprintf "[ERROR] Received variable discovery errror in aux_solve of solve_problem.@.";
eprintf "[ERROR] Skipping problem %s.@." problem.loop_name;
message_error_task "Couldn't find auxliary variables...\n";
raise e
in
let sol', elapsed' = loc_solve inpb' in
sln_inner sol' elapsed'
else Some inpb
in
(* Solve the inner functions. *)
message_start_subtask ("Solvinng inner loops of "^problem.loop_name);
let inner_funcs =
somes (List.map solve_inner_problem problem.inner_functions)
in
message_done ~done_what:"(inner loops)" ();
(* Replace occurrences of the inner functions by join operator and new
input sequence if possible.
- Condition 1: all inner function are solved. *)
if List.length inner_funcs = List.length problem.inner_functions then
begin
if !verbose then print_inner_result problem inner_funcs ();
Some (Sketch.Join.sketch_join
(InnerFuncs.replace_by_join problem inner_funcs))
end
else
None
and solve_problem problem =
Try to solve the inner loops first
let aux_solve problem =
let tactic1_sol =
if !skip_first_solve || !skip_all_before_vardisc then None else solve_one None problem
in
match tactic1_sol with
| Some x -> Some x
| None ->
message_start_subtask ("Searching auxiliaries for "^problem.loop_name);
let problem' =
(try
Canalyst.find_new_variables problem
with VariableDiscovery.VariableDiscoveryError s as e ->
eprintf "[ERROR] Received variable discovery errror in aux_solve of solve_problem.@.";
eprintf "[ERROR] Skipping problem %s.@." problem.loop_name;
message_error_task "Couldn't find auxliary variables...\n";
raise e)
in
(* Once the variable discovery has been done, we don't want to timeout easily. *)
Solve.timeout_multiplier := 100;
message_done ();
match (solve_inners =>> (solve_one None)) problem' with
| Some x -> Some x
| None -> solve_one ~expr_depth:2 None problem'
(** If the problem is not solved yet, might be because expression
depth is too limited *)
in
maybe_apply aux_solve
(if !skip_all_before_vardisc then Some problem else solve_inners problem)
(** --------------------------------------------------------------------------*)
* Generating a TBB implementation of the parallel solution discovered
let output_tbb_tests (solutions : prob_rep list) =
let tbb_test_filename (solution : prob_rep) =
let folder_name =
(!Config.output_dir)^"/"^(Config.get_conf_string "tbb_examples_folder")
in
let errco =
if Sys.file_exists folder_name then
0
else
Sys.command ("mkdir "^folder_name)
in
if errco = 0 then
folder_name^(Tbb.pbname_of_sketch solution)^".cpp"
else
failwith "Failed to create directory for tbb example output."
in
printf "@.%s%sGenerating implementations for solved examples..%s@."
(color "black") (color "b-green") color_default;
List.iter (Tbb.output_tbb_test tbb_test_filename) solutions
* Generating proofs
let output_dafny_proofs (sols : prob_rep list) : unit =
let dafny_proof_filename (sol : prob_rep) =
let folder_name =
(!Config.output_dir)^"/"^(Config.get_conf_string "dafny_examples_folder")
in
let errco =
if Sys.file_exists folder_name then
0
else
Sys.command ("mkdir "^folder_name)
in
if errco = 0 then
folder_name^(Pf.filename_of_solution sol)^".dfy"
else
failwith "Failed to create directory for Dafny proof output."
in
printf "@.%s%sGenerating proofs for solved examples..%s@."
(color "black") (color "b-green") color_default;
try
List.iter (Pf.output_dafny_proof dafny_proof_filename) sols
with _ ->
printf "%s[ERROR] Could not generate proof.%s@." (color "b-red") color_default
(** --------------------------------------------------------------------------*)
let main () =
parse_cmdline options print_endline;
if Array.length Sys.argv < 2 then
begin
eprintf "%sUsage : ./Parsy.native [filename] .. options%s@."
(color "red") color_default;
flush_all ();
exit 1;
end;
L.debug := !debug;
let filename = Array.get Sys.argv 1 in
if !debug then
begin
FError.logfile := "log"^(string_of_float (Sys.time ()))^filename;
printf "Logging in %s@." !FError.logfile;
(** Set all the debug flags to true *)
Cil2Func.debug := true;
Sketch.debug := true;
Func2Fn.debug := true;
Sketch.Join.debug := true;
VariableDiscovery.debug := true;
end;
if !verbose then
begin
Solve.verbose := true;
Loops.verbose := true;
Canalyst.verbose := true;
InnerFuncs.verbose := true;
Sketch.Join.verbose := true;
VariableDiscovery.verbose := true;
SymbExe.verbose := true;
Incremental.verbose := true;
end;
elapsed_time := Unix.gettimeofday ();
message_start_task "Parsing C program ...";
let c_file, loops = C.processFile filename in
message_done ();
message_start_task "Translating C Ast to partial functional ast...";
let functions = C.cil2func c_file loops in
message_done ();
message_start_task "Translating input to functional representation...";
let problem_list = Canalyst.func2sketch c_file functions in
message_done ();
message_start_task "Solving sketches ...";
message_skip ();
(** Try to solve the sketches without adding auxiliary variables *)
(* All the sketches that have been solved, including with auxiliaries *)
let solved =
List.map check_option
(List.filter is_some
(List.map solve_problem problem_list))
in
(** Handle all the solutions found *)
(List.iter (fun problem -> FnPretty.pp_problem_rep std_formatter problem)
(somes solved));
For each solved problem , generate a TBB implementation
begin try
output_tbb_tests (somes solved);
with _ -> () end;
If exact_fp is set , generate the exact floating point parallel
implementation
implementation *)
(* if !exact_fp then (List.iter fpexp_header (somes solved)); *)
Generate a proof in .
begin try
output_dafny_proofs (somes solved);
with _ -> () end;
(* Total elapsed_time *)
elapsed_time := (Unix.gettimeofday ()) -. !elapsed_time;
printf "@.\t\t\t\t\t\t%sFINISHED in %.3f s%s@.@." (color "green")
!elapsed_time color_default;;
main (); *)
open ParsyntLib.Lib
let main () =
Log.verbose := true;
Log.verb_debug := 1234;
Log.warning_msg "ok"
;;
main ()
| null | https://raw.githubusercontent.com/victornicolet/parsynt/d3f530923c0c75537b92c2930eb882921f38268c/bin/ParsyntI.ml | ocaml | let exact_fp = ref false
*
Recursively solve the inner loops using different tactics.
@param problem The problem we are currently trying to solve.
@return Some problem if all the inner functions can be paralleized or
made memoryless. None if not.
Note : added additional optional depth argument
Solve the inner functions.
Replace occurrences of the inner functions by join operator and new
input sequence if possible.
- Condition 1: all inner function are solved.
Once the variable discovery has been done, we don't want to timeout easily.
* If the problem is not solved yet, might be because expression
depth is too limited
* --------------------------------------------------------------------------
* --------------------------------------------------------------------------
* Set all the debug flags to true
* Try to solve the sketches without adding auxiliary variables
All the sketches that have been solved, including with auxiliaries
* Handle all the solutions found
if !exact_fp then (List.iter fpexp_header (somes solved));
Total elapsed_time | *
This file is part of Parsynt .
Author : < >
Parsynt is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with . If not , see < / > .
This file is part of Parsynt.
Author: Victor Nicolet <>
Parsynt is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Parsynt is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Parsynt. If not, see </>.
*)
open Beta
open Conf
open Fn
open Format
open
open Str
open Solve
open Utils
open Utils .
module L = Local
module C = Canalyst
module Proofs
module Cg = Codegen
let debug = ref false
let verbose = ref false
let elapsed_time = ref 0.0
let skip_first_solve = ref false
let skip_all_before_vardisc = ref false
let use_z3 = ref false
( * let exact_fp = ref false
open Conf
open Fn
open Format
open Getopt
open Str
open Solve
open Utils
open Utils.PpTools
module L = Local
module C = Canalyst
module Pf = Proofs
module Cg = Codegen
let debug = ref false
let verbose = ref false
let elapsed_time = ref 0.0
let skip_first_solve = ref false
let skip_all_before_vardisc = ref false
let use_z3 = ref false
let options = [
( 'd', "dump", (set Local.dump_sketch true), None);
( ' e ' , " exact - fp " , ( set exact_fp true ) , None ) ;
( 'f', "debug-func", (set Cil2Func.debug true), None);
( 'g', "debug", (set debug true), None);
( 'i', "incremental", (set Solve.solve_incrementally true), None);
( 'k', "kill-first-solve", (set skip_first_solve true), None);
( 'K', "kill-first-inner", (set skip_all_before_vardisc true), None);
( 'o', "output-folder", None,
Some (fun o_folder -> Config.output_dir := o_folder));
( 's', "debug-sketch", (set Sketch.debug true), None);
( 'v', "verbose", (set verbose true), None);
( 'x', "debug-variable-discovery", (ignore(set VariableDiscovery.debug true);
set SymbExe.debug true), None);
( 'C', "concrete-sketch", (set Sketch.concrete_sketch true), None);
( 'z', "use-z3", (set use_z3 true), None);
('I', "discovery-max-iterations", None,
Some (fun itmax -> VariableDiscovery.max_exec_no := int_of_string itmax))]
let print_inner_result problem inner_funcs () =
List.iter
(fun pb ->
printf "[INFO] Inner function %s,@.\
\tFunction:@.%a@.\
\tJoin:@.%a@.\
\tIdentity state:%a@."
pb.loop_name
FnPretty.pp_fnexpr pb.main_loop_body
FnPretty.pp_fnexpr pb.memless_solution
(ppimap pp_constants) pb.identity_values
)
inner_funcs
let rec solve_inners (problem : prob_rep) : prob_rep option =
if List.length problem.inner_functions = 0 then
Some problem
else
let solve_inner_problem inpb =
if is_empty_record inpb.memless_solution then
let loc_solve inpb =
let start = Unix.gettimeofday () in
let sol = solve_one ~inner:true (Some problem.scontext) inpb in
let elapsed = Unix.gettimeofday () -. start in
sol, elapsed
in
let sln_inner msol elapsed =
message_info (fun () ->
printf "Inner loop %s, solved in %.3f s." inpb.loop_name elapsed);
msol
in
match loc_solve inpb with
| Some sln, elapsed ->
sln_inner (Some sln) elapsed
| None, elapsed->
let inpb' =
try
Canalyst.find_new_variables inpb
with VariableDiscovery.VariableDiscoveryError s as e ->
eprintf "[ERROR] Received variable discovery errror in aux_solve of solve_problem.@.";
eprintf "[ERROR] Skipping problem %s.@." problem.loop_name;
message_error_task "Couldn't find auxliary variables...\n";
raise e
in
let sol', elapsed' = loc_solve inpb' in
sln_inner sol' elapsed'
else Some inpb
in
message_start_subtask ("Solvinng inner loops of "^problem.loop_name);
let inner_funcs =
somes (List.map solve_inner_problem problem.inner_functions)
in
message_done ~done_what:"(inner loops)" ();
if List.length inner_funcs = List.length problem.inner_functions then
begin
if !verbose then print_inner_result problem inner_funcs ();
Some (Sketch.Join.sketch_join
(InnerFuncs.replace_by_join problem inner_funcs))
end
else
None
and solve_problem problem =
Try to solve the inner loops first
let aux_solve problem =
let tactic1_sol =
if !skip_first_solve || !skip_all_before_vardisc then None else solve_one None problem
in
match tactic1_sol with
| Some x -> Some x
| None ->
message_start_subtask ("Searching auxiliaries for "^problem.loop_name);
let problem' =
(try
Canalyst.find_new_variables problem
with VariableDiscovery.VariableDiscoveryError s as e ->
eprintf "[ERROR] Received variable discovery errror in aux_solve of solve_problem.@.";
eprintf "[ERROR] Skipping problem %s.@." problem.loop_name;
message_error_task "Couldn't find auxliary variables...\n";
raise e)
in
Solve.timeout_multiplier := 100;
message_done ();
match (solve_inners =>> (solve_one None)) problem' with
| Some x -> Some x
| None -> solve_one ~expr_depth:2 None problem'
in
maybe_apply aux_solve
(if !skip_all_before_vardisc then Some problem else solve_inners problem)
* Generating a TBB implementation of the parallel solution discovered
let output_tbb_tests (solutions : prob_rep list) =
let tbb_test_filename (solution : prob_rep) =
let folder_name =
(!Config.output_dir)^"/"^(Config.get_conf_string "tbb_examples_folder")
in
let errco =
if Sys.file_exists folder_name then
0
else
Sys.command ("mkdir "^folder_name)
in
if errco = 0 then
folder_name^(Tbb.pbname_of_sketch solution)^".cpp"
else
failwith "Failed to create directory for tbb example output."
in
printf "@.%s%sGenerating implementations for solved examples..%s@."
(color "black") (color "b-green") color_default;
List.iter (Tbb.output_tbb_test tbb_test_filename) solutions
* Generating proofs
let output_dafny_proofs (sols : prob_rep list) : unit =
let dafny_proof_filename (sol : prob_rep) =
let folder_name =
(!Config.output_dir)^"/"^(Config.get_conf_string "dafny_examples_folder")
in
let errco =
if Sys.file_exists folder_name then
0
else
Sys.command ("mkdir "^folder_name)
in
if errco = 0 then
folder_name^(Pf.filename_of_solution sol)^".dfy"
else
failwith "Failed to create directory for Dafny proof output."
in
printf "@.%s%sGenerating proofs for solved examples..%s@."
(color "black") (color "b-green") color_default;
try
List.iter (Pf.output_dafny_proof dafny_proof_filename) sols
with _ ->
printf "%s[ERROR] Could not generate proof.%s@." (color "b-red") color_default
let main () =
parse_cmdline options print_endline;
if Array.length Sys.argv < 2 then
begin
eprintf "%sUsage : ./Parsy.native [filename] .. options%s@."
(color "red") color_default;
flush_all ();
exit 1;
end;
L.debug := !debug;
let filename = Array.get Sys.argv 1 in
if !debug then
begin
FError.logfile := "log"^(string_of_float (Sys.time ()))^filename;
printf "Logging in %s@." !FError.logfile;
Cil2Func.debug := true;
Sketch.debug := true;
Func2Fn.debug := true;
Sketch.Join.debug := true;
VariableDiscovery.debug := true;
end;
if !verbose then
begin
Solve.verbose := true;
Loops.verbose := true;
Canalyst.verbose := true;
InnerFuncs.verbose := true;
Sketch.Join.verbose := true;
VariableDiscovery.verbose := true;
SymbExe.verbose := true;
Incremental.verbose := true;
end;
elapsed_time := Unix.gettimeofday ();
message_start_task "Parsing C program ...";
let c_file, loops = C.processFile filename in
message_done ();
message_start_task "Translating C Ast to partial functional ast...";
let functions = C.cil2func c_file loops in
message_done ();
message_start_task "Translating input to functional representation...";
let problem_list = Canalyst.func2sketch c_file functions in
message_done ();
message_start_task "Solving sketches ...";
message_skip ();
let solved =
List.map check_option
(List.filter is_some
(List.map solve_problem problem_list))
in
(List.iter (fun problem -> FnPretty.pp_problem_rep std_formatter problem)
(somes solved));
For each solved problem , generate a TBB implementation
begin try
output_tbb_tests (somes solved);
with _ -> () end;
If exact_fp is set , generate the exact floating point parallel
implementation
implementation *)
Generate a proof in .
begin try
output_dafny_proofs (somes solved);
with _ -> () end;
elapsed_time := (Unix.gettimeofday ()) -. !elapsed_time;
printf "@.\t\t\t\t\t\t%sFINISHED in %.3f s%s@.@." (color "green")
!elapsed_time color_default;;
main (); *)
open ParsyntLib.Lib
let main () =
Log.verbose := true;
Log.verb_debug := 1234;
Log.warning_msg "ok"
;;
main ()
|
f59c6d223002c899410c3dd679a5216c5bd6cbeb627fe6efcbb14be6886b6e4a | acl2/acl2 | symbol-listp.lisp | ; A lightweight book about the built-in function symbol-listp.
;
Copyright ( C ) 2008 - 2011 and Stanford University
Copyright ( C ) 2013 - 2023 Kestrel Institute
;
License : A 3 - clause BSD license . See the file books/3BSD - mod.txt .
;
Author : ( )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package "ACL2")
;; See also books/std/typed-lists/symbol-listp.lisp, but that book may be more
;; heavyweight.
(in-theory (disable symbol-listp))
;; Avoid name clash with std.
(defthm symbol-listp-of-set-difference-equal-alt
(implies (symbol-listp l1)
(symbol-listp (set-difference-equal l1 l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-append2
(equal (symbol-listp (append x y))
(and (symbol-listp (true-list-fix x))
(symbol-listp y)))
:hints (("Goal" :in-theory (enable append symbol-listp))))
;this had a name conflict with a theorem in std/typed-lists/symbol-listp
(defthm symbol-listp-of-union-equal-alt
(equal (symbol-listp (union-equal l1 l2))
(and (symbol-listp (true-list-fix l1))
(symbol-listp l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-intersection-equal
(implies (or (symbol-listp l1)
(symbol-listp l2))
(symbol-listp (intersection-equal l1 l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-add-to-set-equal
(equal (symbol-listp (add-to-set-equal x l))
(and (symbolp x)
(symbol-listp l)))
:hints (("Goal" :in-theory (enable symbol-listp add-to-set-equal))))
(defthm symbol-listp-of-cdr
(implies (symbol-listp x)
(symbol-listp (cdr x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-cons
(equal (symbol-listp (cons a x))
(and (symbolp a)
(symbol-listp x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-true-list-fix
(implies (symbol-listp x)
(symbol-listp (true-list-fix x))))
;; Disabled but see symbolp-of-car-when-symbol-listp.
(defthmd symbolp-of-car-when-symbol-listp
(implies (symbol-listp x)
(symbolp (car x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbolp-of-car-when-symbol-listp-cheap
(implies (symbol-listp x)
(symbolp (car x)))
:rule-classes ((:rewrite :backchain-limit-lst (0)))
:hints (("Goal" :in-theory (enable symbol-listp))))
;; Avoids name clash with std
(defthm symbol-listp-of-take-simple
(implies (symbol-listp l)
(symbol-listp (take n l)))
:hints (("Goal" :in-theory (enable take))))
;; Avoids name clash with std
(defthm symbol-listp-of-nthcdr-simple
(implies (symbol-listp l)
(symbol-listp (nthcdr n l)))
:hints (("Goal" :in-theory (enable nthcdr))))
(defthm symbol-listp-of-revappend
(equal (symbol-listp (revappend x y))
(and (symbol-listp (true-list-fix x))
(symbol-listp y)))
:hints (("Goal" :in-theory (enable revappend symbol-listp))))
(defthm symbol-listp-of-reverse
(implies (symbol-listp x)
(symbol-listp (reverse x)))
:hints (("Goal" :in-theory (enable reverse))))
;; matches the version in std
(defthm symbol-listp-of-remove-equal
(implies (symbol-listp x)
(symbol-listp (remove-equal a x)))
:hints (("Goal" :in-theory (enable remove-equal))))
(defthm symbol-listp-of-remove1-equal
(implies (symbol-listp x)
(symbol-listp (remove1-equal a x)))
:hints (("Goal" :in-theory (enable remove1-equal))))
;; todo: strengthen?
(defthm symbol-listp-of-remove-duplicates-equal
(implies (symbol-listp x)
(symbol-listp (remove-duplicates-equal x)))
:hints (("Goal" :in-theory (enable remove-duplicates-equal))))
;this matches something in STD
(defthm true-listp-when-symbol-listp
(implies (symbol-listp x)
(true-listp x))
:rule-classes :compound-recognizer)
;; Can't call this true-listp-when-symbol-listp because std uses that name for a :compound-recognizer rule.
;; Can't call this true-listp-when-symbol-listp-rewrite because std uses that name for a backchain-limited rule.
(defthmd true-listp-when-symbol-listp-rewrite-unlimited
(implies (symbol-listp x)
(true-listp x)))
; may be nil, which is a symbol!
(defthm symbolp-of-car-of-last-when-symbol-listp
(implies (symbol-listp x)
(symbolp (car (last x))))
:hints (("Goal" :in-theory (enable symbol-listp))))
;; Compatible with std
(defthm symbolp-of-nth-when-symbol-listp
(implies (symbol-listp x)
(symbolp (nth n x)))
:hints (("Goal" :in-theory (enable nth))))
;; avoids name clash with std
(defthm symbol-listp-when-subsetp-equal-1
(implies (and (subsetp-equal l1 l2)
(symbol-listp l2))
(equal (symbol-listp l1)
(true-listp l1)))
:hints (("Goal" :in-theory (enable nth))))
(defthmd symbolp-when-member-equal-and-symbol-listp
(implies (and (member-equal a x)
(symbol-listp x))
(symbolp a))
:hints (("Goal" :in-theory (enable symbol-listp member-equal))))
| null | https://raw.githubusercontent.com/acl2/acl2/2790a294db676f428d72175afcfa5dfff2b05979/books/kestrel/typed-lists-light/symbol-listp.lisp | lisp | A lightweight book about the built-in function symbol-listp.
See also books/std/typed-lists/symbol-listp.lisp, but that book may be more
heavyweight.
Avoid name clash with std.
this had a name conflict with a theorem in std/typed-lists/symbol-listp
Disabled but see symbolp-of-car-when-symbol-listp.
Avoids name clash with std
Avoids name clash with std
matches the version in std
todo: strengthen?
this matches something in STD
Can't call this true-listp-when-symbol-listp because std uses that name for a :compound-recognizer rule.
Can't call this true-listp-when-symbol-listp-rewrite because std uses that name for a backchain-limited rule.
may be nil, which is a symbol!
avoids name clash with std | Copyright ( C ) 2008 - 2011 and Stanford University
Copyright ( C ) 2013 - 2023 Kestrel Institute
License : A 3 - clause BSD license . See the file books/3BSD - mod.txt .
Author : ( )
(in-package "ACL2")
(in-theory (disable symbol-listp))
(defthm symbol-listp-of-set-difference-equal-alt
(implies (symbol-listp l1)
(symbol-listp (set-difference-equal l1 l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-append2
(equal (symbol-listp (append x y))
(and (symbol-listp (true-list-fix x))
(symbol-listp y)))
:hints (("Goal" :in-theory (enable append symbol-listp))))
(defthm symbol-listp-of-union-equal-alt
(equal (symbol-listp (union-equal l1 l2))
(and (symbol-listp (true-list-fix l1))
(symbol-listp l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-intersection-equal
(implies (or (symbol-listp l1)
(symbol-listp l2))
(symbol-listp (intersection-equal l1 l2)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-add-to-set-equal
(equal (symbol-listp (add-to-set-equal x l))
(and (symbolp x)
(symbol-listp l)))
:hints (("Goal" :in-theory (enable symbol-listp add-to-set-equal))))
(defthm symbol-listp-of-cdr
(implies (symbol-listp x)
(symbol-listp (cdr x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-cons
(equal (symbol-listp (cons a x))
(and (symbolp a)
(symbol-listp x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-true-list-fix
(implies (symbol-listp x)
(symbol-listp (true-list-fix x))))
(defthmd symbolp-of-car-when-symbol-listp
(implies (symbol-listp x)
(symbolp (car x)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbolp-of-car-when-symbol-listp-cheap
(implies (symbol-listp x)
(symbolp (car x)))
:rule-classes ((:rewrite :backchain-limit-lst (0)))
:hints (("Goal" :in-theory (enable symbol-listp))))
(defthm symbol-listp-of-take-simple
(implies (symbol-listp l)
(symbol-listp (take n l)))
:hints (("Goal" :in-theory (enable take))))
(defthm symbol-listp-of-nthcdr-simple
(implies (symbol-listp l)
(symbol-listp (nthcdr n l)))
:hints (("Goal" :in-theory (enable nthcdr))))
(defthm symbol-listp-of-revappend
(equal (symbol-listp (revappend x y))
(and (symbol-listp (true-list-fix x))
(symbol-listp y)))
:hints (("Goal" :in-theory (enable revappend symbol-listp))))
(defthm symbol-listp-of-reverse
(implies (symbol-listp x)
(symbol-listp (reverse x)))
:hints (("Goal" :in-theory (enable reverse))))
(defthm symbol-listp-of-remove-equal
(implies (symbol-listp x)
(symbol-listp (remove-equal a x)))
:hints (("Goal" :in-theory (enable remove-equal))))
(defthm symbol-listp-of-remove1-equal
(implies (symbol-listp x)
(symbol-listp (remove1-equal a x)))
:hints (("Goal" :in-theory (enable remove1-equal))))
(defthm symbol-listp-of-remove-duplicates-equal
(implies (symbol-listp x)
(symbol-listp (remove-duplicates-equal x)))
:hints (("Goal" :in-theory (enable remove-duplicates-equal))))
(defthm true-listp-when-symbol-listp
(implies (symbol-listp x)
(true-listp x))
:rule-classes :compound-recognizer)
(defthmd true-listp-when-symbol-listp-rewrite-unlimited
(implies (symbol-listp x)
(true-listp x)))
(defthm symbolp-of-car-of-last-when-symbol-listp
(implies (symbol-listp x)
(symbolp (car (last x))))
:hints (("Goal" :in-theory (enable symbol-listp))))
Compatible with std
(defthm symbolp-of-nth-when-symbol-listp
(implies (symbol-listp x)
(symbolp (nth n x)))
:hints (("Goal" :in-theory (enable nth))))
(defthm symbol-listp-when-subsetp-equal-1
(implies (and (subsetp-equal l1 l2)
(symbol-listp l2))
(equal (symbol-listp l1)
(true-listp l1)))
:hints (("Goal" :in-theory (enable nth))))
(defthmd symbolp-when-member-equal-and-symbol-listp
(implies (and (member-equal a x)
(symbol-listp x))
(symbolp a))
:hints (("Goal" :in-theory (enable symbol-listp member-equal))))
|
f3fac18d0373b2072a421a31a0216af0bb93be0788906dafb4f4e71f8b7b2b62 | broadinstitute/wfl | wgs_test.clj | (ns wfl.unit.modules.wgs-test
(:require [clojure.test :refer [deftest is]]
[wfl.module.wgs :as wgs]))
(def ^:private output-url "gs-output-bucket/")
(deftest test-make-inputs-from-cram
(let [sample "gs-input-bucket/folder/sample.cram"
inputs (wgs/make-inputs-to-save output-url {:input_cram sample})]
(is (= sample (:input_cram inputs)))
(is (= "sample" (:sample_name inputs)))
(is (= "sample" (:base_file_name inputs)))
(is (= "sample" (:final_gvcf_base_name inputs)))
(is (= ".unmapped.bam" (:unmapped_bam_suffix inputs)))
(is (= (str output-url "folder") (:destination_cloud_path inputs)))))
(deftest test-make-inputs-from-bam
(let [sample "gs-input-bucket/folder/sample.bam"
inputs (wgs/make-inputs-to-save output-url {:input_bam sample})]
(is (= sample (:input_bam inputs)))
(is (= "sample" (:sample_name inputs)))
(is (= "sample" (:base_file_name inputs)))
(is (= "sample" (:final_gvcf_base_name inputs)))
(is (= ".unmapped.bam" (:unmapped_bam_suffix inputs)))
(is (= (str output-url "folder") (:destination_cloud_path inputs)))))
(deftest test-specifying-destination_cloud_path
(let [destination "gs-bucket/in-the-middle/of-nowhere.out"
inputs (wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:destination_cloud_path destination})]
(is (= destination (:destination_cloud_path inputs)))))
(deftest test-specifying-sample_name
(let [name "geoff"
inputs (wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:sample_name name})]
(is (= name (:sample_name inputs)))))
(deftest test-specifying-arbitrary-workflow-inputs
(is (:arbitrary
(wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:arbitrary "hai"}))))
(deftest test-invalid-input-gs-url-throws
(is (thrown? Exception
(wgs/make-inputs-to-save
output-url
{:input_cram ""})))
(is (thrown? Exception
(wgs/make-inputs-to-save
output-url
{:input_cram ""}))))
| null | https://raw.githubusercontent.com/broadinstitute/wfl/f6fc34e1a7bbb9588570d36c96f0d88e5e014a3d/api/test/wfl/unit/modules/wgs_test.clj | clojure | (ns wfl.unit.modules.wgs-test
(:require [clojure.test :refer [deftest is]]
[wfl.module.wgs :as wgs]))
(def ^:private output-url "gs-output-bucket/")
(deftest test-make-inputs-from-cram
(let [sample "gs-input-bucket/folder/sample.cram"
inputs (wgs/make-inputs-to-save output-url {:input_cram sample})]
(is (= sample (:input_cram inputs)))
(is (= "sample" (:sample_name inputs)))
(is (= "sample" (:base_file_name inputs)))
(is (= "sample" (:final_gvcf_base_name inputs)))
(is (= ".unmapped.bam" (:unmapped_bam_suffix inputs)))
(is (= (str output-url "folder") (:destination_cloud_path inputs)))))
(deftest test-make-inputs-from-bam
(let [sample "gs-input-bucket/folder/sample.bam"
inputs (wgs/make-inputs-to-save output-url {:input_bam sample})]
(is (= sample (:input_bam inputs)))
(is (= "sample" (:sample_name inputs)))
(is (= "sample" (:base_file_name inputs)))
(is (= "sample" (:final_gvcf_base_name inputs)))
(is (= ".unmapped.bam" (:unmapped_bam_suffix inputs)))
(is (= (str output-url "folder") (:destination_cloud_path inputs)))))
(deftest test-specifying-destination_cloud_path
(let [destination "gs-bucket/in-the-middle/of-nowhere.out"
inputs (wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:destination_cloud_path destination})]
(is (= destination (:destination_cloud_path inputs)))))
(deftest test-specifying-sample_name
(let [name "geoff"
inputs (wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:sample_name name})]
(is (= name (:sample_name inputs)))))
(deftest test-specifying-arbitrary-workflow-inputs
(is (:arbitrary
(wgs/make-inputs-to-save output-url
{:input_bam "gs-input-bucket/sample.bam"
:arbitrary "hai"}))))
(deftest test-invalid-input-gs-url-throws
(is (thrown? Exception
(wgs/make-inputs-to-save
output-url
{:input_cram ""})))
(is (thrown? Exception
(wgs/make-inputs-to-save
output-url
{:input_cram ""}))))
|
|
01b4510431b965b6d2cc5e9565f62651343b15edb642f620dbaead1a3231e812 | brianium/tomaat | settings.cljs | (ns tomaat.worker.settings
(:require [tomaat.data :as data]))
(defn update-settings
"Merge the given settings into stored settings for Tomaat"
[event id settings]
(-> settings
(js->clj :keywordize-keys true)
data/write))
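(comment
  ;; Illustrative usage sketch, not part of the original file. The event and
  ;; id arguments are unused by `update-settings`; only the JS settings object
  ;; is converted and persisted via tomaat.data/write. The keys shown here are
  ;; hypothetical.
  (update-settings nil "settings-updated" #js {:duration 25 :sound true}))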
| null | https://raw.githubusercontent.com/brianium/tomaat/990cb7c496796c567613ec7ea8c9855f397c45c2/src/tomaat/worker/settings.cljs | clojure | (ns tomaat.worker.settings
(:require [tomaat.data :as data]))
(defn update-settings
"Merge the given settings into stored settings for Tomaat"
[event id settings]
(-> settings
(js->clj :keywordize-keys true)
data/write))
|
|
a761b73f12896d172c057d54b8265bc0888aefcd0d122efbf56cfc0c57f19ade | dancrossnyc/multics | glasstty.ctl.lisp | ;;; ***********************************************************
;;; * *
;;; * Copyright, (C) Honeywell Information Systems Inc., 1982 *
;;; * *
;;; * Copyright (c) 1978 by Massachusetts Institute of *
;;; * Technology and Honeywell Information Systems, Inc. *
;;; * *
;;; ***********************************************************
;;;
;;;
;;; tty display control
;;; From printing tty, BSG 6/29/78
;;; Redone for new redisplay 7/7/78
;;; For tty-no-cleolp, bsg 2/14/80
(declare (special X Y screenheight idel-lines-availablep idel-chars-availablep screenlinelen tty-type tty-no-upmotionp tty-no-cleolp))
(declare (array* (notype (newscreen ?))))
(defun DCTL-init ()
(setq X -777 Y -777)
(setq tty-type 'teleray)
(setq screenheight 24. screenlinelen 79.)
(setq idel-chars-availablep nil idel-lines-availablep nil tty-no-upmotionp t tty-no-cleolp t))
(defun DCTL-position-cursor (x y)
(prog ()
(and (= x X)(= y Y)(return nil))
(and (< X 0)(DCTL-crlf))
(and (= y Y)
(progn
(cond ((and (= x 0)(> X 4))(DCTL-cret))
((< X x)(DCTL-display-char-string
(substr (or (cadr (newscreen Y)) " ") (1+ X) (- x X))))
((< (- X x) x) (do xx X (1- xx)(= xx x)(Rtyo 10)))
(t (DCTL-cret)
(DCTL-position-cursor x Y)))
(setq X x) ;y is right by definition
(return nil)))
;; Definitely going to a new line at this point
(DCTL-nextline)
(setq Y y)
(DCTL-position-cursor x y)))
(defun DCTL-assert-scpos (x y)
(and x (setq X x))
(and y (setq Y y)))
(defun DCTL-clear-rest-of-screen ())
(defun DCTL-nextline ()(Rtyo 12))
(defun DCTL-display-char-string (s)
(Rprinc s)
(setq X (+ X (stringlength s))))
(defun DCTL-cret ()
(Rtyo 15)(setq X 0))
(defun DCTL-crlf ()
(Rtyo 15)(Rtyo 12)(setq X 0))
| null | https://raw.githubusercontent.com/dancrossnyc/multics/dc291689edf955c660e57236da694630e2217151/library_dir_dir/system_library_unbundled/source/bound_emacs_ctls_.s.archive/glasstty.ctl.lisp | lisp | ***********************************************************
* *
* *
* *
***********************************************************
y is right by definition
Definitely going to a new line at this point | * Copyright , ( C ) Honeywell Information Systems Inc. , 1982 *
* Copyright ( c ) 1978 by Massachusetts Institute of *
* Technology and Honeywell Information Systems , Inc. *
tty display control
From printing tty , BSG 6/29/78
Redone for new redisplay 7/7/78
For tty - no - cleolp , bsg 2/14/80
(declare (special X Y screenheight idel-lines-availablep idel-chars-availablep screenlinelen tty-type tty-no-upmotionp tty-no-cleolp))
(declare (array* (notype (newscreen ?))))
(defun DCTL-init ()
(setq X -777 Y -777)
(setq tty-type 'teleray)
(setq screenheight 24. screenlinelen 79.)
(setq idel-chars-availablep nil idel-lines-availablep nil tty-no-upmotionp t tty-no-cleolp t))
(defun DCTL-position-cursor (x y)
(prog ()
(and (= x X)(= y Y)(return nil))
(and (< X 0)(DCTL-crlf))
(and (= y Y)
(progn
(cond ((and (= x 0)(> X 4))(DCTL-cret))
((< X x)(DCTL-display-char-string
(substr (or (cadr (newscreen Y)) " ") (1+ X) (- x X))))
((< (- X x) x) (do xx X (1- xx)(= xx x)(Rtyo 10)))
(t (DCTL-cret)
(DCTL-position-cursor x Y)))
(return nil)))
(DCTL-nextline)
(setq Y y)
(DCTL-position-cursor x y)))
(defun DCTL-assert-scpos (x y)
(and x (setq X x))
(and y (setq Y y)))
(defun DCTL-clear-rest-of-screen ())
(defun DCTL-nextline ()(Rtyo 12))
(defun DCTL-display-char-string (s)
(Rprinc s)
(setq X (+ X (stringlength s))))
(defun DCTL-cret ()
(Rtyo 15)(setq X 0))
(defun DCTL-crlf ()
(Rtyo 15)(Rtyo 12)(setq X 0))
|
c015829c617c58239ead95f10b259a8dc74ebb34254eca930a12f5fa38773836 | jradtilbrook/rubiks | Moves.hs | module Moves
( (-:)
, front
, back
, back'
, back2
, down
, front'
, front2
, left
, left'
, left2
, right
, right'
, right2
, up
, up'
, up2
, down'
, down2
) where
import Cube
import qualified Moves.Corners as Corner
import qualified Moves.Edges as Edge
{-
- Similar to function composition for applying moves to a cube.
- Start with the cube state, then write each move you want to take in order.
- Eg. Cube -: down -: right -: front
-}
x -: f = f x
front = move Corner.front Edge.front
front' = move Corner.front' Edge.front'
front2 = move Corner.front2 Edge.front2
back = move Corner.back Edge.back
back' = move Corner.back' Edge.back'
back2 = move Corner.back2 Edge.back2
left = move Corner.left Edge.left
left' = move Corner.left' Edge.left'
left2 = move Corner.left2 Edge.left2
right = move Corner.right Edge.right
right' = move Corner.right' Edge.right'
right2 = move Corner.right2 Edge.right2
up = move Corner.up Edge.up
up' = move Corner.up' Edge.up'
up2 = move Corner.up2 Edge.up2
down = move Corner.down Edge.down
down' = move Corner.down' Edge.down'
down2 = move Corner.down2 Edge.down2
move mc me (Cube corner edge) = Cube corner' edge'
where
corner' = mc corner
edge' = me edge
| null | https://raw.githubusercontent.com/jradtilbrook/rubiks/d49b169187b68b5d07804da723de73820e7ab88b/src/Moves.hs | haskell |
- Similar to function composition for applying moves to a cube.
- Start with the cube state, then write each move you want to take in order.
- Eg. Cube -: down -: right -: front
| module Moves
( (-:)
, front
, back
, back'
, back2
, down
, front'
, front2
, left
, left'
, left2
, right
, right'
, right2
, up
, up'
, up2
, down'
, down2
) where
import Cube
import qualified Moves.Corners as Corner
import qualified Moves.Edges as Edge
x -: f = f x
front = move Corner.front Edge.front
front' = move Corner.front' Edge.front'
front2 = move Corner.front2 Edge.front2
back = move Corner.back Edge.back
back' = move Corner.back' Edge.back'
back2 = move Corner.back2 Edge.back2
left = move Corner.left Edge.left
left' = move Corner.left' Edge.left'
left2 = move Corner.left2 Edge.left2
right = move Corner.right Edge.right
right' = move Corner.right' Edge.right'
right2 = move Corner.right2 Edge.right2
up = move Corner.up Edge.up
up' = move Corner.up' Edge.up'
up2 = move Corner.up2 Edge.up2
down = move Corner.down Edge.down
down' = move Corner.down' Edge.down'
down2 = move Corner.down2 Edge.down2
move mc me (Cube corner edge) = Cube corner' edge'
where
corner' = mc corner
edge' = me edge
|
09ac5fcd3a05e45fb790ebe710d8774175b535ac4446faf3a090be38967508c2 | tweag/asterius | gcdInteger.hs |
{-# LANGUAGE MagicHash #-}
module Main (main) where
import GHC.Base
import GHC.Integer
main :: IO ()
main = case i of
I# i# ->
print (gcd (smallInteger i#) (smallInteger i#))
{-# NOINLINE i #-}
i :: Int
i = minBound
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/ghc-testsuite/integer/gcdInteger.hs | haskell |
# LANGUAGE MagicHash #
module Main (main) where
import GHC.Base
import GHC.Integer
main :: IO ()
main = case i of
I# i# ->
print (gcd (smallInteger i#) (smallInteger i#))
# NOINLINE i #
i :: Int
i = minBound
|
|
f242d4c62cd69bd851402ba570e92d45186b4870c830df2f3ed318b6d6e4568e | ghc/packages-Cabal | cabal.test.hs | import Test.Cabal.Prelude
main = cabalTest $ do
cabal "v2-build" ["test"]
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/Regression/T4720/cabal.test.hs | haskell | import Test.Cabal.Prelude
main = cabalTest $ do
cabal "v2-build" ["test"]
|
|
bfe810d0311dcf82271b69767605b782923e100c44b4a22baba30181537ccc4b | MinaProtocol/mina | account_id_intf.ml | [%%import "/src/config.mlh"]
open Core_kernel
open Mina_base_import
module type S = sig
module Digest : sig
[%%versioned:
module Stable : sig
module V1 : sig
type t [@@deriving sexp, equal, compare, hash, yojson]
end
end]
val of_field : Snark_params.Tick.Field.t -> t
val to_field_unsafe : t -> Snark_params.Tick.Field.t
include Stringable.S with type t := t
(* so we can easily import these into Token_id *)
module Binables : sig
include Comparable_binable with type t := t
include Hashable_binable with type t := t
end
include module type of Binables
val to_input : t -> Snark_params.Tick.Field.t Random_oracle.Input.Chunked.t
val default : t
val gen : t Quickcheck.Generator.t
val gen_non_default : t Quickcheck.Generator.t
[%%ifdef consensus_mechanism]
module Checked : sig
open Pickles.Impls.Step
type t
val to_input : t -> Field.t Random_oracle.Input.Chunked.t
val constant : Stable.Latest.t -> t
val equal : t -> t -> Boolean.var
val if_ : Boolean.var -> then_:t -> else_:t -> t
val of_field : Pickles.Impls.Step.Field.t -> t
val to_field_unsafe : t -> Pickles.Impls.Step.Field.t
module Assert : sig
val equal : t -> t -> unit
end
end
val typ : (Checked.t, t) Snark_params.Tick.Typ.t
[%%endif]
end
[%%versioned:
module Stable : sig
module V2 : sig
type t [@@deriving sexp, equal, compare, hash, yojson]
end
end]
val create : Public_key.Compressed.t -> Digest.t -> t
val derive_token_id : owner:t -> Digest.t
val empty : t
val invalid : t
val public_key : t -> Public_key.Compressed.t
val token_id : t -> Digest.t
val to_input : t -> Snark_params.Tick.Field.t Random_oracle.Input.Chunked.t
val gen : t Quickcheck.Generator.t
include Comparable.S with type t := t
include Hashable.S_binable with type t := t
[%%ifdef consensus_mechanism]
type var
val typ : (var, t) Snark_params.Tick.Typ.t
val var_of_t : t -> var
module Checked : sig
open Snark_params
open Tick
val create : Public_key.Compressed.var -> Digest.Checked.t -> var
val public_key : var -> Public_key.Compressed.var
val token_id : var -> Digest.Checked.t
val to_input :
var -> Snark_params.Tick.Field.Var.t Random_oracle.Input.Chunked.t
val equal : var -> var -> Boolean.var Checked.t
val if_ : Boolean.var -> then_:var -> else_:var -> var Checked.t
val derive_token_id : owner:var -> Digest.Checked.t
end
[%%endif]
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/d6436be74fd3fa5e38bbf95ad2f2c6dfd10cfe37/src/lib/mina_base/account_id_intf.ml | ocaml | so we can easily import these into Token_id | [%%import "/src/config.mlh"]
open Core_kernel
open Mina_base_import
module type S = sig
module Digest : sig
[%%versioned:
module Stable : sig
module V1 : sig
type t [@@deriving sexp, equal, compare, hash, yojson]
end
end]
val of_field : Snark_params.Tick.Field.t -> t
val to_field_unsafe : t -> Snark_params.Tick.Field.t
include Stringable.S with type t := t
module Binables : sig
include Comparable_binable with type t := t
include Hashable_binable with type t := t
end
include module type of Binables
val to_input : t -> Snark_params.Tick.Field.t Random_oracle.Input.Chunked.t
val default : t
val gen : t Quickcheck.Generator.t
val gen_non_default : t Quickcheck.Generator.t
[%%ifdef consensus_mechanism]
module Checked : sig
open Pickles.Impls.Step
type t
val to_input : t -> Field.t Random_oracle.Input.Chunked.t
val constant : Stable.Latest.t -> t
val equal : t -> t -> Boolean.var
val if_ : Boolean.var -> then_:t -> else_:t -> t
val of_field : Pickles.Impls.Step.Field.t -> t
val to_field_unsafe : t -> Pickles.Impls.Step.Field.t
module Assert : sig
val equal : t -> t -> unit
end
end
val typ : (Checked.t, t) Snark_params.Tick.Typ.t
[%%endif]
end
[%%versioned:
module Stable : sig
module V2 : sig
type t [@@deriving sexp, equal, compare, hash, yojson]
end
end]
val create : Public_key.Compressed.t -> Digest.t -> t
val derive_token_id : owner:t -> Digest.t
val empty : t
val invalid : t
val public_key : t -> Public_key.Compressed.t
val token_id : t -> Digest.t
val to_input : t -> Snark_params.Tick.Field.t Random_oracle.Input.Chunked.t
val gen : t Quickcheck.Generator.t
include Comparable.S with type t := t
include Hashable.S_binable with type t := t
[%%ifdef consensus_mechanism]
type var
val typ : (var, t) Snark_params.Tick.Typ.t
val var_of_t : t -> var
module Checked : sig
open Snark_params
open Tick
val create : Public_key.Compressed.var -> Digest.Checked.t -> var
val public_key : var -> Public_key.Compressed.var
val token_id : var -> Digest.Checked.t
val to_input :
var -> Snark_params.Tick.Field.Var.t Random_oracle.Input.Chunked.t
val equal : var -> var -> Boolean.var Checked.t
val if_ : Boolean.var -> then_:var -> else_:var -> var Checked.t
val derive_token_id : owner:var -> Digest.Checked.t
end
[%%endif]
end
|
52b18ec0b0a6560142d2a29c54dc83e6b6755bfa9b6be38e3b1688d7a6dcadd5 | roburio/udns | dns_client_flow.mli | * TODO ideally there 'd be something like mirage - flow - lwt that did n't depend
   on lwt and a ton of other things, and still provided [map]
   and [connect] and so on. leaving this stuff here for now until a
   better solution presents itself.
*)
module type S = sig
type flow
(** A flow is a connection produced by {!U.connect} *)
type (+'ok,+'err) io constraint 'err = [> `Msg of string]
(** [io] is the type of an effect. ['err] is a polymorphic variant. *)
type io_addr
(** An address for a given flow type, usually this will consist of
IP address + a TCP/IP or UDP/IP port number, but for some flow types
it can carry additional information for purposes of cryptographic
verification. TODO at least that would be nice in the future. TODO
*)
type ns_addr = [ `TCP | `UDP] * io_addr
  (** TODO well this is kind of crude; it's a tuple to prevent having
      to do endless amounts of currying things when implementing flow types,
      and we need to know the protocol used so we can prefix packets for
      DNS-over-TCP and set correct socket options etc. therefore we can't
      just use the opaque [io_addr].
      TODO*)
type stack
(** A stack with which to connect, e.g. {IPv4.tcpv4}*)
type t
  (** The abstract state of a DNS client. *)
val create : ?nameserver:ns_addr -> stack -> t
  (** [create ~nameserver stack] creates the state record of the DNS client. *)
val nameserver : t -> ns_addr
(** The address of a nameserver that is supposed to work with
the underlying flow, can be used if the user does not want to
bother with configuring their own.*)
val connect : ?nameserver:ns_addr -> t -> (flow,'err) io
(** [connect addr] is a new connection ([flow]) to [addr], or an error. *)
val send : flow -> Cstruct.t -> (unit,'err) io
(** [send flow buffer] sends [buffer] to the [flow] upstream.*)
val recv : flow -> (Cstruct.t, 'err) io
(** [recv flow] tries to read a [buffer] from the [flow] downstream.*)
val resolve : ('ok,'err) io -> ('ok -> ('next,'err) result) -> ('next,'err) io
(** a.k.a. [>|=] *)
val map : ('ok,'err) io -> ('ok -> ('next,'err) io) -> ('next,'err) io
(** a.k.a. [>>=] *)
val lift : ('ok, 'err) result -> ('ok,'err) io
end
module Make : functor (U : S) ->
sig
val create : ?nameserver:U.ns_addr -> U.stack -> U.t
    (** [create ~nameserver stack] creates the state of the DNS client. *)
val nameserver : U.t -> U.ns_addr
(** [nameserver t] returns the default nameserver to be used. *)
val getaddrinfo : U.t -> ?nameserver:U.ns_addr -> 'response Dns.Rr_map.key ->
'a Domain_name.t -> ('response, 'err) U.io
    (** [getaddrinfo nameserver name] is the [query_type]-dependent
        response from [nameserver] regarding [name], or an [Error _] message.
        See {!Dns_client.query_state} for more information about the
        result types.
    *)
val gethostbyname : U.t -> ?nameserver:U.ns_addr -> [ `host ] Domain_name.t ->
(Ipaddr.V4.t, 'err) U.io
(** [gethostbyname state ~nameserver domain] is the IPv4 address of [domain]
resolved via the [state] and [nameserver] specified.
If the query fails, or if the [domain] does not have any IPv4 addresses,
an [Error _] message is returned.
Any extraneous IPv4 addresses are ignored.
For an example of using this API, see [unix/ohost.ml]
in the distribution of this package.
*)
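    (* Illustrative usage sketch, not part of the original interface. It
       assumes some implementation [U] of [S] (e.g. a Unix- or Mirage-backed
       flow) and a stack value of type [U.stack]; [Domain_name.of_string_exn]
       and [Domain_name.host_exn] come from the domain-name package.

         module Client = Dns_client_flow.Make (U)

         let lookup stack =
           let t = Client.create stack in
           let domain = Domain_name.(host_exn (of_string_exn "example.org")) in
           Client.gethostbyname t domain
    *)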
val gethostbyname6 : U.t -> ?nameserver:U.ns_addr -> [ `host ] Domain_name.t ->
(Ipaddr.V6.t, 'err) U.io
    (** [gethostbyname6 state ~nameserver domain] is the IPv6 address of
        [domain] resolved via the [state] and [nameserver] specified.
        It is the IPv6 equivalent of {!gethostbyname}.
    *)
end
| null | https://raw.githubusercontent.com/roburio/udns/585c40933ac3d5eceb351f7edd3f45cf2615a9f8/client/dns_client_flow.mli | ocaml | * A flow is a connection produced by {!U.connect}
* [io] is the type of an effect. ['err] is a polymorphic variant.
* An address for a given flow type, usually this will consist of
IP address + a TCP/IP or UDP/IP port number, but for some flow types
it can carry additional information for purposes of cryptographic
verification. TODO at least that would be nice in the future. TODO
* A stack with which to connect, e.g. {IPv4.tcpv4}
* The address of a nameserver that is supposed to work with
the underlying flow, can be used if the user does not want to
bother with configuring their own.
* [connect addr] is a new connection ([flow]) to [addr], or an error.
* [send flow buffer] sends [buffer] to the [flow] upstream.
* [recv flow] tries to read a [buffer] from the [flow] downstream.
* a.k.a. [>|=]
* a.k.a. [>>=]
* [nameserver t] returns the default nameserver to be used.
* [gethostbyname state ~nameserver domain] is the IPv4 address of [domain]
resolved via the [state] and [nameserver] specified.
If the query fails, or if the [domain] does not have any IPv4 addresses,
an [Error _] message is returned.
Any extraneous IPv4 addresses are ignored.
For an example of using this API, see [unix/ohost.ml]
in the distribution of this package.
| * TODO ideally there 'd be something like mirage - flow - lwt that did n't depend
on lwt and a ton of other things , and still provided [ map ]
and [ connect ] and so on . leaving this stuff here for now until a
better solution presents itself .
on lwt and a ton of other things, and still provided [map]
and [connect] and so on. leaving this stuff here for now until a
better solution presents itself.
*)
module type S = sig
type flow
type (+'ok,+'err) io constraint 'err = [> `Msg of string]
type io_addr
type ns_addr = [ `TCP | `UDP] * io_addr
* TODO well this is kind of crude ; it 's a tuple to prevent having
to do endless amounts of currying things when implementing flow types ,
and we need to know the protocol used so we can prefix packets for
DNS - over - TCP and set correct socket options etc . therefore we ca n't
just use the opaque [ io_addr ] .
TODO
to do endless amounts of currying things when implementing flow types,
and we need to know the protocol used so we can prefix packets for
DNS-over-TCP and set correct socket options etc. therefore we can't
just use the opaque [io_addr].
TODO*)
type stack
type t
* The abstract state of a DNS client .
val create : ?nameserver:ns_addr -> stack -> t
* [ create ~nameserver stack ] creates the state record of the DNS client .
val nameserver : t -> ns_addr
val connect : ?nameserver:ns_addr -> t -> (flow,'err) io
val send : flow -> Cstruct.t -> (unit,'err) io
val recv : flow -> (Cstruct.t, 'err) io
val resolve : ('ok,'err) io -> ('ok -> ('next,'err) result) -> ('next,'err) io
val map : ('ok,'err) io -> ('ok -> ('next,'err) io) -> ('next,'err) io
val lift : ('ok, 'err) result -> ('ok,'err) io
end
module Make : functor (U : S) ->
sig
val create : ?nameserver:U.ns_addr -> U.stack -> U.t
* [ create ~nameserver stack ] creates the state of the DNS client .
val nameserver : U.t -> U.ns_addr
val getaddrinfo : U.t -> ?nameserver:U.ns_addr -> 'response Dns.Rr_map.key ->
'a Domain_name.t -> ('response, 'err) U.io
* [ nameserver name ] is the [ query_type]-dependent
response from [ nameserver ] regarding [ name ] , or an [ Error _ ] message .
See { ! Dns_client.query_state } for more information about the
result types .
response from [nameserver] regarding [name], or an [Error _] message.
See {!Dns_client.query_state} for more information about the
result types.
*)
val gethostbyname : U.t -> ?nameserver:U.ns_addr -> [ `host ] Domain_name.t ->
(Ipaddr.V4.t, 'err) U.io
val gethostbyname6 : U.t -> ?nameserver:U.ns_addr -> [ `host ] Domain_name.t ->
(Ipaddr.V6.t, 'err) U.io
* [ gethostbyname6 state ~nameserver domain ] is the IPv6 address of
[ domain ] resolved via the [ state ] and [ nameserver ] specified .
It is the IPv6 equivalent of { ! } .
[domain] resolved via the [state] and [nameserver] specified.
It is the IPv6 equivalent of {!gethostbyname}.
*)
end
|
53389680f46ed6dcfaec3d1d30ce7a127a6fb50962a2e52a5f894e3b0c6daf10 | jyh/metaprl | ma_message__automata.ml | extends Nuprl_message__automata
open Dtactic
define pw_compat : "pw_ma-compat"[level:l]{'L} <--> all_list{'L; A_1.all_list{'L; A_2."ma-compat"[level:l]{'A_1;'A_2}}}
define pw_compat_list : pw_compat_list[level:l] <--> ({L:list{msga[level:l]} | "pw_ma-compat"[level:l]{'L}})
interactive empty_is_pw_compat {| intro[] |} :
sequent { <H> >- nil in pw_compat_list[level:l] }
interactive all_list_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- all_list{'l; x. 'P['x]} } -->
sequent { <H>; "assert"{bnot{'b}} >- all_list{'m; x. 'P['x]} } -->
sequent { <H> >- all_list{if 'b then 'l else 'm; x. 'P['x]} }
prim msga_wf {| intro[] |} :
sequent { <H> >- "type"{msga[level:l]} } =
it
prim join_wf {| intro[] |} :
sequent { <H> >- 'L in pw_compat_list[level:l] } -->
sequent { <H> >- "ma-join-list"{'L} in msga[level:l] } =
it
prim compat_wf {| intro[] |} :
sequent { <H> >- 'A in msga[level:l] } -->
sequent { <H> >- 'B in msga[level:l] } -->
sequent { <H> >- "type"{"ma-compat"[level:l]{'A;'B}} } =
it
prim comp_left_join {| intro[] |} :
[wf] sequent { <H> >- 'L in pw_compat_list[level:l] } -->
[wf] sequent { <H> >- 'B in msga[level:l] } -->
sequent { <H> >- all_list{'L; A."ma-compat"[level:l]{'A;'B}} } -->
sequent { <H> >- "ma-compat"[level:l]{"ma-join-list"{'L};'B} } =
it
prim comp_right_join {| intro[] |} :
[wf] sequent { <H> >- 'A in msga[level:l] } -->
[wf] sequent { <H> >- 'M in pw_compat_list[level:l] } -->
sequent { <H> >- all_list{'M; B."ma-compat"[level:l]{'A;'B}} } -->
sequent { <H> >- "ma-compat"[level:l]{'A; "ma-join-list"{'M}} } =
it
interactive comp_join_join {| intro[] |} :
[wf] sequent { <H> >- 'L in pw_compat_list[level:l] } -->
[wf] sequent { <H> >- 'M in pw_compat_list[level:l] } -->
sequent { <H> >- all_list{'L; A.all_list{'M; B."ma-compat"[level:l]{'A;'B}}} } -->
sequent { <H> >- "ma-compat"[level:l]{"ma-join-list"{'L}; "ma-join-list"{'M}} }
interactive comp_left_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- "ma-compat"[level:l]{'A;'C} } -->
sequent { <H>; "assert"{bnot{'b}} >- "ma-compat"[level:l]{'B;'C} } -->
sequent { <H> >- "ma-compat"[level:l]{ if 'b then 'A else 'B; 'C} }
interactive comp_right_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- "ma-compat"[level:l]{'C;'A} } -->
sequent { <H>; "assert"{bnot{'b}} >- "ma-compat"[level:l]{'C;'B} } -->
sequent { <H> >- "ma-compat"[level:l]{'C; if 'b then 'A else 'B} }
interactive nuprl_ma_empty_compat_left {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compat"[level:l]{"ma-empty"[]{};'"A"} }
interactive nuprl_ma_empty_compat_right {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compat"[level:l]{'"A";"ma-empty"[]{}} }
interactive nuprl_ma_empty_compatible_left {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compatible"[level:l]{"ma-empty"[]{};'"A"} }
interactive nuprl_ma_empty_compatible_right {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compatible"[level:l]{'"A";"ma-empty"[]{}} }
interactive state_single_elim {| elim[] |} 'H :
[wf] sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- "type"{'T} } -->
[wf] sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- 'x in Id } -->
sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; 's 'x in 'T; <J['s]> >- 'C['s]} -->
sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- 'C['s]}
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/mesa/ma_message__automata.ml | ocaml | extends Nuprl_message__automata
open Dtactic
define pw_compat : "pw_ma-compat"[level:l]{'L} <--> all_list{'L; A_1.all_list{'L; A_2."ma-compat"[level:l]{'A_1;'A_2}}}
define pw_compat_list : pw_compat_list[level:l] <--> ({L:list{msga[level:l]} | "pw_ma-compat"[level:l]{'L}})
interactive empty_is_pw_compat {| intro[] |} :
sequent { <H> >- nil in pw_compat_list[level:l] }
interactive all_list_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- all_list{'l; x. 'P['x]} } -->
sequent { <H>; "assert"{bnot{'b}} >- all_list{'m; x. 'P['x]} } -->
sequent { <H> >- all_list{if 'b then 'l else 'm; x. 'P['x]} }
prim msga_wf {| intro[] |} :
sequent { <H> >- "type"{msga[level:l]} } =
it
prim join_wf {| intro[] |} :
sequent { <H> >- 'L in pw_compat_list[level:l] } -->
sequent { <H> >- "ma-join-list"{'L} in msga[level:l] } =
it
prim compat_wf {| intro[] |} :
sequent { <H> >- 'A in msga[level:l] } -->
sequent { <H> >- 'B in msga[level:l] } -->
sequent { <H> >- "type"{"ma-compat"[level:l]{'A;'B}} } =
it
prim comp_left_join {| intro[] |} :
[wf] sequent { <H> >- 'L in pw_compat_list[level:l] } -->
[wf] sequent { <H> >- 'B in msga[level:l] } -->
sequent { <H> >- all_list{'L; A."ma-compat"[level:l]{'A;'B}} } -->
sequent { <H> >- "ma-compat"[level:l]{"ma-join-list"{'L};'B} } =
it
prim comp_right_join {| intro[] |} :
[wf] sequent { <H> >- 'A in msga[level:l] } -->
[wf] sequent { <H> >- 'M in pw_compat_list[level:l] } -->
sequent { <H> >- all_list{'M; B."ma-compat"[level:l]{'A;'B}} } -->
sequent { <H> >- "ma-compat"[level:l]{'A; "ma-join-list"{'M}} } =
it
interactive comp_join_join {| intro[] |} :
[wf] sequent { <H> >- 'L in pw_compat_list[level:l] } -->
[wf] sequent { <H> >- 'M in pw_compat_list[level:l] } -->
sequent { <H> >- all_list{'L; A.all_list{'M; B."ma-compat"[level:l]{'A;'B}}} } -->
sequent { <H> >- "ma-compat"[level:l]{"ma-join-list"{'L}; "ma-join-list"{'M}} }
interactive comp_left_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- "ma-compat"[level:l]{'A;'C} } -->
sequent { <H>; "assert"{bnot{'b}} >- "ma-compat"[level:l]{'B;'C} } -->
sequent { <H> >- "ma-compat"[level:l]{ if 'b then 'A else 'B; 'C} }
interactive comp_right_if {| intro[] |} :
[wf] sequent { <H> >- 'b in bool } -->
sequent { <H>; "assert"{'b} >- "ma-compat"[level:l]{'C;'A} } -->
sequent { <H>; "assert"{bnot{'b}} >- "ma-compat"[level:l]{'C;'B} } -->
sequent { <H> >- "ma-compat"[level:l]{'C; if 'b then 'A else 'B} }
interactive nuprl_ma_empty_compat_left {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compat"[level:l]{"ma-empty"[]{};'"A"} }
interactive nuprl_ma_empty_compat_right {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compat"[level:l]{'"A";"ma-empty"[]{}} }
interactive nuprl_ma_empty_compatible_left {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compatible"[level:l]{"ma-empty"[]{};'"A"} }
interactive nuprl_ma_empty_compatible_right {| intro[] |} :
[wf] sequent { <Gamma> >- '"A" in "msga"[level:l]{} } -->
sequent { <Gamma> >- "ma-compatible"[level:l]{'"A";"ma-empty"[]{}} }
interactive state_single_elim {| elim[] |} 'H :
[wf] sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- "type"{'T} } -->
[wf] sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- 'x in Id } -->
sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; 's 'x in 'T; <J['s]> >- 'C['s]} -->
sequent { <H>; s: "ma-state"{"fpf-single"{'x;'T}}; <J['s]> >- 'C['s]}
|
|
aa5df0fcaae364b974197b45b92991976da630b2b657d46a39722fb77fbcf765 | mfoemmel/erlang-otp | wxXmlResource.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxXmlResource</a>.
%% @type wxXmlResource(). An object reference, The representation is internal
%% and can be changed without notice. It can't be used for comparison
%% stored on disc or distributed for use on other nodes.
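%% Minimal usage sketch (illustrative only; the XRC file name, dialog name and
%% the Parent window below are assumptions, not taken from this module):
%%   Xrc = wxXmlResource:get(),
%%   wxXmlResource:initAllHandlers(Xrc),
%%   true = wxXmlResource:load(Xrc, "dialogs.xrc"),
%%   Dlg = wxXmlResource:loadDialog(Xrc, Parent, "my_dialog"),
%%   wxDialog:showModal(Dlg).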
-module(wxXmlResource).
-include("wxe.hrl").
-export([ xrcctrl/3 ,attachUnknownControl/3,attachUnknownControl/4,clearHandlers/1,
compareVersion/5,destroy/1,get/0,getFlags/1,getVersion/1,getXRCID/1,
getXRCID/2,initAllHandlers/1,load/2,loadBitmap/2,loadDialog/3,loadDialog/4,
loadFrame/3,loadFrame/4,loadIcon/2,loadMenu/2,loadMenuBar/2,loadMenuBar/3,
loadPanel/3,loadPanel/4,loadToolBar/3,new/0,new/1,new/2,set/1,setFlags/2,
unload/2]).
%% inherited exports
-export([parent_class/1]).
%% @hidden
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
%% @spec () -> wxXmlResource()
%% @equiv new([])
new() ->
new([]).
%% @spec ([Option]) -> wxXmlResource()
%% Option = {flags, integer()} | {domain, string()}
%% @doc See <a href="#wxxmlresourcewxxmlresource">external documentation</a>.
new(Options)
when is_list(Options) ->
MOpts = fun({flags, Flags}, Acc) -> [<<1:32/?UI,Flags:32/?UI>>|Acc];
({domain, Domain}, Acc) -> Domain_UC = unicode:characters_to_binary([Domain,0]),[<<2:32/?UI,(byte_size(Domain_UC)):32/?UI,(Domain_UC)/binary, 0:(((8- ((0+byte_size(Domain_UC)) band 16#7)) band 16#7))/unit:8>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxXmlResource_new_1,
<<BinOpt/binary>>).
%% @spec (Filemask::string(), [Option]) -> wxXmlResource()
%% Option = {flags, integer()} | {domain, string()}
%% @doc See <a href="#wxxmlresourcewxxmlresource">external documentation</a>.
new(Filemask, Options)
when is_list(Filemask),is_list(Options) ->
Filemask_UC = unicode:characters_to_binary([Filemask,0]),
MOpts = fun({flags, Flags}, Acc) -> [<<1:32/?UI,Flags:32/?UI>>|Acc];
({domain, Domain}, Acc) -> Domain_UC = unicode:characters_to_binary([Domain,0]),[<<2:32/?UI,(byte_size(Domain_UC)):32/?UI,(Domain_UC)/binary, 0:(((8- ((0+byte_size(Domain_UC)) band 16#7)) band 16#7))/unit:8>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxXmlResource_new_2,
<<(byte_size(Filemask_UC)):32/?UI,(Filemask_UC)/binary, 0:(((8- ((4+byte_size(Filemask_UC)) band 16#7)) band 16#7))/unit:8, BinOpt/binary>>).
%% @spec (This::wxXmlResource(), Name::string(), Control::wxWindow:wxWindow()) -> bool()
%% @equiv attachUnknownControl(This,Name,Control, [])
attachUnknownControl(This,Name,Control)
when is_record(This, wx_ref),is_list(Name),is_record(Control, wx_ref) ->
attachUnknownControl(This,Name,Control, []).
%% @spec (This::wxXmlResource(), Name::string(), Control::wxWindow:wxWindow(), [Option]) -> bool()
%% Option = {parent, wxWindow:wxWindow()}
%% @doc See <a href="#wxxmlresourceattachunknowncontrol">external documentation</a>.
attachUnknownControl(#wx_ref{type=ThisT,ref=ThisRef},Name,#wx_ref{type=ControlT,ref=ControlRef}, Options)
when is_list(Name),is_list(Options) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
?CLASS(ControlT,wxWindow),
MOpts = fun({parent, #wx_ref{type=ParentT,ref=ParentRef}}, Acc) -> ?CLASS(ParentT,wxWindow),[<<1:32/?UI,ParentRef:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:call(?wxXmlResource_AttachUnknownControl,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8,ControlRef:32/?UI, 0:32,BinOpt/binary>>).
%% @spec (This::wxXmlResource()) -> ok
%% @doc See <a href="#wxxmlresourceclearhandlers">external documentation</a>.
clearHandlers(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_ClearHandlers,
<<ThisRef:32/?UI>>).
%% @spec (This::wxXmlResource(), Major::integer(), Minor::integer(), Release::integer(), Revision::integer()) -> integer()
%% @doc See <a href="#wxxmlresourcecompareversion">external documentation</a>.
compareVersion(#wx_ref{type=ThisT,ref=ThisRef},Major,Minor,Release,Revision)
when is_integer(Major),is_integer(Minor),is_integer(Release),is_integer(Revision) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_CompareVersion,
<<ThisRef:32/?UI,Major:32/?UI,Minor:32/?UI,Release:32/?UI,Revision:32/?UI>>).
%% @spec () -> wxXmlResource()
%% @doc See <a href="#wxxmlresourceget">external documentation</a>.
get() ->
wxe_util:call(?wxXmlResource_Get,
<<>>).
%% @spec (This::wxXmlResource()) -> integer()
%% @doc See <a href="#wxxmlresourcegetflags">external documentation</a>.
getFlags(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_GetFlags,
<<ThisRef:32/?UI>>).
%% @spec (This::wxXmlResource()) -> integer()
%% @doc See <a href="#wxxmlresourcegetversion">external documentation</a>.
getVersion(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_GetVersion,
<<ThisRef:32/?UI>>).
%% @spec (Str_id::[string()]) -> integer()
%% @equiv getXRCID(Str_id, [])
getXRCID(Str_id)
when is_list(Str_id) ->
getXRCID(Str_id, []).
%% @spec (Str_id::[string()], [Option]) -> integer()
%% Option = {value_if_not_found, integer()}
%% @doc See <a href="#wxxmlresourcegetxrcid">external documentation</a>.
getXRCID(Str_id, Options)
when is_list(Str_id),is_list(Options) ->
Str_id_UC = unicode:characters_to_binary([Str_id,0]),
MOpts = fun({value_if_not_found, Value_if_not_found}, Acc) -> [<<1:32/?UI,Value_if_not_found:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:call(?wxXmlResource_GetXRCID,
<<(byte_size(Str_id_UC)):32/?UI,(Str_id_UC)/binary, 0:(((8- ((4+byte_size(Str_id_UC)) band 16#7)) band 16#7))/unit:8, BinOpt/binary>>).
%% @spec (This::wxXmlResource()) -> ok
%% @doc See <a href="#wxxmlresourceinitallhandlers">external documentation</a>.
initAllHandlers(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_InitAllHandlers,
<<ThisRef:32/?UI>>).
%% @spec (This::wxXmlResource(), Filemask::string()) -> bool()
%% @doc See <a href="#wxxmlresourceload">external documentation</a>.
load(#wx_ref{type=ThisT,ref=ThisRef},Filemask)
when is_list(Filemask) ->
?CLASS(ThisT,wxXmlResource),
Filemask_UC = unicode:characters_to_binary([Filemask,0]),
wxe_util:call(?wxXmlResource_Load,
<<ThisRef:32/?UI,(byte_size(Filemask_UC)):32/?UI,(Filemask_UC)/binary, 0:(((8- ((0+byte_size(Filemask_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Name::string()) -> wxBitmap:wxBitmap()
%% @doc See <a href="#wxxmlresourceloadbitmap">external documentation</a>.
loadBitmap(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadBitmap,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Parent::wxWindow:wxWindow(), Name::string()) -> wxDialog:wxDialog()
%% @doc See <a href="#wxxmlresourceloaddialog">external documentation</a>.
loadDialog(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadDialog_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Dlg::wxDialog:wxDialog(), Parent::wxWindow:wxWindow(), Name::string()) -> bool()
%% @doc See <a href="#wxxmlresourceloaddialog">external documentation</a>.
loadDialog(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=DlgT,ref=DlgRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(DlgT,wxDialog),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadDialog_3,
<<ThisRef:32/?UI,DlgRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Parent::wxWindow:wxWindow(), Name::string()) -> wxFrame:wxFrame()
%% @doc See <a href="#wxxmlresourceloadframe">external documentation</a>.
loadFrame(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadFrame_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Frame::wxFrame:wxFrame(), Parent::wxWindow:wxWindow(), Name::string()) -> bool()
%% @doc See <a href="#wxxmlresourceloadframe">external documentation</a>.
loadFrame(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=FrameT,ref=FrameRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(FrameT,wxFrame),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadFrame_3,
<<ThisRef:32/?UI,FrameRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Name::string()) -> wxIcon:wxIcon()
%% @doc See <a href="#wxxmlresourceloadicon">external documentation</a>.
loadIcon(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadIcon,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Name::string()) -> wxMenu:wxMenu()
%% @doc See <a href="#wxxmlresourceloadmenu">external documentation</a>.
loadMenu(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenu,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Name::string()) -> wxMenuBar:wxMenuBar()
%% @doc See <a href="/manuals/stable/wx_wxxmlresource.html#wxxmlresourceloadmenubar">external documentation</a>.
loadMenuBar(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenuBar_1,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Parent::wxWindow:wxWindow(), Name::string()) -> wxMenuBar:wxMenuBar()
%% @doc See <a href="/manuals/stable/wx_wxxmlresource.html#wxxmlresourceloadmenubar">external documentation</a>.
loadMenuBar(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenuBar_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Parent::wxWindow:wxWindow(), Name::string()) -> wxPanel:wxPanel()
%% @doc See <a href="#wxxmlresourceloadpanel">external documentation</a>.
loadPanel(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadPanel_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Panel::wxPanel:wxPanel(), Parent::wxWindow:wxWindow(), Name::string()) -> bool()
%% @doc See <a href="#wxxmlresourceloadpanel">external documentation</a>.
loadPanel(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=PanelT,ref=PanelRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(PanelT,wxPanel),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadPanel_3,
<<ThisRef:32/?UI,PanelRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxXmlResource(), Parent::wxWindow:wxWindow(), Name::string()) -> wxToolBar:wxToolBar()
%% @doc See <a href="#wxxmlresourceloadtoolbar">external documentation</a>.
loadToolBar(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadToolBar,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (Res::wxXmlResource()) -> wxXmlResource()
%% @doc See <a href="#wxxmlresourceset">external documentation</a>.
set(#wx_ref{type=ResT,ref=ResRef}) ->
?CLASS(ResT,wxXmlResource),
wxe_util:call(?wxXmlResource_Set,
<<ResRef:32/?UI>>).
%% @spec (This::wxXmlResource(), Flags::integer()) -> ok
%% @doc See <a href="#wxxmlresourcesetflags">external documentation</a>.
setFlags(#wx_ref{type=ThisT,ref=ThisRef},Flags)
when is_integer(Flags) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_SetFlags,
<<ThisRef:32/?UI,Flags:32/?UI>>).
%% @spec (This::wxXmlResource(), Filename::string()) -> bool()
%% @doc See <a href="#wxxmlresourceunload">external documentation</a>.
unload(#wx_ref{type=ThisT,ref=ThisRef},Filename)
when is_list(Filename) ->
?CLASS(ThisT,wxXmlResource),
Filename_UC = unicode:characters_to_binary([Filename,0]),
wxe_util:call(?wxXmlResource_Unload,
<<ThisRef:32/?UI,(byte_size(Filename_UC)):32/?UI,(Filename_UC)/binary, 0:(((8- ((0+byte_size(Filename_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (Window::wxWindow:wxWindow(), Name::string(), Type::atom()) -> wx:wxObject()
%% @doc Looks up a control with Name in a window created with XML
%% resources. You can use it to set/get values from controls.
%% The object is type casted to <b>Type</b>.
%% Example: <br />
%% Xrc = wxXmlResource:get(), <br />
%% Dlg = wxDialog:new(), <br />
%% true = wxXmlResource:loadDialog(Xrc, Dlg, Frame, ""), <br />
%% LCtrl = xrcctrl(Dlg, "controls_listctrl", wxListCtrl), <br />
%% wxListCtrl:insertColumn(LCtrl, 0, "Name", [{width, 200}]), <br />
xrcctrl(Window = #wx_ref{}, Name, Type) when is_list(Name), is_atom(Type) ->
    %% Func Id ?
ID = wxXmlResource:getXRCID(Name),
Res = wxWindow:findWindow(Window,ID),
wx:typeCast(Res, Type).
%% @spec (This::wxXmlResource()) -> ok
%% @doc Destroys this object, do not use object again
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxXmlResource),
wxe_util:destroy(?DESTROY_OBJECT,Obj),
ok.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxXmlResource.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxXmlResource</a>.
@type wxXmlResource(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparsion
stored on disc or distributed for use on other nodes.
inherited exports
@hidden
@equiv new([])
Option = {flags, integer()} | {domain, string()}
@doc See <a href="#wxxmlresourcewxxmlresource">external documentation</a>.
Option = {flags, integer()} | {domain, string()}
@doc See <a href="#wxxmlresourcewxxmlresource">external documentation</a>.
@equiv attachUnknownControl(This,Name,Control, [])
Option = {parent, wxWindow:wxWindow()}
@doc See <a href="#wxxmlresourceattachunknowncontrol">external documentation</a>.
@doc See <a href="#wxxmlresourceclearhandlers">external documentation</a>.
@doc See <a href="#wxxmlresourcecompareversion">external documentation</a>.
@doc See <a href="#wxxmlresourceget">external documentation</a>.
@doc See <a href="#wxxmlresourcegetflags">external documentation</a>.
@doc See <a href="#wxxmlresourcegetversion">external documentation</a>.
@spec (Str_id::[string()]) -> integer()
@equiv getXRCID(Str_id, [])
@spec (Str_id::[string()], [Option]) -> integer()
Option = {value_if_not_found, integer()}
@doc See <a href="#wxxmlresourcegetxrcid">external documentation</a>.
@doc See <a href="#wxxmlresourceinitallhandlers">external documentation</a>.
@doc See <a href="#wxxmlresourceload">external documentation</a>.
@doc See <a href="#wxxmlresourceloadbitmap">external documentation</a>.
@doc See <a href="#wxxmlresourceloaddialog">external documentation</a>.
@doc See <a href="#wxxmlresourceloaddialog">external documentation</a>.
@doc See <a href="#wxxmlresourceloadframe">external documentation</a>.
@doc See <a href="#wxxmlresourceloadframe">external documentation</a>.
@doc See <a href="#wxxmlresourceloadicon">external documentation</a>.
@doc See <a href="#wxxmlresourceloadmenu">external documentation</a>.
@doc See <a href="#wxxmlresourceloadpanel">external documentation</a>.
@doc See <a href="#wxxmlresourceloadpanel">external documentation</a>.
@doc See <a href="#wxxmlresourceloadtoolbar">external documentation</a>.
@doc See <a href="#wxxmlresourceset">external documentation</a>.
@doc See <a href="#wxxmlresourcesetflags">external documentation</a>.
@doc See <a href="#wxxmlresourceunload">external documentation</a>.
@doc Looks up a control with Name in a window created with XML
resources. You can use it to set/get values from controls.
The object is type casted to <b>Type</b>.
Example: <br />
Xrc = wxXmlResource:get(), <br />
Dlg = wxDialog:new(), <br />
@doc Destroys this object, do not use object again | Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(wxXmlResource).
-include("wxe.hrl").
-export([ xrcctrl/3 ,attachUnknownControl/3,attachUnknownControl/4,clearHandlers/1,
compareVersion/5,destroy/1,get/0,getFlags/1,getVersion/1,getXRCID/1,
getXRCID/2,initAllHandlers/1,load/2,loadBitmap/2,loadDialog/3,loadDialog/4,
loadFrame/3,loadFrame/4,loadIcon/2,loadMenu/2,loadMenuBar/2,loadMenuBar/3,
loadPanel/3,loadPanel/4,loadToolBar/3,new/0,new/1,new/2,set/1,setFlags/2,
unload/2]).
-export([parent_class/1]).
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
( ) - > wxXmlResource ( )
new() ->
new([]).
( [ Option ] ) - > wxXmlResource ( )
new(Options)
when is_list(Options) ->
MOpts = fun({flags, Flags}, Acc) -> [<<1:32/?UI,Flags:32/?UI>>|Acc];
({domain, Domain}, Acc) -> Domain_UC = unicode:characters_to_binary([Domain,0]),[<<2:32/?UI,(byte_size(Domain_UC)):32/?UI,(Domain_UC)/binary, 0:(((8- ((0+byte_size(Domain_UC)) band 16#7)) band 16#7))/unit:8>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxXmlResource_new_1,
<<BinOpt/binary>>).
@spec ( Filemask::string ( ) , [ Option ] ) - > wxXmlResource ( )
new(Filemask, Options)
when is_list(Filemask),is_list(Options) ->
Filemask_UC = unicode:characters_to_binary([Filemask,0]),
MOpts = fun({flags, Flags}, Acc) -> [<<1:32/?UI,Flags:32/?UI>>|Acc];
({domain, Domain}, Acc) -> Domain_UC = unicode:characters_to_binary([Domain,0]),[<<2:32/?UI,(byte_size(Domain_UC)):32/?UI,(Domain_UC)/binary, 0:(((8- ((0+byte_size(Domain_UC)) band 16#7)) band 16#7))/unit:8>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxXmlResource_new_2,
<<(byte_size(Filemask_UC)):32/?UI,(Filemask_UC)/binary, 0:(((8- ((4+byte_size(Filemask_UC)) band 16#7)) band 16#7))/unit:8, BinOpt/binary>>).
@spec ( ( ) , Name::string ( ) , Control::wxWindow : wxWindow ( ) ) - > bool ( )
attachUnknownControl(This,Name,Control)
when is_record(This, wx_ref),is_list(Name),is_record(Control, wx_ref) ->
attachUnknownControl(This,Name,Control, []).
@spec ( ( ) , Name::string ( ) , Control::wxWindow : wxWindow ( ) , [ Option ] ) - > bool ( )
attachUnknownControl(#wx_ref{type=ThisT,ref=ThisRef},Name,#wx_ref{type=ControlT,ref=ControlRef}, Options)
when is_list(Name),is_list(Options) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
?CLASS(ControlT,wxWindow),
MOpts = fun({parent, #wx_ref{type=ParentT,ref=ParentRef}}, Acc) -> ?CLASS(ParentT,wxWindow),[<<1:32/?UI,ParentRef:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:call(?wxXmlResource_AttachUnknownControl,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8,ControlRef:32/?UI, 0:32,BinOpt/binary>>).
@spec ( ( ) ) - > ok
clearHandlers(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_ClearHandlers,
<<ThisRef:32/?UI>>).
@spec ( ( ) , Major::integer ( ) , ( ) , Release::integer ( ) , Revision::integer ( ) ) - > integer ( )
compareVersion(#wx_ref{type=ThisT,ref=ThisRef},Major,Minor,Release,Revision)
when is_integer(Major),is_integer(Minor),is_integer(Release),is_integer(Revision) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_CompareVersion,
<<ThisRef:32/?UI,Major:32/?UI,Minor:32/?UI,Release:32/?UI,Revision:32/?UI>>).
( ) - > wxXmlResource ( )
get() ->
wxe_util:call(?wxXmlResource_Get,
<<>>).
@spec ( ( ) ) - > integer ( )
getFlags(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_GetFlags,
<<ThisRef:32/?UI>>).
@spec ( ( ) ) - > integer ( )
getVersion(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:call(?wxXmlResource_GetVersion,
<<ThisRef:32/?UI>>).
getXRCID(Str_id)
when is_list(Str_id) ->
getXRCID(Str_id, []).
getXRCID(Str_id, Options)
when is_list(Str_id),is_list(Options) ->
Str_id_UC = unicode:characters_to_binary([Str_id,0]),
MOpts = fun({value_if_not_found, Value_if_not_found}, Acc) -> [<<1:32/?UI,Value_if_not_found:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:call(?wxXmlResource_GetXRCID,
<<(byte_size(Str_id_UC)):32/?UI,(Str_id_UC)/binary, 0:(((8- ((4+byte_size(Str_id_UC)) band 16#7)) band 16#7))/unit:8, BinOpt/binary>>).
@spec ( ( ) ) - > ok
initAllHandlers(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_InitAllHandlers,
<<ThisRef:32/?UI>>).
@spec ( ( ) , Filemask::string ( ) ) - > bool ( )
load(#wx_ref{type=ThisT,ref=ThisRef},Filemask)
when is_list(Filemask) ->
?CLASS(ThisT,wxXmlResource),
Filemask_UC = unicode:characters_to_binary([Filemask,0]),
wxe_util:call(?wxXmlResource_Load,
<<ThisRef:32/?UI,(byte_size(Filemask_UC)):32/?UI,(Filemask_UC)/binary, 0:(((8- ((0+byte_size(Filemask_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Name::string ( ) ) - > wxBitmap : wxBitmap ( )
loadBitmap(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadBitmap,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > wxDialog : ( )
loadDialog(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadDialog_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , : ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > bool ( )
loadDialog(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=DlgT,ref=DlgRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(DlgT,wxDialog),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadDialog_3,
<<ThisRef:32/?UI,DlgRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > wxFrame : wxFrame ( )
loadFrame(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadFrame_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Frame::wxFrame : wxFrame ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > bool ( )
loadFrame(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=FrameT,ref=FrameRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(FrameT,wxFrame),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadFrame_3,
<<ThisRef:32/?UI,FrameRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Name::string ( ) ) - > wxIcon : wxIcon ( )
loadIcon(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadIcon,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Name::string ( ) ) - > wxMenu : wxMenu ( )
loadMenu(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenu,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Name::string ( ) ) - > wxMenuBar : wxMenuBar ( )
@doc See < a href=" / manuals / stable / wx_wxxmlresource.html#wxxmlresourceloadmenubar">external documentation</a > .
loadMenuBar(#wx_ref{type=ThisT,ref=ThisRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenuBar_1,
<<ThisRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > wxMenuBar : wxMenuBar ( )
@doc See < a href=" / manuals / stable / wx_wxxmlresource.html#wxxmlresourceloadmenubar">external documentation</a > .
loadMenuBar(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadMenuBar_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > wxPanel : wxPanel ( )
loadPanel(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadPanel_2,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Panel::wxPanel : wxPanel ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > bool ( )
loadPanel(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=PanelT,ref=PanelRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(PanelT,wxPanel),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadPanel_3,
<<ThisRef:32/?UI,PanelRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((0+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
@spec ( ( ) , Parent::wxWindow : wxWindow ( ) , Name::string ( ) ) - > wxToolBar : wxToolBar ( )
loadToolBar(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=ParentT,ref=ParentRef},Name)
when is_list(Name) ->
?CLASS(ThisT,wxXmlResource),
?CLASS(ParentT,wxWindow),
Name_UC = unicode:characters_to_binary([Name,0]),
wxe_util:call(?wxXmlResource_LoadToolBar,
<<ThisRef:32/?UI,ParentRef:32/?UI,(byte_size(Name_UC)):32/?UI,(Name_UC)/binary, 0:(((8- ((4+byte_size(Name_UC)) band 16#7)) band 16#7))/unit:8>>).
( ( ) ) - > wxXmlResource ( )
set(#wx_ref{type=ResT,ref=ResRef}) ->
?CLASS(ResT,wxXmlResource),
wxe_util:call(?wxXmlResource_Set,
<<ResRef:32/?UI>>).
@spec ( ( ) , Flags::integer ( ) ) - > ok
setFlags(#wx_ref{type=ThisT,ref=ThisRef},Flags)
when is_integer(Flags) ->
?CLASS(ThisT,wxXmlResource),
wxe_util:cast(?wxXmlResource_SetFlags,
<<ThisRef:32/?UI,Flags:32/?UI>>).
@spec ( ( ) , Filename::string ( ) ) - > bool ( )
unload(#wx_ref{type=ThisT,ref=ThisRef},Filename)
when is_list(Filename) ->
?CLASS(ThisT,wxXmlResource),
Filename_UC = unicode:characters_to_binary([Filename,0]),
wxe_util:call(?wxXmlResource_Unload,
<<ThisRef:32/?UI,(byte_size(Filename_UC)):32/?UI,(Filename_UC)/binary, 0:(((8- ((0+byte_size(Filename_UC)) band 16#7)) band 16#7))/unit:8>>).
( Window::wxWindow : ( ) , Type::atom ( ) ) - > wx : wxObject ( )
true = wxXmlResource : loadDialog(Xrc , Dlg , Frame , " " ) , < br / >
LCtrl = xrcctrl(Dlg , " controls_listctrl " , wxListCtrl ) , < br / >
wxListCtrl : insertColumn(LCtrl , 0 , " Name " , [ { width , 200 } ] ) , < br / >
xrcctrl(Window = #wx_ref{}, Name, Type) when is_list(Name), is_atom(Type) ->
Func I d ?
ID = wxXmlResource:getXRCID(Name),
Res = wxWindow:findWindow(Window,ID),
wx:typeCast(Res, Type).
@spec ( ( ) ) - > ok
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxXmlResource),
wxe_util:destroy(?DESTROY_OBJECT,Obj),
ok.
|
6c7b630ad9f49fd4b3cfcff98d5aea059f9012eb72bd412fab1cc448e03080e8 | cl-axon/shop2 | io-tests.lisp | ;;; -*- mode: common-lisp; coding: unix; -*-
;;;---------------------------------------------------------------------------
;;; Copyright 2010 Smart Information Flow Technologies, d/b/a SIFT, LLC
;;;
;;; This file made available together with the SHOP2 system, according to the
;;; SHOP2 system's license
;;;
;;;---------------------------------------------------------------------------
;;;
;;; Created [2010/05/19:rpg]
;;; File Description:
;;;
;;; This file is intended to supply a number of unit tests to
;;; determine whether or not SHOP2's processing of definitions is working.
;;;
;;;--------------------------------------------------------------------------
(in-package :arity-test)
(fiveam:def-suite io-tests)
(fiveam:in-suite io-tests)
(def-fixture empty-domain ()
(let ((*domain* (make-instance 'domain)))
(&body)))
(def-fixture op-def-domain ()
(let ((domain (make-instance 'domain)))
(setf (slot-value domain 'shop2::operators)
(make-hash-table :test 'eq))
(&body)))
(def-fixture method-def ()
(let ((meth '(:method (achieve-goals ?goals)
()
((assert-goals ?goals nil)
(find-nomove) (add-new-goals) (find-movable) (move-block)))))
(&body)))
(def-fixture complex-method-def ()
(let ((meth '(:method (find-movable)
(:first (clear ?x) (not (dont-move ?x))
(goal (on-table ?x)) (not (put-on-table ?x)))
; Decomposition
((!assert ((put-on-table ?x))) (find-movable))
(:first (clear ?x) (not (dont-move ?x)) (goal (on ?x ?y))
(not (stack-on-block ?x ?y)) (dont-move ?y) (clear ?y))
;Decomposition
((!assert ((stack-on-block ?x ?y))) (find-movable))
nil
nil)))
(&body)))
(test method-tests
(with-fixture empty-domain ()
(with-fixture method-def ()
(is (equal (let ((meth-def (shop2::process-method *domain* meth)))
;; there will be a gensym in the third position -- the name that is
;; automatically supplied
(setf (nth 2 meth-def) 'placeholder)
meth-def)
'(:method (achieve-goals ?goals)
placeholder
()
'(:ordered (:task assert-goals ?goals nil)
(:task find-nomove) (:task add-new-goals) (:task find-movable) (:task move-block))))))
(with-fixture complex-method-def ()
(is
(equal (let ((meth-def (shop2::process-method *domain* meth)))
;; replace all the
(subst-if 'placeholder
#'(lambda (x) (and x (symbolp x) (null (symbol-package x))))
meth-def))
'(:method (find-movable)
placeholder
(:first (clear ?x) (not (dont-move ?x)) (goal (on-table ?x)) (not (put-on-table ?x)))
'(:ordered (:task !assert ((put-on-table ?x))) (:task find-movable))
placeholder
(:first (clear ?x) (not (dont-move ?x)) (goal (on ?x ?y)) (not (stack-on-block ?x ?y)) (dont-move ?y) (clear ?y))
'(:ordered (:task !assert ((stack-on-block ?x ?y))) (:task find-movable))
placeholder nil '(:ordered (:task shop2::!!inop))))))))
(test check-operator-definitions
;; FIXME: it's possibly wrong to be depending on the accidental return of the operator object
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :operator '(:operator (!!delete-truck ?truck)
()
((typevp ?truck truck))
())))))
(is (equal (shop2::operator-head op) '(!!delete-truck ?truck)))
(is (null (shop2::operator-preconditions op)))
(is (null (shop2::operator-additions op)))
(is (equal (shop2::operator-deletions op) '((typevp ?truck truck))))
(is (= (shop2::operator-cost-fun op) 1.0)))
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :op '(:op (!!delete-truck ?truck)
:delete
((typevp ?truck truck)))))))
(is (equal (shop2::operator-head op) '(!!delete-truck ?truck)))
(is (null (shop2::operator-preconditions op)))
(is (null (shop2::operator-additions op)))
(is (equal (shop2::operator-deletions op) '((typevp ?truck truck))))
(is (= (shop2::operator-cost-fun op) 1.0)))
;; here's a big one
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :operator
'(:operator (!takeoff ?p ?flight-alt ?earliest-start ?start ?end)
;; preconditions
(
;; a/c starts at alt == 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
;; uninformed hack FIXME
(assign ?duration 10)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)
;; deletes
(
;; update fuel and position
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)
;; adds
(
;; update fuel and position
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
;; timelines for at update
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
;; timelines for fuel update
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)
;; cost
0)))))
(is (equal (shop2::operator-head op) '(!takeoff ?p ?flight-alt ?earliest-start ?start ?end)))
(is (equal (shop2::operator-preconditions op)
'(
;; a/c starts at alt == 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
;; uninformed hack FIXME
(assign ?duration 10)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)))
(is (equal (shop2::operator-additions op)
'(
;; update fuel and position
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
;; timelines for at update
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
;; timelines for fuel update
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)))
(is (equal (shop2::operator-deletions op) '(
;; update fuel and position
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)))
(is (= (shop2::operator-cost-fun op) 0)))
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :op
'(:op (!takeoff ?p ?flight-alt ?earliest-start ?start ?end)
:precond
(
;; a/c starts at alt == 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
;; uninformed hack FIXME
(assign ?duration 10)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)
:delete
(
;; update fuel and position
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)
:add
(
;; update fuel and position
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
;; timelines for at update
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
;; timelines for fuel update
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)
:cost
0)))))
(is (equal (shop2::operator-head op) '(!takeoff ?p ?flight-alt ?earliest-start ?start ?end)))
(is (equal (shop2::operator-preconditions op)
'(
;; a/c starts at alt == 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
;; uninformed hack FIXME
(assign ?duration 10)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)))
(is (equal (shop2::operator-additions op)
'(
;; update fuel and position
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
;; timelines for at update
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
;; timelines for fuel update
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)))
(is (equal (shop2::operator-deletions op) '(
;; update fuel and position
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
;; timelines for at update
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
;; timelines for fuel update
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)))
(is (= (shop2::operator-cost-fun op) 0))))
(test check-problem-deletion
(make-problem 'problem-for-deletion-test
'((foo x) (bar y))
'(achieve (bar x)))
(fiveam:is-true (find-problem 'problem-for-deletion-test))
(delete-problem 'problem-for-deletion-test)
(fiveam:is-false (find-problem 'problem-for-deletion-test nil)))
(in-package :shop2-user)
(defparameter arity-test::*expected-umt-plan*
'((!!ASSERT
((GOAL (CLEAR)) (GOAL (DELIVERED PACKAGE2 LOCATION5))
(GOAL (DELIVERED PACKAGE1 LOCATION4))
(GOAL (DELIVERED PACKAGE0 LOCATION1))))
(!!CHECK PACKAGE2) (!!ADD-PACKAGE-LOCAL PACKAGE2)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION3 LOCATION3)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION3 LOCATION2)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION2 LOCATION3)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION2 LOCATION2) (!!CHECK PACKAGE1)
(!!ADD-PACKAGE-ROAD PACKAGE1 ROAD_ROUTE1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION3 LOCATION1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION3 LOCATION0)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION2 LOCATION1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION2 LOCATION0) (!!CHECK PACKAGE0)
(!!ADD-PACKAGE-LOCAL PACKAGE0) (!COLLECT-FEES PACKAGE0)
(!!ADD-NEXT TRUCK3 LOCATION4) (!!EXP-WEIGHT-SET TRUCK3 CITY0 13)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE3 TRUCK3 LOCATION0 LOCATION4 CITY0)
(!!DELETE-PROTECTION (NEXT TRUCK3 LOCATION4)) (!!DEL-NEXT TRUCK3 LOCATION4)
(!!ADD-NEXT TRUCK3 LOCATION1) (!CONNECT-CHUTE TRUCK3)
(!FILL-HOPPER PACKAGE0 TRUCK3 LOCATION4) (!COLLECT-FEES PACKAGE1)
(!!ADD-NEXT TRUCK0 LOCATION5) (!!EXP-WEIGHT-SET TRUCK0 ROAD_ROUTE1 12)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE3 TRUCK0 LOCATION3 LOCATION5 CITY1)
(!!DELETE-PROTECTION (NEXT TRUCK0 LOCATION5)) (!!DEL-NEXT TRUCK0 LOCATION5)
(!!ADD-NEXT TRUCK0 LOCATION4) (!CONNECT-CHUTE TRUCK0)
(!FILL-HOPPER PACKAGE1 TRUCK0 LOCATION5) (!DELIVER PACKAGE2 LOCATION5)
(!DISCONNECT-CHUTE TRUCK3) (!DISCONNECT-CHUTE TRUCK0)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE2 TRUCK3 LOCATION4 LOCATION1 CITY0)
(!!DELETE-PROTECTION (NEXT TRUCK3 LOCATION1))
(!!EXP-WEIGHT-CLEAR TRUCK3 CITY0) (!CONNECT-CHUTE TRUCK3)
(!EMPTY-HOPPER PACKAGE0 TRUCK3 LOCATION1)
(!MOVE-VEHICLE-ROAD-ROUTE-CROSSCITY TRUCK0 LOCATION5 LOCATION4 CITY1 CITY0
ROAD_ROUTE1)
(!!DELETE-PROTECTION (NEXT TRUCK0 LOCATION4))
(!!EXP-WEIGHT-CLEAR TRUCK0 ROAD_ROUTE1) (!CONNECT-CHUTE TRUCK0)
(!EMPTY-HOPPER PACKAGE1 TRUCK0 LOCATION4) (!DISCONNECT-CHUTE TRUCK3)
(!DELIVER PACKAGE0 LOCATION1) (!DISCONNECT-CHUTE TRUCK0)
(!DELIVER PACKAGE1 LOCATION4) (!CLEAN-DOMAIN)))
(in-package :arity-test)
;;; FIXME: probably should undefine the problem and domain here.
(test test-include-directive
(shop2-user::define-partitioned-umt-domain)
(fiveam:is
(equalp
(shop2-user::remove-plan-costs
(first
(find-plans
'shop2-user::umt-partitioned.pfile1
:which :first
:verbose 0)))
*expected-umt-plan*)))
| null | https://raw.githubusercontent.com/cl-axon/shop2/9136e51f7845b46232cc17ca3618f515ddcf2787/tests/io-tests.lisp | lisp | -*- mode: common-lisp; coding: unix; -*-
---------------------------------------------------------------------------
---------------------------------------------------------------------------
File Description:
This file is intended to supply a number of unit tests to
--------------------------------------------------------------------------
Decomposition
Decomposition
automatically supplied
FIXME: it's possibly wrong to be depending on the accidental return of the operator object
preconditions
timelines for at update
timelines for fuel update
deletes
update fuel and position
timelines for at update
timelines for fuel update
adds
update fuel and position
timelines for at update
timelines for fuel update
cost
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
update fuel and position
timelines for at update
timelines for fuel update
FIXME: probably should undefine the problem and domain here. | Copyright 2010 Smart Information Flow Technologies , d / b / a SIFT , LLC
This file made available together with the SHOP2 system , according to the
SHOP2 system 's license
Created [ 2010/05/19 : rpg ]
determine whether or not SHOP2 's processing of definitions is working .
(in-package :arity-test)
(fiveam:def-suite io-tests)
(fiveam:in-suite io-tests)
(def-fixture empty-domain ()
(let ((*domain* (make-instance 'domain)))
(&body)))
(def-fixture op-def-domain ()
(let ((domain (make-instance 'domain)))
(setf (slot-value domain 'shop2::operators)
(make-hash-table :test 'eq))
(&body)))
(def-fixture method-def ()
(let ((meth '(:method (achieve-goals ?goals)
()
((assert-goals ?goals nil)
(find-nomove) (add-new-goals) (find-movable) (move-block)))))
(&body)))
(def-fixture complex-method-def ()
(let ((meth '(:method (find-movable)
(:first (clear ?x) (not (dont-move ?x))
(goal (on-table ?x)) (not (put-on-table ?x)))
((!assert ((put-on-table ?x))) (find-movable))
(:first (clear ?x) (not (dont-move ?x)) (goal (on ?x ?y))
(not (stack-on-block ?x ?y)) (dont-move ?y) (clear ?y))
((!assert ((stack-on-block ?x ?y))) (find-movable))
nil
nil)))
(&body)))
(test method-tests
(with-fixture empty-domain ()
(with-fixture method-def ()
(is (equal (let ((meth-def (shop2::process-method *domain* meth)))
there will be a gensym in the third position -- the name that is
(setf (nth 2 meth-def) 'placeholder)
meth-def)
'(:method (achieve-goals ?goals)
placeholder
()
'(:ordered (:task assert-goals ?goals nil)
(:task find-nomove) (:task add-new-goals) (:task find-movable) (:task move-block))))))
(with-fixture complex-method-def ()
(is
(equal (let ((meth-def (shop2::process-method *domain* meth)))
replace all the
(subst-if 'placeholder
#'(lambda (x) (and x (symbolp x) (null (symbol-package x))))
meth-def))
'(:method (find-movable)
placeholder
(:first (clear ?x) (not (dont-move ?x)) (goal (on-table ?x)) (not (put-on-table ?x)))
'(:ordered (:task !assert ((put-on-table ?x))) (:task find-movable))
placeholder
(:first (clear ?x) (not (dont-move ?x)) (goal (on ?x ?y)) (not (stack-on-block ?x ?y)) (dont-move ?y) (clear ?y))
'(:ordered (:task !assert ((stack-on-block ?x ?y))) (:task find-movable))
placeholder nil '(:ordered (:task shop2::!!inop))))))))
(test check-operator-definitions
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :operator '(:operator (!!delete-truck ?truck)
()
((typevp ?truck truck))
())))))
(is (equal (shop2::operator-head op) '(!!delete-truck ?truck)))
(is (null (shop2::operator-preconditions op)))
(is (null (shop2::operator-additions op)))
(is (equal (shop2::operator-deletions op) '((typevp ?truck truck))))
(is (= (shop2::operator-cost-fun op) 1.0)))
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :op '(:op (!!delete-truck ?truck)
:delete
((typevp ?truck truck)))))))
(is (equal (shop2::operator-head op) '(!!delete-truck ?truck)))
(is (null (shop2::operator-preconditions op)))
(is (null (shop2::operator-additions op)))
(is (equal (shop2::operator-deletions op) '((typevp ?truck truck))))
(is (= (shop2::operator-cost-fun op) 1.0)))
here 's a big one
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :operator
'(:operator (!takeoff ?p ?flight-alt ?earliest-start ?start ?end)
(
a / c starts at alt = = 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
uninformed hack FIXME
(assign ?duration 10)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)
(
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)
(
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)
0)))))
(is (equal (shop2::operator-head op) '(!takeoff ?p ?flight-alt ?earliest-start ?start ?end)))
(is (equal (shop2::operator-preconditions op)
'(
a / c starts at alt = = 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
uninformed hack FIXME
(assign ?duration 10)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)))
(is (equal (shop2::operator-additions op)
'(
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)))
(is (equal (shop2::operator-deletions op) '(
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)))
(is (= (shop2::operator-cost-fun op) 0)))
(let ((op (with-fixture op-def-domain ()
(shop2::parse-domain-item domain :op
'(:op (!takeoff ?p ?flight-alt ?earliest-start ?start ?end)
:precond
(
a / c starts at alt = = 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
uninformed hack FIXME
(assign ?duration 10)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)
:delete
(
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)
:add
(
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)
:cost
0)))))
(is (equal (shop2::operator-head op) '(!takeoff ?p ?flight-alt ?earliest-start ?start ?end)))
(is (equal (shop2::operator-preconditions op)
'(
a / c starts at alt = = 0
(= 0 ?alt)
(fuel ?p ?fuel)
(assign ?fuel-cost (takeoff-fuel-cost ?flight-alt))
(assign ?fuel-remaining (- ?fuel ?fuel-cost))
(call >= ?fuel-remaining 0)
uninformed hack FIXME
(assign ?duration 10)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
(assign ?start (max ?earliest-start ?t-write-at ?t-read-at ?t-write-fuel ?t-read-fuel))
(assign ?end (+ ?start ?duration))
)))
(is (equal (shop2::operator-additions op)
'(
(at ?p (pos ?north ?east ?flight-alt))
(fuel ?p ?fuel-remaining)
(write-time (at ?p) ?end)
(read-time (at ?p) ?end)
(write-time (fuel ?p) ?end)
(read-time (fuel ?p) ?end)
)))
(is (equal (shop2::operator-deletions op) '(
(at ?p (pos ?north ?east ?alt))
(fuel ?p ?fuel)
(write-time (at ?p) ?t-write-at)
(read-time (at ?p) ?t-read-at)
(write-time (fuel ?p) ?t-write-fuel)
(read-time (fuel ?p) ?t-read-fuel)
)))
(is (= (shop2::operator-cost-fun op) 0))))
(test check-problem-deletion
(make-problem 'problem-for-deletion-test
'((foo x) (bar y))
'(achieve (bar x)))
(fiveam:is-true (find-problem 'problem-for-deletion-test))
(delete-problem 'problem-for-deletion-test)
(fiveam:is-false (find-problem 'problem-for-deletion-test nil)))
(in-package :shop2-user)
(defparameter arity-test::*expected-umt-plan*
'((!!ASSERT
((GOAL (CLEAR)) (GOAL (DELIVERED PACKAGE2 LOCATION5))
(GOAL (DELIVERED PACKAGE1 LOCATION4))
(GOAL (DELIVERED PACKAGE0 LOCATION1))))
(!!CHECK PACKAGE2) (!!ADD-PACKAGE-LOCAL PACKAGE2)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION3 LOCATION3)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION3 LOCATION2)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION2 LOCATION3)
(!!ADD-PACKAGE-NN PACKAGE2 LOCATION2 LOCATION2) (!!CHECK PACKAGE1)
(!!ADD-PACKAGE-ROAD PACKAGE1 ROAD_ROUTE1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION3 LOCATION1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION3 LOCATION0)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION2 LOCATION1)
(!!ADD-PACKAGE-NN PACKAGE1 LOCATION2 LOCATION0) (!!CHECK PACKAGE0)
(!!ADD-PACKAGE-LOCAL PACKAGE0) (!COLLECT-FEES PACKAGE0)
(!!ADD-NEXT TRUCK3 LOCATION4) (!!EXP-WEIGHT-SET TRUCK3 CITY0 13)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE3 TRUCK3 LOCATION0 LOCATION4 CITY0)
(!!DELETE-PROTECTION (NEXT TRUCK3 LOCATION4)) (!!DEL-NEXT TRUCK3 LOCATION4)
(!!ADD-NEXT TRUCK3 LOCATION1) (!CONNECT-CHUTE TRUCK3)
(!FILL-HOPPER PACKAGE0 TRUCK3 LOCATION4) (!COLLECT-FEES PACKAGE1)
(!!ADD-NEXT TRUCK0 LOCATION5) (!!EXP-WEIGHT-SET TRUCK0 ROAD_ROUTE1 12)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE3 TRUCK0 LOCATION3 LOCATION5 CITY1)
(!!DELETE-PROTECTION (NEXT TRUCK0 LOCATION5)) (!!DEL-NEXT TRUCK0 LOCATION5)
(!!ADD-NEXT TRUCK0 LOCATION4) (!CONNECT-CHUTE TRUCK0)
(!FILL-HOPPER PACKAGE1 TRUCK0 LOCATION5) (!DELIVER PACKAGE2 LOCATION5)
(!DISCONNECT-CHUTE TRUCK3) (!DISCONNECT-CHUTE TRUCK0)
(!MOVE-VEHICLE-LOCAL-ROAD-ROUTE2 TRUCK3 LOCATION4 LOCATION1 CITY0)
(!!DELETE-PROTECTION (NEXT TRUCK3 LOCATION1))
(!!EXP-WEIGHT-CLEAR TRUCK3 CITY0) (!CONNECT-CHUTE TRUCK3)
(!EMPTY-HOPPER PACKAGE0 TRUCK3 LOCATION1)
(!MOVE-VEHICLE-ROAD-ROUTE-CROSSCITY TRUCK0 LOCATION5 LOCATION4 CITY1 CITY0
ROAD_ROUTE1)
(!!DELETE-PROTECTION (NEXT TRUCK0 LOCATION4))
(!!EXP-WEIGHT-CLEAR TRUCK0 ROAD_ROUTE1) (!CONNECT-CHUTE TRUCK0)
(!EMPTY-HOPPER PACKAGE1 TRUCK0 LOCATION4) (!DISCONNECT-CHUTE TRUCK3)
(!DELIVER PACKAGE0 LOCATION1) (!DISCONNECT-CHUTE TRUCK0)
(!DELIVER PACKAGE1 LOCATION4) (!CLEAN-DOMAIN)))
(in-package :arity-test)
(test test-include-directive
(shop2-user::define-partitioned-umt-domain)
(fiveam:is
(equalp
(shop2-user::remove-plan-costs
(first
(find-plans
'shop2-user::umt-partitioned.pfile1
:which :first
:verbose 0)))
*expected-umt-plan*)))
|
3c54e477b1e5d6ea44e79c2cd07e0e6de6063b176705c0e174ceb7a8d8264cdf | Reisen/pixel | Error.hs | module Pixel.Error
( Error(..)
) where
import Protolude
import Pixel.Model.Users.Error ( UserError(..) )
--------------------------------------------------------------------------------
data AuthenticationError
= TokenExpired
| MissingToken
deriving Show
data Error
= AuthError !AuthenticationError
| UserError !UserError
| UnknownError
deriving Show
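-- Editor's sketch, not part of the original module: authentication failures
-- are wrapped into the application-wide error type like this.
expiredTokenError :: Error
expiredTokenError = AuthError TokenExpired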
| null | https://raw.githubusercontent.com/Reisen/pixel/9096cc2c5b909049cdca6d14856ffc1fc99d81b5/src/lib/Pixel/Error.hs | haskell | ------------------------------------------------------------------------------ | module Pixel.Error
( Error(..)
) where
import Protolude
import Pixel.Model.Users.Error ( UserError(..) )
data AuthenticationError
= TokenExpired
| MissingToken
deriving Show
data Error
= AuthError !AuthenticationError
| UserError !UserError
| UnknownError
deriving Show
|
c4e70168476534f3426f5aff7a1aa7c6101abb04a44efef8021fe98b95950fb0 | johnlawrenceaspden/hobby-code | knapsack.clj | ;; The Knapsack Problem
;; Suppose you've got twelve pounds
(def budget 12)
;; And there's a thing that costs a pound, but is worth 20.
;; And another thing that costs 3, but is worth 30
;; And another thing that costs 3, but is worth 21
;; And a thing that costs 6 but is worth 40
(def things (map (fn[[c v]] {:cost c :value v}) [[1 20][3 30][3 21][6 40]]))
(defn price [things] (reduce + (map :cost things)))
(defn evaluate [things] (reduce + (map :value things)))
;; -> 111
;; -> 13
;; So there's 111's worth of things going for 13, but you can't buy everything.
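;; (Editor's sketch, not in the original file: the two figures quoted above
;; come straight from the helpers just defined.)
(evaluate things) ;; -> 111
(price things)    ;; -> 13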
;; What do you buy?
;; If you can put them in an order, then you can buy as many as you can afford
(defn value [sorted-things budget]
(evaluate
(let [ baskets (reductions conj '() sorted-things)]
(last (take-while #(<= (price %) budget) baskets)))))
;; So if you're a cynic
;; -> 71
;; Then you come away with 71's worth
;; And if you're an idealist
;; -> 91
;; Then you do way better with 91
;; A more cunning approach is to take things in order of their price/value ratio
;; -> 71
;; Sadly that does worse than the approach that only pays attention to the value.
;; So it seems that out of the three natural-seeming 'greedy algorithms', the best solution is 91
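;; (Editor's sketch, not in the original file: one plausible way to feed the
;; three greedy orderings above into `value`; which exact orderings the quoted
;; numbers refer to is an assumption.)
(value (sort-by :cost things) budget)                     ;; cheapest first (cynic) -> 71
(value (reverse (sort-by :value things)) budget)          ;; most valuable first (idealist) -> 91
(value (sort-by #(/ (:cost %) (:value %)) things) budget) ;; best cost/value ratio first -> 71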
;; Another approach is to exhaustively search the space of possibilities:
(defn subsets [things]
(if (empty? things) '(())
(let [srt (subsets (rest things))]
(concat (map #(cons (first things) %) srt) srt))))
(reverse (sort-by second (for [i (subsets things)] [(price i) (evaluate i)])))
;; -> ([13 111] [12 91] [10 90] [10 81] [7 71] [9 70] [9 61] [7 60] [6 51] [4 50] [4 41] [6 40] [3 30] [3 21] [1 20] [0 0])
;; Which tells us that the best combination is unaffordable, so we
;; have to settle for the second best, which is paying 12 to get 91,
;; which the idealist has been trying to tell us all along.
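;; (Editor's sketch, not in the original file: the exhaustive enumeration can
;; also pick the winner directly.)
(apply max-key evaluate
       (filter #(<= (price %) budget) (subsets things)))
;; -> the 91-point basket, matching the [12 91] row above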
;; But the idealistic approach is unlikely to work in the general case.
;; Consider a thing which is worth a lot, but horribly expensive, and
;; lots of other things which are worth a fair bit and dirt cheap.
;; Personally my money would have been on the 'buy things in order of
;; price/value ratio' approach, but we saw above that that fails in at
;; least one easy case.
;; So it appears that if we are faced with a problem like this, (and
;; there are many such problems), then we are doomed.
;; Exhaustive search is not feasible once you've got more than a very
;; few items, and yet the various greedy algorithms above will all get
;; the wrong answers.
;; And yet if you write down a knapsack problem like this, you will
;; not find it appallingly difficult to pick the best arrangement.
;; There is a certain tradition at this point of exclaiming 'The HUMAN
;; BRAIN is performing a COMPUTATION INFEASIBLE for a CLASSICAL
;; COMPUTER', and then going on to derive your favourite philosophical
;; position on the nature of consciousness, which will miraculously
;; turn out to be whatever it was you thought before you contemplated
;; the problem in question.
;; But wait ...
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/knapsack.clj | clojure | The Knapsack Problem
What do you buy?
If you can put them in an order, then you can buy as many as you can afford
So if you're a cynic
And if you're an idealist
A more cunning approach is to take things in order of their price/value ratio
Sadly that does worse than the approach that only pays attention to the value.
Another approach is to exhaustively search the space of possibilities:
Which tells us that the best combination is unaffordable, so we
which the idealist has been trying to tell us all along.
But the idealistic approach is unlikely to work in the general case.
Consider a thing which is worth a lot, but horribly expensive, and
lots of other things which are worth a fair bit and dirt cheap.
Personally my money would have been on the 'buy things in order of
price/value ratio' approach, but we saw above that that fails in at
least one easy case.
So it appears that if we are faced with a problem like this, ( and
there are many such problems ), then we are doomed.
Exhaustive search is not feasible once you've got more than a very
few items, and yet the various greedy algorithms above will all get
the wrong answers.
And yet if you write down a knapsack problem like this, you will
not find it appallingly difficult to pick the best arrangement.
There is a certain tradition at this point of exclaiming 'The HUMAN
COMPUTER', and then going on to derive your favourite philosophical
position on the nature of consciousness, which will miraculously
turn out to be whatever it was you thought before you contemplated
the problem in question.
But wait ... |
Suppose you 've got twelve pounds
(def budget 12)
And there 's a thing that costs a pound , but is worth 20 .
And another thing that costs 3 , but is worth 30
And another thing that costs 3 , but is worth 21
And a thing that costs 6 but is worth 40
(def things (map (fn[[c v]] {:cost c :value v}) [[1 20][3 30][3 21][6 40]]))
(defn price [things] (reduce + (map :cost things)))
(defn evaluate [things] (reduce + (map :value things)))
- > 111
- > 13
So there 's 111 's worth of things going for 13 , but you ca n't buy everything .
(defn value [sorted-things budget]
(evaluate
(let [ baskets (reductions conj '() sorted-things)]
(last (take-while #(<= (price %) budget) baskets)))))
- > 71
Then you come away with 71 's worth
- > 91
Then you do way better with 91
- > 71
So it seems that out of the three natural - seeming ' greedy algorithms ' , the best solution is 91
(defn subsets [things]
(if (empty? things) '(())
(let [srt (subsets (rest things))]
(concat (map #(cons (first things) %) srt) srt))))
(reverse (sort-by second (for [i (subsets things)] [(price i) (evaluate i)])))
- > ( [ 13 111 ] [ 12 91 ] [ 10 90 ] [ 10 81 ] [ 7 71 ] [ 9 70 ] [ 9 61 ] [ 7 60 ] [ 6 51 ] [ 4 50 ] [ 4 41 ] [ 6 40 ] [ 3 30 ] [ 3 21 ] [ 1 20 ] [ 0 0 ] )
have to settle for the second best , which is paying 12 to get 91 ,
BRAIN is performing a COMPUTATION INFEASIBLE for a CLASSICAL
|
e25857223867d870861a7f450f749b8ab86c631dbeb92615d129e2f2cf8a46fb | xapi-project/message-switch | vdi_automaton_test.ml |
(*
 * Copyright Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
(* For any state [s] and operation [o] where [s' = s + o], [if s <> s' then s -
s' = op] *)
let all_pairs x y =
List.fold_left (fun acc x -> List.map (fun y -> (x, y)) y @ acc) [] x
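(* Editor's sketch, not part of the original test: [all_pairs] builds the
   cartesian product, e.g. with the fold above the binding below evaluates to
   [(2, "a"); (2, "b"); (1, "a"); (1, "b")]. *)
let _example_pairs : (int * string) list = all_pairs [1; 2] ["a"; "b"]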
let run () =
List.iter
(fun (s, op) ->
try
let s' = Vdi_automaton.(s + op) in
let op' = List.map fst Vdi_automaton.(s - s') in
if s <> s' && [op] <> op' then
failwith
Vdi_automaton.(
Printf.sprintf "s = %s; op = %s; s + op = %s; s - (s + op) = %s"
(string_of_state s) (string_of_op op) (string_of_state s')
(String.concat ", " (List.map string_of_op op')))
with Vdi_automaton.Bad_transition (_, _) -> ())
(all_pairs Vdi_automaton.every_state Vdi_automaton.every_op) ;
Printf.printf "Passed."
let tests = [("VDI automaton test", `Quick, run)]
| null | https://raw.githubusercontent.com/xapi-project/message-switch/1d0d1aa45c01eba144ac2826d0d88bb663e33101/xapi-idl/storage/vdi_automaton_test.ml | ocaml | For any state [s] and operation [o] where [s' = s + o], [if s <> s' then s -
s' = op] |
* Copyright Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
let all_pairs x y =
List.fold_left (fun acc x -> List.map (fun y -> (x, y)) y @ acc) [] x
let run () =
List.iter
(fun (s, op) ->
try
let s' = Vdi_automaton.(s + op) in
let op' = List.map fst Vdi_automaton.(s - s') in
if s <> s' && [op] <> op' then
failwith
Vdi_automaton.(
Printf.sprintf "s = %s; op = %s; s + op = %s; s - (s + op) = %s"
(string_of_state s) (string_of_op op) (string_of_state s')
(String.concat ", " (List.map string_of_op op')))
with Vdi_automaton.Bad_transition (_, _) -> ())
(all_pairs Vdi_automaton.every_state Vdi_automaton.every_op) ;
Printf.printf "Passed."
let tests = [("VDI automaton test", `Quick, run)]
|
7433ce96fc5048214693732aa62a66bf67da074902a53c83095f7a6757667a31 | audreyt/openafp | BFG.hs |
module OpenAFP.Records.AFP.BFG where
import OpenAFP.Types
import OpenAFP.Internals
data BFG = BFG {
bfg_Type :: !N3
,bfg_ :: !N3
,bfg :: !NStr
} deriving (Show, Typeable)
| null | https://raw.githubusercontent.com/audreyt/openafp/178e0dd427479ac7b8b461e05c263e52dd614b73/src/OpenAFP/Records/AFP/BFG.hs | haskell |
module OpenAFP.Records.AFP.BFG where
import OpenAFP.Types
import OpenAFP.Internals
data BFG = BFG {
bfg_Type :: !N3
,bfg_ :: !N3
,bfg :: !NStr
} deriving (Show, Typeable)
|
|
adf0da60c7defeb3b46056fe2d68ba25412276b8b96d49d09f37ac0cbf94b075 | brendanhay/amazonka | WorkspaceConnectionStatus.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.

-- |
-- Module      : Amazonka.WorkSpaces.Types.WorkspaceConnectionStatus
-- Copyright   : (c) 2013-2023 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <brendan.g.hay+amazonka@gmail.com>
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.WorkSpaces.Types.WorkspaceConnectionStatus where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.WorkSpaces.Types.ConnectionState
-- | Describes the connection status of a WorkSpace.
--
-- /See:/ 'newWorkspaceConnectionStatus' smart constructor.
data WorkspaceConnectionStatus = WorkspaceConnectionStatus'
  { -- | The connection state of the WorkSpace. The connection state is unknown
    -- if the WorkSpace is stopped.
connectionState :: Prelude.Maybe ConnectionState,
-- | The timestamp of the connection status check.
connectionStateCheckTimestamp :: Prelude.Maybe Data.POSIX,
-- | The timestamp of the last known user connection.
lastKnownUserConnectionTimestamp :: Prelude.Maybe Data.POSIX,
    -- | The identifier of the WorkSpace.
workspaceId :: Prelude.Maybe Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'WorkspaceConnectionStatus' with all optional fields omitted.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or <https://hackage.haskell.org/package/optics optics> to modify other optional fields.
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'connectionState', 'workspaceConnectionStatus_connectionState' - The connection state of the WorkSpace. The connection state is unknown
-- if the WorkSpace is stopped.
--
-- 'connectionStateCheckTimestamp', 'workspaceConnectionStatus_connectionStateCheckTimestamp' - The timestamp of the connection status check.
--
-- 'lastKnownUserConnectionTimestamp', 'workspaceConnectionStatus_lastKnownUserConnectionTimestamp' - The timestamp of the last known user connection.
--
-- 'workspaceId', 'workspaceConnectionStatus_workspaceId' - The identifier of the WorkSpace.
newWorkspaceConnectionStatus ::
WorkspaceConnectionStatus
newWorkspaceConnectionStatus =
WorkspaceConnectionStatus'
{ connectionState =
Prelude.Nothing,
connectionStateCheckTimestamp = Prelude.Nothing,
lastKnownUserConnectionTimestamp =
Prelude.Nothing,
workspaceId = Prelude.Nothing
}
-- | The connection state of the WorkSpace. The connection state is unknown
-- if the WorkSpace is stopped.
workspaceConnectionStatus_connectionState :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe ConnectionState)
workspaceConnectionStatus_connectionState = Lens.lens (\WorkspaceConnectionStatus' {connectionState} -> connectionState) (\s@WorkspaceConnectionStatus' {} a -> s {connectionState = a} :: WorkspaceConnectionStatus)
-- | The timestamp of the connection status check.
workspaceConnectionStatus_connectionStateCheckTimestamp :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.UTCTime)
workspaceConnectionStatus_connectionStateCheckTimestamp = Lens.lens (\WorkspaceConnectionStatus' {connectionStateCheckTimestamp} -> connectionStateCheckTimestamp) (\s@WorkspaceConnectionStatus' {} a -> s {connectionStateCheckTimestamp = a} :: WorkspaceConnectionStatus) Prelude.. Lens.mapping Data._Time
-- | The timestamp of the last known user connection.
workspaceConnectionStatus_lastKnownUserConnectionTimestamp :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.UTCTime)
workspaceConnectionStatus_lastKnownUserConnectionTimestamp = Lens.lens (\WorkspaceConnectionStatus' {lastKnownUserConnectionTimestamp} -> lastKnownUserConnectionTimestamp) (\s@WorkspaceConnectionStatus' {} a -> s {lastKnownUserConnectionTimestamp = a} :: WorkspaceConnectionStatus) Prelude.. Lens.mapping Data._Time
-- | The identifier of the WorkSpace.
workspaceConnectionStatus_workspaceId :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.Text)
workspaceConnectionStatus_workspaceId = Lens.lens (\WorkspaceConnectionStatus' {workspaceId} -> workspaceId) (\s@WorkspaceConnectionStatus' {} a -> s {workspaceId = a} :: WorkspaceConnectionStatus)
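-- Editor's sketch, not part of the generated module: a minimal usage example.
-- 'newWorkspaceConnectionStatus' starts with every field set to
-- 'Prelude.Nothing'; fields are then filled in with record update syntax or
-- the lenses above. The WorkSpace ID here is a made-up placeholder.
exampleWorkspaceConnectionStatus :: WorkspaceConnectionStatus
exampleWorkspaceConnectionStatus =
  newWorkspaceConnectionStatus
    { workspaceId = Prelude.Just "ws-0123456789"
    }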
instance Data.FromJSON WorkspaceConnectionStatus where
parseJSON =
Data.withObject
"WorkspaceConnectionStatus"
( \x ->
WorkspaceConnectionStatus'
Prelude.<$> (x Data..:? "ConnectionState")
Prelude.<*> (x Data..:? "ConnectionStateCheckTimestamp")
Prelude.<*> (x Data..:? "LastKnownUserConnectionTimestamp")
Prelude.<*> (x Data..:? "WorkspaceId")
)
instance Prelude.Hashable WorkspaceConnectionStatus where
hashWithSalt _salt WorkspaceConnectionStatus' {..} =
_salt `Prelude.hashWithSalt` connectionState
`Prelude.hashWithSalt` connectionStateCheckTimestamp
`Prelude.hashWithSalt` lastKnownUserConnectionTimestamp
`Prelude.hashWithSalt` workspaceId
instance Prelude.NFData WorkspaceConnectionStatus where
rnf WorkspaceConnectionStatus' {..} =
Prelude.rnf connectionState
`Prelude.seq` Prelude.rnf connectionStateCheckTimestamp
`Prelude.seq` Prelude.rnf lastKnownUserConnectionTimestamp
`Prelude.seq` Prelude.rnf workspaceId
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-workspaces/gen/Amazonka/WorkSpaces/Types/WorkspaceConnectionStatus.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
| The timestamp of the connection status check.
| The timestamp of the last known user connection.
|
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'connectionStateCheckTimestamp', 'workspaceConnectionStatus_connectionStateCheckTimestamp' - The timestamp of the connection status check.
'lastKnownUserConnectionTimestamp', 'workspaceConnectionStatus_lastKnownUserConnectionTimestamp' - The timestamp of the last known user connection.
| The timestamp of the connection status check.
| The timestamp of the last known user connection. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . WorkSpaces . Types . WorkspaceConnectionStatus
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.WorkSpaces.Types.WorkspaceConnectionStatus where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.WorkSpaces.Types.ConnectionState
| Describes the connection status of a WorkSpace .
/See:/ ' newWorkspaceConnectionStatus ' smart constructor .
data WorkspaceConnectionStatus = WorkspaceConnectionStatus'
| The connection state of the WorkSpace . The connection state is unknown
if the WorkSpace is stopped .
connectionState :: Prelude.Maybe ConnectionState,
connectionStateCheckTimestamp :: Prelude.Maybe Data.POSIX,
lastKnownUserConnectionTimestamp :: Prelude.Maybe Data.POSIX,
| The identifier of the WorkSpace .
workspaceId :: Prelude.Maybe Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Create a value of ' WorkspaceConnectionStatus ' with all optional fields omitted .
Use < -lens generic - lens > or < optics > to modify other optional fields .
' connectionState ' , ' workspaceConnectionStatus_connectionState ' - The connection state of the WorkSpace . The connection state is unknown
if the WorkSpace is stopped .
' workspaceId ' , ' workspaceConnectionStatus_workspaceId ' - The identifier of the WorkSpace .
newWorkspaceConnectionStatus ::
WorkspaceConnectionStatus
newWorkspaceConnectionStatus =
WorkspaceConnectionStatus'
{ connectionState =
Prelude.Nothing,
connectionStateCheckTimestamp = Prelude.Nothing,
lastKnownUserConnectionTimestamp =
Prelude.Nothing,
workspaceId = Prelude.Nothing
}
| The connection state of the WorkSpace . The connection state is unknown
if the WorkSpace is stopped .
workspaceConnectionStatus_connectionState :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe ConnectionState)
workspaceConnectionStatus_connectionState = Lens.lens (\WorkspaceConnectionStatus' {connectionState} -> connectionState) (\s@WorkspaceConnectionStatus' {} a -> s {connectionState = a} :: WorkspaceConnectionStatus)
workspaceConnectionStatus_connectionStateCheckTimestamp :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.UTCTime)
workspaceConnectionStatus_connectionStateCheckTimestamp = Lens.lens (\WorkspaceConnectionStatus' {connectionStateCheckTimestamp} -> connectionStateCheckTimestamp) (\s@WorkspaceConnectionStatus' {} a -> s {connectionStateCheckTimestamp = a} :: WorkspaceConnectionStatus) Prelude.. Lens.mapping Data._Time
workspaceConnectionStatus_lastKnownUserConnectionTimestamp :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.UTCTime)
workspaceConnectionStatus_lastKnownUserConnectionTimestamp = Lens.lens (\WorkspaceConnectionStatus' {lastKnownUserConnectionTimestamp} -> lastKnownUserConnectionTimestamp) (\s@WorkspaceConnectionStatus' {} a -> s {lastKnownUserConnectionTimestamp = a} :: WorkspaceConnectionStatus) Prelude.. Lens.mapping Data._Time
| The identifier of the WorkSpace .
workspaceConnectionStatus_workspaceId :: Lens.Lens' WorkspaceConnectionStatus (Prelude.Maybe Prelude.Text)
workspaceConnectionStatus_workspaceId = Lens.lens (\WorkspaceConnectionStatus' {workspaceId} -> workspaceId) (\s@WorkspaceConnectionStatus' {} a -> s {workspaceId = a} :: WorkspaceConnectionStatus)
instance Data.FromJSON WorkspaceConnectionStatus where
parseJSON =
Data.withObject
"WorkspaceConnectionStatus"
( \x ->
WorkspaceConnectionStatus'
Prelude.<$> (x Data..:? "ConnectionState")
Prelude.<*> (x Data..:? "ConnectionStateCheckTimestamp")
Prelude.<*> (x Data..:? "LastKnownUserConnectionTimestamp")
Prelude.<*> (x Data..:? "WorkspaceId")
)
instance Prelude.Hashable WorkspaceConnectionStatus where
hashWithSalt _salt WorkspaceConnectionStatus' {..} =
_salt `Prelude.hashWithSalt` connectionState
`Prelude.hashWithSalt` connectionStateCheckTimestamp
`Prelude.hashWithSalt` lastKnownUserConnectionTimestamp
`Prelude.hashWithSalt` workspaceId
instance Prelude.NFData WorkspaceConnectionStatus where
rnf WorkspaceConnectionStatus' {..} =
Prelude.rnf connectionState
`Prelude.seq` Prelude.rnf connectionStateCheckTimestamp
`Prelude.seq` Prelude.rnf lastKnownUserConnectionTimestamp
`Prelude.seq` Prelude.rnf workspaceId
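-- Illustrative usage sketch (not part of the generated module): construct a
-- value with the smart constructor and set/read optional fields through the
-- generated lenses. The lens operators are assumed to come from Control.Lens
-- (or any lens-compatible library); the workspace id below is a made-up example.
--
--   import Control.Lens ((&), (?~), (^.))
--
--   exampleStatus :: WorkspaceConnectionStatus
--   exampleStatus =
--     newWorkspaceConnectionStatus
--       & workspaceConnectionStatus_workspaceId ?~ "ws-0123456789abcdef0"
--
--   exampleWorkspaceId :: Prelude.Maybe Prelude.Text
--   exampleWorkspaceId = exampleStatus ^. workspaceConnectionStatus_workspaceId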
|
e6e9bf690a408e89ad8abae62ac0dfc075c671c2427f9e40620b21377308fe87 | tommaisey/aeon | sosc.scm | ;; bytevector -> int
(define decode-u8
(lambda (v)
(bytevector-u8-ref v 0)))
;; bytevector -> int
(define decode-u16
(lambda (v)
(bytevector-u16-ref v 0 (endianness big))))
;; bytevector -> int
(define decode-u32
(lambda (v)
(bytevector-u32-ref v 0 (endianness big))))
;; bytevector -> int
(define decode-u64
(lambda (v)
(bytevector-u64-ref v 0 (endianness big))))
;; bytevector -> int
(define decode-i8
(lambda (v)
(bytevector-s8-ref v 0)))
;; bytevector -> int
(define decode-i16
(lambda (v)
(bytevector-s16-ref v 0 (endianness big))))
;; bytevector -> int
(define decode-i32
(lambda (v)
(bytevector-s32-ref v 0 (endianness big))))
;; bytevector -> int
(define decode-i64
(lambda (v)
(bytevector-s64-ref v 0 (endianness big))))
;; bytevector -> double
(define decode-f32
(lambda (v)
(bytevector-ieee-single-ref v 0 (endianness big))))
;; bytevector -> double
(define decode-f64
(lambda (v)
(bytevector-ieee-double-ref v 0 (endianness big))))
;; bytevector -> string
(define decode-str
(lambda (b)
(utf8->string b)))
;; bytevector -> string
;;
;; (decode-pstr (flatten-bytevectors (encode-pstr "string")))
(define decode-pstr
(lambda (v)
(let* ((n (decode-u8 v))
(w (bytevector-section v 1 (+ n 1))))
(decode-str w))))
;; bytevector -> string
(define decode-cstr
(lambda (v)
(let* ((n (bytevector-find-index v 0))
(w (bytevector-section v 0 n)))
(decode-str w))))
;; int -> bytevector
(define encode-u8
(lambda (n)
(bytevector-make-and-set1
bytevector-u8-set!
1
(exact n))))
;; int -> bytevector
(define encode-u16
(lambda (n)
(bytevector-make-and-set
bytevector-u16-set!
2
(exact n))))
;; int -> bytevector
(define encode-u32
(lambda (n)
(bytevector-make-and-set
bytevector-u32-set!
4
(exact n))))
;; int -> bytevector
(define encode-u64
(lambda (n)
(bytevector-make-and-set
bytevector-u64-set!
8
(exact n))))
;; int -> bytevector
(define encode-i8
(lambda (n)
(bytevector-make-and-set1
bytevector-s8-set!
1
(exact n))))
;; int -> bytevector
(define encode-i16
(lambda (n)
(bytevector-make-and-set
bytevector-s16-set!
2
(exact n))))
;; int -> bytevector
(define encode-i32
(lambda (n)
(bytevector-make-and-set
bytevector-s32-set!
4
(exact n))))
;; int -> bytevector
(define encode-i64
(lambda (n)
(bytevector-make-and-set
bytevector-s64-set!
8
(exact n))))
;; double -> bytevector
(define encode-f32
(lambda (n)
(bytevector-make-and-set
bytevector-ieee-single-set!
4
(inexact n))))
;; double -> bytevector
(define encode-f64
(lambda (n)
(bytevector-make-and-set
bytevector-ieee-double-set!
8
(inexact n))))
;; string -> bytevector
(define encode-str
(lambda (s)
(string->utf8 s)))
;; string -> bytevector
(define encode-pstr
(lambda (s)
(let* ((b (encode-str s))
(n (encode-u8 (bytevector-length b))))
(list n b))))
;; string -> [bytevector]
(define encode-cstr
(lambda (s)
(let* ((b (encode-str s))
(z (encode-u8 0)))
(list b z))))
;; port -> int -> bytevector
(define read-bstr
(lambda (p n)
(get-bytevector-n p n)))
;; port -> string
(define read-pstr
(lambda (p)
(let* ((n (lookahead-u8 p))
(v (read-bstr p (+ n 1))))
(decode-pstr v))))
;; port -> string
(define read-cstr
(lambda (p)
(let loop ((l nil)
(b (get-u8 p)))
(if (= b 0)
(list->string (map integer->char (reverse l)))
(loop (cons b l) (get-u8 p))))))
;; port -> int
(define read-i8
(lambda (p)
(decode-i8 (read-bstr p 1))))
;; port -> int
(define read-u8
(lambda (p)
(decode-u8 (read-bstr p 1))))
;; port -> int
(define read-i16
(lambda (p)
(decode-i16 (read-bstr p 2))))
;; port -> int
(define read-u16
(lambda (p)
(decode-u16 (read-bstr p 2))))
;; port -> int
(define read-i32
(lambda (p)
(decode-i32 (read-bstr p 4))))
;; port -> int
(define read-u32
(lambda (p)
(decode-u32 (read-bstr p 4))))
;; port -> int
(define read-i64
(lambda (p)
(decode-i64 (read-bstr p 8))))
;; port -> int
(define read-u64
(lambda (p)
(decode-u64 (read-bstr p 8))))
;; port -> double
(define read-f32
(lambda (p)
(decode-f32 (read-bstr p 4))))
;; port -> double
(define read-f64
(lambda (p)
(decode-f64 (read-bstr p 8))))
;; int
(define seconds-from-1900-to-1970
(+ (* 70 365 24 60 60) (* 17 24 60 60)))
;; double -> int
(define ntpr->ntp
(lambda (n)
(exact (round (* n (expt 2 32))))))
;; double -> double
(define utc->ntpr
(lambda (n)
(+ n seconds-from-1900-to-1970)))
;; int -> double
(define ntp->utc
(lambda (n)
(- (/ n (expt 2 32)) seconds-from-1900-to-1970)))
;; port -> string
(define read-ostr
(lambda (p)
(let* ((s (read-cstr p))
(n (mod (cstring-length s) 4))
(i (- 4 (mod n 4))))
(if (not (= n 0))
(read-bstr p i)
#f)
s)))
;; port -> bytevector
(define read-obyt
(lambda (p)
(let* ((n (read-i32 p))
(b (read-bstr p n))
(i (- 4 (mod n 4))))
(if (not (= n 0))
(read-bstr p i)
#f)
b)))
;; datum = int | double | string | bytevector
;; port -> char -> datum
(define read-value
(lambda (p t)
(cond
((equal? t oI32) (read-i32 p))
((equal? t oI64) (read-i64 p))
((equal? t oU64) (read-u64 p))
((equal? t oF32) (read-f32 p))
((equal? t oF64) (read-f64 p))
((equal? t oSTR) (read-ostr p))
((equal? t oBYT) (read-obyt p))
((equal? t oMID) (read-u32 p))
(else (error "read-value" "bad type" t)))))
;; port -> [char] -> [datum]
(define read-arguments
(lambda (p types)
(if (null? types)
'()
(cons (read-value p (car types))
(read-arguments p (cdr types))))))
;; port -> (string:[datum])
(define read-message
(lambda (p)
(let* ((address (read-ostr p))
(types (read-ostr p)))
(cons address
(read-arguments p (cdr (string->list types)))))))
;; port -> (utc:[message])
(define read-bundle
(lambda (p)
(let ((bundletag (read-ostr p))
(timetag (ntp->utc (read-u64 p)))
(parts (list)))
(if (not (equal? bundletag "#bundle"))
(error "read-bundle"
"illegal bundle tag"
bundletag)
(cons timetag
(let loop ((parts (list)))
(if (eof-object? (lookahead-u8 p))
(reverse parts)
(begin
;; We have no use for the message size...
(read-i32 p)
(loop (cons (read-packet p) parts))))))))))
;; byte
(define hash-u8
(char->integer #\#))
;; port -> osc
(define read-packet
(lambda (p)
(if (equal? (lookahead-u8 p) hash-u8)
(read-bundle p)
(read-message p))))
;; bytevector -> osc
(define decode-osc
(lambda (b)
(with-input-from-bytevector b read-packet)))
;; [byte] -> ()
(define osc-display
(lambda (l)
(zip-with
(lambda (b n)
(display (list (number->string b 16) (integer->char b)))
(if (= 3 (mod n 4))
(newline)
(display #\space)))
l
(enum-from-to 0 (- (length l) 1)))))
;; string -> int
(define cstring-length
(lambda (s)
(+ 1 (string-length s))))
;; int -> int
;; (equal? (map osc-align (enum-from-to 0 7)) (list 0 3 2 1 0 3 2 1))
(define osc-align
(lambda (n)
(- (fxand (+ n 3) (fxnot 3)) n)))
;; int -> [bytevector]
(define padding-of
(lambda (n) (replicate (osc-align n) (encode-u8 0))))
;; string -> [bytevector]
(define encode-string
(lambda (s)
(list (encode-cstr s) (padding-of (cstring-length s)))))
;; bytevector -> [bytevector]
(define encode-bytes
(lambda (b)
(let ((n (bytevector-length b)))
(list (encode-i32 n)
b
(padding-of n)))))
;; datum -> bytevector
(define encode-value
(lambda (e)
(cond ((number? e) (if (integer? e)
(encode-i32 e)
(encode-f32 e)))
((string? e) (encode-string e))
((bytevector? e) (encode-bytes e))
(else (error "encode-value" "illegal value" e)))))
;; [datum] -> bytevector
(define encode-types
(lambda (l)
(encode-string
(list->string
(cons #\,
(map (lambda (e)
(cond ((number? e) (if (integer? e) oI32 oF32))
((string? e) oSTR)
((bytevector? e) oBYT)
(else (error "encode-types" "type?" e))))
l))))))
;; osc -> [bytevector]
(define encode-message
(lambda (m)
(list (encode-string (car m))
(encode-types (cdr m))
(map encode-value (cdr m)))))
;; osc -> [bytevector]
(define encode-bundle-ntp
(lambda (b)
(list (encode-string "#bundle")
(encode-u64 (ntpr->ntp (car b)))
(map (lambda (e)
(if (message? e)
(encode-bytes (encode-osc e))
(error "encode-bundle" "illegal value" e)))
(cdr b)))))
;; osc -> [bytevector]
(define encode-bundle
(lambda (b)
(encode-bundle-ntp (cons (utc->ntpr (car b)) (cdr b)))))
;; osc -> bytevector
(define encode-osc
(lambda (p)
(flatten-bytevectors
(if (bundle? p)
(encode-bundle p)
(encode-message p)))))
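;; Illustrative round-trip sketch (not part of the original file): the address
;; and arguments below are made up. Encoding a message and decoding the
;; resulting bytevector gives back the original (address . arguments) list.
;;
;; (decode-osc (encode-osc (message "/s_new" (list "sine" 1001 0 0))))
;; => ("/s_new" "sine" 1001 0 0)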
;; any | [any] -> datum | [datum]
(define purify
(lambda (e)
(cond ((or3 (number? e) (string? e) (bytevector? e)) e)
((list? e) (map purify e))
((symbol? e) (symbol->string e))
((boolean? e) (if e 1 0))
(else (error "purify" "illegal input" e)))))
;; char
(define oI32 #\i)
(define oI64 #\h)
(define oU64 #\t)
(define oF32 #\f)
(define oF64 #\d)
(define oSTR #\s)
(define oBYT #\b)
(define oMID #\m)
;; string -> [any] -> osc
(define message
(lambda (c l)
(if (string? c)
(cons c l)
(error "message" "illegal address"))))
;; float -> [any] -> osc
(define bundle
(lambda (t l)
(if (number? t)
(cons t l)
(error "bundle" "illegal timestamp" t))))
;; osc -> bool
(define message?
(lambda (p)
(string? (car p))))
;; osc -> bool
(define bundle?
(lambda (p)
(number? (car p))))
;; osc -> bool
(define verify-message
(lambda (m)
(and2 (string? (car m))
(all (lambda (e) (or (number? e)
(string? e)))
(cdr m)))))
;; osc -> bool
(define verify-bundle
(lambda (b)
(and2 (integer? (car b))
(all (lambda (e) (or2 (verify-message e)
(and2 (verify-bundle e)
(>= (car e) (car b)))))
(cdr b)))))
;; osc -> bool
(define verify-packet
(lambda (p)
(or2 (verify-message p)
(verify-bundle p))))
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/sosc/src/sosc.scm | scheme | bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> int
bytevector -> double
bytevector -> double
bytevector -> string
bytevector -> string
(decode-pstr (flatten-bytevectors (encode-pstr "string")))
bytevector -> string
int -> bytevector
int -> bytevector
int -> bytevector
int -> bytevector
int -> bytevector
int -> bytevector
int -> bytevector
int -> bytevector
double -> bytevector
double -> bytevector
string -> bytevector
string -> bytevector
string -> [bytevector]
port -> int -> bytevector
port -> string
port -> string
port -> int
port -> int
port -> int
port -> int
port -> int
port -> int
port -> int
port -> int
port -> double
port -> double
int
double -> int
double -> double
int -> double
port -> string
port -> bytevector
datum = int | double | string | bytevector
port -> char -> datum
port -> [char] -> [datum]
port -> (string:[datum])
port -> (utc:[message])
We have no use for the message size...
byte
port -> osc
bytevector -> osc
[byte] -> ()
string -> int
int -> int
(equal? (map osc-align (enum-from-to 0 7)) (list 0 3 2 1 0 3 2 1))
int -> [bytevector]
string -> [bytevector]
bytevector -> [bytevector]
datum -> bytevector
[datum] -> bytevector
osc -> [bytevector]
osc -> [bytevector]
osc -> [bytevector]
osc -> bytevector
any | [any] -> datum | [datum]
char
string -> [any] -> osc
float -> [any] -> osc
osc -> bool
osc -> bool
osc -> bool
osc -> bool
osc -> bool | (define decode-u8
(lambda (v)
(bytevector-u8-ref v 0)))
(define decode-u16
(lambda (v)
(bytevector-u16-ref v 0 (endianness big))))
(define decode-u32
(lambda (v)
(bytevector-u32-ref v 0 (endianness big))))
(define decode-u64
(lambda (v)
(bytevector-u64-ref v 0 (endianness big))))
(define decode-i8
(lambda (v)
(bytevector-s8-ref v 0)))
(define decode-i16
(lambda (v)
(bytevector-s16-ref v 0 (endianness big))))
(define decode-i32
(lambda (v)
(bytevector-s32-ref v 0 (endianness big))))
(define decode-i64
(lambda (v)
(bytevector-s64-ref v 0 (endianness big))))
(define decode-f32
(lambda (v)
(bytevector-ieee-single-ref v 0 (endianness big))))
(define decode-f64
(lambda (v)
(bytevector-ieee-double-ref v 0 (endianness big))))
(define decode-str
(lambda (b)
(utf8->string b)))
(define decode-pstr
(lambda (v)
(let* ((n (decode-u8 v))
(w (bytevector-section v 1 (+ n 1))))
(decode-str w))))
(define decode-cstr
(lambda (v)
(let* ((n (bytevector-find-index v 0))
(w (bytevector-section v 0 n)))
(decode-str w))))
(define encode-u8
(lambda (n)
(bytevector-make-and-set1
bytevector-u8-set!
1
(exact n))))
(define encode-u16
(lambda (n)
(bytevector-make-and-set
bytevector-u16-set!
2
(exact n))))
(define encode-u32
(lambda (n)
(bytevector-make-and-set
bytevector-u32-set!
4
(exact n))))
(define encode-u64
(lambda (n)
(bytevector-make-and-set
bytevector-u64-set!
8
(exact n))))
(define encode-i8
(lambda (n)
(bytevector-make-and-set1
bytevector-s8-set!
1
(exact n))))
(define encode-i16
(lambda (n)
(bytevector-make-and-set
bytevector-s16-set!
2
(exact n))))
(define encode-i32
(lambda (n)
(bytevector-make-and-set
bytevector-s32-set!
4
(exact n))))
(define encode-i64
(lambda (n)
(bytevector-make-and-set
bytevector-s64-set!
8
(exact n))))
(define encode-f32
(lambda (n)
(bytevector-make-and-set
bytevector-ieee-single-set!
4
(inexact n))))
(define encode-f64
(lambda (n)
(bytevector-make-and-set
bytevector-ieee-double-set!
8
(inexact n))))
(define encode-str
(lambda (s)
(string->utf8 s)))
(define encode-pstr
(lambda (s)
(let* ((b (encode-str s))
(n (encode-u8 (bytevector-length b))))
(list n b))))
(define encode-cstr
(lambda (s)
(let* ((b (encode-str s))
(z (encode-u8 0)))
(list b z))))
(define read-bstr
(lambda (p n)
(get-bytevector-n p n)))
(define read-pstr
(lambda (p)
(let* ((n (lookahead-u8 p))
(v (read-bstr p (+ n 1))))
(decode-pstr v))))
(define read-cstr
(lambda (p)
(let loop ((l nil)
(b (get-u8 p)))
(if (= b 0)
(list->string (map integer->char (reverse l)))
(loop (cons b l) (get-u8 p))))))
(define read-i8
(lambda (p)
(decode-i8 (read-bstr p 1))))
(define read-u8
(lambda (p)
(decode-u8 (read-bstr p 1))))
(define read-i16
(lambda (p)
(decode-i16 (read-bstr p 2))))
(define read-u16
(lambda (p)
(decode-u16 (read-bstr p 2))))
(define read-i32
(lambda (p)
(decode-i32 (read-bstr p 4))))
(define read-u32
(lambda (p)
(decode-u32 (read-bstr p 4))))
(define read-i64
(lambda (p)
(decode-i64 (read-bstr p 8))))
(define read-u64
(lambda (p)
(decode-u64 (read-bstr p 8))))
(define read-f32
(lambda (p)
(decode-f32 (read-bstr p 4))))
(define read-f64
(lambda (p)
(decode-f64 (read-bstr p 8))))
(define seconds-from-1900-to-1970
(+ (* 70 365 24 60 60) (* 17 24 60 60)))
(define ntpr->ntp
(lambda (n)
(exact (round (* n (expt 2 32))))))
(define utc->ntpr
(lambda (n)
(+ n seconds-from-1900-to-1970)))
(define ntp->utc
(lambda (n)
(- (/ n (expt 2 32)) seconds-from-1900-to-1970)))
(define read-ostr
(lambda (p)
(let* ((s (read-cstr p))
(n (mod (cstring-length s) 4))
(i (- 4 (mod n 4))))
(if (not (= n 0))
(read-bstr p i)
#f)
s)))
(define read-obyt
(lambda (p)
(let* ((n (read-i32 p))
(b (read-bstr p n))
(i (- 4 (mod n 4))))
(if (not (= n 0))
(read-bstr p i)
#f)
b)))
(define read-value
(lambda (p t)
(cond
((equal? t oI32) (read-i32 p))
((equal? t oI64) (read-i64 p))
((equal? t oU64) (read-u64 p))
((equal? t oF32) (read-f32 p))
((equal? t oF64) (read-f64 p))
((equal? t oSTR) (read-ostr p))
((equal? t oBYT) (read-obyt p))
((equal? t oMID) (read-u32 p))
(else (error "read-value" "bad type" t)))))
(define read-arguments
(lambda (p types)
(if (null? types)
'()
(cons (read-value p (car types))
(read-arguments p (cdr types))))))
(define read-message
(lambda (p)
(let* ((address (read-ostr p))
(types (read-ostr p)))
(cons address
(read-arguments p (cdr (string->list types)))))))
(define read-bundle
(lambda (p)
(let ((bundletag (read-ostr p))
(timetag (ntp->utc (read-u64 p)))
(parts (list)))
(if (not (equal? bundletag "#bundle"))
(error "read-bundle"
"illegal bundle tag"
bundletag)
(cons timetag
(let loop ((parts (list)))
(if (eof-object? (lookahead-u8 p))
(reverse parts)
(begin
(read-i32 p)
(loop (cons (read-packet p) parts))))))))))
(define hash-u8
(char->integer #\#))
(define read-packet
(lambda (p)
(if (equal? (lookahead-u8 p) hash-u8)
(read-bundle p)
(read-message p))))
(define decode-osc
(lambda (b)
(with-input-from-bytevector b read-packet)))
(define osc-display
(lambda (l)
(zip-with
(lambda (b n)
(display (list (number->string b 16) (integer->char b)))
(if (= 3 (mod n 4))
(newline)
(display #\space)))
l
(enum-from-to 0 (- (length l) 1)))))
(define cstring-length
(lambda (s)
(+ 1 (string-length s))))
(define osc-align
(lambda (n)
(- (fxand (+ n 3) (fxnot 3)) n)))
(define padding-of
(lambda (n) (replicate (osc-align n) (encode-u8 0))))
(define encode-string
(lambda (s)
(list (encode-cstr s) (padding-of (cstring-length s)))))
(define encode-bytes
(lambda (b)
(let ((n (bytevector-length b)))
(list (encode-i32 n)
b
(padding-of n)))))
(define encode-value
(lambda (e)
(cond ((number? e) (if (integer? e)
(encode-i32 e)
(encode-f32 e)))
((string? e) (encode-string e))
((bytevector? e) (encode-bytes e))
(else (error "encode-value" "illegal value" e)))))
(define encode-types
(lambda (l)
(encode-string
(list->string
(cons #\,
(map (lambda (e)
(cond ((number? e) (if (integer? e) oI32 oF32))
((string? e) oSTR)
((bytevector? e) oBYT)
(else (error "encode-types" "type?" e))))
l))))))
(define encode-message
(lambda (m)
(list (encode-string (car m))
(encode-types (cdr m))
(map encode-value (cdr m)))))
(define encode-bundle-ntp
(lambda (b)
(list (encode-string "#bundle")
(encode-u64 (ntpr->ntp (car b)))
(map (lambda (e)
(if (message? e)
(encode-bytes (encode-osc e))
(error "encode-bundle" "illegal value" e)))
(cdr b)))))
(define encode-bundle
(lambda (b)
(encode-bundle-ntp (cons (utc->ntpr (car b)) (cdr b)))))
(define encode-osc
(lambda (p)
(flatten-bytevectors
(if (bundle? p)
(encode-bundle p)
(encode-message p)))))
(define purify
(lambda (e)
(cond ((or3 (number? e) (string? e) (bytevector? e)) e)
((list? e) (map purify e))
((symbol? e) (symbol->string e))
((boolean? e) (if e 1 0))
(else (error "purify" "illegal input" e)))))
(define oI32 #\i)
(define oI64 #\h)
(define oU64 #\t)
(define oF32 #\f)
(define oF64 #\d)
(define oSTR #\s)
(define oBYT #\b)
(define oMID #\m)
(define message
(lambda (c l)
(if (string? c)
(cons c l)
(error "message" "illegal address"))))
(define bundle
(lambda (t l)
(if (number? t)
(cons t l)
(error "bundle" "illegal timestamp" t))))
(define message?
(lambda (p)
(string? (car p))))
(define bundle?
(lambda (p)
(number? (car p))))
(define verify-message
(lambda (m)
(and2 (string? (car m))
(all (lambda (e) (or (number? e)
(string? e)))
(cdr m)))))
(define verify-bundle
(lambda (b)
(and2 (integer? (car b))
(all (lambda (e) (or2 (verify-message e)
(and2 (verify-bundle e)
(>= (car e) (car b)))))
(cdr b)))))
(define verify-packet
(lambda (p)
(or2 (verify-message p)
(verify-bundle p))))
|
a69ab104653f6d84ebfd19d8cae701ba34de2adbb60fa3993aa32abadaef0dd4 | chrovis/cljam | depth.clj | (ns cljam.algo.depth
"Provides algorithms for calculating simple depth of coverage."
(:require [com.climate.claypoole :as cp]
[com.climate.claypoole.lazy :as lazy]
[cljam.common :as common]
[cljam.util.region :as region]
[cljam.io.sam :as sam]
[cljam.io.sam.util :as sam-util]
[cljam.io.sam.util.refs :as refs])
(:import [cljam.io.protocols SAMRegionBlock]))
(def ^:const default-step 1000000)
;; lazy
;; ----
(defn- count-for-positions
"Piles the alignments up and counts them in the positions, returning it as a seq."
[alns beg end]
(let [pile (long-array (inc (- end beg)))]
(doseq [aln alns]
(let [left (max (:pos aln) beg)
right (min (:end aln) end)
left-index (- left beg)]
(dotimes [i (inc (- right left))]
(aset-long pile (+ i left-index) (inc (aget pile (+ i left-index)))))))
(seq pile)))
(defn- lazy-depth*
"Internal lazy-depth function returning lazy sequence of depth."
[rdr rname start end step]
(let [n-threads (common/get-exec-n-threads)
read-fn (fn [r start end]
(sam/read-blocks r {:chr rname :start start :end end} {:mode :region}))
count-fn (fn [xs]
(if (= n-threads 1)
(map (fn [[start end]]
(count-for-positions (read-fn rdr start end) start end)) xs)
(lazy/pmap (dec n-threads)
(fn [[start end]]
(with-open [r (sam/clone-bam-reader rdr)]
(count-for-positions (read-fn r start end) start end))) xs)))]
(->> (region/divide-region start end step)
count-fn
(apply concat))))
(defn lazy-depth
"Calculate depth of coverage lazily. Returns a lazy seq of depth for range [start, end].
Requires a `cljam.io.bam.reader.BAMReader` instance and region.
If start and end are not supplied, piles whole range up.
Note that CIGAR code in alignments are ignored and only start/end positions are used."
[bam-reader {:keys [chr start end] :or {start 1 end Long/MAX_VALUE}}
& [{:keys [step n-threads] :or {step default-step n-threads 1}}]]
{:pre [chr start end (pos? start) (pos? end) (<= start end)]}
(when-let [{:keys [len]} (refs/ref-by-name (sam/read-refs bam-reader) chr)]
(binding [common/*n-threads* n-threads]
(lazy-depth* bam-reader chr (min len start) (min len end) step))))
;; eager
;; -----
(defn- unchecked-aset-depth-in-region!
"Piles alignments up and sets depth values to a part of the given int-array."
[alns beg end offset ^ints pile]
(let [beg (int beg)
end (int end)
offset (int offset)]
(doseq [^SAMRegionBlock aln alns]
(let [left (Math/max ^int (.pos aln) beg)
right (unchecked-inc-int (.end aln))
left-index (unchecked-add-int (unchecked-subtract-int left beg) offset)
right-index (unchecked-add-int (unchecked-subtract-int right beg) offset)]
(aset-int pile left-index (unchecked-inc-int (aget pile left-index)))
(when (<= right end)
(aset-int pile right-index (unchecked-dec-int (aget pile right-index))))))
(dotimes [i (- end beg)]
(aset-int
pile
(unchecked-add-int (unchecked-inc-int i) offset)
(unchecked-add-int
(aget pile (unchecked-add-int i offset))
(aget pile (unchecked-add-int (unchecked-inc-int i) offset)))))))
(defn- aset-depth-in-region!
"Piles alignments up and sets depth values to a part of the given int-array.
It's roughly 15-25% slower than unchecked version."
[alns beg end offset ^ints pile]
(let [beg (int beg)
end (int end)
offset (int offset)]
(doseq [aln alns]
(let [left (Math/max ^int (:pos aln) beg)
right (int ^long (inc (or (:end aln) (sam-util/get-end aln))))
left-index (+ (- left beg) offset)
right-index (+ (- right beg) offset)]
(aset-int pile left-index (inc (aget pile left-index)))
(when (<= right end)
(aset-int pile right-index (dec (aget pile right-index))))))
(dotimes [i (- end beg)]
(aset-int pile (+ (inc i) offset) (+ (aget pile (+ i offset)) (aget pile (+ (inc i) offset)))))))
(defn ^"[I" depth*
"Internal depth function which returns an int-array."
[rdr {:keys [chr start end] :as region}
& [{:keys [step unchecked? n-threads] :or {step default-step unchecked? false n-threads 1}}]]
(let [pile (int-array (inc (- end start)))
f (if unchecked? unchecked-aset-depth-in-region! aset-depth-in-region!)]
(if (= n-threads 1)
(f (sam/read-blocks rdr region {:mode :region}) start end 0 pile)
(cp/pdoseq
n-threads
[[s e] (region/divide-region start end step)]
(with-open [r (sam/clone-bam-reader rdr)]
(-> (sam/read-blocks r {:chr chr, :start s, :end e} {:mode :region})
(f s e (- s start) pile)))))
pile))
(defn depth
"Calculate depth of coverage eagerly. Returns a seq of depth for range [start, end].
Requires a `cljam.io.bam.reader.BAMReader` instance and region.
If start and end are not supplied, piles whole range up.
Note that CIGAR code in alignments are ignored and only start/end positions are used."
[bam-reader {:keys [chr start end] :or {start 1 end Long/MAX_VALUE}}
& [{:keys [step unchecked? n-threads] :or {step default-step unchecked? false n-threads 1}}]]
{:pre [chr start end (pos? start) (pos? end) (<= start end)]}
(when-let [{:keys [len]} (refs/ref-by-name (sam/read-refs bam-reader) chr)]
(seq
(depth*
bam-reader
{:chr chr, :start (min len start), :end (min len end)}
{:step step, :unchecked? unchecked?, :n-threads n-threads}))))
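;; Illustrative usage sketch (not part of the original namespace); the BAM file
;; path and region are hypothetical. `sam/reader` is assumed to open a sorted,
;; indexed BAM file.
(comment
  (with-open [r (sam/reader "path/to/example.sorted.bam")]
    ;; eager depth over a small region, piled up by 2 worker threads
    (depth r {:chr "chr1" :start 1 :end 1000} {:n-threads 2})))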
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/src/cljam/algo/depth.clj | clojure | lazy
----
eager
----- | (ns cljam.algo.depth
"Provides algorithms for calculating simple depth of coverage."
(:require [com.climate.claypoole :as cp]
[com.climate.claypoole.lazy :as lazy]
[cljam.common :as common]
[cljam.util.region :as region]
[cljam.io.sam :as sam]
[cljam.io.sam.util :as sam-util]
[cljam.io.sam.util.refs :as refs])
(:import [cljam.io.protocols SAMRegionBlock]))
(def ^:const default-step 1000000)
(defn- count-for-positions
"Piles the alignments up and counts them in the positions, returning it as a seq."
[alns beg end]
(let [pile (long-array (inc (- end beg)))]
(doseq [aln alns]
(let [left (max (:pos aln) beg)
right (min (:end aln) end)
left-index (- left beg)]
(dotimes [i (inc (- right left))]
(aset-long pile (+ i left-index) (inc (aget pile (+ i left-index)))))))
(seq pile)))
(defn- lazy-depth*
"Internal lazy-depth function returning lazy sequence of depth."
[rdr rname start end step]
(let [n-threads (common/get-exec-n-threads)
read-fn (fn [r start end]
(sam/read-blocks r {:chr rname :start start :end end} {:mode :region}))
count-fn (fn [xs]
(if (= n-threads 1)
(map (fn [[start end]]
(count-for-positions (read-fn rdr start end) start end)) xs)
(lazy/pmap (dec n-threads)
(fn [[start end]]
(with-open [r (sam/clone-bam-reader rdr)]
(count-for-positions (read-fn r start end) start end))) xs)))]
(->> (region/divide-region start end step)
count-fn
(apply concat))))
(defn lazy-depth
"Calculate depth of coverage lazily. Returns a lazy seq of depth for range [start, end].
Requires a `cljam.io.bam.reader.BAMReader` instance and region.
If start and end are not supplied, piles whole range up.
Note that CIGAR code in alignments are ignored and only start/end positions are used."
[bam-reader {:keys [chr start end] :or {start 1 end Long/MAX_VALUE}}
& [{:keys [step n-threads] :or {step default-step n-threads 1}}]]
{:pre [chr start end (pos? start) (pos? end) (<= start end)]}
(when-let [{:keys [len]} (refs/ref-by-name (sam/read-refs bam-reader) chr)]
(binding [common/*n-threads* n-threads]
(lazy-depth* bam-reader chr (min len start) (min len end) step))))
(defn- unchecked-aset-depth-in-region!
"Piles alignments up and sets depth values to a part of the given int-array."
[alns beg end offset ^ints pile]
(let [beg (int beg)
end (int end)
offset (int offset)]
(doseq [^SAMRegionBlock aln alns]
(let [left (Math/max ^int (.pos aln) beg)
right (unchecked-inc-int (.end aln))
left-index (unchecked-add-int (unchecked-subtract-int left beg) offset)
right-index (unchecked-add-int (unchecked-subtract-int right beg) offset)]
(aset-int pile left-index (unchecked-inc-int (aget pile left-index)))
(when (<= right end)
(aset-int pile right-index (unchecked-dec-int (aget pile right-index))))))
(dotimes [i (- end beg)]
(aset-int
pile
(unchecked-add-int (unchecked-inc-int i) offset)
(unchecked-add-int
(aget pile (unchecked-add-int i offset))
(aget pile (unchecked-add-int (unchecked-inc-int i) offset)))))))
(defn- aset-depth-in-region!
"Piles alignments up and sets depth values to a part of the given int-array.
It's roughly 15-25% slower than unchecked version."
[alns beg end offset ^ints pile]
(let [beg (int beg)
end (int end)
offset (int offset)]
(doseq [aln alns]
(let [left (Math/max ^int (:pos aln) beg)
right (int ^long (inc (or (:end aln) (sam-util/get-end aln))))
left-index (+ (- left beg) offset)
right-index (+ (- right beg) offset)]
(aset-int pile left-index (inc (aget pile left-index)))
(when (<= right end)
(aset-int pile right-index (dec (aget pile right-index))))))
(dotimes [i (- end beg)]
(aset-int pile (+ (inc i) offset) (+ (aget pile (+ i offset)) (aget pile (+ (inc i) offset)))))))
(defn ^"[I" depth*
"Internal depth function which returns an int-array."
[rdr {:keys [chr start end] :as region}
& [{:keys [step unchecked? n-threads] :or {step default-step unchecked? false n-threads 1}}]]
(let [pile (int-array (inc (- end start)))
f (if unchecked? unchecked-aset-depth-in-region! aset-depth-in-region!)]
(if (= n-threads 1)
(f (sam/read-blocks rdr region {:mode :region}) start end 0 pile)
(cp/pdoseq
n-threads
[[s e] (region/divide-region start end step)]
(with-open [r (sam/clone-bam-reader rdr)]
(-> (sam/read-blocks r {:chr chr, :start s, :end e} {:mode :region})
(f s e (- s start) pile)))))
pile))
(defn depth
"Calculate depth of coverage eagerly. Returns a seq of depth for range [start, end].
Requires a `cljam.io.bam.reader.BAMReader` instance and region.
If start and end are not supplied, piles whole range up.
Note that CIGAR code in alignments are ignored and only start/end positions are used."
[bam-reader {:keys [chr start end] :or {start 1 end Long/MAX_VALUE}}
& [{:keys [step unchecked? n-threads] :or {step default-step unchecked? false n-threads 1}}]]
{:pre [chr start end (pos? start) (pos? end) (<= start end)]}
(when-let [{:keys [len]} (refs/ref-by-name (sam/read-refs bam-reader) chr)]
(seq
(depth*
bam-reader
{:chr chr, :start (min len start), :end (min len end)}
{:step step, :unchecked? unchecked?, :n-threads n-threads}))))
|
d996799ffff852e739aa82198cbc172fbce91f893b6a2f7c598efc52a0e32dce | cojna/iota | FenwickTree.hs | {-# LANGUAGE BangPatterns #-}
module Data.FenwickTree where
import Control.Monad
import Control.Monad.Primitive
import Data.Bits
import Data.Coerce
import Data.Function
import Data.Monoid
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Data.Vector.Unboxed.Instances ()
newtype FenwickTree s a = FenwickTree {getFenwickTree :: UM.MVector s a}
newFenwickTree ::
(U.Unbox a, Monoid a, PrimMonad m) =>
Int ->
m (FenwickTree (PrimState m) a)
newFenwickTree n = FenwickTree <$> UM.replicate (n + 1) mempty
# INLINE newFenwickTree #
-- | /O(n)/
buildFenwickTree ::
(U.Unbox a, Monoid a, PrimMonad m) =>
U.Vector a ->
m (FenwickTree (PrimState m) a)
buildFenwickTree vec = do
let n = U.length vec
ft <- UM.unsafeNew (n + 1)
UM.write ft 0 mempty
U.unsafeCopy (UM.tail ft) vec
flip fix 1 $ \loop !i -> when (i <= n) $ do
let j = i + (i .&. (- i))
when (j <= n) $ do
fti <- UM.unsafeRead ft i
UM.unsafeModify ft (<> fti) j
loop (i + 1)
return $ FenwickTree ft
# INLINE buildFenwickTree #
| mappend [ 0 .. k )
/O(log n)/
/O(log n)/
-}
mappendTo ::
(PrimMonad m, U.Unbox a, Monoid a) =>
FenwickTree (PrimState m) a ->
Int ->
m a
mappendTo (FenwickTree ft) = go mempty
where
go !acc !i
| i > 0 = do
xi <- UM.unsafeRead ft i
go (acc <> xi) (i - (i .&. (- i)))
| otherwise = return acc
# INLINE mappendTo #
| /O(log n)/
mappendAt ::
(U.Unbox a, Semigroup a, PrimMonad m) =>
FenwickTree (PrimState m) a ->
Int ->
a ->
m ()
mappendAt (FenwickTree ft) k v = flip fix (k + 1) $ \loop !i -> do
when (i < n) $ do
UM.unsafeModify ft (<> v) i
loop $ i + (i .&. (- i))
where
!n = UM.length ft
# INLINE mappendAt #
type SumFenwickTree s a = FenwickTree s (Sum a)
newSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
Int ->
m (SumFenwickTree (PrimState m) a)
newSumFenwickTree = newFenwickTree
# INLINE newSumFenwickTree #
-- | /O(n)/
buildSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
U.Vector a ->
m (SumFenwickTree (PrimState m) a)
buildSumFenwickTree = buildFenwickTree . U.map coerce
# INLINE buildSumFenwickTree #
| sum [ 0 .. k )
/O(log n)/
/O(log n)/
-}
sumTo ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
m a
sumTo ft k = coerce <$> mappendTo ft k
# INLINE sumTo #
| sum [ l .. r )
/O(log n)/
/O(log n)/
-}
sumFromTo ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
Int ->
m a
sumFromTo ft l r = (-) <$> sumTo ft r <*> sumTo ft l
# INLINE sumFromTo #
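-- Illustrative usage sketch (not part of the original module): build a sum
-- Fenwick tree from a vector and query the half-open range [1, 4).
--
--   >>> ft <- buildSumFenwickTree (U.fromList [3, 1, 4, 1, 5 :: Int])
--   >>> sumFromTo ft 1 4
--   6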
/O(log n)/
readSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
m a
readSumFenwickTree ft i = sumFromTo ft i (i + 1)
# INLINE readSumFenwickTree #
/O(log n)/
writeSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
a ->
m ()
writeSumFenwickTree ft i x = readSumFenwickTree ft i >>= addAt ft i . (x -)
# INLINE writeSumFenwickTree #
| /O(log n)/
addAt ::
(U.Unbox a, Num a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
a ->
m ()
addAt ft k x = mappendAt ft k (coerce x)
# INLINE addAt #
| max i [ 0 .. i ) < w
findMaxIndexLT k [ 1 , 1 .. 1 ] = = k - 1
> > > ones < - buildFenwickTree [ 1 , 1 , 1 , 1 , 1 ]
> > > findMaxIndexLT 3 ones
2
> > > findMaxIndexLT 0 ones
0
> > > ids < - buildFenwickTree [ 1 , 2 , 3 , 4 , 5 ]
> > > findMaxIndexLT 6 ids
2
> > > findMaxIndexLT 7 ids
3
> > > zeros < - buildFenwickTree [ 0 , 0 , 0 , 0 , 0 ]
> > > findMaxIndexLT 1 zeros
5
findMaxIndexLT k [1, 1..1] == k - 1
>>> ones <- buildFenwickTree [1, 1, 1, 1, 1]
>>> findMaxIndexLT 3 ones
2
>>> findMaxIndexLT 0 ones
0
>>> ids <- buildFenwickTree [1, 2, 3, 4, 5]
>>> findMaxIndexLT 6 ids
2
>>> findMaxIndexLT 7 ids
3
>>> zeros <- buildFenwickTree [0, 0, 0, 0, 0]
>>> findMaxIndexLT 1 zeros
5
-}
findMaxIndexLT ::
(U.Unbox a, Num a, Ord a, PrimMonad m) =>
FenwickTree (PrimState m) a ->
a ->
m Int
findMaxIndexLT (FenwickTree ft) w0
| w0 <= 0 = return 0
| otherwise = go w0 highestOneBit 0
where
n = UM.length ft
highestOneBit = until (> n) (* 2) 1 `quot` 2
go !w !step !i
| step == 0 = return i
| otherwise = do
if i + step < n
then do
u <- UM.unsafeRead ft (i + step)
if u < w
then go (w - u) (step `unsafeShiftR` 1) (i + step)
else go w (step `unsafeShiftR` 1) i
else go w (step `unsafeShiftR` 1) i
# INLINE findMaxIndexLT #
| null | https://raw.githubusercontent.com/cojna/iota/6d2ad5b71b1b50bca9136d6ed84f80a0b7713d7c/src/Data/FenwickTree.hs | haskell | # LANGUAGE BangPatterns #
| /O(n)/
| /O(n)/ |
module Data.FenwickTree where
import Control.Monad
import Control.Monad.Primitive
import Data.Bits
import Data.Coerce
import Data.Function
import Data.Monoid
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Data.Vector.Unboxed.Instances ()
newtype FenwickTree s a = FenwickTree {getFenwickTree :: UM.MVector s a}
newFenwickTree ::
(U.Unbox a, Monoid a, PrimMonad m) =>
Int ->
m (FenwickTree (PrimState m) a)
newFenwickTree n = FenwickTree <$> UM.replicate (n + 1) mempty
# INLINE newFenwickTree #
buildFenwickTree ::
(U.Unbox a, Monoid a, PrimMonad m) =>
U.Vector a ->
m (FenwickTree (PrimState m) a)
buildFenwickTree vec = do
let n = U.length vec
ft <- UM.unsafeNew (n + 1)
UM.write ft 0 mempty
U.unsafeCopy (UM.tail ft) vec
flip fix 1 $ \loop !i -> when (i <= n) $ do
let j = i + (i .&. (- i))
when (j <= n) $ do
fti <- UM.unsafeRead ft i
UM.unsafeModify ft (<> fti) j
loop (i + 1)
return $ FenwickTree ft
# INLINE buildFenwickTree #
| mappend [ 0 .. k )
/O(log n)/
/O(log n)/
-}
mappendTo ::
(PrimMonad m, U.Unbox a, Monoid a) =>
FenwickTree (PrimState m) a ->
Int ->
m a
mappendTo (FenwickTree ft) = go mempty
where
go !acc !i
| i > 0 = do
xi <- UM.unsafeRead ft i
go (acc <> xi) (i - (i .&. (- i)))
| otherwise = return acc
# INLINE mappendTo #
| /O(log n)/
mappendAt ::
(U.Unbox a, Semigroup a, PrimMonad m) =>
FenwickTree (PrimState m) a ->
Int ->
a ->
m ()
mappendAt (FenwickTree ft) k v = flip fix (k + 1) $ \loop !i -> do
when (i < n) $ do
UM.unsafeModify ft (<> v) i
loop $ i + (i .&. (- i))
where
!n = UM.length ft
# INLINE mappendAt #
type SumFenwickTree s a = FenwickTree s (Sum a)
newSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
Int ->
m (SumFenwickTree (PrimState m) a)
newSumFenwickTree = newFenwickTree
# INLINE newSumFenwickTree #
buildSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
U.Vector a ->
m (SumFenwickTree (PrimState m) a)
buildSumFenwickTree = buildFenwickTree . U.map coerce
# INLINE buildSumFenwickTree #
| sum [ 0 .. k )
/O(log n)/
/O(log n)/
-}
sumTo ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
m a
sumTo ft k = coerce <$> mappendTo ft k
# INLINE sumTo #
| sum [ l .. r )
/O(log n)/
/O(log n)/
-}
sumFromTo ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
Int ->
m a
sumFromTo ft l r = (-) <$> sumTo ft r <*> sumTo ft l
# INLINE sumFromTo #
/O(log n)/
readSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
m a
readSumFenwickTree ft i = sumFromTo ft i (i + 1)
# INLINE readSumFenwickTree #
/O(log n)/
writeSumFenwickTree ::
(Num a, U.Unbox a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
a ->
m ()
writeSumFenwickTree ft i x = readSumFenwickTree ft i >>= addAt ft i . (x -)
# INLINE writeSumFenwickTree #
| /O(log n)/
addAt ::
(U.Unbox a, Num a, PrimMonad m) =>
SumFenwickTree (PrimState m) a ->
Int ->
a ->
m ()
addAt ft k x = mappendAt ft k (coerce x)
# INLINE addAt #
| max i [ 0 .. i ) < w
findMaxIndexLT k [ 1 , 1 .. 1 ] = = k - 1
> > > ones < - buildFenwickTree [ 1 , 1 , 1 , 1 , 1 ]
> > > findMaxIndexLT 3 ones
2
> > > findMaxIndexLT 0 ones
0
> > > ids < - buildFenwickTree [ 1 , 2 , 3 , 4 , 5 ]
> > > findMaxIndexLT 6 ids
2
> > > findMaxIndexLT 7 ids
3
> > > zeros < - buildFenwickTree [ 0 , 0 , 0 , 0 , 0 ]
> > > findMaxIndexLT 1 zeros
5
findMaxIndexLT k [1, 1..1] == k - 1
>>> ones <- buildFenwickTree [1, 1, 1, 1, 1]
>>> findMaxIndexLT 3 ones
2
>>> findMaxIndexLT 0 ones
0
>>> ids <- buildFenwickTree [1, 2, 3, 4, 5]
>>> findMaxIndexLT 6 ids
2
>>> findMaxIndexLT 7 ids
3
>>> zeros <- buildFenwickTree [0, 0, 0, 0, 0]
>>> findMaxIndexLT 1 zeros
5
-}
findMaxIndexLT ::
(U.Unbox a, Num a, Ord a, PrimMonad m) =>
FenwickTree (PrimState m) a ->
a ->
m Int
findMaxIndexLT (FenwickTree ft) w0
| w0 <= 0 = return 0
| otherwise = go w0 highestOneBit 0
where
n = UM.length ft
highestOneBit = until (> n) (* 2) 1 `quot` 2
go !w !step !i
| step == 0 = return i
| otherwise = do
if i + step < n
then do
u <- UM.unsafeRead ft (i + step)
if u < w
then go (w - u) (step `unsafeShiftR` 1) (i + step)
else go w (step `unsafeShiftR` 1) i
else go w (step `unsafeShiftR` 1) i
# INLINE findMaxIndexLT #
|
fb2cf8a7757a90d6cfb6a40ff206a41e0fc8c85f16de36095345510dbc488057 | conquerant-project/conquerant | core.clj | (ns conquerant.core
(:refer-clojure :exclude [await promise])
(:require [clojure.walk :refer [prewalk-replace]]
[conquerant.internals :as ci]))
(defn- async-fn [fn]
(for [[argv & body] (rest fn)]
(list argv (cons `ci/ado body))))
(defmacro async
"If `expr` is a `fn` or `defn` form, its body will
run asynchronously. Otherwise, `expr` will itself
run asynchronously, and return a `CompletableFuture`.
All async execution occurs on the `ci/*executor*` pool,
which is bound to the common ForkJoinPool by default."
[expr]
(if (and (coll? expr) (seq expr))
(let [expr (->> expr
(prewalk-replace {'let* `ci/alet
'let `ci/alet
`let `ci/alet})
macroexpand)
type (first expr)]
(cond
(or (= 'fn* type)
(= `fn type))
`(fn ~@(async-fn expr))
(= `def type)
`(defn ~(second expr)
~@(async-fn (last expr)))
(= `do type)
`(ci/ado ~@(rest expr))
:else
`(ci/ado ~expr)))
`(ci/ado ~expr)))
(defn await
"Use inside `async` `let` bindings.
The `let` block will return a `CompletableFuture`.
(async
(let [x (async :x)
y (await x)]
y))
Will wait for the `CompletableFuture` to complete
before evaluation resumes."
([promise]
(await promise nil nil))
([promise timeout-ms timeout-val]
(throw (Exception. "await used outside async let bindings!"))))
(defmacro promise
"Used to get values out of callbacks.
ex:
;; some fn that takes a callback
(defn fetch [url callback] ...)
;; can be used as
(def p
(promise [resolve]
(fetch \"\" #(resolve %))))
;; can also be completed from outside
(complete (promise) :done)"
{:style/indent 1}
([]
`(promise [_#]))
([[resolve] & body]
`(ci/promise* (fn [~resolve _#]
~@body))))
(defn promise?
"Returns `true` if obj is a `CompletableFuture`."
[obj]
(ci/promise? obj))
(defn complete
"Completes the `CompletableFuture`.
It will contain x."
[promise x]
(ci/complete promise x))
(defmacro with-async-executor
"`async` blocks in body will run on
the given `ExecutorService`'s threadpool."
[executor & body]
`(let [executor# ~executor]
(binding [ci/*executor* executor#]
~@body)))
| null | https://raw.githubusercontent.com/conquerant-project/conquerant/264b5054dbae4f7645ef6501c75a8209bb57e9d6/src/conquerant/core.clj | clojure | some fn that takes a callback
can be used as
can also be completed from outside | (ns conquerant.core
(:refer-clojure :exclude [await promise])
(:require [clojure.walk :refer [prewalk-replace]]
[conquerant.internals :as ci]))
(defn- async-fn [fn]
(for [[argv & body] (rest fn)]
(list argv (cons `ci/ado body))))
(defmacro async
"If `expr` is a `fn` or `defn` form, its body will
run asynchronously. Otherwise, `expr` will itself
run asynchronously, and return a `CompletableFuture`.
All async execution occurs on the `ci/*executor*` pool,
which is bound to the common ForkJoinPool by default."
[expr]
(if (and (coll? expr) (seq expr))
(let [expr (->> expr
(prewalk-replace {'let* `ci/alet
'let `ci/alet
`let `ci/alet})
macroexpand)
type (first expr)]
(cond
(or (= 'fn* type)
(= `fn type))
`(fn ~@(async-fn expr))
(= `def type)
`(defn ~(second expr)
~@(async-fn (last expr)))
(= `do type)
`(ci/ado ~@(rest expr))
:else
`(ci/ado ~expr)))
`(ci/ado ~expr)))
(defn await
"Use inside `async` `let` bindings.
The `let` block will return a `CompletableFuture`.
(async
(let [x (async :x)
y (await x)]
y))
Will wait for the `CompletableFuture` to complete
before evaluation resumes."
([promise]
(await promise nil nil))
([promise timeout-ms timeout-val]
(throw (Exception. "await used outside async let bindings!"))))
(defmacro promise
"Used to get values out of callbacks.
ex:
(defn fetch [url callback] ...)
(def p
(promise [resolve]
(fetch \"\" #(resolve %))))
(complete (promise) :done)"
{:style/indent 1}
([]
`(promise [_#]))
([[resolve] & body]
`(ci/promise* (fn [~resolve _#]
~@body))))
(defn promise?
"Returns `true` if obj is a `CompletableFuture`."
[obj]
(ci/promise? obj))
(defn complete
"Completes the `CompletableFuture`.
It will contain x."
[promise x]
(ci/complete promise x))
(defmacro with-async-executor
"`async` blocks in body will run on
the given `ExecutorService`'s threadpool."
[executor & body]
`(let [executor# ~executor]
(binding [ci/*executor* executor#]
~@body)))
|
05ff7b2deff0cb2db7ebc6752d84041cc9538c9a4164975127e57f2a516a85ef | ddssff/refact-global-hse | D.hs | module D(MoveType(Down, Up)
) where
-- | Declaration moves can be characterized as one of two types, Down
-- or Up. This must be computed by scanning the parsed code of the
-- departure module (the module where the declaration is when we
-- begin) for any remaining uses of the declaration's symbols. Note
-- that it is possible to specify a move that results in a legitimate
-- import loop. The only solution to this is to bring more
-- declarations over, or some manual intervention.
data MoveType
= Down
-- ^ A Down move moves a declaration away from where it is used,
-- which means we probably need to add imports of the symbols of
-- the declaration to the departure module.
| Up
-- ^ An Up move moves a declaration towards where it is used. In
-- this case leaving behind an import will probably create an
-- import cycle. Therefore we need to convert the (remaining)
-- exports of the departure module into imports and add them to
-- the arrival module.
| null | https://raw.githubusercontent.com/ddssff/refact-global-hse/519a017009cae8aa1a3db1b46eb560d76bd9895d/tests/expected/simple2/D.hs | haskell | or Up. This must be computed by scanning the parsed code of the
departure module (the module where the declaration is when we
begin) for any remaining uses of the declaration's symbols. Note
that it is possible to specify a move that results in a legitimate
import loop. The only solution to this is to bring more
declarations over, or some manual intervention.
^ A Down move moves a declaration away from where it is used,
which means we probably need to add imports of the symbols of
the declaration to the departure module.
^ An Up move moves a declaration towards where it is used. In
this case leaving behind an import will probably create an
import cycle. Therefore we need to convert the (remaining)
exports of the departure module into imports and add them to
the arrival module. | module D(MoveType(Down, Up)
) where
| Declaration moves can be characterized as one of two types , Down
data MoveType
= Down
| Up
|
ac72ef5e0e040743cdeead507052260bbc9e52335ac7997a6a38764f03f8d71c | Liqwid-Labs/liqwid-plutarch-extra | State.hs | module Plutarch.Extra.State (
PState,
pstate,
prunState,
pevalState,
pexecState,
pget,
pput,
pmodify,
) where
import Plutarch.Extra.Applicative (PApplicative (ppure), PApply (pliftA2))
import Plutarch.Extra.Bind (PBind ((#>>=)))
import Plutarch.Extra.Functor (PFunctor (PSubcategory, pfmap), Plut)
import Plutarch.Extra.TermCont (pmatchC)
-- | @since 1.0.0
newtype PState (s :: S -> Type) (a :: S -> Type) (s' :: S)
= PState (Term s' (s :--> PPair s a))
deriving stock
| @since 1.4.0
Generic
)
deriving anyclass
( -- | @since 1.0.0
PlutusType
)
| @since 1.4.0
instance DerivePlutusType (PState s a) where
type DPTStrat _ = PlutusTypeNewtype
| @since 3.1.0
instance PFunctor (PState s) where
type PSubcategory (PState s) = Plut
pfmap = phoistAcyclic $
plam $ \f state -> unTermCont $ do
PState g <- pmatchC state
pure . pcon . PState $ plam $ \s -> pfmap # f # (g # s)
-- | @since 1.0.0
instance PApply (PState s) where
pliftA2 = phoistAcyclic $
plam $ \f xs ys -> unTermCont $ do
PState g <- pmatchC xs
PState h <- pmatchC ys
pure . pcon . PState $
plam $ \s -> unTermCont $ do
PPair s' x <- pmatchC (g # s)
PPair s'' y <- pmatchC (h # s')
pure . pcon . PPair s'' $ f # x # y
-- | @since 1.0.0
instance PApplicative (PState s) where
ppure =
phoistAcyclic $ plam $ \x -> pcon . PState $ plam $ \s -> pcon . PPair s $ x
| @since 3.0.1
instance PBind (PState s) where
# INLINEABLE (#>>=) #
xs #>>= f = pmatch xs $ \case
PState g -> pcon . PState . plam $ \s -> pmatch (g # s) $ \case
PPair s' res -> pmatch (f # res) $ \case
PState h -> h # s'
| Lift a Plutarch lambda into ' PState ' .
@since 1.0.0
@since 1.0.0
-}
pstate ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
Term s' ((s :--> PPair s a) :--> PState s a)
pstate = phoistAcyclic $ plam $ pcon . PState
-- | @since 1.0.0
prunState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
Term s' (PState s a :--> s :--> PPair s a)
prunState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PState f <- pmatchC comp
pure $ f # state
-- | @since 1.0.0
pevalState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
Term s' (PState s a :--> s :--> a)
pevalState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PPair _ x <- pmatchC (prunState # comp # state)
pure x
-- | @since 1.0.0
pexecState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
Term s' (PState s a :--> s :--> s)
pexecState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PPair state' _ <- pmatchC (prunState # comp # state)
pure state'
-- | @since 1.0.0
pget ::
forall (s :: S -> Type) (s' :: S).
Term s' (PState s s)
pget = pcon . PState $ plam $ \s -> pcon . PPair s $ s
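-- Illustrative usage sketch (not part of the original module), using only the
-- combinators defined above: read the current state with 'pget', bump it with
-- 'pfmap', and evaluate against an initial state.
--
--   pevalState # (pfmap # plam (\x -> x + 1) # pget) # (41 :: Term s PInteger)
--   -- evaluates to 42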
-- | @since 1.0.0
pput ::
forall (s :: S -> Type) (s' :: S).
Term s' (s :--> PState s PUnit)
pput = phoistAcyclic $ plam $ \x -> pcon . PState $ plam $ \_ -> pcon . PPair x . pcon $ PUnit
-- | @since 1.0.0
pmodify ::
forall (s :: S -> Type) (s' :: S).
Term s' ((s :--> s) :--> PState s PUnit)
pmodify = phoistAcyclic $ plam $ \f -> pcon . PState $ plam $ \s -> pcon . PPair (f # s) . pcon $ PUnit
| null | https://raw.githubusercontent.com/Liqwid-Labs/liqwid-plutarch-extra/e354b559c358c1500854ad3c9f14133b258b7531/src/Plutarch/Extra/State.hs | haskell | | @since 1.0.0
| @since 1.0.0
| @since 1.0.0
| @since 1.0.0
> PState s a )
| @since 1.0.0
> PPair s a )
| @since 1.0.0
> s :--> a)
| @since 1.0.0
> s :--> s)
| @since 1.0.0
| @since 1.0.0
> PState s PUnit)
| @since 1.0.0
> s) :--> PState s PUnit) | module Plutarch.Extra.State (
PState,
pstate,
prunState,
pevalState,
pexecState,
pget,
pput,
pmodify,
) where
import Plutarch.Extra.Applicative (PApplicative (ppure), PApply (pliftA2))
import Plutarch.Extra.Bind (PBind ((#>>=)))
import Plutarch.Extra.Functor (PFunctor (PSubcategory, pfmap), Plut)
import Plutarch.Extra.TermCont (pmatchC)
newtype PState (s :: S -> Type) (a :: S -> Type) (s' :: S)
= PState (Term s' (s :--> PPair s a))
deriving stock
| @since 1.4.0
Generic
)
deriving anyclass
PlutusType
)
| @since 1.4.0
instance DerivePlutusType (PState s a) where
type DPTStrat _ = PlutusTypeNewtype
| @since 3.1.0
instance PFunctor (PState s) where
type PSubcategory (PState s) = Plut
pfmap = phoistAcyclic $
plam $ \f state -> unTermCont $ do
PState g <- pmatchC state
pure . pcon . PState $ plam $ \s -> pfmap # f # (g # s)
instance PApply (PState s) where
pliftA2 = phoistAcyclic $
plam $ \f xs ys -> unTermCont $ do
PState g <- pmatchC xs
PState h <- pmatchC ys
pure . pcon . PState $
plam $ \s -> unTermCont $ do
PPair s' x <- pmatchC (g # s)
PPair s'' y <- pmatchC (h # s')
pure . pcon . PPair s'' $ f # x # y
instance PApplicative (PState s) where
ppure =
phoistAcyclic $ plam $ \x -> pcon . PState $ plam $ \s -> pcon . PPair s $ x
| @since 3.0.1
instance PBind (PState s) where
# INLINEABLE (#>>=) #
xs #>>= f = pmatch xs $ \case
PState g -> pcon . PState . plam $ \s -> pmatch (g # s) $ \case
PPair s' res -> pmatch (f # res) $ \case
PState h -> h # s'
| Lift a Plutarch lambda into ' PState ' .
@since 1.0.0
@since 1.0.0
-}
pstate ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
pstate = phoistAcyclic $ plam $ pcon . PState
prunState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
prunState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PState f <- pmatchC comp
pure $ f # state
pevalState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
pevalState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PPair _ x <- pmatchC (prunState # comp # state)
pure x
pexecState ::
forall (s :: S -> Type) (a :: S -> Type) (s' :: S).
pexecState = phoistAcyclic $
plam $ \comp state -> unTermCont $ do
PPair state' _ <- pmatchC (prunState # comp # state)
pure state'
pget ::
forall (s :: S -> Type) (s' :: S).
Term s' (PState s s)
pget = pcon . PState $ plam $ \s -> pcon . PPair s $ s
pput ::
forall (s :: S -> Type) (s' :: S).
pput = phoistAcyclic $ plam $ \x -> pcon . PState $ plam $ \_ -> pcon . PPair x . pcon $ PUnit
pmodify ::
forall (s :: S -> Type) (s' :: S).
pmodify = phoistAcyclic $ plam $ \f -> pcon . PState $ plam $ \s -> pcon . PPair (f # s) . pcon $ PUnit
|
4e14c70cda02293de4ba2c939ec248034414fafa2a65617d3c82edf718770e0d | chansey97/clprosette-miniKanren | music.rkt | #lang racket
(require "../mk.rkt")
(require "../rosette-bridge.rkt")
(require "../test-check.rkt")
(printf "music.rkt\n")
(define perfect-consonant '(0 5 7))
(define consonant '(0 3 4 5 7 8 9))
(define imperfect-consonant '(3 4 8 9))
(current-solver
(z3
#:path "C:/env/z3/z3-4.8.7/z3-4.8.7-x64-win/bin/z3.exe"
#:options (hash ':smt.random_seed 1
' : smt.random_seed 2
' : smt.random_seed 3
' : smt.arith.solver 1
' : smt.arith.solver 2 ; default:2 in z3 - 4.8.7
':smt.arith.solver 6 ; default:6 in z3-4.8.12
)))
(define harmony
'((1 (3 6 2 4 5))
(2 (5 7))
(3 (6))
(4 (5 7))
(5 (1))
(6 (2 4))
(7 (1))))
(define (interval-ino ds note harmony)
(fresh (d dr)
(== (cons d dr) ds)
(rosette-typeo note r/@integer?)
(rosette-typeo harmony r/@integer?)
(conde
((rosette-asserto `(,r/@= (,r/@- ,note ,harmony) ,d)))
((rosette-asserto `(,r/@! (,r/@= (,r/@- ,note ,harmony) ,d)))
(interval-ino dr note harmony)))))
(define (ino xs x)
(fresh (y ys)
(== (cons y ys) xs)
(rosette-typeo x r/@integer?)
(rosette-typeo y r/@integer?)
(conde
((rosette-asserto `(,r/@= ,x ,y)))
((rosette-asserto `(,r/@! (,r/@= ,x ,y)))
(ino ys x)))))
(define (nexto harmony prev-harmony cur-harmony)
(fresh (p hs cs)
(== (cons `(,p ,cs) hs) harmony)
(rosette-typeo p r/@integer?)
(rosette-typeo prev-harmony r/@integer?)
(conde
((rosette-asserto `(,r/@= ,p ,prev-harmony))
(ino cs cur-harmony))
((rosette-asserto `(,r/@! (,r/@= ,p ,prev-harmony)))
(nexto hs prev-harmony cur-harmony)))))
(define (zico measure phrase position prev-note cur-note prev-harmony cur-harmony)
(fresh ()
(nexto harmony prev-harmony cur-harmony)
(rosette-typeo position r/@integer?)
(rosette-typeo measure r/@integer?)
(conde
((rosette-asserto `(,r/@= 0 (,r/@modulo ,position ,measure)))
(== cur-harmony 1)
(interval-ino perfect-consonant cur-note cur-harmony))
((rosette-asserto `(,r/@! (,r/@= 0 (,r/@modulo ,position ,measure))))
(interval-ino imperfect-consonant cur-note cur-harmony)))))
(define (musico measure phrase position prev-note prev-harmony m)
(fresh ()
(rosette-typeo position r/@integer?)
The following two variables seems always ground ?
So use ` ( * measure phrase ) ` directly .
(rosette-typeo measure r/@integer?)
(rosette-typeo phrase r/@integer?)
(conde
((rosette-asserto `(,r/@= ,position ,(* measure phrase)))
(== m '()))
((rosette-asserto `(,r/@< ,position ,(* measure phrase)))
(fresh (position+1 cur-note cur-harmony rest-m)
(== m (cons (list cur-note cur-harmony) rest-m))
(rosette-typeo position+1 r/@integer?)
(rosette-asserto `(,r/@= ,position+1 (,r/@+ 1 ,position)))
(zico measure phrase position prev-note cur-note prev-harmony cur-harmony)
(musico measure phrase position+1 cur-note cur-harmony rest-m))))))
(test "1"
(run 1 (m)
(musico 1 1 0 5 5 m))
'(((1 1))))
(test "5" ;; slow
(run 1 (m)
(musico 5 1 0 5 5 m))
'(((1 1) (6 3) (9 6) (5 2) (8 5))))
(test "4-2" ;; very slow
(run 1 (m)
(musico 4 2 0 5 5 m))
'(((1 1) (9 6) (5 2) (8 5) (1 1) (6 3) (9 6) (5 2))))
| null | https://raw.githubusercontent.com/chansey97/clprosette-miniKanren/d322f688312fa9481b22c2729018d383f493cb82/clprosette-miniKanren/tests/music.rkt | racket | default:2 in z3 - 4.8.7
default:6 in z3-4.8.12
slow
very slow | #lang racket
(require "../mk.rkt")
(require "../rosette-bridge.rkt")
(require "../test-check.rkt")
(printf "music.rkt\n")
(define perfect-consonant '(0 5 7))
(define consonant '(0 3 4 5 7 8 9))
(define imperfect-consonant '(3 4 8 9))
(current-solver
(z3
#:path "C:/env/z3/z3-4.8.7/z3-4.8.7-x64-win/bin/z3.exe"
#:options (hash ':smt.random_seed 1
;; ':smt.random_seed 2
;; ':smt.random_seed 3
;; ':smt.arith.solver 1
)))
(define harmony
'((1 (3 6 2 4 5))
(2 (5 7))
(3 (6))
(4 (5 7))
(5 (1))
(6 (2 4))
(7 (1))))
(define (interval-ino ds note harmony)
(fresh (d dr)
(== (cons d dr) ds)
(rosette-typeo note r/@integer?)
(rosette-typeo harmony r/@integer?)
(conde
((rosette-asserto `(,r/@= (,r/@- ,note ,harmony) ,d)))
((rosette-asserto `(,r/@! (,r/@= (,r/@- ,note ,harmony) ,d)))
(interval-ino dr note harmony)))))
(define (ino xs x)
(fresh (y ys)
(== (cons y ys) xs)
(rosette-typeo x r/@integer?)
(rosette-typeo y r/@integer?)
(conde
((rosette-asserto `(,r/@= ,x ,y)))
((rosette-asserto `(,r/@! (,r/@= ,x ,y)))
(ino ys x)))))
(define (nexto harmony prev-harmony cur-harmony)
(fresh (p hs cs)
(== (cons `(,p ,cs) hs) harmony)
(rosette-typeo p r/@integer?)
(rosette-typeo prev-harmony r/@integer?)
(conde
((rosette-asserto `(,r/@= ,p ,prev-harmony))
(ino cs cur-harmony))
((rosette-asserto `(,r/@! (,r/@= ,p ,prev-harmony)))
(nexto hs prev-harmony cur-harmony)))))
(define (zico measure phrase position prev-note cur-note prev-harmony cur-harmony)
(fresh ()
(nexto harmony prev-harmony cur-harmony)
(rosette-typeo position r/@integer?)
(rosette-typeo measure r/@integer?)
(conde
((rosette-asserto `(,r/@= 0 (,r/@modulo ,position ,measure)))
(== cur-harmony 1)
(interval-ino perfect-consonant cur-note cur-harmony))
((rosette-asserto `(,r/@! (,r/@= 0 (,r/@modulo ,position ,measure))))
(interval-ino imperfect-consonant cur-note cur-harmony)))))
(define (musico measure phrase position prev-note prev-harmony m)
(fresh ()
(rosette-typeo position r/@integer?)
;; The following two variables seem always ground?
;; So use `(* measure phrase)` directly.
(rosette-typeo measure r/@integer?)
(rosette-typeo phrase r/@integer?)
(conde
((rosette-asserto `(,r/@= ,position ,(* measure phrase)))
(== m '()))
((rosette-asserto `(,r/@< ,position ,(* measure phrase)))
(fresh (position+1 cur-note cur-harmony rest-m)
(== m (cons (list cur-note cur-harmony) rest-m))
(rosette-typeo position+1 r/@integer?)
(rosette-asserto `(,r/@= ,position+1 (,r/@+ 1 ,position)))
(zico measure phrase position prev-note cur-note prev-harmony cur-harmony)
(musico measure phrase position+1 cur-note cur-harmony rest-m))))))
(test "1"
(run 1 (m)
(musico 1 1 0 5 5 m))
'(((1 1))))
(run 1 (m)
(musico 5 1 0 5 5 m))
'(((1 1) (6 3) (9 6) (5 2) (8 5))))
(run 1 (m)
(musico 4 2 0 5 5 m))
'(((1 1) (9 6) (5 2) (8 5) (1 1) (6 3) (9 6) (5 2))))
|
9879c3a19eaaaa37104d525e2f1a3812c7f0c8b8e3a8c65905e0d50b389cdcc7 | GaloisInc/daedalus | Debug.hs | {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Daedalus.LSP.Command.Debug (debugPass, passNames) where
import qualified Data.Text as Text
import qualified Data.Aeson as A
import qualified Language.LSP.Types as J
import Control.Monad.IO.Class (liftIO)
import Data.Foldable (find)
import Data.Text (Text)
import Daedalus.Driver
import Daedalus.LSP.Monad
import Daedalus.LSP.Position (declAtPos)
import Daedalus.PP (Doc, pp)
import Daedalus.Type.AST
data DDLPass = DDLPass
{ passName :: Text
, passRun :: TCModule SourceRange -> Ident -> ModuleName -> Daedalus Doc
}
-- We could be clever and order these to reduce duplication, but this
-- is simpler
passes :: [DDLPass]
passes = [ DDLPass { passName = "tc"
, passRun = \md _ _ -> pure (pp md)
}
, DDLPass { passName = "core"
, passRun = \m entryName specMod -> do
passSpecialize specMod [(tcModuleName m, entryName)]
passCore specMod
pp <$> ddlGetAST specMod astCore
}
]
passNames :: [Text]
passNames = map passName passes
debugPass :: J.Position -> TCModule SourceRange -> Text -> ServerM (Maybe A.Value)
debugPass pos m passN = do
let m_d = declAtPos pos m
let entryName = maybe "Main" (nameToIdent . nameScopedIdent . tcDeclName) m_d
specMod = "DaedalusMain"
case find (\pass -> passName pass == passN) passes of
Nothing -> pure Nothing
Just pass -> do
e_r <- liftDaedalus (passRun pass m entryName specMod)
msg <- case e_r of
Left err -> liftIO $ prettyDaedalusError err
Right r -> pure (show r)
pure (Just $ A.String (Text.pack msg))
where
-- FIXME: move
nameToIdent x = case x of
Unknown n -> n
Local n -> n
ModScope _m n -> n
-- runIt ms d = do
-- (_, res) <- interpFile Nothing ms (nameScopedIdent (tcDeclName d))
-- -- For now we just return the pretty-printed value (we could also return the json)
-- let msg = case res of
-- RTS.NoResults err -> show (RTS.ppParseError err)
--           RTS.Results as -> showPP (NE.head as) -- FIXME
--   pure (Just $ A.String (Text.pack msg))
| null | https://raw.githubusercontent.com/GaloisInc/daedalus/4a7db71fd2cbe64d9c073c3cf175ffbc353a65cc/daedalus-language-server/src/Daedalus/LSP/Command/Debug.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
We could be clever and order these to reduce duplication, but this
is simpler
FIXME: move
runIt ms d = do
(_, res) <- interpFile Nothing ms (nameScopedIdent (tcDeclName d))
-- For now we just return the pretty-printed value (we could also return the json)
let msg = case res of
RTS.NoResults err -> show (RTS.ppParseError err)
FIXME |
module Daedalus.LSP.Command.Debug (debugPass, passNames) where
import qualified Data.Text as Text
import qualified Data.Aeson as A
import qualified Language.LSP.Types as J
import Control.Monad.IO.Class (liftIO)
import Data.Foldable (find)
import Data.Text (Text)
import Daedalus.Driver
import Daedalus.LSP.Monad
import Daedalus.LSP.Position (declAtPos)
import Daedalus.PP (Doc, pp)
import Daedalus.Type.AST
data DDLPass = DDLPass
{ passName :: Text
, passRun :: TCModule SourceRange -> Ident -> ModuleName -> Daedalus Doc
}
passes :: [DDLPass]
passes = [ DDLPass { passName = "tc"
, passRun = \md _ _ -> pure (pp md)
}
, DDLPass { passName = "core"
, passRun = \m entryName specMod -> do
passSpecialize specMod [(tcModuleName m, entryName)]
passCore specMod
pp <$> ddlGetAST specMod astCore
}
]
passNames :: [Text]
passNames = map passName passes
debugPass :: J.Position -> TCModule SourceRange -> Text -> ServerM (Maybe A.Value)
debugPass pos m passN = do
let m_d = declAtPos pos m
let entryName = maybe "Main" (nameToIdent . nameScopedIdent . tcDeclName) m_d
specMod = "DaedalusMain"
case find (\pass -> passName pass == passN) passes of
Nothing -> pure Nothing
Just pass -> do
e_r <- liftDaedalus (passRun pass m entryName specMod)
msg <- case e_r of
Left err -> liftIO $ prettyDaedalusError err
Right r -> pure (show r)
pure (Just $ A.String (Text.pack msg))
where
nameToIdent x = case x of
Unknown n -> n
Local n -> n
ModScope _m n -> n
--   pure (Just $ A.String (Text.pack msg))
|
0ff55d2c8e6a6a9f62ba4351413ccc3bd75885cf05fd95afe11dc7b6ca9f7d16 | VincentCordobes/prep | cli.ml | open Base
let rec add ?(last_reviewed_at = Unix.time ()) ?(retry = false) content =
let content =
match content with
| Some s -> if retry then Editor.edit (s ^ Editor.default_template) else s
| None -> Editor.edit Editor.default_template
in
let store = Store.load () in
let card_content = Card.Plain content in
let id = Card.Id.generate (Card.title_of_content card_content) in
let exists = Store.exists id store in
if exists then (
Fmt.pr "This name already exists. Press any key to continue...@.";
Caml.(input_char Caml.stdin) |> ignore;
add (Some content) ~retry:true)
else
match
Card.create ~deck:store.current_deck id card_content last_reviewed_at
with
| Ok card ->
let updated_store = Store.add card store in
Store.save updated_store;
Fmt.pr "Card added (id: %s)\n" (Card.Id.to_short card.id)
| Error msg -> failwith msg
let add_file ?(name = None) ?(last_reviewed_at = Unix.time ()) file =
let open Caml in
let store = Store.load () in
let path =
if Filename.is_relative file then
Filename.concat (Sys.getcwd ()) file
else
file
in
let card_content = Card.File (name, path) in
let id = Card.Id.generate (Card.title_of_content card_content) in
let exists = Store.exists id store in
if exists then
Fmt.pr "This card already exists@."
else
match
Card.create id ~deck:store.current_deck card_content last_reviewed_at
with
| Ok card ->
store |> Store.add card |> Store.save;
Fmt.pr "Card added (id: %s)\n" card.id
| Error msg -> failwith msg
let add_box interval =
let store = Store.load () in
let box_exists =
List.exists
~f:(fun box -> Interval.compare box.interval interval = 0)
(Store.get_boxes store)
in
if box_exists then
Console.(print_error "A box with interval %a already exists" green_s)
@@ Interval.to_string interval
else begin
Store.save (Store.add_box (Box.create interval) store);
Fmt.pr "Box added (repetitions every %a)" Console.green_s
(Interval.to_string interval)
end
let date_of_datetime dt =
let open ISO8601.Permissive in
date (string_of_date dt)
let next_review (interval : Interval.t) (card : Card.t) =
Float.(
let interval =
let day_to_second n = of_int n * 24.0 * 60.0 * 60.0 in
match interval with
| Day n -> day_to_second n
| Week n -> day_to_second n * 7.0
in
date_of_datetime card.last_reviewed_at + interval)
let print_cards_to_review ?(all = false) now store cards =
let open Card in
let grouped_cards =
cards
|> List.map ~f:(fun card ->
let box = Store.get_box card store in
let date = next_review box.interval card in
(date, card))
|> List.sort ~compare:(fun (x, _) (y, _) -> Float.(x - y |> to_int))
|> List.group ~break:(fun (x, _) (y, _) -> Float.(x <> y))
|> List.map ~f:(fun group ->
let cards = List.map group ~f:(fun (_, card) -> card) in
let date, _ = List.hd_exn group in
(date, cards))
in
let is_today date =
let date = Unix.localtime date in
let now = Unix.localtime now in
date.tm_year = now.tm_year && date.tm_yday = now.tm_yday
in
let cards_to_review, futur_cards_to_review =
let ( <= ) x y =
let y = Unix.localtime y in
let x = Unix.localtime x in
if x.tm_year = y.tm_year then
x.tm_yday <= y.tm_yday
else
x.tm_year < y.tm_year
in
List.partition_tf grouped_cards ~f:(fun (date, _cards) -> date <= now)
in
let print_space_when_not_last i items ppf =
if i < List.length items - 1 then
Fmt.pf ppf "@ "
in
let pp_box ppf box = Fmt.pf ppf "#%d" (box + 1) in
let pp_title ppf card =
let words = String.split_on_chars ~on:[ ' ' ] (title card) in
List.iteri words ~f:(fun i word ->
Fmt.pf ppf "%s" word;
print_space_when_not_last i words ppf)
in
let pp_id ppf card = Fmt.(pf ppf "(%s)" (Card.Id.to_short card.id)) in
let pp_cards ppf cards =
match cards with
| [] -> Fmt.pf ppf "%a" Fmt.(styled `Yellow string) "--"
| _ ->
let pp_card i card =
Fmt.(
pf ppf "%a @[%a@ %a@]"
(styled `Green pp_box)
card.box pp_title card
(styled `Faint pp_id)
card);
print_space_when_not_last i cards ppf
in
List.iteri cards ~f:pp_card
in
let pp_group ppf (date, cards) =
let color = if is_today date then `Yellow else `Faint in
Fmt.pf ppf "%a @[<v>%a@]"
(Fmt.styled color ISO8601.Permissive.pp_date)
date pp_cards cards
in
let cards_to_review =
let today_empty =
not (List.exists cards_to_review ~f:(fun (date, _) -> is_today date))
in
if today_empty then
cards_to_review @ [ (now, []) ]
else
cards_to_review
in
let cards_to_print =
match futur_cards_to_review with
| [] -> cards_to_review
| x :: tail -> cards_to_review @ [ x ] @ if all then tail else []
in
let pp_groups ppf cards_to_print =
if List.length cards > 0 then
cards_to_print
|> List.iteri ~f:(fun i card_group ->
Fmt.pf ppf "@[<h>%a@]" pp_group card_group;
print_space_when_not_last i cards_to_print ppf)
else
Fmt.pf ppf "No card."
in
Fmt.pr "@[<v>%a@]@." pp_groups cards_to_print
let pp_cards ?interval ppf cards =
let open Card in
let open ISO8601.Permissive in
let open Fmt in
let pp_content ppf card =
match interval with
| None -> pf ppf "Box #%d" (card.box + 1)
| Some interval -> pf ppf "%a" pp_date (next_review interval card)
in
let _pp_last_reviewed ppf card = pf ppf "%a" pp_content card in
if List.length cards > 0 then
cards
|> List.sort ~compare:(fun a b -> if a.archived then 1 else a.box - b.box)
|> List.iter ~f:(fun card ->
if card.archived then
pf ppf " %a %s@." (styled `Red string) "[archived]" (title card)
else
pf ppf " %a %s@."
(styled `Faint string)
(Card.Id.to_short card.id) (title card))
else
Fmt.pf ppf " No card.\n"
let list_boxes () =
let store = Store.load () in
let print_box box_id box =
let interval = Box.(box.interval) in
let cards =
List.filter
~f:(fun card ->
card.box = box_id && String.(card.deck = store.current_deck))
store.cards
in
let pp_box_id ppf box_id =
Fmt.pf ppf "%a"
Fmt.(styled `Green string)
("#" ^ Int.to_string (box_id + 1))
in
let pp_heading ppf box_id =
let line_before = if box_id = 0 then "" else "\n" in
Fmt.pf ppf "%s%a Every %s\n" line_before pp_box_id box_id
(Interval.to_string interval)
in
Fmt.pr "%a" pp_heading box_id;
Fmt.pr "%a" (pp_cards ~interval) cards
in
List.iteri ~f:print_box (Store.get_boxes store)
let list_decks () =
let store = Store.load () in
match store.decks with
| [] -> Fmt.pr "No decks@."
| _ ->
let print_deck deck =
if String.(store.current_deck = deck) then
Fmt.pr "* %s\n" deck
else
Fmt.pr " %s\n" deck
in
List.iter store.decks ~f:(fun deck -> print_deck deck.id)
let add_deck name =
Store.load ()
|> Store.add_deck (Deck.create ~id:name ~decks:[] ())
|> Store.save
let rec use_deck ~input_char name =
let store = Store.load () in
match List.find ~f:(fun deck -> String.(deck.id = name)) store.decks with
| Some _ ->
Store.save { store with current_deck = name };
Fmt.pr "Using deck %s@." name
| None -> (
Fmt.pr "Deck %s doesn't exist. Do you want to create it? [y/N] %!" name;
match input_char () with
| Some c when Char.(c = 'y' || c = 'Y') ->
let store = store |> Store.add_deck (Deck.create ~id:name ()) in
Store.save store;
Fmt.pr "Deck created.@.";
use_deck ~input_char name
| _ -> Fmt.pr "Aborted!@.")
let show_file_content ?(with_editor = false) path =
let filetype = Caml.Filename.extension path in
let plainTextCard =
List.exists [ ".md"; ".txt"; "" ] ~f:(fun extension ->
String.(extension = filetype))
in
let plainTextApp =
if plainTextCard || with_editor then
match Caml.Sys.getenv_opt "VISUAL" with
| Some x -> [ x ]
| None -> (
match Caml.Sys.getenv_opt "EDITOR" with
| Some x -> [ x ]
| None -> (
match Caml.Sys.getenv_opt "PAGER" with
| Some x -> [ x ]
| None -> []))
else
[]
in
let candidates =
if with_editor then
plainTextApp
else
plainTextApp @ [ "open"; "xdg-open" ]
in
List.exists
~f:(fun bin ->
Caml.Sys.command (bin ^ " " ^ Caml.Filename.quote path ^ " 2> /dev/null")
<> 127)
candidates
|> ignore
let show_card ?(with_editor = false) id =
let store = Store.load () in
let card = Store.find_card_exn id store in
match card.content with
| Plain text -> Fmt.pr "%s\n" text
| File (_, path) -> show_file_content ~with_editor path
(* let edit open_in_editor card_id = *)
(* let store = Store.load () in *)
(* let card = Store.find_card_exn card_id store in *)
(*   let content = match card.content with Plain text | File (_, text) -> text in *)
(*   let new_content = open_in_editor (content ^ Editor.default_template) in *)
(*   let new_id = Card.Id.generate new_content in *)
(*   let new_card = { card with content = Plain new_content; id = new_id } in *)
(*   store |> Store.set_card card.id new_card |> Store.save; *)
(*  *)
(*   if String.(new_id = card.id) then *)
(*     Fmt.pr "Edited card %a@." Console.yellow_s @@ new_card.id *)
(*   else *)
(*     Fmt.pr "Edited card %a (new name %a)@." Console.yellow_s card.id *)
(* Console.green_s *)
(* @@ new_card.id *)
let remove input_char card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Fmt.pr "You are about to remove the card '%a', continue? [y/N]: %!"
Console.magenta_s
@@ Card.title card;
match input_char () with
| Some c when Char.(c = 'y' || c = 'Y') ->
let cards =
List.filter store.cards ~f:(fun c -> not String.(c.id = card.id))
in
Store.save { store with cards };
Fmt.pr "Card removed.@."
| _ -> Fmt.pr "Aborted!@."
let archive card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Store.set_card card.id { card with archived = true } store |> Store.save;
Fmt.pr "Card Archived.@."
let unarchive card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Store.set_card card.id { card with archived = false } store |> Store.save;
Fmt.pr "Card unarchived.@."
let move_card ~at card_id box_id =
let store = Store.load () in
Store.move_card_to at (box_id - 1) card_id store |> Store.save
let complete_ids () =
let store = Store.load () in
let cards = store.cards in
List.iter cards ~f:(fun card -> Fmt.pr "%s " Card.(card.id));
Fmt.pr "@."
let zshids () =
let store = Store.load () in
let cards = Store.get_cards store in
List.iter cards ~f:(fun card ->
Fmt.pr "%s:%s\n" (Card.Id.to_short card.id) (Card.title card))
let rate ~at (rating : Card.Rating.t) card_id =
let open Card.Rating in
let store = Store.load () in
let card = Store.find_card_exn card_id store in
let move_card_to = Store.move_card_to at in
begin
match rating with
| Bad -> store |> move_card_to 0 card_id |> Store.save
| Again -> store |> move_card_to card.box card_id |> Store.save
| Good -> store |> move_card_to (card.box + 1) card_id |> Store.save
| Easy ->
store
|> move_card_to (List.length (Store.get_boxes store) - 1) card_id
|> Store.save
end;
Fmt.pr "Card rated %a\n" Console.magenta_s
@@ String.lowercase
@@ Card.Rating.to_string rating
let review ?(deck = None) ?(all = false) now =
let open Box in
let store = Store.load () in
let deck = match deck with Some deck -> deck | None -> store.current_deck in
let _should_review (card : Card.t) =
if String.(card.deck = deck) then
let box = List.nth_exn (Store.get_boxes ~deck store) card.box in
Float.(next_review box.interval card <= now)
else
false
in
store |> Store.get_cards ~deck
|> List.filter ~f:(fun card -> not Card.(card.archived))
|> print_cards_to_review ~all now store
| null | https://raw.githubusercontent.com/VincentCordobes/prep/8929c4798ae705f4b1c8c389447c47cd2b3caca8/lib/cli.ml | ocaml | let edit open_in_editor card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
let new_content = open_in_editor (content ^ Editor.default_template) in
let new_id = Card.Id.generate new_content in
else
Console.green_s
@@ new_card.id | open Base
let rec add ?(last_reviewed_at = Unix.time ()) ?(retry = false) content =
let content =
match content with
| Some s -> if retry then Editor.edit (s ^ Editor.default_template) else s
| None -> Editor.edit Editor.default_template
in
let store = Store.load () in
let card_content = Card.Plain content in
let id = Card.Id.generate (Card.title_of_content card_content) in
let exists = Store.exists id store in
if exists then (
Fmt.pr "This name already exists. Press any key to continue...@.";
Caml.(input_char Caml.stdin) |> ignore;
add (Some content) ~retry:true)
else
match
Card.create ~deck:store.current_deck id card_content last_reviewed_at
with
| Ok card ->
let updated_store = Store.add card store in
Store.save updated_store;
Fmt.pr "Card added (id: %s)\n" (Card.Id.to_short card.id)
| Error msg -> failwith msg
let add_file ?(name = None) ?(last_reviewed_at = Unix.time ()) file =
let open Caml in
let store = Store.load () in
let path =
if Filename.is_relative file then
Filename.concat (Sys.getcwd ()) file
else
file
in
let card_content = Card.File (name, path) in
let id = Card.Id.generate (Card.title_of_content card_content) in
let exists = Store.exists id store in
if exists then
Fmt.pr "This card already exists@."
else
match
Card.create id ~deck:store.current_deck card_content last_reviewed_at
with
| Ok card ->
store |> Store.add card |> Store.save;
Fmt.pr "Card added (id: %s)\n" card.id
| Error msg -> failwith msg
let add_box interval =
let store = Store.load () in
let box_exists =
List.exists
~f:(fun box -> Interval.compare box.interval interval = 0)
(Store.get_boxes store)
in
if box_exists then
Console.(print_error "A box with interval %a already exists" green_s)
@@ Interval.to_string interval
else begin
Store.save (Store.add_box (Box.create interval) store);
Fmt.pr "Box added (repetitions every %a)" Console.green_s
(Interval.to_string interval)
end
let date_of_datetime dt =
let open ISO8601.Permissive in
date (string_of_date dt)
let next_review (interval : Interval.t) (card : Card.t) =
Float.(
let interval =
let day_to_second n = of_int n * 24.0 * 60.0 * 60.0 in
match interval with
| Day n -> day_to_second n
| Week n -> day_to_second n * 7.0
in
date_of_datetime card.last_reviewed_at + interval)
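(* Editor's illustrative sketch, not part of the original cli.ml: computing a
   hypothetical card's next review for a box that repeats every 3 days.
   Assumes [Interval.Day] is the constructor matched above. *)
let _example_next_review (card : Card.t) = next_review (Interval.Day 3) card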
let print_cards_to_review ?(all = false) now store cards =
let open Card in
let grouped_cards =
cards
|> List.map ~f:(fun card ->
let box = Store.get_box card store in
let date = next_review box.interval card in
(date, card))
|> List.sort ~compare:(fun (x, _) (y, _) -> Float.(x - y |> to_int))
|> List.group ~break:(fun (x, _) (y, _) -> Float.(x <> y))
|> List.map ~f:(fun group ->
let cards = List.map group ~f:(fun (_, card) -> card) in
let date, _ = List.hd_exn group in
(date, cards))
in
let is_today date =
let date = Unix.localtime date in
let now = Unix.localtime now in
date.tm_year = now.tm_year && date.tm_yday = now.tm_yday
in
let cards_to_review, futur_cards_to_review =
let ( <= ) x y =
let y = Unix.localtime y in
let x = Unix.localtime x in
if x.tm_year = y.tm_year then
x.tm_yday <= y.tm_yday
else
x.tm_year < y.tm_year
in
List.partition_tf grouped_cards ~f:(fun (date, _cards) -> date <= now)
in
let print_space_when_not_last i items ppf =
if i < List.length items - 1 then
Fmt.pf ppf "@ "
in
let pp_box ppf box = Fmt.pf ppf "#%d" (box + 1) in
let pp_title ppf card =
let words = String.split_on_chars ~on:[ ' ' ] (title card) in
List.iteri words ~f:(fun i word ->
Fmt.pf ppf "%s" word;
print_space_when_not_last i words ppf)
in
let pp_id ppf card = Fmt.(pf ppf "(%s)" (Card.Id.to_short card.id)) in
let pp_cards ppf cards =
match cards with
| [] -> Fmt.pf ppf "%a" Fmt.(styled `Yellow string) "--"
| _ ->
let pp_card i card =
Fmt.(
pf ppf "%a @[%a@ %a@]"
(styled `Green pp_box)
card.box pp_title card
(styled `Faint pp_id)
card);
print_space_when_not_last i cards ppf
in
List.iteri cards ~f:pp_card
in
let pp_group ppf (date, cards) =
let color = if is_today date then `Yellow else `Faint in
Fmt.pf ppf "%a @[<v>%a@]"
(Fmt.styled color ISO8601.Permissive.pp_date)
date pp_cards cards
in
let cards_to_review =
let today_empty =
not (List.exists cards_to_review ~f:(fun (date, _) -> is_today date))
in
if today_empty then
cards_to_review @ [ (now, []) ]
else
cards_to_review
in
let cards_to_print =
match futur_cards_to_review with
| [] -> cards_to_review
| x :: tail -> cards_to_review @ [ x ] @ if all then tail else []
in
let pp_groups ppf cards_to_print =
if List.length cards > 0 then
cards_to_print
|> List.iteri ~f:(fun i card_group ->
Fmt.pf ppf "@[<h>%a@]" pp_group card_group;
print_space_when_not_last i cards_to_print ppf)
else
Fmt.pf ppf "No card."
in
Fmt.pr "@[<v>%a@]@." pp_groups cards_to_print
let pp_cards ?interval ppf cards =
let open Card in
let open ISO8601.Permissive in
let open Fmt in
let pp_content ppf card =
match interval with
| None -> pf ppf "Box #%d" (card.box + 1)
| Some interval -> pf ppf "%a" pp_date (next_review interval card)
in
let _pp_last_reviewed ppf card = pf ppf "%a" pp_content card in
if List.length cards > 0 then
cards
|> List.sort ~compare:(fun a b -> if a.archived then 1 else a.box - b.box)
|> List.iter ~f:(fun card ->
if card.archived then
pf ppf " %a %s@." (styled `Red string) "[archived]" (title card)
else
pf ppf " %a %s@."
(styled `Faint string)
(Card.Id.to_short card.id) (title card))
else
Fmt.pf ppf " No card.\n"
let list_boxes () =
let store = Store.load () in
let print_box box_id box =
let interval = Box.(box.interval) in
let cards =
List.filter
~f:(fun card ->
card.box = box_id && String.(card.deck = store.current_deck))
store.cards
in
let pp_box_id ppf box_id =
Fmt.pf ppf "%a"
Fmt.(styled `Green string)
("#" ^ Int.to_string (box_id + 1))
in
let pp_heading ppf box_id =
let line_before = if box_id = 0 then "" else "\n" in
Fmt.pf ppf "%s%a Every %s\n" line_before pp_box_id box_id
(Interval.to_string interval)
in
Fmt.pr "%a" pp_heading box_id;
Fmt.pr "%a" (pp_cards ~interval) cards
in
List.iteri ~f:print_box (Store.get_boxes store)
let list_decks () =
let store = Store.load () in
match store.decks with
| [] -> Fmt.pr "No decks@."
| _ ->
let print_deck deck =
if String.(store.current_deck = deck) then
Fmt.pr "* %s\n" deck
else
Fmt.pr " %s\n" deck
in
List.iter store.decks ~f:(fun deck -> print_deck deck.id)
let add_deck name =
Store.load ()
|> Store.add_deck (Deck.create ~id:name ~decks:[] ())
|> Store.save
let rec use_deck ~input_char name =
let store = Store.load () in
match List.find ~f:(fun deck -> String.(deck.id = name)) store.decks with
| Some _ ->
Store.save { store with current_deck = name };
Fmt.pr "Using deck %s@." name
| None -> (
Fmt.pr "Deck %s doesn't exist. Do you want to create it? [y/N] %!" name;
match input_char () with
| Some c when Char.(c = 'y' || c = 'Y') ->
let store = store |> Store.add_deck (Deck.create ~id:name ()) in
Store.save store;
Fmt.pr "Deck created.@.";
use_deck ~input_char name
| _ -> Fmt.pr "Aborted!@.")
let show_file_content ?(with_editor = false) path =
let filetype = Caml.Filename.extension path in
let plainTextCard =
List.exists [ ".md"; ".txt"; "" ] ~f:(fun extension ->
String.(extension = filetype))
in
let plainTextApp =
if plainTextCard || with_editor then
match Caml.Sys.getenv_opt "VISUAL" with
| Some x -> [ x ]
| None -> (
match Caml.Sys.getenv_opt "EDITOR" with
| Some x -> [ x ]
| None -> (
match Caml.Sys.getenv_opt "PAGER" with
| Some x -> [ x ]
| None -> []))
else
[]
in
let candidates =
if with_editor then
plainTextApp
else
plainTextApp @ [ "open"; "xdg-open" ]
in
List.exists
~f:(fun bin ->
Caml.Sys.command (bin ^ " " ^ Caml.Filename.quote path ^ " 2> /dev/null")
<> 127)
candidates
|> ignore
let show_card ?(with_editor = false) id =
let store = Store.load () in
let card = Store.find_card_exn id store in
match card.content with
| Plain text -> Fmt.pr "%s\n" text
| File (_, path) -> show_file_content ~with_editor path
(* let content = match card.content with Plain text | File (_, text) -> text in *)
(* let new_card = { card with content = Plain new_content; id = new_id } in *)
(* store |> Store.set_card card.id new_card |> Store.save; *)
(* if String.(new_id = card.id) then *)
(*   Fmt.pr "Edited card %a@." Console.yellow_s @@ new_card.id *)
(* Fmt.pr "Edited card %a (new name %a)@." Console.yellow_s card.id *)
let remove input_char card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Fmt.pr "You are about to remove the card '%a', continue? [y/N]: %!"
Console.magenta_s
@@ Card.title card;
match input_char () with
| Some c when Char.(c = 'y' || c = 'Y') ->
let cards =
List.filter store.cards ~f:(fun c -> not String.(c.id = card.id))
in
Store.save { store with cards };
Fmt.pr "Card removed.@."
| _ -> Fmt.pr "Aborted!@."
let archive card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Store.set_card card.id { card with archived = true } store |> Store.save;
Fmt.pr "Card Archived.@."
let unarchive card_id =
let store = Store.load () in
let card = Store.find_card_exn card_id store in
Store.set_card card.id { card with archived = false } store |> Store.save;
Fmt.pr "Card unarchived.@."
let move_card ~at card_id box_id =
let store = Store.load () in
Store.move_card_to at (box_id - 1) card_id store |> Store.save
let complete_ids () =
let store = Store.load () in
let cards = store.cards in
List.iter cards ~f:(fun card -> Fmt.pr "%s " Card.(card.id));
Fmt.pr "@."
let zshids () =
let store = Store.load () in
let cards = Store.get_cards store in
List.iter cards ~f:(fun card ->
Fmt.pr "%s:%s\n" (Card.Id.to_short card.id) (Card.title card))
let rate ~at (rating : Card.Rating.t) card_id =
let open Card.Rating in
let store = Store.load () in
let card = Store.find_card_exn card_id store in
let move_card_to = Store.move_card_to at in
begin
match rating with
| Bad -> store |> move_card_to 0 card_id |> Store.save
| Again -> store |> move_card_to card.box card_id |> Store.save
| Good -> store |> move_card_to (card.box + 1) card_id |> Store.save
| Easy ->
store
|> move_card_to (List.length (Store.get_boxes store) - 1) card_id
|> Store.save
end;
Fmt.pr "Card rated %a\n" Console.magenta_s
@@ String.lowercase
@@ Card.Rating.to_string rating
let review ?(deck = None) ?(all = false) now =
let open Box in
let store = Store.load () in
let deck = match deck with Some deck -> deck | None -> store.current_deck in
let _should_review (card : Card.t) =
if String.(card.deck = deck) then
let box = List.nth_exn (Store.get_boxes ~deck store) card.box in
Float.(next_review box.interval card <= now)
else
false
in
store |> Store.get_cards ~deck
|> List.filter ~f:(fun card -> not Card.(card.archived))
|> print_cards_to_review ~all now store
|
710d5321d83bba053e9cd5e758b487aca5fa918ecf4bcda8b2a57ebb57d87d61 | JHU-PL-Lab/jaylang | bluejay_ast_internal.ml | open Batteries
type label = Bluejay_ast.label = Label of string
[@@deriving eq, ord, show, to_yojson]
type ident = Jayil.Ast.ident = Ident of string
[@@deriving eq, ord, show, to_yojson]
module Ident = Jayil.Ast.Ident
module Ident_set = Jayil.Ast.Ident_set
module Ident_map = Jayil.Ast.Ident_map
type variant_label = Bluejay_ast.variant_label = Variant_label of string
[@@deriving eq, ord, show, to_yojson]
type syntactic_only = [ `Syntactic ]
type semantic_only = [ `Semantic ]
type core_only = [ `Core ]
type 'a syntactic_and_semantic = [< `Syntactic | `Semantic ] as 'a
type 'a core_and_semantic = [< `Core | `Semantic ] as 'a
type type_sig =
| TopType
| IntType
| BoolType
| FunType
| RecType of Ident_set.t
| ListType
| VariantType of variant_label
| UntouchedType of string
[@@deriving eq, ord, show, to_yojson]
type pattern = Bluejay_ast.pattern =
| AnyPat
| IntPat
| BoolPat
| FunPat
| RecPat of ident option Ident_map.t
| StrictRecPat of ident option Ident_map.t
| VariantPat of variant_label * ident
| VarPat of ident
| EmptyLstPat
| LstDestructPat of ident * ident
[@@deriving eq, ord, show, to_yojson]
type predicate = syntactic_only expr_desc
and 'a funsig = Funsig of ident * ident list * 'a expr_desc
and 'a typed_funsig =
| Typed_funsig of
ident * (ident * 'a expr_desc) list * ('a expr_desc * 'a expr_desc)
(* TODO: In the future we may want to change this to an argument list to accommodate an easier user experience *)
| DTyped_funsig of
ident * (ident * 'a expr_desc) * ('a expr_desc * 'a expr_desc)
and 'a expr_desc = { body : 'a expr; tag : int }
(*
P1: no internal transformation -> doesn't need to change tag
P2: no internal transformation
P3: HAS internal transformation
*)
and 'a expr =
| Int : int -> 'a expr
| Bool : bool -> 'a expr
| Var : ident -> 'a expr
| Function : (ident list * 'a expr_desc) -> 'a expr
| Input : 'a expr
| Appl : ('a expr_desc * 'a expr_desc) -> 'a expr
| Let : (ident * 'a expr_desc * 'a expr_desc) -> 'a expr
| LetRecFun : ('a funsig list * 'a expr_desc) -> 'a expr
| LetFun : ('a funsig * 'a expr_desc) -> 'a expr
| LetWithType :
(ident * 'a expr_desc * 'a expr_desc * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| LetRecFunWithType :
('a typed_funsig list * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| LetFunWithType :
('a typed_funsig * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| Plus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Minus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Times : ('a expr_desc * 'a expr_desc) -> 'a expr
| Divide : ('a expr_desc * 'a expr_desc) -> 'a expr
| Modulus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Equal : ('a expr_desc * 'a expr_desc) -> 'a expr
| Neq : ('a expr_desc * 'a expr_desc) -> 'a expr
| LessThan : ('a expr_desc * 'a expr_desc) -> 'a expr
| Leq : ('a expr_desc * 'a expr_desc) -> 'a expr
| GreaterThan : ('a expr_desc * 'a expr_desc) -> 'a expr
| Geq : ('a expr_desc * 'a expr_desc) -> 'a expr
| And : ('a expr_desc * 'a expr_desc) -> 'a expr
| Or : ('a expr_desc * 'a expr_desc) -> 'a expr
| Not : 'a expr_desc -> 'a expr
| If : ('a expr_desc * 'a expr_desc * 'a expr_desc) -> 'a expr
| Record : 'a expr_desc Ident_map.t -> 'a expr
| RecordProj : ('a expr_desc * label) -> 'a expr
| Match : ('a expr_desc * (pattern * 'a expr_desc) list) -> 'a expr
| VariantExpr : (variant_label * 'a expr_desc) -> 'a expr
| List : 'a expr_desc list -> 'a expr
| ListCons : ('a expr_desc * 'a expr_desc) -> 'a expr
(* TODO: Create a separate class of constructors for type errors? *)
| TypeError : ident -> 'a expr
| Assert : 'a expr_desc -> 'a expr
| Assume : 'a expr_desc -> 'a expr
(* Type expressions *)
| TypeVar : ident -> syntactic_only expr
| TypeInt : syntactic_only expr
| TypeBool : syntactic_only expr
| TypeRecord : syntactic_only expr_desc Ident_map.t -> syntactic_only expr
| TypeList : syntactic_only expr_desc -> syntactic_only expr
| TypeArrow :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeArrowD :
((ident * syntactic_only expr_desc) * syntactic_only expr_desc)
-> syntactic_only expr
| TypeSet : syntactic_only expr_desc * predicate -> syntactic_only expr
| TypeUnion :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeIntersect :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeRecurse : (ident * syntactic_only expr_desc) -> syntactic_only expr
| TypeUntouched : string -> syntactic_only expr
| TypeVariant :
(variant_label * syntactic_only expr_desc)
-> syntactic_only expr
let counter = ref 0
let fresh_tag () =
let c = !counter in
counter := c + 1 ;
c
let new_expr_desc : type a. a expr -> a expr_desc =
fun e -> { tag = fresh_tag (); body = e }
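(* Editor's illustrative sketch (not part of the original module): building the
   expression [1 + 2] as tagged descriptors. Every [new_expr_desc] call draws a
   fresh tag, so structurally identical subtrees still carry distinct tags. *)
let _example_one_plus_two : syntactic_only expr_desc =
  new_expr_desc (Plus (new_expr_desc (Int 1), new_expr_desc (Int 2)))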
type syn_type_bluejay = syntactic_only expr
type syn_bluejay_edesc = syntactic_only expr_desc
type syn_type_bluejay_desc = syntactic_only expr_desc
type sem_type_bluejay = [ `Semantic ] expr
type sem_bluejay_edesc = [ `Semantic ] expr_desc
type sem_type_bluejay_desc = [ `Semantic ] expr
type core_bluejay = [ `Core ] expr
type core_bluejay_edesc = [ `Core ] expr_desc
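(* Editor's illustrative sketch (not part of the original module): the phantom
   stage parameter keeps type-level constructors such as [TypeInt] confined to
   syntactic-stage trees; the annotation below would not typecheck with
   [core_bluejay_edesc] in place of [syn_bluejay_edesc]. *)
let _syntactic_stage_only_example : syn_bluejay_edesc = new_expr_desc TypeInt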
let rec equal_funsig : type a. a funsig -> a funsig -> bool =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
id1 = id2 && List.eq equal_ident params1 params2 && equal_expr_desc fe1 fe2
and equal_typed_funsig : type a. a typed_funsig -> a typed_funsig -> bool =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2
&& List.equal
(fun (param1, t1) (param2, t2) ->
equal_ident param1 param2 && equal_expr_desc t1 t2)
params_with_type_1 params_with_type_2
&& equal_expr_desc f_body_1 f_body_2
&& equal_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2 && equal_ident param1 param2 && equal_expr_desc t1 t2
&& equal_expr_desc f_body_1 f_body_2
&& equal_expr_desc ret_type_1 ret_type_2
| _ -> false
and equal_expr_desc : type a. a expr_desc -> a expr_desc -> bool =
fun e1 e2 ->
equal_expr e1.body e2.body
(* && Option.eq e1.tag e2.tag *)
&& e1.tag = e2.tag
and equal_expr : type a. a expr -> a expr -> bool =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> n1 = n2
(* | Int _, _ -> false *)
| Bool b1, Bool b2 -> b1 = b2
(* | Bool _, _ -> false *)
| Input, Input -> true
(* | Input, _ -> false *)
| Var x1, Var x2 -> x1 = x2
(* | Var _, _ -> false *)
| List l1, List l2 -> List.eq equal_expr_desc l1 l2
(* | List _, _ -> false *)
| Record r1, Record r2 -> Ident_map.equal equal_expr_desc r1 r2
(* | Record _, _ -> false *)
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.eq equal_ident id_lst1 id_lst2 && equal_expr_desc fun_body1 fun_body2
(* | Function _, _ -> false *)
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
x1 = x2 && equal_expr_desc xe1 xe2 && equal_expr_desc e1 e2
(* | Let _, _ -> false *)
| LetFun (f1, e1), LetFun (f2, e2) ->
equal_funsig f1 f2 && equal_expr_desc e1 e2
(* | LetFun _, _ -> false *)
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.eq equal_funsig sig_lst1 sig_lst2 && equal_expr_desc e1 e2
(* | LetRecFun _, _ -> false *)
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
x1 = x2 && equal_expr_desc xe1 xe2 && equal_expr_desc e1 e2
&& equal_expr_desc t1 t2
(* | LetWithType _, _ -> false *)
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
equal_typed_funsig f1 f2 && equal_expr_desc e1 e2
(* | LetFunWithType _, _ -> false *)
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.eq equal_typed_funsig sig_lst1 sig_lst2 && equal_expr_desc e1 e2
(* | LetRecFunWithType _, _ -> false *)
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let eq_pe (p1, e1) (p2, e2) = p1 = p2 && equal_expr_desc e1 e2 in
equal_expr_desc me1 me2 && List.eq eq_pe pe_lst1 pe_lst2
(* | Match _, _ -> false *)
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
equal_expr_desc cond1 cond2
&& equal_expr_desc tb1 tb2 && equal_expr_desc fb1 fb2
(* | If _, _ -> false *)
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
equal_expr_desc lop1 lop2 && equal_expr_desc rop1 rop2
(* | Or _, _
   | And _, _
   | Equal _, _
   | Neq _, _
   | LessThan _, _
   | Leq _, _
   | GreaterThan _, _
   | Geq _, _
   | Appl _, _
   | Plus _, _
   | Minus _, _
   | Times _, _
   | Divide _, _
   | Modulus _, _
   | ListCons _, _ -> false *)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
equal_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
l1 = l2 && equal_expr_desc e1 e2
| RecordProj (e1, l1), RecordProj (e2, l2) -> l1 = l2 && equal_expr_desc e1 e2
(* Type expressions *)
| TypeVar x1, TypeVar x2 -> x1 = x2
| TypeInt, TypeInt | TypeBool, TypeBool -> true
| TypeRecord t1, TypeRecord t2 -> Ident_map.equal equal_expr_desc t1 t2
| TypeList t1, TypeList t2 -> equal_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
equal_expr_desc lt1 lt2 && equal_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
id1 = id2 && equal_expr_desc lt1 lt2 && equal_expr_desc rt1 rt2
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) -> x1 = x2 && t1 = t2
| TypeUntouched s1, TypeUntouched s2 -> s1 = s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
l1 = l2 && equal_expr_desc e1 e2
| _ -> false
let rec tagless_equal_funsig : type a. a funsig -> a funsig -> bool =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
id1 = id2
&& List.eq equal_ident params1 params2
&& tagless_equal_expr_desc fe1 fe2
and tagless_equal_typed_funsig :
type a. a typed_funsig -> a typed_funsig -> bool =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2
&& List.equal
(fun (param1, t1) (param2, t2) ->
equal_ident param1 param2 && tagless_equal_expr_desc t1 t2)
params_with_type_1 params_with_type_2
&& tagless_equal_expr_desc f_body_1 f_body_2
&& tagless_equal_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2 && equal_ident param1 param2
&& tagless_equal_expr_desc t1 t2
&& tagless_equal_expr_desc f_body_1 f_body_2
&& tagless_equal_expr_desc ret_type_1 ret_type_2
| _ -> false
and tagless_equal_expr_desc : type a. a expr_desc -> a expr_desc -> bool =
fun e1 e2 -> tagless_equal_expr e1.body e2.body
and tagless_equal_expr : type a. a expr -> a expr -> bool =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> n1 = n2
(* | Int _, _ -> false *)
| Bool b1, Bool b2 -> b1 = b2
(* | Bool _, _ -> false *)
| Input, Input -> true
(* | Input, _ -> false *)
| Var x1, Var x2 -> x1 = x2
(* | Var _, _ -> false *)
| List l1, List l2 -> List.eq tagless_equal_expr_desc l1 l2
(* | List _, _ -> false *)
| Record r1, Record r2 -> Ident_map.equal tagless_equal_expr_desc r1 r2
(* | Record _, _ -> false *)
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.eq equal_ident id_lst1 id_lst2
&& tagless_equal_expr_desc fun_body1 fun_body2
(* | Function _, _ -> false *)
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
x1 = x2
&& tagless_equal_expr_desc xe1 xe2
&& tagless_equal_expr_desc e1 e2
(* | Let _, _ -> false *)
| LetFun (f1, e1), LetFun (f2, e2) ->
equal_funsig f1 f2 && tagless_equal_expr_desc e1 e2
(* | LetFun _, _ -> false *)
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.eq equal_funsig sig_lst1 sig_lst2 && tagless_equal_expr_desc e1 e2
(* | LetRecFun _, _ -> false *)
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
x1 = x2
&& tagless_equal_expr_desc xe1 xe2
&& tagless_equal_expr_desc e1 e2
&& tagless_equal_expr_desc t1 t2
(* | LetWithType _, _ -> false *)
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
equal_typed_funsig f1 f2 && tagless_equal_expr_desc e1 e2
(* | LetFunWithType _, _ -> false *)
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.eq equal_typed_funsig sig_lst1 sig_lst2
&& tagless_equal_expr_desc e1 e2
(* | LetRecFunWithType _, _ -> false *)
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let eq_pe (p1, e1) (p2, e2) = p1 = p2 && tagless_equal_expr_desc e1 e2 in
tagless_equal_expr_desc me1 me2 && List.eq eq_pe pe_lst1 pe_lst2
(* | Match _, _ -> false *)
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
tagless_equal_expr_desc cond1 cond2
&& tagless_equal_expr_desc tb1 tb2
&& tagless_equal_expr_desc fb1 fb2
(* | If _, _ -> false *)
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
tagless_equal_expr_desc lop1 lop2 && tagless_equal_expr_desc rop1 rop2
(* | Or _, _
   | And _, _
   | Equal _, _
   | Neq _, _
   | LessThan _, _
   | Leq _, _
   | GreaterThan _, _
   | Geq _, _
   | Appl _, _
   | Plus _, _
   | Minus _, _
   | Times _, _
   | Divide _, _
   | Modulus _, _
   | ListCons _, _ -> false *)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
tagless_equal_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
| RecordProj (e1, l1), RecordProj (e2, l2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
(* Type expressions *)
| TypeVar x1, TypeVar x2 -> x1 = x2
| TypeInt, TypeInt | TypeBool, TypeBool -> true
| TypeRecord t1, TypeRecord t2 ->
Ident_map.equal tagless_equal_expr_desc t1 t2
| TypeList t1, TypeList t2 -> tagless_equal_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
tagless_equal_expr_desc lt1 lt2 && tagless_equal_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
id1 = id2
&& tagless_equal_expr_desc lt1 lt2
&& tagless_equal_expr_desc rt1 rt2
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) -> x1 = x2 && t1 = t2
| TypeUntouched s1, TypeUntouched s2 -> s1 = s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
| _ -> false
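(* Editor's illustrative sketch (not part of the original module): the two
   equality families differ only in whether tags participate. Two separately
   built [Int 0] descriptors are tagless-equal but not tag-equal. *)
let _tag_vs_tagless_example () =
  let a = new_expr_desc (Int 0) in
  let b = new_expr_desc (Int 0) in
  (equal_expr_desc a b, tagless_equal_expr_desc a b)
(* expected result: (false, true) *)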
let compare_helper (x : int) (y : int) : int = if x <> 0 then x else y
let rec compare_funsig : type a. a funsig -> a funsig -> int =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
compare id1 id2
|> compare_helper (List.compare compare_ident params1 params2)
|> compare_helper (compare_expr_desc fe1 fe2)
and compare_typed_funsig : type a. a typed_funsig -> a typed_funsig -> int =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
compare_ident f1 f2
|> compare_helper
@@ List.compare
(fun (param1, t1) (param2, t2) ->
compare_ident param1 param2
|> compare_helper @@ compare_expr_desc t1 t2)
params_with_type_1 params_with_type_2
|> compare_helper @@ compare_expr_desc f_body_1 f_body_2
|> compare_helper @@ compare_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
compare_ident f1 f2
|> compare_helper @@ compare_ident param1 param2
|> compare_helper @@ compare_expr_desc t1 t2
|> compare_helper @@ compare_expr_desc f_body_1 f_body_2
|> compare_helper @@ compare_expr_desc ret_type_1 ret_type_2
| DTyped_funsig _, Typed_funsig _ -> 1
| Typed_funsig _, DTyped_funsig _ -> -1
and compare_expr_desc : type a. a expr_desc -> a expr_desc -> int =
fun e1 e2 ->
compare_expr e1.body e2.body |> compare_helper (compare e1.tag e2.tag)
and compare_expr : type a. a expr -> a expr -> int =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> compare n1 n2
| Bool b1, Bool b2 -> compare b1 b2
| Input, Input -> 0
| Var x1, Var x2 -> compare x1 x2
| List l1, List l2 -> List.compare compare_expr_desc l1 l2
| Record r1, Record r2 -> Ident_map.compare compare_expr_desc r1 r2
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.compare compare_ident id_lst1 id_lst2
|> compare_helper (compare_expr_desc fun_body1 fun_body2)
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
compare x1 x2
|> compare_helper (compare_expr_desc xe1 xe2)
|> compare_helper (compare_expr_desc e1 e2)
| LetFun (f1, e1), LetFun (f2, e2) ->
compare_funsig f1 f2 |> compare_helper (compare_expr_desc e1 e2)
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.compare compare_funsig sig_lst1 sig_lst2 + compare_expr_desc e1 e2
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
compare x1 x2
|> compare_helper (compare_expr_desc xe1 xe2)
|> compare_helper (compare_expr_desc e1 e2)
|> compare_helper (compare_expr_desc t1 t2)
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
compare_typed_funsig f1 f2 |> compare_helper (compare_expr_desc e1 e2)
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.compare compare_typed_funsig sig_lst1 sig_lst2
|> compare_helper (compare_expr_desc e1 e2)
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let compare_pe (p1, e1) (p2, e2) =
compare_pattern p1 p2 |> compare_helper (compare_expr_desc e1 e2)
in
compare_expr_desc me1 me2
|> compare_helper (List.compare compare_pe pe_lst1 pe_lst2)
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
compare_expr_desc cond1 cond2
|> compare_helper (compare_expr_desc tb1 tb2)
|> compare_helper (compare_expr_desc fb1 fb2)
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
compare_expr_desc lop1 lop2
|> compare_helper (compare_expr_desc rop1 rop2)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
compare_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
| RecordProj (e1, l1), RecordProj (e2, l2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
(* Type expressions *)
| TypeVar x1, TypeVar x2 -> compare x1 x2
| TypeInt, TypeInt | TypeBool, TypeBool -> 0
| TypeRecord t1, TypeRecord t2 -> Ident_map.compare compare_expr_desc t1 t2
| TypeList t1, TypeList t2 -> compare_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
compare_expr_desc lt1 lt2 + compare_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
compare id1 id2
|> compare_helper (compare_expr_desc lt1 lt2)
|> compare_helper (compare_expr_desc rt1 rt2)
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) ->
compare x1 x2 |> compare_helper (compare t1 t2)
| TypeUntouched s1, TypeUntouched s2 -> compare s1 s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
(* TODO: Another potential source of bugs *)
| Int _, _ -> 1
| _, Int _ -> -1
| Bool _, _ -> 1
| _, Bool _ -> -1
| Var _, _ -> 1
| _, Var _ -> -1
| Function _, _ -> 1
| _, Function _ -> -1
| Input, _ -> 1
| _, Input -> -1
| Appl _, _ -> 1
| _, Appl _ -> -1
| Let _, _ -> 1
| _, Let _ -> -1
| LetRecFun _, _ -> 1
| _, LetRecFun _ -> -1
| LetFun _, _ -> 1
| _, LetFun _ -> -1
| LetWithType _, _ -> 1
| _, LetWithType _ -> -1
| LetRecFunWithType _, _ -> 1
| _, LetRecFunWithType _ -> -1
| LetFunWithType _, _ -> 1
| _, LetFunWithType _ -> -1
| Plus _, _ -> 1
| _, Plus _ -> -1
| Minus _, _ -> 1
| _, Minus _ -> -1
| Times _, _ -> 1
| _, Times _ -> -1
| Divide _, _ -> 1
| _, Divide _ -> -1
| Modulus _, _ -> 1
| _, Modulus _ -> -1
| Equal _, _ -> 1
| _, Equal _ -> -1
| Neq _, _ -> 1
| _, Neq _ -> -1
| LessThan _, _ -> 1
| _, LessThan _ -> -1
| Leq _, _ -> 1
| _, Leq _ -> -1
| GreaterThan _, _ -> 1
| _, GreaterThan _ -> -1
| Geq _, _ -> 1
| _, Geq _ -> -1
| And _, _ -> 1
| _, And _ -> -1
| Or _, _ -> 1
| _, Or _ -> -1
| Not _, _ -> 1
| _, Not _ -> -1
| If _, _ -> 1
| _, If _ -> -1
| Record _, _ -> 1
| _, Record _ -> -1
| RecordProj _, _ -> 1
| _, RecordProj _ -> -1
| Match _, _ -> 1
| _, Match _ -> -1
| VariantExpr _, _ -> 1
| _, VariantExpr _ -> -1
| List _, _ -> 1
| _, List _ -> -1
| ListCons _, _ -> 1
| _, ListCons _ -> -1
| TypeError _, _ -> 1
| _, TypeError _ -> -1
| Assert _, _ -> 1
| _, Assert _ -> -1
| Assume _, _ -> 1
| _, Assume _ -> -1
| TypeVar _, _ -> 1
| _, TypeVar _ -> -1
| TypeInt, _ -> 1
| _, TypeInt -> -1
| TypeBool, _ -> 1
| _, TypeBool -> -1
| TypeRecord _, _ -> 1
| _, TypeRecord _ -> -1
| TypeList _, _ -> 1
| _, TypeList _ -> -1
| TypeArrow _, _ -> 1
| _, TypeArrow _ -> -1
| TypeArrowD _, _ -> 1
| _, TypeArrowD _ -> -1
| TypeSet _, _ -> 1
| _, TypeSet _ -> -1
| TypeUnion _, _ -> 1
| _, TypeUnion _ -> -1
| TypeIntersect _, _ -> 1
| _, TypeIntersect _ -> -1
| TypeRecurse _, _ -> 1
| _, TypeRecurse _ -> -1
| TypeUntouched _, _ -> 1
| _, TypeUntouched _ -> -1
module type Expr_desc = sig
type t
val equal : t -> t -> bool
val compare : t -> t -> int
end
module Typed_expr_desc : Expr_desc with type t = syn_bluejay_edesc = struct
type t = syn_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Semantic_typed_expr_desc : Expr_desc with type t = sem_bluejay_edesc =
struct
type t = sem_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Core_expr_desc : Expr_desc with type t = core_bluejay_edesc = struct
type t = core_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Pattern = struct
type t = pattern
let equal = equal_pattern
let compare = compare_pattern
let to_yojson = pattern_to_yojson
end
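(* Editor's illustrative sketch (not part of the original module): the
   comparator modules above are enough to key standard containers on ASTs,
   e.g. a map from syntactic expression descriptors to arbitrary data. *)
module Example_syn_edesc_map = Map.Make (Typed_expr_desc)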
(* Takes [expr] as an argument. Returns the relative precedence of the
expression. Higher ints correspond to higher precedences. *)
let expr_precedence_p1 : type a. a expr -> int =
fun expr ->
match expr with
| Function _ | Let _ | LetFun _ | LetRecFun _ | LetWithType _
| LetFunWithType _ | LetRecFunWithType _ | Match _ ->
0
| If _ -> 1
| Or _ -> 2
| And _ -> 3
| Not _ -> 4
| Equal _ | Neq _ | LessThan _ | Leq _ | GreaterThan _ | Geq _ -> 5
| ListCons _ -> 6
| Plus _ | Minus _ -> 7
| Times _ | Divide _ | Modulus _ -> 8
| Assert _ | Assume _ | VariantExpr _ -> 9
| Appl _ -> 10
| RecordProj _ -> 11
| Int _ | Bool _ | Input | Var _ | List _ | Record _ -> 12
(* TODO: For now, all type expressions share a single precedence level; revisit if type syntax ever needs finer distinctions. *)
| TypeVar _ | TypeInt | TypeBool | TypeRecord _ | TypeList _ | TypeArrow _
| TypeArrowD _ | TypeSet _ | TypeUnion _ | TypeIntersect _ | TypeRecurse _
| TypeError _ | TypeUntouched _ | TypeVariant _ ->
13
(** Takes expressions [e1] and [e2] as arguments. Returns 0 if the two
    expressions have equal precedence, a negative int if [e1] has lower
    precedence than [e2], and a positive int if [e1] has higher precedence. *)
let expr_precedence_cmp e1 e2 = expr_precedence_p1 e1 - expr_precedence_p1 e2
let expr_desc_precedence_cmp : type a. a expr_desc -> a expr_desc -> int =
fun ed1 ed2 -> expr_precedence_cmp ed1.body ed2.body
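(* Editor's illustrative sketch (not part of the original module): a
   pretty-printer would consult [expr_precedence_cmp] to decide whether a
   sub-expression needs parentheses, e.g. a [Plus] child under a [Times]
   parent compares lower and therefore gets wrapped. *)
let _needs_parens (child : 'a expr) (parent : 'a expr) : bool =
  expr_precedence_cmp child parent < 0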
(* Helper routines to transform internal bluejay to external bluejay *)
let rec from_internal_expr_desc (e : syn_bluejay_edesc) : Bluejay_ast.expr_desc
=
let tag' = e.tag in
let e' = from_internal_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : 'a funsig) : Bluejay_ast.funsig =
let (Funsig (f, args, f_body)) = fun_sig in
let f_body' = from_internal_expr_desc f_body in
Bluejay_ast.Funsig (f, args, f_body')
and transform_typed_funsig (fun_sig : 'a typed_funsig) :
Bluejay_ast.typed_funsig =
match fun_sig with
| Typed_funsig (f, args_with_type, (f_body, ret_type)) ->
let args_with_type' =
List.map
(fun (arg, t) -> (arg, from_internal_expr_desc t))
args_with_type
in
let f_body' = from_internal_expr_desc f_body in
let ret_type' = from_internal_expr_desc ret_type in
Bluejay_ast.Typed_funsig (f, args_with_type', (f_body', ret_type'))
| DTyped_funsig (f, (arg, t), (f_body, ret_type)) ->
let f_body' = from_internal_expr_desc f_body in
let ret_type' = from_internal_expr_desc ret_type in
Bluejay_ast.DTyped_funsig
(f, (arg, from_internal_expr_desc t), (f_body', ret_type'))
and from_internal_expr (e : syn_type_bluejay) : Bluejay_ast.expr =
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, from_internal_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = from_internal_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = from_internal_expr_desc ed in
LetFun (fun_sig', ed')
| LetWithType (x, ed1, ed2, t) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
let t' = from_internal_expr_desc t in
LetWithType (x, ed1', ed2', t')
| LetRecFunWithType (fs, ed) ->
let fs' = List.map transform_typed_funsig fs in
let ed' = from_internal_expr_desc ed in
LetRecFunWithType (fs', ed')
| LetFunWithType (fun_sig, ed) ->
let fun_sig' = transform_typed_funsig fun_sig in
let ed' = from_internal_expr_desc ed in
LetFunWithType (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = from_internal_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
let ed3' = from_internal_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map from_internal_expr_desc r in
Record r'
| RecordProj (ed, l) ->
let ed' = from_internal_expr_desc ed in
RecordProj (ed', l)
| Match (m_ed, pe_lst) ->
let m_ed' = from_internal_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = from_internal_expr_desc ed in
(p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (lbl, ed) ->
let ed' = from_internal_expr_desc ed in
VariantExpr (lbl, ed')
| List eds ->
let eds' = List.map from_internal_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
ListCons (ed1', ed2')
| TypeError x -> TypeError x
| Assert ed ->
let ed' = from_internal_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = from_internal_expr_desc ed in
Assume ed'
| TypeVar x -> TypeVar x
| TypeInt -> TypeInt
| TypeBool -> TypeBool
| TypeRecord r ->
let r' = Ident_map.map from_internal_expr_desc r in
TypeRecord r'
| TypeList ed ->
let ed' = from_internal_expr_desc ed in
TypeList ed'
| TypeArrow (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeArrow (ed1', ed2')
| TypeArrowD ((x, ed1), ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeArrowD ((x, ed1'), ed2')
| TypeSet (ed, p) ->
let ed' = from_internal_expr_desc ed in
let p' = from_internal_expr_desc p in
TypeSet (ed', p')
| TypeUnion (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeUnion (ed1', ed2')
| TypeIntersect (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeIntersect (ed1', ed2')
| TypeRecurse (tv, ed) ->
let ed' = from_internal_expr_desc ed in
TypeRecurse (tv, ed')
| TypeUntouched s -> TypeUntouched s
| TypeVariant (l, ed) ->
let ed' = from_internal_expr_desc ed in
TypeVariant (l, ed')
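
(* Illustrative check for the conversion above (a sketch only, relying on
   nothing beyond the [tag]/[body] fields that [from_internal_expr_desc]
   itself uses): the external descriptor keeps the internal tag.

     let _tag_is_preserved () =
       let ed : syn_bluejay_edesc = new_expr_desc (Int 42) in
       let ed' = from_internal_expr_desc ed in
       assert (ed'.Bluejay_ast.tag = ed.tag)
*)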
(* Helper routines to transform external bluejay to internal bluejay *)
let rec to_internal_expr_desc (e : Bluejay_ast.expr_desc) : syn_bluejay_edesc =
let tag' = e.tag in
let e' = to_internal_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : Bluejay_ast.funsig) : 'a funsig =
let (Bluejay_ast.Funsig (f, args, f_body)) = fun_sig in
let f_body' = to_internal_expr_desc f_body in
Funsig (f, args, f_body')
and transform_typed_funsig (fun_sig : Bluejay_ast.typed_funsig) :
'a typed_funsig =
match fun_sig with
| Bluejay_ast.Typed_funsig (f, args_with_type, (f_body, ret_type)) ->
let args_with_type' =
List.map (fun (arg, t) -> (arg, to_internal_expr_desc t)) args_with_type
in
let f_body' = to_internal_expr_desc f_body in
let ret_type' = to_internal_expr_desc ret_type in
Typed_funsig (f, args_with_type', (f_body', ret_type'))
| Bluejay_ast.DTyped_funsig (f, (arg, t), (f_body, ret_type)) ->
let f_body' = to_internal_expr_desc f_body in
let ret_type' = to_internal_expr_desc ret_type in
DTyped_funsig (f, (arg, to_internal_expr_desc t), (f_body', ret_type'))
and to_internal_expr (e : Bluejay_ast.expr) : syn_type_bluejay =
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, to_internal_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = to_internal_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = to_internal_expr_desc ed in
LetFun (fun_sig', ed')
| LetWithType (x, ed1, ed2, t) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
let t' = to_internal_expr_desc t in
LetWithType (x, ed1', ed2', t')
| LetRecFunWithType (fs, ed) ->
let fs' = List.map transform_typed_funsig fs in
let ed' = to_internal_expr_desc ed in
LetRecFunWithType (fs', ed')
| LetFunWithType (fun_sig, ed) ->
let fun_sig' = transform_typed_funsig fun_sig in
let ed' = to_internal_expr_desc ed in
LetFunWithType (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = to_internal_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
let ed3' = to_internal_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map to_internal_expr_desc r in
Record r'
| RecordProj (ed, l) ->
let ed' = to_internal_expr_desc ed in
RecordProj (ed', l)
| Match (m_ed, pe_lst) ->
let m_ed' = to_internal_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = to_internal_expr_desc ed in
(p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (lbl, ed) ->
let ed' = to_internal_expr_desc ed in
VariantExpr (lbl, ed')
| List eds ->
let eds' = List.map to_internal_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
ListCons (ed1', ed2')
| TypeError x -> TypeError x
| Assert ed ->
let ed' = to_internal_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = to_internal_expr_desc ed in
Assume ed'
| TypeVar x -> TypeVar x
| TypeInt -> TypeInt
| TypeBool -> TypeBool
| TypeRecord r ->
let r' = Ident_map.map to_internal_expr_desc r in
TypeRecord r'
| TypeList ed ->
let ed' = to_internal_expr_desc ed in
TypeList ed'
| TypeArrow (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeArrow (ed1', ed2')
| TypeArrowD ((x, ed1), ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeArrowD ((x, ed1'), ed2')
| TypeSet (ed, p) ->
let ed' = to_internal_expr_desc ed in
let p' = to_internal_expr_desc p in
TypeSet (ed', p')
| TypeUnion (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeUnion (ed1', ed2')
| TypeIntersect (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeIntersect (ed1', ed2')
| TypeRecurse (tv, ed) ->
let ed' = to_internal_expr_desc ed in
TypeRecurse (tv, ed')
| TypeUntouched s -> TypeUntouched s
| TypeVariant (lbl, ed) ->
let ed' = to_internal_expr_desc ed in
TypeVariant (lbl, ed')
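
(* Since both directions copy tags verbatim and map constructors
   one-for-one, converting out to the external AST and back yields an
   [equal_expr_desc]-equal descriptor.  A hedged sanity-check sketch:

     let _round_trip () =
       let ed : syn_bluejay_edesc =
         new_expr_desc
           (Plus (new_expr_desc (Int 1), new_expr_desc (Var (Ident "x"))))
       in
       assert
         (equal_expr_desc ed (to_internal_expr_desc (from_internal_expr_desc ed)))
*)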
(* Helper routines to transform jay to internal bluejay *)
let rec from_jay_expr_desc (e : Jay.Jay_ast.expr_desc) : core_bluejay_edesc =
let tag' = e.tag in
let e' = from_jay_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : Jay.Jay_ast.funsig) : core_only funsig =
let (Jay.Jay_ast.Funsig (f, args, f_body)) = fun_sig in
let f_body' = from_jay_expr_desc f_body in
Funsig (f, args, f_body')
and from_jay_expr (e : Jay.Jay_ast.expr) : core_bluejay =
let pat_conv (p : Jay.Jay_ast.pattern) : pattern =
match p with
| AnyPat -> AnyPat
| IntPat -> IntPat
| BoolPat -> BoolPat
| FunPat -> FunPat
| RecPat r -> RecPat r
| StrictRecPat r -> StrictRecPat r
| VariantPat (Variant_label l, x) -> VariantPat (Variant_label l, x)
| VarPat x -> VarPat x
| EmptyLstPat -> EmptyLstPat
| LstDestructPat (hd, tl) -> LstDestructPat (hd, tl)
in
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, from_jay_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = from_jay_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = from_jay_expr_desc ed in
LetFun (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = from_jay_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
let ed3' = from_jay_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map from_jay_expr_desc r in
Record r'
| RecordProj (ed, Label l) ->
let ed' = from_jay_expr_desc ed in
RecordProj (ed', Label l)
| Match (m_ed, pe_lst) ->
let m_ed' = from_jay_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = from_jay_expr_desc ed in
(pat_conv p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (Variant_label lbl, ed) ->
let ed' = from_jay_expr_desc ed in
VariantExpr (Variant_label lbl, ed')
| List eds ->
let eds' = List.map from_jay_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
ListCons (ed1', ed2')
| Assert ed ->
let ed' = from_jay_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = from_jay_expr_desc ed in
Assume ed'
| Error x -> TypeError x
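
(* Sketch of driving [from_jay_expr_desc] directly.  Building the Jay
   descriptor with a record literal (and a dummy tag of 0) is purely for
   illustration -- real callers receive descriptors from the Jay frontend.

     let _jay_to_core () =
       let jay_ed = { Jay.Jay_ast.tag = 0; body = Jay.Jay_ast.Int 1 } in
       let core_ed : core_bluejay_edesc = from_jay_expr_desc jay_ed in
       assert (core_ed.tag = 0)
*)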
(* Helper routines to transform internal bluejay to jay *)
let rec to_jay_expr_desc (e : core_bluejay_edesc) : Jay.Jay_ast.expr_desc =
let tag' = e.tag in
let e' = to_jay_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : core_only funsig) : Jay.Jay_ast.funsig =
let (Funsig (f, args, f_body)) = fun_sig in
let f_body' = to_jay_expr_desc f_body in
Jay.Jay_ast.Funsig (f, args, f_body')
and to_jay_expr (e : core_bluejay) : Jay.Jay_ast.expr =
let pat_conv (p : pattern) : Jay.Jay_ast.pattern =
match p with
| AnyPat -> AnyPat
| IntPat -> IntPat
| BoolPat -> BoolPat
| FunPat -> FunPat
| RecPat r -> RecPat r
| StrictRecPat r -> StrictRecPat r
| VariantPat (Variant_label l, x) -> VariantPat (Variant_label l, x)
| VarPat x -> VarPat x
| EmptyLstPat -> EmptyLstPat
| LstDestructPat (hd, tl) -> LstDestructPat (hd, tl)
in
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, to_jay_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = to_jay_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = to_jay_expr_desc ed in
LetFun (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = to_jay_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
let ed3' = to_jay_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map to_jay_expr_desc r in
Record r'
| RecordProj (ed, Label l) ->
let ed' = to_jay_expr_desc ed in
RecordProj (ed', Label l)
| Match (m_ed, pe_lst) ->
let m_ed' = to_jay_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = to_jay_expr_desc ed in
(pat_conv p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (Variant_label lbl, ed) ->
let ed' = to_jay_expr_desc ed in
VariantExpr (Variant_label lbl, ed')
| List eds ->
let eds' = List.map to_jay_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
ListCons (ed1', ed2')
| Assert ed ->
let ed' = to_jay_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = to_jay_expr_desc ed in
Assume ed'
| TypeError x -> Error x
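
(* The two bridges above are intended to be mutually inverse on the core
   fragment: round-tripping a core descriptor through Jay reproduces an
   [equal_expr_desc]-equal descriptor (tags included).  Sketch:

     let _core_round_trip () =
       let ed : core_bluejay_edesc =
         new_expr_desc
           (ListCons (new_expr_desc (Int 1), new_expr_desc (List [])))
       in
       assert (equal_expr_desc ed (from_jay_expr_desc (to_jay_expr_desc ed)))
*)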
(* Other helper functions *)
let is_type_expr (ed : syn_bluejay_edesc) : bool =
match ed.body with
| TypeVar _ | TypeInt | TypeBool | TypeRecord _ | TypeList _ | TypeArrow _
| TypeArrowD _ | TypeUnion _ | TypeIntersect _ | TypeSet _ | TypeRecurse _
| TypeVariant _ ->
true
| _ -> false
let is_fun_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeArrow _ | TypeArrowD _ -> true | _ -> false
let is_dependent_fun_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeArrowD _ -> true | _ -> false
let is_polymorphic_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeUntouched _ -> true | _ -> false
let get_dependent_fun_var (ed : syn_bluejay_edesc) : ident =
match ed.body with
| TypeArrowD ((x, _), _) -> x
| _ ->
failwith
"get_dependent_fun_var: Should only be called with a dependent \
function type!"
let is_record_type : type a. a expr_desc -> bool =
fun ed -> match ed.body with TypeRecord _ -> true | _ -> false
let is_record_pat (p : pattern) : bool =
match p with
| StrictRecPat rec_pat | RecPat rec_pat ->
not @@ Ident_map.mem (Ident "~untouched") rec_pat
| _ -> false
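
(* Note: "~untouched" is a sentinel record label (used elsewhere in the
   pipeline, presumably for the "untouched"/polymorphic encoding), so a
   record pattern mentioning it is not treated as a user-level record
   pattern.  Illustration (a sketch only):

     let _ =
       let plain = RecPat (Ident_map.add (Ident "x") None Ident_map.empty) in
       let sentinel =
         RecPat (Ident_map.add (Ident "~untouched") None Ident_map.empty)
       in
       assert (is_record_pat plain && not (is_record_pat sentinel))
*)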
let rec is_subtype (ed1 : syn_bluejay_edesc) (ed2 : syn_bluejay_edesc) : bool =
if tagless_equal_expr_desc ed1 ed2
then true
else
match (ed1.body, ed2.body) with
| TypeRecord r1, TypeRecord r2 ->
(* e.g.: { a : int; b : int } <: { a : int v bool } *)
let r1_labels = Ident_map.key_list r1 in
let r2_labels = Ident_map.key_list r2 in
(* r1 must have all the labels that r2 has to be its subtype *)
let prelim = List.subset compare_ident r2_labels r1_labels in
if prelim
then
(* r1's fields must all be subtypes to the corresponding fields in r2 *)
Ident_map.for_all (fun k v -> is_subtype (Ident_map.find k r1) v) r2
else false
| TypeList t1, TypeList t2 -> is_subtype t1 t2
| TypeArrow (dom1, cod1), TypeArrow (dom2, cod2) ->
is_subtype dom2 dom1 && is_subtype cod1 cod2
| _, TypeUnion (t1, t2) ->
if is_subtype ed1 t1 then true else is_subtype ed1 t2
| _, TypeIntersect (t1, t2) ->
if is_subtype ed1 t1
then is_subtype ed1 t2
else false (* TODO: What other cases are we missing here? *)
    | _ -> failwith "is_subtype: remaining cases are not yet implemented (TBI)"
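
(* Worked example for the record case above (a sketch, not part of the
   module's interface): a record type with more fields is a subtype of one
   with fewer fields, provided every shared field is itself a subtype.

     let _record_width_subtyping () =
       let int_t () = new_expr_desc TypeInt in
       let bool_t () = new_expr_desc TypeBool in
       let r1 =
         Ident_map.empty
         |> Ident_map.add (Ident "a") (int_t ())
         |> Ident_map.add (Ident "b") (bool_t ())
       in
       let r2 = Ident_map.add (Ident "a") (int_t ()) Ident_map.empty in
       assert
         (is_subtype (new_expr_desc (TypeRecord r1)) (new_expr_desc (TypeRecord r2)))
*)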
P1: no internal transformation -> doesn't need to change tag
P2: no internal transformation
P3: HAS internal transformation
TODO: Create a separate class of constructors for type errors?
Type expressions
| Int _, _ -> false
| Bool _, _ -> false
| Input, _ -> false
| Var _, _ -> false
| List _, _ -> false
| Record _, _ -> false
| Function _, _ -> false
| Let _, _ -> false
| LetFun _, _ -> false
| LetRecFun _, _ -> false
| LetWithType _, _ -> false
| LetFunWithType _, _ -> false
| LetRecFunWithType _, _ -> false
| Match _, _ -> false
| If _, _ -> false
Type expressions
| Int _, _ -> false
| Bool _, _ -> false
| Input, _ -> false
| Var _, _ -> false
| List _, _ -> false
| Record _, _ -> false
| Function _, _ -> false
| Let _, _ -> false
| LetFun _, _ -> false
| LetRecFun _, _ -> false
| LetWithType _, _ -> false
| LetFunWithType _, _ -> false
| LetRecFunWithType _, _ -> false
| Match _, _ -> false
| If _, _ -> false
Type expressions
Type expressions
TODO: Another potential source for bug
Takes [expr] as an argument. Returns the relative precedence of the
expression. Higher ints correspond to higher precedences.
TODO: For now, all type expressions will have the lowest precedence coz I'm lazy and don't wanna think about it
Helper routines to transform internal bluejay to external bluejay
Helper routines to transform external bluejay to internal bluejay
Helper routines to transform jay to internal bluejay
Helper routines to transform internal bluejay to jay
Other helper functions
e.g.: { a : int; b : int } <: { a : int v bool }
r1 must have all the labels that r2 has to be its subtype
r1's fields must all be subtypes to the corresponding fields in r2
TODO: What other cases are we missing here? | open Batteries
type label = Bluejay_ast.label = Label of string
[@@deriving eq, ord, show, to_yojson]
type ident = Jayil.Ast.ident = Ident of string
[@@deriving eq, ord, show, to_yojson]
module Ident = Jayil.Ast.Ident
module Ident_set = Jayil.Ast.Ident_set
module Ident_map = Jayil.Ast.Ident_map
type variant_label = Bluejay_ast.variant_label = Variant_label of string
[@@deriving eq, ord, show, to_yojson]
type syntactic_only = [ `Syntactic ]
type semantic_only = [ `Semantic ]
type core_only = [ `Core ]
type 'a syntactic_and_semantic = [< `Syntactic | `Semantic ] as 'a
type 'a core_and_semantic = [< `Core | `Semantic ] as 'a
type type_sig =
| TopType
| IntType
| BoolType
| FunType
| RecType of Ident_set.t
| ListType
| VariantType of variant_label
| UntouchedType of string
[@@deriving eq, ord, show, to_yojson]
type pattern = Bluejay_ast.pattern =
| AnyPat
| IntPat
| BoolPat
| FunPat
| RecPat of ident option Ident_map.t
| StrictRecPat of ident option Ident_map.t
| VariantPat of variant_label * ident
| VarPat of ident
| EmptyLstPat
| LstDestructPat of ident * ident
[@@deriving eq, ord, show, to_yojson]
type predicate = syntactic_only expr_desc
and 'a funsig = Funsig of ident * ident list * 'a expr_desc
and 'a typed_funsig =
| Typed_funsig of
ident * (ident * 'a expr_desc) list * ('a expr_desc * 'a expr_desc)
| DTyped_funsig of
ident * (ident * 'a expr_desc) * ('a expr_desc * 'a expr_desc)
and 'a expr_desc = { body : 'a expr; tag : int }
and 'a expr =
| Int : int -> 'a expr
| Bool : bool -> 'a expr
| Var : ident -> 'a expr
| Function : (ident list * 'a expr_desc) -> 'a expr
| Input : 'a expr
| Appl : ('a expr_desc * 'a expr_desc) -> 'a expr
| Let : (ident * 'a expr_desc * 'a expr_desc) -> 'a expr
| LetRecFun : ('a funsig list * 'a expr_desc) -> 'a expr
| LetFun : ('a funsig * 'a expr_desc) -> 'a expr
| LetWithType :
(ident * 'a expr_desc * 'a expr_desc * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| LetRecFunWithType :
('a typed_funsig list * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| LetFunWithType :
('a typed_funsig * 'a expr_desc)
-> 'a syntactic_and_semantic expr
| Plus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Minus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Times : ('a expr_desc * 'a expr_desc) -> 'a expr
| Divide : ('a expr_desc * 'a expr_desc) -> 'a expr
| Modulus : ('a expr_desc * 'a expr_desc) -> 'a expr
| Equal : ('a expr_desc * 'a expr_desc) -> 'a expr
| Neq : ('a expr_desc * 'a expr_desc) -> 'a expr
| LessThan : ('a expr_desc * 'a expr_desc) -> 'a expr
| Leq : ('a expr_desc * 'a expr_desc) -> 'a expr
| GreaterThan : ('a expr_desc * 'a expr_desc) -> 'a expr
| Geq : ('a expr_desc * 'a expr_desc) -> 'a expr
| And : ('a expr_desc * 'a expr_desc) -> 'a expr
| Or : ('a expr_desc * 'a expr_desc) -> 'a expr
| Not : 'a expr_desc -> 'a expr
| If : ('a expr_desc * 'a expr_desc * 'a expr_desc) -> 'a expr
| Record : 'a expr_desc Ident_map.t -> 'a expr
| RecordProj : ('a expr_desc * label) -> 'a expr
| Match : ('a expr_desc * (pattern * 'a expr_desc) list) -> 'a expr
| VariantExpr : (variant_label * 'a expr_desc) -> 'a expr
| List : 'a expr_desc list -> 'a expr
| ListCons : ('a expr_desc * 'a expr_desc) -> 'a expr
| TypeError : ident -> 'a expr
| Assert : 'a expr_desc -> 'a expr
| Assume : 'a expr_desc -> 'a expr
| TypeVar : ident -> syntactic_only expr
| TypeInt : syntactic_only expr
| TypeBool : syntactic_only expr
| TypeRecord : syntactic_only expr_desc Ident_map.t -> syntactic_only expr
| TypeList : syntactic_only expr_desc -> syntactic_only expr
| TypeArrow :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeArrowD :
((ident * syntactic_only expr_desc) * syntactic_only expr_desc)
-> syntactic_only expr
| TypeSet : syntactic_only expr_desc * predicate -> syntactic_only expr
| TypeUnion :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeIntersect :
(syntactic_only expr_desc * syntactic_only expr_desc)
-> syntactic_only expr
| TypeRecurse : (ident * syntactic_only expr_desc) -> syntactic_only expr
| TypeUntouched : string -> syntactic_only expr
| TypeVariant :
(variant_label * syntactic_only expr_desc)
-> syntactic_only expr
let counter = ref 0
let fresh_tag () =
let c = !counter in
counter := c + 1 ;
c
let new_expr_desc : type a. a expr -> a expr_desc =
fun e -> { tag = fresh_tag (); body = e }
type syn_type_bluejay = syntactic_only expr
type syn_bluejay_edesc = syntactic_only expr_desc
type syn_type_bluejay_desc = syntactic_only expr_desc
type sem_type_bluejay = [ `Semantic ] expr
type sem_bluejay_edesc = [ `Semantic ] expr_desc
type sem_type_bluejay_desc = [ ` Semantic ] expr
type core_bluejay = [ `Core ] expr
type core_bluejay_edesc = [ `Core ] expr_desc
let rec equal_funsig : type a. a funsig -> a funsig -> bool =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
id1 = id2 && List.eq equal_ident params1 params2 && equal_expr_desc fe1 fe2
and equal_typed_funsig : type a. a typed_funsig -> a typed_funsig -> bool =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2
&& List.equal
(fun (param1, t1) (param2, t2) ->
equal_ident param1 param2 && equal_expr_desc t1 t2)
params_with_type_1 params_with_type_2
&& equal_expr_desc f_body_1 f_body_2
&& equal_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2 && equal_ident param1 param2 && equal_expr_desc t1 t2
&& equal_expr_desc f_body_1 f_body_2
&& equal_expr_desc ret_type_1 ret_type_2
| _ -> false
and equal_expr_desc : type a. a expr_desc -> a expr_desc -> bool =
fun e1 e2 ->
equal_expr e1.body e2.body
Option.eq e1.tag e2.tag
e1.tag = e2.tag
and equal_expr : type a. a expr -> a expr -> bool =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> n1 = n2
| Bool b1, Bool b2 -> b1 = b2
| Input, Input -> true
| Var x1, Var x2 -> x1 = x2
| List l1, List l2 -> List.eq equal_expr_desc l1 l2
| Record r1, Record r2 -> Ident_map.equal equal_expr_desc r1 r2
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.eq equal_ident id_lst1 id_lst2 && equal_expr_desc fun_body1 fun_body2
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
x1 = x2 && equal_expr_desc xe1 xe2 && equal_expr_desc e1 e2
| LetFun (f1, e1), LetFun (f2, e2) ->
equal_funsig f1 f2 && equal_expr_desc e1 e2
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.eq equal_funsig sig_lst1 sig_lst2 && equal_expr_desc e1 e2
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
x1 = x2 && equal_expr_desc xe1 xe2 && equal_expr_desc e1 e2
&& equal_expr_desc t1 t2
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
equal_typed_funsig f1 f2 && equal_expr_desc e1 e2
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.eq equal_typed_funsig sig_lst1 sig_lst2 && equal_expr_desc e1 e2
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let eq_pe (p1, e1) (p2, e2) = p1 = p2 && equal_expr_desc e1 e2 in
equal_expr_desc me1 me2 && List.eq eq_pe pe_lst1 pe_lst2
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
equal_expr_desc cond1 cond2
&& equal_expr_desc tb1 tb2 && equal_expr_desc fb1 fb2
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
equal_expr_desc lop1 lop2 && equal_expr_desc rop1 rop2
| Or _ , _
| And _ , _
| Equal _ , _
| Neq _ , _
| LessThan _ , _
| Leq _ , _
| GreaterThan _ , _
| Geq _ , _
| Appl _ , _
| Plus _ , _
| Minus _ , _
| Times _ , _
| Divide _ , _
| Modulus _ , _
| ListCons _ , _ - > false
| And _, _
| Equal _, _
| Neq _, _
| LessThan _, _
| Leq _, _
| GreaterThan _, _
| Geq _, _
| Appl _, _
| Plus _, _
| Minus _, _
| Times _, _
| Divide _, _
| Modulus _, _
| ListCons _, _ -> false *)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
equal_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
l1 = l2 && equal_expr_desc e1 e2
| RecordProj (e1, l1), RecordProj (e2, l2) -> l1 = l2 && equal_expr_desc e1 e2
| TypeVar x1, TypeVar x2 -> x1 = x2
| TypeInt, TypeInt | TypeBool, TypeBool -> true
| TypeRecord t1, TypeRecord t2 -> Ident_map.equal equal_expr_desc t1 t2
| TypeList t1, TypeList t2 -> equal_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
equal_expr_desc lt1 lt2 && equal_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
id1 = id2 && equal_expr_desc lt1 lt2 && equal_expr_desc rt1 rt2
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) -> x1 = x2 && t1 = t2
| TypeUntouched s1, TypeUntouched s2 -> s1 = s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
l1 = l2 && equal_expr_desc e1 e2
| _ -> false
let rec tagless_equal_funsig : type a. a funsig -> a funsig -> bool =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
id1 = id2
&& List.eq equal_ident params1 params2
&& tagless_equal_expr_desc fe1 fe2
and tagless_equal_typed_funsig :
type a. a typed_funsig -> a typed_funsig -> bool =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2
&& List.equal
(fun (param1, t1) (param2, t2) ->
equal_ident param1 param2 && tagless_equal_expr_desc t1 t2)
params_with_type_1 params_with_type_2
&& tagless_equal_expr_desc f_body_1 f_body_2
&& tagless_equal_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
equal_ident f1 f2 && equal_ident param1 param2
&& tagless_equal_expr_desc t1 t2
&& tagless_equal_expr_desc f_body_1 f_body_2
&& tagless_equal_expr_desc ret_type_1 ret_type_2
| _ -> false
and tagless_equal_expr_desc : type a. a expr_desc -> a expr_desc -> bool =
fun e1 e2 -> tagless_equal_expr e1.body e2.body
and tagless_equal_expr : type a. a expr -> a expr -> bool =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> n1 = n2
| Bool b1, Bool b2 -> b1 = b2
| Input, Input -> true
| Var x1, Var x2 -> x1 = x2
| List l1, List l2 -> List.eq tagless_equal_expr_desc l1 l2
| Record r1, Record r2 -> Ident_map.equal tagless_equal_expr_desc r1 r2
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.eq equal_ident id_lst1 id_lst2
&& tagless_equal_expr_desc fun_body1 fun_body2
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
x1 = x2
&& tagless_equal_expr_desc xe1 xe2
&& tagless_equal_expr_desc e1 e2
| LetFun (f1, e1), LetFun (f2, e2) ->
equal_funsig f1 f2 && tagless_equal_expr_desc e1 e2
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.eq equal_funsig sig_lst1 sig_lst2 && tagless_equal_expr_desc e1 e2
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
x1 = x2
&& tagless_equal_expr_desc xe1 xe2
&& tagless_equal_expr_desc e1 e2
&& tagless_equal_expr_desc t1 t2
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
equal_typed_funsig f1 f2 && tagless_equal_expr_desc e1 e2
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.eq equal_typed_funsig sig_lst1 sig_lst2
&& tagless_equal_expr_desc e1 e2
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let eq_pe (p1, e1) (p2, e2) = p1 = p2 && tagless_equal_expr_desc e1 e2 in
tagless_equal_expr_desc me1 me2 && List.eq eq_pe pe_lst1 pe_lst2
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
tagless_equal_expr_desc cond1 cond2
&& tagless_equal_expr_desc tb1 tb2
&& tagless_equal_expr_desc fb1 fb2
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
tagless_equal_expr_desc lop1 lop2 && tagless_equal_expr_desc rop1 rop2
| Or _ , _
| And _ , _
| Equal _ , _
| Neq _ , _
| LessThan _ , _
| Leq _ , _
| GreaterThan _ , _
| Geq _ , _
| Appl _ , _
| Plus _ , _
| Minus _ , _
| Times _ , _
| Divide _ , _
| Modulus _ , _
| ListCons _ , _ - > false
| And _, _
| Equal _, _
| Neq _, _
| LessThan _, _
| Leq _, _
| GreaterThan _, _
| Geq _, _
| Appl _, _
| Plus _, _
| Minus _, _
| Times _, _
| Divide _, _
| Modulus _, _
| ListCons _, _ -> false *)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
tagless_equal_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
| RecordProj (e1, l1), RecordProj (e2, l2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
| TypeVar x1, TypeVar x2 -> x1 = x2
| TypeInt, TypeInt | TypeBool, TypeBool -> true
| TypeRecord t1, TypeRecord t2 ->
Ident_map.equal tagless_equal_expr_desc t1 t2
| TypeList t1, TypeList t2 -> tagless_equal_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
tagless_equal_expr_desc lt1 lt2 && tagless_equal_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
id1 = id2
&& tagless_equal_expr_desc lt1 lt2
&& tagless_equal_expr_desc rt1 rt2
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) -> x1 = x2 && t1 = t2
| TypeUntouched s1, TypeUntouched s2 -> s1 = s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
l1 = l2 && tagless_equal_expr_desc e1 e2
| _ -> false
let compare_helper (x : int) (y : int) : int = if x <> 0 then x else y
let rec compare_funsig : type a. a funsig -> a funsig -> int =
fun (Funsig (id1, params1, fe1)) (Funsig (id2, params2, fe2)) ->
compare id1 id2
|> compare_helper (List.compare compare_ident params1 params2)
|> compare_helper (compare_expr_desc fe1 fe2)
and compare_typed_funsig : type a. a typed_funsig -> a typed_funsig -> int =
fun fsig_1 fsig_2 ->
match (fsig_1, fsig_2) with
| ( Typed_funsig (f1, params_with_type_1, (f_body_1, ret_type_1)),
Typed_funsig (f2, params_with_type_2, (f_body_2, ret_type_2)) ) ->
compare_ident f1 f2
|> compare_helper
@@ List.compare
(fun (param1, t1) (param2, t2) ->
compare_ident param1 param2
|> compare_helper @@ compare_expr_desc t1 t2)
params_with_type_1 params_with_type_2
|> compare_helper @@ compare_expr_desc f_body_1 f_body_2
|> compare_helper @@ compare_expr_desc ret_type_1 ret_type_2
| ( DTyped_funsig (f1, (param1, t1), (f_body_1, ret_type_1)),
DTyped_funsig (f2, (param2, t2), (f_body_2, ret_type_2)) ) ->
compare_ident f1 f2
|> compare_helper @@ compare_ident param1 param2
|> compare_helper @@ compare_expr_desc t1 t2
|> compare_helper @@ compare_expr_desc f_body_1 f_body_2
|> compare_helper @@ compare_expr_desc ret_type_1 ret_type_2
| DTyped_funsig _, Typed_funsig _ -> 1
| Typed_funsig _, DTyped_funsig _ -> -1
and compare_expr_desc : type a. a expr_desc -> a expr_desc -> int =
fun e1 e2 ->
compare_expr e1.body e2.body |> compare_helper (compare e1.tag e2.tag)
and compare_expr : type a. a expr -> a expr -> int =
fun e1 e2 ->
match (e1, e2) with
| Int n1, Int n2 -> compare n1 n2
| Bool b1, Bool b2 -> compare b1 b2
| Input, Input -> 0
| Var x1, Var x2 -> compare x1 x2
| List l1, List l2 -> List.compare compare_expr_desc l1 l2
| Record r1, Record r2 -> Ident_map.compare compare_expr_desc r1 r2
| Function (id_lst1, fun_body1), Function (id_lst2, fun_body2) ->
List.compare compare_ident id_lst1 id_lst2
|> compare_helper (compare_expr_desc fun_body1 fun_body2)
| Let (x1, xe1, e1), Let (x2, xe2, e2) ->
compare x1 x2
|> compare_helper (compare_expr_desc xe1 xe2)
|> compare_helper (compare_expr_desc e1 e2)
| LetFun (f1, e1), LetFun (f2, e2) ->
compare_funsig f1 f2 |> compare_helper (compare_expr_desc e1 e2)
| LetRecFun (sig_lst1, e1), LetRecFun (sig_lst2, e2) ->
List.compare compare_funsig sig_lst1 sig_lst2 + compare_expr_desc e1 e2
| LetWithType (x1, xe1, e1, t1), LetWithType (x2, xe2, e2, t2) ->
compare x1 x2
|> compare_helper (compare_expr_desc xe1 xe2)
|> compare_helper (compare_expr_desc e1 e2)
|> compare_helper (compare_expr_desc t1 t2)
| LetFunWithType (f1, e1), LetFunWithType (f2, e2) ->
compare_typed_funsig f1 f2 |> compare_helper (compare_expr_desc e1 e2)
| LetRecFunWithType (sig_lst1, e1), LetRecFunWithType (sig_lst2, e2) ->
List.compare compare_typed_funsig sig_lst1 sig_lst2
|> compare_helper (compare_expr_desc e1 e2)
| Match (me1, pe_lst1), Match (me2, pe_lst2) ->
let compare_pe (p1, e1) (p2, e2) =
compare_pattern p1 p2 |> compare_helper (compare_expr_desc e1 e2)
in
compare_expr_desc me1 me2
|> compare_helper (List.compare compare_pe pe_lst1 pe_lst2)
| If (cond1, tb1, fb1), If (cond2, tb2, fb2) ->
compare_expr_desc cond1 cond2
|> compare_helper (compare_expr_desc tb1 tb2)
|> compare_helper (compare_expr_desc fb1 fb2)
| Or (lop1, rop1), Or (lop2, rop2)
| And (lop1, rop1), And (lop2, rop2)
| Equal (lop1, rop1), Equal (lop2, rop2)
| Neq (lop1, rop1), Neq (lop2, rop2)
| LessThan (lop1, rop1), LessThan (lop2, rop2)
| Leq (lop1, rop1), Leq (lop2, rop2)
| GreaterThan (lop1, rop1), GreaterThan (lop2, rop2)
| Geq (lop1, rop1), Geq (lop2, rop2)
| Appl (lop1, rop1), Appl (lop2, rop2)
| Plus (lop1, rop1), Plus (lop2, rop2)
| Minus (lop1, rop1), Minus (lop2, rop2)
| Times (lop1, rop1), Times (lop2, rop2)
| Divide (lop1, rop1), Divide (lop2, rop2)
| Modulus (lop1, rop1), Modulus (lop2, rop2)
| ListCons (lop1, rop1), ListCons (lop2, rop2) ->
compare_expr_desc lop1 lop2
|> compare_helper (compare_expr_desc rop1 rop2)
| Assert e1, Assert e2 | Assume e1, Assume e2 | Not e1, Not e2 ->
compare_expr_desc e1 e2
| VariantExpr (l1, e1), VariantExpr (l2, e2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
| RecordProj (e1, l1), RecordProj (e2, l2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
| TypeVar x1, TypeVar x2 -> compare x1 x2
| TypeInt, TypeInt | TypeBool, TypeBool -> 0
| TypeRecord t1, TypeRecord t2 -> Ident_map.compare compare_expr_desc t1 t2
| TypeList t1, TypeList t2 -> compare_expr_desc t1 t2
| TypeArrow (lt1, rt1), TypeArrow (lt2, rt2)
| TypeUnion (lt1, rt1), TypeUnion (lt2, rt2)
| TypeIntersect (lt1, rt1), TypeIntersect (lt2, rt2)
| TypeSet (lt1, rt1), TypeSet (lt2, rt2) ->
compare_expr_desc lt1 lt2 + compare_expr_desc rt1 rt2
| TypeArrowD ((id1, lt1), rt1), TypeArrowD ((id2, lt2), rt2) ->
compare id1 id2
|> compare_helper (compare_expr_desc lt1 lt2)
|> compare_helper (compare_expr_desc rt1 rt2)
| TypeRecurse (x1, t1), TypeRecurse (x2, t2) ->
compare x1 x2 |> compare_helper (compare t1 t2)
| TypeUntouched s1, TypeUntouched s2 -> compare s1 s2
| TypeVariant (l1, e1), TypeVariant (l2, e2) ->
compare l1 l2 |> compare_helper (compare_expr_desc e1 e2)
| Int _, _ -> 1
| _, Int _ -> -1
| Bool _, _ -> 1
| _, Bool _ -> -1
| Var _, _ -> 1
| _, Var _ -> -1
| Function _, _ -> 1
| _, Function _ -> -1
| Input, _ -> 1
| _, Input -> -1
| Appl _, _ -> 1
| _, Appl _ -> -1
| Let _, _ -> 1
| _, Let _ -> -1
| LetRecFun _, _ -> 1
| _, LetRecFun _ -> -1
| LetFun _, _ -> 1
| _, LetFun _ -> -1
| LetWithType _, _ -> 1
| _, LetWithType _ -> -1
| LetRecFunWithType _, _ -> 1
| _, LetRecFunWithType _ -> -1
| LetFunWithType _, _ -> 1
| _, LetFunWithType _ -> -1
| Plus _, _ -> 1
| _, Plus _ -> -1
| Minus _, _ -> 1
| _, Minus _ -> -1
| Times _, _ -> 1
| _, Times _ -> -1
| Divide _, _ -> 1
| _, Divide _ -> -1
| Modulus _, _ -> 1
| _, Modulus _ -> -1
| Equal _, _ -> 1
| _, Equal _ -> -1
| Neq _, _ -> 1
| _, Neq _ -> -1
| LessThan _, _ -> 1
| _, LessThan _ -> -1
| Leq _, _ -> 1
| _, Leq _ -> -1
| GreaterThan _, _ -> 1
| _, GreaterThan _ -> -1
| Geq _, _ -> 1
| _, Geq _ -> -1
| And _, _ -> 1
| _, And _ -> -1
| Or _, _ -> 1
| _, Or _ -> -1
| Not _, _ -> 1
| _, Not _ -> -1
| If _, _ -> 1
| _, If _ -> -1
| Record _, _ -> 1
| _, Record _ -> -1
| RecordProj _, _ -> 1
| _, RecordProj _ -> -1
| Match _, _ -> 1
| _, Match _ -> -1
| VariantExpr _, _ -> 1
| _, VariantExpr _ -> -1
| List _, _ -> 1
| _, List _ -> -1
| ListCons _, _ -> 1
| _, ListCons _ -> -1
| TypeError _, _ -> 1
| _, TypeError _ -> -1
| Assert _, _ -> 1
| _, Assert _ -> -1
| Assume _, _ -> 1
| _, Assume _ -> -1
| TypeVar _, _ -> 1
| _, TypeVar _ -> -1
| TypeInt, _ -> 1
| _, TypeInt -> -1
| TypeBool, _ -> 1
| _, TypeBool -> -1
| TypeRecord _, _ -> 1
| _, TypeRecord _ -> -1
| TypeList _, _ -> 1
| _, TypeList _ -> -1
| TypeArrow _, _ -> 1
| _, TypeArrow _ -> -1
| TypeArrowD _, _ -> 1
| _, TypeArrowD _ -> -1
| TypeSet _, _ -> 1
| _, TypeSet _ -> -1
| TypeUnion _, _ -> 1
| _, TypeUnion _ -> -1
| TypeIntersect _, _ -> 1
| _, TypeIntersect _ -> -1
| TypeRecurse _, _ -> 1
| _, TypeRecurse _ -> -1
| TypeUntouched _, _ -> 1
| _, TypeUntouched _ -> -1
module type Expr_desc = sig
type t
val equal : t -> t -> bool
val compare : t -> t -> int
end
module Typed_expr_desc : Expr_desc with type t = syn_bluejay_edesc = struct
type t = syn_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Semantic_typed_expr_desc : Expr_desc with type t = sem_bluejay_edesc =
struct
type t = sem_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Core_expr_desc : Expr_desc with type t = core_bluejay_edesc = struct
type t = core_bluejay_edesc
let equal = equal_expr_desc
let compare = compare_expr_desc
end
module Pattern = struct
type t = pattern
let equal = equal_pattern
let compare = compare_pattern
let to_yojson = pattern_to_yojson
end
let expr_precedence_p1 : type a. a expr -> int =
fun expr ->
match expr with
| Function _ | Let _ | LetFun _ | LetRecFun _ | LetWithType _
| LetFunWithType _ | LetRecFunWithType _ | Match _ ->
0
| If _ -> 1
| Or _ -> 2
| And _ -> 3
| Not _ -> 4
| Equal _ | Neq _ | LessThan _ | Leq _ | GreaterThan _ | Geq _ -> 5
| ListCons _ -> 6
| Plus _ | Minus _ -> 7
| Times _ | Divide _ | Modulus _ -> 8
| Assert _ | Assume _ | VariantExpr _ -> 9
| Appl _ -> 10
| RecordProj _ -> 11
| Int _ | Bool _ | Input | Var _ | List _ | Record _ -> 12
| TypeVar _ | TypeInt | TypeBool | TypeRecord _ | TypeList _ | TypeArrow _
| TypeArrowD _ | TypeSet _ | TypeUnion _ | TypeIntersect _ | TypeRecurse _
| TypeError _ | TypeUntouched _ | TypeVariant _ ->
13
* Takes expressions [ e1 ] and [ e2 ] as arguments . Returns 0 if the two
expressions have equal precedence , a negative int if [ e1 ] has lower
precedence than [ e2 ] , and a positive int if [ e1 ] has higher precedence .
expressions have equal precedence, a negative int if [e1] has lower
precedence than [e2], and a positive int if [e1] has higher precedence. *)
let expr_precedence_cmp e1 e2 = expr_precedence_p1 e1 - expr_precedence_p1 e2
let expr_desc_precedence_cmp : type a. a expr_desc -> a expr_desc -> int =
fun ed1 ed2 -> expr_precedence_cmp ed1.body ed2.body
let rec from_internal_expr_desc (e : syn_bluejay_edesc) : Bluejay_ast.expr_desc
=
let tag' = e.tag in
let e' = from_internal_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : 'a funsig) : Bluejay_ast.funsig =
let (Funsig (f, args, f_body)) = fun_sig in
let f_body' = from_internal_expr_desc f_body in
Bluejay_ast.Funsig (f, args, f_body')
and transform_typed_funsig (fun_sig : 'a typed_funsig) :
Bluejay_ast.typed_funsig =
match fun_sig with
| Typed_funsig (f, args_with_type, (f_body, ret_type)) ->
let args_with_type' =
List.map
(fun (arg, t) -> (arg, from_internal_expr_desc t))
args_with_type
in
let f_body' = from_internal_expr_desc f_body in
let ret_type' = from_internal_expr_desc ret_type in
Bluejay_ast.Typed_funsig (f, args_with_type', (f_body', ret_type'))
| DTyped_funsig (f, (arg, t), (f_body, ret_type)) ->
let f_body' = from_internal_expr_desc f_body in
let ret_type' = from_internal_expr_desc ret_type in
Bluejay_ast.DTyped_funsig
(f, (arg, from_internal_expr_desc t), (f_body', ret_type'))
and from_internal_expr (e : syn_type_bluejay) : Bluejay_ast.expr =
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, from_internal_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = from_internal_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = from_internal_expr_desc ed in
LetFun (fun_sig', ed')
| LetWithType (x, ed1, ed2, t) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
let t' = from_internal_expr_desc t in
LetWithType (x, ed1', ed2', t')
| LetRecFunWithType (fs, ed) ->
let fs' = List.map transform_typed_funsig fs in
let ed' = from_internal_expr_desc ed in
LetRecFunWithType (fs', ed')
| LetFunWithType (fun_sig, ed) ->
let fun_sig' = transform_typed_funsig fun_sig in
let ed' = from_internal_expr_desc ed in
LetFunWithType (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = from_internal_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
let ed3' = from_internal_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map from_internal_expr_desc r in
Record r'
| RecordProj (ed, l) ->
let ed' = from_internal_expr_desc ed in
RecordProj (ed', l)
| Match (m_ed, pe_lst) ->
let m_ed' = from_internal_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = from_internal_expr_desc ed in
(p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (lbl, ed) ->
let ed' = from_internal_expr_desc ed in
VariantExpr (lbl, ed')
| List eds ->
let eds' = List.map from_internal_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
ListCons (ed1', ed2')
| TypeError x -> TypeError x
| Assert ed ->
let ed' = from_internal_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = from_internal_expr_desc ed in
Assume ed'
| TypeVar x -> TypeVar x
| TypeInt -> TypeInt
| TypeBool -> TypeBool
| TypeRecord r ->
let r' = Ident_map.map from_internal_expr_desc r in
TypeRecord r'
| TypeList ed ->
let ed' = from_internal_expr_desc ed in
TypeList ed'
| TypeArrow (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeArrow (ed1', ed2')
| TypeArrowD ((x, ed1), ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeArrowD ((x, ed1'), ed2')
| TypeSet (ed, p) ->
let ed' = from_internal_expr_desc ed in
let p' = from_internal_expr_desc p in
TypeSet (ed', p')
| TypeUnion (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeUnion (ed1', ed2')
| TypeIntersect (ed1, ed2) ->
let ed1' = from_internal_expr_desc ed1 in
let ed2' = from_internal_expr_desc ed2 in
TypeIntersect (ed1', ed2')
| TypeRecurse (tv, ed) ->
let ed' = from_internal_expr_desc ed in
TypeRecurse (tv, ed')
| TypeUntouched s -> TypeUntouched s
| TypeVariant (l, ed) ->
let ed' = from_internal_expr_desc ed in
TypeVariant (l, ed')
let rec to_internal_expr_desc (e : Bluejay_ast.expr_desc) : syn_bluejay_edesc =
let tag' = e.tag in
let e' = to_internal_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : Bluejay_ast.funsig) : 'a funsig =
let (Bluejay_ast.Funsig (f, args, f_body)) = fun_sig in
let f_body' = to_internal_expr_desc f_body in
Funsig (f, args, f_body')
and transform_typed_funsig (fun_sig : Bluejay_ast.typed_funsig) :
'a typed_funsig =
match fun_sig with
| Bluejay_ast.Typed_funsig (f, args_with_type, (f_body, ret_type)) ->
let args_with_type' =
List.map (fun (arg, t) -> (arg, to_internal_expr_desc t)) args_with_type
in
let f_body' = to_internal_expr_desc f_body in
let ret_type' = to_internal_expr_desc ret_type in
Typed_funsig (f, args_with_type', (f_body', ret_type'))
| Bluejay_ast.DTyped_funsig (f, (arg, t), (f_body, ret_type)) ->
let f_body' = to_internal_expr_desc f_body in
let ret_type' = to_internal_expr_desc ret_type in
DTyped_funsig (f, (arg, to_internal_expr_desc t), (f_body', ret_type'))
and to_internal_expr (e : Bluejay_ast.expr) : syn_type_bluejay =
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, to_internal_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = to_internal_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = to_internal_expr_desc ed in
LetFun (fun_sig', ed')
| LetWithType (x, ed1, ed2, t) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
let t' = to_internal_expr_desc t in
LetWithType (x, ed1', ed2', t')
| LetRecFunWithType (fs, ed) ->
let fs' = List.map transform_typed_funsig fs in
let ed' = to_internal_expr_desc ed in
LetRecFunWithType (fs', ed')
| LetFunWithType (fun_sig, ed) ->
let fun_sig' = transform_typed_funsig fun_sig in
let ed' = to_internal_expr_desc ed in
LetFunWithType (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = to_internal_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
let ed3' = to_internal_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map to_internal_expr_desc r in
Record r'
| RecordProj (ed, l) ->
let ed' = to_internal_expr_desc ed in
RecordProj (ed', l)
| Match (m_ed, pe_lst) ->
let m_ed' = to_internal_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = to_internal_expr_desc ed in
(p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (lbl, ed) ->
let ed' = to_internal_expr_desc ed in
VariantExpr (lbl, ed')
| List eds ->
let eds' = List.map to_internal_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
ListCons (ed1', ed2')
| TypeError x -> TypeError x
| Assert ed ->
let ed' = to_internal_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = to_internal_expr_desc ed in
Assume ed'
| TypeVar x -> TypeVar x
| TypeInt -> TypeInt
| TypeBool -> TypeBool
| TypeRecord r ->
let r' = Ident_map.map to_internal_expr_desc r in
TypeRecord r'
| TypeList ed ->
let ed' = to_internal_expr_desc ed in
TypeList ed'
| TypeArrow (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeArrow (ed1', ed2')
| TypeArrowD ((x, ed1), ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeArrowD ((x, ed1'), ed2')
| TypeSet (ed, p) ->
let ed' = to_internal_expr_desc ed in
let p' = to_internal_expr_desc p in
TypeSet (ed', p')
| TypeUnion (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeUnion (ed1', ed2')
| TypeIntersect (ed1, ed2) ->
let ed1' = to_internal_expr_desc ed1 in
let ed2' = to_internal_expr_desc ed2 in
TypeIntersect (ed1', ed2')
| TypeRecurse (tv, ed) ->
let ed' = to_internal_expr_desc ed in
TypeRecurse (tv, ed')
| TypeUntouched s -> TypeUntouched s
| TypeVariant (lbl, ed) ->
let ed' = to_internal_expr_desc ed in
TypeVariant (lbl, ed')
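(* Conversion from the Jay AST into core Bluejay: expression descriptors
   keep their tags while bodies are converted structurally. *)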
let rec from_jay_expr_desc (e : Jay.Jay_ast.expr_desc) : core_bluejay_edesc =
let tag' = e.tag in
let e' = from_jay_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : Jay.Jay_ast.funsig) : core_only funsig =
let (Jay.Jay_ast.Funsig (f, args, f_body)) = fun_sig in
let f_body' = from_jay_expr_desc f_body in
Funsig (f, args, f_body')
and from_jay_expr (e : Jay.Jay_ast.expr) : core_bluejay =
let pat_conv (p : Jay.Jay_ast.pattern) : pattern =
match p with
| AnyPat -> AnyPat
| IntPat -> IntPat
| BoolPat -> BoolPat
| FunPat -> FunPat
| RecPat r -> RecPat r
| StrictRecPat r -> StrictRecPat r
| VariantPat (Variant_label l, x) -> VariantPat (Variant_label l, x)
| VarPat x -> VarPat x
| EmptyLstPat -> EmptyLstPat
| LstDestructPat (hd, tl) -> LstDestructPat (hd, tl)
in
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, from_jay_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = from_jay_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = from_jay_expr_desc ed in
LetFun (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = from_jay_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
let ed3' = from_jay_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map from_jay_expr_desc r in
Record r'
| RecordProj (ed, Label l) ->
let ed' = from_jay_expr_desc ed in
RecordProj (ed', Label l)
| Match (m_ed, pe_lst) ->
let m_ed' = from_jay_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = from_jay_expr_desc ed in
(pat_conv p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (Variant_label lbl, ed) ->
let ed' = from_jay_expr_desc ed in
VariantExpr (Variant_label lbl, ed')
| List eds ->
let eds' = List.map from_jay_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = from_jay_expr_desc ed1 in
let ed2' = from_jay_expr_desc ed2 in
ListCons (ed1', ed2')
| Assert ed ->
let ed' = from_jay_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = from_jay_expr_desc ed in
Assume ed'
| Error x -> TypeError x
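(* Conversion back from core Bluejay to the Jay AST; [TypeError] becomes
   Jay's [Error] constructor. *)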
let rec to_jay_expr_desc (e : core_bluejay_edesc) : Jay.Jay_ast.expr_desc =
let tag' = e.tag in
let e' = to_jay_expr e.body in
{ tag = tag'; body = e' }
and transform_funsig (fun_sig : core_only funsig) : Jay.Jay_ast.funsig =
let (Funsig (f, args, f_body)) = fun_sig in
let f_body' = to_jay_expr_desc f_body in
Jay.Jay_ast.Funsig (f, args, f_body')
and to_jay_expr (e : core_bluejay) : Jay.Jay_ast.expr =
let pat_conv (p : pattern) : Jay.Jay_ast.pattern =
match p with
| AnyPat -> AnyPat
| IntPat -> IntPat
| BoolPat -> BoolPat
| FunPat -> FunPat
| RecPat r -> RecPat r
| StrictRecPat r -> StrictRecPat r
| VariantPat (Variant_label l, x) -> VariantPat (Variant_label l, x)
| VarPat x -> VarPat x
| EmptyLstPat -> EmptyLstPat
| LstDestructPat (hd, tl) -> LstDestructPat (hd, tl)
in
match e with
| Int n -> Int n
| Bool b -> Bool b
| Var v -> Var v
| Function (args, f_edesc) -> Function (args, to_jay_expr_desc f_edesc)
| Input -> Input
| Appl (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Appl (ed1', ed2')
| Let (x, ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Let (x, ed1', ed2')
| LetRecFun (fs, ed) ->
let fs' = List.map transform_funsig fs in
let ed' = to_jay_expr_desc ed in
LetRecFun (fs', ed')
| LetFun (fun_sig, ed) ->
let fun_sig' = transform_funsig fun_sig in
let ed' = to_jay_expr_desc ed in
LetFun (fun_sig', ed')
| Plus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Plus (ed1', ed2')
| Minus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Minus (ed1', ed2')
| Times (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Times (ed1', ed2')
| Divide (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Divide (ed1', ed2')
| Modulus (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Modulus (ed1', ed2')
| Equal (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Equal (ed1', ed2')
| Neq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Neq (ed1', ed2')
| LessThan (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
LessThan (ed1', ed2')
| Leq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Leq (ed1', ed2')
| GreaterThan (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
GreaterThan (ed1', ed2')
| Geq (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Geq (ed1', ed2')
| And (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
And (ed1', ed2')
| Or (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
Or (ed1', ed2')
| Not ed ->
let ed' = to_jay_expr_desc ed in
Not ed'
| If (ed1, ed2, ed3) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
let ed3' = to_jay_expr_desc ed3 in
If (ed1', ed2', ed3')
| Record r ->
let r' = Ident_map.map to_jay_expr_desc r in
Record r'
| RecordProj (ed, Label l) ->
let ed' = to_jay_expr_desc ed in
RecordProj (ed', Label l)
| Match (m_ed, pe_lst) ->
let m_ed' = to_jay_expr_desc m_ed in
let pe_lst' =
List.map
(fun (p, ed) ->
let ed' = to_jay_expr_desc ed in
(pat_conv p, ed'))
pe_lst
in
Match (m_ed', pe_lst')
| VariantExpr (Variant_label lbl, ed) ->
let ed' = to_jay_expr_desc ed in
VariantExpr (Variant_label lbl, ed')
| List eds ->
let eds' = List.map to_jay_expr_desc eds in
List eds'
| ListCons (ed1, ed2) ->
let ed1' = to_jay_expr_desc ed1 in
let ed2' = to_jay_expr_desc ed2 in
ListCons (ed1', ed2')
| Assert ed ->
let ed' = to_jay_expr_desc ed in
Assert ed'
| Assume ed ->
let ed' = to_jay_expr_desc ed in
Assume ed'
| TypeError x -> Error x
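(* Predicates for classifying syntactic Bluejay expressions. *)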
let is_type_expr (ed : syn_bluejay_edesc) : bool =
match ed.body with
| TypeVar _ | TypeInt | TypeBool | TypeRecord _ | TypeList _ | TypeArrow _
| TypeArrowD _ | TypeUnion _ | TypeIntersect _ | TypeSet _ | TypeRecurse _
| TypeVariant _ ->
true
| _ -> false
let is_fun_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeArrow _ | TypeArrowD _ -> true | _ -> false
let is_dependent_fun_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeArrowD _ -> true | _ -> false
let is_polymorphic_type (ed : syn_bluejay_edesc) : bool =
match ed.body with TypeUntouched _ -> true | _ -> false
let get_dependent_fun_var (ed : syn_bluejay_edesc) : ident =
match ed.body with
| TypeArrowD ((x, _), _) -> x
| _ ->
failwith
"get_dependent_fun_var: Should only be called with a dependent \
function type!"
let is_record_type : type a. a expr_desc -> bool =
fun ed -> match ed.body with TypeRecord _ -> true | _ -> false
let is_record_pat (p : pattern) : bool =
match p with
| StrictRecPat rec_pat | RecPat rec_pat ->
not @@ Ident_map.mem (Ident "~untouched") rec_pat
| _ -> false
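(* Structural subtyping check: records use width and depth subtyping, arrows
   are contravariant in the domain, and unions/intersections on the right are
   decomposed; the remaining combinations are not implemented yet. *)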
let rec is_subtype (ed1 : syn_bluejay_edesc) (ed2 : syn_bluejay_edesc) : bool =
if tagless_equal_expr_desc ed1 ed2
then true
else
match (ed1.body, ed2.body) with
| TypeRecord r1, TypeRecord r2 ->
let r1_labels = Ident_map.key_list r1 in
let r2_labels = Ident_map.key_list r2 in
let prelim = List.subset compare_ident r2_labels r1_labels in
if prelim
then
Ident_map.for_all (fun k v -> is_subtype (Ident_map.find k r1) v) r2
else false
| TypeList t1, TypeList t2 -> is_subtype t1 t2
| TypeArrow (dom1, cod1), TypeArrow (dom2, cod2) ->
is_subtype dom2 dom1 && is_subtype cod1 cod2
| _, TypeUnion (t1, t2) ->
if is_subtype ed1 t1 then true else is_subtype ed1 t2
| _, TypeIntersect (t1, t2) ->
      if is_subtype ed1 t1
      then is_subtype ed1 t2
      else false
| _ -> failwith "TBI!" |
e48539820f5abb9bd5d6ee273cc0b448bb6e49dbda982db1f0c1d4c5661d69ab | tsloughter/kuberl | kuberl_v1beta1_self_subject_access_review_spec.erl | -module(kuberl_v1beta1_self_subject_access_review_spec).
-export([encode/1]).
-export_type([kuberl_v1beta1_self_subject_access_review_spec/0]).
-type kuberl_v1beta1_self_subject_access_review_spec() ::
#{ 'nonResourceAttributes' => kuberl_v1beta1_non_resource_attributes:kuberl_v1beta1_non_resource_attributes(),
'resourceAttributes' => kuberl_v1beta1_resource_attributes:kuberl_v1beta1_resource_attributes()
}.
encode(#{ 'nonResourceAttributes' := NonResourceAttributes,
'resourceAttributes' := ResourceAttributes
}) ->
#{ 'nonResourceAttributes' => NonResourceAttributes,
'resourceAttributes' => ResourceAttributes
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_self_subject_access_review_spec.erl | erlang | -module(kuberl_v1beta1_self_subject_access_review_spec).
-export([encode/1]).
-export_type([kuberl_v1beta1_self_subject_access_review_spec/0]).
-type kuberl_v1beta1_self_subject_access_review_spec() ::
#{ 'nonResourceAttributes' => kuberl_v1beta1_non_resource_attributes:kuberl_v1beta1_non_resource_attributes(),
'resourceAttributes' => kuberl_v1beta1_resource_attributes:kuberl_v1beta1_resource_attributes()
}.
encode(#{ 'nonResourceAttributes' := NonResourceAttributes,
'resourceAttributes' := ResourceAttributes
}) ->
#{ 'nonResourceAttributes' => NonResourceAttributes,
'resourceAttributes' => ResourceAttributes
}.
|
|
458cfd7900c6d33454f0f0f6690fd0a1eb815eabbbd0f11ed435b9badfbe9940 | mirage/ke | fke.mli | include Sigs.F
module Weighted : Sigs.Weighted.F
| null | https://raw.githubusercontent.com/mirage/ke/0b3d570f56c558766e8d53600e59ce65f3218556/lib/fke.mli | ocaml | include Sigs.F
module Weighted : Sigs.Weighted.F
|
|
599778db4dddcce54eed9739f2e2b24788e06c65b04dcc67705fb5d49d00a6f4 | wdhowe/clojure-snippets | project.clj | (defproject config-test "0.1.0-SNAPSHOT"
:description "An example of using edn configuration"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]]
:main ^:skip-aot config-test.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/wdhowe/clojure-snippets/0c3247ce99a563312b549d03f080b8cf449b541d/file_operations/config_test/project.clj | clojure | (defproject config-test "0.1.0-SNAPSHOT"
:description "An example of using edn configuration"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]]
:main ^:skip-aot config-test.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
|
f24c45ed2a003f9f39fcc4065d86de8ef462b335eb1fd2568ec24a8fcd6fcdba | vernemq/vernemq | vmq_parser.erl | -module(vmq_parser).
-include("vmq_types.hrl").
-export([parse/1, parse/2, serialise/1]).
-dialyzer({no_match, utf8/1}).
-export([
gen_connect/2,
gen_connack/0,
gen_connack/1,
gen_connack/2,
gen_publish/4,
gen_puback/1,
gen_pubrec/1,
gen_pubrel/1,
gen_pubcomp/1,
gen_subscribe/2,
gen_subscribe/3,
gen_suback/2,
gen_unsubscribe/2,
gen_unsuback/1,
gen_pingreq/0,
gen_pingresp/0,
gen_disconnect/0
]).
%% frame types
-define(CONNECT, 1).
-define(CONNACK, 2).
-define(PUBLISH, 3).
-define(PUBACK, 4).
-define(PUBREC, 5).
-define(PUBREL, 6).
-define(PUBCOMP, 7).
-define(SUBSCRIBE, 8).
-define(SUBACK, 9).
-define(UNSUBSCRIBE, 10).
-define(UNSUBACK, 11).
-define(PINGREQ, 12).
-define(PINGRESP, 13).
-define(DISCONNECT, 14).
-define(RESERVED, 0).
-define(PROTOCOL_MAGIC_31, <<"MQIsdp">>).
-define(PROTOCOL_MAGIC_311, <<"MQTT">>).
-define(MAX_LEN, 16#fffffff).
-define(HIGHBIT, 2#10000000).
-define(LOWBITS, 2#01111111).
-define(MAX_PACKET_SIZE, 268435455).
-spec parse(binary()) ->
{mqtt_frame(), binary()} | {error, atom()} | {{error, atom()}, any()} | more.
parse(Data) ->
parse(Data, ?MAX_PACKET_SIZE).
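%% Decode the 1-4 byte MQTT remaining-length field, then hand the fixed
%% header byte and the packet body over to parse/4.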
-spec parse(binary(), non_neg_integer()) -> {mqtt_frame(), binary()} | {error, atom()} | more.
parse(<<Fixed:1/binary, 0:1, DataSize:7, Data/binary>>, MaxSize) ->
parse(DataSize, MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 0:1, L2:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7), MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 1:1, L2:7, 0:1, L3:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7) + (L3 bsl 14), MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 1:1, L2:7, 1:1, L3:7, 0:1, L4:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7) + (L3 bsl 14) + (L4 bsl 21), MaxSize, Fixed, Data);
parse(<<_:8/binary, _/binary>>, _) ->
{error, cant_parse_fixed_header};
parse(_, _) ->
more.
parse(DataSize, 0, Fixed, Data) when byte_size(Data) >= DataSize ->
%% no max size limit
<<Var:DataSize/binary, Rest/binary>> = Data,
{variable(Fixed, Var), Rest};
parse(DataSize, 0, _Fixed, Data) when byte_size(Data) < DataSize ->
more;
parse(DataSize, MaxSize, Fixed, Data) when
byte_size(Data) >= DataSize,
byte_size(Data) =< MaxSize
->
<<Var:DataSize/binary, Rest/binary>> = Data,
{variable(Fixed, Var), Rest};
parse(DataSize, MaxSize, _, _) when
DataSize > MaxSize
->
{error, packet_exceeds_max_size};
parse(_, _, _, _) ->
more.
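%% Decode the variable header and payload of each MQTT control packet,
%% dispatching on the fixed-header byte.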
-spec variable(binary(), binary()) -> mqtt_frame() | {error, atom()}.
variable(
<<?PUBLISH:4, Dup:1, 0:2, Retain:1>>, <<TopicLen:16/big, Topic:TopicLen/binary, Payload/binary>>
) ->
case vmq_topic:validate_topic(publish, Topic) of
{ok, ParsedTopic} ->
#mqtt_publish{
dup = Dup,
retain = Retain,
topic = ParsedTopic,
qos = 0,
payload = Payload
};
{error, Reason} ->
{error, Reason}
end;
variable(
<<?PUBLISH:4, Dup:1, QoS:2, Retain:1>>,
<<TopicLen:16/big, Topic:TopicLen/binary, MessageId:16/big, Payload/binary>>
) when
QoS < 3
->
case vmq_topic:validate_topic(publish, Topic) of
{ok, ParsedTopic} ->
#mqtt_publish{
dup = Dup,
retain = Retain,
topic = ParsedTopic,
qos = QoS,
message_id = MessageId,
payload = Payload
};
{error, Reason} ->
{error, Reason}
end;
variable(<<?PUBACK:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_puback{message_id = MessageId};
variable(<<?PUBREC:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_pubrec{message_id = MessageId};
variable(<<?PUBREL:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big>>) ->
#mqtt_pubrel{message_id = MessageId};
variable(<<?PUBCOMP:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_pubcomp{message_id = MessageId};
variable(<<?SUBSCRIBE:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big, Topics/binary>>) ->
case parse_topics(Topics, ?SUBSCRIBE, []) of
{ok, ParsedTopics} ->
#mqtt_subscribe{
topics = ParsedTopics,
message_id = MessageId
};
E ->
E
end;
variable(<<?UNSUBSCRIBE:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big, Topics/binary>>) ->
case parse_topics(Topics, ?UNSUBSCRIBE, []) of
{ok, ParsedTopics} ->
#mqtt_unsubscribe{
topics = ParsedTopics,
message_id = MessageId
};
E ->
E
end;
variable(<<?SUBACK:4, 0:4>>, <<MessageId:16/big, Acks/binary>>) ->
#mqtt_suback{
qos_table = parse_acks(Acks, []),
message_id = MessageId
};
variable(<<?UNSUBACK:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_unsuback{message_id = MessageId};
variable(<<?CONNECT:4, 0:4>>, <<L:16/big, PMagic:L/binary, _/binary>>) when
not ((PMagic == ?PROTOCOL_MAGIC_311) or
(PMagic == ?PROTOCOL_MAGIC_31))
->
{error, unknown_protocol_magic};
variable(
<<?CONNECT:4, 0:4>>,
<<L:16/big, _:L/binary, ProtoVersion:8, UserNameFlag:1, PasswordFlag:1, WillRetain:1, WillQos:2,
WillFlag:1, CleanSession:1,
% reserved
0:1, KeepAlive:16/big, ClientIdLen:16/big, ClientId:ClientIdLen/binary, Rest0/binary>>
) ->
Conn0 = #mqtt_connect{
proto_ver = ProtoVersion,
clean_session = CleanSession,
keep_alive = KeepAlive,
client_id = ClientId
},
case parse_last_will_topic(Rest0, WillFlag, WillRetain, WillQos, Conn0) of
{ok, Rest1, Conn1} ->
case parse_username(Rest1, UserNameFlag, Conn1) of
{ok, Rest2, Conn2} ->
case parse_password(Rest2, UserNameFlag, PasswordFlag, Conn2) of
{ok, <<>>, Conn3} ->
Conn3;
{ok, _, _} ->
{error, invalid_rest_of_binary};
E ->
E
end;
E ->
E
end;
E ->
E
end;
variable(<<?CONNACK:4, 0:4>>, <<0:7, SP:1, ReturnCode:8/big>>) ->
#mqtt_connack{session_present = SP, return_code = ReturnCode};
variable(<<?PINGREQ:4, 0:4>>, <<>>) ->
#mqtt_pingreq{};
variable(<<?PINGRESP:4, 0:4>>, <<>>) ->
#mqtt_pingresp{};
variable(<<?DISCONNECT:4, 0:4>>, <<>>) ->
#mqtt_disconnect{};
variable(_, _) ->
{error, cant_parse_variable_header}.
parse_last_will_topic(Rest, 0, 0, 0, Conn) ->
{ok, Rest, Conn};
parse_last_will_topic(
<<WillTopicLen:16/big, WillTopic:WillTopicLen/binary, WillMsgLen:16/big,
WillMsg:WillMsgLen/binary, Rest/binary>>,
1,
Retain,
QoS,
Conn
) ->
case vmq_topic:validate_topic(publish, WillTopic) of
{ok, ParsedTopic} ->
{ok, Rest, Conn#mqtt_connect{
will_msg = WillMsg,
will_topic = ParsedTopic,
will_retain = Retain,
will_qos = QoS
}};
_ ->
{error, cant_validate_last_will_topic}
end;
parse_last_will_topic(_, _, _, _, _) ->
{error, cant_parse_last_will}.
parse_username(Rest, 0, Conn) ->
{ok, Rest, Conn};
parse_username(<<Len:16/big, UserName:Len/binary, Rest/binary>>, 1, Conn) ->
{ok, Rest, Conn#mqtt_connect{username = UserName}};
parse_username(_, 1, _) ->
{error, cant_parse_username}.
parse_password(Rest, _, 0, Conn) ->
{ok, Rest, Conn};
parse_password(<<Len:16/big, Password:Len/binary, Rest/binary>>, 1, 1, Conn) ->
{ok, Rest, Conn#mqtt_connect{password = Password}};
parse_password(_, 0, 1, _) ->
{error, username_flag_not_set};
parse_password(_, _, 1, _) ->
{error, cant_parse_password}.
parse_topics(<<>>, _, []) ->
{error, no_topic_provided};
parse_topics(<<>>, _, Topics) ->
{ok, Topics};
parse_topics(<<L:16/big, Topic:L/binary, 0:6, QoS:2, Rest/binary>>, ?SUBSCRIBE = Sub, Acc) when
(QoS >= 0) and (QoS < 3)
->
case vmq_topic:validate_topic(subscribe, Topic) of
{ok, ParsedTopic} ->
parse_topics(Rest, Sub, [{ParsedTopic, QoS} | Acc]);
E ->
E
end;
parse_topics(<<L:16/big, Topic:L/binary, Rest/binary>>, ?UNSUBSCRIBE = Sub, Acc) ->
case vmq_topic:validate_topic(subscribe, Topic) of
{ok, ParsedTopic} ->
parse_topics(Rest, Sub, [ParsedTopic | Acc]);
E ->
E
end;
parse_topics(_, _, _) ->
{error, cant_parse_topics}.
parse_acks(<<>>, Acks) ->
Acks;
parse_acks(<<128:8, Rest/binary>>, Acks) ->
parse_acks(Rest, [not_allowed | Acks]);
parse_acks(<<_:6, QoS:2, Rest/binary>>, Acks) when
is_integer(QoS) and ((QoS >= 0) and (QoS =< 2))
->
parse_acks(Rest, [QoS | Acks]).
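%% Serialise an MQTT frame record into an iolist: fixed header,
%% remaining length, then variable header and payload.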
-spec serialise(mqtt_frame()) -> binary() | iolist().
serialise(#mqtt_publish{
qos = 0,
topic = Topic,
retain = Retain,
dup = Dup,
payload = Payload
}) ->
Var = [utf8(vmq_topic:unword(Topic)), Payload],
LenBytes = serialise_len(iolist_size(Var)),
[<<?PUBLISH:4, (flag(Dup)):1/integer, 0:2/integer, (flag(Retain)):1/integer>>, LenBytes, Var];
serialise(#mqtt_publish{
message_id = MessageId,
topic = Topic,
qos = QoS,
retain = Retain,
dup = Dup,
payload = Payload
}) ->
Var = [utf8(vmq_topic:unword(Topic)), msg_id(MessageId), Payload],
LenBytes = serialise_len(iolist_size(Var)),
[
<<?PUBLISH:4, (flag(Dup)):1/integer, (default(QoS, 0)):2/integer,
(flag(Retain)):1/integer>>,
LenBytes,
Var
];
serialise(#mqtt_puback{message_id = MessageId}) ->
<<?PUBACK:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pubrel{message_id = MessageId}) ->
<<?PUBREL:4, 0:2, 1:1, 0:1, 2, MessageId:16/big>>;
serialise(#mqtt_pubrec{message_id = MessageId}) ->
<<?PUBREC:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pubcomp{message_id = MessageId}) ->
<<?PUBCOMP:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_connect{
proto_ver = ProtoVersion,
username = UserName,
password = Password,
will_retain = WillRetain,
will_qos = WillQos,
clean_session = CleanSession,
keep_alive = KeepAlive,
client_id = ClientId,
will_topic = WillTopic,
will_msg = WillMsg
}) ->
{PMagicL, PMagic} = proto(ProtoVersion),
Var = [
<<PMagicL:16/big-unsigned-integer, PMagic/binary, ProtoVersion:8/unsigned-integer,
(flag(UserName)):1/integer, (flag(Password)):1/integer, (flag(WillRetain)):1/integer,
(default(WillQos, 0)):2/integer, (flag(WillTopic)):1/integer,
(flag(CleanSession)):1/integer,
% reserved
0:1, (default(KeepAlive, 0)):16/big-unsigned-integer>>,
utf8(ClientId),
utf8(vmq_topic:unword(WillTopic)),
utf8(WillMsg),
utf8(UserName),
utf8(Password)
],
LenBytes = serialise_len(iolist_size(Var)),
[<<?CONNECT:4, 0:4>>, LenBytes, Var];
serialise(#mqtt_connack{session_present = SP, return_code = RC}) ->
[<<?CONNACK:4, 0:4>>, serialise_len(2), <<0:7, (flag(SP)):1/integer>>, <<RC:8/big>>];
serialise(#mqtt_subscribe{message_id = MessageId, topics = Topics}) ->
SerialisedTopics = serialise_topics(?SUBSCRIBE, Topics, []),
LenBytes = serialise_len(iolist_size(SerialisedTopics) + 2),
[<<?SUBSCRIBE:4, 0:2, 1:1, 0:1>>, LenBytes, <<MessageId:16/big>>, SerialisedTopics];
serialise(#mqtt_suback{message_id = MessageId, qos_table = QosTable}) ->
SerialisedAcks = serialise_acks(QosTable, []),
LenBytes = serialise_len(iolist_size(SerialisedAcks) + 2),
[<<?SUBACK:4, 0:4>>, LenBytes, <<MessageId:16/big>>, SerialisedAcks];
serialise(#mqtt_unsubscribe{message_id = MessageId, topics = Topics}) ->
SerialisedTopics = serialise_topics(?UNSUBSCRIBE, Topics, []),
LenBytes = serialise_len(iolist_size(SerialisedTopics) + 2),
[
<<?UNSUBSCRIBE:4, 0:2, 1:1, 0:1>>,
LenBytes,
<<MessageId:16/big>>,
SerialisedTopics
];
serialise(#mqtt_unsuback{message_id = MessageId}) ->
<<?UNSUBACK:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pingreq{}) ->
<<?PINGREQ:4, 0:4, 0>>;
serialise(#mqtt_pingresp{}) ->
<<?PINGRESP:4, 0:4, 0>>;
serialise(#mqtt_disconnect{}) ->
<<?DISCONNECT:4, 0:4, 0>>.
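%% Encode the remaining-length field as a variable-length integer:
%% 7 bits per byte, high bit set on every byte except the last.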
serialise_len(N) when N =< ?LOWBITS ->
<<0:1, N:7>>;
serialise_len(N) ->
<<1:1, (N rem ?HIGHBIT):7, (serialise_len(N div ?HIGHBIT))/binary>>.
serialise_topics(?SUBSCRIBE = Sub, [{Topic, QoS} | Rest], Acc) ->
serialise_topics(Sub, Rest, [utf8(vmq_topic:unword(Topic)), <<0:6, QoS:2>> | Acc]);
serialise_topics(?UNSUBSCRIBE = Sub, [Topic | Rest], Acc) ->
serialise_topics(Sub, Rest, [utf8(vmq_topic:unword(Topic)) | Acc]);
serialise_topics(_, [], Topics) ->
Topics.
serialise_acks([QoS | Rest], Acks) when is_integer(QoS) and ((QoS >= 0) and (QoS =< 2)) ->
serialise_acks(Rest, [<<0:6, QoS:2>> | Acks]);
serialise_acks([_ | Rest], Acks) ->
    %% use 0x80 failure code for everything else
serialise_acks(Rest, [<<128:8>> | Acks]);
serialise_acks([], Acks) ->
Acks.
proto(4) -> {4, ?PROTOCOL_MAGIC_311};
proto(3) -> {6, ?PROTOCOL_MAGIC_31};
proto(131) -> {6, ?PROTOCOL_MAGIC_31};
proto(132) -> {4, ?PROTOCOL_MAGIC_311}.
flag(<<>>) -> 0;
flag(undefined) -> 0;
flag(0) -> 0;
flag(1) -> 1;
flag(false) -> 0;
flag(true) -> 1;
flag(V) when is_binary(V) orelse is_list(V) -> 1;
%% for test purposes
flag(empty) -> 1;
flag(_) -> 0.
msg_id(undefined) -> <<>>;
msg_id(MsgId) -> <<MsgId:16/big>>.
default(undefined, Default) -> Default;
default(Val, _) -> Val.
utf8(<<>>) ->
<<>>;
utf8(undefined) ->
<<>>;
%% for test purposes, useful if you want to encode an empty string..
utf8(empty) ->
<<0:16/big>>;
utf8(IoList) when is_list(IoList) ->
[<<(iolist_size(IoList)):16/big>>, IoList];
utf8(Bin) when is_binary(Bin) ->
<<(byte_size(Bin)):16/big, Bin/binary>>.
ensure_binary(L) when is_list(L) -> list_to_binary(L);
ensure_binary(B) when is_binary(B) -> B;
ensure_binary(undefined) -> undefined;
% for test purposes
ensure_binary(empty) -> empty.
%%%%%%% packet generator functions (useful for testing)
gen_connect(ClientId, Opts) ->
Frame = #mqtt_connect{
client_id = ensure_binary(ClientId),
clean_session = proplists:get_value(clean_session, Opts, true),
keep_alive = proplists:get_value(keepalive, Opts, 60),
username = ensure_binary(proplists:get_value(username, Opts)),
password = ensure_binary(proplists:get_value(password, Opts)),
proto_ver = proplists:get_value(proto_ver, Opts, 3),
will_topic = ensure_binary(proplists:get_value(will_topic, Opts)),
will_qos = proplists:get_value(will_qos, Opts, 0),
will_retain = proplists:get_value(will_retain, Opts, false),
will_msg = ensure_binary(proplists:get_value(will_msg, Opts))
},
iolist_to_binary(serialise(Frame)).
gen_connack() ->
gen_connack(?CONNACK_ACCEPT).
gen_connack(RC) ->
gen_connack(0, RC).
gen_connack(SP, RC) ->
iolist_to_binary(serialise(#mqtt_connack{session_present = flag(SP), return_code = RC})).
gen_publish(Topic, Qos, Payload, Opts) ->
Frame = #mqtt_publish{
dup = proplists:get_value(dup, Opts, false),
qos = Qos,
retain = proplists:get_value(retain, Opts, false),
topic = ensure_binary(Topic),
message_id = proplists:get_value(mid, Opts, 0),
payload = ensure_binary(Payload)
},
iolist_to_binary(serialise(Frame)).
gen_puback(MId) ->
iolist_to_binary(serialise(#mqtt_puback{message_id = MId})).
gen_pubrec(MId) ->
iolist_to_binary(serialise(#mqtt_pubrec{message_id = MId})).
gen_pubrel(MId) ->
iolist_to_binary(serialise(#mqtt_pubrel{message_id = MId})).
gen_pubcomp(MId) ->
iolist_to_binary(serialise(#mqtt_pubcomp{message_id = MId})).
gen_subscribe(MId, [{_, _} | _] = Topics) ->
BinTopics = [{ensure_binary(Topic), QoS} || {Topic, QoS} <- Topics],
iolist_to_binary(serialise(#mqtt_subscribe{topics = BinTopics, message_id = MId})).
gen_subscribe(MId, Topic, QoS) ->
gen_subscribe(MId, [{Topic, QoS}]).
gen_suback(MId, QoSs) when is_list(QoSs) ->
iolist_to_binary(serialise(#mqtt_suback{qos_table = QoSs, message_id = MId}));
gen_suback(MId, QoS) ->
gen_suback(MId, [QoS]).
gen_unsubscribe(MId, Topic) ->
iolist_to_binary(
serialise(#mqtt_unsubscribe{topics = [ensure_binary(Topic)], message_id = MId})
).
gen_unsuback(MId) ->
iolist_to_binary(serialise(#mqtt_unsuback{message_id = MId})).
gen_pingreq() ->
iolist_to_binary(serialise(#mqtt_pingreq{})).
gen_pingresp() ->
iolist_to_binary(serialise(#mqtt_pingresp{})).
gen_disconnect() ->
iolist_to_binary(serialise(#mqtt_disconnect{})).
| null | https://raw.githubusercontent.com/vernemq/vernemq/234d253250cb5371b97ebb588622076fdabc6a5f/apps/vmq_commons/src/vmq_parser.erl | erlang | frame types
no max size limit
reserved
reserved
for test purposes
for test purposes, useful if you want to encode an empty string..
for test purposes
packet generator functions (useful for testing) | -module(vmq_parser).
-include("vmq_types.hrl").
-export([parse/1, parse/2, serialise/1]).
-dialyzer({no_match, utf8/1}).
-export([
gen_connect/2,
gen_connack/0,
gen_connack/1,
gen_connack/2,
gen_publish/4,
gen_puback/1,
gen_pubrec/1,
gen_pubrel/1,
gen_pubcomp/1,
gen_subscribe/2,
gen_subscribe/3,
gen_suback/2,
gen_unsubscribe/2,
gen_unsuback/1,
gen_pingreq/0,
gen_pingresp/0,
gen_disconnect/0
]).
-define(CONNECT, 1).
-define(CONNACK, 2).
-define(PUBLISH, 3).
-define(PUBACK, 4).
-define(PUBREC, 5).
-define(PUBREL, 6).
-define(PUBCOMP, 7).
-define(SUBSCRIBE, 8).
-define(SUBACK, 9).
-define(UNSUBSCRIBE, 10).
-define(UNSUBACK, 11).
-define(PINGREQ, 12).
-define(PINGRESP, 13).
-define(DISCONNECT, 14).
-define(RESERVED, 0).
-define(PROTOCOL_MAGIC_31, <<"MQIsdp">>).
-define(PROTOCOL_MAGIC_311, <<"MQTT">>).
-define(MAX_LEN, 16#fffffff).
-define(HIGHBIT, 2#10000000).
-define(LOWBITS, 2#01111111).
-define(MAX_PACKET_SIZE, 268435455).
-spec parse(binary()) ->
{mqtt_frame(), binary()} | {error, atom()} | {{error, atom()}, any()} | more.
parse(Data) ->
parse(Data, ?MAX_PACKET_SIZE).
-spec parse(binary(), non_neg_integer()) -> {mqtt_frame(), binary()} | {error, atom()} | more.
parse(<<Fixed:1/binary, 0:1, DataSize:7, Data/binary>>, MaxSize) ->
parse(DataSize, MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 0:1, L2:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7), MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 1:1, L2:7, 0:1, L3:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7) + (L3 bsl 14), MaxSize, Fixed, Data);
parse(<<Fixed:1/binary, 1:1, L1:7, 1:1, L2:7, 1:1, L3:7, 0:1, L4:7, Data/binary>>, MaxSize) ->
parse(L1 + (L2 bsl 7) + (L3 bsl 14) + (L4 bsl 21), MaxSize, Fixed, Data);
parse(<<_:8/binary, _/binary>>, _) ->
{error, cant_parse_fixed_header};
parse(_, _) ->
more.
parse(DataSize, 0, Fixed, Data) when byte_size(Data) >= DataSize ->
<<Var:DataSize/binary, Rest/binary>> = Data,
{variable(Fixed, Var), Rest};
parse(DataSize, 0, _Fixed, Data) when byte_size(Data) < DataSize ->
more;
parse(DataSize, MaxSize, Fixed, Data) when
byte_size(Data) >= DataSize,
byte_size(Data) =< MaxSize
->
<<Var:DataSize/binary, Rest/binary>> = Data,
{variable(Fixed, Var), Rest};
parse(DataSize, MaxSize, _, _) when
DataSize > MaxSize
->
{error, packet_exceeds_max_size};
parse(_, _, _, _) ->
more.
-spec variable(binary(), binary()) -> mqtt_frame() | {error, atom()}.
variable(
<<?PUBLISH:4, Dup:1, 0:2, Retain:1>>, <<TopicLen:16/big, Topic:TopicLen/binary, Payload/binary>>
) ->
case vmq_topic:validate_topic(publish, Topic) of
{ok, ParsedTopic} ->
#mqtt_publish{
dup = Dup,
retain = Retain,
topic = ParsedTopic,
qos = 0,
payload = Payload
};
{error, Reason} ->
{error, Reason}
end;
variable(
<<?PUBLISH:4, Dup:1, QoS:2, Retain:1>>,
<<TopicLen:16/big, Topic:TopicLen/binary, MessageId:16/big, Payload/binary>>
) when
QoS < 3
->
case vmq_topic:validate_topic(publish, Topic) of
{ok, ParsedTopic} ->
#mqtt_publish{
dup = Dup,
retain = Retain,
topic = ParsedTopic,
qos = QoS,
message_id = MessageId,
payload = Payload
};
{error, Reason} ->
{error, Reason}
end;
variable(<<?PUBACK:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_puback{message_id = MessageId};
variable(<<?PUBREC:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_pubrec{message_id = MessageId};
variable(<<?PUBREL:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big>>) ->
#mqtt_pubrel{message_id = MessageId};
variable(<<?PUBCOMP:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_pubcomp{message_id = MessageId};
variable(<<?SUBSCRIBE:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big, Topics/binary>>) ->
case parse_topics(Topics, ?SUBSCRIBE, []) of
{ok, ParsedTopics} ->
#mqtt_subscribe{
topics = ParsedTopics,
message_id = MessageId
};
E ->
E
end;
variable(<<?UNSUBSCRIBE:4, 0:2, 1:1, 0:1>>, <<MessageId:16/big, Topics/binary>>) ->
case parse_topics(Topics, ?UNSUBSCRIBE, []) of
{ok, ParsedTopics} ->
#mqtt_unsubscribe{
topics = ParsedTopics,
message_id = MessageId
};
E ->
E
end;
variable(<<?SUBACK:4, 0:4>>, <<MessageId:16/big, Acks/binary>>) ->
#mqtt_suback{
qos_table = parse_acks(Acks, []),
message_id = MessageId
};
variable(<<?UNSUBACK:4, 0:4>>, <<MessageId:16/big>>) ->
#mqtt_unsuback{message_id = MessageId};
variable(<<?CONNECT:4, 0:4>>, <<L:16/big, PMagic:L/binary, _/binary>>) when
not ((PMagic == ?PROTOCOL_MAGIC_311) or
(PMagic == ?PROTOCOL_MAGIC_31))
->
{error, unknown_protocol_magic};
variable(
<<?CONNECT:4, 0:4>>,
<<L:16/big, _:L/binary, ProtoVersion:8, UserNameFlag:1, PasswordFlag:1, WillRetain:1, WillQos:2,
WillFlag:1, CleanSession:1,
0:1, KeepAlive:16/big, ClientIdLen:16/big, ClientId:ClientIdLen/binary, Rest0/binary>>
) ->
Conn0 = #mqtt_connect{
proto_ver = ProtoVersion,
clean_session = CleanSession,
keep_alive = KeepAlive,
client_id = ClientId
},
case parse_last_will_topic(Rest0, WillFlag, WillRetain, WillQos, Conn0) of
{ok, Rest1, Conn1} ->
case parse_username(Rest1, UserNameFlag, Conn1) of
{ok, Rest2, Conn2} ->
case parse_password(Rest2, UserNameFlag, PasswordFlag, Conn2) of
{ok, <<>>, Conn3} ->
Conn3;
{ok, _, _} ->
{error, invalid_rest_of_binary};
E ->
E
end;
E ->
E
end;
E ->
E
end;
variable(<<?CONNACK:4, 0:4>>, <<0:7, SP:1, ReturnCode:8/big>>) ->
#mqtt_connack{session_present = SP, return_code = ReturnCode};
variable(<<?PINGREQ:4, 0:4>>, <<>>) ->
#mqtt_pingreq{};
variable(<<?PINGRESP:4, 0:4>>, <<>>) ->
#mqtt_pingresp{};
variable(<<?DISCONNECT:4, 0:4>>, <<>>) ->
#mqtt_disconnect{};
variable(_, _) ->
{error, cant_parse_variable_header}.
parse_last_will_topic(Rest, 0, 0, 0, Conn) ->
{ok, Rest, Conn};
parse_last_will_topic(
<<WillTopicLen:16/big, WillTopic:WillTopicLen/binary, WillMsgLen:16/big,
WillMsg:WillMsgLen/binary, Rest/binary>>,
1,
Retain,
QoS,
Conn
) ->
case vmq_topic:validate_topic(publish, WillTopic) of
{ok, ParsedTopic} ->
{ok, Rest, Conn#mqtt_connect{
will_msg = WillMsg,
will_topic = ParsedTopic,
will_retain = Retain,
will_qos = QoS
}};
_ ->
{error, cant_validate_last_will_topic}
end;
parse_last_will_topic(_, _, _, _, _) ->
{error, cant_parse_last_will}.
parse_username(Rest, 0, Conn) ->
{ok, Rest, Conn};
parse_username(<<Len:16/big, UserName:Len/binary, Rest/binary>>, 1, Conn) ->
{ok, Rest, Conn#mqtt_connect{username = UserName}};
parse_username(_, 1, _) ->
{error, cant_parse_username}.
parse_password(Rest, _, 0, Conn) ->
{ok, Rest, Conn};
parse_password(<<Len:16/big, Password:Len/binary, Rest/binary>>, 1, 1, Conn) ->
{ok, Rest, Conn#mqtt_connect{password = Password}};
parse_password(_, 0, 1, _) ->
{error, username_flag_not_set};
parse_password(_, _, 1, _) ->
{error, cant_parse_password}.
parse_topics(<<>>, _, []) ->
{error, no_topic_provided};
parse_topics(<<>>, _, Topics) ->
{ok, Topics};
parse_topics(<<L:16/big, Topic:L/binary, 0:6, QoS:2, Rest/binary>>, ?SUBSCRIBE = Sub, Acc) when
(QoS >= 0) and (QoS < 3)
->
case vmq_topic:validate_topic(subscribe, Topic) of
{ok, ParsedTopic} ->
parse_topics(Rest, Sub, [{ParsedTopic, QoS} | Acc]);
E ->
E
end;
parse_topics(<<L:16/big, Topic:L/binary, Rest/binary>>, ?UNSUBSCRIBE = Sub, Acc) ->
case vmq_topic:validate_topic(subscribe, Topic) of
{ok, ParsedTopic} ->
parse_topics(Rest, Sub, [ParsedTopic | Acc]);
E ->
E
end;
parse_topics(_, _, _) ->
{error, cant_parse_topics}.
parse_acks(<<>>, Acks) ->
Acks;
parse_acks(<<128:8, Rest/binary>>, Acks) ->
parse_acks(Rest, [not_allowed | Acks]);
parse_acks(<<_:6, QoS:2, Rest/binary>>, Acks) when
is_integer(QoS) and ((QoS >= 0) and (QoS =< 2))
->
parse_acks(Rest, [QoS | Acks]).
-spec serialise(mqtt_frame()) -> binary() | iolist().
serialise(#mqtt_publish{
qos = 0,
topic = Topic,
retain = Retain,
dup = Dup,
payload = Payload
}) ->
Var = [utf8(vmq_topic:unword(Topic)), Payload],
LenBytes = serialise_len(iolist_size(Var)),
[<<?PUBLISH:4, (flag(Dup)):1/integer, 0:2/integer, (flag(Retain)):1/integer>>, LenBytes, Var];
serialise(#mqtt_publish{
message_id = MessageId,
topic = Topic,
qos = QoS,
retain = Retain,
dup = Dup,
payload = Payload
}) ->
Var = [utf8(vmq_topic:unword(Topic)), msg_id(MessageId), Payload],
LenBytes = serialise_len(iolist_size(Var)),
[
<<?PUBLISH:4, (flag(Dup)):1/integer, (default(QoS, 0)):2/integer,
(flag(Retain)):1/integer>>,
LenBytes,
Var
];
serialise(#mqtt_puback{message_id = MessageId}) ->
<<?PUBACK:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pubrel{message_id = MessageId}) ->
<<?PUBREL:4, 0:2, 1:1, 0:1, 2, MessageId:16/big>>;
serialise(#mqtt_pubrec{message_id = MessageId}) ->
<<?PUBREC:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pubcomp{message_id = MessageId}) ->
<<?PUBCOMP:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_connect{
proto_ver = ProtoVersion,
username = UserName,
password = Password,
will_retain = WillRetain,
will_qos = WillQos,
clean_session = CleanSession,
keep_alive = KeepAlive,
client_id = ClientId,
will_topic = WillTopic,
will_msg = WillMsg
}) ->
{PMagicL, PMagic} = proto(ProtoVersion),
Var = [
<<PMagicL:16/big-unsigned-integer, PMagic/binary, ProtoVersion:8/unsigned-integer,
(flag(UserName)):1/integer, (flag(Password)):1/integer, (flag(WillRetain)):1/integer,
(default(WillQos, 0)):2/integer, (flag(WillTopic)):1/integer,
(flag(CleanSession)):1/integer,
0:1, (default(KeepAlive, 0)):16/big-unsigned-integer>>,
utf8(ClientId),
utf8(vmq_topic:unword(WillTopic)),
utf8(WillMsg),
utf8(UserName),
utf8(Password)
],
LenBytes = serialise_len(iolist_size(Var)),
[<<?CONNECT:4, 0:4>>, LenBytes, Var];
serialise(#mqtt_connack{session_present = SP, return_code = RC}) ->
[<<?CONNACK:4, 0:4>>, serialise_len(2), <<0:7, (flag(SP)):1/integer>>, <<RC:8/big>>];
serialise(#mqtt_subscribe{message_id = MessageId, topics = Topics}) ->
SerialisedTopics = serialise_topics(?SUBSCRIBE, Topics, []),
LenBytes = serialise_len(iolist_size(SerialisedTopics) + 2),
[<<?SUBSCRIBE:4, 0:2, 1:1, 0:1>>, LenBytes, <<MessageId:16/big>>, SerialisedTopics];
serialise(#mqtt_suback{message_id = MessageId, qos_table = QosTable}) ->
SerialisedAcks = serialise_acks(QosTable, []),
LenBytes = serialise_len(iolist_size(SerialisedAcks) + 2),
[<<?SUBACK:4, 0:4>>, LenBytes, <<MessageId:16/big>>, SerialisedAcks];
serialise(#mqtt_unsubscribe{message_id = MessageId, topics = Topics}) ->
SerialisedTopics = serialise_topics(?UNSUBSCRIBE, Topics, []),
LenBytes = serialise_len(iolist_size(SerialisedTopics) + 2),
[
<<?UNSUBSCRIBE:4, 0:2, 1:1, 0:1>>,
LenBytes,
<<MessageId:16/big>>,
SerialisedTopics
];
serialise(#mqtt_unsuback{message_id = MessageId}) ->
<<?UNSUBACK:4, 0:4, 2, MessageId:16/big>>;
serialise(#mqtt_pingreq{}) ->
<<?PINGREQ:4, 0:4, 0>>;
serialise(#mqtt_pingresp{}) ->
<<?PINGRESP:4, 0:4, 0>>;
serialise(#mqtt_disconnect{}) ->
<<?DISCONNECT:4, 0:4, 0>>.
serialise_len(N) when N =< ?LOWBITS ->
<<0:1, N:7>>;
serialise_len(N) ->
<<1:1, (N rem ?HIGHBIT):7, (serialise_len(N div ?HIGHBIT))/binary>>.
serialise_topics(?SUBSCRIBE = Sub, [{Topic, QoS} | Rest], Acc) ->
serialise_topics(Sub, Rest, [utf8(vmq_topic:unword(Topic)), <<0:6, QoS:2>> | Acc]);
serialise_topics(?UNSUBSCRIBE = Sub, [Topic | Rest], Acc) ->
serialise_topics(Sub, Rest, [utf8(vmq_topic:unword(Topic)) | Acc]);
serialise_topics(_, [], Topics) ->
Topics.
serialise_acks([QoS | Rest], Acks) when is_integer(QoS) and ((QoS >= 0) and (QoS =< 2)) ->
serialise_acks(Rest, [<<0:6, QoS:2>> | Acks]);
serialise_acks([_ | Rest], Acks) ->
    %% use 0x80 failure code for everything else
serialise_acks(Rest, [<<128:8>> | Acks]);
serialise_acks([], Acks) ->
Acks.
proto(4) -> {4, ?PROTOCOL_MAGIC_311};
proto(3) -> {6, ?PROTOCOL_MAGIC_31};
proto(131) -> {6, ?PROTOCOL_MAGIC_31};
proto(132) -> {4, ?PROTOCOL_MAGIC_311}.
flag(<<>>) -> 0;
flag(undefined) -> 0;
flag(0) -> 0;
flag(1) -> 1;
flag(false) -> 0;
flag(true) -> 1;
flag(V) when is_binary(V) orelse is_list(V) -> 1;
flag(empty) -> 1;
flag(_) -> 0.
msg_id(undefined) -> <<>>;
msg_id(MsgId) -> <<MsgId:16/big>>.
default(undefined, Default) -> Default;
default(Val, _) -> Val.
utf8(<<>>) ->
<<>>;
utf8(undefined) ->
<<>>;
utf8(empty) ->
<<0:16/big>>;
utf8(IoList) when is_list(IoList) ->
[<<(iolist_size(IoList)):16/big>>, IoList];
utf8(Bin) when is_binary(Bin) ->
<<(byte_size(Bin)):16/big, Bin/binary>>.
ensure_binary(L) when is_list(L) -> list_to_binary(L);
ensure_binary(B) when is_binary(B) -> B;
ensure_binary(undefined) -> undefined;
ensure_binary(empty) -> empty.
gen_connect(ClientId, Opts) ->
Frame = #mqtt_connect{
client_id = ensure_binary(ClientId),
clean_session = proplists:get_value(clean_session, Opts, true),
keep_alive = proplists:get_value(keepalive, Opts, 60),
username = ensure_binary(proplists:get_value(username, Opts)),
password = ensure_binary(proplists:get_value(password, Opts)),
proto_ver = proplists:get_value(proto_ver, Opts, 3),
will_topic = ensure_binary(proplists:get_value(will_topic, Opts)),
will_qos = proplists:get_value(will_qos, Opts, 0),
will_retain = proplists:get_value(will_retain, Opts, false),
will_msg = ensure_binary(proplists:get_value(will_msg, Opts))
},
iolist_to_binary(serialise(Frame)).
gen_connack() ->
gen_connack(?CONNACK_ACCEPT).
gen_connack(RC) ->
gen_connack(0, RC).
gen_connack(SP, RC) ->
iolist_to_binary(serialise(#mqtt_connack{session_present = flag(SP), return_code = RC})).
gen_publish(Topic, Qos, Payload, Opts) ->
Frame = #mqtt_publish{
dup = proplists:get_value(dup, Opts, false),
qos = Qos,
retain = proplists:get_value(retain, Opts, false),
topic = ensure_binary(Topic),
message_id = proplists:get_value(mid, Opts, 0),
payload = ensure_binary(Payload)
},
iolist_to_binary(serialise(Frame)).
gen_puback(MId) ->
iolist_to_binary(serialise(#mqtt_puback{message_id = MId})).
gen_pubrec(MId) ->
iolist_to_binary(serialise(#mqtt_pubrec{message_id = MId})).
gen_pubrel(MId) ->
iolist_to_binary(serialise(#mqtt_pubrel{message_id = MId})).
gen_pubcomp(MId) ->
iolist_to_binary(serialise(#mqtt_pubcomp{message_id = MId})).
gen_subscribe(MId, [{_, _} | _] = Topics) ->
BinTopics = [{ensure_binary(Topic), QoS} || {Topic, QoS} <- Topics],
iolist_to_binary(serialise(#mqtt_subscribe{topics = BinTopics, message_id = MId})).
gen_subscribe(MId, Topic, QoS) ->
gen_subscribe(MId, [{Topic, QoS}]).
gen_suback(MId, QoSs) when is_list(QoSs) ->
iolist_to_binary(serialise(#mqtt_suback{qos_table = QoSs, message_id = MId}));
gen_suback(MId, QoS) ->
gen_suback(MId, [QoS]).
gen_unsubscribe(MId, Topic) ->
iolist_to_binary(
serialise(#mqtt_unsubscribe{topics = [ensure_binary(Topic)], message_id = MId})
).
gen_unsuback(MId) ->
iolist_to_binary(serialise(#mqtt_unsuback{message_id = MId})).
gen_pingreq() ->
iolist_to_binary(serialise(#mqtt_pingreq{})).
gen_pingresp() ->
iolist_to_binary(serialise(#mqtt_pingresp{})).
gen_disconnect() ->
iolist_to_binary(serialise(#mqtt_disconnect{})).
|
7825bcd100ee21c56554ce926e0d070063f903c762c0c9dd292b6a20857e4ce0 | reflectionalist/S9fES | id.scm | Scheme 9 from Empty Space , Function Library
By , 2010
; Placed in the Public Domain
;
; (false object ...) ==> #f
; (id object) ==> object
; (true object ...) ==> #t
;
( load - from - library " id.scm " )
;
; ID returns the object passed to it.
;
; FALSE returns always #F and TRUE returns always #T, no matter which
; values are passed to them.
;
; Example: (true) ==> #t
; (false 1 2 3) ==> #f
; (id 'whatever) ==> whatever
(define (id x) x)
(define (true . x) #t)
(define (false . x) #f)
| null | https://raw.githubusercontent.com/reflectionalist/S9fES/0ade11593cf35f112e197026886fc819042058dd/lib/id.scm | scheme | Placed in the Public Domain
(false object ...) ==> #f
(id object) ==> object
(true object ...) ==> #t
ID returns the object passed to it.
FALSE returns always #F and TRUE returns always #T, no matter which
values are passed to them.
Example: (true) ==> #t
(false 1 2 3) ==> #f
(id 'whatever) ==> whatever | Scheme 9 from Empty Space , Function Library
By , 2010
( load - from - library " id.scm " )
(define (id x) x)
(define (true . x) #t)
(define (false . x) #f)
|
ee12e79dc70a368d5990bf0c0dc17d2affcf70e1c1368432b44e66a733d225d0 | deadtrickster/prometheus-cowboy | prometheus_cowboy2_handler_SUITE.erl | -module(prometheus_cowboy2_handler_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%% ===================================================================
%% ===================================================================
-define(PROMETHEUS_ACCEPT, "application/vnd.google.protobuf;"
"proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,"
"text/plain;version=0.0.4;q=0.3,"
"application/json;schema=\"prometheus/telemetry\";version=0.0.2;q=0.2,"
"*/*;q=0.1").
-define(TELEMETRY_METRICS_METADATA,
[
"# TYPE telemetry_scrape_duration_seconds summary",
"# HELP telemetry_scrape_duration_seconds Scrape duration",
"# TYPE telemetry_scrape_size_bytes summary",
"# HELP telemetry_scrape_size_bytes Scrape size, not encoded",
"# TYPE telemetry_scrape_encoded_size_bytes summary",
"# HELP telemetry_scrape_encoded_size_bytes Scrape size, encoded"
]).
-define(METRICS_URL,
":" ++ integer_to_list(?config(port, Config)) ++ "/metrics").
-define(METRICS_URL(Registry),
?METRICS_URL ++ "/" ++ Registry).
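%% Shared auth assertions: requests with missing or wrong credentials must
%% be answered with 403, the valid basic-auth pair (qwe/qwa) with 200.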
-define(AUTH_TESTS,
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL,
[]}, [], []),
?assertMatch(403, status(DeniedR1)),
{ok, DeniedR2} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic cXdlOnF3ZQ=="}]},
[], []),
?assertMatch(403, status(DeniedR2)),
{ok, DeniedR3} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic abba"}]},
[], []),
?assertMatch(403, status(DeniedR3)),
{ok, DeniedR4} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Bearer abba"}]},
[], []),
?assertMatch(403, status(DeniedR4)),
{ok, BasicLPR} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic cXdlOnF3YQ=="}]},
[], []),
?assertMatch(200, status(BasicLPR))).
%% @doc All tests of this suite.
all() ->
[
{group, positive}
].
%% @doc Groups of tests
groups() ->
[
{positive, [sequential], [
prometheus_cowboy2_negotiation,
prometheus_cowboy2_negotiation_fail,
prometheus_cowboy2,
prometheus_cowboy2_registry,
prometheus_cowboy2_registry_conflict,
prometheus_cowboy2_auth_basic1,
prometheus_cowboy2_auth_basic2,
prometheus_cowboy2_auth_basic3,
prometheus_cowboy2_auth_provider1,
prometheus_cowboy2_auth_provider2,
prometheus_cowboy2_auth_invalid
]}
].
%% @doc Start the application.
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(cowboy),
{ok, _} = application:ensure_all_started(prometheus),
{Port, Listener} = prometheus_cowboy2_app:start(),
%% debugger:start(),
%% timer:sleep(80000),
[{port, Port}, {listener, Listener} | Config].
%% @doc Stop the application.
end_per_suite(Config) ->
ok = application:stop(cowboy),
ok = application:stop(prometheus),
Config.
end_per_testcase(_, Config) ->
application:set_env(prometheus, prometheus_http, []),
Config.
%% ===================================================================
%% TESTS
%% ===================================================================
prometheus_cowboy2_negotiation(Config) ->
{ok, TextResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "deflate"}]}, [], []),
?assertMatch(200, status(TextResponse)),
TextCT = prometheus_text_format:content_type(),
ExpectedTextCT = binary_to_list(TextCT),
?assertMatch([{"content-encoding", "deflate"},
{"content-length", ExpectedTextCL},
{"content-type", ExpectedTextCT}|_]
when ExpectedTextCL > 0, headers(TextResponse)),
?assert(iolist_size(body(TextResponse)) > 0),
{ok, ProtobufResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept", ?PROMETHEUS_ACCEPT},
{"Accept-Encoding", "gzip, sdch"}]}, [], []),
?assertMatch(200, status(ProtobufResponse)),
ProtobufCT = prometheus_protobuf_format:content_type(),
ExpectedProtobufCT = binary_to_list(ProtobufCT),
?assertMatch([{"content-encoding", "gzip"},
{"content-length", ExpectedProtobufCL},
{"content-type", ExpectedProtobufCT}|_]
when ExpectedProtobufCL > 0, headers(ProtobufResponse)),
?assert(iolist_size(zlib:gunzip(body(ProtobufResponse))) > 0),
application:set_env(prometheus, prometheus_http,
[{format, prometheus_protobuf_format}]),
{ok, ProtobufResponse1} =
httpc:request(get, {?METRICS_URL, []}, [], []),
?assertMatch(200, status(ProtobufResponse1)),
ProtobufCT = prometheus_protobuf_format:content_type(),
ExpectedProtobufCT = binary_to_list(ProtobufCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedProtobufCL1},
{"content-type", ExpectedProtobufCT}|_]
when ExpectedProtobufCL1 > 0, headers(ProtobufResponse1)),
?assert(iolist_size(body(ProtobufResponse)) > 0).
prometheus_cowboy2_negotiation_fail(Config) ->
{ok, IdentityResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "qwe"}]}, [], []),
?assertMatch(200, status(IdentityResponse)),
IdentityCT = prometheus_text_format:content_type(),
ExpectedIdentityCT = binary_to_list(IdentityCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedIdentityCL},
{"content-type", ExpectedIdentityCT}|_]
when ExpectedIdentityCL > 0, headers(IdentityResponse)),
{ok, FEResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "qwe, *;q=0"}]}, [], []),
?assertMatch(406, status(FEResponse)),
?assertMatch([{"content-length", "0"}|_], headers(FEResponse)),
{ok, CTResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept", "image/png"}]}, [], []),
?assertMatch(406, status(CTResponse)),
?assertMatch([{"content-length", "0"}|_],
headers(CTResponse)).
prometheus_cowboy2(Config) ->
{ok, MetricsResponse} = httpc:request(?METRICS_URL),
?assertMatch(200, status(MetricsResponse)),
MetricsCT = prometheus_text_format:content_type(),
ExpecteMetricsCT = binary_to_list(MetricsCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedMetricsCL},
{"content-type", ExpecteMetricsCT}|_]
when ExpectedMetricsCL > 0, headers(MetricsResponse)),
MetricsBody = body(MetricsResponse),
?assertMatch(true, all_telemetry_metrics_present(MetricsBody)),
{ok, CTResponse} =
httpc:request(get, {?METRICS_URL("qwe"),
[]}, [], []),
?assertMatch(404, status(CTResponse)),
?assertMatch([{"content-length", CL404}|_]
when CL404 > 0,
headers(CTResponse)).
prometheus_cowboy2_registry(Config) ->
prometheus_counter:new([{registry, qwe}, {name, qwe}, {help, ""}]),
prometheus_counter:inc(qwe, qwe, [], 10),
{ok, MetricsResponse} = httpc:request(?METRICS_URL("qwe")),
?assertMatch(200, status(MetricsResponse)),
MetricsCT = prometheus_text_format:content_type(),
ExpecteMetricsCT = binary_to_list(MetricsCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedMetricsCL},
{"content-type", ExpecteMetricsCT}|_]
when ExpectedMetricsCL > 0, headers(MetricsResponse)),
MetricsBody = body(MetricsResponse),
?assertMatch(false, all_telemetry_metrics_present(MetricsBody)),
?assertMatch({match, _}, re:run(MetricsBody, "# TYPE qwe counter")),
{ok, IRResponse} =
httpc:request(get, {?METRICS_URL("qwa"),
[]}, [], []),
?assertMatch(404, status(IRResponse)),
?assertMatch([{"content-length", CL404}|_]
when CL404 > 0,
headers(IRResponse)).
prometheus_cowboy2_registry_conflict(Config) ->
application:set_env(prometheus, prometheus_http,
[{registry, default}]),
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL("qwe"),
[]}, [], []),
?assertMatch(409, status(DeniedR1)).
prometheus_cowboy2_auth_basic1(Config) ->
application:set_env(prometheus, prometheus_http, [{authorization,
{basic, "qwe", "qwa"}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_basic2(Config) ->
application:set_env(prometheus, prometheus_http, [{authorization,
{basic, ?MODULE}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_basic3(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
{basic, {?MODULE, authorize}}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_provider1(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
{?MODULE, authorize}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_provider2(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
?MODULE}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_invalid(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization, "qwe"}]),
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL,
[]}, [], []),
?assertMatch(500, status(DeniedR1)).
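%% Authorization callbacks exercised by the basic/provider test cases above:
%% authorize/2 accepts only the qwe/qwa pair, authorize/1 rejects the
%% known-bad Authorization headers used in ?AUTH_TESTS.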
authorize("qwe", "qwa") ->
true;
authorize(_, _) ->
false.
authorize(#{headers := Headers}) ->
case Headers("authorization", undefined) of
undefined ->
false;
"Basic cXdlOnF3ZQ==" ->
false;
"Basic abba" ->
false;
"Bearer abba" ->
false;
<<"Basic cXdlOnF3ZQ==">> ->
false;
<<"Basic abba">> ->
false;
<<"Bearer abba">> ->
false;
_ ->
true
end.
%% ===================================================================
%% Private parts
%% ===================================================================
all_telemetry_metrics_present(Body) ->
lists:all(fun(Metric) ->
case re:run(Body, Metric) of
{match, _} -> true;
_ -> false
end
end, ?TELEMETRY_METRICS_METADATA).
%%% Helpers
status({{_, Status, _}, _, _}) ->
Status.
body({_, _, Body}) ->
Body.
headers({_, Headers, _}) ->
lists:sort(Headers).
| null | https://raw.githubusercontent.com/deadtrickster/prometheus-cowboy/24a29e2f0b096f739587a526bc927a951c44b862/test/cowboy2/prometheus_cowboy2_handler_SUITE.erl | erlang | ===================================================================
===================================================================
@doc All tests of this suite.
@doc Groups of tests
@doc Start the application.
debugger:start(),
timer:sleep(80000),
@doc Stop the application.
===================================================================
TESTS
===================================================================
===================================================================
Private parts
===================================================================
Helpers | -module(prometheus_cowboy2_handler_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(PROMETHEUS_ACCEPT, "application/vnd.google.protobuf;"
"proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,"
"text/plain;version=0.0.4;q=0.3,"
"application/json;schema=\"prometheus/telemetry\";version=0.0.2;q=0.2,"
"*/*;q=0.1").
-define(TELEMETRY_METRICS_METADATA,
[
"# TYPE telemetry_scrape_duration_seconds summary",
"# HELP telemetry_scrape_duration_seconds Scrape duration",
"# TYPE telemetry_scrape_size_bytes summary",
"# HELP telemetry_scrape_size_bytes Scrape size, not encoded",
"# TYPE telemetry_scrape_encoded_size_bytes summary",
"# HELP telemetry_scrape_encoded_size_bytes Scrape size, encoded"
]).
-define(METRICS_URL,
":" ++ integer_to_list(?config(port, Config)) ++ "/metrics").
-define(METRICS_URL(Registry),
?METRICS_URL ++ "/" ++ Registry).
-define(AUTH_TESTS,
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL,
[]}, [], []),
?assertMatch(403, status(DeniedR1)),
{ok, DeniedR2} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic cXdlOnF3ZQ=="}]},
[], []),
?assertMatch(403, status(DeniedR2)),
{ok, DeniedR3} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic abba"}]},
[], []),
?assertMatch(403, status(DeniedR3)),
{ok, DeniedR4} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Bearer abba"}]},
[], []),
?assertMatch(403, status(DeniedR4)),
{ok, BasicLPR} =
httpc:request(get, {?METRICS_URL,
[{"Authorization", "Basic cXdlOnF3YQ=="}]},
[], []),
?assertMatch(200, status(BasicLPR))).
all() ->
[
{group, positive}
].
groups() ->
[
{positive, [sequential], [
prometheus_cowboy2_negotiation,
prometheus_cowboy2_negotiation_fail,
prometheus_cowboy2,
prometheus_cowboy2_registry,
prometheus_cowboy2_registry_conflict,
prometheus_cowboy2_auth_basic1,
prometheus_cowboy2_auth_basic2,
prometheus_cowboy2_auth_basic3,
prometheus_cowboy2_auth_provider1,
prometheus_cowboy2_auth_provider2,
prometheus_cowboy2_auth_invalid
]}
].
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(cowboy),
{ok, _} = application:ensure_all_started(prometheus),
{Port, Listener} = prometheus_cowboy2_app:start(),
[{port, Port}, {listener, Listener} | Config].
end_per_suite(Config) ->
ok = application:stop(cowboy),
ok = application:stop(prometheus),
Config.
end_per_testcase(_, Config) ->
application:set_env(prometheus, prometheus_http, []),
Config.
prometheus_cowboy2_negotiation(Config) ->
{ok, TextResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "deflate"}]}, [], []),
?assertMatch(200, status(TextResponse)),
TextCT = prometheus_text_format:content_type(),
ExpectedTextCT = binary_to_list(TextCT),
?assertMatch([{"content-encoding", "deflate"},
{"content-length", ExpectedTextCL},
{"content-type", ExpectedTextCT}|_]
when ExpectedTextCL > 0, headers(TextResponse)),
?assert(iolist_size(body(TextResponse)) > 0),
{ok, ProtobufResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept", ?PROMETHEUS_ACCEPT},
{"Accept-Encoding", "gzip, sdch"}]}, [], []),
?assertMatch(200, status(ProtobufResponse)),
ProtobufCT = prometheus_protobuf_format:content_type(),
ExpectedProtobufCT = binary_to_list(ProtobufCT),
?assertMatch([{"content-encoding", "gzip"},
{"content-length", ExpectedProtobufCL},
{"content-type", ExpectedProtobufCT}|_]
when ExpectedProtobufCL > 0, headers(ProtobufResponse)),
?assert(iolist_size(zlib:gunzip(body(ProtobufResponse))) > 0),
application:set_env(prometheus, prometheus_http,
[{format, prometheus_protobuf_format}]),
{ok, ProtobufResponse1} =
httpc:request(get, {?METRICS_URL, []}, [], []),
?assertMatch(200, status(ProtobufResponse1)),
ProtobufCT = prometheus_protobuf_format:content_type(),
ExpectedProtobufCT = binary_to_list(ProtobufCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedProtobufCL1},
{"content-type", ExpectedProtobufCT}|_]
when ExpectedProtobufCL1 > 0, headers(ProtobufResponse1)),
?assert(iolist_size(body(ProtobufResponse)) > 0).
prometheus_cowboy2_negotiation_fail(Config) ->
{ok, IdentityResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "qwe"}]}, [], []),
?assertMatch(200, status(IdentityResponse)),
IdentityCT = prometheus_text_format:content_type(),
ExpectedIdentityCT = binary_to_list(IdentityCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedIdentityCL},
{"content-type", ExpectedIdentityCT}|_]
when ExpectedIdentityCL > 0, headers(IdentityResponse)),
{ok, FEResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept-Encoding", "qwe, *;q=0"}]}, [], []),
?assertMatch(406, status(FEResponse)),
?assertMatch([{"content-length", "0"}|_], headers(FEResponse)),
{ok, CTResponse} =
httpc:request(get, {?METRICS_URL,
[{"Accept", "image/png"}]}, [], []),
?assertMatch(406, status(CTResponse)),
?assertMatch([{"content-length", "0"}|_],
headers(CTResponse)).
prometheus_cowboy2(Config) ->
{ok, MetricsResponse} = httpc:request(?METRICS_URL),
?assertMatch(200, status(MetricsResponse)),
MetricsCT = prometheus_text_format:content_type(),
ExpecteMetricsCT = binary_to_list(MetricsCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedMetricsCL},
{"content-type", ExpecteMetricsCT}|_]
when ExpectedMetricsCL > 0, headers(MetricsResponse)),
MetricsBody = body(MetricsResponse),
?assertMatch(true, all_telemetry_metrics_present(MetricsBody)),
{ok, CTResponse} =
httpc:request(get, {?METRICS_URL("qwe"),
[]}, [], []),
?assertMatch(404, status(CTResponse)),
?assertMatch([{"content-length", CL404}|_]
when CL404 > 0,
headers(CTResponse)).
prometheus_cowboy2_registry(Config) ->
prometheus_counter:new([{registry, qwe}, {name, qwe}, {help, ""}]),
prometheus_counter:inc(qwe, qwe, [], 10),
{ok, MetricsResponse} = httpc:request(?METRICS_URL("qwe")),
?assertMatch(200, status(MetricsResponse)),
MetricsCT = prometheus_text_format:content_type(),
ExpecteMetricsCT = binary_to_list(MetricsCT),
?assertMatch([{"content-encoding", "identity"},
{"content-length", ExpectedMetricsCL},
{"content-type", ExpecteMetricsCT}|_]
when ExpectedMetricsCL > 0, headers(MetricsResponse)),
MetricsBody = body(MetricsResponse),
?assertMatch(false, all_telemetry_metrics_present(MetricsBody)),
?assertMatch({match, _}, re:run(MetricsBody, "# TYPE qwe counter")),
{ok, IRResponse} =
httpc:request(get, {?METRICS_URL("qwa"),
[]}, [], []),
?assertMatch(404, status(IRResponse)),
?assertMatch([{"content-length", CL404}|_]
when CL404 > 0,
headers(IRResponse)).
prometheus_cowboy2_registry_conflict(Config) ->
application:set_env(prometheus, prometheus_http,
[{registry, default}]),
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL("qwe"),
[]}, [], []),
?assertMatch(409, status(DeniedR1)).
prometheus_cowboy2_auth_basic1(Config) ->
application:set_env(prometheus, prometheus_http, [{authorization,
{basic, "qwe", "qwa"}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_basic2(Config) ->
application:set_env(prometheus, prometheus_http, [{authorization,
{basic, ?MODULE}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_basic3(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
{basic, {?MODULE, authorize}}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_provider1(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
{?MODULE, authorize}}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_provider2(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization,
?MODULE}]),
?AUTH_TESTS.
prometheus_cowboy2_auth_invalid(Config) ->
application:set_env(prometheus, prometheus_http,
[{authorization, "qwe"}]),
{ok, DeniedR1} =
httpc:request(get, {?METRICS_URL,
[]}, [], []),
?assertMatch(500, status(DeniedR1)).
authorize("qwe", "qwa") ->
true;
authorize(_, _) ->
false.
authorize(#{headers := Headers}) ->
case Headers("authorization", undefined) of
undefined ->
false;
"Basic cXdlOnF3ZQ==" ->
false;
"Basic abba" ->
false;
"Bearer abba" ->
false;
<<"Basic cXdlOnF3ZQ==">> ->
false;
<<"Basic abba">> ->
false;
<<"Bearer abba">> ->
false;
_ ->
true
end.
all_telemetry_metrics_present(Body) ->
lists:all(fun(Metric) ->
case re:run(Body, Metric) of
{match, _} -> true;
_ -> false
end
end, ?TELEMETRY_METRICS_METADATA).
status({{_, Status, _}, _, _}) ->
Status.
body({_, _, Body}) ->
Body.
headers({_, Headers, _}) ->
lists:sort(Headers).
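%% Illustrative sketch (not part of the original suite; the URL is an
%% assumption): httpc:request/1 returns {ok, {{Version, Status, Reason},
%% Headers, Body}}, which is why status/1, headers/1 and body/1 above
%% pattern-match a 3-tuple:
%%   {ok, R} = httpc:request("http://localhost:8080/metrics"),
%%   200 = status(R),
%%   true = is_list(body(R)).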
|
f4c69c6e10d4aecc1b999113470a757dec67a8dbaa98d741c5f868de48c32e69 | con-kitty/categorifier-c | Pretty.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-- | C-code emission from CExpr graphs -- this module contains the low-level functionality for turning
-- AST fragments into pretty-printer fragments.
module Categorifier.C.CExpr.C.Pretty
( cFunArgs,
newline,
infixBinOp,
prefixBinOp,
prefixUnOp,
indexArray,
ternary,
assignStatement,
constAssignStatement,
commentName,
)
where
import Prettyprinter (Doc, (<+>))
import qualified Prettyprinter as Doc
{- First we can define all the pure helper functions -}
cFunArgs :: [Doc ann] -> Doc ann
cFunArgs = Doc.group . Doc.encloseSep "(" ")" ", "
newline :: Doc ann
newline = ";" <> Doc.line
infixBinOp :: Doc ann -> Doc ann -> Doc ann -> Doc ann
infixBinOp o l r = l <+> o <+> r
prefixBinOp :: Doc ann -> Doc ann -> Doc ann -> Doc ann
prefixBinOp o l r = o <> cFunArgs [l, r]
prefixUnOp :: Doc ann -> Doc ann -> Doc ann
prefixUnOp o a = o <> Doc.parens a
indexArray :: Doc ann -> Doc ann -> Doc ann
indexArray arr idx = arr <> Doc.brackets idx
ternary :: Doc ann -> Doc ann -> Doc ann -> Doc ann
ternary p t f = p <+> "?" <+> t <+> ":" <+> f
assignStatement :: Doc ann -> Doc ann -> Doc ann
assignStatement name expr = name <+> "=" <+> expr <> newline
constAssignStatement :: Doc ann -> Doc ann -> Doc ann -> Doc ann
constAssignStatement ty name expr = "const" <+> ty <+> assignStatement name expr
commentName :: Doc ann -> Doc ann -> Doc ann
commentName name fun = "/* " <> name <> "() */" <+> fun
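-- Illustrative usage sketch (not part of the original module; the render
-- imports are assumptions based on the prettyprinter package):
--
--   import Prettyprinter (layoutPretty, defaultLayoutOptions)
--   import Prettyprinter.Render.Text (renderStrict)
--
--   renderStrict . layoutPretty defaultLayoutOptions $
--     constAssignStatement "double" "x" (infixBinOp "+" "a" "b")
--   -- renders as "const double x = a + b;" followed by a newline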
| null | https://raw.githubusercontent.com/con-kitty/categorifier-c/74bc972dd164bc8e69414397cc366fa8ded2c8f1/Categorifier/C/CExpr/C/Pretty.hs | haskell | # LANGUAGE OverloadedStrings #
this module contains the low - level functionality for turning
First we can define all the pure helper functions | # LANGUAGE TypeFamilies #
AST fragments into pretty - printer fragments .
module Categorifier.C.CExpr.C.Pretty
( cFunArgs,
newline,
infixBinOp,
prefixBinOp,
prefixUnOp,
indexArray,
ternary,
assignStatement,
constAssignStatement,
commentName,
)
where
import Prettyprinter (Doc, (<+>))
import qualified Prettyprinter as Doc
cFunArgs :: [Doc ann] -> Doc ann
cFunArgs = Doc.group . Doc.encloseSep "(" ")" ", "
newline :: Doc ann
newline = ";" <> Doc.line
infixBinOp :: Doc ann -> Doc ann -> Doc ann -> Doc ann
infixBinOp o l r = l <+> o <+> r
prefixBinOp :: Doc ann -> Doc ann -> Doc ann -> Doc ann
prefixBinOp o l r = o <> cFunArgs [l, r]
prefixUnOp :: Doc ann -> Doc ann -> Doc ann
prefixUnOp o a = o <> Doc.parens a
indexArray :: Doc ann -> Doc ann -> Doc ann
indexArray arr idx = arr <> Doc.brackets idx
ternary :: Doc ann -> Doc ann -> Doc ann -> Doc ann
ternary p t f = p <+> "?" <+> t <+> ":" <+> f
assignStatement :: Doc ann -> Doc ann -> Doc ann
assignStatement name expr = name <+> "=" <+> expr <> newline
constAssignStatement :: Doc ann -> Doc ann -> Doc ann -> Doc ann
constAssignStatement ty name expr = "const" <+> ty <+> assignStatement name expr
commentName :: Doc ann -> Doc ann -> Doc ann
commentName name fun = "/* " <> name <> "() */" <+> fun
|
eeb99466c076e6f9310a4f4d94e32d3af8fda9da3326bdf8431b652e1078f2ce | babashka/neil | dev.clj | (ns babashka.neil.dev
(:require [babashka.process :refer [sh]]
[clojure.core.async :refer [<!] :as async]
[clojure.string :as str]
[pod.babashka.fswatcher :as fw]
[taoensso.timbre :as log]))
(def watch-paths ["bb.edn" "prelude" "src" "dev"])
(defn- build-event? [{:keys [type path] :as _watch-event}]
(and (not (#{:chmod} type))
(not (str/ends-with? path "~"))))
(def build-number (atom 0))
(defn build-once [event]
(let [i (swap! build-number inc)]
(log/info [:start-build i event])
(sh "bb gen-script" {:err :inherit})
(log/info [:end-build i event])))
(defn- start-builder [build-events]
(async/go-loop []
(let [event (<! build-events)]
(build-once event)
(recur))))
(defn- start-watchers [watch-paths build-events]
(doseq [p watch-paths]
(fw/watch p #(async/put! build-events %) {:recursive true})))
(defn dev []
(let [build-xf (filter build-event?)
build-events (async/chan (async/sliding-buffer 1) build-xf)]
(log/info [:start-dev])
(start-watchers watch-paths build-events)
(build-once {:type ::startup-build})
(start-builder build-events)
(deref (promise))))
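;; Illustrative usage sketch (not part of the original file; the task name
;; is an assumption): a `bb dev` task could simply call
;;   (babashka.neil.dev/dev)
;; which runs one startup build of gen-script and then rebuilds whenever a
;; file under bb.edn, prelude, src or dev changes.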
| null | https://raw.githubusercontent.com/babashka/neil/3c545b6a6c02b25576f5745cae9d186941e7d90a/dev/babashka/neil/dev.clj | clojure | (ns babashka.neil.dev
(:require [babashka.process :refer [sh]]
[clojure.core.async :refer [<!] :as async]
[clojure.string :as str]
[pod.babashka.fswatcher :as fw]
[taoensso.timbre :as log]))
(def watch-paths ["bb.edn" "prelude" "src" "dev"])
(defn- build-event? [{:keys [type path] :as _watch-event}]
(and (not (#{:chmod} type))
(not (str/ends-with? path "~"))))
(def build-number (atom 0))
(defn build-once [event]
(let [i (swap! build-number inc)]
(log/info [:start-build i event])
(sh "bb gen-script" {:err :inherit})
(log/info [:end-build i event])))
(defn- start-builder [build-events]
(async/go-loop []
(let [event (<! build-events)]
(build-once event)
(recur))))
(defn- start-watchers [watch-paths build-events]
(doseq [p watch-paths]
(fw/watch p #(async/put! build-events %) {:recursive true})))
(defn dev []
(let [build-xf (filter build-event?)
build-events (async/chan (async/sliding-buffer 1) build-xf)]
(log/info [:start-dev])
(start-watchers watch-paths build-events)
(build-once {:type ::startup-build})
(start-builder build-events)
(deref (promise))))
|
|
84d0fee0399ba9619baa438c39880290d62f2291eb34fc2c78b474b945454bd8 | rajasegar/cl-djula-tailwind | utils.lisp | (defpackage cl-djula-tailwind.utils
(:use :cl)
(:export :get-single
:get-double
:get-quadruple
:get-translate
:get-percentage-widths
:get-translate-ratios))
(in-package cl-djula-tailwind.utils)
(defvar *values* '(
"0px"
"1px"
"0.125rem"
"0.25rem"
"0.375rem"
"0.5rem"
"0.625rem"
"0.75rem"
"0.875rem"
"1rem"
"1.25rem"
"1.5rem"
"1.75rem"
"2rem"
"2.25rem"
"2.5rem"
"2.75rem"
"3rem"
"3.5rem"
"4rem"
"5rem"
"6rem"
"7rem"
"8rem"
"9rem"
"10rem"
"11rem"
"12rem"
"13rem"
"14rem"
"15rem"
"16rem"
"18rem"
"20rem"
"24rem"
))
(defvar *units* '("0" "px" "0.5" "1" "1.5" "2" "2.5" "3" "3.5" "4" "5" "6" "7" "8" "9" "10" "11" "12" "14" "16" "20" "24" "28" "32" "36" "40" "44" "48" "52" "56" "60" "64" "72" "80" "96"))
(defun get-single (prop prefix)
"Get single or all side values for margin or padding"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,val)))))
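;; Illustrative sketch (not part of the original file; the argument values
;; are assumptions): a call such as
;;   (get-single "margin" "m-")
;; returns an alist whose first entries look like
;;   (("m-0" . ((".m-0" "margin" "0px")))
;;    ("m-px" . ((".m-px" "margin" "1px"))) ...)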
(defun get-double (prop-left prop-right prefix)
"Get vertical or horizontal side values for margin or padding"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop-left ,val ,prop-right ,val)))))
(defun get-quadruple (prop-1 prop-2 prop-3 prop-4 prefix)
"Get top, right, bottom and left values"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop-1 ,val ,prop-2 ,val ,prop-3 ,val ,prop-4 ,val)))))
(defun get-translate (prop prefix fun)
"Get translate x and y utils"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,(concatenate 'string fun "(" val ")"))))))
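;; Illustrative sketch (not part of the original file; the argument values
;; are assumptions): for example
;;   (get-translate "transform" "translate-x-" "translateX")
;; begins with
;;   (("translate-x-0" . ((".translate-x-0" "transform" "translateX(0px)"))) ...)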
(defvar *percentages* '("50%" "33.333333%" "66.666666%" "25%" "50%" "75%" "20%" "40%" "60%" "80%" "16.666667%" "33.333333%" "50%" "66.666667%" "83.333333%" "8.333333%" "16.666667%" "25%" "33.333333%" "41.666667%" "50%" "58.333333%" "66.666667%" "75%" "83.333333%" "91.666667%"))
(defvar *ratios* '("1/2" "1/3" "2/3" "1/4" "2/4" "3/4" "1/5" "2/5" "3/5" "4/5" "1/6" "2/6" "3/6" "4/6" "5/6" "1/12"
"2/12" "3/12" "4/12" "5/12" "6/12" "7/12" "8/12" "9/12" "10/12" "11/12"))
(defun get-percentage-widths (prop prefix)
"Get percentage width utils"
(loop for val in *percentages*
for unit in *ratios*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,val)))))
(defun get-translate-ratios (prop prefix fun)
"Get percentage width utils"
(loop for val in *percentages*
for unit in *ratios*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,(concatenate 'string fun "(" val ")"))))))
| null | https://raw.githubusercontent.com/rajasegar/cl-djula-tailwind/d7e887c3ad54ada8eb78a33487dc4b96482a5da3/src/utils.lisp | lisp | (defpackage cl-djula-tailwind.utils
(:use :cl)
(:export :get-single
:get-double
:get-quadruple
:get-translate
:get-percentage-widths
:get-translate-ratios))
(in-package cl-djula-tailwind.utils)
(defvar *values* '(
"0px"
"1px"
"0.125rem"
"0.25rem"
"0.375rem"
"0.5rem"
"0.625rem"
"0.75rem"
"0.875rem"
"1rem"
"1.25rem"
"1.5rem"
"1.75rem"
"2rem"
"2.25rem"
"2.5rem"
"2.75rem"
"3rem"
"3.5rem"
"4rem"
"5rem"
"6rem"
"7rem"
"8rem"
"9rem"
"10rem"
"11rem"
"12rem"
"13rem"
"14rem"
"15rem"
"16rem"
"18rem"
"20rem"
"24rem"
))
(defvar *units* '("0" "px" "0.5" "1" "1.5" "2" "2.5" "3" "3.5" "4" "5" "6" "7" "8" "9" "10" "11" "12" "14" "16" "20" "24" "28" "32" "36" "40" "44" "48" "52" "56" "60" "64" "72" "80" "96"))
(defun get-single (prop prefix)
"Get single or all side values for margin or padding"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,val)))))
(defun get-double (prop-left prop-right prefix)
"Get vertical or horizontal side values for margin or padding"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop-left ,val ,prop-right ,val)))))
(defun get-quadruple (prop-1 prop-2 prop-3 prop-4 prefix)
"Get top, right, bottom and left values"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop-1 ,val ,prop-2 ,val ,prop-3 ,val ,prop-4 ,val)))))
(defun get-translate (prop prefix fun)
"Get translate x and y utils"
(loop for val in *values*
for unit in *units*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,(concatenate 'string fun "(" val ")"))))))
(defvar *percentages* '("50%" "33.333333%" "66.666666%" "25%" "50%" "75%" "20%" "40%" "60%" "80%" "16.666667%" "33.333333%" "50%" "66.666667%" "83.333333%" "8.333333%" "16.666667%" "25%" "33.333333%" "41.666667%" "50%" "58.333333%" "66.666667%" "75%" "83.333333%" "91.666667%"))
(defvar *ratios* '("1/2" "1/3" "2/3" "1/4" "2/4" "3/4" "1/5" "2/5" "3/5" "4/5" "1/6" "2/6" "3/6" "4/6" "5/6" "1/12"
"2/12" "3/12" "4/12" "5/12" "6/12" "7/12" "8/12" "9/12" "10/12" "11/12"))
(defun get-percentage-widths (prop prefix)
"Get percentage width utils"
(loop for val in *percentages*
for unit in *ratios*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,val)))))
(defun get-translate-ratios (prop prefix fun)
"Get percentage width utils"
(loop for val in *percentages*
for unit in *ratios*
for key = (concatenate 'string prefix unit)
for classname = (concatenate 'string "." prefix unit)
collect `(,key . ((,classname ,prop ,(concatenate 'string fun "(" val ")"))))))
|
|
d214a53119696a62b87d50de8802a49c3bff23156af6384ce7a7a1a072eabc09 | eholk/harlan | typecheck.scm | (library
(harlan front typecheck)
(export typecheck free-regions-type gen-rvar)
(import
(rnrs)
(elegant-weapons match)
(elegant-weapons helpers)
(elegant-weapons sets)
(harlan compile-opts)
(util compat)
(util color))
(define (typecheck m)
(let-values (((m s) (infer-module m)))
(ground-module `(module . ,m) s)))
(define-record-type tvar (fields name))
(define-record-type rvar (fields name))
(define (gen-tvar x) (make-tvar (gensym x)))
(define (gen-rvar x) (make-rvar (gensym x)))
(define type-tag (gensym 'type))
;; Walks type and region variables in a substitution
(define (walk x s)
(let ((x^ (assq x s)))
;; TODO: We will probably need to check for cycles.
(if x^
(let ((x^ (cdr x^)))
(cond
((or (tvar? x^) (rvar? x^))
(walk x^ s))
((eq? x^ 'Numeric)
x)
(else x^)))
x)))
(define (walk-type t s)
(match t
(,t (guard (symbol? t)) t)
((vec ,r ,[t]) `(vec ,(walk r s) ,t))
((ptr ,[t]) `(ptr ,t))
((adt ,[t]) `(adt ,t))
((adt ,[t] ,r) `(adt ,t ,(walk r s)))
((closure ,r (,[t*] ...) -> ,[t])
`(closure ,(walk r s) ,t* -> ,t))
((fn (,[t*] ...) -> ,[t]) `(fn (,t* ...) -> ,t))
(,x (guard (or (tvar? x) (rvar? x)))
(let ((x^ (walk x s)))
(if (equal? x x^)
x
(walk-type x^ s))))
(,else (error 'walk-type "Unknown type" else))))
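;; Illustrative sketch (not part of the original source): with a
;; substitution s that maps tv1 to tv2 and tv2 to int, (walk tv1 s)
;; follows the chain to int, and (walk-type `(vec ,rv ,tv1) s) rebuilds
;; the whole type as `(vec ,rv int).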
;; Unifies types a and b. s is an a-list containing substitutions
;; for both type and region variables. If the unification is
;; successful, this function returns a new substitution. Otherwise,
;; this function returns #f.
(define (unify-types a b s)
(define (maybe-subst a b s)
(let ((t (or (tvar? a) (rvar? a))))
(if t
(and s `((,a . ,b) . ,s))
(error 'maybe-subst
"You don't want to put this in the substitution."
a b))))
(let ((s
(match `(,(walk-type a s) ,(walk-type b s))
;; Obviously equal types unify.
((,a ,b) (guard (equal? a b)) s)
((int Numeric)
(if (tvar? b)
(maybe-subst b 'int s)
s))
((float Numeric)
(if (tvar? b)
(maybe-subst b 'float s)
s))
((u64 Numeric)
(if (tvar? b)
(maybe-subst b 'u64 s)
s))
;;((Numeric float) (guard (tvar? a)) `((,a . float) . ,s))
((,a ,b) (guard (tvar? a)) (maybe-subst a b s))
((,a ,b) (guard (tvar? b)) (maybe-subst b a s))
((,a ,b) (guard (and (rvar? a) (rvar? b))) (maybe-subst a b s))
(((vec ,ra ,a) (vec ,rb ,b))
(let ((s (unify-types a b s)))
(if (eq? ra rb)
s
(maybe-subst ra rb s))))
(((ptr ,a) (ptr ,b))
(unify-types a b s))
(((adt ,ta ,ra) (adt ,tb ,rb))
(let ((s (unify-types ta tb s)))
(if (eq? ra rb)
s
(maybe-subst ra rb s))))
(((closure ,r1 ,a* -> ,a)
(closure ,r2 ,b* -> ,b))
(let loop ((a* a*)
(b* b*)
(s s))
(match `(,a* ,b*)
((() ())
(let ((s (unify-types a b s)))
(if (eq? r1 r2)
s
(maybe-subst r1 r2 s))))
(((,a ,a* ...) (,b ,b* ...))
(let ((s (unify-types a b s)))
(and s (loop a* b* s))))
(,else #f))))
(((fn (,a* ...) -> ,a) (fn (,b* ...) -> ,b))
(let loop ((a* a*)
(b* b*)
(s s))
(match `(,a* ,b*)
((() ()) (unify-types a b s))
(((,a ,a* ...) (,b ,b* ...))
(let ((s (unify-types a b s)))
(and s (loop a* b* s))))
(,else #f))))
(,else #f))))
(if s
(if (not (andmap (lambda (s)
(or (tvar? (car s)) (rvar? (car s))))
s))
(begin
(pretty-print s)
(error 'unify-types "invalid substitution created"
a b s
(walk-type a s)
(walk-type b s)))))
s))
;; Remove extra tags and things so that the expression looks more like
;; what the programmer typed.
(define (unparse e)
(match e
((num ,n) n)
((str ,s) s)
((,op ,[e1] ,[e2]) (guard (binop? op))
`(,op ,e1 ,e2))
(,else else)))
(define (type-error e expected found)
(display "In expression...\n")
(pretty-print (unparse e))
(display "Expected type...\n")
(pretty-print expected)
(display "But found...\n")
(pretty-print found)
(error 'typecheck
"Could not unify types"))
(define (return e t)
(lambda (_ r s)
(values e t s)))
(define (bind m seq)
(lambda (e^ r s)
(let-values (((e t s) (m e^ r s)))
((seq e t) e^ r s))))
(define (unify a b seq)
(lambda (e r s)
(let ((s^ (unify-types a b s)))
(if s^
((seq) e r s^)
(type-error e (walk-type a s) (walk-type b s))))))
(define (== a b)
(unify a b (lambda () (return #f a))))
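;; Illustrative note (not part of the original source): return, bind,
;; unify and == form a small inference monad -- each computation is a
;; procedure of (current-expression return-type substitution) yielding an
;; annotated expression, its type and an extended substitution, which is
;; what lets do* thread that state through infer-expr below.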
(define (require-type e env t)
(let ((tv (make-tvar (gensym 'tv))))
(do* (((e t^) (infer-expr e env))
((_ __) (== tv t))
((_ __) (== tv t^)))
(return e tv))))
(define (unify-return-type t seq)
(lambda (e r s)
((unify r t seq) e r s)))
(define-syntax with-current-expr
(syntax-rules ()
((_ e b)
(lambda (e^ r s)
(b e r s)))))
;; you can use this with bind too!
(define (infer-expr* e* env)
(if (null? e*)
(return '() '())
(let ((e (car e*))
(e* (cdr e*)))
(bind
(infer-expr* e* env)
(lambda (e* t*)
(bind (infer-expr e env)
(lambda (e t)
(return `(,e . ,e*)
`(,t . ,t*)))))))))
(define (require-all e* env t)
(if (null? e*)
(return '() t)
(let ((e (car e*))
(e* (cdr e*)))
(do* (((e* t) (require-all e* env t))
((e t) (require-type e env t)))
(return `(,e . ,e*) t)))))
;; Here env is just a list of formal parameters and internally bound
;; variables.
(define (free-var-types e env)
(match e
((num ,i) '())
((float ,f) '())
((bool ,b) '())
((var ,t ,x)
(if (memq x env)
'()
(list (cons x t))))
((lambda ,t ((,x* ,t*) ...) ,b)
(free-var-types b (append x* env)))
((let ((,x* ,t* ,[e*]) ...) ,b)
(apply append (free-var-types b (append x* env)) e*))
((if ,[t] ,[c] ,[a]) (append t c a))
((vector-ref ,t ,[x] ,[i])
(append x i))
((match ,t ,[e]
((,tag ,x ...) ,b) ...)
(apply append e
(map (lambda (x b) (free-var-types b (append x env))) x b)))
((call ,[e*] ...)
(apply append e*))
((invoke ,[e*] ...)
(apply append e*))
((,op ,t ,[a] ,[b])
(guard (or (binop? op) (relop? op)))
(append a b))
(,else (error 'free-var-types
"Unexpected expression" else))))
(define-syntax do*
(syntax-rules ()
((_ (((x ...) e) ((x* ...) e*) ...) b)
(bind e (lambda (x ...)
(do* (((x* ...) e*) ...) b))))
((_ () b) b)))
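;; Illustrative sketch (not part of the original source): a form such as
;;   (do* (((e t) (infer-expr e env))) (return e t))
;; expands into (bind (infer-expr e env) (lambda (e t) (return e t))).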
(define (unify-regions* r r*)
(if (null? r*)
(return '() '())
(do* (((a b) (unify-regions* r (cdr r*)))
((a b) (== r (car r*))))
(return a b))))
(define (infer-expr e env)
;(display `(,e :: ,env)) (newline)
(with-current-expr
e
(match e
((int ,n)
(return `(int ,n) 'int))
((float ,f)
(return `(float ,f) 'float))
((num ,n)
(let ((t (make-tvar (gensym 'num))))
(do* (((_ t) (== t 'Numeric)))
(return `(num ,n) t))))
((char ,c) (return `(char ,c) 'char))
((bool ,b)
(return `(bool ,b) 'bool))
((str ,s)
(return `(str ,s) 'str))
((var ,x)
(let ((t (lookup x env)))
(return `(var ,t ,x) t)))
((int->float ,e)
(do* (((e _) (require-type e env 'int)))
(return `(int->float ,e) 'float)))
((float->int ,e)
(do* (((e _) (require-type e env 'float)))
(return `(float->int ,e) 'int)))
((return)
(unify-return-type
'void
;; Returning a free type variable is better so we can return
;; from any context, but that gives us problems with free
;; type variables at the end.
(lambda () (return `(return) 'void))))
((return ,e)
(bind (infer-expr e env)
(lambda (e t)
(unify-return-type
t
(lambda ()
(return `(return ,e) t))))))
((print ,e)
(do* (((e t) (infer-expr e env)))
(return `(print ,t ,e) 'void)))
((print ,e ,f)
(do* (((e t) (infer-expr e env))
((f _) (require-type f env '(ptr ofstream))))
(return `(print ,t ,e ,f) 'void)))
((println ,e)
(do* (((e t) (infer-expr e env)))
(return `(println ,t ,e) 'void)))
((iota ,e)
(do* (((e t) (require-type e env 'int)))
(let ((r (make-rvar (gensym 'r))))
(return `(iota-r ,r ,e)
`(vec ,r int)))))
((iota-r ,r ,e)
(do* (((e t) (require-type e env 'int)))
(return `(iota-r ,r ,e)
`(vec ,r int))))
((vector ,e* ...)
(let ((t (make-tvar (gensym 'tvec)))
(r (make-rvar (gensym 'rv))))
(do* (((e* t) (require-all e* env t)))
(return `(vector (vec ,r ,t) ,e* ...) `(vec ,r ,t)))))
((vector-r ,r ,e* ...)
(let ((t (make-tvar (gensym 'tvec))))
(do* (((e* t) (require-all e* env t)))
(return `(vector (vec ,r ,t) ,e* ...) `(vec ,r ,t)))))
((make-vector ,len ,val)
(do* (((len _) (require-type len env 'int))
((val t) (infer-expr val env)))
(let ((t `(vec ,(make-rvar (gensym 'rmake-vector)) ,t)))
(return `(make-vector ,t ,len ,val) t))))
((length ,v)
(let ((t (make-tvar (gensym 'tveclength)))
(r (make-rvar (gensym 'rvl))))
(do* (((v _) (require-type v env `(vec ,r ,t))))
(return `(length ,v) 'int))))
((vector-ref ,v ,i)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t)))
((i _) (require-type i env 'int)))
(return `(vector-ref ,t ,v ,i) t))))
((unsafe-vector-ref ,v ,i)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t)))
((i _) (require-type i env 'int)))
(return `(unsafe-vector-ref ,t ,v ,i) t))))
((unsafe-vec-ptr ,v)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t))))
(return `(unsafe-vec-ptr (ptr ,t) ,v) `(ptr ,t)))))
((unsafe-explicit-cast (,t1 -> ,t2) ,e)
(do* (((e _) (require-type e env t1)))
(return `(cast ,t2 ,e) t2)))
((,+ ,a ,b) (guard (binop? +))
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t))
((_ __) (== t 'Numeric)))
(return `(,+ ,t ,a ,b) t)))
((= ,a ,b)
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t)))
(return `(= ,t ,a ,b) 'bool)))
((,< ,a ,b)
(guard (relop? <))
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t))
((_ __) (== t 'Numeric)))
(return `(,< bool ,a ,b) 'bool)))
((assert ,e)
(do* (((e t) (require-type e env 'bool)))
(return `(assert ,e) t)))
((set! ,x ,e)
(do* (((x t) (infer-expr x env))
((e t) (require-type e env t)))
(return `(set! ,x ,e) 'void)))
((begin ,s* ... ,e)
(do* (((s* _) (infer-expr* s* env))
((e t) (infer-expr e env)))
(return `(begin ,s* ... ,e) t)))
((if ,test ,c ,a)
(do* (((test tt) (require-type test env 'bool))
((c t) (infer-expr c env))
((a t) (require-type a env t)))
(return `(if ,test ,c ,a) t)))
((if ,test ,c)
(do* (((test tt) (require-type test env 'bool))
((c t) (require-type c env 'void)))
(return `(if ,test ,c) t)))
((lambda (,x* ...) ,body)
;; Lambda is a little tricky because of regions in the free
;; variables. First we infer the type based on the usual way
;; of inferring lambda, but then we determine the regions for
;; the free variables in the body. We create a new region
;; variable and unify this with all of the regions of free
;; variables.
(let* ((arg-types (map (lambda (x) (make-tvar (gensym x))) x*))
(env (append (map cons x* arg-types) env))
(r (gen-rvar 'lambda)))
(do* (((body tbody)
(infer-expr body env)))
(let* ((fv (free-var-types body x*))
(regions (apply union
(map (lambda (x)
(free-regions-type (cdr x)))
fv))))
(do* (((_ __) (unify-regions* r regions)))
(return
`(lambda (closure ,r ,arg-types -> ,tbody)
((,x* ,arg-types) ...)
,body)
`(closure ,r ,arg-types -> ,tbody)))))))
((let ((,x ,e) ...) ,body)
(do* (((e t*) (infer-expr* e env))
((body t) (infer-expr body (append (map cons x t*) env))))
(return `(let ((,x ,t* ,e) ...) ,body) t)))
((let-region (,r* ...) ,b)
(do* (((b t) (infer-expr b env)))
(return `(let-region (,r* ...) ,b) t)))
((while ,t ,b)
(do* (((t _) (require-type t env 'bool))
((b _) (infer-expr b env)))
(return `(while ,t ,b) 'void)))
((reduce + ,e)
(let ((r (make-rvar (gensym 'r)))
(t (make-tvar (gensym 'reduce-t))))
(do* (((_ __) (== t 'Numeric))
((e t) (require-type e env `(vec ,r ,t))))
(return `(reduce ,t + ,e) 'int))))
((kernel ((,x ,e) ...) ,b)
(do* (((e t*) (let loop ((e e))
(if (null? e)
(return '() '())
(let ((e* (cdr e))
(e (car e))
(t (make-tvar (gensym 'kt)))
(r (make-rvar (gensym 'rkt))))
(do* (((e* t*) (loop e*))
((e _) (require-type e env `(vec ,r ,t))))
(return (cons e e*)
(cons (list r t) t*)))))))
((b t) (infer-expr b (append
(map (lambda (x t) (cons x (cadr t))) x t*)
env))))
(let ((r (make-rvar (gensym 'rk))))
(return `(kernel-r (vec ,r ,t) ,r
(((,x ,(map cadr t*))
(,e (vec . ,t*))) ...)
,b)
`(vec ,r ,t)))))
((kernel-r ,r ((,x ,e) ...) ,b)
(do* (((e t*) (let loop ((e e))
(if (null? e)
(return '() '())
(let ((e* (cdr e))
(e (car e))
(t (make-tvar (gensym 'kt)))
(r (make-rvar (gensym 'rkt))))
(do* (((e* t*) (loop e*))
((e _) (require-type e env `(vec ,r ,t))))
(return (cons e e*)
(cons (list r t) t*)))))))
((b t) (infer-expr b (append
(map (lambda (x t) (cons x (cadr t))) x t*)
env))))
(return `(kernel-r (vec ,r ,t) ,r
(((,x ,(map cadr t*))
(,e (vec . ,t*))) ...)
,b)
`(vec ,r ,t))))
((call ,f ,e* ...) (guard (ident? f))
(let ((t (make-tvar (gensym 'rt)))
(ft (lookup f env)))
(do* (((e* t*) (infer-expr* e* env))
((_ __) (require-type `(var ,f) env `(fn ,t* -> ,t))))
(return `(call (var (fn ,t* -> ,t) ,f) ,e* ...) t))))
((invoke ,rator ,rand* ...)
(let ((t (gen-tvar 'invoke))
(r (gen-rvar 'invoke)))
(do* (((rand* randt*) (infer-expr* rand* env))
((rator fty) (require-type rator env
`(closure ,r ,randt* -> ,t))))
(return `(invoke ,rator . ,rand*) t))))
((do ,e)
(do* (((e t) (infer-expr e env)))
(return `(do ,e) t)))
((match ,e
((,tag ,x* ...) ,e*) ...)
;; This might be a little tricky, depending on how much
;; information we have to start with. If the type of e is
;; known at this point, it's easy. However, if we don't know
;; if yet (for example, the value was passed in as a
;; parameter), we might have to infer the type based on the
;; constructors given.
(match (lookup-type-tags tag env)
((,te . ,typedef)
(do* (((e _) (require-type e env te))
((e* t)
(let check-arms ((tag tag)
(x* x*)
(e* e*)
(typedef typedef))
(match `(,tag ,x* ,e*)
(((,tag . ,tag*) (,x* . ,x**) (,e* . ,e**))
(let-values (((constructor rest)
(partition (lambda (x)
(eq? (car x) tag))
typedef)))
(match constructor
(((,_ ,t* ...))
(do* (((e**^ t) (check-arms tag* x** e** rest))
((e^ _) (require-type e* (append
(map cons x* t*)
env)
t)))
(return (cons e^ e**^) t))))))
((() () ()) (return '() (make-tvar (gensym 'tmatch))))))))
(return `(match ,t ,e ((,tag ,x* ...) ,e*) ...) t)))))
((error! ,s) (guard (string? s))
(return `(error! ,s) (gen-tvar 'error!)))
)))
(define infer-body infer-expr)
(define (add-region-vars-to-type end adt-graph)
(lambda (t*)
(match t*
((vec ,[t])
`(vec ,@end ,t))
((vec ,r ,[t])
(begin
(display "Warning, in type \n")
(display t*)
(display " there was already a region parameter. Replacing with \n")
(display end)
(newline)
`(vec ,@end ,t)))
((closure (,[t*] ...) -> ,[t])
`(closure ,@end ,t* -> ,t))
((adt ,t^) (guard (recursive-adt? t^ adt-graph))
`(adt ,t^ . ,end))
(,else (begin #;(if (pair? else)
(display else))
else)))))
(define (make-top-level-env decls adt-graph)
(append
(apply append
(map (lambda (d)
(match d
((fn ,name (,[make-tvar -> var*] ...) ,body)
`((,name fn (,var* ...) -> ,(make-tvar name))))
((define-datatype ,t
(,c ,t* ...) ...)
(let* ((end (if (recursive-adt? t adt-graph)
(list (make-rvar (gensym t)))
'()))
(t* (map (lambda (t*)
(map (add-region-vars-to-type end
adt-graph)
t*))
t*)))
`((,type-tag (adt ,t . ,end) (,c ,t* ...) ...)
(,c fn (,t* ...)
-> ,(map (lambda (_) `(adt ,t . ,end)) c)) ...)))
((extern ,name . ,t)
(list (cons name (cons 'fn t))))))
decls))
;; Add some primitives
'((harlan_sqrt fn (float) -> float)
(floor fn (float) -> float)
(atan2 fn (float float) -> float))))
(define (recursive-adt? name graph)
(let loop ((path (list name)))
(let ((node (assq (car path) graph)))
(if node
(ormap (lambda (n)
(or (memq n path)
(loop (cons n path))))
(cdr node))
#f))))
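;; Illustrative sketch (not part of the original source): for a definition
;; like (define-datatype tree (leaf int) (node (adt tree))), make-adt-graph
;; records that tree references tree, so (recursive-adt? 'tree graph)
;; finds the cycle and returns a true value; a non-self-referential
;; datatype yields #f.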
;; A graph of which types are referenced by each adt. Used by
;; recursive-adt?
(define (make-adt-graph decl*)
(apply append
(map (lambda (d)
(match d
((define-datatype ,t (,c ,t* ...) ...)
`((,t . ,(apply union
(map
(lambda (t^)
(map (lambda (t^)
(match t^
;; if we contain a
;; vector or closure,
;; then the type is
;; always
;; recursive. We
;; trick the type
;; checker into
;; thinking this by
;; putting in a
;; self-link if we
;; encounter one of
;; these types.
((vec . ,_) t)
((closure . ,_) t)
((adt ,t) t)
(,else else)))
t^))
t*)))))
(,else '())))
decl*)))
(define (infer-module m)
(match m
((module . ,decls)
(let* ((adt-graph (make-adt-graph decls))
;;(_ (pretty-print adt-graph))
(env (make-top-level-env decls adt-graph)))
;;(pretty-print env)
(infer-decls decls env adt-graph)))))
(define (infer-decls decls env adt-graph)
(match decls
(() (values '() '()))
((,d . ,d*)
(let-values (((d* s) (infer-decls d* env adt-graph)))
(let-values (((d s) (infer-decl d env s adt-graph)))
(values (cons d d*) s))))))
(define (infer-decl d env s adt-graph)
(match d
((extern . ,whatever)
(values `(extern . ,whatever) s))
((define-datatype ,t (,c ,t* ...) ...)
(values
(if (recursive-adt? t adt-graph)
(let* ((r (make-rvar (gensym t)))
(t* (map (lambda (t*)
(map (lambda (t*)
(match t*
((closure (,[t*] ...) -> ,[t])
`(closure ,r ,t* -> ,t))
((adt ,t^)
(guard (recursive-adt? t^ adt-graph))
`(adt ,t^ ,r))
(,else else))) t*))
t*)))
`(define-datatype (,t ,r) (,c ,t* ...)))
`(define-datatype ,t (,c ,t* ...)))
s))
((fn ,name (,var* ...) ,body)
;; find the function definition in the environment, bring the
;; parameters into scope.
(match (lookup name env)
((fn (,t* ...) -> ,t)
(let-values (((b t s)
((infer-body body (append (map cons var* t*) env))
body t s)))
(values
`(fn ,name (,var* ...) (fn (,t* ...) -> ,t) ,b)
s)))))))
(define (lookup x e)
(let ((t (assq x e)))
(if t
(cdr t)
(error 'lookup "Variable not found" x e))))
(define (lookup-type-tags tags e)
(match e
(()
(error 'lookup-type-tags "Could not find type from constructors" tags))
(((,tag (adt ,name . ,end) (,tag* . ,t) ...) . ,rest)
(guard (and (eq? tag type-tag)
(set-equal? tags tag*)))
`((adt ,name . ,end) (,tag* . ,t) ...))
((,e . ,e*) (lookup-type-tags tags e*))))
(define (ground-module m s)
(if (verbosity? trace-pass-verbosity-level)
(begin (pretty-print m) (newline)
(pretty-print s) (newline)))
(match m
((module ,[(lambda (d) (ground-decl d s)) -> decl*] ...)
`(module ,decl* ...))))
(define (ground-decl d s)
(match d
((extern . ,whatever) `(extern . ,whatever))
((define-datatype (,t ,r) (,c ,t* ...) ...)
`(define-datatype (,t ,(rvar-name r))
. ,(car (map (lambda (c t*)
(map (lambda (c t*)
`(,c . ,(map (lambda (t) (ground-type t s)) t*)))
c t*)) c t*))))
((define-datatype ,t (,c ,t* ...) ...)
`(define-datatype ,t
. ,(car (map (lambda (c t*)
(map (lambda (c t*)
`(,c . ,(map (lambda (t) (ground-type t s)) t*)))
c t*)) c t*))))
((fn ,name (,var ...)
,[(lambda (t) (ground-type t s)) -> t]
,[(lambda (e) (ground-expr e s)) -> body])
(let* ((region-params (free-regions-type t))
(body-regions (free-regions-expr body))
(local-regions (difference body-regions region-params)))
`(fn ,name (,var ...) ,t (let-region ,local-regions ,body))))))
(define (region-name r)
(if (rvar? r)
(rvar-name r)
r))
(define (ground-type t s)
(let ((t (walk-type t s)))
(if (tvar? t)
(let ((t^ (assq t s)))
(if t^
(case (cdr t^)
;; We have a free variable that's constrained as
;; Numeric, so ground it as an integer.
((Numeric) 'int))
(begin
(display "Warning: free type variable: ")
(display t)
(newline)
(display "Defaulting to type int.\n")
'int)))
(match t
(,prim (guard (symbol? prim)) prim)
((vec ,r ,t) `(vec ,(region-name r) ,(ground-type t s)))
((ptr ,t) `(ptr ,(ground-type t s)))
((adt ,t) `(adt ,(ground-type t s)))
((adt ,t ,r) `(adt ,(ground-type t s) ,(region-name r)))
((closure ,r (,[(lambda (t) (ground-type t s)) -> t*] ...) -> ,t)
`(closure ,(region-name r) ,t* -> ,(ground-type t s)))
((fn (,[(lambda (t) (ground-type t s)) -> t*] ...) -> ,t)
`(fn ,t* -> ,(ground-type t s)))
(,else (error 'ground-type "unsupported type" else))))))
(define (ground-expr e s)
(let ((ground-type (lambda (t) (ground-type t s))))
(match e
((int ,n) `(int ,n))
((float ,f) `(float ,f))
;; This next line is cheating, but it should get us through
;; the rest of the compiler.
((num ,n)
(if (< n #x100000000)
`(int ,n)
`(u64 ,n)))
((char ,c) `(char ,c))
((str ,s) `(str ,s))
((bool ,b) `(bool ,b))
((var ,[ground-type -> t] ,x) `(var ,t ,x))
((int->float ,[e]) `(int->float ,e))
((float->int ,[e]) `(float->int ,e))
((,op ,[ground-type -> t] ,[e1] ,[e2])
(guard (or (relop? op) (binop? op)))
`(,op ,t ,e1 ,e2))
((print ,[ground-type -> t] ,[e]) `(print ,t ,e))
((print ,[ground-type -> t] ,[e] ,[f]) `(print ,t ,e ,f))
((println ,[ground-type -> t] ,[e]) `(println ,t ,e))
((assert ,[e]) `(assert ,e))
((iota-r ,r ,[e]) `(iota-r ,(region-name (walk r s)) ,e))
((iota ,[e]) `(iota ,e))
((make-vector ,[ground-type -> t] ,[len] ,[val])
`(make-vector ,t ,len ,val))
((lambda ,[ground-type -> t0] ((,x ,[ground-type -> t]) ...) ,[b])
`(lambda ,t0 ((,x ,t) ...) ,b))
((let ((,x ,[ground-type -> t] ,[e]) ...) ,[b])
`(let ((,x ,t ,e) ...) ,b))
((for (,x ,[start] ,[end] ,[step]) ,[body])
`(for (,x ,start ,end ,step) ,body))
((while ,[t] ,[b]) `(while ,t ,b))
((vector ,[ground-type -> t] ,[e*] ...)
`(vector ,t ,e* ...))
((length ,[e]) `(length ,e))
((vector-ref ,[ground-type -> t] ,[v] ,[i])
`(vector-ref ,t ,v ,i))
((cast ,[ground-type -> t] ,[e])
`(cast ,t ,e))
((unsafe-vector-ref ,[ground-type -> t] ,[v] ,[i])
`(unsafe-vector-ref ,t ,v ,i))
((unsafe-vec-ptr ,[ground-type -> t] ,[v])
`(unsafe-vec-ptr ,t ,v))
((kernel-r ,[ground-type -> t] ,r
(((,x ,[ground-type -> ta*]) (,[e] ,[ground-type -> ta**])) ...)
,[b])
`(kernel-r ,t ,(region-name (walk r s))
(((,x ,ta*) (,e ,ta**)) ...) ,b))
((reduce ,[ground-type -> t] + ,[e]) `(reduce ,t + ,e))
((set! ,[x] ,[e]) `(set! ,x ,e))
((begin ,[e*] ...) `(begin ,e* ...))
((if ,[t] ,[c] ,[a]) `(if ,t ,c ,a))
((if ,[t] ,[c]) `(if ,t ,c))
((return) `(return))
((return ,[e]) `(return ,e))
((call ,[f] ,[e*] ...) `(call ,f ,e* ...))
((invoke ,[rator] ,[rand*] ...) `(invoke ,rator . ,rand*))
((do ,[e]) `(do ,e))
((let-region (,r* ...) ,[e]) `(let-region (,r* ...) ,e))
((match ,[ground-type -> t] ,[e]
((,tag . ,x) ,[e*]) ...)
`(match ,t ,e ((,tag . ,x) ,e*) ...))
((error! ,s) `(error! ,s))
(,else (error 'ground-expr "Unrecognized expression" else)))))
(define-match free-regions-expr
((var ,[free-regions-type -> t] ,x) t)
((int ,n) '())
((u64 ,n) '())
((float ,f) '())
((char ,c) '())
((bool ,b) '())
((str ,s) '())
((int->float ,[e]) e)
((float->int ,[e]) e)
((assert ,[e]) e)
((print ,[free-regions-type -> t] ,[e]) (union t e))
((print ,[free-regions-type -> t] ,[e] ,[f]) (union t e f))
((println ,[free-regions-type -> t] ,[e]) (union t e))
((,op ,[free-regions-type -> t] ,[rhs] ,[lhs])
(guard (or (binop? op) (relop? op)))
(union t lhs rhs))
((vector ,[free-regions-type -> t] ,[e*] ...)
(union t (apply union e*)))
((length ,[e]) e)
((cast ,[free-regions-type -> t] ,[e])
(union t e))
((vector-ref ,[free-regions-type -> t] ,[x] ,[i]) (union t x i))
((unsafe-vector-ref ,[free-regions-type -> t] ,[x] ,[i]) (union t x i))
((unsafe-vec-ptr ,[free-regions-type -> t] ,[v])
(union t v))
((iota-r ,r ,[e]) (set-add e r))
((make-vector ,[free-regions-type -> t] ,[len] ,[val])
(union t len val))
((kernel-r ,[free-regions-type -> t] ,r
(((,x ,[free-regions-type -> t*]) (,[xs] ,[free-regions-type -> ts*]))
...)
,[b])
(set-add (union b t (apply union (append t* ts* xs))) r))
((reduce ,[free-regions-type -> t] ,op ,[e]) (union t e))
((set! ,[x] ,[e]) (union x e))
((begin ,[e*] ...) (apply union e*))
((lambda ,[free-regions-type -> t0]
((,x ,[free-regions-type -> t]) ...) ,b)
;; The type inferencer is designed so that each lambda should
;; have no free regions other than the type-inferencer supplied
;; region.
(apply union t0 t))
((let ((,x ,[free-regions-type -> t] ,[e]) ...) ,[b])
(union b (apply union (append t e))))
((for (,x ,[start] ,[end] ,[step]) ,[body])
(union start end step body))
((while ,[t] ,[e]) (union t e))
((if ,[t] ,[c] ,[a]) (union t c a))
((if ,[t] ,[c]) (union t c))
((call ,[e*] ...) (apply union e*))
((invoke ,[e*] ...) (apply union e*))
((do ,[e]) e)
((let-region (,r* ...) ,[e])
(difference e r*))
((match ,[free-regions-type -> t] ,[e]
(,p ,[e*]) ...)
(apply union `(,t ,e . ,e*)))
((return) '())
((return ,[e]) e)
((error! ,s) '()))
(define-match free-regions-type
;; This isn't fantastic... what if this later unifies to a type
;; that contains a region? We might need some sort of lazy
;; suspension thingy.
(,x (guard (tvar? x)) '())
((vec ,r ,[t]) (set-add t r))
((adt ,[t] ,r) (set-add t r))
((adt ,[t]) t)
((closure ,r (,[t*] ...) -> ,[t])
(set-add (apply union t t*) r))
((fn (,[t*] ...) -> ,[t]) (union t (apply union t*)))
((ptr ,[t]) t)
;; Boxes hide all their regions until they are unboxed.
((box ,r ,t) (list r))
(() '())
(,else (guard (symbol? else)) '()))
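;; Illustrative sketch (not part of the original source): for example,
;; (free-regions-type `(vec ,r1 (adt tree ,r2))) collects both region
;; variables r1 and r2, while a scalar type such as int contributes none.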
)
| null | https://raw.githubusercontent.com/eholk/harlan/3afd95b1c3ad02a354481774585e866857a687b8/harlan/front/typecheck.scm | scheme | Walks type and region variables in a substitution
TODO: We will probably need to check for cycles.
for both type and region variables. If the unification is
successful, this function returns a new substitution. Otherwise,
Obviously equal types unify.
((Numeric float) (guard (tvar? a)) `((,a . float) . ,s))
Remove extra tags and things so that the expression looks more like
what the programmer typed.
you can use this with bind too!
Here env is just a list of formal parameters and internally bound
variables.
(display `(,e :: ,env)) (newline)
Returning a free type variable is better so we can return
from any context, but that gives us problems with free
type variables at the end.
Lambda is a little tricky because of regions in the free
of inferring lambda, but then we determine the regions for
the free variables in the body. We create a new region
variable and unify this with all of the regions of free
variables.
This might be a little tricky, depending on how much
information we have to start with. If the type of e is
known at this point, it's easy. However, if we don't know
if yet (for example, the value was passed in as a
parameter), we might have to infer the type based on the
constructors given.
(if (pair? else)
Add some primitives
A graph of which types are referenced by each adt. Used by
recursive-adt?
if we contain a
vector or closure,
then the type is
always
recursive. We
trick the type
checker into
thinking this by
putting in a
self-link if we
encounter one of
these types.
(_ (pretty-print adt-graph))
(pretty-print env)
find the function definition in the environment, bring the
parameters into scope.
We have a free variable that's constrained as
Numeric, so ground it as an integer.
This next line is cheating, but it should get us through
the rest of the compiler.
The type inferencer is designed so that each lambda should
have no free regions other than the type-inferencer supplied
region.
This isn't fantastic... what if this later unifies to a type
that contains a region? We might need some sort of lazy
suspension thingy.
Boxes hide all their regions until they are unboxed. | (library
(harlan front typecheck)
(export typecheck free-regions-type gen-rvar)
(import
(rnrs)
(elegant-weapons match)
(elegant-weapons helpers)
(elegant-weapons sets)
(harlan compile-opts)
(util compat)
(util color))
(define (typecheck m)
(let-values (((m s) (infer-module m)))
(ground-module `(module . ,m) s)))
(define-record-type tvar (fields name))
(define-record-type rvar (fields name))
(define (gen-tvar x) (make-tvar (gensym x)))
(define (gen-rvar x) (make-rvar (gensym x)))
(define type-tag (gensym 'type))
(define (walk x s)
(let ((x^ (assq x s)))
(if x^
(let ((x^ (cdr x^)))
(cond
((or (tvar? x^) (rvar? x^))
(walk x^ s))
((eq? x^ 'Numeric)
x)
(else x^)))
x)))
(define (walk-type t s)
(match t
(,t (guard (symbol? t)) t)
((vec ,r ,[t]) `(vec ,(walk r s) ,t))
((ptr ,[t]) `(ptr ,t))
((adt ,[t]) `(adt ,t))
((adt ,[t] ,r) `(adt ,t ,(walk r s)))
((closure ,r (,[t*] ...) -> ,[t])
`(closure ,(walk r s) ,t* -> ,t))
((fn (,[t*] ...) -> ,[t]) `(fn (,t* ...) -> ,t))
(,x (guard (or (tvar? x) (rvar? x)))
(let ((x^ (walk x s)))
(if (equal? x x^)
x
(walk-type x^ s))))
(,else (error 'walk-type "Unknown type" else))))
types a and b. s is an a - list containing substitutions
this functions returns # f.
(define (unify-types a b s)
(define (maybe-subst a b s)
(let ((t (or (tvar? a) (rvar? a))))
(if t
(and s `((,a . ,b) . ,s))
(error 'maybe-subst
"You don't want to put this in the substitution."
a b))))
(let ((s
(match `(,(walk-type a s) ,(walk-type b s))
((,a ,b) (guard (equal? a b)) s)
((int Numeric)
(if (tvar? b)
(maybe-subst b 'int s)
s))
((float Numeric)
(if (tvar? b)
(maybe-subst b 'float s)
s))
((u64 Numeric)
(if (tvar? b)
(maybe-subst b 'u64 s)
s))
((,a ,b) (guard (tvar? a)) (maybe-subst a b s))
((,a ,b) (guard (tvar? b)) (maybe-subst b a s))
((,a ,b) (guard (and (rvar? a) (rvar? b))) (maybe-subst a b s))
(((vec ,ra ,a) (vec ,rb ,b))
(let ((s (unify-types a b s)))
(if (eq? ra rb)
s
(maybe-subst ra rb s))))
(((ptr ,a) (ptr ,b))
(unify-types a b s))
(((adt ,ta ,ra) (adt ,tb ,rb))
(let ((s (unify-types ta tb s)))
(if (eq? ra rb)
s
(maybe-subst ra rb s))))
(((closure ,r1 ,a* -> ,a)
(closure ,r2 ,b* -> ,b))
(let loop ((a* a*)
(b* b*)
(s s))
(match `(,a* ,b*)
((() ())
(let ((s (unify-types a b s)))
(if (eq? r1 r2)
s
(maybe-subst r1 r2 s))))
(((,a ,a* ...) (,b ,b* ...))
(let ((s (unify-types a b s)))
(and s (loop a* b* s))))
(,else #f))))
(((fn (,a* ...) -> ,a) (fn (,b* ...) -> ,b))
(let loop ((a* a*)
(b* b*)
(s s))
(match `(,a* ,b*)
((() ()) (unify-types a b s))
(((,a ,a* ...) (,b ,b* ...))
(let ((s (unify-types a b s)))
(and s (loop a* b* s))))
(,else #f))))
(,else #f))))
(if s
(if (not (andmap (lambda (s)
(or (tvar? (car s)) (rvar? (car s))))
s))
(begin
(pretty-print s)
(error 'unify-types "invalid substitution created"
a b s
(walk-type a s)
(walk-type b s)))))
s))
(define (unparse e)
(match e
((num ,n) n)
((str ,s) s)
((,op ,[e1] ,[e2]) (guard (binop? op))
`(,op ,e1 ,e2))
(,else else)))
(define (type-error e expected found)
(display "In expression...\n")
(pretty-print (unparse e))
(display "Expected type...\n")
(pretty-print expected)
(display "But found...\n")
(pretty-print found)
(error 'typecheck
"Could not unify types"))
(define (return e t)
(lambda (_ r s)
(values e t s)))
(define (bind m seq)
(lambda (e^ r s)
(let-values (((e t s) (m e^ r s)))
((seq e t) e^ r s))))
(define (unify a b seq)
(lambda (e r s)
(let ((s^ (unify-types a b s)))
(if s^
((seq) e r s^)
(type-error e (walk-type a s) (walk-type b s))))))
(define (== a b)
(unify a b (lambda () (return #f a))))
(define (require-type e env t)
(let ((tv (make-tvar (gensym 'tv))))
(do* (((e t^) (infer-expr e env))
((_ __) (== tv t))
((_ __) (== tv t^)))
(return e tv))))
(define (unify-return-type t seq)
(lambda (e r s)
((unify r t seq) e r s)))
(define-syntax with-current-expr
(syntax-rules ()
((_ e b)
(lambda (e^ r s)
(b e r s)))))
(define (infer-expr* e* env)
(if (null? e*)
(return '() '())
(let ((e (car e*))
(e* (cdr e*)))
(bind
(infer-expr* e* env)
(lambda (e* t*)
(bind (infer-expr e env)
(lambda (e t)
(return `(,e . ,e*)
`(,t . ,t*)))))))))
(define (require-all e* env t)
(if (null? e*)
(return '() t)
(let ((e (car e*))
(e* (cdr e*)))
(do* (((e* t) (require-all e* env t))
((e t) (require-type e env t)))
(return `(,e . ,e*) t)))))
(define (free-var-types e env)
(match e
((num ,i) '())
((float ,f) '())
((bool ,b) '())
((var ,t ,x)
(if (memq x env)
'()
(list (cons x t))))
((lambda ,t ((,x* ,t*) ...) ,b)
(free-var-types b (append x* env)))
((let ((,x* ,t* ,[e*]) ...) ,b)
(apply append (free-var-types b (append x* env)) e*))
((if ,[t] ,[c] ,[a]) (append t c a))
((vector-ref ,t ,[x] ,[i])
(append x i))
((match ,t ,[e]
((,tag ,x ...) ,b) ...)
(apply append e
(map (lambda (x b) (free-var-types b (append x env))) x b)))
((call ,[e*] ...)
(apply append e*))
((invoke ,[e*] ...)
(apply append e*))
((,op ,t ,[a] ,[b])
(guard (or (binop? op) (relop? op)))
(append a b))
(,else (error 'free-var-types
"Unexpected expression" else))))
(define-syntax do*
(syntax-rules ()
((_ (((x ...) e) ((x* ...) e*) ...) b)
(bind e (lambda (x ...)
(do* (((x* ...) e*) ...) b))))
((_ () b) b)))
(define (unify-regions* r r*)
(if (null? r*)
(return '() '())
(do* (((a b) (unify-regions* r (cdr r*)))
((a b) (== r (car r*))))
(return a b))))
(define (infer-expr e env)
(with-current-expr
e
(match e
((int ,n)
(return `(int ,n) 'int))
((float ,f)
(return `(float ,f) 'float))
((num ,n)
(let ((t (make-tvar (gensym 'num))))
(do* (((_ t) (== t 'Numeric)))
(return `(num ,n) t))))
((char ,c) (return `(char ,c) 'char))
((bool ,b)
(return `(bool ,b) 'bool))
((str ,s)
(return `(str ,s) 'str))
((var ,x)
(let ((t (lookup x env)))
(return `(var ,t ,x) t)))
((int->float ,e)
(do* (((e _) (require-type e env 'int)))
(return `(int->float ,e) 'float)))
((float->int ,e)
(do* (((e _) (require-type e env 'float)))
(return `(float->int ,e) 'int)))
((return)
(unify-return-type
'void
(lambda () (return `(return) 'void))))
((return ,e)
(bind (infer-expr e env)
(lambda (e t)
(unify-return-type
t
(lambda ()
(return `(return ,e) t))))))
((print ,e)
(do* (((e t) (infer-expr e env)))
(return `(print ,t ,e) 'void)))
((print ,e ,f)
(do* (((e t) (infer-expr e env))
((f _) (require-type f env '(ptr ofstream))))
(return `(print ,t ,e ,f) 'void)))
((println ,e)
(do* (((e t) (infer-expr e env)))
(return `(println ,t ,e) 'void)))
((iota ,e)
(do* (((e t) (require-type e env 'int)))
(let ((r (make-rvar (gensym 'r))))
(return `(iota-r ,r ,e)
`(vec ,r int)))))
((iota-r ,r ,e)
(do* (((e t) (require-type e env 'int)))
(return `(iota-r ,r ,e)
`(vec ,r int))))
((vector ,e* ...)
(let ((t (make-tvar (gensym 'tvec)))
(r (make-rvar (gensym 'rv))))
(do* (((e* t) (require-all e* env t)))
(return `(vector (vec ,r ,t) ,e* ...) `(vec ,r ,t)))))
((vector-r ,r ,e* ...)
(let ((t (make-tvar (gensym 'tvec))))
(do* (((e* t) (require-all e* env t)))
(return `(vector (vec ,r ,t) ,e* ...) `(vec ,r ,t)))))
((make-vector ,len ,val)
(do* (((len _) (require-type len env 'int))
((val t) (infer-expr val env)))
(let ((t `(vec ,(make-rvar (gensym 'rmake-vector)) ,t)))
(return `(make-vector ,t ,len ,val) t))))
((length ,v)
(let ((t (make-tvar (gensym 'tveclength)))
(r (make-rvar (gensym 'rvl))))
(do* (((v _) (require-type v env `(vec ,r ,t))))
(return `(length ,v) 'int))))
((vector-ref ,v ,i)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t)))
((i _) (require-type i env 'int)))
(return `(vector-ref ,t ,v ,i) t))))
((unsafe-vector-ref ,v ,i)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t)))
((i _) (require-type i env 'int)))
(return `(unsafe-vector-ref ,t ,v ,i) t))))
((unsafe-vec-ptr ,v)
(let ((t (make-tvar (gensym 'tvecref)))
(r (make-rvar (gensym 'rvref))))
(do* (((v _) (require-type v env `(vec ,r ,t))))
(return `(unsafe-vec-ptr (ptr ,t) ,v) `(ptr ,t)))))
((unsafe-explicit-cast (,t1 -> ,t2) ,e)
(do* (((e _) (require-type e env t1)))
(return `(cast ,t2 ,e) t2)))
((,+ ,a ,b) (guard (binop? +))
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t))
((_ __) (== t 'Numeric)))
(return `(,+ ,t ,a ,b) t)))
((= ,a ,b)
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t)))
(return `(= ,t ,a ,b) 'bool)))
((,< ,a ,b)
(guard (relop? <))
(do* (((a t) (infer-expr a env))
((b t) (require-type b env t))
((_ __) (== t 'Numeric)))
(return `(,< bool ,a ,b) 'bool)))
((assert ,e)
(do* (((e t) (require-type e env 'bool)))
(return `(assert ,e) t)))
((set! ,x ,e)
(do* (((x t) (infer-expr x env))
((e t) (require-type e env t)))
(return `(set! ,x ,e) 'void)))
((begin ,s* ... ,e)
(do* (((s* _) (infer-expr* s* env))
((e t) (infer-expr e env)))
(return `(begin ,s* ... ,e) t)))
((if ,test ,c ,a)
(do* (((test tt) (require-type test env 'bool))
((c t) (infer-expr c env))
((a t) (require-type a env t)))
(return `(if ,test ,c ,a) t)))
((if ,test ,c)
(do* (((test tt) (require-type test env 'bool))
((c t) (require-type c env 'void)))
(return `(if ,test ,c) t)))
((lambda (,x* ...) ,body)
variables . First we infer the type based on the usual way
(let* ((arg-types (map (lambda (x) (make-tvar (gensym x))) x*))
(env (append (map cons x* arg-types) env))
(r (gen-rvar 'lambda)))
(do* (((body tbody)
(infer-expr body env)))
(let* ((fv (free-var-types body x*))
(regions (apply union
(map (lambda (x)
(free-regions-type (cdr x)))
fv))))
(do* (((_ __) (unify-regions* r regions)))
(return
`(lambda (closure ,r ,arg-types -> ,tbody)
((,x* ,arg-types) ...)
,body)
`(closure ,r ,arg-types -> ,tbody)))))))
((let ((,x ,e) ...) ,body)
(do* (((e t*) (infer-expr* e env))
((body t) (infer-expr body (append (map cons x t*) env))))
(return `(let ((,x ,t* ,e) ...) ,body) t)))
((let-region (,r* ...) ,b)
(do* (((b t) (infer-expr b env)))
(return `(let-region (,r* ...) ,b) t)))
((while ,t ,b)
(do* (((t _) (require-type t env 'bool))
((b _) (infer-expr b env)))
(return `(while ,t ,b) 'void)))
((reduce + ,e)
(let ((r (make-rvar (gensym 'r)))
(t (make-tvar (gensym 'reduce-t))))
(do* (((_ __) (== t 'Numeric))
((e t) (require-type e env `(vec ,r ,t))))
(return `(reduce ,t + ,e) 'int))))
((kernel ((,x ,e) ...) ,b)
(do* (((e t*) (let loop ((e e))
(if (null? e)
(return '() '())
(let ((e* (cdr e))
(e (car e))
(t (make-tvar (gensym 'kt)))
(r (make-rvar (gensym 'rkt))))
(do* (((e* t*) (loop e*))
((e _) (require-type e env `(vec ,r ,t))))
(return (cons e e*)
(cons (list r t) t*)))))))
((b t) (infer-expr b (append
(map (lambda (x t) (cons x (cadr t))) x t*)
env))))
(let ((r (make-rvar (gensym 'rk))))
(return `(kernel-r (vec ,r ,t) ,r
(((,x ,(map cadr t*))
(,e (vec . ,t*))) ...)
,b)
`(vec ,r ,t)))))
((kernel-r ,r ((,x ,e) ...) ,b)
(do* (((e t*) (let loop ((e e))
(if (null? e)
(return '() '())
(let ((e* (cdr e))
(e (car e))
(t (make-tvar (gensym 'kt)))
(r (make-rvar (gensym 'rkt))))
(do* (((e* t*) (loop e*))
((e _) (require-type e env `(vec ,r ,t))))
(return (cons e e*)
(cons (list r t) t*)))))))
((b t) (infer-expr b (append
(map (lambda (x t) (cons x (cadr t))) x t*)
env))))
(return `(kernel-r (vec ,r ,t) ,r
(((,x ,(map cadr t*))
(,e (vec . ,t*))) ...)
,b)
`(vec ,r ,t))))
((call ,f ,e* ...) (guard (ident? f))
(let ((t (make-tvar (gensym 'rt)))
(ft (lookup f env)))
(do* (((e* t*) (infer-expr* e* env))
((_ __) (require-type `(var ,f) env `(fn ,t* -> ,t))))
(return `(call (var (fn ,t* -> ,t) ,f) ,e* ...) t))))
((invoke ,rator ,rand* ...)
(let ((t (gen-tvar 'invoke))
(r (gen-rvar 'invoke)))
(do* (((rand* randt*) (infer-expr* rand* env))
((rator fty) (require-type rator env
`(closure ,r ,randt* -> ,t))))
(return `(invoke ,rator . ,rand*) t))))
((do ,e)
(do* (((e t) (infer-expr e env)))
(return `(do ,e) t)))
((match ,e
((,tag ,x* ...) ,e*) ...)
(match (lookup-type-tags tag env)
((,te . ,typedef)
(do* (((e _) (require-type e env te))
((e* t)
(let check-arms ((tag tag)
(x* x*)
(e* e*)
(typedef typedef))
(match `(,tag ,x* ,e*)
(((,tag . ,tag*) (,x* . ,x**) (,e* . ,e**))
(let-values (((constructor rest)
(partition (lambda (x)
(eq? (car x) tag))
typedef)))
(match constructor
(((,_ ,t* ...))
(do* (((e**^ t) (check-arms tag* x** e** rest))
((e^ _) (require-type e* (append
(map cons x* t*)
env)
t)))
(return (cons e^ e**^) t))))))
((() () ()) (return '() (make-tvar (gensym 'tmatch))))))))
(return `(match ,t ,e ((,tag ,x* ...) ,e*) ...) t)))))
((error! ,s) (guard (string? s))
(return `(error! ,s) (gen-tvar 'error!)))
)))
(define infer-body infer-expr)
(define (add-region-vars-to-type end adt-graph)
(lambda (t*)
(match t*
((vec ,[t])
`(vec ,@end ,t))
((vec ,r ,[t])
(begin
(display "Warning, in type \n")
(display t*)
(display " there was already a region parameter. Replacing with \n")
(display end)
(newline)
`(vec ,@end ,t)))
((closure (,[t*] ...) -> ,[t])
`(closure ,@end ,t* -> ,t))
((adt ,t^) (guard (recursive-adt? t^ adt-graph))
`(adt ,t^ . ,end))
(display else))
else)))))
(define (make-top-level-env decls adt-graph)
(append
(apply append
(map (lambda (d)
(match d
((fn ,name (,[make-tvar -> var*] ...) ,body)
`((,name fn (,var* ...) -> ,(make-tvar name))))
((define-datatype ,t
(,c ,t* ...) ...)
(let* ((end (if (recursive-adt? t adt-graph)
(list (make-rvar (gensym t)))
'()))
(t* (map (lambda (t*)
(map (add-region-vars-to-type end
adt-graph)
t*))
t*)))
`((,type-tag (adt ,t . ,end) (,c ,t* ...) ...)
(,c fn (,t* ...)
-> ,(map (lambda (_) `(adt ,t . ,end)) c)) ...)))
((extern ,name . ,t)
(list (cons name (cons 'fn t))))))
decls))
'((harlan_sqrt fn (float) -> float)
(floor fn (float) -> float)
(atan2 fn (float float) -> float))))
(define (recursive-adt? name graph)
(let loop ((path (list name)))
(let ((node (assq (car path) graph)))
(if node
(ormap (lambda (n)
(or (memq n path)
(loop (cons n path))))
(cdr node))
#f))))
(define (make-adt-graph decl*)
(apply append
(map (lambda (d)
(match d
((define-datatype ,t (,c ,t* ...) ...)
`((,t . ,(apply union
(map
(lambda (t^)
(map (lambda (t^)
(match t^
((vec . ,_) t)
((closure . ,_) t)
((adt ,t) t)
(,else else)))
t^))
t*)))))
(,else '())))
decl*)))
(define (infer-module m)
(match m
((module . ,decls)
(let* ((adt-graph (make-adt-graph decls))
(env (make-top-level-env decls adt-graph)))
(infer-decls decls env adt-graph)))))
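;; infer-decls walks the declarations, threading the substitution produced
;; while checking each one; infer-decl dispatches on the declaration form
;; (extern, define-datatype, fn).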
(define (infer-decls decls env adt-graph)
(match decls
(() (values '() '()))
((,d . ,d*)
(let-values (((d* s) (infer-decls d* env adt-graph)))
(let-values (((d s) (infer-decl d env s adt-graph)))
(values (cons d d*) s))))))
(define (infer-decl d env s adt-graph)
(match d
((extern . ,whatever)
(values `(extern . ,whatever) s))
((define-datatype ,t (,c ,t* ...) ...)
(values
(if (recursive-adt? t adt-graph)
(let* ((r (make-rvar (gensym t)))
(t* (map (lambda (t*)
(map (lambda (t*)
(match t*
((closure (,[t*] ...) -> ,[t])
`(closure ,r ,t* -> ,t))
((adt ,t^)
(guard (recursive-adt? t^ adt-graph))
`(adt ,t^ ,r))
(,else else))) t*))
t*)))
`(define-datatype (,t ,r) (,c ,t* ...)))
`(define-datatype ,t (,c ,t* ...)))
s))
((fn ,name (,var* ...) ,body)
(match (lookup name env)
((fn (,t* ...) -> ,t)
(let-values (((b t s)
((infer-body body (append (map cons var* t*) env))
body t s)))
(values
`(fn ,name (,var* ...) (fn (,t* ...) -> ,t) ,b)
s)))))))
(define (lookup x e)
(let ((t (assq x e)))
(if t
(cdr t)
(error 'lookup "Variable not found" x e))))
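;; lookup-type-tags searches the environment for the ADT whose constructor
;; tags match the tags used in a match expression, returning the ADT type
;; together with each constructor's field types.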
(define (lookup-type-tags tags e)
(match e
(()
(error 'lookup-type-tags "Could not find type from constructors" tags))
(((,tag (adt ,name . ,end) (,tag* . ,t) ...) . ,rest)
(guard (and (eq? tag type-tag)
(set-equal? tags tag*)))
`((adt ,name . ,end) (,tag* . ,t) ...))
((,e . ,e*) (lookup-type-tags tags e*))))
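;; After inference, ground-module applies the final substitution `s' to the
;; whole module: ground-type resolves type variables (warning and defaulting
;; to int when one is still free) and turns region variables into plain
;; names, while ground-decl wraps each function body in a let-region that
;; binds the regions not already present in the function's type.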
(define (ground-module m s)
(if (verbosity? trace-pass-verbosity-level)
(begin (pretty-print m) (newline)
(pretty-print s) (newline)))
(match m
((module ,[(lambda (d) (ground-decl d s)) -> decl*] ...)
`(module ,decl* ...))))
(define (ground-decl d s)
(match d
((extern . ,whatever) `(extern . ,whatever))
((define-datatype (,t ,r) (,c ,t* ...) ...)
`(define-datatype (,t ,(rvar-name r))
. ,(car (map (lambda (c t*)
(map (lambda (c t*)
`(,c . ,(map (lambda (t) (ground-type t s)) t*)))
c t*)) c t*))))
((define-datatype ,t (,c ,t* ...) ...)
`(define-datatype ,t
. ,(car (map (lambda (c t*)
(map (lambda (c t*)
`(,c . ,(map (lambda (t) (ground-type t s)) t*)))
c t*)) c t*))))
((fn ,name (,var ...)
,[(lambda (t) (ground-type t s)) -> t]
,[(lambda (e) (ground-expr e s)) -> body])
(let* ((region-params (free-regions-type t))
(body-regions (free-regions-expr body))
(local-regions (difference body-regions region-params)))
`(fn ,name (,var ...) ,t (let-region ,local-regions ,body))))))
(define (region-name r)
(if (rvar? r)
(rvar-name r)
r))
(define (ground-type t s)
(let ((t (walk-type t s)))
(if (tvar? t)
(let ((t^ (assq t s)))
(if t^
(case (cdr t^)
((Numeric) 'int))
(begin
(display "Warning: free type variable: ")
(display t)
(newline)
(display "Defaulting to type int.\n")
'int)))
(match t
(,prim (guard (symbol? prim)) prim)
((vec ,r ,t) `(vec ,(region-name r) ,(ground-type t s)))
((ptr ,t) `(ptr ,(ground-type t s)))
((adt ,t) `(adt ,(ground-type t s)))
((adt ,t ,r) `(adt ,(ground-type t s) ,(region-name r)))
((closure ,r (,[(lambda (t) (ground-type t s)) -> t*] ...) -> ,t)
`(closure ,(region-name r) ,t* -> ,(ground-type t s)))
((fn (,[(lambda (t) (ground-type t s)) -> t*] ...) -> ,t)
`(fn ,t* -> ,(ground-type t s)))
(,else (error 'ground-type "unsupported type" else))))))
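;; ground-expr rewrites an expression, applying ground-type to every type
;; annotation it carries and resolving the regions recorded on iota-r and
;; kernel-r forms through the substitution.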
(define (ground-expr e s)
(let ((ground-type (lambda (t) (ground-type t s))))
(match e
((int ,n) `(int ,n))
((float ,f) `(float ,f))
((num ,n)
(if (< n #x100000000)
`(int ,n)
`(u64 ,n)))
((char ,c) `(char ,c))
((str ,s) `(str ,s))
((bool ,b) `(bool ,b))
((var ,[ground-type -> t] ,x) `(var ,t ,x))
((int->float ,[e]) `(int->float ,e))
((float->int ,[e]) `(float->int ,e))
((,op ,[ground-type -> t] ,[e1] ,[e2])
(guard (or (relop? op) (binop? op)))
`(,op ,t ,e1 ,e2))
((print ,[ground-type -> t] ,[e]) `(print ,t ,e))
((print ,[ground-type -> t] ,[e] ,[f]) `(print ,t ,e ,f))
((println ,[ground-type -> t] ,[e]) `(println ,t ,e))
((assert ,[e]) `(assert ,e))
((iota-r ,r ,[e]) `(iota-r ,(region-name (walk r s)) ,e))
((iota ,[e]) `(iota ,e))
((make-vector ,[ground-type -> t] ,[len] ,[val])
`(make-vector ,t ,len ,val))
((lambda ,[ground-type -> t0] ((,x ,[ground-type -> t]) ...) ,[b])
`(lambda ,t0 ((,x ,t) ...) ,b))
((let ((,x ,[ground-type -> t] ,[e]) ...) ,[b])
`(let ((,x ,t ,e) ...) ,b))
((for (,x ,[start] ,[end] ,[step]) ,[body])
`(for (,x ,start ,end ,step) ,body))
((while ,[t] ,[b]) `(while ,t ,b))
((vector ,[ground-type -> t] ,[e*] ...)
`(vector ,t ,e* ...))
((length ,[e]) `(length ,e))
((vector-ref ,[ground-type -> t] ,[v] ,[i])
`(vector-ref ,t ,v ,i))
((cast ,[ground-type -> t] ,[e])
`(cast ,t ,e))
((unsafe-vector-ref ,[ground-type -> t] ,[v] ,[i])
`(unsafe-vector-ref ,t ,v ,i))
((unsafe-vec-ptr ,[ground-type -> t] ,[v])
`(unsafe-vec-ptr ,t ,v))
((kernel-r ,[ground-type -> t] ,r
(((,x ,[ground-type -> ta*]) (,[e] ,[ground-type -> ta**])) ...)
,[b])
`(kernel-r ,t ,(region-name (walk r s))
(((,x ,ta*) (,e ,ta**)) ...) ,b))
((reduce ,[ground-type -> t] + ,[e]) `(reduce ,t + ,e))
((set! ,[x] ,[e]) `(set! ,x ,e))
((begin ,[e*] ...) `(begin ,e* ...))
((if ,[t] ,[c] ,[a]) `(if ,t ,c ,a))
((if ,[t] ,[c]) `(if ,t ,c))
((return) `(return))
((return ,[e]) `(return ,e))
((call ,[f] ,[e*] ...) `(call ,f ,e* ...))
((invoke ,[rator] ,[rand*] ...) `(invoke ,rator . ,rand*))
((do ,[e]) `(do ,e))
((let-region (,r* ...) ,[e]) `(let-region (,r* ...) ,e))
((match ,[ground-type -> t] ,[e]
((,tag . ,x) ,[e*]) ...)
`(match ,t ,e ((,tag . ,x) ,e*) ...))
((error! ,s) `(error! ,s))
(,else (error 'ground-expr "Unrecognized expression" else)))))
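;; free-regions-expr and free-regions-type collect the region variables that
;; occur free in an expression or type; ground-decl uses the difference
;; between a body's regions and its type's regions to decide which regions
;; to introduce locally with let-region.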
(define-match free-regions-expr
((var ,[free-regions-type -> t] ,x) t)
((int ,n) '())
((u64 ,n) '())
((float ,f) '())
((char ,c) '())
((bool ,b) '())
((str ,s) '())
((int->float ,[e]) e)
((float->int ,[e]) e)
((assert ,[e]) e)
((print ,[free-regions-type -> t] ,[e]) (union t e))
((print ,[free-regions-type -> t] ,[e] ,[f]) (union t e f))
((println ,[free-regions-type -> t] ,[e]) (union t e))
((,op ,[free-regions-type -> t] ,[rhs] ,[lhs])
(guard (or (binop? op) (relop? op)))
(union t lhs rhs))
((vector ,[free-regions-type -> t] ,[e*] ...)
(union t (apply union e*)))
((length ,[e]) e)
((cast ,[free-regions-type -> t] ,[e])
(union t e))
((vector-ref ,[free-regions-type -> t] ,[x] ,[i]) (union t x i))
((unsafe-vector-ref ,[free-regions-type -> t] ,[x] ,[i]) (union t x i))
((unsafe-vec-ptr ,[free-regions-type -> t] ,[v])
(union t v))
((iota-r ,r ,[e]) (set-add e r))
((make-vector ,[free-regions-type -> t] ,[len] ,[val])
(union t len val))
((kernel-r ,[free-regions-type -> t] ,r
(((,x ,[free-regions-type -> t*]) (,[xs] ,[free-regions-type -> ts*]))
...)
,[b])
(set-add (union b t (apply union (append t* ts* xs))) r))
((reduce ,[free-regions-type -> t] ,op ,[e]) (union t e))
((set! ,[x] ,[e]) (union x e))
((begin ,[e*] ...) (apply union e*))
((lambda ,[free-regions-type -> t0]
((,x ,[free-regions-type -> t]) ...) ,b)
(apply union t0 t))
((let ((,x ,[free-regions-type -> t] ,[e]) ...) ,[b])
(union b (apply union (append t e))))
((for (,x ,[start] ,[end] ,[step]) ,[body])
(union start end step body))
((while ,[t] ,[e]) (union t e))
((if ,[t] ,[c] ,[a]) (union t c a))
((if ,[t] ,[c]) (union t c))
((call ,[e*] ...) (apply union e*))
((invoke ,[e*] ...) (apply union e*))
((do ,[e]) e)
((let-region (,r* ...) ,[e])
(difference e r*))
((match ,[free-regions-type -> t] ,[e]
(,p ,[e*]) ...)
(apply union `(,t ,e . ,e*)))
((return) '())
((return ,[e]) e)
((error! ,s) '()))
(define-match free-regions-type
(,x (guard (tvar? x)) '())
((vec ,r ,[t]) (set-add t r))
((adt ,[t] ,r) (set-add t r))
((adt ,[t]) t)
((closure ,r (,[t*] ...) -> ,[t])
(set-add (apply union t t*) r))
((fn (,[t*] ...) -> ,[t]) (union t (apply union t*)))
((ptr ,[t]) t)
((box ,r ,t) (list r))
(() '())
(,else (guard (symbol? else)) '()))
)
|
96f2c31b256a683cda510b4bcd997303dcaf6e1faaf7a30fd136f3f4383dc8c6 | jeromesimeon/Galax | error.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : error.mli , v 1.40 2007/08/01 18:06:30 simeon Exp $
Module : Error
Description :
This module deals with error handling in Galax .
Description:
This module deals with error handling in Galax.
*)
type error =
Lexing
| Lexing of Finfo.finfo * string
(* Parsing *)
| Parsing of Finfo.finfo * string
| Algebra_Parsing_Error of string
| Namespace_Internal of string
| Namespace_Error of string
(* Normalization *)
| Static_Error of string
| Static_Internal of string
| Module_Import of string
| Annotation_Error of string
(* Types *)
| Malformed_Type of string
| Malformed_Tuple of string
| Malformed_Expr of string
| Malformed_Core_Expr of string
| Malformed_Algebra_Expr of string
Static Typing
| Static_Type_Error of string
| Automata of string
| Undefined_Variable of Finfo.finfo * string * string
(* Rewriting *)
| Rewriting of string
| Streaming_XPath of string
(* Factorization *)
| Factorization of string
(* Compilation *)
| Compilation of string
| Symbol_Already_Defined of (string * string)
(* Optimization *)
| Optimization of string
(* Code Selection *)
| Code_Selection of string
| Expr_Error of string
| Key_Error of (string * string)
| KeyRef_Error of (string * string)
| Physical_Type_Error of string
(* Evaluation *)
| Constructor_Error of string
| Type_Error of string
| Unicode_Error of string
| Validation of string
(* Schema Normalization *)
| Schema of string
| Schema_Internal of string
| Schema_Import of string
(* Serialization *)
| Serialization of string
Data Model / Loading
| Datamodel of string
| URI_Error of string
| Load_Error of string
| Cast_Error of string
| Protocol_Error of string
| Stream_Error of string
| Cursor_Error of string
| Physical_DM_Error of string
| Malformed_DateTimeValue of string
| Jungle_Error of string
| Shredded_Error of string
(* Projection *)
| Projection of string
(* WSDL *)
| Root
Toplevel tools
| Toplevel_Error of string
| Monitor_Error of string
Multiple Modules
| Parameter_Mismatch of string (* Norm, Eval *)
| Unknown of string
| Internal_Error of string
| Wrong_Args of string (* PhysicalDM, Code Selection *)
| Prototype of string
Namespace , Parsing , Code Selection
| Mapping_Failure of string (* Normalization, Namespaces *)
DM , Code Selection
| DXQ_Error of string (* Distributed XQuery *)
(* Testing *)
| Testing_Error of string
(* Top-level error resulting from downgrade_error *)
| Error of string
(* XQueryX errors *)
| XQueryX_Error of Finfo.finfo * string
Generic error with file location -- used to wrap internal error with a file location
| Wrapped_Error of Finfo.finfo * string
exception Query of error
val printf_warning : string -> unit
val eprintf_warning : string -> unit
val bprintf_warning : string -> string
val printf_error : string -> exn -> unit
val eprintf_error : string -> exn -> unit
val bprintf_error : string -> exn -> string
val printf_error_safe : string -> exn -> unit
val eprintf_error_safe : string -> exn -> unit
val bprintf_error_safe : string -> exn -> string
val error_with_file_location : Finfo.finfo -> exn -> exn
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/base/error.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Parsing
Normalization
Types
Rewriting
Factorization
Compilation
Optimization
Code Selection
Evaluation
Schema Normalization
Serialization
Projection
WSDL
Norm, Eval
PhysicalDM, Code Selection
Normalization, Namespaces
Distributed XQuery
Testing
Top-level error resulting from downgrade_error
XQueryX errors | Copyright 2001 - 2007 .
$ I d : error.mli , v 1.40 2007/08/01 18:06:30 simeon Exp $
Module : Error
Description :
This module deals with error handling in Galax .
Description:
This module deals with error handling in Galax.
*)
type error =
Lexing
| Lexing of Finfo.finfo * string
| Parsing of Finfo.finfo * string
| Algebra_Parsing_Error of string
| Namespace_Internal of string
| Namespace_Error of string
| Static_Error of string
| Static_Internal of string
| Module_Import of string
| Annotation_Error of string
| Malformed_Type of string
| Malformed_Tuple of string
| Malformed_Expr of string
| Malformed_Core_Expr of string
| Malformed_Algebra_Expr of string
Static Typing
| Static_Type_Error of string
| Automata of string
| Undefined_Variable of Finfo.finfo * string * string
| Rewriting of string
| Streaming_XPath of string
| Factorization of string
| Compilation of string
| Symbol_Already_Defined of (string * string)
| Optimization of string
| Code_Selection of string
| Expr_Error of string
| Key_Error of (string * string)
| KeyRef_Error of (string * string)
| Physical_Type_Error of string
| Constructor_Error of string
| Type_Error of string
| Unicode_Error of string
| Validation of string
| Schema of string
| Schema_Internal of string
| Schema_Import of string
| Serialization of string
Data Model / Loading
| Datamodel of string
| URI_Error of string
| Load_Error of string
| Cast_Error of string
| Protocol_Error of string
| Stream_Error of string
| Cursor_Error of string
| Physical_DM_Error of string
| Malformed_DateTimeValue of string
| Jungle_Error of string
| Shredded_Error of string
| Projection of string
| Root
Toplevel tools
| Toplevel_Error of string
| Monitor_Error of string
Multiple Modules
| Unknown of string
| Internal_Error of string
| Prototype of string
Namespace , Parsing , Code Selection
DM , Code Selection
| Testing_Error of string
| Error of string
| XQueryX_Error of Finfo.finfo * string
Generic error with file location -- used to wrap internal error with a file location
| Wrapped_Error of Finfo.finfo * string
exception Query of error
val printf_warning : string -> unit
val eprintf_warning : string -> unit
val bprintf_warning : string -> string
val printf_error : string -> exn -> unit
val eprintf_error : string -> exn -> unit
val bprintf_error : string -> exn -> string
val printf_error_safe : string -> exn -> unit
val eprintf_error_safe : string -> exn -> unit
val bprintf_error_safe : string -> exn -> string
val error_with_file_location : Finfo.finfo -> exn -> exn
|
e6eeb2f4bbb50918d83e596d1d10f7e7b7ee11bc12cc9476e70212d21853ac60 | sharplispers/montezuma | index.lisp | (in-package #:montezuma)
(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter *valid-index-options*
'(:path
:create-if-missing-p
:create-p
:default-field
:id-field
:default-search-field
:analyzer
:directory
:close-directory-p
:occur-default
:wild-lower-p
:fields
:default-slop
:key
:use-compound-file-p
:handle-parse-errors-p
:auto-flush-p
:merge-factor
:min-merge-docs
:max-merge-docs
:info-stream)))
(defun index-options-list-p (list)
(do ((options list (cddr options)))
((endp options) T)
(when (not (member (car options) *valid-index-options*))
(return-from index-options-list-p NIL))))
(deftype index-option () `(member ,@*valid-index-options*))
(deftype index-options-list () '(satisfies index-options-list-p))
(defun get-index-option (options option &optional default)
(check-type option index-option)
(getf options option default))
(define-setf-expander get-index-option (place option &environment env)
(multiple-value-bind (vars vals store-vars writer-form reader-form)
(get-setf-expansion place env)
(declare (ignore writer-form))
(let ((goption (gensym "OPTION"))
(gstore (if store-vars (car store-vars) (gensym "STORE"))))
(values
(list* goption vars)
(list* option vals)
(list gstore)
`(progn
(check-type ,goption index-option)
(setf (getf ,reader-form ,goption) ,(car store-vars)))
`(getf ,goption ,reader-form)))))
(defclass index ()
((key)
(dir)
(has-writes-p :initform NIL)
(reader :initform nil)
(writer)
(close-dir-p)
(auto-flush-p)
(default-search-field)
(default-field)
(analyzer :reader analyzer)
(searcher :initform nil)
(open-p :initform T)
(options)
(qp :initform nil)))
(defmethod initialize-instance :after ((self index) &rest args &key &allow-other-keys)
(with-slots (options) self
(check-type args index-options-list)
(setf options (copy-list args))
(setf (get-index-option options :default-field)
(if (get-index-option options :default-field)
(string (get-index-option options :default-field))
""))
(setf (get-index-option options :default-search-field)
(or (get-index-option options :default-search-field)
(get-index-option options :default-field)
"*"))
(setf (get-index-option options :create-if-missing-p)
(get-index-option options :create-if-missing-p T))
FIXME : I do n't flatten the : key option , I 'm not sure why Ferret does .
(with-slots (key dir options close-dir-p auto-flush-p create-p analyzer writer
default-search-field default-field) self
(setf key (get-index-option options :key))
(cond ((get-index-option options :path)
(handler-case
(setf dir (make-fs-directory (get-index-option options :path)
:create-p (get-index-option options :create-p)))
(error () (setf dir (make-fs-directory (get-index-option options :path)
:create-p (get-index-option options :create-if-missing-p)))))
(setf (get-index-option options :close-directory-p) T))
((get-index-option options :directory)
(setf dir (get-index-option options :directory)))
(T
(setf (get-index-option options :create-p) T)
(setf dir (make-instance 'ram-directory))))
;; Create the index if need be
(setf writer (apply #'make-instance 'index-writer
:directory dir
options))
(setf (get-index-option options :analyzer) (setf analyzer (analyzer writer)))
(close writer)
(setf writer nil)
Only want to create the first time , if at all .
(setf (get-index-option options :create-p) NIL)
(setf close-dir-p (get-index-option options :close-directory-p))
(setf (get-index-option options :close-directory-p) NIL)
(setf auto-flush-p (get-index-option options :auto-flush-p))
(setf default-search-field (or (get-index-option options :default-search-field)
(get-index-option options :default-field)
"*"))
(setf default-field (or (get-index-option options :default-field) ""))
(when (not (get-index-option options :handle-parse-errors-p))
(setf (get-index-option options :handle-parse-errors-p) T)))))
(defmethod close ((self index))
(with-slots (open-p reader writer dir) self
(when (not open-p)
(error "Tried to close an already closed directory."))
(when reader (close reader))
(when writer (close writer))
(close dir)
(setf open-p NIL)))
(defgeneric reader (index))
(defmethod reader ((self index))
(ensure-reader-open self)
(slot-value self 'reader))
(defgeneric searcher (index))
(defmethod searcher ((self index))
(ensure-searcher-open self)
(slot-value self 'searcher))
(defgeneric writer (index))
(defmethod writer ((self index))
(ensure-writer-open self)
(slot-value self 'writer))
(defmethod document-count ((self index))
(with-slots (writer) self
(document-count writer)))
(defgeneric add-document-to-index (index doc &optional analyzer))
(defmethod add-document-to-index ((self index) doc &optional analyzer)
(let ((fdoc nil)
(default-field (slot-value self 'default-field)))
(when (listp doc)
;; Turn association lists into something we can treat like any
;; other table (including hash tables).
(setf doc (convert-alist-to-table doc)))
(cond ((stringp doc)
(setf fdoc (make-instance 'document))
(add-field fdoc (make-field default-field doc
:stored T :index :tokenized)))
((typep doc 'array)
(setf fdoc (make-instance 'document))
(dosequence (field doc)
(add-field fdoc (make-field default-field field
:stored T :index :tokenized))))
((table-like-p doc)
(setf fdoc (make-instance 'document))
(dolist (field (table-keys doc))
(let ((text (table-value doc field)))
(add-field fdoc (make-field (string field) (stringify text)
:stored T :index :tokenized)))))
((typep doc 'document)
(setf fdoc doc))
(T
(error "Unknown document type ~S" doc)))
;; Delete existing documents with the same key.
(let ((key (slot-value self 'key)))
(when key
(let ((query (inject key (make-instance 'boolean-query)
#'(lambda (query field)
(add-query query
(make-instance 'term-query
:term (make-term field (get-field fdoc field)))
:must-occur)))))
(query-delete self query))))
(let ((writer (writer self)))
(setf (slot-value self 'has-writes-p) T)
(add-document-to-index-writer (slot-value self 'writer) fdoc
(if analyzer analyzer (analyzer writer)))
(when (slot-value self 'auto-flush-p)
(flush self)))))
;; The main search method for the index. You need to create a query to
pass to this method . You can also pass a hash with one or more of
the following ; { filter , num_docs , first_doc , sort }
;;
;; query:: The query to run on the index
;; filter:: Filters docs from the search result
first_doc : : The index in the results of the first doc retrieved .
;; Default is 0
num_docs : : The number of results returned . Default is 10
sort : : An array of SortFields describing how to sort the results .
(defgeneric search (index query &rest options))
(defmethod search ((self index) query &rest options)
(do-search self query options))
(defgeneric search-each (index query fn &optional options))
(defmethod search-each ((self index) query fn &optional options)
(let ((hits (do-search self query options)))
(dosequence (score-doc (score-docs hits))
(funcall fn (doc score-doc) (score score-doc)))
(total-hits hits)))
(defmethod get-document ((self index) id)
(let ((reader (reader self)))
(cond ((stringp id)
(get-document-with-term reader (make-term "id" id)))
((typep id 'term)
(get-document-with-term reader id))
(T
(get-document reader id)))))
(defmethod delete-document ((self index) id)
(let ((reader (reader self)))
(let ((count (cond ((stringp id)
(delete-docs-with-term reader (make-term "id" id)))
((typep id 'term)
(delete-docs-with-term reader id))
((integerp id)
(delete-document reader id))
(T
(error "Can't delete for id ~S" id)))))
(when (slot-value self 'auto-flush-p)
(flush self))
count)))
(defgeneric query-delete (index query))
(defmethod query-delete ((self index) query)
(let ((reader (reader self))
(searcher (searcher self))
(query (process-query self query)))
(search-each searcher query
#'(lambda (doc score)
(declare (ignore score))
(delete-document reader doc)))
(when (slot-value self 'auto-flush-p)
(flush self))))
(defmethod deleted-p ((self index) n)
(deleted-p (reader self) n))
(defgeneric update (index id new-val))
(defmethod update ((self index) id new-val)
(with-slots (options) self
(cond ((stringp id)
;; FIXME: how about using a pre-parsed form of query?
(query-update self (format nil "id:~A" id) new-val))
((typep id 'term)
(query-update self
(make-instance 'term-query
:term id)
new-val))
((integerp id)
(let ((reader (reader self))
(document (get-document self id)))
(when (listp new-val)
(setf new-val (convert-alist-to-table new-val)))
(cond ((table-like-p new-val)
(dolist (name (table-keys new-val))
(let ((content (table-value new-val name)))
(setf (document-values document name) (string content)))))
((typep new-val 'document)
(setf document new-val))
(T
(setf (document-values document (get-index-option options :default-field))
(string new-val))))
(delete-document reader id)
(let ((writer (writer self)))
(add-document-to-index-writer writer document))))
(T
(error "Cannot update for id ~S" id)))
(when (slot-value self 'auto-flush-p)
(flush self))))
(defgeneric query-update (index query new-val))
(defmethod query-update ((self index) query new-val)
(let ((searcher (searcher self))
(reader (reader self))
(docs-to-add '())
(query (process-query self query)))
(search-each searcher query
#'(lambda (id score)
(declare (ignore score))
(let ((document (get-document self id)))
(when (listp new-val)
(setf new-val (convert-alist-to-table new-val)))
(cond ((table-like-p new-val)
(dolist (name (table-keys new-val))
(let ((content (table-value new-val name)))
(setf (document-values document name) (string content)))))
((typep new-val 'document)
(setf document new-val))
(T
(setf (document-values document (get-index-option (slot-value self 'options) :default-field))
(string new-val))))
(push document docs-to-add)
(delete-document reader id))))
(let ((writer (writer self)))
(dolist (doc (reverse docs-to-add))
(add-document-to-index-writer writer doc))
(when (slot-value self 'auto-flush-p)
(flush self)))))
(defmethod has-deletions-p ((self index))
(has-deletions-p (reader self)))
(defgeneric has-writes (index))
(defmethod has-writes ((self index))
(slot-value self 'has-writes))
(defmethod flush ((self index))
(with-slots (reader writer searcher) self
(when reader (close reader))
(when writer (close writer))
(setf reader nil
writer nil
searcher nil)))
(defmethod optimize ((self index))
(optimize (writer self))
(flush self))
(defmethod size ((self index))
(num-docs (reader self)))
(defmethod add-indexes ((self index) &rest indexes)
(let ((indexes indexes))
(when (> (length indexes) 0)
(when (typep (elt indexes 0) 'index)
(setf indexes (map 'vector #'reader indexes)))
(cond ((typep (elt indexes 0) 'index-reader)
(let ((reader (reader self)))
(setf indexes (remove reader indexes)))
(add-indexes-readers (writer self) indexes))
((typep (elt indexes 0) 'directory)
(setf indexes (remove (slot-value self 'dir) indexes))
(apply #'add-indexes (writer self) indexes))
(T
(error "Unknown index type ~S when trying to merge indexes." (elt indexes 0)))))))
(defgeneric persist (index directory &key create-p))
(defmethod persist ((self index) directory &key (create-p T))
(flush self)
(with-slots (dir options) self
(let ((old-dir dir))
(etypecase directory
((or string pathname)
(setf dir (make-fs-directory directory :create-p create-p))
(setf (get-index-option options :close-directory-p) T))
(directory
(setf dir directory)))
(ensure-writer-open self)
(add-indexes (writer self) old-dir))))
(defgeneric ensure-writer-open (index))
(defmethod ensure-writer-open ((self index))
(with-slots (open-p writer reader dir options searcher) self
(unless open-p
(error "Tried to use a closed index."))
(unless writer
(when reader
(close reader)
(setf reader nil
searcher nil))
(setf writer (apply #'make-instance 'index-writer
:directory dir
options)))))
(defgeneric ensure-reader-open (index))
(defmethod ensure-reader-open ((self index))
(with-slots (open-p writer reader dir) self
(unless open-p
(error "Tried to use a closed index."))
(if reader
(if (not (latest-p reader))
(setf reader (open-index-reader dir :close-directory-p NIL))
NIL)
(progn
(when writer
(close writer)
(setf writer nil))
(setf reader (open-index-reader dir :close-directory-p NIL))))))
(defgeneric ensure-searcher-open (index))
(defmethod ensure-searcher-open ((self index))
(with-slots (open-p searcher reader) self
(unless open-p
(error "Tried to use a closed index."))
(when (or (ensure-reader-open self) (not searcher))
(setf searcher (make-instance 'index-searcher
:reader reader)))))
(defgeneric do-search (index query options))
(defmethod do-search ((self index) query options)
(let ((searcher (searcher self))
(query (process-query self query)))
(apply #'search searcher query options)))
(defgeneric process-query (index query))
(defmethod process-query ((self index) query)
(if (stringp query)
(with-slots (qp default-search-field options reader) self
(unless qp
(setf qp (apply #'make-instance 'query-parser
:allow-other-keys T
:default-field default-search-field
options)))
;; We need to set this every time, in case a new field has
;; been added.
(unless (get-index-option options :fields)
(setf (fields qp) (coerce (get-field-names reader) 'vector)))
(parse qp query))
query))
(defun stringify (x)
(etypecase x
(string x)
((or character symbol) (string x))
(integer (format nil "~A" x))))
| null | https://raw.githubusercontent.com/sharplispers/montezuma/ee2129eece7065760de4ebbaeffaadcb27644738/src/index/index.lisp | lisp | Create the index if need be
Turn association lists into something we can treat like any
other table (including hash tables).
Delete existing documents with the same key.
The main search method for the index. You need to create a query to
{ filter , num_docs , first_doc , sort }
query:: The query to run on the index
filter:: Filters docs from the search result
Default is 0
FIXME: how about using a pre-parsed form of query?
We need to set this every time, in case a new field has
been added. | (in-package #:montezuma)
(eval-when (:compile-toplevel :load-toplevel :execute)
(defparameter *valid-index-options*
'(:path
:create-if-missing-p
:create-p
:default-field
:id-field
:default-search-field
:analyzer
:directory
:close-directory-p
:occur-default
:wild-lower-p
:fields
:default-slop
:key
:use-compound-file-p
:handle-parse-errors-p
:auto-flush-p
:merge-factor
:min-merge-docs
:max-merge-docs
:info-stream)))
(defun index-options-list-p (list)
(do ((options list (cddr options)))
((endp options) T)
(when (not (member (car options) *valid-index-options*))
(return-from index-options-list-p NIL))))
(deftype index-option () `(member ,@*valid-index-options*))
(deftype index-options-list () '(satisfies index-options-list-p))
(defun get-index-option (options option &optional default)
(check-type option index-option)
(getf options option default))
(define-setf-expander get-index-option (place option &environment env)
(multiple-value-bind (vars vals store-vars writer-form reader-form)
(get-setf-expansion place env)
(declare (ignore writer-form))
(let ((goption (gensym "OPTION"))
(gstore (if store-vars (car store-vars) (gensym "STORE"))))
(values
(list* goption vars)
(list* option vals)
(list gstore)
`(progn
(check-type ,goption index-option)
(setf (getf ,reader-form ,goption) ,(car store-vars)))
`(getf ,goption ,reader-form)))))
(defclass index ()
((key)
(dir)
(has-writes-p :initform NIL)
(reader :initform nil)
(writer)
(close-dir-p)
(auto-flush-p)
(default-search-field)
(default-field)
(analyzer :reader analyzer)
(searcher :initform nil)
(open-p :initform T)
(options)
(qp :initform nil)))
(defmethod initialize-instance :after ((self index) &rest args &key &allow-other-keys)
(with-slots (options) self
(check-type args index-options-list)
(setf options (copy-list args))
(setf (get-index-option options :default-field)
(if (get-index-option options :default-field)
(string (get-index-option options :default-field))
""))
(setf (get-index-option options :default-search-field)
(or (get-index-option options :default-search-field)
(get-index-option options :default-field)
"*"))
(setf (get-index-option options :create-if-missing-p)
(get-index-option options :create-if-missing-p T))
FIXME : I do n't flatten the : key option , I 'm not sure why Ferret does .
(with-slots (key dir options close-dir-p auto-flush-p create-p analyzer writer
default-search-field default-field) self
(setf key (get-index-option options :key))
(cond ((get-index-option options :path)
(handler-case
(setf dir (make-fs-directory (get-index-option options :path)
:create-p (get-index-option options :create-p)))
(error () (setf dir (make-fs-directory (get-index-option options :path)
:create-p (get-index-option options :create-if-missing-p)))))
(setf (get-index-option options :close-directory-p) T))
((get-index-option options :directory)
(setf dir (get-index-option options :directory)))
(T
(setf (get-index-option options :create-p) T)
(setf dir (make-instance 'ram-directory))))
(setf writer (apply #'make-instance 'index-writer
:directory dir
options))
(setf (get-index-option options :analyzer) (setf analyzer (analyzer writer)))
(close writer)
(setf writer nil)
Only want to create the first time , if at all .
(setf (get-index-option options :create-p) NIL)
(setf close-dir-p (get-index-option options :close-directory-p))
(setf (get-index-option options :close-directory-p) NIL)
(setf auto-flush-p (get-index-option options :auto-flush-p))
(setf default-search-field (or (get-index-option options :default-search-field)
(get-index-option options :default-field)
"*"))
(setf default-field (or (get-index-option options :default-field) ""))
(when (not (get-index-option options :handle-parse-errors-p))
(setf (get-index-option options :handle-parse-errors-p) T)))))
(defmethod close ((self index))
(with-slots (open-p reader writer dir) self
(when (not open-p)
(error "Tried to close an already closed directory."))
(when reader (close reader))
(when writer (close writer))
(close dir)
(setf open-p NIL)))
(defgeneric reader (index))
(defmethod reader ((self index))
(ensure-reader-open self)
(slot-value self 'reader))
(defgeneric searcher (index))
(defmethod searcher ((self index))
(ensure-searcher-open self)
(slot-value self 'searcher))
(defgeneric writer (index))
(defmethod writer ((self index))
(ensure-writer-open self)
(slot-value self 'writer))
(defmethod document-count ((self index))
(with-slots (writer) self
(document-count writer)))
(defgeneric add-document-to-index (index doc &optional analyzer))
(defmethod add-document-to-index ((self index) doc &optional analyzer)
(let ((fdoc nil)
(default-field (slot-value self 'default-field)))
(when (listp doc)
(setf doc (convert-alist-to-table doc)))
(cond ((stringp doc)
(setf fdoc (make-instance 'document))
(add-field fdoc (make-field default-field doc
:stored T :index :tokenized)))
((typep doc 'array)
(setf fdoc (make-instance 'document))
(dosequence (field doc)
(add-field fdoc (make-field default-field field
:stored T :index :tokenized))))
((table-like-p doc)
(setf fdoc (make-instance 'document))
(dolist (field (table-keys doc))
(let ((text (table-value doc field)))
(add-field fdoc (make-field (string field) (stringify text)
:stored T :index :tokenized)))))
((typep doc 'document)
(setf fdoc doc))
(T
(error "Unknown document type ~S" doc)))
(let ((key (slot-value self 'key)))
(when key
(let ((query (inject key (make-instance 'boolean-query)
#'(lambda (query field)
(add-query query
(make-instance 'term-query
:term (make-term field (get-field fdoc field)))
:must-occur)))))
(query-delete self query))))
(let ((writer (writer self)))
(setf (slot-value self 'has-writes-p) T)
(add-document-to-index-writer (slot-value self 'writer) fdoc
(if analyzer analyzer (analyzer writer)))
(when (slot-value self 'auto-flush-p)
(flush self)))))
pass to this method . You can also pass a hash with one or more of
first_doc : : The index in the results of the first doc retrieved .
num_docs : : The number of results returned . Default is 10
sort : : An array of SortFields describing how to sort the results .
(defgeneric search (index query &rest options))
(defmethod search ((self index) query &rest options)
(do-search self query options))
(defgeneric search-each (index query fn &optional options))
(defmethod search-each ((self index) query fn &optional options)
(let ((hits (do-search self query options)))
(dosequence (score-doc (score-docs hits))
(funcall fn (doc score-doc) (score score-doc)))
(total-hits hits)))
(defmethod get-document ((self index) id)
(let ((reader (reader self)))
(cond ((stringp id)
(get-document-with-term reader (make-term "id" id)))
((typep id 'term)
(get-document-with-term reader id))
(T
(get-document reader id)))))
(defmethod delete-document ((self index) id)
(let ((reader (reader self)))
(let ((count (cond ((stringp id)
(delete-docs-with-term reader (make-term "id" id)))
((typep id 'term)
(delete-docs-with-term reader id))
((integerp id)
(delete-document reader id))
(T
(error "Can't delete for id ~S" id)))))
(when (slot-value self 'auto-flush-p)
(flush self))
count)))
(defgeneric query-delete (index query))
(defmethod query-delete ((self index) query)
(let ((reader (reader self))
(searcher (searcher self))
(query (process-query self query)))
(search-each searcher query
#'(lambda (doc score)
(declare (ignore score))
(delete-document reader doc)))
(when (slot-value self 'auto-flush-p)
(flush self))))
(defmethod deleted-p ((self index) n)
(deleted-p (reader self) n))
(defgeneric update (index id new-val))
(defmethod update ((self index) id new-val)
(with-slots (options) self
(cond ((stringp id)
(query-update self (format nil "id:~A" id) new-val))
((typep id 'term)
(query-update self
(make-instance 'term-query
:term id)
new-val))
((integerp id)
(let ((reader (reader self))
(document (get-document self id)))
(when (listp new-val)
(setf new-val (convert-alist-to-table new-val)))
(cond ((table-like-p new-val)
(dolist (name (table-keys new-val))
(let ((content (table-value new-val name)))
(setf (document-values document name) (string content)))))
((typep new-val 'document)
(setf document new-val))
(T
(setf (document-values document (get-index-option options :default-field))
(string new-val))))
(delete-document reader id)
(let ((writer (writer self)))
(add-document-to-index-writer writer document))))
(T
(error "Cannot update for id ~S" id)))
(when (slot-value self 'auto-flush-p)
(flush self))))
(defgeneric query-update (index query new-val))
(defmethod query-update ((self index) query new-val)
(let ((searcher (searcher self))
(reader (reader self))
(docs-to-add '())
(query (process-query self query)))
(search-each searcher query
#'(lambda (id score)
(declare (ignore score))
(let ((document (get-document self id)))
(when (listp new-val)
(setf new-val (convert-alist-to-table new-val)))
(cond ((table-like-p new-val)
(dolist (name (table-keys new-val))
(let ((content (table-value new-val name)))
(setf (document-values document name) (string content)))))
((typep new-val 'document)
(setf document new-val))
(T
(setf (document-values document (get-index-option (slot-value self 'options) :default-field))
(string new-val))))
(push document docs-to-add)
(delete-document reader id))))
(let ((writer (writer self)))
(dolist (doc (reverse docs-to-add))
(add-document-to-index-writer writer doc))
(when (slot-value self 'auto-flush-p)
(flush self)))))
(defmethod has-deletions-p ((self index))
(has-deletions-p (reader self)))
(defgeneric has-writes (index))
(defmethod has-writes ((self index))
(slot-value self 'has-writes))
(defmethod flush ((self index))
(with-slots (reader writer searcher) self
(when reader (close reader))
(when writer (close writer))
(setf reader nil
writer nil
searcher nil)))
(defmethod optimize ((self index))
(optimize (writer self))
(flush self))
(defmethod size ((self index))
(num-docs (reader self)))
(defmethod add-indexes ((self index) &rest indexes)
(let ((indexes indexes))
(when (> (length indexes) 0)
(when (typep (elt indexes 0) 'index)
(setf indexes (map 'vector #'reader indexes)))
(cond ((typep (elt indexes 0) 'index-reader)
(let ((reader (reader self)))
(setf indexes (remove reader indexes)))
(add-indexes-readers (writer self) indexes))
((typep (elt indexes 0) 'directory)
(setf indexes (remove (slot-value self 'dir) indexes))
(apply #'add-indexes (writer self) indexes))
(T
(error "Unknown index type ~S when trying to merge indexes." (elt indexes 0)))))))
(defgeneric persist (index directory &key create-p))
(defmethod persist ((self index) directory &key (create-p T))
(flush self)
(with-slots (dir options) self
(let ((old-dir dir))
(etypecase directory
((or string pathname)
(setf dir (make-fs-directory directory :create-p create-p))
(setf (get-index-option options :close-directory-p) T))
(directory
(setf dir directory)))
(ensure-writer-open self)
(add-indexes (writer self) old-dir))))
(defgeneric ensure-writer-open (index))
(defmethod ensure-writer-open ((self index))
(with-slots (open-p writer reader dir options searcher) self
(unless open-p
(error "Tried to use a closed index."))
(unless writer
(when reader
(close reader)
(setf reader nil
searcher nil))
(setf writer (apply #'make-instance 'index-writer
:directory dir
options)))))
(defgeneric ensure-reader-open (index))
(defmethod ensure-reader-open ((self index))
(with-slots (open-p writer reader dir) self
(unless open-p
(error "Tried to use a closed index."))
(if reader
(if (not (latest-p reader))
(setf reader (open-index-reader dir :close-directory-p NIL))
NIL)
(progn
(when writer
(close writer)
(setf writer nil))
(setf reader (open-index-reader dir :close-directory-p NIL))))))
(defgeneric ensure-searcher-open (index))
(defmethod ensure-searcher-open ((self index))
(with-slots (open-p searcher reader) self
(unless open-p
(error "Tried to use a closed index."))
(when (or (ensure-reader-open self) (not searcher))
(setf searcher (make-instance 'index-searcher
:reader reader)))))
(defgeneric do-search (index query options))
(defmethod do-search ((self index) query options)
(let ((searcher (searcher self))
(query (process-query self query)))
(apply #'search searcher query options)))
(defgeneric process-query (index query))
(defmethod process-query ((self index) query)
(if (stringp query)
(with-slots (qp default-search-field options reader) self
(unless qp
(setf qp (apply #'make-instance 'query-parser
:allow-other-keys T
:default-field default-search-field
options)))
(unless (get-index-option options :fields)
(setf (fields qp) (coerce (get-field-names reader) 'vector)))
(parse qp query))
query))
(defun stringify (x)
(etypecase x
(string x)
((or character symbol) (string x))
(integer (format nil "~A" x))))
|
2cec34fc271203ec9283567180353aa93b8414a416172c8a25e9d74e50f8d96c | patricoferris/sesame | collection.mli | * { 1 Collections }
Collections are at the very heart of and was the first thing Sesame
could do . A Collection is simply a set of files that share the same metadata
structure . This is expressed in the " Jekyll Format " , a header ( )
of yaml followed by markdown .
Once you have specified the shape of the metadata using types , can
more or less handle the rest until you want to customise the output which
you most likely will !
Collections are at the very heart of Sesame and was the first thing Sesame
could do. A Collection is simply a set of files that share the same metadata
structure. This is expressed in the "Jekyll Format", a header (frontmatter)
of yaml followed by markdown.
Once you have specified the shape of the metadata using types, Sesame can
more or less handle the rest until you want to customise the output which
you most likely will! *)
module type Meta = sig
type t [@@deriving yaml]
(** The type for metadata *)
end
(** {2 Make a Collection}
This module creates a collection from a file *)
module Make (M : Meta) : sig
type meta = M.t
type t = { path : string; meta : M.t; body : string }
include S.S with type Input.t = Fpath.t and type t := t and type Output.t = t
end
* { 2 Make HTML Output }
The [ HTML ] functor generates a string of HTML based on some metadata ( i.e. a
collection ) , it also passes through the original path of the collection the
HTML was generated from .
The [HTML] functor generates a string of HTML based on some metadata (i.e. a
collection), it also passes through the original path of the collection the
HTML was generated from. *)
module Html (M : Meta) : sig
type t = { path : string; html : string }
include
S.S with type Input.t = Make(M).t and type t := t and type Output.t = t
end
| null | https://raw.githubusercontent.com/patricoferris/sesame/8521e2a086b49d0bc20f0fca705f07675c52e1ae/src/sesame/collection.mli | ocaml | * The type for metadata
* {2 Make a Collection}
This module creates a collection from a file | * { 1 Collections }
Collections are at the very heart of and was the first thing Sesame
could do . A Collection is simply a set of files that share the same metadata
structure . This is expressed in the " Jekyll Format " , a header ( )
of yaml followed by markdown .
Once you have specified the shape of the metadata using types , can
more or less handle the rest until you want to customise the output which
you most likely will !
Collections are at the very heart of Sesame and was the first thing Sesame
could do. A Collection is simply a set of files that share the same metadata
structure. This is expressed in the "Jekyll Format", a header (frontmatter)
of yaml followed by markdown.
Once you have specified the shape of the metadata using types, Sesame can
more or less handle the rest until you want to customise the output which
you most likely will! *)
module type Meta = sig
type t [@@deriving yaml]
end
module Make (M : Meta) : sig
type meta = M.t
type t = { path : string; meta : M.t; body : string }
include S.S with type Input.t = Fpath.t and type t := t and type Output.t = t
end
* { 2 Make HTML Output }
The [ HTML ] functor generates a string of HTML based on some metadata ( i.e. a
collection ) , it also passes through the original path of the collection the
HTML was generated from .
The [HTML] functor generates a string of HTML based on some metadata (i.e. a
collection), it also passes through the original path of the collection the
HTML was generated from. *)
module Html (M : Meta) : sig
type t = { path : string; html : string }
include
S.S with type Input.t = Make(M).t and type t := t and type Output.t = t
end
|
8655f952916c104e4849706dd773ef5a0598b69d480f3cc75dbcc745e9aeb423 | spurious/sagittarius-scheme-mirror | %3a26.scm | ;; -*- Scheme -*-
SRFI 26
(library (srfi :26)
(export cut cute)
(import (srfi :26 cut))) | null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/srfi/%253a26.scm | scheme | -*- Scheme -*- | SRFI 26
(library (srfi :26)
(export cut cute)
(import (srfi :26 cut))) |
259427a6f1f684b78bf7d7b47fa929862d38dd36c385f2b8152be5e36068410e | google/lisp-koans | iteration.lisp | Copyright 2013 Google Inc.
;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;; you may not use this file except in compliance with the License.
;;; You may obtain a copy of the License at
;;;
;;; -2.0
;;;
;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;; See the License for the specific language governing permissions and
;;; limitations under the License.
;;; Lisp has multiple options for iteration.
;;; This set of koans will introduce some of the most common ones.
(define-test dolist
(let ((numbers '(4 8 15 16 23 42)))
;; The macro DOLIST binds a variable to subsequent elements of a list.
(let ((sum 0))
(dolist (number numbers)
( INCF PLACE N ) is equivalent to ( SETF PLACE ( + N PLACE ) ) .
(incf sum number))
(assert-equal ____ sum))
DOLIST can optionally return a value .
(let ((sum 0))
(assert-equal ____ (dolist (number numbers sum)
(incf sum number))))))
(define-test dotimes
The macro DOTIMES binds a variable to subsequent integers from 0 to
;; (1- COUNT).
(let ((stack '()))
(dotimes (i 5)
(push i stack))
(assert-equal ____ stack))
DOTIMES can optionally return a value .
(let ((stack '()))
(assert-equal ____ (dotimes (i 5 stack)
(push i stack)))))
(define-test do
;; The macro DO accepts a list of variable bindings, a termination test with
;; epilogue forms, and Lisp code that should be executed on each iteration.
(let ((result '()))
(do ((i 0 (1+ i)))
((> i 5))
(push i result))
(assert-equal ____ result))
;; The epilogue of DO can return a value.
(let ((result (do ((i 0 (1+ i))
;; A variable bound by DO noes not need to be updated on
;; each iteration.
(result '()))
((> i 5) (nreverse result))
(push i result))))
(assert-equal ____ result)))
(define-test loop-basic-form
;; The macro LOOP in its simple form loops forever. It is possible to stop the
;; looping by calling the RETURN special form.
(let ((counter 0))
(loop (incf counter)
(when (>= counter 100)
(return counter)))
(assert-equal ____ counter))
;; The RETURN special form can return a value out of a LOOP.
(let ((counter 0))
(assert-equal ____ (loop (incf counter)
(when (>= counter 100)
(return counter)))))
The extended form of LOOP will be contemplated in a future koan .
)
| null | https://raw.githubusercontent.com/google/lisp-koans/df5e58dc88429ef0ff202d0b45c21ce572144ba8/koans/iteration.lisp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Lisp has multiple options for iteration.
This set of koans will introduce some of the most common ones.
The macro DOLIST binds a variable to subsequent elements of a list.
(1- COUNT).
The macro DO accepts a list of variable bindings, a termination test with
epilogue forms, and Lisp code that should be executed on each iteration.
The epilogue of DO can return a value.
A variable bound by DO noes not need to be updated on
each iteration.
The macro LOOP in its simple form loops forever. It is possible to stop the
looping by calling the RETURN special form.
The RETURN special form can return a value out of a LOOP. | Copyright 2013 Google Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(define-test dolist
(let ((numbers '(4 8 15 16 23 42)))
(let ((sum 0))
(dolist (number numbers)
( INCF PLACE N ) is equivalent to ( SETF PLACE ( + N PLACE ) ) .
(incf sum number))
(assert-equal ____ sum))
DOLIST can optionally return a value .
(let ((sum 0))
(assert-equal ____ (dolist (number numbers sum)
(incf sum number))))))
(define-test dotimes
The macro DOTIMES binds a variable to subsequent integers from 0 to
(let ((stack '()))
(dotimes (i 5)
(push i stack))
(assert-equal ____ stack))
DOTIMES can optionally return a value .
(let ((stack '()))
(assert-equal ____ (dotimes (i 5 stack)
(push i stack)))))
(define-test do
(let ((result '()))
(do ((i 0 (1+ i)))
((> i 5))
(push i result))
(assert-equal ____ result))
(let ((result (do ((i 0 (1+ i))
(result '()))
((> i 5) (nreverse result))
(push i result))))
(assert-equal ____ result)))
(define-test loop-basic-form
(let ((counter 0))
(loop (incf counter)
(when (>= counter 100)
(return counter)))
(assert-equal ____ counter))
(let ((counter 0))
(assert-equal ____ (loop (incf counter)
(when (>= counter 100)
(return counter)))))
The extended form of LOOP will be contemplated in a future koan .
)
|
b5601ce7ac036a5c5103b86d723450ba56dcb9ff8453eb3b5eedb22c7a9b288a | brendanhay/amazonka | WorkLink.hs | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Test . Amazonka . Gen.
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Test.Amazonka.Gen.WorkLink where
import Amazonka.WorkLink
import qualified Data.Proxy as Proxy
import Test.Amazonka.Fixture
import Test.Amazonka.Prelude
import Test.Amazonka.WorkLink.Internal
import Test.Tasty
-- Auto-generated: the actual test selection needs to be manually placed into
-- the top-level so that real test data can be incrementally added.
--
-- This commented snippet is what the entire set should look like:
-- fixtures :: TestTree
-- fixtures =
-- [ testGroup "request"
-- ]
-- , testGroup "response"
-- ]
-- ]
-- Requests
-- Responses
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-worklink/test/Test/Amazonka/Gen/WorkLink.hs | haskell | |
Stability : auto-generated
Auto-generated: the actual test selection needs to be manually placed into
the top-level so that real test data can be incrementally added.
This commented snippet is what the entire set should look like:
fixtures :: TestTree
fixtures =
[ testGroup "request"
]
, testGroup "response"
]
]
Requests
Responses | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Test . Amazonka . Gen.
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Test.Amazonka.Gen.WorkLink where
import Amazonka.WorkLink
import qualified Data.Proxy as Proxy
import Test.Amazonka.Fixture
import Test.Amazonka.Prelude
import Test.Amazonka.WorkLink.Internal
import Test.Tasty
|
73d4e54881c79636ed7fb1bfdcf33ebe7b65e093f565963d67c96a482921be73 | racket/gui | cycle.rkt | #lang racket/base
(define-syntax-rule (decl id set-id)
(begin
(provide id set-id)
(define id #f)
(define (set-id v) (set! id v))))
(decl text% set-text%!)
(decl pasteboard% set-pasteboard%!)
(decl editor-stream-in% set-editor-stream-in%!)
(decl editor-stream-out% set-editor-stream-out%!)
(decl editor-snip% set-editor-snip%!)
(decl editor-snip-editor-admin% set-editor-snip-editor-admin%!)
(decl extended-editor-snip% set-extended-editor-snip%!)
(decl extended-text% set-extended-text%!)
(decl extended-pasteboard% set-extended-pasteboard%!)
(decl get-editor-data-class set-get-editor-data-class!)
(decl editor-get-file set-editor-get-file!)
(decl editor-put-file set-editor-put-file!)
(decl popup-menu% set-popup-menu%!)
(decl printer-dc% set-printer-dc%!)
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-lib/mred/private/wxme/cycle.rkt | racket | #lang racket/base
(define-syntax-rule (decl id set-id)
(begin
(provide id set-id)
(define id #f)
(define (set-id v) (set! id v))))
(decl text% set-text%!)
(decl pasteboard% set-pasteboard%!)
(decl editor-stream-in% set-editor-stream-in%!)
(decl editor-stream-out% set-editor-stream-out%!)
(decl editor-snip% set-editor-snip%!)
(decl editor-snip-editor-admin% set-editor-snip-editor-admin%!)
(decl extended-editor-snip% set-extended-editor-snip%!)
(decl extended-text% set-extended-text%!)
(decl extended-pasteboard% set-extended-pasteboard%!)
(decl get-editor-data-class set-get-editor-data-class!)
(decl editor-get-file set-editor-get-file!)
(decl editor-put-file set-editor-put-file!)
(decl popup-menu% set-popup-menu%!)
(decl printer-dc% set-printer-dc%!)
|
|
59a5e9543ea87812cef014b328c9cc3ceaf370cff38b55f90975c4c179aa207e | adolenc/cl-neovim | logging.lisp | (in-package #:cl-neovim)
(defparameter *log-stream* *standard-output*)
(cl:defun enable-logging (&key (stream *standard-output*) (level :info))
(setf *log-stream* stream
vom:*log-stream* stream
*print-pretty* NIL)
(vom:config t level))
(cl:defmethod mrpc::send :before (event-loop socket bytes)
(declare (ignore event-loop socket))
(vom:debug1 "sending bytes: ~S" bytes))
(cl:defmethod mrpc::callback-handler :before (session bytes)
(declare (ignore session))
(vom:debug1 "received bytes: #~S" bytes))
(cl:defmethod mrpc::send-request :before (session id method params)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [req] (~S ~S ~S)" id method params))
(cl:defmethod mrpc::send-notification :before (session method params)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [ntf] (~S ~S)" method params))
(cl:defmethod mrpc::send-response :before (session id error result)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [rsp] (~S ~S ~S)" id error result))
(cl:defmethod mrpc::on-message :before (session data)
(declare (ignore session))
(vom:debug "(nvim -> cl-neovim) [~A] ~S"
(cond ((mrpc::requestp data) "req")
((mrpc::notificationp data) "ntf")
((mrpc::responsep data) "rsp"))
data))
(cl:defmethod mrpc:register-callback :before (session method callback)
(declare (ignore session callback))
(vom:debug "Registering callback `~S'" method))
(cl:defmethod mrpc:remove-callback :before (session method)
(declare (ignore session))
(vom:debug "Removing callback `~S'" method))
| null | https://raw.githubusercontent.com/adolenc/cl-neovim/7212d305206aaae331a3e2d0d2597b671cec01f4/src/logging.lisp | lisp | (in-package #:cl-neovim)
(defparameter *log-stream* *standard-output*)
(cl:defun enable-logging (&key (stream *standard-output*) (level :info))
(setf *log-stream* stream
vom:*log-stream* stream
*print-pretty* NIL)
(vom:config t level))
(cl:defmethod mrpc::send :before (event-loop socket bytes)
(declare (ignore event-loop socket))
(vom:debug1 "sending bytes: ~S" bytes))
(cl:defmethod mrpc::callback-handler :before (session bytes)
(declare (ignore session))
(vom:debug1 "received bytes: #~S" bytes))
(cl:defmethod mrpc::send-request :before (session id method params)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [req] (~S ~S ~S)" id method params))
(cl:defmethod mrpc::send-notification :before (session method params)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [ntf] (~S ~S)" method params))
(cl:defmethod mrpc::send-response :before (session id error result)
(declare (ignore session))
(vom:debug "(cl-neovim -> nvim) [rsp] (~S ~S ~S)" id error result))
(cl:defmethod mrpc::on-message :before (session data)
(declare (ignore session))
(vom:debug "(nvim -> cl-neovim) [~A] ~S"
(cond ((mrpc::requestp data) "req")
((mrpc::notificationp data) "ntf")
((mrpc::responsep data) "rsp"))
data))
(cl:defmethod mrpc:register-callback :before (session method callback)
(declare (ignore session callback))
(vom:debug "Registering callback `~S'" method))
(cl:defmethod mrpc:remove-callback :before (session method)
(declare (ignore session))
(vom:debug "Removing callback `~S'" method))
|
|
b13182fd2ee3a1a5b836814de8b3e5c2510f378ade2c1ba6b34c05ae66082df3 | georepl/georepl | draw_primitives_test.clj | (ns georepl.draw-primitives-test
(:require [clojure.test :refer :all]
[georepl.shapes :as shapes]
[georepl.draw-framework :as fw]
[georepl.draw-primitives :as dp]))
(deftest exit-test
(#'fw/init-renderer :test)
(is (= :exit (get (#'dp/exit) :f-renderer))))
(deftest draw-str-test
(#'fw/init-renderer :test)
(is (= :no-fill (get (#'dp/draw-str "Test" 0 0 100 100) :f-renderer))))
(deftest draw-point-test
(#'fw/init-renderer :test)
(is (= "argument 42 must be two-dimensional vector" (#'dp/draw-point 42)))
(is (= :ellipse (get (#'dp/draw-point [42 18]) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] 8) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] [100 0 0]) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] [100 0 0] 42) :f-renderer)))
)
(deftest draw-element-test
(is (= nil (#'dp/draw-element (#'shapes/constructCompound [(#'shapes/constructLine [0 0][100 30])(#'shapes/constructCircle [50 15] 10)]) true)))
(is (= nil (#'dp/draw-element [[0 0][1 0][2 -1][3 0][4 1]] true))))
(def selcoll (list {:s "foo" :p1 [0 10] :p2 [25 15] :f #(42) :create :polyline :highlight true}
{:s "bizz" :p1 [0 15] :p2 [25 20] :f #(43) :create :point :highlight false}
{:s "buzz" :p1 [0 20] :p2 [25 25] :f #(44) :create :modify :highlight false}))
(deftest draw-text-vec-test
(#'fw/init-renderer :test)
(is (= :no-fill (get (#'dp/draw-text-vec selcoll) :f-renderer)))
(is (nil? (get (#'dp/draw-text-vec (map #(dissoc % :p1) selcoll)) :f-renderer)))
(is (nil? (get (#'dp/draw-text-vec (map #(dissoc % :p2) selcoll)) :f-renderer))))
| null | https://raw.githubusercontent.com/georepl/georepl/1502ae04bbc3cab757000714008ccecc4e9e571b/test/georepl/draw_primitives_test.clj | clojure | (ns georepl.draw-primitives-test
(:require [clojure.test :refer :all]
[georepl.shapes :as shapes]
[georepl.draw-framework :as fw]
[georepl.draw-primitives :as dp]))
(deftest exit-test
(#'fw/init-renderer :test)
(is (= :exit (get (#'dp/exit) :f-renderer))))
(deftest draw-str-test
(#'fw/init-renderer :test)
(is (= :no-fill (get (#'dp/draw-str "Test" 0 0 100 100) :f-renderer))))
(deftest draw-point-test
(#'fw/init-renderer :test)
(is (= "argument 42 must be two-dimensional vector" (#'dp/draw-point 42)))
(is (= :ellipse (get (#'dp/draw-point [42 18]) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] 8) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] [100 0 0]) :f-renderer)))
(is (= :no-fill (get (#'dp/draw-point [42 18] [100 0 0] 42) :f-renderer)))
)
(deftest draw-element-test
(is (= nil (#'dp/draw-element (#'shapes/constructCompound [(#'shapes/constructLine [0 0][100 30])(#'shapes/constructCircle [50 15] 10)]) true)))
(is (= nil (#'dp/draw-element [[0 0][1 0][2 -1][3 0][4 1]] true))))
(def selcoll (list {:s "foo" :p1 [0 10] :p2 [25 15] :f #(42) :create :polyline :highlight true}
{:s "bizz" :p1 [0 15] :p2 [25 20] :f #(43) :create :point :highlight false}
{:s "buzz" :p1 [0 20] :p2 [25 25] :f #(44) :create :modify :highlight false}))
(deftest draw-text-vec-test
(#'fw/init-renderer :test)
(is (= :no-fill (get (#'dp/draw-text-vec selcoll) :f-renderer)))
(is (nil? (get (#'dp/draw-text-vec (map #(dissoc % :p1) selcoll)) :f-renderer)))
(is (nil? (get (#'dp/draw-text-vec (map #(dissoc % :p2) selcoll)) :f-renderer))))
|
|
d01d8b72f7ad558358800f91e7d766e27d0e4d52198a7fd1e8f27b1911c4a1af | ndmitchell/shake | Main.hs | # LANGUAGE RecordWildCards #
module Main(main) where
import Data.Tuple.Extra
import Control.Monad
import Data.Char
import Data.List.Extra
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Entity
import qualified Data.Map as Map
import Text.Markdown
import Text.Blaze.Html.Renderer.Text
import System.Directory
import System.Environment
import System.FilePath
import System.IO.Extra
import Code
data Mode = Debug | Release deriving (Eq,Enum,Bounded,Show)
getMode :: IO Mode
getMode = do
args <- getArgs
let modes = [Debug, Release]
case args of
[] -> pure Release
[x] | Just x <- lookup (lower x) $ map (lower . show &&& id) modes -> pure x
| otherwise -> fail $ "Couldn't recognise argument, got " ++ x ++ ", wanted " ++ show modes
_ -> fail "Only allowed at most one command line argument"
main :: IO ()
main = do
mode <- getMode
createDirectoryIfMissing True "output"
files <- getDirectoryContents "../docs"
code <- code "../dist/doc/html/shake/shake.txt"
skeleton <- skeleton mode "parts" "output/index.css"
forM_ files $ \file -> case takeExtension file of
".md" -> do
putChar '.'
p <- readPage mode code $ "../docs" </> file
skeleton ("output" </> lower (takeBaseName file) <.> "html") p
".png" -> copyFile ("../docs" </> file) ("output" </> file)
_ -> pure ()
copyFile "parts/favicon.ico" "output/favicon.ico"
putStrLn " done"
data Link = Link
{linkLevel :: String
,linkTitle :: String
,linkKey :: String
} deriving Show
data Page = Page
{pageTitle :: String
,pageTOC :: [Link]
,pageBody :: [Tag String]
} deriving Show
readFileMarkdown :: FilePath -> IO [Tag String]
readFileMarkdown = fmap (parseTags . T.unpack . renderHtml . markdown def{msXssProtect=False}) . T.readFile
readFileTags :: FilePath -> IO [Tag String]
readFileTags = fmap parseTags . readFile'
writeFileTags :: FilePath -> [Tag String] -> IO ()
writeFileTags file = writeFileUTF8 file . renderTagsOptions renderOptions{optEscape=concatMap (\x -> Map.findWithDefault [x] x escapes)}
where escapes = Map.fromList $ [(b, "&" ++ a ++ ";") | (a,[b]) <- xmlEntities] ++
[(b, "&" ++ takeWhile (/= ';') a ++ ";") | (a,[b]) <- htmlEntities, not $ isAscii b]
---------------------------------------------------------------------
-- READ A PAGE
readPage :: Mode -> (String -> [Tag String]) -> FilePath -> IO Page
readPage mode code file = do
(pageTOC, pageBody) <- fmap (links . reformat mode code) $ readFileMarkdown $ "../docs" </> file
let pageTitle = innerText $ inside "h1" pageBody
pure Page{..}
where
links (TagOpen linkLevel@['h',i] at:xs) | i `elem` "234" =
([Link{..} | i /= '4'] ++) *** (prefix++) $ links rest
where linkTitle = innerText $ takeWhile (/= TagClose linkLevel) xs
linkKey = intercalate "-" $ map (lower . filter isAlpha) $ words $
takeWhile (`notElem` "?.!") $ dropPrefix "Q: " linkTitle
(this,rest) = break (== TagClose linkLevel) xs
prefix = [TagOpen "span" [("class","target"),("id",linkKey)],TagClose "span"
,TagOpen linkLevel at,TagOpen "a" [("href",'#':linkKey),("class","anchor")]] ++
this ++ [TagClose "a"]
links (x:xs) = second (x:) $ links xs
links [] = ([], [])
reformat :: Mode -> (String -> [Tag String]) -> [Tag String] -> [Tag String]
reformat mode code (TagOpen "p" []:TagOpen "i" []:TagText s:xs) | "See also" `isPrefixOf` s =
reformat mode code $ drop1 $ dropWhile (~/= "</p>") xs
reformat mode code (TagOpen "a" at:xs) = TagOpen "a" (map f at) : reformat mode code xs
where f ("href",x) | ".md" `isPrefixOf` takeExtension x =
-- watch out for Manual.md#readme
("href", noReadme $ dropFileName x ++ lower (takeBaseName x) ++
(if mode == Release then "" else ".html") ++
drop 3 (takeExtension x))
f x = x
reformat mode code (TagOpen "pre" []:TagOpen "code" []:xs) = reformat mode code $ TagOpen "pre" [] : xs
reformat mode code (TagClose "code":TagClose "pre":xs) = reformat mode code $ TagClose "pre" : xs
reformat mode code (TagOpen t at:xs) | t `elem` ["pre","code"] = TagOpen t at : concatMap f a ++ reformat mode code b
where (a,b) = break (== TagClose t) xs
skip = TagComment " nosyntax " `elem` a || notCode (innerText a)
f (TagText x) | not skip = code x
f x = [x]
reformat mode code (TagClose x:xs) | x `elem` ["p","pre","li","ol","ul","h1","h2","h3","h4","h5","h6"] =
TagClose x : TagText "\n" : reformat mode code xs
reformat mode code (TagText x:xs) = TagText (replace " -- " " \x2013 " x) : reformat mode code xs
-- properly render the ASCII fallback " -- " as 's EN DASH (U+2013)
reformat mode code (x:xs) = x : reformat mode code xs
reformat mode code [] = []
noReadme = dropSuffix "#readme"
notCode :: String -> Bool
notCode x =
"stack " `isPrefixOf` x ||
"shake-" `isPrefixOf` x ||
("--" `isPrefixOf` x && length (lines x) == 1) ||
x == "shake" ||
(let t = takeExtension x in "." `isPrefixOf` t && all isAlpha (drop1 t))
---------------------------------------------------------------------
-- POPULATE A SKELETON
skeleton :: Mode -> FilePath -> FilePath -> IO (FilePath -> Page -> IO ())
skeleton mode dir cssOut = do
common <- readFile' $ dir </> "index.css"
header <- readFileTags $ dir </> "header.html"
content <- readFileTags $ dir </> "content.html"
footer <- readFileTags $ dir </> "footer.html"
writeFile cssOut $ common ++ style header ++ style content ++ style footer
pure $ \file Page{..} -> writeFileTags file $
inject (takeBaseName file) (takeWhile (~/= "<div id=content>") (remode $ map (activate $ takeFileName file) $ noStyle header)) ++
parseTags "<div id=content>" ++
(if length pageTOC <= 1 then [] else
parseTags "<div id=toc>" ++
concat [ [TagOpen "a" [("class",linkLevel),("href",'#':linkKey)], TagText linkTitle, TagClose "a"]
| Link{..} <- pageTOC] ++
parseTags "</div>") ++
pageBody ++
parseTags "</div>" ++
dropWhile (~/= "<p id=footer>") footer
where
remode xs = if mode == Debug then xs else map f xs
where f (TagOpen "a" at) = TagOpen "a" $ flip map at $ second $ \v ->
if v == "index.html" then "."
else if takeExtension v == ".html" then dropExtension v else v
f x = x
style = innerText . inside "style"
noStyle x = a ++ drop1 (dropWhile (~/= "</style>") b)
where (a,b) = break (~== "<style>") x
activate url (TagOpen "a" ats) = TagOpen "a" $ let act = ("class","active") in
[act | ("href",url) `elem` ats] ++ delete act ats
activate url x = x
inject name (TagOpen "body" at:xs) = TagOpen "body" (("class","page-"++name):at) : inject name xs
inject name (x:xs) = x : inject name xs
inject name [] = []
inside :: String -> [Tag String] -> [Tag String]
inside tag = takeWhile (~/= TagClose tag) . dropWhile (~/= TagOpen tag [])
| null | https://raw.githubusercontent.com/ndmitchell/shake/99c5a7a4dc1d5a069b13ed5c1bc8e4bc7f13f4a6/website/Main.hs | haskell | -------------------------------------------------------------------
READ A PAGE
------------------------------------------------------------------- | # LANGUAGE RecordWildCards #
module Main(main) where
import Data.Tuple.Extra
import Control.Monad
import Data.Char
import Data.List.Extra
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Entity
import qualified Data.Map as Map
import Text.Markdown
import Text.Blaze.Html.Renderer.Text
import System.Directory
import System.Environment
import System.FilePath
import System.IO.Extra
import Code
data Mode = Debug | Release deriving (Eq,Enum,Bounded,Show)
getMode :: IO Mode
getMode = do
args <- getArgs
let modes = [Debug, Release]
case args of
[] -> pure Release
[x] | Just x <- lookup (lower x) $ map (lower . show &&& id) modes -> pure x
| otherwise -> fail $ "Couldn't recognise argument, got " ++ x ++ ", wanted " ++ show modes
_ -> fail "Only allowed at most one command line argument"
main :: IO ()
main = do
mode <- getMode
createDirectoryIfMissing True "output"
files <- getDirectoryContents "../docs"
code <- code "../dist/doc/html/shake/shake.txt"
skeleton <- skeleton mode "parts" "output/index.css"
forM_ files $ \file -> case takeExtension file of
".md" -> do
putChar '.'
p <- readPage mode code $ "../docs" </> file
skeleton ("output" </> lower (takeBaseName file) <.> "html") p
".png" -> copyFile ("../docs" </> file) ("output" </> file)
_ -> pure ()
copyFile "parts/favicon.ico" "output/favicon.ico"
putStrLn " done"
data Link = Link
{linkLevel :: String
,linkTitle :: String
,linkKey :: String
} deriving Show
data Page = Page
{pageTitle :: String
,pageTOC :: [Link]
,pageBody :: [Tag String]
} deriving Show
readFileMarkdown :: FilePath -> IO [Tag String]
readFileMarkdown = fmap (parseTags . T.unpack . renderHtml . markdown def{msXssProtect=False}) . T.readFile
readFileTags :: FilePath -> IO [Tag String]
readFileTags = fmap parseTags . readFile'
writeFileTags :: FilePath -> [Tag String] -> IO ()
writeFileTags file = writeFileUTF8 file . renderTagsOptions renderOptions{optEscape=concatMap (\x -> Map.findWithDefault [x] x escapes)}
where escapes = Map.fromList $ [(b, "&" ++ a ++ ";") | (a,[b]) <- xmlEntities] ++
[(b, "&" ++ takeWhile (/= ';') a ++ ";") | (a,[b]) <- htmlEntities, not $ isAscii b]
readPage :: Mode -> (String -> [Tag String]) -> FilePath -> IO Page
readPage mode code file = do
(pageTOC, pageBody) <- fmap (links . reformat mode code) $ readFileMarkdown $ "../docs" </> file
let pageTitle = innerText $ inside "h1" pageBody
pure Page{..}
where
links (TagOpen linkLevel@['h',i] at:xs) | i `elem` "234" =
([Link{..} | i /= '4'] ++) *** (prefix++) $ links rest
where linkTitle = innerText $ takeWhile (/= TagClose linkLevel) xs
linkKey = intercalate "-" $ map (lower . filter isAlpha) $ words $
takeWhile (`notElem` "?.!") $ dropPrefix "Q: " linkTitle
(this,rest) = break (== TagClose linkLevel) xs
prefix = [TagOpen "span" [("class","target"),("id",linkKey)],TagClose "span"
,TagOpen linkLevel at,TagOpen "a" [("href",'#':linkKey),("class","anchor")]] ++
this ++ [TagClose "a"]
links (x:xs) = second (x:) $ links xs
links [] = ([], [])
reformat :: Mode -> (String -> [Tag String]) -> [Tag String] -> [Tag String]
reformat mode code (TagOpen "p" []:TagOpen "i" []:TagText s:xs) | "See also" `isPrefixOf` s =
reformat mode code $ drop1 $ dropWhile (~/= "</p>") xs
reformat mode code (TagOpen "a" at:xs) = TagOpen "a" (map f at) : reformat mode code xs
where f ("href",x) | ".md" `isPrefixOf` takeExtension x =
-- watch out for Manual.md#readme
("href", noReadme $ dropFileName x ++ lower (takeBaseName x) ++
(if mode == Release then "" else ".html") ++
drop 3 (takeExtension x))
f x = x
reformat mode code (TagOpen "pre" []:TagOpen "code" []:xs) = reformat mode code $ TagOpen "pre" [] : xs
reformat mode code (TagClose "code":TagClose "pre":xs) = reformat mode code $ TagClose "pre" : xs
reformat mode code (TagOpen t at:xs) | t `elem` ["pre","code"] = TagOpen t at : concatMap f a ++ reformat mode code b
where (a,b) = break (== TagClose t) xs
skip = TagComment " nosyntax " `elem` a || notCode (innerText a)
f (TagText x) | not skip = code x
f x = [x]
reformat mode code (TagClose x:xs) | x `elem` ["p","pre","li","ol","ul","h1","h2","h3","h4","h5","h6"] =
TagClose x : TagText "\n" : reformat mode code xs
reformat mode code (TagText x:xs) = TagText (replace " -- " " \x2013 " x) : reformat mode code xs
-- properly render the ASCII fallback " -- " as 's EN DASH (U+2013)
reformat mode code (x:xs) = x : reformat mode code xs
reformat mode code [] = []
noReadme = dropSuffix "#readme"
notCode :: String -> Bool
notCode x =
"stack " `isPrefixOf` x ||
"shake-" `isPrefixOf` x ||
("--" `isPrefixOf` x && length (lines x) == 1) ||
x == "shake" ||
(let t = takeExtension x in "." `isPrefixOf` t && all isAlpha (drop1 t))
-- POPULATE A SKELETON
skeleton :: Mode -> FilePath -> FilePath -> IO (FilePath -> Page -> IO ())
skeleton mode dir cssOut = do
common <- readFile' $ dir </> "index.css"
header <- readFileTags $ dir </> "header.html"
content <- readFileTags $ dir </> "content.html"
footer <- readFileTags $ dir </> "footer.html"
writeFile cssOut $ common ++ style header ++ style content ++ style footer
pure $ \file Page{..} -> writeFileTags file $
inject (takeBaseName file) (takeWhile (~/= "<div id=content>") (remode $ map (activate $ takeFileName file) $ noStyle header)) ++
parseTags "<div id=content>" ++
(if length pageTOC <= 1 then [] else
parseTags "<div id=toc>" ++
concat [ [TagOpen "a" [("class",linkLevel),("href",'#':linkKey)], TagText linkTitle, TagClose "a"]
| Link{..} <- pageTOC] ++
parseTags "</div>") ++
pageBody ++
parseTags "</div>" ++
dropWhile (~/= "<p id=footer>") footer
where
remode xs = if mode == Debug then xs else map f xs
where f (TagOpen "a" at) = TagOpen "a" $ flip map at $ second $ \v ->
if v == "index.html" then "."
else if takeExtension v == ".html" then dropExtension v else v
f x = x
style = innerText . inside "style"
noStyle x = a ++ drop1 (dropWhile (~/= "</style>") b)
where (a,b) = break (~== "<style>") x
activate url (TagOpen "a" ats) = TagOpen "a" $ let act = ("class","active") in
[act | ("href",url) `elem` ats] ++ delete act ats
activate url x = x
inject name (TagOpen "body" at:xs) = TagOpen "body" (("class","page-"++name):at) : inject name xs
inject name (x:xs) = x : inject name xs
inject name [] = []
inside :: String -> [Tag String] -> [Tag String]
inside tag = takeWhile (~/= TagClose tag) . dropWhile (~/= TagOpen tag [])
|
7dc9e71abe8abf6abdf72c29ec132abd7233ccecec128cd31125fe63121f5e77 | mzp/coq-ide-for-ios | tactic_printer.mli | (************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id: tactic_printer.mli 13323 2010-07-24 15:57:30Z herbelin $ i*)
(*i*)
open Pp
open Sign
open Evd
open Tacexpr
open Proof_type
(*i*)
(* These are the entry points for tactics, proof trees, ... *)
val print_proof : evar_map -> named_context -> proof_tree -> std_ppcmds
val pr_rule : rule -> std_ppcmds
val pr_tactic : tactic_expr -> std_ppcmds
val pr_proof_instr : Decl_expr.proof_instr -> Pp.std_ppcmds
val print_script :
?nochange:bool -> evar_map -> proof_tree -> std_ppcmds
val print_treescript :
?nochange:bool -> evar_map -> proof_tree -> std_ppcmds
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/parsing/tactic_printer.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i
i
These are the entry points for tactics, proof trees, ... | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : tactic_printer.mli 13323 2010 - 07 - 24 15:57:30Z herbelin $ i
open Pp
open Sign
open Evd
open Tacexpr
open Proof_type
val print_proof : evar_map -> named_context -> proof_tree -> std_ppcmds
val pr_rule : rule -> std_ppcmds
val pr_tactic : tactic_expr -> std_ppcmds
val pr_proof_instr : Decl_expr.proof_instr -> Pp.std_ppcmds
val print_script :
?nochange:bool -> evar_map -> proof_tree -> std_ppcmds
val print_treescript :
?nochange:bool -> evar_map -> proof_tree -> std_ppcmds
|
1ef7e766b73ef223434591251e2a46d035b0b71d60bef3a3fb383a94b8af9815 | egonSchiele/chips | Main.hs | # LANGUAGE TemplateHaskell , NoMonomorphismRestriction #
{-# OPTIONS_GHC -fno-full-laziness -fno-cse #-}
import Chips
main = do
-- playSound (soundDir ++ "chips01.wav") True
run "chips challenge" (9 * tileSize, 9 * tileSize) (gameState 1) on stepGame
stepGame :: Float -> GameMonad ()
stepGame i = do
gs <- get
maybeDisableInput
case gs ^. player.direction of
DirLeft -> do
maybeMove TileLeft $ do
player.x -= tileSize
when (gs ^. player.x > (4*tileSize) && gs ^. player.x < ((boardW-4)*tileSize)) $ do
x += tileSize
DirRight -> do
maybeMove TileRight $ do
player.x += tileSize
when (gs ^. player.x > (3*tileSize) && gs ^. player.x < ((boardW-5)*tileSize)) $ do
x -= tileSize
DirUp -> do
maybeMove TileAbove $ do
player.y += tileSize
when (gs ^. player.y < ((boardH-5)*tileSize) && gs ^. player.y > (3*tileSize)) $ do
y -= tileSize
DirDown -> do
maybeMove TileBelow $ do
player.y -= tileSize
when (gs ^. player.y < ((boardH-4)*tileSize) && gs ^. player.y > (4*tileSize)) $ do
y += tileSize
_ -> return ()
curTile <- use . tileAt $ Current
curI <- tilePosToIndex Current
curLocation_ <- liftIO $ readIORef curLocation
liftIO $ prevLocation $= curLocation_
liftIO $ curLocation $= curI
player.standingOn .= curTile
case curTile of
ButtonRed _ -> once $ checkCurTile curTile
ButtonBlue _ -> once $ checkCurTile curTile
ButtonGreen _ -> once $ checkCurTile curTile
_ -> checkCurTile curTile
cur <- liftIO getCurrentTime
last <- liftIO $ readIORef lastTick
if (diffUTCTime last cur < moveSpeed)
then do
liftIO $ lastTick $= cur
tick._1 .= True
tick._2 += 1
else
tick._1 .= False
moveEnemies
checkSand
-- this function checks if there
-- are any sand blocks on brown buttons.
-- If so, the related traps should be open.
checkSand = do
gs <- get
eachTile $ \(tile, i) -> do
case tile of
Sand button@(ButtonBrown _ _) _ -> checkCurTile button
_ -> return ()
maybeDisableInput = do
curTile <- use . tileAt $ Current
guardGodMode $ do
case curTile of
Ice _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceBottomLeft _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceBottomRight _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceTopLeft _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceTopRight _ -> do
whenM not hasIceSkates $ do
disableInput .= True
-- trap disables/enables input by itself, dont mess with it
Trap _ _ -> return ()
_ -> do
whenM id disableInput $ do
player.direction .= Standing
disableInput .= False
| null | https://raw.githubusercontent.com/egonSchiele/chips/14bb957f9ad42fa05c5edc56e50b90fcde461a77/src/Main.hs | haskell | this function checks if there
are any sand blocks on brown buttons.
If so, the related traps should be open.
trap disables/enables input by itself, dont mess with it | # LANGUAGE TemplateHaskell , NoMonomorphismRestriction #
# OPTIONS_GHC -fno - full - laziness -fno - cse #
import Chips
main = do
playSound ( soundDir + + " chips01.wav " ) True
run "chips challenge" (9 * tileSize, 9 * tileSize) (gameState 1) on stepGame
stepGame :: Float -> GameMonad ()
stepGame i = do
gs <- get
maybeDisableInput
case gs ^. player.direction of
DirLeft -> do
maybeMove TileLeft $ do
player.x -= tileSize
when (gs ^. player.x > (4*tileSize) && gs ^. player.x < ((boardW-4)*tileSize)) $ do
x += tileSize
DirRight -> do
maybeMove TileRight $ do
player.x += tileSize
when (gs ^. player.x > (3*tileSize) && gs ^. player.x < ((boardW-5)*tileSize)) $ do
x -= tileSize
DirUp -> do
maybeMove TileAbove $ do
player.y += tileSize
when (gs ^. player.y < ((boardH-5)*tileSize) && gs ^. player.y > (3*tileSize)) $ do
y -= tileSize
DirDown -> do
maybeMove TileBelow $ do
player.y -= tileSize
when (gs ^. player.y < ((boardH-4)*tileSize) && gs ^. player.y > (4*tileSize)) $ do
y += tileSize
_ -> return ()
curTile <- use . tileAt $ Current
curI <- tilePosToIndex Current
curLocation_ <- liftIO $ readIORef curLocation
liftIO $ prevLocation $= curLocation_
liftIO $ curLocation $= curI
player.standingOn .= curTile
case curTile of
ButtonRed _ -> once $ checkCurTile curTile
ButtonBlue _ -> once $ checkCurTile curTile
ButtonGreen _ -> once $ checkCurTile curTile
_ -> checkCurTile curTile
cur <- liftIO getCurrentTime
last <- liftIO $ readIORef lastTick
if (diffUTCTime last cur < moveSpeed)
then do
liftIO $ lastTick $= cur
tick._1 .= True
tick._2 += 1
else
tick._1 .= False
moveEnemies
checkSand
checkSand = do
gs <- get
eachTile $ \(tile, i) -> do
case tile of
Sand button@(ButtonBrown _ _) _ -> checkCurTile button
_ -> return ()
maybeDisableInput = do
curTile <- use . tileAt $ Current
guardGodMode $ do
case curTile of
Ice _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceBottomLeft _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceBottomRight _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceTopLeft _ -> do
whenM not hasIceSkates $ do
disableInput .= True
IceTopRight _ -> do
whenM not hasIceSkates $ do
disableInput .= True
Trap _ _ -> return ()
_ -> do
whenM id disableInput $ do
player.direction .= Standing
disableInput .= False
|
f99a61cd44b329e4011b1c155f61da23f3af012e65b72739a4c887ae7da20c37 | oakes/Dynadoc | boot.clj | (ns dynadoc.boot
{:boot/export-tasks true}
(:require [dynadoc.core :refer [start]]
[boot.core :as core]
[clojure.java.io :as io]
[clojure.string :as str]))
(core/deftask dynadoc
[p port PORT int "The port that Dynadoc runs on"
_ host HOST str "The hostname that Dynadoc listens on"]
(core/with-pass-thru _
(start {:port (or port 5000)
:ip (or host "0.0.0.0")})))
| null | https://raw.githubusercontent.com/oakes/Dynadoc/2e3a4bd90bfeb6648674b0891341a047d210e500/src/dynadoc/boot.clj | clojure | (ns dynadoc.boot
{:boot/export-tasks true}
(:require [dynadoc.core :refer [start]]
[boot.core :as core]
[clojure.java.io :as io]
[clojure.string :as str]))
(core/deftask dynadoc
[p port PORT int "The port that Dynadoc runs on"
_ host HOST str "The hostname that Dynadoc listens on"]
(core/with-pass-thru _
(start {:port (or port 5000)
:ip (or host "0.0.0.0")})))
|
|
51f1b50135811147806992c5cd0c23d9fdfc599d624e68054c6ad925cc94899b | pveber/bistro | ncbi_genome.mli | open Bistro
open Formats
val assembly_summary : tsv file
val fetch_assembly :
genome_id:string ->
assembly_id:string ->
fasta gz file
| null | https://raw.githubusercontent.com/pveber/bistro/da0ebc969c8c5ca091905366875cbf8366622280/lib/bio/ncbi_genome.mli | ocaml | open Bistro
open Formats
val assembly_summary : tsv file
val fetch_assembly :
genome_id:string ->
assembly_id:string ->
fasta gz file
|
|
002036ab0384a7395e304e4fe8bc106cf98fe9aea8f220fb1ab21483bddad43c | bos/rwh | Hash.hs | {-- snippet jenkins --}
{-# LANGUAGE BangPatterns, ForeignFunctionInterface #-}
module BloomFilter.Hash
(
Hashable(..)
, hash
, doubleHash
) where
import Data.Bits ((.&.), shiftR)
import Foreign.Marshal.Array (withArrayLen)
import Control.Monad (foldM)
import Data.Word (Word32, Word64)
import Foreign.C.Types (CSize)
import Foreign.Marshal.Utils (with)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (Storable, peek, sizeOf)
import qualified Data.ByteString as Strict
import qualified Data.ByteString.Lazy as Lazy
import System.IO.Unsafe (unsafePerformIO)
foreign import ccall unsafe "lookup3.h hashword2" hashWord2
:: Ptr Word32 -> CSize -> Ptr Word32 -> Ptr Word32 -> IO ()
foreign import ccall unsafe "lookup3.h hashlittle2" hashLittle2
:: Ptr a -> CSize -> Ptr Word32 -> Ptr Word32 -> IO ()
{-- /snippet jenkins --}
{-- snippet Hashable --}
class Hashable a where
hashSalt :: Word64 -- ^ salt
-> a -- ^ value to hash
-> Word64
hash :: Hashable a => a -> Word64
hash = hashSalt 0x106fc397cf62f64d3
{-- /snippet Hashable --}
{-- snippet hashIO --}
hashIO :: Ptr a -- value to hash
-> CSize -- number of bytes
-> Word64 -- salt
-> IO Word64
hashIO ptr bytes salt =
with (fromIntegral salt) $ \sp -> do
let p1 = castPtr sp
p2 = castPtr sp `plusPtr` 4
go p1 p2
peek sp
where go p1 p2
| bytes .&. 3 == 0 = hashWord2 (castPtr ptr) words p1 p2
| otherwise = hashLittle2 ptr bytes p1 p2
words = bytes `div` 4
{-- /snippet hashIO --}
{-- snippet hashStorable --}
hashStorable :: Storable a => Word64 -> a -> Word64
hashStorable salt k = unsafePerformIO . with k $ \ptr ->
hashIO ptr (fromIntegral (sizeOf k)) salt
instance Hashable Char where hashSalt = hashStorable
instance Hashable Int where hashSalt = hashStorable
instance Hashable Double where hashSalt = hashStorable
{-- /snippet hashStorable --}
{ -- snippet -
{-- snippet Storable --}
instance Storable a => Hashable a where
hashSalt = hashStorable
{-- /snippet Storable --}
-}
{-- snippet hashList --}
hashList :: (Storable a) => Word64 -> [a] -> IO Word64
hashList salt xs =
withArrayLen xs $ \len ptr ->
hashIO ptr (fromIntegral (len * sizeOf x)) salt
where x = head xs
instance (Storable a) => Hashable [a] where
hashSalt salt xs = unsafePerformIO $ hashList salt xs
{-- /snippet hashList --}
- snippet -
hash2 :: (Hashable a) => a -> Word64 -> Word64
hash2 k salt = hashSalt salt k
instance (Hashable a, Hashable b) => Hashable (a,b) where
hashSalt salt (a,b) = hash2 b . hash2 a $ salt
instance (Hashable a, Hashable b, Hashable c) => Hashable (a,b,c) where
hashSalt salt (a,b,c) = hash2 c . hash2 b . hash2 a $ salt
- /snippet
{-- snippet hashSB --}
hashByteString :: Word64 -> Strict.ByteString -> IO Word64
hashByteString salt bs = Strict.useAsCStringLen bs $ \(ptr, len) ->
hashIO ptr (fromIntegral len) salt
instance Hashable Strict.ByteString where
hashSalt salt bs = unsafePerformIO $ hashByteString salt bs
rechunk :: Lazy.ByteString -> [Strict.ByteString]
rechunk s
| Lazy.null s = []
| otherwise = let (pre,suf) = Lazy.splitAt chunkSize s
in repack pre : rechunk suf
where repack = Strict.concat . Lazy.toChunks
chunkSize = 64 * 1024
instance Hashable Lazy.ByteString where
hashSalt salt bs = unsafePerformIO $
foldM hashByteString salt (rechunk bs)
{-- /snippet hashSB --}
{-
{-- snippet doubleHash --}
doubleHash :: Hashable a => Int -> a -> [Word32]
doubleHash numHashes value = [h1 + h2 * i | i <- [0..num]]
where h = hashSalt 0x9150a946c4a8966e value
h1 = fromIntegral (h `shiftR` 32) .&. maxBound
h2 = fromIntegral h
num = fromIntegral numHashes
{-- /snippet doubleHash --}
-}
{-- snippet doubleHash_new --}
doubleHash :: Hashable a => Int -> a -> [Word32]
doubleHash numHashes value = go 0
where go n | n == num = []
| otherwise = h1 + h2 * n : go (n + 1)
!h1 = fromIntegral (h `shiftR` 32) .&. maxBound
!h2 = fromIntegral h
h = hashSalt 0x9150a946c4a8966e value
num = fromIntegral numHashes
{-- /snippet doubleHash_new --}
| null | https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch26/BloomFilter/Hash.hs | haskell | - snippet jenkins -
- /snippet jenkins -
- snippet Hashable -
^ salt
^ value to hash
- /snippet Hashable -
value to hash
number of bytes
salt
- /snippet hashIO -
- snippet hashStorable -
- /snippet hashStorable -
snippet -
- snippet Storable -
- /snippet Storable -
- snippet hashList -
- /snippet hashList -
- snippet hashSB -
- /snippet hashSB -
{-- snippet doubleHash -
- /snippet doubleHash -
- snippet doubleHash_new -
- /snippet doubleHash_new - | # LANGUAGE BangPatterns , ForeignFunctionInterface #
module BloomFilter.Hash
(
Hashable(..)
, hash
, doubleHash
) where
import Data.Bits ((.&.), shiftR)
import Foreign.Marshal.Array (withArrayLen)
import Control.Monad (foldM)
import Data.Word (Word32, Word64)
import Foreign.C.Types (CSize)
import Foreign.Marshal.Utils (with)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (Storable, peek, sizeOf)
import qualified Data.ByteString as Strict
import qualified Data.ByteString.Lazy as Lazy
import System.IO.Unsafe (unsafePerformIO)
foreign import ccall unsafe "lookup3.h hashword2" hashWord2
:: Ptr Word32 -> CSize -> Ptr Word32 -> Ptr Word32 -> IO ()
foreign import ccall unsafe "lookup3.h hashlittle2" hashLittle2
:: Ptr a -> CSize -> Ptr Word32 -> Ptr Word32 -> IO ()
class Hashable a where
-> Word64
hash :: Hashable a => a -> Word64
hash = hashSalt 0x106fc397cf62f64d3
- snippet hashIO -
-> IO Word64
hashIO ptr bytes salt =
with (fromIntegral salt) $ \sp -> do
let p1 = castPtr sp
p2 = castPtr sp `plusPtr` 4
go p1 p2
peek sp
where go p1 p2
| bytes .&. 3 == 0 = hashWord2 (castPtr ptr) words p1 p2
| otherwise = hashLittle2 ptr bytes p1 p2
words = bytes `div` 4
hashStorable :: Storable a => Word64 -> a -> Word64
hashStorable salt k = unsafePerformIO . with k $ \ptr ->
hashIO ptr (fromIntegral (sizeOf k)) salt
instance Hashable Char where hashSalt = hashStorable
instance Hashable Int where hashSalt = hashStorable
instance Hashable Double where hashSalt = hashStorable
instance Storable a => Hashable a where
hashSalt = hashStorable
-}
hashList :: (Storable a) => Word64 -> [a] -> IO Word64
hashList salt xs =
withArrayLen xs $ \len ptr ->
hashIO ptr (fromIntegral (len * sizeOf x)) salt
where x = head xs
instance (Storable a) => Hashable [a] where
hashSalt salt xs = unsafePerformIO $ hashList salt xs
- snippet -
hash2 :: (Hashable a) => a -> Word64 -> Word64
hash2 k salt = hashSalt salt k
instance (Hashable a, Hashable b) => Hashable (a,b) where
hashSalt salt (a,b) = hash2 b . hash2 a $ salt
instance (Hashable a, Hashable b, Hashable c) => Hashable (a,b,c) where
hashSalt salt (a,b,c) = hash2 c . hash2 b . hash2 a $ salt
- /snippet
hashByteString :: Word64 -> Strict.ByteString -> IO Word64
hashByteString salt bs = Strict.useAsCStringLen bs $ \(ptr, len) ->
hashIO ptr (fromIntegral len) salt
instance Hashable Strict.ByteString where
hashSalt salt bs = unsafePerformIO $ hashByteString salt bs
rechunk :: Lazy.ByteString -> [Strict.ByteString]
rechunk s
| Lazy.null s = []
| otherwise = let (pre,suf) = Lazy.splitAt chunkSize s
in repack pre : rechunk suf
where repack = Strict.concat . Lazy.toChunks
chunkSize = 64 * 1024
instance Hashable Lazy.ByteString where
hashSalt salt bs = unsafePerformIO $
foldM hashByteString salt (rechunk bs)
doubleHash :: Hashable a => Int -> a -> [Word32]
doubleHash numHashes value = [h1 + h2 * i | i <- [0..num]]
where h = hashSalt 0x9150a946c4a8966e value
h1 = fromIntegral (h `shiftR` 32) .&. maxBound
h2 = fromIntegral h
num = fromIntegral numHashes
-}
doubleHash :: Hashable a => Int -> a -> [Word32]
doubleHash numHashes value = go 0
where go n | n == num = []
| otherwise = h1 + h2 * n : go (n + 1)
!h1 = fromIntegral (h `shiftR` 32) .&. maxBound
!h2 = fromIntegral h
h = hashSalt 0x9150a946c4a8966e value
num = fromIntegral numHashes
|
4ca4debddb390fcfc2f09b3f955f2a3e9503fe8f4887728974b64d923f789f84 | patricoferris/ocaml-search | search.ml | include Search_intf
module Tfidf = Tfidf
module Private = struct
module Witness = Witness
end
let create_uid (type uid) ~(to_string : uid -> string)
~(cmp : uid -> uid -> int) =
let module T = struct
type t = uid
let to_string = to_string
let compare = cmp
end in
(module T : Uid with type t = uid)
module Uids = struct
module String = struct
type t = string
let to_string = Fun.id
let compare = String.compare
end
module Int = struct
type t = int
let to_string = string_of_int
let compare = Int.compare
end
end
| null | https://raw.githubusercontent.com/patricoferris/ocaml-search/bd1702084eef48e31226985e6e9b333e674ab659/src/search.ml | ocaml | include Search_intf
module Tfidf = Tfidf
module Private = struct
module Witness = Witness
end
let create_uid (type uid) ~(to_string : uid -> string)
~(cmp : uid -> uid -> int) =
let module T = struct
type t = uid
let to_string = to_string
let compare = cmp
end in
(module T : Uid with type t = uid)
module Uids = struct
module String = struct
type t = string
let to_string = Fun.id
let compare = String.compare
end
module Int = struct
type t = int
let to_string = string_of_int
let compare = Int.compare
end
end
|
|
89095ff815e2cfa45e364d17e2686982108cff6cf7665a508981f9f312cb1e13 | bobatkey/authenticated-data-structures | Verifier.ml | type 'a auth =
string
type 'a authenticated_computation =
Kit.proof -> [`Ok of Kit.proof * 'a | `ProofFailure]
let return a =
fun proof -> `Ok (proof, a)
let (>>=) c f =
fun prfs ->
match c prfs with
| `ProofFailure -> `ProofFailure
| `Ok (prfs',a) -> f a prfs'
module Authenticatable = struct
type 'a evidence =
{ serialise : 'a -> Ezjsonm.value
; deserialise : Ezjsonm.value -> 'a option
}
let auth =
let serialise h = `String h
and deserialise = function
| `String s -> Some s
| _ -> None
in
{ serialise; deserialise }
let pair a_s b_s =
let serialise (a,b) =
`A [a_s.serialise a; b_s.serialise b]
and deserialise = function
| `A [x;y] ->
(match a_s.deserialise x, b_s.deserialise y with
| Some a, Some b -> Some (a,b)
| _ -> None)
| _ ->
None
in
{ serialise; deserialise }
let sum a_s b_s =
let serialise = function
| `left a -> `A [`String "left"; a_s.serialise a]
| `right b -> `A [`String "right"; b_s.serialise b]
and deserialise = function
| `A [`String "left"; x] ->
(match a_s.deserialise x with
| Some a -> Some (`left a)
| _ -> None)
| `A [`String "right"; y] ->
(match b_s.deserialise y with
| Some b -> Some (`right b)
| _ -> None)
| _ ->
None
in
{ serialise; deserialise }
let string =
let serialise s = `String s
and deserialise = function
| `String s -> Some s
| _ -> None
in
{ serialise; deserialise }
let int =
let serialise i = `String (string_of_int i)
and deserialise = function
| `String i -> (try Some (int_of_string i) with Failure _ -> None)
| _ -> None
in
{ serialise; deserialise }
let unit =
let serialise () = `Null
and deserialise = function
| `Null -> Some ()
| _ -> None
in
{ serialise; deserialise }
end
open Authenticatable
let auth auth_evidence a =
Kit.hash_json (auth_evidence.serialise a)
let unauth auth_evidence h =
function
| [] -> `ProofFailure
| p::ps when Kit.hash_json p = h ->
(match auth_evidence.deserialise p with
| None -> `ProofFailure
| Some a -> `Ok (ps, a))
| _ -> `ProofFailure
| null | https://raw.githubusercontent.com/bobatkey/authenticated-data-structures/a271bb85edd724a55be69c447f8103b5522dd787/src/Verifier.ml | ocaml | type 'a auth =
string
type 'a authenticated_computation =
Kit.proof -> [`Ok of Kit.proof * 'a | `ProofFailure]
let return a =
fun proof -> `Ok (proof, a)
let (>>=) c f =
fun prfs ->
match c prfs with
| `ProofFailure -> `ProofFailure
| `Ok (prfs',a) -> f a prfs'
module Authenticatable = struct
type 'a evidence =
{ serialise : 'a -> Ezjsonm.value
; deserialise : Ezjsonm.value -> 'a option
}
let auth =
let serialise h = `String h
and deserialise = function
| `String s -> Some s
| _ -> None
in
{ serialise; deserialise }
let pair a_s b_s =
let serialise (a,b) =
`A [a_s.serialise a; b_s.serialise b]
and deserialise = function
| `A [x;y] ->
(match a_s.deserialise x, b_s.deserialise y with
| Some a, Some b -> Some (a,b)
| _ -> None)
| _ ->
None
in
{ serialise; deserialise }
let sum a_s b_s =
let serialise = function
| `left a -> `A [`String "left"; a_s.serialise a]
| `right b -> `A [`String "right"; b_s.serialise b]
and deserialise = function
| `A [`String "left"; x] ->
(match a_s.deserialise x with
| Some a -> Some (`left a)
| _ -> None)
| `A [`String "right"; y] ->
(match b_s.deserialise y with
| Some b -> Some (`right b)
| _ -> None)
| _ ->
None
in
{ serialise; deserialise }
let string =
let serialise s = `String s
and deserialise = function
| `String s -> Some s
| _ -> None
in
{ serialise; deserialise }
let int =
let serialise i = `String (string_of_int i)
and deserialise = function
| `String i -> (try Some (int_of_string i) with Failure _ -> None)
| _ -> None
in
{ serialise; deserialise }
let unit =
let serialise () = `Null
and deserialise = function
| `Null -> Some ()
| _ -> None
in
{ serialise; deserialise }
end
open Authenticatable
let auth auth_evidence a =
Kit.hash_json (auth_evidence.serialise a)
let unauth auth_evidence h =
function
| [] -> `ProofFailure
| p::ps when Kit.hash_json p = h ->
(match auth_evidence.deserialise p with
| None -> `ProofFailure
| Some a -> `Ok (ps, a))
| _ -> `ProofFailure
|
|
2b0afe31714c9c623ddc59acf158034bb034905d65d200c0117c06164411da37 | nasa/Common-Metadata-Repository | util.clj | (ns cmr.plugin.jar.util
(:require
[clojure.string :as string]
[cmr.exchange.common.util]
[taoensso.timbre :as log])
(:import
(clojure.lang Symbol)))
(defn matches-coll?
[coll regex-str]
(->> coll
(map #(re-matches (re-pattern regex-str) %))
(some (complement nil?))))
(defn matches-key?
[hashmap regex-str]
(matches-coll? (keys hashmap) regex-str))
(defn matches-val?
[hashmap regex-str]
(matches-coll? (vals hashmap) regex-str))
(defn matched-coll
[coll regex-str]
(->> coll
(map #(re-matches (re-pattern regex-str) %))
(remove nil?)))
(defn matched-keys
[hashmap regex-str]
(->> hashmap
keys
(map #(re-matches (re-pattern regex-str) %))
(remove nil?)))
(defn matched-vals
[hashmap key-regex-str val-regex-str]
(->> hashmap
vec
(map (fn [[k v]]
(when (re-matches (re-pattern key-regex-str) k)
(re-matches (re-pattern val-regex-str) v))))
(remove nil?)))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/jar-plugin-lib/src/cmr/plugin/jar/util.clj | clojure | (ns cmr.plugin.jar.util
(:require
[clojure.string :as string]
[cmr.exchange.common.util]
[taoensso.timbre :as log])
(:import
(clojure.lang Symbol)))
(defn matches-coll?
[coll regex-str]
(->> coll
(map #(re-matches (re-pattern regex-str) %))
(some (complement nil?))))
(defn matches-key?
[hashmap regex-str]
(matches-coll? (keys hashmap) regex-str))
(defn matches-val?
[hashmap regex-str]
(matches-coll? (vals hashmap) regex-str))
(defn matched-coll
[coll regex-str]
(->> coll
(map #(re-matches (re-pattern regex-str) %))
(remove nil?)))
(defn matched-keys
[hashmap regex-str]
(->> hashmap
keys
(map #(re-matches (re-pattern regex-str) %))
(remove nil?)))
(defn matched-vals
[hashmap key-regex-str val-regex-str]
(->> hashmap
vec
(map (fn [[k v]]
(when (re-matches (re-pattern key-regex-str) k)
(re-matches (re-pattern val-regex-str) v))))
(remove nil?)))
|
|
e21e579ad672b70c24e10169d3ada04fb985dd9651024cf5873adeaf893ea154 | degree9/enterprise | target.cljs | (ns degree9.events.target)
;; EventTarget Protocol ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defprotocol IEventTarget
(addEventListener
[this type listener] [this type listener options]
"Registers an event handler of a specific event type on the target.")
(removeEventListener
[this type listener] [this type listener options]
"Removes an event listener from the target.")
(dispatchEvent
[this event]
"Dispatches an event to this target."))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(extend-type js/EventTarget
IEventTarget
(addEventListener
([this type listener] (.addEventListener this type listener))
([this type listener options] (.addEventListener this type listener options)))
(removeEventListener
([this type listener] (.removeEventListener this type listener))
([this type listener options] (.removeEventListener this type listener options)))
(dispatchEvent
([this event] (.dispatchEvent this event))))
| null | https://raw.githubusercontent.com/degree9/enterprise/65737c347e513d0a0bf94f2d4374935c7270185d/src/degree9/events/target.cljs | clojure | EventTarget Protocol ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| (ns degree9.events.target)
(defprotocol IEventTarget
(addEventListener
[this type listener] [this type listener options]
"Registers an event handler of a specific event type on the target.")
(removeEventListener
[this type listener] [this type listener options]
"Removes an event listener from the target.")
(dispatchEvent
[this event]
"Dispatches an event to this target."))
(extend-type js/EventTarget
IEventTarget
(addEventListener
([this type listener] (.addEventListener this type listener))
([this type listener options] (.addEventListener this type listener options)))
(removeEventListener
([this type listener] (.removeEventListener this type listener))
([this type listener options] (.removeEventListener this type listener options)))
(dispatchEvent
([this event] (.dispatchEvent this event))))
|
846398545920c443bcbae42ee576a77162603fca931a9ca13bc67526bf2192e4 | ml4tp/tcoq | tactic_debug.ml | (************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Util
open Names
open Pp
open Tacexpr
open Termops
open Nameops
open Proofview.Notations
let (ltac_trace_info : ltac_trace Exninfo.t) = Exninfo.make ()
let prtac x =
Pptactic.pr_glob_tactic (Global.env()) x
let prmatchpatt env sigma hyp =
Pptactic.pr_match_pattern (Printer.pr_constr_pattern_env env sigma) hyp
let prmatchrl rl =
Pptactic.pr_match_rule false (Pptactic.pr_glob_tactic (Global.env()))
(fun (_,p) -> Printer.pr_constr_pattern p) rl
(* This module intends to be a beginning of debugger for tactic expressions.
Currently, it is quite simple and we can hope to have, in the future, a more
complete panel of commands dedicated to a proof assistant framework *)
(* Debug information *)
type debug_info =
| DebugOn of int
| DebugOff
(* An exception handler *)
let explain_logic_error e =
CErrors.print (fst (ExplainErr.process_vernac_interp_error (e, Exninfo.null)))
let explain_logic_error_no_anomaly e =
CErrors.print_no_report
(fst (ExplainErr.process_vernac_interp_error (e, Exninfo.null)))
let msg_tac_debug s = Proofview.NonLogical.print_debug (s++fnl())
let msg_tac_notice s = Proofview.NonLogical.print_notice (s++fnl())
(* Prints the goal *)
let db_pr_goal gl =
let env = Proofview.Goal.env gl in
let concl = Proofview.Goal.concl gl in
let penv = print_named_context env in
let pc = print_constr_env env concl in
str" " ++ hv 0 (penv ++ fnl () ++
str "============================" ++ fnl () ++
str" " ++ pc) ++ fnl ()
let db_pr_goal =
Proofview.Goal.nf_enter { enter = begin fun gl ->
let pg = db_pr_goal gl in
Proofview.tclLIFT (msg_tac_notice (str "Goal:" ++ fnl () ++ pg))
end }
(* Prints the commands *)
let help () =
msg_tac_debug (str "Commands: <Enter> = Continue" ++ fnl() ++
str " h/? = Help" ++ fnl() ++
str " r <num> = Run <num> times" ++ fnl() ++
str " r <string> = Run up to next idtac <string>" ++ fnl() ++
str " s = Skip" ++ fnl() ++
str " x = Exit")
(* Prints the goal and the command to be executed *)
let goal_com tac =
Proofview.tclTHEN
db_pr_goal
(Proofview.tclLIFT (msg_tac_debug (str "Going to execute:" ++ fnl () ++ prtac tac)))
(* [run (new_ref _)] gives us a ref shared among [NonLogical.t]
expressions. It avoids parametrizing everything over a
reference. *)
let skipped = Proofview.NonLogical.run (Proofview.NonLogical.ref 0)
let skip = Proofview.NonLogical.run (Proofview.NonLogical.ref 0)
let breakpoint = Proofview.NonLogical.run (Proofview.NonLogical.ref None)
let rec drop_spaces inst i =
if String.length inst > i && inst.[i] == ' ' then drop_spaces inst (i+1)
else i
let possibly_unquote s =
if String.length s >= 2 && s.[0] == '"' && s.[String.length s - 1] == '"' then
String.sub s 1 (String.length s - 2)
else
s
( debugger
let db_initialize =
let open Proofview.NonLogical in
(skip:=0) >> (skipped:=0) >> (breakpoint:=None)
let int_of_string s =
try Proofview.NonLogical.return (int_of_string s)
with e -> Proofview.NonLogical.raise e
let string_get s i =
try Proofview.NonLogical.return (String.get s i)
with e -> Proofview.NonLogical.raise e
let run_invalid_arg () = Proofview.NonLogical.raise (Invalid_argument "run_com")
(* Gives the number of steps or next breakpoint of a run command *)
let run_com inst =
let open Proofview.NonLogical in
string_get inst 0 >>= fun first_char ->
if first_char ='r' then
let i = drop_spaces inst 1 in
if String.length inst > i then
let s = String.sub inst i (String.length inst - i) in
if inst.[0] >= '0' && inst.[0] <= '9' then
int_of_string s >>= fun num ->
(if num<0 then run_invalid_arg () else return ()) >>
(skip:=num) >> (skipped:=0)
else
breakpoint:=Some (possibly_unquote s)
else
run_invalid_arg ()
else
run_invalid_arg ()
(* Prints the run counter *)
let run ini =
let open Proofview.NonLogical in
if not ini then
begin
Proofview.NonLogical.print_notice (str"\b\r\b\r") >>
!skipped >>= fun skipped ->
msg_tac_debug (str "Executed expressions: " ++ int skipped ++ fnl())
end >>
!skipped >>= fun x ->
skipped := x+1
else
return ()
(* Prints the prompt *)
let rec prompt level =
(* spiwack: avoid overriding by the open below *)
let runtrue = run true in
begin
let open Proofview.NonLogical in
Proofview.NonLogical.print_notice (fnl () ++ str "TcDebug (" ++ int level ++ str ") > ") >>
let exit = (skip:=0) >> (skipped:=0) >> raise Sys.Break in
Proofview.NonLogical.catch Proofview.NonLogical.read_line
begin function (e, info) -> match e with
| End_of_file -> exit
| e -> raise ~info e
end
>>= fun inst ->
match inst with
| "" -> return (DebugOn (level+1))
| "s" -> return (DebugOff)
| "x" -> Proofview.NonLogical.print_char '\b' >> exit
| "h"| "?" ->
begin
help () >>
prompt level
end
| _ ->
Proofview.NonLogical.catch (run_com inst >> runtrue >> return (DebugOn (level+1)))
begin function (e, info) -> match e with
| Failure _ | Invalid_argument _ -> prompt level
| e -> raise ~info e
end
end
(* Prints the state and waits for an instruction *)
(* spiwack: the only reason why we need to take the continuation [f]
as an argument rather than returning the new level directly seems to
be that [f] is wrapped in with "explain_logic_error". I don't think
it serves any purpose in the current design, so we could just drop
that. *)
let debug_prompt lev tac f =
(* spiwack: avoid overriding by the open below *)
let runfalse = run false in
let open Proofview.NonLogical in
let (>=) = Proofview.tclBIND in
(* What to print and to do next *)
let newlevel =
Proofview.tclLIFT !skip >= fun initial_skip ->
if Int.equal initial_skip 0 then
Proofview.tclLIFT !breakpoint >= fun breakpoint ->
if Option.is_empty breakpoint then Proofview.tclTHEN (goal_com tac) (Proofview.tclLIFT (prompt lev))
else Proofview.tclLIFT(runfalse >> return (DebugOn (lev+1)))
else Proofview.tclLIFT begin
(!skip >>= fun s -> skip:=s-1) >>
runfalse >>
!skip >>= fun new_skip ->
(if Int.equal new_skip 0 then skipped:=0 else return ()) >>
return (DebugOn (lev+1))
end in
newlevel >= fun newlevel ->
(* What to execute *)
Proofview.tclOR
(f newlevel)
begin fun (reraise, info) ->
Proofview.tclTHEN
(Proofview.tclLIFT begin
(skip:=0) >> (skipped:=0) >>
if Logic.catchable_exception reraise then
msg_tac_debug (str "Level " ++ int lev ++ str ": " ++ explain_logic_error reraise)
else return ()
end)
(Proofview.tclZERO ~info reraise)
end
let is_debug db =
let open Proofview.NonLogical in
!breakpoint >>= fun breakpoint ->
match db, breakpoint with
| DebugOff, _ -> return false
| _, Some _ -> return false
| _ ->
!skip >>= fun skip ->
return (Int.equal skip 0)
(* Prints a constr *)
let db_constr debug env c =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Evaluated term: " ++ print_constr_env env c)
else return ()
(* Prints the pattern rule *)
let db_pattern_rule debug num r =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
begin
msg_tac_debug (str "Pattern rule " ++ int num ++ str ":" ++ fnl () ++
str "|" ++ spc () ++ prmatchrl r)
end
else return ()
(* Prints the hypothesis pattern identifier if it exists *)
let hyp_bound = function
| Anonymous -> str " (unbound)"
| Name id -> str " (bound to " ++ pr_id id ++ str ")"
(* Prints a matched hypothesis *)
let db_matched_hyp debug env (id,_,c) ido =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Hypothesis " ++ pr_id id ++ hyp_bound ido ++
str " has been matched: " ++ print_constr_env env c)
else return ()
(* Prints the matched conclusion *)
let db_matched_concl debug env c =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Conclusion has been matched: " ++ print_constr_env env c)
else return ()
(* Prints a success message when the goal has been matched *)
let db_mc_pattern_success debug =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "The goal has been successfully matched!" ++ fnl() ++
str "Let us execute the right-hand side part..." ++ fnl())
else return ()
(* Prints a failure message for an hypothesis pattern *)
let db_hyp_pattern_failure debug env sigma (na,hyp) =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "The pattern hypothesis" ++ hyp_bound na ++
str " cannot match: " ++
prmatchpatt env sigma hyp)
else return ()
(* Prints a matching failure message for a rule *)
let db_matching_failure debug =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "This rule has failed due to matching errors!" ++ fnl() ++
str "Let us try the next one...")
else return ()
(* Prints an evaluation failure message for a rule *)
let db_eval_failure debug s =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
let s = str "message \"" ++ s ++ str "\"" in
msg_tac_debug
(str "This rule has failed due to \"Fail\" tactic (" ++
s ++ str ", level 0)!" ++ fnl() ++ str "Let us try the next one...")
else return ()
(* Prints a logic failure message for a rule *)
let db_logic_failure debug err =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
begin
msg_tac_debug (explain_logic_error err) >>
msg_tac_debug (str "This rule has failed due to a logic error!" ++ fnl() ++
str "Let us try the next one...")
end
else return ()
let is_breakpoint brkname s = match brkname, s with
| Some s, MsgString s'::_ -> String.equal s s'
| _ -> false
let db_breakpoint debug s =
let open Proofview.NonLogical in
!breakpoint >>= fun opt_breakpoint ->
match debug with
| DebugOn lev when not (CList.is_empty s) && is_breakpoint opt_breakpoint s ->
breakpoint:=None
| _ ->
return ()
* traces
let is_defined_ltac trace =
let rec aux = function
| (_, Tacexpr.LtacNameCall f) :: _ -> not (Tacenv.is_ltac_for_ml_tactic f)
| (_, Tacexpr.LtacNotationCall f) :: _ -> true
| (_, Tacexpr.LtacAtomCall _) :: _ -> false
| _ :: tail -> aux tail
| [] -> false in
aux (List.rev trace)
let explain_ltac_call_trace last trace loc =
let calls = last :: List.rev_map snd trace in
let pr_call ck = match ck with
| Tacexpr.LtacNotationCall kn -> quote (Pptactic.pr_alias_key kn)
| Tacexpr.LtacNameCall cst -> quote (Pptactic.pr_ltac_constant cst)
| Tacexpr.LtacMLCall t ->
quote (Pptactic.pr_glob_tactic (Global.env()) t)
| Tacexpr.LtacVarCall (id,t) ->
quote (Nameops.pr_id id) ++ strbrk " (bound to " ++
Pptactic.pr_glob_tactic (Global.env()) t ++ str ")"
| Tacexpr.LtacAtomCall te ->
quote (Pptactic.pr_glob_tactic (Global.env())
(Tacexpr.TacAtom (Loc.ghost,te)))
| Tacexpr.LtacConstrInterp (c, { Pretyping.ltac_constrs = vars }) ->
quote (Printer.pr_glob_constr_env (Global.env()) c) ++
(if not (Id.Map.is_empty vars) then
strbrk " (with " ++
prlist_with_sep pr_comma
(fun (id,c) ->
pr_id id ++ str ":=" ++ Printer.pr_lconstr_under_binders c)
(List.rev (Id.Map.bindings vars)) ++ str ")"
else mt())
in
match calls with
| [] -> mt ()
| [a] -> hov 0 (str "Ltac call to " ++ pr_call a ++ str " failed.")
| _ ->
let kind_of_last_call = match List.last calls with
| Tacexpr.LtacConstrInterp _ -> ", last term evaluation failed."
| _ -> ", last call failed."
in
hov 0 (str "In nested Ltac calls to " ++
pr_enum pr_call calls ++ strbrk kind_of_last_call)
let skip_extensions trace =
let rec aux = function
| (_,Tacexpr.LtacNameCall f as tac) :: _
when Tacenv.is_ltac_for_ml_tactic f -> [tac]
| (_,Tacexpr.LtacNotationCall _ as tac) :: (_,Tacexpr.LtacMLCall _) :: _ ->
(* Case of an ML defined tactic with entry of the form <<"foo" args>> *)
(* see tacextend.mlp *)
[tac]
| (_,Tacexpr.LtacMLCall _ as tac) :: _ -> [tac]
| t :: tail -> t :: aux tail
| [] -> [] in
List.rev (aux (List.rev trace))
let finer_loc loc1 loc2 = Loc.merge loc1 loc2 = loc2
let extract_ltac_trace trace eloc =
let trace = skip_extensions trace in
let (loc,c),tail = List.sep_last trace in
if is_defined_ltac trace then
(* We entered a user-defined tactic,
we display the trace with location of the call *)
let msg = hov 0 (explain_ltac_call_trace c tail eloc ++ fnl()) in
Some msg, if finer_loc eloc loc then eloc else loc
else
(* We entered a primitive tactic, we don't display trace but
report on the finest location *)
let best_loc =
trace is with innermost call coming first
let rec aux best_loc = function
| (loc,_)::tail ->
if Loc.is_ghost best_loc ||
not (Loc.is_ghost loc) && finer_loc loc best_loc
then
aux loc tail
else
aux best_loc tail
| [] -> best_loc in
aux eloc trace in
None, best_loc
let get_ltac_trace (_, info) =
let ltac_trace = Exninfo.get info ltac_trace_info in
let loc = Option.default Loc.ghost (Loc.get_loc info) in
match ltac_trace with
| None -> None
| Some trace -> Some (extract_ltac_trace trace loc)
let () = ExplainErr.register_additional_error_info get_ltac_trace
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/ltac/tactic_debug.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
This module intends to be a beginning of debugger for tactic expressions.
Currently, it is quite simple and we can hope to have, in the future, a more
complete panel of commands dedicated to a proof assistant framework
Debug information
An exception handler
Prints the goal
Prints the commands
Prints the goal and the command to be executed
[run (new_ref _)] gives us a ref shared among [NonLogical.t]
expressions. It avoids parametrizing everything over a
reference.
Gives the number of steps or next breakpoint of a run command
Prints the run counter
Prints the prompt
spiwack: avoid overriding by the open below
Prints the state and waits for an instruction
spiwack: the only reason why we need to take the continuation [f]
as an argument rather than returning the new level directly seems to
be that [f] is wrapped in with "explain_logic_error". I don't think
it serves any purpose in the current design, so we could just drop
that.
spiwack: avoid overriding by the open below
What to print and to do next
What to execute
Prints a constr
Prints the pattern rule
Prints the hypothesis pattern identifier if it exists
Prints a matched hypothesis
Prints the matched conclusion
Prints a success message when the goal has been matched
Prints a failure message for an hypothesis pattern
Prints a matching failure message for a rule
Prints an evaluation failure message for a rule
Prints a logic failure message for a rule
see tacextend.mlp
We entered a user-defined tactic,
we display the trace with location of the call
We entered a primitive tactic, we don't display trace but
report on the finest location | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util
open Names
open Pp
open Tacexpr
open Termops
open Nameops
open Proofview.Notations
let (ltac_trace_info : ltac_trace Exninfo.t) = Exninfo.make ()
let prtac x =
Pptactic.pr_glob_tactic (Global.env()) x
let prmatchpatt env sigma hyp =
Pptactic.pr_match_pattern (Printer.pr_constr_pattern_env env sigma) hyp
let prmatchrl rl =
Pptactic.pr_match_rule false (Pptactic.pr_glob_tactic (Global.env()))
(fun (_,p) -> Printer.pr_constr_pattern p) rl
type debug_info =
| DebugOn of int
| DebugOff
let explain_logic_error e =
CErrors.print (fst (ExplainErr.process_vernac_interp_error (e, Exninfo.null)))
let explain_logic_error_no_anomaly e =
CErrors.print_no_report
(fst (ExplainErr.process_vernac_interp_error (e, Exninfo.null)))
let msg_tac_debug s = Proofview.NonLogical.print_debug (s++fnl())
let msg_tac_notice s = Proofview.NonLogical.print_notice (s++fnl())
let db_pr_goal gl =
let env = Proofview.Goal.env gl in
let concl = Proofview.Goal.concl gl in
let penv = print_named_context env in
let pc = print_constr_env env concl in
str" " ++ hv 0 (penv ++ fnl () ++
str "============================" ++ fnl () ++
str" " ++ pc) ++ fnl ()
let db_pr_goal =
Proofview.Goal.nf_enter { enter = begin fun gl ->
let pg = db_pr_goal gl in
Proofview.tclLIFT (msg_tac_notice (str "Goal:" ++ fnl () ++ pg))
end }
let help () =
msg_tac_debug (str "Commands: <Enter> = Continue" ++ fnl() ++
str " h/? = Help" ++ fnl() ++
str " r <num> = Run <num> times" ++ fnl() ++
str " r <string> = Run up to next idtac <string>" ++ fnl() ++
str " s = Skip" ++ fnl() ++
str " x = Exit")
let goal_com tac =
Proofview.tclTHEN
db_pr_goal
(Proofview.tclLIFT (msg_tac_debug (str "Going to execute:" ++ fnl () ++ prtac tac)))
let skipped = Proofview.NonLogical.run (Proofview.NonLogical.ref 0)
let skip = Proofview.NonLogical.run (Proofview.NonLogical.ref 0)
let breakpoint = Proofview.NonLogical.run (Proofview.NonLogical.ref None)
let rec drop_spaces inst i =
if String.length inst > i && inst.[i] == ' ' then drop_spaces inst (i+1)
else i
let possibly_unquote s =
if String.length s >= 2 && s.[0] == '"' && s.[String.length s - 1] == '"' then
String.sub s 1 (String.length s - 2)
else
s
( debugger
let db_initialize =
let open Proofview.NonLogical in
(skip:=0) >> (skipped:=0) >> (breakpoint:=None)
let int_of_string s =
try Proofview.NonLogical.return (int_of_string s)
with e -> Proofview.NonLogical.raise e
let string_get s i =
try Proofview.NonLogical.return (String.get s i)
with e -> Proofview.NonLogical.raise e
let run_invalid_arg () = Proofview.NonLogical.raise (Invalid_argument "run_com")
let run_com inst =
let open Proofview.NonLogical in
string_get inst 0 >>= fun first_char ->
if first_char ='r' then
let i = drop_spaces inst 1 in
if String.length inst > i then
let s = String.sub inst i (String.length inst - i) in
if inst.[0] >= '0' && inst.[0] <= '9' then
int_of_string s >>= fun num ->
(if num<0 then run_invalid_arg () else return ()) >>
(skip:=num) >> (skipped:=0)
else
breakpoint:=Some (possibly_unquote s)
else
run_invalid_arg ()
else
run_invalid_arg ()
let run ini =
let open Proofview.NonLogical in
if not ini then
begin
Proofview.NonLogical.print_notice (str"\b\r\b\r") >>
!skipped >>= fun skipped ->
msg_tac_debug (str "Executed expressions: " ++ int skipped ++ fnl())
end >>
!skipped >>= fun x ->
skipped := x+1
else
return ()
let rec prompt level =
let runtrue = run true in
begin
let open Proofview.NonLogical in
Proofview.NonLogical.print_notice (fnl () ++ str "TcDebug (" ++ int level ++ str ") > ") >>
let exit = (skip:=0) >> (skipped:=0) >> raise Sys.Break in
Proofview.NonLogical.catch Proofview.NonLogical.read_line
begin function (e, info) -> match e with
| End_of_file -> exit
| e -> raise ~info e
end
>>= fun inst ->
match inst with
| "" -> return (DebugOn (level+1))
| "s" -> return (DebugOff)
| "x" -> Proofview.NonLogical.print_char '\b' >> exit
| "h"| "?" ->
begin
help () >>
prompt level
end
| _ ->
Proofview.NonLogical.catch (run_com inst >> runtrue >> return (DebugOn (level+1)))
begin function (e, info) -> match e with
| Failure _ | Invalid_argument _ -> prompt level
| e -> raise ~info e
end
end
let debug_prompt lev tac f =
let runfalse = run false in
let open Proofview.NonLogical in
let (>=) = Proofview.tclBIND in
let newlevel =
Proofview.tclLIFT !skip >= fun initial_skip ->
if Int.equal initial_skip 0 then
Proofview.tclLIFT !breakpoint >= fun breakpoint ->
if Option.is_empty breakpoint then Proofview.tclTHEN (goal_com tac) (Proofview.tclLIFT (prompt lev))
else Proofview.tclLIFT(runfalse >> return (DebugOn (lev+1)))
else Proofview.tclLIFT begin
(!skip >>= fun s -> skip:=s-1) >>
runfalse >>
!skip >>= fun new_skip ->
(if Int.equal new_skip 0 then skipped:=0 else return ()) >>
return (DebugOn (lev+1))
end in
newlevel >= fun newlevel ->
Proofview.tclOR
(f newlevel)
begin fun (reraise, info) ->
Proofview.tclTHEN
(Proofview.tclLIFT begin
(skip:=0) >> (skipped:=0) >>
if Logic.catchable_exception reraise then
msg_tac_debug (str "Level " ++ int lev ++ str ": " ++ explain_logic_error reraise)
else return ()
end)
(Proofview.tclZERO ~info reraise)
end
let is_debug db =
let open Proofview.NonLogical in
!breakpoint >>= fun breakpoint ->
match db, breakpoint with
| DebugOff, _ -> return false
| _, Some _ -> return false
| _ ->
!skip >>= fun skip ->
return (Int.equal skip 0)
let db_constr debug env c =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Evaluated term: " ++ print_constr_env env c)
else return ()
let db_pattern_rule debug num r =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
begin
msg_tac_debug (str "Pattern rule " ++ int num ++ str ":" ++ fnl () ++
str "|" ++ spc () ++ prmatchrl r)
end
else return ()
let hyp_bound = function
| Anonymous -> str " (unbound)"
| Name id -> str " (bound to " ++ pr_id id ++ str ")"
let db_matched_hyp debug env (id,_,c) ido =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Hypothesis " ++ pr_id id ++ hyp_bound ido ++
str " has been matched: " ++ print_constr_env env c)
else return ()
let db_matched_concl debug env c =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "Conclusion has been matched: " ++ print_constr_env env c)
else return ()
let db_mc_pattern_success debug =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "The goal has been successfully matched!" ++ fnl() ++
str "Let us execute the right-hand side part..." ++ fnl())
else return ()
let db_hyp_pattern_failure debug env sigma (na,hyp) =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "The pattern hypothesis" ++ hyp_bound na ++
str " cannot match: " ++
prmatchpatt env sigma hyp)
else return ()
let db_matching_failure debug =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
msg_tac_debug (str "This rule has failed due to matching errors!" ++ fnl() ++
str "Let us try the next one...")
else return ()
let db_eval_failure debug s =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
let s = str "message \"" ++ s ++ str "\"" in
msg_tac_debug
(str "This rule has failed due to \"Fail\" tactic (" ++
s ++ str ", level 0)!" ++ fnl() ++ str "Let us try the next one...")
else return ()
let db_logic_failure debug err =
let open Proofview.NonLogical in
is_debug debug >>= fun db ->
if db then
begin
msg_tac_debug (explain_logic_error err) >>
msg_tac_debug (str "This rule has failed due to a logic error!" ++ fnl() ++
str "Let us try the next one...")
end
else return ()
let is_breakpoint brkname s = match brkname, s with
| Some s, MsgString s'::_ -> String.equal s s'
| _ -> false
let db_breakpoint debug s =
let open Proofview.NonLogical in
!breakpoint >>= fun opt_breakpoint ->
match debug with
| DebugOn lev when not (CList.is_empty s) && is_breakpoint opt_breakpoint s ->
breakpoint:=None
| _ ->
return ()
* traces
let is_defined_ltac trace =
let rec aux = function
| (_, Tacexpr.LtacNameCall f) :: _ -> not (Tacenv.is_ltac_for_ml_tactic f)
| (_, Tacexpr.LtacNotationCall f) :: _ -> true
| (_, Tacexpr.LtacAtomCall _) :: _ -> false
| _ :: tail -> aux tail
| [] -> false in
aux (List.rev trace)
let explain_ltac_call_trace last trace loc =
let calls = last :: List.rev_map snd trace in
let pr_call ck = match ck with
| Tacexpr.LtacNotationCall kn -> quote (Pptactic.pr_alias_key kn)
| Tacexpr.LtacNameCall cst -> quote (Pptactic.pr_ltac_constant cst)
| Tacexpr.LtacMLCall t ->
quote (Pptactic.pr_glob_tactic (Global.env()) t)
| Tacexpr.LtacVarCall (id,t) ->
quote (Nameops.pr_id id) ++ strbrk " (bound to " ++
Pptactic.pr_glob_tactic (Global.env()) t ++ str ")"
| Tacexpr.LtacAtomCall te ->
quote (Pptactic.pr_glob_tactic (Global.env())
(Tacexpr.TacAtom (Loc.ghost,te)))
| Tacexpr.LtacConstrInterp (c, { Pretyping.ltac_constrs = vars }) ->
quote (Printer.pr_glob_constr_env (Global.env()) c) ++
(if not (Id.Map.is_empty vars) then
strbrk " (with " ++
prlist_with_sep pr_comma
(fun (id,c) ->
pr_id id ++ str ":=" ++ Printer.pr_lconstr_under_binders c)
(List.rev (Id.Map.bindings vars)) ++ str ")"
else mt())
in
match calls with
| [] -> mt ()
| [a] -> hov 0 (str "Ltac call to " ++ pr_call a ++ str " failed.")
| _ ->
let kind_of_last_call = match List.last calls with
| Tacexpr.LtacConstrInterp _ -> ", last term evaluation failed."
| _ -> ", last call failed."
in
hov 0 (str "In nested Ltac calls to " ++
pr_enum pr_call calls ++ strbrk kind_of_last_call)
let skip_extensions trace =
let rec aux = function
| (_,Tacexpr.LtacNameCall f as tac) :: _
when Tacenv.is_ltac_for_ml_tactic f -> [tac]
| (_,Tacexpr.LtacNotationCall _ as tac) :: (_,Tacexpr.LtacMLCall _) :: _ ->
Case of an ML defined tactic with entry of the form < < " foo " args > >
[tac]
| (_,Tacexpr.LtacMLCall _ as tac) :: _ -> [tac]
| t :: tail -> t :: aux tail
| [] -> [] in
List.rev (aux (List.rev trace))
let finer_loc loc1 loc2 = Loc.merge loc1 loc2 = loc2
let extract_ltac_trace trace eloc =
let trace = skip_extensions trace in
let (loc,c),tail = List.sep_last trace in
if is_defined_ltac trace then
let msg = hov 0 (explain_ltac_call_trace c tail eloc ++ fnl()) in
Some msg, if finer_loc eloc loc then eloc else loc
else
let best_loc =
trace is with innermost call coming first
let rec aux best_loc = function
| (loc,_)::tail ->
if Loc.is_ghost best_loc ||
not (Loc.is_ghost loc) && finer_loc loc best_loc
then
aux loc tail
else
aux best_loc tail
| [] -> best_loc in
aux eloc trace in
None, best_loc
let get_ltac_trace (_, info) =
let ltac_trace = Exninfo.get info ltac_trace_info in
let loc = Option.default Loc.ghost (Loc.get_loc info) in
match ltac_trace with
| None -> None
| Some trace -> Some (extract_ltac_trace trace loc)
let () = ExplainErr.register_additional_error_info get_ltac_trace
|
8fc277c57d8e22a03275f5d9c238c7fcd98fb353e9ee27a8794ddd0b7a82d8c3 | kazu-yamamoto/network-run | tcpClient.hs | {-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import qualified Data.ByteString.Char8 as C
import Network.Run.TCP (runTCPClient)
import Network.Socket.ByteString (recv, sendAll)
main :: IO ()
main = runTCPClient "127.0.0.1" "3000" $ \s -> do
sendAll s "Hello, world!"
msg <- recv s 1024
putStr "Received: "
C.putStrLn msg
| null | https://raw.githubusercontent.com/kazu-yamamoto/network-run/938eb6223b981f83b5a42c33959a90de7a034efd/examples/tcpClient.hs | haskell | # LANGUAGE OverloadedStrings # | module Main (main) where
import qualified Data.ByteString.Char8 as C
import Network.Run.TCP (runTCPClient)
import Network.Socket.ByteString (recv, sendAll)
main :: IO ()
main = runTCPClient "127.0.0.1" "3000" $ \s -> do
sendAll s "Hello, world!"
msg <- recv s 1024
putStr "Received: "
C.putStrLn msg
|
a8bff81b17a25539301b44b509229b2162947c87dab7502cc6982e1e9cfafdea | KingoftheHomeless/in-other-words | NonDet.hs | module Control.Effect.NonDet
( -- * Effects
NonDet(..)
, Cull(..)
, Cut(..)
, Split(..)
, Logic
-- * Actions
, choose
, lose
, fromList
, cull
, cutfail
, cut
, call
, split
-- * Interpretations
, runNonDet
, runNonDet1
, runCullCut
, runLogic
-- * Threading constraints
, NonDetThreads
-- * Carriers
, NonDetC
, CullCutC
, LogicC
) where
import Control.Monad
import Control.Effect
import Control.Effect.Internal.NonDet
import Control.Effect.Internal.Utils
import Control.Effect.Type.Split
-- | Introduce new branches stemming from the current one using a list of values.
fromList :: Eff NonDet m => [a] -> m a
fromList = send .# FromList
| Introduce two new branches stemming from the current one .
choose :: Eff NonDet m => m a -> m a -> m a
choose ma mb = join $ fromList [ma, mb]
{-# INLINE choose #-}
-- | Fail the current branch and proceed to the next branch,
-- backtracking to the nearest use of 'choose' or 'fromList' that
-- still has unprocessed branches.
lose :: Eff NonDet m => m a
lose = fromList []
# INLINE lose #
| Cull nondeterminism in the argument , limiting the number of branches
-- it may introduce to be at most 1.
--
-- @'cull' (return True \`'choose'\` return False) == return True@
--
-- @'cull' ('lose' \`'choose'\` return False) == return False@
cull :: Eff Cull m => m a -> m a
cull = send .# Cull
# INLINE cull #
-- | Fail the current branch, and prevent backtracking up until the nearest
-- enclosing use of 'call' (if any).
--
' \`'choose'\ ` m = = ' cutfail'@
cutfail :: Eff Cut m => m a
cutfail = send Cutfail
# INLINE cutfail #
-- | Commit to the current branch: prevent all backtracking that would move
-- execution to before 'cut' was invoked, up until the nearest enclosing use
-- of 'call' (if any).
--
@'call ' ( ' fromList ' [ 1,2 ] > > = \\a - > ' cut ' > > ' fromList ' [ a , a+3 ] ) = = ' fromList ' [ 1,4]@
--
-- @'call' (('cut' >> return True) \`'choose'\` return False) == return True@
cut :: Effs '[NonDet, Cut] m => m ()
cut = pure () `choose` cutfail
# INLINE cut #
-- | Delimit the prevention of backtracking from uses of 'cut' and 'cutfail'.
--
-- @'call' 'cutfail' \`'choose'\` m = m@
call :: Eff Cut m => m a -> m a
call = send . Call
# INLINE call #
| Split a nondeterministic computation into its first result
-- and the rest of the computation, if possible.
--
Note that ' ' cutfail ' = = ' cutfail'@. If you do n't want that behavior ,
use ' ( ' call ' m)@ instead of ' m@.
split :: Eff Split m => m a -> m (Maybe (a, m a))
split = send . Split id
# INLINE split #
| null | https://raw.githubusercontent.com/KingoftheHomeless/in-other-words/9c864c81beb4fdf71d363b6962db5c90275c57ef/src/Control/Effect/NonDet.hs | haskell | * Effects
* Actions
* Interpretations
* Threading constraints
* Carriers
| Introduce new branches stemming from the current one using a list of values.
# INLINE choose #
| Fail the current branch and proceed to the next branch,
backtracking to the nearest use of 'choose' or 'fromList' that
still has unprocessed branches.
it may introduce to be at most 1.
@'cull' (return True \`'choose'\` return False) == return True@
@'cull' ('lose' \`'choose'\` return False) == return False@
| Fail the current branch, and prevent backtracking up until the nearest
enclosing use of 'call' (if any).
| Commit to the current branch: prevent all backtracking that would move
execution to before 'cut' was invoked, up until the nearest enclosing use
of 'call' (if any).
@'call' (('cut' >> return True) \`'choose'\` return False) == return True@
| Delimit the prevention of backtracking from uses of 'cut' and 'cutfail'.
@'call' 'cutfail' \`'choose'\` m = m@
and the rest of the computation, if possible.
| module Control.Effect.NonDet
NonDet(..)
, Cull(..)
, Cut(..)
, Split(..)
, Logic
, choose
, lose
, fromList
, cull
, cutfail
, cut
, call
, split
, runNonDet
, runNonDet1
, runCullCut
, runLogic
, NonDetThreads
, NonDetC
, CullCutC
, LogicC
) where
import Control.Monad
import Control.Effect
import Control.Effect.Internal.NonDet
import Control.Effect.Internal.Utils
import Control.Effect.Type.Split
fromList :: Eff NonDet m => [a] -> m a
fromList = send .# FromList
| Introduce two new branches stemming from the current one .
choose :: Eff NonDet m => m a -> m a -> m a
choose ma mb = join $ fromList [ma, mb]
lose :: Eff NonDet m => m a
lose = fromList []
# INLINE lose #
| Cull nondeterminism in the argument , limiting the number of branches
cull :: Eff Cull m => m a -> m a
cull = send .# Cull
# INLINE cull #
' \`'choose'\ ` m = = ' cutfail'@
cutfail :: Eff Cut m => m a
cutfail = send Cutfail
# INLINE cutfail #
@'call ' ( ' fromList ' [ 1,2 ] > > = \\a - > ' cut ' > > ' fromList ' [ a , a+3 ] ) = = ' fromList ' [ 1,4]@
cut :: Effs '[NonDet, Cut] m => m ()
cut = pure () `choose` cutfail
# INLINE cut #
call :: Eff Cut m => m a -> m a
call = send . Call
# INLINE call #
| Split a nondeterministic computation into its first result
Note that ' ' cutfail ' = = ' cutfail'@. If you do n't want that behavior ,
use ' ( ' call ' m)@ instead of ' m@.
split :: Eff Split m => m a -> m (Maybe (a, m a))
split = send . Split id
# INLINE split #
|
9b96a1051d6211f5265a97979fe8a5636b3003146dfb5d546a9ad91cab5e7516 | iokasimov/pandora | Possible.hs | module Pandora.Paradigm.Structure.Ability.Possible where
import Pandora.Paradigm.Primary.Functor.Maybe (Maybe)
import Pandora.Paradigm.Inventory.Some.Optics (Lens)
class Possible target source where
perhaps :: Lens Maybe source target
| null | https://raw.githubusercontent.com/iokasimov/pandora/62fda602322d0123809dc690f61624f06c008cb2/Pandora/Paradigm/Structure/Ability/Possible.hs | haskell | module Pandora.Paradigm.Structure.Ability.Possible where
import Pandora.Paradigm.Primary.Functor.Maybe (Maybe)
import Pandora.Paradigm.Inventory.Some.Optics (Lens)
class Possible target source where
perhaps :: Lens Maybe source target
|
|
95a5480f8055543a3b6718b5c2cd90f5dd35edb02f8e4c6de42441678c266d32 | fulcrologic/semantic-ui-wrapper | ui_portal_inner.cljc | (ns com.fulcrologic.semantic-ui.addons.portal.ui-portal-inner
(:require
[com.fulcrologic.semantic-ui.factory-helpers :as h]
#?(:cljs ["semantic-ui-react$PortalInner" :as PortalInner])))
(def ui-portal-inner
"An inner component that allows you to render children outside their parent.
Props:
- children (node): Primary content.
- innerRef (custom): Called with a ref to the inner node.
- mountNode (any): The node where the portal should mount.
- onMount (func): Called when the portal is mounted on the DOM
- onUnmount (func): Called when the portal is unmounted from the DOM"
#?(:cljs (h/factory-apply PortalInner)))
| null | https://raw.githubusercontent.com/fulcrologic/semantic-ui-wrapper/7bd53f445bc4ca7e052c69596dc089282671df6c/src/main/com/fulcrologic/semantic_ui/addons/portal/ui_portal_inner.cljc | clojure | (ns com.fulcrologic.semantic-ui.addons.portal.ui-portal-inner
(:require
[com.fulcrologic.semantic-ui.factory-helpers :as h]
#?(:cljs ["semantic-ui-react$PortalInner" :as PortalInner])))
(def ui-portal-inner
"An inner component that allows you to render children outside their parent.
Props:
- children (node): Primary content.
- innerRef (custom): Called with a ref to the inner node.
- mountNode (any): The node where the portal should mount.
- onMount (func): Called when the portal is mounted on the DOM
- onUnmount (func): Called when the portal is unmounted from the DOM"
#?(:cljs (h/factory-apply PortalInner)))
|
|
014fb23504138e1086b866e7ecf74861fb358f0ce7a87d04f6efb121b27d29d3 | EasyCrypt/easycrypt | ecPhlSp.ml | (* -------------------------------------------------------------------- *)
open EcUtils
open EcTypes
open EcModules
open EcFol
open EcParsetree
open EcEnv
open EcCoreGoal
open EcLowPhlGoal
* SP carries four elements ,
* - bds : a set of existential binders
* - assoc : a set of pairs ( x , e ) such that x = e holds
* for instance after an assignment x < - e
* - pre : the actual precondition ( progressively weakened )
* - cost : the cost of evaluating the statements up - to the current point .
*
* After an assignment of the form x < - e the four elements are updated :
* 1 ) a new fresh local x ' is added to the list of existential binders
* 2 ) ( x , e ) is added to the assoc list , and every other ( y , d ) is replaced
* by ( y[x->x ' ] , d[x->x ' ] )
* 3 ) pre is replaced by pre[x->x ' ]
* 4 ) cost is replaced by cost + expr_cost(e )
*
* The simplification of this version comes from two tricks :
*
* 1 ) the replacement of ( y[x->x ' ] ) introduces a simplification
* opportunity . There is no need to keep ( x ' , d[x->x ' ] ) as a
* conjuction x ' = d[x->x ' ] : it is enough to perform the substitution
* of d[x->x ' ] for x ' in place ( it is a mess however to implement this
* idea with simultaneous assigns )
* 2 ) $ MISSING ...
* SP carries four elements,
* - bds: a set of existential binders
* - assoc: a set of pairs (x,e) such that x=e holds
* for instance after an assignment x <- e
* - pre: the actual precondition (progressively weakened)
* - cost: the cost of evaluating the statements up-to the current point.
*
* After an assignment of the form x <- e the four elements are updated:
* 1) a new fresh local x' is added to the list of existential binders
* 2) (x, e) is added to the assoc list, and every other (y,d) is replaced
* by (y[x->x'], d[x->x'])
* 3) pre is replaced by pre[x->x']
* 4) cost is replaced by cost + expr_cost(e)
*
* The simplification of this version comes from two tricks:
*
* 1) the replacement of (y[x->x']) introduces a simplification
* opportunity. There is no need to keep (x', d[x->x']) as a
* conjuction x' = d[x->x']: it is enough to perform the substitution
* of d[x->x'] for x' in place (it is a mess however to implement this
* idea with simultaneous assigns)
* 2) $MISSING...
*)
(* -------------------------------------------------------------------- *)
module LowInternal = struct
(* ------------------------------------------------------------------ *)
exception No_sp
(* ------------------------------------------------------------------ *)
type assignable =
| APVar of (prog_var * ty)
| ALocal of (EcIdent.t * ty)
and assignables = assignable list
(* ------------------------------------------------------------------ *)
let isAPVar = function APVar _ -> true | _ -> false
let isALocal = function ALocal _ -> true | _ -> false
(* ------------------------------------------------------------------ *)
let sp_asgn (memenv : EcMemory.memenv) env lv e (bds, assoc, pre) =
let subst_in_assoc lv new_id_exp new_ids ((ass : assignables), f) =
let replace_assignable var =
match var with
| APVar (pv', ty) -> begin
match lv,new_ids with
| LvVar (pv ,_), [new_id,_] when NormMp.pv_equal env pv pv' ->
ALocal (new_id,ty)
| LvVar _, _ ->
var
| LvTuple vs, _ -> begin
let aux = List.map2 (fun x y -> (fst x, fst y)) vs new_ids in
try
let new_id = snd (List.find (NormMp.pv_equal env pv' |- fst) aux) in
ALocal (new_id, ty)
with Not_found -> var
end
end
| _ -> var
in let ass = List.map replace_assignable ass in
let f = subst_form_lv env (EcMemory.memory memenv) lv new_id_exp f in
(ass, f)
in
let rec simplify_assoc (assoc, bds, pre) =
match assoc with
| [] ->
([], bds, pre)
| (ass, f) :: assoc ->
let assoc, bds, pre = simplify_assoc (assoc, bds, pre) in
let destr_ass =
try List.combine (List.map in_seq1 ass) (destr_tuple f)
with Invalid_argument _ | DestrError _ -> [(ass, f)]
in
let do_subst_or_accum (assoc, bds, pre) (a, f) =
match a with
| [ALocal (id, _)] ->
let subst = EcFol.Fsubst.f_subst_id in
let subst = EcFol.Fsubst.f_bind_local subst id f in
(List.map (snd_map (EcFol.Fsubst.f_subst subst)) assoc,
List.filter ((<>) id |- fst) bds,
EcFol.Fsubst.f_subst subst pre)
| _ -> ((a, f) :: assoc, bds, pre)
in
List.fold_left do_subst_or_accum (assoc, bds, pre) destr_ass
in
let for_lvars vs =
let mem = EcMemory.memory memenv in
let fresh pv = EcIdent.create (EcIdent.name (id_of_pv pv mem)) in
let newids = List.map (fst_map fresh) vs in
let bds = newids @ bds in
let astuple = f_tuple (List.map (curry f_local) newids) in
let pre = subst_form_lv env mem lv astuple pre in
let e_form = EcFol.form_of_expr mem e in
let e_form = subst_form_lv env mem lv astuple e_form in
let assoc =
(List.map (fun x -> APVar x) vs, e_form)
:: (List.map (subst_in_assoc lv astuple newids) assoc) in
let assoc, bds, pre = simplify_assoc (List.rev assoc, bds, pre) in
(bds, List.rev assoc, pre)
in
match lv with
| LvVar v -> for_lvars [v]
| LvTuple vs -> for_lvars vs
(* ------------------------------------------------------------------ *)
let build_sp (memenv : EcMemory.memenv) bds assoc pre =
let f_assoc = function
| APVar (pv, pv_ty) -> f_pvar pv pv_ty (EcMemory.memory memenv)
| ALocal (lv, lv_ty) -> f_local lv lv_ty
in
let rem_ex (assoc, f) (x_id, x_ty) =
try
let rec partition_on_x = function
| [] ->
raise Not_found
| (a, e) :: assoc when f_equal e (f_local x_id x_ty) ->
(a, assoc)
| x :: assoc ->
let a, assoc = partition_on_x assoc in (a, x::assoc)
in
let a,assoc = partition_on_x assoc in
let a = f_tuple (List.map f_assoc a) in
let subst = EcFol.Fsubst.f_subst_id in
let subst = EcFol.Fsubst.f_bind_local subst x_id a in
let f = EcFol.Fsubst.f_subst subst f in
let assoc = List.map (snd_map (EcFol.Fsubst.f_subst subst)) assoc in
(assoc, f)
with Not_found -> (assoc, f)
in
let assoc, pre = List.fold_left rem_ex (assoc, pre) bds in
let pre =
let merge_assoc f (a, e) =
f_and_simpl (f_eq_simpl (f_tuple (List.map f_assoc a)) e) f
in List.fold_left merge_assoc pre assoc in
EcFol.f_exists_simpl (List.map (snd_map (fun t -> GTty t)) bds) pre
(* ------------------------------------------------------------------ *)
let rec sp_stmt (memenv : EcMemory.memenv) env (bds, assoc, pre, cost) stmt =
match stmt with
| [] ->
([], (bds, assoc, pre, cost))
| i :: is ->
try
let bds, assoc, pre, cost =
sp_instr memenv env (bds, assoc, pre) cost i in
sp_stmt memenv env (bds,assoc,pre,cost) is
with No_sp ->
(stmt, (bds, assoc, pre, cost))
and sp_instr (memenv : EcMemory.memenv) env (bds,assoc,pre) cost instr =
match instr.i_node with
| Sasgn (lv, e) ->
let bds, assoc, pre = sp_asgn memenv env lv e (bds, assoc, pre) in
let cost = f_xadd cost (EcCHoare.cost_of_expr_any memenv e) in
bds, assoc, pre, cost
| Sif (e, s1, s2) ->
let e_form = EcFol.form_of_expr (EcMemory.memory memenv) e in
let pre_t =
build_sp memenv bds assoc (f_and_simpl e_form pre) in
let pre_f =
build_sp memenv bds assoc (f_and_simpl (f_not e_form) pre) in
let stmt_t, (bds_t, assoc_t, pre_t, cost_t) =
sp_stmt memenv env (bds, assoc, pre_t, f_x0) s1.s_node in
let stmt_f, (bds_f, assoc_f, pre_f, cost_f) =
sp_stmt memenv env (bds, assoc, pre_f, f_x0) s2.s_node in
if not (List.is_empty stmt_t && List.is_empty stmt_f) then raise No_sp;
let sp_t = build_sp memenv bds_t assoc_t pre_t in
let sp_f = build_sp memenv bds_f assoc_f pre_f in
let cost =
f_xadd cost
(f_xadd
(EcCHoare.cost_of_expr_any memenv e)
(f_xadd cost_t cost_f)) in
([], [], f_or_simpl sp_t sp_f, cost)
| _ -> raise No_sp
let sp_stmt (memenv : EcMemory.memenv) env stmt f =
let stmt, (bds, assoc, pre, cost) =
sp_stmt memenv env ([], [], f, f_x0) stmt in
let pre = build_sp memenv bds assoc pre in
stmt, pre, cost
end
(* -------------------------------------------------------------------- *)
let t_sp_side pos tc =
let module LI = LowInternal in
let env, _, concl = FApi.tc1_eflat tc in
let as_single = function Single i -> i | _ -> assert false
and as_double = function Double i -> i | _ -> assert false in
let check_sp_progress ?side pos stmt =
if is_some pos && not (List.is_empty stmt) then
tc_error_lazy !!tc (fun fmt ->
let side = side |> (function
| None -> "remaining"
| Some (`Left ) -> "remaining on the left"
| Some (`Right) -> "remaining on the right")
in
Format.fprintf fmt
"%d instruction(s) %s, change your [sp] bound"
(List.length stmt) side)
in
let check_form_indep stmt mem form =
let write_set = EcPV.s_write env (EcModules.stmt stmt) in
let read_set = EcPV.PV.fv env (EcMemory.memory mem) form in
if not (EcPV.PV.indep env write_set read_set) then
tc_error !!tc "the bound should not be modified by the statement \
targeted by [sp]" in
match concl.f_node, pos with
| FhoareS hs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos hs.hs_s in
let stmt1, hs_pr, _ =
LI.sp_stmt hs.hs_m env stmt1 hs.hs_pr in
check_sp_progress pos stmt1;
let subgoal = f_hoareS_r { hs with hs_s = stmt (stmt1@stmt2); hs_pr } in
FApi.xmutate1 tc `Sp [subgoal]
| FcHoareS chs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos chs.chs_s in
let stmt1, chs_pr, sp_cost =
LI.sp_stmt chs.chs_m env stmt1 chs.chs_pr in
check_sp_progress pos stmt1;
let cond, cost = EcCHoare.cost_sub_self chs.chs_co sp_cost in
let subgoal = f_cHoareS_r {chs with chs_s = stmt (stmt1@stmt2);
chs_pr;
chs_co = cost } in
FApi.xmutate1 tc `Sp [cond; subgoal]
| FbdHoareS bhs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos bhs.bhs_s in
check_form_indep stmt1 bhs.bhs_m bhs.bhs_bd;
let stmt1, bhs_pr, _ =
LI.sp_stmt bhs.bhs_m env stmt1 bhs.bhs_pr in
check_sp_progress pos stmt1;
let subgoal = f_bdHoareS_r {bhs with bhs_s = stmt (stmt1@stmt2); bhs_pr; } in
FApi.xmutate1 tc `Sp [subgoal]
| FequivS es, (None | Some (Double _)) ->
let pos = pos |> omap as_double in
let posL = pos |> omap fst in
let posR = pos |> omap snd in
let stmtL1, stmtL2 = o_split ~rev:true posL es.es_sl in
let stmtR1, stmtR2 = o_split ~rev:true posR es.es_sr in
let es_pr = es.es_pr in
let stmtL1, es_pr, _ =
LI.sp_stmt es.es_ml env stmtL1 es_pr in
let stmtR1, es_pr, _ =
LI.sp_stmt es.es_mr env stmtR1 es_pr in
check_sp_progress ~side:`Left pos stmtL1;
check_sp_progress ~side:`Right pos stmtR1;
let subgoal = f_equivS_r { es with
es_sl = stmt (stmtL1@stmtL2);
es_sr = stmt (stmtR1@stmtR2);
es_pr =es_pr;
} in
FApi.xmutate1 tc `Sp [subgoal]
| _, Some (Single _) -> tc_error_noXhl ~kinds:[`Hoare `Stmt;
`CHoare `Stmt;
`PHoare `Stmt] !!tc
| _, Some (Double _) -> tc_error_noXhl ~kinds:[`Equiv `Stmt] !!tc
| _, None -> tc_error_noXhl ~kinds:(hlkinds_Xhl_r `Stmt) !!tc
(* -------------------------------------------------------------------- *)
let t_sp = FApi.t_low1 "sp" t_sp_side
| null | https://raw.githubusercontent.com/EasyCrypt/easycrypt/f87695472e70c313ef2966e20979b1afcc2e543e/src/phl/ecPhlSp.ml | ocaml | --------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | open EcUtils
open EcTypes
open EcModules
open EcFol
open EcParsetree
open EcEnv
open EcCoreGoal
open EcLowPhlGoal
* SP carries four elements ,
* - bds : a set of existential binders
* - assoc : a set of pairs ( x , e ) such that x = e holds
* for instance after an assignment x < - e
* - pre : the actual precondition ( progressively weakened )
* - cost : the cost of evaluating the statements up - to the current point .
*
* After an assignment of the form x < - e the four elements are updated :
* 1 ) a new fresh local x ' is added to the list of existential binders
* 2 ) ( x , e ) is added to the assoc list , and every other ( y , d ) is replaced
* by ( y[x->x ' ] , d[x->x ' ] )
* 3 ) pre is replaced by pre[x->x ' ]
* 4 ) cost is replaced by cost + expr_cost(e )
*
* The simplification of this version comes from two tricks :
*
* 1 ) the replacement of ( y[x->x ' ] ) introduces a simplification
* opportunity . There is no need to keep ( x ' , d[x->x ' ] ) as a
* conjuction x ' = d[x->x ' ] : it is enough to perform the substitution
* of d[x->x ' ] for x ' in place ( it is a mess however to implement this
* idea with simultaneous assigns )
* 2 ) $ MISSING ...
* SP carries four elements,
* - bds: a set of existential binders
* - assoc: a set of pairs (x,e) such that x=e holds
* for instance after an assignment x <- e
* - pre: the actual precondition (progressively weakened)
* - cost: the cost of evaluating the statements up-to the current point.
*
* After an assignment of the form x <- e the four elements are updated:
* 1) a new fresh local x' is added to the list of existential binders
* 2) (x, e) is added to the assoc list, and every other (y,d) is replaced
* by (y[x->x'], d[x->x'])
* 3) pre is replaced by pre[x->x']
* 4) cost is replaced by cost + expr_cost(e)
*
* The simplification of this version comes from two tricks:
*
* 1) the replacement of (y[x->x']) introduces a simplification
* opportunity. There is no need to keep (x', d[x->x']) as a
* conjuction x' = d[x->x']: it is enough to perform the substitution
* of d[x->x'] for x' in place (it is a mess however to implement this
* idea with simultaneous assigns)
* 2) $MISSING...
*)
module LowInternal = struct
exception No_sp
type assignable =
| APVar of (prog_var * ty)
| ALocal of (EcIdent.t * ty)
and assignables = assignable list
let isAPVar = function APVar _ -> true | _ -> false
let isALocal = function ALocal _ -> true | _ -> false
let sp_asgn (memenv : EcMemory.memenv) env lv e (bds, assoc, pre) =
let subst_in_assoc lv new_id_exp new_ids ((ass : assignables), f) =
let replace_assignable var =
match var with
| APVar (pv', ty) -> begin
match lv,new_ids with
| LvVar (pv ,_), [new_id,_] when NormMp.pv_equal env pv pv' ->
ALocal (new_id,ty)
| LvVar _, _ ->
var
| LvTuple vs, _ -> begin
let aux = List.map2 (fun x y -> (fst x, fst y)) vs new_ids in
try
let new_id = snd (List.find (NormMp.pv_equal env pv' |- fst) aux) in
ALocal (new_id, ty)
with Not_found -> var
end
end
| _ -> var
in let ass = List.map replace_assignable ass in
let f = subst_form_lv env (EcMemory.memory memenv) lv new_id_exp f in
(ass, f)
in
let rec simplify_assoc (assoc, bds, pre) =
match assoc with
| [] ->
([], bds, pre)
| (ass, f) :: assoc ->
let assoc, bds, pre = simplify_assoc (assoc, bds, pre) in
let destr_ass =
try List.combine (List.map in_seq1 ass) (destr_tuple f)
with Invalid_argument _ | DestrError _ -> [(ass, f)]
in
let do_subst_or_accum (assoc, bds, pre) (a, f) =
match a with
| [ALocal (id, _)] ->
let subst = EcFol.Fsubst.f_subst_id in
let subst = EcFol.Fsubst.f_bind_local subst id f in
(List.map (snd_map (EcFol.Fsubst.f_subst subst)) assoc,
List.filter ((<>) id |- fst) bds,
EcFol.Fsubst.f_subst subst pre)
| _ -> ((a, f) :: assoc, bds, pre)
in
List.fold_left do_subst_or_accum (assoc, bds, pre) destr_ass
in
let for_lvars vs =
let mem = EcMemory.memory memenv in
let fresh pv = EcIdent.create (EcIdent.name (id_of_pv pv mem)) in
let newids = List.map (fst_map fresh) vs in
let bds = newids @ bds in
let astuple = f_tuple (List.map (curry f_local) newids) in
let pre = subst_form_lv env mem lv astuple pre in
let e_form = EcFol.form_of_expr mem e in
let e_form = subst_form_lv env mem lv astuple e_form in
let assoc =
(List.map (fun x -> APVar x) vs, e_form)
:: (List.map (subst_in_assoc lv astuple newids) assoc) in
let assoc, bds, pre = simplify_assoc (List.rev assoc, bds, pre) in
(bds, List.rev assoc, pre)
in
match lv with
| LvVar v -> for_lvars [v]
| LvTuple vs -> for_lvars vs
let build_sp (memenv : EcMemory.memenv) bds assoc pre =
let f_assoc = function
| APVar (pv, pv_ty) -> f_pvar pv pv_ty (EcMemory.memory memenv)
| ALocal (lv, lv_ty) -> f_local lv lv_ty
in
let rem_ex (assoc, f) (x_id, x_ty) =
try
let rec partition_on_x = function
| [] ->
raise Not_found
| (a, e) :: assoc when f_equal e (f_local x_id x_ty) ->
(a, assoc)
| x :: assoc ->
let a, assoc = partition_on_x assoc in (a, x::assoc)
in
let a,assoc = partition_on_x assoc in
let a = f_tuple (List.map f_assoc a) in
let subst = EcFol.Fsubst.f_subst_id in
let subst = EcFol.Fsubst.f_bind_local subst x_id a in
let f = EcFol.Fsubst.f_subst subst f in
let assoc = List.map (snd_map (EcFol.Fsubst.f_subst subst)) assoc in
(assoc, f)
with Not_found -> (assoc, f)
in
let assoc, pre = List.fold_left rem_ex (assoc, pre) bds in
let pre =
let merge_assoc f (a, e) =
f_and_simpl (f_eq_simpl (f_tuple (List.map f_assoc a)) e) f
in List.fold_left merge_assoc pre assoc in
EcFol.f_exists_simpl (List.map (snd_map (fun t -> GTty t)) bds) pre
let rec sp_stmt (memenv : EcMemory.memenv) env (bds, assoc, pre, cost) stmt =
match stmt with
| [] ->
([], (bds, assoc, pre, cost))
| i :: is ->
try
let bds, assoc, pre, cost =
sp_instr memenv env (bds, assoc, pre) cost i in
sp_stmt memenv env (bds,assoc,pre,cost) is
with No_sp ->
(stmt, (bds, assoc, pre, cost))
and sp_instr (memenv : EcMemory.memenv) env (bds,assoc,pre) cost instr =
match instr.i_node with
| Sasgn (lv, e) ->
let bds, assoc, pre = sp_asgn memenv env lv e (bds, assoc, pre) in
let cost = f_xadd cost (EcCHoare.cost_of_expr_any memenv e) in
bds, assoc, pre, cost
| Sif (e, s1, s2) ->
let e_form = EcFol.form_of_expr (EcMemory.memory memenv) e in
let pre_t =
build_sp memenv bds assoc (f_and_simpl e_form pre) in
let pre_f =
build_sp memenv bds assoc (f_and_simpl (f_not e_form) pre) in
let stmt_t, (bds_t, assoc_t, pre_t, cost_t) =
sp_stmt memenv env (bds, assoc, pre_t, f_x0) s1.s_node in
let stmt_f, (bds_f, assoc_f, pre_f, cost_f) =
sp_stmt memenv env (bds, assoc, pre_f, f_x0) s2.s_node in
if not (List.is_empty stmt_t && List.is_empty stmt_f) then raise No_sp;
let sp_t = build_sp memenv bds_t assoc_t pre_t in
let sp_f = build_sp memenv bds_f assoc_f pre_f in
let cost =
f_xadd cost
(f_xadd
(EcCHoare.cost_of_expr_any memenv e)
(f_xadd cost_t cost_f)) in
([], [], f_or_simpl sp_t sp_f, cost)
| _ -> raise No_sp
let sp_stmt (memenv : EcMemory.memenv) env stmt f =
let stmt, (bds, assoc, pre, cost) =
sp_stmt memenv env ([], [], f, f_x0) stmt in
let pre = build_sp memenv bds assoc pre in
stmt, pre, cost
end
let t_sp_side pos tc =
let module LI = LowInternal in
let env, _, concl = FApi.tc1_eflat tc in
let as_single = function Single i -> i | _ -> assert false
and as_double = function Double i -> i | _ -> assert false in
let check_sp_progress ?side pos stmt =
if is_some pos && not (List.is_empty stmt) then
tc_error_lazy !!tc (fun fmt ->
let side = side |> (function
| None -> "remaining"
| Some (`Left ) -> "remaining on the left"
| Some (`Right) -> "remaining on the right")
in
Format.fprintf fmt
"%d instruction(s) %s, change your [sp] bound"
(List.length stmt) side)
in
let check_form_indep stmt mem form =
let write_set = EcPV.s_write env (EcModules.stmt stmt) in
let read_set = EcPV.PV.fv env (EcMemory.memory mem) form in
if not (EcPV.PV.indep env write_set read_set) then
tc_error !!tc "the bound should not be modified by the statement \
targeted by [sp]" in
match concl.f_node, pos with
| FhoareS hs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos hs.hs_s in
let stmt1, hs_pr, _ =
LI.sp_stmt hs.hs_m env stmt1 hs.hs_pr in
check_sp_progress pos stmt1;
let subgoal = f_hoareS_r { hs with hs_s = stmt (stmt1@stmt2); hs_pr } in
FApi.xmutate1 tc `Sp [subgoal]
| FcHoareS chs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos chs.chs_s in
let stmt1, chs_pr, sp_cost =
LI.sp_stmt chs.chs_m env stmt1 chs.chs_pr in
check_sp_progress pos stmt1;
let cond, cost = EcCHoare.cost_sub_self chs.chs_co sp_cost in
let subgoal = f_cHoareS_r {chs with chs_s = stmt (stmt1@stmt2);
chs_pr;
chs_co = cost } in
FApi.xmutate1 tc `Sp [cond; subgoal]
| FbdHoareS bhs, (None | Some (Single _)) ->
let pos = pos |> omap as_single in
let stmt1, stmt2 = o_split ~rev:true pos bhs.bhs_s in
check_form_indep stmt1 bhs.bhs_m bhs.bhs_bd;
let stmt1, bhs_pr, _ =
LI.sp_stmt bhs.bhs_m env stmt1 bhs.bhs_pr in
check_sp_progress pos stmt1;
let subgoal = f_bdHoareS_r {bhs with bhs_s = stmt (stmt1@stmt2); bhs_pr; } in
FApi.xmutate1 tc `Sp [subgoal]
| FequivS es, (None | Some (Double _)) ->
let pos = pos |> omap as_double in
let posL = pos |> omap fst in
let posR = pos |> omap snd in
let stmtL1, stmtL2 = o_split ~rev:true posL es.es_sl in
let stmtR1, stmtR2 = o_split ~rev:true posR es.es_sr in
let es_pr = es.es_pr in
let stmtL1, es_pr, _ =
LI.sp_stmt es.es_ml env stmtL1 es_pr in
let stmtR1, es_pr, _ =
LI.sp_stmt es.es_mr env stmtR1 es_pr in
check_sp_progress ~side:`Left pos stmtL1;
check_sp_progress ~side:`Right pos stmtR1;
let subgoal = f_equivS_r { es with
es_sl = stmt (stmtL1@stmtL2);
es_sr = stmt (stmtR1@stmtR2);
es_pr =es_pr;
} in
FApi.xmutate1 tc `Sp [subgoal]
| _, Some (Single _) -> tc_error_noXhl ~kinds:[`Hoare `Stmt;
`CHoare `Stmt;
`PHoare `Stmt] !!tc
| _, Some (Double _) -> tc_error_noXhl ~kinds:[`Equiv `Stmt] !!tc
| _, None -> tc_error_noXhl ~kinds:(hlkinds_Xhl_r `Stmt) !!tc
let t_sp = FApi.t_low1 "sp" t_sp_side
|
49810fad63e9f95645acc57a084d595ee92b344f04d3eb8fef2c1c70cac0542a | nathell/smyrna | huffman.clj | (ns smyrna.huffman
(:require [smyrna.bitstream :as bitstream]
[clojure.data.csv :as csv]
[clojure.java.io :as io])
(:import [java.util Arrays]
[java.nio IntBuffer]
[smyrna.bitstream IBitSink IBitSource]))
(defn extendv
([v] (extendv v 64 0))
([v n x]
(let [l (count v)]
(if (> l n)
(throw (Exception.))
(into v (repeat (- n l) x))))))
(defn canonical-code [code-lengths]
(let [numl (frequencies code-lengths)
max-length (apply max code-lengths)
first-code (loop [first-code (list 0)
i (dec max-length)]
(if (= i 0)
(vec first-code)
(recur (conj first-code (long (/ (+ (first first-code) (numl (inc i) 0)) 2)))
(dec i))))
{:keys [symbols codes]} (loop [next-code first-code
symbols (vec (repeat max-length []))
codes []
code-lengths code-lengths
i 0]
(if-let [x (first code-lengths)]
(recur
(update-in next-code [(dec x)] inc)
(update-in symbols [(dec x)] conj i)
(conj codes (next-code (dec x)))
(next code-lengths)
(inc i))
{:symbols (reduce into symbols) :codes codes}))]
{:numl (extendv (vec (for [i (range 1 (inc (apply max (keys numl))))] (numl i 0)))), :first-code (extendv first-code), :codes codes, :symbols symbols}))
(defn down-heap [^longs heap ^long i len]
(let [v (aget heap i)]
(loop [i i]
(let [[nx vm] (if (and (<= (+ i i) len)
(< (aget heap (aget heap (+ i i))) (aget heap v)))
[(+ i i) (aget heap (+ i i))] [i v])
[nx vm] (if (and (<= (+ i i 1) len)
(< (aget heap (aget heap (+ i i 1))) (aget heap vm)))
[(+ i i 1) (aget heap (+ i i 1))] [nx vm])]
(if (< i nx)
(do
(aset-long heap i (aget heap nx))
(recur (long nx)))
(do
(aset-long heap i v)
heap))))))
(defn make-heap [heap len]
(doseq [i (range (bit-shift-right len 1) 0 -1)]
(down-heap heap i len))
heap)
(defn code-lengths [freqs]
(let [len (count freqs)
^longs a (into-array Long/TYPE (concat [0] (range (inc len) (inc (* 2 len))) freqs))]
(make-heap a len)
(doseq [h (range len 1 -1)]
(let [h (int h)
m1 (aget a 1)
_ (aset-long a 1 (aget a h))
h (dec h)
_ (down-heap a 1 h)
m2 (aget a 1)]
(aset-long a (inc h) (+ (aget a m1) (aget a m2)))
(aset-long a 1 (inc h))
(aset-long a m1 (inc h))
(aset-long a m2 (inc h))
(down-heap a 1 h)))
(aset-long a 2 0)
(doseq [i (range 3 (+ len len 1))]
(aset-long a i (inc (aget a (aget a i)))))
(vec (Arrays/copyOfRange a (inc len) (count a)))))
(defn do-encode
[s ^IBitSink out codes lengths index]
(doseq [sym s :let [i (index sym)]]
(.writeBinary out (codes i) (lengths i))))
(defn enumerate
[seq]
(zipmap seq (range)))
(defn precompute-encoding
[syms counts]
(let [lengths (code-lengths counts)]
(assoc (canonical-code lengths)
:lengths lengths
:index (enumerate syms))))
(defn encode
([s ^IBitSink out] (encode s out (frequencies s)))
([s ^IBitSink out freqs] (encode s out (keys freqs) (vals freqs)))
([s ^IBitSink out syms counts]
(let [{:keys [codes lengths index]} (precompute-encoding syms counts)]
(do-encode s out codes lengths index))))
(defn int-buffer
[v]
(IntBuffer/wrap (into-array Integer/TYPE (map int v))))
(defn decode-symbol
[^IBitSource bs ^IntBuffer numl ^IntBuffer first-code ^IntBuffer symbols]
(loop [n (.nextBit bs)
i 0
ofs 0]
(if (>= n (.get first-code i))
(.get symbols (+ ofs n (- (.get first-code i))))
(recur (+ n n (.nextBit bs))
(inc i)
(+ ofs (.get numl i))))))
| null | https://raw.githubusercontent.com/nathell/smyrna/7ceb648d9ab9dfcf95f3197af27191b15a96d0e7/src/clj/smyrna/huffman.clj | clojure | (ns smyrna.huffman
(:require [smyrna.bitstream :as bitstream]
[clojure.data.csv :as csv]
[clojure.java.io :as io])
(:import [java.util Arrays]
[java.nio IntBuffer]
[smyrna.bitstream IBitSink IBitSource]))
(defn extendv
([v] (extendv v 64 0))
([v n x]
(let [l (count v)]
(if (> l n)
(throw (Exception.))
(into v (repeat (- n l) x))))))
(defn canonical-code [code-lengths]
(let [numl (frequencies code-lengths)
max-length (apply max code-lengths)
first-code (loop [first-code (list 0)
i (dec max-length)]
(if (= i 0)
(vec first-code)
(recur (conj first-code (long (/ (+ (first first-code) (numl (inc i) 0)) 2)))
(dec i))))
{:keys [symbols codes]} (loop [next-code first-code
symbols (vec (repeat max-length []))
codes []
code-lengths code-lengths
i 0]
(if-let [x (first code-lengths)]
(recur
(update-in next-code [(dec x)] inc)
(update-in symbols [(dec x)] conj i)
(conj codes (next-code (dec x)))
(next code-lengths)
(inc i))
{:symbols (reduce into symbols) :codes codes}))]
{:numl (extendv (vec (for [i (range 1 (inc (apply max (keys numl))))] (numl i 0)))), :first-code (extendv first-code), :codes codes, :symbols symbols}))
(defn down-heap [^longs heap ^long i len]
(let [v (aget heap i)]
(loop [i i]
(let [[nx vm] (if (and (<= (+ i i) len)
(< (aget heap (aget heap (+ i i))) (aget heap v)))
[(+ i i) (aget heap (+ i i))] [i v])
[nx vm] (if (and (<= (+ i i 1) len)
(< (aget heap (aget heap (+ i i 1))) (aget heap vm)))
[(+ i i 1) (aget heap (+ i i 1))] [nx vm])]
(if (< i nx)
(do
(aset-long heap i (aget heap nx))
(recur (long nx)))
(do
(aset-long heap i v)
heap))))))
(defn make-heap [heap len]
(doseq [i (range (bit-shift-right len 1) 0 -1)]
(down-heap heap i len))
heap)
(defn code-lengths [freqs]
(let [len (count freqs)
^longs a (into-array Long/TYPE (concat [0] (range (inc len) (inc (* 2 len))) freqs))]
(make-heap a len)
(doseq [h (range len 1 -1)]
(let [h (int h)
m1 (aget a 1)
_ (aset-long a 1 (aget a h))
h (dec h)
_ (down-heap a 1 h)
m2 (aget a 1)]
(aset-long a (inc h) (+ (aget a m1) (aget a m2)))
(aset-long a 1 (inc h))
(aset-long a m1 (inc h))
(aset-long a m2 (inc h))
(down-heap a 1 h)))
(aset-long a 2 0)
(doseq [i (range 3 (+ len len 1))]
(aset-long a i (inc (aget a (aget a i)))))
(vec (Arrays/copyOfRange a (inc len) (count a)))))
(defn do-encode
[s ^IBitSink out codes lengths index]
(doseq [sym s :let [i (index sym)]]
(.writeBinary out (codes i) (lengths i))))
(defn enumerate
[seq]
(zipmap seq (range)))
(defn precompute-encoding
[syms counts]
(let [lengths (code-lengths counts)]
(assoc (canonical-code lengths)
:lengths lengths
:index (enumerate syms))))
(defn encode
([s ^IBitSink out] (encode s out (frequencies s)))
([s ^IBitSink out freqs] (encode s out (keys freqs) (vals freqs)))
([s ^IBitSink out syms counts]
(let [{:keys [codes lengths index]} (precompute-encoding syms counts)]
(do-encode s out codes lengths index))))
(defn int-buffer
[v]
(IntBuffer/wrap (into-array Integer/TYPE (map int v))))
(defn decode-symbol
[^IBitSource bs ^IntBuffer numl ^IntBuffer first-code ^IntBuffer symbols]
(loop [n (.nextBit bs)
i 0
ofs 0]
(if (>= n (.get first-code i))
(.get symbols (+ ofs n (- (.get first-code i))))
(recur (+ n n (.nextBit bs))
(inc i)
(+ ofs (.get numl i))))))
|
|
d86a55cd9217e6de572b5e6fa650c6528237563d4e8bd1678bb4780dd74ef802 | TheLortex/mirage-monorepo | quoter.mli | * Generate expressions in a hygienic way .
The idea is that whenever we want to refer to an expression in generated
code we first quote it . The result will be an identifier that is guaranteed
to refer to the expression it was created from . This way it is impossible
for quoted fragments to refer to newly introduced expressions .
The idea is that whenever we want to refer to an expression in generated
code we first quote it. The result will be an identifier that is guaranteed
to refer to the expression it was created from. This way it is impossible
for quoted fragments to refer to newly introduced expressions. *)
open Import
type t
val create : unit -> t
(** Creates a quoter. A quoter guarantees to give names that do not clash with
any other names used before *)
val quote : t -> expression -> expression
(** [quote t e] returns the expression that is safe to use in place of [e] in
generated code*)
val sanitize : t -> expression -> expression
(** [sanitize t e] Returns [e] wrapped with bindings for all quoted expressions
in the quoter [t] *)
| null | https://raw.githubusercontent.com/TheLortex/mirage-monorepo/b557005dfe5a51fc50f0597d82c450291cfe8a2a/duniverse/ppxlib/src/quoter.mli | ocaml | * Creates a quoter. A quoter guarantees to give names that do not clash with
any other names used before
* [quote t e] returns the expression that is safe to use in place of [e] in
generated code
* [sanitize t e] Returns [e] wrapped with bindings for all quoted expressions
in the quoter [t] | * Generate expressions in a hygienic way .
The idea is that whenever we want to refer to an expression in generated
code we first quote it . The result will be an identifier that is guaranteed
to refer to the expression it was created from . This way it is impossible
for quoted fragments to refer to newly introduced expressions .
The idea is that whenever we want to refer to an expression in generated
code we first quote it. The result will be an identifier that is guaranteed
to refer to the expression it was created from. This way it is impossible
for quoted fragments to refer to newly introduced expressions. *)
open Import
type t
val create : unit -> t
val quote : t -> expression -> expression
val sanitize : t -> expression -> expression
|
4a0133782076b76203771cb82f92d681ffb08729e6e5d1376682d2582954eae9 | darkleaf/publicator | show.clj | (ns publicator.web.responders.post.show
(:require
[publicator.use-cases.interactors.post.show :as interactor]
[publicator.web.responders.base :as responders.base]
[publicator.web.responses :as responses]
[publicator.web.presenters.post.show :as presenter]))
(defmethod responders.base/result->resp ::interactor/processed [[_ posts]]
(let [model (presenter/processed posts)]
(responses/render-page "post/show" model)))
(derive ::interactor/not-found ::responders.base/not-found)
| null | https://raw.githubusercontent.com/darkleaf/publicator/e07eee93d8f3d9c07a15d574619d5ea59c00f87d/web/src/publicator/web/responders/post/show.clj | clojure | (ns publicator.web.responders.post.show
(:require
[publicator.use-cases.interactors.post.show :as interactor]
[publicator.web.responders.base :as responders.base]
[publicator.web.responses :as responses]
[publicator.web.presenters.post.show :as presenter]))
(defmethod responders.base/result->resp ::interactor/processed [[_ posts]]
(let [model (presenter/processed posts)]
(responses/render-page "post/show" model)))
(derive ::interactor/not-found ::responders.base/not-found)
|
|
fc5b456a2e5f40af77b7d1d16ee4690591da106eeedeb4c80d50a5b583b4e0f9 | mirage/irmin | store_graph.mli |
* Copyright ( c ) 2013 - 2022 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2022 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
module Make : Common.Store_tests
| null | https://raw.githubusercontent.com/mirage/irmin/abeee121a6db7b085b3c68af50ef24a8d8f9ed05/src/irmin-test/store_graph.mli | ocaml |
* Copyright ( c ) 2013 - 2022 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2022 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
module Make : Common.Store_tests
|
|
23e4bce7b746f13f663d67530168c1cd301933350279f9130ff59f828db44691 | dhleong/wish | header.cljs | (ns wish.sheets.dnd5e.views.header
(:require [clojure.string :as str]
[spade.core :refer [defattrs]]
[wish.style :refer [text-primary-on-dark]]
[wish.style.flex :as flex :refer [flex]]
[wish.style.media :as media]
[wish.util :refer [<sub click>evt]]
[wish.util.nav :refer [sheet-url]]
[wish.sheets.dnd5e.overlays :as overlays]
[wish.sheets.dnd5e.overlays.hp
:refer [overlay] :rename {overlay hp-overlay}]
[wish.sheets.dnd5e.style :as styles]
[wish.sheets.dnd5e.subs :as subs]
[wish.sheets.dnd5e.subs.effects :as effects]
[wish.sheets.dnd5e.subs.hp :as hp]
[wish.sheets.dnd5e.subs.combat :as combat]
[wish.sheets.dnd5e.subs.proficiency :as proficiency]
[wish.sheets.dnd5e.util :refer [mod->str]]
[wish.sheets.dnd5e.views.shared :refer [buff-kind->attrs]]
[wish.views.widgets :as widgets
:refer-macros [icon]
:refer [link link>evt]]))
(defattrs header-container-style []
(at-media media/dark-scheme
{:background "#333"})
{:display 'block
:background "#666666"})
(defattrs header-style []
(at-media media/tablets
[:.col.meta {:max-width "15vw"}])
(at-media media/smartphones
[:.side
[:&.settings {:order "0 !important"}]
[:&.right {:justify-content 'space-between
:padding "0 12px"
:width "100%"}]]
[:.col.meta {:max-width "35vw"}]
[:.hp
[:.label
[:.content {:display 'none}]
[:&:after {:content "'HP'"}]]
[:.value {:display "block !important"}]
[:.divider {:display 'block
:height "1px"
:border-top "1px solid #fff"
:overflow 'hidden}]
[:.max {:font-size "60%"}]])
(at-media media/tiny
{:font-size "80%"}
[:.side {:padding "0 !important"}])
[:& (merge flex
flex/wrap
{:color text-primary-on-dark
:margin "0 auto"
:padding "4px 0"
:max-width "1200px"
:width "100%"})]
[:.side flex
[:&.left {:padding-left "12px"}]
[:&.settings {:order 1
:padding-right "12px"}]
[:.col (merge flex/vertical-center
styles/text-center
{:padding "4px 8px"})
[:&.left {:text-align 'left}]
[:.meta (merge flex
flex/wrap
{:font-size "80%"})
[:.race {:margin-right "0.5em"}]]
[:.save-state {:margin-right "12px"}]
[:.stat {:font-size "140%"}
[:&.buffed {:color styles/color-accent2}]
[:&.nerfed {:color styles/color-accent-nerf}]
[:.unit {:font-size "60%"}]]]]
[:.label {:font-size "80%"}]
[:.hp flex/center
[:.value (merge flex
styles/text-center
{:padding "4px"
:font-size "120%"})]
[:.divider {:padding "0 4px"}]
[:.indicators
[:.icon {:font-size "12px"}
[:&.save {:color "#00cc00"}]
[:&.fail {:color "#aa0000"}]]]
[:.max
[:&.buffed {:color styles/color-accent2}]
[:&.nerfed {:color styles/color-accent-nerf}]]]
[:.space flex/grow])
; ======= Top bar ==========================================
(defn- hp-normal [hp max-hp hp-mod]
(let [buff-kind (<sub [::effects/change-for :hp-max])]
[:<>
[:div.label [:span.content "Hit Points"]]
[:div.value
[:div.now hp]
[:div.divider " / "]
[:div.max (buff-kind->attrs (or hp-mod buff-kind))
max-hp]]]))
(defn- save-indicators
[prefix icon-class used]
[:div.indicators
prefix
(for [i (range 3)]
(with-meta
(if (< i used)
(icon :radio-button-checked.icon {:class icon-class})
(icon :radio-button-unchecked.icon {:class icon-class}))
{:key i}))])
(defn hp-death-saving-throws
([] (hp-death-saving-throws nil))
([sheet-id]
(let [{:keys [saves fails]} (<sub [::hp/death-saving-throws sheet-id])]
[:<>
[save-indicators "😇" :save saves]
[save-indicators "☠️" :fail fails]])))
(defn hp []
(let [[hp max-hp hp-mod] (<sub [::hp/state])]
[:div.clickable.hp.col
{:on-click (click>evt [:toggle-overlay [#'hp-overlay]])}
(if (> hp 0)
[hp-normal hp max-hp hp-mod]
[hp-death-saving-throws])]))
(defn buffable-stat [stat-id label & content]
(let [buff-kind (<sub [::effects/change-for stat-id])]
[:div.col
(into [:div.stat (buff-kind->attrs buff-kind)]
content)
[:div.label label]]))
; ======= public interface ================================
(defn view []
(let [common (<sub [:sheet-meta])
classes (<sub [:classes])]
[:div (header-container-style)
[:div (header-style)
[:div.left.side
[:div.col
[widgets/save-state]]
[:div.col.left.meta
[:div.name [link {:class "inline"
:href "/sheets"}
(:name common)]]
[:div.meta
[:div.race (:name (<sub [:race]))]
[:div.classes (->> classes
(map (fn [c]
(str (-> c :name) "\u00a0" (:level c))))
(str/join " / "))]]]
[:div.col
[link>evt [:toggle-overlay [#'overlays/notes-overlay]]
(icon :description)]]]
[:div.space]
[:div.share.side
[:div.col
[widgets/share-button]]]
[:div.settings.side
[:div.col
(let [sheet-id (<sub [:active-sheet-id])]
[link {:href (sheet-url sheet-id :builder :class)}
(icon :settings)])]]
[:div.right.side
[:div.col
[:div.stat (mod->str
(<sub [::proficiency/bonus]))]
[:div.label "Proficiency"]]
[buffable-stat :ac "AC"
(<sub [::combat/ac])]
[buffable-stat :speed "Speed"
(<sub [::subs/speed]) [:span.unit " ft"]]
[:div.col
[:div.stat (<sub [::subs/passive-perception])]
[:div.label "Pass. Perc."]]
[buffable-stat :initiative "Initiative"
(mod->str
(<sub [::combat/initiative]))]
[hp]]
]]))
| null | https://raw.githubusercontent.com/dhleong/wish/9036f9da3706bfcc1e4b4736558b6f7309f53b7b/src/cljs/wish/sheets/dnd5e/views/header.cljs | clojure | ======= Top bar ==========================================
======= public interface ================================ | (ns wish.sheets.dnd5e.views.header
(:require [clojure.string :as str]
[spade.core :refer [defattrs]]
[wish.style :refer [text-primary-on-dark]]
[wish.style.flex :as flex :refer [flex]]
[wish.style.media :as media]
[wish.util :refer [<sub click>evt]]
[wish.util.nav :refer [sheet-url]]
[wish.sheets.dnd5e.overlays :as overlays]
[wish.sheets.dnd5e.overlays.hp
:refer [overlay] :rename {overlay hp-overlay}]
[wish.sheets.dnd5e.style :as styles]
[wish.sheets.dnd5e.subs :as subs]
[wish.sheets.dnd5e.subs.effects :as effects]
[wish.sheets.dnd5e.subs.hp :as hp]
[wish.sheets.dnd5e.subs.combat :as combat]
[wish.sheets.dnd5e.subs.proficiency :as proficiency]
[wish.sheets.dnd5e.util :refer [mod->str]]
[wish.sheets.dnd5e.views.shared :refer [buff-kind->attrs]]
[wish.views.widgets :as widgets
:refer-macros [icon]
:refer [link link>evt]]))
(defattrs header-container-style []
(at-media media/dark-scheme
{:background "#333"})
{:display 'block
:background "#666666"})
(defattrs header-style []
(at-media media/tablets
[:.col.meta {:max-width "15vw"}])
(at-media media/smartphones
[:.side
[:&.settings {:order "0 !important"}]
[:&.right {:justify-content 'space-between
:padding "0 12px"
:width "100%"}]]
[:.col.meta {:max-width "35vw"}]
[:.hp
[:.label
[:.content {:display 'none}]
[:&:after {:content "'HP'"}]]
[:.value {:display "block !important"}]
[:.divider {:display 'block
:height "1px"
:border-top "1px solid #fff"
:overflow 'hidden}]
[:.max {:font-size "60%"}]])
(at-media media/tiny
{:font-size "80%"}
[:.side {:padding "0 !important"}])
[:& (merge flex
flex/wrap
{:color text-primary-on-dark
:margin "0 auto"
:padding "4px 0"
:max-width "1200px"
:width "100%"})]
[:.side flex
[:&.left {:padding-left "12px"}]
[:&.settings {:order 1
:padding-right "12px"}]
[:.col (merge flex/vertical-center
styles/text-center
{:padding "4px 8px"})
[:&.left {:text-align 'left}]
[:.meta (merge flex
flex/wrap
{:font-size "80%"})
[:.race {:margin-right "0.5em"}]]
[:.save-state {:margin-right "12px"}]
[:.stat {:font-size "140%"}
[:&.buffed {:color styles/color-accent2}]
[:&.nerfed {:color styles/color-accent-nerf}]
[:.unit {:font-size "60%"}]]]]
[:.label {:font-size "80%"}]
[:.hp flex/center
[:.value (merge flex
styles/text-center
{:padding "4px"
:font-size "120%"})]
[:.divider {:padding "0 4px"}]
[:.indicators
[:.icon {:font-size "12px"}
[:&.save {:color "#00cc00"}]
[:&.fail {:color "#aa0000"}]]]
[:.max
[:&.buffed {:color styles/color-accent2}]
[:&.nerfed {:color styles/color-accent-nerf}]]]
[:.space flex/grow])
(defn- hp-normal [hp max-hp hp-mod]
(let [buff-kind (<sub [::effects/change-for :hp-max])]
[:<>
[:div.label [:span.content "Hit Points"]]
[:div.value
[:div.now hp]
[:div.divider " / "]
[:div.max (buff-kind->attrs (or hp-mod buff-kind))
max-hp]]]))
(defn- save-indicators
[prefix icon-class used]
[:div.indicators
prefix
(for [i (range 3)]
(with-meta
(if (< i used)
(icon :radio-button-checked.icon {:class icon-class})
(icon :radio-button-unchecked.icon {:class icon-class}))
{:key i}))])
(defn hp-death-saving-throws
([] (hp-death-saving-throws nil))
([sheet-id]
(let [{:keys [saves fails]} (<sub [::hp/death-saving-throws sheet-id])]
[:<>
[save-indicators "😇" :save saves]
[save-indicators "☠️" :fail fails]])))
(defn hp []
(let [[hp max-hp hp-mod] (<sub [::hp/state])]
[:div.clickable.hp.col
{:on-click (click>evt [:toggle-overlay [#'hp-overlay]])}
(if (> hp 0)
[hp-normal hp max-hp hp-mod]
[hp-death-saving-throws])]))
(defn buffable-stat [stat-id label & content]
(let [buff-kind (<sub [::effects/change-for stat-id])]
[:div.col
(into [:div.stat (buff-kind->attrs buff-kind)]
content)
[:div.label label]]))
(defn view []
(let [common (<sub [:sheet-meta])
classes (<sub [:classes])]
[:div (header-container-style)
[:div (header-style)
[:div.left.side
[:div.col
[widgets/save-state]]
[:div.col.left.meta
[:div.name [link {:class "inline"
:href "/sheets"}
(:name common)]]
[:div.meta
[:div.race (:name (<sub [:race]))]
[:div.classes (->> classes
(map (fn [c]
(str (-> c :name) "\u00a0" (:level c))))
(str/join " / "))]]]
[:div.col
[link>evt [:toggle-overlay [#'overlays/notes-overlay]]
(icon :description)]]]
[:div.space]
[:div.share.side
[:div.col
[widgets/share-button]]]
[:div.settings.side
[:div.col
(let [sheet-id (<sub [:active-sheet-id])]
[link {:href (sheet-url sheet-id :builder :class)}
(icon :settings)])]]
[:div.right.side
[:div.col
[:div.stat (mod->str
(<sub [::proficiency/bonus]))]
[:div.label "Proficiency"]]
[buffable-stat :ac "AC"
(<sub [::combat/ac])]
[buffable-stat :speed "Speed"
(<sub [::subs/speed]) [:span.unit " ft"]]
[:div.col
[:div.stat (<sub [::subs/passive-perception])]
[:div.label "Pass. Perc."]]
[buffable-stat :initiative "Initiative"
(mod->str
(<sub [::combat/initiative]))]
[hp]]
]]))
|
8e5f74f0e07b89c2c5449d73423ef4ad94fef7534d63427cb1b44694cffbd9ef | mbj/stratosphere | FileSystemConfigProperty.hs | module Stratosphere.SageMaker.AppImageConfig.FileSystemConfigProperty (
FileSystemConfigProperty(..), mkFileSystemConfigProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data FileSystemConfigProperty
= FileSystemConfigProperty {defaultGid :: (Prelude.Maybe (Value Prelude.Integer)),
defaultUid :: (Prelude.Maybe (Value Prelude.Integer)),
mountPath :: (Prelude.Maybe (Value Prelude.Text))}
mkFileSystemConfigProperty :: FileSystemConfigProperty
mkFileSystemConfigProperty
= FileSystemConfigProperty
{defaultGid = Prelude.Nothing, defaultUid = Prelude.Nothing,
mountPath = Prelude.Nothing}
instance ToResourceProperties FileSystemConfigProperty where
toResourceProperties FileSystemConfigProperty {..}
= ResourceProperties
{awsType = "AWS::SageMaker::AppImageConfig.FileSystemConfig",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DefaultGid" Prelude.<$> defaultGid,
(JSON..=) "DefaultUid" Prelude.<$> defaultUid,
(JSON..=) "MountPath" Prelude.<$> mountPath])}
instance JSON.ToJSON FileSystemConfigProperty where
toJSON FileSystemConfigProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DefaultGid" Prelude.<$> defaultGid,
(JSON..=) "DefaultUid" Prelude.<$> defaultUid,
(JSON..=) "MountPath" Prelude.<$> mountPath]))
instance Property "DefaultGid" FileSystemConfigProperty where
type PropertyType "DefaultGid" FileSystemConfigProperty = Value Prelude.Integer
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {defaultGid = Prelude.pure newValue, ..}
instance Property "DefaultUid" FileSystemConfigProperty where
type PropertyType "DefaultUid" FileSystemConfigProperty = Value Prelude.Integer
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {defaultUid = Prelude.pure newValue, ..}
instance Property "MountPath" FileSystemConfigProperty where
type PropertyType "MountPath" FileSystemConfigProperty = Value Prelude.Text
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {mountPath = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/sagemaker/gen/Stratosphere/SageMaker/AppImageConfig/FileSystemConfigProperty.hs | haskell | module Stratosphere.SageMaker.AppImageConfig.FileSystemConfigProperty (
FileSystemConfigProperty(..), mkFileSystemConfigProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data FileSystemConfigProperty
= FileSystemConfigProperty {defaultGid :: (Prelude.Maybe (Value Prelude.Integer)),
defaultUid :: (Prelude.Maybe (Value Prelude.Integer)),
mountPath :: (Prelude.Maybe (Value Prelude.Text))}
mkFileSystemConfigProperty :: FileSystemConfigProperty
mkFileSystemConfigProperty
= FileSystemConfigProperty
{defaultGid = Prelude.Nothing, defaultUid = Prelude.Nothing,
mountPath = Prelude.Nothing}
instance ToResourceProperties FileSystemConfigProperty where
toResourceProperties FileSystemConfigProperty {..}
= ResourceProperties
{awsType = "AWS::SageMaker::AppImageConfig.FileSystemConfig",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DefaultGid" Prelude.<$> defaultGid,
(JSON..=) "DefaultUid" Prelude.<$> defaultUid,
(JSON..=) "MountPath" Prelude.<$> mountPath])}
instance JSON.ToJSON FileSystemConfigProperty where
toJSON FileSystemConfigProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DefaultGid" Prelude.<$> defaultGid,
(JSON..=) "DefaultUid" Prelude.<$> defaultUid,
(JSON..=) "MountPath" Prelude.<$> mountPath]))
instance Property "DefaultGid" FileSystemConfigProperty where
type PropertyType "DefaultGid" FileSystemConfigProperty = Value Prelude.Integer
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {defaultGid = Prelude.pure newValue, ..}
instance Property "DefaultUid" FileSystemConfigProperty where
type PropertyType "DefaultUid" FileSystemConfigProperty = Value Prelude.Integer
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {defaultUid = Prelude.pure newValue, ..}
instance Property "MountPath" FileSystemConfigProperty where
type PropertyType "MountPath" FileSystemConfigProperty = Value Prelude.Text
set newValue FileSystemConfigProperty {..}
= FileSystemConfigProperty {mountPath = Prelude.pure newValue, ..} |
|
7c98cd607246c5774aef16b98e2ea9e1aad4765c027cbcdb8d9b64a89f54d1f7 | heyoka/faxe | esp_mongo_query.erl | Date : 23.07.2021
Mongo DB find
Ⓒ 2021 heyoka
%%
-module(esp_mongo_query).
-author("Alexander Minichmair").
-include("faxe.hrl").
-behavior(df_component).
%% API
-export([
init/3, process/3, options/0, handle_info/2,
metrics/0, shutdown/1, check_options/0]).
-record(state, {
host :: string(),
port :: non_neg_integer(),
selector :: map(),
user :: string(), %% Schema
pass :: string(), %%
database :: iodata(),
collection :: binary(),
as :: binary(),
client,
client_ref,
db_opts,
every,
align = false,
timer,
fn_id
}).
-define(DB_OPTIONS, #{
timeout => 3000
}).
options() ->
[
{host, string},
{port, integer, 27017},
{user, string, <<>>},
{pass, string, <<>>},
{database, string},
{collection, string},
{query, string, <<"{}">>}, %% json string
{as, binary, undefined},
{time_field, string, <<"ts">>},
{every, duration, undefined},
{align, is_set, false}
].
check_options() ->
[
{func, query,
fun(Selector) ->
case catch(jiffy:decode(Selector, [return_maps])) of
S when is_map(S) orelse is_list(S) -> true;
_ -> false
end
end,
<<" seems not to be valid json">>}
].
@todo figure out how to get the byte - size of the data
metrics() ->
[
{ ? METRIC_READING_TIME , histogram , [ slide , 60 ] , " Network time for sending a message . " } ,
{ ? METRIC_BYTES_READ , , [ ] }
].
init(NodeId, _Inputs, #{host := Host0, port := Port, user := User, every := Every, as := As,
pass := Pass, query := JsonString, align := Align, database := DB, collection := Collection}) ->
%% we need to trap exists form the result cursors
process_flag(trap_exit, true),
Host = binary_to_list(Host0),
Query = jiffy:decode(JsonString, [return_maps]),
DBOpts = [{host, Host}, {port, Port}, {login, User}, {password, Pass}, {database, DB}],
connection_registry:reg(NodeId, Host, Port, <<"mongodb">>),
State = #state{host = Host, port = Port, user = User, pass = Pass, selector = Query, database = DB,
db_opts = DBOpts, every = Every, align = Align, fn_id = NodeId, collection = Collection, as = As},
erlang:send_after(0, self(), reconnect),
{ok, all, State}.
%% read on incoming data-items
process(_In, _DataItem, State = #state{}) ->
handle_info(query, State).
handle_info(reconnect, State = #state{client = Client}) ->
NewState =
case Client /= undefined andalso is_process_alive(Client) of
true -> State;
false -> connect(State)
end,
{ok, NewState};
handle_info(query, State = #state{timer = Timer, client = C, collection = Coll, selector = Sel, database = DB}) ->
NewTimer = faxe_time:timer_next(Timer),
%% do query
Res = (catch mc_worker_api:find(C, Coll, Sel)),
node_metrics:metric(?METRIC_ITEMS_IN, 1, State#state.fn_id),
case handle_response(Res, State) of
{ok, DataPoints} ->
{emit, {1, #data_batch{points = DataPoints}}, State#state{timer = NewTimer}};
{error, empty_response} ->
lager:notice("Empty response with query: ~p on collection: ~p in database: ~p", [Sel, Coll, DB]),
{ok, State#state{timer = NewTimer}};
{error, Reason} ->
lager:notice("Error with mongodb response: ~p",[Reason]),
{ok, State#state{timer = NewTimer}}
end;
handle_info({'DOWN', _MonitorRef, process, Pid, _Info}, State=#state{client = Pid, timer = Timer}) ->
connection_registry:disconnected(),
lager:notice("mongodb connection DOWN"),
NewTimer = cancel_timer(Timer),
erlang:send_after(1000, self(), reconnect),
{ok, State#state{timer = NewTimer}};
handle_info(What, State) ->
lager:debug("++other info : ~p",[What]),
{ok, State}.
shutdown(#state{client = Client}) ->
catch mc_worker_api:disconnect(Client).
-spec connect(#state{}) -> #state{}.
connect(State = #state{db_opts = Opts}) ->
connection_registry:connecting(),
case mc_worker_api:connect(Opts) of
{ok, C} ->
erlang:monitor(process, C),
connection_registry:connected(),
init_timer(State#state{client = C});
{error, _What} ->
lager:notice("failed to connect to mongodb with Reason: ~p",[_What]),
State
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
handle_response({ok, Cursor}, State) when is_pid(Cursor) ->
Rows = mc_cursor:rest(Cursor),
mc_cursor:close(Cursor),
lager:info("Rows: ~p",[Rows]),
build(Rows, State);
handle_response([], _State) ->
{error, empty_response};
handle_response({error, _Reason} = E, _State) ->
E.
build(Rows, State) ->
build(Rows, [], State).
build([], Points, _S) ->
{ok, Points};
build([Row|Rows], Points, S=#state{timer = Timer, as = As}) ->
Ts = Timer#faxe_timer.last_time,
P0 = #data_point{ts = Ts, fields = maps:without([<<"_id">>], Row)},
NewPoint =
case As of
undefined -> P0;
Bin when is_binary(Bin) -> flowdata:set_root(P0, As)
end,
build(Rows, Points ++ [NewPoint], S).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init_timer(S = #state{align = Align, every = Every}) ->
Timer = faxe_time:init_timer(Align, Every, query),
S#state{timer = Timer}.
-spec cancel_timer(#faxe_timer{}|undefined) -> #faxe_timer{}|undefined.
cancel_timer(Timer) ->
case catch (faxe_time:timer_cancel(Timer)) of
T = #faxe_timer{} -> T;
_ -> Timer
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% TESTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(TEST).
-endif. | null | https://raw.githubusercontent.com/heyoka/faxe/aa0414cd7d80e492c21fd3f57dd06d5a9ebb9e8d/apps/faxe/src/components/esp_mongo_query.erl | erlang |
API
Schema
json string
we need to trap exists form the result cursors
read on incoming data-items
do query
TESTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | Date : 23.07.2021
Mongo DB find
Ⓒ 2021 heyoka
-module(esp_mongo_query).
-author("Alexander Minichmair").
-include("faxe.hrl").
-behavior(df_component).
-export([
init/3, process/3, options/0, handle_info/2,
metrics/0, shutdown/1, check_options/0]).
-record(state, {
host :: string(),
port :: non_neg_integer(),
selector :: map(),
database :: iodata(),
collection :: binary(),
as :: binary(),
client,
client_ref,
db_opts,
every,
align = false,
timer,
fn_id
}).
-define(DB_OPTIONS, #{
timeout => 3000
}).
options() ->
[
{host, string},
{port, integer, 27017},
{user, string, <<>>},
{pass, string, <<>>},
{database, string},
{collection, string},
{as, binary, undefined},
{time_field, string, <<"ts">>},
{every, duration, undefined},
{align, is_set, false}
].
check_options() ->
[
{func, query,
fun(Selector) ->
case catch(jiffy:decode(Selector, [return_maps])) of
S when is_map(S) orelse is_list(S) -> true;
_ -> false
end
end,
<<" seems not to be valid json">>}
].
@todo figure out how to get the byte - size of the data
metrics() ->
[
{ ? METRIC_READING_TIME , histogram , [ slide , 60 ] , " Network time for sending a message . " } ,
{ ? METRIC_BYTES_READ , , [ ] }
].
init(NodeId, _Inputs, #{host := Host0, port := Port, user := User, every := Every, as := As,
pass := Pass, query := JsonString, align := Align, database := DB, collection := Collection}) ->
process_flag(trap_exit, true),
Host = binary_to_list(Host0),
Query = jiffy:decode(JsonString, [return_maps]),
DBOpts = [{host, Host}, {port, Port}, {login, User}, {password, Pass}, {database, DB}],
connection_registry:reg(NodeId, Host, Port, <<"mongodb">>),
State = #state{host = Host, port = Port, user = User, pass = Pass, selector = Query, database = DB,
db_opts = DBOpts, every = Every, align = Align, fn_id = NodeId, collection = Collection, as = As},
erlang:send_after(0, self(), reconnect),
{ok, all, State}.
process(_In, _DataItem, State = #state{}) ->
handle_info(query, State).
handle_info(reconnect, State = #state{client = Client}) ->
NewState =
case Client /= undefined andalso is_process_alive(Client) of
true -> State;
false -> connect(State)
end,
{ok, NewState};
handle_info(query, State = #state{timer = Timer, client = C, collection = Coll, selector = Sel, database = DB}) ->
NewTimer = faxe_time:timer_next(Timer),
Res = (catch mc_worker_api:find(C, Coll, Sel)),
node_metrics:metric(?METRIC_ITEMS_IN, 1, State#state.fn_id),
case handle_response(Res, State) of
{ok, DataPoints} ->
{emit, {1, #data_batch{points = DataPoints}}, State#state{timer = NewTimer}};
{error, empty_response} ->
lager:notice("Empty response with query: ~p on collection: ~p in database: ~p", [Sel, Coll, DB]),
{ok, State#state{timer = NewTimer}};
{error, Reason} ->
lager:notice("Error with mongodb response: ~p",[Reason]),
{ok, State#state{timer = NewTimer}}
end;
handle_info({'DOWN', _MonitorRef, process, Pid, _Info}, State=#state{client = Pid, timer = Timer}) ->
connection_registry:disconnected(),
lager:notice("mongodb connection DOWN"),
NewTimer = cancel_timer(Timer),
erlang:send_after(1000, self(), reconnect),
{ok, State#state{timer = NewTimer}};
handle_info(What, State) ->
lager:debug("++other info : ~p",[What]),
{ok, State}.
shutdown(#state{client = Client}) ->
catch mc_worker_api:disconnect(Client).
-spec connect(#state{}) -> #state{}.
connect(State = #state{db_opts = Opts}) ->
connection_registry:connecting(),
case mc_worker_api:connect(Opts) of
{ok, C} ->
erlang:monitor(process, C),
connection_registry:connected(),
init_timer(State#state{client = C});
{error, _What} ->
lager:notice("failed to connect to mongodb with Reason: ~p",[_What]),
State
end.
handle_response({ok, Cursor}, State) when is_pid(Cursor) ->
Rows = mc_cursor:rest(Cursor),
mc_cursor:close(Cursor),
lager:info("Rows: ~p",[Rows]),
build(Rows, State);
handle_response([], _State) ->
{error, empty_response};
handle_response({error, _Reason} = E, _State) ->
E.
build(Rows, State) ->
build(Rows, [], State).
build([], Points, _S) ->
{ok, Points};
build([Row|Rows], Points, S=#state{timer = Timer, as = As}) ->
Ts = Timer#faxe_timer.last_time,
P0 = #data_point{ts = Ts, fields = maps:without([<<"_id">>], Row)},
NewPoint =
case As of
undefined -> P0;
Bin when is_binary(Bin) -> flowdata:set_root(P0, As)
end,
build(Rows, Points ++ [NewPoint], S).
init_timer(S = #state{align = Align, every = Every}) ->
Timer = faxe_time:init_timer(Align, Every, query),
S#state{timer = Timer}.
-spec cancel_timer(#faxe_timer{}|undefined) -> #faxe_timer{}|undefined.
cancel_timer(Timer) ->
case catch (faxe_time:timer_cancel(Timer)) of
T = #faxe_timer{} -> T;
_ -> Timer
end.
-ifdef(TEST).
-endif. |
12ea57f425a334c053590db5ef23a93576279ba6c9ecbbbad67b9da25e71e5c0 | haskell-cryptography/libsodium-bindings | Signing.hs | # LANGUAGE DerivingStrategies #
# LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
-- |
--
-- Module: Sel.Hashing.Signing
-- Description: Public-key signatures with the Ed25519 algorithm
Copyright : ( C ) Hécate Moonlight 2022
-- License: BSD-3-Clause
Maintainer : The Haskell Cryptography Group
Portability : GHC only
module Sel.Signing
( -- ** Introduction
-- $introduction
PublicKey
, SecretKey
, SignedMessage
-- ** Key Pair generation
, generateKeyPair
-- ** Message Signing
, signMessage
, openMessage
-- ** Constructing and Deconstructing
, getSignature
, unsafeGetMessage
, mkSignature
) where
import Control.Monad (void)
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe (unsafePackMallocCStringLen)
import qualified Data.ByteString.Unsafe as ByteString
import Foreign
( ForeignPtr
, Ptr
, castPtr
, mallocBytes
, mallocForeignPtrBytes
, withForeignPtr
)
import Foreign.C (CChar, CSize, CUChar, CULLong)
import qualified Foreign.Marshal.Array as Foreign
import qualified Foreign.Ptr as Foreign
import GHC.IO.Handle.Text (memcpy)
import LibSodium.Bindings.Signing
( cryptoSignBytes
, cryptoSignDetached
, cryptoSignKeyPair
, cryptoSignPublicKeyBytes
, cryptoSignSecretKeyBytes
, cryptoSignVerifyDetached
)
import System.IO.Unsafe (unsafeDupablePerformIO)
-- $introduction
--
Public - key Signatures work with a ' SecretKey ' and ' PublicKey '
--
* The ' SecretKey ' is used to append a signature to any number of messages . It must stay private ;
* The ' PublicKey ' is used by third - parties to to verify that the signature appended to a message was
issued by the creator of the public key . It must be distributed to third - parties .
--
-- Verifiers need to already know and ultimately trust a public key before messages signed
-- using it can be verified.
-- |
--
@since 0.0.1.0
newtype PublicKey = PublicKey (ForeignPtr CUChar)
-- |
--
@since 0.0.1.0
newtype SecretKey = SecretKey (ForeignPtr CUChar)
-- |
--
@since 0.0.1.0
data SignedMessage = SignedMessage
{ messageLength :: CSize
, messageForeignPtr :: ForeignPtr CUChar
, signatureForeignPtr :: ForeignPtr CUChar
}
-- | Generate a pair of public and secret key.
--
-- The length parameters used are 'cryptoSignPublicKeyBytes'
-- and 'cryptoSignSecretKeyBytes'.
--
@since 0.0.1.0
generateKeyPair :: IO (PublicKey, SecretKey)
generateKeyPair = do
publicKeyForeignPtr <- mallocForeignPtrBytes (fromIntegral @CSize @Int cryptoSignPublicKeyBytes)
secretKeyForeignPtr <- mallocForeignPtrBytes (fromIntegral @CSize @Int cryptoSignSecretKeyBytes)
withForeignPtr publicKeyForeignPtr $ \pkPtr ->
withForeignPtr secretKeyForeignPtr $ \skPtr ->
void $
cryptoSignKeyPair
pkPtr
skPtr
pure (PublicKey publicKeyForeignPtr, SecretKey secretKeyForeignPtr)
-- | Sign a message.
--
@since 0.0.1.0
signMessage :: ByteString -> SecretKey -> IO SignedMessage
signMessage message (SecretKey skFPtr) =
ByteString.unsafeUseAsCStringLen message $ \(cString, messageLength) -> do
let sigLength = fromIntegral @CSize @Int cryptoSignBytes
(messageForeignPtr :: ForeignPtr CUChar) <- Foreign.mallocForeignPtrBytes messageLength
signatureForeignPtr <- Foreign.mallocForeignPtrBytes sigLength
withForeignPtr messageForeignPtr $ \messagePtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr ->
withForeignPtr skFPtr $ \skPtr -> do
Foreign.copyArray messagePtr (Foreign.castPtr @CChar @CUChar cString) messageLength
void $
cryptoSignDetached
signaturePtr
Foreign.nullPtr -- Always of size 'cryptoSignBytes'
(castPtr @CChar @CUChar cString)
(fromIntegral @Int @CULLong messageLength)
skPtr
pure $ SignedMessage (fromIntegral @Int @CSize messageLength) messageForeignPtr signatureForeignPtr
-- | Open a signed message with the signatory's public key. The function returns 'Nothing' if there
-- is a key mismatch.
--
@since 0.0.1.0
openMessage :: SignedMessage -> PublicKey -> Maybe ByteString
openMessage SignedMessage{messageLength, messageForeignPtr, signatureForeignPtr} (PublicKey pkForeignPtr) = unsafeDupablePerformIO $
withForeignPtr pkForeignPtr $ \publicKeyPtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
withForeignPtr messageForeignPtr $ \messagePtr -> do
result <-
cryptoSignVerifyDetached
signaturePtr
messagePtr
(fromIntegral @CSize @CULLong messageLength)
publicKeyPtr
case result of
(-1) -> pure Nothing
_ -> do
bsPtr <- mallocBytes (fromIntegral messageLength)
memcpy bsPtr (castPtr messagePtr) messageLength
Just <$> unsafePackMallocCStringLen (castPtr bsPtr :: Ptr CChar, fromIntegral messageLength)
-- | Get the signature part of a 'SignedMessage'.
--
@since 0.0.1.0
getSignature :: SignedMessage -> ByteString
getSignature SignedMessage{signatureForeignPtr} = unsafeDupablePerformIO $
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
bsPtr <- Foreign.mallocBytes (fromIntegral cryptoSignBytes)
memcpy bsPtr signaturePtr cryptoSignBytes
unsafePackMallocCStringLen (Foreign.castPtr bsPtr :: Ptr CChar, fromIntegral cryptoSignBytes)
| Get the message part of a ' SignedMessage ' _ _ without verifying the signature _ _ .
--
@since 0.0.1.0
unsafeGetMessage :: SignedMessage -> ByteString
unsafeGetMessage SignedMessage{messageLength, messageForeignPtr} = unsafeDupablePerformIO $
withForeignPtr messageForeignPtr $ \messagePtr -> do
bsPtr <- Foreign.mallocBytes (fromIntegral messageLength)
memcpy bsPtr messagePtr messageLength
unsafePackMallocCStringLen (Foreign.castPtr bsPtr :: Ptr CChar, fromIntegral messageLength)
-- | Combine a message and a signature into a 'SignedMessage'.
--
@since 0.0.1.0
mkSignature :: ByteString -> ByteString -> SignedMessage
mkSignature message signature = unsafeDupablePerformIO $
ByteString.unsafeUseAsCStringLen message $ \(messageStringPtr, messageLength) ->
ByteString.unsafeUseAsCStringLen signature $ \(signatureStringPtr, _) -> do
(messageForeignPtr :: ForeignPtr CUChar) <- Foreign.mallocForeignPtrBytes messageLength
signatureForeignPtr <- Foreign.mallocForeignPtrBytes (fromIntegral cryptoSignBytes)
withForeignPtr messageForeignPtr $ \messagePtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
Foreign.copyArray messagePtr (Foreign.castPtr messageStringPtr) messageLength
Foreign.copyArray signaturePtr (Foreign.castPtr signatureStringPtr) (fromIntegral cryptoSignBytes)
pure $ SignedMessage (fromIntegral @Int @CSize messageLength) messageForeignPtr signatureForeignPtr
| null | https://raw.githubusercontent.com/haskell-cryptography/libsodium-bindings/127113199e7b3f91305cc34b169b06a2622d65da/sel/src/Sel/Signing.hs | haskell | |
Module: Sel.Hashing.Signing
Description: Public-key signatures with the Ed25519 algorithm
License: BSD-3-Clause
** Introduction
$introduction
** Key Pair generation
** Message Signing
** Constructing and Deconstructing
$introduction
Verifiers need to already know and ultimately trust a public key before messages signed
using it can be verified.
|
|
|
| Generate a pair of public and secret key.
The length parameters used are 'cryptoSignPublicKeyBytes'
and 'cryptoSignSecretKeyBytes'.
| Sign a message.
Always of size 'cryptoSignBytes'
| Open a signed message with the signatory's public key. The function returns 'Nothing' if there
is a key mismatch.
| Get the signature part of a 'SignedMessage'.
| Combine a message and a signature into a 'SignedMessage'.
| # LANGUAGE DerivingStrategies #
# LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
Copyright : ( C ) Hécate Moonlight 2022
Maintainer : The Haskell Cryptography Group
Portability : GHC only
module Sel.Signing
PublicKey
, SecretKey
, SignedMessage
, generateKeyPair
, signMessage
, openMessage
, getSignature
, unsafeGetMessage
, mkSignature
) where
import Control.Monad (void)
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe (unsafePackMallocCStringLen)
import qualified Data.ByteString.Unsafe as ByteString
import Foreign
( ForeignPtr
, Ptr
, castPtr
, mallocBytes
, mallocForeignPtrBytes
, withForeignPtr
)
import Foreign.C (CChar, CSize, CUChar, CULLong)
import qualified Foreign.Marshal.Array as Foreign
import qualified Foreign.Ptr as Foreign
import GHC.IO.Handle.Text (memcpy)
import LibSodium.Bindings.Signing
( cryptoSignBytes
, cryptoSignDetached
, cryptoSignKeyPair
, cryptoSignPublicKeyBytes
, cryptoSignSecretKeyBytes
, cryptoSignVerifyDetached
)
import System.IO.Unsafe (unsafeDupablePerformIO)
Public - key Signatures work with a ' SecretKey ' and ' PublicKey '
* The ' SecretKey ' is used to append a signature to any number of messages . It must stay private ;
* The ' PublicKey ' is used by third - parties to to verify that the signature appended to a message was
issued by the creator of the public key . It must be distributed to third - parties .
@since 0.0.1.0
newtype PublicKey = PublicKey (ForeignPtr CUChar)
@since 0.0.1.0
newtype SecretKey = SecretKey (ForeignPtr CUChar)
@since 0.0.1.0
data SignedMessage = SignedMessage
{ messageLength :: CSize
, messageForeignPtr :: ForeignPtr CUChar
, signatureForeignPtr :: ForeignPtr CUChar
}
@since 0.0.1.0
generateKeyPair :: IO (PublicKey, SecretKey)
generateKeyPair = do
publicKeyForeignPtr <- mallocForeignPtrBytes (fromIntegral @CSize @Int cryptoSignPublicKeyBytes)
secretKeyForeignPtr <- mallocForeignPtrBytes (fromIntegral @CSize @Int cryptoSignSecretKeyBytes)
withForeignPtr publicKeyForeignPtr $ \pkPtr ->
withForeignPtr secretKeyForeignPtr $ \skPtr ->
void $
cryptoSignKeyPair
pkPtr
skPtr
pure (PublicKey publicKeyForeignPtr, SecretKey secretKeyForeignPtr)
@since 0.0.1.0
signMessage :: ByteString -> SecretKey -> IO SignedMessage
signMessage message (SecretKey skFPtr) =
ByteString.unsafeUseAsCStringLen message $ \(cString, messageLength) -> do
let sigLength = fromIntegral @CSize @Int cryptoSignBytes
(messageForeignPtr :: ForeignPtr CUChar) <- Foreign.mallocForeignPtrBytes messageLength
signatureForeignPtr <- Foreign.mallocForeignPtrBytes sigLength
withForeignPtr messageForeignPtr $ \messagePtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr ->
withForeignPtr skFPtr $ \skPtr -> do
Foreign.copyArray messagePtr (Foreign.castPtr @CChar @CUChar cString) messageLength
void $
cryptoSignDetached
signaturePtr
(castPtr @CChar @CUChar cString)
(fromIntegral @Int @CULLong messageLength)
skPtr
pure $ SignedMessage (fromIntegral @Int @CSize messageLength) messageForeignPtr signatureForeignPtr
@since 0.0.1.0
openMessage :: SignedMessage -> PublicKey -> Maybe ByteString
openMessage SignedMessage{messageLength, messageForeignPtr, signatureForeignPtr} (PublicKey pkForeignPtr) = unsafeDupablePerformIO $
withForeignPtr pkForeignPtr $ \publicKeyPtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
withForeignPtr messageForeignPtr $ \messagePtr -> do
result <-
cryptoSignVerifyDetached
signaturePtr
messagePtr
(fromIntegral @CSize @CULLong messageLength)
publicKeyPtr
case result of
(-1) -> pure Nothing
_ -> do
bsPtr <- mallocBytes (fromIntegral messageLength)
memcpy bsPtr (castPtr messagePtr) messageLength
Just <$> unsafePackMallocCStringLen (castPtr bsPtr :: Ptr CChar, fromIntegral messageLength)
@since 0.0.1.0
getSignature :: SignedMessage -> ByteString
getSignature SignedMessage{signatureForeignPtr} = unsafeDupablePerformIO $
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
bsPtr <- Foreign.mallocBytes (fromIntegral cryptoSignBytes)
memcpy bsPtr signaturePtr cryptoSignBytes
unsafePackMallocCStringLen (Foreign.castPtr bsPtr :: Ptr CChar, fromIntegral cryptoSignBytes)
| Get the message part of a ' SignedMessage ' _ _ without verifying the signature _ _ .
@since 0.0.1.0
unsafeGetMessage :: SignedMessage -> ByteString
unsafeGetMessage SignedMessage{messageLength, messageForeignPtr} = unsafeDupablePerformIO $
withForeignPtr messageForeignPtr $ \messagePtr -> do
bsPtr <- Foreign.mallocBytes (fromIntegral messageLength)
memcpy bsPtr messagePtr messageLength
unsafePackMallocCStringLen (Foreign.castPtr bsPtr :: Ptr CChar, fromIntegral messageLength)
@since 0.0.1.0
mkSignature :: ByteString -> ByteString -> SignedMessage
mkSignature message signature = unsafeDupablePerformIO $
ByteString.unsafeUseAsCStringLen message $ \(messageStringPtr, messageLength) ->
ByteString.unsafeUseAsCStringLen signature $ \(signatureStringPtr, _) -> do
(messageForeignPtr :: ForeignPtr CUChar) <- Foreign.mallocForeignPtrBytes messageLength
signatureForeignPtr <- Foreign.mallocForeignPtrBytes (fromIntegral cryptoSignBytes)
withForeignPtr messageForeignPtr $ \messagePtr ->
withForeignPtr signatureForeignPtr $ \signaturePtr -> do
Foreign.copyArray messagePtr (Foreign.castPtr messageStringPtr) messageLength
Foreign.copyArray signaturePtr (Foreign.castPtr signatureStringPtr) (fromIntegral cryptoSignBytes)
pure $ SignedMessage (fromIntegral @Int @CSize messageLength) messageForeignPtr signatureForeignPtr
|
546fea0ffa44fffa94ba37cb388281900fc7ee8ac204cdd7bccfca7f7790a714 | facebook/duckling | Rules.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Volume.RU.Rules
( rules
) where
import Data.String
import Data.Text (Text)
import Prelude
import Duckling.Dimensions.Types
import Duckling.Types
import Duckling.Regex.Types
import Duckling.Volume.Helpers
import Duckling.Numeral.Helpers (isPositive)
import qualified Duckling.Volume.Types as TVolume
import qualified Duckling.Numeral.Types as TNumeral
volumes :: [(Text, String, TVolume.Unit)]
volumes = [ ("<latent vol> ml" , "мл|миллилитр(а|ов)?" , TVolume.Millilitre)
, ("<vol> hectoliters" , "гл|гектолитр(а|ов)?" , TVolume.Hectolitre)
, ("<vol> liters" , "л(итр(а|ов)?)?" , TVolume.Litre)
, ("<latent vol> gallon", "галлон(а|ов)?" , TVolume.Gallon)
]
rulesVolumes :: [Rule]
rulesVolumes = map go volumes
where
go :: (Text, String, TVolume.Unit) -> Rule
go (name, regexPattern, u) = Rule
{ name = name
, pattern =
[ regex regexPattern
]
, prod = \_ -> Just . Token Volume $ unitOnly u
}
fractions :: [(Text, String, Double)]
fractions = [ ("half", "пол[-\\s]?", 1/2)
]
rulesFractionalVolume :: [Rule]
rulesFractionalVolume = map go fractions
where
go :: (Text, String, Double) -> Rule
go (name, regexPattern, f) = Rule
{ name = name
, pattern =
[ regex regexPattern
, Predicate isUnitOnly
]
, prod = \case
(_:
Token Volume TVolume.VolumeData{TVolume.unit = Just u}:
_) ->
Just . Token Volume $ volume u f
_ -> Nothing
}
ruleHalfLiter :: Rule
ruleHalfLiter = Rule
{ name = "half liter"
, pattern =
[ regex "поллитра"
]
, prod = \_ -> Just . Token Volume . withUnit TVolume.Litre $ valueOnly 0.5
}
rules :: [Rule]
rules = [ ruleHalfLiter
]
++ rulesVolumes
++ rulesFractionalVolume
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Volume/RU/Rules.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
# LANGUAGE LambdaCase #
module Duckling.Volume.RU.Rules
( rules
) where
import Data.String
import Data.Text (Text)
import Prelude
import Duckling.Dimensions.Types
import Duckling.Types
import Duckling.Regex.Types
import Duckling.Volume.Helpers
import Duckling.Numeral.Helpers (isPositive)
import qualified Duckling.Volume.Types as TVolume
import qualified Duckling.Numeral.Types as TNumeral
volumes :: [(Text, String, TVolume.Unit)]
volumes = [ ("<latent vol> ml" , "мл|миллилитр(а|ов)?" , TVolume.Millilitre)
, ("<vol> hectoliters" , "гл|гектолитр(а|ов)?" , TVolume.Hectolitre)
, ("<vol> liters" , "л(итр(а|ов)?)?" , TVolume.Litre)
, ("<latent vol> gallon", "галлон(а|ов)?" , TVolume.Gallon)
]
rulesVolumes :: [Rule]
rulesVolumes = map go volumes
where
go :: (Text, String, TVolume.Unit) -> Rule
go (name, regexPattern, u) = Rule
{ name = name
, pattern =
[ regex regexPattern
]
, prod = \_ -> Just . Token Volume $ unitOnly u
}
fractions :: [(Text, String, Double)]
fractions = [ ("half", "пол[-\\s]?", 1/2)
]
rulesFractionalVolume :: [Rule]
rulesFractionalVolume = map go fractions
where
go :: (Text, String, Double) -> Rule
go (name, regexPattern, f) = Rule
{ name = name
, pattern =
[ regex regexPattern
, Predicate isUnitOnly
]
, prod = \case
(_:
Token Volume TVolume.VolumeData{TVolume.unit = Just u}:
_) ->
Just . Token Volume $ volume u f
_ -> Nothing
}
ruleHalfLiter :: Rule
ruleHalfLiter = Rule
{ name = "half liter"
, pattern =
[ regex "поллитра"
]
, prod = \_ -> Just . Token Volume . withUnit TVolume.Litre $ valueOnly 0.5
}
rules :: [Rule]
rules = [ ruleHalfLiter
]
++ rulesVolumes
++ rulesFractionalVolume
|
78cf4fa9321bd25bbe21ca6345f12e8128149fad0bf5caf1cf3b77849ce6cfd6 | nojb/ppx_ocamllex | lexing_plus.ml | let init_lexbuf lexbuf mem_size =
let pos = lexbuf.Lexing.lex_curr_pos in
lexbuf.Lexing.lex_mem <- Array.make mem_size (-1);
lexbuf.Lexing.lex_start_pos <- pos;
lexbuf.Lexing.lex_last_pos <- pos;
lexbuf.Lexing.lex_last_action <- -1
let rec next_char lexbuf =
if lexbuf.Lexing.lex_curr_pos >= lexbuf.Lexing.lex_buffer_len then begin
if lexbuf.Lexing.lex_eof_reached then
256
else begin
lexbuf.Lexing.refill_buff lexbuf;
next_char lexbuf
end
end else begin
let i = lexbuf.Lexing.lex_curr_pos in
let c = lexbuf.Lexing.lex_buffer.[i] in
lexbuf.Lexing.lex_curr_pos <- i+1;
Char.code c
end
| null | https://raw.githubusercontent.com/nojb/ppx_ocamllex/f802000c4ff984d3158a217c480294d85b3452db/lib/lexing_plus.ml | ocaml | let init_lexbuf lexbuf mem_size =
let pos = lexbuf.Lexing.lex_curr_pos in
lexbuf.Lexing.lex_mem <- Array.make mem_size (-1);
lexbuf.Lexing.lex_start_pos <- pos;
lexbuf.Lexing.lex_last_pos <- pos;
lexbuf.Lexing.lex_last_action <- -1
let rec next_char lexbuf =
if lexbuf.Lexing.lex_curr_pos >= lexbuf.Lexing.lex_buffer_len then begin
if lexbuf.Lexing.lex_eof_reached then
256
else begin
lexbuf.Lexing.refill_buff lexbuf;
next_char lexbuf
end
end else begin
let i = lexbuf.Lexing.lex_curr_pos in
let c = lexbuf.Lexing.lex_buffer.[i] in
lexbuf.Lexing.lex_curr_pos <- i+1;
Char.code c
end
|
|
8cc21fcc1e6c2a73e76832aaa29ad44965cc7d8345bbcc48d4ccf3c1ce3032ca | ocurrent/ocurrent | current_git.mli | (** Integration with Git. *)
module Commit_id : sig
include Set.OrderedType
val v : repo:string -> gref:string -> hash:string -> t
(** [v ~repo ~gref ~hash] identifies a commit that can be fetched from [repo]
using [gref] as the reference name and has hash [hash]. *)
val repo : t -> string
(** [repo t] is the Git URI of the repository. *)
val gref : t -> string
val hash : t -> string
[ hash t ] is the commit hash .
val equal : t -> t -> bool
val pp : t Fmt.t
val pp_user_clone : t Fmt.t
(** Display a Git command a user could run to get this commit. *)
val digest : t -> string
end
module Commit : sig
include Set.OrderedType
val id : t -> Commit_id.t
val hash : t -> string
val equal : t -> t -> bool
val pp : t Fmt.t
val pp_short : t Fmt.t
(** [pp_short] shows just the start of the hash. *)
val marshal : t -> string
val unmarshal : string -> t
end
val clone : schedule:Current_cache.Schedule.t -> ?gref:string -> string -> Commit.t Current.t
(** [clone ~schedule ~gref uri] evaluates to the head commit of [uri]'s [gref] branch (default: "master"). *)
val fetch : Commit_id.t Current.t -> Commit.t Current.t
val with_checkout :
?pool:unit Current.Pool.t ->
job:Current.Job.t ->
Commit.t ->
(Fpath.t -> 'a Current.or_error Lwt.t) ->
'a Current.or_error Lwt.t
(** [with_checkout ~job c fn] clones [c] to a temporary directory and runs [fn tmpdir].
When it returns, the directory is deleted.
@param pool Used to prevent too many clones from happening at once. *)
module Local : sig
type t
(** A local Git repository. *)
val v : Fpath.t -> t
(** [v path] is the local Git repository at [path]. *)
val head : t -> [`Commit of Commit_id.t | `Ref of string ] Current.t
(** [head] is the current branch ref (e.g. "/refs/heads/master"). *)
val head_commit : t -> Commit.t Current.t
(** [head_commit] is the commit at the head of the current branch. *)
val commit_of_ref : t -> string -> Commit.t Current.t
* [ commit_of_ref t ] evaluates to the commit at the head of [ gref ] .
e.g. [ commit_of_ref t " /refs / heads / master " ]
e.g. [commit_of_ref t "/refs/heads/master"] *)
val repo : t -> Fpath.t
end
| null | https://raw.githubusercontent.com/ocurrent/ocurrent/344af83279e9ba17f5f32d0a0351c228a6f42863/plugins/git/current_git.mli | ocaml | * Integration with Git.
* [v ~repo ~gref ~hash] identifies a commit that can be fetched from [repo]
using [gref] as the reference name and has hash [hash].
* [repo t] is the Git URI of the repository.
* Display a Git command a user could run to get this commit.
* [pp_short] shows just the start of the hash.
* [clone ~schedule ~gref uri] evaluates to the head commit of [uri]'s [gref] branch (default: "master").
* [with_checkout ~job c fn] clones [c] to a temporary directory and runs [fn tmpdir].
When it returns, the directory is deleted.
@param pool Used to prevent too many clones from happening at once.
* A local Git repository.
* [v path] is the local Git repository at [path].
* [head] is the current branch ref (e.g. "/refs/heads/master").
* [head_commit] is the commit at the head of the current branch. |
module Commit_id : sig
include Set.OrderedType
val v : repo:string -> gref:string -> hash:string -> t
val repo : t -> string
val gref : t -> string
val hash : t -> string
[ hash t ] is the commit hash .
val equal : t -> t -> bool
val pp : t Fmt.t
val pp_user_clone : t Fmt.t
val digest : t -> string
end
module Commit : sig
include Set.OrderedType
val id : t -> Commit_id.t
val hash : t -> string
val equal : t -> t -> bool
val pp : t Fmt.t
val pp_short : t Fmt.t
val marshal : t -> string
val unmarshal : string -> t
end
val clone : schedule:Current_cache.Schedule.t -> ?gref:string -> string -> Commit.t Current.t
val fetch : Commit_id.t Current.t -> Commit.t Current.t
val with_checkout :
?pool:unit Current.Pool.t ->
job:Current.Job.t ->
Commit.t ->
(Fpath.t -> 'a Current.or_error Lwt.t) ->
'a Current.or_error Lwt.t
module Local : sig
type t
val v : Fpath.t -> t
val head : t -> [`Commit of Commit_id.t | `Ref of string ] Current.t
val head_commit : t -> Commit.t Current.t
val commit_of_ref : t -> string -> Commit.t Current.t
* [ commit_of_ref t ] evaluates to the commit at the head of [ gref ] .
e.g. [ commit_of_ref t " /refs / heads / master " ]
e.g. [commit_of_ref t "/refs/heads/master"] *)
val repo : t -> Fpath.t
end
|
2e59da5b156a460ebda3f720a7ba0d27df8b674dd3a13aed27919e6ba68523a1 | tuura/plato | Main.hs | module Main (main) where
import Data.Char
import qualified Data.Text as Text
import System.Directory
import Control.Exception
import System.Environment
import System.IO.Error
import Tuura.Concept
import Tuura.Concept.Simulation
import qualified Language.Haskell.Interpreter as GHC
import qualified Language.Haskell.Interpreter.Unsafe as GHC
main :: IO ()
main = do
args <- getArgs
if length args /= 1
then putStrLn "Exactly one path needed"
else do
r <- GHC.runInterpreter $ doWork (head args)
case r of
Left err -> putStrLn $ displayException err
Right () -> return ()
Our own Signal type . Contains the signal index , from 0 to x-1 if
- there are x signals .
- there are x signals. -}
data DynSignal = Signal Int deriving Eq
instance Show DynSignal where show (Signal i) = [chr (ord 'A' + i)]
Temporary module to help us use any number of signals in the user 's
- circuit . Otherwise , we would be bound to a number of arguments
- ( signals ) known at compile time .
- Keep the data DynSignal line in sync with the one above !
- circuit. Otherwise, we would be bound to a number of arguments
- (signals) known at compile time.
- Keep the data DynSignal line in sync with the one above! -}
signalsApply :: Int -> [String]
signalsApply num = [
"import Data.Char",
"data DynSignal = Signal Int deriving Eq",
"signs = [Signal i | i <- [0.." ++ show (num-1) ++ "]]",
"apply c = c " ++ unwords ["(signs !! " ++ show i ++ ")" | i <- [0..num-1]]]
writeTmpFile :: [String] -> IO ()
writeTmpFile ls =
writeFile tmpModuleFile $ unlines withModule
where withModule = "module Helper where" : ls
removeIfExists :: FilePath -> IO ()
removeIfExists fileName = removeFile fileName `catch` handleExists
where handleExists e
| isDoesNotExistError e = return ()
| otherwise = throwIO e
{- Exported names in the user's haskell module (file) -}
circuitName, tmpModuleFile :: String
circuitName = "circuit"
tmpModuleFile = ".Helper.hs"
{- Helper functions because we deal with String, not Text. -}
count :: String -> String -> Int
count sub str = Text.count (Text.pack sub) (Text.pack str)
strRepeat :: Int -> String -> String
strRepeat n str = Text.unpack $ Text.replicate n (Text.pack str)
loadModulesTopLevel :: [String] -> GHC.Interpreter ()
loadModulesTopLevel paths = do
GHC.loadModules paths
mods <- GHC.getLoadedModules
GHC.setTopLevelModules mods
doWork :: String -> GHC.Interpreter ()
doWork path = do
{- Load user's module to gather info. -}
loadModulesTopLevel [path]
{- Use the circuit's type to gather how many signals it takes. -}
t <- GHC.typeOf circuitName
let numSigns = count "->" t
liftIO $ putStrLn $ "Circuit signal count: " ++ show numSigns
{- Load the generated module too. -}
liftIO $ writeTmpFile $ signalsApply numSigns
loadModulesTopLevel [path, tmpModuleFile]
liftIO $ removeIfExists tmpModuleFile
{- Fetch our signals. -}
signs <- GHC.interpret "signs" (GHC.as :: [DynSignal])
liftIO $ putStrLn $ "Circuit signal names: " ++ show signs
{- Obtain the circuit in terms of any signal (takes them as args). -}
let ctype = strRepeat numSigns "DynSignal ->" ++ "CircuitConcept DynSignal"
circuit <- GHC.unsafeInterpret circuitName ctype
{- Use our generated code to apply our signals to the circuit above -}
apply <- GHC.unsafeInterpret "apply" $ "(" ++ ctype ++ ") -> CircuitConcept DynSignal"
let fullCircuit = apply circuit
{- Input initial state. -}
initialState <- liftIO $ readState numSigns
liftIO $ putStrLn ""
{- Run the interactive simulation. -}
(_, finalState) <- liftIO $ runSimulation (doSimulate signs fullCircuit) initialState
liftIO $ putStrLn $ "\nFinal state: " ++ showState finalState signs
isBinary :: String -> Bool
isBinary = foldr (\c -> (&&) (c == '0' || c == '1')) True
readState :: Int -> IO (State DynSignal)
readState num = do
putStr "Initial state: "
word <- getLine
if length word /= num || not (isBinary word)
then do
let start = replicate num '0'
let end = replicate num '1'
let ranging = "from " ++ start ++ " to " ++ end
putStrLn $ "Invalid state! Use a binary array ranging " ++ ranging
readState num
else return $ State (\(Signal i) -> '1' == word !! i)
Helper functions to avoid needing SignalWrapper to be / Bounded
showState :: State t -> [t] -> String
showState (State v) = map (\s -> if v s then '1' else '0')
allTrans :: [a] -> [Transition a]
allTrans signs = [Transition s b | s <- signs, b <- [False, True]]
enabledTrans :: Monad m => s -> Concept s (Transition a) -> [a] -> m [Transition a]
enabledTrans st c signs =
return $ filter (\t -> excited c t st) $ allTrans signs
{- Find the signal with a matching name. -}
readSignal :: [DynSignal] -> String -> DynSignal
readSignal (s:ls) word = if show s == word then s else readSignal ls word
readSignal [] _ = Signal (-1)
doSimulate :: MonadIO m => [DynSignal] -> Concept (State DynSignal) (Transition DynSignal) -> StateT (State DynSignal) m ()
doSimulate signs circuit = do
st <- get
liftIO $ putStrLn $ "State: " ++ showState st signs
ts <- enabledTrans st circuit signs
liftIO $ putStrLn $ "Enabled: " ++ show ts
liftIO $ putStr "Do: "
word <- liftIO getLine
unless ("end" == map toLower word) $ do
let signWord = init word
let up = '+' == last word
let sign = readSignal signs signWord
let t = Transition sign up
if t `elem` ts
then fire t
else liftIO $ putStrLn "Invalid transition!"
doSimulate signs circuit
| null | https://raw.githubusercontent.com/tuura/plato/4b528f73ad677cf634dde7644a0ec5c759114baf/simulate/Main.hs | haskell | Exported names in the user's haskell module (file)
Helper functions because we deal with String, not Text.
Load user's module to gather info.
Use the circuit's type to gather how many signals it takes.
Load the generated module too.
Fetch our signals.
Obtain the circuit in terms of any signal (takes them as args).
Use our generated code to apply our signals to the circuit above
Input initial state.
Run the interactive simulation.
Find the signal with a matching name. | module Main (main) where
import Data.Char
import qualified Data.Text as Text
import System.Directory
import Control.Exception
import System.Environment
import System.IO.Error
import Tuura.Concept
import Tuura.Concept.Simulation
import qualified Language.Haskell.Interpreter as GHC
import qualified Language.Haskell.Interpreter.Unsafe as GHC
main :: IO ()
main = do
args <- getArgs
if length args /= 1
then putStrLn "Exactly one path needed"
else do
r <- GHC.runInterpreter $ doWork (head args)
case r of
Left err -> putStrLn $ displayException err
Right () -> return ()
Our own Signal type . Contains the signal index , from 0 to x-1 if
- there are x signals .
- there are x signals. -}
data DynSignal = Signal Int deriving Eq
instance Show DynSignal where show (Signal i) = [chr (ord 'A' + i)]
Temporary module to help us use any number of signals in the user 's
- circuit . Otherwise , we would be bound to a number of arguments
- ( signals ) known at compile time .
- Keep the data DynSignal line in sync with the one above !
- circuit. Otherwise, we would be bound to a number of arguments
- (signals) known at compile time.
- Keep the data DynSignal line in sync with the one above! -}
signalsApply :: Int -> [String]
signalsApply num = [
"import Data.Char",
"data DynSignal = Signal Int deriving Eq",
"signs = [Signal i | i <- [0.." ++ show (num-1) ++ "]]",
"apply c = c " ++ unwords ["(signs !! " ++ show i ++ ")" | i <- [0..num-1]]]
writeTmpFile :: [String] -> IO ()
writeTmpFile ls =
writeFile tmpModuleFile $ unlines withModule
where withModule = "module Helper where" : ls
removeIfExists :: FilePath -> IO ()
removeIfExists fileName = removeFile fileName `catch` handleExists
where handleExists e
| isDoesNotExistError e = return ()
| otherwise = throwIO e
circuitName, tmpModuleFile :: String
circuitName = "circuit"
tmpModuleFile = ".Helper.hs"
count :: String -> String -> Int
count sub str = Text.count (Text.pack sub) (Text.pack str)
strRepeat :: Int -> String -> String
strRepeat n str = Text.unpack $ Text.replicate n (Text.pack str)
loadModulesTopLevel :: [String] -> GHC.Interpreter ()
loadModulesTopLevel paths = do
GHC.loadModules paths
mods <- GHC.getLoadedModules
GHC.setTopLevelModules mods
doWork :: String -> GHC.Interpreter ()
doWork path = do
loadModulesTopLevel [path]
t <- GHC.typeOf circuitName
let numSigns = count "->" t
liftIO $ putStrLn $ "Circuit signal count: " ++ show numSigns
liftIO $ writeTmpFile $ signalsApply numSigns
loadModulesTopLevel [path, tmpModuleFile]
liftIO $ removeIfExists tmpModuleFile
signs <- GHC.interpret "signs" (GHC.as :: [DynSignal])
liftIO $ putStrLn $ "Circuit signal names: " ++ show signs
let ctype = strRepeat numSigns "DynSignal ->" ++ "CircuitConcept DynSignal"
circuit <- GHC.unsafeInterpret circuitName ctype
apply <- GHC.unsafeInterpret "apply" $ "(" ++ ctype ++ ") -> CircuitConcept DynSignal"
let fullCircuit = apply circuit
initialState <- liftIO $ readState numSigns
liftIO $ putStrLn ""
(_, finalState) <- liftIO $ runSimulation (doSimulate signs fullCircuit) initialState
liftIO $ putStrLn $ "\nFinal state: " ++ showState finalState signs
isBinary :: String -> Bool
isBinary = foldr (\c -> (&&) (c == '0' || c == '1')) True
readState :: Int -> IO (State DynSignal)
readState num = do
putStr "Initial state: "
word <- getLine
if length word /= num || not (isBinary word)
then do
let start = replicate num '0'
let end = replicate num '1'
let ranging = "from " ++ start ++ " to " ++ end
putStrLn $ "Invalid state! Use a binary array ranging " ++ ranging
readState num
else return $ State (\(Signal i) -> '1' == word !! i)
Helper functions to avoid needing SignalWrapper to be / Bounded
showState :: State t -> [t] -> String
showState (State v) = map (\s -> if v s then '1' else '0')
allTrans :: [a] -> [Transition a]
allTrans signs = [Transition s b | s <- signs, b <- [False, True]]
enabledTrans :: Monad m => s -> Concept s (Transition a) -> [a] -> m [Transition a]
enabledTrans st c signs =
return $ filter (\t -> excited c t st) $ allTrans signs
readSignal :: [DynSignal] -> String -> DynSignal
readSignal (s:ls) word = if show s == word then s else readSignal ls word
readSignal [] _ = Signal (-1)
doSimulate :: MonadIO m => [DynSignal] -> Concept (State DynSignal) (Transition DynSignal) -> StateT (State DynSignal) m ()
doSimulate signs circuit = do
st <- get
liftIO $ putStrLn $ "State: " ++ showState st signs
ts <- enabledTrans st circuit signs
liftIO $ putStrLn $ "Enabled: " ++ show ts
liftIO $ putStr "Do: "
word <- liftIO getLine
unless ("end" == map toLower word) $ do
let signWord = init word
let up = '+' == last word
let sign = readSignal signs signWord
let t = Transition sign up
if t `elem` ts
then fire t
else liftIO $ putStrLn "Invalid transition!"
doSimulate signs circuit
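-- Editor's illustration (not part of the original file): at the "Do: "
-- prompt, entering "A+" attempts the rising transition of the first signal
-- (Transition (Signal 0) True); it only fires if it is currently listed as
-- enabled. Entering "end" (any case) leaves the simulation loop.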
|
efa3703feceec51a0e2877f71f7982169f7ad31931011bc375721c828e1fb7af | cxphoe/SICP-solutions | 4.11.rkt | ; environment operations
(define (enclosing-environment env) (cdr env))
(define (first-frame env) (car env))
(define the-empty-environment '())
(define (make-frame var-val-pairs)
  (cons '*table* var-val-pairs))
(define (frame-pairs frame) (cdr frame))
(define (first-pair frame) (car (frame-pairs frame)))
(define (rest-pairs frame) (cdr (frame-pairs frame)))
(define (add-binding-to-frame! var-val frame)
(set-cdr! frame (cons var-val
(frame-pairs frame))))
(define (extend-environment var-val-pairs base-env)
(cons (make-frame var-val-pairs) base-env))
(define (lookup-variable-value var env)
(define (env-loop env)
(define (scan frame)
(cond ((null? frame)
(env-loop (enclosing-environment env)))
((eq? var (car (first-pair frame)))
(cdr (first-pair frame)))
(else (scan (rest-pairs frame)))))
(if (eq? env the-empty-environment)
(error "Unbound variable" var)
(let ((frame (first-frame env)))
(scan frame))))
(env-loop env))
(define (set-variable-value! var val env)
(define (env-loop env)
(define (scan frame)
(cond ((null? frame)
(env-loop (enclosing-environment env)))
((eq? var (car (first-pair frame)))
(set-cdr! (first-pair frame) val))
(else (scan (rest-pairs frame)))))
(if (eq? env the-empty-environment)
(error "Unbound variable -- SET!" var)
(let ((frame (first-frame env)))
(scan frame))))
(env-loop env))
(define (define-variable! var val env)
(let ((frame (first-frame env)))
(define (scan frame)
(cond ((null? frame)
(add-binding-to-frame! (cons var val) frame))
((eq? var (car (first-pair frame)))
(set-cdr! (first-pair frame) val))
(else (scan (rest-pairs frame)))))
(scan frame))) | null | https://raw.githubusercontent.com/cxphoe/SICP-solutions/d35bb688db0320f6efb3b3bde1a14ce21da319bd/Chapter%204-Metalinguistic%20Abstraction/1.The%20Meta-cycle%20Evaluator/4.11.rkt | racket | environment operations | (define (enclosing-environment env) (cdr env))
(define (first-frame env) (car env))
(define the-empty-environment '())
(define (make-frame var-val-pairs)
  (cons '*table* var-val-pairs))
(define (frame-pairs frame) (cdr frame))
(define (first-pair frame) (car (frame-pairs frame)))
(define (rest-pairs frame) (cdr (frame-pairs frame)))
(define (add-binding-to-frame! var-val frame)
(set-cdr! frame (cons var-val
(frame-pairs frame))))
(define (extend-environment var-val-pairs base-env)
(cons (make-frame var-val-pairs) base-env))
(define (lookup-variable-value var env)
(define (env-loop env)
(define (scan frame)
(cond ((null? frame)
(env-loop (enclosing-environment env)))
((eq? var (car (first-pair frame)))
(cdr (first-pair frame)))
(else (scan (rest-pairs frame)))))
(if (eq? env the-empty-environment)
(error "Unbound variable" var)
(let ((frame (first-frame env)))
(scan frame))))
(env-loop env))
(define (set-variable-value! var val env)
(define (env-loop env)
(define (scan frame)
(cond ((null? frame)
(env-loop (enclosing-environment env)))
((eq? var (car (first-pair frame)))
(set-cdr! (first-pair frame) val))
(else (scan (rest-pairs frame)))))
(if (eq? env the-empty-environment)
(error "Unbound variable -- SET!" var)
(let ((frame (first-frame env)))
(scan frame))))
(env-loop env))
(define (define-variable! var val env)
(let ((frame (first-frame env)))
(define (scan frame)
(cond ((null? frame)
(add-binding-to-frame! (cons var val) frame))
((eq? var (car (first-pair frame)))
(set-cdr! (first-pair frame) val))
(else (scan (rest-pairs frame)))))
(scan frame))) |
a1aebab3f9aa6026cc2c2ca98a9cad85c408a96574ac50bcd96f08583f55e790 | metareflection/poof | info.rkt | #lang info
;; ---------------------------------------------------------
;; Package Info
(define collection "poof")
(define deps '("base" "scribble-lib" "rackunit-lib"
"scribble-abbrevs" "scribble-minted" "scribble-math"))
;; ---------------------------------------------------------
;; Collection Info
(define compile-omit-paths 'all)
(define compile-include-files
'("main.rkt"
"poof.scrbl"
"util/examples-module.rkt"
"util/eval-check.rkt"))
| null | https://raw.githubusercontent.com/metareflection/poof/1a03c5e31c16dd971d698c8457ff6e4a8748d8f1/info.rkt | racket | ---------------------------------------------------------
Package Info
---------------------------------------------------------
Collection Info | #lang info
(define collection "poof")
(define deps '("base" "scribble-lib" "rackunit-lib"
"scribble-abbrevs" "scribble-minted" "scribble-math"))
(define compile-omit-paths 'all)
(define compile-include-files
'("main.rkt"
"poof.scrbl"
"util/examples-module.rkt"
"util/eval-check.rkt"))
|
cbfaa64f54a7e1cabf954c56b50cd52c46542600cd0c970038abd8a3ae6ba4b7 | seandepagnier/cruisingplot | task.scm | Copyright ( C ) 2011 < >
;;
This Program is free software ; you can redistribute it and/or
;; modify it under the terms of the GNU General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
; define our own scheduler so we can process concurrent events,
; but get reliable timing, implemented using call/cc
(declare (unit task))
(declare (uses srfi-1 srfi-18 utilities))
; list of tasks
(define tasks '())
(define task-info first)
(define task-period second)
(define task-sleep-until third)
(define task-thunk fourth)
(define task-continuation fifth)
(define current-task #f) ; task currently running
(define current-task-cont #f) ; continuation to exit current task
(define (task-set-period! task period)
(set-car! (cdr task) period))
(define (task-set-sleep-until! task sleep-until)
(set-car! (cddr task) sleep-until))
(define (task-set-continuation! task cont)
(set-car! (cddddr task) cont))
; sleep for the task period if seconds not specified
(define (task-sleep . seconds)
(if (not current-task) (error "task-sleep called from non-task"))
(call/cc (lambda (cont)
(task-set-continuation! current-task (lambda () (cont #t)))
(current-task-cont (if (null? seconds) #t (first seconds))))))
(define (task-exit)
(if (not current-task) (error "task-exit called from non-task"))
(current-task-cont 'exit))
; Run thunk every period seconds (resolution milliseconds)
(define (create-periodic-task info period thunk)
(very-verbose "create-periodic-task " info " " tasks)
(set! tasks (cons (list info period 0 thunk thunk) tasks)))
; Run the thunk as a task, when it returns the task exits
(define (create-task info thunk)
(create-periodic-task info 0
(lambda () (thunk)
(task-exit))))
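; Illustrative usage sketch (editor's note, not part of the original file;
; the task names are made up):
; (create-periodic-task "gps-poll" 1 (lambda () (verbose "polling gps")))
; (create-task "startup" (lambda () (verbose "runs once, then exits")))
; (task-schedule-loop)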
; could be replaced with a heap if there were many tasks
; returns sorted task list
(define (insert-periodic-task task tasks)
(cond ((null? tasks) (list task))
((< (task-sleep-until task) (task-sleep-until (car tasks)))
(cons task tasks))
(else
(cons (car tasks) (insert-periodic-task task (cdr tasks))))))
(define (create-task-scheduler sleep-while-idle)
(let ((timer (start-timer))
(total-run-time 0)
(total-sleep-time 0)
(last-total-run-time 1)
(task-time 0))
(create-periodic-task
"task-stats-task" 1
(let ((last-time 0))
(lambda ()
(let ((time (timer)))
(let ((elapsed (- time last-time)))
(set! last-time time)
(define (time-percentage t)
(string-append (if (zero? elapsed) "N/A"
(number->string (round-to-places (* 100 (/ t elapsed)) 2)))
"%"))
(very-verbose "<" (length tasks) " tasks>"
" task-time: " (time-percentage total-run-time)
" non-task: " (time-percentage (- elapsed total-run-time total-sleep-time))
" sleep: " (time-percentage total-sleep-time))
(set! last-total-run-time total-run-time)
(set! total-run-time 0)
(set! total-sleep-time 0)
)))))
(lambda ()
(cond ((null? tasks)
(print "No tasks exist, exiting.")
(exit)))
(let ((task (first tasks)))
(cond ((< (task-sleep-until task) (timer))
(set! current-task task)
(let ((ret (call/cc (lambda (cont)
(set! current-task-cont cont)
(set! task-time (timer))
((task-continuation task))
(task-set-continuation! task (task-thunk task))
'return))))
(let ((run-time (- (timer) task-time)))
(set! total-run-time (+ total-run-time run-time))
(cond ((eq? ret 'exit)
(verbose "task " (task-info task) " exited")
(remove! (lambda (t) (eq? t current-task)) tasks)) ; delete this task
(else
(set! current-task #f)
                 (cond ((and (> (timer) 5) ; dont do this first 5 seconds
(> run-time (/ (task-period task) (length tasks)))
(> (- (timer) (task-sleep-until task))
(* 2 (task-period task))))
(verbose "task " (task-info task) " took too long "
" " (round-to-places run-time 5) " seconds"
" doubling period to " (* 2 (task-period task)))
(task-set-period! task (* 2 (task-period task)))))
(task-set-sleep-until! task
(if (number? ret)
(+ (timer) ret)
(+ (task-sleep-until task)
(task-period task))))
(set! tasks (insert-periodic-task task (cdr tasks)))
)))))
(else ; sleep it off
(let ((sleep-start-time (timer)))
(sleep (if sleep-while-idle
(- (task-sleep-until task) sleep-start-time)
.001))
(let ((sleep-time (- (timer) sleep-start-time)))
(set! total-sleep-time (+ total-sleep-time sleep-time))))))))))
(define (task-schedule-loop)
(let ((scheduler (create-task-scheduler #t)))
(let loop () (scheduler) (loop))))
(define (current-task-period)
(if current-task (task-period current-task)
(error "call to current-task-period outside of task")))
| null | https://raw.githubusercontent.com/seandepagnier/cruisingplot/d3d83e7372e2c5ce1a8e8071286e30c2028088cf/task.scm | scheme |
you can redistribute it and/or
modify it under the terms of the GNU General Public
either
define our own scheduler so we can process concurrent events,
but get reliable timing, implemented using call/cc
list of tasks
task currently running
continuation to exit current task
sleep for the task period if seconds not specified
Run thunk every period seconds (resolution milliseconds)
Run the thunk as a task, when it returns the task exits
could be replaced with a heap if there were many tasks
returns sorted task list
delete this task
sleep it off | Copyright ( C ) 2011 < >
(declare (unit task))
(declare (uses srfi-1 srfi-18 utilities))
(define tasks '())
(define task-info first)
(define task-period second)
(define task-sleep-until third)
(define task-thunk fourth)
(define task-continuation fifth)
(define current-task #f)
(define current-task-cont #f)
(define (task-set-period! task period)
(set-car! (cdr task) period))
(define (task-set-sleep-until! task sleep-until)
(set-car! (cddr task) sleep-until))
(define (task-set-continuation! task cont)
(set-car! (cddddr task) cont))
(define (task-sleep . seconds)
(if (not current-task) (error "task-sleep called from non-task"))
(call/cc (lambda (cont)
(task-set-continuation! current-task (lambda () (cont #t)))
(current-task-cont (if (null? seconds) #t (first seconds))))))
(define (task-exit)
(if (not current-task) (error "task-exit called from non-task"))
(current-task-cont 'exit))
(define (create-periodic-task info period thunk)
(very-verbose "create-periodic-task " info " " tasks)
(set! tasks (cons (list info period 0 thunk thunk) tasks)))
(define (create-task info thunk)
(create-periodic-task info 0
(lambda () (thunk)
(task-exit))))
(define (insert-periodic-task task tasks)
(cond ((null? tasks) (list task))
((< (task-sleep-until task) (task-sleep-until (car tasks)))
(cons task tasks))
(else
(cons (car tasks) (insert-periodic-task task (cdr tasks))))))
(define (create-task-scheduler sleep-while-idle)
(let ((timer (start-timer))
(total-run-time 0)
(total-sleep-time 0)
(last-total-run-time 1)
(task-time 0))
(create-periodic-task
"task-stats-task" 1
(let ((last-time 0))
(lambda ()
(let ((time (timer)))
(let ((elapsed (- time last-time)))
(set! last-time time)
(define (time-percentage t)
(string-append (if (zero? elapsed) "N/A"
(number->string (round-to-places (* 100 (/ t elapsed)) 2)))
"%"))
(very-verbose "<" (length tasks) " tasks>"
" task-time: " (time-percentage total-run-time)
" non-task: " (time-percentage (- elapsed total-run-time total-sleep-time))
" sleep: " (time-percentage total-sleep-time))
(set! last-total-run-time total-run-time)
(set! total-run-time 0)
(set! total-sleep-time 0)
)))))
(lambda ()
(cond ((null? tasks)
(print "No tasks exist, exiting.")
(exit)))
(let ((task (first tasks)))
(cond ((< (task-sleep-until task) (timer))
(set! current-task task)
(let ((ret (call/cc (lambda (cont)
(set! current-task-cont cont)
(set! task-time (timer))
((task-continuation task))
(task-set-continuation! task (task-thunk task))
'return))))
(let ((run-time (- (timer) task-time)))
(set! total-run-time (+ total-run-time run-time))
(cond ((eq? ret 'exit)
(verbose "task " (task-info task) " exited")
                       (remove! (lambda (t) (eq? t current-task)) tasks))
                      (else
(set! current-task #f)
                 (cond ((and (> (timer) 5)
(> run-time (/ (task-period task) (length tasks)))
(> (- (timer) (task-sleep-until task))
(* 2 (task-period task))))
(verbose "task " (task-info task) " took too long "
" " (round-to-places run-time 5) " seconds"
" doubling period to " (* 2 (task-period task)))
(task-set-period! task (* 2 (task-period task)))))
(task-set-sleep-until! task
(if (number? ret)
(+ (timer) ret)
(+ (task-sleep-until task)
(task-period task))))
(set! tasks (insert-periodic-task task (cdr tasks)))
)))))
          (else
           (let ((sleep-start-time (timer)))
(sleep (if sleep-while-idle
(- (task-sleep-until task) sleep-start-time)
.001))
(let ((sleep-time (- (timer) sleep-start-time)))
(set! total-sleep-time (+ total-sleep-time sleep-time))))))))))
(define (task-schedule-loop)
(let ((scheduler (create-task-scheduler #t)))
(let loop () (scheduler) (loop))))
(define (current-task-period)
(if current-task (task-period current-task)
(error "call to current-task-period outside of task")))
|
f6a3a4461808f34ee86d547edc133789bb2fbc2065a3ab432a6d8347d46705f0 | typelead/etlas | DescribeUnitId.hs | {-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleContexts #-}
module Distribution.Backpack.DescribeUnitId where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.PackageId
import Distribution.Types.ComponentName
import Distribution.Compat.Stack
import Distribution.Verbosity
import Distribution.ModuleName
import Distribution.Text
import Distribution.Simple.Utils
import Text.PrettyPrint
-- Unit identifiers have a well defined, machine-readable format,
-- but this format isn't very user-friendly for users. This
-- module defines some functions for solving common rendering
-- problems one has for displaying these.
--
-- There are three basic problems we tackle:
--
-- - Users don't want to see pkg-0.5-inplace-libname,
--   they want to see "library ' ' from 'pkg-0.5'"
--
-- - Users don't want to see the raw component identifier, which
-- usually contains a wordy hash that doesn't matter.
--
-- - Users don't want to see a hash of the instantiation: they
-- want to see the actual instantiation, and they want it in
-- interpretable form.
--
-- | Print a Setup message stating (1) what operation we are doing,
-- for (2) which component (with enough details to uniquely identify
-- the build in question.)
--
setupMessage' :: Text a => Verbosity
-> String -- ^ Operation being done (capitalized), on:
-> PackageIdentifier -- ^ Package
-> ComponentName -- ^ Component name
-> Maybe [(ModuleName, a)] -- ^ Instantiation, if available.
-- Polymorphic to take
-- 'OpenModule' or 'Module'
-> IO ()
setupMessage' verbosity msg pkgid cname mb_insts = withFrozenCallStack $ do
noticeDoc verbosity $
case mb_insts of
Just insts | not (null insts) ->
hang (msg_doc <+> text "instantiated with") 2
(vcat [ disp k <+> text "=" <+> disp v
| (k,v) <- insts ]) $$
for_doc
_ ->
msg_doc <+> for_doc
where
msg_doc = text msg <+> text (showComponentName cname)
for_doc = text "for" <+> disp pkgid <<>> text ".."
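-- Editor's illustration (not part of the original module): assuming the
-- main library's component name (a 'CLibName'-style value bound to
-- @cname@ below), a call such as
--
-- > setupMessage' verbosity "Configuring" pkgid cname Nothing
--
-- prints roughly "Configuring library for pkg-0.5..", while passing
-- @Just insts@ additionally lists each "ModuleName = instance" binding.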
| null | https://raw.githubusercontent.com/typelead/etlas/bbd7c558169e1fda086e759e1a6f8c8ca2807583/etlas-cabal/Distribution/Backpack/DescribeUnitId.hs | haskell | # LANGUAGE Rank2Types #
Unit identifiers have a well defined, machine-readable format,
but this format isn't very user-friendly for users. This
module defines some functions for solving common rendering
problems one has for displaying these.
- Users don't want to see the raw component identifier, which
usually contains a wordy hash that doesn't matter.
- Users don't want to see a hash of the instantiation: they
want to see the actual instantiation, and they want it in
interpretable form.
| Print a Setup message stating (1) what operation we are doing,
the build in question.)
^ Operation being done (capitalized), on:
^ Package
^ Component name
^ Instantiation, if available.
Polymorphic to take
'OpenModule' or 'Module' | # LANGUAGE FlexibleContexts #
module Distribution.Backpack.DescribeUnitId where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.PackageId
import Distribution.Types.ComponentName
import Distribution.Compat.Stack
import Distribution.Verbosity
import Distribution.ModuleName
import Distribution.Text
import Distribution.Simple.Utils
import Text.PrettyPrint
setupMessage' :: Text a => Verbosity
              -> String
              -> PackageIdentifier
              -> ComponentName
              -> Maybe [(ModuleName, a)]
              -> IO ()
setupMessage' verbosity msg pkgid cname mb_insts = withFrozenCallStack $ do
noticeDoc verbosity $
case mb_insts of
Just insts | not (null insts) ->
hang (msg_doc <+> text "instantiated with") 2
(vcat [ disp k <+> text "=" <+> disp v
| (k,v) <- insts ]) $$
for_doc
_ ->
msg_doc <+> for_doc
where
msg_doc = text msg <+> text (showComponentName cname)
for_doc = text "for" <+> disp pkgid <<>> text ".."
|
12d02cb7c0f02252bf7b206f5137e231575e6bb7c14382c926d3ec1d5e3e5597 | CicadaBank/bips | deploy.clj | ;; based on: -lib-deployer/blob/master/deploy-lein.bb
;; updated for clj-tools: g-krisztian
(def release
(edn/read-string (slurp "release.edn")))
(println release)
(def project
(format "%s/%s" (:group-id release) (:artifact-id release)))
(println "Project: " project)
(def version
(:version release))
(println "Version: " version)
(defn- can-deploy? []
  (let [status (:status (curl/get (str "https://clojars.org/" project
"/versions/" version)
{:throw false}))]
(= 404 status)))
(defn- tag-name [] (System/getenv "TAG"))
(defn- decode-base64 [string]
(-> java.util.Base64
.getDecoder
(.decode string)))
(defn run-shell-cmd [& args]
(let [{:keys [exit out err] :as result} (apply shell/sh args)]
(when-not (zero? exit)
(println "ERROR running command\nSTDOUT:")
(println out "\nSTDERR:")
(println err)
(throw (ex-info "Error while running shell command" {:status exit})))
result))
(defn- import-gpg! []
(let [secret (System/getenv "GPG_SECRET_KEYS")
ownertrust (System/getenv "GPG_OWNERTRUST")]
(when-not (and secret ownertrust) (throw (ex-info "Can't find GPG keys!" {})))
(run-shell-cmd "gpg" "--import" :in (decode-base64 secret))
(run-shell-cmd "gpg" "--import-ownertrust" :in (decode-base64 ownertrust))))
(defn deploy! []
(let [tag (not-empty (tag-name))]
(when-not (can-deploy?)
(throw (ex-info "Can't deploy this version - release version already exist on clojars"
{:version version})))
(when (some-> tag (str/replace-first #"v" "") (not= version))
(throw (ex-info "Tag version mismatches with release.edn"
{:tag-name tag
:version version})))
(when tag
(import-gpg!)
(println "Deploying a release version")
(run-shell-cmd "clojure" "-M:release" "--version" version)
(println "Deploy was successful"))))
(deploy!)
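;; Illustrative sketch (editor's note, not part of the original script; the
;; group/artifact names are made up): with release.edn containing
;; {:group-id "my.group" :artifact-id "my-lib" :version "1.2.3"} and the TAG
;; environment variable set to "v1.2.3", deploy! checks that my.group/my-lib
;; 1.2.3 is not yet on Clojars, imports the GPG keys from the environment and
;; runs `clojure -M:release --version 1.2.3`.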
| null | https://raw.githubusercontent.com/CicadaBank/bips/0d68f8d441cf6deb18ff12bfc9fe41ce02a7793b/generators/deploy.clj | clojure | based on: -lib-deployer/blob/master/deploy-lein.bb | updated for clj - tools : g - krisztian
(def release
(edn/read-string (slurp "release.edn")))
(println release)
(def project
(format "%s/%s" (:group-id release) (:artifact-id release)))
(println "Project: " project)
(def version
(:version release))
(println "Version: " version)
(defn- can-deploy? []
  (let [status (:status (curl/get (str "https://clojars.org/" project
"/versions/" version)
{:throw false}))]
(= 404 status)))
(defn- tag-name [] (System/getenv "TAG"))
(defn- decode-base64 [string]
(-> java.util.Base64
.getDecoder
(.decode string)))
(defn run-shell-cmd [& args]
(let [{:keys [exit out err] :as result} (apply shell/sh args)]
(when-not (zero? exit)
(println "ERROR running command\nSTDOUT:")
(println out "\nSTDERR:")
(println err)
(throw (ex-info "Error while running shell command" {:status exit})))
result))
(defn- import-gpg! []
(let [secret (System/getenv "GPG_SECRET_KEYS")
ownertrust (System/getenv "GPG_OWNERTRUST")]
(when-not (and secret ownertrust) (throw (ex-info "Can't find GPG keys!" {})))
(run-shell-cmd "gpg" "--import" :in (decode-base64 secret))
(run-shell-cmd "gpg" "--import-ownertrust" :in (decode-base64 ownertrust))))
(defn deploy! []
(let [tag (not-empty (tag-name))]
(when-not (can-deploy?)
(throw (ex-info "Can't deploy this version - release version already exist on clojars"
{:version version})))
(when (some-> tag (str/replace-first #"v" "") (not= version))
(throw (ex-info "Tag version mismatches with release.edn"
{:tag-name tag
:version version})))
(when tag
(import-gpg!)
(println "Deploying a release version")
(run-shell-cmd "clojure" "-M:release" "--version" version)
(println "Deploy was successful"))))
(deploy!)
|
626d4965b5d34418187eca0e35e8c0f6598a1f6f6bdab4d1947e0be0a26fd46e | Z572/gwwm | monitor.scm | (define-module (gwwm monitor)
#:autoload (gwwm) (gwwm-output-layout)
#:use-module (srfi srfi-1)
#:use-module (ice-9 format)
#:use-module (ice-9 q)
#:use-module (oop goops)
#:use-module (util572 box)
#:use-module (wayland list)
#:use-module (wlroots types)
#:use-module (wlroots types output)
#:use-module (wlroots types output-layout)
#:use-module (bytestructure-class)
#:export (current-monitor
monitor-name
monitor-description
monitor-enabled?
monitor-scale
monitor-height
monitor-list
monitor-width
monitor-refresh
monitor-physical-width
monitor-physical-height
monitor-at
monitor-output
monitor-layouts
monitor-window-area
monitor-scene-output
monitor-area
monitor-sellt
monitor-nmaster
monitor-mfact
dirtomon
<gwwm-monitor>
%monitors
wlr-output->monitor))
(define-once wlr-output->monitor (make-object-property))
(define-once %monitors
(make-parameter
(make-q)
(lambda (o)
(if (q? o) o
(error "not a q! ~A" o)))))
(define (monitor-list)
"return all monitors."
(car (%monitors)))
(define-once %current-monitor #f)
(define (get-current-monitor)
%current-monitor)
(define (set-current-monitor m)
(set! %current-monitor m))
(define current-monitor (make-procedure-with-setter
get-current-monitor
set-current-monitor))
(define-inlinable (%monitor-output m)
(slot-ref m '%output))
(define-class <gwwm-monitor> ()
(%output #:accessor monitor-output
#:setter set-.wlr-output!
#:init-keyword #:wlr-output)
(name #:allocation #:virtual
#:slot-ref (lambda (m) (.name (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-name)
(area #:accessor monitor-area
#:setter set-.area!)
(window-area #:accessor monitor-window-area
#:setter set-.window-area!)
(description #:allocation #:virtual
#:slot-ref (lambda (m) (.description (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-description)
(enabled? #:allocation #:virtual
#:slot-ref (lambda (m) (.enabled (%monitor-output m)))
#:slot-set! (lambda (m b)
(wlr-output-enable
(%monitor-output m) b))
#:accessor monitor-enabled?)
(width #:allocation #:virtual
#:slot-ref (lambda (m) (.width (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-width)
(height #:allocation #:virtual
#:slot-ref (lambda (m) (.height (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-height)
(scale #:allocation #:virtual
#:slot-ref (lambda (m) (.scale (%monitor-output m)))
#:slot-set! (lambda (m o)
(wlr-output-set-scale (%monitor-output m) o))
#:accessor monitor-scale)
(refresh #:allocation #:virtual
#:slot-ref (lambda (m) (.refresh (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-refresh)
(physical-width #:allocation #:virtual
#:slot-ref (lambda (m)
(.phys-width (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-physical-width)
(physical-height #:allocation #:virtual
#:slot-ref (lambda (m)
(.phys-height (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-physical-height)
(scene-output #:accessor monitor-scene-output #:init-value #f )
(layouts #:init-value (list #f #f)
#:accessor monitor-layouts
#:setter set-.monitor-layouts
#:init-keyword #:layouts)
(sellt #:init-value 0
#:accessor monitor-sellt
#:setter set-.monitor-sellt!)
(layers #:init-thunk
(lambda ()
(list (make-q)
(make-q)
(make-q)
(make-q))))
(nmaster #:init-value 1 #:accessor monitor-nmaster)
(mfact #:init-value 1/2 #:accessor monitor-mfact)
(seltags #:init-value 0)
(tagset #:init-thunk (lambda () (list 1 1)))
#:metaclass <redefinable-class>)
(define-method (write (o <gwwm-monitor>) port)
(format port "#<~a ~a ~x (~a . ~a) scale: ~a>"
(class-name (class-of o))
(monitor-name o)
(object-address o)
(monitor-width o)
(monitor-height o)
(monitor-scale o)))
(define (monitor-at x y)
(and=> (wlr-output-layout-output-at
(gwwm-output-layout) x y)
wlr-output->monitor))
(define (dirtomon dir)
(define p wlr-output->monitor)
(let* ((m (current-monitor))
(area (monitor-area m)))
(or
(and=> (wlr-output-layout-adjacent-output
(gwwm-output-layout)
(bs:enum->integer %wlr-direction-enum dir)
(monitor-output m)
(box-x area)
(box-y area))
p)
(and=> (wlr-output-layout-farthest-output
(gwwm-output-layout)
(logxor (bs:enum->integer %wlr-direction-enum
dir) 12 ;; dir ^ (WLR_DIRECTION_LEFT|WLR_DIRECTION_RIGHT)
)
(monitor-output m)
(box-x area)
(box-y area))
p)
m)))
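;; Illustrative sketch (editor's note, not part of the original module):
;; (dirtomon <direction>) with one of the %wlr-direction-enum values returns
;; the monitor adjacent to the current one in that direction, falls back to a
;; wrap-around search via wlr-output-layout-farthest-output, and finally to
;; the current monitor itself when no other output matches.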
| null | https://raw.githubusercontent.com/Z572/gwwm/6267afa8efcd4174d7ae12cc3fe3a6d3842bb628/gwwm/monitor.scm | scheme | dir ^ (WLR_DIRECTION_LEFT|WLR_DIRECTION_RIGHT) | (define-module (gwwm monitor)
#:autoload (gwwm) (gwwm-output-layout)
#:use-module (srfi srfi-1)
#:use-module (ice-9 format)
#:use-module (ice-9 q)
#:use-module (oop goops)
#:use-module (util572 box)
#:use-module (wayland list)
#:use-module (wlroots types)
#:use-module (wlroots types output)
#:use-module (wlroots types output-layout)
#:use-module (bytestructure-class)
#:export (current-monitor
monitor-name
monitor-description
monitor-enabled?
monitor-scale
monitor-height
monitor-list
monitor-width
monitor-refresh
monitor-physical-width
monitor-physical-height
monitor-at
monitor-output
monitor-layouts
monitor-window-area
monitor-scene-output
monitor-area
monitor-sellt
monitor-nmaster
monitor-mfact
dirtomon
<gwwm-monitor>
%monitors
wlr-output->monitor))
(define-once wlr-output->monitor (make-object-property))
(define-once %monitors
(make-parameter
(make-q)
(lambda (o)
(if (q? o) o
(error "not a q! ~A" o)))))
(define (monitor-list)
"return all monitors."
(car (%monitors)))
(define-once %current-monitor #f)
(define (get-current-monitor)
%current-monitor)
(define (set-current-monitor m)
(set! %current-monitor m))
(define current-monitor (make-procedure-with-setter
get-current-monitor
set-current-monitor))
(define-inlinable (%monitor-output m)
(slot-ref m '%output))
(define-class <gwwm-monitor> ()
(%output #:accessor monitor-output
#:setter set-.wlr-output!
#:init-keyword #:wlr-output)
(name #:allocation #:virtual
#:slot-ref (lambda (m) (.name (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-name)
(area #:accessor monitor-area
#:setter set-.area!)
(window-area #:accessor monitor-window-area
#:setter set-.window-area!)
(description #:allocation #:virtual
#:slot-ref (lambda (m) (.description (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-description)
(enabled? #:allocation #:virtual
#:slot-ref (lambda (m) (.enabled (%monitor-output m)))
#:slot-set! (lambda (m b)
(wlr-output-enable
(%monitor-output m) b))
#:accessor monitor-enabled?)
(width #:allocation #:virtual
#:slot-ref (lambda (m) (.width (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-width)
(height #:allocation #:virtual
#:slot-ref (lambda (m) (.height (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-height)
(scale #:allocation #:virtual
#:slot-ref (lambda (m) (.scale (%monitor-output m)))
#:slot-set! (lambda (m o)
(wlr-output-set-scale (%monitor-output m) o))
#:accessor monitor-scale)
(refresh #:allocation #:virtual
#:slot-ref (lambda (m) (.refresh (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-refresh)
(physical-width #:allocation #:virtual
#:slot-ref (lambda (m)
(.phys-width (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-physical-width)
(physical-height #:allocation #:virtual
#:slot-ref (lambda (m)
(.phys-height (%monitor-output m)))
#:slot-set! (lambda _ #f)
#:getter monitor-physical-height)
(scene-output #:accessor monitor-scene-output #:init-value #f )
(layouts #:init-value (list #f #f)
#:accessor monitor-layouts
#:setter set-.monitor-layouts
#:init-keyword #:layouts)
(sellt #:init-value 0
#:accessor monitor-sellt
#:setter set-.monitor-sellt!)
(layers #:init-thunk
(lambda ()
(list (make-q)
(make-q)
(make-q)
(make-q))))
(nmaster #:init-value 1 #:accessor monitor-nmaster)
(mfact #:init-value 1/2 #:accessor monitor-mfact)
(seltags #:init-value 0)
(tagset #:init-thunk (lambda () (list 1 1)))
#:metaclass <redefinable-class>)
(define-method (write (o <gwwm-monitor>) port)
(format port "#<~a ~a ~x (~a . ~a) scale: ~a>"
(class-name (class-of o))
(monitor-name o)
(object-address o)
(monitor-width o)
(monitor-height o)
(monitor-scale o)))
(define (monitor-at x y)
(and=> (wlr-output-layout-output-at
(gwwm-output-layout) x y)
wlr-output->monitor))
(define (dirtomon dir)
(define p wlr-output->monitor)
(let* ((m (current-monitor))
(area (monitor-area m)))
(or
(and=> (wlr-output-layout-adjacent-output
(gwwm-output-layout)
(bs:enum->integer %wlr-direction-enum dir)
(monitor-output m)
(box-x area)
(box-y area))
p)
(and=> (wlr-output-layout-farthest-output
(gwwm-output-layout)
(logxor (bs:enum->integer %wlr-direction-enum
                     dir) 12
                     )
(monitor-output m)
(box-x area)
(box-y area))
p)
m)))
|
1e251f0169acc92e40930052c3e9ad8e0b0a50632061d90dea538c74a630da22 | McCLIM/McCLIM | text-editor-pane.lisp | (cl:defpackage #:mcclim.text-editor-pane-test
(:use
#:cl
#:clim)
(:shadowing-import-from #:clim
#:interactive-stream-p))
(cl:in-package #:mcclim.text-editor-pane-test)
(defclass text-editor-with-syntax-pane (text-editor-pane)
())
(defmethod initialize-instance :after ((instance text-editor-with-syntax-pane) &key)
(let* ((view (view (climi::substrate instance)))
(buffer (drei:buffer view)))
(setf (drei:syntax view)
(make-instance 'drei-lisp-syntax:lisp-syntax :buffer buffer))))
(define-application-frame text-editor-pane-test ()
()
(:panes
(editor text-editor-with-syntax-pane :nlines 20)
(drei :drei :syntax (make-instance (drei-syntax:syntax-from-name :lisp)
:buffer (make-instance 'drei-buffer:standard-buffer))
:scroll-bars t)
)
(:layouts
(default
(vertically ()
editor
drei
))))
(let ((frame (make-application-frame 'text-editor-pane-test)))
(run-frame-top-level frame))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/7c890f1ac79f0c6f36866c47af89398e2f05b343/Tests/text-editor-pane.lisp | lisp | (cl:defpackage #:mcclim.text-editor-pane-test
(:use
#:cl
#:clim)
(:shadowing-import-from #:clim
#:interactive-stream-p))
(cl:in-package #:mcclim.text-editor-pane-test)
(defclass text-editor-with-syntax-pane (text-editor-pane)
())
(defmethod initialize-instance :after ((instance text-editor-with-syntax-pane) &key)
(let* ((view (view (climi::substrate instance)))
(buffer (drei:buffer view)))
(setf (drei:syntax view)
(make-instance 'drei-lisp-syntax:lisp-syntax :buffer buffer))))
(define-application-frame text-editor-pane-test ()
()
(:panes
(editor text-editor-with-syntax-pane :nlines 20)
(drei :drei :syntax (make-instance (drei-syntax:syntax-from-name :lisp)
:buffer (make-instance 'drei-buffer:standard-buffer))
:scroll-bars t)
)
(:layouts
(default
(vertically ()
editor
drei
))))
(let ((frame (make-application-frame 'text-editor-pane-test)))
(run-frame-top-level frame))
|
|
2388165cc9334f9b7af76a890be53b4ce7f67696cb94b988c95dccc34152ace7 | IG-Group/Havoc | core.clj | (ns fake.core
(:require
[compojure.core :refer [defroutes ANY GET]]
[ring.adapter.jetty :as jetty]
[ig.havoc.evil-http-server-mw :as evil]
[clojure.tools.nrepl.server :refer (start-server)]
[franzy.serialization.serializers :as serializers]
[franzy.clients.producer.client :as client]
[franzy.clients.producer.protocols :as producer]
[clojure.tools.logging :as log])
(:use ring.middleware.params)
(:gen-class))
(defn for-ever
[thunk]
(loop []
(if-let [result (try
[(thunk)]
(catch Exception e
(println e)
(Thread/sleep 100)))]
(result 0)
(recur))))
(def kafka-client (delay
(client/make-producer {:bootstrap.servers "kafka1:9092,kafka2:9092,kafka3:9092"
:acks "all"
:retries 1
:client.id "example-producer"}
(serializers/keyword-serializer)
(serializers/edn-serializer))))
(defn produce-edn [content]
(for-ever
#(producer/send-sync! @kafka-client {:topic "THE.TEST"
:value content})))
(def received (atom #{}))
(def sending (atom false))
(defn produce [from to]
(log/debug "producing" from)
(when-not (zero? (- to from))
(produce-edn {:id from
:ts (System/currentTimeMillis)})
(recur (inc from) to)))
(comment
(produce 10 12))
(defroutes api
(ANY "/" {:keys [body]}
(let [msg (read-string (slurp body))]
(swap! received conj (:id msg)))
{:status 200
:body "received!"})
(GET "/msgs-ids" []
{:status 200
:body (pr-str {:done (not @sending)
:ids @received})})
(GET "/send-msgs" [from to]
(assert (and from to) "from or to missing")
(reset! received #{})
(reset! sending true)
(future
(produce (Integer/parseInt from)
(Integer/parseInt to))
(reset! sending false))
{:status 204}))
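;; Illustrative usage sketch (editor's note, not part of the original
;; service; the host name is hypothetical):
;;   curl "http://fake:80/send-msgs?from=0&to=10"  ; produce ids 0..9 to Kafka
;;   curl "http://fake:80/msgs-ids"                ; => {:done ..., :ids #{...}}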
(defn -main [& args]
(start-server :port 3002 :bind "0.0.0.0")
(jetty/run-jetty
(evil/create-ring-mw
(wrap-params (var api)))
{:port 80
:join? true}))
| null | https://raw.githubusercontent.com/IG-Group/Havoc/24c9d8409b273d791370593d131525f04ba2c9a1/example/fake/src/fake/core.clj | clojure | (ns fake.core
(:require
[compojure.core :refer [defroutes ANY GET]]
[ring.adapter.jetty :as jetty]
[ig.havoc.evil-http-server-mw :as evil]
[clojure.tools.nrepl.server :refer (start-server)]
[franzy.serialization.serializers :as serializers]
[franzy.clients.producer.client :as client]
[franzy.clients.producer.protocols :as producer]
[clojure.tools.logging :as log])
(:use ring.middleware.params)
(:gen-class))
(defn for-ever
[thunk]
(loop []
(if-let [result (try
[(thunk)]
(catch Exception e
(println e)
(Thread/sleep 100)))]
(result 0)
(recur))))
(def kafka-client (delay
(client/make-producer {:bootstrap.servers "kafka1:9092,kafka2:9092,kafka3:9092"
:acks "all"
:retries 1
:client.id "example-producer"}
(serializers/keyword-serializer)
(serializers/edn-serializer))))
(defn produce-edn [content]
(for-ever
#(producer/send-sync! @kafka-client {:topic "THE.TEST"
:value content})))
(def received (atom #{}))
(def sending (atom false))
(defn produce [from to]
(log/debug "producing" from)
(when-not (zero? (- to from))
(produce-edn {:id from
:ts (System/currentTimeMillis)})
(recur (inc from) to)))
(comment
(produce 10 12))
(defroutes api
(ANY "/" {:keys [body]}
(let [msg (read-string (slurp body))]
(swap! received conj (:id msg)))
{:status 200
:body "received!"})
(GET "/msgs-ids" []
{:status 200
:body (pr-str {:done (not @sending)
:ids @received})})
(GET "/send-msgs" [from to]
(assert (and from to) "from or to missing")
(reset! received #{})
(reset! sending true)
(future
(produce (Integer/parseInt from)
(Integer/parseInt to))
(reset! sending false))
{:status 204}))
(defn -main [& args]
(start-server :port 3002 :bind "0.0.0.0")
(jetty/run-jetty
(evil/create-ring-mw
(wrap-params (var api)))
{:port 80
:join? true}))
|
|
c3fe86e1d347c93fa47f70ce086b164c4af144a9cf897beb5bc098781e1e1055 | lopec/LoPEC | taskFetcher.erl | %%%-------------------------------------------------------------------
%%% @author < >
%%% @author < >
%%% (C) 2009, Bjorn Dahlman &
%%% @doc
%%% The taskFetcher is responsible for fetching and adding tasks.
%%% @end
%%% Created : 29 Sep 2009 by < >
%%%-------------------------------------------------------------------
-module(taskFetcher).
-behaviour(gen_server).
%% API
-export([start_link/0,
task_done/1,
error/1,
new_task/5]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(DYNSUP, dynamicSupervisor).
-define(WORKER, computingProcess).
-define(TASK_FETCH_INTERVAL, 1000).
-record(state, {work_state = no_task, timer}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
chronicler:info("~w : module started", [?MODULE]),
gen_server:start_link({global, node()}, ?MODULE, no_args, []).
%%--------------------------------------------------------------------
%% @doc
%% Queries the dispatcher to create a new task.
%%
%% @spec new_task(JobId, ProgName, Type, Bucket, Key) -> Task | {error, Error}
%% @end
%%--------------------------------------------------------------------
new_task(JobId, ProgName, Type, Bucket, Key) ->
chronicler:info("~w : called new_task of type ~w", [?MODULE, Type]),
gen_server:call({global, node()},
{request, new_task, JobId, ProgName, Type, Bucket, Key}).
task_done(Data) ->
gen_server:cast({global, node()}, Data).
error(Data) ->
gen_server:cast({global, node()}, Data).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initiates the server
%%
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init(no_args) ->
{ok, TimerRef} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
{Upload, Download} = netMonitor:get_net_stats(),
NetStats = {Upload, Download},
{ok, {#state{timer = TimerRef}, NetStats}}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%%                                   {reply, Reply, State} |
%%                                   {reply, Reply, State, Timeout} |
%%                                   {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call({request, task}, _From, State) ->
request_task(),
{reply, State, State};
handle_call({request, new_task, JobId, ProgName, Type, Bucket, Key},
_From, State) ->
Reply = create_task(JobId, ProgName, Type, Bucket, Key),
{reply, Reply, State};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_call(Msg, From, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_call(Msg, From, State) ->
chronicler:debug("~w : Received unexpected handle_call call.~n"
"Message: ~p~n"
"From: ~p~n",
[?MODULE, Msg, From]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({_Pid, done, {JobId, TaskId, Time, TaskType, _Progname}},
{_LolTimer, {OldUp, OldDown}}) ->
%% Report to statistician
Diff = timer:now_diff(now(), Time) / 1000000,
Power = power_check:get_watt_per_task(Diff),
{NewUp, NewDown} = netMonitor:get_net_stats(),
Disk = statistician:get_node_disk_usage(raw),
Mem = statistician:get_node_mem_usage(raw),
statistician:update({{node(), JobId, TaskType, no_user},
Power, Diff, NewUp - OldUp, NewDown - OldDown, 1, 0,
Disk, Mem}),
%% Kill and remove computingProcess spec from dynamic supervisor
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
%% Report to master that task is done
dispatcher:report_task_done(TaskId),
    %% Reinstate poll timer and request task
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
request_task(),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handles errournous exit of user application.
%% Will tell the user through a user_info message.
%%
%% @spec handle_cast({Pid, error, CallState}, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({_Pid, error, {JobId, _TaskId, Time, TaskType, _Progname}},
{_Timer, {OldUp, OldDown}}) ->
%% Report to statistician
Diff = timer:now_diff(now(), Time) / 1000000,
Power = power_check:get_watt_per_task(Diff),
{NewUp, NewDown} = netMonitor:get_net_stats(),
Disk = statistician:get_node_disk_usage(raw),
Mem = statistician:get_node_mem_usage(raw),
statistician:update({{node(), JobId, TaskType, no_user},
Power, Diff, NewUp - OldUp, NewDown - OldDown, 0, 1,
Disk, Mem}),
%% Free task that has been given to node
dispatcher:task_failed(JobId, TaskType),
%% Kill and remove computingProcess spec from dynamic supervisor
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
    %% Reinstate poll timer and request task
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
request_task(),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_cast(Msg, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(Msg, State) ->
chronicler:debug("~w : Received unexpected handle_cast call.~n"
"Message: ~p~n",
[?MODULE, Msg]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(poll, {State = #state{work_state = no_task}, NetLoad}) ->
request_task(),
{noreply, {State, NetLoad}};
handle_info({task_response, no_task}, {State, NetLoad}) ->
{noreply, {State, NetLoad}};
handle_info({task_response, Task}, {State, NetLoad}) ->
start_task(Task),
timer:cancel(State#state.timer),
{noreply, {State#state{work_state = task}, NetLoad}};
handle_info(stop_job, {State, NetLoad}) ->
chronicler:debug("~w : Stopping job~n", [?MODULE]),
computingProcess:stop_job(),
computingProcess:stop(),
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
{NewUp, NewDown} = netMonitor:get_net_stats(),
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Logs and discards unexpected messages.
%%
%% @spec handle_info(Info, State) -> {noreply, State}
%% @end
%%--------------------------------------------------------------------
handle_info(Info, State) ->
chronicler:debug("~w : Received unexpected handle_info call.~n"
"Info: ~p~n",
[?MODULE, Info]),
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%% Logs and discards unexpected messages.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(Reason, _State) ->
chronicler:info("~w : Received terminate call.~n"
"Reason: ~p~n",
[?MODULE, Reason]),
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%% Logs and discards unexpected messages.
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(OldVsn, State, Extra) ->
chronicler:debug("~w : Received unexpected code_change call.~n"
"Old version: ~p~n"
"Extra: ~p~n",
[?MODULE, OldVsn, Extra]),
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Polls the dispatcher for a free task.
%%
%% @spec request_task() -> {Task, NewState}
%% @end
%%--------------------------------------------------------------------
request_task() ->
dispatcher:fetch_task(node(), self()).
%%--------------------------------------------------------------------
%% @doc
%% Tells the dispatcher to create a new task
%%
%% @spec create_task(JobId, ProgName, Type, Bucket, DataKey) -> {Task, NewState}
%% @end
%%--------------------------------------------------------------------
create_task(JobId, ProgName, Type, Bucket, Key) ->
StorageKey = {Bucket, Key},
dispatcher:add_task({JobId, ProgName, Type, StorageKey}).
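%% Editor's illustration (not part of the original module; the argument
%% values are made up): a call such as
%%   taskFetcher:new_task(JobId, "wordcount", map, "results", "job1/map0")
%% ends up here and is forwarded as
%%   dispatcher:add_task({JobId, "wordcount", map, {"results", "job1/map0"}}).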
%%--------------------------------------------------------------------
%% @doc
%% Starts an already fetched task on the node.
%%
%% @spec start_task({TaskId, JobId, ProgName, TaskType, StorageKeys}) ->
%%           ChildSpec
%% @end
%%--------------------------------------------------------------------
start_task({TaskId, JobId, ProgName, TaskType, StorageKeys}) ->
ChildSpec = {?WORKER,
{?WORKER,
start_link,
[ProgName, TaskType, JobId, StorageKeys, TaskId]},
temporary,
1,
worker,
[?WORKER]},
supervisor:start_child(?DYNSUP, ChildSpec),
ChildSpec.
| null | https://raw.githubusercontent.com/lopec/LoPEC/29a3989c48a60e5990615dea17bad9d24d770f7b/trunk/lib/slave/src/taskFetcher.erl | erlang | -------------------------------------------------------------------
@doc
The taskFetcher is responsible for fetching and adding tasks.
@end
-------------------------------------------------------------------
API
gen_server callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Queries the dispatcher to create a new task.
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initiates the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
Report to statistician
Kill and remove computingProcess spec from dynamic supervisor
Report to master that task is done
--------------------------------------------------------------------
@doc
Handles errournous exit of user application.
Will tell the user through a user_info message.
@end
--------------------------------------------------------------------
Report to statistician
Free task that has been given to node
Kill and remove computingProcess spec from dynamic supervisor
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
Logs and discards unexpected messages.
@end
--------------------------------------------------------------------
===================================================================
===================================================================
--------------------------------------------------------------------
@doc
Polls the dispatcher for a free task.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Tells the dispatcher to create a new task
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Starts an already fetched task on the node.
@end
-------------------------------------------------------------------- | @author < >
@author < >
( C ) 2009 , Bjorn Dahlman &
Created : 29 Sep 2009 by < >
-module(taskFetcher).
-behaviour(gen_server).
-export([start_link/0,
task_done/1,
error/1,
new_task/5]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(DYNSUP, dynamicSupervisor).
-define(WORKER, computingProcess).
-define(TASK_FETCH_INTERVAL, 1000).
-record(state, {work_state = no_task, timer}).
start_link() ->
chronicler:info("~w : module started", [?MODULE]),
gen_server:start_link({global, node()}, ?MODULE, no_args, []).
new_task(JobId, ProgName, Type, Bucket, Key) ->
chronicler:info("~w : called new_task of type ~w", [?MODULE, Type]),
gen_server:call({global, node()},
{request, new_task, JobId, ProgName, Type, Bucket, Key}).
task_done(Data) ->
gen_server:cast({global, node()}, Data).
error(Data) ->
gen_server:cast({global, node()}, Data).
init(no_args) ->
{ok, TimerRef} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
{Upload, Download} = netMonitor:get_net_stats(),
NetStats = {Upload, Download},
{ok, {#state{timer = TimerRef}, NetStats}}.
handle_call({request, task}, _From, State) ->
request_task(),
{reply, State, State};
handle_call({request, new_task, JobId, ProgName, Type, Bucket, Key},
_From, State) ->
Reply = create_task(JobId, ProgName, Type, Bucket, Key),
{reply, Reply, State};
handle_call(Msg, From, State) ->
chronicler:debug("~w : Received unexpected handle_call call.~n"
"Message: ~p~n"
"From: ~p~n",
[?MODULE, Msg, From]),
{noreply, State}.
handle_cast({_Pid, done, {JobId, TaskId, Time, TaskType, _Progname}},
{_LolTimer, {OldUp, OldDown}}) ->
Diff = timer:now_diff(now(), Time) / 1000000,
Power = power_check:get_watt_per_task(Diff),
{NewUp, NewDown} = netMonitor:get_net_stats(),
Disk = statistician:get_node_disk_usage(raw),
Mem = statistician:get_node_mem_usage(raw),
statistician:update({{node(), JobId, TaskType, no_user},
Power, Diff, NewUp - OldUp, NewDown - OldDown, 1, 0,
Disk, Mem}),
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
dispatcher:report_task_done(TaskId),
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
request_task(),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
handle_cast({_Pid, error, {JobId, _TaskId, Time, TaskType, _Progname}},
{_Timer, {OldUp, OldDown}}) ->
Diff = timer:now_diff(now(), Time) / 1000000,
Power = power_check:get_watt_per_task(Diff),
{NewUp, NewDown} = netMonitor:get_net_stats(),
Disk = statistician:get_node_disk_usage(raw),
Mem = statistician:get_node_mem_usage(raw),
statistician:update({{node(), JobId, TaskType, no_user},
Power, Diff, NewUp - OldUp, NewDown - OldDown, 0, 1,
Disk, Mem}),
dispatcher:task_failed(JobId, TaskType),
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
request_task(),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
handle_cast(Msg, State) ->
chronicler:debug("~w : Received unexpected handle_cast call.~n"
"Message: ~p~n",
[?MODULE, Msg]),
{noreply, State}.
handle_info(poll, {State = #state{work_state = no_task}, NetLoad}) ->
request_task(),
{noreply, {State, NetLoad}};
handle_info({task_response, no_task}, {State, NetLoad}) ->
{noreply, {State, NetLoad}};
handle_info({task_response, Task}, {State, NetLoad}) ->
start_task(Task),
timer:cancel(State#state.timer),
{noreply, {State#state{work_state = task}, NetLoad}};
handle_info(stop_job, {State, NetLoad}) ->
chronicler:debug("~w : Stopping job~n", [?MODULE]),
computingProcess:stop_job(),
computingProcess:stop(),
supervisor:terminate_child(?DYNSUP, ?WORKER),
supervisor:delete_child(?DYNSUP, ?WORKER),
{NewUp, NewDown} = netMonitor:get_net_stats(),
{ok, Timer} = timer:send_interval(?TASK_FETCH_INTERVAL, poll),
{noreply, {#state{work_state = no_task, timer = Timer}, {NewUp, NewDown}}};
handle_info(Info, State) ->
chronicler:debug("~w : Received unexpected handle_info call.~n"
"Info: ~p~n",
[?MODULE, Info]),
{noreply, State}.
terminate(Reason, _State) ->
chronicler:info("~w : Received terminate call.~n"
"Reason: ~p~n",
[?MODULE, Reason]),
ok.
code_change(OldVsn, State, Extra) ->
chronicler:debug("~w : Received unexpected code_change call.~n"
"Old version: ~p~n"
"Extra: ~p~n",
[?MODULE, OldVsn, Extra]),
{ok, State}.
request_task() ->
dispatcher:fetch_task(node(), self()).
create_task(JobId, ProgName, Type, Bucket, Key) ->
StorageKey = {Bucket, Key},
dispatcher:add_task({JobId, ProgName, Type, StorageKey}).
start_task({TaskId, JobId, ProgName, TaskType, StorageKeys}) ->
ChildSpec = {?WORKER,
{?WORKER,
start_link,
[ProgName, TaskType, JobId, StorageKeys, TaskId]},
temporary,
1,
worker,
[?WORKER]},
supervisor:start_child(?DYNSUP, ChildSpec),
ChildSpec.
|
46db405fbce84ce36363908c632f945e3fdf5385f4647858b81dccce30603c8a | taruen/apertiumpp | get-bible.rkt | #lang racket
;; A script to download Bible translations from bible.com
(require net/url
html-parsing
sxml/sxpath)
(module+ test
(require rackunit))
(define ROOT "https://www.bible.com")
(define LANGS-PAGE (string-append ROOT "/languages"))
(define REQUEST-HEADERS
'("User-agent: Mozilla/5.0 (compatible; Taruenbot/0.1; +/)"))
(define SLEEP 0) ;; second(s) between requests
(struct verse (id content) #:transparent)
;; a Verse is (verse String String)
;; interp. a single verse from the Bible
(define V-0
(verse "GEN.1.1" "In the beginning God created the heaven and the earth."))
;; (listof String) -> Void
;; given language codes, fetch translations from the LANGS-PAGE/[lang] and
;; store each translation in the [lang] directory, one verse per line
(define (main langs)
(for ([lang langs])
(make-directory lang)
(for ([version (versions lang)])
(define outf (string-append lang "/" (id version) ".csv"))
(dump-to-file (verses version) outf))))
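;; Illustrative call (the language codes are examples, not checked against
;; the site): (main '("eng")) creates ./eng/ and writes one <version-id>.csv
;; per available translation, one verse per line.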
;; String -> (listof String)
;; given LANGS-PAGE, return language codes for which
;; a Bible translation is available
(define (langs langs-page)
(map (λ (s) (string-replace s "/languages/" ""))
(filter (λ (url) (string-contains? url "/languages/"))
(urls (fetch langs-page)))))
;; String -> (listof String)
;; given a lang code, return urls of available Bible versions in that lang
(define (versions lang)
(map (λ (s) (string-append ROOT s))
(filter (λ (url) (string-contains? url "/versions/"))
(urls (fetch (string-append LANGS-PAGE "/" lang))))))
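;; e.g. (versions "eng") returns the version-page urls linked from
;; LANGS-PAGE/eng; the exact urls depend on the live site's markup.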
;; String -> (listof Verse)
;; given a 'versions' page url, e.g.
;; -kjv-king-james-version
;; return a list of verses from that translation
(define (verses version)
(define (recur page accum)
(begin
(displayln ((sxpath '(html head title)) page))
(sleep SLEEP)
(define nextpage (next page))
(if nextpage
(recur (fetch nextpage) (append accum (scrap page)))
(append accum (scrap page)))))
(recur (page1 version) '()))
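;; recur walks the pages of the translation one by one, accumulating the
;; scraped verses until next returns #f (no further page).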
;; String -> SXML
;; given a 'versions' page url, e.g.
;; -kjv-king-james-version
;; return the first page of that particular Bible translation
(define (page1 version)
(fetch
(string-append
ROOT
(first ((sxpath '(@ href *text*))
((sxpath "//a[contains(text(), 'Read Version:')]")
(fetch version)))))))
;; SXML -> (or/c String #f)
;; given a page of a Bible translation, return the url of the next page
(define (next page)
(define n
((sxpath '(@ href *text*))
((sxpath "//a[@data-vars-event-action='Next']") page)))
(if (empty? n)
#f
(string-append ROOT (first n))))
;; SXML -> (listof Verse)
;; given a page of a Bible translation, return verses from it
(define (scrap page)
(filter
has-content?
(map
span->verse
((sxpath "//span[contains(@class, 'verse')]") page))))
;; SXML -> Verse
;; given a <span class="verse"> element, return a verse struct
(define (span->verse span)
(define id (first ((sxpath '(@ data-usfm *text*)) span)))
(define con
((sxpath "span[contains(@class, 'content')]/text()") span))
(if (empty? con)
(verse id "")
(verse id (first con))))
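;; Sketch of the markup this expects (an assumption based on the sxpath
;; patterns above, not verified against the live site):
;;   (span->verse
;;    '(span (@ (class "verse v1") (data-usfm "GEN.1.1"))
;;           (span (@ (class "content")) "In the beginning ...")))
;;   => (verse "GEN.1.1" "In the beginning ...")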
;; Verse -> Boolean
;; return #t if verse has real content
(define (has-content? v)
(not (regexp-match #rx"^ *$" (verse-content v))))
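(module+ test
  ;; minimal checks; assumes whitespace-only content counts as empty
  (check-true (has-content? V-0))
  (check-false (has-content? (verse "GEN.1.2" " "))))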
;; utilities
;; String -> SXML
;; return contents of a webpage as SXML
(define (fetch url)
(html->xexp
(port->string (get-pure-port (string->url url) REQUEST-HEADERS))))
;; SXML -> (listof String)
;; return all links from a page
(define (urls page)
((sxpath '(// @ href *text*)) page))
;; String -> String
;; remove base url, leaving only version id
(define (id version)
(string-replace version (string-append ROOT "/versions/") ""))
(module+ test
(check-equal? (id "-bsknt14-inzhil")
"1929-bsknt14-inzhil"))
;; (listof Verse) -> Void
;; output verses to a file
(define (dump-to-file verses outf)
(call-with-output-file outf
(λ (out)
(for ([v verses])
(display
(string-append (verse-id v) "\t" (verse-content v) "\n")
out)))))
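;; Each line written is "<verse-id>\t<verse-content>", e.g.
;;   "GEN.1.1\tIn the beginning God created the heaven and the earth.\n"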
(module+ main
(main '("fra" "uzn" "kaa" "eng" "lin" "cat" "ita" "por" "por_pt"
"tur" "uig_cyr" "kaa" "tat"))) | null | https://raw.githubusercontent.com/taruen/apertiumpp/73eeacc19015170e54c77824e015224f6456cf3e/apertiumpp/cookbook/get-bible.rkt | racket | second(s) between requests