_id | repository | name | content | license | download_url | language | comments | code |
---|---|---|---|---|---|---|---|---|
c3847c91f82c5076bfaad604e9d186cb788cec7de8f5fec062b8c8a8093ecddf | GaloisInc/saw-script | UnionFind.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{- |
Module      : Verifier.SAW.UnionFind
Copyright   : Galois, Inc. 2012-2015
License     : BSD3
Maintainer  :
Stability   : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.UnionFind (
AssertResult(..)
, assertSucceeded
-- * Class operations
, Class
, UnionFind
, empty
, Action
, runAction
, classRep
, freshClass
, areEqual
, setEqual
, setUnequal
-- * Class descriptions
, readClassDesc
, writeClassDesc
, modifyClassDesc
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative (Applicative)
#endif
import Control.Monad.State.Strict
import Data.List (foldl')
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
-- Types {{{1
type ClassIndex = Int
-- | Equivalence class in union find structure.
newtype Class d = Class ClassIndex
data ClassState d = NonRep !ClassIndex
| Rep {
_classNeqs :: [ClassIndex] -- ^ Classes not equal to this class
, _classSize :: !Int -- ^ Size of class
, _classDesc :: d -- ^ Class descriptor
}
data UnionFind d = UFS {
ufsCount :: !Int
, ufsMap :: !(Map ClassIndex (ClassState d))
}
-- | Returns union find struct with no classes.
empty :: UnionFind d
empty = UFS { ufsCount = 0, ufsMap = Map.empty }
-- | Monad with scoped union find support.
newtype Action d a = UF { _unUF :: State (UnionFind d) a }
deriving (Functor, Applicative, Monad)
-- | Runs union find computation.
runAction :: UnionFind d -> Action d a -> (a, UnionFind d)
runAction s (UF m) = runState m s
-- Class operations {{{1
-- | Get the canonical representative of an equivalence class.
classRep :: Class d -> Action d (Class d)
classRep (Class r) = UF $ do
m <- gets ufsMap
let impl i prev = do
case Map.lookup i m of
Nothing -> error $ "classRep: Illegal index " ++ show i
Just (NonRep next) -> impl next (i:prev)
Just Rep{} -> do
let updateRep ma j = Map.insert j (NonRep i) ma
modify $ \s -> s { ufsMap = foldl' updateRep (ufsMap s) prev }
return (Class i)
impl r []
-- | Creates a new class with the given descriptor.
freshClass :: d -> Action d (Class d)
freshClass d = UF $ do
UFS { ufsCount = c, ufsMap = m } <- get
put UFS { ufsCount = c + 1, ufsMap = Map.insert c (Rep [] 1 d) m }
return $ Class c
-- | Return true if two classes are equal.
areEqual :: Class d -> Class d -> Action d Bool
areEqual cx cy = do
Class rx <- classRep cx
Class ry <- classRep cy
return (rx == ry)
toClassIdx :: Class d -> ClassIndex
toClassIdx (Class c) = c
data AssertResult = AssertSuccess | AssertFailed | AssertRedundant
deriving (Eq, Show)
assertSucceeded :: AssertResult -> Bool
assertSucceeded AssertSuccess = True
assertSucceeded AssertFailed = False
assertSucceeded AssertRedundant = True
-- | Attempt to set two equivalence classes to be equal.
-- Returns true if the attempt succeeded, and false if the classes were
-- previously set unequal.
setEqual :: Class d
-> Class d
-> d -- ^ Descriptor for union class.
-> Action d AssertResult
setEqual x y d = do
Class xr <- classRep x
Class yr <- classRep y
if xr == yr
then return AssertRedundant
else do
m <- UF $ gets ufsMap
let Rep xne xsz _xd = m Map.! xr
let Rep yne ysz _yd = m Map.! yr
xElts <- fmap (map toClassIdx) $ mapM classRep (map Class xne)
yElts <- fmap (map toClassIdx) $ mapM classRep (map Class yne)
if xr `elem` yElts || yr `elem` xElts
then return AssertFailed
else do
let neqs = Set.toList $ Set.fromList $ xElts ++ yElts
UF $ modify $ \s ->
if xsz < ysz
then do
s { ufsMap =
Map.insert xr (NonRep yr) $
Map.insert yr (Rep neqs (xsz + ysz) d) $
ufsMap s }
else do
s { ufsMap =
Map.insert xr (Rep neqs (xsz + ysz) d) $
Map.insert yr (NonRep xr) $
ufsMap s }
return AssertSuccess
-- | Attempt to set two equivalence classes to be unequal.
-- Returns true if the attempt succeeded, and false if the classes were
-- previously set equal.
setUnequal :: Class d -> Class d -> Action d AssertResult
setUnequal x y = do
Class xr <- classRep x
Class yr <- classRep y
if xr == yr
then return AssertFailed
else do
m <- UF $ gets ufsMap
let Rep xne xsz xd = m Map.! xr
let Rep yne _ _ = m Map.! yr
xElts <- fmap (map toClassIdx) $ mapM classRep (map Class xne)
yElts <- fmap (map toClassIdx) $ mapM classRep (map Class yne)
if xr `elem` yElts || yr `elem` xElts
then return AssertRedundant
else do
UF $ modify $ \s -> s { ufsMap = Map.insert xr (Rep (yr:xne) xsz xd) (ufsMap s) }
return AssertSuccess
-- Class descriptions {{{1
-- | Get a class description
readClassDesc :: Class d -> Action d d
readClassDesc c = do
Class rC <- classRep c
m <- UF $ gets ufsMap
let Rep _ _ desc = m Map.! rC
return desc
-- | Set a class description
writeClassDesc :: Class d -> d -> Action d ()
writeClassDesc c d = do
Class rC <- classRep c
UF $ modify $ \s ->
let Rep dis sz _ = (ufsMap s) Map.! rC
in s { ufsMap = Map.insert rC (Rep dis sz d) (ufsMap s) }
-- | Modify a class description
modifyClassDesc :: Class d -> (d -> d) -> Action d ()
modifyClassDesc c fn = do
Class rC <- classRep c
UF $ modify $ \s ->
let Rep dis sz desc = (ufsMap s) Map.! rC
in s { ufsMap = Map.insert rC (Rep dis sz (fn desc)) (ufsMap s) }
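-- A minimal usage sketch (not part of the original module), assuming only the
-- API exported above: create two classes with String descriptors, merge them
-- with 'setEqual', then confirm they now share a representative.
_exampleMerge :: (Bool, AssertResult)
_exampleMerge = fst $ runAction empty $ do
  c1 <- freshClass "x"
  c2 <- freshClass "y"
  r  <- setEqual c1 c2 "xy"
  eq <- areEqual c1 c2
  return (eq, r)
-- expected result: (True, AssertSuccess)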
| null | https://raw.githubusercontent.com/GaloisInc/saw-script/fdb8987f09999439833d5cb573f69197bdf2cb7f/saw-core/src/Verifier/SAW/UnionFind.hs | haskell | * Class operations
* Class descriptions
| Equivalence class in union find structure.
^ Classes not equal to this class
^ Size of class
^ Class descriptor
| Returns union find struct with no classes.
| Monad with scoped union find support.
| Runs union find computation.
| Get class description
| Creates a new class with the given descriptor.
Returns true if attempt succeeded, and false is classes are
^ Descriptor for union class.
Returns true if attempt succeeded, and false is classes are
previously set equal.
| Get a class description
| Set a class description
| Modify a class description | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
|
Module : Verifier . SAW.UnionFind
Copyright : Galois , Inc. 2012 - 2015
License : :
Stability : experimental
Portability : non - portable ( language extensions )
Module : Verifier.SAW.UnionFind
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.UnionFind (
AssertResult(..)
, assertSucceeded
, Class
, UnionFind
, empty
, Action
, runAction
, classRep
, freshClass
, areEqual
, setEqual
, setUnequal
, readClassDesc
, writeClassDesc
, modifyClassDesc
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative (Applicative)
#endif
import Control.Monad.State.Strict
import Data.List (foldl')
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
Types { { { 1
type ClassIndex = Int
newtype Class d = Class ClassIndex
data ClassState d = NonRep !ClassIndex
| Rep {
}
data UnionFind d = UFS {
ufsCount :: !Int
, ufsMap :: !(Map ClassIndex (ClassState d))
}
empty :: UnionFind d
empty = UFS { ufsCount = 0, ufsMap = Map.empty }
newtype Action d a = UF { _unUF :: State (UnionFind d) a }
deriving (Functor, Applicative, Monad)
runAction :: UnionFind d -> Action d a -> (a, UnionFind d)
runAction s (UF m) = runState m s
Class operations { { { 1
classRep :: Class d -> Action d (Class d)
classRep (Class r) = UF $ do
m <- gets ufsMap
let impl i prev = do
case Map.lookup i m of
Nothing -> error $ "classRep: Illegal index " ++ show i
Just (NonRep next) -> impl next (i:prev)
Just Rep{} -> do
let updateRep ma j = Map.insert j (NonRep i) ma
modify $ \s -> s { ufsMap = foldl' updateRep (ufsMap s) prev }
return (Class i)
impl r []
freshClass :: d -> Action d (Class d)
freshClass d = UF $ do
UFS { ufsCount = c, ufsMap = m } <- get
put UFS { ufsCount = c + 1, ufsMap = Map.insert c (Rep [] 1 d) m }
return $ Class c
| Return true if two classes are equal .
areEqual :: Class d -> Class d -> Action d Bool
areEqual cx cy = do
Class rx <- classRep cx
Class ry <- classRep cy
return (rx == ry)
toClassIdx :: Class d -> ClassIndex
toClassIdx (Class c) = c
data AssertResult = AssertSuccess | AssertFailed | AssertRedundant
deriving (Eq, Show)
assertSucceeded :: AssertResult -> Bool
assertSucceeded AssertSuccess = True
assertSucceeded AssertFailed = False
assertSucceeded AssertRedundant = True
| Attempt to set two equivalence classes to be equal .
previously set inequal .
setEqual :: Class d
-> Class d
-> Action d AssertResult
setEqual x y d = do
Class xr <- classRep x
Class yr <- classRep y
if xr == yr
then return AssertRedundant
else do
m <- UF $ gets ufsMap
let Rep xne xsz _xd = m Map.! xr
let Rep yne ysz _yd = m Map.! yr
xElts <- fmap (map toClassIdx) $ mapM classRep (map Class xne)
yElts <- fmap (map toClassIdx) $ mapM classRep (map Class yne)
if xr `elem` yElts || yr `elem` xElts
then return AssertFailed
else do
let neqs = Set.toList $ Set.fromList $ xElts ++ yElts
UF $ modify $ \s ->
if xsz < ysz
then do
s { ufsMap =
Map.insert xr (NonRep yr) $
Map.insert yr (Rep neqs (xsz + ysz) d) $
ufsMap s }
else do
s { ufsMap =
Map.insert xr (Rep neqs (xsz + ysz) d) $
Map.insert yr (NonRep xr) $
ufsMap s }
return AssertSuccess
| Attempt to set two equivalence classes to be unequal .
setUnequal :: Class d -> Class d -> Action d AssertResult
setUnequal x y = do
Class xr <- classRep x
Class yr <- classRep y
if xr == yr
then return AssertFailed
else do
m <- UF $ gets ufsMap
let Rep xne xsz xd = m Map.! xr
let Rep yne _ _ = m Map.! yr
xElts <- fmap (map toClassIdx) $ mapM classRep (map Class xne)
yElts <- fmap (map toClassIdx) $ mapM classRep (map Class yne)
if xr `elem` yElts || yr `elem` xElts
then return AssertRedundant
else do
UF $ modify $ \s -> s { ufsMap = Map.insert xr (Rep (yr:xne) xsz xd) (ufsMap s) }
return AssertSuccess
Class descriptions { { { 1
readClassDesc :: Class d -> Action d d
readClassDesc c = do
Class rC <- classRep c
m <- UF $ gets ufsMap
let Rep _ _ desc = m Map.! rC
return desc
writeClassDesc :: Class d -> d -> Action d ()
writeClassDesc c d = do
Class rC <- classRep c
UF $ modify $ \s ->
let Rep dis sz _ = (ufsMap s) Map.! rC
in s { ufsMap = Map.insert rC (Rep dis sz d) (ufsMap s) }
modifyClassDesc :: Class d -> (d -> d) -> Action d ()
modifyClassDesc c fn = do
Class rC <- classRep c
UF $ modify $ \s ->
let Rep dis sz desc = (ufsMap s) Map.! rC
in s { ufsMap = Map.insert rC (Rep dis sz (fn desc)) (ufsMap s) }
|
f67c6390ceae8975fb8ff53fcb24816552758f9180a1c56f0617fb62a0814e35 | anuragsoni/h2 | push_promise_frame_test.ml | open H2
let wire = "000018050C0000000A060000000C746869732069732064756D6D79486F77647921"
let wire' = "000011050c0000000a0000000c746869732069732064756d6d79"
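(* A note on the test vectors above (not from the original file): read against
   the standard HTTP/2 framing layout, [wire] decodes as length 0x000018 = 24,
   type 0x05 (PUSH_PROMISE), flags 0x0C (END_HEADERS + PADDED), stream id 10,
   pad length 6, promised stream id 12, the ASCII payload "this is dummy", and
   the padding bytes "Howdy!".  [wire'] is the same payload serialized without
   padding (length 0x11 = 17, no pad length byte). *)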
let extract_payload payload =
let open Types in
match payload with
| PushPromiseFrame (s, m) -> (s, m)
| _ -> failwith "INVALID FRAME"
let parse_push_promise_frame () =
let parsed = Util.parse_success wire in
Alcotest.(check int) "Flags" 12 parsed.frame_header.flags ;
Alcotest.(check int32) "Stream id" 10l parsed.frame_header.stream_id ;
Alcotest.(check int) "Length" 24 parsed.frame_header.length ;
let stream, message = extract_payload parsed.frame_payload in
Alcotest.(check int32) "Stream id" 12l stream ;
Alcotest.(check string) "message" "this is dummy" message
let serialize_push_promise_frame_with_padding () =
let info = {Serialize.flags = 12; stream_id = 10l; padding = Some "Howdy!"} in
let f = Faraday.create 24 in
Serialize.write_frame f info (Types.PushPromiseFrame (12l, "this is dummy")) ;
let output = Faraday.serialize_to_string f in
Alcotest.(check string) "Serialized" (Util.string_of_hex wire) output
let serialize_push_promise_frame_without_padding () =
let info = {Serialize.flags = 12; stream_id = 10l; padding = None} in
let f = Faraday.create 24 in
Serialize.write_frame f info (Types.PushPromiseFrame (12l, "this is dummy")) ;
let output = Faraday.serialize_to_string f in
Alcotest.(check string) "Serialized" (Util.string_of_hex wire') output
let tests =
[ ("Can parse push promise frame", `Quick, parse_push_promise_frame)
; ( "Can serialize push promise frame with padding"
, `Quick
, serialize_push_promise_frame_with_padding )
; ( "Can serialize push promise frame without padding"
, `Quick
, serialize_push_promise_frame_without_padding ) ]
| null | https://raw.githubusercontent.com/anuragsoni/h2/270d27cf54942e2b34932041827990ae49c89f85/test/push_promise_frame_test.ml | ocaml | open H2
let wire = "000018050C0000000A060000000C746869732069732064756D6D79486F77647921"
let wire' = "000011050c0000000a0000000c746869732069732064756d6d79"
let extract_payload payload =
let open Types in
match payload with
| PushPromiseFrame (s, m) -> (s, m)
| _ -> failwith "INVALID FRAME"
let parse_push_promise_frame () =
let parsed = Util.parse_success wire in
Alcotest.(check int) "Flags" 12 parsed.frame_header.flags ;
Alcotest.(check int32) "Stream id" 10l parsed.frame_header.stream_id ;
Alcotest.(check int) "Length" 24 parsed.frame_header.length ;
let stream, message = extract_payload parsed.frame_payload in
Alcotest.(check int32) "Stream id" 12l stream ;
Alcotest.(check string) "message" "this is dummy" message
let serialize_push_promise_frame_with_padding () =
let info = {Serialize.flags = 12; stream_id = 10l; padding = Some "Howdy!"} in
let f = Faraday.create 24 in
Serialize.write_frame f info (Types.PushPromiseFrame (12l, "this is dummy")) ;
let output = Faraday.serialize_to_string f in
Alcotest.(check string) "Serialized" (Util.string_of_hex wire) output
let serialize_push_promise_frame_without_padding () =
let info = {Serialize.flags = 12; stream_id = 10l; padding = None} in
let f = Faraday.create 24 in
Serialize.write_frame f info (Types.PushPromiseFrame (12l, "this is dummy")) ;
let output = Faraday.serialize_to_string f in
Alcotest.(check string) "Serialized" (Util.string_of_hex wire') output
let tests =
[ ("Can parse push promise frame", `Quick, parse_push_promise_frame)
; ( "Can serialize push promise frame with padding"
, `Quick
, serialize_push_promise_frame_with_padding )
; ( "Can serialize push promise frame without padding"
, `Quick
, serialize_push_promise_frame_without_padding ) ]
|
|
aa18d162e67a7b7cd9d9b5bf05db196696d2770956d4aa38058ad8d820c3175a | janestreet/incr_dom | js_misc.ml | open! Core
open Js_of_ocaml
module Rect = struct
type 'a t =
{ top : 'a
; left : 'a
; bottom : 'a
; right : 'a
}
[@@deriving sexp, bin_io, compare, fields]
let map t ~f =
{ top = f t.top; left = f t.left; bottom = f t.bottom; right = f t.right }
;;
let int_height t = t.bottom - t.top
let int_width t = t.right - t.left
let float_height t = t.bottom -. t.top
let float_width t = t.right -. t.left
end
let round_float_rect ?(round = Float.iround_nearest_exn) = Rect.map ~f:round
type rows_or_columns =
| Rows
| Columns
[@@deriving sexp, bin_io, variants, compare]
let innerHeight () = Dom_html.window##.innerHeight
let innerWidth () = Dom_html.window##.innerWidth
let element_is_in_viewport (elt : Dom_html.element Js.t) =
let rect = elt##getBoundingClientRect in
Float.( >= ) rect##.top 0.
&& Float.( >= ) rect##.left 0.
&& Float.( <= ) rect##.bottom (Float.of_int (innerHeight ()))
&& Float.( <= ) rect##.right (Float.of_int (innerWidth ()))
;;
(** Scrolls to the item marked as "keep-in-view" *)
let scroll ?(id = "keep-in-view") () =
match Dom_html.getElementById_opt id with
| None -> ()
| Some elt -> if not (element_is_in_viewport elt) then elt##scrollIntoView Js._true
;;
(* Not yet supported on Chrome. Maybe we should use jQuery?
   {[
     let scroll_into_view (elt : Dom_html.element Js.t) : unit =
       elt##scrollIntoView (object%js
         val block = Js.string "start"
         val behavior = Js.string "smooth"
       end)
   ]}
*)
(* [viewport_rect_of_element el] gets bounding rect of [elt]. The bounding rect is
relative to the view port *)
let viewport_rect_of_element (elt : Dom_html.element Js.t) : float Rect.t =
let rect = elt##getBoundingClientRect in
{ Rect.top = rect##.top
; left = rect##.left
; bottom = rect##.bottom
; right = rect##.right
}
;;
let viewport_rect () =
{ Rect.top = 0; left = 0; bottom = innerHeight (); right = innerWidth () }
;;
let client_rect_of_element (elt : Dom_html.element Js.t) : float Rect.t =
let bounding_rect = viewport_rect_of_element elt in
{ bounding_rect with
bottom = bounding_rect.top +. Float.of_int elt##.clientHeight
; right = bounding_rect.left +. Float.of_int elt##.clientWidth
}
;;
let client_rect () =
{ Rect.top = 0
; left = 0
; bottom = Dom_html.document##.documentElement##.clientHeight
; right = Dom_html.document##.documentElement##.clientWidth
}
;;
(** Simple wrapper for the binary-search functor *)
let binary_search (type elt) ~length ~get ~compare mode x =
let module Bs =
Binary_searchable.Make (struct
type nonrec elt = elt
type nonrec t = unit
let get () n = get n
let length () = length
end)
in
Bs.binary_search () ~compare mode x
;;
(** Searches through elements indexed from [0] to [length - 1]. *)
let element_search ~length ~nth_element_id ~search_by mode layout x =
let get =
let nth_element n =
let id = nth_element_id n in
match Dom_html.getElementById_opt id with
| None -> failwithf "Element %s not found" id ()
| Some elt -> elt
in
let first = viewport_rect_of_element (nth_element 0) in
let last = viewport_rect_of_element (nth_element (length - 1)) in
(* Compute the n'th element in a normalized way, as if the indexing always goes in
   increasing order from top to bottom *)
let nth_element_normalized n =
let is_ascending =
match layout with
| Rows -> Float.( <= ) first.top last.top
| Columns -> Float.( <= ) first.left last.left
in
nth_element (if is_ascending then n else length - n - 1)
in
fun n -> search_by (viewport_rect_of_element (nth_element_normalized n))
in
binary_search ~length ~get ~compare:Float.compare mode x
;;
let find_visible_range ~length ~nth_element_id layout =
if length = 0
then None
else (
let element_search = element_search ~length ~nth_element_id in
let viewport_rect = viewport_rect () in
let first =
let search_by, target =
match layout with
| Rows -> Rect.bottom, viewport_rect.top
| Columns -> Rect.right, viewport_rect.left
in
element_search ~search_by `First_strictly_greater_than layout (Float.of_int target)
in
let last =
let search_by, target =
match layout with
| Rows -> Rect.top, viewport_rect.bottom
| Columns -> Rect.left, viewport_rect.right
in
element_search ~search_by `Last_strictly_less_than layout (Float.of_int target)
in
(* Both [first] and [last] need to be [Some]. Otherwise, for example if the whole
   table is below the view port, then [first] will be [Some], [last] will be [None] *)
Option.both first last)
;;
let get_scroll_container_js_expr =
Js.Unsafe.pure_js_expr
{js|
(function (element) {
var doc = element.ownerDocument || document;
var win = doc.defaultView || window;
var re = /(auto|scroll)/;
if (element === doc) {
return doc;
}
var cur = element.parentNode;
while (cur.parentNode) {
var style = win.getComputedStyle(cur);
if (re.test(style.overflow + style.overflowY + style.overflowX)) {
return cur;
}
cur = cur.parentNode;
}
return doc;
})
|js}
;;
let get_scroll_container (el : #Dom.node Js.t) : Dom.node Js.t =
Js.Unsafe.fun_call get_scroll_container_js_expr [| Js.Unsafe.inject el |]
;;
| null | https://raw.githubusercontent.com/janestreet/incr_dom/7aaba451cf76dc493e218d738c6391882b55ef1b/src/js_misc.ml | ocaml | * Scrolls to the item marked as "keep-in-view"
[viewport_rect_of_element el] gets bounding rect of [elt]. The bounding rect is
relative to the view port
* Simple wrapper for the binary-search functor
* Searches through elements indexed from [0] to [length - 1].
Compute the n'th element in a normalized way, as if the indexing always goes in
increasing order from top top bottom | open! Core
open Js_of_ocaml
module Rect = struct
type 'a t =
{ top : 'a
; left : 'a
; bottom : 'a
; right : 'a
}
[@@deriving sexp, bin_io, compare, fields]
let map t ~f =
{ top = f t.top; left = f t.left; bottom = f t.bottom; right = f t.right }
;;
let int_height t = t.bottom - t.top
let int_width t = t.right - t.left
let float_height t = t.bottom -. t.top
let float_width t = t.right -. t.left
end
let round_float_rect ?(round = Float.iround_nearest_exn) = Rect.map ~f:round
type rows_or_columns =
| Rows
| Columns
[@@deriving sexp, bin_io, variants, compare]
let innerHeight () = Dom_html.window##.innerHeight
let innerWidth () = Dom_html.window##.innerWidth
let element_is_in_viewport (elt : Dom_html.element Js.t) =
let rect = elt##getBoundingClientRect in
Float.( >= ) rect##.top 0.
&& Float.( >= ) rect##.left 0.
&& Float.( <= ) rect##.bottom (Float.of_int (innerHeight ()))
&& Float.( <= ) rect##.right (Float.of_int (innerWidth ()))
;;
let scroll ?(id = "keep-in-view") () =
match Dom_html.getElementById_opt id with
| None -> ()
| Some elt -> if not (element_is_in_viewport elt) then elt##scrollIntoView Js._true
;;
Not yet supported on Chrome . Maybe we should use jQuery ?
{ [
let scroll_into_view ( elt : Dom_html.element ) : unit =
elt##scrollIntoView ( object%js
val block = Js.string " start "
val behavior = Js.string " smooth "
end )
] }
{[
let scroll_into_view (elt : Dom_html.element Js.t) : unit =
elt##scrollIntoView (object%js
val block = Js.string "start"
val behavior = Js.string "smooth"
end)
]}
*)
let viewport_rect_of_element (elt : Dom_html.element Js.t) : float Rect.t =
let rect = elt##getBoundingClientRect in
{ Rect.top = rect##.top
; left = rect##.left
; bottom = rect##.bottom
; right = rect##.right
}
;;
let viewport_rect () =
{ Rect.top = 0; left = 0; bottom = innerHeight (); right = innerWidth () }
;;
let client_rect_of_element (elt : Dom_html.element Js.t) : float Rect.t =
let bounding_rect = viewport_rect_of_element elt in
{ bounding_rect with
bottom = bounding_rect.top +. Float.of_int elt##.clientHeight
; right = bounding_rect.left +. Float.of_int elt##.clientWidth
}
;;
let client_rect () =
{ Rect.top = 0
; left = 0
; bottom = Dom_html.document##.documentElement##.clientHeight
; right = Dom_html.document##.documentElement##.clientWidth
}
;;
let binary_search (type elt) ~length ~get ~compare mode x =
let module Bs =
Binary_searchable.Make (struct
type nonrec elt = elt
type nonrec t = unit
let get () n = get n
let length () = length
end)
in
Bs.binary_search () ~compare mode x
;;
let element_search ~length ~nth_element_id ~search_by mode layout x =
let get =
let nth_element n =
let id = nth_element_id n in
match Dom_html.getElementById_opt id with
| None -> failwithf "Element %s not found" id ()
| Some elt -> elt
in
let first = viewport_rect_of_element (nth_element 0) in
let last = viewport_rect_of_element (nth_element (length - 1)) in
let nth_element_normalized n =
let is_ascending =
match layout with
| Rows -> Float.( <= ) first.top last.top
| Columns -> Float.( <= ) first.left last.left
in
nth_element (if is_ascending then n else length - n - 1)
in
fun n -> search_by (viewport_rect_of_element (nth_element_normalized n))
in
binary_search ~length ~get ~compare:Float.compare mode x
;;
let find_visible_range ~length ~nth_element_id layout =
if length = 0
then None
else (
let element_search = element_search ~length ~nth_element_id in
let viewport_rect = viewport_rect () in
let first =
let search_by, target =
match layout with
| Rows -> Rect.bottom, viewport_rect.top
| Columns -> Rect.right, viewport_rect.left
in
element_search ~search_by `First_strictly_greater_than layout (Float.of_int target)
in
let last =
let search_by, target =
match layout with
| Rows -> Rect.top, viewport_rect.bottom
| Columns -> Rect.left, viewport_rect.right
in
element_search ~search_by `Last_strictly_less_than layout (Float.of_int target)
in
Both [ first ] and [ last ] need to be [ Some ] . Otherwise , for example if the whole
table is below view port , then [ first ] will be [ Some ] , [ last ] will be [ None ]
table is below view port, then [first] will be [Some], [last] will be [None] *)
Option.both first last)
;;
let get_scroll_container_js_expr =
Js.Unsafe.pure_js_expr
{js|
(function (element) {
var doc = element.ownerDocument || document;
var win = doc.defaultView || window;
var re = /(auto|scroll)/;
if (element === doc) {
return doc;
}
var cur = element.parentNode;
while (cur.parentNode) {
var style = win.getComputedStyle(cur);
if (re.test(style.overflow + style.overflowY + style.overflowX)) {
return cur;
}
cur = cur.parentNode;
}
return doc;
})
|js}
;;
let get_scroll_container (el : #Dom.node Js.t) : Dom.node Js.t =
Js.Unsafe.fun_call get_scroll_container_js_expr [| Js.Unsafe.inject el |]
;;
|
39fdc74f58ed33338cbed90d576b43ec6d2c8ed37855d38590db8e591b065170 | sdiehl/elliptic-curve | BrainpoolP384R1.hs | module Data.Curve.Weierstrass.BrainpoolP384R1
( module Data.Curve.Weierstrass
, Point(..)
-- * BrainpoolP384R1 curve
, module Data.Curve.Weierstrass.BrainpoolP384R1
) where
import Protolude
import Data.Field.Galois
import GHC.Natural (Natural)
import Data.Curve.Weierstrass
-------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
-- | BrainpoolP384R1 curve.
data BrainpoolP384R1
-- | Field of points of BrainpoolP384R1 curve.
type Fq = Prime Q
type Q = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b412b1da197fb71123acd3a729901d1a71874700133107ec53
-- | Field of coefficients of BrainpoolP384R1 curve.
type Fr = Prime R
type R = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b31f166e6cac0425a7cf3ab6af6b7fc3103b883202e9046565
-- | BrainpoolP384R1 curve is a Weierstrass curve.
instance Curve 'Weierstrass c BrainpoolP384R1 Fq Fr => WCurve c BrainpoolP384R1 Fq Fr where
a_ = const _a
{-# INLINABLE a_ #-}
b_ = const _b
{-# INLINABLE b_ #-}
h_ = const _h
{-# INLINABLE h_ #-}
q_ = const _q
{-# INLINABLE q_ #-}
r_ = const _r
{-# INLINABLE r_ #-}
-- | Affine BrainpoolP384R1 curve point.
type PA = WAPoint BrainpoolP384R1 Fq Fr
-- | Affine BrainpoolP384R1 curve is a Weierstrass affine curve.
instance WACurve BrainpoolP384R1 Fq Fr where
gA_ = gA
{-# INLINABLE gA_ #-}
-- | Jacobian BrainpoolP384R1 point.
type PJ = WJPoint BrainpoolP384R1 Fq Fr
-- | Jacobian BrainpoolP384R1 curve is a Weierstrass Jacobian curve.
instance WJCurve BrainpoolP384R1 Fq Fr where
gJ_ = gJ
{-# INLINABLE gJ_ #-}
-- | Projective BrainpoolP384R1 point.
type PP = WPPoint BrainpoolP384R1 Fq Fr
-- | Projective BrainpoolP384R1 curve is a Weierstrass projective curve.
instance WPCurve BrainpoolP384R1 Fq Fr where
gP_ = gP
{-# INLINABLE gP_ #-}
-------------------------------------------------------------------------------
-- Parameters
-------------------------------------------------------------------------------
-- | Coefficient @A@ of BrainpoolP384R1 curve.
_a :: Fq
_a = 0x7bc382c63d8c150c3c72080ace05afa0c2bea28e4fb22787139165efba91f90f8aa5814a503ad4eb04a8c7dd22ce2826
{-# INLINABLE _a #-}
-- | Coefficient @B@ of BrainpoolP384R1 curve.
_b :: Fq
_b = 0x4a8c7dd22ce28268b39b55416f0447c2fb77de107dcd2a62e880ea53eeb62d57cb4390295dbc9943ab78696fa504c11
{-# INLINABLE _b #-}
-- | Cofactor of BrainpoolP384R1 curve.
_h :: Natural
_h = 0x1
{-# INLINABLE _h #-}
-- | Characteristic of BrainpoolP384R1 curve.
_q :: Natural
_q = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b412b1da197fb71123acd3a729901d1a71874700133107ec53
{-# INLINABLE _q #-}
-- | Order of BrainpoolP384R1 curve.
_r :: Natural
_r = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b31f166e6cac0425a7cf3ab6af6b7fc3103b883202e9046565
{-# INLINABLE _r #-}
-- | Coordinate @X@ of BrainpoolP384R1 curve.
_x :: Fq
_x = 0x1d1c64f068cf45ffa2a63a81b7c13f6b8847a3e77ef14fe3db7fcafe0cbd10e8e826e03436d646aaef87b2e247d4af1e
{-# INLINABLE _x #-}
-- | Coordinate @Y@ of BrainpoolP384R1 curve.
_y :: Fq
_y = 0x8abe1d7520f9c2a45cb1eb8e95cfd55262b70b29feec5864e19c054ff99129280e4646217791811142820341263c5315
{-# INLINABLE _y #-}
-- | Generator of affine BrainpoolP384R1 curve.
gA :: PA
gA = A _x _y
{-# INLINABLE gA #-}
-- | Generator of Jacobian BrainpoolP384R1 curve.
gJ :: PJ
gJ = J _x _y 1
{-# INLINABLE gJ #-}
-- | Generator of projective BrainpoolP384R1 curve.
gP :: PP
gP = P _x _y 1
{-# INLINABLE gP #-}
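-- A small sanity check (not part of the original module): the affine generator
-- coordinates above should satisfy the short Weierstrass equation
-- y^2 = x^3 + A*x + B over 'Fq'.  Only the field arithmetic of 'Fq' is assumed.
_generatorOnCurve :: Bool
_generatorOnCurve = _y * _y == _x * _x * _x + _a * _x + _b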
| null | https://raw.githubusercontent.com/sdiehl/elliptic-curve/445e196a550e36e0f25bd4d9d6a38676b4cf2be8/src/Data/Curve/Weierstrass/BrainpoolP384R1.hs | haskell | * BrainpoolP384R1 curve
-----------------------------------------------------------------------------
Types
-----------------------------------------------------------------------------
| BrainpoolP384R1 curve.
| Field of points of BrainpoolP384R1 curve.
| Field of coefficients of BrainpoolP384R1 curve.
# INLINABLE a_ #
# INLINABLE h_ #
| Affine BrainpoolP384R1 curve point.
| Jacobian BrainpoolP384R1 point.
| Projective BrainpoolP384R1 point.
-----------------------------------------------------------------------------
Parameters
-----------------------------------------------------------------------------
| Coefficient @A@ of BrainpoolP384R1 curve.
# INLINABLE _b #
| Cofactor of BrainpoolP384R1 curve.
| Characteristic of BrainpoolP384R1 curve.
# INLINABLE _q #
| Order of BrainpoolP384R1 curve.
# INLINABLE _r #
| Coordinate @X@ of BrainpoolP384R1 curve.
# INLINABLE _x #
| Coordinate @Y@ of BrainpoolP384R1 curve.
# INLINABLE _y #
| Generator of affine BrainpoolP384R1 curve.
| Generator of Jacobian BrainpoolP384R1 curve.
| Generator of projective BrainpoolP384R1 curve. | module Data.Curve.Weierstrass.BrainpoolP384R1
( module Data.Curve.Weierstrass
, Point(..)
, module Data.Curve.Weierstrass.BrainpoolP384R1
) where
import Protolude
import Data.Field.Galois
import GHC.Natural (Natural)
import Data.Curve.Weierstrass
data BrainpoolP384R1
type Fq = Prime Q
type Q = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b412b1da197fb71123acd3a729901d1a71874700133107ec53
type Fr = Prime R
type R = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b31f166e6cac0425a7cf3ab6af6b7fc3103b883202e9046565
BrainpoolP384R1 curve is a Weierstrass curve .
instance Curve 'Weierstrass c BrainpoolP384R1 Fq Fr => WCurve c BrainpoolP384R1 Fq Fr where
a_ = const _a
b_ = const _b
# INLINABLE b _ #
h_ = const _h
q_ = const _q
# INLINABLE q _ #
r_ = const _r
# INLINABLE r _ #
type PA = WAPoint BrainpoolP384R1 Fq Fr
Affine BrainpoolP384R1 curve is a Weierstrass affine curve .
instance WACurve BrainpoolP384R1 Fq Fr where
gA_ = gA
# INLINABLE gA _ #
type PJ = WJPoint BrainpoolP384R1 Fq Fr
Jacobian BrainpoolP384R1 curve is a Weierstrass Jacobian curve .
instance WJCurve BrainpoolP384R1 Fq Fr where
gJ_ = gJ
# INLINABLE gJ _ #
type PP = WPPoint BrainpoolP384R1 Fq Fr
Projective BrainpoolP384R1 curve is a Weierstrass projective curve .
instance WPCurve BrainpoolP384R1 Fq Fr where
gP_ = gP
# INLINABLE gP _ #
_a :: Fq
_a = 0x7bc382c63d8c150c3c72080ace05afa0c2bea28e4fb22787139165efba91f90f8aa5814a503ad4eb04a8c7dd22ce2826
# INLINABLE _ a #
| Coefficient @B@ of BrainpoolP384R1 curve .
_b :: Fq
_b = 0x4a8c7dd22ce28268b39b55416f0447c2fb77de107dcd2a62e880ea53eeb62d57cb4390295dbc9943ab78696fa504c11
_h :: Natural
_h = 0x1
# INLINABLE _ h #
_q :: Natural
_q = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b412b1da197fb71123acd3a729901d1a71874700133107ec53
_r :: Natural
_r = 0x8cb91e82a3386d280f5d6f7e50e641df152f7109ed5456b31f166e6cac0425a7cf3ab6af6b7fc3103b883202e9046565
_x :: Fq
_x = 0x1d1c64f068cf45ffa2a63a81b7c13f6b8847a3e77ef14fe3db7fcafe0cbd10e8e826e03436d646aaef87b2e247d4af1e
_y :: Fq
_y = 0x8abe1d7520f9c2a45cb1eb8e95cfd55262b70b29feec5864e19c054ff99129280e4646217791811142820341263c5315
gA :: PA
gA = A _x _y
# INLINABLE gA #
gJ :: PJ
gJ = J _x _y 1
# INLINABLE gJ #
gP :: PP
gP = P _x _y 1
# INLINABLE gP #
|
03a3418e3fc426ef21f59d084fc536976616c18d62d7c22a0c4da6aa143e44fd | ermine-language/ermine | Global.hs | {-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) 2011
-- License : BSD2
-- Maintainer : < >
-- Stability : experimental
-- Portability: non-portable (DeriveDataTypeable)
--
--------------------------------------------------------------------
module Ermine.Syntax.Global
(
-- * Globals
Global(Global)
, AsGlobal(..)
, HasGlobal(..)
-- * Fixity
, Assoc(..)
, Fixity(..)
, _Fixity
, fixityLevel
, unpackFixity
, packFixity
-- * Combinators
, glob
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Crypto.Hash.MD5 as MD5
import Data.Binary (Binary)
import qualified Data.Binary as Binary
import Data.Bits
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.ByteString
import Data.Data (Data, Typeable)
import Data.Function (on)
import Data.Hashable
import Data.Serialize (Serialize)
import qualified Data.Serialize as Serialize
import Data.Text
import Data.Word
import Ermine.Syntax.Digest
import Ermine.Syntax.ModuleName
import Ermine.Syntax.Name
import GHC.Generics (Generic)
------------------------------------------------------------------------------
-- Associativity
------------------------------------------------------------------------------
-- | The associativity of an infix identifier
data Assoc = L | R | N deriving (Eq,Ord,Show,Read,Enum,Data,Typeable,Generic)
instance Digestable Assoc
------------------------------------------------------------------------------
-- Fixity
------------------------------------------------------------------------------
-- | The fixity of an identifier
data Fixity
= Infix !Assoc !Int
| Prefix !Int
| Postfix !Int
| Idfix
deriving (Eq,Ord,Show,Read,Data,Typeable,Generic)
fixityLevel :: Fixity -> Maybe Int
fixityLevel (Infix _ i) = Just i
fixityLevel (Prefix i) = Just i
fixityLevel (Postfix i) = Just i
fixityLevel Idfix = Nothing
-- | Pack 'Fixity' into a 'Word8'.
--
-- Format:
--
-- > 01234567
-- > ccaapppp
--
-- @cc@ is constructor tag, @0-3@
-- @pppp@ is precedence level, @0-9@
-- @aa@ is associativity tag, @0-2@
packFixity :: Fixity -> Word8
packFixity Idfix = 0xC0
packFixity (Prefix n) = 0x40 .|. (0x0F .&. fromIntegral n)
packFixity (Postfix n) = 0x80 .|. (0x0F .&. fromIntegral n)
packFixity (Infix a n) = packAssoc a .|. (0x0F .&. fromIntegral n)
where
packAssoc L = 0x00
packAssoc R = 0x10
packAssoc N = 0x20
{-# INLINE packFixity #-}
-- this should be MonadPlus, but Get isn't
unpackFixity :: Monad m => Word8 -> m Fixity
unpackFixity w8 =
case 0xC0 .&. w8 of
0x00 -> case 0x30 .&. w8 of
0x00 -> return $ Infix L n
0x10 -> return $ Infix R n
0x20 -> return $ Infix N n
_ -> fail "unpackFixity: bad associativity"
0x40 -> return $ Prefix n
0x80 -> return $ Postfix n
0xC0 -> return Idfix
_ -> fail "unpackFixity: IMPOSSIBLE"
where n = fromIntegral $ 0x0F .&. w8
{-# INLINE unpackFixity #-}
_Fixity :: Prism' Word8 Fixity
_Fixity = prism' packFixity unpackFixity
{-# INLINE _Fixity #-}
instance Serial Fixity where
serialize f = putWord8 $ packFixity f
deserialize = do
w <- getWord8
unpackFixity w
instance Binary Fixity where
put = serialize
get = deserialize
instance Serialize Fixity where
put = serialize
get = deserialize
instance Digestable Fixity
------------------------------------------------------------------------------
-- Global
------------------------------------------------------------------------------
-- | A 'Global' is a full qualified top level name.
--
-- /NB:/ You should construct these with 'global' and only use the constructor for pattern matching.
data Global = Global
{ _globalDigest :: !ByteString
, _globalFixity :: !Fixity
, _globalModule :: !ModuleName
, _globalName :: !Text
} deriving (Data, Typeable, Generic)
instance Show Global where
showsPrec d (Global _ f m n) = showParen (d > 10) $
showString "glob " . showsPrec 11 f .
showChar ' ' . showsPrec 11 m .
showChar ' ' . showsPrec 11 n
instance Read Global where
readsPrec d = readParen (d > 10) $ \r -> do
("glob", r') <- lex r
(f, r'') <- readsPrec 11 r'
(m, r''') <- readsPrec 11 r''
(n, r'''') <- readsPrec 11 r'''
return (glob f m n, r'''')
instance Serial Global where
serialize (Global d f m n) = serialize d >> serialize f >> serialize m >> serialize n
deserialize = liftM4 Global deserialize deserialize deserialize deserialize
instance Binary Global where
put = serialize
get = deserialize
instance HasModuleName Global where
moduleName g (Global _ f m n) = g m <&> \m' -> glob f m' n
instance HasName Global where
name g (Global _ f m n) = g n <&> \n' -> glob f m n'
------------------------------------------------------------------------------
-- AsGlobal
------------------------------------------------------------------------------
class AsGlobal t where
_Global :: Prism' t Global
instance AsGlobal Global where
_Global = id
------------------------------------------------------------------------------
-- HasGlobal
------------------------------------------------------------------------------
class HasGlobal t where
global :: Lens' t Global
-- | A lens that will read or update the fixity (and compute a new digest)
fixity :: Lens' t Fixity
fixity f = global $ \ (Global _ a m n) -> (\a' -> glob a' m n) <$> f a
instance HasGlobal Global where
global = id
instance Eq Global where
(==) = (==) `on` _globalDigest
instance Ord Global where
compare = compare `on` _globalDigest
instance Digestable Global where
digest c = digest c . _globalDigest
instance Hashable Global where
hashWithSalt s c = hashWithSalt s (_globalDigest c)
------------------------------------------------------------------------------
-- Combinators
------------------------------------------------------------------------------
-- | Construct a 'Global' with a correct digest.
glob :: AsGlobal t => Fixity -> ModuleName -> Text -> t
glob f m n = _Global # Global d f m n where
d = MD5.finalize $ digest MD5.init f `digest` m `digest` n
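-- A small illustration (not part of the original module) of the byte layout
-- documented at 'packFixity': 0x15 has constructor bits 00 (infix),
-- associativity bits 01 (right associative) and precedence 5, so it decodes
-- to @Infix R 5@ via the '_Fixity' prism.
_fixityExample :: Maybe Fixity
_fixityExample = (0x15 :: Word8) ^? _Fixity
-- _fixityExample == Just (Infix R 5)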
| null | https://raw.githubusercontent.com/ermine-language/ermine/bd58949ab56311be9e0d2506a900f3d77652566b/src/Ermine/Syntax/Global.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE DeriveDataTypeable #
------------------------------------------------------------------
|
Stability : experimental
Portability: non-portable (DeriveDataTypeable)
------------------------------------------------------------------
* Globals
* Fixity
* Combinators
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| The associativity of an infix identifier
----------------------------------------------------------------------------
Fixity
----------------------------------------------------------------------------
| The fixity of an identifier
| Pack 'Fixity' into a 'Word8'.
Format:
> ccaapppp
----------------------------------------------------------------------------
Global
----------------------------------------------------------------------------
| A 'Global' is a full qualified top level name.
/NB:/ You should construct these with 'global' and only use the constructor for pattern matching.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| A lens that will read or update the fixity (and compute a new digest)
----------------------------------------------------------------------------
Combinators
----------------------------------------------------------------------------
| Construct a 'Global' with a correct digest. | # LANGUAGE DeriveGeneric #
Copyright : ( c ) 2011
License : BSD2
Maintainer : < >
module Ermine.Syntax.Global
(
Global(Global)
, AsGlobal(..)
, HasGlobal(..)
, Assoc(..)
, Fixity(..)
, _Fixity
, fixityLevel
, unpackFixity
, packFixity
, glob
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Crypto.Hash.MD5 as MD5
import Data.Binary (Binary)
import qualified Data.Binary as Binary
import Data.Bits
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.ByteString
import Data.Data (Data, Typeable)
import Data.Function (on)
import Data.Hashable
import Data.Serialize (Serialize)
import qualified Data.Serialize as Serialize
import Data.Text
import Data.Word
import Ermine.Syntax.Digest
import Ermine.Syntax.ModuleName
import Ermine.Syntax.Name
import GHC.Generics (Generic)
Associativity
data Assoc = L | R | N deriving (Eq,Ord,Show,Read,Enum,Data,Typeable,Generic)
instance Digestable Assoc
data Fixity
= Infix !Assoc !Int
| Prefix !Int
| Postfix !Int
| Idfix
deriving (Eq,Ord,Show,Read,Data,Typeable,Generic)
fixityLevel :: Fixity -> Maybe Int
fixityLevel (Infix _ i) = Just i
fixityLevel (Prefix i) = Just i
fixityLevel (Postfix i) = Just i
fixityLevel Idfix = Nothing
> 01234567
@cc@ is constructor tag , @0 - 3@
@pppp@ is precedence level , @0 - 9@
@aa@ is associativity tag , @0 - 2@
packFixity :: Fixity -> Word8
packFixity Idfix = 0xC0
packFixity (Prefix n) = 0x40 .|. (0x0F .&. fromIntegral n)
packFixity (Postfix n) = 0x80 .|. (0x0F .&. fromIntegral n)
packFixity (Infix a n) = packAssoc a .|. (0x0F .&. fromIntegral n)
where
packAssoc L = 0x00
packAssoc R = 0x10
packAssoc N = 0x20
# INLINE packFixity #
this should be MonadPlus , but Get is n't
unpackFixity :: Monad m => Word8 -> m Fixity
unpackFixity w8 =
case 0xC0 .&. w8 of
0x00 -> case 0x30 .&. w8 of
0x00 -> return $ Infix L n
0x10 -> return $ Infix R n
0x20 -> return $ Infix N n
_ -> fail "unpackFixity: bad associativity"
0x40 -> return $ Prefix n
0x80 -> return $ Postfix n
0xC0 -> return Idfix
_ -> fail "unpackFixity: IMPOSSIBLE"
where n = fromIntegral $ 0x0F .&. w8
# INLINE unpackFixity #
_Fixity :: Prism' Word8 Fixity
_Fixity = prism' packFixity unpackFixity
# INLINE _ Fixity #
instance Serial Fixity where
serialize f = putWord8 $ packFixity f
deserialize = do
w <- getWord8
unpackFixity w
instance Binary Fixity where
put = serialize
get = deserialize
instance Serialize Fixity where
put = serialize
get = deserialize
instance Digestable Fixity
data Global = Global
{ _globalDigest :: !ByteString
, _globalFixity :: !Fixity
, _globalModule :: !ModuleName
, _globalName :: !Text
} deriving (Data, Typeable, Generic)
instance Show Global where
showsPrec d (Global _ f m n) = showParen (d > 10) $
showString "glob " . showsPrec 11 f .
showChar ' ' . showsPrec 11 m .
showChar ' ' . showsPrec 11 n
instance Read Global where
readsPrec d = readParen (d > 10) $ \r -> do
("glob", r') <- lex r
(f, r'') <- readsPrec 11 r'
(m, r''') <- readsPrec 11 r''
(n, r'''') <- readsPrec 11 r'''
return (glob f m n, r'''')
instance Serial Global where
serialize (Global d f m n) = serialize d >> serialize f >> serialize m >> serialize n
deserialize = liftM4 Global deserialize deserialize deserialize deserialize
instance Binary Global where
put = serialize
get = deserialize
instance HasModuleName Global where
moduleName g (Global _ f m n) = g m <&> \m' -> glob f m' n
instance HasName Global where
name g (Global _ f m n) = g n <&> \n' -> glob f m n'
AsGlobal
class AsGlobal t where
_Global :: Prism' t Global
instance AsGlobal Global where
_Global = id
HasGlobal
class HasGlobal t where
global :: Lens' t Global
fixity :: Lens' t Fixity
fixity f = global $ \ (Global _ a m n) -> (\a' -> glob a' m n) <$> f a
instance HasGlobal Global where
global = id
instance Eq Global where
(==) = (==) `on` _globalDigest
instance Ord Global where
compare = compare `on` _globalDigest
instance Digestable Global where
digest c = digest c . _globalDigest
instance Hashable Global where
hashWithSalt s c = hashWithSalt s (_globalDigest c)
glob :: AsGlobal t => Fixity -> ModuleName -> Text -> t
glob f m n = _Global # Global d f m n where
d = MD5.finalize $ digest MD5.init f `digest` m `digest` n
|
12d60147d08051f91772b819027098ee03d82a38e389899a2131cc144ba0c1fa | fetburner/Coq2SML | tags.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014     *)
(*   \VV/  **************************************************************)
(*    //   *      This file is distributed under the terms of the       *)
(*         *       GNU Lesser General Public License Version 2.1        *)
(************************************************************************)
module Script :
sig
val table : GText.tag_table
val kwd : GText.tag
val qed : GText.tag
val decl : GText.tag
val proof_decl : GText.tag
val comment : GText.tag
val reserved : GText.tag
val error : GText.tag
val to_process : GText.tag
val processed : GText.tag
val unjustified : GText.tag
val found : GText.tag
val hidden : GText.tag
val folded : GText.tag
val paren : GText.tag
val sentence : GText.tag
end
module Proof :
sig
val table : GText.tag_table
val highlight : GText.tag
val hypothesis : GText.tag
val goal : GText.tag
end
module Message :
sig
val table : GText.tag_table
val error : GText.tag
end
val string_of_color : Gdk.color -> string
val color_of_string : string -> Gdk.color
val get_processed_color : unit -> Gdk.color
val set_processed_color : Gdk.color -> unit
val get_processing_color : unit -> Gdk.color
val set_processing_color : Gdk.color -> unit
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/ide/tags.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
module Script :
sig
val table : GText.tag_table
val kwd : GText.tag
val qed : GText.tag
val decl : GText.tag
val proof_decl : GText.tag
val comment : GText.tag
val reserved : GText.tag
val error : GText.tag
val to_process : GText.tag
val processed : GText.tag
val unjustified : GText.tag
val found : GText.tag
val hidden : GText.tag
val folded : GText.tag
val paren : GText.tag
val sentence : GText.tag
end
module Proof :
sig
val table : GText.tag_table
val highlight : GText.tag
val hypothesis : GText.tag
val goal : GText.tag
end
module Message :
sig
val table : GText.tag_table
val error : GText.tag
end
val string_of_color : Gdk.color -> string
val color_of_string : string -> Gdk.color
val get_processed_color : unit -> Gdk.color
val set_processed_color : Gdk.color -> unit
val get_processing_color : unit -> Gdk.color
val set_processing_color : Gdk.color -> unit
|
0f3addffbe4efd09b5e53a6008b328a6eefc805997e94c52a6603ecc176e6bf3 | astanin/moo | Constraints.hs | module Moo.GeneticAlgorithm.Constraints
(
ConstraintFunction
, Constraint()
, isFeasible
-- *** Simple equalities and inequalities
, (.<.), (.<=.), (.>.), (.>=.), (.==.)
-- *** Double inequalities
, LeftHandSideInequality()
, (.<), (.<=), (<.), (<=.)
-- ** Constrained initialization
, getConstrainedGenomes
, getConstrainedBinaryGenomes
-- ** Constrained selection
, withDeathPenalty
, withFinalDeathPenalty
, withConstraints
, numberOfViolations
, degreeOfViolation
) where
import Moo.GeneticAlgorithm.Types
import Moo.GeneticAlgorithm.Random
import Moo.GeneticAlgorithm.Utilities (getRandomGenomes)
import Moo.GeneticAlgorithm.Selection (withPopulationTransform, bestFirst)
type ConstraintFunction a b = Genome a -> b
-- Defining a constraint as a pair of function and its boundary value
-- (vs just a boolean valued function) allows for estimating the
-- degree of constraint violation when necessary.
-- | Define constraints using '.<.', '.<=.', '.>.', '.>=.', and '.==.'
-- operators, with a 'ConstraintFunction' on the left hand side.
--
-- For double inequality constraints use pairs of '.<', '<.' and
-- '.<=', '<=.' respectively, with a 'ConstraintFunction' in the middle.
--
-- Examples:
--
-- @
-- function .>=. lowerBound
-- lowerBound .<= function <=. upperBound
-- @
data Constraint a b
= LessThan (ConstraintFunction a b) b
-- ^ strict inequality constraint,
-- function value is less than the constraint value
| LessThanOrEqual (ConstraintFunction a b) b
-- ^ non-strict inequality constraint,
-- function value is less than or equal to the constraint value
| Equal (ConstraintFunction a b) b
-- ^ equality constraint,
-- function value is equal to the constraint value
| InInterval (ConstraintFunction a b) (Bool, b) (Bool, b)
-- ^ double inequality, boolean flags indicate if the
-- bound is inclusive.
(.<.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.<.) = LessThan
(.<=.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.<=.) = LessThanOrEqual
(.>.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.>.) f v = LessThan (negate . f) (negate v)
(.>=.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.>=.) f v = LessThanOrEqual (negate . f) (negate v)
(.==.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.==.) = Equal
-- Left hand side of the double inequality defined in the form:
-- @lowerBound .<= function <=. upperBound@.
data LeftHandSideInequality a b
= LeftHandSideInequality (ConstraintFunction a b) (Bool, b)
-- ^ boolean flag indicates if the bound is inclusive
(.<=) :: (Real b) => b -> ConstraintFunction a b -> LeftHandSideInequality a b
lval .<= f = LeftHandSideInequality f (True, lval)
(.<) :: (Real b) => b -> ConstraintFunction a b -> LeftHandSideInequality a b
lval .< f = LeftHandSideInequality f (False, lval)
(<.) :: (Real b) => LeftHandSideInequality a b -> b -> Constraint a b
(LeftHandSideInequality f l) <. rval = InInterval f l (False, rval)
(<=.) :: (Real b) => LeftHandSideInequality a b -> b -> Constraint a b
(LeftHandSideInequality f l) <=. rval = InInterval f l (True, rval)
-- | Returns @True@ if a @genome@ represents a feasible solution
-- with respect to the @constraint@.
satisfiesConstraint :: (Real b)
=> Genome a -- ^ @genome@
-> Constraint a b -- ^ @constraint@
-> Bool
satisfiesConstraint g (LessThan f v) = f g < v
satisfiesConstraint g (LessThanOrEqual f v) = f g <= v
satisfiesConstraint g (Equal f v) = f g == v
satisfiesConstraint g (InInterval f (inclusive1,v1) (inclusive2,v2)) =
let v' = f g
c1 = if inclusive1 then v1 <= v' else v1 < v'
c2 = if inclusive2 then v' <= v2 else v' < v2
in c1 && c2
-- | Returns @True@ if a @genome@ represents a feasible solution,
-- i.e. satisfies all @constraints@.
isFeasible :: (GenomeState gt a, Real b)
=> [Constraint a b] -- ^ constraints
-> gt -- ^ genome
-> Bool
isFeasible constraints genome = all ((takeGenome genome) `satisfiesConstraint`) constraints
-- | Generate @n@ feasible random genomes with individual genome elements
-- bounded by @ranges@.
getConstrainedGenomes :: (Random a, Ord a, Real b)
=> [Constraint a b] -- ^ constraints
-> Int -- ^ @n@, how many genomes to generate
-> [(a, a)] -- ^ ranges for individual genome elements
-> Rand ([Genome a]) -- ^ random feasible genomes
getConstrainedGenomes constraints n ranges
| n <= 0 = return []
| otherwise = do
candidates <- getRandomGenomes n ranges
let feasible = filter (isFeasible constraints) candidates
let found = length feasible
more <- getConstrainedGenomes constraints (n - found) ranges
return $ feasible ++ more
-- | Generate @n@ feasible random binary genomes.
getConstrainedBinaryGenomes :: (Real b)
=> [Constraint Bool b] -- ^ constraints
-> Int -- ^ @n@, how many genomes to generate
-> Int -- ^ @L@, genome length
-> Rand [Genome Bool] -- ^ random feasible genomes
getConstrainedBinaryGenomes constraints n len =
getConstrainedGenomes constraints n (replicate len (False,True))
-- | A simple estimate of the degree of (in)feasibility.
--
-- Count the number of constraint violations. Return @0@ if the solution is feasible.
numberOfViolations :: (Real b)
=> [Constraint a b] -- ^ constraints
-> Genome a -- ^ genome
-> Int -- ^ the number of violated constraints
numberOfViolations constraints genome =
let satisfied = map (genome `satisfiesConstraint`) constraints
in length $ filter not satisfied
-- | An estimate of the degree of (in)feasibility.
--
-- Given @f_j@ is the excess of @j@-th constraint function value,
-- return @sum |f_j|^beta@. For strict inequality constraints, return
-- @sum (|f_j|^beta + eta)@. Return @0.0@ if the solution is
-- feasible.
--
degreeOfViolation :: Double -- ^ beta, single violation exponent
-> Double -- ^ eta, equality penalty in strict inequalities
-> [Constraint a Double] -- ^ constrains
-> Genome a -- ^ genome
-> Double -- ^ total degree of violation
degreeOfViolation beta eta constraints genome =
sum $ map violation constraints
where
violation (LessThan f v) =
let v' = f genome
in if v' < v
then 0.0
else (abs $ v' - v) ** beta + eta
violation (LessThanOrEqual f v) =
let v' = f genome
in if v' <= v
then 0.0
else (abs $ v' - v) ** beta
violation (Equal f v) =
let v' = f genome
in if v' == v
then 0.0
else (abs $ v' - v) ** beta
violation (InInterval f (incleft, l) (incright, r)) =
let v' = f genome
leftok = if incleft
then l <= v'
else l < v'
rightok = if incright
then r >= v'
else r > v'
in case (leftok, rightok) of
(True, True) -> 0.0
(False, _) -> (abs $ l - v') ** beta
+ (fromIntegral . fromEnum . not $ incleft) * eta
(_, False) -> (abs $ v' - r) ** beta
+ (fromIntegral . fromEnum . not $ incright) * eta
-- | Modify objective function in such a way that 1) any feasible
-- solution is preferred to any infeasible solution, 2) among two
-- feasible solutions the one having better objective function value
-- is preferred, 3) among two infeasible solutions the one having
-- smaller constraint violation is preferred.
--
-- Reference: Deb, K. (2000). An efficient constraint handling method
-- for genetic algorithms. Computer methods in applied mechanics and
-- engineering, 186(2-4), 311-338.
withConstraints :: (Real b, Real c)
=> [Constraint a b] -- ^ constraints
-> ([Constraint a b] -> Genome a -> c) -- ^ non-negative degree of violation,
-- see 'numberOfViolations' and 'degreeOfViolation'
-> ProblemType
-> SelectionOp a
-> SelectionOp a
withConstraints constraints violation ptype =
withPopulationTransform (penalizeInfeasible constraints violation ptype)
penalizeInfeasible :: (Real b, Real c)
=> [Constraint a b]
-> ([Constraint a b] -> Genome a -> c)
-> ProblemType
-> Population a
-> Population a
penalizeInfeasible constraints violation ptype phenotypes =
let worst = takeObjectiveValue . head . worstFirst ptype $ phenotypes
penalize p = let g = takeGenome p
v = fromRational . toRational . violation constraints $ g
in if (v > 0)
then (g, worst `worsen` v)
else p
in map penalize phenotypes
where
worstFirst Minimizing = bestFirst Maximizing
worstFirst Maximizing = bestFirst Minimizing
worsen x delta = if ptype == Minimizing
then x + delta
else x - delta
-- | Kill all infeasible solutions after every step of the genetic algorithm.
--
-- “Death penalty is very popular within the evolution strategies community,
-- but it is limited to problems in which the feasible search space is convex
-- and constitutes a reasonably large portion of the whole search space,” --
-- (Coello 1999).
--
-- Coello Coello, Carlos A. (1999). A survey of constraint
-- handling techniques used with evolutionary algorithms.
-- Laboratorio Nacional de Informática Avanzada.
withDeathPenalty :: (Monad m, Real b)
=> [Constraint a b] -- ^ constraints
-> StepGA m a -- ^ unconstrained step
-> StepGA m a -- ^ constrained step
withDeathPenalty cs step =
\stop popstate -> do
stepresult <- step stop popstate
case stepresult of
StopGA pop -> return (StopGA (filterFeasible cs pop))
ContinueGA pop -> return (ContinueGA (filterFeasible cs pop))
-- | Kill all infeasible solutions once after the last step of the
-- genetic algorithm. See also 'withDeathPenalty'.
withFinalDeathPenalty :: (Monad m, Real b)
=> [Constraint a b] -- ^ constraints
-> StepGA m a -- ^ unconstrained step
-> StepGA m a -- ^ constrained step
withFinalDeathPenalty cs step =
\stop popstate -> do
result <- step stop popstate
case result of
(ContinueGA _) -> return result
(StopGA pop) -> return (StopGA (filterFeasible cs pop))
filterFeasible :: (Real b) => [Constraint a b] -> Population a -> Population a
filterFeasible cs = filter (isFeasible cs . takeGenome)
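-- A minimal usage sketch (not part of the original module).  Genomes here are
-- lists of Doubles, and the plain list functions 'head' and 'sum' stand in as
-- constraint functions; the names '_exampleConstraints' and '_exampleFeasible'
-- are made up for illustration.
_exampleConstraints :: [Constraint Double Double]
_exampleConstraints =
  [ head .>=. 0.0          -- first gene is non-negative
  , 0.0 .<= sum <=. 10.0   -- genome sum stays within [0, 10]
  ]

_exampleFeasible :: Bool
_exampleFeasible = isFeasible _exampleConstraints [1.0, 2.0, 3.0]
-- expected: True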
| null | https://raw.githubusercontent.com/astanin/moo/2e77a94a543d21360f7610c1ee0dda52813997c4/Moo/GeneticAlgorithm/Constraints.hs | haskell | *** Simple equalities and inequalities
*** Double inequalities
** Constrained initialization
** Constrained selection
Defining a constraint as a pair of function and its boundary value
(vs just a boolean valued function) allows for estimating the
degree of constraint violation when necessary.
| Define constraints using '.<.', '.<=.', '.>.', '.>=.', and '.==.'
operators, with a 'ConstraintFunction' on the left hand side.
For double inequality constraints use pairs of '.<', '<.' and
Examples:
@
@
^ strict inequality constraint,
function value is less than the constraint value
^ non-strict inequality constraint,
function value is less than or equal to the constraint value
^ equality constraint,
function value is equal to the constraint value
^ double inequality, boolean flags indicate if the
bound is inclusive.
Left hand side of the double inequality defined in the form:
@lowerBound .<= function <=. upperBound@.
^ boolean flag indicates if the bound is inclusive
with respect to the @constraint@.
^ @genome@
^ @constraint@
i.e. satisfies all @constraints@.
^ constraints
^ genome
| Generate @n@ feasible random genomes with individual genome elements
bounded by @ranges@.
^ constraints
^ ranges for individual genome elements
^ random feasible genomes
| Generate @n@ feasible random binary genomes.
^ constraints
^ @L@, genome length
^ random feasible genomes
| A simple estimate of the degree of (in)feasibility.
Count the number of constraint violations. Return @0@ if the solution is feasible.
^ constraints
^ genome
^ the number of violated constraints
| An estimate of the degree of (in)feasibility.
return @sum |f_j|^beta@. For strict inequality constraints, return
@sum (|f_j|^beta + eta)@. Return @0.0@ if the solution is
feasible.
^ beta, single violation exponent
^ eta, equality penalty in strict inequalities
^ constraints
^ genome
^ total degree of violation
feasible solutions the one having better objective function value
smaller constraint violation is preferred.
for genetic algorithms. Computer methods in applied mechanics and
^ constraints
^ non-negative degree of violation,
see 'numberOfViolations' and 'degreeOfViolation'
| Kill all infeasible solutions after every step of the genetic algorithm.
“Death penalty is very popular within the evolution strategies community,
but it is limited to problems in which the feasible search space is convex
and constitutes a reasonably large portion of the whole search space,” --
handling techniques used with evolutionary algorithms.
^ constraints
^ unconstrained step
^ constrained step
| Kill all infeasible solutions once after the last step of the
genetic algorithm. See also 'withDeathPenalty'.
^ constraints
^ unconstrained step
^ constrained step | module Moo.GeneticAlgorithm.Constraints
(
ConstraintFunction
, Constraint()
, isFeasible
, (.<.), (.<=.), (.>.), (.>=.), (.==.)
, LeftHandSideInequality()
, (.<), (.<=), (<.), (<=.)
, getConstrainedGenomes
, getConstrainedBinaryGenomes
, withDeathPenalty
, withFinalDeathPenalty
, withConstraints
, numberOfViolations
, degreeOfViolation
) where
import Moo.GeneticAlgorithm.Types
import Moo.GeneticAlgorithm.Random
import Moo.GeneticAlgorithm.Utilities (getRandomGenomes)
import Moo.GeneticAlgorithm.Selection (withPopulationTransform, bestFirst)
type ConstraintFunction a b = Genome a -> b
-- '.<=', '<=.' respectively, with a 'ConstraintFunction' in the middle.
-- function
-- lowerBound .<= function <=.
data Constraint a b
= LessThan (ConstraintFunction a b) b
| LessThanOrEqual (ConstraintFunction a b) b
| Equal (ConstraintFunction a b) b
| InInterval (ConstraintFunction a b) (Bool, b) (Bool, b)
(.<.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.<.) = LessThan
(.<=.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.<=.) = LessThanOrEqual
(.>.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.>.) f v = LessThan (negate . f) (negate v)
(.>=.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.>=.) f v = LessThanOrEqual (negate . f) (negate v)
(.==.) :: (Real b) => ConstraintFunction a b -> b -> Constraint a b
(.==.) = Equal
data LeftHandSideInequality a b
= LeftHandSideInequality (ConstraintFunction a b) (Bool, b)
(.<=) :: (Real b) => b -> ConstraintFunction a b -> LeftHandSideInequality a b
lval .<= f = LeftHandSideInequality f (True, lval)
(.<) :: (Real b) => b -> ConstraintFunction a b -> LeftHandSideInequality a b
lval .< f = LeftHandSideInequality f (False, lval)
(<.) :: (Real b) => LeftHandSideInequality a b -> b -> Constraint a b
(LeftHandSideInequality f l) <. rval = InInterval f l (False, rval)
(<=.) :: (Real b) => LeftHandSideInequality a b -> b -> Constraint a b
(LeftHandSideInequality f l) <=. rval = InInterval f l (True, rval)
-- | Returns @True@ if a @genome@ represents a feasible solution
satisfiesConstraint :: (Real b)
-> Bool
satisfiesConstraint g (LessThan f v) = f g < v
satisfiesConstraint g (LessThanOrEqual f v) = f g <= v
satisfiesConstraint g (Equal f v) = f g == v
satisfiesConstraint g (InInterval f (inclusive1,v1) (inclusive2,v2)) =
let v' = f g
c1 = if inclusive1 then v1 <= v' else v1 < v'
c2 = if inclusive2 then v' <= v2 else v' < v2
in c1 && c2
-- | Returns @True@ if a @genome@ represents a feasible solution,
isFeasible :: (GenomeState gt a, Real b)
-> Bool
isFeasible constraints genome = all ((takeGenome genome) `satisfiesConstraint`) constraints
getConstrainedGenomes :: (Random a, Ord a, Real b)
-- ^ @n@, how many genomes to generate
getConstrainedGenomes constraints n ranges
| n <= 0 = return []
| otherwise = do
candidates <- getRandomGenomes n ranges
let feasible = filter (isFeasible constraints) candidates
let found = length feasible
more <- getConstrainedGenomes constraints (n - found) ranges
return $ feasible ++ more
getConstrainedBinaryGenomes :: (Real b)
-- ^ @n@, how many genomes to generate
getConstrainedBinaryGenomes constraints n len =
getConstrainedGenomes constraints n (replicate len (False,True))
numberOfViolations :: (Real b)
numberOfViolations constraints genome =
let satisfied = map (genome `satisfiesConstraint`) constraints
in length $ filter not satisfied
-- Given @f_j@ is the excess of @j@-th constraint function value,
degreeOfViolation beta eta constraints genome =
sum $ map violation constraints
where
violation (LessThan f v) =
let v' = f genome
in if v' < v
then 0.0
else (abs $ v' - v) ** beta + eta
violation (LessThanOrEqual f v) =
let v' = f genome
in if v' <= v
then 0.0
else (abs $ v' - v) ** beta
violation (Equal f v) =
let v' = f genome
in if v' == v
then 0.0
else (abs $ v' - v) ** beta
violation (InInterval f (incleft, l) (incright, r)) =
let v' = f genome
leftok = if incleft
then l <= v'
else l < v'
rightok = if incright
then r >= v'
else r > v'
in case (leftok, rightok) of
(True, True) -> 0.0
(False, _) -> (abs $ l - v') ** beta
+ (fromIntegral . fromEnum . not $ incleft) * eta
(_, False) -> (abs $ v' - r) ** beta
+ (fromIntegral . fromEnum . not $ incright) * eta
-- | Modify objective function in such a way that 1) any feasible
-- solution is preferred to any infeasible solution, 2) among two
-- is preferred, 3) among two infeasible solutions the one having
-- Reference: Deb, K. (2000). An efficient constraint handling method
-- engineering, 186(2-4), 311-338.
withConstraints :: (Real b, Real c)
-> ProblemType
-> SelectionOp a
-> SelectionOp a
withConstraints constraints violation ptype =
withPopulationTransform (penalizeInfeasible constraints violation ptype)
penalizeInfeasible :: (Real b, Real c)
=> [Constraint a b]
-> ([Constraint a b] -> Genome a -> c)
-> ProblemType
-> Population a
-> Population a
penalizeInfeasible constraints violation ptype phenotypes =
let worst = takeObjectiveValue . head . worstFirst ptype $ phenotypes
penalize p = let g = takeGenome p
v = fromRational . toRational . violation constraints $ g
in if (v > 0)
then (g, worst `worsen` v)
else p
in map penalize phenotypes
where
worstFirst Minimizing = bestFirst Maximizing
worstFirst Maximizing = bestFirst Minimizing
worsen x delta = if ptype == Minimizing
then x + delta
else x - delta
-- (Coello 1999).
-- Coello Coello, Carlos A. (1999). A survey of constraint
-- Laboratorio Nacional de Informática Avanzada.
withDeathPenalty :: (Monad m, Real b)
withDeathPenalty cs step =
\stop popstate -> do
stepresult <- step stop popstate
case stepresult of
StopGA pop -> return (StopGA (filterFeasible cs pop))
ContinueGA pop -> return (ContinueGA (filterFeasible cs pop))
withFinalDeathPenalty :: (Monad m, Real b)
withFinalDeathPenalty cs step =
\stop popstate -> do
result <- step stop popstate
case result of
(ContinueGA _) -> return result
(StopGA pop) -> return (StopGA (filterFeasible cs pop))
filterFeasible :: (Real b) => [Constraint a b] -> Population a -> Population a
filterFeasible cs = filter (isFeasible cs . takeGenome)
|
29d187aab79878ab291fdd5fc98149936bcaeabdfa3157af4801874eab7dd20d | srdqty/talc-3.0 | dasm.ml | (**********************************************************************)
( c ) , , ,
August 1999 , all rights reserved .
(**********************************************************************)
(* dasm.ml
*
* Disassembles an object file and associated .to file to produce a
* TAL implementation.
*)
let pp i =
print_string ("Got here " ^ (string_of_int i) ^"\n");
flush stdout;
()
;;
open Tal;;
open Talbin;;
open Talbinin;;
open Numtypes;;
open Disobjfile;;
open Utilities;;
let debug f =
let fmt = Format.std_formatter in
let o = Talpp.std_options in
Format.pp_open_hvbox fmt 0;
f fmt o;
Format.pp_print_newline fmt ();
Format.pp_print_flush fmt ();
()
;;
module type dasm = sig
val disassemble : bool -> string -> string -> Tal.tal_pre_mod
end
module DasmFunc (X: sig
type in_channel
val input_char : in_channel -> char
val input_byte : in_channel -> int
val pos_in : in_channel -> int
val seek_in : in_channel -> int -> unit
val open_in_bin : string -> in_channel
val close_in : in_channel -> unit
val really_input : in_channel -> string -> int -> int -> unit
end ) : dasm = struct
open X
module TalbininX = Talbinin.TalbininFunc(X)
module Discoff = Discoff.DiscoffFunc(X)
module Diself = Diself.DiselfFunc(X)
type binformat = COFF | ELF;;
let objformat =
match Sys.os_type with
"Unix" -> ELF
| "Win32" -> COFF
| _ -> Printf.eprintf "Unknown operating system\n"; exit 1
let fail objf s =
let (sect,addr) = objf.obj_get_pos() in
Printf.eprintf "dasm (sect=%d, addr=0x%x): %s\n"
sect addr s;
raise Gcdfec.Exit;
;;
let get_code_blocks objf code_annots init_labs =
(* the annotated code blocks -- initialized with the types only *)
let code_blocks =
let bogus_id = Identifier.id_new "\000bogus" in
Array.init (Array.length code_annots)
(fun i -> (bogus_id,fst(code_annots.(i)), Array.of_list [])) in
let merge_inst_annots = merge_inst_annots objf in
(* get the code blocks and merge with the annotations *)
(* gets a list of instructions and the next list of labels *)
let rec get_rest_instrs () =
match objf.obj_get_labels 0 with
[] -> (try get_instrs () with End_of_Section -> ([],[]))
| labs -> ([],List.rev(labs))
and get_instrs () =
let instr = Disasmx86.get_instr objf in
let (instrs,labs) = get_rest_instrs() in
(instr::instrs,labs)
in
let labs = ref init_labs in
let rec merge_inst_annots' annots raw_insts =
try
merge_inst_annots annots raw_insts
with Talbin.Found_Hole (annots,insts) ->
let (raw_instrs,next_labs) = get_instrs() in
(match !labs with
(* XXX - we should check that the hole label matches that in the
first annotation in annots. *)
[holeLab] -> ()
| [] -> fail objf "No hole label here."
| _ ->
Printf.eprintf "Labels = ";
List.map (fun x -> Printf.eprintf "%s " (Identifier.id_to_string x)) !labs;
Printf.eprintf "\n";
flush stderr;
fail objf "Too many hole labels here.");
labs := next_labs;
(insts @ (merge_inst_annots' annots raw_instrs))
in
for i = 0 to (Array.length code_annots) - 1 do
let do_code_block lab raw_instrs =
let (_,copt,_) = code_blocks.(i) in
let instrs = merge_inst_annots' (snd code_annots.(i)) raw_instrs in
code_blocks.(i) <- (lab,copt,Array.of_list instrs);
in
begin match !labs with
| [] -> fail objf "More annotations than code blocks!"
| [lab] ->
(* one label -- should be followed by instructions which we suck
* in, merge with the annotations, and then bang into the code_blocks
* array. As a side-effect of reading the instructions, we get the
* next list of labels and loop around. Note that the labels are
* sucked in in what appears to be reverse order from the original
* TAL file. *)
let (raw_instrs,next_labs) = get_instrs() in
labs := next_labs;
do_code_block lab raw_instrs;
()
| lab::rest ->
(* we have more than one label -- so the instructions are empty.
* However, the annotations may not be because we may have virtual
* instructions, so we have to merge them and then loop around with
* the rest of the labels. *)
labs := rest;
do_code_block lab [];
()
end;
(* Format.open_vbox 0;
Talpp.print_code_block
Format.std_formatter Talpp.std_options code_blocks.(i);
Format.close_box ();
Format.print_flush ();
flush stdout;
*)
done;
(code_blocks, !labs)
;;
let get_code_section code_annots objf =
(* seek the code in the object file *)
objf.obj_seek_code();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial text labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".text" then rest
else fail objf "expecting .text label!") in
let (code_blocks,labs) = get_code_blocks objf code_annots init_labs in
match labs with
| [] -> code_blocks
| _ -> fail objf "more labels, but no annotations."
;;
let get_templates template_annots objf =
try
objf.obj_seek_cyclone ();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial text labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".cyc" then rest
else fail objf "expecting .cyc label!") in
let templates =
let bogus_id = Identifier.id_new "\000bogus" in
Array.init (Array.length template_annots)
(fun i -> (bogus_id,fst(template_annots.(i)),
[])) in
let labs = ref init_labs in
for i = 0 to (Array.length template_annots) - 1 do
match !labs with
[] -> fail objf "too few labels, no template start."
| [templ_start] ->
(* Printf.printf "templ_start = %s\n" (Identifier.id_to_string templ_start);
flush stdout; *)
let (con,annots) = template_annots.(i) in
(* We output one word here when assembling to store the template
length. Not sure why? Still when reading this in we should skip
4 bytes here. *)
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
let init_labs = List.rev (objf.obj_get_labels 0) in
let (cb,rest) = get_code_blocks objf annots init_labs in
templates.(i) <- (templ_start,con,Array.to_list cb);
labs := rest
| _ -> fail objf "too many labels, at template start."
done;
(match !labs with
[] -> templates
| _ ->
List.iter (fun x -> Printf.printf " %s\n" (Identifier.id_to_string x)) !labs;
flush stdout;
fail objf "extra labels in Cyclone section")
with Disobjfile.No_Cyclone_Section -> (Array.of_list [])
;;
let get_data_blocks data_annots objf =
let get_data a = (* Given a data annotation read in the data. *)
match a with
An_dlabel c ->
(match objf.obj_get_reloc () with
None -> fail objf "expecting label as data!"
| Some lab ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
if v <> 0 then fail objf "relocation adjustment";
Dlabel(lab,c))
| An_dbytes (len) ->
if len < 0 then fail objf "An_dbytes: size is less than 0"
else
let s = String.create len in
for i = 0 to len - 1 do
String.set s i (Char.chr (objf.obj_get_byte()))
done; Dbytes (s)
| An_d2bytes ->
let lo = objf.obj_get_byte() in
let hi = objf.obj_get_byte() in
let v = int_to_int32((hi lsl 8) lor lo) in
D2bytes(int32_to_int16 v)
| An_d4bytes c ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
D4bytes(int_to_int32 v,c)
| An_dfloat32 ->
let b = String.create 4 in
for i=0 to 3 do
b.[i] <- char_of_int (objf.obj_get_byte ());
done;
Dfloat32 (bytes_to_f32 b)
| An_dfloat64 ->
let b = String.create 8 in
for i = 0 to 7 do
b.[i] <- char_of_int (objf.obj_get_byte ());
done;
Dfloat64 (bytes_to_f64 b)
| An_djunk ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
if v <> 0 then fail objf "data supposed to be junk is non-zero";
Djunk
| An_drep ri ->
(* First four bytes describe the size *)
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let len = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
let str = String.create len in
for i = 0 to len - 1 do
String.set str i (Char.chr (objf.obj_get_byte()))
done;
let strchan = Stringchan.from_string str in
Drep
((match ri with
An_con -> RCon (Talbinin.Str.read_in_con strchan)
| An_kind -> RKind (Talbinin.Str.read_in_kind strchan)
| An_label -> RLabel (Talbinin.Str.read_in_label strchan)),
ref (Some (str)))
| An_dup -> Dup
| An_ddown -> Ddown in
let data_blocks =
Array.make (Array.length data_annots)
(Identifier.id_new "\000bogusd",i32_4,None,([],[])) in
let set_data_block i lab =
let (align,copt,(annots,cs)) = data_annots.(i) in
data_blocks.(i) <- (lab,align,copt,(List.map get_data annots,cs)) in
let rec set_data_blocks i labs =
match labs with
[] ->
if i <> (Array.length data_blocks) then
fail objf "Too few labels in data section."
else ()
| [lab] ->
set_data_block i lab;
if (i+1) < Array.length data_annots then
(let (align,_,_) = data_annots.(i+1) in
objf.obj_align (int32_to_int align);
set_data_blocks (i+1) (List.rev (objf.obj_get_labels 0)))
| lab :: rest ->
set_data_block i lab;
set_data_blocks (i+1) rest
in
objf.obj_seek_data();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial data labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".data" then rest
else fail objf "expecting .data label!") in
Printf.printf " initial data labels:\n " ;
List.iter ( fun x - > Printf.printf " % s\n " ( ) )
init_labs ;
Printf.printf " --------------------\n " ;
List.iter (fun x -> Printf.printf "%s\n" (Identifier.id_to_string x))
init_labs;
Printf.printf "--------------------\n"; *)
set_data_blocks 0 init_labs;
data_blocks
;;
let disassemble disassem_internals tofile objfile =
let tofile_ch = open_in_bin tofile in
Gcdfec.reset_fe "dasm";
(* read the .to file *)
let tal_info = TalbininX.read_tal_info disassem_internals tofile_ch in
let code_annots = tal_info.ti_code_annots in
let data_annots = tal_info.ti_data_annots in
let template_annots = tal_info.ti_template_annots in
close_in tofile_ch;
if disassem_internals then
begin
(* open the object file and suck in headers and so forth *)
let objf =
match objformat with
COFF -> Discoff.coff_objfile objfile
| ELF -> Diself.elf_objfile objfile
in
(* get the code blocks *)
let code_blocks = get_code_section code_annots objf in
(* get the data blocks *)
let data_blocks = get_data_blocks data_annots objf in
(* get the template blocks *)
let templates = get_templates template_annots objf in
objf.obj_close();
(* let impl = *)
{ import_refs = tal_info.ti_imports;
export_refs = tal_info.ti_exports;
(*LX*)
imp_abbrevs = tal_info.ti_abbrevs;
con_blocks = tal_info.ti_con_blocks;
code_blocks = code_blocks;
data_blocks = data_blocks;
templates = templates; };
}
(* in
Talout.write_pre_mod "foo" "temp" impl;
impl *)
end
else
{ import_refs = tal_info.ti_imports;
export_refs = tal_info.ti_exports;
pre_imp = { imp_abbrevs = [||];
(*LX*)
con_blocks = [||];
code_blocks = [||];
data_blocks = [||];
templates = [||] } ;
}
;;
end (* DasmFunc *)
module Chan = DasmFunc(Pervasives)
module Str = DasmFunc (Stringchan)
| null | https://raw.githubusercontent.com/srdqty/talc-3.0/df83dd5ff0e2b189b13280ddae233d8277199350/talx86/dasm.ml | ocaml | ********************************************************************
********************************************************************
dasm.ml
*
* Disassembles an object file and associated .to file to produce a
* TAL implementation.
the annotated code blocks -- initialized with the types only
get the code blocks and merge with the annotations
gets a list of instructions and the next list of labels
Format.open_vbox 0;
Talpp.print_code_block
Format.std_formatter Talpp.std_options code_blocks.(i);
Format.close_box ();
Format.print_flush ();
flush stdout;
seek the code in the object file
Printf.printf "templ_start = %s\n" (Identifier.id_to_string templ_start);
flush stdout;
Given a data annotation read in the data.
read the .to file
open the object file and suck in headers and so forth
get the code blocks
get the data blocks
get the template blocks
let impl =
in
Talout.write_pre_mod "foo" "temp" impl;
impl | ( c ) , , ,
August 1999 , all rights reserved .
let pp i =
print_string ("Got here " ^ (string_of_int i) ^"\n");
flush stdout;
()
;;
open Tal;;
open Talbin;;
open Talbinin;;
open Numtypes;;
open Disobjfile;;
open Utilities;;
let debug f =
let fmt = Format.std_formatter in
let o = Talpp.std_options in
Format.pp_open_hvbox fmt 0;
f fmt o;
Format.pp_print_newline fmt ();
Format.pp_print_flush fmt ();
()
;;
module type dasm = sig
val disassemble : bool -> string -> string -> Tal.tal_pre_mod
end
module DasmFunc (X: sig
type in_channel
val input_char : in_channel -> char
val input_byte : in_channel -> int
val pos_in : in_channel -> int
val seek_in : in_channel -> int -> unit
val open_in_bin : string -> in_channel
val close_in : in_channel -> unit
val really_input : in_channel -> string -> int -> int -> unit
end ) : dasm = struct
open X
module TalbininX = Talbinin.TalbininFunc(X)
module Discoff = Discoff.DiscoffFunc(X)
module Diself = Diself.DiselfFunc(X)
type binformat = COFF | ELF;;
let objformat =
match Sys.os_type with
"Unix" -> ELF
| "Win32" -> COFF
| _ -> Printf.eprintf "Unknown operating system\n"; exit 1
let fail objf s =
let (sect,addr) = objf.obj_get_pos() in
Printf.eprintf "dasm (sect=%d, addr=0x%x): %s\n"
sect addr s;
raise Gcdfec.Exit;
;;
let get_code_blocks objf code_annots init_labs =
let code_blocks =
let bogus_id = Identifier.id_new "\000bogus" in
Array.init (Array.length code_annots)
(fun i -> (bogus_id,fst(code_annots.(i)), Array.of_list [])) in
let merge_inst_annots = merge_inst_annots objf in
let rec get_rest_instrs () =
match objf.obj_get_labels 0 with
[] -> (try get_instrs () with End_of_Section -> ([],[]))
| labs -> ([],List.rev(labs))
and get_instrs () =
let instr = Disasmx86.get_instr objf in
let (instrs,labs) = get_rest_instrs() in
(instr::instrs,labs)
in
let labs = ref init_labs in
let rec merge_inst_annots' annots raw_insts =
try
merge_inst_annots annots raw_insts
with Talbin.Found_Hole (annots,insts) ->
let (raw_instrs,next_labs) = get_instrs() in
(match !labs with
(* XXX - we should check that the hole label matches that in the
first annotation in annots. *)
[holeLab] -> ()
| [] -> fail objf "No hole label here."
| _ ->
Printf.eprintf "Labels = ";
List.map (fun x -> Printf.eprintf "%s " (Identifier.id_to_string x)) !labs;
Printf.eprintf "\n";
flush stderr;
fail objf "Too many hole labels here.");
labs := next_labs;
(insts @ (merge_inst_annots' annots raw_instrs))
in
for i = 0 to (Array.length code_annots) - 1 do
let do_code_block lab raw_instrs =
let (_,copt,_) = code_blocks.(i) in
let instrs = merge_inst_annots' (snd code_annots.(i)) raw_instrs in
code_blocks.(i) <- (lab,copt,Array.of_list instrs);
in
begin match !labs with
| [] -> fail objf "More annotations than code blocks!"
| [lab] ->
(* one label -- should be followed by instructions which we suck
* in, merge with the annotations, and then bang into the code_blocks
* array. As a side-effect of reading the instructions, we get the
* next list of labels and loop around. Note that the labels are
* sucked in in what appears to be reverse order from the original
* TAL file. *)
let (raw_instrs,next_labs) = get_instrs() in
labs := next_labs;
do_code_block lab raw_instrs;
()
| lab::rest ->
(* we have more than one label -- so the instructions are empty.
* However, the annotations may not be because we may have virtual
* instructions, so we have to merge them and then loop around with
* the rest of the labels. *)
labs := rest;
do_code_block lab [];
()
end;
done;
(code_blocks, !labs)
;;
let get_code_section code_annots objf =
objf.obj_seek_code();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial text labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".text" then rest
else fail objf "expecting .text label!") in
let (code_blocks,labs) = get_code_blocks objf code_annots init_labs in
match labs with
| [] -> code_blocks
| _ -> fail objf "more labels, but no annotations."
;;
let get_templates template_annots objf =
try
objf.obj_seek_cyclone ();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial text labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".cyc" then rest
else fail objf "expecting .cyc label!") in
let templates =
let bogus_id = Identifier.id_new "\000bogus" in
Array.init (Array.length template_annots)
(fun i -> (bogus_id,fst(template_annots.(i)),
[])) in
let labs = ref init_labs in
for i = 0 to (Array.length template_annots) - 1 do
match !labs with
[] -> fail objf "too few labels, no template start."
| [templ_start] ->
let (con,annots) = template_annots.(i) in
(* We output one word here when assembling to store the template
length. Not sure why? Still when reading this in we should skip
4 bytes here. *)
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
objf.Disobjfile.obj_get_byte ();
let init_labs = List.rev (objf.obj_get_labels 0) in
let (cb,rest) = get_code_blocks objf annots init_labs in
templates.(i) <- (templ_start,con,Array.to_list cb);
labs := rest
| _ -> fail objf "too many labels, at template start."
done;
(match !labs with
[] -> templates
| _ ->
List.iter (fun x -> Printf.printf " %s\n" (Identifier.id_to_string x)) !labs;
flush stdout;
fail objf "extra labels in Cyclone section")
with Disobjfile.No_Cyclone_Section -> (Array.of_list [])
;;
let get_data_blocks data_annots objf =
match a with
An_dlabel c ->
(match objf.obj_get_reloc () with
None -> fail objf "expecting label as data!"
| Some lab ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
if v <> 0 then fail objf "relocation adjustment";
Dlabel(lab,c))
| An_dbytes (len) ->
if len < 0 then fail objf "An_dbytes: size is less than 0"
else
let s = String.create len in
for i = 0 to len - 1 do
String.set s i (Char.chr (objf.obj_get_byte()))
done; Dbytes (s)
| An_d2bytes ->
let lo = objf.obj_get_byte() in
let hi = objf.obj_get_byte() in
let v = int_to_int32((hi lsl 8) lor lo) in
D2bytes(int32_to_int16 v)
| An_d4bytes c ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
D4bytes(int_to_int32 v,c)
| An_dfloat32 ->
let b = String.create 4 in
for i=0 to 3 do
b.[i] <- char_of_int (objf.obj_get_byte ());
done;
Dfloat32 (bytes_to_f32 b)
| An_dfloat64 ->
let b = String.create 8 in
for i = 0 to 7 do
b.[i] <- char_of_int (objf.obj_get_byte ());
done;
Dfloat64 (bytes_to_f64 b)
| An_djunk ->
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let v = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
if v <> 0 then fail objf "data supposed to be junk is non-zero";
Djunk
| An_drep ri ->
(* First four bytes describe the size *)
let b0 = objf.obj_get_byte() in
let b1 = objf.obj_get_byte() in
let b2 = objf.obj_get_byte() in
let b3 = objf.obj_get_byte() in
let len = (b3 lsl 24) lor (b2 lsl 16) lor (b1 lsl 8) lor b0 in
let str = String.create len in
for i = 0 to len - 1 do
String.set str i (Char.chr (objf.obj_get_byte()))
done;
let strchan = Stringchan.from_string str in
Drep
((match ri with
An_con -> RCon (Talbinin.Str.read_in_con strchan)
| An_kind -> RKind (Talbinin.Str.read_in_kind strchan)
| An_label -> RLabel (Talbinin.Str.read_in_label strchan)),
ref (Some (str)))
| An_dup -> Dup
| An_ddown -> Ddown in
let data_blocks =
Array.make (Array.length data_annots)
(Identifier.id_new "\000bogusd",i32_4,None,([],[])) in
let set_data_block i lab =
let (align,copt,(annots,cs)) = data_annots.(i) in
data_blocks.(i) <- (lab,align,copt,(List.map get_data annots,cs)) in
let rec set_data_blocks i labs =
match labs with
[] ->
if i <> (Array.length data_blocks) then
fail objf "Too few labels in data section."
else ()
| [lab] ->
set_data_block i lab;
if (i+1) < Array.length data_annots then
(let (align,_,_) = data_annots.(i+1) in
objf.obj_align (int32_to_int align);
set_data_blocks (i+1) (List.rev (objf.obj_get_labels 0)))
| lab :: rest ->
set_data_block i lab;
set_data_blocks (i+1) rest
in
objf.obj_seek_data();
let init_labs =
(match List.rev (objf.obj_get_labels 0) with
[] -> fail objf "no initial data labels!"
| t1::rest ->
if (Identifier.id_to_string t1) = ".data" then rest
else fail objf "expecting .data label!") in
Printf.printf " initial data labels:\n " ;
List.iter ( fun x - > Printf.printf " % s\n " ( ) )
init_labs ;
Printf.printf " --------------------\n " ;
List.iter (fun x -> Printf.printf "%s\n" (Identifier.id_to_string x))
init_labs;
Printf.printf "--------------------\n"; *)
set_data_blocks 0 init_labs;
data_blocks
;;
let disassemble disassem_internals tofile objfile =
let tofile_ch = open_in_bin tofile in
Gcdfec.reset_fe "dasm";
let tal_info = TalbininX.read_tal_info disassem_internals tofile_ch in
let code_annots = tal_info.ti_code_annots in
let data_annots = tal_info.ti_data_annots in
let template_annots = tal_info.ti_template_annots in
close_in tofile_ch;
if disassem_internals then
begin
let objf =
match objformat with
COFF -> Discoff.coff_objfile objfile
| ELF -> Diself.elf_objfile objfile
in
let code_blocks = get_code_section code_annots objf in
let data_blocks = get_data_blocks data_annots objf in
let templates = get_templates template_annots objf in
objf.obj_close();
{ import_refs = tal_info.ti_imports;
export_refs = tal_info.ti_exports;
(*LX*)
imp_abbrevs = tal_info.ti_abbrevs;
con_blocks = tal_info.ti_con_blocks;
code_blocks = code_blocks;
data_blocks = data_blocks;
templates = templates; };
}
end
else
{ import_refs = tal_info.ti_imports;
export_refs = tal_info.ti_exports;
pre_imp = { imp_abbrevs = [||];
(*LX*)
con_blocks = [||];
code_blocks = [||];
data_blocks = [||];
templates = [||] } ;
}
;;
end (* DasmFunc *)
module Chan = DasmFunc(Pervasives)
module Str = DasmFunc (Stringchan)
|
ca5abdfb356347d5840e3418c7e61418eab59b4d7590c2592c1cb895501bce69 | facebookincubator/hsthrift | Mangle.hs | Copyright ( c ) Facebook , Inc. and its affiliates .
{-# LANGUAGE ViewPatterns #-}
module Mangle
( mangle
) where
import Control.Arrow
import Control.Applicative hiding (Const)
import Control.Monad
import Data.Functor.Identity
import Data.Function
import Data.List
import Data.Maybe
import Data.Set (Set)
import Foreign.C
import Numeric
import Text.Parsec hiding ((<|>), many)
import qualified Data.Set as Set
-- | Mangles a C++ signature into a string.
mangle :: String -> Either ParseError String
mangle = fmap show . runParser sig (Uniq 0) ""
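-- Editorial sketch (not part of the original file): a minimal use of 'mangle'.
-- By our reading of the rules below this should evaluate to
-- Right "_Z3fooiRKSs"; treat that as an assumption, not an authoritative test
-- vector.
_exampleMangle :: Either ParseError String
_exampleMangle = mangle "void foo(int, const std::string&)"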
foreign export ccall itaniumMangle :: CString -> CSize -> IO CString
itaniumMangle :: CString -> CSize -> IO CString
itaniumMangle csymbol clen = do
symbol <- peekCStringLen (csymbol, fromIntegral clen)
either (\e -> fail $ "itaniumMangle failed: " ++ symbol ++ "\n" ++ show e)
newCString
(mangle symbol)
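-- Editorial note (not in the original file): 'itaniumMangle' is the C-callable
-- wrapper produced by the foreign export above. It reads @clen@ bytes from
-- @csymbol@, mangles them, and allocates a fresh CString for the result, which
-- the C caller is responsible for freeing. This is our reading of the code,
-- not documentation from the original authors.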
--------------------------------------------------------------------------------
-- Signatures
--
-- A signature consists of a name, a parameter list, and a set of
-- cv-qualifiers. The return type is parsed, but is not included in the mangled
-- name, so we don't store it.
data Sig = Sig Name [Type] (Set CV)
instance Show Sig where
show (normalizeSig -> s@(Sig sigName params cv)) = concat
$ "_Z" -- All mangled symbols start this way.
: showCvName cv sigName
: if null params
then [showType s (Builtin Void)]
else map (showType s) params
where
-- cv-qualifiers aren't allowed on non-nested names in C++.
showCvName _ (Unqual n _) = lengthPrefix n
showCvName cvs (Qual names name _)
= lengthEncode (showCvs cvs) "" (names ++ [name])
sig :: Parser Sig
sig = do
spaces
_ <- type_ -- Return type, ignored.
id_ <- nestedId
params_ <- list type_
cvs <- opt cvQuals
return $ Sig id_ params_ cvs
normalizeSig :: Sig -> Sig
normalizeSig (Sig name params cv) =
Sig name (map normalizeParameterType params) cv
--------------------------------------------------------------------------------
-- Types
data Type
= Builtin Builtin
| Named Name (Maybe [Type]) Uniq
| Ptr Type Uniq
| Ref Type Uniq
| CV (Set CV) Type Uniq
deriving (Show)
instance Eq Type where
Builtin a == Builtin b = a == b
Named a b _ == Named c d _ = (a, b) == (c, d)
Ptr a _ == Ptr b _ = a == b
Ref a _ == Ref b _ = a == b
CV a b _ == CV c d _ = (a, b) == (c, d)
_ == _ = False
showType :: Sig -> Type -> String
showType s t = case t of
Builtin b -> show b
Named name args _ -> (showName name args' `fromMaybe` findName name args)
where args' = maybe "" (("I" ++) . (++ "E") . concatMap recur) args
Ptr t' u -> fromMaybe ("P" ++ recur t') (findType t u)
Ref t' u -> fromMaybe ("R" ++ recur t') (findType t u)
CV cvs t' u -> fromMaybe (showCvs cvs ++ recur t') (findType t u)
where
recur = showType s
findType t' u = search byType id
where
byType match (TypeSub t'')
= t' == t'' && maybe False (match u) (typeUniq t'')
byType _ _ = False
findName (Qual names name u) args = byName <|> byQual
where
byName = search byName' id
byQual = search byQual'
$ \i -> concat ["N", i, lengthPrefix name, "E"]
byName' match (NameSub names' args' u')
= and [names ++ [name] == names', match u u', args == args']
byName' _ _ = False
byQual' match (QualSub names' u')
= names == names' && match u u'
byQual' _ _ = False
findName (Unqual name u) args = search byName id
where
byName match (NameSub names' args' u')
= and [[name] == names', match u u', args == args']
byName _ _ = False
search by f = do
-- Find a component that is equal but elsewhere.
i <- subIndex (by (/=)) s
-- Ensure it occurs before this one.
guard $ i `before` subIndex (by (==)) s
return $ f (show i)
_ `before` Nothing = True
i `before` Just i' = i < i'
type_ :: Parser Type
type_ = do
-- cv-qualifiers may occur before or after base types.
cv1 <- opt cvQuals
id_ <- typeId
mods <- many $ ref <|> ((\q u t -> mkCv q t u) <$> cvQuals <*> genUniq)
u <- genUniq
return $ foldr ($) (mkCv cv1 id_ u) (reverse mods)
typeId :: Parser Type
typeId = do
qual <- typeQual
parts <- if null qual
then idParts
else do
ps <- optionMaybe ((:[]) <$> qualifiable)
return $ flip fromMaybe ps $ case qual of
-- long is a qualifier as well as an end value itself
["long"] -> []
_ -> ["int"]
args <- optionMaybe templateArgs
case maybeBuiltin (sortBy (flip compare) $ qual ++ parts) of
Just builtin -> return $ Builtin builtin
-- TODO: Perhaps verify that 'qual' is empty if not used.
Nothing -> Named <$> (case parts of
[] -> error "empty identifier"
[part] -> Unqual part <$> genUniq
_ -> Qual (init parts) (last parts) <$> genUniq) <*> pure args <*> genUniq
typeQual :: Parser [String]
typeQual = fmap (sortBy $ flip compare)
. many . choice $ map word ["long", "unsigned", "signed"]
templateArgs :: Parser [Type]
templateArgs = between (word "<") (word ">") (commas type_)
qualifiable :: Parser String
qualifiable = choice $ map word
["char", "int", "__int64", "__int128"]
ref :: Parser (Type -> Type)
ref = flip <$> (Ptr <$ word "*" <|> Ref <$ word "&") <*> genUniq
-- From the itanium ABI spec:
-- "Note that top-level cv-qualifiers specified on a parameter type do
-- not affect the function type directly (i.e., int(*)(T) and int(*)(T
-- const) are the same type)"
normalizeParameterType :: Type -> Type
normalizeParameterType t = case normalizeType t of
CV _ t' _ -> t'
other -> other
normalizeType :: Type -> Type
normalizeType t = case t of
Builtin{} -> t
Named name args u -> Named name (map normalizeType <$> args) u
Ptr t' u -> Ptr (normalizeType t') u
Ref t' u -> Ref (normalizeType t') u
CV cvs t' _ | Set.null cvs -> normalizeType t'
CV cvs (CV cvs' t' _) u -> normalizeType $ CV (cvs <> cvs') t' u
CV cvs t' u -> CV cvs (normalizeType t') u
--------------------------------------------------------------------------------
-- Names
data Name = Qual [String] String Uniq | Unqual String Uniq
deriving (Show)
instance Eq Name where
Qual a b _ == Qual c d _ = (a, b) == (c, d)
Unqual a _ == Unqual b _ = a == b
_ == _ = False
idParts :: Parser [String]
idParts = do
skipMany (word "::") -- possible leading "::"
rawId `sepBy1` word "::"
showName :: Name -> String -> String
showName (Unqual name _) args = lengthEncode "" args [name]
showName (Qual names name _) args = lengthEncode "" args (names ++ [name])
lengthEncode :: String -> String -> [String] -> String
lengthEncode cvs args = \case
["std", "allocator"] -> "Sa"
["std", "basic_string"] -> "Sb" ++ args
["std", "string"] -> "Ss"
["std", "istream"] -> "Si"
["std", "ostream"] -> "So"
["std", "iostream"] -> "Sd"
["std", "size_t"] -> "m"
["std", name] -> "St" ++ lengthPrefix name ++ args
"std":names -> concat ["NSt", concatMap lengthPrefix names, args, "E"]
[name] -> lengthPrefix name ++ args
names -> concat ["N", cvs, concatMap lengthPrefix names, args, "E"]
lengthPrefix :: String -> String
lengthPrefix = uncurry (++) . (show . length &&& id)
nestedId :: Parser Name
nestedId = do
parts <- idParts
(case parts of
[part] -> Unqual part
_ -> Qual (init parts) (last parts)) <$> genUniq
rawId :: Parser String
rawId = notFollowedBy cvQual *> lexeme
(liftA2 (:) nondigit (many $ nondigit <|> digit) <?> "identifier")
nondigit :: Parser Char
nondigit = letter <|> char '_'
--------------------------------------------------------------------------------
-- Substitutions
--
-- Symbols are compressed by allowing signature components to refer to prior
-- components in the signature.
data Sub
= QualSub [String] Uniq
| NameSub [String] (Maybe [Type]) Uniq
| TypeSub Type
deriving (Eq, Show)
subIndex :: (Sub -> Bool) -> Sig -> Maybe Index
subIndex f = fmap Index . findIndex f . nubBy ((==) `on` ignoreUniq) . sigSubs
where
ignoreUniq (QualSub names _) = QualSub names $ Uniq 0
ignoreUniq (NameSub names name _) = NameSub names name $ Uniq 0
ignoreUniq t = t
-- 'nub' because substitutions are not repeated.
-- Note that the whole nested name from a signature is not considered for
-- substitution, only its prefix.
sigSubs :: Sig -> [Sub]
sigSubs (Sig Unqual{} types _)
= concatMap typeSubs types
sigSubs (Sig (Qual names _ u) types _)
= [QualSub nss u | nss <- tail $ inits names]
++ concatMap typeSubs types
typeSubs :: Type -> [Sub]
typeSubs Builtin{} = []
typeSubs (Named (Unqual name u) args _)
= NameSub [name] Nothing u
: [NameSub [name] args u | isJust args]
typeSubs (Named (Qual names name u) args _)
= [QualSub nss u | nss <- tail $ inits names]
++ [NameSub names' Nothing u]
++ [NameSub names' args u | isJust args]
where
names' = names ++ [name]
typeSubs t@(Ptr t' _) = typeSubs t' ++ [TypeSub t]
typeSubs t@(Ref t' _) = typeSubs t' ++ [TypeSub t]
typeSubs t@(CV _ t' _) = typeSubs t' ++ [TypeSub t]
--------------------------------------------------------------------------------
-- Substitution indices
--
-- Backreferences to substitutions are mangled in base 36.
newtype Index = Index Int
deriving (Bounded, Eq, Ord)
instance Show Index where
show (Index 0) = "S_"
show (Index n) = "S" ++ showIntAtBase 36 c (n - 1) "_"
where
c x = toEnum $ if x >= 0 && x <= 9
then fromEnum '0' + x
else fromEnum 'A' + (x - 10)
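-- Editorial note (not in the original file): with this encoding the first few
-- backreferences render as S_, S0_, S1_, ..., S9_, SA_, SB_, ... which is the
-- Itanium ABI's substitution numbering.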
--------------------------------------------------------------------------------
-- cv-qualifiers
--
-- cv-qualifiers are ordered and deduplicated, so we store them in sets.
data CV = Volatile | Restrict | Const
deriving (Eq, Ord)
instance Show CV where
show = \case
Const -> "K"
Restrict -> "r"
Volatile -> "V"
constQual :: Parser CV
constQual = Const <$ word "const"
cvQuals :: Parser (Set CV)
cvQuals = Set.fromList <$> many1 cvQual
cvQual :: Parser CV
cvQual = choice [constQual, volatileQual, restrictQual]
mkCv :: Set CV -> Type -> Uniq -> Type
mkCv cvs (CV cvs' t _) u = CV (cvs <> cvs') t u
mkCv cvs t u = CV cvs t u
-- Basically unnecessary.
restrictQual :: Parser CV
restrictQual = Restrict <$ word "restrict"
showCvs :: Set CV -> String
showCvs = concatMap show . Set.toList
volatileQual :: Parser CV
volatileQual = Volatile <$ word "volatile"
--------------------------------------------------------------------------------
-- Unique tags
--
-- When compressing a symbol, we do a depth-first pre-order traversal of the
-- signature AST. We don't want to substitute a type with a reference to itself,
-- so we give each type a unique tag.
newtype Uniq = Uniq Int
deriving (Enum, Eq, Show)
genUniq :: Parser Uniq
genUniq = do
next <- getState
modifyState succ
return next
typeUniq :: Type -> Maybe Uniq
typeUniq Builtin{} = Nothing
typeUniq (Named _ _ u) = Just u
typeUniq (Ptr _ u) = Just u
typeUniq (Ref _ u) = Just u
typeUniq (CV _ _ u) = Just u
--------------------------------------------------------------------------------
-- Parser utilities
--
-- Parsec's user state is used to generate unique tags for types. See 'Uniq'.
type Parser a = ParsecT String Uniq Identity a
commas :: Parser a -> Parser [a]
commas = (`sepEndBy` word ",")
list :: Parser a -> Parser [a]
list = paren . commas
lexeme :: Parser String -> Parser String
lexeme = (<* spaces)
opt :: (Monoid a) => Parser a -> Parser a
opt = option mempty
paren :: Parser a -> Parser a
paren = between (word "(") (word ")")
word :: String -> Parser String
word = try . lexeme . string
--------------------------------------------------------------------------------
-- Builtins
--
-- Builtin types are mangled differently from user-defined types.
data Builtin
= Void
| WChar
| Bool
| Char
| SChar
| UChar
| Short
| UShort
| Int
| UInt
| Long
| ULong
| LongLong
| ULongLong
| LongLongLong
| ULongLongLong
| Float
| Double
| LongDouble
| LongLongDouble
| Char32
| Char16
deriving (Eq)
instance Show Builtin where
show = \case
Void -> "v"
WChar -> "w"
Bool -> "b"
Char -> "c"
SChar -> "a"
UChar -> "h"
Short -> "s"
UShort -> "t"
Int -> "i"
UInt -> "j"
Long -> "l"
ULong -> "m"
LongLong -> "x"
ULongLong -> "y"
LongLongLong -> "n"
ULongLongLong -> "o"
Float -> "f"
Double -> "d"
LongDouble -> "e"
LongLongDouble -> "g"
Char32 -> "Di"
Char16 -> "Ds"
maybeBuiltin :: [String] -> Maybe Builtin
maybeBuiltin = \case
["void"] -> Just Void
["wchar_t"] -> Just WChar
["bool"] -> Just Bool
["char"] -> Just Char
-- WTB disjunctive patterns.
["signed", "char"] -> Just SChar
["int8_t"] -> Just SChar
["unsigned", "char"] -> Just UChar
["uint8_t"] -> Just UChar
["short"] -> Just Short
["short", "int"] -> Just Short
["int16_t"] -> Just Short
["unsigned", "short"] -> Just UShort
["unsigned", "short", "int"] -> Just UShort
["uint16_t"] -> Just UShort
["int"] -> Just Int
["int32_t"] -> Just Int
["unsigned"] -> Just UInt
["unsigned", "int"] -> Just UInt
["uint32_t"] -> Just UInt
["long"] -> Just Long
["int64_t"] -> Just Long
["unsigned", "long"] -> Just ULong
["unsigned", "long", "int"] -> Just ULong
["uint64_t"] -> Just ULong
["size_t"] -> Just ULong
["long", "long"] -> Just LongLong
["long", "long", "int"] -> Just LongLong
["__int64"] -> Just LongLong
["unsigned", "long", "long"] -> Just ULongLong
["unsigned", "long", "long", "int"] -> Just ULongLong
["unsigned", "__int64"] -> Just ULongLong
["__int128"] -> Just LongLongLong
["unsigned", "__int128"] -> Just ULongLongLong
["float"] -> Just Float
["double"] -> Just Double
["long", "double"] -> Just LongDouble
["__float80"] -> Just LongDouble
["__float128"] -> Just LongLongDouble
["char32_t"] -> Just Char32
["char16_t"] -> Just Char16
_ -> Nothing
| null | https://raw.githubusercontent.com/facebookincubator/hsthrift/d3ff75d487e9d0c2904d18327373b603456e7a01/common/mangle/Mangle.hs | haskell | | Mangles a C++ signature into a string.
------------------------------------------------------------------------------
Signatures
A signature consists of a name, a parameter list, and a set of
cv-qualifiers. The return type is parsed, but is not included in the mangled
name, so we don't store it.
All mangled symbols start this way.
Return type, ignored.
------------------------------------------------------------------------------
Types
Find a component that is equal but elsewhere.
Ensure it occurs before this one.
cv-qualifiers may occur before or after base types.
long is a qualifier as well as a end value itself
TODO: Perhaps verify that 'qual' is empty if not used.
"Note that top-level cv-qualifiers specified on a parameter type do
not affect the function type directly (i.e., int(*)(T) and int(*)(T
const) are the same type)"
------------------------------------------------------------------------------
Names
possible leading "::"
------------------------------------------------------------------------------
Substitutions
Symbols are compressed by allowing signature components to refer to prior
components in the signature.
'nub' because substitutions are not repeated.
Note that the whole nested name from a signature is not considered for
substitution, only its prefix.
------------------------------------------------------------------------------
Substitution indices
------------------------------------------------------------------------------
cv-qualifiers
cv-qualifiers are ordered and deduplicated, so we store them in sets.
Basically unnecessary.
------------------------------------------------------------------------------
Unique tags
When compressing a symbol, we do a depth-first pre-order traversal of the
so we give each type a unique tag.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Builtins
WTB disjunctive patterns. | Copyright ( c ) Facebook , Inc. and its affiliates .
{-# LANGUAGE ViewPatterns #-}
module Mangle
( mangle
) where
import Control.Arrow
import Control.Applicative hiding (Const)
import Control.Monad
import Data.Functor.Identity
import Data.Function
import Data.List
import Data.Maybe
import Data.Set (Set)
import Foreign.C
import Numeric
import Text.Parsec hiding ((<|>), many)
import qualified Data.Set as Set
mangle :: String -> Either ParseError String
mangle = fmap show . runParser sig (Uniq 0) ""
foreign export ccall itaniumMangle :: CString -> CSize -> IO CString
itaniumMangle :: CString -> CSize -> IO CString
itaniumMangle csymbol clen = do
symbol <- peekCStringLen (csymbol, fromIntegral clen)
either (\e -> fail $ "itaniumMangle failed: " ++ symbol ++ "\n" ++ show e)
newCString
(mangle symbol)
data Sig = Sig Name [Type] (Set CV)
instance Show Sig where
show (normalizeSig -> s@(Sig sigName params cv)) = concat
: showCvName cv sigName
: if null params
then [showType s (Builtin Void)]
else map (showType s) params
where
-- cv-qualifiers aren't allowed on non-nested names in C++.
showCvName _ (Unqual n _) = lengthPrefix n
showCvName cvs (Qual names name _)
= lengthEncode (showCvs cvs) "" (names ++ [name])
sig :: Parser Sig
sig = do
spaces
id_ <- nestedId
params_ <- list type_
cvs <- opt cvQuals
return $ Sig id_ params_ cvs
normalizeSig :: Sig -> Sig
normalizeSig (Sig name params cv) =
Sig name (map normalizeParameterType params) cv
data Type
= Builtin Builtin
| Named Name (Maybe [Type]) Uniq
| Ptr Type Uniq
| Ref Type Uniq
| CV (Set CV) Type Uniq
deriving (Show)
instance Eq Type where
Builtin a == Builtin b = a == b
Named a b _ == Named c d _ = (a, b) == (c, d)
Ptr a _ == Ptr b _ = a == b
Ref a _ == Ref b _ = a == b
CV a b _ == CV c d _ = (a, b) == (c, d)
_ == _ = False
showType :: Sig -> Type -> String
showType s t = case t of
Builtin b -> show b
Named name args _ -> (showName name args' `fromMaybe` findName name args)
where args' = maybe "" (("I" ++) . (++ "E") . concatMap recur) args
Ptr t' u -> fromMaybe ("P" ++ recur t') (findType t u)
Ref t' u -> fromMaybe ("R" ++ recur t') (findType t u)
CV cvs t' u -> fromMaybe (showCvs cvs ++ recur t') (findType t u)
where
recur = showType s
findType t' u = search byType id
where
byType match (TypeSub t'')
= t' == t'' && maybe False (match u) (typeUniq t'')
byType _ _ = False
findName (Qual names name u) args = byName <|> byQual
where
byName = search byName' id
byQual = search byQual'
$ \i -> concat ["N", i, lengthPrefix name, "E"]
byName' match (NameSub names' args' u')
= and [names ++ [name] == names', match u u', args == args']
byName' _ _ = False
byQual' match (QualSub names' u')
= names == names' && match u u'
byQual' _ _ = False
findName (Unqual name u) args = search byName id
where
byName match (NameSub names' args' u')
= and [[name] == names', match u u', args == args']
byName _ _ = False
search by f = do
i <- subIndex (by (/=)) s
guard $ i `before` subIndex (by (==)) s
return $ f (show i)
_ `before` Nothing = True
i `before` Just i' = i < i'
type_ :: Parser Type
type_ = do
cv1 <- opt cvQuals
id_ <- typeId
mods <- many $ ref <|> ((\q u t -> mkCv q t u) <$> cvQuals <*> genUniq)
u <- genUniq
return $ foldr ($) (mkCv cv1 id_ u) (reverse mods)
typeId :: Parser Type
typeId = do
qual <- typeQual
parts <- if null qual
then idParts
else do
ps <- optionMaybe ((:[]) <$> qualifiable)
return $ flip fromMaybe ps $ case qual of
["long"] -> []
_ -> ["int"]
args <- optionMaybe templateArgs
case maybeBuiltin (sortBy (flip compare) $ qual ++ parts) of
Just builtin -> return $ Builtin builtin
Nothing -> Named <$> (case parts of
[] -> error "empty identifier"
[part] -> Unqual part <$> genUniq
_ -> Qual (init parts) (last parts) <$> genUniq) <*> pure args <*> genUniq
typeQual :: Parser [String]
typeQual = fmap (sortBy $ flip compare)
. many . choice $ map word ["long", "unsigned", "signed"]
templateArgs :: Parser [Type]
templateArgs = between (word "<") (word ">") (commas type_)
qualifiable :: Parser String
qualifiable = choice $ map word
["char", "int", "__int64", "__int128"]
ref :: Parser (Type -> Type)
ref = flip <$> (Ptr <$ word "*" <|> Ref <$ word "&") <*> genUniq
-- From the itanium ABI spec:
normalizeParameterType :: Type -> Type
normalizeParameterType t = case normalizeType t of
CV _ t' _ -> t'
other -> other
normalizeType :: Type -> Type
normalizeType t = case t of
Builtin{} -> t
Named name args u -> Named name (map normalizeType <$> args) u
Ptr t' u -> Ptr (normalizeType t') u
Ref t' u -> Ref (normalizeType t') u
CV cvs t' _ | Set.null cvs -> normalizeType t'
CV cvs (CV cvs' t' _) u -> normalizeType $ CV (cvs <> cvs') t' u
CV cvs t' u -> CV cvs (normalizeType t') u
data Name = Qual [String] String Uniq | Unqual String Uniq
deriving (Show)
instance Eq Name where
Qual a b _ == Qual c d _ = (a, b) == (c, d)
Unqual a _ == Unqual b _ = a == b
_ == _ = False
idParts :: Parser [String]
idParts = do
rawId `sepBy1` word "::"
showName :: Name -> String -> String
showName (Unqual name _) args = lengthEncode "" args [name]
showName (Qual names name _) args = lengthEncode "" args (names ++ [name])
lengthEncode :: String -> String -> [String] -> String
lengthEncode cvs args = \case
["std", "allocator"] -> "Sa"
["std", "basic_string"] -> "Sb" ++ args
["std", "string"] -> "Ss"
["std", "istream"] -> "Si"
["std", "ostream"] -> "So"
["std", "iostream"] -> "Sd"
["std", "size_t"] -> "m"
["std", name] -> "St" ++ lengthPrefix name ++ args
"std":names -> concat ["NSt", concatMap lengthPrefix names, args, "E"]
[name] -> lengthPrefix name ++ args
names -> concat ["N", cvs, concatMap lengthPrefix names, args, "E"]
lengthPrefix :: String -> String
lengthPrefix = uncurry (++) . (show . length &&& id)
nestedId :: Parser Name
nestedId = do
parts <- idParts
(case parts of
[part] -> Unqual part
_ -> Qual (init parts) (last parts)) <$> genUniq
rawId :: Parser String
rawId = notFollowedBy cvQual *> lexeme
(liftA2 (:) nondigit (many $ nondigit <|> digit) <?> "identifier")
nondigit :: Parser Char
nondigit = letter <|> char '_'
data Sub
= QualSub [String] Uniq
| NameSub [String] (Maybe [Type]) Uniq
| TypeSub Type
deriving (Eq, Show)
subIndex :: (Sub -> Bool) -> Sig -> Maybe Index
subIndex f = fmap Index . findIndex f . nubBy ((==) `on` ignoreUniq) . sigSubs
where
ignoreUniq (QualSub names _) = QualSub names $ Uniq 0
ignoreUniq (NameSub names name _) = NameSub names name $ Uniq 0
ignoreUniq t = t
sigSubs :: Sig -> [Sub]
sigSubs (Sig Unqual{} types _)
= concatMap typeSubs types
sigSubs (Sig (Qual names _ u) types _)
= [QualSub nss u | nss <- tail $ inits names]
++ concatMap typeSubs types
typeSubs :: Type -> [Sub]
typeSubs Builtin{} = []
typeSubs (Named (Unqual name u) args _)
= NameSub [name] Nothing u
: [NameSub [name] args u | isJust args]
typeSubs (Named (Qual names name u) args _)
= [QualSub nss u | nss <- tail $ inits names]
++ [NameSub names' Nothing u]
++ [NameSub names' args u | isJust args]
where
names' = names ++ [name]
typeSubs t@(Ptr t' _) = typeSubs t' ++ [TypeSub t]
typeSubs t@(Ref t' _) = typeSubs t' ++ [TypeSub t]
typeSubs t@(CV _ t' _) = typeSubs t' ++ [TypeSub t]
-- Backreferences to substitutions are mangled in base 36.
newtype Index = Index Int
deriving (Bounded, Eq, Ord)
instance Show Index where
show (Index 0) = "S_"
show (Index n) = "S" ++ showIntAtBase 36 c (n - 1) "_"
where
c x = toEnum $ if x >= 0 && x <= 9
then fromEnum '0' + x
else fromEnum 'A' + (x - 10)
data CV = Volatile | Restrict | Const
deriving (Eq, Ord)
instance Show CV where
show = \case
Const -> "K"
Restrict -> "r"
Volatile -> "V"
constQual :: Parser CV
constQual = Const <$ word "const"
cvQuals :: Parser (Set CV)
cvQuals = Set.fromList <$> many1 cvQual
cvQual :: Parser CV
cvQual = choice [constQual, volatileQual, restrictQual]
mkCv :: Set CV -> Type -> Uniq -> Type
mkCv cvs (CV cvs' t _) u = CV (cvs <> cvs') t u
mkCv cvs t u = CV cvs t u
restrictQual :: Parser CV
restrictQual = Restrict <$ word "restrict"
showCvs :: Set CV -> String
showCvs = concatMap show . Set.toList
volatileQual :: Parser CV
volatileQual = Volatile <$ word "volatile"
-- signature AST. We don't want to substitute a type with a reference to itself,
newtype Uniq = Uniq Int
deriving (Enum, Eq, Show)
genUniq :: Parser Uniq
genUniq = do
next <- getState
modifyState succ
return next
typeUniq :: Type -> Maybe Uniq
typeUniq Builtin{} = Nothing
typeUniq (Named _ _ u) = Just u
typeUniq (Ptr _ u) = Just u
typeUniq (Ref _ u) = Just u
typeUniq (CV _ _ u) = Just u
-- Parser utilities
-- Parsec's user state is used to generate unique tags for types. See 'Uniq'.
type Parser a = ParsecT String Uniq Identity a
commas :: Parser a -> Parser [a]
commas = (`sepEndBy` word ",")
list :: Parser a -> Parser [a]
list = paren . commas
lexeme :: Parser String -> Parser String
lexeme = (<* spaces)
opt :: (Monoid a) => Parser a -> Parser a
opt = option mempty
paren :: Parser a -> Parser a
paren = between (word "(") (word ")")
word :: String -> Parser String
word = try . lexeme . string
-- Builtin types are mangled differently from user-defined types.
data Builtin
= Void
| WChar
| Bool
| Char
| SChar
| UChar
| Short
| UShort
| Int
| UInt
| Long
| ULong
| LongLong
| ULongLong
| LongLongLong
| ULongLongLong
| Float
| Double
| LongDouble
| LongLongDouble
| Char32
| Char16
deriving (Eq)
instance Show Builtin where
show = \case
Void -> "v"
WChar -> "w"
Bool -> "b"
Char -> "c"
SChar -> "a"
UChar -> "h"
Short -> "s"
UShort -> "t"
Int -> "i"
UInt -> "j"
Long -> "l"
ULong -> "m"
LongLong -> "x"
ULongLong -> "y"
LongLongLong -> "n"
ULongLongLong -> "o"
Float -> "f"
Double -> "d"
LongDouble -> "e"
LongLongDouble -> "g"
Char32 -> "Di"
Char16 -> "Ds"
maybeBuiltin :: [String] -> Maybe Builtin
maybeBuiltin = \case
["void"] -> Just Void
["wchar_t"] -> Just WChar
["bool"] -> Just Bool
["char"] -> Just Char
["signed", "char"] -> Just SChar
["int8_t"] -> Just SChar
["unsigned", "char"] -> Just UChar
["uint8_t"] -> Just UChar
["short"] -> Just Short
["short", "int"] -> Just Short
["int16_t"] -> Just Short
["unsigned", "short"] -> Just UShort
["unsigned", "short", "int"] -> Just UShort
["uint16_t"] -> Just UShort
["int"] -> Just Int
["int32_t"] -> Just Int
["unsigned"] -> Just UInt
["unsigned", "int"] -> Just UInt
["uint32_t"] -> Just UInt
["long"] -> Just Long
["int64_t"] -> Just Long
["unsigned", "long"] -> Just ULong
["unsigned", "long", "int"] -> Just ULong
["uint64_t"] -> Just ULong
["size_t"] -> Just ULong
["long", "long"] -> Just LongLong
["long", "long", "int"] -> Just LongLong
["__int64"] -> Just LongLong
["unsigned", "long", "long"] -> Just ULongLong
["unsigned", "long", "long", "int"] -> Just ULongLong
["unsigned", "__int64"] -> Just ULongLong
["__int128"] -> Just LongLongLong
["unsigned", "__int128"] -> Just ULongLongLong
["float"] -> Just Float
["double"] -> Just Double
["long", "double"] -> Just LongDouble
["__float80"] -> Just LongDouble
["__float128"] -> Just LongLongDouble
["char32_t"] -> Just Char32
["char16_t"] -> Just Char16
_ -> Nothing
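-- Illustrative sketch (not part of the original source): a builtin lookup
-- feeds straight into the one- or two-letter codes above, e.g.
-- >>> fmap show (maybeBuiltin ["unsigned", "long"])
-- Just "m"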
|
69e6a5dda63661c9bc4f669399f37de5b6ab1f120fc6dea78f51fe51e9a84f32 | hopv/horsat2 | alternatingAutomaton.mli | type state = string;;
type index = int;;
type formula = Bool of bool
| Var of index * state
| Or of formula * formula
| And of formula * formula;;
type t = { alpha : (string * int) list;
st : state list;
delta : ((state * string) * formula) list;
init : state };;
val from_automaton : Automaton.automaton -> t;;
val from_transitions : (state * int) list * Syntax.ata_trans list -> t;;
val print : t -> unit;;
val negate : t -> t;;
val prime_implicants : formula -> (int * string) list list;;
val cata : (bool -> 'a) ->
(index * state -> 'a) ->
('a -> 'a -> 'a) -> (* or *)
('a -> 'a -> 'a) -> (* and *)
formula ->
'a;;
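(* Illustrative sketch (not part of the original interface): [cata] folds a
   formula bottom-up; for example
     let leaves = cata (fun _ -> 1) (fun _ -> 1) (+) (+)
   counts the leaves, so [leaves (And (Bool true, Var (0, "q")))] is 2. *)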
|
3418971d1487e0ec5a0b9da21f9b9bfd9e8e7f3c9c20847788ee64af8957a9c1 | jvanbruegge/Megarecord | Internal.hs | # LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Kind.Row.Internal where
import Data.Kind (Type)
import Fcf (Eval, Exp, Flip, ConstFn, FromMaybe, type (=<<))
import GHC.TypeLits (Symbol, Nat, CmpSymbol, CmpNat)
import qualified Fcf as F
-- Used for providing a total order over types
type family Compare (a :: k) (b :: k) :: Ordering
type instance Compare (a :: Symbol) (b :: Symbol) = CmpSymbol a b
type instance Compare (a :: Nat) (b :: Nat) = CmpNat a b
-- The core type level Map datatype
data Map (k :: Type) (v :: Type) = Cons k v (Map k v) | Nil
type Empty = 'Nil
type Row k = Map Symbol [k]
-- Core operations on the type
type Lookup (k :: k1) (m :: Map k1 k2) = Eval (Lookup_ k m)
data Lookup_ :: k1 -> Map k1 k2 -> Exp (Maybe k2)
type instance Eval (Lookup_ k 'Nil) = 'Nothing
type instance Eval (Lookup_ k ('Cons k' v m)) = Eval (LookupInternal (Compare k k') k ('Cons k' v m))
data LookupInternal :: Ordering -> k1 -> Map k1 k2 -> Exp (Maybe k2)
type instance Eval (LookupInternal 'LT _ _) = 'Nothing
type instance Eval (LookupInternal 'EQ k ('Cons _ v _)) = 'Just v
type instance Eval (LookupInternal 'GT k ('Cons _ _ m)) = Lookup k m
data Transform :: (k1 -> k2 -> Exp k2) -> Map k1 k2 -> Exp (Map k1 k2)
type instance Eval (Transform f ('Cons k v m)) = 'Cons k (Eval (f k v)) (Eval (Transform f m))
type instance Eval (Transform _ 'Nil) = 'Nil
data InsertWith :: (Maybe k2 -> k2 -> Exp k2) -> k1 -> k2 -> Map k1 k2 -> Exp (Map k1 k2)
type instance Eval (InsertWith f k v 'Nil) = 'Cons k (Eval (f 'Nothing v)) 'Nil
type instance Eval (InsertWith f k v ('Cons k' v' m)) =
Eval (InsertInternal (Compare k k') f k v ('Cons k' v' m))
data InsertInternal :: Ordering -> (Maybe k2 -> k2 -> Exp k2) -> k1 -> k2 -> Map k1 k2 -> Exp (Map k1 k2)
type instance Eval (InsertInternal 'LT f k v m) = 'Cons k (Eval (f 'Nothing v)) m
type instance Eval (InsertInternal 'EQ f k v ('Cons k' v' m)) = 'Cons k (Eval (f ('Just v') v)) m
type instance Eval (InsertInternal 'GT f k v ('Cons k' v' m)) = 'Cons k' v' (Eval (InsertWith f k v m))
data RemoveWith :: (k2 -> Exp (Maybe k2)) -> k1 -> Map k1 k2 -> Exp (Map k1 k2)
type instance Eval (RemoveWith f k 'Nil) = 'Nil
type instance Eval (RemoveWith f k ('Cons k' v m)) =
Eval (RemoveWithInternal (Compare k k') f k ('Cons k' v m))
data RemoveWithInternal :: Ordering -> (k2 -> Exp (Maybe k2)) -> k1 -> Map k1 k2 -> Exp (Map k1 k2)
type instance Eval (RemoveWithInternal 'LT _ _ m) = m
type instance Eval (RemoveWithInternal 'EQ f k ('Cons _ v m)) =
RemoveNode (Eval (f v)) ('Cons k v m)
type instance Eval (RemoveWithInternal 'GT f k ('Cons k' v m)) =
'Cons k' v (Eval (RemoveWith f k m))
type family RemoveNode (v :: Maybe k2) (m :: Map k1 k2) :: Map k1 k2 where
RemoveNode 'Nothing ('Cons _ _ m) = m
RemoveNode ('Just x) ('Cons k _ m) = 'Cons k x m
type family Traverse (f :: k1 -> k2 -> Exp k3) (con :: k3 -> k3 -> Exp k3) (m :: Map k1 k2) (id :: k3) :: k3 where
Traverse _ _ 'Nil id = id
Traverse f _ ('Cons k v 'Nil) _ = Eval (f k v)
Traverse f con ('Cons k v m) id = Eval (con (Eval (f k v)) (Traverse f con m id))
-- Combinators
type Insert (k :: k1) (v :: k2) (m :: Map k1 k2) = Eval (InsertWith (Flip ConstFn) k v m)
type Contains (mapping :: k2 -> Exp Bool) (k :: k1) (m :: Map k1 k2) =
Eval (FromMaybe 'False =<< F.Map mapping =<< Lookup_ k m)
type RowPrepend (k :: Symbol) (v :: k2) (m :: Row k2) = Eval (InsertWith RowPrep k '[v] m)
data RowPrep :: Maybe [v] -> [v] -> Exp [v]
type instance Eval (RowPrep 'Nothing v) = v
type instance Eval (RowPrep ('Just xs) '[v]) = v ': xs
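-- Illustrative sketch (not part of the original module): a small type-level
-- map built from the combinators above; ExampleLookup reduces to 'Just Int.
type ExampleMap = Insert "b" Bool (Insert "a" Int Empty)
type ExampleLookup = Lookup "a" ExampleMap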
|
82def8d13e3819297ded099cbc119957cc3e9bc44d386f0ee1dbfa603bcf84f2 | nikodemus/SBCL | defpackage.lisp | (defpackage "SB-BSD-SOCKETS-INTERNAL"
(:nicknames "SOCKINT")
(:shadow close listen)
(:shadowing-import-from "SB-KERNEL" with-array-data)
(:use "COMMON-LISP" "SB-ALIEN" "SB-EXT"))
(defpackage "SB-BSD-SOCKETS"
(:export socket local-socket local-abstract-socket inet-socket
make-local-socket make-inet-socket
socket-bind socket-accept socket-connect
socket-send socket-receive socket-recv
socket-name socket-peername socket-listen
socket-close socket-file-descriptor
socket-family socket-protocol socket-open-p
socket-type socket-make-stream get-protocol-by-name
get-host-by-name get-host-by-address
host-ent
host-ent-addresses host-ent-address
host-ent-aliases host-ent-name
name-service-error
getaddrinfo
;; not sure if these are really good names or not
netdb-internal-error
netdb-success-error
host-not-found-error
try-again-error
no-recovery-error
unknown-protocol
;; all socket options are also exported, by code in
;; sockopt.lisp
socket-error
;; other errno-based socket errors are exported by code in
;; sockets.lisp
make-inet-address
non-blocking-mode)
(:use "COMMON-LISP" "SB-BSD-SOCKETS-INTERNAL")
(:import-from "SB-INT" "UNSUPPORTED-OPERATOR" "FEATUREP")
(:documentation
"A thinly-disguised BSD socket API for SBCL. Ideas stolen from the BSD
socket API for C and Graham Barr's IO::Socket classes for Perl.
We represent sockets as CLOS objects, and rename a lot of methods and
arguments to fit Lisp style more closely."))
;;; gethostbyname/gethostbyaddr are generally not thread safe. POSIX
;;; 1003.1-2003 defines an alternative API, which is specified in the
;;; RFC to be thread-safe. If it seems to be available, use it.
;;;
;;; Unfortunately the manual page claims that these functions are not
;;; thread-safe on OS X, but they probably can't be any worse than
;;; gethostbyname and gethostbyaddr.
;;;
;;; CLH: seems to be broken on x86-64/darwin
#-(or win32 (and x86-64 darwin))
(let ((addr (sb-alien::find-dynamic-foreign-symbol-address "getaddrinfo")))
(when addr
(pushnew :sb-bsd-sockets-addrinfo *features*)))
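;;; Illustrative sketch (not part of the original file): typical use of the
;;; exported API once the contrib is loaded, e.g.
;;;   (require :sb-bsd-sockets)
;;;   (sb-bsd-sockets:make-inet-address "127.0.0.1") ; => #(127 0 0 1)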
|
5abd23220088a245cec0576097130edbdffdc2ea89db764cf4dd4dae3fcbec33 | brendanhay/gogol | Update.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . . . Instances . Update
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Updates settings of a Cloud SQL instance. Using this operation might cause your instance to restart.
--
/See:/ < > for @sql.instances.update@.
module Gogol.SQLAdmin.Sql.Instances.Update
( -- * Resource
SqlInstancesUpdateResource,
-- ** Constructing a Request
SqlInstancesUpdate (..),
newSqlInstancesUpdate,
)
where
import qualified Gogol.Prelude as Core
import Gogol.SQLAdmin.Types
-- | A resource alias for @sql.instances.update@ method which the
-- 'SqlInstancesUpdate' request conforms to.
type SqlInstancesUpdateResource =
"v1"
Core.:> "projects"
Core.:> Core.Capture "project" Core.Text
Core.:> "instances"
Core.:> Core.Capture "instance" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.ReqBody '[Core.JSON] DatabaseInstance
Core.:> Core.Put '[Core.JSON] Operation
-- | Updates settings of a Cloud SQL instance. Using this operation might cause your instance to restart.
--
-- /See:/ 'newSqlInstancesUpdate' smart constructor.
data SqlInstancesUpdate = SqlInstancesUpdate
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
    -- | JSONP
callback :: (Core.Maybe Core.Text),
-- | Cloud SQL instance ID. This does not include the project ID.
instance' :: Core.Text,
-- | Multipart request metadata.
payload :: DatabaseInstance,
-- | Project ID of the project that contains the instance.
project :: Core.Text,
    -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
-- | Creates a value of 'SqlInstancesUpdate' with the minimum fields required to make a request.
newSqlInstancesUpdate ::
-- | Cloud SQL instance ID. This does not include the project ID. See 'instance''.
Core.Text ->
-- | Multipart request metadata. See 'payload'.
DatabaseInstance ->
-- | Project ID of the project that contains the instance. See 'project'.
Core.Text ->
SqlInstancesUpdate
newSqlInstancesUpdate instance' payload project =
SqlInstancesUpdate
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
instance' = instance',
payload = payload,
project = project,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest SqlInstancesUpdate where
type Rs SqlInstancesUpdate = Operation
type
Scopes SqlInstancesUpdate =
'[CloudPlatform'FullControl, Sqlservice'Admin]
requestClient SqlInstancesUpdate {..} =
go
project
instance'
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
payload
sQLAdminService
where
go =
Core.buildClient
(Core.Proxy :: Core.Proxy SqlInstancesUpdateResource)
Core.mempty
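-- Illustrative sketch (not part of the generated module): building a request
-- value with the smart constructor above. The instance and project names are
-- placeholders, not real resources.
exampleUpdate :: DatabaseInstance -> SqlInstancesUpdate
exampleUpdate db = newSqlInstancesUpdate "my-instance" db "my-project"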
|
d6680d378c86a4926d8715dd84e59e9c51a63404a0a989fa02efd7f601c4b8aa | emqx/mria | mria_rlog_replica.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc This module implements a gen_statem which collects rlogs from
%% a remote core node.
-module(mria_rlog_replica).
-behaviour(gen_statem).
%% API:
-export([start_link/2]).
%% gen_statem callbacks:
-export([init/1, terminate/3, code_change/4, callback_mode/0, handle_event/4, format_status/1]).
%% Internal exports:
-export([do_push_tlog_entry/2, push_tlog_entry/4]).
-include("mria_rlog.hrl").
-include_lib("snabbkaffe/include/trace.hrl").
%%================================================================================
%% Type declarations
%%================================================================================
%% States:
-define(disconnected, disconnected).
-define(bootstrap, bootstrap).
-define(local_replay, local_replay).
-define(normal, normal).
-type state() :: ?disconnected
| ?bootstrap
| ?local_replay
| ?normal.
%% Timeouts:
-define(reconnect, reconnect).
-record(d,
{ shard :: mria_rlog:shard()
, parent_sup :: pid()
, remote_core_node = undefined :: node() | undefined
, agent :: pid() | undefined
, checkpoint = undefined :: mria_rlog_server:checkpoint() | undefined
, next_batch_seqno = 0 :: non_neg_integer()
, replayq :: replayq:q() | undefined
, importer_worker :: pid() | undefined
, importer_ref = false :: false | reference()
}).
-type data() :: #d{}.
-type fsm_result() :: gen_statem:event_handler_result(state()).
%%================================================================================
%% API functions
%%================================================================================
start_link(ParentSup, Shard) ->
gen_statem:start_link({local, Shard}, ?MODULE, {ParentSup, Shard}, []).
%%================================================================================
%% gen_statem callbacks
%%================================================================================
%% @private We use handle_event_function style, because it leads to
%% better code reuse and makes it harder to accidentally forget to
%% handle some type of event in one of the states. Also it allows to
%% group event handlers logically.
callback_mode() -> [handle_event_function, state_enter].
-spec init({pid(), mria_rlog:shard()}) -> {ok, state(), data()}.
init({ParentSup, Shard}) ->
process_flag(trap_exit, true),
process_flag(message_queue_data, off_heap),
logger:update_process_metadata(#{ domain => [mria, rlog, replica]
, shard => Shard
}),
?tp(info, "starting_rlog_shard", #{shard => Shard}),
?tp(rlog_replica_start,
#{ node => node()
, shard => Shard
}),
D = #d{ shard = Shard
, parent_sup = ParentSup
},
{ok, ?disconnected, D}.
-spec handle_event(gen_statem:event_type(), _EventContent, state(), data()) -> fsm_result().
%% Main loop:
handle_event(info, Tx = #entry{}, State, D) ->
handle_tlog_entry(State, Tx, D);
handle_event(info, Ack = #imported{}, State, D) ->
handle_importer_ack(State, Ack, D);
%% Events specific to `disconnected' state:
handle_event(enter, OldState, ?disconnected, D) ->
handle_state_trans(OldState, ?disconnected, D),
initiate_reconnect(D);
handle_event(state_timeout, ?reconnect, ?disconnected, D) ->
handle_reconnect(D);
%% Events specific to `bootstrap' state:
handle_event(enter, OldState, ?bootstrap, D) ->
handle_state_trans(OldState, ?bootstrap, D),
initiate_bootstrap(D);
handle_event(info, #bootstrap_complete{checkpoint = Checkpoint}, ?bootstrap, D) ->
handle_bootstrap_complete(Checkpoint, D);
%% Events specific to `local_replay' state:
handle_event(enter, OldState, ?local_replay, D) ->
handle_state_trans(OldState, ?local_replay, D),
initiate_local_replay(D);
%% Events specific to `normal' state:
handle_event(enter, OldState, ?normal, D) ->
handle_state_trans(OldState, ?normal, D),
enter_normal(D);
%% Common events:
handle_event(enter, OldState, State, Data) ->
handle_state_trans(OldState, State, Data);
handle_event(info, {'EXIT', Agent, Reason}, State, D = #d{agent = Agent}) ->
handle_agent_down(State, Reason, D);
handle_event(EventType, Event, State, Data) ->
handle_unknown(EventType, Event, State, Data).
code_change(_OldVsn, State, Data, _Extra) ->
{ok, State, Data}.
terminate(_Reason, _State, Data) ->
close_replayq(Data),
?tp(stopping_rlog_shard, #{shard => Data#d.shard, reason => _Reason}),
ok.
format_status(Status) ->
maps:map(fun(data, Data) ->
format_data(Data);
(messages, Msgs) ->
lists:sublist(Msgs, 10);
(_Key, Value) ->
Value
end,
Status).
%%================================================================================
%% Internal exports
%%================================================================================
%% This function is called by the remote core node.
-spec push_tlog_entry(mria_rlog:transport(), mria_rlog:shard(), mria_lib:subscriber(), mria_rlog:entry()) -> ok.
push_tlog_entry(distr, _Shard, {_Node, Pid}, TLOGEntry) ->
    %% Note: here Pid is remote
ok;
push_tlog_entry(gen_rpc, Shard, {Node, Pid}, TLOGEntry) ->
gen_rpc:ordered_cast({Node, Shard}, ?MODULE, do_push_tlog_entry, [Pid, TLOGEntry]),
ok.
%%================================================================================
%% Internal functions
%%================================================================================
%% @private Consume transactions from the core node
-spec handle_tlog_entry(state(), mria_rlog:entry(), data()) -> fsm_result().
handle_tlog_entry(St, #entry{sender = Agent, seqno = SeqNo, tx = {_Tid, _Transaction} = Tx},
D0 = #d{ agent = Agent
, next_batch_seqno = SeqNo
, importer_ref = ImporterRef
}) ->
?tp(rlog_replica_store_trans,
#{ agent => Agent
, seqno => SeqNo
, transaction => _Transaction
, tid => _Tid
}),
D1 = buffer_tlog_ops(Tx, D0),
D = D1#d{next_batch_seqno = SeqNo + 1},
ImportInProgress = is_reference(ImporterRef),
case {St, ImportInProgress} of
{?bootstrap, _} ->
ImportInProgress = false, % assert
{keep_state, D};
{_, false} ->
            %% Restart replay loop after idle:
async_replay(St, D);
{_, true} ->
{keep_state, D}
end;
handle_tlog_entry(_State, #entry{sender = Agent, seqno = SeqNo},
#d{agent = Agent, next_batch_seqno = MySeqNo})
when SeqNo > MySeqNo ->
    %% Gap in the TLOG. Consuming it now will cause inconsistency, so we must restart.
%% TODO: sometimes it should be possible to restart gracefully to
%% salvage the bootstrapped data.
?tp(error, gap_in_the_tlog, #{ expected_seqno => MySeqNo
, got_seqno => SeqNo
, agent => Agent
}),
error({gap_in_the_tlog, SeqNo, MySeqNo});
handle_tlog_entry(State, #entry{sender = Agent, seqno = SeqNo},
#d{ next_batch_seqno = ExpectedSeqno
, agent = ExpectedAgent
}) ->
?tp(debug, rlog_replica_unexpected_trans,
#{ state => State
, from => Agent
, from_expected => ExpectedAgent
, seqno => SeqNo
, seqno_expected => ExpectedSeqno
}),
keep_state_and_data.
-spec initiate_bootstrap(data()) -> fsm_result().
initiate_bootstrap(D) ->
#d{ shard = Shard
, remote_core_node = Remote
, parent_sup = ParentSup
} = D,
_Pid = mria_replicant_shard_sup:start_bootstrap_client(ParentSup, Shard, Remote, self()),
ReplayqMemOnly = application:get_env(mria, rlog_replayq_mem_only, true),
ReplayqBaseDir = application:get_env(mria, rlog_replayq_dir, "/tmp/rlog"),
ReplayqExtraOpts = application:get_env(mria, rlog_replayq_options, #{}),
Q = replayq:open(ReplayqExtraOpts
#{ mem_only => ReplayqMemOnly
, sizer => fun(_) -> 1 end
, dir => filename:join(ReplayqBaseDir, atom_to_list(Shard))
}),
{keep_state, D#d{replayq = Q}}.
-spec initiate_local_replay(data()) -> fsm_result().
initiate_local_replay(D) ->
async_replay(?local_replay, D).
-spec handle_bootstrap_complete(mria_rlog_server:checkpoint(), data()) -> fsm_result().
handle_bootstrap_complete(Checkpoint, D) ->
?tp(notice, "Bootstrap of the shard is complete",
#{ checkpoint => Checkpoint
, shard => D#d.shard
}),
{next_state, ?local_replay, D#d{ checkpoint = Checkpoint
}}.
-spec handle_agent_down(state(), term(), data()) -> fsm_result().
handle_agent_down(State, Reason, D) ->
?tp(notice, "Remote RLOG agent died",
#{ reason => Reason
, repl_state => State
}),
case State of
?normal ->
{next_state, ?disconnected, D#d{agent = undefined}};
_ ->
%% TODO: Sometimes it should be possible to handle it more gracefully
{stop, {shutdown, agent_died}}
end.
-spec async_replay(state(), data()) -> fsm_result().
async_replay(?bootstrap, Data) ->
%% Should not happen! During bootstrap we must not replay anything.
?unexpected_event_tp(#{ event => async_replay
, state => ?bootstrap
, data => format_data(Data)
}),
error(internal_bootstrap_error);
async_replay(State, D0) ->
D1 = ensure_importer_worker(D0),
#d{ replayq = Q0
, importer_worker = Importer
, importer_ref = false
, shard = Shard
} = D1,
{Q, AckRef, Items} = replayq:pop(Q0, #{count_limit => mria_config:replay_batch_size()}),
ok = replayq:ack(Q, AckRef),
ImportType = case mria_config:dirty_shard(Shard) orelse State =/= ?normal of
true -> dirty;
false -> transaction
end,
%% The reply will arrive asynchronously:
Alias = mria_replica_importer_worker:import_batch(ImportType, Importer, Items),
D = D0#d{replayq = Q, importer_ref = Alias, importer_worker = Importer},
{keep_state, D}.
-spec handle_importer_ack(state(), #imported{}, data()) -> fsm_result().
handle_importer_ack( State
, #imported{ref = Ref}
, D0 = #d{importer_ref = Ref, replayq = Q, shard = Shard}
) when State =:= ?normal;
State =:= ?local_replay ->
mria_status:notify_replicant_replayq_len(Shard, replayq:count(Q)),
D = D0#d{importer_ref = false},
case replayq:is_empty(Q) of
true ->
%% TODO: use a more reliable way to enter normal state
{next_state, ?normal, D};
false ->
async_replay(State, D)
end;
handle_importer_ack(State, Ack, Data) ->
%% Should not happen!
?unexpected_event_tp(#{ event => Ack
, state => State
, data => format_data(Data)
}),
error(internal_bootstrap_error).
-spec initiate_reconnect(data()) -> fsm_result().
initiate_reconnect(D0 = #d{shard = Shard, parent_sup = SupPid, importer_ref = Ref}) ->
mria_status:notify_shard_down(Shard),
mria_replicant_shard_sup:stop_importer_worker(SupPid),
flush_importer_acks(Ref),
D1 = close_replayq(D0),
D = D1#d{ agent = undefined
, remote_core_node = undefined
, next_batch_seqno = 0
, importer_worker = undefined
, importer_ref = false
},
{keep_state, D, [{state_timeout, 0, ?reconnect}]}.
%% @private Try connecting to a core node
-spec handle_reconnect(data()) -> fsm_result().
handle_reconnect(D0 = #d{shard = Shard, checkpoint = Checkpoint, parent_sup = ParentSup}) ->
?tp(debug, rlog_replica_reconnect,
#{ node => node()
, shard => Shard
}),
case try_connect(Shard, Checkpoint) of
{ok, _BootstrapNeeded = true, Node, ConnPid, _TableSpecs, SeqNo} ->
D = D0#d{ shard = Shard
, parent_sup = ParentSup
, agent = ConnPid
, remote_core_node = Node
, next_batch_seqno = SeqNo
},
%% Disable local reads before starting bootstrap:
{next_state, ?bootstrap, D};
{ok, _BootstrapNeeded = false, Node, ConnPid, _TableSpecs, SeqNo} ->
D = D0#d{ shard = Shard
, parent_sup = ParentSup
, agent = ConnPid
, remote_core_node = Node
, checkpoint = Checkpoint
, next_batch_seqno = SeqNo
},
{next_state, ?normal, D};
{error, Err} ->
?tp(debug, "Replicant couldn't connect to the upstream node",
#{ reason => Err
}),
ReconnectTimeout = application:get_env(mria, rlog_replica_reconnect_interval, 5000),
{keep_state_and_data, [{state_timeout, ReconnectTimeout, ?reconnect}]}
end.
-spec try_connect(mria_rlog:shard(), mria_rlog_server:checkpoint()) ->
{ ok
, boolean()
, node()
, pid()
, [mria_schema:entry()]
, integer()
}
| {error, term()}.
try_connect(Shard, Checkpoint) ->
try_connect(mria_lib:shuffle(mria_rlog:core_nodes()), Shard, Checkpoint).
-spec try_connect([node()], mria_rlog:shard(), mria_rlog_server:checkpoint()) ->
{ ok
, boolean()
, node()
, pid()
, [mria_schema:entry()]
, integer()
}
| {error, term()}.
try_connect([], _, _) ->
{error, no_core_available};
try_connect([Node|Rest], Shard, Checkpoint) ->
?tp(info, "Trying to connect to the core node",
#{ node => Node
}),
case mria_rlog:subscribe(Shard, Node, self(), Checkpoint) of
{ok, NeedBootstrap, Agent, TableSpecs, SeqNo} ->
?tp(notice, "Connected to the core node",
#{ shard => Shard
, node => Node
, seqno => SeqNo
}),
link(Agent),
{ok, NeedBootstrap, Node, Agent, TableSpecs, SeqNo};
Err ->
?tp(info, "Failed to connect to the core node",
#{ node => Node
, reason => Err
}),
try_connect(Rest, Shard, Checkpoint)
end.
-spec buffer_tlog_ops(mria_rlog:tx(), data()) -> data().
buffer_tlog_ops(Transaction, D = #d{replayq = Q0, shard = Shard}) ->
Q = replayq:append(Q0, [Transaction]),
mria_status:notify_replicant_replayq_len(Shard, replayq:count(Q)),
D#d{replayq = Q}.
-spec enter_normal(data()) -> fsm_result().
enter_normal(D = #d{shard = Shard, agent = Agent}) ->
%% Now we can enable local reads:
set_where_to_read(Shard, node()),
mria_status:notify_shard_up(Shard, Agent),
?tp(notice, "Shard fully up",
#{ node => node()
, shard => D#d.shard
}),
keep_state_and_data.
-spec handle_unknown(term(), term(), state(), data()) -> fsm_result().
handle_unknown(EventType, Event, State, Data) ->
?unexpected_event_tp(#{ event_type => EventType
, event => Event
, state => State
, data => format_data(Data)
}),
keep_state_and_data.
handle_state_trans(OldState, State, #d{shard = Shard}) ->
?tp(info, state_change,
#{ from => OldState
, to => State
, shard => Shard
}),
mria_status:notify_replicant_state(Shard, State),
keep_state_and_data.
-spec do_push_tlog_entry(pid(), mria_rlog:entry()) -> ok.
do_push_tlog_entry(Pid, TLOGEntry) ->
?tp(receive_tlog_entry,
#{ entry => TLOGEntry
}),
Pid ! TLOGEntry,
ok.
-spec format_data(#d{}) -> map().
format_data(D) ->
FieldNames = record_info(fields, d),
[_|Fields] = tuple_to_list(D),
maps:from_list([{Field, Val} || {Field, Val} <- lists:zip(FieldNames, Fields),
Field =/= replayq]).
-spec set_where_to_read(mria_rlog:shard(), node()) -> ok.
set_where_to_read(Shard, Node) ->
[mria_mnesia:set_where_to_read(Node, Tab) || Tab <- mria_schema:tables_of_shard(Shard)],
ok.
close_replayq(D = #d{replayq = RQ}) ->
case RQ of
undefined ->
D;
_ ->
replayq:close(RQ),
D#d{replayq = undefined}
end.
ensure_importer_worker(D = #d{importer_worker = Pid}) when is_pid(Pid) ->
D;
ensure_importer_worker(D = #d{shard = Shard, parent_sup = Parent, next_batch_seqno = SeqNo}) ->
Pid = mria_replicant_shard_sup:start_importer_worker(Parent, Shard, SeqNo),
D#d{importer_worker = Pid}.
flush_importer_acks(Ref) ->
receive
#imported{ref = Ref} -> ok
after 0 -> ok
end.
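%% Illustrative sketch (not part of the original module): a replica is normally
%% started by its shard supervisor, roughly as
%%   {ok, Pid} = mria_rlog_replica:start_link(ParentSup, my_shard),
%% where my_shard is a placeholder shard name; the process then registers
%% itself locally under that name.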
a49a4f81bea6e845b8d2a7f1f33be4344ef08b4a9f48dae5ee088b3e96de96f6 | karen/haskell-book | Semigroup.hs | import Data.Semigroup
import Test.QuickCheck(quickCheck,
Arbitrary,
arbitrary,
frequency,
elements)
data Trivial = Trivial deriving (Eq, Show)
instance Semigroup Trivial where
_ <> _ = Trivial
instance Arbitrary Trivial where
arbitrary = return Trivial
semigroupAssoc :: (Eq m, Semigroup m) => m -> m -> m -> Bool
semigroupAssoc a b c = (a <> (b <> c)) == ((a <> b) <> c)
type TrivialAssoc = Trivial -> Trivial -> Trivial -> Bool
--------------------------------------------------------------------------------
newtype Identity a = Identity a deriving (Eq, Show)
instance Semigroup (Identity a) where
x <> _ = x
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = do
x <- arbitrary
return (Identity x)
type IdentityAssoc a = Identity a -> Identity a -> Identity a -> Bool
--------------------------------------------------------------------------------
data Two a b = Two a b deriving (Eq, Show)
instance Semigroup (Two a b) where
(Two x y) <> (Two x' y') = (Two x y')
instance (Arbitrary a, Arbitrary b) => Arbitrary (Two a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
return $ Two x y
type TwoAssoc a b = Two a b -> Two a b -> Two a b -> Bool
data Three a b c = Three a b c deriving (Eq, Show)
instance Semigroup (Three a b c) where
(Three x y z) <> (Three x' y' z') = (Three x y' z)
instance (Arbitrary a, Arbitrary b, Arbitrary c) => Arbitrary (Three a b c) where
arbitrary = do
x <- arbitrary
y <- arbitrary
z <- arbitrary
return $ Three x y z
type ThreeAssoc a b c = Three a b c -> Three a b c -> Three a b c -> Bool
newtype BoolConj = BoolConj Bool deriving (Eq, Show)
instance Semigroup BoolConj where
(BoolConj x) <> (BoolConj y) = BoolConj (x && y)
instance Arbitrary BoolConj where
arbitrary = elements [BoolConj False, BoolConj True]
type BoolConjAssoc = BoolConj -> BoolConj -> BoolConj -> Bool
newtype BoolDisj = BoolDisj Bool deriving (Eq, Show)
instance Semigroup BoolDisj where
(BoolDisj x) <> (BoolDisj y) = BoolDisj (x || y)
instance Arbitrary BoolDisj where
arbitrary = elements [BoolDisj False, BoolDisj True]
type BoolDisjAssoc = BoolDisj -> BoolDisj -> BoolDisj -> Bool
data Or a b = Fst a | Snd b deriving (Eq, Show)
instance Semigroup (Or a b) where
(Snd x) <> _ = Snd x
_ <> (Snd x) = Snd x
_ <> x = x
instance (Arbitrary a, Arbitrary b) => Arbitrary (Or a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ Fst x),
(1, return $ Snd y)]
type OrAssoc a b = Or a b -> Or a b -> Or a b -> Bool
newtype Combine a b = Combine { unCombine :: (a -> b) }
instance (Semigroup b) => Semigroup (Combine a b) where
Combine f <> Combine g = Combine (f <> g)
newtype Comp a = Comp { unComp :: (a -> a) }
instance Semigroup a => Semigroup (Comp a) where
Comp f <> Comp g = Comp $ f <> g
data Validation a b =
Failure a | Success b
deriving (Eq, Show)
instance Semigroup a => Semigroup (Validation a b) where
Success x <> _ = Success x
_ <> Success x = Success x
Failure y <> Failure y' = Failure $ y <> y'
instance (Arbitrary a, Arbitrary b) => Arbitrary (Validation a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ Success x),
(1, return $ Failure y)]
type ValidationAssoc a b = Validation a b -> Validation a b -> Validation a b -> Bool
newtype AccumulateRight a b =
AccumulateRight (Validation a b)
deriving (Eq, Show)
instance Semigroup b => Semigroup (AccumulateRight a b) where
AccumulateRight (Success x) <> AccumulateRight (Success x') = AccumulateRight $ Success $ x <> x'
_ <> AccumulateRight (Success x) = AccumulateRight $ Success x
AccumulateRight (Success x) <> _ = AccumulateRight $ Success x
AccumulateRight (Failure x) <> AccumulateRight (Failure x') = AccumulateRight $ Failure x'
instance (Arbitrary a, Arbitrary b) => Arbitrary (AccumulateRight a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ AccumulateRight $ Success x),
(1, return $ AccumulateRight $ Success y)]
type AccumulateRightAssoc a b = AccumulateRight a b -> AccumulateRight a b -> AccumulateRight a b -> Bool
newtype AccumulateBoth a b =
AccumulateBoth (Validation a b)
deriving (Eq, Show)
instance (Semigroup a, Semigroup b) => Semigroup (AccumulateBoth a b) where
AccumulateBoth (Success x) <> AccumulateBoth (Success x') = AccumulateBoth $ Success $ x <> x'
AccumulateBoth (Failure x) <> AccumulateBoth (Failure x') = AccumulateBoth $ Failure $ x <> x'
_ <> AccumulateBoth (Failure x') = AccumulateBoth $ Failure x'
AccumulateBoth (Failure x) <> _ = AccumulateBoth $ Failure x
instance (Arbitrary a, Arbitrary b) => Arbitrary (AccumulateBoth a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ AccumulateBoth $ Success x),
(1, return $ AccumulateBoth $ Failure y)]
type AccumulateBothAssoc a b = AccumulateBoth a b -> AccumulateBoth a b -> AccumulateBoth a b -> Bool
main :: IO ()
main = quickCheck (semigroupAssoc :: AccumulateBothAssoc String String)
| null | https://raw.githubusercontent.com/karen/haskell-book/90bb80ec3203fde68fc7fda1662d9fc8b509d179/src/ch15/Semigroup.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------ | import Data.Semigroup
import Test.QuickCheck(quickCheck,
Arbitrary,
arbitrary,
frequency,
elements)
data Trivial = Trivial deriving (Eq, Show)
instance Semigroup Trivial where
_ <> _ = Trivial
instance Arbitrary Trivial where
arbitrary = return Trivial
semigroupAssoc :: (Eq m, Semigroup m) => m -> m -> m -> Bool
semigroupAssoc a b c = (a <> (b <> c)) == ((a <> b) <> c)
type TrivialAssoc = Trivial -> Trivial -> Trivial -> Bool
newtype Identity a = Identity a deriving (Eq, Show)
instance Semigroup (Identity a) where
x <> _ = x
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = do
x <- arbitrary
return (Identity x)
type IdentityAssoc a = Identity a -> Identity a -> Identity a -> Bool
data Two a b = Two a b deriving (Eq, Show)
instance Semigroup (Two a b) where
(Two x y) <> (Two x' y') = (Two x y')
instance (Arbitrary a, Arbitrary b) => Arbitrary (Two a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
return $ Two x y
type TwoAssoc a b = Two a b -> Two a b -> Two a b -> Bool
data Three a b c = Three a b c deriving (Eq, Show)
instance Semigroup (Three a b c) where
(Three x y z) <> (Three x' y' z') = (Three x y' z)
instance (Arbitrary a, Arbitrary b, Arbitrary c) => Arbitrary (Three a b c) where
arbitrary = do
x <- arbitrary
y <- arbitrary
z <- arbitrary
return $ Three x y z
type ThreeAssoc a b c = Three a b c -> Three a b c -> Three a b c -> Bool
newtype BoolConj = BoolConj Bool deriving (Eq, Show)
instance Semigroup BoolConj where
(BoolConj x) <> (BoolConj y) = BoolConj (x && y)
instance Arbitrary BoolConj where
arbitrary = elements [BoolConj False, BoolConj True]
type BoolConjAssoc = BoolConj -> BoolConj -> BoolConj -> Bool
newtype BoolDisj = BoolDisj Bool deriving (Eq, Show)
instance Semigroup BoolDisj where
(BoolDisj x) <> (BoolDisj y) = BoolDisj (x || y)
instance Arbitrary BoolDisj where
arbitrary = elements [BoolDisj False, BoolDisj True]
type BoolDisjAssoc = BoolDisj -> BoolDisj -> BoolDisj -> Bool
data Or a b = Fst a | Snd b deriving (Eq, Show)
instance Semigroup (Or a b) where
(Snd x) <> _ = Snd x
_ <> (Snd x) = Snd x
_ <> x = x
instance (Arbitrary a, Arbitrary b) => Arbitrary (Or a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ Fst x),
(1, return $ Snd y)]
type OrAssoc a b = Or a b -> Or a b -> Or a b -> Bool
newtype Combine a b = Combine { unCombine :: (a -> b) }
instance (Semigroup b) => Semigroup (Combine a b) where
Combine f <> Combine g = Combine (f <> g)
newtype Comp a = Comp { unComp :: (a -> a) }
instance Semigroup a => Semigroup (Comp a) where
Comp f <> Comp g = Comp $ f <> g
data Validation a b =
Failure a | Success b
deriving (Eq, Show)
instance Semigroup a => Semigroup (Validation a b) where
Success x <> _ = Success x
_ <> Success x = Success x
Failure y <> Failure y' = Failure $ y <> y'
instance (Arbitrary a, Arbitrary b) => Arbitrary (Validation a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ Success x),
(1, return $ Failure y)]
type ValidationAssoc a b = Validation a b -> Validation a b -> Validation a b -> Bool
newtype AccumulateRight a b =
AccumulateRight (Validation a b)
deriving (Eq, Show)
instance Semigroup b => Semigroup (AccumulateRight a b) where
AccumulateRight (Success x) <> AccumulateRight (Success x') = AccumulateRight $ Success $ x <> x'
_ <> AccumulateRight (Success x) = AccumulateRight $ Success x
AccumulateRight (Success x) <> _ = AccumulateRight $ Success x
AccumulateRight (Failure x) <> AccumulateRight (Failure x') = AccumulateRight $ Failure x'
instance (Arbitrary a, Arbitrary b) => Arbitrary (AccumulateRight a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ AccumulateRight $ Success x),
(1, return $ AccumulateRight $ Failure y)]
type AccumulateRightAssoc a b = AccumulateRight a b -> AccumulateRight a b -> AccumulateRight a b -> Bool
newtype AccumulateBoth a b =
AccumulateBoth (Validation a b)
deriving (Eq, Show)
instance (Semigroup a, Semigroup b) => Semigroup (AccumulateBoth a b) where
AccumulateBoth (Success x) <> AccumulateBoth (Success x') = AccumulateBoth $ Success $ x <> x'
AccumulateBoth (Failure x) <> AccumulateBoth (Failure x') = AccumulateBoth $ Failure $ x <> x'
_ <> AccumulateBoth (Failure x') = AccumulateBoth $ Failure x'
AccumulateBoth (Failure x) <> _ = AccumulateBoth $ Failure x
instance (Arbitrary a, Arbitrary b) => Arbitrary (AccumulateBoth a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
frequency [(1, return $ AccumulateBoth $ Success x),
(1, return $ AccumulateBoth $ Success y)]
type AccumulateBothAssoc a b = AccumulateBoth a b -> AccumulateBoth a b -> AccumulateBoth a b -> Bool
main :: IO ()
main = quickCheck (semigroupAssoc :: AccumulateBothAssoc String String)
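-- Illustrative sketch (added comment, not part of the original exercise file):
-- the semigroupAssoc property defined above can be instantiated at the other
-- types in this module as well, e.g.
-- quickCheck (semigroupAssoc :: BoolConjAssoc)
-- quickCheck (semigroupAssoc :: OrAssoc Int String)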
|
fc3f6462612feaf9bcc34f3f02954a09688fbeba3387ad8c38705b43c25626ed | plandes/cisql | core.clj | (ns ^{:doc "Command line entry point."
:author "Paul Landes"}
zensols.cisql.core
(:require [clojure.string :as s])
(:require [zensols.actioncli.log4j2 :as lu]
[zensols.actioncli.parse :as parse]
[zensols.cisql.conf :as conf]
[zensols.cisql.version :as ver])
(:gen-class :main true))
(defn- print-help [summary]
(with-out-str
(println)
(println (conf/format-intro))
(println)
(print summary)
(flush)))
(defn- version-info []
(println (format "%s (%s)" ver/version ver/gitref)))
(defn- create-action-context []
(parse/single-action-context
'(zensols.cisql.interactive interactive-command)
:version-option (parse/version-option version-info)
:print-help-fn print-help))
(defn -main [& args]
(lu/configure "cisql-log4j2.xml")
(parse/set-program-name "cisql")
(-> (create-action-context)
(parse/process-arguments args)))
| null | https://raw.githubusercontent.com/plandes/cisql/8001864f1596d191386a3787b49e8a640705fee2/src/clojure/zensols/cisql/core.clj | clojure | (ns ^{:doc "Command line entry point."
:author "Paul Landes"}
zensols.cisql.core
(:require [clojure.string :as s])
(:require [zensols.actioncli.log4j2 :as lu]
[zensols.actioncli.parse :as parse]
[zensols.cisql.conf :as conf]
[zensols.cisql.version :as ver])
(:gen-class :main true))
(defn- print-help [summary]
(with-out-str
(println)
(println (conf/format-intro))
(println)
(print summary)
(flush)))
(defn- version-info []
(println (format "%s (%s)" ver/version ver/gitref)))
(defn- create-action-context []
(parse/single-action-context
'(zensols.cisql.interactive interactive-command)
:version-option (parse/version-option version-info)
:print-help-fn print-help))
(defn -main [& args]
(lu/configure "cisql-log4j2.xml")
(parse/set-program-name "cisql")
(-> (create-action-context)
(parse/process-arguments args)))
|
|
fc396334e75e8da88fe38674e815f427b36cc3d1dcac5908f67e37caa6c2d646 | oakes/play-cljc-examples | core.cljc | (ns super-koalio.core
(:require [super-koalio.utils :as utils]
[super-koalio.move :as move]
[clojure.edn :as edn]
[play-cljc.gl.core :as c]
[play-cljc.gl.entities-2d :as e]
[play-cljc.transforms :as t]
#?(:clj [play-cljc.macros-java :refer [gl math]]
:cljs [play-cljc.macros-js :refer-macros [gl math]])
#?(:clj [super-koalio.tiles :as tiles :refer [read-tiled-map]]
:cljs [super-koalio.tiles :as tiles :refer-macros [read-tiled-map]])))
(def koala-width 18)
(def koala-height 26)
(defonce *state (atom {:mouse-x 0
:mouse-y 0
:pressed-keys #{}
:x-velocity 0
:y-velocity 0
:player-x 20
:player-y 0
:player-width 1
:player-height (/ koala-height koala-width)
:can-jump? false
:direction :right
:player-images {}
:player-walk-keys [:walk1 :walk2 :walk3]
:player-image-key :jump
:tiled-map nil
:tiled-map-entity nil
:camera (e/->camera)}))
(def tiled-map (edn/read-string (read-tiled-map "level1.tmx")))
(defn init [game]
;; allow transparency in images
(gl game enable (gl game BLEND))
(gl game blendFunc (gl game SRC_ALPHA) (gl game ONE_MINUS_SRC_ALPHA))
load image
(utils/get-image "koalio.png"
(fn [{:keys [data width height]}]
(let [entity (c/compile game (e/->image-entity game data width height))
images (vec (for [i (range 5)]
(t/crop entity
(* i koala-width)
0
koala-width
koala-height)))
[stand jump walk1 walk2 walk3] images]
;; add it to the state
(swap! *state update :player-images assoc
:stand stand
:jump jump
:walk1 walk1
:walk2 walk2
:walk3 walk3))))
;; load the tiled map
(tiles/load-tiled-map game tiled-map
(fn [tiled-map entity]
(swap! *state assoc :tiled-map tiled-map :tiled-map-entity entity))))
(def screen-entity
{:viewport {:x 0 :y 0 :width 0 :height 0}
:clear {:color [(/ 173 255) (/ 216 255) (/ 230 255) 1] :depth 1}})
(defn tick [game]
(let [state (swap! *state
(fn [state]
(->> state
(move/move game)
(move/prevent-move)
(move/animate game))))
{:keys [pressed-keys
player-x
player-y
player-width
player-height
direction
player-images
player-image-key
tiled-map
tiled-map-entity
camera]} state
[game-width game-height] (utils/get-size game)]
(when (and (pos? game-width) (pos? game-height))
(let [{:keys [map-height]} tiled-map
scaled-tile-size (/ game-height map-height)
offset (/ game-width 2 scaled-tile-size)
camera (t/translate camera (- player-x offset) 0)]
;; render the blue background
(c/render game (update screen-entity :viewport
assoc :width game-width :height game-height))
;; render the tiled map
(when tiled-map-entity
(c/render game (-> tiled-map-entity
(t/project game-width game-height)
(t/scale scaled-tile-size scaled-tile-size)
(t/invert camera))))
;; get the current player image to display
(when-let [player (get player-images player-image-key)]
;; render the player
(c/render game
(-> player
(t/project game-width game-height)
(t/scale scaled-tile-size scaled-tile-size)
(t/invert camera)
(t/translate (cond-> player-x
(= direction :left)
(+ player-width))
player-y)
(t/scale (cond-> player-width
(= direction :left)
(* -1))
player-height)))))))
;; return the game map
game)
| null | https://raw.githubusercontent.com/oakes/play-cljc-examples/0063e0ee234f2a806d0de625b597fdd84f119aaf/super-koalio/src/super_koalio/core.cljc | clojure | allow transparency in images
add it to the state
load the tiled map
render the blue background
render the tiled map
get the current player image to display
render the player
return the game map | (ns super-koalio.core
(:require [super-koalio.utils :as utils]
[super-koalio.move :as move]
[clojure.edn :as edn]
[play-cljc.gl.core :as c]
[play-cljc.gl.entities-2d :as e]
[play-cljc.transforms :as t]
#?(:clj [play-cljc.macros-java :refer [gl math]]
:cljs [play-cljc.macros-js :refer-macros [gl math]])
#?(:clj [super-koalio.tiles :as tiles :refer [read-tiled-map]]
:cljs [super-koalio.tiles :as tiles :refer-macros [read-tiled-map]])))
(def koala-width 18)
(def koala-height 26)
(defonce *state (atom {:mouse-x 0
:mouse-y 0
:pressed-keys #{}
:x-velocity 0
:y-velocity 0
:player-x 20
:player-y 0
:player-width 1
:player-height (/ koala-height koala-width)
:can-jump? false
:direction :right
:player-images {}
:player-walk-keys [:walk1 :walk2 :walk3]
:player-image-key :jump
:tiled-map nil
:tiled-map-entity nil
:camera (e/->camera)}))
(def tiled-map (edn/read-string (read-tiled-map "level1.tmx")))
(defn init [game]
(gl game enable (gl game BLEND))
(gl game blendFunc (gl game SRC_ALPHA) (gl game ONE_MINUS_SRC_ALPHA))
load image
(utils/get-image "koalio.png"
(fn [{:keys [data width height]}]
(let [entity (c/compile game (e/->image-entity game data width height))
images (vec (for [i (range 5)]
(t/crop entity
(* i koala-width)
0
koala-width
koala-height)))
[stand jump walk1 walk2 walk3] images]
(swap! *state update :player-images assoc
:stand stand
:jump jump
:walk1 walk1
:walk2 walk2
:walk3 walk3))))
(tiles/load-tiled-map game tiled-map
(fn [tiled-map entity]
(swap! *state assoc :tiled-map tiled-map :tiled-map-entity entity))))
(def screen-entity
{:viewport {:x 0 :y 0 :width 0 :height 0}
:clear {:color [(/ 173 255) (/ 216 255) (/ 230 255) 1] :depth 1}})
(defn tick [game]
(let [state (swap! *state
(fn [state]
(->> state
(move/move game)
(move/prevent-move)
(move/animate game))))
{:keys [pressed-keys
player-x
player-y
player-width
player-height
direction
player-images
player-image-key
tiled-map
tiled-map-entity
camera]} state
[game-width game-height] (utils/get-size game)]
(when (and (pos? game-width) (pos? game-height))
(let [{:keys [map-height]} tiled-map
scaled-tile-size (/ game-height map-height)
offset (/ game-width 2 scaled-tile-size)
camera (t/translate camera (- player-x offset) 0)]
(c/render game (update screen-entity :viewport
assoc :width game-width :height game-height))
(when tiled-map-entity
(c/render game (-> tiled-map-entity
(t/project game-width game-height)
(t/scale scaled-tile-size scaled-tile-size)
(t/invert camera))))
(when-let [player (get player-images player-image-key)]
(c/render game
(-> player
(t/project game-width game-height)
(t/scale scaled-tile-size scaled-tile-size)
(t/invert camera)
(t/translate (cond-> player-x
(= direction :left)
(+ player-width))
player-y)
(t/scale (cond-> player-width
(= direction :left)
(* -1))
player-height)))))))
game)
|
60f43bc2611818294924de7945299bb8089c21c686b1c34e98d457a9b8f29138 | semilin/layoup | sturdE.lisp |
(MAKE-LAYOUT :NAME "sturdE" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/sturdE.lisp | lisp |
(MAKE-LAYOUT :NAME "sturdE" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) |
|
50703975b28b22e92606317e6d2dde00be893fd47f84eac963bcd8f531fc82e4 | TeMPOraL/alice | suites.lisp | (in-package #:alice-tests)
(defparameter *suites* '())
(defun run-all ()
(mapc 'fiveam:run! *suites*)
(format t "DONE!~%"))
(defun add-suite (suite)
(setf *suites* (remove-duplicates (append (list suite) *suites*))))
| null | https://raw.githubusercontent.com/TeMPOraL/alice/4621a53ccd459bebf0b34c531dab49f7b42f35c7/tests/suites.lisp | lisp | (in-package #:alice-tests)
(defparameter *suites* '())
(defun run-all ()
(mapc 'fiveam:run! *suites*)
(format t "DONE!~%"))
(defun add-suite (suite)
(setf *suites* (remove-duplicates (append (list suite) *suites*))))
|
|
82b7353abb7cf1b7fe12dadcaa1f912633a72087c4f832572a288f88b73a7f16 | repl-electric/.sonic-pi | init.sps | #key: init
#point_line:1
#point_index:0
# --
["instruments","shaderview","experiments", "log"].each{|f| load "/Users/josephwilk/Workspace/repl-electric/live-coding-space/lib/#{f}.rb"}; _=nil
load_snippets("~/.sonic-pi/snippets/")
| null | https://raw.githubusercontent.com/repl-electric/.sonic-pi/a00c733f0a5fa1fa0aa65bf06fe7ab71654d2da9/snippets/init.sps | scheme | _=nil | #key: init
#point_line:1
#point_index:0
# --
load_snippets("~/.sonic-pi/snippets/")
|
01a7b3e7c9541891869d6a1fdf6380f2a16b95e7e08e96579f3d889f5194e9f9 | jacobobryant/flub | core.cljc | (ns flub.core
(:require
[clojure.pprint :as pp]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.walk :refer [postwalk]]
[clojure.tools.namespace.repl :as tn-repl]
#?@(:clj [[clojure.java.shell :as shell]])))
(defonce system (atom nil))
(defn refresh []
(let [{:keys [flub/after-refresh flub/stop]} @system]
(doseq [f stop]
(f))
(tn-repl/refresh :after after-refresh)))
(defn start-system [config components]
(reset! system (merge {:flub/stop '()} config))
(reduce (fn [_ f]
(reset! system (f @system)))
nil
components))
(defn read-env [env-keys]
(->> env-keys
(keep (fn [[env-key clj-key coerce]]
(when-some [v (System/getenv env-key)]
[clj-key ((or coerce identity) v)])))
(into {})))
(defn map-kv [f m]
(into {} (map (fn [[k v]] (f k v)) m)))
(defn map-keys [f m]
(map-kv (fn [k v] [(f k) v]) m))
(defn map-vals [f m]
(map-kv (fn [k v] [k (f v)]) m))
(defn map-from-to [f g xs]
(->> xs
(map (juxt f g))
(into {})))
(defn map-from [f xs]
(map-from-to f identity xs))
(defn map-to [f xs]
(map-from-to identity f xs))
(defn pprint [x]
(binding [*print-namespace-maps* false]
(pp/pprint x)))
(defn ppr-str [x]
(with-out-str (pprint x)))
(defn only-keys [& {:keys [req opt req-un opt-un]}]
(let [all-keys (->> (concat req-un opt-un)
(map (comp keyword name))
(concat req opt))]
(s/and
map?
#(= % (select-keys % all-keys))
(eval `(s/keys :req ~req :opt ~opt :req-un ~req-un :opt-un ~opt-un)))))
(defn assoc-pred
"Like assoc, but skip kv pairs where (f v) is false."
[m f & kvs]
(if-some [kvs (some->> kvs
(partition 2)
(filter (comp f second))
(apply concat)
not-empty)]
(apply assoc m kvs)
m))
(defn assoc-some [m & kvs]
(apply assoc-pred m some? kvs))
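;; Illustrative usage sketch (added comment, not part of the original namespace):
;; both helpers drop the kv pairs whose value fails the predicate.
(comment
  (assoc-pred {} some? :a 1 :b nil) ;=> {:a 1}
  (assoc-some {} :a 1 :b nil))      ;=> {:a 1}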
(defn anomaly? [x]
(s/valid? (s/keys :req [:cognitect.anomalies/category] :opt [:cognitect.anomalies/message]) x))
(defn anom [category & [message & kvs]]
(apply assoc-some
{:cognitect.anomalies/category (keyword "cognitect.anomalies" (name category))}
:cognitect.anomalies/message message
kvs))
(defn ns-contains? [nspace sym]
(and (namespace sym)
(let [segments (str/split (name nspace) #"\.")]
(= segments (take (count segments) (str/split (namespace sym) #"\."))))))
(defn select-as [m key-map]
(-> m
(select-keys (keys key-map))
(set/rename-keys key-map)))
(defn select-ns [m nspace]
(select-keys m (filter #(ns-contains? nspace (symbol %)) (keys m))))
(defn ns-parts [nspace]
(if (nil? nspace)
[]
(some-> nspace
str
not-empty
(str/split #"\."))))
(defn select-ns-as [m ns-from ns-to]
(map-keys
(fn [k]
(let [new-ns-parts (->> (ns-parts (namespace k))
(drop (count (ns-parts ns-from)))
(concat (ns-parts ns-to)))]
(if (empty? new-ns-parts)
(keyword (name k))
(keyword (str/join "." new-ns-parts) (name k)))))
(select-ns m ns-from)))
(defn prepend-ns [ns-segment k]
(keyword
(cond-> ns-segment
(not-empty (namespace k)) (str "." (namespace k)))
(name k)))
(defn prepend-keys [ns-segment m]
(map-keys #(prepend-ns ns-segment %) m))
(defn nest-string-keys [m ks]
(let [ks (set ks)]
(reduce (fn [resp [k v]]
(let [nested-k (keyword (namespace k))]
(if (ks nested-k)
(-> resp
(update nested-k assoc (name k) v)
(dissoc k))
resp)))
m
m)))
(defn add-seconds [date seconds]
#?(:clj (java.util.Date/from (.plusSeconds (.toInstant date) seconds))
:cljs (js/Date. (+ (.getTime date) (* 1000 seconds)))))
(defn concrete [x]
(cond
(var? x) @x
(fn? x) (x)
:default x))
(defn split-by [pred xs]
(reduce #(update %1 (if (pred %2) 0 1) (fnil conj []) %2)
[nil nil] xs))
(defn compare= [x y]
(= 0 (compare x y)))
(defn compare< [x y]
(= -1 (compare x y)))
(defn compare> [x y]
(= 1 (compare x y)))
(defn compare<= [x y]
(or (compare< x y) (compare= x y)))
(defn compare>= [x y]
(or (compare> x y) (compare= x y)))
(defn join [sep xs]
(butlast (interleave xs (repeat sep))))
(defn pad [n _val coll]
(take n (concat coll (repeat _val))))
(defn format-columns
"Formats rows of text into columns.
Example:
```
(doseq [row (format-columns [[\"hellooooooooo \" \"there\"]
[\"foo \" \"bar\"]
[\"one column\"]])]
(println row))
hellooooooooo there
foo bar
one column
```"
[rows]
(let [n-cols (apply max (map count rows))
rows (map (partial pad n-cols " ") rows)
lens (apply map (fn [& column-parts]
(apply max (map count column-parts)))
rows)
fmt (str/join (map #(str "%" (when (not (zero? %)) (str "-" %)) "s") lens))]
(->> rows
(map #(apply (partial format fmt) %))
(map str/trimr))))
(defn print-table
"Prints a nicely formatted table.
Example:
```
(print-table
[[:foo \"Foo\"] [:bar \"Bar\"]]
[{:foo 1 :bar 2} {:foo 3 :bar 4}])
=> Foo Bar
1 2
3 4
```"
[header-info table]
(let [[ks header] (apply map vector header-info)
header (map #(str % " ") header)
body (->> table
(map (apply juxt ks))
(map (fn [row] (map #(str % " ") row))))
rows (concat [header] body)]
(doseq [row (format-columns rows)]
(println row))))
(defn between-hours? [t h1 h2]
(let [hours (/ (mod (quot (inst-ms t) (* 1000 60)) (* 60 24)) 60.0)]
(<= h1 hours h2)))
(defn day-of-week [t]
(-> (inst-ms t)
(quot (* 1000 60 60))
(- (* 24 3) 8)
(quot 24)
(mod 7)))
(defn distinct-by [f coll]
(let [step (fn step [xs seen]
(lazy-seq
((fn [[x :as xs] seen]
(when-let [s (seq xs)]
(let [fx (f x)]
(if (contains? seen fx)
(recur (rest s) seen)
(cons x (step (rest s) (conj seen fx)))))))
xs seen)))]
(step coll #{})))
(def rfc3339 "yyyy-MM-dd'T'HH:mm:ss.SSSXXX")
#?(:clj
(do
(defn base64-encode [bs]
(.encodeToString (java.util.Base64/getEncoder)
bs))
(defn base64-decode [s]
(.decode (java.util.Base64/getDecoder) s))
(defn parse-format-date [date in-format out-format]
(cond->> date
in-format (.parse (new java.text.SimpleDateFormat in-format))
out-format (.format (new java.text.SimpleDateFormat out-format))))
(defn parse-date
([date]
(parse-date date rfc3339))
([date in-format]
(parse-format-date date in-format nil)))
(defn format-date
([date]
(format-date date rfc3339))
([date out-format]
(parse-format-date date nil out-format)))
(defn last-midnight [t]
(-> t
inst-ms
(quot (* 1000 60 60 24))
(* 1000 60 60 24)
(java.util.Date.)))
(defn take-str [n s]
(some->> s (take n) (str/join "")))
(defn ellipsize [n s]
(cond-> (take-str n s)
(< n (count s)) (str "…")))
(defn sppit [f x]
(spit f (ppr-str x)))
(defn sh
"Runs a shell command.
Returns the output if successful; otherwise, throws an exception."
[& args]
(let [result (apply shell/sh args)]
(if (= 0 (:exit result))
(:out result)
(throw (ex-info (:err result) result)))))
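;; Illustrative usage sketch (added comment, not part of the original namespace):
;; a succeeding command returns its stdout, a failing one throws ex-info whose
;; ex-data is the full shell result map. The commands shown are hypothetical.
(comment
  (sh "echo" "hi") ;=> "hi\n"
  (sh "false"))    ;; throws ex-info carrying {:exit 1 ...}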
(defmacro sdefs [& body]
`(do
~@(for [form (partition 2 body)]
`(s/def ~@form))))
(defmacro fix-stdout [& body]
`(let [ret# (atom nil)
s# (with-out-str
(reset! ret# (do ~@body)))]
(some->> s#
not-empty
(.print System/out))
@ret#))
(defn add-deref [form syms]
(postwalk
#(cond->> %
(syms %) (list deref))
form))
(defmacro letdelay [bindings & body]
(let [[bindings syms] (->> bindings
(partition 2)
(reduce (fn [[bindings syms] [sym form]]
[(into bindings [sym `(delay ~(add-deref form syms))])
(conj syms sym)])
[[] #{}]))]
`(let ~bindings
~@(add-deref body syms))))
(defmacro catchall [& body]
`(try ~@body (catch Exception ~'_ nil)))
(defmacro verbose [& body]
`(try ~@body
(catch Exception e#
(.printStackTrace e#))))
(defmacro pprint-ex [& body]
`(try
(pprint ~@body)
(catch ~'Exception e#
(.printStackTrace e#)))))
:cljs
(do
(defn chan? [x]
(satisfies? (requiring-resolve 'cljs.core.async.impl.protocols/ReadPort) x))))
| null | https://raw.githubusercontent.com/jacobobryant/flub/704d79fd55237ef7babdb97b37947675d0c8d2c2/core/src/flub/core.cljc | clojure | otherwise, throws an exception." | (ns flub.core
(:require
[clojure.pprint :as pp]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.string :as str]
[clojure.walk :refer [postwalk]]
[clojure.tools.namespace.repl :as tn-repl]
#?@(:clj [[clojure.java.shell :as shell]])))
(defonce system (atom nil))
(defn refresh []
(let [{:keys [flub/after-refresh flub/stop]} @system]
(doseq [f stop]
(f))
(tn-repl/refresh :after after-refresh)))
(defn start-system [config components]
(reset! system (merge {:flub/stop '()} config))
(reduce (fn [_ f]
(reset! system (f @system)))
nil
components))
(defn read-env [env-keys]
(->> env-keys
(keep (fn [[env-key clj-key coerce]]
(when-some [v (System/getenv env-key)]
[clj-key ((or coerce identity) v)])))
(into {})))
(defn map-kv [f m]
(into {} (map (fn [[k v]] (f k v)) m)))
(defn map-keys [f m]
(map-kv (fn [k v] [(f k) v]) m))
(defn map-vals [f m]
(map-kv (fn [k v] [k (f v)]) m))
(defn map-from-to [f g xs]
(->> xs
(map (juxt f g))
(into {})))
(defn map-from [f xs]
(map-from-to f identity xs))
(defn map-to [f xs]
(map-from-to identity f xs))
(defn pprint [x]
(binding [*print-namespace-maps* false]
(pp/pprint x)))
(defn ppr-str [x]
(with-out-str (pprint x)))
(defn only-keys [& {:keys [req opt req-un opt-un]}]
(let [all-keys (->> (concat req-un opt-un)
(map (comp keyword name))
(concat req opt))]
(s/and
map?
#(= % (select-keys % all-keys))
(eval `(s/keys :req ~req :opt ~opt :req-un ~req-un :opt-un ~opt-un)))))
(defn assoc-pred
"Like assoc, but skip kv pairs where (f v) is false."
[m f & kvs]
(if-some [kvs (some->> kvs
(partition 2)
(filter (comp f second))
(apply concat)
not-empty)]
(apply assoc m kvs)
m))
(defn assoc-some [m & kvs]
(apply assoc-pred m some? kvs))
(defn anomaly? [x]
(s/valid? (s/keys :req [:cognitect.anomalies/category] :opt [:cognitect.anomalies/message]) x))
(defn anom [category & [message & kvs]]
(apply assoc-some
{:cognitect.anomalies/category (keyword "cognitect.anomalies" (name category))}
:cognitect.anomalies/message message
kvs))
(defn ns-contains? [nspace sym]
(and (namespace sym)
(let [segments (str/split (name nspace) #"\.")]
(= segments (take (count segments) (str/split (namespace sym) #"\."))))))
(defn select-as [m key-map]
(-> m
(select-keys (keys key-map))
(set/rename-keys key-map)))
(defn select-ns [m nspace]
(select-keys m (filter #(ns-contains? nspace (symbol %)) (keys m))))
(defn ns-parts [nspace]
(if (nil? nspace)
[]
(some-> nspace
str
not-empty
(str/split #"\."))))
(defn select-ns-as [m ns-from ns-to]
(map-keys
(fn [k]
(let [new-ns-parts (->> (ns-parts (namespace k))
(drop (count (ns-parts ns-from)))
(concat (ns-parts ns-to)))]
(if (empty? new-ns-parts)
(keyword (name k))
(keyword (str/join "." new-ns-parts) (name k)))))
(select-ns m ns-from)))
(defn prepend-ns [ns-segment k]
(keyword
(cond-> ns-segment
(not-empty (namespace k)) (str "." (namespace k)))
(name k)))
(defn prepend-keys [ns-segment m]
(map-keys #(prepend-ns ns-segment %) m))
(defn nest-string-keys [m ks]
(let [ks (set ks)]
(reduce (fn [resp [k v]]
(let [nested-k (keyword (namespace k))]
(if (ks nested-k)
(-> resp
(update nested-k assoc (name k) v)
(dissoc k))
resp)))
m
m)))
(defn add-seconds [date seconds]
#?(:clj (java.util.Date/from (.plusSeconds (.toInstant date) seconds))
:cljs (js/Date. (+ (.getTime date) (* 1000 seconds)))))
(defn concrete [x]
(cond
(var? x) @x
(fn? x) (x)
:default x))
(defn split-by [pred xs]
(reduce #(update %1 (if (pred %2) 0 1) (fnil conj []) %2)
[nil nil] xs))
(defn compare= [x y]
(= 0 (compare x y)))
(defn compare< [x y]
(= -1 (compare x y)))
(defn compare> [x y]
(= 1 (compare x y)))
(defn compare<= [x y]
(or (compare< x y) (compare= x y)))
(defn compare>= [x y]
(or (compare> x y) (compare= x y)))
(defn join [sep xs]
(butlast (interleave xs (repeat sep))))
(defn pad [n _val coll]
(take n (concat coll (repeat _val))))
(defn format-columns
"Formats rows of text into columns.
Example:
```
(doseq [row (format-columns [[\"hellooooooooo \" \"there\"]
[\"foo \" \"bar\"]
[\"one column\"]])]
(println row))
hellooooooooo there
foo bar
one column
```"
[rows]
(let [n-cols (apply max (map count rows))
rows (map (partial pad n-cols " ") rows)
lens (apply map (fn [& column-parts]
(apply max (map count column-parts)))
rows)
fmt (str/join (map #(str "%" (when (not (zero? %)) (str "-" %)) "s") lens))]
(->> rows
(map #(apply (partial format fmt) %))
(map str/trimr))))
(defn print-table
"Prints a nicely formatted table.
Example:
```
(print-table
[[:foo \"Foo\"] [:bar \"Bar\"]]
[{:foo 1 :bar 2} {:foo 3 :bar 4}])
=> Foo Bar
1 2
3 4
```"
[header-info table]
(let [[ks header] (apply map vector header-info)
header (map #(str % " ") header)
body (->> table
(map (apply juxt ks))
(map (fn [row] (map #(str % " ") row))))
rows (concat [header] body)]
(doseq [row (format-columns rows)]
(println row))))
(defn between-hours? [t h1 h2]
(let [hours (/ (mod (quot (inst-ms t) (* 1000 60)) (* 60 24)) 60.0)]
(<= h1 hours h2)))
(defn day-of-week [t]
(-> (inst-ms t)
(quot (* 1000 60 60))
(- (* 24 3) 8)
(quot 24)
(mod 7)))
(defn distinct-by [f coll]
(let [step (fn step [xs seen]
(lazy-seq
((fn [[x :as xs] seen]
(when-let [s (seq xs)]
(let [fx (f x)]
(if (contains? seen fx)
(recur (rest s) seen)
(cons x (step (rest s) (conj seen fx)))))))
xs seen)))]
(step coll #{})))
(def rfc3339 "yyyy-MM-dd'T'HH:mm:ss.SSSXXX")
#?(:clj
(do
(defn base64-encode [bs]
(.encodeToString (java.util.Base64/getEncoder)
bs))
(defn base64-decode [s]
(.decode (java.util.Base64/getDecoder) s))
(defn parse-format-date [date in-format out-format]
(cond->> date
in-format (.parse (new java.text.SimpleDateFormat in-format))
out-format (.format (new java.text.SimpleDateFormat out-format))))
(defn parse-date
([date]
(parse-date date rfc3339))
([date in-format]
(parse-format-date date in-format nil)))
(defn format-date
([date]
(format-date date rfc3339))
([date out-format]
(parse-format-date date nil out-format)))
(defn last-midnight [t]
(-> t
inst-ms
(quot (* 1000 60 60 24))
(* 1000 60 60 24)
(java.util.Date.)))
(defn take-str [n s]
(some->> s (take n) (str/join "")))
(defn ellipsize [n s]
(cond-> (take-str n s)
(< n (count s)) (str "…")))
(defn sppit [f x]
(spit f (ppr-str x)))
(defn sh
"Runs a shell command.
[& args]
(let [result (apply shell/sh args)]
(if (= 0 (:exit result))
(:out result)
(throw (ex-info (:err result) result)))))
(defmacro sdefs [& body]
`(do
~@(for [form (partition 2 body)]
`(s/def ~@form))))
(defmacro fix-stdout [& body]
`(let [ret# (atom nil)
s# (with-out-str
(reset! ret# (do ~@body)))]
(some->> s#
not-empty
(.print System/out))
@ret#))
(defn add-deref [form syms]
(postwalk
#(cond->> %
(syms %) (list deref))
form))
(defmacro letdelay [bindings & body]
(let [[bindings syms] (->> bindings
(partition 2)
(reduce (fn [[bindings syms] [sym form]]
[(into bindings [sym `(delay ~(add-deref form syms))])
(conj syms sym)])
[[] #{}]))]
`(let ~bindings
~@(add-deref body syms))))
(defmacro catchall [& body]
`(try ~@body (catch Exception ~'_ nil)))
(defmacro verbose [& body]
`(try ~@body
(catch Exception e#
(.printStackTrace e#))))
(defmacro pprint-ex [& body]
`(try
(pprint ~@body)
(catch ~'Exception e#
(.printStackTrace e#)))))
:cljs
(do
(defn chan? [x]
(satisfies? (requiring-resolve 'cljs.core.async.impl.protocols/ReadPort) x))))
|
88a3eaec3ced3da773727d1fd48f975abe3f075499755f4099ab77fe03c27b76 | HeinrichApfelmus/reactive-banana | Test.hs | # LANGUAGE RecursiveDo #
{-----------------------------------------------------------------------------
reactive-banana
------------------------------------------------------------------------------}
module Reactive.Banana.Prim.Mid.Test where
import Reactive.Banana.Prim.Mid
main :: IO ()
main = test_space1
{-----------------------------------------------------------------------------
Functionality tests
------------------------------------------------------------------------------}
test_accumL1 :: Pulse Int -> BuildIO (Pulse Int)
test_accumL1 p1 = liftBuild $ do
p2 <- mapP (+) p1
(l1,_) <- accumL 0 p2
let l2 = mapL const l1
applyP l2 p1
test_recursion1 :: Pulse () -> BuildIO (Pulse Int)
test_recursion1 p1 = liftBuild $ mdo
p2 <- applyP l2 p1
p3 <- mapP (const (+1)) p2
~(l1,_) <- accumL (0::Int) p3
let l2 = mapL const l1
return p2
-- test garbage collection
{-----------------------------------------------------------------------------
Space leak tests
------------------------------------------------------------------------------}
test_space1 :: IO ()
test_space1 = runSpaceProfile test_accumL1 [1::Int .. 2 * 10 ^ (4 :: Int)]
test_space2 :: IO ()
test_space2 = runSpaceProfile test_recursion1 $ () <$ [1::Int .. 2 * 10 ^ (4 :: Int)]
| null | https://raw.githubusercontent.com/HeinrichApfelmus/reactive-banana/440e9df76d66c2e5e0d550889d8da36fe3d2c7d2/reactive-banana/src/Reactive/Banana/Prim/Mid/Test.hs | haskell | ----------------------------------------------------------------------------
reactive-banana
-----------------------------------------------------------------------------
----------------------------------------------------------------------------
Functionality tests
-----------------------------------------------------------------------------
test garbage collection
----------------------------------------------------------------------------
Space leak tests
----------------------------------------------------------------------------- | # LANGUAGE RecursiveDo #
module Reactive.Banana.Prim.Mid.Test where
import Reactive.Banana.Prim.Mid
main :: IO ()
main = test_space1
test_accumL1 :: Pulse Int -> BuildIO (Pulse Int)
test_accumL1 p1 = liftBuild $ do
p2 <- mapP (+) p1
(l1,_) <- accumL 0 p2
let l2 = mapL const l1
applyP l2 p1
test_recursion1 :: Pulse () -> BuildIO (Pulse Int)
test_recursion1 p1 = liftBuild $ mdo
p2 <- applyP l2 p1
p3 <- mapP (const (+1)) p2
~(l1,_) <- accumL (0::Int) p3
let l2 = mapL const l1
return p2
test_space1 :: IO ()
test_space1 = runSpaceProfile test_accumL1 [1::Int .. 2 * 10 ^ (4 :: Int)]
test_space2 :: IO ()
test_space2 = runSpaceProfile test_recursion1 $ () <$ [1::Int .. 2 * 10 ^ (4 :: Int)]
|
4cb4dbaa675fe4b0febb9df8ebcc8aa49876fea4384506c41bfcc5ee009454eb | nasser/magic | run.clj | (ns run
(:refer-clojure :exclude [compile])
(:require
[magic.core :as magic]
[magic.analyzer :as ana]
[mage.core :as il]))
(defn compile
"Compile file to file.dll using magic"
[file]
(il/emit!
(il/assembly+module
(str file)
(-> (str file)
(slurp :encoding "utf8")
read-string
ana/analyze
magic/compile)))
(println (str "Built " file ".dll")))
#_
(defn tests
"Run analysis tests"
[]
(println "test-ctors...")
(g/test-ctors)
(println "test-fields...")
(g/test-fields)
(println "test-properties...")
(g/test-properties)
(println "test-methods...")
(g/test-methods)) | null | https://raw.githubusercontent.com/nasser/magic/7a46f773bc7785c82d9527d52c1a8c28ac16e195/run.clj | clojure | (ns run
(:refer-clojure :exclude [compile])
(:require
[magic.core :as magic]
[magic.analyzer :as ana]
[mage.core :as il]))
(defn compile
"Compile file to file.dll using magic"
[file]
(il/emit!
(il/assembly+module
(str file)
(-> (str file)
(slurp :encoding "utf8")
read-string
ana/analyze
magic/compile)))
(println (str "Built " file ".dll")))
#_
(defn tests
"Run analysis tests"
[]
(println "test-ctors...")
(g/test-ctors)
(println "test-fields...")
(g/test-fields)
(println "test-properties...")
(g/test-properties)
(println "test-methods...")
(g/test-methods)) |
|
f672ef63e55b4f11fa2bdc16fccbacfac47cd5d190491ebdb56445fb0a4391d3 | ruliana/racket-examples | custodians.rkt | #lang racket
(define (open-and-forget)
(define out (open-output-file
"/tmp/test.txt"
#:exists 'truncate))
(displayln "writing!")
(writeln "can I write?" out))
; === How? ===
(define (do-work a-function)
(define cust (make-custodian))
(define (start) (displayln "->"))
(define (finish)
(displayln "<-")
(custodian-shutdown-all cust))
(parameterize ([current-custodian cust])
(dynamic-wind
start
a-function
finish)))
(let/cc return
(define (fire)
(displayln "fire!")
(return)
(displayln "unfire!"))
(do-work fire))
;(open-and-forget)
(do-work open-and-forget)
| null | https://raw.githubusercontent.com/ruliana/racket-examples/688293c86132f3b5c924360d53238ca352d4cf5b/custodians.rkt | racket | === How? ===
(open-and-forget) | #lang racket
(define (open-and-forget)
(define out (open-output-file
"/tmp/test.txt"
#:exists 'truncate))
(displayln "writing!")
(writeln "can I write?" out))
(define (do-work a-function)
(define cust (make-custodian))
(define (start) (displayln "->"))
(define (finish)
(displayln "<-")
(custodian-shutdown-all cust))
(parameterize ([current-custodian cust])
(dynamic-wind
start
a-function
finish)))
(let/cc return
(define (fire)
(displayln "fire!")
(return)
(displayln "unfire!"))
(do-work fire))
(do-work open-and-forget)
|
b4503e5bb5d393f834da0608239f67a0ecadc3174c3d1d66f4654552e0379e1a | RefactoringTools/HaRe | D3.expected.hs | module AddOneParameter.D3(sumSquares,sumSquares_y) where
{- add parameter 'y' to 'sumSquares'. 'sumSquares_y_1' to be added to the
export list -}
sumSquares y (x:xs) = sq x + (sumSquares y) xs
sumSquares y [] = 0
sumSquares_y = undefined
sq x = x ^ pow
pow =2
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/AddOneParameter/D3.expected.hs | haskell | add parameter 'y' to 'sumSquares'. 'sumSquares_y_1' to be added to the
export list | module AddOneParameter.D3(sumSquares,sumSquares_y) where
sumSquares y (x:xs) = sq x + (sumSquares y) xs
sumSquares y [] = 0
sumSquares_y = undefined
sq x = x ^ pow
pow =2
|
48e679f95c12d94e94fc21fc644f395e4b4d9d9d2b4629f394b9cb18c8cb1d12 | weblocks-framework/weblocks | boolean.lisp |
(in-package :weblocks-test)
;;; Test boolean render-view-field-value
(deftest-html boolean-render-view-field-value-1
(render-view-field-value nil (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:span :class "value" "No"))
(deftest-html boolean-render-view-field-value-2
(render-view-field-value t (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:span :class "value" "Yes"))
;;; Test boolean print-view-field-value
(deftest boolean-print-view-field-value-1
(print-view-field-value nil (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
"No")
(deftest boolean-print-view-field-value-2
(print-view-field-value t (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
"Yes")
;;; Test checkbox render-view-field-value
(deftest-html checkbox-render-view-field-value-1
(render-view-field-value t (make-instance 'checkbox-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:input :name "predicate" :type "checkbox" :class "checkbox" :value "t" :checked "checked"))
;;; Test boolean parse-view-field-value
(deftest boolean-parse-view-field-value-1
(parse-view-field-value (make-instance 'predicate-parser)
"t"
*joe*
(find-view '(form employee))
(make-instance 'form-view-field))
t t t)
;;; Test boolean typespec->view-field-presentation
(deftest boolean-typespec->view-field-presentation-1
(object-class-name
(cadr
(multiple-value-list
(typespec->view-field-presentation (make-instance 'scaffold)
'boolean nil))))
predicate-presentation)
;;; Test boolean typespec->form-view-field-parser
(deftest boolean-typespec->form-view-field-parser-1
(object-class-name
(cadr
(multiple-value-list
(typespec->form-view-field-parser (make-instance 'form-scaffold)
'boolean nil))))
predicate-parser)
(addtest boolean-i18n-1
(ensure-alist-has-keys
(widget-translation-table (make-instance 'predicate-presentation))
(list :true-string :false-string)))
| null | https://raw.githubusercontent.com/weblocks-framework/weblocks/fe96152458c8eb54d74751b3201db42dafe1708b/test/views/types/boolean.lisp | lisp | Test boolean render-view-field-value
Test boolean print-view-field-value
Test checkbox render-view-field-value
Test boolean parse-view-field-value
Test boolean typespec->view-field-presentation |
(in-package :weblocks-test)
(deftest-html boolean-render-view-field-value-1
(render-view-field-value nil (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:span :class "value" "No"))
(deftest-html boolean-render-view-field-value-2
(render-view-field-value t (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:span :class "value" "Yes"))
(deftest boolean-print-view-field-value-1
(print-view-field-value nil (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
"No")
(deftest boolean-print-view-field-value-2
(print-view-field-value t (make-instance 'predicate-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
"Yes")
(deftest-html checkbox-render-view-field-value-1
(render-view-field-value t (make-instance 'checkbox-presentation)
(make-instance 'form-view-field
:slot-name 'predicate)
(find-view '(form employee))
nil *joe*)
(:input :name "predicate" :type "checkbox" :class "checkbox" :value "t" :checked "checked"))
(deftest boolean-parse-view-field-value-1
(parse-view-field-value (make-instance 'predicate-parser)
"t"
*joe*
(find-view '(form employee))
(make-instance 'form-view-field))
t t t)
(deftest boolean-typespec->view-field-presentation-1
(object-class-name
(cadr
(multiple-value-list
(typespec->view-field-presentation (make-instance 'scaffold)
'boolean nil))))
predicate-presentation)
Test boolean typespec->form - view - field - parser
(deftest boolean-typespec->form-view-field-parser-1
(object-class-name
(cadr
(multiple-value-list
(typespec->form-view-field-parser (make-instance 'form-scaffold)
'boolean nil))))
predicate-parser)
(addtest boolean-i18n-1
(ensure-alist-has-keys
(widget-translation-table (make-instance 'predicate-presentation))
(list :true-string :false-string)))
|
5216bdb5b6f83e5f6b6426abb78512d308e8b2fedd38fdccd39cd16af4b4689f | census-instrumentation/opencensus-erlang | oc_sampler_probability.erl | %%%------------------------------------------------------------------------
%% Copyright 2017, OpenCensus Authors
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc This sampler assumes the lower 64 bits of the trace id are
%% randomly distributed around the whole (long) range. The sampler creates
%% an upper bound id based on the configured probability and compares the
%% lower 64 bits of the trace id to it for the sampling decision.
%% @end
%%%-----------------------------------------------------------------------
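%% Illustrative sketch (added comment, not part of the original module): with
%% probability 0.25 the bound is 0.25 * 9223372036854775807, and a trace id
%% whose masked low bits (TraceId band 16#7fffffffffffffff) fall below that
%% bound is sampled, e.g.
%% should_sample(1, undefined, false, 0.25 * 9223372036854775807) returns true.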
-module(oc_sampler_probability).
-behaviour(oc_sampler).
-include_lib("syntax_tools/include/merl.hrl").
-export([init/1,
should_sample/4]).
-define(MAX_VALUE, 9223372036854775807).
-define(DEFAULT_PROBABILITY, 0.5).
init(Opts) ->
case proplists:get_value(probability, Opts, ?DEFAULT_PROBABILITY) of
P when P =:= 0.0 ->
IdUpperBound = 0;
P when P =:= 1.0 ->
IdUpperBound = ?MAX_VALUE;
P when P >= 0.0
, P =< 1.0 ->
IdUpperBound = (P * ?MAX_VALUE)
end,
IdUpperBound.
%% probability sampler keeps parent decision if it is true
should_sample(_, _, true, _) ->
true;
should_sample(TraceId, _, _, IdUpperBound) ->
Lower64Bits = TraceId band ?MAX_VALUE,
erlang:abs(Lower64Bits) < IdUpperBound.
| null | https://raw.githubusercontent.com/census-instrumentation/opencensus-erlang/7fb276ff73d677c00458922c9180df634f45e018/src/oc_sampler_probability.erl | erlang | ------------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
randomly distributed around the whole (long) range. The sampler creates
an upper bound id based on the configured probability and compares the
@end
-----------------------------------------------------------------------
probability sampler keeps parent decision if it is true | Copyright 2017, OpenCensus Authors
Licensed under the Apache License, Version 2.0 (the "License");
distributed under the License is distributed on an "AS IS" BASIS,
@doc This sampler assumes the lower 64 bits of the trace id are
lower 64 bits of the trace id to it for the sampling decision.
-module(oc_sampler_probability).
-behaviour(oc_sampler).
-include_lib("syntax_tools/include/merl.hrl").
-export([init/1,
should_sample/4]).
-define(MAX_VALUE, 9223372036854775807).
-define(DEFAULT_PROBABILITY, 0.5).
init(Opts) ->
case proplists:get_value(probability, Opts, ?DEFAULT_PROBABILITY) of
P when P =:= 0.0 ->
IdUpperBound = 0;
P when P =:= 1.0 ->
IdUpperBound = ?MAX_VALUE;
P when P >= 0.0
, P =< 1.0 ->
IdUpperBound = (P * ?MAX_VALUE)
end,
IdUpperBound.
should_sample(_, _, true, _) ->
true;
should_sample(TraceId, _, _, IdUpperBound) ->
Lower64Bits = TraceId band ?MAX_VALUE,
erlang:abs(Lower64Bits) < IdUpperBound.
|
d932318cce0cf322a74d1296511a1e34635bcbda72fe586094f0717695fa2f3e | hkupty/carbon.html | symbols_test.clj | (ns carbon.symbols-test
(:require [clojure.test :refer [deftest testing is]]
[matcher-combinators.test] ;; adds support for `match?` and `thrown-match?` in `is` expressions
[matcher-combinators.matchers :as m]
[clojure.string :as str]
[clojure.edn :as edn]
[carbon.symbols :as symbols]))
(deftest carbon-symbols
(testing "We can search symbols in a kw map"
(is (match? true
(symbols/process-symbol 'x {:x true}))))
(testing "We can search nested symbols in a kw map"
(is (match? true (symbols/process-symbol 'x.y/z {:x {:y {:z true}}}))))
(testing "We can match namespaced symbols to namespaced values in a kw map"
(is (match? true (symbols/process-symbol 'x.y/z {:x {:y/z true}})))))
| null | https://raw.githubusercontent.com/hkupty/carbon.html/beff9a6ef6eb158cdcd099da7fe6b6895434124c/test/carbon/symbols_test.clj | clojure | adds support for `match?` and `thrown-match?` in `is` expressions | (ns carbon.symbols-test
(:require [clojure.test :refer [deftest testing is]]
[matcher-combinators.matchers :as m]
[clojure.string :as str]
[clojure.edn :as edn]
[carbon.symbols :as symbols]))
(deftest carbon-symbols
(testing "We can search symbols in a kw map"
(is (match? true
(symbols/process-symbol 'x {:x true}))))
(testing "We can search nested symbols in a kw map"
(is (match? true (symbols/process-symbol 'x.y/z {:x {:y {:z true}}}))))
(testing "We can match namespaced symbols to namespaced values in a kw map"
(is (match? true (symbols/process-symbol 'x.y/z {:x {:y/z true}})))))
|
45fc42fd84d10b89deaced3177fd9babd1aa72cbb134801c41d399abf49347ee | candera/causatum | event_streams_test.clj | (ns causatum.event-streams-test
(:require [causatum.event-streams :refer :all]
[causatum.util]
[clojure.data.generators :as dg]
[clojure.set :as set]
[clojure.test :refer :all]))
;; The delay operations we use during the tests
(def delay-ops
{:constant (fn [rtime delay] delay)
:rand-exp (fn [rtime mean] (causatum.util/rand-exp mean))})
(defn- simplify
"Boils its input down to an [rtime state] pair for easy comparison."
[event]
[(:rtime event) (:state event)])
(defn- within-tolerance?
"Returns true if `x` is within `tolerance` of `target`."
[x target tolerance]
(<= (- target tolerance) x (+ target tolerance)))
(defn- normalize
"Given a map, returns a map with the same keys whose values are
normalized to a sum of 1.0."
[m]
(let [total (->> m vals (reduce +))]
(reduce-kv (fn [a k v]
(assoc a k (/ v total)))
{}
m)))
(defn- plausible?
"Returns true if the frequencies of states in `events` matches
`distribution`, a map of states to [probability tolerance] pairs."
[distribution events]
(let [freqs (->> events (map :state) frequencies normalize)]
(and (= (-> freqs keys set) (-> distribution keys set))
(every? (fn [[k v]]
(let [[prob tol] (get distribution k)]
(within-tolerance? v prob tol)))
freqs))))
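;; Illustrative usage sketch (added comment, not part of the original tests):
;; a hand-made event sequence whose state frequencies sit inside the tolerances.
(comment
  (plausible? {:a [0.5 0.2] :b [0.5 0.2]}
              [{:state :a} {:state :b} {:state :a} {:state :b}]) ;=> true
  )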
(defn- rtime-ordered?
"Returns true if the event sequence is monotonically nondecreasing in rtime."
[events]
(->> (map vector events (drop 1 events))
(every? (fn [[e1 e2]] (<= (:rtime e1) (:rtime e2))))))
(defmacro throws?
"Returns true if body throws an exception"
[body]
`(try ~body false (catch Exception _# true)))
(deftest agenda-tests
(testing "Agendas don't grow too much"
(let [model {:graph {:a [{:b {}}]}}
seed-events (map (fn [rtime] {:state :a :rtime rtime}) (iterate inc 0))]
;; Yes it's weird that we're testing a private function. It's
;; even weirder that that function exists, but if there's more
;; than one item in :future-events, it means we're keeping track
;; of too many things. v0.1.0 had a bug in it that led to memory
;; growth over time. It's important to avoid that.
;;
;; TODO: Validate this through a functional test instead of a
;; unit test.
(is (>= 1 (-> (->> (iterate (fn [[events event-stream agenda]]
(@#'causatum.event-streams/next-state model event-stream agenda))
[[] seed-events (sorted-map)])
(drop 100))
first
(nth 2)
count))))))
(deftest event-stream-tests
(testing "Event stream generation."
(testing "Null model and input stream produces an empty event stream."
(is (empty? (event-stream {:graph {}} [])) ))
(testing "Simple linear model"
(is (= [[0 :a] [0 :b]]
(->> (event-stream {:graph {:a [{:b {}}]}}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Linear model with simple constant delay"
(is (= [[0 :a] [1 :b]]
(->> (event-stream {:graph {:a [{:b {:delay [:constant 1]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Loopback model"
(is (= (take 1000 (repeat [0 :a]))
(->> (event-stream {:graph {:a [{:a {}}]}} [{:rtime 0 :state :a}])
(take 1000)
(map simplify)))))
(testing "Binding dg/*rnd*"
(let [model {:graph {:a [{:a {:weight 1}
:b {:weight 3}}]
:b [{:a {:weight 1}
:b {:weight 3}}]}}
seeds [{:rtime 0 :state :a}]]
(testing "makes the stream stable if the seed is the same"
(is (= (binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds))))
(binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds)))))))
(testing "produces a difference sequence for different seeds"
(is (not (= (binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds))))
(binding [dg/*rnd* (java.util.Random. 24)]
(doall (take 100 (event-stream model seeds))))))))))
(testing "Model with weights"
(is (plausible? {:a [0.25 0.1]
:b [0.75 0.1]}
(binding [dg/*rnd* (java.util.Random. 42)]
(->> (event-stream {:graph {:a [{:a {:weight 1}
:b {:weight 3}}]
:b [{:a {:weight 1}
:b {:weight 3}}]}}
[{:rtime 0 :state :a}])
(take 1000)
doall)))))
(testing "Unbound delay op throws"
(is (thrown? clojure.lang.ExceptionInfo
(dorun (event-stream {:graph {:a [{:b {:delay [:unspecified]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])))))
(testing "Mutliple destination states"
(is (= [[0 :a] [1 :b] [1 :c]]
(->> (event-stream {:graph {:a [{:b {:delay [:constant 1]}}
{:c {:delay [:constant 1]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Input stream of length greater than one"
(is (= 20
(count (event-stream {:graph {:a [{:b {}}]}}
(map (fn [rtime] {:state :a :rtime rtime})
(range 10)))))))
(testing "Infinite input event stream"
(is (= [[10 :a] [10 :b] [11 :a] [11 :b]]
(->> (event-stream {:graph {:a [{:b {}}]}}
(map (fn [rtime] {:state :a :rtime rtime})
(iterate inc 0)))
(drop 20)
(take 4)
(map simplify)))))
(testing "Out-of-order event bug is gone"
(is (rtime-ordered?
(binding [dg/*rnd* (java.util.Random. 42)]
(doall (event-stream {:graph {:a [{:b {:weight 1 :delay [:rand-exp 1.0]}
:c {:weight 1 :delay [:rand-exp 1.0]}}]}
:delay-ops delay-ops}
(map (fn [rtime] {:state :a :rtime rtime})
(range 100)))))))))) | null | https://raw.githubusercontent.com/candera/causatum/fa2e22c052042473d07b6df73caac1df8b0c608a/test/causatum/event_streams_test.clj | clojure | The delay operations we use during the tests
Yes it's weird that we're testing a private function. It's
even weirder that that function exists, but if there's more
than one item in :future-events, it means we're keeping track
of too many things. v0.1.0 had a bug in it that led to memory
growth over time. It's important to avoid that.
TODO: Validate this through a functional test instead of a
unit test. | (ns causatum.event-streams-test
(:require [causatum.event-streams :refer :all]
[causatum.util]
[clojure.data.generators :as dg]
[clojure.set :as set]
[clojure.test :refer :all]))
(def delay-ops
{:constant (fn [rtime delay] delay)
:rand-exp (fn [rtime mean] (causatum.util/rand-exp mean))})
(defn- simplify
"Boils its input down to an [rtime state] pair for easy comparison."
[event]
[(:rtime event) (:state event)])
(defn- within-tolerance?
"Returns true if `x` is within `tolerance` of `target`."
[x target tolerance]
(<= (- target tolerance) x (+ target tolerance)))
(defn- normalize
"Given a map, returns a map with the same keys whose values are
normalized to a sum of 1.0."
[m]
(let [total (->> m vals (reduce +))]
(reduce-kv (fn [a k v]
(assoc a k (/ v total)))
{}
m)))
(defn- plausible?
"Returns true if the frequencies of states in `events` matches
`distribution`, a map of states to [probability tolerance] pairs."
[distribution events]
(let [freqs (->> events (map :state) frequencies normalize)]
(and (= (-> freqs keys set) (-> distribution keys set))
(every? (fn [[k v]]
(let [[prob tol] (get distribution k)]
(within-tolerance? v prob tol)))
freqs))))
(defn- rtime-ordered?
"Returns true if the event sequence is monotonically nondecreasing in rtime."
[events]
(->> (map vector events (drop 1 events))
(every? (fn [[e1 e2]] (<= (:rtime e1) (:rtime e2))))))
(defmacro throws?
"Returns true if body throws an exception"
[body]
`(try ~body false (catch Exception _# true)))
(deftest agenda-tests
(testing "Agendas don't grow too much"
(let [model {:graph {:a [{:b {}}]}}
seed-events (map (fn [rtime] {:state :a :rtime rtime}) (iterate inc 0))]
(is (>= 1 (-> (->> (iterate (fn [[events event-stream agenda]]
(@#'causatum.event-streams/next-state model event-stream agenda))
[[] seed-events (sorted-map)])
(drop 100))
first
(nth 2)
count))))))
(deftest event-stream-tests
(testing "Event stream generation."
(testing "Null model and input stream produces an empty event stream."
(is (empty? (event-stream {:graph {}} [])) ))
(testing "Simple linear model"
(is (= [[0 :a] [0 :b]]
(->> (event-stream {:graph {:a [{:b {}}]}}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Linear model with simple constant delay"
(is (= [[0 :a] [1 :b]]
(->> (event-stream {:graph {:a [{:b {:delay [:constant 1]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Loopback model"
(is (= (take 1000 (repeat [0 :a]))
(->> (event-stream {:graph {:a [{:a {}}]}} [{:rtime 0 :state :a}])
(take 1000)
(map simplify)))))
(testing "Binding dg/*rnd*"
(let [model {:graph {:a [{:a {:weight 1}
:b {:weight 3}}]
:b [{:a {:weight 1}
:b {:weight 3}}]}}
seeds [{:rtime 0 :state :a}]]
(testing "makes the stream stable if the seed is the same"
(is (= (binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds))))
(binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds)))))))
(testing "produces a difference sequence for different seeds"
(is (not (= (binding [dg/*rnd* (java.util.Random. 42)]
(doall (take 100 (event-stream model seeds))))
(binding [dg/*rnd* (java.util.Random. 24)]
(doall (take 100 (event-stream model seeds))))))))))
(testing "Model with weights"
(is (plausible? {:a [0.25 0.1]
:b [0.75 0.1]}
(binding [dg/*rnd* (java.util.Random. 42)]
(->> (event-stream {:graph {:a [{:a {:weight 1}
:b {:weight 3}}]
:b [{:a {:weight 1}
:b {:weight 3}}]}}
[{:rtime 0 :state :a}])
(take 1000)
doall)))))
(testing "Unbound delay op throws"
(is (thrown? clojure.lang.ExceptionInfo
(dorun (event-stream {:graph {:a [{:b {:delay [:unspecified]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])))))
(testing "Mutliple destination states"
(is (= [[0 :a] [1 :b] [1 :c]]
(->> (event-stream {:graph {:a [{:b {:delay [:constant 1]}}
{:c {:delay [:constant 1]}}]}
:delay-ops delay-ops}
[{:rtime 0 :state :a}])
(map simplify)))))
(testing "Input stream of length greater than one"
(is (= 20
(count (event-stream {:graph {:a [{:b {}}]}}
(map (fn [rtime] {:state :a :rtime rtime})
(range 10)))))))
(testing "Infinite input event stream"
(is (= [[10 :a] [10 :b] [11 :a] [11 :b]]
(->> (event-stream {:graph {:a [{:b {}}]}}
(map (fn [rtime] {:state :a :rtime rtime})
(iterate inc 0)))
(drop 20)
(take 4)
(map simplify)))))
(testing "Out-of-order event bug is gone"
(is (rtime-ordered?
(binding [dg/*rnd* (java.util.Random. 42)]
(doall (event-stream {:graph {:a [{:b {:weight 1 :delay [:rand-exp 1.0]}
:c {:weight 1 :delay [:rand-exp 1.0]}}]}
:delay-ops delay-ops}
(map (fn [rtime] {:state :a :rtime rtime})
(range 100)))))))))) |
69bd7410f98c59465dcb26872ea6818471cc30027f1bdfa7ef666940a6eb719d | MaskRay/OJHaskell | Center Alignment.hs | ast n = replicate n '*'
spc n = replicate n ' '
f w [] _ = []
f w (l:ls) left =
let n = (w-length l) `div` 2
in if (w-length l) `mod` 2 == 0 then
("*"++spc n++l++spc n++"*") : f w ls left
else
("*"++spc (n+left)++l++spc (n+1-left)++"*") : f w ls (1-left)
main = do
contents <- return . lines =<< getContents
let w = maximum $ map length contents
mapM_ putStrLn $ [ast (w+2)]++f w contents 0++[ast (w+2)] | null | https://raw.githubusercontent.com/MaskRay/OJHaskell/ba24050b2480619f10daa7d37fca558182ba006c/Codeforces/5/Center%20Alignment.hs | haskell | ast n = replicate n '*'
spc n = replicate n ' '
f w [] _ = []
f w (l:ls) left =
let n = (w-length l) `div` 2
in if (w-length l) `mod` 2 == 0 then
("*"++spc n++l++spc n++"*") : f w ls left
else
("*"++spc (n+left)++l++spc (n+1-left)++"*") : f w ls (1-left)
main = do
contents <- return . lines =<< getContents
let w = maximum $ map length contents
mapM_ putStrLn $ [ast (w+2)]++f w contents 0++[ast (w+2)] |
|
394b4c1a2960038c013675e48276f080b07ebdd53e0018ca7429bd2ce13c5869 | wireapp/wire-server | DelayQueue.hs | -- This file is part of the Wire Server implementation.
--
-- Copyright (C) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
-- the terms of the GNU Affero General Public License as published by the Free
-- Software Foundation, either version 3 of the License, or (at your option) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
-- You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Gundeck.Util.DelayQueue
( DelayQueue,
Clock (..),
Delay (..),
Limit (..),
new,
enqueue,
dequeue,
cancel,
length,
delay,
limit,
)
where
import Data.OrdPSQ (OrdPSQ)
import qualified Data.OrdPSQ as PSQ
import Imports hiding (length)
data DelayQueue k v = DelayQueue
{ _queue :: IORef (OrdPSQ k Word64 v),
_clock :: Clock,
delay :: !Delay,
limit :: !Limit
}
newtype Clock = Clock {getTime :: IO Word64}
newtype Delay = Delay {delayTime :: Word64}
deriving (Eq, Show, Ord)
newtype Limit = Limit {getLimit :: Int}
deriving (Eq, Show, Ord)
new :: Clock -> Delay -> Limit -> IO (DelayQueue k v)
new c d l = do
queue <- newIORef PSQ.empty
pure $! DelayQueue queue c d l
enqueue :: Ord k => DelayQueue k v -> k -> v -> IO Bool
enqueue (DelayQueue queue clock d l) k v = do
time <- getTime clock
let !p = time + delayTime d
atomicModifyIORef' queue $ \q ->
if PSQ.size q >= getLimit l
then (q, False)
else
swap $
PSQ.alter
( \case
Nothing -> (True, Just (p, v))
Just pv -> (True, Just pv)
)
k
q
dequeue :: Ord k => DelayQueue k v -> IO (Maybe (Either Delay v))
dequeue (DelayQueue queue clock _ _) = do
time <- getTime clock
atomicModifyIORef' queue $ \q ->
case PSQ.minView q of
Nothing -> (q, Nothing)
Just (_, p, v, q') | p <= time -> (q', Just (Right v))
Just (_, p, _, _) -> (q, Just (Left (Delay (p - time))))
cancel :: Ord k => DelayQueue k v -> k -> IO Bool
cancel (DelayQueue queue _ _ _) k =
atomicModifyIORef' queue $
swap . PSQ.alter (\pv -> (isJust pv, Nothing)) k
length :: DelayQueue k v -> IO Int
length q = PSQ.size <$> readIORef (_queue q)
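-- A minimal usage sketch (illustrative only; the millisecond 'Clock' built from
-- 'getPOSIXTime' is an assumption of this example, not part of this module):
--
-- > import Data.Time.Clock.POSIX (getPOSIXTime)
-- >
-- > example :: IO ()
-- > example = do
-- >   q <- new (Clock (round . (* 1000) <$> getPOSIXTime)) (Delay 500) (Limit 1000)
-- >   _ok <- enqueue q ("device-1" :: String) ("payload" :: String)
-- >   r <- dequeue q
-- >   -- Right v        => v is due now
-- >   -- Left (Delay d) => nothing due yet; retry after d clock ticks
-- >   print (fmap (const ()) r)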
| null | https://raw.githubusercontent.com/wireapp/wire-server/d6b6c52b2219efb5fedb8945a66a52016a116186/services/gundeck/src/Gundeck/Util/DelayQueue.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. | Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Gundeck.Util.DelayQueue
( DelayQueue,
Clock (..),
Delay (..),
Limit (..),
new,
enqueue,
dequeue,
cancel,
length,
delay,
limit,
)
where
import Data.OrdPSQ (OrdPSQ)
import qualified Data.OrdPSQ as PSQ
import Imports hiding (length)
data DelayQueue k v = DelayQueue
{ _queue :: IORef (OrdPSQ k Word64 v),
_clock :: Clock,
delay :: !Delay,
limit :: !Limit
}
newtype Clock = Clock {getTime :: IO Word64}
newtype Delay = Delay {delayTime :: Word64}
deriving (Eq, Show, Ord)
newtype Limit = Limit {getLimit :: Int}
deriving (Eq, Show, Ord)
new :: Clock -> Delay -> Limit -> IO (DelayQueue k v)
new c d l = do
queue <- newIORef PSQ.empty
pure $! DelayQueue queue c d l
enqueue :: Ord k => DelayQueue k v -> k -> v -> IO Bool
enqueue (DelayQueue queue clock d l) k v = do
time <- getTime clock
let !p = time + delayTime d
atomicModifyIORef' queue $ \q ->
if PSQ.size q >= getLimit l
then (q, False)
else
swap $
PSQ.alter
( \case
Nothing -> (True, Just (p, v))
Just pv -> (True, Just pv)
)
k
q
dequeue :: Ord k => DelayQueue k v -> IO (Maybe (Either Delay v))
dequeue (DelayQueue queue clock _ _) = do
time <- getTime clock
atomicModifyIORef' queue $ \q ->
case PSQ.minView q of
Nothing -> (q, Nothing)
Just (_, p, v, q') | p <= time -> (q', Just (Right v))
Just (_, p, _, _) -> (q, Just (Left (Delay (p - time))))
cancel :: Ord k => DelayQueue k v -> k -> IO Bool
cancel (DelayQueue queue _ _ _) k =
atomicModifyIORef' queue $
swap . PSQ.alter (\pv -> (isJust pv, Nothing)) k
length :: DelayQueue k v -> IO Int
length q = PSQ.size <$> readIORef (_queue q)
|
d7b0b69d56ed795adbbbe51c52b9c2c4d71ea28f64471b1c135b21021633b3cc | sebashack/servantRestfulAPI | ResponseParser.hs | {-# LANGUAGE OverloadedStrings #-}
module HelperLibs.ElasticSearch.ResponseParser where
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Data.Set as S
import qualified Data.CountryCodes as CC
import qualified Domains.BookingDomain.Property.DataTypes as PT
import qualified Domains.BookingDomain.Bookable.DataTypes as BT
import qualified Domains.ContentAdminDomain.Article.DataTypes as AT
import qualified Data.Time as TM
import qualified Network.HTTP.Simple as SHTTP
import qualified Data.ByteString.Lazy.Char8 as LC8
-- # Helper functions to parse ElasticSearch JSON Value Responses # --
-- Helper function to log the ElasticSearch responses
logResponse :: SHTTP.Response Value -> IO ()
logResponse res = do
putStrLn $ "The status code: " ++ show (SHTTP.getResponseStatusCode res)
print $ SHTTP.getResponseHeader "Content-Type" res
LC8.putStrLn $ encode (SHTTP.getResponseBody res :: Value)
-- Some BASIC JSON Parsers --
parseIntAttr :: Integral a => Object -> T.Text -> Maybe a
parseIntAttr obj attr = do
(Number v) <- parseMaybe (obj .:) attr
return $ round v
parseBoolAttr :: Object -> T.Text -> Maybe Bool
parseBoolAttr obj attr = do
(Bool v) <- parseMaybe (obj .:) attr
return v
parseStringAttr :: Object -> T.Text -> Maybe T.Text
parseStringAttr obj attr = do
(String v) <- parseMaybe (obj .:) attr
return v
parseOptionalStringAttr :: Object -> T.Text -> Maybe T.Text
parseOptionalStringAttr obj attr = do
val <- parseMaybe (obj .:) attr
case val of
(String "null") -> Nothing
(String v) -> return v
Null -> Nothing
_ -> Nothing
parseArrString :: Object -> T.Text -> [T.Text]
parseArrString obj attr =
case parseMaybe (obj .:) attr of
Just (Array arr) -> V.toList $ fmap (\(String x) -> x) arr
Just Null -> []
Just (String "null") -> []
_ -> []
parseDateAttr :: Object -> T.Text -> Maybe TM.UTCTime
parseDateAttr obj attr = do
(String date) <- parseMaybe (obj .:) attr
case T.split (\c -> c == '-') date of
[year,month,day] -> do
gregorian <- TM.fromGregorianValid (read $ T.unpack year) (read $ T.unpack month) (read $ T.unpack day)
return $ TM.UTCTime gregorian 0
_ -> Nothing
-- END of BASIC JSON parsers --
-- This function receives an Object (json) and parses a Property if possible.
parseProperty :: Object -> Maybe PT.Property
parseProperty object = do
-- Obligatory Attributes
_id <- parseStringAttr object "_id"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
propType <- parseStringAttr _source "lodging_type"
propName <- parseStringAttr _source "prop_name"
cCode <- parseStringAttr _source "country_code"
address <- parseStringAttr _source "address"
region <- parseStringAttr _source "region"
city <- parseStringAttr _source "city"
regDate <- parseDateAttr _source "reg_date"
-- Optional Attributes
let zipCode = parseOptionalStringAttr _source "zip_code"
esDesc = parseOptionalStringAttr _source "esp_prop_desc"
enDesc = parseOptionalStringAttr _source "eng_prop_desc"
facs = parseArrString _source "facilities"
rules = parseArrString _source "rules"
phNums = parseArrString _source "contact_nums"
mainImgId = parseOptionalStringAttr _source "main_img_id"
return $ PT.Property _id
(PT.BasicPropData propName propType (PT.Location (CC.fromText cCode) region city address zipCode))
regDate
esDesc
enDesc
facs
rules
phNums
mainImgId
-- Given an Object (json) with several properties, this function parses it into a list of Properties.
parseProperties :: Object -> Maybe [PT.Property]
parseProperties obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
properties <- mapM parseProperty hitsHits
return properties
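-- For orientation, a fragment of the ElasticSearch hit shape these parsers
-- expect (field values below are invented; field names mirror parseProperty):
--
-- > { "_id": "prop-1",
-- >   "_source": { "lodging_type": "hotel", "prop_name": "Casa Azul",
-- >                "country_code": "CO", "address": "Calle 1 #2-3",
-- >                "region": "Antioquia", "city": "Medellin",
-- >                "reg_date": "2017-05-20", "facilities": ["wifi"] } }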
-- This function receives an Object (json) and parses a Bookable if possible.
parseBookable :: Object -> Maybe BT.Bookable
parseBookable object = do
-- Obligatory Attributes
_id <- parseStringAttr object "_id"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
propId <- parseStringAttr _source "prop_id"
cCode <- parseStringAttr _source "prop_country_code"
maxOccu <- parseIntAttr _source "max_occupancy"
name <- parseStringAttr _source "bkl_name"
bedNum <- parseIntAttr _source "bed_num"
status <- parseBoolAttr _source "listed"
-- Optional fields
let roomSize = parseOptionalStringAttr _source "room_size"
bedType = parseOptionalStringAttr _source "bed_type"
esDesc = parseOptionalStringAttr _source "esp_bkl_desc"
enDesc = parseOptionalStringAttr _source "eng_bkl_desc"
facs = parseArrString _source "prop_facilities"
amens = parseArrString _source "amenities"
roomIds = parseArrString _source "room_ids"
let bklSpecs = BT.BookableSpecs name roomSize bedType bedNum amens
return $ BT.Bookable _id
(CC.fromText cCode)
(if status == True then BT.Listed else BT.Unlisted)
(BT.BasicBookableData propId bklSpecs esDesc enDesc maxOccu (S.fromList roomIds))
(getPricings _source)
where
parsePricing :: Value -> Maybe BT.Pricing
parsePricing (Object obj) = do
let conds = parseArrString obj "conditions"
priId <- parseStringAttr obj "pri_id"
occu <- parseIntAttr obj "occupancy"
price <- parseIntAttr obj "night_price"
disc <- parseIntAttr obj "discount"
return $ BT.Pricing priId (BT.PricingData occu conds price disc)
parsePricing _ = Nothing
getPricings object = case parseMaybe (object .:) "pricings" of
Nothing -> S.empty
Just (Array val) ->
case mapM parsePricing val of
Nothing -> S.empty
Just vec -> S.fromList $ V.toList vec
_ -> S.empty
-- Given an Object (json) with several bookables, this function parses it into a list of Bookables.
parseBookables :: Object -> Maybe [BT.Bookable]
parseBookables obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
bookables <- mapM parseBookable hitsHits
return bookables
-- This function receives an Object (json) and parses an Article if possible.
parseArticle :: Object -> Maybe AT.Article
parseArticle object = do
-- Obligatory Attributes
_id <- parseStringAttr object "_id"
_type <- parseStringAttr object "_type"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
let isSpanish = if _type == "esp_article" then True else False
format = if isSpanish then "format" else "format_eng"
author = if isSpanish then "author" else "author_eng"
pubDate = if isSpanish then "pub_date" else "pub_date_eng"
title = if isSpanish then "title" else "title_eng"
abstract = if isSpanish then "abstract" else "abstract_eng"
sectionTitles = if isSpanish then "section_titles" else "section_titles_eng"
sections = if isSpanish then "sections" else "sections_eng"
abstractImageUrl = if isSpanish then "abstract_image_url" else "abstract_image_url_eng"
mainImageUrl = if isSpanish then "main_image_url" else "main_image_url_eng"
thumbnailUrl = if isSpanish then "thumbnail_url" else "thumbnail_url_eng"
imageUrls = if isSpanish then "image_urls" else "image_urls_eng"
videoHeaders = if isSpanish then "video_headers" else "video_headers_eng"
videoUrls = if isSpanish then "video_urls" else "video_urls_eng"
country = if isSpanish then "country" else "country_eng"
region = if isSpanish then "region" else "region_eng"
city = if isSpanish then "city" else "city_eng"
propertyName = if isSpanish then "property_name" else "property_name_eng"
format' <- parseStringAttr _source format
author' <- parseStringAttr _source author
pubDate' <- parseStringAttr _source pubDate
title' <- parseStringAttr _source title
abstract' <- parseStringAttr _source abstract
abstractImageUrl' <- parseStringAttr _source abstractImageUrl
mainImageUrl' <- parseStringAttr _source mainImageUrl
thumbnailUrl' <- parseStringAttr _source thumbnailUrl
country' <- parseStringAttr _source country
region' <- parseStringAttr _source region
city' <- parseStringAttr _source city
-- Optional Attributes
let sectionTitles' = parseArrString _source sectionTitles
sections' = parseArrString _source sections
imageUrls' = parseArrString _source imageUrls
videoHeaders' = parseArrString _source videoHeaders
videoUrls' = parseArrString _source videoUrls
propertyName' = parseOptionalStringAttr _source propertyName
return $ AT.Article _id
_type
format'
author'
pubDate'
title'
abstract'
sectionTitles'
sections'
abstractImageUrl'
mainImageUrl'
thumbnailUrl'
imageUrls'
videoHeaders'
videoUrls'
(AT.Location country' region' city')
propertyName'
-- This function receives an Object (json) and parses an Article if possible.
parseAbstract :: Object -> Maybe AT.Abstract
parseAbstract object = do
-- Obligatory Attributes
_id <- parseStringAttr object "_id"
_type <- parseStringAttr object "_type"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
let isSpanish = if _type == "esp_article" then True else False
format = if isSpanish then "format" else "format_eng"
author = if isSpanish then "author" else "author_eng"
pubDate = if isSpanish then "pub_date" else "pub_date_eng"
title = if isSpanish then "title" else "title_eng"
abstract = if isSpanish then "abstract" else "abstract_eng"
abstractImageUrl = if isSpanish then "abstract_image_url" else "abstract_image_url_eng"
thumbnailUrl = if isSpanish then "thumbnail_url" else "thumbnail_url_eng"
format' <- parseStringAttr _source format
author' <- parseStringAttr _source author
pubDate' <- parseStringAttr _source pubDate
title' <- parseStringAttr _source title
abstract' <- parseStringAttr _source abstract
abstractImageUrl' <- parseStringAttr _source abstractImageUrl
thumbnailUrl' <- parseStringAttr _source thumbnailUrl
return $ AT.Abstract _id
_type
format'
author'
pubDate'
title'
abstract'
abstractImageUrl'
thumbnailUrl'
-- Given an Object (json) with several abstracts, this function parses it into a list of Abstracts.
parseAbstracts :: Object -> Maybe [AT.Abstract]
parseAbstracts obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
abstracts <- mapM parseAbstract hitsHits
return abstracts
| null | https://raw.githubusercontent.com/sebashack/servantRestfulAPI/e625535d196acefaff4f5bf03108816be668fe4d/libs/HelperLibs/ElasticSearch/ResponseParser.hs | haskell |
END of BASIC JSON parsers --
This function receives an Object (json) and parses a Property if possible.
Obligatory Attributes
Optional Attributes
Given an Object (json) with several properties, this function parses it into a list of Properties.
Obligatory Attributes
Optional fields
This function receives an Object (json) and parses an Article if possible.
Obligatory Attributes
Optional Attributes
This function receives an Object (json) and parses an Article if possible.
Obligatory Attributes | # LANGUAGE OverloadedStrings #
module HelperLibs.ElasticSearch.ResponseParser where
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Data.Set as S
import qualified Data.CountryCodes as CC
import qualified Domains.BookingDomain.Property.DataTypes as PT
import qualified Domains.BookingDomain.Bookable.DataTypes as BT
import qualified Domains.ContentAdminDomain.Article.DataTypes as AT
import qualified Data.Time as TM
import qualified Network.HTTP.Simple as SHTTP
import qualified Data.ByteString.Lazy.Char8 as LC8
Helper function to log the ElasticSearch responses
logResponse :: SHTTP.Response Value -> IO ()
logResponse res = do
putStrLn $ "The status code: " ++ show (SHTTP.getResponseStatusCode res)
print $ SHTTP.getResponseHeader "Content-Type" res
LC8.putStrLn $ encode (SHTTP.getResponseBody res :: Value)
parseIntAttr :: Integral a => Object -> T.Text -> Maybe a
parseIntAttr obj attr = do
(Number v) <- parseMaybe (obj .:) attr
return $ round v
parseBoolAttr :: Object -> T.Text -> Maybe Bool
parseBoolAttr obj attr = do
(Bool v) <- parseMaybe (obj .:) attr
return v
parseStringAttr :: Object -> T.Text -> Maybe T.Text
parseStringAttr obj attr = do
(String v) <- parseMaybe (obj .:) attr
return v
parseOptionalStringAttr :: Object -> T.Text -> Maybe T.Text
parseOptionalStringAttr obj attr = do
val <- parseMaybe (obj .:) attr
case val of
(String "null") -> Nothing
(String v) -> return v
Null -> Nothing
_ -> Nothing
parseArrString :: Object -> T.Text -> [T.Text]
parseArrString obj attr =
case parseMaybe (obj .:) attr of
Just (Array arr) -> V.toList $ fmap (\(String x) -> x) arr
Just Null -> []
Just (String "null") -> []
_ -> []
parseDateAttr :: Object -> T.Text -> Maybe TM.UTCTime
parseDateAttr obj attr = do
(String date) <- parseMaybe (obj .:) attr
case T.split (\c -> c == '-') date of
[year,month,day] -> do
gregorian <- TM.fromGregorianValid (read $ T.unpack year) (read $ T.unpack month) (read $ T.unpack day)
return $ TM.UTCTime gregorian 0
_ -> Nothing
parseProperty :: Object -> Maybe PT.Property
parseProperty object = do
_id <- parseStringAttr object "_id"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
propType <- parseStringAttr _source "lodging_type"
propName <- parseStringAttr _source "prop_name"
cCode <- parseStringAttr _source "country_code"
address <- parseStringAttr _source "address"
region <- parseStringAttr _source "region"
city <- parseStringAttr _source "city"
regDate <- parseDateAttr _source "reg_date"
let zipCode = parseOptionalStringAttr _source "zip_code"
esDesc = parseOptionalStringAttr _source "esp_prop_desc"
enDesc = parseOptionalStringAttr _source "eng_prop_desc"
facs = parseArrString _source "facilities"
rules = parseArrString _source "rules"
phNums = parseArrString _source "contact_nums"
mainImgId = parseOptionalStringAttr _source "main_img_id"
return $ PT.Property _id
(PT.BasicPropData propName propType (PT.Location (CC.fromText cCode) region city address zipCode))
regDate
esDesc
enDesc
facs
rules
phNums
mainImgId
parseProperties :: Object -> Maybe [PT.Property]
parseProperties obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
properties <- mapM parseProperty hitsHits
return properties
This function receives an Object ( json ) and parses a Bookable if possible .
parseBookable :: Object -> Maybe BT.Bookable
parseBookable object = do
_id <- parseStringAttr object "_id"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
propId <- parseStringAttr _source "prop_id"
cCode <- parseStringAttr _source "prop_country_code"
maxOccu <- parseIntAttr _source "max_occupancy"
name <- parseStringAttr _source "bkl_name"
bedNum <- parseIntAttr _source "bed_num"
status <- parseBoolAttr _source "listed"
let roomSize = parseOptionalStringAttr _source "room_size"
bedType = parseOptionalStringAttr _source "bed_type"
esDesc = parseOptionalStringAttr _source "esp_bkl_desc"
enDesc = parseOptionalStringAttr _source "eng_bkl_desc"
facs = parseArrString _source "prop_facilities"
amens = parseArrString _source "amenities"
roomIds = parseArrString _source "room_ids"
let bklSpecs = BT.BookableSpecs name roomSize bedType bedNum amens
return $ BT.Bookable _id
(CC.fromText cCode)
(if status == True then BT.Listed else BT.Unlisted)
(BT.BasicBookableData propId bklSpecs esDesc enDesc maxOccu (S.fromList roomIds))
(getPricings _source)
where
parsePricing :: Value -> Maybe BT.Pricing
parsePricing (Object obj) = do
let conds = parseArrString obj "conditions"
priId <- parseStringAttr obj "pri_id"
occu <- parseIntAttr obj "occupancy"
price <- parseIntAttr obj "night_price"
disc <- parseIntAttr obj "discount"
return $ BT.Pricing priId (BT.PricingData occu conds price disc)
parsePricing _ = Nothing
getPricings object = case parseMaybe (object .:) "pricings" of
Nothing -> S.empty
Just (Array val) ->
case mapM parsePricing val of
Nothing -> S.empty
Just vec -> S.fromList $ V.toList vec
_ -> S.empty
Given an Object ( json ) with several bookables , this function parses it into a list of Bookables .
parseBookables :: Object -> Maybe [BT.Bookable]
parseBookables obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
bookables <- mapM parseBookable hitsHits
return bookables
parseArticle :: Object -> Maybe AT.Article
parseArticle object = do
_id <- parseStringAttr object "_id"
_type <- parseStringAttr object "_type"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
let isSpanish = if _type == "esp_article" then True else False
format = if isSpanish then "format" else "format_eng"
author = if isSpanish then "author" else "author_eng"
pubDate = if isSpanish then "pub_date" else "pub_date_eng"
title = if isSpanish then "title" else "title_eng"
abstract = if isSpanish then "abstract" else "abstract_eng"
sectionTitles = if isSpanish then "section_titles" else "section_titles_eng"
sections = if isSpanish then "sections" else "sections_eng"
abstractImageUrl = if isSpanish then "abstract_image_url" else "abstract_image_url_eng"
mainImageUrl = if isSpanish then "main_image_url" else "main_image_url_eng"
thumbnailUrl = if isSpanish then "thumbnail_url" else "thumbnail_url_eng"
imageUrls = if isSpanish then "image_urls" else "image_urls_eng"
videoHeaders = if isSpanish then "video_headers" else "video_headers_eng"
videoUrls = if isSpanish then "video_urls" else "video_urls_eng"
country = if isSpanish then "country" else "country_eng"
region = if isSpanish then "region" else "region_eng"
city = if isSpanish then "city" else "city_eng"
propertyName = if isSpanish then "property_name" else "property_name_eng"
format' <- parseStringAttr _source format
author' <- parseStringAttr _source author
pubDate' <- parseStringAttr _source pubDate
title' <- parseStringAttr _source title
abstract' <- parseStringAttr _source abstract
abstractImageUrl' <- parseStringAttr _source abstractImageUrl
mainImageUrl' <- parseStringAttr _source mainImageUrl
thumbnailUrl' <- parseStringAttr _source thumbnailUrl
country' <- parseStringAttr _source country
region' <- parseStringAttr _source region
city' <- parseStringAttr _source city
let sectionTitles' = parseArrString _source sectionTitles
sections' = parseArrString _source sections
imageUrls' = parseArrString _source imageUrls
videoHeaders' = parseArrString _source videoHeaders
videoUrls' = parseArrString _source videoUrls
propertyName' = parseOptionalStringAttr _source propertyName
return $ AT.Article _id
_type
format'
author'
pubDate'
title'
abstract'
sectionTitles'
sections'
abstractImageUrl'
mainImageUrl'
thumbnailUrl'
imageUrls'
videoHeaders'
videoUrls'
(AT.Location country' region' city')
propertyName'
parseAbstract :: Object -> Maybe AT.Abstract
parseAbstract object = do
_id <- parseStringAttr object "_id"
_type <- parseStringAttr object "_type"
_source <- parseMaybe (object .:) "_source" :: Maybe Object
let isSpanish = if _type == "esp_article" then True else False
format = if isSpanish then "format" else "format_eng"
author = if isSpanish then "author" else "author_eng"
pubDate = if isSpanish then "pub_date" else "pub_date_eng"
title = if isSpanish then "title" else "title_eng"
abstract = if isSpanish then "abstract" else "abstract_eng"
abstractImageUrl = if isSpanish then "abstract_image_url" else "abstract_image_url_eng"
thumbnailUrl = if isSpanish then "thumbnail_url" else "thumbnail_url_eng"
format' <- parseStringAttr _source format
author' <- parseStringAttr _source author
pubDate' <- parseStringAttr _source pubDate
title' <- parseStringAttr _source title
abstract' <- parseStringAttr _source abstract
abstractImageUrl' <- parseStringAttr _source abstractImageUrl
thumbnailUrl' <- parseStringAttr _source thumbnailUrl
return $ AT.Abstract _id
_type
format'
author'
pubDate'
title'
abstract'
abstractImageUrl'
thumbnailUrl'
Given an Object ( json ) with several abstracts , this function parses it into a list of Abtracts .
parseAbstracts :: Object -> Maybe [AT.Abstract]
parseAbstracts obj = do
hits <- parseMaybe (obj .:) "hits"
hitsHits <- parseMaybe (hits .:) "hits"
abstracts <- mapM parseAbstract hitsHits
return abstracts
|
44f4e09d70fa843c6d6b63b5e91752e0c600863f2148670be02880830e6fa75d | idontgetoutmuch/HasBayes | PopGrowthMCMCMain.hs | {-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ViewPatterns #-}
module Main (
main
) where
import Diagrams.Prelude
import Diagrams.Backend.CmdLine
import Diagrams.Backend.Cairo.CmdLine
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy as BL
import Data.Csv
import Data.Time
import System.Locale
import Data.Char
import qualified Data.ByteString as B
import Control.Monad
import PopGrowthMCMC
import PopGrowthMCMCChart
displayHeader :: FilePath -> Diagram B R2 -> IO ()
displayHeader fn =
mainRender ( DiagramOpts (Just 900) (Just 700) fn
, DiagramLoopOpts False Nothing 0
)
main :: IO ()
main = do
displayHeader "diagrams/AutoregressionVary1.png"
(diag "Predicted Flow at Kingston Bridge (Varying Parameters)"
(zip [0..] (V.toList $ V.map exp $ V.take 800 flows))
(zip [0..] (V.toList $ V.map exp $ V.take 800 preds)))
| null | https://raw.githubusercontent.com/idontgetoutmuch/HasBayes/43c7d09b8ee839dba5ed8f18e17110135d20b93d/PopGrowthMCMCMain.hs | haskell | # OPTIONS_GHC -Wall # | # OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - type - defaults #
# OPTIONS_GHC -fno - warn - unused - do - bind #
# OPTIONS_GHC -fno - warn - missing - methods #
# OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE ViewPatterns #
module Main (
main
) where
import Diagrams.Prelude
import Diagrams.Backend.CmdLine
import Diagrams.Backend.Cairo.CmdLine
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy as BL
import Data.Csv
import Data.Time
import System.Locale
import Data.Char
import qualified Data.ByteString as B
import Control.Monad
import PopGrowthMCMC
import PopGrowthMCMCChart
displayHeader :: FilePath -> Diagram B R2 -> IO ()
displayHeader fn =
mainRender ( DiagramOpts (Just 900) (Just 700) fn
, DiagramLoopOpts False Nothing 0
)
main :: IO ()
main = do
displayHeader "diagrams/AutoregressionVary1.png"
(diag "Predicted Flow at Kingston Bridge (Varying Parameters)"
(zip [0..] (V.toList $ V.map exp $ V.take 800 flows))
(zip [0..] (V.toList $ V.map exp $ V.take 800 preds)))
|
9bf58246e3b1ab33d208e3a255a36debb627bade2c6816ba16f33b383d62520d | serokell/ariadne | TxMetaStorageSpecs.hs | {-# LANGUAGE RankNTypes #-}
module TxMetaStorageSpecs (txMetaStorageSpecs) where
import Ariadne.Wallet.Cardano.Kernel.DB.TxMeta
import qualified Data.List as List
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Set as Set
import Data.Text.Buildable (build)
import qualified Pos.Core as Core
import Formatting (bprint)
import Serokell.Util.Text (listJsonIndent, pairF)
import Test.Hspec (shouldThrow)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Arbitrary, Gen, arbitrary, forAll, vectorOf)
import Test.QuickCheck.Monadic (assert, monadicIO, pick, run)
import Util.Buildable (ShowThroughBuild(..))
import Util.Buildable.Hspec
import Test.Pos.Core.Arbitrary ()
chunksOf :: Int -> [e] -> [[e]]
chunksOf i ls = map (take i) (buildCons (splitter ls))
where
splitter :: [e] -> ([e] -> a -> a) -> a -> a
splitter [] _ n = n
splitter l c n = l `c` splitter (drop i l) c n
buildCons :: ((a -> [a] -> [a]) -> [a] -> [a]) -> [a]
buildCons g = g (:) []
-- | Handy combinator which yields a fresh database to work with on each spec.
withTemporaryDb :: forall m a. (MonadIO m, MonadMask m) => (MetaDBHandle -> m a) -> m a
withTemporaryDb action = bracket acquire release action
where
acquire :: m MetaDBHandle
acquire = liftIO $ do
db <- openMetaDB ":memory:"
migrateMetaDB db
return db
release :: MetaDBHandle -> m ()
release = liftIO . closeMetaDB
-- | Generates two 'TxMeta' which are @almost@ identical, if not in the
-- arrangement of their inputs & outputs.
genSimilarTxMetas :: Gen (ShowThroughBuild TxMeta, ShowThroughBuild TxMeta)
genSimilarTxMetas = do
inputs <- uniqueElements 5
outputs <- uniqueElements 5
blueprint <- unSTB <$> genMeta
let t1 = blueprint & over txMetaInputs (const inputs)
. over txMetaOutputs (const outputs)
let t2 = blueprint & over txMetaInputs (const (NonEmpty.reverse inputs))
. over txMetaOutputs (const (NonEmpty.reverse outputs))
return (STB t1, STB t2)
-- | Synthetic @newtype@ used to generate unique inputs and outputs as part of
-- 'genMetas'. The reason why it is necessary is because the stock implementation
-- of 'Eq' for '(Core.Address, Core.Coin)' would of course declare two tuples
-- equal if their elements are.
-- However, this is too \"strong\" for our 'uniqueElements' generator, which
-- would consider these two unique:
--
( " 123 " , 10 )
( " 123 " , 0 )
--
-- This would of course break our persistent storage, because inserting "123"
-- twice would trigger the primary key uniqueness violation.
newtype TxEntry = TxEntry { getTxEntry :: (Core.Address, Core.Coin) }
instance Eq TxEntry where
(TxEntry (a1, _)) == (TxEntry (a2, _)) = a1 == a2
-- | This is a totally bonkers 'Ord' instance (as it doesn't really make sense
-- to order anything by an 'Address' value, but it's necessary for the sake
-- of the input and output generation. In particular, writing the following
-- will introduce a bug later on:
--
-- instance Ord TxEntry where
--     compare (TxEntry (_, c1)) (TxEntry (_, c2)) = compare c1 c2
--
-- This will speed up the tests considerably, but it comes with a fatal flaw:
-- later on, once generating unique inputs & outputs as part of 'uniqueElements',
-- we rely on a 'Set' (and thus an 'Ord' instance) to generate unique elements.
-- But the instance above will 'compare' the two 'Coin' values and in turn
-- piggyback on equality for Coins, essentially trashing the invariant we
-- describe above as the entire raison d'être for the 'TxEntry' type.
instance Ord TxEntry where
compare (TxEntry (a1, _)) (TxEntry (a2, _)) = compare a1 a2
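-- To illustrate the invariant (a GHCi sketch; 'someAddr' and 'Core.mkCoin' are
-- assumptions of the example, not definitions from this module):
--
-- > ghci> let e1 = TxEntry (someAddr, Core.mkCoin 10)
-- > ghci> let e2 = TxEntry (someAddr, Core.mkCoin 0)
-- > ghci> e1 == e2
-- > True
-- > ghci> Set.size (Set.fromList [e1, e2])
-- > 1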
instance Arbitrary TxEntry where
arbitrary = TxEntry <$> arbitrary
instance Buildable TxEntry where
build (TxEntry b) = bprint pairF b
instance Buildable (Int, TxEntry) where
build b = bprint pairF b
instance Buildable [TxEntry] where
build = bprint (listJsonIndent 4)
-- | Handy generator which makes sure we are generating 'TxMeta' which all
-- have distinct inputs and outputs.
genMetas :: Int -> Gen [ShowThroughBuild TxMeta]
genMetas size = do
metas <- map unSTB <$> vectorOf size genMeta
inputs <- chunksOf 3 . toList <$> uniqueElements (length metas * 3)
outputs <- chunksOf 3 . toList <$> uniqueElements (length metas * 3)
return $ map (STB . mkTx) (zip3 metas inputs outputs)
where
mkTx :: (TxMeta, [TxEntry], [TxEntry])
-> TxMeta
mkTx (tMeta, i, o) =
case liftM2 (,) (nonEmpty . map getTxEntry $ i) (nonEmpty . map getTxEntry $ o) of
Nothing -> error "mkTx: the impossible happened, invariant violated."
Just (inputs, outputs) ->
tMeta & over txMetaInputs (const inputs)
. over txMetaOutputs (const outputs)
-- | Generator for an arbitrary 'TxMeta' which uses 'uniqueElements' underneath to
-- generate the inputs and the outputs.
genMeta :: Gen (ShowThroughBuild TxMeta)
genMeta = do
meta <- TxMeta <$> arbitrary
<*> arbitrary
<*> (fmap getTxEntry <$> uniqueElements 10)
<*> (fmap getTxEntry <$> uniqueElements 10)
<*> arbitrary
<*> arbitrary
<*> arbitrary
pure (STB meta)
newtype TxMetaWrapper = TxMetaWrapper (ShowThroughBuild TxMeta) deriving Show
instance Arbitrary TxMetaWrapper where
arbitrary = TxMetaWrapper <$> genMeta
-- | Handy wrapper to be able to compare things with the 'isomorphicTo'
-- combinator, which ignores the different order of the inputs & outputs.
data DeepEqual = DeepEqual TxMeta
instance Eq DeepEqual where
(DeepEqual t1) == (DeepEqual t2) = t1 `exactlyEqualTo` t2
instance Buildable DeepEqual where
build (DeepEqual t) = build t
data Isomorphic = Isomorphic TxMeta
instance Eq Isomorphic where
(Isomorphic t1) == (Isomorphic t2) = t1 `isomorphicTo` t2
instance Buildable Isomorphic where
build (Isomorphic t) = build t
instance Buildable [Isomorphic] where
build ts = bprint (listJsonIndent 4) ts
sortByAmount :: SortDirection -> [Isomorphic] -> [Isomorphic]
sortByAmount direction = sortBy sortFn
where
withDir Ascending = identity
withDir Descending = flip
sortFn (Isomorphic a) (Isomorphic b) =
(withDir direction compare) (a ^. txMetaAmount) (b ^. txMetaAmount)
sortByCreationAt :: SortDirection -> [Isomorphic] -> [Isomorphic]
sortByCreationAt direction = sortBy sortFn
where
withDir Ascending = identity
withDir Descending = flip
sortFn (Isomorphic a) (Isomorphic b) =
(withDir direction compare) (a ^. txMetaCreationAt) (b ^. txMetaCreationAt)
hasDupes :: Ord a => [a] -> Bool
hasDupes xs = length (Set.fromList xs) /= List.length xs
-- | Specs which test the persistent storage and API provided by 'TxMeta'.
txMetaStorageSpecs :: Spec
txMetaStorageSpecs = do
describe "uniqueElements generator" $ do
it "generates unique inputs" $ monadicIO $ do
(inputs :: NonEmpty (ShowThroughBuild TxEntry)) <- pick (uniqueElements 30)
assert (not $ hasDupes . map unSTB . toList $ inputs)
describe "TxMeta equality" $ do
prop "should be reflexive" $ \(TxMetaWrapper blueprint) -> do
unSTB blueprint `exactlyEqualTo` unSTB (blueprint :: ShowThroughBuild TxMeta)
it "should be strict when needed"
$ forAll genSimilarTxMetas
$ \(STB t1, STB t2) -> not (t1 `exactlyEqualTo` t2)
it "isomorphicTo is more lenient"
$ forAll genSimilarTxMetas
$ \(STB t1, STB t2) -> t1 `isomorphicTo` t2
describe "TxMeta storage" $ do
it "can store a TxMeta and retrieve it back" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
void $ putTxMeta hdl testMeta
mbTx <- getTxMeta hdl (testMeta ^. txMetaId)
fmap DeepEqual mbTx `shouldBe` Just (DeepEqual testMeta)
it "yields Nothing when calling getTxMeta, if a TxMeta is not there" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
mbTx <- getTxMeta hdl (testMeta ^. txMetaId)
fmap DeepEqual mbTx `shouldBe` Nothing
it "inserting the same tx twice is a no-op" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
putTxMeta hdl testMeta `shouldReturn` ()
putTxMeta hdl testMeta `shouldReturn` ()
it "inserting two tx with the same tx, but different content is an error" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let meta1 = unSTB testMetaSTB
let meta2 = set txMetaIsOutgoing (not $ meta1 ^. txMetaIsOutgoing) meta1
putTxMeta hdl meta1 `shouldReturn` ()
putTxMeta hdl meta2 `shouldThrow`
(\(InvariantViolated (DuplicatedTransactionWithDifferentHash _)) -> True)
it "inserting multiple txs and later retrieving all of them works" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- getTxMetas hdl (Offset 0) (Limit 100) Nothing
map Isomorphic result `shouldMatchList` map Isomorphic metas
it "pagination correctly limit the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 10)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- getTxMetas hdl (Offset 0) (Limit 5) Nothing
length result `shouldBe` 5
it "pagination correctly sorts (ascending) the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- (getTxMetas hdl) (Offset 0) (Limit 10) (Just $ Sorting SortByAmount Ascending)
map Isomorphic result `shouldBe` sortByAmount Ascending (map Isomorphic metas)
it "pagination correctly sorts (descending) the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- (getTxMetas hdl) (Offset 0) (Limit 10) (Just $ Sorting SortByCreationAt Descending)
map Isomorphic result `shouldBe` sortByCreationAt Descending (map Isomorphic metas)
| null | https://raw.githubusercontent.com/serokell/ariadne/5f49ee53b6bbaf332cb6f110c75f7b971acdd452/ariadne/cardano/test/backend/TxMetaStorageSpecs.hs | haskell | # LANGUAGE RankNTypes #
| Handy combinator which yields a fresh database to work with on each spec.
arrangement of their inputs & outputs.
| Synthetic @newtype@ used to generate unique inputs and outputs as part of
'genMetas'. The reason why it is necessary is because the stock implementation
equal if their elements are.
However, this is too \"strong\" for our 'uniqueElements' generator, which
twice would trigger the primary key uniqueness violation.
to order anything by an 'Address' value, but it's necessary for the sake
of the input and output generation. In particular, writing the following
will introduce a bug later on:
This will speed up the tests considerably, but it comes with a fatal flaw:
later on, once generating unique inputs & outputs as part of 'uniqueElements',
we rely on a 'Set' (and thus an 'Ord' instance) to generate unique elements.
piggyback on equality for Coins, essentially trashing the invariant we
have distinct inputs and outptus.
generate the inputs and the outputs.
| Handy wrapper to be able to compare things with the 'isomorphicTo'
combinator, which ignores the different order of the inputs & outputs. |
module TxMetaStorageSpecs (txMetaStorageSpecs) where
import Ariadne.Wallet.Cardano.Kernel.DB.TxMeta
import qualified Data.List as List
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Set as Set
import Data.Text.Buildable (build)
import qualified Pos.Core as Core
import Formatting (bprint)
import Serokell.Util.Text (listJsonIndent, pairF)
import Test.Hspec (shouldThrow)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Arbitrary, Gen, arbitrary, forAll, vectorOf)
import Test.QuickCheck.Monadic (assert, monadicIO, pick, run)
import Util.Buildable (ShowThroughBuild(..))
import Util.Buildable.Hspec
import Test.Pos.Core.Arbitrary ()
chunksOf :: Int -> [e] -> [[e]]
chunksOf i ls = map (take i) (buildCons (splitter ls))
where
splitter :: [e] -> ([e] -> a -> a) -> a -> a
splitter [] _ n = n
splitter l c n = l `c` splitter (drop i l) c n
buildCons :: ((a -> [a] -> [a]) -> [a] -> [a]) -> [a]
buildCons g = g (:) []
withTemporaryDb :: forall m a. (MonadIO m, MonadMask m) => (MetaDBHandle -> m a) -> m a
withTemporaryDb action = bracket acquire release action
where
acquire :: m MetaDBHandle
acquire = liftIO $ do
db <- openMetaDB ":memory:"
migrateMetaDB db
return db
release :: MetaDBHandle -> m ()
release = liftIO . closeMetaDB
| Generates two ' TxMeta ' which are @almost@ identical , if not in the
genSimilarTxMetas :: Gen (ShowThroughBuild TxMeta, ShowThroughBuild TxMeta)
genSimilarTxMetas = do
inputs <- uniqueElements 5
outputs <- uniqueElements 5
blueprint <- unSTB <$> genMeta
let t1 = blueprint & over txMetaInputs (const inputs)
. over txMetaOutputs (const outputs)
let t2 = blueprint & over txMetaInputs (const (NonEmpty.reverse inputs))
. over txMetaOutputs (const (NonEmpty.reverse outputs))
return (STB t1, STB t2)
of ' Eq ' for ' ( Core . Address , Core . Coin ) ' would of course declare two tuples
would consider these two unique :
( " 123 " , 10 )
( " 123 " , 0 )
This would of course break our persistent storage , because inserting " 123 "
newtype TxEntry = TxEntry { getTxEntry :: (Core.Address, Core.Coin) }
instance Eq TxEntry where
(TxEntry (a1, _)) == (TxEntry (a2, _)) = a1 == a2
| This is a totally bonkers ' Ord ' instance ( as it does n't really make sense
instance where
compare ( ( _ , c1 ) ) ( ( _ , c2 ) ) = compare c1 c2
But the instance above will ' compare ' the two ' Coin ' values and in turn
describe above as the entire for the ' ' type .
instance Ord TxEntry where
compare (TxEntry (a1, _)) (TxEntry (a2, _)) = compare a1 a2
instance Arbitrary TxEntry where
arbitrary = TxEntry <$> arbitrary
instance Buildable TxEntry where
build (TxEntry b) = bprint pairF b
instance Buildable (Int, TxEntry) where
build b = bprint pairF b
instance Buildable [TxEntry] where
build = bprint (listJsonIndent 4)
| Handy generator which make sure we are generating ' TxMeta ' which all
genMetas :: Int -> Gen [ShowThroughBuild TxMeta]
genMetas size = do
metas <- map unSTB <$> vectorOf size genMeta
inputs <- chunksOf 3 . toList <$> uniqueElements (length metas * 3)
outputs <- chunksOf 3 . toList <$> uniqueElements (length metas * 3)
return $ map (STB . mkTx) (zip3 metas inputs outputs)
where
mkTx :: (TxMeta, [TxEntry], [TxEntry])
-> TxMeta
mkTx (tMeta, i, o) =
case liftM2 (,) (nonEmpty . map getTxEntry $ i) (nonEmpty . map getTxEntry $ o) of
Nothing -> error "mkTx: the impossible happened, invariant violated."
Just (inputs, outputs) ->
tMeta & over txMetaInputs (const inputs)
. over txMetaOutputs (const outputs)
| Generator for an arbitrary ' TxMeta ' which uses ' ' underneath to
genMeta :: Gen (ShowThroughBuild TxMeta)
genMeta = do
meta <- TxMeta <$> arbitrary
<*> arbitrary
<*> (fmap getTxEntry <$> uniqueElements 10)
<*> (fmap getTxEntry <$> uniqueElements 10)
<*> arbitrary
<*> arbitrary
<*> arbitrary
pure (STB meta)
newtype TxMetaWrapper = TxMetaWrapper (ShowThroughBuild TxMeta) deriving Show
instance Arbitrary TxMetaWrapper where
arbitrary = TxMetaWrapper <$> genMeta
data DeepEqual = DeepEqual TxMeta
instance Eq DeepEqual where
(DeepEqual t1) == (DeepEqual t2) = t1 `exactlyEqualTo` t2
instance Buildable DeepEqual where
build (DeepEqual t) = build t
data Isomorphic = Isomorphic TxMeta
instance Eq Isomorphic where
(Isomorphic t1) == (Isomorphic t2) = t1 `isomorphicTo` t2
instance Buildable Isomorphic where
build (Isomorphic t) = build t
instance Buildable [Isomorphic] where
build ts = bprint (listJsonIndent 4) ts
sortByAmount :: SortDirection -> [Isomorphic] -> [Isomorphic]
sortByAmount direction = sortBy sortFn
where
withDir Ascending = identity
withDir Descending = flip
sortFn (Isomorphic a) (Isomorphic b) =
(withDir direction compare) (a ^. txMetaAmount) (b ^. txMetaAmount)
sortByCreationAt :: SortDirection -> [Isomorphic] -> [Isomorphic]
sortByCreationAt direction = sortBy sortFn
where
withDir Ascending = identity
withDir Descending = flip
sortFn (Isomorphic a) (Isomorphic b) =
(withDir direction compare) (a ^. txMetaCreationAt) (b ^. txMetaCreationAt)
hasDupes :: Ord a => [a] -> Bool
hasDupes xs = length (Set.fromList xs) /= List.length xs
| Specs which tests the persistent storage and API provided by ' TxMeta ' .
txMetaStorageSpecs :: Spec
txMetaStorageSpecs = do
describe "uniqueElements generator" $ do
it "generates unique inputs" $ monadicIO $ do
(inputs :: NonEmpty (ShowThroughBuild TxEntry)) <- pick (uniqueElements 30)
assert (not $ hasDupes . map unSTB . toList $ inputs)
describe "TxMeta equality" $ do
prop "should be reflexive" $ \(TxMetaWrapper blueprint) -> do
unSTB blueprint `exactlyEqualTo` unSTB (blueprint :: ShowThroughBuild TxMeta)
it "should be strict when needed"
$ forAll genSimilarTxMetas
$ \(STB t1, STB t2) -> not (t1 `exactlyEqualTo` t2)
it "isomorphicTo is more lenient"
$ forAll genSimilarTxMetas
$ \(STB t1, STB t2) -> t1 `isomorphicTo` t2
describe "TxMeta storage" $ do
it "can store a TxMeta and retrieve it back" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
void $ putTxMeta hdl testMeta
mbTx <- getTxMeta hdl (testMeta ^. txMetaId)
fmap DeepEqual mbTx `shouldBe` Just (DeepEqual testMeta)
it "yields Nothing when calling getTxMeta, if a TxMeta is not there" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
mbTx <- getTxMeta hdl (testMeta ^. txMetaId)
fmap DeepEqual mbTx `shouldBe` Nothing
it "inserting the same tx twice is a no-op" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let testMeta = unSTB testMetaSTB
putTxMeta hdl testMeta `shouldReturn` ()
putTxMeta hdl testMeta `shouldReturn` ()
it "inserting two tx with the same tx, but different content is an error" $ monadicIO $ do
testMetaSTB <- pick genMeta
run $ withTemporaryDb $ \hdl -> do
let meta1 = unSTB testMetaSTB
let meta2 = set txMetaIsOutgoing (not $ meta1 ^. txMetaIsOutgoing) meta1
putTxMeta hdl meta1 `shouldReturn` ()
putTxMeta hdl meta2 `shouldThrow`
(\(InvariantViolated (DuplicatedTransactionWithDifferentHash _)) -> True)
it "inserting multiple txs and later retrieving all of them works" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- getTxMetas hdl (Offset 0) (Limit 100) Nothing
map Isomorphic result `shouldMatchList` map Isomorphic metas
it "pagination correctly limit the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 10)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- getTxMetas hdl (Offset 0) (Limit 5) Nothing
length result `shouldBe` 5
it "pagination correctly sorts (ascending) the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- (getTxMetas hdl) (Offset 0) (Limit 10) (Just $ Sorting SortByAmount Ascending)
map Isomorphic result `shouldBe` sortByAmount Ascending (map Isomorphic metas)
it "pagination correctly sorts (descending) the results" $ monadicIO $ do
testMetasSTB <- pick (genMetas 5)
run $ withTemporaryDb $ \hdl -> do
let metas = map unSTB testMetasSTB
forM_ metas (putTxMeta hdl)
result <- (getTxMetas hdl) (Offset 0) (Limit 10) (Just $ Sorting SortByCreationAt Descending)
map Isomorphic result `shouldBe` sortByCreationAt Descending (map Isomorphic metas)
|
c041548a47ba2ff832212bca91789eb65be8a4648c0354b7ea31768e171ae843 | processone/tsung | mochiweb_headers.erl | @author < >
2007 Mochi Media , Inc.
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the "Software"),
%% to deal in the Software without restriction, including without limitation
%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
%% and/or sell copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%% DEALINGS IN THE SOFTWARE.
%% @doc Case preserving (but case insensitive) HTTP Header dictionary.
-module(mochiweb_headers).
-author('').
-export([empty/0, from_list/1, insert/3, enter/3, get_value/2, lookup/2]).
-export([delete_any/2, get_primary_value/2, get_combined_value/2]).
-export([default/3, enter_from_list/2, default_from_list/2]).
-export([to_list/1, make/1]).
-export([from_binary/1]).
%% @type headers().
%% @type key() = atom() | binary() | string().
%% @type value() = atom() | binary() | string() | integer().
%% @spec empty() -> headers()
%% @doc Create an empty headers structure.
empty() ->
gb_trees:empty().
%% @spec make(headers() | [{key(), value()}]) -> headers()
%% @doc Construct a headers() from the given list.
make(L) when is_list(L) ->
from_list(L);
%% assume a non-list is already mochiweb_headers.
make(T) ->
T.
%% @spec from_binary(iolist()) -> headers()
%% @doc Transforms a raw HTTP header into a mochiweb headers structure.
%%
%% The given raw HTTP header can be one of the following:
%%
%% 1) A string or a binary representing a full HTTP header ending with
%% double CRLF.
%% Examples:
%% ```
" Content - Length : 47\r\nContent - Type : text / plain\r\n\r\n "
%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>'''
%%
%% 2) A list of binaries or strings where each element represents a raw
%% HTTP header line ending with a single CRLF.
%% Examples:
%% ```
[ < < " Content - Length : 47\r\n " > > , < < " Content - Type : text / plain\r\n " > > ]
[ " Content - Length : 47\r\n " , " Content - Type : text / plain\r\n " ]
[ " Content - Length : 47\r\n " , < < " Content - Type : text / plain\r\n " > > ] '' '
%%
from_binary(RawHttpHeader) when is_binary(RawHttpHeader) ->
from_binary(RawHttpHeader, []);
from_binary(RawHttpHeaderList) ->
from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])).
from_binary(RawHttpHeader, Acc) ->
case erlang:decode_packet(httph, RawHttpHeader, []) of
{ok, {http_header, _, H, _, V}, Rest} ->
from_binary(Rest, [{H, V} | Acc]);
_ ->
make(Acc)
end.
%% @spec from_list([{key(), value()}]) -> headers()
%% @doc Construct a headers() from the given list.
from_list(List) ->
lists:foldl(fun ({K, V}, T) -> insert(K, V, T) end, empty(), List).
%% @spec enter_from_list([{key(), value()}], headers()) -> headers()
%% @doc Insert pairs into the headers, replace any values for existing keys.
enter_from_list(List, T) ->
lists:foldl(fun ({K, V}, T1) -> enter(K, V, T1) end, T, List).
%% @spec default_from_list([{key(), value()}], headers()) -> headers()
%% @doc Insert pairs into the headers for keys that do not already exist.
default_from_list(List, T) ->
lists:foldl(fun ({K, V}, T1) -> default(K, V, T1) end, T, List).
%% @spec to_list(headers()) -> [{key(), string()}]
%% @doc Return the contents of the headers. The keys will be the exact key
%% that was first inserted (e.g. may be an atom or binary, case is
%% preserved).
to_list(T) ->
F = fun ({K, {array, L}}, Acc) ->
L1 = lists:reverse(L),
lists:foldl(fun (V, Acc1) -> [{K, V} | Acc1] end, Acc, L1);
(Pair, Acc) ->
[Pair | Acc]
end,
lists:reverse(lists:foldl(F, [], gb_trees:values(T))).
%% @spec get_value(key(), headers()) -> string() | undefined
%% @doc Return the value of the given header using a case insensitive search.
%% undefined will be returned for keys that are not present.
get_value(K, T) ->
case lookup(K, T) of
{value, {_, V}} ->
expand(V);
none ->
undefined
end.
%% @spec get_primary_value(key(), headers()) -> string() | undefined
%% @doc Return the value of the given header up to the first semicolon using
%% a case insensitive search. undefined will be returned for keys
%% that are not present.
get_primary_value(K, T) ->
case get_value(K, T) of
undefined ->
undefined;
V ->
lists:takewhile(fun (C) -> C =/= $; end, V)
end.
%% @spec get_combined_value(key(), headers()) -> string() | undefined
%% @doc Return the value from the given header using a case insensitive search.
%% If the value of the header is a comma-separated list where holds values
%% are all identical, the identical value will be returned.
%% undefined will be returned for keys that are not present or the
%% values in the list are not the same.
%%
%% NOTE: The process isn't designed for a general purpose. If you need
%% to access all values in the combined header, please refer to
%% '''tokenize_header_value/1'''.
%%
%% Section 4.2 of the RFC 2616 (HTTP 1.1) describes multiple message-header
%% fields with the same field-name may be present in a message if and only
%% if the entire field-value for that header field is defined as a
%% comma-separated list [i.e., #(values)].
get_combined_value(K, T) ->
case get_value(K, T) of
undefined ->
undefined;
V ->
case sets:to_list(sets:from_list(tokenize_header_value(V))) of
[Val] ->
Val;
_ ->
undefined
end
end.
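%% An illustrative expectation (header names and values invented for the sketch):
%%   H1 = mochiweb_headers:make([{"x-id", "7"}, {"x-id", "7"}]),
%%   "7" = mochiweb_headers:get_combined_value("x-id", H1),
%%   H2 = mochiweb_headers:make([{"x-id", "7"}, {"x-id", "8"}]),
%%   undefined = mochiweb_headers:get_combined_value("x-id", H2).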
%% @spec lookup(key(), headers()) -> {value, {key(), string()}} | none
%% @doc Return the case preserved key and value for the given header using
%% a case insensitive search. none will be returned for keys that are
%% not present.
lookup(K, T) ->
case gb_trees:lookup(normalize(K), T) of
{value, {K0, V}} ->
{value, {K0, expand(V)}};
none ->
none
end.
%% @spec default(key(), value(), headers()) -> headers()
%% @doc Insert the pair into the headers if it does not already exist.
default(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
try gb_trees:insert(K1, {K, V1}, T)
catch
error:{key_exists, _} ->
T
end.
%% @spec enter(key(), value(), headers()) -> headers()
%% @doc Insert the pair into the headers, replacing any pre-existing key.
enter(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
gb_trees:enter(K1, {K, V1}, T).
%% @spec insert(key(), value(), headers()) -> headers()
%% @doc Insert the pair into the headers, merging with any pre-existing key.
%% A merge is done with Value = V0 ++ ", " ++ V1.
insert(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
try gb_trees:insert(K1, {K, V1}, T)
catch
error:{key_exists, _} ->
{K0, V0} = gb_trees:get(K1, T),
V2 = merge(K1, V1, V0),
gb_trees:update(K1, {K0, V2}, T)
end.
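%% A quick sketch of how the three write operations differ (values invented):
%%   H0 = mochiweb_headers:make([{"x-k", "a"}]),
%%   "b"    = mochiweb_headers:get_value("x-k", mochiweb_headers:enter("x-k", "b", H0)),
%%   "a, b" = mochiweb_headers:get_value("x-k", mochiweb_headers:insert("x-k", "b", H0)),
%%   "a"    = mochiweb_headers:get_value("x-k", mochiweb_headers:default("x-k", "b", H0)).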
%% @spec delete_any(key(), headers()) -> headers()
%% @doc Delete the header corresponding to key if it is present.
delete_any(K, T) ->
K1 = normalize(K),
gb_trees:delete_any(K1, T).
%% Internal API
tokenize_header_value(undefined) ->
undefined;
tokenize_header_value(V) ->
reversed_tokens(trim_and_reverse(V, false), [], []).
trim_and_reverse([S | Rest], Reversed) when S=:=$ ; S=:=$\n; S=:=$\t ->
trim_and_reverse(Rest, Reversed);
trim_and_reverse(V, false) ->
trim_and_reverse(lists:reverse(V), true);
trim_and_reverse(V, true) ->
V.
reversed_tokens([], [], Acc) ->
Acc;
reversed_tokens([], Token, Acc) ->
[Token | Acc];
reversed_tokens("\"" ++ Rest, [], Acc) ->
case extract_quoted_string(Rest, []) of
{String, NewRest} ->
reversed_tokens(NewRest, [], [String | Acc]);
undefined ->
undefined
end;
reversed_tokens("\"" ++ _Rest, _Token, _Acc) ->
undefined;
reversed_tokens([C | Rest], [], Acc) when C=:=$ ;C=:=$\n;C=:=$\t;C=:=$, ->
reversed_tokens(Rest, [], Acc);
reversed_tokens([C | Rest], Token, Acc) when C=:=$ ;C=:=$\n;C=:=$\t;C=:=$, ->
reversed_tokens(Rest, [], [Token | Acc]);
reversed_tokens([C | Rest], Token, Acc) ->
reversed_tokens(Rest, [C | Token], Acc);
reversed_tokens(_, _, _) ->
    undefined.
extract_quoted_string([], _Acc) ->
undefined;
extract_quoted_string("\"\\" ++ Rest, Acc) ->
extract_quoted_string(Rest, "\"" ++ Acc);
extract_quoted_string("\"" ++ Rest, Acc) ->
{Acc, Rest};
extract_quoted_string([C | Rest], Acc) ->
extract_quoted_string(Rest, [C | Acc]).
expand({array, L}) ->
mochiweb_util:join(lists:reverse(L), ", ");
expand(V) ->
V.
merge("set-cookie", V1, {array, L}) ->
{array, [V1 | L]};
merge("set-cookie", V1, V0) ->
{array, [V1, V0]};
merge(_, V1, V0) ->
V0 ++ ", " ++ V1.
normalize(K) when is_list(K) ->
string:to_lower(K);
normalize(K) when is_atom(K) ->
normalize(atom_to_list(K));
normalize(K) when is_binary(K) ->
normalize(binary_to_list(K)).
any_to_list(V) when is_list(V) ->
V;
any_to_list(V) when is_atom(V) ->
atom_to_list(V);
any_to_list(V) when is_binary(V) ->
binary_to_list(V);
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
%%
%% Tests.
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
make_test() ->
Identity = make([{hdr, foo}]),
?assertEqual(
Identity,
make(Identity)).
enter_from_list_test() ->
H = make([{hdr, foo}]),
?assertEqual(
[{baz, "wibble"}, {hdr, "foo"}],
to_list(enter_from_list([{baz, wibble}], H))),
?assertEqual(
[{hdr, "bar"}],
to_list(enter_from_list([{hdr, bar}], H))),
ok.
default_from_list_test() ->
H = make([{hdr, foo}]),
?assertEqual(
[{baz, "wibble"}, {hdr, "foo"}],
to_list(default_from_list([{baz, wibble}], H))),
?assertEqual(
[{hdr, "foo"}],
to_list(default_from_list([{hdr, bar}], H))),
ok.
get_primary_value_test() ->
H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]),
?assertEqual(
"foo",
get_primary_value(hdr, H)),
?assertEqual(
undefined,
get_primary_value(bar, H)),
?assertEqual(
"wibble",
get_primary_value(<<"baz">>, H)),
ok.
get_combined_value_test() ->
H = make([{hdr, foo}, {baz, <<"wibble,taco">>}, {content_length, "123, 123"},
{test, " 123, 123, 123 , 123,123 "},
{test2, "456, 123, 123 , 123"},
{test3, "123"}, {test4, " 123, "}]),
?assertEqual(
"foo",
get_combined_value(hdr, H)),
?assertEqual(
undefined,
get_combined_value(bar, H)),
?assertEqual(
undefined,
get_combined_value(<<"baz">>, H)),
?assertEqual(
"123",
get_combined_value(<<"content_length">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test">>, H)),
?assertEqual(
undefined,
get_combined_value(<<"test2">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test3">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test4">>, H)),
ok.
set_cookie_test() ->
H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]),
?assertEqual(
[{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}],
to_list(H)),
ok.
headers_test() ->
H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
[{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
H1 = ?MODULE:insert(taco, grande, H),
[{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
[{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
[{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
"foo, bar" = ?MODULE:get_value("set-cookie", H3),
{value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
undefined = ?MODULE:get_value("shibby", H3),
none = ?MODULE:lookup("shibby", H3),
H4 = ?MODULE:insert("content-type",
"application/x-www-form-urlencoded; charset=utf8",
H3),
"application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
"content-type", H4),
H4 = ?MODULE:delete_any("nonexistent-header", H4),
H3 = ?MODULE:delete_any("content-type", H4),
HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
H_HB = ?MODULE:from_binary(HB),
H_HB = ?MODULE:from_binary(binary_to_list(HB)),
"47" = ?MODULE:get_value("Content-Length", H_HB),
"text/plain" = ?MODULE:get_value("Content-Type", H_HB),
L_H_HB = ?MODULE:to_list(H_HB),
2 = length(L_H_HB),
true = lists:member({'Content-Length', "47"}, L_H_HB),
true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
H_HL = ?MODULE:from_binary(HL),
H_HL = ?MODULE:from_binary(HL2),
H_HL = ?MODULE:from_binary(HL3),
"47" = ?MODULE:get_value("Content-Length", H_HL),
"text/plain" = ?MODULE:get_value("Content-Type", H_HL),
L_H_HL = ?MODULE:to_list(H_HL),
2 = length(L_H_HL),
true = lists:member({'Content-Length', "47"}, L_H_HL),
true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
[] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
[] = ?MODULE:to_list(?MODULE:from_binary("")),
[] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
ok.
tokenize_header_value_test() ->
?assertEqual(["a quote in a \"quote\"."],
tokenize_header_value("\"a quote in a \\\"quote\\\".\"")),
?assertEqual(["abc"], tokenize_header_value("abc")),
?assertEqual(["abc", "def"], tokenize_header_value("abc def")),
?assertEqual(["abc", "def"], tokenize_header_value("abc , def")),
?assertEqual(["abc", "def"], tokenize_header_value(",abc ,, def,,")),
?assertEqual(["abc def"], tokenize_header_value("\"abc def\" ")),
?assertEqual(["abc, def"], tokenize_header_value("\"abc, def\"")),
?assertEqual(["\\a\\$"], tokenize_header_value("\"\\a\\$\"")),
?assertEqual(["abc def", "foo, bar", "12345", ""],
tokenize_header_value("\"abc def\" \"foo, bar\" , 12345, \"\"")),
?assertEqual(undefined,
tokenize_header_value(undefined)),
?assertEqual(undefined,
tokenize_header_value("umatched quote\"")),
?assertEqual(undefined,
tokenize_header_value("\"unmatched quote")).
-endif.
| null | https://raw.githubusercontent.com/processone/tsung/e9babe2acfb4298e3b51bd56886561b052979884/src/lib/mochiweb_headers.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@doc Case preserving (but case insensitive) HTTP Header dictionary.
@type headers().
@type key() = atom() | binary() | string().
@type value() = atom() | binary() | string() | integer().
@spec empty() -> headers()
@doc Create an empty headers structure.
assume a non-list is already mochiweb_headers.
@spec from_binary(iolist()) -> headers()
@doc Transforms a raw HTTP header into a mochiweb headers structure.
The given raw HTTP header can be one of the following:
double CRLF.
Examples:
```
<<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>'''
HTTP header line ending with a single CRLF.
Examples:
```
@spec from_list([{key(), value()}]) -> headers()
@spec enter_from_list([{key(), value()}], headers()) -> headers()
@doc Insert pairs into the headers, replace any values for existing keys.
@spec default_from_list([{key(), value()}], headers()) -> headers()
@doc Insert pairs into the headers for keys that do not already exist.
@doc Return the contents of the headers. The keys will be the exact key
that was first inserted (e.g. may be an atom or binary, case is
preserved).
@spec get_value(key(), headers()) -> string() | undefined
@doc Return the value of the given header using a case insensitive search.
undefined will be returned for keys that are not present.
a case insensitive search. undefined will be returned for keys
that are not present.
@doc Return the value from the given header using a case insensitive search.
If the value of the header is a comma-separated list whose values
are all identical, that value will be returned.
undefined will be returned for keys that are not present or the
values in the list are not the same.
NOTE: The process isn't designed for a general purpose. If you need
to access all values in the combined header, please refer to
'''tokenize_header_value/1'''.
fields with the same field-name may be present in a message if and only
if the entire field-value for that header field is defined as a
comma-separated list [i.e., #(values)].
@doc Return the case preserved key and value for the given header using
a case insensitive search. none will be returned for keys that are
not present.
@spec default(key(), value(), headers()) -> headers()
@doc Insert the pair into the headers if it does not already exist.
@doc Insert the pair into the headers, replacing any pre-existing key.
@spec insert(key(), value(), headers()) -> headers()
@doc Insert the pair into the headers, merging with any pre-existing key.
A merge is done with Value = V0 ++ ", " ++ V1.
@doc Delete the header corresponding to key if it is present.
Internal API
Tests.
| @author < >
%% 2007 Mochi Media, Inc.
%% to deal in the Software without restriction, including without limitation
%% and/or sell copies of the Software, and to permit persons to whom the
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-module(mochiweb_headers).
-author('').
-export([empty/0, from_list/1, insert/3, enter/3, get_value/2, lookup/2]).
-export([delete_any/2, get_primary_value/2, get_combined_value/2]).
-export([default/3, enter_from_list/2, default_from_list/2]).
-export([to_list/1, make/1]).
-export([from_binary/1]).
empty() ->
gb_trees:empty().
%% @spec make(headers() | [{key(), value()}]) -> headers()
%% @doc Construct a headers() from the given list.
make(L) when is_list(L) ->
from_list(L);
make(T) ->
T.
%% 1) A string or a binary representing a full HTTP header ending with
%%    "Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n"
%% 2) A list of binaries or strings where each element represents a raw
%%    [<<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">>]
%%    ["Content-Length: 47\r\n", "Content-Type: text/plain\r\n"]
%%    ["Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">>]'''
from_binary(RawHttpHeader) when is_binary(RawHttpHeader) ->
from_binary(RawHttpHeader, []);
from_binary(RawHttpHeaderList) ->
from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])).
from_binary(RawHttpHeader, Acc) ->
case erlang:decode_packet(httph, RawHttpHeader, []) of
{ok, {http_header, _, H, _, V}, Rest} ->
from_binary(Rest, [{H, V} | Acc]);
_ ->
make(Acc)
end.
%% @doc Construct a headers() from the given list.
from_list(List) ->
lists:foldl(fun ({K, V}, T) -> insert(K, V, T) end, empty(), List).
enter_from_list(List, T) ->
lists:foldl(fun ({K, V}, T1) -> enter(K, V, T1) end, T, List).
default_from_list(List, T) ->
lists:foldl(fun ({K, V}, T1) -> default(K, V, T1) end, T, List).
%% @spec to_list(headers()) -> [{key(), string()}]
to_list(T) ->
F = fun ({K, {array, L}}, Acc) ->
L1 = lists:reverse(L),
lists:foldl(fun (V, Acc1) -> [{K, V} | Acc1] end, Acc, L1);
(Pair, Acc) ->
[Pair | Acc]
end,
lists:reverse(lists:foldl(F, [], gb_trees:values(T))).
get_value(K, T) ->
case lookup(K, T) of
{value, {_, V}} ->
expand(V);
none ->
undefined
end.
%% @spec get_primary_value(key(), headers()) -> string() | undefined
%% @doc Return the value of the given header up to the first semicolon using
get_primary_value(K, T) ->
case get_value(K, T) of
undefined ->
undefined;
V ->
lists:takewhile(fun (C) -> C =/= $; end, V)
end.
%% @spec get_combined_value(key(), headers()) -> string() | undefined
%% Section 4.2 of the RFC 2616 (HTTP 1.1) describes multiple message-header
get_combined_value(K, T) ->
case get_value(K, T) of
undefined ->
undefined;
V ->
case sets:to_list(sets:from_list(tokenize_header_value(V))) of
[Val] ->
Val;
_ ->
undefined
end
end.
%% @spec lookup(key(), headers()) -> {value, {key(), string()}} | none
lookup(K, T) ->
case gb_trees:lookup(normalize(K), T) of
{value, {K0, V}} ->
{value, {K0, expand(V)}};
none ->
none
end.
default(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
try gb_trees:insert(K1, {K, V1}, T)
catch
error:{key_exists, _} ->
T
end.
%% @spec enter(key(), value(), headers()) -> headers()
enter(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
gb_trees:enter(K1, {K, V1}, T).
insert(K, V, T) ->
K1 = normalize(K),
V1 = any_to_list(V),
try gb_trees:insert(K1, {K, V1}, T)
catch
error:{key_exists, _} ->
{K0, V0} = gb_trees:get(K1, T),
V2 = merge(K1, V1, V0),
gb_trees:update(K1, {K0, V2}, T)
end.
%% @spec delete_any(key(), headers()) -> headers()
delete_any(K, T) ->
K1 = normalize(K),
gb_trees:delete_any(K1, T).
tokenize_header_value(undefined) ->
undefined;
tokenize_header_value(V) ->
reversed_tokens(trim_and_reverse(V, false), [], []).
trim_and_reverse([S | Rest], Reversed) when S=:=$ ; S=:=$\n; S=:=$\t ->
trim_and_reverse(Rest, Reversed);
trim_and_reverse(V, false) ->
trim_and_reverse(lists:reverse(V), true);
trim_and_reverse(V, true) ->
V.
reversed_tokens([], [], Acc) ->
Acc;
reversed_tokens([], Token, Acc) ->
[Token | Acc];
reversed_tokens("\"" ++ Rest, [], Acc) ->
case extract_quoted_string(Rest, []) of
{String, NewRest} ->
reversed_tokens(NewRest, [], [String | Acc]);
undefined ->
undefined
end;
reversed_tokens("\"" ++ _Rest, _Token, _Acc) ->
undefined;
reversed_tokens([C | Rest], [], Acc) when C=:=$ ;C=:=$\n;C=:=$\t;C=:=$, ->
reversed_tokens(Rest, [], Acc);
reversed_tokens([C | Rest], Token, Acc) when C=:=$ ;C=:=$\n;C=:=$\t;C=:=$, ->
reversed_tokens(Rest, [], [Token | Acc]);
reversed_tokens([C | Rest], Token, Acc) ->
reversed_tokens(Rest, [C | Token], Acc);
reversed_tokens(_, _, _) ->
undefined.
extract_quoted_string([], _Acc) ->
undefined;
extract_quoted_string("\"\\" ++ Rest, Acc) ->
extract_quoted_string(Rest, "\"" ++ Acc);
extract_quoted_string("\"" ++ Rest, Acc) ->
{Acc, Rest};
extract_quoted_string([C | Rest], Acc) ->
extract_quoted_string(Rest, [C | Acc]).
expand({array, L}) ->
mochiweb_util:join(lists:reverse(L), ", ");
expand(V) ->
V.
merge("set-cookie", V1, {array, L}) ->
{array, [V1 | L]};
merge("set-cookie", V1, V0) ->
{array, [V1, V0]};
merge(_, V1, V0) ->
V0 ++ ", " ++ V1.
normalize(K) when is_list(K) ->
string:to_lower(K);
normalize(K) when is_atom(K) ->
normalize(atom_to_list(K));
normalize(K) when is_binary(K) ->
normalize(binary_to_list(K)).
any_to_list(V) when is_list(V) ->
V;
any_to_list(V) when is_atom(V) ->
atom_to_list(V);
any_to_list(V) when is_binary(V) ->
binary_to_list(V);
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
make_test() ->
Identity = make([{hdr, foo}]),
?assertEqual(
Identity,
make(Identity)).
enter_from_list_test() ->
H = make([{hdr, foo}]),
?assertEqual(
[{baz, "wibble"}, {hdr, "foo"}],
to_list(enter_from_list([{baz, wibble}], H))),
?assertEqual(
[{hdr, "bar"}],
to_list(enter_from_list([{hdr, bar}], H))),
ok.
default_from_list_test() ->
H = make([{hdr, foo}]),
?assertEqual(
[{baz, "wibble"}, {hdr, "foo"}],
to_list(default_from_list([{baz, wibble}], H))),
?assertEqual(
[{hdr, "foo"}],
to_list(default_from_list([{hdr, bar}], H))),
ok.
get_primary_value_test() ->
H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]),
?assertEqual(
"foo",
get_primary_value(hdr, H)),
?assertEqual(
undefined,
get_primary_value(bar, H)),
?assertEqual(
"wibble",
get_primary_value(<<"baz">>, H)),
ok.
get_combined_value_test() ->
H = make([{hdr, foo}, {baz, <<"wibble,taco">>}, {content_length, "123, 123"},
{test, " 123, 123, 123 , 123,123 "},
{test2, "456, 123, 123 , 123"},
{test3, "123"}, {test4, " 123, "}]),
?assertEqual(
"foo",
get_combined_value(hdr, H)),
?assertEqual(
undefined,
get_combined_value(bar, H)),
?assertEqual(
undefined,
get_combined_value(<<"baz">>, H)),
?assertEqual(
"123",
get_combined_value(<<"content_length">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test">>, H)),
?assertEqual(
undefined,
get_combined_value(<<"test2">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test3">>, H)),
?assertEqual(
"123",
get_combined_value(<<"test4">>, H)),
ok.
set_cookie_test() ->
H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]),
?assertEqual(
[{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}],
to_list(H)),
ok.
headers_test() ->
H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
[{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
H1 = ?MODULE:insert(taco, grande, H),
[{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
[{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
[{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
"foo, bar" = ?MODULE:get_value("set-cookie", H3),
{value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
undefined = ?MODULE:get_value("shibby", H3),
none = ?MODULE:lookup("shibby", H3),
H4 = ?MODULE:insert("content-type",
"application/x-www-form-urlencoded; charset=utf8",
H3),
"application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
"content-type", H4),
H4 = ?MODULE:delete_any("nonexistent-header", H4),
H3 = ?MODULE:delete_any("content-type", H4),
HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
H_HB = ?MODULE:from_binary(HB),
H_HB = ?MODULE:from_binary(binary_to_list(HB)),
"47" = ?MODULE:get_value("Content-Length", H_HB),
"text/plain" = ?MODULE:get_value("Content-Type", H_HB),
L_H_HB = ?MODULE:to_list(H_HB),
2 = length(L_H_HB),
true = lists:member({'Content-Length', "47"}, L_H_HB),
true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
H_HL = ?MODULE:from_binary(HL),
H_HL = ?MODULE:from_binary(HL2),
H_HL = ?MODULE:from_binary(HL3),
"47" = ?MODULE:get_value("Content-Length", H_HL),
"text/plain" = ?MODULE:get_value("Content-Type", H_HL),
L_H_HL = ?MODULE:to_list(H_HL),
2 = length(L_H_HL),
true = lists:member({'Content-Length', "47"}, L_H_HL),
true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
[] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
[] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
[] = ?MODULE:to_list(?MODULE:from_binary("")),
[] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
[] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
ok.
tokenize_header_value_test() ->
?assertEqual(["a quote in a \"quote\"."],
tokenize_header_value("\"a quote in a \\\"quote\\\".\"")),
?assertEqual(["abc"], tokenize_header_value("abc")),
?assertEqual(["abc", "def"], tokenize_header_value("abc def")),
?assertEqual(["abc", "def"], tokenize_header_value("abc , def")),
?assertEqual(["abc", "def"], tokenize_header_value(",abc ,, def,,")),
?assertEqual(["abc def"], tokenize_header_value("\"abc def\" ")),
?assertEqual(["abc, def"], tokenize_header_value("\"abc, def\"")),
?assertEqual(["\\a\\$"], tokenize_header_value("\"\\a\\$\"")),
?assertEqual(["abc def", "foo, bar", "12345", ""],
tokenize_header_value("\"abc def\" \"foo, bar\" , 12345, \"\"")),
?assertEqual(undefined,
tokenize_header_value(undefined)),
?assertEqual(undefined,
tokenize_header_value("umatched quote\"")),
?assertEqual(undefined,
tokenize_header_value("\"unmatched quote")).
-endif.
|
3949890ce5ded658c69f95f540b62f68a9dbde3c01982ba4fa12b8598ef8375a | theodormoroianu/SecondYearCourses | lab6-sol_20210115150000.hs | -- /
import Data.Char
import Data.List
import Test.QuickCheck
-- 1.
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
-- 2.
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 1 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + k `mod` l
-- 3.
makeKey :: Int -> [(Char, Char)]
makeKey n = zip alphabet (rotate n alphabet)
where
alphabet = ['A'..'Z']
-- 4.
lookUp :: Char -> [(Char, Char)] -> Char
lookUp c l = head $ [y | (x, y) <- l, x == c] ++ [c]
-- 5.
encipher :: Int -> Char -> Char
encipher n c = lookUp c (makeKey n)
-- 6.
normalize :: String -> String
normalize = map toUpper . filter isAlphaNum
-- 7.
encipherStr :: Int -> String -> String
encipherStr n = map (encipher n) . normalize
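-- Quick check (illustrative, added note): with key 3 each letter shifts by
-- three places, so
--   encipherStr 3 "Hello, world!" == "KHOORZRUOG"
-- and decipherStr 3 (defined in exercise 9 below) maps it back to "HELLOWORLD".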
-- 8.
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = map (\(x,y) -> (y,x))
-- 9.
decipher :: Int -> Char -> Char
decipher n c = lookUp c (reverseKey (makeKey n))
decipherStr :: Int -> String -> String
decipherStr n = map (decipher n)
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/99185b0e97119135e7301c2c7be0f07ae7258006/Haskell/l/.history/lab6/lab6-sol_20210115150000.hs | haskell | / |
import Data.Char
import Data.List
import Test.QuickCheck
-- 1.
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
-- 2.
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 1 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + k `mod` l
-- 3.
makeKey :: Int -> [(Char, Char)]
makeKey n = zip alphabet (rotate n alphabet)
where
alphabet = ['A'..'Z']
-- 4.
lookUp :: Char -> [(Char, Char)] -> Char
lookUp c l = head $ [y | (x, y) <- l, x == c] ++ [c]
-- 5.
encipher :: Int -> Char -> Char
encipher n c = lookUp c (makeKey n)
-- 6.
normalize :: String -> String
normalize = map toUpper . filter isAlphaNum
-- 7.
encipherStr :: Int -> String -> String
encipherStr n = map (encipher n) . normalize
-- 8.
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = map (\(x,y) -> (y,x))
-- 9.
decipher :: Int -> Char -> Char
decipher n c = lookUp c (reverseKey (makeKey n))
decipherStr :: Int -> String -> String
decipherStr n = map (decipher n)
|
302e1f12d2e80e9d13bdf34d46217af1ae181bc67a1c271c01e15d5e80053c32 | HunterYIboHu/htdp2-solution | ex477-arrangements.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex477-arrangements) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
; [List-of X] -> [List-of [List-of X]]
; creates a list of all rearrangements of the items in w
(define (arrangements w)
(cond
[(empty? w) '(())]
[else
(foldr (lambda (item others)
(local ((define without-item
(arrangements (remove item w)))
(define add-item-to-front
(map (lambda (a) (cons item a))
without-item)))
(append add-item-to-front others)))
'()
w)]))
(define (all-words-from-rat? w)
(and (member (explode "rat") w)
(member (explode "art") w)
(member (explode "tar") w)))
(check-satisfied (arrangements '("r" "a" "t"))
all-words-from-rat?)
;; Questions
;; Q1: What is a trivially solvable problem?
;; A1: when w is an empty list.
;;
;; Q2: How are trivial solutions solved?
;; A2: it returns '(()).
;;
;; Q3: How does the algorithm generate new problems that are
;; more easily solvable than the original one?
;; Is there one new problem that we generate or are there several?
;; A3: To generate a new problem, the algorithm cuts down the list of
;; [List-of X]; every time, the new problem deals with a smaller list.
;; We generate one new problem -- it is like the original one,
;; but deals with a smaller list.
;;
;; Q4: Is the solution of the given problem the same as the solution
;; of (one of) the new problems? Or, do we need to combine the
;; solutions to create a solution for the original problem?
;; And, if so, do we need anything from the original problem data?
;; A4: The new problem is the same kind of problem as the original one.
;; We do need the items from the original problem's input.
;;
;; Q5: Can we be sure the program terminates for any input?
;; A5: Yes! The program keeps cutting the length of the list, and when the
;; list is empty, the program will terminate.
;;
;; Q6: Does arrangements in figure 165 create the same lists as the
;; solution of Word Games, the Heart of the Problem?
;; A6: No. This version's result is:
;;(list
;; (list "r" "a" "t")
;; (list "r" "t" "a")
;; (list "a" "r" "t")
;; (list "a" "t" "r")
;; (list "t" "r" "a")
;; (list "t" "a" "r"))
;; And the original version's result is:
;;(list
;; (list "r" "a" "t")
;; (list "a" "r" "t")
;; (list "a" "t" "r")
;; (list "r" "t" "a")
;; (list "t" "r" "a")
;; (list "t" "a" "r"))
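;; Worked example (added for illustration; not part of the exercise text):
;; (arrangements '("a" "b"))
;; ==> (list (list "a" "b") (list "b" "a"))
;; Each item is moved to the front and consed onto every arrangement of the
;; remaining, smaller list, which is why the recursion terminates.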
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter5/Section29-algorithm-that-backtrack/ex477-arrangements.rkt | racket | about the language level of this file in a form that our tools can easily process.
[List-of X] -> [List-of [List-of X]]
creates a list of all rearrangements of the items in w
Questions
Q1: What is a trivially solvable problem?
A1: when w is an empty list.
Q2: How are trivial solutions solved?
A2: it returns '(()).
Q3: How does the algorithm generate new problems that are
more easily solvable than the original one?
A3: To generate new problem, the algorithm turn to cut the list of
[List-of X]; every time the new problem deal with a smaller list.
but deal with a smaller one.
Q4: Is the solution of the given problem the same as the solution
solutions to create a solution for the original problem?
And, if so, do we need anything from the original problem data?
A4: The new problem is the same kind problem as the origin one.
We do need the items on the given input of the origin problem.
A5: Yes! The program would cut the length of list, and when the list
is empty, the program will terminate.
(list
(list "r" "a" "t")
(list "r" "t" "a")
(list "a" "r" "t")
(list "a" "t" "r")
(list "t" "r" "a")
(list "t" "a" "r"))
And the original version's result is:
(list
(list "r" "a" "t")
(list "a" "r" "t")
(list "a" "t" "r")
(list "r" "t" "a")
(list "t" "r" "a")
(list "t" "a" "r")) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex477-arrangements) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define (arrangements w)
(cond
[(empty? w) '(())]
[else
(foldr (lambda (item others)
(local ((define without-item
(arrangements (remove item w)))
(define add-item-to-front
(map (lambda (a) (cons item a))
without-item)))
(append add-item-to-front others)))
'()
w)]))
(define (all-words-from-rat? w)
(and (member (explode "rat") w)
(member (explode "art") w)
(member (explode "tar") w)))
(check-satisfied (arrangements '("r" "a" "t"))
all-words-from-rat?)
Is there one new problem that we generate or are there several ?
We generate 1 new problem -- the problem is like the origin one ,
of ( one of ) the new problems ? Or , do we need to combine the
Q5 : the program would terminate for any input ?
Q6 : Does arrangements in figure 165 create the same lists as the
solution of Word Games , the Heart of the Problem ?
A6 : No . This version 's result is :
|
c6598f216b15509144202030de2f6cb7289eacd39ac925fa8c43dca665cc2a10 | turtl/api | app.lisp | (in-package :turtl)
;; this is responsible for checking user auth
;; TODO: if this ever does MORE than just check auth, be sure to split into
;; multiple functions
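;; Rough shape of the request this hook expects (illustrative only):
;;   Authorization: Basic base64("user:auth-key")
;; The code strips the "Basic " prefix, base64-decodes the rest, and keeps
;; everything after the first #\: as the auth key handed to check-auth.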
(add-hook :pre-route
(lambda (req res)
(with-promise (res rej)
(let* ((auth (get-header (request-headers req) :authorization))
(path (quri:uri-path (request-uri req)))
(method (request-method req))
(auth-fail-fn (lambda ()
(let ((err (make-instance 'auth-failed :msg "Authentication failed."))
;; random wait time (0-2ms) to prevent timing attacks on auth
(rand-wait (/ (secure-random:number 2000000) 100000000d0)))
(as:delay
(lambda ()
(send-response res
:status (error-code err)
:headers '(:content-type "application/json")
:body (error-json err))
(rej err))
:time rand-wait)))))
(if (or (is-public-action method path)
(eq (request-method req) :options))
;; this is a signup or file serve. let it fly with no auth
(res)
;; not a signup, test the auth...
(if auth
(let* ((auth (subseq auth 6))
(auth (cl-base64:base64-string-to-string auth))
(split-pos (position #\: auth))
(auth-key (if split-pos
(subseq auth (1+ split-pos))
nil)))
(catch-errors (res)
(alet* ((user (check-auth auth-key)))
(if user
(progn
(setf (request-data req) user)
(res))
(funcall auth-fail-fn)))))
(funcall auth-fail-fn))))))
:turtl-auth)
(add-hook :response-started
(lambda (res req &rest _)
(declare (ignore _))
(when *enable-hsts-header*
(setf (getf (response-headers res) :strict-transport-security)
(format nil "max-age=~a" *enable-hsts-header*)))
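;; e.g. with *enable-hsts-header* bound to 31536000 (an assumed value) this
;; sends "Strict-Transport-Security: max-age=31536000".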
;; set up CORS junk. generally, we only allow it if it comes from the FF
;; extension, which uses resource:// URLs
(let* ((req-headers (request-headers req))
(origin (get-header req-headers :origin)))
;; TODO: figure out a better CORS policy
(when (or t
(and origin (< 11 (length origin)) (string= (subseq origin 0 11) "resource://")))
(setf (getf (response-headers res) :access-control-allow-origin) *enabled-cors-resources*
(getf (response-headers res) :access-control-allow-methods) "GET, POST"
(getf (response-headers res) :access-control-allow-headers) (get-header (request-headers req) :access-control-request-headers)))))
:post-headers)
| null | https://raw.githubusercontent.com/turtl/api/20ab4cc91128921300913b885eb1e201a5e0fc3f/controllers/app.lisp | lisp | this is responsible for checking user auth
TODO: if this ever does MORE than just check auth, be sure to split into
multiple functions
random wait time (0-2ms) to prevent timing attacks on auth
this is a signup or file serve. let it fly with no auth
not a signup, test the auth...
extension, which uses resource:// URLs
TODO: figure out a better CORS policy | (in-package :turtl)
(add-hook :pre-route
(lambda (req res)
(with-promise (res rej)
(let* ((auth (get-header (request-headers req) :authorization))
(path (quri:uri-path (request-uri req)))
(method (request-method req))
(auth-fail-fn (lambda ()
(let ((err (make-instance 'auth-failed :msg "Authentication failed."))
(rand-wait (/ (secure-random:number 2000000) 100000000d0)))
(as:delay
(lambda ()
(send-response res
:status (error-code err)
:headers '(:content-type "application/json")
:body (error-json err))
(rej err))
:time rand-wait)))))
(if (or (is-public-action method path)
(eq (request-method req) :options))
(res)
(if auth
(let* ((auth (subseq auth 6))
(auth (cl-base64:base64-string-to-string auth))
(split-pos (position #\: auth))
(auth-key (if split-pos
(subseq auth (1+ split-pos))
nil)))
(catch-errors (res)
(alet* ((user (check-auth auth-key)))
(if user
(progn
(setf (request-data req) user)
(res))
(funcall auth-fail-fn)))))
(funcall auth-fail-fn))))))
:turtl-auth)
(add-hook :response-started
(lambda (res req &rest _)
(declare (ignore _))
(when *enable-hsts-header*
(setf (getf (response-headers res) :strict-transport-security)
(format nil "max-age=~a" *enable-hsts-header*)))
;; set up CORS junk. generally, we only allow it if it comes from the FF
(let* ((req-headers (request-headers req))
(origin (get-header req-headers :origin)))
(when (or t
(and origin (< 11 (length origin)) (string= (subseq origin 0 11) "resource://")))
(setf (getf (response-headers res) :access-control-allow-origin) *enabled-cors-resources*
(getf (response-headers res) :access-control-allow-methods) "GET, POST"
(getf (response-headers res) :access-control-allow-headers) (get-header (request-headers req) :access-control-request-headers)))))
:post-headers)
|
72bc72a2fb16a92d5ca73bf8c5ce3ae33c45c5decb15da91b59103c5ed69cff0 | Spivoxity/obc-3 | data.mli |
* data.mli
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006 - -2016 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
* data.mli
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006--2016 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*)
(* dynvalue -- type of runtime values *)
type dynvalue
val type_of : dynvalue -> Dict.otype
val void_value : dynvalue
(* def_value -- get value of a definition *)
val def_value : int32 -> Dict.def -> dynvalue
val null_pointer : dynvalue -> bool
val deref : dynvalue -> dynvalue
val subscript : dynvalue -> int -> dynvalue
val select : dynvalue -> Dict.def -> dynvalue
(* fDynVal -- format a dynamic value *)
val fDynVal : dynvalue -> Print.arg
(* fLongVal -- format a dynamic value, printing its fields if a record *)
val fLongVal : dynvalue -> Print.arg
(* fType -- format a type *)
val fType : Dict.otype -> Print.arg
val fFrame : Binary.regs -> Print.arg
| null | https://raw.githubusercontent.com/Spivoxity/obc-3/9e5094df8382ac5dd25ff08768277be6bd71a4ae/debugger/data.mli | ocaml | dynvalue -- type of runtime values
def_value -- get value of a definition
fDynVal -- format a dynamic value
fLongVal -- format a dynamic value, printing its fields if a record |
* data.mli
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006 - -2016 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
* data.mli
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006--2016 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*)
type dynvalue
val type_of : dynvalue -> Dict.otype
val void_value : dynvalue
val def_value : int32 -> Dict.def -> dynvalue
val null_pointer : dynvalue -> bool
val deref : dynvalue -> dynvalue
val subscript : dynvalue -> int -> dynvalue
val select : dynvalue -> Dict.def -> dynvalue
val fDynVal : dynvalue -> Print.arg
val fLongVal : dynvalue -> Print.arg
(* fType -- format a type *)
val fType : Dict.otype -> Print.arg
val fFrame : Binary.regs -> Print.arg
|
9e8c2d71007c991bc8ca2136bb05f56e43e6359ca7d02b885688157df70dfe3e | lisp/de.setf.xml | namespace.lisp | -*- Mode : lisp ; Syntax : ansi - common - lisp ; Base : 10 ; Package : xml - query - data - model ; -*-
(in-package :xml-query-data-model)
(setq xml-query-data-model:*namespace*
(xml-query-data-model:defnamespace "#"
(:use)
(:nicknames)
(:export
"Access"
"Analysis"
"Archive"
"Array"
"ASCII"
"BigEndian"
"Binary"
"BinaryMask"
"ByteOrder"
"Campaign"
"Cell"
"Character"
"Collection"
"Column"
"Comment"
"Compression"
"CoordinateTransformation"
"Data"
"DataMining"
"DataModel"
"DataProduct"
"DataService"
"Dataset"
"DatasetProperty"
"DataStructure"
"Delimiter"
"Dimension"
"Discovery"
"DODS"
"Ensemble"
"Experiment"
"Field"
"File"
"FileFormat"
"FillValue"
"FormatConversion"
"GeoTIFF"
"Grib"
"Grid"
"Gzip"
"HDFEOS"
"Header"
"If"
"Image"
"Increment"
"Input"
"InterfaceProtocol"
"Interpolation"
"Investigation"
"L1"
"L2"
"L3"
"L4"
"Layer"
"Level1"
"Level2"
"Level3"
"Level4"
"LittleEndian"
"Map"
"MaximumValid"
"Metadata"
"MinimumValid"
"MissingValue"
"NearestNeighbor"
"NetCDF"
"Observation"
"Offset"
"OPeNDAP"
"Output"
"Parameter"
"ParameterSubset"
"Plot"
"ProcessingLevel"
"Profile"
"Project"
"Provenance"
"Raster"
"ReducedGrid"
"Reformat"
"Regridding"
"Resolution"
"Resource"
"Row"
"ScaleFactor"
"Series"
"Service"
"Simulation"
"SpatialSubset"
"StratigraphicSequence"
"Structure"
"Subset"
"Survey"
"Swath"
"Swath_2D"
"Swath_3D"
"SynopticMap"
"TemporalSubset"
"TimeStep"
"Trajectory"
"Value"
"Variable"
"Vector"
"Version"
"Visualization"
"WCS"
"WebCoverageServer"
"WebFeatureServer"
"WebMapServer"
"WFS"
"WMS"
"Z")
(:documentation nil)))
(let ((xml-query-data-model::p
(or (find-package "#")
(make-package "#"
:use
nil
:nicknames
'nil))))
(dolist (xml-query-data-model::s
'("Access" "Analysis" "Archive" "Array" "ASCII" "BigEndian" "Binary"
"BinaryMask" "ByteOrder" "Campaign" "Cell" "Character"
"Collection" "Column" "Comment" "Compression"
"CoordinateTransformation" "Data" "DataMining" "DataModel"
"DataProduct" "DataService" "Dataset" "DatasetProperty"
"DataStructure" "Delimiter" "Dimension" "Discovery" "DODS"
"Ensemble" "Experiment" "Field" "File" "FileFormat" "FillValue"
"FormatConversion" "GeoTIFF" "Grib" "Grid" "Gzip" "HDFEOS"
"Header" "If" "Image" "Increment" "Input" "InterfaceProtocol"
"Interpolation" "Investigation" "L1" "L2" "L3" "L4" "Layer"
"Level1" "Level2" "Level3" "Level4" "LittleEndian" "Map"
"MaximumValid" "Metadata" "MinimumValid" "MissingValue"
"NearestNeighbor" "NetCDF" "Observation" "Offset" "OPeNDAP"
"Output" "Parameter" "ParameterSubset" "Plot" "ProcessingLevel"
"Profile" "Project" "Provenance" "Raster" "ReducedGrid" "Reformat"
"Regridding" "Resolution" "Resource" "Row" "ScaleFactor" "Series"
"Service" "Simulation" "SpatialSubset" "StratigraphicSequence"
"Structure" "Subset" "Survey" "Swath" "Swath_2D" "Swath_3D"
"SynopticMap" "TemporalSubset" "TimeStep" "Trajectory" "Value"
"Variable" "Vector" "Version" "Visualization" "WCS"
"WebCoverageServer" "WebFeatureServer" "WebMapServer" "WFS" "WMS"
"Z"))
(export (intern xml-query-data-model::s xml-query-data-model::p)
xml-query-data-model::p)))
;;; (xqdm:find-namespace "#" :if-does-not-exist :load)
| null | https://raw.githubusercontent.com/lisp/de.setf.xml/827681c969342096c3b95735d84b447befa69fa6/namespaces/sweet-jpl-nasa-gov/ontology/data-owl/namespace.lisp | lisp | Syntax : ansi - common - lisp ; Base : 10 ; Package : xml - query - data - model ; -*-
(xqdm:find-namespace "#" :if-does-not-exist :load) |
(in-package :xml-query-data-model)
(setq xml-query-data-model:*namespace*
(xml-query-data-model:defnamespace "#"
(:use)
(:nicknames)
(:export
"Access"
"Analysis"
"Archive"
"Array"
"ASCII"
"BigEndian"
"Binary"
"BinaryMask"
"ByteOrder"
"Campaign"
"Cell"
"Character"
"Collection"
"Column"
"Comment"
"Compression"
"CoordinateTransformation"
"Data"
"DataMining"
"DataModel"
"DataProduct"
"DataService"
"Dataset"
"DatasetProperty"
"DataStructure"
"Delimiter"
"Dimension"
"Discovery"
"DODS"
"Ensemble"
"Experiment"
"Field"
"File"
"FileFormat"
"FillValue"
"FormatConversion"
"GeoTIFF"
"Grib"
"Grid"
"Gzip"
"HDFEOS"
"Header"
"If"
"Image"
"Increment"
"Input"
"InterfaceProtocol"
"Interpolation"
"Investigation"
"L1"
"L2"
"L3"
"L4"
"Layer"
"Level1"
"Level2"
"Level3"
"Level4"
"LittleEndian"
"Map"
"MaximumValid"
"Metadata"
"MinimumValid"
"MissingValue"
"NearestNeighbor"
"NetCDF"
"Observation"
"Offset"
"OPeNDAP"
"Output"
"Parameter"
"ParameterSubset"
"Plot"
"ProcessingLevel"
"Profile"
"Project"
"Provenance"
"Raster"
"ReducedGrid"
"Reformat"
"Regridding"
"Resolution"
"Resource"
"Row"
"ScaleFactor"
"Series"
"Service"
"Simulation"
"SpatialSubset"
"StratigraphicSequence"
"Structure"
"Subset"
"Survey"
"Swath"
"Swath_2D"
"Swath_3D"
"SynopticMap"
"TemporalSubset"
"TimeStep"
"Trajectory"
"Value"
"Variable"
"Vector"
"Version"
"Visualization"
"WCS"
"WebCoverageServer"
"WebFeatureServer"
"WebMapServer"
"WFS"
"WMS"
"Z")
(:documentation nil)))
(let ((xml-query-data-model::p
(or (find-package "#")
(make-package "#"
:use
nil
:nicknames
'nil))))
(dolist (xml-query-data-model::s
'("Access" "Analysis" "Archive" "Array" "ASCII" "BigEndian" "Binary"
"BinaryMask" "ByteOrder" "Campaign" "Cell" "Character"
"Collection" "Column" "Comment" "Compression"
"CoordinateTransformation" "Data" "DataMining" "DataModel"
"DataProduct" "DataService" "Dataset" "DatasetProperty"
"DataStructure" "Delimiter" "Dimension" "Discovery" "DODS"
"Ensemble" "Experiment" "Field" "File" "FileFormat" "FillValue"
"FormatConversion" "GeoTIFF" "Grib" "Grid" "Gzip" "HDFEOS"
"Header" "If" "Image" "Increment" "Input" "InterfaceProtocol"
"Interpolation" "Investigation" "L1" "L2" "L3" "L4" "Layer"
"Level1" "Level2" "Level3" "Level4" "LittleEndian" "Map"
"MaximumValid" "Metadata" "MinimumValid" "MissingValue"
"NearestNeighbor" "NetCDF" "Observation" "Offset" "OPeNDAP"
"Output" "Parameter" "ParameterSubset" "Plot" "ProcessingLevel"
"Profile" "Project" "Provenance" "Raster" "ReducedGrid" "Reformat"
"Regridding" "Resolution" "Resource" "Row" "ScaleFactor" "Series"
"Service" "Simulation" "SpatialSubset" "StratigraphicSequence"
"Structure" "Subset" "Survey" "Swath" "Swath_2D" "Swath_3D"
"SynopticMap" "TemporalSubset" "TimeStep" "Trajectory" "Value"
"Variable" "Vector" "Version" "Visualization" "WCS"
"WebCoverageServer" "WebFeatureServer" "WebMapServer" "WFS" "WMS"
"Z"))
(export (intern xml-query-data-model::s xml-query-data-model::p)
xml-query-data-model::p)))
|
2fe0857ddd7119e3067c3444174c7c36d1f9eaae98becbc01452846af74fd1a2 | oscoin/oscoin | Options.hs | # LANGUAGE UndecidableInstances #
module Oscoin.P2P.Disco.Options
( Options(..)
, discoParser
, discoOpts
, renderDiscoOpts
, CanRenderNetwork
, OptNetwork(..)
, showOptNetwork
, evalOptions
, evalYesNo
)
where
import Oscoin.Prelude hiding (option)
import qualified Oscoin.Configuration as Global
import Oscoin.Crypto.PubKey (PublicKey)
import Oscoin.P2P.Types
( Network(..)
, SeedInfo
, pattern Somenet
, randomNetwork
, readBootstrapInfo
, readNetwork
, renderNetwork
, showBootstrapInfo
)
import qualified Data.Text as T
import qualified Formatting as F
import Network.Socket (HostName, PortNumber)
import Options.Applicative
import Options.Applicative.Help
(paragraph, stringChunk, unChunk, vcatChunks, vsepChunks)
import System.Console.Option (Opt(Flag, Opt))
import System.IO (hFlush, stdout)
import System.Random.SplitMix (newSMGen)
import Text.Show (Show(..))
data Options crypto network = Options
{ optNetwork :: network
, optSeeds :: [SeedInfo crypto]
, optSDDomains :: [HostName]
, optEnableMDns :: Bool
, optNameserver :: Maybe (HostName, PortNumber) -- only for testing currently
} deriving (Generic)
deriving instance (Eq (PublicKey c), Eq n) => Eq (Options c n)
deriving instance (Show (PublicKey c), Show n) => Show (Options c n)
data YesNo = Yes | No
data OptNetwork =
Confirm Text (YesNo -> Either String Network)
| NoConfirm Network
| Random
-- nb. Eq + Show for testing
instance Eq OptNetwork where
(Confirm a _) == (Confirm b _) = a == b
(NoConfirm a ) == (NoConfirm b ) = a == b
Random == Random = True
_ == _ = False
instance Show OptNetwork where
show = showOptNetwork
showOptNetwork :: OptNetwork -> String
showOptNetwork (Confirm s _) = toS $ "`" <> s <> "` (confirmation required)"
showOptNetwork (NoConfirm n) = toS $ renderNetwork n
showOptNetwork Random = "randomly generated"
optNetworkReader :: ReadM OptNetwork
optNetworkReader = eitherReader $ map mkOpt . readNetwork
where
mkOpt = \case
Mainnet -> Confirm (renderNetwork Mainnet) $ evalYesNo Mainnet
Testnet -> Confirm (renderNetwork Testnet) $ evalYesNo Testnet
Devnet -> NoConfirm Devnet
sn@Somenet{} -> NoConfirm sn
evalYesNo :: a -> YesNo -> Either String a
evalYesNo x = \case
Yes -> pure x
No -> Left "Cancelled"
evalOptions :: Options c OptNetwork -> IO (Either String (Options c Network))
evalOptions opt =
map (\n -> opt { optNetwork = n })
<$> evalOptNetwork (optNetwork opt)
-- | Interactively obtain a 'YesNo' value.
promptYesNo :: Text -> IO YesNo
promptYesNo msg = prompt
where
prompt = do
putStr msg *> hFlush stdout
l <- T.toLower <$> getLine
case T.uncons l of
Just ('y',_) -> pure Yes
Just ('n',_) -> pure No
_ -> putStrLn (mempty @Text) *> hFlush stdout *> prompt
evalOptNetwork :: OptNetwork -> IO (Either String Network)
evalOptNetwork (NoConfirm n) = pure $ Right n
evalOptNetwork Random = pure . randomNetwork <$> newSMGen
evalOptNetwork (Confirm s f) = f <$> promptYesNo msg
where
msg = "Are you sure you want to join network `" <> s <> "` [yes/no]:"
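-- Illustrative flow (hypothetical session, added note): evalOptNetwork applied
-- to Confirm "mainnet" (evalYesNo Mainnet) prints the prompt above and returns
-- Right Mainnet on a "y..." answer, or Left "Cancelled" otherwise.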
discoParser :: Parser (Options c OptNetwork)
discoParser = Options
<$> option optNetworkReader
( long "network"
<> help "The name of the overlay network to join"
<> metavar
(intercalate
"|" ("STRING" : map (toS . Global.renderNetwork) Global.allNetworks))
<> value Random
<> showDefaultWith showOptNetwork
)
<*> many
( option (eitherReader readBootstrapInfo)
( long "seed"
<> helpDoc
( unChunk $ vsepChunks
[ paragraph "Zero or more gossip seed nodes to connect to"
, paragraph "If HOST is an IPv6 address, it must be enclosed in \
\square brackets to delimit it from the portnumber."
, paragraph "If HOST is a domain name, all IPv4 and IPv6 \
\addresses bound to it will be considered as \
\distinct peer addresses."
, paragraph "Examples:"
, vcatChunks $ map stringChunk
[ "--seed=\"[2001:db8::01]:6942\""
, "--seed=\"127.0.0.1:6942\""
, "--seed=testnet.oscoin.io:6942\""
]
]
)
<> metavar "HOST:PORT"
)
)
<*> many
( option str
( long "sd-domain"
<> helpDoc
( unChunk $ vsepChunks
[ paragraph "Zero or more search domains to query for SRV records"
, paragraph "Examples:"
, vcatChunks $ map stringChunk
[ "--sd-domain=svc.cluster.local"
, "--sd-domain=oscoin.io"
, "--sd-domain=monadic.xyz"
]
]
)
<> metavar "DOMAIN NAME"
)
)
<*> switch
( long "enable-mdns"
<> help "Enable mDNS discovery"
)
<*> pure Nothing
class CanRenderNetwork a where
renderNetwork' :: a -> Maybe Text
instance CanRenderNetwork OptNetwork where
renderNetwork' = \case
Confirm n _ -> pure n
NoConfirm n -> pure $ renderNetwork n
_ -> Nothing
instance CanRenderNetwork Network where
renderNetwork' = pure . renderNetwork
discoOpts :: CanRenderNetwork n => Options c n -> [Opt Text]
discoOpts
(Options
optNetwork
optSeeds
optSDDomains
optEnableMDns
_optNameserver) = concat
[ fromMaybe [] . map (pure . Opt "network") . renderNetwork' $ optNetwork
, map (Opt "seed" . toS . showBootstrapInfo) optSeeds
, map (Opt "sd-domain" . toS) optSDDomains
, bool [] (pure (Flag "enable-mdns")) optEnableMDns
]
renderDiscoOpts :: Options c OptNetwork -> [Text]
renderDiscoOpts = map (F.sformat F.build) . discoOpts
| null | https://raw.githubusercontent.com/oscoin/oscoin/2eb5652c9999dd0f30c70b3ba6b638156c74cdb1/src/Oscoin/P2P/Disco/Options.hs | haskell | only for testing currently
nb. Eq + Show for testing | # LANGUAGE UndecidableInstances #
module Oscoin.P2P.Disco.Options
( Options(..)
, discoParser
, discoOpts
, renderDiscoOpts
, CanRenderNetwork
, OptNetwork(..)
, showOptNetwork
, evalOptions
, evalYesNo
)
where
import Oscoin.Prelude hiding (option)
import qualified Oscoin.Configuration as Global
import Oscoin.Crypto.PubKey (PublicKey)
import Oscoin.P2P.Types
( Network(..)
, SeedInfo
, pattern Somenet
, randomNetwork
, readBootstrapInfo
, readNetwork
, renderNetwork
, showBootstrapInfo
)
import qualified Data.Text as T
import qualified Formatting as F
import Network.Socket (HostName, PortNumber)
import Options.Applicative
import Options.Applicative.Help
(paragraph, stringChunk, unChunk, vcatChunks, vsepChunks)
import System.Console.Option (Opt(Flag, Opt))
import System.IO (hFlush, stdout)
import System.Random.SplitMix (newSMGen)
import Text.Show (Show(..))
data Options crypto network = Options
{ optNetwork :: network
, optSeeds :: [SeedInfo crypto]
, optSDDomains :: [HostName]
, optEnableMDns :: Bool
, optNameserver :: Maybe (HostName, PortNumber)
} deriving (Generic)
deriving instance (Eq (PublicKey c), Eq n) => Eq (Options c n)
deriving instance (Show (PublicKey c), Show n) => Show (Options c n)
data YesNo = Yes | No
data OptNetwork =
Confirm Text (YesNo -> Either String Network)
| NoConfirm Network
| Random
instance Eq OptNetwork where
(Confirm a _) == (Confirm b _) = a == b
(NoConfirm a ) == (NoConfirm b ) = a == b
Random == Random = True
_ == _ = False
instance Show OptNetwork where
show = showOptNetwork
showOptNetwork :: OptNetwork -> String
showOptNetwork (Confirm s _) = toS $ "`" <> s <> "` (confirmation required)"
showOptNetwork (NoConfirm n) = toS $ renderNetwork n
showOptNetwork Random = "randomly generated"
optNetworkReader :: ReadM OptNetwork
optNetworkReader = eitherReader $ map mkOpt . readNetwork
where
mkOpt = \case
Mainnet -> Confirm (renderNetwork Mainnet) $ evalYesNo Mainnet
Testnet -> Confirm (renderNetwork Testnet) $ evalYesNo Testnet
Devnet -> NoConfirm Devnet
sn@Somenet{} -> NoConfirm sn
evalYesNo :: a -> YesNo -> Either String a
evalYesNo x = \case
Yes -> pure x
No -> Left "Cancelled"
evalOptions :: Options c OptNetwork -> IO (Either String (Options c Network))
evalOptions opt =
map (\n -> opt { optNetwork = n })
<$> evalOptNetwork (optNetwork opt)
-- | Interactively obtain a 'YesNo' value.
promptYesNo :: Text -> IO YesNo
promptYesNo msg = prompt
where
prompt = do
putStr msg *> hFlush stdout
l <- T.toLower <$> getLine
case T.uncons l of
Just ('y',_) -> pure Yes
Just ('n',_) -> pure No
_ -> putStrLn (mempty @Text) *> hFlush stdout *> prompt
evalOptNetwork :: OptNetwork -> IO (Either String Network)
evalOptNetwork (NoConfirm n) = pure $ Right n
evalOptNetwork Random = pure . randomNetwork <$> newSMGen
evalOptNetwork (Confirm s f) = f <$> promptYesNo msg
where
msg = "Are you sure you want to join network `" <> s <> "` [yes/no]:"
discoParser :: Parser (Options c OptNetwork)
discoParser = Options
<$> option optNetworkReader
( long "network"
<> help "The name of the overlay network to join"
<> metavar
(intercalate
"|" ("STRING" : map (toS . Global.renderNetwork) Global.allNetworks))
<> value Random
<> showDefaultWith showOptNetwork
)
<*> many
( option (eitherReader readBootstrapInfo)
( long "seed"
<> helpDoc
( unChunk $ vsepChunks
[ paragraph "Zero or more gossip seed nodes to connect to"
, paragraph "If HOST is an IPv6 address, it must be enclosed in \
\square brackets to delimit it from the portnumber."
, paragraph "If HOST is a domain name, all IPv4 and IPv6 \
\addresses bound to it will be considered as \
\distinct peer addresses."
, paragraph "Examples:"
, vcatChunks $ map stringChunk
[ "--seed=\"[2001:db8::01]:6942\""
, "--seed=\"127.0.0.1:6942\""
, "--seed=testnet.oscoin.io:6942\""
]
]
)
<> metavar "HOST:PORT"
)
)
<*> many
( option str
( long "sd-domain"
<> helpDoc
( unChunk $ vsepChunks
[ paragraph "Zero or more search domains to query for SRV records"
, paragraph "Examples:"
, vcatChunks $ map stringChunk
[ "--sd-domain=svc.cluster.local"
, "--sd-domain=oscoin.io"
, "--sd-domain=monadic.xyz"
]
]
)
<> metavar "DOMAIN NAME"
)
)
<*> switch
( long "enable-mdns"
<> help "Enable mDNS discovery"
)
<*> pure Nothing
class CanRenderNetwork a where
renderNetwork' :: a -> Maybe Text
instance CanRenderNetwork OptNetwork where
renderNetwork' = \case
Confirm n _ -> pure n
NoConfirm n -> pure $ renderNetwork n
_ -> Nothing
instance CanRenderNetwork Network where
renderNetwork' = pure . renderNetwork
discoOpts :: CanRenderNetwork n => Options c n -> [Opt Text]
discoOpts
(Options
optNetwork
optSeeds
optSDDomains
optEnableMDns
_optNameserver) = concat
[ fromMaybe [] . map (pure . Opt "network") . renderNetwork' $ optNetwork
, map (Opt "seed" . toS . showBootstrapInfo) optSeeds
, map (Opt "sd-domain" . toS) optSDDomains
, bool [] (pure (Flag "enable-mdns")) optEnableMDns
]
renderDiscoOpts :: Options c OptNetwork -> [Text]
renderDiscoOpts = map (F.sformat F.build) . discoOpts
|
ac50e752e19e3fb4a38ab7d514cd2eac2b3a07c018375498ff1312b1d748c908 | ssrihari/kosha | ragam.clj | (ns kosha.app.ragam
(:require [kosha.app.view :as view]
[kosha.db.search :as db-search]))
(defn show [{:keys [params] :as request}]
(let [ragam (first (db-search/ragams (:name params)))]
(view/html-skeleton [])))
| null | https://raw.githubusercontent.com/ssrihari/kosha/1d7a1858ec6578b146dca4b842020f9a093d479d/src/kosha/app/ragam.clj | clojure | (ns kosha.app.ragam
(:require [kosha.app.view :as view]
[kosha.db.search :as db-search]))
(defn show [{:keys [params] :as request}]
(let [ragam (first (db-search/ragams (:name params)))]
(view/html-skeleton [])))
|
|
27b08792921606b263a9089885f17c311f4200607ccaf14c2a8152b29e915ef2 | binghe/cl-net-snmp | null.lisp | ;;;; -*- Mode: Lisp -*-
;;;; $Id$
(in-package :asn.1)
(defmethod plain-value ((object (eql nil)) &key default)
(declare (ignore object default))
nil)
(defmethod ber-equal ((a (eql nil)) (b (eql nil)))
(declare (ignore a b))
t)
;;; NULL (:null)
(defmethod ber-encode ((value (eql nil)))
(declare (ignore value))
(concatenate 'vector
(ber-encode-type 0 0 5)
(ber-encode-length 0)))
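;;; Example (added note): (ber-encode nil) yields the standard two-byte
;;; ASN.1 NULL encoding, tag 05 followed by length 00 (i.e. #(5 0)),
;;; assuming ber-encode-type and ber-encode-length behave as in this library.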
(defmethod ber-decode-value ((stream stream) (type (eql :null)) length)
"Eat bytes and return a NIL"
(declare (type fixnum length) (ignore type))
(dotimes (i length nil) (read-byte stream)))
(eval-when (:load-toplevel :execute)
(install-asn.1-type :null 0 0 5))
| null | https://raw.githubusercontent.com/binghe/cl-net-snmp/3cf053bce75734097f0d7e2245a53fa0c45f5e05/runtime/null.lisp | lisp | -*- Mode: Lisp -*-
NULL (:null) | $ Id$
(in-package :asn.1)
(defmethod plain-value ((object (eql nil)) &key default)
(declare (ignore object default))
nil)
(defmethod ber-equal ((a (eql nil)) (b (eql nil)))
(declare (ignore a b))
t)
(defmethod ber-encode ((value (eql nil)))
(declare (ignore value))
(concatenate 'vector
(ber-encode-type 0 0 5)
(ber-encode-length 0)))
(defmethod ber-decode-value ((stream stream) (type (eql :null)) length)
"Eat bytes and return a NIL"
(declare (type fixnum length) (ignore type))
(dotimes (i length nil) (read-byte stream)))
(eval-when (:load-toplevel :execute)
(install-asn.1-type :null 0 0 5))
|
b530acfdb08f4807338353f1bcb91fdb5cc0c7992f57521e6415e31b314e9ee4 | typeclasses/dsv | IO.hs | {-# LANGUAGE NoImplicitPrelude #-}
module DSV.IO
( MonadIO (liftIO), MonadThrow (throwM), MonadCatch, MonadMask
, IOMode (ReadMode)
, SafeT, runSafeT
, withFile
, Exception (displayException), throw
, Handle
) where
-- base
import Control.Exception (Exception (displayException), throw)
import Control.Monad.IO.Class (MonadIO (liftIO))
import System.IO (IOMode (ReadMode), Handle)
-- pipes-safe
import Pipes.Safe (SafeT, runSafeT, MonadThrow (throwM), MonadCatch, MonadMask)
import Pipes.Safe.Prelude (withFile)
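-- Usage sketch (hypothetical downstream module): the rest of the package is
-- expected to import these names from DSV.IO rather than from base/pipes-safe
-- directly, e.g.
--   import DSV.IO (Handle, IOMode (ReadMode), runSafeT, withFile)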
| null | https://raw.githubusercontent.com/typeclasses/dsv/ae4eb823e27e4c569c4f9b097441985cf865fbab/dsv/library/DSV/IO.hs | haskell | base
pipes-safe | # LANGUAGE NoImplicitPrelude #
module DSV.IO
( MonadIO (liftIO), MonadThrow (throwM), MonadCatch, MonadMask
, IOMode (ReadMode)
, SafeT, runSafeT
, withFile
, Exception (displayException), throw
, Handle
) where
import Control.Exception (Exception (displayException), throw)
import Control.Monad.IO.Class (MonadIO (liftIO))
import System.IO (IOMode (ReadMode), Handle)
import Pipes.Safe (SafeT, runSafeT, MonadThrow (throwM), MonadCatch, MonadMask)
import Pipes.Safe.Prelude (withFile)
|
4912486c799dc1c5104c22162ecd43104d36d53ff6c214089e35534c5fbc397f | clf/lollimon | modes.ml | type mode = [Unknown | Input | Output of bool];
value allModes = ref [];
value parseModes p =
let myfail () = do { ps 0 "raising bad mode declaration\n"; raise (Stream.Error "Bad mode declaration")} in
let rec go = fun [
Const "o" 0 [] -> parser [
[: `(Kwd ".",_) :] -> [] |
[: :] -> myfail ()
] |
Const "->" 0 [_;typ] -> parser [ (*** Const with level -2 are just for mode checking ***)
[: `(Kwd "+",_); `(Ident _,_); res = go typ :] -> [Const "+" (-2) []::res] |
[: `(Kwd "-",_); `(Ident _,_); res = go typ :] -> [Const "-" (-2) []::res] |
[: `(Kwd "*",_); `(Ident _,_); res = go typ :] -> [Const "*" (-2) []::res] |
[: :] -> myfail ()
] |
Const "pi" 0 [Lam _ typ []] -> go typ |
_ -> myfail()
] in
try go (fst (List.assoc p mysignature.val))
with [e -> myfail()]
;
exception BadMode of string;
(***
checkMode should only be called right after (right after
residuating) parsing so there shouldn't be any actual EVars in head
or body
***)
value checkMode head body evars =
let _ = ps 0 ( " checkMode : " ^(term2str head)^ " | " ^(term2str body)^"\n " ) in
let _ = ps 0 ("checkMode: "^(term2str head)^" | "^(term2str body)^"\n") in
*)
let outputs = ref [] in
let headName = ref "" in
let sub = makeSub evars (Some (-1)) in (*** EVars with level -1 are just for mode checking ***)
let rec doArgs f mode = fun [ (*** do f (which expects a mode constant) to each arg ***)
[] -> () |
[tm::tms] ->
let (m,ms) = match mode with [
None -> (Const "*" (-2) [],None) |
Some [m::ms] -> (m,Some ms) |
_ -> raise (Failure "checkMode doArgs (was chkHead)")
] in
do {f m tm; doArgs f ms tms}
] in
let doEVar f m = (*** do something to each mode EVar ***)
let rec go = fun [
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> go (expose e) |
Var _ _ args -> List.iter go args |
Const c (-1) [] -> () |
Const c 0 args ->
let args' = (*** ignore implicit type variables for mode analysis ***)
if useTypes.val then
let n =
try snd (List.assoc c (mysignature.val @ signature.val))
with [Not_found -> try let _ = int_of_string c in 0
with [_ -> raise (Failure ("checkMode undefined constant: "^c))
]]
in
nthTail n args
else args
in
List.iter go args' |
Lam _ dc [] -> go dc |
(e as EVar nm rf (-1) args) ->
let allBVars = List.for_all (fun x -> match expose x with [Var _ _ [] -> True | x -> False]) args in
f allBVars m rf |
e -> raise (Failure ("checkMode doEVar: "^(term2str' True e))) (*** there shouldn't be any real EVars ***)
] in go
in
let initEV isPat m rf = (*** if EVar is not pattern then always use * ***)
if isPat then match (m,rf.val) with [
let _ = ps 0 ( " initEV m="^(term2str m)^ " ( EVar " ? " rf ( -2 ) [ ] ) ) ^"\n " ) in
let _ = ps 0 ("initEV m="^(term2str m)^" rf="^(term2str (EVar "?" rf (-2) []))^"\n") in
*)
(Const "-" -2 [], Open _) -> do {
outputs.val := [rf::outputs.val];
rf.val := Inst (Const "*" (-2) [])
} |
(_, Open _) -> rf.val := Inst m |
_ -> ()
]
else rf.val := Inst (Const "*" (-2) [])
in
let rec chkHead = fun [ (*** initialize mode EVars using mode declaration ***)
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> chkHead (expose e) |
Const c 0 args ->
let _ = headName.val := c in
let mode = try Some (List.assoc c allModes.val) with [
Not_found -> None
] in
doArgs (doEVar (initEV)) mode args |
EVar _ rf (-1) args -> do {
match rf.val with [
Inst (Const "+" -2 []) -> () |
_ -> ps 0 ("Warning: assuming clause with possibly uninstantiated head.\n")
];
ps 0 ("Warning: cannot check mode of variable head clause.\n");
doArgs (doEVar (initEV)) None args
} |
_ -> raise (Failure "checkMode Head")
] in
let chkMode c isGoal isPat m rf = (*** only propagate instantiation for pattern EVars ***)
let _ = ps 0 ( " chkMode " ^(sob isGoal)^ " " ^(term2str m)^ " " ^(term2str ' True ( EVar " ? " rf ( -2 ) [ ] ) ) ^"\n " ) in
let _ = ps 0 ("chkMode "^(sob isGoal)^" "^(term2str m)^" "^(term2str' True (EVar "?" rf (-2) []))^"\n") in
*)
match (isGoal, m, rf.val) with [
(True, Const "-" -2 [], _) -> if isPat then rf.val := Inst (Const "+" (-2) []) else () |
(True, Const "+" -2 [], Inst (Const "+" -2 [])) -> () |
(True, Const "+" -2 [], _) -> raise (BadMode c) |
(False, Const "-" -2 [], Inst (Const "+" -2 [])) -> () |
(False, Const "-" -2 [], _) -> raise (BadMode c) |
_ -> ()
] in
let rec chkBody isGoal = fun [
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> chkBody isGoal (expose e) |
Const (c as ("pi" | "sigma")) 0 [e] -> match expose e with [
Lam nm e [] ->
let isGoal' = if c = "pi" then isGoal else not isGoal in
if isGoal' then chkBody isGoal e
else chkBody isGoal (Lam nm e [newEVar False nm (Some (-1))]) |
_ -> raise (Failure "checkModes: bad pi")
] |
Lam _ e [] -> chkBody isGoal e |
Const ("!" | "@" | "{}") 0 [x] -> chkBody isGoal x |
Const ("," | ";" | "&") 0 [x;y] -> do {
chkBody isGoal x; chkBody isGoal y
} |
Const ("-o" | "=>" | "-@") 0 [x;y] -> (*** order of subgoals in clauses must be reversed ***)
if isGoal then do {chkBody (not isGoal) x; chkBody isGoal y}
else do {chkBody isGoal y; chkBody (not isGoal) x} |
Const "=" 0 [x;y] -> match (expose x,expose y) with [ (*** propagate instantiation flow for equals ***)
(EVar _ rf (-1) _, Const _ _ _) -> rf.val := Inst (Const "+" (-2) []) |
(Const _ _ _, EVar _ rf (-1) _) -> rf.val := Inst (Const "+" (-2) []) |
(EVar _ rf1 (-1) _, EVar _ rf2 (-1) _) -> match (rf1.val, rf2.val) with [
(Inst (Const "+" (-2) []), _) -> rf2.val := Inst (Const "+" (-2) []) |
(_, Inst (Const "+" (-2) [])) -> rf1.val := Inst (Const "+" (-2) []) |
_ -> ()
] |
_ -> ()
] |
(me as Const c 0 args) ->
(*
let _ = ps 0 ("chkBody "^(sob isGoal)^": "^(term2str' True me)^"\n") in
*)
let mode = try Some (List.assoc c allModes.val) with [
Not_found -> None
] in
doArgs (doEVar (chkMode c isGoal)) mode args |
(ev as EVar nm rf (-1) args) -> do {
ps 0 ("Warning: cannot check mode of a variable subgoal.\n");
doArgs (doEVar (chkMode nm isGoal)) None [ev]
} |
_ -> raise (Failure "checkMode chkBody")
] in
let isInst rf = match rf.val with [
Inst (Const "+" -2 []) -> () |
_ -> raise (BadMode headName.val)
] in
match expose (ExpSub head sub []) with [
Const "{}" 0 [head'] -> do {
chkBody True (ExpSub body sub []);
chkBody False head'
} |
head' -> do {
chkHead head';
chkBody True (ExpSub body sub []);
List.iter isInst outputs.val
}
];
| null | https://raw.githubusercontent.com/clf/lollimon/bc4290f5bb221c514b2a66ec427e85eec498c7be/src/modes.ml | ocaml | ** Const with level -2 are just for mode checking **
**
checkMode should only be called right after (right after
residuating) parsing so there shouldn't be any actual EVars in head
or body
**
** EVars with level -1 are just for mode checking **
** do f (which expects a mode constant) to each arg **
** ignore implicit type variables for mode analysis **
** there shouldn't be any real EVars **
** initialize mode EVars using mode declaration **
** only propagate instantiation for pattern EVars **
** order of subgoals in clauses must be reversed **
** propagate instantiation flow for equals **
let _ = ps 0 ("chkBody "^(sob isGoal)^": "^(term2str' True me)^"\n") in
| type mode = [Unknown | Input | Output of bool];
value allModes = ref [];
value parseModes p =
let myfail () = do { ps 0 "raising bad mode declaration\n"; raise (Stream.Error "Bad mode declaration")} in
let rec go = fun [
Const "o" 0 [] -> parser [
[: `(Kwd ".",_) :] -> [] |
[: :] -> myfail ()
] |
[: `(Kwd "+",_); `(Ident _,_); res = go typ :] -> [Const "+" (-2) []::res] |
[: `(Kwd "-",_); `(Ident _,_); res = go typ :] -> [Const "-" (-2) []::res] |
[: `(Kwd "*",_); `(Ident _,_); res = go typ :] -> [Const "*" (-2) []::res] |
[: :] -> myfail ()
] |
Const "pi" 0 [Lam _ typ []] -> go typ |
_ -> myfail()
] in
try go (fst (List.assoc p mysignature.val))
with [e -> myfail()]
;
exception BadMode of string;
value checkMode head body evars =
let _ = ps 0 ( " checkMode : " ^(term2str head)^ " | " ^(term2str body)^"\n " ) in
let _ = ps 0 ("checkMode: "^(term2str head)^" | "^(term2str body)^"\n") in
*)
let outputs = ref [] in
let headName = ref "" in
[] -> () |
[tm::tms] ->
let (m,ms) = match mode with [
None -> (Const "*" (-2) [],None) |
Some [m::ms] -> (m,Some ms) |
_ -> raise (Failure "checkMode doArgs (was chkHead)")
] in
do {f m tm; doArgs f ms tms}
] in
let doEVar f m =
let rec go = fun [
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> go (expose e) |
Var _ _ args -> List.iter go args |
Const c (-1) [] -> () |
Const c 0 args ->
if useTypes.val then
let n =
try snd (List.assoc c (mysignature.val @ signature.val))
with [Not_found -> try let _ = int_of_string c in 0
with [_ -> raise (Failure ("checkMode undefined constant: "^c))
]]
in
nthTail n args
else args
in
List.iter go args' |
Lam _ dc [] -> go dc |
(e as EVar nm rf (-1) args) ->
let allBVars = List.for_all (fun x -> match expose x with [Var _ _ [] -> True | x -> False]) args in
f allBVars m rf |
] in go
in
let initEV isPat m rf =
if isPat then match (m,rf.val) with [
let _ = ps 0 ( " initEV m="^(term2str m)^ " ( EVar " ? " rf ( -2 ) [ ] ) ) ^"\n " ) in
let _ = ps 0 ("initEV m="^(term2str m)^" rf="^(term2str (EVar "?" rf (-2) []))^"\n") in
*)
(Const "-" -2 [], Open _) -> do {
outputs.val := [rf::outputs.val];
rf.val := Inst (Const "*" (-2) [])
} |
(_, Open _) -> rf.val := Inst m |
_ -> ()
]
else rf.val := Inst (Const "*" (-2) [])
in
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> chkHead (expose e) |
Const c 0 args ->
let _ = headName.val := c in
let mode = try Some (List.assoc c allModes.val) with [
Not_found -> None
] in
doArgs (doEVar (initEV)) mode args |
EVar _ rf (-1) args -> do {
match rf.val with [
Inst (Const "+" -2 []) -> () |
_ -> ps 0 ("Warning: assuming clause with possibly uninstantiated head.\n")
];
ps 0 ("Warning: cannot check mode of variable head clause.\n");
doArgs (doEVar (initEV)) None args
} |
_ -> raise (Failure "checkMode Head")
] in
let _ = ps 0 ( " chkMode " ^(sob isGoal)^ " " ^(term2str m)^ " " ^(term2str ' True ( EVar " ? " rf ( -2 ) [ ] ) ) ^"\n " ) in
let _ = ps 0 ("chkMode "^(sob isGoal)^" "^(term2str m)^" "^(term2str' True (EVar "?" rf (-2) []))^"\n") in
*)
match (isGoal, m, rf.val) with [
(True, Const "-" -2 [], _) -> if isPat then rf.val := Inst (Const "+" (-2) []) else () |
(True, Const "+" -2 [], Inst (Const "+" -2 [])) -> () |
(True, Const "+" -2 [], _) -> raise (BadMode c) |
(False, Const "-" -2 [], Inst (Const "+" -2 [])) -> () |
(False, Const "-" -2 [], _) -> raise (BadMode c) |
_ -> ()
] in
let rec chkBody isGoal = fun [
(e as (Lam _ _ [_::_] | ExpSub _ _ _)) -> chkBody isGoal (expose e) |
Const (c as ("pi" | "sigma")) 0 [e] -> match expose e with [
Lam nm e [] ->
let isGoal' = if c = "pi" then isGoal else not isGoal in
if isGoal' then chkBody isGoal e
else chkBody isGoal (Lam nm e [newEVar False nm (Some (-1))]) |
_ -> raise (Failure "checkModes: bad pi")
] |
Lam _ e [] -> chkBody isGoal e |
Const ("!" | "@" | "{}") 0 [x] -> chkBody isGoal x |
Const ("," | ";" | "&") 0 [x;y] -> do {
chkBody isGoal x; chkBody isGoal y
} |
if isGoal then do {chkBody (not isGoal) x; chkBody isGoal y}
else do {chkBody isGoal y; chkBody (not isGoal) x} |
(EVar _ rf (-1) _, Const _ _ _) -> rf.val := Inst (Const "+" (-2) []) |
(Const _ _ _, EVar _ rf (-1) _) -> rf.val := Inst (Const "+" (-2) []) |
(EVar _ rf1 (-1) _, EVar _ rf2 (-1) _) -> match (rf1.val, rf2.val) with [
(Inst (Const "+" (-2) []), _) -> rf2.val := Inst (Const "+" (-2) []) |
(_, Inst (Const "+" (-2) [])) -> rf1.val := Inst (Const "+" (-2) []) |
_ -> ()
] |
_ -> ()
] |
(me as Const c 0 args) ->
let mode = try Some (List.assoc c allModes.val) with [
Not_found -> None
] in
doArgs (doEVar (chkMode c isGoal)) mode args |
(ev as EVar nm rf (-1) args) -> do {
ps 0 ("Warning: cannot check mode of a variable subgoal.\n");
doArgs (doEVar (chkMode nm isGoal)) None [ev]
} |
_ -> raise (Failure "checkMode chkBody")
] in
let isInst rf = match rf.val with [
Inst (Const "+" -2 []) -> () |
_ -> raise (BadMode headName.val)
] in
match expose (ExpSub head sub []) with [
Const "{}" 0 [head'] -> do {
chkBody True (ExpSub body sub []);
chkBody False head'
} |
head' -> do {
chkHead head';
chkBody True (ExpSub body sub []);
List.iter isInst outputs.val
}
];
|
1d387df3e2f1188a7b1eb4f48c9afcf0b74dc2260c355225f33a1aa501b270fa | bennn/dissertation | mixer.rkt | #lang typed/racket/base
(require require-typed-check
"typed-data.rkt"
(for-syntax racket/base)
(only-in racket/list first second rest))
(require/typed/check "array-struct.rkt"
[array? (-> Array Boolean)]
[array-shape (-> Array Indexes)]
[array-default-strict! (-> Array Void)]
[unsafe-array-proc (-> Array (-> Indexes Float))]
[unsafe-build-array (-> Indexes (-> Indexes Float) Array)])
(require/typed/check "array-broadcast.rkt"
[array-broadcast (-> Array Indexes Array)]
[array-shape-broadcast (case-> ((Listof Indexes) -> Indexes)
((Listof Indexes) (U #f #t 'permissive) -> Indexes))]
[array-broadcasting (Parameterof (U #f #t 'permissive))])
(provide mix)
;; -- array-pointwise
(define-syntax-rule (ensure-array name arr-expr)
(let ([arr arr-expr])
(if (array? arr) arr (raise-argument-error name "Array" arr))))
(define-syntax (inline-array-map stx)
(syntax-case stx ()
[(_ f arr-expr)
(syntax/loc stx
(let ([arr (ensure-array 'array-map arr-expr)])
(define ds (array-shape arr))
(define proc (unsafe-array-proc arr))
(define arr* (unsafe-build-array ds (λ ([js : Indexes]) (f (proc js)))))
(array-default-strict! arr*)
arr*))]
[(_ f arr-expr arr-exprs ...)
(with-syntax ([(arrs ...) (generate-temporaries #'(arr-exprs ...))]
[(procs ...) (generate-temporaries #'(arr-exprs ...))])
(syntax/loc stx
(let ([arr (ensure-array 'array-map arr-expr)]
[arrs (ensure-array 'array-map arr-exprs)] ...)
(define ds (array-shape-broadcast (list (array-shape arr) (array-shape arrs) ...)))
(let ([arr (array-broadcast arr ds)]
[arrs (array-broadcast arrs ds)] ...)
(define proc (unsafe-array-proc arr))
(define procs (unsafe-array-proc arrs)) ...
(define arr* (unsafe-build-array ds (λ ([js : Indexes]) (f (proc js) (procs js) ...))))
(array-default-strict! arr*)
arr*))))]))
(: array-map
(case->
(-> (-> Float Float Float) Array Array Array)
(-> (-> Float Float) Array Array)))
(define array-map
(case-lambda
[([f : (Float -> Float)] [arr : Array])
(inline-array-map f arr)]
[([f : (Float Float -> Float)] [arr0 : Array] [arr1 : Array])
(inline-array-map f arr0 arr1)]))
;; Weighted sum of signals, receives a list of lists (signal weight).
;; Shorter signals are repeated to match the length of the longest.
;; Normalizes output to be within [-1,1].
(: mix (-> Weighted-Signal * Array))
(define (mix . ss)
(: signals (Listof Array))
(define signals
(for/list : (Listof Array) ([s : Weighted-Signal ss])
(first s)))
(: weights (Listof Float))
(define weights
(for/list : (Listof Float) ([x : Weighted-Signal ss])
(real->double-flonum (second x))))
(: downscale-ratio Float)
(define downscale-ratio (/ 1.0 (apply + weights)))
(: scale-signal (Float -> (Float -> Float)))
(define ((scale-signal w) x) (* x w downscale-ratio))
(parameterize ([array-broadcasting 'permissive]) ; repeat short signals
(for/fold ([res : Array (array-map (scale-signal (first weights))
(first signals))])
([s (in-list (rest signals))]
[w (in-list (rest weights))])
(define scale (scale-signal w))
(array-map (lambda ([acc : Float]
[new : Float])
(+ acc (scale new)))
res s))))
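;; Usage sketch (hypothetical signals sig-a and sig-b of type Array):
;;   (mix (list sig-a 1.0) (list sig-b 3.0))
;; each sample is scaled by its weight times (/ 1.0 (+ 1.0 3.0)), so the
;; second signal contributes 3/4 of the result and the sum stays in [-1,1].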
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/QA/math-test/synth/s-d/mixer.rkt | racket | -- array-pointwise
Weighted sum of signals, receives a list of lists (signal weight).
Shorter signals are repeated to match the length of the longest.
repeat short signals | #lang typed/racket/base
(require require-typed-check
"typed-data.rkt"
(for-syntax racket/base)
(only-in racket/list first second rest))
(require/typed/check "array-struct.rkt"
[array? (-> Array Boolean)]
[array-shape (-> Array Indexes)]
[array-default-strict! (-> Array Void)]
[unsafe-array-proc (-> Array (-> Indexes Float))]
[unsafe-build-array (-> Indexes (-> Indexes Float) Array)])
(require/typed/check "array-broadcast.rkt"
[array-broadcast (-> Array Indexes Array)]
[array-shape-broadcast (case-> ((Listof Indexes) -> Indexes)
((Listof Indexes) (U #f #t 'permissive) -> Indexes))]
[array-broadcasting (Parameterof (U #f #t 'permissive))])
(provide mix)
(define-syntax-rule (ensure-array name arr-expr)
(let ([arr arr-expr])
(if (array? arr) arr (raise-argument-error name "Array" arr))))
(define-syntax (inline-array-map stx)
(syntax-case stx ()
[(_ f arr-expr)
(syntax/loc stx
(let ([arr (ensure-array 'array-map arr-expr)])
(define ds (array-shape arr))
(define proc (unsafe-array-proc arr))
(define arr* (unsafe-build-array ds (λ ([js : Indexes]) (f (proc js)))))
(array-default-strict! arr*)
arr*))]
[(_ f arr-expr arr-exprs ...)
(with-syntax ([(arrs ...) (generate-temporaries #'(arr-exprs ...))]
[(procs ...) (generate-temporaries #'(arr-exprs ...))])
(syntax/loc stx
(let ([arr (ensure-array 'array-map arr-expr)]
[arrs (ensure-array 'array-map arr-exprs)] ...)
(define ds (array-shape-broadcast (list (array-shape arr) (array-shape arrs) ...)))
(let ([arr (array-broadcast arr ds)]
[arrs (array-broadcast arrs ds)] ...)
(define proc (unsafe-array-proc arr))
(define procs (unsafe-array-proc arrs)) ...
(define arr* (unsafe-build-array ds (λ ([js : Indexes]) (f (proc js) (procs js) ...))))
(array-default-strict! arr*)
arr*))))]))
(: array-map
(case->
(-> (-> Float Float Float) Array Array Array)
(-> (-> Float Float) Array Array)))
(define array-map
(case-lambda
[([f : (Float -> Float)] [arr : Array])
(inline-array-map f arr)]
[([f : (Float Float -> Float)] [arr0 : Array] [arr1 : Array])
(inline-array-map f arr0 arr1)]))
Normalizes output to be within [ -1,1 ] .
(: mix (-> Weighted-Signal * Array))
(define (mix . ss)
(: signals (Listof Array))
(define signals
(for/list : (Listof Array) ([s : Weighted-Signal ss])
(first s)))
(: weights (Listof Float))
(define weights
(for/list : (Listof Float) ([x : Weighted-Signal ss])
(real->double-flonum (second x))))
(: downscale-ratio Float)
(define downscale-ratio (/ 1.0 (apply + weights)))
(: scale-signal (Float -> (Float -> Float)))
(define ((scale-signal w) x) (* x w downscale-ratio))
(for/fold ([res : Array (array-map (scale-signal (first weights))
(first signals))])
([s (in-list (rest signals))]
[w (in-list (rest weights))])
(define scale (scale-signal w))
(array-map (lambda ([acc : Float]
[new : Float])
(+ acc (scale new)))
res s))))
|
030c409eb266c2e7b2dc3029ac66f333f2370b32b0f67342a324632343816858 | z0w0/helm | SDL.hs | {-# LANGUAGE TypeFamilies #-}
-- | Contains the SDL engine implementation of Helm.
module Helm.Engine.SDL
(
-- * Types
SDLEngine
, SDLEngineConfig(..)
-- * Startup
, defaultConfig
, startup
, startupWith
-- * Asset Loading
, withImage
) where
import Helm.Engine.SDL.Engine
( SDLEngine
, SDLEngineConfig(..)
, defaultConfig
, startup
, startupWith
, withImage
)
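-- Usage sketch (assumed API shape; the game-loop entry point lives in the
-- main Helm module, not here):
--   main = do
--     engine <- startup            -- same as 'startupWith defaultConfig'
--     Helm.run engine gameConfig   -- 'gameConfig' is the game's GameConfig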
| null | https://raw.githubusercontent.com/z0w0/helm/e2d8745d573c8b775aa390b70c597722ec102e48/src/Helm/Engine/SDL.hs | haskell | * Types
* Startup
* Asset Loading | # LANGUAGE TypeFamilies #
| Contains the SDL engine implementation of .
module Helm.Engine.SDL
(
SDLEngine
, SDLEngineConfig(..)
, defaultConfig
, startup
, startupWith
, withImage
) where
import Helm.Engine.SDL.Engine
( SDLEngine
, SDLEngineConfig(..)
, defaultConfig
, startup
, startupWith
, withImage
)
|
6a6f6ed04c175a72742a244e2b0dd7b4201c30413d26224bce0533614caf74cc | svdm/ClojureGP | test_generate.clj | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (-1.0.php) which
;; can be found in the file epl-v10.html at the root of this distribution. By
;; using this software in any fashion, you are agreeing to be bound by the
;; terms of this license. You must not remove this notice, or any other, from
;; this software.
(ns test.cljgp.test-generate
(:use clojure.test
cljgp.generate
cljgp.util)
(:refer test.helpers))
(deftest test-generate-tree
(testing "grow method"
(let [max-depth 4
tree (my-gen max-depth :grow rtype)
depth (tree-depth tree)]
(full-tree-test tree)
(is (and (> depth 0) (<= depth max-depth))
"Grow-method trees must be of a valid size up to the limit.")
(is (= (tree-depth (my-gen 0 :grow rtype)) 1)
"For max-depth 0, must return single node.")))
(testing "full method"
(let [max-depth 4
tree (my-gen max-depth :full rtype)]
(full-tree-test tree)
(is (= (tree-depth tree) max-depth)
"Full-method trees must be the given max-depth in size.")
(is (= (tree-depth (my-gen 0 :full rtype)) 1)
"For max-depth 0, must return single node."))))
(deftest test-generate-ramped
(let [d 4
gtor (fn [opts] (get-valid (comp not nil?) 512
#(generate-ramped opts config-maths)))
grown-tree (gtor {:max-depth d :grow-chance 1})
full-tree (gtor {:max-depth d :grow-chance 0})
rand-tree (gtor {:max-depth d :grow-chance 0.5})]
(testing "generated tree validity"
(full-tree-test grown-tree)
(full-tree-test full-tree)
(full-tree-test rand-tree))
(is (<= (tree-depth full-tree) d)
"Ramped gen with 0% grow chance should result in a full tree.")
(is (<= (tree-depth grown-tree) d)
"Ramped gen with 100% grow chance should result in a grown tree.")))
(deftest test-generate-pop
(let [target-size (:population-size config-maths)
pop (doall (generate-pop config-maths))]
(is (seq pop)
"Generated population must be a valid seq-able.")
(is (= (count pop) target-size)
"Generated population should be of the specified size.")
(is (empty? (filter #(not (valid-tree? (get-fn-body (get-func %)))) pop))
"All generated trees must be valid.")))
(deftest test-get-valid
(is (nil? (get-valid true? 2 #(vector [false false false]))))
(is (= [1 2] (get-valid number? 1 #(vector 1 2))))) | null | https://raw.githubusercontent.com/svdm/ClojureGP/266e501411b37297bdeb082913df63ececa8515c/test/cljgp/test_generate.clj | clojure | Public License 1.0 (-1.0.php) which
can be found in the file epl-v10.html at the root of this distribution. By
using this software in any fashion, you are agreeing to be bound by the
terms of this license. You must not remove this notice, or any other, from
this software.
| Copyright ( c ) . All rights reserved .
The use and distribution terms for this software are covered by the Eclipse
(ns test.cljgp.test-generate
(:use clojure.test
cljgp.generate
cljgp.util)
(:refer test.helpers))
(deftest test-generate-tree
(testing "grow method"
(let [max-depth 4
tree (my-gen max-depth :grow rtype)
depth (tree-depth tree)]
(full-tree-test tree)
(is (and (> depth 0) (<= depth max-depth))
"Grow-method trees must be of a valid size up to the limit.")
(is (= (tree-depth (my-gen 0 :grow rtype)) 1)
"For max-depth 0, must return single node.")))
(testing "full method"
(let [max-depth 4
tree (my-gen max-depth :full rtype)]
(full-tree-test tree)
(is (= (tree-depth tree) max-depth)
"Full-method trees must be the given max-depth in size.")
(is (= (tree-depth (my-gen 0 :full rtype)) 1)
"For max-depth 0, must return single node."))))
(deftest test-generate-ramped
(let [d 4
gtor (fn [opts] (get-valid (comp not nil?) 512
#(generate-ramped opts config-maths)))
grown-tree (gtor {:max-depth d :grow-chance 1})
full-tree (gtor {:max-depth d :grow-chance 0})
rand-tree (gtor {:max-depth d :grow-chance 0.5})]
(testing "generated tree validity"
(full-tree-test grown-tree)
(full-tree-test full-tree)
(full-tree-test rand-tree))
(is (<= (tree-depth full-tree) d)
"Ramped gen with 0% grow chance should result in a full tree.")
(is (<= (tree-depth grown-tree) d)
"Ramped gen with 100% grow chance should result in a grown tree.")))
(deftest test-generate-pop
(let [target-size (:population-size config-maths)
pop (doall (generate-pop config-maths))]
(is (seq pop)
"Generated population must be a valid seq-able.")
(is (= (count pop) target-size)
"Generated population should be of the specified size.")
(is (empty? (filter #(not (valid-tree? (get-fn-body (get-func %)))) pop))
"All generated trees must be valid.")))
(deftest test-get-valid
(is (nil? (get-valid true? 2 #(vector [false false false]))))
(is (= [1 2] (get-valid number? 1 #(vector 1 2))))) |
11964d498991273951bf1c973d96697c823a309b1bbd52700bd27d91196835b9 | jellelicht/guix | llvm.scm | ;;; GNU Guix --- Functional package management for GNU
;;; Copyright © 2014 < >
;;; Copyright © 2015 < >
;;; Copyright © 2015 < >
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix.  If not, see <http://www.gnu.org/licenses/>.
(define-module (gnu packages llvm)
#:use-module (guix packages)
#:use-module (guix licenses)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cmake)
#:use-module (gnu packages)
#:use-module (gnu packages gcc)
#:use-module (gnu packages bootstrap) ;glibc-dynamic-linker
#:use-module (gnu packages perl)
#:use-module (gnu packages python)
#:use-module (gnu packages xml))
(define-public llvm
(package
(name "llvm")
(version "3.6.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"153vcvj8gvgwakzr4j0kndc0b7wn91c2g1vy2vg24s6spxcc23gn"))))
(build-system cmake-build-system)
(native-inputs
`(("python" ,python-wrapper)
("perl" ,perl)))
(arguments
`(#:configure-flags '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE")))
(home-page "")
(synopsis "Optimizing compiler infrastructure")
(description
"LLVM is a compiler infrastructure designed for compile-time, link-time,
runtime, and idle-time optimization of programs from arbitrary programming
languages. It currently supports compilation of C and C++ programs, using
front-ends derived from GCC 4.0.1. A new front-end for the C family of
languages is in development. The compiler infrastructure includes mirror sets
of programming tools as well as libraries with equivalent functionality.")
(license ncsa)))
(define (clang-runtime-from-llvm llvm hash)
(package
(name "clang-runtime")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/compiler-rt-" version ".src.tar.xz"))
(sha256 (base32 hash))))
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("llvm" ,llvm)))
(arguments
`(;; Don't use '-g' during the build to save space.
#:build-type "Release"))
(home-page "-rt.llvm.org")
(synopsis "Runtime library for Clang/LLVM")
(description
"The \"clang-runtime\" library provides the implementations of run-time
functions for C and C++ programs. It also provides header files that allow C
and C++ source code to interface with the \"sanitization\" passes of the clang
compiler. In LLVM this library is called \"compiler-rt\".")
(license ncsa)
< -rt.llvm.org/ > does n't list MIPS as supported .
(supported-systems (delete "mips64el-linux" %supported-systems))))
(define (clang-from-llvm llvm clang-runtime hash)
(package
(name "clang")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/cfe-" version ".src.tar.xz"))
(sha256 (base32 hash))
(patches (list (search-patch "clang-libc-search-path.patch")))))
Using cmake allows us to treat as an external library . There
does n't seem to be any way to do this with clang 's - based
;; build system.
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("libxml2" ,libxml2)
("gcc-lib" ,gcc "lib")
,@(package-inputs llvm)))
(propagated-inputs
`(("llvm" ,llvm)
("clang-runtime" ,clang-runtime)))
(arguments
`(#:configure-flags
(list "-DCLANG_INCLUDE_TESTS=True"
;; Find libgcc_s, crtbegin.o, and crtend.o.
(string-append "-DGCC_INSTALL_PREFIX="
(assoc-ref %build-inputs "gcc-lib"))
;; Use a sane default include directory.
(string-append "-DC_INCLUDE_DIRS="
(assoc-ref %build-inputs "libc")
"/include"))
;; Don't use '-g' during the build to save space.
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-after
'unpack 'set-glibc-file-names
(lambda* (#:key inputs #:allow-other-keys)
(let ((libc (assoc-ref inputs "libc"))
(compiler-rt (assoc-ref inputs "clang-runtime")))
(substitute* "lib/Driver/Tools.cpp"
;; Patch the 'getLinuxDynamicLinker' function to that
;; it uses the right dynamic linker file name.
(("/lib64/ld-linux-x86-64.so.2")
(string-append libc
,(glibc-dynamic-linker)))
Link to files from clang - runtime .
(("TC\\.getDriver\\(\\)\\.ResourceDir")
(string-append "\"" compiler-rt "\"")))
Same for libc 's , to allow crt1.o & co. to be
;; found.
(substitute* "lib/Driver/ToolChains.cpp"
(("@GLIBC_LIBDIR@")
(string-append libc "/lib")))))))))
;; Clang supports the same environment variables as GCC.
(native-search-paths
(list (search-path-specification
(variable "CPATH")
(files '("include")))
(search-path-specification
(variable "LIBRARY_PATH")
(files '("lib" "lib64")))))
(home-page "")
(synopsis "C language family frontend for LLVM")
(description
"Clang is a compiler front end for the C, C++, Objective-C and
Objective-C++ programming languages. It uses LLVM as its back end. The Clang
project includes the Clang front end, the Clang static analyzer, and several
code analysis tools.")
(license ncsa)))
(define-public clang-runtime
(clang-runtime-from-llvm
llvm
"11qx8d3pbfqjaj2x207pvlvzihbs1z2xbw4crpz7aid6h1yz6bqg"))
(define-public clang
(clang-from-llvm llvm clang-runtime
"1wwr8s6lzr324hv4s1k6na4j5zv6n9kdhi14s4kb9b13d93814df"))
(define-public llvm-3.5
(package (inherit llvm)
(version "3.5.0")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"00swb43mzlvda8306arlg2jw7g6k3acwfccgf1k4c2pgd3rrkq98"))))))
(define-public clang-runtime-3.5
(clang-runtime-from-llvm
llvm-3.5
"0dl1kbrhz96djsxqr61iw5h788s7ncfpfb7aayixky1bhdaydcx4"))
(define-public clang-3.5
(clang-from-llvm llvm-3.5 clang-runtime-3.5
"12yv3jwdjcbkrx7zjm8wh4jrvb59v8fdw4mnmz3zc1jb00p9k07w"))
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/llvm.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
glibc-dynamic-linker
Don't use '-g' during the build to save space.
build system.
Find libgcc_s, crtbegin.o, and crtend.o.
Use a sane default include directory.
Don't use '-g' during the build to save space.
Patch the 'getLinuxDynamicLinker' function to that
it uses the right dynamic linker file name.
found. | Copyright © 2014 < >
Copyright © 2015 < >
Copyright © 2015 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages llvm)
#:use-module (guix packages)
#:use-module (guix licenses)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cmake)
#:use-module (gnu packages)
#:use-module (gnu packages gcc)
#:use-module (gnu packages perl)
#:use-module (gnu packages python)
#:use-module (gnu packages xml))
(define-public llvm
(package
(name "llvm")
(version "3.6.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"153vcvj8gvgwakzr4j0kndc0b7wn91c2g1vy2vg24s6spxcc23gn"))))
(build-system cmake-build-system)
(native-inputs
`(("python" ,python-wrapper)
("perl" ,perl)))
(arguments
`(#:configure-flags '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE")))
(home-page "")
(synopsis "Optimizing compiler infrastructure")
(description
"LLVM is a compiler infrastructure designed for compile-time, link-time,
runtime, and idle-time optimization of programs from arbitrary programming
languages. It currently supports compilation of C and C++ programs, using
front-ends derived from GCC 4.0.1. A new front-end for the C family of
languages is in development. The compiler infrastructure includes mirror sets
of programming tools as well as libraries with equivalent functionality.")
(license ncsa)))
(define (clang-runtime-from-llvm llvm hash)
(package
(name "clang-runtime")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/compiler-rt-" version ".src.tar.xz"))
(sha256 (base32 hash))))
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("llvm" ,llvm)))
(arguments
#:build-type "Release"))
(home-page "-rt.llvm.org")
(synopsis "Runtime library for Clang/LLVM")
(description
"The \"clang-runtime\" library provides the implementations of run-time
functions for C and C++ programs. It also provides header files that allow C
and C++ source code to interface with the \"sanitization\" passes of the clang
compiler. In LLVM this library is called \"compiler-rt\".")
(license ncsa)
< -rt.llvm.org/ > does n't list MIPS as supported .
(supported-systems (delete "mips64el-linux" %supported-systems))))
(define (clang-from-llvm llvm clang-runtime hash)
(package
(name "clang")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/cfe-" version ".src.tar.xz"))
(sha256 (base32 hash))
(patches (list (search-patch "clang-libc-search-path.patch")))))
Using cmake allows us to treat as an external library . There
does n't seem to be any way to do this with clang 's - based
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("libxml2" ,libxml2)
("gcc-lib" ,gcc "lib")
,@(package-inputs llvm)))
(propagated-inputs
`(("llvm" ,llvm)
("clang-runtime" ,clang-runtime)))
(arguments
`(#:configure-flags
(list "-DCLANG_INCLUDE_TESTS=True"
(string-append "-DGCC_INSTALL_PREFIX="
(assoc-ref %build-inputs "gcc-lib"))
(string-append "-DC_INCLUDE_DIRS="
(assoc-ref %build-inputs "libc")
"/include"))
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-after
'unpack 'set-glibc-file-names
(lambda* (#:key inputs #:allow-other-keys)
(let ((libc (assoc-ref inputs "libc"))
(compiler-rt (assoc-ref inputs "clang-runtime")))
(substitute* "lib/Driver/Tools.cpp"
(("/lib64/ld-linux-x86-64.so.2")
(string-append libc
,(glibc-dynamic-linker)))
Link to files from clang - runtime .
(("TC\\.getDriver\\(\\)\\.ResourceDir")
(string-append "\"" compiler-rt "\"")))
Same for libc 's , to allow crt1.o & co. to be
(substitute* "lib/Driver/ToolChains.cpp"
(("@GLIBC_LIBDIR@")
(string-append libc "/lib")))))))))
Clang supports the same environment variables as GCC .
(native-search-paths
(list (search-path-specification
(variable "CPATH")
(files '("include")))
(search-path-specification
(variable "LIBRARY_PATH")
(files '("lib" "lib64")))))
(home-page "")
(synopsis "C language family frontend for LLVM")
(description
"Clang is a compiler front end for the C, C++, Objective-C and
Objective-C++ programming languages. It uses LLVM as its back end. The Clang
project includes the Clang front end, the Clang static analyzer, and several
code analysis tools.")
(license ncsa)))
(define-public clang-runtime
(clang-runtime-from-llvm
llvm
"11qx8d3pbfqjaj2x207pvlvzihbs1z2xbw4crpz7aid6h1yz6bqg"))
(define-public clang
(clang-from-llvm llvm clang-runtime
"1wwr8s6lzr324hv4s1k6na4j5zv6n9kdhi14s4kb9b13d93814df"))
(define-public llvm-3.5
(package (inherit llvm)
(version "3.5.0")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"00swb43mzlvda8306arlg2jw7g6k3acwfccgf1k4c2pgd3rrkq98"))))))
(define-public clang-runtime-3.5
(clang-runtime-from-llvm
llvm-3.5
"0dl1kbrhz96djsxqr61iw5h788s7ncfpfb7aayixky1bhdaydcx4"))
(define-public clang-3.5
(clang-from-llvm llvm-3.5 clang-runtime-3.5
"12yv3jwdjcbkrx7zjm8wh4jrvb59v8fdw4mnmz3zc1jb00p9k07w"))
|
d70cadebd6642bb14f797ac55c5209460f4358598b98b20d032d55102bcbf012 | facebook/pyre-check | sources.ml |
(*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* Sources: defines a source kind in our taint representation.
*
* For instance, `TaintSource[Header]` is represented as `Sources.NamedSource "Header"`.
*)
open Core
let name = "source"
module T = struct
type t =
| Attach
| NamedSource of string
| ParametricSource of {
source_name: string;
subkind: string;
}
| Transform of {
(* Invariant: concatenation of local @ global is non-empty. *)
local: TaintTransforms.t;
global: TaintTransforms.t;
(* Invariant: not a transform. *)
base: t;
}
[@@deriving compare, eq]
let rec pp formatter = function
| Attach -> Format.fprintf formatter "Attach"
| NamedSource name -> Format.fprintf formatter "%s" name
| ParametricSource { source_name; subkind } ->
Format.fprintf formatter "%s[%s]" source_name subkind
| Transform { local; global; base } ->
TaintTransforms.pp_kind ~formatter ~pp_base:pp ~local ~global ~base
let show = Format.asprintf "%a" pp
end
include T
let make_transform ~local ~global ~base =
match local, global with
| [], [] -> base
| _ -> Transform { local; global; base }
let ignore_kind_at_call = function
| Attach -> true
| _ -> false
let apply_call = function
| Transform { local; global; base } ->
Transform
{ local = TaintTransforms.empty; global = TaintTransforms.merge ~local ~global; base }
| source -> source
module Set = struct
include Stdlib.Set.Make (struct
include T
end)
let show set =
set |> elements |> List.map ~f:T.show |> String.concat ~sep:", " |> Format.asprintf "[%s]"
let pp format set = Format.fprintf format "%s" (show set)
let to_sanitize_transform_set_exn set =
let to_transform = function
| NamedSource name -> SanitizeTransform.Source.Named name
| source -> Format.asprintf "cannot sanitize the source `%a`" T.pp source |> failwith
in
set
|> elements
|> List.map ~f:to_transform
|> SanitizeTransform.SourceSet.of_list
|> SanitizeTransformSet.from_sources
let is_singleton set =
(* The only way to implement this in O(1) is with `for_all` or `exists`. *)
(not (is_empty set))
&&
let count = ref 0 in
for_all
(fun _ ->
incr count;
!count = 1)
set
let as_singleton set = if is_singleton set then Some (choose set) else None
end
module Map = struct
include Stdlib.Map.Make (struct
include T
end)
let of_alist_exn =
let add map (key, data) =
update
key
(function
| None -> Some data
| Some _ -> failwith "key already exists")
map
in
List.fold ~init:empty ~f:add
let to_alist map =
let gather key data sofar = (key, data) :: sofar in
fold gather map []
end
let discard_subkind = function
| ParametricSource { source_name; _ } -> NamedSource source_name
| source -> source
let discard_transforms = function
| Transform { base; _ } -> base
| source -> source
let discard_sanitize_transforms = function
| Transform { base; local; global } ->
make_transform
~local:(TaintTransforms.discard_sanitize_transforms local)
~global:(TaintTransforms.discard_sanitize_transforms global)
~base
| source -> source
let extract_sanitized_sources_from_transforms transforms =
let extract (SanitizeTransform.Source.Named name) sources = Set.add (NamedSource name) sources in
SanitizeTransform.SourceSet.fold extract transforms Set.empty
let to_sanitized_source_exn = function
| NamedSource name -> SanitizeTransform.Source.Named name
| ParametricSource { source_name = name; _ } -> SanitizeTransform.Source.Named name
| _ -> failwith "Unsupported source sanitizer"
let from_sanitized_source (SanitizeTransform.Source.Named name) = NamedSource name
let extract_sanitize_transforms = function
| Transform { local; global; _ } ->
TaintTransforms.merge ~local ~global |> TaintTransforms.get_sanitize_transforms
| _ -> SanitizeTransformSet.empty
let get_named_transforms = function
| Transform { local; global; _ } ->
TaintTransforms.merge ~local ~global |> TaintTransforms.get_named_transforms
| _ -> []
let contains_sanitize_transforms source sanitize_transforms =
SanitizeTransformSet.less_or_equal
~left:sanitize_transforms
~right:(extract_sanitize_transforms source)
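(* Usage sketch (hypothetical values; assumes TaintTransforms.empty is the
   empty list, as the [], [] pattern in make_transform suggests):
     make_transform ~local:TaintTransforms.empty ~global:TaintTransforms.empty
       ~base:(NamedSource "Header")
   evaluates to NamedSource "Header", i.e. an empty transform pair leaves the
   base source unchanged. *)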
| null | https://raw.githubusercontent.com/facebook/pyre-check/83be58c61b14dd0812e07a75bdbff756d57db47c/source/interprocedural_analyses/taint/sources.ml | ocaml | Sources: defines a source kind in our taint representation.
*
* For instance, `TaintSource[Header]` is represented as `Sources.NamedSource "Header"`.
Invariant: concatenation of local @ global is non-empty.
Invariant: not a transform. |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Core
let name = "source"
module T = struct
type t =
| Attach
| NamedSource of string
| ParametricSource of {
source_name: string;
subkind: string;
}
| Transform of {
local: TaintTransforms.t;
global: TaintTransforms.t;
base: t;
}
[@@deriving compare, eq]
let rec pp formatter = function
| Attach -> Format.fprintf formatter "Attach"
| NamedSource name -> Format.fprintf formatter "%s" name
| ParametricSource { source_name; subkind } ->
Format.fprintf formatter "%s[%s]" source_name subkind
| Transform { local; global; base } ->
TaintTransforms.pp_kind ~formatter ~pp_base:pp ~local ~global ~base
let show = Format.asprintf "%a" pp
end
include T
let make_transform ~local ~global ~base =
match local, global with
| [], [] -> base
| _ -> Transform { local; global; base }
let ignore_kind_at_call = function
| Attach -> true
| _ -> false
let apply_call = function
| Transform { local; global; base } ->
Transform
{ local = TaintTransforms.empty; global = TaintTransforms.merge ~local ~global; base }
| source -> source
module Set = struct
include Stdlib.Set.Make (struct
include T
end)
let show set =
set |> elements |> List.map ~f:T.show |> String.concat ~sep:", " |> Format.asprintf "[%s]"
let pp format set = Format.fprintf format "%s" (show set)
let to_sanitize_transform_set_exn set =
let to_transform = function
| NamedSource name -> SanitizeTransform.Source.Named name
| source -> Format.asprintf "cannot sanitize the source `%a`" T.pp source |> failwith
in
set
|> elements
|> List.map ~f:to_transform
|> SanitizeTransform.SourceSet.of_list
|> SanitizeTransformSet.from_sources
let is_singleton set =
(* The only way to implement this in O(1) is with `for_all` or `exists`. *)
(not (is_empty set))
&&
let count = ref 0 in
for_all
(fun _ ->
incr count;
!count = 1)
set
let as_singleton set = if is_singleton set then Some (choose set) else None
end
module Map = struct
include Stdlib.Map.Make (struct
include T
end)
let of_alist_exn =
let add map (key, data) =
update
key
(function
| None -> Some data
| Some _ -> failwith "key already exists")
map
in
List.fold ~init:empty ~f:add
let to_alist map =
let gather key data sofar = (key, data) :: sofar in
fold gather map []
end
let discard_subkind = function
| ParametricSource { source_name; _ } -> NamedSource source_name
| source -> source
let discard_transforms = function
| Transform { base; _ } -> base
| source -> source
let discard_sanitize_transforms = function
| Transform { base; local; global } ->
make_transform
~local:(TaintTransforms.discard_sanitize_transforms local)
~global:(TaintTransforms.discard_sanitize_transforms global)
~base
| source -> source
let extract_sanitized_sources_from_transforms transforms =
let extract (SanitizeTransform.Source.Named name) sources = Set.add (NamedSource name) sources in
SanitizeTransform.SourceSet.fold extract transforms Set.empty
let to_sanitized_source_exn = function
| NamedSource name -> SanitizeTransform.Source.Named name
| ParametricSource { source_name = name; _ } -> SanitizeTransform.Source.Named name
| _ -> failwith "Unsupported source sanitizer"
let from_sanitized_source (SanitizeTransform.Source.Named name) = NamedSource name
let extract_sanitize_transforms = function
| Transform { local; global; _ } ->
TaintTransforms.merge ~local ~global |> TaintTransforms.get_sanitize_transforms
| _ -> SanitizeTransformSet.empty
let get_named_transforms = function
| Transform { local; global; _ } ->
TaintTransforms.merge ~local ~global |> TaintTransforms.get_named_transforms
| _ -> []
let contains_sanitize_transforms source sanitize_transforms =
SanitizeTransformSet.less_or_equal
~left:sanitize_transforms
~right:(extract_sanitize_transforms source)
|
ded0559c579ebbb308208511035d793fac13612085a482d995032b1ca7992929 | tud-fop/vanda-haskell | PennToSentenceCorpus.hs | -----------------------------------------------------------------------------
-- |
Module : VandaCLI.PennToSentenceCorpus
Copyright : ( c ) Technische Universität Dresden 2018
-- License : BSD-style
--
-- Stability : unknown
-- Portability : portable
-----------------------------------------------------------------------------
module VandaCLI.PennToSentenceCorpus where
import Vanda.Corpus.Penn.Text
import qualified Data.Text.Lazy.IO as TIO
import Data.Tree
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Explicit.Misc
data Args = Help String | Args String deriving Show
cmdArgs :: Mode Args
cmdArgs = (modeEmpty $ Args "-"){ modeNames = ["penn-to-sentence-corpus"]
, modeHelp = "Reads of the yield of trees in a PennTreeCorpus."
, modeArgs = ([flagArg (\ s _ -> Right $ Args s) "FILE"], Nothing)
, modeGroupFlags = toGroup [flagNone ["help"] (\ _ -> Help $ defaultHelp cmdArgs) "Prints the help message."]
}
mainArgs :: Args -> IO ()
mainArgs (Help cs) = putStr cs
mainArgs (Args filename)
= do cTrees <- if filename == "-" then TIO.getContents else TIO.readFile filename
TIO.putStr . yield $ (parsePenn cTrees :: [Tree String])
| null | https://raw.githubusercontent.com/tud-fop/vanda-haskell/3214966361b6dbf178155950c94423eee7f9453e/executable/VandaCLI/PennToSentenceCorpus.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style
Stability : unknown
Portability : portable
--------------------------------------------------------------------------- | Module : VandaCLI.PennToSentenceCorpus
Copyright : ( c ) Technische Universität Dresden 2018
module VandaCLI.PennToSentenceCorpus where
import Vanda.Corpus.Penn.Text
import qualified Data.Text.Lazy.IO as TIO
import Data.Tree
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Explicit.Misc
data Args = Help String | Args String deriving Show
cmdArgs :: Mode Args
cmdArgs = (modeEmpty $ Args "-"){ modeNames = ["penn-to-sentence-corpus"]
, modeHelp = "Reads of the yield of trees in a PennTreeCorpus."
, modeArgs = ([flagArg (\ s _ -> Right $ Args s) "FILE"], Nothing)
, modeGroupFlags = toGroup [flagNone ["help"] (\ _ -> Help $ defaultHelp cmdArgs) "Prints the help message."]
}
mainArgs :: Args -> IO ()
mainArgs (Help cs) = putStr cs
mainArgs (Args filename)
= do cTrees <- if filename == "-" then TIO.getContents else TIO.readFile filename
TIO.putStr . yield $ (parsePenn cTrees :: [Tree String])
|
d2de1d5af3886d5ba84ebfab07062d601bba006265533461fd6031b94854f22b | zellige/hs-geojson | GeoLine.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE TemplateHaskell #
-------------------------------------------------------------------
-- |
Module : Data . Geospatial . Internal . Geometry . GeoLine
Copyright : ( C ) 2014 - 2021 HS - GeoJSON Project
-- License : BSD-style (see the file LICENSE.md)
Maintainer :
module Data.Geospatial.Internal.Geometry.GeoLine
( -- * Type
GeoLine (..),
-- * Lenses
unGeoLine,
)
where
import Control.DeepSeq
import Control.Lens (makeLenses)
import Control.Monad (mzero)
import Data.Aeson
( FromJSON (..),
ToJSON (..),
Value (..),
)
import Data.Geospatial.Internal.BasicTypes
import Data.Geospatial.Internal.Geometry.Aeson
import Data.LineString
import GHC.Generics (Generic)
newtype GeoLine = GeoLine {_unGeoLine :: LineString GeoPositionWithoutCRS} deriving (Show, Eq, Generic, NFData)
makeLenses ''GeoLine
-- instances
instance ToJSON GeoLine where
: : a - > Value
toJSON = makeGeometryGeoAeson "LineString" . _unGeoLine
instance FromJSON GeoLine where
-- parseJSON :: Value -> Parser a
parseJSON (Object o) = readGeometryGeoAeson "LineString" GeoLine o
parseJSON _ = mzero
| null | https://raw.githubusercontent.com/zellige/hs-geojson/fb66e4f1b016d8e73408d9faa0945f61253131fa/src/Data/Geospatial/Internal/Geometry/GeoLine.hs | haskell | # LANGUAGE DeriveAnyClass #
-----------------------------------------------------------------
|
License : BSD-style (see the file LICENSE.md)
* Type
* Lenses
instances
parseJSON :: Value -> Parser a | # LANGUAGE DeriveGeneric #
# LANGUAGE TemplateHaskell #
Module : Data . Geospatial . Internal . Geometry . GeoLine
Copyright : ( C ) 2014 - 2021 HS - GeoJSON Project
Maintainer :
module Data.Geospatial.Internal.Geometry.GeoLine
GeoLine (..),
unGeoLine,
)
where
import Control.DeepSeq
import Control.Lens (makeLenses)
import Control.Monad (mzero)
import Data.Aeson
( FromJSON (..),
ToJSON (..),
Value (..),
)
import Data.Geospatial.Internal.BasicTypes
import Data.Geospatial.Internal.Geometry.Aeson
import Data.LineString
import GHC.Generics (Generic)
newtype GeoLine = GeoLine {_unGeoLine :: LineString GeoPositionWithoutCRS} deriving (Show, Eq, Generic, NFData)
makeLenses ''GeoLine
instance ToJSON GeoLine where
: : a - > Value
toJSON = makeGeometryGeoAeson "LineString" . _unGeoLine
instance FromJSON GeoLine where
parseJSON (Object o) = readGeometryGeoAeson "LineString" GeoLine o
parseJSON _ = mzero
|
2adf85f1a0b8767cbfafb54bd74a3d8b36a1fbb5bf284a54ef02725e4a710305 | screenshotbot/screenshotbot-oss | audit-log.lisp | ;;;; Copyright 2018-Present Modern Interpreters Inc.
;;;;
;;;; This Source Code Form is subject to the terms of the Mozilla Public
;;;; License, v. 2.0. If a copy of the MPL was not distributed with this
;;;; file, You can obtain one at https://mozilla.org/MPL/2.0/.
(defpackage :screenshotbot/pro/bitbucket/audit-log
(:nicknames :screenshotbot/bitbucket/audit-log)
(:use #:cl)
(:import-from #:bknr.indices
#:skip-list-index)
(:import-from #:screenshotbot/user-api
#:%created-at)
(:import-from #:bknr.datastore
#:persistent-class)
(:import-from #:bknr.datastore
#:store-object)
(:import-from #:bknr.indices
#:hash-index)
(:import-from #:bknr.datastore
#:with-transaction)
(:import-from #:screenshotbot/pro/bitbucket/core
#:bitbucket-error)
(:import-from #:util/misc
#:uniq)
(:import-from #:screenshotbot/model/auto-cleanup
#:register-auto-cleanup)
(:import-from #:util/store
#:with-class-validation)
(:import-from #:easy-macros
#:def-easy-macro)
(:import-from #:util/threading
#:ignore-and-log-errors)
(:import-from #:screenshotbot/audit-log
#:audit-log-error
#:audit-logs-for-company
#:base-audit-log
#:with-audit-log)
(:local-nicknames (#:a #:alexandria))
(:export
#:audit-log
#:build-status-audit-log
#:bitbucket-audit-logs-for-company
#:audit-log-error-response
#:build-status-audit-log-commit
#:build-status-audit-log-full-name
#:http-result-code
#:access-token-audit-log
#:access-token-audit-log-grant-type
#:with-audit-log))
(in-package :screenshotbot/bitbucket/audit-log)
(with-class-validation
(defclass audit-log (base-audit-log)
((%company :index-type hash-index
:index-reader %bitbucket-audit-logs-for-company)
(err :initform nil
:accessor %audit-log-error)
(error-response :initform nil
:accessor audit-log-error-response)
(http-result-code :initform nil
:accessor http-result-code)
(ts :initarg :ts
:reader %created-at))
(:default-initargs :ts (get-universal-time))
(:metaclass persistent-class)))
(register-auto-cleanup 'audit-log :timestamp #'%created-at)
(defmethod audit-log-error ((self audit-log))
;; For migration
(or
(%audit-log-error self)
(call-next-method)))
(with-class-validation
(defclass build-status-audit-log (audit-log)
((commit :initarg :commit
:reader build-status-audit-log-commit
:initform nil)
(full-name :initarg :full-name
:reader build-status-audit-log-full-name
:initform nil))
(:metaclass persistent-class)))
(with-class-validation
(defclass access-token-audit-log (audit-log)
((grant-type :initarg :grant-type
:reader access-token-audit-log-grant-type))
(:metaclass persistent-class)))
(defun bitbucket-audit-logs-for-company (company)
(append
(audit-logs-for-company company 'audit-log)
(let ((elems (%bitbucket-audit-logs-for-company company)))
(uniq (sort elems #'> :key 'bknr.datastore:store-object-id)))))
(defun parse-error-response (response result-code audit-log)
(let* ((response-obj (json:decode-json-from-string response))
(errors (a:assoc-value response-obj :errors)))
(let ((message (or
(a:assoc-value (car errors) :message)
;; See test-parses-error-correctly
(when (equal "error" (a:assoc-value response-obj :type))
(a:assoc-value (a:assoc-value response-obj :error)
:message))
;; If we can't parse an actual response, just use the
whole json . Suitable for OAuth .
response)))
(warn "Bitbucket api failed with: ~a" message)
(with-transaction ()
(setf (http-result-code audit-log) result-code)
(setf (audit-log-error audit-log) message)
(setf (audit-log-error-response audit-log) response))))
(error 'bitbucket-error :audit-log audit-log))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/a44533e7f11925a49cd11ba6ebb071e43d1961e0/src/screenshotbot/bitbucket/audit-log.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
For migration
See test-parses-error-correctly
If we can't parse an actual response, just use the | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :screenshotbot/pro/bitbucket/audit-log
(:nicknames :screenshotbot/bitbucket/audit-log)
(:use #:cl)
(:import-from #:bknr.indices
#:skip-list-index)
(:import-from #:screenshotbot/user-api
#:%created-at)
(:import-from #:bknr.datastore
#:persistent-class)
(:import-from #:bknr.datastore
#:store-object)
(:import-from #:bknr.indices
#:hash-index)
(:import-from #:bknr.datastore
#:with-transaction)
(:import-from #:screenshotbot/pro/bitbucket/core
#:bitbucket-error)
(:import-from #:util/misc
#:uniq)
(:import-from #:screenshotbot/model/auto-cleanup
#:register-auto-cleanup)
(:import-from #:util/store
#:with-class-validation)
(:import-from #:easy-macros
#:def-easy-macro)
(:import-from #:util/threading
#:ignore-and-log-errors)
(:import-from #:screenshotbot/audit-log
#:audit-log-error
#:audit-logs-for-company
#:base-audit-log
#:with-audit-log)
(:local-nicknames (#:a #:alexandria))
(:export
#:audit-log
#:build-status-audit-log
#:bitbucket-audit-logs-for-company
#:audit-log-error-response
#:build-status-audit-log-commit
#:build-status-audit-log-full-name
#:http-result-code
#:access-token-audit-log
#:access-token-audit-log-grant-type
#:with-audit-log))
(in-package :screenshotbot/bitbucket/audit-log)
(with-class-validation
(defclass audit-log (base-audit-log)
((%company :index-type hash-index
:index-reader %bitbucket-audit-logs-for-company)
(err :initform nil
:accessor %audit-log-error)
(error-response :initform nil
:accessor audit-log-error-response)
(http-result-code :initform nil
:accessor http-result-code)
(ts :initarg :ts
:reader %created-at))
(:default-initargs :ts (get-universal-time))
(:metaclass persistent-class)))
(register-auto-cleanup 'audit-log :timestamp #'%created-at)
(defmethod audit-log-error ((self audit-log))
(or
(%audit-log-error self)
(call-next-method)))
(with-class-validation
(defclass build-status-audit-log (audit-log)
((commit :initarg :commit
:reader build-status-audit-log-commit
:initform nil)
(full-name :initarg :full-name
:reader build-status-audit-log-full-name
:initform nil))
(:metaclass persistent-class)))
(with-class-validation
(defclass access-token-audit-log (audit-log)
((grant-type :initarg :grant-type
:reader access-token-audit-log-grant-type))
(:metaclass persistent-class)))
(defun bitbucket-audit-logs-for-company (company)
(append
(audit-logs-for-company company 'audit-log)
(let ((elems (%bitbucket-audit-logs-for-company company)))
(uniq (sort elems #'> :key 'bknr.datastore:store-object-id)))))
(defun parse-error-response (response result-code audit-log)
(let* ((response-obj (json:decode-json-from-string response))
(errors (a:assoc-value response-obj :errors)))
(let ((message (or
(a:assoc-value (car errors) :message)
(when (equal "error" (a:assoc-value response-obj :type))
(a:assoc-value (a:assoc-value response-obj :error)
:message))
response)))
(warn "Bitbucket api failed with: ~a" message)
(with-transaction ()
(setf (http-result-code audit-log) result-code)
(setf (audit-log-error audit-log) message)
(setf (audit-log-error-response audit-log) response))))
(error 'bitbucket-error :audit-log audit-log))
|
7ddb68b99842ecf7f4967f3cacba541dc6bd555e20f954fdad1bb128dd45f558 | flotsfacetieux/cl-entity-system | tests.lisp | (in-package #:cl-entity-system-tests)
(defclass flag (component)
((id :accessor flag-id
:initarg :id)))
(defclass direction (component)
())
(defclass c1 (component)
())
(defclass c2 (component)
())
(defclass c3 (component)
())
(defclass geometry (component)
())
(defclass render-system (system)
())
(define-test test-make-entity
(let ((em (make-instance 'entity-manager)))
(assert-equal 0 (make-entity em))
(assert-equal 1 (lowest-unassigned-id em))
(assert-equal 1 (make-entity em))
(assert-equal 2 (lowest-unassigned-id em))
(assert-equal 2 (length (entities em)))))
(define-test test-add-component
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal dir1 (entity-component em entity1 'direction))
(assert-true (entity-component em entity1 'geometry))
(assert-false (eq dir2 (entity-component em entity1 'direction)))
(assert-false (entity-component em entity2 'geometry))
(assert-equal 3 (hash-table-count (cl-es::components em)))))
(define-test test-del-component
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction)))
(add-component em entity1 dir1)
(assert-equal dir1 (entity-component em entity1 'direction))
(del-components em entity1 'direction)
(assert-false (eq dir1 (entity-component em entity1 'direction)))))
(define-test test-remove-entity
(let* ((em (make-instance 'entity-manager))
(entity (make-entity em))
(geom (make-instance 'geometry)))
(add-component em entity geom)
(assert-equal 1 (length (entities em)))
(remove-entity em entity)
(assert-equal 0 (length (entities em)))))
(define-test test-component-entity
(let* ((em (make-instance 'entity-manager))
(entity (make-entity em))
(geom (make-instance 'geometry))
(flag (make-instance 'flag)))
(add-component em entity geom)
(add-component em entity flag)
(assert-equal geom (entity-component em entity 'geometry))
(assert-equal flag (entity-component em entity 'flag))))
(define-test test-find-entities
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal (list entity1 entity2)
(find-entities em 'direction))
(assert-equal (list entity1) (find-entities em 'geometry))
(assert-equal (list entity2) (find-entities em 'flag))))
(define-test test-find-entities-of
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(c11 (make-instance 'c1))
(c12 (make-instance 'c2))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag))
(c21 (make-instance 'c1))
(c22 (make-instance 'c2))
(entity3 (make-entity em))
(c31 (make-instance 'c1))
(c32 (make-instance 'c2))
(dir3 (make-instance 'direction))
(flag3 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity1 c11)
(add-component em entity1 c12)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(add-component em entity2 c21)
(add-component em entity2 c22)
(add-component em entity3 dir3)
(add-component em entity3 flag3)
(add-component em entity3 c31)
(add-component em entity3 c32)
(assert-false (find-entities-of em '(direction flag geometry)))
(assert-equal (sort (list entity2 entity3) #'<)
(sort (find-entities-of em '(direction flag c1 c2))
#'<))
(assert-equal (sort (list entity1 entity2 entity3) #'<)
(sort (find-entities-of em '(direction c1 c2))
#'<))))
(define-test test-find-components
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal (list dir1 dir2)
(find-components em 'direction))
(assert-equal (list geom1) (find-components em 'geometry))
(assert-equal (list flag2) (find-components em 'flag))))
(defun do-tests ()
(setq *print-failures* t)
(setq *print-errors* t)
(run-tests :all :cl-es-tests))
| null | https://raw.githubusercontent.com/flotsfacetieux/cl-entity-system/6f4e00722f1a666c563e457bfbd7c03924f9e0b8/tests.lisp | lisp | (in-package #:cl-entity-system-tests)
(defclass flag (component)
((id :accessor flag-id
:initarg :id)))
(defclass direction (component)
())
(defclass c1 (component)
())
(defclass c2 (component)
())
(defclass c3 (component)
())
(defclass geometry (component)
())
(defclass render-system (system)
())
(define-test test-make-entity
(let ((em (make-instance 'entity-manager)))
(assert-equal 0 (make-entity em))
(assert-equal 1 (lowest-unassigned-id em))
(assert-equal 1 (make-entity em))
(assert-equal 2 (lowest-unassigned-id em))
(assert-equal 2 (length (entities em)))))
(define-test test-add-component
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal dir1 (entity-component em entity1 'direction))
(assert-true (entity-component em entity1 'geometry))
(assert-false (eq dir2 (entity-component em entity1 'direction)))
(assert-false (entity-component em entity2 'geometry))
(assert-equal 3 (hash-table-count (cl-es::components em)))))
(define-test test-del-component
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction)))
(add-component em entity1 dir1)
(assert-equal dir1 (entity-component em entity1 'direction))
(del-components em entity1 'direction)
(assert-false (eq dir1 (entity-component em entity1 'direction)))))
(define-test test-remove-entity
(let* ((em (make-instance 'entity-manager))
(entity (make-entity em))
(geom (make-instance 'geometry)))
(add-component em entity geom)
(assert-equal 1 (length (entities em)))
(remove-entity em entity)
(assert-equal 0 (length (entities em)))))
(define-test test-component-entity
(let* ((em (make-instance 'entity-manager))
(entity (make-entity em))
(geom (make-instance 'geometry))
(flag (make-instance 'flag)))
(add-component em entity geom)
(add-component em entity flag)
(assert-equal geom (entity-component em entity 'geometry))
(assert-equal flag (entity-component em entity 'flag))))
(define-test test-find-entities
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal (list entity1 entity2)
(find-entities em 'direction))
(assert-equal (list entity1) (find-entities em 'geometry))
(assert-equal (list entity2) (find-entities em 'flag))))
(define-test test-find-entities-of
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(c11 (make-instance 'c1))
(c12 (make-instance 'c2))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag))
(c21 (make-instance 'c1))
(c22 (make-instance 'c2))
(entity3 (make-entity em))
(c31 (make-instance 'c1))
(c32 (make-instance 'c2))
(dir3 (make-instance 'direction))
(flag3 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity1 c11)
(add-component em entity1 c12)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(add-component em entity2 c21)
(add-component em entity2 c22)
(add-component em entity3 dir3)
(add-component em entity3 flag3)
(add-component em entity3 c31)
(add-component em entity3 c32)
(assert-false (find-entities-of em '(direction flag geometry)))
(assert-equal (sort (list entity2 entity3) #'<)
(sort (find-entities-of em '(direction flag c1 c2))
#'<))
(assert-equal (sort (list entity1 entity2 entity3) #'<)
(sort (find-entities-of em '(direction c1 c2))
#'<))))
(define-test test-find-components
(let* ((em (make-instance 'entity-manager))
(entity1 (make-entity em))
(dir1 (make-instance 'direction))
(geom1 (make-instance 'geometry))
(entity2 (make-entity em))
(dir2 (make-instance 'direction))
(flag2 (make-instance 'flag)))
(add-component em entity1 dir1)
(add-component em entity1 geom1)
(add-component em entity2 dir2)
(add-component em entity2 flag2)
(assert-equal (list dir1 dir2)
(find-components em 'direction))
(assert-equal (list geom1) (find-components em 'geometry))
(assert-equal (list flag2) (find-components em 'flag))))
(defun do-tests ()
(setq *print-failures* t)
(setq *print-errors* t)
(run-tests :all :cl-es-tests))
|
|
e6da12fa6b6b2d7d50df9146ea96bcadbda5802b9957004ed939f1ef998d7955 | pink-gorilla/webly | date.cljc | (ns webly.date
"Time calculation and formatting helpers"
(:require
#?(:clj [java-time]
:cljs [cljs-time.core :as cljs-time])
#?(:clj [java-time.format])))
#?(:cljs
(def date-time cljs-time/date-time)
:clj
(defn date-time [& args]
(java-time/with-zone-same-instant
(apply java-time/zoned-date-time args)
"UTC")))
#?(:clj (defn now []
(java-time/with-zone-same-instant
(java-time/local-date)
"UTC"))
:cljs (def now cljs-time/now))
#?(:clj (defn tostring [dt]
(let [fmt (java-time.format/formatter "yyyy-MM-dd HH:mm:ss")]
(java-time.format/format fmt dt)))
:cljs (defn tostring [dt]
(let [fmt (cljs-time/formatter "yyyy-MM-dd HH:mm:ss")]
(cljs-time/unparse fmt dt))))
(defn now-str []
(tostring (now))
)
(now)
(tostring
(date-time 2021 01 23))
(tostring (now))
;; #?(:clj
;;    (s/fdef time-ago
;;      :args (s/cat :instant (s/alt :instant java-time/instant?
;;                                   :str string?))
;;      :ret string?))
(def ^:const units [{:name "second" :limit 60 :in-second 1}
{:name "minute" :limit 3600 :in-second 60}
{:name "hour" :limit 86400 :in-second 3600}
{:name "day" :limit 604800 :in-second 86400}
{:name "week" :limit 2629743 :in-second 604800}
{:name "month" :limit 31556926 :in-second 2629743}
{:name "year" :limit nil :in-second 31556926}])
(defn diff-to-now [instant]
#?(:clj
(java-time/as (java-time/duration instant (java-time/instant)) :seconds)
:cljs
(cljs-time/in-seconds (cljs-time/interval instant (cljs-time/now)))))
(defn time-ago
"Returns the difference of `instant` and now formated as words"
[instant]
(let [diff (diff-to-now instant)]
(if (< diff 5)
"just now"
(let [unit (first (drop-while #(and (:limit %)
(>= diff (:limit %)))
units))]
(-> (/ diff (:in-second unit))
Math/floor
int
(#(str % " " (:name unit) (when (> % 1) "s") " ago")))))))
(comment
#?(:clj
(do
(time-ago (java-time/instant "2018-07-02T10:20:30.00Z"))
(time-ago #inst "2018-07-04T11:14:49.738-00:00")
(time-ago #inst "2017-07-04T11:14:49.738-00:00"))
:cljs
(do
(in-minutes (interval (date-time 1986 10 2) (date-time 1986 10 14)))
(time-ago (cljs-time/minus (cljs-time/now) (cljs-time/hours 5)))))) | null | https://raw.githubusercontent.com/pink-gorilla/webly/36c7d6c15c96daed4f95c55d0721e9d612e9eb77/src-unused/date.cljc | clojure |
(s/fdef time-ago
:args (s/cat :instant (s/alt :instant java-time/instant?
:str string?)) | (ns webly.date
"Time calculation and formatting helpers"
(:require
#?(:clj [java-time]
:cljs [cljs-time.core :as cljs-time])
#?(:clj [java-time.format])))
#?(:cljs
(def date-time cljs-time/date-time)
:clj
(defn date-time [& args]
(java-time/with-zone-same-instant
(apply java-time/zoned-date-time args)
"UTC")))
#?(:clj (defn now []
(java-time/with-zone-same-instant
(java-time/local-date)
"UTC"))
:cljs (def now cljs-time/now))
#?(:clj (defn tostring [dt]
(java-time.format/format fmt dt)))
:cljs (defn tostring [dt]
(let [fmt (cljs-time/formatter "yyyy-MM-dd HH:mm:ss")]
(cljs-time/unparse fmt dt))))
(defn now-str []
(tostring (now))
)
(now)
(tostring
(date-time 2021 01 23))
(tostring (now))
# ? (:
: ret string ? ) )
(def ^:const units [{:name "second" :limit 60 :in-second 1}
{:name "minute" :limit 3600 :in-second 60}
{:name "hour" :limit 86400 :in-second 3600}
{:name "day" :limit 604800 :in-second 86400}
{:name "week" :limit 2629743 :in-second 604800}
{:name "month" :limit 31556926 :in-second 2629743}
{:name "year" :limit nil :in-second 31556926}])
(defn diff-to-now [instant]
#?(:clj
(java-time/as (java-time/duration instant (java-time/instant)) :seconds)
:cljs
(cljs-time/in-seconds (cljs-time/interval instant (cljs-time/now)))))
(defn time-ago
"Returns the difference of `instant` and now formated as words"
[instant]
(let [diff (diff-to-now instant)]
(if (< diff 5)
"just now"
(let [unit (first (drop-while #(and (:limit %)
(>= diff (:limit %)))
units))]
(-> (/ diff (:in-second unit))
Math/floor
int
(#(str % " " (:name unit) (when (> % 1) "s") " ago")))))))
(comment
#?(:clj
(do
(time-ago (java-time/instant "2018-07-02T10:20:30.00Z"))
(time-ago #inst "2018-07-04T11:14:49.738-00:00")
(time-ago #inst "2017-07-04T11:14:49.738-00:00"))
:cljs
(do
(in-minutes (interval (date-time 1986 10 2) (date-time 1986 10 14)))
(time-ago (cljs-time/minus (cljs-time/now) (cljs-time/hours 5)))))) |
a352923099e422ae42f552a4506cfed9478f26c5ccd6b3cdc1d20260f86479de | larcenists/larceny | mutable-pairs.sps | (import (tests r6rs mutable-pairs)
(tests scheme test)
(scheme write))
(display "Running tests for (rnrs mutable-pairs)\n")
(run-mutable-pairs-tests)
(report-test-results)
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/test/R7RS/Lib/tests/r6rs/run/mutable-pairs.sps | scheme | (import (tests r6rs mutable-pairs)
(tests scheme test)
(scheme write))
(display "Running tests for (rnrs mutable-pairs)\n")
(run-mutable-pairs-tests)
(report-test-results)
|
|
46d440df2473ed1c34ccc5e026cc0f7ffa93429ed5a7f460ce751f7a6432c816 | csabahruska/jhc-components | Version.hs | module Version.Version(
versionContext,
versionSimple,
versionString
) where
import Data.Version
import System.Info
import qualified Data.ByteString.UTF8 as BS
import Version.Config
import RawFiles
{-# NOINLINE versionSimple #-}
versionSimple = concat [package, " ", version, " (", BS.toString shortchange_txt, ")"]
{-# NOINLINE versionString #-}
versionString = concat [versionSimple, "\n", "compiled by ",compilerName,"-",showVersion compilerVersion," on a ",arch," running ",os]
{-# NOINLINE versionContext #-}
versionContext = changelog
| null | https://raw.githubusercontent.com/csabahruska/jhc-components/a7dace481d017f5a83fbfc062bdd2d099133adf1/jhc-common/src/Version/Version.hs | haskell | module Version.Version(
versionContext,
versionSimple,
versionString
) where
import Data.Version
import System.Info
import qualified Data.ByteString.UTF8 as BS
import Version.Config
import RawFiles
# NOINLINE versionSimple #
versionSimple = concat [package, " ", version, " (", BS.toString shortchange_txt, ")"]
# NOINLINE versionString #
versionString = concat [versionSimple, "\n", "compiled by ",compilerName,"-",showVersion compilerVersion," on a ",arch," running ",os]
# NOINLINE versionContext #
versionContext = changelog
|
|
cf1112d24197997c13a818b37ddf159a201635d1ac4cd0e702b5644b39255a16 | own-pt/cl-krr | banana-slug.lisp | (compile-suo-kif :in-files '("sample/core-sumo.kif" "sample/banana-slug.kif") :tptp t)
| null | https://raw.githubusercontent.com/own-pt/cl-krr/d21ce3c385ecb0b5b51bd2b6491a082532f9867c/development/experiments/banana-slug/banana-slug.lisp | lisp | (compile-suo-kif :in-files '("sample/core-sumo.kif" "sample/banana-slug.kif") :tptp t)
|
|
612e152d81cac6bf77555d7a6b66b62dbbf4d6ae47af8221794940bcb02fdbc3 | jesperes/aoc_erlang | aoc2016_day18.erl | -module(aoc2016_day18).
-include("aoc_puzzle.hrl").
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-define(TRAP, $^).
-define(SAFE, $.).
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2016,
day = 18,
name = "Like A Rogue",
expected = {2035, 20000577},
has_input_file = false}.
-type input_type() :: string().
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(_Input) ->
".^..^....^....^^.^^.^.^^.^.....^.^..^...^^^^^^.^^^^.^.^^^^^^^.^^^^^."
".^.^^^.^^..^.^^.^....^.^...^^.^.".
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
count_tiles(Input, 40).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Input) ->
count_tiles(Input, 400 * 1000).
%%% Implementation
next_row([Center, Right | Row]) ->
next_row(?SAFE, Center, Right, Row).
next_row(Left, Center, Right, []) ->
[is_trap(Left, Center, Right), is_trap(Center, Right, ?SAFE)];
next_row(Left, Center, Right, [NextRight | Rest]) ->
Trap = is_trap(Left, Center, Right),
[Trap | next_row(Center, Right, NextRight, Rest)].
is_trap(?TRAP, ?TRAP, ?SAFE) ->
?TRAP;
is_trap(?SAFE, ?TRAP, ?TRAP) ->
?TRAP;
is_trap(?TRAP, ?SAFE, ?SAFE) ->
?TRAP;
is_trap(?SAFE, ?SAFE, ?TRAP) ->
?TRAP;
is_trap(_, _, _) ->
?SAFE.
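%% Illustrative sketch (not part of the original module): applying the rules
%% above to the example row from the puzzle statement gives
%%   ".^^^^" = next_row("..^^."),
%% i.e. every tile except the first becomes a trap in the next row.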
safe_tiles(Row) ->
length(lists:filter(fun(C) -> C == ?SAFE end, Row)).
count_tiles(Start, Rows) ->
{Tiles, _} =
lists:foldl(fun(_, {N, AccIn}) ->
AccOut = next_row(AccIn),
{N + safe_tiles(AccOut), AccOut}
end,
{safe_tiles(Start), Start},
lists:seq(1, Rows - 1)),
Tiles.
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/ec0786088fb9ab886ee57e17ea0149ba3e91810a/src/2016/aoc2016_day18.erl | erlang | Implementation | -module(aoc2016_day18).
-include("aoc_puzzle.hrl").
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-define(TRAP, $^).
-define(SAFE, $.).
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2016,
day = 18,
name = "Like A Rogue",
expected = {2035, 20000577},
has_input_file = false}.
-type input_type() :: string().
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(_Input) ->
".^..^....^....^^.^^.^.^^.^.....^.^..^...^^^^^^.^^^^.^.^^^^^^^.^^^^^."
".^.^^^.^^..^.^^.^....^.^...^^.^.".
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
count_tiles(Input, 40).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Input) ->
count_tiles(Input, 400 * 1000).
next_row([Center, Right | Row]) ->
next_row(?SAFE, Center, Right, Row).
next_row(Left, Center, Right, []) ->
[is_trap(Left, Center, Right), is_trap(Center, Right, ?SAFE)];
next_row(Left, Center, Right, [NextRight | Rest]) ->
Trap = is_trap(Left, Center, Right),
[Trap | next_row(Center, Right, NextRight, Rest)].
is_trap(?TRAP, ?TRAP, ?SAFE) ->
?TRAP;
is_trap(?SAFE, ?TRAP, ?TRAP) ->
?TRAP;
is_trap(?TRAP, ?SAFE, ?SAFE) ->
?TRAP;
is_trap(?SAFE, ?SAFE, ?TRAP) ->
?TRAP;
is_trap(_, _, _) ->
?SAFE.
safe_tiles(Row) ->
length(lists:filter(fun(C) -> C == ?SAFE end, Row)).
count_tiles(Start, Rows) ->
{Tiles, _} =
lists:foldl(fun(_, {N, AccIn}) ->
AccOut = next_row(AccIn),
{N + safe_tiles(AccOut), AccOut}
end,
{safe_tiles(Start), Start},
lists:seq(1, Rows - 1)),
Tiles.
|
9b4e654bbfa5d8f6468a23afc55f0199b8da755afa0906d17826f280bec72347 | reactive-systems/syfco | Reader.hs | -----------------------------------------------------------------------------
-- |
-- Module : Reader
-- License     :  MIT (see the LICENSE file)
-- Maintainer  :  ( )
--
-- The module reads a specification to the internal format.
--
-----------------------------------------------------------------------------
{-# LANGUAGE
    RecordWildCards
  #-}
-----------------------------------------------------------------------------
module Reader
( fromTLSF
) where
-----------------------------------------------------------------------------
import Data.Error
( Error
)
import Data.SymbolTable
( SymbolTable
, IdRec(..)
)
import Data.Specification
( Specification(..)
)
import Reader.Sugar
( replaceSugar
)
import Reader.Parser
( parse
)
import Reader.Bindings
( specBindings
)
import Reader.InferType
( inferTypes
)
import Reader.Abstraction
( abstract
)
import Data.Maybe
( fromJust
)
import Data.List
( zip7
)
import qualified Data.IntMap as IM
( null
, toAscList
, minViewWithKey
, maxViewWithKey
)
import qualified Data.Array.IArray as A
( array
)
import qualified Reader.Data as RD
( Specification(..)
)
-----------------------------------------------------------------------------
-- | Parses a specification in TLSF.
fromTLSF
:: String -> Either Error Specification
fromTLSF str =
-- parse the input
parse str >>=
-- replace variable names by a unique identifier
abstract >>=
-- replace syntactic sugar constructs for later conversion
replaceSugar >>=
-- retrieve the bindings of expression variables
specBindings >>=
-- infer types and check them
inferTypes >>=
-- lift reader specification to global specification
\(s@RD.Specification {..}) -> return
Specification
{ source = str
, title = fst title
, titlePos = snd title
, description = fst description
, descriptionPos = snd description
, semantics = fst semantics
, semanticsPos = snd semantics
, target = fst target
, targetPos = snd target
, tags = map fst $ tags
, tagsPos = map snd $ tags
, enumerations = enumerations
, parameters = parameters
, definitions = definitions
, inputs = inputs
, outputs = outputs
, initially = initially
, preset = preset
, requirements = requirements
, assumptions = assumptions
, invariants = invariants
, guarantees = guarantees
, symboltable = symtable s
}
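-- Illustrative usage sketch (not part of the original module; the file name
-- below is hypothetical):
--
-- > do str <- readFile "example.tlsf"
-- >    case fromTLSF str of
-- >      Left _err   -> putStrLn "specification could not be read"
-- >      Right _spec -> putStrLn "specification read successfully"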
-----------------------------------------------------------------------------
symtable
:: RD.Specification -> SymbolTable
symtable (RD.Specification {..}) =
let
minkey = key IM.minViewWithKey
maxkey = key IM.maxViewWithKey
is = map fst $ IM.toAscList names
ns = map snd $ IM.toAscList names
ps = map snd $ IM.toAscList positions
as = map snd $ IM.toAscList arguments
bs = map snd $ IM.toAscList bindings
ts = map snd $ IM.toAscList types
ds = map snd $ IM.toAscList dependencies
ys = zip7 is ns ps as bs ts ds
xs = map (\(a,b,c,d,e,f,g) -> (a,IdRec b c d e f g)) ys
in
A.array (minkey, maxkey) xs
where
key f
| IM.null names = 0
| otherwise =
fst $ fst $ fromJust $ f names
-----------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/reactive-systems/syfco/603bbdba1c4902674165714516eea7b6dacbd155/src/lib/Reader.hs | haskell | ---------------------------------------------------------------------------
|
Module : Reader
The module reads a specification to the internal format.
---------------------------------------------------------------------------
# LANGUAGE
RecordWildCards
#
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
parse the input
replace variable names by a unique identifier
replace syntactic sugar constructs for later converison
retrieve the bindings of expression variables
lift reader specification to global specification
---------------------------------------------------------------------------
--------------------------------------------------------------------------- | License : MIT ( see the LICENSE file )
Maintainer : ( )
module Reader
( fromTLSF
) where
import Data.Error
( Error
)
import Data.SymbolTable
( SymbolTable
, IdRec(..)
)
import Data.Specification
( Specification(..)
)
import Reader.Sugar
( replaceSugar
)
import Reader.Parser
( parse
)
import Reader.Bindings
( specBindings
)
import Reader.InferType
( inferTypes
)
import Reader.Abstraction
( abstract
)
import Data.Maybe
( fromJust
)
import Data.List
( zip7
)
import qualified Data.IntMap as IM
( null
, toAscList
, minViewWithKey
, maxViewWithKey
)
import qualified Data.Array.IArray as A
( array
)
import qualified Reader.Data as RD
( Specification(..)
)
| Parses a specification in TLSF .
fromTLSF
:: String -> Either Error Specification
fromTLSF str =
parse str >>=
abstract >>=
replaceSugar >>=
specBindings >>=
inferTypes >>=
\(s@RD.Specification {..}) -> return
Specification
{ source = str
, title = fst title
, titlePos = snd title
, description = fst description
, descriptionPos = snd description
, semantics = fst semantics
, semanticsPos = snd semantics
, target = fst target
, targetPos = snd target
, tags = map fst $ tags
, tagsPos = map snd $ tags
, enumerations = enumerations
, parameters = parameters
, definitions = definitions
, inputs = inputs
, outputs = outputs
, initially = initially
, preset = preset
, requirements = requirements
, assumptions = assumptions
, invariants = invariants
, guarantees = guarantees
, symboltable = symtable s
}
symtable
:: RD.Specification -> SymbolTable
symtable (RD.Specification {..}) =
let
minkey = key IM.minViewWithKey
maxkey = key IM.maxViewWithKey
is = map fst $ IM.toAscList names
ns = map snd $ IM.toAscList names
ps = map snd $ IM.toAscList positions
as = map snd $ IM.toAscList arguments
bs = map snd $ IM.toAscList bindings
ts = map snd $ IM.toAscList types
ds = map snd $ IM.toAscList dependencies
ys = zip7 is ns ps as bs ts ds
xs = map (\(a,b,c,d,e,f,g) -> (a,IdRec b c d e f g)) ys
in
A.array (minkey, maxkey) xs
where
key f
| IM.null names = 0
| otherwise =
fst $ fst $ fromJust $ f names
|
a0bf2a861f6ff7f235b9630b16d3be43015cd56beb44e927a3b1c4a4cb078072 | amar47shah/cis-194 | Testing.hs | # OPTIONS_GHC -Wall #
# LANGUAGE ExistentialQuantification #
module Testing where
import Data.Maybe
import Control.Arrow
data Test = forall a. Show a => Test String (a -> Bool) [a]
data Failure = forall a. Show a => Fail String [a]
instance Show Failure where
show (Fail s as) = "Failed Test \"" ++ s
++ "\" on inputs " ++ show as
runTest :: Test -> Maybe Failure
runTest (Test s f as) = case filter (not . f) as of
[] -> Nothing
fs -> Just $ Fail s fs
runTests :: [Test] -> [Failure]
runTests = catMaybes . map runTest
-- Helpers
testF1 :: (Show a, Show b, Eq b) => String -> (a -> b) -> [(a, b)] -> Test
testF1 s f l = Test s (uncurry (==)) $ map (first f) l
testF2 :: (Show a, Show b, Show c, Eq c) => String -> (a -> b -> c)
-> [(a, b, c)] -> Test
testF2 s f l = Test s (uncurry (==)) $ map (\(x, y, z) -> (f x y, z)) l
testF3 :: (Show a, Show b, Show c, Show d, Eq d) => String -> (a -> b -> c -> d)
-> [(a, b, c, d)] -> Test
testF3 s f l = Test s (uncurry (==)) $ map (\(w, x, y, z) -> (f w x y, z)) l
testF4 :: (Show a, Show b, Show c, Show d, Show e, Eq e) =>
String -> (a -> b -> c -> d -> e) -> [(a, b, c, d, e)] -> Test
testF4 s f l = Test s (uncurry (==)) $
map (\(v, w, x, y, z) -> (f v w x y, z)) l
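-- Illustrative sketch (not part of the original module): wiring a tiny test
-- into this framework; the tested function is made up for the example.
--
-- > exampleTests :: [Test]
-- > exampleTests = [ testF1 "double" (*2) [(1 :: Int, 2), (3, 6)] ]
-- >
-- > -- runTests exampleTests evaluates to [] when every case passes.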
| null | https://raw.githubusercontent.com/amar47shah/cis-194/2b35ce18df176b47502950030af26e14d866b3e4/2015-noamz/solutions/Testing.hs | haskell | Helpers | # OPTIONS_GHC -Wall #
# LANGUAGE ExistentialQuantification #
module Testing where
import Data.Maybe
import Control.Arrow
data Test = forall a. Show a => Test String (a -> Bool) [a]
data Failure = forall a. Show a => Fail String [a]
instance Show Failure where
show (Fail s as) = "Failed Test \"" ++ s
++ "\" on inputs " ++ show as
runTest :: Test -> Maybe Failure
runTest (Test s f as) = case filter (not . f) as of
[] -> Nothing
fs -> Just $ Fail s fs
runTests :: [Test] -> [Failure]
runTests = catMaybes . map runTest
testF1 :: (Show a, Show b, Eq b) => String -> (a -> b) -> [(a, b)] -> Test
testF1 s f l = Test s (uncurry (==)) $ map (first f) l
testF2 :: (Show a, Show b, Show c, Eq c) => String -> (a -> b -> c)
-> [(a, b, c)] -> Test
testF2 s f l = Test s (uncurry (==)) $ map (\(x, y, z) -> (f x y, z)) l
testF3 :: (Show a, Show b, Show c, Show d, Eq d) => String -> (a -> b -> c -> d)
-> [(a, b, c, d)] -> Test
testF3 s f l = Test s (uncurry (==)) $ map (\(w, x, y, z) -> (f w x y, z)) l
testF4 :: (Show a, Show b, Show c, Show d, Show e, Eq e) =>
String -> (a -> b -> c -> d -> e) -> [(a, b, c, d, e)] -> Test
testF4 s f l = Test s (uncurry (==)) $
map (\(v, w, x, y, z) -> (f v w x y, z)) l
|
b231d4d3ac75d3e47998f16264da5f4f3718e176f15fd995346a9a36d2a1e0bd | leviroth/ocaml-reddit-api | test_set_subreddit_sticky.ml | open! Core
open! Async
open! Import
let%expect_test "set_subreddit_sticky" =
with_cassette "set_subreddit_sticky" ~f:(fun connection ->
let link = Thing.Link.Id.of_string "f7vspj" in
let%bind () =
Connection.call_exn
connection
(Endpoint.set_subreddit_sticky
()
~link
~sticky_state:(Sticky { slot = Some 2 }))
in
[%expect];
let%bind () =
Connection.call_exn
connection
(Endpoint.set_subreddit_sticky () ~link ~sticky_state:Unsticky)
in
[%expect];
return ())
;;
| null | https://raw.githubusercontent.com/leviroth/ocaml-reddit-api/03cf04fddc0882adb99ce51ed832dc66251b9ae5/test/test_set_subreddit_sticky.ml | ocaml | open! Core
open! Async
open! Import
let%expect_test "set_subreddit_sticky" =
with_cassette "set_subreddit_sticky" ~f:(fun connection ->
let link = Thing.Link.Id.of_string "f7vspj" in
let%bind () =
Connection.call_exn
connection
(Endpoint.set_subreddit_sticky
()
~link
~sticky_state:(Sticky { slot = Some 2 }))
in
[%expect];
let%bind () =
Connection.call_exn
connection
(Endpoint.set_subreddit_sticky () ~link ~sticky_state:Unsticky)
in
[%expect];
return ())
;;
|
|
8eeeb7722f36cea2771022738c81fbb812ea129430f04d560da65e9e92c9a59a | ygrek/mldonkey | mp3tag.mli | (**************************************************************************)
(*    Copyright 2003, 2002 b8_bavard, , , b52_simon INRIA                *)
(*                                                                        *)
(*    This file is part of mldonkey.                                      *)
(*                                                                        *)
(*    mldonkey is free software; you can redistribute it and/or modify    *)
(*    it under the terms of the GNU General Public License as published   *)
(*    by the Free Software Foundation; either version 2 of the License,   *)
(*    or (at your option) any later version.                              *)
(*                                                                        *)
(*    mldonkey is distributed in the hope that it will be useful,         *)
(*    but WITHOUT ANY WARRANTY; without even the implied warranty of      *)
(*    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the       *)
(*    GNU General Public License for more details.                        *)
(*                                                                        *)
(*    You should have received a copy of the GNU General Public License   *)
(*    along with mldonkey; if not, write to the Free Software             *)
(*    Foundation, Inc., 59 Temple Place, Suite 330, Boston,               *)
(*    MA 02111-1307 USA                                                   *)
(* *)
(**************************************************************************)
(** Interface for editing mp3 tags and getting information. *)
(** Reading and writing id3 v1.1 tags. *)
module Id3v1 : sig
(** An id3 v1.1 tag. *)
type tag = {
mutable title: string;
mutable artist: string;
mutable album: string;
mutable year:string;
mutable comment: string;
mutable tracknum: int;
mutable genre: int
}
(** Check if the given file has a id3 v1.1 tag.
@raise Sys_error if an error occurs while opening the file.
*)
val has_tag : string -> bool
(** Read the tags in a mp3 file.
@raise Not_found if the file doesn't contain tags.
@raise Sys_error if an error occurs while opening the file.
*)
val read : string -> tag
(** Write the given tag structure into the given file.
@raise Sys_error if an error occurs with the file.
*)
val write : tag -> string -> unit
(** Merge two tags.
    [merge t1 t2] return a new tag where field values in [t1]
    have been replaced by the ones in [t2] when they are defined
    in [t2].*)
val merge : tag -> tag -> tag
(** The empty tag. *)
val no_tag : tag
end
(** Reading and writing id3 v2.3 tags. *)
module Id3v2 : sig
type tag = (string * string) list
(** Read the tags in a mp3 file.
@return the list of information or [[]] if no information was found.
@raise Sys_error if an error occurs while opening the file.
*)
val read : string -> tag
(** Write the given tag structure into the given file.
@raise Sys_error if an error occurs with the file.
@param src the optional source filename, if it is
different from the filename.
*)
val write : tag -> ?src:string -> string -> unit
(** Merge two tags.
    [merge t1 t2] return a new tag where values in [t1]
    have been replaced by the ones in [t2] when they are defined
    in [t2].*)
val merge : tag -> tag -> tag
(** The empty tag. *)
val no_tag : tag
end
(** {2 Reading and writing both versions of tags} *)
(** Get a v1 tag from the v1 and v2 tags of the given file.
The returned tag is the result of [merge v2 v1].*)
val read_both_as_v1 : string -> Id3v1.tag
(** Get a v2 tag from the v1 and v2 tags of the given file.
The returned tag is the result of [merge v2 v1].*)
val read_both_as_v2 : string -> Id3v2.tag
(** Write the given v1 tag to the v1 and v2 tags of the given file.
@param src the optional source filename, if it is
different from the filename.*)
val write_both_v1 : Id3v1.tag -> ?src: string -> string -> unit
(** Write the given v2 tag to the v1 and v2 tags of the given file.
@param src the optional source filename, if it is
different from the filename.*)
val write_both_v2 : Id3v2.tag -> ?src: string -> string -> unit
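(** Illustrative usage sketch (not part of the original interface; the module
    name [Mp3tag] and the file name are assumptions):
    {[
      let tag = Mp3tag.read_both_as_v1 "song.mp3" in
      print_endline (tag.Mp3tag.Id3v1.artist ^ " - " ^ tag.Mp3tag.Id3v1.title)
    ]} *)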
(** {2 Tag conversions} *)
val v2_of_v1 : Id3v1.tag -> Id3v2.tag
val v1_of_v2 : Id3v2.tag -> Id3v1.tag
(** {2 Genres} *)
(** Get the genre string from the given genre id. *)
val string_of_genre : int -> string
(** Get the genre id from the given genre string. *)
val genre_of_string : string -> int
(** The list of genre names. *)
val genres : string list
(** {2 Getting information on a MP3 file} *)
type channel_mode =
Stereo
| Joint_stereo
| Dual_channel_stereo
| Mono
type mp3_encoding =
CBR (** Constant Bit Rate *)
| VBR (** Variable Bit Rate *)
type info = {
    duration: int; (** in seconds *)
samplerate: int; (** in kilobits per second *)
mode: channel_mode; (** stereo, mono, etc *)
bitrate: int; (** in kilobits per second *)
encoding: mp3_encoding; (** variable or constant bit rate *)
filesize: int (** in bytes *)
}
(** Return information on the given MP3 file. *)
val info : string -> info
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/utils/mp3tagui/mp3tag.mli | ocaml | ************************************************************************
This file is part of mldonkey.
or (at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
************************************************************************
* Interface for editing mp3 tags and getting information.
* Reading and writing id3 v1.1 tags.
* An id3 v1.1 tag.
* Check if the given file has a id3 v1.1 tag.
@raise Sys_error if an error occurs while opening the file.
* Read the tags in a mp3 file.
@raise Not_found if the file doesn't contain tags.
@raise Sys_error if an error occurs while opening the file.
* Write the given tag structure into the given file.
@raise Sys_error if an error occurs with the file.
* The empty tag.
* Reading and writing id3 v2.3 tags.
* Read the tags in a mp3 file.
@return the list of information or [[]] if no information was found.
@raise Sys_error if an error occurs while opening the file.
* Write the given tag structure into the given file.
@raise Sys_error if an error occurs with the file.
@param src the optional source filename, if it is
different from the filename.
* The empty tag.
* {2 Reading and writing both versions of tags}
* Get a v1 tag from the v1 and v2 tags of the given file.
The returned tag is the result of [merge v2 v1].
* Get a v2 tag from the v1 and v2 tags of the given file.
The returned tag is the result of [merge v2 v1].
* Write the given v1 tag to the v1 and v2 tags of the given file.
@param src the optional source filename, if it is
different from the filename.
* Write the given v2 tag to the v1 and v2 tags of the given file.
@param src the optional source filename, if it is
different from the filename.
* Get the genre string from the given genre id.
* Get the genre id from the given genre string.
* The list of genre names.
* Constant Bit Rate
* Variable Bit Rate
* in kilobits per second
* stereo, mono, etc
* in kilobits per second
* variable or constant bit rate
* in bytes
* Return information on the given MP3 file. | Copyright 2003 , 2002 b8_bavard , , , b52_simon INRIA
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation ; either version 2 of the License ,
is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston ,
MA 02111 - 1307 USA
module Id3v1 : sig
type tag = {
mutable title: string;
mutable artist: string;
mutable album: string;
mutable year:string;
mutable comment: string;
mutable tracknum: int;
mutable genre: int
}
val has_tag : string -> bool
val read : string -> tag
val write : tag -> string -> unit
* Merge two tags .
[ merge t1 t2 ] return a new tag where field values in [ t1 ]
have been replaced by the ones in [ t2 ] when they are defined
in [ t2 ] .
[merge t1 t2] return a new tag where field values in [t1]
have been replaced by the ones in [t2] when they are defined
in [t2].*)
val merge : tag -> tag -> tag
val no_tag : tag
end
module Id3v2 : sig
type tag = (string * string) list
val read : string -> tag
val write : tag -> ?src:string -> string -> unit
* Merge two tags .
[ merge t1 t2 ] return a new tag where values in [ t1 ]
have been replaced by the ones in [ t2 ] when they are defined
in [ t2 ] .
[merge t1 t2] return a new tag where values in [t1]
have been replaced by the ones in [t2] when they are defined
in [t2].*)
val merge : tag -> tag -> tag
val no_tag : tag
end
val read_both_as_v1 : string -> Id3v1.tag
val read_both_as_v2 : string -> Id3v2.tag
val write_both_v1 : Id3v1.tag -> ?src: string -> string -> unit
val write_both_v2 : Id3v2.tag -> ?src: string -> string -> unit
* { 2 Tag conversions }
val v2_of_v1 : Id3v1.tag -> Id3v2.tag
val v1_of_v2 : Id3v2.tag -> Id3v1.tag
* { 2 Genres }
val string_of_genre : int -> string
val genre_of_string : string -> int
val genres : string list
* { 2 Getting information on a MP3 file }
type channel_mode =
Stereo
| Joint_stereo
| Dual_channel_stereo
| Mono
type mp3_encoding =
type info =
* in seconds
}
val info : string -> info
|
70b2580ad68dda63fce8ff918819e6556d67e8f78ec11cd3994eaf4ec029bac5 | helvm/helma | LSU.hs | module HelVM.HelMA.Automaton.Units.LSU where
import HelVM.HelMA.Automaton.Units.ALU
import qualified HelVM.HelMA.Automaton.Units.RAM as RAM
import HelVM.HelMA.Automaton.IO.BusinessIO
import HelVM.HelMA.Automaton.Instruction.IOInstruction
import HelVM.HelMA.Automaton.Instruction.LSInstruction
import HelVM.HelIO.Control.Safe
import Control.Type.Operator
runSLI :: (LSU m s r element) => LSInstruction -> LoadStoreUnit s r -> m $ LoadStoreUnit s r
runSLI Load = load
runSLI Store = store
runSLI (MIO OutputChar) = loadOutputChar
runSLI (MIO OutputDec) = loadOutputDec
runSLI (MIO InputChar) = storeInputChar
runSLI (MIO InputDec) = storeInputDec
load :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
load (LSU s r) = appendError "LSU.load" $ build <$> pop1 s where
build (address , s') = LSU (push1 (RAM.genericLoad r address) s') r
store :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
store (LSU s r) = appendError "LSU.store" $ build <$> pop2 s where
build (value , address , s') = LSU s' $ RAM.store address value r
loadOutputChar :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
loadOutputChar (LSU s r) = appendError "LSU.loadOutputChar" $ build =<< pop1 s where
build (address , s') = LSU s' r <$ wPutAsChar (RAM.genericLoad r address)
loadOutputDec :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
loadOutputDec (LSU s r) = appendError "LSU.loadOutputDec" $ build =<< pop1 s where
build (address , s') = LSU s' r <$ wPutAsDec (RAM.genericLoad r address)
storeInputChar :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
storeInputChar (LSU s r) = appendError "LSU.storeInputChar" $ build =<< pop1 s where
build (address , s') = LSU s' . flip (RAM.store address) r <$> wGetCharAs
storeInputDec :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
storeInputDec (LSU s r) = appendError "LSU.storeInputDec" $ build =<< pop1 s where
build (address , s') = LSU s' . flip (RAM.store address) r <$> wGetDecAs
-- | Types
type LSU m s r element = (ALU m s element , RAM.RAM r element)
data LoadStoreUnit s r = LSU
{ stack :: s
, ram :: r
}
| null | https://raw.githubusercontent.com/helvm/helma/2ae31668ce11ded53daf01effe25047d5ce471cf/hs/src/HelVM/HelMA/Automaton/Units/LSU.hs | haskell | | Types | module HelVM.HelMA.Automaton.Units.LSU where
import HelVM.HelMA.Automaton.Units.ALU
import qualified HelVM.HelMA.Automaton.Units.RAM as RAM
import HelVM.HelMA.Automaton.IO.BusinessIO
import HelVM.HelMA.Automaton.Instruction.IOInstruction
import HelVM.HelMA.Automaton.Instruction.LSInstruction
import HelVM.HelIO.Control.Safe
import Control.Type.Operator
runSLI :: (LSU m s r element) => LSInstruction -> LoadStoreUnit s r -> m $ LoadStoreUnit s r
runSLI Load = load
runSLI Store = store
runSLI (MIO OutputChar) = loadOutputChar
runSLI (MIO OutputDec) = loadOutputDec
runSLI (MIO InputChar) = storeInputChar
runSLI (MIO InputDec) = storeInputDec
load :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
load (LSU s r) = appendError "LSU.load" $ build <$> pop1 s where
build (address , s') = LSU (push1 (RAM.genericLoad r address) s') r
store :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
store (LSU s r) = appendError "LSU.store" $ build <$> pop2 s where
build (value , address , s') = LSU s' $ RAM.store address value r
loadOutputChar :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
loadOutputChar (LSU s r) = appendError "LSU.loadOutputChar" $ build =<< pop1 s where
build (address , s') = LSU s' r <$ wPutAsChar (RAM.genericLoad r address)
loadOutputDec :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
loadOutputDec (LSU s r) = appendError "LSU.loadOutputDec" $ build =<< pop1 s where
build (address , s') = LSU s' r <$ wPutAsDec (RAM.genericLoad r address)
storeInputChar :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
storeInputChar (LSU s r) = appendError "LSU.storeInputChar" $ build =<< pop1 s where
build (address , s') = LSU s' . flip (RAM.store address) r <$> wGetCharAs
storeInputDec :: LSU m s r element => LoadStoreUnit s r -> m $ LoadStoreUnit s r
storeInputDec (LSU s r) = appendError "LSU.storeInputDec" $ build =<< pop1 s where
build (address , s') = LSU s' . flip (RAM.store address) r <$> wGetDecAs
type LSU m s r element = (ALU m s element , RAM.RAM r element)
data LoadStoreUnit s r = LSU
{ stack :: s
, ram :: r
}
|
5c06a33021b79715a3f7544768e72cb372b4b9287d2c0223e3be0c09a13ecd97 | dancrumb/clojure-brave-and-true | chapter3.clj | (ns clojure-brave-and-true.chapter3
(:gen-class))
(use 'clojure.pprint)
(def asym-hobbit-body-parts [{:name "head" :size 3}
{:name "left-eye" :size 1}
{:name "left-ear" :size 1}
{:name "mouth" :size 1}
{:name "nose" :size 1}
{:name "neck" :size 2}
{:name "left-shoulder" :size 3}
{:name "left-upper-arm" :size 3}
{:name "chest" :size 10}
{:name "back" :size 10}
{:name "left-forearm" :size 3}
{:name "abdomen" :size 6}
{:name "left-kidney" :size 1}
{:name "left-hand" :size 2}
{:name "left-knee" :size 2}
{:name "left-thigh" :size 4}
{:name "left-lower-leg" :size 3}
{:name "left-achilles" :size 1}
{:name "left-foot" :size 2}])
(defn matching-parts
[part multiplier]
(if (re-find #"^left-" (:name part))
(multiplier
{
:name (clojure.string/replace (:name part) #"^left-" "")
:size (:size part)})
part))
(defn to-seq
[thing]
(if (seq? thing)
thing
[thing]))
(defn multiply-by
[multiple]
#(repeat multiple %))
(defn symmetrize-body-parts
"Expects a seq of maps that have a :name and :size"
[asym-body-parts]
(reduce (fn
[parts part]
(reduce (fn [parts part] (cons part parts)) parts (to-seq (matching-parts part (multiply-by 5)))))
[] asym-body-parts))
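;; Illustrative sketch (not part of the original file): matching-parts expands
;; each left-* part into five unprefixed copies via multiply-by, so a single
;; left-eye entry contributes five {:name "eye" :size 1} maps to the result.
(comment
  (matching-parts {:name "left-eye" :size 1} (multiply-by 5))
  ;; => five copies of {:name "eye" :size 1}
  )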
(defn -main
[& args]
(clojure.pprint/pprint (symmetrize-body-parts asym-hobbit-body-parts)))
| null | https://raw.githubusercontent.com/dancrumb/clojure-brave-and-true/6ae47dcfc3274b6751156f54c486b0732d0e5edc/src/clojure_brave_and_true/chapter3.clj | clojure | (ns clojure-brave-and-true.chapter3
(:gen-class))
(use 'clojure.pprint)
(def asym-hobbit-body-parts [{:name "head" :size 3}
{:name "left-eye" :size 1}
{:name "left-ear" :size 1}
{:name "mouth" :size 1}
{:name "nose" :size 1}
{:name "neck" :size 2}
{:name "left-shoulder" :size 3}
{:name "left-upper-arm" :size 3}
{:name "chest" :size 10}
{:name "back" :size 10}
{:name "left-forearm" :size 3}
{:name "abdomen" :size 6}
{:name "left-kidney" :size 1}
{:name "left-hand" :size 2}
{:name "left-knee" :size 2}
{:name "left-thigh" :size 4}
{:name "left-lower-leg" :size 3}
{:name "left-achilles" :size 1}
{:name "left-foot" :size 2}])
(defn matching-parts
[part multiplier]
(if (re-find #"^left-" (:name part))
(multiplier
{
:name (clojure.string/replace (:name part) #"^left-" "")
:size (:size part)})
part))
(defn to-seq
[thing]
(if (seq? thing)
thing
[thing]))
(defn multiply-by
[multiple]
#(repeat multiple %))
(defn symmetrize-body-parts
"Expects a seq of maps that have a :name and :size"
[asym-body-parts]
(reduce (fn
[parts part]
(reduce (fn [parts part] (cons part parts)) parts (to-seq (matching-parts part (multiply-by 5)))))
[] asym-body-parts))
(defn -main
[& args]
(clojure.pprint/pprint (symmetrize-body-parts asym-hobbit-body-parts)))
|
|
4395eb0ca95ac478f376c1bab4fcd0bec69b39b000780490507fb2ed5ce576fc | cloverage/cloverage | exercise_instrumentation.clj | (ns cloverage.sample.exercise-instrumentation
(:refer-clojure :exclude [loop])
(:import java.lang.RuntimeException)
(:require [clojure.test :refer :all]))
'()
(+ 40)
(+ 40 2)
(str 1 2 3)
(+ (* 2 3)
(/ 12 3))
(let [a (+ 40 2)
b (+ 3 4)]
(* a b))
{:a (+ 40 2)
(/ 4 2) "two"}
(defn function-with-empty-list []
;; used to break stuff - see issues #14 and #17
'())
(defn not-covered-at-all
"This function is not covered at all"
[arg1 arg2]
(+ 2 3)
(- 2 3))
(defn partially-covered
[cnd]
(if cnd (+ 1 2 3) (- 2 3 4)))
(deftest test-partially-covered
(is (= 6 (partially-covered true))))
(defn fully-covered [cnd]
(if cnd (+ 1 2 3) (- 4 5 6)))
(deftest test-fully-covered
(is (= 6 (fully-covered true)))
(is (= -7 (fully-covered false))))
(defmulti mixed-coverage-multi type)
(defmethod mixed-coverage-multi String
;; fully covered
[x]
(do ;; no-op
x))
(defmethod mixed-coverage-multi Long
;; partially covered
[x]
(if (= x 1)
(+ x 2)
(- x 2)))
(defmethod mixed-coverage-multi Character
;; not covered
[x]
(str x))
(deftest test-mixed-multi
(is "String" (mixed-coverage-multi "String"))
(is 3 (mixed-coverage-multi 1)))
(defmulti fully-covered-multi type)
(defmethod fully-covered-multi String [x] x)
(defmethod fully-covered-multi :default [x] x)
(deftest test-fully-covered-multi
(is "String" (fully-covered-multi "String"))
(is 1 (fully-covered-multi 1)))
(defn palindrome?
"Tests whether s is a palindrom."
;; fully covered
[s]
(if-not (vector? s)
(palindrome? (vec s))
(if (<= (count s) 1)
true
(and (= (s 0) (s (dec (count s))))
(palindrome? (subvec s 1 (dec (count s))))))))
(deftest test-palindrome
;; Palindrome is fully covered
(is (palindrome? "noon"))
(is (palindrome? "racecar"))
(is (not (palindrome? "hello"))))
(defn permutation?
"Tests whether a and b are permutations of each other"
[a b]
(and (= (count a)
(count b))
(let [add-occurrence (fn [m c] (assoc m c (inc (m c 0))))
a-counts (reduce add-occurrence {} a)
b-counts (reduce add-occurrence {} b)]
(= a-counts b-counts))))
(deftest test-permutation
;; permutation is partially covered
(is (not (permutation? "foo" "foobar"))))
(defn fully-covered-cond
[n]
(cond
(zero? n) :zero
:else :nonzero))
(deftest test-fully-covered-cond
(is (= :zero (fully-covered-cond 0)))
(is (= :nonzero (fully-covered-cond 1))))
(defn transaction-fn
[n]
(dosync
(cond
(zero? n) :zero
:else (throw (RuntimeException. "FAIL TRANSACTION")))))
(deftest failing-transaction
(is (thrown? Exception (transaction-fn 1))))
(letfn [(covered [] (+ 2 3))
(not-covered [] {:and :not-tracked})
(not-covered [] ({:preimage :image} :preimage))]
(covered))
(defn loop "Not really loop."
[n] (+ n n))
(defn global-loop-shouldnt-crash []
(loop 3))
;; top-level propagate-line-numbers broke preconditions
(defn make-matrix
([data ncol]
{:pre [true]}
:ok))
(defn locals-dont-crash []
(let [letfn #(+ % 42)]
(letfn 2)))
(defn inline-use []
(bytes (byte-array (map byte [1 2]))))
(deftest CLJ-1330-workaround []
(is (not= (type (inline-use))
clojure.lang.Cons)))
| null | https://raw.githubusercontent.com/cloverage/cloverage/3b9718e9c80f84ba5882139376a07f8a67f14062/cloverage/dev-resources/cloverage/sample/exercise_instrumentation.clj | clojure | fully covered
partially covered
not covered
fully covered
permutation is partially covered
top-level propagate-line-numbers broke preconditions | (ns cloverage.sample.exercise-instrumentation
(:refer-clojure :exclude [loop])
(:import java.lang.RuntimeException)
(:require [clojure.test :refer :all]))
'()
(+ 40)
(+ 40 2)
(str 1 2 3)
(+ (* 2 3)
(/ 12 3))
(let [a (+ 40 2)
b (+ 3 4)]
(* a b))
{:a (+ 40 2)
(/ 4 2) "two"}
(defn function-with-empty-list []
used to break stuff - see issues # 14 and # 17
'())
(defn not-covered-at-all
"This function is not covered at all"
[arg1 arg2]
(+ 2 3)
(- 2 3))
(defn partially-covered
[cnd]
(if cnd (+ 1 2 3) (- 2 3 4)))
(deftest test-partially-covered
(is (= 6 (partially-covered true))))
(defn fully-covered [cnd]
(if cnd (+ 1 2 3) (- 4 5 6)))
(deftest test-fully-covered
(is (= 6 (fully-covered true)))
(is (= -7 (fully-covered false))))
(defmulti mixed-coverage-multi type)
(defmethod mixed-coverage-multi String
[x]
no - op
x))
(defmethod mixed-coverage-multi Long
[x]
(if (= x 1)
(+ x 2)
(- x 2)))
(defmethod mixed-coverage-multi Character
[x]
(str x))
(deftest test-mixed-multi
(is "String" (mixed-coverage-multi "String"))
(is 3 (mixed-coverage-multi 1)))
(defmulti fully-covered-multi type)
(defmethod fully-covered-multi String [x] x)
(defmethod fully-covered-multi :default [x] x)
(deftest test-fully-covered-multi
(is "String" (fully-covered-multi "String"))
(is 1 (fully-covered-multi 1)))
(defn palindrome?
"Tests whether s is a palindrom."
[s]
(if-not (vector? s)
(palindrome? (vec s))
(if (<= (count s) 1)
true
(and (= (s 0) (s (dec (count s))))
(palindrome? (subvec s 1 (dec (count s))))))))
(deftest test-palindrome
Palindrome is fully covered
(is (palindrome? "noon"))
(is (palindrome? "racecar"))
(is (not (palindrome? "hello"))))
(defn permutation?
"Tests whether a and b are permutations of each other"
[a b]
(and (= (count a)
(count b))
(let [add-occurrence (fn [m c] (assoc m c (inc (m c 0))))
a-counts (reduce add-occurrence {} a)
b-counts (reduce add-occurrence {} b)]
(= a-counts b-counts))))
(deftest test-permutation
(is (not (permutation? "foo" "foobar"))))
(defn fully-covered-cond
[n]
(cond
(zero? n) :zero
:else :nonzero))
(deftest test-fully-covered-cond
(is (= :zero (fully-covered-cond 0)))
(is (= :nonzero (fully-covered-cond 1))))
(defn transaction-fn
[n]
(dosync
(cond
(zero? n) :zero
:else (throw (RuntimeException. "FAIL TRANSACTION")))))
(deftest failing-transaction
(is (thrown? Exception (transaction-fn 1))))
(letfn [(covered [] (+ 2 3))
(not-covered [] {:and :not-tracked})
(not-covered [] ({:preimage :image} :preimage))]
(covered))
(defn loop "Not really loop."
[n] (+ n n))
(defn global-loop-shouldnt-crash []
(loop 3))
(defn make-matrix
([data ncol]
{:pre [true]}
:ok))
(defn locals-dont-crash []
(let [letfn #(+ % 42)]
(letfn 2)))
(defn inline-use []
(bytes (byte-array (map byte [1 2]))))
(deftest CLJ-1330-workaround []
(is (not= (type (inline-use))
clojure.lang.Cons)))
|
05a4eea77e72646691895b5a82f6d314b5747f8ec91934dfb6005b122d7826b5 | haskell-github/github | Comments.hs | -----------------------------------------------------------------------------
-- |
-- License : BSD-3-Clause
Maintainer : < >
--
-- The pull request review comments API as described at
-- </>.
module GitHub.Endpoints.PullRequests.Comments (
pullRequestCommentsR,
pullRequestCommentR,
createPullCommentR,
createPullCommentReplyR,
module GitHub.Data,
) where
import GitHub.Data
import GitHub.Internal.Prelude
import Prelude ()
-- | List comments on a pull request.
-- See </#list-comments-on-a-pull-request>
pullRequestCommentsR :: Name Owner -> Name Repo -> IssueNumber -> FetchCount -> Request k (Vector Comment)
pullRequestCommentsR user repo prid =
pagedQuery ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart prid, "comments"] []
-- | Query a single comment.
-- See </#get-a-single-comment>
pullRequestCommentR :: Name Owner -> Name Repo -> Id Comment -> Request k Comment
pullRequestCommentR user repo cid =
query ["repos", toPathPart user, toPathPart repo, "pulls", "comments", toPathPart cid] []
-- | Create a comment.
--
-- See </#create-a-comment>
createPullCommentR :: Name Owner -> Name Repo -> IssueNumber -> Text -> Text -> Int -> Text -> Request 'RW Comment
createPullCommentR user repo iss commit path position body =
command Post parts (encode $ NewPullComment commit path position body)
where
parts = ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart iss, "comments"]
-- | Create a comment reply.
--
-- See </#create-a-review-comment-reply>
createPullCommentReplyR :: Name Owner -> Name Repo -> IssueNumber -> Id Comment -> Text -> Request 'RW Comment
createPullCommentReplyR user repo iss cid body =
command Post parts (encode $ PullCommentReply body)
where
parts = ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart iss
, "comments", toPathPart cid, "replies"]
| null | https://raw.githubusercontent.com/haskell-github/github/81d9b658c33a706f18418211a78d2690752518a4/src/GitHub/Endpoints/PullRequests/Comments.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-3-Clause
The pull request review comments API as described at
</>.
| List comments on a pull request.
See </#list-comments-on-a-pull-request>
| Query a single comment.
See </#get-a-single-comment>
| Create a comment.
See </#create-a-comment>
| Create a comment reply.
See </#create-a-review-comment-reply> | Maintainer : < >
module GitHub.Endpoints.PullRequests.Comments (
pullRequestCommentsR,
pullRequestCommentR,
createPullCommentR,
createPullCommentReplyR,
module GitHub.Data,
) where
import GitHub.Data
import GitHub.Internal.Prelude
import Prelude ()
pullRequestCommentsR :: Name Owner -> Name Repo -> IssueNumber -> FetchCount -> Request k (Vector Comment)
pullRequestCommentsR user repo prid =
pagedQuery ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart prid, "comments"] []
pullRequestCommentR :: Name Owner -> Name Repo -> Id Comment -> Request k Comment
pullRequestCommentR user repo cid =
query ["repos", toPathPart user, toPathPart repo, "pulls", "comments", toPathPart cid] []
createPullCommentR :: Name Owner -> Name Repo -> IssueNumber -> Text -> Text -> Int -> Text -> Request 'RW Comment
createPullCommentR user repo iss commit path position body =
command Post parts (encode $ NewPullComment commit path position body)
where
parts = ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart iss, "comments"]
createPullCommentReplyR :: Name Owner -> Name Repo -> IssueNumber -> Id Comment -> Text -> Request 'RW Comment
createPullCommentReplyR user repo iss cid body =
command Post parts (encode $ PullCommentReply body)
where
parts = ["repos", toPathPart user, toPathPart repo, "pulls", toPathPart iss
, "comments", toPathPart cid, "replies"]
|
6418b329def90ce4f4332b9a73df1b52f3b20a9986e9d74604e45ae1142d185b | karamellpelle/grid | Friends.hs | grid is a game written in Haskell
-- Copyright (C) 2018
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see </>.
--
module Game.Helpers.Friends
(
) where
-- todo: helper functions like connecting to friends
{-
friendsSelectWidget :: [Friend] -> (Friend -> a -> a) -> ScrollWidget a
-}
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/designer/source/Game/Helpers/Friends.hs | haskell |
This file is part of grid.
grid is free software: you can redistribute it and/or modify
(at your option) any later version.
grid is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with grid. If not, see </>.
todo: helper functions like connecting to friends | grid is a game written in Haskell
Copyright ( C ) 2018
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
module Game.Helpers.Friends
(
) where
friendsSelectWidget : : [ Friend ] - > ( Friend - > a - > a ) - > ScrollWidget a
friendsSelectWidget :: [Friend] -> (Friend -> a -> a) -> ScrollWidget a
-}
|
7a588cb3d896046068798b8ceba658e22952bb44ae2311e75483225a984624a1 | armstnp/advent-of-code-2019 | day22.clj | Set NS before starting : C - c M - n n
(ns advent-of-code-2019.day22
(:require [advent-of-code-2019.core :as core]
[advent-of-code-2019.left-shark :as ls]
[clojure.string :as str]
[clojure.math.combinatorics :as combo]
[is-prime.core :refer [is-prime]]))
;; (:import [org.jgrapht.graph SimpleDirectedWeightedGraph DefaultWeightedEdge]))
(def input (->> "day22.txt" core/read-input str/split-lines))
(def parse-line
(comp
:components
(ls/parse
;; Fill in parse components here
)))
| null | https://raw.githubusercontent.com/armstnp/advent-of-code-2019/68e21174394d8b0e14433f9f249e995c10ac6d67/src/advent_of_code_2019/day22.clj | clojure | Fill in parse components here | Set NS before starting : C - c M - n n
(ns advent-of-code-2019.day22
(:require [advent-of-code-2019.core :as core]
[advent-of-code-2019.left-shark :as ls]
[clojure.string :as str]
[clojure.math.combinatorics :as combo]
[is-prime.core :refer [is-prime]]))
(: import [ org.jgrapht.graph SimpleDirectedWeightedGraph DefaultWeightedEdge ] ) )
(def input (->> "day22.txt" core/read-input str/split-lines))
(def parse-line
(comp
:components
(ls/parse
)))
|
faf41f2220decc44d1de1d67914096fb31bf8b68b125928634bb3c4475d127d7 | HaxeFoundation/ocamllibs | as3hl.mli |
(*
* This file is part of SwfLib
* Copyright (c)2004-2008 Nicolas Cannasse
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*)
open As3
type hl_ident = string
type hl_int = int32
type hl_uint = int32
type hl_float = float
type hl_slot = int
type hl_jump = as3_jump
type hl_op = as3_op
type hl_opcode =
| HBreakPoint
| HNop
| HThrow
| HGetSuper of hl_name
| HSetSuper of hl_name
| HDxNs of hl_ident
| HDxNsLate
| HRegKill of reg
| HLabel
| HJump of hl_jump * int
| HSwitch of int * int list
| HPushWith
| HPopScope
| HForIn
| HHasNext
| HNull
| HUndefined
| HForEach
| HSmallInt of int
| HInt of int
| HTrue
| HFalse
| HNaN
| HPop
| HDup
| HSwap
| HString of hl_ident
| HIntRef of hl_int
| HUIntRef of hl_uint
| HFloat of hl_float
| HScope
| HNamespace of hl_namespace
| HNext of reg * reg
| HFunction of hl_method
| HCallStack of nargs
| HConstruct of nargs
| HCallMethod of hl_slot * nargs
| HCallStatic of hl_method * nargs
| HCallSuper of hl_name * nargs
| HCallProperty of hl_name * nargs
| HRetVoid
| HRet
| HConstructSuper of nargs
| HConstructProperty of hl_name * nargs
| HCallPropLex of hl_name * nargs
| HCallSuperVoid of hl_name * nargs
| HCallPropVoid of hl_name * nargs
| HApplyType of nargs
| HObject of nargs
| HArray of nargs
| HNewBlock
| HClassDef of hl_class
| HGetDescendants of hl_name
| HCatch of int
| HFindPropStrict of hl_name
| HFindProp of hl_name
| HFindDefinition of hl_name
| HGetLex of hl_name
| HSetProp of hl_name
| HReg of reg
| HSetReg of reg
| HGetGlobalScope
| HGetScope of int
| HGetProp of hl_name
| HInitProp of hl_name
| HDeleteProp of hl_name
| HGetSlot of hl_slot
| HSetSlot of hl_slot
| HToString
| HToXml
| HToXmlAttr
| HToInt
| HToUInt
| HToNumber
| HToBool
| HToObject
| HCheckIsXml
| HCast of hl_name
| HAsAny
| HAsString
| HAsType of hl_name
| HAsObject
| HIncrReg of reg
| HDecrReg of reg
| HTypeof
| HInstanceOf
| HIsType of hl_name
| HIncrIReg of reg
| HDecrIReg of reg
| HThis
| HSetThis
| HDebugReg of hl_ident * reg * int
| HDebugLine of int
| HDebugFile of hl_ident
| HBreakPointLine of int
| HTimestamp
| HOp of hl_op
| HUnk of char
and hl_namespace =
| HNPrivate of hl_ident option
| HNPublic of hl_ident option
| HNInternal of hl_ident option
| HNProtected of hl_ident
| HNNamespace of hl_ident
| HNExplicit of hl_ident
| HNStaticProtected of hl_ident option
and hl_ns_set = hl_namespace list
and hl_name =
| HMPath of hl_ident list * hl_ident
| HMName of hl_ident * hl_namespace
| HMMultiName of hl_ident option * hl_ns_set
| HMRuntimeName of hl_ident
| HMRuntimeNameLate
| HMMultiNameLate of hl_ns_set
| HMAttrib of hl_name
| HMParams of hl_name * hl_name list
| HMNSAny of hl_ident
| HMAny
and hl_value =
| HVNone
| HVNull
| HVBool of bool
| HVString of hl_ident
| HVInt of hl_int
| HVUInt of hl_uint
| HVFloat of hl_float
| HVNamespace of int * hl_namespace
and hl_method = {
hlmt_index : int; (* used to sort methods (preserve order) *)
hlmt_ret : hl_name option;
hlmt_args : hl_name option list;
hlmt_native : bool;
hlmt_var_args : bool;
hlmt_arguments_defined : bool;
hlmt_uses_dxns : bool;
hlmt_new_block : bool;
hlmt_unused_flag : bool;
hlmt_debug_name : hl_ident option;
hlmt_dparams : hl_value list option;
hlmt_pnames : hl_ident option list option;
mutable hlmt_function : hl_function option; (* None for interfaces constructors only *)
}
and hl_try_catch = {
hltc_start : int;
hltc_end : int;
hltc_handle : int;
hltc_type : hl_name option;
hltc_name : hl_name option;
}
and hl_function = {
hlf_stack_size : int;
hlf_nregs : int;
hlf_init_scope : int;
hlf_max_scope : int;
mutable hlf_code : hl_opcode MultiArray.t;
mutable hlf_trys : hl_try_catch array;
hlf_locals : (hl_name * hl_name option * hl_slot * bool) array; (* bool = const - mostly false *)
}
and hl_method_kind = as3_method_kind
and hl_method_field = {
hlm_type : hl_method;
hlm_final : bool;
hlm_override : bool;
hlm_kind : hl_method_kind;
}
and hl_var_field = {
hlv_type : hl_name option;
hlv_value : hl_value;
hlv_const : bool;
}
and hl_metadata = {
hlmeta_name : hl_ident;
hlmeta_data : (hl_ident option * hl_ident) array;
}
and hl_field_kind =
| HFMethod of hl_method_field
| HFVar of hl_var_field
| HFFunction of hl_method
| HFClass of hl_class (* only for hl_static fields *)
and hl_field = {
hlf_name : hl_name;
hlf_slot : hl_slot;
hlf_kind : hl_field_kind;
hlf_metas : hl_metadata array option;
}
and hl_class = {
hlc_index : int;
hlc_name : hl_name;
hlc_super : hl_name option;
hlc_sealed : bool;
hlc_final : bool;
hlc_interface : bool;
hlc_namespace : hl_namespace option;
hlc_implements : hl_name array;
mutable hlc_construct : hl_method;
mutable hlc_fields : hl_field array;
mutable hlc_static_construct : hl_method;
mutable hlc_static_fields : hl_field array;
}
and hl_static = {
hls_method : hl_method;
hls_fields : hl_field array;
}
and hl_tag = hl_static list
| null | https://raw.githubusercontent.com/HaxeFoundation/ocamllibs/97e498e1b3bc2b3f08cfcae874b8529e4292bc3d/swflib/as3hl.mli | ocaml | used to sort methods (preserve order)
None for interfaces constructors only
bool = const - mostly false
only for hl_static fields |
* This file is part of SwfLib
* Copyright ( c)2004 - 2008
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , , USA
* This file is part of SwfLib
* Copyright (c)2004-2008 Nicolas Cannasse
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*)
open As3
type hl_ident = string
type hl_int = int32
type hl_uint = int32
type hl_float = float
type hl_slot = int
type hl_jump = as3_jump
type hl_op = as3_op
type hl_opcode =
| HBreakPoint
| HNop
| HThrow
| HGetSuper of hl_name
| HSetSuper of hl_name
| HDxNs of hl_ident
| HDxNsLate
| HRegKill of reg
| HLabel
| HJump of hl_jump * int
| HSwitch of int * int list
| HPushWith
| HPopScope
| HForIn
| HHasNext
| HNull
| HUndefined
| HForEach
| HSmallInt of int
| HInt of int
| HTrue
| HFalse
| HNaN
| HPop
| HDup
| HSwap
| HString of hl_ident
| HIntRef of hl_int
| HUIntRef of hl_uint
| HFloat of hl_float
| HScope
| HNamespace of hl_namespace
| HNext of reg * reg
| HFunction of hl_method
| HCallStack of nargs
| HConstruct of nargs
| HCallMethod of hl_slot * nargs
| HCallStatic of hl_method * nargs
| HCallSuper of hl_name * nargs
| HCallProperty of hl_name * nargs
| HRetVoid
| HRet
| HConstructSuper of nargs
| HConstructProperty of hl_name * nargs
| HCallPropLex of hl_name * nargs
| HCallSuperVoid of hl_name * nargs
| HCallPropVoid of hl_name * nargs
| HApplyType of nargs
| HObject of nargs
| HArray of nargs
| HNewBlock
| HClassDef of hl_class
| HGetDescendants of hl_name
| HCatch of int
| HFindPropStrict of hl_name
| HFindProp of hl_name
| HFindDefinition of hl_name
| HGetLex of hl_name
| HSetProp of hl_name
| HReg of reg
| HSetReg of reg
| HGetGlobalScope
| HGetScope of int
| HGetProp of hl_name
| HInitProp of hl_name
| HDeleteProp of hl_name
| HGetSlot of hl_slot
| HSetSlot of hl_slot
| HToString
| HToXml
| HToXmlAttr
| HToInt
| HToUInt
| HToNumber
| HToBool
| HToObject
| HCheckIsXml
| HCast of hl_name
| HAsAny
| HAsString
| HAsType of hl_name
| HAsObject
| HIncrReg of reg
| HDecrReg of reg
| HTypeof
| HInstanceOf
| HIsType of hl_name
| HIncrIReg of reg
| HDecrIReg of reg
| HThis
| HSetThis
| HDebugReg of hl_ident * reg * int
| HDebugLine of int
| HDebugFile of hl_ident
| HBreakPointLine of int
| HTimestamp
| HOp of hl_op
| HUnk of char
and hl_namespace =
| HNPrivate of hl_ident option
| HNPublic of hl_ident option
| HNInternal of hl_ident option
| HNProtected of hl_ident
| HNNamespace of hl_ident
| HNExplicit of hl_ident
| HNStaticProtected of hl_ident option
and hl_ns_set = hl_namespace list
and hl_name =
| HMPath of hl_ident list * hl_ident
| HMName of hl_ident * hl_namespace
| HMMultiName of hl_ident option * hl_ns_set
| HMRuntimeName of hl_ident
| HMRuntimeNameLate
| HMMultiNameLate of hl_ns_set
| HMAttrib of hl_name
| HMParams of hl_name * hl_name list
| HMNSAny of hl_ident
| HMAny
and hl_value =
| HVNone
| HVNull
| HVBool of bool
| HVString of hl_ident
| HVInt of hl_int
| HVUInt of hl_uint
| HVFloat of hl_float
| HVNamespace of int * hl_namespace
and hl_method = {
hlmt_ret : hl_name option;
hlmt_args : hl_name option list;
hlmt_native : bool;
hlmt_var_args : bool;
hlmt_arguments_defined : bool;
hlmt_uses_dxns : bool;
hlmt_new_block : bool;
hlmt_unused_flag : bool;
hlmt_debug_name : hl_ident option;
hlmt_dparams : hl_value list option;
hlmt_pnames : hl_ident option list option;
}
and hl_try_catch = {
hltc_start : int;
hltc_end : int;
hltc_handle : int;
hltc_type : hl_name option;
hltc_name : hl_name option;
}
and hl_function = {
hlf_stack_size : int;
hlf_nregs : int;
hlf_init_scope : int;
hlf_max_scope : int;
mutable hlf_code : hl_opcode MultiArray.t;
mutable hlf_trys : hl_try_catch array;
}
and hl_method_kind = as3_method_kind
and hl_method_field = {
hlm_type : hl_method;
hlm_final : bool;
hlm_override : bool;
hlm_kind : hl_method_kind;
}
and hl_var_field = {
hlv_type : hl_name option;
hlv_value : hl_value;
hlv_const : bool;
}
and hl_metadata = {
hlmeta_name : hl_ident;
hlmeta_data : (hl_ident option * hl_ident) array;
}
and hl_field_kind =
| HFMethod of hl_method_field
| HFVar of hl_var_field
| HFFunction of hl_method
and hl_field = {
hlf_name : hl_name;
hlf_slot : hl_slot;
hlf_kind : hl_field_kind;
hlf_metas : hl_metadata array option;
}
and hl_class = {
hlc_index : int;
hlc_name : hl_name;
hlc_super : hl_name option;
hlc_sealed : bool;
hlc_final : bool;
hlc_interface : bool;
hlc_namespace : hl_namespace option;
hlc_implements : hl_name array;
mutable hlc_construct : hl_method;
mutable hlc_fields : hl_field array;
mutable hlc_static_construct : hl_method;
mutable hlc_static_fields : hl_field array;
}
and hl_static = {
hls_method : hl_method;
hls_fields : hl_field array;
}
and hl_tag = hl_static list
|
fe35d13fd5c838c34099353cf8272b311be197418585ef346e8fac6702a7d150 | input-output-hk/cardano-sl | Aeson.hs | module Util.Aeson
( parseJSONP
) where
import Data.Aeson (FromJSON, Result (..), fromJSON)
import Data.Aeson.Parser (json)
import Pipes
import Pipes.Prelude (map)
import Universum hiding (map)
import Util.Pipes (parseP)
parseJSONP:: (FromJSON a, Monad m) => Pipe ByteString a m b
parseJSONP = parseP json >-> map (fromResult . fromJSON)
where
fromResult :: Result a -> a
fromResult (Success a) = a
fromResult (Error e) = error $ toText e
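-- A hedged usage sketch (not part of the original module): parseJSONP is
-- meant to sit between a ByteString producer and a consumer of decoded
-- values, e.g. (assuming pipes-bytestring for the stdin producer):
--
-- > import qualified Pipes.ByteString as PB
-- > import qualified Pipes.Prelude as P
-- > printValues :: IO ()
-- > printValues = runEffect $
-- >   PB.stdin >-> (parseJSONP :: Pipe ByteString Value IO ()) >-> P.print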
| null | https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/tools/post-mortem/src/Util/Aeson.hs | haskell | module Util.Aeson
( parseJSONP
) where
import Data.Aeson (FromJSON, Result (..), fromJSON)
import Data.Aeson.Parser (json)
import Pipes
import Pipes.Prelude (map)
import Universum hiding (map)
import Util.Pipes (parseP)
parseJSONP:: (FromJSON a, Monad m) => Pipe ByteString a m b
parseJSONP = parseP json >-> map (fromResult . fromJSON)
where
fromResult :: Result a -> a
fromResult (Success a) = a
fromResult (Error e) = error $ toText e
|
|
5a2ec48a12af8837d947cbfcb46cd1aeccf3d4fa5e88ee2cdc09720f9b83ef06 | openbadgefactory/salava | block.clj | (ns salava.oauth.block
(:require [salava.oauth.db :as db]))
(defn user-information [ctx user-id]
(db/get-user-information ctx user-id))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/clj/salava/oauth/block.clj | clojure | (ns salava.oauth.block
(:require [salava.oauth.db :as db]))
(defn user-information [ctx user-id]
(db/get-user-information ctx user-id))
|
|
cb02a4839bb2004b388868afbee04a4895cbfa630bee3c7f2f5028ba137d1a98 | colis-anr/colis-language | metaFile.ml | open Common
let yaml_of_string = Yaml.of_string_exn
open Protocol_conv_yaml
type input =
{ arguments : string list ;
stdin : string }
[@@deriving protocol ~driver:(module Yaml)]
type output =
{ stdout : string ;
stderr : string ;
return_code : int }
[@@deriving protocol ~driver:(module Yaml)]
type t =
{ input : input ;
output : output }
[@@deriving protocol ~driver:(module Yaml)]
let rec promote_null_to_empty_string = function
| `Null -> `String ""
| `Bool b -> `Bool b
| `Float f -> `Float f
| `String s -> `String s
| `A vl -> `A (List.map promote_null_to_empty_string vl)
| `O svl -> `O (List.map (fun (s, v) -> (s, promote_null_to_empty_string v)) svl)
let load_from_file filename =
try
let ichan = open_in filename in
let yaml =
in_channel_to_string ichan
|> yaml_of_string
|> promote_null_to_empty_string
|> of_yaml_exn
in
close_in ichan;
yaml
with
Not_found -> failwith ("one required key could not be found: "^filename)
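(* A hedged usage note (not part of the original file): [load_from_file]
   expects a YAML document shaped like the [t] record above, e.g.

     input:
       arguments: ["-x", "foo"]
       stdin: ""
     output:
       stdout: "done\n"
       stderr: ""
       return_code: 0

   [promote_null_to_empty_string] lets fields such as [stdin] be left null
   in the file and still decode as the empty string. *)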
| null | https://raw.githubusercontent.com/colis-anr/colis-language/14b8087fa6323fb817d32c07236a2a084def01a1/tests/runner/metaFile.ml | ocaml | open Common
let yaml_of_string = Yaml.of_string_exn
open Protocol_conv_yaml
type input =
{ arguments : string list ;
stdin : string }
[@@deriving protocol ~driver:(module Yaml)]
type output =
{ stdout : string ;
stderr : string ;
return_code : int }
[@@deriving protocol ~driver:(module Yaml)]
type t =
{ input : input ;
output : output }
[@@deriving protocol ~driver:(module Yaml)]
let rec promote_null_to_empty_string = function
| `Null -> `String ""
| `Bool b -> `Bool b
| `Float f -> `Float f
| `String s -> `String s
| `A vl -> `A (List.map promote_null_to_empty_string vl)
| `O svl -> `O (List.map (fun (s, v) -> (s, promote_null_to_empty_string v)) svl)
let load_from_file filename =
try
let ichan = open_in filename in
let yaml =
in_channel_to_string ichan
|> yaml_of_string
|> promote_null_to_empty_string
|> of_yaml_exn
in
close_in ichan;
yaml
with
Not_found -> failwith ("one required key could not be found: "^filename)
|
|
6053b71a950e79ef6d1f09441084edbaea85e4ce2ee8578788d2ec77a72f60d8 | well-typed/large-records | R040.hs | #if PROFILE_CORESIZE
{-# OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #-}
#endif
#if PROFILE_TIMING
{-# OPTIONS_GHC -ddump-to-file -ddump-timings #-}
#endif
module Experiment.Generics_LR.Sized.R040 where
import Data.Aeson (Value)
import Bench.HList
import Experiment.Generics_LR
import Common.HListOfSize.HL040
hlistToJSON :: HList ExampleFields -> Value
hlistToJSON = gtoJSON | null | https://raw.githubusercontent.com/well-typed/large-records/551f265845fbe56346988a6b484dca40ef380609/large-records-benchmarks/bench/experiments/Experiment/Generics_LR/Sized/R040.hs | haskell | # OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #
# OPTIONS_GHC -ddump-to-file -ddump-timings # | #if PROFILE_CORESIZE
#endif
#if PROFILE_TIMING
#endif
module Experiment.Generics_LR.Sized.R040 where
import Data.Aeson (Value)
import Bench.HList
import Experiment.Generics_LR
import Common.HListOfSize.HL040
hlistToJSON :: HList ExampleFields -> Value
hlistToJSON = gtoJSON |
524b5e0914672a9e5f3917c3c7f800c68e3ee69a90e65417aaf3e62eed334945 | jarohen/phoenix | om_sablono.cljs | (ns {{name}}.ui.app
(:require [clojure.string :as s]
[om.core :as om :include-macros true]
[sablono.core :as html :refer-macros [html]]
simple-brepl.client))
(enable-console-print!)
(defn hello-world []
(om/component
(html [:p "Hello world!"])))
(set! (.-onload js/window)
(fn []
(om/root hello-world {} {:target js/document.body})))
;; ------------------------------------------------------------
Below this line is only required for the Phoenix welcome page , feel
;; free to just delete all of it when you want to get cracking on your
;; own project!
(defn code [s]
[:strong {:style {:font-family "'Courier New', 'monospace'"}}
s])
(defn demo-component []
(om/component
(html
[:div.container
[:h2 {:style {:margin-top "1em"}}
"Hello from Phoenix!"]
[:h3 "Things to try:"]
[:ul
[:li [:p "In your Clojure REPL, run " (code "(phoenix/reload!)") " to completely reload the webapp without restarting the JVM."]]
[:li [:p "Connect to a CLJS bREPL by running " (code "(simple-brepl)")]]
[:li
[:p "Once you've opened the bREPL, reload your browser to make the connection, then you can eval some CLJS."]
[:p "I recommend:"]
[:ul
[:li (code "(+ 1 1)")]
[:li (code "(js/alert \"Hello world!\")")]
[:li (code "(set! (.-backgroundColor js/document.body.style) \"green\")")]]
[:p "Run " (code ":cljs/quit") " to get back to a Clojure REPL."]]
[:li [:p "Start making your webapp!"]
[:ul
[:li [:p "The CLJS entry point is in " (code "ui-src/{{sanitized}}/ui/app.cljs")]]
[:li [:p "The Clojure Ring handler is in " (code "src/{{sanitized}}/service/handler.clj")]]]]
[:li [:p "Any trouble, let me know - either through GitHub or on Twitter at " [:a {:href ""} "@jarohen"]]]
[:li [:p "Good luck!"]]]
[:div {:style {:text-align "right"
:font-weight "bold"}}
[:p
[:span {:style {:font-size "1.3em"}} "James Henderson"]
[:br]
"Twitter: " [:a {:href ""} "@jarohen"]
[:br]
"GitHub: " [:a {:href "-henderson"} "james-henderson"]]]])))
(set! (.-onload js/window)
(fn []
(om/root demo-component {} {:target js/document.body})))
| null | https://raw.githubusercontent.com/jarohen/phoenix/f828bf144154f110f0a73f54645f5696e2c8bdab/templates/phoenix-webapp/src/leiningen/new/phoenix_webapp/cljs/om_sablono.cljs | clojure | ------------------------------------------------------------
free to just delete all of it when you want to get cracking on your
own project! | (ns {{name}}.ui.app
(:require [clojure.string :as s]
[om.core :as om :include-macros true]
[sablono.core :as html :refer-macros [html]]
simple-brepl.client))
(enable-console-print!)
(defn hello-world []
(om/component
(html [:p "Hello world!"])))
(set! (.-onload js/window)
(fn []
(om/root hello-world {} {:target js/document.body})))
Below this line is only required for the Phoenix welcome page , feel
(defn code [s]
[:strong {:style {:font-family "'Courier New', 'monospace'"}}
s])
(defn demo-component []
(om/component
(html
[:div.container
[:h2 {:style {:margin-top "1em"}}
"Hello from Phoenix!"]
[:h3 "Things to try:"]
[:ul
[:li [:p "In your Clojure REPL, run " (code "(phoenix/reload!)") " to completely reload the webapp without restarting the JVM."]]
[:li [:p "Connect to a CLJS bREPL by running " (code "(simple-brepl)")]]
[:li
[:p "Once you've opened the bREPL, reload your browser to make the connection, then you can eval some CLJS."]
[:p "I recommend:"]
[:ul
[:li (code "(+ 1 1)")]
[:li (code "(js/alert \"Hello world!\")")]
[:li (code "(set! (.-backgroundColor js/document.body.style) \"green\")")]]
[:p "Run " (code ":cljs/quit") " to get back to a Clojure REPL."]]
[:li [:p "Start making your webapp!"]
[:ul
[:li [:p "The CLJS entry point is in " (code "ui-src/{{sanitized}}/ui/app.cljs")]]
[:li [:p "The Clojure Ring handler is in " (code "src/{{sanitized}}/service/handler.clj")]]]]
[:li [:p "Any trouble, let me know - either through GitHub or on Twitter at " [:a {:href ""} "@jarohen"]]]
[:li [:p "Good luck!"]]]
[:div {:style {:text-align "right"
:font-weight "bold"}}
[:p
[:span {:style {:font-size "1.3em"}} "James Henderson"]
[:br]
"Twitter: " [:a {:href ""} "@jarohen"]
[:br]
"GitHub: " [:a {:href "-henderson"} "james-henderson"]]]])))
(set! (.-onload js/window)
(fn []
(om/root demo-component {} {:target js/document.body})))
|
ff3b9cb6c4c050b041bedcba74f3b6ee982aade7e15093bcaf8a31be69e2e2fc | typelead/eta | tcfail103.hs |
module ShouldCompile where
import GHC.ST
import GHC.STRef
import GHC.Arr
-- Another 'escape' example
f:: ST t Int
f = do
v <- newSTRef 5
let g :: ST s Int
-- Implicitly forall s. ST s Int
g = readSTRef v
g
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/tests/suite/typecheck/fail/tcfail103.hs | haskell | Another 'escape' example
Implicitly forall s. ST s Int |
module ShouldCompile where
import GHC.ST
import GHC.STRef
import GHC.Arr
f:: ST t Int
f = do
v <- newSTRef 5
let g :: ST s Int
g = readSTRef v
g
|
8e3b399b07eb6c0d3ecac594dc5aaf4225afb196ee552e0d41f18030c00b20f4 | neongreen/haskell-ex | Main.hs | movingAvg r [] = []
movingAvg r list@(x:xs) =
let left = take r list
right = tail list
in firstHalf left ++ secondHalf r right
firstHalf [] = []
firstHalf list@(x:xs) =
firstHalf (init list) ++ [(sum list) / fromIntegral (length list)]
secondHalf a list@(x:xs)
| length list < a = []
| otherwise = [sum(take a list) / fromIntegral a] ++ secondHalf a (tail list)
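-- A hedged usage note (not part of the original file): for window size 2,
--   movingAvg 2 [1,2,3,4] :: [Double]
-- should evaluate to [1.0,1.5,2.5,3.5] (growing prefix averages on the left
-- edge, then length-2 windows). As written, secondHalf has no equation for
-- the empty list, so a window size of 1 appears to hit a pattern-match
-- failure once the tail is exhausted.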
| null | https://raw.githubusercontent.com/neongreen/haskell-ex/345115444fdf370a43390fd942e2851b9b1963ad/week3/average/444c43/Main.hs | haskell | movingAvg r [] = []
movingAvg r list@(x:xs) =
let left = take r list
right = tail list
in firstHalf left ++ secondHalf r right
firstHalf [] = []
firstHalf list@(x:xs) =
firstHalf (init list) ++ [(sum list) / fromIntegral (length list)]
secondHalf a list@(x:xs)
| length list < a = []
| otherwise = [sum(take a list) / fromIntegral a] ++ secondHalf a (tail list)
|
|
834de91dfebb29349396ac456f09f0631cd3fc8b6a0c0334239c795c848e9f4e | ghc/nofib | Matrix.hs | Time - stamp : < 2010 - 11 - 03 09:27:34 >
-- $Id: Matrix.hs,v 1.4.2.5 2002/06/15 01:34:29 hwloidl Exp $
-- Data Encapsulation of the ADT Matrix.
-- Internal representation is a list of lists.
--
-- LinSolv remark: default determinant is parallel (d&c over 1st line)
-----------------------------------------------------------------------------
-- @node ADT Matrix, , ,
-- @chapter ADT Matrix
module Matrix(SqMatrix, Vector, {- MatBounds, -}
(!!-), (!-), sqMatrix,
vecBounds, matBounds, vecCont, matCont,
listSqMatrix, lolSqMatrix, unSqMatrix, vector, unvector,
determinant, transp, replaceColumn, size,
maxElem, maxElemVec, scalarMult, vecScalarQuot,
matGcd, vecGcd, matHom, vecHom, matBounds, matCont,
matMult, matCompact
)
{- showsMatrix, matEqual, matSum, matMult)
matSum',matSum'',showIt,makeUnique) -} where
-- @menu
-- * Imports::
-- * Data Types::
-- * Constants::
-- * Aux functions::
-- * Data type constructors::
-- * H.o. fcts::
-- * Misc operations::
-- * Arithmetic Operations::
-- * I/O Operations::
-- * Instances::
-- @end menu
-- @node Imports, Data Types, ADT Matrix, ADT Matrix
-- @section Imports
import Data.List(transpose)
import Data.Array
import ModArithm ({- Hom(hom), -} modHom)
-- only needed if we use array based LU Decomp later
#if defined(STRATEGIES)
import Control.Parallel.Strategies
#endif
import Control.DeepSeq
infixl 5 !!-
infixl 5 !-
m!!-(i,j) = (m'!!i')!!j'
where bds@((rl,cl),(rh,ch)) = matBounds m
i' = i - rl
j' = j - cl
m' = matCont m
v!-i = v'!!i'
where bds@(rl,rh) = vecBounds v
i' = i - rl
v' = vecCont v
-- ----------------------------------------------------------------------------
-- @node Data Types, Constants, Imports, ADT Matrix
-- @section Data Types
--
-- Data Type definitions
-- ----------------------------------------------------------------------------
data (Integral a) => SqMatrix a = SqMatrixC MatBounds [[a]]
data (Integral a) => Vector a = VectorC VecBounds [a]
type MatBounds = ((Int,Int),(Int,Int))
type VecBounds = ((Int),(Int))
instance (NFData a, Integral a) => NFData (Vector a) where
-- rnf x@(VectorC b l) = rnf b >> rnf l >> return x
rnf (VectorC b l) = rnf b `seq` rnf l
instance (NFData a, Integral a) => NFData (SqMatrix a) where
-- rnf x@(SqMatrixC b m) = rnf b >> rnf m >> return x
rnf (SqMatrixC b m) = rnf b `seq` rnf m
-- ----------------------------------------------------------------------------
-- @node Aux functions, Data type constructors, Constants, ADT Matrix
-- @section Aux functions
-- ----------------------------------------------------------------------------
lol :: (Integral a) => Int -> [a] -> [[a]]
lol _ [] = []
lol n l = let
(line, rest) = splitAt n l
in
line : (lol n rest)
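-- Hedged example (not in the original source): lol 2 [1,2,3,4] == [[1,2],[3,4]];
-- it simply chunks a flat list into rows of length n.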
mat_map = map
listCompwiseComp :: (a -> b -> c) -> [a] -> [b] -> [c]
listCompwiseComp = zipWith
{- map f' (zip l l')
where f' (a,b) = a `f` b -}
-- ----------------------------------------------------------------------------
-- @node Data type constructors, H.o. fcts, Aux functions, ADT Matrix
-- @section Data type constructors
-- ----------------------------------------------------------------------------
sqMatrix :: (Integral a) => Array (Int,Int) a -> SqMatrix a
sqMatrix arr = SqMatrixC b [ [ (arr!(i,j)) | j <- [jLo..jHi] ]
| i <- [iLo..iHi] ]
where b@((iLo,jLo),(iHi,jHi)) = (bounds arr)
unSqMatrix :: (Integral a) => SqMatrix a -> Array (Int,Int) a
unSqMatrix (SqMatrixC b@((iLo,jLo),(iHi,jHi)) m)
= array b (concat [ [ ((i,j), (m!!(i-1))!!(j-1)) | j <- [jLo..jHi] ]
| i <- [iLo..iHi] ])
listSqMatrix :: (Integral a) => MatBounds -> [a] -> SqMatrix a
listSqMatrix b@((iLo,jLo),(iHi,jHi)) l = SqMatrixC b (take m (lol n l))
where m = iHi - iLo +1
n = jHi - jLo + 1
lolSqMatrix :: (Integral a) => MatBounds -> [[a]] -> SqMatrix a
lolSqMatrix b l = SqMatrixC b l
matBounds (SqMatrixC b _) = b
matCont (SqMatrixC _ m) = m
vector :: (Integral a) => [a] -> Vector a
vector l = VectorC ((1),(n)) l
where n = length l
vecBounds (VectorC b _) = b
vecCont (VectorC _ v) = v
unvector :: (Integral a) => Vector a -> Array (Int) a
unvector (VectorC b@(x,y) l) = array b (zip [x..y] l)
-- ----------------------------------------------------------------------------
-- @node H.o. fcts, Misc operations, Data type constructors, ADT Matrix
-- @section H.o. fcts
--
-- Mapping and other general operations
-- ----------------------------------------------------------------------------
#if defined(STRATEGIES)
matMapUnary :: (Integral a, NFData a) =>
#else
matMapUnary :: (Integral a) =>
#endif
(a -> a) -> SqMatrix a -> SqMatrix a
matMapUnary f (SqMatrixC b mat) =
SqMatrixC b (mat_map (mat_map f) mat)
matCompwiseComp :: (Integral a, Integral b, Integral c
#if defined(STRATEGIES)
,NFData a, NFData b, NFData c
#endif
) =>
(a -> b -> c) -> SqMatrix a -> SqMatrix b -> SqMatrix c
matCompwiseComp f (SqMatrixC bnds@((iLo,jLo),(iHi,jHi)) mat) (SqMatrixC bnds' mat') =
if (bnds==bnds')
then SqMatrixC bnds [ listCompwiseComp f (mat!!(k-1)) (mat'!!(k-1))
| k <- [iLo..iHi] ]
else error "matCompwiseComp: Matrices have different bounds\n"
#if defined(STRATEGIES)
matFold :: (Integral a, NFData a) => (a -> a -> a) -> a -> SqMatrix a -> a
#else
matFold :: (Integral a) => (a -> a -> a) -> a -> SqMatrix a -> a
#endif
matFold f init (SqMatrixC _ mat) = foldl f init (mat_map (foldl f init) mat)
#if defined(STRATEGIES)
vecFold :: (Integral a, NFData a) => (a -> a -> a) -> a -> Vector a -> a
#else
vecFold :: (Integral a) => (a -> a -> a) -> a -> Vector a -> a
#endif
vecFold f init (VectorC _ mat) = foldl f init mat
-- ----------------------------------------------------------------------------
-- @node Misc operations, Arithmetic Operations, H.o. fcts, ADT Matrix
-- @section Misc operations
--
-- Misc operations
-- ----------------------------------------------------------------------------
-- Just for testing; demands computation of all elems of the matrix
matCompact x = matFold max 0 (matMapUnary signum x)
-- ---------------------------------------------------------------------------
size :: (Integral a) => SqMatrix a -> Int
size (SqMatrixC ((iLo,jLo),(iHi,jHi)) mat) =
if (iLo==jLo) && (iHi==jHi)
then iHi-iLo+1
else error "size: Matrix doesn't have size ((1,1),(n,n))\n"
-- replaceColumn :: (Ix a, Ix b) => a -> Array (a,b) c -> Array b c -> Array (a,b) c
replaceColumn :: (Integral a) => Int -> SqMatrix a -> Vector a -> SqMatrix a
-- This is definitely more elegant. But is it as efficient?
replaceColumn j (SqMatrixC b m)(VectorC _ v) =
SqMatrixC b (transpose (replaceLine j v (transpose m)))
where replaceLine :: Int -> [a] -> [[a]] -> [[a]]
replaceLine j v m = ( take (j-1) m ) ++
[v] ++
( drop (j) m )
{-
replaceColumn j (SqMatrixC b@((iLo,jLo),(iHi,jHi)) mat) (VectorC _ v) =
if (not (inRange (jLo,jHi) j))
then error "Error in replaceColumn: column index not in range"
else SqMatrixC b [ replaceElem j i | i <- [iLo..iHi] ]
where replaceElem j i = [ line !! (k-1) | k <- [jLo..j-1] ] ++
[ v !! (i-1) ] ++
[ line !! (k-1) | k <- [j+1..jHi] ]
where line = mat !! (i-1)
-}
-- transp :: (Ix a, Ix b) => Array (a,b) c -> Array (b,a) c
transp :: (Integral a) => SqMatrix a -> SqMatrix a
transp (SqMatrixC b@((iLo,jLo),(iHi,jHi)) mat) = SqMatrixC b (transpose mat)
{-
SqMatrixC b [ [ line !! (j-1) | line <- mat ] | j <- [jLo..jHi] ]
-}
-- maxElem :: (Ix a, Ix b, Ord c) => Array (a,b) c -> c
#if defined(STRATEGIES)
maxElem :: (Integral a, NFData a) => SqMatrix a -> a
#else
maxElem :: (Integral a) => SqMatrix a -> a
#endif
maxElem (SqMatrixC _ mat) = maximum ( mat_map maximum mat )
#if defined(STRATEGIES)
maxElemVec :: (Integral a, NFData a) => Vector a -> a
#else
maxElemVec :: (Integral a) => Vector a -> a
#endif
maxElemVec (VectorC _ vec) = maximum vec
-- ----------------------------------------------------------------------------
@node Arithmetic Operations , I / O Operations , Misc operations , ADT Matrix
-- @section Arithmetic Operations
-- ----------------------------------------------------------------------------
-- scalarMult :: (Ix a, Ix b, Num c) => c -> Array (a,b) c -> Array (a,b) c
#if defined(STRATEGIES)
scalarMult :: (Integral a, NFData a) => a -> SqMatrix a -> SqMatrix a
#else
scalarMult :: (Integral a) => a -> SqMatrix a -> SqMatrix a
#endif
scalarMult x = matMapUnary (x*)
{-
SqMatrixC b [ mat_map (x*) line | line <- mat ]
-}
#if defined(STRATEGIES)
vecScalarQuot :: (Integral a, NFData a) => a -> Vector a -> Vector a
#else
vecScalarQuot :: (Integral a) => a -> Vector a -> Vector a
#endif
vecScalarQuot x (VectorC b vec) =
VectorC b (mat_map (`div` x) vec)
#if defined(STRATEGIES)
crossProd :: (Integral a, NFData a) => Vector a -> Vector a -> a
#else
crossProd :: (Integral a) => Vector a -> Vector a -> a
#endif
crossProd (VectorC _ vec) (VectorC _ vec') = sum (zipWith (+) vec vec')
-- foldl (+) 0 (listCompwiseComp (*) vec vec')
-- @cindex determinant
-- determinant :: (Ix a, Ix b, Num c) => Array (a,b) c -> c
determinant :: (
Integral a
, NFData a
) => SqMatrix a -> a
determinant (SqMatrixC ((iLo,jLo),(iHi,jHi)) mat)
| jHi-jLo+1 == 1 = let
[[mat_1_1]] = mat
in
mat_1_1
| jHi-jLo+1 == 2 = let
[[mat_1_1,mat_1_2],
[mat_2_1,mat_2_2] ] = mat
in
mat_1_1 * mat_2_2 - mat_1_2 * mat_2_1
| otherwise = sum l_par
where
l_par = map determine1 [jLo..jHi]
determine1 j =
(if pivot > 0 then
sign*pivot*det'
else
0) -- `sparking` rnf sign
where
sign = if (even (j-jLo)) then 1 else -1
pivot = (head mat) !! (j-1)
mat_h' = (map (newLine j) (tail mat))
mat' = SqMatrixC ((iLo,jLo),(iHi-1,jHi-1))
mat_h'
det' = determinant mat'
#if 0
strategyD r =
parList (parList rnf) mat_h' `par`
rnf det' `par`
r0 r
#endif
tree_sum [] = 0
tree_sum [x] = x
tree_sum xs = (left+right)
where (l,r) = splitAt (length xs `div` 2) xs
left = tree_sum l
right = tree_sum r
newLine _ [] = []
newLine j line = (pre ++ post)
where
pre = [ line !! (k-1) | k <- [jLo..j-1] ]
post = [ line !! (k-1) | k <- [j+1..jHi] ]
{- seq determinant! -}
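-- Hedged worked example (not in the original source): expanding along the
-- first row of listSqMatrix ((1,1),(3,3)) [2,0,1, 3,1,4, 5,2,6] gives
-- 2*(1*6-4*2) - 0 + 1*(3*2-1*5) = -3, which is what 'determinant' should
-- return for that matrix.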
-- matEqual :: (Ix a, Ix b, Eq c) => Array (a,b) c -> Array (a,b) c -> Bool
matEqual :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> Bool
matEqual (SqMatrixC bnds@((iLo,jLo),(iHi,jHi)) mat) (SqMatrixC bnds' mat') =
if (bnds==bnds')
then foldl (&&) True
[ foldl (&&) True
(listCompwiseComp (==) (mat !! (k-1)) (mat' !! (k-1)))
| k <- [iLo..iHi] ]
else error "matEqual: Matrices have different bounds\n"
vecEqual :: (Integral a, NFData a) => Vector a -> Vector a -> Bool
vecEqual (VectorC bnds vec) (VectorC bnds' vec') =
if (bnds==bnds')
then foldl (&&) True (listCompwiseComp (==) vec vec')
else error "vecEqual: Matrices have different bounds\n"
-- matSum :: (Ix a, Ix b, Num c) => Array (a,b) c -> Array (a,b) c -> Array (a,b) c
matSum :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> SqMatrix a
matSum = matCompwiseComp (+)
matDif :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> SqMatrix a
matDif = matCompwiseComp (-)
-- @cindex mat mult
{- parallel matrix multiplication -}
matMult (SqMatrixC bnds mat) (SqMatrixC bnds' mat') =
SqMatrixC resultBounds
#if defined(__PARALLEL_HASKELL__) || defined(__GRANSIM__)
(parMap rwhnf
(\i ->
parMap rnf
(\j ->
#else
(map (\i -> map (\j ->
#endif
let
line = (VectorC ((jLo),(jHi)) (getLine i mat))
column = (VectorC ((iLo'),(iHi')) (getColumn j mat'))
in
crossProd line column
)
[iLo..iHi]
)
[jLo..jHi]
)
where getLine i mat = mat !! (i-1)
getColumn j mat = [ line !! (j-1) | line <- mat ]
size = iHi - iLo + 1
((iLo,jLo),(iHi,jHi)) = bnds
((iLo',jLo'),(iHi',jHi')) = bnds'
resultBounds
| (jLo,jHi)==(iLo',iHi') = ((iLo,jLo'),(iHi,jHi'))
| otherwise = error "matMult: incompatible bounds"
matAbs :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a
matAbs = matMapUnary abs
matSignum :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a
matSignum = matMapUnary signum
matGcd :: (Integral a, NFData a) => SqMatrix a -> a
matGcd m = matFold gcd (maxElem m) m
vecGcd :: (Integral a, NFData a) => Vector a -> a
vecGcd m = vecFold gcd (maxElemVec m) m
-- matHom :: (Integral a) => Integer -> a -> a
-- matHom :: (Integral a) => Integer -> a -> a
matHom p = matMapUnary (modHom p)
-- vecHom :: (Integral a) => Integer -> Vector a -> Vector a
-- vecHom :: (Integral a) => Integer -> Vector a -> Vector a
vecHom p (VectorC _ v) = vector (mat_map (modHom p) v)
{-
matBounds :: (Integral a) => SqMatrix a -> MatBounds
matBounds (SqMatrixC mat) = bounds mat
-}
matFromInteger :: Integer -> SqMatrix Integer
matFromInteger n = SqMatrixC ((1,1),(1,1)) [[n]]
-- ----------------------------------------------------------------------------
-- @node I/O Operations, Instances, Arithmetic Operations, ADT Matrix
-- @section I/O Operations
-- ----------------------------------------------------------------------------
-- showsMatrix :: (Ix a, Ix b, Text c) => Array (a,b) c -> ShowS
showsMatrix :: (Integral a) => SqMatrix a -> ShowS
showsMatrix (SqMatrixC _ mat) = ( (++) ("Matrix: \n" ++
(foldl (++) "" [ show line ++ "\n"
| line <- mat ] ) ) )
showsVector :: (Integral a) => Vector a -> ShowS
showsVector (VectorC _ vec) =
( (++) ("Vector: " ++ show vec) )
-- ----------------------------------------------------------------------------
-- @node Instances, , I/O Operations, ADT Matrix
-- @section Instances
--
-- Instance definitions for the ADT of Square Matrices and Vectors
-- ----------------------------------------------------------------------------
{-
instance (Eq a) => Eq [a] where
l == l' = foldl (&&) True (listCompwiseComp (==) l l')
-}
instance (Integral a, NFData a) => Eq (SqMatrix a) where
(==) = matEqual
instance (Integral a) => Read (SqMatrix a) where
readsPrec p = error "readsPrec of Matrix: Not yet implemented!\n"
instance (Integral a) => Show (SqMatrix a) where
showsPrec p = showsMatrix
instance (Integral a, NFData a) => Num (SqMatrix a) where
(+) = matSum
(-) = matDif
(*) = matMult
negate = scalarMult (-1)
abs = matAbs
signum = matSignum
fromInteger = error "fromInteger of Matrix: Not yet implemented\n"
{- matFromInteger -}
instance (Integral a, NFData a) => Eq (Vector a) where
(==) = vecEqual
instance (Integral a, NFData a) => Read (Vector a) where
readsPrec p = error "readsPrec of Vector: Not yet implemented!\n"
instance (Integral a, NFData a) => Show (Vector a) where
showsPrec p = showsVector
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/parallel/linsolv/Matrix.hs | haskell |
---------------------------------------------------------------------------
@node ADT Matrix, , ,
@chapter ADT Matrix
MatBounds,
@menu
* Imports::
* Data Types::
* Constants::
* Aux functions::
* Data type constructors::
* H.o. fcts::
* Misc operations::
* Arithmetic Operations::
* I/O Operations::
* Instances::
@end menu
@section Imports
Hom(hom),
----------------------------------------------------------------------------
@section Data Types
Data Type definitions
----------------------------------------------------------------------------
rnf x@(SqMatrixC b m) = rnf b >> rnf m >> return x
----------------------------------------------------------------------------
----------------------------------------------------------------------------
map f' (zip l l')
where f' (a,b) = a `f` b
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
@section H.o. fcts
Mapping and other general operations
----------------------------------------------------------------------------
----------------------------------------------------------------------------
@section Misc operations
Misc operations
----------------------------------------------------------------------------
---------------------------------------------------------------------------
replaceColumn :: (Ix a, Ix b) => a -> Array (a,b) c -> Array b c -> Array (a,b) c
This is definitely more elegant. But is it as efficient?
transp :: (Ix a, Ix b) => Array (a,b) c -> Array (b,a) c
SqMatrixC b [ [ line !! (j-1) | line <- mat ] | j <- [jLo..jHi] ]
----------------------------------------------------------------------------
@section Arithmetic Operations
----------------------------------------------------------------------------
SqMatrixC b [ mat_map (x*) line | line <- mat ]
@cindex determinant
`sparking` rnf sign
seq determinant!
matEqual :: (Ix a, Ix b, Eq c) => Array (a,b) c -> Array (a,b) c -> Bool
@cindex mat mult
parallel matrix multiplication
vecHom :: (Integral a) => Integer -> Vector a -> Vector a
vecHom :: (Integral a) => Integer -> Vector a -> Vector a
----------------------------------------------------------------------------
@section I/O Operations
----------------------------------------------------------------------------
showsMatrix :: (Ix a, Ix b, Text c) => Array (a,b) c -> ShowS
----------------------------------------------------------------------------
@section Instances
----------------------------------------------------------------------------
matFromInteger | Time - stamp : < 2010 - 11 - 03 09:27:34 >
$ I d : Matrix.hs , v 1.4.2.5 2002/06/15 01:34:29 hwloidl Exp $
Data Encapsulation of the ADT Matrix .
Internal representation is a list of lists .
LinSolv remark : default determinant is parallel ( d&c over 1st line )
(!!-), (!-), sqMatrix,
vecBounds, matBounds, vecCont, matCont,
listSqMatrix, lolSqMatrix, unSqMatrix, vector, unvector,
determinant, transp, replaceColumn, size,
maxElem, maxElemVec, scalarMult, vecScalarQuot,
matGcd, vecGcd, matHom, vecHom, matBounds, matCont,
matMult, matCompact
)
showsMatrix , matEqual , matSum , matMult )
matSum',matSum'',showIt , makeUnique )
matSum',matSum'',showIt,makeUnique) -} where
@node Imports , Data Types , ADT Matrix , ADT Matrix
import Data.List(transpose)
import Data.Array
only needed if we use array based LU Decomp later
#if defined(STRATEGIES)
import Control.Parallel.Strategies
#endif
import Control.DeepSeq
infixl 5 !!-
infixl 5 !-
m!!-(i,j) = (m'!!i')!!j'
where bds@((rl,cl),(rh,ch)) = matBounds m
i' = i - rl
j' = j - cl
m' = matCont m
v!-i = v'!!i'
where bds@(rl,rh) = vecBounds v
i' = i - rl
v' = vecCont v
@node Data Types , Constants , Imports , ADT Matrix
data (Integral a) => SqMatrix a = SqMatrixC MatBounds [[a]]
data (Integral a) => Vector a = VectorC VecBounds [a]
type MatBounds = ((Int,Int),(Int,Int))
type VecBounds = ((Int),(Int))
instance (NFData a, Integral a) => NFData (Vector a) where
rnf b l ) = rnf b > > rnf l > > return x
rnf (VectorC b l) = rnf b `seq` rnf l
instance (NFData a, Integral a) => NFData (SqMatrix a) where
rnf (SqMatrixC b m) = rnf b `seq` rnf m
@node Aux functions , Data type constructors , Constants , ADT Matrix
@section Aux functions
lol :: (Integral a) => Int -> [a] -> [[a]]
lol _ [] = []
lol n l = let
(line, rest) = splitAt n l
in
line : (lol n rest)
mat_map = map
listCompwiseComp :: (a -> b -> c) -> [a] -> [b] -> [c]
listCompwiseComp = zipWith
@node Data type constructors , H.o . fcts , Aux functions , ADT Matrix
@section Data type constructors
sqMatrix :: (Integral a) => Array (Int,Int) a -> SqMatrix a
sqMatrix arr = SqMatrixC b [ [ (arr!(i,j)) | j <- [jLo..jHi] ]
| i <- [iLo..iHi] ]
where b@((iLo,jLo),(iHi,jHi)) = (bounds arr)
unSqMatrix :: (Integral a) => SqMatrix a -> Array (Int,Int) a
unSqMatrix (SqMatrixC b@((iLo,jLo),(iHi,jHi)) m)
= array b (concat [ [ ((i,j), (m!!(i-1))!!(j-1)) | j <- [jLo..jHi] ]
| i <- [iLo..iHi] ])
listSqMatrix :: (Integral a) => MatBounds -> [a] -> SqMatrix a
listSqMatrix b@((iLo,jLo),(iHi,jHi)) l = SqMatrixC b (take m (lol n l))
where m = iHi - iLo +1
n = jHi - jLo + 1
lolSqMatrix :: (Integral a) => MatBounds -> [[a]] -> SqMatrix a
lolSqMatrix b l = SqMatrixC b l
matBounds (SqMatrixC b _) = b
matCont (SqMatrixC _ m) = m
vector :: (Integral a) => [a] -> Vector a
vector l = VectorC ((1),(n)) l
where n = length l
vecBounds (VectorC b _) = b
vecCont (VectorC _ v) = v
unvector :: (Integral a) => Vector a -> Array (Int) a
unvector (VectorC b@(x,y) l) = array b (zip [x..y] l)
@node H.o . fcts , Misc operations , Data type constructors , ADT Matrix
#if defined(STRATEGIES)
matMapUnary :: (Integral a, NFData a) =>
#else
matMapUnary :: (Integral a) =>
#endif
(a -> a) -> SqMatrix a -> SqMatrix a
matMapUnary f (SqMatrixC b mat) =
SqMatrixC b (mat_map (mat_map f) mat)
matCompwiseComp :: (Integral a, Integral b, Integral c
#if defined(STRATEGIES)
,NFData a, NFData b, NFData c
#endif
) =>
(a -> b -> c) -> SqMatrix a -> SqMatrix b -> SqMatrix c
matCompwiseComp f (SqMatrixC bnds@((iLo,jLo),(iHi,jHi)) mat) (SqMatrixC bnds' mat') =
if (bnds==bnds')
then SqMatrixC bnds [ listCompwiseComp f (mat!!(k-1)) (mat'!!(k-1))
| k <- [iLo..iHi] ]
else error "matCompwiseComp: Matrices have different bounds\n"
#if defined(STRATEGIES)
matFold :: (Integral a, NFData a) => (a -> a -> a) -> a -> SqMatrix a -> a
#else
matFold :: (Integral a) => (a -> a -> a) -> a -> SqMatrix a -> a
#endif
matFold f init (SqMatrixC _ mat) = foldl f init (mat_map (foldl f init) mat)
#if defined(STRATEGIES)
vecFold :: (Integral a, NFData a) => (a -> a -> a) -> a -> Vector a -> a
#else
vecFold :: (Integral a) => (a -> a -> a) -> a -> Vector a -> a
#endif
vecFold f init (VectorC _ mat) = foldl f init mat
@node Misc operations , Arithmetic Operations , H.o . fcts , ADT Matrix
Just for testing ; demands computation of all elems of the matrix
matCompact x = matFold max 0 (matMapUnary signum x)
size :: (Integral a) => SqMatrix a -> Int
size (SqMatrixC ((iLo,jLo),(iHi,jHi)) mat) =
if (iLo==jLo) && (iHi==jHi)
then iHi-iLo+1
else error "size: Matrix doesn't have size ((1,1),(n,n))\n"
replaceColumn :: (Integral a) => Int -> SqMatrix a -> Vector a -> SqMatrix a
replaceColumn j (SqMatrixC b m)(VectorC _ v) =
SqMatrixC b (transpose (replaceLine j v (transpose m)))
where replaceLine :: Int -> [a] -> [[a]] -> [[a]]
replaceLine j v m = ( take (j-1) m ) ++
[v] ++
( drop (j) m )
replaceColumn j ( SqMatrixC b@((iLo , jLo),(iHi , jHi ) ) mat ) ( VectorC _ v ) =
if ( not ( inRange ( jLo , jHi ) j ) )
then error " Error in replaceColumn : column index not in range "
else SqMatrixC b [ replaceElem j i | i < - [ iLo .. ] ]
where replaceElem j i = [ line ! ! ( k-1 ) | k < - [ jLo .. j-1 ] ] + +
[ v ! ! ( i-1 ) ] + +
[ line ! ! ( k-1 ) | k < - [ j+1 .. jHi ] ]
where line = mat ! ! ( i-1 )
replaceColumn j (SqMatrixC b@((iLo,jLo),(iHi,jHi)) mat) (VectorC _ v) =
if (not (inRange (jLo,jHi) j))
then error "Error in replaceColumn: column index not in range"
else SqMatrixC b [ replaceElem j i | i <- [iLo..iHi] ]
where replaceElem j i = [ line !! (k-1) | k <- [jLo..j-1] ] ++
[ v !! (i-1) ] ++
[ line !! (k-1) | k <- [j+1..jHi] ]
where line = mat !! (i-1)
-}
transp :: (Integral a) => SqMatrix a -> SqMatrix a
transp (SqMatrixC b@((iLo,jLo),(iHi,jHi)) mat) = SqMatrixC b (transpose mat)
maxElem : : ( Ix a , Ix b , Ord c ) = > Array ( a , b ) c - > c
#if defined(STRATEGIES)
maxElem :: (Integral a, NFData a) => SqMatrix a -> a
#else
maxElem :: (Integral a) => SqMatrix a -> a
#endif
maxElem (SqMatrixC _ mat) = maximum ( mat_map maximum mat )
#if defined(STRATEGIES)
maxElemVec :: (Integral a, NFData a) => Vector a -> a
#else
maxElemVec :: (Integral a) => Vector a -> a
#endif
maxElemVec (VectorC _ vec) = maximum vec
@node Arithmetic Operations , I / O Operations , Misc operations , ADT Matrix
scalarMult : : ( Ix a , Ix b , ) = > c - > Array ( a , b ) c - > Array ( a , b ) c
#if defined(STRATEGIES)
scalarMult :: (Integral a, NFData a) => a -> SqMatrix a -> SqMatrix a
#else
scalarMult :: (Integral a) => a -> SqMatrix a -> SqMatrix a
#endif
scalarMult x = matMapUnary (x*)
#if defined(STRATEGIES)
vecScalarQuot :: (Integral a, NFData a) => a -> Vector a -> Vector a
#else
vecScalarQuot :: (Integral a) => a -> Vector a -> Vector a
#endif
vecScalarQuot x (VectorC b vec) =
VectorC b (mat_map (`div` x) vec)
#if defined(STRATEGIES)
crossProd :: (Integral a, NFData a) => Vector a -> Vector a -> a
#else
crossProd :: (Integral a) => Vector a -> Vector a -> a
#endif
crossProd (VectorC _ vec) (VectorC _ vec') = sum (zipWith (+) vec vec')
foldl ( + ) 0 ( listCompwiseComp ( * ) )
determinant : : ( Ix a , Ix b , ) = > Array ( a , b ) c - > c
determinant :: (
Integral a
, NFData a
) => SqMatrix a -> a
determinant (SqMatrixC ((iLo,jLo),(iHi,jHi)) mat)
| jHi-jLo+1 == 1 = let
[[mat_1_1]] = mat
in
mat_1_1
| jHi-jLo+1 == 2 = let
[[mat_1_1,mat_1_2],
[mat_2_1,mat_2_2] ] = mat
in
mat_1_1 * mat_2_2 - mat_1_2 * mat_2_1
| otherwise = sum l_par
where
l_par = map determine1 [jLo..jHi]
determine1 j =
(if pivot > 0 then
sign*pivot*det'
else
where
sign = if (even (j-jLo)) then 1 else -1
pivot = (head mat) !! (j-1)
mat_h' = (map (newLine j) (tail mat))
mat' = SqMatrixC ((iLo,jLo),(iHi-1,jHi-1))
mat_h'
det' = determinant mat'
#if 0
strategyD r =
parList (parList rnf) mat_h' `par`
rnf det' `par`
r0 r
#endif
tree_sum [] = 0
tree_sum [x] = x
tree_sum xs = (left+right)
where (l,r) = splitAt (length xs `div` 2) xs
left = tree_sum l
right = tree_sum r
newLine _ [] = []
newLine j line = (pre ++ post)
where
pre = [ line !! (k-1) | k <- [jLo..j-1] ]
post = [ line !! (k-1) | k <- [j+1..jHi] ]
matEqual :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> Bool
matEqual (SqMatrixC bnds@((iLo,jLo),(iHi,jHi)) mat) (SqMatrixC bnds' mat') =
if (bnds==bnds')
then foldl (&&) True
[ foldl (&&) True
(listCompwiseComp (==) (mat !! (k-1)) (mat' !! (k-1)))
| k <- [iLo..iHi] ]
else error "matEqual: Matrices have different bounds\n"
vecEqual :: (Integral a, NFData a) => Vector a -> Vector a -> Bool
vecEqual (VectorC bnds vec) (VectorC bnds' vec') =
if (bnds==bnds')
then foldl (&&) True (listCompwiseComp (==) vec vec')
else error "vecEqual: Matrices have different bounds\n"
matSum : : ( Ix a , Ix b , ) - > Array ( a , b ) c - > Array ( a , b ) c - > Array ( a , b ) c
matSum :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> SqMatrix a
matSum = matCompwiseComp (+)
matDif :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a -> SqMatrix a
matDif = matCompwiseComp (-)
matMult (SqMatrixC bnds mat) (SqMatrixC bnds' mat') =
SqMatrixC resultBounds
#if defined(__PARALLEL_HASKELL__) || defined(__GRANSIM__)
(parMap rwhnf
(\i ->
parMap rnf
(\j ->
#else
(map (\i -> map (\j ->
#endif
let
line = (VectorC ((jLo),(jHi)) (getLine i mat))
column = (VectorC ((iLo'),(iHi')) (getColumn j mat'))
in
crossProd line column
)
[iLo..iHi]
)
[jLo..jHi]
)
where getLine i mat = mat !! (i-1)
getColumn j mat = [ line !! (j-1) | line <- mat ]
size = iHi - iLo + 1
((iLo,jLo),(iHi,jHi)) = bnds
((iLo',jLo'),(iHi',jHi')) = bnds'
resultBounds
| (jLo,jHi)==(iLo',iHi') = ((iLo,jLo'),(iHi,jHi'))
| otherwise = error "matMult: incompatible bounds"
matAbs :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a
matAbs = matMapUnary abs
matSignum :: (Integral a, NFData a) => SqMatrix a -> SqMatrix a
matSignum = matMapUnary signum
matGcd :: (Integral a, NFData a) => SqMatrix a -> a
matGcd m = matFold gcd (maxElem m) m
vecGcd :: (Integral a, NFData a) => Vector a -> a
vecGcd m = vecFold gcd (maxElemVec m) m
matHom : : ( Integral a ) = > Integer - > a - > a
matHom : : ( Integral a ) = > Integer - > a - > a
matHom p = matMapUnary (modHom p)
vecHom p (VectorC _ v) = vector (mat_map (modHom p) v)
matBounds : : ( Integral a ) = > a - > MatBounds
matBounds ( SqMatrixC mat ) = bounds mat
matBounds :: (Integral a) => SqMatrix a -> MatBounds
matBounds (SqMatrixC mat) = bounds mat
-}
matFromInteger :: Integer -> SqMatrix Integer
matFromInteger n = SqMatrixC ((1,1),(1,1)) [[n]]
@node I / O Operations , Instances , Arithmetic Operations , ADT Matrix
showsMatrix :: (Integral a) => SqMatrix a -> ShowS
showsMatrix (SqMatrixC _ mat) = ( (++) ("Matrix: \n" ++
(foldl (++) "" [ show line ++ "\n"
| line <- mat ] ) ) )
showsVector :: (Integral a) => Vector a -> ShowS
showsVector (VectorC _ vec) =
( (++) ("Vector: " ++ show vec) )
-- @node Instances, , I/O Operations, ADT Matrix
-- Instance definitions for the ADT of Square Matrices and Vectors
{-
instance (Eq a) => Eq [a] where
 l == l' = foldl (&&) True (listCompwiseComp (==) l l')
-}
instance (Integral a, NFData a) => Eq (SqMatrix a) where
(==) = matEqual
instance (Integral a) => Read (SqMatrix a) where
readsPrec p = error "readsPrec of Matrix: Not yet implemented!\n"
instance (Integral a) => Show (SqMatrix a) where
showsPrec p = showsMatrix
instance (Integral a, NFData a) => Num (SqMatrix a) where
(+) = matSum
(-) = matDif
(*) = matMult
negate = scalarMult (-1)
abs = matAbs
signum = matSignum
fromInteger = error "fromInteger of Matrix: Not yet implemented\n"
instance (Integral a, NFData a) => Eq (Vector a) where
(==) = vecEqual
instance (Integral a, NFData a) => Read (Vector a) where
readsPrec p = error "readsPrec of Vector: Not yet implemented!\n"
instance (Integral a, NFData a) => Show (Vector a) where
showsPrec p = showsVector
|
21632724175aa5378c20fc15daf3a7df486cf49f0265d02d4b26f6f912571db0 | techascent/tech.datatype | dimensions_test.clj | (ns tech.v2.tensor.dimensions-test
(:require [tech.v2.tensor.dimensions :as ct-dims]
[clojure.test :refer :all]))
(defn ->raw
[dimensions]
(->> (select-keys dimensions [:shape :strides])
(map (fn [[k v]]
[k (vec v)]))
(into {})))
(deftest in-place-reshape-test
(is (= {:shape [6 2]
:strides [2 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [4 2 1]) [6 2]))))
(is (= {:shape [3 4]
:strides [4 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [4 2 1])
[3 4]))))
#_(is (= {:shape [3 4]
:strides [5 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [5 2 1])
[3 4]))))
#_(is (= {:shape [20 8] :strides [10 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[20 8]))))
#_(is (= {:shape [20 8 1 1] :strides [10 1 1 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[20 8 1 1]))))
#_(is (= {:shape [1 1 20 8] :strides [200 200 10 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[1 1 20 8]))))
(is (= {:shape [169 5] :strides [5 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [845] [1])
[169 5]))))
;;This test is just f-ed up. But the thing is that if the dimensions are dense then
;;in-place reshape that preserves ecount is possible; it is just an arbitrary
;;reinterpretation of the data.
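  ;; For example, shape [10 1 18 2] with strides [36 36 2 1] is dense, and its
  ;; ecount 10*1*18*2 = 360 = 10*4*9, so the reshape below just reinterprets it.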
(is (= {:shape [10 4 9] :strides [36 9 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [10 1 18 2] [36 36 2 1])
[10 4 9]))))
#_(is (= {:shape [845 1] :strides [25 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [13 13 5 1] [1625 125 25 1])
[845 1]))))
(is (= {:shape [1 1] :strides [1 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [1] [1])
[1 1])))))
| null | https://raw.githubusercontent.com/techascent/tech.datatype/8cc83d771d9621d580fd5d4d0625005bd7ab0e0c/test/tech/v2/tensor/dimensions_test.clj | clojure | This test is just f-ed up. But the thing is that if the dimensions are dense then
in-place reshape that preserves ecount is possible; it is just an arbitrary
reinterpretation of the data. | (ns tech.v2.tensor.dimensions-test
(:require [tech.v2.tensor.dimensions :as ct-dims]
[clojure.test :refer :all]))
(defn ->raw
[dimensions]
(->> (select-keys dimensions [:shape :strides])
(map (fn [[k v]]
[k (vec v)]))
(into {})))
(deftest in-place-reshape-test
(is (= {:shape [6 2]
:strides [2 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [4 2 1]) [6 2]))))
(is (= {:shape [3 4]
:strides [4 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [4 2 1])
[3 4]))))
#_(is (= {:shape [3 4]
:strides [5 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [3 2 2] [5 2 1])
[3 4]))))
#_(is (= {:shape [20 8] :strides [10 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[20 8]))))
#_(is (= {:shape [20 8 1 1] :strides [10 1 1 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[20 8 1 1]))))
#_(is (= {:shape [1 1 20 8] :strides [200 200 10 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [4 5 8] [50 10 1])
[1 1 20 8]))))
(is (= {:shape [169 5] :strides [5 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [845] [1])
[169 5]))))
(is (= {:shape [10 4 9] :strides [36 9 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [10 1 18 2] [36 36 2 1])
[10 4 9]))))
#_(is (= {:shape [845 1] :strides [25 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [13 13 5 1] [1625 125 25 1])
[845 1]))))
(is (= {:shape [1 1] :strides [1 1]}
(->raw
(ct-dims/in-place-reshape
(ct-dims/dimensions [1] [1])
[1 1])))))
|
6610c33c9c1e850a141a5c83a6491c0acf16a8ab35dff9a154caae8eec018342 | seereason/atp-haskell | PropExamples.hs | -- | Some propositional formulas to test, and functions to generate classes.
--
-- Copyright (c) 2003-2007. (See "LICENSE.txt" for details.)
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Data.Logic.ATP.PropExamples
( Knows(K)
, mk_knows, mk_knows2
, prime
, ramsey
, testPropExamples
) where
import Data.Bits (Bits, shiftR)
import Data.List as List (map)
import Data.Logic.ATP.Formulas
import Data.Logic.ATP.Lib (allsets, timeMessage)
import Data.Logic.ATP.Lit ((.~.))
import Data.Logic.ATP.Pretty (HasFixity(precedence), Pretty(pPrint), prettyShow, text)
import Data.Logic.ATP.Prop
import Data.Set as Set
import Prelude hiding (sum)
import Test.HUnit
-- | Generate assertion equivalent to R(s,t) <= n for the Ramsey number R(s,t)
ramsey :: (IsPropositional pf, AtomOf pf ~ Knows Integer, Ord pf) =>
Integer -> Integer -> Integer -> pf
ramsey s t n =
let vertices = Set.fromList [1 .. n] in
let yesgrps = Set.map (allsets (2 :: Integer)) (allsets s vertices)
nogrps = Set.map (allsets (2 :: Integer)) (allsets t vertices) in
let e xs = let [a, b] = Set.toAscList xs in atomic (K "p" a (Just b)) in
list_disj (Set.map (list_conj . Set.map e) yesgrps) .|. list_disj (Set.map (list_conj . Set.map (\ p -> (.~.)(e p))) nogrps)
data Knows a = K String a (Maybe a) deriving (Eq, Ord, Show)
instance (Num a, Show a) => Pretty (Knows a) where
pPrint (K s n mm) = text (s ++ show n ++ maybe "" (\ m -> "." ++ show m) mm)
instance Num a => HasFixity (Knows a) where
precedence _ = 9
instance IsAtom (Knows Integer)
-- Some currently tractable examples. (p. 36)
test01 :: Test
test01 = TestList [TestCase (assertEqual "ramsey 3 3 4"
"(p1.2∧p1.3∧p2.3)∨(p1.2∧p1.4∧p2.4)∨(p1.3∧p1.4∧p3.4)∨(p2.3∧p2.4∧p3.4)∨(¬p1.2∧¬p1.3∧¬p2.3)∨(¬p1.2∧¬p1.4∧¬p2.4)∨(¬p1.3∧¬p1.4∧¬p3.4)∨(¬p2.3∧¬p2.4∧¬p3.4)"
-- "p1.2∧p1.3∧p2.3∨p1.2∧p1.4∧p2.4∨p1.3∧p1.4∧p3.4∨p2.3∧p2.4∧p3.4∨¬p1.2∧¬p1.3∧¬p2.3∨¬p1.2∧¬p1.4∧¬p2.4∨¬p1.3∧¬p1.4∧¬p3.4∨¬p2.3∧¬p2.4∧¬p3.4"
(prettyShow (ramsey 3 3 4 :: PFormula (Knows Integer)))),
TestCase (timeMessage (\_ t -> "\nTime to compute (ramsey 3 3 5): " ++ show t) $ assertEqual "tautology (ramsey 3 3 5)" False (tautology (ramsey 3 3 5 :: PFormula (Knows Integer)))),
TestCase (timeMessage (\_ t -> "\nTime to compute (ramsey 3 3 6): " ++ show t) $ assertEqual "tautology (ramsey 3 3 6)" True (tautology (ramsey 3 3 6 :: PFormula (Knows Integer))))]
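-- The last two cases reflect the fact that R(3,3) = 6: ramsey 3 3 n is a
-- tautology exactly when every 2-colouring of the edges of K_n contains a
-- monochromatic triangle, which holds for n = 6 but not for n = 5.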
-- | Half adder. (p. 66)
halfsum :: forall formula. IsPropositional formula => formula -> formula -> formula
halfsum x y = x .<=>. ((.~.) y)
halfcarry :: forall formula. IsPropositional formula => formula -> formula -> formula
halfcarry x y = x .&. y
ha :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula -> formula
ha x y s c = (s .<=>. halfsum x y) .&. (c .<=>. halfcarry x y)
-- | Full adder.
carry :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
carry x y z = (x .&. y) .|. ((x .|. y) .&. z)
sum :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
sum x y z = halfsum (halfsum x y) z
fa :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula -> formula -> formula
fa x y z s c = (s .<=>. sum x y z) .&. (c .<=>. carry x y z)
-- | Useful idiom.
conjoin :: (IsPropositional formula, Ord formula, Ord a) => (a -> formula) -> Set a -> formula
conjoin f l = list_conj (Set.map f l)
-- | n-bit ripple carry adder with carry c(0) propagated in and c(n) out. (p. 67)
ripplecarry :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry x y c out n =
conjoin (\ i -> fa (x i) (y i) (c i) (out i) (c(i + 1))) (Set.fromList [0 .. (n - 1)])
-- Example.
mk_knows :: (IsPropositional formula, AtomOf formula ~ Knows a) => String -> a -> formula
mk_knows x i = atomic (K x i Nothing)
mk_knows2 :: (IsPropositional formula, AtomOf formula ~ Knows a) => String -> a -> a -> formula
mk_knows2 x i j = atomic (K x i (Just j))
test02 :: Test
test02 =
let [x, y, out, c] = List.map mk_knows ["X", "Y", "OUT", "C"] :: [Integer -> PFormula (Knows Integer)] in
TestCase (assertEqual "ripplecarry x y c out 2"
(((out 0 .<=>. ((x 0 .<=>. ((.~.) (y 0))) .<=>. ((.~.) (c 0)))) .&.
(c 1 .<=>. ((x 0 .&. y 0) .|. ((x 0 .|. y 0) .&. c 0)))) .&.
((out 1 .<=>. ((x 1 .<=>. ((.~.) (y 1))) .<=>. ((.~.) (c 1)))) .&.
(c 2 .<=>. ((x 1 .&. y 1) .|. ((x 1 .|. y 1) .&. c 1)))))
(ripplecarry x y c out 2 :: PFormula (Knows Integer)))
-- | Special case with 0 instead of c(0).
ripplecarry0 :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry0 x y c out n =
psimplify
(ripplecarry x y (\ i -> if i == 0 then false else c i) out n)
-- | Carry-select adder
ripplecarry1 :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry1 x y c out n =
psimplify
(ripplecarry x y (\ i -> if i == 0 then true else c i) out n)
mux :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
mux sel in0 in1 = (((.~.) sel) .&. in0) .|. (sel .&. in1)
offset :: forall t a. Num a => a -> (a -> t) -> a -> t
offset n x i = x (n + i)
carryselect :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> a -> formula
carryselect x y c0 c1 s0 s1 c s n k =
let k' = min n k in
let fm = ((ripplecarry0 x y c0 s0 k') .&. (ripplecarry1 x y c1 s1 k')) .&.
(((c k') .<=>. (mux (c 0) (c0 k') (c1 k'))) .&.
(conjoin (\ i -> (s i) .<=>. (mux (c 0) (s0 i) (s1 i)))
(Set.fromList [0 .. (k' - 1)]))) in
if k' < k then fm else
fm .&. (carryselect
(offset k x) (offset k y) (offset k c0) (offset k c1)
(offset k s0) (offset k s1) (offset k c) (offset k s)
(n - k) k)
-- | Equivalence problems for carry-select vs ripple carry adders. (p. 69)
mk_adder_test :: (IsPropositional formula, Ord formula, AtomOf formula ~ Knows a, Ord a, Num a, Enum a) =>
a -> a -> formula
mk_adder_test n k =
let [x, y, c, s, c0, s0, c1, s1, c2, s2] =
List.map mk_knows ["x", "y", "c", "s", "c0", "s0", "c1", "s1", "c2", "s2"] in
(((carryselect x y c0 c1 s0 s1 c s n k) .&.
((.~.) (c 0))) .&.
(ripplecarry0 x y c2 s2 n)) .=>.
(((c n) .<=>. (c2 n)) .&.
(conjoin (\ i -> (s i) .<=>. (s2 i)) (Set.fromList [0 .. (n - 1)])))
-- | Ripple carry stage that separates off the final result. (p. 70)
--
--      UUUUUUUUUUUUUUUUUUUU  (u)
--   +  VVVVVVVVVVVVVVVVVVVV  (v)
--
--   =  WWWWWWWWWWWWWWWWWWWW  (w)
--   +                     Z  (z)
rippleshift :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> formula
-> (a -> formula)
-> a -> formula
rippleshift u v c z w n =
ripplecarry0 u v (\ i -> if i == n then w(n - 1) else c(i + 1))
(\ i -> if i == 0 then z else w(i - 1)) n
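-- For instance, with n = 2 the stage adds the 2-bit inputs u and v: sum bit 0
-- comes out as z, sum bit 1 as w 0, and the final carry as w 1.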
-- | Naive multiplier based on repeated ripple carry.
multiplier :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> a -> formula)
-> (a -> a -> formula)
-> (a -> a -> formula)
-> (a -> formula)
-> a
-> formula
multiplier x u v out n =
if n == 1 then ((out 0) .<=>. (x 0 0)) .&. ((.~.)(out 1)) else
psimplify (((out 0) .<=>. (x 0 0)) .&.
((rippleshift
(\ i -> if i == n - 1 then false else x 0 (i + 1))
(x 1) (v 2) (out 1) (u 2) n) .&.
(if n == 2 then ((out 2) .<=>. (u 2 0)) .&. ((out 3) .<=>. (u 2 1)) else
conjoin (\ k -> rippleshift (u k) (x k) (v(k + 1)) (out k)
(if k == n - 1 then \ i -> out(n + i)
else u(k + 1)) n) (Set.fromList [2 .. (n - 1)]))))
-- | Primality examples. (p. 71)
--
-- For large examples, should use 'Integer' instead of 'Int' in these functions.
bitlength :: forall b a. (Num a, Num b, Bits b) => b -> a
bitlength x = if x == 0 then 0 else 1 + bitlength (shiftR x 1);;
bit :: forall a b. (Num a, Eq a, Bits b, Integral b) => a -> b -> Bool
bit n x = if n == 0 then x `mod` 2 == 1 else bit (n - 1) (shiftR x 1)
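-- For example, 6 is 110 in binary, so bitlength (6 :: Integer) == 3 and
-- map (\ i -> bit i (6 :: Integer)) [0, 1, 2] == [False, True, True].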
congruent_to :: (IsPropositional formula, Ord formula, Bits b, Ord a, Num a, Integral b, Enum a) =>
(a -> formula) -> b -> a -> formula
congruent_to x m n =
conjoin (\ i -> if bit i m then x i else (.~.)(x i))
(Set.fromList [0 .. (n - 1)])
prime :: (IsPropositional formula, Ord formula, AtomOf formula ~ Knows Integer) => Integer -> formula
prime p =
let [x, y, out] = List.map mk_knows ["x", "y", "out"] in
let m i j = (x i) .&. (y j)
[u, v] = List.map mk_knows2 ["u", "v"] in
let (n :: Integer) = bitlength p in
(.~.) (multiplier m u v out (n - 1) .&. congruent_to out p (max n (2 * n - 2)))
-- Examples. (p. 72)
type F = PFormula (Knows Integer)
test03 :: Test
test03 =
TestList [TestCase (timeMessage (\_ t -> "\nTime to prove (prime 7): " ++ show t) (assertEqual "tautology(prime 7)" True (tautology (prime 7 :: F)))),
TestCase (timeMessage (\_ t -> "\nTime to prove (prime 9): " ++ show t) (assertEqual "tautology(prime 9)" False (tautology (prime 9 :: F)))),
TestCase (timeMessage (\_ t -> "\nTime to prove (prime 11): " ++ show t) (assertEqual "tautology(prime 11)" True (tautology (prime 11 :: F))))]
testPropExamples :: Test
testPropExamples = TestLabel "PropExamples" (TestList [test01, test02, test03])
| null | https://raw.githubusercontent.com/seereason/atp-haskell/8b3431236369b9bf5b8723225f65cfac1832a0f9/src/Data/Logic/ATP/PropExamples.hs | haskell | | Some propositional formulas to test, and functions to generate classes.
# LANGUAGE RankNTypes #
# LANGUAGE TypeSynonymInstances #
"p1.2∧p1.3∧p2.3∨p1.2∧p1.4∧p2.4∨p1.3∧p1.4∧p3.4∨p2.3∧p2.4∧p3.4∨¬p1.2∧¬p1.3∧¬p2.3∨¬p1.2∧¬p1.4∧¬p2.4∨¬p1.3∧¬p1.4∧¬p3.4∨¬p2.3∧¬p2.4∧¬p3.4"
| Full adder.
| Useful idiom.
Example.
| Carry-select adder
| Naive multiplier based on repeated ripple carry.
| Copyright ( c ) 2003 - 2007 , . ( See " LICENSE.txt " for details . )
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Logic.ATP.PropExamples
( Knows(K)
, mk_knows, mk_knows2
, prime
, ramsey
, testPropExamples
) where
import Data.Bits (Bits, shiftR)
import Data.List as List (map)
import Data.Logic.ATP.Formulas
import Data.Logic.ATP.Lib (allsets, timeMessage)
import Data.Logic.ATP.Lit ((.~.))
import Data.Logic.ATP.Pretty (HasFixity(precedence), Pretty(pPrint), prettyShow, text)
import Data.Logic.ATP.Prop
import Data.Set as Set
import Prelude hiding (sum)
import Test.HUnit
-- | Generate assertion equivalent to R(s,t) <= n for the Ramsey number R(s,t)
ramsey :: (IsPropositional pf, AtomOf pf ~ Knows Integer, Ord pf) =>
Integer -> Integer -> Integer -> pf
ramsey s t n =
let vertices = Set.fromList [1 .. n] in
let yesgrps = Set.map (allsets (2 :: Integer)) (allsets s vertices)
nogrps = Set.map (allsets (2 :: Integer)) (allsets t vertices) in
let e xs = let [a, b] = Set.toAscList xs in atomic (K "p" a (Just b)) in
list_disj (Set.map (list_conj . Set.map e) yesgrps) .|. list_disj (Set.map (list_conj . Set.map (\ p -> (.~.)(e p))) nogrps)
data Knows a = K String a (Maybe a) deriving (Eq, Ord, Show)
instance (Num a, Show a) => Pretty (Knows a) where
pPrint (K s n mm) = text (s ++ show n ++ maybe "" (\ m -> "." ++ show m) mm)
instance Num a => HasFixity (Knows a) where
precedence _ = 9
instance IsAtom (Knows Integer)
-- Some currently tractable examples. (p. 36)
test01 :: Test
test01 = TestList [TestCase (assertEqual "ramsey 3 3 4"
"(p1.2∧p1.3∧p2.3)∨(p1.2∧p1.4∧p2.4)∨(p1.3∧p1.4∧p3.4)∨(p2.3∧p2.4∧p3.4)∨(¬p1.2∧¬p1.3∧¬p2.3)∨(¬p1.2∧¬p1.4∧¬p2.4)∨(¬p1.3∧¬p1.4∧¬p3.4)∨(¬p2.3∧¬p2.4∧¬p3.4)"
(prettyShow (ramsey 3 3 4 :: PFormula (Knows Integer)))),
TestCase (timeMessage (\_ t -> "\nTime to compute (ramsey 3 3 5): " ++ show t) $ assertEqual "tautology (ramsey 3 3 5)" False (tautology (ramsey 3 3 5 :: PFormula (Knows Integer)))),
TestCase (timeMessage (\_ t -> "\nTime to compute (ramsey 3 3 6): " ++ show t) $ assertEqual "tautology (ramsey 3 3 6)" True (tautology (ramsey 3 3 6 :: PFormula (Knows Integer))))]
-- | Half adder. (p. 66)
halfsum :: forall formula. IsPropositional formula => formula -> formula -> formula
halfsum x y = x .<=>. ((.~.) y)
halfcarry :: forall formula. IsPropositional formula => formula -> formula -> formula
halfcarry x y = x .&. y
ha :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula -> formula
ha x y s c = (s .<=>. halfsum x y) .&. (c .<=>. halfcarry x y)
carry :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
carry x y z = (x .&. y) .|. ((x .|. y) .&. z)
sum :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
sum x y z = halfsum (halfsum x y) z
fa :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula -> formula -> formula
fa x y z s c = (s .<=>. sum x y z) .&. (c .<=>. carry x y z)
conjoin :: (IsPropositional formula, Ord formula, Ord a) => (a -> formula) -> Set a -> formula
conjoin f l = list_conj (Set.map f l)
-- | n-bit ripple carry adder with carry c(0) propagated in and c(n) out. (p. 67)
ripplecarry :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry x y c out n =
conjoin (\ i -> fa (x i) (y i) (c i) (out i) (c(i + 1))) (Set.fromList [0 .. (n - 1)])
mk_knows :: (IsPropositional formula, AtomOf formula ~ Knows a) => String -> a -> formula
mk_knows x i = atomic (K x i Nothing)
mk_knows2 :: (IsPropositional formula, AtomOf formula ~ Knows a) => String -> a -> a -> formula
mk_knows2 x i j = atomic (K x i (Just j))
test02 :: Test
test02 =
let [x, y, out, c] = List.map mk_knows ["X", "Y", "OUT", "C"] :: [Integer -> PFormula (Knows Integer)] in
TestCase (assertEqual "ripplecarry x y c out 2"
(((out 0 .<=>. ((x 0 .<=>. ((.~.) (y 0))) .<=>. ((.~.) (c 0)))) .&.
(c 1 .<=>. ((x 0 .&. y 0) .|. ((x 0 .|. y 0) .&. c 0)))) .&.
((out 1 .<=>. ((x 1 .<=>. ((.~.) (y 1))) .<=>. ((.~.) (c 1)))) .&.
(c 2 .<=>. ((x 1 .&. y 1) .|. ((x 1 .|. y 1) .&. c 1)))))
(ripplecarry x y c out 2 :: PFormula (Knows Integer)))
-- | Special case with 0 instead of c(0).
ripplecarry0 :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry0 x y c out n =
psimplify
(ripplecarry x y (\ i -> if i == 0 then false else c i) out n)
ripplecarry1 :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> formula
ripplecarry1 x y c out n =
psimplify
(ripplecarry x y (\ i -> if i == 0 then true else c i) out n)
mux :: forall formula. IsPropositional formula => formula -> formula -> formula -> formula
mux sel in0 in1 = (((.~.) sel) .&. in0) .|. (sel .&. in1)
offset :: forall t a. Num a => a -> (a -> t) -> a -> t
offset n x i = x (n + i)
carryselect :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> (a -> formula)
-> a -> a -> formula
carryselect x y c0 c1 s0 s1 c s n k =
let k' = min n k in
let fm = ((ripplecarry0 x y c0 s0 k') .&. (ripplecarry1 x y c1 s1 k')) .&.
(((c k') .<=>. (mux (c 0) (c0 k') (c1 k'))) .&.
(conjoin (\ i -> (s i) .<=>. (mux (c 0) (s0 i) (s1 i)))
(Set.fromList [0 .. (k' - 1)]))) in
if k' < k then fm else
fm .&. (carryselect
(offset k x) (offset k y) (offset k c0) (offset k c1)
(offset k s0) (offset k s1) (offset k c) (offset k s)
(n - k) k)
-- | Equivalence problems for carry-select vs ripple carry adders. (p. 69)
mk_adder_test :: (IsPropositional formula, Ord formula, AtomOf formula ~ Knows a, Ord a, Num a, Enum a) =>
a -> a -> formula
mk_adder_test n k =
let [x, y, c, s, c0, s0, c1, s1, c2, s2] =
List.map mk_knows ["x", "y", "c", "s", "c0", "s0", "c1", "s1", "c2", "s2"] in
(((carryselect x y c0 c1 s0 s1 c s n k) .&.
((.~.) (c 0))) .&.
(ripplecarry0 x y c2 s2 n)) .=>.
(((c n) .<=>. (c2 n)) .&.
(conjoin (\ i -> (s i) .<=>. (s2 i)) (Set.fromList [0 .. (n - 1)])))
-- | Ripple carry stage that separates off the final result. (p. 70)
--
--      UUUUUUUUUUUUUUUUUUUU  (u)
--   +  VVVVVVVVVVVVVVVVVVVV  (v)
--
--   =  WWWWWWWWWWWWWWWWWWWW  (w)
--   +                     Z  (z)
rippleshift :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> formula)
-> (a -> formula)
-> (a -> formula)
-> formula
-> (a -> formula)
-> a -> formula
rippleshift u v c z w n =
ripplecarry0 u v (\ i -> if i == n then w(n - 1) else c(i + 1))
(\ i -> if i == 0 then z else w(i - 1)) n
multiplier :: (IsPropositional formula, Ord formula, Ord a, Num a, Enum a) =>
(a -> a -> formula)
-> (a -> a -> formula)
-> (a -> a -> formula)
-> (a -> formula)
-> a
-> formula
multiplier x u v out n =
if n == 1 then ((out 0) .<=>. (x 0 0)) .&. ((.~.)(out 1)) else
psimplify (((out 0) .<=>. (x 0 0)) .&.
((rippleshift
(\ i -> if i == n - 1 then false else x 0 (i + 1))
(x 1) (v 2) (out 1) (u 2) n) .&.
(if n == 2 then ((out 2) .<=>. (u 2 0)) .&. ((out 3) .<=>. (u 2 1)) else
conjoin (\ k -> rippleshift (u k) (x k) (v(k + 1)) (out k)
(if k == n - 1 then \ i -> out(n + i)
else u(k + 1)) n) (Set.fromList [2 .. (n - 1)]))))
-- | Primality examples. (p. 71)
--
-- For large examples, should use 'Integer' instead of 'Int' in these functions.
bitlength :: forall b a. (Num a, Num b, Bits b) => b -> a
bitlength x = if x == 0 then 0 else 1 + bitlength (shiftR x 1);;
bit :: forall a b. (Num a, Eq a, Bits b, Integral b) => a -> b -> Bool
bit n x = if n == 0 then x `mod` 2 == 1 else bit (n - 1) (shiftR x 1)
congruent_to :: (IsPropositional formula, Ord formula, Bits b, Ord a, Num a, Integral b, Enum a) =>
(a -> formula) -> b -> a -> formula
congruent_to x m n =
conjoin (\ i -> if bit i m then x i else (.~.)(x i))
(Set.fromList [0 .. (n - 1)])
prime :: (IsPropositional formula, Ord formula, AtomOf formula ~ Knows Integer) => Integer -> formula
prime p =
let [x, y, out] = List.map mk_knows ["x", "y", "out"] in
let m i j = (x i) .&. (y j)
[u, v] = List.map mk_knows2 ["u", "v"] in
let (n :: Integer) = bitlength p in
(.~.) (multiplier m u v out (n - 1) .&. congruent_to out p (max n (2 * n - 2)))
-- Examples. (p. 72)
type F = PFormula (Knows Integer)
test03 :: Test
test03 =
TestList [TestCase (timeMessage (\_ t -> "\nTime to prove (prime 7): " ++ show t) (assertEqual "tautology(prime 7)" True (tautology (prime 7 :: F)))),
TestCase (timeMessage (\_ t -> "\nTime to prove (prime 9): " ++ show t) (assertEqual "tautology(prime 9)" False (tautology (prime 9 :: F)))),
TestCase (timeMessage (\_ t -> "\nTime to prove (prime 11): " ++ show t) (assertEqual "tautology(prime 11)" True (tautology (prime 11 :: F))))]
testPropExamples :: Test
testPropExamples = TestLabel "PropExamples" (TestList [test01, test02, test03])
|
fc36feeeb1b5c1edc415b440e9a5ddab7d8d749d29a13f3d71ddb1cabd6f1645 | lokedhs/containers | blocking-queue.lisp | (in-package :receptacle)
(defclass generic-sequence ()
()
(:documentation "Generic ordered sequence"))
(define-condition sequence-empty (error)
()
(:documentation "Error that is raised if an attempt is made to
pop an element from an empty sequence."))
#+nil
(eval-when (:compile-toplevel :load-toplevel :execute)
(push :log-queue *features*)
(push :debug-queue *features*))
(defclass queue (generic-sequence)
((content :type (vector t *)
:initform (make-array 32 :adjustable t)
:accessor queue/content)
(head :type (integer 0)
:initform 0
:accessor queue/head)
(tail :type (integer 0)
:initform 0
:accessor queue/tail))
(:documentation "Queue that supports insertion and removal from both the tail and head"))
(defmethod print-object ((obj queue) stream)
(print-unreadable-object (obj stream :type t :identity nil)
(with-slots (content head tail) obj
(format stream "SIZE ~a" (mod (- tail head) (array-dimension content 0))))))
(defmethod content-length ((queue queue))
(with-slots (head tail content) queue
(mod (- tail head) (array-dimension content 0))))
(defmethod empty-p ((queue queue))
(with-slots (head tail) queue
(= head tail)))
(defmethod delete-all ((queue queue))
(loop
with content = (queue/content queue)
with length = (array-dimension content 0)
for i from 0 below length
do (setf (aref content i) nil))
(setf (queue/head queue) 0)
(setf (queue/tail queue) 0))
(defun make-queue ()
"Create a new instance of a queue."
(make-instance 'queue))
(defmethod queue-push ((queue queue) element)
(with-slots (content head tail) queue
(let ((size (array-dimension content 0)))
(when (= (mod (- tail head) size) (1- size))
(let* ((new-size (* size 2))
(new-head (- new-size (- size head))))
(adjust-array content new-size)
(when (< tail head)
(loop
for src from head below size
for dest from new-head
do (setf (aref content dest) (aref content src)))
(setq head new-head))))
(setf (aref content tail) element)
(setq tail (mod (1+ tail) (array-dimension content 0)))
element)))
(defmethod queue-pop ((queue queue) &key (if-empty nil if-empty-set-p))
(with-slots (content head tail) queue
(if (= head tail)
(if if-empty-set-p
if-empty
(error 'sequence-empty))
(let ((result (aref content head)))
(setf (aref content head) nil)
(setq head (mod (1+ head) (array-dimension content 0)))
result))))
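;; A minimal usage sketch (illustrative only; it uses nothing beyond MAKE-QUEUE,
;; QUEUE-PUSH and QUEUE-POP as defined above):
#+nil
(let ((q (make-queue)))
  (queue-push q 1)
  (queue-push q 2)
  (list (queue-pop q)                     ; => 1 (FIFO order)
        (queue-pop q)                     ; => 2
        (queue-pop q :if-empty :empty)))  ; => :EMPTY instead of signalling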
(defgeneric queue-pop-wait (queue &key timeout)
(:documentation "Attempts to pop one element off QUEUE. If the queue
is empty, wait until an element is added."))
(defclass blocking-queue (queue lockable-instance)
()
(:documentation "A thread-safe version of QUEUE that allows waiting
for elements to be added to it."))
(defun make-blocking-queue (&key name)
"Create a new instance of a blocking queue."
(make-instance 'blocking-queue :lockable-instance-name name))
(defmethod empty-p ((queue blocking-queue))
(with-locked-instance queue
(call-next-method)))
(defmethod delete-all ((queue blocking-queue))
(with-locked-instance queue
(call-next-method)))
(defmethod queue-push ((queue blocking-queue) element)
(declare (ignore element))
(with-locked-instance queue
(let ((result (call-next-method)))
(bordeaux-threads:condition-notify (lockable-instance/cond-variable queue))
result)))
(defmethod queue-pop ((queue blocking-queue) &rest rest)
(declare (ignore rest))
(with-locked-instance queue
(call-next-method)))
(defun current-time ()
(let ((now (local-time:now)))
(+ (local-time:timestamp-to-unix now)
(/ (local-time:nsec-of now) 1000000000))))
(defun %queue-pop-wait (queue timeout)
(with-locked-instance queue
(if (empty-p queue)
(when (let ((condition (lockable-instance/cond-variable queue))
(lock (lockable-instance/lock queue)))
#+sbcl
(sb-thread:condition-wait condition lock :timeout timeout)
#+abcl
(progn
(threads:synchronized-on condition
(bordeaux-threads:release-lock lock)
(apply #'threads:object-wait condition (if timeout (list timeout))))
(bordeaux-threads:acquire-lock lock)
t)
#+ccl
(progn
(ccl:release-lock lock)
(unwind-protect
(if timeout
(ccl:timed-wait-on-semaphore condition timeout)
(ccl:wait-on-semaphore condition))
(ccl:grab-lock lock)))
#+clisp
(mt:exemption-wait condition lock :timeout timeout)
#-(or sbcl abcl ccl clisp)
(progn
(bordeaux-threads:condition-wait condition lock)
t))
(unless (empty-p queue)
(queue-pop queue)))
;; ELSE: We have an element on the queue
(queue-pop queue))))
(defmethod queue-pop-wait ((queue blocking-queue) &key timeout)
#-(or sbcl abcl ccl clisp)
(when timeout
(error "Timeout is not supported on this CL implementation"))
(check-type timeout (or null number))
(if timeout
(loop
;; wait-time-epsilon indicates the minimum amount of time to wait
with wait-time-epsilon = 1/1000
with start = (current-time)
with now = start
with cutoff = (+ start (rationalize timeout))
while (< now cutoff)
do (let ((result (%queue-pop-wait queue (max (- cutoff now) wait-time-epsilon))))
(when result
(return result))
(setq now (current-time)))
finally (return nil))
;; ELSE: No timeout
(loop
do (let ((result (%queue-pop-wait queue nil)))
(when result
(return result))))))
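;; A similar sketch for the blocking variant (illustrative only; BORDEAUX-THREADS
;; is already used by this file):
#+nil
(let ((q (make-blocking-queue :name "demo")))
  ;; Producer: push an element after a short delay.
  (bordeaux-threads:make-thread
   (lambda ()
     (sleep 0.1)
     (queue-push q :hello)))
  ;; Consumer: blocks until the element arrives, or returns NIL after one second.
  (queue-pop-wait q :timeout 1))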
| null | https://raw.githubusercontent.com/lokedhs/containers/5d4ce688bddd51ee34a4259e37b698b84f650bdf/src/blocking-queue.lisp | lisp | ELSE: We have an element on the queue
wait-time-epsilon indicates the minimum amount of time to wait
ELSE: No timeout | (in-package :receptacle)
(defclass generic-sequence ()
()
(:documentation "Generic ordered sequence"))
(define-condition sequence-empty (error)
()
(:documentation "Error that is raised if an attempt is made to
access pop an element from an empty sequence."))
#+nil
(eval-when (:compile-toplevel :load-toplevel :execute)
(push :log-queue *features*)
(push :debug-queue *features*))
(defclass queue (generic-sequence)
((content :type (vector t *)
:initform (make-array 32 :adjustable t)
:accessor queue/content)
(head :type (integer 0)
:initform 0
:accessor queue/head)
(tail :type (integer 0)
:initform 0
:accessor queue/tail))
(:documentation "Queue that supports insertion and removal from both the tail and head"))
(defmethod print-object ((obj queue) stream)
(print-unreadable-object (obj stream :type t :identity nil)
(with-slots (content head tail) obj
(format stream "SIZE ~a" (mod (- tail head) (array-dimension content 0))))))
(defmethod content-length ((queue queue))
(with-slots (head tail content) queue
(mod (- tail head) (array-dimension content 0))))
(defmethod empty-p ((queue queue))
(with-slots (head tail) queue
(= head tail)))
(defmethod delete-all ((queue queue))
(loop
with content = (queue/content queue)
with length = (array-dimension content 0)
for i from 0 below length
do (setf (aref content i) nil))
(setf (queue/head queue) 0)
(setf (queue/tail queue) 0))
(defun make-queue ()
"Create a new instance of a queue."
(make-instance 'queue))
(defmethod queue-push ((queue queue) element)
(with-slots (content head tail) queue
(let ((size (array-dimension content 0)))
(when (= (mod (- tail head) size) (1- size))
(let* ((new-size (* size 2))
(new-head (- new-size (- size head))))
(adjust-array content new-size)
(when (< tail head)
(loop
for src from head below size
for dest from new-head
do (setf (aref content dest) (aref content src)))
(setq head new-head))))
(setf (aref content tail) element)
(setq tail (mod (1+ tail) (array-dimension content 0)))
element)))
(defmethod queue-pop ((queue queue) &key (if-empty nil if-empty-set-p))
(with-slots (content head tail) queue
(if (= head tail)
(if if-empty-set-p
if-empty
(error 'sequence-empty))
(let ((result (aref content head)))
(setf (aref content head) nil)
(setq head (mod (1+ head) (array-dimension content 0)))
result))))
(defgeneric queue-pop-wait (queue &key timeout)
(:documentation "Attempts to pop one element off QUEUE. If the queue
is empty, wait until an element is added."))
(defclass blocking-queue (queue lockable-instance)
()
(:documentation "A thread-safe version of QUEUE that allows waiting
for elements to be added to it."))
(defun make-blocking-queue (&key name)
"Create a new instance of a blocking queue."
(make-instance 'blocking-queue :lockable-instance-name name))
(defmethod empty-p ((queue blocking-queue))
(with-locked-instance queue
(call-next-method)))
(defmethod delete-all ((queue blocking-queue))
(with-locked-instance queue
(call-next-method)))
(defmethod queue-push ((queue blocking-queue) element)
(declare (ignore element))
(with-locked-instance queue
(let ((result (call-next-method)))
(bordeaux-threads:condition-notify (lockable-instance/cond-variable queue))
result)))
(defmethod queue-pop ((queue blocking-queue) &rest rest)
(declare (ignore rest))
(with-locked-instance queue
(call-next-method)))
(defun current-time ()
(let ((now (local-time:now)))
(+ (local-time:timestamp-to-unix now)
(/ (local-time:nsec-of now) 1000000000))))
(defun %queue-pop-wait (queue timeout)
(with-locked-instance queue
(if (empty-p queue)
(when (let ((condition (lockable-instance/cond-variable queue))
(lock (lockable-instance/lock queue)))
#+sbcl
(sb-thread:condition-wait condition lock :timeout timeout)
#+abcl
(progn
(threads:synchronized-on condition
(bordeaux-threads:release-lock lock)
(apply #'threads:object-wait condition (if timeout (list timeout))))
(bordeaux-threads:acquire-lock lock)
t)
#+ccl
(progn
(ccl:release-lock lock)
(unwind-protect
(if timeout
(ccl:timed-wait-on-semaphore condition timeout)
(ccl:wait-on-semaphore condition))
(ccl:grab-lock lock)))
#+clisp
(mt:exemption-wait condition lock :timeout timeout)
#-(or sbcl abcl ccl clisp)
(progn
(bordeaux-threads:condition-wait condition lock)
t))
(unless (empty-p queue)
(queue-pop queue)))
(queue-pop queue))))
(defmethod queue-pop-wait ((queue blocking-queue) &key timeout)
#-(or sbcl abcl ccl clisp)
(when timeout
(error "Timeout is not supported on this CL implementation"))
(check-type timeout (or null number))
(if timeout
(loop
with wait-time-epsilon = 1/1000
with start = (current-time)
with now = start
with cutoff = (+ start (rationalize timeout))
while (< now cutoff)
do (let ((result (%queue-pop-wait queue (max (- cutoff now) wait-time-epsilon))))
(when result
(return result))
(setq now (current-time)))
finally (return nil))
(loop
do (let ((result (%queue-pop-wait queue nil)))
(when result
(return result))))))
|
b53e3f9164e53b7cddfe34d650fafd0e878d490499b1db675507c8289c9b802c | clojureverse/clojurians-log-app | test_data.clj | (ns test-data
(:require [datomic.api :as d]
[clojure.pprint :refer [pprint]]))
(defn pprint-str [val]
(with-out-str
(pprint val)))
(defn conn [] (-> reloaded.repl/system :datomic :conn))
(defn db [] (d/db (conn)))
(defn channel-data [db chans]
(for [chan chans]
(-> db
(d/entity [:channel/name chan])
(select-keys [:channel/slack-id
:channel/name
:channel/created
:channel/creator])
(update :channel/creator #(do [:user/slack-id (:user/slack-id %)])))))
(defn user-data [db users]
(for [user users]
(-> db
(d/entity [:user/slack-id user])
(d/touch))))
(defn message-data [db channels days]
(->> (d/q '[:find [(pull ?msg [:db/id
:message/key
:message/text
{:message/channel [:channel/slack-id]}
{:message/user [:user/slack-id]}
:message/ts
:message/day
#_:message/thread-ts
#_:message/thread-inst]) ...]
:in $ [?chans ...] [?days ...]
:where
[?chan :channel/name ?chans]
[?msg :message/day ?days]
[?msg :message/channel ?chan]]
db
channels
days)
(map #(update % :message/user first))
(map #(update % :message/channel first))))
;; two-channels-two-days
(let [db (db)
chan-data (channel-data db ["clojure" "clojurescript"])
msg-data (message-data db #{"clojure" "clojurescript"} ["2018-02-02" "2018-02-03"])
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(spit "resources/clojurians-log/test-data/two-channels-two-days.edn"
(pprint-str (mapv #(mapv (fn [m]
(dissoc (into {} m) :db/id))
%)
[user-data
chan-data
msg-data]))))
;; quiet-channels
(let [db (db)
channels #{"jobs" "keechma" "reitit" "yada" "dirac"}
days #{"2018-02-01" "2018-02-02" "2018-02-03" "2018-02-04"}
chan-data (channel-data db channels)
msg-data (message-data db channels days)
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(spit "resources/clojurians-log/test-data/quiet-channels.edn"
(pprint-str (mapv #(mapv (fn [m]
(dissoc (into {} m) :db/id))
%)
[user-data
chan-data
msg-data]))))
(defn thread-messages-by-ids [db channels msg-ids]
(->> (d/q '[:find [(pull ?msg [:db/id
:message/key
:message/text
{:message/channel [:channel/slack-id]}
{:message/user [:user/slack-id]}
:message/ts
:message/day
:message/thread-ts
:message/thread-inst]) ...]
:in $ [?chans ...] [?msg-id ...]
:where
[?chan :channel/name ?chans]
[?msg :message/channel ?chan]
(or [?msg :message/ts ?msg-id]
[?msg :message/thread-ts ?msg-id])]
db
channels
msg-ids)
(map #(update % :message/user first))
(map #(update % :message/channel first))))
(defn dissoc-db-id [data]
(mapv (fn [m]
(dissoc (into {} m) :db/id))
data))
;; threaded-messages
(let [db (db)
channels #{"datomic"}
thread 1 parent
thread 2 parent
"1518034517.000637" ;; non-threaded message
random message from another day
"1517924158.000577" ;; random thread message belonging to another day
]
chan-data (channel-data db channels)
msg-data (thread-messages-by-ids db channels ids)
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(->> (mapv dissoc-db-id
[user-data
chan-data
msg-data])
(pprint-str)
(spit "resources/clojurians-log/test-data/threaded-messages.edn")))
| null | https://raw.githubusercontent.com/clojureverse/clojurians-log-app/d34c9bd715e492c8f1899653548716d32c623fad/repl/test_data.clj | clojure | quiet-channels
threaded-messages
non-threaded message
random thread message belonging to another day | (ns test-data
(:require [datomic.api :as d]
[clojure.pprint :refer [pprint]]))
(defn pprint-str [val]
(with-out-str
(pprint val)))
(defn conn [] (-> reloaded.repl/system :datomic :conn))
(defn db [] (d/db (conn)))
(defn channel-data [db chans]
(for [chan chans]
(-> db
(d/entity [:channel/name chan])
(select-keys [:channel/slack-id
:channel/name
:channel/created
:channel/creator])
(update :channel/creator #(do [:user/slack-id (:user/slack-id %)])))))
(defn user-data [db users]
(for [user users]
(-> db
(d/entity [:user/slack-id user])
(d/touch))))
(defn message-data [db channels days]
(->> (d/q '[:find [(pull ?msg [:db/id
:message/key
:message/text
{:message/channel [:channel/slack-id]}
{:message/user [:user/slack-id]}
:message/ts
:message/day
#_:message/thread-ts
#_:message/thread-inst]) ...]
:in $ [?chans ...] [?days ...]
:where
[?chan :channel/name ?chans]
[?msg :message/day ?days]
[?msg :message/channel ?chan]]
db
channels
days)
(map #(update % :message/user first))
(map #(update % :message/channel first))))
;; two-channels-two-days
(let [db (db)
chan-data (channel-data db ["clojure" "clojurescript"])
msg-data (message-data db #{"clojure" "clojurescript"} ["2018-02-02" "2018-02-03"])
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(spit "resources/clojurians-log/test-data/two-channels-two-days.edn"
(pprint-str (mapv #(mapv (fn [m]
(dissoc (into {} m) :db/id))
%)
[user-data
chan-data
msg-data]))))
(let [db (db)
channels #{"jobs" "keechma" "reitit" "yada" "dirac"}
days #{"2018-02-01" "2018-02-02" "2018-02-03" "2018-02-04"}
chan-data (channel-data db channels)
msg-data (message-data db channels days)
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(spit "resources/clojurians-log/test-data/quiet-channels.edn"
(pprint-str (mapv #(mapv (fn [m]
(dissoc (into {} m) :db/id))
%)
[user-data
chan-data
msg-data]))))
(defn thread-messages-by-ids [db channels msg-ids]
(->> (d/q '[:find [(pull ?msg [:db/id
:message/key
:message/text
{:message/channel [:channel/slack-id]}
{:message/user [:user/slack-id]}
:message/ts
:message/day
:message/thread-ts
:message/thread-inst]) ...]
:in $ [?chans ...] [?msg-id ...]
:where
[?chan :channel/name ?chans]
[?msg :message/channel ?chan]
(or [?msg :message/ts ?msg-id]
[?msg :message/thread-ts ?msg-id])]
db
channels
msg-ids)
(map #(update % :message/user first))
(map #(update % :message/channel first))))
(defn dissoc-db-id [data]
(mapv (fn [m]
(dissoc (into {} m) :db/id))
data))
(let [db (db)
channels #{"datomic"}
        ids ["1518034517.000637"
             "1517924158.000577"]
chan-data (channel-data db channels)
msg-data (thread-messages-by-ids db channels ids)
user-data (user-data db (concat (map (comp last :message/user) msg-data)
(map (comp last :channel/creator) chan-data)))]
(->> (mapv dissoc-db-id
[user-data
chan-data
msg-data])
(pprint-str)
(spit "resources/clojurians-log/test-data/threaded-messages.edn")))
|
912aed9b39f2744f2a37af7ef9f562e5dede4d3d5d0c30aaa1104e92b2a339cd | dpiponi/Moodler | test_karplus_strong.hs | do
restart
root <- getRoot
let out = "out"
let keyboard = "keyboard"
let trigger = "trigger"
audio_saw0 <- new' "audio_saw"
audio_sin1 <- new' "audio_sin"
audio_square2 <- new' "audio_square"
audio_triangle3 <- new' "audio_triangle"
butterbp4 <- new' "butterbp"
butterhp5 <- new' "butterhp"
butterhp6 <- new' "butterhp"
butterlp7 <- new' "butterlp"
butterlp8 <- new' "butterlp"
delay10 <- new' "delay"
delay11 <- new' "delay"
delay9 <- new' "delay"
echo12 <- new' "echo"
exp_decay13 <- new' "exp_decay"
id14 <- new' "id"
id15 <- new' "id"
id16 <- new' "id"
id17 <- new' "id"
id18 <- new' "id"
id19 <- new' "id"
id20 <- new' "id"
id21 <- new' "id"
id22 <- new' "id"
id23 <- new' "id"
id24 <- new' "id"
id25 <- new' "id"
id26 <- new' "id"
id365 <- new' "id"
input27 <- new' "input"
input28 <- new' "input"
input29 <- new' "input"
input30 <- new' "input"
input31 <- new' "input"
input32 <- new' "input"
input33 <- new' "input"
input34 <- new' "input"
input35 <- new' "input"
input36 <- new' "input"
input368 <- new' "input"
input369 <- new' "input"
input37 <- new' "input"
input370 <- new' "input"
input371 <- new' "input"
input372 <- new' "input"
input373 <- new' "input"
input374 <- new' "input"
input375 <- new' "input"
input376 <- new' "input"
input377 <- new' "input"
input378 <- new' "input"
input379 <- new' "input"
input38 <- new' "input"
input380 <- new' "input"
input381 <- new' "input"
input382 <- new' "input"
input383 <- new' "input"
input384 <- new' "input"
input385 <- new' "input"
input386 <- new' "input"
input387 <- new' "input"
input388 <- new' "input"
input389 <- new' "input"
input39 <- new' "input"
input390 <- new' "input"
input391 <- new' "input"
input392 <- new' "input"
input393 <- new' "input"
input40 <- new' "input"
input41 <- new' "input"
input42 <- new' "input"
input43 <- new' "input"
input44 <- new' "input"
input45 <- new' "input"
input46 <- new' "input"
input47 <- new' "input"
input48 <- new' "input"
input49 <- new' "input"
input50 <- new' "input"
input51 <- new' "input"
input52 <- new' "input"
input53 <- new' "input"
input54 <- new' "input"
input55 <- new' "input"
input56 <- new' "input"
input57 <- new' "input"
input58 <- new' "input"
input59 <- new' "input"
input60 <- new' "input"
new "input" "keyboard"
let keyboard = "keyboard"
lfo61 <- new' "lfo"
lfo62 <- new' "lfo"
linear_mix63 <- new' "linear_mix"
linear_split64 <- new' "linear_split"
minus65 <- new' "minus"
noise66 <- new' "noise"
poisson67 <- new' "poisson"
random68 <- new' "random"
sequencer394 <- new' "sequencer"
signal_to_freq69 <- new' "signal_to_freq"
sum70 <- new' "sum"
sum71 <- new' "sum"
sum72 <- new' "sum"
sum73 <- new' "sum"
sum74 <- new' "sum"
sum75 <- new' "sum"
sum76 <- new' "sum"
sum77 <- new' "sum"
sum78 <- new' "sum"
sum79 <- new' "sum"
sum80 <- new' "sum"
new "input" "trigger"
let trigger = "trigger"
vca81 <- new' "vca"
vca82 <- new' "vca"
vca83 <- new' "vca"
vca84 <- new' "vca"
vca85 <- new' "vca"
vca86 <- new' "vca"
vca87 <- new' "vca"
vca88 <- new' "vca"
vca89 <- new' "vca"
vca90 <- new' "vca"
vca91 <- new' "vca"
vca92 <- new' "vca"
vca93 <- new' "vca"
vca94 <- new' "vca"
vca95 <- new' "vca"
container102 <- container' "panel_3x1.png" (-156.0,-504.0) (Inside root)
in103 <- plugin' (echo12 ! "signal") (-177.0,-504.0) (Outside container102)
setColour in103 "#sample"
label104 <- label' "echo" (-181.0,-429.0) (Outside container102)
out105 <- plugout' (echo12 ! "result") (-136.0,-504.0) (Outside container102)
setColour out105 "#sample"
container106 <- container' "panel_out.png" (444.0,24.0) (Inside root)
in107 <- plugin' (out ! "value") (420.0,24.0) (Outside container106)
setOutput in107
setColour in107 "#sample"
container108 <- container' "panel_delay.png" (228.0,132.0) (Inside root)
in109 <- plugin' (sum80 ! "signal1") (180.0,12.0) (Outside container108)
setColour in109 "#sample"
in110 <- plugin' (sum80 ! "signal2") (161.0,-6.0) (Outside container108)
setColour in110 "#sample"
hide in110
in111 <- plugin' (vca94 ! "cv") (201.0,0.0) (Outside container108)
setColour in111 "#sample"
hide in111
in112 <- plugin' (vca94 ! "signal") (185.0,-10.0) (Outside container108)
setColour in112 "#sample"
hide in112
in113 <- plugin' (vca95 ! "signal") (204.0,18.0) (Outside container108)
setColour in113 "#sample"
hide in113
in114 <- plugin' (delay10 ! "delay") (252.0,216.0) (Outside container108)
setColour in114 "#control"
hide in114
in115 <- plugin' (delay10 ! "signal") (201.0,-10.0) (Outside container108)
setColour in115 "#sample"
hide in115
knob116 <- knob' (input50 ! "result") (252.0,216.0) (Outside container108)
knob117 <- knob' (input33 ! "result") (252.0,168.0) (Outside container108)
knob118 <- knob' (input35 ! "result") (252.0,120.0) (Outside container108)
out119 <- plugout' (delay10 ! "result") (183.0,23.0) (Outside container108)
setColour out119 "#sample"
hide out119
out120 <- plugout' (sum80 ! "result") (169.0,17.0) (Outside container108)
setColour out120 "#sample"
hide out120
out121 <- plugout' (vca94 ! "result") (165.0,21.0) (Outside container108)
setColour out121 "#sample"
hide out121
out122 <- plugout' (vca95 ! "result") (276.0,12.0) (Outside container108)
setColour out122 "#sample"
container123 <- container' "panel_3x1.png" (-60.0,24.0) (Inside root)
in124 <- plugin' (exp_decay13 ! "decay_time") (-81.0,49.0) (Outside container123)
setColour in124 "#control"
hide in124
in125 <- plugin' (exp_decay13 ! "trigger") (-81.0,-1.0) (Outside container123)
setColour in125 "#control"
knob126 <- knob' (input51 ! "result") (-81.0,49.0) (Outside container123)
label127 <- label' "exp_decay" (-85.0,99.0) (Outside container123)
out128 <- plugout' (exp_decay13 ! "result") (-40.0,24.0) (Outside container123)
setColour out128 "#control"
container129 <- container' "panel_lfo.png" (-612.0,-120.0) (Inside root)
in130 <- plugin' (lfo62 ! "rate") (-615.0,-65.0) (Outside container129)
setColour in130 "#control"
hide in130
in131 <- plugin' (lfo62 ! "sync") (-600.0,-96.0) (Outside container129)
setColour in131 "#control"
knob132 <- knob' (input48 ! "result") (-600.0,-48.0) (Outside container129)
out133 <- plugout' (lfo62 ! "triangle") (-624.0,-240.0) (Outside container129)
setColour out133 "#control"
out134 <- plugout' (lfo62 ! "saw") (-564.0,-240.0) (Outside container129)
setColour out134 "#control"
out135 <- plugout' (lfo62 ! "sin_result") (-624.0,-204.0) (Outside container129)
setColour out135 "#control"
out136 <- plugout' (lfo62 ! "square_result") (-564.0,-204.0) (Outside container129)
setColour out136 "#control"
container137 <- container' "panel_3x1.png" (252.0,-216.0) (Inside root)
in138 <- plugin' (delay11 ! "delay") (231.0,-191.0) (Outside container137)
setColour in138 "#control"
in139 <- plugin' (delay11 ! "signal") (231.0,-241.0) (Outside container137)
setColour in139 "#sample"
label140 <- label' "delay" (227.0,-141.0) (Outside container137)
out141 <- plugout' (delay11 ! "result") (272.0,-216.0) (Outside container137)
setColour out141 "#sample"
container142 <- container' "panel_lfo.png" (-456.0,-228.0) (Inside root)
in143 <- plugin' (lfo61 ! "sync") (-444.0,-204.0) (Outside container142)
setColour in143 "#control"
in144 <- plugin' (lfo61 ! "rate") (-459.0,-173.0) (Outside container142)
setColour in144 "#control"
hide in144
knob145 <- knob' (input32 ! "result") (-444.0,-156.0) (Outside container142)
out146 <- plugout' (lfo61 ! "triangle") (-468.0,-348.0) (Outside container142)
setColour out146 "#control"
out147 <- plugout' (lfo61 ! "saw") (-408.0,-348.0) (Outside container142)
setColour out147 "#control"
out148 <- plugout' (lfo61 ! "sin_result") (-468.0,-312.0) (Outside container142)
setColour out148 "#control"
out149 <- plugout' (lfo61 ! "square_result") (-408.0,-312.0) (Outside container142)
setColour out149 "#control"
container150 <- container' "panel_gain.png" (-156.0,-192.0) (Inside root)
in151 <- plugin' (vca84 ! "cv") (-180.0,-192.0) (Outside container150)
setColour in151 "#control"
hide in151
in152 <- plugin' (vca84 ! "signal") (-216.0,-192.0) (Outside container150)
setColour in152 "#sample"
knob153 <- knob' (input34 ! "result") (-180.0,-192.0) (Outside container150)
out154 <- plugout' (vca84 ! "result") (-96.0,-192.0) (Outside container150)
setColour out154 "#sample"
container155 <- container' "panel_3x1.png" (-300.0,-204.0) (Inside root)
in156 <- plugin' (sum71 ! "signal1") (-321.0,-179.0) (Outside container155)
setColour in156 "#sample"
hide in156
in157 <- plugin' (sum71 ! "signal2") (-321.0,-229.0) (Outside container155)
setColour in157 "#sample"
knob158 <- knob' (input57 ! "result") (-321.0,-179.0) (Outside container155)
label159 <- label' "sum" (-325.0,-129.0) (Outside container155)
out160 <- plugout' (sum71 ! "result") (-280.0,-204.0) (Outside container155)
setColour out160 "#sample"
container161 <- container' "panel_filter.png" (240.0,-636.0) (Inside root)
in162 <- plugin' (vca85 ! "cv") (228.0,-516.0) (Outside container161)
setColour in162 "#sample"
hide in162
in163 <- plugin' (vca85 ! "signal") (180.0,-576.0) (Outside container161)
setColour in163 "#control"
in164 <- plugin' (vca86 ! "cv") (223.0,-638.0) (Outside container161)
setColour in164 "#sample"
hide in164
in165 <- plugin' (vca86 ! "signal") (180.0,-636.0) (Outside container161)
setColour in165 "#control"
in166 <- plugin' (vca87 ! "cv") (221.0,-754.0) (Outside container161)
setColour in166 "#sample"
hide in166
in167 <- plugin' (vca87 ! "signal") (180.0,-696.0) (Outside container161)
setColour in167 "#control"
in168 <- plugin' (id14 ! "signal") (180.0,-516.0) (Outside container161)
setColour in168 "#control"
knob169 <- knob' (input41 ! "result") (216.0,-696.0) (Outside container161)
knob170 <- knob' (input42 ! "result") (264.0,-696.0) (Outside container161)
setLow knob170 (Just (-1.0))
setHigh knob170 (Just (1.0))
knob171 <- knob' (input43 ! "result") (264.0,-744.0) (Outside container161)
setLow knob171 (Just (1.0))
setHigh knob171 (Just (1000.0))
knob172 <- knob' (input46 ! "result") (264.0,-576.0) (Outside container161)
setLow knob172 (Just (-1.0))
setHigh knob172 (Just (1.0))
knob173 <- knob' (input38 ! "result") (216.0,-576.0) (Outside container161)
knob174 <- knob' (input39 ! "result") (216.0,-636.0) (Outside container161)
knob175 <- knob' (input40 ! "result") (264.0,-636.0) (Outside container161)
setLow knob175 (Just (-1.0))
setHigh knob175 (Just (1.0))
out176 <- plugout' (butterbp4 ! "result") (300.0,-696.0) (Outside container161)
setColour out176 "#sample"
out177 <- plugout' (butterlp8 ! "result") (300.0,-576.0) (Outside container161)
setColour out177 "#sample"
out178 <- plugout' (butterhp6 ! "result") (300.0,-636.0) (Outside container161)
setColour out178 "#sample"
proxy179 <- proxy' (289.0,-526.0) (Outside container161)
hide proxy179
in180 <- plugin' (sum72 ! "signal2") (-129.0,144.0) (Inside proxy179)
setColour in180 "#sample"
hide in180
in181 <- plugin' (sum73 ! "signal1") (-134.0,73.0) (Inside proxy179)
setColour in181 "#sample"
in182 <- plugin' (sum73 ! "signal2") (-134.0,23.0) (Inside proxy179)
setColour in182 "#sample"
hide in182
in183 <- plugin' (sum74 ! "signal1") (-140.0,-40.0) (Inside proxy179)
setColour in183 "#sample"
in184 <- plugin' (sum74 ! "signal2") (-140.0,-90.0) (Inside proxy179)
setColour in184 "#sample"
hide in184
in185 <- plugin' (butterlp8 ! "freq") (-43.0,192.0) (Inside proxy179)
setColour in185 "#sample"
in186 <- plugin' (butterlp8 ! "signal") (-43.0,142.0) (Inside proxy179)
setColour in186 "#sample"
in187 <- plugin' (butterhp6 ! "freq") (-47.0,72.0) (Inside proxy179)
setColour in187 "#sample"
in188 <- plugin' (butterhp6 ! "signal") (-47.0,22.0) (Inside proxy179)
setColour in188 "#sample"
in189 <- plugin' (butterbp4 ! "freq") (-55.0,-41.0) (Inside proxy179)
setColour in189 "#sample"
in190 <- plugin' (butterbp4 ! "bandwidth") (-55.0,-91.0) (Inside proxy179)
setColour in190 "#sample"
hide in190
in191 <- plugin' (butterbp4 ! "signal") (-55.0,-141.0) (Inside proxy179)
setColour in191 "#sample"
in192 <- plugin' (sum72 ! "signal1") (-129.0,194.0) (Inside proxy179)
setColour in192 "#sample"
out193 <- plugout' (sum72 ! "result") (-79.0,194.0) (Inside proxy179)
setColour out193 "#sample"
out194 <- plugout' (sum73 ! "result") (-84.0,73.0) (Inside proxy179)
setColour out194 "#sample"
out195 <- plugout' (sum74 ! "result") (-90.0,-40.0) (Inside proxy179)
setColour out195 "#sample"
out196 <- plugout' (vca85 ! "result") (-200.0,196.0) (Inside proxy179)
setColour out196 "#sample"
out197 <- plugout' (vca86 ! "result") (-205.0,74.0) (Inside proxy179)
setColour out197 "#sample"
out198 <- plugout' (vca87 ! "result") (-207.0,-42.0) (Inside proxy179)
setColour out198 "#sample"
out199 <- plugout' (id14 ! "result") (-163.0,279.0) (Inside proxy179)
setColour out199 "#sample"
container200 <- container' "panel_3x1.png" (444.0,-216.0) (Inside root)
in201 <- plugin' (linear_mix63 ! "gain") (423.0,-166.0) (Outside container200)
setColour in201 "#control"
hide in201
in202 <- plugin' (linear_mix63 ! "signal1") (423.0,-216.0) (Outside container200)
setColour in202 "#sample"
in203 <- plugin' (linear_mix63 ! "signal2") (423.0,-266.0) (Outside container200)
setColour in203 "#sample"
knob204 <- knob' (input60 ! "result") (423.0,-166.0) (Outside container200)
label205 <- label' "linear_mix" (419.0,-141.0) (Outside container200)
out206 <- plugout' (linear_mix63 ! "result") (464.0,-216.0) (Outside container200)
setColour out206 "#sample"
container207 <- container' "panel_vco2.png" (444.0,-564.0) (Inside root)
in208 <- plugin' (id22 ! "signal") (480.0,-528.0) (Outside container207)
setColour in208 "#control"
in209 <- plugin' (id23 ! "signal") (457.0,-484.0) (Outside container207)
setColour in209 "#sample"
hide in209
in210 <- plugin' (id16 ! "signal") (475.0,-561.0) (Outside container207)
setColour in210 "#sample"
hide in210
in211 <- plugin' (id17 ! "signal") (480.0,-600.0) (Outside container207)
setColour in211 "#control"
knob212 <- knob' (input45 ! "result") (480.0,-564.0) (Outside container207)
knob213 <- knob' (input44 ! "result") (480.0,-492.0) (Outside container207)
out214 <- plugout' (id21 ! "result") (420.0,-648.0) (Outside container207)
setColour out214 "#sample"
out215 <- plugout' (id18 ! "result") (492.0,-648.0) (Outside container207)
setColour out215 "#sample"
out216 <- plugout' (id19 ! "result") (420.0,-684.0) (Outside container207)
setColour out216 "#sample"
out217 <- plugout' (id20 ! "result") (492.0,-684.0) (Outside container207)
setColour out217 "#sample"
proxy218 <- proxy' (397.0,-482.0) (Outside container207)
hide proxy218
container219 <- container' "panel_3x1.png" (-815.0,439.0) (Inside proxy218)
in220 <- plugin' (sum70 ! "signal2") (-836.0,414.0) (Outside container219)
setColour in220 "#sample"
in221 <- plugin' (sum70 ! "signal1") (-836.0,464.0) (Outside container219)
setColour in221 "#sample"
label222 <- label' "sum" (-840.0,514.0) (Outside container219)
out223 <- plugout' (sum70 ! "result") (-795.0,439.0) (Outside container219)
setColour out223 "#sample"
container224 <- container' "panel_3x1.png" (-434.0,420.0) (Inside proxy218)
in225 <- plugin' (audio_triangle3 ! "freq") (-455.0,445.0) (Outside container224)
setColour in225 "#sample"
in226 <- plugin' (audio_triangle3 ! "sync") (-455.0,395.0) (Outside container224)
setColour in226 "#sample"
label227 <- label' "audio_triangle" (-459.0,495.0) (Outside container224)
out228 <- plugout' (audio_triangle3 ! "result") (-414.0,420.0) (Outside container224)
setColour out228 "#sample"
container229 <- container' "panel_3x1.png" (-318.0,291.0) (Inside proxy218)
in230 <- plugin' (audio_saw0 ! "freq") (-339.0,316.0) (Outside container229)
setColour in230 "#sample"
in231 <- plugin' (audio_saw0 ! "sync") (-339.0,266.0) (Outside container229)
setColour in231 "#sample"
label232 <- label' "audio_saw" (-343.0,366.0) (Outside container229)
out233 <- plugout' (audio_saw0 ! "result") (-298.0,291.0) (Outside container229)
setColour out233 "#sample"
container234 <- container' "panel_3x1.png" (-691.0,453.0) (Inside proxy218)
in235 <- plugin' (audio_sin1 ! "freq") (-712.0,478.0) (Outside container234)
setColour in235 "#sample"
in236 <- plugin' (audio_sin1 ! "sync") (-712.0,428.0) (Outside container234)
setColour in236 "#sample"
label237 <- label' "audio_sin" (-716.0,528.0) (Outside container234)
out238 <- plugout' (audio_sin1 ! "result") (-671.0,453.0) (Outside container234)
setColour out238 "#sample"
container239 <- container' "panel_3x1.png" (-826.0,199.0) (Inside proxy218)
in240 <- plugin' (audio_square2 ! "pwm") (-847.0,199.0) (Outside container239)
setColour in240 "#sample"
in241 <- plugin' (audio_square2 ! "sync") (-847.0,149.0) (Outside container239)
setColour in241 "#sample"
in242 <- plugin' (audio_square2 ! "freq") (-847.0,249.0) (Outside container239)
setColour in242 "#sample"
label243 <- label' "audio_square" (-851.0,274.0) (Outside container239)
out244 <- plugout' (audio_square2 ! "result") (-806.0,199.0) (Outside container239)
setColour out244 "#sample"
in245 <- plugin' (id18 ! "signal") (-753.0,198.0) (Inside proxy218)
setColour in245 "#sample"
in246 <- plugin' (id19 ! "signal") (-360.0,422.0) (Inside proxy218)
setColour in246 "#sample"
in247 <- plugin' (id20 ! "signal") (-247.0,292.0) (Inside proxy218)
setColour in247 "#sample"
in248 <- plugin' (id21 ! "signal") (-556.0,449.0) (Inside proxy218)
setColour in248 "#sample"
out249 <- plugout' (id22 ! "result") (-891.0,413.0) (Inside proxy218)
setColour out249 "#sample"
out250 <- plugout' (id23 ! "result") (-892.0,469.0) (Inside proxy218)
setColour out250 "#sample"
out251 <- plugout' (id16 ! "result") (-894.0,199.0) (Inside proxy218)
setColour out251 "#sample"
out252 <- plugout' (id17 ! "result") (-893.0,146.0) (Inside proxy218)
setColour out252 "#sample"
container253 <- container' "panel_3x1.png" (-252.0,-504.0) (Inside root)
in254 <- plugin' (minus65 ! "signal1") (-273.0,-479.0) (Outside container253)
setColour in254 "#sample"
in255 <- plugin' (minus65 ! "signal2") (-273.0,-529.0) (Outside container253)
setColour in255 "#sample"
label256 <- label' "minus" (-277.0,-429.0) (Outside container253)
out257 <- plugout' (minus65 ! "result") (-232.0,-504.0) (Outside container253)
setColour out257 "#sample"
container258 <- container' "panel_knob.png" (-612.0,-732.0) (Inside root)
in259 <- plugin' (id24 ! "signal") (-624.0,-732.0) (Outside container258)
setColour in259 "#control"
hide in259
knob260 <- knob' (input52 ! "result") (-624.0,-732.0) (Outside container258)
out261 <- plugout' (id24 ! "result") (-588.0,-732.0) (Outside container258)
setColour out261 "#control"
container262 <- container' "panel_knob.png" (-264.0,-660.0) (Inside root)
in263 <- plugin' (id25 ! "signal") (-276.0,-660.0) (Outside container262)
setColour in263 "#control"
hide in263
knob264 <- knob' (input53 ! "result") (-276.0,-660.0) (Outside container262)
out265 <- plugout' (id25 ! "result") (-240.0,-660.0) (Outside container262)
setColour out265 "#control"
container266 <- container' "panel_gain.png" (564.0,-324.0) (Inside root)
in267 <- plugin' (vca81 ! "cv") (540.0,-324.0) (Outside container266)
setColour in267 "#control"
hide in267
in268 <- plugin' (vca81 ! "signal") (504.0,-324.0) (Outside container266)
setColour in268 "#sample"
knob269 <- knob' (input55 ! "result") (540.0,-324.0) (Outside container266)
out270 <- plugout' (vca81 ! "result") (624.0,-324.0) (Outside container266)
setColour out270 "#sample"
container271 <- container' "panel_knob.png" (-144.0,-288.0) (Inside root)
in272 <- plugin' (id15 ! "signal") (-156.0,-288.0) (Outside container271)
setColour in272 "#control"
hide in272
knob273 <- knob' (input49 ! "result") (-156.0,-288.0) (Outside container271)
out274 <- plugout' (id15 ! "result") (-120.0,-288.0) (Outside container271)
setColour out274 "#control"
container275 <- container' "panel_keyboard.png" (-456.0,36.0) (Inside root)
out276 <- plugout' (keyboard ! "result") (-396.0,60.0) (Outside container275)
setColour out276 "#control"
out277 <- plugout' (trigger ! "result") (-396.0,12.0) (Outside container275)
setColour out277 "#control"
container278 <- container' "panel_3x1.png" (72.0,84.0) (Inside root)
in279 <- plugin' (vca88 ! "cv") (51.0,109.0) (Outside container278)
setColour in279 "#control"
in280 <- plugin' (vca88 ! "signal") (51.0,59.0) (Outside container278)
setColour in280 "#sample"
label281 <- label' "vca" (47.0,159.0) (Outside container278)
out282 <- plugout' (vca88 ! "result") (92.0,84.0) (Outside container278)
setColour out282 "#sample"
container283 <- container' "panel_3x1.png" (-12.0,-216.0) (Inside root)
in284 <- plugin' (delay9 ! "delay") (-33.0,-191.0) (Outside container283)
setColour in284 "#control"
in285 <- plugin' (delay9 ! "signal") (-33.0,-241.0) (Outside container283)
setColour in285 "#sample"
label286 <- label' "delay" (-37.0,-141.0) (Outside container283)
out287 <- plugout' (delay9 ! "result") (8.0,-216.0) (Outside container283)
setColour out287 "#sample"
container288 <- container' "panel_3x1.png" (120.0,-216.0) (Inside root)
in289 <- plugin' (sum75 ! "signal1") (99.0,-191.0) (Outside container288)
setColour in289 "#sample"
in290 <- plugin' (sum75 ! "signal2") (99.0,-241.0) (Outside container288)
setColour in290 "#sample"
label291 <- label' "sum" (95.0,-141.0) (Outside container288)
out292 <- plugout' (sum75 ! "result") (140.0,-216.0) (Outside container288)
setColour out292 "#sample"
container293 <- container' "panel_gain.png" (192.0,-420.0) (Inside root)
in294 <- plugin' (vca89 ! "cv") (168.0,-420.0) (Outside container293)
setColour in294 "#control"
hide in294
in295 <- plugin' (vca89 ! "signal") (132.0,-420.0) (Outside container293)
setColour in295 "#sample"
knob296 <- knob' (input28 ! "result") (168.0,-420.0) (Outside container293)
out297 <- plugout' (vca89 ! "result") (252.0,-420.0) (Outside container293)
setColour out297 "#sample"
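-- Random-source panel: noise, random, and poisson generators with their filter and VCA plumbing hidden behind the panel.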
container298 <- container' "panel_random.png" (-240.0,96.0) (Inside root)
in299 <- plugin' (sum77 ! "signal2") (-245.0,23.0) (Outside container298)
setColour in299 "#sample"
hide in299
in300 <- plugin' (vca92 ! "cv") (-266.0,22.0) (Outside container298)
setColour in300 "#sample"
hide in300
in301 <- plugin' (vca92 ! "signal") (-324.0,12.0) (Outside container298)
setColour in301 "#control"
in302 <- plugin' (poisson67 ! "rate") (-151.0,28.0) (Outside container298)
setColour in302 "#sample"
hide in302
in303 <- plugin' (sum78 ! "signal1") (-225.0,149.0) (Outside container298)
setColour in303 "#sample"
hide in303
in304 <- plugin' (sum78 ! "signal2") (-152.0,152.0) (Outside container298)
setColour in304 "#sample"
hide in304
in305 <- plugin' (vca93 ! "cv") (-240.0,104.0) (Outside container298)
setColour in305 "#sample"
hide in305
in306 <- plugin' (vca93 ! "signal") (-324.0,156.0) (Outside container298)
setColour in306 "#control"
in307 <- plugin' (butterlp7 ! "freq") (-150.0,164.0) (Outside container298)
setColour in307 "#sample"
hide in307
in308 <- plugin' (butterlp7 ! "signal") (-213.0,145.0) (Outside container298)
setColour in308 "#sample"
hide in308
in309 <- plugin' (butterhp5 ! "freq") (-151.0,115.0) (Outside container298)
setColour in309 "#sample"
hide in309
in310 <- plugin' (butterhp5 ! "signal") (-153.0,109.0) (Outside container298)
setColour in310 "#sample"
hide in310
in311 <- plugin' (sum79 ! "signal1") (-272.0,106.0) (Outside container298)
setColour in311 "#sample"
hide in311
in312 <- plugin' (sum79 ! "signal2") (-153.0,104.0) (Outside container298)
setColour in312 "#sample"
hide in312
in313 <- plugin' (random68 ! "rate") (-203.0,60.0) (Outside container298)
setColour in313 "#sample"
hide in313
in314 <- plugin' (vca90 ! "cv") (-243.0,103.0) (Outside container298)
setColour in314 "#sample"
hide in314
in315 <- plugin' (vca90 ! "signal") (-324.0,108.0) (Outside container298)
setColour in315 "#control"
in316 <- plugin' (sum76 ! "signal1") (-217.0,66.0) (Outside container298)
setColour in316 "#sample"
hide in316
in317 <- plugin' (sum76 ! "signal2") (-149.0,60.0) (Outside container298)
setColour in317 "#sample"
hide in317
in318 <- plugin' (vca91 ! "cv") (-318.0,72.0) (Outside container298)
setColour in318 "#sample"
hide in318
in319 <- plugin' (vca91 ! "signal") (-324.0,60.0) (Outside container298)
setColour in319 "#control"
in320 <- plugin' (sum77 ! "signal1") (-211.0,21.0) (Outside container298)
setColour in320 "#sample"
hide in320
knob321 <- knob' (input27 ! "result") (-240.0,60.0) (Outside container298)
knob322 <- knob' (input29 ! "result") (-288.0,60.0) (Outside container298)
knob323 <- knob' (input30 ! "result") (-240.0,12.0) (Outside container298)
knob324 <- knob' (input31 ! "result") (-288.0,12.0) (Outside container298)
knob325 <- knob' (input36 ! "result") (-240.0,156.0) (Outside container298)
knob326 <- knob' (input47 ! "result") (-288.0,156.0) (Outside container298)
knob327 <- knob' (input54 ! "result") (-240.0,108.0) (Outside container298)
knob328 <- knob' (input56 ! "result") (-288.0,108.0) (Outside container298)
out329 <- plugout' (butterlp7 ! "result") (-144.0,156.0) (Outside container298)
setColour out329 "#sample"
out330 <- plugout' (butterhp5 ! "result") (-144.0,108.0) (Outside container298)
setColour out330 "#sample"
out331 <- plugout' (sum79 ! "result") (-226.0,108.0) (Outside container298)
setColour out331 "#sample"
hide out331
out332 <- plugout' (vca90 ! "result") (-247.0,99.0) (Outside container298)
setColour out332 "#sample"
hide out332
out333 <- plugout' (sum76 ! "result") (-232.0,61.0) (Outside container298)
setColour out333 "#sample"
hide out333
out334 <- plugout' (vca91 ! "result") (-244.0,58.0) (Outside container298)
setColour out334 "#sample"
hide out334
out335 <- plugout' (sum77 ! "result") (-229.0,22.0) (Outside container298)
setColour out335 "#sample"
hide out335
out336 <- plugout' (vca92 ! "result") (-245.0,22.0) (Outside container298)
setColour out336 "#sample"
hide out336
out337 <- plugout' (random68 ! "result") (-144.0,60.0) (Outside container298)
setColour out337 "#control"
out338 <- plugout' (poisson67 ! "trigger") (-144.0,12.0) (Outside container298)
setColour out338 "#control"
out339 <- plugout' (sum78 ! "result") (-232.0,159.0) (Outside container298)
setColour out339 "#sample"
hide out339
out340 <- plugout' (vca93 ! "result") (-243.0,150.0) (Outside container298)
setColour out340 "#sample"
hide out340
out341 <- plugout' (noise66 ! "result") (-144.0,204.0) (Outside container298)
setColour out341 "#sample"
container342 <- container' "panel_3x1.png" (-156.0,-756.0) (Inside root)
in343 <- plugin' (linear_split64 ! "gain") (-177.0,-731.0) (Outside container342)
setColour in343 "#control"
in344 <- plugin' (linear_split64 ! "signal") (-177.0,-781.0) (Outside container342)
setColour in344 "#sample"
label345 <- label' "linear_split" (-181.0,-681.0) (Outside container342)
out346 <- plugout' (linear_split64 ! "result1") (-136.0,-731.0) (Outside container342)
setColour out346 "#sample"
out347 <- plugout' (linear_split64 ! "result2") (-136.0,-781.0) (Outside container342)
setColour out347 "#sample"
container348 <- container' "panel_knob.png" (-324.0,-936.0) (Inside root)
in349 <- plugin' (id26 ! "signal") (-336.0,-936.0) (Outside container348)
setColour in349 "#control"
hide in349
knob350 <- knob' (input37 ! "result") (-336.0,-936.0) (Outside container348)
out351 <- plugout' (id26 ! "result") (-300.0,-936.0) (Outside container348)
setColour out351 "#control"
container352 <- container' "panel_3x1.png" (-492.0,-504.0) (Inside root)
in353 <- plugin' (signal_to_freq69 ! "signal") (-513.0,-504.0) (Outside container352)
setColour in353 "#control"
label354 <- label' "signal_to_freq" (-517.0,-429.0) (Outside container352)
out355 <- plugout' (signal_to_freq69 ! "freq") (-472.0,-479.0) (Outside container352)
setColour out355 "#control"
out356 <- plugout' (signal_to_freq69 ! "rate") (-472.0,-529.0) (Outside container352)
setColour out356 "#control"
container357 <- container' "panel_3x1.png" (-792.0,-588.0) (Inside root)
in358 <- plugin' (vca82 ! "signal") (-813.0,-613.0) (Outside container357)
setColour in358 "#sample"
in359 <- plugin' (vca82 ! "cv") (-813.0,-563.0) (Outside container357)
setColour in359 "#control"
hide in359
knob360 <- knob' (input58 ! "result") (-813.0,-563.0) (Outside container357)
label361 <- label' "vca" (-817.0,-513.0) (Outside container357)
out362 <- plugout' (vca82 ! "result") (-772.0,-588.0) (Outside container357)
setColour out362 "#sample"
container363 <- container' "panel_3x1.png" (-492.0,-888.0) (Inside root)
in366 <- plugin' (id365 ! "signal") (-513.0,-888.0) (Outside container363)
setColour in366 "#control"
label364 <- label' "id" (-517.0,-813.0) (Outside container363)
out367 <- plugout' (id365 ! "result") (-472.0,-888.0) (Outside container363)
setColour out367 "#control"
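-- Sequencer panel: eight steps, each with a frequency knob plus pulse and mode selectors, and controls for length, slide rate, gate, and add.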
container395 <- container' "panel_sequencer.png" (168.0,-972.0) (Inside root)
in396 <- plugin' (sequencer394 ! "freq2") (24.0,-919.0) (Outside container395)
setColour in396 "#sample"
hide in396
in397 <- plugin' (sequencer394 ! "pulse2") (67.0,-924.0) (Outside container395)
setColour in397 "#sample"
hide in397
in398 <- plugin' (sequencer394 ! "mode2") (103.0,-923.0) (Outside container395)
setColour in398 "#sample"
hide in398
in399 <- plugin' (sequencer394 ! "freq3") (24.0,-969.0) (Outside container395)
setColour in399 "#sample"
hide in399
in400 <- plugin' (sequencer394 ! "pulse3") (67.0,-974.0) (Outside container395)
setColour in400 "#sample"
hide in400
in401 <- plugin' (sequencer394 ! "mode3") (103.0,-973.0) (Outside container395)
setColour in401 "#sample"
hide in401
in402 <- plugin' (sequencer394 ! "freq4") (24.0,-1019.0) (Outside container395)
setColour in402 "#sample"
hide in402
in403 <- plugin' (sequencer394 ! "pulse4") (67.0,-1024.0) (Outside container395)
setColour in403 "#sample"
hide in403
in404 <- plugin' (sequencer394 ! "mode4") (103.0,-1023.0) (Outside container395)
setColour in404 "#sample"
hide in404
in405 <- plugin' (sequencer394 ! "freq5") (194.0,-871.0) (Outside container395)
setColour in405 "#sample"
hide in405
in406 <- plugin' (sequencer394 ! "pulse5") (239.0,-867.0) (Outside container395)
setColour in406 "#sample"
hide in406
in407 <- plugin' (sequencer394 ! "mode5") (276.0,-870.0) (Outside container395)
setColour in407 "#sample"
hide in407
in408 <- plugin' (sequencer394 ! "freq6") (194.0,-921.0) (Outside container395)
setColour in408 "#sample"
hide in408
in409 <- plugin' (sequencer394 ! "pulse6") (239.0,-917.0) (Outside container395)
setColour in409 "#sample"
hide in409
in410 <- plugin' (sequencer394 ! "mode6") (276.0,-920.0) (Outside container395)
setColour in410 "#sample"
hide in410
in411 <- plugin' (sequencer394 ! "freq1") (24.0,-869.0) (Outside container395)
setColour in411 "#sample"
hide in411
in412 <- plugin' (sequencer394 ! "freq7") (194.0,-971.0) (Outside container395)
setColour in412 "#sample"
hide in412
in413 <- plugin' (sequencer394 ! "pulse7") (239.0,-967.0) (Outside container395)
setColour in413 "#sample"
hide in413
in414 <- plugin' (sequencer394 ! "mode7") (276.0,-970.0) (Outside container395)
setColour in414 "#sample"
hide in414
in415 <- plugin' (sequencer394 ! "freq8") (194.0,-1021.0) (Outside container395)
setColour in415 "#sample"
hide in415
in416 <- plugin' (sequencer394 ! "pulse8") (239.0,-1017.0) (Outside container395)
setColour in416 "#sample"
hide in416
in417 <- plugin' (sequencer394 ! "mode8") (276.0,-1020.0) (Outside container395)
setColour in417 "#sample"
hide in417
in418 <- plugin' (sequencer394 ! "gate") (36.0,-1104.0) (Outside container395)
setColour in418 "#control"
in419 <- plugin' (sequencer394 ! "add") (36.0,-1068.0) (Outside container395)
setColour in419 "#control"
in420 <- plugin' (sequencer394 ! "slide_rate") (197.0,-1118.0) (Outside container395)
setColour in420 "#sample"
hide in420
in421 <- plugin' (sequencer394 ! "length") (197.0,-1068.0) (Outside container395)
setColour in421 "#sample"
hide in421
in422 <- plugin' (sequencer394 ! "pulse1") (67.0,-874.0) (Outside container395)
setColour in422 "#sample"
hide in422
in423 <- plugin' (sequencer394 ! "mode1") (103.0,-873.0) (Outside container395)
setColour in423 "#sample"
hide in423
knob424 <- knob' (input378 ! "result") (24.0,-924.0) (Outside container395)
knob425 <- knob' (input381 ! "result") (24.0,-972.0) (Outside container395)
knob426 <- knob' (input385 ! "result") (24.0,-1020.0) (Outside container395)
knob427 <- knob' (input388 ! "result") (192.0,-876.0) (Outside container395)
knob428 <- knob' (input391 ! "result") (192.0,-924.0) (Outside container395)
knob429 <- knob' (input368 ! "result") (192.0,-972.0) (Outside container395)
knob430 <- knob' (input372 ! "result") (192.0,-1020.0) (Outside container395)
knob431 <- knob' (input384 ! "result") (24.0,-876.0) (Outside container395)
knob432 <- knob' (input376 ! "result") (180.0,-1104.0) (Outside container395)
out433 <- plugout' (sequencer394 ! "result") (324.0,-1104.0) (Outside container395)
setColour out433 "#control"
out434 <- plugout' (sequencer394 ! "trigger") (324.0,-1068.0) (Outside container395)
setColour out434 "#control"
selector435 <- selector' (input379 ! "result") (60.0,-924.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector436 <- selector' (input380 ! "result") (96.0,-924.0) ["repeat","slide","rest","hold"] (Outside container395)
selector437 <- selector' (input382 ! "result") (60.0,-972.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector438 <- selector' (input383 ! "result") (96.0,-972.0) ["repeat","slide","rest","hold"] (Outside container395)
selector439 <- selector' (input386 ! "result") (60.0,-1020.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector440 <- selector' (input387 ! "result") (96.0,-1020.0) ["repeat","slide","rest","hold"] (Outside container395)
selector441 <- selector' (input389 ! "result") (228.0,-876.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector442 <- selector' (input390 ! "result") (264.0,-876.0) ["repeat","slide","rest","hold"] (Outside container395)
selector443 <- selector' (input392 ! "result") (228.0,-924.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector444 <- selector' (input393 ! "result") (264.0,-924.0) ["repeat","slide","rest","hold"] (Outside container395)
selector445 <- selector' (input370 ! "result") (228.0,-972.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector446 <- selector' (input371 ! "result") (264.0,-972.0) ["repeat","slide","rest","hold"] (Outside container395)
selector447 <- selector' (input373 ! "result") (228.0,-1020.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector448 <- selector' (input374 ! "result") (264.0,-1020.0) ["repeat","slide","rest","hold"] (Outside container395)
selector449 <- selector' (input375 ! "result") (180.0,-1068.0) ["1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16"] (Outside container395)
selector450 <- selector' (input369 ! "result") (60.0,-876.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector451 <- selector' (input377 ! "result") (96.0,-876.0) ["repeat","slide","rest","hold"] (Outside container395)
container96 <- container' "panel_3x1.png" (-744.0,-336.0) (Inside root)
in97 <- plugin' (vca83 ! "cv") (-765.0,-311.0) (Outside container96)
setColour in97 "#control"
hide in97
in98 <- plugin' (vca83 ! "signal") (-765.0,-361.0) (Outside container96)
setColour in98 "#sample"
knob99 <- knob' (input59 ! "result") (-765.0,-311.0) (Outside container96)
label100 <- label' "vca" (-769.0,-261.0) (Outside container96)
out101 <- plugout' (vca83 ! "result") (-724.0,-336.0) (Outside container96)
setColour out101 "#sample"
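-- Patch cables: each `cable` feeds an output, knob, or selector into a module input.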
cable out257 in103
cable out270 in107
cable out121 in110
cable knob117 in111
cable out119 in112
cable out119 in113
cable knob116 in114
cable out120 in115
cable knob126 in124
cable out136 in125
cable knob132 in130
cable out347 in138
cable out292 in139
cable out136 in143
cable knob145 in144
cable knob153 in151
cable out160 in152
cable knob158 in156
cable out148 in157
cable knob173 in162
cable knob174 in164
cable knob169 in166
cable out297 in168
cable knob172 in180
cable out197 in181
cable knob175 in182
cable out198 in183
cable knob170 in184
cable out193 in185
cable out199 in186
cable out194 in187
cable out199 in188
cable out195 in189
cable knob171 in190
cable out199 in191
cable out196 in192
cable knob204 in201
cable out141 in202
cable out214 in203
cable out367 in208
cable knob213 in209
cable knob212 in210
cable out249 in220
cable out250 in221
cable out223 in225
cable out252 in226
cable out223 in230
cable out252 in231
cable out223 in235
cable out252 in236
cable out251 in240
cable out252 in241
cable out223 in242
cable out244 in245
cable out228 in246
cable out233 in247
cable out238 in248
cable out346 in254
cable out265 in255
cable knob260 in259
cable knob264 in263
cable knob269 in267
cable out206 in268
cable knob273 in272
cable out128 in279
cable out341 in280
cable out105 in284
cable out177 in285
cable out282 in289
cable out287 in290
cable knob296 in294
cable out141 in295
cable out336 in299
cable knob324 in300
cable out335 in302
cable knob325 in303
cable out340 in304
cable knob326 in305
cable out339 in307
cable out341 in308
cable out331 in309
cable out341 in310
cable knob327 in311
cable out332 in312
cable out333 in313
cable knob328 in314
cable knob321 in316
cable out334 in317
cable knob322 in318
cable knob323 in320
cable out351 in343
cable out356 in344
cable knob350 in349
cable out367 in353
cable knob360 in359
cable out433 in366
cable knob424 in396
cable selector435 in397
cable selector436 in398
cable knob425 in399
cable selector437 in400
cable selector438 in401
cable knob426 in402
cable selector439 in403
cable selector440 in404
cable knob427 in405
cable selector441 in406
cable selector442 in407
cable knob428 in408
cable selector443 in409
cable selector444 in410
cable knob431 in411
cable knob429 in412
cable selector445 in413
cable selector446 in414
cable knob430 in415
cable selector447 in416
cable selector448 in417
cable out136 in418
cable knob432 in420
cable selector449 in421
cable selector450 in422
cable selector451 in423
cable knob99 in97
recompile
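-- Restore the saved knob and selector settings.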
set knob116 (0.14281064)
set knob117 (0.72483045)
set knob118 (0.38944468)
set knob126 (2.0e-2)
set knob132 (4.5592184)
set knob145 (1.0)
set knob153 (3.0e-3)
set knob158 (1.0)
set knob169 (0.0)
set knob170 (0.0)
set knob171 (250.0)
set knob172 (0.5469699)
set knob173 (0.0)
set knob174 (0.0)
set knob175 (0.0)
set knob204 (0.0)
set knob212 (0.0)
set knob213 (0.0)
set knob260 (0.14525436)
set knob264 (3.0e-5)
set knob269 (0.1)
set knob273 (0.1319111)
set knob296 (0.995)
set knob321 (0.19)
set knob322 (0.0)
set knob323 (2.0)
set knob324 (0.0)
set knob325 (0.0)
set knob326 (0.0)
set knob327 (0.0)
set knob328 (0.0)
set knob350 (0.9)
set knob360 (0.5)
set knob424 (0.0)
set knob425 (5.8333334e-2)
set knob426 (5.8333334e-2)
set knob427 (0.0)
set knob428 (5.8333334e-2)
set knob429 (4.1666668e-2)
set knob430 (4.1666668e-2)
set knob431 (-4.1666664e-2)
set knob432 (2.3120196)
set selector435 (0.0)
set selector436 (0.0)
set selector437 (5.0)
set selector438 (1.0)
set selector439 (0.0)
set selector440 (2.0)
set selector441 (0.0)
set selector442 (0.0)
set selector443 (0.0)
set selector444 (0.0)
set selector445 (5.0)
set selector446 (1.0)
set selector447 (7.0)
set selector448 (2.0)
set selector449 (7.0)
set selector450 (0.0)
set selector451 (0.0)
set knob99 (0.5)
return ()
| null | https://raw.githubusercontent.com/dpiponi/Moodler/a0c984c36abae52668d00f25eb3749e97e8936d3/Moodler/saves/test_karplus_strong.hs | haskell | do
restart
root <- getRoot
let out = "out"
let keyboard = "keyboard"
let trigger = "trigger"
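-- Instantiate the modules used by this patch.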
audio_saw0 <- new' "audio_saw"
audio_sin1 <- new' "audio_sin"
audio_square2 <- new' "audio_square"
audio_triangle3 <- new' "audio_triangle"
butterbp4 <- new' "butterbp"
butterhp5 <- new' "butterhp"
butterhp6 <- new' "butterhp"
butterlp7 <- new' "butterlp"
butterlp8 <- new' "butterlp"
delay10 <- new' "delay"
delay11 <- new' "delay"
delay9 <- new' "delay"
echo12 <- new' "echo"
exp_decay13 <- new' "exp_decay"
id14 <- new' "id"
id15 <- new' "id"
id16 <- new' "id"
id17 <- new' "id"
id18 <- new' "id"
id19 <- new' "id"
id20 <- new' "id"
id21 <- new' "id"
id22 <- new' "id"
id23 <- new' "id"
id24 <- new' "id"
id25 <- new' "id"
id26 <- new' "id"
id365 <- new' "id"
input27 <- new' "input"
input28 <- new' "input"
input29 <- new' "input"
input30 <- new' "input"
input31 <- new' "input"
input32 <- new' "input"
input33 <- new' "input"
input34 <- new' "input"
input35 <- new' "input"
input36 <- new' "input"
input368 <- new' "input"
input369 <- new' "input"
input37 <- new' "input"
input370 <- new' "input"
input371 <- new' "input"
input372 <- new' "input"
input373 <- new' "input"
input374 <- new' "input"
input375 <- new' "input"
input376 <- new' "input"
input377 <- new' "input"
input378 <- new' "input"
input379 <- new' "input"
input38 <- new' "input"
input380 <- new' "input"
input381 <- new' "input"
input382 <- new' "input"
input383 <- new' "input"
input384 <- new' "input"
input385 <- new' "input"
input386 <- new' "input"
input387 <- new' "input"
input388 <- new' "input"
input389 <- new' "input"
input39 <- new' "input"
input390 <- new' "input"
input391 <- new' "input"
input392 <- new' "input"
input393 <- new' "input"
input40 <- new' "input"
input41 <- new' "input"
input42 <- new' "input"
input43 <- new' "input"
input44 <- new' "input"
input45 <- new' "input"
input46 <- new' "input"
input47 <- new' "input"
input48 <- new' "input"
input49 <- new' "input"
input50 <- new' "input"
input51 <- new' "input"
input52 <- new' "input"
input53 <- new' "input"
input54 <- new' "input"
input55 <- new' "input"
input56 <- new' "input"
input57 <- new' "input"
input58 <- new' "input"
input59 <- new' "input"
input60 <- new' "input"
new "input" "keyboard"
let keyboard = "keyboard"
lfo61 <- new' "lfo"
lfo62 <- new' "lfo"
linear_mix63 <- new' "linear_mix"
linear_split64 <- new' "linear_split"
minus65 <- new' "minus"
noise66 <- new' "noise"
poisson67 <- new' "poisson"
random68 <- new' "random"
sequencer394 <- new' "sequencer"
signal_to_freq69 <- new' "signal_to_freq"
sum70 <- new' "sum"
sum71 <- new' "sum"
sum72 <- new' "sum"
sum73 <- new' "sum"
sum74 <- new' "sum"
sum75 <- new' "sum"
sum76 <- new' "sum"
sum77 <- new' "sum"
sum78 <- new' "sum"
sum79 <- new' "sum"
sum80 <- new' "sum"
new "input" "trigger"
let trigger = "trigger"
vca81 <- new' "vca"
vca82 <- new' "vca"
vca83 <- new' "vca"
vca84 <- new' "vca"
vca85 <- new' "vca"
vca86 <- new' "vca"
vca87 <- new' "vca"
vca88 <- new' "vca"
vca89 <- new' "vca"
vca90 <- new' "vca"
vca91 <- new' "vca"
vca92 <- new' "vca"
vca93 <- new' "vca"
vca94 <- new' "vca"
vca95 <- new' "vca"
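-- Lay out the interface: containers, plugs, knobs, labels, selectors, and proxies.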
container102 <- container' "panel_3x1.png" (-156.0,-504.0) (Inside root)
in103 <- plugin' (echo12 ! "signal") (-177.0,-504.0) (Outside container102)
setColour in103 "#sample"
label104 <- label' "echo" (-181.0,-429.0) (Outside container102)
out105 <- plugout' (echo12 ! "result") (-136.0,-504.0) (Outside container102)
setColour out105 "#sample"
container106 <- container' "panel_out.png" (444.0,24.0) (Inside root)
in107 <- plugin' (out ! "value") (420.0,24.0) (Outside container106)
setOutput in107
setColour in107 "#sample"
container108 <- container' "panel_delay.png" (228.0,132.0) (Inside root)
in109 <- plugin' (sum80 ! "signal1") (180.0,12.0) (Outside container108)
setColour in109 "#sample"
in110 <- plugin' (sum80 ! "signal2") (161.0,-6.0) (Outside container108)
setColour in110 "#sample"
hide in110
in111 <- plugin' (vca94 ! "cv") (201.0,0.0) (Outside container108)
setColour in111 "#sample"
hide in111
in112 <- plugin' (vca94 ! "signal") (185.0,-10.0) (Outside container108)
setColour in112 "#sample"
hide in112
in113 <- plugin' (vca95 ! "signal") (204.0,18.0) (Outside container108)
setColour in113 "#sample"
hide in113
in114 <- plugin' (delay10 ! "delay") (252.0,216.0) (Outside container108)
setColour in114 "#control"
hide in114
in115 <- plugin' (delay10 ! "signal") (201.0,-10.0) (Outside container108)
setColour in115 "#sample"
hide in115
knob116 <- knob' (input50 ! "result") (252.0,216.0) (Outside container108)
knob117 <- knob' (input33 ! "result") (252.0,168.0) (Outside container108)
knob118 <- knob' (input35 ! "result") (252.0,120.0) (Outside container108)
out119 <- plugout' (delay10 ! "result") (183.0,23.0) (Outside container108)
setColour out119 "#sample"
hide out119
out120 <- plugout' (sum80 ! "result") (169.0,17.0) (Outside container108)
setColour out120 "#sample"
hide out120
out121 <- plugout' (vca94 ! "result") (165.0,21.0) (Outside container108)
setColour out121 "#sample"
hide out121
out122 <- plugout' (vca95 ! "result") (276.0,12.0) (Outside container108)
setColour out122 "#sample"
container123 <- container' "panel_3x1.png" (-60.0,24.0) (Inside root)
in124 <- plugin' (exp_decay13 ! "decay_time") (-81.0,49.0) (Outside container123)
setColour in124 "#control"
hide in124
in125 <- plugin' (exp_decay13 ! "trigger") (-81.0,-1.0) (Outside container123)
setColour in125 "#control"
knob126 <- knob' (input51 ! "result") (-81.0,49.0) (Outside container123)
label127 <- label' "exp_decay" (-85.0,99.0) (Outside container123)
out128 <- plugout' (exp_decay13 ! "result") (-40.0,24.0) (Outside container123)
setColour out128 "#control"
container129 <- container' "panel_lfo.png" (-612.0,-120.0) (Inside root)
in130 <- plugin' (lfo62 ! "rate") (-615.0,-65.0) (Outside container129)
setColour in130 "#control"
hide in130
in131 <- plugin' (lfo62 ! "sync") (-600.0,-96.0) (Outside container129)
setColour in131 "#control"
knob132 <- knob' (input48 ! "result") (-600.0,-48.0) (Outside container129)
out133 <- plugout' (lfo62 ! "triangle") (-624.0,-240.0) (Outside container129)
setColour out133 "#control"
out134 <- plugout' (lfo62 ! "saw") (-564.0,-240.0) (Outside container129)
setColour out134 "#control"
out135 <- plugout' (lfo62 ! "sin_result") (-624.0,-204.0) (Outside container129)
setColour out135 "#control"
out136 <- plugout' (lfo62 ! "square_result") (-564.0,-204.0) (Outside container129)
setColour out136 "#control"
container137 <- container' "panel_3x1.png" (252.0,-216.0) (Inside root)
in138 <- plugin' (delay11 ! "delay") (231.0,-191.0) (Outside container137)
setColour in138 "#control"
in139 <- plugin' (delay11 ! "signal") (231.0,-241.0) (Outside container137)
setColour in139 "#sample"
label140 <- label' "delay" (227.0,-141.0) (Outside container137)
out141 <- plugout' (delay11 ! "result") (272.0,-216.0) (Outside container137)
setColour out141 "#sample"
container142 <- container' "panel_lfo.png" (-456.0,-228.0) (Inside root)
in143 <- plugin' (lfo61 ! "sync") (-444.0,-204.0) (Outside container142)
setColour in143 "#control"
in144 <- plugin' (lfo61 ! "rate") (-459.0,-173.0) (Outside container142)
setColour in144 "#control"
hide in144
knob145 <- knob' (input32 ! "result") (-444.0,-156.0) (Outside container142)
out146 <- plugout' (lfo61 ! "triangle") (-468.0,-348.0) (Outside container142)
setColour out146 "#control"
out147 <- plugout' (lfo61 ! "saw") (-408.0,-348.0) (Outside container142)
setColour out147 "#control"
out148 <- plugout' (lfo61 ! "sin_result") (-468.0,-312.0) (Outside container142)
setColour out148 "#control"
out149 <- plugout' (lfo61 ! "square_result") (-408.0,-312.0) (Outside container142)
setColour out149 "#control"
container150 <- container' "panel_gain.png" (-156.0,-192.0) (Inside root)
in151 <- plugin' (vca84 ! "cv") (-180.0,-192.0) (Outside container150)
setColour in151 "#control"
hide in151
in152 <- plugin' (vca84 ! "signal") (-216.0,-192.0) (Outside container150)
setColour in152 "#sample"
knob153 <- knob' (input34 ! "result") (-180.0,-192.0) (Outside container150)
out154 <- plugout' (vca84 ! "result") (-96.0,-192.0) (Outside container150)
setColour out154 "#sample"
container155 <- container' "panel_3x1.png" (-300.0,-204.0) (Inside root)
in156 <- plugin' (sum71 ! "signal1") (-321.0,-179.0) (Outside container155)
setColour in156 "#sample"
hide in156
in157 <- plugin' (sum71 ! "signal2") (-321.0,-229.0) (Outside container155)
setColour in157 "#sample"
knob158 <- knob' (input57 ! "result") (-321.0,-179.0) (Outside container155)
label159 <- label' "sum" (-325.0,-129.0) (Outside container155)
out160 <- plugout' (sum71 ! "result") (-280.0,-204.0) (Outside container155)
setColour out160 "#sample"
container161 <- container' "panel_filter.png" (240.0,-636.0) (Inside root)
in162 <- plugin' (vca85 ! "cv") (228.0,-516.0) (Outside container161)
setColour in162 "#sample"
hide in162
in163 <- plugin' (vca85 ! "signal") (180.0,-576.0) (Outside container161)
setColour in163 "#control"
in164 <- plugin' (vca86 ! "cv") (223.0,-638.0) (Outside container161)
setColour in164 "#sample"
hide in164
in165 <- plugin' (vca86 ! "signal") (180.0,-636.0) (Outside container161)
setColour in165 "#control"
in166 <- plugin' (vca87 ! "cv") (221.0,-754.0) (Outside container161)
setColour in166 "#sample"
hide in166
in167 <- plugin' (vca87 ! "signal") (180.0,-696.0) (Outside container161)
setColour in167 "#control"
in168 <- plugin' (id14 ! "signal") (180.0,-516.0) (Outside container161)
setColour in168 "#control"
knob169 <- knob' (input41 ! "result") (216.0,-696.0) (Outside container161)
knob170 <- knob' (input42 ! "result") (264.0,-696.0) (Outside container161)
setLow knob170 (Just (-1.0))
setHigh knob170 (Just (1.0))
knob171 <- knob' (input43 ! "result") (264.0,-744.0) (Outside container161)
setLow knob171 (Just (1.0))
setHigh knob171 (Just (1000.0))
knob172 <- knob' (input46 ! "result") (264.0,-576.0) (Outside container161)
setLow knob172 (Just (-1.0))
setHigh knob172 (Just (1.0))
knob173 <- knob' (input38 ! "result") (216.0,-576.0) (Outside container161)
knob174 <- knob' (input39 ! "result") (216.0,-636.0) (Outside container161)
knob175 <- knob' (input40 ! "result") (264.0,-636.0) (Outside container161)
setLow knob175 (Just (-1.0))
setHigh knob175 (Just (1.0))
out176 <- plugout' (butterbp4 ! "result") (300.0,-696.0) (Outside container161)
setColour out176 "#sample"
out177 <- plugout' (butterlp8 ! "result") (300.0,-576.0) (Outside container161)
setColour out177 "#sample"
out178 <- plugout' (butterhp6 ! "result") (300.0,-636.0) (Outside container161)
setColour out178 "#sample"
proxy179 <- proxy' (289.0,-526.0) (Outside container161)
hide proxy179
in180 <- plugin' (sum72 ! "signal2") (-129.0,144.0) (Inside proxy179)
setColour in180 "#sample"
hide in180
in181 <- plugin' (sum73 ! "signal1") (-134.0,73.0) (Inside proxy179)
setColour in181 "#sample"
in182 <- plugin' (sum73 ! "signal2") (-134.0,23.0) (Inside proxy179)
setColour in182 "#sample"
hide in182
in183 <- plugin' (sum74 ! "signal1") (-140.0,-40.0) (Inside proxy179)
setColour in183 "#sample"
in184 <- plugin' (sum74 ! "signal2") (-140.0,-90.0) (Inside proxy179)
setColour in184 "#sample"
hide in184
in185 <- plugin' (butterlp8 ! "freq") (-43.0,192.0) (Inside proxy179)
setColour in185 "#sample"
in186 <- plugin' (butterlp8 ! "signal") (-43.0,142.0) (Inside proxy179)
setColour in186 "#sample"
in187 <- plugin' (butterhp6 ! "freq") (-47.0,72.0) (Inside proxy179)
setColour in187 "#sample"
in188 <- plugin' (butterhp6 ! "signal") (-47.0,22.0) (Inside proxy179)
setColour in188 "#sample"
in189 <- plugin' (butterbp4 ! "freq") (-55.0,-41.0) (Inside proxy179)
setColour in189 "#sample"
in190 <- plugin' (butterbp4 ! "bandwidth") (-55.0,-91.0) (Inside proxy179)
setColour in190 "#sample"
hide in190
in191 <- plugin' (butterbp4 ! "signal") (-55.0,-141.0) (Inside proxy179)
setColour in191 "#sample"
in192 <- plugin' (sum72 ! "signal1") (-129.0,194.0) (Inside proxy179)
setColour in192 "#sample"
out193 <- plugout' (sum72 ! "result") (-79.0,194.0) (Inside proxy179)
setColour out193 "#sample"
out194 <- plugout' (sum73 ! "result") (-84.0,73.0) (Inside proxy179)
setColour out194 "#sample"
out195 <- plugout' (sum74 ! "result") (-90.0,-40.0) (Inside proxy179)
setColour out195 "#sample"
out196 <- plugout' (vca85 ! "result") (-200.0,196.0) (Inside proxy179)
setColour out196 "#sample"
out197 <- plugout' (vca86 ! "result") (-205.0,74.0) (Inside proxy179)
setColour out197 "#sample"
out198 <- plugout' (vca87 ! "result") (-207.0,-42.0) (Inside proxy179)
setColour out198 "#sample"
out199 <- plugout' (id14 ! "result") (-163.0,279.0) (Inside proxy179)
setColour out199 "#sample"
container200 <- container' "panel_3x1.png" (444.0,-216.0) (Inside root)
in201 <- plugin' (linear_mix63 ! "gain") (423.0,-166.0) (Outside container200)
setColour in201 "#control"
hide in201
in202 <- plugin' (linear_mix63 ! "signal1") (423.0,-216.0) (Outside container200)
setColour in202 "#sample"
in203 <- plugin' (linear_mix63 ! "signal2") (423.0,-266.0) (Outside container200)
setColour in203 "#sample"
knob204 <- knob' (input60 ! "result") (423.0,-166.0) (Outside container200)
label205 <- label' "linear_mix" (419.0,-141.0) (Outside container200)
out206 <- plugout' (linear_mix63 ! "result") (464.0,-216.0) (Outside container200)
setColour out206 "#sample"
container207 <- container' "panel_vco2.png" (444.0,-564.0) (Inside root)
in208 <- plugin' (id22 ! "signal") (480.0,-528.0) (Outside container207)
setColour in208 "#control"
in209 <- plugin' (id23 ! "signal") (457.0,-484.0) (Outside container207)
setColour in209 "#sample"
hide in209
in210 <- plugin' (id16 ! "signal") (475.0,-561.0) (Outside container207)
setColour in210 "#sample"
hide in210
in211 <- plugin' (id17 ! "signal") (480.0,-600.0) (Outside container207)
setColour in211 "#control"
knob212 <- knob' (input45 ! "result") (480.0,-564.0) (Outside container207)
knob213 <- knob' (input44 ! "result") (480.0,-492.0) (Outside container207)
out214 <- plugout' (id21 ! "result") (420.0,-648.0) (Outside container207)
setColour out214 "#sample"
out215 <- plugout' (id18 ! "result") (492.0,-648.0) (Outside container207)
setColour out215 "#sample"
out216 <- plugout' (id19 ! "result") (420.0,-684.0) (Outside container207)
setColour out216 "#sample"
out217 <- plugout' (id20 ! "result") (492.0,-684.0) (Outside container207)
setColour out217 "#sample"
proxy218 <- proxy' (397.0,-482.0) (Outside container207)
hide proxy218
container219 <- container' "panel_3x1.png" (-815.0,439.0) (Inside proxy218)
in220 <- plugin' (sum70 ! "signal2") (-836.0,414.0) (Outside container219)
setColour in220 "#sample"
in221 <- plugin' (sum70 ! "signal1") (-836.0,464.0) (Outside container219)
setColour in221 "#sample"
label222 <- label' "sum" (-840.0,514.0) (Outside container219)
out223 <- plugout' (sum70 ! "result") (-795.0,439.0) (Outside container219)
setColour out223 "#sample"
container224 <- container' "panel_3x1.png" (-434.0,420.0) (Inside proxy218)
in225 <- plugin' (audio_triangle3 ! "freq") (-455.0,445.0) (Outside container224)
setColour in225 "#sample"
in226 <- plugin' (audio_triangle3 ! "sync") (-455.0,395.0) (Outside container224)
setColour in226 "#sample"
label227 <- label' "audio_triangle" (-459.0,495.0) (Outside container224)
out228 <- plugout' (audio_triangle3 ! "result") (-414.0,420.0) (Outside container224)
setColour out228 "#sample"
container229 <- container' "panel_3x1.png" (-318.0,291.0) (Inside proxy218)
in230 <- plugin' (audio_saw0 ! "freq") (-339.0,316.0) (Outside container229)
setColour in230 "#sample"
in231 <- plugin' (audio_saw0 ! "sync") (-339.0,266.0) (Outside container229)
setColour in231 "#sample"
label232 <- label' "audio_saw" (-343.0,366.0) (Outside container229)
out233 <- plugout' (audio_saw0 ! "result") (-298.0,291.0) (Outside container229)
setColour out233 "#sample"
container234 <- container' "panel_3x1.png" (-691.0,453.0) (Inside proxy218)
in235 <- plugin' (audio_sin1 ! "freq") (-712.0,478.0) (Outside container234)
setColour in235 "#sample"
in236 <- plugin' (audio_sin1 ! "sync") (-712.0,428.0) (Outside container234)
setColour in236 "#sample"
label237 <- label' "audio_sin" (-716.0,528.0) (Outside container234)
out238 <- plugout' (audio_sin1 ! "result") (-671.0,453.0) (Outside container234)
setColour out238 "#sample"
container239 <- container' "panel_3x1.png" (-826.0,199.0) (Inside proxy218)
in240 <- plugin' (audio_square2 ! "pwm") (-847.0,199.0) (Outside container239)
setColour in240 "#sample"
in241 <- plugin' (audio_square2 ! "sync") (-847.0,149.0) (Outside container239)
setColour in241 "#sample"
in242 <- plugin' (audio_square2 ! "freq") (-847.0,249.0) (Outside container239)
setColour in242 "#sample"
label243 <- label' "audio_square" (-851.0,274.0) (Outside container239)
out244 <- plugout' (audio_square2 ! "result") (-806.0,199.0) (Outside container239)
setColour out244 "#sample"
in245 <- plugin' (id18 ! "signal") (-753.0,198.0) (Inside proxy218)
setColour in245 "#sample"
in246 <- plugin' (id19 ! "signal") (-360.0,422.0) (Inside proxy218)
setColour in246 "#sample"
in247 <- plugin' (id20 ! "signal") (-247.0,292.0) (Inside proxy218)
setColour in247 "#sample"
in248 <- plugin' (id21 ! "signal") (-556.0,449.0) (Inside proxy218)
setColour in248 "#sample"
out249 <- plugout' (id22 ! "result") (-891.0,413.0) (Inside proxy218)
setColour out249 "#sample"
out250 <- plugout' (id23 ! "result") (-892.0,469.0) (Inside proxy218)
setColour out250 "#sample"
out251 <- plugout' (id16 ! "result") (-894.0,199.0) (Inside proxy218)
setColour out251 "#sample"
out252 <- plugout' (id17 ! "result") (-893.0,146.0) (Inside proxy218)
setColour out252 "#sample"
container253 <- container' "panel_3x1.png" (-252.0,-504.0) (Inside root)
in254 <- plugin' (minus65 ! "signal1") (-273.0,-479.0) (Outside container253)
setColour in254 "#sample"
in255 <- plugin' (minus65 ! "signal2") (-273.0,-529.0) (Outside container253)
setColour in255 "#sample"
label256 <- label' "minus" (-277.0,-429.0) (Outside container253)
out257 <- plugout' (minus65 ! "result") (-232.0,-504.0) (Outside container253)
setColour out257 "#sample"
container258 <- container' "panel_knob.png" (-612.0,-732.0) (Inside root)
in259 <- plugin' (id24 ! "signal") (-624.0,-732.0) (Outside container258)
setColour in259 "#control"
hide in259
knob260 <- knob' (input52 ! "result") (-624.0,-732.0) (Outside container258)
out261 <- plugout' (id24 ! "result") (-588.0,-732.0) (Outside container258)
setColour out261 "#control"
container262 <- container' "panel_knob.png" (-264.0,-660.0) (Inside root)
in263 <- plugin' (id25 ! "signal") (-276.0,-660.0) (Outside container262)
setColour in263 "#control"
hide in263
knob264 <- knob' (input53 ! "result") (-276.0,-660.0) (Outside container262)
out265 <- plugout' (id25 ! "result") (-240.0,-660.0) (Outside container262)
setColour out265 "#control"
container266 <- container' "panel_gain.png" (564.0,-324.0) (Inside root)
in267 <- plugin' (vca81 ! "cv") (540.0,-324.0) (Outside container266)
setColour in267 "#control"
hide in267
in268 <- plugin' (vca81 ! "signal") (504.0,-324.0) (Outside container266)
setColour in268 "#sample"
knob269 <- knob' (input55 ! "result") (540.0,-324.0) (Outside container266)
out270 <- plugout' (vca81 ! "result") (624.0,-324.0) (Outside container266)
setColour out270 "#sample"
container271 <- container' "panel_knob.png" (-144.0,-288.0) (Inside root)
in272 <- plugin' (id15 ! "signal") (-156.0,-288.0) (Outside container271)
setColour in272 "#control"
hide in272
knob273 <- knob' (input49 ! "result") (-156.0,-288.0) (Outside container271)
out274 <- plugout' (id15 ! "result") (-120.0,-288.0) (Outside container271)
setColour out274 "#control"
container275 <- container' "panel_keyboard.png" (-456.0,36.0) (Inside root)
out276 <- plugout' (keyboard ! "result") (-396.0,60.0) (Outside container275)
setColour out276 "#control"
out277 <- plugout' (trigger ! "result") (-396.0,12.0) (Outside container275)
setColour out277 "#control"
container278 <- container' "panel_3x1.png" (72.0,84.0) (Inside root)
in279 <- plugin' (vca88 ! "cv") (51.0,109.0) (Outside container278)
setColour in279 "#control"
in280 <- plugin' (vca88 ! "signal") (51.0,59.0) (Outside container278)
setColour in280 "#sample"
label281 <- label' "vca" (47.0,159.0) (Outside container278)
out282 <- plugout' (vca88 ! "result") (92.0,84.0) (Outside container278)
setColour out282 "#sample"
container283 <- container' "panel_3x1.png" (-12.0,-216.0) (Inside root)
in284 <- plugin' (delay9 ! "delay") (-33.0,-191.0) (Outside container283)
setColour in284 "#control"
in285 <- plugin' (delay9 ! "signal") (-33.0,-241.0) (Outside container283)
setColour in285 "#sample"
label286 <- label' "delay" (-37.0,-141.0) (Outside container283)
out287 <- plugout' (delay9 ! "result") (8.0,-216.0) (Outside container283)
setColour out287 "#sample"
container288 <- container' "panel_3x1.png" (120.0,-216.0) (Inside root)
in289 <- plugin' (sum75 ! "signal1") (99.0,-191.0) (Outside container288)
setColour in289 "#sample"
in290 <- plugin' (sum75 ! "signal2") (99.0,-241.0) (Outside container288)
setColour in290 "#sample"
label291 <- label' "sum" (95.0,-141.0) (Outside container288)
out292 <- plugout' (sum75 ! "result") (140.0,-216.0) (Outside container288)
setColour out292 "#sample"
container293 <- container' "panel_gain.png" (192.0,-420.0) (Inside root)
in294 <- plugin' (vca89 ! "cv") (168.0,-420.0) (Outside container293)
setColour in294 "#control"
hide in294
in295 <- plugin' (vca89 ! "signal") (132.0,-420.0) (Outside container293)
setColour in295 "#sample"
knob296 <- knob' (input28 ! "result") (168.0,-420.0) (Outside container293)
out297 <- plugout' (vca89 ! "result") (252.0,-420.0) (Outside container293)
setColour out297 "#sample"
container298 <- container' "panel_random.png" (-240.0,96.0) (Inside root)
in299 <- plugin' (sum77 ! "signal2") (-245.0,23.0) (Outside container298)
setColour in299 "#sample"
hide in299
in300 <- plugin' (vca92 ! "cv") (-266.0,22.0) (Outside container298)
setColour in300 "#sample"
hide in300
in301 <- plugin' (vca92 ! "signal") (-324.0,12.0) (Outside container298)
setColour in301 "#control"
in302 <- plugin' (poisson67 ! "rate") (-151.0,28.0) (Outside container298)
setColour in302 "#sample"
hide in302
in303 <- plugin' (sum78 ! "signal1") (-225.0,149.0) (Outside container298)
setColour in303 "#sample"
hide in303
in304 <- plugin' (sum78 ! "signal2") (-152.0,152.0) (Outside container298)
setColour in304 "#sample"
hide in304
in305 <- plugin' (vca93 ! "cv") (-240.0,104.0) (Outside container298)
setColour in305 "#sample"
hide in305
in306 <- plugin' (vca93 ! "signal") (-324.0,156.0) (Outside container298)
setColour in306 "#control"
in307 <- plugin' (butterlp7 ! "freq") (-150.0,164.0) (Outside container298)
setColour in307 "#sample"
hide in307
in308 <- plugin' (butterlp7 ! "signal") (-213.0,145.0) (Outside container298)
setColour in308 "#sample"
hide in308
in309 <- plugin' (butterhp5 ! "freq") (-151.0,115.0) (Outside container298)
setColour in309 "#sample"
hide in309
in310 <- plugin' (butterhp5 ! "signal") (-153.0,109.0) (Outside container298)
setColour in310 "#sample"
hide in310
in311 <- plugin' (sum79 ! "signal1") (-272.0,106.0) (Outside container298)
setColour in311 "#sample"
hide in311
in312 <- plugin' (sum79 ! "signal2") (-153.0,104.0) (Outside container298)
setColour in312 "#sample"
hide in312
in313 <- plugin' (random68 ! "rate") (-203.0,60.0) (Outside container298)
setColour in313 "#sample"
hide in313
in314 <- plugin' (vca90 ! "cv") (-243.0,103.0) (Outside container298)
setColour in314 "#sample"
hide in314
in315 <- plugin' (vca90 ! "signal") (-324.0,108.0) (Outside container298)
setColour in315 "#control"
in316 <- plugin' (sum76 ! "signal1") (-217.0,66.0) (Outside container298)
setColour in316 "#sample"
hide in316
in317 <- plugin' (sum76 ! "signal2") (-149.0,60.0) (Outside container298)
setColour in317 "#sample"
hide in317
in318 <- plugin' (vca91 ! "cv") (-318.0,72.0) (Outside container298)
setColour in318 "#sample"
hide in318
in319 <- plugin' (vca91 ! "signal") (-324.0,60.0) (Outside container298)
setColour in319 "#control"
in320 <- plugin' (sum77 ! "signal1") (-211.0,21.0) (Outside container298)
setColour in320 "#sample"
hide in320
knob321 <- knob' (input27 ! "result") (-240.0,60.0) (Outside container298)
knob322 <- knob' (input29 ! "result") (-288.0,60.0) (Outside container298)
knob323 <- knob' (input30 ! "result") (-240.0,12.0) (Outside container298)
knob324 <- knob' (input31 ! "result") (-288.0,12.0) (Outside container298)
knob325 <- knob' (input36 ! "result") (-240.0,156.0) (Outside container298)
knob326 <- knob' (input47 ! "result") (-288.0,156.0) (Outside container298)
knob327 <- knob' (input54 ! "result") (-240.0,108.0) (Outside container298)
knob328 <- knob' (input56 ! "result") (-288.0,108.0) (Outside container298)
out329 <- plugout' (butterlp7 ! "result") (-144.0,156.0) (Outside container298)
setColour out329 "#sample"
out330 <- plugout' (butterhp5 ! "result") (-144.0,108.0) (Outside container298)
setColour out330 "#sample"
out331 <- plugout' (sum79 ! "result") (-226.0,108.0) (Outside container298)
setColour out331 "#sample"
hide out331
out332 <- plugout' (vca90 ! "result") (-247.0,99.0) (Outside container298)
setColour out332 "#sample"
hide out332
out333 <- plugout' (sum76 ! "result") (-232.0,61.0) (Outside container298)
setColour out333 "#sample"
hide out333
out334 <- plugout' (vca91 ! "result") (-244.0,58.0) (Outside container298)
setColour out334 "#sample"
hide out334
out335 <- plugout' (sum77 ! "result") (-229.0,22.0) (Outside container298)
setColour out335 "#sample"
hide out335
out336 <- plugout' (vca92 ! "result") (-245.0,22.0) (Outside container298)
setColour out336 "#sample"
hide out336
out337 <- plugout' (random68 ! "result") (-144.0,60.0) (Outside container298)
setColour out337 "#control"
out338 <- plugout' (poisson67 ! "trigger") (-144.0,12.0) (Outside container298)
setColour out338 "#control"
out339 <- plugout' (sum78 ! "result") (-232.0,159.0) (Outside container298)
setColour out339 "#sample"
hide out339
out340 <- plugout' (vca93 ! "result") (-243.0,150.0) (Outside container298)
setColour out340 "#sample"
hide out340
out341 <- plugout' (noise66 ! "result") (-144.0,204.0) (Outside container298)
setColour out341 "#sample"
container342 <- container' "panel_3x1.png" (-156.0,-756.0) (Inside root)
in343 <- plugin' (linear_split64 ! "gain") (-177.0,-731.0) (Outside container342)
setColour in343 "#control"
in344 <- plugin' (linear_split64 ! "signal") (-177.0,-781.0) (Outside container342)
setColour in344 "#sample"
label345 <- label' "linear_split" (-181.0,-681.0) (Outside container342)
out346 <- plugout' (linear_split64 ! "result1") (-136.0,-731.0) (Outside container342)
setColour out346 "#sample"
out347 <- plugout' (linear_split64 ! "result2") (-136.0,-781.0) (Outside container342)
setColour out347 "#sample"
container348 <- container' "panel_knob.png" (-324.0,-936.0) (Inside root)
in349 <- plugin' (id26 ! "signal") (-336.0,-936.0) (Outside container348)
setColour in349 "#control"
hide in349
knob350 <- knob' (input37 ! "result") (-336.0,-936.0) (Outside container348)
out351 <- plugout' (id26 ! "result") (-300.0,-936.0) (Outside container348)
setColour out351 "#control"
container352 <- container' "panel_3x1.png" (-492.0,-504.0) (Inside root)
in353 <- plugin' (signal_to_freq69 ! "signal") (-513.0,-504.0) (Outside container352)
setColour in353 "#control"
label354 <- label' "signal_to_freq" (-517.0,-429.0) (Outside container352)
out355 <- plugout' (signal_to_freq69 ! "freq") (-472.0,-479.0) (Outside container352)
setColour out355 "#control"
out356 <- plugout' (signal_to_freq69 ! "rate") (-472.0,-529.0) (Outside container352)
setColour out356 "#control"
container357 <- container' "panel_3x1.png" (-792.0,-588.0) (Inside root)
in358 <- plugin' (vca82 ! "signal") (-813.0,-613.0) (Outside container357)
setColour in358 "#sample"
in359 <- plugin' (vca82 ! "cv") (-813.0,-563.0) (Outside container357)
setColour in359 "#control"
hide in359
knob360 <- knob' (input58 ! "result") (-813.0,-563.0) (Outside container357)
label361 <- label' "vca" (-817.0,-513.0) (Outside container357)
out362 <- plugout' (vca82 ! "result") (-772.0,-588.0) (Outside container357)
setColour out362 "#sample"
container363 <- container' "panel_3x1.png" (-492.0,-888.0) (Inside root)
in366 <- plugin' (id365 ! "signal") (-513.0,-888.0) (Outside container363)
setColour in366 "#control"
label364 <- label' "id" (-517.0,-813.0) (Outside container363)
out367 <- plugout' (id365 ! "result") (-472.0,-888.0) (Outside container363)
setColour out367 "#control"
container395 <- container' "panel_sequencer.png" (168.0,-972.0) (Inside root)
in396 <- plugin' (sequencer394 ! "freq2") (24.0,-919.0) (Outside container395)
setColour in396 "#sample"
hide in396
in397 <- plugin' (sequencer394 ! "pulse2") (67.0,-924.0) (Outside container395)
setColour in397 "#sample"
hide in397
in398 <- plugin' (sequencer394 ! "mode2") (103.0,-923.0) (Outside container395)
setColour in398 "#sample"
hide in398
in399 <- plugin' (sequencer394 ! "freq3") (24.0,-969.0) (Outside container395)
setColour in399 "#sample"
hide in399
in400 <- plugin' (sequencer394 ! "pulse3") (67.0,-974.0) (Outside container395)
setColour in400 "#sample"
hide in400
in401 <- plugin' (sequencer394 ! "mode3") (103.0,-973.0) (Outside container395)
setColour in401 "#sample"
hide in401
in402 <- plugin' (sequencer394 ! "freq4") (24.0,-1019.0) (Outside container395)
setColour in402 "#sample"
hide in402
in403 <- plugin' (sequencer394 ! "pulse4") (67.0,-1024.0) (Outside container395)
setColour in403 "#sample"
hide in403
in404 <- plugin' (sequencer394 ! "mode4") (103.0,-1023.0) (Outside container395)
setColour in404 "#sample"
hide in404
in405 <- plugin' (sequencer394 ! "freq5") (194.0,-871.0) (Outside container395)
setColour in405 "#sample"
hide in405
in406 <- plugin' (sequencer394 ! "pulse5") (239.0,-867.0) (Outside container395)
setColour in406 "#sample"
hide in406
in407 <- plugin' (sequencer394 ! "mode5") (276.0,-870.0) (Outside container395)
setColour in407 "#sample"
hide in407
in408 <- plugin' (sequencer394 ! "freq6") (194.0,-921.0) (Outside container395)
setColour in408 "#sample"
hide in408
in409 <- plugin' (sequencer394 ! "pulse6") (239.0,-917.0) (Outside container395)
setColour in409 "#sample"
hide in409
in410 <- plugin' (sequencer394 ! "mode6") (276.0,-920.0) (Outside container395)
setColour in410 "#sample"
hide in410
in411 <- plugin' (sequencer394 ! "freq1") (24.0,-869.0) (Outside container395)
setColour in411 "#sample"
hide in411
in412 <- plugin' (sequencer394 ! "freq7") (194.0,-971.0) (Outside container395)
setColour in412 "#sample"
hide in412
in413 <- plugin' (sequencer394 ! "pulse7") (239.0,-967.0) (Outside container395)
setColour in413 "#sample"
hide in413
in414 <- plugin' (sequencer394 ! "mode7") (276.0,-970.0) (Outside container395)
setColour in414 "#sample"
hide in414
in415 <- plugin' (sequencer394 ! "freq8") (194.0,-1021.0) (Outside container395)
setColour in415 "#sample"
hide in415
in416 <- plugin' (sequencer394 ! "pulse8") (239.0,-1017.0) (Outside container395)
setColour in416 "#sample"
hide in416
in417 <- plugin' (sequencer394 ! "mode8") (276.0,-1020.0) (Outside container395)
setColour in417 "#sample"
hide in417
in418 <- plugin' (sequencer394 ! "gate") (36.0,-1104.0) (Outside container395)
setColour in418 "#control"
in419 <- plugin' (sequencer394 ! "add") (36.0,-1068.0) (Outside container395)
setColour in419 "#control"
in420 <- plugin' (sequencer394 ! "slide_rate") (197.0,-1118.0) (Outside container395)
setColour in420 "#sample"
hide in420
in421 <- plugin' (sequencer394 ! "length") (197.0,-1068.0) (Outside container395)
setColour in421 "#sample"
hide in421
in422 <- plugin' (sequencer394 ! "pulse1") (67.0,-874.0) (Outside container395)
setColour in422 "#sample"
hide in422
in423 <- plugin' (sequencer394 ! "mode1") (103.0,-873.0) (Outside container395)
setColour in423 "#sample"
hide in423
knob424 <- knob' (input378 ! "result") (24.0,-924.0) (Outside container395)
knob425 <- knob' (input381 ! "result") (24.0,-972.0) (Outside container395)
knob426 <- knob' (input385 ! "result") (24.0,-1020.0) (Outside container395)
knob427 <- knob' (input388 ! "result") (192.0,-876.0) (Outside container395)
knob428 <- knob' (input391 ! "result") (192.0,-924.0) (Outside container395)
knob429 <- knob' (input368 ! "result") (192.0,-972.0) (Outside container395)
knob430 <- knob' (input372 ! "result") (192.0,-1020.0) (Outside container395)
knob431 <- knob' (input384 ! "result") (24.0,-876.0) (Outside container395)
knob432 <- knob' (input376 ! "result") (180.0,-1104.0) (Outside container395)
out433 <- plugout' (sequencer394 ! "result") (324.0,-1104.0) (Outside container395)
setColour out433 "#control"
out434 <- plugout' (sequencer394 ! "trigger") (324.0,-1068.0) (Outside container395)
setColour out434 "#control"
selector435 <- selector' (input379 ! "result") (60.0,-924.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector436 <- selector' (input380 ! "result") (96.0,-924.0) ["repeat","slide","rest","hold"] (Outside container395)
selector437 <- selector' (input382 ! "result") (60.0,-972.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector438 <- selector' (input383 ! "result") (96.0,-972.0) ["repeat","slide","rest","hold"] (Outside container395)
selector439 <- selector' (input386 ! "result") (60.0,-1020.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector440 <- selector' (input387 ! "result") (96.0,-1020.0) ["repeat","slide","rest","hold"] (Outside container395)
selector441 <- selector' (input389 ! "result") (228.0,-876.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector442 <- selector' (input390 ! "result") (264.0,-876.0) ["repeat","slide","rest","hold"] (Outside container395)
selector443 <- selector' (input392 ! "result") (228.0,-924.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector444 <- selector' (input393 ! "result") (264.0,-924.0) ["repeat","slide","rest","hold"] (Outside container395)
selector445 <- selector' (input370 ! "result") (228.0,-972.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector446 <- selector' (input371 ! "result") (264.0,-972.0) ["repeat","slide","rest","hold"] (Outside container395)
selector447 <- selector' (input373 ! "result") (228.0,-1020.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector448 <- selector' (input374 ! "result") (264.0,-1020.0) ["repeat","slide","rest","hold"] (Outside container395)
selector449 <- selector' (input375 ! "result") (180.0,-1068.0) ["1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16"] (Outside container395)
selector450 <- selector' (input369 ! "result") (60.0,-876.0) ["1","2","3","4","5","6","7","8"] (Outside container395)
selector451 <- selector' (input377 ! "result") (96.0,-876.0) ["repeat","slide","rest","hold"] (Outside container395)
container96 <- container' "panel_3x1.png" (-744.0,-336.0) (Inside root)
in97 <- plugin' (vca83 ! "cv") (-765.0,-311.0) (Outside container96)
setColour in97 "#control"
hide in97
in98 <- plugin' (vca83 ! "signal") (-765.0,-361.0) (Outside container96)
setColour in98 "#sample"
knob99 <- knob' (input59 ! "result") (-765.0,-311.0) (Outside container96)
label100 <- label' "vca" (-769.0,-261.0) (Outside container96)
out101 <- plugout' (vca83 ! "result") (-724.0,-336.0) (Outside container96)
setColour out101 "#sample"
cable out257 in103
cable out270 in107
cable out121 in110
cable knob117 in111
cable out119 in112
cable out119 in113
cable knob116 in114
cable out120 in115
cable knob126 in124
cable out136 in125
cable knob132 in130
cable out347 in138
cable out292 in139
cable out136 in143
cable knob145 in144
cable knob153 in151
cable out160 in152
cable knob158 in156
cable out148 in157
cable knob173 in162
cable knob174 in164
cable knob169 in166
cable out297 in168
cable knob172 in180
cable out197 in181
cable knob175 in182
cable out198 in183
cable knob170 in184
cable out193 in185
cable out199 in186
cable out194 in187
cable out199 in188
cable out195 in189
cable knob171 in190
cable out199 in191
cable out196 in192
cable knob204 in201
cable out141 in202
cable out214 in203
cable out367 in208
cable knob213 in209
cable knob212 in210
cable out249 in220
cable out250 in221
cable out223 in225
cable out252 in226
cable out223 in230
cable out252 in231
cable out223 in235
cable out252 in236
cable out251 in240
cable out252 in241
cable out223 in242
cable out244 in245
cable out228 in246
cable out233 in247
cable out238 in248
cable out346 in254
cable out265 in255
cable knob260 in259
cable knob264 in263
cable knob269 in267
cable out206 in268
cable knob273 in272
cable out128 in279
cable out341 in280
cable out105 in284
cable out177 in285
cable out282 in289
cable out287 in290
cable knob296 in294
cable out141 in295
cable out336 in299
cable knob324 in300
cable out335 in302
cable knob325 in303
cable out340 in304
cable knob326 in305
cable out339 in307
cable out341 in308
cable out331 in309
cable out341 in310
cable knob327 in311
cable out332 in312
cable out333 in313
cable knob328 in314
cable knob321 in316
cable out334 in317
cable knob322 in318
cable knob323 in320
cable out351 in343
cable out356 in344
cable knob350 in349
cable out367 in353
cable knob360 in359
cable out433 in366
cable knob424 in396
cable selector435 in397
cable selector436 in398
cable knob425 in399
cable selector437 in400
cable selector438 in401
cable knob426 in402
cable selector439 in403
cable selector440 in404
cable knob427 in405
cable selector441 in406
cable selector442 in407
cable knob428 in408
cable selector443 in409
cable selector444 in410
cable knob431 in411
cable knob429 in412
cable selector445 in413
cable selector446 in414
cable knob430 in415
cable selector447 in416
cable selector448 in417
cable out136 in418
cable knob432 in420
cable selector449 in421
cable selector450 in422
cable selector451 in423
cable knob99 in97
recompile
set knob116 (0.14281064)
set knob117 (0.72483045)
set knob118 (0.38944468)
set knob126 (2.0e-2)
set knob132 (4.5592184)
set knob145 (1.0)
set knob153 (3.0e-3)
set knob158 (1.0)
set knob169 (0.0)
set knob170 (0.0)
set knob171 (250.0)
set knob172 (0.5469699)
set knob173 (0.0)
set knob174 (0.0)
set knob175 (0.0)
set knob204 (0.0)
set knob212 (0.0)
set knob213 (0.0)
set knob260 (0.14525436)
set knob264 (3.0e-5)
set knob269 (0.1)
set knob273 (0.1319111)
set knob296 (0.995)
set knob321 (0.19)
set knob322 (0.0)
set knob323 (2.0)
set knob324 (0.0)
set knob325 (0.0)
set knob326 (0.0)
set knob327 (0.0)
set knob328 (0.0)
set knob350 (0.9)
set knob360 (0.5)
set knob424 (0.0)
set knob425 (5.8333334e-2)
set knob426 (5.8333334e-2)
set knob427 (0.0)
set knob428 (5.8333334e-2)
set knob429 (4.1666668e-2)
set knob430 (4.1666668e-2)
set knob431 (-4.1666664e-2)
set knob432 (2.3120196)
set selector435 (0.0)
set selector436 (0.0)
set selector437 (5.0)
set selector438 (1.0)
set selector439 (0.0)
set selector440 (2.0)
set selector441 (0.0)
set selector442 (0.0)
set selector443 (0.0)
set selector444 (0.0)
set selector445 (5.0)
set selector446 (1.0)
set selector447 (7.0)
set selector448 (2.0)
set selector449 (7.0)
set selector450 (0.0)
set selector451 (0.0)
set knob99 (0.5)
return ()
|
|
68c65b7ec396f2d47df9490a5288a793eb9540394b54fa54d45d8a88f55a2aaa | c-cube/qcheck | test_qualified_names.ml | open QCheck2
open Helpers
module type S = sig
type t = int
val gen : int QCheck2.Gen.t
end
module Q : S = struct
type t = int [@@deriving qcheck2]
end
module F (X : S) = struct
type t = X.t [@@deriving qcheck2]
end
module G = F (Q)
type t = Q.t [@@deriving qcheck2]
type u = G.t [@@deriving qcheck2]
let test_module () =
test_compare ~msg:"Gen.int <=> deriving Q.t" ~eq:Alcotest.int Gen.int gen
let test_functor () =
test_compare ~msg:"Gen.int <=> deriving F.t" ~eq:Alcotest.int Gen.int gen_u
(** {2. Execute tests} *)
let () = Alcotest.run "Test_Qualified_names"
[("Qualified names",
Alcotest.[
test_case "test_module" `Quick test_module;
test_case "test_functor" `Quick test_functor
])]
| null | https://raw.githubusercontent.com/c-cube/qcheck/063c1d74795a24eb77fa661d218c4715382df566/test/ppx_deriving_qcheck/deriver/qcheck2/test_qualified_names.ml | ocaml | open QCheck2
open Helpers
module type S = sig
type t = int
val gen : int QCheck2.Gen.t
end
module Q : S = struct
type t = int [@@deriving qcheck2]
end
module F (X : S) = struct
type t = X.t [@@deriving qcheck2]
end
module G = F (Q)
type t = Q.t [@@deriving qcheck2]
type u = G.t [@@deriving qcheck2]
let test_module () =
test_compare ~msg:"Gen.int <=> deriving Q.t" ~eq:Alcotest.int Gen.int gen
let test_functor () =
test_compare ~msg:"Gen.int <=> deriving F.t" ~eq:Alcotest.int Gen.int gen_u
* { 2 . Execute tests }
let () = Alcotest.run "Test_Qualified_names"
[("Qualified names",
Alcotest.[
test_case "test_module" `Quick test_module;
test_case "test_functor" `Quick test_functor
])]
|
|
68ce8f3b9bd574ef1a668d790b7b64817b8576406c7e6a1214de3275470db83b | juspay/atlas | Common.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : PublicTransport.Common
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module PublicTransport.Common where
import Beckn.Prelude
import Beckn.Types.Time
import Beckn.Utils.Dhall (readDhallConfig)
import qualified "mock-public-transport-bpp" Environment as Bpp
import qualified "public-transport-bap" Environment as Bap
import GHC.IO (unsafePerformIO)
import Servant.Client
import Utils
kafkaConsumerTimeoutMilliseconds :: Int
kafkaConsumerTimeoutMilliseconds = 500
publicTransportBapUrl :: BaseUrl
publicTransportBapUrl =
BaseUrl
{ baseUrlScheme = Http,
baseUrlHost = "localhost",
baseUrlPort = 8023,
baseUrlPath = ""
}
publicTransportBapClientEnv :: ClientEnv
publicTransportBapClientEnv = mkClientEnv defaultManager publicTransportBapUrl
callPublicTransportBap :: (Show a) => ClientM a -> IO a
callPublicTransportBap = runClient' publicTransportBapClientEnv
mockBppUrl :: BaseUrl
mockBppUrl =
BaseUrl
{ baseUrlScheme = Http,
baseUrlHost = "localhost",
baseUrlPort = 8091,
baseUrlPath = ""
}
mockPublicTransportBppClientEnv :: ClientEnv
mockPublicTransportBppClientEnv = mkClientEnv defaultManager mockBppUrl
callMockPublicTransportBpp :: (Show a) => ClientM a -> IO a
callMockPublicTransportBpp = runClient' mockPublicTransportBppClientEnv
mockWaitTimeSeconds :: Seconds
mockWaitTimeSeconds = 2
{-# NOINLINE publicTransportBapEnv #-}
publicTransportBapEnv :: Bap.AppEnv
publicTransportBapEnv = unsafePerformIO $ do
appCfg <- readDhallConfig "../dhall-configs/dev/public-transport-bap.dhall"
let updLogCfg =
appCfg.loggerConfig{logToFile = False,
logToConsole = False
}
updAppCfg = appCfg{loggerConfig = updLogCfg}
Bap.buildAppEnv updAppCfg
{-# NOINLINE mockBppEnv #-}
mockBppEnv :: Bpp.AppEnv
mockBppEnv = unsafePerformIO $ do
appCfg <- readDhallConfig "../dhall-configs/dev/mock-public-transport-bpp.dhall"
let updLogCfg =
appCfg.loggerConfig{logToFile = False,
logToConsole = False
}
updAppCfg = appCfg{loggerConfig = updLogCfg}
Bpp.buildAppEnv updAppCfg
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/test/src/PublicTransport/Common.hs | haskell | |
|
|
132b011c530bb187c0472837ed4096413637329d68e65bad1c2c5b6254cfc967 | league/metaserv | status.ml |
type t =
    Continue              (* 1xx Informational *)
  | Ok                    (* 2xx Successful *)
  | Moved_permanently     (* 3xx *)
  | Bad_request           (* 4xx Client errors *)
  | Forbidden
  | Not_found
  | Request_timeout
  | Server_error          (* 5xx Server errors *)
  | Not_implemented
let code status = match status with
| Continue -> 100
| Ok -> 200
| Moved_permanently -> 301
| Bad_request -> 400
| Forbidden -> 403
| Not_found -> 404
| Request_timeout -> 408
| Server_error -> 500
| Not_implemented -> 501
let text status = match status with
Continue -> "Continue"
| Ok -> "Ok"
| Moved_permanently -> "Moved Permanently"
| Bad_request -> "Bad Request"
| Forbidden -> "Forbidden"
| Not_found -> "Not Found"
| Request_timeout -> "Request Timeout"
| Server_error -> "Server Error"
| Not_implemented -> "Not Implemented"
| null | https://raw.githubusercontent.com/league/metaserv/35e85832b3d6dfe4e15b8036653d4429a90644f5/server/status.ml | ocaml |
|
|
19c995d39df578b8d499de51d312a42e756c4e85e1f26654b801cff2fa3ac18c | well-typed/large-records | CodeGen.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fplugin=Data.Record.Plugin #-}
-- | Sanity checks of the TH code generation
module Test.Record.Sanity.CodeGen (tests) where
import Data.Record.Generic
import Data.Record.Plugin
import Test.Tasty
import Test.Tasty.HUnit
{-------------------------------------------------------------------------------
  Test record
  This is the example record we use throughout the comments in TH codegen.
-------------------------------------------------------------------------------}
{-# ANN type T largeRecord #-}
data T a b = MkT {
tInt :: Word
, tBool :: Bool
, tChar :: Char
, tA :: a
, tListB :: [b]
}
deriving (Eq, Ord, Show)
exampleT :: T () Float
exampleT = MkT 5 True 'c' () [3.14]
_silenceWarnings :: T a b -> ()
_silenceWarnings MkT{..} = const () $ (
tInt
, tBool
, tChar
, tA
, tListB
)
{-------------------------------------------------------------------------------
  Tests
  TODO: Should we have some other sanity checks here of the codegen?
  (They might exist in other parts of the test suite, perhaps we should
  reorganize things a bit.)
-------------------------------------------------------------------------------}
test_from_to_id :: Assertion
test_from_to_id =
assertEqual "from . to = id" expected actual
where
expected, actual :: T () Float
expected = exampleT
actual = (to . from) exampleT
{-------------------------------------------------------------------------------
All tests
-------------------------------------------------------------------------------}
tests :: TestTree
tests = testGroup "Test.Record.Sanity.CodeGen" [
testCase "from_to_id" test_from_to_id
]
| null | https://raw.githubusercontent.com/well-typed/large-records/fb983aa136c2602499c2421323bd52b6a54b7c9a/large-records/test/Test/Record/Sanity/CodeGen.hs | haskell |
|
8ecb30c5df24f60c04b9007f97a2149af7904f2b57acdf08aa1e7d035e59fc91 | antono/guix-debian | package-management.scm | ;;; GNU Guix --- Functional package management for GNU
;;; Copyright © 2013, 2014 < >
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix.  If not, see < / >.
(define-module (gnu packages package-management)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module ((guix licenses) #:select (gpl3+))
#:use-module (gnu packages)
#:use-module (gnu packages guile)
#:use-module ((gnu packages compression) #:select (bzip2 gzip))
#:use-module (gnu packages gnupg)
#:use-module (gnu packages sqlite)
#:use-module (gnu packages graphviz)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages autotools)
#:use-module (gnu packages gettext)
#:use-module (gnu packages texinfo))
(define-public guix-0.6
(package
(name "guix")
(version "0.6")
(source (origin
(method url-fetch)
(uri (string-append "ftp-"
version ".tar.gz"))
(sha256
(base32
"01xw51wizhsk827w4xp79k2b6dxjaviw04r6rbrb85qdxnwg6k9n"))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags (list
"--localstatedir=/var"
"--sysconfdir=/etc"
(string-append "--with-libgcrypt-prefix="
(assoc-ref %build-inputs
"libgcrypt")))
#:phases (alist-cons-before
'configure 'copy-bootstrap-guile
(lambda* (#:key system inputs #:allow-other-keys)
(define (copy arch)
(let ((guile (assoc-ref inputs
(string-append "boot-guile/"
arch)))
(target (string-append "gnu/packages/bootstrap/"
arch "-linux/"
"/guile-2.0.9.tar.xz")))
(copy-file guile target)))
(copy "i686")
(copy "x86_64")
(copy "mips64el")
#t)
%standard-phases)))
(inputs
(let ((boot-guile (lambda (arch hash)
(origin
(method url-fetch)
(uri (string-append
"/"
arch "-linux"
"/20131110/guile-2.0.9.tar.xz"))
(sha256 hash)))))
`(("bzip2" ,bzip2)
("gzip" ,gzip)
("sqlite" ,sqlite)
("libgcrypt" ,libgcrypt)
("guile" ,guile-2.0)
("pkg-config" ,pkg-config)
("boot-guile/i686"
,(boot-guile "i686"
(base32
"0im800m30abgh7msh331pcbjvb4n02smz5cfzf1srv0kpx3csmxp")))
("boot-guile/x86_64"
,(boot-guile "x86_64"
(base32
"1w2p5zyrglzzniqgvyn1b55vprfzhgk8vzbzkkbdgl5248si0yq3")))
("boot-guile/mips64el"
,(boot-guile "mips64el"
(base32
"0fzp93lvi0hn54acc0fpvhc7bvl0yc853k62l958cihk03q80ilr"))))))
(home-page "")
(synopsis "Functional package manager for installed software packages and versions")
(description
"GNU Guix is a functional package manager for the GNU system, and is
also a distribution thereof. It includes a virtual machine image. Besides
the usual package management features, it also supports transactional
upgrades and roll-backs, per-user profiles, and much more. It is based on the
Nix package manager.")
(license gpl3+)))
(define-public guix
;; Development version of Guix.
(let ((commit "0ae8c15"))
(package (inherit guix-0.6)
(version (string-append "0.6." commit))
(source (origin
(method git-fetch)
(uri (git-reference
(url "git")
(commit commit)
(recursive? #t)))
(sha256
(base32
"1y6mwzwsjdxbfibqypb55dix371rifhfz0bygfr8k868lcdsawic"))))
(arguments
(substitute-keyword-arguments (package-arguments guix-0.6)
((#:phases phases)
`(alist-cons-before
'configure 'bootstrap
(lambda _
;; Comment out `git' invocations, since 'git-fetch' provides us
;; with a checkout that includes sub-modules.
(substitute* "bootstrap"
(("git ")
"true git "))
;; Keep a list of the files already available under nix/...
(call-with-output-file "ls-R"
(lambda (port)
(for-each (lambda (file)
(format port "~a~%" file))
(find-files "nix" ""))))
;; ... and use that as a substitute to 'git ls-tree'.
(substitute* "nix/sync-with-upstream"
(("git ls-tree HEAD -- [[:graph:]]+")
"cat ls-R"))
;; Make sure 'msgmerge' can modify the PO files.
(for-each (lambda (po)
(chmod po #o666))
(find-files "." "\\.po$"))
(zero? (system* "./bootstrap")))
,phases))))
(native-inputs
`(("autoconf" ,(autoconf-wrapper))
("automake" ,automake)
("gettext" ,gnu-gettext)
("texinfo" ,texinfo)
("graphviz" ,graphviz)
,@(package-native-inputs guix-0.6))))))
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/gnu/packages/package-management.scm | scheme |
|
ddb201543c188f349a42a3b67158dd38045e50abce93e61af0acf82e1ec1c180 | target/row-types | OverridingTypeClassInstances.hs | # LANGUAGE DerivingStrategies #
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE UndecidableInstances #-}
module OverridingTypeClassInstances where
-- Note that `Data.Row.Aeson` is not exported by the row-types library and
-- currently lives in the src\aeson directory. You must put it in an
-- appropriate place and make sure to have `aeson` in your environment in order
-- to use this module.
import Data.Aeson (ToJSON(..), encode)
import Data.Coerce
import Data.Row
import Data.Row.Aeson ()
import qualified Data.Row.Records as Rec
import Data.Text (Text)
import qualified Data.Text as Text
import GHC.Generics (Generic)
newtype CharArray = CharArray { unCharArray :: String }
instance ToJSON CharArray where
toJSON = toJSON . map (:[]) . unCharArray
newtype Uptext = Uptext { unUptext :: Text }
instance ToJSON Uptext where
toJSON = toJSON . Text.toUpper . unUptext
data MyRec = MyRec
{ foo :: Int
, bar :: String
, baz :: Text
} deriving stock (Show, Eq, Generic)
v = MyRec 3 "french" "hens"
newtype Override a (mods :: Row *) = Override {unOverride :: a}
-- | A version of 'Override' that accepts first the value and then the mods type.
override :: a -> (forall mods. Override a mods)
override = Override
x = override v @Empty
y = override v @("bar" .== CharArray .+ "baz" .== Uptext)
main = putStrLn $ show $ encode y
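-- The generic instance below works by converting the record to its row-types
-- representation ('Rec.fromNative'), coercing the fields listed in @mods@ to
-- their overriding newtypes ('Rec.coerceRec'), and then encoding the resulting
-- row record, so those fields pick up the newtype's ToJSON instance.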
instance
( ρ ≈ Rec.NativeRow t
, ρ' ≈ mods .// ρ
, BiForall ρ ρ' Coercible
, Rec.FromNative t
, Forall ρ' ToJSON
) => ToJSON (Override t mods) where
toJSON = toJSON . Rec.coerceRec @ρ @ρ' . Rec.fromNative . unOverride
| null | https://raw.githubusercontent.com/target/row-types/0d6dc8a3d9a53a8f9a0783bb300e4d1e27b8c8cd/examples/OverridingTypeClassInstances.hs | haskell |
931f0981c5032c3a2914b49038bbb19486887870b7d5917f7a1b1a75ea96e7e3 | igorhvr/bedlam | ssax.scm | SISC module of SSAX code
;; These files define macros and I don't feel like messing with the
;; module system + macros yet.
(include "define-opt.scm")
(include "myenv-sisc.scm")
;; These define only functions
(include "look-for-str.scm")
(include "parser-error.scm")
(include "ascii.scm")
(include "input-parse.scm")
(include "util.scm")
(module ssax
(xml-token?
xml-token-kind xml-token-head
make-empty-attlist attlist-add
attlist-null?
attlist-remove-top
attlist->alist attlist-fold
equal_?
name-compare
make-xml-token
SSAX:largest-unres-name
SSAX:read-pi-body-as-string
SSAX:prefix-xml
SSAX:resolve-name
SSAX:warn
SSAX:skip-internal-dtd
SSAX:read-markup-token
SSAX:read-CDATA-body
SSAX:read-NCName
SSAX:read-QName
SSAX:read-char-ref
SSAX:read-attributes
SSAX:complete-start-tag
SSAX:read-external-ID
SSAX:read-char-data
SSAX:make-pi-parser SSAX:make-elem-parser SSAX:make-parser
SSAX:XML->SXML)
(import parser-error)
(import ascii)
(import input-parse)
(import util)
(import miscio)
(include "ssax-warn-vanilla.scm")
(include "SSAX-code.scm")
) | null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/sisc/sisc-contrib/pure-scheme/ssax/ssax.scm | scheme | These files define macros and I don't feel like messing with the
module system + macros yet.
These define only functions | SISC module of SSAX code
(include "define-opt.scm")
(include "myenv-sisc.scm")
(include "look-for-str.scm")
(include "parser-error.scm")
(include "ascii.scm")
(include "input-parse.scm")
(include "util.scm")
(module ssax
(xml-token?
xml-token-kind xml-token-head
make-empty-attlist attlist-add
attlist-null?
attlist-remove-top
attlist->alist attlist-fold
equal_?
name-compare
make-xml-token
SSAX:largest-unres-name
SSAX:read-pi-body-as-string
SSAX:prefix-xml
SSAX:resolve-name
SSAX:warn
SSAX:skip-internal-dtd
SSAX:read-markup-token
SSAX:read-CDATA-body
SSAX:read-NCName
SSAX:read-QName
SSAX:read-char-ref
SSAX:read-attributes
SSAX:complete-start-tag
SSAX:read-external-ID
SSAX:read-char-data
SSAX:make-pi-parser SSAX:make-elem-parser SSAX:make-parser
SSAX:XML->SXML)
(import parser-error)
(import ascii)
(import input-parse)
(import util)
(import miscio)
(include "ssax-warn-vanilla.scm")
(include "SSAX-code.scm")
) |
05252b2c0d741e8c19ff5955e5d372c5e406166e228a369fe5d595a2e26df0be | vikram/lisplibraries | corman.lisp | (in-package #:bordeaux-threads)
(eval-when (:compile-toplevel :load-toplevel :execute)
(require :threads))
;;; Thread Creation
(defmethod make-thread (function &key name)
(declare (ignore name))
(threads:create-thread function))
(defmethod current-thread ()
threads:*current-thread*)
;;; Introspection/debugging
(defmethod destroy-thread (thread)
(threads:terminate-thread thread))
(mark-supported)
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/ucw-boxset/dependencies/bordeaux-threads/src/corman.lisp | lisp |
08a930c4a5ba3522019e32d0468b15d529258c40ec63ea80b6fb08b5eda6ed67 | jumarko/clojure-experiments | 0434_sentence_searcher.clj | (ns clojure-experiments.purely-functional.puzzles.0434-sentence-searcher
"-tv-newsletter-434-re-combination-of-parts/
Solutions: "
(:require [clojure.string :as str]))
(defn sentences [document]
(mapv str/trim
(str/split document #"(?<=[.?!])")))
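;; e.g. (sentences "I like to write. Do you like to write?")
;; => ["I like to write." "Do you like to write?"]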
(defn contains-word? [sentence word]
(let [sentence-words (str/split (str/lower-case sentence) #"\s")]
((set sentence-words) (str/lower-case word))))
(defn search [document word]
(not-empty (filterv #(contains-word? % word)
(sentences document))))
(search "This is my document." "Hello")
;; => nil
(search "I like to write. Do you like to write?" "like")
;; => ["I like to write." "Do you like to write?"]
(search "This is not my document. It has No two sentences." "no")
;; => ["It has No two sentences."]
| null | https://raw.githubusercontent.com/jumarko/clojure-experiments/abc204d0a8c1b504610d91bc00e1aec21f8cec77/src/clojure_experiments/purely_functional/puzzles/0434_sentence_searcher.clj | clojure |
|
6db9fefb08beca2caff3f6cfff14e590dd3b8b589cf2ac94a968a7cbfa11b0af | inhabitedtype/ocaml-aws | getParameter.mli | open Types
type input = GetParameterRequest.t
type output = GetParameterResult.t
type error = Errors_internal.t
include
Aws.Call with type input := input and type output := output and type error := error
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/3bc554af7ae7ef9e2dcea44a1b72c9e687435fa9/libraries/ssm/lib/getParameter.mli | ocaml | open Types
type input = GetParameterRequest.t
type output = GetParameterResult.t
type error = Errors_internal.t
include
Aws.Call with type input := input and type output := output and type error := error
|
|
cc258bb8d9f14c7ca8b05b0e4209c21a298cbe60655d58b64066c15043bf0021 | fetburner/Coq2SML | subtac_pretyping.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014     *)
(*   \VV/  **************************************************************)
(*    //   *      This file is distributed under the terms of the       *)
(*         *       GNU Lesser General Public License Version 2.1        *)
(************************************************************************)
open Global
open Pp
open Util
open Names
open Sign
open Evd
open Term
open Termops
open Reductionops
open Environ
open Type_errors
open Typeops
open Libnames
open Classops
open List
open Recordops
open Evarutil
open Pretype_errors
open Glob_term
open Evarconv
open Pattern
open Subtac_coercion
open Subtac_utils
open Coqlib
open Printer
open Subtac_errors
open Eterm
module Pretyping = Subtac_pretyping_F.SubtacPretyping_F(Subtac_coercion.Coercion)
open Pretyping
let _ = Pretyping.allow_anonymous_refs := true
type recursion_info = {
arg_name: name;
arg_type: types; (* A *)
args_after : rel_context;
wf_relation: constr; (* R : A -> A -> Prop *)
wf_proof: constr; (* : well_founded R *)
f_type: types; (* f: A -> Set *)
f_fulltype: types; (* Type with argument and wf proof product first *)
}
let my_print_rec_info env t =
str "Name: " ++ Nameops.pr_name t.arg_name ++ spc () ++
str "Arg type: " ++ my_print_constr env t.arg_type ++ spc () ++
str "Wf relation: " ++ my_print_constr env t.wf_relation ++ spc () ++
str "Wf proof: " ++ my_print_constr env t.wf_proof ++ spc () ++
str "Abbreviated Type: " ++ my_print_constr env t.f_type ++ spc () ++
str "Full type: " ++ my_print_constr env t.f_fulltype
(* trace (str "pretype for " ++ (my_print_glob_constr env c) ++
   str " and " ++ my_print_tycon env ++
   str " in environment: " ++ my_print_env env); *)
let interp env isevars c tycon =
let j = pretype true tycon env isevars ([],[]) c in
let _ = isevars := Evarutil.nf_evar_map !isevars in
let evd = consider_remaining_unif_problems env !isevars in
(* let unevd = undefined_evars evd in *)
let unevd' = Typeclasses.resolve_typeclasses ~filter:Subtac_utils.no_goals_or_obligations ~split:true ~fail:true env evd in
let unevd' = Typeclasses.resolve_typeclasses ~filter:Typeclasses.all_evars ~split:true ~fail:false env unevd' in
let evm = unevd' in
isevars := unevd';
nf_evar evm j.uj_val, nf_evar evm j.uj_type
let find_with_index x l =
let rec aux i = function
(y, _, _) as t :: tl -> if x = y then i, t else aux (succ i) tl
| [] -> raise Not_found
in aux 0 l
open Vernacexpr
let coqintern_constr evd env : Topconstr.constr_expr -> Glob_term.glob_constr =
Constrintern.intern_constr evd env
let coqintern_type evd env : Topconstr.constr_expr -> Glob_term.glob_constr =
Constrintern.intern_type evd env
let env_with_binders env isevars l =
let rec aux ((env, rels) as acc) = function
Topconstr.LocalRawDef ((loc, name), def) :: tl ->
let rawdef = coqintern_constr !isevars env def in
let coqdef, deftyp = interp env isevars rawdef empty_tycon in
let reldecl = (name, Some coqdef, deftyp) in
aux (push_rel reldecl env, reldecl :: rels) tl
| Topconstr.LocalRawAssum (bl, k, typ) :: tl ->
let rawtyp = coqintern_type !isevars env typ in
let coqtyp, typtyp = interp env isevars rawtyp empty_tycon in
let acc =
List.fold_left (fun (env, rels) (loc, name) ->
let reldecl = (name, None, coqtyp) in
(push_rel reldecl env,
reldecl :: rels))
(env, rels) bl
in aux acc tl
| [] -> acc
in aux (env, []) l
let subtac_process ?(is_type=false) env isevars id bl c tycon =
let c = Topconstr.abstract_constr_expr c bl in
let tycon, imps =
match tycon with
None -> empty_tycon, None
| Some t ->
let t = Topconstr.prod_constr_expr t bl in
let t = coqintern_type !isevars env t in
let imps = Implicit_quantifiers.implicits_of_glob_constr t in
let coqt, ttyp = interp env isevars t empty_tycon in
mk_tycon coqt, Some imps
in
let c = coqintern_constr !isevars env c in
let imps = match imps with
| Some i -> i
| None -> Implicit_quantifiers.implicits_of_glob_constr ~with_products:is_type c
in
let coqc, ctyp = interp env isevars c tycon in
let evm = non_instanciated_map env isevars !isevars in
let ty = nf_evar !isevars (match tycon with Some (None, c) -> c | _ -> ctyp) in
evm, coqc, ty, imps
open Subtac_obligations
let subtac_proof kind hook env isevars id bl c tycon =
let evm, coqc, coqt, imps = subtac_process env isevars id bl c tycon in
let evm' = Subtac_utils.evars_of_term evm Evd.empty coqc in
let evm' = Subtac_utils.evars_of_term evm evm' coqt in
let evars, _, def, ty = Eterm.eterm_obligations env id !isevars evm' 0 coqc coqt in
add_definition id ~term:def ty ~implicits:imps ~kind ~hook evars
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/plugins/subtac/subtac_pretyping.ml | ocaml |
|
895c7bd1fe0356e8cb8b9c3a14676f5b338f8d04b2ee6c8194cbc9f085582a6f | cedlemo/OCaml-GI-ctypes-bindings-generator | Toggle_button_accessible_private.ml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Toggle_button_accessible_private"
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Toggle_button_accessible_private.ml | ocaml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Toggle_button_accessible_private"
|
|
f79000731557c8123297aaecd05d7055727237f9d053ae6ced559f145784a75f | jaspervdj/psqueues | LRUCache.hs | {-# LANGUAGE BangPatterns #-}
module Data.LRUCache
( LRUCache
, empty
, lookup
, lookupNoLRU
, insert
, delete
, deleteLRU
) where
import Data.Maybe (fromMaybe)
import qualified Data.IntPSQ as IntPSQ
import Prelude hiding (lookup)
type Tick = Int
type Size = Int
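-- The cache is backed by an 'IntPSQ.IntPSQ' mapping each key to its value,
-- with a tick as the priority; the entry with the smallest tick is the least
-- recently used one.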
data LRUCache v = LRUCache
{ lrucNextTick :: {-# UNPACK #-} !Tick
, lrucPsq :: !(IntPSQ.IntPSQ Tick v)
, lrucMaxSize :: {-# UNPACK #-} !Size
}
empty :: Int -> LRUCache v
empty maxSize = LRUCache 0 IntPSQ.empty maxSize
size :: LRUCache v -> Int
size = IntPSQ.size . lrucPsq
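-- | Look up a key. On a hit, the entry's priority is bumped to the current
-- tick, marking it as most recently used.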
lookup :: Int -> LRUCache v -> (Maybe v, LRUCache v)
lookup k cache@(LRUCache nextTick psq maxSize) =
case IntPSQ.alter tickleIfExists k psq of
(Nothing, _ ) -> (Nothing, cache)
(mbV@(Just _), psq') -> (mbV, increaseTick nextTick psq' maxSize)
where
tickleIfExists Nothing = (Nothing, Nothing)
tickleIfExists (Just (_, v)) = (Just v, Just (nextTick, v))
{-# INLINE lookupNoLRU #-}
lookupNoLRU :: Int -> LRUCache v -> Maybe v
lookupNoLRU k = fmap snd . IntPSQ.lookup k . lrucPsq
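-- Advance the tick counter. If it would overflow, renumber all entries
-- starting from 0 in least-recently-used order, preserving their relative
-- ordering.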
increaseTick :: Tick -> IntPSQ.IntPSQ Tick v -> Size -> LRUCache v
increaseTick tick psq maxSize
| tick < maxBound = LRUCache (tick + 1) psq maxSize
| otherwise = retick psq IntPSQ.empty 0
where
retick !oldPsq !newPsq !newTick = case IntPSQ.minViewWithKey oldPsq of
Nothing -> LRUCache newTick newPsq maxSize
Just ((k, _, v), oldPsq') ->
retick oldPsq' (IntPSQ.insert k newTick v newPsq) (newTick + 1)
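-- | Insert a key/value pair. If this pushes the cache over its maximum size,
-- the least recently used entry is evicted and returned.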
insert :: Int -> v -> LRUCache v -> (LRUCache v, Maybe (Int, v))
insert k v (LRUCache nextTick psq maxSize)
| IntPSQ.size psq' <= maxSize =
(increaseTick nextTick psq' maxSize, Nothing)
| otherwise =
fromMaybe (empty maxSize, Nothing) $ do
((k, _, v), psq'') <- IntPSQ.minViewWithKey psq'
return (increaseTick nextTick psq'' maxSize, Just (k, v))
where
psq' = IntPSQ.insert k nextTick v psq
-- | Evict the entry at the given key in the cache.
delete :: Int -> LRUCache v -> (LRUCache v, Maybe v)
delete k cache = case IntPSQ.deleteView k (lrucPsq cache) of
(_, Nothing) -> (cache, Nothing)
(psq', Just (_t, v)) -> (cache {lrucPsq = psq'}, Just v)
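-- | Evict and return the least recently used entry, if any.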
deleteLRU :: LRUCache v -> (LRUCache v, Maybe (Int, v))
deleteLRU (LRUCache nextTick psq maxSize) =
fromMaybe (empty maxSize, Nothing) $ do
((k, _, v), psq') <- IntPSQ.minViewWithKey psq
return (LRUCache nextTick psq' maxSize, Just (k, v))
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
{-
TODO (SM): make validity test work
-- Run a series of consistency checks on the structure inside of the map, return a list of
-- errors if any issues where encountered
valid :: (Eq v, Ord v)
=> LRUCache v -> Maybe String
valid c =
let w =
execWriter $ do
when (size c > lrucMaxSize c)
$ tell "Size over the limit\n"
allTicks <-
let traverse s minParent maxParent ticks t =
case t of
Leaf h (L lk lv lt ls) ->
(: ticks) <$> checkKey h lk lv lt ls minParent maxParent
Collision h ch -> do
-- tell "Found collision\n"
when (length (unSList ch) < 2) $
tell "Hash collision node with <2 children\n"
foldM (\xs (L lk lv lt ls) ->
(: xs) <$> checkKey h lk lv lt ls minParent maxParent
)
ticks
(unSList ch)
Node minA maxA minB maxB a b -> do
let mint = min minA minB
maxt = max maxA maxB
when (s + 1 > bitSize (undefined :: Word)) $
tell "Subkey shift too large during traversal\n"
when (mint < minParent || maxt > maxParent) $
tell "Node min/max tick outside of parent interval\n"
let used = foldr (\x@(t', _, _) u ->
case t' of Empty -> u; _ -> x : u
)
[]
$ (a, minA, maxA) : (b, minB, maxB) : []
when (length used == 0) $
tell "Node with only empty children\n"
when (length used == 1) $
case (\((x, _, _) : _) -> x) used of
Leaf _ _ -> tell "Node with single Leaf child\n"
Collision _ _ -> tell "Node with single Collision child\n"
_ -> return ()
foldM (\xs (c, mint', maxt') ->
traverse (s + 1) mint' maxt' xs c
)
ticks
used
Empty -> return ticks
lookupTest v mbV fName =
case mbV of
Nothing ->
tell $ fName ++ ": Can't lookup key found during traversal\n"
Just v' ->
when (v /= v') $ tell $
fName ++
": Lookup of key found during traversal yields " ++
"different value\n"
checkKey h k v tick s minParent maxParent = do
when (H.hash k /= h) $
tell "Hash / key mismatch\n"
when (tick >= cTick m) $
tell "Tick of leaf matches / exceeds current tick\n"
when (tick < minParent || tick > maxParent) $
tell "Leaf min/max tick outside of parent interval\n"
lookupTest v (snd $ lookup k m) "lookup"
lookupTest v (lookupNoLRU k m) "lookupNoLRU"
let (m', mbV') = delete k m
case mbV' of
Just (v', s') | v == v' && s == s' ->
when (size m' /= (size m - s)) $
tell "Deleting key did not reduce size correctly\n"
_ ->
tell "Delete returned wrong value\n"
return tick
in traverse 0 minBound maxBound [] $ cTrie m
when (length allTicks /= numEntries m) $
tell "Collection of all tick values used resulted in different numEntries than cSize\n"
unless (Data.List.null . filter (\x -> length x /= 1) . group . sort $ allTicks) $
tell "Duplicate tick value found\n"
let keysL = map (^. _1) $ toList m
allDeleted = foldl' (\r k -> fst (delete k r)) m keysL
when (length keysL /= numEntries m) $
tell "Length of toList does not match size\n"
unless (null allDeleted) $
tell "Deleting all elements does not result in an empty map\n"
unless (size allDeleted == 0) $
tell "Deleting all elements does not result in a zero size map\n"
let compacted = compactTicks m
when ((snd $ popOldest m) /= (snd $ popOldest compacted) ||
(snd $ popNewest m) /= (snd $ popNewest compacted)) $
tell "Tick compaction changes LRU\n"
when (sort (toList m) /= sort (toList compacted)) $
tell "Tick compaction changes map\n"
when ((fromIntegral $ cTick compacted) /= numEntries compacted) $
tell "Tick compaction did not reduce tick range to minimum\n"
in case w of
[] -> Nothing
xs -> Just xs
-}
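-- A hedged sketch, not part of the original file: once the validity checker
-- above compiles, it could back a QuickCheck property along these lines.
-- `empty`, `insert`, and `valid` are the names assumed from this file;
-- `foldl'` comes from Data.List and `isNothing` from Data.Maybe.
--
-- prop_insertKeepsValid :: [(Int, Int)] -> Bool
-- prop_insertKeepsValid kvs =
--   let cache = foldl' (\c (k, v) -> fst (insert k v c)) (empty 16) kvs
--   in  isNothing (valid cache)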
Make generation of arbitrary test work
arbitraryCache
:: (Eq k, Hashable k, QC.Arbitrary k, QC.Arbitrary v, Show k, Show v)
=> (v -> Size) -> QC.Gen (Cache k v)
arbitraryCache computeSize = QC.sized $ \n ->
fromList <$> QC.choose (1, maxSizeLimit)
<*> sequence [ do k <- QC.arbitrary
v <- QC.arbitrary
return (k, v, computeSize v)
| _ <- [1..n] ]
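-- Hedged sketch, not in the original file: the generator above can be
-- specialised by choosing a size function, e.g. counting every value as
-- size one.
-- unitSizeCache :: (Eq k, Hashable k, QC.Arbitrary k, QC.Arbitrary v, Show k, Show v)
--               => QC.Gen (Cache k v)
-- unitSizeCache = arbitraryCache (const 1)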
-- | Newtype with a 'QC.Arbitrary' instance that uses 'NFDataSize' as
-- size-computing function.
newtype NFDataSizeCache k v =
NFDataSizeCache {unNFDataSizeCache :: Cache k v}
instance (Show k, Show v) => Show (NFDataSizeCache k v) where
show (NFDataSizeCache cache) = show cache
instance (Eq k, Hashable k, QC.Arbitrary k, NFDataSize v, QC.Arbitrary v, Show k, Show v) =>
QC.Arbitrary (NFDataSizeCache k v) where
arbitrary = NFDataSizeCache <$> arbitraryCache rnfSize
-- | A 'Cache' with an arbitrary instance that will most likely evict
-- some objects at creation.
newtype NFDataSizeSmallCache k v =
NFDataSizeSmallCache {unNFDataSizeSmallCache :: Cache k v}
instance (Show k, Show v) => Show (NFDataSizeSmallCache k v) where
show (NFDataSizeSmallCache cache) = show cache
instance (Eq k, Hashable k, QC.Arbitrary k, NFDataSize v, QC.Arbitrary v, Show k, Show v) =>
QC.Arbitrary (NFDataSizeSmallCache k v) where
arbitrary = QC.sized $ \n -> do
kvs <- sequence [ do k <- QC.arbitrary
v <- QC.arbitrary
return (k, v, rnfSize v)
| _ <- [1..n] ]
-- If we don't have any values, we go the normal route.
maxSize <-
if n > 0
then let avgSize = sum [s | (_, _, s) <- kvs] `div` length kvs
in return $ min maxSizeLimit $ max 1 $ avgSize * 3
else QC.choose (1, maxSizeLimit)
return $ NFDataSizeSmallCache $ fromList maxSize kvs
-}
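-- Hedged usage sketch, not part of the original file: it exercises only the
-- Int-keyed LRUCache API (empty, insert, lookup) that appears in this
-- record's code, with a capacity-two cache so that the third insert evicts
-- the least recently used key.
lruDemo :: (Maybe String, Maybe (Int, String))
lruDemo =
  let c0            = empty 2
      (c1, _)       = insert 1 "one" c0
      (c2, _)       = insert 2 "two" c1
      (_, c3)       = lookup 1 c2          -- touching key 1 leaves key 2 as the LRU entry
      (c4, evicted) = insert 3 "three" c3  -- over capacity: key 2 ("two") is evicted
      (hit, _)      = lookup 1 c4          -- hit == Just "one"
  in  (hit, evicted)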
| null | https://raw.githubusercontent.com/jaspervdj/psqueues/f6c8e112a7e6bc3e75303d87473f72e3c34822ec/examples/LRUCache.hs | haskell | # LANGUAGE BangPatterns #
# UNPACK #
# UNPACK #
| Evict the entry at the given key in the cache.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Run a series of consistency checks on the structure inside of the map, return a list of
errors if any issues were encountered
tell "Found collision\n"
| Newtype with an 'QC.Arbitrary' instance that uses 'NFDataSize' as
size-computing function.
| A 'Cache' with an arbitrary instance that will most likely evict
some objects at creation.
If we don't have any values, we go the normal route. | module Data.LRUCache
( LRUCache
, empty
, lookup
, lookupNoLRU
, insert
, delete
, deleteLRU
) where
import Data.Maybe (fromMaybe)
import qualified Data.IntPSQ as IntPSQ
import Prelude hiding (lookup)
type Tick = Int
type Size = Int
data LRUCache v = LRUCache
, lrucPsq :: !(IntPSQ.IntPSQ Tick v)
}
empty :: Int -> LRUCache v
empty maxSize = LRUCache 0 IntPSQ.empty maxSize
size :: LRUCache v -> Int
size = IntPSQ.size . lrucPsq
lookup :: Int -> LRUCache v -> (Maybe v, LRUCache v)
lookup k cache@(LRUCache nextTick psq maxSize) =
case IntPSQ.alter tickleIfExists k psq of
(Nothing, _ ) -> (Nothing, cache)
(mbV@(Just _), psq') -> (mbV, increaseTick nextTick psq' maxSize)
where
tickleIfExists Nothing = (Nothing, Nothing)
tickleIfExists (Just (_, v)) = (Just v, Just (nextTick, v))
# INLINE lookupNoLRU #
lookupNoLRU :: Int -> LRUCache v -> Maybe v
lookupNoLRU k = fmap snd . IntPSQ.lookup k . lrucPsq
increaseTick :: Tick -> IntPSQ.IntPSQ Tick v -> Size -> LRUCache v
increaseTick tick psq maxSize
| tick < maxBound = LRUCache (tick + 1) psq maxSize
| otherwise = retick psq IntPSQ.empty 0
where
retick !oldPsq !newPsq !newTick = case IntPSQ.minViewWithKey oldPsq of
Nothing -> LRUCache newTick newPsq maxSize
Just ((k, _, v), oldPsq') ->
retick oldPsq' (IntPSQ.insert k newTick v newPsq) (newTick + 1)
insert :: Int -> v -> LRUCache v -> (LRUCache v, Maybe (Int, v))
insert k v (LRUCache nextTick psq maxSize)
| IntPSQ.size psq' <= maxSize =
(increaseTick nextTick psq' maxSize, Nothing)
| otherwise =
fromMaybe (empty maxSize, Nothing) $ do
((k, _, v), psq'') <- IntPSQ.minViewWithKey psq'
return (increaseTick nextTick psq'' maxSize, Just (k, v))
where
psq' = IntPSQ.insert k nextTick v psq
delete :: Int -> LRUCache v -> (LRUCache v, Maybe v)
delete k cache = case IntPSQ.deleteView k (lrucPsq cache) of
(_, Nothing) -> (cache, Nothing)
(psq', Just (_t, v)) -> (cache {lrucPsq = psq'}, Just v)
deleteLRU :: LRUCache v -> (LRUCache v, Maybe (Int, v))
deleteLRU (LRUCache nextTick psq maxSize) =
fromMaybe (empty maxSize, Nothing) $ do
((k, _, v), psq') <- IntPSQ.minViewWithKey psq
return (LRUCache nextTick psq' maxSize, Just (k, v))
TODO ( SM ): make validity test work
valid : : ( Eq v , v )
= > LRUCache v - > Maybe String
valid c =
let w =
execWriter $ do
when ( size c > lrucMaxSize c )
$ tell " over the limit\n "
allTicks < -
let traverse s ticks t =
case t of
Leaf h ( L lk lv lt ls ) - >
(: ticks ) < $ > checkKey h lk lv lt ls
Collision h ch - > do
when ( length ( unSList ch ) < 2 ) $
tell " Hash collision node with < 2 children\n "
foldM ( ( L lk lv lt ls ) - >
(: xs ) < $ > checkKey h lk lv lt ls
)
ticks
( unSList ch )
Node minA maxA minB maxB a b - > do
let mint = min minA minB
maxt = max maxA maxB
when ( s + 1 > bitSize ( undefined : : Word ) ) $
tell " shift too large during traversal\n "
when ( mint < minParent || ) $
tell " Node min / max tick outside of parent interval\n "
let used = foldr ( \x@(t ' , _ , _ ) u - >
case t ' of Empty - > u ; _ - > x : u
)
[ ]
$ ( a , minA , ) : ( b , minB , maxB ) : [ ]
when ( length used = = 0 ) $
tell " Node with only empty children\n "
when ( length used = = 1 ) $
case ( \((x , _ , _ ) : _ ) - > x ) used of
Leaf _ _ - > tell " Node with single Leaf child\n "
Collision _ _ - > tell " Node with single Collision child\n "
_ - > return ( )
foldM ( \xs ( c , mint ' , maxt ' ) - >
traverse ( s + 1 ) mint ' maxt ' xs c
)
ticks
used
Empty - > return ticks
lookupTest v mbV fName =
case mbV of
Nothing - >
tell $ fName + + " : Ca n't lookup key found during traversal\n "
Just v ' - >
when ( v /= v ' ) $ tell $
fName + +
" : Lookup of key found during traversal yields " + +
" different value\n "
checkKey h k v tick s = do
when ( H.hash k /= h ) $
tell " Hash / key mismatch\n "
when ( tick > = cTick m ) $
tell " Tick of leaf matches / exceeds current tick\n "
when ( tick < minParent || tick ) $
tell " Leaf min / max tick outside of parent interval\n "
lookupTest v ( snd $ lookup k m ) " lookup "
lookupTest v ( lookupNoLRU k m ) " lookupNoLRU "
let ( m ' , mbV ' ) = delete k m
case mbV ' of
Just ( v ' , s ' ) | v = = v ' & & s = = s ' - >
when ( size m ' /= ( size m - s ) ) $
tell " Deleting key did not reduce size correctly\n "
_ - >
tell " Delete returned wrong value\n "
return tick
in traverse 0 minBound maxBound [ ] $ cTrie m
when ( length allTicks /= numEntries m ) $
tell " Collection of all tick values used resulted in different numEntries than cSize\n "
unless ( Data.List.null . filter ( \x - > length x /= 1 ) . group . sort $ allTicks ) $
tell " Duplicate tick value found\n "
let keysL = map ( ^. _ 1 ) $ toList m
allDeleted = foldl ' ( \r k - > fst ( delete k r ) ) m keysL
when ( length keysL /= numEntries m ) $
tell " Length of toList does not match size\n "
unless ( null allDeleted ) $
tell " Deleting all elements does not result in an empty map\n "
unless ( size allDeleted = = 0 ) $
tell " Deleting all elements does not result in a zero size map\n "
let compacted = compactTicks m
when ( ( snd $ popOldest m ) /= ( snd $ popOldest compacted ) ||
( snd $ popNewest m ) /= ( snd compacted ) ) $
tell " Tick compaction changes LRU\n "
when ( sort ( toList m ) /= sort ( toList compacted ) ) $
tell " Tick compaction changes map\n "
when ( ( fromIntegral $ cTick compacted ) /= numEntries compacted ) $
tell " Tick compaction did not reduce tick range to minimum\n "
in case w of
[ ] - > Nothing
xs - > Just xs
valid :: (Eq v, Ord v)
=> LRUCache v -> Maybe String
valid c =
let w =
execWriter $ do
when (size c > lrucMaxSize c)
$ tell "Size over the limit\n"
allTicks <-
let traverse s minParent maxParent ticks t =
case t of
Leaf h (L lk lv lt ls) ->
(: ticks) <$> checkKey h lk lv lt ls minParent maxParent
Collision h ch -> do
when (length (unSList ch) < 2) $
tell "Hash collision node with <2 children\n"
foldM (\xs (L lk lv lt ls) ->
(: xs) <$> checkKey h lk lv lt ls minParent maxParent
)
ticks
(unSList ch)
Node minA maxA minB maxB a b -> do
let mint = min minA minB
maxt = max maxA maxB
when (s + 1 > bitSize (undefined :: Word)) $
tell "Subkey shift too large during traversal\n"
when (mint < minParent || maxt > maxParent) $
tell "Node min/max tick outside of parent interval\n"
let used = foldr (\x@(t', _, _) u ->
case t' of Empty -> u; _ -> x : u
)
[]
$ (a, minA, maxA) : (b, minB, maxB) : []
when (length used == 0) $
tell "Node with only empty children\n"
when (length used == 1) $
case (\((x, _, _) : _) -> x) used of
Leaf _ _ -> tell "Node with single Leaf child\n"
Collision _ _ -> tell "Node with single Collision child\n"
_ -> return ()
foldM (\xs (c, mint', maxt') ->
traverse (s + 1) mint' maxt' xs c
)
ticks
used
Empty -> return ticks
lookupTest v mbV fName =
case mbV of
Nothing ->
tell $ fName ++ ": Can't lookup key found during traversal\n"
Just v' ->
when (v /= v') $ tell $
fName ++
": Lookup of key found during traversal yields " ++
"different value\n"
checkKey h k v tick s minParent maxParent = do
when (H.hash k /= h) $
tell "Hash / key mismatch\n"
when (tick >= cTick m) $
tell "Tick of leaf matches / exceeds current tick\n"
when (tick < minParent || tick > maxParent) $
tell "Leaf min/max tick outside of parent interval\n"
lookupTest v (snd $ lookup k m) "lookup"
lookupTest v (lookupNoLRU k m) "lookupNoLRU"
let (m', mbV') = delete k m
case mbV' of
Just (v', s') | v == v' && s == s' ->
when (size m' /= (size m - s)) $
tell "Deleting key did not reduce size correctly\n"
_ ->
tell "Delete returned wrong value\n"
return tick
in traverse 0 minBound maxBound [] $ cTrie m
when (length allTicks /= numEntries m) $
tell "Collection of all tick values used resulted in different numEntries than cSize\n"
unless (Data.List.null . filter (\x -> length x /= 1) . group . sort $ allTicks) $
tell "Duplicate tick value found\n"
let keysL = map (^. _1) $ toList m
allDeleted = foldl' (\r k -> fst (delete k r)) m keysL
when (length keysL /= numEntries m) $
tell "Length of toList does not match size\n"
unless (null allDeleted) $
tell "Deleting all elements does not result in an empty map\n"
unless (size allDeleted == 0) $
tell "Deleting all elements does not result in a zero size map\n"
let compacted = compactTicks m
when ((snd $ popOldest m) /= (snd $ popOldest compacted) ||
(snd $ popNewest m) /= (snd $ popNewest compacted)) $
tell "Tick compaction changes LRU\n"
when (sort (toList m) /= sort (toList compacted)) $
tell "Tick compaction changes map\n"
when ((fromIntegral $ cTick compacted) /= numEntries compacted) $
tell "Tick compaction did not reduce tick range to minimum\n"
in case w of
[] -> Nothing
xs -> Just xs
-}
Make generation of arbitrary test work
arbitraryCache
: : ( Eq k , , QC.Arbitrary k , QC.Arbitrary v , Show k , Show v )
= > ( v - > Size ) - > QC.Gen ( Cache k v )
arbitraryCache computeSize = QC.sized $ \n - >
fromList < $ > QC.choose ( 1 , maxSizeLimit )
< * > sequence [ do k < - QC.arbitrary
v < - QC.arbitrary
return ( k , v , computeSize v )
| _ < - [ 1 .. n ] ]
newtype NFDataSizeCache k v =
NFDataSizeCache { unNFDataSizeCache : : Cache k v }
instance ( Show k , Show v ) = > Show ( NFDataSizeCache k v ) where
show ( NFDataSizeCache cache ) = show cache
instance ( Eq k , , QC.Arbitrary k , NFDataSize v , QC.Arbitrary v , Show k , Show v ) = >
QC.Arbitrary ( NFDataSizeCache k v ) where
arbitrary = NFDataSizeCache < $ > arbitraryCache rnfSize
newtype NFDataSizeSmallCache k v =
NFDataSizeSmallCache { unNFDataSizeSmallCache : : Cache k v }
instance ( Show k , Show v ) = > Show ( NFDataSizeSmallCache k v ) where
show ( NFDataSizeSmallCache cache ) = show cache
instance ( Eq k , , QC.Arbitrary k , NFDataSize v , QC.Arbitrary v , Show k , Show v ) = >
QC.Arbitrary ( NFDataSizeSmallCache k v ) where
arbitrary = QC.sized $ \n - > do
kvs < - sequence [ do k < - QC.arbitrary
v < - QC.arbitrary
return ( k , v , rnfSize v )
| _ < - [ 1 .. n ] ]
maxSize < -
if n > 0
then let avgSize = sum [ s | ( _ , _ , s ) < - kvs ] ` div ` length kvs
in return $ min maxSizeLimit $ max 1 $ avgSize * 3
else QC.choose ( 1 , maxSizeLimit )
return $ NFDataSizeSmallCache $ fromList maxSize kvs
arbitraryCache
:: (Eq k, Hashable k, QC.Arbitrary k, QC.Arbitrary v, Show k, Show v)
=> (v -> Size) -> QC.Gen (Cache k v)
arbitraryCache computeSize = QC.sized $ \n ->
fromList <$> QC.choose (1, maxSizeLimit)
<*> sequence [ do k <- QC.arbitrary
v <- QC.arbitrary
return (k, v, computeSize v)
| _ <- [1..n] ]
newtype NFDataSizeCache k v =
NFDataSizeCache {unNFDataSizeCache :: Cache k v}
instance (Show k, Show v) => Show (NFDataSizeCache k v) where
show (NFDataSizeCache cache) = show cache
instance (Eq k, Hashable k, QC.Arbitrary k, NFDataSize v, QC.Arbitrary v, Show k, Show v) =>
QC.Arbitrary (NFDataSizeCache k v) where
arbitrary = NFDataSizeCache <$> arbitraryCache rnfSize
newtype NFDataSizeSmallCache k v =
NFDataSizeSmallCache {unNFDataSizeSmallCache :: Cache k v}
instance (Show k, Show v) => Show (NFDataSizeSmallCache k v) where
show (NFDataSizeSmallCache cache) = show cache
instance (Eq k, Hashable k, QC.Arbitrary k, NFDataSize v, QC.Arbitrary v, Show k, Show v) =>
QC.Arbitrary (NFDataSizeSmallCache k v) where
arbitrary = QC.sized $ \n -> do
kvs <- sequence [ do k <- QC.arbitrary
v <- QC.arbitrary
return (k, v, rnfSize v)
| _ <- [1..n] ]
maxSize <-
if n > 0
then let avgSize = sum [s | (_, _, s) <- kvs] `div` length kvs
in return $ min maxSizeLimit $ max 1 $ avgSize * 3
else QC.choose (1, maxSizeLimit)
return $ NFDataSizeSmallCache $ fromList maxSize kvs
-}
|
5b0a47ed87c23b604400f632ac44ee608ab4bed26f707e03ff49a46ad9d7c19f | mdsebald/link_blox_app | block_common.erl | %%%
%%% @doc
%%% Block functionality common to all block types
%%%
%%% @end
%%%
-module(block_common).
-author ("Mark Sebald").
-include("block_state.hrl").
%% ====================================================================
%% API functions
%% ====================================================================
-export([
configs/4,
inputs/0,
outputs/0,
execute/2,
ok_to_execute/2,
initialize/1,
delete/1,
temp_delete/1,
update_execution_timer/3,
update_linked_inputs/2
]).
%%
%% Common Config Attributes
%% Config values are set once on block creation and never modified
%%
-spec configs(Name :: atom(),
Module :: module(),
Version :: string(),
Description :: string()) -> config_attribs().
configs(Name, Module, Version, Description) ->
[
{block_name, {Name}}, %| atom | N/A | read only |
{block_module, {Module}}, %| module_name | N/A | read only |
{version, {Version}}, %| string | N/A | read only |
{description, {Description}} %| string | "" | User defined string |
].
%%
%% Common Input Attributes,
%% Inputs may be set to a fixed value or linked to a block output value
%%
-spec inputs() -> input_attribs().
inputs() ->
[
% Block will execute as long as disable input is false/null
% When disable input is true, all block outputs set to null,
% and block status is set to disabled. Block will not execute,
% Default block to disabled, on create.
% Set disable input to false in create function if you want block
% to begin executing on create.
{disable, {true, {true}}}, %| bool | true | true, false |
% Link exec_in to block that will execute this block.
May only be linked to the ' exec_out ' block output value
i.e. implement Control Flow
{exec_in, {empty, {empty}}}, %| signal | empty | name of executor block |
% If > 0, execute block every 'exec_interval' milliseconds.
% Used to execute a block at fixed intervals
% instead of being executed via exec_out/exec_in link
% or executed on change of input values
{exec_interval, {0, {0}}} %| int | 0 | 0..max int |
% exec_in and exec_interval may both be used to execute the block.
% They are not mutually exclusive.
% If exec_in is linked to another block or exec_interval > 0,
% the block will no longer execute on change of input state
].
%%
%% Common Output Attributes
%% Block output values are modified upon block execution
%%
-spec outputs() -> output_attribs().
outputs() ->
[
% Blocks with the 'exec_in' input linked to this output
% will be executed by this block, each time this block is executed
% This output may only be linked to exec_in inputs
{exec_out, {false, []}}, %| signal | N/A | N/A |
{status, {created, []}}, %| enum | created | created, initialed, normal, ... |
| enum | empty | manual , input_cos , timer , ... |
{value, {null, []}} %| block type dependent | null | block type dependent |
].
%%
%% Common block initialization function
%%
-spec initialize(BlockDefn :: block_defn()) -> block_state().
initialize({Config, Inputs, Outputs}) ->
{BlockName, BlockModule} = config_utils:name_module(Config),
m_logger:debug(initializing_block, [BlockName]),
Initialize the private attributes values list here .
Timer reference attribute is common to all block types
Private = [ {timer_ref, {empty}} ],
% In case this block is set to execute via timer, initialize the timer
{_Status, Private1} = update_execution_timer(BlockName, Inputs, Private),
% Perform block type specific initialization
BlockModule:initialize({Config, Inputs, Outputs, Private1}).
%%
%% Common block execute function
%%
-spec execute(BlockState :: block_state(),
ExecMethod :: exec_method()) -> block_state().
execute(BlockState, ExecMethod) ->
{Config, Inputs, Outputs, Private} = BlockState,
BlockStatus = output_utils:get_status(Outputs),
% Check block status before executing
case ok_to_execute(BlockStatus, ExecMethod) of
true ->
{BlockName, BlockModule} = config_utils:name_module(Config),
case input_utils:get_boolean(Inputs, disable) of
{ok, true} -> % Block is disabled
{Config, Inputs, Outputs1, Private1} = BlockModule:execute(BlockState, disable),
% Record method of execution
{ok, Outputs2} = attrib_utils:set_values(Outputs1, [{exec_method, ExecMethod}, {exec_out, ExecMethod}]);
{ok, _NotDisabled} -> % Block is not disabled. Disable input is false or null
{Config, Inputs, Outputs1, Private1} = BlockModule:execute(BlockState, ExecMethod),
% Record method of execution
{ok, Outputs2} = attrib_utils:set_values(Outputs1, [{exec_method, ExecMethod}, {exec_out, ExecMethod}]);
{error, Reason} -> % Disable input value error
input_utils:log_error(Config, disable, Reason),
Outputs2 = output_utils:update_all_outputs(Outputs, null, input_err),
Private1 = Private
end,
{Status, Private2} = update_execution_timer(BlockName, Inputs, Private1),
if (Status /= normal) -> % Some kind error setting execution timer
Outputs3 = output_utils:update_all_outputs(Outputs2, null, Status);
true -> % Execution timer status is normal
Outputs3 = Outputs2
end,
% Update the block inputs linked to the block outputs
% that have just been updated (Data Flow)
update_blocks(Outputs, Outputs3),
Execute the blocks connected to the exec_out output value ( Control Flow )
update_execute(BlockName, Outputs3),
% Return the updated block state
{Config, Inputs, Outputs3, Private2};
false -> % Unable to execute block, in error state or disabled
% Just return the current block state, unchanged
{Config, Inputs, Outputs, Private}
end.
%%
%% Don't execute block if the block is configured incorrectly,
%% if there is a problem accessing the hardware,
%% or if an input is missing or in error
%%
-spec ok_to_execute(BlockStatus :: block_status(),
ExecMethod :: exec_method()) -> boolean().
ok_to_execute(BlockStatus, ExecMethod) ->
case ExecMethod of
% always execute the block on a manual command
manual -> true;
% if input value changed, allow block to execute if normal, disabled, or some kind of input error
% The changed input value could have enabled, or otherwise fixed the input value error
i.e. execute if BlockStatus a member of this list
input_cos ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, disabled, timeout]);
If block received a message from a subsystem , execute the block if in one of these states :
message ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, disabled, timeout]);
If block execution timer expired , execute the block if in one of these states :
timer ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, timeout]);
If block executed via exec_out , execute the block if in one of these states :
exec_out ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, timeout]);
_ ->
For any other execution method , do n't execute if BlockStatus is a member of this list
not lists:member(BlockStatus, [input_err, config_err, proc_err, no_input])
end.
%%
%% Update the block execution timer
%%
-spec update_execution_timer(BlockName :: block_name(),
Inputs :: input_attribs(),
Private :: private_attribs()) -> {atom(), private_attribs()}.
update_execution_timer(BlockName, Inputs, Private) ->
{ok, ExecuteInterval} = attrib_utils:get_value(Inputs, exec_interval),
{ok, TimerRef} = attrib_utils:get_value(Private, timer_ref),
% Cancel block execution timer, if it is set
cancel_timer(TimerRef),
% Check validity of ExecuteInterval input value
if is_integer(ExecuteInterval) ->
if (ExecuteInterval == 0) ->
Status = normal,
NewTimerRef = empty;
true ->
if (ExecuteInterval > 0) ->
NewTimerRef = set_timer(BlockName, ExecuteInterval),
Status = normal;
true -> % Execute Interval input value is negative
Status = input_err,
NewTimerRef = empty,
m_logger:error(negative_exec_interval_value, [BlockName, ExecuteInterval])
end
end;
true -> % Execute Interval input value is not an integer
Status = input_err,
NewTimerRef = empty,
m_logger:error(invalid_exec_interval_value, [BlockName, ExecuteInterval])
end,
{ok, Private1} = attrib_utils:set_value(Private, timer_ref, NewTimerRef),
{Status, Private1}.
%%
%% Cancel block execution timer, if the timer is set
%%
-spec cancel_timer(TimerRef :: reference()) -> integer() | false.
cancel_timer(TimerRef) ->
if (TimerRef /= empty) -> erlang:cancel_timer(TimerRef);
true -> false
end.
%%
%% Set timer to execute the block on expiration
%%
-spec set_timer(BlockName :: block_name(),
ExecuteInterval :: pos_integer()) ->
reference().
set_timer(BlockName, ExecuteInterval) ->
erlang:send_after(ExecuteInterval, BlockName, timer_execute).
%%
%% Send an update message to each block linked to any output value that has changed
This assumes CurrentOutputs and NewOutputs , have the same ValueNames and order for all outputs
%%
-spec update_blocks(CurrentOutputs :: output_attribs(),
NewOutputs :: output_attribs()) -> ok.
update_blocks(CurrentOutputs, NewOutputs) ->
update_blocks(CurrentOutputs, NewOutputs, []).
% No update messages to send
update_blocks([], [], []) ->
ok;
% After building the messages, send an update message to each linked block
update_blocks([], [], BlockUpdateMsgs) ->
lists:foreach(fun({ToBlockName, NewInputValues}) ->
block_server:update(ToBlockName, NewInputValues)
end,
BlockUpdateMsgs),
ok;
%
% Build update messages for each block in the links for the output values
% that have been modified
BlockUpdateMsgs = [ { BlockName1 , [ { ValueId11 , NewValue11 } , { ValueId12 , NewValue12 } , ... ] } ,
% {BlockName2, [{ValueId21, NewValue21}, {ValueId22, NewValue22}, ...]},
% ...]
%
update_blocks([CurrentOutput | CurrentOutputs],
[NewOutput | NewOutputs], BlockUpdateMsgs)->
case CurrentOutput of
% Non-array value output
{ValueName, {CurrentValue, Links}} ->
{ValueName, {NewValue, Links}} = NewOutput,
% For each output value that changed, add the updated value to
% The messages that will be sent to the blocks linked to this output
% Don't check the 'exec_out' output, that is for control flow execution
if (CurrentValue /= NewValue) andalso (ValueName /= exec_out) ->
NewBlockUpdateMsgs =
lists:foldl(fun(Link, AccUpdateMsgs) ->
add_update_msg(AccUpdateMsgs, Link, NewValue)
end,
BlockUpdateMsgs,
Links);
true ->
% else do nothing
NewBlockUpdateMsgs = BlockUpdateMsgs
end;
% Array value output
{ValueName, CurrentArrayValues} ->
{ValueName, NewArrayValues} = NewOutput,
NewBlockUpdateMsgs = check_array_values(BlockUpdateMsgs, ValueName,
CurrentArrayValues, NewArrayValues)
end,
update_blocks(CurrentOutputs, NewOutputs, NewBlockUpdateMsgs).
%%
%% Check the values in an array of output values
%% Loop through each value in the array. If value has changed,
Examine each Link , and add to the current BlockUpdateMsgs
%%
-spec check_array_values(BlockUpdateMsgs :: list(),
ValueName :: value_name(),
CurrentArrayValues :: list(),
NewArrayValues :: list()) -> list().
check_array_values(BlockUpdateMsgs, _ValueName, [], []) ->
BlockUpdateMsgs;
check_array_values(BlockUpdateMsgs, ValueName,
[{CurrentValue, Links} | CurrentArrayValues],
[{NewValue, Links} | NewArrayValues]) ->
if CurrentValue /= NewValue ->
NewBlockUpdateMsgs =
lists:foldl(fun(Link, AccUpdateMsgs) ->
add_update_msg(AccUpdateMsgs, Link, NewValue)
end,
BlockUpdateMsgs,
Links);
true ->
% else do nothing
NewBlockUpdateMsgs = BlockUpdateMsgs
end,
check_array_values(NewBlockUpdateMsgs, ValueName,
CurrentArrayValues, NewArrayValues).
%%
Add the new ValueId / Value to the block update message for the destination block
%% Add a new messsage to the list of messages,
%% if there is not a message already created for the destination block
%%
-spec add_update_msg(BlockUpdateMsgs :: list(),
Link :: link_def(),
NewValue :: value()) -> list().
add_update_msg(BlockUpdateMsgs, {DestBlock, ValueId}, NewValue) ->
case lists:keyfind(DestBlock, 1, BlockUpdateMsgs) of
% List already contains a message for this destination block,
Add the ValueId / Value tuple to this block 's list of values to update
If not already in list of ValueId / Value tuples
{DestBlock, ExistingValues} ->
case lists:keyfind(ValueId, 1, ExistingValues) of
false ->
NewUpdateMsg = {DestBlock, [{ValueId, NewValue} | ExistingValues]},
lists:keyreplace(DestBlock, 1, BlockUpdateMsgs, NewUpdateMsg);
List of ValueId / Value tuples already contains this ValueId
% Return the list of messages unchanged.
BlockUpdateMsgs
end;
false -> % No message created for this destination block yet.
Just add destination block and list of one ValueId / Value tuple .
[{DestBlock, [{ValueId, NewValue}]} | BlockUpdateMsgs]
end.
%%
%% update each block input value, in the list of links
%%
-spec update_linked_inputs(NewValue :: value(),
Links :: link_defs()) -> ok.
update_linked_inputs(NewValue, Links) ->
lists:foreach(fun(Link) ->
case Link of
{ToBlockName, ToValueId} ->
block_server:update(ToBlockName, [{ToValueId, NewValue}]);
InvalidLink ->
m_logger:error(err_unrecognized_link, [InvalidLink])
end
end,
Links).
%%
%% Send an exec_out_execute message to each block connected to the 'exec_out' output of this block
%% This will implement control flow execution, versus data flow done in the update_blocks function.
%%
-spec update_execute(BlockName :: block_name(),
Outputs :: output_attribs()) -> ok.
update_execute(BlockName, Outputs) ->
% exec_out attribute is never an array,
% Don't need to worry about arrays of values or indexes here
{ok, {exec_out, {_Value, Links}}} = attrib_utils:get_attribute(Outputs, exec_out),
lists:foreach(fun(Link) ->
case Link of
{ToBlockName, _ToValueId} ->
block_server:execute(ToBlockName, BlockName, exec_out);
InvalidLink ->
m_logger:error(err_unrecognized_link, [InvalidLink])
end
end,
Links).
%%
%% Common block delete function,
%% Return the updated block state, in case calling function wants to reuse it
%%
-spec delete(BlockState :: block_state()) -> block_defn().
delete({Config, Inputs, Outputs, Private}) ->
{BlockName, BlockModule} = config_utils:name_module(Config),
% Cancel execution timer if it exists
case attrib_utils:get_value(Private, timer_ref) of
{ok, empty} -> ok;
{ok, TimerRef} -> erlang:cancel_timer(TimerRef);
{error, _Reason} -> ok % Don't care if timer_ref doesn't exist
end,
% Tell all other blocks on this node to remove any links to this block
link_utils:unlink_block(BlockName),
% Set all input values to default value
DefaultInputs = input_utils:set_to_defaults(Inputs),
% Set all output values of this block, including status, to 'empty'
EmptyOutputs = output_utils:update_all_outputs(Outputs, empty, empty),
% Update the block inputs linked to this block's outputs
% This will set the input value of any block
% linked to this deleted block, to 'empty'.
update_blocks(Outputs, EmptyOutputs),
% Execute the blocks connected to the exec_out output value
of this block , one last time .
update_execute(BlockName, EmptyOutputs),
% Perform block type specific delete actions
BlockModule:delete({Config, DefaultInputs, EmptyOutputs, Private}).
%%
%% Temporary delete function,
%% Go through the motions of deleting a block without unlinking from other blocks
%% Used when a block needs to be reinitialized
%%
-spec temp_delete(BlockState :: block_state()) -> block_defn().
temp_delete({Config, Inputs, Outputs, Private}) ->
% Cancel execution timer if it exists
case attrib_utils:get_value(Private, timer_ref) of
{ok, empty} -> ok;
{ok, TimerRef} -> erlang:cancel_timer(TimerRef);
{error, _Reason} -> ok % Don't care if timer_ref doesn't exist
end,
{_BlockName, BlockModule} = config_utils:name_module(Config),
% Perform block type specific delete actions
BlockModule:delete({Config, Inputs, Outputs, Private}).
%% ====================================================================
Internal functions
%% ====================================================================
| null | https://raw.githubusercontent.com/mdsebald/link_blox_app/64034fa5854759ad16625b93e3dde65a9c65f615/src/block_common.erl | erlang |
@doc
Block functionality common to all block types
@end
====================================================================
API functions
====================================================================
Common Config Attributes
Config values are set once on block creation and never modified
| atom | N/A | read only |
| module_name | N/A | read only |
| string | N/A | read only |
| string | "" | User defined string |
Common Input Attributes,
Inputs may be set to a fixed value or linked to a block output value
Block will execute as long as disable input is false/null
When disable input is true, all block outputs set to null,
and block status is set to disabled. Block will not execute,
Default block to disabled, on create.
Set disable input to false in create function if you want block
to begin executing on create.
| bool | true | true, false |
Link exec_in to block that will execute this block.
| signal | empty | name of executor block |
If > 0, execute block every 'exec_interval' milliseconds.
Used to execute a block at fixed intervals
instead of being executed via exec_out/exec_in link
or executed on change of input values
| int | 0 | 0..max int |
exec_in and exec_interval may both be used to execute the block.
They are not mutually exclusive.
If exec_in is linked to another block or exec_interval > 0,
the block will no longer execute on change of input state
Common Output Attributes
Block output values are modified upon block execution
Blocks with the 'exec_in' input linked to this output
will be executed by this block, each time this block is executed
This output may only be linked to exec_in inputs
| signal | N/A | N/A |
| enum | created | created, initialed, normal, ... |
| block type dependent | null | block type dependent |
Common block initialization function
In case this block is set to execute via timer, initialize the timer
Perform block type specific initialization
Common block execute function
Check block status before executing
Block is disabled
Record method of execution
Block is not disabled. Disable input is false or null
Record method of execution
Disable input value error
Some kind error setting execution timer
Execution timer status is normal
Update the block inputs linked to the block outputs
that have just been updated (Data Flow)
Return the updated block state
Unable to execute block, in error state or disabled
Just return the current block state, unchanged
Don't execute block if the block is configured incorrectly,
if there is a problem accessing the hardware,
or if an input is missing or in error
always execute the block on a manual command
if input value changed, allow block to execute if normal, disabled, or some kind of input error
The changed input value could have enabled, or otherwise fixed the input value error
Update the block execution timer
Cancel block execution timer, if it is set
Check validity of ExecuteInterval input value
Execute Interval input value is negative
Execute Interval input value is not an integer
Cancel block execution timer, if the timer is set
Set timer to execute the block on expiration
Send an update message to each block linked to any output value that has changed
No update messages to send
After building the messages, send an update message to each linked block
Build update messages for each block in the links for the output values
that have been modified
{BlockName2, [{ValueId21, NewValue21}, {ValueId22, NewValue22}, ...]},
...]
Non-array value output
For each output value that changed, add the updated value to
The messages that will be sent to the blocks linked to this output
Don't check the 'exec_out' output, that is for control flow execution
else do nothing
Array value output
Check the values in an array of output values
Loop through each value in the array. If value has changed,
else do nothing
Add a new messsage to the list of messages,
if there is not a message already created for the destination block
List already contains a message for this destination block,
Return the list of messages unchanged.
No message created for this destination block yet.
update each block input value, in the list of links
Send an exec_out_execute message to each block connected to the 'exec_out' output of this block
This will implement control flow execution, versus data flow done in the update_blocks function.
exec_out attribute is never an array,
Don't need to worry about arrays of values or indexes here
Common block delete function,
Return the updated block state, in case calling function wants to reuse it
Cancel execution timer if it exists
Don't care if timer_ref doesn't exist
Tell all other blocks on this node to remove any links to this block
Set all input values to default value
Set all output values of this block, including status, to 'empty'
Update the block inputs linked to this block's outputs
This will set the input value of any block
linked to this deleted block, to 'empty'.
Execute the blocks connected to the exec_out output value
Perform block type specific delete actions
Temporary delete function,
Go through the motions of deleting a block without unlinking from other blocks
Used when a block needs to be reinitialized
Cancel execution timer if it exists
Don't care if timer_ref doesn't exist
Perform block type specific delete actions
====================================================================
==================================================================== |
-module(block_common).
-author ("Mark Sebald").
-include("block_state.hrl").
-export([
configs/4,
inputs/0,
outputs/0,
execute/2,
ok_to_execute/2,
initialize/1,
delete/1,
temp_delete/1,
update_execution_timer/3,
update_linked_inputs/2
]).
-spec configs(Name :: atom(),
Module :: module(),
Version :: string(),
Description :: string()) -> config_attribs().
configs(Name, Module, Version, Description) ->
[
].
-spec inputs() -> input_attribs().
inputs() ->
[
May only be linked to the ' exec_out ' block output value
i.e. implement Control Flow
].
-spec outputs() -> output_attribs().
outputs() ->
[
| enum | empty | manual , input_cos , timer , ... |
].
-spec initialize(BlockDefn :: block_defn()) -> block_state().
initialize({Config, Inputs, Outputs}) ->
{BlockName, BlockModule} = config_utils:name_module(Config),
m_logger:debug(initializing_block, [BlockName]),
Initialize the private attributes values list here .
Timer reference attribute is common to all block types
Private = [ {timer_ref, {empty}} ],
{_Status, Private1} = update_execution_timer(BlockName, Inputs, Private),
BlockModule:initialize({Config, Inputs, Outputs, Private1}).
-spec execute(BlockState :: block_state(),
ExecMethod :: exec_method()) -> block_state().
execute(BlockState, ExecMethod) ->
{Config, Inputs, Outputs, Private} = BlockState,
BlockStatus = output_utils:get_status(Outputs),
case ok_to_execute(BlockStatus, ExecMethod) of
true ->
{BlockName, BlockModule} = config_utils:name_module(Config),
case input_utils:get_boolean(Inputs, disable) of
{Config, Inputs, Outputs1, Private1} = BlockModule:execute(BlockState, disable),
{ok, Outputs2} = attrib_utils:set_values(Outputs1, [{exec_method, ExecMethod}, {exec_out, ExecMethod}]);
{Config, Inputs, Outputs1, Private1} = BlockModule:execute(BlockState, ExecMethod),
{ok, Outputs2} = attrib_utils:set_values(Outputs1, [{exec_method, ExecMethod}, {exec_out, ExecMethod}]);
input_utils:log_error(Config, disable, Reason),
Outputs2 = output_utils:update_all_outputs(Outputs, null, input_err),
Private1 = Private
end,
{Status, Private2} = update_execution_timer(BlockName, Inputs, Private1),
Outputs3 = output_utils:update_all_outputs(Outputs2, null, Status);
Outputs3 = Outputs2
end,
update_blocks(Outputs, Outputs3),
Execute the blocks connected to the exec_out output value ( Control Flow )
update_execute(BlockName, Outputs3),
{Config, Inputs, Outputs3, Private2};
{Config, Inputs, Outputs, Private}
end.
-spec ok_to_execute(BlockStatus :: block_status(),
ExecMethod :: exec_method()) -> boolean().
ok_to_execute(BlockStatus, ExecMethod) ->
case ExecMethod of
manual -> true;
i.e. execute if BlockStatus a member of this list
input_cos ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, disabled, timeout]);
If block received a message from a subsystem , execute the block if in one of these states :
message ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, disabled, timeout]);
If block execution timer expired , execute the block if in one of these states :
timer ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, timeout]);
If block executed via exec_out , execute the block if in one of these states :
exec_out ->
lists:member(BlockStatus, [input_err, no_input, initialed, normal, timeout]);
_ ->
For any other execution method , do n't execute if BlockStatus is a member of this list
not lists:member(BlockStatus, [input_err, config_err, proc_err, no_input])
end.
-spec update_execution_timer(BlockName :: block_name(),
Inputs :: input_attribs(),
Private :: private_attribs()) -> {atom(), private_attribs()}.
update_execution_timer(BlockName, Inputs, Private) ->
{ok, ExecuteInterval} = attrib_utils:get_value(Inputs, exec_interval),
{ok, TimerRef} = attrib_utils:get_value(Private, timer_ref),
cancel_timer(TimerRef),
if is_integer(ExecuteInterval) ->
if (ExecuteInterval == 0) ->
Status = normal,
NewTimerRef = empty;
true ->
if (ExecuteInterval > 0) ->
NewTimerRef = set_timer(BlockName, ExecuteInterval),
Status = normal;
Status = input_err,
NewTimerRef = empty,
m_logger:error(negative_exec_interval_value, [BlockName, ExecuteInterval])
end
end;
Status = input_err,
NewTimerRef = empty,
m_logger:error(invalid_exec_interval_value, [BlockName, ExecuteInterval])
end,
{ok, Private1} = attrib_utils:set_value(Private, timer_ref, NewTimerRef),
{Status, Private1}.
-spec cancel_timer(TimerRef :: reference()) -> integer() | false.
cancel_timer(TimerRef) ->
if (TimerRef /= empty) -> erlang:cancel_timer(TimerRef);
true -> false
end.
-spec set_timer(BlockName :: block_name(),
ExecuteInterval :: pos_integer()) ->
reference().
set_timer(BlockName, ExecuteInterval) ->
erlang:send_after(ExecuteInterval, BlockName, timer_execute).
This assumes CurrentOutputs and NewOutputs , have the same ValueNames and order for all outputs
-spec update_blocks(CurrentOutputs :: output_attribs(),
NewOutputs :: output_attribs()) -> ok.
update_blocks(CurrentOutputs, NewOutputs) ->
update_blocks(CurrentOutputs, NewOutputs, []).
update_blocks([], [], []) ->
ok;
update_blocks([], [], BlockUpdateMsgs) ->
lists:foreach(fun({ToBlockName, NewInputValues}) ->
block_server:update(ToBlockName, NewInputValues)
end,
BlockUpdateMsgs),
ok;
BlockUpdateMsgs = [ { BlockName1 , [ { ValueId11 , NewValue11 } , { ValueId12 , NewValue12 } , ... ] } ,
update_blocks([CurrentOutput | CurrentOutputs],
[NewOutput | NewOutputs], BlockUpdateMsgs)->
case CurrentOutput of
{ValueName, {CurrentValue, Links}} ->
{ValueName, {NewValue, Links}} = NewOutput,
if (CurrentValue /= NewValue) andalso (ValueName /= exec_out) ->
NewBlockUpdateMsgs =
lists:foldl(fun(Link, AccUpdateMsgs) ->
add_update_msg(AccUpdateMsgs, Link, NewValue)
end,
BlockUpdateMsgs,
Links);
true ->
NewBlockUpdateMsgs = BlockUpdateMsgs
end;
{ValueName, CurrentArrayValues} ->
{ValueName, NewArrayValues} = NewOutput,
NewBlockUpdateMsgs = check_array_values(BlockUpdateMsgs, ValueName,
CurrentArrayValues, NewArrayValues)
end,
update_blocks(CurrentOutputs, NewOutputs, NewBlockUpdateMsgs).
Examine each Link , and add to the current BlockUpdateMsgs
-spec check_array_values(BlockUpdateMsgs :: list(),
ValueName :: value_name(),
CurrentArrayValues :: list(),
NewArrayValues :: list()) -> list().
check_array_values(BlockUpdateMsgs, _ValueName, [], []) ->
BlockUpdateMsgs;
check_array_values(BlockUpdateMsgs, ValueName,
[{CurrentValue, Links} | CurrentArrayValues],
[{NewValue, Links} | NewArrayValues]) ->
if CurrentValue /= NewValue ->
NewBlockUpdateMsgs =
lists:foldl(fun(Link, AccUpdateMsgs) ->
add_update_msg(AccUpdateMsgs, Link, NewValue)
end,
BlockUpdateMsgs,
Links);
true ->
NewBlockUpdateMsgs = BlockUpdateMsgs
end,
check_array_values(NewBlockUpdateMsgs, ValueName,
CurrentArrayValues, NewArrayValues).
Add the new ValueId / Value to the block update message for the destination block
-spec add_update_msg(BlockUpdateMsgs :: list(),
Link :: link_def(),
NewValue :: value()) -> list().
add_update_msg(BlockUpdateMsgs, {DestBlock, ValueId}, NewValue) ->
case lists:keyfind(DestBlock, 1, BlockUpdateMsgs) of
Add the ValueId / Value tuple to this block 's list of values to update
If not already in list of ValueId / Value tuples
{DestBlock, ExistingValues} ->
case lists:keyfind(ValueId, 1, ExistingValues) of
false ->
NewUpdateMsg = {DestBlock, [{ValueId, NewValue} | ExistingValues]},
lists:keyreplace(DestBlock, 1, BlockUpdateMsgs, NewUpdateMsg);
List of ValueId / Value tuples already contains this ValueId
BlockUpdateMsgs
end;
Just add destination block and list of one ValueId / Value tuple .
[{DestBlock, [{ValueId, NewValue}]} | BlockUpdateMsgs]
end.
-spec update_linked_inputs(NewValue :: value(),
Links :: link_defs()) -> ok.
update_linked_inputs(NewValue, Links) ->
lists:foreach(fun(Link) ->
case Link of
{ToBlockName, ToValueId} ->
block_server:update(ToBlockName, [{ToValueId, NewValue}]);
InvalidLink ->
m_logger:error(err_unrecognized_link, [InvalidLink])
end
end,
Links).
-spec update_execute(BlockName :: block_name(),
Outputs :: output_attribs()) -> ok.
update_execute(BlockName, Outputs) ->
{ok, {exec_out, {_Value, Links}}} = attrib_utils:get_attribute(Outputs, exec_out),
lists:foreach(fun(Link) ->
case Link of
{ToBlockName, _ToValueId} ->
block_server:execute(ToBlockName, BlockName, exec_out);
InvalidLink ->
m_logger:error(err_unrecognized_link, [InvalidLink])
end
end,
Links).
-spec delete(BlockState :: block_state()) -> block_defn().
delete({Config, Inputs, Outputs, Private}) ->
{BlockName, BlockModule} = config_utils:name_module(Config),
case attrib_utils:get_value(Private, timer_ref) of
{ok, empty} -> ok;
{ok, TimerRef} -> erlang:cancel_timer(TimerRef);
end,
link_utils:unlink_block(BlockName),
DefaultInputs = input_utils:set_to_defaults(Inputs),
EmptyOutputs = output_utils:update_all_outputs(Outputs, empty, empty),
update_blocks(Outputs, EmptyOutputs),
of this block , one last time .
update_execute(BlockName, EmptyOutputs),
BlockModule:delete({Config, DefaultInputs, EmptyOutputs, Private}).
-spec temp_delete(BlockState :: block_state()) -> block_defn().
temp_delete({Config, Inputs, Outputs, Private}) ->
case attrib_utils:get_value(Private, timer_ref) of
{ok, empty} -> ok;
{ok, TimerRef} -> erlang:cancel_timer(TimerRef);
end,
{_BlockName, BlockModule} = config_utils:name_module(Config),
BlockModule:delete({Config, Inputs, Outputs, Private}).
Internal functions
|
cccc22c26cb6e06a24a4f46d815097b6e85267727b86046a3f32f1b159ae953d | emina/rosette | base-solver.rkt | #lang racket
(require "server.rkt" "cmd.rkt" "env.rkt"
"../solution.rkt"
(only-in racket [remove-duplicates unique])
(only-in "smtlib2.rkt" reset set-option check-sat get-model get-unsat-core push pop set-logic)
(only-in "../../base/core/term.rkt" term term? term-type)
(only-in "../../base/core/bool.rkt" @boolean?)
(only-in "../../base/core/bitvector.rkt" bitvector? bv?)
(only-in "../../base/core/real.rkt" @integer? @real?)
(only-in "../../base/core/reporter.rkt" current-reporter))
(provide (all-defined-out))
(define (find-solver binary base-path [user-path #f])
(cond
[(and (path-string? user-path) (file-exists? user-path)) user-path]
[(file-exists? base-path) base-path]
[(file-exists? (path-replace-suffix base-path ".exe")) (path-replace-suffix base-path ".exe")]
[else (or (find-executable-path binary) #f)]))
(define (make-send-options conf)
(match-define (config options _ logic) conf)
(lambda (server)
(server-write server
(unless (false? logic)
(set-logic logic))
(for ([opt (in-list (sort (hash-keys options) symbol<?))])
(set-option opt (hash-ref options opt))))))
(struct solver (server config asserts mins maxs env level)
#:mutable)
(struct config (options path logic))
(define (solver-assert self bools [wfcheck #f])
(unless (list? bools)
(raise-argument-error 'solver-assert "(listof boolean?)" bools))
(define wfcheck-cache (mutable-set))
(set-solver-asserts! self
(append (solver-asserts self)
(for/list ([b bools] #:unless (equal? b #t))
(unless (or (boolean? b) (and (term? b) (equal? @boolean? (term-type b))))
(error 'assert "expected a boolean value, given ~s" b))
(when wfcheck
(wfcheck b wfcheck-cache))
b))))
(define (solver-minimize self nums)
(unless (null? nums)
(error 'solver-minimize "optimization isn't supported by solver ~v" self)))
(define (solver-maximize self nums)
(unless (null? nums)
(error 'solver-maximize "optimization isn't supported by solver ~v" self)))
(define (solver-clear self)
(solver-shutdown self))
(define (solver-shutdown self)
(solver-clear-stacks! self)
(solver-clear-env! self)
(server-shutdown (solver-server self)))
(define (solver-push self)
(match-define (solver server _ (app unique asserts) (app unique mins) (app unique maxs) env level) self)
(server-write
server
(begin
((current-reporter) 'encode-start)
(encode env asserts mins maxs)
((current-reporter) 'encode-finish)
(push)))
(solver-clear-stacks! self)
(set-solver-level! self (cons (dict-count env) level)))
(define (solver-pop self [k 1])
(match-define (solver server _ _ _ _ env level) self)
(when (or (<= k 0) (> k (length level)))
(error 'solver-pop "expected 1 <= k <= ~a, given ~a" (length level) k))
(server-write server (pop k))
(solver-clear-stacks! self)
(for ([lvl level][i k])
(clear! env lvl))
(set-solver-level! self (drop level k)))
(define (solver-check self [read-solution read-solution])
(match-define (solver server _ (app unique asserts) (app unique mins) (app unique maxs) env _) self)
(cond [(ormap false? asserts) (unsat)]
[else (server-write
server
(begin
((current-reporter) 'encode-start)
(encode env asserts mins maxs)
((current-reporter) 'encode-finish)
(check-sat)))
((current-reporter) 'solve-start)
(solver-clear-stacks! self)
(define ret (read-solution server env))
((current-reporter) 'solve-finish (sat? ret))
ret]))
(define (solver-debug self)
(error 'solver-debug "debugging isn't supported by solver ~v" self))
(define (solver-options self)
(config-options (solver-config self)))
(define (solver-clear-stacks! self)
(set-solver-asserts! self '())
(set-solver-mins! self '())
(set-solver-maxs! self '()))
(define (solver-clear-env! self)
(set-solver-env! self (env))
(set-solver-level! self '()))
Reads the SMT solution from the server .
; The solution consists of 'sat or 'unsat, followed by
; followed by a suitably formatted s-expression. The
; output of this procedure is a hashtable from constant
identifiers to their SMTLib values ( if the solution is ' sat ) ;
; a non-empty list of assertion identifiers that form an
; unsatisfiable core (if the solution is 'unsat and a
; core was extracted); #f (if the solution is
; 'unsat and no core was extracted); or 'unknown otherwise.
(define (read-solution server env #:unsat-core? [unsat-core? #f])
(decode (parse-solution server #:unsat-core? unsat-core?) env))
(define (parse-solution server #:unsat-core? [unsat-core? #f])
read BV literals as symbols
(match (server-read server (read))
[(== 'sat)
(server-write server (get-model))
(let loop ()
(match (server-read server (read))
[(list (== 'objectives) _ ...) (loop)]
The SMT - LIB spec says that a model should be just a list of
` define - fun`s , but many SMT solvers used to prefix that list
; with `model`, so let's support both versions.
; -lib/c/5xpcIxdQ8-A/m/X4uQ7dIgAwAJ
[(or (list (== 'model) def ...) (list def ...))
(for/hash ([d def] #:when (and (pair? d) (equal? (car d) 'define-fun)))
(values (cadr d) d))]
[other (error 'read-solution "expected model, given ~a" other)]))]
[(== 'unsat)
(if unsat-core?
(begin
(server-write server (get-unsat-core))
(match (server-read server (read))
[(list (? symbol? name) ...) name]
[other (error 'read-solution "expected unsat core, given ~a" other)]))
'unsat)]
[(== 'unknown) 'unknown]
[other (error 'read-solution "unrecognized solver output: ~a" other)]))) | null | https://raw.githubusercontent.com/emina/rosette/c2975b940036dc380929daafc24ee76b66f64edd/rosette/solver/smt/base-solver.rkt | racket | The solution consists of 'sat or 'unsat, followed by
followed by a suitably formatted s-expression. The
output of this procedure is a hashtable from constant
a non-empty list of assertion identifiers that form an
unsatisfiable core (if the solution is 'unsat and a
core was extracted); #f (if the solution is
'unsat and no core was extracted); or 'unknown otherwise.
with `model`, so let's support both versions.
-lib/c/5xpcIxdQ8-A/m/X4uQ7dIgAwAJ | #lang racket
(require "server.rkt" "cmd.rkt" "env.rkt"
"../solution.rkt"
(only-in racket [remove-duplicates unique])
(only-in "smtlib2.rkt" reset set-option check-sat get-model get-unsat-core push pop set-logic)
(only-in "../../base/core/term.rkt" term term? term-type)
(only-in "../../base/core/bool.rkt" @boolean?)
(only-in "../../base/core/bitvector.rkt" bitvector? bv?)
(only-in "../../base/core/real.rkt" @integer? @real?)
(only-in "../../base/core/reporter.rkt" current-reporter))
(provide (all-defined-out))
(define (find-solver binary base-path [user-path #f])
(cond
[(and (path-string? user-path) (file-exists? user-path)) user-path]
[(file-exists? base-path) base-path]
[(file-exists? (path-replace-suffix base-path ".exe")) (path-replace-suffix base-path ".exe")]
[else (or (find-executable-path binary) #f)]))
(define (make-send-options conf)
(match-define (config options _ logic) conf)
(lambda (server)
(server-write server
(unless (false? logic)
(set-logic logic))
(for ([opt (in-list (sort (hash-keys options) symbol<?))])
(set-option opt (hash-ref options opt))))))
(struct solver (server config asserts mins maxs env level)
#:mutable)
(struct config (options path logic))
(define (solver-assert self bools [wfcheck #f])
(unless (list? bools)
(raise-argument-error 'solver-assert "(listof boolean?)" bools))
(define wfcheck-cache (mutable-set))
(set-solver-asserts! self
(append (solver-asserts self)
(for/list ([b bools] #:unless (equal? b #t))
(unless (or (boolean? b) (and (term? b) (equal? @boolean? (term-type b))))
(error 'assert "expected a boolean value, given ~s" b))
(when wfcheck
(wfcheck b wfcheck-cache))
b))))
(define (solver-minimize self nums)
(unless (null? nums)
(error 'solver-minimize "optimization isn't supported by solver ~v" self)))
(define (solver-maximize self nums)
(unless (null? nums)
(error 'solver-maximize "optimization isn't supported by solver ~v" self)))
(define (solver-clear self)
(solver-shutdown self))
(define (solver-shutdown self)
(solver-clear-stacks! self)
(solver-clear-env! self)
(server-shutdown (solver-server self)))
(define (solver-push self)
(match-define (solver server _ (app unique asserts) (app unique mins) (app unique maxs) env level) self)
(server-write
server
(begin
((current-reporter) 'encode-start)
(encode env asserts mins maxs)
((current-reporter) 'encode-finish)
(push)))
(solver-clear-stacks! self)
(set-solver-level! self (cons (dict-count env) level)))
(define (solver-pop self [k 1])
(match-define (solver server _ _ _ _ env level) self)
(when (or (<= k 0) (> k (length level)))
(error 'solver-pop "expected 1 <= k <= ~a, given ~a" (length level) k))
(server-write server (pop k))
(solver-clear-stacks! self)
(for ([lvl level][i k])
(clear! env lvl))
(set-solver-level! self (drop level k)))
(define (solver-check self [read-solution read-solution])
(match-define (solver server _ (app unique asserts) (app unique mins) (app unique maxs) env _) self)
(cond [(ormap false? asserts) (unsat)]
[else (server-write
server
(begin
((current-reporter) 'encode-start)
(encode env asserts mins maxs)
((current-reporter) 'encode-finish)
(check-sat)))
((current-reporter) 'solve-start)
(solver-clear-stacks! self)
(define ret (read-solution server env))
((current-reporter) 'solve-finish (sat? ret))
ret]))
(define (solver-debug self)
(error 'solver-debug "debugging isn't supported by solver ~v" self))
(define (solver-options self)
(config-options (solver-config self)))
(define (solver-clear-stacks! self)
(set-solver-asserts! self '())
(set-solver-mins! self '())
(set-solver-maxs! self '()))
(define (solver-clear-env! self)
(set-solver-env! self (env))
(set-solver-level! self '()))
; Reads the SMT solution from the server.
(define (read-solution server env #:unsat-core? [unsat-core? #f])
(decode (parse-solution server #:unsat-core? unsat-core?) env))
(define (parse-solution server #:unsat-core? [unsat-core? #f])
; read BV literals as symbols
(match (server-read server (read))
[(== 'sat)
(server-write server (get-model))
(let loop ()
(match (server-read server (read))
[(list (== 'objectives) _ ...) (loop)]
; The SMT-LIB spec says that a model should be just a list of
; `define-fun`s, but many SMT solvers used to prefix that list
; with `model`, so let's support both versions.
[(or (list (== 'model) def ...) (list def ...))
(for/hash ([d def] #:when (and (pair? d) (equal? (car d) 'define-fun)))
(values (cadr d) d))]
[other (error 'read-solution "expected model, given ~a" other)]))]
[(== 'unsat)
(if unsat-core?
(begin
(server-write server (get-unsat-core))
(match (server-read server (read))
[(list (? symbol? name) ...) name]
[other (error 'read-solution "expected unsat core, given ~a" other)]))
'unsat)]
[(== 'unknown) 'unknown]
[other (error 'read-solution "unrecognized solver output: ~a" other)]))) |
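A minimal runnable sketch of the model-parsing step in parse-solution's 'sat branch above, using a hand-written s-expression in place of live solver output; the reply value and the constant names c0/c1 are invented for illustration only.

#lang racket
;; Hypothetical solver reply: a list of define-funs, optionally prefixed with `model`.
(define reply '(model (define-fun c0 () Int 5)
                      (define-fun c1 () Bool true)))

;; Same keying logic as the 'sat branch above: keep only define-fun forms
;; and index them by constant name, tolerating the optional `model` prefix.
(define parsed
  (match reply
    [(or (list 'model def ...) (list def ...))
     (for/hash ([d def] #:when (and (pair? d) (equal? (car d) 'define-fun)))
       (values (cadr d) d))]))

(hash-ref parsed 'c0) ; => '(define-fun c0 () Int 5)
;; read-solution would then hand such a table to decode to recover constants and values.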
1aeac0ade8347eafac0887f3c099f7e53791a92a890a51492a2ba6f3c2de996a | mzp/coq-ide-for-ios | fourier_plugin_mod.ml | let _=Mltop.add_known_module"Fourier"
let _=Mltop.add_known_module"FourierR"
let _=Mltop.add_known_module"G_fourier"
let _=Mltop.add_known_module"Fourier_plugin_mod"
let _=Mltop.add_known_module"fourier_plugin"
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/CoqIDE/coq-8.2pl2/plugins/fourier/fourier_plugin_mod.ml | ocaml | let _=Mltop.add_known_module"Fourier"
let _=Mltop.add_known_module"FourierR"
let _=Mltop.add_known_module"G_fourier"
let _=Mltop.add_known_module"Fourier_plugin_mod"
let _=Mltop.add_known_module"fourier_plugin"
|
|
4bd1edae8b042ffcd84242ce8c46be141cb4575a188b2e3cb2ded1bd695158c1 | biocaml/phylogenetics | simulator.ml | open Core
open Phylogenetics
module L = Linear_algebra
module Branch_info = struct
type t = float
let length x = x
end
module Model = struct
type param = {
stationary_distribution : Amino_acid.vector ;
exchangeability_matrix : Amino_acid.matrix ;
}
let stationary_distribution p = p.stationary_distribution
let substitution_rate p i j =
p.exchangeability_matrix.Amino_acid.%{i, j} *.
p.stationary_distribution.Amino_acid.%(j)
let rate_matrix p =
Rate_matrix.Amino_acid.make (substitution_rate p)
end
module Sim = Simulator.Make(Amino_acid)(Branch_info)
let single_branch_tree l =
Tree.node () List1.(cons (Tree.branch l (Tree.leaf ())) [])
let draw_amino_acid_profile rng alpha =
let theta = Array.create ~len:20 0. in
Gsl.Randist.dirichlet rng ~alpha:(Array.create ~len:20 alpha) ~theta ;
Amino_acid.Vector.of_array_exn theta
let simulation_on_one_branch simulator simulator_name =
let rng = Gsl.Rng.(make (default ())) in
let tree = single_branch_tree 10. in
let root = Amino_acid.of_int_exn 0 in
let target = draw_amino_acid_profile rng 0.1 in
let exchangeability_matrix = (Wag.parse "../data/wag.dat").rate_matrix in
let param = {
Model.stationary_distribution = target ;
exchangeability_matrix ;
}
in
let rates = Model.rate_matrix param in
let empirical_frequencies =
Sequence.init 1000 ~f:(fun _ ->
simulator rng tree ~root ~rate_matrix:(Fn.const rates)
|> Tree.leaves
|> List.map ~f:snd
|> List.hd_exn
)
|> Amino_acid.counts
|> (fun (k : int Amino_acid.table) -> (k :> int array))
|> Array.map ~f:Float.of_int
|> Amino_acid.Vector.of_array_exn
|> Amino_acid.Vector.normalize
in
let res =
[|
Amino_acid.Vector.to_array target ;
Amino_acid.Vector.to_array empirical_frequencies ;
|]
|> L.Matrix.of_arrays_exn
|> L.Matrix.transpose
in
printf "Test convergence of %s on one branch\n" simulator_name ;
L.Matrix.pp Format.std_formatter res ;
Stdlib.print_newline ()
let () =
simulation_on_one_branch Sim.site_gillespie_direct "Gillespie direct" ;
simulation_on_one_branch Sim.site_gillespie_first_reaction "Gillespie first reaction"
| null | https://raw.githubusercontent.com/biocaml/phylogenetics/e225616a700b03c429c16f760dbe8c363fb4c79d/tests/expect/simulator.ml | ocaml | open Core
open Phylogenetics
module L = Linear_algebra
module Branch_info = struct
type t = float
let length x = x
end
module Model = struct
type param = {
stationary_distribution : Amino_acid.vector ;
exchangeability_matrix : Amino_acid.matrix ;
}
let stationary_distribution p = p.stationary_distribution
let substitution_rate p i j =
p.exchangeability_matrix.Amino_acid.%{i, j} *.
p.stationary_distribution.Amino_acid.%(j)
let rate_matrix p =
Rate_matrix.Amino_acid.make (substitution_rate p)
end
module Sim = Simulator.Make(Amino_acid)(Branch_info)
let single_branch_tree l =
Tree.node () List1.(cons (Tree.branch l (Tree.leaf ())) [])
let draw_amino_acid_profile rng alpha =
let theta = Array.create ~len:20 0. in
Gsl.Randist.dirichlet rng ~alpha:(Array.create ~len:20 alpha) ~theta ;
Amino_acid.Vector.of_array_exn theta
let simulation_on_one_branch simulator simulator_name =
let rng = Gsl.Rng.(make (default ())) in
let tree = single_branch_tree 10. in
let root = Amino_acid.of_int_exn 0 in
let target = draw_amino_acid_profile rng 0.1 in
let exchangeability_matrix = (Wag.parse "../data/wag.dat").rate_matrix in
let param = {
Model.stationary_distribution = target ;
exchangeability_matrix ;
}
in
let rates = Model.rate_matrix param in
let empirical_frequencies =
Sequence.init 1000 ~f:(fun _ ->
simulator rng tree ~root ~rate_matrix:(Fn.const rates)
|> Tree.leaves
|> List.map ~f:snd
|> List.hd_exn
)
|> Amino_acid.counts
|> (fun (k : int Amino_acid.table) -> (k :> int array))
|> Array.map ~f:Float.of_int
|> Amino_acid.Vector.of_array_exn
|> Amino_acid.Vector.normalize
in
let res =
[|
Amino_acid.Vector.to_array target ;
Amino_acid.Vector.to_array empirical_frequencies ;
|]
|> L.Matrix.of_arrays_exn
|> L.Matrix.transpose
in
printf "Test convergence of %s on one branch\n" simulator_name ;
L.Matrix.pp Format.std_formatter res ;
Stdlib.print_newline ()
let () =
simulation_on_one_branch Sim.site_gillespie_direct "Gillespie direct" ;
simulation_on_one_branch Sim.site_gillespie_first_reaction "Gillespie first reaction"
|