_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
8cd3e1ae6e8a0cf3186986282d5fa2df03630f9ad5e30efa69348abbf2fb8b9d | MyDataFlow/ttalk-server | mim_ct_sup.erl | -module(mim_ct_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Name) ->
supervisor:start_link({local, Name}, ?MODULE, []).
init([]) ->
{ok, {{one_for_one, 10, 1}, []}}.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/apps/ejabberd/test/mim_ct_sup.erl | erlang | -module(mim_ct_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Name) ->
supervisor:start_link({local, Name}, ?MODULE, []).
init([]) ->
{ok, {{one_for_one, 10, 1}, []}}.
|
|
5705ad97c7ec2d80e7fa961e026719055bae239d856c7fa8fb8353713b0fe51f | ahungry/ahubu | lib.clj | (ns ahubu.lib
(:require
[clojure.string :as str])
(:import
MyEventDispatcher
WebUIController
(java.io File)
(java.net HttpURLConnection URL URLConnection URLStreamHandler URLStreamHandlerFactory)
(javafx.application Application Platform)
(javafx.beans.value ChangeListener)
(javafx.concurrent Worker$State)
(javafx.event EventHandler)
(javafx.fxml FXMLLoader)
(javafx.scene Parent Scene)
(javafx.scene.control Label)
(javafx.scene.input Clipboard ClipboardContent KeyEvent)
(javafx.scene.web WebView)
(javafx.stage Stage)
(javax.net.ssl HttpsURLConnection)
(netscape.javascript JSObject)
(sun.net.www.protocol.https Handler)
))
(gen-class
:extends javafx.application.Application
:name com.ahungry.Browser)
(declare delete-current-scene)
(declare bind-keys)
(declare new-scene)
(declare goto-scene)
(declare hide-buffers)
(declare show-buffers)
(declare filter-buffers)
(declare omnibar-load-url)
(declare default-mode)
(declare omnibar-handler)
(declare omnibar-parse-command)
(declare omnibar-handle-command)
(defmacro compile-time-slurp
  "Embed the contents of `file` as a string literal at macro-expansion
  (compile) time.  NOTE(review): appears unused in this file."
  [file]
  (slurp file))

;; JS support bundle, read once at namespace load and injected into pages.
(def js-bundle (slurp "js-src/bundle.js"))

(defmacro run-later
  "Run `forms` on the JavaFX application thread.  Returns a promise that
  is delivered with the result, or with the Throwable if one was thrown."
  [& forms]
  `(let [
         p# (promise)
         ]
     (Platform/runLater
      (fn []
        (deliver p# (try ~@forms (catch Throwable t# t#)))))
     p#))
;; Single global state atom: every mutable piece of the browser lives here.
(def world
  (atom
   {
    :cookies {}            ; uri -> {cookie-name -> cookie-map}, persisted to disk
    :cross-domain-url ""   ; last first-party URL, baseline for X-domain blocking
    ;; NOTE(review): (format "file" ...) ignores its argument and yields the
    ;; literal "file" -- the format string looks garbled; presumably it was a
    ;; file:// URL built from user.dir.  TODO confirm against upstream.
    :default-url (format "file" (System/getProperty "user.dir"))
    :hinting? false        ; link-hinting mode active?
    :mode :default         ; current keymap mode
    :new-tab? false        ; next omnibar navigation opens a new scene?
    :omnibar-open? false
    :scene-id 0            ; index of the active scene (tab)
    :scenes []             ; one JavaFX Scene per tab
    :searching? false      ; incremental search active?
    :showing-buffers? false ; buffer-list overlay visible?
    :stage nil             ; the single top-level Stage
    }))
(defn set-mode
  "Switch the active keymap mode (a keyword, e.g. :default, :insert)."
  [mode]
  (swap! world conj {:mode mode}))

(defn set-atomic-stage
  "Remember the single top-level JavaFX Stage."
  [stage]
  (swap! world conj {:stage stage}))

(defn get-atomic-stage [] (:stage @world))

;; Each scene is basically a tab
(defn add-scene
  "Append a Scene (tab) to the world's scene list."
  [scene]
  (swap! world conj {:scenes (conj (:scenes @world) scene)}))

(defn get-scene
  "Scene (tab) at index `n`, or nil when out of range."
  [n]
  (-> (:scenes @world) (get n)))

(defn get-scenes [] (:scenes @world))
(defn delete-nth-scene
  "Return a vector equal to `scenes` with the element at index `n`
  removed.  `n` must be a valid index into `scenes`."
  [scenes n]
  (into (subvec scenes 0 n)
        (subvec scenes (inc n))))
(defn del-scene
  "Drop the scene (tab) at index `n` from the world."
  [n]
  (swap! world conj {:scenes (-> (:scenes @world) (delete-nth-scene n))}))

;; Active-tab index.
(defn set-scene-id [n] (swap! world conj {:scene-id n}))
(defn get-scene-id [] (:scene-id @world))

(defn set-new-tab
  "When true, the next omnibar navigation opens a new tab."
  [b]
  (swap! world conj {:new-tab? b}))

(defn get-new-tab? [] (:new-tab? @world))

;; (def atomic-default-url (atom ""))
(defn set-default-url
  "URL that a freshly created scene will load."
  [s]
  (swap! world conj {:default-url s}))

(defn get-default-url [] (:default-url @world))

(defn set-showing-buffers
  "Toggle the buffer-list overlay flag."
  [b]
  (swap! world conj {:showing-buffers? b}))

(defn get-showing-buffers? [] (:showing-buffers? @world))
;; FXML node lookups, always resolved against the currently active scene.
(defn get-omnibar []
  (-> (get-scene-id) get-scene (.lookup "#txtURL")))

(defn get-webview []
  (-> (get-scene-id) get-scene (.lookup "#webView")))

(defn get-webengine []
  (-> (get-webview) .getEngine))

(defn get-buffers []
  (-> (get-scene-id) get-scene (.lookup "#buffers")))

(defn get-tip []
  (-> (get-scene-id) get-scene (.lookup "#tip")))
(defn set-tip
  "Show mode-indicator text `s` in the tip label, styled per mode name."
  [s]
  (let [style (case s
                "NORMAL" "-fx-text-fill: #af0; -fx-background-color: #000;"
                "OMNI" "-fx-text-fill: #000; -fx-background-color: #36f"
                "GO" "-fx-text-fill: #000; -fx-background-color: #f69"
                "INSERT" "-fx-text-fill: #000; -fx-background-color: #f36"
                "HINTING" "-fx-text-fill: #000; -fx-background-color: #f63"
                "SEARCHING" "-fx-text-fill: #000; -fx-background-color: #f33"
                "BUFFERS" "-fx-text-fill: #000; -fx-background-color: #63f"
                ;; default style for any other tip text
                "-fx-text-fill: #000; -fx-background-color: #af0")]
    (run-later
     (doto (get-tip)
       (.setText s)
       (.setStyle style)))))
(defn get-omnibar-text []
  (-> (get-omnibar) .getText))

(defn set-omnibar-text
  "Replace the omnibar contents with `s` and park the caret at its end."
  [s]
  (run-later
   (doto (get-omnibar)
     (.setText s)
     (.positionCaret (count s)))))

(defn set-omnibar-text-to-url
  "Mirror the engine's current location into the omnibar, unless the
  user is actively typing in it."
  []
  (when (not (:omnibar-open? @world))
    (set-omnibar-text
     (-> (get-webengine) .getLocation))))
(defn url-ignore-regexes-from-file
  "Read one pattern per line from `file`, wrapping each in .* so it
  matches anywhere inside a URL."
  [file]
  (map re-pattern
       (map #(format ".*%s.*" %)
            (str/split (slurp file) #"\n"))))

(defn url-ignore-regexes []
  (url-ignore-regexes-from-file "conf/url-ignore-regexes.txt"))

(defn matching-regexes
  "Subset of `regexes` that fully match `url`."
  [url regexes]
  (filter #(re-matches % url) regexes))

(defn url-ignorable?
  "True (with a log line) when `url` matches any configured ignore
  pattern.  Note: re-reads the config file on every call."
  [url]
  (let [ignorables (matching-regexes url (url-ignore-regexes))]
    (if (> (count ignorables) 0)
      (do
        (println (format "Ignoring URL: %s, hit %d matchers." url (count ignorables)))
        true)
      false)))
(defn get-base-domain-pattern
  "Build a regex that matches any http(s) URL on the same registrable
  base domain (last two host labels) as URL string `s`.
  Falls back to the match-anything pattern #\".*\" when no host can be
  extracted or the host has fewer than two labels (e.g. localhost).
  Fixes: the old host regex let the URL path leak into the captured
  \"fqdn\" (so \"com/bar\" became a domain label), and subvec threw on
  single-label hosts."
  [s]
  ;; Capture the host only: everything after "://" up to the first '/'
  ;; or ':' (port).
  (let [[_ host] (re-matches #".*?://([^/:]+).*" (or s ""))
        labels (when host (reverse (str/split host #"\.")))]
    (if (>= (count labels) 2)
      (let [[tld sld] labels]
        (re-pattern
         (format "^http[s]*://(.*\\.)*%s\\.%s/.*" sld tld)))
      #".*")))
;; Work with a sort of timeout here - cross domain base is set strictly after
;; first URL request, then lax again after some time has expired.
;; FIXME: Handle root domain logic better - when to flip/flop cross domain setting
;; TODO: Add cross domain user setting
(defn block-cross-domain-net?x
  "Experimental cross-domain request blocker: compares `url` against the
  base domain of the last first-party URL, then resets that baseline 5s
  later.  Currently unused -- see the stub below."
  [url]
  (let [domain (get-base-domain-pattern (:cross-domain-url @world))]
    (swap! world conj {:cross-domain-url url})
    (future (Thread/sleep 5000) (swap! world conj {:cross-domain-url ""}))
    (if (not (re-matches (re-pattern domain) url))
      (do (println (format "Blocking X-Domain request: %s" url))
          (println domain)
          true)
      false)))

;; Stub that disables cross-domain blocking entirely.
(defn block-cross-domain-net? [_ ] false)

(defn url-or-no
  "Return `url` as a java.net.URL, or a guaranteed-dead URL
  (\"<proto>:65535\") when the URL is ignorable or cross-domain blocked."
  [url proto]
  (let [url (.toString url)]
    (URL.
     (if (or (url-ignorable? url) (block-cross-domain-net? url))
       (format "%s:65535" proto)
       url))))
;; Hmm, we could hide things we do not want to see.
(defn my-connection-handler
  "URLStreamHandler for `protocol` (\"http\" or \"https\") that funnels
  every openConnection through url-or-no, letting us blackhole ignored
  URLs.  Returns nil for any other protocol."
  [protocol]
  (case protocol
    "http" (proxy [sun.net.www.protocol.http.Handler] []
             (openConnection [& [url proxy :as args]]
               (println url)
               (proxy-super openConnection (url-or-no url protocol) proxy)))
    "https" (proxy [sun.net.www.protocol.https.Handler] []
              (openConnection [& [url proxy :as args]]
                (println url)
                (proxy-super openConnection (url-or-no url protocol) proxy)))
    nil
    ))
;; Opposite of slurp.
(defn barf
  "Serialize `data` with pr-str and write it to `file-name`."
  [file-name data]
  (spit file-name (pr-str data)))
(defn clean-uri
  "Strip path/query/fragment from `uri`, keeping only scheme + host, so
  cookies are keyed per-site."
  [uri]
  (java.net.URI. (.getScheme uri) (.getHost uri) nil nil))

(defn cookie-to-map
  "Project a java.net.HttpCookie onto a plain EDN-serializable map."
  [cookie]
  {:name (.getName cookie)
   :value (.getValue cookie)
   :domain (.getDomain cookie)
   :maxAge (.getMaxAge cookie)
   :secure (.getSecure cookie)})
(defn cookiemap-to-cookie
  "Rebuild a java.net.HttpCookie from a map previously produced by
  cookie-to-map."
  [{:keys [name value domain maxAge secure]}]
  (doto (java.net.HttpCookie. name value)
    (.setVersion 0)
    (.setDomain domain)
    (.setSecure secure)
    (.setMaxAge maxAge)))
;; Add a previously dumped cookie
(defn add-cookie
  "Rehydrate one cookie-map into CookieStore `store`, keyed by the
  cleaned form of `uri` (a string)."
  [store uri cookiemap]
  (let [cookie (cookiemap-to-cookie cookiemap)
        uri (clean-uri (java.net.URI. uri))]
    (-> store (.add uri cookie))))

(defn load-cookies
  "Load all cookies persisted in ./ahubu.cookies (when present) into
  `store`.  File layout: {uri {cookie-name cookie-map}}."
  [store]
  (when (.exists (clojure.java.io/file "ahubu.cookies"))
    (let [cookies (read-string (slurp "ahubu.cookies"))]
      (doseq [[uri uri-map] cookies]
        (doseq [[name cookie] uri-map]
          (add-cookie store uri cookie))))))
(defn push-cookie-to-uri-map
  "Index `cookie` (a cookie-map) under its :name inside map `mp`.
  Works when `mp` is nil."
  [cookie mp]
  (assoc mp (:name cookie) cookie))
(defn push-cookie-to-cookie-map
  "Store `cookie` under `uri` in the two-level map `mp`
  ({uri {cookie-name cookie-map}})."
  [cookie uri mp]
  (let [old (get mp uri)]
    (assoc mp uri (push-cookie-to-uri-map cookie old))))

(defn push-cookie-to-world
  "Cache `cookie` (a cookie-map) for `uri` in the world atom, for later
  persistence by save-cookies."
  [uri cookie]
  (swap! world
         (fn [old]
           (assoc old :cookies
                  (push-cookie-to-cookie-map cookie uri (:cookies old))))))
;; -java
;;
;; -a-cookie-using-javafxs-webengine-webview
(defn my-cookie-store
  "A java.net.CookieStore that wraps the JDK in-memory store, mirrors
  every added cookie into the world atom (for later persistence), and is
  pre-seeded from ./ahubu.cookies."
  []
  (let [store (-> (java.net.CookieManager.) .getCookieStore)
        my-store
        (proxy [java.net.CookieStore Runnable] []
          (run []
            ;; NOTE(review): placeholder -- nothing visible here schedules
            ;; this Runnable; persistence actually happens via save-cookies.
            (println "Save to disk here"))
          (add [uri cookie]
            ;; Delegate to the JDK store, then cache an EDN copy in `world`.
            (let [clean (clean-uri uri)
                  u (.toString clean)]
              (.add store clean cookie)
              (push-cookie-to-world u (cookie-to-map cookie))))
          (get [& [uri :as args]]
            (let [clean (clean-uri uri)
                  u (.toString clean)]
              (let [result (.get store clean)]
                result)))
          (getCookies []
            (.getCookies store))
          (getURIs []
            (.getURIs store))
          (remove [uri cookie]
            (.remove store uri cookie))
          (removeAll []
            (.removeAll store)))]
    (load-cookies my-store)
    my-store))
(defn feed-cookies-to-the-manager
  "Replay cached cookie-maps ({domain {cookie-name cookie-map}}) into
  CookieManager `manager` as synthetic Set-Cookie response headers."
  [manager cookies]
  (doseq [[domain domain-map] cookies]
    (doseq [[name c] domain-map]
      (let [uri (clean-uri (java.net.URI. domain))]
        (.put manager uri {"Set-Cookie" [(format "%s=%s" (:name c) (:value c)) ]})))))
(defn quietly-set-cookies
  "Install a JVM-wide CookieManager backed by our persistent cookie
  store, accepting all cookies, then replay the cookies cached in the
  world atom into it.
  Fixes: the alternative-policy line had lost its comment markers
  (garbled as `java.net . / ACCEPT_ORIGINAL_SERVER`), which made the
  namespace fail to compile; it is restored as a comment.
  NOTE(review): def-inside-defn is kept to preserve the existing
  `cookie-manager` var other code may reference."
  []
  (def cookie-manager
    (doto (java.net.CookieManager.
           (my-cookie-store)
           java.net.CookiePolicy/ACCEPT_ALL
           ;; java.net.CookiePolicy/ACCEPT_ORIGINAL_SERVER
           )
      java.net.CookieHandler/setDefault))
  (feed-cookies-to-the-manager cookie-manager (:cookies @world)))
(defn save-cookies
  "Persist the world's cookie cache to ./ahubu.cookies."
  []
  (barf "ahubu.cookies" (:cookies @world)))

(defn dump-cookies
  "All cookies in `store` as plain maps."
  [store]
  (doall (map cookie-to-map (.getCookies store))))

(defn quietly-set-stream-factory
  "Install our URLStreamHandlerFactory via the Java-side helper.
  NOTE(review): presumably the helper also suppresses the
  'factory already defined' error -- confirm in WebUIController."
  []
  (WebUIController/stfuAndSetURLStreamHandlerFactory))
(defn -start
  "JavaFX Application entry point: build the first scene from the FXML
  layout, bind the global key handler, and show the stage.  On window
  close, save cookies and exit the JVM."
  [this stage]
  (let [
        root (FXMLLoader/load (-> "resources/WebUI.fxml" File. .toURI .toURL))
        scene (Scene. root)
        exit (reify javafx.event.EventHandler
               (handle [this event]
                 (println "Goodbye")
                 (save-cookies)
                 (javafx.application.Platform/exit)
                 (System/exit 0)
                 ))
        ]
    (bind-keys stage)
    (set-atomic-stage stage)
    ;; (.addShutdownHook
    ;;  (java.lang.Runtime/getRuntime)
    ;;  (Thread. (println "Adios!") (save-cookies)))
    ;; (set-scene-id 0)
    ;; (add-scene scene)
    ;; (bind-keys scene)
    (doto stage
      ;; (.initModality javafx.stage.Modality/APPLICATION_MODAL)
      ;; (.initModality javafx.stage.Modality/WINDOW_MODAL)
      ;; (.setAlwaysOnTop true)
      (.setOnCloseRequest exit)
      (.setScene scene)
      (.setTitle "AHUBU")
      (.show))))
(defn execute-script
  "Evaluate JS string `s` in `w-engine` on the FX thread.  Returns a
  promise of the result (JSObject results are stringified)."
  [w-engine s]
  (run-later
   (let [
         result (.executeScript w-engine s)
         ]
     (if (instance? JSObject result)
       (str result)
       result))))

(defn dojs
  "Run JS string `s` in the current tab's engine."
  [s ]
  (execute-script (get-webengine) s))

(defn dojsf
  "Run js-src/<file>.js in the current tab's engine."
  [file]
  (execute-script (get-webengine) (slurp (format "js-src/%s.js" file))))
(defn decrease-font-size []
  (dojsf "decrease-font-size"))

(defn increase-font-size []
  (dojsf "increase-font-size"))

;; NOTE(review): this 1-arg inject-firebug is clobbered by the 0-arg
;; redefinition further down this file; callers of the 1-arg version
;; will fail once the later defn has loaded.
(defn inject-firebug [w-engine]
  (execute-script w-engine (slurp "js-src/inject-firebug.js")))
(defn execute-script-async
  "Evaluate JS `s` with callback plumbing: exposes `cb` (delivers the
  promise) and `println` on a fresh JS object, evals `s` against it,
  and blocks until cb fires."
  [w-engine s]
  (let [
        p (promise)
        ;; NOTE(review): binding *out* to itself lexically looks like an
        ;; attempt to capture the caller's stdout for the FX thread --
        ;; confirm it actually has that effect.
        *out* *out*
        ]
    (Platform/runLater
     (fn []
       (let [
             o (.executeScript w-engine "new Object()")
             ]
         (.setMember o "cb" (fn [s] (deliver p s)))
         (.setMember o "println" (fn [s] (println s)))
         (.eval o s))))
    @p))
(defn repl
  "Tiny blocking JS REPL against `webengine`: read a line, eval it,
  print the result; a blank line quits."
  [webengine]
  (let [s (read-line)]
    (when (not= "" (.trim s))
      (println @(execute-script webengine s))
      (recur webengine))))

(defn bind
  "Expose Clojure value `obj` as window.<s> inside `webengine`."
  [s obj webengine]
  (run-later
   (.setMember
    (.executeScript webengine "window")
    s obj)))
(defn clear-cookies
  "Empty `cookie-manager`'s backing store."
  [cookie-manager]
  (-> cookie-manager .getCookieStore .removeAll))

(defn async-load
  "Point the current tab at `url` (fire and forget)."
  [url]
  (run-later
   (doto (get-webengine)
     (.load url))))
(defn async-loadx
  "Load `url` in the current engine and block until the first SUCCEEDED
  load state, at which point the JS bundle is injected and the page's
  console.log/error are rerouted to Clojure's println."
  [url]
  (let [
        webengine (get-webengine)
        p (promise)
        f (fn [s]
            (binding [*out* *out*] (println s)))
        listener (reify ChangeListener
                   (changed [this observable old-value new-value]
                     (when (= new-value Worker$State/SUCCEEDED)
                       ;; first remove this listener (one-shot)
                       (.removeListener observable this)
                       (println "In the ChangeListener...")
                       (execute-script webengine js-bundle)
                       ;; and then redefine log and error (fresh page)
                       (bind "println" f webengine)
                       (future
                         (Thread/sleep 1000)
                         (execute-script webengine "console.log = function(s) {println.invoke(s)};
console.error = function(s) {println.invoke(s)};
"))
                       (deliver p true))))
        ]
    (run-later
     (doto webengine
       (-> .getLoadWorker .stateProperty (.addListener listener))
       (.load url)))
    @p))
(defn back
  "Browser history: go back one entry."
  [webengine]
  (execute-script webengine "window.history.back()"))

(defn prev-scene
  "Switch to the previous tab, wrapping from the first to the last."
  []
  (default-mode)
  (let [n (get-scene-id)
        id (- n 1)]
    (if (< id 0)
      (goto-scene (- (count (get-scenes)) 1))
      (goto-scene id))))

(defn next-scene
  "Switch to the next tab, wrapping from the last to the first."
  []
  (default-mode)
  (let [n (get-scene-id)
        id (+ n 1)]
    (if (>= id (count (get-scenes)))
      (goto-scene 0)
      (goto-scene id))))
(defn omnibar-stop
  "Close the omnibar: disable it, re-enable the webview, and (shortly
  after) restore the URL display."
  []
  (swap! world conj {:omnibar-open? false})
  (run-later
   (future (Thread/sleep 100) (set-omnibar-text-to-url))
   (doto (get-omnibar) (.setDisable true))
   (doto (get-webview) (.setDisable false))))

(defn omnibar-start
  "Open the omnibar: enable and focus it, freezing the webview so key
  presses go to the text field."
  []
  (swap! world conj {:omnibar-open? true})
  (run-later
   (doto (get-omnibar) (.setDisable false) (.requestFocus))
   (doto (get-webview) (.setDisable true))))
(defn yank
  "Copy `s` to the system clipboard, flashing a YANKED! tip for 500ms."
  [s]
  (let [content (ClipboardContent.)]
    (run-later
     (set-tip "YANKED!")
     (future (Thread/sleep 500) (set-tip "NORMAL"))
     (-> content (.putString s))
     (-> (Clipboard/getSystemClipboard) (.setContent content)))))

(defn yank-current-url
  "Copy the current page URL to the clipboard."
  []
  (-> (get-webengine) .getLocation yank))
(defn buffers-start
  "Enter the buffer (tab) list: omnibar mode with a ':buffers!' prompt
  and an overlay listing every open tab."
  []
  (set-mode :omnibar)
  (set-tip "BUFFERS")
  (set-showing-buffers true)
  (run-later
   (omnibar-start)
   (show-buffers)
   (set-omnibar-text ":buffers! ")
   ;; NOTE(review): this bare string is a no-op; it was presumably
   ;; (dojs "Overlay.show()") before comment/markup was lost -- confirm
   ;; against upstream before changing it.
   "Overlay.show()"))

(defn quickmark-url
  "Jump to a quickmarked `url`, resetting to default mode first."
  [url]
  (default-mode)
  (omnibar-load-url url))
(defn get-xdg-config-home
  "Directory for user configuration per the XDG Base Directory spec:
  $XDG_CONFIG_HOME when set, otherwise $HOME/.config.
  Fixes: the previous fallback returned $HOME itself, which made the rc
  lookup probe ~/ahubu/ahuburc instead of ~/.config/ahubu/ahuburc."
  []
  (or (System/getenv "XDG_CONFIG_HOME")
      (str (System/getProperty "user.home") "/.config")))
(defn get-rc-file-raw
  "Merge conf/default-rc with the user's ~/.ahuburc and
  <xdg-config>/ahubu/ahuburc (later files win on key conflicts)."
  []
  (let [defaults (read-string (slurp "conf/default-rc"))
        home-rc (format "%s/.ahuburc" (System/getProperty "user.home"))
        xdg-rc (format "%s/ahubu/ahuburc" (get-xdg-config-home))]
    (conj
     defaults
     (if (.exists (clojure.java.io/file home-rc))
       (read-string (slurp home-rc)))
     (if (.exists (clojure.java.io/file xdg-rc))
       (read-string (slurp xdg-rc))))))
(defn get-rc-file
  "The rc map with each :quickmarks entry compiled into a zero-arg jump
  function and merged into the :quickmarks keymap."
  []
  (let [rc (get-rc-file-raw)
        quickmarks (:quickmarks rc)
        qm-fns (reduce-kv #(assoc %1 %2 (fn [] (quickmark-url %3))) {} quickmarks)
        merged-qms (conj (:quickmarks (:keymaps rc)) qm-fns)]
    (conj rc
          {:keymaps (conj (:keymaps rc)
                          {:quickmarks merged-qms})})))
;; Mode switches: set the keymap mode and the matching tip label.
(defn go-mode []
  (set-mode :go)
  (set-tip "GO"))

(defn font-mode []
  (set-mode :font)
  (set-tip "FONT"))

(defn quickmarks-mode []
  (set-mode :quickmarks)
  (set-tip "QUICKMARKS"))

(defn quickmarks-new-tab-mode
  "Quickmarks mode, but the jump opens in a new tab."
  []
  (set-new-tab true)
  (quickmarks-mode))
(defn default-mode
  "Return to NORMAL: close overlays/omnibar, clear hint/search flags,
  and disable form (insert) interaction in the page."
  []
  (set-mode :default)
  (set-tip "NORMAL")
  (hide-buffers)
  (omnibar-stop)
  (swap! world conj {:hinting? false :searching? false})
  (dojs "Hinting.off(); Overlay.hide(); Form.disable()"))

(defn insert-mode
  "Allow typing into page forms."
  []
  (set-mode :insert)
  (set-tip "INSERT")
  (dojs "Form.enable()"))

(defn search-mode
  "Begin incremental search; the omnibar shows a '/' prompt."
  []
  (set-mode :search)
  (set-tip "SEARCHING")
  (swap! world conj {:searching? true})
  (println "Searching")
  (set-omnibar-text "/")
  (dojs "Search.reset()"))

(defn hinting-mode
  "Show link hints in the page."
  []
  (set-mode :hinting)
  (set-tip "HINTING")
  (swap! world conj {:hinting? true})
  (dojs "Hinting.on(); Overlay.show()"))

;; NOTE(review): shadows the earlier 1-arg inject-firebug defined above.
(defn inject-firebug []
  (dojsf "inject-firebug"))
(defn omnibar-open
  "Open the omnibar with an ':open ' prompt."
  []
  (set-mode :omnibar)
  (set-tip "OMNI")
  (omnibar-start)
  (set-omnibar-text ":open ")
  (dojs "Overlay.show()"))

(defn omnibar-open-current
  "Like omnibar-open, pre-filled with whatever the omnibar showed
  (normally the current URL)."
  []
  (omnibar-open)
  (set-omnibar-text (format ":open %s" (get-omnibar-text))))

(defn omnibar-open-new-tab
  "Omnibar with a ':tabopen ' prompt; the navigation opens a new tab."
  []
  (set-new-tab true)
  (omnibar-open)
  (set-omnibar-text ":tabopen "))

(defn go-top
  "Scroll to the top of the page and drop back to NORMAL."
  []
  (default-mode)
  (dojs "window.scrollTo(0, 0)"))
;; Try to grab string key, then keyword key
(defn key-map-op
  "Look up `key` in the current mode's keymap: the string form first,
  then the keyword form.  Returns the bound op (a JS string or a form
  to eval) or nil.  Note: re-reads the rc files on every keypress."
  [key]
  (let [mode (:mode @world)
        rc (-> (:keymaps (get-rc-file)) (get mode))
        op? (get rc key)
        key (keyword key)
        op (or op? (get rc key))]
    op))

(defn process-op
  "Execute a keymap op: strings are run as JS in the page; anything else
  is evaluated to a function and called."
  [op]
  (when op
    (if (= java.lang.String (type op))
      (execute-script (get-webengine) op)
      ((eval op)))))
(defn key-map-handler
  "Dispatch a readable key press `key`: run the global listeners
  (buffer filter, hinting, incremental search), then the :BEFORE bind,
  the key's own bind, and (after a short delay) the :AFTER bind.
  Always returns true so the event bubbles up.
  Fixes: several debug println lines had lost their ';;' comment
  markers (e.g. `( println ( format \" KM OP : % s \" op - before ) )`),
  leaving unresolvable symbols that broke compilation; they are
  restored as comments."
  [key]
  (let [op (key-map-op key)
        op-before (key-map-op :BEFORE)
        op-after (key-map-op :AFTER)]
    ;; (println (format "KM OP: %s" op-before))
    ;; (println key)
    ;; (println (format "KM OP: %s" op))
    ;; (println (format "KM OP: %s" op-after))
    ;; Global key listeners
    (when (get-showing-buffers?)
      (filter-buffers))
    (when (:hinting? @world)
      (dojs (format "Hinting.keyHandler('%s')" key))
      ;; (println (format "HINTING: %s" key))
      )
    (when (:searching? @world)
      (when (= 1 (count key))
        (set-omnibar-text (format "%s%s" (get-omnibar-text) key)))
      (dojs (format "Search.incrementalFind('%s')" key))
      )
    ;; Check for the BEFORE bind (runs with any other keypress)
    (process-op op-before)
    (when (process-op op)
      (future
        (Thread/sleep 100)
        (process-op op-after)))
    true)) ; bubble up keypress
;; ENTER (code) vs <invis> (char), we want ENTER
;; Ideally, we want the char, since it tracks lowercase etc.
(defn get-readable-key
  "Pick the more readable of a key event's code and text: the printable
  text when it is at least as long as the code, otherwise the code name
  (e.g. \"ENTER\")."
  [code text]
  (if (< (count text) (count code))
    code
    text))
;;
(defn bind-keys
  "Install a KEY_PRESSED event filter on `what` (a Stage or Scene) that
  routes every key press through key-map-handler.  Returns `what`.
  Fixes: the `(reify EventHandler ...)` wrapper had been garbled into a
  bare `EventHandler` symbol plus a floating `(handle ...)` form, which
  did not compile; the handler is reconstructed."
  [what]
  (doto what
    (->
     (.addEventFilter
      (. KeyEvent KEY_PRESSED)
      (reify EventHandler
        (handle [this event]
          (let [ecode (-> event .getCode .toString)
                etext (-> event .getText .toString)]
            ;; (println (get-readable-key ecode etext))
            ;; (.consume event)
            ;; disable webview here, until some delay was met
            ;; -disable-highlight-and-copy-mode-in-webengine
            ;;
            (key-map-handler (get-readable-key ecode etext)))
          false
          ))))))
(defn show-alert
  "Pop a modal JavaFX dialog showing `s` with an OK button."
  [s]
  (doto (javafx.scene.control.Dialog.)
    (-> .getDialogPane (.setContentText s))
    (-> .getDialogPane .getButtonTypes (.add (. javafx.scene.control.ButtonType OK)))
    (.showAndWait)))
(defn goto-scene
  "Make tab `n` current and show it on the stage."
  [n]
  (println "GOING TO SCENE")
  (println n)
  (run-later
   (set-scene-id n)
   (doto (get-atomic-stage)
     (.setScene (get-scene n))
     (.show))))

(defn delete-current-scene
  "Close the current tab (never tab 0) and switch to the one before it."
  []
  (let [n (get-scene-id)]
    (when (> n 0)
      (goto-scene (- n 1))
      (run-later
       ;; NOTE(review): this sleep runs on the FX thread and stalls the
       ;; UI for 50ms; presumably it waits for goto-scene to land first.
       (Thread/sleep 50)
       (del-scene n)))))
(defn omnibar-load-url
  "Navigate to `url`: into a brand-new tab when the new-tab flag is set
  (consuming the flag), otherwise in the current engine."
  [url]
  (run-later
   (if (get-new-tab?)
     (do
       (set-default-url url)
       (new-scene)
       (set-new-tab false))
     (-> (get-webengine) (.load url)))))
(defn get-selected-buffer-text
  "Text of the first label left in the buffer list, or \"\"."
  []
  (let [bufs (get-buffers)
        children (-> bufs .getChildren)
        id 0
        child (when children (get (vec children) id))]
    (if child (.getText child) "")))

(defn switch-to-buffer
  "Jump to the tab named by the first visible buffer entry (its leading
  digit), then dismiss the buffer list.
  NOTE(review): only a single leading digit is parsed, so tabs with
  index >= 10 cannot be selected this way."
  []
  (let [s (get-selected-buffer-text)
        maybe-id (last (re-matches #"^([0-9]).*" s))
        id (if maybe-id (Integer/parseInt maybe-id) -1)]
    (when (>= id 0)
      (goto-scene id))
    (set-showing-buffers false)
    (hide-buffers)))
(defn omnibar-parse-command
  "Parse an omnibar command of the form \":cmd arg\".  Returns
  [full-match cmd arg], or nil when `cmd` lacks a space-separated
  argument."
  [cmd]
  (let [m (re-matcher #":(.*?) (.*)" cmd)]
    (when (.matches m)
      [(.group m 0) (.group m 1) (.group m 2)])))
(defn omnibar-handle-command
  "Handle an omnibar command string like \":open url\".  Known commands
  pass their argument to omnibar-handler; anything else (including
  strings that do not parse as a command, e.g. \":q\") falls back to
  handling just the argument text.
  Fixes: a debug println had lost its ';;' marker (restored below);
  the old default branch re-submitted the full ':'-prefixed string,
  which NPE'd on unparseable commands and recursed forever on unknown
  ones -- it now submits only the argument (or \"\")."
  [cmd]
  (let [[full op arg] (omnibar-parse-command cmd)]
    ;; (println (format "OB Parse Cmd: %s %s %s" full op arg))
    (case op
      "open" (omnibar-handler arg)
      "tabopen" (omnibar-handler arg)
      (omnibar-handler (or arg "")))))
(defn omnibar-handler
  "Dispatch omnibar input `n`: buffer selection when the buffer list is
  open; ':'-commands; raw file:/http(s): URLs pass through; anything
  else is turned into a search/guess URL.
  NOTE(review): both (format \"\" n) calls yield the empty string --
  the format strings (likely a search-engine URL and an http:// guess)
  appear to have been lost; confirm against upstream.
  NOTE(review): the ':' branch both dispatches the command AND feeds
  its return value to omnibar-load-url below -- likely double handling."
  [n]
  (if (get-showing-buffers?) (switch-to-buffer)
      (let [query
            (cond
              (re-matches #"^:.*" n) (omnibar-handle-command n)
              (re-matches #"^file:.*" n) n
              (re-matches #"^http[s]*:.*" n) n
              (re-matches #".*\..*" n) (format "" n)
              :else (format "" n)
              )]
        (omnibar-load-url query))))
(defn hide-buffers
  "Clear all labels out of the buffer-list container."
  []
  (let [bufs (get-buffers)]
    (run-later
     (-> bufs .getChildren .clear))))

(defn is-matching-buf?
  "Does buffer label `s` match the current omnibar filter text
  (case-insensitive substring match)?
  NOTE(review): the filter text is spliced into a regex unescaped, so
  regex metacharacters typed by the user will throw or mis-match."
  [s]
  (let [[_ cmd arg] (-> (get-omnibar) .getText omnibar-parse-command)
        ob-text (or arg _)
        pattern (re-pattern (str/lower-case (str/join "" [".*" ob-text ".*"])))]
    (re-matches pattern (str/lower-case s))))
(defn get-buffer-entry-text
  "Human-readable label for tab `scene` at index `n`:
  \"<n> :: <title> :: <url>\"."
  [scene n]
  (let [webview (.lookup scene "#webView")
        engine (-> webview .getEngine)
        title (-> engine .getTitle)
        location (-> engine .getLocation)]
    (format "%s :: %s :: %s" n title location)))
(defn filter-buffers
  "Asynchronously prune buffer-list labels that no longer match the
  omnibar filter text.  Runs in a future so the key handler returns
  immediately; the brief sleep lets the omnibar text settle first.
  Fixes: `( Thread / sleep 100 )` was garbled (spaces split the static
  call into unresolvable symbols, breaking compilation); restored to
  (Thread/sleep 100)."
  []
  (future
    (Thread/sleep 100)
    (let [bufs (get-buffers)
          children (-> bufs .getChildren)]
      (doall
       (map
        (fn [c]
          (when (not (is-matching-buf? (.getText c)))
            (run-later
             (.remove children c)
             )))
        children)))))
(defn show-buffers
  "Populate the buffer-list overlay with one label per open tab."
  []
  (let [scenes (get-scenes)]
    (run-later
     (let [bufs (get-buffers)]
       (doto bufs
         (-> .getChildren .clear)
         (-> .getChildren (.add (Label. "Buffers: "))))))
    (doall
     (map (fn [i]
            (let [scene (get scenes i)]
              (println "Make the scene....")
              (run-later
               (doto (-> (get-scene-id) get-scene (.lookup "#buffers"))
                 (-> .getChildren (.add (Label. (get-buffer-entry-text scene i))))))))
          (range (count scenes))))))
;; Map over elements (links) on page load...sweet
;; TODO: Based on this filter list, we can show the user a native list
;; of jumpable links (instead of relying on JS), where it works like the buffer
;; jump list, but the action is set to .load url or simulate a key click
(defn el-link-fn
  "Attach a click listener to every DOM node in NodeList `els` (anchor
  tags from .getElementsByTagName): on click, drop back to default mode
  and log the link text.
  Fixes: a debug line had lost its ';;' marker (garbled as
  `( - > el ( .setTextContent \" OH WEL \" ) )`); restored as a comment."
  [els]
  (doall
   (map (fn [i]
          (let [el (-> els (.item i))]
            ;;
            ;; (-> el (.setTextContent "OH WEL"))
            ;; (println (-> el .getTextContent))
            ;; (println (-> el (.getAttribute "href")))
            (-> el (.addEventListener
                    "click"
                    (reify org.w3c.dom.events.EventListener
                      (handleEvent [this event]
                        (default-mode)
                        (println "I clicked a link, good job")
                        (println (-> el .getTextContent))))
                    false))
            )
          )
        (range (.getLength els)))))
(defn remove-annoying-div
  "Remove the element with DOM `id` from `dom`, if present."
  [dom id]
  (let [el (-> dom (.getElementById id))]
    (when el (.remove el))))

(defn remove-annoying-divs
  "Remove every element whose id is listed in conf/dom-id-ignores.txt."
  [dom]
  (let [ids (str/split (slurp "conf/dom-id-ignores.txt") #"\n")]
    (doall
     (map #(remove-annoying-div dom %) ids))))

(defn remove-annoying-class
  "Remove every element bearing CSS class `class-name` from `dom`."
  [dom class-name]
  (let [els (-> dom (.getElementsByClassName class-name))]
    (doall
     (map
      (fn [_]
        ;; We remove item 0, because each remove causes a reindex
        (let [el (-> els (.item 0))]
          (-> el .remove)))
      (range (.getLength els))))))

(defn remove-annoying-classes
  "Remove every element whose class is listed in
  conf/dom-class-ignores.txt."
  [dom]
  (let [ids (str/split (slurp "conf/dom-class-ignores.txt") #"\n")]
    (doall
     (map #(remove-annoying-class dom %) ids))))
(defn new-scene
  "Create a fresh browser tab: load the FXML layout, register it as the
  current scene, wire up the webengine's alert handler and load-state
  listener, point it at the default URL, and show it on the stage.
  Fixes: two formerly-commented `reify` fragments had lost their ';;'
  markers (`( reify javafx.event . EventHandler` and
  `( reify org.w3c.dom.events . EventListener`), unbalancing the form
  and breaking compilation; they are restored as comments."
  []
  (run-later
   (let [
         root (FXMLLoader/load (-> "resources/WebUI.fxml" File. .toURI .toURL))
         scene (Scene. root)
         ]
     (add-scene scene)
     (set-scene-id (- (count (get-scenes)) 1))
     ;; (bind-keys scene)
     ;; (set-scene-id (+ 1 (get-scene-id)))
     (println "Getting new scene, binding keys...")
     ;; Bind the keys
     (let [webview (.lookup scene "#webView")
           webengine (.getEngine webview)]
       ;; Clean up this mess
       (doto webengine
         ;; (.onStatusChanged
         ;;  (reify javafx.event.EventHandler
         ;;    (handle [this event]
         ;;      (println "On status change"))))
         (.setOnAlert
          (reify javafx.event.EventHandler
            (handle [this event]
              (println (.getData event))
              (show-alert (.getData event)))))
         (-> .getLoadWorker
             .stateProperty
             (.addListener
              (reify ChangeListener
                (changed [this observable old-value new-value]
                  (when (and (= new-value Worker$State/RUNNING)
                             (= old-value Worker$State/SCHEDULED))
                    (execute-script webengine js-bundle))
                  (when (not (= new-value Worker$State/SUCCEEDED))
                    (set-omnibar-text
                     (format "Loading :: %s" (-> webengine .getLocation))))
                  (when (= new-value Worker$State/SUCCEEDED)
                    ;; one-shot: stop listening after the first full load
                    (.removeListener observable this)
                    ;;
                    (println (-> webengine .getLocation))
                    ;; (println (-> webengine .getDocument .toString))
                    ;; When a thing loads, set the URL to match
                    (set-omnibar-text-to-url)
                    ;; map over all the page links on load
                    (-> webengine .getDocument remove-annoying-divs)
                    (-> webengine .getDocument remove-annoying-classes)
                    (-> webengine .getDocument (.getElementsByTagName "a") el-link-fn)
                    (-> webengine (.setUserAgent "Mozilla/5.0 (Windows NT 6.1) Gecko/20100101 Firefox/61.0"))
                    ;; (-> webengine .getDocument (.getElementById "content")
                    ;;     (.addEventListener
                    ;;      "click"
                    ;;      (reify org.w3c.dom.events.EventListener
                    ;;        (handleEvent [this event]
                    ;;          (javafx.application.Platform/exit)))))
                    (execute-script webengine js-bundle)
                    )))))
         (.load (get-default-url))
         ))
     ;; Add it to the stage
     (doto (get-atomic-stage)
       (.setScene scene)
       (.show)))))
;; Abstract the webview + webengine
;; (-> (-> (get (ahubu.browser/get-scenes) 0) (.lookup "#webView")) .getEngine)
| null | https://raw.githubusercontent.com/ahungry/ahubu/29f19ea71ab4121d932f1cd976fc3edc2b0999b9/src/ahubu/lib.clj | clojure | Each scene is basically a tab
(def atomic-default-url (atom ""))
Work with a sort of timeout here - cross domain base is set strictly after
first URL request, then lax again after some time has expired.
FIXME: Handle root domain logic better - when to flip/flop cross domain setting
TODO: Add cross domain user setting
Hmm, we could hide things we do not want to see.
Opposite of slurp
Add a previously dumped cookie
-java
-a-cookie-using-javafxs-webengine-webview
(.addShutdownHook
(java.lang.Runtime/getRuntime)
(Thread. (println "Adios!") (save-cookies)))
(set-scene-id 0)
(add-scene scene)
(bind-keys scene)
(.initModality javafx.stage.Modality/APPLICATION_MODAL)
(.initModality javafx.stage.Modality/WINDOW_MODAL)
(.setAlwaysOnTop true)
first remove this listener
and then redefine log and error (fresh page)
Try to grab string key, then keyword key
(println key)
Global key listeners
bubble up keypress
ENTER (code) vs <invis> (char), we want ENTER
Ideally, we want the char, since it tracks lowercase etc.
(println (get-readable-key ecode etext))
(.consume event)
disable webview here, until some delay was met
-disable-highlight-and-copy-mode-in-webengine
Map over elements (links) on page load...sweet
TODO: Based on this filter list, we can show the user a native list
jump list, but the action is set to .load url or simulate a key click
(println (-> el .getTextContent))
(println (-> el (.getAttribute "href")))
We remove item 0, because each remove causes a reindex
(bind-keys scene)
(set-scene-id (+ 1 (get-scene-id)))
Bind the keys
Clean up this mess
(.onStatusChanged
(handle [this event]
(println "On status change"))))
(println (-> webengine .getDocument .toString))
When a thing loads, set the URL to match
map over all the page links on load
(-> webengine .getDocument (.getElementById "content")
(.addEventListener
"click"
(handleEvent [this event]
(javafx.application.Platform/exit)))))
Add it to the stage
Abstract the webview + webengine | (ns ahubu.lib
(:require
[clojure.string :as str])
(:import
MyEventDispatcher
WebUIController
(java.io File)
(java.net HttpURLConnection URL URLConnection URLStreamHandler URLStreamHandlerFactory)
(javafx.application Application Platform)
(javafx.beans.value ChangeListener)
(javafx.concurrent Worker$State)
(javafx.event EventHandler)
(javafx.fxml FXMLLoader)
(javafx.scene Parent Scene)
(javafx.scene.control Label)
(javafx.scene.input Clipboard ClipboardContent KeyEvent)
(javafx.scene.web WebView)
(javafx.stage Stage)
(javax.net.ssl HttpsURLConnection)
(netscape.javascript JSObject)
(sun.net.www.protocol.https Handler)
))
(gen-class
:extends javafx.application.Application
:name com.ahungry.Browser)
(declare delete-current-scene)
(declare bind-keys)
(declare new-scene)
(declare goto-scene)
(declare hide-buffers)
(declare show-buffers)
(declare filter-buffers)
(declare omnibar-load-url)
(declare default-mode)
(declare omnibar-handler)
(declare omnibar-parse-command)
(declare omnibar-handle-command)
(defmacro compile-time-slurp [file]
(slurp file))
(def js-bundle (slurp "js-src/bundle.js"))
(defmacro run-later [& forms]
`(let [
p# (promise)
]
(Platform/runLater
(fn []
(deliver p# (try ~@forms (catch Throwable t# t#)))))
p#))
(def world
(atom
{
:cookies {}
:cross-domain-url ""
:default-url (format "file" (System/getProperty "user.dir"))
:hinting? false
:mode :default
:new-tab? false
:omnibar-open? false
:scene-id 0
:scenes []
:searching? false
:showing-buffers? false
:stage nil
}))
(defn set-mode [mode]
(swap! world conj {:mode mode}))
(defn set-atomic-stage [stage]
(swap! world conj {:stage stage}))
(defn get-atomic-stage [] (:stage @world))
(defn add-scene [scene]
(swap! world conj {:scenes (conj (:scenes @world) scene)}))
(defn get-scene [n]
(-> (:scenes @world) (get n)))
(defn get-scenes [] (:scenes @world))
(defn delete-nth-scene [scenes n]
(into []
(concat (subvec scenes 0 n)
(subvec scenes (+ 1 n) (count scenes)))))
(defn del-scene [n]
(swap! world conj {:scenes (-> (:scenes @world) (delete-nth-scene n))}))
(defn set-scene-id [n] (swap! world conj {:scene-id n}))
(defn get-scene-id [] (:scene-id @world))
(defn set-new-tab [b]
(swap! world conj {:new-tab? b}))
(defn get-new-tab? [] (:new-tab? @world))
(defn set-default-url [s]
(swap! world conj {:default-url s}))
(defn get-default-url [] (:default-url @world))
(defn set-showing-buffers [b]
(swap! world conj {:showing-buffers? b}))
(defn get-showing-buffers? [] (:showing-buffers? @world))
(defn get-omnibar []
(-> (get-scene-id) get-scene (.lookup "#txtURL")))
(defn get-webview []
(-> (get-scene-id) get-scene (.lookup "#webView")))
(defn get-webengine []
(-> (get-webview) .getEngine))
(defn get-buffers []
(-> (get-scene-id) get-scene (.lookup "#buffers")))
(defn get-tip []
(-> (get-scene-id) get-scene (.lookup "#tip")))
(defn set-tip [s]
(let [style (case s
"NORMAL" "-fx-text-fill: #af0; -fx-background-color: #000;"
"OMNI" "-fx-text-fill: #000; -fx-background-color: #36f"
"GO" "-fx-text-fill: #000; -fx-background-color: #f69"
"INSERT" "-fx-text-fill: #000; -fx-background-color: #f36"
"HINTING" "-fx-text-fill: #000; -fx-background-color: #f63"
"SEARCHING" "-fx-text-fill: #000; -fx-background-color: #f33"
"BUFFERS" "-fx-text-fill: #000; -fx-background-color: #63f"
"-fx-text-fill: #000; -fx-background-color: #af0")]
(run-later
(doto (get-tip)
(.setText s)
(.setStyle style)))))
(defn get-omnibar-text []
(-> (get-omnibar) .getText))
(defn set-omnibar-text [s]
(run-later
(doto (get-omnibar)
(.setText s)
(.positionCaret (count s)))))
(defn set-omnibar-text-to-url []
(when (not (:omnibar-open? @world))
(set-omnibar-text
(-> (get-webengine) .getLocation))))
(defn url-ignore-regexes-from-file [file]
(map re-pattern
(map #(format ".*%s.*" %)
(str/split (slurp file) #"\n"))))
(defn url-ignore-regexes []
(url-ignore-regexes-from-file "conf/url-ignore-regexes.txt"))
(defn matching-regexes [url regexes]
(filter #(re-matches % url) regexes))
(defn url-ignorable? [url]
(let [ignorables (matching-regexes url (url-ignore-regexes))]
(if (> (count ignorables) 0)
(do
(println (format "Ignoring URL: %s, hit %d matchers." url (count ignorables)))
true)
false)))
(defn get-base-domain-pattern [s]
(let [[_ fqdn] (re-matches #".*?://(.*?)[/.$]*" s)]
(if fqdn
(let [domain-parts (-> (str/split fqdn #"\.") reverse)
domain (-> (into [] domain-parts) (subvec 0 2))]
(if domain
(re-pattern
(format "^http[s]*://(.*\\.)*%s\\.%s/.*"
(second domain)
(first domain)))
#".*")) #".*")))
(defn block-cross-domain-net?x [url]
(let [domain (get-base-domain-pattern (:cross-domain-url @world))]
(swap! world conj {:cross-domain-url url})
(future (Thread/sleep 5000) (swap! world conj {:cross-domain-url ""}))
(if (not (re-matches (re-pattern domain) url))
(do (println (format "Blocking X-Domain request: %s" url))
(println domain)
true)
false)))
(defn block-cross-domain-net? [_ ] false)
(defn url-or-no [url proto]
(let [url (.toString url)]
(URL.
(if (or (url-ignorable? url) (block-cross-domain-net? url))
(format "%s:65535" proto)
url))))
(defn my-connection-handler [protocol]
(case protocol
"http" (proxy [sun.net.www.protocol.http.Handler] []
(openConnection [& [url proxy :as args]]
(println url)
(proxy-super openConnection (url-or-no url protocol) proxy)))
"https" (proxy [sun.net.www.protocol.https.Handler] []
(openConnection [& [url proxy :as args]]
(println url)
(proxy-super openConnection (url-or-no url protocol) proxy)))
nil
))
(defn barf [file-name data]
(with-open [wr (clojure.java.io/writer file-name)]
(.write wr (pr-str data))))
(defn clean-uri [uri]
(java.net.URI. (.getScheme uri) (.getHost uri) nil nil))
(defn cookie-to-map [cookie]
{:name (.getName cookie)
:value (.getValue cookie)
:domain (.getDomain cookie)
:maxAge (.getMaxAge cookie)
:secure (.getSecure cookie)})
(defn cookiemap-to-cookie [{name :name value :value domain :domain maxAge :maxAge secure :secure}]
(let [cookie (java.net.HttpCookie. name value)]
(doto cookie
(.setVersion 0)
(.setDomain domain)
(.setSecure secure)
(.setMaxAge maxAge))))
(defn add-cookie [store uri cookiemap]
(let [cookie (cookiemap-to-cookie cookiemap)
uri (clean-uri (java.net.URI. uri))]
(-> store (.add uri cookie))))
(defn load-cookies [store]
(when (.exists (clojure.java.io/file "ahubu.cookies"))
(let [cookies (read-string (slurp "ahubu.cookies"))]
(doseq [[uri uri-map] cookies]
(doseq [[name cookie] uri-map]
(add-cookie store uri cookie))))))
(defn push-cookie-to-uri-map [cookie mp]
(let [name (:name cookie)]
(assoc mp name cookie)))
(defn push-cookie-to-cookie-map [cookie uri mp]
(let [old (get mp uri)]
(assoc mp uri (push-cookie-to-uri-map cookie old))))
(defn push-cookie-to-world [uri cookie]
(swap! world
(fn [old]
(assoc old :cookies
(push-cookie-to-cookie-map cookie uri (:cookies old))))))
(defn my-cookie-store []
(let [store (-> (java.net.CookieManager.) .getCookieStore)
my-store
(proxy [java.net.CookieStore Runnable] []
(run []
(println "Save to disk here"))
(add [uri cookie]
(let [clean (clean-uri uri)
u (.toString clean)]
(.add store clean cookie)
(push-cookie-to-world u (cookie-to-map cookie))))
(get [& [uri :as args]]
(let [clean (clean-uri uri)
u (.toString clean)]
(let [result (.get store clean)]
result)))
(getCookies []
(.getCookies store))
(getURIs []
(.getURIs store))
(remove [uri cookie]
(.remove store uri cookie))
(removeAll []
(.removeAll store)))]
(load-cookies my-store)
my-store))
(defn feed-cookies-to-the-manager [manager cookies]
(doseq [[domain domain-map] cookies]
(doseq [[name c] domain-map]
(let [uri (clean-uri (java.net.URI. domain))]
(.put manager uri {"Set-Cookie" [(format "%s=%s" (:name c) (:value c)) ]})))))
(defn quietly-set-cookies []
(def cookie-manager
(doto (java.net.CookieManager.
(my-cookie-store)
java.net.CookiePolicy/ACCEPT_ALL
java.net . / ACCEPT_ORIGINAL_SERVER
)
java.net.CookieHandler/setDefault))
(feed-cookies-to-the-manager cookie-manager (:cookies @world)))
(defn save-cookies []
(barf "ahubu.cookies" (:cookies @world)))
(defn dump-cookies [store]
(doall (map cookie-to-map (.getCookies store))))
(defn quietly-set-stream-factory []
(WebUIController/stfuAndSetURLStreamHandlerFactory))
(defn -start [this stage]
(let [
root (FXMLLoader/load (-> "resources/WebUI.fxml" File. .toURI .toURL))
scene (Scene. root)
exit (reify javafx.event.EventHandler
(handle [this event]
(println "Goodbye")
(save-cookies)
(javafx.application.Platform/exit)
(System/exit 0)
))
]
(bind-keys stage)
(set-atomic-stage stage)
(doto stage
(.setOnCloseRequest exit)
(.setScene scene)
(.setTitle "AHUBU")
(.show))))
(defn execute-script [w-engine s]
(run-later
(let [
result (.executeScript w-engine s)
]
(if (instance? JSObject result)
(str result)
result))))
(defn dojs [s ]
(execute-script (get-webengine) s))
(defn dojsf [file]
(execute-script (get-webengine) (slurp (format "js-src/%s.js" file))))
(defn decrease-font-size []
(dojsf "decrease-font-size"))
(defn increase-font-size []
(dojsf "increase-font-size"))
(defn inject-firebug [w-engine]
(execute-script w-engine (slurp "js-src/inject-firebug.js")))
(defn execute-script-async [w-engine s]
(let [
p (promise)
*out* *out*
]
(Platform/runLater
(fn []
(let [
o (.executeScript w-engine "new Object()")
]
(.setMember o "cb" (fn [s] (deliver p s)))
(.setMember o "println" (fn [s] (println s)))
(.eval o s))))
@p))
(defn repl [webengine]
(let [s (read-line)]
(when (not= "" (.trim s))
(println @(execute-script webengine s))
(recur webengine))))
(defn bind [s obj webengine]
(run-later
(.setMember
(.executeScript webengine "window")
s obj)))
(defn clear-cookies [cookie-manager]
(-> cookie-manager .getCookieStore .removeAll))
(defn async-load [url]
(run-later
(doto (get-webengine)
(.load url))))
(defn async-loadx [url]
(let [
webengine (get-webengine)
p (promise)
f (fn [s]
(binding [*out* *out*] (println s)))
listener (reify ChangeListener
(changed [this observable old-value new-value]
(when (= new-value Worker$State/SUCCEEDED)
( .removeListener observable this )
(println "In the ChangeListener...")
(execute-script webengine js-bundle)
(bind "println" f webengine)
(future
(Thread/sleep 1000)
"))
(deliver p true))))
]
(run-later
(doto webengine
(-> .getLoadWorker .stateProperty (.addListener listener))
(.load url)))
@p))
(defn back [webengine]
(execute-script webengine "window.history.back()"))
(defn prev-scene []
(default-mode)
(let [n (get-scene-id)
id (- n 1)]
(if (< id 0)
(goto-scene (- (count (get-scenes)) 1))
(goto-scene id))))
(defn next-scene []
(default-mode)
(let [n (get-scene-id)
id (+ n 1)]
(if (>= id (count (get-scenes)))
(goto-scene 0)
(goto-scene id))))
(defn omnibar-stop []
(swap! world conj {:omnibar-open? false})
(run-later
(future (Thread/sleep 100) (set-omnibar-text-to-url))
(doto (get-omnibar) (.setDisable true))
(doto (get-webview) (.setDisable false))))
(defn omnibar-start []
(swap! world conj {:omnibar-open? true})
(run-later
(doto (get-omnibar) (.setDisable false) (.requestFocus))
(doto (get-webview) (.setDisable true))))
(defn yank [s]
(let [content (ClipboardContent.)]
(run-later
(set-tip "YANKED!")
(future (Thread/sleep 500) (set-tip "NORMAL"))
(-> content (.putString s))
(-> (Clipboard/getSystemClipboard) (.setContent content)))))
(defn yank-current-url []
(-> (get-webengine) .getLocation yank))
(defn buffers-start []
(set-mode :omnibar)
(set-tip "BUFFERS")
(set-showing-buffers true)
(run-later
(omnibar-start)
(show-buffers)
(set-omnibar-text ":buffers! ")
"Overlay.show()"))
(defn quickmark-url [url]
(default-mode)
(omnibar-load-url url))
(defn get-xdg-config-home []
(or (System/getenv "XDG_CONFIG_HOME")
(System/getProperty "user.home")))
(defn get-rc-file-raw []
(let [defaults (read-string (slurp "conf/default-rc"))
home-rc (format "%s/.ahuburc" (System/getProperty "user.home"))
xdg-rc (format "%s/ahubu/ahuburc" (get-xdg-config-home))]
(conj
defaults
(if (.exists (clojure.java.io/file home-rc))
(read-string (slurp home-rc)))
(if (.exists (clojure.java.io/file xdg-rc))
(read-string (slurp xdg-rc))))))
(defn get-rc-file []
(let [rc (get-rc-file-raw)
quickmarks (:quickmarks rc)
qm-fns (reduce-kv #(assoc %1 %2 (fn [] (quickmark-url %3))) {} quickmarks)
merged-qms (conj (:quickmarks (:keymaps rc)) qm-fns)]
(conj rc
{:keymaps (conj (:keymaps rc)
{:quickmarks merged-qms})})))
(defn go-mode []
(set-mode :go)
(set-tip "GO"))
(defn font-mode []
(set-mode :font)
(set-tip "FONT"))
(defn quickmarks-mode []
(set-mode :quickmarks)
(set-tip "QUICKMARKS"))
(defn quickmarks-new-tab-mode []
(set-new-tab true)
(quickmarks-mode))
(defn default-mode []
(set-mode :default)
(set-tip "NORMAL")
(hide-buffers)
(omnibar-stop)
(swap! world conj {:hinting? false :searching? false})
(dojs "Hinting.off(); Overlay.hide(); Form.disable()"))
(defn insert-mode []
(set-mode :insert)
(set-tip "INSERT")
(dojs "Form.enable()"))
(defn search-mode []
(set-mode :search)
(set-tip "SEARCHING")
(swap! world conj {:searching? true})
(println "Searching")
(set-omnibar-text "/")
(dojs "Search.reset()"))
(defn hinting-mode []
(set-mode :hinting)
(set-tip "HINTING")
(swap! world conj {:hinting? true})
(dojs "Hinting.on(); Overlay.show()"))
(defn inject-firebug []
(dojsf "inject-firebug"))
(defn omnibar-open []
(set-mode :omnibar)
(set-tip "OMNI")
(omnibar-start)
(set-omnibar-text ":open ")
(dojs "Overlay.show()"))
(defn omnibar-open-current []
(omnibar-open)
(set-omnibar-text (format ":open %s" (get-omnibar-text))))
(defn omnibar-open-new-tab []
(set-new-tab true)
(omnibar-open)
(set-omnibar-text ":tabopen "))
(defn go-top []
(default-mode)
(dojs "window.scrollTo(0, 0)"))
(defn key-map-op [key]
(let [mode (:mode @world)
rc (-> (:keymaps (get-rc-file)) (get mode))
op? (get rc key)
key (keyword key)
op (or op? (get rc key))]
op))
(defn process-op [op]
(when op
(if (= java.lang.String (type op))
(execute-script (get-webengine) op)
((eval op)))))
(defn key-map-handler [key]
(let [op (key-map-op key)
op-before (key-map-op :BEFORE)
op-after (key-map-op :AFTER)]
( println ( format " KM OP : % s " op - before ) )
( println ( format " KM OP : % s " op ) )
( println ( format " KM OP : % s " op - after ) )
(when (get-showing-buffers?)
(filter-buffers))
(when (:hinting? @world)
(dojs (format "Hinting.keyHandler('%s')" key))
( println ( format " HINTING : % s " key ) )
)
(when (:searching? @world)
(when (= 1 (count key))
(set-omnibar-text (format "%s%s" (get-omnibar-text) key)))
(dojs (format "Search.incrementalFind('%s')" key))
)
Check for the BEFORE bind ( runs with any other keypress )
(process-op op-before)
(when (process-op op)
(future
(Thread/sleep 100)
(process-op op-after)))
(defn get-readable-key [code text]
(if (>= (count text) (count code))
text code))
(defn bind-keys [what]
(doto what
(->
(.addEventFilter
(. KeyEvent KEY_PRESSED)
EventHandler
(handle [this event]
(let [ecode (-> event .getCode .toString)
etext (-> event .getText .toString)]
(key-map-handler (get-readable-key ecode etext)))
false
))))))
(defn show-alert [s]
(doto (javafx.scene.control.Dialog.)
(-> .getDialogPane (.setContentText s))
(-> .getDialogPane .getButtonTypes (.add (. javafx.scene.control.ButtonType OK)))
(.showAndWait)))
(defn goto-scene [n]
(println "GOING TO SCENE")
(println n)
(run-later
(set-scene-id n)
(doto (get-atomic-stage)
(.setScene (get-scene n))
(.show))))
(defn delete-current-scene []
(let [n (get-scene-id)]
(when (> n 0)
(goto-scene (- n 1))
(run-later
(Thread/sleep 50)
(del-scene n)))))
(defn omnibar-load-url [url]
(run-later
(if (get-new-tab?)
(do
(set-default-url url)
(new-scene)
(set-new-tab false))
(-> (get-webengine) (.load url)))))
(defn get-selected-buffer-text []
(let [bufs (get-buffers)
children (-> bufs .getChildren)
id 0
child (when children (get (vec children) id))]
(if child (.getText child) "")))
(defn switch-to-buffer []
(let [s (get-selected-buffer-text)
maybe-id (last (re-matches #"^([0-9]).*" s))
id (if maybe-id (Integer/parseInt maybe-id) -1)]
(when (>= id 0)
(goto-scene id))
(set-showing-buffers false)
(hide-buffers)))
(defn omnibar-parse-command [cmd]
(re-matches #":(.*?) (.*)" cmd))
(defn omnibar-handle-command [cmd]
(let [[_ cmd arg] (omnibar-parse-command cmd)]
( println ( format " OB Parse Cmd : % s % s % s " _ cmd arg ) )
(case cmd
"open" (omnibar-handler arg)
"tabopen" (omnibar-handler arg)
(omnibar-handler _))))
(defn omnibar-handler [n]
(if (get-showing-buffers?) (switch-to-buffer)
(let [query
(cond
(re-matches #"^:.*" n) (omnibar-handle-command n)
(re-matches #"^file:.*" n) n
(re-matches #"^http[s]*:.*" n) n
(re-matches #".*\..*" n) (format "" n)
:else (format "" n)
)]
(omnibar-load-url query))))
(defn hide-buffers []
(let [bufs (get-buffers)]
(run-later
(-> bufs .getChildren .clear))))
(defn is-matching-buf? [s]
(let [[_ cmd arg] (-> (get-omnibar) .getText omnibar-parse-command)
ob-text (or arg _)
pattern (re-pattern (str/lower-case (str/join "" [".*" ob-text ".*"])))]
(re-matches pattern (str/lower-case s))))
(defn get-buffer-entry-text [scene n]
(let [webview (.lookup scene "#webView")
engine (-> webview .getEngine)
title (-> engine .getTitle)
location (-> engine .getLocation)]
(format "%s :: %s :: %s" n title location)))
(defn filter-buffers []
(future
( Thread / sleep 100 )
(let [bufs (get-buffers)
children (-> bufs .getChildren)]
(doall
(map
(fn [c]
(when (not (is-matching-buf? (.getText c)))
(run-later
(.remove children c)
)))
children)))))
(defn show-buffers []
(let [scenes (get-scenes)]
(run-later
(let [bufs (get-buffers)]
(doto bufs
(-> .getChildren .clear)
(-> .getChildren (.add (Label. "Buffers: "))))))
(doall
(map (fn [i]
(let [scene (get scenes i)]
(println "Make the scene....")
(run-later
(doto (-> (get-scene-id) get-scene (.lookup "#buffers"))
(-> .getChildren (.add (Label. (get-buffer-entry-text scene i))))))))
(range (count scenes))))))
of jumpable links ( instead of relying on JS ) , where it works like the buffer
(defn el-link-fn [els]
(doall
(map (fn [i]
(let [el (-> els (.item i))]
( - > el ( .setTextContent " OH WEL " ) )
(-> el (.addEventListener
"click"
(reify org.w3c.dom.events.EventListener
(handleEvent [this event]
(default-mode)
(println "I clicked a link, good job")
(println (-> el .getTextContent))))
false))
)
)
(range (.getLength els)))))
(defn remove-annoying-div [dom id]
(let [el (-> dom (.getElementById id))]
(when el (.remove el))))
(defn remove-annoying-divs [dom]
(let [ids (str/split (slurp "conf/dom-id-ignores.txt") #"\n")]
(doall
(map #(remove-annoying-div dom %) ids))))
(defn remove-annoying-class [dom class-name]
(let [els (-> dom (.getElementsByClassName class-name))]
(doall
(map
(fn [_]
(let [el (-> els (.item 0))]
(-> el .remove)))
(range (.getLength els))))))
(defn remove-annoying-classes [dom]
(let [ids (str/split (slurp "conf/dom-class-ignores.txt") #"\n")]
(doall
(map #(remove-annoying-class dom %) ids))))
(defn new-scene []
(run-later
(let [
root (FXMLLoader/load (-> "resources/WebUI.fxml" File. .toURI .toURL))
scene (Scene. root)
]
(add-scene scene)
(set-scene-id (- (count (get-scenes)) 1))
(println "Getting new scene, binding keys...")
(let [webview (.lookup scene "#webView")
webengine (.getEngine webview)]
(doto webengine
( reify javafx.event . EventHandler
(.setOnAlert
(reify javafx.event.EventHandler
(handle [this event]
(println (.getData event))
(show-alert (.getData event)))))
(-> .getLoadWorker
.stateProperty
(.addListener
(reify ChangeListener
(changed [this observable old-value new-value]
(when (and (= new-value Worker$State/RUNNING)
(= old-value Worker$State/SCHEDULED))
(execute-script webengine js-bundle))
(when (not (= new-value Worker$State/SUCCEEDED))
(set-omnibar-text
(format "Loading :: %s" (-> webengine .getLocation))))
(when (= new-value Worker$State/SUCCEEDED)
( .removeListener observable this )
(println (-> webengine .getLocation))
(set-omnibar-text-to-url)
(-> webengine .getDocument remove-annoying-divs)
(-> webengine .getDocument remove-annoying-classes)
(-> webengine .getDocument (.getElementsByTagName "a") el-link-fn)
(-> webengine (.setUserAgent "Mozilla/5.0 (Windows NT 6.1) Gecko/20100101 Firefox/61.0"))
( reify org.w3c.dom.events . EventListener
(execute-script webengine js-bundle)
)))))
(.load (get-default-url))
))
(doto (get-atomic-stage)
(.setScene scene)
(.show)))))
( - > ( - > ( get ( ahubu.browser/get-scenes ) 0 ) ( .lookup " # webView " ) ) .getEngine )
|
a088e3a2c2621e9150b85eb1d5a8c37c096811226e1c3fa9ce535f12f4e5d2bf | GregorySchwartz/too-many-cells | Types.hs | TooManyCells . Classify . Types
Collects the types used in classification of cells .
Gregory W. Schwartz
Collects the types used in classification of cells.
-}
{-# LANGUAGE StrictData #-}
module TooManyCells.Classify.Types where
-- Remote
-- Local
-- Basic
newtype SingleMatrixFlag = SingleMatrixFlag
{ unSingleMatrixFlag :: Bool
} deriving (Read,Show)
| null | https://raw.githubusercontent.com/GregorySchwartz/too-many-cells/0de948c6e99a489f1d2e8c412a04e4bcf3b26806/src/TooManyCells/Classify/Types.hs | haskell | # LANGUAGE StrictData #
Remote
Local
Basic | TooManyCells . Classify . Types
Collects the types used in classification of cells .
Gregory W. Schwartz
Collects the types used in classification of cells.
-}
module TooManyCells.Classify.Types where
newtype SingleMatrixFlag = SingleMatrixFlag
{ unSingleMatrixFlag :: Bool
} deriving (Read,Show)
|
9929d7deb1de371d588ba9aa763f28d50190e95f8e7dff6fd0de1405fd800233 | cedlemo/OCaml-GI-ctypes-bindings-generator | Shadow_type.ml | open Ctypes
open Foreign
type t = None | In | Out | Etched_in | Etched_out
let of_value v =
if v = Unsigned.UInt32.of_int 0 then None
else if v = Unsigned.UInt32.of_int 1 then In
else if v = Unsigned.UInt32.of_int 2 then Out
else if v = Unsigned.UInt32.of_int 3 then Etched_in
else if v = Unsigned.UInt32.of_int 4 then Etched_out
else raise (Invalid_argument "Unexpected Shadow_type value")
let to_value = function
| None -> Unsigned.UInt32.of_int 0
| In -> Unsigned.UInt32.of_int 1
| Out -> Unsigned.UInt32.of_int 2
| Etched_in -> Unsigned.UInt32.of_int 3
| Etched_out -> Unsigned.UInt32.of_int 4
let t_view = view ~read:of_value ~write:to_value uint32_t
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Shadow_type.ml | ocaml | open Ctypes
open Foreign
type t = None | In | Out | Etched_in | Etched_out
let of_value v =
if v = Unsigned.UInt32.of_int 0 then None
else if v = Unsigned.UInt32.of_int 1 then In
else if v = Unsigned.UInt32.of_int 2 then Out
else if v = Unsigned.UInt32.of_int 3 then Etched_in
else if v = Unsigned.UInt32.of_int 4 then Etched_out
else raise (Invalid_argument "Unexpected Shadow_type value")
let to_value = function
| None -> Unsigned.UInt32.of_int 0
| In -> Unsigned.UInt32.of_int 1
| Out -> Unsigned.UInt32.of_int 2
| Etched_in -> Unsigned.UInt32.of_int 3
| Etched_out -> Unsigned.UInt32.of_int 4
let t_view = view ~read:of_value ~write:to_value uint32_t
|
|
7a54422ec2ce3b0bbaab935536cf6b8fa48494678972e721e6ed704d64c3c5f3 | eslick/cl-stdutils | matrix.lisp | -*-Mode : LISP ; Package : stdutils ; ; Syntax : Common - lisp -*-
(in-package :stdutils)
(eval-when (:compile-toplevel)
(export '(matrixp num-rows num-cols square-matrix? make-matrix
make-identity-matrix copy-matrix print-matrix
transpose-matrix multiply-matrix add-matrix subtract-matrix
invert-matrix solve-matrix)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Matrix operations
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(eval-when (:compile-toplevel)
(proclaim '(inline matrixp num-rows num-cols)))
(eval-when (:load-toplevel)
(defun-exported matrixp (matrix)
"Test whether the argument is a matrix"
(and (arrayp matrix)
(= (array-rank matrix) 2)))
(defun-exported num-rows (matrix)
"Return the number of rows of a matrix"
(array-dimension matrix 0))
(defun-exported num-cols (matrix)
"Return the number of rows of a matrix"
(array-dimension matrix 1))
(defun-exported square-matrix? (matrix)
"Is the matrix a square matrix?"
(and (matrixp matrix)
(= (num-rows matrix) (num-cols matrix)))))
(defun-exported make-matrix (rows &optional (cols rows))
"Create a matrix filled with zeros. If only one parameter is
specified the matrix will be square."
(make-array (list rows cols) :initial-element 0))
(defmacro-exported do-matrix-rows ((row matrix) &body body)
`(dotimes (,row (num-rows ,matrix))
,@body))
(defmacro-exported do-matrix-columns ((col matrix) &body body)
`(dotimes (,col (num-cols ,matrix))
,@body))
(defmacro-exported do-matrix ((row col matrix) &body body)
(with-gensyms (mval)
`(let ((,mval ,matrix))
(do-matrix-rows (,row ,mval)
(do-matrix-columns (,col ,mval)
,@body)))))
(defun-exported make-identity-matrix (size)
"Make an identity matrix of the specified size."
(let ((matrix (make-array (list size size) :initial-element 0)))
(dotimes (i size matrix)
(setf (aref matrix i i) 1))))
(defun-exported copy-matrix (matrix)
"Return a copy of the matrix."
(let* ((rows (num-rows matrix))
(cols (num-cols matrix))
(copy (make-array (list rows cols))))
(dotimes (row rows copy)
(dotimes (col cols)
(setf (aref copy row col) (aref matrix row col))))))
(defun-exported clear-matrix (matrix value)
"Set every element of matrix to an initial-value"
(do-matrix (row col matrix)
(setf (aref matrix row col) value)))
(defun-exported print-matrix (matrix &optional (destination t) (control-string "~20S"))
"Print a matrix. The optional control string indicates how each
entry should be printed."
(let ((rows (num-Rows matrix))
(cols (num-Cols matrix)))
(dotimes (row rows)
(format destination "~%")
(dotimes (col cols)
(format destination control-string (aref matrix row col))))
(format destination "~%")))
(defun-exported transpose-matrix (matrix)
"Transpose a matrix"
(let* ((rows (num-rows matrix))
(cols (num-cols matrix))
(transpose (make-matrix cols rows)))
(dotimes (row rows transpose)
(dotimes (col cols)
(setf (aref transpose col row)
(aref matrix row col))))))
(defun-exported multiply-matrix (&rest matrices)
"Multiply matrices"
(labels ((multiply-two (m1 m2)
(let* ((rows1 (num-rows m1))
(cols1 (num-cols m1))
(cols2 (num-cols m2))
(result (make-matrix rows1 cols2)))
(dotimes (row rows1 result)
(dotimes (col cols2)
(dotimes (i cols1)
(setf (aref result row col)
(+ (aref result row col)
(* (aref m1 row i)
(aref m2 i col))))))))))
(when matrices ; Empty arguments check
(reduce #'multiply-two matrices))))
;;(defun-exported matrix-multiply-vector (matrix vector)
;; "Multiply a matrix by a vector"
;; (assert (= (array-dimension matrix 0) (array-dimension vector 0)))
;; (
(defun-exported add-matrix (&rest matrices)
"Add matrices"
(labels ((add-two (m1 m2)
(let* ((rows (num-rows m1))
(cols (num-cols m1))
(result (make-matrix rows cols)))
(dotimes (row rows result)
(dotimes (col cols)
(setf (aref result row col)
(+ (aref m1 row col)
(aref m2 row col))))))))
(when matrices ; Empty arguments check
(reduce #'add-two matrices))))
(defun-exported subtract-matrix (&rest matrices)
"Subtract matrices"
(labels ((subtract-two (m1 m2)
(let* ((rows (num-rows m1))
(cols (num-cols m1))
(result (make-matrix rows cols)))
(dotimes (row rows result)
(dotimes (col cols)
(setf (aref result row col)
(- (aref m1 row col)
(aref m2 row col))))))))
(when matrices ; Empty arguments check
(reduce #'subtract-two matrices))))
(defun-exported invert-matrix (matrix &optional (destructive T))
"Find the inverse of a matrix. By default this operation is
destructive. If you want to preserve the original matrix, call this
function with an argument of NIL to destructive."
(let ((result (if destructive matrix (copy-matrix matrix)))
(size (num-rows matrix))
(temp 0))
(dotimes (i size result)
(setf temp (aref result i i))
(dotimes (j size)
(setf (aref result i j)
(if (= i j)
(/ (aref result i j))
(/ (aref result i j) temp))))
(dotimes (j size)
(unless (= i j)
(setf temp (aref result j i)
(aref result j i) 0)
(dotimes (k size)
(setf (aref result j k)
(- (aref result j k)
(* temp (aref result i k))))))))))
(defun-exported exchange-rows (matrix row-i row-j)
"Exchange row-i and row-j of a matrix"
(let ((cols (num-cols matrix)))
(dotimes (col cols)
(rotatef (aref matrix row-i col) (aref matrix row-j col)))))
(defun-exported eliminate-matrix (matrix rows cols)
"Gaussian elimination with partial pivoting. "
;; Evaluated for side effect. A return value of :singular indicates the
;; matrix is singular (an error).
(let ((max 0))
(loop for i below rows
do (setf max i)
do (loop for j from (1+ i) below rows
do (when (> (abs (aref matrix j i))
(abs (aref matrix max i)))
(setf max j)))
do (when (zerop (aref matrix max i))
(return-from eliminate-matrix :singular)) ; error "Singular matrix"
do (loop for k from i below cols ; Exchange rows
do (rotatef (aref matrix i k) (aref matrix max k)))
do (loop for j from (1+ i) below rows
do (loop for k from (1- cols) downto i
do (setf (aref matrix j k)
(- (aref matrix j k)
(* (aref matrix i k)
(/ (aref matrix j i)
(aref matrix i i)))))
)))
matrix))
(defun-exported substitute-matrix (matrix rows cols)
(let ((temp 0.0)
(x (make-array rows :initial-element 0)))
(loop for j from (1- rows) downto 0
do (setf temp 0.0)
do (loop for k from (1+ j) below rows
do (incf temp (* (aref matrix j k) (aref x k))))
do (setf (aref x j) (/ (- (aref matrix j (1- cols)) temp)
(aref matrix j j))))
x))
(defun-exported solve-matrix (matrix &optional (destructive T) print-soln)
"Solve a matrix using Gaussian elimination
Matrix must be N by N+1
Assume solution is stored as the N+1st column of the matrix"
(let ((rows (num-rows matrix))
(cols (num-cols matrix))
(result (if destructive matrix (copy-matrix matrix))))
(unless (= (1+ rows) cols)
(error "Ill formed matrix")) ; Cryptic error message
(cond ((eq :singular (eliminate-matrix result rows cols)))
(T (let ((soln (substitute-matrix result rows cols)))
(when print-soln
(loop for i below rows
do (format t "~% X~A = ~A" i (aref soln i))))
soln)))))
| null | https://raw.githubusercontent.com/eslick/cl-stdutils/4a4e5a4036b815318282da5dee2a22825369137b/src/matrix.lisp | lisp | Package : stdutils ; ; Syntax : Common - lisp -*-
Matrix operations
Empty arguments check
(defun-exported matrix-multiply-vector (matrix vector)
"Multiply a matrix by a vector"
(assert (= (array-dimension matrix 0) (array-dimension vector 0)))
(
Empty arguments check
Empty arguments check
Evaluated for side effect. A return value of :singular indicates the
matrix is singular (an error).
error "Singular matrix"
Exchange rows
Cryptic error message
|
(in-package :stdutils)
(eval-when (:compile-toplevel)
(export '(matrixp num-rows num-cols square-matrix? make-matrix
make-identity-matrix copy-matrix print-matrix
transpose-matrix multiply-matrix add-matrix subtract-matrix
invert-matrix solve-matrix)))
(eval-when (:compile-toplevel)
(proclaim '(inline matrixp num-rows num-cols)))
(eval-when (:load-toplevel)
(defun-exported matrixp (matrix)
"Test whether the argument is a matrix"
(and (arrayp matrix)
(= (array-rank matrix) 2)))
(defun-exported num-rows (matrix)
"Return the number of rows of a matrix"
(array-dimension matrix 0))
(defun-exported num-cols (matrix)
"Return the number of rows of a matrix"
(array-dimension matrix 1))
(defun-exported square-matrix? (matrix)
"Is the matrix a square matrix?"
(and (matrixp matrix)
(= (num-rows matrix) (num-cols matrix)))))
(defun-exported make-matrix (rows &optional (cols rows))
"Create a matrix filled with zeros. If only one parameter is
specified the matrix will be square."
(make-array (list rows cols) :initial-element 0))
(defmacro-exported do-matrix-rows ((row matrix) &body body)
`(dotimes (,row (num-rows ,matrix))
,@body))
(defmacro-exported do-matrix-columns ((col matrix) &body body)
`(dotimes (,col (num-cols ,matrix))
,@body))
(defmacro-exported do-matrix ((row col matrix) &body body)
(with-gensyms (mval)
`(let ((,mval ,matrix))
(do-matrix-rows (,row ,mval)
(do-matrix-columns (,col ,mval)
,@body)))))
(defun-exported make-identity-matrix (size)
"Make an identity matrix of the specified size."
(let ((matrix (make-array (list size size) :initial-element 0)))
(dotimes (i size matrix)
(setf (aref matrix i i) 1))))
(defun-exported copy-matrix (matrix)
"Return a copy of the matrix."
(let* ((rows (num-rows matrix))
(cols (num-cols matrix))
(copy (make-array (list rows cols))))
(dotimes (row rows copy)
(dotimes (col cols)
(setf (aref copy row col) (aref matrix row col))))))
(defun-exported clear-matrix (matrix value)
"Set every element of matrix to an initial-value"
(do-matrix (row col matrix)
(setf (aref matrix row col) value)))
(defun-exported print-matrix (matrix &optional (destination t) (control-string "~20S"))
"Print a matrix. The optional control string indicates how each
entry should be printed."
(let ((rows (num-Rows matrix))
(cols (num-Cols matrix)))
(dotimes (row rows)
(format destination "~%")
(dotimes (col cols)
(format destination control-string (aref matrix row col))))
(format destination "~%")))
(defun-exported transpose-matrix (matrix)
"Transpose a matrix"
(let* ((rows (num-rows matrix))
(cols (num-cols matrix))
(transpose (make-matrix cols rows)))
(dotimes (row rows transpose)
(dotimes (col cols)
(setf (aref transpose col row)
(aref matrix row col))))))
(defun-exported multiply-matrix (&rest matrices)
"Multiply matrices"
(labels ((multiply-two (m1 m2)
(let* ((rows1 (num-rows m1))
(cols1 (num-cols m1))
(cols2 (num-cols m2))
(result (make-matrix rows1 cols2)))
(dotimes (row rows1 result)
(dotimes (col cols2)
(dotimes (i cols1)
(setf (aref result row col)
(+ (aref result row col)
(* (aref m1 row i)
(aref m2 i col))))))))))
(reduce #'multiply-two matrices))))
(defun-exported add-matrix (&rest matrices)
"Add matrices"
(labels ((add-two (m1 m2)
(let* ((rows (num-rows m1))
(cols (num-cols m1))
(result (make-matrix rows cols)))
(dotimes (row rows result)
(dotimes (col cols)
(setf (aref result row col)
(+ (aref m1 row col)
(aref m2 row col))))))))
(reduce #'add-two matrices))))
(defun-exported subtract-matrix (&rest matrices)
"Subtract matrices"
(labels ((subtract-two (m1 m2)
(let* ((rows (num-rows m1))
(cols (num-cols m1))
(result (make-matrix rows cols)))
(dotimes (row rows result)
(dotimes (col cols)
(setf (aref result row col)
(- (aref m1 row col)
(aref m2 row col))))))))
(reduce #'subtract-two matrices))))
(defun-exported invert-matrix (matrix &optional (destructive T))
"Find the inverse of a matrix. By default this operation is
destructive. If you want to preserve the original matrix, call this
function with an argument of NIL to destructive."
(let ((result (if destructive matrix (copy-matrix matrix)))
(size (num-rows matrix))
(temp 0))
(dotimes (i size result)
(setf temp (aref result i i))
(dotimes (j size)
(setf (aref result i j)
(if (= i j)
(/ (aref result i j))
(/ (aref result i j) temp))))
(dotimes (j size)
(unless (= i j)
(setf temp (aref result j i)
(aref result j i) 0)
(dotimes (k size)
(setf (aref result j k)
(- (aref result j k)
(* temp (aref result i k))))))))))
(defun-exported exchange-rows (matrix row-i row-j)
"Exchange row-i and row-j of a matrix"
(let ((cols (num-cols matrix)))
(dotimes (col cols)
(rotatef (aref matrix row-i col) (aref matrix row-j col)))))
(defun-exported eliminate-matrix (matrix rows cols)
"Gaussian elimination with partial pivoting. "
(let ((max 0))
(loop for i below rows
do (setf max i)
do (loop for j from (1+ i) below rows
do (when (> (abs (aref matrix j i))
(abs (aref matrix max i)))
(setf max j)))
do (when (zerop (aref matrix max i))
do (rotatef (aref matrix i k) (aref matrix max k)))
do (loop for j from (1+ i) below rows
do (loop for k from (1- cols) downto i
do (setf (aref matrix j k)
(- (aref matrix j k)
(* (aref matrix i k)
(/ (aref matrix j i)
(aref matrix i i)))))
)))
matrix))
(defun-exported substitute-matrix (matrix rows cols)
(let ((temp 0.0)
(x (make-array rows :initial-element 0)))
(loop for j from (1- rows) downto 0
do (setf temp 0.0)
do (loop for k from (1+ j) below rows
do (incf temp (* (aref matrix j k) (aref x k))))
do (setf (aref x j) (/ (- (aref matrix j (1- cols)) temp)
(aref matrix j j))))
x))
(defun-exported solve-matrix (matrix &optional (destructive T) print-soln)
"Solve a matrix using Gaussian elimination
Matrix must be N by N+1
Assume solution is stored as the N+1st column of the matrix"
(let ((rows (num-rows matrix))
(cols (num-cols matrix))
(result (if destructive matrix (copy-matrix matrix))))
(unless (= (1+ rows) cols)
(cond ((eq :singular (eliminate-matrix result rows cols)))
(T (let ((soln (substitute-matrix result rows cols)))
(when print-soln
(loop for i below rows
do (format t "~% X~A = ~A" i (aref soln i))))
soln)))))
|
fe8d76e084d827608be3cae00510185093257b54b5b558c6112148e74e8f8f46 | tomjridge/tjr_simple_earley | earley.ml | (** An experiment to see whether the imperative code (represented using
a monad) is easier to read; probably it is. *)
open Prelude
(* profiling; debugging --------------------------------------------- *)
let dest_Some = function Some x -> x | _ -> (failwith "dest_Some")
let now () = Core.Time_stamp_counter.(
now () |> to_int63 |> Core.Int63.to_int |> dest_Some)
let Tjr_profile.{mark;get_marks} = Tjr_profile.mk_profiler ~now
(* main ------------------------------------------------------------- *)
let run_earley ~monad_ops ~item_ops ~state_ops ~at_ops =
let { bind; return } = monad_ops in
let ( >>= ) = bind in
let { sym_case; _NT; dot_nt; dot_i; dot_k; dot_bs; cut; elements } =
item_ops
in
let { todo_gt_k_find; update_bitms_lt_k; empty_bitms_at_k;
empty_ixk_done; empty_ktjs } = state_ops
in
let { get_bitms_at_k; get_bitms_lt_k; add_bitm_at_k; pop_todo;
add_todos_at_k; add_todos_gt_k; add_ixk_done;
mem_ixk_done; find_ktjs; add_ktjs; with_state } = at_ops
in
let is_finished nitm = nitm|>dot_bs = [] in
let module Let_syntax = struct
let bind a ~f = a >>= f
end
in
fun ~new_items ~input ~parse_tm ~input_length ->
begin
Explanation of step_at_k code which follows :
The basic Earley step is :
X - > i as k',S bs k ' S k
----------------------------
X - > i as S k bs
In the code , there are labels of the form ( * : am :
Explanation of step_at_k code which follows:
The basic Earley step is:
X -> i as k',S bs k' S k
----------------------------
X -> i as S k bs
In the code, there are labels of the form (*:am:*). The following
discussion is indexed by these labels
- af:
- the item nitm is complete, ie of the form Y -> k',as,k,[]
- aj,al: has (k',Y,k) been encountered before? if so, do nothing
- am: if not encountered before, k' Y k is cut with blocked X ->
... and new todo items are added
- ax:
- item is not complete ie of form _ -> i,as,k,S bs
- ax/ce:
- S is nonterm Y
- add bitm to blocked items at (k,Y)
- check if we have seen (k,Y) before (bitms_empty)
- co: if we have, check if k Y k; cut bitm with k Y k if so
- cw: if we haven't, generate new items from (k,Y)
- ax/ec:
- S is terminal tm
- attempt to retrieve (k,tm,j) set from ktjs
- ek: if we haven't already met (k,tm) then parse (k,tm), update
ktjs and pass on js
- otherwise, just reuse js from previously
- el: given the set of js (which are all >= k)
- partition into >k, and =k
- for j > k, cut bitm with j, and add to todos
- note that if this is the first time we meet (k,tm), then there
are no other items blocked on (k,tm); if this is not the first
time, then we have already processed items blocked on (k,tm); in
either case, we do not need to do anything more with items
blocked on (k,tm); in fact, we don't even need to record such
items
- em: if k is in js (ie tm matched the empty string) cut bitm with k
*)
let (^) = List.map in
let step_at_k k nitm =
mark __LINE__;
let get_bitms (i,x) =
if i=k then get_bitms_at_k x else
get_bitms_lt_k (i,x)
in
match is_finished nitm with
| true -> ( (*:af:*)
mark __LINE__;
let (k',_Y) = (nitm|>dot_i,nitm|>dot_nt) in
let%bind already_done = mem_ixk_done (k',_Y) in (*:aj:*)
mark __LINE__;
match already_done with
| true -> mark __LINE__; return () (*:al:*)
| false -> ( (*:am:*)
mark __LINE__;
add_ixk_done (k',_Y) >>= fun _ ->
mark __LINE__;
get_bitms (k',_Y) >>= fun bitms ->
mark __LINE__;
add_todos_at_k ((fun bitm -> cut bitm k) ^ bitms) >>= fun _ ->
mark __LINE__; return ()))
| false -> ( (*:ax:*)
mark __LINE__;
let bitm = nitm in
let _S = List.hd (bitm|>dot_bs) in
_S |> sym_case
~nt:(fun _Y -> (*:ce:*)
mark __LINE__;
get_bitms_at_k _Y >>= fun bitms ->
mark __LINE__;
let bitms_empty = bitms=[] in
add_bitm_at_k bitm _Y >>= fun _ ->
mark __LINE__;
match bitms_empty with
| false -> ( (*:co:*)
mark __LINE__;
mem_ixk_done (k,_Y) >>= function
| true ->
add_todos_at_k [cut bitm k] >>= fun _ ->
mark __LINE__;
return ()
| false -> return ())
| true -> ( (*:cw:*)
mark __LINE__;
let itms = new_items ~nt:_Y ~input ~k in
add_todos_at_k itms >>= fun _ ->
mark __LINE__;
return ()
))
: ec :
mark __LINE__;
find_ktjs tm >>= fun ktjs ->
(match ktjs with
| None -> (
(* we need to process kT *) (*:ek:*)
let js = parse_tm ~tm ~input ~k ~input_length in
add_ktjs tm js >>= fun _ ->
return js)
| Some js -> return js) >>= fun js ->
(* there may be a k in js, in which case we have a
new todo at the current stage *)
let (xs,js) = List.partition (fun j -> j=k) js in (*:el:*)
add_todos_gt_k ((fun j -> cut bitm j) ^ js) >>= fun _ ->
match xs with (*:em:*)
| [] -> return ()
| _ -> add_todos_at_k [cut bitm k]))
in
FIXME monad syntax may make this easier to read
let rec loop_at_k k =
(* print_endline "loop_at_k"; *)
pop_todo () >>= function
| None -> return ()
| Some itm -> step_at_k k itm >>= fun _ -> loop_at_k k
in
let rec loop k =
(* Printf.printf "loop %d\n" k; *)
match k >= input_length with
correct ? FIXME do n't we have to go one further ?
| true -> return ()
| false ->
(* process items *)
loop_at_k k >>= fun _ ->
let k' = k+1 in
(*
todo and todo_done are updated with todo_gt_k[k'];
bitms_lt_k is updated: bitms_lt_k[k]=bitms_at_k
bitms_at_k is reset;
ixk_done and ktjs are reset *)
with_state (fun s ->
let todo' = todo_gt_k_find k' s.todo_gt_k in
let todo = elements todo' in
Printf.printf " elements : % d " ( todo ) ;
{ todo;
todo_done=todo';
todo_gt_k=s.todo_gt_k;
bitms_lt_k=(update_bitms_lt_k k s.bitms_at_k s.bitms_lt_k);
bitms_at_k=empty_bitms_at_k;
ixk_done=empty_ixk_done;
ktjs=empty_ktjs;
}) >>= fun _ ->
loop k'
in
loop 0
end (* run_earley *)
| null | https://raw.githubusercontent.com/tomjridge/tjr_simple_earley/ca558e0e7f4ddba4cd6573bf180710cd02f25ba4/_work_in_progress/experimental_v2/abstract/earley.ml | ocaml | * An experiment to see whether the imperative code (represented using
a monad) is easier to read; probably it is.
profiling; debugging ---------------------------------------------
main -------------------------------------------------------------
:am:
:af:
:aj:
:al:
:am:
:ax:
:ce:
:co:
:cw:
we need to process kT
:ek:
there may be a k in js, in which case we have a
new todo at the current stage
:el:
:em:
print_endline "loop_at_k";
Printf.printf "loop %d\n" k;
process items
todo and todo_done are updated with todo_gt_k[k'];
bitms_lt_k is updated: bitms_lt_k[k]=bitms_at_k
bitms_at_k is reset;
ixk_done and ktjs are reset
run_earley |
open Prelude
let dest_Some = function Some x -> x | _ -> (failwith "dest_Some")
let now () = Core.Time_stamp_counter.(
now () |> to_int63 |> Core.Int63.to_int |> dest_Some)
let Tjr_profile.{mark;get_marks} = Tjr_profile.mk_profiler ~now
let run_earley ~monad_ops ~item_ops ~state_ops ~at_ops =
let { bind; return } = monad_ops in
let ( >>= ) = bind in
let { sym_case; _NT; dot_nt; dot_i; dot_k; dot_bs; cut; elements } =
item_ops
in
let { todo_gt_k_find; update_bitms_lt_k; empty_bitms_at_k;
empty_ixk_done; empty_ktjs } = state_ops
in
let { get_bitms_at_k; get_bitms_lt_k; add_bitm_at_k; pop_todo;
add_todos_at_k; add_todos_gt_k; add_ixk_done;
mem_ixk_done; find_ktjs; add_ktjs; with_state } = at_ops
in
let is_finished nitm = nitm|>dot_bs = [] in
let module Let_syntax = struct
let bind a ~f = a >>= f
end
in
fun ~new_items ~input ~parse_tm ~input_length ->
begin
Explanation of step_at_k code which follows :
The basic Earley step is :
X - > i as k',S bs k ' S k
----------------------------
X - > i as S k bs
In the code , there are labels of the form ( * : am :
Explanation of step_at_k code which follows:
The basic Earley step is:
X -> i as k',S bs k' S k
----------------------------
X -> i as S k bs
discussion is indexed by these labels
- af:
- the item nitm is complete, ie of the form Y -> k',as,k,[]
- aj,al: has (k',Y,k) been encountered before? if so, do nothing
- am: if not encountered before, k' Y k is cut with blocked X ->
... and new todo items are added
- ax:
- item is not complete ie of form _ -> i,as,k,S bs
- ax/ce:
- S is nonterm Y
- add bitm to blocked items at (k,Y)
- check if we have seen (k,Y) before (bitms_empty)
- co: if we have, check if k Y k; cut bitm with k Y k if so
- cw: if we haven't, generate new items from (k,Y)
- ax/ec:
- S is terminal tm
- attempt to retrieve (k,tm,j) set from ktjs
- ek: if we haven't already met (k,tm) then parse (k,tm), update
ktjs and pass on js
- otherwise, just reuse js from previously
- el: given the set of js (which are all >= k)
- partition into >k, and =k
- for j > k, cut bitm with j, and add to todos
- note that if this is the first time we meet (k,tm), then there
are no other items blocked on (k,tm); if this is not the first
time, then we have already processed items blocked on (k,tm); in
either case, we do not need to do anything more with items
blocked on (k,tm); in fact, we don't even need to record such
items
- em: if k is in js (ie tm matched the empty string) cut bitm with k
*)
let (^) = List.map in
let step_at_k k nitm =
mark __LINE__;
let get_bitms (i,x) =
if i=k then get_bitms_at_k x else
get_bitms_lt_k (i,x)
in
match is_finished nitm with
mark __LINE__;
let (k',_Y) = (nitm|>dot_i,nitm|>dot_nt) in
mark __LINE__;
match already_done with
mark __LINE__;
add_ixk_done (k',_Y) >>= fun _ ->
mark __LINE__;
get_bitms (k',_Y) >>= fun bitms ->
mark __LINE__;
add_todos_at_k ((fun bitm -> cut bitm k) ^ bitms) >>= fun _ ->
mark __LINE__; return ()))
mark __LINE__;
let bitm = nitm in
let _S = List.hd (bitm|>dot_bs) in
_S |> sym_case
mark __LINE__;
get_bitms_at_k _Y >>= fun bitms ->
mark __LINE__;
let bitms_empty = bitms=[] in
add_bitm_at_k bitm _Y >>= fun _ ->
mark __LINE__;
match bitms_empty with
mark __LINE__;
mem_ixk_done (k,_Y) >>= function
| true ->
add_todos_at_k [cut bitm k] >>= fun _ ->
mark __LINE__;
return ()
| false -> return ())
mark __LINE__;
let itms = new_items ~nt:_Y ~input ~k in
add_todos_at_k itms >>= fun _ ->
mark __LINE__;
return ()
))
: ec :
mark __LINE__;
find_ktjs tm >>= fun ktjs ->
(match ktjs with
| None -> (
let js = parse_tm ~tm ~input ~k ~input_length in
add_ktjs tm js >>= fun _ ->
return js)
| Some js -> return js) >>= fun js ->
add_todos_gt_k ((fun j -> cut bitm j) ^ js) >>= fun _ ->
| [] -> return ()
| _ -> add_todos_at_k [cut bitm k]))
in
FIXME monad syntax may make this easier to read
let rec loop_at_k k =
pop_todo () >>= function
| None -> return ()
| Some itm -> step_at_k k itm >>= fun _ -> loop_at_k k
in
let rec loop k =
match k >= input_length with
correct ? FIXME do n't we have to go one further ?
| true -> return ()
| false ->
loop_at_k k >>= fun _ ->
let k' = k+1 in
with_state (fun s ->
let todo' = todo_gt_k_find k' s.todo_gt_k in
let todo = elements todo' in
Printf.printf " elements : % d " ( todo ) ;
{ todo;
todo_done=todo';
todo_gt_k=s.todo_gt_k;
bitms_lt_k=(update_bitms_lt_k k s.bitms_at_k s.bitms_lt_k);
bitms_at_k=empty_bitms_at_k;
ixk_done=empty_ixk_done;
ktjs=empty_ktjs;
}) >>= fun _ ->
loop k'
in
loop 0
|
15d9e2a560a4ea2b22ab27725681163057209aee7d75ba9c48498c587188f27f | souenzzo/souenzzo.github.io | wtf.clj | (ns br.com.souenzzo.wtf
(:require [clojure.java.shell :as sh]
[clojure.java.io :as io])
(:import (java.io File ByteArrayOutputStream)
(org.graalvm.polyglot Context Source Value)
(org.graalvm.polyglot.io ByteSequence)))
(set! *warn-on-reflection* true)
(defn wat->wasm
[s]
(let [stdin (File/createTempFile "wat" "in")
_ (spit stdin (str s))
stdout (File/createTempFile "wat" "out")
{:keys [out err exit]} (sh/sh "wat2wasm" (.getAbsolutePath stdin)
"-o" (.getAbsolutePath stdout))
_ (println out)
_ (println err)
baos (ByteArrayOutputStream.)]
(io/copy stdout baos)
(.delete stdin)
(.delete stdout)
(.toByteArray baos)))
(defn ^Value wtf!
[s]
(println s)
(let [ctx (.build (Context/newBuilder (into-array ["wasm"])))
src (.build (Source/newBuilder
"wasm"
(ByteSequence/create (wat->wasm s))
"hello.wat"))]
(.eval ctx src)
(.getMember (.getBindings ctx "wasm") "main")))
(defn func
[& ops]
(let [f (.getMember (wtf! `(~'module
(~'func (~'export "inline")
~@ops)))
"inline")]
(fn [& vs]
(.execute f (into-array Object vs)))))
| null | https://raw.githubusercontent.com/souenzzo/souenzzo.github.io/30a811c4e5633ad07bba1d58d19eb091dac222e9/wtf/src/br/com/souenzzo/wtf.clj | clojure | (ns br.com.souenzzo.wtf
(:require [clojure.java.shell :as sh]
[clojure.java.io :as io])
(:import (java.io File ByteArrayOutputStream)
(org.graalvm.polyglot Context Source Value)
(org.graalvm.polyglot.io ByteSequence)))
(set! *warn-on-reflection* true)
(defn wat->wasm
[s]
(let [stdin (File/createTempFile "wat" "in")
_ (spit stdin (str s))
stdout (File/createTempFile "wat" "out")
{:keys [out err exit]} (sh/sh "wat2wasm" (.getAbsolutePath stdin)
"-o" (.getAbsolutePath stdout))
_ (println out)
_ (println err)
baos (ByteArrayOutputStream.)]
(io/copy stdout baos)
(.delete stdin)
(.delete stdout)
(.toByteArray baos)))
(defn ^Value wtf!
[s]
(println s)
(let [ctx (.build (Context/newBuilder (into-array ["wasm"])))
src (.build (Source/newBuilder
"wasm"
(ByteSequence/create (wat->wasm s))
"hello.wat"))]
(.eval ctx src)
(.getMember (.getBindings ctx "wasm") "main")))
(defn func
[& ops]
(let [f (.getMember (wtf! `(~'module
(~'func (~'export "inline")
~@ops)))
"inline")]
(fn [& vs]
(.execute f (into-array Object vs)))))
|
|
98e055f69853978bd47a1baf8e5cc8c23116030eb15a2445fd902120dbd0a62d | alexandergunnarson/quantum | reify.cljc | (ns quantum.core.macros.reify
(:refer-clojure :exclude [contains?])
(:require
[quantum.core.error :as err
:refer [>ex-info]]
[quantum.core.fn :as fn
:refer [fn-> fn->> <-]]
[quantum.core.log :as log]
[quantum.core.logic :as logic
:refer [whenc fn-and]]
[quantum.core.macros.transform :as trans]
[quantum.core.macros.type-hint :as th]
[quantum.untyped.core.collections :as ucoll
:refer [contains? update-first]]
[quantum.untyped.core.data
:refer [kw-map]]))
(defn gen-reify-def
[{:keys [ns- sym ns-qualified-interface-name reify-body]}]
(let [reified-sym (-> sym name
(str "-reified")
symbol)
reified-sym-qualified
(-> (symbol (name (ns-name ns-)) (name reified-sym))
(th/with-type-hint ns-qualified-interface-name))
reify-body-relevant
(->> reify-body (filter (fn-and (fn-> meta :default not)
(fn-> meta :nil? not))))
reify-def
(list 'def reified-sym reify-body-relevant)]
(kw-map reified-sym
reified-sym-qualified
reify-def)))
(defn gen-reify-body-unverified
[{:keys [ns-qualified-interface-name
genned-method-name
gen-interface-code-body-expanded]}]
(apply list 'reify ns-qualified-interface-name
(->> gen-interface-code-body-expanded
(map (fn [arity]
(let [[hints body] arity
return-type-hinted-method
(th/with-type-hint genned-method-name (last hints))
arglist-n (->> body first (into ['this]))
body-f (-> body rest (trans/hint-body-with-arglist (first body) :clj))
updated-body (->> body-f (cons arglist-n))]
(with-meta
(cons return-type-hinted-method updated-body)
(meta arity)))))))) ; to pass on :default and :nil?
(defn verify-reify-body [reify-body sym]
To handle ClassFormatError " Duplicate method name&signature "
duplicate-methods
(->> reify-body rest rest
(map (fn-> rest
(update-first
(fn->> rest
(mapv (fn-> th/type-hint (whenc nil? trans/default-hint)))))))
(ucoll/frequencies-by first)
(group-by val)
(<- (dissoc 1)))
_ (when (contains? duplicate-methods)
(log/pr :always "Duplicate methods for" sym ":")
(log/ppr-hints :always duplicate-methods)
(throw (>ex-info "Duplicate methods")))]
reify-body))
(defn gen-reify-body
[{:as args
:keys [sym
ns-qualified-interface-name
genned-method-name
gen-interface-code-body-expanded]}]
{:post [(log/ppr-hints :macro-expand "REIFY BODY" %)]}
(-> (gen-reify-body-unverified args)
(verify-reify-body sym)))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/core/macros/reify.cljc | clojure | to pass on :default and :nil? | (ns quantum.core.macros.reify
(:refer-clojure :exclude [contains?])
(:require
[quantum.core.error :as err
:refer [>ex-info]]
[quantum.core.fn :as fn
:refer [fn-> fn->> <-]]
[quantum.core.log :as log]
[quantum.core.logic :as logic
:refer [whenc fn-and]]
[quantum.core.macros.transform :as trans]
[quantum.core.macros.type-hint :as th]
[quantum.untyped.core.collections :as ucoll
:refer [contains? update-first]]
[quantum.untyped.core.data
:refer [kw-map]]))
(defn gen-reify-def
[{:keys [ns- sym ns-qualified-interface-name reify-body]}]
(let [reified-sym (-> sym name
(str "-reified")
symbol)
reified-sym-qualified
(-> (symbol (name (ns-name ns-)) (name reified-sym))
(th/with-type-hint ns-qualified-interface-name))
reify-body-relevant
(->> reify-body (filter (fn-and (fn-> meta :default not)
(fn-> meta :nil? not))))
reify-def
(list 'def reified-sym reify-body-relevant)]
(kw-map reified-sym
reified-sym-qualified
reify-def)))
(defn gen-reify-body-unverified
[{:keys [ns-qualified-interface-name
genned-method-name
gen-interface-code-body-expanded]}]
(apply list 'reify ns-qualified-interface-name
(->> gen-interface-code-body-expanded
(map (fn [arity]
(let [[hints body] arity
return-type-hinted-method
(th/with-type-hint genned-method-name (last hints))
arglist-n (->> body first (into ['this]))
body-f (-> body rest (trans/hint-body-with-arglist (first body) :clj))
updated-body (->> body-f (cons arglist-n))]
(with-meta
(cons return-type-hinted-method updated-body)
(defn verify-reify-body [reify-body sym]
To handle ClassFormatError " Duplicate method name&signature "
duplicate-methods
(->> reify-body rest rest
(map (fn-> rest
(update-first
(fn->> rest
(mapv (fn-> th/type-hint (whenc nil? trans/default-hint)))))))
(ucoll/frequencies-by first)
(group-by val)
(<- (dissoc 1)))
_ (when (contains? duplicate-methods)
(log/pr :always "Duplicate methods for" sym ":")
(log/ppr-hints :always duplicate-methods)
(throw (>ex-info "Duplicate methods")))]
reify-body))
(defn gen-reify-body
[{:as args
:keys [sym
ns-qualified-interface-name
genned-method-name
gen-interface-code-body-expanded]}]
{:post [(log/ppr-hints :macro-expand "REIFY BODY" %)]}
(-> (gen-reify-body-unverified args)
(verify-reify-body sym)))
|
38bb8538c319a540e06d98fb6713d20f83ee16d51f0bca16846552f61e9f1ba3 | abtv/tech-radar | message_view.cljs | (ns tech-radar.ui.message-view
(:require [om.next :as om :refer-macros [defui]]
[sablono.core :refer-macros [html]]))
(defui MessageView
Object
(render [this]
(let [{:keys [text]} (om/props this)]
(html
[:div {}
[:p {:class "lead"} text]]))))
(def message-view (om/factory MessageView))
| null | https://raw.githubusercontent.com/abtv/tech-radar/167c1c66ff2cf7140fe1de247d67a7134b0b1748/src/cljs/tech-radar/ui/message_view.cljs | clojure | (ns tech-radar.ui.message-view
(:require [om.next :as om :refer-macros [defui]]
[sablono.core :refer-macros [html]]))
(defui MessageView
Object
(render [this]
(let [{:keys [text]} (om/props this)]
(html
[:div {}
[:p {:class "lead"} text]]))))
(def message-view (om/factory MessageView))
|
|
56075699f7ac23c42a420418809a5008ffda2a8db0bb5b1596178bf222505120 | Eonblast/Scalaxis | hfs_beh.erl | 2010 - 2011 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
@doc HashFunctionSet Behaviour
%% @end
%% @version $Id$
-module(hfs_beh).
-export([behaviour_info/1]).
-spec behaviour_info(atom()) -> [{atom(), arity()}] | undefined.
behaviour_info(callbacks) ->
[
{new, 1},
{new, 2},
{apply_val, 2},
{hfs_size, 1}
];
behaviour_info(_Other) ->
undefined.
| null | https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/src/rrepair/hfs_beh.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@end
@version $Id$ | 2010 - 2011 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
@doc HashFunctionSet Behaviour
-module(hfs_beh).
-export([behaviour_info/1]).
-spec behaviour_info(atom()) -> [{atom(), arity()}] | undefined.
behaviour_info(callbacks) ->
[
{new, 1},
{new, 2},
{apply_val, 2},
{hfs_size, 1}
];
behaviour_info(_Other) ->
undefined.
|
824ef6c6c091dea725440d89540dd8132dafb24156007dfc45e740754a5d154b | NetComposer/nksip | timer_test_ua2.erl | %% -------------------------------------------------------------------
%%
%% timer_test: Timer (RFC4028) Tests
%%
Copyright ( c ) 2013 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(timer_test_ua2).
-include_lib("nkserver/include/nkserver_module.hrl").
-export([sip_invite/2, sip_ack/2, sip_update/2, sip_bye/2, sip_dialog_update/3]).
sip_invite(Req, _Call) ->
tests_util:save_ref(Req),
{ok, Body} = nksip_request:body(Req),
Body1 = nksip_sdp:increment(Body),
{reply, {answer, Body1}}.
sip_ack(Req, _Call) ->
tests_util:send_ref(ack, Req),
ok.
sip_update(_Req, _Call) ->
{reply, ok}.
sip_bye(Req, _Call) ->
tests_util:send_ref(bye, Req),
{reply, ok}.
sip_dialog_update(Status, Dialog, _Call) ->
tests_util:dialog_update(Status, Dialog),
ok.
| null | https://raw.githubusercontent.com/NetComposer/nksip/7fbcc66806635dc8ecc5d11c30322e4d1df36f0a/test/callbacks/timer_test_ua2.erl | erlang | -------------------------------------------------------------------
timer_test: Timer (RFC4028) Tests
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
------------------------------------------------------------------- | Copyright ( c ) 2013 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(timer_test_ua2).
-include_lib("nkserver/include/nkserver_module.hrl").
-export([sip_invite/2, sip_ack/2, sip_update/2, sip_bye/2, sip_dialog_update/3]).
sip_invite(Req, _Call) ->
tests_util:save_ref(Req),
{ok, Body} = nksip_request:body(Req),
Body1 = nksip_sdp:increment(Body),
{reply, {answer, Body1}}.
sip_ack(Req, _Call) ->
tests_util:send_ref(ack, Req),
ok.
sip_update(_Req, _Call) ->
{reply, ok}.
sip_bye(Req, _Call) ->
tests_util:send_ref(bye, Req),
{reply, ok}.
sip_dialog_update(Status, Dialog, _Call) ->
tests_util:dialog_update(Status, Dialog),
ok.
|
7a4efaf69ae731c806af0c9906c2c83c794b6f451339259f4c8a961f403ce559 | Octachron/codept | stdlib_414.ml | let modules= let open Module in let open Sig in
Dict.of_list [("Arg",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Arg";namespace=["Stdlib"]}};path={name="Arg";namespace=["Stdlib"]}}; signature=empty}));
("Array",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Array";namespace=["Stdlib"]}};path={name="Array";namespace=["Stdlib"]}}; signature=of_list
[("Floatarray",Sig ({origin=Submodule; signature=empty}))]}));
("ArrayLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="ArrayLabels";namespace=["Stdlib"]}};path={name="ArrayLabels";namespace=["Stdlib"]}}; signature=of_list
[("Floatarray",Sig ({origin=Submodule; signature=empty}))]}));
("Atomic",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Atomic";namespace=["Stdlib"]}};path={name="Atomic";namespace=["Stdlib"]}}; signature=empty}));
("Bigarray",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bigarray";namespace=["Stdlib"]}};path={name="Bigarray";namespace=["Stdlib"]}}; signature=of_list
[("Array0",Sig ({origin=Submodule; signature=empty}));
("Array1",Sig ({origin=Submodule; signature=empty}));
("Array2",Sig ({origin=Submodule; signature=empty}));
("Array3",Sig ({origin=Submodule; signature=empty}));
("Genarray",Sig ({origin=Submodule; signature=empty}))]}));
("Bool",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bool";namespace=["Stdlib"]}};path={name="Bool";namespace=["Stdlib"]}}; signature=empty}));
("Buffer",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Buffer";namespace=["Stdlib"]}};path={name="Buffer";namespace=["Stdlib"]}}; signature=empty}));
("Bytes",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bytes";namespace=["Stdlib"]}};path={name="Bytes";namespace=["Stdlib"]}}; signature=empty}));
("BytesLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="BytesLabels";namespace=["Stdlib"]}};path={name="BytesLabels";namespace=["Stdlib"]}}; signature=empty}));
("Callback",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Callback";namespace=["Stdlib"]}};path={name="Callback";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalAtomic",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalAtomic";namespace=["Stdlib"]}};path={name="CamlinternalAtomic";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalFormat",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalFormat";namespace=["Stdlib"]}};path={name="CamlinternalFormat";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalFormatBasics",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalFormatBasics";namespace=["Stdlib"]}};path={name="CamlinternalFormatBasics";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalLazy",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalLazy";namespace=["Stdlib"]}};path={name="CamlinternalLazy";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalMod",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalMod";namespace=["Stdlib"]}};path={name="CamlinternalMod";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalOO",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalOO";namespace=["Stdlib"]}};path={name="CamlinternalOO";namespace=["Stdlib"]}}; signature=empty}));
("Char",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Char";namespace=["Stdlib"]}};path={name="Char";namespace=["Stdlib"]}}; signature=empty}));
("Complex",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Complex";namespace=["Stdlib"]}};path={name="Complex";namespace=["Stdlib"]}}; signature=empty}));
("Digest",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Digest";namespace=["Stdlib"]}};path={name="Digest";namespace=["Stdlib"]}}; signature=empty}));
("Either",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Either";namespace=["Stdlib"]}};path={name="Either";namespace=["Stdlib"]}}; signature=empty}));
("Ephemeron",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Ephemeron";namespace=["Stdlib"]}};path={name="Ephemeron";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("GenHashTable",Sig ({origin=Submodule; signature=of_list
[("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}));
("K1",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}));
("K2",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H1";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "H2";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))));
("MakeSeeded",Fun (Some {name=Some "H1";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "H2";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))))]}));
("Kn",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}))])
(of_list_type [("S",Sig ({origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("Filename",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Filename";namespace=["Stdlib"]}};path={name="Filename";namespace=["Stdlib"]}}; signature=empty}));
("Float",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Float";namespace=["Stdlib"]}};path={name="Float";namespace=["Stdlib"]}}; signature=of_list
[("Array",Sig ({origin=Submodule; signature=empty}));
("ArrayLabels",Sig ({origin=Submodule; signature=empty}))]}));
("Format",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Format";namespace=["Stdlib"]}};path={name="Format";namespace=["Stdlib"]}}; signature=empty}));
("Fun",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Fun";namespace=["Stdlib"]}};path={name="Fun";namespace=["Stdlib"]}}; signature=empty}));
("Gc",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Gc";namespace=["Stdlib"]}};path={name="Gc";namespace=["Stdlib"]}}; signature=of_list
[("Memprof",Sig ({origin=Submodule; signature=empty}))]}));
("Genlex",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Genlex";namespace=["Stdlib"]}};path={name="Genlex";namespace=["Stdlib"]}}; signature=empty}));
("Hashtbl",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Hashtbl";namespace=["Stdlib"]}};path={name="Hashtbl";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("HashedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}));
("SeededHashedType",Sig ({origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("In_channel",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="In_channel";namespace=["Stdlib"]}};path={name="In_channel";namespace=["Stdlib"]}}; signature=empty}));
("Int",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int";namespace=["Stdlib"]}};path={name="Int";namespace=["Stdlib"]}}; signature=empty}));
("Int32",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int32";namespace=["Stdlib"]}};path={name="Int32";namespace=["Stdlib"]}}; signature=empty}));
("Int64",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int64";namespace=["Stdlib"]}};path={name="Int64";namespace=["Stdlib"]}}; signature=empty}));
("Lazy",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Lazy";namespace=["Stdlib"]}};path={name="Lazy";namespace=["Stdlib"]}}; signature=empty}));
("Lexing",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Lexing";namespace=["Stdlib"]}};path={name="Lexing";namespace=["Stdlib"]}}; signature=empty}));
("List",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="List";namespace=["Stdlib"]}};path={name="List";namespace=["Stdlib"]}}; signature=empty}));
("ListLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="ListLabels";namespace=["Stdlib"]}};path={name="ListLabels";namespace=["Stdlib"]}}; signature=empty}));
("Map",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Map";namespace=["Stdlib"]}};path={name="Map";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Marshal",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Marshal";namespace=["Stdlib"]}};path={name="Marshal";namespace=["Stdlib"]}}; signature=empty}));
("MoreLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="MoreLabels";namespace=["Stdlib"]}};path={name="MoreLabels";namespace=["Stdlib"]}}; signature=of_list
[("Hashtbl",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("HashedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}));
("SeededHashedType",Sig (
{origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("Map",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Set",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}))]}));
("Nativeint",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Nativeint";namespace=["Stdlib"]}};path={name="Nativeint";namespace=["Stdlib"]}}; signature=empty}));
("Obj",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Obj";namespace=["Stdlib"]}};path={name="Obj";namespace=["Stdlib"]}}; signature=of_list
[("Closure",Sig ({origin=Submodule; signature=empty}));
("Ephemeron",Sig ({origin=Submodule; signature=empty}));
("Extension_constructor",Sig ({origin=Submodule; signature=empty}))]}));
("Oo",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Oo";namespace=["Stdlib"]}};path={name="Oo";namespace=["Stdlib"]}}; signature=empty}));
("Option",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Option";namespace=["Stdlib"]}};path={name="Option";namespace=["Stdlib"]}}; signature=empty}));
("Out_channel",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Out_channel";namespace=["Stdlib"]}};path={name="Out_channel";namespace=["Stdlib"]}}; signature=empty}));
("Parsing",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Parsing";namespace=["Stdlib"]}};path={name="Parsing";namespace=["Stdlib"]}}; signature=empty}));
("Printexc",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Printexc";namespace=["Stdlib"]}};path={name="Printexc";namespace=["Stdlib"]}}; signature=of_list
[("Slot",Sig ({origin=Submodule; signature=empty}))]}));
("Printf",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Printf";namespace=["Stdlib"]}};path={name="Printf";namespace=["Stdlib"]}}; signature=empty}));
("Queue",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Queue";namespace=["Stdlib"]}};path={name="Queue";namespace=["Stdlib"]}}; signature=empty}));
("Random",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Random";namespace=["Stdlib"]}};path={name="Random";namespace=["Stdlib"]}}; signature=of_list
[("State",Sig ({origin=Submodule; signature=empty}))]}));
("Result",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Result";namespace=["Stdlib"]}};path={name="Result";namespace=["Stdlib"]}}; signature=empty}));
("Scanf",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Scanf";namespace=["Stdlib"]}};path={name="Scanf";namespace=["Stdlib"]}}; signature=of_list
[("Scanning",Sig ({origin=Submodule; signature=empty}))]}));
("Seq",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Seq";namespace=["Stdlib"]}};path={name="Seq";namespace=["Stdlib"]}}; signature=empty}));
("Set",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Set";namespace=["Stdlib"]}};path={name="Set";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Stack",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Stack";namespace=["Stdlib"]}};path={name="Stack";namespace=["Stdlib"]}}; signature=empty}));
("StdLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="StdLabels";namespace=["Stdlib"]}};path={name="StdLabels";namespace=["Stdlib"]}}; signature=of_list
[("Array",Alias {path=Namespaced.make "ArrayLabels";phantom=None});
("Bytes",Alias {path=Namespaced.make "BytesLabels";phantom=None});
("List",Alias {path=Namespaced.make "ListLabels";phantom=None});
("String",Alias {path=Namespaced.make "StringLabels";phantom=None})]}));
("Stream",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Stream";namespace=["Stdlib"]}};path={name="Stream";namespace=["Stdlib"]}}; signature=empty}));
("String",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="String";namespace=["Stdlib"]}};path={name="String";namespace=["Stdlib"]}}; signature=empty}));
("StringLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="StringLabels";namespace=["Stdlib"]}};path={name="StringLabels";namespace=["Stdlib"]}}; signature=empty}));
("Sys",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Sys";namespace=["Stdlib"]}};path={name="Sys";namespace=["Stdlib"]}}; signature=of_list
[("Immediate64",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Immediate";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "Non_immediate";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))))])
(of_list_type [("Immediate",Sig (
{origin=Submodule; signature=empty}));
("Non_immediate",Sig (
{origin=Submodule; signature=empty}))])
)}))]}));
("Uchar",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Uchar";namespace=["Stdlib"]}};path={name="Uchar";namespace=["Stdlib"]}}; signature=empty}));
("Unit",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Unit";namespace=["Stdlib"]}};path={name="Unit";namespace=["Stdlib"]}}; signature=empty}));
("Weak",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Weak";namespace=["Stdlib"]}};path={name="Weak";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("S",Sig ({origin=Submodule; signature=empty}))])
)}))]
| null | https://raw.githubusercontent.com/Octachron/codept/5346aee9337f4bef6a2e5bb7db91625217ca499e/tests/bundle_refs/stdlib_414.ml | ocaml | let modules= let open Module in let open Sig in
Dict.of_list [("Arg",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Arg";namespace=["Stdlib"]}};path={name="Arg";namespace=["Stdlib"]}}; signature=empty}));
("Array",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Array";namespace=["Stdlib"]}};path={name="Array";namespace=["Stdlib"]}}; signature=of_list
[("Floatarray",Sig ({origin=Submodule; signature=empty}))]}));
("ArrayLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="ArrayLabels";namespace=["Stdlib"]}};path={name="ArrayLabels";namespace=["Stdlib"]}}; signature=of_list
[("Floatarray",Sig ({origin=Submodule; signature=empty}))]}));
("Atomic",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Atomic";namespace=["Stdlib"]}};path={name="Atomic";namespace=["Stdlib"]}}; signature=empty}));
("Bigarray",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bigarray";namespace=["Stdlib"]}};path={name="Bigarray";namespace=["Stdlib"]}}; signature=of_list
[("Array0",Sig ({origin=Submodule; signature=empty}));
("Array1",Sig ({origin=Submodule; signature=empty}));
("Array2",Sig ({origin=Submodule; signature=empty}));
("Array3",Sig ({origin=Submodule; signature=empty}));
("Genarray",Sig ({origin=Submodule; signature=empty}))]}));
("Bool",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bool";namespace=["Stdlib"]}};path={name="Bool";namespace=["Stdlib"]}}; signature=empty}));
("Buffer",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Buffer";namespace=["Stdlib"]}};path={name="Buffer";namespace=["Stdlib"]}}; signature=empty}));
("Bytes",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Bytes";namespace=["Stdlib"]}};path={name="Bytes";namespace=["Stdlib"]}}; signature=empty}));
("BytesLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="BytesLabels";namespace=["Stdlib"]}};path={name="BytesLabels";namespace=["Stdlib"]}}; signature=empty}));
("Callback",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Callback";namespace=["Stdlib"]}};path={name="Callback";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalAtomic",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalAtomic";namespace=["Stdlib"]}};path={name="CamlinternalAtomic";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalFormat",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalFormat";namespace=["Stdlib"]}};path={name="CamlinternalFormat";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalFormatBasics",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalFormatBasics";namespace=["Stdlib"]}};path={name="CamlinternalFormatBasics";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalLazy",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalLazy";namespace=["Stdlib"]}};path={name="CamlinternalLazy";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalMod",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalMod";namespace=["Stdlib"]}};path={name="CamlinternalMod";namespace=["Stdlib"]}}; signature=empty}));
("CamlinternalOO",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="CamlinternalOO";namespace=["Stdlib"]}};path={name="CamlinternalOO";namespace=["Stdlib"]}}; signature=empty}));
("Char",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Char";namespace=["Stdlib"]}};path={name="Char";namespace=["Stdlib"]}}; signature=empty}));
("Complex",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Complex";namespace=["Stdlib"]}};path={name="Complex";namespace=["Stdlib"]}}; signature=empty}));
("Digest",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Digest";namespace=["Stdlib"]}};path={name="Digest";namespace=["Stdlib"]}}; signature=empty}));
("Either",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Either";namespace=["Stdlib"]}};path={name="Either";namespace=["Stdlib"]}}; signature=empty}));
("Ephemeron",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Ephemeron";namespace=["Stdlib"]}};path={name="Ephemeron";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("GenHashTable",Sig ({origin=Submodule; signature=of_list
[("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}));
("K1",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}));
("K2",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H1";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "H2";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))));
("MakeSeeded",Fun (Some {name=Some "H1";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "H2";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))))]}));
("Kn",Sig ({origin=Submodule; signature=of_list
[("Bucket",Sig ({origin=Submodule; signature=empty}));
("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))]}))])
(of_list_type [("S",Sig ({origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("Filename",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Filename";namespace=["Stdlib"]}};path={name="Filename";namespace=["Stdlib"]}}; signature=empty}));
("Float",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Float";namespace=["Stdlib"]}};path={name="Float";namespace=["Stdlib"]}}; signature=of_list
[("Array",Sig ({origin=Submodule; signature=empty}));
("ArrayLabels",Sig ({origin=Submodule; signature=empty}))]}));
("Format",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Format";namespace=["Stdlib"]}};path={name="Format";namespace=["Stdlib"]}}; signature=empty}));
("Fun",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Fun";namespace=["Stdlib"]}};path={name="Fun";namespace=["Stdlib"]}}; signature=empty}));
("Gc",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Gc";namespace=["Stdlib"]}};path={name="Gc";namespace=["Stdlib"]}}; signature=of_list
[("Memprof",Sig ({origin=Submodule; signature=empty}))]}));
("Genlex",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Genlex";namespace=["Stdlib"]}};path={name="Genlex";namespace=["Stdlib"]}}; signature=empty}));
("Hashtbl",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Hashtbl";namespace=["Stdlib"]}};path={name="Hashtbl";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("HashedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}));
("SeededHashedType",Sig ({origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("In_channel",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="In_channel";namespace=["Stdlib"]}};path={name="In_channel";namespace=["Stdlib"]}}; signature=empty}));
("Int",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int";namespace=["Stdlib"]}};path={name="Int";namespace=["Stdlib"]}}; signature=empty}));
("Int32",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int32";namespace=["Stdlib"]}};path={name="Int32";namespace=["Stdlib"]}}; signature=empty}));
("Int64",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Int64";namespace=["Stdlib"]}};path={name="Int64";namespace=["Stdlib"]}}; signature=empty}));
("Lazy",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Lazy";namespace=["Stdlib"]}};path={name="Lazy";namespace=["Stdlib"]}}; signature=empty}));
("Lexing",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Lexing";namespace=["Stdlib"]}};path={name="Lexing";namespace=["Stdlib"]}}; signature=empty}));
("List",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="List";namespace=["Stdlib"]}};path={name="List";namespace=["Stdlib"]}}; signature=empty}));
("ListLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="ListLabels";namespace=["Stdlib"]}};path={name="ListLabels";namespace=["Stdlib"]}}; signature=empty}));
("Map",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Map";namespace=["Stdlib"]}};path={name="Map";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Marshal",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Marshal";namespace=["Stdlib"]}};path={name="Marshal";namespace=["Stdlib"]}}; signature=empty}));
("MoreLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="MoreLabels";namespace=["Stdlib"]}};path={name="MoreLabels";namespace=["Stdlib"]}}; signature=of_list
[("Hashtbl",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})));
("MakeSeeded",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("HashedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}));
("SeededHashedType",Sig (
{origin=Submodule; signature=empty}));
("SeededS",Sig ({origin=Submodule; signature=empty}))])
)}));
("Map",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Set",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig (
{origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}))]}));
("Nativeint",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Nativeint";namespace=["Stdlib"]}};path={name="Nativeint";namespace=["Stdlib"]}}; signature=empty}));
("Obj",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Obj";namespace=["Stdlib"]}};path={name="Obj";namespace=["Stdlib"]}}; signature=of_list
[("Closure",Sig ({origin=Submodule; signature=empty}));
("Ephemeron",Sig ({origin=Submodule; signature=empty}));
("Extension_constructor",Sig ({origin=Submodule; signature=empty}))]}));
("Oo",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Oo";namespace=["Stdlib"]}};path={name="Oo";namespace=["Stdlib"]}}; signature=empty}));
("Option",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Option";namespace=["Stdlib"]}};path={name="Option";namespace=["Stdlib"]}}; signature=empty}));
("Out_channel",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Out_channel";namespace=["Stdlib"]}};path={name="Out_channel";namespace=["Stdlib"]}}; signature=empty}));
("Parsing",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Parsing";namespace=["Stdlib"]}};path={name="Parsing";namespace=["Stdlib"]}}; signature=empty}));
("Printexc",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Printexc";namespace=["Stdlib"]}};path={name="Printexc";namespace=["Stdlib"]}}; signature=of_list
[("Slot",Sig ({origin=Submodule; signature=empty}))]}));
("Printf",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Printf";namespace=["Stdlib"]}};path={name="Printf";namespace=["Stdlib"]}}; signature=empty}));
("Queue",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Queue";namespace=["Stdlib"]}};path={name="Queue";namespace=["Stdlib"]}}; signature=empty}));
("Random",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Random";namespace=["Stdlib"]}};path={name="Random";namespace=["Stdlib"]}}; signature=of_list
[("State",Sig ({origin=Submodule; signature=empty}))]}));
("Result",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Result";namespace=["Stdlib"]}};path={name="Result";namespace=["Stdlib"]}}; signature=empty}));
("Scanf",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Scanf";namespace=["Stdlib"]}};path={name="Scanf";namespace=["Stdlib"]}}; signature=of_list
[("Scanning",Sig ({origin=Submodule; signature=empty}))]}));
("Seq",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Seq";namespace=["Stdlib"]}};path={name="Seq";namespace=["Stdlib"]}}; signature=empty}));
("Set",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Set";namespace=["Stdlib"]}};path={name="Set";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Ord";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("OrderedType",Sig ({origin=Submodule; signature=empty}));
("S",Sig ({origin=Submodule; signature=empty}))])
)}));
("Stack",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Stack";namespace=["Stdlib"]}};path={name="Stack";namespace=["Stdlib"]}}; signature=empty}));
("StdLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="StdLabels";namespace=["Stdlib"]}};path={name="StdLabels";namespace=["Stdlib"]}}; signature=of_list
[("Array",Alias {path=Namespaced.make "ArrayLabels";phantom=None});
("Bytes",Alias {path=Namespaced.make "BytesLabels";phantom=None});
("List",Alias {path=Namespaced.make "ListLabels";phantom=None});
("String",Alias {path=Namespaced.make "StringLabels";phantom=None})]}));
("Stream",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Stream";namespace=["Stdlib"]}};path={name="Stream";namespace=["Stdlib"]}}; signature=empty}));
("String",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="String";namespace=["Stdlib"]}};path={name="String";namespace=["Stdlib"]}}; signature=empty}));
("StringLabels",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="StringLabels";namespace=["Stdlib"]}};path={name="StringLabels";namespace=["Stdlib"]}}; signature=empty}));
("Sys",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Sys";namespace=["Stdlib"]}};path={name="Sys";namespace=["Stdlib"]}}; signature=of_list
[("Immediate64",Sig ({origin=Submodule; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "Immediate";signature=Sig (
{origin=Submodule; signature=empty})},Fun (Some {name=Some "Non_immediate";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty}))))])
(of_list_type [("Immediate",Sig (
{origin=Submodule; signature=empty}));
("Non_immediate",Sig (
{origin=Submodule; signature=empty}))])
)}))]}));
("Uchar",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Uchar";namespace=["Stdlib"]}};path={name="Uchar";namespace=["Stdlib"]}}; signature=empty}));
("Unit",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Unit";namespace=["Stdlib"]}};path={name="Unit";namespace=["Stdlib"]}}; signature=empty}));
("Weak",Sig ({origin=Unit {source={source=Special "stdlib"; file={name="Weak";namespace=["Stdlib"]}};path={name="Weak";namespace=["Stdlib"]}}; signature=
(merge
(of_list [("Make",Fun (Some {name=Some "H";signature=Sig (
{origin=Submodule; signature=empty})},Sig (
{origin=Submodule; signature=empty})))])
(of_list_type [("S",Sig ({origin=Submodule; signature=empty}))])
)}))]
|
|
88a78b6721cb90962b6e4d91057431b9f5d9c46f0f725f51132c80985439ae3c | ejgallego/coq-serapi | ser_pp.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(************************************************************************)
(* Coq serialization API/Plugin *)
Copyright 2016 - 2017 MINES ParisTech
(************************************************************************)
(* Status: Very Experimental *)
(************************************************************************)
open Sexplib.Std

(** Serializable copy of [Pp.pp_tag], imported verbatim via ppx_import. *)
type pp_tag =
  [%import: Pp.pp_tag]
  [@@deriving sexp, yojson]

(** Serializable copy of [Pp.block_type], imported verbatim via ppx_import. *)
type block_type =
  [%import: Pp.block_type]
  [@@deriving sexp, yojson]
(** Internal serializable mirror of the abstract [Pp.t].
    The ppx derivers cannot look inside [Pp.t], so we convert to and from
    the isomorphic concrete type [_t] below. *)
module P = struct

  type _t =
    | Pp_empty
    | Pp_string of string
    | Pp_glue of _t list
    | Pp_box of block_type * _t
    | Pp_tag of pp_tag * _t
    (* Are those redundant? *)
    | Pp_print_break of int * int
    | Pp_force_newline
    | Pp_comment of string list
  [@@deriving sexp, yojson]

  open Pp

  (* [Pp.t] -> [_t]: structure-preserving translation through [repr]. *)
  let rec from_t (d : t) : _t = match repr d with
    | Ppcmd_empty -> Pp_empty
    | Ppcmd_string s -> Pp_string s
    | Ppcmd_glue l -> Pp_glue (List.map from_t l)
    | Ppcmd_box (bt,d) -> Pp_box(bt, from_t d)
    | Ppcmd_tag (t,d) -> Pp_tag(t, from_t d)
    | Ppcmd_print_break (n,m) -> Pp_print_break(n,m)
    | Ppcmd_force_newline -> Pp_force_newline
    | Ppcmd_comment s -> Pp_comment s

  (* Inverse of [from_t]: rebuild a [Pp.t] with [unrepr]. *)
  let rec to_t (d : _t) : t = unrepr (match d with
    | Pp_empty -> Ppcmd_empty
    | Pp_string s -> Ppcmd_string s
    | Pp_glue l -> Ppcmd_glue (List.map to_t l)
    | Pp_box (bt,d) -> Ppcmd_box(bt, to_t d)
    | Pp_tag (t,d) -> Ppcmd_tag(t, to_t d)
    | Pp_print_break (n,m) -> Ppcmd_print_break(n,m)
    | Pp_force_newline -> Ppcmd_force_newline
    | Pp_comment s -> Ppcmd_comment s)
end
(* The public serializable type is [Pp.t] itself; every conversion goes
   through the concrete mirror type [P._t]. *)
type t = Pp.t

let t_of_sexp s = P.(to_t (_t_of_sexp s))
let sexp_of_t d = P.(sexp_of__t (from_t d))

let of_yojson json = Ppx_deriving_yojson_runtime.(P.(_t_of_yojson json >|= to_t))
let to_yojson level = P.(_t_to_yojson (from_t level))

(** Serializable copy of [Pp.doc_view], imported verbatim via ppx_import. *)
type doc_view =
  [%import: Pp.doc_view]
  [@@deriving sexp, yojson]
| null | https://raw.githubusercontent.com/ejgallego/coq-serapi/61d2a5c092c1918312b8a92f43a374639d1786f9/serlib/ser_pp.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
**********************************************************************
Coq serialization API/Plugin
**********************************************************************
Status: Very Experimental
**********************************************************************
Are those redundant? | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright 2016 - 2017 MINES ParisTech
open Sexplib.Std
type pp_tag =
[%import: Pp.pp_tag]
[@@deriving sexp, yojson]
type block_type =
[%import: Pp.block_type]
[@@deriving sexp, yojson]
module P = struct
type _t =
| Pp_empty
| Pp_string of string
| Pp_glue of _t list
| Pp_box of block_type * _t
| Pp_tag of pp_tag * _t
| Pp_print_break of int * int
| Pp_force_newline
| Pp_comment of string list
[@@deriving sexp, yojson]
open Pp
let rec from_t (d : t) : _t = match repr d with
| Ppcmd_empty -> Pp_empty
| Ppcmd_string s -> Pp_string s
| Ppcmd_glue l -> Pp_glue (List.map from_t l)
| Ppcmd_box (bt,d) -> Pp_box(bt, from_t d)
| Ppcmd_tag (t,d) -> Pp_tag(t, from_t d)
| Ppcmd_print_break (n,m) -> Pp_print_break(n,m)
| Ppcmd_force_newline -> Pp_force_newline
| Ppcmd_comment s -> Pp_comment s
let rec to_t (d : _t) : t = unrepr (match d with
| Pp_empty -> Ppcmd_empty
| Pp_string s -> Ppcmd_string s
| Pp_glue l -> Ppcmd_glue (List.map to_t l)
| Pp_box (bt,d) -> Ppcmd_box(bt, to_t d)
| Pp_tag (t,d) -> Ppcmd_tag(t, to_t d)
| Pp_print_break (n,m) -> Ppcmd_print_break(n,m)
| Pp_force_newline -> Ppcmd_force_newline
| Pp_comment s -> Ppcmd_comment s)
end
type t = Pp.t
let t_of_sexp s = P.(to_t (_t_of_sexp s))
let sexp_of_t d = P.(sexp_of__t (from_t d))
let of_yojson json = Ppx_deriving_yojson_runtime.(P.(_t_of_yojson json >|= to_t))
let to_yojson level = P.(_t_to_yojson (from_t level))
type doc_view =
[%import: Pp.doc_view]
[@@deriving sexp, yojson]
|
10a5b2fb6e9e635096375da5d9acb3d3a1700e7c0f95add27957c15f078ff063 | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.HE.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.HE.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HE Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Ordinal/HE/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Ordinal.HE.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.HE.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HE Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
|
fc3cd2359296711afd6c5e31e10b7e197096988f56c9613b3126ef31ac96aee5 | esl/erlang-web | tarball.erl | #!/usr/bin/env escript
-include_lib("kernel/include/file.hrl").
%% Entry point. With no arguments, prompt interactively for the tarball
%% name and the SVN URL; with two arguments, run the full pipeline:
%% SVN export -> PDF build -> tarball -> checksums -> cleanup.
main([]) ->
    %% io:get_line keeps the trailing newline (char 10); strip it.
    Name = string:strip(io:get_line("Name of the tarball: "), both, 10),
    Url = string:strip(io:get_line("SVN address: "), both, 10),
    main([Name, Url]);
main([Name, URL]) ->
    Path = export_svn(URL, Name),
    generate_pdf(Name, Path),
    Tarball = create_tarball(Name, Path),
    calculate_checksum(Tarball),
    clean_up(Path).
%% Export (not checkout) the SVN tree into /tmp/<pid>/<Name> and return
%% that path. The ~p format quotes both arguments, which also
%% shell-protects the '<' and '>' characters in the pid-derived
%% directory name — do not change it to ~s.
export_svn(URL, Name) ->
    Path = filename:join(["/tmp",
                          pid_to_list(self()),
                          Name]),
    file:make_dir(filename:join("/tmp",
                                pid_to_list(self()))),
    os:cmd(io_lib:format("svn export ~p ~p", [URL, Path])),
    message("SVN version exported."),
    Path.
%% Build doc/<Name>.pdf: run `make` in the exported doc/ directory,
%% rename the produced skeleton.pdf, then delete the LaTeX sources so
%% only the PDF remains.
generate_pdf(Name, Path) ->
    DocPath = filename:join(Path, "doc"),
    PDFName = filename:join(DocPath, Name ++ ".pdf"),
    os:cmd(io_lib:format("make -C ~p", [DocPath])),
    file:rename(filename:join(DocPath, "skeleton.pdf"),
                PDFName),
    message("PDF generated"),
    clean_tex(DocPath, PDFName).
%% Delete everything under Path except the generated PDF (the PDF path
%% is passed down so delete_file/2 can skip it).
clean_tex(Path, PDF) ->
    {ok, Dir} = file:list_dir(Path),
    lists:foreach(fun(F) ->
                          delete_file(filename:join(Path, F), PDF)
                  end, Dir),
    message("PDF cleaning up completed").
%% Recursively delete File. The first clause matches when File is
%% exactly the PDF path to be preserved, and does nothing. Directories
%% are emptied recursively and then removed with del_dir/1.
delete_file(PDF, PDF) ->
    ok;
delete_file(File, PDF) ->
    {ok, FileInfo} = file:read_file_info(File),
    case FileInfo#file_info.type of
        directory ->
            {ok, Dir} = file:list_dir(File),
            lists:foreach(fun(F) ->
                                  delete_file(filename:join(File, F), PDF)
                          end, Dir),
            file:del_dir(File);
        _ ->
            file:delete(File)
    end.
%% Pack the exported tree into <Name>.tar.gz in the current directory
%% and return the archive name.
%% NOTE(review): Path is absolute (/tmp/<pid>/<Name>), so tar stores the
%% full /tmp/... prefix inside the archive — confirm this is intended.
create_tarball(Name, Path) ->
    Result = Name ++ ".tar.gz",
    os:cmd(io_lib:format("tar -czf ~p ~p", [Result, Path])),
    message("Creating tar.gz completed"),
    Result.
%% Print the MD5 and SHA1 of the tarball as reported by the system
%% md5sum/sha1sum tools (first whitespace-separated token of output).
calculate_checksum(Tarball) ->
    Md5 = hd(string:tokens(os:cmd("md5sum " ++ Tarball), " ")),
    Sha1 = hd(string:tokens(os:cmd("sha1sum " ++ Tarball), " ")),
    io:format("MD5: ~s~nSHA1: ~s~n", [Md5, Sha1]).
%% Remove the exported tree after packaging.
%% Fix: the original deleted only the *contents* of Path, leaving the
%% /tmp/<pid>/<Name> directory and its /tmp/<pid> parent (both created
%% by export_svn/2) behind on every run.
clean_up(Path) ->
    {ok, Dir} = file:list_dir(Path),
    lists:foreach(fun(F) ->
                          delete_file(filename:join(Path, F), "not used")
                  end, Dir),
    %% Best-effort removal of the now-empty export directory and of the
    %% per-run /tmp/<pid> parent; ignore failures so cleanup never crashes.
    _ = file:del_dir(Path),
    _ = file:del_dir(filename:dirname(Path)),
    message("Cleaning up completed").
%% Emit a one-line progress message on standard output.
message(Msg) ->
    io:fwrite("~s~n", [Msg]).
| null | https://raw.githubusercontent.com/esl/erlang-web/2e5c2c9725465fc5b522250c305a9d553b3b8243/bin/tarball.erl | erlang | #!/usr/bin/env escript
-include_lib("kernel/include/file.hrl").
main([]) ->
Name = string:strip(io:get_line("Name of the tarball: "), both, 10),
Url = string:strip(io:get_line("SVN address: "), both, 10),
main([Name, Url]);
main([Name, URL]) ->
Path = export_svn(URL, Name),
generate_pdf(Name, Path),
Tarball = create_tarball(Name, Path),
calculate_checksum(Tarball),
clean_up(Path).
export_svn(URL, Name) ->
Path = filename:join(["/tmp",
pid_to_list(self()),
Name]),
file:make_dir(filename:join("/tmp",
pid_to_list(self()))),
os:cmd(io_lib:format("svn export ~p ~p", [URL, Path])),
message("SVN version exported."),
Path.
generate_pdf(Name, Path) ->
DocPath = filename:join(Path, "doc"),
PDFName = filename:join(DocPath, Name ++ ".pdf"),
os:cmd(io_lib:format("make -C ~p", [DocPath])),
file:rename(filename:join(DocPath, "skeleton.pdf"),
PDFName),
message("PDF generated"),
clean_tex(DocPath, PDFName).
clean_tex(Path, PDF) ->
{ok, Dir} = file:list_dir(Path),
lists:foreach(fun(F) ->
delete_file(filename:join(Path, F), PDF)
end, Dir),
message("PDF cleaning up completed").
delete_file(PDF, PDF) ->
ok;
delete_file(File, PDF) ->
{ok, FileInfo} = file:read_file_info(File),
case FileInfo#file_info.type of
directory ->
{ok, Dir} = file:list_dir(File),
lists:foreach(fun(F) ->
delete_file(filename:join(File, F), PDF)
end, Dir),
file:del_dir(File);
_ ->
file:delete(File)
end.
create_tarball(Name, Path) ->
Result = Name ++ ".tar.gz",
os:cmd(io_lib:format("tar -czf ~p ~p", [Result, Path])),
message("Creating tar.gz completed"),
Result.
calculate_checksum(Tarball) ->
Md5 = hd(string:tokens(os:cmd("md5sum " ++ Tarball), " ")),
Sha1 = hd(string:tokens(os:cmd("sha1sum " ++ Tarball), " ")),
io:format("MD5: ~s~nSHA1: ~s~n", [Md5, Sha1]).
clean_up(Path) ->
{ok, Dir} = file:list_dir(Path),
lists:foreach(fun(F) ->
delete_file(filename:join(Path, F), "not used")
end, Dir),
message("Cleaning up completed").
message(Msg) ->
io:format("~s~n", [Msg]).
|
|
ebc73f48c33985a4309baac2d3d63dac055be18daa3db3c36095f775e75e9867 | omnyway-labs/re-crud | http_client.cljs | (ns re-crud.http-client
(:require [re-frame.core :refer [dispatch]]
[ajax.core :refer [POST GET] :as ajax]
[cljs.reader :as reader]
[clojure.string :as s]))
(defn log
  "Log all args (as one seq) to the browser console at :info level."
  [& args]
  (.log js/console :info args))
(defn make-url
  "Fill every {param} placeholder in url from the request-params map
  (keyword key -> string value) and prefix the service base URL."
  [service-url url request-params]
  (let [filled (reduce (fn [u [k v]] (s/replace u (str "{" (name k) "}") v))
                       url
                       request-params)]
    (str service-url filled)))
(defn parse-json-string
  "Parse a JSON string into ClojureScript data (keys stay strings)."
  [string]
  (js->clj (.parse js/JSON string)))
(defn parse-response
  "Recursively turn string keys of a response map into keywords."
  [response]
  (clojure.walk/keywordize-keys response))
(defn response-handler
  "Keywordize the HTTP response and, when an on-success re-frame event
  vector is given, dispatch it with the parsed response conj'd on.
  log-id, request-body and operation-id are accepted for symmetry with
  the request site but are currently unused here."
  [log-id request-body response operation-id on-success]
  (let [parsed-response (parse-response response)]
    (when on-success
      (dispatch (conj on-success parsed-response)))))
;; HTTP verb keyword -> cljs-ajax request function.
(def actions
  {:get ajax/GET
   :post ajax/POST
   :put ajax/PUT
   :patch ajax/PATCH
   :delete ajax/DELETE})
(defn make-request
  "Fire an HTTP request via cljs-ajax.
  method must be a key of `actions`; request-body is sent as JSON.
  Options:
    :on-success   re-frame event vector, dispatched with the parsed
                  response appended (see response-handler)
    :service-name value for the x-re-crud-service header
    :on-failure   re-frame event id, dispatched as
                  [on-failure status response] before the generic
                  :crud-http-fail event"
  [operation-id method url request-body & {:keys [on-success service-name on-failure]}]
  (let [log-id (random-uuid)            ;; correlates this request with handler logs
        action (get actions method)]
    (action url
            {:params request-body
             :headers {"x-re-crud-service" service-name}
             :format :json
             :handler #(response-handler log-id request-body % operation-id on-success)
             :error-handler (fn [{:keys [status response]}]
                              (when (some? on-failure)
                                (dispatch [on-failure status response]))
                              (dispatch [:crud-http-fail operation-id status response]))})))
| null | https://raw.githubusercontent.com/omnyway-labs/re-crud/2fe9cbcf0a19d8c09a86d9025577e6271e9dd5a3/src/cljs/re_crud/http_client.cljs | clojure | (ns re-crud.http-client
(:require [re-frame.core :refer [dispatch]]
[ajax.core :refer [POST GET] :as ajax]
[cljs.reader :as reader]
[clojure.string :as s]))
(defn log [& args]
(.log js/console :info args))
(defn make-url [service-url url request-params]
(->> (reduce (fn [u [k v]] (s/replace u (str "{"(name k)"}") v))
url
request-params)
(str service-url)))
(defn parse-json-string [string]
(js->clj (.parse js/JSON string)))
(defn parse-response [response]
(clojure.walk/keywordize-keys response))
(defn response-handler [log-id request-body response operation-id on-success]
(let [parsed-response (parse-response response)]
(when on-success
(dispatch (conj on-success parsed-response)))))
(def actions
{:get ajax/GET
:post ajax/POST
:put ajax/PUT
:patch ajax/PATCH
:delete ajax/DELETE})
(defn make-request [operation-id method url request-body & {:keys [on-success service-name on-failure]}]
(let [log-id (random-uuid)
action (get actions method)]
(action url
{:params request-body
:headers {"x-re-crud-service" service-name}
:format :json
:handler #(response-handler log-id request-body % operation-id on-success)
:error-handler (fn [{:keys [status response]}]
(when (some? on-failure)
(dispatch [on-failure status response]))
(dispatch [:crud-http-fail operation-id status response]))})))
|
|
3e79f6b6830537c7da603723fa71296fe6742751b8505937f227b4a7a16dadee | noinia/hgeometry | Types.hs | # LANGUAGE ScopedTypeVariables #
--------------------------------------------------------------------------------
-- |
-- Module : Algorithms.Geometry.DelaunayTriangulation.Types
Copyright : ( C )
-- License : see the LICENSE file
Maintainer :
--
Defines some geometric types used in the delaunay triangulation
--
--------------------------------------------------------------------------------
module Algorithms.Geometry.DelaunayTriangulation.Types
( VertexID
, Vertex
, Adj
, Triangulation(..)
, vertexIds
, positions
, neighbours
, Mapping
, edgesAsPoints
, edgesAsVertices
, toPlanarSubdivision
, toPlaneGraph
) where
import Control.Lens
import qualified Data.CircularList as C
import Data.Ext
import qualified Data.IntMap.Strict as IM
import qualified Data.Map as M
import Geometry.PlanarSubdivision
import Geometry.Point
import Geometry.Properties
import qualified Data.Map.Strict as SM
import qualified Data.PlaneGraph as PG
import qualified Data.PlanarGraph as PPG
import qualified Data.Vector as V
import Data.PlaneGraph.Core (PlaneGraph(..))
--------------------------------------------------------------------------------
-- We store all adjacency lists in clockwise order
-- pre: If v on the convex hull, then its first entry in the adj. lists is its CCW
-- successor (i.e. its predecessor) on the convex hull
-- | Vertex identifier.
type VertexID = Int
-- | Rotating Right <-> rotate clockwise
type Vertex = C.CList VertexID
-- | Neighbours indexed by VertexID.
type Adj = IM.IntMap (C.CList VertexID)
-- | Neighbours are stored in clockwise order: i.e. rotating right moves to the
-- next clockwise neighbour.
data Triangulation p r = Triangulation { _vertexIds  :: M.Map (Point 2 r) VertexID
                                         -- ^ maps each triangulated point to its internal identifier
                                       , _positions  :: V.Vector (Point 2 r :+ p)
                                         -- ^ the point (with extra data @p@) of each vertex, indexed by 'VertexID'
                                       , _neighbours :: V.Vector (C.CList VertexID)
                                         -- ^ clockwise-ordered neighbours of each vertex, indexed by 'VertexID'
                                       }
                         deriving (Show,Eq)
-- | Mapping between triangulated points and their internal VertexID.
vertexIds :: Lens' (Triangulation p r) (M.Map (Point 2 r) VertexID)
vertexIds = lens _vertexIds (\(Triangulation _v p n) v -> Triangulation v p n)
-- | Point positions indexed by VertexID.
positions :: Lens (Triangulation p1 r) (Triangulation p2 r) (V.Vector (Point 2 r :+ p1)) (V.Vector (Point 2 r :+ p2))
positions = lens _positions (\(Triangulation v _p n) p -> Triangulation v p n)
-- | Point neighbours indexed by VertexID.
neighbours :: Lens' (Triangulation p r) (V.Vector (C.CList VertexID))
neighbours = lens _neighbours (\(Triangulation v p _n) n -> Triangulation v p n)
type instance NumType (Triangulation p r) = r
type instance Dimension (Triangulation p r) = 2
-- | Bidirectional mapping between points and VertexIDs.
type Mapping p r = (M.Map (Point 2 r) VertexID, V.Vector (Point 2 r :+ p))
-- showDT :: (Show p, Show r) => Triangulation p r -> IO ()
-- showDT = mapM_ print . edgesAsPoints
-- | List add edges as point pairs.
-- | List all edges as pairs of points (each carrying its extra data @p@).
edgesAsPoints :: Triangulation p r -> [(Point 2 r :+ p, Point 2 r :+ p)]
edgesAsPoints t = [ (pts V.! u, pts V.! v) | (u, v) <- edgesAsVertices t ]
  where
    pts = _positions t
-- | List add edges as VertexID pairs.
-- | List all edges as pairs of vertex identifiers; each edge is reported
-- exactly once, with its smaller endpoint first.
edgesAsVertices :: Triangulation p r -> [(VertexID,VertexID)]
edgesAsVertices t =
    [ (i, j)
    | (i, ns) <- zip [0..] (V.toList (_neighbours t))
    , j <- C.toList ns
    , j > i
    ]
--------------------------------------------------------------------------------
data ST a b c = ST { fst ' : : ! a , snd ' : : ! b , ' : : ! c }
type
-- | ST' is a strict triple (m,a,x) containing:
--
-- - m: a Map, mapping edges, represented by a pair of vertexId's (u,v) with
-- u < v, to arcId's.
-- - a: the next available unused arcID
-- - x: the data value we are interested in computing
type ST ' a = ST ( SM.Map ( VertexID , VertexID ) ) ArcID a
-- | convert the triangulation into a planarsubdivision
--
-- running time: \(O(n)\).
toPlanarSubdivision :: forall s p r. (Ord r, Fractional r)
=> Triangulation p r -> PlanarSubdivision s p () () r
toPlanarSubdivision = fromPlaneGraph . toPlaneGraph
-- | convert the triangulation into a plane graph
--
-- running time: \(O(n)\).
toPlaneGraph :: forall s p r. Triangulation p r -> PG.PlaneGraph s p () () r
toPlaneGraph tr = PlaneGraph $ g&PPG.vertexData .~ vtxData
where
g = PPG.fromAdjacencyLists . V.toList . V.imap f $ tr^.neighbours
report in CCW order
vtxData = (\(loc :+ p) -> VertexData loc p) <$> tr^.positions
| null | https://raw.githubusercontent.com/noinia/hgeometry/8bebc3ddc2b24c1fe46b768248f324e1351aa7f2/hgeometry/src/Algorithms/Geometry/DelaunayTriangulation/Types.hs | haskell | ------------------------------------------------------------------------------
|
Module : Algorithms.Geometry.DelaunayTriangulation.Types
License : see the LICENSE file
------------------------------------------------------------------------------
------------------------------------------------------------------------------
We store all adjacency lists in clockwise order
successor (i.e. its predecessor) on the convex hull
| Vertex identifier.
| Rotating Right <-> rotate clockwise
| Neighbours indexed by VertexID.
| Neighbours are stored in clockwise order: i.e. rotating right moves to the
next clockwise neighbour.
| Mapping between triangulated points and their internal VertexID.
| Point neighbours indexed by VertexID.
showDT :: (Show p, Show r) => Triangulation p r -> IO ()
showDT = mapM_ print . edgesAsPoints
| List add edges as point pairs.
| List add edges as VertexID pairs.
------------------------------------------------------------------------------
| ST' is a strict triple (m,a,x) containing:
- m: a Map, mapping edges, represented by a pair of vertexId's (u,v) with
u < v, to arcId's.
- a: the next available unused arcID
- x: the data value we are interested in computing
| convert the triangulation into a planarsubdivision
running time: \(O(n)\).
| convert the triangulation into a plane graph
running time: \(O(n)\). | # LANGUAGE ScopedTypeVariables #
Copyright : ( C )
Maintainer :
Defines some geometric types used in the delaunay triangulation
module Algorithms.Geometry.DelaunayTriangulation.Types
( VertexID
, Vertex
, Adj
, Triangulation(..)
, vertexIds
, positions
, neighbours
, Mapping
, edgesAsPoints
, edgesAsVertices
, toPlanarSubdivision
, toPlaneGraph
) where
import Control.Lens
import qualified Data.CircularList as C
import Data.Ext
import qualified Data.IntMap.Strict as IM
import qualified Data.Map as M
import Geometry.PlanarSubdivision
import Geometry.Point
import Geometry.Properties
import qualified Data . Map . Strict as SM
import qualified Data.PlaneGraph as PG
import qualified Data.PlanarGraph as PPG
import qualified Data.Vector as V
import Data.PlaneGraph.Core (PlaneGraph(..))
: If v on the convex hull , then its first entry in the adj . lists is its CCW
type VertexID = Int
type Vertex = C.CList VertexID
type Adj = IM.IntMap (C.CList VertexID)
data Triangulation p r = Triangulation { _vertexIds :: M.Map (Point 2 r) VertexID
, _positions :: V.Vector (Point 2 r :+ p)
, _neighbours :: V.Vector (C.CList VertexID)
}
deriving (Show,Eq)
vertexIds :: Lens' (Triangulation p r) (M.Map (Point 2 r) VertexID)
vertexIds = lens _vertexIds (\(Triangulation _v p n) v -> Triangulation v p n)
| Point positions indexed by VertexID .
positions :: Lens (Triangulation p1 r) (Triangulation p2 r) (V.Vector (Point 2 r :+ p1)) (V.Vector (Point 2 r :+ p2))
positions = lens _positions (\(Triangulation v _p n) p -> Triangulation v p n)
neighbours :: Lens' (Triangulation p r) (V.Vector (C.CList VertexID))
neighbours = lens _neighbours (\(Triangulation v p _n) n -> Triangulation v p n)
type instance NumType (Triangulation p r) = r
type instance Dimension (Triangulation p r) = 2
| Bidirectional mapping between points and VertexIDs .
type Mapping p r = (M.Map (Point 2 r) VertexID, V.Vector (Point 2 r :+ p))
edgesAsPoints :: Triangulation p r -> [(Point 2 r :+ p, Point 2 r :+ p)]
edgesAsPoints t = let pts = _positions t
in map (bimap (pts V.!) (pts V.!)) . edgesAsVertices $ t
edgesAsVertices :: Triangulation p r -> [(VertexID,VertexID)]
edgesAsVertices = concatMap (\(i,ns) -> map (i,) . filter (> i) . C.toList $ ns)
. zip [0..] . V.toList . _neighbours
data ST a b c = ST { fst ' : : ! a , snd ' : : ! b , ' : : ! c }
type
type ST ' a = ST ( SM.Map ( VertexID , VertexID ) ) ArcID a
toPlanarSubdivision :: forall s p r. (Ord r, Fractional r)
=> Triangulation p r -> PlanarSubdivision s p () () r
toPlanarSubdivision = fromPlaneGraph . toPlaneGraph
toPlaneGraph :: forall s p r. Triangulation p r -> PG.PlaneGraph s p () () r
toPlaneGraph tr = PlaneGraph $ g&PPG.vertexData .~ vtxData
where
g = PPG.fromAdjacencyLists . V.toList . V.imap f $ tr^.neighbours
report in CCW order
vtxData = (\(loc :+ p) -> VertexData loc p) <$> tr^.positions
|
d7f853a64ca8dff6e8db25aa097d28b53fcac6f84544479673616dbb14798eaf | KestrelInstitute/Specware | Tests.lisp | (test-directories ".")
(test
("Bug 0022 : Redefined ops processed without giving error message [OpDef]."
:show "RedefinedOp#OpDef"
:output '(";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDef"
(:optional "")
"spec"
(:optional "")
(:alternatives
"op f: Nat"
"op f : Nat"
)
(:alternatives
" def f: Nat = 3"
" def f : Nat = 3"
" def f = 3")
(:optional "")
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [DefOp]"
:sw "RedefinedOp#DefOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#DefOp"
"ERROR: in specification: Operator f has been redeclared"
" from 3"
" to <anyterm>: Nat"
" found in $TESTDIR/RedefinedOp.sw"
"8.1-8.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [DefDef]"
:sw "RedefinedOp#DefDef"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#DefDef"
"ERROR: in specification: Operator f has been redefined"
" from 3"
" to 3"
" found in $TESTDIR/RedefinedOp.sw"
"13.1-13.9"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpOp]"
:sw "RedefinedOp#OpOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpOp"
"ERROR: in specification: Operator f has been redeclared"
" from Nat"
" to Nat"
" found in $TESTDIR/RedefinedOp.sw"
"18.1-18.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpDefOp]"
:sw "RedefinedOp#OpDefOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDefOp"
"ERROR: in specification: Operator f has been redeclared"
" from 3: Nat"
" to <anyterm>: Nat"
" found in $TESTDIR/RedefinedOp.sw"
"24.1-24.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpDefDef]"
:sw "RedefinedOp#OpDefDef"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDefDef"
"ERROR: in specification: Operator f has been redefined"
" from 3: Nat"
" to 3"
" found in $TESTDIR/RedefinedOp.sw"
"30.1-30.9"
(:optional "")
))
)
| null | https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/TestSuite/Bugs/Bug_0022/Tests.lisp | lisp | (test-directories ".")
(test
("Bug 0022 : Redefined ops processed without giving error message [OpDef]."
:show "RedefinedOp#OpDef"
:output '(";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDef"
(:optional "")
"spec"
(:optional "")
(:alternatives
"op f: Nat"
"op f : Nat"
)
(:alternatives
" def f: Nat = 3"
" def f : Nat = 3"
" def f = 3")
(:optional "")
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [DefOp]"
:sw "RedefinedOp#DefOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#DefOp"
"ERROR: in specification: Operator f has been redeclared"
" from 3"
" to <anyterm>: Nat"
" found in $TESTDIR/RedefinedOp.sw"
"8.1-8.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [DefDef]"
:sw "RedefinedOp#DefDef"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#DefDef"
"ERROR: in specification: Operator f has been redefined"
" from 3"
" to 3"
" found in $TESTDIR/RedefinedOp.sw"
"13.1-13.9"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpOp]"
:sw "RedefinedOp#OpOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpOp"
"ERROR: in specification: Operator f has been redeclared"
" from Nat"
" to Nat"
" found in $TESTDIR/RedefinedOp.sw"
"18.1-18.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpDefOp]"
:sw "RedefinedOp#OpDefOp"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDefOp"
"ERROR: in specification: Operator f has been redeclared"
" from 3: Nat"
" to <anyterm>: Nat"
" found in $TESTDIR/RedefinedOp.sw"
"24.1-24.10"
(:optional "")
))
("Bug 0022 : Redefined ops processed without giving error message [OpDefDef]"
:sw "RedefinedOp#OpDefDef"
:output '((:optional "")
";;; Elaborating spec at $TESTDIR/RedefinedOp#OpDefDef"
"ERROR: in specification: Operator f has been redefined"
" from 3: Nat"
" to 3"
" found in $TESTDIR/RedefinedOp.sw"
"30.1-30.9"
(:optional "")
))
)
|
|
7b9a385c898bab40b7fcd437ff8264f4c6e10f08381f676fd4ccc09987d60cb1 | eugeneia/athens | early.lisp | Anaphora : The Anaphoric Macro Package from Hell
;;;;
;;;; This been placed in Public Domain by the author,
Nikodemus Siivola < >
(in-package :anaphora)
(defmacro with-unique-names ((&rest bindings) &body body)
  "Execute BODY with each variable in BINDINGS bound to a fresh gensym.
A binding is either a bare symbol, used both as the variable name and as
the gensym prefix, or a (VARIABLE PREFIX) list."
  `(let ,(mapcar #'(lambda (binding)
                     ;; Normalise a bare symbol B to the pair (B B).
                     (destructuring-bind (var prefix)
                         (if (consp binding) binding (list binding binding))
                       `(,var (gensym ,(string prefix)))))
                 bindings)
     ,@body))
(defmacro ignore-first (discarded expression)
  "Expand to EXPRESSION; DISCARDED is dropped at macroexpansion time and
is therefore never evaluated."
  (declare (ignore discarded))
  expression)
| null | https://raw.githubusercontent.com/eugeneia/athens/cc9d456edd3891b764b0fbf0202a3e2f58865cbf/quicklisp/dists/quicklisp/software/anaphora-20180228-git/early.lisp | lisp |
This been placed in Public Domain by the author, | Anaphora : The Anaphoric Macro Package from Hell
Nikodemus Siivola < >
(in-package :anaphora)
(defmacro with-unique-names ((&rest bindings) &body body)
`(let ,(mapcar #'(lambda (binding)
(destructuring-bind (var prefix)
(if (consp binding) binding (list binding binding))
`(,var (gensym ,(string prefix)))))
bindings)
,@body))
(defmacro ignore-first (first expr)
(declare (ignore first))
expr)
|
732a94382d041203c0fba326fc2f45b3c8071161bf8cb5abc9eea69bc9da8c3d | haskell/ghcide | Experiments.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE ExistentialQuantification #
# LANGUAGE ImplicitParams #
{-# LANGUAGE ImpredicativeTypes #-}
module Experiments
( Bench(..)
, BenchRun(..)
, Config(..)
, Verbosity(..)
, CabalStack(..)
, SetupResult(..)
, Example(..)
, experiments
, configP
, defConfig
, output
, setup
, runBench
, exampleToOptions
) where
import Control.Applicative.Combinators (skipManyTill)
import Control.Concurrent
import Control.Exception.Safe
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.Aeson (Value(Null))
import Data.Char (isDigit)
import Data.List
import Data.Maybe
import qualified Data.Text as T
import Data.Version
import Development.IDE.Plugin.Test
import Experiments.Types
import Language.Haskell.LSP.Test
import Language.Haskell.LSP.Types
import Language.Haskell.LSP.Types.Capabilities
import Numeric.Natural
import Options.Applicative
import System.Directory
import System.Environment.Blank (getEnv)
import System.FilePath ((</>), (<.>))
import System.Process
import System.Time.Extra
import Text.ParserCombinators.ReadP (readP_to_S)
-- | A harmless edit: insert a single space at the hygienic position
-- (inside a string literal, per the setup in 'runBench').  It dirties the
-- buffer and forces a re-check without changing what the module compiles to.
hygienicEdit :: (?hygienicP :: Position) => TextDocumentContentChangeEvent
hygienicEdit =
  TextDocumentContentChangeEvent
    { _range = Just (Range ?hygienicP ?hygienicP),  -- empty range: pure insertion
      _rangeLength = Nothing,
      _text = " "
    }
-- | A breaking edit: insert the character @a@ in the middle of an
-- identifier (see 'runBench' for how ?identifierP is positioned), so the
-- module stops compiling.  Used by experiments that need diagnostics and
-- code actions to appear.
breakingEdit :: (?identifierP :: Position) => TextDocumentContentChangeEvent
breakingEdit =
  TextDocumentContentChangeEvent
    { _range = Just (Range ?identifierP ?identifierP),  -- empty range: pure insertion
      _rangeLength = Nothing,
      _text = "a"
    }
-- | Experiments have access to these special positions:
-- - hygienicP points to a string in the target file, convenient for hygienic edits
-- - identifierP points to the middle of an identifier, convenient for goto-def, hover and completions
type HasPositions = (?hygienicP :: Position, ?identifierP :: Position)
experiments :: [Bench]
experiments =
[ ---------------------------------------------------------------------------------------
bench "hover" 10 $ \doc ->
isJust <$> getHover doc ?identifierP,
---------------------------------------------------------------------------------------
bench "edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
waitForProgressDone
return True,
---------------------------------------------------------------------------------------
bench "hover after edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
isJust <$> getHover doc ?identifierP,
---------------------------------------------------------------------------------------
bench "getDefinition" 10 $ \doc ->
not . null <$> getDefinitions doc ?identifierP,
---------------------------------------------------------------------------------------
bench "documentSymbols" 100 $
fmap (either (not . null) (not . null)) . getDocumentSymbols,
---------------------------------------------------------------------------------------
bench "documentSymbols after edit" 100 $ \doc -> do
changeDoc doc [hygienicEdit]
either (not . null) (not . null) <$> getDocumentSymbols doc,
---------------------------------------------------------------------------------------
bench "completions after edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
not . null <$> getCompletions doc ?identifierP,
---------------------------------------------------------------------------------------
benchWithSetup
"code actions"
10
( \doc -> do
changeDoc doc [breakingEdit]
waitForProgressDone
return ?identifierP
)
( \p doc -> do
not . null <$> getCodeActions doc (Range p p)
),
---------------------------------------------------------------------------------------
benchWithSetup
"code actions after edit"
10
( \doc -> do
changeDoc doc [breakingEdit]
return ?identifierP
)
( \p doc -> do
changeDoc doc [hygienicEdit]
waitForProgressDone
-- NOTE ghcide used to clear and reinstall the diagnostics here
-- new versions no longer do, but keep this logic around
-- to benchmark old versions sucessfully
diags <- getCurrentDiagnostics doc
when (null diags) $
whileM (null <$> waitForDiagnostics)
not . null <$> getCodeActions doc (Range p p)
)
]
---------------------------------------------------------------------------------------------
-- | Path of the module the experiments edit and query, taken from the
-- configured example (relative to the example's root directory).
exampleModulePath :: HasConfig => FilePath
exampleModulePath = exampleModule (example ?config)
-- | Directory into which example packages are unpacked by 'setup'.
examplesPath :: FilePath
examplesPath = "bench/example"
-- | Default configuration: the option parser run on an empty argument
-- list.  The partial 'Success' pattern match is safe only while every
-- option in 'configP' has a default value.
defConfig :: Config
Success defConfig = execParserPure defaultPrefs (info configP fullDesc) []
-- | Convenience predicates over the configured verbosity level.
quiet, verbose :: Config -> Bool
verbose config = verbosity config == All
quiet config = verbosity config == Quiet
type HasConfig = (?config :: Config)
configP :: Parser Config
configP =
Config
<$> (flag' All (short 'v' <> long "verbose")
<|> flag' Quiet (short 'q' <> long "quiet")
<|> pure Normal
)
<*> optional (strOption (long "shake-profiling" <> metavar "PATH"))
<*> optional (strOption (long "ot-profiling" <> metavar "DIR" <> help "Enable OpenTelemetry and write eventlog for each benchmark in DIR"))
<*> strOption (long "csv" <> metavar "PATH" <> value "results.csv" <> showDefault)
<*> flag Cabal Stack (long "stack" <> help "Use stack (by default cabal is used)")
<*> many (strOption (long "ghcide-options" <> help "additional options for ghcide"))
<*> many (strOption (short 's' <> long "select" <> help "select which benchmarks to run"))
<*> optional (option auto (long "samples" <> metavar "NAT" <> help "override sampling count"))
<*> strOption (long "ghcide" <> metavar "PATH" <> help "path to ghcide" <> value "ghcide")
<*> option auto (long "timeout" <> value 60 <> help "timeout for waiting for a ghcide response")
<*> ( GetPackage <$> strOption (long "example-package-name" <> value "Cabal")
<*> moduleOption
<*> option versionP (long "example-package-version" <> value (makeVersion [3,2,0,0]))
<|>
UsePackage <$> strOption (long "example-path")
<*> moduleOption
)
where
moduleOption = strOption (long "example-module" <> metavar "PATH" <> value "Distribution/Simple.hs")
versionP :: ReadM Version
versionP = maybeReader $ extract . readP_to_S parseVersion
where
extract parses = listToMaybe [ res | (res,"") <- parses]
-- | Print a progress message to stdout, unless quiet output was requested.
output :: (MonadIO m, HasConfig) => String -> m ()
output = if quiet ?config then (\_ -> pure ()) else liftIO . putStrLn
---------------------------------------------------------------------------------------
type Experiment = TextDocumentIdentifier -> Session Bool
data Bench = forall setup.
Bench
{ name :: !String,
enabled :: !Bool,
samples :: !Natural,
benchSetup :: HasPositions => TextDocumentIdentifier -> Session setup,
experiment :: HasPositions => setup -> Experiment
}
-- | A benchmark runs when it is enabled and either no name filter was
-- given on the command line or its name appears in that filter.
select :: HasConfig => Bench -> Bool
select Bench {name, enabled}
  | not enabled = False
  | null chosen = True
  | otherwise   = name `elem` chosen
  where
    chosen = matches ?config
-- | Construct a benchmark with a setup phase: the setup session runs once
-- per benchmark (timed separately as @runSetup@ in 'BenchRun') and its
-- result is passed to every sample of the experiment.
benchWithSetup ::
  String ->
  -- ^ benchmark name, also used to select it from the command line
  Natural ->
  -- ^ default number of samples (overridable via --samples)
  (HasPositions => TextDocumentIdentifier -> Session p) ->
  -- ^ setup action, run once
  (HasPositions => p -> Experiment) ->
  -- ^ the timed experiment, run once per sample
  Bench
benchWithSetup name samples benchSetup experiment = Bench {..}
  where
    enabled = True
-- | Construct a benchmark with no setup phase (the setup is a no-op).
bench :: String -> Natural -> (HasPositions => Experiment) -> Bench
bench name defSamples userExperiment =
  benchWithSetup name defSamples (const $ pure ()) experiment
  where
    -- Adapt the plain experiment to the setup-result-taking shape.
    experiment () = userExperiment
runBenchmarksFun :: HasConfig => FilePath -> [Bench] -> IO ()
runBenchmarksFun dir allBenchmarks = do
let benchmarks = [ b{samples = fromMaybe (samples b) (repetitions ?config) }
| b <- allBenchmarks
, select b ]
whenJust (otMemoryProfiling ?config) $ \eventlogDir ->
createDirectoryIfMissing True eventlogDir
results <- forM benchmarks $ \b@Bench{name} ->
let run = runSessionWithConfig conf (cmd name dir) lspTestCaps dir
in (b,) <$> runBench run b
-- output raw data as CSV
let headers =
[ "name"
, "success"
, "samples"
, "startup"
, "setup"
, "userTime"
, "delayedTime"
, "totalTime"
, "maxResidency"
, "allocatedBytes"]
rows =
[ [ name,
show success,
show samples,
show startup,
show runSetup',
show userWaits,
show delayedWork,
show runExperiment,
show maxResidency,
show allocations
]
| (Bench {name, samples}, BenchRun {..}) <- results,
let runSetup' = if runSetup < 0.01 then 0 else runSetup
]
csv = unlines $ map (intercalate ", ") (headers : rows)
writeFile (outputCSV ?config) csv
-- print a nice table
let pads = map (maximum . map length) (transpose (headers : rowsHuman))
paddedHeaders = zipWith pad pads headers
outputRow = putStrLn . intercalate " | "
rowsHuman =
[ [ name,
show success,
show samples,
showDuration startup,
showDuration runSetup',
showDuration userWaits,
showDuration delayedWork,
showDuration runExperiment,
showMB maxResidency,
showMB allocations
]
| (Bench {name, samples}, BenchRun {..}) <- results,
let runSetup' = if runSetup < 0.01 then 0 else runSetup
]
outputRow paddedHeaders
outputRow $ (map . map) (const '-') paddedHeaders
forM_ rowsHuman $ \row -> outputRow $ zipWith pad pads row
where
cmd name dir =
unwords $
[ ghcide ?config,
"--lsp",
"--test",
"--cwd",
dir
]
++ case otMemoryProfiling ?config of
Just dir -> ["-l", "-ol" ++ (dir </> map (\c -> if c == ' ' then '-' else c) name <.> "eventlog")]
Nothing -> []
++ [ "-RTS" ]
++ ghcideOptions ?config
++ concat
[ ["--shake-profiling", path] | Just path <- [shakeProfiling ?config]
]
++ ["--verbose" | verbose ?config]
++ ["--ot-memory-profiling" | Just _ <- [otMemoryProfiling ?config]]
lspTestCaps =
fullCaps {_window = Just $ WindowClientCapabilities $ Just True}
conf =
defaultConfig
{ logStdErr = verbose ?config,
logMessages = verbose ?config,
logColor = False,
messageTimeout = timeoutLsp ?config
}
-- | Measurements collected from one benchmark run.
data BenchRun = BenchRun
  { startup :: !Seconds,        -- ^ time until the server finished its initial load
    runSetup :: !Seconds,       -- ^ duration of the benchmark's setup action
    runExperiment :: !Seconds,  -- ^ total wall-clock time of the sampling loop
    userWaits :: !Seconds,      -- ^ summed duration of the individual experiment calls
    delayedWork :: !Seconds,    -- ^ summed time waiting for the background build queue to drain
    success :: !Bool,           -- ^ 'False' when any sample returned an unexpected result
    maxResidency :: !Int,       -- ^ max residency in bytes from RTS -s output (0 when stats are missing)
    allocations :: !Int         -- ^ bytes allocated in the heap from RTS -s output (0 when stats are missing)
  }
-- | Result reported when a benchmark throws: all zero timings, success = False.
badRun :: BenchRun
badRun = BenchRun 0 0 0 0 0 False 0 0
-- | Block until the server sends a work-done progress "end" notification,
-- i.e. until the current round of background work reports completion.
waitForProgressDone :: Session ()
waitForProgressDone =
  void(skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification)
runBench ::
(?config :: Config) =>
(Session BenchRun -> IO BenchRun) ->
(HasPositions => Bench) ->
IO BenchRun
runBench runSess b = handleAny (\e -> print e >> return badRun)
$ runSess
$ do
doc <- openDoc exampleModulePath "haskell"
-- Setup the special positions used by the experiments
lastLine <- length . T.lines <$> documentContents doc
changeDoc doc [TextDocumentContentChangeEvent
{ _range = Just (Range (Position lastLine 0) (Position lastLine 0))
, _rangeLength = Nothing
, _text = T.unlines
[ "_hygienic = \"hygienic\""
, "_identifier = _hygienic"
]
}]
let
-- Points to a string in the target file,
-- convenient for hygienic edits
?hygienicP = Position lastLine 15
let
-- Points to the middle of an identifier,
-- convenient for requesting goto-def, hover and completions
?identifierP = Position (lastLine+1) 15
case b of
Bench{..} -> do
(startup, _) <- duration $ do
waitForProgressDone
-- wait again, as the progress is restarted once while loading the cradle
-- make an edit, to ensure this doesn't block
changeDoc doc [hygienicEdit]
waitForProgressDone
liftIO $ output $ "Running " <> name <> " benchmark"
(runSetup, userState) <- duration $ benchSetup doc
let loop !userWaits !delayedWork 0 = return $ Just (userWaits, delayedWork)
loop !userWaits !delayedWork n = do
(t, res) <- duration $ experiment userState doc
if not res
then return Nothing
else do
output (showDuration t)
-- Wait for the delayed actions to finish
waitId <- sendRequest (CustomClientMethod "test") WaitForShakeQueue
(td, resp) <- duration $ skipManyTill anyMessage $ responseForId waitId
case resp of
ResponseMessage{_result=Right Null} -> do
loop (userWaits+t) (delayedWork+td) (n -1)
_ ->
-- Assume a ghcide build lacking the WaitForShakeQueue command
loop (userWaits+t) delayedWork (n -1)
(runExperiment, result) <- duration $ loop 0 0 samples
let success = isJust result
(userWaits, delayedWork) = fromMaybe (0,0) result
gcStats = escapeSpaces (name <> ".benchmark-gcStats")
-- sleep to give ghcide a chance to GC
liftIO $ threadDelay 1100000
(maxResidency, allocations) <- liftIO $
ifM (doesFileExist gcStats)
(parseMaxResidencyAndAllocations <$> readFile gcStats)
(pure (0,0))
return BenchRun {..}
data SetupResult = SetupResult {
runBenchmarks :: [Bench] -> IO (),
-- | Path to the setup benchmark example
benchDir :: FilePath,
cleanUp :: IO ()
}
setup :: HasConfig => IO SetupResult
setup = do
alreadyExists <- doesDirectoryExist examplesPath
when alreadyExists $ removeDirectoryRecursive examplesPath
benchDir <- case example ?config of
UsePackage{..} -> return examplePath
GetPackage{..} -> do
let path = examplesPath </> package
package = exampleName <> "-" <> showVersion exampleVersion
case buildTool ?config of
Cabal -> do
callCommand $ "cabal get -v0 " <> package <> " -d " <> examplesPath
writeFile
(path </> "hie.yaml")
("cradle: {cabal: {component: " <> exampleName <> "}}")
-- Need this in case there is a parent cabal.project somewhere
writeFile
(path </> "cabal.project")
"packages: ."
writeFile
(path </> "cabal.project.local")
""
Stack -> do
callCommand $ "stack --silent unpack " <> package <> " --to " <> examplesPath
-- Generate the stack descriptor to match the one used to build ghcide
stack_yaml <- fromMaybe "stack.yaml" <$> getEnv "STACK_YAML"
stack_yaml_lines <- lines <$> readFile stack_yaml
writeFile (path </> stack_yaml)
(unlines $
"packages: [.]" :
[ l
| l <- stack_yaml_lines
, any (`isPrefixOf` l)
["resolver"
,"allow-newer"
,"compiler"]
]
)
writeFile
(path </> "hie.yaml")
("cradle: {stack: {component: " <> show (exampleName <> ":lib") <> "}}")
return path
whenJust (shakeProfiling ?config) $ createDirectoryIfMissing True
let cleanUp = case example ?config of
GetPackage{} -> removeDirectoryRecursive examplesPath
UsePackage{} -> return ()
runBenchmarks = runBenchmarksFun benchDir
return SetupResult{..}
--------------------------------------------------------------------------------------------
-- Parse the residency and allocations in RTS -s output
-- | Extract the "maximum residency" and "bytes allocated in the heap"
-- figures (in bytes) from @+RTS -s@ output.  Scans from the end of the
-- output; a figure whose line is absent is reported as -1.
parseMaxResidencyAndAllocations :: String -> (Int, Int)
parseMaxResidencyAndAllocations input =
    (statistic "maximum residency", statistic "bytes allocated in the heap")
  where
    lastLinesFirst = reverse (lines input)
    -- First word of the matching line, digits only (drops thousands separators).
    statistic label =
      case find (label `isInfixOf`) lastLinesFirst of
        Just l  -> read (filter isDigit (head (words l)))
        Nothing -> -1
-- | Replace every space with an underscore (used to build file names).
escapeSpaces :: String -> String
escapeSpaces = map substitute
  where
    substitute ' ' = '_'
    substitute c   = c
-- | Right-pad a string with spaces to exactly @n@ characters; raises
-- @error "pad"@ when the string is longer than @n@.
pad :: Int -> String -> String
pad n s = case s of
  []     -> replicate n ' '
  (c:cs) | n == 0    -> error "pad"
         | otherwise -> c : pad (n - 1) cs
-- | Render a byte count as whole mebibytes, e.g. @showMB 2097152 == "2MB"@.
showMB :: Int -> String
showMB bytes = show (bytes `div` (1024 * 1024)) ++ "MB"
| null | https://raw.githubusercontent.com/haskell/ghcide/3ef4ef99c4b9cde867d29180c32586947df64b9e/bench/lib/Experiments.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE ImpredicativeTypes #
| Experiments have access to these special positions:
- hygienicP points to a string in the target file, convenient for hygienic edits
- identifierP points to the middle of an identifier, convenient for goto-def, hover and completions
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
NOTE ghcide used to clear and reinstall the diagnostics here
new versions no longer do, but keep this logic around
to benchmark old versions sucessfully
-------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
output raw data as CSV
print a nice table
Setup the special positions used by the experiments
Points to a string in the target file,
convenient for hygienic edits
Points to the middle of an identifier,
convenient for requesting goto-def, hover and completions
wait again, as the progress is restarted once while loading the cradle
make an edit, to ensure this doesn't block
Wait for the delayed actions to finish
| Path to the setup benchmark example
Need this in case there is a parent cabal.project somewhere
Generate the stack descriptor to match the one used to build ghcide
------------------------------------------------------------------------------------------ | # LANGUAGE ExistentialQuantification #
# LANGUAGE ImplicitParams #
module Experiments
( Bench(..)
, BenchRun(..)
, Config(..)
, Verbosity(..)
, CabalStack(..)
, SetupResult(..)
, Example(..)
, experiments
, configP
, defConfig
, output
, setup
, runBench
, exampleToOptions
) where
import Control.Applicative.Combinators (skipManyTill)
import Control.Concurrent
import Control.Exception.Safe
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.Aeson (Value(Null))
import Data.Char (isDigit)
import Data.List
import Data.Maybe
import qualified Data.Text as T
import Data.Version
import Development.IDE.Plugin.Test
import Experiments.Types
import Language.Haskell.LSP.Test
import Language.Haskell.LSP.Types
import Language.Haskell.LSP.Types.Capabilities
import Numeric.Natural
import Options.Applicative
import System.Directory
import System.Environment.Blank (getEnv)
import System.FilePath ((</>), (<.>))
import System.Process
import System.Time.Extra
import Text.ParserCombinators.ReadP (readP_to_S)
hygienicEdit :: (?hygienicP :: Position) => TextDocumentContentChangeEvent
hygienicEdit =
TextDocumentContentChangeEvent
{ _range = Just (Range ?hygienicP ?hygienicP),
_rangeLength = Nothing,
_text = " "
}
breakingEdit :: (?identifierP :: Position) => TextDocumentContentChangeEvent
breakingEdit =
TextDocumentContentChangeEvent
{ _range = Just (Range ?identifierP ?identifierP),
_rangeLength = Nothing,
_text = "a"
}
type HasPositions = (?hygienicP :: Position, ?identifierP :: Position)
experiments :: [Bench]
experiments =
bench "hover" 10 $ \doc ->
isJust <$> getHover doc ?identifierP,
bench "edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
waitForProgressDone
return True,
bench "hover after edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
isJust <$> getHover doc ?identifierP,
bench "getDefinition" 10 $ \doc ->
not . null <$> getDefinitions doc ?identifierP,
bench "documentSymbols" 100 $
fmap (either (not . null) (not . null)) . getDocumentSymbols,
bench "documentSymbols after edit" 100 $ \doc -> do
changeDoc doc [hygienicEdit]
either (not . null) (not . null) <$> getDocumentSymbols doc,
bench "completions after edit" 10 $ \doc -> do
changeDoc doc [hygienicEdit]
not . null <$> getCompletions doc ?identifierP,
benchWithSetup
"code actions"
10
( \doc -> do
changeDoc doc [breakingEdit]
waitForProgressDone
return ?identifierP
)
( \p doc -> do
not . null <$> getCodeActions doc (Range p p)
),
benchWithSetup
"code actions after edit"
10
( \doc -> do
changeDoc doc [breakingEdit]
return ?identifierP
)
( \p doc -> do
changeDoc doc [hygienicEdit]
waitForProgressDone
diags <- getCurrentDiagnostics doc
when (null diags) $
whileM (null <$> waitForDiagnostics)
not . null <$> getCodeActions doc (Range p p)
)
]
exampleModulePath :: HasConfig => FilePath
exampleModulePath = exampleModule (example ?config)
examplesPath :: FilePath
examplesPath = "bench/example"
defConfig :: Config
Success defConfig = execParserPure defaultPrefs (info configP fullDesc) []
quiet, verbose :: Config -> Bool
verbose = (== All) . verbosity
quiet = (== Quiet) . verbosity
type HasConfig = (?config :: Config)
configP :: Parser Config
configP =
Config
<$> (flag' All (short 'v' <> long "verbose")
<|> flag' Quiet (short 'q' <> long "quiet")
<|> pure Normal
)
<*> optional (strOption (long "shake-profiling" <> metavar "PATH"))
<*> optional (strOption (long "ot-profiling" <> metavar "DIR" <> help "Enable OpenTelemetry and write eventlog for each benchmark in DIR"))
<*> strOption (long "csv" <> metavar "PATH" <> value "results.csv" <> showDefault)
<*> flag Cabal Stack (long "stack" <> help "Use stack (by default cabal is used)")
<*> many (strOption (long "ghcide-options" <> help "additional options for ghcide"))
<*> many (strOption (short 's' <> long "select" <> help "select which benchmarks to run"))
<*> optional (option auto (long "samples" <> metavar "NAT" <> help "override sampling count"))
<*> strOption (long "ghcide" <> metavar "PATH" <> help "path to ghcide" <> value "ghcide")
<*> option auto (long "timeout" <> value 60 <> help "timeout for waiting for a ghcide response")
<*> ( GetPackage <$> strOption (long "example-package-name" <> value "Cabal")
<*> moduleOption
<*> option versionP (long "example-package-version" <> value (makeVersion [3,2,0,0]))
<|>
UsePackage <$> strOption (long "example-path")
<*> moduleOption
)
where
moduleOption = strOption (long "example-module" <> metavar "PATH" <> value "Distribution/Simple.hs")
versionP :: ReadM Version
versionP = maybeReader $ extract . readP_to_S parseVersion
where
extract parses = listToMaybe [ res | (res,"") <- parses]
output :: (MonadIO m, HasConfig) => String -> m ()
output = if quiet?config then (\_ -> pure ()) else liftIO . putStrLn
type Experiment = TextDocumentIdentifier -> Session Bool
data Bench = forall setup.
Bench
{ name :: !String,
enabled :: !Bool,
samples :: !Natural,
benchSetup :: HasPositions => TextDocumentIdentifier -> Session setup,
experiment :: HasPositions => setup -> Experiment
}
select :: HasConfig => Bench -> Bool
select Bench {name, enabled} =
enabled && (null mm || name `elem` mm)
where
mm = matches ?config
benchWithSetup ::
String ->
Natural ->
(HasPositions => TextDocumentIdentifier -> Session p) ->
(HasPositions => p -> Experiment) ->
Bench
benchWithSetup name samples benchSetup experiment = Bench {..}
where
enabled = True
bench :: String -> Natural -> (HasPositions => Experiment) -> Bench
bench name defSamples userExperiment =
benchWithSetup name defSamples (const $ pure ()) experiment
where
experiment () = userExperiment
runBenchmarksFun :: HasConfig => FilePath -> [Bench] -> IO ()
runBenchmarksFun dir allBenchmarks = do
let benchmarks = [ b{samples = fromMaybe (samples b) (repetitions ?config) }
| b <- allBenchmarks
, select b ]
whenJust (otMemoryProfiling ?config) $ \eventlogDir ->
createDirectoryIfMissing True eventlogDir
results <- forM benchmarks $ \b@Bench{name} ->
let run = runSessionWithConfig conf (cmd name dir) lspTestCaps dir
in (b,) <$> runBench run b
let headers =
[ "name"
, "success"
, "samples"
, "startup"
, "setup"
, "userTime"
, "delayedTime"
, "totalTime"
, "maxResidency"
, "allocatedBytes"]
rows =
[ [ name,
show success,
show samples,
show startup,
show runSetup',
show userWaits,
show delayedWork,
show runExperiment,
show maxResidency,
show allocations
]
| (Bench {name, samples}, BenchRun {..}) <- results,
let runSetup' = if runSetup < 0.01 then 0 else runSetup
]
csv = unlines $ map (intercalate ", ") (headers : rows)
writeFile (outputCSV ?config) csv
let pads = map (maximum . map length) (transpose (headers : rowsHuman))
paddedHeaders = zipWith pad pads headers
outputRow = putStrLn . intercalate " | "
rowsHuman =
[ [ name,
show success,
show samples,
showDuration startup,
showDuration runSetup',
showDuration userWaits,
showDuration delayedWork,
showDuration runExperiment,
showMB maxResidency,
showMB allocations
]
| (Bench {name, samples}, BenchRun {..}) <- results,
let runSetup' = if runSetup < 0.01 then 0 else runSetup
]
outputRow paddedHeaders
outputRow $ (map . map) (const '-') paddedHeaders
forM_ rowsHuman $ \row -> outputRow $ zipWith pad pads row
where
cmd name dir =
unwords $
[ ghcide ?config,
"--lsp",
"--test",
"--cwd",
dir
]
++ case otMemoryProfiling ?config of
Just dir -> ["-l", "-ol" ++ (dir </> map (\c -> if c == ' ' then '-' else c) name <.> "eventlog")]
Nothing -> []
++ [ "-RTS" ]
++ ghcideOptions ?config
++ concat
[ ["--shake-profiling", path] | Just path <- [shakeProfiling ?config]
]
++ ["--verbose" | verbose ?config]
++ ["--ot-memory-profiling" | Just _ <- [otMemoryProfiling ?config]]
lspTestCaps =
fullCaps {_window = Just $ WindowClientCapabilities $ Just True}
conf =
defaultConfig
{ logStdErr = verbose ?config,
logMessages = verbose ?config,
logColor = False,
messageTimeout = timeoutLsp ?config
}
data BenchRun = BenchRun
{ startup :: !Seconds,
runSetup :: !Seconds,
runExperiment :: !Seconds,
userWaits :: !Seconds,
delayedWork :: !Seconds,
success :: !Bool,
maxResidency :: !Int,
allocations :: !Int
}
badRun :: BenchRun
badRun = BenchRun 0 0 0 0 0 False 0 0
waitForProgressDone :: Session ()
waitForProgressDone =
void(skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification)
runBench ::
(?config :: Config) =>
(Session BenchRun -> IO BenchRun) ->
(HasPositions => Bench) ->
IO BenchRun
runBench runSess b = handleAny (\e -> print e >> return badRun)
$ runSess
$ do
doc <- openDoc exampleModulePath "haskell"
lastLine <- length . T.lines <$> documentContents doc
changeDoc doc [TextDocumentContentChangeEvent
{ _range = Just (Range (Position lastLine 0) (Position lastLine 0))
, _rangeLength = Nothing
, _text = T.unlines
[ "_hygienic = \"hygienic\""
, "_identifier = _hygienic"
]
}]
let
?hygienicP = Position lastLine 15
let
?identifierP = Position (lastLine+1) 15
case b of
Bench{..} -> do
(startup, _) <- duration $ do
waitForProgressDone
changeDoc doc [hygienicEdit]
waitForProgressDone
liftIO $ output $ "Running " <> name <> " benchmark"
(runSetup, userState) <- duration $ benchSetup doc
let loop !userWaits !delayedWork 0 = return $ Just (userWaits, delayedWork)
loop !userWaits !delayedWork n = do
(t, res) <- duration $ experiment userState doc
if not res
then return Nothing
else do
output (showDuration t)
waitId <- sendRequest (CustomClientMethod "test") WaitForShakeQueue
(td, resp) <- duration $ skipManyTill anyMessage $ responseForId waitId
case resp of
ResponseMessage{_result=Right Null} -> do
loop (userWaits+t) (delayedWork+td) (n -1)
_ ->
Assume a ghcide build lacking the WaitForShakeQueue command
loop (userWaits+t) delayedWork (n -1)
(runExperiment, result) <- duration $ loop 0 0 samples
let success = isJust result
(userWaits, delayedWork) = fromMaybe (0,0) result
gcStats = escapeSpaces (name <> ".benchmark-gcStats")
sleep to give ghcide a chance to GC
liftIO $ threadDelay 1100000
(maxResidency, allocations) <- liftIO $
ifM (doesFileExist gcStats)
(parseMaxResidencyAndAllocations <$> readFile gcStats)
(pure (0,0))
return BenchRun {..}
data SetupResult = SetupResult {
runBenchmarks :: [Bench] -> IO (),
benchDir :: FilePath,
cleanUp :: IO ()
}
setup :: HasConfig => IO SetupResult
setup = do
alreadyExists <- doesDirectoryExist examplesPath
when alreadyExists $ removeDirectoryRecursive examplesPath
benchDir <- case example ?config of
UsePackage{..} -> return examplePath
GetPackage{..} -> do
let path = examplesPath </> package
package = exampleName <> "-" <> showVersion exampleVersion
case buildTool ?config of
Cabal -> do
callCommand $ "cabal get -v0 " <> package <> " -d " <> examplesPath
writeFile
(path </> "hie.yaml")
("cradle: {cabal: {component: " <> exampleName <> "}}")
writeFile
(path </> "cabal.project")
"packages: ."
writeFile
(path </> "cabal.project.local")
""
Stack -> do
callCommand $ "stack --silent unpack " <> package <> " --to " <> examplesPath
stack_yaml <- fromMaybe "stack.yaml" <$> getEnv "STACK_YAML"
stack_yaml_lines <- lines <$> readFile stack_yaml
writeFile (path </> stack_yaml)
(unlines $
"packages: [.]" :
[ l
| l <- stack_yaml_lines
, any (`isPrefixOf` l)
["resolver"
,"allow-newer"
,"compiler"]
]
)
writeFile
(path </> "hie.yaml")
("cradle: {stack: {component: " <> show (exampleName <> ":lib") <> "}}")
return path
whenJust (shakeProfiling ?config) $ createDirectoryIfMissing True
let cleanUp = case example ?config of
GetPackage{} -> removeDirectoryRecursive examplesPath
UsePackage{} -> return ()
runBenchmarks = runBenchmarksFun benchDir
return SetupResult{..}
Parse the residency and allocations in RTS -s output
parseMaxResidencyAndAllocations :: String -> (Int, Int)
parseMaxResidencyAndAllocations input =
(f "maximum residency", f "bytes allocated in the heap")
where
inps = reverse $ lines input
f label = case find (label `isInfixOf`) inps of
Just l -> read $ filter isDigit $ head $ words l
Nothing -> -1
escapeSpaces :: String -> String
escapeSpaces = map f
where
f ' ' = '_'
f x = x
pad :: Int -> String -> String
pad n [] = replicate n ' '
pad 0 _ = error "pad"
pad n (x:xx) = x : pad (n-1) xx
showMB :: Int -> String
showMB x = show (x `div` 2^(20::Int)) <> "MB"
|
1ee7feef213ddc182487f210d8a2ff2f9135e3ad7c1ed0ea2f7882fdd8bf5369 | loganallen/CmlControl | diff.mli | (********************************* Diff Module ********************************)
(******************************************************************************)
type file_data = {
file_path : string;
compressed : bool;
}
type diff_input = {
name : string;
old_file_data : file_data option;
new_file_data : file_data option;
}
[ diff ( file1 , ) ( , iscompressed2 ) ] prints a diff between
* two files that can be compressed or decompressed
* two files that can be compressed or decompressed *)
val diff : file_data -> file_data -> Odiff.diffs
(* prints diffs for all pairs of files in a list [lst] *)
val diff_mult : diff_input list -> unit
(* Convert an index into an associate list with file_data *)
val index_to_diff_index : bool -> Universal.index -> (string * file_data) list
Print the diff of two diff input lists
val diff_indexes : (string * file_data) list -> (string * file_data) list -> unit
precondition : cwd is cml repo root
val get_diff_current_index: unit -> (string * file_data) list
(* precondition: [abs_path_lst] holds the absolute paths from cml.
* Also, all the files are uncompressed *)
val diff_idx_current_files: string list -> (string * file_data) list
(* return the diff index of the cmt_idx for each file in [files] *)
val diff_idx_commit: Universal.index -> string list -> (string * file_data) list
| null | https://raw.githubusercontent.com/loganallen/CmlControl/24bf4616b36672bc12225b9bd93b1ae662733663/modules/diff.mli | ocaml | ******************************** Diff Module *******************************
****************************************************************************
prints diffs for all pairs of files in a list [lst]
Convert an index into an associate list with file_data
precondition: [abs_path_lst] holds the absolute paths from cml.
* Also, all the files are uncompressed
return the diff index of the cmt_idx for each file in [files] |
type file_data = {
file_path : string;
compressed : bool;
}
type diff_input = {
name : string;
old_file_data : file_data option;
new_file_data : file_data option;
}
[ diff ( file1 , ) ( , iscompressed2 ) ] prints a diff between
* two files that can be compressed or decompressed
* two files that can be compressed or decompressed *)
val diff : file_data -> file_data -> Odiff.diffs
val diff_mult : diff_input list -> unit
val index_to_diff_index : bool -> Universal.index -> (string * file_data) list
Print the diff of two diff input lists
val diff_indexes : (string * file_data) list -> (string * file_data) list -> unit
precondition : cwd is cml repo root
val get_diff_current_index: unit -> (string * file_data) list
val diff_idx_current_files: string list -> (string * file_data) list
val diff_idx_commit: Universal.index -> string list -> (string * file_data) list
|
1ff3fd65e4c599bb76901340fc1909003272755ea7d0224d2a1ac9c28bc14e9f | cram2/cram | package.lisp | Copyright ( c ) 2012 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Intelligent Autonomous Systems Group/
;;; Technische Universitaet Muenchen nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
(in-package :cl-user)
(defpackage cram-semantic-map
(:nicknames #:semantic-map #:sem-map)
(:use #:common-lisp #:cram-plan-occasions-events #:cram-occasions-events)
(:shadow get-semantic-map)
(:export get-semantic-map))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_knowrob/cram_semantic_map/src/package.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Technische Universitaet Muenchen nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE. | Copyright ( c ) 2012 , < >
* Neither the name of the Intelligent Autonomous Systems Group/
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :cl-user)
(defpackage cram-semantic-map
(:nicknames #:semantic-map #:sem-map)
(:use #:common-lisp #:cram-plan-occasions-events #:cram-occasions-events)
(:shadow get-semantic-map)
(:export get-semantic-map))
|
7b3945f5e9155426cacc9b4c38c827ed479592b32e82127d63086a901dde735a | mfoemmel/erlang-otp | CosNaming_NamingContextExt_CannotProceed.erl | %%------------------------------------------------------------
%%
%% Implementation stub file
%%
Target : CosNaming_NamingContextExt_CannotProceed
Source : /net / isildur / ldisk / daily_build / otp_prebuild_r13b02.2009 - 09 - 21_11 / otp_src_R13B02 / lib / orber / COSS / CosNaming / cos_naming_ext.idl
%% IC vsn: 4.2.22
%%
%% This file is automatically generated. DO NOT EDIT IT.
%%
%%------------------------------------------------------------
-module('CosNaming_NamingContextExt_CannotProceed').
-ic_compiled("4_2_22").
-include("CosNaming_NamingContextExt.hrl").
-export([tc/0,id/0,name/0]).
%% returns type code
tc() -> {tk_except,"IDL:omg.org/CosNaming/NamingContext/CannotProceed:1.0",
"CannotProceed",
[{"cxt",
{tk_objref,"IDL:omg.org/CosNaming/NamingContext:1.0",
"NamingContext"}},
{"rest_of_name",
{tk_sequence,
{tk_struct,"IDL:omg.org/CosNaming/NameComponent:1.0",
"NameComponent",
[{"id",{tk_string,0}},{"kind",{tk_string,0}}]},
0}}]}.
%% returns id
id() -> "IDL:CosNaming/NamingContextExt/CannotProceed:1.0".
%% returns name
name() -> "CosNaming_NamingContextExt_CannotProceed".
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_CannotProceed.erl | erlang | ------------------------------------------------------------
Implementation stub file
IC vsn: 4.2.22
This file is automatically generated. DO NOT EDIT IT.
------------------------------------------------------------
returns type code
returns id
returns name | Target : CosNaming_NamingContextExt_CannotProceed
Source : /net / isildur / ldisk / daily_build / otp_prebuild_r13b02.2009 - 09 - 21_11 / otp_src_R13B02 / lib / orber / COSS / CosNaming / cos_naming_ext.idl
-module('CosNaming_NamingContextExt_CannotProceed').
-ic_compiled("4_2_22").
-include("CosNaming_NamingContextExt.hrl").
-export([tc/0,id/0,name/0]).
tc() -> {tk_except,"IDL:omg.org/CosNaming/NamingContext/CannotProceed:1.0",
"CannotProceed",
[{"cxt",
{tk_objref,"IDL:omg.org/CosNaming/NamingContext:1.0",
"NamingContext"}},
{"rest_of_name",
{tk_sequence,
{tk_struct,"IDL:omg.org/CosNaming/NameComponent:1.0",
"NameComponent",
[{"id",{tk_string,0}},{"kind",{tk_string,0}}]},
0}}]}.
id() -> "IDL:CosNaming/NamingContextExt/CannotProceed:1.0".
name() -> "CosNaming_NamingContextExt_CannotProceed".
|
72ed3c2e7ca6121eb0842146dc83ec82faf3bcfa1849236c378fde9195d66208 | SonyCSLParis/fcg-server | grammars.lisp | Copyright 2022 - present Sony Computer Science Laboratories Paris
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;; -2.0
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;=========================================================================
(in-package :fcg-server)
;; This file contains prototype code that was developed for research purposes and should not be used in production environments.
;; No warranties are provided.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
Loading grammars for FCG interactive ; ;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Loading the grammars into FCG
(fcg:load-demo-grammar)
;; Add additional grammars here:
;; e.g. (ql:quickload :dutch-vp) | null | https://raw.githubusercontent.com/SonyCSLParis/fcg-server/8c5f1824454477ee2c489013e021a8d2c0ae150c/fcg-interactive/grammars.lisp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================
This file contains prototype code that was developed for research purposes and should not be used in production environments.
No warranties are provided.
;;
;
;;
Add additional grammars here:
e.g. (ql:quickload :dutch-vp) | Copyright 2022 - present Sony Computer Science Laboratories Paris
distributed under the License is distributed on an " AS IS " BASIS ,
(in-package :fcg-server)
Loading the grammars into FCG
(fcg:load-demo-grammar)
|
a4be0f56cab38ca16fc61ec2cf943b915bef38cb2518e2903be432703b831756 | kmi/irs | wont-load.lisp | Mode : Lisp ; Package :
Author :
The Open University
(in-package "OCML")
(in-ontology persons-and-organizations)
(def-class presentation ()
((presentation-title :type string)
(presentation-topic)
(speaker)
(presentation-abstract :type string)))
(def-class statistical-software-technology (software-technology))
(def-class bayesian-software-technology (statistical-software-technology))
(def-class head-of-organisation (manager))
(def-class consultant (affiliated-person))
(def-class url ()
((prefix) (host-name) (location)))
(def-class minister (politician)
((portfolio) (areas-of-interest)))
(def-class software-status ())
(def-class operating-system (software-technology))
(def-class senior-manager (manager))
(def-class business-analyst (employee))
(def-class drawing-technology (multimedia-technology))
(def-class higher-educational-course (course)
((host-faculty)))
(def-class open-university-course (higher-educational-course)
((host-educational-organization :default-value open-university)))
(def-class senior-lecturer (academic))
(def-class course ()
((course-name) (host-educational-organization) (host-department)))
(def-class research-institute (non-profit-organization))
(def-instance course-t171 open-university-course
((course-name "you, your computer and the net")
(host-department department-of-telematics)
(host-faculty faculty-of-technology)))
(def-instance department-of-telematics academic-unit
((part-of faculty-of-technology)))
(def-instance howells minister
((portfolio lifelong-learning)
(constituency pontypridd)
(areas-of-interest learning-age university-for-industry)
(biography "Dr Howells entered parliament in 1989 and is MP for Pontypridd. In
opposition, he has spoken for Development and Cooperation, Foreign and
Commonwealth Affairs, Home Affairs and Trade and Industry. His current
government position in Lifelong Learning at the DfEE has seen the
publication of a Green Paper on the Learning Age and the planning
prospectus for the University for Industry.")))
(def-instance daniel head-of-organisation
((full-name "Sir John Daniel")
(works-at open-university)
(has-affiliation open-university)
(biography "Sir John Daniel has been Vice-Chancellor since 1990. He holds dual UK and
Canadian nationality, having spent most of his career in Canada after being
born and educated in the UK. Secondary education at Christ's Hospital led
him to study at Oxford where he obtained a First in Metallurgy. He then
spent four years at the University of Paris completing a doctorate on the
mechanical behaviour of uranium crystals. During this period he spent two
summers working in industrial laboratories in the USA, where he met his
wife.
His first academic appointment was at the Ecole Polytechnique of the
Universite de Montreal in 1969. From there successive moves took him to the
Tele-université of the Université du Quebec 1973-77 (Director of Studies);
Concordia University 1980 - 84
and Laurentian University 1984 - 90 ( President ) . He returned to
Britain in 1990 after spending his last year in Canada at the National
Defence College.
In 1971 he embarked on part-time study for an MA in Educational Technology
at what is now Concordia University, Montreal. The internship required by
this programme, which he spent at the Open University in 1972, inspired him
to re-orient his career towards distance education. He finally completed the
MA programme in 1990 after preparing a thesis on the implications of new
technologies for world's ten largest open universities. In the intervening
period he had authored nearly 150 publications on a variety of scientific,
educational and management subjects.
Sir John was president of the International Council for Distance Education
from 1982-85 and Chairman of the Planning Committee for the Commonwealth of
Learning in 1987-88. He holds honorary degrees from universities on four
continents and was knighted for services to higher education in 1994.
")
(email-address "")
(web-address "")))
(def-instance TecInno corporation
((location germany)
(affiliated-people wess traphoner)
(alternate-names )
(part-of )))
(def-instance technical-university-of-kosice higher-educational-organization
((support-units )
(academic-units )
(location slovakia)
(affiliated-people hatala)
(alternate-names )
(part-of )))
(def-instance german-national-ai-institute research-institute
((part-of )
(alternate-names )
(affiliated-people hinkelman bernardi)
(location )))
(def-instance university-of-pavia higher-educational-organization
((support-units )
(academic-units )
(location Pavia)
(affiliated-people riva)
(alternate-names )
(part-of )))
(def-instance alpha-status software-status)
(def-instance beta-status software-status)
(def-instance finished-status software-status)
(def-instance microsoft corporation
((location seattle)
(affiliated-people )
(alternate-names )
(part-of )))
(def-instance Boulder-colorado location)
(def-instance Boulder-colorado location)
(def-instance Boulder-colorado location)
(def-instance University-of-Colorado higher-educational-organization
((web-address university-of-colorado-address)
(part-of nil )
(alternate-names nil )
(affiliated-people lewis)
(location Boulder-colorado)
(academic-units nil )
(support-units nil )))
(def-instance lewis professor
((works-in nil )
(web-address nil )
(has-affiliation nil )
(full-name nil )
(biography nil )
(email-address nil )
(works-at University-of-Colorado)))
(def-instance lewis professor
((works-in nil )
(web-address nil )
(has-affiliation nil )
(full-name nil )
(biography nil )
(email-address nil )
(works-at University-of-Colorado)))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/domains/persons-and-organizations/wont-load.lisp | lisp | Package :
|
Author :
The Open University
(in-package "OCML")
(in-ontology persons-and-organizations)
(def-class presentation ()
((presentation-title :type string)
(presentation-topic)
(speaker)
(presentation-abstract :type string)))
(def-class statistical-software-technology (software-technology))
(def-class bayesian-software-technology (statistical-software-technology))
(def-class head-of-organisation (manager))
(def-class consultant (affiliated-person))
(def-class url ()
((prefix) (host-name) (location)))
(def-class minister (politician)
((portfolio) (areas-of-interest)))
(def-class software-status ())
(def-class operating-system (software-technology))
(def-class senior-manager (manager))
(def-class business-analyst (employee))
(def-class drawing-technology (multimedia-technology))
(def-class higher-educational-course (course)
((host-faculty)))
(def-class open-university-course (higher-educational-course)
((host-educational-organization :default-value open-university)))
(def-class senior-lecturer (academic))
(def-class course ()
((course-name) (host-educational-organization) (host-department)))
(def-class research-institute (non-profit-organization))
(def-instance course-t171 open-university-course
((course-name "you, your computer and the net")
(host-department department-of-telematics)
(host-faculty faculty-of-technology)))
(def-instance department-of-telematics academic-unit
((part-of faculty-of-technology)))
(def-instance howells minister
((portfolio lifelong-learning)
(constituency pontypridd)
(areas-of-interest learning-age university-for-industry)
(biography "Dr Howells entered parliament in 1989 and is MP for Pontypridd. In
opposition, he has spoken for Development and Cooperation, Foreign and
Commonwealth Affairs, Home Affairs and Trade and Industry. His current
government position in Lifelong Learning at the DfEE has seen the
publication of a Green Paper on the Learning Age and the planning
prospectus for the University for Industry.")))
(def-instance daniel head-of-organisation
((full-name "Sir John Daniel")
(works-at open-university)
(has-affiliation open-university)
(biography "Sir John Daniel has been Vice-Chancellor since 1990. He holds dual UK and
Canadian nationality, having spent most of his career in Canada after being
born and educated in the UK. Secondary education at Christ's Hospital led
him to study at Oxford where he obtained a First in Metallurgy. He then
spent four years at the University of Paris completing a doctorate on the
mechanical behaviour of uranium crystals. During this period he spent two
summers working in industrial laboratories in the USA, where he met his
wife.
His first academic appointment was at the Ecole Polytechnique of the
Universite de Montreal in 1969. From there successive moves took him to the
Concordia University 1980 - 84
and Laurentian University 1984 - 90 ( President ) . He returned to
Britain in 1990 after spending his last year in Canada at the National
Defence College.
In 1971 he embarked on part-time study for an MA in Educational Technology
at what is now Concordia University, Montreal. The internship required by
this programme, which he spent at the Open University in 1972, inspired him
to re-orient his career towards distance education. He finally completed the
MA programme in 1990 after preparing a thesis on the implications of new
technologies for world's ten largest open universities. In the intervening
period he had authored nearly 150 publications on a variety of scientific,
educational and management subjects.
Sir John was president of the International Council for Distance Education
from 1982-85 and Chairman of the Planning Committee for the Commonwealth of
Learning in 1987-88. He holds honorary degrees from universities on four
continents and was knighted for services to higher education in 1994.
")
(email-address "")
(web-address "")))
(def-instance TecInno corporation
((location germany)
(affiliated-people wess traphoner)
(alternate-names )
(part-of )))
(def-instance technical-university-of-kosice higher-educational-organization
((support-units )
(academic-units )
(location slovakia)
(affiliated-people hatala)
(alternate-names )
(part-of )))
(def-instance german-national-ai-institute research-institute
((part-of )
(alternate-names )
(affiliated-people hinkelman bernardi)
(location )))
(def-instance university-of-pavia higher-educational-organization
((support-units )
(academic-units )
(location Pavia)
(affiliated-people riva)
(alternate-names )
(part-of )))
(def-instance alpha-status software-status)
(def-instance beta-status software-status)
(def-instance finished-status software-status)
(def-instance microsoft corporation
((location seattle)
(affiliated-people )
(alternate-names )
(part-of )))
(def-instance Boulder-colorado location)
(def-instance Boulder-colorado location)
(def-instance Boulder-colorado location)
(def-instance University-of-Colorado higher-educational-organization
((web-address university-of-colorado-address)
(part-of nil )
(alternate-names nil )
(affiliated-people lewis)
(location Boulder-colorado)
(academic-units nil )
(support-units nil )))
(def-instance lewis professor
((works-in nil )
(web-address nil )
(has-affiliation nil )
(full-name nil )
(biography nil )
(email-address nil )
(works-at University-of-Colorado)))
(def-instance lewis professor
((works-in nil )
(web-address nil )
(has-affiliation nil )
(full-name nil )
(biography nil )
(email-address nil )
(works-at University-of-Colorado)))
|
9ee244383d7e936b5030ab7a4c34806f609a7260700d41e927be7c636da09c72 | nasa/pvslib | patch-20220512-exprjudgements.lisp | (in-package :pvs)
(defmethod typecheck* ((decl expr-judgement) expected kind arguments)
(declare (ignore expected kind arguments))
;; We typecheck copies of the declared-type and expr - enough to tell whether we
need to change to an application - judgement .
(let* ((ctype (unless (forall-expr? (expr decl))
(let ((*generate-tccs* 'none)
(*no-conversions-allowed* t)
(dtype (copy-all (declared-type decl))))
(copy-lex (declared-type decl) dtype)
(typecheck* dtype nil nil nil))))
(cexpr (when ctype
(let ((*generate-tccs* 'none)
(*no-conversions-allowed* t)
(cex (copy-all (expr decl))))
(copy-lex (expr decl) cex)
(typecheck* cex ctype nil nil))))
(mexpr (and cexpr (or (from-macro cexpr) cexpr))))
;; expr-judgement basically just has an expr and declared-type may want
;; to treat as appl-judgement, with name and formals, in the special
;; case of application form, e.g., f(x, y)(z), where all the arguments
;; are distinct variables.
(if (and (typep mexpr '(and application (not infix-application)))
(let ((args-lists (arguments* mexpr)))
(and (every #'(lambda (args) (every #'variable? args))
args-lists)
(not (duplicates? (apply #'append args-lists)
:test #'same-declaration)))))
(change-expr-judgement-to-application-judgement decl)
(typecheck-expr-judgement decl))))
(defun typecheck-expr-judgement (decl)
"Typechecks the expr-judgement decl, determined not to be an application-judgement.
Note that if it is a forall-expr, it is treated specially; e.g.,
FORALL (x: real | x > 1): x * x HAS_TYPE {y : real | y > x}
in a way, a HAS_TYPE b is boolean, but it's not a valid expr."
(let ((*generate-tccs* 'none))
(cond ((forall-expr? (expr decl))
;; Note that it is not really a forall expr, as it is not boolean
(typecheck* (bindings (expr decl)) nil nil nil)
(let ((*bound-variables* (append (bindings (expr decl)) *bound-variables*)))
(setf (type decl) (typecheck* (declared-type decl) nil nil nil))
(typecheck* (expression (expr decl)) (type decl) nil nil)))
(t (setf (type decl) (typecheck* (declared-type decl) nil nil nil))
(typecheck* (expr decl) (type decl) nil nil))))
Not an application - judgement , but has freevars
;; Get the freevars list, and create untyped-bind-decls
;; Append to the beginning of bindings if expr is a forall-expr
;; Set (formals decl) to this list
;; Then retypecheck expr under the new bindings
(let* ((*no-expected* t)
;; uform is not a valid forall expr, but this gets the
;; expr and type under the same bindings
(lform (if (forall-expr? (expr decl))
(copy (expr decl)
:expression (list (expression (expr decl)) (type decl)))
(list (expr decl) (type decl))))
(uform (universal-closure lform))
(*no-conversions-allowed* t)
(*compatible-pred-reason*
(acons (if (forall-expr? uform)
(car (expression uform))
(car uform))
"judgement" *compatible-pred-reason*))
(*bound-variables* (when (forall-expr? uform) (bindings uform))))
(if (forall-expr? uform)
(let* ((*bound-variables* (bindings uform)))
(assert (listp (expression uform)))
(set-type (car (expression uform)) (cadr (expression uform))))
(set-type (car uform) (cadr uform)))
(setf (closed-form decl) uform)
(cond ((and (expr-judgement? decl)
(expr-judgement-useless? (closed-form decl)))
(useless-judgement-warning decl))
(t (when (formals-sans-usings (current-theory))
(generic-judgement-warning decl))
(add-judgement-decl decl)))))
| null | https://raw.githubusercontent.com/nasa/pvslib/e7cdff5f70cc32cf7e440bba8b24b5e28bd3e0b1/pvs-patches/patch-20220512-exprjudgements.lisp | lisp | We typecheck copies of the declared-type and expr - enough to tell whether we
expr-judgement basically just has an expr and declared-type may want
to treat as appl-judgement, with name and formals, in the special
case of application form, e.g., f(x, y)(z), where all the arguments
are distinct variables.
e.g.,
Note that it is not really a forall expr, as it is not boolean
Get the freevars list, and create untyped-bind-decls
Append to the beginning of bindings if expr is a forall-expr
Set (formals decl) to this list
Then retypecheck expr under the new bindings
uform is not a valid forall expr, but this gets the
expr and type under the same bindings
| (in-package :pvs)
(defmethod typecheck* ((decl expr-judgement) expected kind arguments)
(declare (ignore expected kind arguments))
need to change to an application - judgement .
(let* ((ctype (unless (forall-expr? (expr decl))
(let ((*generate-tccs* 'none)
(*no-conversions-allowed* t)
(dtype (copy-all (declared-type decl))))
(copy-lex (declared-type decl) dtype)
(typecheck* dtype nil nil nil))))
(cexpr (when ctype
(let ((*generate-tccs* 'none)
(*no-conversions-allowed* t)
(cex (copy-all (expr decl))))
(copy-lex (expr decl) cex)
(typecheck* cex ctype nil nil))))
(mexpr (and cexpr (or (from-macro cexpr) cexpr))))
(if (and (typep mexpr '(and application (not infix-application)))
(let ((args-lists (arguments* mexpr)))
(and (every #'(lambda (args) (every #'variable? args))
args-lists)
(not (duplicates? (apply #'append args-lists)
:test #'same-declaration)))))
(change-expr-judgement-to-application-judgement decl)
(typecheck-expr-judgement decl))))
(defun typecheck-expr-judgement (decl)
"Typechecks the expr-judgement decl, determined not to be an application-judgement.
FORALL (x: real | x > 1): x * x HAS_TYPE {y : real | y > x}
in a way, a HAS_TYPE b is boolean, but it's not a valid expr."
(let ((*generate-tccs* 'none))
(cond ((forall-expr? (expr decl))
(typecheck* (bindings (expr decl)) nil nil nil)
(let ((*bound-variables* (append (bindings (expr decl)) *bound-variables*)))
(setf (type decl) (typecheck* (declared-type decl) nil nil nil))
(typecheck* (expression (expr decl)) (type decl) nil nil)))
(t (setf (type decl) (typecheck* (declared-type decl) nil nil nil))
(typecheck* (expr decl) (type decl) nil nil))))
Not an application - judgement , but has freevars
(let* ((*no-expected* t)
(lform (if (forall-expr? (expr decl))
(copy (expr decl)
:expression (list (expression (expr decl)) (type decl)))
(list (expr decl) (type decl))))
(uform (universal-closure lform))
(*no-conversions-allowed* t)
(*compatible-pred-reason*
(acons (if (forall-expr? uform)
(car (expression uform))
(car uform))
"judgement" *compatible-pred-reason*))
(*bound-variables* (when (forall-expr? uform) (bindings uform))))
(if (forall-expr? uform)
(let* ((*bound-variables* (bindings uform)))
(assert (listp (expression uform)))
(set-type (car (expression uform)) (cadr (expression uform))))
(set-type (car uform) (cadr uform)))
(setf (closed-form decl) uform)
(cond ((and (expr-judgement? decl)
(expr-judgement-useless? (closed-form decl)))
(useless-judgement-warning decl))
(t (when (formals-sans-usings (current-theory))
(generic-judgement-warning decl))
(add-judgement-decl decl)))))
|
466fcdee2522c04d735c484cee0d4543d9cb658f591162d5f12f837c5c0168a6 | vyzo/gerbil | core$_more-syntax-sugar___rt.scm | (declare (block) (standard-bindings) (extended-bindings))
(begin
(load-module "gerbil/core$_expander-runtime___rt")
(load-module "gerbil/core$_syntax-case___rt")
(load-module "gerbil/core$_syntax-sugar___rt"))
| null | https://raw.githubusercontent.com/vyzo/gerbil/17fbcb95a8302c0de3f88380be1a3eb6fe891b95/src/bootstrap/gerbil/core%24_more-syntax-sugar___rt.scm | scheme | (declare (block) (standard-bindings) (extended-bindings))
(begin
(load-module "gerbil/core$_expander-runtime___rt")
(load-module "gerbil/core$_syntax-case___rt")
(load-module "gerbil/core$_syntax-sugar___rt"))
|
|
a9e48ce44dc546ba377bcf857de1862a0da6afc5ed22b5583fbab3d305ac7416 | albertoruiz/easyVision | ellipses2.hs | import EasyVision hiding (c1,c2)
import Control.Arrow((***),(&&&))
import Control.Monad(when)
import Data.Colour.Names(red,yellow,orange)
import Graphics.UI.GLUT(lineWidth,($=),clear,ClearBuffer (DepthBuffer),depthFunc,ComparisonFunction (Less),
renderPrimitive,PrimitiveMode (Points),vertex,pointSize)
import Util.Ellipses(estimateConicRaw,InfoEllipse(..),analyzeEllipse,conicPoints,tangentEllipses)
import Data.List(sortBy)
import Data.Maybe(isJust)
import Data.Complex(realPart,Complex((:+)),magnitude)
import Util.Misc(Mat,mt,diagl,degree,debug)
import Numeric.LinearAlgebra((<>),ident,trans,inv,toList,takeDiag,fromComplex,toColumns,eig,toRows,fromList,(<.>),complex,NormType (PNorm2),pnorm)
import Util.Homogeneous(adjustRectifier,inHomog,normatdet)
import Vision.Camera(rectifierFromCircularPoint,imagOfCircPt,cameraFromHomogZ0,focalFromCircularPoint,circularConsistency,sepCam)
import Text.Printf(printf)
import Numeric.GSL.Minimization
main = run $ camera ~> grayscale
>>= wcontours id ~> (id *** contSel)
>>= contourMonitor "all contours" fst (setColor' red) snd
~> findEllipses
>>= contourMonitor "detected ellipses" fst (lineWidth $= 3 >> setColor' yellow) snd
~> analyzeEllipses
>>= contourMonitor "model conics" fst (lineWidth $= 3 >> setColor' orange) (map (Closed . conicPoints 50) . snd)
~> computeRectifier
>>= showThings
>>= observe "rectified" (\(im,(_,h)) -> warp zeroP (Size 600 600) h im)
>>= timeMonitor
----------------------------------------------------------------------
showThings :: IO (ImageGray, ([InfoEllipse], Mat)) -> IO (IO (ImageGray, ([InfoEllipse], Mat)))
showThings c = do
m <- monitor "misc" (mpSize 10) sh c
depthFunc $= Just Less
return m
where
sh (img,(es,rec)) = do
clear [DepthBuffer]
drawImage' img
clear [DepthBuffer]
pointCoordinates (size img)
when (length es >= 2) $ do
let [e1,e2] = take 2 es
[c1,c2] = map conicMatrix [e1,e2]
setColor' orange
mapM_ shLine $ map (map realPart) $ tangentEllipses c1 c2
invariant of two conics
setColor' yellow
pointSize $= 5
let vs = map (fst . fromComplex) . toColumns . snd . eig $ inv c1 <> c2
vsl = map (fst . fromComplex) . toColumns . snd . eig $ c1 <> inv c2
_ <- renderPrimitive Points (mapM (vertex . toList. inHomog) vs)
mapM_ (shLine.toList) vsl
let okrec = diagl[-1,1,1] <> rec
let mbcam = cameraFromHomogZ0 Nothing (inv okrec)
Just cam = mbcam
elliprec = map (f.conicMatrix) es
where f m = analyzeEllipse $ a <> m <> trans a
a = mt okrec
g InfoEllipse {conicCenter = (x,y), conicSizes = (r,_)} = sphere x y (r/2) (r/2)
when (isJust mbcam) $ do
let Just ij = imagOfCircPt e1 e2
[f1',f2',_] = toList . takeDiag . fst . sepCam $ cam
setColor' red
text2D 0.9 0.6 $ printf "f = %.2f (%.2f, %.2f)" (focalFromCircularPoint ij) f1' f2'
text2D 0.9 0.6 $ printf "f = %.2f" $ focalFromCircularPoint ij
text2D 0.9 0.5 $ printf "ang = %.1f" $ abs ((acos $ circularConsistency ij)/degree - 90)
clear [DepthBuffer]
cameraView cam (4/3) 0.1 100
mapM_ g elliprec
----------------------------------------------------------------------
findEllipses :: (c, [Polyline]) -> (c, [Polyline])
findEllipses = (id *** sortBy (compare `on` negate . area) . filter (isEllipse 5))
analyzeEllipses :: (c, [Polyline]) -> (c, [InfoEllipse])
analyzeEllipses = (id *** map (analyzeEllipse . estimateConicRaw . polyPts))
computeRectifier :: (c, [InfoEllipse]) -> (c, ([InfoEllipse],Mat))
computeRectifier = (id *** (id &&& rectifierFromCircles) )
----------------------------------------------------------------------
rectifierFromCircles :: [InfoEllipse] -> Mat
rectifierFromCircles [] = ident 3
rectifierFromCircles [_] = ident 3
rectifierFromCircles [e1,e2] = rectif
where
mbij = imagOfCircPt e1 e2
Just ij = mbij
recraw = rectifierFromCircularPoint ij
[(mx,my),(mx2,my2)] = map conicCenter [e1,e2]
rec = adjustRectifier recraw [mx,my] [mx2,my2]
rectif = if isJust mbij then rec else ident 3
-- provisional
rectifierFromCircles es = rectifierFromManyCircles es
rectifierFromManyCircles es@(e1:e2:_) = rec
where
mbij = imagOfCircPt e1 e2
Just ij = mbij
recraw = rectifierFromCircularPoint (f ij)
[(mx,my),(mx2,my2)] = map conicCenter [e1,e2]
rec = adjustRectifier recraw [mx,my] [mx2,my2]
rectif = if isJust mbij then rec else ident 3
f = improveCirc es
improveCirc es (rx:+ix,ry:+iy) = (rx':+ix',ry':+iy') where
[rx',ix',ry',iy'] = fst $ debug "optim" g $ minimize NMSimplex2 1e-6 600 (replicate 4 0.1) cost [rx,ix,ry,iy]
cs = map (normatdet.conicMatrix) es
cost ij = sum $ map (quality ij) cs
g = (take 3.toList.head&&&take 3.toList.last).toRows.snd
quality [rx,ix,ry,iy] c = magnitude (p <> complex c <.> p) / np
where
p = fromList [rx:+ix,ry:+iy,1]
np = pnorm PNorm2 p **2
quality' [rx,ix,ry,iy] c = eccentricity (mt t <> c <> inv t)
where
t = rectifierFromCircularPoint (rx:+ix,ry:+iy)
eccentricity con = (d1-d2)/d1 where InfoEllipse {conicSizes = (d1,d2)} = analyzeEllipse con
| null | https://raw.githubusercontent.com/albertoruiz/easyVision/26bb2efaa676c902cecb12047560a09377a969f2/projects/old/pose/ellipses2.hs | haskell | --------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
provisional | import EasyVision hiding (c1,c2)
import Control.Arrow((***),(&&&))
import Control.Monad(when)
import Data.Colour.Names(red,yellow,orange)
import Graphics.UI.GLUT(lineWidth,($=),clear,ClearBuffer (DepthBuffer),depthFunc,ComparisonFunction (Less),
renderPrimitive,PrimitiveMode (Points),vertex,pointSize)
import Util.Ellipses(estimateConicRaw,InfoEllipse(..),analyzeEllipse,conicPoints,tangentEllipses)
import Data.List(sortBy)
import Data.Maybe(isJust)
import Data.Complex(realPart,Complex((:+)),magnitude)
import Util.Misc(Mat,mt,diagl,degree,debug)
import Numeric.LinearAlgebra((<>),ident,trans,inv,toList,takeDiag,fromComplex,toColumns,eig,toRows,fromList,(<.>),complex,NormType (PNorm2),pnorm)
import Util.Homogeneous(adjustRectifier,inHomog,normatdet)
import Vision.Camera(rectifierFromCircularPoint,imagOfCircPt,cameraFromHomogZ0,focalFromCircularPoint,circularConsistency,sepCam)
import Text.Printf(printf)
import Numeric.GSL.Minimization
main = run $ camera ~> grayscale
>>= wcontours id ~> (id *** contSel)
>>= contourMonitor "all contours" fst (setColor' red) snd
~> findEllipses
>>= contourMonitor "detected ellipses" fst (lineWidth $= 3 >> setColor' yellow) snd
~> analyzeEllipses
>>= contourMonitor "model conics" fst (lineWidth $= 3 >> setColor' orange) (map (Closed . conicPoints 50) . snd)
~> computeRectifier
>>= showThings
>>= observe "rectified" (\(im,(_,h)) -> warp zeroP (Size 600 600) h im)
>>= timeMonitor
showThings :: IO (ImageGray, ([InfoEllipse], Mat)) -> IO (IO (ImageGray, ([InfoEllipse], Mat)))
showThings c = do
m <- monitor "misc" (mpSize 10) sh c
depthFunc $= Just Less
return m
where
sh (img,(es,rec)) = do
clear [DepthBuffer]
drawImage' img
clear [DepthBuffer]
pointCoordinates (size img)
when (length es >= 2) $ do
let [e1,e2] = take 2 es
[c1,c2] = map conicMatrix [e1,e2]
setColor' orange
mapM_ shLine $ map (map realPart) $ tangentEllipses c1 c2
invariant of two conics
setColor' yellow
pointSize $= 5
let vs = map (fst . fromComplex) . toColumns . snd . eig $ inv c1 <> c2
vsl = map (fst . fromComplex) . toColumns . snd . eig $ c1 <> inv c2
_ <- renderPrimitive Points (mapM (vertex . toList. inHomog) vs)
mapM_ (shLine.toList) vsl
let okrec = diagl[-1,1,1] <> rec
let mbcam = cameraFromHomogZ0 Nothing (inv okrec)
Just cam = mbcam
elliprec = map (f.conicMatrix) es
where f m = analyzeEllipse $ a <> m <> trans a
a = mt okrec
g InfoEllipse {conicCenter = (x,y), conicSizes = (r,_)} = sphere x y (r/2) (r/2)
when (isJust mbcam) $ do
let Just ij = imagOfCircPt e1 e2
[f1',f2',_] = toList . takeDiag . fst . sepCam $ cam
setColor' red
text2D 0.9 0.6 $ printf "f = %.2f (%.2f, %.2f)" (focalFromCircularPoint ij) f1' f2'
text2D 0.9 0.6 $ printf "f = %.2f" $ focalFromCircularPoint ij
text2D 0.9 0.5 $ printf "ang = %.1f" $ abs ((acos $ circularConsistency ij)/degree - 90)
clear [DepthBuffer]
cameraView cam (4/3) 0.1 100
mapM_ g elliprec
findEllipses :: (c, [Polyline]) -> (c, [Polyline])
findEllipses = (id *** sortBy (compare `on` negate . area) . filter (isEllipse 5))
analyzeEllipses :: (c, [Polyline]) -> (c, [InfoEllipse])
analyzeEllipses = (id *** map (analyzeEllipse . estimateConicRaw . polyPts))
computeRectifier :: (c, [InfoEllipse]) -> (c, ([InfoEllipse],Mat))
computeRectifier = (id *** (id &&& rectifierFromCircles) )
rectifierFromCircles :: [InfoEllipse] -> Mat
rectifierFromCircles [] = ident 3
rectifierFromCircles [_] = ident 3
rectifierFromCircles [e1,e2] = rectif
where
mbij = imagOfCircPt e1 e2
Just ij = mbij
recraw = rectifierFromCircularPoint ij
[(mx,my),(mx2,my2)] = map conicCenter [e1,e2]
rec = adjustRectifier recraw [mx,my] [mx2,my2]
rectif = if isJust mbij then rec else ident 3
rectifierFromCircles es = rectifierFromManyCircles es
rectifierFromManyCircles es@(e1:e2:_) = rec
where
mbij = imagOfCircPt e1 e2
Just ij = mbij
recraw = rectifierFromCircularPoint (f ij)
[(mx,my),(mx2,my2)] = map conicCenter [e1,e2]
rec = adjustRectifier recraw [mx,my] [mx2,my2]
rectif = if isJust mbij then rec else ident 3
f = improveCirc es
improveCirc es (rx:+ix,ry:+iy) = (rx':+ix',ry':+iy') where
[rx',ix',ry',iy'] = fst $ debug "optim" g $ minimize NMSimplex2 1e-6 600 (replicate 4 0.1) cost [rx,ix,ry,iy]
cs = map (normatdet.conicMatrix) es
cost ij = sum $ map (quality ij) cs
g = (take 3.toList.head&&&take 3.toList.last).toRows.snd
quality [rx,ix,ry,iy] c = magnitude (p <> complex c <.> p) / np
where
p = fromList [rx:+ix,ry:+iy,1]
np = pnorm PNorm2 p **2
quality' [rx,ix,ry,iy] c = eccentricity (mt t <> c <> inv t)
where
t = rectifierFromCircularPoint (rx:+ix,ry:+iy)
eccentricity con = (d1-d2)/d1 where InfoEllipse {conicSizes = (d1,d2)} = analyzeEllipse con
|
574d436787be64dbbb710755258393d362c93e3cc1820c03c80f8e6d62339de9 | Frama-C/Frama-C-snapshot | security_slicing_parameters.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
include Plugin.Register
(struct
let name = "security-slicing"
let shortname = "security-slicing"
let help = "security slicing (experimental, undocumented)"
end)
module Slicing =
False
(struct
let option_name = "-security-slicing"
let help = "perform the security slicing analysis"
end)
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/security_slicing/security_slicing_parameters.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
include Plugin.Register
(struct
let name = "security-slicing"
let shortname = "security-slicing"
let help = "security slicing (experimental, undocumented)"
end)
module Slicing =
False
(struct
let option_name = "-security-slicing"
let help = "perform the security slicing analysis"
end)
|
873452cda333bc1c60aacdb55b24e0fe7d901deaa6ca9db29c6ee56d0f469101 | albertoruiz/easyVision | linemodels.hs | import Vision.GUI.Simple
--import ImagProc
import Data.Traversable(traverse)
import Util.Options(optionFromFile)
import Util.Geometry as G
import Util.Estimation
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Util
import Numeric.LinearProgramming.L1
main = runIt $ clickPoints "click points" "--points" () (sh.fst)
sh pts = Draw [ color white . drawPointsLabeled $ pts
, models ]
where
models | length pts < 2 = Draw ()
| otherwise = Draw [ color green (algline pts)
, color blue (pinvline pts)
, color red (eigline pts)
, color orange (l1yline pts) ]
algline :: [Point] -> HLine
mse algebraic cost
algline = unsafeFromVector . fromList . mseLine
pinvline :: [Point] -> HLine
mse of y value
pinvline pts = unsafeFromVector (fromList [a',-1,b'])
where
[a',b'] = toList sol
sol = a <\> b
b = fromList $ map py pts
x = col $ map px pts
a = x ¦ 1
px (Point x _) = x
py (Point _ y) = y
eigline :: [Point] -> HLine
-- principal direction
eigline pts = gjoin p q
where
x = fromRows $ map toVector pts
(m,c) = meanCov x
p = G.homog (unsafeFromVector m :: Point)
q' = head $ toColumns $ snd $ eigSH c
q = unsafeFromVector (q' & 0) :: HPoint
l1yline :: [Point] -> HLine
-- L1 cost of y value
l1yline pts = unsafeFromVector (fromList [a',-1,b'])
where
[a',b'] = toList sol
sol = l1SolveO a b
b = fromList $ map py pts
x = col $ map px pts
a = x ¦ 1
px (Point x _) = x
py (Point _ y) = y
| null | https://raw.githubusercontent.com/albertoruiz/easyVision/26bb2efaa676c902cecb12047560a09377a969f2/projects/vision/geom/linemodels.hs | haskell | import ImagProc
principal direction
L1 cost of y value | import Vision.GUI.Simple
import Data.Traversable(traverse)
import Util.Options(optionFromFile)
import Util.Geometry as G
import Util.Estimation
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Util
import Numeric.LinearProgramming.L1
main = runIt $ clickPoints "click points" "--points" () (sh.fst)
sh pts = Draw [ color white . drawPointsLabeled $ pts
, models ]
where
models | length pts < 2 = Draw ()
| otherwise = Draw [ color green (algline pts)
, color blue (pinvline pts)
, color red (eigline pts)
, color orange (l1yline pts) ]
algline :: [Point] -> HLine
mse algebraic cost
algline = unsafeFromVector . fromList . mseLine
pinvline :: [Point] -> HLine
mse of y value
pinvline pts = unsafeFromVector (fromList [a',-1,b'])
where
[a',b'] = toList sol
sol = a <\> b
b = fromList $ map py pts
x = col $ map px pts
a = x ¦ 1
px (Point x _) = x
py (Point _ y) = y
eigline :: [Point] -> HLine
eigline pts = gjoin p q
where
x = fromRows $ map toVector pts
(m,c) = meanCov x
p = G.homog (unsafeFromVector m :: Point)
q' = head $ toColumns $ snd $ eigSH c
q = unsafeFromVector (q' & 0) :: HPoint
l1yline :: [Point] -> HLine
l1yline pts = unsafeFromVector (fromList [a',-1,b'])
where
[a',b'] = toList sol
sol = l1SolveO a b
b = fromList $ map py pts
x = col $ map px pts
a = x ¦ 1
px (Point x _) = x
py (Point _ y) = y
|
8a9e6df49c9a4e9d46dff7736e7098bff75359d9c01001664bf474a4f2b755e9 | SKA-ScienceDataProcessor/RC | SlurmBackend.hs | {-# OPTIONS_HADDOCK hide #-}
-- | Simple backend based on the TCP transport
--
-- To simplify getting started we provide special support for /master/ and
-- /slave/ nodes (see 'startSlave' and 'startMaster'). Use of these functions
-- is completely optional; you can use the local backend without making use
-- of the predefined master and slave nodes.
--
-- [Minimal example]
--
-- > import System.Environment (getArgs)
> import Control . Distributed . Process
> import Control . Distributed . Process . Node ( initRemoteTable )
> import Control . Distributed . Process . Backend . SimpleLocalnet
-- >
-- > master :: Backend -> [NodeId] -> Process ()
-- > master backend slaves = do
-- > -- Do something interesting with the slaves
> liftIO . " Slaves : " + + show slaves
-- > -- Terminate the slaves when the master terminates (this is optional)
-- > terminateAllSlaves backend
-- >
-- > main :: IO ()
-- > main = do
-- > args <- getArgs
-- >
-- > case args of
-- > ["master", host, port] -> do
-- > backend <- initializeBackend host port initRemoteTable
-- > startMaster backend (master backend)
-- > ["slave", host, port] -> do
-- > backend <- initializeBackend host port initRemoteTable
-- > startSlave backend
--
[ Compiling and Running ]
--
-- Save to @example.hs@ and compile using
--
-- > ghc -threaded example.hs
--
-- Fire up some slave nodes (for the example, we run them on a single machine):
--
> ./example slave localhost 8080 &
> ./example slave localhost 8081 &
-- > ./example slave localhost 8082 &
> ./example slave localhost 8083 &
--
-- And start the master node:
--
> ./example master localhost 8084
--
-- which should then output:
--
-- > Slaves: [nid:8083:0,nid:8082:0,nid:8081:0,nid:8080:0]
--
-- at which point the slaves should exit.
--
-- To run the example on multiple machines, you could run
--
> ./example slave 198.51.100.1 8080 &
> ./example slave 198.51.100.2 8080 &
> ./example slave 198.51.100.3 8080 &
> ./example slave 198.51.100.4 8080 &
--
on four different machines ( with IP addresses 198.51.100.1 .. 4 ) , and run the
master on a fifth node ( or on any of the four machines that run the slave
-- nodes).
--
-- It is important that every node has a unique (hostname, port number) pair,
-- and that the hostname you use to initialize the node can be resolved by
-- peer nodes. In other words, if you start a node and pass hostname @localhost@
-- then peer nodes won't be able to reach it because @localhost@ will resolve
-- to a different IP address for them.
--
-- [Troubleshooting]
--
# OPTIONS_GHC -fno - warn - orphans #
{-# LANGUAGE DeriveDataTypeable #-}
module DNA.SlurmBackend
( -- * Initialization
Backend(..)
, CAD(..)
, initializeBackend
-- * Slave nodes
, startSlave
, terminateSlave
, findSlaves
, terminateAllSlaves
-- * Master nodes
, startMaster
) where
import Data.Maybe (catMaybes)
import Data.Binary (Binary(get, put), getWord8, putWord8)
import Data.Accessor (Accessor, accessor, (^:), (^.))
import Data.Foldable (forM_)
import Data.Typeable (Typeable)
import Control.Exception (throw)
import Control.Monad (replicateM, replicateM_)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (threadDelay)
import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
import Control.Distributed.Process
( RemoteTable
, NodeId(..)
, Process
, ProcessId
, WhereIsReply(..)
, whereis
, whereisRemoteAsync
, getSelfPid
, register
, reregister
, expect
, nsendRemote
, receiveWait
, match
, processNodeId
, monitorNode
, monitor
, unmonitor
, NodeMonitorNotification(..)
, ProcessRegistrationException
, finally
, newChan
, receiveChan
, nsend
, SendPort
, bracket
, try
, send
)
import qualified Control.Distributed.Process.Node as Node
( LocalNode
, newLocalNode
, runProcess
)
import qualified Network.Socket as N
import qualified Network.Transport.TCP as NT
( createTransport
, defaultTCPParameters
, encodeEndPointAddress
)
import qualified Network.Transport as NT (Transport(..))
import DNA.Logging (eventMessage)
-- | Local backend
data Backend = Backend {
-- | Create a new local node
newLocalNode :: IO Node.LocalNode
-- | @findPeers returns the list of peers
, findPeers :: IO [NodeId]
-- | Make sure that all log messages are printed by the logger on the
-- current node
, redirectLogsHere :: [ProcessId] -> Process ()
}
data BackendState = BackendState {
_localNodes :: [Node.LocalNode]
, _peers :: [NodeId]
}
-- | Cluster
data CAD
= Local [Int] -- ^ Spawn program on localhost on list of ports
| SLURM Int [(String,Int)]
-- ^ Base port and number of tasks per node
-- | Initialize the backend
initializeBackend
:: CAD -- ^ Cluster description
-> N.HostName -- ^ Host name for
-> N.ServiceName -- ^ Port number as bytestring
-> RemoteTable -- ^ Remote table for backend
-> IO Backend
initializeBackend cad host port rtable = do
mTransport <- NT.createTransport host port NT.defaultTCPParameters
let addresses = case cad of
-- passing 0 as a endpointid is a hack. Again, I haven't found any better way.
Local ports -> [ NodeId $ NT.encodeEndPointAddress "localhost" theirPort 0
| p <- ports
, let theirPort = show p
, theirPort /= port
]
SLURM basePort nodes -> concat
-- FIXME: filter out our address
[ [ NodeId $ NT.encodeEndPointAddress hst (show (basePort+n)) 0
| n <- [0 .. nTasks - 1]
]
| (hst,nTasks) <- nodes
]
backendState <- newMVar BackendState
{ _localNodes = []
, _peers = addresses
}
case mTransport of
Left err -> throw err
Right transport ->
let backend = Backend
{ newLocalNode = apiNewLocalNode transport rtable backendState
, findPeers = apiFindPeers backendState
, redirectLogsHere = apiRedirectLogsHere backend
}
in return backend
-- | Create a new local node
apiNewLocalNode :: NT.Transport
-> RemoteTable
-> MVar BackendState
-> IO Node.LocalNode
apiNewLocalNode transport rtable backendState = do
localNode <- Node.newLocalNode transport rtable
modifyMVar_ backendState $ return . (localNodes ^: (localNode :))
return localNode
-- | Peer discovery
apiFindPeers :: MVar BackendState
-> IO [NodeId]
apiFindPeers backendState = do
(^. peers) <$> readMVar backendState
--------------------------------------------------------------------------------
-- Back-end specific primitives --
--------------------------------------------------------------------------------
-- | Make sure that all log messages are printed by the logger on this node
apiRedirectLogsHere :: Backend -> [ProcessId] -> Process ()
apiRedirectLogsHere _backend slavecontrollers = do
mLogger <- whereis "logger"
myPid <- getSelfPid
forM_ mLogger $ \logger -> do
bracket
(mapM monitor slavecontrollers)
(mapM unmonitor)
$ \_ -> do
-- fire off redirect requests
forM_ slavecontrollers $ \pid -> send pid (RedirectLogsTo logger myPid)
-- Wait for the replies
replicateM_ (length slavecontrollers) $ do
receiveWait
[ match (\(RedirectLogsReply {}) -> return ())
, match (\(NodeMonitorNotification {}) -> return ())
]
--------------------------------------------------------------------------------
-- Slaves --
--------------------------------------------------------------------------------
-- | Messages to slave nodes
--
-- This datatype is not exposed; instead, we expose primitives for dealing
-- with slaves.
data SlaveControllerMsg
= SlaveTerminate
| RedirectLogsTo ProcessId ProcessId
deriving (Typeable, Show)
instance Binary SlaveControllerMsg where
put SlaveTerminate = putWord8 0
put (RedirectLogsTo a b) = do putWord8 1; put (a,b)
get = do
header <- getWord8
case header of
0 -> return SlaveTerminate
1 -> do (a,b) <- get; return (RedirectLogsTo a b)
_ -> fail "SlaveControllerMsg.get: invalid"
data RedirectLogsReply
= RedirectLogsReply ProcessId Bool
deriving (Typeable, Show)
instance Binary RedirectLogsReply where
put (RedirectLogsReply from ok) = put (from,ok)
get = do
(from,ok) <- get
return (RedirectLogsReply from ok)
-- | Calling 'slave' sets up a new local node and then waits. You start
-- processes on the slave by calling 'spawn' from other nodes.
--
-- This function does not return. The only way to exit the slave is to CTRL-C
-- the process or call terminateSlave from another node.
startSlave :: Backend -> IO ()
startSlave backend = do
node <- newLocalNode backend
Node.runProcess node slaveController
| The slave controller interprets ' SlaveControllerMsg 's
slaveController :: Process ()
slaveController = do
pid <- getSelfPid
register "slaveController" pid
go
where
go = do
msg <- expect
case msg of
SlaveTerminate -> return ()
RedirectLogsTo loggerPid from -> do
r <- try (reregister "logger" loggerPid)
ok <- case (r :: Either ProcessRegistrationException ()) of
Right _ -> return True
Left _ -> do
s <- try (register "logger" loggerPid)
case (s :: Either ProcessRegistrationException ()) of
Right _ -> return True
Left _ -> return False
pid <- getSelfPid
send from (RedirectLogsReply pid ok)
go
-- | Terminate the slave at the given node ID
terminateSlave :: NodeId -> Process ()
terminateSlave nid = nsendRemote nid "slaveController" SlaveTerminate
-- | Find slave nodes. Function return list of slaveController
-- processes.
findSlaves
:: Int -- ^ Number of retries
-> Backend
-> Process [ProcessId]
findSlaves nRetry backend = do
nodes <- liftIO $ findPeers backend
-- Fire off asynchronous requests for the slave controller
bracket
(mapM monitorNode nodes)
(mapM unmonitor)
$ \_ -> do
fire off whereis requests
forM_ nodes $ \nid -> whereisRemoteAsync nid "slaveController"
-- Wait for the replies
pids <- catMaybes <$> replicateM (length nodes) (
receiveWait
[ match $ \(WhereIsReply "slaveController" mPid) ->
case mPid of
Nothing -> do eventMessage "No slave controller on remote node!"
return Nothing
Just pid -> return (Just pid)
, match $ \n@(NodeMonitorNotification {}) -> do
eventMessage $ "Remote node is down? : " ++ show n
return Nothing
])
-- FIXME: how should we treat not found nodes?
if (length pids /= length nodes) && (nRetry > 0)
then do eventMessage $ "Retrying findSlaves (" ++ show (nRetry-1) ++ ") left"
liftIO $ threadDelay (1000*1000)
findSlaves (nRetry-1) backend
else return pids
-- | Terminate all slaves
terminateAllSlaves :: Backend -> Process ()
terminateAllSlaves backend = do
slaves <- findSlaves 0 backend
forM_ slaves $ \pid -> send pid SlaveTerminate
liftIO $ threadDelay 1000000
--------------------------------------------------------------------------------
-- Master nodes
--------------------------------------------------------------------------------
-- | 'startMaster' finds all slaves /currently/ available on the local network,
-- redirects all log messages to itself, and then calls the specified process,
-- passing the list of slaves nodes.
--
Terminates when the specified process terminates . If you want to terminate
-- the slaves when the master terminates, you should manually call
-- 'terminateAllSlaves'.
--
-- If you start more slave nodes after having started the master node, you can
-- discover them with later calls to 'findSlaves', but be aware that you will
-- need to call 'redirectLogHere' to redirect their logs to the master node.
--
-- Note that you can use functionality of "SimpleLocalnet" directly (through
-- 'Backend'), instead of using 'startMaster'/'startSlave', if the master/slave
-- distinction does not suit your application.
startMaster :: Backend -> ([NodeId] -> Process ()) -> IO ()
startMaster backend proc = do
node <- newLocalNode backend
Node.runProcess node $ do
slaves <- findSlaves 2 backend
redirectLogsHere backend slaves
proc (map processNodeId slaves) `finally` shutdownLogger
--
-- | shut down the logger process. This ensures that any pending
-- messages are flushed before the process exits.
--
shutdownLogger :: Process ()
shutdownLogger = do
(sport,rport) <- newChan
nsend "logger" (sport :: SendPort ())
receiveChan rport
-- TODO: we should monitor the logger process so we don't deadlock if
-- it has already died.
--------------------------------------------------------------------------------
-- Accessors --
--------------------------------------------------------------------------------
localNodes :: Accessor BackendState [Node.LocalNode]
localNodes = accessor _localNodes (\ns st -> st { _localNodes = ns })
peers :: Accessor BackendState [NodeId]
peers = accessor _peers (\ps st -> st { _peers = ps })
| null | https://raw.githubusercontent.com/SKA-ScienceDataProcessor/RC/1b5e25baf9204a9f7ef40ed8ee94a86cc6c674af/MS5/dna/core/DNA/SlurmBackend.hs | haskell | # OPTIONS_HADDOCK hide #
| Simple backend based on the TCP transport
To simplify getting started we provide special support for /master/ and
/slave/ nodes (see 'startSlave' and 'startMaster'). Use of these functions
is completely optional; you can use the local backend without making use
of the predefined master and slave nodes.
[Minimal example]
> import System.Environment (getArgs)
>
> master :: Backend -> [NodeId] -> Process ()
> master backend slaves = do
> -- Do something interesting with the slaves
> -- Terminate the slaves when the master terminates (this is optional)
> terminateAllSlaves backend
>
> main :: IO ()
> main = do
> args <- getArgs
>
> case args of
> ["master", host, port] -> do
> backend <- initializeBackend host port initRemoteTable
> startMaster backend (master backend)
> ["slave", host, port] -> do
> backend <- initializeBackend host port initRemoteTable
> startSlave backend
Save to @example.hs@ and compile using
> ghc -threaded example.hs
Fire up some slave nodes (for the example, we run them on a single machine):
> ./example slave localhost 8082 &
And start the master node:
which should then output:
> Slaves: [nid:8083:0,nid:8082:0,nid:8081:0,nid:8080:0]
at which point the slaves should exit.
To run the example on multiple machines, you could run
nodes).
It is important that every node has a unique (hostname, port number) pair,
and that the hostname you use to initialize the node can be resolved by
peer nodes. In other words, if you start a node and pass hostname @localhost@
then peer nodes won't be able to reach it because @localhost@ will resolve
to a different IP address for them.
[Troubleshooting]
# LANGUAGE DeriveDataTypeable #
* Initialization
* Slave nodes
* Master nodes
| Local backend
| Create a new local node
| @findPeers returns the list of peers
| Make sure that all log messages are printed by the logger on the
current node
| Cluster
^ Spawn program on localhost on list of ports
^ Base port and number of tasks per node
| Initialize the backend
^ Cluster description
^ Host name for
^ Port number as bytestring
^ Remote table for backend
passing 0 as a endpointid is a hack. Again, I haven't found any better way.
FIXME: filter out our address
| Create a new local node
| Peer discovery
------------------------------------------------------------------------------
Back-end specific primitives --
------------------------------------------------------------------------------
| Make sure that all log messages are printed by the logger on this node
fire off redirect requests
Wait for the replies
------------------------------------------------------------------------------
Slaves --
------------------------------------------------------------------------------
| Messages to slave nodes
This datatype is not exposed; instead, we expose primitives for dealing
with slaves.
| Calling 'slave' sets up a new local node and then waits. You start
processes on the slave by calling 'spawn' from other nodes.
This function does not return. The only way to exit the slave is to CTRL-C
the process or call terminateSlave from another node.
| Terminate the slave at the given node ID
| Find slave nodes. Function return list of slaveController
processes.
^ Number of retries
Fire off asynchronous requests for the slave controller
Wait for the replies
FIXME: how should we treat not found nodes?
| Terminate all slaves
------------------------------------------------------------------------------
Master nodes
------------------------------------------------------------------------------
| 'startMaster' finds all slaves /currently/ available on the local network,
redirects all log messages to itself, and then calls the specified process,
passing the list of slaves nodes.
the slaves when the master terminates, you should manually call
'terminateAllSlaves'.
If you start more slave nodes after having started the master node, you can
discover them with later calls to 'findSlaves', but be aware that you will
need to call 'redirectLogHere' to redirect their logs to the master node.
Note that you can use functionality of "SimpleLocalnet" directly (through
'Backend'), instead of using 'startMaster'/'startSlave', if the master/slave
distinction does not suit your application.
| shut down the logger process. This ensures that any pending
messages are flushed before the process exits.
TODO: we should monitor the logger process so we don't deadlock if
it has already died.
------------------------------------------------------------------------------
Accessors --
------------------------------------------------------------------------------ | > import Control . Distributed . Process
> import Control . Distributed . Process . Node ( initRemoteTable )
> import Control . Distributed . Process . Backend . SimpleLocalnet
> liftIO . " Slaves : " + + show slaves
[ Compiling and Running ]
> ./example slave localhost 8080 &
> ./example slave localhost 8081 &
> ./example slave localhost 8083 &
> ./example master localhost 8084
> ./example slave 198.51.100.1 8080 &
> ./example slave 198.51.100.2 8080 &
> ./example slave 198.51.100.3 8080 &
> ./example slave 198.51.100.4 8080 &
on four different machines ( with IP addresses 198.51.100.1 .. 4 ) , and run the
master on a fifth node ( or on any of the four machines that run the slave
# OPTIONS_GHC -fno - warn - orphans #
module DNA.SlurmBackend
Backend(..)
, CAD(..)
, initializeBackend
, startSlave
, terminateSlave
, findSlaves
, terminateAllSlaves
, startMaster
) where
import Data.Maybe (catMaybes)
import Data.Binary (Binary(get, put), getWord8, putWord8)
import Data.Accessor (Accessor, accessor, (^:), (^.))
import Data.Foldable (forM_)
import Data.Typeable (Typeable)
import Control.Exception (throw)
import Control.Monad (replicateM, replicateM_)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (threadDelay)
import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
import Control.Distributed.Process
( RemoteTable
, NodeId(..)
, Process
, ProcessId
, WhereIsReply(..)
, whereis
, whereisRemoteAsync
, getSelfPid
, register
, reregister
, expect
, nsendRemote
, receiveWait
, match
, processNodeId
, monitorNode
, monitor
, unmonitor
, NodeMonitorNotification(..)
, ProcessRegistrationException
, finally
, newChan
, receiveChan
, nsend
, SendPort
, bracket
, try
, send
)
import qualified Control.Distributed.Process.Node as Node
( LocalNode
, newLocalNode
, runProcess
)
import qualified Network.Socket as N
import qualified Network.Transport.TCP as NT
( createTransport
, defaultTCPParameters
, encodeEndPointAddress
)
import qualified Network.Transport as NT (Transport(..))
import DNA.Logging (eventMessage)
data Backend = Backend {
newLocalNode :: IO Node.LocalNode
, findPeers :: IO [NodeId]
, redirectLogsHere :: [ProcessId] -> Process ()
}
data BackendState = BackendState {
_localNodes :: [Node.LocalNode]
, _peers :: [NodeId]
}
data CAD
| SLURM Int [(String,Int)]
initializeBackend
-> IO Backend
initializeBackend cad host port rtable = do
mTransport <- NT.createTransport host port NT.defaultTCPParameters
let addresses = case cad of
Local ports -> [ NodeId $ NT.encodeEndPointAddress "localhost" theirPort 0
| p <- ports
, let theirPort = show p
, theirPort /= port
]
SLURM basePort nodes -> concat
[ [ NodeId $ NT.encodeEndPointAddress hst (show (basePort+n)) 0
| n <- [0 .. nTasks - 1]
]
| (hst,nTasks) <- nodes
]
backendState <- newMVar BackendState
{ _localNodes = []
, _peers = addresses
}
case mTransport of
Left err -> throw err
Right transport ->
let backend = Backend
{ newLocalNode = apiNewLocalNode transport rtable backendState
, findPeers = apiFindPeers backendState
, redirectLogsHere = apiRedirectLogsHere backend
}
in return backend
apiNewLocalNode :: NT.Transport
-> RemoteTable
-> MVar BackendState
-> IO Node.LocalNode
apiNewLocalNode transport rtable backendState = do
localNode <- Node.newLocalNode transport rtable
modifyMVar_ backendState $ return . (localNodes ^: (localNode :))
return localNode
apiFindPeers :: MVar BackendState
-> IO [NodeId]
apiFindPeers backendState = do
(^. peers) <$> readMVar backendState
apiRedirectLogsHere :: Backend -> [ProcessId] -> Process ()
apiRedirectLogsHere _backend slavecontrollers = do
mLogger <- whereis "logger"
myPid <- getSelfPid
forM_ mLogger $ \logger -> do
bracket
(mapM monitor slavecontrollers)
(mapM unmonitor)
$ \_ -> do
forM_ slavecontrollers $ \pid -> send pid (RedirectLogsTo logger myPid)
replicateM_ (length slavecontrollers) $ do
receiveWait
[ match (\(RedirectLogsReply {}) -> return ())
, match (\(NodeMonitorNotification {}) -> return ())
]
data SlaveControllerMsg
= SlaveTerminate
| RedirectLogsTo ProcessId ProcessId
deriving (Typeable, Show)
instance Binary SlaveControllerMsg where
put SlaveTerminate = putWord8 0
put (RedirectLogsTo a b) = do putWord8 1; put (a,b)
get = do
header <- getWord8
case header of
0 -> return SlaveTerminate
1 -> do (a,b) <- get; return (RedirectLogsTo a b)
_ -> fail "SlaveControllerMsg.get: invalid"
data RedirectLogsReply
= RedirectLogsReply ProcessId Bool
deriving (Typeable, Show)
instance Binary RedirectLogsReply where
put (RedirectLogsReply from ok) = put (from,ok)
get = do
(from,ok) <- get
return (RedirectLogsReply from ok)
startSlave :: Backend -> IO ()
startSlave backend = do
node <- newLocalNode backend
Node.runProcess node slaveController
| The slave controller interprets ' SlaveControllerMsg 's
slaveController :: Process ()
slaveController = do
pid <- getSelfPid
register "slaveController" pid
go
where
go = do
msg <- expect
case msg of
SlaveTerminate -> return ()
RedirectLogsTo loggerPid from -> do
r <- try (reregister "logger" loggerPid)
ok <- case (r :: Either ProcessRegistrationException ()) of
Right _ -> return True
Left _ -> do
s <- try (register "logger" loggerPid)
case (s :: Either ProcessRegistrationException ()) of
Right _ -> return True
Left _ -> return False
pid <- getSelfPid
send from (RedirectLogsReply pid ok)
go
terminateSlave :: NodeId -> Process ()
terminateSlave nid = nsendRemote nid "slaveController" SlaveTerminate
findSlaves
-> Backend
-> Process [ProcessId]
findSlaves nRetry backend = do
nodes <- liftIO $ findPeers backend
bracket
(mapM monitorNode nodes)
(mapM unmonitor)
$ \_ -> do
fire off whereis requests
forM_ nodes $ \nid -> whereisRemoteAsync nid "slaveController"
pids <- catMaybes <$> replicateM (length nodes) (
receiveWait
[ match $ \(WhereIsReply "slaveController" mPid) ->
case mPid of
Nothing -> do eventMessage "No slave controller on remote node!"
return Nothing
Just pid -> return (Just pid)
, match $ \n@(NodeMonitorNotification {}) -> do
eventMessage $ "Remote node is down? : " ++ show n
return Nothing
])
if (length pids /= length nodes) && (nRetry > 0)
then do eventMessage $ "Retrying findSlaves (" ++ show (nRetry-1) ++ ") left"
liftIO $ threadDelay (1000*1000)
findSlaves (nRetry-1) backend
else return pids
terminateAllSlaves :: Backend -> Process ()
terminateAllSlaves backend = do
slaves <- findSlaves 0 backend
forM_ slaves $ \pid -> send pid SlaveTerminate
liftIO $ threadDelay 1000000
Terminates when the specified process terminates . If you want to terminate
startMaster :: Backend -> ([NodeId] -> Process ()) -> IO ()
startMaster backend proc = do
node <- newLocalNode backend
Node.runProcess node $ do
slaves <- findSlaves 2 backend
redirectLogsHere backend slaves
proc (map processNodeId slaves) `finally` shutdownLogger
shutdownLogger :: Process ()
shutdownLogger = do
(sport,rport) <- newChan
nsend "logger" (sport :: SendPort ())
receiveChan rport
localNodes :: Accessor BackendState [Node.LocalNode]
localNodes = accessor _localNodes (\ns st -> st { _localNodes = ns })
peers :: Accessor BackendState [NodeId]
peers = accessor _peers (\ps st -> st { _peers = ps })
|
5cff616c2102f1d3fa8cde797714c012fa7f11769308b3e99e0697bc10928602 | metosin/eines | edn.clj | (ns eines.pack.edn
(:require [clojure.edn :as edn]))
(defn pack ^String [message]
(when message
(pr-str message)))
(defn unpack [^String message]
(when message
(edn/read-string message)))
| null | https://raw.githubusercontent.com/metosin/eines/e293d0a3b29eb18fb20bdf0c234cd898e7b87ac9/modules/eines-server/src/eines/pack/edn.clj | clojure | (ns eines.pack.edn
(:require [clojure.edn :as edn]))
(defn pack ^String [message]
(when message
(pr-str message)))
(defn unpack [^String message]
(when message
(edn/read-string message)))
|
|
8b184132cf09ac8987f5413ea60139cdf2c7cf3c33bf17d5013ee9925ad59d0e | jolby/colors | colors_test.cljc | by
February 4 , 2010
Copyright ( c ) , 2010 . All rights reserved . The use
and distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
(ns com.evocomputing.colors-test
#?(:clj (:import (java.awt Color)))
(:require [clojure.string :as s]
[clojure.test :refer (is deftest)]
[com.evocomputing.utils :as utils]
[com.evocomputing.colors :as sut]))
(deftest test-hsl-to-rgb
(is (= [255 0 0] (sut/hsl-to-rgb 360.0,100.0,50.0)))
(is (= [255 0 0] (sut/hsl-to-rgb 0.0,100.0,50.0)))
(is (= [0 255 0] (sut/hsl-to-rgb 120.0,100.0,50.0)))
(is (= [0 0 255] (sut/hsl-to-rgb 240.0,100.0,50.0))))
(deftest test-rgb-to-hsl
(is (= [0.0,100.0,50.0] (sut/rgb-to-hsl 255 0 0)))
(is (= [120.0,100.0,50.0] (sut/rgb-to-hsl 0 255 0)))
(is (= [240.0,100.0,50.0] (sut/rgb-to-hsl 0 0 255))))
(deftest test-create-color-dispatch
(is (= :com.evocomputing.colors/symbolic-color (sut/create-color-dispatch "0x000000")))
(is (= :com.evocomputing.colors/symbolic-color (sut/create-color-dispatch "#000000")))
(is (= :com.evocomputing.colors/rgb-int (sut/create-color-dispatch 0x000000)))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch [255 0 0])))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch 255 0 0)))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch [255 0 0 255])))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch 255 0 0 128)))
(is (= :com.evocomputing.colors/rgb-map (sut/create-color-dispatch {:r 255 :g 0 :blue 0 :a 255})))
(is (= :com.evocomputing.colors/rgb-map (sut/create-color-dispatch :r 255 :g 0 :blue 0 :a 255)))
(is (= :com.evocomputing.colors/hsl-map (sut/create-color-dispatch {:h 255 :s 0 :l 0})))
(is (= :com.evocomputing.colors/hsl-map (sut/create-color-dispatch :h 255 :s 0 :l 0)))
#?(:clj (is (= java.awt.Color (sut/create-color-dispatch (Color. 255 0 0)))))
)
(deftest test-create-color
(is (instance? com.evocomputing.colors.color (sut/create-color "#fff")))
(is (instance? com.evocomputing.colors.color (sut/create-color "0x000000")))
(is (instance? com.evocomputing.colors.color (sut/create-color "#000000")))
(is (instance? com.evocomputing.colors.color (sut/create-color 0x000000)))
(is (instance? com.evocomputing.colors.color (sut/create-color [255 0 0])))
(is (instance? com.evocomputing.colors.color (sut/create-color 255 0 0)))
(is (instance? com.evocomputing.colors.color (sut/create-color [255 0 0 255])))
(is (instance? com.evocomputing.colors.color (sut/create-color 255 0 0 128)))
(is (instance? com.evocomputing.colors.color (sut/create-color {:r 255 :g 0 :blue 0 :a 255})))
(is (instance? com.evocomputing.colors.color (sut/create-color :r 255 :g 0 :blue 0 :a 255)))
(is (instance? com.evocomputing.colors.color (sut/create-color {:h 120.0 :s 100.0 :l 50.0})))
(is (instance? com.evocomputing.colors.color (sut/create-color :h 120.0 :s 100.0 :l 50.0)))
(is (instance? com.evocomputing.colors.color (sut/create-color :h 120 :s 100 :l 50)))
(is (pos? (count (with-out-str (print (sut/create-color :h 120 :s 100 :l 50))))))
#?(:clj (is (instance? com.evocomputing.colors.color (sut/create-color (Color. 255 0 0)))))
;; test bad input checking
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color "#badhexstring")))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color 355 0 0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color 255 0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color :h 120.0 :s 200.0 :l 50.0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color :h 420.0 :s 100.0 :l 500.0)))
)
(deftest test-adjust-alpha
(is (= 192 (sut/alpha (sut/adjust-alpha (sut/create-color 0 0 0 0.50) 0.25))))
)
Make sure that creating a color from RGB values leads to legal
;; saturation and lightness levels. These test values were
;; formerly causing exceptions by yielding saturation or lightness
;; values slightly greater than 100.0.
(deftest test-rgb-color-creation
(sut/adjust-hue (sut/create-color :r 10 :g 255 :b 43) 40)
(sut/adjust-hue (sut/create-color :r 115 :g 255 :b 218) 40)
(sut/adjust-hue (sut/create-color :r 250 :g 255 :b 121) 40))
(deftest test-rgba-int-to-components
(is (= (sut/rgba-int-to-components (sut/hexstring-to-rgba-int "#fff")) [255 255 255 255])))
#?(:clj (defn hsl-rgb-test-pairs []
(let [filestr (slurp (.getPath (.getResource (clojure.lang.RT/baseLoader) "hsl-rgb.txt")))
chunks (s/split filestr #"\n\n")
clean-fn (fn [lines] (filter #(not= "" %) (map #(.trim %) (s/split lines #"\n"))))]
(partition 2 (flatten
(for [chunk chunks]
(let [[hsls rgbs] (s/split chunk #"====")]
(interleave (clean-fn hsls)
(clean-fn rgbs)))))))))
#?(:clj (defn test-hsl-rgb-conversions []
(let [pairs (hsl-rgb-test-pairs)
extract-hsl (fn [hslstr]
(let [hsl (re-find #"hsl\((-?[0-9]+), ([0-9]+)%, ([0-9]+)%\)" hslstr)]
(sut/create-color :h (utils/parse-float (hsl 1))
:s (utils/parse-float (hsl 2))
:l (utils/parse-float (hsl 3)))))
extract-rgb (fn [rgbstr]
(sut/create-color
(vec (map #(utils/parse-int %)
(drop 1
(re-find #"rgb\(([0-9]+), ([0-9]+), ([0-9]+)\)" rgbstr))))))]
(for [pair pairs]
(let [hsl-color (extract-hsl (first pair))
rgb-color (extract-rgb (second pair))
white (= (sut/lightness hsl-color) 100.0)
black (= (sut/lightness hsl-color) 0.0)
grayscale (or white black (= (sut/saturation hsl-color) 0.0))]
(utils/throw-if-not (or grayscale
(sut/within-tolerance? (sut/hue hsl-color) (sut/hue rgb-color)))
"Hues should be equal")
(utils/throw-if-not (or white black
(sut/within-tolerance? (sut/saturation hsl-color) (sut/saturation rgb-color)))
"Saturations should be equal")
(utils/throw-if-not (sut/within-tolerance? (sut/lightness hsl-color) (sut/lightness rgb-color))
"Lightnesses should be equal"))))))
| null | https://raw.githubusercontent.com/jolby/colors/30607e456cb7e80bc5b58f04b59505db92ae728e/test/com/evocomputing/colors_test.cljc | clojure | Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this
distribution. By using this software in any fashion, you are
agreeing to be bound by the terms of this license. You must not
remove this notice, or any other, from this software.
test bad input checking
saturation and lightness levels. These test values were
formerly causing exceptions by yielding saturation or lightness
values slightly greater than 100.0. | by
February 4 , 2010
Copyright ( c ) , 2010 . All rights reserved . The use
and distribution terms for this software are covered by the Eclipse
(ns com.evocomputing.colors-test
#?(:clj (:import (java.awt Color)))
(:require [clojure.string :as s]
[clojure.test :refer (is deftest)]
[com.evocomputing.utils :as utils]
[com.evocomputing.colors :as sut]))
(deftest test-hsl-to-rgb
(is (= [255 0 0] (sut/hsl-to-rgb 360.0,100.0,50.0)))
(is (= [255 0 0] (sut/hsl-to-rgb 0.0,100.0,50.0)))
(is (= [0 255 0] (sut/hsl-to-rgb 120.0,100.0,50.0)))
(is (= [0 0 255] (sut/hsl-to-rgb 240.0,100.0,50.0))))
(deftest test-rgb-to-hsl
(is (= [0.0,100.0,50.0] (sut/rgb-to-hsl 255 0 0)))
(is (= [120.0,100.0,50.0] (sut/rgb-to-hsl 0 255 0)))
(is (= [240.0,100.0,50.0] (sut/rgb-to-hsl 0 0 255))))
(deftest test-create-color-dispatch
(is (= :com.evocomputing.colors/symbolic-color (sut/create-color-dispatch "0x000000")))
(is (= :com.evocomputing.colors/symbolic-color (sut/create-color-dispatch "#000000")))
(is (= :com.evocomputing.colors/rgb-int (sut/create-color-dispatch 0x000000)))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch [255 0 0])))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch 255 0 0)))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch [255 0 0 255])))
(is (= :com.evocomputing.colors/rgb-array (sut/create-color-dispatch 255 0 0 128)))
(is (= :com.evocomputing.colors/rgb-map (sut/create-color-dispatch {:r 255 :g 0 :blue 0 :a 255})))
(is (= :com.evocomputing.colors/rgb-map (sut/create-color-dispatch :r 255 :g 0 :blue 0 :a 255)))
(is (= :com.evocomputing.colors/hsl-map (sut/create-color-dispatch {:h 255 :s 0 :l 0})))
(is (= :com.evocomputing.colors/hsl-map (sut/create-color-dispatch :h 255 :s 0 :l 0)))
#?(:clj (is (= java.awt.Color (sut/create-color-dispatch (Color. 255 0 0)))))
)
(deftest test-create-color
(is (instance? com.evocomputing.colors.color (sut/create-color "#fff")))
(is (instance? com.evocomputing.colors.color (sut/create-color "0x000000")))
(is (instance? com.evocomputing.colors.color (sut/create-color "#000000")))
(is (instance? com.evocomputing.colors.color (sut/create-color 0x000000)))
(is (instance? com.evocomputing.colors.color (sut/create-color [255 0 0])))
(is (instance? com.evocomputing.colors.color (sut/create-color 255 0 0)))
(is (instance? com.evocomputing.colors.color (sut/create-color [255 0 0 255])))
(is (instance? com.evocomputing.colors.color (sut/create-color 255 0 0 128)))
(is (instance? com.evocomputing.colors.color (sut/create-color {:r 255 :g 0 :blue 0 :a 255})))
(is (instance? com.evocomputing.colors.color (sut/create-color :r 255 :g 0 :blue 0 :a 255)))
(is (instance? com.evocomputing.colors.color (sut/create-color {:h 120.0 :s 100.0 :l 50.0})))
(is (instance? com.evocomputing.colors.color (sut/create-color :h 120.0 :s 100.0 :l 50.0)))
(is (instance? com.evocomputing.colors.color (sut/create-color :h 120 :s 100 :l 50)))
(is (pos? (count (with-out-str (print (sut/create-color :h 120 :s 100 :l 50))))))
#?(:clj (is (instance? com.evocomputing.colors.color (sut/create-color (Color. 255 0 0)))))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color "#badhexstring")))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color 355 0 0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color 255 0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color :h 120.0 :s 200.0 :l 50.0)))
(is (thrown? #?(:clj Exception, :cljs js/Error) (sut/create-color :h 420.0 :s 100.0 :l 500.0)))
)
(deftest test-adjust-alpha
(is (= 192 (sut/alpha (sut/adjust-alpha (sut/create-color 0 0 0 0.50) 0.25))))
)
Make sure that creating a color from RGB values leads to legal
(deftest test-rgb-color-creation
(sut/adjust-hue (sut/create-color :r 10 :g 255 :b 43) 40)
(sut/adjust-hue (sut/create-color :r 115 :g 255 :b 218) 40)
(sut/adjust-hue (sut/create-color :r 250 :g 255 :b 121) 40))
(deftest test-rgba-int-to-components
(is (= (sut/rgba-int-to-components (sut/hexstring-to-rgba-int "#fff")) [255 255 255 255])))
#?(:clj (defn hsl-rgb-test-pairs []
(let [filestr (slurp (.getPath (.getResource (clojure.lang.RT/baseLoader) "hsl-rgb.txt")))
chunks (s/split filestr #"\n\n")
clean-fn (fn [lines] (filter #(not= "" %) (map #(.trim %) (s/split lines #"\n"))))]
(partition 2 (flatten
(for [chunk chunks]
(let [[hsls rgbs] (s/split chunk #"====")]
(interleave (clean-fn hsls)
(clean-fn rgbs)))))))))
#?(:clj (defn test-hsl-rgb-conversions []
(let [pairs (hsl-rgb-test-pairs)
extract-hsl (fn [hslstr]
(let [hsl (re-find #"hsl\((-?[0-9]+), ([0-9]+)%, ([0-9]+)%\)" hslstr)]
(sut/create-color :h (utils/parse-float (hsl 1))
:s (utils/parse-float (hsl 2))
:l (utils/parse-float (hsl 3)))))
extract-rgb (fn [rgbstr]
(sut/create-color
(vec (map #(utils/parse-int %)
(drop 1
(re-find #"rgb\(([0-9]+), ([0-9]+), ([0-9]+)\)" rgbstr))))))]
(for [pair pairs]
(let [hsl-color (extract-hsl (first pair))
rgb-color (extract-rgb (second pair))
white (= (sut/lightness hsl-color) 100.0)
black (= (sut/lightness hsl-color) 0.0)
grayscale (or white black (= (sut/saturation hsl-color) 0.0))]
(utils/throw-if-not (or grayscale
(sut/within-tolerance? (sut/hue hsl-color) (sut/hue rgb-color)))
"Hues should be equal")
(utils/throw-if-not (or white black
(sut/within-tolerance? (sut/saturation hsl-color) (sut/saturation rgb-color)))
"Saturations should be equal")
(utils/throw-if-not (sut/within-tolerance? (sut/lightness hsl-color) (sut/lightness rgb-color))
"Lightnesses should be equal"))))))
|
8579dcd9b225c200ac2e1569fbb98aeff22b4e1b791234569b5bb55570732c91 | vaibhavsagar/experiments | Update.hs | -----------------------------------------------------------------------------
-- |
-- Module : Network.Hackage.CabalInstall.Update
Copyright : ( c ) 2005
-- License : BSD-like
--
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Network.Hackage.CabalInstall.Update
( update
) where
import Network.Hackage.CabalInstall.Types (ConfigFlags (..), OutputGen(..))
import Network.Hackage.CabalInstall.Config (writeKnownPackages)
import Network.Hackage.Client (listPackages)
-- | 'update' downloads the package list from all known servers
update :: ConfigFlags -> IO ()
update cfg
= do pkgs <- flip concatMapM servers
$ \serv -> do gettingPkgList (configOutputGen cfg) serv
listPackages serv
writeKnownPackages cfg pkgs
where servers = configServers cfg
concatMapM f = fmap concat . mapM f
| null | https://raw.githubusercontent.com/vaibhavsagar/experiments/378d7ba97eabfc7bbeaa4116380369ea6612bfeb/hugs/packages/Cabal/Network/Hackage/CabalInstall/Update.hs | haskell | ---------------------------------------------------------------------------
|
Module : Network.Hackage.CabalInstall.Update
License : BSD-like
Maintainer :
Stability : provisional
Portability : portable
---------------------------------------------------------------------------
| 'update' downloads the package list from all known servers | Copyright : ( c ) 2005
module Network.Hackage.CabalInstall.Update
( update
) where
import Network.Hackage.CabalInstall.Types (ConfigFlags (..), OutputGen(..))
import Network.Hackage.CabalInstall.Config (writeKnownPackages)
import Network.Hackage.Client (listPackages)
update :: ConfigFlags -> IO ()
update cfg
= do pkgs <- flip concatMapM servers
$ \serv -> do gettingPkgList (configOutputGen cfg) serv
listPackages serv
writeKnownPackages cfg pkgs
where servers = configServers cfg
concatMapM f = fmap concat . mapM f
|
e1414714e0fe1fd8803dd7f6f09c9f82218a5055fb1f1a8cccc584a636eca386 | janestreet/core_unix | import.ml | open! Core
module Time = Time_float_unix
| null | https://raw.githubusercontent.com/janestreet/core_unix/59d04e163b49c7eeef9d96fccb2403fd49c44505/time_stamp_counter/test/import.ml | ocaml | open! Core
module Time = Time_float_unix
|
|
01198e62a4be96ccc5ac73c370ef27e82935e4ed430ee5c9a1cece68a2e51be9 | ocaml-gospel/cameleer | power.ml | module type Exponentiation = sig
type t
(*@ function one: t *)
@ function ( x y : t ) : t
@ axiom assoc : forall x y ( mul x y ) z = mul x ( mul y z )
@ axiom unit_def_l : forall x : one x = x
@ axiom unit_def_r : forall x : one = x
(*@ function power (t: t) (x: integer) : t *)
@ axiom power_0 : forall x : 0 = one
(*@ axiom power_s : forall x: t, n: integer. n >= 0 ->
power x (n+1) = mul x (power x n) *)
@ lemma power_s_alt : forall x : t , n : int . n > 0 - >
power x n ( power x ( n-1 ) )
power x n = mul x (power x (n-1)) *)
@ lemma power_1 : forall x : 1 = x
@ lemma power_sum : forall x : t , n m : int . 0 < = n - > 0 < = m - >
power x ( ) = mul ( power x n ) ( power x m )
power x (n+m) = mul (power x n) (power x m) *)
(*@ lemma power_mult : forall x:t, n m : int. 0 <= n -> 0 <= m ->
power x (n * m) = power (power x n) m *)
@ lemma power_comm1 : forall x y : y =
forall n : int . 0 < = n - >
( power x n ) y = mul y ( power x n )
forall n:int. 0 <= n ->
mul (power x n) y = mul y (power x n) *)
@ lemma power_comm2 : forall x y : y =
forall n : int . 0 < = n - >
power ( mul x y ) n = mul ( power x n ) ( power y n )
forall n:int. 0 <= n ->
power (mul x y) n = mul (power x n) (power y n) *)
end
module type Power = sig
type t = int
val power : int -> int -> int [@@logic]
@ function one : int = 1
@ function ( x y : t ) : t = x * y
@ axiom assoc : forall x y ( mul x y ) z = mul x ( mul y z )
@ axiom unit_def_l : forall x : one x = x
@ axiom unit_def_r : forall x : one = x
@ axiom power_0 : forall x : 0 = one
(*@ axiom power_s : forall x: t, n: integer. n >= 0 ->
power x (n+1) = mul x (power x n) *)
@ lemma power_s_alt : forall x : t , n : int . n > 0 - >
power x n ( power x ( n-1 ) )
power x n = mul x (power x (n-1)) *)
@ lemma power_1 : forall x : 1 = x
@ lemma power_sum : forall x : t , n m : int . 0 < = n - > 0 < = m - >
power x ( ) = mul ( power x n ) ( power x m )
power x (n+m) = mul (power x n) (power x m) *)
(*@ lemma power_mult : forall x:t, n m : int. 0 <= n -> 0 <= m ->
power x (n * m) = power (power x n) m *)
@ lemma power_comm1 : forall x y : y =
forall n : int . 0 < = n - >
( power x n ) y = mul y ( power x n )
forall n:int. 0 <= n ->
mul (power x n) y = mul y (power x n) *)
@ lemma power_comm2 : forall x y : y =
forall n : int . 0 < = n - >
power ( mul x y ) n = mul ( power x n ) ( power y n )
forall n:int. 0 <= n ->
power (mul x y) n = mul (power x n) (power y n) *)
@ lemma power_non_neg :
forall x y. x > = 0 /\ y > = 0 - > power x y > = 0
forall x y. x >= 0 /\ y >= 0 -> power x y >= 0 *)
@ lemma power_pos :
forall x y. x > 0 /\ y > = 0 - > power x y > 0
forall x y. x > 0 /\ y >= 0 -> power x y > 0 *)
(*@ lemma aux: forall x y z. x > 0 -> 0 <= y <= z ->
x * y <= x * z *)
(*@ lemma power_monotonic:
forall x n m:int. 0 < x /\ 0 <= n <= m -> power x n <= power x m *)
end
| null | https://raw.githubusercontent.com/ocaml-gospel/cameleer/fcf00fe27e0a41125880043aa9aa633399fc8cc2/stdlib/power.ml | ocaml | @ function one: t
@ function power (t: t) (x: integer) : t
@ axiom power_s : forall x: t, n: integer. n >= 0 ->
power x (n+1) = mul x (power x n)
@ lemma power_mult : forall x:t, n m : int. 0 <= n -> 0 <= m ->
power x (n * m) = power (power x n) m
@ axiom power_s : forall x: t, n: integer. n >= 0 ->
power x (n+1) = mul x (power x n)
@ lemma power_mult : forall x:t, n m : int. 0 <= n -> 0 <= m ->
power x (n * m) = power (power x n) m
@ lemma aux: forall x y z. x > 0 -> 0 <= y <= z ->
x * y <= x * z
@ lemma power_monotonic:
forall x n m:int. 0 < x /\ 0 <= n <= m -> power x n <= power x m | module type Exponentiation = sig
type t
@ function ( x y : t ) : t
@ axiom assoc : forall x y ( mul x y ) z = mul x ( mul y z )
@ axiom unit_def_l : forall x : one x = x
@ axiom unit_def_r : forall x : one = x
@ axiom power_0 : forall x : 0 = one
@ lemma power_s_alt : forall x : t , n : int . n > 0 - >
power x n ( power x ( n-1 ) )
power x n = mul x (power x (n-1)) *)
@ lemma power_1 : forall x : 1 = x
@ lemma power_sum : forall x : t , n m : int . 0 < = n - > 0 < = m - >
power x ( ) = mul ( power x n ) ( power x m )
power x (n+m) = mul (power x n) (power x m) *)
@ lemma power_comm1 : forall x y : y =
forall n : int . 0 < = n - >
( power x n ) y = mul y ( power x n )
forall n:int. 0 <= n ->
mul (power x n) y = mul y (power x n) *)
@ lemma power_comm2 : forall x y : y =
forall n : int . 0 < = n - >
power ( mul x y ) n = mul ( power x n ) ( power y n )
forall n:int. 0 <= n ->
power (mul x y) n = mul (power x n) (power y n) *)
end
module type Power = sig
type t = int
val power : int -> int -> int [@@logic]
@ function one : int = 1
@ function ( x y : t ) : t = x * y
@ axiom assoc : forall x y ( mul x y ) z = mul x ( mul y z )
@ axiom unit_def_l : forall x : one x = x
@ axiom unit_def_r : forall x : one = x
@ axiom power_0 : forall x : 0 = one
@ lemma power_s_alt : forall x : t , n : int . n > 0 - >
power x n ( power x ( n-1 ) )
power x n = mul x (power x (n-1)) *)
@ lemma power_1 : forall x : 1 = x
@ lemma power_sum : forall x : t , n m : int . 0 < = n - > 0 < = m - >
power x ( ) = mul ( power x n ) ( power x m )
power x (n+m) = mul (power x n) (power x m) *)
@ lemma power_comm1 : forall x y : y =
forall n : int . 0 < = n - >
( power x n ) y = mul y ( power x n )
forall n:int. 0 <= n ->
mul (power x n) y = mul y (power x n) *)
@ lemma power_comm2 : forall x y : y =
forall n : int . 0 < = n - >
power ( mul x y ) n = mul ( power x n ) ( power y n )
forall n:int. 0 <= n ->
power (mul x y) n = mul (power x n) (power y n) *)
@ lemma power_non_neg :
forall x y. x > = 0 /\ y > = 0 - > power x y > = 0
forall x y. x >= 0 /\ y >= 0 -> power x y >= 0 *)
@ lemma power_pos :
forall x y. x > 0 /\ y > = 0 - > power x y > 0
forall x y. x > 0 /\ y >= 0 -> power x y > 0 *)
end
|
8d9493166f6ff3c8b21e75e9489828bddccbdcdacfa6467e21704882df09f073 | ferd/dispcount | dispcount_serv.erl | %% In charge of relaying info about the supervisor when called.
-module(dispcount_serv).
-behaviour(gen_server).
-include("state.hrl").
-export([start_link/4, wait_for_dispatch/2, get_info/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
%%%%%%%%%%%%%%%%%
%%% INTERFACE %%%
%%%%%%%%%%%%%%%%%
-spec start_link(Parent::pid(), Name::atom(), {module(),[term()]}, [term(),...]) -> {ok, pid()}.
start_link(Parent, Name, {M,A}, Opts) ->
gen_server:start_link(?MODULE, {Parent, Name, {M,A}, Opts}, []).
-spec wait_for_dispatch(Name::atom(), infinity | pos_integer()) -> ok.
wait_for_dispatch(Name, Timeout) ->
gen_server:call(get_name(Name), wait_for_tables, Timeout).
-spec get_info(Name::atom()) -> {ok, #config{}}.
get_info(Name) ->
gen_server:call(get_name(Name), get_info).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%
GEN_SERVER CALLBACKS % % %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init({Parent, Name, {M,A}, Opts}) ->
%% This one needs to go fast because we're gonna mess up the synchronous
%% starts of servers for the sake of the pool. For this reason, we'll
%% temporarily use this process to receive all requests and just forward
%% them when the time has come, maybe.
ConfTmp = init_tables(Opts),
Conf = ConfTmp#config{dispatch_name=Name,
num_watchers=proplists:get_value(resources,Opts,10),
dispatch_mechanism=proplists:get_value(dispatch_mechanism,Opts,hash)},
SupSpec =
{{simple_one_for_one, proplists:get_value(maxr, Opts, 1), proplists:get_value(maxt, Opts, 60)},
[{watchers,
{dispcount_watcher, start_link, [Conf,{M,A}]},
proplists:get_value(restart,Opts,permanent),
proplists:get_value(shutdown,Opts,5000),
worker,
[dispcount_watcher,M]}]}, % <- check to make sure this can survive relups
ChildSpec = {watchers_sup, {watchers_sup, start_link, [SupSpec]},
permanent, infinity, supervisor, [watchers_sup]},
self() ! continue_init,
register(get_name(Name), self()),
{ok, {Parent, ChildSpec, Conf}}.
handle_call(get_info, _From, S = #config{}) ->
{reply, {ok, S}, S};
handle_call(wait_for_tables, _From, S = #config{watcher_type=atomics,num_watchers=N, dispatch_table=Atomics}) ->
case atomics:get(Atomics, 1) == N of
true ->
{reply, ok, S};
false ->
timer:sleep(1),
handle_call(wait_for_tables, _From, S)
end;
handle_call(wait_for_tables, _From, S = #config{num_watchers=N, dispatch_table=Tid}) ->
there should be N + 2 entries in the dispatch table
case ets:info(Tid, size) of
X when X =:= N+2 ->
{reply, ok, S};
_ ->
timer:sleep(1),
handle_call(wait_for_tables, _From, S)
end;
handle_call(_Call, _From, State) ->
{noreply, State}.
handle_cast(_Cast, State) ->
{noreply, State}.
handle_info(continue_init, {Parent, ChildSpec, Conf}) ->
Sup = case supervisor:start_child(Parent, ChildSpec) of
{error, {already_started, OldPid}} ->
%% get rid of old supervisor with stale references in it
ok = supervisor:terminate_child(Parent, watchers_sup),
ok = supervisor:delete_child(Parent, watchers_sup),
Ref = erlang:monitor(process, OldPid),
receive {'DOWN', Ref, process, OldPid, _} ->
ok
end,
{ok, S} = supervisor:start_child(Parent, ChildSpec),
S;
{ok, S} ->
S
end,
ok = start_watchers(Sup, Conf),
{noreply, Conf};
handle_info(_Info, State) ->
{noreply, State}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
terminate(_Reason, _State) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%
%%% PRIVATE & HELPERS %%%
%%%%%%%%%%%%%%%%%%%%%%%%%
init_tables(Opts) ->
case proplists:get_value(watcher_type, Opts, ets) of
ets -> %% here
Dispatch = ets:new(dispatch_table, [set, public, {write_concurrency,true}]),
Worker = ets:new(worker_table, [set, public, {read_concurrency,true}]),
true = ets:insert(Dispatch, {ct,0}),
true = ets:insert(Dispatch, {round_robin,0}),
#config{watcher_type = ets,
dispatch_table = Dispatch,
worker_table = Worker};
named -> %% here
Dispatch = ets:new(dispatch_table, [set, public, {write_concurrency,true}]),
true = ets:insert(Dispatch, {ct,0}),
true = ets:insert(Dispatch, {round_robin,0}),
#config{watcher_type = named,
dispatch_table = Dispatch,
worker_table = undefined};
atomics ->
allocate 2 extra atomics for the ct and round_robin
Atomics = atomics:new(proplists:get_value(resources,Opts,10) + 2, [{signed, false}]),
Worker = ets:new(worker_table, [set, public, {read_concurrency,true}]),
#config{watcher_type = atomics,
dispatch_table = Atomics,
worker_table = Worker};
Other ->
erlang:error({bad_option,{watcher_type,Other}})
end.
start_watchers(Sup, #config{num_watchers=Num}) ->
[start_watcher(Sup, Id) || Id <- lists:seq(1,Num)],
ok.
start_watcher(Sup, Id) ->
{ok, _} = supervisor:start_child(Sup, [Id]).
get_name(Name) ->
list_to_atom(atom_to_list(Name) ++ "_serv").
| null | https://raw.githubusercontent.com/ferd/dispcount/4befc7445b4a8c5f616c0dadfec76569a1f4452b/src/dispcount_serv.erl | erlang | In charge of relaying info about the supervisor when called.
INTERFACE %%%
% %
This one needs to go fast because we're gonna mess up the synchronous
starts of servers for the sake of the pool. For this reason, we'll
temporarily use this process to receive all requests and just forward
them when the time has come, maybe.
<- check to make sure this can survive relups
get rid of old supervisor with stale references in it
PRIVATE & HELPERS %%%
here
here | -module(dispcount_serv).
-behaviour(gen_server).
-include("state.hrl").
-export([start_link/4, wait_for_dispatch/2, get_info/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-spec start_link(Parent::pid(), Name::atom(), {module(),[term()]}, [term(),...]) -> {ok, pid()}.
start_link(Parent, Name, {M,A}, Opts) ->
gen_server:start_link(?MODULE, {Parent, Name, {M,A}, Opts}, []).
-spec wait_for_dispatch(Name::atom(), infinity | pos_integer()) -> ok.
wait_for_dispatch(Name, Timeout) ->
gen_server:call(get_name(Name), wait_for_tables, Timeout).
-spec get_info(Name::atom()) -> {ok, #config{}}.
get_info(Name) ->
gen_server:call(get_name(Name), get_info).
init({Parent, Name, {M,A}, Opts}) ->
ConfTmp = init_tables(Opts),
Conf = ConfTmp#config{dispatch_name=Name,
num_watchers=proplists:get_value(resources,Opts,10),
dispatch_mechanism=proplists:get_value(dispatch_mechanism,Opts,hash)},
SupSpec =
{{simple_one_for_one, proplists:get_value(maxr, Opts, 1), proplists:get_value(maxt, Opts, 60)},
[{watchers,
{dispcount_watcher, start_link, [Conf,{M,A}]},
proplists:get_value(restart,Opts,permanent),
proplists:get_value(shutdown,Opts,5000),
worker,
ChildSpec = {watchers_sup, {watchers_sup, start_link, [SupSpec]},
permanent, infinity, supervisor, [watchers_sup]},
self() ! continue_init,
register(get_name(Name), self()),
{ok, {Parent, ChildSpec, Conf}}.
handle_call(get_info, _From, S = #config{}) ->
{reply, {ok, S}, S};
handle_call(wait_for_tables, _From, S = #config{watcher_type=atomics,num_watchers=N, dispatch_table=Atomics}) ->
case atomics:get(Atomics, 1) == N of
true ->
{reply, ok, S};
false ->
timer:sleep(1),
handle_call(wait_for_tables, _From, S)
end;
handle_call(wait_for_tables, _From, S = #config{num_watchers=N, dispatch_table=Tid}) ->
there should be N + 2 entries in the dispatch table
case ets:info(Tid, size) of
X when X =:= N+2 ->
{reply, ok, S};
_ ->
timer:sleep(1),
handle_call(wait_for_tables, _From, S)
end;
handle_call(_Call, _From, State) ->
{noreply, State}.
handle_cast(_Cast, State) ->
{noreply, State}.
handle_info(continue_init, {Parent, ChildSpec, Conf}) ->
Sup = case supervisor:start_child(Parent, ChildSpec) of
{error, {already_started, OldPid}} ->
ok = supervisor:terminate_child(Parent, watchers_sup),
ok = supervisor:delete_child(Parent, watchers_sup),
Ref = erlang:monitor(process, OldPid),
receive {'DOWN', Ref, process, OldPid, _} ->
ok
end,
{ok, S} = supervisor:start_child(Parent, ChildSpec),
S;
{ok, S} ->
S
end,
ok = start_watchers(Sup, Conf),
{noreply, Conf};
handle_info(_Info, State) ->
{noreply, State}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
terminate(_Reason, _State) ->
ok.
init_tables(Opts) ->
case proplists:get_value(watcher_type, Opts, ets) of
Dispatch = ets:new(dispatch_table, [set, public, {write_concurrency,true}]),
Worker = ets:new(worker_table, [set, public, {read_concurrency,true}]),
true = ets:insert(Dispatch, {ct,0}),
true = ets:insert(Dispatch, {round_robin,0}),
#config{watcher_type = ets,
dispatch_table = Dispatch,
worker_table = Worker};
Dispatch = ets:new(dispatch_table, [set, public, {write_concurrency,true}]),
true = ets:insert(Dispatch, {ct,0}),
true = ets:insert(Dispatch, {round_robin,0}),
#config{watcher_type = named,
dispatch_table = Dispatch,
worker_table = undefined};
atomics ->
allocate 2 extra atomics for the ct and round_robin
Atomics = atomics:new(proplists:get_value(resources,Opts,10) + 2, [{signed, false}]),
Worker = ets:new(worker_table, [set, public, {read_concurrency,true}]),
#config{watcher_type = atomics,
dispatch_table = Atomics,
worker_table = Worker};
Other ->
erlang:error({bad_option,{watcher_type,Other}})
end.
start_watchers(Sup, #config{num_watchers=Num}) ->
[start_watcher(Sup, Id) || Id <- lists:seq(1,Num)],
ok.
start_watcher(Sup, Id) ->
{ok, _} = supervisor:start_child(Sup, [Id]).
get_name(Name) ->
list_to_atom(atom_to_list(Name) ++ "_serv").
|
0736e7577e564dcb505042e0b1708a1ab5da343f529d27cda26d55ac890f2848 | returntocorp/ocaml-tree-sitter-core | Protect_ident.mli |
Maintain a 1:1 map of translations from ' src ' to ' dst ' , ensuring ' dst '
is not reserved or already taken by a different translation .
Maintain a 1:1 map of translations from 'src' to 'dst', ensuring 'dst'
is not reserved or already taken by a different translation.
*)
(* Translation map. *)
type t
Initialize a translation map . ' reserved_dst ' specifies forbidden ' dst '
elements . It guarantees that no ' src ' string will be translated to
to one of these reserved elements .
Additionally , any ' src ' in the ' reserved_dst ' list is set to be translated
with a single underscore appended .
Example :
let map = create ~reserved_dst:["let " ] ( ) in
assert ( translate map " let " = " let _ " )
Initialize a translation map. 'reserved_dst' specifies forbidden 'dst'
elements. It guarantees that no 'src' string will be translated to
to one of these reserved elements.
Additionally, any 'src' in the 'reserved_dst' list is set to be translated
with a single underscore appended.
Example:
let map = create ~reserved_dst:["let"] () in
assert (translate map "let" = "let_")
*)
val create : ?reserved_dst:string list -> unit -> t
(* Translate a string 'src' to a string 'dst', ensuring that
'dst' is as close as possible to 'preferred_dst' and that
nothing else already translates to that 'dst'.
'preferred_dst' defaults to 'src'.
This translation is remembered, with the consequence that calling this
function later with the same arguments is guaranteed to return the same
result.
*)
val add_translation : ?preferred_dst:string -> t -> string -> string
(* Check for an existing translation. *)
val translate : t -> string -> string option
(* Lowercase identifiers that are keywords in OCaml. *)
val ocaml_keywords : string list
(* Lowercase identifiers that are built-in type names in OCaml. *)
val ocaml_builtin_types : string list
(* Union of ocaml_keywords and ocaml_builtin_types *)
val ocaml_reserved : string list
| null | https://raw.githubusercontent.com/returntocorp/ocaml-tree-sitter-core/66fbeabb8c3fec69a30a2e7f3eec41bc2d112d40/src/gen/lib/Protect_ident.mli | ocaml | Translation map.
Translate a string 'src' to a string 'dst', ensuring that
'dst' is as close as possible to 'preferred_dst' and that
nothing else already translates to that 'dst'.
'preferred_dst' defaults to 'src'.
This translation is remembered, with the consequence that calling this
function later with the same arguments is guaranteed to return the same
result.
Check for an existing translation.
Lowercase identifiers that are keywords in OCaml.
Lowercase identifiers that are built-in type names in OCaml.
Union of ocaml_keywords and ocaml_builtin_types |
Maintain a 1:1 map of translations from ' src ' to ' dst ' , ensuring ' dst '
is not reserved or already taken by a different translation .
Maintain a 1:1 map of translations from 'src' to 'dst', ensuring 'dst'
is not reserved or already taken by a different translation.
*)
type t
Initialize a translation map . ' reserved_dst ' specifies forbidden ' dst '
elements . It guarantees that no ' src ' string will be translated to
to one of these reserved elements .
Additionally , any ' src ' in the ' reserved_dst ' list is set to be translated
with a single underscore appended .
Example :
let map = create ~reserved_dst:["let " ] ( ) in
assert ( translate map " let " = " let _ " )
Initialize a translation map. 'reserved_dst' specifies forbidden 'dst'
elements. It guarantees that no 'src' string will be translated to
to one of these reserved elements.
Additionally, any 'src' in the 'reserved_dst' list is set to be translated
with a single underscore appended.
Example:
let map = create ~reserved_dst:["let"] () in
assert (translate map "let" = "let_")
*)
val create : ?reserved_dst:string list -> unit -> t
val add_translation : ?preferred_dst:string -> t -> string -> string
val translate : t -> string -> string option
val ocaml_keywords : string list
val ocaml_builtin_types : string list
val ocaml_reserved : string list
|
e860832ca76e0620540374ba2338d307a9213c64815391043fdd19454a1560f5 | Hans-Halverson/myte | mir_builders.ml | open Basic_collections
open Mir
open Mir_builtin
open Mir_type
(*
* ============================
* Values
* ============================
*)
let uninit_value : Value.value = Value.Lit (Bool true)
let mk_value (value : Value.value) : Value.t = { id = mk_value_id (); value; uses = None }
let mk_uninit_value () : Value.t = { id = mk_value_id (); value = uninit_value; uses = None }
(*
* ============================
* Literals
* ============================
*)
let mk_bool_lit (b : bool) : Value.t = mk_value (Lit (Bool b))
let mk_byte_lit (n : Int8.t) : Value.t = mk_value (Lit (Byte n))
let mk_int_lit (n : int) : Value.t = mk_value (Lit (Int (Int32.of_int n)))
let mk_int_lit_of_int32 (n : Int32.t) : Value.t = mk_value (Lit (Int n))
let mk_long_lit (n : Int64.t) : Value.t = mk_value (Lit (Long n))
let mk_double_lit (n : Float.t) : Value.t = mk_value (Lit (Double n))
let mk_null_ptr_lit (type_ : Type.t) : Value.t = mk_value (Lit (NullPointer type_))
let mk_array_string_lit (string : string) : Value.t = mk_value (Lit (ArrayString string))
let rec mk_array_vtable_lit (funcs : Function.t list) : Value.t =
let value = mk_uninit_value () in
let size = List.length funcs in
let func_uses = List.map (fun func -> user_add_use ~user:value ~use:func.Function.value) funcs in
value.value <- Lit (ArrayVtable (size, func_uses));
value
and mk_aggregate_closure (ty : Type.t) (func : Function.t) : Value.t =
let value = mk_uninit_value () in
let func_use = user_add_use ~user:value ~use:func.value in
value.value <- Lit (AggregateClosure (ty, func_use));
value
(*
* ============================
* Instruction Constructors
* ============================
*)
(* Set a value to contain an instruction, without attaching to a block *)
and mk_blockless_instr f =
let value = mk_uninit_value () in
f ~value;
value
(* Set a value to contain an instruction, and append to the end of a block *)
and mk_block_instr ~block f =
let value = mk_uninit_value () in
f ~value;
append_instruction block value;
value
(* Set a value to contain an instruction *)
and set_instr ~(value : Value.t) ~(type_ : Type.t) ~(instr : Instruction.instr) : unit =
let instruction = { Instruction.type_; instr; prev = value; next = value; block = null_block } in
value.value <- Instr instruction
(* Set a value to contain an instruction and appends it the end of a block *)
and mk_instr ~(value : Value.t) ~(block : Block.t) ~(type_ : Type.t) ~(instr : Instruction.instr) :
Value.t =
set_instr ~value ~type_ ~instr;
append_instruction block value;
value
and set_phi_instr ~(value : Value.t) ~(type_ : Type.t) ~(args : Value.t BlockMap.t) : unit =
let args = BlockMap.map (fun arg -> user_add_use ~user:value ~use:arg) args in
set_instr ~value ~type_ ~instr:(Phi { args })
and mk_blockless_phi ~(type_ : Type.t) ~(args : Value.t BlockMap.t) : Value.t =
mk_blockless_instr (set_phi_instr ~type_ ~args)
and set_mov_instr ~(value : Value.t) ~(arg : Value.t) : unit =
let arg_use = user_add_use ~user:value ~use:arg in
set_instr ~value ~type_:(type_of_value arg) ~instr:(Mov arg_use)
and mk_blockless_mov ~(arg : Value.t) : Value.t = mk_blockless_instr (set_mov_instr ~arg)
and set_stack_alloc_instr ~(value : Value.t) ~(type_ : Type.t) : unit =
set_instr ~value ~type_:(Pointer type_) ~instr:(StackAlloc type_) |> ignore
and mk_blockless_stack_alloc ~(type_ : Type.t) : Value.t =
mk_blockless_instr (set_stack_alloc_instr ~type_)
and mk_stack_alloc ~(block : Block.t) ~(type_ : Type.t) : Value.t =
mk_block_instr ~block (set_stack_alloc_instr ~type_)
and set_load_instr ~(value : Value.t) ~(ptr : Value.t) : unit =
match type_of_value ptr with
| Pointer type_ ->
let ptr_use = user_add_use ~user:value ~use:ptr in
set_instr ~value ~type_ ~instr:(Load ptr_use)
| _ -> failwith "Load argument must be a pointer type"
and mk_load ~(block : Block.t) ~(ptr : Value.t) : Value.t =
mk_block_instr ~block (set_load_instr ~ptr)
and set_store_instr ~(instr_value : Value.t) ~(ptr : Value.t) ~(stored_value : Value.t) : unit =
if not (types_equal (pointer_value_element_type ptr) (type_of_value stored_value)) then
failwith "Stored pointer and value types do not match";
let ptr_use = user_add_use ~user:instr_value ~use:ptr in
let value_use = user_add_use ~user:instr_value ~use:stored_value in
ignore (set_instr ~value:instr_value ~type_:no_return_type ~instr:(Store (ptr_use, value_use)))
and mk_store_ ~(block : Block.t) ~(ptr : Value.t) ~(value : Value.t) : unit =
mk_block_instr ~block (fun ~value:instr_value ->
set_store_instr ~instr_value ~ptr ~stored_value:value)
|> ignore
and set_get_pointer_instr
~(value : Value.t)
?(pointer_offset : Value.t option = None)
~(type_ : Type.t)
~(ptr : Value.t)
~(offsets : Instruction.GetPointer.value_offset list)
() : unit =
if not (is_pointer_value ptr) then failwith "GetPointer argument must be a pointer type";
let ptr_use = user_add_use ~user:value ~use:ptr in
let pointer_offset_use =
Option.map (fun offset -> user_add_use ~user:value ~use:offset) pointer_offset
in
let use_offsets =
List.map
(fun offset ->
match offset with
| Instruction.GetPointer.PointerIndex arg ->
Instruction.GetPointer.PointerIndex (user_add_use ~user:value ~use:arg)
| FieldIndex index -> FieldIndex index)
offsets
in
set_instr
~value
~type_:(Pointer type_)
~instr:
(GetPointer { pointer = ptr_use; pointer_offset = pointer_offset_use; offsets = use_offsets })
and mk_get_pointer_instr
~(block : Block.t)
?(pointer_offset : Value.t option = None)
~(type_ : Type.t)
~(ptr : Value.t)
~(offsets : Instruction.GetPointer.value_offset list)
() : Value.t =
mk_block_instr ~block (set_get_pointer_instr ~pointer_offset ~type_ ~ptr ~offsets ())
and set_call_instr
~(value : Value.t) ~(func : Value.t) ~(args : Value.t list) ~(return : Type.t option) : unit =
if not (is_function_value func) then failwith "Call function argument must have function type";
let func_use = user_add_use ~user:value ~use:func in
let arg_uses = List.map (fun arg -> user_add_use ~user:value ~use:arg) args in
let (type_, has_return) =
match return with
| Some type_ -> (type_, true)
| None -> (no_return_type, false)
in
set_instr ~value ~type_ ~instr:(Call { func = Value func_use; args = arg_uses; has_return })
and mk_call ~(block : Block.t) ~(func : Value.t) ~(args : Value.t list) ~(return : Type.t option) :
Value.t =
mk_block_instr ~block (set_call_instr ~func ~args ~return)
and mk_call_ ~block ~func ~args ~return : unit = ignore (mk_call ~block ~func ~args ~return)
and set_call_builtin_instr
~(value : Value.t) ~(builtin : Builtin.t) ~(args : Value.t list) ~(return : Type.t option) :
unit =
let arg_uses = List.map (fun arg -> user_add_use ~user:value ~use:arg) args in
let (type_, has_return) =
match return with
| None -> (no_return_type, false)
| Some type_ -> (type_, true)
in
set_instr ~value ~type_ ~instr:(Call { func = MirBuiltin builtin; args = arg_uses; has_return })
and mk_builtin_return_ty (builtin : Builtin.t) (args : Type.t list) : Type.t option =
if builtin.name = myte_alloc.name then
Some (Pointer (List.hd args))
else
builtin.return_type
and mk_blockless_call_builtin
(builtin : Builtin.t) (args : Value.t list) (mk_return_ty_args : Type.t list) : Value.t =
let return = mk_builtin_return_ty builtin mk_return_ty_args in
mk_blockless_instr (set_call_builtin_instr ~builtin ~args ~return)
and mk_call_builtin
~(block : Block.t) (builtin : Builtin.t) (args : Value.t list) (mk_return_ty_args : Type.t list)
: Value.t =
let return = mk_builtin_return_ty builtin mk_return_ty_args in
mk_block_instr ~block (set_call_builtin_instr ~builtin ~args ~return)
and mk_call_builtin_no_return_ ~(block : Block.t) (builtin : Builtin.t) (args : Value.t list) =
mk_block_instr ~block (set_call_builtin_instr ~builtin ~args ~return:None) |> ignore
and set_ret_instr ~(value : Value.t) ~(arg : Value.t option) : unit =
let arg_use = Option.map (fun arg -> user_add_use ~user:value ~use:arg) arg in
set_instr ~value ~type_:no_return_type ~instr:(Ret arg_use)
and mk_ret_ ~(block : Block.t) ~(arg : Value.t option) : unit =
mk_block_instr ~block (set_ret_instr ~arg) |> ignore
and set_unary_instr ~(value : Value.t) ~(op : Instruction.unary_operation) ~(arg : Value.t) : unit =
if not (is_numeric_value arg) then failwith "Unary argument must be numeric value";
let arg_use = user_add_use ~user:value ~use:arg in
set_instr ~value ~type_:(type_of_value arg) ~instr:(Unary (op, arg_use))
and mk_unary ~(block : Block.t) ~(op : Instruction.unary_operation) ~(arg : Value.t) : Value.t =
mk_block_instr ~block (set_unary_instr ~op ~arg)
and set_binary_instr
~(value : Value.t) ~(op : Instruction.binary_operation) ~(left : Value.t) ~(right : Value.t) :
unit =
let is_shift_op = is_shift_op op in
if is_shift_op && not (is_integer_value left && is_integer_value right) then
failwith "Shift arguments must be integers"
else if
(not is_shift_op)
&& not (is_numeric_value left && is_numeric_value right && values_have_same_type left right)
then
failwith "Binary arguments must be numeric and have the same type";
let left_use = user_add_use ~user:value ~use:left in
let right_use = user_add_use ~user:value ~use:right in
set_instr ~value ~type_:(type_of_value left) ~instr:(Binary (op, left_use, right_use))
and mk_binary
~(block : Block.t) ~(op : Instruction.binary_operation) ~(left : Value.t) ~(right : Value.t) :
Value.t =
mk_block_instr ~block (set_binary_instr ~op ~left ~right)
and set_cmp_instr
~(value : Value.t) ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) : unit =
if not (is_comparable_value left && is_comparable_value right && values_have_same_type left right)
then
failwith "Cmp arguments must be numeric or pointers and have the same type";
let left_use = user_add_use ~user:value ~use:left in
let right_use = user_add_use ~user:value ~use:right in
set_instr ~value ~type_:Bool ~instr:(Cmp (cmp, left_use, right_use))
and mk_blockless_cmp ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) : Value.t
=
mk_blockless_instr (set_cmp_instr ~cmp ~left ~right)
and mk_cmp ~(block : Block.t) ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) :
Value.t =
mk_block_instr ~block (set_cmp_instr ~cmp ~left ~right)
and set_cast_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
if not (is_pointer_value arg && is_pointer_type type_) then
failwith "Cast arguments must be pointers";
let arg_use = user_add_use ~user:value ~use:arg in
set_instr ~value ~type_ ~instr:(Cast arg_use)
and mk_cast ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
mk_block_instr ~block (set_cast_instr ~arg ~type_)
(* Initialize [value] as an integer truncation. The argument must be an integer whose size is
   at least the size of the (integer) target type [type_]. *)
and set_trunc_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type >= size_of_type type_)
  then
    (* Fixed typo in message: "inters" -> "integers" *)
    failwith
      "Trunc arguments must be integers with type argument having smaller size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(Trunc arg_use)
(* Construct a truncation instruction that is not yet attached to any block *)
and mk_blockless_trunc ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_trunc_instr ~arg ~type_)
(* Construct a truncation instruction appended to the end of [block] *)
and mk_trunc ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_trunc_instr ~arg ~type_)
(* Initialize [value] as a sign extension. The argument must be an integer whose size is
   at most the size of the (integer) target type [type_]. *)
and set_sext_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type <= size_of_type type_)
  then
    failwith
      "SExt arguments must be integers with type argument having larger size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(SExt arg_use)
(* Construct a sign extension instruction that is not yet attached to any block *)
and mk_blockless_sext ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_sext_instr ~arg ~type_)
(* Construct a sign extension instruction appended to the end of [block] *)
and mk_sext ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_sext_instr ~arg ~type_)
(* Initialize [value] as a zero extension. The argument must be an integer whose size is
   at most the size of the (integer) target type [type_]. *)
and set_zext_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type <= size_of_type type_)
  then
    failwith
      "ZExt arguments must be integers with type argument having larger size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(ZExt arg_use)
(* Construct a zero extension instruction that is not yet attached to any block *)
and mk_blockless_zext ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_zext_instr ~arg ~type_)
(* Construct a zero extension instruction appended to the end of [block] *)
and mk_zext ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_zext_instr ~arg ~type_)
(* Initialize [value] as an integer-to-float conversion with result type [type_] *)
and set_int_to_float_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if not (is_integer_type arg_type && is_float_type type_) then
    failwith "IntToFloat must have integer argument converted to float type";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(IntToFloat arg_use)
(* Construct an integer-to-float conversion appended to the end of [block] *)
and mk_int_to_float ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_int_to_float_instr ~arg ~type_)
(* Initialize [value] as a float-to-integer conversion with result type [type_] *)
and set_float_to_int_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if not (is_float_type arg_type && is_integer_type type_) then
    (* Fixed copy-paste error: message previously named IntToFloat *)
    failwith "FloatToInt must have float argument converted to integer type";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(FloatToInt arg_use)
(* Construct a float-to-integer conversion appended to the end of [block] *)
and mk_float_to_int ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_float_to_int_instr ~arg ~type_)
(* Initialize [value] as an Unreachable terminator (produces no result) *)
and set_unreachable_instr ~(value : Value.t) : unit =
  set_instr ~value ~type_:no_return_type ~instr:Unreachable
(* Append an Unreachable terminator to [block], discarding the instruction value *)
and mk_unreachable_ ~(block : Block.t) : unit =
  mk_block_instr ~block set_unreachable_instr |> ignore
(* Initialize [value] as an unconditional Continue terminator to [continue] *)
and set_continue_instr ~(value : Value.t) ~(continue : Block.t) : unit =
  set_instr ~value ~type_:no_return_type ~instr:(Continue continue)
(* Append a Continue terminator to [block], discarding the instruction value *)
and mk_continue_ ~(block : Block.t) ~(continue : Block.t) : unit =
  mk_block_instr ~block (set_continue_instr ~continue) |> ignore
(* Initialize [value] as a conditional Branch terminator. [test] is used by the branch:
   true falls through to [continue], false jumps to [jump]. *)
and set_branch_instr ~(value : Value.t) ~(test : Value.t) ~(continue : Block.t) ~(jump : Block.t) :
    unit =
  let test_use = user_add_use ~user:value ~use:test in
  set_instr ~value ~type_:no_return_type ~instr:(Branch { test = test_use; continue; jump })
(* Append a conditional Branch terminator to [block], discarding the instruction value *)
and mk_branch_ ~(block : Block.t) ~(test : Value.t) ~(continue : Block.t) ~(jump : Block.t) : unit =
  mk_block_instr ~block (set_branch_instr ~test ~continue ~jump) |> ignore
(*
* ============================
* Globals
* ============================
*)
(* Create a global variable. If [init_val] is provided the global's value is registered as its
   user. The global's backing value node is tied back to the global via [Lit (Global _)]. *)
and mk_global
    ~(name : label)
    ~(loc : Loc.t)
    ~(type_ : Type.t)
    ~(init_val : Value.t option)
    ~(is_constant : bool) : Global.t =
  let value = mk_uninit_value () in
  let init_val_use = Option.map (fun init_val -> user_add_use ~user:value ~use:init_val) init_val in
  let global = { Global.loc; name; type_; init_val = init_val_use; is_constant; value } in
  (* Tie the knot: the value node's literal points back at the global record *)
  value.value <- Lit (Global global);
  global
(* Replace a global's initial value, releasing the use of the old initializer (if any) *)
and global_set_init ~(global : Global.t) ~(init : Value.t option) =
  (match global.init_val with
  | None -> ()
  | Some old_init_use -> remove_use old_init_use);
  let init_use = Option.map (fun init -> user_add_use ~user:global.value ~use:init) init in
  global.init_val <- init_use
(*
* ============================
* Functions
* ============================
*)
(* Create an empty function with no params, return type, or blocks. The function's backing
   value node is tied back to the function via [Lit (Function _)]. *)
and mk_function ~(name : label) : Function.t =
  let value = mk_uninit_value () in
  let func =
    {
      Function.name;
      loc = Loc.none;
      params = [];
      return_type = None;
      start_block = null_block;
      blocks = BlockSet.empty;
      value;
    }
  in
  (* Tie the knot: the value node's literal points back at the function record *)
  value.value <- Lit (Function func);
  func
(* Create a value node representing a function parameter of [func] *)
and mk_argument ~(func : Function.t) ~(decl_loc : Loc.t) ~(type_ : Type.t) : Value.t =
  let argument = { Argument.type_; func; decl_loc } in
  mk_value (Argument argument)
(* Apply [f] to every block of [func] (iteration order is BlockSet order) *)
and func_iter_blocks (func : Function.t) (f : Block.t -> unit) = BlockSet.iter f func.blocks
(*
* ============================
* Uses
* ============================
*)
(* Link two use nodes so that [u2] immediately follows [u1] in the circular use list *)
and add_use_link (u1 : Use.t) (u2 : Use.t) =
  u1.next <- u2;
  u2.prev <- u1
(* Splice use node [use] into [value]'s circular use list. An empty list becomes a single
   self-linked node; otherwise the node is inserted directly after the list head. *)
and add_value_use ~(value : Value.t) ~(use : Use.t) =
  match value.uses with
  | None ->
    value.uses <- Some use;
    add_use_link use use
  | Some first_use ->
    add_use_link use first_use.next;
    add_use_link first_use use
(* Record that [user] uses [use]: allocate a self-linked use node (cyclic record via let rec)
   and attach it to the used value's use list. Returns the node for storage in the user. *)
and user_add_use ~(user : Value.t) ~(use : Value.t) =
  let rec use_node = { Use.value = use; prev = use_node; next = use_node; user } in
  add_value_use ~value:use ~use:use_node;
  use_node
(* Unlink a use node from its value's circular use list, updating the list head if the head
   was the removed node. A single-element list empties entirely. *)
and remove_use (use : Use.t) =
  let value = use.value in
  if use.next == use then
    value.uses <- None
  else
    let prev = use.prev in
    let next = use.next in
    add_use_link prev next;
    (* use.next is untouched by the splice above, so it still names the old successor *)
    match value.uses with
    | Some first_use when first_use == use -> value.uses <- Some use.next
    | _ -> ()
(* Whether this value has at least one use *)
and value_has_uses (value : Value.t) : bool = value.uses <> None
(* Whether this value has exactly one use (the list head is self-linked) *)
and value_has_single_use (value : Value.t) : bool =
  match value.uses with
  | Some first_use when first_use.next == first_use -> true
  | _ -> false
(* Whether this use node is the only element of its circular list *)
and is_single_use (use : Use.t) : bool = use.next == use
(* Apply [f] to every use of [value]. The successor is read before calling [f], so [f] may
   safely remove the current use node. *)
and value_iter_uses ~(value : Value.t) (f : Use.t -> unit) =
  match value.uses with
  | None -> ()
  | Some first_use ->
    let rec iter current_use =
      let next_use = current_use.Use.next in
      f current_use;
      if next_use != first_use then iter next_use
    in
    iter first_use
(* Collect all uses of [value] into a list (reverse traversal order) *)
and value_get_uses ~(value : Value.t) : Use.t list =
  match value.uses with
  | None -> []
  | Some first_use ->
    let rec gather current_use acc =
      let next_use = current_use.Use.next in
      let acc = current_use :: acc in
      if next_use != first_use then
        gather next_use acc
      else
        acc
    in
    gather first_use []
(* Replace all uses of a value with another value. Uses are modified in place and all use links
   are updated appropriately. *)
and value_replace_uses ~(from : Value.t) ~(to_ : Value.t) =
  value_iter_uses ~value:from (fun use ->
      (* Change the value for this use in-place and attach to use list of new value *)
      use.value <- to_;
      add_value_use ~value:to_ ~use);
  from.uses <- None
(*
* ============================
* Instructions
* ============================
*)
(* Apply [f] to every operand use of [instr]. Instructions with no value operands
   (StackAlloc, Continue, Unreachable) invoke [f] zero times. *)
and instruction_iter_operands ~(instr : Instruction.t) (f : Use.t -> unit) =
  match instr.instr with
  | StackAlloc _
  | Continue _
  | Unreachable ->
    ()
  | Load operand
  | Unary (_, operand)
  | Cast operand
  | Trunc operand
  | SExt operand
  | ZExt operand
  | IntToFloat operand
  | FloatToInt operand
  | Mov operand
  | Branch { test = operand; _ } ->
    f operand
  | Store (operand1, operand2)
  | Binary (_, operand1, operand2)
  | Cmp (_, operand1, operand2) ->
    f operand1;
    f operand2
  | Ret operand_opt -> Option.iter f operand_opt
  | Phi { args } -> BlockMap.iter (fun _ use -> f use) args
  | Call { func; args; _ } ->
    (* A called MIR builtin is not a value operand, so only visit Value callees *)
    (match func with
    | Value value -> f value
    | MirBuiltin _ -> ());
    List.iter f args
  | GetPointer { pointer; pointer_offset; offsets } ->
    f pointer;
    Option.iter f pointer_offset;
    (* Field indices are static and carry no value operand *)
    List.iter
      (fun offset ->
        match offset with
        | Instruction.GetPointer.PointerIndex operand -> f operand
        | FieldIndex _ -> ())
      offsets
(*
* ============================
* Blocks
* ============================
*)
(* Create an empty block with a fresh id and register it in [func]'s block set *)
and mk_block ~(func : Function.t) : Block.t =
  let block =
    { Block.id = Block.mk_id (); func; instructions = None; prev_blocks = BlockSet.empty }
  in
  func.blocks <- BlockSet.add block func.blocks;
  block
(*
 * ============================
 * Block Instructions
 * ============================
 *)
(* Whether the block contains exactly one instruction (list head equals list tail) *)
and has_single_instruction (block : Block.t) : bool =
  match block.instructions with
  | Some { first; last } when first == last -> true
  | _ -> false
(* Link two instruction values so that [instr_val2] immediately follows [instr_val1] in a
   block's circular instruction list *)
and add_instr_link (instr_val1 : Value.t) (instr_val2 : Value.t) =
  let instr1 = cast_to_instruction instr_val1 in
  let instr2 = cast_to_instruction instr_val2 in
  instr1.next <- instr_val2;
  instr2.prev <- instr_val1
(* Prepend an instruction to the beginning of a block's instruction list *)
and prepend_instruction (block : Block.t) (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  instr.block <- block;
  match block.instructions with
  | None -> block.instructions <- Some { first = instr_val; last = instr_val }
  | Some ({ first; last } as list) ->
    (* Splice before the old head and update the head pointer; the list stays circular *)
    add_instr_link instr_val first;
    add_instr_link last instr_val;
    list.first <- instr_val
(* Append an instruction to the end of a block's instruction list *)
and append_instruction (block : Block.t) (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  instr.block <- block;
  match block.instructions with
  | None -> block.instructions <- Some { first = instr_val; last = instr_val }
  | Some ({ first; last } as list) ->
    (* Splice after the old tail and update the tail pointer; the list stays circular *)
    add_instr_link last instr_val;
    add_instr_link instr_val first;
    list.last <- instr_val
(* Insert an instruction immediately before another instruction in a block's instruction list *)
and insert_instruction_before ~(before : Value.t) (instr_val : Value.t) =
  let before_instr = cast_to_instruction before in
  let instr = cast_to_instruction instr_val in
  (* The inserted instruction joins the block of the anchor instruction *)
  let block = before_instr.block in
  instr.block <- block;
  match block.instructions with
  | None -> failwith "Block must have before instruction"
  | Some list ->
    let prev_instr = before_instr.prev in
    add_instr_link prev_instr instr_val;
    add_instr_link instr_val before;
    (* Inserting before the head makes the new instruction the head *)
    if list.first == before then list.first <- instr_val
(* Remove an instruction. This removes the instruction from a block's instruction list, and
   removes the uses that correspond to each operand of this instruction. Phi instructions that
   used this instruction have the corresponding arguments removed. *)
and remove_instruction (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  let block = instr.block in
  instruction_iter_operands ~instr remove_use;
  value_iter_uses ~value:instr_val (fun use ->
      (* A phi can appear in its own arguments, in which case it is already being removed.
         Bug fix: inspect the USER of the use (use.user), not use.value — every node in
         instr_val's use list has use.value == instr_val (see assert_valid_use_list), so the
         previous check made this cleanup unreachable. *)
      if instr_val != use.user then
        match use.user.value with
        | Instr { instr = Phi phi; _ } ->
          phi_filter_args ~phi (fun _ arg_use -> arg_use.Use.value != instr_val)
        | _ -> ());
  (* Instruction list is circular, so check if single element list *)
  if instr.next == instr_val then
    block.instructions <- None
  else
    let prev = instr.prev in
    let next = instr.next in
    add_instr_link prev next;
    let list = Option.get block.instructions in
    if list.first == instr_val then list.first <- next;
    if list.last == instr_val then list.last <- prev
(* Concatenate the instructions in the second block to the end of the first block.
   This is a destructive operation on the second block's instructions. Removes the first block's
   terminator instruction. *)
and concat_instructions (b1 : Block.t) (b2 : Block.t) =
  (* Remove terminator from first block *)
  (match get_terminator_value b1 with
  | Some terminator -> remove_instruction terminator
  | None -> ());
  (* Concatenate lists of instructions *)
  iter_instructions b2 (fun _ instr -> instr.Instruction.block <- b1);
  match (b1.instructions, b2.instructions) with
  | (_, None) -> ()
  | (None, (Some _ as instrs)) -> b1.instructions <- instrs
  | (Some ({ first = first1; last = last1 } as list), Some { first = first2; last = last2 }) ->
    (* Join the two circular lists and extend b1's tail to b2's tail *)
    add_instr_link last1 first2;
    add_instr_link last2 first1;
    list.last <- last2
(* Split a block after an instruction into two separate blocks with no continue between them.
   Return a tuple of the (first block, second block). *)
and split_block_after_instruction (instr_value : Value.t) : Block.t * Block.t =
  let instr = cast_to_instruction instr_value in
  let first_block = instr.block in
  let second_block = mk_block ~func:first_block.func in
  let { Block.first = first_block_first; last = first_block_last } =
    Option.get first_block.instructions
  in
  (* Next blocks now have the second block as their prev block *)
  BlockSet.iter
    (fun next_block ->
      remove_block_link first_block next_block;
      add_block_link second_block next_block;
      map_phi_backreferences_for_block
        ~block:next_block
        ~from:first_block
        ~to_:(BlockSet.singleton second_block))
    (get_next_blocks first_block);
  (* If instruction is at end of block then first block is unchanged and second block is empty *)
  if instr_value == first_block_last then
    (first_block, second_block)
  else
    let instr_next = instr.next in
    (* Create circular linked lists of instructions for first and second blocks *)
    add_instr_link instr_value first_block_first;
    first_block.instructions <- Some { first = first_block_first; last = instr_value };
    add_instr_link first_block_last instr_next;
    second_block.instructions <- Some { first = instr_next; last = first_block_last };
    iter_instructions second_block (fun _ instr -> instr.block <- second_block);
    (first_block, second_block)
(* Replace an instruction with another value, removing the instruction and replacing all its
   uses with the other value. *)
and replace_instruction ~(from : Value.t) ~(to_ : Value.t) =
  value_replace_uses ~from ~to_;
  remove_instruction from
(* Apply [f] to every (value, instruction) pair in [block], in list order *)
and iter_instructions (block : Block.t) (f : Value.t -> Instruction.t -> unit) =
  match block.instructions with
  | None -> ()
  | Some { first; last } ->
    let rec iter current_val last_val f =
      (* Save next in case instruction is modified *)
      let current = cast_to_instruction current_val in
      let next = current.next in
      f current_val current;
      if current_val != last_val then iter next last_val f
    in
    iter first last f
(* Return the first instruction that matches the predicate, if such an instruction exists *)
and find_instruction (block : Block.t) (f : Value.t -> Instruction.t -> bool) : Value.t option =
  match block.instructions with
  | None -> None
  | Some { first; last } ->
    let rec iter current_val last_val f =
      let current = cast_to_instruction current_val in
      if f current_val current then
        Some current_val
      else if current_val == last_val then
        None
      else
        iter current.next last_val f
    in
    iter first last f
(* Remove every instruction of [block] for which [f] returns false *)
and filter_instructions (block : Block.t) (f : Instruction.t -> bool) =
  iter_instructions block (fun instr_val instr ->
      if not (f instr) then remove_instruction instr_val)
(* Fold [f] over every (value, instruction) pair in [block], in list order *)
and fold_instructions : 'a. Block.t -> 'a -> (Value.t -> Instruction.t -> 'a -> 'a) -> 'a =
 fun block acc f ->
  match block.instructions with
  | None -> acc
  | Some { first; last } ->
    let rec fold current_val last_val f acc =
      let current = cast_to_instruction current_val in
      let acc' = f current_val current acc in
      if current_val == last_val then
        acc'
      else
        fold current.Instruction.next last_val f acc'
    in
    fold first last f acc
(*
* ============================
* Block Phis
* ============================
*)
(* Whether the block starts with a phi. Phis are grouped at the start of a block, so checking
   the first instruction suffices. *)
and block_has_phis (block : Block.t) : bool =
  match block.instructions with
  | Some { first = { value = Instr { instr = Phi _; _ }; _ }; _ } -> true
  | _ -> false
(* Collect all phi nodes in [block] (reverse instruction order) *)
and block_get_phis (block : Block.t) : Instruction.Phi.t list =
  fold_instructions block [] (fun _ instr acc ->
      match instr with
      | { instr = Phi phi; _ } -> phi :: acc
      | _ -> acc)
(* Apply [f] to every phi instruction (value and phi payload) in [block] *)
and block_iter_phis (block : Block.t) (f : Value.t -> Instruction.Phi.t -> unit) =
  iter_instructions block (fun instr_val instr ->
      match instr with
      | { instr = Phi phi; _ } -> f instr_val phi
      | _ -> ())
(* Remove every phi of [block] for which [f] returns false; non-phis are untouched *)
and block_filter_phis (block : Block.t) (f : Value.id -> Instruction.Phi.t -> bool) =
  iter_instructions block (fun instr_val instr ->
      match instr with
      | { instr = Phi phi; _ } -> if not (f instr_val.id phi) then remove_instruction instr_val
      | _ -> ())
(* Fold [f] over every phi instruction in [block] *)
and block_fold_phis (block : Block.t) (acc : 'a) (f : Value.t -> Instruction.Phi.t -> 'a -> 'a) : 'a
    =
  fold_instructions block acc (fun instr_val instr acc ->
      match instr with
      | { instr = Phi phi; _ } -> f instr_val phi acc
      | _ -> acc)
(* Remove all phis from [block] *)
and block_clear_phis (block : Block.t) = block_filter_phis block (fun _ _ -> false)
(* Add an argument flowing from [block] with value [value] to the phi. The phi value
   [phi_val] is registered as a user of [value]. *)
and phi_add_arg
    ~(phi_val : Value.t) ~(phi : Instruction.Phi.t) ~(block : Block.t) ~(value : Value.t) =
  let use = user_add_use ~user:phi_val ~use:value in
  phi.args <- BlockMap.add block use phi.args
(* Remove the phi argument flowing from [block] (if any), releasing its use *)
and phi_remove_arg ~(phi : Instruction.Phi.t) ~(block : Block.t) =
  match BlockMap.find_opt block phi.args with
  | None -> ()
  | Some use ->
    remove_use use;
    phi.args <- BlockMap.remove block phi.args
(* Keep only the phi arguments for which [f] returns true, releasing the uses of the rest *)
and phi_filter_args ~(phi : Instruction.Phi.t) (f : Block.t -> Use.t -> bool) =
  phi.args <-
    BlockMap.filter
      (fun block use ->
        let keep = f block use in
        if not keep then remove_use use;
        keep)
      phi.args
(* If all phi args have the same value, return that value. Otherwise return None.
   Each argument is first passed through [map_value] before being compared. *)
and phi_get_single_arg_value_with_mapper ~(map_value : Value.t -> Value.t) (phi : Instruction.Phi.t)
    : Value.t option =
  match BlockMap.choose_opt phi.args with
  | None -> None
  | Some (_, first_use) ->
    let first_arg = map_value first_use.value in
    let has_single_arg_value =
      BlockMap.for_all
        (fun _ arg_use ->
          let arg = map_value arg_use.Use.value in
          values_equal arg first_arg)
        phi.args
    in
    if has_single_arg_value then
      Some first_arg
    else
      None
(* Like phi_get_single_arg_value_with_mapper but compares arguments directly *)
and phi_get_single_arg_value (phi : Instruction.Phi.t) : Value.t option =
  phi_get_single_arg_value_with_mapper ~map_value:Function_utils.id phi
(*
* ============================
* Block Graph
* ============================
*)
(* Record [prev_block] as a predecessor of [next_block] *)
and add_block_link (prev_block : Block.t) (next_block : Block.t) =
  next_block.prev_blocks <- BlockSet.add prev_block next_block.prev_blocks
(* Remove [prev_block] from [next_block]'s predecessor set *)
and remove_block_link (prev_block : Block.t) (next_block : Block.t) =
  next_block.prev_blocks <- BlockSet.remove prev_block next_block.prev_blocks
(* Return the set of all blocks that this block branches to *)
and get_next_blocks (block : Block.t) : BlockSet.t =
  match get_terminator block with
  | Some { instr = Continue continue; _ } -> BlockSet.singleton continue
  | Some { instr = Branch { test = _; jump; continue }; _ } ->
    BlockSet.add jump (BlockSet.singleton continue)
  | _ -> BlockSet.empty
(* Apply [f] to each successor of [block]. For a branch whose arms target the same block,
   [f] is called twice (once per arm), unlike get_next_blocks which dedupes. *)
and iter_next_blocks (block : Block.t) (f : Block.t -> unit) =
  match get_terminator block with
  | Some { instr = Continue continue; _ } -> f continue
  | Some { instr = Branch { test = _; jump; continue }; _ } ->
    f continue;
    f jump
  | _ -> ()
(*
 * ============================
 * Block Graph Mutation
 * ============================
 *)
(* Remove [block] if it is empty (terminator only) and safe to delete *)
and block_remove_if_empty (block : Block.t) =
  if can_remove_empty_block block then remove_block block
(* An empty block can be removed only if it continues to a single block, and is not needed by any
   phi nodes in its succeeding block. *)
and can_remove_empty_block (block : Block.t) =
  has_single_instruction block
  &&
  match get_terminator block with
  | Some { instr = Continue continue_block; _ } ->
    (* A block is needed if any of its previous blocks appear in a phi node of the next block, with
       a different value than the value from this block. If we were to remove this block, the value
       from its branch would be lost in the phi node. The start block can never be removed. *)
    let is_start_block = block.func.start_block == block in
    let continue_block_phis = block_get_phis continue_block in
    let block_needed_for_phi =
      (continue_block_phis <> [] && is_start_block)
      || List.exists
           (fun { Instruction.Phi.args; _ } ->
             BlockMap.exists
               (fun prev_block prev_block_arg ->
                 if BlockSet.mem prev_block block.prev_blocks then
                   not (values_equal prev_block_arg.Use.value (BlockMap.find block args).value)
                 else
                   false)
               args)
           continue_block_phis
    in
    (not block_needed_for_phi) && not is_start_block
  | _ -> false
(* A block is unreachable when it has no predecessors and is not the function's start block *)
and block_is_unreachable (block : Block.t) =
  BlockSet.is_empty block.prev_blocks && block.func.start_block != block
(* Worklist-driven removal of unreachable blocks. [init_func] seeds the worklist by calling the
   supplied [remove_if_unreachable] on candidate blocks; removing a block may make its
   successors unreachable, which are then processed transitively. [on_removed_block] is invoked
   on each block just before it is removed. *)
and remove_unreachable_blocks_from_init ?(on_removed_block : Block.t -> unit = ignore) init_func =
  let worklist = ref BlockSet.empty in
  let removed = ref BlockSet.empty in
  let remove_if_unreachable block =
    if (not (BlockSet.mem block !removed)) && block_is_unreachable block then (
      (* Successors may become unreachable once this block is gone *)
      iter_next_blocks block (fun next_block ->
          if not (BlockSet.mem next_block !removed) then
            worklist := BlockSet.add next_block !worklist);
      on_removed_block block;
      remove_block block;
      removed := BlockSet.add block !removed
    )
  in
  init_func remove_if_unreachable;
  while not (BlockSet.is_empty !worklist) do
    let block = BlockSet.choose !worklist in
    remove_if_unreachable block;
    worklist := BlockSet.remove block !worklist
  done
(* Remove [root_block] if unreachable, along with all blocks made unreachable by its removal *)
and remove_unreachable_blocks_from_root
    ?(on_removed_block : Block.t -> unit = ignore) (root_block : Block.t) =
  remove_unreachable_blocks_from_init ~on_removed_block (fun remove_if_unreachable ->
      remove_if_unreachable root_block)
(* Remove every unreachable block in [func] *)
and remove_unreachable_blocks_in_func (func : Function.t) =
  remove_unreachable_blocks_from_init (fun remove_if_unreachable ->
      func_iter_blocks func remove_if_unreachable)
(* Remove a block from its function, repairing the CFG, phis, and operand uses around it *)
and remove_block (block : Block.t) =
  (* Remove block from function. This may be the first block in the function. If so, update the
     function to point to the next block as the start. *)
  let func = block.func in
  func.blocks <- BlockSet.remove block func.blocks;
  (match get_terminator block with
  | Some { instr = Continue continue_block; _ } ->
    if func.start_block == block then func.start_block <- continue_block
  | _ -> ());
  (match get_terminator block with
  (* Only when removing unreachable blocks from branch pruning, which could include return block.
     Remove any instances of block from previous blocks. *)
  | Some { instr = Unreachable; _ } ->
    BlockSet.iter
      (fun prev_block ->
        match get_terminator prev_block with
        | Some ({ instr = Continue _; _ } as term_instr) -> term_instr.instr <- Unreachable
        | Some ({ instr = Branch { test; continue; jump }; _ } as term_instr) ->
          term_instr.instr <-
            (if continue == block then
              if jump == block then
                Unreachable
              else
                Continue jump
            else
              Continue continue);
          remove_use test
        | _ -> failwith "Previous block must have branching terminator")
      block.prev_blocks
  | Some { instr = Continue next_block; _ } ->
    (* Update phis in next block to reference previous blocks instead of removed block *)
    map_phi_backreferences_for_block ~block:next_block ~from:block ~to_:block.prev_blocks;
    (* Rewrite next of previous blocks to point to next block instead of removed block *)
    BlockSet.iter
      (fun prev_block -> map_next_block prev_block ~from:block ~to_:next_block)
      block.prev_blocks
  | _ -> ());
  (* Remove links between this block and its next blocks and their phis *)
  BlockSet.iter
    (fun next_block ->
      remove_phi_backreferences_for_block ~block:next_block ~to_remove:block;
      remove_block_link block next_block)
    (get_next_blocks block);
  (* Remove all operand uses in instructions in the block *)
  iter_instructions block (fun _ instr -> instruction_iter_operands ~instr remove_use)
(* Merge adjacent blocks b1 and b2. Must only be called if b1 and b2 can be merged, meaning
   b1 only continues to b2 and b2 has no other previous blocks. *)
and merge_adjacent_blocks block1 block2 =
  let open Block in
  let map_block block =
    if block == block2 then
      block1
    else
      block
  in
  (* Use b2's next, but take care to reference b1 instead of b2 in the case of self references *)
  (match get_terminator block2 with
  | Some ({ instr = Continue continue; _ } as term_instr) ->
    term_instr.instr <- Continue (map_block continue)
  | Some ({ instr = Branch { test; continue; jump }; _ } as term_instr) ->
    term_instr.instr <- Branch { test; continue = map_block continue; jump = map_block jump }
  | _ -> ());
  concat_instructions block1 block2;
  (* References to the b2 block in phi nodes of blocks that succeed b2 should be rewritten
     to now reference b1 instead. *)
  let next_blocks = get_next_blocks block2 in
  BlockSet.iter
    (fun next_block ->
      map_phi_backreferences_for_block
        ~block:next_block
        ~from:block2
        ~to_:(BlockSet.singleton block1))
    next_blocks;
  (* Set prev pointers for blocks that succeed b2 to point to b1 instead *)
  BlockSet.iter
    (fun next_block ->
      remove_block_link block2 next_block;
      add_block_link block1 next_block)
    next_blocks;
  remove_block_link block1 block2;
  (* Remove b2 from remaining maps in context *)
  let func = block2.func in
  func.blocks <- BlockSet.remove block2 func.blocks
(* Collapse [block]'s branch terminator to a continue, keeping the arm selected by [to_keep]
   (true keeps the continue arm, false keeps the jump arm). Blocks that become unreachable as
   a result are removed, with [on_removed_block] invoked for each. *)
and prune_branch (to_keep : bool) (block : Block.t) ~(on_removed_block : Block.t -> unit) =
  match get_terminator block with
  | Some ({ instr = Branch { test; continue; jump }; _ } as terminator_instr) ->
    let (to_continue, to_prune) =
      if to_keep then
        (continue, jump)
      else
        (jump, continue)
    in
    (* Remove block link and set to continue to unpruned block *)
    remove_block_link block to_prune;
    remove_phi_backreferences_for_block ~block:to_prune ~to_remove:block;
    terminator_instr.instr <- Continue to_continue;
    remove_use test;
    (* Pruning a branch may cause other to become unreachable *)
    remove_unreachable_blocks_from_root ~on_removed_block to_prune
  | _ -> failwith "Expected branch terminator"
(* Split an edge between two blocks, inserting an empty block in the middle *)
and split_block_edge (prev_block : Block.t) (next_block : Block.t) : Block.t =
  let func = prev_block.func in
  (* Built manually (not via mk_block) so prev_blocks can be seeded with the split edge *)
  let new_block =
    {
      Block.id = Block.mk_id ();
      func;
      instructions = None;
      prev_blocks = BlockSet.singleton prev_block;
    }
  in
  mk_continue_ ~block:new_block ~continue:next_block;
  add_block_link new_block next_block;
  func.blocks <- BlockSet.add new_block func.blocks;
  map_next_block prev_block ~from:next_block ~to_:new_block;
  map_phi_backreferences_for_block
    ~block:next_block
    ~from:prev_block
    ~to_:(BlockSet.singleton new_block);
  new_block
(* Map block's next block from a block to another block. Do not update any phi references. *)
and map_next_block (block : Block.t) ~(from : Block.t) ~(to_ : Block.t) =
  let map_next_block maybe_from =
    if maybe_from == from then (
      remove_block_link block from;
      add_block_link block to_;
      to_
    ) else
      maybe_from
  in
  match get_terminator block with
  | Some ({ instr = Continue continue; _ } as term_instr) ->
    term_instr.instr <- Continue (map_next_block continue)
  | Some ({ instr = Branch { test; jump; continue }; _ } as term_instr) ->
    let new_continue = map_next_block continue in
    let new_jump = map_next_block jump in
    (* If both branches point to same block convert to continue *)
    (* NOTE(review): in the collapse case the [test] use is not released here, unlike
       prune_branch which calls remove_use — confirm callers do not rely on it remaining *)
    term_instr.instr <-
      (if new_continue == new_jump then
        Continue new_continue
      else
        Branch { test; continue = new_continue; jump = new_jump })
  | _ -> ()
(* Remove all references to a block from phi nodes of one of its next blocks.
   This may be needed when removing a block or block link. *)
and remove_phi_backreferences_for_block ~(block : Block.t) ~(to_remove : Block.t) =
  block_iter_phis block (fun _ phi ->
      phi_filter_args ~phi (fun prev_block _ -> prev_block != to_remove))
(* Replace all references to old_block_id in the phis of a block with new_block_ids. Note that there
   may be multiple new_block_ids, so a single phi argument may be expanded to multiple arguments.
   This may be needed when editing the program. *)
and map_phi_backreferences_for_block ~(block : Block.t) ~(from : Block.t) ~(to_ : BlockSet.t) =
  block_iter_phis block (fun phi_val phi ->
      match BlockMap.find_opt from phi.args with
      | None -> ()
      | Some use ->
        phi_remove_arg ~phi ~block:from;
        BlockSet.iter
          (fun to_block -> phi_add_arg ~phi_val ~phi ~block:to_block ~value:use.value)
          to_)
(*
* ============================
* Program
* ============================
*)
(* Visit every function in the program, ignoring the name key of the function table *)
and program_iter_funcs (program : Program.t) (f : Function.t -> unit) =
  SMap.iter (fun _name func -> f func) program.funcs
(* Visit every block of every function in the program *)
and program_iter_blocks (program : Program.t) (f : Block.t -> unit) =
  SMap.iter (fun _name func -> func_iter_blocks func f) program.funcs
(* Detach [func] from the program's function table, keyed by its name *)
and program_remove_func ~(program : Program.t) ~(func : Function.t) =
  program.funcs <- SMap.remove func.name program.funcs
(*
* ============================
* Validation
* ============================
*)
(* Validate every function in the program, failing loudly on the first inconsistency *)
and assert_valid_program (program : Program.t) =
  SMap.iter (fun _ func -> assert_valid_function func) program.funcs
(* Validate a function's CFG bookkeeping plus the instruction and use lists of each block *)
and assert_valid_function (func : Function.t) =
  assert_valid_function_cfg func;
  func_iter_blocks func (fun block ->
      assert_valid_instruction_list block;
      iter_instructions block (fun instr_value _ -> assert_valid_use_list ~value:instr_value))
(* Validate that each block's prev_blocks set and each phi's argument keys agree with the
   actual successor structure of the function's CFG. *)
and assert_valid_function_cfg (func : Function.t) =
  (* Create multimap of all previous blocks by visiting CFG *)
  let prev_blocks = ref BlockMMap.empty in
  func_iter_blocks func (fun block ->
      BlockSet.iter
        (fun next_block -> prev_blocks := BlockMMap.add next_block block !prev_blocks)
        (get_next_blocks block));
  func_iter_blocks func (fun block ->
      (* Check that prev blocks for each block matches the true CFG *)
      let prev_blocks_1 = block.prev_blocks in
      let prev_blocks_2 = BlockMMap.find_all block !prev_blocks in
      let is_subset_1 =
        BlockSet.for_all (fun block -> BlockSet.mem block prev_blocks_2) prev_blocks_1
      in
      let is_subset_2 =
        BlockSet.for_all (fun block -> BlockSet.mem block prev_blocks_1) prev_blocks_2
      in
      if (not is_subset_1) || not is_subset_2 then
        failwith "Previous blocks do not match structure of cfg\n";
      (* Check that each phi contains entries for all previous blocks *)
      block_iter_phis block (fun _ phi ->
          let phi_prev_blocks =
            BlockMap.fold (fun block _ acc -> BlockSet.add block acc) phi.args BlockSet.empty
          in
          if not (BlockSet.equal prev_blocks_1 phi_prev_blocks) then
            failwith
              (Printf.sprintf
                 "Phi does not have arguments for all previous blocks for block %s in func %s\n"
                 (Block.id_to_string block.id)
                 func.name)))
(* Utility function to check if a use list has a valid structure *)
and assert_valid_use_list ~(value : Value.t) =
  match value.uses with
  | None -> ()
  | Some first_use ->
    let rec iter current_use last_use =
      let next_use = current_use.Use.next in
      if next_use.prev != current_use then failwith "Link is not bidirectional";
      (* Every node in a value's use list must reference that value *)
      if current_use.value != value then failwith "Use does not have correct value";
      if next_use != last_use then iter next_use last_use
    in
    iter first_use first_use
(* Utility function to check if an instruction list has a valid structure *)
and assert_valid_instruction_list (block : Block.t) =
  match block.instructions with
  | None -> ()
  | Some { first = first_val; last = last_val } ->
    let first = cast_to_instruction first_val in
    let last = cast_to_instruction last_val in
    (* Head and tail must be joined, since the list is circular *)
    if first.prev != last_val || last.next != first_val then
      failwith
        (Printf.sprintf
           "List must be circular %B %B"
           (first.prev != last_val)
           (last.next != first_val));
    let rec iter current_val last_val =
      let current = cast_to_instruction current_val in
      let current_next = cast_to_instruction current.next in
      if current.block != block then failwith "Instruction does not have correct block";
      if current_next.prev != current_val then failwith "Link is not bidirectional";
      if current.next != last_val then iter current.next last_val
    in
    iter first_val last_val
| null | https://raw.githubusercontent.com/Hans-Halverson/myte/05cb60b0ed2d0ea42d43fc80ac6d25e2085d624e/src/mir/mir_builders.ml | ocaml |
* ============================
* Values
* ============================
* ============================
* Literals
* ============================
* ============================
* Instruction Constructors
* ============================
Set a value to contain an instruction, without attaching to a block
Set a value to contain an instruction, and append to the end of a block
Set a value to contain an instruction
Set a value to contain an instruction and appends it the end of a block
* ============================
* Globals
* ============================
* ============================
* Functions
* ============================
* ============================
* Uses
* ============================
Replace all uses of a value with another value. Uses are modified in place and all use links
are updated appropriately.
Change the value for this use in-place and attach to use list of new value
* ============================
* Instructions
* ============================
* ============================
* Blocks
* ============================
* ============================
* Block Instructions
* ============================
Prepend an instruction to the beginning of a block's instruction list
Append an instruction to the end of a block's instruction list
Insert an instruction immediately before another instruction in a block's instruction list
Remove an instruction. This removes the instruction from a block's instruction list, and
removes the uses that correspond to each operand of this instruction.
A phi can appear in its own arguments, in which case it is already being removed
Instruction list is circular, so check if single element list
Replace an instruction with another value, removing the instruction and replacing all its
uses with the other value.
Save next in case instruction is modified
* ============================
* Block Phis
* ============================
If all phi args have the same value, return that value. Otherwise return None.
* ============================
* Block Graph
* ============================
Return the set of all blocks that this block branches to
An empty block can be removed only if it continues to a single block, and is not needed by any
phi nodes in its succeeding block.
Only when removing unreachable blocks from branch pruning, which could include return block.
Remove any instances of block from previous blocks.
Update phis in next block to reference previous blocks instead of removed block
Rewrite next of previous blocks to point to next block instead of removed block
Remove links between this block and its next blocks and their phis
Remove all operand uses in instructions in the block
Merge adjacent blocks b1 and b2. Must only be called if b1 and b2 can be merged, meaning
b1 only continues to b2 and b2 has no other previous blocks.
Use b2's next, but take care to reference b1 instead of b2 in the case of self references
Set prev pointers for blocks that succeed b2 to point to b1 instead
Remove b2 from remaining maps in context
Remove block link and set to continue to unpruned block
Pruning a branch may cause other to become unreachable
If both branches point to same block convert to continue
* ============================
* Program
* ============================
* ============================
* Validation
* ============================
Check that each phi contains entries for all previous blocks
Utility function to check if a use list has a valid structure
Utility function to check if an instruction list has a valid structure | open Basic_collections
open Mir
open Mir_builtin
open Mir_type
(* Placeholder contents for a value that has been allocated but not yet filled in *)
let uninit_value : Value.value = Value.Lit (Bool true)

(* Allocate a fresh value with a unique id and an empty use list *)
let mk_value (value : Value.value) : Value.t = { id = mk_value_id (); value; uses = None }

(* Allocate a fresh value whose contents will be set later (e.g. by set_instr) *)
let mk_uninit_value () : Value.t = { id = mk_value_id (); value = uninit_value; uses = None }

(* Literal constructors, one per literal kind *)
let mk_bool_lit (b : bool) : Value.t = mk_value (Lit (Bool b))

let mk_byte_lit (n : Int8.t) : Value.t = mk_value (Lit (Byte n))

let mk_int_lit (n : int) : Value.t = mk_value (Lit (Int (Int32.of_int n)))

let mk_int_lit_of_int32 (n : Int32.t) : Value.t = mk_value (Lit (Int n))

let mk_long_lit (n : Int64.t) : Value.t = mk_value (Lit (Long n))

let mk_double_lit (n : Float.t) : Value.t = mk_value (Lit (Double n))

let mk_null_ptr_lit (type_ : Type.t) : Value.t = mk_value (Lit (NullPointer type_))

let mk_array_string_lit (string : string) : Value.t = mk_value (Lit (ArrayString string))
(* Build a vtable literal, registering a use of each function value *)
let rec mk_array_vtable_lit (funcs : Function.t list) : Value.t =
  let value = mk_uninit_value () in
  let size = List.length funcs in
  let func_uses = List.map (fun func -> user_add_use ~user:value ~use:func.Function.value) funcs in
  value.value <- Lit (ArrayVtable (size, func_uses));
  value

(* Build a closure aggregate literal that holds a use of its function *)
and mk_aggregate_closure (ty : Type.t) (func : Function.t) : Value.t =
  let value = mk_uninit_value () in
  let func_use = user_add_use ~user:value ~use:func.value in
  value.value <- Lit (AggregateClosure (ty, func_use));
  value

(* Set a value to contain an instruction, without attaching to a block.
   [f] is one of the set_*_instr functions below. *)
and mk_blockless_instr f =
  let value = mk_uninit_value () in
  f ~value;
  value

(* Set a value to contain an instruction, and append it to the end of a block *)
and mk_block_instr ~block f =
  let value = mk_uninit_value () in
  f ~value;
  append_instruction block value;
  value

(* Fill [value] with an instruction; prev/next initially point at itself and the
   block is the sentinel null_block until the instruction is attached. *)
and set_instr ~(value : Value.t) ~(type_ : Type.t) ~(instr : Instruction.instr) : unit =
  let instruction = { Instruction.type_; instr; prev = value; next = value; block = null_block } in
  value.value <- Instr instruction

(* Fill [value] with an instruction and append it to the end of [block] *)
and mk_instr ~(value : Value.t) ~(block : Block.t) ~(type_ : Type.t) ~(instr : Instruction.instr) :
    Value.t =
  set_instr ~value ~type_ ~instr;
  append_instruction block value;
  value

(* Phi instruction: one use per incoming block *)
and set_phi_instr ~(value : Value.t) ~(type_ : Type.t) ~(args : Value.t BlockMap.t) : unit =
  let args = BlockMap.map (fun arg -> user_add_use ~user:value ~use:arg) args in
  set_instr ~value ~type_ ~instr:(Phi { args })

and mk_blockless_phi ~(type_ : Type.t) ~(args : Value.t BlockMap.t) : Value.t =
  mk_blockless_instr (set_phi_instr ~type_ ~args)

(* Mov instruction: copies its argument, so shares its type *)
and set_mov_instr ~(value : Value.t) ~(arg : Value.t) : unit =
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_:(type_of_value arg) ~instr:(Mov arg_use)

and mk_blockless_mov ~(arg : Value.t) : Value.t = mk_blockless_instr (set_mov_instr ~arg)

(* StackAlloc produces a pointer to the allocated type *)
and set_stack_alloc_instr ~(value : Value.t) ~(type_ : Type.t) : unit =
  set_instr ~value ~type_:(Pointer type_) ~instr:(StackAlloc type_) |> ignore

and mk_blockless_stack_alloc ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_stack_alloc_instr ~type_)

and mk_stack_alloc ~(block : Block.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_stack_alloc_instr ~type_)
(* Load instruction: result type is the pointee type of the pointer argument *)
and set_load_instr ~(value : Value.t) ~(ptr : Value.t) : unit =
  match type_of_value ptr with
  | Pointer type_ ->
    let ptr_use = user_add_use ~user:value ~use:ptr in
    set_instr ~value ~type_ ~instr:(Load ptr_use)
  | _ -> failwith "Load argument must be a pointer type"

and mk_load ~(block : Block.t) ~(ptr : Value.t) : Value.t =
  mk_block_instr ~block (set_load_instr ~ptr)

(* Store instruction: has no result; pointee type must match the stored value's type *)
and set_store_instr ~(instr_value : Value.t) ~(ptr : Value.t) ~(stored_value : Value.t) : unit =
  if not (types_equal (pointer_value_element_type ptr) (type_of_value stored_value)) then
    failwith "Stored pointer and value types do not match";
  let ptr_use = user_add_use ~user:instr_value ~use:ptr in
  let value_use = user_add_use ~user:instr_value ~use:stored_value in
  ignore (set_instr ~value:instr_value ~type_:no_return_type ~instr:(Store (ptr_use, value_use)))

and mk_store_ ~(block : Block.t) ~(ptr : Value.t) ~(value : Value.t) : unit =
  mk_block_instr ~block (fun ~value:instr_value ->
      set_store_instr ~instr_value ~ptr ~stored_value:value)
  |> ignore
(* GetPointer instruction: computes a derived pointer of type [Pointer type_] from a base
   pointer, an optional whole-pointer offset, and a list of index/field offsets. Each value
   operand is registered as a use of the new instruction. *)
and set_get_pointer_instr
    ~(value : Value.t)
    ?(pointer_offset : Value.t option = None)
    ~(type_ : Type.t)
    ~(ptr : Value.t)
    ~(offsets : Instruction.GetPointer.value_offset list)
    () : unit =
  if not (is_pointer_value ptr) then failwith "GetPointer argument must be a pointer type";
  let ptr_use = user_add_use ~user:value ~use:ptr in
  let pointer_offset_use =
    Option.map (fun offset -> user_add_use ~user:value ~use:offset) pointer_offset
  in
  (* Only PointerIndex offsets carry a value operand; FieldIndex is a plain int *)
  let use_offsets =
    List.map
      (fun offset ->
        match offset with
        | Instruction.GetPointer.PointerIndex arg ->
          Instruction.GetPointer.PointerIndex (user_add_use ~user:value ~use:arg)
        | FieldIndex index -> FieldIndex index)
      offsets
  in
  set_instr
    ~value
    ~type_:(Pointer type_)
    ~instr:
      (GetPointer { pointer = ptr_use; pointer_offset = pointer_offset_use; offsets = use_offsets })

and mk_get_pointer_instr
    ~(block : Block.t)
    ?(pointer_offset : Value.t option = None)
    ~(type_ : Type.t)
    ~(ptr : Value.t)
    ~(offsets : Instruction.GetPointer.value_offset list)
    () : Value.t =
  mk_block_instr ~block (set_get_pointer_instr ~pointer_offset ~type_ ~ptr ~offsets ())
(* Call instruction through a function value. A [return] of None means the call's result
   type is no_return_type and has_return is false. *)
and set_call_instr
    ~(value : Value.t) ~(func : Value.t) ~(args : Value.t list) ~(return : Type.t option) : unit =
  if not (is_function_value func) then failwith "Call function argument must have function type";
  let func_use = user_add_use ~user:value ~use:func in
  let arg_uses = List.map (fun arg -> user_add_use ~user:value ~use:arg) args in
  let (type_, has_return) =
    match return with
    | Some type_ -> (type_, true)
    | None -> (no_return_type, false)
  in
  set_instr ~value ~type_ ~instr:(Call { func = Value func_use; args = arg_uses; has_return })

and mk_call ~(block : Block.t) ~(func : Value.t) ~(args : Value.t list) ~(return : Type.t option) :
    Value.t =
  mk_block_instr ~block (set_call_instr ~func ~args ~return)

(* Like mk_call but discards the resulting value *)
and mk_call_ ~block ~func ~args ~return : unit = ignore (mk_call ~block ~func ~args ~return)

(* Call instruction targeting a Mir builtin rather than a function value *)
and set_call_builtin_instr
    ~(value : Value.t) ~(builtin : Builtin.t) ~(args : Value.t list) ~(return : Type.t option) :
    unit =
  let arg_uses = List.map (fun arg -> user_add_use ~user:value ~use:arg) args in
  let (type_, has_return) =
    match return with
    | None -> (no_return_type, false)
    | Some type_ -> (type_, true)
  in
  set_instr ~value ~type_ ~instr:(Call { func = MirBuiltin builtin; args = arg_uses; has_return })

(* Determine a builtin's return type; myte_alloc is special-cased to return a pointer
   to its first type argument. *)
and mk_builtin_return_ty (builtin : Builtin.t) (args : Type.t list) : Type.t option =
  if builtin.name = myte_alloc.name then
    Some (Pointer (List.hd args))
  else
    builtin.return_type

and mk_blockless_call_builtin
    (builtin : Builtin.t) (args : Value.t list) (mk_return_ty_args : Type.t list) : Value.t =
  let return = mk_builtin_return_ty builtin mk_return_ty_args in
  mk_blockless_instr (set_call_builtin_instr ~builtin ~args ~return)

and mk_call_builtin
    ~(block : Block.t) (builtin : Builtin.t) (args : Value.t list) (mk_return_ty_args : Type.t list)
    : Value.t =
  let return = mk_builtin_return_ty builtin mk_return_ty_args in
  mk_block_instr ~block (set_call_builtin_instr ~builtin ~args ~return)

and mk_call_builtin_no_return_ ~(block : Block.t) (builtin : Builtin.t) (args : Value.t list) =
  mk_block_instr ~block (set_call_builtin_instr ~builtin ~args ~return:None) |> ignore

(* Ret terminator with an optional return value *)
and set_ret_instr ~(value : Value.t) ~(arg : Value.t option) : unit =
  let arg_use = Option.map (fun arg -> user_add_use ~user:value ~use:arg) arg in
  set_instr ~value ~type_:no_return_type ~instr:(Ret arg_use)

and mk_ret_ ~(block : Block.t) ~(arg : Value.t option) : unit =
  mk_block_instr ~block (set_ret_instr ~arg) |> ignore
(* Unary operation on a numeric value; result has the argument's type *)
and set_unary_instr ~(value : Value.t) ~(op : Instruction.unary_operation) ~(arg : Value.t) : unit =
  if not (is_numeric_value arg) then failwith "Unary argument must be numeric value";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_:(type_of_value arg) ~instr:(Unary (op, arg_use))

and mk_unary ~(block : Block.t) ~(op : Instruction.unary_operation) ~(arg : Value.t) : Value.t =
  mk_block_instr ~block (set_unary_instr ~op ~arg)

(* Binary operation. Shifts only require both operands to be integers (types may differ);
   all other ops require numeric operands of the same type. Result has the left type. *)
and set_binary_instr
    ~(value : Value.t) ~(op : Instruction.binary_operation) ~(left : Value.t) ~(right : Value.t) :
    unit =
  let is_shift_op = is_shift_op op in
  if is_shift_op && not (is_integer_value left && is_integer_value right) then
    failwith "Shift arguments must be integers"
  else if
    (not is_shift_op)
    && not (is_numeric_value left && is_numeric_value right && values_have_same_type left right)
  then
    failwith "Binary arguments must be numeric and have the same type";
  let left_use = user_add_use ~user:value ~use:left in
  let right_use = user_add_use ~user:value ~use:right in
  set_instr ~value ~type_:(type_of_value left) ~instr:(Binary (op, left_use, right_use))

and mk_binary
    ~(block : Block.t) ~(op : Instruction.binary_operation) ~(left : Value.t) ~(right : Value.t) :
    Value.t =
  mk_block_instr ~block (set_binary_instr ~op ~left ~right)

(* Comparison of two same-typed comparable (numeric or pointer) values; result is Bool *)
and set_cmp_instr
    ~(value : Value.t) ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) : unit =
  if not (is_comparable_value left && is_comparable_value right && values_have_same_type left right)
  then
    failwith "Cmp arguments must be numeric or pointers and have the same type";
  let left_use = user_add_use ~user:value ~use:left in
  let right_use = user_add_use ~user:value ~use:right in
  set_instr ~value ~type_:Bool ~instr:(Cmp (cmp, left_use, right_use))

and mk_blockless_cmp ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) : Value.t
    =
  mk_blockless_instr (set_cmp_instr ~cmp ~left ~right)

and mk_cmp ~(block : Block.t) ~(cmp : Instruction.comparison) ~(left : Value.t) ~(right : Value.t) :
    Value.t =
  mk_block_instr ~block (set_cmp_instr ~cmp ~left ~right)
(* Pointer-to-pointer cast; both the argument and target type must be pointers *)
and set_cast_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  if not (is_pointer_value arg && is_pointer_type type_) then
    failwith "Cast arguments must be pointers";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(Cast arg_use)

and mk_cast ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_cast_instr ~arg ~type_)
(* Trunc instruction: truncate an integer to a narrower (or equal-sized) integer type *)
and set_trunc_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type >= size_of_type type_)
  then
    (* Fixed typo in message: "inters" -> "integers" *)
    failwith
      "Trunc arguments must be integers with type argument having smaller size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(Trunc arg_use)

and mk_blockless_trunc ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_trunc_instr ~arg ~type_)

and mk_trunc ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_trunc_instr ~arg ~type_)
(* SExt instruction: sign extend an integer to a wider (or equal-sized) integer type *)
and set_sext_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type <= size_of_type type_)
  then
    failwith
      "SExt arguments must be integers with type argument having larger size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(SExt arg_use)

and mk_blockless_sext ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_sext_instr ~arg ~type_)

and mk_sext ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_sext_instr ~arg ~type_)

(* ZExt instruction: zero extend an integer to a wider (or equal-sized) integer type *)
and set_zext_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if
    not
      (is_integer_type arg_type
      && is_integer_type type_
      && size_of_type arg_type <= size_of_type type_)
  then
    failwith
      "ZExt arguments must be integers with type argument having larger size than value argument";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(ZExt arg_use)

and mk_blockless_zext ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_blockless_instr (set_zext_instr ~arg ~type_)

and mk_zext ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_zext_instr ~arg ~type_)

(* IntToFloat instruction: convert an integer value to a float type *)
and set_int_to_float_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if not (is_integer_type arg_type && is_float_type type_) then
    failwith "IntToFloat must have integer argument converted to float type";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(IntToFloat arg_use)

and mk_int_to_float ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_int_to_float_instr ~arg ~type_)
(* FloatToInt instruction: convert a float value to an integer type *)
and set_float_to_int_instr ~(value : Value.t) ~(arg : Value.t) ~(type_ : Type.t) : unit =
  let arg_type = type_of_value arg in
  if not (is_float_type arg_type && is_integer_type type_) then
    (* Fixed copy-paste bug: message previously said "IntToFloat" *)
    failwith "FloatToInt must have float argument converted to integer type";
  let arg_use = user_add_use ~user:value ~use:arg in
  set_instr ~value ~type_ ~instr:(FloatToInt arg_use)

and mk_float_to_int ~(block : Block.t) ~(arg : Value.t) ~(type_ : Type.t) : Value.t =
  mk_block_instr ~block (set_float_to_int_instr ~arg ~type_)
(* Unreachable terminator: marks a block that can never be executed *)
and set_unreachable_instr ~(value : Value.t) : unit =
  set_instr ~value ~type_:no_return_type ~instr:Unreachable

and mk_unreachable_ ~(block : Block.t) : unit =
  mk_block_instr ~block set_unreachable_instr |> ignore

(* Continue terminator: unconditional jump to [continue] *)
and set_continue_instr ~(value : Value.t) ~(continue : Block.t) : unit =
  set_instr ~value ~type_:no_return_type ~instr:(Continue continue)

and mk_continue_ ~(block : Block.t) ~(continue : Block.t) : unit =
  mk_block_instr ~block (set_continue_instr ~continue) |> ignore

(* Branch terminator: falls through to [continue] or jumps to [jump] based on [test] *)
and set_branch_instr ~(value : Value.t) ~(test : Value.t) ~(continue : Block.t) ~(jump : Block.t) :
    unit =
  let test_use = user_add_use ~user:value ~use:test in
  set_instr ~value ~type_:no_return_type ~instr:(Branch { test = test_use; continue; jump })

and mk_branch_ ~(block : Block.t) ~(test : Value.t) ~(continue : Block.t) ~(jump : Block.t) : unit =
  mk_block_instr ~block (set_branch_instr ~test ~continue ~jump) |> ignore
(* Create a global, registering its (optional) initial value as a use of the global's value *)
and mk_global
    ~(name : label)
    ~(loc : Loc.t)
    ~(type_ : Type.t)
    ~(init_val : Value.t option)
    ~(is_constant : bool) : Global.t =
  let value = mk_uninit_value () in
  let init_val_use = Option.map (fun init_val -> user_add_use ~user:value ~use:init_val) init_val in
  let global = { Global.loc; name; type_; init_val = init_val_use; is_constant; value } in
  value.value <- Lit (Global global);
  global

(* Replace a global's initial value, releasing the use on the old one (if any) *)
and global_set_init ~(global : Global.t) ~(init : Value.t option) =
  (match global.init_val with
  | None -> ()
  | Some old_init_use -> remove_use old_init_use);
  let init_use = Option.map (fun init -> user_add_use ~user:global.value ~use:init) init in
  global.init_val <- init_use

(* Create an empty function; loc, params, return type, and start block are filled in later *)
and mk_function ~(name : label) : Function.t =
  let value = mk_uninit_value () in
  let func =
    {
      Function.name;
      loc = Loc.none;
      params = [];
      return_type = None;
      start_block = null_block;
      blocks = BlockSet.empty;
      value;
    }
  in
  value.value <- Lit (Function func);
  func

(* Create a function parameter value *)
and mk_argument ~(func : Function.t) ~(decl_loc : Loc.t) ~(type_ : Type.t) : Value.t =
  let argument = { Argument.type_; func; decl_loc } in
  mk_value (Argument argument)

(* Apply [f] to every block of [func] *)
and func_iter_blocks (func : Function.t) (f : Block.t -> unit) = BlockSet.iter f func.blocks
(* Uses of a value form a circular doubly-linked list threaded through Use nodes.
   Link u1 -> u2 in that list. *)
and add_use_link (u1 : Use.t) (u2 : Use.t) =
  u1.next <- u2;
  u2.prev <- u1

(* Insert [use] into [value]'s use list (right after the first use, or as the sole node) *)
and add_value_use ~(value : Value.t) ~(use : Use.t) =
  match value.uses with
  | None ->
    value.uses <- Some use;
    add_use_link use use
  | Some first_use ->
    add_use_link use first_use.next;
    add_use_link first_use use

(* Record that [user] uses [use]: allocate a Use node (initially self-linked) and add it
   to [use]'s use list. Returns the new Use node for storage in the user's instruction. *)
and user_add_use ~(user : Value.t) ~(use : Value.t) =
  let rec use_node = { Use.value = use; prev = use_node; next = use_node; user } in
  add_value_use ~value:use ~use:use_node;
  use_node

(* Unlink [use] from its value's use list, fixing up the list head if needed *)
and remove_use (use : Use.t) =
  let value = use.value in
  if use.next == use then
    value.uses <- None
  else
    let prev = use.prev in
    let next = use.next in
    add_use_link prev next;
    match value.uses with
    | Some first_use when first_use == use -> value.uses <- Some use.next
    | _ -> ()

and value_has_uses (value : Value.t) : bool = value.uses <> None

(* True when the use list contains exactly one node *)
and value_has_single_use (value : Value.t) : bool =
  match value.uses with
  | Some first_use when first_use.next == first_use -> true
  | _ -> false

and is_single_use (use : Use.t) : bool = use.next == use

(* Apply [f] to every use of [value]. Captures next before calling [f], so [f] may
   remove the current use. *)
and value_iter_uses ~(value : Value.t) (f : Use.t -> unit) =
  match value.uses with
  | None -> ()
  | Some first_use ->
    let rec iter current_use =
      let next_use = current_use.Use.next in
      f current_use;
      if next_use != first_use then iter next_use
    in
    iter first_use

(* Collect all uses of [value] into a list *)
and value_get_uses ~(value : Value.t) : Use.t list =
  match value.uses with
  | None -> []
  | Some first_use ->
    let rec gather current_use acc =
      let next_use = current_use.Use.next in
      let acc = current_use :: acc in
      if next_use != first_use then
        gather next_use acc
      else
        acc
    in
    gather first_use []

(* Replace all uses of a value with another value. Uses are modified in place and
   moved onto [to_]'s use list; [from] ends with no uses. *)
and value_replace_uses ~(from : Value.t) ~(to_ : Value.t) =
  value_iter_uses ~value:from (fun use ->
      use.value <- to_;
      add_value_use ~value:to_ ~use);
  from.uses <- None
(* Apply [f] to every value operand (Use node) of an instruction, covering all
   instruction variants. Instructions with no value operands are no-ops. *)
and instruction_iter_operands ~(instr : Instruction.t) (f : Use.t -> unit) =
  match instr.instr with
  | StackAlloc _
  | Continue _
  | Unreachable ->
    ()
  | Load operand
  | Unary (_, operand)
  | Cast operand
  | Trunc operand
  | SExt operand
  | ZExt operand
  | IntToFloat operand
  | FloatToInt operand
  | Mov operand
  | Branch { test = operand; _ } ->
    f operand
  | Store (operand1, operand2)
  | Binary (_, operand1, operand2)
  | Cmp (_, operand1, operand2) ->
    f operand1;
    f operand2
  | Ret operand_opt -> Option.iter f operand_opt
  | Phi { args } -> BlockMap.iter (fun _ use -> f use) args
  | Call { func; args; _ } ->
    (* A call's callee is an operand only when it is a value, not a builtin *)
    (match func with
    | Value value -> f value
    | MirBuiltin _ -> ());
    List.iter f args
  | GetPointer { pointer; pointer_offset; offsets } ->
    f pointer;
    Option.iter f pointer_offset;
    (* Only PointerIndex offsets carry a value operand *)
    List.iter
      (fun offset ->
        match offset with
        | Instruction.GetPointer.PointerIndex operand -> f operand
        | FieldIndex _ -> ())
      offsets
(* Create an empty block and register it with its function *)
and mk_block ~(func : Function.t) : Block.t =
  let block =
    { Block.id = Block.mk_id (); func; instructions = None; prev_blocks = BlockSet.empty }
  in
  func.blocks <- BlockSet.add block func.blocks;
  block

and has_single_instruction (block : Block.t) : bool =
  match block.instructions with
  | Some { first; last } when first == last -> true
  | _ -> false

(* Instructions in a block form a circular doubly-linked list; link instr1 -> instr2 *)
and add_instr_link (instr_val1 : Value.t) (instr_val2 : Value.t) =
  let instr1 = cast_to_instruction instr_val1 in
  let instr2 = cast_to_instruction instr_val2 in
  instr1.next <- instr_val2;
  instr2.prev <- instr_val1

(* Prepend an instruction to the beginning of a block's instruction list *)
and prepend_instruction (block : Block.t) (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  instr.block <- block;
  match block.instructions with
  | None -> block.instructions <- Some { first = instr_val; last = instr_val }
  | Some ({ first; last } as list) ->
    add_instr_link instr_val first;
    add_instr_link last instr_val;
    list.first <- instr_val

(* Append an instruction to the end of a block's instruction list *)
and append_instruction (block : Block.t) (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  instr.block <- block;
  match block.instructions with
  | None -> block.instructions <- Some { first = instr_val; last = instr_val }
  | Some ({ first; last } as list) ->
    add_instr_link last instr_val;
    add_instr_link instr_val first;
    list.last <- instr_val

(* Insert an instruction immediately before another instruction in its block *)
and insert_instruction_before ~(before : Value.t) (instr_val : Value.t) =
  let before_instr = cast_to_instruction before in
  let instr = cast_to_instruction instr_val in
  let block = before_instr.block in
  instr.block <- block;
  match block.instructions with
  | None -> failwith "Block must have before instruction"
  | Some list ->
    let prev_instr = before_instr.prev in
    add_instr_link prev_instr instr_val;
    add_instr_link instr_val before;
    if list.first == before then list.first <- instr_val

(* Remove an instruction from its block's list, releasing the uses of its operands and
   dropping any phi arguments that referenced it. *)
and remove_instruction (instr_val : Value.t) =
  let instr = cast_to_instruction instr_val in
  let block = instr.block in
  instruction_iter_operands ~instr remove_use;
  (* A phi can appear in its own arguments, in which case it is already being removed *)
  value_iter_uses ~value:instr_val (fun use ->
      if instr_val != use.value then
        match use.value.value with
        | Instr { instr = Phi phi; _ } ->
          phi_filter_args ~phi (fun _ arg_use -> arg_use.Use.value != instr_val)
        | _ -> ());
  (* Instruction list is circular, so check if single element list *)
  if instr.next == instr_val then
    block.instructions <- None
  else
    let prev = instr.prev in
    let next = instr.next in
    add_instr_link prev next;
    let list = Option.get block.instructions in
    if list.first == instr_val then list.first <- next;
    if list.last == instr_val then list.last <- prev
(* Concatenate the instructions in the second block to the end of the first block.
   This is a destructive operation on the second block's instructions. Removes the first block's
   terminator instruction. *)
(* Destructively append b2's instructions onto b1, dropping b1's terminator first.
   Restored comment delimiters that were stripped in this dump. *)
and concat_instructions (b1 : Block.t) (b2 : Block.t) =
  (* Remove terminator from first block *)
  (match get_terminator_value b1 with
  | Some terminator -> remove_instruction terminator
  | None -> ());
  (* Concatenate lists of instructions *)
  iter_instructions b2 (fun _ instr -> instr.Instruction.block <- b1);
  match (b1.instructions, b2.instructions) with
  | (_, None) -> ()
  | (None, (Some _ as instrs)) -> b1.instructions <- instrs
  | (Some ({ first = first1; last = last1 } as list), Some { first = first2; last = last2 }) ->
    add_instr_link last1 first2;
    add_instr_link last2 first1;
    list.last <- last2
(* Split a block after an instruction into two separate blocks with no continue between them.
   Return a tuple of the (first block, second block). *)
(* Split the block containing [instr_value] after that instruction; the tail instructions
   move into a fresh block. Restored comment delimiters that were stripped in this dump. *)
and split_block_after_instruction (instr_value : Value.t) : Block.t * Block.t =
  let instr = cast_to_instruction instr_value in
  let first_block = instr.block in
  let second_block = mk_block ~func:first_block.func in
  let { Block.first = first_block_first; last = first_block_last } =
    Option.get first_block.instructions
  in
  (* Next blocks now have the second block as their prev block *)
  BlockSet.iter
    (fun next_block ->
      remove_block_link first_block next_block;
      add_block_link second_block next_block;
      map_phi_backreferences_for_block
        ~block:next_block
        ~from:first_block
        ~to_:(BlockSet.singleton second_block))
    (get_next_blocks first_block);
  (* If instruction is at end of block then first block is unchanged and second block is empty *)
  if instr_value == first_block_last then
    (first_block, second_block)
  else
    let instr_next = instr.next in
    (* Create circular linked lists of instructions for first and second blocks *)
    add_instr_link instr_value first_block_first;
    first_block.instructions <- Some { first = first_block_first; last = instr_value };
    add_instr_link first_block_last instr_next;
    second_block.instructions <- Some { first = instr_next; last = first_block_last };
    iter_instructions second_block (fun _ instr -> instr.block <- second_block);
    (first_block, second_block)
(* Replace an instruction with another value: redirect all its uses, then remove it *)
and replace_instruction ~(from : Value.t) ~(to_ : Value.t) =
  value_replace_uses ~from ~to_;
  remove_instruction from

(* Apply [f] to every instruction in the block. Captures next before calling [f], so
   [f] may remove the current instruction. *)
and iter_instructions (block : Block.t) (f : Value.t -> Instruction.t -> unit) =
  match block.instructions with
  | None -> ()
  | Some { first; last } ->
    let rec iter current_val last_val f =
      let current = cast_to_instruction current_val in
      let next = current.next in
      f current_val current;
      if current_val != last_val then iter next last_val f
    in
    iter first last f
(* Return the first instruction that matches the predicate, if such an instruction exists *)
and find_instruction (block : Block.t) (f : Value.t -> Instruction.t -> bool) : Value.t option =
  match block.instructions with
  | None -> None
  | Some { first; last } ->
    (* Linear scan of the circular list, stopping at the last instruction *)
    let rec iter current_val last_val f =
      let current = cast_to_instruction current_val in
      if f current_val current then
        Some current_val
      else if current_val == last_val then
        None
      else
        iter current.next last_val f
    in
    iter first last f
(* Remove every instruction in the block for which [f] returns false *)
and filter_instructions (block : Block.t) (f : Instruction.t -> bool) =
  iter_instructions block (fun instr_val instr ->
      if not (f instr) then remove_instruction instr_val)

(* Left fold over the block's instructions in order *)
and fold_instructions : 'a. Block.t -> 'a -> (Value.t -> Instruction.t -> 'a -> 'a) -> 'a =
 fun block acc f ->
  match block.instructions with
  | None -> acc
  | Some { first; last } ->
    let rec fold current_val last_val f acc =
      let current = cast_to_instruction current_val in
      let acc' = f current_val current acc in
      if current_val == last_val then
        acc'
      else
        fold current.Instruction.next last_val f acc'
    in
    fold first last f acc
(* Phis always appear at the start of a block, so only the first instruction is checked *)
and block_has_phis (block : Block.t) : bool =
  match block.instructions with
  | Some { first = { value = Instr { instr = Phi _; _ }; _ }; _ } -> true
  | _ -> false

(* Collect all phi instructions in the block (in reverse instruction order) *)
and block_get_phis (block : Block.t) : Instruction.Phi.t list =
  fold_instructions block [] (fun _ instr acc ->
      match instr with
      | { instr = Phi phi; _ } -> phi :: acc
      | _ -> acc)

(* Apply [f] to every phi instruction in the block *)
and block_iter_phis (block : Block.t) (f : Value.t -> Instruction.Phi.t -> unit) =
  iter_instructions block (fun instr_val instr ->
      match instr with
      | { instr = Phi phi; _ } -> f instr_val phi
      | _ -> ())

(* Remove every phi for which [f] returns false *)
and block_filter_phis (block : Block.t) (f : Value.id -> Instruction.Phi.t -> bool) =
  iter_instructions block (fun instr_val instr ->
      match instr with
      | { instr = Phi phi; _ } -> if not (f instr_val.id phi) then remove_instruction instr_val
      | _ -> ())

(* Left fold over the block's phi instructions *)
and block_fold_phis (block : Block.t) (acc : 'a) (f : Value.t -> Instruction.Phi.t -> 'a -> 'a) : 'a
    =
  fold_instructions block acc (fun instr_val instr acc ->
      match instr with
      | { instr = Phi phi; _ } -> f instr_val phi acc
      | _ -> acc)

(* Remove all phi instructions from the block *)
and block_clear_phis (block : Block.t) = block_filter_phis block (fun _ _ -> false)
(* Add (or overwrite) the phi's argument for [block], registering a use of [value] *)
and phi_add_arg
    ~(phi_val : Value.t) ~(phi : Instruction.Phi.t) ~(block : Block.t) ~(value : Value.t) =
  let use = user_add_use ~user:phi_val ~use:value in
  phi.args <- BlockMap.add block use phi.args

(* Remove the phi's argument for [block] (if present), releasing its use *)
and phi_remove_arg ~(phi : Instruction.Phi.t) ~(block : Block.t) =
  match BlockMap.find_opt block phi.args with
  | None -> ()
  | Some use ->
    remove_use use;
    phi.args <- BlockMap.remove block phi.args

(* Keep only phi arguments for which [f] returns true, releasing the dropped uses *)
and phi_filter_args ~(phi : Instruction.Phi.t) (f : Block.t -> Use.t -> bool) =
  phi.args <-
    BlockMap.filter
      (fun block use ->
        let keep = f block use in
        if not keep then remove_use use;
        keep)
      phi.args

(* If all phi args (after mapping) have the same value, return that value. Otherwise None. *)
and phi_get_single_arg_value_with_mapper ~(map_value : Value.t -> Value.t) (phi : Instruction.Phi.t)
    : Value.t option =
  match BlockMap.choose_opt phi.args with
  | None -> None
  | Some (_, first_use) ->
    let first_arg = map_value first_use.value in
    let has_single_arg_value =
      BlockMap.for_all
        (fun _ arg_use ->
          let arg = map_value arg_use.Use.value in
          values_equal arg first_arg)
        phi.args
    in
    if has_single_arg_value then
      Some first_arg
    else
      None

(* If all phi args have the same value, return that value. Otherwise return None. *)
and phi_get_single_arg_value (phi : Instruction.Phi.t) : Value.t option =
  phi_get_single_arg_value_with_mapper ~map_value:Function_utils.id phi
(* Record [prev_block] as a predecessor of [next_block] *)
and add_block_link (prev_block : Block.t) (next_block : Block.t) =
  next_block.prev_blocks <- BlockSet.add prev_block next_block.prev_blocks

(* Remove [prev_block] from [next_block]'s predecessors *)
and remove_block_link (prev_block : Block.t) (next_block : Block.t) =
  next_block.prev_blocks <- BlockSet.remove prev_block next_block.prev_blocks

(* Return the set of all blocks that this block branches to, based on its terminator *)
and get_next_blocks (block : Block.t) : BlockSet.t =
  match get_terminator block with
  | Some { instr = Continue continue; _ } -> BlockSet.singleton continue
  | Some { instr = Branch { test = _; jump; continue }; _ } ->
    BlockSet.add jump (BlockSet.singleton continue)
  | _ -> BlockSet.empty

(* Apply [f] to each successor block (continue before jump for branches) *)
and iter_next_blocks (block : Block.t) (f : Block.t -> unit) =
  match get_terminator block with
  | Some { instr = Continue continue; _ } -> f continue
  | Some { instr = Branch { test = _; jump; continue }; _ } ->
    f continue;
    f jump
  | _ -> ()
* = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Block Graph Mutation
* = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* ============================
* Block Graph Mutation
* ============================
*)
(* Remove the block if it is empty and removable (see can_remove_empty_block) *)
and block_remove_if_empty (block : Block.t) =
  if can_remove_empty_block block then remove_block block

(* An empty block can be removed only if it continues to a single block, and is not needed
   by any phi nodes in its succeeding block. Restored comment delimiters that were stripped
   in this dump. *)
and can_remove_empty_block (block : Block.t) =
  has_single_instruction block
  &&
  match get_terminator block with
  | Some { instr = Continue continue_block; _ } ->
    (* A block is needed if any of its previous blocks appear in a phi node of the next block, with
       a different value than the value from this block. If we were to remove this block, the value
       from its branch would be lost in the phi node. The start block can never be removed. *)
    let is_start_block = block.func.start_block == block in
    let continue_block_phis = block_get_phis continue_block in
    let block_needed_for_phi =
      (continue_block_phis <> [] && is_start_block)
      || List.exists
           (fun { Instruction.Phi.args; _ } ->
             BlockMap.exists
               (fun prev_block prev_block_arg ->
                 if BlockSet.mem prev_block block.prev_blocks then
                   not (values_equal prev_block_arg.Use.value (BlockMap.find block args).value)
                 else
                   false)
               args)
           continue_block_phis
    in
    (not block_needed_for_phi) && not is_start_block
  | _ -> false
(* A block is unreachable when nothing branches to it and it is not the
   function's entry block. *)
and block_is_unreachable (block : Block.t) =
  BlockSet.is_empty block.prev_blocks && block.func.start_block != block

(* Worklist-based removal of unreachable blocks. [init_func] seeds the
   process by invoking the supplied callback on candidate blocks; each
   removed block enqueues its (not yet removed) successors, since removing
   a block may make them unreachable in turn. [on_removed_block] is called
   on each block just before it is removed. *)
and remove_unreachable_blocks_from_init ?(on_removed_block : Block.t -> unit = ignore) init_func =
  let worklist = ref BlockSet.empty in
  let removed = ref BlockSet.empty in
  let remove_if_unreachable block =
    if (not (BlockSet.mem block !removed)) && block_is_unreachable block then (
      (* Successors must be enqueued before removal mutates the links. *)
      iter_next_blocks block (fun next_block ->
          if not (BlockSet.mem next_block !removed) then
            worklist := BlockSet.add next_block !worklist);
      on_removed_block block;
      remove_block block;
      removed := BlockSet.add block !removed
    )
  in
  init_func remove_if_unreachable;
  while not (BlockSet.is_empty !worklist) do
    let block = BlockSet.choose !worklist in
    remove_if_unreachable block;
    worklist := BlockSet.remove block !worklist
  done

(* Remove [root_block] if unreachable, along with any blocks that become
   unreachable as a result. *)
and remove_unreachable_blocks_from_root
    ?(on_removed_block : Block.t -> unit = ignore) (root_block : Block.t) =
  remove_unreachable_blocks_from_init ~on_removed_block (fun remove_if_unreachable ->
      remove_if_unreachable root_block)

(* Remove every unreachable block in [func]. *)
and remove_unreachable_blocks_in_func (func : Function.t) =
  remove_unreachable_blocks_from_init (fun remove_if_unreachable ->
      func_iter_blocks func remove_if_unreachable)
(* Remove [block] from its function, repairing the surrounding CFG:
   predecessors are rewired past the block (or turned into Unreachable),
   phi nodes in successors are updated, predecessor/successor links are
   dropped, and all uses held by the block's instructions are released. *)
and remove_block (block : Block.t) =
  (* Remove block from function. This may be the first block in the function. If so, update the
     function to point to the next block as the start. *)
  let func = block.func in
  func.blocks <- BlockSet.remove block func.blocks;
  (match get_terminator block with
  | Some { instr = Continue continue_block; _ } ->
    if func.start_block == block then func.start_block <- continue_block
  | _ -> ());
  (match get_terminator block with
  | Some { instr = Unreachable; _ } ->
    (* Block ends in Unreachable: each predecessor's edge to it is cut. A
       continue becomes Unreachable; a branch keeps its surviving target
       (or becomes Unreachable if both targets were this block) and the
       test value loses a use. *)
    BlockSet.iter
      (fun prev_block ->
        match get_terminator prev_block with
        | Some ({ instr = Continue _; _ } as term_instr) -> term_instr.instr <- Unreachable
        | Some ({ instr = Branch { test; continue; jump }; _ } as term_instr) ->
          term_instr.instr <-
            (if continue == block then
              if jump == block then
                Unreachable
              else
                Continue jump
            else
              Continue continue);
          remove_use test
        | _ -> failwith "Previous block must have branching terminator")
      block.prev_blocks
  | Some { instr = Continue next_block; _ } ->
    (* Block falls through: the successor's phis now refer to this block's
       predecessors, and each predecessor jumps straight to the successor. *)
    map_phi_backreferences_for_block ~block:next_block ~from:block ~to_:block.prev_blocks;
    BlockSet.iter
      (fun prev_block -> map_next_block prev_block ~from:block ~to_:next_block)
      block.prev_blocks
  | _ -> ());
  BlockSet.iter
    (fun next_block ->
      remove_phi_backreferences_for_block ~block:next_block ~to_remove:block;
      remove_block_link block next_block)
    (get_next_blocks block);
  (* Release every use owned by this block's instructions. *)
  iter_instructions block (fun _ instr -> instruction_iter_operands ~instr remove_use)
(* Merge [block2] into [block1] (intended for adjacent blocks where block1
   leads to block2 — confirm with callers). block1 takes over block2's
   instructions and terminator (self references remapped to block1),
   successor phis and links are rewritten to point at block1, and block2
   is dropped from the function. *)
and merge_adjacent_blocks block1 block2 =
  let open Block in
  let map_block block =
    if block == block2 then
      block1
    else
      block
  in
  (* Retarget block2's terminator so self-loops point at block1. *)
  (match get_terminator block2 with
  | Some ({ instr = Continue continue; _ } as term_instr) ->
    term_instr.instr <- Continue (map_block continue)
  | Some ({ instr = Branch { test; continue; jump }; _ } as term_instr) ->
    term_instr.instr <- Branch { test; continue = map_block continue; jump = map_block jump }
  | _ -> ());
  concat_instructions block1 block2;
  (* References to the b2 block in phi nodes of blocks that succeed b2 should be rewritten
     to now reference b1 instead. *)
  (* NOTE(review): next_blocks is read from block2 after concat_instructions;
     this assumes block2's terminator is still visible here — confirm
     concat_instructions semantics. *)
  let next_blocks = get_next_blocks block2 in
  BlockSet.iter
    (fun next_block ->
      map_phi_backreferences_for_block
        ~block:next_block
        ~from:block2
        ~to_:(BlockSet.singleton block1))
    next_blocks;
  (* Successors' predecessor links now come from b1 rather than b2. *)
  BlockSet.iter
    (fun next_block ->
      remove_block_link block2 next_block;
      add_block_link block1 next_block)
    next_blocks;
  remove_block_link block1 block2;
  let func = block2.func in
  func.blocks <- BlockSet.remove block2 func.blocks
(* Replace [block]'s branch terminator with a continue to one side.
   [to_keep] selects the side kept (true = continue target, false = jump
   target). The pruned side loses its edge and its phi references, and any
   blocks made unreachable by this are removed ([on_removed_block] is
   invoked for each removed block). Fails if [block] does not end in a
   branch. *)
and prune_branch (to_keep : bool) (block : Block.t) ~(on_removed_block : Block.t -> unit) =
  match get_terminator block with
  | Some ({ instr = Branch { test; continue; jump }; _ } as terminator_instr) ->
    let (to_continue, to_prune) =
      if to_keep then
        (continue, jump)
      else
        (jump, continue)
    in
    remove_block_link block to_prune;
    remove_phi_backreferences_for_block ~block:to_prune ~to_remove:block;
    (* The branch becomes an unconditional continue; the test loses a use. *)
    terminator_instr.instr <- Continue to_continue;
    remove_use test;
    remove_unreachable_blocks_from_root ~on_removed_block to_prune
  | _ -> failwith "Expected branch terminator"
(* Split an edge between two blocks, inserting an empty block in the middle *)
and split_block_edge (prev_block : Block.t) (next_block : Block.t) : Block.t =
  let func = prev_block.func in
  (* The new block is empty except for a continue to [next_block]. *)
  let new_block =
    {
      Block.id = Block.mk_id ();
      func;
      instructions = None;
      prev_blocks = BlockSet.singleton prev_block;
    }
  in
  mk_continue_ ~block:new_block ~continue:next_block;
  add_block_link new_block next_block;
  func.blocks <- BlockSet.add new_block func.blocks;
  (* Redirect prev -> next into prev -> new, and repoint next's phis at
     the new block. *)
  map_next_block prev_block ~from:next_block ~to_:new_block;
  map_phi_backreferences_for_block
    ~block:next_block
    ~from:prev_block
    ~to_:(BlockSet.singleton new_block);
  new_block
(* Map block's next block from a block to another block. Do not update any phi references. *)
and map_next_block (block : Block.t) ~(from : Block.t) ~(to_ : Block.t) =
  (* Rewrites one successor occurrence, keeping prev_blocks links in sync. *)
  let map_next_block maybe_from =
    if maybe_from == from then (
      remove_block_link block from;
      add_block_link block to_;
      to_
    ) else
      maybe_from
  in
  match get_terminator block with
  | Some ({ instr = Continue continue; _ } as term_instr) ->
    term_instr.instr <- Continue (map_next_block continue)
  | Some ({ instr = Branch { test; jump; continue }; _ } as term_instr) ->
    let new_continue = map_next_block continue in
    let new_jump = map_next_block jump in
    (* A branch whose sides now agree degenerates to a plain continue.
       NOTE(review): [test]'s use is not removed in that case, unlike in
       prune_branch — confirm callers account for this. *)
    term_instr.instr <-
      (if new_continue == new_jump then
        Continue new_continue
      else
        Branch { test; continue = new_continue; jump = new_jump })
  | _ -> ()
(* Remove all references to a block from phi nodes of one of its next blocks.
   This may be needed when removing a block or block link. *)
and remove_phi_backreferences_for_block ~(block : Block.t) ~(to_remove : Block.t) =
  (* Keep only phi arguments that do not come from [to_remove]. *)
  let keep prev_block _arg = prev_block != to_remove in
  block_iter_phis block (fun _ phi -> phi_filter_args ~phi keep)
(* Replace all references to old_block_id in the phis of a block with new_block_ids. Note that there
   may be multiple new_block_ids, so a single phi argument may be expanded to multiple arguments.
   This may be needed when editing the program. *)
and map_phi_backreferences_for_block ~(block : Block.t) ~(from : Block.t) ~(to_ : BlockSet.t) =
  block_iter_phis block (fun phi_val phi ->
      match BlockMap.find_opt from phi.args with
      | None -> ()
      | Some use ->
        (* Drop the old argument first, then add one argument per
           replacement block, all carrying the old argument's value. *)
        phi_remove_arg ~phi ~block:from;
        BlockSet.iter
          (fun to_block -> phi_add_arg ~phi_val ~phi ~block:to_block ~value:use.value)
          to_)
(* Apply [f] to every function in the program. *)
and program_iter_funcs (program : Program.t) (f : Function.t -> unit) =
  SMap.iter (fun _name func -> f func) program.funcs

(* Apply [f] to every block of every function in the program. *)
and program_iter_blocks (program : Program.t) (f : Block.t -> unit) =
  program_iter_funcs program (fun func -> func_iter_blocks func f)

(* Drop [func] from the program's function table. *)
and program_remove_func ~(program : Program.t) ~(func : Function.t) =
  program.funcs <- SMap.remove func.name program.funcs

(* Validate every function in the program. *)
and assert_valid_program (program : Program.t) =
  program_iter_funcs program assert_valid_function
(* Validate a single function: CFG back-edges, instruction list structure,
   and use lists of every instruction value. Raises on inconsistency. *)
and assert_valid_function (func : Function.t) =
  assert_valid_function_cfg func;
  func_iter_blocks func (fun block ->
      assert_valid_instruction_list block;
      iter_instructions block (fun instr_value _ -> assert_valid_use_list ~value:instr_value))

(* Check that every block's [prev_blocks] matches the edges implied by
   terminators, and that each phi has exactly one argument per
   predecessor. *)
and assert_valid_function_cfg (func : Function.t) =
  (* Create multimap of all previous blocks by visiting CFG *)
  let prev_blocks = ref BlockMMap.empty in
  func_iter_blocks func (fun block ->
      BlockSet.iter
        (fun next_block -> prev_blocks := BlockMMap.add next_block block !prev_blocks)
        (get_next_blocks block));
  func_iter_blocks func (fun block ->
      (* Check that prev blocks for each block matches the true CFG *)
      let prev_blocks_1 = block.prev_blocks in
      let prev_blocks_2 = BlockMMap.find_all block !prev_blocks in
      (* Set equality checked via mutual inclusion. *)
      let is_subset_1 =
        BlockSet.for_all (fun block -> BlockSet.mem block prev_blocks_2) prev_blocks_1
      in
      let is_subset_2 =
        BlockSet.for_all (fun block -> BlockSet.mem block prev_blocks_1) prev_blocks_2
      in
      if (not is_subset_1) || not is_subset_2 then
        failwith "Previous blocks do not match structure of cfg\n";
      block_iter_phis block (fun _ phi ->
          let phi_prev_blocks =
            BlockMap.fold (fun block _ acc -> BlockSet.add block acc) phi.args BlockSet.empty
          in
          if not (BlockSet.equal prev_blocks_1 phi_prev_blocks) then
            failwith
              (Printf.sprintf
                 "Phi does not have arguments for all previous blocks for block %s in func %s\n"
                 (Block.id_to_string block.id)
                 func.name)))
(* Walk a value's circular doubly linked use list, checking that links are
   bidirectional and that every use points back at [value]. *)
and assert_valid_use_list ~(value : Value.t) =
  match value.uses with
  | None -> ()
  | Some first_use ->
    let rec iter current_use last_use =
      let next_use = current_use.Use.next in
      if next_use.prev != current_use then failwith "Link is not bidirectional";
      if current_use.value != value then failwith "Use does not have correct value";
      if next_use != last_use then iter next_use last_use
    in
    iter first_use first_use

(* Check a block's circular doubly linked instruction list: first/last
   wrap around, links are bidirectional, and every instruction points back
   at the block. *)
and assert_valid_instruction_list (block : Block.t) =
  match block.instructions with
  | None -> ()
  | Some { first = first_val; last = last_val } ->
    let first = cast_to_instruction first_val in
    let last = cast_to_instruction last_val in
    if first.prev != last_val || last.next != first_val then
      failwith
        (Printf.sprintf
           "List must be circular %B %B"
           (first.prev != last_val)
           (last.next != first_val));
    let rec iter current_val last_val =
      let current = cast_to_instruction current_val in
      let current_next = cast_to_instruction current.next in
      if current.block != block then failwith "Instruction does not have correct block";
      if current_next.prev != current_val then failwith "Link is not bidirectional";
      if current.next != last_val then iter current.next last_val
    in
    iter first_val last_val
|
3be48beffb45c47c61ef4f13e54a02f0819ac052aacdb91aab17614b99950ed9 | rtoy/cmucl | boot-2020-04-1.lisp | ;; Simple cross-compile script to remove `*scavenge-read-only-space*`
;; which is no longer needed
;;
;; Nothing special needs to be done for the cross-compile.  Just use
;; this file for the -B option (not really necessary), and use the
;; standard cross-compile scripts in src/tools/cross-scripts.
;;
;;   cross-build-world.sh -crl -B boot-2020-04-1 xtarget xcross src/tools/cross-scripts/cross-foo.lisp old-lisp
;;
;; x86:   cross-x86-x86
;; sparc: cross-sparc-sparc
;;
;; This is also used to easily change the order of the x86::conditions
;; constant so that we prefer je instead of jeq.  Without a
;; cross-compile we'd need to handle the redefinition of the
;; defconstant in a different way.  See issue #95.
| null | https://raw.githubusercontent.com/rtoy/cmucl/9b1abca53598f03a5b39ded4185471a5b8777dea/src/bootfiles/21d/boot-2020-04-1.lisp | lisp | Simple cross-compile script to remove `*scavenge-read-only-space*`
which is no longer needed
Nothing special needs to be done for the cross-compile. Just use
standard cross-compile scripts in src/tools/cross-scripts.
x86: cross-x86-x86
This is also used to easily change the order of x86::conditions
cross-compile we'd need to handle the refefintion of the | this file for the -B option ( not really necessary ) , and use the
cross-build-world.sh -crl -B boot-2020 - 04 - 1 xtarget xcross src / tools / cross - scripts / cross - foo.lisp old - lisp
sparc : cross - sparc - sparc
constant so that we prefer je instead of jeq . Without a
defconstant in a different way . See issue # 95 .
|
01e24ceb769dab828c180ddf1bde82976499ab321faa43dfb064cb56f536edf3 | racket/libs | info.rkt | #lang setup/infotab
;; SPDX-License-Identifier: (Apache-2.0 OR MIT)
;; THIS FILE IS AUTO-GENERATED FROM racket/src/native-libs/install.rkt
;; Standard info.rkt package-metadata fields.
(define collection 'multi)
(define deps '("base"))
(define pkg-desc "native libraries for \"base\" package")
(define pkg-authors '(mflatt))
(define license '((Apache-2.0 OR MIT) AND blessing))
SPDX - License - Identifier : ( Apache-2.0 OR MIT )
(define collection 'multi)
(define deps '("base"))
(define pkg-desc "native libraries for \"base\" package")
(define pkg-authors '(mflatt))
(define license '((Apache-2.0 OR MIT) AND blessing))
|
fffad2acb69428c36b81036efcc2ee03ce7a0844aa67e1cfe3feb67d064a438b | wdebeaum/DeepSemLex | unfamiliar.lisp | ;;;;
;;;; W::unfamiliar
;;;;
;; Adjective entry for "unfamiliar". All three senses map to
;; ONT::unfamiliar-val; they differ only in subcategorization:
;; plain attributive/predicative use, "unfamiliar to X", and
;; "unfamiliar with X".
(define-words :pos W::adj :templ CENTRAL-ADJ-TEMPL
 :words (
  (W::unfamiliar
   (SENSES
    ((meta-data :origin cernl :entry-date 20100501 :change-date nil :comments nil)
     (EXAMPLE "an unfamiliar person")
     (lf-parent ont::unfamiliar-val)
     (TEMPL central-adj-TEMPL)
     )
    ((meta-data :origin cernl :entry-date 20100501 :change-date nil :comments nil)
     (EXAMPLE "that's unfamiliar to him")
     (lf-parent ont::unfamiliar-val)
     ;; "to" PP complement (template name suggests it fills the affected role).
     (templ adj-affected-XP-templ (xp (% w::pp (w::ptype w::to))))
     )
    ((meta-data :origin ptb :entry-date 20100501 :change-date nil :comments nil)
     (EXAMPLE "he is unfamiliar with it")
     (lf-parent ont::unfamiliar-val)
     ;; "with" PP complement (template name suggests affected + stimulus roles).
     (templ adj-affected-stimulus-xp-templ (xp (% w::pp (w::ptype w::with))))
     )
    )
   )
))
W::unfamiliar
|
(define-words :pos W::adj :templ CENTRAL-ADJ-TEMPL
:words (
(W::unfamiliar
(SENSES
((meta-data :origin cernl :entry-date 20100501 :change-date nil :comments nil)
(EXAMPLE "an unfamiliar person")
(lf-parent ont::unfamiliar-val)
(TEMPL central-adj-TEMPL)
)
((meta-data :origin cernl :entry-date 20100501 :change-date nil :comments nil)
(EXAMPLE "that's unfamiliar to him")
(lf-parent ont::unfamiliar-val)
(templ adj-affected-XP-templ (xp (% w::pp (w::ptype w::to))))
)
((meta-data :origin ptb :entry-date 20100501 :change-date nil :comments nil)
(EXAMPLE "he is unfamiliar with it")
(lf-parent ont::unfamiliar-val)
(templ adj-affected-stimulus-xp-templ (xp (% w::pp (w::ptype w::with))))
)
)
)
))
|
365b851ad729753420212be192faba686c2f986adb4270cef9627b39e06643c5 | GaloisInc/macaw | X86.hs |
{-|
Copyright   : (c) Galois, Inc 2015-2017
Maintainer  : Joe Hendrix <jhendrix@galois.com>

This defines the primitives needed to provide architecture info for
x86_64 programs.
-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NondecreasingIndentation #-}
module Data.Macaw.X86
( x86_64_info
, x86_64_freeBSD_info
, x86_64_linux_info
, x86_64CallParams
, x86_64PLTStubInfo
, freeBSD_syscallPersonality
, linux_syscallPersonality
-- * Low level exports
, CallParams(..)
, ArchitectureInfo(..)
, X86BlockPrecond(..)
, ExploreLoc(..)
, rootLoc
, initX86State
, tryDisassembleBlock
, disassembleBlock
, disassembleFixedBlock
, translateInstruction
, X86TranslateError(..)
, Data.Macaw.X86.ArchTypes.X86_64
, Data.Macaw.X86.ArchTypes.X86PrimFn(..)
, Data.Macaw.X86.ArchTypes.X86Stmt(..)
, Data.Macaw.X86.ArchTypes.X86TermStmt(..)
, Data.Macaw.X86.X86Reg.X86Reg(..)
, Data.Macaw.X86.X86Reg.x86ArgumentRegs
, Data.Macaw.X86.X86Reg.x86ResultRegs
, Data.Macaw.X86.X86Reg.x86FloatArgumentRegs
, Data.Macaw.X86.X86Reg.x86FloatResultRegs
, Data.Macaw.X86.X86Reg.x86CalleeSavedRegs
, pattern Data.Macaw.X86.X86Reg.RAX
, x86DemandContext
) where
import Control.Lens
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.ST
import qualified Data.ElfEdit as EE
import Data.Foldable
import qualified Data.Map as Map
import Data.Parameterized.Classes
import qualified Data.Parameterized.Map as MapF
import Data.Parameterized.NatRepr
import Data.Parameterized.Nonce
import Data.Parameterized.Some
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as Text
import Data.Word
import qualified Flexdis86 as F
import Prettyprinter (Pretty(..), viaShow)
import Data.Macaw.AbsDomain.AbsState
import qualified Data.Macaw.AbsDomain.JumpBounds as Jmp
import qualified Data.Macaw.AbsDomain.StridedInterval as SI
import Data.Macaw.Architecture.Info
import Data.Macaw.CFG
import Data.Macaw.CFG.DemandSet
import Data.Macaw.Discovery ( defaultClassifier )
import qualified Data.Macaw.Memory as MM
import qualified Data.Macaw.Memory.ElfLoader.PLTStubs as MMEP
import qualified Data.Macaw.Memory.Permissions as Perm
import Data.Macaw.Types
( n8
, HasRepr(..)
)
import Data.Macaw.X86.ArchTypes
import Data.Macaw.X86.Flexdis
import Data.Macaw.X86.Semantics (execInstruction)
import Data.Macaw.X86.SyscallInfo
import Data.Macaw.X86.SyscallInfo.FreeBSD as FreeBSD
import Data.Macaw.X86.SyscallInfo.Linux as Linux
import Data.Macaw.X86.X86Reg
import Data.Macaw.X86.Generator
------------------------------------------------------------------------
-- ExploreLoc
-- | This represents the control-flow information needed to build
-- basic blocks for a code location.
data ExploreLoc
   = ExploreLoc { loc_ip :: !(MemSegmentOff 64)
                  -- ^ IP address.
                , loc_x87_top :: !Int
                  -- ^ Top register of x87 stack
                , loc_df_flag :: !Bool
                  -- ^ Value of DF flag
                }
 deriving (Eq, Ord)
instance Pretty ExploreLoc where
  pretty = viaShow . loc_ip

-- | Exploration state for a function entry point: x87 top at 7 and the
-- direction flag cleared.
rootLoc :: MemSegmentOff 64 -> ExploreLoc
rootLoc ip =
  ExploreLoc { loc_ip      = ip
             , loc_x87_top = 7
             , loc_df_flag = False
             }
-- | Create the initial register state for exploring from the given
-- location: IP, x87 top, and DF come from the 'ExploreLoc'; all other
-- registers are left symbolic via 'Initial'.
initX86State :: ExploreLoc -- ^ Location to explore from.
             -> RegState X86Reg (Value X86_64 ids)
initX86State loc = mkRegState Initial
                 & curIP .~ RelocatableValue Addr64 (segoffAddr (loc_ip loc))
                 & boundValue X87_TopReg .~ mkLit knownNat (toInteger (loc_x87_top loc))
                 & boundValue DF .~ BoolValue (loc_df_flag loc)
------------------------------------------------------------------------
-- Location
-- | Describes the reason the translation error occurred.
data X86TranslateError w
   = FlexdisMemoryError !(MemoryError w)
     -- ^ A memory error occurred in decoding with Flexdis
   | DecodeError !(MemAddr w) !(InstructionDecodeError w)
     -- ^ A failure occurred while trying to decode an instruction.
   | UnsupportedInstruction !(MemSegmentOff w) !F.InstructionInstance
     -- ^ The instruction is not supported by the translator
   | ExecInstructionError !(MemSegmentOff w) !F.InstructionInstance Text.Text
     -- ^ An error occurred when trying to translate the instruction
   | UnexpectedTerminalInstruction !(MemSegmentOff w) !F.InstructionInstance
     -- ^ A block terminator was reached before all requested bytes of a
     -- fixed-size region were consumed.
-- Human-readable rendering of translation errors, prefixed with the
-- relevant address where one is available.
instance MemWidth w => Show (X86TranslateError w) where
  show err =
    case err of
      FlexdisMemoryError me ->
        show me
      DecodeError addr derr ->
        show addr ++ ": " ++ show derr
      UnsupportedInstruction addr i ->
        "Unsupported instruction at " ++ show addr ++ ": " ++ show i
      ExecInstructionError addr i msg ->
        "Error in interpreting instruction at " ++ show addr ++ ": " ++ show i ++ "\n  "
        ++ Text.unpack msg
      UnexpectedTerminalInstruction addr i -> do
        show addr ++ ": " ++ "Premature end of basic block due to instruction " ++ show i ++ "."
-- | Build a single (empty) error block recording a memory failure at the
-- initial address, carrying the given starting register state.
initError :: RegState X86Reg (Value X86_64 ids)
          -> MemoryError 64
          -> Block X86_64 ids
initError regs memErr =
  let msg = Text.pack (show memErr)
   in Block { blockStmts = []
            , blockTerm = TranslateError regs msg
            }
-- | Disassemble memory contents using flexdis. On failure the decode
-- error is reported at the offending offset within the instruction.
disassembleInstruction :: MemSegmentOff 64
                          -- ^ Address of next instruction to disassemble
                       -> [MemChunk 64]
                          -- ^ Contents at address
                       -> ExceptT (X86TranslateError 64) (ST st_s) (F.InstructionInstance, Int, [MemChunk 64])
disassembleInstruction curIPAddr contents =
  case readInstruction contents of
    Left (errOff, err) ->
      let errAddr = segoffAddr curIPAddr & incAddr (toInteger errOff)
       in throwError (DecodeError errAddr err)
    Right r -> pure r
-- | Translate a single instruction at the given address, extending the
-- partial block with its semantics. Returns the decoded instruction, the
-- resulting partial block, the instruction size, the address after the
-- instruction, and the remaining unparsed contents.
translateStep :: forall st_s ids
               . NonceGenerator (ST st_s) ids
                 -- ^ Generator for new assign ids
              -> PreBlock ids
                 -- ^ Block information built up so far.
              -> MemWord 64
                 -- ^ Offset of instruction from start of block
              -> MemSegmentOff 64
                 -- ^ Address of next instruction to translate
              -> [MemChunk 64]
                 -- ^ List of contents to read next.
              -> ExceptT (X86TranslateError 64) (ST st_s)
                         (F.InstructionInstance, PartialBlock ids, Int, MemAddr 64, [MemChunk 64])
translateStep gen pblock blockOff curIPAddr contents = do
  (i, instSize, nextContents) <- disassembleInstruction curIPAddr contents
  -- Get size of instruction
  let next_ip :: MemAddr 64
      next_ip = segoffAddr curIPAddr & incAddr (toInteger instSize)
  let next_ip_val :: BVValue X86_64 ids 64
      next_ip_val = RelocatableValue Addr64 next_ip
  case execInstruction (ValueExpr next_ip_val) i of
    Nothing -> do
      throwError $ UnsupportedInstruction curIPAddr i
    Just exec -> do
      let gs = GenState { assignIdGen = gen
                        , _blockState = pblock
                        , genInitPCAddr = curIPAddr
                        , genInstructionSize = instSize
                        , avxMode = False
                        , _genRegUpdates = MapF.empty
                        }
      -- Wrap semantic translation failures as 'ExecInstructionError'.
      let transExcept msg = ExecInstructionError curIPAddr i msg
      res <-
        withExceptT transExcept $ runX86Generator gs $ do
          -- Mark the instruction boundary with its disassembly text.
          let line = Text.pack $ show $ F.ppInstruction i
          addStmt $ InstructionStart blockOff line
          asAtomicStateUpdate (MM.segoffAddr curIPAddr) exec
      pure $ (i, res, instSize, next_ip, nextContents)
-- | Recursive function used by `disassembleFixedBlock` below.
translateFixedBlock' :: NonceGenerator (ST st_s) ids
                     -> PreBlock ids
                     -> MemWord 64
                        -- ^ Offset relative to start of block.
                     -> MemSegmentOff 64
                        -- ^ Address of next instruction to translate
                     -> [MemChunk 64]
                        -- ^ List of contents to read next.
                     -> ExceptT (X86TranslateError 64) (ST st_s) (Block X86_64 ids)
translateFixedBlock' gen pblock blockOff curIPAddr contents = do
  (i, res, instSize, nextIP, nextContents) <- translateStep gen pblock blockOff curIPAddr contents
  let blockOff' = blockOff + fromIntegral instSize
  case unfinishedAtAddr res nextIP of
    Just pblock'
      | not (null nextContents)
      , Just nextIPAddr <- incSegmentOff curIPAddr (toInteger instSize) -> do
        -- Block is still open and bytes remain: keep translating.
        translateFixedBlock' gen pblock' blockOff' nextIPAddr nextContents
    _ -> do
      -- The block must end exactly at the end of the region; leftover
      -- bytes mean a terminator instruction arrived too early.
      when (not (null nextContents)) $ do
        throwError $ UnexpectedTerminalInstruction curIPAddr i
      pure $! finishPartialBlock res
{-# DEPRECATED disassembleFixedBlock "Planned for removal." #-}
-- | Disassemble a block with a fixed number of bytes.
disassembleFixedBlock :: NonceGenerator (ST st_s) ids
                      -> ExploreLoc
                         -- ^ Information about starting location for disassembling.
                      -> Int
                         -- ^ Number of bytes to translate
                      -> ST st_s (Either (X86TranslateError 64) (Block X86_64 ids))
disassembleFixedBlock gen loc sz = do
  let addr = loc_ip loc
  let initRegs = initX86State loc
  case segoffContentsAfter addr of
    Left err -> do
      pure $ Left $ FlexdisMemoryError err
    Right fullContents ->
      -- Restrict translation to exactly 'sz' bytes of memory.
      case splitMemChunks fullContents sz of
        Left _err -> do
          error $ "Could not split memory."
        Right (contents,_) -> do
          let pblock = emptyPreBlock addr initRegs
          runExceptT $ translateFixedBlock' gen pblock 0 addr contents
-- | Attempt to translate a single instruction into a Macaw block,
-- also returning the instruction's size.
translateInstruction :: NonceGenerator (ST st_s) ids
                     -> RegState X86Reg (Value X86_64 ids)
                        -- ^ Registers
                     -> MemSegmentOff 64
                        -- ^ Address of instruction to disassemble.
                     -> ExceptT (X86TranslateError 64) (ST st_s) (Block X86_64 ids, MemWord 64)
translateInstruction gen initRegs addr =
  case segoffContentsAfter addr of
    Left err -> do
      throwError $ FlexdisMemoryError err
    Right contents -> do
      let pblock = emptyPreBlock addr initRegs
      (_i, res, instSize, _nextIP, _nextContents) <- translateStep gen pblock 0 addr contents
      pure $! (finishPartialBlock res, fromIntegral instSize)
-- | Translate instructions starting at the given address until the block
-- ends, an error occurs, or the maximum size is reached. Returns the
-- finished block and the number of bytes consumed; translation errors
-- become a 'TranslateError' terminator rather than propagating.
translateBlockImpl :: forall st_s ids
                   .  NonceGenerator (ST st_s) ids
                      -- ^ Generator for new assign ids
                   -> PreBlock ids
                      -- ^ Block information built up so far.
                   -> MemSegmentOff 64
                      -- ^ Address of next instruction to translate
                   -> MemWord 64
                      -- ^ Offset of instruction from start of block
                   -> MemWord 64
                      -- ^ Maximum offset for this addr from start of block.
                   -> [MemChunk 64]
                      -- ^ List of contents to read next.
                   -> ST st_s ( Block X86_64 ids
                              , MemWord 64
                              )
translateBlockImpl gen pblock curIPAddr blockOff maxSize contents = do
  r <- runExceptT $ translateStep gen pblock blockOff curIPAddr contents
  case r of
    Left err -> do
      -- Close the block with an error terminator built from the state so far.
      let b = Block { blockStmts = toList (pblock^.pBlockStmts)
                    , blockTerm = TranslateError (pblock^.pBlockState) (Text.pack (show err))
                    }
      pure (b, blockOff)
    Right (_, res, instSize, nextIP, nextContents) -> do
      let blockOff' = blockOff + fromIntegral instSize
      case unfinishedAtAddr res nextIP of
        Just pblock'
          | blockOff' < maxSize
          , Just nextIPSegOff <- incSegmentOff curIPAddr (toInteger instSize) -> do
            translateBlockImpl gen pblock' nextIPSegOff blockOff' maxSize nextContents
        _ ->
          pure (finishPartialBlock res, blockOff')
-- | The abstract state for a function beginning at a given address:
-- x87 top fixed at 7, DF cleared, and a return address at the top of
-- the stack.
initialX86AbsState :: MemSegmentOff 64 -> AbsBlockState X86Reg
initialX86AbsState addr =
  let regConstraints =
        MapF.fromList
          [ MapF.Pair X87_TopReg (FinSet (Set.singleton 7))
          , MapF.Pair DF (BoolConst False)
          ]
      stackEntries = [(0, StackEntry (BVMemRepr n8 LittleEndian) ReturnAddr)]
   in fnStartAbsBlockState addr regConstraints stackEntries
-- | Whether a register is preserved across a FreeBSD system call:
-- every register except CF and RAX.
preserveFreeBSDSyscallReg :: X86Reg tp -> Bool
preserveFreeBSDSyscallReg r =
  case (testEquality r CF, testEquality r RAX) of
    (Just Refl, _) -> False
    (_, Just Refl) -> False
    _ -> True
-- | Linux preserves the same registers the x86_64 ABI does.
linuxSystemCallPreservedRegisters :: Set.Set (Some X86Reg)
linuxSystemCallPreservedRegisters = x86CalleeSavedRegs
-- | Transfer some type into an abstract value given a processor state.
--
-- Almost every x86-specific primitive is abstracted to 'TopV'; only the
-- string primitives with a bounded count get a strided interval.
transferAbsValue :: AbsProcessorState X86Reg ids
                 -> X86PrimFn (Value X86_64 ids) tp
                 -> AbsValue 64 tp
transferAbsValue r f =
  case f of
    EvenParity _ -> TopV
    ReadFSBase -> TopV
    ReadGSBase -> TopV
    GetSegmentSelector _ -> TopV
    CPUID _ -> TopV
    CMPXCHG8B{} -> TopV
    RDTSC -> TopV
    XGetBV _ -> TopV
    PShufb{} -> TopV
      -- We know only that it will return up to (and including(?)) cnt
    MemCmp _sz cnt _src _dest _rev
      | Just upper <- hasMaximum knownRepr (transferValue r cnt) ->
          stridedInterval $ SI.mkStridedInterval knownNat False 0 upper 1
      | otherwise -> TopV
    RepnzScas _sz _val _buf cnt
      | Just upper <- hasMaximum knownRepr (transferValue r cnt) ->
          stridedInterval $ SI.mkStridedInterval knownNat False 0 upper 1
      | otherwise -> TopV
    MMXExtend{} -> TopV
    X86IDivRem{} -> TopV
    X86DivRem{} -> TopV
    SSE_UnaryOp{} -> TopV
    SSE_VectorOp{} -> TopV
    SSE_Sqrt{} -> TopV
    SSE_CMPSX{} -> TopV
    SSE_UCOMIS{} -> TopV
    SSE_CVTSD2SS{} -> TopV
    SSE_CVTSS2SD{} -> TopV
    SSE_CVTSI2SX{} -> TopV
    SSE_CVTTSX2SI{} -> TopV
    X87_Extend{} -> TopV
    X87_FST{} -> TopV
    X87_FAdd{} -> TopV
    X87_FSub{} -> TopV
    X87_FMul{} -> TopV
    CLMul{} -> TopV
    AESNI_AESEnc{} -> TopV
    AESNI_AESEncLast{} -> TopV
    AESNI_AESDec{} -> TopV
    AESNI_AESDecLast{} -> TopV
    AESNI_AESKeyGenAssist{} -> TopV
    AESNI_AESIMC{} -> TopV
    SHA_sigma0{} -> TopV
    SHA_sigma1{} -> TopV
    SHA_Sigma0{} -> TopV
    SHA_Sigma1{} -> TopV
    SHA_Ch{} -> TopV
    SHA_Maj{} -> TopV

    -- XXX: Is 'TopV' the right thing for the AVX instructions below?
    VOp1 {} -> TopV
    VOp2 {} -> TopV
    Pointwise2 {} -> TopV
    PointwiseShiftL {} -> TopV
    PointwiseLogicalShiftR {} -> TopV
    VExtractF128 {} -> TopV
    VInsert {} -> TopV
    X86Syscall {} -> TopV
-- | Extra constraints on block for disassembling.
data X86BlockPrecond = X86BlockPrecond { blockInitX87TopReg :: !Word8
                                         -- ^ Value to assign to X87 Top register
                                         --
                                         -- (should be from 0 to 7, with 7 meaning the stack is empty.)
                                       , blockInitDF :: !Bool
                                         -- ^ Value to assign to the DF (direction) flag.
                                       }

type instance ArchBlockPrecond X86_64 = X86BlockPrecond
-- | Compute the block preconditions (x87 stack height and DF flag) from
-- the abstract state at a block start, failing with a message when either
-- value is not uniquely determined.
extractX86BlockPrecond :: MemSegmentOff 64
                          -- ^ Address to disassemble at
                       -> AbsBlockState X86Reg
                          -- ^ Abstract state of processor for defining state.
                       -> Either String X86BlockPrecond
extractX86BlockPrecond _addr ab = do
  topVal <-
    case asConcreteSingleton (ab^.absRegState^.boundValue X87_TopReg) of
      Just t -> Right t
      Nothing -> Left "Could not determine height of X87 stack."
  dfVal <-
    case ab^.absRegState^.boundValue DF of
      BoolConst b -> Right b
      _ -> Left $ "Could not determine df flag " ++ show (ab^.absRegState^.boundValue DF)
  Right $! X86BlockPrecond { blockInitX87TopReg = fromIntegral topVal
                           , blockInitDF = dfVal
                           }
-- | Create initial registers for a block from address and preconditions.
-- IP, X87 top, and DF come from the address/preconditions; every other
-- register is left symbolic via 'Initial'.
initX86BlockRegs :: forall ids
                 .  MemSegmentOff 64
                    -- ^ Address to disassemble at
                 -> X86BlockPrecond
                    -- ^ Preconditions
                 -> RegState X86Reg (Value X86_64 ids)
initX86BlockRegs addr pr =
  let mkReg :: X86Reg tp -> Value X86_64 ids tp
      mkReg r
        | Just Refl <- testEquality r X86_IP =
          RelocatableValue Addr64 (segoffAddr addr)
        | Just Refl <- testEquality r X87_TopReg =
          mkLit knownNat (toInteger (blockInitX87TopReg pr))
        | Just Refl <- testEquality r DF =
          BoolValue (blockInitDF pr)
        | otherwise = Initial r
   in mkRegState mkReg
-- | Generate a Macaw block starting from the given address.
--
-- This is used in the architecture info.
translateBlockWithRegs :: forall s ids
                       .  NonceGenerator (ST s) ids
                          -- ^ Generator for creating fresh @AssignId@ values.
                       -> MemSegmentOff 64
                          -- ^ Address to disassemble at.
                       -> RegState X86Reg (Value X86_64 ids)
                          -- ^ Initial register values.
                       -> Int
                          -- ^ Maximum size of this block
                       -> ST s (Block X86_64 ids, Int)
translateBlockWithRegs gen addr initRegs maxSize = do
  case segoffContentsAfter addr of
    Left err -> do
      -- Memory was not readable at the start address; produce a
      -- zero-length block that terminates with a translation error.
      pure $! (initError initRegs err, 0)
    Right contents -> do
      (b, sz) <- translateBlockImpl gen (emptyPreBlock addr initRegs) addr 0 (fromIntegral maxSize) contents
      pure $! (b, fromIntegral sz)
-- | Attempt to identify the write to a stack return address, returning
-- instructions prior to that write and return values.
--
-- Walks the statement sequence backwards looking for the 'WriteMem'
-- that pushes the return address to the current stack pointer.
--
-- This can also return Nothing if the call is not supported.
identifyX86Call :: Memory 64
                -> Seq (Stmt X86_64 ids)
                -> RegState X86Reg (Value X86_64 ids)
                -> Maybe (Seq (Stmt X86_64 ids), MemSegmentOff 64)
identifyX86Call mem stmts0 s = go stmts0 Seq.empty
  where -- Get value of stack pointer
        next_sp = s^.boundValue RSP
        -- Recurse on statements, accumulating the statements after the
        -- candidate write in @after@ so they can be re-appended.
        go stmts after =
          case Seq.viewr stmts of
            Seq.EmptyR -> Nothing
            prev Seq.:> stmt
              -- Check for a call statement by determining if the last statement
              -- writes an executable address to the stack pointer.
              | WriteMem a _repr val <- stmt
              , Just _ <- testEquality a next_sp
                -- Check this is the right length.
              , Just Refl <- testEquality (typeRepr next_sp) (typeRepr val)
                -- Check if value is a valid literal address
              , Just val_a <- valueAsMemAddr val
                -- Check if segment of address is marked as executable.
              , Just ret_addr <- asSegmentOff mem val_a
              , segmentFlags (segoffSegment ret_addr) `Perm.hasPerm` Perm.execute ->
                Just (prev Seq.>< after, ret_addr)
                -- Stop if we hit any architecture specific instructions prior to
                -- identifying return address since they may have side effects.
              | ExecArchStmt _ <- stmt -> Nothing
                -- Otherwise skip over this instruction.
              | otherwise -> go prev (stmt Seq.<| after)
-- | Return true if the abstract stack has the saved return address at
-- offset 0, i.e. the stack pointer has been reset to its original
-- value and the return address is on top of the stack.
checkForReturnAddrX86 :: forall ids
                      .  AbsProcessorState X86Reg ids
                      -> Bool
checkForReturnAddrX86 absState =
  case Map.lookup 0 (absState^.curAbsStack) of
    Just (StackEntry _ ReturnAddr) -> True
    _                              -> False
-- | Called to determine if the instruction sequence contains a return
-- from the current function.
--
-- An instruction executing a return from a function will place the
-- ReturnAddr value (placed on the top of the stack by
-- 'initialX86AbsState' above) into the instruction pointer.
identifyX86Return :: Seq (Stmt X86_64 ids)
                  -> RegState X86Reg (Value X86_64 ids)
                  -> AbsProcessorState X86Reg ids
                  -> Maybe (Seq (Stmt X86_64 ids))
identifyX86Return stmts regs absState
  | ReturnAddr <- transferValue absState (regs^.boundValue ip_reg) = Just stmts
  | otherwise = Nothing
-- | System-call conventions for FreeBSD: type information comes from
-- the FreeBSD syscall table, and results are returned in RAX.
freeBSD_syscallPersonality :: SyscallPersonality
freeBSD_syscallPersonality =
  SyscallPersonality { spTypeInfo = FreeBSD.syscallInfo
                     , spResultRegisters = [ Some RAX ]
                     }
-- | Demand-set computation context for X86_64.
x86DemandContext :: DemandContext X86_64
x86DemandContext =
  DemandContext { demandConstraints = \a -> a
                , archFnHasSideEffects = x86PrimFnHasSideEffects
                }
-- | Get the next IP that may run after this terminal and the abstract
-- state denoting possible starting conditions when that code runs.
--
-- Both x86 terminal statements handled here stop execution, so there
-- is never a successor.
postX86TermStmtAbsState :: (forall tp . X86Reg tp -> Bool)
                        -> Memory 64
                        -> AbsProcessorState X86Reg ids
                        -> Jmp.IntraJumpBounds X86_64 ids
                        -> RegState X86Reg (Value X86_64 ids)
                        -> X86TermStmt (Value X86_64 ids)
                        -> Maybe ( MemSegmentOff 64
                                 , AbsBlockState X86Reg
                                 , Jmp.InitJumpBounds X86_64
                                 )
postX86TermStmtAbsState _preservePred _mem _s _bnds _regs tstmt =
  case tstmt of
    -- Halt: no successor address.
    Hlt ->
      Nothing
    -- Undefined-instruction trap: no successor address.
    UD2 ->
      Nothing
-- | Call conventions for x86_64: a call pushes an 8-byte return
-- address, callee-saved registers are preserved across calls, and the
-- stack grows toward lower addresses.
x86_64CallParams :: CallParams X86Reg
x86_64CallParams =
  CallParams { postCallStackDelta = 8
             , preserveReg = \r -> Set.member (Some r) x86CalleeSavedRegs
             , stackGrowsDown = True
             }
-- | Common architecture information for X86_64
x86_64_info :: (forall tp . X86Reg tp -> Bool)
               -- ^ Function that returns true if we should preserve a register across a system call.
            -> ArchitectureInfo X86_64
x86_64_info preservePred =
  ArchitectureInfo { withArchConstraints = \x -> x
                   , archAddrWidth = Addr64
                   , archEndianness = LittleEndian
                   , extractBlockPrecond = extractX86BlockPrecond
                   , initialBlockRegs = initX86BlockRegs
                   , disassembleFn = translateBlockWithRegs
                   , mkInitialAbsState = \_ addr -> initialX86AbsState addr
                   , absEvalArchFn = transferAbsValue
                     -- x86-specific statements leave the abstract state unchanged.
                   , absEvalArchStmt = \s _ -> s
                   , identifyCall = identifyX86Call
                   , archCallParams = x86_64CallParams
                   , checkForReturnAddr = \_ s -> checkForReturnAddrX86 s
                   , identifyReturn = identifyX86Return
                   , rewriteArchFn = rewriteX86PrimFn
                   , rewriteArchStmt = rewriteX86Stmt
                   , rewriteArchTermStmt = rewriteX86TermStmt
                   , archDemandContext = x86DemandContext
                   , postArchTermStmtAbsState = postX86TermStmtAbsState preservePred
                   , archClassifier = defaultClassifier
                   }
-- | Architecture information for X86_64 on FreeBSD.
x86_64_freeBSD_info :: ArchitectureInfo X86_64
x86_64_freeBSD_info = x86_64_info preserveFreeBSDSyscallReg
-- | System-call conventions for Linux: type information comes from
-- the Linux syscall table, and results are returned in RAX.
linux_syscallPersonality :: SyscallPersonality
linux_syscallPersonality =
  SyscallPersonality { spTypeInfo = Linux.syscallInfo
                     , spResultRegisters = [Some RAX]
                     }
-- | Architecture information for X86_64 on Linux.
x86_64_linux_info :: ArchitectureInfo X86_64
x86_64_linux_info =
  x86_64_info (\r -> Set.member (Some r) linuxSystemCallPreservedRegisters)
-- | PLT stub information for X86_64 relocation types.
x86_64PLTStubInfo :: MMEP.PLTStubInfo EE.X86_64_RelocationType
x86_64PLTStubInfo = MMEP.PLTStubInfo
  { MMEP.pltFunSize = 16
  , MMEP.pltStubSize = 16
  , MMEP.pltGotStubSize = 8
  }
{-# DEPRECATED disassembleBlock "Use disassembleFn x86_64_info" #-}
-- | Disassemble a block starting from an explore location, returning
-- the block along with its size.
disassembleBlock :: forall s
                 .  NonceGenerator (ST s) s
                 -> ExploreLoc
                 -> MemWord 64
                    -- ^ Maximum number of bytes in ths block.
                 -> ST s (Block X86_64 s, MemWord 64)
disassembleBlock gen loc maxSize = do
  -- Clamp the requested size so the count fits in an 'Int'.
  let maxBytes = fromIntegral (min (toInteger (maxBound :: Int)) (toInteger maxSize))
  (b, sz) <- translateBlockWithRegs gen (loc_ip loc) (initX86State loc) maxBytes
  pure (b, fromIntegral sz)
{-# DEPRECATED tryDisassembleBlock "Use disassembleFn x86_64_info" #-}
-- | Disassemble a block at the given address, returning the block,
-- its size, and a (never-present) error message.
tryDisassembleBlock :: forall s ids
                    .  NonceGenerator (ST s) ids
                    -> MemSegmentOff 64
                       -- ^ Address to disassemble at
                    -> RegState X86Reg (Value X86_64 ids)
                       -- ^ Initial registers
                    -> Int
                       -- ^ Maximum size of this block
                    -> ExceptT String (ST s) (Block X86_64 ids, Int, Maybe String)
tryDisassembleBlock gen addr initRegs maxSize =
  lift $ (\(b, sz) -> (b, sz, Nothing)) <$> translateBlockWithRegs gen addr initRegs maxSize
| null | https://raw.githubusercontent.com/GaloisInc/macaw/97c61e471aa60a48393bb6496260db416c4dca55/x86/src/Data/Macaw/X86.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
# LANGUAGE TypeSynonymInstances #
* Low level exports
----------------------------------------------------------------------
ExploreLoc
| Information needed to disassble a This represents the control-flow information needed to build
basic blocks for a code location.
^ IP address.
^ Top register of x87 stack
^ Location to explore from.
----------------------------------------------------------------------
Location
| Describes the reason the translation error occured.
^ A failure occured while trying to decode an instruction.
^ The instruction is not supported by the translator
^ An error occured when trying to translate the instruction
| Signal an error from the initial address.
| Disassemble memory contents using flexdis.
^ Address of next instruction to disassemble
^ Contents at address
| Translate block, returning blocks read, ending
PC, and an optional error. and ending PC.
^ Generator for new assign ids
^ Block information built up so far.
^ Offset of instruction from start of block
^ Address of next instruction to translate
^ List of contents to read next.
Get size of instruction
| Recursive function used by `disassembleFixedBlock` below.
^ Offset relative to start of block.
^ Address of next instruction to translate
^ List of contents to read next.
# DEPRECATED disassembleFixedBlock "Planned for removal." #
| Disassemble a block with a fixed number of bytes.
^ Information about starting location for disassembling.
^ Number of bytes to translate
^ Registers
^ Address of instruction to disassemble.
| Translate block, returning block read, number of bytes in block,
remaining bytes to parse, and an optional error.
^ Generator for new assign ids
^ Block information built up so far.
^ Address of next instruction to translate
^ Offset of instruction from start of block
^ Maximum offset for this addr from start of block.
^ List of contents to read next.
| The abstract state for a function begining at a given address.
| Transfer some type into an abstract value given a processor state.
| Extra constraints on block for disassembling.
^ Value to assign to X87 Top register
| Disassemble block, returning either an error, or a list of blocks
and ending PC.
^ Address to disassemble at
^ Abstract state of processor for defining state.
| Create initial registers for a block from address and preconditions.
^ Address to disassemble at
^ Preconditions
This is used in the architectur einfo.
^ Address to disassemble at.
^ Initial register values.
^ Maximum size of this block
| Attempt to identify the write to a stack return address, returning
instructions prior to that write and return values.
This can also return Nothing if the call is not supported.
Get value of stack pointer
Recurse on statements.
Check for a call statement by determining if the last statement
writes an executable address to the stack pointer.
Check this is the right length.
Check if value is a valid literal address
Check if segment of address is marked as executable.
Stop if we hit any architecture specific instructions prior to
identifying return address since they may have side effects.
Otherwise skip over this instruction.
| Return true if stack pointer has been reset to original value, and
return address is on top of stack.
| Called to determine if the instruction sequence contains a return
from the current function.
An instruction executing a return from a function will place the
'initialX86AbsState' above) into the instruction pointer.
| Get the next IP that may run after this terminal and the abstract
state denoting possible starting conditions when that code runs.
| Common architecture information for X86_64
^ Function that returns true if we should preserve a register across a system call.
| Architecture information for X86_64 on FreeBSD.
# DEPRECATED disassembleBlock "Use disassembleFn x86_64_info" #
| Disassemble block starting from explore location, and
return block along with size of block.
^ Maximum number of bytes in ths block.
# DEPRECATED tryDisassembleBlock "Use disassembleFn x86_64_info" #
| Disassemble block, returning either an error, or a list of blocks
and ending PC.
^ Address to disassemble at
^ Initial registers
^ Maximum size of this block |
Copyright : ( c ) Galois , Inc 2015 - 2017
Maintainer : < >
This defines the primitives needed to provide architecture info for
x86_64 programs .
Copyright : (c) Galois, Inc 2015-2017
Maintainer : Joe Hendrix <>
This defines the primitives needed to provide architecture info for
x86_64 programs.
-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternGuards #
# LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE ViewPatterns #
# LANGUAGE NondecreasingIndentation #
module Data.Macaw.X86
( x86_64_info
, x86_64_freeBSD_info
, x86_64_linux_info
, x86_64CallParams
, x86_64PLTStubInfo
, freeBSD_syscallPersonality
, linux_syscallPersonality
, CallParams(..)
, ArchitectureInfo(..)
, X86BlockPrecond(..)
, ExploreLoc(..)
, rootLoc
, initX86State
, tryDisassembleBlock
, disassembleBlock
, disassembleFixedBlock
, translateInstruction
, X86TranslateError(..)
, Data.Macaw.X86.ArchTypes.X86_64
, Data.Macaw.X86.ArchTypes.X86PrimFn(..)
, Data.Macaw.X86.ArchTypes.X86Stmt(..)
, Data.Macaw.X86.ArchTypes.X86TermStmt(..)
, Data.Macaw.X86.X86Reg.X86Reg(..)
, Data.Macaw.X86.X86Reg.x86ArgumentRegs
, Data.Macaw.X86.X86Reg.x86ResultRegs
, Data.Macaw.X86.X86Reg.x86FloatArgumentRegs
, Data.Macaw.X86.X86Reg.x86FloatResultRegs
, Data.Macaw.X86.X86Reg.x86CalleeSavedRegs
, pattern Data.Macaw.X86.X86Reg.RAX
, x86DemandContext
) where
import Control.Lens
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.ST
import qualified Data.ElfEdit as EE
import Data.Foldable
import qualified Data.Map as Map
import Data.Parameterized.Classes
import qualified Data.Parameterized.Map as MapF
import Data.Parameterized.NatRepr
import Data.Parameterized.Nonce
import Data.Parameterized.Some
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as Text
import Data.Word
import qualified Flexdis86 as F
import Prettyprinter (Pretty(..), viaShow)
import Data.Macaw.AbsDomain.AbsState
import qualified Data.Macaw.AbsDomain.JumpBounds as Jmp
import qualified Data.Macaw.AbsDomain.StridedInterval as SI
import Data.Macaw.Architecture.Info
import Data.Macaw.CFG
import Data.Macaw.CFG.DemandSet
import Data.Macaw.Discovery ( defaultClassifier )
import qualified Data.Macaw.Memory as MM
import qualified Data.Macaw.Memory.ElfLoader.PLTStubs as MMEP
import qualified Data.Macaw.Memory.Permissions as Perm
import Data.Macaw.Types
( n8
, HasRepr(..)
)
import Data.Macaw.X86.ArchTypes
import Data.Macaw.X86.Flexdis
import Data.Macaw.X86.Semantics (execInstruction)
import Data.Macaw.X86.SyscallInfo
import Data.Macaw.X86.SyscallInfo.FreeBSD as FreeBSD
import Data.Macaw.X86.SyscallInfo.Linux as Linux
import Data.Macaw.X86.X86Reg
import Data.Macaw.X86.Generator
data ExploreLoc
= ExploreLoc { loc_ip :: !(MemSegmentOff 64)
, loc_x87_top :: !Int
, loc_df_flag :: !Bool
^ Value of DF flag
}
deriving (Eq, Ord)
instance Pretty ExploreLoc where
pretty loc = viaShow (loc_ip loc)
rootLoc :: MemSegmentOff 64 -> ExploreLoc
rootLoc ip = ExploreLoc { loc_ip = ip
, loc_x87_top = 7
, loc_df_flag = False
}
-> RegState X86Reg (Value X86_64 ids)
initX86State loc = mkRegState Initial
& curIP .~ RelocatableValue Addr64 (segoffAddr (loc_ip loc))
& boundValue X87_TopReg .~ mkLit knownNat (toInteger (loc_x87_top loc))
& boundValue DF .~ BoolValue (loc_df_flag loc)
data X86TranslateError w
= FlexdisMemoryError !(MemoryError w)
^ A memory error occured in decoding with
| DecodeError !(MemAddr w) !(InstructionDecodeError w)
| UnsupportedInstruction !(MemSegmentOff w) !F.InstructionInstance
| ExecInstructionError !(MemSegmentOff w) !F.InstructionInstance Text.Text
| UnexpectedTerminalInstruction !(MemSegmentOff w) !F.InstructionInstance
instance MemWidth w => Show (X86TranslateError w) where
show err =
case err of
FlexdisMemoryError me ->
show me
DecodeError addr derr ->
show addr ++ ": " ++ show derr
UnsupportedInstruction addr i ->
"Unsupported instruction at " ++ show addr ++ ": " ++ show i
ExecInstructionError addr i msg ->
"Error in interpreting instruction at " ++ show addr ++ ": " ++ show i ++ "\n "
++ Text.unpack msg
UnexpectedTerminalInstruction addr i -> do
show addr ++ ": " ++ "Premature end of basic block due to instruction " ++ show i ++ "."
initError :: RegState X86Reg (Value X86_64 ids)
-> MemoryError 64
-> Block X86_64 ids
initError s err =
Block { blockStmts = []
, blockTerm = TranslateError s (Text.pack (show err))
}
disassembleInstruction :: MemSegmentOff 64
-> [MemChunk 64]
-> ExceptT (X86TranslateError 64) (ST st_s) (F.InstructionInstance, Int, [MemChunk 64])
disassembleInstruction curIPAddr contents =
case readInstruction contents of
Left (errOff, err) -> do
throwError $ DecodeError (segoffAddr curIPAddr & incAddr (toInteger errOff)) err
Right r -> do
pure r
translateStep :: forall st_s ids
. NonceGenerator (ST st_s) ids
-> PreBlock ids
-> MemWord 64
-> MemSegmentOff 64
-> [MemChunk 64]
-> ExceptT (X86TranslateError 64) (ST st_s)
(F.InstructionInstance, PartialBlock ids, Int, MemAddr 64, [MemChunk 64])
translateStep gen pblock blockOff curIPAddr contents = do
(i, instSize, nextContents) <- disassembleInstruction curIPAddr contents
let next_ip :: MemAddr 64
next_ip = segoffAddr curIPAddr & incAddr (toInteger instSize)
let next_ip_val :: BVValue X86_64 ids 64
next_ip_val = RelocatableValue Addr64 next_ip
case execInstruction (ValueExpr next_ip_val) i of
Nothing -> do
throwError $ UnsupportedInstruction curIPAddr i
Just exec -> do
let gs = GenState { assignIdGen = gen
, _blockState = pblock
, genInitPCAddr = curIPAddr
, genInstructionSize = instSize
, avxMode = False
, _genRegUpdates = MapF.empty
}
let transExcept msg = ExecInstructionError curIPAddr i msg
res <-
withExceptT transExcept $ runX86Generator gs $ do
let line = Text.pack $ show $ F.ppInstruction i
addStmt $ InstructionStart blockOff line
asAtomicStateUpdate (MM.segoffAddr curIPAddr) exec
pure $ (i, res, instSize, next_ip, nextContents)
translateFixedBlock' :: NonceGenerator (ST st_s) ids
-> PreBlock ids
-> MemWord 64
-> MemSegmentOff 64
-> [MemChunk 64]
-> ExceptT (X86TranslateError 64) (ST st_s) (Block X86_64 ids)
translateFixedBlock' gen pblock blockOff curIPAddr contents = do
(i, res, instSize, nextIP, nextContents) <- translateStep gen pblock blockOff curIPAddr contents
let blockOff' = blockOff + fromIntegral instSize
case unfinishedAtAddr res nextIP of
Just pblock'
| not (null nextContents)
, Just nextIPAddr <- incSegmentOff curIPAddr (toInteger instSize) -> do
translateFixedBlock' gen pblock' blockOff' nextIPAddr nextContents
_ -> do
when (not (null nextContents)) $ do
throwError $ UnexpectedTerminalInstruction curIPAddr i
pure $! finishPartialBlock res
disassembleFixedBlock :: NonceGenerator (ST st_s) ids
-> ExploreLoc
-> Int
-> ST st_s (Either (X86TranslateError 64) (Block X86_64 ids))
disassembleFixedBlock gen loc sz = do
let addr = loc_ip loc
let initRegs = initX86State loc
case segoffContentsAfter addr of
Left err -> do
pure $ Left $ FlexdisMemoryError err
Right fullContents ->
case splitMemChunks fullContents sz of
Left _err -> do
error $ "Could not split memory."
Right (contents,_) -> do
let pblock = emptyPreBlock addr initRegs
runExceptT $ translateFixedBlock' gen pblock 0 addr contents
| Attempt to translate a single instruction into a Macaw block and instruction size .
translateInstruction :: NonceGenerator (ST st_s) ids
-> RegState X86Reg (Value X86_64 ids)
-> MemSegmentOff 64
-> ExceptT (X86TranslateError 64) (ST st_s) (Block X86_64 ids, MemWord 64)
translateInstruction gen initRegs addr =
case segoffContentsAfter addr of
Left err -> do
throwError $ FlexdisMemoryError err
Right contents -> do
let pblock = emptyPreBlock addr initRegs
(_i, res, instSize, _nextIP, _nextContents) <- translateStep gen pblock 0 addr contents
pure $! (finishPartialBlock res, fromIntegral instSize)
translateBlockImpl :: forall st_s ids
. NonceGenerator (ST st_s) ids
-> PreBlock ids
-> MemSegmentOff 64
-> MemWord 64
-> MemWord 64
-> [MemChunk 64]
-> ST st_s ( Block X86_64 ids
, MemWord 64
)
translateBlockImpl gen pblock curIPAddr blockOff maxSize contents = do
r <- runExceptT $ translateStep gen pblock blockOff curIPAddr contents
case r of
Left err -> do
let b = Block { blockStmts = toList (pblock^.pBlockStmts)
, blockTerm = TranslateError (pblock^.pBlockState) (Text.pack (show err))
}
pure (b, blockOff)
Right (_, res, instSize, nextIP, nextContents) -> do
let blockOff' = blockOff + fromIntegral instSize
case unfinishedAtAddr res nextIP of
Just pblock'
| blockOff' < maxSize
, Just nextIPSegOff <- incSegmentOff curIPAddr (toInteger instSize) -> do
translateBlockImpl gen pblock' nextIPSegOff blockOff' maxSize nextContents
_ ->
pure (finishPartialBlock res, blockOff')
initialX86AbsState :: MemSegmentOff 64 -> AbsBlockState X86Reg
initialX86AbsState addr =
let m = MapF.fromList [ MapF.Pair X87_TopReg (FinSet (Set.singleton 7))
, MapF.Pair DF (BoolConst False)
]
in fnStartAbsBlockState addr m [(0, StackEntry (BVMemRepr n8 LittleEndian) ReturnAddr)]
preserveFreeBSDSyscallReg :: X86Reg tp -> Bool
preserveFreeBSDSyscallReg r
| Just Refl <- testEquality r CF = False
| Just Refl <- testEquality r RAX = False
| otherwise = True
| Linux preserves the same registers the x86_64 ABI does
linuxSystemCallPreservedRegisters :: Set.Set (Some X86Reg)
linuxSystemCallPreservedRegisters = x86CalleeSavedRegs
transferAbsValue :: AbsProcessorState X86Reg ids
-> X86PrimFn (Value X86_64 ids) tp
-> AbsValue 64 tp
transferAbsValue r f =
case f of
EvenParity _ -> TopV
ReadFSBase -> TopV
ReadGSBase -> TopV
GetSegmentSelector _ -> TopV
CPUID _ -> TopV
CMPXCHG8B{} -> TopV
RDTSC -> TopV
XGetBV _ -> TopV
PShufb{} -> TopV
We know only that it will return up to ( and including ( ? ) )
MemCmp _sz cnt _src _dest _rev
| Just upper <- hasMaximum knownRepr (transferValue r cnt) ->
stridedInterval $ SI.mkStridedInterval knownNat False 0 upper 1
| otherwise -> TopV
RepnzScas _sz _val _buf cnt
| Just upper <- hasMaximum knownRepr (transferValue r cnt) ->
stridedInterval $ SI.mkStridedInterval knownNat False 0 upper 1
| otherwise -> TopV
MMXExtend{} -> TopV
X86IDivRem{} -> TopV
X86DivRem{} -> TopV
SSE_UnaryOp{} -> TopV
SSE_VectorOp{} -> TopV
SSE_Sqrt{} -> TopV
SSE_CMPSX{} -> TopV
SSE_UCOMIS{} -> TopV
SSE_CVTSD2SS{} -> TopV
SSE_CVTSS2SD{} -> TopV
SSE_CVTSI2SX{} -> TopV
SSE_CVTTSX2SI{} -> TopV
X87_Extend{} -> TopV
X87_FST{} -> TopV
X87_FAdd{} -> TopV
X87_FSub{} -> TopV
X87_FMul{} -> TopV
CLMul{} -> TopV
AESNI_AESEnc{} -> TopV
AESNI_AESEncLast{} -> TopV
AESNI_AESDec{} -> TopV
AESNI_AESDecLast{} -> TopV
AESNI_AESKeyGenAssist{} -> TopV
AESNI_AESIMC{} -> TopV
SHA_sigma0{} -> TopV
SHA_sigma1{} -> TopV
SHA_Sigma0{} -> TopV
SHA_Sigma1{} -> TopV
SHA_Ch{} -> TopV
SHA_Maj{} -> TopV
XXX : Is ' TopV ' the right thing for the AVX instruction below ?
VOp1 {} -> TopV
VOp2 {} -> TopV
Pointwise2 {} -> TopV
PointwiseShiftL {} -> TopV
PointwiseLogicalShiftR {} -> TopV
VExtractF128 {} -> TopV
VInsert {} -> TopV
X86Syscall {} -> TopV
data X86BlockPrecond = X86BlockPrecond { blockInitX87TopReg :: !Word8
( should be from 0 to 7 with 7 the stack is empty . )
, blockInitDF :: !Bool
}
type instance ArchBlockPrecond X86_64 = X86BlockPrecond
extractX86BlockPrecond :: MemSegmentOff 64
-> AbsBlockState X86Reg
-> Either String X86BlockPrecond
extractX86BlockPrecond _addr ab = do
t <-
case asConcreteSingleton (ab^.absRegState^.boundValue X87_TopReg) of
Nothing -> Left "Could not determine height of X87 stack."
Just t -> pure t
d <-
case ab^.absRegState^.boundValue DF of
BoolConst b -> pure b
_ -> Left $ "Could not determine df flag " ++ show (ab^.absRegState^.boundValue DF)
pure $! X86BlockPrecond { blockInitX87TopReg = fromIntegral t
, blockInitDF = d
}
initX86BlockRegs :: forall ids
. MemSegmentOff 64
-> X86BlockPrecond
-> RegState X86Reg (Value X86_64 ids)
initX86BlockRegs addr pr =
let mkReg :: X86Reg tp -> Value X86_64 ids tp
mkReg r
| Just Refl <- testEquality r X86_IP =
RelocatableValue Addr64 (segoffAddr addr)
| Just Refl <- testEquality r X87_TopReg =
mkLit knownNat (toInteger (blockInitX87TopReg pr))
| Just Refl <- testEquality r DF =
BoolValue (blockInitDF pr)
| otherwise = Initial r
in mkRegState mkReg
| Generate a Macaw block starting from the given address .
translateBlockWithRegs :: forall s ids
. NonceGenerator (ST s) ids
^ Generator for creating fresh @AssignId@ values .
-> MemSegmentOff 64
-> RegState X86Reg (Value X86_64 ids)
-> Int
-> ST s (Block X86_64 ids, Int)
translateBlockWithRegs gen addr initRegs maxSize = do
case segoffContentsAfter addr of
Left err -> do
pure $! (initError initRegs err, 0)
Right contents -> do
(b, sz) <- translateBlockImpl gen (emptyPreBlock addr initRegs) addr 0 (fromIntegral maxSize) contents
pure $! (b, fromIntegral sz)
identifyX86Call :: Memory 64
-> Seq (Stmt X86_64 ids)
-> RegState X86Reg (Value X86_64 ids)
-> Maybe (Seq (Stmt X86_64 ids), MemSegmentOff 64)
identifyX86Call mem stmts0 s = go stmts0 Seq.empty
next_sp = s^.boundValue RSP
go stmts after =
case Seq.viewr stmts of
Seq.EmptyR -> Nothing
prev Seq.:> stmt
| WriteMem a _repr val <- stmt
, Just _ <- testEquality a next_sp
, Just Refl <- testEquality (typeRepr next_sp) (typeRepr val)
, Just val_a <- valueAsMemAddr val
, Just ret_addr <- asSegmentOff mem val_a
, segmentFlags (segoffSegment ret_addr) `Perm.hasPerm` Perm.execute ->
Just (prev Seq.>< after, ret_addr)
| ExecArchStmt _ <- stmt -> Nothing
| otherwise -> go prev (stmt Seq.<| after)
checkForReturnAddrX86 :: forall ids
. AbsProcessorState X86Reg ids
-> Bool
checkForReturnAddrX86 absState
| Just (StackEntry _ ReturnAddr) <- Map.lookup 0 (absState^.curAbsStack) =
True
| otherwise =
False
ReturnAddr value ( placed on the top of the stack by
identifyX86Return :: Seq (Stmt X86_64 ids)
-> RegState X86Reg (Value X86_64 ids)
-> AbsProcessorState X86Reg ids
-> Maybe (Seq (Stmt X86_64 ids))
identifyX86Return stmts s finalRegSt8 =
case transferValue finalRegSt8 (s^.boundValue ip_reg) of
ReturnAddr -> Just stmts
_ -> Nothing
freeBSD_syscallPersonality :: SyscallPersonality
freeBSD_syscallPersonality =
SyscallPersonality { spTypeInfo = FreeBSD.syscallInfo
, spResultRegisters = [ Some RAX ]
}
x86DemandContext :: DemandContext X86_64
x86DemandContext =
DemandContext { demandConstraints = \a -> a
, archFnHasSideEffects = x86PrimFnHasSideEffects
}
postX86TermStmtAbsState :: (forall tp . X86Reg tp -> Bool)
-> Memory 64
-> AbsProcessorState X86Reg ids
-> Jmp.IntraJumpBounds X86_64 ids
-> RegState X86Reg (Value X86_64 ids)
-> X86TermStmt (Value X86_64 ids)
-> Maybe ( MemSegmentOff 64
, AbsBlockState X86Reg
, Jmp.InitJumpBounds X86_64
)
postX86TermStmtAbsState _preservePred _mem _s _bnds _regs tstmt =
case tstmt of
Hlt ->
Nothing
UD2 ->
Nothing
x86_64CallParams :: CallParams X86Reg
x86_64CallParams =
CallParams { postCallStackDelta = 8
, preserveReg = \r -> Set.member (Some r) x86CalleeSavedRegs
, stackGrowsDown = True
}
x86_64_info :: (forall tp . X86Reg tp -> Bool)
-> ArchitectureInfo X86_64
x86_64_info preservePred =
ArchitectureInfo { withArchConstraints = \x -> x
, archAddrWidth = Addr64
, archEndianness = LittleEndian
, extractBlockPrecond = extractX86BlockPrecond
, initialBlockRegs = initX86BlockRegs
, disassembleFn = translateBlockWithRegs
, mkInitialAbsState = \_ addr -> initialX86AbsState addr
, absEvalArchFn = transferAbsValue
, absEvalArchStmt = \s _ -> s
, identifyCall = identifyX86Call
, archCallParams = x86_64CallParams
, checkForReturnAddr = \_ s -> checkForReturnAddrX86 s
, identifyReturn = identifyX86Return
, rewriteArchFn = rewriteX86PrimFn
, rewriteArchStmt = rewriteX86Stmt
, rewriteArchTermStmt = rewriteX86TermStmt
, archDemandContext = x86DemandContext
, postArchTermStmtAbsState = postX86TermStmtAbsState preservePred
, archClassifier = defaultClassifier
}
x86_64_freeBSD_info :: ArchitectureInfo X86_64
x86_64_freeBSD_info = x86_64_info preserveFreeBSDSyscallReg
linux_syscallPersonality :: SyscallPersonality
linux_syscallPersonality =
SyscallPersonality { spTypeInfo = Linux.syscallInfo
, spResultRegisters = [Some RAX]
}
| Architecture information for X86_64 .
x86_64_linux_info :: ArchitectureInfo X86_64
x86_64_linux_info = x86_64_info preserveFn
where preserveFn r = Set.member (Some r) linuxSystemCallPreservedRegisters
| PLT stub information for X86_64 relocation types .
x86_64PLTStubInfo :: MMEP.PLTStubInfo EE.X86_64_RelocationType
x86_64PLTStubInfo = MMEP.PLTStubInfo
{ MMEP.pltFunSize = 16
, MMEP.pltStubSize = 16
, MMEP.pltGotStubSize = 8
}
-- | Disassemble a block starting at the instruction pointer of the given
-- 'ExploreLoc'. The second result is the number of bytes consumed. The
-- requested maximum size is clamped so it fits in an 'Int' before being
-- handed to the translator.
disassembleBlock :: forall s
                  . NonceGenerator (ST s) s
                 -> ExploreLoc
                 -> MemWord 64
                 -> ST s (Block X86_64 s, MemWord 64)
disassembleBlock gen loc maxSize = do
  let clamped :: Int
      clamped = fromIntegral (min (toInteger (maxBound :: Int)) (toInteger maxSize))
  (block, size) <- translateBlockWithRegs gen (loc_ip loc) (initX86State loc) clamped
  pure (block, fromIntegral size)
-- | Translate a block at @addr@ with the supplied initial register state.
-- Runs in 'ExceptT' and returns the block, its size, and an error string
-- slot that is always 'Nothing' here.
tryDisassembleBlock :: forall s ids
                     . NonceGenerator (ST s) ids
                    -> MemSegmentOff 64
                    -> RegState X86Reg (Value X86_64 ids)
                    -> Int
                    -> ExceptT String (ST s) (Block X86_64 ids, Int, Maybe String)
tryDisassembleBlock gen addr initRegs maxSize =
  lift $ (\(blk, sz) -> (blk, sz, Nothing)) <$> translateBlockWithRegs gen addr initRegs maxSize
|
aac4bf422672ce576c88295ede4cf7b19fe0517625da4301193fd34a4353569b | takikawa/racket-ppa | kw-file.rkt | (module kw-file "pre-base.rkt"
(require (prefix-in k: "pre-base.rkt")
"sort.rkt")
(provide (rename-out
[open-input-file -open-input-file]
[open-output-file -open-output-file]
[open-input-output-file -open-input-output-file]
[call-with-input-file -call-with-input-file]
[call-with-output-file -call-with-output-file]
[with-input-from-file -with-input-from-file]
[with-output-to-file -with-output-to-file]
[raise-syntax-error -raise-syntax-error])
call-with-input-file*
call-with-output-file*
(rename-out
[directory-list -directory-list]))
;; Symbols accepted for the #:exists argument of the output-opening
;; procedures below.
(define exists-syms
  '(error append update can-update replace truncate must-truncate truncate/replace))
;; Contract strings used in raise-argument-error messages.
(define exists-desc
  "(or/c 'error 'append 'update 'can-update 'replace 'truncate 'must-truncate 'truncate/replace)")
(define binary-or-text-desc
  "(or/c 'binary 'text)")
;; Default permission bits for newly created files: #o666 = rw for
;; owner, group, and other.
(define DEFAULT-CREATE-PERMS #o666)
;; Recognizes a valid #:permissions value: an exact integer in [0, 65535].
(define (permissions? perms)
  (and (exact-integer? perms) (<= 0 perms 65535)))
(define perms-desc "(integer-in 0 65535)")
;; Contract-checking front end for k:open-input-file; the third argument
;; to the underlying open is 'module when #:for-module? is true, 'none
;; otherwise.
(define (open-input-file path #:mode [mode 'binary] #:for-module? [for-module? #f])
  (unless (path-string? path)
    (raise-argument-error 'open-input-file "path-string?" path))
  (unless (memq mode '(binary text))
    (raise-argument-error 'open-input-file binary-or-text-desc mode))
  (k:open-input-file path mode (if for-module? 'module 'none)))
;; Contract-checking front end for k:open-output-file. Arguments are
;; validated in order: path, #:mode, #:exists, #:permissions.
(define (open-output-file path #:mode [mode 'binary]
                          #:exists [exists 'error]
                          #:permissions [perms DEFAULT-CREATE-PERMS])
  (unless (path-string? path)
    (raise-argument-error 'open-output-file "path-string?" path))
  (unless (memq mode '(binary text))
    (raise-argument-error 'open-output-file binary-or-text-desc mode))
  (unless (memq exists exists-syms)
    (raise-argument-error 'open-output-file exists-desc exists))
  (unless (permissions? perms)
    (raise-argument-error 'open-output-file perms-desc perms))
  (k:open-output-file path mode exists perms))
;; Contract-checking front end for k:open-input-output-file, which opens
;; the same file for both reading and writing.
(define (open-input-output-file path #:mode [mode 'binary]
                                #:exists [exists 'error]
                                #:permissions [perms DEFAULT-CREATE-PERMS])
  (unless (path-string? path)
    (raise-argument-error 'open-input-output-file "path-string?" path))
  (unless (memq mode '(binary text))
    (raise-argument-error 'open-input-output-file binary-or-text-desc mode))
  (unless (memq exists exists-syms)
    (raise-argument-error 'open-input-output-file exists-desc exists))
  (unless (permissions? perms)
    (raise-argument-error 'open-input-output-file perms-desc perms))
  (k:open-input-output-file path mode exists perms))
;; Contract-checking front end for k:call-with-input-file; `proc` must
;; accept the opened input port as its single argument.
(define (call-with-input-file path proc #:mode [mode 'binary])
  (unless (path-string? path)
    (raise-argument-error 'call-with-input-file "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 1))
    (raise-argument-error 'call-with-input-file "(input-port? . -> . any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'call-with-input-file binary-or-text-desc mode))
  (k:call-with-input-file path proc mode))
;; Contract-checking front end for k:call-with-output-file; `proc` must
;; accept the opened output port as its single argument.
(define (call-with-output-file path proc
                               #:mode [mode 'binary]
                               #:exists [exists 'error]
                               #:permissions [perms DEFAULT-CREATE-PERMS])
  (unless (path-string? path)
    (raise-argument-error 'call-with-output-file "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 1))
    (raise-argument-error 'call-with-output-file "(output-port? . -> . any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'call-with-output-file binary-or-text-desc mode))
  (unless (memq exists exists-syms)
    (raise-argument-error 'call-with-output-file exists-desc exists))
  (unless (permissions? perms)
    (raise-argument-error 'call-with-output-file perms-desc perms))
  (k:call-with-output-file path proc mode exists perms))
;; Contract-checking front end for k:with-input-from-file; `proc` is a
;; thunk run with current-input-port redirected to the file.
(define (with-input-from-file path proc #:mode [mode 'binary])
  (unless (path-string? path)
    (raise-argument-error 'with-input-from-file "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 0))
    (raise-argument-error 'with-input-from-file "(-> any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'with-input-from-file binary-or-text-desc mode))
  (k:with-input-from-file path proc mode))
;; Contract-checking front end for k:with-output-to-file; `proc` is a
;; thunk run with current-output-port redirected to the file.
(define (with-output-to-file path proc
                             #:mode [mode 'binary]
                             #:exists [exists 'error]
                             #:permissions [perms DEFAULT-CREATE-PERMS])
  (unless (path-string? path)
    (raise-argument-error 'with-output-to-file "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 0))
    (raise-argument-error 'with-output-to-file "(-> any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'with-output-to-file binary-or-text-desc mode))
  (unless (memq exists exists-syms)
    (raise-argument-error 'with-output-to-file exists-desc exists))
  (unless (permissions? perms)
    (raise-argument-error 'with-output-to-file perms-desc perms))
  (k:with-output-to-file path proc mode exists perms))
;; Like call-with-input-file, but the port is closed whenever control
;; leaves `proc` (the dynamic-wind after-thunk runs on any exit).
(define (call-with-input-file* path proc #:mode [mode 'binary])
  (unless (path-string? path)
    (raise-argument-error 'call-with-input-file* "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 1))
    (raise-argument-error 'call-with-input-file* "(input-port? . -> . any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'call-with-input-file* binary-or-text-desc mode))
  (let ([p (k:open-input-file path mode)])
    (dynamic-wind
     void
     (lambda () (proc p))
     (lambda () (close-input-port p)))))
;; Like call-with-output-file, but the port is closed whenever control
;; leaves `proc` (the dynamic-wind after-thunk runs on any exit).
(define (call-with-output-file* path proc
                                #:mode [mode 'binary]
                                #:exists [exists 'error]
                                #:permissions [perms DEFAULT-CREATE-PERMS])
  (unless (path-string? path)
    (raise-argument-error 'call-with-output-file* "path-string?" path))
  (unless (and (procedure? proc)
               (procedure-arity-includes? proc 1))
    (raise-argument-error 'call-with-output-file* "(output-port? . -> . any)" proc))
  (unless (memq mode '(binary text))
    (raise-argument-error 'call-with-output-file* binary-or-text-desc mode))
  (unless (memq exists exists-syms)
    (raise-argument-error 'call-with-output-file* exists-desc exists))
  (unless (permissions? perms)
    (raise-argument-error 'call-with-output-file* perms-desc perms))
  (let ([p (k:open-output-file path mode exists perms)])
    (dynamic-wind
     void
     (lambda () (proc p))
     (lambda () (close-output-port p)))))
;; Using `define-values' to avoid the inlining expansion for keyword
;; arguments, because that expansion confuses Typed Racket:
;; Returns the contents of `dir` sorted by path<?; with #:build? #t each
;; entry is combined with `dir` via build-path.
(define-values (directory-list)
  (lambda ([dir (current-directory)] #:build? [build? #f])
    (unless (path-string? dir)
      (raise-argument-error 'directory-list "path-string?" dir))
    (let ([content (sort (k:directory-list dir)
                         path<?)])
      (if build?
          (map (lambda (i) (build-path dir i)) content)
          content))))
;; Front end for raise-syntax-error: fills in the optional arguments and
;; delegates to do-raise-syntax-error, passing the exception constructor
;; selected by #:exn.
(define (raise-syntax-error given-name message
                            [expr #f] [sub-expr #f]
                            [extra-sources null]
                            [message-suffix ""]
                            #:exn [exn exn:fail:syntax])
  (do-raise-syntax-error 'raise-syntax-error exn given-name message
                         expr sub-expr
                         extra-sources
                         message-suffix)))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/caff086a1cd48208815cec2a22645a3091c11d4c/collects/racket/private/kw-file.rkt | racket | Using `define-values' to avoid the inlining expansion for keyword
arguments, because that expansion confuses Typed Racket: | (module kw-file "pre-base.rkt"
(require (prefix-in k: "pre-base.rkt")
"sort.rkt")
(provide (rename-out
[open-input-file -open-input-file]
[open-output-file -open-output-file]
[open-input-output-file -open-input-output-file]
[call-with-input-file -call-with-input-file]
[call-with-output-file -call-with-output-file]
[with-input-from-file -with-input-from-file]
[with-output-to-file -with-output-to-file]
[raise-syntax-error -raise-syntax-error])
call-with-input-file*
call-with-output-file*
(rename-out
[directory-list -directory-list]))
(define exists-syms
'(error append update can-update replace truncate must-truncate truncate/replace))
(define exists-desc
"(or/c 'error 'append 'update 'can-update 'replace 'truncate 'must-truncate 'truncate/replace)")
(define binary-or-text-desc
"(or/c 'binary 'text)")
(define DEFAULT-CREATE-PERMS #o666)
(define (permissions? perms)
(and (exact-integer? perms) (<= 0 perms 65535)))
(define perms-desc "(integer-in 0 65535)")
(define (open-input-file path #:mode [mode 'binary] #:for-module? [for-module? #f])
(unless (path-string? path)
(raise-argument-error 'open-input-file "path-string?" path))
(unless (memq mode '(binary text))
(raise-argument-error 'open-input-file binary-or-text-desc mode))
(k:open-input-file path mode (if for-module? 'module 'none)))
(define (open-output-file path #:mode [mode 'binary]
#:exists [exists 'error]
#:permissions [perms DEFAULT-CREATE-PERMS])
(unless (path-string? path)
(raise-argument-error 'open-output-file "path-string?" path))
(unless (memq mode '(binary text))
(raise-argument-error 'open-output-file binary-or-text-desc mode))
(unless (memq exists exists-syms)
(raise-argument-error 'open-output-file exists-desc exists))
(unless (permissions? perms)
(raise-argument-error 'open-output-file perms-desc perms))
(k:open-output-file path mode exists perms))
(define (open-input-output-file path #:mode [mode 'binary]
#:exists [exists 'error]
#:permissions [perms DEFAULT-CREATE-PERMS])
(unless (path-string? path)
(raise-argument-error 'open-input-output-file "path-string?" path))
(unless (memq mode '(binary text))
(raise-argument-error 'open-input-output-file binary-or-text-desc mode))
(unless (memq exists exists-syms)
(raise-argument-error 'open-input-output-file exists-desc exists))
(unless (permissions? perms)
(raise-argument-error 'open-input-output-file perms-desc perms))
(k:open-input-output-file path mode exists perms))
(define (call-with-input-file path proc #:mode [mode 'binary])
(unless (path-string? path)
(raise-argument-error 'call-with-input-file "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 1))
(raise-argument-error 'call-with-input-file "(input-port? . -> . any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'call-with-input-file binary-or-text-desc mode))
(k:call-with-input-file path proc mode))
(define (call-with-output-file path proc
#:mode [mode 'binary]
#:exists [exists 'error]
#:permissions [perms DEFAULT-CREATE-PERMS])
(unless (path-string? path)
(raise-argument-error 'call-with-output-file "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 1))
(raise-argument-error 'call-with-output-file "(output-port? . -> . any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'call-with-output-file binary-or-text-desc mode))
(unless (memq exists exists-syms)
(raise-argument-error 'call-with-output-file exists-desc exists))
(unless (permissions? perms)
(raise-argument-error 'call-with-output-file perms-desc perms))
(k:call-with-output-file path proc mode exists perms))
(define (with-input-from-file path proc #:mode [mode 'binary])
(unless (path-string? path)
(raise-argument-error 'with-input-from-file "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 0))
(raise-argument-error 'with-input-from-file "(-> any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'with-input-from-file binary-or-text-desc mode))
(k:with-input-from-file path proc mode))
(define (with-output-to-file path proc
#:mode [mode 'binary]
#:exists [exists 'error]
#:permissions [perms DEFAULT-CREATE-PERMS])
(unless (path-string? path)
(raise-argument-error 'with-output-to-file "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 0))
(raise-argument-error 'with-output-to-file "(-> any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'with-output-to-file binary-or-text-desc mode))
(unless (memq exists exists-syms)
(raise-argument-error 'with-output-to-file exists-desc exists))
(unless (permissions? perms)
(raise-argument-error 'with-output-to-file perms-desc perms))
(k:with-output-to-file path proc mode exists perms))
(define (call-with-input-file* path proc #:mode [mode 'binary])
(unless (path-string? path)
(raise-argument-error 'call-with-input-file* "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 1))
(raise-argument-error 'call-with-input-file* "(input-port? . -> . any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'call-with-input-file* binary-or-text-desc mode))
(let ([p (k:open-input-file path mode)])
(dynamic-wind
void
(lambda () (proc p))
(lambda () (close-input-port p)))))
(define (call-with-output-file* path proc
#:mode [mode 'binary]
#:exists [exists 'error]
#:permissions [perms DEFAULT-CREATE-PERMS])
(unless (path-string? path)
(raise-argument-error 'call-with-output-file* "path-string?" path))
(unless (and (procedure? proc)
(procedure-arity-includes? proc 1))
(raise-argument-error 'call-with-output-file* "(output-port? . -> . any)" proc))
(unless (memq mode '(binary text))
(raise-argument-error 'call-with-output-file* binary-or-text-desc mode))
(unless (memq exists exists-syms)
(raise-argument-error 'call-with-output-file* exists-desc exists))
(unless (permissions? perms)
(raise-argument-error 'call-with-output-file* perms-desc perms))
(let ([p (k:open-output-file path mode exists perms)])
(dynamic-wind
void
(lambda () (proc p))
(lambda () (close-output-port p)))))
(define-values (directory-list)
(lambda ([dir (current-directory)] #:build? [build? #f])
(unless (path-string? dir)
(raise-argument-error 'directory-list "path-string?" dir))
(let ([content (sort (k:directory-list dir)
path<?)])
(if build?
(map (lambda (i) (build-path dir i)) content)
content))))
(define (raise-syntax-error given-name message
[expr #f] [sub-expr #f]
[extra-sources null]
[message-suffix ""]
#:exn [exn exn:fail:syntax])
(do-raise-syntax-error 'raise-syntax-error exn given-name message
expr sub-expr
extra-sources
message-suffix)))
|
e4a02f4307a2a38382051097cf95d8a5058389e015968c592eff5f818a32cb42 | metaocaml/ber-metaocaml | unclosed_simple_pattern.ml | (* TEST
* toplevel
*)
let f = function
| List.(_
;;
let f = function
| (_
;;
let f = function
| (_ : int
;;
(* Impossible to get the "unclosed (" message here. This case gets absorbed by
val_ident... *)
let f = function
| (module Foo : sig end
;;
(* As with expressions, impossible to get the unclosed message for the following
cases. *)
let f = function
| { foo; bar;
;;
let f = function
| [ 1; 2;
;;
let f = function
| [| 3; 4;
;;
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/parse-errors/unclosed_simple_pattern.ml | ocaml | TEST
* toplevel
Impossible to get the "unclosed (" message here. This case gets absorbed by
val_ident...
As with expressions, impossible to get the unclosed message for the following
cases. |
let f = function
| List.(_
;;
let f = function
| (_
;;
let f = function
| (_ : int
;;
let f = function
| (module Foo : sig end
;;
let f = function
| { foo; bar;
;;
let f = function
| [ 1; 2;
;;
let f = function
| [| 3; 4;
;;
|
7ff6351475227e4e202086b8efa586c84412bc61863192dd91f2a5a0507f8bca | tiensonqin/lymchat | spatial.clj | (ns api.pg.spatial
":require [api.pg.spatial :as st]"
(:import [org.postgis Geometry PGgeometryLW PGgeometry LineString LinearRing MultiLineString MultiPoint MultiPolygon Point Polygon]))
;; Thin interop wrapper over Geometry#getSrid.
(defn srid
  "Returns the set SRID of a geometry object"
  [^Geometry geometry]
  (.getSrid geometry))
(defn with-srid!
  "Return the geometry object with SRID set. Alters the object."
  [^Geometry geometry srid]
  ;; doto mutates the given geometry in place and returns it — hence the !.
  (doto geometry
    (.setSrid ^int srid)))
(defn- pointy-structure?
  "True when `x` is already a Point, or a collection of two or three
  numbers (i.e. something `point` could turn into a 2D/3D Point).
  NOTE(review): appears unused within this namespace — confirm callers."
  [x]
  (or (instance? Point x)
      (and (coll? x)
           (>= (count x) 2)
           (<= (count x) 3)
           ;; Fix: the original tested (every? number? (map number? x)),
           ;; which applies number? to booleans and is therefore always
           ;; false for any non-empty collection. Check the elements.
           (every? number? x))))
(defn point
  "Make a 2D or 3D Point."
  ;; Arities: explicit 2D/3D coordinates, or a single value that is
  ;; already a Point, a coll of 2-3 coordinates, or anything whose string
  ;; form the Point constructor can parse (presumably WKT — confirm
  ;; against org.postgis.Point).
  ([x y]
   (Point. x y))
  ([x y z]
   (Point. x y z))
  ([coll-or-str]
   (cond (instance? Point coll-or-str) coll-or-str
         (coll? coll-or-str) (let [x (first coll-or-str)
                                   y (second coll-or-str)]
                               (if-let [z (nth coll-or-str 2 nil)]
                                 (Point. x y z)
                                 (Point. x y)))
         :else (Point. (str coll-or-str)))))
(defn multi-point
  "Coerce `points` into a MultiPoint: pass an existing MultiPoint
  through, build one from a collection of point-coercible values, or
  parse the argument's string representation."
  [points]
  (if (instance? MultiPoint points)
    points
    (if (coll? points)
      (->> points (map point) (into-array Point) (MultiPoint.))
      (MultiPoint. (str points)))))
(defn line-string
  "Coerce `points` into a LineString: an existing LineString is returned
  unchanged, a collection is converted element-by-element via `point`,
  anything else is parsed from its string representation."
  [points]
  (if (instance? LineString points)
    points
    (if (coll? points)
      (LineString. (into-array Point (mapv point points)))
      (LineString. (str points)))))
(defn multi-line-string
  "Coerce `line-strings` into a MultiLineString: pass one through, build
  from a collection of line-string-coercible values, or parse a string."
  [line-strings]
  (if (instance? MultiLineString line-strings)
    line-strings
    (if (coll? line-strings)
      (->> line-strings (map line-string) (into-array LineString) (MultiLineString.))
      (MultiLineString. (str line-strings)))))
(defn linear-ring
  "Used for constructing Polygons from Points: an existing LinearRing
  passes through, a collection is converted via `point`, anything else is
  parsed from its string representation."
  [points]
  (if (instance? LinearRing points)
    points
    (if (coll? points)
      (LinearRing. (into-array Point (mapv point points)))
      (LinearRing. (str points)))))
(defn polygon
  "Make a Polygon from a collection of linear-ring-coercible values; an
  existing Polygon passes through, a non-collection is parsed from its
  string form."
  [linear-rings]
  (if (instance? Polygon linear-rings)
    linear-rings
    (if (coll? linear-rings)
      (->> linear-rings (map linear-ring) (into-array LinearRing) (Polygon.))
      (Polygon. (str linear-rings)))))
(defn multi-polygon
  "Make a MultiPolygon from a collection of polygon-coercible values; an
  existing MultiPolygon passes through, a non-collection is parsed from
  its string form."
  [polygons]
  (if (instance? MultiPolygon polygons)
    polygons
    (if (coll? polygons)
      (MultiPolygon. (into-array Polygon (mapv polygon polygons)))
      (MultiPolygon. (str polygons)))))
(defn pg-geom
  "Wrap a Geometry in a PGgeometryLW."
  [geometry]
  (PGgeometryLW. geometry))
| null | https://raw.githubusercontent.com/tiensonqin/lymchat/824026607d30c12bc50afb06f677d1fa95ff1f2f/api/src/api/pg/spatial.clj | clojure | (ns api.pg.spatial
":require [api.pg.spatial :as st]"
(:import [org.postgis Geometry PGgeometryLW PGgeometry LineString LinearRing MultiLineString MultiPoint MultiPolygon Point Polygon]))
(defn srid
"Returns the set SRID of a geometry object"
[^Geometry geometry]
(.getSrid geometry))
(defn with-srid!
"Return the geometry object with SRID set. Alters the object."
[^Geometry geometry srid]
(doto geometry
(.setSrid ^int srid)))
(defn- pointy-structure?
[x]
(or (instance? Point x)
(and (coll? x)
(>= (count x) 2)
(<= (count x) 3)
(every? number? (map number? x)))))
(defn point
"Make a 2D or 3D Point."
([x y]
(Point. x y))
([x y z]
(Point. x y z))
([coll-or-str]
(cond (instance? Point coll-or-str) coll-or-str
(coll? coll-or-str) (let [x (first coll-or-str)
y (second coll-or-str)]
(if-let [z (nth coll-or-str 2 nil)]
(Point. x y z)
(Point. x y)))
:else (Point. (str coll-or-str)))))
(defn multi-point
"Make a MultiPoint from collection of Points."
[points]
(cond (instance? MultiPoint points) points
(coll? points) (MultiPoint. (into-array Point (map point points)))
:else (MultiPoint. (str points))))
(defn line-string
"Make a LineString from a collection of points."
[points]
(cond (instance? LineString points) points
(coll? points) (LineString. (into-array Point (map point points)))
:else (LineString. (str points))))
(defn multi-line-string
"Make a MultiLineString from a collection of LineStrings."
[line-strings]
(cond (instance? MultiLineString line-strings) line-strings
(coll? line-strings) (MultiLineString. (into-array LineString (map line-string line-strings)))
:else (MultiLineString. (str line-strings))))
(defn linear-ring
"Used for constructing Polygons from Points."
[points]
(cond (instance? LinearRing points) points
(coll? points) (LinearRing. (into-array Point (map point points)))
:else (LinearRing. (str points))))
(defn polygon
"Make a Polygon from a collection of Points."
[linear-rings]
(cond (instance? Polygon linear-rings) linear-rings
(coll? linear-rings) (Polygon. (into-array LinearRing (map linear-ring linear-rings)))
:else (Polygon. (str linear-rings))))
(defn multi-polygon
"Make a MultiPolygon from collection of Polygons."
[polygons]
(cond (instance? MultiPolygon polygons) polygons
(coll? polygons) (MultiPolygon. (into-array Polygon (map polygon polygons)))
:else (MultiPolygon. (str polygons))))
(defn pg-geom
[geometry]
(PGgeometryLW. geometry))
|
|
02216d5b33041e7e2ea40510af7f1e2d46c799e73cd8100758893f9c3110d91b | S8A/htdp-exercises | ex148.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex148) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
; Q: Is it better to work with data definitions that accommodate
; empty lists as opposed to definitions for non-empty lists? Why? Why not?
; A: It's hard to say whether one is always better than the other since
; there are infinite possibilities and not all problems are the same.
; However, there are cases where it's obvious that accommodating empty
; lists in the data definition is counter-productive. The all-true and
; one-true functions are good examples. The all-true function determines
; whether all the elements in a list of booleans are true, but to work
; it needs the base case of an empty list to be true, which doesn't make
; much sense on its face (if the list is empty there are no elements
; to be true or false). The one-true function determines whether at least
; one element is true, and says that this is false for the base case of an
; empty list, which does make sense since there are no items that are true.
; But now there's a contradiction: all-true says that all the elements of an
; empty list are true but one-true says that an empty list doesn't have even
; one item that is true.
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex148.rkt | racket | about the language level of this file in a form that our tools can easily process.
Q: Is it better to work with data definitions that accommodate
empty lists as opposed to definitions for non-empty lists? Why? Why not?
A: It's hard to say whether one is always better than the other since
there are infinite possibilities and not all problems are the same.
However, there are cases where it's obvious that accommodating empty
lists in the data definition is counter-productive. The all-true and
whether all the elements in a list of booleans are true, but to work
it needs the base case of an empty list to be true, which doesn't make
much sense on its face (if the list is empty there are no elements
empty list, which does make sense since there are no items that are true.
But now there's a contradiction: all-true says that all the elements of an | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex148) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
one - true functions are good examples . The all - true function determines
to be true or false ) . The one - true function determines whether at least
one element is true , and says that this is false for the base case of an
empty list are true but one - true says that an empty list does n't have even
one item that is true .
|
244de4973e312f397cefc03c14c4a8f271515a147d933040c0f160e6261629dd | broadinstitute/wfl | logging_test.clj | (ns wfl.unit.logging-test
"Test that our overcomplicated logging works."
(:require [clojure.test :refer [is deftest testing]]
[clojure.data.json :as json]
[clojure.edn :as edn]
[clojure.string :as str]
[wfl.log :as log])
(:import [java.util.regex Pattern]))
;; Dispatch on the [type-of-expected type-of-actual] pair so tests can
;; match a logged message either literally or with a regular expression.
(defmulti check-message?
  (fn [expected actual]
    [(type expected) (type actual)]))
;; Fallback: plain equality.
(defmethod check-message? :default
  [expected actual]
  (= expected actual))
;; A Pattern expectation matches anywhere in the actual string.
(defmethod check-message? [Pattern String]
  [expected actual]
  (re-find expected actual))
(defn logged?
  "Test that the expected logs happened."
  [result severity test]
  ;; `result` is one JSON log line. Its :severity must equal the
  ;; upper-cased severity keyword, and `test` (string or regex) must
  ;; match the value at [:message :result].
  (let [json (json/read-str result :key-fn keyword)]
    (and (= (:severity json) (-> severity name str/upper-case))
         (boolean (check-message? test (get-in json [:message :result]))))))
;; With the :debug threshold active, all four severities must be emitted.
(deftest level-test
  (testing "basic logging levels"
    (with-redefs [log/active-level-predicate (atom (:debug @#'log/active-map))]
      (is (logged? (with-out-str (log/info "ofni")) :info "ofni"))
      (is (logged? (with-out-str (log/warning "gninraw")) :warning "gninraw"))
      (is (logged? (with-out-str (log/error "rorre")) :error "rorre"))
      (is (logged? (with-out-str (log/debug "gubed")) :debug "gubed")))))
;; With the :info threshold active, :debug output must be suppressed
;; entirely while :info still logs.
(deftest severity-level-filtering-test
  (testing "logging level ignores lesser severities"
    (with-redefs [log/active-level-predicate (atom (:info @#'log/active-map))]
      (is (str/blank? (with-out-str (log/debug "Debug Message"))))
      (is (logged? (with-out-str (log/info "Info Message"))
                   :info "Info Message")))))
;; Throw a zoo of values at the logger. The test fails if logging itself
;; throws (caught below and reported via (is false)) or if the backstop
;; marker "tried-to-log" leaks into the output.
(deftest log-almost-anything
  (testing "WFL can log almost anything"
    (let [tagged (tagged-literal 'object :fnord)
          exinfo (ex-info "log ex-info" {:tagged tagged})]
      (try
        (is (not (str/includes?
                  (with-out-str
                    (log/error false :what? true)
                    (log/error nil)
                    (log/error Math/E :set #{false nil nil? true Math/E})
                    (log/error 23 :ok 'OK! :never "mind")
                    (log/error (/ 22 7) :pi :pi :pi 'pie)
                    (log/error \X 'hey 'hey 'hey 'ho)
                    (log/error log/log :nope #_"nope" 'nope #_'ok)
                    (log/error '(comment comment) :no (comment comment))
                    (log/error tagged)
                    (log/error (str ex-info "log ex-info" {:tagged tagged}))
                    (log/error (ex-info "log ex-info" {:tagged tagged}))
                    (log/error (type tagged))
                    (log/error (type (type tagged)))
                    (log/error (ancestors (type tagged)))
                    (log/error {(type tagged) tagged})
                    (log/error ["There is a character C here:" \C])
                    (log/error (rest [exinfo exinfo exinfo exinfo]))
                    (log/error #{:a :symbol `symbol})
                    (log/error #{(type tagged) tagged})
                    (log/error (list \X tagged (list) (type tagged) list)))
                  "tried-to-log")))
        (catch Throwable t
          (log/emergency {:t t})
          (is false))))))
;; EDN reader fallback: unknown tagged literals in logged output become
;; WTF records instead of making edn/read-string throw.
(defrecord WTF [tag value])
(def ^:private wtf (partial edn/read-string {:default ->WTF}))
;; A nested ex-info chain must round-trip through the JSON log: severity,
;; outermost ex-data, innermost cause message, and innermost ex-data.
(deftest can-log-exception
  (testing "exceptions can be serialized as JSON"
    (let [oops (ex-info "Oops!" {:why "I did it again."}
                        (ex-info "I played with your heart."
                                 {:why "Got lost in the game."}
                                 (ex-info "Oh baby, baby."
                                          {:oops "You think I'm in love."
                                           :that "I'm sent from above."})))
          log (json/read-str (with-out-str (log/alert oops)) :key-fn keyword)
          result (:value (wtf (get-in log [:message :result])))]
      (is (= "ALERT" (get-in log [:severity])))
      (is (= "I did it again." (get-in result [:via 0 :data :why])))
      (is (= "Oh baby, baby." (get-in result [:cause])))
      (is (= "I'm sent from above." (get-in result [:data :that]))))))
;; Cannot (log/emergency ##NaN) directly because JSON serialization
;; chokes on it.
(deftest log-has-backstop
  (testing "JSON-incompatible input is stringified"
    ;; (Math/sqrt -1) is ##NaN, which JSON cannot encode; the logger's
    ;; backstop must emit a tried-to-log/cause record instead of failing.
    (let [log (with-out-str (log/emergency (Math/sqrt -1)))
          edn (wtf (json/read-str log :key-fn keyword))
          {:keys [tried-to-log cause]} edn]
      (is (map? tried-to-log))
      (is (map? cause))
      (let [{:keys [::log/message ::log/severity]} tried-to-log]
        (is (map? message))
        (is (= "EMERGENCY" severity)))
      (let [{:keys [tag value]} cause]
        (is (= 'error tag))
        (is (= "JSON error: cannot write Double NaN" (:cause value)))))))
| null | https://raw.githubusercontent.com/broadinstitute/wfl/acc8b6bb7a510d99d4dd33c21f0d63acf8723717/api/test/wfl/unit/logging_test.clj | clojure | (ns wfl.unit.logging-test
"Test that our overcomplicated logging works."
(:require [clojure.test :refer [is deftest testing]]
[clojure.data.json :as json]
[clojure.edn :as edn]
[clojure.string :as str]
[wfl.log :as log])
(:import [java.util.regex Pattern]))
(defmulti check-message?
(fn [expected actual]
[(type expected) (type actual)]))
(defmethod check-message? :default
[expected actual]
(= expected actual))
(defmethod check-message? [Pattern String]
[expected actual]
(re-find expected actual))
(defn logged?
"Test that the expected logs happened."
[result severity test]
(let [json (json/read-str result :key-fn keyword)]
(and (= (:severity json) (-> severity name str/upper-case))
(boolean (check-message? test (get-in json [:message :result]))))))
(deftest level-test
(testing "basic logging levels"
(with-redefs [log/active-level-predicate (atom (:debug @#'log/active-map))]
(is (logged? (with-out-str (log/info "ofni")) :info "ofni"))
(is (logged? (with-out-str (log/warning "gninraw")) :warning "gninraw"))
(is (logged? (with-out-str (log/error "rorre")) :error "rorre"))
(is (logged? (with-out-str (log/debug "gubed")) :debug "gubed")))))
(deftest severity-level-filtering-test
(testing "logging level ignores lesser severities"
(with-redefs [log/active-level-predicate (atom (:info @#'log/active-map))]
(is (str/blank? (with-out-str (log/debug "Debug Message"))))
(is (logged? (with-out-str (log/info "Info Message"))
:info "Info Message")))))
(deftest log-almost-anything
(testing "WFL can log almost anything"
(let [tagged (tagged-literal 'object :fnord)
exinfo (ex-info "log ex-info" {:tagged tagged})]
(try
(is (not (str/includes?
(with-out-str
(log/error false :what? true)
(log/error nil)
(log/error Math/E :set #{false nil nil? true Math/E})
(log/error 23 :ok 'OK! :never "mind")
(log/error (/ 22 7) :pi :pi :pi 'pie)
(log/error \X 'hey 'hey 'hey 'ho)
(log/error log/log :nope #_"nope" 'nope #_'ok)
(log/error '(comment comment) :no (comment comment))
(log/error tagged)
(log/error (str ex-info "log ex-info" {:tagged tagged}))
(log/error (ex-info "log ex-info" {:tagged tagged}))
(log/error (type tagged))
(log/error (type (type tagged)))
(log/error (ancestors (type tagged)))
(log/error {(type tagged) tagged})
(log/error ["There is a character C here:" \C])
(log/error (rest [exinfo exinfo exinfo exinfo]))
(log/error #{:a :symbol `symbol})
(log/error #{(type tagged) tagged})
(log/error (list \X tagged (list) (type tagged) list)))
"tried-to-log")))
(catch Throwable t
(log/emergency {:t t})
(is false))))))
(defrecord WTF [tag value])
(def ^:private wtf (partial edn/read-string {:default ->WTF}))
(deftest can-log-exception
(testing "exceptions can be serialized as JSON"
(let [oops (ex-info "Oops!" {:why "I did it again."}
(ex-info "I played with your heart."
{:why "Got lost in the game."}
(ex-info "Oh baby, baby."
{:oops "You think I'm in love."
:that "I'm sent from above."})))
log (json/read-str (with-out-str (log/alert oops)) :key-fn keyword)
result (:value (wtf (get-in log [:message :result])))]
(is (= "ALERT" (get-in log [:severity])))
(is (= "I did it again." (get-in result [:via 0 :data :why])))
(is (= "Oh baby, baby." (get-in result [:cause])))
(is (= "I'm sent from above." (get-in result [:data :that]))))))
Can not ( log / emergency # # NaN ) because on it .
(deftest log-has-backstop
(testing "JSON-incompatible input is stringified"
(let [log (with-out-str (log/emergency (Math/sqrt -1)))
edn (wtf (json/read-str log :key-fn keyword))
{:keys [tried-to-log cause]} edn]
(is (map? tried-to-log))
(is (map? cause))
(let [{:keys [::log/message ::log/severity]} tried-to-log]
(is (map? message))
(is (= "EMERGENCY" severity)))
(let [{:keys [tag value]} cause]
(is (= 'error tag))
(is (= "JSON error: cannot write Double NaN" (:cause value)))))))
|
|
be8d8e82cdcedcdb5699337d0974b77b2a0085152df1326dba19b7c95f6588bc | victornicolet/parsynt | ExpressionReduction.ml | *
   This file is part of Parsynt.

   Author: Victor Nicolet <>

   Parsynt is free software: you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   Parsynt is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with Parsynt.  If not, see </>.
*)
open Beta
open Utils
open Fn
open FnPretty
open Format
open Expressions
let rec is_stv vset expr =
match expr with
| FnUnop (_, FnVar v) | FnVar v -> (
try VarSet.mem (check_option (vi_of v)) vset with Failure _ -> false)
| FnCond (c, _, _) -> is_stv vset c
| _ -> false
let scalar_normal_form (vset : VarSet.t) (e : fnExpr) =
let is_candidate = function
| FnBinop (_, e1, e2) | FnCond (_, e1, e2) ->
One of the operands must be a state variable
but not the other
but not the other *)
(is_stv vset e1 && not (fn_uses vset e2)) || (is_stv vset e2 && not (fn_uses vset e1))
(* Special rule for conditionals *)
| _ -> false
in
let handle_candidate _ = function
| FnBinop (_, e1, e2) -> (
match (e1, e2) with
| FnCond (_, _, _), estv when is_stv vset estv -> [ 1 ]
| estv, FnCond (_, _, _) when is_stv vset estv -> [ 1 ]
| _, estv when is_stv vset estv -> [ 1 ]
| estv, _ when is_stv vset estv -> [ 1 ]
| _ -> [])
| FnCond (_, e1, _) -> if is_stv vset e1 then [ 1 ] else [ 1 ]
| _ -> []
in
let collected =
rec_expr (fun a b -> a @ b) [] is_candidate handle_candidate (fun _ -> []) (fun _ -> []) e
in
List.length collected <= VarSet.cardinal vset + 1
let reduce_cost_binop _ ctx (op2 : symb_binop) (x : fnExpr) (y : fnExpr) =
match (x, y) with
Transform comparisons with max min into conjunctions
or disjunctions , because conj / disj . are associative
or disjunctions, because conj/disj. are associative *)
(* max(a, b) > c --> a > c or b > c *)
| FnBinop (Max, a, b), c when op2 = Gt || op2 = Ge ->
FnBinop (Or, FnBinop (op2, a, c), FnBinop (op2, b, c))
(* c > max(a, b) --> c > a and c > b *)
| c, FnBinop (Max, a, b) when op2 = Gt || op2 = Ge ->
FnBinop (And, FnBinop (op2, c, a), FnBinop (op2, c, b))
(* max(a, b) < c --> a < c and b < c *)
| FnBinop (Max, a, b), c when op2 = Lt || op2 = Le ->
FnBinop (And, FnBinop (op2, a, c), FnBinop (op2, b, c))
(* c < max(a, b) --> c < a or c < b *)
| c, FnBinop (Max, a, b) when op2 = Lt || op2 = Le ->
FnBinop (Or, FnBinop (op2, c, a), FnBinop (op2, c, b))
(* Distributivity with operators *)
| FnBinop (op1, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
[ ( a + b ) * c -- > a*c + b*c ] if no stv in c
if is_right_distributive op1 op2 && max ca cb >= cc then
FnBinop (op1, FnBinop (op2, a, c), FnBinop (op2, b, c))
else FnBinop (op2, x, y)
| c, FnBinop (Or, a, b) when op2 = And -> FnBinop (Or, FnBinop (And, c, a), FnBinop (And, c, b))
(* Distributivity with ternary expressions *)
| FnCond (cond, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if is_associative op2 && max ca cb > cc then
FnCond (cond, FnBinop (op2, a, c), FnBinop (op2, b, c))
else FnBinop (op2, x, y)
| c, FnCond (cond, a, b) ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if is_associative op2 && max ca cb > cc then
FnCond (cond, FnBinop (op2, c, a), FnBinop (op2, c, b))
else FnBinop (op2, x, y)
| _, _ -> FnBinop (op2, x, y)
let reduce_cost_ternary rfunc ctx c x y =
match (x, y) with
| FnBinop (op1, x1, x2), FnBinop (op2, y1, y2) when op1 = op2 && is_associative op1 ->
let cx1 = cost ctx x1 in
let cx2 = cost ctx x2 in
let cy1 = cost ctx y1 in
let cy2 = cost ctx y2 in
if x1 = y1 && cx1 > max cx2 cy2 then
let cond = rfunc (FnCond (c, x2, y2)) in
FnBinop (op1, x1, cond)
else if x2 = y2 && cx2 > max cx1 cy1 then
let cond = rfunc (FnCond (c, x1, y1)) in
FnBinop (op1, cond, x2)
else FnCond (c, x, y)
| _, _ -> FnCond (c, x, y)
(**
Reduce the cost of an expression. The cost is computed according to the ctx
information, which contains 'costly' expressions.
*)
let reduce_cost ctx expr =
let reduction_cases expr =
match expr with
| FnBinop (_, _, _) -> true
| FnCond (_, _, _) -> true
| FnUnop (_, _) -> true
| _ -> false
in
(* Tranform expressions by looking at its leaves *)
let reduce_transform rfunc expr =
match expr with
| FnBinop (op2, x, y) -> reduce_cost_binop rfunc ctx op2 (rfunc x) (rfunc y)
| FnCond (c, x, y) -> reduce_cost_ternary rfunc ctx (rfunc c) (rfunc x) (rfunc y)
Distribute unary boolean not down , unary num neg down
| FnUnop (op, x) -> (
let e' = rfunc x in
match (op, e') with
| Not, FnBinop (And, e1, e2) ->
FnBinop (Or, rfunc (FnUnop (Not, e1)), rfunc (FnUnop (Not, e2)))
| Not, FnBinop (Or, e1, e2) ->
FnBinop (And, rfunc (FnUnop (Not, e1)), rfunc (FnUnop (Not, e2)))
| Neg, FnBinop (Plus, e1, e2) ->
FnBinop (Plus, rfunc (FnUnop (Neg, e1)), rfunc (FnUnop (Neg, e2)))
| Neg, FnBinop (Minus, e1, e2) -> FnBinop (Minus, rfunc e1, rfunc e2)
| _, _ -> FnUnop (op, e')
(* End FnUnop (op, e) case *))
| _ -> failwith "Unexpected case in expression transformation"
(* End transform expressions *)
in
transform_expr reduction_cases reduce_transform identity identity expr
let reduce_cost_specials ctx e =
let red_cases e = match e with FnCond _ -> true | _ -> false in
let red_apply rfunc e =
match e with
| FnCond (cond1, x, y) -> (
let x' = rfunc x in
let y' = rfunc y in
match (x', y') with
| FnCond (cond2, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if ca > max cb cc then
FnCond (FnBinop (And, cond1, cond2), a, FnCond (FnUnop (Not, cond2), c, b))
else FnCond (cond1, x, y)
| _ -> FnCond (cond1, x, y))
| _ -> failwith "Unexpected case in reduce_cost_specials"
in
transform_expr red_cases red_apply identity identity e
let remove_double_negs _ e =
let red_cases e = match e with FnUnop _ -> true | _ -> false in
let red_apply_dbn rfunc e =
match e with
| FnUnop (op, e') -> (
let e'' = rfunc e' in
match (op, e'') with
| Not, FnUnop (op2, e0) when op2 = Not -> rfunc e0
| Neg, FnUnop (op2, e0) when op2 = Neg -> rfunc e0
| _, _ -> FnUnop (op, e''))
| _ -> failwith "Unexpected case in reduce_cost_specials"
in
transform_expr red_cases red_apply_dbn identity identity e
let force_linear_normal_form ctx e =
let distr_for_linear op expr_list =
let aux acc e =
let e' = rebuild_tree_AC ctx e in
match e' with
| FnBinop (op1, e0, FnBinop (op2, u, v)) when is_right_distributive op2 op1 && op2 = op ->
acc @ [ FnBinop (op1, e0, u); FnBinop (op1, e0, v) ]
| FnBinop (op1, FnBinop (op2, u, v), e0) when is_left_distributive op2 op1 && op2 = op ->
acc @ [ FnBinop (op1, u, e0); FnBinop (op1, v, e0) ]
| _ -> acc @ [ e ]
in
List.fold_left aux [] expr_list
in
let factor_largest_common t top_op el =
let flat_el = List.map flatten_AC el in
let flat_ops, remainder =
List.partition
(fun e ->
match e with FnApp (_, Some opvar, _) -> is_some (op_from_name opvar.vname) | _ -> false)
flat_el
in
let op_assoc =
let add_op_elt op el opmap =
try
let elts = List.assoc op opmap in
let opmap' = List.remove_assoc op opmap in
(op, el :: elts) :: opmap'
with Not_found -> (op, [ el ]) :: opmap
in
List.fold_left
(fun opmap expr ->
match expr with
| FnApp (_, Some opvar, el) ->
let op = check_option (op_from_name opvar.vname) in
add_op_elt op el opmap
| _ -> opmap)
[] flat_ops
in
List.fold_left
(fun el (op, assoce) -> el @ find_max_state_factors ctx t top_op (get_AC_op op) assoce)
[] op_assoc
@ remainder
in
(* Try to apply distributivity rules that can make the expression
linear normal. *)
let flat_e = flatten_AC e in
let e_tree =
match flat_e with
| FnApp (t, Some opvar, el) -> (
match op_from_name opvar.vname with
| Some op ->
let el' = distr_for_linear op el in
let el'' = factor_largest_common t opvar el' in
rebuild_tree_AC ctx (FnApp (t, Some opvar, el''))
| None -> e)
| _ -> e
in
e_tree
let reduce_full ?(limit = 10) (ctx : context) (expr : fnExpr) =
let rec aux_apply_ternary_rules ctx limit e =
let red_expr0 = reduce_cost ctx e in
let red_expr1 = reduce_cost_specials ctx red_expr0 in
let red_expr = remove_double_negs ctx.state_vars red_expr1 in
if (red_expr @= e) || limit = 0 then red_expr
else aux_apply_ternary_rules ctx (limit - 1) red_expr
in
let rules_AC e =
let flat_r = flatten_AC e in
let r1 = apply_special_rules ctx flat_r in
rebuild_tree_AC ctx r1
in
let r0 = aux_apply_ternary_rules ctx limit expr in
let r0' = rules_AC r0 in
if scalar_normal_form ctx.state_vars r0' then r0'
else force_linear_normal_form ctx (factorize_multi_toplevel ctx r0)
let rec normalize ctx sklet =
match sklet with
| FnLetIn (ve_list, letin) ->
FnLetIn (List.map (fun (v, e) -> (v, reduce_full ctx e)) ve_list, normalize ctx letin)
| FnRecord (vs, emap) ->
let ve_list = unwrap_state vs emap in
wrap_state (List.map (fun (v, e) -> (v, reduce_full ctx e)) ve_list)
| e -> reduce_full ctx e
let clean_unused_assignments : fnExpr -> fnExpr =
let is_rec_mem e = match e with FnRecordMember _ -> true | _ -> false in
let remove_to_ignore ignore_list =
transform_expr2
{
case = (fun e -> match e with FnLetIn _ -> true | _ -> false);
on_case =
(fun f e ->
match e with
| FnLetIn (bindings, cont) -> (
let rem_b =
List.filter
(fun (v, e) -> (not (List.mem v ignore_list)) || is_rec_mem e)
bindings
in
let rec_b = List.map (fun (v, e) -> (v, f e)) rem_b in
match rec_b with [] -> f cont | _ -> FnLetIn (rec_b, f cont))
| _ -> failwith "x");
on_var = identity;
on_const = identity;
}
in
transform_expr2
{
case = (fun e -> match e with FnLetIn _ -> true | _ -> false);
on_case =
(fun f e ->
match e with
| FnLetIn ([ (v, FnRec (igu, st, (s, body))) ], FnRecord (rvs, remap)) ->
let choices = unwrap_state rvs remap in
let to_ignore =
List.fold_left
(fun l (v, e) -> match e with FnRecordMember _ -> l | _ -> v :: l)
[] choices
in
FnLetIn
([ (v, FnRec (igu, st, (s, remove_to_ignore to_ignore body))) ], wrap_state choices)
| FnLetIn (b, cont) -> FnLetIn (b, f cont)
| _ -> failwith "Guarded clause");
on_var = identity;
on_const = identity;
}
let clean _ e = clean_unused_assignments e
* Using to solve other reduction / expression matching problems
let _find_function_with_rosette all_vars fe e =
let pp_defs fmt () = Sketch.pp_symbolic_definitions_of fmt [] all_vars in
let pp_expr_e fmt () = fprintf fmt "(define e @[%a@])@." pp_fnexpr e in
let pp_expr_fe fmt () = fprintf fmt "(define fe @[%a@])@." pp_fnexpr fe in
let pp_f_sketch fmt () =
fprintf fmt "(define (f x) (bExpr %a x 2))" Sketch.pp_defined_input all_vars
in
let pp_synth_prob fmt () =
fprintf fmt
"(define odot (synthesize #:forall (list %a) #:guarantee (assert (eq? fe (f e)))))@."
Sketch.pp_defined_input all_vars
in
let pp_all fmt () =
pp_defs fmt ();
pp_expr_e fmt ();
pp_expr_fe fmt ();
pp_f_sketch fmt ();
pp_synth_prob fmt ()
in
let _, solution = Sygus.Local.compile_and_fetch Config.rosette pp_all () in
Sygus.RAst.pp_expr_list Format.std_formatter solution
| null | https://raw.githubusercontent.com/victornicolet/parsynt/d3f530923c0c75537b92c2930eb882921f38268c/src/singleloop/ExpressionReduction.ml | ocaml | Special rule for conditionals
max(a, b) > c --> a > c or b > c
c > max(a, b) --> c > a and c > b
max(a, b) < c --> a < c and b < c
c < max(a, b) --> c < a or c < b
Distributivity with operators
Distributivity with ternary expressions
*
Reduce the cost of an expression. The cost is computed according to the ctx
information, which contains 'costly' expressions.
Tranform expressions by looking at its leaves
End FnUnop (op, e) case
End transform expressions
Try to apply distributivity rules that can make the expression
linear normal. | *
This file is part of Parsynt .
Author : < >
Parsynt is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with . If not , see < / > .
This file is part of Parsynt.
Author: Victor Nicolet <>
Parsynt is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Parsynt is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Parsynt. If not, see </>.
*)
open Beta
open Utils
open Fn
open FnPretty
open Format
open Expressions
let rec is_stv vset expr =
match expr with
| FnUnop (_, FnVar v) | FnVar v -> (
try VarSet.mem (check_option (vi_of v)) vset with Failure _ -> false)
| FnCond (c, _, _) -> is_stv vset c
| _ -> false
let scalar_normal_form (vset : VarSet.t) (e : fnExpr) =
let is_candidate = function
| FnBinop (_, e1, e2) | FnCond (_, e1, e2) ->
One of the operands must be a state variable
but not the other
but not the other *)
(is_stv vset e1 && not (fn_uses vset e2)) || (is_stv vset e2 && not (fn_uses vset e1))
| _ -> false
in
let handle_candidate _ = function
| FnBinop (_, e1, e2) -> (
match (e1, e2) with
| FnCond (_, _, _), estv when is_stv vset estv -> [ 1 ]
| estv, FnCond (_, _, _) when is_stv vset estv -> [ 1 ]
| _, estv when is_stv vset estv -> [ 1 ]
| estv, _ when is_stv vset estv -> [ 1 ]
| _ -> [])
| FnCond (_, e1, _) -> if is_stv vset e1 then [ 1 ] else [ 1 ]
| _ -> []
in
let collected =
rec_expr (fun a b -> a @ b) [] is_candidate handle_candidate (fun _ -> []) (fun _ -> []) e
in
List.length collected <= VarSet.cardinal vset + 1
let reduce_cost_binop _ ctx (op2 : symb_binop) (x : fnExpr) (y : fnExpr) =
match (x, y) with
Transform comparisons with max min into conjunctions
or disjunctions , because conj / disj . are associative
or disjunctions, because conj/disj. are associative *)
| FnBinop (Max, a, b), c when op2 = Gt || op2 = Ge ->
FnBinop (Or, FnBinop (op2, a, c), FnBinop (op2, b, c))
| c, FnBinop (Max, a, b) when op2 = Gt || op2 = Ge ->
FnBinop (And, FnBinop (op2, c, a), FnBinop (op2, c, b))
| FnBinop (Max, a, b), c when op2 = Lt || op2 = Le ->
FnBinop (And, FnBinop (op2, a, c), FnBinop (op2, b, c))
| c, FnBinop (Max, a, b) when op2 = Lt || op2 = Le ->
FnBinop (Or, FnBinop (op2, c, a), FnBinop (op2, c, b))
| FnBinop (op1, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
[ ( a + b ) * c -- > a*c + b*c ] if no stv in c
if is_right_distributive op1 op2 && max ca cb >= cc then
FnBinop (op1, FnBinop (op2, a, c), FnBinop (op2, b, c))
else FnBinop (op2, x, y)
| c, FnBinop (Or, a, b) when op2 = And -> FnBinop (Or, FnBinop (And, c, a), FnBinop (And, c, b))
| FnCond (cond, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if is_associative op2 && max ca cb > cc then
FnCond (cond, FnBinop (op2, a, c), FnBinop (op2, b, c))
else FnBinop (op2, x, y)
| c, FnCond (cond, a, b) ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if is_associative op2 && max ca cb > cc then
FnCond (cond, FnBinop (op2, c, a), FnBinop (op2, c, b))
else FnBinop (op2, x, y)
| _, _ -> FnBinop (op2, x, y)
let reduce_cost_ternary rfunc ctx c x y =
match (x, y) with
| FnBinop (op1, x1, x2), FnBinop (op2, y1, y2) when op1 = op2 && is_associative op1 ->
let cx1 = cost ctx x1 in
let cx2 = cost ctx x2 in
let cy1 = cost ctx y1 in
let cy2 = cost ctx y2 in
if x1 = y1 && cx1 > max cx2 cy2 then
let cond = rfunc (FnCond (c, x2, y2)) in
FnBinop (op1, x1, cond)
else if x2 = y2 && cx2 > max cx1 cy1 then
let cond = rfunc (FnCond (c, x1, y1)) in
FnBinop (op1, cond, x2)
else FnCond (c, x, y)
| _, _ -> FnCond (c, x, y)
let reduce_cost ctx expr =
let reduction_cases expr =
match expr with
| FnBinop (_, _, _) -> true
| FnCond (_, _, _) -> true
| FnUnop (_, _) -> true
| _ -> false
in
let reduce_transform rfunc expr =
match expr with
| FnBinop (op2, x, y) -> reduce_cost_binop rfunc ctx op2 (rfunc x) (rfunc y)
| FnCond (c, x, y) -> reduce_cost_ternary rfunc ctx (rfunc c) (rfunc x) (rfunc y)
Distribute unary boolean not down , unary num neg down
| FnUnop (op, x) -> (
let e' = rfunc x in
match (op, e') with
| Not, FnBinop (And, e1, e2) ->
FnBinop (Or, rfunc (FnUnop (Not, e1)), rfunc (FnUnop (Not, e2)))
| Not, FnBinop (Or, e1, e2) ->
FnBinop (And, rfunc (FnUnop (Not, e1)), rfunc (FnUnop (Not, e2)))
| Neg, FnBinop (Plus, e1, e2) ->
FnBinop (Plus, rfunc (FnUnop (Neg, e1)), rfunc (FnUnop (Neg, e2)))
| Neg, FnBinop (Minus, e1, e2) -> FnBinop (Minus, rfunc e1, rfunc e2)
| _, _ -> FnUnop (op, e')
| _ -> failwith "Unexpected case in expression transformation"
in
transform_expr reduction_cases reduce_transform identity identity expr
let reduce_cost_specials ctx e =
let red_cases e = match e with FnCond _ -> true | _ -> false in
let red_apply rfunc e =
match e with
| FnCond (cond1, x, y) -> (
let x' = rfunc x in
let y' = rfunc y in
match (x', y') with
| FnCond (cond2, a, b), c ->
let ca = cost ctx a in
let cb = cost ctx b in
let cc = cost ctx c in
if ca > max cb cc then
FnCond (FnBinop (And, cond1, cond2), a, FnCond (FnUnop (Not, cond2), c, b))
else FnCond (cond1, x, y)
| _ -> FnCond (cond1, x, y))
| _ -> failwith "Unexpected case in reduce_cost_specials"
in
transform_expr red_cases red_apply identity identity e
let remove_double_negs _ e =
let red_cases e = match e with FnUnop _ -> true | _ -> false in
let red_apply_dbn rfunc e =
match e with
| FnUnop (op, e') -> (
let e'' = rfunc e' in
match (op, e'') with
| Not, FnUnop (op2, e0) when op2 = Not -> rfunc e0
| Neg, FnUnop (op2, e0) when op2 = Neg -> rfunc e0
| _, _ -> FnUnop (op, e''))
| _ -> failwith "Unexpected case in reduce_cost_specials"
in
transform_expr red_cases red_apply_dbn identity identity e
let force_linear_normal_form ctx e =
let distr_for_linear op expr_list =
let aux acc e =
let e' = rebuild_tree_AC ctx e in
match e' with
| FnBinop (op1, e0, FnBinop (op2, u, v)) when is_right_distributive op2 op1 && op2 = op ->
acc @ [ FnBinop (op1, e0, u); FnBinop (op1, e0, v) ]
| FnBinop (op1, FnBinop (op2, u, v), e0) when is_left_distributive op2 op1 && op2 = op ->
acc @ [ FnBinop (op1, u, e0); FnBinop (op1, v, e0) ]
| _ -> acc @ [ e ]
in
List.fold_left aux [] expr_list
in
let factor_largest_common t top_op el =
let flat_el = List.map flatten_AC el in
let flat_ops, remainder =
List.partition
(fun e ->
match e with FnApp (_, Some opvar, _) -> is_some (op_from_name opvar.vname) | _ -> false)
flat_el
in
let op_assoc =
let add_op_elt op el opmap =
try
let elts = List.assoc op opmap in
let opmap' = List.remove_assoc op opmap in
(op, el :: elts) :: opmap'
with Not_found -> (op, [ el ]) :: opmap
in
List.fold_left
(fun opmap expr ->
match expr with
| FnApp (_, Some opvar, el) ->
let op = check_option (op_from_name opvar.vname) in
add_op_elt op el opmap
| _ -> opmap)
[] flat_ops
in
List.fold_left
(fun el (op, assoce) -> el @ find_max_state_factors ctx t top_op (get_AC_op op) assoce)
[] op_assoc
@ remainder
in
let flat_e = flatten_AC e in
let e_tree =
match flat_e with
| FnApp (t, Some opvar, el) -> (
match op_from_name opvar.vname with
| Some op ->
let el' = distr_for_linear op el in
let el'' = factor_largest_common t opvar el' in
rebuild_tree_AC ctx (FnApp (t, Some opvar, el''))
| None -> e)
| _ -> e
in
e_tree
let reduce_full ?(limit = 10) (ctx : context) (expr : fnExpr) =
let rec aux_apply_ternary_rules ctx limit e =
let red_expr0 = reduce_cost ctx e in
let red_expr1 = reduce_cost_specials ctx red_expr0 in
let red_expr = remove_double_negs ctx.state_vars red_expr1 in
if (red_expr @= e) || limit = 0 then red_expr
else aux_apply_ternary_rules ctx (limit - 1) red_expr
in
let rules_AC e =
let flat_r = flatten_AC e in
let r1 = apply_special_rules ctx flat_r in
rebuild_tree_AC ctx r1
in
let r0 = aux_apply_ternary_rules ctx limit expr in
let r0' = rules_AC r0 in
if scalar_normal_form ctx.state_vars r0' then r0'
else force_linear_normal_form ctx (factorize_multi_toplevel ctx r0)
let rec normalize ctx sklet =
match sklet with
| FnLetIn (ve_list, letin) ->
FnLetIn (List.map (fun (v, e) -> (v, reduce_full ctx e)) ve_list, normalize ctx letin)
| FnRecord (vs, emap) ->
let ve_list = unwrap_state vs emap in
wrap_state (List.map (fun (v, e) -> (v, reduce_full ctx e)) ve_list)
| e -> reduce_full ctx e
let clean_unused_assignments : fnExpr -> fnExpr =
let is_rec_mem e = match e with FnRecordMember _ -> true | _ -> false in
let remove_to_ignore ignore_list =
transform_expr2
{
case = (fun e -> match e with FnLetIn _ -> true | _ -> false);
on_case =
(fun f e ->
match e with
| FnLetIn (bindings, cont) -> (
let rem_b =
List.filter
(fun (v, e) -> (not (List.mem v ignore_list)) || is_rec_mem e)
bindings
in
let rec_b = List.map (fun (v, e) -> (v, f e)) rem_b in
match rec_b with [] -> f cont | _ -> FnLetIn (rec_b, f cont))
| _ -> failwith "x");
on_var = identity;
on_const = identity;
}
in
transform_expr2
{
case = (fun e -> match e with FnLetIn _ -> true | _ -> false);
on_case =
(fun f e ->
match e with
| FnLetIn ([ (v, FnRec (igu, st, (s, body))) ], FnRecord (rvs, remap)) ->
let choices = unwrap_state rvs remap in
let to_ignore =
List.fold_left
(fun l (v, e) -> match e with FnRecordMember _ -> l | _ -> v :: l)
[] choices
in
FnLetIn
([ (v, FnRec (igu, st, (s, remove_to_ignore to_ignore body))) ], wrap_state choices)
| FnLetIn (b, cont) -> FnLetIn (b, f cont)
| _ -> failwith "Guarded clause");
on_var = identity;
on_const = identity;
}
let clean _ e = clean_unused_assignments e
* Using to solve other reduction / expression matching problems
let _find_function_with_rosette all_vars fe e =
let pp_defs fmt () = Sketch.pp_symbolic_definitions_of fmt [] all_vars in
let pp_expr_e fmt () = fprintf fmt "(define e @[%a@])@." pp_fnexpr e in
let pp_expr_fe fmt () = fprintf fmt "(define fe @[%a@])@." pp_fnexpr fe in
let pp_f_sketch fmt () =
fprintf fmt "(define (f x) (bExpr %a x 2))" Sketch.pp_defined_input all_vars
in
let pp_synth_prob fmt () =
fprintf fmt
"(define odot (synthesize #:forall (list %a) #:guarantee (assert (eq? fe (f e)))))@."
Sketch.pp_defined_input all_vars
in
let pp_all fmt () =
pp_defs fmt ();
pp_expr_e fmt ();
pp_expr_fe fmt ();
pp_f_sketch fmt ();
pp_synth_prob fmt ()
in
let _, solution = Sygus.Local.compile_and_fetch Config.rosette pp_all () in
Sygus.RAst.pp_expr_list Format.std_formatter solution
|
86b0871e62c0fe085922b45b6c14a7a14d4745937724aea380ce694a9dbd2a09 | master-q/carettah | WrapPaths.hs | module Carettah.WrapPaths (wrapGetDataFileName, wrapVersion) where
import Data.Version
import Paths_carettah (getDataFileName, version)
wrapGetDataFileName :: FilePath -> IO FilePath
wrapGetDataFileName = getDataFileName
wrapVersion :: Version
wrapVersion = version
| null | https://raw.githubusercontent.com/master-q/carettah/355cadf8e5e957fd7df1d8281c29adf1d22fd399/src/Carettah/WrapPaths.hs | haskell | module Carettah.WrapPaths (wrapGetDataFileName, wrapVersion) where
import Data.Version
import Paths_carettah (getDataFileName, version)
wrapGetDataFileName :: FilePath -> IO FilePath
wrapGetDataFileName = getDataFileName
wrapVersion :: Version
wrapVersion = version
|
|
4d0b1168805a44aefd527d955f005a38304c64b66b78fb71a2a08dfbcba97f06 | ygmpkk/house | StdReceiverDef.hs | -----------------------------------------------------------------------------
-- |
-- Module : StdReceiverDef
Copyright : ( c ) 2002
-- License : BSD-style
--
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
-- StdReceiverDef contains the types to define the standard set of receivers.
--
-----------------------------------------------------------------------------
module Graphics.UI.ObjectIO.StdReceiverDef
(
-- * Definitions
module Graphics.UI.ObjectIO.StdReceiverDef,
-- * A visible modules
module Graphics.UI.ObjectIO.StdIOCommon
, module Graphics.UI.ObjectIO.StdGUI
) where
import Graphics.UI.ObjectIO.StdGUI
import Graphics.UI.ObjectIO.StdIOCommon
-- | Uni-directional receiver. The uni-directional receiver can receive
-- messages but cann\'t respond to it.
data Receiver m ls ps = Receiver (RId m) (ReceiverFunction m ls ps) [ReceiverAttribute ls ps]
type ReceiverFunction m ls ps = m -> GUIFun ls ps
-- | Bi-directional can receive messages and then must respond to it.
data Receiver2 m r ls ps = Receiver2 (R2Id m r) (Receiver2Function m r ls ps) [ReceiverAttribute ls ps]
type Receiver2Function m r ls ps = m -> (ls,ps) -> GUI ps (r,(ls,ps))
data ReceiverAttribute ls ps -- Default:
= ReceiverInit (GUIFun ls ps) -- no actions after opening receiver
| ReceiverSelectState SelectState -- receiver Able
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/ObjectIO/Graphics/UI/ObjectIO/StdReceiverDef.hs | haskell | ---------------------------------------------------------------------------
|
Module : StdReceiverDef
License : BSD-style
Maintainer :
Stability : provisional
Portability : portable
StdReceiverDef contains the types to define the standard set of receivers.
---------------------------------------------------------------------------
* Definitions
* A visible modules
| Uni-directional receiver. The uni-directional receiver can receive
messages but cann\'t respond to it.
| Bi-directional can receive messages and then must respond to it.
Default:
no actions after opening receiver
receiver Able | Copyright : ( c ) 2002
module Graphics.UI.ObjectIO.StdReceiverDef
(
module Graphics.UI.ObjectIO.StdReceiverDef,
module Graphics.UI.ObjectIO.StdIOCommon
, module Graphics.UI.ObjectIO.StdGUI
) where
import Graphics.UI.ObjectIO.StdGUI
import Graphics.UI.ObjectIO.StdIOCommon
data Receiver m ls ps = Receiver (RId m) (ReceiverFunction m ls ps) [ReceiverAttribute ls ps]
type ReceiverFunction m ls ps = m -> GUIFun ls ps
data Receiver2 m r ls ps = Receiver2 (R2Id m r) (Receiver2Function m r ls ps) [ReceiverAttribute ls ps]
type Receiver2Function m r ls ps = m -> (ls,ps) -> GUI ps (r,(ls,ps))
|
e09ac888918cf33c52843faf6d12f96f517496e49702acf3de1b82e856d71484 | pascal-knodel/haskell-craft | C'13.hs | --
Chapter 13 .
--
module C'13 where
import E'13'28
import E'13'27
import E'13'26
import E'13'25
import E'13'24
import E'13'23
import E'13'22
import E'13'21
import E'13'20
import E'13'19
import E'13'18
import E'13'17
import E'13'16
import E'13'15
import E'13'14
import E'13'13
import E'13'12
import E'13'11
import E'13'10
import E'13''9
import E'13''8
import E'13''7
import E'13''6
import E'13''5
import E'13''4
import E'13''3
import E'13''2
import E'13''1 | null | https://raw.githubusercontent.com/pascal-knodel/haskell-craft/c03d6eb857abd8b4785b6de075b094ec3653c968/Chapter%C2%A013/C'13.hs | haskell | Chapter 13 .
module C'13 where
import E'13'28
import E'13'27
import E'13'26
import E'13'25
import E'13'24
import E'13'23
import E'13'22
import E'13'21
import E'13'20
import E'13'19
import E'13'18
import E'13'17
import E'13'16
import E'13'15
import E'13'14
import E'13'13
import E'13'12
import E'13'11
import E'13'10
import E'13''9
import E'13''8
import E'13''7
import E'13''6
import E'13''5
import E'13''4
import E'13''3
import E'13''2
import E'13''1 |
|
b8660748903b434e041abce6eb692497c49b7fe9d7182cb0f85ce2c254ae64f4 | ryanpbrewster/haskell | 145.hs | 145.hs
HOLY COW SLOW ( 9 m 42s )
- Some positive integers n have the property that the sum [ n + reverse(n ) ]
- consists entirely of odd ( decimal ) digits . For instance , 36 + 63 = 99 and
- 409 + 904 = 1313 . We will call such numbers reversible ; so 36 , 63 , 409 , and
- 904 are reversible . Leading zeroes are not allowed in either n or
- reverse(n ) .
-
- There are 120 reversible numbers below one - thousand .
-
- How many reversible numbers are there below one - billion ( 109 ) ?
- Some positive integers n have the property that the sum [n + reverse(n)]
- consists entirely of odd (decimal) digits. For instance, 36 + 63 = 99 and
- 409 + 904 = 1313. We will call such numbers reversible; so 36, 63, 409, and
- 904 are reversible. Leading zeroes are not allowed in either n or
- reverse(n).
-
- There are 120 reversible numbers below one-thousand.
-
- How many reversible numbers are there below one-billion (109)?
-}
import ProjectEuler.Math (fromIntegerDigits, integerDigits)
intReverse n = intReverse_h n 0
intReverse_h 0 n' = n'
intReverse_h n n' = let (q,r) = n `divMod` 10
in intReverse_h q (10*n' + r)
isReversible n | n `mod` 10 == 0 = False
| otherwise = let s = n + (intReverse n)
in all odd $ integerDigits s
solveProblem bound = 2 * (length $ filter isReversible [1,3..bound])
main = print $ solveProblem (10^9)
| null | https://raw.githubusercontent.com/ryanpbrewster/haskell/6edd0afe234008a48b4871032dedfd143ca6e412/project-euler/src/Old/145.hs | haskell | 145.hs
HOLY COW SLOW ( 9 m 42s )
- Some positive integers n have the property that the sum [ n + reverse(n ) ]
- consists entirely of odd ( decimal ) digits . For instance , 36 + 63 = 99 and
- 409 + 904 = 1313 . We will call such numbers reversible ; so 36 , 63 , 409 , and
- 904 are reversible . Leading zeroes are not allowed in either n or
- reverse(n ) .
-
- There are 120 reversible numbers below one - thousand .
-
- How many reversible numbers are there below one - billion ( 109 ) ?
- Some positive integers n have the property that the sum [n + reverse(n)]
- consists entirely of odd (decimal) digits. For instance, 36 + 63 = 99 and
- 409 + 904 = 1313. We will call such numbers reversible; so 36, 63, 409, and
- 904 are reversible. Leading zeroes are not allowed in either n or
- reverse(n).
-
- There are 120 reversible numbers below one-thousand.
-
- How many reversible numbers are there below one-billion (109)?
-}
import ProjectEuler.Math (fromIntegerDigits, integerDigits)
intReverse n = intReverse_h n 0
intReverse_h 0 n' = n'
intReverse_h n n' = let (q,r) = n `divMod` 10
in intReverse_h q (10*n' + r)
isReversible n | n `mod` 10 == 0 = False
| otherwise = let s = n + (intReverse n)
in all odd $ integerDigits s
solveProblem bound = 2 * (length $ filter isReversible [1,3..bound])
main = print $ solveProblem (10^9)
|
|
cc7a2585eb4403a195cc687c7b4a171a6d5ce1b8f36869a31776fb68ea1c9c29 | LaurentMazare/ocaml-arrow | python.ml | open Core_kernel
open Arrow_c_api
type t =
{ x : int
; y : float
; z : string
; z_opt : float option
; b : bool
; b_opt : bool option
}
[@@deriving sexp_of, fields, compare]
let `read read, `write write =
let open F in
Fields.make_creator ~x:i64 ~y:f64 ~z:str ~z_opt:f64_opt ~b:bool ~b_opt:bool_opt
|> read_write_fn
let gen =
let float_gen = Float.gen_uniform_excl (-1e6) 1e6 in
let open Quickcheck.Let_syntax in
let%map x = Int.gen_incl (-1000000) 1000000
and y = float_gen
and z = Int.quickcheck_generator
and z_opt = Option.quickcheck_generator float_gen
and b = Bool.quickcheck_generator
and b_opt = Option.quickcheck_generator Bool.quickcheck_generator in
{ x; y; z = Printf.sprintf "foo%d" z; z_opt; b; b_opt }
let python_read_and_rewrite ~filename ~print_details =
let in_channel, out_channel = Caml_unix.open_process "python" in
let print_details =
if print_details
then
[ "print(df.shape)"
; "print(df['z'].iloc[0])"
; "print(sum(df['x']), sum(df['y']), np.nansum(df['z_opt']))"
; "print(sum(df['b']), sum(df['b_opt'].astype(float).fillna(10**6)))"
]
else []
in
Out_channel.output_lines
out_channel
([ "import os"
; "import pandas as pd"
; "import numpy as np"
; Printf.sprintf "df = pd.read_parquet('%s')" filename
]
@ print_details
@ [ Printf.sprintf "os.remove('%s')" filename
; Printf.sprintf "df.to_parquet('%s')" filename
]);
Out_channel.close out_channel;
let lines = In_channel.input_lines in_channel in
In_channel.close in_channel;
lines
let%expect_test _ =
let gen =
let%bind.Quickcheck length = Int.gen_incl 1 10_000 in
List.gen_with_length length gen
in
Quickcheck.iter ~trials:10 ~seed:(`Deterministic "fortytwo") gen ~f:(fun ts ->
let filename = Caml.Filename.temp_file "test" ".parquet" in
Exn.protect
~f:(fun () ->
let hd = List.hd_exn ts in
Stdio.printf "z: %s\n" hd.z;
let sum_x = List.fold ts ~init:0 ~f:(fun acc t -> acc + t.x) in
Stdio.printf "sum_x: %d\n" sum_x;
let sum_y = List.fold ts ~init:0. ~f:(fun acc t -> acc +. t.y) in
Stdio.printf "sum_y: %f\n" sum_y;
let sum_z_opt =
List.fold ts ~init:0. ~f:(fun acc t ->
acc +. Option.value t.z_opt ~default:0.)
in
Stdio.printf "sum_z_opt: %f\n" sum_z_opt;
let sum_b = List.fold ts ~init:0 ~f:(fun acc t -> acc + if t.b then 1 else 0) in
Stdio.printf "sum_b: %d\n" sum_b;
let sum_b1 =
List.fold ts ~init:0 ~f:(fun acc t ->
acc
+
match t.b_opt with
| None -> 1
| Some _ -> 0)
in
let sum_b2 =
List.fold ts ~init:0 ~f:(fun acc t ->
acc
+
match t.b_opt with
| Some true -> 1
| Some false | None -> 0)
in
Stdio.printf "sum_b_opt: %d %d\n" sum_b1 sum_b2;
write ~chunk_size:128 filename ts;
let lines = python_read_and_rewrite ~filename ~print_details:true in
List.iter lines ~f:(Stdio.printf ">> %s\n%!");
let ts' = read filename in
let no_diff = ref true in
List.iter2_exn ts ts' ~f:(fun t t' ->
if compare t t' <> 0 && !no_diff
then (
no_diff := false;
Stdio.printf
"in: %s\nout: %s\n\n%!"
(sexp_of_t t |> Sexp.to_string_mach)
(sexp_of_t t' |> Sexp.to_string_mach)));
Stdio.printf "\n")
~finally:(fun () -> Caml.Sys.remove filename));
[%expect
{|
z: foo-55932
sum_x: -29583892
sum_y: -1606806.135343
sum_z_opt: -7181519.032317
sum_b: 447
sum_b_opt: 475 212
>> (900, 6)
>> foo-55932
>> -29583892 -1606806.1353425747 -7181519.032317471
>> 447 475000212.0
z: foo-45
sum_x: -70833733
sum_y: 36206088.482584
sum_z_opt: 7091351.854805
sum_b: 2708
sum_b_opt: 2603 1361
>> (5223, 6)
>> foo-45
>> -70833733 36206088.48258363 7091351.854804993
>> 2708 2603001361.0
z: foo2598252
sum_x: -757868
sum_y: -614689.470489
sum_z_opt: 361861.158869
sum_b: 1
sum_b_opt: 1 0
>> (1, 6)
>> foo2598252
>> -757868 -614689.4704887881 361861.1588694445
>> 1 1000000.0
z: foo1076282
sum_x: -669671
sum_y: -28063312.331175
sum_z_opt: 38821493.026763
sum_b: 3198
sum_b_opt: 3194 1607
>> (6446, 6)
>> foo1076282
>> -669671 -28063312.33117541 38821493.02676311
>> 3198 3194001607.0
z: foo609249368422154
sum_x: 13370204
sum_y: 3993934.619034
sum_z_opt: 1517418.813875
sum_b: 1298
sum_b_opt: 1332 659
>> (2627, 6)
>> foo609249368422154
>> 13370204 3993934.6190337846 1517418.8138750556
>> 1298 1332000659.0
z: foo48647770842302457
sum_x: 63283851
sum_y: 34915730.561748
sum_z_opt: 6679416.315610
sum_b: 1054
sum_b_opt: 1044 549
>> (2100, 6)
>> foo48647770842302457
>> 63283851 34915730.561747685 6679416.3156096125
>> 1054 1044000549.0
z: foo46963576856337718
sum_x: 6385519
sum_y: -66105313.513491
sum_z_opt: -27291947.752573
sum_b: 1743
sum_b_opt: 1802 842
>> (3519, 6)
>> foo46963576856337718
>> 6385519 -66105313.51349127 -27291947.752572805
>> 1743 1802000842.0
z: foo-901387614447954
sum_x: -32942681
sum_y: -63259224.299234
sum_z_opt: 19546809.552908
sum_b: 1265
sum_b_opt: 1247 622
>> (2487, 6)
>> foo-901387614447954
>> -32942681 -63259224.2992335 19546809.552908298
>> 1265 1247000622.0
z: foo-687404271018784
sum_x: -34775365
sum_y: -17426626.705024
sum_z_opt: -3929344.742169
sum_b: 623
sum_b_opt: 596 344
>> (1258, 6)
>> foo-687404271018784
>> -34775365 -17426626.705024164 -3929344.74216865
>> 623 596000344.0
z: foo-296
sum_x: -45829821
sum_y: -25790516.799683
sum_z_opt: 12365958.051864
sum_b: 3398
sum_b_opt: 3300 1757
>> (6760, 6)
>> foo-296
>> -45829821 -25790516.79968277 12365958.051864266
>> 3398 3300001757.0 |}]
let sexp_of_time_ns time_ns =
Time_ns.to_string_iso8601_basic time_ns ~zone:Time.Zone.utc |> sexp_of_string
let sexp_of_ofday_ns ofday = Time_ns.Ofday.to_string ofday |> sexp_of_string
let%expect_test _ =
let filename = Caml.Filename.temp_file "test" ".parquet" in
Exn.protect
~f:(fun () ->
let col_v1 = Writer.float [| 1.; 2.; 3.; 3.14159265358979; 5. |] ~name:"x" in
let col_v2 =
Writer.float_opt
[| Some 2.718281828; None; None; Some 13.37; None |]
~name:"col_v2"
in
let col_date =
let d = Date.of_string "2020-01-01" in
Writer.date
[| d; d; Date.add_days d 12; Date.add_days d (-42); d |]
~name:"col_date"
in
let col_time =
let t = Time_ns.of_string "2021-06-05 09:36:00.123+01:00" in
Writer.time_ns [| t; t; t; t; t |] ~name:"col_time"
in
let col_ofday =
Array.map
[| "00:00"; "23:59"; "11:30:11.123456"; "12:00"; "00:00:01" |]
~f:Time_ns.Ofday.of_string
|> Writer.ofday_ns ~name:"col_ofday"
in
Writer.write filename ~cols:[ col_v1; col_v2; col_date; col_time; col_ofday ];
let lines = python_read_and_rewrite ~filename ~print_details:false in
List.iter lines ~f:(Stdio.printf ">> %s\n%!");
let table = Parquet_reader.table filename in
let rows = Table.num_rows table in
Stdio.printf "%d\n%!" rows;
let col_v2 = Column.read_float_opt table ~column:(`Name "col_v2") in
let col_date = Column.read_date table ~column:(`Name "col_date") in
let col_time = Column.read_time_ns table ~column:(`Name "col_time") in
let col_ofday = Column.read_ofday_ns table ~column:(`Name "col_ofday") in
Stdio.printf
"%s\n%s\n%s\n%s\n%!"
([%sexp_of: float option array] col_v2 |> Sexp.to_string_mach)
([%sexp_of: Date.t array] col_date |> Sexp.to_string_mach)
([%sexp_of: time_ns array] col_time |> Sexp.to_string_mach)
([%sexp_of: ofday_ns array] col_ofday |> Sexp.to_string_mach);
())
~finally:(fun () -> Caml.Sys.remove filename);
[%expect
{|
5
((2.718281828)()()(13.37)())
(2020-01-01 2020-01-01 2020-01-13 2019-11-20 2020-01-01)
(2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z)
(00:00:00.000000000 23:59:00.000000000 11:30:11.123456000 12:00:00.000000000 00:00:01.000000000)
|}]
| null | https://raw.githubusercontent.com/LaurentMazare/ocaml-arrow/2ffa860cb63236303f4bf6871620318149a46785/tests/python.ml | ocaml | open Core_kernel
open Arrow_c_api
type t =
{ x : int
; y : float
; z : string
; z_opt : float option
; b : bool
; b_opt : bool option
}
[@@deriving sexp_of, fields, compare]
let `read read, `write write =
let open F in
Fields.make_creator ~x:i64 ~y:f64 ~z:str ~z_opt:f64_opt ~b:bool ~b_opt:bool_opt
|> read_write_fn
let gen =
let float_gen = Float.gen_uniform_excl (-1e6) 1e6 in
let open Quickcheck.Let_syntax in
let%map x = Int.gen_incl (-1000000) 1000000
and y = float_gen
and z = Int.quickcheck_generator
and z_opt = Option.quickcheck_generator float_gen
and b = Bool.quickcheck_generator
and b_opt = Option.quickcheck_generator Bool.quickcheck_generator in
{ x; y; z = Printf.sprintf "foo%d" z; z_opt; b; b_opt }
let python_read_and_rewrite ~filename ~print_details =
let in_channel, out_channel = Caml_unix.open_process "python" in
let print_details =
if print_details
then
[ "print(df.shape)"
; "print(df['z'].iloc[0])"
; "print(sum(df['x']), sum(df['y']), np.nansum(df['z_opt']))"
; "print(sum(df['b']), sum(df['b_opt'].astype(float).fillna(10**6)))"
]
else []
in
Out_channel.output_lines
out_channel
([ "import os"
; "import pandas as pd"
; "import numpy as np"
; Printf.sprintf "df = pd.read_parquet('%s')" filename
]
@ print_details
@ [ Printf.sprintf "os.remove('%s')" filename
; Printf.sprintf "df.to_parquet('%s')" filename
]);
Out_channel.close out_channel;
let lines = In_channel.input_lines in_channel in
In_channel.close in_channel;
lines
let%expect_test _ =
let gen =
let%bind.Quickcheck length = Int.gen_incl 1 10_000 in
List.gen_with_length length gen
in
Quickcheck.iter ~trials:10 ~seed:(`Deterministic "fortytwo") gen ~f:(fun ts ->
let filename = Caml.Filename.temp_file "test" ".parquet" in
Exn.protect
~f:(fun () ->
let hd = List.hd_exn ts in
Stdio.printf "z: %s\n" hd.z;
let sum_x = List.fold ts ~init:0 ~f:(fun acc t -> acc + t.x) in
Stdio.printf "sum_x: %d\n" sum_x;
let sum_y = List.fold ts ~init:0. ~f:(fun acc t -> acc +. t.y) in
Stdio.printf "sum_y: %f\n" sum_y;
let sum_z_opt =
List.fold ts ~init:0. ~f:(fun acc t ->
acc +. Option.value t.z_opt ~default:0.)
in
Stdio.printf "sum_z_opt: %f\n" sum_z_opt;
let sum_b = List.fold ts ~init:0 ~f:(fun acc t -> acc + if t.b then 1 else 0) in
Stdio.printf "sum_b: %d\n" sum_b;
let sum_b1 =
List.fold ts ~init:0 ~f:(fun acc t ->
acc
+
match t.b_opt with
| None -> 1
| Some _ -> 0)
in
let sum_b2 =
List.fold ts ~init:0 ~f:(fun acc t ->
acc
+
match t.b_opt with
| Some true -> 1
| Some false | None -> 0)
in
Stdio.printf "sum_b_opt: %d %d\n" sum_b1 sum_b2;
write ~chunk_size:128 filename ts;
let lines = python_read_and_rewrite ~filename ~print_details:true in
List.iter lines ~f:(Stdio.printf ">> %s\n%!");
let ts' = read filename in
let no_diff = ref true in
List.iter2_exn ts ts' ~f:(fun t t' ->
if compare t t' <> 0 && !no_diff
then (
no_diff := false;
Stdio.printf
"in: %s\nout: %s\n\n%!"
(sexp_of_t t |> Sexp.to_string_mach)
(sexp_of_t t' |> Sexp.to_string_mach)));
Stdio.printf "\n")
~finally:(fun () -> Caml.Sys.remove filename));
[%expect
{|
z: foo-55932
sum_x: -29583892
sum_y: -1606806.135343
sum_z_opt: -7181519.032317
sum_b: 447
sum_b_opt: 475 212
>> (900, 6)
>> foo-55932
>> -29583892 -1606806.1353425747 -7181519.032317471
>> 447 475000212.0
z: foo-45
sum_x: -70833733
sum_y: 36206088.482584
sum_z_opt: 7091351.854805
sum_b: 2708
sum_b_opt: 2603 1361
>> (5223, 6)
>> foo-45
>> -70833733 36206088.48258363 7091351.854804993
>> 2708 2603001361.0
z: foo2598252
sum_x: -757868
sum_y: -614689.470489
sum_z_opt: 361861.158869
sum_b: 1
sum_b_opt: 1 0
>> (1, 6)
>> foo2598252
>> -757868 -614689.4704887881 361861.1588694445
>> 1 1000000.0
z: foo1076282
sum_x: -669671
sum_y: -28063312.331175
sum_z_opt: 38821493.026763
sum_b: 3198
sum_b_opt: 3194 1607
>> (6446, 6)
>> foo1076282
>> -669671 -28063312.33117541 38821493.02676311
>> 3198 3194001607.0
z: foo609249368422154
sum_x: 13370204
sum_y: 3993934.619034
sum_z_opt: 1517418.813875
sum_b: 1298
sum_b_opt: 1332 659
>> (2627, 6)
>> foo609249368422154
>> 13370204 3993934.6190337846 1517418.8138750556
>> 1298 1332000659.0
z: foo48647770842302457
sum_x: 63283851
sum_y: 34915730.561748
sum_z_opt: 6679416.315610
sum_b: 1054
sum_b_opt: 1044 549
>> (2100, 6)
>> foo48647770842302457
>> 63283851 34915730.561747685 6679416.3156096125
>> 1054 1044000549.0
z: foo46963576856337718
sum_x: 6385519
sum_y: -66105313.513491
sum_z_opt: -27291947.752573
sum_b: 1743
sum_b_opt: 1802 842
>> (3519, 6)
>> foo46963576856337718
>> 6385519 -66105313.51349127 -27291947.752572805
>> 1743 1802000842.0
z: foo-901387614447954
sum_x: -32942681
sum_y: -63259224.299234
sum_z_opt: 19546809.552908
sum_b: 1265
sum_b_opt: 1247 622
>> (2487, 6)
>> foo-901387614447954
>> -32942681 -63259224.2992335 19546809.552908298
>> 1265 1247000622.0
z: foo-687404271018784
sum_x: -34775365
sum_y: -17426626.705024
sum_z_opt: -3929344.742169
sum_b: 623
sum_b_opt: 596 344
>> (1258, 6)
>> foo-687404271018784
>> -34775365 -17426626.705024164 -3929344.74216865
>> 623 596000344.0
z: foo-296
sum_x: -45829821
sum_y: -25790516.799683
sum_z_opt: 12365958.051864
sum_b: 3398
sum_b_opt: 3300 1757
>> (6760, 6)
>> foo-296
>> -45829821 -25790516.79968277 12365958.051864266
>> 3398 3300001757.0 |}]
let sexp_of_time_ns time_ns =
Time_ns.to_string_iso8601_basic time_ns ~zone:Time.Zone.utc |> sexp_of_string
let sexp_of_ofday_ns ofday = Time_ns.Ofday.to_string ofday |> sexp_of_string
let%expect_test _ =
let filename = Caml.Filename.temp_file "test" ".parquet" in
Exn.protect
~f:(fun () ->
let col_v1 = Writer.float [| 1.; 2.; 3.; 3.14159265358979; 5. |] ~name:"x" in
let col_v2 =
Writer.float_opt
[| Some 2.718281828; None; None; Some 13.37; None |]
~name:"col_v2"
in
let col_date =
let d = Date.of_string "2020-01-01" in
Writer.date
[| d; d; Date.add_days d 12; Date.add_days d (-42); d |]
~name:"col_date"
in
let col_time =
let t = Time_ns.of_string "2021-06-05 09:36:00.123+01:00" in
Writer.time_ns [| t; t; t; t; t |] ~name:"col_time"
in
let col_ofday =
Array.map
[| "00:00"; "23:59"; "11:30:11.123456"; "12:00"; "00:00:01" |]
~f:Time_ns.Ofday.of_string
|> Writer.ofday_ns ~name:"col_ofday"
in
Writer.write filename ~cols:[ col_v1; col_v2; col_date; col_time; col_ofday ];
let lines = python_read_and_rewrite ~filename ~print_details:false in
List.iter lines ~f:(Stdio.printf ">> %s\n%!");
let table = Parquet_reader.table filename in
let rows = Table.num_rows table in
Stdio.printf "%d\n%!" rows;
let col_v2 = Column.read_float_opt table ~column:(`Name "col_v2") in
let col_date = Column.read_date table ~column:(`Name "col_date") in
let col_time = Column.read_time_ns table ~column:(`Name "col_time") in
let col_ofday = Column.read_ofday_ns table ~column:(`Name "col_ofday") in
Stdio.printf
"%s\n%s\n%s\n%s\n%!"
([%sexp_of: float option array] col_v2 |> Sexp.to_string_mach)
([%sexp_of: Date.t array] col_date |> Sexp.to_string_mach)
([%sexp_of: time_ns array] col_time |> Sexp.to_string_mach)
([%sexp_of: ofday_ns array] col_ofday |> Sexp.to_string_mach);
())
~finally:(fun () -> Caml.Sys.remove filename);
[%expect
{|
5
((2.718281828)()()(13.37)())
(2020-01-01 2020-01-01 2020-01-13 2019-11-20 2020-01-01)
(2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z 2021-06-05T08:36:00.123000000Z)
(00:00:00.000000000 23:59:00.000000000 11:30:11.123456000 12:00:00.000000000 00:00:01.000000000)
|}]
|
|
0dbede6f70b1ad22e82e8909976f9adaa40e0ec180a7251b7adb4a09ace02e62 | vraid/earthgen | river-generation.rkt | #lang typed/racket
(require vraid/flow
vraid/sorted-tree
"../grid-base.rkt"
"../terrain.rkt")
(provide planet/rivers)
(define-type corner-node (Pair Integer Float))
(: ref (All (A) ((Vectorof A) -> (Integer -> A))))
(define ((ref v) n)
(vector-ref v n))
(: set-directions/floodfill! (planet-terrain -> Void))
(define (set-directions/floodfill! planet)
(let: ([tree : (sorted-tree corner-node) (make-sorted-tree
(λ ([a : corner-node]
[b : corner-node])
(<= (cdr a) (cdr b))))])
(let* ([start (argmin (curry tile-elevation planet)
(range (tile-count planet)))]
[tile-visited : (Vectorof Boolean) (make-vector (tile-count planet) #f)]
[tile-visited? (ref tile-visited)]
[tile-visit! (λ ([n : Integer])
(vector-set! tile-visited n #t))]
[corner-visited : (Vectorof Boolean) (make-vector (corner-count planet) #f)]
[corner-visited? (ref corner-visited)]
[corner-visit! (λ ([n : Integer])
(vector-set! corner-visited n #t))]
[coast : (Vectorof Boolean) (make-vector (tile-count planet) #f)]
[coast? (ref coast)]
[set-coast! (λ ([n : Integer])
(vector-set! coast n #t))]
[check-tile-elevation (λ ([n : Integer]
[elevation : Float])
(when (and (not (tile-visited? n))
(< (tile-elevation planet n)
elevation))
((tile-terrain-data-elevation-set! (planet-terrain-tile planet)) n elevation)))]
[check-corner-elevation (λ ([n : Integer]
[elevation : Float])
(when (< (corner-elevation planet n)
elevation)
((corner-terrain-data-elevation-set! (planet-terrain-corner planet)) n (* 1.001 elevation))))]
[visit/add! (λ ([n : Integer])
(let ([elevation (corner-elevation planet n)])
(for ([k (grid-corner-corner-list planet n)])
(unless (corner-visited? k)
(corner-visit! k)
(let ([prev-elevation (corner-elevation planet k)])
(check-corner-elevation k elevation)
((corner-terrain-data-river-direction-set! (planet-terrain-corner planet)) k (grid-corner-corner-position planet k n))
(let ([new-elevation (corner-elevation planet k)])
(sorted-tree-add! tree (cons k prev-elevation))
(for ([t (grid-corner-tile-list planet k)])
(check-tile-elevation t new-elevation))))))))])
(letrec ([recuvisit : (Integer -> Void)
(λ (n)
(when (tile-water? planet n)
(unless (tile-visited? n)
(tile-visit! n)
(begin
(for ([k (grid-tile-corner-list planet n)])
(unless (corner-visited? k)
(when (corner-coast? planet k)
((corner-terrain-data-elevation-set! (planet-terrain-corner planet)) k (planet-sea-level planet))
(sorted-tree-add! tree (cons k (corner-elevation planet k))))
(corner-visit! k)))
(for ([k (grid-tile-tile-list planet n)])
(recuvisit k))))))]
[make-next : (-> False)
(thunk
(and-let* ([val (sorted-tree-take-first! tree)]
[n (car val)])
(begin
(visit/add! n)
(make-next))))])
(recuvisit start)
(make-next)
(void)))))
(: river-trees (planet-terrain -> river-list))
(define (river-trees planet)
(: corner-node (Integer -> river))
(define (corner-node n)
(river n (map corner-node
(corner-river-sources planet n))))
(foldl (λ ([n : Integer]
[ls : river-list])
(if (corner-coast? planet n)
(cons (corner-node n) ls)
ls))
'()
(range (corner-count planet))))
(: planet/rivers (planet-terrain -> planet-terrain))
(define (planet/rivers p)
(let* ([tiles ((build-tile-terrain-data (tile-count p))
#:elevation (curry tile-elevation p)
#:water-level (curry tile-water-level p))]
[corners ((build-corner-terrain-data (corner-count p))
#:elevation (curry corner-elevation p)
#:river-direction (curry corner-river-direction p))]
[p (planet-terrain/kw
#:planet-geometry p
#:sea-level (planet-sea-level p)
#:tile tiles
#:corner corners
#:rivers '())])
(set-directions/floodfill! p)
(struct-copy planet-terrain p
[rivers (river-trees p)])))
| null | https://raw.githubusercontent.com/vraid/earthgen/208ac834c02208ddc16a31aa9e7ff7f91c18e046/planet/terrain-generation/river-generation.rkt | racket | #lang typed/racket
(require vraid/flow
vraid/sorted-tree
"../grid-base.rkt"
"../terrain.rkt")
(provide planet/rivers)
(define-type corner-node (Pair Integer Float))
(: ref (All (A) ((Vectorof A) -> (Integer -> A))))
(define ((ref v) n)
(vector-ref v n))
(: set-directions/floodfill! (planet-terrain -> Void))
(define (set-directions/floodfill! planet)
(let: ([tree : (sorted-tree corner-node) (make-sorted-tree
(λ ([a : corner-node]
[b : corner-node])
(<= (cdr a) (cdr b))))])
(let* ([start (argmin (curry tile-elevation planet)
(range (tile-count planet)))]
[tile-visited : (Vectorof Boolean) (make-vector (tile-count planet) #f)]
[tile-visited? (ref tile-visited)]
[tile-visit! (λ ([n : Integer])
(vector-set! tile-visited n #t))]
[corner-visited : (Vectorof Boolean) (make-vector (corner-count planet) #f)]
[corner-visited? (ref corner-visited)]
[corner-visit! (λ ([n : Integer])
(vector-set! corner-visited n #t))]
[coast : (Vectorof Boolean) (make-vector (tile-count planet) #f)]
[coast? (ref coast)]
[set-coast! (λ ([n : Integer])
(vector-set! coast n #t))]
[check-tile-elevation (λ ([n : Integer]
[elevation : Float])
(when (and (not (tile-visited? n))
(< (tile-elevation planet n)
elevation))
((tile-terrain-data-elevation-set! (planet-terrain-tile planet)) n elevation)))]
[check-corner-elevation (λ ([n : Integer]
[elevation : Float])
(when (< (corner-elevation planet n)
elevation)
((corner-terrain-data-elevation-set! (planet-terrain-corner planet)) n (* 1.001 elevation))))]
[visit/add! (λ ([n : Integer])
(let ([elevation (corner-elevation planet n)])
(for ([k (grid-corner-corner-list planet n)])
(unless (corner-visited? k)
(corner-visit! k)
(let ([prev-elevation (corner-elevation planet k)])
(check-corner-elevation k elevation)
((corner-terrain-data-river-direction-set! (planet-terrain-corner planet)) k (grid-corner-corner-position planet k n))
(let ([new-elevation (corner-elevation planet k)])
(sorted-tree-add! tree (cons k prev-elevation))
(for ([t (grid-corner-tile-list planet k)])
(check-tile-elevation t new-elevation))))))))])
(letrec ([recuvisit : (Integer -> Void)
(λ (n)
(when (tile-water? planet n)
(unless (tile-visited? n)
(tile-visit! n)
(begin
(for ([k (grid-tile-corner-list planet n)])
(unless (corner-visited? k)
(when (corner-coast? planet k)
((corner-terrain-data-elevation-set! (planet-terrain-corner planet)) k (planet-sea-level planet))
(sorted-tree-add! tree (cons k (corner-elevation planet k))))
(corner-visit! k)))
(for ([k (grid-tile-tile-list planet n)])
(recuvisit k))))))]
[make-next : (-> False)
(thunk
(and-let* ([val (sorted-tree-take-first! tree)]
[n (car val)])
(begin
(visit/add! n)
(make-next))))])
(recuvisit start)
(make-next)
(void)))))
(: river-trees (planet-terrain -> river-list))
(define (river-trees planet)
(: corner-node (Integer -> river))
(define (corner-node n)
(river n (map corner-node
(corner-river-sources planet n))))
(foldl (λ ([n : Integer]
[ls : river-list])
(if (corner-coast? planet n)
(cons (corner-node n) ls)
ls))
'()
(range (corner-count planet))))
(: planet/rivers (planet-terrain -> planet-terrain))
(define (planet/rivers p)
(let* ([tiles ((build-tile-terrain-data (tile-count p))
#:elevation (curry tile-elevation p)
#:water-level (curry tile-water-level p))]
[corners ((build-corner-terrain-data (corner-count p))
#:elevation (curry corner-elevation p)
#:river-direction (curry corner-river-direction p))]
[p (planet-terrain/kw
#:planet-geometry p
#:sea-level (planet-sea-level p)
#:tile tiles
#:corner corners
#:rivers '())])
(set-directions/floodfill! p)
(struct-copy planet-terrain p
[rivers (river-trees p)])))
|
|
66f68c3b3fc9a69b8379287b8f102a6585d03e1e62a5739253e667442ebd554b | akabe/odoc-ltxhtml | ltxhtml_ltximage.mli | The MIT License ( MIT )
Copyright ( c ) 2014
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
Copyright (c) 2014 Akinori ABE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*)
exception Command_error of string * string (* command, message *)
type t
* [ create dir ] creates a LaTeX image generator .
@param dir a directory for LaTeX image files .
@param dir a directory for LaTeX image files.
*)
val create : string -> t
* [ generate g ~latex ~header ~footer ~dvigif ~fg ~bg ~resolution code ]
compiles LaTeX code and generates a LaTeX image .
@return the file name of the generated LaTeX image .
@param g a LaTeX image generator .
@param latex LaTeX command .
@param header the header of a LaTeX file .
@param footer the footer of a LaTeX file .
@param dvigif dvigif command .
@param fg Foreground color .
@param bg Background color .
@param resolution the resolution of the LaTeX image .
@param code LaTeX code .
compiles LaTeX code and generates a LaTeX image.
@return the file name of the generated LaTeX image.
@param g a LaTeX image generator.
@param latex LaTeX command.
@param header the header of a LaTeX file.
@param footer the footer of a LaTeX file.
@param dvigif dvigif command.
@param fg Foreground color.
@param bg Background color.
@param resolution the resolution of the LaTeX image.
@param code LaTeX code.
*)
val generate : t ->
latex:string -> header:string -> footer:string ->
dvigif:string -> fg:string -> bg:string -> resolution:int ->
string -> string
* [ cleanup g ] removes unused LaTeX images and a temporary directory .
@param g a LaTeX image generator .
@param g a LaTeX image generator.
*)
val cleanup : t -> unit
| null | https://raw.githubusercontent.com/akabe/odoc-ltxhtml/99e3c86c469be9f4f1a2bf7089428be592c5b541/src/ltxhtml_ltximage.mli | ocaml | command, message | The MIT License ( MIT )
Copyright ( c ) 2014
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
Copyright (c) 2014 Akinori ABE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*)
type t
* [ create dir ] creates a LaTeX image generator .
@param dir a directory for LaTeX image files .
@param dir a directory for LaTeX image files.
*)
val create : string -> t
* [ generate g ~latex ~header ~footer ~dvigif ~fg ~bg ~resolution code ]
compiles LaTeX code and generates a LaTeX image .
@return the file name of the generated LaTeX image .
@param g a LaTeX image generator .
@param latex LaTeX command .
@param header the header of a LaTeX file .
@param footer the footer of a LaTeX file .
@param dvigif dvigif command .
@param fg Foreground color .
@param bg Background color .
@param resolution the resolution of the LaTeX image .
@param code LaTeX code .
compiles LaTeX code and generates a LaTeX image.
@return the file name of the generated LaTeX image.
@param g a LaTeX image generator.
@param latex LaTeX command.
@param header the header of a LaTeX file.
@param footer the footer of a LaTeX file.
@param dvigif dvigif command.
@param fg Foreground color.
@param bg Background color.
@param resolution the resolution of the LaTeX image.
@param code LaTeX code.
*)
val generate : t ->
latex:string -> header:string -> footer:string ->
dvigif:string -> fg:string -> bg:string -> resolution:int ->
string -> string
* [ cleanup g ] removes unused LaTeX images and a temporary directory .
@param g a LaTeX image generator .
@param g a LaTeX image generator.
*)
val cleanup : t -> unit
|
bc0ab71c45bd0813f55ee54a7e39223145e66088f786d646a46bef8359054cef | launchdarkly/erlang-server-sdk | ts_command_params.erl | %-------------------------------------------------------------------
%% @doc `ts_command_params' module
%%
Parsers and types for command parameters .
@private
%% @end
%%-------------------------------------------------------------------
-module(ts_command_params).
%% API
-export([
parse_command/1,
format_evaluate_flag_response/1
]).
-type command() :: evaluate | evaluate_all | identify_event | custom_event | flush_events.
-type command_params() :: #{
command := command(),
evaluate => evaluate_flag_params(),
evaluate_all => evaluate_all_flags_params(),
custom_event => custom_event_params(),
identify_event => identify_event_params()
}.
-type evaluate_flag_params() :: #{
flag_key := binary(),
user => ldclient_user:user(),
context => ldclient_context:context(),
value_type := binary(),
default_value := ldclient_flag:variation_value(),
detail := boolean()
}.
-type evaluate_flag_response() :: #{
value := ldclient_flag:variation_value(),
variationIndex => ldclient_eval:variation_index(),
reason => ldclient_eval:reason()
}.
-type evaluate_all_flags_params() :: #{
user => ldclient_user:user(),
context => ldclient_context:context(),
with_reasons := boolean(),
client_side_only := boolean(),
details_only_for_tracked_flags := boolean()
}.
-type evaluate_all_flags_response() :: #{
%% TODO: All flags state is not implemented. When it is this
%% will need a proper type.
state => map()
}.
-type custom_event_params() :: #{
event_key := binary(),
user => ldclient_user:user(),
context => ldclient_context:context(),
data => ldclient_flag:variation_value(),
omit_null_data := boolean(),
metric_value => float()
}.
-type identify_event_params() :: #{
user => ldclient_user:user(),
context => ldclient_context:context()
}.
-export_type([command_params/0]).
-export_type([evaluate_flag_params/0]).
-export_type([evaluate_flag_response/0]).
-export_type([evaluate_all_flags_params/0]).
-export_type([evaluate_all_flags_response/0]).
-export_type([custom_event_params/0]).
-export_type([identify_event_params/0]).
-spec parse_command(Command :: map()) -> command_params().
parse_command(#{<<"command">> := <<"evaluate">>,
<<"evaluate">> := Evaluate} = _Command) -> #{
command => evaluate,
evaluate => parse_evaluate(Evaluate)
};
parse_command(#{<<"command">> := <<"evaluateAll">>,
<<"evaluateAll">> := EvaluateAll} = _Command) -> #{
command => evaluate_all,
evaluate_all => parse_evaluate_all(EvaluateAll)
};
parse_command(#{<<"command">> := <<"identifyEvent">>,
<<"identifyEvent">> := IdentifyEvent} = _Command) -> #{
command => identify_event,
identify_event => parse_identify_event(IdentifyEvent)
};
parse_command(#{<<"command">> := <<"customEvent">>,
<<"customEvent">> := CustomEvent} = _Command) -> #{
command => custom_event,
custom_event => parse_custom_event(CustomEvent)
};
parse_command(#{<<"command">> := <<"flushEvents">>} = _Command) -> #{
command => flush_events
};
parse_command(_Command) ->
%% TODO: Provide some detail.
error.
-spec parse_evaluate(Evaluate :: map()) -> evaluate_flag_params().
parse_evaluate(Evaluate) ->
Parsed = #{
flag_key => maps:get(<<"flagKey">>, Evaluate, <<>>),
value_type => maps:get(<<"valueType">> , Evaluate, <<>>),
default_value => maps:get(<<"defaultValue">>, Evaluate, <<>>),
detail => maps:get(<<"detail">>, Evaluate, false)
},
MaybeWithContext = maybe_add_context(Evaluate, Parsed),
maybe_add_user(parse_user(Evaluate), MaybeWithContext).
-spec maybe_add_user(User :: ldclient_user:user() | undefined, Map :: map()) -> map().
maybe_add_user(undefined, Map) -> Map;
maybe_add_user(User, Map) -> Map#{user => User}.
-spec maybe_add_context(Command :: map(), Map :: map()) -> map().
maybe_add_context(#{<<"context">> := Context} = _Command, Map) when is_map(Context) ->
ParsedContext = ldclient_context:new_from_json(Context),
Map#{context => ParsedContext};
maybe_add_context(_Command, Map) -> Map.
-spec parse_user_with_key(Container ::
identify_event_params()
| custom_event_params()
| evaluate_flag_params()
| evaluate_all_flags_params(),
UserKey :: binary()) -> ldclient_user:user() | undefined.
parse_user_with_key(Container, UserKey) ->
User = maps:get(UserKey, Container, undefined),
parse_user_map(User).
-spec parse_user_map(User :: map() | undefined) -> ldclient_user:user() | undefined.
parse_user_map(undefined) -> undefined;
parse_user_map(User) ->
UserWithKey = #{
key => maps:get(<<"key">>, User)
},
UserWithIp = parse_optional(<<"ip">>, ip, User, UserWithKey),
UserWithCountry = parse_optional(<<"country">>, country, User, UserWithIp),
UserWithEmail = parse_optional(<<"email">>, email, User, UserWithCountry),
UserWithFirstName = parse_optional(<<"firstName">>, first_name, User, UserWithEmail),
UserWithLastName = parse_optional(<<"lastName">>, last_name, User, UserWithFirstName),
UserWithAvatar = parse_optional(<<"avatar">>, avatar, User, UserWithLastName),
UserWithName = parse_optional(<<"name">>, name, User, UserWithAvatar),
UserWithAnonymous = parse_optional(<<"anonymous">>, anonymous, User, UserWithName),
UserWithCustom = parse_optional(<<"custom">>, custom, User, UserWithAnonymous),
parse_optional(<<"privateAttributeNames">>, private_attribute_names, User, UserWithCustom).
-spec parse_user(Container ::
identify_event_params()
| custom_event_params()
| evaluate_flag_params()
| evaluate_all_flags_params()) -> ldclient_user:user() | undefined.
parse_user(Container) ->
parse_user_with_key(Container, <<"user">>).
%% Treats null as not included.
-spec parse_optional(InKey :: binary(), OutKey :: atom(),
Input :: map(), Output :: map()) -> map().
parse_optional(InKey, OutKey, Input, Output) ->
Value = maps:get(InKey, Input, undefined),
add_if_defined(OutKey, Value, Output, false).
%% Conditionally will include null values.
-spec parse_optional(InKey :: binary(), OutKey :: atom(),
Input :: map(), Output :: map(), IncludeNull :: boolean()) -> map().
parse_optional(InKey, OutKey, Input, Output, IncludeNull) ->
Value = maps:get(InKey, Input, undefined),
add_if_defined(OutKey, Value, Output, IncludeNull).
-spec add_if_defined(Key :: atom(), Value :: any(), Output :: map(), IncludeNull :: boolean()) -> map().
add_if_defined(_Key, undefined, Output, _) -> Output;
add_if_defined(_Key, null, Output, false) -> Output;
add_if_defined(Key, Value, Output, _) -> Output#{Key => Value}.
-spec parse_evaluate_all(EvaluateAll :: map()) -> evaluate_all_flags_params().
parse_evaluate_all(EvaluateAll) ->
maybe_add_context(EvaluateAll,
maybe_add_user(parse_user(EvaluateAll), #{
with_reasons => maps:get(<<"withReasons">>, EvaluateAll, false),
client_side_only => maps:get(<<"clientSideOnly">>, EvaluateAll, false),
details_only_for_tracked_flags => maps:get(<<"detailsOnlyForTrackedFlags">>, EvaluateAll, false)
})).
-spec parse_identify_event(IdentifyEvent :: map()) -> identify_event_params().
parse_identify_event(IdentifyEvent) ->
maybe_add_context(IdentifyEvent,
maybe_add_user(parse_user(IdentifyEvent), #{})).
-spec parse_custom_event(CustomEvent :: map()) -> custom_event_params().
parse_custom_event(CustomEvent) ->
CustomEventWithKey = #{
event_key => maps:get(<<"eventKey">>, CustomEvent, <<>>),
omit_null_value => maps:get(<<"omitNullData">>, CustomEvent, false)
},
CustomEventWithUser = maybe_add_user(parse_user(CustomEvent), CustomEventWithKey),
CustomEventWithContext = maybe_add_context(CustomEvent, CustomEventWithUser),
CustomEventWithData = parse_optional(<<"data">>, data, CustomEvent, CustomEventWithContext, true),
CustomEventWithOmitNullData = parse_optional(<<"omitNullData">>, omit_null_data,
CustomEvent, CustomEventWithData),
CustomEventWithMetricValue = parse_optional(<<"metricValue">>, metric_value,
CustomEvent, CustomEventWithOmitNullData),
CustomEventWithMetricValue.
-spec format_evaluate_flag_response(Result ::
ldclient_eval:detail()
| ldclient_eval:result_value()) -> evaluate_flag_response().
format_evaluate_flag_response({VariationIndex, Value, Reason}) ->
#{
value => Value,
variationIndex => VariationIndex,
reason => ldclient_eval_reason:format(Reason)
};
format_evaluate_flag_response({VariationIndex, Value}) ->
#{
value => Value,
variationIndex => VariationIndex
};
format_evaluate_flag_response(Value) ->
#{
value => Value
}.
| null | https://raw.githubusercontent.com/launchdarkly/erlang-server-sdk/d9a4442a8a214bf950dec8182b26cd042436f4c8/test-service/src/ts_command_params.erl | erlang | -------------------------------------------------------------------
@doc `ts_command_params' module
@end
-------------------------------------------------------------------
API
TODO: All flags state is not implemented. When it is this
will need a proper type.
TODO: Provide some detail.
Treats null as not included.
Conditionally will include null values. | Parsers and types for command parameters .
@private
-module(ts_command_params).
-export([
parse_command/1,
format_evaluate_flag_response/1
]).
-type command() :: evaluate | evaluate_all | identify_event | custom_event | flush_events.
-type command_params() :: #{
command := command(),
evaluate => evaluate_flag_params(),
evaluate_all => evaluate_all_flags_params(),
custom_event => custom_event_params(),
identify_event => identify_event_params()
}.
-type evaluate_flag_params() :: #{
flag_key := binary(),
user => ldclient_user:user(),
context => ldclient_context:context(),
value_type := binary(),
default_value := ldclient_flag:variation_value(),
detail := boolean()
}.
-type evaluate_flag_response() :: #{
value := ldclient_flag:variation_value(),
variationIndex => ldclient_eval:variation_index(),
reason => ldclient_eval:reason()
}.
-type evaluate_all_flags_params() :: #{
user => ldclient_user:user(),
context => ldclient_context:context(),
with_reasons := boolean(),
client_side_only := boolean(),
details_only_for_tracked_flags := boolean()
}.
-type evaluate_all_flags_response() :: #{
state => map()
}.
-type custom_event_params() :: #{
event_key := binary(),
user => ldclient_user:user(),
context => ldclient_context:context(),
data => ldclient_flag:variation_value(),
omit_null_data := boolean(),
metric_value => float()
}.
-type identify_event_params() :: #{
user => ldclient_user:user(),
context => ldclient_context:context()
}.
-export_type([command_params/0]).
-export_type([evaluate_flag_params/0]).
-export_type([evaluate_flag_response/0]).
-export_type([evaluate_all_flags_params/0]).
-export_type([evaluate_all_flags_response/0]).
-export_type([custom_event_params/0]).
-export_type([identify_event_params/0]).
%% Parse a raw JSON command map (as decoded from the test harness) into an
%% internal command_params() map. Each recognized command carries its payload
%% under a key of the same name; clause order matters because the final
%% clause is a catch-all that rejects anything unrecognized.
-spec parse_command(Command :: map()) -> command_params().
parse_command(#{<<"command">> := <<"evaluate">>,
                <<"evaluate">> := Evaluate} = _Command) -> #{
    command => evaluate,
    evaluate => parse_evaluate(Evaluate)
};
parse_command(#{<<"command">> := <<"evaluateAll">>,
                <<"evaluateAll">> := EvaluateAll} = _Command) -> #{
    command => evaluate_all,
    evaluate_all => parse_evaluate_all(EvaluateAll)
};
parse_command(#{<<"command">> := <<"identifyEvent">>,
                <<"identifyEvent">> := IdentifyEvent} = _Command) -> #{
    command => identify_event,
    identify_event => parse_identify_event(IdentifyEvent)
};
parse_command(#{<<"command">> := <<"customEvent">>,
                <<"customEvent">> := CustomEvent} = _Command) -> #{
    command => custom_event,
    custom_event => parse_custom_event(CustomEvent)
};
%% flushEvents carries no payload.
parse_command(#{<<"command">> := <<"flushEvents">>} = _Command) -> #{
    command => flush_events
};
%% Unrecognized or malformed command.
%% TODO: Provide some detail.
parse_command(_Command) ->
    error.
%% Parse the body of an "evaluate" command: required flag fields get
%% harness defaults (empty binary / false), then the optional user and
%% context objects are attached when present.
-spec parse_evaluate(Evaluate :: map()) -> evaluate_flag_params().
parse_evaluate(Evaluate) ->
    Base = #{
        flag_key => maps:get(<<"flagKey">>, Evaluate, <<>>),
        value_type => maps:get(<<"valueType">>, Evaluate, <<>>),
        default_value => maps:get(<<"defaultValue">>, Evaluate, <<>>),
        detail => maps:get(<<"detail">>, Evaluate, false)
    },
    maybe_add_user(parse_user(Evaluate), maybe_add_context(Evaluate, Base)).
%% Attach a parsed user to the accumulator map; a missing (undefined)
%% user leaves the map untouched.
-spec maybe_add_user(User :: ldclient_user:user() | undefined, Map :: map()) -> map().
maybe_add_user(undefined, Map) -> Map;
maybe_add_user(User, Map) -> Map#{user => User}.
%% Attach a parsed context when the raw command carries a "context"
%% JSON object; commands without one pass through unchanged.
-spec maybe_add_context(Command :: map(), Map :: map()) -> map().
maybe_add_context(#{<<"context">> := Context} = _Command, Map) when is_map(Context) ->
    ParsedContext = ldclient_context:new_from_json(Context),
    Map#{context => ParsedContext};
maybe_add_context(_Command, Map) -> Map.
%% Extract the user object stored under UserKey in a command parameter
%% map and parse it; undefined when the key is absent.
-spec parse_user_with_key(Container ::
    identify_event_params()
    | custom_event_params()
    | evaluate_flag_params()
    | evaluate_all_flags_params(),
    UserKey :: binary()) -> ldclient_user:user() | undefined.
parse_user_with_key(Container, UserKey) ->
    User = maps:get(UserKey, Container, undefined),
    parse_user_map(User).
%% Convert a raw JSON user map into an ldclient user map. The "key"
%% field is mandatory (maps:get/2 raises {badkey, _} when missing); every
%% other attribute is copied only when present and non-null, threading the
%% accumulator through one parse_optional call per attribute.
-spec parse_user_map(User :: map() | undefined) -> ldclient_user:user() | undefined.
parse_user_map(undefined) -> undefined;
parse_user_map(User) ->
    UserWithKey = #{
        key => maps:get(<<"key">>, User)
    },
    UserWithIp = parse_optional(<<"ip">>, ip, User, UserWithKey),
    UserWithCountry = parse_optional(<<"country">>, country, User, UserWithIp),
    UserWithEmail = parse_optional(<<"email">>, email, User, UserWithCountry),
    UserWithFirstName = parse_optional(<<"firstName">>, first_name, User, UserWithEmail),
    UserWithLastName = parse_optional(<<"lastName">>, last_name, User, UserWithFirstName),
    UserWithAvatar = parse_optional(<<"avatar">>, avatar, User, UserWithLastName),
    UserWithName = parse_optional(<<"name">>, name, User, UserWithAvatar),
    UserWithAnonymous = parse_optional(<<"anonymous">>, anonymous, User, UserWithName),
    UserWithCustom = parse_optional(<<"custom">>, custom, User, UserWithAnonymous),
    parse_optional(<<"privateAttributeNames">>, private_attribute_names, User, UserWithCustom).
%% Shorthand for the common case: the user object lives under "user".
-spec parse_user(Container ::
    identify_event_params()
    | custom_event_params()
    | evaluate_flag_params()
    | evaluate_all_flags_params()) -> ldclient_user:user() | undefined.
parse_user(Container) ->
    parse_user_with_key(Container, <<"user">>).
%% Treats null as not included.
-spec parse_optional(InKey :: binary(), OutKey :: atom(),
    Input :: map(), Output :: map()) -> map().
parse_optional(InKey, OutKey, Input, Output) ->
    Value = maps:get(InKey, Input, undefined),
    add_if_defined(OutKey, Value, Output, false).
%% Conditionally will include null values.
-spec parse_optional(InKey :: binary(), OutKey :: atom(),
    Input :: map(), Output :: map(), IncludeNull :: boolean()) -> map().
parse_optional(InKey, OutKey, Input, Output, IncludeNull) ->
    Value = maps:get(InKey, Input, undefined),
    add_if_defined(OutKey, Value, Output, IncludeNull).
%% Copy Value into Output under Key; undefined is always skipped, and
%% null is skipped unless IncludeNull is true.
-spec add_if_defined(Key :: atom(), Value :: any(), Output :: map(), IncludeNull :: boolean()) -> map().
add_if_defined(_Key, undefined, Output, _) -> Output;
add_if_defined(_Key, null, Output, false) -> Output;
add_if_defined(Key, Value, Output, _) -> Output#{Key => Value}.
%% Parse the body of an "evaluateAll" command: the three boolean options
%% default to false, then optional user/context objects are attached.
-spec parse_evaluate_all(EvaluateAll :: map()) -> evaluate_all_flags_params().
parse_evaluate_all(EvaluateAll) ->
    maybe_add_context(EvaluateAll,
        maybe_add_user(parse_user(EvaluateAll), #{
            with_reasons => maps:get(<<"withReasons">>, EvaluateAll, false),
            client_side_only => maps:get(<<"clientSideOnly">>, EvaluateAll, false),
            details_only_for_tracked_flags => maps:get(<<"detailsOnlyForTrackedFlags">>, EvaluateAll, false)
        })).
%% An identify event carries nothing but the optional user/context.
-spec parse_identify_event(IdentifyEvent :: map()) -> identify_event_params().
parse_identify_event(IdentifyEvent) ->
    maybe_add_context(IdentifyEvent,
        maybe_add_user(parse_user(IdentifyEvent), #{})).
%% Parse the body of a "customEvent" command. `data` is copied with
%% IncludeNull = true so an explicit JSON null survives (distinguishing
%% "null data" from "no data"); metricValue is copied only when present.
%% NOTE(review): the base map stores the <<"omitNullData">> default under
%% the key omit_null_value, while the custom_event_params() spec and the
%% later parse_optional use omit_null_data -- confirm which key the
%% consumers actually read.
-spec parse_custom_event(CustomEvent :: map()) -> custom_event_params().
parse_custom_event(CustomEvent) ->
    CustomEventWithKey = #{
        event_key => maps:get(<<"eventKey">>, CustomEvent, <<>>),
        omit_null_value => maps:get(<<"omitNullData">>, CustomEvent, false)
    },
    CustomEventWithUser = maybe_add_user(parse_user(CustomEvent), CustomEventWithKey),
    CustomEventWithContext = maybe_add_context(CustomEvent, CustomEventWithUser),
    CustomEventWithData = parse_optional(<<"data">>, data, CustomEvent, CustomEventWithContext, true),
    CustomEventWithOmitNullData = parse_optional(<<"omitNullData">>, omit_null_data,
        CustomEvent, CustomEventWithData),
    CustomEventWithMetricValue = parse_optional(<<"metricValue">>, metric_value,
        CustomEvent, CustomEventWithOmitNullData),
    CustomEventWithMetricValue.
%% Shape an SDK evaluation result for the JSON response. Three cases:
%% a detail triple (with reason), a variation/value pair, or a bare value.
-spec format_evaluate_flag_response(Result ::
    ldclient_eval:detail()
    | ldclient_eval:result_value()) -> evaluate_flag_response().
%% Detail evaluation: include the formatted evaluation reason.
format_evaluate_flag_response({VariationIndex, Value, Reason}) ->
    #{
        value => Value,
        variationIndex => VariationIndex,
        reason => ldclient_eval_reason:format(Reason)
    };
%% Variation + value, no reason requested.
format_evaluate_flag_response({VariationIndex, Value}) ->
    #{
        value => Value,
        variationIndex => VariationIndex
    };
%% Plain value (e.g. a default fallback).
format_evaluate_flag_response(Value) ->
    #{
        value => Value
    }.
|
add9d0d22741123afca96f4788319b9e1655c58e62ae90af96e49854f322dd47 | Daniel-Diaz/HaTeX | AMSFonts.hs |
# LANGUAGE CPP #
-- | Module for the package @amsfonts@.
module Text.LaTeX.Packages.AMSFonts
* AMSFonts package
amsfonts
-- * Fonts
, mathbb, mathfrak
-- * Number sets
, naturals, integers, rationals, reals, quaternions
-- ** Complex numbers
, complexes, trealPart, timagPart
) where
import Text.LaTeX.Base.Class
import Text.LaTeX.Base.Types
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid
#endif
-- | AMSFonts package.
-- Example:
--
-- > usepackage [] amsfonts
amsfonts :: ClassName
amsfonts = "amsfonts"

--
-- | This font is useful for representing sets like
-- \(\mathbb{R}\) (real numbers) or \(\mathbb{Z}\) (integers). For instance:
--
-- > "The set of real numbers are represented by " <> mathbb "R" <> "."
--
-- Or in monadic form:
--
-- > "The set of real numbers are represented by " >> mathbb "R" >> "."
--
-- /Note the use of overloaded strings./
mathbb :: LaTeXC l => l -> l
mathbb = comm1 "mathbb"

-- | Fraktur font, like \(\mathfrak{abcXYZ}\).
mathfrak :: LaTeXC l => l -> l
mathfrak = comm1 "mathfrak"
-- | \(\mathbb{N}\)
naturals :: LaTeXC l => l
naturals = mathbb "N"

-- | \(\mathbb{Z}\)
integers :: LaTeXC l => l
integers = mathbb "Z"

-- | \(\mathbb{Q}\)
rationals :: LaTeXC l => l
rationals = mathbb "Q"

-- | \(\mathbb{R}\)
reals :: LaTeXC l => l
reals = mathbb "R"

-- | \(\mathbb{C}\)
complexes :: LaTeXC l => l
complexes = mathbb "C"

-- | \(\mathbb{H}\)
quaternions :: LaTeXC l => l
quaternions = mathbb "H"

-- | \(\Re\)
trealPart :: LaTeXC l => l -> l
trealPart z = comm0 "Re" <> z

-- | \(\Im\)
timagPart :: LaTeXC l => l -> l
timagPart z = comm0 "Im" <> z
| null | https://raw.githubusercontent.com/Daniel-Diaz/HaTeX/aae193763157378500ebedc733c913e74f53b060/Text/LaTeX/Packages/AMSFonts.hs | haskell | | Module for the package @amsfonts@.
* Fonts
* Number sets
** Complex numbers
Example:
| This font is useful for representing sets like
Or in monadic form:
> "The set of real numbers are represented by " >> mathbb "R" >> "."
/Note the use of overloaded strings./
| \(\mathbb{Z}\)
| \(\mathbb{Q}\)
| \(\mathbb{R}\)
| \(\mathbb{C}\)
| \(\Re\) |
# LANGUAGE CPP #
module Text.LaTeX.Packages.AMSFonts
* AMSFonts package
amsfonts
, mathbb, mathfrak
, naturals, integers, rationals, reals, quaternions
, complexes, trealPart, timagPart
) where
import Text.LaTeX.Base.Class
import Text.LaTeX.Base.Types
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid
#endif
| AMSFonts package .
> usepackage [ ] amsfonts
amsfonts :: ClassName
amsfonts = "amsfonts"
\(\mathbb{R}\ ) ( real numbers ) or \(\mathbb{Z}\ ) ( integers ) . For instance :
> " The set of real numbers are represented by " < > " R " < > " . "
mathbb :: LaTeXC l => l -> l
mathbb = comm1 "mathbb"
| Fraktur font , like \(\mathfrak{abcXYZ}\ ) .
mathfrak :: LaTeXC l => l -> l
mathfrak = comm1 "mathfrak"
| )
naturals :: LaTeXC l => l
naturals = mathbb "N"
integers :: LaTeXC l => l
integers = mathbb "Z"
rationals :: LaTeXC l => l
rationals = mathbb "Q"
reals :: LaTeXC l => l
reals = mathbb "R"
complexes :: LaTeXC l => l
complexes = mathbb "C"
| \(\mathbb{H}\ )
quaternions :: LaTeXC l => l
quaternions = mathbb "H"
trealPart :: LaTeXC l => l -> l
trealPart z = comm0 "Re" <> z
| \(\Im\ )
timagPart :: LaTeXC l => l -> l
timagPart z = comm0 "Im" <> z
|
0399f41078a9abf44e8072f7366816af2c15436a9a85fb649fb8356b634974db | danfran/cabal-macosx | Main.hs | module Main (main) where
import Test.Framework (defaultMain)
import Distribution.MacOSX.Internal.Tests (macosxInternalTests)
-- | Test-suite entry point: run every registered test group under
-- test-framework's console runner (the process exit code reflects failures).
main :: IO ()
main = defaultMain
  [ macosxInternalTests
  ]
| null | https://raw.githubusercontent.com/danfran/cabal-macosx/6714a7018ddcd71efd96044bd55c0e19fb690939/tests/Main.hs | haskell | module Main (main) where
import Test.Framework (defaultMain)
import Distribution.MacOSX.Internal.Tests (macosxInternalTests)
main :: IO ()
main = defaultMain
[ macosxInternalTests
]
|
|
4369aca99bad947eab3bdcd13d3466c4ad1a3481fcbfb4c8e2d4bc7358ff8900 | ipfs-shipyard/cube | db_test.clj | (ns cube.db-test
(:require [cube.db :as db])
(:use clojure.test))
(defn test-db [] {:db-path "/tmp/test-cube-db.clj"
:state (atom {:name "barry"
:numbers [5]
:nested {:name "larry"}
:instances {:running {}}})})
(deftest access-value
(is (= "barry" (db/access (test-db) :name)))
(is (= [5] (db/access (test-db) :numbers))))
(deftest access-in-value
(is (= "barry" (db/access-in (test-db) [:name])))
(is (= 5 (db/access-in (test-db) [:numbers 0])))
(is (= {} (db/access-in (test-db) [:instances :running]))))
(deftest put-value
(let [new-db (test-db)]
(db/put new-db :testing false)
(is (= false (db/access new-db :testing)))))
(deftest put-in-value
(let [new-db (test-db)]
(db/put-in new-db [:instances :running :test-id] true)
(is (= true (db/access-in new-db [:instances :running :test-id])))))
(deftest remove-value
(let [new-db (test-db)]
(db/remove new-db :name)
(is (= nil (db/access new-db :name)))))
(deftest remove-in-value
(testing "Remove one key"
(let [new-db (test-db)]
(db/remove-in new-db [:name])
(is (= nil (db/access-in new-db [:name])))))
(testing "Not remove empty maps when removing nested values"
(let [new-db (test-db)]
(db/remove-in new-db [:nested :name])
(is (= {} (db/access-in new-db [:nested])))
(is (= nil (db/access-in new-db [:nested :name]))))))
(deftest add-to-value
(let [new-db (test-db)]
(db/add-to new-db [:numbers] 1)
(is (= [5 1] (db/access-in new-db [:numbers])))))
| null | https://raw.githubusercontent.com/ipfs-shipyard/cube/bd835a35a5273c744cbc415425ffbdb0990901e6/test/cube/db_test.clj | clojure | (ns cube.db-test
(:require [cube.db :as db])
(:use clojure.test))
(defn test-db [] {:db-path "/tmp/test-cube-db.clj"
:state (atom {:name "barry"
:numbers [5]
:nested {:name "larry"}
:instances {:running {}}})})
(deftest access-value
(is (= "barry" (db/access (test-db) :name)))
(is (= [5] (db/access (test-db) :numbers))))
(deftest access-in-value
(is (= "barry" (db/access-in (test-db) [:name])))
(is (= 5 (db/access-in (test-db) [:numbers 0])))
(is (= {} (db/access-in (test-db) [:instances :running]))))
(deftest put-value
(let [new-db (test-db)]
(db/put new-db :testing false)
(is (= false (db/access new-db :testing)))))
(deftest put-in-value
(let [new-db (test-db)]
(db/put-in new-db [:instances :running :test-id] true)
(is (= true (db/access-in new-db [:instances :running :test-id])))))
(deftest remove-value
(let [new-db (test-db)]
(db/remove new-db :name)
(is (= nil (db/access new-db :name)))))
(deftest remove-in-value
(testing "Remove one key"
(let [new-db (test-db)]
(db/remove-in new-db [:name])
(is (= nil (db/access-in new-db [:name])))))
(testing "Not remove empty maps when removing nested values"
(let [new-db (test-db)]
(db/remove-in new-db [:nested :name])
(is (= {} (db/access-in new-db [:nested])))
(is (= nil (db/access-in new-db [:nested :name]))))))
(deftest add-to-value
(let [new-db (test-db)]
(db/add-to new-db [:numbers] 1)
(is (= [5 1] (db/access-in new-db [:numbers])))))
|
|
490e75b85bd93b46461ed464d3b3dc0b59ddc9f216996b465838bc573852808d | nuprl/gradual-typing-performance | decode-struct.rkt | #lang scheme/base
(require "private/provide-structs.rkt")
(provide-structs
[part-index-desc ()])
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/pre-benchmark/ecoop/scribble-lib/scribble/decode-struct.rkt | racket | #lang scheme/base
(require "private/provide-structs.rkt")
(provide-structs
[part-index-desc ()])
|
|
bf603d2a0a0ea2f1bb3cfd2023e3a07be38621cccf036bef194c42c8d8852114 | Frozenlock/wacnet | common.cljs | (ns wacnet.templates.common
(:require [re-com.core :as re]
[reagent.core :as r]))
(defn is-chrome-or-opera?
  "True if the browser is Chrome(ium) or Opera"
  []
  ;; Delegate the user-agent sniffing to Closure's goog.labs.userAgent.
  (let [browser (-> js/goog .-labs .-userAgent .-browser)]
    (or (.isChrome browser)
        (.isOpera browser))))
(defn cond-h-split
  "If the browser is not Chrome or Opera, replace the split by a
  simple h-box. `initial-split` is the percentage given to panel-1."
  [& {:keys [panel-1 panel-2 size initial-split]}]
  (if (is-chrome-or-opera?)
    [re/h-split
     :initial-split initial-split
     :panel-1 panel-1
     :panel-2 panel-2]
    ;; Fallback: approximate the split with two fixed flex sizes.
    [re/h-box
     :size "1"
     :height "100%"
     :children [[re/box
                 :size (str initial-split)
                 :child panel-1]
                [re/box
                 ;; BUG FIX: was (- initial-split 100), which produces a
                 ;; negative flex size for any split below 100%; panel-2
                 ;; gets the remaining percentage instead.
                 :size (str (- 100 initial-split))
                 :child panel-2]]]))
;; Wrap `content` in a vertically-scrolling div; `attr` can override the
;; default full-viewport style.
(defn scrollable*
  ([content] (scrollable* {} content))
  ([attr content]
   [:div (merge {:style {:overflow-y "auto"
                         :height "100vh"}} attr)
    content]))
;; Same as scrollable*, but after mounting the element's height is pinned
;; to the space between its top edge and the bottom of the viewport.
(def scrollable
  (with-meta scrollable*
    {:component-did-mount
     #(let [node (r/dom-node %)
            top (.-top (.getBoundingClientRect node))]
        (set! (.-style.height node) (str "calc(100vh - "top"px)")))}))
( defn scrollable [ attr content ]
;; [:div (merge {:style {;:overflow-y "auto"
; ; : height " 100 % "
;; }} attr)
;; content])
;;;;;;;;;;;;;;;;;;;;;;
(defn input
  "Controlled text input backed by a local atom. `on-save` receives the
  trimmed value on blur or Enter; Esc clears the field and calls
  `on-stop` when given. The one-argument arity defaults the CSS class
  to \"form-control\"."
  ([{:keys [text on-save on-stop input-type] :as args}]
   (input args "form-control"))
  ([{:keys [text on-save on-stop input-type]} class]
   (let [val (r/atom text)
         ;; NOTE(review): this binding was garbled in the source being
         ;; reviewed; reconstructed from the `(stop)` call on Esc below.
         stop #(do (reset! val "")
                   (if on-stop (on-stop)))
         save #(let [v (-> @val str clojure.string/trim)]
                 (when on-save
                   (on-save v)))]
     (fn [props]
       [input-type (merge
                    {:value @val :on-blur save
                     :class class
                     :style {:width "100%"}
                     :on-change #(reset! val (-> % .-target .-value))
                     :on-key-up #(case (.-which %)
                                   13 (save) ; enter
                                   27 (stop) ; esc
                                   nil)}
                    props)]))))
;; `input` variant that grabs focus on mount and places the caret at the
;; end of the existing text.
(def edit
  (-> input
      (with-meta {:component-did-mount
                  #(let [node (r/dom-node %)
                         n-value (count (.-value node))]
                     (.focus node)
                     (.setSelectionRange node n-value n-value)
                     )})))
;; Like `edit`, but selects the whole existing text on mount.
(def edit-with-select
  (-> input
      (with-meta {:component-did-mount
                  #(let [node (r/dom-node %)
                         n-value (count (.-value node))]
                     (.focus node)
                     (.setSelectionRange node 0 n-value))})))
;; Store `value` under `key`; an empty string removes the key entirely
;; instead of keeping "".
(defn save-edit-field [atom key value]
  (if-not (empty? value)
    (swap! atom assoc key value)
    (swap! atom dissoc key)))
;; Focused editor bound to (get @atom key); persists on blur/Enter.
(defn editable [input-type atom key]
  [edit
   {:text (get @atom key)
    :input-type input-type
    :on-save (partial save-edit-field atom key)}])
;; Same, but with the current text pre-selected.
(defn editable-with-select [input-type atom key]
  [edit-with-select
   {:text (get @atom key)
    :input-type input-type
    :on-save (partial save-edit-field atom key)}])
(defn live-edit
  "Same as editable, but immediately updates the atom."
  [input-type atom key]
  (let [value (get @atom key)]
    [input-type {:value value
                 :class "form-control"
                 :style {:width "100%"}
                 :on-change (fn [evt]
                              (let [temp-val (-> evt .-target .-value)
                                    ;; Keep the stored type: if the current
                                    ;; value is numeric, parse the new text.
                                    ;; NOTE(review): js/parseInt has no radix
                                    ;; here and yields NaN on non-numeric
                                    ;; input -- confirm intended.
                                    new-val (if (number? value)
                                              (js/parseInt temp-val) temp-val)]
                                (if (empty? temp-val)
                                  (swap! atom dissoc key)
                                  (swap! atom assoc key new-val))))}]))
;;; bootstrap
;; Bootstrap horizontal form row: label and control split 50/50.
(defn form-group [label id body]
  [:div.form-group.form-group-sm
   [:label.col-sm-6.control-label {:for id} label]
   [:div.col-sm-6 body]])
| null | https://raw.githubusercontent.com/Frozenlock/wacnet/69947dc02c91ae160c759a0abe97d4f472e9a876/src/cljs/wacnet/templates/common.cljs | clojure | [:div (merge {:style {;:overflow-y "auto"
; : height " 100 % "
}} attr)
content])
enter
esc
bootstrap | (ns wacnet.templates.common
(:require [re-com.core :as re]
[reagent.core :as r]))
(defn is-chrome-or-opera?
"True if the browser is Chrome(ium) or Opera"
[]
(let [browser (-> js/goog .-labs .-userAgent .-browser)]
(or (.isChrome browser)
(.isOpera browser))))
(defn cond-h-split
"If the browser is not Chrome or Opera, replace the split by a
simple h-box." [& {:keys [panel-1 panel-2 size initial-split]}]
(if (is-chrome-or-opera?)
[re/h-split
:initial-split initial-split
:panel-1 panel-1
:panel-2 panel-2]
[re/h-box
:size "1"
:height "100%"
:children [[re/box
:size (str initial-split)
:child panel-1]
[re/box
:size (str (- initial-split 100))
:child panel-2]]]
))
(defn scrollable*
([content] (scrollable* {} content))
([attr content]
[:div (merge {:style {:overflow-y "auto"
:height "100vh"}} attr)
content]))
(def scrollable
(with-meta scrollable*
{:component-did-mount
#(let [node (r/dom-node %)
top (.-top (.getBoundingClientRect node))]
(set! (.-style.height node) (str "calc(100vh - "top"px)")))}))
( defn scrollable [ attr content ]
(defn input
([{:keys [text on-save on-stop input-type] :as args}]
(input args "form-control"))
([{:keys [text on-save on-stop input-type]} class]
(let [val (r/atom text)
( reset ! " " )
(if on-stop (on-stop)))
save #(let [v (-> @val str clojure.string/trim)]
(when on-save
(on-save v)))]
(fn [props]
[input-type (merge
{:value @val :on-blur save
:class class
:style {:width "100%"}
:on-change #(reset! val (-> % .-target .-value))
:on-key-up #(case (.-which %)
nil)}
props)]))))
(def edit
(-> input
(with-meta {:component-did-mount
#(let [node (r/dom-node %)
n-value (count (.-value node))]
(.focus node)
(.setSelectionRange node n-value n-value)
)})))
(def edit-with-select
(-> input
(with-meta {:component-did-mount
#(let [node (r/dom-node %)
n-value (count (.-value node))]
(.focus node)
(.setSelectionRange node 0 n-value))})))
(defn save-edit-field [atom key value]
(if-not (empty? value)
(swap! atom assoc key value)
(swap! atom dissoc key)))
(defn editable [input-type atom key]
[edit
{:text (get @atom key)
:input-type input-type
:on-save (partial save-edit-field atom key)}])
(defn editable-with-select [input-type atom key]
[edit-with-select
{:text (get @atom key)
:input-type input-type
:on-save (partial save-edit-field atom key)}])
(defn live-edit
"Same as editable, but immediately updates the atom."
[input-type atom key]
(let [value (get @atom key)]
[input-type {:value value
:class "form-control"
:style {:width "100%"}
:on-change (fn [evt]
(let [temp-val (-> evt .-target .-value)
new-val (if (number? value)
(js/parseInt temp-val) temp-val)]
(if (empty? temp-val)
(swap! atom dissoc key)
(swap! atom assoc key new-val))))}]))
(defn form-group [label id body]
[:div.form-group.form-group-sm
[:label.col-sm-6.control-label {:for id} label]
[:div.col-sm-6 body]])
|
31343e436bd0ad0817e906b065b682bdaaee7cec6c8074eaadbcc03c5b181846 | tbsklg/advent-of-code-2022 | Day4.hs | module Day4 where
import Data.List.Split (splitOn)
type From = Int
type To = Int
type Assignment = (From, To)
-- | Part one: count input lines whose pair of ranges has one range
-- fully containing the other.
solve :: [String] -> Int
solve = length . filter (fullyOverlaps . convertToAssignmentPair)

-- | Part two: count input lines whose pair of ranges overlaps at all.
solvePartTwo :: [String] -> Int
solvePartTwo = length . filter (partiallyOverlaps . convertToAssignmentPair)
-- | Split a line like \"2-4,6-8\" on the comma and parse both halves.
convertToAssignmentPair :: String -> (Assignment, Assignment)
convertToAssignmentPair xs = (convertToAssignment first, convertToAssignment second)
  where
    [first, second] = splitOn "," xs

-- | Parse a single range like \"2-4\" into (2, 4). Splits once and
-- pattern-matches, instead of splitting twice with head/last (which
-- also silently accepted malformed input such as \"2-4-6\").
convertToAssignment :: String -> Assignment
convertToAssignment xs = (read from, read to)
  where
    [from, to] = splitOn "-" xs
-- | True when either range fully contains the other.
fullyOverlaps :: (Assignment, Assignment) -> Bool
fullyOverlaps (x, y) = x `within` y || y `within` x
-- | True when the two ranges share at least one section
-- (partiallyWithin is symmetric, so one direction suffices).
partiallyOverlaps :: (Assignment, Assignment) -> Bool
partiallyOverlaps (x, y) = x `partiallyWithin` y
-- | First range lies entirely inside the second.
within :: Assignment -> Assignment -> Bool
within (x, y) (x', y') = x >= x' && y <= y'
-- | Standard closed-interval intersection test.
partiallyWithin :: Assignment -> Assignment -> Bool
partiallyWithin (x, y) (x', y') = x <= y' && x' <= y
| null | https://raw.githubusercontent.com/tbsklg/advent-of-code-2022/f38d83039d97ebaf8dce342a50f1ee5b95178a03/src/Day4.hs | haskell | module Day4 where
import Data.List.Split (splitOn)
type From = Int
type To = Int
type Assignment = (From, To)
solve :: [String] -> Int
solve = length . filter (== True) . map (fullyOverlaps . convertToAssignmentPair)
solvePartTwo :: [String] -> Int
solvePartTwo = length . filter (== True) . map (partiallyOverlaps . convertToAssignmentPair)
convertToAssignmentPair :: String -> (Assignment, Assignment)
convertToAssignmentPair xs = (firstAssignment, secondAssingment)
where
[first, second] = splitOn "," xs
firstAssignment = convertToAssignment first
secondAssingment = convertToAssignment second
convertToAssignment :: String -> Assignment
convertToAssignment xs = (from, to)
where
from = read . head . splitOn "-" $ xs
to = read . last . splitOn "-" $ xs
fullyOverlaps :: (Assignment, Assignment) -> Bool
fullyOverlaps (x, y) = x `within` y || y `within` x
partiallyOverlaps :: (Assignment, Assignment) -> Bool
partiallyOverlaps (x, y) = x `partiallyWithin` y
within :: Assignment -> Assignment -> Bool
within (x, y) (x', y') = x >= x' && y <= y'
partiallyWithin :: Assignment -> Assignment -> Bool
partiallyWithin (x, y) (x', y') = x <= y' && x' <= y
|
|
b1a0c328df44c3837daa927248c30d9ca9633a055230c411a223802da9b59dda | tonyg/kali-scheme | type-scheme.scm | Copyright ( c ) 1994 . See file COPYING .
; Type schemes
; A type scheme pairs a polymorphic type with the list of type
; variables (uvars) generalized in it.
(define-record-type type-scheme
  (
   type        ; a type
   free-uvars  ; uvars that are free
   )
  ())
(define make-type-scheme type-scheme-maker)
; Disclose as (type-scheme (uvar-id ...) type) for printing/debugging.
(define-record-discloser type/type-scheme
  (lambda (type-scheme)
    (list 'type-scheme
          (map uvar-id (type-scheme-free-uvars type-scheme))
          (type-scheme-type type-scheme))))
; If TYPE has any variables bound at DEPTH this returns a type scheme making
; those variables polymorphic; otherwise TYPE is returned.
; Would like to do limited finalizing of uvars, but can't.
; Consider (lambda (g x) (tuple (g 3) (g x) x))
; (a -> b) -> c -> [d, e, f] with
; a > int8, d > b, a > c, e > b, f > c
; No polymorphism, and no simplification without restricting someone
; But consider NOT a ->b, bool > a, b > bool
; It could just as well be bool -> bool.
; Simplification okay on variables that are not used inside other types?
; Traversal accumulator shared with FIND-FREE-UVARS: collects the free
; uvars discovered while walking a type.  Reset on entry to
; SCHEMIFY-TYPE and cleared again on exit so no uvars are retained
; between calls.
(define *free-uvars* '())

(define (schemify-type type depth)
  (set! *free-uvars* '())
  (let* ((type (find-free-uvars type depth))
         (free-uvars *free-uvars*))
    (set! *free-uvars* '()) ; drop pointers
    ; FIND-FREE-UVARS marked each collected uvar by pointing its PLACE
    ; slot at itself; undo that marking now that the list is in hand.
    (for-each (lambda (uvar)
                (set-uvar-place! uvar #f))
              free-uvars)
    ; Only wrap in a scheme when something was actually quantified.
    (if (not (null? free-uvars))
        (make-type-scheme type free-uvars)
        type)))
; Return a copy of TYPE in which bound uvars are replaced by (the label
; of) their bindings.  Any unbound uvar whose depth is at least DEPTH is
; recorded once in *FREE-UVARS*, using its PLACE slot as the
; already-seen mark so duplicates are not collected.
(define (find-free-uvars type depth)
  (let label ((type type))
    (cond ((other-type? type)
           ; Compound type: rebuild with labelled subtypes.
           (make-other-type (other-type-kind type)
                            (map label
                                 (other-type-subtypes type))))
          ((not (uvar? type))
           type)
          ((uvar-binding type)
           ; Bound uvar: chase the binding instead.
           => label)
          ((and (not (uvar-place type))
                (<= depth (uvar-depth type)))
           ; Fresh free uvar at (or below) the generalization depth.
           (set-uvar-place! type type)
           (set! *free-uvars* (cons type *free-uvars*))
           type)
          (else
           type))))
; Instantiate SCHEME at DEPTH.
;
; New sequence:
; (instantiate-type-scheme scheme depth)
; ... elide bindings in new copy ...
; (clean-type-scheme scheme)
; Make a fresh copy of SCHEME's type with its quantified uvars replaced
; by new uvars created at DEPTH.  The optional thunk, if supplied, runs
; after copying but before the scheme's PLACE slots are cleaned up.
(define (instantiate-type-scheme scheme depth . maybe-thunk)
  (instantiate-type-scheme! scheme depth)
  (let ((type (copy-type (type-scheme-type scheme))))
    (if (not (null? maybe-thunk))
        ((car maybe-thunk)))
    (clean-type-scheme! scheme)
    type))
; Point each quantified uvar of SCHEME at a fresh uvar (same prefix,
; given DEPTH, one shared unique id per instantiation) via its PLACE
; slot; COPY-TYPE then substitutes through these places.
(define (instantiate-type-scheme! scheme depth)
  (let ((uid (unique-id)))
    (for-each (lambda (uvar)
                (set-uvar-place!
                 uvar
                 (make-uvar (uvar-prefix uvar) depth uid)))
              (type-scheme-free-uvars scheme))))

; Clear the PLACE slots set by INSTANTIATE-TYPE-SCHEME!.
(define (clean-type-scheme! scheme)
  (for-each (lambda (uvar)
              (set-uvar-place! uvar #f))
            (type-scheme-free-uvars scheme)))
; Structurally copy TYPE, substituting each uvar by its PLACE (the
; fresh instantiation) when set, or by a copy of its binding when
; bound; all other uvars and non-uvar leaves are shared unchanged.
(define (copy-type type)
  (cond ((other-type? type)
         (make-other-type (other-type-kind type)
                          (map copy-type
                               (other-type-subtypes type))))
        ((not (uvar? type))
         type)
        ((uvar-place type)
         => identity)
        ((uvar-binding type)
         => copy-type)
        (else
         type)))
| null | https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/ps-compiler/prescheme/type-scheme.scm | scheme | Type schemes
a type
uvars that are free
If TYPE has any variables bound at DEPTH this returns a type scheme making
those variables polymorphic; otherwise TYPE is returned.
Would like to do limited finalizing of uvars, but can't.
Consider (lambda (g x) (tuple (g 3) (g x) x))
(a -> b) -> c -> [d, e, f] with
a > int8, d > b, a > c, e > b, f > c
No polymorphism, and no simplification without restricting someone
But consider NOT a ->b, bool > a, b > bool
It could just as well be bool -> bool.
Simplification okay on variables that are not used inside other types?
drop pointers
Instantiate SCHEME at DEPTH.
New sequence:
(instantiate-type-scheme scheme depth)
... elide bindings in new copy ...
(clean-type-scheme scheme) | Copyright ( c ) 1994 . See file COPYING .
(define-record-type type-scheme
(
)
())
(define make-type-scheme type-scheme-maker)
(define-record-discloser type/type-scheme
(lambda (type-scheme)
(list 'type-scheme
(map uvar-id (type-scheme-free-uvars type-scheme))
(type-scheme-type type-scheme))))
(define *free-uvars* '())
(define (schemify-type type depth)
(set! *free-uvars* '())
(let* ((type (find-free-uvars type depth))
(free-uvars *free-uvars*))
(for-each (lambda (uvar)
(set-uvar-place! uvar #f))
free-uvars)
(if (not (null? free-uvars))
(make-type-scheme type free-uvars)
type)))
(define (find-free-uvars type depth)
(let label ((type type))
(cond ((other-type? type)
(make-other-type (other-type-kind type)
(map label
(other-type-subtypes type))))
((not (uvar? type))
type)
((uvar-binding type)
=> label)
((and (not (uvar-place type))
(<= depth (uvar-depth type)))
(set-uvar-place! type type)
(set! *free-uvars* (cons type *free-uvars*))
type)
(else
type))))
(define (instantiate-type-scheme scheme depth . maybe-thunk)
(instantiate-type-scheme! scheme depth)
(let ((type (copy-type (type-scheme-type scheme))))
(if (not (null? maybe-thunk))
((car maybe-thunk)))
(clean-type-scheme! scheme)
type))
(define (instantiate-type-scheme! scheme depth)
(let ((uid (unique-id)))
(for-each (lambda (uvar)
(set-uvar-place!
uvar
(make-uvar (uvar-prefix uvar) depth uid)))
(type-scheme-free-uvars scheme))))
(define (clean-type-scheme! scheme)
(for-each (lambda (uvar)
(set-uvar-place! uvar #f))
(type-scheme-free-uvars scheme)))
(define (copy-type type)
(cond ((other-type? type)
(make-other-type (other-type-kind type)
(map copy-type
(other-type-subtypes type))))
((not (uvar? type))
type)
((uvar-place type)
=> identity)
((uvar-binding type)
=> copy-type)
(else
type)))
|
c3cdd38a8d24f7f344e2f1bd3addaf2fd2c75730cbf62b1f7f9e662c1d6b04ca | Viasat/halite | test_bound_union.clj | Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.propagate.test-prop-composition
(:require [com.viasat.halite.propagate.bound-union :refer [union-bounds]]
[schema.core :as s]
[schema.test]
[clojure.test :refer :all]))
;; Prismatic schema validation is too slow to leave on by default for these tests.
;; If you're debugging a test failure, and the problem is a 'type' error,
;; turning schema validation on is likely to help you track it down.
;; (use-fixtures :once schema.test/validate-schemas)
;; Table-driven coverage of `union-bounds`: each row supplies two bounds
;; and the expected union.  Sections cover :Unset, integer bounds
;; (constants, enumerated :$in sets, [lo hi] ranges), boolean bounds,
;; and composite spec bounds (including :$refines-to maps and :Maybe).
(deftest test-union-bounds
  (are [a b result]
       (= result (union-bounds a b))
    :Unset :Unset :Unset
    ;; integer bounds
    1 1 1
    1 2 {:$in #{1 2}}
    1 :Unset {:$in #{1 :Unset}}
    :Unset 1 {:$in #{1 :Unset}}
    {:$in #{1 2}} 3 {:$in #{1 2 3}}
    {:$in #{1 2}} :Unset {:$in #{1 2 :Unset}}
    3 {:$in #{1 2}} {:$in #{1 2 3}}
    {:$in #{1 2}} {:$in #{2 3}} {:$in #{1 2 3}}
    {:$in [1 3]} 5 {:$in [1 5]}
    {:$in [1 3]} -4 {:$in [-4 3]}
    {:$in [1 3]} 2 {:$in [1 3]}
    {:$in [1 3]} :Unset {:$in [1 3 :Unset]}
    5 {:$in [1 3]} {:$in [1 5]}
    5 {:$in [1 3 :Unset]} {:$in [1 5 :Unset]}
    {:$in #{0 1 2}} {:$in [1 3]} {:$in [0 3]}
    {:$in [1 3]} {:$in #{0 1 2}} {:$in [0 3]}
    {:$in #{0 1 2 :Unset}} {:$in [1 3]} {:$in [0 3 :Unset]}
    {:$in [1 3 :Unset]} {:$in #{0 1 2}} {:$in [0 3 :Unset]}
    ;; boolean bounds
    true true true
    false false false
    :Unset true {:$in #{:Unset true}}
    true false {:$in #{true false}}
    {:$in #{true false}} true {:$in #{true false}}
    true {:$in #{true false}} {:$in #{true false}}
    :Unset {:$in #{true false}} {:$in #{true false :Unset}}
    true {:$in #{true false :Unset}} {:$in #{true false :Unset}}
    ;; spec-bounds
    {:$type :ws/A :a 1 :b true} {:$type :ws/A :a 2 :c {:$in [1 3]}}
    {:$type :ws/A :a {:$in #{1 2}} :b true :c {:$in [1 3]}}
    {:$type :ws/A :$refines-to {:ws/B {:n 1} :ws/D {:d {:$in [1 2]}}}}
    {:$type :ws/A :$refines-to {:ws/B {:n 2} :ws/C {:c true}}}
    {:$type :ws/A :$refines-to {:ws/B {:n {:$in #{1 2}}}}}
    {:$type :ws/A} {:$type [:Maybe :ws/A]} {:$type [:Maybe :ws/A]}
    {:$type [:Maybe :ws/A]} {:$type :ws/A} {:$type [:Maybe :ws/A]}
    :Unset {:$type :ws/A} {:$type [:Maybe :ws/A]}))
| null | https://raw.githubusercontent.com/Viasat/halite/5a434fa2276f5f2654c4d91045595ae5bbc6580e/test/com/viasat/halite/propagate/test_bound_union.clj | clojure | If you're debugging a test failure, and the problem is a 'type' error,
turning schema validation on is likely to help you track it down.
integer bounds
boolean bounds
spec-bounds | Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.propagate.test-prop-composition
(:require [com.viasat.halite.propagate.bound-union :refer [union-bounds]]
[schema.core :as s]
[schema.test]
[clojure.test :refer :all]))
Prismatic schema validation is too slow to leave on by default for these tests .
( use - fixtures : once schema.test/validate-schemas )
(deftest test-union-bounds
(are [a b result]
(= result (union-bounds a b))
:Unset :Unset :Unset
1 1 1
1 2 {:$in #{1 2}}
1 :Unset {:$in #{1 :Unset}}
:Unset 1 {:$in #{1 :Unset}}
{:$in #{1 2}} 3 {:$in #{1 2 3}}
{:$in #{1 2}} :Unset {:$in #{1 2 :Unset}}
3 {:$in #{1 2}} {:$in #{1 2 3}}
{:$in #{1 2}} {:$in #{2 3}} {:$in #{1 2 3}}
{:$in [1 3]} 5 {:$in [1 5]}
{:$in [1 3]} -4 {:$in [-4 3]}
{:$in [1 3]} 2 {:$in [1 3]}
{:$in [1 3]} :Unset {:$in [1 3 :Unset]}
5 {:$in [1 3]} {:$in [1 5]}
5 {:$in [1 3 :Unset]} {:$in [1 5 :Unset]}
{:$in #{0 1 2}} {:$in [1 3]} {:$in [0 3]}
{:$in [1 3]} {:$in #{0 1 2}} {:$in [0 3]}
{:$in #{0 1 2 :Unset}} {:$in [1 3]} {:$in [0 3 :Unset]}
{:$in [1 3 :Unset]} {:$in #{0 1 2}} {:$in [0 3 :Unset]}
true true true
false false false
:Unset true {:$in #{:Unset true}}
true false {:$in #{true false}}
{:$in #{true false}} true {:$in #{true false}}
true {:$in #{true false}} {:$in #{true false}}
:Unset {:$in #{true false}} {:$in #{true false :Unset}}
true {:$in #{true false :Unset}} {:$in #{true false :Unset}}
{:$type :ws/A :a 1 :b true} {:$type :ws/A :a 2 :c {:$in [1 3]}}
{:$type :ws/A :a {:$in #{1 2}} :b true :c {:$in [1 3]}}
{:$type :ws/A :$refines-to {:ws/B {:n 1} :ws/D {:d {:$in [1 2]}}}}
{:$type :ws/A :$refines-to {:ws/B {:n 2} :ws/C {:c true}}}
{:$type :ws/A :$refines-to {:ws/B {:n {:$in #{1 2}}}}}
{:$type :ws/A} {:$type [:Maybe :ws/A]} {:$type [:Maybe :ws/A]}
{:$type [:Maybe :ws/A]} {:$type :ws/A} {:$type [:Maybe :ws/A]}
:Unset {:$type :ws/A} {:$type [:Maybe :ws/A]}))
|
1059e85c791f4f281babeee03493de300364a95351e8f9c1e083d444f29a7348 | serokell/qtah | QLayout.hs | This file is part of Qtah .
--
Copyright 2015 - 2018 The Qtah Authors .
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details .
--
You should have received a copy of the GNU Lesser General Public License
-- along with this program. If not, see </>.
module Graphics.UI.Qtah.Generator.Interface.Widgets.QLayout (
aModule,
c_QLayout,
) where
import Foreign.Hoppy.Generator.Spec (
Export (ExportEnum, ExportClass),
addReqIncludes,
classSetEntityPrefix,
ident,
ident1,
includeStd,
makeClass,
mkBoolIsProp,
mkConstMethod,
mkMethod,
mkMethod',
mkStaticMethod,
mkProp,
)
import Foreign.Hoppy.Generator.Types (bitspaceT, boolT, enumT, intT, objT, ptrT, voidT)
import Foreign.Hoppy.Generator.Version (collect, just, test)
import Graphics.UI.Qtah.Generator.Flags (qtVersion)
import Graphics.UI.Qtah.Generator.Interface.Core.QMargins (c_QMargins)
import Graphics.UI.Qtah.Generator.Interface.Core.QObject (c_QObject)
import Graphics.UI.Qtah.Generator.Interface.Core.QRect (c_QRect)
import Graphics.UI.Qtah.Generator.Interface.Core.QSize (c_QSize)
import Graphics.UI.Qtah.Generator.Interface.Core.Types (bs_Alignment)
import Graphics.UI.Qtah.Generator.Interface.Widgets.QLayoutItem (c_QLayoutItem)
import {-# SOURCE #-} Graphics.UI.Qtah.Generator.Interface.Widgets.QWidget (c_QWidget)
import Graphics.UI.Qtah.Generator.Module (AModule (AQtModule), makeQtModule)
import Graphics.UI.Qtah.Generator.Types
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Qtah module exporting the QLayout class binding and its
-- SizeConstraint enum.
aModule =
  AQtModule $
  makeQtModule ["Widgets", "QLayout"]
  [ QtExport $ ExportClass c_QLayout
  , QtExport $ ExportEnum e_SizeConstraint
  ]
-- | Binding for Qt's abstract QLayout class.  Inherits both QObject and
-- QLayoutItem; individual members are gated on the targeted Qt version
-- via 'just' / 'test'.
c_QLayout =
  addReqIncludes [includeStd "QLayout"] $
  classSetEntityPrefix "" $
  makeClass (ident "QLayout") Nothing [c_QObject, c_QLayoutItem] $
  collect
  -- Abstract.
  [ just $ mkMethod "activate" [] boolT
  , just $ mkMethod "addItem" [ptrT $ objT c_QLayoutItem] voidT
  , just $ mkMethod "addWidget" [ptrT $ objT c_QWidget] voidT
  , just $ mkStaticMethod "closestAcceptableSize"
    [ptrT $ objT c_QWidget, objT c_QSize] $ objT c_QSize
  , test (qtVersion >= [4, 6]) $ mkConstMethod "contentsMargins" [] $ objT c_QMargins
  , test (qtVersion >= [4, 3]) $ mkConstMethod "contentsRect" [] $ objT c_QRect
  , just $ mkConstMethod "count" [] intT
  , just $ mkBoolIsProp "enabled"
  , just $ mkConstMethod "indexOf" [ptrT $ objT c_QWidget] intT
  , just $ mkConstMethod "itemAt" [intT] $ ptrT $ objT c_QLayoutItem
  , just $ mkProp "menuBar" $ ptrT $ objT c_QWidget
  , just $ mkConstMethod "parentWidget" [] $ ptrT $ objT c_QWidget
  , just $ mkMethod "removeItem" [ptrT $ objT c_QLayoutItem] voidT
  , just $ mkMethod "removeWidget" [ptrT $ objT c_QWidget] voidT
    -- setAlignment is overloaded; each C++ overload gets its own
    -- Haskell-side name via mkMethod'.
  , just $ mkMethod' "setAlignment" "setAlignment" [bitspaceT bs_Alignment] voidT
  , just $ mkMethod' "setAlignment" "setLayoutAlignment"
    [ptrT $ objT c_QLayout, bitspaceT bs_Alignment] boolT
  , just $ mkMethod' "setAlignment" "setWidgetAlignment"
    [ptrT $ objT c_QWidget, bitspaceT bs_Alignment] boolT
  , test (qtVersion >= [4, 6]) $ mkMethod' "setContentsMargins" "setContentsMargins"
    [objT c_QMargins] voidT
  , test (qtVersion >= [4, 3]) $ mkMethod' "setContentsMargins" "setContentsMarginsRaw"
    [intT, intT, intT, intT] voidT
  , just $ mkProp "sizeConstraint" $ enumT e_SizeConstraint
  , just $ mkProp "spacing" intT
  , just $ mkMethod "takeAt" [intT] $ ptrT $ objT c_QLayoutItem
  , just $ mkMethod "update" [] voidT
  ]
-- | Binding for the QLayout::SizeConstraint enum; entries pair the C++
-- enum value with the words forming the Haskell-side constant name.
e_SizeConstraint =
  makeQtEnum (ident1 "QLayout" "SizeConstraint") [includeStd "QLayout"]
  [ (0, ["set", "default", "size", "constraint"])
  , (1, ["set", "no", "constraint"])
  , (2, ["set", "minimum", "size"])
  , (3, ["set", "fixed", "size"])
  , (4, ["set", "maximum", "size"])
  , (5, ["set", "min", "and", "max", "size"])
  ]
| null | https://raw.githubusercontent.com/serokell/qtah/abb4932248c82dc5c662a20d8f177acbc7cfa722/qtah-generator/src/Graphics/UI/Qtah/Generator/Interface/Widgets/QLayout.hs | haskell |
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
# SOURCE #
# ANN module "HLint: ignore Use camelCase" #
Abstract. | This file is part of Qtah .
Copyright 2015 - 2018 The Qtah Authors .
the Free Software Foundation , either version 3 of the License , or
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
module Graphics.UI.Qtah.Generator.Interface.Widgets.QLayout (
aModule,
c_QLayout,
) where
import Foreign.Hoppy.Generator.Spec (
Export (ExportEnum, ExportClass),
addReqIncludes,
classSetEntityPrefix,
ident,
ident1,
includeStd,
makeClass,
mkBoolIsProp,
mkConstMethod,
mkMethod,
mkMethod',
mkStaticMethod,
mkProp,
)
import Foreign.Hoppy.Generator.Types (bitspaceT, boolT, enumT, intT, objT, ptrT, voidT)
import Foreign.Hoppy.Generator.Version (collect, just, test)
import Graphics.UI.Qtah.Generator.Flags (qtVersion)
import Graphics.UI.Qtah.Generator.Interface.Core.QMargins (c_QMargins)
import Graphics.UI.Qtah.Generator.Interface.Core.QObject (c_QObject)
import Graphics.UI.Qtah.Generator.Interface.Core.QRect (c_QRect)
import Graphics.UI.Qtah.Generator.Interface.Core.QSize (c_QSize)
import Graphics.UI.Qtah.Generator.Interface.Core.Types (bs_Alignment)
import Graphics.UI.Qtah.Generator.Interface.Widgets.QLayoutItem (c_QLayoutItem)
import Graphics.UI.Qtah.Generator.Module (AModule (AQtModule), makeQtModule)
import Graphics.UI.Qtah.Generator.Types
aModule =
AQtModule $
makeQtModule ["Widgets", "QLayout"]
[ QtExport $ ExportClass c_QLayout
, QtExport $ ExportEnum e_SizeConstraint
]
c_QLayout =
addReqIncludes [includeStd "QLayout"] $
classSetEntityPrefix "" $
makeClass (ident "QLayout") Nothing [c_QObject, c_QLayoutItem] $
collect
[ just $ mkMethod "activate" [] boolT
, just $ mkMethod "addItem" [ptrT $ objT c_QLayoutItem] voidT
, just $ mkMethod "addWidget" [ptrT $ objT c_QWidget] voidT
, just $ mkStaticMethod "closestAcceptableSize"
[ptrT $ objT c_QWidget, objT c_QSize] $ objT c_QSize
, test (qtVersion >= [4, 6]) $ mkConstMethod "contentsMargins" [] $ objT c_QMargins
, test (qtVersion >= [4, 3]) $ mkConstMethod "contentsRect" [] $ objT c_QRect
, just $ mkConstMethod "count" [] intT
, just $ mkBoolIsProp "enabled"
, just $ mkConstMethod "indexOf" [ptrT $ objT c_QWidget] intT
, just $ mkConstMethod "itemAt" [intT] $ ptrT $ objT c_QLayoutItem
, just $ mkProp "menuBar" $ ptrT $ objT c_QWidget
, just $ mkConstMethod "parentWidget" [] $ ptrT $ objT c_QWidget
, just $ mkMethod "removeItem" [ptrT $ objT c_QLayoutItem] voidT
, just $ mkMethod "removeWidget" [ptrT $ objT c_QWidget] voidT
, just $ mkMethod' "setAlignment" "setAlignment" [bitspaceT bs_Alignment] voidT
, just $ mkMethod' "setAlignment" "setLayoutAlignment"
[ptrT $ objT c_QLayout, bitspaceT bs_Alignment] boolT
, just $ mkMethod' "setAlignment" "setWidgetAlignment"
[ptrT $ objT c_QWidget, bitspaceT bs_Alignment] boolT
, test (qtVersion >= [4, 6]) $ mkMethod' "setContentsMargins" "setContentsMargins"
[objT c_QMargins] voidT
, test (qtVersion >= [4, 3]) $ mkMethod' "setContentsMargins" "setContentsMarginsRaw"
[intT, intT, intT, intT] voidT
, just $ mkProp "sizeConstraint" $ enumT e_SizeConstraint
, just $ mkProp "spacing" intT
, just $ mkMethod "takeAt" [intT] $ ptrT $ objT c_QLayoutItem
, just $ mkMethod "update" [] voidT
]
e_SizeConstraint =
makeQtEnum (ident1 "QLayout" "SizeConstraint") [includeStd "QLayout"]
[ (0, ["set", "default", "size", "constraint"])
, (1, ["set", "no", "constraint"])
, (2, ["set", "minimum", "size"])
, (3, ["set", "fixed", "size"])
, (4, ["set", "maximum", "size"])
, (5, ["set", "min", "and", "max", "size"])
]
|
27059a77c65814c915d6bdf710f9ab9af60c71d47ba9d810745f6ac5ffe43542 | migamake/homplexity | CodeFragment.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
-- | This module generalizes over types of code fragments
-- that may need to be iterated upon and measured separately.
module Language.Haskell.Homplexity.CodeFragment (
CodeFragment (fragmentName, fragmentSlice)
, occurs
, occursOf
, allOccurs
, allOccursOf
, Program (..)
, programT
, program
, Module (..)
, moduleT
, Function (..)
, functionT
, DataDef (..)
, dataDefT
, TypeSignature (..)
, typeSignatureT
, TypeClass (..)
, typeClassT
, fragmentLoc
  -- TODO: add ClassSignature
) where
import Data.Data
-- import Data.Functor
import Data.Generics.Uniplate.Data
import Data.List
import Data.Maybe
-- import Data.Monoid
import Language.Haskell.Exts.SrcLoc
import Language.Haskell.Exts.Syntax
import Language.Haskell.Homplexity.SrcSlice
import Language.Haskell.Homplexity.Utilities
-- | Program: the root code fragment, wrapping all parsed modules.
newtype Program = Program { allModules :: [Module SrcLoc] }
  deriving (Data, Typeable, Show)

-- | Smart constructor for adding cross-references in the future.
program :: [Module SrcLoc] -> Program
program = Program

-- | Proxy for passing the @Program@ type as an argument.
programT :: Proxy Program
programT = Proxy
-- * Type aliases for type-based matching of substructures

-- | Alias for a function declaration
data Function = Function {
    functionNames :: [String]       -- ^ names the binding introduces
  , functionLocations :: [SrcLoc]   -- ^ source location of each clause
  , functionRhs :: [Rhs SrcLoc]     -- ^ right-hand side of each clause
  , functionBinds :: [Binds SrcLoc] -- ^ local @where@\/@let@ bindings
  }
  deriving (Data, Typeable, Show)

-- | Proxy for passing @Function@ type as an argument.
functionT :: Proxy Function
functionT = Proxy

-- | Alias for a @data@ declaration
data DataDef = DataDef {
    dataDefName :: String -- ^ name of the declared type constructor
  , dataDefCtors :: Either [QualConDecl SrcLoc] [GadtDecl SrcLoc]
    -- ^ ordinary constructors ('Left') or GADT-style ones ('Right')
  }
  deriving (Data, Typeable, Show)

-- | Proxy for passing @DataDef@ type as an argument.
dataDefT :: Proxy DataDef
dataDefT = Proxy
-- ** Type signature of a function

-- | Type alias for a type signature of a function as a @CodeFragment@
data TypeSignature = TypeSignature { loc :: SrcLoc
                                   , identifiers :: [Name SrcLoc]
                                   , theType :: Type SrcLoc }
  deriving (Data, Typeable, Show)

-- | Proxy for passing @TypeSignature@ type as an argument.
typeSignatureT :: Proxy TypeSignature
typeSignatureT = Proxy

-- ** TODO: class signatures (number of function decls inside)

-- | Alias for a class signature; tcDecls is the optional class body.
data TypeClass = TypeClass { tcName :: String
                           , tcDecls :: Maybe [ClassDecl SrcLoc]
                           }
  deriving (Data, Typeable, Show)

-- | Proxy for passing @TypeClass@ type as an argument.
typeClassT :: Proxy TypeClass
typeClassT = Proxy
-- TODO: need combination of Fold and Biplate
-- Resulting record may be created to make pa

-- | Class @CodeFragment@ allows for:
-- * both selecting direct or all descendants
--   of the given type of object within another structure
--   (with @occurs@ and @allOccurs@)
-- * naming the object to allow user to distinguish it.
--
-- In order to compute selection, we just need to know which
-- @AST@ nodes contain the given object, and how to extract
-- this given object from @AST@, if it is there (@matchAST@).
class (Show c, Data (AST c), Data c) => CodeFragment c where
  -- | AST node type from which this fragment can be extracted.
  type AST c
  -- | Try to extract a fragment from a single AST node.
  matchAST :: AST c -> Maybe c
  -- | Human-readable name used in reports.
  fragmentName :: c -> String
  -- | Source span of the fragment; defaults to the computed 'srcSlice'.
  fragmentSlice :: c -> SrcSlice
  fragmentSlice = srcSlice

-- | First location for each @CodeFragment@ - for convenient reporting.
fragmentLoc :: (CodeFragment c) => c -> SrcLoc
fragmentLoc = getPointLoc
            . fragmentSlice
instance CodeFragment Function where
  type AST Function = Decl SrcLoc
  -- A FunBind yields one Function for the whole clause group: the
  -- location, rhs and where-binds of every clause are collected, while
  -- the name is taken from the first clause only ('take 1').
  matchAST (FunBind _ matches) = Just
      Function {..}
    where
      (functionLocations,
       (unName <$>) . take 1 -> functionNames,
       functionRhs,
       catMaybes -> functionBinds) = unzip4 $ map extract matches
      extract (Match srcLoc name _ rhs binds) = (srcLoc, name, rhs, binds)
      extract (InfixMatch srcLoc _ name _ rhs binds) = (srcLoc, name, rhs, binds)
      extract other = error $ "Undocumented constructor: " <> show other
  -- A pattern binding counts as a function too: every variable bound by
  -- the pattern becomes a name, and each wildcard contributes "..".
  matchAST (PatBind (singleton -> functionLocations) pat
      (singleton -> functionRhs )
      (maybeToList -> functionBinds )) = Just Function {..}
    where
      functionNames = wildcards ++ map unName (universeBi pat :: [Name SrcLoc])
      wildcards = mapMaybe wildcard (universe pat)
        where
          wildcard PWildCard {} = Just ".."
          wildcard _ = Nothing
  matchAST _ = Nothing
  fragmentName Function {..} = unwords $ "function":functionNames
instance CodeFragment DataDef where
  type AST DataDef = Decl SrcLoc
  -- Ordinary @data@ declaration: name from the declaration head,
  -- constructors stored as 'Left'.
  matchAST (DataDecl _ _ _ declHead qualConDecls _) = do
    name <- listToMaybe (universeBi declHead :: [Name SrcLoc])
    pure DataDef { dataDefName = unName name, dataDefCtors = Left qualConDecls }
  -- GADT-style declaration: same, with constructors stored as 'Right'.
  matchAST (GDataDecl _ _ _ declHead _ gadtDecls _) = do
    name <- listToMaybe (universeBi declHead :: [Name SrcLoc])
    pure DataDef { dataDefName = unName name, dataDefCtors = Right gadtDecls }
  matchAST _ = Nothing
  fragmentName DataDef {..} = "data " ++ dataDefName

-- | Make a single element list.
singleton :: a -> [a]
singleton = (:[])
-- | Direct occurrences of given @CodeFragment@ fragment within another structure.
occurs :: (CodeFragment c, Data from) => from -> [c]
occurs = mapMaybe matchAST . childrenBi

-- | Explicitly typed variant of @occurs@.
occursOf :: (Data from, CodeFragment c) => Proxy c -> from -> [c]
occursOf _ = occurs

-- | Occurrences at any depth of given @CodeFragment@ within a structure.
allOccurs :: (CodeFragment c, Data from) => from -> [c]
allOccurs = mapMaybe matchAST . universeBi

-- | Explicitly typed variant of @allOccurs@.
allOccursOf :: (Data from, CodeFragment c) => Proxy c -> from -> [c]
allOccursOf _ = allOccurs
instance CodeFragment Program where
  type AST Program = Program
  matchAST = Just
  fragmentName _ = "program"

instance CodeFragment (Module SrcLoc) where
  type AST (Module SrcLoc)= Module SrcLoc
  matchAST = Just
  -- Name the module after its header when present; XML page and hybrid
  -- modules get a descriptive prefix instead.
  fragmentName (Module _ (Just (ModuleHead _ (ModuleName _ theName) _ _)) _ _ _) =
    "module " ++ theName
  fragmentName (Module _ Nothing _ _ _) =
    "<unnamed module>"
  fragmentName (XmlPage _ (ModuleName _ theName) _ _ _ _ _) = "XML page " ++ theName
  fragmentName (XmlHybrid _ (Just (ModuleHead _ (ModuleName _ theName) _ _))
                _ _ _ _ _ _ _) = "module with XML " ++ theName
  fragmentName (XmlHybrid _ Nothing _ _ _ _ _ _ _ ) = "<unnamed module with XML>"

-- | Proxy for passing @Module@ type as an argument.
moduleT :: Proxy (Module SrcLoc)
moduleT = Proxy
instance CodeFragment TypeSignature where
  type AST TypeSignature = Decl SrcLoc
  -- Only standalone TypeSig declarations qualify.
  matchAST (TypeSig loc identifiers theType) = Just TypeSignature {..}
  matchAST _ = Nothing
  fragmentName TypeSignature {..} = "type signature for "
                                 ++ intercalate ", " (map unName identifiers)

instance CodeFragment TypeClass where
  type AST TypeClass = Decl SrcLoc
  -- Only class declarations qualify; the (optional) body is kept whole.
  matchAST (ClassDecl _ _ declHead _ classDecls)
    = Just $ TypeClass (unName . declHeadName $ declHead) classDecls
  matchAST _ = Nothing
  fragmentName (TypeClass tcName _) = "type class " ++ tcName
-- | Unpack a @Name@ identifier (symbolic or alphanumeric) into a @String@.
unName :: Name a -> String
unName (Symbol _ s) = s
unName (Ident _ i) = i
| null | https://raw.githubusercontent.com/migamake/homplexity/ddc2af9f988f1dbcc266c2f804fe6c599180be09/lib/Language/Haskell/Homplexity/CodeFragment.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
| This module generalizes over types of code fragments
that may need to be iterated upon and measured separately.
| Program
| Smart constructor for adding cross-references in the future.
* Type aliases for type-based matching of substructures
| Alias for a function declaration
| Proxy for passing @Function@ type as an argument.
| Alias for a @data@ declaration
** Type signature of a function
| Type alias for a type signature of a function as a @CodeFragment@
| Proxy for passing @TypeSignature@ type as an argument.
** TODO: class signatures (number of function decls inside)
| Alias for a class signature
| Proxy for passing @TypeClass@ type as an argument.
Resulting record may be created to make pa
| Class @CodeFragment@ allows for:
* both selecting direct or all descendants
of the given type of object within another structure
(with @occurs@ and @allOccurs@)
* naming the object to allow user to distinguish it.
In order to compute selection, we just need to know which
| Make a single element list.
| Direct occurences of given @CodeFragment@ fragment within another structure.
| Explicitly typed variant of @occurs@.
| Explicitly typed variant of @allOccurs@.
| Proxy for passing @Module@ type as an argument. | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module Language.Haskell.Homplexity.CodeFragment (
CodeFragment (fragmentName, fragmentSlice)
, occurs
, occursOf
, allOccurs
, allOccursOf
, Program (..)
, programT
, program
, Module (..)
, moduleT
, Function (..)
, functionT
, DataDef (..)
, dataDefT
, TypeSignature (..)
, typeSignatureT
, TypeClass (..)
, typeClassT
, fragmentLoc
TODO : add ClassSignature
) where
import Data.Data
import Data . Functor
import Data.Generics.Uniplate.Data
import Data.List
import Data.Maybe
import Data . Monoid
import Language.Haskell.Exts.SrcLoc
import Language.Haskell.Exts.Syntax
import Language.Haskell.Homplexity.SrcSlice
import Language.Haskell.Homplexity.Utilities
newtype Program = Program { allModules :: [Module SrcLoc] }
deriving (Data, Typeable, Show)
program :: [Module SrcLoc] -> Program
program = Program
| Proxy for passing type as an argument .
programT :: Proxy Program
programT = Proxy
data Function = Function {
functionNames :: [String]
, functionLocations :: [SrcLoc]
, functionRhs :: [Rhs SrcLoc]
, functionBinds :: [Binds SrcLoc]
}
deriving (Data, Typeable, Show)
functionT :: Proxy Function
functionT = Proxy
data DataDef = DataDef {
dataDefName :: String
, dataDefCtors :: Either [QualConDecl SrcLoc] [GadtDecl SrcLoc]
}
deriving (Data, Typeable, Show)
| Proxy for passing @DataDef@ type as an argument .
dataDefT :: Proxy DataDef
dataDefT = Proxy
data TypeSignature = TypeSignature { loc :: SrcLoc
, identifiers :: [Name SrcLoc]
, theType :: Type SrcLoc }
deriving (Data, Typeable, Show)
typeSignatureT :: Proxy TypeSignature
typeSignatureT = Proxy
data TypeClass = TypeClass { tcName :: String
, tcDecls :: Maybe [ClassDecl SrcLoc]
}
deriving (Data, Typeable, Show)
typeClassT :: Proxy TypeClass
typeClassT = Proxy
TODO : need combination of Fold and Biplate
@AST@ nodes contain the given object , and how to extract
this given object from @AST@ , if it is there ( @matchAST@).:w
class (Show c, Data (AST c), Data c) => CodeFragment c where
type AST c
matchAST :: AST c -> Maybe c
fragmentName :: c -> String
fragmentSlice :: c -> SrcSlice
fragmentSlice = srcSlice
| First location for each @CodeFragment@ - for convenient reporting .
fragmentLoc :: (CodeFragment c) => c -> SrcLoc
fragmentLoc = getPointLoc
. fragmentSlice
instance CodeFragment Function where
type AST Function = Decl SrcLoc
matchAST (FunBind _ matches) = Just
Function {..}
where
(functionLocations,
(unName <$>) . take 1 -> functionNames,
functionRhs,
catMaybes -> functionBinds) = unzip4 $ map extract matches
extract (Match srcLoc name _ rhs binds) = (srcLoc, name, rhs, binds)
extract (InfixMatch srcLoc _ name _ rhs binds) = (srcLoc, name, rhs, binds)
extract other = error $ "Undocumented constructor: " <> show other
matchAST (PatBind (singleton -> functionLocations) pat
(singleton -> functionRhs )
(maybeToList -> functionBinds )) = Just Function {..}
where
functionNames = wildcards ++ map unName (universeBi pat :: [Name SrcLoc])
wildcards = mapMaybe wildcard (universe pat)
where
wildcard PWildCard {} = Just ".."
wildcard _ = Nothing
matchAST _ = Nothing
fragmentName Function {..} = unwords $ "function":functionNames
instance CodeFragment DataDef where
type AST DataDef = Decl SrcLoc
matchAST (DataDecl _ _ _ declHead qualConDecls _) = do
name <- listToMaybe (universeBi declHead :: [Name SrcLoc])
pure DataDef { dataDefName = unName name, dataDefCtors = Left qualConDecls }
matchAST (GDataDecl _ _ _ declHead _ gadtDecls _) = do
name <- listToMaybe (universeBi declHead :: [Name SrcLoc])
pure DataDef { dataDefName = unName name, dataDefCtors = Right gadtDecls }
matchAST _ = Nothing
fragmentName DataDef {..} = "data " ++ dataDefName
singleton :: a -> [a]
singleton = (:[])
occurs :: (CodeFragment c, Data from) => from -> [c]
occurs = mapMaybe matchAST . childrenBi
occursOf :: (Data from, CodeFragment c) => Proxy c -> from -> [c]
occursOf _ = occurs
allOccurs :: (CodeFragment c, Data from) => from -> [c]
allOccurs = mapMaybe matchAST . universeBi
allOccursOf :: (Data from, CodeFragment c) => Proxy c -> from -> [c]
allOccursOf _ = allOccurs
instance CodeFragment Program where
type AST Program = Program
matchAST = Just
fragmentName _ = "program"
instance CodeFragment (Module SrcLoc) where
type AST (Module SrcLoc)= Module SrcLoc
matchAST = Just
fragmentName (Module _ (Just (ModuleHead _ (ModuleName _ theName) _ _)) _ _ _) =
"module " ++ theName
fragmentName (Module _ Nothing _ _ _) =
"<unnamed module>"
fragmentName (XmlPage _ (ModuleName _ theName) _ _ _ _ _) = "XML page " ++ theName
fragmentName (XmlHybrid _ (Just (ModuleHead _ (ModuleName _ theName) _ _))
_ _ _ _ _ _ _) = "module with XML " ++ theName
fragmentName (XmlHybrid _ Nothing _ _ _ _ _ _ _ ) = "<unnamed module with XML>"
moduleT :: Proxy (Module SrcLoc)
moduleT = Proxy
instance CodeFragment TypeSignature where
type AST TypeSignature = Decl SrcLoc
matchAST (TypeSig loc identifiers theType) = Just TypeSignature {..}
matchAST _ = Nothing
fragmentName TypeSignature {..} = "type signature for "
++ intercalate ", " (map unName identifiers)
instance CodeFragment TypeClass where
type AST TypeClass = Decl SrcLoc
matchAST (ClassDecl _ _ declHead _ classDecls)
= Just $ TypeClass (unName . declHeadName $ declHead) classDecls
matchAST _ = Nothing
fragmentName (TypeClass tcName _) = "type class " ++ tcName
| Unpack @Name@ identifier into a @String@.
unName :: Name a -> String
unName (Symbol _ s) = s
unName (Ident _ i) = i
|
59dd006637e3b7adac8c2b803e3cb8551461e747fbb0ecd3285b641677e4a1d5 | stylewarning/deprecated-coalton-prototype | parse-type.lisp | ;;;; parse-type.lisp
(in-package #:coalton-impl)
;;; Grammar:
;;;
< type expr > : = < type alias > ; TODO !
;;; | <type variable>
;;; | <nullary type constructor>
;;; | (fn <type expr>* -> <type-expr>)
;;; | (<type constructor> <type expr>*)
(defun parse-type-expression (whole-expr &key variable-assignments
extra-tycons)
"Parse the type expression WHOLE-EXPR. Return two values:
1. The parsed expression.
2. An a-list of symbol -> TYVAR pairs.
VARIABLE-ASSIGNMENTS is an alist of (SYMBOL TYVAR) pairs.
EXTRA-TYCONS is a list of tycons that are perhaps not globally defined yet. These will be preferred over global definitions.
"
;; Below, TABLE is a mapping from symbols to fresh type variables.
(let ((table (alexandria:alist-hash-table variable-assignments)))
(labels ((knownp (name)
(or (find name extra-tycons :key #'tycon-name)
(tycon-knownp name)))
(find-it (name)
(or (find name extra-tycons :key #'tycon-name)
(find-tycon name)))
(parse-variable (expr)
(check-type expr symbol)
(or (gethash expr table)
(setf (gethash expr table) (make-variable))))
(parse-nullary-constructor (expr)
(check-type expr symbol)
(unless (knownp expr)
(error-parsing whole-expr "Unknown type constructor ~S" expr))
(tyapp (find-it expr)))
(parse-function (expr)
(let ((arrow (position 'coalton:-> expr)))
(when (null arrow)
(error-parsing whole-expr "Invalid function type because it lacks an arrow: ~S" expr))
exclude FN symbol
(to (subseq expr (1+ arrow))))
(cond
((null to) (error-parsing whole-expr "Can't have an empty return type in function type: ~S" expr))
((not (null (rest to))) (error-parsing whole-expr "Can't have more than one return type in function type: ~S" expr)))
;; parse out the input and output types
(setf from (mapcar #'parse from))
(setf to (parse (first to)))
;; return the parsed type
(tyfun from to))))
(parse-application (expr)
(cond
;; Old syntax for doing function types not supported.
((eq 'coalton:-> (first expr))
(error-parsing whole-expr "Function types have syntax (FN <ty>* -> <ty>). Got: ~S" expr))
;; New syntax for doing function types.
((eq 'coalton:fn (first expr)) (parse-function expr))
;; Other applications.
(t
(destructuring-bind (tycon &rest args) expr
(unless (symbolp tycon)
(error-parsing whole-expr "Invalid part of type expression: ~S" tycon))
(unless (knownp tycon)
(error-parsing whole-expr "Unknown type constructor ~S" tycon))
;; TODO: Make sure arity is correct!
(apply #'tyapp
(find-it tycon)
(mapcar #'parse args))))))
(parse (expr)
(typecase expr
;; TODO: Allow () for something useful?
(null
(error-parsing whole-expr "Invalid type expression: ~S" expr))
(symbol
(if (knownp expr)
(parse-nullary-constructor expr)
(parse-variable expr)))
(alexandria:proper-list
(parse-application expr))
(t
(error-parsing whole-expr "Invalid type expression: ~S" expr)))))
(values (parse whole-expr)
(alexandria:hash-table-alist table)))))
| null | https://raw.githubusercontent.com/stylewarning/deprecated-coalton-prototype/4a42ffb4222fde3abfd1b50d96e455ff2eef9fe8/src/parse-type.lisp | lisp | parse-type.lisp
Grammar:
TODO !
| <type variable>
| <nullary type constructor>
| (fn <type expr>* -> <type-expr>)
| (<type constructor> <type expr>*)
Below, TABLE is a mapping from symbols to fresh type variables.
parse out the input and output types
return the parsed type
Old syntax for doing function types not supported.
New syntax for doing function types.
Other applications.
TODO: Make sure arity is correct!
TODO: Allow () for something useful? |
(in-package #:coalton-impl)
(defun parse-type-expression (whole-expr &key variable-assignments
extra-tycons)
"Parse the type expression WHOLE-EXPR. Return two values:
1. The parsed expression.
2. An a-list of symbol -> TYVAR pairs.
VARIABLE-ASSIGNMENTS is an alist of (SYMBOL TYVAR) pairs.
EXTRA-TYCONS is a list of tycons that are perhaps not globally defined yet. These will be preferred over global definitions.
"
(let ((table (alexandria:alist-hash-table variable-assignments)))
(labels ((knownp (name)
(or (find name extra-tycons :key #'tycon-name)
(tycon-knownp name)))
(find-it (name)
(or (find name extra-tycons :key #'tycon-name)
(find-tycon name)))
(parse-variable (expr)
(check-type expr symbol)
(or (gethash expr table)
(setf (gethash expr table) (make-variable))))
(parse-nullary-constructor (expr)
(check-type expr symbol)
(unless (knownp expr)
(error-parsing whole-expr "Unknown type constructor ~S" expr))
(tyapp (find-it expr)))
(parse-function (expr)
(let ((arrow (position 'coalton:-> expr)))
(when (null arrow)
(error-parsing whole-expr "Invalid function type because it lacks an arrow: ~S" expr))
exclude FN symbol
(to (subseq expr (1+ arrow))))
(cond
((null to) (error-parsing whole-expr "Can't have an empty return type in function type: ~S" expr))
((not (null (rest to))) (error-parsing whole-expr "Can't have more than one return type in function type: ~S" expr)))
(setf from (mapcar #'parse from))
(setf to (parse (first to)))
(tyfun from to))))
(parse-application (expr)
(cond
((eq 'coalton:-> (first expr))
(error-parsing whole-expr "Function types have syntax (FN <ty>* -> <ty>). Got: ~S" expr))
((eq 'coalton:fn (first expr)) (parse-function expr))
(t
(destructuring-bind (tycon &rest args) expr
(unless (symbolp tycon)
(error-parsing whole-expr "Invalid part of type expression: ~S" tycon))
(unless (knownp tycon)
(error-parsing whole-expr "Unknown type constructor ~S" tycon))
(apply #'tyapp
(find-it tycon)
(mapcar #'parse args))))))
(parse (expr)
(typecase expr
(null
(error-parsing whole-expr "Invalid type expression: ~S" expr))
(symbol
(if (knownp expr)
(parse-nullary-constructor expr)
(parse-variable expr)))
(alexandria:proper-list
(parse-application expr))
(t
(error-parsing whole-expr "Invalid type expression: ~S" expr)))))
(values (parse whole-expr)
(alexandria:hash-table-alist table)))))
|
12e783864aa0aec8690a567defcfbc3151f6ef0fad9d73ddc10b517e783deae0 | anurudhp/CPHaskell | Week8.hs | module Week8 where
import Control.Monad (unless, void)
import Data.Bifunctor (Bifunctor(first))
import Data.Char (isAlphaNum)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
-- Exercise 1: A `Functor` instance
-- Functor laws
-- fmap id == id
fmap ( f . ) = = fmap f .
data ComplicatedA a b
= Con1 a b
| Con2 [Maybe (a -> b)]
data ComplicatedB f g a b
= Con3 (f a)
| Con4 (g b)
| Con5 (g (g [b]))
instance Functor (ComplicatedA a) where
fmap f (Con1 x y) = Con1 x (f y)
fmap f (Con2 xs) = Con2 (map (fmap (f .)) xs)
instance Functor g => Functor (ComplicatedB f g a) where
fmap _ (Con3 x) = Con3 x
fmap f (Con4 y) = Con4 (fmap f y)
fmap f (Con5 zs) = Con5 (fmap (fmap (fmap f)) zs)
Exercise 2 : Rewriting monadic code
func0 :: Monad f => (a -> a) -> f a -> f a
func0 f xs = do
f . f <$> xs
func0' :: Functor f => (a -> a) -> f a -> f a
func0' f xs = f . f <$> xs
func1 :: Monad f => f a -> f (a, a)
func1 xs = xs >>= (\x -> return (x, x))
func1' :: Functor f => f a -> f (a, a)
func1' xs = (\x -> (x, x)) <$> xs
func2 :: Monad f => f a -> f (a, a)
func2 xs = xs >>= (\x -> xs >>= \y -> return (x, y))
func3 :: Monad f => f a -> f (a, a)
func3 xs = xs >>= (\x -> xs >> return (x, x))
func4 :: Monad f => f a -> f a -> f (a, a)
func4 xs ys = xs >>= (\x -> ys >>= \y -> return (x, y))
func4' :: Applicative f => f a -> f a -> f (a, a)
func4' xs ys = (,) <$> xs <*> ys
func5 :: Monad f => f Integer -> f Integer -> f Integer
func5 xs ys = do
x <- xs
let x' = x + 1
y <- (+ 1) <$> ys
return (x' + y)
func5' :: Applicative f => f Integer -> f Integer -> f Integer
func5' xs ys = (+) <$> ((+ 1) <$> xs) <*> ((+ 1) <$> ys)
func6 :: Monad f => f Integer -> f (Integer, Integer)
func6 xs = do
x <- xs
return $
if x > 0
then (x, 0)
else (0, x)
func6' :: Functor f => f Integer -> f (Integer, Integer)
func6' =
fmap
(\x ->
if x > 0
then (x, 0)
else (0, x))
func7 :: Monad f => f Integer -> f (Integer, Integer)
func7 xs = do
x <- xs
if x > 0
then return (x, 0)
else return (0, x)
func8 :: Monad f => f Integer -> Integer -> f Integer
func8 xs x = ((+) <$> xs) <*> pure x
func8' :: Applicative f => f Integer -> Integer -> f Integer
func8' xs x = ((+) <$> xs) <*> pure x
func9 :: Monad f => f Integer -> f Integer -> f Integer -> f Integer
func9 xs ys zs =
xs >>= \x ->
if even x
then ys
else zs
func10 :: Monad f => f Integer -> f Integer
func10 xs = do
x <- xs >>= (\x -> return (x * x))
return (x + 10)
func10' :: Applicative f => f Integer -> f Integer
func10' xs = (+ 10) . (\x -> x * x) <$> xs
Exercise 3 : A parser monad
data Parser a =
P (String -> Maybe (a, String))
runParser :: Parser a -> String -> Maybe (a, String)
runParser (P p) = p
parse :: Parser a -> String -> Maybe a
parse p s = do
(a, s') <- runParser p s
if not (null s')
then Nothing
else return a
noParser :: Parser a
noParser = P (const Nothing)
pureParser :: a -> Parser a
pureParser a = P (\s -> Just (a, s))
instance Functor Parser where
fmap f p = P (fmap (first f) . runParser p)
instance Applicative Parser where
pure = pureParser
fp <*> fx =
P
(\s ->
case runParser fp s of
Nothing -> Nothing
Just (f, s') ->
case runParser fx s' of
Nothing -> Nothing
Just (a, s'') -> Just (f a, s''))
instance Monad Parser where
return = pureParser
fa >>= k =
P
(\s ->
case runParser fa s of
Nothing -> Nothing
Just (a, s') -> runParser (k a) s')
anyChar :: Parser Char
anyChar =
P (\s ->
if null s
then Nothing
else Just (head s, tail s))
char :: Char -> Parser ()
char c = do
c' <- anyChar
unless (c' == c) noParser
anyCharBut :: Char -> Parser Char
anyCharBut c = do
c' <- anyChar
if c /= c'
then return c'
else noParser
orElse :: Parser a -> Parser a -> Parser a
orElse p q =
P (\s ->
case runParser p s of
Nothing -> runParser q s
Just (a, s') -> Just (a, s'))
many :: Parser a -> Parser [a]
many p =
(do a <- p
as <- many p
return (a : as)) `orElse`
pure []
sepBy :: Parser a -> Parser () -> Parser [a]
sepBy pa ps =
(do a <- pa
(do ps
(a :) <$> sepBy pa ps) `orElse`
return [a]) `orElse`
return []
parseCSV :: Parser [[String]]
parseCSV = many parseLine
where
parseLine = parseCell `sepBy` char ',' <* char '\n'
parseCell = do
char '"'
content <- many (anyCharBut '"')
char '"'
return content
Exercise 4 : Parsing an INI file
type Identifer = String
type Declaration = (Identifer, String)
type Section = (Identifer, [Declaration])
type INIFile = [Section]
letterOrDigit :: Parser Char
letterOrDigit = do
c <- anyChar
if isAlphaNum c
then return c
else noParser
many1 :: Parser a -> Parser [a]
many1 p = do
a <- p
as <- many p `orElse` pure []
return (a : as)
skipCommentsAndEmptyLines :: Parser ()
skipCommentsAndEmptyLines = void $ many (skipEmptyLine `orElse` skipComment)
skipComment :: Parser ()
skipComment = char '#' >> many (anyCharBut '\n') >> char '\n'
skipEmptyLine :: Parser ()
skipEmptyLine = char '\n'
parseIdentifier :: Parser Identifer
parseIdentifier = many letterOrDigit
parseSectionHeader :: Parser Identifer
parseSectionHeader = do
_ <- char '['
s <- parseIdentifier
_ <- char ']'
return s
parseDeclaration :: Parser Declaration
parseDeclaration = do
skipCommentsAndEmptyLines
key <- parseIdentifier
_ <- many (char ' ')
_ <- char '='
_ <- many (char ' ')
value <- many1 (anyCharBut '\n')
_ <- char '\n'
return (key, value)
parseSection :: Parser Section
parseSection = do
skipCommentsAndEmptyLines
header <- parseSectionHeader
decls <- many1 parseDeclaration
return (header, decls)
parseINI :: Parser INIFile
parseINI = many1 parseSection <* skipCommentsAndEmptyLines
-- Testing Harness
main :: [String] -> IO ()
main args = do
input <-
case args of
[] -> getContents
[fileName] -> readFile fileName
_ -> hPutStrLn stderr "Too many arguments given" >> exitFailure
case parse parseINI input of
Just i -> print i
Nothing -> do
hPutStrLn stderr "Failed to parse INI file."
exitFailure
| null | https://raw.githubusercontent.com/anurudhp/CPHaskell/01ae8dde6aab4f6ddfebd122ded0b42779dd16f1/learning/CIS194/Week8.hs | haskell | Exercise 1: A `Functor` instance
Functor laws
fmap id == id
Testing Harness | module Week8 where
import Control.Monad (unless, void)
import Data.Bifunctor (Bifunctor(first))
import Data.Char (isAlphaNum)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
fmap ( f . ) = = fmap f .
data ComplicatedA a b
= Con1 a b
| Con2 [Maybe (a -> b)]
data ComplicatedB f g a b
= Con3 (f a)
| Con4 (g b)
| Con5 (g (g [b]))
instance Functor (ComplicatedA a) where
fmap f (Con1 x y) = Con1 x (f y)
fmap f (Con2 xs) = Con2 (map (fmap (f .)) xs)
instance Functor g => Functor (ComplicatedB f g a) where
fmap _ (Con3 x) = Con3 x
fmap f (Con4 y) = Con4 (fmap f y)
fmap f (Con5 zs) = Con5 (fmap (fmap (fmap f)) zs)
Exercise 2 : Rewriting monadic code
func0 :: Monad f => (a -> a) -> f a -> f a
func0 f xs = do
f . f <$> xs
func0' :: Functor f => (a -> a) -> f a -> f a
func0' f xs = f . f <$> xs
func1 :: Monad f => f a -> f (a, a)
func1 xs = xs >>= (\x -> return (x, x))
func1' :: Functor f => f a -> f (a, a)
func1' xs = (\x -> (x, x)) <$> xs
func2 :: Monad f => f a -> f (a, a)
func2 xs = xs >>= (\x -> xs >>= \y -> return (x, y))
func3 :: Monad f => f a -> f (a, a)
func3 xs = xs >>= (\x -> xs >> return (x, x))
func4 :: Monad f => f a -> f a -> f (a, a)
func4 xs ys = xs >>= (\x -> ys >>= \y -> return (x, y))
func4' :: Applicative f => f a -> f a -> f (a, a)
func4' xs ys = (,) <$> xs <*> ys
func5 :: Monad f => f Integer -> f Integer -> f Integer
func5 xs ys = do
x <- xs
let x' = x + 1
y <- (+ 1) <$> ys
return (x' + y)
func5' :: Applicative f => f Integer -> f Integer -> f Integer
func5' xs ys = (+) <$> ((+ 1) <$> xs) <*> ((+ 1) <$> ys)
func6 :: Monad f => f Integer -> f (Integer, Integer)
func6 xs = do
x <- xs
return $
if x > 0
then (x, 0)
else (0, x)
func6' :: Functor f => f Integer -> f (Integer, Integer)
func6' =
fmap
(\x ->
if x > 0
then (x, 0)
else (0, x))
func7 :: Monad f => f Integer -> f (Integer, Integer)
func7 xs = do
x <- xs
if x > 0
then return (x, 0)
else return (0, x)
func8 :: Monad f => f Integer -> Integer -> f Integer
func8 xs x = ((+) <$> xs) <*> pure x
func8' :: Applicative f => f Integer -> Integer -> f Integer
func8' xs x = ((+) <$> xs) <*> pure x
func9 :: Monad f => f Integer -> f Integer -> f Integer -> f Integer
func9 xs ys zs =
xs >>= \x ->
if even x
then ys
else zs
func10 :: Monad f => f Integer -> f Integer
func10 xs = do
x <- xs >>= (\x -> return (x * x))
return (x + 10)
func10' :: Applicative f => f Integer -> f Integer
func10' xs = (+ 10) . (\x -> x * x) <$> xs
Exercise 3 : A parser monad
data Parser a =
P (String -> Maybe (a, String))
runParser :: Parser a -> String -> Maybe (a, String)
runParser (P p) = p
parse :: Parser a -> String -> Maybe a
parse p s = do
(a, s') <- runParser p s
if not (null s')
then Nothing
else return a
noParser :: Parser a
noParser = P (const Nothing)
pureParser :: a -> Parser a
pureParser a = P (\s -> Just (a, s))
instance Functor Parser where
fmap f p = P (fmap (first f) . runParser p)
instance Applicative Parser where
pure = pureParser
fp <*> fx =
P
(\s ->
case runParser fp s of
Nothing -> Nothing
Just (f, s') ->
case runParser fx s' of
Nothing -> Nothing
Just (a, s'') -> Just (f a, s''))
instance Monad Parser where
return = pureParser
fa >>= k =
P
(\s ->
case runParser fa s of
Nothing -> Nothing
Just (a, s') -> runParser (k a) s')
anyChar :: Parser Char
anyChar =
P (\s ->
if null s
then Nothing
else Just (head s, tail s))
char :: Char -> Parser ()
char c = do
c' <- anyChar
unless (c' == c) noParser
anyCharBut :: Char -> Parser Char
anyCharBut c = do
c' <- anyChar
if c /= c'
then return c'
else noParser
orElse :: Parser a -> Parser a -> Parser a
orElse p q =
P (\s ->
case runParser p s of
Nothing -> runParser q s
Just (a, s') -> Just (a, s'))
many :: Parser a -> Parser [a]
many p =
(do a <- p
as <- many p
return (a : as)) `orElse`
pure []
sepBy :: Parser a -> Parser () -> Parser [a]
sepBy pa ps =
(do a <- pa
(do ps
(a :) <$> sepBy pa ps) `orElse`
return [a]) `orElse`
return []
parseCSV :: Parser [[String]]
parseCSV = many parseLine
where
parseLine = parseCell `sepBy` char ',' <* char '\n'
parseCell = do
char '"'
content <- many (anyCharBut '"')
char '"'
return content
Exercise 4 : Parsing an INI file
type Identifer = String
type Declaration = (Identifer, String)
type Section = (Identifer, [Declaration])
type INIFile = [Section]
letterOrDigit :: Parser Char
letterOrDigit = do
c <- anyChar
if isAlphaNum c
then return c
else noParser
many1 :: Parser a -> Parser [a]
many1 p = do
a <- p
as <- many p `orElse` pure []
return (a : as)
skipCommentsAndEmptyLines :: Parser ()
skipCommentsAndEmptyLines = void $ many (skipEmptyLine `orElse` skipComment)
skipComment :: Parser ()
skipComment = char '#' >> many (anyCharBut '\n') >> char '\n'
skipEmptyLine :: Parser ()
skipEmptyLine = char '\n'
parseIdentifier :: Parser Identifer
parseIdentifier = many letterOrDigit
parseSectionHeader :: Parser Identifer
parseSectionHeader = do
_ <- char '['
s <- parseIdentifier
_ <- char ']'
return s
parseDeclaration :: Parser Declaration
parseDeclaration = do
skipCommentsAndEmptyLines
key <- parseIdentifier
_ <- many (char ' ')
_ <- char '='
_ <- many (char ' ')
value <- many1 (anyCharBut '\n')
_ <- char '\n'
return (key, value)
parseSection :: Parser Section
parseSection = do
skipCommentsAndEmptyLines
header <- parseSectionHeader
decls <- many1 parseDeclaration
return (header, decls)
parseINI :: Parser INIFile
parseINI = many1 parseSection <* skipCommentsAndEmptyLines
main :: [String] -> IO ()
main args = do
input <-
case args of
[] -> getContents
[fileName] -> readFile fileName
_ -> hPutStrLn stderr "Too many arguments given" >> exitFailure
case parse parseINI input of
Just i -> print i
Nothing -> do
hPutStrLn stderr "Failed to parse INI file."
exitFailure
|
13fc1977e1c6a55e827d1f5e23a29012eca6201c4618327a76c6f28fafcb8679 | Factual/c4 | project.clj | (defproject factual/c4 "0.2.1"
:description "Convenience features for handling record files the Clojure way"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[fs "1.3.2"]
[org.clojure/tools.logging "0.2.3"]
[clojure-csv/clojure-csv "2.0.1"]
[cheshire "5.2.0"]
[ordered "1.3.1"]]
:aliases {"test!" ["do" "clean," "deps," "test" ":all"]}
:aot :all)
| null | https://raw.githubusercontent.com/Factual/c4/9391e9a4cb5c9ec6da91cce1cdf5db740f79d37e/project.clj | clojure | (defproject factual/c4 "0.2.1"
:description "Convenience features for handling record files the Clojure way"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[fs "1.3.2"]
[org.clojure/tools.logging "0.2.3"]
[clojure-csv/clojure-csv "2.0.1"]
[cheshire "5.2.0"]
[ordered "1.3.1"]]
:aliases {"test!" ["do" "clean," "deps," "test" ":all"]}
:aot :all)
|
|
c3065cc7486da93868697c14684c2a5860da8d9391c2e0f8bef0c6807c9f3f29 | DogLooksGood/holdem | ladder.cljs | (ns poker.events.ladder
(:require
[re-frame.core :as re-frame]))
(re-frame/reg-event-db :ladder/list-leaderboard-success
(fn [db [_ players]]
(assoc db :ladder/leaderboard players)))
(re-frame/reg-event-fx :ladder/list-leaderboard
(fn [{:keys [db]} _]
{:db db,
:api/send {:event [:ladder/list-leaderboard {}],
:callback {:success :ladder/list-leaderboard-success}}}))
| null | https://raw.githubusercontent.com/DogLooksGood/holdem/bc0f93ed65cab54890c91f78bb95fe3ba020a41f/src/cljs/poker/events/ladder.cljs | clojure | (ns poker.events.ladder
(:require
[re-frame.core :as re-frame]))
(re-frame/reg-event-db :ladder/list-leaderboard-success
(fn [db [_ players]]
(assoc db :ladder/leaderboard players)))
(re-frame/reg-event-fx :ladder/list-leaderboard
(fn [{:keys [db]} _]
{:db db,
:api/send {:event [:ladder/list-leaderboard {}],
:callback {:success :ladder/list-leaderboard-success}}}))
|
|
8a8e8e1fb31cef5ee3ca0ffc3d791f7cc007f29383d75e3c2ed8f64794b40074 | jellelicht/guix | djvu.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2015 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages djvu)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public djvulibre
(package
(name "djvulibre")
(version "3.5.27")
(source (origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.gz"))
(sha256
(base32
"0psh3zl9dj4n4r3lx25390nx34xz0bg0ql48zdskhq354ljni5p6"))))
(build-system gnu-build-system)
(home-page "/")
(synopsis "Implementation of DjVu, the document format")
(description "DjVuLibre is an implementation of DjVu,
including viewers, browser plugins, decoders, simple encoders, and
utilities.")
(license license:gpl2+)))
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/djvu.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
| Copyright © 2015 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages djvu)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public djvulibre
(package
(name "djvulibre")
(version "3.5.27")
(source (origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.gz"))
(sha256
(base32
"0psh3zl9dj4n4r3lx25390nx34xz0bg0ql48zdskhq354ljni5p6"))))
(build-system gnu-build-system)
(home-page "/")
(synopsis "Implementation of DjVu, the document format")
(description "DjVuLibre is an implementation of DjVu,
including viewers, browser plugins, decoders, simple encoders, and
utilities.")
(license license:gpl2+)))
|
8584490ff77793e15e683de71d0f951337cb2a9012e72b27700c554beb33d65c | samply/blaze | json_test.clj | (ns blaze.fhir.spec.type.json-test
(:require
[blaze.fhir.spec.type.json :as json]
[blaze.test-util :as tu :refer [satisfies-prop]]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [deftest testing]]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop])
(:import
[java.nio.charset StandardCharsets]))
(set! *warn-on-reflection* true)
(st/instrument)
(test/use-fixtures :each tu/fixture)
(deftest field-name-test
(testing "getValue"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (.getValue (json/field-name value))))))
(testing "charLength"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= (count value) (.charLength (json/field-name value))))))
(testing "appendQuotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(let [buffer (byte-array (count value))]
(.appendQuotedUTF8 (json/field-name value) buffer 0)
(= value (String. buffer StandardCharsets/UTF_8))))))
(testing "asUnquotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (String. (.asUnquotedUTF8 (json/field-name value)) StandardCharsets/UTF_8)))))
(testing "asQuotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (String. (.asQuotedUTF8 (json/field-name value)) StandardCharsets/UTF_8))))))
| null | https://raw.githubusercontent.com/samply/blaze/6441a0a2f988b8784ed555c1d20f634ef2df7e4a/modules/fhir-structure/test/blaze/fhir/spec/type/json_test.clj | clojure | (ns blaze.fhir.spec.type.json-test
(:require
[blaze.fhir.spec.type.json :as json]
[blaze.test-util :as tu :refer [satisfies-prop]]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [deftest testing]]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop])
(:import
[java.nio.charset StandardCharsets]))
(set! *warn-on-reflection* true)
(st/instrument)
(test/use-fixtures :each tu/fixture)
(deftest field-name-test
(testing "getValue"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (.getValue (json/field-name value))))))
(testing "charLength"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= (count value) (.charLength (json/field-name value))))))
(testing "appendQuotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(let [buffer (byte-array (count value))]
(.appendQuotedUTF8 (json/field-name value) buffer 0)
(= value (String. buffer StandardCharsets/UTF_8))))))
(testing "asUnquotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (String. (.asUnquotedUTF8 (json/field-name value)) StandardCharsets/UTF_8)))))
(testing "asQuotedUTF8"
(satisfies-prop 100
(prop/for-all [value gen/string-ascii]
(= value (String. (.asQuotedUTF8 (json/field-name value)) StandardCharsets/UTF_8))))))
|
|
510b2ca26954418917171cd5a80cd98b074078a1e19e5fbd1e7dbaaf7596b39b | Leystryku/mpbomberman_racket | sh_tick_explosion.rkt | #lang racket
;; import
(require 2htdp/image)
(require "cl_sound.rkt")
(require "sh_config.rkt")
(require "sh_structs.rkt")
(require "sh_collisions.rkt")
(require "sh_helper.rkt")
(require "sh_tick_bomb.rkt")
;; export
(provide (all-defined-out))
;; [explosionTickSpreadExplode] Is responsible for turning breakable tiles into breakingTiles
(define (explosionTickSpreadExplodeBreakableTile currentWorld elements tickCount elemToBreak)
(and
(set-fieldElement-elementName! elemToBreak 'breakingTile)
(set-fieldElement-animatedTexture! elemToBreak (animatedTextureBreakingTile))
(set-fieldElement-extraData!
elemToBreak
(extraData-breakingTile
(+ breakingTileVanishInMs (current-inexact-milliseconds))
(+ tickCount breakingTileTickCountTillNextAnim)
1
)
)
currentWorld
)
)
;; [explosionTickSpreadExplodeBomb] Is responsible for exploding other bombs
(define (explosionTickSpreadExplodeBomb currentWorld elements tickCount elemToBreak)
(if gameBombsCanExplodeEachOther
(and
(bombTickExplode
currentWorld
elements
tickCount
elemToBreak
(fieldElement-extraData elemToBreak)
)
currentWorld
)
currentWorld
)
)
;; [explosionTickSpreadExplode] Calls the right function to explode a given tile
(define (explosionTickSpreadExplode setGameFieldFn currentWorld elements tickCount elemToBreak)
(if elemToBreak
(cond
[(equal? (fieldElement-elementName elemToBreak) 'breakableTile)
(explosionTickSpreadExplodeBreakableTile
currentWorld
elements
tickCount
elemToBreak
)
]
[(equal? (fieldElement-elementName elemToBreak) 'bomb)
(explosionTickSpreadExplodeBomb
currentWorld
elements
tickCount
elemToBreak
)
]
[else currentWorld]
)
currentWorld
)
)
;; [explosionTickSpreadExplodeIfCan] Calls the right function to explode a given tile
(define (explosionTickSpreadExplodeIfcan setGameFieldFn currentWorld elements tickCount spreadX spreadY)
(define elemToBreak (getEntByFieldPos elements spreadX spreadY))
(and
(explosionTickSpreadExplode
setGameFieldFn
currentWorld
elements
tickCount
elemToBreak
)
currentWorld
)
)
;; [explosionTickSpreadPos] Spreads the explosion further in its direction
(define (explosionTickSpreadPos setGameFieldFn currentWorld elements tickCount explosionData spreadX spreadY spreadType)
(define vanishWhen (extraData-explosion-vanishWhen explosionData))
(define spreadsLeft (extraData-explosion-spreadsLeft explosionData))
(define ticksWhenNextAnim (+ explosionTickCountTillNextAnim tickCount))
(define user (extraData-explosion-user explosionData))
(addExplosionField
setGameFieldFn
currentWorld
elements
spreadX
spreadY
vanishWhen
spreadType
(- spreadsLeft 1)
ticksWhenNextAnim
user
)
)
;; [explosionTickSpreadKillPlayer] Calls the functions to kill a player who entered the explosion
(define (explosionTickSpreadKillPlayer setGameFieldFn currentWorld elements tickCount explosionData player isServer)
(define user (extraData-explosion-user explosionData))
(if isServer
(and
((third setGameFieldFn)
currentWorld
player
user
)
currentWorld
)
currentWorld
)
)
;; [explosionTickSpreadPosIfCan] Spreads bomb if possible, otherwise calls functions responsible for handling what we touched (other fields, players)
(define (explosionTickSpreadPosIfCan setGameFieldFn currentWorld elements players tickCount explosionData spreadX spreadY spreadType isServer)
(cond
[(getPlayerByFieldPos players spreadX spreadY)
(explosionTickSpreadKillPlayer
setGameFieldFn
currentWorld
elements
tickCount
explosionData
(getPlayerByFieldPos players spreadX spreadY)
isServer
)
]
[(canBombSpreadToField elements players spreadX spreadY)
(explosionTickSpreadPos
setGameFieldFn
currentWorld
elements
tickCount
explosionData
spreadX
spreadY
spreadType
)
]
[else
(explosionTickSpreadExplodeIfcan
setGameFieldFn
currentWorld
elements
tickCount
spreadX
spreadY
)
]
)
)
[ explosionTickSpreadDir ] Makes the explosion store that this part spread and calls functions for spreading with position instead of dir
(define (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData dir isServer)
(define currentX (fieldElement-xtiles explosion))
(define currentY (fieldElement-ytiles explosion))
(set-extraData-explosion-didSpread! explosionData #t)
(cond
[(equal? dir 'up)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
currentX
(- currentY 1)
dir
isServer
)
]
[(equal? dir 'down)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
currentX
(+ currentY 1)
dir
isServer
)
]
[(equal? dir 'left)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
(- currentX 1)
currentY
dir
isServer
)
]
[(equal? dir 'right)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
(+ currentX 1)
currentY
dir
isServer
)
]
[else
(error 'INVALID_EXPLOSION_SPREAD_DIR)
]
)
)
[ explosionTickSpreadCore ] Responsible for making the core spread in all 4 dirs by calling [ explosionTickSpreadDir ] 4 t imes
(define (explosionTickSpreadCore setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(let* (
[worldFieldA (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'up isServer)]
[worldFieldB (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'down isServer)]
[worldFieldC (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'left isServer)]
[worldFieldD (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'right isServer)]
)
worldFieldD
)
)
;; [explosionTickSpread] Calls the right function for spreading this type of explosion
(define (explosionTickSpread setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(define spreadType (extraData-explosion-spreadType explosionData))
(if (equal? spreadType 'core)
(explosionTickSpreadCore
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
isServer
)
(explosionTickSpreadDir
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
spreadType
isServer
)
)
)
;; [explosionTickSpreadIfShould] Checks whether the explosion should spread and if yes calls the fn to make it do so
(define (explosionTickSpreadIfShould setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(if (or (extraData-explosion-didSpread explosionData) (= (extraData-explosion-spreadsLeft explosionData) 0))
currentWorld
(explosionTickSpread
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
isServer
)
)
)
;; [explosionTickVanishIfShould] Checks whether the explosion should vanish and if yes calls the fn to make it vanish
(define (explosionTickVanishIfShould setGameFieldFn currentWorld elements explosion explosionData)
(if
(>
(current-inexact-milliseconds)
(extraData-explosion-vanishWhen explosionData)
)
(removeFieldElement
setGameFieldFn
currentWorld
elements
explosion
)
currentWorld
)
)
;; [explosionTick] Calls the functions to make the bomb spread if it should and vanish if it should
(define (explosionTick setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(explosionTickSpreadIfShould
setGameFieldFn
(explosionTickVanishIfShould
setGameFieldFn
currentWorld
elements
explosion
explosionData
)
elements
players
explosion
tickCount
explosionData
isServer
)
)
| null | https://raw.githubusercontent.com/Leystryku/mpbomberman_racket/059d95040cfad2e27237f8dd41fc32a4fc698afe/game/sh_tick_explosion.rkt | racket | import
export
[explosionTickSpreadExplode] Is responsible for turning breakable tiles into breakingTiles
[explosionTickSpreadExplodeBomb] Is responsible for exploding other bombs
[explosionTickSpreadExplode] Calls the right function to explode a given tile
[explosionTickSpreadExplodeIfCan] Calls the right function to explode a given tile
[explosionTickSpreadPos] Spreads the explosion further in its direction
[explosionTickSpreadKillPlayer] Calls the functions to kill a player who entered the explosion
[explosionTickSpreadPosIfCan] Spreads bomb if possible, otherwise calls functions responsible for handling what we touched (other fields, players)
[explosionTickSpread] Calls the right function for spreading this type of explosion
[explosionTickSpreadIfShould] Checks whether the explosion should spread and if yes calls the fn to make it do so
[explosionTickVanishIfShould] Checks whether the explosion should vanish and if yes calls the fn to make it vanish
[explosionTick] Calls the functions to make the bomb spread if it should and vanish if it should | #lang racket
(require 2htdp/image)
(require "cl_sound.rkt")
(require "sh_config.rkt")
(require "sh_structs.rkt")
(require "sh_collisions.rkt")
(require "sh_helper.rkt")
(require "sh_tick_bomb.rkt")
(provide (all-defined-out))
(define (explosionTickSpreadExplodeBreakableTile currentWorld elements tickCount elemToBreak)
(and
(set-fieldElement-elementName! elemToBreak 'breakingTile)
(set-fieldElement-animatedTexture! elemToBreak (animatedTextureBreakingTile))
(set-fieldElement-extraData!
elemToBreak
(extraData-breakingTile
(+ breakingTileVanishInMs (current-inexact-milliseconds))
(+ tickCount breakingTileTickCountTillNextAnim)
1
)
)
currentWorld
)
)
(define (explosionTickSpreadExplodeBomb currentWorld elements tickCount elemToBreak)
(if gameBombsCanExplodeEachOther
(and
(bombTickExplode
currentWorld
elements
tickCount
elemToBreak
(fieldElement-extraData elemToBreak)
)
currentWorld
)
currentWorld
)
)
(define (explosionTickSpreadExplode setGameFieldFn currentWorld elements tickCount elemToBreak)
(if elemToBreak
(cond
[(equal? (fieldElement-elementName elemToBreak) 'breakableTile)
(explosionTickSpreadExplodeBreakableTile
currentWorld
elements
tickCount
elemToBreak
)
]
[(equal? (fieldElement-elementName elemToBreak) 'bomb)
(explosionTickSpreadExplodeBomb
currentWorld
elements
tickCount
elemToBreak
)
]
[else currentWorld]
)
currentWorld
)
)
(define (explosionTickSpreadExplodeIfcan setGameFieldFn currentWorld elements tickCount spreadX spreadY)
(define elemToBreak (getEntByFieldPos elements spreadX spreadY))
(and
(explosionTickSpreadExplode
setGameFieldFn
currentWorld
elements
tickCount
elemToBreak
)
currentWorld
)
)
(define (explosionTickSpreadPos setGameFieldFn currentWorld elements tickCount explosionData spreadX spreadY spreadType)
(define vanishWhen (extraData-explosion-vanishWhen explosionData))
(define spreadsLeft (extraData-explosion-spreadsLeft explosionData))
(define ticksWhenNextAnim (+ explosionTickCountTillNextAnim tickCount))
(define user (extraData-explosion-user explosionData))
(addExplosionField
setGameFieldFn
currentWorld
elements
spreadX
spreadY
vanishWhen
spreadType
(- spreadsLeft 1)
ticksWhenNextAnim
user
)
)
(define (explosionTickSpreadKillPlayer setGameFieldFn currentWorld elements tickCount explosionData player isServer)
(define user (extraData-explosion-user explosionData))
(if isServer
(and
((third setGameFieldFn)
currentWorld
player
user
)
currentWorld
)
currentWorld
)
)
(define (explosionTickSpreadPosIfCan setGameFieldFn currentWorld elements players tickCount explosionData spreadX spreadY spreadType isServer)
(cond
[(getPlayerByFieldPos players spreadX spreadY)
(explosionTickSpreadKillPlayer
setGameFieldFn
currentWorld
elements
tickCount
explosionData
(getPlayerByFieldPos players spreadX spreadY)
isServer
)
]
[(canBombSpreadToField elements players spreadX spreadY)
(explosionTickSpreadPos
setGameFieldFn
currentWorld
elements
tickCount
explosionData
spreadX
spreadY
spreadType
)
]
[else
(explosionTickSpreadExplodeIfcan
setGameFieldFn
currentWorld
elements
tickCount
spreadX
spreadY
)
]
)
)
[ explosionTickSpreadDir ] Makes the explosion store that this part spread and calls functions for spreading with position instead of dir
(define (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData dir isServer)
(define currentX (fieldElement-xtiles explosion))
(define currentY (fieldElement-ytiles explosion))
(set-extraData-explosion-didSpread! explosionData #t)
(cond
[(equal? dir 'up)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
currentX
(- currentY 1)
dir
isServer
)
]
[(equal? dir 'down)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
currentX
(+ currentY 1)
dir
isServer
)
]
[(equal? dir 'left)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
(- currentX 1)
currentY
dir
isServer
)
]
[(equal? dir 'right)
(explosionTickSpreadPosIfCan
setGameFieldFn
currentWorld
elements
players
tickCount
explosionData
(+ currentX 1)
currentY
dir
isServer
)
]
[else
(error 'INVALID_EXPLOSION_SPREAD_DIR)
]
)
)
[ explosionTickSpreadCore ] Responsible for making the core spread in all 4 dirs by calling [ explosionTickSpreadDir ] 4 t imes
(define (explosionTickSpreadCore setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(let* (
[worldFieldA (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'up isServer)]
[worldFieldB (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'down isServer)]
[worldFieldC (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'left isServer)]
[worldFieldD (explosionTickSpreadDir setGameFieldFn currentWorld elements players explosion tickCount explosionData 'right isServer)]
)
worldFieldD
)
)
(define (explosionTickSpread setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(define spreadType (extraData-explosion-spreadType explosionData))
(if (equal? spreadType 'core)
(explosionTickSpreadCore
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
isServer
)
(explosionTickSpreadDir
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
spreadType
isServer
)
)
)
(define (explosionTickSpreadIfShould setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(if (or (extraData-explosion-didSpread explosionData) (= (extraData-explosion-spreadsLeft explosionData) 0))
currentWorld
(explosionTickSpread
setGameFieldFn
currentWorld
elements
players
explosion
tickCount
explosionData
isServer
)
)
)
(define (explosionTickVanishIfShould setGameFieldFn currentWorld elements explosion explosionData)
(if
(>
(current-inexact-milliseconds)
(extraData-explosion-vanishWhen explosionData)
)
(removeFieldElement
setGameFieldFn
currentWorld
elements
explosion
)
currentWorld
)
)
(define (explosionTick setGameFieldFn currentWorld elements players explosion tickCount explosionData isServer)
(explosionTickSpreadIfShould
setGameFieldFn
(explosionTickVanishIfShould
setGameFieldFn
currentWorld
elements
explosion
explosionData
)
elements
players
explosion
tickCount
explosionData
isServer
)
)
|
4595cb39cfb5eac658a00f343921bcffffdde696340963f9743ede40c0377f01 | ivanjovanovic/sicp | lazy-streams.scm | ; Based on the lazy evaluation of procedure arguments, we can implement
; list abstraction so they are naturally lazy. We just have to implement
; pair as a procedure that will represent them and selectors just as
; applications of this procedure.
(define (cons x y) (lambda (m) (m x y)))
(define (car z) (z (lambda (p q) p)))
(define (cdr z) (z (lambda (p q) q)))
; When defined this way in a lazy language, lists are even lazier than streams. Streams
; have car always as immediate value, and here we have even car value as thunk that has
; to be forced.
| null | https://raw.githubusercontent.com/ivanjovanovic/sicp/a3bfbae0a0bda414b042e16bbb39bf39cd3c38f8/4.2/lazy-streams.scm | scheme | Based on the lazy evaluation of procedure arguments, we can implement
list abstraction so they are naturally lazy. We just have to implement
pair as a procedure that will represent them and selectors just as
applications of this procedure.
When defined this way in a lazy language, lists are even lazier than streams. Streams
have car always as immediate value, and here we have even car value as thunk that has
to be forced. |
(define (cons x y) (lambda (m) (m x y)))
(define (car z) (z (lambda (p q) p)))
(define (cdr z) (z (lambda (p q) q)))
|
9dfec1f0f23bde8c546196fee3637b989ba78f5994443b114833b3b4e9f1fb40 | lispgames/glkit | vao.lisp | (in-package :kit.gl.vao)
(defvar *vao-declarations* (make-hash-table))
;; GFs
(defgeneric vao-add (vao object))
(defgeneric vao-attr-count (vao)
(:documentation "Number of slots for `VAO`."))
(defgeneric vao-vbo-count (group)
(:documentation "Number of VBOs allocated by `GROUP`."))
(defgeneric vao-set-pointers (group starting-index total-vertices vbos)
(:documentation "Use glVertexAttribPointer and related to define the
attributes. `VBOS` is a vector of `VAO-VBO-COUNT` VBOs. It is
necessary to bind each of these as appropriate. It is not necessary
to call `gl:enable-vertex-attrib-array`.
`STARTING-INDEX` is the starting vertex attribute index for this group.
`TOTAL-VERTICES` is the known vertex count, or `NIL` if it is
unknown."))
(defgeneric attribute-size (attr)
(:documentation "Total size in bytes of `ATTR`."))
(defmethod attribute-size ((attr symbol))
(ecase attr
((:byte :unsigned-byte) 1)
((:short :unsigned-short :half-float) 2)
((:float :int :unsigned-int) 4)
(:double 8)))
VAO declaration
(defun vao-find (name)
(gethash name *vao-declarations*))
(defclass vertex-attribute ()
((name :initarg :name :reader vertex-attribute-name)
(type :initarg :type)
(out-type :initarg :out-type)
(count :initarg :count)
(normalizep :initarg :normalizep)))
(defun attribute-set-pointer (attr index stride offset divisor)
(with-slots (type out-type count normalizep) attr
(ecase out-type
((:byte :unsigned-byte :short :unsigned-short :int :unsigned-int)
(%gl:vertex-attrib-ipointer index count type stride offset))
((:float :half-float)
(%gl:vertex-attrib-pointer index count type
(if normalizep 1 0)
stride offset))
(:double
(%gl:vertex-attrib-lpointer index count type stride offset)))
(when divisor
(%gl:vertex-attrib-divisor index divisor))))
(defclass vertex-attribute-group ()
((divisor :initform 0 :initarg :divisor)
(attributes :initform (make-array 0 :adjustable t :fill-pointer 0))))
(defclass vertex-interleave-group (vertex-attribute-group) ())
(defclass vertex-block-group (vertex-attribute-group) ())
(defclass vertex-separate-group (vertex-attribute-group) ())
(defmethod print-object ((o vertex-attribute-group) stream)
(with-slots (attributes) o
(print-unreadable-object (o stream :type t)
(format stream "~S" (map 'list #'vertex-attribute-name attributes)))))
(defclass vao-declaration ()
((attr-index :initform (make-hash-table))
(attr-count :initform 0)
(groups :initform (make-array 0 :adjustable t :fill-pointer 0))))
(defun vao-decl-add-index (decl attr)
(with-slots (attr-index attr-count) decl
(setf (gethash (vertex-attribute-name attr) attr-index) attr-count)
(incf attr-count)))
VAO - ADD
(defmethod vao-add (vao object)
(error "You may not add ~% ~S~% to~% ~S" object vao))
(defmethod vao-add ((vao vao-declaration) (group vertex-attribute-group))
(with-slots (groups) vao
(vector-push-extend group groups)))
(defmethod vao-add ((group vertex-attribute-group) (attr vertex-attribute))
(with-slots (attributes) group
(vector-push-extend attr attributes)))
(defmethod vao-add ((group vertex-block-group) (ig vertex-interleave-group))
(with-slots (attributes) group
(vector-push-extend ig attributes)))
(defmethod vao-add ((group vertex-attribute-group) (sg vertex-separate-group))
(with-slots (attributes) group
(vector-push-extend sg attributes)))
;;; ATTR/VBO counts
(defmethod vao-attr-count ((attr vertex-attribute)) 1)
(defmethod vao-attr-count ((group vertex-attribute-group))
(with-slots (attributes) group
(reduce #'+ (map 'list #'vao-attr-count attributes))))
(defmethod vao-attr-count ((vao vao-declaration))
(with-slots (groups) vao
(reduce #'+ (map 'list #'vao-attr-count groups))))
(defmethod vao-vbo-count ((attr vertex-attribute)) 1)
(defmethod vao-vbo-count ((group vertex-interleave-group)) 1)
(defmethod vao-vbo-count ((group vertex-block-group)) 1)
(defmethod vao-vbo-count ((group vertex-separate-group))
(with-slots (attributes) group
(reduce #'+ (map 'list #'vao-vbo-count attributes))))
(defmethod vao-vbo-count ((vao vao-declaration))
(with-slots (groups) vao
(reduce #'+ (map 'list #'vao-vbo-count groups))))
;;; ATTRIBUTE-SIZE
(defmethod attribute-size ((attr vertex-attribute))
(with-slots (type count) attr
(* (attribute-size type) count)))
(defmethod attribute-size ((group vertex-interleave-group))
(with-slots (attributes divisor) group
(loop for attr across attributes
summing (attribute-size attr) into size
finally (return size))))
(defmethod attribute-size ((group vertex-separate-group))
(with-slots (attributes divisor) group
(loop for attr across attributes
summing (attribute-size attr) into size
finally (return size))))
VAO - SET - POINTERS
(defmethod vao-set-pointers ((group vertex-interleave-group) starting-index
vertex-count vbos)
(declare (ignore vertex-count))
(let ((stride (attribute-size group))
(offset 0))
(with-slots (attributes divisor) group
(%gl:bind-buffer :array-buffer (aref vbos 0))
(loop for attr across attributes
for i from starting-index
do (attribute-set-pointer attr i stride offset divisor)
(incf offset (attribute-size attr))))))
(defmethod vao-set-pointers ((group vertex-separate-group) starting-index
vertex-count vbos)
(declare (ignore vertex-count))
(with-slots (attributes divisor) group
(loop for attr across attributes
for attr-index from starting-index
for vbo-index from 0
do (gl:bind-buffer :array-buffer (aref vbos vbo-index))
(attribute-set-pointer attr attr-index 0 0 divisor))))
(defmethod vao-set-pointers ((group vertex-block-group) starting-index
vertex-count vbos)
(error "Implement VAO-SET-POINTERS for block groups"))
;; Parsing
(defvar *vao-decl* nil)
(defun vao-parse (list)
The distinction between a group decl and a vertex - attribute
is whether the second argument is an option list ( which may be
NIL ) .
(if (listp (cadr list))
(vao-parse-group (vao-parse-make-group (car list) (cadr list))
(cddr list))
(apply #'vao-parse-decl list)))
;;; VERTEX-ATTRIBUTE parsing
(defun vao-parse-decl (name type count &key out-type normalizep)
(let ((attr (make-instance 'vertex-attribute
:name name
:type type
:count count
:out-type (or out-type type)
:normalizep normalizep)))
(vao-decl-add-index *vao-decl* attr)
attr))
VERTEX - ATTRIBUTE - GROUP parsing
(defgeneric vao-parse-make-group (type options))
(defgeneric vao-parse-group (group body))
(defmethod vao-parse-make-group ((type (eql :interleave)) options)
(apply #'make-instance 'vertex-interleave-group options))
(defmethod vao-parse-make-group ((type (eql :blocks)) options)
(apply #'make-instance 'vertex-block-group options))
(defmethod vao-parse-make-group ((type (eql :separate)) options)
(apply #'make-instance 'vertex-separate-group options))
(defmethod vao-parse-group ((group vertex-attribute-group) body)
(loop for i in body
do (vao-add group (vao-parse i)))
group)
;; DEFVAO
(defmacro defvao (name options &body groups)
(declare (ignore options))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(let ((*vao-decl* (make-instance 'vao-declaration)))
(map 'nil (lambda (x)
(vao-add *vao-decl* (vao-parse x)))
',groups)
(setf (gethash ',name *vao-declarations*) *vao-decl*))))
vao
(defclass vao ()
((type :type vao-declaration)
(id :initform (gl:gen-vertex-array))
(vbos)
(vertex-count :initform nil :initarg :vertex-count)
(pointers :initform nil)
(primitive :initarg :primitive :initform nil)))
(defclass vao-indexed (vao)
((index :initarg :index)))
(defmethod initialize-instance :after ((vao vao) &key type &allow-other-keys)
(vao-bind vao)
(with-slots ((vao-type type) id vbos vertex-count) vao
(if type
(setf vao-type (vao-find type))
(error "No :TYPE specified for VAO."))
(let ((vbo-count (vao-vbo-count vao-type)))
(setf vbos (make-array vbo-count
:initial-contents (gl:gen-buffers vbo-count))))
(with-slots (groups) vao-type
(loop for group across groups
as vbo-offset = 0 then (+ vbo-offset vbo-count)
as vbo-count = (vao-vbo-count group)
as vbo-subset = (make-array vbo-count :displaced-to vbos
:displaced-index-offset vbo-offset)
as attr-offset = 0 then (+ attr-offset attr-count)
as attr-count = (vao-attr-count group)
do (loop for i from 0 below (vao-attr-count group)
do (%gl:enable-vertex-attrib-array (+ i attr-offset)))
(vao-set-pointers group attr-offset vertex-count vbo-subset)))))
(defmethod vao-attr-count ((vao vao))
(with-slots (type) vao
(vao-attr-count type)))
vao activation
(defun vao-bind (vao)
(with-slots (id) vao
(%gl:bind-vertex-array id)))
(defun vao-unbind ()
(%gl:bind-vertex-array 0))
;; buffer-data
(defun guess-buffer-size (array)
(let* ((count (length array))
(type-size
(etypecase array
((simple-array single-float *) 4)
((simple-array double-float *) 8)
((simple-array (signed-byte 8) *) 1)
((simple-array (unsigned-byte 8) *) 1)
((simple-array (signed-byte 16) *) 2)
((simple-array (unsigned-byte 16) *) 2)
((simple-array (signed-byte 32) *) 4)
((simple-array (unsigned-byte 32) *) 4))))
(* count type-size)))
(defun vao-buffer-vector (vao vbo vector &key byte-size (usage :dynamic-draw))
#+glkit-sv
(with-slots (type vbos) vao
(with-slots (attr-index) type
(let* ((sv (static-vectors:make-static-vector
(length vector)
:element-type (array-element-type vector)
:initial-contents vector))
(ptr (static-vectors:static-vector-pointer sv))
(byte-size (or byte-size (guess-buffer-size vector))))
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-data :array-buffer byte-size ptr usage)
(static-vectors:free-static-vector sv))))
#-glkit-sv
(error "STATIC-VECTORS not supported by your implementation."))
(defun vao-buffer-data (vao vbo byte-size pointer &optional (usage :dynamic-draw))
(with-slots (type vbos) vao
(with-slots (attr-index) type
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-data :array-buffer byte-size pointer usage))))
(defun vao-buffer-sub-vector (vao vbo offset vector &key byte-size)
#+glkit-sv
(with-slots (type vbos) vao
(with-slots (attr-index) type
(let* ((sv (static-vectors:make-static-vector
(length vector)
:element-type (array-element-type vector)
:initial-contents vector))
(ptr (static-vectors:static-vector-pointer sv))
(byte-size (or byte-size (guess-buffer-size vector))))
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-sub-data :array-buffer offset byte-size ptr)
(static-vectors:free-static-vector sv))))
#-glkit-sv
(error "STATIC-VECTORS not supported by your implementation."))
(defun vao-buffer-sub-data (vao vbo offset byte-size pointer)
(with-slots (type vbos) vao
(with-slots (attr-index) type
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-sub-data :array-buffer offset byte-size pointer))))
;; draw
(defun vao-draw (vao &key primitive (first 0) count)
(with-slots ((prim primitive) vertex-count) vao
(vao-bind vao)
(%gl:draw-arrays (or primitive prim) first (or count vertex-count))))
(defun vao-draw-instanced (vao prim-count &key primitive (first 0) count)
(with-slots ((prim primitive) vertex-count) vao
(vao-bind vao)
(%gl:draw-arrays-instanced (or primitive prim) first (or count vertex-count) prim-count)))
(defun vao-draw-elements (vao &key primitive index count type)
(with-slots ((prim primitive) (ind index) vertex-count) vao
(vao-bind vao)
(%gl:draw-elements (or primitive prim)
(or count vertex-count)
type
(or index ind))))
(defun vao-draw-elements-instanced (vao prim-count &key primitive index count type)
(with-slots ((prim primitive) (ind index) vertex-count) vao
(vao-bind vao)
(%gl:draw-elements-instanced (or primitive prim)
(or count vertex-count)
type
(or index ind)
prim-count)))
(defmacro vao-indexed-draw (vao &key primitive index)
(warn "VAO-INDEXED-DRAW deprecated, use VAO-DRAW-ELEMENTS")
`(vao-draw-elements ,vao :primitive ,primitive :index ,index))
;; delete
(defmethod gl-delete-object ((vao vao))
(with-slots (vbos id) vao
(gl:delete-buffers vbos)
(gl:delete-vertex-arrays (list id))))
| null | https://raw.githubusercontent.com/lispgames/glkit/0d8e7c5fed4231f2177afcf0f3ff66f196ed6a46/src/vao/vao.lisp | lisp | GFs
ATTR/VBO counts
ATTRIBUTE-SIZE
Parsing
VERTEX-ATTRIBUTE parsing
DEFVAO
buffer-data
draw
delete | (in-package :kit.gl.vao)
(defvar *vao-declarations* (make-hash-table))
(defgeneric vao-add (vao object))
(defgeneric vao-attr-count (vao)
(:documentation "Number of slots for `VAO`."))
(defgeneric vao-vbo-count (group)
(:documentation "Number of VBOs allocated by `GROUP`."))
(defgeneric vao-set-pointers (group starting-index total-vertices vbos)
(:documentation "Use glVertexAttribPointer and related to define the
attributes. `VBOS` is a vector of `VAO-VBO-COUNT` VBOs. It is
necessary to bind each of these as appropriate. It is not necessary
to call `gl:enable-vertex-attrib-array`.
`STARTING-INDEX` is the starting vertex attribute index for this group.
`TOTAL-VERTICES` is the known vertex count, or `NIL` if it is
unknown."))
(defgeneric attribute-size (attr)
(:documentation "Total size in bytes of `ATTR`."))
(defmethod attribute-size ((attr symbol))
(ecase attr
((:byte :unsigned-byte) 1)
((:short :unsigned-short :half-float) 2)
((:float :int :unsigned-int) 4)
(:double 8)))
VAO declaration
(defun vao-find (name)
(gethash name *vao-declarations*))
(defclass vertex-attribute ()
((name :initarg :name :reader vertex-attribute-name)
(type :initarg :type)
(out-type :initarg :out-type)
(count :initarg :count)
(normalizep :initarg :normalizep)))
(defun attribute-set-pointer (attr index stride offset divisor)
(with-slots (type out-type count normalizep) attr
(ecase out-type
((:byte :unsigned-byte :short :unsigned-short :int :unsigned-int)
(%gl:vertex-attrib-ipointer index count type stride offset))
((:float :half-float)
(%gl:vertex-attrib-pointer index count type
(if normalizep 1 0)
stride offset))
(:double
(%gl:vertex-attrib-lpointer index count type stride offset)))
(when divisor
(%gl:vertex-attrib-divisor index divisor))))
(defclass vertex-attribute-group ()
((divisor :initform 0 :initarg :divisor)
(attributes :initform (make-array 0 :adjustable t :fill-pointer 0))))
(defclass vertex-interleave-group (vertex-attribute-group) ())
(defclass vertex-block-group (vertex-attribute-group) ())
(defclass vertex-separate-group (vertex-attribute-group) ())
(defmethod print-object ((o vertex-attribute-group) stream)
(with-slots (attributes) o
(print-unreadable-object (o stream :type t)
(format stream "~S" (map 'list #'vertex-attribute-name attributes)))))
(defclass vao-declaration ()
((attr-index :initform (make-hash-table))
(attr-count :initform 0)
(groups :initform (make-array 0 :adjustable t :fill-pointer 0))))
(defun vao-decl-add-index (decl attr)
(with-slots (attr-index attr-count) decl
(setf (gethash (vertex-attribute-name attr) attr-index) attr-count)
(incf attr-count)))
VAO - ADD
(defmethod vao-add (vao object)
(error "You may not add ~% ~S~% to~% ~S" object vao))
(defmethod vao-add ((vao vao-declaration) (group vertex-attribute-group))
(with-slots (groups) vao
(vector-push-extend group groups)))
(defmethod vao-add ((group vertex-attribute-group) (attr vertex-attribute))
(with-slots (attributes) group
(vector-push-extend attr attributes)))
(defmethod vao-add ((group vertex-block-group) (ig vertex-interleave-group))
(with-slots (attributes) group
(vector-push-extend ig attributes)))
(defmethod vao-add ((group vertex-attribute-group) (sg vertex-separate-group))
(with-slots (attributes) group
(vector-push-extend sg attributes)))
(defmethod vao-attr-count ((attr vertex-attribute)) 1)
(defmethod vao-attr-count ((group vertex-attribute-group))
(with-slots (attributes) group
(reduce #'+ (map 'list #'vao-attr-count attributes))))
(defmethod vao-attr-count ((vao vao-declaration))
(with-slots (groups) vao
(reduce #'+ (map 'list #'vao-attr-count groups))))
(defmethod vao-vbo-count ((attr vertex-attribute)) 1)
(defmethod vao-vbo-count ((group vertex-interleave-group)) 1)
(defmethod vao-vbo-count ((group vertex-block-group)) 1)
(defmethod vao-vbo-count ((group vertex-separate-group))
(with-slots (attributes) group
(reduce #'+ (map 'list #'vao-vbo-count attributes))))
(defmethod vao-vbo-count ((vao vao-declaration))
(with-slots (groups) vao
(reduce #'+ (map 'list #'vao-vbo-count groups))))
(defmethod attribute-size ((attr vertex-attribute))
(with-slots (type count) attr
(* (attribute-size type) count)))
(defmethod attribute-size ((group vertex-interleave-group))
(with-slots (attributes divisor) group
(loop for attr across attributes
summing (attribute-size attr) into size
finally (return size))))
(defmethod attribute-size ((group vertex-separate-group))
(with-slots (attributes divisor) group
(loop for attr across attributes
summing (attribute-size attr) into size
finally (return size))))
VAO - SET - POINTERS
(defmethod vao-set-pointers ((group vertex-interleave-group) starting-index
vertex-count vbos)
(declare (ignore vertex-count))
(let ((stride (attribute-size group))
(offset 0))
(with-slots (attributes divisor) group
(%gl:bind-buffer :array-buffer (aref vbos 0))
(loop for attr across attributes
for i from starting-index
do (attribute-set-pointer attr i stride offset divisor)
(incf offset (attribute-size attr))))))
(defmethod vao-set-pointers ((group vertex-separate-group) starting-index
vertex-count vbos)
(declare (ignore vertex-count))
(with-slots (attributes divisor) group
(loop for attr across attributes
for attr-index from starting-index
for vbo-index from 0
do (gl:bind-buffer :array-buffer (aref vbos vbo-index))
(attribute-set-pointer attr attr-index 0 0 divisor))))
(defmethod vao-set-pointers ((group vertex-block-group) starting-index
vertex-count vbos)
(error "Implement VAO-SET-POINTERS for block groups"))
(defvar *vao-decl* nil)
(defun vao-parse (list)
The distinction between a group decl and a vertex - attribute
is whether the second argument is an option list ( which may be
NIL ) .
(if (listp (cadr list))
(vao-parse-group (vao-parse-make-group (car list) (cadr list))
(cddr list))
(apply #'vao-parse-decl list)))
(defun vao-parse-decl (name type count &key out-type normalizep)
(let ((attr (make-instance 'vertex-attribute
:name name
:type type
:count count
:out-type (or out-type type)
:normalizep normalizep)))
(vao-decl-add-index *vao-decl* attr)
attr))
VERTEX - ATTRIBUTE - GROUP parsing
(defgeneric vao-parse-make-group (type options))
(defgeneric vao-parse-group (group body))
(defmethod vao-parse-make-group ((type (eql :interleave)) options)
(apply #'make-instance 'vertex-interleave-group options))
(defmethod vao-parse-make-group ((type (eql :blocks)) options)
(apply #'make-instance 'vertex-block-group options))
(defmethod vao-parse-make-group ((type (eql :separate)) options)
(apply #'make-instance 'vertex-separate-group options))
(defmethod vao-parse-group ((group vertex-attribute-group) body)
(loop for i in body
do (vao-add group (vao-parse i)))
group)
(defmacro defvao (name options &body groups)
(declare (ignore options))
`(eval-when (:compile-toplevel :load-toplevel :execute)
(let ((*vao-decl* (make-instance 'vao-declaration)))
(map 'nil (lambda (x)
(vao-add *vao-decl* (vao-parse x)))
',groups)
(setf (gethash ',name *vao-declarations*) *vao-decl*))))
vao
(defclass vao ()
((type :type vao-declaration)
(id :initform (gl:gen-vertex-array))
(vbos)
(vertex-count :initform nil :initarg :vertex-count)
(pointers :initform nil)
(primitive :initarg :primitive :initform nil)))
(defclass vao-indexed (vao)
((index :initarg :index)))
(defmethod initialize-instance :after ((vao vao) &key type &allow-other-keys)
(vao-bind vao)
(with-slots ((vao-type type) id vbos vertex-count) vao
(if type
(setf vao-type (vao-find type))
(error "No :TYPE specified for VAO."))
(let ((vbo-count (vao-vbo-count vao-type)))
(setf vbos (make-array vbo-count
:initial-contents (gl:gen-buffers vbo-count))))
(with-slots (groups) vao-type
(loop for group across groups
as vbo-offset = 0 then (+ vbo-offset vbo-count)
as vbo-count = (vao-vbo-count group)
as vbo-subset = (make-array vbo-count :displaced-to vbos
:displaced-index-offset vbo-offset)
as attr-offset = 0 then (+ attr-offset attr-count)
as attr-count = (vao-attr-count group)
do (loop for i from 0 below (vao-attr-count group)
do (%gl:enable-vertex-attrib-array (+ i attr-offset)))
(vao-set-pointers group attr-offset vertex-count vbo-subset)))))
(defmethod vao-attr-count ((vao vao))
(with-slots (type) vao
(vao-attr-count type)))
vao activation
(defun vao-bind (vao)
(with-slots (id) vao
(%gl:bind-vertex-array id)))
(defun vao-unbind ()
(%gl:bind-vertex-array 0))
(defun guess-buffer-size (array)
(let* ((count (length array))
(type-size
(etypecase array
((simple-array single-float *) 4)
((simple-array double-float *) 8)
((simple-array (signed-byte 8) *) 1)
((simple-array (unsigned-byte 8) *) 1)
((simple-array (signed-byte 16) *) 2)
((simple-array (unsigned-byte 16) *) 2)
((simple-array (signed-byte 32) *) 4)
((simple-array (unsigned-byte 32) *) 4))))
(* count type-size)))
(defun vao-buffer-vector (vao vbo vector &key byte-size (usage :dynamic-draw))
#+glkit-sv
(with-slots (type vbos) vao
(with-slots (attr-index) type
(let* ((sv (static-vectors:make-static-vector
(length vector)
:element-type (array-element-type vector)
:initial-contents vector))
(ptr (static-vectors:static-vector-pointer sv))
(byte-size (or byte-size (guess-buffer-size vector))))
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-data :array-buffer byte-size ptr usage)
(static-vectors:free-static-vector sv))))
#-glkit-sv
(error "STATIC-VECTORS not supported by your implementation."))
(defun vao-buffer-data (vao vbo byte-size pointer &optional (usage :dynamic-draw))
(with-slots (type vbos) vao
(with-slots (attr-index) type
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-data :array-buffer byte-size pointer usage))))
(defun vao-buffer-sub-vector (vao vbo offset vector &key byte-size)
#+glkit-sv
(with-slots (type vbos) vao
(with-slots (attr-index) type
(let* ((sv (static-vectors:make-static-vector
(length vector)
:element-type (array-element-type vector)
:initial-contents vector))
(ptr (static-vectors:static-vector-pointer sv))
(byte-size (or byte-size (guess-buffer-size vector))))
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-sub-data :array-buffer offset byte-size ptr)
(static-vectors:free-static-vector sv))))
#-glkit-sv
(error "STATIC-VECTORS not supported by your implementation."))
(defun vao-buffer-sub-data (vao vbo offset byte-size pointer)
(with-slots (type vbos) vao
(with-slots (attr-index) type
(%gl:bind-buffer :array-buffer (aref vbos vbo))
(%gl:buffer-sub-data :array-buffer offset byte-size pointer))))
(defun vao-draw (vao &key primitive (first 0) count)
(with-slots ((prim primitive) vertex-count) vao
(vao-bind vao)
(%gl:draw-arrays (or primitive prim) first (or count vertex-count))))
(defun vao-draw-instanced (vao prim-count &key primitive (first 0) count)
(with-slots ((prim primitive) vertex-count) vao
(vao-bind vao)
(%gl:draw-arrays-instanced (or primitive prim) first (or count vertex-count) prim-count)))
(defun vao-draw-elements (vao &key primitive index count type)
(with-slots ((prim primitive) (ind index) vertex-count) vao
(vao-bind vao)
(%gl:draw-elements (or primitive prim)
(or count vertex-count)
type
(or index ind))))
(defun vao-draw-elements-instanced (vao prim-count &key primitive index count type)
(with-slots ((prim primitive) (ind index) vertex-count) vao
(vao-bind vao)
(%gl:draw-elements-instanced (or primitive prim)
(or count vertex-count)
type
(or index ind)
prim-count)))
(defmacro vao-indexed-draw (vao &key primitive index)
(warn "VAO-INDEXED-DRAW deprecated, use VAO-DRAW-ELEMENTS")
`(vao-draw-elements ,vao :primitive ,primitive :index ,index))
(defmethod gl-delete-object ((vao vao))
(with-slots (vbos id) vao
(gl:delete-buffers vbos)
(gl:delete-vertex-arrays (list id))))
|
d745fec5c06ca488c580a600327b4064bc694128368be2903f94350b26be94ec | janestreet/bonsai | dagviz_test.ml | open! Core
open Bonsai_web
open Bonsai.Let_syntax
open Bonsai_web_test
(* Concrete [Name] implementation used to label DAG nodes in these tests.
   [User]-tagged ids come from test code (via [of_string] or [create]);
   [Gen]-tagged ids are minted by the dagviz layout through [next].  The
   rendered strings ("generated_id_N" / "gen_N") appear verbatim in the
   expect-test output below, so they must not change. *)
module Id : sig
  include Bonsai_experimental_dagviz.Name

  val of_string : string -> t
end = struct
  module Count = Int

  module T = struct
    type t =
      | User of string
      | Gen of Count.t
    [@@deriving bin_io, compare, sexp]
  end

  include T
  include Comparable.Make_binable (T)

  (* Global counter backing [create]: each call mints a fresh "generated_id_N". *)
  let curr = ref 0

  let create () =
    incr curr;
    User [%string "generated_id_%{!curr#Int}"]
  ;;

  let next count =
    let bumped = Count.succ count in
    Gen bumped, bumped
  ;;

  let of_string s = User s

  let to_string t =
    match t with
    | User s -> s
    | Gen n -> [%string "gen_%{n#Int}"]
  ;;
end
(* Instantiate the dagviz-to-vdom converter over the test [Id] implementation,
   and bring its [Edge], [create], etc. into scope for the tests below. *)
module To_vdom = Bonsai_experimental_dagviz.To_vdom.Make (Id)
open To_vdom
(* Handy node ids and graph-construction helpers shared by all tests below. *)
module Dummy_nodes = struct
  let a = Id.of_string "A"
  let b = Id.of_string "B"
  let c = Id.of_string "C"
  let d = Id.of_string "D"
  let e = Id.of_string "E"
  let f = Id.of_string "F"
  let g = Id.of_string "G"

  (* [x ==> y] builds the directed edge from [x] to [y]. *)
  let ( ==> ) from to_ = { Edge.from = from; to_ = to_ }

  (* Build a node map keyed by the given ids, each carrying a unit payload. *)
  let map_with_ids ids = ids |> List.map ~f:(fun id -> id, ()) |> Id.Map.of_alist_exn
end
(* Render a node simply as a text vdom node containing its id. *)
let node_to_vdom (id : Id.t Value.t) _ : Vdom.Node.t Computation.t =
  let%arr id = id in
  id |> Id.to_string |> Vdom.Node.text
;;
(* Render an edge as a debugging sexp.  NOTE: the [edge] binding name is
   captured by [%message] and therefore appears verbatim as "(edge ...)" in
   the expect-test output; do not rename it. *)
let edge_to_svg ~(edge : Edge.t Value.t) ~from:_ ~to_:_ : Vdom.Node.t Computation.t =
let%arr edge = edge in
Vdom.Node.sexp_for_debugging [%message (edge : Edge.t)]
;;
(* Build a test [Handle.t] that renders [dag] to vdom (top-to-bottom layout).
   Layout failures are rendered as an error sexp rather than raising.  The
   position/size-tracker hook attributes and all inline styles are filtered
   out of the printed vdom so the expectations stay stable across runs. *)
let create_handle ~dag ~curr_id =
  let component =
    let%sub dag, _curr_id =
      create ~curr_id ~direction:`Top_to_bottom ~node_to_vdom ~edge_to_svg dag
    in
    match%sub dag with
    | Ok dag -> Bonsai.read dag
    | Error error ->
      let%arr error = error in
      Vdom.Node.sexp_for_debugging [%message (error : Error.t)]
  in
  (* Decide whether a printed attribute should be kept in test output. *)
  let keep_attribute key _ =
    match key with
    | "bulk_position_tracker" | "bulk_size_tracker" -> false
    | key -> not (String.is_prefix key ~prefix:"style")
  in
  Handle.create
    (Result_spec.vdom ~filter_printed_attributes:keep_attribute Fn.id)
    component
;;
(* Simulate the browser reporting a (dummy 1x1 at (1,1)) bounding box for each
   node in [ids]; edges are only drawn once their endpoints have positions. *)
let set_positions ~handle ~ids =
  Handle.Position_tracker.change_positions
    handle
    ~get_vdom:Fn.id
    (List.map ids ~f:(fun id ->
       { Handle.Position_tracker.selector = [%string "[src-name='%{id#Id}']"]
       ; width = 1
       ; height = 1
       ; top = 1
       ; left = 1
       }))
;;
let curr_id = Value.return Id.Count.zero
(* A straight chain A -> B -> C -> D lays out as one vertical column; once
   node positions are supplied, the three edges appear inside the <svg>. *)
let%expect_test "Linked list graph" =
let open Dummy_nodes in
let edges = [ a ==> b; b ==> c; c ==> d ] |> Edge.Set.of_list in
(*
{v
a
|
b
|
c
|
d
v}
*)
let nodes = map_with_ids [ a; b; c; d ] in
let dag = Value.return { edges; nodes } in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_14 data-kind="singleton" src-name="A" outside-id="generated_id_14"> A </div>
<div @key=generated_id_13
kind="mapn"
src-name="B"
outside-id="generated_id_13"
my-id="generated_id_13"
class="dest-class-generated_id_13"> B </div>
<div @key=generated_id_9
kind="mapn"
src-name="C"
outside-id="generated_id_9"
my-id="generated_id_9"
class="dest-class-generated_id_9"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_5
kind="mapn"
src-name="D"
outside-id="generated_id_5"
my-id="generated_id_5"
class="dest-class-generated_id_5"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div>
|}];
set_positions ~handle ~ids:[ a; b; c; d ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_13)) (to_ (User generated_id_9)))) </pre>
+| <pre> (edge ((from (User generated_id_14)) (to_ (User generated_id_13)))) </pre>
+| <pre> (edge ((from (User generated_id_9)) (to_ (User generated_id_5)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_14 data-kind="singleton" src-name="A" outside-id="generated_id_14"> A </div>
<div @key=generated_id_13
kind="mapn"
src-name="B"
outside-id="generated_id_13"
my-id="generated_id_13"
class="dest-class-generated_id_13"> B </div>
<div @key=generated_id_9
kind="mapn"
src-name="C"
outside-id="generated_id_9" |}]
;;
(* Two roots A and B merge into C; after adding E above both, E becomes the
   single root and the layout reorganizes accordingly. *)
let%expect_test "Dominator reorganizing." =
let open Dummy_nodes in
let edges = [ a ==> c; b ==> c; c ==> d ] |> Edge.Set.of_list in
(*
{v
A B
\ /
C
|
D
v}
*)
let nodes = map_with_ids [ a; b; c; d ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_26
data-kind="singleton"
src-name="A"
outside-id="generated_id_26"> A </div>
<div @key=generated_id_25
data-kind="singleton"
src-name="B"
outside-id="generated_id_25"> B </div>
</div>
<div @key=generated_id_24
kind="mapn"
src-name="C"
outside-id="generated_id_24"
my-id="generated_id_24"
class="dest-class-generated_id_24"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_18
kind="mapn"
src-name="D"
outside-id="generated_id_18"
my-id="generated_id_18"
class="dest-class-generated_id_18"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_24)) (to_ (User generated_id_18)))) </pre>
+| <pre> (edge ((from (User generated_id_25)) (to_ (User generated_id_24)))) </pre>
+| <pre> (edge ((from (User generated_id_26)) (to_ (User generated_id_24)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_26
data-kind="singleton"
src-name="A"
outside-id="generated_id_26"> A </div>
<div @key=generated_id_25
data-kind="singleton"
src-name="B"
outside-id="generated_id_25"> B </div>
</div>
<div @key=generated_id_24 |}];
let edges = Set.add edges (e ==> a) |> Fn.flip Set.add (e ==> b) in
let nodes = map_with_ids [ a; b; c; d; e ] in
Bonsai.Var.set dag_var { edges; nodes };
(*
{v
E
/ \
A B
\ /
C
|
D
v}
*)
Handle.show handle;
(* Since E was added, it is now the only root. *)
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_45 data-kind="singleton" src-name="E" outside-id="generated_id_45"> E </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_40
kind="mapn"
src-name="B"
outside-id="generated_id_40"
my-id="generated_id_40"
class="dest-class-generated_id_40"> B </div>
<div @key=generated_id_44
kind="mapn"
src-name="A"
outside-id="generated_id_44"
my-id="generated_id_44"
class="dest-class-generated_id_44"> A </div>
</div>
<div @key=generated_id_36
kind="mapn"
src-name="C"
outside-id="generated_id_36"
my-id="generated_id_36"
class="dest-class-generated_id_36"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_30
kind="mapn"
src-name="D"
outside-id="generated_id_30"
my-id="generated_id_30"
class="dest-class-generated_id_30"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d; e ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_36)) (to_ (User generated_id_30)))) </pre>
+| <pre> (edge ((from (User generated_id_40)) (to_ (User generated_id_36)))) </pre>
+| <pre> (edge ((from (User generated_id_44)) (to_ (User generated_id_36)))) </pre>
+| <pre> (edge ((from (User generated_id_45)) (to_ (User generated_id_40)))) </pre>
+| <pre> (edge ((from (User generated_id_45)) (to_ (User generated_id_44)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_45 data-kind="singleton" src-name="E" outside-id="generated_id_45"> E </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_40
kind="mapn"
src-name="B"
outside-id="generated_id_40"
my-id="generated_id_40"
class="dest-class-generated_id_40"> B </div>
<div @key=generated_id_44
kind="mapn"
src-name="A" |}]
;;
(* An edge endpoint that is absent from the node map still participates in
   layout, but renders as an empty placeholder (a redirect-like node). *)
let%expect_test "Missing node in node map from edges is treated as a redirect" =
let open Dummy_nodes in
let edges = [ a ==> b; b ==> c ] |> Edge.Set.of_list in
(* B is not present in the map. *)
let nodes = map_with_ids [ a; c ] in
let dag = Value.return { edges; nodes } in
let handle = create_handle ~dag ~curr_id in
(* The node where [B] would've been is empty.*)
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_54 data-kind="singleton" src-name="A" outside-id="generated_id_54"> A </div>
<div @key=generated_id_53
kind="mapn"
src-name="B"
outside-id="generated_id_53"
my-id="generated_id_53"
class="dest-class-generated_id_53">
<div> </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_49
kind="mapn"
src-name="C"
outside-id="generated_id_49"
my-id="generated_id_49"
class="dest-class-generated_id_49"> C </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_53)) (to_ (User generated_id_49)))) </pre>
+| <pre> (edge ((from (User generated_id_54)) (to_ (User generated_id_53)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_54 data-kind="singleton" src-name="A" outside-id="generated_id_54"> A </div>
<div @key=generated_id_53
kind="mapn"
src-name="B"
outside-id="generated_id_53"
my-id="generated_id_53"
class="dest-class-generated_id_53">
<div> </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_49 |}]
;;
(* A fan-out tree: A has three children, one of which (D) has its own child.
   Redirect nodes are generated to route the A-B and A-C edges across levels. *)
let%expect_test "Tree-like DAG" =
let open Dummy_nodes in
let edges = [ a ==> b; a ==> c; a ==> d; d ==> e ] |> Edge.Set.of_list in
(*
{v
A
/|\
B C D
|
E
v}
*)
let nodes = map_with_ids [ a; b; c; d; e ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_71 data-kind="singleton" src-name="A" outside-id="generated_id_71"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_62
kind="mapn"
src-name="D"
outside-id="generated_id_62"
my-id="generated_id_62"
class="dest-class-generated_id_62"> D </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect"
src-name="gen_3"
outside-id="gen_4"
class="dest-class-A redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_5
data-kind="redirect"
src-name="gen_2"
outside-id="gen_5"
class="dest-class-gen_3 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_58
kind="mapn"
src-name="E"
outside-id="generated_id_58"
my-id="generated_id_58"
class="dest-class-generated_id_58"> E </div>
<div @key=generated_id_66
kind="mapn"
src-name="C"
outside-id="generated_id_66"
my-id="generated_id_66"
class="dest-class-generated_id_66"> C </div>
<div @key=generated_id_70
kind="mapn"
src-name="B"
outside-id="generated_id_70"
my-id="generated_id_70"
class="dest-class-generated_id_70"> B </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
(* This is not ideal, but having this here showcases current behavior.

   Despite there being 4 edges in
   {v
   A
   /|\
   B C D
   |
   E
   v}

   only two of them are shown, namely A-D and D-E. The reason for this is that this test
   suite needs to manually set the position that each node would appear in the browser
   in order for the edges to be rendered. However, after topologically sorting, there is
   a topological gap between A-B and A-C. The reason for this is that redirect edges
   with id's generated using [count] were inserted between A-B and A-C. Since the ID's and
   structure number of redirect nodes can change, hardcoding the generated id could lead
   to brittle tests. *)
set_positions ~handle ~ids:[ a; b; c; d; e ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_62)) (to_ (User generated_id_58)))) </pre>
+| <pre> (edge ((from (User generated_id_71)) (to_ (User generated_id_62)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_71 data-kind="singleton" src-name="A" outside-id="generated_id_71"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_62
kind="mapn"
src-name="D"
outside-id="generated_id_62"
my-id="generated_id_62"
class="dest-class-generated_id_62"> D </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect" |}]
;;
(* A cyclic edge set is rejected: layout fails with the "cycle!" error,
   which [create_handle] renders as an error sexp instead of raising. *)
let%expect_test "Cycle" =
let open Dummy_nodes in
let edges = [ a ==> b; a ==> c; a ==> d; d ==> a ] |> Edge.Set.of_list in
(*
{v
A <--\
/|\ |
B C D -/
v}
*)
let nodes = map_with_ids [ a; b; c; d ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect {|
<pre> (error cycle!) </pre> |}]
;;
(* A denser DAG whose long edges force the layout to insert "redirect" nodes
   spanning multiple levels. *)
let%expect_test "redirect nodes" =
let open Dummy_nodes in
let edges =
[ a ==> b; a ==> c; a ==> e; a ==> g; b ==> d; c ==> e; d ==> e; e ==> g; f ==> g ]
|> Edge.Set.of_list
in
(*
{v
a
__/|\
/ | |
b / |
| c R f
d | / \ /
\ // R /
e / /
\ / /
\|__/
g
v}
*)
(* This test case showcases the creation of "redirect" nodes across different levels.
   "redirect" nodes are nodes that help route the different edges. In this particular
   test case there are two different redirect nodes created, marked in the diagram
   with "R". *)
let nodes = map_with_ids [ a; b; c; d; e; f; g ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_101
data-kind="singleton"
src-name="A"
outside-id="generated_id_101"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_99
kind="mapn"
src-name="B"
outside-id="generated_id_99"
my-id="generated_id_99"
class="dest-class-generated_id_99"> B </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_12
data-kind="redirect"
src-name="gen_11"
outside-id="gen_12"
class="dest-class-A redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_13
data-kind="redirect"
src-name="gen_10"
outside-id="gen_13"
class="dest-class-gen_11 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_91
kind="mapn"
src-name="D"
outside-id="generated_id_91"
my-id="generated_id_91"
class="dest-class-generated_id_91"> D </div>
<div @key=generated_id_95
kind="mapn"
src-name="C"
outside-id="generated_id_95"
my-id="generated_id_95"
class="dest-class-generated_id_95"> C </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_8
data-kind="redirect"
src-name="gen_7"
outside-id="gen_8"
class="dest-class-gen_10 redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_9
data-kind="redirect"
src-name="gen_6"
outside-id="gen_9"
class="dest-class-gen_7 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_100
data-kind="singleton"
src-name="F"
outside-id="generated_id_100"> F </div>
<div @key=generated_id_87
kind="mapn"
src-name="E"
outside-id="generated_id_87"
my-id="generated_id_87"
class="dest-class-generated_id_87"> E </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect"
src-name="gen_3"
outside-id="gen_4"
class="dest-class-gen_6 redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_5
data-kind="redirect"
src-name="gen_2"
outside-id="gen_5"
class="dest-class-gen_3 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_79
kind="mapn"
src-name="G"
outside-id="generated_id_79"
my-id="generated_id_79"
class="dest-class-generated_id_79"> G </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
(* For reasons similar to the test case above, (not knowing the id's of generated
redirect nodes to set their positions), this test case does not show all of the edges
that would be created in a browser environment. *)
set_positions ~handle ~ids:[ a; b; c; d; e; f; g ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_100)) (to_ (User generated_id_79)))) </pre>
+| <pre> (edge ((from (User generated_id_101)) (to_ (User generated_id_99)))) </pre>
+| <pre> (edge ((from (User generated_id_87)) (to_ (User generated_id_79)))) </pre>
+| <pre> (edge ((from (User generated_id_91)) (to_ (User generated_id_87)))) </pre>
+| <pre> (edge ((from (User generated_id_95)) (to_ (User generated_id_87)))) </pre>
+| <pre> (edge ((from (User generated_id_99)) (to_ (User generated_id_91)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_101
data-kind="singleton"
src-name="A"
outside-id="generated_id_101"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_99
kind="mapn"
src-name="B"
outside-id="generated_id_99"
my-id="generated_id_99"
class="dest-class-generated_id_99"> B </div> |}]
;;
(* Several disconnected components (three chains plus the isolated node G)
   are laid out side by side in a single render. *)
let%expect_test "Disjoint DAGs" =
let open Dummy_nodes in
let edges = [ a ==> b; c ==> d; e ==> f ] |> Edge.Set.of_list in
(*
{v
A C E G
| | |
B D F
v}
*)
let nodes = map_with_ids [ a; b; c; d; e; f; g ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_117
data-kind="singleton"
src-name="A"
outside-id="generated_id_117"> A </div>
<div @key=generated_id_116
data-kind="singleton"
src-name="C"
outside-id="generated_id_116"> C </div>
<div @key=generated_id_115
data-kind="singleton"
src-name="E"
outside-id="generated_id_115"> E </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_114
data-kind="singleton"
src-name="G"
outside-id="generated_id_114"> G </div>
<div @key=generated_id_113
kind="mapn"
src-name="B"
outside-id="generated_id_113"
my-id="generated_id_113"
class="dest-class-generated_id_113"> B </div>
<div @key=generated_id_109
kind="mapn"
src-name="D"
outside-id="generated_id_109"
my-id="generated_id_109"
class="dest-class-generated_id_109"> D </div>
<div @key=generated_id_105
kind="mapn"
src-name="F"
outside-id="generated_id_105"
my-id="generated_id_105"
class="dest-class-generated_id_105"> F </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d; e; f; g ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_115)) (to_ (User generated_id_105)))) </pre>
+| <pre> (edge ((from (User generated_id_116)) (to_ (User generated_id_109)))) </pre>
+| <pre> (edge ((from (User generated_id_117)) (to_ (User generated_id_113)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_117
data-kind="singleton"
src-name="A"
outside-id="generated_id_117"> A </div>
<div @key=generated_id_116
data-kind="singleton"
src-name="C"
outside-id="generated_id_116"> C </div>
<div @key=generated_id_115
data-kind="singleton" |}]
;;
| null | https://raw.githubusercontent.com/janestreet/bonsai/4baeedc75bf73a0915e04dc02d8a49b78779e9b0/experimental/dagviz/test/dagviz_test.ml | ocaml |
{v
a
|
b
|
c
|
d
v}
{v
A B
\ /
C
|
D
v}
{v
E
/ \
A B
\ /
C
|
D
v}
Since E was added, it is now the only root.
B is not present in the map.
The node where [B] would've been is empty.
{v
A
/|\
B C D
|
E
v}
{v
A <--\
/|\ |
B C D -/
v}
{v
a
__/|\
/ | |
b / |
| c R f
d | / \ /
\ // R /
e / /
\ / /
\|__/
g
v}
For reasons similar to the test case above, (not knowing the id's of generated
redirect nodes to set their positions), this test case does not show all of the edges
that would be created in a browser environment.
{v
A C E G
| | |
B D F
v}
| open! Core
open Bonsai_web
open Bonsai.Let_syntax
open Bonsai_web_test
module Id : sig
include Bonsai_experimental_dagviz.Name
val of_string : string -> t
end = struct
module Count = Int
module T = struct
type t =
| User of string
| Gen of Count.t
[@@deriving bin_io, compare, sexp]
end
include T
include Comparable.Make_binable (T)
let curr = ref 0
let create () =
curr := !curr + 1;
User [%string "generated_id_%{!curr#Int}"]
;;
let next count =
let count = Count.succ count in
Gen count, count
;;
let of_string s = User s
let to_string = function
| User s -> s
| Gen s -> [%string "gen_%{s#Int}"]
;;
end
module To_vdom = Bonsai_experimental_dagviz.To_vdom.Make (Id)
open To_vdom
module Dummy_nodes = struct
let a = Id.of_string "A"
let b = Id.of_string "B"
let c = Id.of_string "C"
let d = Id.of_string "D"
let e = Id.of_string "E"
let f = Id.of_string "F"
let g = Id.of_string "G"
let ( ==> ) from to_ = { Edge.from; to_ }
let map_with_ids ids = Id.Map.of_alist_exn (List.map ids ~f:(fun x -> x, ()))
end
let node_to_vdom (id : Id.t Value.t) _ : Vdom.Node.t Computation.t =
let%arr id = id in
Vdom.Node.text (Id.to_string id)
;;
let edge_to_svg ~(edge : Edge.t Value.t) ~from:_ ~to_:_ : Vdom.Node.t Computation.t =
let%arr edge = edge in
Vdom.Node.sexp_for_debugging [%message (edge : Edge.t)]
;;
let create_handle ~dag ~curr_id =
let component =
let%sub dag, _curr_id =
create ~curr_id ~direction:`Top_to_bottom ~node_to_vdom ~edge_to_svg dag
in
match%sub dag with
| Ok dag -> Bonsai.read dag
| Error error ->
let%arr error = error in
Vdom.Node.sexp_for_debugging [%message (error : Error.t)]
in
Handle.create
(Result_spec.vdom
~filter_printed_attributes:(fun key _ ->
match key with
| "bulk_position_tracker" | "bulk_size_tracker" -> false
| s when String.is_prefix s ~prefix:"style" -> false
| _ -> true)
Fn.id)
component
;;
let set_positions ~handle ~ids =
let positions =
List.map ids ~f:(fun id ->
{ Handle.Position_tracker.selector = [%string "[src-name='%{id#Id}']"]
; width = 1
; height = 1
; top = 1
; left = 1
})
in
Handle.Position_tracker.change_positions handle ~get_vdom:Fn.id positions
;;
let curr_id = Value.return Id.Count.zero
let%expect_test "Linked list graph" =
let open Dummy_nodes in
let edges = [ a ==> b; b ==> c; c ==> d ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; b; c; d ] in
let dag = Value.return { edges; nodes } in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_14 data-kind="singleton" src-name="A" outside-id="generated_id_14"> A </div>
<div @key=generated_id_13
kind="mapn"
src-name="B"
outside-id="generated_id_13"
my-id="generated_id_13"
class="dest-class-generated_id_13"> B </div>
<div @key=generated_id_9
kind="mapn"
src-name="C"
outside-id="generated_id_9"
my-id="generated_id_9"
class="dest-class-generated_id_9"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_5
kind="mapn"
src-name="D"
outside-id="generated_id_5"
my-id="generated_id_5"
class="dest-class-generated_id_5"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div>
|}];
set_positions ~handle ~ids:[ a; b; c; d ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_13)) (to_ (User generated_id_9)))) </pre>
+| <pre> (edge ((from (User generated_id_14)) (to_ (User generated_id_13)))) </pre>
+| <pre> (edge ((from (User generated_id_9)) (to_ (User generated_id_5)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_14 data-kind="singleton" src-name="A" outside-id="generated_id_14"> A </div>
<div @key=generated_id_13
kind="mapn"
src-name="B"
outside-id="generated_id_13"
my-id="generated_id_13"
class="dest-class-generated_id_13"> B </div>
<div @key=generated_id_9
kind="mapn"
src-name="C"
outside-id="generated_id_9" |}]
;;
let%expect_test "Dominator reorganizing." =
let open Dummy_nodes in
let edges = [ a ==> c; b ==> c; c ==> d ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; b; c; d ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_26
data-kind="singleton"
src-name="A"
outside-id="generated_id_26"> A </div>
<div @key=generated_id_25
data-kind="singleton"
src-name="B"
outside-id="generated_id_25"> B </div>
</div>
<div @key=generated_id_24
kind="mapn"
src-name="C"
outside-id="generated_id_24"
my-id="generated_id_24"
class="dest-class-generated_id_24"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_18
kind="mapn"
src-name="D"
outside-id="generated_id_18"
my-id="generated_id_18"
class="dest-class-generated_id_18"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_24)) (to_ (User generated_id_18)))) </pre>
+| <pre> (edge ((from (User generated_id_25)) (to_ (User generated_id_24)))) </pre>
+| <pre> (edge ((from (User generated_id_26)) (to_ (User generated_id_24)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_26
data-kind="singleton"
src-name="A"
outside-id="generated_id_26"> A </div>
<div @key=generated_id_25
data-kind="singleton"
src-name="B"
outside-id="generated_id_25"> B </div>
</div>
<div @key=generated_id_24 |}];
let edges = Set.add edges (e ==> a) |> Fn.flip Set.add (e ==> b) in
let nodes = map_with_ids [ a; b; c; d; e ] in
Bonsai.Var.set dag_var { edges; nodes };
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_45 data-kind="singleton" src-name="E" outside-id="generated_id_45"> E </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_40
kind="mapn"
src-name="B"
outside-id="generated_id_40"
my-id="generated_id_40"
class="dest-class-generated_id_40"> B </div>
<div @key=generated_id_44
kind="mapn"
src-name="A"
outside-id="generated_id_44"
my-id="generated_id_44"
class="dest-class-generated_id_44"> A </div>
</div>
<div @key=generated_id_36
kind="mapn"
src-name="C"
outside-id="generated_id_36"
my-id="generated_id_36"
class="dest-class-generated_id_36"> C </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_30
kind="mapn"
src-name="D"
outside-id="generated_id_30"
my-id="generated_id_30"
class="dest-class-generated_id_30"> D </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d; e ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_36)) (to_ (User generated_id_30)))) </pre>
+| <pre> (edge ((from (User generated_id_40)) (to_ (User generated_id_36)))) </pre>
+| <pre> (edge ((from (User generated_id_44)) (to_ (User generated_id_36)))) </pre>
+| <pre> (edge ((from (User generated_id_45)) (to_ (User generated_id_40)))) </pre>
+| <pre> (edge ((from (User generated_id_45)) (to_ (User generated_id_44)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_45 data-kind="singleton" src-name="E" outside-id="generated_id_45"> E </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_40
kind="mapn"
src-name="B"
outside-id="generated_id_40"
my-id="generated_id_40"
class="dest-class-generated_id_40"> B </div>
<div @key=generated_id_44
kind="mapn"
src-name="A" |}]
;;
let%expect_test "Missing node in node map from edges is treated as a redirect" =
let open Dummy_nodes in
let edges = [ a ==> b; b ==> c ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; c ] in
let dag = Value.return { edges; nodes } in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_54 data-kind="singleton" src-name="A" outside-id="generated_id_54"> A </div>
<div @key=generated_id_53
kind="mapn"
src-name="B"
outside-id="generated_id_53"
my-id="generated_id_53"
class="dest-class-generated_id_53">
<div> </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_49
kind="mapn"
src-name="C"
outside-id="generated_id_49"
my-id="generated_id_49"
class="dest-class-generated_id_49"> C </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_53)) (to_ (User generated_id_49)))) </pre>
+| <pre> (edge ((from (User generated_id_54)) (to_ (User generated_id_53)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_54 data-kind="singleton" src-name="A" outside-id="generated_id_54"> A </div>
<div @key=generated_id_53
kind="mapn"
src-name="B"
outside-id="generated_id_53"
my-id="generated_id_53"
class="dest-class-generated_id_53">
<div> </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_49 |}]
;;
let%expect_test "Tree-like DAG" =
let open Dummy_nodes in
let edges = [ a ==> b; a ==> c; a ==> d; d ==> e ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; b; c; d; e ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_71 data-kind="singleton" src-name="A" outside-id="generated_id_71"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_62
kind="mapn"
src-name="D"
outside-id="generated_id_62"
my-id="generated_id_62"
class="dest-class-generated_id_62"> D </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect"
src-name="gen_3"
outside-id="gen_4"
class="dest-class-A redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_5
data-kind="redirect"
src-name="gen_2"
outside-id="gen_5"
class="dest-class-gen_3 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_58
kind="mapn"
src-name="E"
outside-id="generated_id_58"
my-id="generated_id_58"
class="dest-class-generated_id_58"> E </div>
<div @key=generated_id_66
kind="mapn"
src-name="C"
outside-id="generated_id_66"
my-id="generated_id_66"
class="dest-class-generated_id_66"> C </div>
<div @key=generated_id_70
kind="mapn"
src-name="B"
outside-id="generated_id_70"
my-id="generated_id_70"
class="dest-class-generated_id_70"> B </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
This is not ideal , but having this here showcases current behavior .
Despite there being 4 edges in
{ v
A
/|\
B C D
|
E
v }
only two of them are shown , namely A - D and D - E. The reason for this is that this test
suite needs to manually set the position that each node would appear in the browser
in order for the edges to be rendered . However , after topologically sorting , there is
a topological gap between A - B and A - C. The reason for this is that redirect edges
with i d 's generated using [ count ] were inserted between A - B and A - C. Since the ID 's and
structure number of redirect nodes can change , hardcoding the generated i d could lead
to brittle tests .
Despite there being 4 edges in
{v
A
/|\
B C D
|
E
v}
only two of them are shown, namely A-D and D-E. The reason for this is that this test
suite needs to manually set the position that each node would appear in the browser
in order for the edges to be rendered. However, after topologically sorting, there is
a topological gap between A-B and A-C. The reason for this is that redirect edges
with id's generated using [count] were inserted between A-B and A-C. Since the ID's and
structure number of redirect nodes can change, hardcoding the generated id could lead
to brittle tests.
*)
set_positions ~handle ~ids:[ a; b; c; d; e ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_62)) (to_ (User generated_id_58)))) </pre>
+| <pre> (edge ((from (User generated_id_71)) (to_ (User generated_id_62)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_71 data-kind="singleton" src-name="A" outside-id="generated_id_71"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_62
kind="mapn"
src-name="D"
outside-id="generated_id_62"
my-id="generated_id_62"
class="dest-class-generated_id_62"> D </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect" |}]
;;
let%expect_test "Cycle" =
let open Dummy_nodes in
let edges = [ a ==> b; a ==> c; a ==> d; d ==> a ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; b; c; d ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect {|
<pre> (error cycle!) </pre> |}]
;;
let%expect_test "redirect nodes" =
let open Dummy_nodes in
let edges =
[ a ==> b; a ==> c; a ==> e; a ==> g; b ==> d; c ==> e; d ==> e; e ==> g; f ==> g ]
|> Edge.Set.of_list
in
This test case showcases the creation of " redirect " nodes across different levels .
" redirect " nodes are nodes that help route the different edges . In this particular test case
there are two different redirect nodes created marked in the diagram with " R " .
"redirect" nodes are nodes that help route the different edges. In this particular test case
there are two different redirect nodes created marked in the diagram with "R". *)
let nodes = map_with_ids [ a; b; c; d; e; f; g ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_101
data-kind="singleton"
src-name="A"
outside-id="generated_id_101"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_99
kind="mapn"
src-name="B"
outside-id="generated_id_99"
my-id="generated_id_99"
class="dest-class-generated_id_99"> B </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_12
data-kind="redirect"
src-name="gen_11"
outside-id="gen_12"
class="dest-class-A redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_13
data-kind="redirect"
src-name="gen_10"
outside-id="gen_13"
class="dest-class-gen_11 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_91
kind="mapn"
src-name="D"
outside-id="generated_id_91"
my-id="generated_id_91"
class="dest-class-generated_id_91"> D </div>
<div @key=generated_id_95
kind="mapn"
src-name="C"
outside-id="generated_id_95"
my-id="generated_id_95"
class="dest-class-generated_id_95"> C </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_8
data-kind="redirect"
src-name="gen_7"
outside-id="gen_8"
class="dest-class-gen_10 redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_9
data-kind="redirect"
src-name="gen_6"
outside-id="gen_9"
class="dest-class-gen_7 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_100
data-kind="singleton"
src-name="F"
outside-id="generated_id_100"> F </div>
<div @key=generated_id_87
kind="mapn"
src-name="E"
outside-id="generated_id_87"
my-id="generated_id_87"
class="dest-class-generated_id_87"> E </div>
<div class="vbox_hash_replaced_in_test">
<div @key=gen_4
data-kind="redirect"
src-name="gen_3"
outside-id="gen_4"
class="dest-class-gen_6 redirect_hash_replaced_in_test">
<div> </div>
</div>
<div @key=gen_5
data-kind="redirect"
src-name="gen_2"
outside-id="gen_5"
class="dest-class-gen_3 redirect_hash_replaced_in_test">
<div> </div>
</div>
</div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_79
kind="mapn"
src-name="G"
outside-id="generated_id_79"
my-id="generated_id_79"
class="dest-class-generated_id_79"> G </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d; e; f; g ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_100)) (to_ (User generated_id_79)))) </pre>
+| <pre> (edge ((from (User generated_id_101)) (to_ (User generated_id_99)))) </pre>
+| <pre> (edge ((from (User generated_id_87)) (to_ (User generated_id_79)))) </pre>
+| <pre> (edge ((from (User generated_id_91)) (to_ (User generated_id_87)))) </pre>
+| <pre> (edge ((from (User generated_id_95)) (to_ (User generated_id_87)))) </pre>
+| <pre> (edge ((from (User generated_id_99)) (to_ (User generated_id_91)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div @key=generated_id_101
data-kind="singleton"
src-name="A"
outside-id="generated_id_101"> A </div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_99
kind="mapn"
src-name="B"
outside-id="generated_id_99"
my-id="generated_id_99"
class="dest-class-generated_id_99"> B </div> |}]
;;
let%expect_test "Disjoint DAGs" =
let open Dummy_nodes in
let edges = [ a ==> b; c ==> d; e ==> f ] |> Edge.Set.of_list in
let nodes = map_with_ids [ a; b; c; d; e; f; g ] in
let dag_var = Bonsai.Var.create { edges; nodes } in
let dag = Bonsai.Var.value dag_var in
let handle = create_handle ~dag ~curr_id in
Handle.show handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
<svg> </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_117
data-kind="singleton"
src-name="A"
outside-id="generated_id_117"> A </div>
<div @key=generated_id_116
data-kind="singleton"
src-name="C"
outside-id="generated_id_116"> C </div>
<div @key=generated_id_115
data-kind="singleton"
src-name="E"
outside-id="generated_id_115"> E </div>
</div>
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_114
data-kind="singleton"
src-name="G"
outside-id="generated_id_114"> G </div>
<div @key=generated_id_113
kind="mapn"
src-name="B"
outside-id="generated_id_113"
my-id="generated_id_113"
class="dest-class-generated_id_113"> B </div>
<div @key=generated_id_109
kind="mapn"
src-name="D"
outside-id="generated_id_109"
my-id="generated_id_109"
class="dest-class-generated_id_109"> D </div>
<div @key=generated_id_105
kind="mapn"
src-name="F"
outside-id="generated_id_105"
my-id="generated_id_105"
class="dest-class-generated_id_105"> F </div>
</div>
</div>
<div> </div>
</div>
<div> </div>
</div>
</div> |}];
set_positions ~handle ~ids:[ a; b; c; d; e; f; g ];
Handle.show_diff handle;
[%expect
{|
<div class="map_hash_replaced_in_test testcase_hash_replaced_in_test">
-| <svg> </svg>
+| <svg>
+| <pre> (edge ((from (User generated_id_115)) (to_ (User generated_id_105)))) </pre>
+| <pre> (edge ((from (User generated_id_116)) (to_ (User generated_id_109)))) </pre>
+| <pre> (edge ((from (User generated_id_117)) (to_ (User generated_id_113)))) </pre>
+| </svg>
<div class="hbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div> </div>
<div class="vbox_hash_replaced_in_test">
<div class="hbox_hash_replaced_in_test">
<div @key=generated_id_117
data-kind="singleton"
src-name="A"
outside-id="generated_id_117"> A </div>
<div @key=generated_id_116
data-kind="singleton"
src-name="C"
outside-id="generated_id_116"> C </div>
<div @key=generated_id_115
data-kind="singleton" |}]
;;
|
056225ddd37d115e812766a12fe081fa7033add752e5530ab48bdad4760e2d83 | benoitc/hooks | hooks_test_SUITE.erl | %%%-------------------------------------------------------------------
@author benoitc
( C ) 2017 , < COMPANY >
%%% @doc
%%%
%%% @end
Created : 27 . Oct 2017 12:29
%%%-------------------------------------------------------------------
-module(hooks_test_SUITE).
-author("benoitc").
-compile(export_all).
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/1
]).
all() ->
[
basic_test,
mreg_test,
run_test,
all_test,
all_till_ok_test,
only_test,
plugin_test,
wait_for_proc_test
].
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(hooks),
Config.
end_per_suite(Config) ->
_ = application:stop(hooks),
Config.
init_per_testcase(_, Config) ->
Config.
end_per_testcase(_Config) ->
ok.
%% hooks for tests
hook1() ->
[].
hook1(A, B) ->
[A | B].
hook2() ->
[].
hook2(A, B) ->
[A | B].
hook3(A, B, C) ->
{hook3, [A, B, C]}.
hook_add(A) ->
A + A.
hook_add1(A) ->
A + A.
hook_add2(A) ->
A + 2 *A.
hooks_ok1() ->
next.
hooks_ok2() ->
ok.
hooks_ok3() ->
next.
hooks_ok4() ->
{ok, 0}.
%% tests
basic_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
lists:foreach(
fun({Name, Specs}) ->
[ok, ok] = [hooks:reg(Name, M, F, A) || {M, F, A} <- Specs]
end,
Hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
error = hooks:find(c),
ok = hooks:reg(c, ?MODULE, hook3, 3),
{ok, [{?MODULE, hook3}]} = hooks:find(c),
ok = hooks:unreg(c, ?MODULE, hook3, 3),
error = hooks:find(c),
{error, hooks_not_exported} = hooks:reg(c, ?MODULE, hook3, 2),
lists:foreach(
fun({Name, Specs}) ->
[ok, ok] = [hooks:unreg(Name, M, F, A) || {M, F, A} <- Specs]
end,
Hooks),
error = hooks:find(a),
error = hooks:find(b),
ok.
mreg_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
ok = hooks:mreg(Hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
ok = hooks:munreg(Hooks),
error = hooks:find(a),
error = hooks:find(b),
ok.
run_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]},
{c, [{?MODULE, hook_add, 1},
{?MODULE, hook_add1, 1}]}],
ok = hooks:mreg(Hooks),
ok = hooks:run(a, []),
[1, 1] = hooks:run_fold(a, [1], []),
{ok, [{?MODULE, hook_add}, {?MODULE, hook_add1}]} = hooks:find(c),
4 = hooks:run_fold(c, [], 1),
ok = hooks:munreg(Hooks),
ok.
all_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
ok = hooks:mreg(Hooks),
[[], []] = hooks:all(a, []),
[[1 | 1], [1 | 1]] = hooks:all(b, [1, 1]),
ok = hooks:munreg(Hooks),
ok.
all_till_ok_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hooks_ok1, 0}]},
{b, [{?MODULE, hooks_ok1, 0},
{?MODULE, hooks_ok2, 0}]},
{c, [{?MODULE, hooks_ok1, 0},
{?MODULE, hooks_ok3, 0},
{?MODULE, hooks_ok4, 0}]}],
ok = hooks:mreg(Hooks),
{error, [next]} = hooks:all_till_ok(a, []),
ok = hooks:all_till_ok(b, []),
{ok, 0} = hooks:all_till_ok(c, []),
ok = hooks:munreg(Hooks),
ok.
only_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
ok = hooks:reg(a, ?MODULE, hook_add, 1, 10),
ok = hooks:reg(a, ?MODULE, hook_add2, 1, 0),
{ok, [{?MODULE, hook_add2}, {?MODULE, hook_add}]} = hooks:find(a),
3 = hooks:only(a, [1]),
ok = hooks:unreg(a, ?MODULE, hook_add, 1, 10),
ok = hooks:unreg(a, ?MODULE, hook_add2, 1, 0),
error = hooks:find(a),
ok.
plugin_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
application:set_env(hooks, hooks, Hooks),
hooks:enable_plugin(hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
ok = hooks:disable_plugin(hooks),
error = hooks:find(a),
error = hooks:find(b),
ok = hooks:disable_plugin(hooks),
ok.
init_test_wait_loop(Server) ->
timer:sleep(300),
register(test, self()),
timer:sleep(200),
Server ! self(),
test_wait_loop().
test_wait_loop() ->
timer:sleep(10),
test_wait_loop().
wait_for_proc_test(_) ->
error_logger:tty(false),
application:stop(hooks),
error_logger:tty(true),
application:set_env(hooks, wait_for_proc, test),
application:set_env(hooks, hooks, []),
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
hooks:mreg(Hooks),
error = hooks:find(a) ,
Self = self(),
Pid = spawn_link(fun() -> init_test_wait_loop(Self) end),
receive
Pid -> ok
end,
FoundA = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = FoundA,
exit(Pid, normal),
hooks:munreg(Hooks),
ok.
| null | https://raw.githubusercontent.com/benoitc/hooks/b976cf706f75546cf68ecd341c8d94205b433e75/test/hooks_test_SUITE.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
hooks for tests
tests | @author benoitc
( C ) 2017 , < COMPANY >
Created : 27 . Oct 2017 12:29
-module(hooks_test_SUITE).
-author("benoitc").
-compile(export_all).
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/1
]).
all() ->
[
basic_test,
mreg_test,
run_test,
all_test,
all_till_ok_test,
only_test,
plugin_test,
wait_for_proc_test
].
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(hooks),
Config.
end_per_suite(Config) ->
_ = application:stop(hooks),
Config.
init_per_testcase(_, Config) ->
Config.
end_per_testcase(_Config) ->
ok.
hook1() ->
[].
hook1(A, B) ->
[A | B].
hook2() ->
[].
hook2(A, B) ->
[A | B].
hook3(A, B, C) ->
{hook3, [A, B, C]}.
hook_add(A) ->
A + A.
hook_add1(A) ->
A + A.
hook_add2(A) ->
A + 2 *A.
hooks_ok1() ->
next.
hooks_ok2() ->
ok.
hooks_ok3() ->
next.
hooks_ok4() ->
{ok, 0}.
basic_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
lists:foreach(
fun({Name, Specs}) ->
[ok, ok] = [hooks:reg(Name, M, F, A) || {M, F, A} <- Specs]
end,
Hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
error = hooks:find(c),
ok = hooks:reg(c, ?MODULE, hook3, 3),
{ok, [{?MODULE, hook3}]} = hooks:find(c),
ok = hooks:unreg(c, ?MODULE, hook3, 3),
error = hooks:find(c),
{error, hooks_not_exported} = hooks:reg(c, ?MODULE, hook3, 2),
lists:foreach(
fun({Name, Specs}) ->
[ok, ok] = [hooks:unreg(Name, M, F, A) || {M, F, A} <- Specs]
end,
Hooks),
error = hooks:find(a),
error = hooks:find(b),
ok.
mreg_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
ok = hooks:mreg(Hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
ok = hooks:munreg(Hooks),
error = hooks:find(a),
error = hooks:find(b),
ok.
run_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]},
{c, [{?MODULE, hook_add, 1},
{?MODULE, hook_add1, 1}]}],
ok = hooks:mreg(Hooks),
ok = hooks:run(a, []),
[1, 1] = hooks:run_fold(a, [1], []),
{ok, [{?MODULE, hook_add}, {?MODULE, hook_add1}]} = hooks:find(c),
4 = hooks:run_fold(c, [], 1),
ok = hooks:munreg(Hooks),
ok.
all_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
ok = hooks:mreg(Hooks),
[[], []] = hooks:all(a, []),
[[1 | 1], [1 | 1]] = hooks:all(b, [1, 1]),
ok = hooks:munreg(Hooks),
ok.
all_till_ok_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hooks_ok1, 0}]},
{b, [{?MODULE, hooks_ok1, 0},
{?MODULE, hooks_ok2, 0}]},
{c, [{?MODULE, hooks_ok1, 0},
{?MODULE, hooks_ok3, 0},
{?MODULE, hooks_ok4, 0}]}],
ok = hooks:mreg(Hooks),
{error, [next]} = hooks:all_till_ok(a, []),
ok = hooks:all_till_ok(b, []),
{ok, 0} = hooks:all_till_ok(c, []),
ok = hooks:munreg(Hooks),
ok.
only_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
ok = hooks:reg(a, ?MODULE, hook_add, 1, 10),
ok = hooks:reg(a, ?MODULE, hook_add2, 1, 0),
{ok, [{?MODULE, hook_add2}, {?MODULE, hook_add}]} = hooks:find(a),
3 = hooks:only(a, [1]),
ok = hooks:unreg(a, ?MODULE, hook_add, 1, 10),
ok = hooks:unreg(a, ?MODULE, hook_add2, 1, 0),
error = hooks:find(a),
ok.
plugin_test(_) ->
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
application:set_env(hooks, hooks, Hooks),
hooks:enable_plugin(hooks),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = hooks:find(b),
ok = hooks:disable_plugin(hooks),
error = hooks:find(a),
error = hooks:find(b),
ok = hooks:disable_plugin(hooks),
ok.
init_test_wait_loop(Server) ->
timer:sleep(300),
register(test, self()),
timer:sleep(200),
Server ! self(),
test_wait_loop().
test_wait_loop() ->
timer:sleep(10),
test_wait_loop().
wait_for_proc_test(_) ->
error_logger:tty(false),
application:stop(hooks),
error_logger:tty(true),
application:set_env(hooks, wait_for_proc, test),
application:set_env(hooks, hooks, []),
{ok, _} = application:ensure_all_started(hooks),
Hooks = [{a, [{?MODULE, hook1, 0},
{?MODULE, hook2, 0}]},
{b, [{?MODULE, hook1, 2},
{?MODULE, hook2, 2}]}],
hooks:mreg(Hooks),
error = hooks:find(a) ,
Self = self(),
Pid = spawn_link(fun() -> init_test_wait_loop(Self) end),
receive
Pid -> ok
end,
FoundA = hooks:find(a),
{ok, [{?MODULE, hook1}, {?MODULE, hook2}]} = FoundA,
exit(Pid, normal),
hooks:munreg(Hooks),
ok.
|
1aa61669adc5d378c4240ceb7dd68ceb7cbd13546f93cf6b9496bb01931b3733 | 0zat/gen-bs | draw_objects.ml | open Servo_dom
module Math = Js.Math
type elem = {
mutable x: float;
mutable y: float;
coordinates: (float * float) Queue.t;
mutable speed: float;
friction: float;
brightness: float;
hue: float;
gravity: float;
mutable alpha: float;
decay: float;
angle_rad: float;
}
let random min max = (Math.random ()) *. (max -. min) +. min
let create_elem ~coord_count ~x ~y ~speed ~friction ~brightness ~hue ~gravity ~alpha ~decay ~angle_rad =
let coordinates = Queue.create () in
Array.make coord_count (x, y)
|> Array.iter (fun x -> Queue.add x coordinates)
;
{
x ;
y ;
coordinates;
speed ;
friction ;
brightness ;
hue ;
gravity ;
alpha ;
decay ;
angle_rad
}
module type Conf = sig
val is_delete_elem: elem -> bool
val action_when_delete: elem -> unit
val create_elem: float -> float -> float -> elem
end
module Make (Conf: Conf) = struct
let elems = ref []
let update_elem elem =
Queue.pop elem.coordinates;
Queue.add (elem.x, elem.y) elem.coordinates;
elem.speed <- elem.speed *. elem.friction;
elem.x <- elem.x +. (Math.cos elem.angle_rad) *. elem.speed;
elem.y <- elem.y +. (Math.sin elem.angle_rad) *. elem.speed +. elem.gravity;
elem.alpha <- elem.alpha -. elem.decay;
if Conf.is_delete_elem elem then
(Conf.action_when_delete elem; false)
else
true
let draw_elem ctx elem =
let open CanvasRenderingContext2D in
beginPath ctx;
let x, y = Queue.peek elem.coordinates in
moveTo ~x ~y ctx;
lineTo elem.x elem.y ctx;
let make_stroke_style elem =
Printf.sprintf "hsla(%f, 100%%, %f%%,%f)" elem.hue elem.brightness elem.alpha
in
setStrokeStyle ctx (make_stroke_style elem);
stroke ctx
let update () =
elems := List.filter (fun elem -> update_elem elem) !elems
let draw ctx =
List.iter (draw_elem ctx) !elems
let create count base_hue x y =
for _ = 1 to count do
let elem = Conf.create_elem base_hue x y in
elems := elem :: !elems
done
end | null | https://raw.githubusercontent.com/0zat/gen-bs/20348991775d9ef3974c3b824968a0ab219502a8/example/firework/src/draw_objects.ml | ocaml | open Servo_dom
module Math = Js.Math
type elem = {
mutable x: float;
mutable y: float;
coordinates: (float * float) Queue.t;
mutable speed: float;
friction: float;
brightness: float;
hue: float;
gravity: float;
mutable alpha: float;
decay: float;
angle_rad: float;
}
let random min max = (Math.random ()) *. (max -. min) +. min
let create_elem ~coord_count ~x ~y ~speed ~friction ~brightness ~hue ~gravity ~alpha ~decay ~angle_rad =
let coordinates = Queue.create () in
Array.make coord_count (x, y)
|> Array.iter (fun x -> Queue.add x coordinates)
;
{
x ;
y ;
coordinates;
speed ;
friction ;
brightness ;
hue ;
gravity ;
alpha ;
decay ;
angle_rad
}
module type Conf = sig
val is_delete_elem: elem -> bool
val action_when_delete: elem -> unit
val create_elem: float -> float -> float -> elem
end
module Make (Conf: Conf) = struct
let elems = ref []
let update_elem elem =
Queue.pop elem.coordinates;
Queue.add (elem.x, elem.y) elem.coordinates;
elem.speed <- elem.speed *. elem.friction;
elem.x <- elem.x +. (Math.cos elem.angle_rad) *. elem.speed;
elem.y <- elem.y +. (Math.sin elem.angle_rad) *. elem.speed +. elem.gravity;
elem.alpha <- elem.alpha -. elem.decay;
if Conf.is_delete_elem elem then
(Conf.action_when_delete elem; false)
else
true
let draw_elem ctx elem =
let open CanvasRenderingContext2D in
beginPath ctx;
let x, y = Queue.peek elem.coordinates in
moveTo ~x ~y ctx;
lineTo elem.x elem.y ctx;
let make_stroke_style elem =
Printf.sprintf "hsla(%f, 100%%, %f%%,%f)" elem.hue elem.brightness elem.alpha
in
setStrokeStyle ctx (make_stroke_style elem);
stroke ctx
let update () =
elems := List.filter (fun elem -> update_elem elem) !elems
let draw ctx =
List.iter (draw_elem ctx) !elems
let create count base_hue x y =
for _ = 1 to count do
let elem = Conf.create_elem base_hue x y in
elems := elem :: !elems
done
end |
|
50f9b1e132bdd26f525ff0420083d2c6488fa3069a85f5fe1bee26338a194b9f | BinaryAnalysisPlatform/bap | demangle_main.ml | open Core_kernel[@@warning "-D"]
open Bap_core_theory
open Bap_main
open Bap_demangle.Std
let doc = "
# DESCRIPTION
Performs ABI-specific name resolution and demangling.
"
let provides = [
"symbolizer";
"demangling";
"demangler";
"symbolizer";
]
let override = Extension.Configuration.parameter
~doc:"Overrides the default name demangler with the specified one"
Extension.Type.(some string) "override"
~aliases:["with"]
module Internal = struct
let maybe_mangled name =
String.length name > 2 &&
Char.(name.[0] = '_') &&
Char.is_uppercase name.[1] &&
Char.is_alpha name.[1]
let demangle_internal str =
let open String in
let open Option.Monad_infix in
let extract_number pos_ref =
lfindi str ~pos:!pos_ref ~f:(fun _ c -> Char.is_digit c)
>>= fun s1_p0 ->
lfindi str ~pos:s1_p0 ~f:(fun _ c -> not (Char.is_digit c))
>>= fun s1_p1 ->
let len = (s1_p1 - s1_p0) in
let str = Bytes.of_string str in
let n = Substring.create ~pos:s1_p0 ~len str |>
Substring.to_string |> Int.of_string in
pos_ref := s1_p0 + len;
Some n in
let extract_name pos_ref =
let str = Bytes.of_string str in
extract_number pos_ref >>= fun len ->
let name = Substring.create ~pos:!pos_ref ~len str |>
Substring.to_string in
pos_ref := !pos_ref + len;
Some name in
let pos = ref 0 in
let rec extract_names acc =
match extract_name pos with
| None | Some "" -> List.rev acc
| Some name -> extract_names (name::acc) in
match extract_names [] |> String.concat ~sep:"::" with
| "" -> str
| s -> s
let demangle_internal name =
if maybe_mangled name then
Option.try_with (fun () -> demangle_internal name)
else None
let run name =
Option.value_map ~default:name ~f:Fn.id (demangle_internal name)
let install () =
Demangler.declare ~package:"bap" "internal" run
end
let decide_name_from_possible_name ?override () : unit =
let open KB.Syntax in
KB.Rule.(declare ~package:"core" "name-of-possible-names" |>
require Theory.Label.possible_name |>
provide Theory.Label.name |>
comment "resolves and demangles symbol's name");
KB.promise Theory.Label.name @@ fun lbl ->
let* target = Theory.Label.target lbl in
let demangler = match override with
| None -> Demanglers.select target
| Some name -> Demanglers.get ~package:"bap" name in
let+ name = KB.resolve Theory.Label.possible_name lbl in
Option.map name ~f:(Demangler.run demangler)
let () = Extension.declare ~provides ~doc @@ fun ctxt ->
let override = Extension.Configuration.get ctxt override in
decide_name_from_possible_name ?override ();
Internal.install ();
Ok ()
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/cbdf732d46c8e38df79d9942fc49bcb97915c657/plugins/demangle/demangle_main.ml | ocaml | open Core_kernel[@@warning "-D"]
open Bap_core_theory
open Bap_main
open Bap_demangle.Std
let doc = "
# DESCRIPTION
Performs ABI-specific name resolution and demangling.
"
let provides = [
"symbolizer";
"demangling";
"demangler";
"symbolizer";
]
let override = Extension.Configuration.parameter
~doc:"Overrides the default name demangler with the specified one"
Extension.Type.(some string) "override"
~aliases:["with"]
module Internal = struct
let maybe_mangled name =
String.length name > 2 &&
Char.(name.[0] = '_') &&
Char.is_uppercase name.[1] &&
Char.is_alpha name.[1]
let demangle_internal str =
let open String in
let open Option.Monad_infix in
let extract_number pos_ref =
lfindi str ~pos:!pos_ref ~f:(fun _ c -> Char.is_digit c)
>>= fun s1_p0 ->
lfindi str ~pos:s1_p0 ~f:(fun _ c -> not (Char.is_digit c))
>>= fun s1_p1 ->
let len = (s1_p1 - s1_p0) in
let str = Bytes.of_string str in
let n = Substring.create ~pos:s1_p0 ~len str |>
Substring.to_string |> Int.of_string in
pos_ref := s1_p0 + len;
Some n in
let extract_name pos_ref =
let str = Bytes.of_string str in
extract_number pos_ref >>= fun len ->
let name = Substring.create ~pos:!pos_ref ~len str |>
Substring.to_string in
pos_ref := !pos_ref + len;
Some name in
let pos = ref 0 in
let rec extract_names acc =
match extract_name pos with
| None | Some "" -> List.rev acc
| Some name -> extract_names (name::acc) in
match extract_names [] |> String.concat ~sep:"::" with
| "" -> str
| s -> s
let demangle_internal name =
if maybe_mangled name then
Option.try_with (fun () -> demangle_internal name)
else None
let run name =
Option.value_map ~default:name ~f:Fn.id (demangle_internal name)
let install () =
Demangler.declare ~package:"bap" "internal" run
end
let decide_name_from_possible_name ?override () : unit =
let open KB.Syntax in
KB.Rule.(declare ~package:"core" "name-of-possible-names" |>
require Theory.Label.possible_name |>
provide Theory.Label.name |>
comment "resolves and demangles symbol's name");
KB.promise Theory.Label.name @@ fun lbl ->
let* target = Theory.Label.target lbl in
let demangler = match override with
| None -> Demanglers.select target
| Some name -> Demanglers.get ~package:"bap" name in
let+ name = KB.resolve Theory.Label.possible_name lbl in
Option.map name ~f:(Demangler.run demangler)
let () = Extension.declare ~provides ~doc @@ fun ctxt ->
let override = Extension.Configuration.get ctxt override in
decide_name_from_possible_name ?override ();
Internal.install ();
Ok ()
|
|
e1a18c0e9c3354b22da1cdf6ff4e232c8c12a14937365892c73487555d3c136f | boomerang-lang/boomerang | my_priority_queue.ml | open Core
open Util
open My_set
open My_heap
module type DataWithPriority =
sig
type t
val show : t shower
val pp : t pper
val compare : t comparer
val hash : t hasher
val hash_fold_t : t hash_folder
val priority : t -> float
end
module PriorityQueueOf(D:DataWithPriority) =
struct
module QueueHeap =
HeapOf(
struct
type t = (D.t * float)
[@@deriving show, hash]
let compare =
(fun (_,f1) (_,f2) ->
(Float.compare f1 f2))
let to_string = fun _ -> "hi"
end)
module PushedSet =
SetOf(D)
type t = QueueHeap.t * PushedSet.t
[@@deriving show, hash]
type element = D.t
let empty = (QueueHeap.empty, PushedSet.empty)
let push ((h,s):t) (e:element) : t =
if PushedSet.member s e then
(h,s)
else
let s' = PushedSet.insert e s in
let pri = D.priority e in
let h' = QueueHeap.push h (e,pri) in
(h',s')
let push_all (q:t) (es:element list) : t =
List.fold_left
~f:(fun q e -> push q e)
~init:q
es
let from_list (es:element list) : t =
push_all empty es
let singleton (e:element) : t =
from_list [e]
let pop ((h,s):t) : (D.t * float * t) option =
Option.map ~f:(fun ((e,p),h') -> (e,p,(h',s))) (QueueHeap.pop h)
let pop_exn (q:t) : D.t * float * t =
begin match pop q with
| None -> failwith "failure: pop_exn"
| Some e -> e
end
let peek : t -> D.t option =
Option.map ~f:fst_trip % pop
let peek_exn : t -> D.t =
fst_trip % pop_exn
let delete : t -> t option =
Option.map ~f:trd_trip % pop
let delete_exn : t -> t =
trd_trip % pop_exn
let all_remaining ((h,_):t) : (D.t * float) list =
QueueHeap.to_list h
let rec pop_until_min_pri_greater_than
(q:t)
(f:float)
: (element * float) list * t =
begin match pop q with
| None -> ([],q)
| Some (e,f',q') ->
if f' > f then
([],q)
else
let (efs,q'') = pop_until_min_pri_greater_than q' f in
((e,f')::efs,q'')
end
let length ((h,_):t) : int = QueueHeap.size h
let compare
: (QueueHeap.t * PushedSet.t) comparer =
let real_heap_compare
(qh1:QueueHeap.t)
(qh2:QueueHeap.t)
: comparison =
let ordered_qhl1 =
List.sort
~cmp:D.compare
(List.map ~f:fst (QueueHeap.to_list qh1))
in
let ordered_qhl2 =
List.sort
~cmp:D.compare
(List.map ~f:fst (QueueHeap.to_list qh2))
in
compare_list
~cmp:D.compare
ordered_qhl1
ordered_qhl2
in
pair_compare
real_heap_compare
PushedSet.compare
end
| null | https://raw.githubusercontent.com/boomerang-lang/boomerang/b42c2bfc72030bbe5c32752c236c0aad3b17d149/stdlib/my_priority_queue.ml | ocaml | open Core
open Util
open My_set
open My_heap
module type DataWithPriority =
sig
type t
val show : t shower
val pp : t pper
val compare : t comparer
val hash : t hasher
val hash_fold_t : t hash_folder
val priority : t -> float
end
module PriorityQueueOf(D:DataWithPriority) =
struct
module QueueHeap =
HeapOf(
struct
type t = (D.t * float)
[@@deriving show, hash]
let compare =
(fun (_,f1) (_,f2) ->
(Float.compare f1 f2))
let to_string = fun _ -> "hi"
end)
module PushedSet =
SetOf(D)
type t = QueueHeap.t * PushedSet.t
[@@deriving show, hash]
type element = D.t
let empty = (QueueHeap.empty, PushedSet.empty)
let push ((h,s):t) (e:element) : t =
if PushedSet.member s e then
(h,s)
else
let s' = PushedSet.insert e s in
let pri = D.priority e in
let h' = QueueHeap.push h (e,pri) in
(h',s')
let push_all (q:t) (es:element list) : t =
List.fold_left
~f:(fun q e -> push q e)
~init:q
es
let from_list (es:element list) : t =
push_all empty es
let singleton (e:element) : t =
from_list [e]
let pop ((h,s):t) : (D.t * float * t) option =
Option.map ~f:(fun ((e,p),h') -> (e,p,(h',s))) (QueueHeap.pop h)
let pop_exn (q:t) : D.t * float * t =
begin match pop q with
| None -> failwith "failure: pop_exn"
| Some e -> e
end
let peek : t -> D.t option =
Option.map ~f:fst_trip % pop
let peek_exn : t -> D.t =
fst_trip % pop_exn
let delete : t -> t option =
Option.map ~f:trd_trip % pop
let delete_exn : t -> t =
trd_trip % pop_exn
let all_remaining ((h,_):t) : (D.t * float) list =
QueueHeap.to_list h
let rec pop_until_min_pri_greater_than
(q:t)
(f:float)
: (element * float) list * t =
begin match pop q with
| None -> ([],q)
| Some (e,f',q') ->
if f' > f then
([],q)
else
let (efs,q'') = pop_until_min_pri_greater_than q' f in
((e,f')::efs,q'')
end
let length ((h,_):t) : int = QueueHeap.size h
let compare
: (QueueHeap.t * PushedSet.t) comparer =
let real_heap_compare
(qh1:QueueHeap.t)
(qh2:QueueHeap.t)
: comparison =
let ordered_qhl1 =
List.sort
~cmp:D.compare
(List.map ~f:fst (QueueHeap.to_list qh1))
in
let ordered_qhl2 =
List.sort
~cmp:D.compare
(List.map ~f:fst (QueueHeap.to_list qh2))
in
compare_list
~cmp:D.compare
ordered_qhl1
ordered_qhl2
in
pair_compare
real_heap_compare
PushedSet.compare
end
|
|
cb4c4f612b06e54c3ee017b41a117249651a42a0070e3a3fff0bf4ca6b5824f2 | noinia/hgeometry | RIC.hs | --------------------------------------------------------------------------------
-- |
Module : Algorithms . Geometry . SmallestEnclosingBall . RIC
Copyright : ( C )
-- License : see the LICENSE file
Maintainer :
--
-- An randomized algorithm to compute the smallest enclosing disk of a set of
-- \(n\) points in \(\mathbb{R}^2\). The expected running time is \(O(n)\).
--
--------------------------------------------------------------------------------
module Algorithms.Geometry.SmallestEnclosingBall.RIC(
smallestEnclosingDisk'
, smallestEnclosingDisk
, smallestEnclosingDiskWithPoint
, smallestEnclosingDiskWithPoints
) where
import Algorithms.Geometry.SmallestEnclosingBall.Types
import Control.Lens
import Control.Monad.Random.Class
import Data.Ext
import qualified Data.Foldable as F
import Geometry.Point
import Geometry.Ball
import qualified Data.List as List
import Data.List.NonEmpty(NonEmpty(..))
import Data.Maybe (fromMaybe, mapMaybe, catMaybes)
import Data.Ord (comparing)
import System.Random.Shuffle (shuffle)
import Data . RealNumber . Rational
-- import Debug.Trace
--------------------------------------------------------------------------------
-- | Compute the smallest enclosing disk of a set of points,
-- implemented using randomized incremental construction.
--
pre : the input has at least two points .
--
-- running time: expected \(O(n)\) time, where \(n\) is the number of input points.
smallestEnclosingDisk :: (Ord r, Fractional r, MonadRandom m
, Show r , Show p
)
=> [Point 2 r :+ p]
-> m (DiskResult p r)
smallestEnclosingDisk pts@(_:_:_) = (\(p:q:pts') -> smallestEnclosingDisk' p q pts')
. F.toList <$> shuffle pts
smallestEnclosingDisk _ = error "smallestEnclosingDisk: Too few points"
-- | Smallest enclosing disk.
smallestEnclosingDisk' :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> Point 2 r :+ p -> [Point 2 r :+ p]
-> DiskResult p r
smallestEnclosingDisk' a b = foldr addPoint (initial a b) . List.tails
where
-- The empty case occurs only initially
addPoint [] br = br
addPoint (p:pts) br@(DiskResult d _)
| (p^.core) `inClosedBall` d = br
| otherwise = fromJust' $ smallestEnclosingDiskWithPoint p (a :| (b : pts))
fromJust' = fromMaybe (error "smallestEncosingDisk' : fromJust, absurd")
| Smallest enclosing disk , given that p should be on it .
smallestEnclosingDiskWithPoint :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> NonEmpty (Point 2 r :+ p)
-> Maybe (DiskResult p r)
smallestEnclosingDiskWithPoint p (a :| pts) = foldr addPoint (Just $ initial p a) $ List.tails pts
where
addPoint [] br = br
addPoint (q:pts') br@(Just (DiskResult d _))
| (q^.core) `inClosedBall` d = br
| otherwise = smallestEnclosingDiskWithPoints p q (a:pts')
addPoint _ br = br
| Smallest enclosing disk , given that p and q should be on it
--
-- running time: \(O(n)\)
smallestEnclosingDiskWithPoints :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> Point 2 r :+ p -> [Point 2 r :+ p]
-> Maybe (DiskResult p r)
smallestEnclosingDiskWithPoints p q ps = minimumOn (^.enclosingDisk.squaredRadius)
$ catMaybes [mkEnclosingDisk dl, mkEnclosingDisk dr, mdc]
where
centers = mapMaybe disk' ps
-- generate a disk with p q and r
disk' r = (r:+) <$> disk (p^.core) (q^.core) (r^.core)
-- partition the points in to those on the left and those on the
-- right. Note that centers still contains only those points (and
disks ) for which the three points are not colinear . So the
-- points are either on the left or on the right.
(leftCenters,rightCenters) = List.partition (\(r :+ _) -> ccw p q r == CCW) centers
-- note that we consider 'leftmost' with respect to going from p
-- to q. This does not really have a global meaning.
-- we need to find the leftmost and rightmost center on the
-- bisector. In case there are left-centers, this means that among
-- the left centers we want to find the point that is furthest way
from p ( or q ) . If there are no left - centers , we with to find
-- the closest one among the right-centers.
leftDist z = let c = z^.extra.center
s = if ccw p q c == CCW then 1 else -1
in s * squaredEuclideanDist (p^.core) (c^.core)
dl = maximumOn leftDist leftCenters -- disk that has the "leftmost" center
dr = minimumOn leftDist rightCenters -- disk that has the "rightmost" center
-- diameteral disk
dd = fromDiameter (p^.core) (q^.core)
mdc | isEnclosingDisk dd ps = Just $ DiskResult dd (Two p q)
| otherwise = Nothing
-- test if d is an enclosing disk.
mkEnclosingDisk md = md >>= mkEnclosingDisk'
mkEnclosingDisk' (r :+ d) | isEnclosingDisk d ps = Just (DiskResult d (Three p q r))
| otherwise = Nothing
isEnclosingDisk :: (Foldable t, Ord r, Num r)
=> Disk p r -> t (Point 2 r :+ extra) -> Bool
isEnclosingDisk d = all (\s -> (s^.core) `inClosedBall` d)
| Constructs the initial ' DiskResult ' from two points
initial :: Fractional r => Point 2 r :+ p -> Point 2 r :+ p -> DiskResult p r
initial p q = DiskResult (fromDiameter (p^.core) (q^.core)) (Two p q)
maximumOn :: Ord b => (a -> b) -> [a] -> Maybe a
maximumOn f = \case
[] -> Nothing
xs -> Just $ List.maximumBy (comparing f) xs
minimumOn :: Ord b => (a -> b) -> [a] -> Maybe a
minimumOn f = \case
[] -> Nothing
xs -> Just $ List.minimumBy (comparing f) xs
--------------------------------------------------------------------------------
test : : Maybe ( DiskResult ( ) Rational )
test = smallestEnclosingDiskWithPoints p q myPts
-- where
-- p = ext $ Point2 0 (-6)
-- q = ext $ Point2 0 6
myPts = map ext [ Point2 5 1 , Point2 3 3 , Point2 ( -2 ) 2 , Point2 ( -4 ) 5 ]
-- disk'' r = (r:+) <$> disk (p^.core) (q^.core) (r^.core)
-- where
-- p = ext $ Point2 0 (-6)
-- q = ext $ Point2 0 6
maartenBug : : ( ) Double
-- maartenBug = let (p:q:rest) = maartenBug'
-- in smallestEnclosingDisk' p q rest
-- maartenBug' :: [Point 2 Double :+ ()]
maartenBug ' = [ Point2 ( 7.2784424e-3 ) ( 249.23 ) : + ( )
, Point2 ( -5.188493 ) ( 249.23 ) : + ( )
, Point2 ( -10.382694 ) ( 249.23 ) : + ( )
, Point2 ( -15.575621 ) ( 249.23 ) : + ( )
, Point2 ( 0.0 ) ( 249.23 ) : + ( )
, Point2 ( 0.0 ) ( 239.9031 ) : + ( )
, Point2 ( 0.0 ) ( 230.37791 ) : + ( )
, Point2 ( 0.0 ) ( 220.67882 ) : + ( )
-- ]
| null | https://raw.githubusercontent.com/noinia/hgeometry/89cd3d3109ec68f877bf8e34dc34b6df337a4ec1/hgeometry/src/Algorithms/Geometry/SmallestEnclosingBall/RIC.hs | haskell | ------------------------------------------------------------------------------
|
License : see the LICENSE file
An randomized algorithm to compute the smallest enclosing disk of a set of
\(n\) points in \(\mathbb{R}^2\). The expected running time is \(O(n)\).
------------------------------------------------------------------------------
import Debug.Trace
------------------------------------------------------------------------------
| Compute the smallest enclosing disk of a set of points,
implemented using randomized incremental construction.
running time: expected \(O(n)\) time, where \(n\) is the number of input points.
| Smallest enclosing disk.
The empty case occurs only initially
running time: \(O(n)\)
generate a disk with p q and r
partition the points in to those on the left and those on the
right. Note that centers still contains only those points (and
points are either on the left or on the right.
note that we consider 'leftmost' with respect to going from p
to q. This does not really have a global meaning.
we need to find the leftmost and rightmost center on the
bisector. In case there are left-centers, this means that among
the left centers we want to find the point that is furthest way
the closest one among the right-centers.
disk that has the "leftmost" center
disk that has the "rightmost" center
diameteral disk
test if d is an enclosing disk.
------------------------------------------------------------------------------
where
p = ext $ Point2 0 (-6)
q = ext $ Point2 0 6
disk'' r = (r:+) <$> disk (p^.core) (q^.core) (r^.core)
where
p = ext $ Point2 0 (-6)
q = ext $ Point2 0 6
maartenBug = let (p:q:rest) = maartenBug'
in smallestEnclosingDisk' p q rest
maartenBug' :: [Point 2 Double :+ ()]
] | Module : Algorithms . Geometry . SmallestEnclosingBall . RIC
Copyright : ( C )
Maintainer :
module Algorithms.Geometry.SmallestEnclosingBall.RIC(
smallestEnclosingDisk'
, smallestEnclosingDisk
, smallestEnclosingDiskWithPoint
, smallestEnclosingDiskWithPoints
) where
import Algorithms.Geometry.SmallestEnclosingBall.Types
import Control.Lens
import Control.Monad.Random.Class
import Data.Ext
import qualified Data.Foldable as F
import Geometry.Point
import Geometry.Ball
import qualified Data.List as List
import Data.List.NonEmpty(NonEmpty(..))
import Data.Maybe (fromMaybe, mapMaybe, catMaybes)
import Data.Ord (comparing)
import System.Random.Shuffle (shuffle)
import Data . RealNumber . Rational
pre : the input has at least two points .
smallestEnclosingDisk :: (Ord r, Fractional r, MonadRandom m
, Show r , Show p
)
=> [Point 2 r :+ p]
-> m (DiskResult p r)
smallestEnclosingDisk pts@(_:_:_) = (\(p:q:pts') -> smallestEnclosingDisk' p q pts')
. F.toList <$> shuffle pts
smallestEnclosingDisk _ = error "smallestEnclosingDisk: Too few points"
smallestEnclosingDisk' :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> Point 2 r :+ p -> [Point 2 r :+ p]
-> DiskResult p r
smallestEnclosingDisk' a b = foldr addPoint (initial a b) . List.tails
where
addPoint [] br = br
addPoint (p:pts) br@(DiskResult d _)
| (p^.core) `inClosedBall` d = br
| otherwise = fromJust' $ smallestEnclosingDiskWithPoint p (a :| (b : pts))
fromJust' = fromMaybe (error "smallestEncosingDisk' : fromJust, absurd")
| Smallest enclosing disk , given that p should be on it .
smallestEnclosingDiskWithPoint :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> NonEmpty (Point 2 r :+ p)
-> Maybe (DiskResult p r)
smallestEnclosingDiskWithPoint p (a :| pts) = foldr addPoint (Just $ initial p a) $ List.tails pts
where
addPoint [] br = br
addPoint (q:pts') br@(Just (DiskResult d _))
| (q^.core) `inClosedBall` d = br
| otherwise = smallestEnclosingDiskWithPoints p q (a:pts')
addPoint _ br = br
| Smallest enclosing disk , given that p and q should be on it
smallestEnclosingDiskWithPoints :: (Ord r, Fractional r
, Show r , Show p
)
=> Point 2 r :+ p -> Point 2 r :+ p -> [Point 2 r :+ p]
-> Maybe (DiskResult p r)
smallestEnclosingDiskWithPoints p q ps = minimumOn (^.enclosingDisk.squaredRadius)
$ catMaybes [mkEnclosingDisk dl, mkEnclosingDisk dr, mdc]
where
centers = mapMaybe disk' ps
disk' r = (r:+) <$> disk (p^.core) (q^.core) (r^.core)
disks ) for which the three points are not colinear . So the
(leftCenters,rightCenters) = List.partition (\(r :+ _) -> ccw p q r == CCW) centers
from p ( or q ) . If there are no left - centers , we with to find
leftDist z = let c = z^.extra.center
s = if ccw p q c == CCW then 1 else -1
in s * squaredEuclideanDist (p^.core) (c^.core)
dd = fromDiameter (p^.core) (q^.core)
mdc | isEnclosingDisk dd ps = Just $ DiskResult dd (Two p q)
| otherwise = Nothing
mkEnclosingDisk md = md >>= mkEnclosingDisk'
mkEnclosingDisk' (r :+ d) | isEnclosingDisk d ps = Just (DiskResult d (Three p q r))
| otherwise = Nothing
isEnclosingDisk :: (Foldable t, Ord r, Num r)
=> Disk p r -> t (Point 2 r :+ extra) -> Bool
isEnclosingDisk d = all (\s -> (s^.core) `inClosedBall` d)
| Constructs the initial ' DiskResult ' from two points
initial :: Fractional r => Point 2 r :+ p -> Point 2 r :+ p -> DiskResult p r
initial p q = DiskResult (fromDiameter (p^.core) (q^.core)) (Two p q)
maximumOn :: Ord b => (a -> b) -> [a] -> Maybe a
maximumOn f = \case
[] -> Nothing
xs -> Just $ List.maximumBy (comparing f) xs
minimumOn :: Ord b => (a -> b) -> [a] -> Maybe a
minimumOn f = \case
[] -> Nothing
xs -> Just $ List.minimumBy (comparing f) xs
test : : Maybe ( DiskResult ( ) Rational )
test = smallestEnclosingDiskWithPoints p q myPts
myPts = map ext [ Point2 5 1 , Point2 3 3 , Point2 ( -2 ) 2 , Point2 ( -4 ) 5 ]
maartenBug : : ( ) Double
maartenBug ' = [ Point2 ( 7.2784424e-3 ) ( 249.23 ) : + ( )
, Point2 ( -5.188493 ) ( 249.23 ) : + ( )
, Point2 ( -10.382694 ) ( 249.23 ) : + ( )
, Point2 ( -15.575621 ) ( 249.23 ) : + ( )
, Point2 ( 0.0 ) ( 249.23 ) : + ( )
, Point2 ( 0.0 ) ( 239.9031 ) : + ( )
, Point2 ( 0.0 ) ( 230.37791 ) : + ( )
, Point2 ( 0.0 ) ( 220.67882 ) : + ( )
|
4316d462eb48e6ec54f32eeda2a7418a6f653e37c6ae609a9fb3137324e1ba66 | skogsbaer/HTF | B.hs | # OPTIONS_GHC -F -pgmF ./scripts / local - htfpp #
module Foo.B (htf_thisModulesTests) where
import qualified Test.Framework as HTF
test_b_OK = HTF.assertEqual 1 1
| null | https://raw.githubusercontent.com/skogsbaer/HTF/a42450c89b7a3a3a50e381f36de3ac28faab2a16/tests/Foo/B.hs | haskell | # OPTIONS_GHC -F -pgmF ./scripts / local - htfpp #
module Foo.B (htf_thisModulesTests) where
import qualified Test.Framework as HTF
test_b_OK = HTF.assertEqual 1 1
|
|
f308ea38b1fc8dec11717c383507f5463a1f0064b4459f0585bb8c55b4d5c7ef | spechub/Hets | Morphism.hs | |
Module : ./HasCASL / Morphism.hs
Description : morphisms implementation
Copyright : ( c ) and Uni Bremen 2002 - 2006
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
mapping entities of morphisms
Module : ./HasCASL/Morphism.hs
Description : morphisms implementation
Copyright : (c) Christian Maeder and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
mapping entities of morphisms
-}
module HasCASL.Morphism where
import HasCASL.As
import HasCASL.AsToLe
import HasCASL.AsUtils
import HasCASL.FoldType
import HasCASL.Le
import HasCASL.MapTerm
import HasCASL.Merge
import HasCASL.PrintLe
import HasCASL.TypeAna
import Common.DocUtils
import Common.Doc
import Common.Id
import Common.Result
import Common.Utils (composeMap)
import Common.Lib.MapSet (setToMap)
import Control.Monad
import qualified Control.Monad.Fail as Fail
import qualified Data.Set as Set
import qualified Data.Map as Map
disjointKeys :: (Ord a, Pretty a, Fail.MonadFail m) => Map.Map a b -> Map.Map a c
-> m ()
disjointKeys m1 m2 = let d = Map.keysSet $ Map.intersection m1 m2 in
unless (Set.null d) $ Fail.fail $ show
(sep [ text "overlapping identifiers for types and classes:"
, pretty d])
-- | map a kind along an identifier map
mapKindI :: IdMap -> Kind -> Kind
mapKindI jm = mapKind (\ a -> Map.findWithDefault a a jm)
-- | map a kind along a signature morphism (variance is preserved)
mapKinds :: Morphism -> Kind -> Kind
mapKinds = mapKindI . classIdMap
-- | only rename the kinds in a type
mapKindsOfType :: IdMap -> TypeMap -> IdMap -> Type -> Type
mapKindsOfType jm tm im = foldType mapTypeRec
{ foldTypeAbs = \ _ -> TypeAbs . mapTypeArg jm tm im
, foldKindedType = \ _ t -> KindedType t . Set.map (mapKindI jm) }
-- | map type, expand it, and also adjust the kinds
mapTypeE :: IdMap -> TypeMap -> IdMap -> Type -> Type
mapTypeE jm tm im =
mapKindsOfType jm tm im . expandAliases tm . mapType im
-- | map a kind along a signature morphism (variance is preserved)
mapVarKind :: IdMap -> TypeMap -> IdMap -> VarKind -> VarKind
mapVarKind jm tm im vk = case vk of
VarKind k -> VarKind $ mapKindI jm k
Downset ty -> Downset $ mapTypeE jm tm im ty
_ -> vk
mapTypeArg :: IdMap -> TypeMap -> IdMap -> TypeArg -> TypeArg
mapTypeArg jm tm im (TypeArg i v vk rk c s r) =
TypeArg i v (mapVarKind jm tm im vk) rk c s r
mapTypeScheme :: IdMap -> TypeMap -> IdMap -> TypeScheme -> TypeScheme
mapTypeScheme jm tm im (TypeScheme args ty ps) =
TypeScheme (map (mapTypeArg jm tm im) args) (mapTypeE jm tm im ty) ps
mapSen :: IdMap -> TypeMap -> IdMap -> FunMap -> Term -> Term
mapSen jm tm im fm = mapTerm (mapFunSym jm tm im fm, mapTypeE jm tm im)
getDatatypeIds :: DataEntry -> Set.Set Id
getDatatypeIds (DataEntry _ i _ _ _ alts) =
let getAltIds (Construct _ tys _ sels) = Set.union
(Set.unions $ map getTypeIds tys)
$ Set.unions $ concatMap (map getSelIds) sels
getSelIds (Select _ ty _) = getTypeIds ty
getTypeIds = idsOf (== 0)
in Set.insert i $ Set.unions $ map getAltIds $ Set.toList alts
mapDataEntry :: IdMap -> TypeMap -> IdMap -> FunMap -> DataEntry -> DataEntry
mapDataEntry jm tm im fm (DataEntry dm i k args rk alts) =
let nDm = Map.map (\ a -> Map.findWithDefault a a im) dm
newargs = map (mapTypeArg jm tm im) args
nIm = Map.difference im dm
in DataEntry nDm i k newargs rk $ Set.map
(mapAlt jm tm im fm nIm newargs
$ patToType (Map.findWithDefault i i dm) newargs rk) alts
mapAlt :: IdMap -> TypeMap -> IdMap -> FunMap -> IdMap -> [TypeArg] -> Type
-> AltDefn -> AltDefn
mapAlt jm tm im fm nIm args dt (Construct mi ts p sels) =
let newTs = map (mapTypeE jm tm nIm) ts
newSels = map (map (mapSel jm tm im fm nIm args dt)) sels
in case mi of
Just i -> let
sc = TypeScheme args (getFunType dt p ts) nullRange
(j, TypeScheme _ ty _) = mapFunSym jm tm im fm (i, sc)
in Construct (Just j) newTs (getPartiality newTs ty) newSels
Nothing -> Construct mi newTs p newSels
mapSel :: IdMap -> TypeMap -> IdMap -> FunMap -> IdMap -> [TypeArg] -> Type
-> Selector -> Selector
mapSel jm tm im fm nIm args dt (Select mid t p) =
let newT = mapTypeE jm tm nIm t
in case mid of
Nothing -> Select mid newT p
Just i -> let
sc = TypeScheme args (getSelType dt p t) nullRange
(j, TypeScheme _ ty _) = mapFunSym jm tm im fm (i, sc)
in Select (Just j) newT $ getPartiality [dt] ty
{- | get the partiality from a constructor type
with a given number of curried arguments. -}
getPartiality :: [a] -> Type -> Partiality
getPartiality args t = case getTypeAppl t of
(TypeName i _ _, [_, res]) | isArrow i -> case args of
[] -> Total
[_] -> if isPartialArrow i then Partial else Total
_ : rs -> getPartiality rs res
(TypeName i _ _, [_]) | i == lazyTypeId ->
if null args then Partial else error "getPartiality"
_ -> Total
mapSentence :: Morphism -> Sentence -> Result Sentence
mapSentence m s = let
tm = filterAliases . typeMap $ mtarget m
im = typeIdMap m
jm = classIdMap m
fm = funMap m
f = mapFunSym jm tm im fm
in return $ case s of
Formula t -> Formula $ mapSen jm tm im fm t
DatatypeSen td -> DatatypeSen $ map (mapDataEntry jm tm im fm) td
ProgEqSen i sc pe ->
let (ni, nsc) = f (i, sc)
in ProgEqSen ni nsc $ mapEq (f, mapTypeE jm tm im) pe
mapFunSym :: IdMap -> TypeMap -> IdMap -> FunMap -> (Id, TypeScheme)
-> (Id, TypeScheme)
mapFunSym jm tm im fm (i, sc) =
let msc = mapTypeScheme jm tm im sc
in Map.findWithDefault (i, msc) (i, sc) fm
ideMor :: Env -> Morphism
ideMor e = mkMorphism e e
compMor :: Morphism -> Morphism -> Result Morphism
compMor m1 m2 = let
tm1 = typeIdMap m1
tm2 = typeIdMap m2
ctm = composeMap (typeMap src) tm1 tm2
cm1 = classIdMap m1
cm2 = classIdMap m2
ccm = composeMap (classMap src) cm1 cm2
fm2 = funMap m2
fm1 = funMap m1
tar = mtarget m2
src = msource m1
tm = filterAliases $ typeMap tar
emb = mkMorphism src tar
in if isInclMor m1 && isInclMor m2 then return emb else do
disjointKeys ctm ccm
return emb
{ typeIdMap = ctm
, classIdMap = ccm
, funMap = Map.intersection
(Map.foldrWithKey ( \ p1@(i, sc) p2 ->
let p3 = mapFunSym ccm tm tm2 fm2 p2
nSc = mapTypeScheme ccm tm ctm sc
in if (i, nSc) == p3 then Map.delete p1 else
Map.insert p1 p3)
fm2 fm1) $ Map.fromList $
concatMap ( \ (k, os) ->
map ( \ o -> ((k, opType o), ())) $ Set.toList os)
$ Map.toList $ assumps src }
showEnvDiff :: Env -> Env -> String
showEnvDiff e1 e2 =
"Signature 1:\n" ++ showDoc e1 "\nSignature 2:\n"
++ showDoc e2 "\nDifference\n" ++ showDoc
(diffEnv e1 e2) ""
legalMor :: Morphism -> Result ()
legalMor m = let
s = msource m
t = mtarget m
ts = typeIdMap m
cs = classIdMap m
fs = funMap m in
unless (all (`elem` Map.keys (typeMap s)) (Map.keys ts)
&& all (`elem` Map.keys (typeMap t)) (Map.elems ts)
&& all (`elem` Map.keys (classMap s)) (Map.keys cs)
&& all (`elem` Map.keys (classMap t)) (Map.elems cs)
&& all ((`elem` Map.keys (assumps s)) . fst) (Map.keys fs)
&& all ((`elem` Map.keys (assumps t)) . fst) (Map.elems fs))
(Fail.fail "illegal HasCASL morphism")
morphismUnion :: Morphism -> Morphism -> Result Morphism
morphismUnion m1 m2 = do
let s1 = msource m1
s2 = msource m2
s <- merge s1 s2
t <- merge (mtarget m1) $ mtarget m2
let tm1 = typeMap s1
tm2 = typeMap s2
im1 = typeIdMap m1
im2 = typeIdMap m2
-- unchanged types
ut1 = Map.keysSet tm1 Set.\\ Map.keysSet im1
ut2 = Map.keysSet tm2 Set.\\ Map.keysSet im2
ima1 = Map.union im1 $ setToMap ut1
ima2 = Map.union im2 $ setToMap ut2
sAs = filterAliases $ typeMap s
tAs = filterAliases $ typeMap t
cm1 = classMap s1
cm2 = classMap s2
jm1 = classIdMap m1
jm2 = classIdMap m2
-- unchanged classes
cut1 = Map.keysSet cm1 Set.\\ Map.keysSet jm1
cut2 = Map.keysSet cm2 Set.\\ Map.keysSet jm2
cima1 = Map.union jm1 $ setToMap cut1
cima2 = Map.union jm2 $ setToMap cut2
expP = Map.fromList . map ( \ ((i, o), (j, p)) ->
((i, expand tAs o), (j, expand tAs p)))
. Map.toList
fm1 = expP $ funMap m1
fm2 = expP $ funMap m2
af jm im = Set.unions . map ( \ (i, os) ->
Set.map ( \ o -> (i, mapTypeScheme jm tAs im
$ expand sAs $ opType o)) os)
. Map.toList
-- unchanged functions
uf1 = af jm1 im1 (assumps s1) Set.\\ Map.keysSet fm1
uf2 = af jm2 im2 (assumps s2) Set.\\ Map.keysSet fm2
fma1 = Map.union fm1 $ setToMap uf1
fma2 = Map.union fm2 $ setToMap uf2
showFun (i, ty) = showId i . (" : " ++) . showDoc ty
tma <- mergeMap ( \ t1 t2 -> if t1 == t2 then return t1 else
Fail.fail $ "incompatible type mapping to `"
++ showId t1 "' and '" ++ showId t2 "'") ima1 ima2
cma <- mergeMap ( \ t1 t2 -> if t1 == t2 then return t1 else
Fail.fail $ "incompatible class mapping to `"
++ showId t1 "' and '" ++ showId t2 "'") cima1 cima2
fma <- mergeMap ( \ o1 o2 -> if o1 == o2 then return o1 else
Fail.fail $ "incompatible mapping to '"
++ showFun o1 "' and '" ++ showFun o2 "'") fma1 fma2
disjointKeys tma cma
return (mkMorphism s t)
{ typeIdMap = tma
, classIdMap = cma
, funMap = fma }
morphismToSymbMap :: Morphism -> SymbolMap
morphismToSymbMap mor = let
src = msource mor
tar = mtarget mor
im = typeIdMap mor
jm = classIdMap mor
tm = filterAliases $ typeMap tar
classSymMap = Map.foldrWithKey ( \ i ti ->
let j = Map.findWithDefault i i jm
k = rawKind ti
in Map.insert (idToClassSymbol i k)
$ idToClassSymbol j k) Map.empty $ classMap src
typeSymMap = Map.foldrWithKey ( \ i ti ->
let j = Map.findWithDefault i i im
k = typeKind ti
in Map.insert (idToTypeSymbol i k)
$ idToTypeSymbol j k) classSymMap $ typeMap src
in Map.foldrWithKey
( \ i s m ->
Set.fold ( \ oi ->
let ty = opType oi
(j, t2) = mapFunSym jm tm im (funMap mor) (i, ty)
in Map.insert (idToOpSymbol i ty)
(idToOpSymbol j t2)) m s)
typeSymMap $ assumps src
| null | https://raw.githubusercontent.com/spechub/Hets/f582640a174df08d4c965d7c0a1ab24d1a31000d/HasCASL/Morphism.hs | haskell | | map a kind along an identifier map
| map a kind along a signature morphism (variance is preserved)
| only rename the kinds in a type
| map type, expand it, and also adjust the kinds
| map a kind along a signature morphism (variance is preserved)
| get the partiality from a constructor type
with a given number of curried arguments.
unchanged types
unchanged classes
unchanged functions | |
Module : ./HasCASL / Morphism.hs
Description : morphisms implementation
Copyright : ( c ) and Uni Bremen 2002 - 2006
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
mapping entities of morphisms
Module : ./HasCASL/Morphism.hs
Description : morphisms implementation
Copyright : (c) Christian Maeder and Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
mapping entities of morphisms
-}
module HasCASL.Morphism where
import HasCASL.As
import HasCASL.AsToLe
import HasCASL.AsUtils
import HasCASL.FoldType
import HasCASL.Le
import HasCASL.MapTerm
import HasCASL.Merge
import HasCASL.PrintLe
import HasCASL.TypeAna
import Common.DocUtils
import Common.Doc
import Common.Id
import Common.Result
import Common.Utils (composeMap)
import Common.Lib.MapSet (setToMap)
import Control.Monad
import qualified Control.Monad.Fail as Fail
import qualified Data.Set as Set
import qualified Data.Map as Map
disjointKeys :: (Ord a, Pretty a, Fail.MonadFail m) => Map.Map a b -> Map.Map a c
-> m ()
disjointKeys m1 m2 = let d = Map.keysSet $ Map.intersection m1 m2 in
unless (Set.null d) $ Fail.fail $ show
(sep [ text "overlapping identifiers for types and classes:"
, pretty d])
mapKindI :: IdMap -> Kind -> Kind
mapKindI jm = mapKind (\ a -> Map.findWithDefault a a jm)
mapKinds :: Morphism -> Kind -> Kind
mapKinds = mapKindI . classIdMap
mapKindsOfType :: IdMap -> TypeMap -> IdMap -> Type -> Type
mapKindsOfType jm tm im = foldType mapTypeRec
{ foldTypeAbs = \ _ -> TypeAbs . mapTypeArg jm tm im
, foldKindedType = \ _ t -> KindedType t . Set.map (mapKindI jm) }
mapTypeE :: IdMap -> TypeMap -> IdMap -> Type -> Type
mapTypeE jm tm im =
mapKindsOfType jm tm im . expandAliases tm . mapType im
mapVarKind :: IdMap -> TypeMap -> IdMap -> VarKind -> VarKind
mapVarKind jm tm im vk = case vk of
VarKind k -> VarKind $ mapKindI jm k
Downset ty -> Downset $ mapTypeE jm tm im ty
_ -> vk
mapTypeArg :: IdMap -> TypeMap -> IdMap -> TypeArg -> TypeArg
mapTypeArg jm tm im (TypeArg i v vk rk c s r) =
TypeArg i v (mapVarKind jm tm im vk) rk c s r
mapTypeScheme :: IdMap -> TypeMap -> IdMap -> TypeScheme -> TypeScheme
mapTypeScheme jm tm im (TypeScheme args ty ps) =
TypeScheme (map (mapTypeArg jm tm im) args) (mapTypeE jm tm im ty) ps
mapSen :: IdMap -> TypeMap -> IdMap -> FunMap -> Term -> Term
mapSen jm tm im fm = mapTerm (mapFunSym jm tm im fm, mapTypeE jm tm im)
getDatatypeIds :: DataEntry -> Set.Set Id
getDatatypeIds (DataEntry _ i _ _ _ alts) =
let getAltIds (Construct _ tys _ sels) = Set.union
(Set.unions $ map getTypeIds tys)
$ Set.unions $ concatMap (map getSelIds) sels
getSelIds (Select _ ty _) = getTypeIds ty
getTypeIds = idsOf (== 0)
in Set.insert i $ Set.unions $ map getAltIds $ Set.toList alts
mapDataEntry :: IdMap -> TypeMap -> IdMap -> FunMap -> DataEntry -> DataEntry
mapDataEntry jm tm im fm (DataEntry dm i k args rk alts) =
let nDm = Map.map (\ a -> Map.findWithDefault a a im) dm
newargs = map (mapTypeArg jm tm im) args
nIm = Map.difference im dm
in DataEntry nDm i k newargs rk $ Set.map
(mapAlt jm tm im fm nIm newargs
$ patToType (Map.findWithDefault i i dm) newargs rk) alts
mapAlt :: IdMap -> TypeMap -> IdMap -> FunMap -> IdMap -> [TypeArg] -> Type
-> AltDefn -> AltDefn
mapAlt jm tm im fm nIm args dt (Construct mi ts p sels) =
let newTs = map (mapTypeE jm tm nIm) ts
newSels = map (map (mapSel jm tm im fm nIm args dt)) sels
in case mi of
Just i -> let
sc = TypeScheme args (getFunType dt p ts) nullRange
(j, TypeScheme _ ty _) = mapFunSym jm tm im fm (i, sc)
in Construct (Just j) newTs (getPartiality newTs ty) newSels
Nothing -> Construct mi newTs p newSels
mapSel :: IdMap -> TypeMap -> IdMap -> FunMap -> IdMap -> [TypeArg] -> Type
-> Selector -> Selector
mapSel jm tm im fm nIm args dt (Select mid t p) =
let newT = mapTypeE jm tm nIm t
in case mid of
Nothing -> Select mid newT p
Just i -> let
sc = TypeScheme args (getSelType dt p t) nullRange
(j, TypeScheme _ ty _) = mapFunSym jm tm im fm (i, sc)
in Select (Just j) newT $ getPartiality [dt] ty
getPartiality :: [a] -> Type -> Partiality
getPartiality args t = case getTypeAppl t of
(TypeName i _ _, [_, res]) | isArrow i -> case args of
[] -> Total
[_] -> if isPartialArrow i then Partial else Total
_ : rs -> getPartiality rs res
(TypeName i _ _, [_]) | i == lazyTypeId ->
if null args then Partial else error "getPartiality"
_ -> Total
mapSentence :: Morphism -> Sentence -> Result Sentence
mapSentence m s = let
tm = filterAliases . typeMap $ mtarget m
im = typeIdMap m
jm = classIdMap m
fm = funMap m
f = mapFunSym jm tm im fm
in return $ case s of
Formula t -> Formula $ mapSen jm tm im fm t
DatatypeSen td -> DatatypeSen $ map (mapDataEntry jm tm im fm) td
ProgEqSen i sc pe ->
let (ni, nsc) = f (i, sc)
in ProgEqSen ni nsc $ mapEq (f, mapTypeE jm tm im) pe
mapFunSym :: IdMap -> TypeMap -> IdMap -> FunMap -> (Id, TypeScheme)
-> (Id, TypeScheme)
mapFunSym jm tm im fm (i, sc) =
let msc = mapTypeScheme jm tm im sc
in Map.findWithDefault (i, msc) (i, sc) fm
ideMor :: Env -> Morphism
ideMor e = mkMorphism e e
compMor :: Morphism -> Morphism -> Result Morphism
compMor m1 m2 = let
tm1 = typeIdMap m1
tm2 = typeIdMap m2
ctm = composeMap (typeMap src) tm1 tm2
cm1 = classIdMap m1
cm2 = classIdMap m2
ccm = composeMap (classMap src) cm1 cm2
fm2 = funMap m2
fm1 = funMap m1
tar = mtarget m2
src = msource m1
tm = filterAliases $ typeMap tar
emb = mkMorphism src tar
in if isInclMor m1 && isInclMor m2 then return emb else do
disjointKeys ctm ccm
return emb
{ typeIdMap = ctm
, classIdMap = ccm
, funMap = Map.intersection
(Map.foldrWithKey ( \ p1@(i, sc) p2 ->
let p3 = mapFunSym ccm tm tm2 fm2 p2
nSc = mapTypeScheme ccm tm ctm sc
in if (i, nSc) == p3 then Map.delete p1 else
Map.insert p1 p3)
fm2 fm1) $ Map.fromList $
concatMap ( \ (k, os) ->
map ( \ o -> ((k, opType o), ())) $ Set.toList os)
$ Map.toList $ assumps src }
showEnvDiff :: Env -> Env -> String
showEnvDiff e1 e2 =
"Signature 1:\n" ++ showDoc e1 "\nSignature 2:\n"
++ showDoc e2 "\nDifference\n" ++ showDoc
(diffEnv e1 e2) ""
legalMor :: Morphism -> Result ()
legalMor m = let
s = msource m
t = mtarget m
ts = typeIdMap m
cs = classIdMap m
fs = funMap m in
unless (all (`elem` Map.keys (typeMap s)) (Map.keys ts)
&& all (`elem` Map.keys (typeMap t)) (Map.elems ts)
&& all (`elem` Map.keys (classMap s)) (Map.keys cs)
&& all (`elem` Map.keys (classMap t)) (Map.elems cs)
&& all ((`elem` Map.keys (assumps s)) . fst) (Map.keys fs)
&& all ((`elem` Map.keys (assumps t)) . fst) (Map.elems fs))
(Fail.fail "illegal HasCASL morphism")
morphismUnion :: Morphism -> Morphism -> Result Morphism
morphismUnion m1 m2 = do
let s1 = msource m1
s2 = msource m2
s <- merge s1 s2
t <- merge (mtarget m1) $ mtarget m2
let tm1 = typeMap s1
tm2 = typeMap s2
im1 = typeIdMap m1
im2 = typeIdMap m2
ut1 = Map.keysSet tm1 Set.\\ Map.keysSet im1
ut2 = Map.keysSet tm2 Set.\\ Map.keysSet im2
ima1 = Map.union im1 $ setToMap ut1
ima2 = Map.union im2 $ setToMap ut2
sAs = filterAliases $ typeMap s
tAs = filterAliases $ typeMap t
cm1 = classMap s1
cm2 = classMap s2
jm1 = classIdMap m1
jm2 = classIdMap m2
cut1 = Map.keysSet cm1 Set.\\ Map.keysSet jm1
cut2 = Map.keysSet cm2 Set.\\ Map.keysSet jm2
cima1 = Map.union jm1 $ setToMap cut1
cima2 = Map.union jm2 $ setToMap cut2
expP = Map.fromList . map ( \ ((i, o), (j, p)) ->
((i, expand tAs o), (j, expand tAs p)))
. Map.toList
fm1 = expP $ funMap m1
fm2 = expP $ funMap m2
af jm im = Set.unions . map ( \ (i, os) ->
Set.map ( \ o -> (i, mapTypeScheme jm tAs im
$ expand sAs $ opType o)) os)
. Map.toList
uf1 = af jm1 im1 (assumps s1) Set.\\ Map.keysSet fm1
uf2 = af jm2 im2 (assumps s2) Set.\\ Map.keysSet fm2
fma1 = Map.union fm1 $ setToMap uf1
fma2 = Map.union fm2 $ setToMap uf2
showFun (i, ty) = showId i . (" : " ++) . showDoc ty
tma <- mergeMap ( \ t1 t2 -> if t1 == t2 then return t1 else
Fail.fail $ "incompatible type mapping to `"
++ showId t1 "' and '" ++ showId t2 "'") ima1 ima2
cma <- mergeMap ( \ t1 t2 -> if t1 == t2 then return t1 else
Fail.fail $ "incompatible class mapping to `"
++ showId t1 "' and '" ++ showId t2 "'") cima1 cima2
fma <- mergeMap ( \ o1 o2 -> if o1 == o2 then return o1 else
Fail.fail $ "incompatible mapping to '"
++ showFun o1 "' and '" ++ showFun o2 "'") fma1 fma2
disjointKeys tma cma
return (mkMorphism s t)
{ typeIdMap = tma
, classIdMap = cma
, funMap = fma }
morphismToSymbMap :: Morphism -> SymbolMap
morphismToSymbMap mor = let
src = msource mor
tar = mtarget mor
im = typeIdMap mor
jm = classIdMap mor
tm = filterAliases $ typeMap tar
classSymMap = Map.foldrWithKey ( \ i ti ->
let j = Map.findWithDefault i i jm
k = rawKind ti
in Map.insert (idToClassSymbol i k)
$ idToClassSymbol j k) Map.empty $ classMap src
typeSymMap = Map.foldrWithKey ( \ i ti ->
let j = Map.findWithDefault i i im
k = typeKind ti
in Map.insert (idToTypeSymbol i k)
$ idToTypeSymbol j k) classSymMap $ typeMap src
in Map.foldrWithKey
( \ i s m ->
Set.fold ( \ oi ->
let ty = opType oi
(j, t2) = mapFunSym jm tm im (funMap mor) (i, ty)
in Map.insert (idToOpSymbol i ty)
(idToOpSymbol j t2)) m s)
typeSymMap $ assumps src
|
87704f79dfa1aec225aca8e21d45dde585630d06f8181eb6673466ce26477f9e | avsm/platform | uucp_func.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
let is_dash u =
Uucp_tmapbool.get Uucp_func_data.dash_map (Uchar.to_int u)
let is_diacritic u =
Uucp_tmapbool.get Uucp_func_data.diacritic_map (Uchar.to_int u)
let is_extender u =
Uucp_tmapbool.get Uucp_func_data.extender_map (Uchar.to_int u)
let is_grapheme_base u =
Uucp_tmapbool.get Uucp_func_data.grapheme_base_map (Uchar.to_int u)
let is_grapheme_extend u =
Uucp_tmapbool.get Uucp_func_data.grapheme_extend_map (Uchar.to_int u)
let is_math u =
Uucp_tmapbool.get Uucp_func_data.math_map (Uchar.to_int u)
let is_quotation_mark u =
Uucp_tmapbool.get Uucp_func_data.quotation_mark_map (Uchar.to_int u)
let is_soft_dotted u =
Uucp_tmapbool.get Uucp_func_data.soft_dotted_map (Uchar.to_int u)
let is_terminal_punctuation u =
Uucp_tmapbool.get Uucp_func_data.terminal_punctuation_map (Uchar.to_int u)
let is_regional_indicator u =
Uucp_tmapbool.get Uucp_func_data.regional_indicator_map (Uchar.to_int u)
let is_join_control u =
Uucp_tmapbool.get Uucp_func_data.join_control_map (Uchar.to_int u)
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/uucp.12.0.0%2Bdune/src/uucp_func.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
let is_dash u =
Uucp_tmapbool.get Uucp_func_data.dash_map (Uchar.to_int u)
let is_diacritic u =
Uucp_tmapbool.get Uucp_func_data.diacritic_map (Uchar.to_int u)
let is_extender u =
Uucp_tmapbool.get Uucp_func_data.extender_map (Uchar.to_int u)
let is_grapheme_base u =
Uucp_tmapbool.get Uucp_func_data.grapheme_base_map (Uchar.to_int u)
let is_grapheme_extend u =
Uucp_tmapbool.get Uucp_func_data.grapheme_extend_map (Uchar.to_int u)
let is_math u =
Uucp_tmapbool.get Uucp_func_data.math_map (Uchar.to_int u)
let is_quotation_mark u =
Uucp_tmapbool.get Uucp_func_data.quotation_mark_map (Uchar.to_int u)
let is_soft_dotted u =
Uucp_tmapbool.get Uucp_func_data.soft_dotted_map (Uchar.to_int u)
let is_terminal_punctuation u =
Uucp_tmapbool.get Uucp_func_data.terminal_punctuation_map (Uchar.to_int u)
let is_regional_indicator u =
Uucp_tmapbool.get Uucp_func_data.regional_indicator_map (Uchar.to_int u)
let is_join_control u =
Uucp_tmapbool.get Uucp_func_data.join_control_map (Uchar.to_int u)
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
|
b6c6fe7af40d013a39fc5e30c9258892a63ecab1449f6d9623cbfe3206549c31 | lisp/de.setf.wilbur | packages.lisp | -*- package : CL - USER ; Syntax : Common - lisp ; Base : 10 -*-
;;;
packages.lisp
;;;
;;;
;;; --------------------------------------------------------------------------------------
;;;
The Original Software is
WILBUR2 : Nokia Semantic Web Toolkit for CLOS
;;;
Copyright ( c ) 2001 - 2009 Nokia Corp. and/or its subsidiaries . All Rights Reserved .
Portions Copyright ( c ) 1989 - 1992 . All Rights Reserved .
;;;
Contributor(s ): ( mailto: )
;;;
;;; This program is licensed under the terms of the GNU Lesser General Public License
as published by the Free Software Foundation , version 2.1 of the License . Note
;;; however that a preamble attached below also applies to this program.
;;;
;;;
;;; --------------------------------------------------------------------------------------
;;;
Preamble to the Gnu Lesser General Public License
;;;
Copyright ( c ) 2000 Franz Incorporated , Berkeley , CA 94704
;;;
The concept of the GNU Lesser General Public License version 2.1 ( " LGPL " ) has been
;;; adopted to govern the use and distribution of above-mentioned application. However,
;;; the LGPL uses terminology that is more appropriate for a program written in C than
one written in . Nevertheless , the LGPL can still be applied to a Lisp program
;;; if certain clarifications are made. This document details those clarifications.
;;; Accordingly, the license for the open-source Lisp applications consists of this
;;; document plus the LGPL. Wherever there is a conflict between this document and the
;;; LGPL, this document takes precedence over the LGPL.
;;;
A " Library " in is a collection of Lisp functions , data and foreign modules .
The form of the Library can be Lisp source code ( for processing by an interpreter )
;;; or object code (usually the result of compilation of source code or built with some
;;; other mechanisms). Foreign modules are object code in a form that can be linked
;;; into a Lisp executable. When we speak of functions we do so in the most general way
;;; to include, in addition, methods and unnamed functions. Lisp "data" is also a
;;; general term that includes the data structures resulting from defining Lisp classes.
A Lisp application may include the same set of Lisp objects as does a Library , but
this does not mean that the application is necessarily a " work based on the Library "
;;; it contains.
;;;
The Library consists of everything in the distribution file set before any
;;; modifications are made to the files. If any of the functions or classes in the
;;; Library are redefined in other files, then those redefinitions ARE considered a
work based on the Library . If additional methods are added to generic functions in
the Library , those additional methods are NOT considered a work based on the
Library . If Library classes are subclassed , these subclasses are NOT considered a
work based on the Library . If the Library is modified to explicitly call other
;;; functions that are neither part of Lisp itself nor an available add-on module to
Lisp , then the functions called by the modified Library ARE considered a work based
on the Library . The goal is to ensure that the Library will compile and run without
;;; getting undefined function errors.
;;;
It is permitted to add proprietary source code to the Library , but it must be done
in a way such that the Library will still run without that proprietary code present .
;;; Section 5 of the LGPL distinguishes between the case of a library being dynamically
linked at runtime and one being statically linked at build time . Section 5 of the
;;; LGPL states that the former results in an executable that is a "work that uses the
Library . " Section 5 of the LGPL states that the latter results in one that is a
" derivative of the Library " , which is therefore covered by the LGPL . Since Lisp only
offers one choice , which is to link the Library into an executable at build time , we
declare that , for the purpose applying the LGPL to the Library , an executable that
results from linking a " work that uses the Library " with the Library is considered a
" work that uses the Library " and is therefore NOT covered by the LGPL .
;;;
Because of this declaration , section 6 of LGPL is not applicable to the Library .
;;; However, in connection with each distribution of this executable, you must also
;;; deliver, in accordance with the terms and conditions of the LGPL, the source code
;;; of Library (or your derivative thereof) that is incorporated into this executable.
;;;
;;; --------------------------------------------------------------------------------------
;;;
;;;
Purpose : This file contains the package definition for .
;;;
(in-package "CL-USER")
;;; --------------------------------------------------------------------------------------
;;;
;;; PACKAGE WILBUR
;;;
(defpackage "WILBUR"
(:nicknames "W"
"NOX") ; so as not to have many packages anymore
(:use "COMMON-LISP"
#+(or :digitool :clozure) "CCL"
#+:excl "EXCL"
#+:excl "SOCKET"
#+:excl "MOP"
#+:sbcl "SB-SYS"
#+:sbcl "SB-GRAY"
#+:lispworks "MP")
(:export "*CURRENT-PARSER*"
"*DB*"
"*NAME-READER*"
"*NODES*"
"-DAML+OIL-URI-"
"-DAML-FIRST-URI-"
"-DAML-LIST-URI-"
"-DAML-NIL-URI-"
"-DAML-REST-URI-"
"-OWL-FIRST-URI-"
"-OWL-IMPORTS-URI-"
"-OWL-LIST-URI-"
"-OWL-NIL-URI-"
"-OWL-REST-URI-"
"-RDF-ABOUT-URI-"
"-RDF-ABOUTEACH-URI-"
"-RDF-ABOUTEACHPREFIX-URI-"
"-RDF-ALT-URI-"
"-RDF-ATTR-MAP-"
"-RDF-ATTRS-"
"-RDF-BAG-URI-"
"-RDF-BAGID-URI-"
"-RDF-DATATYPE-URI-"
"-RDF-DESCRIPTION-URI-"
"-RDF-ID-URI-"
"-RDF-LI-URI-"
"-RDF-NODEID-URI-"
"-RDF-OBJECT-URI-"
"-RDF-PARSETYPE-URI-"
"-RDF-PREDICATE-URI-"
"-RDF-RDF-URI-"
"-RDF-RESOURCE-URI-"
"-RDF-SEQ-URI-"
"-RDF-STATEMENT-URI-"
"-RDF-SUBJECT-URI-"
"-RDF-TYPE-URI-"
"-RDF-URI-"
"-RDFS-CLASS-URI-"
"-RDFS-COMMENT-URI-"
"-RDFS-CONSTRAINTPROPERTY-URI-"
"-RDFS-CONSTRAINTRESOURCE-URI-"
"-RDFS-CONTAINER-URI-"
"-RDFS-DOMAIN-URI-"
"-RDFS-ISDEFINEDBY-URI-"
"-RDFS-LABEL-URI-"
"-RDFS-LITERAL-URI-"
"-RDFS-RANGE-URI-"
"-RDFS-RESOURCE-URI-"
"-RDFS-SEEALSO-URI-"
"-RDFS-SUBCLASSOF-URI-"
"-RDFS-SUBPROPERTYOF-URI-"
"-RDFS-URI-"
"-WHITESPACE-CHARS-"
"-XML-LANG-ATTR-"
"ABOUT-AND-ID-BOTH-PRESENT"
"ABOUT-AND-NODEID-BOTH-PRESENT"
"ADD-NAMESPACE"
"ADD-TRIPLE"
"ADD-VALUE"
"ALL-VALUES"
"ATTACH-TO-PARENT"
"BLANK-NODE-DB-MIXIN"
"CHAR-CONTENT"
"CHAR-CONTENT"
"CLOSE-RDF-ELEMENT"
"CLOSE-TAG"
"COLLAPSE-WHITESPACE"
"COLLECT-USING-FSA"
"COMMENT"
"CONTAINER-REQUIRED"
"DAML-CONS"
"DAML-LIST"
"DAML-PARSER"
"DATE-CLEANUP-DB-MIXIN"
"DB"
"DB-ADD-TRIPLE"
"DB-BLANK-NODE-URI"
"DB-BLANK-NODE-URI-P"
"DB-CLEAR"
"DB-CLEAR-REASONER-CACHE"
"DB-DEL-SOURCE"
"DB-DEL-TRIPLE"
"DB-FIND-CBD"
"DB-FIND-SOURCE-DESC"
"DB-GET-VALUES"
"DB-INDEX-LITERALS"
"DB-INDEX-LITERALS-P"
"DB-LOAD"
"DB-LOAD-USING-SOURCE"
"DB-MAKE-TRIPLE"
"DB-MATCH-LITERALS"
"DB-MERGE"
"DB-NODE-PROPERTIES-PARTITIONED"
"DB-NODE-TYPE-P"
"DB-NODE-TYPES"
"DB-QUERY"
"DB-QUERY-BY-SOURCE"
"DB-REIFY"
"DB-RESOLVE-BLANK-NODE-URI"
"DB-RESOLVE-BLANK-NODE-URI"
"DB-SAMEAS-CLUSTERS"
"DB-SOURCE-DESCS"
"DB-SOURCE-REAL-URL"
"DB-SOURCES"
"DB-STARTUP-TIME"
"DB-SUPPORTS-MATCHING-P"
"DB-TRANSFORM-LITERAL"
"DB-TRIPLE-LOCK"
"DB-TRIPLES"
"DB-URI->BLANK-NODE"
"DEDUCTIVE-CLOSURE-DB-MIXIN"
"DEFER-TASK"
"DEFINE-READTABLE"
"DEFINE-RESOURCE-POOL"
"DEL-NAMESPACE"
"DEL-TRIPLE"
"DEL-VALUE"
"DICTIONARY"
"DICTIONARY-ADD-NAMESPACE"
"DICTIONARY-APROPOS-LIST"
"DICTIONARY-NAMESPACES"
"DICTIONARY-NODE-CLASS"
"DICTIONARY-NODES"
"DICTIONARY-REMOVE-NAMESPACE"
"DICTIONARY-RENAME-NAMESPACE"
"DICTIONARY-UNRESOLVED-NODES"
"DO-STRING-DICT"
"DOLIST+"
"DSB"
"DTD-TERMINATION-PROBLEM"
"DUPLICATE-NAMESPACE-PREFIX"
"ENABLE-LITERAL-SHORTHAND"
"ENABLE-NODE-SHORTHAND"
"END-DOCUMENT"
"END-ELEMENT"
"ENTITY-DECLARATION"
"ENTITY-NAME"
"ERROR-DEFINITION-TYPE"
"ERROR-EXPECTATION"
"ERROR-THING"
"EXECUTE-DEFERRED-TASK"
"EXPAND-NAME-WITH-NAMESPACE"
"FEATURE-NOT-SUPPORTED"
"FILE-URL"
"FIND-FIRST-PRODUCER"
"FIND-HTTP-PROXY"
"FIND-LONG-NAME"
"FIND-NODE"
"FIND-SHORT-NAME"
"FRAME"
"FRAMES-RELATED-P"
"GET-ALL-VALUES"
"GET-CANONICAL-URI"
"GET-ENTITY"
"GET-HEADER"
"GET-VALUE"
"HTTP-BODY"
"HTTP-GET"
"HTTP-HEAD"
"HTTP-HEADERS"
"HTTP-MESSAGE"
"HTTP-STATUS"
"HTTP-URL"
"HTTP-VERSION"
"ILLEGAL-CHARACTER-CONTENT"
"INDEX-URI"
"INDEX-URI-P"
"INDEXED-DB"
"INDEXED-LITERAL-DB-MIXIN"
"INTERNED-LITERAL"
"INTERNED-LITERAL-DB-MIXIN"
"INVERT-PATH"
"IS-CONTAINER-P"
"ISO8601-DATE-STRING"
"LITERAL"
"LITERAL-DATATYPE"
"LITERAL-LANGUAGE"
"LITERAL-LANGUAGE-MATCH-P"
"LITERAL-STRING"
"LITERAL-TRANSFORM-DB-MIXIN"
"LITERAL-VALUE"
"LOAD-DB"
"LOAD-DB-FROM-STREAM"
"LOCKED-DB-MIXIN"
"MAKE-CONTAINER"
"MAKE-FILE-URL"
"MAKE-HTTP-URL"
"MAKE-LOCK"
"MAKE-TRIPLE-COLLECTION"
"MAKE-URL"
"MALFORMED-URL"
"MAYBE-USE-NAMESPACE"
"MISSING-DEFINITION"
"MISSING-ENTITY-DEFINITION"
"MISSING-NAMESPACE-DEFINITION"
"NAMESPACES"
"NODE"
"NODE-NAME-RESOLVED-P"
"NODE-URI"
"OPEN-HTTP-STREAM"
"OPEN-TAG"
"OUT-OF-SEQUENCE-INDEX"
"OWL-URI"
"OWN-SLOTS"
"PARSE"
"PARSE-DB-FROM-FILE"
"PARSE-DB-FROM-STREAM"
"PARSE-EXIF-DATE"
"PARSE-FROM-FILE"
"PARSE-FROM-STREAM"
"PARSE-HTTP-DATE"
"PARSE-ISO8601-DATE"
"PARSE-URL"
"PARSE-USING-PARSETYPE"
"PARSER-DB"
"PARSER-INTERPRET-CONTENT"
"PARSER-NODE"
"PARSER-PROPERTY"
"PATH"
"PATH-EXPRESSION"
"PI-TERMINATION-PROBLEM"
"PRIORITIZE"
"PRIORITIZE-LIST"
"PROC-INSTRUCTION"
"QUERY"
"QUIT-LISP-PROCESS"
"RDF-ERROR"
"RDF-PARSER"
"RDF-SYNTAX-NORMALIZER"
"RDF-URI"
"RDFS-URI"
"READ-USING"
"REIFY"
"RELATEDP"
"REPLAY"
"REVERSE-EXPAND-NAME"
"SAX-CONSUMER"
"SAX-CONSUMER-MODE"
"SAX-CONSUMER-PRODUCER"
"SAX-FILTER"
"SAX-PRODUCER"
"SAX-PRODUCER-CONSUMER"
"SIMPLE-EXTERNAL-PROCESS"
"SOURCE-CLOSE-STREAM"
"SOURCE-DESC"
"SOURCE-DESC-LOAD-TIME"
"SOURCE-DESC-LOADED-FROM"
"SOURCE-DESC-URL"
"SOURCE-LOCATOR"
"SOURCE-MODIFICATION"
"SOURCE-OPEN-STREAM"
"SOURCE-ORIGINAL-STREAM"
"SOURCE-WITH-MODIFICATION"
"SPLIT-LIST"
"START-DOCUMENT"
"START-ELEMENT"
"STRING->KEYWORD"
"STRING-DICT-ADD"
"STRING-DICT-DEL"
"STRING-DICT-GET"
"STRING-DICT-GET-BY-VALUE"
"STRING-SOURCE"
"SYNTAX-ERROR"
"TAG-ATTRIBUTE"
"TAG-ATTRIBUTES"
"TAG-COUNTERPART"
"TAG-EMPTY-P"
"TAG-NAMESPACES"
"TASK"
"TASK-NODE"
"TASK-PARAMETER"
"TASK-TYPE"
"TOKEN"
"TOKEN-STRING"
"TREE-PARSER"
"TRIPLE"
"TRIPLE-COLLECTION-ADD"
"TRIPLE-COLLECTION-TRIPLES"
"TRIPLE-OBJECT"
"TRIPLE-PREDICATE"
"TRIPLE-SOURCES"
"TRIPLE-SUBJECT"
"TRIPLE="
"UNEXPECTED-END-TAG"
"UNKNOWN-CHARACTER-REFERENCE"
"UNKNOWN-DECLARATION"
"UNKNOWN-PARSETYPE"
"URL"
"URL-HOST"
"URL-PATH"
"URL-PORT"
"URL-STRING"
"VALUE"
"WALK-USING-FSA"
"WITH-DB-LOCK"
"WITH-HTTP-RESPONSE"
"WITH-LOCK"
"WITH-RESOURCE-FROM-POOL"
"WITH-SPO-CASE"
"WITH-TEMPS"
"WITHOUT-CLOSURE"
"XML-ERROR"
"XML-FEATURE-NOT-SUPPORTED"
"XML-FORMATTER"
"XML-PARSER"
"XML-WARNING"
"XSD-URI"
"WITH-TAGS"
"FORMAT-WITH-TAGS"
"PRINC-WITH-TAGS"
"COMMA-SEPARATED"
"XHTML-PREAMBLE"
"XML-PREAMBLE"
"WITH-RDF-PAGE"
"ESCAPE-JSON-STRING"
"ESCAPE-XML-STRING"
"SERIALIZER"
"SERIALIZER-STREAM"
"SERIALIZER-DUMP"
"SINGLE-SUBJECT-TRIPLES"
"RDF/XML-SERIALIZER"))
| null | https://raw.githubusercontent.com/lisp/de.setf.wilbur/c5c1321e6a05cead8b90e54116f14c3810d520e2/src/packages.lisp | lisp | Syntax : Common - lisp ; Base : 10 -*-
--------------------------------------------------------------------------------------
This program is licensed under the terms of the GNU Lesser General Public License
however that a preamble attached below also applies to this program.
--------------------------------------------------------------------------------------
adopted to govern the use and distribution of above-mentioned application. However,
the LGPL uses terminology that is more appropriate for a program written in C than
if certain clarifications are made. This document details those clarifications.
Accordingly, the license for the open-source Lisp applications consists of this
document plus the LGPL. Wherever there is a conflict between this document and the
LGPL, this document takes precedence over the LGPL.
or object code (usually the result of compilation of source code or built with some
other mechanisms). Foreign modules are object code in a form that can be linked
into a Lisp executable. When we speak of functions we do so in the most general way
to include, in addition, methods and unnamed functions. Lisp "data" is also a
general term that includes the data structures resulting from defining Lisp classes.
it contains.
modifications are made to the files. If any of the functions or classes in the
Library are redefined in other files, then those redefinitions ARE considered a
functions that are neither part of Lisp itself nor an available add-on module to
getting undefined function errors.
Section 5 of the LGPL distinguishes between the case of a library being dynamically
LGPL states that the former results in an executable that is a "work that uses the
However, in connection with each distribution of this executable, you must also
deliver, in accordance with the terms and conditions of the LGPL, the source code
of Library (or your derivative thereof) that is incorporated into this executable.
--------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------
PACKAGE WILBUR
so as not to have many packages anymore |
packages.lisp
The Original Software is
WILBUR2 : Nokia Semantic Web Toolkit for CLOS
Copyright ( c ) 2001 - 2009 Nokia Corp. and/or its subsidiaries . All Rights Reserved .
Portions Copyright ( c ) 1989 - 1992 . All Rights Reserved .
Contributor(s ): ( mailto: )
as published by the Free Software Foundation , version 2.1 of the License . Note
Preamble to the Gnu Lesser General Public License
Copyright ( c ) 2000 Franz Incorporated , Berkeley , CA 94704
The concept of the GNU Lesser General Public License version 2.1 ( " LGPL " ) has been
one written in . Nevertheless , the LGPL can still be applied to a Lisp program
A " Library " in is a collection of Lisp functions , data and foreign modules .
The form of the Library can be Lisp source code ( for processing by an interpreter )
A Lisp application may include the same set of Lisp objects as does a Library , but
this does not mean that the application is necessarily a " work based on the Library "
The Library consists of everything in the distribution file set before any
work based on the Library . If additional methods are added to generic functions in
the Library , those additional methods are NOT considered a work based on the
Library . If Library classes are subclassed , these subclasses are NOT considered a
work based on the Library . If the Library is modified to explicitly call other
Lisp , then the functions called by the modified Library ARE considered a work based
on the Library . The goal is to ensure that the Library will compile and run without
It is permitted to add proprietary source code to the Library , but it must be done
in a way such that the Library will still run without that proprietary code present .
linked at runtime and one being statically linked at build time . Section 5 of the
Library . " Section 5 of the LGPL states that the latter results in one that is a
" derivative of the Library " , which is therefore covered by the LGPL . Since Lisp only
offers one choice , which is to link the Library into an executable at build time , we
declare that , for the purpose applying the LGPL to the Library , an executable that
results from linking a " work that uses the Library " with the Library is considered a
" work that uses the Library " and is therefore NOT covered by the LGPL .
Because of this declaration , section 6 of LGPL is not applicable to the Library .
Purpose : This file contains the package definition for .
(in-package "CL-USER")
(defpackage "WILBUR"
(:nicknames "W"
(:use "COMMON-LISP"
#+(or :digitool :clozure) "CCL"
#+:excl "EXCL"
#+:excl "SOCKET"
#+:excl "MOP"
#+:sbcl "SB-SYS"
#+:sbcl "SB-GRAY"
#+:lispworks "MP")
(:export "*CURRENT-PARSER*"
"*DB*"
"*NAME-READER*"
"*NODES*"
"-DAML+OIL-URI-"
"-DAML-FIRST-URI-"
"-DAML-LIST-URI-"
"-DAML-NIL-URI-"
"-DAML-REST-URI-"
"-OWL-FIRST-URI-"
"-OWL-IMPORTS-URI-"
"-OWL-LIST-URI-"
"-OWL-NIL-URI-"
"-OWL-REST-URI-"
"-RDF-ABOUT-URI-"
"-RDF-ABOUTEACH-URI-"
"-RDF-ABOUTEACHPREFIX-URI-"
"-RDF-ALT-URI-"
"-RDF-ATTR-MAP-"
"-RDF-ATTRS-"
"-RDF-BAG-URI-"
"-RDF-BAGID-URI-"
"-RDF-DATATYPE-URI-"
"-RDF-DESCRIPTION-URI-"
"-RDF-ID-URI-"
"-RDF-LI-URI-"
"-RDF-NODEID-URI-"
"-RDF-OBJECT-URI-"
"-RDF-PARSETYPE-URI-"
"-RDF-PREDICATE-URI-"
"-RDF-RDF-URI-"
"-RDF-RESOURCE-URI-"
"-RDF-SEQ-URI-"
"-RDF-STATEMENT-URI-"
"-RDF-SUBJECT-URI-"
"-RDF-TYPE-URI-"
"-RDF-URI-"
"-RDFS-CLASS-URI-"
"-RDFS-COMMENT-URI-"
"-RDFS-CONSTRAINTPROPERTY-URI-"
"-RDFS-CONSTRAINTRESOURCE-URI-"
"-RDFS-CONTAINER-URI-"
"-RDFS-DOMAIN-URI-"
"-RDFS-ISDEFINEDBY-URI-"
"-RDFS-LABEL-URI-"
"-RDFS-LITERAL-URI-"
"-RDFS-RANGE-URI-"
"-RDFS-RESOURCE-URI-"
"-RDFS-SEEALSO-URI-"
"-RDFS-SUBCLASSOF-URI-"
"-RDFS-SUBPROPERTYOF-URI-"
"-RDFS-URI-"
"-WHITESPACE-CHARS-"
"-XML-LANG-ATTR-"
"ABOUT-AND-ID-BOTH-PRESENT"
"ABOUT-AND-NODEID-BOTH-PRESENT"
"ADD-NAMESPACE"
"ADD-TRIPLE"
"ADD-VALUE"
"ALL-VALUES"
"ATTACH-TO-PARENT"
"BLANK-NODE-DB-MIXIN"
"CHAR-CONTENT"
"CHAR-CONTENT"
"CLOSE-RDF-ELEMENT"
"CLOSE-TAG"
"COLLAPSE-WHITESPACE"
"COLLECT-USING-FSA"
"COMMENT"
"CONTAINER-REQUIRED"
"DAML-CONS"
"DAML-LIST"
"DAML-PARSER"
"DATE-CLEANUP-DB-MIXIN"
"DB"
"DB-ADD-TRIPLE"
"DB-BLANK-NODE-URI"
"DB-BLANK-NODE-URI-P"
"DB-CLEAR"
"DB-CLEAR-REASONER-CACHE"
"DB-DEL-SOURCE"
"DB-DEL-TRIPLE"
"DB-FIND-CBD"
"DB-FIND-SOURCE-DESC"
"DB-GET-VALUES"
"DB-INDEX-LITERALS"
"DB-INDEX-LITERALS-P"
"DB-LOAD"
"DB-LOAD-USING-SOURCE"
"DB-MAKE-TRIPLE"
"DB-MATCH-LITERALS"
"DB-MERGE"
"DB-NODE-PROPERTIES-PARTITIONED"
"DB-NODE-TYPE-P"
"DB-NODE-TYPES"
"DB-QUERY"
"DB-QUERY-BY-SOURCE"
"DB-REIFY"
"DB-RESOLVE-BLANK-NODE-URI"
"DB-RESOLVE-BLANK-NODE-URI"
"DB-SAMEAS-CLUSTERS"
"DB-SOURCE-DESCS"
"DB-SOURCE-REAL-URL"
"DB-SOURCES"
"DB-STARTUP-TIME"
"DB-SUPPORTS-MATCHING-P"
"DB-TRANSFORM-LITERAL"
"DB-TRIPLE-LOCK"
"DB-TRIPLES"
"DB-URI->BLANK-NODE"
"DEDUCTIVE-CLOSURE-DB-MIXIN"
"DEFER-TASK"
"DEFINE-READTABLE"
"DEFINE-RESOURCE-POOL"
"DEL-NAMESPACE"
"DEL-TRIPLE"
"DEL-VALUE"
"DICTIONARY"
"DICTIONARY-ADD-NAMESPACE"
"DICTIONARY-APROPOS-LIST"
"DICTIONARY-NAMESPACES"
"DICTIONARY-NODE-CLASS"
"DICTIONARY-NODES"
"DICTIONARY-REMOVE-NAMESPACE"
"DICTIONARY-RENAME-NAMESPACE"
"DICTIONARY-UNRESOLVED-NODES"
"DO-STRING-DICT"
"DOLIST+"
"DSB"
"DTD-TERMINATION-PROBLEM"
"DUPLICATE-NAMESPACE-PREFIX"
"ENABLE-LITERAL-SHORTHAND"
"ENABLE-NODE-SHORTHAND"
"END-DOCUMENT"
"END-ELEMENT"
"ENTITY-DECLARATION"
"ENTITY-NAME"
"ERROR-DEFINITION-TYPE"
"ERROR-EXPECTATION"
"ERROR-THING"
"EXECUTE-DEFERRED-TASK"
"EXPAND-NAME-WITH-NAMESPACE"
"FEATURE-NOT-SUPPORTED"
"FILE-URL"
"FIND-FIRST-PRODUCER"
"FIND-HTTP-PROXY"
"FIND-LONG-NAME"
"FIND-NODE"
"FIND-SHORT-NAME"
"FRAME"
"FRAMES-RELATED-P"
"GET-ALL-VALUES"
"GET-CANONICAL-URI"
"GET-ENTITY"
"GET-HEADER"
"GET-VALUE"
"HTTP-BODY"
"HTTP-GET"
"HTTP-HEAD"
"HTTP-HEADERS"
"HTTP-MESSAGE"
"HTTP-STATUS"
"HTTP-URL"
"HTTP-VERSION"
"ILLEGAL-CHARACTER-CONTENT"
"INDEX-URI"
"INDEX-URI-P"
"INDEXED-DB"
"INDEXED-LITERAL-DB-MIXIN"
"INTERNED-LITERAL"
"INTERNED-LITERAL-DB-MIXIN"
"INVERT-PATH"
"IS-CONTAINER-P"
"ISO8601-DATE-STRING"
"LITERAL"
"LITERAL-DATATYPE"
"LITERAL-LANGUAGE"
"LITERAL-LANGUAGE-MATCH-P"
"LITERAL-STRING"
"LITERAL-TRANSFORM-DB-MIXIN"
"LITERAL-VALUE"
"LOAD-DB"
"LOAD-DB-FROM-STREAM"
"LOCKED-DB-MIXIN"
"MAKE-CONTAINER"
"MAKE-FILE-URL"
"MAKE-HTTP-URL"
"MAKE-LOCK"
"MAKE-TRIPLE-COLLECTION"
"MAKE-URL"
"MALFORMED-URL"
"MAYBE-USE-NAMESPACE"
"MISSING-DEFINITION"
"MISSING-ENTITY-DEFINITION"
"MISSING-NAMESPACE-DEFINITION"
"NAMESPACES"
"NODE"
"NODE-NAME-RESOLVED-P"
"NODE-URI"
"OPEN-HTTP-STREAM"
"OPEN-TAG"
"OUT-OF-SEQUENCE-INDEX"
"OWL-URI"
"OWN-SLOTS"
"PARSE"
"PARSE-DB-FROM-FILE"
"PARSE-DB-FROM-STREAM"
"PARSE-EXIF-DATE"
"PARSE-FROM-FILE"
"PARSE-FROM-STREAM"
"PARSE-HTTP-DATE"
"PARSE-ISO8601-DATE"
"PARSE-URL"
"PARSE-USING-PARSETYPE"
"PARSER-DB"
"PARSER-INTERPRET-CONTENT"
"PARSER-NODE"
"PARSER-PROPERTY"
"PATH"
"PATH-EXPRESSION"
"PI-TERMINATION-PROBLEM"
"PRIORITIZE"
"PRIORITIZE-LIST"
"PROC-INSTRUCTION"
"QUERY"
"QUIT-LISP-PROCESS"
"RDF-ERROR"
"RDF-PARSER"
"RDF-SYNTAX-NORMALIZER"
"RDF-URI"
"RDFS-URI"
"READ-USING"
"REIFY"
"RELATEDP"
"REPLAY"
"REVERSE-EXPAND-NAME"
"SAX-CONSUMER"
"SAX-CONSUMER-MODE"
"SAX-CONSUMER-PRODUCER"
"SAX-FILTER"
"SAX-PRODUCER"
"SAX-PRODUCER-CONSUMER"
"SIMPLE-EXTERNAL-PROCESS"
"SOURCE-CLOSE-STREAM"
"SOURCE-DESC"
"SOURCE-DESC-LOAD-TIME"
"SOURCE-DESC-LOADED-FROM"
"SOURCE-DESC-URL"
"SOURCE-LOCATOR"
"SOURCE-MODIFICATION"
"SOURCE-OPEN-STREAM"
"SOURCE-ORIGINAL-STREAM"
"SOURCE-WITH-MODIFICATION"
"SPLIT-LIST"
"START-DOCUMENT"
"START-ELEMENT"
"STRING->KEYWORD"
"STRING-DICT-ADD"
"STRING-DICT-DEL"
"STRING-DICT-GET"
"STRING-DICT-GET-BY-VALUE"
"STRING-SOURCE"
"SYNTAX-ERROR"
"TAG-ATTRIBUTE"
"TAG-ATTRIBUTES"
"TAG-COUNTERPART"
"TAG-EMPTY-P"
"TAG-NAMESPACES"
"TASK"
"TASK-NODE"
"TASK-PARAMETER"
"TASK-TYPE"
"TOKEN"
"TOKEN-STRING"
"TREE-PARSER"
"TRIPLE"
"TRIPLE-COLLECTION-ADD"
"TRIPLE-COLLECTION-TRIPLES"
"TRIPLE-OBJECT"
"TRIPLE-PREDICATE"
"TRIPLE-SOURCES"
"TRIPLE-SUBJECT"
"TRIPLE="
"UNEXPECTED-END-TAG"
"UNKNOWN-CHARACTER-REFERENCE"
"UNKNOWN-DECLARATION"
"UNKNOWN-PARSETYPE"
"URL"
"URL-HOST"
"URL-PATH"
"URL-PORT"
"URL-STRING"
"VALUE"
"WALK-USING-FSA"
"WITH-DB-LOCK"
"WITH-HTTP-RESPONSE"
"WITH-LOCK"
"WITH-RESOURCE-FROM-POOL"
"WITH-SPO-CASE"
"WITH-TEMPS"
"WITHOUT-CLOSURE"
"XML-ERROR"
"XML-FEATURE-NOT-SUPPORTED"
"XML-FORMATTER"
"XML-PARSER"
"XML-WARNING"
"XSD-URI"
"WITH-TAGS"
"FORMAT-WITH-TAGS"
"PRINC-WITH-TAGS"
"COMMA-SEPARATED"
"XHTML-PREAMBLE"
"XML-PREAMBLE"
"WITH-RDF-PAGE"
"ESCAPE-JSON-STRING"
"ESCAPE-XML-STRING"
"SERIALIZER"
"SERIALIZER-STREAM"
"SERIALIZER-DUMP"
"SINGLE-SUBJECT-TRIPLES"
"RDF/XML-SERIALIZER"))
|
ff9efca3e42ad2e589472db2608b20c1cccb3d9f0b0e05eee37abcf6e65b92a0 | nyampass/appollo | apps.clj | (ns appollo.web.api.apps
(use conceit.commons
[compojure.core :only [defroutes context GET POST]])
(require [appollo
[pager :as pager]
[apps :as apps]
[users :as users]
[conversion :as conversion]
[notifications :as notifications]]
[appollo.web
[request :as web.request]]
[appollo.web.api
[response :as api.response]]
[appollo.web.api.apps
[users :as api.apps.users]]
[appollo.web.api.response
[notification :as api.response.notification]]))
(defn send-notification-to-all [{{:keys [message number] :as params} :params :as req}]
(let [app (:authenticated-app req)
filter (web.request/structured-params params :filter)
extend (web.request/structured-params params :extend)
request (notifications/new-bulk-request! app
(web.request/with-conversion-context filter)
(web.request/with-conversion-context (?-> (filter-map-by-key #{:message :number} params)
(not-empty extend) (assoc :extend extend))))]
(notifications/send! request)
{:body {:status :succeeded
:request (api.response.notification/make-response request)}}))
(defn get-requests [{params :params :as req}]
(let [filter (conversion/convert (web.request/with-conversion-context (web.request/structured-params params :filter))
{:test [:type :bool
:optional true]
:status [:type :string
:optional true
:apply keyword
:validate #{:succeeded :failed :pending :processing}]})
cursor (conversion/convert (web.request/with-conversion-context (web.request/structured-params params :cursor))
{:previous [:type :string
:optional true]
:next [:type :string
:optional true]
:count [:type :integer
:optional true
:range {:min 1 :max 200}]})
requests (?->> (notifications/requests-of-app (:authenticated-app req))
(true? (:test filter)) (notifications/only-test)
(false? (:test filter)) (notifications/only-not-test)
(:status filter) (notifications/only-status (:status filter)))
pager (pager/make-pager requests :count-per-page (or (:count cursor) 50))
current-result (cond (:previous cursor) (pager/fetch-previous pager (:previous cursor))
(:next cursor) (pager/fetch-next pager (:next cursor))
:else (pager/fetch pager))]
{:body {:status "succeeded"
:requests {:count (count requests)
:data (map api.response.notification/make-response current-result)
:page {:next (let [last (last current-result)]
(when (and last (pager/next? pager (:id last))) (:id last)))
:previous (let [first (first current-result)]
(when (and first (pager/previous? pager (:id first))) (:id first)))}}}}))
(defn get-request [{{:keys [request-id] :as params} :params :as req}]
(if-let [request (notifications/request-of-app (:authenticated-app req) request-id)]
{:body {:status "succeeded"
:request (api.response.notification/make-response request)}}
(api.response/error 404 :requests.not-found (format "The request %s is not found." request-id))))
(defroutes in-app-routes
(context "/users" [] api.apps.users/dispatch)
(POST "/all/send" [] send-notification-to-all)
(GET "/requests" [] get-requests)
(GET "/requests/:request-id" [] get-request))
(defn wrap-app-auth [handler]
(fn [req]
(if-let [app (apps/app-with-authentication (get-in req [:params :app-id]) (get-in req [:headers "x-app-secret"]))]
(handler (assoc req :authenticated-app app))
(api.response/error 401 :apps.authentication-failed "Authentication failed."))))
(defroutes routes
(context "/:app-id" [] (-> in-app-routes wrap-app-auth)))
(def dispatch
routes)
| null | https://raw.githubusercontent.com/nyampass/appollo/50275285c26a2c4fa8f8209a7b6a14aafead86b9/src/appollo/web/api/apps.clj | clojure | (ns appollo.web.api.apps
(use conceit.commons
[compojure.core :only [defroutes context GET POST]])
(require [appollo
[pager :as pager]
[apps :as apps]
[users :as users]
[conversion :as conversion]
[notifications :as notifications]]
[appollo.web
[request :as web.request]]
[appollo.web.api
[response :as api.response]]
[appollo.web.api.apps
[users :as api.apps.users]]
[appollo.web.api.response
[notification :as api.response.notification]]))
(defn send-notification-to-all [{{:keys [message number] :as params} :params :as req}]
(let [app (:authenticated-app req)
filter (web.request/structured-params params :filter)
extend (web.request/structured-params params :extend)
request (notifications/new-bulk-request! app
(web.request/with-conversion-context filter)
(web.request/with-conversion-context (?-> (filter-map-by-key #{:message :number} params)
(not-empty extend) (assoc :extend extend))))]
(notifications/send! request)
{:body {:status :succeeded
:request (api.response.notification/make-response request)}}))
(defn get-requests [{params :params :as req}]
(let [filter (conversion/convert (web.request/with-conversion-context (web.request/structured-params params :filter))
{:test [:type :bool
:optional true]
:status [:type :string
:optional true
:apply keyword
:validate #{:succeeded :failed :pending :processing}]})
cursor (conversion/convert (web.request/with-conversion-context (web.request/structured-params params :cursor))
{:previous [:type :string
:optional true]
:next [:type :string
:optional true]
:count [:type :integer
:optional true
:range {:min 1 :max 200}]})
requests (?->> (notifications/requests-of-app (:authenticated-app req))
(true? (:test filter)) (notifications/only-test)
(false? (:test filter)) (notifications/only-not-test)
(:status filter) (notifications/only-status (:status filter)))
pager (pager/make-pager requests :count-per-page (or (:count cursor) 50))
current-result (cond (:previous cursor) (pager/fetch-previous pager (:previous cursor))
(:next cursor) (pager/fetch-next pager (:next cursor))
:else (pager/fetch pager))]
{:body {:status "succeeded"
:requests {:count (count requests)
:data (map api.response.notification/make-response current-result)
:page {:next (let [last (last current-result)]
(when (and last (pager/next? pager (:id last))) (:id last)))
:previous (let [first (first current-result)]
(when (and first (pager/previous? pager (:id first))) (:id first)))}}}}))
(defn get-request [{{:keys [request-id] :as params} :params :as req}]
(if-let [request (notifications/request-of-app (:authenticated-app req) request-id)]
{:body {:status "succeeded"
:request (api.response.notification/make-response request)}}
(api.response/error 404 :requests.not-found (format "The request %s is not found." request-id))))
(defroutes in-app-routes
(context "/users" [] api.apps.users/dispatch)
(POST "/all/send" [] send-notification-to-all)
(GET "/requests" [] get-requests)
(GET "/requests/:request-id" [] get-request))
(defn wrap-app-auth [handler]
(fn [req]
(if-let [app (apps/app-with-authentication (get-in req [:params :app-id]) (get-in req [:headers "x-app-secret"]))]
(handler (assoc req :authenticated-app app))
(api.response/error 401 :apps.authentication-failed "Authentication failed."))))
(defroutes routes
(context "/:app-id" [] (-> in-app-routes wrap-app-auth)))
(def dispatch
routes)
|
|
12c59b0a97c1e37ba3f6c63c13d3b63db984cceac94a81527bac26b8f60b6bae | hirokai/PaperServer | Render.hs | # LANGUAGE DoAndIfThenElse , DeriveDataTypeable , TemplateHaskell #
# LANGUAGE StandaloneDeriving #
module Handler.Render where
import Import
import Control.Lens
import Text.Blaze.Html.Renderer.Text
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as TLIO
import Text.Blaze.Html
import Text.HTML.SanitizeXSS (sanitize)
import Yesod.Auth
import System.Directory (doesFileExist,removeFile)
import qualified Parser.Paper as P
import Model.PaperMongo hiding (toStrict)
import Model.PaperP
import Handler.Utils
import Handler.Widget
for FormatC
import Text.Hastache
import Text.Hastache.Context
import qualified Data.ByteString as B
import Data.Text.Encoding (decodeUtf8)
import qualified Data.ByteString.Lazy as BL8
import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Data
import Data.Generics
import qualified Parser.Lens as L
import Data.Default
data RenderFormat = FormatA | FormatB | FormatC |
FormatATablet | FormatBTablet |
FormatAMobile | FormatBMobile
deriving (Show,Eq,Enum,Bounded)
-- Render paper.
renderPaper :: PaperId -> RenderFormat -> Handler TypedContent
renderPaper paperId format = do
email <- requireAuthId'
res <- getPaperDB email paperId
case res of
ToDo : Take only pid to avoid DB overhead .
let path = renderedCachePath format paperId
-- exist <- liftIO $ doesFileExist path
exist <- return False
if exist then
sendFile typeHtml path
else do
let pp = paperToPaperP paper
saveFormattedCache format paperId pp
ex <- liftIO $ doesFileExist path
if ex then
sendFile typeHtml path
else
notFound
Nothing ->
notFound
data PaperMastache = PaperMastache {
citation :: Citation2
, mcitHtml :: Text
, abstract :: Text
, mainHtml :: Text
, title :: Text
, paperId :: Text
, parser :: Text
, figs :: [P.Figure]
, refs :: [Reference2]
, availability_text :: Text
} deriving (Data, Typeable)
deriving instance Data P.Figure
deriving instance Typeable P.Figure
deriving instance Data Reference2
deriving instance Typeable Reference2
deriving instance Data P.Citation
deriving instance Typeable P.Citation
-- Citation with no maybe values
data Citation2 = Citation2 {
cdoi :: Text,
curl :: Text,
ctitle :: Text,
cjournal :: Text,
cyear :: Int,
cvolume :: Text,
cpageFrom :: Text,
cpageTo :: Text,
cauthors :: [Text],
cpublisher :: Text,
ctype :: Text
} deriving (Data, Typeable)
instance Default Citation2 where
def = fromCit def
-- Reference with no maybe values
data Reference2 = Reference2 {
refId :: Text,
refName :: Text,
refCit :: Citation2,
refText :: Text,
refUrl :: Url
}
fromRef :: P.Reference -> Reference2
fromRef (P.Reference id name cit txt url)
= Reference2
id
name
(maybe def fromCit cit)
(fromMaybe "" txt)
(fromMaybe "" url)
fromCit :: P.Citation -> Citation2
fromCit (P.Citation doi url title journal year volume pageFrom pageTo authors publisher _type)
= Citation2
(fromMaybe "" doi)
(fromMaybe "" url)
(fromMaybe "" title)
(fromMaybe "" journal)
(fromMaybe 0 year)
(fromMaybe "" volume)
(fromMaybe "" pageFrom)
(fromMaybe "" pageTo)
(authors)
(fromMaybe "" publisher)
(fromMaybe "" _type)
toMastacheP :: PaperId -> PaperP -> PaperMastache
toMastacheP pid p =
let
cit = p^.L.citation
title = fromMaybe "N/A" $ cit^.L.title
authors = p^.L.citation^.L.authors
mainHtml = case p^.L.mainHtml of
Just (P.FlatHtml t) -> t
Just _ -> "Stub: not supported structured text"
Nothing -> "(Not available)"
abstract = fromMaybe "(No abstract)" $ p^.L.abstract
cittxt = T.concat ["<i>",fromMaybe "" $ cit^.L.journal, "</i>",
maybe "" (\v -> T.concat [", <b>", v, "</b>"]) (cit^.L.volume),
maybe "" (\p -> T.append ", " p) (cit^.L.pageFrom),
maybe "" (\p -> T.append "-" p) (cit^.L.pageTo),
maybe "" (\y -> T.concat [" (",T.pack $ show y,")"]) (cit^.L.year)]
parser = fromMaybe "" $ p^.L.parserInfo
figs = p^.L.figures
refs = p^.L.references
f (P.Figure a b c d) = P.Figure a b c (T.pack $ localRes d)
avail = T.intercalate ";" $ catMaybes [
if isJust (p^.L.abstract) then Just "abs" else Nothing,
if isJust (p^.L.mainHtml) then Just "full" else Nothing,
if null figs then Nothing else Just "figs",
if null refs then Nothing else Just "refs"
]
in
PaperMastache
(fromCit cit) cittxt abstract mainHtml title (toPathPiece pid)
parser (map f figs) (map fromRef refs)
avail
renderMastache :: FilePath -> PaperId -> PaperP -> IO (PageContent (Route App))
renderMastache file pid pp = do
template <- B.readFile file
let inf = toMastacheP pid pp
res <- hastacheStr (defaultConfig{muEscapeFunc=emptyEscape}) template
(mkGenericContext inf)
return $ PageContent (preEscapedToHtml ("Test title"::Text))
(\_ -> preEscapedToHtml ("Test head"::Text))
(\_ -> preEscapedToHtml $ decodeUtf8 $ toStrict $ res)
toStrict = B.concat . BL8.toChunks
saveFormattedCache :: RenderFormat -> PaperId -> PaperP -> Handler ()
saveFormattedCache format paperId pp = do
render <- getUrlRender
let
paper = paperPToPaper pp
cit = pp^.L.citation
cit' = paperCitation paper
refs = pp^.L.references
refs' = paperReferences paper
figures = pp^.L.figures
mabstract = pp^.L.abstract
mmainHtml = fmap renderStructured $ pp^.L.mainHtml
parser = pp^.L.parserInfo
ctype = cit^.L.ptype
PageContent title head body
<- case format of
FormatA -> widgetToPageContent $(widgetFile "format_a")
FormatB -> widgetToPageContent $(widgetFile "format_b")
FormatC -> liftIO $ renderMastache "templates/format_c.mastache.html" paperId pp
FormatAMobile -> widgetToPageContent $(widgetFile "format_a_mobile")
_ -> widgetToPageContent $(widgetFile "format_b")
liftIO $ TLIO.writeFile (renderedCachePath format paperId) (renderHtml (body (\_ _ -> "")))
return ()
renderedCachePath :: RenderFormat -> PaperId -> String
renderedCachePath format pid = appRootFolder ++ "data/"++show format++"/" ++ T.unpack (toPathPiece pid)
deleteAllRenderedCache :: PaperId -> Handler ()
deleteAllRenderedCache pid = do
forM_ (enumFrom minBound) $ \format -> do
let path = renderedCachePath format pid
ex <- liftIO $ doesFileExist path
if ex then
liftIO $ removeFile path
else
return ()
| null | https://raw.githubusercontent.com/hirokai/PaperServer/b577955af08660253d0cd11282cf141d1c174bc0/Handler/Render.hs | haskell | Render paper.
exist <- liftIO $ doesFileExist path
Citation with no maybe values
Reference with no maybe values | # LANGUAGE DoAndIfThenElse , DeriveDataTypeable , TemplateHaskell #
# LANGUAGE StandaloneDeriving #
module Handler.Render where
import Import
import Control.Lens
import Text.Blaze.Html.Renderer.Text
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as TLIO
import Text.Blaze.Html
import Text.HTML.SanitizeXSS (sanitize)
import Yesod.Auth
import System.Directory (doesFileExist,removeFile)
import qualified Parser.Paper as P
import Model.PaperMongo hiding (toStrict)
import Model.PaperP
import Handler.Utils
import Handler.Widget
for FormatC
import Text.Hastache
import Text.Hastache.Context
import qualified Data.ByteString as B
import Data.Text.Encoding (decodeUtf8)
import qualified Data.ByteString.Lazy as BL8
import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Data
import Data.Generics
import qualified Parser.Lens as L
import Data.Default
data RenderFormat = FormatA | FormatB | FormatC |
FormatATablet | FormatBTablet |
FormatAMobile | FormatBMobile
deriving (Show,Eq,Enum,Bounded)
renderPaper :: PaperId -> RenderFormat -> Handler TypedContent
renderPaper paperId format = do
email <- requireAuthId'
res <- getPaperDB email paperId
case res of
ToDo : Take only pid to avoid DB overhead .
let path = renderedCachePath format paperId
exist <- return False
if exist then
sendFile typeHtml path
else do
let pp = paperToPaperP paper
saveFormattedCache format paperId pp
ex <- liftIO $ doesFileExist path
if ex then
sendFile typeHtml path
else
notFound
Nothing ->
notFound
data PaperMastache = PaperMastache {
citation :: Citation2
, mcitHtml :: Text
, abstract :: Text
, mainHtml :: Text
, title :: Text
, paperId :: Text
, parser :: Text
, figs :: [P.Figure]
, refs :: [Reference2]
, availability_text :: Text
} deriving (Data, Typeable)
deriving instance Data P.Figure
deriving instance Typeable P.Figure
deriving instance Data Reference2
deriving instance Typeable Reference2
deriving instance Data P.Citation
deriving instance Typeable P.Citation
data Citation2 = Citation2 {
cdoi :: Text,
curl :: Text,
ctitle :: Text,
cjournal :: Text,
cyear :: Int,
cvolume :: Text,
cpageFrom :: Text,
cpageTo :: Text,
cauthors :: [Text],
cpublisher :: Text,
ctype :: Text
} deriving (Data, Typeable)
instance Default Citation2 where
def = fromCit def
data Reference2 = Reference2 {
refId :: Text,
refName :: Text,
refCit :: Citation2,
refText :: Text,
refUrl :: Url
}
fromRef :: P.Reference -> Reference2
fromRef (P.Reference id name cit txt url)
= Reference2
id
name
(maybe def fromCit cit)
(fromMaybe "" txt)
(fromMaybe "" url)
fromCit :: P.Citation -> Citation2
fromCit (P.Citation doi url title journal year volume pageFrom pageTo authors publisher _type)
= Citation2
(fromMaybe "" doi)
(fromMaybe "" url)
(fromMaybe "" title)
(fromMaybe "" journal)
(fromMaybe 0 year)
(fromMaybe "" volume)
(fromMaybe "" pageFrom)
(fromMaybe "" pageTo)
(authors)
(fromMaybe "" publisher)
(fromMaybe "" _type)
toMastacheP :: PaperId -> PaperP -> PaperMastache
toMastacheP pid p =
let
cit = p^.L.citation
title = fromMaybe "N/A" $ cit^.L.title
authors = p^.L.citation^.L.authors
mainHtml = case p^.L.mainHtml of
Just (P.FlatHtml t) -> t
Just _ -> "Stub: not supported structured text"
Nothing -> "(Not available)"
abstract = fromMaybe "(No abstract)" $ p^.L.abstract
cittxt = T.concat ["<i>",fromMaybe "" $ cit^.L.journal, "</i>",
maybe "" (\v -> T.concat [", <b>", v, "</b>"]) (cit^.L.volume),
maybe "" (\p -> T.append ", " p) (cit^.L.pageFrom),
maybe "" (\p -> T.append "-" p) (cit^.L.pageTo),
maybe "" (\y -> T.concat [" (",T.pack $ show y,")"]) (cit^.L.year)]
parser = fromMaybe "" $ p^.L.parserInfo
figs = p^.L.figures
refs = p^.L.references
f (P.Figure a b c d) = P.Figure a b c (T.pack $ localRes d)
avail = T.intercalate ";" $ catMaybes [
if isJust (p^.L.abstract) then Just "abs" else Nothing,
if isJust (p^.L.mainHtml) then Just "full" else Nothing,
if null figs then Nothing else Just "figs",
if null refs then Nothing else Just "refs"
]
in
PaperMastache
(fromCit cit) cittxt abstract mainHtml title (toPathPiece pid)
parser (map f figs) (map fromRef refs)
avail
renderMastache :: FilePath -> PaperId -> PaperP -> IO (PageContent (Route App))
renderMastache file pid pp = do
template <- B.readFile file
let inf = toMastacheP pid pp
res <- hastacheStr (defaultConfig{muEscapeFunc=emptyEscape}) template
(mkGenericContext inf)
return $ PageContent (preEscapedToHtml ("Test title"::Text))
(\_ -> preEscapedToHtml ("Test head"::Text))
(\_ -> preEscapedToHtml $ decodeUtf8 $ toStrict $ res)
toStrict = B.concat . BL8.toChunks
saveFormattedCache :: RenderFormat -> PaperId -> PaperP -> Handler ()
saveFormattedCache format paperId pp = do
render <- getUrlRender
let
paper = paperPToPaper pp
cit = pp^.L.citation
cit' = paperCitation paper
refs = pp^.L.references
refs' = paperReferences paper
figures = pp^.L.figures
mabstract = pp^.L.abstract
mmainHtml = fmap renderStructured $ pp^.L.mainHtml
parser = pp^.L.parserInfo
ctype = cit^.L.ptype
PageContent title head body
<- case format of
FormatA -> widgetToPageContent $(widgetFile "format_a")
FormatB -> widgetToPageContent $(widgetFile "format_b")
FormatC -> liftIO $ renderMastache "templates/format_c.mastache.html" paperId pp
FormatAMobile -> widgetToPageContent $(widgetFile "format_a_mobile")
_ -> widgetToPageContent $(widgetFile "format_b")
liftIO $ TLIO.writeFile (renderedCachePath format paperId) (renderHtml (body (\_ _ -> "")))
return ()
renderedCachePath :: RenderFormat -> PaperId -> String
renderedCachePath format pid = appRootFolder ++ "data/"++show format++"/" ++ T.unpack (toPathPiece pid)
deleteAllRenderedCache :: PaperId -> Handler ()
deleteAllRenderedCache pid = do
forM_ (enumFrom minBound) $ \format -> do
let path = renderedCachePath format pid
ex <- liftIO $ doesFileExist path
if ex then
liftIO $ removeFile path
else
return ()
|
47aee5532f854c5ec4d06e4bc908f828eab92e8caad257e32228ec1d94f05318 | input-output-hk/cardano-ledger | Compact.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Test.Cardano.Chain.UTxO.Compact (
tests,
)
where
import Cardano.Chain.UTxO (
fromCompactTxId,
fromCompactTxIn,
fromCompactTxOut,
toCompactTxId,
toCompactTxIn,
toCompactTxOut,
)
import Cardano.HeapWords (HeapWords (..))
import Cardano.Prelude
import Hedgehog (MonadTest, assert, forAll, property, tripping)
import Test.Cardano.Chain.UTxO.Gen (genTxId, genTxIn, genTxOut)
import Test.Cardano.Prelude
import Test.Options (TSGroup, TSProperty, concatTSGroups, eachOfTS, withTestsTS)
--------------------------------------------------------------------------------
-- Compact TxIn
--------------------------------------------------------------------------------
ts_roundTripCompactTxIn :: TSProperty
ts_roundTripCompactTxIn =
eachOfTS 1000 genTxIn (trippingCompact toCompactTxIn fromCompactTxIn)
ts_prop_heapWordsSavingsCompactTxIn :: TSProperty
ts_prop_heapWordsSavingsCompactTxIn = withTestsTS 1000 $
property $ do
txIn <- forAll genTxIn
let compactTxIn = toCompactTxIn txIn
assert $ heapWords compactTxIn < heapWords txIn
--------------------------------------------------------------------------------
-- Compact TxId
--------------------------------------------------------------------------------
ts_roundTripCompactTxId :: TSProperty
ts_roundTripCompactTxId =
eachOfTS 1000 genTxId (trippingCompact toCompactTxId fromCompactTxId)
ts_prop_heapWordsSavingsCompactTxId :: TSProperty
ts_prop_heapWordsSavingsCompactTxId = withTestsTS 1000 $
property $ do
txId <- forAll genTxId
let compactTxId = toCompactTxId txId
assert $ heapWords compactTxId < heapWords txId
--------------------------------------------------------------------------------
-- Compact TxOut
--------------------------------------------------------------------------------
ts_roundTripCompactTxOut :: TSProperty
ts_roundTripCompactTxOut =
eachOfTS 1000 genTxOut (trippingCompact toCompactTxOut fromCompactTxOut)
ts_prop_heapWordsSavingsCompactTxOut :: TSProperty
ts_prop_heapWordsSavingsCompactTxOut = withTestsTS 1000 $
property $ do
txOut <- forAll genTxOut
let compactTxOut = toCompactTxOut txOut
assert $ heapWords compactTxOut < heapWords txOut
-------------------------------------------------------------------------------
-- Tripping util
-------------------------------------------------------------------------------
trippingCompact ::
(HasCallStack, MonadTest m, Show a, Show b, Eq a) =>
(a -> b) ->
(b -> a) ->
a ->
m ()
trippingCompact toCompact fromCompact x =
tripping x toCompact (Identity . fromCompact)
-------------------------------------------------------------------------------
-- Main test export
-------------------------------------------------------------------------------
tests :: TSGroup
tests = concatTSGroups [$$discoverPropArg, $$discoverRoundTripArg]
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger/31c0bb1f5e78e40b83adfd1a916e69f47fdc9835/eras/byron/ledger/impl/test/Test/Cardano/Chain/UTxO/Compact.hs | haskell | # LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
Compact TxIn
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Compact TxId
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Compact TxOut
------------------------------------------------------------------------------
-----------------------------------------------------------------------------
Tripping util
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Main test export
----------------------------------------------------------------------------- | # LANGUAGE TemplateHaskell #
module Test.Cardano.Chain.UTxO.Compact (
tests,
)
where
import Cardano.Chain.UTxO (
fromCompactTxId,
fromCompactTxIn,
fromCompactTxOut,
toCompactTxId,
toCompactTxIn,
toCompactTxOut,
)
import Cardano.HeapWords (HeapWords (..))
import Cardano.Prelude
import Hedgehog (MonadTest, assert, forAll, property, tripping)
import Test.Cardano.Chain.UTxO.Gen (genTxId, genTxIn, genTxOut)
import Test.Cardano.Prelude
import Test.Options (TSGroup, TSProperty, concatTSGroups, eachOfTS, withTestsTS)
ts_roundTripCompactTxIn :: TSProperty
ts_roundTripCompactTxIn =
eachOfTS 1000 genTxIn (trippingCompact toCompactTxIn fromCompactTxIn)
ts_prop_heapWordsSavingsCompactTxIn :: TSProperty
ts_prop_heapWordsSavingsCompactTxIn = withTestsTS 1000 $
property $ do
txIn <- forAll genTxIn
let compactTxIn = toCompactTxIn txIn
assert $ heapWords compactTxIn < heapWords txIn
ts_roundTripCompactTxId :: TSProperty
ts_roundTripCompactTxId =
eachOfTS 1000 genTxId (trippingCompact toCompactTxId fromCompactTxId)
ts_prop_heapWordsSavingsCompactTxId :: TSProperty
ts_prop_heapWordsSavingsCompactTxId = withTestsTS 1000 $
property $ do
txId <- forAll genTxId
let compactTxId = toCompactTxId txId
assert $ heapWords compactTxId < heapWords txId
ts_roundTripCompactTxOut :: TSProperty
ts_roundTripCompactTxOut =
eachOfTS 1000 genTxOut (trippingCompact toCompactTxOut fromCompactTxOut)
ts_prop_heapWordsSavingsCompactTxOut :: TSProperty
ts_prop_heapWordsSavingsCompactTxOut = withTestsTS 1000 $
property $ do
txOut <- forAll genTxOut
let compactTxOut = toCompactTxOut txOut
assert $ heapWords compactTxOut < heapWords txOut
trippingCompact ::
(HasCallStack, MonadTest m, Show a, Show b, Eq a) =>
(a -> b) ->
(b -> a) ->
a ->
m ()
trippingCompact toCompact fromCompact x =
tripping x toCompact (Identity . fromCompact)
tests :: TSGroup
tests = concatTSGroups [$$discoverPropArg, $$discoverRoundTripArg]
|
8b5d30f07f7b9635c8b614520c07334531c08fc39ded43c6d44237e254ae4c2d | replikativ/zufall | project.clj | (defproject io.replikativ/zufall "0.1.0"
:description "Random name generators"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]]
:repl-options {:init-ns zufall.core})
| null | https://raw.githubusercontent.com/replikativ/zufall/a1f55cfda7c2a737bd02d69896392db576baa7a5/project.clj | clojure | (defproject io.replikativ/zufall "0.1.0"
:description "Random name generators"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]]
:repl-options {:init-ns zufall.core})
|
|
dcfdfdba9c44e920a56296145cb93fd302bb977e9e83531f6a8904632f6aa902 | purescript/spago | Cmd.hs | module Spago.Cmd (getCmdVersion) where
import qualified Spago.Messages as Messages
import qualified Turtle.Bytes
import Spago.Prelude
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text.Encoding
import qualified Data.Text.Encoding.Error as Text.Encoding
import qualified Data.Versions as Version
-- | Get the semantic version of a command, e.g. purs --version
getCmdVersion :: forall io. MonadIO io => Text -> io (Either Text Version.SemVer)
getCmdVersion cmd =
Turtle.Bytes.shellStrictWithErr (cmd <> " --version") empty >>= \case
(ExitSuccess, out, _err) -> do
let versionText = headMay $ Text.split (== ' ') (Text.strip $ Text.Encoding.decodeUtf8With lenientDecode out)
parsed = versionText >>= (\vt -> Text.stripPrefix "v" vt <|> Just vt) >>= (hush . Version.semver)
pure $ case parsed of
Nothing ->
Left $
Messages.failedToParseCommandOutput
(cmd <> " --version")
(Text.Encoding.decodeUtf8With Text.Encoding.lenientDecode out)
(Text.Encoding.decodeUtf8With Text.Encoding.lenientDecode _err)
Just p -> Right p
(_, _out, _err) -> pure $ Left $ "Failed to run '" <> cmd <> " --version'"
| null | https://raw.githubusercontent.com/purescript/spago/310a7096b0d3e86f6464c667aa2339045d85d505/src/Spago/Cmd.hs | haskell | | Get the semantic version of a command, e.g. purs --version | module Spago.Cmd (getCmdVersion) where
import qualified Spago.Messages as Messages
import qualified Turtle.Bytes
import Spago.Prelude
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text.Encoding
import qualified Data.Text.Encoding.Error as Text.Encoding
import qualified Data.Versions as Version
getCmdVersion :: forall io. MonadIO io => Text -> io (Either Text Version.SemVer)
getCmdVersion cmd =
Turtle.Bytes.shellStrictWithErr (cmd <> " --version") empty >>= \case
(ExitSuccess, out, _err) -> do
let versionText = headMay $ Text.split (== ' ') (Text.strip $ Text.Encoding.decodeUtf8With lenientDecode out)
parsed = versionText >>= (\vt -> Text.stripPrefix "v" vt <|> Just vt) >>= (hush . Version.semver)
pure $ case parsed of
Nothing ->
Left $
Messages.failedToParseCommandOutput
(cmd <> " --version")
(Text.Encoding.decodeUtf8With Text.Encoding.lenientDecode out)
(Text.Encoding.decodeUtf8With Text.Encoding.lenientDecode _err)
Just p -> Right p
(_, _out, _err) -> pure $ Left $ "Failed to run '" <> cmd <> " --version'"
|
83a93dd657f2540f38091e6e8e3c3ab0f874d040d53efd76abd9e169339f78fd | erlang/corba | CosFileTransfer_FileTransferSession_impl.erl | %%----------------------------------------------------------------------
%%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2000 - 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%----------------------------------------------------------------------
%% File : CosFileTransfer_FileTransferSession_impl.erl
%% Description :
%%
Created : 12 Sept 2000
%%----------------------------------------------------------------------
-module('CosFileTransfer_FileTransferSession_impl').
%%----------------------------------------------------------------------
%% Include files
%%----------------------------------------------------------------------
-include_lib("orber/include/corba.hrl").
-include_lib("orber/src/orber_iiop.hrl").
-include("cosFileTransferApp.hrl").
%%----------------------------------------------------------------------
%% External exports
%%----------------------------------------------------------------------
-export([init/1,
terminate/2,
code_change/3,
handle_info/2]).
Interface functions
-export(['_get_protocols_supported'/2,
set_directory/3,
create_file/3,
create_directory/3,
get_file/3,
delete/3,
transfer/4,
append/4,
insert/5,
logout/2]).
%%----------------------------------------------------------------------
%% Internal exports
%%----------------------------------------------------------------------
-export([oe_orber_create_directory_current/2, oe_orber_get_content/4,
oe_orber_count_children/3]).
-export([invoke_call/3]).
%%----------------------------------------------------------------------
Records
%%----------------------------------------------------------------------
-record(state, {protocols, server, type, current, module, connection, mytype,
connection_timeout}).
%%----------------------------------------------------------------------
Macros
%%----------------------------------------------------------------------
-define(create_InitState(P, S, T, C, M, Co, Ty, CT),
#state{protocols=P, server=S, type=T, current=C, module=M, connection=Co,
mytype=Ty, connection_timeout=CT}).
-define(get_Protocols(S), S#state.protocols).
-define(get_Server(S), S#state.server).
-define(get_CurrentDir(S), S#state.current).
-define(get_Module(S), S#state.module).
-define(get_Connection(S), S#state.connection).
-define(get_MyType(S), S#state.mytype).
-define(get_ConnectionTimeout(S), S#state.connection_timeout).
-define(set_CurrentDir(S, C), S#state{current=C}).
-define(is_FTP(S), S#state.type=='FTP').
-define(is_FTAM(S), S#state.type=='FTAM').
-define(is_NATIVE(S), S#state.type=='NATIVE').
-define(is_ORBER_NATIVE(S), S#state.module==cosFileTransferNATIVE_file).
%%======================================================================
%% External functions
%%======================================================================
%%----------------------------------------------------------------------
%% Function : init/1
%% Returns : {ok, State} |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% Description: Initiates the server
%%----------------------------------------------------------------------
%% Logs in to the backing file server and creates the local listen
%% socket (TCP or SSL, depending on Protocol) used later for data
%% transfers. 'FTP' uses a stand-alone inets ftp client process;
%% {'NATIVE', Mod} delegates to a user-supplied module exposing an
%% ftp-like API (open/user/pwd/...).
init(['FTP', Host, Port, User, Password, _Account, Protocol, Timeout]) ->
    {ok, Pid} = inets:start(ftpc, [{host, Host}, {port, Port}], stand_alone),
    ok = ftp:user(Pid, User, Password),
    %% The login directory becomes the session's initial working directory.
    {ok, PWD} = ftp:pwd(Pid),
    {Connection, ProtocolSupport} = setup_local(Protocol),
    {ok, ?create_InitState(ProtocolSupport, Pid, 'FTP',
                           PWD, ftp, Connection, Protocol, Timeout)};
init([{'NATIVE', Mod}, Host, Port, User, Password, _Account, Protocol, Timeout]) ->
    {ok, Pid} = Mod:open(Host, Port),
    ok = Mod:user(Pid, User, Password),
    {ok, PWD} = Mod:pwd(Pid),
    {Connection, ProtocolSupport} = setup_local(Protocol),
    {ok, ?create_InitState(ProtocolSupport, Pid, 'NATIVE',
                           PWD, Mod, Connection, Protocol, Timeout)}.
%%----------------------------------------------------------------------
%% Function : terminate/2
%% Returns : any (ignored by gen_server)
%% Description: Shutdown the server
%%----------------------------------------------------------------------
%% Closes the data-transfer socket and logs out from the backing file
%% server before the gen_server goes away. All close operations are
%% best-effort ('catch'): termination must always complete.
terminate(_Reason, #state{type = Type, server = Server, module = Mod,
                          mytype = MyType, connection = Connection}) ->
    %% First tear down the local data-transfer listen socket.
    case MyType of
        ssl -> catch ssl:close(Connection);
        _   -> catch gen_tcp:close(Connection)
    end,
    %% Then shut down the control connection to the file server.
    case Type of
        'FTP'    -> inets:stop(ftpc, Server);
        'NATIVE' -> Mod:close(Server);
        _        -> ok
    end,
    ok.
%%----------------------------------------------------------------------
%% Function : code_change/3
%% Returns : {ok, NewState}
%% Description: Convert process state when code is changed
%%----------------------------------------------------------------------
%% Nothing to convert between code versions; the state is kept as is.
code_change(_OldVsn, CurrentState, _Extra) ->
    {ok, CurrentState}.
%%---------------------------------------------------------------------%
%% function : handle_info/2
%% Arguments:
%% Returns :
%% Effect :
%%----------------------------------------------------------------------
%% Handles out-of-band messages. An exit signal from a linked process
%% stops the server with the same reason; anything else is ignored.
handle_info({'EXIT', _Pid, Reason}, State) ->
    {stop, Reason, State};
handle_info(_Info, State) ->
    {noreply, State}.
%%======================================================================
%% CosFileTransfer::FileTransferSession
%%======================================================================
%%---------------------------------------------------------------------%
%% Function : _get_protocols_supported
%% Arguments :
%% Returns : A list of CosFileTransfer::ProtocolSupport, i.e.,
struct ProtocolSupport {
Istring protocol_name ;
%% ProtocolAddressList addresses; %% eq a list of strings.
%% };
%% Description:
%%----------------------------------------------------------------------
%% Read-only accessor: returns the ProtocolSupport list recorded at init.
'_get_protocols_supported'(_OE_This, #state{protocols = Protocols} = State) ->
    {reply, Protocols, State}.
%%----------------------------------------------------------------------
%% Function : set_directory
%% Arguments : Directory - CosFileTransfer::Directory
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Changes the session's working directory on the remote server to the
%% Directory object's complete (absolute) path; on success the new path
%% is stored in the server state.
set_directory(_OE_This, State, Directory) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Path = filename:join('CosFileTransfer_Directory':
                         '_get_complete_file_name'(Directory)),
    %% 'catch' guards against the backing module crashing on bad input;
    %% any non-ok result is mapped to a CosFileTransfer exception below.
    case catch Mod:cd(?get_Server(State), Path) of
        ok ->
            {reply, ok, ?set_CurrentDir(State, Path)};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not loggen in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason = "Unexpected error."})
    end.
%%----------------------------------------------------------------------
%% Function : create_file
%% Arguments : FileNameList
%% Returns : File
%% Description: This operation creates a File Object representing a
%% file which may or may not exist. Typically used as
%% argument when invoking transfer/3. See also get_file/2.
%%----------------------------------------------------------------------
%% Creates a File object for the given (possibly non-existing) path;
%% no contact is made with the remote server.
create_file(OE_This, State, FileNameList) ->
    NewFile = cosFileTransferApp:create_file(OE_This, FileNameList),
    {reply, NewFile, State}.
%%----------------------------------------------------------------------
%% Function : create_directory
%% Arguments : FileNameList - full path name.
%% Returns : Directory
%% Description:
%%----------------------------------------------------------------------
%% Creates a directory on the remote server (full path name expected)
%% and returns a Directory object representing it.
create_directory(OE_This, State, FileNameList) when ?is_FTP(State);
                                                    ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    case Mod:mkdir(?get_Server(State), filename:join(FileNameList)) of
        ok ->
            {reply, cosFileTransferApp:create_dir(OE_This, FileNameList), State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not loggen in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
%%----------------------------------------------------------------------
%% Function : get_file
%% Arguments : FileNameList
Returns : FileWrapper
%% Description: This operation should be independent of the working Directory,
%% i.e., a full path name must be supplied. The file or
%% directory the returned object is supposed to represent
%% MUST(!!!!) exist.
%%----------------------------------------------------------------------
%% Returns a FileWrapper holding a File or Directory object for the
%% given absolute path. The path MUST exist on the remote server;
%% check_type/3 probes the server to decide which kind it is.
get_file(OE_This, State, FileNameList) when ?is_FTP(State);
                                            ?is_NATIVE(State) ->
    case check_type(OE_This, State, filename:join(FileNameList)) of
        {ndirectory, _Listing} ->
            {reply,
             #'CosFileTransfer_FileWrapper'{the_file =
                                            cosFileTransferApp:
                                            create_dir(OE_This,
                                                       FileNameList),
                                            file_type = ndirectory},
             State};
        nfile ->
            {reply,
             #'CosFileTransfer_FileWrapper'{the_file =
                                            cosFileTransferApp:
                                            create_file(OE_This,
                                                        FileNameList),
                                            file_type = nfile},
             State};
        Other ->
            %% check_type/3 may return a {stop, normal, ...} tuple when
            %% the session must terminate; pass it through unchanged.
            Other
    end.
%%----------------------------------------------------------------------
%% Function : delete
%% Arguments : File
%% Returns : -
%% Description:
%%----------------------------------------------------------------------
%% Deletes the remote file or directory represented by File. The
%% object's "is_directory" property decides whether Mod:delete or
%% Mod:rmdir is used.
delete(_OE_This, State, File) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Result =
        case 'CosPropertyService_PropertySet':
            get_property_value(File, "is_directory") of
            #any{value=false} ->
                Mod:delete(?get_Server(State),
                           filename:join('CosFileTransfer_File':
                                         '_get_complete_file_name'(File)));
            #any{value=true} ->
                Mod:rmdir(?get_Server(State),
                          filename:join('CosFileTransfer_File':
                                        '_get_complete_file_name'(File)));
            Other ->
                %% Property lookup failed; fall through to the error mapping.
                Other
        end,
    case Result of
        ok ->
            {reply, ok, State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="File or Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not loggen in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
%%----------------------------------------------------------------------
%% Function : transfer
%% Arguments : SrcFile eq DestFile eq CosFileTransfer::File
%% Returns : -
%% Description: DestFile must be a newly created File object, using create_file()
on the Target FileTransferSession , prior to calling transfer ( ) .
%%----------------------------------------------------------------------
%% Copies SrcFile to DestFile between two FileTransferSessions. The
%% session owning the source file is the active side: it spawns a proxy
%% (invoke_call/3) that invokes transfer on the target session, then
%% streams the file over a freshly established data connection.
%% First clause: this node serves files directly (ORBER NATIVE backend).
transfer(OE_This, State, SrcFile, DestFile) when ?is_ORBER_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            %% The source FTS is supposed to be the active one, set up a connection.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
            Pid = spawn(?MODULE, invoke_call, [self(), transfer,
                                               [TargetFTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      filename:join(SrcName)),
            check_reply(Pid),
            {reply, ok, State};
        {target, _SourceFTS} ->
            %% Passive side: accept the incoming stream into the destination.
            DestName = 'CosFileTransfer_File':'_get_complete_file_name'(DestFile),
            receive_file(?get_MyType(State), ?get_Connection(State),
                         ?get_ConnectionTimeout(State),
                         filename:join(DestName), write),
            {reply, ok, State}
    end;
%% Second clause: FTP/NATIVE backends go through a local temporary copy.
transfer(OE_This, State, SrcFile, DestFile) when ?is_FTP(State); ?is_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, transfer, 0, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, send, 0)
    end.
%%----------------------------------------------------------------------
%% Function : append
%% Arguments : SrcFile eq DestFile eq CosFileTransfer::File
%% Returns : -
%% Description:
%%----------------------------------------------------------------------
%% Appends SrcFile's contents to DestFile. Same active/passive split as
%% transfer/4, but both sides first verify the path via check_type/3 and
%% the receiving side opens the destination in 'append' mode.
append(OE_This, State, SrcFile, DestFile) when ?is_ORBER_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            SrcName = filename:join('CosFileTransfer_File':
                                    '_get_complete_file_name'(SrcFile)),
            %% Raises FileNotFoundException if the source does not exist.
            check_type(OE_This, State, SrcName),
            %% The source FTS is supposed to be the active one, set up a connection.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            Pid = spawn(?MODULE, invoke_call, [self(), append,
                                               [TargetFTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      SrcName),
            check_reply(Pid),
            {reply, ok, State};
        {target, _SourceFTS} ->
            DestName = filename:join('CosFileTransfer_File':
                                     '_get_complete_file_name'(DestFile)),
            check_type(OE_This, State, DestName),
            receive_file(?get_MyType(State), ?get_Connection(State),
                         ?get_ConnectionTimeout(State), DestName, append),
            {reply, ok, State}
    end;
append(OE_This, State, SrcFile, DestFile) when ?is_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, append, 0, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, append, 0)
    end;
%% Plain FTP backends do not support append; reject the request.
append(_OE_This, _State, _SrcFile, _DestFile) ->
    corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO}).
%%----------------------------------------------------------------------
%% Function : insert
%% Arguments : SrcFile eq DestFile eq CosFileTransfer::File
%% Offset - long
%% Returns : -
%% Description:
%%----------------------------------------------------------------------
%% Inserts SrcFile's contents into DestFile at the given byte Offset.
%% Only NATIVE backends implement this; others get NO_IMPLEMENT.
insert(OE_This, State, SrcFile, DestFile, Offset) when ?is_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} when ?is_ORBER_NATIVE(State) ->
            SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
            %% Raises FileNotFoundException if the source does not exist.
            check_type(OE_This, State, filename:join(SrcName)),
            %% The source FTS is supposed to be the active one, set up a connection.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            Pid = spawn(?MODULE, invoke_call, [self(), insert,
                                               [TargetFTS, SrcFile,
                                                DestFile, Offset]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      filename:join(SrcName)),
            check_reply(Pid),
            {reply, ok, State};
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, insert, Offset, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, insert, Offset)
    end;
insert(_OE_This, _State, _SrcFile, _DestFile, _Offset) ->
    corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO}).
%%----------------------------------------------------------------------
%% Function : logout
%% Arguments : -
%% Returns : -
%% Description:
%%----------------------------------------------------------------------
%% Closes the connection to the backing file server (best effort) and
%% stops the session gracefully.
logout(_OE_This, #state{type = Type, module = Mod, server = Server} = State)
  when Type == 'FTP'; Type == 'NATIVE' ->
    catch Mod:close(Server),
    {stop, normal, ok, State}.
%%======================================================================
Internal functions
%%======================================================================
%%----------------------------------------------------------------------
%% Function : oe_orber_create_directory_current
%% Arguments : -
%% Returns : Directory
%% Description: Creates a Directory describing the working directory
%% of the remote server, e.g., an FTP-server.
%%----------------------------------------------------------------------
%% Builds a Directory object for the session's current working
%% directory. The nlist call only serves to verify that the directory
%% is still reachable before the object is created.
oe_orber_create_directory_current(OE_This, State) when ?is_FTP(State);
                                                       ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    FileNameList = filename:split(?get_CurrentDir(State)),
    case Mod:nlist(?get_Server(State), ?get_CurrentDir(State)) of
        {ok, _Listing} ->
            {reply, cosFileTransferApp:create_dir(OE_This, FileNameList),
             State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not loggen in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
%%----------------------------------------------------------------------
%% Function : oe_orber_get_content
%% Arguments : -
%% Returns : A list of CosFileTransfer::FileWrapper (the directory's children)
%% Description:
%%----------------------------------------------------------------------
%% Lists the children of the given directory and wraps each entry in a
%% FileWrapper via create_content/5. An epath error is treated as an
%% empty directory rather than an exception.
oe_orber_get_content(OE_This, State, FileNameList, Parent) when ?is_FTP(State);
                                                                ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    case Mod:nlist(?get_Server(State), filename:join(FileNameList)) of
        {ok, Listing} ->
            create_content(Listing, OE_This, State, Parent, FileNameList);
        {error, epath} ->
            {reply, [], State};
        _ ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."})
    end.
%%----------------------------------------------------------------------
%% Function : oe_orber_count_children
%% Arguments : -
%% Returns : long - the number of children of the given directory
%% Description:
%%----------------------------------------------------------------------
%% Returns the number of children of the given directory, derived from
%% check_type/3. A {stop, ...} result from check_type is propagated so
%% the server terminates while the caller gets an INTERNAL exception.
oe_orber_count_children(OE_This, State, FileNameList) when ?is_FTP(State);
                                                           ?is_NATIVE(State) ->
    case catch check_type(OE_This, State, filename:join(FileNameList)) of
        {ndirectory, Members} ->
            {reply, length(Members), State};
        {stop, normal, _, _} ->
            {stop, normal,
             {'EXCEPTION', #'INTERNAL'{completion_status=?COMPLETED_NO}},
             State};
        _->
            corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
    end.
%%----------------------------------------------------------------------
%% Function : delete_tmp_file
%% Arguments : -
%% Returns : ok | {'EXCEPTION', E}
%% Description:
%%----------------------------------------------------------------------
%% Removes a temporary local copy created during a transfer. Raises a
%% RequestFailureException carrying the supplied message if the file
%% cannot be deleted.
delete_tmp_file(TmpFileName, ErrorMsg) ->
    case file:delete(TmpFileName) of
        ok ->
            ok;
        {error, _Reason} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'{reason=ErrorMsg})
    end.
%%----------------------------------------------------------------------
%% Function : invoke_call
%% Arguments : -
%% Returns : ok | {'EXCEPTION', E}
%% Description:
%%----------------------------------------------------------------------
%% Entry point for the proxy process spawned by transfer/append/insert:
%% performs the (possibly blocking) operation on the target session's
%% stub and reports the outcome back to the spawning process as a
%% {transfer_result, self(), Result} message (consumed by check_reply/1).
invoke_call(Parent, Op, Args) ->
    Outcome = (catch apply('CosFileTransfer_FileTransferSession', Op, Args)),
    Parent ! {transfer_result, self(), Outcome},
    ok.
%%----------------------------------------------------------------------
%% Function : check_reply
Arguments : Pid - the pid of the spawned process .
%% Returns : ok | {'EXCEPTION', E}
%% Description:
%%----------------------------------------------------------------------
%% Waits for the outcome of an operation delegated to the spawned proxy
%% process (see invoke_call/3). Returns ok on success; re-raises a
%% remote CORBA exception locally; maps an abnormal exit of the proxy
%% to an INTERNAL system exception.
%% The original code had an 'after infinity -> ...' clause, which is
%% unreachable by definition (an infinity timeout never fires); it has
%% been removed - a plain receive blocks indefinitely, which is the
%% same behavior. The remote call itself is bounded by the ORB's own
%% request timeout.
check_reply(Pid) ->
    receive
        {transfer_result, Pid, ok} ->
            ok;
        {transfer_result, Pid, {'EXCEPTION', E}} ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:check_reply();
Raised exception: ", [?LINE, E], ?DEBUG_LEVEL),
            corba:raise(E);
        {transfer_result, Pid, {'EXIT', Reason}} ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:check_reply();
Got EXIT-signal with reason: ", [?LINE, Reason], ?DEBUG_LEVEL),
            corba:raise(#'INTERNAL'{minor=199,
                                    completion_status=?COMPLETED_NO})
    end.
%%----------------------------------------------------------------------
%% Function : which_FTS_type
%% Arguments : -
Returns : { source , FTS } | { target , FTS } | { ' EXCEPTION ' , # ' BAD_PARAM ' { } }
Description : Used to determine if the target FTS is supposed to act
as sender or receiver and also return the counter part FTS .
%% An exception is raised if the user supplied incorrect parameters.
%%----------------------------------------------------------------------
%% Decides whether this session is the sender (source) or the receiver
%% (target) of a transfer, and returns the counterpart session. Raises
%% BAD_PARAM if neither file is associated with this session.
which_FTS_type(OE_This, SrcFile, DestFile) ->
    TargetFTS = 'CosFileTransfer_File':'_get_associated_session'(DestFile),
    SourceFTS = 'CosFileTransfer_File':'_get_associated_session'(SrcFile),
    case corba_object:is_equivalent(OE_This, TargetFTS) of
        true ->
            {target, SourceFTS};
        false ->
            case corba_object:is_equivalent(OE_This, SourceFTS) of
                true ->
                    {source, TargetFTS};
                false ->
                    corba:raise(#'BAD_PARAM'{completion_status=?COMPLETED_NO})
            end
    end.
%%----------------------------------------------------------------------
%% Function : setup_connection
Arguments : A list of # ' CosFileTransfer_ProtocolSupport ' { }
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Picks the first ProtocolSupport entry matching our own transport
%% (tcp or ssl) and connects to one of its addresses. Raises a
%% TransferException when no advertised protocol matches.
setup_connection([], Protocol, _) ->
    orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:setup_connection(~p);
The Protocols listed are not supported.", [?LINE, Protocol], ?DEBUG_LEVEL),
    corba:raise(#'CosFileTransfer_TransferException'{reason="Unsupported protocol"});
setup_connection([#'CosFileTransfer_ProtocolSupport'{protocol_name="TCP/IP",
                                                     addresses=Addr}|_],
                 tcp, Timeout) ->
    setup_connection_helper(Addr, gen_tcp, [], Timeout);
setup_connection([#'CosFileTransfer_ProtocolSupport'{protocol_name="SSL",
                                                     addresses=Addr}|_],
                 ssl, Timeout) ->
    %% Client-side TLS options come from the application environment.
    Options = [{certfile, cosFileTransferApp:ssl_client_certfile()},
               {verify, cosFileTransferApp:ssl_client_verify()},
               {depth, cosFileTransferApp:ssl_client_depth()}] ++
        ssl_client_cacertfile_option(),
    setup_connection_helper(Addr, ssl, Options, Timeout);
setup_connection([_|T], Type, Timeout) ->
    %% Not a match for our transport; try the next advertised protocol.
    setup_connection(T, Type, Timeout).
%% Tries each advertised "Host:Port" address in turn until one connect
%% succeeds; raises RequestFailureException when the list is exhausted.
%% Malformed address strings are silently skipped.
setup_connection_helper([], _, _, _) ->
    corba:raise(#'CosFileTransfer_RequestFailureException'
                {reason="Unable to contact remote server."});
setup_connection_helper([H|T], Driver, Options, Timeout) ->
    case string:tokens(H, ":") of
        [Host, Port] when Driver == gen_tcp ->
            case gen_tcp:connect(Host, list_to_integer(Port),
                                 [binary,
                                  {packet, raw},
                                  {reuseaddr, true},
                                  {nodelay, true}|Options], Timeout) of
                {ok, Sock} ->
                    {gen_tcp, Sock};
                _->
                    %% No response.
                    setup_connection_helper(T, Driver, Options, Timeout)
            end;
        [Host, Port] when Driver == ssl ->
            case ssl:connect(Host, list_to_integer(Port),
                             [binary,
                              {packet, 0},
                              {active, false}|Options], Timeout) of
                {ok, Sock} ->
                    {ssl, Sock};
                _->
                    %% No response.
                    setup_connection_helper(T, Driver, Options, Timeout)
            end;
        _ ->
            %% Badly configured address.
            setup_connection_helper(T, Driver, Options, Timeout)
    end.
%% Returns the ssl cacertfile option for outgoing connections as a
%% (possibly empty) option LIST, suitable for appending with '++' in
%% setup_connection/3.
%% Bug fix: the previous version returned a bare {cacertfile, X} tuple
%% when a path was configured, so 'Opts ++ ssl_client_cacertfile_option()'
%% produced an improper list, breaking ssl:connect/4. The server-side
%% twin (ssl_server_cacertfile_option/0) already wrapped the tuple in a
%% list; this makes the client side consistent with it.
ssl_client_cacertfile_option() ->
    case cosFileTransferApp:ssl_client_cacertfile() of
        [] ->
            [];
        X when is_list(X) ->
            [{cacertfile, X}];
        _ ->
            []
    end.
%%----------------------------------------------------------------------
%% Function : create_content
%% Arguments :
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Tokenises a raw server listing and builds one FileWrapper per entry,
%% classifying each via check_type/3. Replies with the accumulated
%% wrapper list (in reverse listing order).
create_content(Listing, OE_This, State, Parent, PathList) ->
    create_content(string:tokens(Listing, ?SEPARATOR), OE_This,
                   State, Parent, PathList, []).

create_content([], _OE_This, State, _Parent, _PathList, Acc) ->
    {reply, Acc, State};
create_content([H|T], OE_This, State, Parent, PathList, Acc) ->
    %% Some servers list entries with full paths; keep only the basename.
    FullPathList = PathList ++[filename:basename(H)],
    case check_type(OE_This, State, filename:join(FullPathList)) of
        nfile ->
            create_content(T, OE_This, State, Parent, PathList,
                           [#'CosFileTransfer_FileWrapper'
                            {the_file = cosFileTransferApp:create_file(OE_This,
                                                                       FullPathList,
                                                                       Parent),
                             file_type = nfile}|Acc]);
        {ndirectory, _Members} ->
            create_content(T, OE_This, State, Parent, PathList,
                           [#'CosFileTransfer_FileWrapper'
                            {the_file = cosFileTransferApp:create_dir(OE_This,
                                                                      FullPathList,
                                                                      Parent),
                             file_type = ndirectory}|Acc]);
        Other ->
            %% check_type/3 may return {stop, normal, ...}; propagate it.
            Other
    end.
%%----------------------------------------------------------------------
%% Function : MISC functions
%% Arguments :
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Creates the local listen socket used as the passive side of data
%% transfers, bound to an ephemeral port (port 0), and returns it
%% together with the matching ProtocolSupport advertisement for this
%% host. backlog 1: only a single peer connects per transfer.
setup_local(tcp) ->
    {ok,Socket}=gen_tcp:listen(0, [binary,
                                   {packet, 0},
                                   {backlog,1},
                                   {active, false}]),
    {ok, Port} = inet:port(Socket),
    {Socket, [#'CosFileTransfer_ProtocolSupport'{protocol_name="TCP/IP",
                                                 addresses = [local_address(Port)]}]};
setup_local(ssl) ->
    %% Server-side TLS options come from the application environment.
    Options = [{certfile, cosFileTransferApp:ssl_server_certfile()},
               {verify, cosFileTransferApp:ssl_server_verify()},
               {depth, cosFileTransferApp:ssl_server_depth()}] ++
        ssl_server_cacertfile_option(),
    {ok,Socket}=ssl:listen(0, [binary,
                               {packet, 0},
                               {backlog,1},
                               {active, false}|Options]),
    {ok, {_Address, Port}} = ssl:sockname(Socket),
    {Socket, [#'CosFileTransfer_ProtocolSupport'{protocol_name="SSL",
                                                 addresses = [local_address(Port)]}]}.
%% Builds an "A.B.C.D:Port" address string for this host's primary
%% IPv4 address, as advertised in ProtocolSupport entries.
local_address(Port) ->
    {ok, Hostname} = inet:gethostname(),
    {ok, {A1, A2, A3, A4}} = inet:getaddr(Hostname, inet),
    lists:flatten(io_lib:format("~w.~w.~w.~w:~w", [A1, A2, A3, A4, Port])).
%% Returns the ssl cacertfile option for incoming connections as a
%% (possibly empty) option list.
ssl_server_cacertfile_option() ->
    case cosFileTransferApp:ssl_server_cacertfile() of
        File when is_list(File), File =/= [] ->
            [{cacertfile, File}];
        _NotConfigured ->
            []
    end.
%%----------------------------------------------------------------------
%% Function : source_file_operation
%% Arguments :
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Active-side implementation for the FTP/NATIVE backends: downloads
%% the source file to a local temporary copy, spawns a proxy that
%% invokes Op on the target session, streams the copy over the data
%% connection, and finally removes the temporary file.
source_FTS_operation(State, SrcFile, DestFile, Op, Offset, FTS) ->
    Mod = ?get_Module(State),
    %% The source FTS is supposed to be the active one, set up a connection.
    Protocols = 'CosFileTransfer_FileTransferSession':'_get_protocols_supported'(FTS),
    SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
    TempName = cosFileTransferApp:create_name("TemporarySrcFile"),
    case Mod:recv(?get_Server(State), filename:join(SrcName), TempName) of
        ok when Op == insert ->
            %% Downloaded the File, we are now ready to transmit.
            %% insert/5 carries the extra Offset argument.
            Pid = spawn(?MODULE, invoke_call, [self(), insert,
                                               [FTS, SrcFile, DestFile, Offset]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      TempName),
            %% Delete the temporary local copy.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            check_reply(Pid),
            {reply, ok, State};
        ok ->
            %% Downloaded the File, we are now ready to transmit.
            Pid = spawn(?MODULE, invoke_call, [self(), Op, [FTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      TempName),
            %% Delete the temporary local copy.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            check_reply(Pid),
            {reply, ok, State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="File not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not loggen in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."})
    end.
%%----------------------------------------------------------------------
%% Function : target_file_operation
%% Arguments :
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Passive-side implementation for the FTP/NATIVE backends: receives
%% the incoming stream into a local temporary file and then uploads it
%% to the backing server with the requested operation (send/append/
%% insert). The temporary file is removed in every outcome.
target_FTS_operation(State, _SrcFile, DestFile, Op, Offset) ->
    Mod = ?get_Module(State),
    DestName = 'CosFileTransfer_File':'_get_complete_file_name'(DestFile),
    TempName = cosFileTransferApp:create_name("TemporaryDestFile"),
    receive_file(?get_MyType(State), ?get_Connection(State),
                 ?get_ConnectionTimeout(State), TempName, write),
    Result =
        if
            Op == insert ->
                Mod:insert(?get_Server(State), TempName, filename:join(DestName), Offset);
            true ->
                %% Op is the backend function to call: send or append.
                Mod:Op(?get_Server(State), TempName, filename:join(DestName))
        end,
    case Result of
        ok ->
            %% Delete the temporary local copy.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            %% Completed the transfer successfully.
            {reply, ok, State};
        {error, epath} ->
            delete_tmp_file(TempName,
                            "IllegalOperationException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_IllegalOperationException'
                        {reason="Not allowed by destination."});
        {error, elogin} ->
            delete_tmp_file(TempName,
                            "SessionException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            delete_tmp_file(TempName,
                            "TransferException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_TransferException'
                        {reason="Premature connection ending."});
        {error, etnospc} ->
            delete_tmp_file(TempName,
                            "RequestFailureException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        {error, efnamena} ->
            delete_tmp_file(TempName,
                            "IllegalOperationException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_IllegalOperationException'
                        {reason="Not allowed by destination."})
    end.
%%----------------------------------------------------------------------
%% Function : receive_file
%% Arguments : Driver - currently only gen_tcp supported.
%% LSocket - which socket to use.
%% FileName - an absolute file name representing the
%% file we want to create or append to.
%% Type - 'read', 'write', 'append'.
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Passive side of a transfer: accepts one connection on the listen
%% socket (within Timeout) and writes the received stream to FileName,
%% opened in the given mode ('write' truncates, 'append' extends).
receive_file(tcp, LSock, Timeout, FileName, Type) ->
    %% The Type can be the ones allowed by the file-module, i.e.,
    %% 'read', 'write' or 'append'
    FD = file_open(FileName, Type),
    case gen_tcp:accept(LSock, Timeout) of
        {ok, Sock} ->
            receive_file_helper(gen_tcp, Sock, FD);
        {error, timeout} ->
            orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
                      "gen_tcp:accept(~p) timed out", [?LINE, Timeout], ?DEBUG_LEVEL),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="TCP accept timed out.."});
        {error, Why} ->
            orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
                      "gen_tcp:accept(~p) failed: ~p", [?LINE, Timeout, Why], ?DEBUG_LEVEL),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="TCP accept failed."})
    end;
receive_file(ssl, LSock, Timeout, FileName, Type) ->
    %% The Type can be the ones allowed by the file-module, i.e.,
    %% 'read', 'write' or 'append'
    FD = file_open(FileName, Type),
    %% For ssl the TCP accept and the TLS handshake are separate steps.
    case ssl:transport_accept(LSock, Timeout) of
        {ok, ASock} ->
            case ssl:handshake(ASock, Timeout) of
                {ok, Sock} ->
                    receive_file_helper(ssl, Sock, FD);
                {error, Error} ->
                    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
                              "ssl:handshake(~p) failed: ~p",
                              [?LINE, Timeout, Error], ?DEBUG_LEVEL),
                    corba:raise(#'CosFileTransfer_RequestFailureException'
                                {reason="TCP accept failed."})
            end;
        {error, timeout} ->
            orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
                      "ssl:transport_accept(~p) timed out",
                      [?LINE, Timeout], ?DEBUG_LEVEL),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="TCP accept timed out.."});
        {error, Why} ->
            orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
                      "ssl:transport_accept(~p) failed: ~p",
                      [?LINE, Timeout, Why], ?DEBUG_LEVEL),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="TCP accept failed."})
    end.
%% Reads from the socket until the peer closes it (the sending side
%% signals end-of-file by closing, see send_file_helper/4), appending
%% every received chunk to the open file descriptor.
%% NOTE(review): the return value of file:write/2 is ignored, so a
%% write error (e.g. disk full) would go undetected here - TODO confirm
%% this is intentional.
receive_file_helper(Driver, Sock, FD) ->
    case Driver:recv(Sock, 0) of
        {ok, Bin} ->
            file:write(FD, Bin),
            receive_file_helper(Driver, Sock, FD);
        {error, closed} ->
            file:close(FD);
        What ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:receive_file(~p);
Error occured when receiving data: ~p", [?LINE, Driver, What], ?DEBUG_LEVEL),
            corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
    end.
%%----------------------------------------------------------------------
%% Function : send_file
%% Arguments : Driver - currently only gen_tcp supported.
%% Sock - which socket to use.
%% FileName - an absolute file name representing the
%% file we want to send.
%% Returns :
%% Description:
%%----------------------------------------------------------------------
%% Opens a data connection to the peer session (choosing among its
%% advertised protocols) and streams the named local file over it in
%% chunks of the configured buffer size.
send_file(Protocols, Type, Timeout, FileName) ->
    {Driver, Sock} = setup_connection(Protocols, Type, Timeout),
    FD = file_open(FileName, read),
    BuffSize = cosFileTransferApp:get_buffert_size(),
    send_file_helper(Driver, Sock, FD, BuffSize).
%% Streams the open file descriptor to the socket chunk by chunk.
%% Closing the socket at eof is what tells the receiving side that the
%% transfer is complete (receive_file_helper/3 stops on {error, closed}).
send_file_helper(Driver, Sock, FD, BuffSize) ->
    case file:read(FD, BuffSize) of
        eof ->
            file:close(FD),
            Driver:close(Sock);
        {ok, Bin} ->
            case Driver:send(Sock, Bin) of
                ok ->
                    send_file_helper(Driver, Sock, FD, BuffSize);
                What ->
                    orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:send_file_helper(~p);
Error occured when sending data: ~p", [?LINE, Driver, What], ?DEBUG_LEVEL),
                    corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
            end
    end.
%% Opens File in raw binary mode for the given operation ('read',
%% 'write' or 'append'); maps any error to a RequestFailureException.
file_open(File, Type) ->
    case file:open(File, [raw, binary, Type]) of
        {ok, FD} ->
            FD;
        {error, What} ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:file_open(~p);
Failed to open the file due to: ~p", [?LINE, File, What], ?DEBUG_LEVEL),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unable to open given file."})
    end.
%%----------------------------------------------------------------------
%% Function : check_type
%% Arguments : FullName - an absolute file name representing the
%% file or directory we want to evaluate.
%% Returns :
%% Description:
%% When communcating with FTP-servers on different platforms a variety of
%% answers can be returned. A few examples:
%%
# # # ftp : nlist on an empty directory # # #
%% {ok, ""}, {error, epath}
%%
# # # ftp : nlist on a non - existing directory or file # # #
%% {ok, "XXX: No such file or directory}, {error, epath}
%%
# # # ftp : nlist on an existing directory with one contained item # # #
%% {ok, "Item"}
%%
%% Comparing the above we see that it's virtually impossible to tell apart
%% {ok, "XXX: No such file or directory} and {ok, "Item"}.
%% Hence, it's easier to test if it's possible to do ftp:cd instead.
%% Ugly, but rather effective. If we look at the bright side, it's only
%% necessary when we try to lookup:
%% * non-existing item
* A directory with one member only .
%% * An empty directory.
%%
Furthermore , no need for traversing Listings etc .
%%----------------------------------------------------------------------
%% Probes the remote server to classify FullName as nfile or
%% {ndirectory, Members}, raising the appropriate CosFileTransfer
%% exception on failure. See the comment block above this function for
%% why Mod:cd/2 is used as a fallback test for ambiguous nlist results.
check_type(_OE_This, State, FullName) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Result =
        case Mod:nlist(?get_Server(State), FullName) of
            {ok, Listing} when length(Listing) > 0->
                case string:tokens(Listing, ?SEPARATOR) of
                    [FullName] ->
                        %% A single entry equal to the path itself: a file.
                        nfile;
                    Members when length(Members) > 1 ->
                        %% Must test if more than one member since sometimes
                        %% this operation returns for example:
                        %% {ok, "XXX No such file or directory"}
                        {ndirectory, Members};
                    Member ->
                        %% Ambiguous single-entry listing: use cd as the test.
                        case Mod:cd(?get_Server(State), FullName) of
                            ok ->
                                %% It is a directory; restore the working dir.
                                case Mod:cd(?get_Server(State),
                                            ?get_CurrentDir(State)) of
                                    ok ->
                                        {ndirectory, Member};
                                    _ ->
                                        %% Failed, we cannot continue since the
                                        %% FTS now points to an incorrect Directory.
                                        %% Hence, we must terminate.
                                        {stop, normal,
                                         {'EXCEPTION',
                                          #'CosFileTransfer_RequestFailureException'
                                          {reason="Unknown error."}}, State}
                                end;
                            {error, E} ->
                                {error, E};
                            _ ->
                                nfile
                        end
                end;
            {error, epath} ->
                %% Might be a file.
                DirName = filename:dirname(FullName),
                case Mod:nlist(?get_Server(State), DirName) of
                    {ok, Listing} when length(Listing) > 0->
                        Members = string:tokens(Listing, ?SEPARATOR),
                        %% Servers list entries either as full paths or as
                        %% basenames; accept both forms.
                        case lists:member(FullName, Members) of
                            true ->
                                nfile;
                            _ ->
                                BName = filename:basename(FullName),
                                case lists:member(BName, Members) of
                                    true ->
                                        nfile;
                                    _ ->
                                        {error, epath}
                                end
                        end;
                    _ ->
                        {error, epath}
                end;
            _ ->
                %% Possibly an empty directory: cd succeeding proves it exists.
                case Mod:cd(?get_Server(State), FullName) of
                    ok ->
                        case Mod:cd(?get_Server(State), ?get_CurrentDir(State)) of
                            ok ->
                                {ndirectory, []};
                            _ ->
                                %% Failed, we cannot continue since the
                                %% FTS now points to an incorrect Directory.
                                %% Hence, we must terminate.
                                {stop, normal,
                                 {'EXCEPTION',
                                  #'CosFileTransfer_RequestFailureException'
                                  {reason="Unknown error."}}, State}
                        end;
                    _ ->
                        {error, epath}
                end
        end,
    case Result of
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="File or Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        Other ->
            Other
    end.
%%======================================================================
%% END OF MODULE
%%======================================================================
| null | https://raw.githubusercontent.com/erlang/corba/396df81473a386d0315bbba830db6f9d4b12a04f/lib/cosFileTransfer/src/CosFileTransfer_FileTransferSession_impl.erl | erlang | ----------------------------------------------------------------------
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
File : CosFileTransfer_FileTransferSession_impl.erl
Description :
----------------------------------------------------------------------
----------------------------------------------------------------------
Include files
----------------------------------------------------------------------
----------------------------------------------------------------------
External exports
----------------------------------------------------------------------
----------------------------------------------------------------------
Internal exports
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
======================================================================
External functions
======================================================================
----------------------------------------------------------------------
Function : init/1
Returns : {ok, State} |
ignore |
{stop, Reason}
Description: Initiates the server
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : terminate/2
Returns : any (ignored by gen_server)
Description: Shutdown the server
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : code_change/3
Returns : {ok, NewState}
Description: Convert process state when code is changed
----------------------------------------------------------------------
---------------------------------------------------------------------%
function : handle_info/2
Arguments:
Returns :
Effect :
----------------------------------------------------------------------
======================================================================
CosFileTransfer::FileTransferSession
======================================================================
---------------------------------------------------------------------%
Function : _get_protocols_supported
Arguments :
Returns : A list of CosFileTransfer::ProtocolSupport, i.e.,
ProtocolAddressList addresses; %% eq a list of strings.
};
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : set_directory
Arguments : Directory - CosFileTransfer::Directory
Returns :
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : create_file
Arguments : FileNameList
Returns : File
Description: This operation creates a File Object representing a
file which may or may not exist. Typically used as
argument when invoking transfer/3. See also get_file/2.
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : create_directory
Arguments : FileNameList - full path name.
Returns : Directory
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : get_file
Arguments : FileNameList
Description: This operation should be independent of the working Directory,
i.e., a full path name must be supplied. The file or
directory the returned object is supposed to represent
MUST(!!!!) exist.
----------------------------------------------------------------------
If we want to return {stop, ....}
----------------------------------------------------------------------
Function : delete
Arguments : File
Returns : -
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : transfer
Arguments : SrcFile eq DestFile eq CosFileTransfer::File
Returns : -
Description: DestFile must be a newly created File object, using create_file()
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : append
Arguments : SrcFile eq DestFile eq CosFileTransfer::File
Returns : -
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : insert
Arguments : SrcFile eq DestFile eq CosFileTransfer::File
Offset - long
Returns : -
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : logout
Arguments : -
Returns : -
Description:
----------------------------------------------------------------------
======================================================================
======================================================================
----------------------------------------------------------------------
Function : oe_orber_create_directory_current
Arguments : -
Returns : Directory
Description: Creates a Directory describing the working directory
of the remote server, e.g., an FTP-server.
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : oe_orber_get_content
Arguments : -
Returns : string
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : oe_orber_count_children
Arguments : -
Returns : string
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : delete_tmp_file
Arguments : -
Returns : ok | {'EXCEPTION', E}
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : invoke_call
Arguments : -
Returns : ok | {'EXCEPTION', E}
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : check_reply
Returns : ok | {'EXCEPTION', E}
Description:
----------------------------------------------------------------------
Should we add an exception here or do we reuse the iiop_timeout?
For now keep as is.
----------------------------------------------------------------------
Function : which_FTS_type
Arguments : -
An exception is raised if the user supplied incorrect parameters.
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : setup_connection
Returns :
Description:
----------------------------------------------------------------------
No response.
No response.
Badly configured address.
----------------------------------------------------------------------
Function : create_content
Arguments :
Returns :
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : MISC functions
Arguments :
Returns :
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : source_file_operation
Arguments :
Returns :
Description:
----------------------------------------------------------------------
Downloaded the File, we are now ready to transmit.
Delete the temporary local copy.
Downloaded the File, we are now ready to transmit.
Delete the temporary local copy.
----------------------------------------------------------------------
Function : target_file_operation
Arguments :
Returns :
Description:
----------------------------------------------------------------------
Delete the temporary local copy.
Completed the transfer successfully.
----------------------------------------------------------------------
Function : receive_file
Arguments : Driver - currently only gen_tcp supported.
LSocket - which socket to use.
FileName - an absolute file name representing the
file we want to create or append to.
Type - 'read', 'write', 'append'.
Returns :
Description:
----------------------------------------------------------------------
The Type can be the ones allowed by the file-module, i.e.,
'read', 'write' or 'append'
The Type can be the ones allowed by the file-module, i.e.,
'read', 'write' or 'append'
----------------------------------------------------------------------
Function : send_file
Arguments : Driver - currently only gen_tcp supported.
Sock - which socket to use.
FileName - an absolute file name representing the
file we want to send.
Returns :
Description:
----------------------------------------------------------------------
----------------------------------------------------------------------
Function : check_type
Arguments : FullName - an absolute file name representing the
file or directory we want to evaluate.
Returns :
Description:
When communicating with FTP-servers on different platforms a variety of
answers can be returned. A few examples:
{ok, ""}, {error, epath}
{ok, "XXX: No such file or directory}, {error, epath}
{ok, "Item"}
Comparing the above we see that it's virtually impossible to tell apart
{ok, "XXX: No such file or directory} and {ok, "Item"}.
Hence, it's easier to test if it's possible to do ftp:cd instead.
Ugly, but rather effective. If we look at the bright side, it's only
necessary when we try to lookup:
* non-existing item
* An empty directory.
----------------------------------------------------------------------
this operation returns for example:
{ok, "XXX No such file or directory"}
Failed, we cannot continue since the
Hence, we must terminate.
Might be a file.
Failed, we cannot continue since the
Hence, we must terminate.
======================================================================
END OF MODULE
====================================================================== | Copyright Ericsson AB 2000 - 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
Created : 12 Sept 2000
-module('CosFileTransfer_FileTransferSession_impl').
-include_lib("orber/include/corba.hrl").
-include_lib("orber/src/orber_iiop.hrl").
-include("cosFileTransferApp.hrl").
-export([init/1,
terminate/2,
code_change/3,
handle_info/2]).
Interface functions
-export(['_get_protocols_supported'/2,
set_directory/3,
create_file/3,
create_directory/3,
get_file/3,
delete/3,
transfer/4,
append/4,
insert/5,
logout/2]).
-export([oe_orber_create_directory_current/2, oe_orber_get_content/4,
oe_orber_count_children/3]).
-export([invoke_call/3]).
Records
-record(state, {protocols, server, type, current, module, connection, mytype,
connection_timeout}).
Macros
-define(create_InitState(P, S, T, C, M, Co, Ty, CT),
#state{protocols=P, server=S, type=T, current=C, module=M, connection=Co,
mytype=Ty, connection_timeout=CT}).
-define(get_Protocols(S), S#state.protocols).
-define(get_Server(S), S#state.server).
-define(get_CurrentDir(S), S#state.current).
-define(get_Module(S), S#state.module).
-define(get_Connection(S), S#state.connection).
-define(get_MyType(S), S#state.mytype).
-define(get_ConnectionTimeout(S), S#state.connection_timeout).
-define(set_CurrentDir(S, C), S#state{current=C}).
-define(is_FTP(S), S#state.type=='FTP').
-define(is_FTAM(S), S#state.type=='FTAM').
-define(is_NATIVE(S), S#state.type=='NATIVE').
-define(is_ORBER_NATIVE(S), S#state.module==cosFileTransferNATIVE_file).
{ ok , State , Timeout } |
%%----------------------------------------------------------------------
%% Function   : init/1
%% Description: gen_server callback. Logs in to the backend (a
%%              stand-alone inets FTP client, or a user supplied
%%              'NATIVE' driver module), records its working directory
%%              and opens the local listen socket used for transfers.
%%----------------------------------------------------------------------
init(['FTP', Host, Port, User, Password, _Account, Protocol, Timeout]) ->
    %% One stand-alone ftp client per session; not supervised by inets.
    {ok, Server} = inets:start(ftpc, [{host, Host}, {port, Port}], stand_alone),
    ok = ftp:user(Server, User, Password),
    {ok, WorkingDir} = ftp:pwd(Server),
    {LSock, Protocols} = setup_local(Protocol),
    {ok, ?create_InitState(Protocols, Server, 'FTP', WorkingDir, ftp,
                           LSock, Protocol, Timeout)};
init([{'NATIVE', Driver}, Host, Port, User, Password, _Account, Protocol, Timeout]) ->
    %% The 'NATIVE' backend delegates every file operation to Driver.
    {ok, Server} = Driver:open(Host, Port),
    ok = Driver:user(Server, User, Password),
    {ok, WorkingDir} = Driver:pwd(Server),
    {LSock, Protocols} = setup_local(Protocol),
    {ok, ?create_InitState(Protocols, Server, 'NATIVE', WorkingDir, Driver,
                           LSock, Protocol, Timeout)}.
%%----------------------------------------------------------------------
%% Function   : terminate/2
%% Description: gen_server callback. Best-effort shutdown: close the
%%              local listen socket, then log out from the backend.
%%----------------------------------------------------------------------
terminate(_Reason, #state{type = Type, server = Server, module = Mod} = State) ->
    %% The socket may already be gone; ignore any failure.
    Close = case ?get_MyType(State) of
                ssl -> fun ssl:close/1;
                _   -> fun gen_tcp:close/1
            end,
    catch Close(?get_Connection(State)),
    %% Shut down the backend client as well.
    case Type of
        'FTP'    -> inets:stop(ftpc, Server);
        'NATIVE' -> Mod:close(Server);
        _        -> ok
    end,
    ok.
%%----------------------------------------------------------------------
%% Function   : code_change/3
%% Description: gen_server callback; no state conversion is needed.
%%----------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%----------------------------------------------------------------------
%% Function   : handle_info/2
%% Description: gen_server callback. A trapped 'EXIT' from a linked
%%              process stops the session with the same reason; any
%%              other message is ignored.
%%----------------------------------------------------------------------
handle_info({'EXIT', _Pid, Reason}, State) ->
    {stop, Reason, State};
handle_info(_Info, State) ->
    {noreply, State}.
struct ProtocolSupport {
Istring protocol_name ;
%%----------------------------------------------------------------------
%% Function   : '_get_protocols_supported'/2
%% Returns    : {reply, [#'CosFileTransfer_ProtocolSupport'{}], State}
%% Description: Read-only attribute accessor; returns the protocol list
%%              ("TCP/IP" or "SSL" plus addresses) built in init/1.
%%----------------------------------------------------------------------
'_get_protocols_supported'(_OE_This, State) ->
    {reply, ?get_Protocols(State), State}.
%%----------------------------------------------------------------------
%% Function   : set_directory/3
%% Arguments  : Directory - a CosFileTransfer::Directory object.
%% Returns    : {reply, ok, NewState} or raises a CORBA exception.
%% Description: Changes the remote working directory and remembers the
%%              new path in the session state. Backend errors are
%%              mapped to CORBA exceptions.
%%              Fix: the elogin reason text read "User not loggen in.";
%%              corrected to "User not logged in." for consistency with
%%              the other operations in this module.
%%----------------------------------------------------------------------
set_directory(_OE_This, State, Directory) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Path = filename:join('CosFileTransfer_Directory':
                         '_get_complete_file_name'(Directory)),
    case catch Mod:cd(?get_Server(State), Path) of
        ok ->
            {reply, ok, ?set_CurrentDir(State, Path)};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason = "Unexpected error."})
    end.
%%----------------------------------------------------------------------
%% Function   : create_file/3
%% Returns    : {reply, File, State}
%% Description: Creates a File object for the given path components.
%%              The file itself may or may not exist; no remote
%%              operation is performed (see also get_file/3).
%%----------------------------------------------------------------------
create_file(OE_This, State, FileNameList) ->
    {reply, cosFileTransferApp:create_file(OE_This, FileNameList), State}.
%%----------------------------------------------------------------------
%% Function   : create_directory/3
%% Arguments  : FileNameList - full path name as a list of components.
%% Returns    : {reply, Directory, State} or raises a CORBA exception.
%% Description: Creates the directory on the remote server and returns
%%              a Directory object representing it.
%%              Fix: the elogin reason text read "User not loggen in.";
%%              corrected to "User not logged in." for consistency.
%%----------------------------------------------------------------------
create_directory(OE_This, State, FileNameList) when ?is_FTP(State);
                                                    ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    case Mod:mkdir(?get_Server(State), filename:join(FileNameList)) of
        ok ->
            {reply, cosFileTransferApp:create_dir(OE_This, FileNameList), State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
Returns : FileWrapper
%%----------------------------------------------------------------------
%% Function   : get_file/3
%% Returns    : {reply, #'CosFileTransfer_FileWrapper'{}, State}
%% Description: Looks up an existing file or directory (full path name
%%              required; independent of the working directory) and
%%              wraps it in a FileWrapper tagged with its kind.
%%----------------------------------------------------------------------
get_file(OE_This, State, FileNameList) when ?is_FTP(State);
                                            ?is_NATIVE(State) ->
    %% check_type/3 raises FileNotFoundException when the path is absent
    %% and may return {stop, ...} if the session state became corrupt.
    case check_type(OE_This, State, filename:join(FileNameList)) of
        {ndirectory, _Contents} ->
            Dir = cosFileTransferApp:create_dir(OE_This, FileNameList),
            {reply, #'CosFileTransfer_FileWrapper'{the_file = Dir,
                                                   file_type = ndirectory},
             State};
        nfile ->
            File = cosFileTransferApp:create_file(OE_This, FileNameList),
            {reply, #'CosFileTransfer_FileWrapper'{the_file = File,
                                                   file_type = nfile},
             State};
        Other ->
            Other
    end.
%%----------------------------------------------------------------------
%% Function   : delete/3
%% Arguments  : File - a CosFileTransfer::File (or Directory) object.
%% Returns    : {reply, ok, State} or raises a CORBA exception.
%% Description: Removes the file or, when the object's "is_directory"
%%              property is true, the directory on the remote server.
%%              Fix: the elogin reason text read "User not loggen in.";
%%              corrected to "User not logged in." for consistency.
%%----------------------------------------------------------------------
delete(_OE_This, State, File) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Result =
        case 'CosPropertyService_PropertySet':
            get_property_value(File, "is_directory") of
            #any{value=false} ->
                Mod:delete(?get_Server(State),
                           filename:join('CosFileTransfer_File':
                                         '_get_complete_file_name'(File)));
            #any{value=true} ->
                Mod:rmdir(?get_Server(State),
                          filename:join('CosFileTransfer_File':
                                        '_get_complete_file_name'(File)));
            Other ->
                Other
        end,
    case Result of
        ok ->
            {reply, ok, State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="File or Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
on the Target FileTransferSession , prior to calling transfer ( ) .
%% transfer/4: copy SrcFile to DestFile. DestFile must be a newly created
%% File object on the target session. The session owning SrcFile acts as
%% the active (sending) side; the session owning DestFile accepts the
%% data connection and writes the stream to disk.
transfer(OE_This, State, SrcFile, DestFile) when ?is_ORBER_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            %% The source FTS is supposed to be the active one; set up a
            %% connection to one of the target's advertised addresses.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
            %% Run the peer's transfer/3 in a helper process so we can
            %% stream the file concurrently; check_reply/1 collects the
            %% peer's result (or raises its exception).
            Pid = spawn(?MODULE, invoke_call, [self(), transfer,
                                               [TargetFTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      filename:join(SrcName)),
            check_reply(Pid),
            {reply, ok, State};
        {target, _SourceFTS} ->
            %% Passive side: accept the incoming connection and write the
            %% stream directly to the destination file.
            DestName = 'CosFileTransfer_File':'_get_complete_file_name'(DestFile),
            receive_file(?get_MyType(State), ?get_Connection(State),
                         ?get_ConnectionTimeout(State),
                         filename:join(DestName), write),
            {reply, ok, State}
    end;
transfer(OE_This, State, SrcFile, DestFile) when ?is_FTP(State); ?is_NATIVE(State) ->
    %% Non-local backends go via a temporary local copy.
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, transfer, 0, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, send, 0)
    end.
%% append/4: like transfer/4 but appends SrcFile's contents to DestFile.
%% Both files must exist (validated via check_type/3, which raises
%% FileNotFoundException otherwise). Only supported for the orber-native
%% and 'NATIVE' backends; plain FTP sessions get NO_IMPLEMENT.
append(OE_This, State, SrcFile, DestFile) when ?is_ORBER_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            SrcName = filename:join('CosFileTransfer_File':
                                    '_get_complete_file_name'(SrcFile)),
            %% Validation only; raises if SrcName does not exist.
            check_type(OE_This, State, SrcName),
            %% The source FTS is supposed to be the active one; set up a
            %% connection to the target's advertised addresses.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            Pid = spawn(?MODULE, invoke_call, [self(), append,
                                               [TargetFTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      SrcName),
            check_reply(Pid),
            {reply, ok, State};
        {target, _SourceFTS} ->
            DestName = filename:join('CosFileTransfer_File':
                                     '_get_complete_file_name'(DestFile)),
            %% Validation only; raises if DestName does not exist.
            check_type(OE_This, State, DestName),
            receive_file(?get_MyType(State), ?get_Connection(State),
                         ?get_ConnectionTimeout(State), DestName, append),
            {reply, ok, State}
    end;
append(OE_This, State, SrcFile, DestFile) when ?is_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, append, 0, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, append, 0)
    end;
append(_OE_This, _State, _SrcFile, _DestFile) ->
    corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO}).
%% insert/5: like append/4 but inserts SrcFile's contents into DestFile
%% at byte position Offset. Only supported for 'NATIVE' backends;
%% anything else gets NO_IMPLEMENT.
insert(OE_This, State, SrcFile, DestFile, Offset) when ?is_NATIVE(State) ->
    case which_FTS_type(OE_This, SrcFile, DestFile) of
        {source, TargetFTS} when ?is_ORBER_NATIVE(State) ->
            SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
            %% Validation only; raises if the source does not exist.
            check_type(OE_This, State, filename:join(SrcName)),
            %% The source FTS is supposed to be the active one; set up a
            %% connection to the target's advertised addresses.
            Protocols = 'CosFileTransfer_FileTransferSession':
                '_get_protocols_supported'(TargetFTS),
            Pid = spawn(?MODULE, invoke_call, [self(), insert,
                                               [TargetFTS, SrcFile,
                                                DestFile, Offset]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      filename:join(SrcName)),
            check_reply(Pid),
            {reply, ok, State};
        {source, TargetFTS} ->
            source_FTS_operation(State, SrcFile, DestFile, insert, Offset, TargetFTS);
        {target, _SourceFTS} ->
            target_FTS_operation(State, SrcFile, DestFile, insert, Offset)
    end;
insert(_OE_This, _State, _SrcFile, _DestFile, _Offset) ->
    corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO}).
%%----------------------------------------------------------------------
%% Function   : logout/2
%% Description: Closes the backend connection (best effort; failures
%%              are ignored) and stops the session server normally.
%%----------------------------------------------------------------------
logout(_OE_This, State) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    catch Mod:close(?get_Server(State)),
    {stop, normal, ok, State}.
Internal functions
%%----------------------------------------------------------------------
%% Function   : oe_orber_create_directory_current/2
%% Returns    : {reply, Directory, State} or raises a CORBA exception.
%% Description: Creates a Directory object describing the remote
%%              server's working directory; nlist is used to verify
%%              that the directory is reachable first.
%%              Fix: the elogin reason text read "User not loggen in.";
%%              corrected to "User not logged in." for consistency.
%%----------------------------------------------------------------------
oe_orber_create_directory_current(OE_This, State) when ?is_FTP(State);
                                                       ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    FileNameList = filename:split(?get_CurrentDir(State)),
    case Mod:nlist(?get_Server(State), ?get_CurrentDir(State)) of
        {ok, _Listing} ->
            {reply, cosFileTransferApp:create_dir(OE_This, FileNameList),
             State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        _ ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Unknown error."})
    end.
%%----------------------------------------------------------------------
%% Function   : oe_orber_get_content/4
%% Returns    : {reply, [FileWrapper], State}
%% Description: Lists the given remote directory and wraps each entry
%%              via create_content/5. An epath error is treated as an
%%              empty directory; any other failure is reported as
%%              FileNotFoundException.
%%----------------------------------------------------------------------
oe_orber_get_content(OE_This, State, FileNameList, Parent) when ?is_FTP(State);
                                                                ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Path = filename:join(FileNameList),
    case Mod:nlist(?get_Server(State), Path) of
        {ok, Listing} ->
            create_content(Listing, OE_This, State, Parent, FileNameList);
        {error, epath} ->
            {reply, [], State};
        _ ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="Directory not found."})
    end.
%%----------------------------------------------------------------------
%% Function   : oe_orber_count_children/3
%% Returns    : {reply, integer(), State} | {stop, ...} or raises.
%% Description: Counts the entries of a remote directory. check_type/3
%%              is run under catch: a {stop, normal, ...} result means
%%              the session state is corrupt and the server terminates;
%%              any other non-directory outcome becomes INTERNAL.
%%----------------------------------------------------------------------
oe_orber_count_children(OE_This, State, FileNameList) when ?is_FTP(State);
                                                           ?is_NATIVE(State) ->
    case catch check_type(OE_This, State, filename:join(FileNameList)) of
        {ndirectory, Members} ->
            {reply, length(Members), State};
        {stop, normal, _, _} ->
            {stop, normal,
             {'EXCEPTION', #'INTERNAL'{completion_status=?COMPLETED_NO}},
             State};
        _->
            corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
    end.
%%----------------------------------------------------------------------
%% Function   : delete_tmp_file/2
%% Arguments  : TmpFileName - local file to remove.
%%              ErrorMsg    - reason text for the raised exception.
%% Returns    : ok, or raises RequestFailureException on failure.
%% Description: Removes a temporary local copy used during a transfer.
%%----------------------------------------------------------------------
delete_tmp_file(TmpFileName, ErrorMsg) ->
    delete_tmp_file_result(file:delete(TmpFileName), ErrorMsg).

%% Map the file:delete/1 outcome to ok or a CORBA exception.
delete_tmp_file_result(ok, _ErrorMsg) ->
    ok;
delete_tmp_file_result(_Error, ErrorMsg) ->
    corba:raise(#'CosFileTransfer_RequestFailureException'{reason=ErrorMsg}).
%%----------------------------------------------------------------------
%% Function   : invoke_call/3
%% Arguments  : ReplyTo - pid to report the outcome to.
%%              Op/Args - operation and arguments for the FTS module.
%% Returns    : ok (the real result is delivered as a message).
%% Description: Spawned helper: runs the requested FileTransferSession
%%              operation and sends {transfer_result, self(), Result}
%%              back to ReplyTo (see check_reply/1). Exceptions and
%%              exits are captured by the catch and forwarded as data.
%%----------------------------------------------------------------------
invoke_call(ReplyTo, Op, Args) ->
    Outcome = (catch apply('CosFileTransfer_FileTransferSession', Op, Args)),
    ReplyTo ! {transfer_result, self(), Outcome},
    ok.
Arguments : Pid - the pid of the spawned process .
%% check_reply/1: wait for the {transfer_result, Pid, _} message produced
%% by invoke_call/3. ok passes through; an 'EXCEPTION' from the peer is
%% re-raised locally; an 'EXIT' becomes INTERNAL.
%% NOTE(review): with 'after infinity' the timeout clause can never fire,
%% so its corba:raise is dead code; kept as-is to preserve behavior (see
%% the header comment about reusing the iiop_timeout).
check_reply(Pid) ->
    receive
        {transfer_result, Pid, ok} ->
            ok;
        {transfer_result, Pid, {'EXCEPTION', E}} ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:check_reply();
Raised exception: ", [?LINE, E], ?DEBUG_LEVEL),
            corba:raise(E);
        {transfer_result, Pid, {'EXIT', Reason}} ->
            orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:check_reply();
Got EXIT-signal with reason: ", [?LINE, Reason], ?DEBUG_LEVEL),
            corba:raise(#'INTERNAL'{minor=199,
                                    completion_status=?COMPLETED_NO})
    after infinity ->
            corba:raise(#'INTERNAL'{minor=199,
                                    completion_status=?COMPLETED_NO})
    end.
Returns : { source , FTS } | { target , FTS } | { ' EXCEPTION ' , # ' BAD_PARAM ' { } }
Description : Used to determine if the target FTS is supposed to act
as sender or receiver and also return the counter part FTS .
%%----------------------------------------------------------------------
%% Function   : which_FTS_type/3
%% Returns    : {source, PeerFTS} | {target, PeerFTS} or raises
%%              BAD_PARAM when this session owns neither file.
%% Description: Decides whether this session is the sending (source) or
%%              receiving (target) side of a transfer and returns the
%%              counterpart session. The second is_equivalent call is
%%              only made when the first one fails.
%%----------------------------------------------------------------------
which_FTS_type(OE_This, SrcFile, DestFile) ->
    DestSession = 'CosFileTransfer_File':'_get_associated_session'(DestFile),
    SrcSession = 'CosFileTransfer_File':'_get_associated_session'(SrcFile),
    case corba_object:is_equivalent(OE_This, DestSession) of
        true ->
            {target, SrcSession};
        false ->
            case corba_object:is_equivalent(OE_This, SrcSession) of
                false ->
                    corba:raise(#'BAD_PARAM'{completion_status=?COMPLETED_NO});
                true ->
                    {source, DestSession}
            end
    end.
Arguments : A list of # ' CosFileTransfer_ProtocolSupport ' { }
%% setup_connection/3: given the peer's advertised ProtocolSupport list,
%% pick the entry matching our own transport (tcp|ssl) and connect to
%% one of its addresses. Raises TransferException if no entry matches.
setup_connection([], Protocol, _) ->
    orber:debug_level_print("[~p] CosFileTransfer_FileTransferSession:setup_connection(~p);
The Protocols listed are not supported.", [?LINE, Protocol], ?DEBUG_LEVEL),
    corba:raise(#'CosFileTransfer_TransferException'{reason="Unsupported protocol"});
setup_connection([#'CosFileTransfer_ProtocolSupport'{protocol_name="TCP/IP",
                                                     addresses=Addr}|_],
                 tcp, Timeout) ->
    setup_connection_helper(Addr, gen_tcp, [], Timeout);
setup_connection([#'CosFileTransfer_ProtocolSupport'{protocol_name="SSL",
                                                     addresses=Addr}|_],
                 ssl, Timeout) ->
    %% NOTE(review): the '++' below requires ssl_client_cacertfile_option()
    %% to return a list.
    Options = [{certfile, cosFileTransferApp:ssl_client_certfile()},
               {verify, cosFileTransferApp:ssl_client_verify()},
               {depth, cosFileTransferApp:ssl_client_depth()}] ++
        ssl_client_cacertfile_option(),
    setup_connection_helper(Addr, ssl, Options, Timeout);
setup_connection([_|T], Type, Timeout) ->
    %% Not the transport we use; try the next advertised protocol.
    setup_connection(T, Type, Timeout).
%% setup_connection_helper/4: try each "Host:Port" address in turn with
%% the chosen driver (gen_tcp | ssl); return {Driver, Socket} for the
%% first address that answers. Malformed addresses and failed connects
%% are skipped; an exhausted list raises RequestFailureException.
setup_connection_helper([], _, _, _) ->
    corba:raise(#'CosFileTransfer_RequestFailureException'
                {reason="Unable to contact remote server."});
setup_connection_helper([H|T], Driver, Options, Timeout) ->
    case string:tokens(H, ":") of
        [Host, Port] when Driver == gen_tcp ->
            case gen_tcp:connect(Host, list_to_integer(Port),
                                 [binary,
                                  {packet, raw},
                                  {reuseaddr, true},
                                  {nodelay, true}|Options], Timeout) of
                {ok, Sock} ->
                    {gen_tcp, Sock};
                _->
                    %% No response; try the next address.
                    setup_connection_helper(T, Driver, Options, Timeout)
            end;
        [Host, Port] when Driver == ssl ->
            case ssl:connect(Host, list_to_integer(Port),
                             [binary,
                              {packet, 0},
                              {active, false}|Options], Timeout) of
                {ok, Sock} ->
                    {ssl, Sock};
                _->
                    %% No response; try the next address.
                    setup_connection_helper(T, Driver, Options, Timeout)
            end;
        _ ->
            %% Badly configured address; skip it.
            setup_connection_helper(T, Driver, Options, Timeout)
    end.
%%----------------------------------------------------------------------
%% Function   : ssl_client_cacertfile_option/0
%% Returns    : [] | [{cacertfile, Path}]
%% Description: Optional ssl option for the outgoing (client) side.
%%              Fix: this must return a LIST, since setup_connection/3
%%              appends the result to the other ssl options with '++';
%%              the previous bare {cacertfile, X} tuple made that append
%%              crash with badarg whenever a cacertfile was configured.
%%              Now consistent with ssl_server_cacertfile_option/0.
%%----------------------------------------------------------------------
ssl_client_cacertfile_option() ->
    case cosFileTransferApp:ssl_client_cacertfile() of
        [] ->
            [];
        X when is_list(X) ->
            [{cacertfile, X}];
        _ ->
            []
    end.
%% create_content/5: split a raw nlist listing into entries and wrap
%% each one as a FileWrapper via the /6 accumulator loop below.
create_content(Listing, OE_This, State, Parent, PathList) ->
    create_content(string:tokens(Listing, ?SEPARATOR), OE_This,
                   State, Parent, PathList, []).

%% create_content/6: classify each entry with check_type/3 and build the
%% FileWrapper list (order is reversed relative to the listing). A
%% non-file/non-directory outcome from check_type (e.g. a {stop, ...}
%% tuple) is returned as-is to the gen_server loop.
create_content([], _OE_This, State, _Parent, _PathList, Acc) ->
    {reply, Acc, State};
create_content([H|T], OE_This, State, Parent, PathList, Acc) ->
    %% Servers may return full paths; keep the base name only.
    FullPathList = PathList ++[filename:basename(H)],
    case check_type(OE_This, State, filename:join(FullPathList)) of
        nfile ->
            create_content(T, OE_This, State, Parent, PathList,
                           [#'CosFileTransfer_FileWrapper'
                            {the_file = cosFileTransferApp:create_file(OE_This,
                                                                       FullPathList,
                                                                       Parent),
                             file_type = nfile}|Acc]);
        {ndirectory, _Members} ->
            create_content(T, OE_This, State, Parent, PathList,
                           [#'CosFileTransfer_FileWrapper'
                            {the_file = cosFileTransferApp:create_dir(OE_This,
                                                                      FullPathList,
                                                                      Parent),
                             file_type = ndirectory}|Acc]);
        Other ->
            Other
    end.
%%----------------------------------------------------------------------
%% Function   : setup_local/1
%% Arguments  : tcp | ssl
%% Returns    : {ListenSocket, [#'CosFileTransfer_ProtocolSupport'{}]}
%% Description: Opens the local listen socket used for incoming file
%%              transfers on an ephemeral port and builds the protocol
%%              record advertised to peer sessions.
%%----------------------------------------------------------------------
setup_local(tcp) ->
    {ok, LSock} = gen_tcp:listen(0, [binary,
                                     {packet, 0},
                                     {backlog, 1},
                                     {active, false}]),
    {ok, PortNo} = inet:port(LSock),
    Support = #'CosFileTransfer_ProtocolSupport'{protocol_name="TCP/IP",
                                                 addresses=[local_address(PortNo)]},
    {LSock, [Support]};
setup_local(ssl) ->
    SSLOptions = [{certfile, cosFileTransferApp:ssl_server_certfile()},
                  {verify, cosFileTransferApp:ssl_server_verify()},
                  {depth, cosFileTransferApp:ssl_server_depth()}
                  | ssl_server_cacertfile_option()],
    {ok, LSock} = ssl:listen(0, [binary,
                                 {packet, 0},
                                 {backlog, 1},
                                 {active, false} | SSLOptions]),
    {ok, {_IP, PortNo}} = ssl:sockname(LSock),
    Support = #'CosFileTransfer_ProtocolSupport'{protocol_name="SSL",
                                                 addresses=[local_address(PortNo)]},
    {LSock, [Support]}.
%%----------------------------------------------------------------------
%% Function   : local_address/1
%% Arguments  : Port - integer port number.
%% Returns    : "A.B.C.D:Port" for this host's IPv4 address.
%% Description: Builds the address string advertised in the session's
%%              ProtocolSupport record.
%%----------------------------------------------------------------------
local_address(Port) ->
    {ok, Hostname} = inet:gethostname(),
    {ok, {A1, A2, A3, A4}} = inet:getaddr(Hostname, inet),
    lists:concat([A1, ".", A2, ".", A3, ".", A4, ":", Port]).
%%----------------------------------------------------------------------
%% Function   : ssl_server_cacertfile_option/0
%% Returns    : [] | [{cacertfile, Path}]
%% Description: Optional ssl option for the local listen socket; a list
%%              so the caller can splice it into the option list.
%%----------------------------------------------------------------------
ssl_server_cacertfile_option() ->
    case cosFileTransferApp:ssl_server_cacertfile() of
        Path when is_list(Path), Path =/= [] ->
            [{cacertfile, Path}];
        _ ->
            []
    end.
%%----------------------------------------------------------------------
%% Function   : source_FTS_operation/6
%% Description: Sending side for non-local backends: download the source
%%              file to a temporary local copy, stream it to the peer
%%              session (whose matching operation runs concurrently via
%%              invoke_call/3), then delete the temporary copy and
%%              collect the peer's result with check_reply/1.
%%              Fix: the elogin reason text read "User not loggen in.";
%%              corrected to "User not logged in." for consistency.
%%----------------------------------------------------------------------
source_FTS_operation(State, SrcFile, DestFile, Op, Offset, FTS) ->
    Mod = ?get_Module(State),
    %% The source FTS is supposed to be the active one; set up a
    %% connection to the target's advertised addresses.
    Protocols = 'CosFileTransfer_FileTransferSession':'_get_protocols_supported'(FTS),
    SrcName = 'CosFileTransfer_File':'_get_complete_file_name'(SrcFile),
    TempName = cosFileTransferApp:create_name("TemporarySrcFile"),
    case Mod:recv(?get_Server(State), filename:join(SrcName), TempName) of
        ok when Op == insert ->
            %% Downloaded the file; ready to transmit (insert carries the
            %% extra Offset argument).
            Pid = spawn(?MODULE, invoke_call, [self(), insert,
                                               [FTS, SrcFile, DestFile, Offset]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      TempName),
            %% Delete the temporary local copy.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            check_reply(Pid),
            {reply, ok, State};
        ok ->
            %% Downloaded the file; ready to transmit.
            Pid = spawn(?MODULE, invoke_call, [self(), Op, [FTS, SrcFile, DestFile]]),
            send_file(Protocols, ?get_MyType(State), ?get_ConnectionTimeout(State),
                      TempName),
            %% Delete the temporary local copy.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            check_reply(Pid),
            {reply, ok, State};
        {error, epath} ->
            corba:raise(#'CosFileTransfer_FileNotFoundException'
                        {reason="File not found."});
        {error, elogin} ->
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."})
    end.
%% target_FTS_operation/5: receiving side for non-local backends.
%% Accept the incoming stream into a temporary local file, then upload
%% it to the backend with the requested operation (Op is a function in
%% Mod, e.g. send/append; insert additionally carries Offset). The
%% temporary copy is always removed; backend errors are mapped to the
%% appropriate CORBA exceptions.
target_FTS_operation(State, _SrcFile, DestFile, Op, Offset) ->
    Mod = ?get_Module(State),
    DestName = 'CosFileTransfer_File':'_get_complete_file_name'(DestFile),
    TempName = cosFileTransferApp:create_name("TemporaryDestFile"),
    receive_file(?get_MyType(State), ?get_Connection(State),
                 ?get_ConnectionTimeout(State), TempName, write),
    Result =
        if
            Op == insert ->
                Mod:insert(?get_Server(State), TempName, filename:join(DestName), Offset);
            true ->
                Mod:Op(?get_Server(State), TempName, filename:join(DestName))
        end,
    case Result of
        ok ->
            %% Completed the transfer successfully.
            delete_tmp_file(TempName,
                            "Transfer completed but failed to remove temporary local copy."),
            {reply, ok, State};
        {error, epath} ->
            delete_tmp_file(TempName,
                            "IllegalOperationException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_IllegalOperationException'
                        {reason="Not allowed by destination."});
        {error, elogin} ->
            delete_tmp_file(TempName,
                            "SessionException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_SessionException'
                        {reason="User not logged in."});
        {error, econn} ->
            delete_tmp_file(TempName,
                            "TransferException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_TransferException'
                        {reason="Premature connection ending."});
        {error, etnospc} ->
            %% Destination out of space.
            delete_tmp_file(TempName,
                            "RequestFailureException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_RequestFailureException'
                        {reason="Premature connection ending."});
        {error, efnamena} ->
            %% File name not allowed by the destination server.
            delete_tmp_file(TempName,
                            "IllegalOperationException and not able to remove temporary local copy."),
            corba:raise(#'CosFileTransfer_IllegalOperationException'
                        {reason="Not allowed by destination."})
    end.
%% @doc Accept one incoming data connection on LSock and write everything
%% received into FileName (opened with mode Type, e.g. write). Raises
%% CosFileTransfer_RequestFailureException if the accept (or, for ssl, the
%% handshake) times out or fails.
%% NOTE(review): FD is not closed on the error paths below - consider
%% closing it before raising.
receive_file(tcp, LSock, Timeout, FileName, Type) ->
    FD = file_open(FileName, Type),
    case gen_tcp:accept(LSock, Timeout) of
	{ok, Sock} ->
	    receive_file_helper(gen_tcp, Sock, FD);
	{error, timeout} ->
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
		      "gen_tcp:accept(~p) timed out", [?LINE, Timeout], ?DEBUG_LEVEL),
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="TCP accept timed out.."});
	{error, Why} ->
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
		      "gen_tcp:accept(~p) failed: ~p", [?LINE, Timeout, Why], ?DEBUG_LEVEL),
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="TCP accept failed."})
    end;
%% For ssl both the transport accept and the TLS handshake must succeed
%% before data can be received.
receive_file(ssl, LSock, Timeout, FileName, Type) ->
    FD = file_open(FileName, Type),
    case ssl:transport_accept(LSock, Timeout) of
	{ok, ASock} ->
	    case ssl:handshake(ASock, Timeout) of
		{ok, Sock} ->
		    receive_file_helper(ssl, Sock, FD);
		{error, Error} ->
		    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
			      "ssl:handshake(~p) failed: ~p",
			      [?LINE, Timeout, Error], ?DEBUG_LEVEL),
		    corba:raise(#'CosFileTransfer_RequestFailureException'
				{reason="TCP accept failed."})
	    end;
	{error, timeout} ->
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
		      "ssl:transport_accept(~p) timed out",
		      [?LINE, Timeout], ?DEBUG_LEVEL),
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="TCP accept timed out.."});
	{error, Why} ->
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file();~n"
		      "ssl:transport_accept(~p) failed: ~p",
		      [?LINE, Timeout, Why], ?DEBUG_LEVEL),
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="TCP accept failed."})
    end.
%% @doc Read all data from Sock until the peer closes the connection,
%% appending each chunk to FD. The file is closed on success as well as on
%% failure; an unexpected recv result raises an INTERNAL system exception.
receive_file_helper(Driver, Sock, FD) ->
    case Driver:recv(Sock, 0) of
	{ok, Bin} ->
	    file:write(FD, Bin),
	    receive_file_helper(Driver, Sock, FD);
	{error, closed} ->
	    %% Normal end of transfer; the sender closed the connection.
	    file:close(FD);
	What ->
	    %% Close the file before raising; previously the descriptor was
	    %% leaked on a receive error. Also use orber:dbg/3 like the rest
	    %% of this module.
	    file:close(FD),
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:receive_file_helper(~p);~n"
		      "Error occurred when receiving data: ~p",
		      [?LINE, Driver, What], ?DEBUG_LEVEL),
	    corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
    end.
%% @doc Open the data connection described by Protocols and stream the
%% contents of FileName over it in chunks of the configured buffer size.
send_file(Protocols, Type, Timeout, FileName) ->
    {Driver, Sock} = setup_connection(Protocols, Type, Timeout),
    FD = file_open(FileName, read),
    BuffSize = cosFileTransferApp:get_buffert_size(),
    send_file_helper(Driver, Sock, FD, BuffSize).
%% @doc Stream the contents of FD over Sock in chunks of BuffSize bytes.
%% Both the file and the socket are closed on eof as well as on failure;
%% a send or read error raises an INTERNAL system exception.
send_file_helper(Driver, Sock, FD, BuffSize) ->
    case file:read(FD, BuffSize) of
	eof ->
	    file:close(FD),
	    Driver:close(Sock);
	{ok, Bin} ->
	    case Driver:send(Sock, Bin) of
		ok ->
		    send_file_helper(Driver, Sock, FD, BuffSize);
		What ->
		    %% Release both resources before raising; previously the
		    %% descriptor and socket were leaked here. Use orber:dbg/3
		    %% like the rest of this module.
		    file:close(FD),
		    Driver:close(Sock),
		    orber:dbg("[~p] CosFileTransfer_FileTransferSession:send_file_helper(~p);~n"
			      "Error occurred when sending data: ~p",
			      [?LINE, Driver, What], ?DEBUG_LEVEL),
		    corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
	    end;
	{error, What} ->
	    %% file:read/2 failed; previously this fell through as a
	    %% case_clause crash. Clean up and report an internal error.
	    file:close(FD),
	    Driver:close(Sock),
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:send_file_helper(~p);~n"
		      "Error occurred when reading the source file: ~p",
		      [?LINE, Driver, What], ?DEBUG_LEVEL),
	    corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
    end.
%% @doc Open File in raw binary mode for Type (read | write). Returns the
%% file descriptor or raises CosFileTransfer_RequestFailureException.
file_open(File, Type) ->
    case file:open(File, [raw, binary, Type]) of
	{ok, FD} ->
	    FD;
	{error, What} ->
	    %% Use orber:dbg/3 like the rest of this module; the old
	    %% orber:debug_level_print/3 call was inconsistent.
	    orber:dbg("[~p] CosFileTransfer_FileTransferSession:file_open(~p);~n"
		      "Failed to open the file due to: ~p",
		      [?LINE, File, What], ?DEBUG_LEVEL),
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="Unable to open given file."})
    end.
%% ### ftp: nlist on an empty directory ###
%% ### ftp: nlist on a non-existing directory or file ###
%% ### ftp: nlist on an existing directory with one contained item ###
%%  * A directory with one member only.
%%    Furthermore, no need for traversing Listings etc.
%% @doc Classify FullName on the remote server as a file or a directory.
%% Returns nfile, {ndirectory, Members}, a {stop, ...} tuple if the FTS
%% ends up pointing at an unknown current directory, or raises a
%% CosFileTransfer exception on failure. (Several comment lines below had
%% lost their %% markers, which broke the syntax; they are restored.)
check_type(_OE_This, State, FullName) when ?is_FTP(State); ?is_NATIVE(State) ->
    Mod = ?get_Module(State),
    Result =
	case Mod:nlist(?get_Server(State), FullName) of
	    {ok, Listing} when length(Listing) > 0 ->
		case string:tokens(Listing, ?SEPARATOR) of
		    [FullName] ->
			nfile;
		    Members when length(Members) > 1 ->
			%% Must test if more than one member since sometimes
			%% a single file yields a one-line listing as well.
			{ndirectory, Members};
		    Member ->
			%% Exactly one member: cd there and back to find out
			%% whether FullName really is a directory.
			case Mod:cd(?get_Server(State), FullName) of
			    ok ->
				case Mod:cd(?get_Server(State),
					    ?get_CurrentDir(State)) of
				    ok ->
					{ndirectory, Member};
				    _ ->
					%% Failed to cd back; the FTS now
					%% points to an incorrect Directory.
					{stop, normal,
					 {'EXCEPTION',
					  #'CosFileTransfer_RequestFailureException'
					  {reason="Unknown error."}}, State}
				end;
			    {error, E} ->
				{error, E};
			    _ ->
				nfile
			end
		end;
	    {error, epath} ->
		%% nlist on FullName failed; check whether it is a file by
		%% listing its parent directory instead.
		DirName = filename:dirname(FullName),
		case Mod:nlist(?get_Server(State), DirName) of
		    {ok, Listing} when length(Listing) > 0 ->
			Members = string:tokens(Listing, ?SEPARATOR),
			case lists:member(FullName, Members) of
			    true ->
				nfile;
			    _ ->
				%% Some servers list base names only.
				BName = filename:basename(FullName),
				case lists:member(BName, Members) of
				    true ->
					nfile;
				    _ ->
					{error, epath}
				end
			end;
		    _ ->
			{error, epath}
		end;
	    _ ->
		%% Empty listing; an empty directory still allows cd.
		case Mod:cd(?get_Server(State), FullName) of
		    ok ->
			case Mod:cd(?get_Server(State), ?get_CurrentDir(State)) of
			    ok ->
				{ndirectory, []};
			    _ ->
				%% Failed to cd back; the FTS now points to
				%% an incorrect Directory.
				{stop, normal,
				 {'EXCEPTION',
				  #'CosFileTransfer_RequestFailureException'
				  {reason="Unknown error."}}, State}
			end;
		    _ ->
			{error, epath}
		end
	end,
    case Result of
	{error, epath} ->
	    corba:raise(#'CosFileTransfer_FileNotFoundException'
			{reason="File or Directory not found."});
	{error, elogin} ->
	    corba:raise(#'CosFileTransfer_SessionException'
			{reason="User not logged in."});
	{error, econn} ->
	    corba:raise(#'CosFileTransfer_RequestFailureException'
			{reason="Premature connection ending."});
	Other ->
	    Other
    end.
|
2dd3ee0d9aed750debc429c69a353e220c484d8f7d89ec6fd55be335e9b63013 | nikita-volkov/rebase | TBQueue.hs | module Rebase.Control.Concurrent.STM.TBQueue
(
module Control.Concurrent.STM.TBQueue
)
where
import Control.Concurrent.STM.TBQueue
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Control/Concurrent/STM/TBQueue.hs | haskell | module Rebase.Control.Concurrent.STM.TBQueue
(
module Control.Concurrent.STM.TBQueue
)
where
import Control.Concurrent.STM.TBQueue
|
|
5156788322c4bc550213481eaecda2ce01e83d229f41e0ad51062480121fe790 | tweag/asterius | T5149.hs | # LANGUAGE MagicHash , GHCForeignImportPrim , UnliftedFFITypes #
module Main where
import GHC.Exts
foreign import prim "f5149" f :: Int# -> Int# -> Double# -> Int#
main = print (I# (f 1# 2# 1.0##))
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/ghc-testsuite/codeGen/T5149.hs | haskell | # LANGUAGE MagicHash , GHCForeignImportPrim , UnliftedFFITypes #
module Main where
import GHC.Exts
foreign import prim "f5149" f :: Int# -> Int# -> Double# -> Int#
main = print (I# (f 1# 2# 1.0##))
|
|
b648dd267f9f8f65182070aa64b29a32f776b2557c862e95559b46eb070cd90a | tsurucapital/euphoria | Main.hs | module Main where
import Test.Framework (defaultMain)
import qualified FRP.Euphoria.EnumCollection.Lazy.Test
import qualified FRP.Euphoria.HashCollection.Strict.Test
import qualified FRP.Euphoria.Event.Test
import qualified FRP.Euphoria.Update.Test
main :: IO ()
main = defaultMain
[ FRP.Euphoria.EnumCollection.Lazy.Test.tests
, FRP.Euphoria.HashCollection.Strict.Test.tests
, FRP.Euphoria.Event.Test.tests
, FRP.Euphoria.Update.Test.tests
]
| null | https://raw.githubusercontent.com/tsurucapital/euphoria/15ddb49ddc79d62970a0163fe8d77789254db202/tests/Main.hs | haskell | module Main where
import Test.Framework (defaultMain)
import qualified FRP.Euphoria.EnumCollection.Lazy.Test
import qualified FRP.Euphoria.HashCollection.Strict.Test
import qualified FRP.Euphoria.Event.Test
import qualified FRP.Euphoria.Update.Test
main :: IO ()
main = defaultMain
[ FRP.Euphoria.EnumCollection.Lazy.Test.tests
, FRP.Euphoria.HashCollection.Strict.Test.tests
, FRP.Euphoria.Event.Test.tests
, FRP.Euphoria.Update.Test.tests
]
|
|
fd44c02a3d170a4473e61c3c5a2a82ff1b7a4d2241066cec4c3493b46e06b2b6 | josefs/Gradualizer | gradualizer_db.erl | @private
%% @doc Collects exported functions and types from multiple files.
%%
%% For exported functions with missing spec, a spec is generated with any()
%% as the type for all parameters and return values.
-module(gradualizer_db).
%% API functions
-export([start_link/1,
get_spec/3,
get_type/3, get_exported_type/3, get_opaque_type/3,
get_record_type/2,
get_modules/0, get_types/1,
save/1, load/1,
import_module/1,
import_erl_files/1, import_erl_files/2, import_beam_files/1, import_extra_specs/1,
import_app/1, import_otp/0, import_prelude/0]).
%% Callbacks
-behaviour(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
%% Types for the Erlang Abstract Format
-type type() :: gradualizer_type:abstract_type().
-type typed_record_field() :: {typed_record_field,
{record_field, erl_anno:anno(),
Name :: atom(),
Default :: erl_parse:abstract_expr()},
type()}.
%% Compiled regular expression
-type regexp() :: {re_pattern, _, _, _, _}.
%% Gen server local registered name
-define(name, ?MODULE).
-include_lib("stdlib/include/assert.hrl").
-include("gradualizer.hrl").
%% Internal data
-record(typeinfo, {exported :: boolean(),
opaque :: boolean(),
params :: [{var, erl_anno:anno(), atom()}],
body :: type()}).
%% Public API functions
%% @doc Start the database server, registered locally as ?name.
%% Opts is a proplist; all specs_override entries are collected into one
%% list under the specs_override key of the resulting options map.
start_link(Opts) ->
    OptsMap1 = maps:from_list(proplists:unfold(Opts)),
    OptsMap2 = OptsMap1#{specs_override => proplists:get_all_values(specs_override, Opts)},
    gen_server:start_link({local, ?name}, ?MODULE, OptsMap2, []).
%% @doc Fetches the types of the clauses of an exported function. User-defined
%% types and record types are annotated with filename on the form
%% "module.erl". Triggers a lazy import of module M if enabled.
-spec get_spec(M :: module(),
               F :: atom(),
               A :: arity()) -> {ok, [type()]} | not_found.
get_spec(M, F, A) ->
    call({get_spec, M, F, A}).
%% @doc Looks up an exported or unexported user-defined type. Does not expand
%% opaque types.
%% (Opaque types are reported as 'opaque'; use get_opaque_type/3 to expand.)
-spec get_type(Module :: module(),
               Type :: atom(),
               Params :: [type()]) -> {ok, type()} | opaque | not_found.
get_type(M, T, A) ->
    call({get_type, M, T, A}).
%% @doc Looks up an exported type. Does not expand opaque types.
%% Returns not_exported when the type exists but is not in an export_type
%% attribute of Module.
-spec get_exported_type(Module :: module(),
                        Type :: atom(),
                        Params :: [type()]) -> {ok, type()} | opaque |
                                               not_exported | not_found.
get_exported_type(M, T, A) ->
    call({get_exported_type, M, T, A}).

%% @doc Like get_type/3 but also expands opaque types.
-spec get_opaque_type(Module :: module(),
                      Type :: atom(),
                      Params :: [type()]) -> {ok, type()} | not_found.
get_opaque_type(M, T, A) ->
    call({get_opaque_type, M, T, A}).
%% @doc Looks up a record type defined in the module.
%% The field types of the result are annotated with the defining module.
-spec get_record_type(Module :: module(),
                      Name :: atom()) -> {ok, [typed_record_field()]} | not_found.
get_record_type(Module, Name) ->
    call({get_record_type, Module, Name}).
%% @doc Return a list of all known modules.
-spec get_modules() -> [module()].
get_modules() ->
    call(get_modules).

%% @doc Return the name and arity of every type known for Module.
-spec get_types(module()) -> [{atom(), arity()}].
get_types(Module) ->
    call({get_types, Module}).

%% @doc Write the collected specs, types and loaded-module set to Filename.
-spec save(Filename :: any()) -> ok | {error, any()}.
save(Filename) ->
    call({save, Filename}).

%% @doc Merge a database previously written by save/1 into this server.
-spec load(Filename :: any()) -> ok | {error, any()}.
load(Filename) ->
    call({load, Filename}).

%% @doc Import specs and types from the given Erlang source files.
-spec import_erl_files([file:filename()]) -> ok.
import_erl_files(Files) ->
    call({import_erl_files, Files, []}, infinity).

%% @doc Like import_erl_files/1, with additional include directories.
-spec import_erl_files([file:filename()],any()) -> ok.
import_erl_files(Files,Includes) ->
    call({import_erl_files, Files, Includes}, infinity).

%% @doc Import specs and types from compiled beam files (paths or binaries).
-spec import_beam_files([file:filename() | binary()]) ->
          ok | gradualizer_file_utils:parsed_file_error().
import_beam_files(Files) ->
    call({import_beam_files, Files}, infinity).

%% @doc Import all sources found under App's src directory.
-spec import_app(App :: atom()) -> ok.
import_app(App) ->
    call({import_app, App}, infinity).

%% @doc Import the sources of every OTP application of this installation.
-spec import_otp() -> ok.
import_otp() ->
    call(import_otp, infinity).

%% @doc Import the spec overrides provided by gradualizer_prelude.
-spec import_prelude() -> ok.
import_prelude() ->
    call(import_prelude, infinity).

%% @doc Import extra spec overrides from the given directories.
%% NOTE(review): the server folds over this argument as a list of
%% directories although the spec says file:filename() - confirm and
%% consider tightening the spec to [file:filename()].
-spec import_extra_specs(file:filename()) -> ok.
import_extra_specs(Dirs) ->
    call({import_extra_specs, Dirs}, infinity).

%% @doc Import a single module, located via the source/beam file maps.
-spec import_module(module()) -> ok | not_found.
import_module(Module) ->
    call({import_module, Module}, infinity).
%% ----------------------------------------------------------------------------
%% Gen_server
-type opts() :: #{autoimport := boolean(),
prelude := boolean(),
specs_override := [file:filename()]}.
-define(default_opts, #{autoimport => true,
prelude => true,
specs_override => []}).
-record(state, {specs = #{} :: #{mfa() => [type()]},
types = #{} :: #{mfa() => #typeinfo{}},
records = #{} :: #{{module(), atom()} => [typechecker:typed_record_field()]},
opts = ?default_opts :: opts(),
srcmap = #{} :: #{module() => file:filename()},
beammap = #{} :: #{module() => file:filename()},
loaded = #{} :: #{module() => boolean()}}).
-type state() :: #state{}.
-spec init(opts()) -> {ok, state()}.
init(Opts0) ->
    %% Fill in defaults for any options not supplied by the caller.
    Opts = maps:merge(?default_opts, Opts0),
    State1 = #state{opts = Opts},
    %% With autoimport enabled, build maps from module name to source and
    %% beam file so modules can be imported lazily on first lookup.
    State2 = case Opts of
                 #{autoimport := true} ->
                     State1#state{srcmap = get_src_map(), beammap = get_beam_map()};
                 _ ->
                     State1
             end,
    Self = self(),
    %% Defer the potentially slow prelude/override imports by posting
    %% messages to ourselves; they are handled in handle_info/2.
    maps:get(prelude, Opts) andalso (Self ! import_prelude),
    Self ! {import_extra_specs, maps:get(specs_override, Opts)},
    {ok, State2}.
-spec handle_call(any(), {pid(), term()}, state()) -> {reply, term(), state()}.
%% Look up the spec of M:F/A, annotating user types with the module.
handle_call({get_spec, M, F, A}, _From, State) ->
    State1 = autoimport(M, State),
    K = {M, F, A},
    case State1#state.specs of
        #{K := Types} ->
            Types1 = [typelib:annotate_user_types(M, Type) || Type <- Types],
            {reply, {ok, Types1}, State1};
        _NoMatch ->
            {reply, not_found, State1}
    end;
%% Type lookups; see handle_get_type/6 for the shared logic.
handle_call({get_exported_type, M, T, Args}, _From, State) ->
    State1 = autoimport(M, State),
    handle_get_type(M, T, Args, true, false, State1);
handle_call({get_type, M, T, Args}, _From, State) ->
    State1 = autoimport(M, State),
    handle_get_type(M, T, Args, false, false, State1);
handle_call({get_opaque_type, M, T, Args}, _From, State) ->
    State1 = autoimport(M, State),
    handle_get_type(M, T, Args, false, true, State1);
%% Look up a record definition, annotating its field types with the module.
handle_call({get_record_type, M, Name}, _From, State) ->
    State1 = autoimport(M, State),
    K = {M, Name},
    case State1#state.records of
        #{K := TypedFields1} ->
            TypedFields2 =
                [{typed_record_field, Field, typelib:annotate_user_types(M, Type)}
                 || {typed_record_field, Field, Type} <- TypedFields1],
            {reply, {ok, TypedFields2}, State1};
        _ ->
            {reply, not_found, State1}
    end;
handle_call(get_modules, _From, State) ->
    {reply, maps:keys(State#state.srcmap), State};
handle_call({get_types, M}, _From, State) ->
    State1 = autoimport(M, State),
    Ts = [{T, A} || {Mod, T, A} <- maps:keys(State#state.types), Mod == M],
    {reply, Ts, State1};
%% Serialize the accumulated specs/types/loaded-set to a file.
handle_call({save, Filename}, _From, State) ->
    Permanent = {State#state.specs, State#state.types, State#state.loaded},
    Bin = term_to_binary(Permanent, [compressed]),
    Res = file:write_file(Filename, Bin),
    {reply, Res, State};
%% Merge a previously saved database into the current state; a corrupt
%% file is reported back to the caller instead of crashing the server.
handle_call({load, Filename}, _From, State) ->
    case file:read_file(Filename) of
        {ok, Bin} ->
            try
                {Sp2, Ty2, Loaded2} = binary_to_term(Bin),
                #state{specs = Sp1, types = Ty1, loaded = Loaded1} = State,
                NewState = State#state{specs = maps:merge(Sp1, Sp2),
                                       types = maps:merge(Ty1, Ty2),
                                       loaded = maps:merge(Loaded1, Loaded2)},
                {reply, ok, NewState}
            catch error:E:Stack ->
                    {reply, {error, E, Stack}, State}
            end;
        {error, Reason} ->
            {reply, {error, Reason}, State}
    end;
handle_call({import_module, Mod}, _From, State) ->
    case import_module(Mod, State) of
        {ok, State1} ->
            {reply, ok, State1};
        not_found ->
            {reply, not_found, State}
    end;
handle_call({import_erl_files, Files, Includes}, _From, State) ->
    State1 = import_erl_files(Files, Includes, State),
    {reply, ok, State1};
handle_call({import_beam_files, Files}, _From, State) ->
    case import_beam_files(Files, State) of
        {ok, State1} -> {reply, ok, State1};
        Error = {_, _} -> {reply, Error, State}
    end;
%% Import all sources of an application, if it can be located.
handle_call({import_app, App}, _From, State) ->
    case code:lib_dir(App) of
        {error, bad_name} ->
            error_logger:warning_msg("Unknown app: ~p", [App]),
            {reply, ok, State};
        LibDir ->
            Pattern = LibDir ++ "/src/*.erl",
            Files = filelib:wildcard(Pattern),
            State1 = import_erl_files(Files, [], State),
            {reply, ok, State1}
    end;
handle_call(import_otp, _From, State) ->
    Pattern = code:lib_dir() ++ "/*/src/*.erl",
    Files = filelib:wildcard(Pattern),
    State1 = import_erl_files(Files, [], State),
    {reply, ok, State1};
handle_call(import_prelude, _From, State) ->
    State2 = import_prelude(State),
    {reply, ok, State2};
handle_call({import_extra_specs, Dirs}, _From, State) ->
    State2 = lists:foldl(fun import_extra_specs/2, State, Dirs),
    {reply, ok, State2}.
%% No casts are used by this server.
handle_cast(_Msg, State) ->
    {noreply, State}.

%% Deferred imports posted by init/1.
handle_info(import_prelude, State) ->
    State2 = import_prelude(State),
    {noreply, State2};
handle_info({import_extra_specs, Dirs}, State) ->
    State2 = lists:foldl(fun import_extra_specs/2, State, Dirs),
    {noreply, State2};
handle_info(_Msg, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    %when Reason == normal; Reason == shutdown
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% ----------------------------------------------------------------------------
%% Helpers
%% Server-side import of the prelude override forms into the database.
-spec import_prelude(state()) -> state().
import_prelude(State = #state{loaded = Loaded}) ->
    FormsByModule = gradualizer_prelude:get_modules_and_forms(),
    %% Import forms each of the modules to override
    State1 = lists:foldl(fun ({Module, Forms}, StateAcc) ->
                                 import_absform(Module, Forms, StateAcc)
                         end,
                         State,
                         FormsByModule),
    %% Mark the just overridden modules as not yet loaded, to make sure they
    %% are loaded on demand
    State1#state{loaded = Loaded}.
%% Server-side import of override modules parsed from Dir; same pattern
%% as import_prelude/1 but the forms come from a directory of spec files.
-spec import_extra_specs(file:filename(), state()) -> state().
import_extra_specs(Dir, State = #state{loaded = Loaded}) ->
    FormsByModule = gradualizer_prelude_parse_trans:get_module_forms_tuples(Dir),
    %% Import forms each of the modules to override
    State1 = lists:foldl(fun ({Module, Forms}, StateAcc) ->
                                 import_absform(Module, Forms, StateAcc)
                         end,
                         State,
                         FormsByModule),
    %% Mark the just overridden modules as not yet loaded, to make sure they
    %% are loaded on demand
    State1#state{loaded = Loaded}.
%% Call the database server; call/1 uses a 5 second timeout.
call(Request) ->
    call(Request, 5000).

call(Request, Timeout) ->
    gen_server:call(?name, Request, Timeout).
%% Helper for the handle_call clauses for get_type, get_exported_type and
%% get_opaque_type.
-spec handle_get_type(module(), Name :: atom(), Params :: [type()],
                      RequireExported :: boolean(), ExpandOpaque :: boolean(),
                      state()) -> {reply, {ok, type()} | atom(), state()}.
handle_get_type(M, T, Args, RequireExported, ExpandOpaque, State) ->
    K = {M, T, length(Args)},
    case State#state.types of
        #{K := TypeInfo} ->
            case TypeInfo of
                #typeinfo{exported = false} when RequireExported ->
                    {reply, not_exported, State};
                #typeinfo{opaque = true} when not ExpandOpaque ->
                    {reply, opaque, State};
                #typeinfo{params = Vars,
                          body = Type0} ->
                    %% Substitute the actual type arguments for the declared
                    %% type variables after annotating user types with M.
                    VarMap = maps:from_list(lists:zip(Vars, Args)),
                    Type1 = typelib:annotate_user_type(M, Type0),
                    Type2 = typelib:substitute_type_vars(Type1, VarMap),
                    {reply, {ok, Type2}, State}
            end;
        _NoMatch ->
            {reply, not_found, State}
    end.
%% Lazily import module M on first reference, unless autoimport is
%% disabled or the module has already been loaded.
-spec autoimport(module(), state()) -> state().
autoimport(_M, #state{opts = #{autoimport := false}} = State) ->
    State;
autoimport(M, #state{opts = #{autoimport := true},
                     loaded = Loaded} = State) ->
    case Loaded of
        #{M := _} ->
            %% Already loaded or attempted
            State;
        _ ->
            %io:format("Loading types from ~p~n", [M]),
            case import_module(M, State) of
                {ok, State1} -> State1;
                not_found -> State
            end
    end.
%% Import a module, preferring its beam file and falling back to the
%% source file if no beam is known or the beam import fails.
-spec import_module(module(), state()) -> {ok, state()} | not_found.
import_module(Mod, State) ->
    case State#state.beammap of
        #{Mod := Filename} ->
            case import_beam_files([Filename], State) of
                {ok, State1} -> {ok, State1};
                {_, _} -> import_module_from_erl(Mod, State)
            end;
        _ ->
            import_module_from_erl(Mod, State)
    end.
%% Import a module from its source file, if one is known in the srcmap.
-spec import_module_from_erl(module(), state()) -> {ok, state()} | not_found.
import_module_from_erl(Mod, State) ->
    case State#state.srcmap of
        #{Mod := Filename} ->
            State1 = import_erl_files([Filename], [], State),
            {ok, State1};
        _ ->
            not_found
    end.
%% Parse each source file with epp and import its forms into the state.
%% Preprocessor errors are logged (see check_epp_errors/2) but do not
%% abort the import.
-spec import_erl_files([file:filename()], [file:filename()], state()) -> state().
import_erl_files([File | Files], Includes, State) ->
    EppOpts = [{includes, guess_include_dirs(File) ++ Includes}],
    {ok, Forms} = epp:parse_file(File, EppOpts),
    {attribute, _, module, Module} = lists:keyfind(module, 3, Forms),
    check_epp_errors(File, Forms),
    import_erl_files(Files, Includes, import_absform(Module, Forms, State));
import_erl_files([], _Includes, St) ->
    St.
%% Import abstract forms from beam files; stops at the first file whose
%% forms cannot be extracted and returns that error.
-spec import_beam_files([file:filename() | binary()], state()) -> {ok, state()} | gradualizer_file_utils:parsed_file_error().
import_beam_files([File | Files], State) ->
    case gradualizer_file_utils:get_forms_from_beam(File) of
        {ok, Forms} ->
            {attribute, _, module, Module} = lists:keyfind(module, 3, Forms),
            import_beam_files(Files, import_absform(Module, Forms, State));
        Error = {Status, _} when (Status /= ok) ->
            Error
    end;
import_beam_files([], St) ->
    {ok, St}.
%% Collect the specs, types and records of Module from its abstract forms,
%% merge them into the state (existing entries win) and mark the module
%% as loaded.
-spec import_absform(module(), gradualizer_file_utils:abstract_forms(), state()) -> state().
import_absform(Module, Forms1, State) ->
    Specs = collect_specs(Module, Forms1),
    SpecMap1 = add_entries_to_map(Specs, State#state.specs),
    Types = collect_types(Module, Forms1),
    Records = collect_records(Module, Forms1),
    TypeMap1 = add_entries_to_map(Types, State#state.types),
    RecMap1 = add_entries_to_map(Records, State#state.records),
    Loaded1 = (State#state.loaded)#{Module => true},
    State#state{
      specs = SpecMap1,
      types = TypeMap1,
      records = RecMap1,
      loaded = Loaded1
     }.
%% Include dirs for OTP apps are given in makefiles. We can never
%% guarantee to get them right without extracting the types during
%% compilation.
-spec guess_include_dirs(file:filename()) -> list().
guess_include_dirs(File) ->
    Dir = filename:dirname(File),
    %% For a file in .../src assume headers live in the sibling include
    %% directory; always add the erts/kernel/stdlib include dirs.
    case filename:basename(Dir) of
        "src" -> [filename:join(Dir, "../include")];
        _ -> []
    end ++ [code:lib_dir(App, include) || App <- [erts, kernel, stdlib]].
%% Log warnings for epp errors among the given forms
%% Bad errors are failed includes due to bad include paths.
-spec check_epp_errors(file:filename(), Forms :: [tuple()]) -> ok.
check_epp_errors(File, Forms) ->
    Errors = [E || {error, E} <- Forms],
    %% Missing include files get a dedicated, more readable warning.
    MissingIncludes = [F || {_Line, epp, {include, file, F}} <- Errors],
    if
        MissingIncludes /= [] ->
            error_logger:warning_msg("Failed to find the following include"
                                     " files for ~p:~n~p",
                                     [File, MissingIncludes]);
        Errors /= [] ->
            error_logger:warning_msg("Errors while loading ~p:~n~p",
                                     [File, Errors]);
        true ->
            ok
    end.
%% Add pairs to a map, without overwriting existing values in the map.
%% The first value seen for a key wins, including duplicates within
%% Entries itself. (The inner TODO comment below had lost its %% marker,
%% which broke the syntax; it is restored.)
-spec add_entries_to_map([{Key, Value}], #{K => V}) -> #{K => V}
        when Key :: K, Value :: V.
add_entries_to_map(Entries, Map) ->
    lists:foldl(fun ({MFA, Types}, MapAcc) ->
                        maps:update_with(MFA,
                                         fun (OldTypes) ->
                                                 %% Key already present. Keep
                                                 %% the old value.
                                                 %% Maybe TODO: Warn if an
                                                 %% element is already present
                                                 OldTypes
                                         end, Types, MapAcc)
                end,
                Map,
                Entries).
-spec collect_types(module(), Forms) -> [{mfa(), #typeinfo{}}] when
      Forms :: gradualizer_file_utils:abstract_forms().
%% Collect exported types, including opaques, record definitions,
%% exported and unexported types
collect_types(Module, Forms) ->
    %% ExportedTypes :: [{atom(), arity()}]
    %% (This comment line had lost its %% marker, breaking the syntax.)
    ExportedTypes = lists:append([Tys || {attribute, _, export_type,
                                          Tys} <- Forms]),
    %% Now all type definitions are easy to extract.
    Types = [begin
                 Arity = length(Vars),
                 Arity >= 0 andalso Arity =< 255 orelse erlang:error({invalid_arity, Arity, Form}),
                 Id = {Module, Name, ?assert_type(Arity, arity())},
                 Exported = lists:member({Name, Arity}, ExportedTypes),
                 Params = [VarName || {var, _, VarName} <- Vars],
                 Info = #typeinfo{exported = Exported,
                                  opaque = (Attr == opaque),
                                  params = Params,
                                  body = typelib:remove_pos(Body)},
                 {Id, Info}
             end || Form = {attribute, _, Attr, {Name, Body, Vars}} <- Forms,
                    Attr == type orelse Attr == opaque,
                    is_atom(Name)],
    Types.
%% Pair each extracted record definition with its {Module, Name} key.
collect_records(Module, Forms) ->
    lists:map(fun ({Name, Fields}) -> {{Module, Name}, Fields} end,
              extract_record_defs(Forms)).
%% Normalize Type Defs
%% -------------------
%%
%% Extracts and normalizes type definitions from a list of forms.
%%
%% Normalise record definitions into types (i.e. typed record definitions).
%% That is, if there is no typed definition of a record among the
%% forms, create one from the untyped one and normalize so that they
%% all have a default value.
%%
%% Locations in field names and types are set to zero to allow comparison using
%% equality and pattern matching. This is not done for the default value (which
%% is an expression, not a type).
%% Extract record definitions from Forms as {Name, TypedFields} pairs,
%% normalizing each field and stripping positions. (The OTP < 19 comment
%% below had lost its %% marker, which broke the syntax; it is restored.)
-spec extract_record_defs(Forms :: [tuple()]) -> Typedefs :: [{atom(), [type()]}].
extract_record_defs([{attribute, L, record, {Name, _UntypedFields}},
                     {attribute, L, type, {{record, Name}, Fields, []}} |
                     Rest]) ->
    %% This representation is only used in OTP < 19
    extract_record_defs([{attribute, L, record, {Name, Fields}} | Rest]);
extract_record_defs([{attribute, _L, record, {Name, Fields}} | Rest]) ->
    TypedFields = [gradualizer_lib:remove_pos_typed_record_field(
                     absform:normalize_record_field(Field))
                   || Field <- Fields],
    R = {Name, TypedFields},
    [R | extract_record_defs(Rest)];
extract_record_defs([_ | Rest]) ->
    %% Skip forms that are not record definitions
    extract_record_defs(Rest);
extract_record_defs([]) ->
    [].
%% Returns specs for all exported functions, generating any-types for unspeced
%% functions.
-spec collect_specs(module(), [tuple()]) -> [{mfa(), [type()]}].
collect_specs(Module, Forms) ->
    Specs = [normalize_spec(Spec, Module) ||
                {attribute, _, spec, Spec} <- Forms],
    %% export_all may appear inside a compile-options list or as a bare
    %% compile attribute.
    ExportAll = lists:any(fun ({attribute, _, compile, CompileOpts})
                                when is_list(CompileOpts) ->
                                  lists:member(export_all, CompileOpts);
                              ({attribute, _, compile, export_all}) ->
                                  true;
                              (_) ->
                                  false
                          end,
                          Forms),
    Exports =
        if ExportAll ->
                [{Name, Arity} || {function, _, Name, Arity, _} <- Forms];
           true ->
                lists:concat([Exs || {attribute, _, export, Exs} <- Forms])
        end,
    %% Generate a fallback (any(), ...) -> any() spec for every exported
    %% function that has no spec attribute of its own.
    SpecedFunsSet = sets:from_list([{F, A} || {{M, F, A}, _} <- Specs,
                                              M == Module]),
    ImplicitSpecs = [make_spec(Module, F, A) ||
                        {F, A} <- Exports,
                        not sets:is_element({F, A},
                                            SpecedFunsSet)],
    [{Key, typelib:remove_pos_all(absform:normalize_function_type_list(Types))}
     || {Key, Types} <- Specs ++ ImplicitSpecs].
%% Qualify a spec key with Module when the spec was written as F/A only;
%% specs already on M:F/A form are returned unchanged.
normalize_spec(Spec, Module) ->
    case Spec of
        {{Func, Arity}, Types} -> {{Module, Func, Arity}, Types};
        {{_M, _F, _A}, _Types} -> Spec
    end.
%% Build an implicit spec entry for Module:Name/Arity.
-spec make_spec(module(), atom(), arity()) -> {mfa(), [type()]}.
make_spec(Module, Name, Arity) ->
    {{Module, Name, Arity}, [make_function_type(Arity)]}.

%% Creates the function type (any(), any(), ...) -> any().
make_function_type(Arity) ->
    Anno = erl_anno:new(0),
    Any = {type, Anno, any, []},
    Args = [Any || _ <- lists:seq(1, Arity)],
    {type, Anno, 'fun', [{type, Anno, product, Args}, Any]}.
-spec get_src_map() -> #{module() => file:filename()}.
get_src_map() ->
    %% Derive source wildcards from the code path. The strings are built
    %% reversed: "nibe/" is "/ebin" backwards, so an .../ebin entry maps to
    %% .../src/*.erl and any other entry maps to <entry>/*.erl.
    SrcDirs = [case lists:reverse(Path) of
                   "nibe/" ++ Tail -> lists:reverse("lre.*/crs/" ++ Tail);
                   RevPath -> lists:reverse("lre.*/" ++ RevPath)
               end || Path <- code:get_path()],
    SrcFiles = lists:flatmap(fun filelib:wildcard/1, SrcDirs),
    RE = erl_file_regexp(),
    %% Map each found file to {ModuleName, Filename}; the wildcard
    %% guarantees the regexp matches.
    Pairs = [begin
                 {match, [Mod]} = re:run(Filename, RE, [{capture, all_but_first, list}]),
                 ?assert(is_list(Mod), regex_match_not_a_string),
                 Mod = ?assert_type(Mod, string()),
                 {list_to_atom(Mod), Filename}
             end || Filename <- SrcFiles],
    maps:from_list(Pairs).
%% Build a map from module name to the .beam file found on the code path.
-spec get_beam_map() -> #{module() => file:filename()}.
get_beam_map() ->
    BeamDirs = code:get_path(),
    BeamFiles = lists:flatmap(fun (Dir) -> filelib:wildcard(Dir ++ "/*.beam") end, BeamDirs),
    RE = beam_file_regexp(),
    %% lists:filtermap/2 replaces the previous map-to-{false,false} plus
    %% filter dance with a single pass.
    BeamPairs = lists:filtermap(
                  fun (Filename) ->
                          case re:run(Filename, RE, [{capture, all_but_first, list}]) of
                              {match, [Mod]} ->
                                  {true, {list_to_atom(Mod), Filename}};
                              nomatch ->
                                  false
                          end
                  end,
                  BeamFiles),
    maps:from_list(BeamPairs).
%% Compiled pattern capturing the module name from a path ending in ".beam".
-spec beam_file_regexp() -> regexp().
beam_file_regexp() ->
    %% "\\." yields an escaped dot in the regexp. The previous "\." in the
    %% Erlang literal collapsed to a bare ".", so the dot before "beam"
    %% matched any character.
    {ok, RE} = re:compile(<<"^.+/([^/]+)\\.beam$">>),
    RE.
%% Compiled pattern capturing the module name from a path ending in ".erl".
-spec erl_file_regexp() -> regexp().
erl_file_regexp() ->
    %% "\\." yields an escaped dot in the regexp. The previous "\." in the
    %% Erlang literal collapsed to a bare ".", so the dot before "erl"
    %% matched any character.
    {ok, RE} = re:compile(<<"([^/.]*)\\.erl$">>),
    RE.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/d2ea3201ee3c55695f50144ee4befcafa0756ccc/src/gradualizer_db.erl | erlang | @doc Collects exported functions and types from multiple files.
For exported functions with missing spec, a spec is generated with any()
as the type for all parameters and return values.
API functions
Callbacks
Types for the Erlang Abstract Format
Compiled regular expression
Gen server local registered name
Public API functions
@doc Fetches the types of the clauses of an exported function. User-defined
types and record types are annotated with filename on the form
"module.erl"
opaque types.
@doc Like get_type/3 but also expands opaque types.
@doc Return a list of all known modules.
----------------------------------------------------------------------------
Gen_server
when Reason == normal; Reason == shutdown
----------------------------------------------------------------------------
Helpers
Import forms each of the modules to override
Mark the just overridden modules as not yet loaded, to make sure they
are loaded on demand
Import forms each of the modules to override
Mark the just overridden modules as not yet loaded, to make sure they
are loaded on demand
@doc ensure DB server is started
Already loaded or attempted
io:format("Loading types from ~p~n", [M]),
guarantee to get them right without extracting the types during
compilation.
Log warnings for epp errors among the given forms
Bad errors are failed includes due to bad include paths.
Add pairs to a map, without overwriting existing values in the map.
Key already present. Keep the
old value.
is already present
Collect exported types, including opaques, record definitions,
exported and unexported types
Now all type definitions are easy to extract.
Normalize Type Defs
-------------------
Extracts and normalizes type definitions from a list of forms.
That is, if there is no typed definition of a record among the
forms, create one from the untyped one and normalize so that they
all have a default value.
equality and pattern matching. This is not done for the default value (which
is an expression, not a type).
Skip forms that are not record definitions
Returns specs for all exported functions, generating any-types for unspeced
functions.
Creates the function type (any(), any(), ...) -> any(). | @private
-module(gradualizer_db).
-export([start_link/1,
get_spec/3,
get_type/3, get_exported_type/3, get_opaque_type/3,
get_record_type/2,
get_modules/0, get_types/1,
save/1, load/1,
import_module/1,
import_erl_files/1, import_erl_files/2, import_beam_files/1, import_extra_specs/1,
import_app/1, import_otp/0, import_prelude/0]).
-behaviour(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-type type() :: gradualizer_type:abstract_type().
-type typed_record_field() :: {typed_record_field,
{record_field, erl_anno:anno(),
Name :: atom(),
Default :: erl_parse:abstract_expr()},
type()}.
-type regexp() :: {re_pattern, _, _, _, _}.
-define(name, ?MODULE).
-include_lib("stdlib/include/assert.hrl").
-include("gradualizer.hrl").
Internal data
-record(typeinfo, {exported :: boolean(),
opaque :: boolean(),
params :: [{var, erl_anno:anno(), atom()}],
body :: type()}).
start_link(Opts) ->
OptsMap1 = maps:from_list(proplists:unfold(Opts)),
OptsMap2 = OptsMap1#{specs_override => proplists:get_all_values(specs_override, Opts)},
gen_server:start_link({local, ?name}, ?MODULE, OptsMap2, []).
-spec get_spec(M :: module(),
F :: atom(),
A :: arity()) -> {ok, [type()]} | not_found.
get_spec(M, F, A) ->
call({get_spec, M, F, A}).
@doc an exported or unexported user - defined type . Does not expand
-spec get_type(Module :: module(),
Type :: atom(),
Params :: [type()]) -> {ok, type()} | opaque | not_found.
get_type(M, T, A) ->
call({get_type, M, T, A}).
@doc an exported type . Does not expand opaque types .
-spec get_exported_type(Module :: module(),
Type :: atom(),
Params :: [type()]) -> {ok, type()} | opaque |
not_exported | not_found.
get_exported_type(M, T, A) ->
call({get_exported_type, M, T, A}).
-spec get_opaque_type(Module :: module(),
Type :: atom(),
Params :: [type()]) -> {ok, type()} | not_found.
get_opaque_type(M, T, A) ->
call({get_opaque_type, M, T, A}).
@doc a record type defined in the module .
-spec get_record_type(Module :: module(),
Name :: atom()) -> {ok, [typed_record_field()]} | not_found.
get_record_type(Module, Name) ->
call({get_record_type, Module, Name}).
-spec get_modules() -> [module()].
get_modules() ->
call(get_modules).
-spec get_types(module()) -> [{atom(), arity()}].
get_types(Module) ->
call({get_types, Module}).
-spec save(Filename :: any()) -> ok | {error, any()}.
save(Filename) ->
call({save, Filename}).
-spec load(Filename :: any()) -> ok | {error, any()}.
load(Filename) ->
call({load, Filename}).
-spec import_erl_files([file:filename()]) -> ok.
import_erl_files(Files) ->
call({import_erl_files, Files, []}, infinity).
-spec import_erl_files([file:filename()],any()) -> ok.
import_erl_files(Files,Includes) ->
call({import_erl_files, Files, Includes}, infinity).
-spec import_beam_files([file:filename() | binary()]) ->
ok | gradualizer_file_utils:parsed_file_error().
import_beam_files(Files) ->
call({import_beam_files, Files}, infinity).
-spec import_app(App :: atom()) -> ok.
import_app(App) ->
call({import_app, App}, infinity).
-spec import_otp() -> ok.
import_otp() ->
call(import_otp, infinity).
-spec import_prelude() -> ok.
import_prelude() ->
call(import_prelude, infinity).
-spec import_extra_specs(file:filename()) -> ok.
import_extra_specs(Dirs) ->
call({import_extra_specs, Dirs}, infinity).
-spec import_module(module()) -> ok | not_found.
import_module(Module) ->
call({import_module, Module}, infinity).
-type opts() :: #{autoimport := boolean(),
prelude := boolean(),
specs_override := [file:filename()]}.
-define(default_opts, #{autoimport => true,
prelude => true,
specs_override => []}).
-record(state, {specs = #{} :: #{mfa() => [type()]},
types = #{} :: #{mfa() => #typeinfo{}},
records = #{} :: #{{module(), atom()} => [typechecker:typed_record_field()]},
opts = ?default_opts :: opts(),
srcmap = #{} :: #{module() => file:filename()},
beammap = #{} :: #{module() => file:filename()},
loaded = #{} :: #{module() => boolean()}}).
-type state() :: #state{}.
-spec init(opts()) -> {ok, state()}.
init(Opts0) ->
Opts = maps:merge(?default_opts, Opts0),
State1 = #state{opts = Opts},
State2 = case Opts of
#{autoimport := true} ->
State1#state{srcmap = get_src_map(), beammap = get_beam_map()};
_ ->
State1
end,
Self = self(),
maps:get(prelude, Opts) andalso (Self ! import_prelude),
Self ! {import_extra_specs, maps:get(specs_override, Opts)},
{ok, State2}.
-spec handle_call(any(), {pid(), term()}, state()) -> {reply, term(), state()}.
handle_call({get_spec, M, F, A}, _From, State) ->
State1 = autoimport(M, State),
K = {M, F, A},
case State1#state.specs of
#{K := Types} ->
Types1 = [typelib:annotate_user_types(M, Type) || Type <- Types],
{reply, {ok, Types1}, State1};
_NoMatch ->
{reply, not_found, State1}
end;
handle_call({get_exported_type, M, T, Args}, _From, State) ->
State1 = autoimport(M, State),
handle_get_type(M, T, Args, true, false, State1);
handle_call({get_type, M, T, Args}, _From, State) ->
State1 = autoimport(M, State),
handle_get_type(M, T, Args, false, false, State1);
handle_call({get_opaque_type, M, T, Args}, _From, State) ->
State1 = autoimport(M, State),
handle_get_type(M, T, Args, false, true, State1);
handle_call({get_record_type, M, Name}, _From, State) ->
State1 = autoimport(M, State),
K = {M, Name},
case State1#state.records of
#{K := TypedFields1} ->
TypedFields2 =
[{typed_record_field, Field, typelib:annotate_user_types(M, Type)}
|| {typed_record_field, Field, Type} <- TypedFields1],
{reply, {ok, TypedFields2}, State1};
_ ->
{reply, not_found, State1}
end;
handle_call(get_modules, _From, State) ->
{reply, maps:keys(State#state.srcmap), State};
handle_call({get_types, M}, _From, State) ->
State1 = autoimport(M, State),
Ts = [{T, A} || {Mod, T, A} <- maps:keys(State#state.types), Mod == M],
{reply, Ts, State1};
handle_call({save, Filename}, _From, State) ->
Permanent = {State#state.specs, State#state.types, State#state.loaded},
Bin = term_to_binary(Permanent, [compressed]),
Res = file:write_file(Filename, Bin),
{reply, Res, State};
handle_call({load, Filename}, _From, State) ->
case file:read_file(Filename) of
{ok, Bin} ->
try
{Sp2, Ty2, Loaded2} = binary_to_term(Bin),
#state{specs = Sp1, types = Ty1, loaded = Loaded1} = State,
NewState = State#state{specs = maps:merge(Sp1, Sp2),
types = maps:merge(Ty1, Ty2),
loaded = maps:merge(Loaded1, Loaded2)},
{reply, ok, NewState}
catch error:E:Stack ->
{reply, {error, E, Stack}, State}
end;
{error, Reason} ->
{reply, {error, Reason}, State}
end;
handle_call({import_module, Mod}, _From, State) ->
case import_module(Mod, State) of
{ok, State1} ->
{reply, ok, State1};
not_found ->
{reply, not_found, State}
end;
handle_call({import_erl_files, Files, Includes}, _From, State) ->
State1 = import_erl_files(Files, Includes, State),
{reply, ok, State1};
handle_call({import_beam_files, Files}, _From, State) ->
case import_beam_files(Files, State) of
{ok, State1} -> {reply, ok, State1};
Error = {_, _} -> {reply, Error, State}
end;
handle_call({import_app, App}, _From, State) ->
case code:lib_dir(App) of
{error, bad_name} ->
error_logger:warning_msg("Unknown app: ~p", [App]),
{reply, ok, State};
LibDir ->
Pattern = LibDir ++ "/src/*.erl",
Files = filelib:wildcard(Pattern),
State1 = import_erl_files(Files, [], State),
{reply, ok, State1}
end;
handle_call(import_otp, _From, State) ->
Pattern = code:lib_dir() ++ "/*/src/*.erl",
Files = filelib:wildcard(Pattern),
State1 = import_erl_files(Files, [], State),
{reply, ok, State1};
handle_call(import_prelude, _From, State) ->
State2 = import_prelude(State),
{reply, ok, State2};
handle_call({import_extra_specs, Dirs}, _From, State) ->
State2 = lists:foldl(fun import_extra_specs/2, State, Dirs),
{reply, ok, State2}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(import_prelude, State) ->
State2 = import_prelude(State),
{noreply, State2};
handle_info({import_extra_specs, Dirs}, State) ->
State2 = lists:foldl(fun import_extra_specs/2, State, Dirs),
{noreply, State2};
handle_info(_Msg, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-spec import_prelude(state()) -> state().
import_prelude(State = #state{loaded = Loaded}) ->
FormsByModule = gradualizer_prelude:get_modules_and_forms(),
State1 = lists:foldl(fun ({Module, Forms}, StateAcc) ->
import_absform(Module, Forms, StateAcc)
end,
State,
FormsByModule),
State1#state{loaded = Loaded}.
-spec import_extra_specs(file:filename(), state()) -> state().
import_extra_specs(Dir, State = #state{loaded = Loaded}) ->
FormsByModule = gradualizer_prelude_parse_trans:get_module_forms_tuples(Dir),
State1 = lists:foldl(fun ({Module, Forms}, StateAcc) ->
import_absform(Module, Forms, StateAcc)
end,
State,
FormsByModule),
State1#state{loaded = Loaded}.
call(Request) ->
call(Request, 5000).
call(Request, Timeout) ->
gen_server:call(?name, Request, Timeout).
helper for handle_call for get_type , get_exported_type , get_opaque_type .
-spec handle_get_type(module(), Name :: atom(), Params :: [type()],
RequireExported :: boolean(), ExpandOpaque :: boolean(),
state()) -> {reply, {ok, type()} | atom(), state()}.
handle_get_type(M, T, Args, RequireExported, ExpandOpaque, State) ->
K = {M, T, length(Args)},
case State#state.types of
#{K := TypeInfo} ->
case TypeInfo of
#typeinfo{exported = false} when RequireExported ->
{reply, not_exported, State};
#typeinfo{opaque = true} when not ExpandOpaque ->
{reply, opaque, State};
#typeinfo{params = Vars,
body = Type0} ->
VarMap = maps:from_list(lists:zip(Vars, Args)),
Type1 = typelib:annotate_user_type(M, Type0),
Type2 = typelib:substitute_type_vars(Type1, VarMap),
{reply, {ok, Type2}, State}
end;
_NoMatch ->
{reply, not_found, State}
end.
-spec autoimport(module(), state()) -> state().
autoimport(_M, #state{opts = #{autoimport := false}} = State) ->
State;
autoimport(M, #state{opts = #{autoimport := true},
loaded = Loaded} = State) ->
case Loaded of
#{M := _} ->
State;
_ ->
case import_module(M, State) of
{ok, State1} -> State1;
not_found -> State
end
end.
-spec import_module(module(), state()) -> {ok, state()} | not_found.
import_module(Mod, State) ->
case State#state.beammap of
#{Mod := Filename} ->
case import_beam_files([Filename], State) of
{ok, State1} -> {ok, State1};
{_, _} -> import_module_from_erl(Mod, State)
end;
_ ->
import_module_from_erl(Mod, State)
end.
-spec import_module_from_erl(module(), state()) -> {ok, state()} | not_found.
import_module_from_erl(Mod, State) ->
case State#state.srcmap of
#{Mod := Filename} ->
State1 = import_erl_files([Filename], [], State),
{ok, State1};
_ ->
not_found
end.
-spec import_erl_files([file:filename()], [file:filename()], state()) -> state().
import_erl_files([File | Files], Includes, State) ->
EppOpts = [{includes, guess_include_dirs(File) ++ Includes}],
{ok, Forms} = epp:parse_file(File, EppOpts),
{attribute, _, module, Module} = lists:keyfind(module, 3, Forms),
check_epp_errors(File, Forms),
import_erl_files(Files, Includes, import_absform(Module, Forms, State));
import_erl_files([], _Includes, St) ->
St.
-spec import_beam_files([file:filename() | binary()], state()) -> {ok, state()} | gradualizer_file_utils:parsed_file_error().
import_beam_files([File | Files], State) ->
case gradualizer_file_utils:get_forms_from_beam(File) of
{ok, Forms} ->
{attribute, _, module, Module} = lists:keyfind(module, 3, Forms),
import_beam_files(Files, import_absform(Module, Forms, State));
Error = {Status, _} when (Status /= ok) ->
Error
end;
import_beam_files([], St) ->
{ok, St}.
-spec import_absform(module(), gradualizer_file_utils:abstract_forms(), state()) -> state().
import_absform(Module, Forms1, State) ->
Specs = collect_specs(Module, Forms1),
SpecMap1 = add_entries_to_map(Specs, State#state.specs),
Types = collect_types(Module, Forms1),
Records = collect_records(Module, Forms1),
TypeMap1 = add_entries_to_map(Types, State#state.types),
RecMap1 = add_entries_to_map(Records, State#state.records),
Loaded1 = (State#state.loaded)#{Module => true},
State#state{
specs = SpecMap1,
types = TypeMap1,
records = RecMap1,
loaded = Loaded1
}.
Include dirs for OTP apps are given in makefiles . We can never
-spec guess_include_dirs(file:filename()) -> list().
guess_include_dirs(File) ->
Dir = filename:dirname(File),
case filename:basename(Dir) of
"src" -> [filename:join(Dir, "../include")];
_ -> []
end ++ [code:lib_dir(App, include) || App <- [erts, kernel, stdlib]].
-spec check_epp_errors(file:filename(), Forms :: [tuple()]) -> ok.
check_epp_errors(File, Forms) ->
Errors = [E || {error, E} <- Forms],
MissingIncludes = [F || {_Line, epp, {include, file, F}} <- Errors],
if
MissingIncludes /= [] ->
error_logger:warning_msg("Failed to find the following include"
" files for ~p:~n~p",
[File, MissingIncludes]);
Errors /= [] ->
error_logger:warning_msg("Errors while loading ~p:~n~p",
[File, Errors]);
true ->
ok
end.
-spec add_entries_to_map([{Key, Value}], #{K => V}) -> #{K => V}
when Key :: K, Value :: V.
add_entries_to_map(Entries, Map) ->
lists:foldl(fun ({MFA, Types}, MapAcc) ->
maps:update_with(MFA,
fun (OldTypes) ->
Maybe TODO : Warn if an element
OldTypes
end, Types, MapAcc)
end,
Map,
Entries).
-spec collect_types(module(), Forms) -> [{mfa(), #typeinfo{}}] when
Forms :: gradualizer_file_utils:abstract_forms().
collect_types(Module, Forms) ->
: : [ { atom ( ) , arity ( ) } ]
ExportedTypes = lists:append([Tys || {attribute, _, export_type,
Tys} <- Forms]),
Types = [begin
Arity = length(Vars),
Arity >= 0 andalso Arity =< 255 orelse erlang:error({invalid_arity, Arity, Form}),
Id = {Module, Name, ?assert_type(Arity, arity())},
Exported = lists:member({Name, Arity}, ExportedTypes),
Params = [VarName || {var, _, VarName} <- Vars],
Info = #typeinfo{exported = Exported,
opaque = (Attr == opaque),
params = Params,
body = typelib:remove_pos(Body)},
{Id, Info}
end || Form = {attribute, _, Attr, {Name, Body, Vars}} <- Forms,
Attr == type orelse Attr == opaque,
is_atom(Name)],
Types.
collect_records(Module, Forms) ->
[{{Module, Name}, Fields} || {Name, Fields} <- extract_record_defs(Forms)].
Normalise record definitions into types ( i.e. typed record definitions ) .
Location in field names and types are set to zero to allow comparison using
-spec extract_record_defs(Forms :: [tuple()]) -> Typedefs :: [{atom(), [type()]}].
extract_record_defs([{attribute, L, record, {Name, _UntypedFields}},
{attribute, L, type, {{record, Name}, Fields, []}} |
Rest]) ->
This representation is only used in OTP < 19
extract_record_defs([{attribute, L, record, {Name, Fields}} | Rest]);
extract_record_defs([{attribute, _L, record, {Name, Fields}} | Rest]) ->
TypedFields = [gradualizer_lib:remove_pos_typed_record_field(
absform:normalize_record_field(Field))
|| Field <- Fields],
R = {Name, TypedFields},
[R | extract_record_defs(Rest)];
extract_record_defs([_ | Rest]) ->
extract_record_defs(Rest);
extract_record_defs([]) ->
[].
-spec collect_specs(module(), [tuple()]) -> [{mfa(), [type()]}].
collect_specs(Module, Forms) ->
Specs = [normalize_spec(Spec, Module) ||
{attribute, _, spec, Spec} <- Forms],
ExportAll = lists:any(fun ({attribute, _, compile, CompileOpts})
when is_list(CompileOpts) ->
lists:member(export_all, CompileOpts);
({attribute, _, compile, export_all}) ->
true;
(_) ->
false
end,
Forms),
Exports =
if ExportAll ->
[{Name, Arity} || {function, _, Name, Arity, _} <- Forms];
true ->
lists:concat([Exs || {attribute, _, export, Exs} <- Forms])
end,
SpecedFunsSet = sets:from_list([{F, A} || {{M, F, A}, _} <- Specs,
M == Module]),
ImplicitSpecs = [make_spec(Module, F, A) ||
{F, A} <- Exports,
not sets:is_element({F, A},
SpecedFunsSet)],
[{Key, typelib:remove_pos_all(absform:normalize_function_type_list(Types))}
|| {Key, Types} <- Specs ++ ImplicitSpecs].
normalize_spec({{Func, Arity}, Types}, Module) ->
{{Module, Func, Arity}, Types};
normalize_spec(Spec = {{_M, _F, _A}, _Types}, _Module) ->
Spec.
-spec make_spec(module(), atom(), arity()) -> {mfa(), [type()]}.
make_spec(Module, Name, Arity) ->
{{Module, Name, Arity}, [make_function_type(Arity)]}.
make_function_type(Arity) ->
A = erl_anno:new(0),
{type, A, 'fun',
[{type, A, product, lists:duplicate(Arity, {type, A, any, []})},
{type, A, any, []}]}.
-spec get_src_map() -> #{module() => file:filename()}.
get_src_map() ->
SrcDirs = [case lists:reverse(Path) of
"nibe/" ++ Tail -> lists:reverse("lre.*/crs/" ++ Tail);
RevPath -> lists:reverse("lre.*/" ++ RevPath)
end || Path <- code:get_path()],
SrcFiles = lists:flatmap(fun filelib:wildcard/1, SrcDirs),
RE = erl_file_regexp(),
Pairs = [begin
{match, [Mod]} = re:run(Filename, RE, [{capture, all_but_first, list}]),
?assert(is_list(Mod), regex_match_not_a_string),
Mod = ?assert_type(Mod, string()),
{list_to_atom(Mod), Filename}
end || Filename <- SrcFiles],
maps:from_list(Pairs).
-spec get_beam_map() -> #{module() => file:filename()}.
get_beam_map() ->
BeamDirs = code:get_path(),
BeamFiles = lists:flatmap(fun (Dir) -> filelib:wildcard(Dir ++ "/*.beam") end, BeamDirs),
RE = beam_file_regexp(),
BeamPairs0 = lists:map(
fun (Filename) ->
case re:run(Filename, RE, [{capture, all_but_first, list}]) of
{match, [Mod]} ->
{list_to_atom(Mod), Filename};
nomatch ->
{false, false};
_ ->
erlang:error({unreachable, "check re:run/3 opts above - this should not happen"})
end
end,
BeamFiles),
BeamPairs = lists:filter(fun ({false, false}) -> false; (_) -> true end, BeamPairs0),
maps:from_list(BeamPairs).
-spec beam_file_regexp() -> regexp().
beam_file_regexp() ->
{ok, RE} = re:compile(<<"^.+\/([^/]+)\.beam$">>),
RE.
-spec erl_file_regexp() -> regexp().
erl_file_regexp() ->
{ok, RE} = re:compile(<<"([^/.]*)\.erl$">>),
RE.
|
341f52edf1b1e91cb47239e2ca02977c4416f68258f867a292c8d9df93329416 | tsikov/clerk | clerk.lisp | (in-package #:cl-user)
(defpackage #:clerk.test
(:use #:cl #:prove))
(in-package #:clerk.test)
(plan 1)
(subtest "package clerk.test"
(subtest "function (make-job"
(is (type-of (clerk::make-job "Friendly job"
'every
'5.minutes
'(print "Hi!")))
'continuous-job
"Can make continuous job"
:test #'string=)
(is (type-of (clerk::make-job "Friendly job"
'in
'1.day
'(print "Hi!")))
'one-time-job
"Can make one-time job"
:test #'string=))
(clerk:empty-jobs-queue)
(subtest "macro (job ..."
(clerk:job "Cool job" every 5.days (print "Party!"))
(is (length clerk:*jobs*)
1
"Adds an job to the jobs queue.")
(clerk:empty-jobs-queue)
(clerk:job "First job to fire"
in 1.minute (print "Fire!"))
(clerk:job "Second job to fire"
in 2.minutes (print "Fire!"))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "First job to fire"
"Orders jobs by time of firing."
:test #'string=)))
(subtest "function (job-fn"
(clerk:empty-jobs-queue)
(clerk:job-fn "Test job-fn" 'every '1.minute #'(lambda () (print "Fire!")))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "Test job-fn"
"Adds the job to the job queue."
:test #'string=))
(clerk:job-fn "Test job-fn (interval as a list)"
'in
(list 5 'seconds)
#'(lambda () (print "Fire!")))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "Test job-fn (interval as a list)"
"Adds the job to the job queue. Can decipher interval as a list"
:test #'string=)))
(clerk:empty-jobs-queue)
(subtest "function (fire-job-p"
(ok (not (clerk::fire-job-p
(make-instance 'clerk:job
:interval '1.minute)))
"Job is not fired before it's time")
(ok (clerk::fire-job-p
(make-instance 'clerk:job
:interval '-1.second))
"Job is fired when the time comes"))
(clerk:empty-jobs-queue)
(subtest "defmethod (fire-job"
(let ((job-thread (clerk::fire-job
(clerk:job "One-time job" in 1.second (+ 1 2)))))
(is (bt:join-thread job-thread)
3
"The job's calculation is performed successfully"))
(is (length clerk:*jobs*) 1
"One-time jobs don't create a new job in the job queue
when they are fired.")
(clerk:empty-jobs-queue)
(clerk::fire-job
(clerk:job "Continuous job" every 1.second (+ 1 2)))
(is (length clerk:*jobs*) 2
"Continuous jobs create a new job in the job queue when
when they are fired")))
(finalize)
| null | https://raw.githubusercontent.com/tsikov/clerk/96eef1ec4ea6ae7144a4e13be8d5fdbc00ced29d/t/clerk.lisp | lisp | (in-package #:cl-user)
(defpackage #:clerk.test
(:use #:cl #:prove))
(in-package #:clerk.test)
(plan 1)
(subtest "package clerk.test"
(subtest "function (make-job"
(is (type-of (clerk::make-job "Friendly job"
'every
'5.minutes
'(print "Hi!")))
'continuous-job
"Can make continuous job"
:test #'string=)
(is (type-of (clerk::make-job "Friendly job"
'in
'1.day
'(print "Hi!")))
'one-time-job
"Can make one-time job"
:test #'string=))
(clerk:empty-jobs-queue)
(subtest "macro (job ..."
(clerk:job "Cool job" every 5.days (print "Party!"))
(is (length clerk:*jobs*)
1
"Adds an job to the jobs queue.")
(clerk:empty-jobs-queue)
(clerk:job "First job to fire"
in 1.minute (print "Fire!"))
(clerk:job "Second job to fire"
in 2.minutes (print "Fire!"))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "First job to fire"
"Orders jobs by time of firing."
:test #'string=)))
(subtest "function (job-fn"
(clerk:empty-jobs-queue)
(clerk:job-fn "Test job-fn" 'every '1.minute #'(lambda () (print "Fire!")))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "Test job-fn"
"Adds the job to the job queue."
:test #'string=))
(clerk:job-fn "Test job-fn (interval as a list)"
'in
(list 5 'seconds)
#'(lambda () (print "Fire!")))
(with-slots (clerk::name) (first clerk:*jobs*)
(is clerk::name "Test job-fn (interval as a list)"
"Adds the job to the job queue. Can decipher interval as a list"
:test #'string=)))
(clerk:empty-jobs-queue)
(subtest "function (fire-job-p"
(ok (not (clerk::fire-job-p
(make-instance 'clerk:job
:interval '1.minute)))
"Job is not fired before it's time")
(ok (clerk::fire-job-p
(make-instance 'clerk:job
:interval '-1.second))
"Job is fired when the time comes"))
(clerk:empty-jobs-queue)
(subtest "defmethod (fire-job"
(let ((job-thread (clerk::fire-job
(clerk:job "One-time job" in 1.second (+ 1 2)))))
(is (bt:join-thread job-thread)
3
"The job's calculation is performed successfully"))
(is (length clerk:*jobs*) 1
"One-time jobs don't create a new job in the job queue
when they are fired.")
(clerk:empty-jobs-queue)
(clerk::fire-job
(clerk:job "Continuous job" every 1.second (+ 1 2)))
(is (length clerk:*jobs*) 2
"Continuous jobs create a new job in the job queue when
when they are fired")))
(finalize)
|
|
2bbf6d2ccb2079f94f821b9d9c542bbbde6fa3ca2c097a713ac455560f4d200e | mpickering/apply-refact | Bracket22.hs | foo (True) = 1 | null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Bracket22.hs | haskell | foo (True) = 1 |
|
d739e61749135ba71c6f6aaf3adec97c8f2e46acdf83ba0962442d1cc5607297 | freckle/stack-lint-extra-deps | Stackage.hs | # LANGUAGE TupleSections #
module Stackage
( StackageVersions(..)
, getStackageVersions
) where
import RIO
import Data.List (find)
import Network.HTTP.Simple
import Network.HTTP.Types.Status (status200)
import PackageName
import qualified RIO.Map as Map
import RIO.Text (unpack)
import qualified RIO.Text as T
import StackageResolver
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor
import Version
data StackageVersions = StackageVersions
{ svOnPage :: Version
, svOnHackage :: Version
}
getStackageVersions
:: (MonadUnliftIO m, MonadReader env m, HasLogFunc env)
=> StackageResolver
-> PackageName
-> m (Maybe StackageVersions)
getStackageVersions resolver package = do
req <-
liftIO
$ parseRequest
$ unpack
$ "/"
<> unStackageResolver resolver
<> "/package/"
<> unPackageName package
resp <- httpLBS req
let
mBody = do
guard $ getResponseStatus resp == status200
pure $ getResponseBody resp
mVersions = parseVersionsTable . fromDocument . parseLBS <$> mBody
logDebug
$ "Stackage details for "
<> display package
<> ": "
<> "\n Status: "
<> displayShow (getResponseStatus resp)
<> "\n Versions: "
<> maybe "none" displayVersions mVersions
pure $ do
versions <- mVersions
StackageVersions
<$> Map.lookup currentKey versions
<*> Map.lookup latestKey versions
parseVersionsTable :: Cursor -> Map Text Version
parseVersionsTable cursor = do
fixNightly
$ Map.fromList
$ mapMaybe (toPair . ($// content))
$ cursor
$// element "tr"
where
toPair = \case
[] -> Nothing
[_] -> Nothing
[k, v] -> (k, ) <$> parseVersion (unpack v)
[k, _, v] -> (k, ) <$> parseVersion (unpack v)
(k : v : _) -> (k, ) <$> parseVersion (unpack v)
fixNightly m =
maybe m (\(_, v) -> Map.insertWith (\_new old -> old) currentKey v m)
$ find ((nightlyPrefix `T.isPrefixOf`) . fst)
$ Map.toList m
currentKey :: Text
currentKey = "Version on this page:"
latestKey :: Text
latestKey = "Latest on Hackage:"
nightlyPrefix :: Text
nightlyPrefix = "Stackage Nightly "
displayVersions :: Map Text Version -> Utf8Builder
displayVersions = mconcat . map displayPair . Map.toList
where
displayPair (k, v) =
"\n " <> display k <> " => " <> fromString (showVersion v)
| null | https://raw.githubusercontent.com/freckle/stack-lint-extra-deps/675968214e75ec57f5e5d216ca371c1adfd72082/src/Stackage.hs | haskell | # LANGUAGE TupleSections #
module Stackage
( StackageVersions(..)
, getStackageVersions
) where
import RIO
import Data.List (find)
import Network.HTTP.Simple
import Network.HTTP.Types.Status (status200)
import PackageName
import qualified RIO.Map as Map
import RIO.Text (unpack)
import qualified RIO.Text as T
import StackageResolver
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor
import Version
data StackageVersions = StackageVersions
{ svOnPage :: Version
, svOnHackage :: Version
}
getStackageVersions
:: (MonadUnliftIO m, MonadReader env m, HasLogFunc env)
=> StackageResolver
-> PackageName
-> m (Maybe StackageVersions)
getStackageVersions resolver package = do
req <-
liftIO
$ parseRequest
$ unpack
$ "/"
<> unStackageResolver resolver
<> "/package/"
<> unPackageName package
resp <- httpLBS req
let
mBody = do
guard $ getResponseStatus resp == status200
pure $ getResponseBody resp
mVersions = parseVersionsTable . fromDocument . parseLBS <$> mBody
logDebug
$ "Stackage details for "
<> display package
<> ": "
<> "\n Status: "
<> displayShow (getResponseStatus resp)
<> "\n Versions: "
<> maybe "none" displayVersions mVersions
pure $ do
versions <- mVersions
StackageVersions
<$> Map.lookup currentKey versions
<*> Map.lookup latestKey versions
parseVersionsTable :: Cursor -> Map Text Version
parseVersionsTable cursor = do
fixNightly
$ Map.fromList
$ mapMaybe (toPair . ($// content))
$ cursor
$// element "tr"
where
toPair = \case
[] -> Nothing
[_] -> Nothing
[k, v] -> (k, ) <$> parseVersion (unpack v)
[k, _, v] -> (k, ) <$> parseVersion (unpack v)
(k : v : _) -> (k, ) <$> parseVersion (unpack v)
fixNightly m =
maybe m (\(_, v) -> Map.insertWith (\_new old -> old) currentKey v m)
$ find ((nightlyPrefix `T.isPrefixOf`) . fst)
$ Map.toList m
currentKey :: Text
currentKey = "Version on this page:"
latestKey :: Text
latestKey = "Latest on Hackage:"
nightlyPrefix :: Text
nightlyPrefix = "Stackage Nightly "
displayVersions :: Map Text Version -> Utf8Builder
displayVersions = mconcat . map displayPair . Map.toList
where
displayPair (k, v) =
"\n " <> display k <> " => " <> fromString (showVersion v)
|
|
95257f4a373228ce2a36b180edbc78c1e6ed2a670dae88a3c338d68b2fd50606 | maximedenes/native-coq | summary.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** This module registers the declaration of global tables, which will be kept
in synchronization during the various backtracks of the system. *)
type 'a summary_declaration = {
freeze_function : unit -> 'a;
unfreeze_function : 'a -> unit;
init_function : unit -> unit }
val declare_summary : string -> 'a summary_declaration -> unit
type frozen
val freeze_summaries : unit -> frozen
val unfreeze_summaries : frozen -> unit
val init_summaries : unit -> unit
* Beware : if some code is dynamically loaded via dynlink after the
initialization of Coq , the init functions of any summary declared
by this code may not be run . It is hence the responsability of
plugins to initialize themselves properly .
initialization of Coq, the init functions of any summary declared
by this code may not be run. It is hence the responsability of
plugins to initialize themselves properly.
*)
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/library/summary.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* This module registers the declaration of global tables, which will be kept
in synchronization during the various backtracks of the system. | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
type 'a summary_declaration = {
freeze_function : unit -> 'a;
unfreeze_function : 'a -> unit;
init_function : unit -> unit }
val declare_summary : string -> 'a summary_declaration -> unit
type frozen
val freeze_summaries : unit -> frozen
val unfreeze_summaries : frozen -> unit
val init_summaries : unit -> unit
* Beware : if some code is dynamically loaded via dynlink after the
initialization of Coq , the init functions of any summary declared
by this code may not be run . It is hence the responsability of
plugins to initialize themselves properly .
initialization of Coq, the init functions of any summary declared
by this code may not be run. It is hence the responsability of
plugins to initialize themselves properly.
*)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.