| _id (stringlengths 64-64) | repository (stringlengths 6-84) | name (stringlengths 4-110) | content (stringlengths 0-248k) | license (null) | download_url (stringlengths 89-454) | language (stringclasses, 7 values) | comments (stringlengths 0-74.6k) | code (stringlengths 0-248k) |
|---|---|---|---|---|---|---|---|---|
0af4d7902c0cf675846852a7bc19d873c197a71df449e504249785cc9cbb65c4 | jeffshrager/biobike | boxes-to-json.lisp | ;;;; -*- mode: Lisp; Syntax: Common-Lisp; Package: nvpl; -*-
(in-package :nvpl)
;;; +=========================================================================+
;;; | Copyright (c) 2010 |
;;; | |
;;; | Permission is hereby granted, free of charge, to any person obtaining |
;;; | a copy of this software and associated documentation files (the |
| " Software " ) , to deal in the Software without restriction , including |
;;; | without limitation the rights to use, copy, modify, merge, publish, |
;;; | distribute, sublicense, and/or sell copies of the Software, and to |
;;; | permit persons to whom the Software is furnished to do so, subject to |
;;; | the following conditions: |
;;; | |
;;; | The above copyright notice and this permission notice shall be included |
;;; | in all copies or substantial portions of the Software. |
;;; | |
;;; | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
;;; | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
;;; +=========================================================================+
;;; Author:
;;; Temporary (yeah, right) kludge to convert from the old sexp/xexp
;;; representation of boxes to json-exps. Ultimately the right thing
;;; to do is to change the snippet->boxes code to generate
;;; straight away. But since the snippet to code already exists,
;;; if we can translate from that to json in a general way, we
;;; probably have to touch less code.
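;;; Rough illustration (not from the original source): a box sexp like
;;;   (17 :JBML-B "Hello")
;;; comes out of BOXES->JSON below as a plist-style json-exp along the lines of
;;;   ("id" 17 "type" "anonymous" "modifiers" ("jbml-b" T)
;;;    "children" #(("type" "text" "value" "Hello" "modifiers" NIL)))
;;; i.e. modifier keywords collapse into a "modifiers" plist and everything
;;; else becomes a "children" vector.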
(defparameter *jbml-element-types*
'(:jbml-cr
:jbml-dotdotdot
:jbml-exec-icon
:jbml-go-icon
:jbml-close-icon
:jbml-menu-icon
:jbml-menu-entry
:jbml-icon
:jbml-delete
:jbml-clear
:jbml-clear-delete
:jbml-input-text
:jbml-hole
:jbml-hole-opened
:jbml-multiline-hole
:jbml-multiline-hole-opened
:jbml-options-menu
:jbml-options-menu2
:jbml-main-box-menu))
(defparameter *jbml-menu-types*
'(:jbml-options-menu
:jbml-options-menu2
:jbml-kf-multiselect-menu
:jbml-main-box-menu))
(defparameter *jbml-simple-modifiers*
'(:jbml-b
:jbml-i
:jbml-ul
:jbml-courier
:jbml-left-justify
:jbml-center-justify
:jbml-right-justify
:jbml-thick
:jbml-medium
:jbml-thin
:jbml-no-outline
:jbml-dotted
:jbml-dotted-blink
:jbml-button
:jbml-outdent
:jbml-dnd-drag
:jbml-dnd-no-drag
:jbml-dnd-drop
:jbml-dnd-no-drop))
(defparameter *jbml-modifiers-with-arg*
'(:jbml-box-color
:jbml-color
:jbml-background-color
:jbml-name))
(defun jbml-element-type-p (x)
(member x *jbml-element-types*))
(defun jbml-modifier-p (x)
(or (jbml-simple-modifier-p x) (jbml-modifier-with-arg-p x)))
(defun jbml-simple-modifier-p (x)
(member x *jbml-simple-modifiers*))
(defun jbml-modifier-with-arg-p (x)
(member x *jbml-modifiers-with-arg*))
(defun jbml-menu-type-p (x)
(member x *jbml-menu-types*))
(defun boxes->json (boxes)
(labels
((walk (list modifiers children)
;; (print (list 'list list 'mods modifiers 'ch children))
(cond
((null list)
`("modifiers" ,(nreverse modifiers)
"children" ,(coerce (nreverse children) 'vector)))
(t
(destructuring-bind (first . rest) list
(cond
((jbml-simple-modifier-p first)
(walk rest (list* t (string-downcase first) modifiers) children))
((jbml-modifier-with-arg-p first)
(walk
(cdr rest)
(list*
(first rest) (string-downcase first) modifiers) children))
((jbml-element-type-p first)
(walk
rest
modifiers
(cons `("type" ,(string-downcase first)) children)))
((consp first)
(walk rest modifiers (cons (boxes->json first) children)))
((or (stringp first) (symbolp first))
(multiple-value-bind (text-element rest)
(extract-text-element (string first) rest)
(walk rest modifiers (cons text-element children))))
(t
(error
"Don't know what to do with ~s which is of type ~A"
first (type-of first))))))))
(extract-text-modifiers (list modifiers)
(cond
((or (null list) (not (jbml-modifier-p (first list))))
(values (nreverse modifiers) list))
(t
(destructuring-bind (first . rest) list
(cond
((jbml-simple-modifier-p first)
(extract-text-modifiers
rest (list* t (string-downcase first) modifiers)))
((jbml-modifier-with-arg-p first)
(extract-text-modifiers
(cdr rest)
(list* (first rest) (string-downcase first) modifiers)))
(t (error "Can't get here.")))))))
(extract-text-element (first rest)
(multiple-value-bind (modifiers rest) (extract-text-modifiers rest ())
(values
`("type" "text" "value" ,first "modifiers" ,modifiers) rest)
)))
(destructuring-bind (id type . rest) boxes
(cond
((jbml-menu-type-p type)
(menu->json type boxes))
((not (jbml-element-type-p type))
;; FIXME 2010-02-23 <> -- this is a
;; kludge to deal with boxe sexps like this:
;;
;; (25553
;;  :JBML-BACKGROUND-COLOR "#ffc000"
;;  :JBML-OUTDENT
;;  :JBML-NO-OUTLINE
;;  (5407 :JBML-OPTIONS-MENU2 "More..."
;;   ((381 :JBML-MENU-ENTRY "Help")
;;    (56 :JBML-MENU-ENTRY "Add another")
;;    (61 :JBML-MENU-ENTRY "Add two more"))
;;   ("ICON" "whitearrowgreen_16x16.gif")))
;;
`("id" ,id "type" "anonymous" ,@(walk (cons type rest) nil nil)))
(t
`("id" ,id "type" ,(string-downcase type) ,@(walk rest nil nil)))))))
(defgeneric menu->json (type boxes)
(:documentation "Convert the box representation of a menu into JSON."))
(progn
(defmethod menu->json ((type (eql :jbml-options-menu)) boxes)
(normal-menu->json boxes))
(defmethod menu->json ((type (eql :jbml-main-box-menu)) boxes)
(normal-menu->json boxes))
(defmethod menu->json ((type (eql :jbml-kf-multiselect-menu)) boxes)
(destructuring-bind (id type title single-action-items multiselect-items)
boxes
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries single-action-items)
"multientries" ,(jsonify-box-menu-entries multiselect-items)
)))
(defmethod normal-menu->json (boxes)
(destructuring-bind (id type title . entries) boxes
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries entries))))
(defmethod menu->json ((type (eql :jbml-options-menu2)) boxes)
(destructuring-bind (id type title entries (icon-label icon-src)) boxes
(assert (string= icon-label "ICON"))
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries entries)
"iconSrc" ,icon-src)))
(defun jsonify-box-menu-entries (entries)
(map 'vector
#'(lambda (e)
(destructuring-bind (id type title) e
`("id", id "type",(string-downcase type) "title" ,(string title))))
entries))
;;; Written for top-level menus
(defun package-menu-into-json (data &optional (color "black"))
(destructuring-bind (id title submenus entries . rest) data
;; REVIEW 2010-02-22 <>
;; -- as far as I can tell the old XML code ignored it too
(declare (ignore rest))
`(
"id" ,id
"title" ,(string title)
"color" ,color
"submenus" ,(jsonify-submenus submenus)
"entries" ,(jsonify-menu-entries entries))))
(defun jsonify-submenus (submenus)
(coerce
(loop for m in submenus collect
(destructuring-bind (title submenus entries . rest) m
;; REVIEW 2010-02-22 <>
;; -- as far as I can tell the old XML code ignored it too
(declare (ignore rest))
`(
"title" ,(string title)
"submenus" ,(jsonify-submenus submenus)
"entries" ,(jsonify-menu-entries entries))))
'vector))
(defun jsonify-menu-entries (entries)
(coerce
(loop for e in entries collect
(destructuring-bind (title id) e
`("title" ,(string title) "id" ,id)))
'vector))
)
;;; End written for top-level menus
;;;; For debugging messages going out.
(defparameter *save-boxes-counter* 0)
(defun save-boxes (boxes &optional (type :boxes))
(with-open-file (out (format nil "/tmp/sexps/~(~a~)-~6,'0d.sexp" type (incf *save-boxes-counter*)) :direction :output :if-exists :supersede)
(with-standard-io-syntax
(print boxes out))))
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/vplcode/boxes-to-json.lisp | lisp | -*- mode: Lisp; Syntax: Common-Lisp; Package: nvpl; -*-
+=========================================================================+
| |
| Permission is hereby granted, free of charge, to any person obtaining |
| a copy of this software and associated documentation files (the |
| without limitation the rights to use, copy, modify, merge, publish, |
| the following conditions: |
| |
| The above copyright notice and this permission notice shall be included |
| |
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
+=========================================================================+
Temporary (yeah, right) kludge to convert from the old sexp/xexp
if we can translate from that to json in a general way, we
probably have to touch less code.
(print (list 'list list 'mods modifiers 'ch children))
kludge to deal with boxe sexps like this:
:JBML-BACKGROUND-COLOR "#ffc000"
:JBML-OUTDENT
:JBML-NO-OUTLINE
Written for top-level menus
End written for top-level menus
For debugging messages going out. |
(in-package :nvpl)
| Copyright ( c ) 2010 |
| " Software " ) , to deal in the Software without restriction , including |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
| in all copies or substantial portions of the Software . |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
Author :
representation of boxes to json - exps . Ultimately the right thing
to do is to change the snippet->boxes code to generate
straight away . But since the snippet to code already exists ,
(defparameter *jbml-element-types*
'(:jbml-cr
:jbml-dotdotdot
:jbml-exec-icon
:jbml-go-icon
:jbml-close-icon
:jbml-menu-icon
:jbml-menu-entry
:jbml-icon
:jbml-delete
:jbml-clear
:jbml-clear-delete
:jbml-input-text
:jbml-hole
:jbml-hole-opened
:jbml-multiline-hole
:jbml-multiline-hole-opened
:jbml-options-menu
:jbml-options-menu2
:jbml-main-box-menu))
(defparameter *jbml-menu-types*
'(:jbml-options-menu
:jbml-options-menu2
:jbml-kf-multiselect-menu
:jbml-main-box-menu))
(defparameter *jbml-simple-modifiers*
'(:jbml-b
:jbml-i
:jbml-ul
:jbml-courier
:jbml-left-justify
:jbml-center-justify
:jbml-right-justify
:jbml-thick
:jbml-medium
:jbml-thin
:jbml-no-outline
:jbml-dotted
:jbml-dotted-blink
:jbml-button
:jbml-outdent
:jbml-dnd-drag
:jbml-dnd-no-drag
:jbml-dnd-drop
:jbml-dnd-no-drop))
(defparameter *jbml-modifiers-with-arg*
'(:jbml-box-color
:jbml-color
:jbml-background-color
:jbml-name))
(defun jbml-element-type-p (x)
(member x *jbml-element-types*))
(defun jbml-modifier-p (x)
(or (jbml-simple-modifier-p x) (jbml-modifier-with-arg-p x)))
(defun jbml-simple-modifier-p (x)
(member x *jbml-simple-modifiers*))
(defun jbml-modifier-with-arg-p (x)
(member x *jbml-modifiers-with-arg*))
(defun jbml-menu-type-p (x)
(member x *jbml-menu-types*))
(defun boxes->json (boxes)
(labels
((walk (list modifiers children)
(cond
((null list)
`("modifiers" ,(nreverse modifiers)
"children" ,(coerce (nreverse children) 'vector)))
(t
(destructuring-bind (first . rest) list
(cond
((jbml-simple-modifier-p first)
(walk rest (list* t (string-downcase first) modifiers) children))
((jbml-modifier-with-arg-p first)
(walk
(cdr rest)
(list*
(first rest) (string-downcase first) modifiers) children))
((jbml-element-type-p first)
(walk
rest
modifiers
(cons `("type" ,(string-downcase first)) children)))
((consp first)
(walk rest modifiers (cons (boxes->json first) children)))
((or (stringp first) (symbolp first))
(multiple-value-bind (text-element rest)
(extract-text-element (string first) rest)
(walk rest modifiers (cons text-element children))))
(t
(error
"Don't know what to do with ~s which is of type ~A"
first (type-of first))))))))
(extract-text-modifiers (list modifiers)
(cond
((or (null list) (not (jbml-modifier-p (first list))))
(values (nreverse modifiers) list))
(t
(destructuring-bind (first . rest) list
(cond
((jbml-simple-modifier-p first)
(extract-text-modifiers
rest (list* t (string-downcase first) modifiers)))
((jbml-modifier-with-arg-p first)
(extract-text-modifiers
(cdr rest)
(list* (first rest) (string-downcase first) modifiers)))
(t (error "Can't get here.")))))))
(extract-text-element (first rest)
(multiple-value-bind (modifiers rest) (extract-text-modifiers rest ())
(values
`("type" "text" "value" ,first "modifiers" ,modifiers) rest)
)))
(destructuring-bind (id type . rest) boxes
(cond
((jbml-menu-type-p type)
(menu->json type boxes))
((not (jbml-element-type-p type))
FIXME 2010 - 02 - 23 < > -- this is a
( 25553
( 5407 : JBML - OPTIONS - MENU2 " More ... "
( ( 381 : JBML - MENU - ENTRY " Help " )
( 56 : JBML - MENU - ENTRY " Add another " )
( 61 : JBML - MENU - ENTRY " Add two more " ) )
( " ICON " " whitearrowgreen_16x16.gif " ) ) )
`("id" ,id "type" "anonymous" ,@(walk (cons type rest) nil nil)))
(t
`("id" ,id "type" ,(string-downcase type) ,@(walk rest nil nil)))))))
(defgeneric menu->json (type boxes)
(:documentation "Convert the box representation of a menu into JSON."))
(progn
(defmethod menu->json ((type (eql :jbml-options-menu)) boxes)
(normal-menu->json boxes))
(defmethod menu->json ((type (eql :jbml-main-box-menu)) boxes)
(normal-menu->json boxes))
(defmethod menu->json ((type (eql :jbml-kf-multiselect-menu)) boxes)
(destructuring-bind (id type title single-action-items multiselect-items)
boxes
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries single-action-items)
"multientries" ,(jsonify-box-menu-entries multiselect-items)
)))
(defmethod normal-menu->json (boxes)
(destructuring-bind (id type title . entries) boxes
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries entries))))
(defmethod menu->json ((type (eql :jbml-options-menu2)) boxes)
(destructuring-bind (id type title entries (icon-label icon-src)) boxes
(assert (string= icon-label "ICON"))
`(
"id" ,id
"type" ,(string-downcase type)
"title" ,(string title)
"entries" ,(jsonify-box-menu-entries entries)
"iconSrc" ,icon-src)))
(defun jsonify-box-menu-entries (entries)
(map 'vector
#'(lambda (e)
(destructuring-bind (id type title) e
`("id", id "type",(string-downcase type) "title" ,(string title))))
entries))
(defun package-menu-into-json (data &optional (color "black"))
(destructuring-bind (id title submenus entries . rest) data
REVIEW 2010 - 02 - 22 < >
-- as far as I can tell the old XML code ignored it too
(declare (ignore rest))
`(
"id" ,id
"title" ,(string title)
"color" ,color
"submenus" ,(jsonify-submenus submenus)
"entries" ,(jsonify-menu-entries entries))))
(defun jsonify-submenus (submenus)
(coerce
(loop for m in submenus collect
(destructuring-bind (title submenus entries . rest) m
REVIEW 2010 - 02 - 22 < >
-- as far as I can tell the old XML code ignored it too
(declare (ignore rest))
`(
"title" ,(string title)
"submenus" ,(jsonify-submenus submenus)
"entries" ,(jsonify-menu-entries entries))))
'vector))
(defun jsonify-menu-entries (entries)
(coerce
(loop for e in entries collect
(destructuring-bind (title id) e
`("title" ,(string title) "id" ,id)))
'vector))
)
(defparameter *save-boxes-counter* 0)
(defun save-boxes (boxes &optional (type :boxes))
(with-open-file (out (format nil "/tmp/sexps/~(~a~)-~6,'0d.sexp" type (incf *save-boxes-counter*)) :direction :output :if-exists :supersede)
(with-standard-io-syntax
(print boxes out))))
|
38efad7dcc4e93215a5f3127453adfddf6dde6fed1cd7a7106c2489bc2c5da45 | avodonosov/abcl-idea | swt9jfli-gen.lisp | (defpackage :swt-gen
(:use :common-lisp :jfli))
(in-package :swt-gen)
(def-java-class "java.lang.System")
(def-java-class "java.net.URL")
(def-java-class "java.io.File")
(def-java-class "org.eclipse.jface.action.Action")
(def-java-class "org.eclipse.jface.action.MenuManager")
(def-java-class "org.eclipse.jface.resource.ImageRegistry")
(def-java-class "org.eclipse.jface.resource.ImageDescriptor")
(def-java-class "org.eclipse.jface.window.ApplicationWindow")
(def-java-class "org.eclipse.jface.window.Window")
(def-java-class "org.eclipse.swt.widgets.Display")
(def-java-class "org.eclipse.swt.widgets.Shell")
(def-java-class "org.eclipse.swt.widgets.Button")
(def-java-class "org.eclipse.swt.widgets.Table")
(def-java-class "org.eclipse.swt.widgets.TableColumn")
(def-java-class "org.eclipse.swt.SWT")
(def-java-class "org.eclipse.swt.custom.SashForm")
(def-java-class "org.eclipse.jface.viewers.TreeViewer")
(def-java-class "org.eclipse.jface.viewers.TableViewer")
(def-java-class "org.eclipse.jface.viewers.ViewerFilter")
(def-java-class "org.eclipse.jface.viewers.ViewerSorter")
(def-java-class "org.eclipse.jface.viewers.ITreeContentProvider")
(def-java-class "org.eclipse.jface.viewers.IStructuredContentProvider")
(def-java-class "org.eclipse.jface.viewers.ISelectionChangedListener")
(def-java-class "org.eclipse.jface.viewers.IStructuredSelection")
(def-java-class "org.eclipse.jface.viewers.ITableLabelProvider")
(def-java-class "org.eclipse.jface.viewers.ILabelProvider")
(def-java-class "org.eclipse.jface.viewers.SelectionChangedEvent")
(def-java-class "org.eclipse.swt.graphics.Image")
( def - java - class " org.armedbear.lisp . LispObject " ) | null | https://raw.githubusercontent.com/avodonosov/abcl-idea/a55ab73869a0865be15980216cb51f21553d6685/src/abclidea/lisp/jfli-abcl/examples/swt/swt9jfli-gen.lisp | lisp | (defpackage :swt-gen
(:use :common-lisp :jfli))
(in-package :swt-gen)
(def-java-class "java.lang.System")
(def-java-class "java.net.URL")
(def-java-class "java.io.File")
(def-java-class "org.eclipse.jface.action.Action")
(def-java-class "org.eclipse.jface.action.MenuManager")
(def-java-class "org.eclipse.jface.resource.ImageRegistry")
(def-java-class "org.eclipse.jface.resource.ImageDescriptor")
(def-java-class "org.eclipse.jface.window.ApplicationWindow")
(def-java-class "org.eclipse.jface.window.Window")
(def-java-class "org.eclipse.swt.widgets.Display")
(def-java-class "org.eclipse.swt.widgets.Shell")
(def-java-class "org.eclipse.swt.widgets.Button")
(def-java-class "org.eclipse.swt.widgets.Table")
(def-java-class "org.eclipse.swt.widgets.TableColumn")
(def-java-class "org.eclipse.swt.SWT")
(def-java-class "org.eclipse.swt.custom.SashForm")
(def-java-class "org.eclipse.jface.viewers.TreeViewer")
(def-java-class "org.eclipse.jface.viewers.TableViewer")
(def-java-class "org.eclipse.jface.viewers.ViewerFilter")
(def-java-class "org.eclipse.jface.viewers.ViewerSorter")
(def-java-class "org.eclipse.jface.viewers.ITreeContentProvider")
(def-java-class "org.eclipse.jface.viewers.IStructuredContentProvider")
(def-java-class "org.eclipse.jface.viewers.ISelectionChangedListener")
(def-java-class "org.eclipse.jface.viewers.IStructuredSelection")
(def-java-class "org.eclipse.jface.viewers.ITableLabelProvider")
(def-java-class "org.eclipse.jface.viewers.ILabelProvider")
(def-java-class "org.eclipse.jface.viewers.SelectionChangedEvent")
(def-java-class "org.eclipse.swt.graphics.Image")
( def - java - class " org.armedbear.lisp . LispObject " ) |
|
c765c328cc35b133b723e386ccb3694966de87da015006c29db27ec340189955 | 8c6794b6/guile-tjit | describe.scm | ;;; Describe objects
;;; Copyright (C) 2001, 2009, 2011 Free Software Foundation, Inc.
;;; This library is free software; you can redistribute it and/or
;;; modify it under the terms of the GNU Lesser General Public
;;; License as published by the Free Software Foundation; either
;;; version 3 of the License, or (at your option) any later version.
;;;
;;; This library is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; Lesser General Public License for more details.
;;;
;;; You should have received a copy of the GNU Lesser General Public
;;; License along with this library; if not, write to the Free Software
;;; Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, USA
;;; Code:
(define-module (system repl describe)
#:use-module (oop goops)
#:use-module (ice-9 regex)
#:use-module (ice-9 format)
#:use-module (ice-9 and-let-star)
#:export (describe))
(define-method (describe (symbol <symbol>))
(format #t "`~s' is " symbol)
(if (not (defined? symbol))
(display "not defined in the current module.\n")
(describe-object (module-ref (current-module) symbol))))
;;;
;;; Display functions
;;;
(define (safe-class-name class)
(if (slot-bound? class 'name)
(class-name class)
class))
(define-method (display-class class . args)
(let* ((name (safe-class-name class))
(desc (if (pair? args) (car args) name)))
(if (eq? *describe-format* 'tag)
(format #t "@class{~a}{~a}" name desc)
(format #t "~a" desc))))
(define (display-list title list)
(if title (begin (display title) (display ":\n\n")))
(if (null? list)
(display "(not defined)\n")
(for-each display-summary list)))
(define (display-slot-list title instance list)
(if title (begin (display title) (display ":\n\n")))
(if (null? list)
(display "(not defined)\n")
(for-each (lambda (slot)
(let ((name (slot-definition-name slot)))
(display "Slot: ")
(display name)
(if (and instance (slot-bound? instance name))
(begin
(display " = ")
(display (slot-ref instance name))))
(newline)))
list)))
(define (display-file location)
(display "Defined in ")
(if (eq? *describe-format* 'tag)
(format #t "@location{~a}.\n" location)
(format #t "`~a'.\n" location)))
(define (format-documentation doc)
(with-current-buffer (make-buffer #:text doc)
(lambda ()
(let ((regexp (make-regexp "@([a-z]*)(\\{([^}]*)\\})?")))
(do-while (match (re-search-forward regexp))
(let ((key (string->symbol (match:substring match 1)))
(value (match:substring match 3)))
(case key
((deffnx)
(delete-region! (match:start match)
(begin (forward-line) (point))))
((var)
(replace-match! match 0 (string-upcase value)))
((code)
(replace-match! match 0 (string-append "`" value "'")))))))
(display (string (current-buffer)))
(newline))))
;;;
;;; Top
;;;
(define description-table
(list
(cons <boolean> "a boolean")
(cons <null> "an empty list")
(cons <integer> "an integer")
(cons <real> "a real number")
(cons <complex> "a complex number")
(cons <char> "a character")
(cons <symbol> "a symbol")
(cons <keyword> "a keyword")
(cons <promise> "a promise")
(cons <hook> "a hook")
(cons <fluid> "a fluid")
(cons <stack> "a stack")
(cons <variable> "a variable")
(cons <regexp> "a regexp object")
(cons <module> "a module object")
(cons <unknown> "an unknown object")))
(define-generic describe-object)
(export describe-object)
(define-method (describe-object (obj <top>))
(display-type obj)
(display-location obj)
(newline)
(display-value obj)
(newline)
(display-documentation obj))
(define-generic display-object)
(define-generic display-summary)
(define-generic display-type)
(define-generic display-value)
(define-generic display-location)
(define-generic display-description)
(define-generic display-documentation)
(export display-object display-summary display-type display-value
display-location display-description display-documentation)
(define-method (display-object (obj <top>))
(write obj))
(define-method (display-summary (obj <top>))
(display "Value: ")
(display-object obj)
(newline))
(define-method (display-type (obj <top>))
(cond
((eof-object? obj) (display "the end-of-file object"))
((unspecified? obj) (display "unspecified"))
(else (let ((class (class-of obj)))
(display-class class (or (assq-ref description-table class)
(safe-class-name class))))))
(display ".\n"))
(define-method (display-value (obj <top>))
(if (not (unspecified? obj))
(begin (display-object obj) (newline))))
(define-method (display-location (obj <top>))
*unspecified*)
(define-method (display-description (obj <top>))
(let* ((doc (with-output-to-string (lambda () (display-documentation obj))))
(index (string-index doc #\newline)))
(display (substring doc 0 (1+ index)))))
(define-method (display-documentation (obj <top>))
(display "Not documented.\n"))
;;;
;;; Pairs
;;;
(define-method (display-type (obj <pair>))
(cond
((list? obj) (display-class <list> "a list"))
((pair? (cdr obj)) (display "an improper list"))
(else (display-class <pair> "a pair")))
(display ".\n"))
;;;
;;; Strings
;;;
(define-method (display-type (obj <string>))
(if (read-only-string? 'obj)
(display "a read-only string")
(display-class <string> "a string"))
(display ".\n"))
;;;
;;; Procedures
;;;
(define-method (display-object (obj <procedure>))
(cond
;; FIXME: VM programs, ...
(else
;; Primitive procedure. Let's lookup the dictionary.
(and-let* ((entry (lookup-procedure obj)))
(let ((name (entry-property entry 'name))
(print-arg (lambda (arg)
(display " ")
(display (string-upcase (symbol->string arg))))))
(display "(")
(display name)
(and-let* ((args (entry-property entry 'args)))
(for-each print-arg args))
(and-let* ((opts (entry-property entry 'opts)))
(display " &optional")
(for-each print-arg opts))
(and-let* ((rest (entry-property entry 'rest)))
(display " &rest")
(print-arg rest))
(display ")"))))))
(define-method (display-summary (obj <procedure>))
(display "Procedure: ")
(display-object obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <procedure>))
(cond
((and (thunk? obj) (not (procedure-name obj))) (display "a thunk"))
((procedure-with-setter? obj)
(display-class <procedure-with-setter> "a procedure with setter"))
(else (display-class <procedure> "a procedure")))
(display ".\n"))
(define-method (display-location (obj <procedure>))
(and-let* ((entry (lookup-procedure obj)))
(display-file (entry-file entry))))
(define-method (display-documentation (obj <procedure>))
(cond ((or (procedure-documentation obj)
(and=> (lookup-procedure obj) entry-text))
=> format-documentation)
(else (next-method))))
;;;
;;; Classes
;;;
(define-method (describe-object (obj <class>))
(display-type obj)
(display-location obj)
(newline)
(display-documentation obj)
(newline)
(display-value obj))
(define-method (display-summary (obj <class>))
(display "Class: ")
(display-class obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <class>))
(display-class <class> "a class")
(if (not (eq? (class-of obj) <class>))
(begin (display " of ") (display-class (class-of obj))))
(display ".\n"))
(define-method (display-value (obj <class>))
(display-list "Class precedence list" (class-precedence-list obj))
(newline)
(display-list "Direct superclasses" (class-direct-supers obj))
(newline)
(display-list "Direct subclasses" (class-direct-subclasses obj))
(newline)
(display-slot-list "Direct slots" #f (class-direct-slots obj))
(newline)
(display-list "Direct methods" (class-direct-methods obj)))
;;;
;;; Instances
;;;
(define-method (display-type (obj <object>))
(display-class <object> "an instance")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-value (obj <object>))
(display-slot-list #f obj (class-slots (class-of obj))))
;;;
;;; Generic functions
;;;
(define-method (display-type (obj <generic>))
(display-class <generic> "a generic function")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-value (obj <generic>))
(display-list #f (generic-function-methods obj)))
;;;
;;; Methods
;;;
(define-method (display-object (obj <method>))
(display "(")
(let ((gf (method-generic-function obj)))
(display (if gf (generic-function-name gf) "#<anonymous>")))
(let loop ((args (method-specializers obj)))
(cond
((null? args))
((pair? args)
(display " ")
(display-class (car args))
(loop (cdr args)))
(else (display " . ") (display-class args))))
(display ")"))
(define-method (display-summary (obj <method>))
(display "Method: ")
(display-object obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <method>))
(display-class <method> "a method")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-documentation (obj <method>))
(let ((doc (procedure-documentation (method-procedure obj))))
(if doc (format-documentation doc) (next-method))))
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/system/repl/describe.scm | scheme | Describe objects
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Code:
Display functions
Top
Pairs
Strings
Procedures
Primitive procedure. Let's lookup the dictionary.
Classes
Instances
Methods
|
Copyright ( C ) 2001 , 2009 , 2011 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
(define-module (system repl describe)
#:use-module (oop goops)
#:use-module (ice-9 regex)
#:use-module (ice-9 format)
#:use-module (ice-9 and-let-star)
#:export (describe))
(define-method (describe (symbol <symbol>))
(format #t "`~s' is " symbol)
(if (not (defined? symbol))
(display "not defined in the current module.\n")
(describe-object (module-ref (current-module) symbol))))
(define (safe-class-name class)
(if (slot-bound? class 'name)
(class-name class)
class))
(define-method (display-class class . args)
(let* ((name (safe-class-name class))
(desc (if (pair? args) (car args) name)))
(if (eq? *describe-format* 'tag)
(format #t "@class{~a}{~a}" name desc)
(format #t "~a" desc))))
(define (display-list title list)
(if title (begin (display title) (display ":\n\n")))
(if (null? list)
(display "(not defined)\n")
(for-each display-summary list)))
(define (display-slot-list title instance list)
(if title (begin (display title) (display ":\n\n")))
(if (null? list)
(display "(not defined)\n")
(for-each (lambda (slot)
(let ((name (slot-definition-name slot)))
(display "Slot: ")
(display name)
(if (and instance (slot-bound? instance name))
(begin
(display " = ")
(display (slot-ref instance name))))
(newline)))
list)))
(define (display-file location)
(display "Defined in ")
(if (eq? *describe-format* 'tag)
(format #t "@location{~a}.\n" location)
(format #t "`~a'.\n" location)))
(define (format-documentation doc)
(with-current-buffer (make-buffer #:text doc)
(lambda ()
(let ((regexp (make-regexp "@([a-z]*)(\\{([^}]*)\\})?")))
(do-while (match (re-search-forward regexp))
(let ((key (string->symbol (match:substring match 1)))
(value (match:substring match 3)))
(case key
((deffnx)
(delete-region! (match:start match)
(begin (forward-line) (point))))
((var)
(replace-match! match 0 (string-upcase value)))
((code)
(replace-match! match 0 (string-append "`" value "'")))))))
(display (string (current-buffer)))
(newline))))
(define description-table
(list
(cons <boolean> "a boolean")
(cons <null> "an empty list")
(cons <integer> "an integer")
(cons <real> "a real number")
(cons <complex> "a complex number")
(cons <char> "a character")
(cons <symbol> "a symbol")
(cons <keyword> "a keyword")
(cons <promise> "a promise")
(cons <hook> "a hook")
(cons <fluid> "a fluid")
(cons <stack> "a stack")
(cons <variable> "a variable")
(cons <regexp> "a regexp object")
(cons <module> "a module object")
(cons <unknown> "an unknown object")))
(define-generic describe-object)
(export describe-object)
(define-method (describe-object (obj <top>))
(display-type obj)
(display-location obj)
(newline)
(display-value obj)
(newline)
(display-documentation obj))
(define-generic display-object)
(define-generic display-summary)
(define-generic display-type)
(define-generic display-value)
(define-generic display-location)
(define-generic display-description)
(define-generic display-documentation)
(export display-object display-summary display-type display-value
display-location display-description display-documentation)
(define-method (display-object (obj <top>))
(write obj))
(define-method (display-summary (obj <top>))
(display "Value: ")
(display-object obj)
(newline))
(define-method (display-type (obj <top>))
(cond
((eof-object? obj) (display "the end-of-file object"))
((unspecified? obj) (display "unspecified"))
(else (let ((class (class-of obj)))
(display-class class (or (assq-ref description-table class)
(safe-class-name class))))))
(display ".\n"))
(define-method (display-value (obj <top>))
(if (not (unspecified? obj))
(begin (display-object obj) (newline))))
(define-method (display-location (obj <top>))
*unspecified*)
(define-method (display-description (obj <top>))
(let* ((doc (with-output-to-string (lambda () (display-documentation obj))))
(index (string-index doc #\newline)))
(display (substring doc 0 (1+ index)))))
(define-method (display-documentation (obj <top>))
(display "Not documented.\n"))
(define-method (display-type (obj <pair>))
(cond
((list? obj) (display-class <list> "a list"))
((pair? (cdr obj)) (display "an improper list"))
(else (display-class <pair> "a pair")))
(display ".\n"))
(define-method (display-type (obj <string>))
(if (read-only-string? 'obj)
(display "a read-only string")
(display-class <string> "a string"))
(display ".\n"))
(define-method (display-object (obj <procedure>))
(cond
FIXME : VM programs , ...
(else
(and-let* ((entry (lookup-procedure obj)))
(let ((name (entry-property entry 'name))
(print-arg (lambda (arg)
(display " ")
(display (string-upcase (symbol->string arg))))))
(display "(")
(display name)
(and-let* ((args (entry-property entry 'args)))
(for-each print-arg args))
(and-let* ((opts (entry-property entry 'opts)))
(display " &optional")
(for-each print-arg opts))
(and-let* ((rest (entry-property entry 'rest)))
(display " &rest")
(print-arg rest))
(display ")"))))))
(define-method (display-summary (obj <procedure>))
(display "Procedure: ")
(display-object obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <procedure>))
(cond
((and (thunk? obj) (not (procedure-name obj))) (display "a thunk"))
((procedure-with-setter? obj)
(display-class <procedure-with-setter> "a procedure with setter"))
(else (display-class <procedure> "a procedure")))
(display ".\n"))
(define-method (display-location (obj <procedure>))
(and-let* ((entry (lookup-procedure obj)))
(display-file (entry-file entry))))
(define-method (display-documentation (obj <procedure>))
(cond ((or (procedure-documentation obj)
(and=> (lookup-procedure obj) entry-text))
=> format-documentation)
(else (next-method))))
(define-method (describe-object (obj <class>))
(display-type obj)
(display-location obj)
(newline)
(display-documentation obj)
(newline)
(display-value obj))
(define-method (display-summary (obj <class>))
(display "Class: ")
(display-class obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <class>))
(display-class <class> "a class")
(if (not (eq? (class-of obj) <class>))
(begin (display " of ") (display-class (class-of obj))))
(display ".\n"))
(define-method (display-value (obj <class>))
(display-list "Class precedence list" (class-precedence-list obj))
(newline)
(display-list "Direct superclasses" (class-direct-supers obj))
(newline)
(display-list "Direct subclasses" (class-direct-subclasses obj))
(newline)
(display-slot-list "Direct slots" #f (class-direct-slots obj))
(newline)
(display-list "Direct methods" (class-direct-methods obj)))
(define-method (display-type (obj <object>))
(display-class <object> "an instance")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-value (obj <object>))
(display-slot-list #f obj (class-slots (class-of obj))))
Generic functions
(define-method (display-type (obj <generic>))
(display-class <generic> "a generic function")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-value (obj <generic>))
(display-list #f (generic-function-methods obj)))
(define-method (display-object (obj <method>))
(display "(")
(let ((gf (method-generic-function obj)))
(display (if gf (generic-function-name gf) "#<anonymous>")))
(let loop ((args (method-specializers obj)))
(cond
((null? args))
((pair? args)
(display " ")
(display-class (car args))
(loop (cdr args)))
(else (display " . ") (display-class args))))
(display ")"))
(define-method (display-summary (obj <method>))
(display "Method: ")
(display-object obj)
(newline)
(display " ")
(display-description obj))
(define-method (display-type (obj <method>))
(display-class <method> "a method")
(display " of class ")
(display-class (class-of obj))
(display ".\n"))
(define-method (display-documentation (obj <method>))
(let ((doc (procedure-documentation (method-procedure obj))))
(if doc (format-documentation doc) (next-method))))
|
bcf334724fcdb34001935672630a05e1a4de199a053a6fac2b57f1df57a04ad5 | alanz/ghc-exactprint | overloadedrecfldsfail10.hs | -- Modules A and B both declare F(foo)
-- Module C declares F($sel:foo:) but exports A.F(foo) as well
-- Thus we can't export F(..) even with DuplicateRecordFields enabled
{-# LANGUAGE DuplicateRecordFields #-}
module Main (main, F(..)) where
import OverloadedRecFldsFail10_B
import OverloadedRecFldsFail10_C
main = return ()
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/overloadedrecfldsfail10.hs | haskell | Modules A and B both declare F(foo )
Module C declares F($sel : foo : ) but exports A.F(foo ) as well
Thus we ca n't export F ( .. ) even with DuplicateRecordFields enabled
# LANGUAGE DuplicateRecordFields #
module Main (main, F(..)) where
import OverloadedRecFldsFail10_B
import OverloadedRecFldsFail10_C
main = return ()
|
|
2da898a0a48e65b271505beb0312b68983cf861cbed27c2d604cbce3ba76e67b | antono/guix-debian | acl.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages acl)
#:use-module (guix licenses)
#:use-module (gnu packages attr)
#:use-module (gnu packages perl)
#:use-module (gnu packages gettext)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public acl
(package
(name "acl")
(version "2.2.51")
(source
(origin
(method url-fetch)
(uri (string-append "mirror-"
version ".src.tar.gz"))
(sha256
(base32
"09aj30m49ivycl3irram8c3givc0crivjm3ymw0nhfaxrwhlb186"))))
(build-system gnu-build-system)
(arguments
`(#:phases
(alist-cons-after
'configure 'patch-makefile-SHELL
(lambda _
(patch-makefile-SHELL "include/buildmacros"))
,(if (%current-target-system)
'%standard-phases
'(alist-replace 'check
(lambda _
(system* "make" "tests" "-C" "test")
;; XXX: Ignore the test result since this is
;; dependent on the underlying file system.
#t)
%standard-phases)))))
(inputs `(("attr" ,attr)
;; Perl is needed to run tests; remove it from cross builds.
,@(if (%current-target-system)
'()
`(("perl" ,perl)))))
(native-inputs
`(("gettext" ,gnu-gettext)))
(home-page
"")
(synopsis
"Library and tools for manipulating access control lists")
(description
"Library and tools for manipulating access control lists.")
(license (list gpl2+ lgpl2.1+))))
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/gnu/packages/acl.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
XXX: Ignore the test result since this is
dependent on the underlying file system.
Perl is needed to run tests; remove it from cross builds. | Copyright © 2012 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages acl)
#:use-module (guix licenses)
#:use-module (gnu packages attr)
#:use-module (gnu packages perl)
#:use-module (gnu packages gettext)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public acl
(package
(name "acl")
(version "2.2.51")
(source
(origin
(method url-fetch)
(uri (string-append "mirror-"
version ".src.tar.gz"))
(sha256
(base32
"09aj30m49ivycl3irram8c3givc0crivjm3ymw0nhfaxrwhlb186"))))
(build-system gnu-build-system)
(arguments
`(#:phases
(alist-cons-after
'configure 'patch-makefile-SHELL
(lambda _
(patch-makefile-SHELL "include/buildmacros"))
,(if (%current-target-system)
'%standard-phases
'(alist-replace 'check
(lambda _
(system* "make" "tests" "-C" "test")
#t)
%standard-phases)))))
(inputs `(("attr" ,attr)
,@(if (%current-target-system)
'()
`(("perl" ,perl)))))
(native-inputs
`(("gettext" ,gnu-gettext)))
(home-page
"")
(synopsis
"Library and tools for manipulating access control lists")
(description
"Library and tools for manipulating access control lists.")
(license (list gpl2+ lgpl2.1+))))
|
5f8a7df4e59f5fb26b6dd0aeda125e55eb4cf0d4992e6cfd7325b3502db22a2c | RichiH/git-annex | UUIDBased.hs | {- git-annex uuid-based logs
-
- This is used to store information about UUIDs in a way that can
 - be union merged.
-
 - A line of the log will look like: "UUID[ INFO[ timestamp=foo]]"
 - The timestamp is last for backwards compatability reasons,
 - and may not be present on old log lines.
-
 - New uuid based logs instead use the form: "timestamp UUID INFO"
-
 - Copyright 2011-2013 Joey Hess <>
-
 - Licensed under the GNU GPL version 3 or higher.
-
- This is used to store information about UUIDs in a way that can
- be union merged.
-
- A line of the log will look like: "UUID[ INFO[ timestamp=foo]]"
- The timestamp is last for backwards compatability reasons,
- and may not be present on old log lines.
-
- New uuid based logs instead use the form: "timestamp UUID INFO"
-
- Copyright 2011-2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Logs.UUIDBased (
Log,
LogEntry(..),
VectorClock,
currentVectorClock,
parseLog,
parseLogNew,
parseLogWithUUID,
showLog,
showLogNew,
changeLog,
addLog,
simpleMap,
) where
import qualified Data.Map as M
import Common
import Types.UUID
import Annex.VectorClock
import Logs.MapLog
import Logs.Line
type Log v = MapLog UUID v
showLog :: (v -> String) -> Log v -> String
showLog shower = unlines . map showpair . M.toList
where
showpair (k, LogEntry (VectorClock c) v) =
unwords [fromUUID k, shower v, tskey ++ show c]
showpair (k, LogEntry Unknown v) =
unwords [fromUUID k, shower v]
parseLog :: (String -> Maybe a) -> String -> Log a
parseLog = parseLogWithUUID . const
parseLogWithUUID :: (UUID -> String -> Maybe a) -> String -> Log a
parseLogWithUUID parser = M.fromListWith best . mapMaybe parse . splitLines
where
parse line
-- This is a workaround for a bug that caused
NoUUID items to be stored in the log .
-- It can be removed at any time; is just here to clean
-- up logs where that happened temporarily.
| " " `isPrefixOf` line = Nothing
| null ws = Nothing
| otherwise = parser u (unwords info) >>= makepair
where
makepair v = Just (u, LogEntry ts v)
ws = words line
u = toUUID $ Prelude.head ws
t = Prelude.last ws
ts
| tskey `isPrefixOf` t = fromMaybe Unknown $
parseVectorClock $ drop 1 $ dropWhile (/= '=') t
| otherwise = Unknown
info
| ts == Unknown = drop 1 ws
| otherwise = drop 1 $ beginning ws
showLogNew :: (v -> String) -> Log v -> String
showLogNew = showMapLog fromUUID
parseLogNew :: (String -> Maybe v) -> String -> Log v
parseLogNew = parseMapLog (Just . toUUID)
changeLog :: VectorClock -> UUID -> v -> Log v -> Log v
changeLog = changeMapLog
addLog :: UUID -> LogEntry v -> Log v -> Log v
addLog = addMapLog
tskey :: String
tskey = "timestamp="
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Logs/UUIDBased.hs | haskell | This is a workaround for a bug that caused
It can be removed at any time; is just here to clean
up logs where that happened temporarily. | git - annex uuid - based logs
-
- This is used to store information about UUIDs in a way that can
- be union merged .
-
- A line of the log will look like : " UUID [ INFO [ timestamp = foo ] ] "
- The timestamp is last for backwards compatability reasons ,
- and may not be present on old log lines .
-
- New uuid based logs instead use the form : " timestamp UUID INFO "
-
- Copyright 2011 - 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- This is used to store information about UUIDs in a way that can
- be union merged.
-
- A line of the log will look like: "UUID[ INFO[ timestamp=foo]]"
- The timestamp is last for backwards compatability reasons,
- and may not be present on old log lines.
-
- New uuid based logs instead use the form: "timestamp UUID INFO"
-
- Copyright 2011-2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Logs.UUIDBased (
Log,
LogEntry(..),
VectorClock,
currentVectorClock,
parseLog,
parseLogNew,
parseLogWithUUID,
showLog,
showLogNew,
changeLog,
addLog,
simpleMap,
) where
import qualified Data.Map as M
import Common
import Types.UUID
import Annex.VectorClock
import Logs.MapLog
import Logs.Line
type Log v = MapLog UUID v
showLog :: (v -> String) -> Log v -> String
showLog shower = unlines . map showpair . M.toList
where
showpair (k, LogEntry (VectorClock c) v) =
unwords [fromUUID k, shower v, tskey ++ show c]
showpair (k, LogEntry Unknown v) =
unwords [fromUUID k, shower v]
parseLog :: (String -> Maybe a) -> String -> Log a
parseLog = parseLogWithUUID . const
parseLogWithUUID :: (UUID -> String -> Maybe a) -> String -> Log a
parseLogWithUUID parser = M.fromListWith best . mapMaybe parse . splitLines
where
parse line
NoUUID items to be stored in the log .
| " " `isPrefixOf` line = Nothing
| null ws = Nothing
| otherwise = parser u (unwords info) >>= makepair
where
makepair v = Just (u, LogEntry ts v)
ws = words line
u = toUUID $ Prelude.head ws
t = Prelude.last ws
ts
| tskey `isPrefixOf` t = fromMaybe Unknown $
parseVectorClock $ drop 1 $ dropWhile (/= '=') t
| otherwise = Unknown
info
| ts == Unknown = drop 1 ws
| otherwise = drop 1 $ beginning ws
showLogNew :: (v -> String) -> Log v -> String
showLogNew = showMapLog fromUUID
parseLogNew :: (String -> Maybe v) -> String -> Log v
parseLogNew = parseMapLog (Just . toUUID)
changeLog :: VectorClock -> UUID -> v -> Log v -> Log v
changeLog = changeMapLog
addLog :: UUID -> LogEntry v -> Log v -> Log v
addLog = addMapLog
tskey :: String
tskey = "timestamp="
|
a49b5408580b0d7f119eaa55aa2dbf18ad827bdf754e35ea3c47ccd026ee30c6 | alanz/ghc-exactprint | T11164.hs | module T11164 where
import T11164b (T)
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/T11164.hs | haskell | module T11164 where
import T11164b (T)
|
|
d6e774753bc0875d7929e4a6761ee321118bf65bbab8c907648187f98de41502 | kamek-pf/ntfd | Weather.hs | module Spec.Stores.Weather where
import Test.Hspec
import Data.Either (isRight)
import qualified Data.Text.IO as Txt
import Spec.Helpers (defaultWeatherCfg)
import Stores.Weather (Store(..), WeatherClient)
spec :: IO ()
spec = hspec $ describe "Weather store" $ it "should synchronize with OpenWeatherMap" $ do
weatherConfig <- defaultWeatherCfg
(client :: WeatherClient) <- initWeatherStore weatherConfig
synced <- syncForecast client
synced `shouldSatisfy` isRight
rendered <- getRenderedTemplate client
let Right r = rendered
Txt.putStrLn $ "Rendered template: " <> r
| null | https://raw.githubusercontent.com/kamek-pf/ntfd/d297a59339b3310a62341ffa9c378180c578dbce/test/Spec/Stores/Weather.hs | haskell | module Spec.Stores.Weather where
import Test.Hspec
import Data.Either (isRight)
import qualified Data.Text.IO as Txt
import Spec.Helpers (defaultWeatherCfg)
import Stores.Weather (Store(..), WeatherClient)
spec :: IO ()
spec = hspec $ describe "Weather store" $ it "should synchronize with OpenWeatherMap" $ do
weatherConfig <- defaultWeatherCfg
(client :: WeatherClient) <- initWeatherStore weatherConfig
synced <- syncForecast client
synced `shouldSatisfy` isRight
rendered <- getRenderedTemplate client
let Right r = rendered
Txt.putStrLn $ "Rendered template: " <> r
|
|
4ddff932744d31df2968ebc3aa91d30ec0dc99892134b0591f9072268b70f1bd | LighghtEeloo/magic-in-ten-mins-ml | Continuation.ml | module Cont = struct
let demo () =
let i = ref 1 in
i := !i + 1;
Printf.printf "%d\n" !i
let _cont2 i =
i := !i + 1;
Printf.printf "%d\n" !i
let _cont3 i =
Printf.printf "%d\n" !i
let rec cont1 () =
let i = ref 1 in
cont2 i
and cont2 i =
i := !i + 1;
cont3 i
and cont3 i =
Printf.printf "%d\n" !i
let demo_cont () =
cont1 ()
end
module CPS = struct
let logic1 f =
let i = ref 1 in
f i
let logic2 i f =
i := !i + 1;
f i
let logic3 i f =
Printf.printf "%d\n" !i;
f i
let demo_cps () =
logic1 ( (* retrieve the return value i *)
fun i -> logic2 i (
fun i -> logic3 i (
fun _i -> ())))
end
module DelimitedCont = struct
let call_t () =
CPS.demo_cps ();
Printf.printf "3\n"
end
module TryThrow = struct
(* try and else is OCaml keyword so (as usual) we'll append `_` *)
(* A type safe version of try_throw *)
type ('r, 'e, 'o) body = ('e, 'o) throw -> ('r, 'o) else_ -> 'o final -> 'o
and ('e, 'o) throw = 'e -> 'o final -> 'o
and ('r, 'o) else_ = 'r -> 'o final -> 'o
and 'o final = 'o -> 'o
type ('r, 'e, 'o) try_throw = {
body : ('r, 'e, 'o) body;
throw : ('e, 'o) throw;
else_ : ('r, 'o) else_;
final : 'o final;
}
let try_ { body; throw; else_; final } =
body throw else_ final
type div_with_zero = unit
let div_with_zero = ()
let try_div (a: int) (b: int): int option =
try_ {
body = (fun throw else_ final -> (
Printf.printf "try\n";
if b = 0 then (throw div_with_zero final) else (else_ (a / b) final)
));
throw = (fun () final -> (
Printf.printf "caught\n";
final None
));
else_ = (fun i final -> (
Printf.printf "else: %d\n" i;
final (Some i)
));
final = (fun o -> (
Printf.printf "final\n";
o
));
}
end
module Test = Utils.MakeTest(struct
let name = "Continuation"
let aloud = false
let test () =
let open TryThrow in
if aloud then
assert begin
try_div 4 0 = None
&& try_div 4 2 = Some 2
end;
()
end)
| null | https://raw.githubusercontent.com/LighghtEeloo/magic-in-ten-mins-ml/5e576d6c144d230f71666ea01e8307ce0020e72d/Paradigms/Continuation.ml | ocaml | retrieve the return value i
try and else is OCaml keyword so (as usual) we'll append `_`
A type safe version of try_throw | module Cont = struct
let demo () =
let i = ref 1 in
i := !i + 1;
Printf.printf "%d\n" !i
let _cont2 i =
i := !i + 1;
Printf.printf "%d\n" !i
let _cont3 i =
Printf.printf "%d\n" !i
let rec cont1 () =
let i = ref 1 in
cont2 i
and cont2 i =
i := !i + 1;
cont3 i
and cont3 i =
Printf.printf "%d\n" !i
let demo_cont () =
cont1 ()
end
module CPS = struct
let logic1 f =
let i = ref 1 in
f i
let logic2 i f =
i := !i + 1;
f i
let logic3 i f =
Printf.printf "%d\n" !i;
f i
let demo_cps () =
fun i -> logic2 i (
fun i -> logic3 i (
fun _i -> ())))
end
module DelimitedCont = struct
let call_t () =
CPS.demo_cps ();
Printf.printf "3\n"
end
module TryThrow = struct
type ('r, 'e, 'o) body = ('e, 'o) throw -> ('r, 'o) else_ -> 'o final -> 'o
and ('e, 'o) throw = 'e -> 'o final -> 'o
and ('r, 'o) else_ = 'r -> 'o final -> 'o
and 'o final = 'o -> 'o
type ('r, 'e, 'o) try_throw = {
body : ('r, 'e, 'o) body;
throw : ('e, 'o) throw;
else_ : ('r, 'o) else_;
final : 'o final;
}
let try_ { body; throw; else_; final } =
body throw else_ final
type div_with_zero = unit
let div_with_zero = ()
let try_div (a: int) (b: int): int option =
try_ {
body = (fun throw else_ final -> (
Printf.printf "try\n";
if b = 0 then (throw div_with_zero final) else (else_ (a / b) final)
));
throw = (fun () final -> (
Printf.printf "caught\n";
final None
));
else_ = (fun i final -> (
Printf.printf "else: %d\n" i;
final (Some i)
));
final = (fun o -> (
Printf.printf "final\n";
o
));
}
end
module Test = Utils.MakeTest(struct
let name = "Continuation"
let aloud = false
let test () =
let open TryThrow in
if aloud then
assert begin
try_div 4 0 = None
&& try_div 4 2 = Some 2
end;
()
end)
|
14f479caa83771e1c71b0c05b116d40741a347207a32c10ac98c655694d938ca | alexandergunnarson/quantum | spec.cljc | (ns quantum.test.core.spec
(:require [quantum.core.spec :as ns]))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/test/quantum/test/core/spec.cljc | clojure | (ns quantum.test.core.spec
(:require [quantum.core.spec :as ns]))
|
|
983d99f2c3929542a3f5d6fa930416b7535c10d50752114dd01ba14099f065dd | rnons/ted2srt | Talk.hs | module Models.Talk where
import Control.Monad (liftM, mzero, void)
import Data.Aeson
import qualified Data.ByteString.Char8 as C
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Read as T
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Database.Persist
import qualified Database.Redis as KV
import GHC.Generics (Generic)
import Network.HTTP.Client.Conduit (HttpException, httpLbs,
parseUrlThrow, responseBody)
import RIO hiding (id)
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor (fromDocument)
import Types (AppRIO, runDB)
import Config (Config (..))
import Model
import qualified Models.RedisKeys as Keys
import Models.Types (mkTalkUrl)
import Web.TED.TalkPage (parseDescription, parseImage,
parseTalkObject)
data TalkObj = TalkObj
{ id :: Int
, name :: Text
, slug :: Text
, filmedAt :: UTCTime
, publishedAt :: UTCTime
, languages :: [Language]
, mediaSlug :: Text
} deriving (Generic)
instance FromJSON TalkObj where
parseJSON (Object v) = do
idText <- v .: "id"
tid <- case fst <$> T.decimal idText of
Right tid -> pure tid
_ -> fail "id is not int"
TalkObj
<$> pure tid
<*> v .: "name"
<*> v .: "slug"
<*> liftM posixSecondsToUTCTime (v .: "published")
<*> liftM posixSecondsToUTCTime (v .: "published")
<*> v .: "languages"
<*> v .: "mediaIdentifier"
parseJSON _ = mzero
getTalks :: Int -> Int -> AppRIO [Entity Talk]
getTalks offset limit = do
runDB $ selectList []
[ Desc TalkId
, LimitTo limit
, OffsetBy offset
]
getTalk :: Int -> Text -> AppRIO (Maybe (Entity Talk))
getTalk tid url = do
Config { kvConn } <- ask
cached <- liftIO $ KV.runRedis kvConn $
KV.get $ Keys.cache $ fromIntegral tid
case cached of
Right (Just _) -> getTalkById tid (Just url)
Right Nothing -> saveToDB url
Left _ -> saveToDB url
getTalkById :: Int -> Maybe Text -> AppRIO (Maybe (Entity Talk))
getTalkById tid mUrl = do
xs <- runDB $ getEntity $ TalkKey tid
case xs of
Just talk -> return $ Just talk
_ -> maybe (return Nothing) saveToDB mUrl
hush :: Either a b -> Maybe b
hush (Left _) = Nothing
hush (Right v) = Just v
getTalkBySlug :: Text -> AppRIO (Maybe (Entity Talk))
getTalkBySlug slug = do
Config { kvConn } <- ask
mtid <- liftIO $ fmap (join . hush) <$> KV.runRedis kvConn $ KV.get $ Keys.slug slug
case mtid of
Just tid ->
case readMaybe $ C.unpack tid of
Just tid' -> getTalk tid' url
Nothing -> pure Nothing
Nothing ->
saveToDB url
where
url = mkTalkUrl slug
saveToDB :: Text -> AppRIO (Maybe (Entity Talk))
saveToDB url = do
Config{..} <- ask
mTalk <- fetchTalk url
case mTalk of
Just entity@(Entity talkId talk) -> do
void $ liftIO $ KV.runRedis kvConn $ KV.multiExec $ do
void $ KV.setex (Keys.cache $ unTalkKey talkId) (3600*24) ""
KV.set (Keys.slug $ talkSlug talk) (C.pack $ show $ unTalkKey talkId)
runDB $ repsert talkId talk
return $ Just entity
Nothing -> return Nothing
fetchTalk :: Text -> AppRIO (Maybe (Entity Talk))
fetchTalk url = do
handle (\(_::HttpException) -> return Nothing) $ do
req <- parseUrlThrow $ T.unpack url
res <- httpLbs req
let
body = responseBody res
cursor = fromDocument $ parseLBS body
desc = parseDescription cursor
img = parseImage cursor
core = parseTalkObject body
case eitherDecode core of
Right TalkObj{..} -> do
return $ Just $ Entity (TalkKey $ fromIntegral id) (Talk
{ talkName = name
, talkSlug = slug
, talkFilmedAt = filmedAt
, talkPublishedAt = publishedAt
, talkDescription = desc
, talkImage = img
, talkLanguages = toJSON languages
, talkMediaSlug = mediaSlug
, talkMediaPad = 0.0
})
Left err -> do
logErrorS "fetchTalk" $ fromString err
pure Nothing
| null | https://raw.githubusercontent.com/rnons/ted2srt/7456f109bce2b9f07d0c929bef2fd42e6bc4f75d/backend/src/Models/Talk.hs | haskell | module Models.Talk where
import Control.Monad (liftM, mzero, void)
import Data.Aeson
import qualified Data.ByteString.Char8 as C
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Read as T
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Database.Persist
import qualified Database.Redis as KV
import GHC.Generics (Generic)
import Network.HTTP.Client.Conduit (HttpException, httpLbs,
parseUrlThrow, responseBody)
import RIO hiding (id)
import Text.HTML.DOM (parseLBS)
import Text.XML.Cursor (fromDocument)
import Types (AppRIO, runDB)
import Config (Config (..))
import Model
import qualified Models.RedisKeys as Keys
import Models.Types (mkTalkUrl)
import Web.TED.TalkPage (parseDescription, parseImage,
parseTalkObject)
data TalkObj = TalkObj
{ id :: Int
, name :: Text
, slug :: Text
, filmedAt :: UTCTime
, publishedAt :: UTCTime
, languages :: [Language]
, mediaSlug :: Text
} deriving (Generic)
instance FromJSON TalkObj where
parseJSON (Object v) = do
idText <- v .: "id"
tid <- case fst <$> T.decimal idText of
Right tid -> pure tid
_ -> fail "id is not int"
TalkObj
<$> pure tid
<*> v .: "name"
<*> v .: "slug"
<*> liftM posixSecondsToUTCTime (v .: "published")
<*> liftM posixSecondsToUTCTime (v .: "published")
<*> v .: "languages"
<*> v .: "mediaIdentifier"
parseJSON _ = mzero
getTalks :: Int -> Int -> AppRIO [Entity Talk]
getTalks offset limit = do
runDB $ selectList []
[ Desc TalkId
, LimitTo limit
, OffsetBy offset
]
getTalk :: Int -> Text -> AppRIO (Maybe (Entity Talk))
getTalk tid url = do
Config { kvConn } <- ask
cached <- liftIO $ KV.runRedis kvConn $
KV.get $ Keys.cache $ fromIntegral tid
case cached of
Right (Just _) -> getTalkById tid (Just url)
Right Nothing -> saveToDB url
Left _ -> saveToDB url
getTalkById :: Int -> Maybe Text -> AppRIO (Maybe (Entity Talk))
getTalkById tid mUrl = do
xs <- runDB $ getEntity $ TalkKey tid
case xs of
Just talk -> return $ Just talk
_ -> maybe (return Nothing) saveToDB mUrl
hush :: Either a b -> Maybe b
hush (Left _) = Nothing
hush (Right v) = Just v
getTalkBySlug :: Text -> AppRIO (Maybe (Entity Talk))
getTalkBySlug slug = do
Config { kvConn } <- ask
mtid <- liftIO $ fmap (join . hush) <$> KV.runRedis kvConn $ KV.get $ Keys.slug slug
case mtid of
Just tid ->
case readMaybe $ C.unpack tid of
Just tid' -> getTalk tid' url
Nothing -> pure Nothing
Nothing ->
saveToDB url
where
url = mkTalkUrl slug
saveToDB :: Text -> AppRIO (Maybe (Entity Talk))
saveToDB url = do
Config{..} <- ask
mTalk <- fetchTalk url
case mTalk of
Just entity@(Entity talkId talk) -> do
void $ liftIO $ KV.runRedis kvConn $ KV.multiExec $ do
void $ KV.setex (Keys.cache $ unTalkKey talkId) (3600*24) ""
KV.set (Keys.slug $ talkSlug talk) (C.pack $ show $ unTalkKey talkId)
runDB $ repsert talkId talk
return $ Just entity
Nothing -> return Nothing
fetchTalk :: Text -> AppRIO (Maybe (Entity Talk))
fetchTalk url = do
handle (\(_::HttpException) -> return Nothing) $ do
req <- parseUrlThrow $ T.unpack url
res <- httpLbs req
let
body = responseBody res
cursor = fromDocument $ parseLBS body
desc = parseDescription cursor
img = parseImage cursor
core = parseTalkObject body
case eitherDecode core of
Right TalkObj{..} -> do
return $ Just $ Entity (TalkKey $ fromIntegral id) (Talk
{ talkName = name
, talkSlug = slug
, talkFilmedAt = filmedAt
, talkPublishedAt = publishedAt
, talkDescription = desc
, talkImage = img
, talkLanguages = toJSON languages
, talkMediaSlug = mediaSlug
, talkMediaPad = 0.0
})
Left err -> do
logErrorS "fetchTalk" $ fromString err
pure Nothing
|
|
04f6324e05a10031a160c0c1e2cee9c3faea29dd832c2965352b2ab76ce7309e | chanshunli/wechat-clj | macro.clj | ;; the file form: -async-await
(ns mini-program-cljs.macro
(:refer-clojure :exclude [await])
(:require [cljs.analyzer :as ana]
[cljs.compiler :as compiler]
[clojure.string :as str]))
(def ^:dynamic *in-async* false)
(alter-var-root #'ana/specials #(conj % 'async* 'await*))
(defmethod ana/parse 'await*
[op env [_ expr :as form] _ _]
(when-not *in-async*
(throw (ana/error env "Can't await outside of async block")))
(when (not= 2 (count form))
(throw (ana/error env "Wrong number of args to await")))
{:env env
:op :await
:children [:expr]
:expr (ana/analyze env expr)
:form form})
(defmethod ana/parse 'async*
[op env [_ & exprs :as form] _ _]
(binding [*in-async* true]
(let [statements (ana/disallowing-recur
(->> (butlast exprs)
(mapv #(ana/analyze (assoc env :context :statement) %))))
ret (ana/disallowing-recur
(ana/analyze (assoc env :context :return) (last exprs)))
children [:statements :ret]]
{:op :async
:env env
:form form
:statements statements
:ret ret
:ret-tag 'js/Promise
:children children})))
(defmethod compiler/emit* :await
[{:keys [env expr]}]
(when (= :return (:context env))
(compiler/emits "return "))
(compiler/emits "(await ")
(compiler/emits (assoc-in expr [:env :context] :expr))
(compiler/emits ")"))
(defmethod compiler/emit* :async
[{:keys [statements ret env]}]
(when (= :return (:context env))
(compiler/emits "return "))
(compiler/emitln "(async function (){")
(doseq [s statements]
(compiler/emitln s))
(compiler/emit ret)
(compiler/emitln "})()"))
;; ====== Public API ======
(defmacro async
"Wraps body into self-invoking JavaScript's async function, returns promise"
[& body]
`(~'async* ~@body))
(defmacro await
"Suspends execution of current async block and returns asynchronously resolved value"
[expr]
`(~'await* ~expr))
(defmacro await-all
"Same as (seq (.all js/Promise coll)), but for easier usage within async blocks"
[coll]
`(seq (~'await* (.all js/Promise ~coll))))
(defmacro await-first
"Same as (.race js/Promise coll), but for easier usage within async blocks"
[coll]
`(~'await* (.race js/Promise ~coll)))
;; -------------------
(defmacro call-promise
[{:keys [then-fn catch-fn]} & body]
`(-> ~@body
(.then
(fn [obj#]
(js/console.log "Get Promise Object: " obj#)
(~then-fn obj#)))
(.catch
(fn [e#]
(js/console.error "Promise Error: " e#)
(~catch-fn e#)))))
(comment
(demo.core/call-promise
{:then-fn (fn [miniprogram]
(reset! mini-program miniprogram))
:catch-fn (fn [x] x)}
(.connect automator
{:wsEndpoint "ws:9420"})))
(defmacro call-promise-1
"只是取出来promise的值,不关心错误"
[then-fn & body]
`(-> ~@body
(.then
(fn [obj#]
(js/console.log "Get Promise Object: " obj#)
(~then-fn obj#)))
(.catch
(fn [e#]
(js/console.error "Promise Error: " e#)))))
(comment
(jsname->clj "offPageNotFound")
;; => "off-page-not-found"
(clj->jsname "off-page-not-found")
;; => "offPageNotFound"
)
(defn jsname->clj [stri]
(->>
(seq (str/replace stri "_" "-"))
(map (fn [st]
(let [s (str st)]
(if (re-find #"[A-Z]" s)
(str "-" (str/lower-case s)) s))))
(str/join "")))
(defn clj->jsname [stri]
(str/replace stri #"-([a-z])"
#(str (str/upper-case (last %1)))))
(comment
= > 1 . 生成函数 :
(clojure.pprint/pprint (macroexpand-1 '(wx-fun-dev mini-pro checkSession)))
= > 2 . : ( wx - login : success ( fn [ res ] res ) : fail ( fn [ res ] 111 ) )
---- 生产release
(clojure.pprint/pprint (macroexpand-1 '(wx-fun checkSession)))
(wx-fun-dev @mini-program checkSession) ;; => #'mini-program-cljs.core/wx-check-session
(call-promise-1
(fn [res] (prn "----" res))
(wx-check-session :success (fn [res] res)))
;; Get Promise Object: { errMsg: 'checkSession:ok' }
;; "----" #js {:errMsg "checkSession:ok"}
)
(defmacro wx-fun-dev [mini-pro fname]
`(defn ~(symbol (str "wx-" (jsname->clj (str fname))))
[& args#]
(.callWxMethod ~mini-pro
~(str fname)
(apply hash-map args#))))
(defmacro wx-fun [fname]
`(defn ~(symbol (str "wx-" (jsname->clj (str fname))))
[& args#]
(~(symbol (str "." fname)) js/wx
(apply hash-map args#))))
(comment
(clojure.pprint/pprint
(macroexpand-1 '(defn-js test-fun [:url :method :data :header]
(do 1 2) (let [x 1] x) )))
test in cljs :
( test - fun # js { : url 111 : method 222 } )
= > [ 111 222 ]
(defn-js test-fun [:url :method :data :header]
[url method]))
(defmacro defn-js
"生成js导出需要的cljs函数: 用宏来包装副作用和领域特殊和不干净的东西,其他都可以按照lisp的风格来设计"
[fun-name fun-args export-var & body]
`(do (defn ~fun-name [^js options#]
(let [{:keys [~@(map symbol fun-args)]}
(into {}
(for [k# (.keys js/Object options#)]
[(keyword k#) (aget options# k#)]))]
~@body))
(set! (~(symbol (str ".-" (clj->jsname (str fun-name))))
~export-var) ~fun-name)))
(comment
(clojure.pprint/pprint
(macroexpand-1
'(evaluate-args (fn [arg1 arg2] (js/console.log arg1) )
"aaaa" "bbbb")))
in js_wx file :
(evaluate-args @mini-program
= > 打印出来了 " aaaa , "
"aaaa" "bbbb"))
(defmacro evaluate-args
"万能的eval: 再难难不倒强大的宏"
[mini-program code-fn & args]
`(call-promise-1
(fn [res#] (prn "eval code args: " res#))
(.evaluate ~mini-program ~@(cons code-fn args))))
(comment
(clojure.pprint/pprint
(macroexpand-1
'(c-log @mini-program "aaaa" "bbb" "cccc" "dddd"))))
(defmacro c-log
[mini-program & args]
(let [bindings (vec
(map-indexed
(fn [k v]
(symbol (str "arg" k)))
args))]
`(evaluate-args ~mini-program
(fn ~bindings
(js/console.log ~@bindings))
~@args)))
| null | https://raw.githubusercontent.com/chanshunli/wechat-clj/145a99825669b743a09a8565fa1301d90480c91b/mini-program-cljs/src/main/mini_program_cljs/macro.clj | clojure | the file form: -async-await
====== Public API ======
-------------------
=> "off-page-not-found"
=> "offPageNotFound"
=> #'mini-program-cljs.core/wx-check-session
Get Promise Object: { errMsg: 'checkSession:ok' }
"----" #js {:errMsg "checkSession:ok"} | (ns mini-program-cljs.macro
(:refer-clojure :exclude [await])
(:require [cljs.analyzer :as ana]
[cljs.compiler :as compiler]
[clojure.string :as str]))
(def ^:dynamic *in-async* false)
(alter-var-root #'ana/specials #(conj % 'async* 'await*))
(defmethod ana/parse 'await*
[op env [_ expr :as form] _ _]
(when-not *in-async*
(throw (ana/error env "Can't await outside of async block")))
(when (not= 2 (count form))
(throw (ana/error env "Wrong number of args to await")))
{:env env
:op :await
:children [:expr]
:expr (ana/analyze env expr)
:form form})
(defmethod ana/parse 'async*
[op env [_ & exprs :as form] _ _]
(binding [*in-async* true]
(let [statements (ana/disallowing-recur
(->> (butlast exprs)
(mapv #(ana/analyze (assoc env :context :statement) %))))
ret (ana/disallowing-recur
(ana/analyze (assoc env :context :return) (last exprs)))
children [:statements :ret]]
{:op :async
:env env
:form form
:statements statements
:ret ret
:ret-tag 'js/Promise
:children children})))
(defmethod compiler/emit* :await
[{:keys [env expr]}]
(when (= :return (:context env))
(compiler/emits "return "))
(compiler/emits "(await ")
(compiler/emits (assoc-in expr [:env :context] :expr))
(compiler/emits ")"))
(defmethod compiler/emit* :async
[{:keys [statements ret env]}]
(when (= :return (:context env))
(compiler/emits "return "))
(compiler/emitln "(async function (){")
(doseq [s statements]
(compiler/emitln s))
(compiler/emit ret)
(compiler/emitln "})()"))
(defmacro async
"Wraps body into self-invoking JavaScript's async function, returns promise"
[& body]
`(~'async* ~@body))
(defmacro await
"Suspends execution of current async block and returns asynchronously resolved value"
[expr]
`(~'await* ~expr))
(defmacro await-all
"Same as (seq (.all js/Promise coll)), but for easier usage within async blocks"
[coll]
`(seq (~'await* (.all js/Promise ~coll))))
(defmacro await-first
"Same as (.race js/Promise coll), but for easier usage within async blocks"
[coll]
`(~'await* (.race js/Promise ~coll)))
(defmacro call-promise
[{:keys [then-fn catch-fn]} & body]
`(-> ~@body
(.then
(fn [obj#]
(js/console.log "Get Promise Object: " obj#)
(~then-fn obj#)))
(.catch
(fn [e#]
(js/console.error "Promise Error: " e#)
(~catch-fn e#)))))
(comment
(demo.core/call-promise
{:then-fn (fn [miniprogram]
(reset! mini-program miniprogram))
:catch-fn (fn [x] x)}
(.connect automator
{:wsEndpoint "ws:9420"})))
(defmacro call-promise-1
"只是取出来promise的值,不关心错误"
[then-fn & body]
`(-> ~@body
(.then
(fn [obj#]
(js/console.log "Get Promise Object: " obj#)
(~then-fn obj#)))
(.catch
(fn [e#]
(js/console.error "Promise Error: " e#)))))
(comment
(jsname->clj "offPageNotFound")
(clj->jsname "off-page-not-found")
)
(defn jsname->clj [stri]
(->>
(seq (str/replace stri "_" "-"))
(map (fn [st]
(let [s (str st)]
(if (re-find #"[A-Z]" s)
(str "-" (str/lower-case s)) s))))
(str/join "")))
(defn clj->jsname [stri]
(str/replace stri #"-([a-z])"
#(str (str/upper-case (last %1)))))
(comment
= > 1 . 生成函数 :
(clojure.pprint/pprint (macroexpand-1 '(wx-fun-dev mini-pro checkSession)))
= > 2 . : ( wx - login : success ( fn [ res ] res ) : fail ( fn [ res ] 111 ) )
---- 生产release
(clojure.pprint/pprint (macroexpand-1 '(wx-fun checkSession)))
(call-promise-1
(fn [res] (prn "----" res))
(wx-check-session :success (fn [res] res)))
)
(defmacro wx-fun-dev [mini-pro fname]
`(defn ~(symbol (str "wx-" (jsname->clj (str fname))))
[& args#]
(.callWxMethod ~mini-pro
~(str fname)
(apply hash-map args#))))
(defmacro wx-fun [fname]
`(defn ~(symbol (str "wx-" (jsname->clj (str fname))))
[& args#]
(~(symbol (str "." fname)) js/wx
(apply hash-map args#))))
(comment
(clojure.pprint/pprint
(macroexpand-1 '(defn-js test-fun [:url :method :data :header]
(do 1 2) (let [x 1] x) )))
test in cljs :
( test - fun # js { : url 111 : method 222 } )
= > [ 111 222 ]
(defn-js test-fun [:url :method :data :header]
[url method]))
(defmacro defn-js
"生成js导出需要的cljs函数: 用宏来包装副作用和领域特殊和不干净的东西,其他都可以按照lisp的风格来设计"
[fun-name fun-args export-var & body]
`(do (defn ~fun-name [^js options#]
(let [{:keys [~@(map symbol fun-args)]}
(into {}
(for [k# (.keys js/Object options#)]
[(keyword k#) (aget options# k#)]))]
~@body))
(set! (~(symbol (str ".-" (clj->jsname (str fun-name))))
~export-var) ~fun-name)))
(comment
(clojure.pprint/pprint
(macroexpand-1
'(evaluate-args (fn [arg1 arg2] (js/console.log arg1) )
"aaaa" "bbbb")))
in js_wx file :
(evaluate-args @mini-program
= > 打印出来了 " aaaa , "
"aaaa" "bbbb"))
(defmacro evaluate-args
"万能的eval: 再难难不倒强大的宏"
[mini-program code-fn & args]
`(call-promise-1
(fn [res#] (prn "eval code args: " res#))
(.evaluate ~mini-program ~@(cons code-fn args))))
(comment
(clojure.pprint/pprint
(macroexpand-1
'(c-log @mini-program "aaaa" "bbb" "cccc" "dddd"))))
(defmacro c-log
[mini-program & args]
(let [bindings (vec
(map-indexed
(fn [k v]
(symbol (str "arg" k)))
args))]
`(evaluate-args ~mini-program
(fn ~bindings
(js/console.log ~@bindings))
~@args)))
|
b5f21272df2a3a15304401448110fc7889e8868de5c391477d5230e128416368 | 2600hz/kazoo | fax_app.erl | %%%-----------------------------------------------------------------------------
( C ) 2012 - 2020 , 2600Hz
%%% @doc
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(fax_app).
-behaviour(application).
-include_lib("kazoo_stdlib/include/kz_types.hrl").
-export([start/2, stop/1]).
-export([register_views/0]).
%%------------------------------------------------------------------------------
%% @doc Implement the application start behaviour.
%% @end
%%------------------------------------------------------------------------------
-spec start(application:start_type(), any()) -> kz_types:startapp_ret().
start(_Type, _Args) ->
register_views(),
_ = kapps_maintenance:bind('migrate', 'fax_maintenance', 'migrate'),
_ = kapps_maintenance:bind_and_register_views('fax', 'fax_app', 'register_views'),
fax_sup:start_link().
%%------------------------------------------------------------------------------
%% @doc Implement the application stop behaviour.
%% @end
%%------------------------------------------------------------------------------
-spec stop(any()) -> any().
stop(_State) ->
_ = kapps_maintenance:unbind('migrate', 'fax_maintenance', 'migrate'),
_ = kapps_maintenance:unbind('register_views', 'fax_app', 'register_views'),
_ = cowboy:stop_listener('fax_file'),
_ = fax_ra:stop(),
'ok'.
-spec register_views() -> 'ok'.
register_views() ->
kz_datamgr:register_views_from_folder('fax'),
'ok'.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/fax/src/fax_app.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Implement the application start behaviour.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Implement the application stop behaviour.
@end
------------------------------------------------------------------------------ | ( C ) 2012 - 2020 , 2600Hz
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(fax_app).
-behaviour(application).
-include_lib("kazoo_stdlib/include/kz_types.hrl").
-export([start/2, stop/1]).
-export([register_views/0]).
-spec start(application:start_type(), any()) -> kz_types:startapp_ret().
start(_Type, _Args) ->
register_views(),
_ = kapps_maintenance:bind('migrate', 'fax_maintenance', 'migrate'),
_ = kapps_maintenance:bind_and_register_views('fax', 'fax_app', 'register_views'),
fax_sup:start_link().
-spec stop(any()) -> any().
stop(_State) ->
_ = kapps_maintenance:unbind('migrate', 'fax_maintenance', 'migrate'),
_ = kapps_maintenance:unbind('register_views', 'fax_app', 'register_views'),
_ = cowboy:stop_listener('fax_file'),
_ = fax_ra:stop(),
'ok'.
-spec register_views() -> 'ok'.
register_views() ->
kz_datamgr:register_views_from_folder('fax'),
'ok'.
|
3df838b5b0bbc5f1f04a108feb571b48bff1381a9f503ea1791edcec5b309092 | camlspotter/ocamloscope.2 | packpath.ml | open Spotlib.Spot
open Opamfind.Utils
open List
open Opamfind
let make_from_names names =
let names = unique & sort compare names in
let best_name = Ocamlfind.choose_best_package_name names in
"{"
^ String.concat "," (best_name :: filter (fun x -> x <> best_name) names)
^ "}"
let make aps =
let names = map Ocamlfind.Analyzed.name aps in
make_from_names names
let parse = function
| "" -> None
| s when String.(unsafe_get s 0 = '{' && unsafe_get s (length s - 1) = '}') ->
Some (String.(split (function ',' -> true | _ -> false)
& sub s 1 (length s - 2)))
| _ -> None
| null | https://raw.githubusercontent.com/camlspotter/ocamloscope.2/49b5977a283cdd373021d41cb3620222351a2efe/packpath.ml | ocaml | open Spotlib.Spot
open Opamfind.Utils
open List
open Opamfind
let make_from_names names =
let names = unique & sort compare names in
let best_name = Ocamlfind.choose_best_package_name names in
"{"
^ String.concat "," (best_name :: filter (fun x -> x <> best_name) names)
^ "}"
let make aps =
let names = map Ocamlfind.Analyzed.name aps in
make_from_names names
let parse = function
| "" -> None
| s when String.(unsafe_get s 0 = '{' && unsafe_get s (length s - 1) = '}') ->
Some (String.(split (function ',' -> true | _ -> false)
& sub s 1 (length s - 2)))
| _ -> None
|
|
29ec6c6c5aea227bf0d500df1ad36ffac6b28460585b6a41a7d25fb1264a36e3 | haskell-works/hw-prim | Null.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module HaskellWorks.Data.Null
( Null(..)
) where
import Data.Int
import Data.Word
import HaskellWorks.Data.Container
import qualified Data.ByteString as BS
import qualified Data.List as L
import qualified Data.Vector as DV
import qualified Data.Vector.Storable as DVS
class Container a => Null a where
null :: a -> Bool
instance Null [a] where
null = L.null
# INLINE null #
instance Null BS.ByteString where
null = BS.null
# INLINE null #
instance Null (DV.Vector Word8) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word16) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word32) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word64) where
null = DV.null
# INLINE null #
instance Null (DVS.Vector Word8) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word16) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word32) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word64) where
null = DVS.null
# INLINE null #
instance Null (DV.Vector Int8) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int16) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int32) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int64) where
null = DV.null
# INLINE null #
instance Null (DVS.Vector Int8) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int16) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int32) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int64) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int) where
null = DVS.null
# INLINE null #
| null | https://raw.githubusercontent.com/haskell-works/hw-prim/aff74834cd2d3fb0eb4994b24b2d1cdef1e3e673/src/HaskellWorks/Data/Null.hs | haskell | # LANGUAGE FlexibleContexts # | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module HaskellWorks.Data.Null
( Null(..)
) where
import Data.Int
import Data.Word
import HaskellWorks.Data.Container
import qualified Data.ByteString as BS
import qualified Data.List as L
import qualified Data.Vector as DV
import qualified Data.Vector.Storable as DVS
class Container a => Null a where
null :: a -> Bool
instance Null [a] where
null = L.null
# INLINE null #
instance Null BS.ByteString where
null = BS.null
# INLINE null #
instance Null (DV.Vector Word8) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word16) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word32) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Word64) where
null = DV.null
# INLINE null #
instance Null (DVS.Vector Word8) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word16) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word32) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Word64) where
null = DVS.null
# INLINE null #
instance Null (DV.Vector Int8) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int16) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int32) where
null = DV.null
# INLINE null #
instance Null (DV.Vector Int64) where
null = DV.null
# INLINE null #
instance Null (DVS.Vector Int8) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int16) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int32) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int64) where
null = DVS.null
# INLINE null #
instance Null (DVS.Vector Int) where
null = DVS.null
# INLINE null #
|
98f3f044aae6a811f55b63781850588d033de3107666e81f29af383a0cae4440 | bluddy/rails | pani.ml | open Containers
PANI format ( format flag PANI )
ofs | datatype | description
-----+----------+------------
0x0 | PANI |
0x4 | pani_byte1 | Always 3
0x5 | pani_byte2
0x6 | pani_byte3 | 0 : skip next part 1 : do n't skip
0x6 | header_type | 0 : 17 byte header . 1 : no header . 2 : 774 byte header
No header :
0x7 | 9 words | PANI struct
0x18 | byte | If 0 , nothing . If 2 , set ega params . If 1 , image
read 250 words , look for non - zero
word : number of 16 byte blocks
0x24 | 16 bits | Format flag .
0x26 | 16 bits | Width , always 320
0x28 | 16 bits | Height , always 200
0x2A | byte | dictionary bit width , always 0x0B
0x2B | LZW data | image data in LZW+RLE compressed format
Layout
------
First image
Gap of 500 bytes ( with some data )
- 0s except for a short for each image !
- Increasing numbers
then next images
Data collected
--------------
~1041 : 14 images
WRECKM : 3664 105 images , 148 times 0500
FOLLOWED.PAN 1680 : 29 images , 42 times 0500
FLOODM.PAN 88 images , 172 times 0500
WOOD2 52 images
HQ 14 images : 0 - E , E1c start : 0 , 1 , 2 , 5 , 6 , 7 , 8 , b , c , d , e , f , 10 , 11
CAPTURED : 0 : bckgrnd , 1 : bcgrnd2 5,6,7,8,9 : over same spot ( captured )
- possibly mapped to 2 , 4 , 6 , 8 , 9 , C , E , 10 , 12
Program section
---------------
* ends with 0a
CAPTRED
- Commands separated by 0500
- 2300 : animation , long
IRONM : 42sec
WOOD2 : 18sec
PANI format (format flag PANI)
ofs | datatype | description
-----+----------+------------
0x0 | PANI |
0x4 | pani_byte1 | Always 3
0x5 | pani_byte2
0x6 | pani_byte3 | 0: skip next part 1: don't skip
0x6 | header_type | 0: 17 byte header. 1: no header. 2: 774 byte header
No header:
0x7 | 9 words | PANI struct
0x18 | byte | If 0, nothing. If 2, set ega params. If 1, image
read 250 words, look for non-zero
word: number of 16 byte blocks
0x24 | 16 bits | Format flag.
0x26 | 16 bits | Width, always 320
0x28 | 16 bits | Height, always 200
0x2A | byte | Max LZW dictionary bit width, always 0x0B
0x2B | LZW data | image data in LZW+RLE compressed format
Layout
------
First image
Gap of 500 bytes (with some data)
- 0s except for a short for each image!
- Increasing numbers
then next images
Data collected
--------------
~1041: 14 images
WRECKM: 3664 105 images, 148 times 0500
FOLLOWED.PAN 1680: 29 images, 42 times 0500
FLOODM.PAN 88 images, 172 times 0500
WOOD2 52 images
HQ 14 images: 0-E, E1c start: 0, 1, 2, 5, 6, 7, 8, b, c, d, e, f, 10, 11
CAPTURED: 0:bckgrnd, 1:bcgrnd2 5,6,7,8,9: over same spot (captured)
- possibly mapped to 2, 4, 6, 8, 9, C, E, 10, 12
Program section
---------------
* ends with 0a
CAPTRED
- Commands separated by 0500
- 2300: animation, long
IRONM: 42sec
WOOD2: 18sec
*)
let of_stream ?(dump_files=None) s =
let pani = Gen.take 4 s |> My_gen.to_stringi in
if String.(pani = "PANI")
then ()
else failwith "Not a PANI file";
let _pani_byte1 = My_gen.get_bytei s in
let pani_lzw_encoded = My_gen.get_bytei s in
let pani_byte3 = My_gen.get_bytei s in
Printf.printf "lzw_encoded: 0x%x\nbyte3: 0x%x\n" pani_lzw_encoded pani_byte3; (* debug *)
let header_type = My_gen.get_bytei s in
Printf.printf "header_type: 0x%x\n" header_type; (* debug *)
let _subheader =
match header_type with
| 0 -> Gen.take 17 s |> My_gen.to_stringi
| 1 -> ""
| 2 -> Gen.take 774 s |> My_gen.to_stringi
| n -> failwith @@ Printf.sprintf "Bad header_type %d" n
in
Actually 9 words
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
let _pani_word = My_gen.get_wordi s in
(* pani_read_buffer_2 *)
let pani_type = My_gen.get_bytei s in
Printf.printf "pani_type: 0x%x\n" pani_type; (* debug *)
let pani_pics = Array.make 251 None in
let pic_bgnd =
match pani_type with
| 0 -> None
| 1 ->
(* let byte = My_gen.get_bytei s in (* optional *)
Printf.printf "byte: 0x%x pos: 0x%x\n" byte (My_gen.pos ()); *)
Printf.printf "Loading background image\n";
let ndarray = Pic.ndarray_of_stream s in
let pic_bgnd = Some(Pic.img_of_ndarray ndarray) in
begin match dump_files with
| Some filepath ->
Pic.png_of_ndarray ndarray ~filename:(Printf.sprintf "%s_bgnd.png" filepath)
| None -> ()
end;
pic_bgnd
| 2 -> None
| _ -> failwith "Unknown value for pani_type"
in
let align_pos () =
let pos = My_gen.pos () + 1 in
if pos land 1 = 1 then (
print_endline "junking odd position";
My_gen.junki s
)
in
HACK ( not in source ) to adjust if we 're not word aligned
align_pos ();
Support up to 250 images , lined up towards end , zeros before then
let pani_pic_ptrs = Array.make 250 0 in
Printf.printf "Post-Background pos: 0x%x\n" (My_gen.pos () + 1);
for i=0 to 249 do
let word = My_gen.get_wordi s in
Printf.printf " % d : 0x%x\n " i word ;
pani_pic_ptrs.(i) <- word
done;
let num = Array.fold (fun acc x -> if x = 0 then acc else acc + 1) 0 pani_pic_ptrs in
Printf.printf "%d pictures expected\n" num;
Array.iteri (fun i x ->
match x with
| 0 -> ()
| _ ->
let pos = My_gen.pos () + 1 in
Printf.printf "pos: 0x%x\n" pos;
(* We can only start at word boundaries *)
align_pos ();
Printf.printf "Load pic. Idx: %d. Pos: 0x%x.\n" i (My_gen.pos () + 1);
let ndarray = Pic.ndarray_of_stream s in
pani_pics.(i) <- Some(Pic.img_of_ndarray ndarray);
match dump_files with
| Some filepath ->
Pic.png_of_ndarray ndarray ~filename:(Printf.sprintf "%s_%d.png" filepath i)
| None -> ()
)
pani_pic_ptrs;
(* Animation interpreter code *)
align_pos ();
let pos = My_gen.pos () + 1 in
let size_ending = My_gen.get_wordi s in
Printf.printf "0x%x: %d 16-byte entries\n" pos size_ending;
(* fill with words for now *)
let pani_arr = Array.make ( size_ending * 8) 0 in
for i=0 to size_ending * 8 - 1 do
pani_arr.(i ) < - My_gen.get_wordi s ;
Printf.printf " : 0x%x\n " ( My_gen.pos ( ) ) pani_arr.(i ) ;
done
let pani_arr = Array.make (size_ending * 8) 0 in
for i=0 to size_ending * 8 - 1 do
pani_arr.(i) <- My_gen.get_wordi s;
Printf.printf "0x%x: 0x%x\n" (My_gen.pos ()) pani_arr.(i);
done
*)
let pani_code_s = My_gen.to_stringi s |> Bytes.of_string in
begin match dump_files with
| Some filepath ->
let out_file = Printf.sprintf "%s_code.txt" filepath in
let f = open_out out_file in
output_bytes f pani_code_s;
close_out f
| None -> ()
end;
let pani_v = Pani_interp.make pani_code_s pic_bgnd pani_pics in
pani_v
let stream_of_file filename =
let str =
IO.with_in filename @@
fun in_channel -> IO.read_all in_channel
in
let stream = My_gen.of_stringi str in
stream
let main filename =
Printf.printf "--- PANI dump: %s\n" filename;
let filepath = Filename.remove_extension filename in
let stream = stream_of_file filename in
let pani_v = of_stream stream ~dump_files:(Some filepath) in
Pani_interp.run_to_end pani_v
| null | https://raw.githubusercontent.com/bluddy/rails/b2a8dd700fdcbdf36984ba50eb148000c1f0b32f/bin/anim/pani.ml | ocaml | debug
debug
pani_read_buffer_2
debug
let byte = My_gen.get_bytei s in (* optional
We can only start at word boundaries
Animation interpreter code
fill with words for now | open Containers
PANI format ( format flag PANI )
ofs | datatype | description
-----+----------+------------
0x0 | PANI |
0x4 | pani_byte1 | Always 3
0x5 | pani_byte2
0x6 | pani_byte3 | 0 : skip next part 1 : do n't skip
0x6 | header_type | 0 : 17 byte header . 1 : no header . 2 : 774 byte header
No header :
0x7 | 9 words | PANI struct
0x18 | byte | If 0 , nothing . If 2 , set ega params . If 1 , image
read 250 words , look for non - zero
word : number of 16 byte blocks
0x24 | 16 bits | Format flag .
0x26 | 16 bits | Width , always 320
0x28 | 16 bits | Height , always 200
0x2A | byte | dictionary bit width , always 0x0B
0x2B | LZW data | image data in LZW+RLE compressed format
Layout
------
First image
Gap of 500 bytes ( with some data )
- 0s except for a short for each image !
- Increasing numbers
then next images
Data collected
--------------
~1041 : 14 images
WRECKM : 3664 105 images , 148 times 0500
FOLLOWED.PAN 1680 : 29 images , 42 times 0500
FLOODM.PAN 88 images , 172 times 0500
WOOD2 52 images
HQ 14 images : 0 - E , E1c start : 0 , 1 , 2 , 5 , 6 , 7 , 8 , b , c , d , e , f , 10 , 11
CAPTURED : 0 : bckgrnd , 1 : bcgrnd2 5,6,7,8,9 : over same spot ( captured )
- possibly mapped to 2 , 4 , 6 , 8 , 9 , C , E , 10 , 12
Program section
---------------
* ends with 0a
CAPTRED
- Commands separated by 0500
- 2300 : animation , long
IRONM : 42sec
WOOD2 : 18sec
PANI format (format flag PANI)
ofs | datatype | description
-----+----------+------------
0x0 | PANI |
0x4 | pani_byte1 | Always 3
0x5 | pani_byte2
0x6 | pani_byte3 | 0: skip next part 1: don't skip
0x6 | header_type | 0: 17 byte header. 1: no header. 2: 774 byte header
No header:
0x7 | 9 words | PANI struct
0x18 | byte | If 0, nothing. If 2, set ega params. If 1, image
read 250 words, look for non-zero
word: number of 16 byte blocks
0x24 | 16 bits | Format flag.
0x26 | 16 bits | Width, always 320
0x28 | 16 bits | Height, always 200
0x2A | byte | Max LZW dictionary bit width, always 0x0B
0x2B | LZW data | image data in LZW+RLE compressed format
Layout
------
First image
Gap of 500 bytes (with some data)
- 0s except for a short for each image!
- Increasing numbers
then next images
Data collected
--------------
~1041: 14 images
WRECKM: 3664 105 images, 148 times 0500
FOLLOWED.PAN 1680: 29 images, 42 times 0500
FLOODM.PAN 88 images, 172 times 0500
WOOD2 52 images
HQ 14 images: 0-E, E1c start: 0, 1, 2, 5, 6, 7, 8, b, c, d, e, f, 10, 11
CAPTURED: 0:bckgrnd, 1:bcgrnd2 5,6,7,8,9: over same spot (captured)
- possibly mapped to 2, 4, 6, 8, 9, C, E, 10, 12
Program section
---------------
* ends with 0a
CAPTRED
- Commands separated by 0500
- 2300: animation, long
IRONM: 42sec
WOOD2: 18sec
*)
let of_stream ?(dump_files=None) s =
let pani = Gen.take 4 s |> My_gen.to_stringi in
if String.(pani = "PANI")
then ()
else failwith "Not a PANI file";
let _pani_byte1 = My_gen.get_bytei s in
let pani_lzw_encoded = My_gen.get_bytei s in
let pani_byte3 = My_gen.get_bytei s in
let header_type = My_gen.get_bytei s in
let _subheader =
match header_type with
| 0 -> Gen.take 17 s |> My_gen.to_stringi
| 1 -> ""
| 2 -> Gen.take 774 s |> My_gen.to_stringi
| n -> failwith @@ Printf.sprintf "Bad header_type %d" n
in
Actually 9 words
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
Vector.push pani_struct (My_gen.get_wordi s);
let _pani_word = My_gen.get_wordi s in
let pani_type = My_gen.get_bytei s in
let pani_pics = Array.make 251 None in
let pic_bgnd =
match pani_type with
| 0 -> None
| 1 ->
Printf.printf "byte: 0x%x pos: 0x%x\n" byte (My_gen.pos ()); *)
Printf.printf "Loading background image\n";
let ndarray = Pic.ndarray_of_stream s in
let pic_bgnd = Some(Pic.img_of_ndarray ndarray) in
begin match dump_files with
| Some filepath ->
Pic.png_of_ndarray ndarray ~filename:(Printf.sprintf "%s_bgnd.png" filepath)
| None -> ()
end;
pic_bgnd
| 2 -> None
| _ -> failwith "Unknown value for pani_type"
in
let align_pos () =
let pos = My_gen.pos () + 1 in
if pos land 1 = 1 then (
print_endline "junking odd position";
My_gen.junki s
)
in
HACK ( not in source ) to adjust if we 're not word aligned
align_pos ();
Support up to 250 images , lined up towards end , zeros before then
let pani_pic_ptrs = Array.make 250 0 in
Printf.printf "Post-Background pos: 0x%x\n" (My_gen.pos () + 1);
for i=0 to 249 do
let word = My_gen.get_wordi s in
Printf.printf " % d : 0x%x\n " i word ;
pani_pic_ptrs.(i) <- word
done;
let num = Array.fold (fun acc x -> if x = 0 then acc else acc + 1) 0 pani_pic_ptrs in
Printf.printf "%d pictures expected\n" num;
Array.iteri (fun i x ->
match x with
| 0 -> ()
| _ ->
let pos = My_gen.pos () + 1 in
Printf.printf "pos: 0x%x\n" pos;
align_pos ();
Printf.printf "Load pic. Idx: %d. Pos: 0x%x.\n" i (My_gen.pos () + 1);
let ndarray = Pic.ndarray_of_stream s in
pani_pics.(i) <- Some(Pic.img_of_ndarray ndarray);
match dump_files with
| Some filepath ->
Pic.png_of_ndarray ndarray ~filename:(Printf.sprintf "%s_%d.png" filepath i)
| None -> ()
)
pani_pic_ptrs;
align_pos ();
let pos = My_gen.pos () + 1 in
let size_ending = My_gen.get_wordi s in
Printf.printf "0x%x: %d 16-byte entries\n" pos size_ending;
let pani_arr = Array.make ( size_ending * 8) 0 in
for i=0 to size_ending * 8 - 1 do
pani_arr.(i ) < - My_gen.get_wordi s ;
Printf.printf " : 0x%x\n " ( My_gen.pos ( ) ) pani_arr.(i ) ;
done
let pani_arr = Array.make (size_ending * 8) 0 in
for i=0 to size_ending * 8 - 1 do
pani_arr.(i) <- My_gen.get_wordi s;
Printf.printf "0x%x: 0x%x\n" (My_gen.pos ()) pani_arr.(i);
done
*)
let pani_code_s = My_gen.to_stringi s |> Bytes.of_string in
begin match dump_files with
| Some filepath ->
let out_file = Printf.sprintf "%s_code.txt" filepath in
let f = open_out out_file in
output_bytes f pani_code_s;
close_out f
| None -> ()
end;
let pani_v = Pani_interp.make pani_code_s pic_bgnd pani_pics in
pani_v
let stream_of_file filename =
let str =
IO.with_in filename @@
fun in_channel -> IO.read_all in_channel
in
let stream = My_gen.of_stringi str in
stream
let main filename =
Printf.printf "--- PANI dump: %s\n" filename;
let filepath = Filename.remove_extension filename in
let stream = stream_of_file filename in
let pani_v = of_stream stream ~dump_files:(Some filepath) in
Pani_interp.run_to_end pani_v
|
9ffe7f9841ad50aa8d6c74a15d31ca987d2e620ba3e0a48f17d919b4c5571242 | SAP-archive/bosh-kubernetes-cpi-release | ConfigSpec.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE QuasiQuotes #-}
# LANGUAGE ScopedTypeVariables #
module CPI.Kubernetes.ConfigSpec(spec) where
import CPI.Base.System
import CPI.Kubernetes.Config
import Data.Aeson.QQ
import Test.Hspec
import Text.RawString.QQ
import Data.Aeson
import Data.ByteString.Lazy (fromStrict, toStrict)
import Control.Exception.Safe
import Control.Monad.State
import Control.Effect.Stub
import Control.Effect.Stub.Environment (HasEnvironment(..))
import Control.Effect.Stub.FileSystem
import Data.ByteString (ByteString)
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashMap.Strict as HashMap
import Data.Text (Text)
import qualified Servant.Common.BaseUrl as Url
runStubT' :: () -> SystemState -> StubT () () SystemState IO () -> IO ((), SystemState, ())
runStubT' = runStubT
spec :: Spec
spec =
describe "parseConfig" $ do
context "given an explicit access" $ do
let rawConfig :: (ToJSON a, ?creds :: a) => ByteString
rawConfig = rawConfig' ?creds
rawConfig' creds = toStrict $ encode [aesonQQ|
{
"access": {
"server": ":4443",
"namespace": "default",
"credentials": #{creds}
},
"agent": {}
}
|]
let ?creds = [aesonQQ|{"token": "xxxxx-xxxxx-xxxxx-xxxxx"}|]
it "should parse namespace" $ do
config <- parseConfig rawConfig
let namespace' = namespace $ clusterAccess config
namespace' `shouldBe` "default"
it "should parse server" $ do
config <- parseConfig rawConfig
let server' = server $ clusterAccess config
expectedServer <- Url.parseBaseUrl ":4443"
server' `shouldBe` expectedServer
context "with valid client certs" $ do
let ?creds = [aesonQQ|
{
"certificate": "certificate",
"private_key": #{privateKey}
}
|]
it "should parse credentials" $ do
config <- parseConfig rawConfig
ClientCertificate credentials' <- credentials $ clusterAccess config
let (certChain, privateKey) = credentials'
1 `shouldBe` 1
context "with a token" $ do
let ?creds = [aesonQQ| { "token": "xxxxx-xxxxx-xxxxx-xxxxx" } |]
it "should parse credentials" $ do
config <- parseConfig rawConfig
Token credentials <- credentials $ clusterAccess config
credentials `shouldBe` "xxxxx-xxxxx-xxxxx-xxxxx"
context "given access of type ServiceAccount" $ do
let rawConfig = toStrict $ encode [aesonQQ|
{
"access": {
"server" : "",
"namespace" : "my-namespace",
"credentials" : "ServiceAccount"
},
"agent": {}
}
|]
let systemState = emptySystemState {
fileSystem = HashMap.fromList [
("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "default")
, ("/var/run/secrets/kubernetes.io/serviceaccount/token", "xxxxx-xxxxx-xxxxx-xxxxx")]
, environment = HashMap.singleton "KUBERNETES_SERVICE_HOST" ""
}
it "should read namespace from configuration" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
let namespace' = namespace $ clusterAccess config
lift $ namespace' `shouldBe` "my-namespace"
it "should use read server url from configuration" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
let server' = server $ clusterAccess config
expectedServer <- Url.parseBaseUrl ""
lift $ server' `shouldBe` expectedServer
it "should read token from service account" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
Token credentials <- credentials $ clusterAccess config
lift $ credentials `shouldBe` "xxxxx-xxxxx-xxxxx-xxxxx"
instance HasFiles SystemState where
asFiles = fileSystem
instance HasEnvironment SystemState where
asEnvironment = environment
data SystemState = SystemState {
fileSystem :: HashMap Text ByteString
, environment :: HashMap Text Text
}
emptySystemState = SystemState {
fileSystem = HashMap.empty
, environment = HashMap.empty
}
privateKey :: Text
privateKey = [r|
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAu5sGJ7lKDtJBw5xjpJY5Nn5vg69k6vGOowsDzFRdP0+9JWNq
5Aee+fUoHgRyf0WUi13GH4cjs9sN9DUN1JeKufPkt0rD1w8EFOZxjPt6apRh2SYC
x+I0pz44tEg4OFNSBHO3F8gTLw7K8fiT/OuDEYcNgjew6jfBVVohbVCErfEDnB/G
n5+WxH5clLcqMaZfuC4mJA8tx49msvCADqeoz/DROeYMpA9+l8+4PFMX6RCM96Of
f9NpXsnbug221UzxHZIUEV97Mr/l0Y7rsxwUqaB8pNpJ648+qei5VVa66oMeC8b7
0hZcBcgspiB6FHSK0q9UhD0oSrfxYJOVxM3gqQIDAQABAoIBABgOpRdq90g3Rh+j
alOsv+FxDTPBxhsqprPZsb7+Aocf3o1w1kAvif9bpK1UvKn9bjMA72sTlUx3Bq8O
LpvYYv29fNLUT5DAaDGV63G8vdH0/ScvbKPdKgtYO0VDDZKLfLT9cbkm+u7J4tRs
n+2K9d/FhcHxCkq+o5giWq796EW1razrypIycYKzW/wmbxtI61zte0799eUhjZFg
YK+YYgo08YNyPsyuD/D8KTxa1uoS85VTi5INSW7u+hqbRZAm/BrGCvrZUQ7+piwR
Ep+kIBJnbD1nBuJcyX1QJI1N2i35aUDdSAqM72dQH1OMWBi7daNNUchGC3/E+ZSN
3eoCnkECgYEAwRu/6SFeoumO6ay365gd2PhInXI82bBwXyoNhvBXpaj654ROQfg0
Iu2p830ZaTjSv2xGC7tUmZU1ttQQaymLgUtYcSN86D6/IKCFQs9/zUZ22ZwV1hwq
Jj23wIGkqH4O3QXhduH8YLLiU0O3OjzHbIlvnn18x8oHQwFsVeN+8iUCgYEA+LR6
7j9ZkxL1Vmi3IkdoZmdo/9im7IxaxiK4Q5FNpb0bVBHWmKAC8HfxjOzqYeWs4GwT
1SaOTUhonP21KDsnDJWaFezmuDqPqTS3VMsJGMPNcK+4QkxMPkDDnl9hbNbWuszq
uyoxURirZ3e+jYxEQgZjA3jtWGfRqRXwtXdJEzUCgYBAKIYUYL/ehJa00Guy3LFd
+u+1T9UjxlkvZPtlj8ivA3uJHA4cIOjBihDjEvc0XGq4qrKDB1ROSqK0AbUKxZzR
8kSKIm5Hg0FhB7P+xI4Dl5u5JQCkSGtAlVTNosUgLfGmQWPtaZu+TPChFWh08uiX
CPqKv8qLXnYXLwvdZV4x+QKBgH6IXE7gfjM8nwOibSIMkIohLKOWV37b/cb2nScL
QyUCrGe+V575MeWkMInRc4HxN15KvmBgqF+balYNImDgj4Jwjp9/EvdCHBsrTebf
Eba+z8P4MtfQN64ohx4JSujz+PW7EeW9lq+6zGHs407iwUuSMkfu+1pSH7JWDkxU
7yHNAoGAYZbfzOfkMX82XXHmLenv7ePuFXhjW3cUNNAiOKoQs5Mcv0LHxKJiOr74
GCEfLNbK8p+Gu5RbYnQnTvgcw3/nMcxULwB4rCTJqfQ7Il4z1E0hd7ZmXH37Ixy+
/RPv094xkEoN9uPaJJj0vQEYxkC50ZCzf4gN+qHyH9jdiQ3JoKw=
-----END RSA PRIVATE KEY-----
|]
| null | https://raw.githubusercontent.com/SAP-archive/bosh-kubernetes-cpi-release/3166a74e118e75bbdedb01cff72cbe52968eee62/src/bosh-kubernetes-cpi/test/unit/CPI/Kubernetes/ConfigSpec.hs | haskell | # LANGUAGE ImplicitParams #
# LANGUAGE QuasiQuotes #
---BEGIN RSA PRIVATE KEY-----
---END RSA PRIVATE KEY----- | # LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
module CPI.Kubernetes.ConfigSpec(spec) where
import CPI.Base.System
import CPI.Kubernetes.Config
import Data.Aeson.QQ
import Test.Hspec
import Text.RawString.QQ
import Data.Aeson
import Data.ByteString.Lazy (fromStrict, toStrict)
import Control.Exception.Safe
import Control.Monad.State
import Control.Effect.Stub
import Control.Effect.Stub.Environment (HasEnvironment(..))
import Control.Effect.Stub.FileSystem
import Data.ByteString (ByteString)
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashMap.Strict as HashMap
import Data.Text (Text)
import qualified Servant.Common.BaseUrl as Url
runStubT' :: () -> SystemState -> StubT () () SystemState IO () -> IO ((), SystemState, ())
runStubT' = runStubT
spec :: Spec
spec =
describe "parseConfig" $ do
context "given an explicit access" $ do
let rawConfig :: (ToJSON a, ?creds :: a) => ByteString
rawConfig = rawConfig' ?creds
rawConfig' creds = toStrict $ encode [aesonQQ|
{
"access": {
"server": ":4443",
"namespace": "default",
"credentials": #{creds}
},
"agent": {}
}
|]
let ?creds = [aesonQQ|{"token": "xxxxx-xxxxx-xxxxx-xxxxx"}|]
it "should parse namespace" $ do
config <- parseConfig rawConfig
let namespace' = namespace $ clusterAccess config
namespace' `shouldBe` "default"
it "should parse server" $ do
config <- parseConfig rawConfig
let server' = server $ clusterAccess config
expectedServer <- Url.parseBaseUrl ":4443"
server' `shouldBe` expectedServer
context "with valid client certs" $ do
let ?creds = [aesonQQ|
{
"certificate": "certificate",
"private_key": #{privateKey}
}
|]
it "should parse credentials" $ do
config <- parseConfig rawConfig
ClientCertificate credentials' <- credentials $ clusterAccess config
let (certChain, privateKey) = credentials'
1 `shouldBe` 1
context "with a token" $ do
let ?creds = [aesonQQ| { "token": "xxxxx-xxxxx-xxxxx-xxxxx" } |]
it "should parse credentials" $ do
config <- parseConfig rawConfig
Token credentials <- credentials $ clusterAccess config
credentials `shouldBe` "xxxxx-xxxxx-xxxxx-xxxxx"
context "given access of type ServiceAccount" $ do
let rawConfig = toStrict $ encode [aesonQQ|
{
"access": {
"server" : "",
"namespace" : "my-namespace",
"credentials" : "ServiceAccount"
},
"agent": {}
}
|]
let systemState = emptySystemState {
fileSystem = HashMap.fromList [
("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "default")
, ("/var/run/secrets/kubernetes.io/serviceaccount/token", "xxxxx-xxxxx-xxxxx-xxxxx")]
, environment = HashMap.singleton "KUBERNETES_SERVICE_HOST" ""
}
it "should read namespace from configuration" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
let namespace' = namespace $ clusterAccess config
lift $ namespace' `shouldBe` "my-namespace"
it "should use read server url from configuration" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
let server' = server $ clusterAccess config
expectedServer <- Url.parseBaseUrl ""
lift $ server' `shouldBe` expectedServer
it "should read token from service account" $ do
void $ runStubT' () systemState $ do
config <- parseConfig rawConfig
Token credentials <- credentials $ clusterAccess config
lift $ credentials `shouldBe` "xxxxx-xxxxx-xxxxx-xxxxx"
instance HasFiles SystemState where
asFiles = fileSystem
instance HasEnvironment SystemState where
asEnvironment = environment
data SystemState = SystemState {
fileSystem :: HashMap Text ByteString
, environment :: HashMap Text Text
}
emptySystemState = SystemState {
fileSystem = HashMap.empty
, environment = HashMap.empty
}
privateKey :: Text
privateKey = [r|
MIIEogIBAAKCAQEAu5sGJ7lKDtJBw5xjpJY5Nn5vg69k6vGOowsDzFRdP0+9JWNq
5Aee+fUoHgRyf0WUi13GH4cjs9sN9DUN1JeKufPkt0rD1w8EFOZxjPt6apRh2SYC
x+I0pz44tEg4OFNSBHO3F8gTLw7K8fiT/OuDEYcNgjew6jfBVVohbVCErfEDnB/G
n5+WxH5clLcqMaZfuC4mJA8tx49msvCADqeoz/DROeYMpA9+l8+4PFMX6RCM96Of
f9NpXsnbug221UzxHZIUEV97Mr/l0Y7rsxwUqaB8pNpJ648+qei5VVa66oMeC8b7
0hZcBcgspiB6FHSK0q9UhD0oSrfxYJOVxM3gqQIDAQABAoIBABgOpRdq90g3Rh+j
alOsv+FxDTPBxhsqprPZsb7+Aocf3o1w1kAvif9bpK1UvKn9bjMA72sTlUx3Bq8O
LpvYYv29fNLUT5DAaDGV63G8vdH0/ScvbKPdKgtYO0VDDZKLfLT9cbkm+u7J4tRs
n+2K9d/FhcHxCkq+o5giWq796EW1razrypIycYKzW/wmbxtI61zte0799eUhjZFg
YK+YYgo08YNyPsyuD/D8KTxa1uoS85VTi5INSW7u+hqbRZAm/BrGCvrZUQ7+piwR
Ep+kIBJnbD1nBuJcyX1QJI1N2i35aUDdSAqM72dQH1OMWBi7daNNUchGC3/E+ZSN
3eoCnkECgYEAwRu/6SFeoumO6ay365gd2PhInXI82bBwXyoNhvBXpaj654ROQfg0
Iu2p830ZaTjSv2xGC7tUmZU1ttQQaymLgUtYcSN86D6/IKCFQs9/zUZ22ZwV1hwq
Jj23wIGkqH4O3QXhduH8YLLiU0O3OjzHbIlvnn18x8oHQwFsVeN+8iUCgYEA+LR6
7j9ZkxL1Vmi3IkdoZmdo/9im7IxaxiK4Q5FNpb0bVBHWmKAC8HfxjOzqYeWs4GwT
1SaOTUhonP21KDsnDJWaFezmuDqPqTS3VMsJGMPNcK+4QkxMPkDDnl9hbNbWuszq
uyoxURirZ3e+jYxEQgZjA3jtWGfRqRXwtXdJEzUCgYBAKIYUYL/ehJa00Guy3LFd
+u+1T9UjxlkvZPtlj8ivA3uJHA4cIOjBihDjEvc0XGq4qrKDB1ROSqK0AbUKxZzR
8kSKIm5Hg0FhB7P+xI4Dl5u5JQCkSGtAlVTNosUgLfGmQWPtaZu+TPChFWh08uiX
CPqKv8qLXnYXLwvdZV4x+QKBgH6IXE7gfjM8nwOibSIMkIohLKOWV37b/cb2nScL
QyUCrGe+V575MeWkMInRc4HxN15KvmBgqF+balYNImDgj4Jwjp9/EvdCHBsrTebf
Eba+z8P4MtfQN64ohx4JSujz+PW7EeW9lq+6zGHs407iwUuSMkfu+1pSH7JWDkxU
7yHNAoGAYZbfzOfkMX82XXHmLenv7ePuFXhjW3cUNNAiOKoQs5Mcv0LHxKJiOr74
GCEfLNbK8p+Gu5RbYnQnTvgcw3/nMcxULwB4rCTJqfQ7Il4z1E0hd7ZmXH37Ixy+
/RPv094xkEoN9uPaJJj0vQEYxkC50ZCzf4gN+qHyH9jdiQ3JoKw=
|]
|
c70fa4e2987c694f38d049f83008390f18d605bb033b2971276c1434b5c67f3f | GaloisInc/daedalus | HTML.hs | {-# Language OverloadedStrings #-}
# Language RecordWildCards #
{-# Language BlockArguments #-}
module HTML where
import Prelude hiding (div)
import Data.List(intersperse)
import Data.Ratio(numerator,denominator)
import Types
specToHTML :: Spec -> HTML
specToHTML s = fieldsToHTML (sName s) (sFields s)
fieldsToHTML :: String -> [Field] -> HTML
fieldsToHTML nm fs =
tag "html" [] $ htmls
[ tag "head" [] $ tag' "link" [ ("rel","stylesheet"), ("href","style.css") ]
, tag "body" [] $ htmls $ div "heading" (htmlText nm) : map fieldToHTML2 fs
]
fieldToHTML2 :: Field -> HTML
fieldToHTML2 fld = div "field" $ htmls $
[ div "bounds" bounds
, div "key" (fieldPatHTML (fName fld))
, htmlText "::"
, typeToHTML (fType fld)
] ++
[ htmlText ", required" | fRequired fld ] ++
[ htmlText ", indirect" | fIndirect fld ] ++
[ htmlText ", value used" | fValNeeded fld ] ++
[ htmls [ htmlText ", default = ", exprToHTML x ]
| Just x <- [fDefaultValue fld] ]
where
bounds = htmls [ from, to ]
from = htmlText (show (fSince fld))
to = case fDeprecated fld of
Just x -> htmlText ("--" ++ show x)
Nothing -> htmlText ""
fieldPatHTML :: FieldPat -> HTML
fieldPatHTML fp =
case fp of
PArrayAny -> htmlText "[*]"
PArrayIx n -> htmlText ("[" ++ show n ++ "]")
PFieldName x -> htmlText x
primTyToHTML :: PrimType -> HTML
primTyToHTML ty =
case ty of
TInteger -> htmlText "Integer"
TNumber -> htmlText "Number"
TName -> htmlText "Name"
TBool -> htmlText "Bool"
TStringText -> htmlText "Text"
TStringByte -> htmlText "Bytes"
TStringAscii -> htmlText "ASCII"
TString -> htmlText "String"
TRectangle -> htmlText "Rectangle"
TDate -> htmlText "Date"
TNull -> htmlText "Null"
structTyToHTML :: StructType -> HTML
structTyToHTML ty =
case ty of
TArray -> htmlText "Array"
TStream -> htmlText "PDFStream"
TDictionary -> htmlText "Dictionary"
TNameTree -> htmlText "NameTree"
TNumberTree -> htmlText "NumberTree"
typeToHTML :: Type -> HTML
typeToHTML ty =
case ty of
TOr t1 t2 -> htmls [ typeToHTML t1, " or ", typeToHTML t2 ]
TPrim t mbc ->
case mbc of
Nothing -> primTyToHTML t
Just c -> htmls [ "(", primTyToHTML t, " | ", constraintToHTML c, ")" ]
TStruct t mb ->
case mb of
Nothing -> structTyToHTML t
Just l ->
htmls [ structTyToHTML t, htmlText " ",
tag "a" [ ("href", l ++ ".html") ] (htmlText l) ]
constraintToHTML :: Constraint -> HTML
constraintToHTML c =
case c of
Orc c1 c2 ->
htmls [ constraintToHTML c1, htmlText " or ", constraintToHTML c2 ]
Equals x -> htmls [ htmlText "= ", exprToHTML x ]
Interval x y -> htmls $ [ htmlText "["
, div "literal" (htmlText (show x))
, htmlText ".."
] ++
[ div "literal" (htmlText (show v))
| Just v <- [y] ] ++
[ htmlText "]" ]
IsGreaterThan e -> htmls [ htmlText "> ", exprToHTML e ]
IsLessThan e -> htmls [ htmlText "< ", exprToHTML e ]
fieldIxToHTML :: FieldIx -> HTML
fieldIxToHTML i =
case i of
ArrayIx n -> htmlText ("@" ++ show n)
FieldIx f -> htmlText ("@" ++ show f)
exprToHTML :: Expr -> HTML
exprToHTML e =
case e of
ValueOf i -> fieldIxToHTML i
ELit x -> lit x
ELitI x -> lit x
ELitR x -> div "literal"
$ htmls [ htmlText (show (numerator x))
, htmlText "/"
, htmlText (show (denominator x))
]
EBool x -> lit x
ELitStr x -> lit x
ELitName x -> lit ('/' : x)
EArr es -> htmls ( htmlText "["
: intersperse (htmlText ", ") (map exprToHTML es)
++ [ htmlText "]" ]
)
where
lit x = div "literal" $ htmlText $ show x
--------------------------------------------------------------------------------
type HTML = String
htmlText :: String -> HTML
htmlText = concatMap esc1
where
esc1 c = case c of
'&' -> "&"
'<' -> "<"
'>' -> ">"
'"' -> """
_ -> [c]
htmls :: [HTML] -> HTML
htmls = unlines
tag :: String -> [(String,String)] -> HTML -> HTML
tag t as b = tag' t as ++ b ++ concat [ "</", t, ">" ]
tag' :: String -> [(String,String)] -> HTML
tag' t as = concat [ "<", t, " ", unwords (map attr as), ">" ]
where attr (k,v) = k ++ "=" ++ show (htmlText v)
div :: String -> HTML -> HTML
div x = tag "div" [("class",x)]
| null | https://raw.githubusercontent.com/GaloisInc/daedalus/016da6b2de23747e48642f6ece79c07b436ef5d1/formats/pdf/dom-tool/src/HTML.hs | haskell | # Language OverloadedStrings #
# Language BlockArguments #
------------------------------------------------------------------------------ | # Language RecordWildCards #
module HTML where
import Prelude hiding (div)
import Data.List(intersperse)
import Data.Ratio(numerator,denominator)
import Types
specToHTML :: Spec -> HTML
specToHTML s = fieldsToHTML (sName s) (sFields s)
fieldsToHTML :: String -> [Field] -> HTML
fieldsToHTML nm fs =
tag "html" [] $ htmls
[ tag "head" [] $ tag' "link" [ ("rel","stylesheet"), ("href","style.css") ]
, tag "body" [] $ htmls $ div "heading" (htmlText nm) : map fieldToHTML2 fs
]
fieldToHTML2 :: Field -> HTML
fieldToHTML2 fld = div "field" $ htmls $
[ div "bounds" bounds
, div "key" (fieldPatHTML (fName fld))
, htmlText "::"
, typeToHTML (fType fld)
] ++
[ htmlText ", required" | fRequired fld ] ++
[ htmlText ", indirect" | fIndirect fld ] ++
[ htmlText ", value used" | fValNeeded fld ] ++
[ htmls [ htmlText ", default = ", exprToHTML x ]
| Just x <- [fDefaultValue fld] ]
where
bounds = htmls [ from, to ]
from = htmlText (show (fSince fld))
to = case fDeprecated fld of
Just x -> htmlText ("--" ++ show x)
Nothing -> htmlText ""
fieldPatHTML :: FieldPat -> HTML
fieldPatHTML fp =
case fp of
PArrayAny -> htmlText "[*]"
PArrayIx n -> htmlText ("[" ++ show n ++ "]")
PFieldName x -> htmlText x
primTyToHTML :: PrimType -> HTML
primTyToHTML ty =
case ty of
TInteger -> htmlText "Integer"
TNumber -> htmlText "Number"
TName -> htmlText "Name"
TBool -> htmlText "Bool"
TStringText -> htmlText "Text"
TStringByte -> htmlText "Bytes"
TStringAscii -> htmlText "ASCII"
TString -> htmlText "String"
TRectangle -> htmlText "Rectangle"
TDate -> htmlText "Date"
TNull -> htmlText "Null"
structTyToHTML :: StructType -> HTML
structTyToHTML ty =
case ty of
TArray -> htmlText "Array"
TStream -> htmlText "PDFStream"
TDictionary -> htmlText "Dictionary"
TNameTree -> htmlText "NameTree"
TNumberTree -> htmlText "NumberTree"
typeToHTML :: Type -> HTML
typeToHTML ty =
case ty of
TOr t1 t2 -> htmls [ typeToHTML t1, " or ", typeToHTML t2 ]
TPrim t mbc ->
case mbc of
Nothing -> primTyToHTML t
Just c -> htmls [ "(", primTyToHTML t, " | ", constraintToHTML c, ")" ]
TStruct t mb ->
case mb of
Nothing -> structTyToHTML t
Just l ->
htmls [ structTyToHTML t, htmlText " ",
tag "a" [ ("href", l ++ ".html") ] (htmlText l) ]
constraintToHTML :: Constraint -> HTML
constraintToHTML c =
case c of
Orc c1 c2 ->
htmls [ constraintToHTML c1, htmlText " or ", constraintToHTML c2 ]
Equals x -> htmls [ htmlText "= ", exprToHTML x ]
Interval x y -> htmls $ [ htmlText "["
, div "literal" (htmlText (show x))
, htmlText ".."
] ++
[ div "literal" (htmlText (show v))
| Just v <- [y] ] ++
[ htmlText "]" ]
IsGreaterThan e -> htmls [ htmlText "> ", exprToHTML e ]
IsLessThan e -> htmls [ htmlText "< ", exprToHTML e ]
fieldIxToHTML :: FieldIx -> HTML
fieldIxToHTML i =
case i of
ArrayIx n -> htmlText ("@" ++ show n)
FieldIx f -> htmlText ("@" ++ show f)
exprToHTML :: Expr -> HTML
exprToHTML e =
case e of
ValueOf i -> fieldIxToHTML i
ELit x -> lit x
ELitI x -> lit x
ELitR x -> div "literal"
$ htmls [ htmlText (show (numerator x))
, htmlText "/"
, htmlText (show (denominator x))
]
EBool x -> lit x
ELitStr x -> lit x
ELitName x -> lit ('/' : x)
EArr es -> htmls ( htmlText "["
: intersperse (htmlText ", ") (map exprToHTML es)
++ [ htmlText "]" ]
)
where
lit x = div "literal" $ htmlText $ show x
type HTML = String
htmlText :: String -> HTML
htmlText = concatMap esc1
where
esc1 c = case c of
'&' -> "&"
'<' -> "<"
'>' -> ">"
'"' -> """
_ -> [c]
htmls :: [HTML] -> HTML
htmls = unlines
tag :: String -> [(String,String)] -> HTML -> HTML
tag t as b = tag' t as ++ b ++ concat [ "</", t, ">" ]
tag' :: String -> [(String,String)] -> HTML
tag' t as = concat [ "<", t, " ", unwords (map attr as), ">" ]
where attr (k,v) = k ++ "=" ++ show (htmlText v)
div :: String -> HTML -> HTML
div x = tag "div" [("class",x)]
|
bc152eba29cc28c1b7834a198504f51d89a6fd5cd0ebe84f240e64793449c466 | hunt-framework/hunt | IndexerCore.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
-- ------------------------------------------------------------
module Holumbus.Crawler.IndexerCore
( RawDoc
, RawContexts
, RawContext
, RawCrawlerDoc
, RawWords
, RawWord
, RawTitle
, IndexCrawlerConfig
, IndexContextConfig ( .. )
, IndexerState ( .. )
, emptyIndexerState
, indexCrawlerConfig
, stdIndexer
, unionIndexerStatesM
, insertRawDocM
)
{-
    ( RawDoc
, RawContexts
, RawContext
, RawCrawlerDoc
, RawWords
, RawWord
, RawTitle
, IndexCrawlerConfig
, IndexContextConfig(..)
, IndexerState(..)
, emptyIndexerState
, indexCrawlerConfig
, stdIndexer
, unionIndexerStatesM
, insertRawDocM
)
-}
where
-- ------------------------------------------------------------
import Control.DeepSeq
import Data.Binary (Binary)
import qualified Data.Binary as B
import Data.Function.Selector
import Data.Maybe
import Holumbus.Crawler
import Prelude hiding (Word)
import Text.XML.HXT.Core
-- ------------------------------------------------------------
type RawDoc c = (RawContexts, RawTitle, Maybe c) -- c is the user defined custom info
type RawContexts = [RawContext]
type RawContext = (Context, RawWords)
type RawWords = [RawWord]
type RawWord = (Word, Position)
type RawTitle = String
type IndexCrawlerConfig i d c = CrawlerConfig (RawDoc c) (IndexerState i d c)
type IndexCrawlerState i d c = CrawlerState (IndexerState i d c)
data IndexContextConfig = IndexContextConfig
{ ixc_name :: String
, ixc_collectText :: IOSArrow XmlTree String
, ixc_textToWords :: String -> [String]
, ixc_boringWord :: String -> Bool
}
data IndexerState i d c = IndexerState
{ ixs_index :: ! i -- the index type
, ixs_documents :: ! (d c) -- the type for document descriptions
} deriving (Show)
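-- Illustration (added; not in the original source, and it assumes Context and
-- Word are string-like as in Holumbus): one crawled page typically yields a
-- 'RawDoc' of the following shape, which the crawler then folds into the
-- 'IndexerState':
--
--   exampleRawDoc :: RawDoc ()
--   exampleRawDoc =
--       ( [ ("title",   [("hello", 1), ("world", 2)])
--         , ("content", [("hello", 1), ("again", 2)])
--         ]                -- one RawContext per configured index context
--       , "Hello World"    -- RawTitle
--       , Nothing          -- optional user defined custom info
--       )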
-- ------------------------------------------------------------
{-
-- conversion to JSON of a raw doc is a bit tricky
-- the title attribute must be merged into the custom object
-- to get all attributes together, so we need this hack with addPair

newtype RawCrawlerDoc c = RCD (URI, RawDoc c)

instance (ToJSON c) => ToJSON (RawCrawlerDoc c) where
    toJSON (RCD (rawUri, (rawContexts, rawTitle, rawCustom)))
        = object
          [ "uri" .= rawUri
          , ("description", addPair ("title", toJSON rawTitle) $ toJSON rawCustom)
          , "index" .= object (map toJSONRawContext rawContexts)
          ]

toJSONRawContext :: RawContext -> Pair
toJSONRawContext (cx, ws) = T.pack cx .= toJSONRawWords ws

toJSONRawWords :: RawWords -> Value
toJSONRawWords = object . map pair . toWordPositions
    where
      pair (w, ps) = T.pack w .= ps

toWordPositions :: RawWords -> [(Word, [Position])]
toWordPositions = M.toList . foldr ins M.empty
    where
      ins (w, p) = M.insertWith (++) w [p]

addPair :: Pair -> Value -> Value
addPair (k, v) (Object m) = Object $ M.insert k v m
addPair p _ = object [p]

flushRawCrawlerDoc :: (ToJSON c) => Bool -> (LB.ByteString -> IO ()) -> c -> IO ()
flushRawCrawlerDoc pretty io d
    = io $ (if pretty then encodePretty' encConfig else encode) d
    where
      encConfig :: Config
      encConfig
          = Config { confIndent = 2
                   , confCompare
                         = keyOrder ["uri", "description", "index"]
                           `mappend`
                           compare
                   }
-- -}
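-- Sketch of the JSON this (currently disabled) instance would produce for the
-- example RawDoc above. Field order follows the 'keyOrder' used in 'encConfig';
-- the URI and values are assumptions for illustration, not output of the
-- original code:
--
--   { "uri": "http://example.org/",
--     "description": { "title": "Hello World" },
--     "index": { "title":   { "hello": [1], "world": [2] },
--                "content": { "hello": [1], "again": [2] } }
--   }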
-- ------------------------------------------------------------
instance (NFData i, NFData (d c)) => NFData (IndexerState i d c)
where
rnf IndexerState { ixs_index = i
, ixs_documents = d
} = rnf i `seq` rnf d
-- ------------------------------------------------------------
instance (Binary i, Binary (d c)) => Binary (IndexerState i d c)
where
put s = B.put (ixs_index s)
>>
B.put (ixs_documents s)
get = do
ix <- B.get
dm <- B.get
return $ IndexerState
{ ixs_index = ix
, ixs_documents = dm
}
-- ------------------------------------------------------------
instance (XmlPickler i, XmlPickler (d c)) => XmlPickler (IndexerState i d c)
where
xpickle = xpElem "index-state" $
xpWrap ( uncurry IndexerState
, \ ix -> (ixs_index ix, ixs_documents ix)
) $
xpPair xpickle xpickle
-- ------------------------------------------------------------
emptyIndexerState :: i -> d c -> IndexerState i d c
emptyIndexerState eix edm = IndexerState
{ ixs_index = eix
, ixs_documents = edm
}
-- ------------------------------------------------------------
indexCrawlerConfig' :: AccumulateDocResult ([RawContext], String, Maybe c) (IndexerState i d c)
-> MergeDocResults (IndexerState i d c)
-> SysConfig -- ^ document read options
^ the filter for deciding , whether the URI shall be processed
^ the document href collection filter , default is ' Holumbus . Crawler . Html.getHtmlReferences '
-> Maybe (IOSArrow XmlTree XmlTree) -- ^ the pre document filter, default is the this arrow
-> Maybe (IOSArrow XmlTree String) -- ^ the filter for computing the document title, default is empty string
-> Maybe (IOSArrow XmlTree c) -- ^ the filter for the customized doc info, default Nothing
-> [IndexContextConfig] -- ^ the configuration of the various index parts
-> IndexCrawlerConfig i d c -- ^ result is a crawler config
indexCrawlerConfig' insertRaw unionIndex opts followRef getHrefF preDocF titleF0 customF0 contextCs
= addSysConfig (defaultOpts >>> opts) -- install the default read options
>>>
( setS theFollowRef followRef )
>>>
( setS theProcessRefs $ fromMaybe getHtmlReferences getHrefF )
>>>
( setS thePreDocFilter $ fromMaybe checkDocumentStatus preDocF ) -- in case of errors throw away any contents
>>>
( setS theProcessDoc rawDocF ) -- rawDocF is built up by the context config, text, title and custom
>>>
enableRobotsTxt -- add the robots stuff at the end
>>> -- the filter wraps the other filters
addRobotsNoFollow
>>>
addRobotsNoIndex
$
defaultCrawlerConfig insertRaw unionIndex
-- take the default crawler config
-- and set the result combining functions
where
rawDocF = ( listA contextFs
&&&
titleF
&&&
customF
)
>>^ (\ (x3, (x2, x1)) -> (x3, x2, x1))
titleF = ( fromMaybe (constA "") titleF0 ) >. concat
customF = ( fromMaybe none customF0 ) >. listToMaybe
contextFs :: IOSArrow XmlTree RawContext
contextFs = catA . map contextF $ contextCs -- collect all contexts
contextF :: IndexContextConfig -> IOSArrow XmlTree RawContext
contextF ixc = constA (ixc_name ixc) -- the name of the raw context
&&&
( ixc_collectText ixc >. processText ) -- the list of words and positions of the collected text
where -- this arrow is deterministic, it always delivers a single pair
processText :: [String] -> RawWords
processText = concat
>>>
ixc_textToWords ixc
>>>
flip zip [1..]
>>>
filter (fst >>> ixc_boringWord ixc >>> not)
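-- Worked example (added for illustration, not in the original): with
--   ixc_textToWords = words   and   ixc_boringWord = (`elem` ["the", "a"]),
-- processText ["the quick ", "brown fox"] evaluates to
--   [("quick",2),("brown",3),("fox",4)]
-- i.e. the collected fragments are concatenated, split into words, numbered
-- from 1, and only then filtered, so the surviving words keep their
-- original positions.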
defaultOpts = withRedirect yes
>>>
withAcceptedMimeTypes ["text/html", "text/xhtml"]
>>>
withInputEncoding isoLatin1
>>>
withEncodingErrors no -- encoding errors and parser warnings are boring
>>>
withValidate no
>>>
withParseHTML yes
>>>
withWarnings no
-- ------------------------------------------------------------
stdIndexer :: ( Binary i
, Binary (d c)
, Binary c
, NFData i
, NFData (d c)
, NFData c) =>
IndexCrawlerConfig i d c -- ^ adapt configuration to special needs,
-- use id if default is ok
-> Maybe String -- ^ resume from interrupted index run with state
-- stored in file
^ start indexing with this set of uris
-> IndexerState i d c -- ^ the initial empty indexer state
-> IO (Either String (IndexCrawlerState i d c)) -- ^ result is a state consisting of the index and the map of indexed documents
stdIndexer config resumeLoc startUris eis
= do res <- execCrawler action config (initCrawlerState eis)
either (\ e -> errC "indexerCore" ["indexer failed:", e])
(\ _ -> noticeC "indexerCore" ["indexer finished"])
res
return res
where
action = do
noticeC "indexerCore" ["indexer started"]
res <- maybe (crawlDocs startUris) crawlerResume $ resumeLoc
return res
-- ------------------------------------------------------------
| null | https://raw.githubusercontent.com/hunt-framework/hunt/d692aae756b7bdfb4c99f5a3951aec12893649a8/hunt-crawler/src/Holumbus/Crawler/IndexerCore.hs | haskell | # LANGUAGE FlexibleContexts #
------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
c is the user defined custom info
the index type
the type for document descriptions
------------------------------------------------------------
conversion to JSON of a raw doc is a bit tricky
the title attribute must be merged into the custom object
to get all attributes together , so we need this hack with addPair
conversion to JSON of a raw doc is a bit tricky
the title attribute must be merged into the custom object
to get all attributes together, so we need this hack with addPair
-}
------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
^ document read options
^ the pre document filter, default is the this arrow
^ the filter for computing the document title, default is empty string
^ the filter for the cutomized doc info, default Nothing
^ the configuration of the various index parts
^ result is a crawler config
install the default read options
in case of errors throw away any contents
rawDocF is build up by the context config, text, title and custom
add the robots stuff at the end
the filter wrap the other filters
take the default crawler config
and set the result combining functions
collect all contexts
the name of the raw context
the list of words and positions of the collected text
this arrow is deterministic, it always delivers a single pair
encoding errors and parser warnings are boring
------------------------------------------------------------
^ adapt configuration to special needs,
use id if default is ok
^ resume from interrupted index run with state
stored in file
^ the initial empty indexer state
^ result is a state consisting of the index and the map of indexed documents
------------------------------------------------------------ | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
module Holumbus.Crawler.IndexerCore
( RawDoc
, RawContexts
, RawContext
, RawCrawlerDoc
, RawWords
, RawWord
, RawTitle
, IndexCrawlerConfig
, IndexContextConfig ( .. )
, IndexerState ( .. )
, emptyIndexerState
, indexCrawlerConfig
, stdIndexer
, unionIndexerStatesM
, insertRawDocM
)
( RawDoc
, RawContexts
, RawContext
, RawCrawlerDoc
, RawWords
, RawWord
, RawTitle
, IndexCrawlerConfig
, IndexContextConfig(..)
, IndexerState(..)
, emptyIndexerState
, indexCrawlerConfig
, stdIndexer
, unionIndexerStatesM
, insertRawDocM
)
-}
where
import Control.DeepSeq
import Data.Binary (Binary)
import qualified Data.Binary as B
import Data.Function.Selector
import Data.Maybe
import Holumbus.Crawler
import Prelude hiding (Word)
import Text.XML.HXT.Core
type RawContexts = [RawContext]
type RawContext = (Context, RawWords)
type RawWords = [RawWord]
type RawWord = (Word, Position)
type RawTitle = String
type IndexCrawlerConfig i d c = CrawlerConfig (RawDoc c) (IndexerState i d c)
type IndexCrawlerState i d c = CrawlerState (IndexerState i d c)
data IndexContextConfig = IndexContextConfig
{ ixc_name :: String
, ixc_collectText :: IOSArrow XmlTree String
, ixc_textToWords :: String -> [String]
, ixc_boringWord :: String -> Bool
}
data IndexerState i d c = IndexerState
} deriving (Show)
newtype RawCrawlerDoc c = RCD ( URI , RawDoc c )
instance ( ToJSON c ) = > ToJSON ( RawCrawlerDoc c ) where
toJSON ( RCD ( rawUri , ( rawContexts , rawTitle , rawCustom ) ) )
= object
[ " uri " .= rawUri
, ( " description " , addPair ( " title " , ) $ toJSON rawCustom )
, " index " .= object ( map )
]
toJSONRawContext : : RawContext - > Pair
toJSONRawContext ( cx , ws ) = T.pack cx .= toJSONRawWords ws
toJSONRawWords : : RawWords - > Value
toJSONRawWords = object . map pair . toWordPositions
where
pair ( w , ps ) = T.pack w .= ps
toWordPositions : : RawWords - > [ ( Word , [ Position ] ) ]
toWordPositions = M.toList . foldr ins M.empty
where
ins ( w , p ) = M.insertWith ( + + ) w [ p ]
addPair : : Pair - > Value - > Value
addPair ( k , v ) ( Object m ) = Object $ M.insert k v m
addPair p _ = object [ p ]
flushRawCrawlerDoc : : ( ToJSON c ) = > Bool - > ( LB.ByteString - > IO ( ) ) - > c - > IO ( )
flushRawCrawlerDoc pretty io d
= io $ ( if pretty then encodePretty ' else encode ) d
where
: : Config
= Config { confIndent = 2
, confCompare
= keyOrder [ " uri " , " description " , " index " ]
` mappend `
compare
}
newtype RawCrawlerDoc c = RCD (URI, RawDoc c)
instance (ToJSON c) => ToJSON (RawCrawlerDoc c) where
toJSON (RCD (rawUri, (rawContexts, rawTitle, rawCustom)))
= object
[ "uri" .= rawUri
, ("description", addPair ("title", toJSON rawTitle) $ toJSON rawCustom)
, "index" .= object (map toJSONRawContext rawContexts)
]
toJSONRawContext :: RawContext -> Pair
toJSONRawContext (cx, ws) = T.pack cx .= toJSONRawWords ws
toJSONRawWords :: RawWords -> Value
toJSONRawWords = object . map pair . toWordPositions
where
pair (w, ps) = T.pack w .= ps
toWordPositions :: RawWords -> [(Word, [Position])]
toWordPositions = M.toList . foldr ins M.empty
where
ins (w, p) = M.insertWith (++) w [p]
addPair :: Pair -> Value -> Value
addPair (k, v) (Object m) = Object $ M.insert k v m
addPair p _ = object [p]
flushRawCrawlerDoc :: (ToJSON c) => Bool -> (LB.ByteString -> IO ()) -> c -> IO ()
flushRawCrawlerDoc pretty io d
= io $ (if pretty then encodePretty' encConfig else encode) d
where
encConfig :: Config
encConfig
= Config { confIndent = 2
, confCompare
= keyOrder ["uri", "description", "index"]
`mappend`
compare
}
instance (NFData i, NFData (d c)) => NFData (IndexerState i d c)
where
rnf IndexerState { ixs_index = i
, ixs_documents = d
} = rnf i `seq` rnf d
instance (Binary i, Binary (d c)) => Binary (IndexerState i d c)
where
put s = B.put (ixs_index s)
>>
B.put (ixs_documents s)
get = do
ix <- B.get
dm <- B.get
return $ IndexerState
{ ixs_index = ix
, ixs_documents = dm
}
instance (XmlPickler i, XmlPickler (d c)) => XmlPickler (IndexerState i d c)
where
xpickle = xpElem "index-state" $
xpWrap ( uncurry IndexerState
, \ ix -> (ixs_index ix, ixs_documents ix)
) $
xpPair xpickle xpickle
emptyIndexerState :: i -> d c -> IndexerState i d c
emptyIndexerState eix edm = IndexerState
{ ixs_index = eix
, ixs_documents = edm
}
indexCrawlerConfig' :: AccumulateDocResult ([RawContext], String, Maybe c) (IndexerState i d c)
-> MergeDocResults (IndexerState i d c)
^ the filter for deciding , whether the URI shall be processed
^ the document href collection filter , default is ' Holumbus . Crawler . Html.getHtmlReferences '
indexCrawlerConfig' insertRaw unionIndex opts followRef getHrefF preDocF titleF0 customF0 contextCs
>>>
( setS theFollowRef followRef )
>>>
( setS theProcessRefs $ fromMaybe getHtmlReferences getHrefF )
>>>
>>>
>>>
addRobotsNoFollow
>>>
addRobotsNoIndex
$
defaultCrawlerConfig insertRaw unionIndex
where
rawDocF = ( listA contextFs
&&&
titleF
&&&
customF
)
>>^ (\ (x3, (x2, x1)) -> (x3, x2, x1))
titleF = ( fromMaybe (constA "") titleF0 ) >. concat
customF = ( fromMaybe none customF0 ) >. listToMaybe
contextFs :: IOSArrow XmlTree RawContext
contextF :: IndexContextConfig -> IOSArrow XmlTree RawContext
&&&
processText :: [String] -> RawWords
processText = concat
>>>
ixc_textToWords ixc
>>>
flip zip [1..]
>>>
filter (fst >>> ixc_boringWord ixc >>> not)
defaultOpts = withRedirect yes
>>>
withAcceptedMimeTypes ["text/html", "text/xhtml"]
>>>
withInputEncoding isoLatin1
>>>
>>>
withValidate no
>>>
withParseHTML yes
>>>
withWarnings no
stdIndexer :: ( Binary i
, Binary (d c)
, Binary c
, NFData i
, NFData (d c)
, NFData c) =>
^ start indexing with this set of uris
stdIndexer config resumeLoc startUris eis
= do res <- execCrawler action config (initCrawlerState eis)
either (\ e -> errC "indexerCore" ["indexer failed:", e])
(\ _ -> noticeC "indexerCore" ["indexer finished"])
res
return res
where
action = do
noticeC "indexerCore" ["indexer started"]
res <- maybe (crawlDocs startUris) crawlerResume $ resumeLoc
return res
|
77fe75bc7b7edad0976e8f39150c57bfe907658b763cc3261498fc3a9d0b3896 | lillo/compiler-course-unipi | microcc.ml | open Microc
type action = Parse | Type_check | Dump_llvm_ir | Compile
let[@inline] ( >> ) f g x = g (f x)
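(* Illustration (not part of the original file): [>>] is left-to-right function
   composition, so the pipelines below read as successive compiler passes.
   For example,

     let parse_and_check lexbuf =
       (Parsing.parse Scanner.next_token >> Semantic_analysis.type_check) lexbuf

   is equivalent to
   [Semantic_analysis.type_check (Parsing.parse Scanner.next_token lexbuf)]. *)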
let action_function outputfile optimize verify_module = function
| Parse ->
Parsing.parse Scanner.next_token
>> Ast.show_program
>> Printf.printf "Parsing succeded!\n\n%s\n"
| Type_check ->
Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Ast.show_program
>> Printf.printf "Type-check succeded!\n\n%s\n"
| Dump_llvm_ir ->
Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Codegen.to_llvm_module
>> (fun llmodule ->
if verify_module then Llvm_analysis.assert_valid_module llmodule;
llmodule)
>> (if optimize then Optimizer.optimize_module else Fun.id)
>> Llvm.dump_module
| Compile ->
(Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Codegen.to_llvm_module
>> (fun llmodule ->
          if verify_module then Llvm_analysis.assert_valid_module llmodule;
          llmodule)
>> if optimize then Optimizer.optimize_module else Fun.id)
>> fun llmodule ->
assert (Llvm_bitwriter.write_bitcode_file llmodule outputfile)
let handle_syntatic_error source lexeme_pos msg =
let lines = String.split_on_char '\n' source in
let line = List.nth lines (lexeme_pos.Location.line - 1) in
let prefix = String.make (lexeme_pos.Location.start_column - 1) ' ' in
let middle =
String.make
(lexeme_pos.Location.end_column - lexeme_pos.Location.start_column + 1)
'^'
in
Printf.eprintf "\n*** Error at line %d.\n%s\n%s%s\n*** %s\n\n"
lexeme_pos.Location.line line prefix middle msg
let handle_semantic_error source code_pos msg =
let lines =
String.split_on_char '\n' source
|> List.filteri (fun line _ ->
code_pos.Location.start_line - 1 <= line
&& line <= code_pos.Location.end_line - 1)
in
let length = List.length lines in
if length = 1 then
let line = List.hd lines in
let prefix = String.make (code_pos.Location.start_column - 1) ' ' in
let middle =
String.make
(code_pos.Location.end_column - code_pos.Location.start_column + 1)
'^'
in
Printf.eprintf "\n*** Error at line %d.\n%s\n%s%s\n*** %s\n\n"
code_pos.Location.start_line line prefix middle msg
else
let text = lines |> List.filteri (fun i _ -> i < 5) |> String.concat "\n" in
Printf.eprintf "\n*** Error at lines %d-%d.\n%s\n*** %s\n\n"
code_pos.Location.start_line
(code_pos.Location.start_line + 5)
text msg
let load_file filename =
let ic = open_in filename in
let n = in_channel_length ic in
let s = Bytes.create n in
really_input ic s 0 n;
close_in ic;
Bytes.to_string s
let () =
try
let action = ref Compile in
let filename = ref "" in
let outputfile = ref "a.bc" in
let optimize = ref false in
let verify = ref false in
let spec_list =
[
("-p",
Arg.Unit (fun () -> action := Parse), "Parse and print AST");
( "-t",
Arg.Unit (fun () -> action := Type_check),
"Type checks and print the result" );
( "-d",
Arg.Unit (fun () -> action := Dump_llvm_ir),
"Compile and print the generated LLVM IR" );
( "-c",
Arg.Unit (fun () -> action := Compile),
"Compile the source file (default)" );
( "-o",
Arg.Set_string outputfile,
"Place the output into file (default: a.bc)" );
( "-O",
Arg.Set optimize,
"Optimize the generated LLVM IR (default: false)" );
( "-verify",
Arg.Set verify,
"Verify the generated LLVM module (default: false)" );
]
in
let usage =
Printf.sprintf "Usage:\t%s [options] <source_file>\n" Sys.argv.(0)
in
Arg.parse spec_list (fun file -> filename := file) usage;
if String.equal !filename "" then Arg.usage spec_list usage
else
let source = load_file !filename in
let lexbuf = Lexing.from_string ~with_positions:true source in
try action_function !outputfile !optimize !verify !action lexbuf with
| Scanner.Lexing_error (pos, msg) | Parsing.Syntax_error (pos, msg) ->
handle_syntatic_error source pos msg
| Semantic_analysis.Semantic_error (pos, msg) ->
handle_semantic_error source pos msg
with Sys_error msg -> Printf.eprintf "*** Error %s ***\n" msg
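(* Example invocations (illustrative only; the flags come from [spec_list]
   above, the source file name is made up):

     microcc -p foo.mc                  parse and print the AST
     microcc -d -O foo.mc               dump the optimized LLVM IR
     microcc -c -o out.bc -verify foo.mc
*)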
| null | https://raw.githubusercontent.com/lillo/compiler-course-unipi/330349afbf72919f6ab0915c5f8f49507e9aca71/microc/microc-codegen/bin/microcc.ml | ocaml | open Microc
type action = Parse | Type_check | Dump_llvm_ir | Compile
let[@inline] ( >> ) f g x = g (f x)
let action_function outputfile optimize verify_module = function
| Parse ->
Parsing.parse Scanner.next_token
>> Ast.show_program
>> Printf.printf "Parsing succeded!\n\n%s\n"
| Type_check ->
Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Ast.show_program
>> Printf.printf "Type-check succeded!\n\n%s\n"
| Dump_llvm_ir ->
Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Codegen.to_llvm_module
>> (fun llmodule ->
if verify_module then Llvm_analysis.assert_valid_module llmodule;
llmodule)
>> (if optimize then Optimizer.optimize_module else Fun.id)
>> Llvm.dump_module
| Compile ->
(Parsing.parse Scanner.next_token
>> Semantic_analysis.type_check >> Codegen.to_llvm_module
>> (fun llmodule ->
if verify_module then Llvm_analysis.assert_valid_module llmodule;
Llvm_analysis.assert_valid_module llmodule;
llmodule)
>> if optimize then Optimizer.optimize_module else Fun.id)
>> fun llmodule ->
assert (Llvm_bitwriter.write_bitcode_file llmodule outputfile)
let handle_syntatic_error source lexeme_pos msg =
let lines = String.split_on_char '\n' source in
let line = List.nth lines (lexeme_pos.Location.line - 1) in
let prefix = String.make (lexeme_pos.Location.start_column - 1) ' ' in
let middle =
String.make
(lexeme_pos.Location.end_column - lexeme_pos.Location.start_column + 1)
'^'
in
Printf.eprintf "\n*** Error at line %d.\n%s\n%s%s\n*** %s\n\n"
lexeme_pos.Location.line line prefix middle msg
let handle_semantic_error source code_pos msg =
let lines =
String.split_on_char '\n' source
|> List.filteri (fun line _ ->
code_pos.Location.start_line - 1 <= line
&& line <= code_pos.Location.end_line - 1)
in
let length = List.length lines in
if length = 1 then
let line = List.hd lines in
let prefix = String.make (code_pos.Location.start_column - 1) ' ' in
let middle =
String.make
(code_pos.Location.end_column - code_pos.Location.start_column + 1)
'^'
in
Printf.eprintf "\n*** Error at line %d.\n%s\n%s%s\n*** %s\n\n"
code_pos.Location.start_line line prefix middle msg
else
let text = lines |> List.filteri (fun i _ -> i < 5) |> String.concat "\n" in
Printf.eprintf "\n*** Error at lines %d-%d.\n%s\n*** %s\n\n"
code_pos.Location.start_line
(code_pos.Location.start_line + 5)
text msg
let load_file filename =
let ic = open_in filename in
let n = in_channel_length ic in
let s = Bytes.create n in
really_input ic s 0 n;
close_in ic;
Bytes.to_string s
let () =
try
let action = ref Compile in
let filename = ref "" in
let outputfile = ref "a.bc" in
let optimize = ref false in
let verify = ref false in
let spec_list =
[
("-p",
Arg.Unit (fun () -> action := Parse), "Parse and print AST");
( "-t",
Arg.Unit (fun () -> action := Type_check),
"Type checks and print the result" );
( "-d",
Arg.Unit (fun () -> action := Dump_llvm_ir),
"Compile and print the generated LLVM IR" );
( "-c",
Arg.Unit (fun () -> action := Compile),
"Compile the source file (default)" );
( "-o",
Arg.Set_string outputfile,
"Place the output into file (default: a.bc)" );
( "-O",
Arg.Set optimize,
"Optimize the generated LLVM IR (default: false)" );
( "-verify",
Arg.Set verify,
"Verify the generated LLVM module (default: false)" );
]
in
let usage =
Printf.sprintf "Usage:\t%s [options] <source_file>\n" Sys.argv.(0)
in
Arg.parse spec_list (fun file -> filename := file) usage;
if String.equal !filename "" then Arg.usage spec_list usage
else
let source = load_file !filename in
let lexbuf = Lexing.from_string ~with_positions:true source in
try action_function !outputfile !optimize !verify !action lexbuf with
| Scanner.Lexing_error (pos, msg) | Parsing.Syntax_error (pos, msg) ->
handle_syntatic_error source pos msg
| Semantic_analysis.Semantic_error (pos, msg) ->
handle_semantic_error source pos msg
with Sys_error msg -> Printf.eprintf "*** Error %s ***\n" msg
|
|
88fe54deedfc78b7b00a54fd728ea0deb304ea3cf252fb98d1ee3b1aab583447 | emqx/emqx | emqx_persistent_session_backend_dummy.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_persistent_session_backend_dummy).
-include("emqx_persistent_session.hrl").
-export([
first_message_id/0,
next_message_id/1,
delete_message/1,
first_session_message/0,
next_session_message/1,
delete_session_message/1,
put_session_store/1,
delete_session_store/1,
lookup_session_store/1,
put_session_message/1,
put_message/1,
get_message/1,
ro_transaction/1
]).
first_message_id() ->
'$end_of_table'.
next_message_id(_) ->
'$end_of_table'.
-spec delete_message(binary()) -> no_return().
delete_message(_Key) ->
error(should_not_be_called).
first_session_message() ->
'$end_of_table'.
next_session_message(_Key) ->
'$end_of_table'.
delete_session_message(_Key) ->
ok.
put_session_store(#session_store{}) ->
ok.
delete_session_store(_ClientID) ->
ok.
lookup_session_store(_ClientID) ->
none.
put_session_message({_, _, _, _}) ->
ok.
put_message(_Msg) ->
ok.
-spec get_message(binary()) -> no_return().
get_message(_MsgId) ->
error(should_not_be_called).
ro_transaction(Fun) ->
Fun().
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/src/persistent_session/emqx_persistent_session_backend_dummy.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------- | Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_persistent_session_backend_dummy).
-include("emqx_persistent_session.hrl").
-export([
first_message_id/0,
next_message_id/1,
delete_message/1,
first_session_message/0,
next_session_message/1,
delete_session_message/1,
put_session_store/1,
delete_session_store/1,
lookup_session_store/1,
put_session_message/1,
put_message/1,
get_message/1,
ro_transaction/1
]).
first_message_id() ->
'$end_of_table'.
next_message_id(_) ->
'$end_of_table'.
-spec delete_message(binary()) -> no_return().
delete_message(_Key) ->
error(should_not_be_called).
first_session_message() ->
'$end_of_table'.
next_session_message(_Key) ->
'$end_of_table'.
delete_session_message(_Key) ->
ok.
put_session_store(#session_store{}) ->
ok.
delete_session_store(_ClientID) ->
ok.
lookup_session_store(_ClientID) ->
none.
put_session_message({_, _, _, _}) ->
ok.
put_message(_Msg) ->
ok.
-spec get_message(binary()) -> no_return().
get_message(_MsgId) ->
error(should_not_be_called).
ro_transaction(Fun) ->
Fun().
|
7683052fec7844d5bb1df14df908995bdaf6419864f0d2ab8af565b59d1ce4fa | AbstractMachinesLab/caramel | path.mli | (** Representation of paths *)
(** The aim of this module is to provide a solid basis to reason about file and
    directory paths inside the Dune code base. What it is not is a complete API
    for paths management that handles all the aspects of file system paths. It
    simply exposes a high-level and portable API that covers the needs of Dune.

    {1 Model of the file system}

    {2 Local paths}

    Dune sees the file system as two parts. The first part is composed of the
    source tree and the build directory. In this part, Dune doesn't know about
    symlinks and has a fully expanded view of the file system. This means that
    if the user has a symlink `src/foo` pointing to `bar`, then `src/foo/x` and
    `bar/x` are seen as two different paths.

    A path in this world is called a local path and is simply a sequence of path
    components. A path component being a string other than "." or ".." and not
    containing the path separator character ('/').

    Such a path can be rooted at the source tree root, the build directory or an
    unspecified root. All these paths are represented by values of type
    ['a Path.Local_gen.t] where ['a] denotes the root of the path.

    {2 External paths}

    The second part is the "external world". It is all the paths that live
    outside of the workspace and build directory. To be on the safe side Dune
    makes no assumption and does nothing clever with these paths.

    External paths are presented as [Path.External.t] values.

    {1 The Path.t type}

    The [Path.t] type represents all possible paths, i.e. both local and
    external paths. *)
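(* Usage sketch (added for illustration, not part of the original interface):
   the intended reading of the API below, using only values declared in this
   file. The concrete path strings are hypothetical.

     let src = Path.in_source "src/foo.ml" in
     let gen = Path.relative Path.build_dir "default/src/foo.pp.ml" in
     assert (Path.is_in_source_tree src);
     assert (Path.is_in_build_dir gen);
     match Path.descendant gen ~of_:Path.build_dir with
     | Some _ -> ()    (* gen lives under the build directory *)
     | None -> assert false
*)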
(** Relative path relative to the root tracked by the type system.
Represented as: either the root, or a '/' separated list of components other
that ".", ".." and not containing a '/'. *)
module Local_gen : Path_intf.Local_gen
module Unspecified : sig
type w = Path_intf.Unspecified.w
end
(** Relative path with unspecified root.
    Either root, or a '/' separated list of components other than ".", ".." and
not containing a '/'. *)
module Local : sig
type w = Unspecified.w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * t) option
val explode : t -> string list
end
(** In the source section of the current workspace. *)
module Source : sig
type w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val of_local : Local.t -> t
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * Local.t) option
val explode : t -> string list
(** [Source.t] does not statically forbid overlap with build directory, even
though having such paths is almost always an error. *)
val is_in_build_dir : t -> bool
val descendant : t -> of_:t -> t option
val to_local : t -> Local.t
end
module External : sig
include Path_intf.S
val initial_cwd : t
val cwd : unit -> t
val relative : t -> string -> t
val mkdir_p : ?perms:int -> t -> unit
end
module Build : sig
type w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
val append_source : t -> Source.t -> t
val append_local : t -> Local.t -> t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * Local.t) option
val explode : t -> string list
val local : t -> Local.t
val drop_build_context : t -> Source.t option
val drop_build_context_exn : t -> Source.t
(** [Source.t] here is a lie in some cases: consider when the context name
happens to be ["install"] or [".alias"]. *)
val extract_build_context : t -> (string * Source.t) option
val extract_build_context_exn : t -> string * Source.t
val extract_build_context_dir : t -> (t * Source.t) option
val extract_build_context_dir_exn : t -> t * Source.t
(** This function does the same as [extract_build_context], but has a
"righter" type. *)
val extract_first_component : t -> (string * Local.t) option
module Kind : sig
type t = private
| External of External.t
| In_source_dir of Local.t
val of_string : string -> t
end
(** set the build directory. Can only be called once and must be done before
paths are converted to strings elsewhere. *)
val set_build_dir : Kind.t -> unit
val split_sandbox_root : t -> t option * t
val of_local : Local.t -> t
val chmod : mode:int -> ?op:[ `Add | `Remove | `Set ] -> t -> unit
end
type t = private
| External of External.t
| In_source_tree of Source.t
| In_build_dir of Build.t
include Path_intf.S with type t := t
val hash : t -> int
(** [to_string_maybe_quoted t] is [maybe_quoted (to_string t)] *)
val to_string_maybe_quoted : t -> string
val root : t
val external_ : External.t -> t
val is_root : t -> bool
val is_managed : t -> bool
val relative : ?error_loc:Loc0.t -> t -> string -> t
(** Create an external path. If the argument is relative, assume it is relative
to the initial directory dune was launched in. *)
val of_filename_relative_to_initial_cwd : string -> t
(** Convert a path to an absolute filename. Must be called after the workspace
root has been set. [root] is the root directory of local paths *)
val to_absolute_filename : t -> string
(** Reach a given path [from] a directory. For example, let [p] be a path to the
file [some/dir/file] and [d] be a path to the directory [some/another/dir].
Then [reach p ~from:d] evaluates to [../../dir/file]. *)
val reach : t -> from:t -> string
(** [from] defaults to [Path.root] *)
val reach_for_running : ?from:t -> t -> string
val descendant : t -> of_:t -> t option
val is_descendant : t -> of_:t -> bool
val append_local : t -> Local.t -> t
val append_source : t -> Source.t -> t
val extend_basename : t -> suffix:string -> t
(** Extract the build context from a path. For instance, representing paths as
strings:
{[ extract_build_context "_build/blah/foo/bar" = Some ("blah", "foo/bar") ]}
It doesn't work correctly (doesn't return a sensible source path) for build
directories that are not build contexts, e.g. "_build/install" and
"_build/.aliases". *)
val extract_build_context : t -> (string * Source.t) option
val extract_build_context_exn : t -> string * Source.t
val extract_build_dir_first_component : t -> (string * Local.t) option
(** Same as [extract_build_context] but return the build context as a path:
{[
extract_build_context "_build/blah/foo/bar"
= Some ("_build/blah", "foo/bar")
]} *)
val extract_build_context_dir : t -> (t * Source.t) option
val extract_build_context_dir_maybe_sandboxed : t -> (t * Source.t) option
val extract_build_context_dir_exn : t -> t * Source.t
(** Drop the "_build/blah" prefix *)
val drop_build_context : t -> Source.t option
val drop_build_context_exn : t -> Source.t
(** Drop the "_build/blah" prefix if present, return [t] otherwise *)
val drop_optional_build_context : t -> t
val drop_optional_build_context_maybe_sandboxed : t -> t
val drop_optional_sandbox_root : t -> t
(** Drop the "_build/blah" prefix if present, return [t] if it's a source file,
otherwise fail. *)
val drop_optional_build_context_src_exn : t -> Source.t
val explode : t -> string list option
val explode_exn : t -> string list
(** The build directory *)
val build_dir : t
(** [is_in_build_dir t = is_descendant t ~of_:build_dir] *)
val is_in_build_dir : t -> bool
(** [is_in_source_tree t = is_managed t && not (is_in_build_dir t)] *)
val is_in_source_tree : t -> bool
val as_in_source_tree : t -> Source.t option
val as_in_source_tree_exn : t -> Source.t
val as_in_build_dir : t -> Build.t option
val as_in_build_dir_exn : t -> Build.t
(** [is_strict_descendant_of_build_dir t = is_in_build_dir t && t <> build_dir] *)
val is_strict_descendant_of_build_dir : t -> bool
(** Split after the first component if [t] is local *)
val split_first_component : t -> (string * t) option
val insert_after_build_dir_exn : t -> string -> t
val exists : t -> bool
val readdir_unsorted : t -> (string list, Unix.error) Result.t
val is_dir_sep : char -> bool
val is_directory : t -> bool
val is_directory_with_error : t -> (bool, string) Result.t
val is_file : t -> bool
val rmdir : t -> unit
val unlink : t -> unit
val unlink_no_err : t -> unit
val link : t -> t -> unit
val rm_rf : ?allow_external:bool -> t -> unit
val mkdir_p : ?perms:int -> t -> unit
val touch : ?create:bool -> t -> unit
val build_dir_exists : unit -> bool
val ensure_build_dir_exists : unit -> unit
val source : Source.t -> t
val build : Build.t -> t
(** paths guaranteed to be in the source directory *)
val in_source : string -> t
val of_local : Local.t -> t
(** Set the workspace root. Can only be called once and the path must be
absolute *)
val set_root : External.t -> unit
module L : sig
val relative : t -> string list -> t
end
(** Return the "local part" of a path. For local paths (in build directory or
source tree), this returns the path itself. For external paths, it returns a
path that is relative to the current directory. For example, the local part
of [/a/b] is [./a/b]. *)
val local_part : t -> Local.t
val stat : t -> Unix.stats
(* it would be nice to call this [Set.of_source_paths], but it's annoying to
change the [Set] signature because then we don't comply with [Path_intf.S] *)
val set_of_source_paths : Source.Set.t -> Set.t
val set_of_build_paths_list : Build.t list -> Set.t
val string_of_file_kind : Unix.file_kind -> string
(** [temp_dir prefix suffix] returns the name of a fresh temporary directory in
    the temporary directory. The base name of the temporary directory is formed
    by concatenating prefix, then a suitably chosen integer number, then suffix.
    The optional argument temp_dir indicates the temporary directory to use,
    defaulting to the current result of Filename.get_temp_dir_name. The
    temporary directory is created with permissions [mode], defaulting to 0700.
    The directory is guaranteed to be different from any other directory that
    existed when temp_dir was called. *)
val temp_dir : ?temp_dir:t -> ?mode:int -> string -> string -> t
(** Rename a file. [rename oldpath newpath] renames the file called oldpath,
    giving it newpath as its new name, moving it between directories if needed.
    If newpath already exists, its contents will be replaced with those of
    oldpath. *)
val rename : t -> t -> unit
(** Set permissions on the designated files. [op] is [`Set] by default, which sets
    the permissions exactly to [mode], while [`Add] will add the given [mode] to
    the current permissions and [`Remove] remove them. [path] will be stat'd in
    the `Add and `Remove case to determine the current permission, unless the
    already computed stats are passed as [stats] to save a system call. *)
val chmod :
mode:int
-> ?stats:Unix.stats option
-> ?op:[ `Add | `Remove | `Set ]
-> t
-> unit
| null | https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/vendor/stdune/path.mli | ocaml | * Representation of paths
* Relative path relative to the root tracked by the type system.
Represented as: either the root, or a '/' separated list of components other
that ".", ".." and not containing a '/'.
* Relative path with unspecified root.
Either root, or a '/' separated list of components other that ".", ".." and
not containing a '/'.
* In the source section of the current workspace.
* [Source.t] does not statically forbid overlap with build directory, even
though having such paths is almost always an error.
* [Source.t] here is a lie in some cases: consider when the context name
happens to be ["install"] or [".alias"].
* This function does the same as [extract_build_context], but has a
"righter" type.
* set the build directory. Can only be called once and must be done before
paths are converted to strings elsewhere.
* [to_string_maybe_quoted t] is [maybe_quoted (to_string t)]
* Create an external path. If the argument is relative, assume it is relative
to the initial directory dune was launched in.
* Convert a path to an absolute filename. Must be called after the workspace
root has been set. [root] is the root directory of local paths
* Reach a given path [from] a directory. For example, let [p] be a path to the
file [some/dir/file] and [d] be a path to the directory [some/another/dir].
Then [reach p ~from:d] evaluates to [../../dir/file].
* [from] defaults to [Path.root]
* Extract the build context from a path. For instance, representing paths as
strings:
{[ extract_build_context "_build/blah/foo/bar" = Some ("blah", "foo/bar") ]}
It doesn't work correctly (doesn't return a sensible source path) for build
directories that are not build contexts, e.g. "_build/install" and
"_build/.aliases".
* Same as [extract_build_context] but return the build context as a path:
{[
extract_build_context "_build/blah/foo/bar"
= Some ("_build/blah", "foo/bar")
]}
* Drop the "_build/blah" prefix
* Drop the "_build/blah" prefix if present, return [t] otherwise
* Drop the "_build/blah" prefix if present, return [t] if it's a source file,
otherwise fail.
* The build directory
* [is_in_source_tree t = is_managed t && not (is_in_build_dir t)]
* paths guaranteed to be in the source directory
* Set the workspace root. Can only be called once and the path must be
absolute
* Return the "local part" of a path. For local paths (in build directory or
source tree), this returns the path itself. For external paths, it returns a
path that is relative to the current directory. For example, the local part
of [/a/b] is [./a/b].
it would be nice to call this [Set.of_source_paths], but it's annoying to
change the [Set] signature because then we don't comply with [Path_intf.S] |
* The aim of this module is to provide a solid basis to reason about file and
directory paths inside the Dune code base . What it is not is a complete API
for paths management that handles all the aspects of file system paths . It
simply exposes a high - level and portable API that covers the needs of Dune .
{ 1 Model of the file system }
{ 2 Local paths }
sees the file system as two parts . The first part is composed of the
source tree and the build directory . In this part , Dune does n't know about
symlinks and has a fully expanded view of the file system . This means that
if the user has a symlink ` src / foo ` pointing to ` bar ` , then ` src / foo / x ` and
` bar / x ` are seen as two different paths .
A path in this world is called a local path and is simply a sequence of path
components . A path component being a string other than " . " or " .. " and not
containing the path separator character ( ' / ' ) .
Such a path can be rooted at the source tree root , the build directory or an
unspecified root . All these paths are represented by values of type
[ ' a Path . Local_gen.t ] where [ ' a ] denotes the root of the path .
{ 2 External paths }
The second part is the " external world " . It is all the paths that live
outside of the workspace and build directory . To be on the safe side Dune
makes no assumption does nothing clever with these paths .
External paths are presented as [ Path . External.t ] values.contents
{ 1 The Path.t type }
The [ Path.t ] type represents all possible paths , i.e. both local and
extenral paths .
directory paths inside the Dune code base. What it is not is a complete API
for paths management that handles all the aspects of file system paths. It
simply exposes a high-level and portable API that covers the needs of Dune.
{1 Model of the file system}
{2 Local paths}
Dune sees the file system as two parts. The first part is composed of the
source tree and the build directory. In this part, Dune doesn't know about
symlinks and has a fully expanded view of the file system. This means that
if the user has a symlink `src/foo` pointing to `bar`, then `src/foo/x` and
`bar/x` are seen as two different paths.
A path in this world is called a local path and is simply a sequence of path
components. A path component being a string other than "." or ".." and not
containing the path separator character ('/').
Such a path can be rooted at the source tree root, the build directory or an
unspecified root. All these paths are represented by values of type
['a Path.Local_gen.t] where ['a] denotes the root of the path.
{2 External paths}
The second part is the "external world". It is all the paths that live
outside of the workspace and build directory. To be on the safe side Dune
makes no assumption does nothing clever with these paths.
External paths are presented as [Path.External.t] values.contents
{1 The Path.t type}
The [Path.t] type represents all possible paths, i.e. both local and
extenral paths. *)
module Local_gen : Path_intf.Local_gen
module Unspecified : sig
type w = Path_intf.Unspecified.w
end
module Local : sig
type w = Unspecified.w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * t) option
val explode : t -> string list
end
module Source : sig
type w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val of_local : Local.t -> t
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * Local.t) option
val explode : t -> string list
val is_in_build_dir : t -> bool
val descendant : t -> of_:t -> t option
val to_local : t -> Local.t
end
module External : sig
include Path_intf.S
val initial_cwd : t
val cwd : unit -> t
val relative : t -> string -> t
val mkdir_p : ?perms:int -> t -> unit
end
module Build : sig
type w
type t = w Local_gen.t
include Path_intf.S with type t := t
val root : t
val append_source : t -> Source.t -> t
val append_local : t -> Local.t -> t
module L : sig
val relative : ?error_loc:Loc0.t -> t -> string list -> t
end
val relative : ?error_loc:Loc0.t -> t -> string -> t
val split_first_component : t -> (string * Local.t) option
val explode : t -> string list
val local : t -> Local.t
val drop_build_context : t -> Source.t option
val drop_build_context_exn : t -> Source.t
val extract_build_context : t -> (string * Source.t) option
val extract_build_context_exn : t -> string * Source.t
val extract_build_context_dir : t -> (t * Source.t) option
val extract_build_context_dir_exn : t -> t * Source.t
val extract_first_component : t -> (string * Local.t) option
module Kind : sig
type t = private
| External of External.t
| In_source_dir of Local.t
val of_string : string -> t
end
val set_build_dir : Kind.t -> unit
val split_sandbox_root : t -> t option * t
val of_local : Local.t -> t
val chmod : mode:int -> ?op:[ `Add | `Remove | `Set ] -> t -> unit
end
type t = private
| External of External.t
| In_source_tree of Source.t
| In_build_dir of Build.t
include Path_intf.S with type t := t
val hash : t -> int
val to_string_maybe_quoted : t -> string
val root : t
val external_ : External.t -> t
val is_root : t -> bool
val is_managed : t -> bool
val relative : ?error_loc:Loc0.t -> t -> string -> t
val of_filename_relative_to_initial_cwd : string -> t
val to_absolute_filename : t -> string
val reach : t -> from:t -> string
val reach_for_running : ?from:t -> t -> string
val descendant : t -> of_:t -> t option
val is_descendant : t -> of_:t -> bool
val append_local : t -> Local.t -> t
val append_source : t -> Source.t -> t
val extend_basename : t -> suffix:string -> t
val extract_build_context : t -> (string * Source.t) option
val extract_build_context_exn : t -> string * Source.t
val extract_build_dir_first_component : t -> (string * Local.t) option
val extract_build_context_dir : t -> (t * Source.t) option
val extract_build_context_dir_maybe_sandboxed : t -> (t * Source.t) option
val extract_build_context_dir_exn : t -> t * Source.t
val drop_build_context : t -> Source.t option
val drop_build_context_exn : t -> Source.t
val drop_optional_build_context : t -> t
val drop_optional_build_context_maybe_sandboxed : t -> t
val drop_optional_sandbox_root : t -> t
val drop_optional_build_context_src_exn : t -> Source.t
val explode : t -> string list option
val explode_exn : t -> string list
val build_dir : t
* [ is_in_build_dir t = is_descendant t ~of : build_dir ]
val is_in_build_dir : t -> bool
val is_in_source_tree : t -> bool
val as_in_source_tree : t -> Source.t option
val as_in_source_tree_exn : t -> Source.t
val as_in_build_dir : t -> Build.t option
val as_in_build_dir_exn : t -> Build.t
* [ is_strict_descendant_of_build_dir t = is_in_build_dir t & & t < > build_dir ]
val is_strict_descendant_of_build_dir : t -> bool
* Split after the first component if [ t ] is local
val split_first_component : t -> (string * t) option
val insert_after_build_dir_exn : t -> string -> t
val exists : t -> bool
val readdir_unsorted : t -> (string list, Unix.error) Result.t
val is_dir_sep : char -> bool
val is_directory : t -> bool
val is_directory_with_error : t -> (bool, string) Result.t
val is_file : t -> bool
val rmdir : t -> unit
val unlink : t -> unit
val unlink_no_err : t -> unit
val link : t -> t -> unit
val rm_rf : ?allow_external:bool -> t -> unit
val mkdir_p : ?perms:int -> t -> unit
val touch : ?create:bool -> t -> unit
val build_dir_exists : unit -> bool
val ensure_build_dir_exists : unit -> unit
val source : Source.t -> t
val build : Build.t -> t
val in_source : string -> t
val of_local : Local.t -> t
val set_root : External.t -> unit
module L : sig
val relative : t -> string list -> t
end
val local_part : t -> Local.t
val stat : t -> Unix.stats
val set_of_source_paths : Source.Set.t -> Set.t
val set_of_build_paths_list : Build.t list -> Set.t
val string_of_file_kind : Unix.file_kind -> string
* temp_dir prefix suffix returns the name of a fresh temporary directory in
the temporary directory . The base name of the temporary directory is formed
by concatenating prefix , then a suitably chosen integer number , then suffix .
The optional argument temp_dir indicates the temporary directory to use ,
defaulting to the current result of Filename.get_temp_dir_name . The
temporary directory is created with permissions [ mode ] , defaulting to 0700 .
The directory is guaranteed to be different from any other directory that
existed when temp_dir was called .
the temporary directory. The base name of the temporary directory is formed
by concatenating prefix, then a suitably chosen integer number, then suffix.
The optional argument temp_dir indicates the temporary directory to use,
defaulting to the current result of Filename.get_temp_dir_name. The
temporary directory is created with permissions [mode], defaulting to 0700.
The directory is guaranteed to be different from any other directory that
existed when temp_dir was called. *)
val temp_dir : ?temp_dir:t -> ?mode:int -> string -> string -> t
* Rename a file . rename renames the file called oldpath ,
giving it newpath as its new name , moving it between directories if needed .
If newpath already exists , its contents will be replaced with those of
oldpath .
giving it newpath as its new name, moving it between directories if needed.
If newpath already exists, its contents will be replaced with those of
oldpath. *)
val rename : t -> t -> unit
* Set permissions on the designed files . [ op ] is [ ` Set ] by default , which sets
the permissions exactly to [ mode ] , while [ ` Add ] will add the given [ mode ] to
the current permissions and [ ` Remove ] remove them . [ path ] will be stat'd in
the ` Add and ` Remove case to determine the current premission , unless the
already computed stats are passed as [ stats ] to save a system call .
the permissions exactly to [mode], while [`Add] will add the given [mode] to
the current permissions and [`Remove] remove them. [path] will be stat'd in
the `Add and `Remove case to determine the current premission, unless the
already computed stats are passed as [stats] to save a system call. *)
val chmod :
mode:int
-> ?stats:Unix.stats option
-> ?op:[ `Add | `Remove | `Set ]
-> t
-> unit
|
3f6fe64d55e0f3f5aff1f2a4cd3f6f716b052fd0cd25af4c7c748efdb9a48713 | seancorfield/vscode-calva-setup | remote_repl.cljs | (ns remote-repl
(:require ["vscode" :as vscode]
[promesa.core :as p]))
(defn- start-tunnel [nrepl-port portal-port label remote-server]
(let [terminal (vscode/window.createTerminal #js {:isTransient true
:name label
:message (str label " Remote REPL...")})]
(.show terminal)
(.sendText terminal (str "ssh -N"
" -L " nrepl-port ":localhost:" nrepl-port
" -L " portal-port ":localhost:" portal-port
" " remote-server))))
(defn- start-browser [portal-port]
(vscode/commands.executeCommand "simpleBrowser.show" (str ":" portal-port))
(p/do
(p/delay 2000)
(vscode/commands.executeCommand "workbench.action.moveEditorToRightGroup")
(p/delay 1000)
(vscode/commands.executeCommand "workbench.action.focusFirstEditorGroup")))
(defn- connect-repl []
(vscode/commands.executeCommand "calva.disconnect")
(vscode/commands.executeCommand "calva.connect"))
(defn repl-setup [nrepl-port portal-port label remote-server]
(start-tunnel nrepl-port portal-port label remote-server)
(p/do
(p/delay 2000)
(start-browser portal-port)
(p/delay 1000)
(connect-repl)))
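;; Example invocation (illustrative only; the ports, label and host below are
;; made up, not part of the original script):
;;
;;   (repl-setup 7888 5678 "Staging" "deploy@staging.example.com")
;;
;; which opens an SSH tunnel forwarding both ports, shows Portal in the simple
;; browser, and then triggers a Calva connect.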
| null | https://raw.githubusercontent.com/seancorfield/vscode-calva-setup/747736ef93631df1f55c8d7bed2efcdf26dfa41a/joyride/src/remote_repl.cljs | clojure |
|
7eab2fe4024bfe94a8c11f9d711dfc99e3855fc26686a587db9fc6b3b9542488 | basho/riak_cs | block_audit.erl | %% ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2015 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
-module(block_audit).
-mode(compile).
-export([main/1]).
-export([info/2, verbose/3]).
-define(SLK_TIMEOUT, 360000000). %% 100 hours
-include_lib("riak_cs/include/riak_cs.hrl").
-record(buuid, {uuid :: binary(),
seqs :: [non_neg_integer()] % sequence numbers
}).
main(Args) ->
_ = application:load(lager),
ok = application:set_env(lager, handlers, [{lager_console_backend, info}]),
ok = lager:start(),
{ok, {Options, _PlainArgs}} = getopt:parse(option_spec(), Args),
LogLevel = case proplists:get_value(debug, Options) of
0 ->
info;
_ ->
ok = lager:set_loglevel(lager_console_backend, debug),
debug
end,
debug("Log level is set to ~p", [LogLevel]),
debug("Options: ~p", [Options]),
case proplists:get_value(host, Options) of
undefined ->
getopt:usage(option_spec(), "riak-cs escript /path/to/block_audit.erl"),
halt(1);
Host ->
Port = proplists:get_value(port, Options),
debug("Connecting to Riak ~s:~B...", [Host, Port]),
case riakc_pb_socket:start_link(Host, Port) of
{ok, Pid} ->
pong = riakc_pb_socket:ping(Pid),
audit(Pid, Options),
riakc_pb_socket:stop(Pid),
                    timer:sleep(100); %% OTP-9985
{error, Reason} ->
err("Connection to Riak failed ~p", [Reason]),
halt(2)
end
end.
option_spec() ->
[
{host, $h, "host", string, "Host of Riak PB"},
{port, $p, "port", {integer, 8087}, "Port number of Riak PB"},
{bucket, $b, "bucket", string, "CS Bucket to audit, repetitions possible"},
     {output, $o, "output", {string, "maybe-orphaned-blocks"}, "Directory to output results"},
     %% {page_size, $s, "page-size", {integer, 1000}, "Specify page size for 2i listing"},
{debug, $d, "debug", {integer, 0}, "Enable debug (-dd for more verbose)"}
].
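%% Illustrative invocation (host, port, bucket and output path below are
%% example values, not taken from this file):
%%   riak-cs escript /path/to/block_audit.erl -h 127.0.0.1 -p 8087 -b mybucket -o /tmp/orphans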
err(Format, Args) ->
log(error, Format, Args).
debug(Format, Args) ->
log(debug, Format, Args).
verbose(Options, Format, Args) ->
{debug, DebugLevel} = lists:keyfind(debug, 1, Options),
case DebugLevel of
Level when 2 =< Level ->
debug(Format, Args);
_ ->
ok
end.
info(Format, Args) ->
log(info, Format, Args).
log(Level, Format, Args) ->
lager:log(Level, self(), Format, Args).
audit(Pid, Opts) ->
Buckets = case proplists:get_all_values(bucket, Opts) of
[] ->
{ok, AllBuckets} = riakc_pb_socket:list_keys(Pid, ?BUCKETS_BUCKET),
AllBuckets;
Values ->
Values
end,
info("Retrieved bucket list. There are ~p buckets, including tombstones.",
[length(Buckets)]),
info("Searching for orphaned blocks. This may take a while...", []),
log_all_maybe_orphaned_blocks(Pid, Opts, Buckets),
ok.
log_all_maybe_orphaned_blocks(_Pid, _Opts, []) ->
ok;
log_all_maybe_orphaned_blocks(Pid, Opts, [Bucket | Buckets]) ->
_ = log_maybe_orphaned_blocks(Pid, Opts, Bucket),
log_all_maybe_orphaned_blocks(Pid, Opts, Buckets).
log_maybe_orphaned_blocks(Pid, Opts, Bucket) when is_binary(Bucket) ->
log_maybe_orphaned_blocks(Pid, Opts, binary_to_list(Bucket));
log_maybe_orphaned_blocks(Pid, Opts, Bucket) ->
info("Finding Orphaned blocks for Bucket ~p", [Bucket]),
BlocksTable = list_to_atom(Bucket),
ets:new(BlocksTable, [set, named_table, public, {keypos, #buuid.uuid}]),
try
{ok, NumKeys} = cache_block_keys(Pid, Bucket, BlocksTable),
case NumKeys of
0 -> ok;
_ -> case delete_manifest_uuids(Pid, Bucket, BlocksTable) of
ok ->
write_uuids(Opts, Bucket,
BlocksTable, ets:info(BlocksTable, size));
_ ->
nop
end
end
after
catch ets:delete(BlocksTable)
end.
write_uuids(_Opts, _Bucket, _BlocksTable, 0) ->
ok;
write_uuids(Opts, Bucket, BlocksTable, _) ->
OutDir = proplists:get_value(output, Opts),
Filename = filename:join(OutDir, Bucket),
ok = filelib:ensure_dir(Filename),
{ok, File} = file:open(Filename, [write, raw, delayed_write]),
{UUIDs, Blocks} = ets:foldl(
fun(#buuid{uuid=UUID, seqs=Seqs},
{TotalUUIDs, TotalBlocks}) ->
verbose(Opts, "~s ~s ~B ~p",
[Bucket, mochihex:to_hex(UUID),
length(Seqs), Seqs]),
[ok = file:write(File,
[Bucket, $\t,
mochihex:to_hex(UUID), $\t,
integer_to_list(Seq), $\n]) ||
Seq <- lists:sort(Seqs)],
{TotalUUIDs + 1, TotalBlocks + length(Seqs)}
end, {0, 0}, BlocksTable),
ok = file:close(File),
info("Total number of UUIDs that has any orphaned blocks: ~p [count]", [UUIDs]),
info("Total number of orphaned blocks: ~p [count]", [Blocks]),
info("Orphaned Blocks written to ~p", [filename:absname(Filename)]).
cache_block_keys(Pid, Bucket, BlocksTable) ->
BlocksBucket = riak_cs_utils:to_bucket_name(blocks, Bucket),
{ok, ReqId} = riakc_pb_socket:stream_list_keys(Pid, BlocksBucket, ?SLK_TIMEOUT),
{ok, NumKeys} = receive_and_cache_blocks(ReqId, BlocksTable),
info("Logged ~p block keys to ~p~n", [NumKeys, BlocksTable]),
{ok, NumKeys}.
delete_manifest_uuids(Pid, Bucket, BlocksTable) ->
ManifestsBucket = riak_cs_utils:to_bucket_name(objects, Bucket),
    Opts = [ %% {max_results, 1000},
{start_key, <<>>},
{end_key, riak_cs_utils:big_end_key()},
{timeout, ?SLK_TIMEOUT}],
{ok, ReqID} = riakc_pb_socket:cs_bucket_fold(Pid, ManifestsBucket, Opts),
handle_manifest_fold(Pid, Bucket, BlocksTable, ReqID).
handle_manifest_fold(Pid, Bucket, BlocksTable, ReqID) ->
receive
{ReqID, {ok, Objs}} ->
[ets:delete(BlocksTable, UUID) ||
Obj <- Objs,
UUID <- get_uuids(Obj)],
handle_manifest_fold(Pid, Bucket, BlocksTable, ReqID);
{ReqID, {done, _}} ->
info("handle_manifest_fold done for bucket: ~p", [Bucket]),
ok;
Other ->
err("handle_manifest_fold error; ~p", [Other]),
error
end.
receive_and_cache_blocks(ReqId, TableName) ->
receive_and_cache_blocks(ReqId, TableName, 0).
receive_and_cache_blocks(ReqId, Table, Count) ->
receive
{ReqId, done} ->
{ok, Count};
{ReqId, {error, Reason}} ->
err("receive_and_cache_blocks/3 got error: ~p for table ~p, count: ~p."
" Returning current count.",
[Reason, Table, Count]),
{ok, Count};
{ReqId, {_, Keys}} ->
NewCount = handle_keys(Table, Count, Keys),
receive_and_cache_blocks(ReqId, Table, NewCount)
end.
handle_keys(Table, Count, Keys) ->
lists:foldl(
fun(Key, Acc) ->
{UUID, Seq} = riak_cs_lfs_utils:block_name_to_term(Key),
Rec = case ets:lookup(Table, UUID) of
[#buuid{seqs=Seqs} = B] -> B#buuid{seqs=[Seq|Seqs]};
_ -> #buuid{uuid=UUID, seqs=[Seq]}
end,
ets:insert(Table, Rec),
Acc + 1
end, Count, Keys).
get_uuids(Obj) ->
Manifests = riak_cs_manifest:manifests_from_riak_object(Obj),
BlockUUIDs = [UUID ||
{_ManiUUID, M} <- Manifests,
%% TODO: more efficient way
{UUID, _} <- riak_cs_lfs_utils:block_sequences_for_manifest(M)],
lists:usort(BlockUUIDs).
| null | https://raw.githubusercontent.com/basho/riak_cs/c0c1012d1c9c691c74c8c5d9f69d388f5047bcd2/priv/tools/internal/block_audit.erl | erlang |
eff6aa6a3017e736e09d1432d96051ea89385f71f3d097983ed0f7819d6420de | abdulapopoola/SICPBook | Ex1.07.scm | (define (sqrt x)
(sqrt-iter 1.0 x))
(define (sqrt-iter guess x)
(if (good-enough? guess x)
guess
(sqrt-iter (improve guess x) x)))
(define (good-enough? guess x)
(< (abs (- (square guess) x)) 0.001))
(define (square x) ( * x x) )
(define (improve guess x)
(average guess (/ x guess)))
(define (average x y)
(/ (+ x y) 2))
;; The current good-enough? stops as soon as the difference between the
;; square of the guess and the actual number is less than 0.001.
;; For extremely small or large numbers; a close non-accurate guess will pass this test
;; even though it is not the accurate value.
;; Also this is the reason why (sqrt 9) is not an absolute 3.0 value.
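;; Illustration (values are approximate and added for clarity, not from the
;; original text): with the first good-enough? above, (sqrt 0.0001) can stop
;; near 0.0323, because 0.0323^2 - 0.0001 is already below 0.001, even though
;; the true root is 0.01.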
;;
;; NEW implementation
(define (sqrt-iter guess x)
(if (good-enough? guess (improve guess x))
guess
(sqrt-iter (improve guess x) x)))
(define (good-enough? prevGuess nextGuess)
(< (/ (abs (- prevGuess nextGuess)) prevGuess)
1.0e-20))
;; absolute 3 value
| null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%201/1.1/Ex1.07.scm | scheme |
|
29621a89f7b207fbd793ef64d25d3b2f8f417f57850d9fe86f593d4f95746a42 | fetburner/Coq2SML | recdef.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i camlp4deps: "parsing/grammar.cma" i*)
open Term
open Namegen
open Environ
open Declarations
open Entries
open Pp
open Names
open Libnames
open Nameops
open Util
open Closure
open RedFlags
open Tacticals
open Typing
open Tacmach
open Tactics
open Nametab
open Decls
open Declare
open Decl_kinds
open Tacred
open Proof_type
open Vernacinterp
open Pfedit
open Topconstr
open Glob_term
open Pretyping
open Pretyping.Default
open Safe_typing
open Constrintern
open Hiddentac
open Equality
open Auto
open Eauto
open Genarg
let compute_renamed_type gls c =
rename_bound_vars_as_displayed (*no avoid*) [] (*no rels*) []
(pf_type_of gls c)
let qed () = Lemmas.save_named true
let defined () = Lemmas.save_named false
let pf_get_new_ids idl g =
let ids = pf_ids_of_hyps g in
List.fold_right
(fun id acc -> next_global_ident_away id (acc@ids)::acc)
idl
[]
let pf_get_new_id id g =
List.hd (pf_get_new_ids [id] g)
let h_intros l =
tclMAP h_intro l
let debug_queue = Stack.create ()
let rec print_debug_queue b e =
if not (Stack.is_empty debug_queue)
then
begin
let lmsg,goal = Stack.pop debug_queue in
if b then
msgnl (lmsg ++ (str " raised exception " ++ Errors.print e) ++ str " on goal " ++ goal)
else
begin
msgnl (str " from " ++ lmsg ++ str " on goal " ++ goal);
end;
print_debug_queue false e;
end
let do_observe_tac s tac g =
let goal = Printer.pr_goal g in
let lmsg = (str "recdef : ") ++ (str s) in
Stack.push (lmsg,goal) debug_queue;
try
let v = tac g in
ignore(Stack.pop debug_queue);
v
with reraise ->
if not (Stack.is_empty debug_queue)
then
print_debug_queue true reraise;
raise reraise
let observe_tac s tac g =
if Tacinterp.get_debug () <> Tactic_debug.DebugOff
then do_observe_tac s tac g
else tac g
let hyp_ids = List.map id_of_string
["x";"v";"k";"def";"p";"h";"n";"h'"; "anonymous"; "teq"; "rec_res";
"hspec";"heq"; "hrec"; "hex"; "teq"; "pmax";"hle"];;
let rec nthtl = function
l, 0 -> l | _::tl, n -> nthtl (tl, n-1) | [], _ -> [];;
let hyp_id n l = List.nth l n;;
let (x_id:identifier) = hyp_id 0 hyp_ids;;
let (v_id:identifier) = hyp_id 1 hyp_ids;;
let (k_id:identifier) = hyp_id 2 hyp_ids;;
let (def_id:identifier) = hyp_id 3 hyp_ids;;
let (p_id:identifier) = hyp_id 4 hyp_ids;;
let (h_id:identifier) = hyp_id 5 hyp_ids;;
let (n_id:identifier) = hyp_id 6 hyp_ids;;
let (h'_id:identifier) = hyp_id 7 hyp_ids;;
let (ano_id:identifier) = hyp_id 8 hyp_ids;;
let (rec_res_id:identifier) = hyp_id 10 hyp_ids;;
let (hspec_id:identifier) = hyp_id 11 hyp_ids;;
let (heq_id:identifier) = hyp_id 12 hyp_ids;;
let (hrec_id:identifier) = hyp_id 13 hyp_ids;;
let (hex_id:identifier) = hyp_id 14 hyp_ids;;
let (teq_id:identifier) = hyp_id 15 hyp_ids;;
let (pmax_id:identifier) = hyp_id 16 hyp_ids;;
let (hle_id:identifier) = hyp_id 17 hyp_ids;;
let message s = if Flags.is_verbose () then msgnl(str s);;
let def_of_const t =
match (kind_of_term t) with
Const sp ->
(try (match body_of_constant (Global.lookup_constant sp) with
| Some c -> Declarations.force c
| _ -> assert false)
with e when Errors.noncritical e ->
anomaly ("Cannot find definition of constant "^
(string_of_id (id_of_label (con_label sp))))
)
|_ -> assert false
let type_of_const t =
match (kind_of_term t) with
Const sp -> Typeops.type_of_constant (Global.env()) sp
|_ -> assert false
let arg_type t =
match kind_of_term (def_of_const t) with
Lambda(a,b,c) -> b
| _ -> assert false;;
let evaluable_of_global_reference r =
match r with
ConstRef sp -> EvalConstRef sp
| VarRef id -> EvalVarRef id
| _ -> assert false;;
let rank_for_arg_list h =
let predicate a b =
try List.for_all2 eq_constr a b with
Invalid_argument _ -> false in
let rec rank_aux i = function
| [] -> None
| x::tl -> if predicate h x then Some i else rank_aux (i+1) tl in
rank_aux 0;;
let rec check_not_nested f t =
match kind_of_term t with
| App(g, _) when eq_constr f g ->
errorlabstrm "recdef" (str "Nested recursive function are not allowed with Function")
| Var(_) when eq_constr t f -> errorlabstrm "recdef" (str "Nested recursive function are not allowed with Function")
| _ -> iter_constr (check_not_nested f) t
let rec (find_call_occs : int -> int -> constr -> constr ->
(constr list -> constr) * constr list list) =
fun nb_arg nb_lam f expr ->
match (kind_of_term expr) with
App (g, args) when eq_constr g f ->
if Array.length args <> nb_arg then errorlabstrm "recdef" (str "Partial application of function " ++ Printer.pr_lconstr expr ++ str " in its body is not allowed while using Function");
Array.iter (check_not_nested f) args;
(fun l -> List.hd l), [Array.to_list args]
| App (g, args) ->
let (largs: constr list) = Array.to_list args in
let rec find_aux = function
[] -> (fun x -> []), []
| a::upper_tl ->
(match find_aux upper_tl with
(cf, ((arg1::args) as args_for_upper_tl)) ->
(match find_call_occs nb_arg nb_lam f a with
cf2, (_ :: _ as other_args) ->
let rec avoid_duplicates args =
match args with
| [] -> (fun _ -> []), []
| h::tl ->
let recomb_tl, args_for_tl =
avoid_duplicates tl in
match rank_for_arg_list h args_for_upper_tl with
| None ->
(fun l -> List.hd l::recomb_tl(List.tl l)),
h::args_for_tl
| Some i ->
(fun l -> List.nth l (i+List.length args_for_tl)::
recomb_tl l),
args_for_tl
in
let recombine, other_args' =
avoid_duplicates other_args in
let len1 = List.length other_args' in
(fun l -> cf2 (recombine l)::cf(nthtl(l,len1))),
other_args'@args_for_upper_tl
| _, [] -> (fun x -> a::cf x), args_for_upper_tl)
| _, [] ->
(match find_call_occs nb_arg nb_lam f a with
cf, (arg1::args) -> (fun l -> cf l::upper_tl), (arg1::args)
| _, [] -> (fun x -> a::upper_tl), [])) in
begin
match (find_aux largs) with
cf, [] -> (fun l -> mkApp(g, args)), []
| cf, args ->
(fun l -> mkApp (g, Array.of_list (cf l))), args
end
| Rel(v) -> if v > nb_lam then error "find_call_occs : Rel" else ((fun l -> expr),[])
| Var(_) when eq_constr expr f -> errorlabstrm "recdef" (str "Partial application of function " ++ Printer.pr_lconstr expr ++ str " in its body is not allowed while using Function")
| Var(id) -> (fun l -> expr), []
| Meta(_) -> error "Found a metavariable. Can not treat such a term"
| Evar(_) -> error "Found an evar. Can not treat such a term"
| Sort(_) -> (fun l -> expr), []
| Cast(b,_,_) -> find_call_occs nb_arg nb_lam f b
| Prod(na,t,b) ->
error "Found a product. Can not treat such a term"
| Lambda(na,t,b) ->
begin
match find_call_occs nb_arg (succ nb_lam) f b with
              | _, [] -> (* Lambdas are authorized as long as they do not contain
                            recursive calls *)
(fun l -> expr),[]
| _ -> error "Found a lambda which body contains a recursive call. Such terms are not allowed"
end
| LetIn(na,v,t,b) ->
begin
match find_call_occs nb_arg nb_lam f v, find_call_occs nb_arg (succ nb_lam) f b with
| (_,[]),(_,[]) ->
((fun l -> expr), [])
| (_,[]),(cf,(_::_ as l)) ->
((fun l -> mkLetIn(na,v,t,cf l)),l)
| (cf,(_::_ as l)),(_,[]) ->
((fun l -> mkLetIn(na,cf l,t,b)), l)
| _ -> error "Found a letin with recursive calls in both variable value and body. Such terms are not allowed."
end
| Const(_) -> (fun l -> expr), []
| Ind(_) -> (fun l -> expr), []
| Construct (_, _) -> (fun l -> expr), []
| Case(i,t,a,r) ->
(match find_call_occs nb_arg nb_lam f a with
cf, (arg1::args) -> (fun l -> mkCase(i, t, (cf l), r)),(arg1::args)
| _ -> (fun l -> expr),[])
| Fix(_) -> error "Found a local fixpoint. Can not treat such a term"
| CoFix(_) -> error "Found a local cofixpoint : CoFix";;
let coq_constant s =
Coqlib.gen_constant_in_modules "RecursiveDefinition"
(Coqlib.init_modules @ Coqlib.arith_modules) s;;
let coq_base_constant s =
Coqlib.gen_constant_in_modules "RecursiveDefinition"
(Coqlib.init_modules @ [["Coq";"Arith";"Le"];["Coq";"Arith";"Lt"]]) s;;
let constant sl s =
constr_of_global
(locate (make_qualid(Names.make_dirpath
(List.map id_of_string (List.rev sl)))
(id_of_string s)));;
let find_reference sl s =
(locate (make_qualid(Names.make_dirpath
(List.map id_of_string (List.rev sl)))
(id_of_string s)));;
let le_lt_SS = function () -> (constant ["Recdef"] "le_lt_SS")
let le_lt_n_Sm = function () -> (coq_base_constant "le_lt_n_Sm")
let le_trans = function () -> (coq_base_constant "le_trans")
let le_lt_trans = function () -> (coq_base_constant "le_lt_trans")
let lt_S_n = function () -> (coq_base_constant "lt_S_n")
let le_n = function () -> (coq_base_constant "le_n")
let refl_equal = function () -> (coq_base_constant "eq_refl")
let eq = function () -> (coq_base_constant "eq")
let ex = function () -> (coq_base_constant "ex")
let coq_sig_ref = function () -> (find_reference ["Coq";"Init";"Specif"] "sig")
let coq_sig = function () -> (coq_base_constant "sig")
let coq_O = function () -> (coq_base_constant "O")
let coq_S = function () -> (coq_base_constant "S")
let gt_antirefl = function () -> (coq_constant "gt_irrefl")
let lt_n_O = function () -> (coq_base_constant "lt_n_O")
let lt_n_Sn = function () -> (coq_base_constant "lt_n_Sn")
let f_equal = function () -> (coq_constant "f_equal")
let well_founded_induction = function () -> (coq_constant "well_founded_induction")
let well_founded = function () -> (coq_constant "well_founded")
let acc_rel = function () -> (coq_constant "Acc")
let acc_inv_id = function () -> (coq_constant "Acc_inv")
let well_founded_ltof = function () -> (Coqlib.coq_constant "" ["Arith";"Wf_nat"] "well_founded_ltof")
let iter_ref = function () -> (try find_reference ["Recdef"] "iter" with Not_found -> error "module Recdef not loaded")
let max_ref = function () -> (find_reference ["Recdef"] "max")
let iter = function () -> (constr_of_global (delayed_force iter_ref))
let max_constr = function () -> (constr_of_global (delayed_force max_ref))
let ltof_ref = function () -> (find_reference ["Coq";"Arith";"Wf_nat"] "ltof")
let coq_conj = function () -> find_reference ["Coq";"Init";"Logic"] "conj"
(* These are specific to experiments in nat with lt as well_founded_relation, *)
(* but this should be made more general. *)
let nat = function () -> (coq_base_constant "nat")
let lt = function () -> (coq_base_constant "lt")
(* This is simply an implementation of the case_eq tactic.  This code
   should be replaced with the tactic defined in Ltac in Init/Tactics.v *)
let mkCaseEq a : tactic =
(fun g ->
let type_of_a = pf_type_of g a in
tclTHENLIST
[h_generalize [mkApp(delayed_force refl_equal, [| type_of_a; a|])];
(fun g2 ->
change_in_concl None
(pattern_occs [((false,[1]), a)] (pf_env g2) Evd.empty (pf_concl g2))
g2);
simplest_case a] g);;
(* This is like the previous one except that it also rewrites on all
   hypotheses except the ones given in the first argument.  All the
   modified hypotheses are generalized in the process and should be
   introduced back later; the result is the pair of the tactic and the
   list of hypotheses that have been generalized and cleared. *)
let mkDestructEq :
identifier list -> constr -> goal sigma -> tactic * identifier list =
fun not_on_hyp expr g ->
let hyps = pf_hyps g in
let to_revert =
Util.map_succeed
(fun (id,_,t) ->
if List.mem id not_on_hyp || not (Termops.occur_term expr t)
then failwith "is_expr_context";
id) hyps in
let to_revert_constr = List.rev_map mkVar to_revert in
let type_of_expr = pf_type_of g expr in
let new_hyps = mkApp(delayed_force refl_equal, [|type_of_expr; expr|])::
to_revert_constr in
tclTHENLIST
[h_generalize new_hyps;
(fun g2 ->
change_in_concl None
(pattern_occs [((false,[1]), expr)] (pf_env g2) Evd.empty (pf_concl g2)) g2);
simplest_case expr], to_revert
let rec mk_intros_and_continue thin_intros (extra_eqn:bool)
cont_function (eqs:constr list) nb_lam (expr:constr) g =
observe_tac "mk_intros_and_continue" (
let finalize () = if extra_eqn then
let teq = pf_get_new_id teq_id g in
tclTHENLIST
[ h_intro teq;
thin thin_intros;
h_intros thin_intros;
tclMAP
(fun eq -> tclTRY (Equality.general_rewrite_in true Termops.all_occurrences true (* deps proofs also: *) true teq eq false))
(List.rev eqs);
(fun g1 ->
let ty_teq = pf_type_of g1 (mkVar teq) in
let teq_lhs,teq_rhs =
let _,args =
try destApp ty_teq
with e when Errors.noncritical e ->
Pp.msgnl (Printer.pr_goal g1 ++ fnl () ++ pr_id teq ++ str ":" ++ Printer.pr_lconstr ty_teq); assert false
in
args.(1),args.(2)
in
cont_function (mkVar teq::eqs) (Termops.replace_term teq_lhs teq_rhs expr) g1
)
]
else
tclTHENSEQ[
thin thin_intros;
h_intros thin_intros;
cont_function eqs expr
]
in
if nb_lam = 0
then finalize ()
else
match kind_of_term expr with
| Lambda (n, _, b) ->
let n1 =
match n with
Name x -> x
| Anonymous -> ano_id
in
let new_n = pf_get_new_id n1 g in
tclTHEN (h_intro new_n)
(mk_intros_and_continue thin_intros extra_eqn cont_function eqs
(pred nb_lam) (subst1 (mkVar new_n) b))
| _ ->
assert false) g
(* finalize () *)
let const_of_ref = function
ConstRef kn -> kn
| _ -> anomaly "ConstRef expected"
let simpl_iter clause =
reduce
(Lazy
{rBeta=true;rIota=true;rZeta= true; rDelta=false;
rConst = [ EvalConstRef (const_of_ref (delayed_force iter_ref))]})
(* (Simpl (Some ([],mkConst (const_of_ref (delayed_force iter_ref))))) *)
clause
(* The boolean value is_mes expresses that the termination is expressed
using a measure function instead of a well-founded relation. *)
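(* For illustration only (a vernacular-level sketch, not from this file):
   [is_mes] corresponds to definitions written with a measure, e.g.
     Function f (l : list A) {measure length l} : ... := ...
   while the false case corresponds to an explicit well-founded relation, e.g.
     Function f (n : nat) {wf lt n} : ... := ...  *)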
let tclUSER tac is_mes l g =
let clear_tac =
match l with
| None -> h_clear true []
| Some l -> tclMAP (fun id -> tclTRY (h_clear false [id])) (List.rev l)
in
tclTHENSEQ
[
clear_tac;
if is_mes
then tclTHEN
(unfold_in_concl [(Termops.all_occurrences, evaluable_of_global_reference
(delayed_force ltof_ref))])
tac
else tac
]
g
let list_rewrite (rev:bool) (eqs: constr list) =
tclREPEAT
(List.fold_right
(fun eq i -> tclORELSE (rewriteLR eq) i)
(if rev then (List.rev eqs) else eqs) (tclFAIL 0 (mt())));;
let base_leaf_terminate (func:global_reference) eqs expr =
(* let _ = msgnl (str "entering base_leaf") in *)
(fun g ->
let k',h =
match pf_get_new_ids [k_id;h_id] g with
[k';h] -> k',h
| _ -> assert false
in
tclTHENLIST
[observe_tac "first split" (split (ImplicitBindings [expr]));
observe_tac "second split"
(split (ImplicitBindings [delayed_force coq_O]));
observe_tac "intro k" (h_intro k');
observe_tac "case on k"
(tclTHENS (simplest_case (mkVar k'))
[(tclTHEN (h_intro h)
(tclTHEN (simplest_elim (mkApp (delayed_force gt_antirefl,
[| delayed_force coq_O |])))
default_auto)); tclIDTAC ]);
intros;
simpl_iter onConcl;
unfold_constr func;
list_rewrite true eqs;
default_auto] g);;
(* The function is given as the first argument to the functional, followed
   by other Lambdas and Case expressions ...
   Used to recover the function f from the functional. *)
let get_f foncl =
match (kind_of_term (def_of_const foncl)) with
Lambda (Name f, _, _) -> f
|_ -> error "la fonctionnelle est mal definie";;
let rec compute_le_proofs = function
[] -> assumption
| a::tl ->
tclORELSE assumption
(tclTHENS
(fun g ->
let le_trans = delayed_force le_trans in
let t_le_trans = compute_renamed_type g le_trans in
let m_id =
let _,_,t = destProd t_le_trans in
let na,_,_ = destProd t in
Nameops.out_name na
in
apply_with_bindings
(le_trans,
ExplicitBindings[dummy_loc,NamedHyp m_id,a])
g)
[compute_le_proofs tl;
tclORELSE (apply (delayed_force le_n)) assumption])
let make_lt_proof pmax le_proof =
tclTHENS
(fun g ->
let le_lt_trans = delayed_force le_lt_trans in
let t_le_lt_trans = compute_renamed_type g le_lt_trans in
let m_id =
let _,_,t = destProd t_le_lt_trans in
let na,_,_ = destProd t in
Nameops.out_name na
in
apply_with_bindings
(le_lt_trans,
ExplicitBindings[dummy_loc,NamedHyp m_id, pmax]) g)
[observe_tac "compute_le_proofs" (compute_le_proofs le_proof);
tclTHENLIST[observe_tac "lt_S_n" (apply (delayed_force lt_S_n)); default_full_auto]];;
let rec list_cond_rewrite k def pmax cond_eqs le_proofs =
match cond_eqs with
[] -> tclIDTAC
| eq::eqs ->
(fun g ->
let t_eq = compute_renamed_type g (mkVar eq) in
let k_id,def_id =
let k_na,_,t = destProd t_eq in
let _,_,t = destProd t in
let def_na,_,_ = destProd t in
Nameops.out_name k_na,Nameops.out_name def_na
in
tclTHENS
(general_rewrite_bindings false Termops.all_occurrences
(* dep proofs also: *) true true
(mkVar eq,
ExplicitBindings[dummy_loc, NamedHyp k_id, mkVar k;
dummy_loc, NamedHyp def_id, mkVar def]) false)
[list_cond_rewrite k def pmax eqs le_proofs;
observe_tac "make_lt_proof" (make_lt_proof pmax le_proofs)] g
)
let rec introduce_all_equalities func eqs values specs bound le_proofs
cond_eqs =
match specs with
[] ->
fun g ->
let ids = pf_ids_of_hyps g in
let s_max = mkApp(delayed_force coq_S, [|bound|]) in
let k = next_ident_away_in_goal k_id ids in
let ids = k::ids in
let h' = next_ident_away_in_goal (h'_id) ids in
let ids = h'::ids in
let def = next_ident_away_in_goal def_id ids in
tclTHENLIST
[observe_tac "introduce_all_equalities_final split" (split (ImplicitBindings [s_max]));
observe_tac "introduce_all_equalities_final intro k" (h_intro k);
tclTHENS
(observe_tac "introduce_all_equalities_final case k" (simplest_case (mkVar k)))
[
tclTHENLIST[h_intro h';
simplest_elim(mkApp(delayed_force lt_n_O,[|s_max|]));
default_full_auto];
tclIDTAC
];
observe_tac "clearing k " (clear [k]);
observe_tac "intros k h' def" (h_intros [k;h';def]);
observe_tac "simple_iter" (simpl_iter onConcl);
observe_tac "unfold functional"
(unfold_in_concl[((true,[1]),evaluable_of_global_reference func)]);
observe_tac "rewriting equations"
(list_rewrite true eqs);
observe_tac ("cond rewrite "^(string_of_id k)) (list_cond_rewrite k def bound cond_eqs le_proofs);
observe_tac "refl equal" (apply (delayed_force refl_equal))] g
| spec1::specs ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let p = next_ident_away_in_goal p_id ids in
let ids = p::ids in
let pmax = next_ident_away_in_goal pmax_id ids in
let ids = pmax::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
let hle2 = next_ident_away_in_goal hle_id ids in
let ids = hle2::ids in
let heq = next_ident_away_in_goal heq_id ids in
tclTHENLIST
[simplest_elim (mkVar spec1);
list_rewrite true eqs;
h_intros [p; heq];
simplest_elim (mkApp(delayed_force max_constr, [| bound; mkVar p|]));
h_intros [pmax; hle1; hle2];
introduce_all_equalities func eqs values specs
(mkVar pmax) ((mkVar pmax)::le_proofs)
(heq::cond_eqs)] g;;
let string_match s =
if String.length s < 3 then failwith "string_match";
try
for i = 0 to 3 do
if String.get s i <> String.get "Acc_" i then failwith "string_match"
done;
with Invalid_argument _ -> failwith "string_match"
let retrieve_acc_var g =
(* I don't like this version .... *)
let hyps = pf_ids_of_hyps g in
map_succeed
(fun id -> string_match (string_of_id id);id)
hyps
let rec introduce_all_values concl_tac is_mes acc_inv func context_fn
eqs hrec args values specs =
(match args with
[] ->
tclTHENLIST
[observe_tac "split" (split(ImplicitBindings
[context_fn (List.map mkVar (List.rev values))]));
observe_tac "introduce_all_equalities" (introduce_all_equalities func eqs
(List.rev values) (List.rev specs) (delayed_force coq_O) [] [])]
| arg::args ->
(fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let rec_res = next_ident_away_in_goal rec_res_id ids in
let ids = rec_res::ids in
let hspec = next_ident_away_in_goal hspec_id ids in
let tac =
observe_tac "introduce_all_values" (
introduce_all_values concl_tac is_mes acc_inv func context_fn eqs
hrec args
(rec_res::values)(hspec::specs)) in
(tclTHENS
(observe_tac "elim h_rec"
(simplest_elim (mkApp(mkVar hrec, Array.of_list arg)))
)
[tclTHENLIST [h_intros [rec_res; hspec];
tac];
(tclTHENS
(observe_tac "acc_inv" (apply (Lazy.force acc_inv)))
              [(* tclTHEN (tclTRY(list_rewrite true eqs)) *)
(observe_tac "h_assumption" h_assumption)
;
tclTHENLIST
[
tclTRY(list_rewrite true eqs);
observe_tac "user proof"
(fun g ->
tclUSER
concl_tac
is_mes
(Some (hrec::hspec::(retrieve_acc_var g)@specs))
g
)
]
]
)
]) g)
)
let rec_leaf_terminate nb_arg f_constr concl_tac is_mes acc_inv hrec (func:global_reference) eqs expr =
match find_call_occs nb_arg 0 f_constr expr with
| context_fn, args ->
observe_tac "introduce_all_values"
(introduce_all_values concl_tac is_mes acc_inv func context_fn eqs hrec args [] [])
let proveterminate nb_arg rec_arg_id is_mes acc_inv (hrec:identifier)
(f_constr:constr) (func:global_reference) base_leaf rec_leaf =
let rec proveterminate (eqs:constr list) (expr:constr) =
try
(* let _ = msgnl (str "entering proveterminate") in *)
let v =
match (kind_of_term expr) with
Case (ci, t, a, l) ->
(match find_call_occs nb_arg 0 f_constr a with
_,[] ->
(fun g ->
let destruct_tac, rev_to_thin_intro =
mkDestructEq rec_arg_id a g in
tclTHENS destruct_tac
(list_map_i
(fun i -> mk_intros_and_continue
(List.rev rev_to_thin_intro)
true
proveterminate
eqs
ci.ci_cstr_ndecls.(i))
0 (Array.to_list l)) g)
| _, _::_ ->
(match find_call_occs nb_arg 0 f_constr expr with
_,[] -> observe_tac "base_leaf" (base_leaf func eqs expr)
| _, _:: _ ->
observe_tac "rec_leaf"
(rec_leaf is_mes acc_inv hrec func eqs expr)))
| _ ->
(match find_call_occs nb_arg 0 f_constr expr with
_,[] ->
(try observe_tac "base_leaf" (base_leaf func eqs expr)
with reraise ->
(msgerrnl (str "failure in base case");raise reraise ))
| _, _::_ ->
observe_tac "rec_leaf"
(rec_leaf is_mes acc_inv hrec func eqs expr)) in
v
with reraise ->
begin
msgerrnl(str "failure in proveterminate");
raise reraise
end
in
proveterminate
let hyp_terminates nb_args func =
let a_arrow_b = arg_type (constr_of_global func) in
let rev_args,b = decompose_prod_n nb_args a_arrow_b in
let left =
mkApp(delayed_force iter,
Array.of_list
(lift 5 a_arrow_b:: mkRel 3::
constr_of_global func::mkRel 1::
List.rev (list_map_i (fun i _ -> mkRel (6+i)) 0 rev_args)
)
)
in
let right = mkRel 5 in
let equality = mkApp(delayed_force eq, [|lift 5 b; left; right|]) in
let result = (mkProd ((Name def_id) , lift 4 a_arrow_b, equality)) in
let cond = mkApp(delayed_force lt, [|(mkRel 2); (mkRel 1)|]) in
let nb_iter =
mkApp(delayed_force ex,
[|delayed_force nat;
(mkLambda
(Name
p_id,
delayed_force nat,
(mkProd (Name k_id, delayed_force nat,
mkArrow cond result))))|])in
let value = mkApp(delayed_force coq_sig,
[|b;
(mkLambda (Name v_id, b, nb_iter))|]) in
compose_prod rev_args value
let tclUSER_if_not_mes concl_tac is_mes names_to_suppress =
if is_mes
then tclCOMPLETE (h_simplest_apply (delayed_force well_founded_ltof))
else tclUSER concl_tac is_mes names_to_suppress
let termination_proof_header is_mes input_type ids args_id relation
rec_arg_num rec_arg_id tac wf_tac : tactic =
begin
fun g ->
let nargs = List.length args_id in
let pre_rec_args =
List.rev_map
mkVar (fst (list_chop (rec_arg_num - 1) args_id))
in
let relation = substl pre_rec_args relation in
let input_type = substl pre_rec_args input_type in
let wf_thm = next_ident_away_in_goal (id_of_string ("wf_R")) ids in
let wf_rec_arg =
next_ident_away_in_goal
(id_of_string ("Acc_"^(string_of_id rec_arg_id)))
(wf_thm::ids)
in
let hrec = next_ident_away_in_goal hrec_id
(wf_rec_arg::wf_thm::ids) in
let acc_inv =
lazy (
mkApp (
delayed_force acc_inv_id,
[|input_type;relation;mkVar rec_arg_id|]
)
)
in
tclTHEN
(h_intros args_id)
(tclTHENS
(observe_tac
"first assert"
(assert_tac
(Name wf_rec_arg)
(mkApp (delayed_force acc_rel,
[|input_type;relation;mkVar rec_arg_id|])
)
)
)
[
(* accessibility proof *)
tclTHENS
(observe_tac
"second assert"
(assert_tac
(Name wf_thm)
(mkApp (delayed_force well_founded,[|input_type;relation|]))
)
)
[
(* interactive proof that the relation is well_founded *)
observe_tac "wf_tac" (wf_tac is_mes (Some args_id));
(* this gives the accessibility argument *)
observe_tac
"apply wf_thm"
(h_simplest_apply (mkApp(mkVar wf_thm,[|mkVar rec_arg_id|]))
)
]
;
(* rest of the proof *)
tclTHENSEQ
[observe_tac "generalize"
(onNLastHypsId (nargs+1)
(tclMAP (fun id ->
tclTHEN (h_generalize [mkVar id]) (h_clear false [id]))
))
;
observe_tac "h_fix" (h_fix (Some hrec) (nargs+1));
h_intros args_id;
h_intro wf_rec_arg;
observe_tac "tac" (tac wf_rec_arg hrec acc_inv)
]
]
) g
end
let rec instantiate_lambda t l =
match l with
| [] -> t
| a::l ->
let (bound_name, _, body) = destLambda t in
instantiate_lambda (subst1 a body) l
;;
let whole_start (concl_tac:tactic) nb_args is_mes func input_type relation rec_arg_num : tactic =
begin
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let func_body = (def_of_const (constr_of_global func)) in
let (f_name, _, body1) = destLambda func_body in
let f_id =
match f_name with
| Name f_id -> next_ident_away_in_goal f_id ids
| Anonymous -> anomaly "Anonymous function"
in
let n_names_types,_ = decompose_lam_n nb_args body1 in
let n_ids,ids =
List.fold_left
(fun (n_ids,ids) (n_name,_) ->
match n_name with
| Name id ->
let n_id = next_ident_away_in_goal id ids in
n_id::n_ids,n_id::ids
| _ -> anomaly "anonymous argument"
)
([],(f_id::ids))
n_names_types
in
let rec_arg_id = List.nth n_ids (rec_arg_num - 1) in
let expr = instantiate_lambda func_body (mkVar f_id::(List.map mkVar n_ids)) in
termination_proof_header
is_mes
input_type
ids
n_ids
relation
rec_arg_num
rec_arg_id
(fun rec_arg_id hrec acc_inv g ->
(proveterminate
nb_args
[rec_arg_id]
is_mes
acc_inv
hrec
(mkVar f_id)
func
base_leaf_terminate
(rec_leaf_terminate nb_args (mkVar f_id) concl_tac)
[]
expr
)
g
)
(tclUSER_if_not_mes concl_tac)
g
end
let get_current_subgoals_types () =
let p = Proof_global.give_me_the_proof () in
let { Evd.it=sgs ; sigma=sigma } = Proof.V82.subgoals p in
List.map (Goal.V82.abstract_type sigma) sgs
let build_and_l l =
let and_constr = Coqlib.build_coq_and () in
let conj_constr = coq_conj () in
let mk_and p1 p2 =
Term.mkApp(and_constr,[|p1;p2|]) in
let rec is_well_founded t =
match kind_of_term t with
| Prod(_,_,t') -> is_well_founded t'
| App(_,_) ->
let (f,_) = decompose_app t in
eq_constr f (well_founded ())
| _ -> false
in
let compare t1 t2 =
let b1,b2= is_well_founded t1,is_well_founded t2 in
if (b1&&b2) || not (b1 || b2) then 0
else if b1 && not b2 then 1 else -1
in
let l = List.sort compare l in
let rec f = function
| [] -> failwith "empty list of subgoals!"
| [p] -> p,tclIDTAC,1
| p1::pl ->
let c,tac,nb = f pl in
mk_and p1 c,
tclTHENS
(apply (constr_of_global conj_constr))
[tclIDTAC;
tac
],nb+1
in f l
let is_rec_res id =
let rec_res_name = string_of_id rec_res_id in
let id_name = string_of_id id in
try
String.sub id_name 0 (String.length rec_res_name) = rec_res_name
with e when Errors.noncritical e -> false
let clear_goals =
let rec clear_goal t =
match kind_of_term t with
| Prod(Name id as na,t',b) ->
let b' = clear_goal b in
if noccurn 1 b' && (is_rec_res id)
then Termops.pop b'
else if b' == b then t
else mkProd(na,t',b')
| _ -> map_constr clear_goal t
in
List.map clear_goal
let build_new_goal_type () =
let sub_gls_types = get_current_subgoals_types () in
(* Pp.msgnl (str "sub_gls_types1 := " ++ Util.prlist_with_sep (fun () -> Pp.fnl () ++ Pp.fnl ()) Printer.pr_lconstr sub_gls_types); *)
let sub_gls_types = clear_goals sub_gls_types in
(* Pp.msgnl (str "sub_gls_types2 := " ++ Util.prlist_with_sep (fun () -> Pp.fnl () ++ Pp.fnl ()) Printer.pr_lconstr sub_gls_types); *)
let res = build_and_l sub_gls_types in
res
let is_opaque_constant c =
let cb = Global.lookup_constant c in
match cb.Declarations.const_body with
| Declarations.OpaqueDef _ -> true
| Declarations.Undef _ -> true
| Declarations.Def _ -> false
let open_new_goal (build_proof:tactic -> tactic -> unit) using_lemmas ref_ goal_name (gls_type,decompose_and_tac,nb_goal) =
(* Pp.msgnl (str "gls_type := " ++ Printer.pr_lconstr gls_type); *)
let current_proof_name = get_current_proof_name () in
let name = match goal_name with
| Some s -> s
| None ->
try (add_suffix current_proof_name "_subproof")
with e when Errors.noncritical e ->
anomaly "open_new_goal with an unamed theorem"
in
let sign = initialize_named_context_for_proof () in
let na = next_global_ident_away name [] in
if Termops.occur_existential gls_type then
Util.error "\"abstract\" cannot handle existentials";
let hook _ _ =
let opacity =
let na_ref = Libnames.Ident (dummy_loc,na) in
let na_global = Nametab.global na_ref in
match na_global with
ConstRef c -> is_opaque_constant c
| _ -> anomaly "equation_lemma: not a constant"
in
let lemma = mkConst (Lib.make_con na) in
ref_ := Some lemma ;
let lid = ref [] in
let h_num = ref (-1) in
Flags.silently Vernacentries.interp (Vernacexpr.VernacAbort None);
build_proof
( fun gls ->
let hid = next_ident_away_in_goal h_id (pf_ids_of_hyps gls) in
tclTHENSEQ
[
h_generalize [lemma];
h_intro hid;
(fun g ->
let ids = pf_ids_of_hyps g in
tclTHEN
(Elim.h_decompose_and (mkVar hid))
(fun g ->
let ids' = pf_ids_of_hyps g in
lid := List.rev (list_subtract ids' ids);
if !lid = [] then lid := [hid];
tclIDTAC g
)
g
);
] gls)
(fun g ->
match kind_of_term (pf_concl g) with
| App(f,_) when eq_constr f (well_founded ()) ->
Auto.h_auto None [] (Some []) g
| _ ->
incr h_num;
(observe_tac "finishing using"
(
tclCOMPLETE(
tclFIRST[
tclTHEN
(eapply_with_bindings (mkVar (List.nth !lid !h_num), NoBindings))
e_assumption;
Eauto.eauto_with_bases
(true,5)
[Evd.empty,delayed_force refl_equal]
[Auto.Hint_db.empty empty_transparent_state false]
]
)
)
)
g)
;
Lemmas.save_named opacity;
in
start_proof
na
(Decl_kinds.Global, Decl_kinds.Proof Decl_kinds.Lemma)
sign
gls_type
hook ;
if Indfun_common.is_strict_tcc ()
then
by (tclIDTAC)
else
begin
by (
fun g ->
tclTHEN
(decompose_and_tac)
(tclORELSE
(tclFIRST
(List.map
(fun c ->
tclTHENSEQ
[intros;
h_simplest_apply (interp_constr Evd.empty (Global.env()) c);
tclCOMPLETE Auto.default_auto
]
)
using_lemmas)
) tclIDTAC)
g)
end;
try
(* raises UserError _ if the proof is complete *)
if Flags.is_verbose () then (pp (Printer.pr_open_subgoals()))
with UserError _ ->
defined ()
;;
let com_terminate
tcc_lemma_name
tcc_lemma_ref
is_mes
fonctional_ref
input_type
relation
rec_arg_num
thm_name using_lemmas
nb_args
hook =
let start_proof (tac_start:tactic) (tac_end:tactic) =
let (evmap, env) = Lemmas.get_current_context() in
start_proof thm_name
(Global, Proof Lemma) (Environ.named_context_val env)
(hyp_terminates nb_args fonctional_ref) hook;
by (observe_tac "starting_tac" tac_start);
by (observe_tac "whole_start" (whole_start tac_end nb_args is_mes fonctional_ref
input_type relation rec_arg_num ))
in
start_proof tclIDTAC tclIDTAC;
try
let new_goal_type = build_new_goal_type () in
open_new_goal start_proof using_lemmas tcc_lemma_ref
(Some tcc_lemma_name)
(new_goal_type);
with Failure "empty list of subgoals!" ->
(* a non recursive function declared with measure ! *)
defined ()
let ind_of_ref = function
| IndRef (ind,i) -> (ind,i)
| _ -> anomaly "IndRef expected"
let (value_f:constr list -> global_reference -> constr) =
fun al fterm ->
let d0 = dummy_loc in
let rev_x_id_l =
(
List.fold_left
(fun x_id_l _ ->
let x_id = next_ident_away_in_goal x_id x_id_l in
x_id::x_id_l
)
[]
al
)
in
let context = List.map
(fun (x, c) -> Name x, None, c) (List.combine rev_x_id_l (List.rev al))
in
let env = Environ.push_rel_context context (Global.env ()) in
let glob_body =
GCases
(d0,RegularStyle,None,
[GApp(d0, GRef(d0,fterm), List.rev_map (fun x_id -> GVar(d0, x_id)) rev_x_id_l),
(Anonymous,None)],
[d0, [v_id], [PatCstr(d0,(ind_of_ref
(delayed_force coq_sig_ref),1),
[PatVar(d0, Name v_id);
PatVar(d0, Anonymous)],
Anonymous)],
GVar(d0,v_id)])
in
let body = understand Evd.empty env glob_body in
it_mkLambda_or_LetIn body context
let (declare_fun : identifier -> logical_kind -> constr -> global_reference) =
fun f_id kind value ->
let ce = {const_entry_body = value;
const_entry_secctx = None;
const_entry_type = None;
const_entry_opaque = false } in
ConstRef(declare_constant f_id (DefinitionEntry ce, kind));;
let (declare_f : identifier -> logical_kind -> constr list -> global_reference -> global_reference) =
fun f_id kind input_type fterm_ref ->
declare_fun f_id kind (value_f input_type fterm_ref);;
let rec n_x_id ids n =
if n = 0 then []
else let x = next_ident_away_in_goal x_id ids in
x::n_x_id (x::ids) (n-1);;
let start_equation (f:global_reference) (term_f:global_reference)
(cont_tactic:identifier list -> tactic) g =
let ids = pf_ids_of_hyps g in
let terminate_constr = constr_of_global term_f in
let nargs = nb_prod (type_of_const terminate_constr) in
let x = n_x_id ids nargs in
tclTHENLIST [
h_intros x;
unfold_in_concl [(Termops.all_occurrences, evaluable_of_global_reference f)];
observe_tac "simplest_case"
(simplest_case (mkApp (terminate_constr,
Array.of_list (List.map mkVar x))));
observe_tac "prove_eq" (cont_tactic x)] g;;
let base_leaf_eq func eqs f_id g =
let ids = pf_ids_of_hyps g in
let k = next_ident_away_in_goal k_id ids in
let p = next_ident_away_in_goal p_id (k::ids) in
let v = next_ident_away_in_goal v_id (p::k::ids) in
let heq = next_ident_away_in_goal heq_id (v::p::k::ids) in
let heq1 = next_ident_away_in_goal heq_id (heq::v::p::k::ids) in
let hex = next_ident_away_in_goal hex_id (heq1::heq::v::p::k::ids) in
tclTHENLIST [
h_intros [v; hex];
simplest_elim (mkVar hex);
h_intros [p;heq1];
tclTRY
(rewriteRL
(mkApp(mkVar heq1,
[|mkApp (delayed_force coq_S, [|mkVar p|]);
mkApp(delayed_force lt_n_Sn, [|mkVar p|]); f_id|])));
simpl_iter onConcl;
tclTRY (unfold_in_concl [((true,[1]), evaluable_of_global_reference func)]);
observe_tac "list_revrite" (list_rewrite true eqs);
apply (delayed_force refl_equal)] g;;
let f_S t = mkApp(delayed_force coq_S, [|t|]);;
let rec introduce_all_values_eq cont_tac functional termine
f p heq1 pmax bounds le_proofs eqs ids =
function
[] ->
let heq2 = next_ident_away_in_goal heq_id ids in
tclTHENLIST
[pose_proof (Name heq2)
(mkApp(mkVar heq1, [|f_S(f_S(mkVar pmax))|]));
simpl_iter (onHyp heq2);
unfold_in_hyp [((true,[1]), evaluable_of_global_reference
(global_of_constr functional))]
(heq2, Termops.InHyp);
tclTHENS
(fun gls ->
let t_eq = compute_renamed_type gls (mkVar heq2) in
let def_id =
let _,_,t = destProd t_eq in let def_na,_,_ = destProd t in
Nameops.out_name def_na
in
observe_tac "rewrite heq" (general_rewrite_bindings false Termops.all_occurrences
true (* dep proofs also: *) true (mkVar heq2,
ExplicitBindings[dummy_loc,NamedHyp def_id,
f]) false) gls)
[tclTHENLIST
[observe_tac "list_rewrite" (list_rewrite true eqs);
cont_tac pmax le_proofs];
tclTHENLIST[apply (delayed_force le_lt_SS);
compute_le_proofs le_proofs]]]
| arg::args ->
let v' = next_ident_away_in_goal v_id ids in
let ids = v'::ids in
let hex' = next_ident_away_in_goal hex_id ids in
let ids = hex'::ids in
let p' = next_ident_away_in_goal p_id ids in
let ids = p'::ids in
let new_pmax = next_ident_away_in_goal pmax_id ids in
let ids = pmax::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
let hle2 = next_ident_away_in_goal hle_id ids in
let ids = hle2::ids in
let heq = next_ident_away_in_goal heq_id ids in
let ids = heq::ids in
let heq2 = next_ident_away_in_goal heq_id ids in
let ids = heq2::ids in
tclTHENLIST
[mkCaseEq(mkApp(termine, Array.of_list arg));
h_intros [v'; hex'];
simplest_elim(mkVar hex');
h_intros [p'];
simplest_elim(mkApp(delayed_force max_constr, [|mkVar pmax;
mkVar p'|]));
h_intros [new_pmax;hle1;hle2];
introduce_all_values_eq
(fun pmax' le_proofs'->
tclTHENLIST
[cont_tac pmax' le_proofs';
h_intros [heq;heq2];
observe_tac ("rewriteRL " ^ (string_of_id heq2))
(tclTRY (rewriteLR (mkVar heq2)));
tclTRY (tclTHENS
( fun g ->
let t_eq = compute_renamed_type g (mkVar heq) in
let k_id,def_id =
let k_na,_,t = destProd t_eq in
let _,_,t = destProd t in
let def_na,_,_ = destProd t in
Nameops.out_name k_na,Nameops.out_name def_na
in
let c_b = (mkVar heq,
ExplicitBindings
[dummy_loc, NamedHyp k_id,
f_S(mkVar pmax');
dummy_loc, NamedHyp def_id, f])
in
observe_tac "general_rewrite_bindings" ( (general_rewrite_bindings false Termops.all_occurrences true (* dep proofs also: *) true
c_b false))
g
)
[tclIDTAC;
tclTHENLIST
[apply (delayed_force le_lt_n_Sm);
compute_le_proofs le_proofs']])])
functional termine f p heq1 new_pmax
(p'::bounds)((mkVar pmax)::le_proofs) eqs
(heq2::heq::hle2::hle1::new_pmax::p'::hex'::v'::ids) args]
let rec_leaf_eq termine f ids functional eqs expr fn args =
let p = next_ident_away_in_goal p_id ids in
let ids = p::ids in
let v = next_ident_away_in_goal v_id ids in
let ids = v::ids in
let hex = next_ident_away_in_goal hex_id ids in
let ids = hex::ids in
let heq1 = next_ident_away_in_goal heq_id ids in
let ids = heq1::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
tclTHENLIST
[observe_tac "intros v hex" (h_intros [v;hex]);
simplest_elim (mkVar hex);
h_intros [p;heq1];
h_generalize [mkApp(delayed_force le_n,[|mkVar p|])];
h_intros [hle1];
observe_tac "introduce_all_values_eq" (introduce_all_values_eq
(fun _ _ -> tclIDTAC)
functional termine f p heq1 p [] [] eqs ids args);
observe_tac "failing here" (apply (delayed_force refl_equal))]
let rec prove_eq nb_arg (termine:constr) (f:constr)(functional:global_reference)
(eqs:constr list) (expr:constr) =
observe_tac "prove_eq" (match kind_of_term expr with
Case(ci,t,a,l) ->
(match find_call_occs nb_arg 0 f a with
_,[] ->
(fun g ->
let destruct_tac,rev_to_thin_intro = mkDestructEq [] a g in
tclTHENS
destruct_tac
(list_map_i
(fun i -> mk_intros_and_continue
(List.rev rev_to_thin_intro) true
(prove_eq nb_arg termine f functional)
eqs ci.ci_cstr_ndecls.(i))
0 (Array.to_list l)) g)
| _,_::_ ->
(match find_call_occs nb_arg 0 f expr with
_,[] -> observe_tac "base_leaf_eq(1)" (base_leaf_eq functional eqs f)
| fn,args ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
observe_tac "rec_leaf_eq" (rec_leaf_eq termine f ids
(constr_of_global functional)
eqs expr fn args) g))
| _ ->
(match find_call_occs nb_arg 0 f expr with
_,[] -> observe_tac "base_leaf_eq(2)" ( base_leaf_eq functional eqs f)
| fn,args ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
observe_tac "rec_leaf_eq" (rec_leaf_eq
termine f ids (constr_of_global functional)
eqs expr fn args) g));;
let (com_eqn : int -> identifier ->
global_reference -> global_reference -> global_reference
-> constr -> unit) =
fun nb_arg eq_name functional_ref f_ref terminate_ref equation_lemma_type ->
let opacity =
match terminate_ref with
| ConstRef c -> is_opaque_constant c
| _ -> anomaly "terminate_lemma: not a constant"
in
let (evmap, env) = Lemmas.get_current_context() in
let f_constr = (constr_of_global f_ref) in
let equation_lemma_type = subst1 f_constr equation_lemma_type in
(start_proof eq_name (Global, Proof Lemma)
(Environ.named_context_val env) equation_lemma_type (fun _ _ -> ());
by
(start_equation f_ref terminate_ref
(fun x ->
prove_eq nb_arg
(constr_of_global terminate_ref)
f_constr
functional_ref
[]
(instantiate_lambda
(def_of_const (constr_of_global functional_ref))
(f_constr::List.map mkVar x)
)
)
);
(* (try Vernacentries.interp (Vernacexpr.VernacShow Vernacexpr.ShowProof) with _ -> ());
   Vernacentries.interp (Vernacexpr.VernacShow Vernacexpr.ShowScript); *)
Flags.silently (fun () -> Lemmas.save_named opacity) () ;
(* Pp.msgnl (str "eqn finished"); *)
);;
let nf_zeta env =
Reductionops.clos_norm_flags (Closure.RedFlags.mkflags [Closure.RedFlags.fZETA])
env
Evd.empty
let nf_betaiotazeta = (* Reductionops.local_strong Reductionops.whd_betaiotazeta *)
let clos_norm_flags flgs env sigma t =
Closure.norm_val (Closure.create_clos_infos flgs env) (Closure.inject (Reductionops.nf_evar sigma t)) in
clos_norm_flags Closure.betaiotazeta Environ.empty_env Evd.empty
let recursive_definition is_mes function_name rec_impls type_of_f r rec_arg_num eq
generate_induction_principle using_lemmas : unit =
let previous_label = Lib.current_command_label () in
let function_type = interp_constr Evd.empty (Global.env()) type_of_f in
let env = push_named (function_name,None,function_type) (Global.env()) in
Pp.msgnl ( str " function type : = " + + Printer.pr_lconstr function_type ) ;
let equation_lemma_type =
nf_betaiotazeta
(interp_gen (OfType None) Evd.empty env ~impls:rec_impls eq)
in
Pp.msgnl ( str " lemma type : = " + + Printer.pr_lconstr equation_lemma_type + + fnl ( ) ) ;
let res_vars,eq' = decompose_prod equation_lemma_type in
let env_eq' = Environ.push_rel_context (List.map (fun (x,y) -> (x,None,y)) res_vars) env in
let eq' = nf_zeta env_eq' eq' in
let res =
Pp.msgnl ( str " res_var : = " + + Printer.pr_lconstr_env ( push_rel_context ( List.map ( function ( x , t ) - > ( x , None , t ) ) res_vars ) env ) eq ' ) ;
Pp.msgnl ( str " rec_arg_num : = " + + str ( string_of_int rec_arg_num ) ) ;
Pp.msgnl ( str " eq ' : = " + + str ( string_of_int rec_arg_num ) ) ;
match kind_of_term eq' with
| App(e,[|_;_;eq_fix|]) ->
mkLambda (Name function_name,function_type,subst_var function_name (compose_lam res_vars eq_fix))
| _ -> failwith "Recursive Definition (res not eq)"
in
let pre_rec_args,function_type_before_rec_arg = decompose_prod_n (rec_arg_num - 1) function_type in
let (_, rec_arg_type, _) = destProd function_type_before_rec_arg in
let arg_types = List.rev_map snd (fst (decompose_prod_n (List.length res_vars) function_type)) in
let equation_id = add_suffix function_name "_equation" in
let functional_id = add_suffix function_name "_F" in
let term_id = add_suffix function_name "_terminate" in
let functional_ref = declare_fun functional_id (IsDefinition Decl_kinds.Definition) res in
let env_with_pre_rec_args = push_rel_context(List.map (function (x,t) -> (x,None,t)) pre_rec_args) env in
let relation =
interp_constr
Evd.empty
env_with_pre_rec_args
r
in
let tcc_lemma_name = add_suffix function_name "_tcc" in
let tcc_lemma_constr = ref None in
let _ = Pp.msgnl ( str " relation : = " + + Printer.pr_lconstr_env env_with_pre_rec_args relation ) in
let hook _ _ =
let term_ref = Nametab.locate (qualid_of_ident term_id) in
let f_ref = declare_f function_name (IsProof Lemma) arg_types term_ref in
let _ = Table.extraction_inline true [Ident (dummy_loc,term_id)] in
message " start second proof " ;
let stop = ref false in
begin
try com_eqn (List.length res_vars) equation_id functional_ref f_ref term_ref (subst_var function_name equation_lemma_type)
with e when Errors.noncritical e ->
begin
if Tacinterp.get_debug () <> Tactic_debug.DebugOff
then pperrnl (str "Cannot create equation Lemma " ++ Errors.print e)
else anomaly "Cannot create equation Lemma"
;
stop := true;
end
end;
if not !stop
then
let eq_ref = Nametab.locate (qualid_of_ident equation_id ) in
let f_ref = destConst (constr_of_global f_ref)
and functional_ref = destConst (constr_of_global functional_ref)
and eq_ref = destConst (constr_of_global eq_ref) in
generate_induction_principle f_ref tcc_lemma_constr
functional_ref eq_ref rec_arg_num rec_arg_type (nb_prod res) relation;
if Flags.is_verbose ()
then msgnl (h 1 (Ppconstr.pr_id function_name ++
spc () ++ str"is defined" )++ fnl () ++
h 1 (Ppconstr.pr_id equation_id ++
spc () ++ str"is defined" )
)
in
try
com_terminate
tcc_lemma_name
tcc_lemma_constr
is_mes functional_ref
rec_arg_type
relation rec_arg_num
term_id
using_lemmas
(List.length res_vars)
hook
with reraise ->
begin
(try ignore (Backtrack.backto previous_label)
with e when Errors.noncritical e -> ());
(* anomaly "Cannot create termination Lemma" *)
raise reraise
end
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/plugins/funind/recdef.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i camlp4deps: "parsing/grammar.cma" i
no avoid
no rels
Lambda are authorized as long as they do not contain
recursives calls
but this should be made more general.
deps proofs also:
finalize ()
(Simpl (Some ([],mkConst (const_of_ref (delayed_force iter_ref)))))
The boolean value is_mes expresses that the termination is expressed
using a measure function instead of a well-founded relation.
let _ = msgnl (str "entering base_leaf") in
dep proofs also:
accesibility proof
interactive proof that the relation is well_founded
this gives the accessibility argument
rest of the proof
a non recursive function declared with measure !
dep proofs also:
dep proofs also:
Reductionops.local_strong Reductionops.whd_betaiotazeta
anomaly "Cannot create termination Lemma" | v * The Coq Proof Assistant / The Coq Development Team
(* INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014 *)
open Term
open Namegen
open Environ
open Declarations
open Entries
open Pp
open Names
open Libnames
open Nameops
open Util
open Closure
open RedFlags
open Tacticals
open Typing
open Tacmach
open Tactics
open Nametab
open Decls
open Declare
open Decl_kinds
open Tacred
open Proof_type
open Vernacinterp
open Pfedit
open Topconstr
open Glob_term
open Pretyping
open Pretyping.Default
open Safe_typing
open Constrintern
open Hiddentac
open Equality
open Auto
open Eauto
open Genarg
let compute_renamed_type gls c =
(pf_type_of gls c)
let qed () = Lemmas.save_named true
let defined () = Lemmas.save_named false
let pf_get_new_ids idl g =
let ids = pf_ids_of_hyps g in
List.fold_right
(fun id acc -> next_global_ident_away id (acc@ids)::acc)
idl
[]
let pf_get_new_id id g =
List.hd (pf_get_new_ids [id] g)
let h_intros l =
tclMAP h_intro l
let debug_queue = Stack.create ()
let rec print_debug_queue b e =
if not (Stack.is_empty debug_queue)
then
begin
let lmsg,goal = Stack.pop debug_queue in
if b then
msgnl (lmsg ++ (str " raised exception " ++ Errors.print e) ++ str " on goal " ++ goal)
else
begin
msgnl (str " from " ++ lmsg ++ str " on goal " ++ goal);
end;
print_debug_queue false e;
end
let do_observe_tac s tac g =
let goal = Printer.pr_goal g in
let lmsg = (str "recdef : ") ++ (str s) in
Stack.push (lmsg,goal) debug_queue;
try
let v = tac g in
ignore(Stack.pop debug_queue);
v
with reraise ->
if not (Stack.is_empty debug_queue)
then
print_debug_queue true reraise;
raise reraise
let observe_tac s tac g =
if Tacinterp.get_debug () <> Tactic_debug.DebugOff
then do_observe_tac s tac g
else tac g
let hyp_ids = List.map id_of_string
["x";"v";"k";"def";"p";"h";"n";"h'"; "anonymous"; "teq"; "rec_res";
"hspec";"heq"; "hrec"; "hex"; "teq"; "pmax";"hle"];;
let rec nthtl = function
l, 0 -> l | _::tl, n -> nthtl (tl, n-1) | [], _ -> [];;
let hyp_id n l = List.nth l n;;
let (x_id:identifier) = hyp_id 0 hyp_ids;;
let (v_id:identifier) = hyp_id 1 hyp_ids;;
let (k_id:identifier) = hyp_id 2 hyp_ids;;
let (def_id:identifier) = hyp_id 3 hyp_ids;;
let (p_id:identifier) = hyp_id 4 hyp_ids;;
let (h_id:identifier) = hyp_id 5 hyp_ids;;
let (n_id:identifier) = hyp_id 6 hyp_ids;;
let (h'_id:identifier) = hyp_id 7 hyp_ids;;
let (ano_id:identifier) = hyp_id 8 hyp_ids;;
let (rec_res_id:identifier) = hyp_id 10 hyp_ids;;
let (hspec_id:identifier) = hyp_id 11 hyp_ids;;
let (heq_id:identifier) = hyp_id 12 hyp_ids;;
let (hrec_id:identifier) = hyp_id 13 hyp_ids;;
let (hex_id:identifier) = hyp_id 14 hyp_ids;;
let (teq_id:identifier) = hyp_id 15 hyp_ids;;
let (pmax_id:identifier) = hyp_id 16 hyp_ids;;
let (hle_id:identifier) = hyp_id 17 hyp_ids;;
let message s = if Flags.is_verbose () then msgnl(str s);;
let def_of_const t =
match (kind_of_term t) with
Const sp ->
(try (match body_of_constant (Global.lookup_constant sp) with
| Some c -> Declarations.force c
| _ -> assert false)
with e when Errors.noncritical e ->
anomaly ("Cannot find definition of constant "^
(string_of_id (id_of_label (con_label sp))))
)
|_ -> assert false
let type_of_const t =
match (kind_of_term t) with
Const sp -> Typeops.type_of_constant (Global.env()) sp
|_ -> assert false
let arg_type t =
match kind_of_term (def_of_const t) with
Lambda(a,b,c) -> b
| _ -> assert false;;
let evaluable_of_global_reference r =
match r with
ConstRef sp -> EvalConstRef sp
| VarRef id -> EvalVarRef id
| _ -> assert false;;
let rank_for_arg_list h =
let predicate a b =
try List.for_all2 eq_constr a b with
Invalid_argument _ -> false in
let rec rank_aux i = function
| [] -> None
| x::tl -> if predicate h x then Some i else rank_aux (i+1) tl in
rank_aux 0;;
let rec check_not_nested f t =
match kind_of_term t with
| App(g, _) when eq_constr f g ->
errorlabstrm "recdef" (str "Nested recursive function are not allowed with Function")
| Var(_) when eq_constr t f -> errorlabstrm "recdef" (str "Nested recursive function are not allowed with Function")
| _ -> iter_constr (check_not_nested f) t
let rec (find_call_occs : int -> int -> constr -> constr ->
(constr list -> constr) * constr list list) =
fun nb_arg nb_lam f expr ->
match (kind_of_term expr) with
App (g, args) when eq_constr g f ->
if Array.length args <> nb_arg then errorlabstrm "recdef" (str "Partial application of function " ++ Printer.pr_lconstr expr ++ str " in its body is not allowed while using Function");
Array.iter (check_not_nested f) args;
(fun l -> List.hd l), [Array.to_list args]
| App (g, args) ->
let (largs: constr list) = Array.to_list args in
let rec find_aux = function
[] -> (fun x -> []), []
| a::upper_tl ->
(match find_aux upper_tl with
(cf, ((arg1::args) as args_for_upper_tl)) ->
(match find_call_occs nb_arg nb_lam f a with
cf2, (_ :: _ as other_args) ->
let rec avoid_duplicates args =
match args with
| [] -> (fun _ -> []), []
| h::tl ->
let recomb_tl, args_for_tl =
avoid_duplicates tl in
match rank_for_arg_list h args_for_upper_tl with
| None ->
(fun l -> List.hd l::recomb_tl(List.tl l)),
h::args_for_tl
| Some i ->
(fun l -> List.nth l (i+List.length args_for_tl)::
recomb_tl l),
args_for_tl
in
let recombine, other_args' =
avoid_duplicates other_args in
let len1 = List.length other_args' in
(fun l -> cf2 (recombine l)::cf(nthtl(l,len1))),
other_args'@args_for_upper_tl
| _, [] -> (fun x -> a::cf x), args_for_upper_tl)
| _, [] ->
(match find_call_occs nb_arg nb_lam f a with
cf, (arg1::args) -> (fun l -> cf l::upper_tl), (arg1::args)
| _, [] -> (fun x -> a::upper_tl), [])) in
begin
match (find_aux largs) with
cf, [] -> (fun l -> mkApp(g, args)), []
| cf, args ->
(fun l -> mkApp (g, Array.of_list (cf l))), args
end
| Rel(v) -> if v > nb_lam then error "find_call_occs : Rel" else ((fun l -> expr),[])
| Var(_) when eq_constr expr f -> errorlabstrm "recdef" (str "Partial application of function " ++ Printer.pr_lconstr expr ++ str " in its body is not allowed while using Function")
| Var(id) -> (fun l -> expr), []
| Meta(_) -> error "Found a metavariable. Can not treat such a term"
| Evar(_) -> error "Found an evar. Can not treat such a term"
| Sort(_) -> (fun l -> expr), []
| Cast(b,_,_) -> find_call_occs nb_arg nb_lam f b
| Prod(na,t,b) ->
error "Found a product. Can not treat such a term"
| Lambda(na,t,b) ->
begin
match find_call_occs nb_arg (succ nb_lam) f b with
_, [] -> (fun l -> expr),[]
| _ -> error "Found a lambda which body contains a recursive call. Such terms are not allowed"
end
| LetIn(na,v,t,b) ->
begin
match find_call_occs nb_arg nb_lam f v, find_call_occs nb_arg (succ nb_lam) f b with
| (_,[]),(_,[]) ->
((fun l -> expr), [])
| (_,[]),(cf,(_::_ as l)) ->
((fun l -> mkLetIn(na,v,t,cf l)),l)
| (cf,(_::_ as l)),(_,[]) ->
((fun l -> mkLetIn(na,cf l,t,b)), l)
| _ -> error "Found a letin with recursive calls in both variable value and body. Such terms are not allowed."
end
| Const(_) -> (fun l -> expr), []
| Ind(_) -> (fun l -> expr), []
| Construct (_, _) -> (fun l -> expr), []
| Case(i,t,a,r) ->
(match find_call_occs nb_arg nb_lam f a with
cf, (arg1::args) -> (fun l -> mkCase(i, t, (cf l), r)),(arg1::args)
| _ -> (fun l -> expr),[])
| Fix(_) -> error "Found a local fixpoint. Can not treat such a term"
| CoFix(_) -> error "Found a local cofixpoint : CoFix";;
let coq_constant s =
Coqlib.gen_constant_in_modules "RecursiveDefinition"
(Coqlib.init_modules @ Coqlib.arith_modules) s;;
let coq_base_constant s =
Coqlib.gen_constant_in_modules "RecursiveDefinition"
(Coqlib.init_modules @ [["Coq";"Arith";"Le"];["Coq";"Arith";"Lt"]]) s;;
let constant sl s =
constr_of_global
(locate (make_qualid(Names.make_dirpath
(List.map id_of_string (List.rev sl)))
(id_of_string s)));;
let find_reference sl s =
(locate (make_qualid(Names.make_dirpath
(List.map id_of_string (List.rev sl)))
(id_of_string s)));;
let le_lt_SS = function () -> (constant ["Recdef"] "le_lt_SS")
let le_lt_n_Sm = function () -> (coq_base_constant "le_lt_n_Sm")
let le_trans = function () -> (coq_base_constant "le_trans")
let le_lt_trans = function () -> (coq_base_constant "le_lt_trans")
let lt_S_n = function () -> (coq_base_constant "lt_S_n")
let le_n = function () -> (coq_base_constant "le_n")
let refl_equal = function () -> (coq_base_constant "eq_refl")
let eq = function () -> (coq_base_constant "eq")
let ex = function () -> (coq_base_constant "ex")
let coq_sig_ref = function () -> (find_reference ["Coq";"Init";"Specif"] "sig")
let coq_sig = function () -> (coq_base_constant "sig")
let coq_O = function () -> (coq_base_constant "O")
let coq_S = function () -> (coq_base_constant "S")
let gt_antirefl = function () -> (coq_constant "gt_irrefl")
let lt_n_O = function () -> (coq_base_constant "lt_n_O")
let lt_n_Sn = function () -> (coq_base_constant "lt_n_Sn")
let f_equal = function () -> (coq_constant "f_equal")
let well_founded_induction = function () -> (coq_constant "well_founded_induction")
let well_founded = function () -> (coq_constant "well_founded")
let acc_rel = function () -> (coq_constant "Acc")
let acc_inv_id = function () -> (coq_constant "Acc_inv")
let well_founded_ltof = function () -> (Coqlib.coq_constant "" ["Arith";"Wf_nat"] "well_founded_ltof")
let iter_ref = function () -> (try find_reference ["Recdef"] "iter" with Not_found -> error "module Recdef not loaded")
let max_ref = function () -> (find_reference ["Recdef"] "max")
let iter = function () -> (constr_of_global (delayed_force iter_ref))
let max_constr = function () -> (constr_of_global (delayed_force max_ref))
let ltof_ref = function () -> (find_reference ["Coq";"Arith";"Wf_nat"] "ltof")
let coq_conj = function () -> find_reference ["Coq";"Init";"Logic"] "conj"
(* These are specific to experiments in with lt as well_founded_relation, *)
let nat = function () -> (coq_base_constant "nat")
let lt = function () -> (coq_base_constant "lt")
(* This is simply an implementation of the case_eq tactic.  this code
   should be replaced with the tactic defined in Ltac in Init/Tactics.v *)
let mkCaseEq a : tactic =
(fun g ->
let type_of_a = pf_type_of g a in
tclTHENLIST
[h_generalize [mkApp(delayed_force refl_equal, [| type_of_a; a|])];
(fun g2 ->
change_in_concl None
(pattern_occs [((false,[1]), a)] (pf_env g2) Evd.empty (pf_concl g2))
g2);
simplest_case a] g);;
(* This is like the previous one except that it also rewrite on all
   hypotheses except the ones given in the first argument.  All the
   modified hypotheses are generalized in the process and should be
   introduced back later; the result is the pair of the tactic and the
   list of hypotheses that have been generalized and cleared. *)
let mkDestructEq :
identifier list -> constr -> goal sigma -> tactic * identifier list =
fun not_on_hyp expr g ->
let hyps = pf_hyps g in
let to_revert =
Util.map_succeed
(fun (id,_,t) ->
if List.mem id not_on_hyp || not (Termops.occur_term expr t)
then failwith "is_expr_context";
id) hyps in
let to_revert_constr = List.rev_map mkVar to_revert in
let type_of_expr = pf_type_of g expr in
let new_hyps = mkApp(delayed_force refl_equal, [|type_of_expr; expr|])::
to_revert_constr in
tclTHENLIST
[h_generalize new_hyps;
(fun g2 ->
change_in_concl None
(pattern_occs [((false,[1]), expr)] (pf_env g2) Evd.empty (pf_concl g2)) g2);
simplest_case expr], to_revert
let rec mk_intros_and_continue thin_intros (extra_eqn:bool)
cont_function (eqs:constr list) nb_lam (expr:constr) g =
observe_tac "mk_intros_and_continue" (
let finalize () = if extra_eqn then
let teq = pf_get_new_id teq_id g in
tclTHENLIST
[ h_intro teq;
thin thin_intros;
h_intros thin_intros;
tclMAP
(List.rev eqs);
(fun g1 ->
let ty_teq = pf_type_of g1 (mkVar teq) in
let teq_lhs,teq_rhs =
let _,args =
try destApp ty_teq
with e when Errors.noncritical e ->
Pp.msgnl (Printer.pr_goal g1 ++ fnl () ++ pr_id teq ++ str ":" ++ Printer.pr_lconstr ty_teq); assert false
in
args.(1),args.(2)
in
cont_function (mkVar teq::eqs) (Termops.replace_term teq_lhs teq_rhs expr) g1
)
]
else
tclTHENSEQ[
thin thin_intros;
h_intros thin_intros;
cont_function eqs expr
]
in
if nb_lam = 0
then finalize ()
else
match kind_of_term expr with
| Lambda (n, _, b) ->
let n1 =
match n with
Name x -> x
| Anonymous -> ano_id
in
let new_n = pf_get_new_id n1 g in
tclTHEN (h_intro new_n)
(mk_intros_and_continue thin_intros extra_eqn cont_function eqs
(pred nb_lam) (subst1 (mkVar new_n) b))
| _ ->
assert false) g
let const_of_ref = function
ConstRef kn -> kn
| _ -> anomaly "ConstRef expected"
let simpl_iter clause =
reduce
(Lazy
{rBeta=true;rIota=true;rZeta= true; rDelta=false;
rConst = [ EvalConstRef (const_of_ref (delayed_force iter_ref))]})
clause
let tclUSER tac is_mes l g =
let clear_tac =
match l with
| None -> h_clear true []
| Some l -> tclMAP (fun id -> tclTRY (h_clear false [id])) (List.rev l)
in
tclTHENSEQ
[
clear_tac;
if is_mes
then tclTHEN
(unfold_in_concl [(Termops.all_occurrences, evaluable_of_global_reference
(delayed_force ltof_ref))])
tac
else tac
]
g
let list_rewrite (rev:bool) (eqs: constr list) =
tclREPEAT
(List.fold_right
(fun eq i -> tclORELSE (rewriteLR eq) i)
(if rev then (List.rev eqs) else eqs) (tclFAIL 0 (mt())));;
let base_leaf_terminate (func:global_reference) eqs expr =
(fun g ->
let k',h =
match pf_get_new_ids [k_id;h_id] g with
[k';h] -> k',h
| _ -> assert false
in
tclTHENLIST
[observe_tac "first split" (split (ImplicitBindings [expr]));
observe_tac "second split"
(split (ImplicitBindings [delayed_force coq_O]));
observe_tac "intro k" (h_intro k');
observe_tac "case on k"
(tclTHENS (simplest_case (mkVar k'))
[(tclTHEN (h_intro h)
(tclTHEN (simplest_elim (mkApp (delayed_force gt_antirefl,
[| delayed_force coq_O |])))
default_auto)); tclIDTAC ]);
intros;
simpl_iter onConcl;
unfold_constr func;
list_rewrite true eqs;
default_auto] g);;
(* The function is given as the first argument to the
   functional, followed by other Lambdas and Case constructs ...
   To recover the function f from the functional *)
let get_f foncl =
match (kind_of_term (def_of_const foncl)) with
Lambda (Name f, _, _) -> f
|_ -> error "la fonctionnelle est mal definie";;
let rec compute_le_proofs = function
[] -> assumption
| a::tl ->
tclORELSE assumption
(tclTHENS
(fun g ->
let le_trans = delayed_force le_trans in
let t_le_trans = compute_renamed_type g le_trans in
let m_id =
let _,_,t = destProd t_le_trans in
let na,_,_ = destProd t in
Nameops.out_name na
in
apply_with_bindings
(le_trans,
ExplicitBindings[dummy_loc,NamedHyp m_id,a])
g)
[compute_le_proofs tl;
tclORELSE (apply (delayed_force le_n)) assumption])
let make_lt_proof pmax le_proof =
tclTHENS
(fun g ->
let le_lt_trans = delayed_force le_lt_trans in
let t_le_lt_trans = compute_renamed_type g le_lt_trans in
let m_id =
let _,_,t = destProd t_le_lt_trans in
let na,_,_ = destProd t in
Nameops.out_name na
in
apply_with_bindings
(le_lt_trans,
ExplicitBindings[dummy_loc,NamedHyp m_id, pmax]) g)
[observe_tac "compute_le_proofs" (compute_le_proofs le_proof);
tclTHENLIST[observe_tac "lt_S_n" (apply (delayed_force lt_S_n)); default_full_auto]];;
let rec list_cond_rewrite k def pmax cond_eqs le_proofs =
match cond_eqs with
[] -> tclIDTAC
| eq::eqs ->
(fun g ->
let t_eq = compute_renamed_type g (mkVar eq) in
let k_id,def_id =
let k_na,_,t = destProd t_eq in
let _,_,t = destProd t in
let def_na,_,_ = destProd t in
Nameops.out_name k_na,Nameops.out_name def_na
in
tclTHENS
(general_rewrite_bindings false Termops.all_occurrences
(mkVar eq,
ExplicitBindings[dummy_loc, NamedHyp k_id, mkVar k;
dummy_loc, NamedHyp def_id, mkVar def]) false)
[list_cond_rewrite k def pmax eqs le_proofs;
observe_tac "make_lt_proof" (make_lt_proof pmax le_proofs)] g
)
let rec introduce_all_equalities func eqs values specs bound le_proofs
cond_eqs =
match specs with
[] ->
fun g ->
let ids = pf_ids_of_hyps g in
let s_max = mkApp(delayed_force coq_S, [|bound|]) in
let k = next_ident_away_in_goal k_id ids in
let ids = k::ids in
let h' = next_ident_away_in_goal (h'_id) ids in
let ids = h'::ids in
let def = next_ident_away_in_goal def_id ids in
tclTHENLIST
[observe_tac "introduce_all_equalities_final split" (split (ImplicitBindings [s_max]));
observe_tac "introduce_all_equalities_final intro k" (h_intro k);
tclTHENS
(observe_tac "introduce_all_equalities_final case k" (simplest_case (mkVar k)))
[
tclTHENLIST[h_intro h';
simplest_elim(mkApp(delayed_force lt_n_O,[|s_max|]));
default_full_auto];
tclIDTAC
];
observe_tac "clearing k " (clear [k]);
observe_tac "intros k h' def" (h_intros [k;h';def]);
observe_tac "simple_iter" (simpl_iter onConcl);
observe_tac "unfold functional"
(unfold_in_concl[((true,[1]),evaluable_of_global_reference func)]);
observe_tac "rewriting equations"
(list_rewrite true eqs);
observe_tac ("cond rewrite "^(string_of_id k)) (list_cond_rewrite k def bound cond_eqs le_proofs);
observe_tac "refl equal" (apply (delayed_force refl_equal))] g
| spec1::specs ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let p = next_ident_away_in_goal p_id ids in
let ids = p::ids in
let pmax = next_ident_away_in_goal pmax_id ids in
let ids = pmax::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
let hle2 = next_ident_away_in_goal hle_id ids in
let ids = hle2::ids in
let heq = next_ident_away_in_goal heq_id ids in
tclTHENLIST
[simplest_elim (mkVar spec1);
list_rewrite true eqs;
h_intros [p; heq];
simplest_elim (mkApp(delayed_force max_constr, [| bound; mkVar p|]));
h_intros [pmax; hle1; hle2];
introduce_all_equalities func eqs values specs
(mkVar pmax) ((mkVar pmax)::le_proofs)
(heq::cond_eqs)] g;;
let string_match s =
if String.length s < 3 then failwith "string_match";
try
for i = 0 to 3 do
if String.get s i <> String.get "Acc_" i then failwith "string_match"
done;
with Invalid_argument _ -> failwith "string_match"
let retrieve_acc_var g =
(* : I don't like this version .... *)
let hyps = pf_ids_of_hyps g in
map_succeed
(fun id -> string_match (string_of_id id);id)
hyps
let rec introduce_all_values concl_tac is_mes acc_inv func context_fn
eqs hrec args values specs =
(match args with
[] ->
tclTHENLIST
[observe_tac "split" (split(ImplicitBindings
[context_fn (List.map mkVar (List.rev values))]));
observe_tac "introduce_all_equalities" (introduce_all_equalities func eqs
(List.rev values) (List.rev specs) (delayed_force coq_O) [] [])]
| arg::args ->
(fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let rec_res = next_ident_away_in_goal rec_res_id ids in
let ids = rec_res::ids in
let hspec = next_ident_away_in_goal hspec_id ids in
let tac =
observe_tac "introduce_all_values" (
introduce_all_values concl_tac is_mes acc_inv func context_fn eqs
hrec args
(rec_res::values)(hspec::specs)) in
(tclTHENS
(observe_tac "elim h_rec"
(simplest_elim (mkApp(mkVar hrec, Array.of_list arg)))
)
[tclTHENLIST [h_intros [rec_res; hspec];
tac];
(tclTHENS
(observe_tac "acc_inv" (apply (Lazy.force acc_inv)))
[(* tclTHEN (tclTRY (list_rewrite true eqs)) *)
(observe_tac "h_assumption" h_assumption)
;
tclTHENLIST
[
tclTRY(list_rewrite true eqs);
observe_tac "user proof"
(fun g ->
tclUSER
concl_tac
is_mes
(Some (hrec::hspec::(retrieve_acc_var g)@specs))
g
)
]
]
)
]) g)
)
let rec_leaf_terminate nb_arg f_constr concl_tac is_mes acc_inv hrec (func:global_reference) eqs expr =
match find_call_occs nb_arg 0 f_constr expr with
| context_fn, args ->
observe_tac "introduce_all_values"
(introduce_all_values concl_tac is_mes acc_inv func context_fn eqs hrec args [] [])
let proveterminate nb_arg rec_arg_id is_mes acc_inv (hrec:identifier)
(f_constr:constr) (func:global_reference) base_leaf rec_leaf =
let rec proveterminate (eqs:constr list) (expr:constr) =
try
let _ = msgnl ( str " entering proveterminate " ) in
let v =
match (kind_of_term expr) with
Case (ci, t, a, l) ->
(match find_call_occs nb_arg 0 f_constr a with
_,[] ->
(fun g ->
let destruct_tac, rev_to_thin_intro =
mkDestructEq rec_arg_id a g in
tclTHENS destruct_tac
(list_map_i
(fun i -> mk_intros_and_continue
(List.rev rev_to_thin_intro)
true
proveterminate
eqs
ci.ci_cstr_ndecls.(i))
0 (Array.to_list l)) g)
| _, _::_ ->
(match find_call_occs nb_arg 0 f_constr expr with
_,[] -> observe_tac "base_leaf" (base_leaf func eqs expr)
| _, _:: _ ->
observe_tac "rec_leaf"
(rec_leaf is_mes acc_inv hrec func eqs expr)))
| _ ->
(match find_call_occs nb_arg 0 f_constr expr with
_,[] ->
(try observe_tac "base_leaf" (base_leaf func eqs expr)
with reraise ->
(msgerrnl (str "failure in base case");raise reraise ))
| _, _::_ ->
observe_tac "rec_leaf"
(rec_leaf is_mes acc_inv hrec func eqs expr)) in
v
with reraise ->
begin
msgerrnl(str "failure in proveterminate");
raise reraise
end
in
proveterminate
let hyp_terminates nb_args func =
let a_arrow_b = arg_type (constr_of_global func) in
let rev_args,b = decompose_prod_n nb_args a_arrow_b in
let left =
mkApp(delayed_force iter,
Array.of_list
(lift 5 a_arrow_b:: mkRel 3::
constr_of_global func::mkRel 1::
List.rev (list_map_i (fun i _ -> mkRel (6+i)) 0 rev_args)
)
)
in
let right = mkRel 5 in
let equality = mkApp(delayed_force eq, [|lift 5 b; left; right|]) in
let result = (mkProd ((Name def_id) , lift 4 a_arrow_b, equality)) in
let cond = mkApp(delayed_force lt, [|(mkRel 2); (mkRel 1)|]) in
let nb_iter =
mkApp(delayed_force ex,
[|delayed_force nat;
(mkLambda
(Name
p_id,
delayed_force nat,
(mkProd (Name k_id, delayed_force nat,
mkArrow cond result))))|])in
let value = mkApp(delayed_force coq_sig,
[|b;
(mkLambda (Name v_id, b, nb_iter))|]) in
compose_prod rev_args value
let tclUSER_if_not_mes concl_tac is_mes names_to_suppress =
if is_mes
then tclCOMPLETE (h_simplest_apply (delayed_force well_founded_ltof))
else tclUSER concl_tac is_mes names_to_suppress
let termination_proof_header is_mes input_type ids args_id relation
rec_arg_num rec_arg_id tac wf_tac : tactic =
begin
fun g ->
let nargs = List.length args_id in
let pre_rec_args =
List.rev_map
mkVar (fst (list_chop (rec_arg_num - 1) args_id))
in
let relation = substl pre_rec_args relation in
let input_type = substl pre_rec_args input_type in
let wf_thm = next_ident_away_in_goal (id_of_string ("wf_R")) ids in
let wf_rec_arg =
next_ident_away_in_goal
(id_of_string ("Acc_"^(string_of_id rec_arg_id)))
(wf_thm::ids)
in
let hrec = next_ident_away_in_goal hrec_id
(wf_rec_arg::wf_thm::ids) in
let acc_inv =
lazy (
mkApp (
delayed_force acc_inv_id,
[|input_type;relation;mkVar rec_arg_id|]
)
)
in
tclTHEN
(h_intros args_id)
(tclTHENS
(observe_tac
"first assert"
(assert_tac
(Name wf_rec_arg)
(mkApp (delayed_force acc_rel,
[|input_type;relation;mkVar rec_arg_id|])
)
)
)
[
tclTHENS
(observe_tac
"second assert"
(assert_tac
(Name wf_thm)
(mkApp (delayed_force well_founded,[|input_type;relation|]))
)
)
[
observe_tac "wf_tac" (wf_tac is_mes (Some args_id));
observe_tac
"apply wf_thm"
(h_simplest_apply (mkApp(mkVar wf_thm,[|mkVar rec_arg_id|]))
)
]
;
tclTHENSEQ
[observe_tac "generalize"
(onNLastHypsId (nargs+1)
(tclMAP (fun id ->
tclTHEN (h_generalize [mkVar id]) (h_clear false [id]))
))
;
observe_tac "h_fix" (h_fix (Some hrec) (nargs+1));
h_intros args_id;
h_intro wf_rec_arg;
observe_tac "tac" (tac wf_rec_arg hrec acc_inv)
]
]
) g
end
let rec instantiate_lambda t l =
match l with
| [] -> t
| a::l ->
let (bound_name, _, body) = destLambda t in
instantiate_lambda (subst1 a body) l
;;
let whole_start (concl_tac:tactic) nb_args is_mes func input_type relation rec_arg_num : tactic =
begin
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
let func_body = (def_of_const (constr_of_global func)) in
let (f_name, _, body1) = destLambda func_body in
let f_id =
match f_name with
| Name f_id -> next_ident_away_in_goal f_id ids
| Anonymous -> anomaly "Anonymous function"
in
let n_names_types,_ = decompose_lam_n nb_args body1 in
let n_ids,ids =
List.fold_left
(fun (n_ids,ids) (n_name,_) ->
match n_name with
| Name id ->
let n_id = next_ident_away_in_goal id ids in
n_id::n_ids,n_id::ids
| _ -> anomaly "anonymous argument"
)
([],(f_id::ids))
n_names_types
in
let rec_arg_id = List.nth n_ids (rec_arg_num - 1) in
let expr = instantiate_lambda func_body (mkVar f_id::(List.map mkVar n_ids)) in
termination_proof_header
is_mes
input_type
ids
n_ids
relation
rec_arg_num
rec_arg_id
(fun rec_arg_id hrec acc_inv g ->
(proveterminate
nb_args
[rec_arg_id]
is_mes
acc_inv
hrec
(mkVar f_id)
func
base_leaf_terminate
(rec_leaf_terminate nb_args (mkVar f_id) concl_tac)
[]
expr
)
g
)
(tclUSER_if_not_mes concl_tac)
g
end
let get_current_subgoals_types () =
let p = Proof_global.give_me_the_proof () in
let { Evd.it=sgs ; sigma=sigma } = Proof.V82.subgoals p in
List.map (Goal.V82.abstract_type sigma) sgs
let build_and_l l =
let and_constr = Coqlib.build_coq_and () in
let conj_constr = coq_conj () in
let mk_and p1 p2 =
Term.mkApp(and_constr,[|p1;p2|]) in
let rec is_well_founded t =
match kind_of_term t with
| Prod(_,_,t') -> is_well_founded t'
| App(_,_) ->
let (f,_) = decompose_app t in
eq_constr f (well_founded ())
| _ -> false
in
let compare t1 t2 =
let b1,b2= is_well_founded t1,is_well_founded t2 in
if (b1&&b2) || not (b1 || b2) then 0
else if b1 && not b2 then 1 else -1
in
let l = List.sort compare l in
let rec f = function
| [] -> failwith "empty list of subgoals!"
| [p] -> p,tclIDTAC,1
| p1::pl ->
let c,tac,nb = f pl in
mk_and p1 c,
tclTHENS
(apply (constr_of_global conj_constr))
[tclIDTAC;
tac
],nb+1
in f l
let is_rec_res id =
let rec_res_name = string_of_id rec_res_id in
let id_name = string_of_id id in
try
String.sub id_name 0 (String.length rec_res_name) = rec_res_name
with e when Errors.noncritical e -> false
let clear_goals =
let rec clear_goal t =
match kind_of_term t with
| Prod(Name id as na,t',b) ->
let b' = clear_goal b in
if noccurn 1 b' && (is_rec_res id)
then Termops.pop b'
else if b' == b then t
else mkProd(na,t',b')
| _ -> map_constr clear_goal t
in
List.map clear_goal
let build_new_goal_type () =
let sub_gls_types = get_current_subgoals_types () in
Pp.msgnl ( str " sub_gls_types1 : = " + + Util.prlist_with_sep ( fun ( ) - > Pp.fnl ( ) + + Pp.fnl ( ) ) Printer.pr_lconstr sub_gls_types ) ;
let sub_gls_types = clear_goals sub_gls_types in
Pp.msgnl ( str " sub_gls_types2 : = " + + Util.prlist_with_sep ( fun ( ) - > Pp.fnl ( ) + + Pp.fnl ( ) ) Printer.pr_lconstr sub_gls_types ) ;
let res = build_and_l sub_gls_types in
res
let is_opaque_constant c =
let cb = Global.lookup_constant c in
match cb.Declarations.const_body with
| Declarations.OpaqueDef _ -> true
| Declarations.Undef _ -> true
| Declarations.Def _ -> false
let open_new_goal (build_proof:tactic -> tactic -> unit) using_lemmas ref_ goal_name (gls_type,decompose_and_tac,nb_goal) =
Pp.msgnl ( str " : = " + + ) ;
let current_proof_name = get_current_proof_name () in
let name = match goal_name with
| Some s -> s
| None ->
try (add_suffix current_proof_name "_subproof")
with e when Errors.noncritical e ->
anomaly "open_new_goal with an unamed theorem"
in
let sign = initialize_named_context_for_proof () in
let na = next_global_ident_away name [] in
if Termops.occur_existential gls_type then
Util.error "\"abstract\" cannot handle existentials";
let hook _ _ =
let opacity =
let na_ref = Libnames.Ident (dummy_loc,na) in
let na_global = Nametab.global na_ref in
match na_global with
ConstRef c -> is_opaque_constant c
| _ -> anomaly "equation_lemma: not a constant"
in
let lemma = mkConst (Lib.make_con na) in
ref_ := Some lemma ;
let lid = ref [] in
let h_num = ref (-1) in
Flags.silently Vernacentries.interp (Vernacexpr.VernacAbort None);
build_proof
( fun gls ->
let hid = next_ident_away_in_goal h_id (pf_ids_of_hyps gls) in
tclTHENSEQ
[
h_generalize [lemma];
h_intro hid;
(fun g ->
let ids = pf_ids_of_hyps g in
tclTHEN
(Elim.h_decompose_and (mkVar hid))
(fun g ->
let ids' = pf_ids_of_hyps g in
lid := List.rev (list_subtract ids' ids);
if !lid = [] then lid := [hid];
tclIDTAC g
)
g
);
] gls)
(fun g ->
match kind_of_term (pf_concl g) with
| App(f,_) when eq_constr f (well_founded ()) ->
Auto.h_auto None [] (Some []) g
| _ ->
incr h_num;
(observe_tac "finishing using"
(
tclCOMPLETE(
tclFIRST[
tclTHEN
(eapply_with_bindings (mkVar (List.nth !lid !h_num), NoBindings))
e_assumption;
Eauto.eauto_with_bases
(true,5)
[Evd.empty,delayed_force refl_equal]
[Auto.Hint_db.empty empty_transparent_state false]
]
)
)
)
g)
;
Lemmas.save_named opacity;
in
start_proof
na
(Decl_kinds.Global, Decl_kinds.Proof Decl_kinds.Lemma)
sign
gls_type
hook ;
if Indfun_common.is_strict_tcc ()
then
by (tclIDTAC)
else
begin
by (
fun g ->
tclTHEN
(decompose_and_tac)
(tclORELSE
(tclFIRST
(List.map
(fun c ->
tclTHENSEQ
[intros;
h_simplest_apply (interp_constr Evd.empty (Global.env()) c);
tclCOMPLETE Auto.default_auto
]
)
using_lemmas)
) tclIDTAC)
g)
end;
try
(* raises UserError _ if the proof is complete *)
if Flags.is_verbose () then (pp (Printer.pr_open_subgoals()))
with UserError _ ->
defined ()
;;
let com_terminate
tcc_lemma_name
tcc_lemma_ref
is_mes
fonctional_ref
input_type
relation
rec_arg_num
thm_name using_lemmas
nb_args
hook =
let start_proof (tac_start:tactic) (tac_end:tactic) =
let (evmap, env) = Lemmas.get_current_context() in
start_proof thm_name
(Global, Proof Lemma) (Environ.named_context_val env)
(hyp_terminates nb_args fonctional_ref) hook;
by (observe_tac "starting_tac" tac_start);
by (observe_tac "whole_start" (whole_start tac_end nb_args is_mes fonctional_ref
input_type relation rec_arg_num ))
in
start_proof tclIDTAC tclIDTAC;
try
let new_goal_type = build_new_goal_type () in
open_new_goal start_proof using_lemmas tcc_lemma_ref
(Some tcc_lemma_name)
(new_goal_type);
with Failure "empty list of subgoals!" ->
defined ()
let ind_of_ref = function
| IndRef (ind,i) -> (ind,i)
| _ -> anomaly "IndRef expected"
let (value_f:constr list -> global_reference -> constr) =
fun al fterm ->
let d0 = dummy_loc in
let rev_x_id_l =
(
List.fold_left
(fun x_id_l _ ->
let x_id = next_ident_away_in_goal x_id x_id_l in
x_id::x_id_l
)
[]
al
)
in
let context = List.map
(fun (x, c) -> Name x, None, c) (List.combine rev_x_id_l (List.rev al))
in
let env = Environ.push_rel_context context (Global.env ()) in
let glob_body =
GCases
(d0,RegularStyle,None,
[GApp(d0, GRef(d0,fterm), List.rev_map (fun x_id -> GVar(d0, x_id)) rev_x_id_l),
(Anonymous,None)],
[d0, [v_id], [PatCstr(d0,(ind_of_ref
(delayed_force coq_sig_ref),1),
[PatVar(d0, Name v_id);
PatVar(d0, Anonymous)],
Anonymous)],
GVar(d0,v_id)])
in
let body = understand Evd.empty env glob_body in
it_mkLambda_or_LetIn body context
let (declare_fun : identifier -> logical_kind -> constr -> global_reference) =
fun f_id kind value ->
let ce = {const_entry_body = value;
const_entry_secctx = None;
const_entry_type = None;
const_entry_opaque = false } in
ConstRef(declare_constant f_id (DefinitionEntry ce, kind));;
let (declare_f : identifier -> logical_kind -> constr list -> global_reference -> global_reference) =
fun f_id kind input_type fterm_ref ->
declare_fun f_id kind (value_f input_type fterm_ref);;
let rec n_x_id ids n =
if n = 0 then []
else let x = next_ident_away_in_goal x_id ids in
x::n_x_id (x::ids) (n-1);;
let start_equation (f:global_reference) (term_f:global_reference)
(cont_tactic:identifier list -> tactic) g =
let ids = pf_ids_of_hyps g in
let terminate_constr = constr_of_global term_f in
let nargs = nb_prod (type_of_const terminate_constr) in
let x = n_x_id ids nargs in
tclTHENLIST [
h_intros x;
unfold_in_concl [(Termops.all_occurrences, evaluable_of_global_reference f)];
observe_tac "simplest_case"
(simplest_case (mkApp (terminate_constr,
Array.of_list (List.map mkVar x))));
observe_tac "prove_eq" (cont_tactic x)] g;;
let base_leaf_eq func eqs f_id g =
let ids = pf_ids_of_hyps g in
let k = next_ident_away_in_goal k_id ids in
let p = next_ident_away_in_goal p_id (k::ids) in
let v = next_ident_away_in_goal v_id (p::k::ids) in
let heq = next_ident_away_in_goal heq_id (v::p::k::ids) in
let heq1 = next_ident_away_in_goal heq_id (heq::v::p::k::ids) in
let hex = next_ident_away_in_goal hex_id (heq1::heq::v::p::k::ids) in
tclTHENLIST [
h_intros [v; hex];
simplest_elim (mkVar hex);
h_intros [p;heq1];
tclTRY
(rewriteRL
(mkApp(mkVar heq1,
[|mkApp (delayed_force coq_S, [|mkVar p|]);
mkApp(delayed_force lt_n_Sn, [|mkVar p|]); f_id|])));
simpl_iter onConcl;
tclTRY (unfold_in_concl [((true,[1]), evaluable_of_global_reference func)]);
observe_tac "list_revrite" (list_rewrite true eqs);
apply (delayed_force refl_equal)] g;;
let f_S t = mkApp(delayed_force coq_S, [|t|]);;
let rec introduce_all_values_eq cont_tac functional termine
f p heq1 pmax bounds le_proofs eqs ids =
function
[] ->
let heq2 = next_ident_away_in_goal heq_id ids in
tclTHENLIST
[pose_proof (Name heq2)
(mkApp(mkVar heq1, [|f_S(f_S(mkVar pmax))|]));
simpl_iter (onHyp heq2);
unfold_in_hyp [((true,[1]), evaluable_of_global_reference
(global_of_constr functional))]
(heq2, Termops.InHyp);
tclTHENS
(fun gls ->
let t_eq = compute_renamed_type gls (mkVar heq2) in
let def_id =
let _,_,t = destProd t_eq in let def_na,_,_ = destProd t in
Nameops.out_name def_na
in
observe_tac "rewrite heq" (general_rewrite_bindings false Termops.all_occurrences
ExplicitBindings[dummy_loc,NamedHyp def_id,
f]) false) gls)
[tclTHENLIST
[observe_tac "list_rewrite" (list_rewrite true eqs);
cont_tac pmax le_proofs];
tclTHENLIST[apply (delayed_force le_lt_SS);
compute_le_proofs le_proofs]]]
| arg::args ->
let v' = next_ident_away_in_goal v_id ids in
let ids = v'::ids in
let hex' = next_ident_away_in_goal hex_id ids in
let ids = hex'::ids in
let p' = next_ident_away_in_goal p_id ids in
let ids = p'::ids in
let new_pmax = next_ident_away_in_goal pmax_id ids in
let ids = pmax::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
let hle2 = next_ident_away_in_goal hle_id ids in
let ids = hle2::ids in
let heq = next_ident_away_in_goal heq_id ids in
let ids = heq::ids in
let heq2 = next_ident_away_in_goal heq_id ids in
let ids = heq2::ids in
tclTHENLIST
[mkCaseEq(mkApp(termine, Array.of_list arg));
h_intros [v'; hex'];
simplest_elim(mkVar hex');
h_intros [p'];
simplest_elim(mkApp(delayed_force max_constr, [|mkVar pmax;
mkVar p'|]));
h_intros [new_pmax;hle1;hle2];
introduce_all_values_eq
(fun pmax' le_proofs'->
tclTHENLIST
[cont_tac pmax' le_proofs';
h_intros [heq;heq2];
observe_tac ("rewriteRL " ^ (string_of_id heq2))
(tclTRY (rewriteLR (mkVar heq2)));
tclTRY (tclTHENS
( fun g ->
let t_eq = compute_renamed_type g (mkVar heq) in
let k_id,def_id =
let k_na,_,t = destProd t_eq in
let _,_,t = destProd t in
let def_na,_,_ = destProd t in
Nameops.out_name k_na,Nameops.out_name def_na
in
let c_b = (mkVar heq,
ExplicitBindings
[dummy_loc, NamedHyp k_id,
f_S(mkVar pmax');
dummy_loc, NamedHyp def_id, f])
in
observe_tac "general_rewrite_bindings" ( (general_rewrite_bindings false Termops.all_occurrences true true
c_b false))
g
)
[tclIDTAC;
tclTHENLIST
[apply (delayed_force le_lt_n_Sm);
compute_le_proofs le_proofs']])])
functional termine f p heq1 new_pmax
(p'::bounds)((mkVar pmax)::le_proofs) eqs
(heq2::heq::hle2::hle1::new_pmax::p'::hex'::v'::ids) args]
let rec_leaf_eq termine f ids functional eqs expr fn args =
let p = next_ident_away_in_goal p_id ids in
let ids = p::ids in
let v = next_ident_away_in_goal v_id ids in
let ids = v::ids in
let hex = next_ident_away_in_goal hex_id ids in
let ids = hex::ids in
let heq1 = next_ident_away_in_goal heq_id ids in
let ids = heq1::ids in
let hle1 = next_ident_away_in_goal hle_id ids in
let ids = hle1::ids in
tclTHENLIST
[observe_tac "intros v hex" (h_intros [v;hex]);
simplest_elim (mkVar hex);
h_intros [p;heq1];
h_generalize [mkApp(delayed_force le_n,[|mkVar p|])];
h_intros [hle1];
observe_tac "introduce_all_values_eq" (introduce_all_values_eq
(fun _ _ -> tclIDTAC)
functional termine f p heq1 p [] [] eqs ids args);
observe_tac "failing here" (apply (delayed_force refl_equal))]
let rec prove_eq nb_arg (termine:constr) (f:constr)(functional:global_reference)
(eqs:constr list) (expr:constr) =
observe_tac "prove_eq" (match kind_of_term expr with
Case(ci,t,a,l) ->
(match find_call_occs nb_arg 0 f a with
_,[] ->
(fun g ->
let destruct_tac,rev_to_thin_intro = mkDestructEq [] a g in
tclTHENS
destruct_tac
(list_map_i
(fun i -> mk_intros_and_continue
(List.rev rev_to_thin_intro) true
(prove_eq nb_arg termine f functional)
eqs ci.ci_cstr_ndecls.(i))
0 (Array.to_list l)) g)
| _,_::_ ->
(match find_call_occs nb_arg 0 f expr with
_,[] -> observe_tac "base_leaf_eq(1)" (base_leaf_eq functional eqs f)
| fn,args ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
observe_tac "rec_leaf_eq" (rec_leaf_eq termine f ids
(constr_of_global functional)
eqs expr fn args) g))
| _ ->
(match find_call_occs nb_arg 0 f expr with
_,[] -> observe_tac "base_leaf_eq(2)" ( base_leaf_eq functional eqs f)
| fn,args ->
fun g ->
let ids = Termops.ids_of_named_context (pf_hyps g) in
observe_tac "rec_leaf_eq" (rec_leaf_eq
termine f ids (constr_of_global functional)
eqs expr fn args) g));;
let (com_eqn : int -> identifier ->
global_reference -> global_reference -> global_reference
-> constr -> unit) =
fun nb_arg eq_name functional_ref f_ref terminate_ref equation_lemma_type ->
let opacity =
match terminate_ref with
| ConstRef c -> is_opaque_constant c
| _ -> anomaly "terminate_lemma: not a constant"
in
let (evmap, env) = Lemmas.get_current_context() in
let f_constr = (constr_of_global f_ref) in
let equation_lemma_type = subst1 f_constr equation_lemma_type in
(start_proof eq_name (Global, Proof Lemma)
(Environ.named_context_val env) equation_lemma_type (fun _ _ -> ());
by
(start_equation f_ref terminate_ref
(fun x ->
prove_eq nb_arg
(constr_of_global terminate_ref)
f_constr
functional_ref
[]
(instantiate_lambda
(def_of_const (constr_of_global functional_ref))
(f_constr::List.map mkVar x)
)
)
);
(* (try Vernacentries.interp (Vernacexpr. . ShowProof) with _ -> ()); *)
(* Vernacentries.interp (Vernacexpr. . ShowScript); *)
Flags.silently (fun () -> Lemmas.save_named opacity) () ;
Pp.msgnl ( str " eqn finished " ) ;
);;
let nf_zeta env =
Reductionops.clos_norm_flags (Closure.RedFlags.mkflags [Closure.RedFlags.fZETA])
env
Evd.empty
let nf_betaiotazeta =
let clos_norm_flags flgs env sigma t =
Closure.norm_val (Closure.create_clos_infos flgs env) (Closure.inject (Reductionops.nf_evar sigma t)) in
clos_norm_flags Closure.betaiotazeta Environ.empty_env Evd.empty
let recursive_definition is_mes function_name rec_impls type_of_f r rec_arg_num eq
generate_induction_principle using_lemmas : unit =
let previous_label = Lib.current_command_label () in
let function_type = interp_constr Evd.empty (Global.env()) type_of_f in
let env = push_named (function_name,None,function_type) (Global.env()) in
Pp.msgnl ( str " function type : = " + + Printer.pr_lconstr function_type ) ;
let equation_lemma_type =
nf_betaiotazeta
(interp_gen (OfType None) Evd.empty env ~impls:rec_impls eq)
in
Pp.msgnl ( str " lemma type : = " + + Printer.pr_lconstr equation_lemma_type + + fnl ( ) ) ;
let res_vars,eq' = decompose_prod equation_lemma_type in
let env_eq' = Environ.push_rel_context (List.map (fun (x,y) -> (x,None,y)) res_vars) env in
let eq' = nf_zeta env_eq' eq' in
let res =
Pp.msgnl ( str " res_var : = " + + Printer.pr_lconstr_env ( push_rel_context ( List.map ( function ( x , t ) - > ( x , None , t ) ) res_vars ) env ) eq ' ) ;
Pp.msgnl ( str " rec_arg_num : = " + + str ( string_of_int rec_arg_num ) ) ;
Pp.msgnl ( str " eq ' : = " + + str ( string_of_int rec_arg_num ) ) ;
match kind_of_term eq' with
| App(e,[|_;_;eq_fix|]) ->
mkLambda (Name function_name,function_type,subst_var function_name (compose_lam res_vars eq_fix))
| _ -> failwith "Recursive Definition (res not eq)"
in
let pre_rec_args,function_type_before_rec_arg = decompose_prod_n (rec_arg_num - 1) function_type in
let (_, rec_arg_type, _) = destProd function_type_before_rec_arg in
let arg_types = List.rev_map snd (fst (decompose_prod_n (List.length res_vars) function_type)) in
let equation_id = add_suffix function_name "_equation" in
let functional_id = add_suffix function_name "_F" in
let term_id = add_suffix function_name "_terminate" in
let functional_ref = declare_fun functional_id (IsDefinition Decl_kinds.Definition) res in
let env_with_pre_rec_args = push_rel_context(List.map (function (x,t) -> (x,None,t)) pre_rec_args) env in
let relation =
interp_constr
Evd.empty
env_with_pre_rec_args
r
in
let tcc_lemma_name = add_suffix function_name "_tcc" in
let tcc_lemma_constr = ref None in
let _ = Pp.msgnl ( str " relation : = " + + Printer.pr_lconstr_env env_with_pre_rec_args relation ) in
let hook _ _ =
let term_ref = Nametab.locate (qualid_of_ident term_id) in
let f_ref = declare_f function_name (IsProof Lemma) arg_types term_ref in
let _ = Table.extraction_inline true [Ident (dummy_loc,term_id)] in
message " start second proof " ;
let stop = ref false in
begin
try com_eqn (List.length res_vars) equation_id functional_ref f_ref term_ref (subst_var function_name equation_lemma_type)
with e when Errors.noncritical e ->
begin
if Tacinterp.get_debug () <> Tactic_debug.DebugOff
then pperrnl (str "Cannot create equation Lemma " ++ Errors.print e)
else anomaly "Cannot create equation Lemma"
;
stop := true;
end
end;
if not !stop
then
let eq_ref = Nametab.locate (qualid_of_ident equation_id ) in
let f_ref = destConst (constr_of_global f_ref)
and functional_ref = destConst (constr_of_global functional_ref)
and eq_ref = destConst (constr_of_global eq_ref) in
generate_induction_principle f_ref tcc_lemma_constr
functional_ref eq_ref rec_arg_num rec_arg_type (nb_prod res) relation;
if Flags.is_verbose ()
then msgnl (h 1 (Ppconstr.pr_id function_name ++
spc () ++ str"is defined" )++ fnl () ++
h 1 (Ppconstr.pr_id equation_id ++
spc () ++ str"is defined" )
)
in
try
com_terminate
tcc_lemma_name
tcc_lemma_constr
is_mes functional_ref
rec_arg_type
relation rec_arg_num
term_id
using_lemmas
(List.length res_vars)
hook
with reraise ->
begin
(try ignore (Backtrack.backto previous_label)
with e when Errors.noncritical e -> ());
raise reraise
end
|
f55b6ad6923c40f34a017c907dd7d8e8163bced0c23da35dd0fa3f0dd1388270 | Eonblast/Scalaxis | tx_op_beh.erl | @copyright 2009 , 2010 onScale solutions GmbH
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author < >
%% @doc Part of generic transactions implementation.
%% The behaviour of an operation in a transaction.
%% @version $Id$
-module(tx_op_beh).
-author('').
-vsn('$Id$').
%-define(TRACE(X,Y), io:format(X,Y)).
-define(TRACE(X,Y), ok).
% for behaviour
-export([behaviour_info/1]).
-spec behaviour_info(atom()) -> [{atom(), arity()}] | undefined.
behaviour_info(callbacks) ->
[
%% do the work phase *asynchronously*, replies to local client with a msg
%% work_phase(ClientPid, Id, Request) ->
%% msg {work_phase_reply, Id, TransLogEntry}
{work_phase, 3},
%% do the work phase *synchronously* based on an existing translog entry
%% work_phase(TransLog, Request) -> NewTransLogEntry
{work_phase, 2},
%% May make several ones from a single TransLog item (item replication)
%% validate_prefilter(TransLogEntry) ->
%% [TransLogEntries] (replicas)
{validate_prefilter, 1},
%% validate a single item
%% validate(DB, RTLogentry) -> {DB, Proposal (prepared/abort)}
{validate, 2},
%% commit(DB, RTLogentry, OwnProposalWas)
{commit, 3},
%% abort(DB, RTLogentry, OwnProposalWas)
{abort, 3}
];
behaviour_info(_Other) ->
undefined.
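%% --------------------------------------------------------------------------
%% The module sketch below is NOT part of tx_op_beh.erl. It is a minimal,
%% hypothetical callback module added to illustrate how the callbacks listed
%% in behaviour_info(callbacks) above fit together. The module name
%% (tx_noop_op), the shape of the translog entry and the trivial bodies are
%% assumptions made purely for illustration; real operations in this code
%% base do considerably more work.
%% --------------------------------------------------------------------------
-module(tx_noop_op).
-behaviour(tx_op_beh).
-export([work_phase/3, work_phase/2, validate_prefilter/1,
         validate/2, commit/3, abort/3]).

%% asynchronous work phase: reply to the local client with a translog entry
work_phase(ClientPid, Id, _Request) ->
    TransLogEntry = {noop, Id},
    ClientPid ! {work_phase_reply, Id, TransLogEntry},
    ok.

%% synchronous work phase, based on an existing translog entry
work_phase(TransLogEntry, _Request) ->
    TransLogEntry.

%% one translog entry per replica; a single replica is assumed here
validate_prefilter(TransLogEntry) ->
    [TransLogEntry].

%% vote on a single item
validate(DB, _RTLogEntry) ->
    {DB, prepared}.

commit(DB, _RTLogEntry, _OwnProposalWas) ->
    DB.

abort(DB, _RTLogEntry, _OwnProposalWas) ->
    DB.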
| null | https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/src/transactions/tx_op_beh.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Part of generic transactions implementation.
The behaviour of an operation in a transaction.
@version $Id$
-define(TRACE(X,Y), io:format(X,Y)).
for behaviour
do the work phase *asynchronously*, replies to local client with a msg
work_phase(ClientPid, Id, Request) ->
msg {work_phase_reply, Id, TransLogEntry}
do the work phase *synchronously* based on an existing translog entry
work_phase(TransLog, Request) -> NewTransLogEntry
validate_prefilter(TransLogEntry) ->
[TransLogEntries] (replicas)
validate a single item
validate(DB, RTLogentry) -> {DB, Proposal (prepared/abort)}
commit(DB, RTLogentry, OwnProposalWas)
abort(DB, RTLogentry, OwnProposalWas) | @copyright 2009 , 2010 onScale solutions GmbH
% Licensed under the Apache License, Version 2.0 (the "License");
% distributed under the License is distributed on an "AS IS" BASIS,
%% @author < >
-module(tx_op_beh).
-author('').
-vsn('$Id$').
-define(TRACE(X,Y), ok).
-export([behaviour_info/1]).
-spec behaviour_info(atom()) -> [{atom(), arity()}] | undefined.
behaviour_info(callbacks) ->
[
{work_phase, 3},
{work_phase, 2},
%% May make several ones from a single TransLog item (item replication)
{validate_prefilter, 1},
{validate, 2},
{commit, 3},
{abort, 3}
];
behaviour_info(_Other) ->
undefined.
|
45dfdb2a77ec66f8bf5ecd577bc9a9855896644841fc081461829059fbaaeaea | ndmitchell/uniplate | Zipper.hs | |
A zipper is a structure for walking a value and manipulating it in constant time.
This module was inspired by the paper:
/Michael D. Adams. Scrap Your Zippers: A Generic Zipper for Heterogeneous Types, Workshop on Generic Programming 2010/.
-}
module Data.Generics.Uniplate.Zipper(
-- * Create a zipper and get back the value
Zipper, zipper, zipperBi, fromZipper,
-- * Navigate within a zipper
left, right, up, down,
-- * Manipulate the zipper hole
hole, replaceHole
) where
import Data.Generics.Uniplate.Operations
import Data.Generics.Str
import Control.Monad
import Data.Maybe
-- | Create a zipper, focused on the top-left value.
zipper :: Uniplate to => to -> Zipper to to
zipper = fromJust . toZipper (\x -> (One x, \(One x) -> x))
-- | Create a zipper with a different focus type from the outer type. Will return
-- @Nothing@ if there are no instances of the focus type within the original value.
zipperBi :: Biplate from to => from -> Maybe (Zipper from to)
zipperBi = toZipper biplate
-- | Zipper structure, whose root type is the first type argument, and whose
--   focus type is the second type argument.
data Zipper from to = Zipper
{reform :: Str to -> from
,zipp :: ZipN to
}
rezipp f (Zipper a b) = fmap (Zipper a) $ f b
instance (Eq from, Eq to) => Eq (Zipper from to) where
a == b = fromZipper a == fromZipper b && zipp a == zipp b
toZipper :: Uniplate to => (from -> (Str to, Str to -> from)) -> from -> Maybe (Zipper from to)
toZipper biplate x = fmap (Zipper gen) $ zipN cs
where (cs,gen) = biplate x
-- | From a zipper take the whole structure, including any modifications.
fromZipper :: Zipper from to -> from
fromZipper x = reform x $ top1 $ topN $ zipp x
-- | Move one step left from the current position.
left :: Zipper from to -> Maybe (Zipper from to)
left = rezipp leftN
-- | Move one step right from the current position.
right :: Zipper from to -> Maybe (Zipper from to)
right = rezipp rightN
-- | Move one step down from the current position.
down :: Uniplate to => Zipper from to -> Maybe (Zipper from to)
down = rezipp downN
-- | Move one step up from the current position.
up :: Zipper from to -> Maybe (Zipper from to)
up = rezipp upN
-- | Retrieve the current focus of the zipper..
hole :: Zipper from to -> to
hole = holeN . zipp
-- | Replace the value currently at the focus of the zipper.
replaceHole :: to -> Zipper from to -> Zipper from to
replaceHole x z = z{zipp=replaceN x (zipp z)}
---------------------------------------------------------------------
-- N LEVEL ZIPPER ON Str
data ZipN x = ZipN [Str x -> Zip1 x] (Zip1 x)
instance Eq x => Eq (ZipN x) where
x@(ZipN _ xx) == y@(ZipN _ yy) = xx == yy && upN x == upN y
zipN :: Str x -> Maybe (ZipN x)
zipN x = fmap (ZipN []) $ zip1 x
leftN (ZipN p x) = fmap (ZipN p) $ left1 x
rightN (ZipN p x) = fmap (ZipN p) $ right1 x
holeN (ZipN _ x) = hole1 x
replaceN v (ZipN p x) = ZipN p $ replace1 x v
upN (ZipN [] x) = Nothing
upN (ZipN (p:ps) x) = Just $ ZipN ps $ p $ top1 x
topN (ZipN [] x) = x
topN x = topN $ fromJust $ upN x
downN :: Uniplate x => ZipN x -> Maybe (ZipN x)
downN (ZipN ps x) = fmap (ZipN $ replace1 x . gen : ps) $ zip1 cs
where (cs,gen) = uniplate $ hole1 x
---------------------------------------------------------------------
-- 1 LEVEL ZIPPER ON Str
data Diff1 a = TwoLeft (Str a) | TwoRight (Str a) deriving Eq
undiff1 r (TwoLeft l) = Two l r
undiff1 l (TwoRight r) = Two l r
-- Warning: this definition of Eq may look too strong (Str Left/Right is not relevant)
-- but you don't know what the uniplate.gen function will do
data Zip1 a = Zip1 [Diff1 a] a deriving Eq
zip1 :: Str x -> Maybe (Zip1 x)
zip1 = insert1 True []
insert1 :: Bool -> [Diff1 a] -> Str a -> Maybe (Zip1 a)
insert1 leftmost c Zero = Nothing
insert1 leftmost c (One x) = Just $ Zip1 c x
insert1 leftmost c (Two l r) = if leftmost then ll `mplus` rr else rr `mplus` ll
where ll = insert1 leftmost (TwoRight r:c) l
rr = insert1 leftmost (TwoLeft l:c) r
left1, right1 :: Zip1 a -> Maybe (Zip1 a)
left1 = move1 True
right1 = move1 False
move1 :: Bool -> Zip1 a -> Maybe (Zip1 a)
move1 leftward (Zip1 p x) = f p $ One x
where
f p x = msum $
[insert1 False (TwoRight x:ps) l | TwoLeft l:ps <- [p], leftward] ++
[insert1 True (TwoLeft x:ps) r | TwoRight r:ps <- [p], not leftward] ++
[f ps (x `undiff1` p) | p:ps <- [p]]
top1 :: Zip1 a -> Str a
top1 (Zip1 p x) = f p (One x)
where f :: [Diff1 a] -> Str a -> Str a
f [] x = x
f (p:ps) x = f ps (x `undiff1` p)
hole1 :: Zip1 a -> a
hole1 (Zip1 _ x) = x
-- this way round so the a can be disguarded quickly
replace1 :: Zip1 a -> a -> Zip1 a
replace1 (Zip1 p _) = Zip1 p
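-- --------------------------------------------------------------------------
-- The module below is NOT part of Zipper.hs. It is a small, self-contained
-- usage sketch of the API defined above, written as if it were a separate
-- demo file. The Expr type, the derived Data instance (which supplies the
-- Uniplate instance via Data.Generics.Uniplate.Data) and the concrete values
-- are assumptions made purely for illustration.
-- --------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
module ZipperDemo where

import Data.Data
import Data.Generics.Uniplate.Data ()      -- Uniplate instances via Data
import Data.Generics.Uniplate.Zipper

data Expr = Val Int | Add Expr Expr
            deriving (Data, Typeable, Show, Eq)

-- Focus the whole value, step down to its first child, replace that child,
-- and rebuild the full structure.
demo :: Expr
demo = case down (zipper (Add (Val 1) (Val 2))) of
           Nothing -> error "no children to descend into"
           Just z  -> fromZipper (replaceHole (Val 10) z)
-- demo evaluates to: Add (Val 10) (Val 2)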
| null | https://raw.githubusercontent.com/ndmitchell/uniplate/7d3039606d7a083f6d77f9f960c919668788de91/Data/Generics/Uniplate/Zipper.hs | haskell | * Create a zipper and get back the value
* Navigate within a zipper
* Manipulate the zipper hole
| Create a zipper, focused on the top-left value.
| Create a zipper with a different focus type from the outer type. Will return
@Nothing@ if there are no instances of the focus type within the original value.
| From a zipper take the whole structure, including any modifications.
| Retrieve the current focus of the zipper..
| Replace the value currently at the focus of the zipper.
-------------------------------------------------------------------
N LEVEL ZIPPER ON Str
-------------------------------------------------------------------
but you don't know what the uniplate.gen function will do
this way round so the a can be disguarded quickly | |
{-|
A zipper is a structure for walking a value and manipulating it in constant time.
This module was inspired by the paper:
/Michael D. Adams. Scrap Your Zippers: A Generic Zipper for Heterogeneous Types, Workshop on Generic Programming 2010/.
-}
module Data.Generics.Uniplate.Zipper(
Zipper, zipper, zipperBi, fromZipper,
left, right, up, down,
hole, replaceHole
) where
import Data.Generics.Uniplate.Operations
import Data.Generics.Str
import Control.Monad
import Data.Maybe
zipper :: Uniplate to => to -> Zipper to to
zipper = fromJust . toZipper (\x -> (One x, \(One x) -> x))
zipperBi :: Biplate from to => from -> Maybe (Zipper from to)
zipperBi = toZipper biplate
-- | Zipper structure, whose root type is the first type argument, and whose
--   focus type is the second type argument.
data Zipper from to = Zipper
{reform :: Str to -> from
,zipp :: ZipN to
}
rezipp f (Zipper a b) = fmap (Zipper a) $ f b
instance (Eq from, Eq to) => Eq (Zipper from to) where
a == b = fromZipper a == fromZipper b && zipp a == zipp b
toZipper :: Uniplate to => (from -> (Str to, Str to -> from)) -> from -> Maybe (Zipper from to)
toZipper biplate x = fmap (Zipper gen) $ zipN cs
where (cs,gen) = biplate x
fromZipper :: Zipper from to -> from
fromZipper x = reform x $ top1 $ topN $ zipp x
-- | Move one step left from the current position.
left :: Zipper from to -> Maybe (Zipper from to)
left = rezipp leftN
-- | Move one step right from the current position.
right :: Zipper from to -> Maybe (Zipper from to)
right = rezipp rightN
-- | Move one step down from the current position.
down :: Uniplate to => Zipper from to -> Maybe (Zipper from to)
down = rezipp downN
-- | Move one step up from the current position.
up :: Zipper from to -> Maybe (Zipper from to)
up = rezipp upN
hole :: Zipper from to -> to
hole = holeN . zipp
replaceHole :: to -> Zipper from to -> Zipper from to
replaceHole x z = z{zipp=replaceN x (zipp z)}
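-- A minimal end-to-end sketch (not part of the original module): assuming a
-- small expression type with a Uniplate instance, e.g.
--
-- > data Expr = Val Int | Add Expr Expr deriving Show
--
-- the exported API composes as follows ('e' is assumed to be an 'Expr'):
--
-- > rewriteFirstChild :: Expr -> Expr
-- > rewriteFirstChild e =
-- >     case down (zipper e) of
-- >         Nothing -> e                                  -- no child of type Expr
-- >         Just z  -> fromZipper (replaceHole (Val 0) z) -- replace leftmost child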
data ZipN x = ZipN [Str x -> Zip1 x] (Zip1 x)
instance Eq x => Eq (ZipN x) where
x@(ZipN _ xx) == y@(ZipN _ yy) = xx == yy && upN x == upN y
zipN :: Str x -> Maybe (ZipN x)
zipN x = fmap (ZipN []) $ zip1 x
leftN (ZipN p x) = fmap (ZipN p) $ left1 x
rightN (ZipN p x) = fmap (ZipN p) $ right1 x
holeN (ZipN _ x) = hole1 x
replaceN v (ZipN p x) = ZipN p $ replace1 x v
upN (ZipN [] x) = Nothing
upN (ZipN (p:ps) x) = Just $ ZipN ps $ p $ top1 x
topN (ZipN [] x) = x
topN x = topN $ fromJust $ upN x
downN :: Uniplate x => ZipN x -> Maybe (ZipN x)
downN (ZipN ps x) = fmap (ZipN $ replace1 x . gen : ps) $ zip1 cs
where (cs,gen) = uniplate $ hole1 x
-- 1 LEVEL ZIPPER ON Str
data Diff1 a = TwoLeft (Str a) | TwoRight (Str a) deriving Eq
undiff1 r (TwoLeft l) = Two l r
undiff1 l (TwoRight r) = Two l r
-- Warning: this definition of Eq may look too strong (Str Left/Right is not relevant)
data Zip1 a = Zip1 [Diff1 a] a deriving Eq
zip1 :: Str x -> Maybe (Zip1 x)
zip1 = insert1 True []
insert1 :: Bool -> [Diff1 a] -> Str a -> Maybe (Zip1 a)
insert1 leftmost c Zero = Nothing
insert1 leftmost c (One x) = Just $ Zip1 c x
insert1 leftmost c (Two l r) = if leftmost then ll `mplus` rr else rr `mplus` ll
where ll = insert1 leftmost (TwoRight r:c) l
rr = insert1 leftmost (TwoLeft l:c) r
left1, right1 :: Zip1 a -> Maybe (Zip1 a)
left1 = move1 True
right1 = move1 False
move1 :: Bool -> Zip1 a -> Maybe (Zip1 a)
move1 leftward (Zip1 p x) = f p $ One x
where
f p x = msum $
[insert1 False (TwoRight x:ps) l | TwoLeft l:ps <- [p], leftward] ++
[insert1 True (TwoLeft x:ps) r | TwoRight r:ps <- [p], not leftward] ++
[f ps (x `undiff1` p) | p:ps <- [p]]
top1 :: Zip1 a -> Str a
top1 (Zip1 p x) = f p (One x)
where f :: [Diff1 a] -> Str a -> Str a
f [] x = x
f (p:ps) x = f ps (x `undiff1` p)
hole1 :: Zip1 a -> a
hole1 (Zip1 _ x) = x
replace1 :: Zip1 a -> a -> Zip1 a
replace1 (Zip1 p _) = Zip1 p
|
84ce7333d38707b82f556dfe8230f20cf636a2aa026a8bb18bf64a7a3434fa6b | nandor/llir-ocaml | mach.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Representation of machine code by sequences of pseudoinstructions *)
type label = Cmm.label
type integer_comparison =
Isigned of Cmm.integer_comparison
| Iunsigned of Cmm.integer_comparison
type integer_operation =
Iadd | Isub | Imul | Imulh | Idiv | Imod
| Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Icomp of integer_comparison
| Icheckbound of { label_after_error : label option;
spacetime_index : int; }
type float_comparison = Cmm.float_comparison
type test =
Itruetest
| Ifalsetest
| Iinttest of integer_comparison
| Iinttest_imm of integer_comparison * int
| Ifloattest of float_comparison
| Ioddtest
| Ieventest
type operation =
Imove
| Ispill
| Ireload
| Iconst_int of nativeint
| Iconst_float of int64
| Iconst_symbol of string
| Icall_ind of { label_after : label; }
| Icall_imm of { func : string; label_after : label; }
| Itailcall_ind of { label_after : label; }
| Itailcall_imm of { func : string; label_after : label; }
| Iextcall of { func : string; alloc : bool; label_after : label; }
| Istackoffset of int
| Iload of Cmm.memory_chunk * Arch.addressing_mode
| Istore of Cmm.memory_chunk * Arch.addressing_mode * bool
| Ialloc of { bytes : int; label_after_call_gc : label option;
dbginfo : Debuginfo.alloc_dbginfo; spacetime_index : int; }
| Iintop of integer_operation
| Iintop_imm of integer_operation * int
| Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf
| Ifloatofint | Iintoffloat
| Ispecific of Arch.specific_operation
| Iname_for_debugger of { ident : Backend_var.t; which_parameter : int option;
provenance : unit option; is_assignment : bool; }
type instruction =
{ desc: instruction_desc;
next: instruction;
arg: Reg.t array;
res: Reg.t array;
dbg: Debuginfo.t;
mutable live: Reg.Set.t;
mutable available_before: Reg_availability_set.t;
mutable available_across: Reg_availability_set.t option;
}
and instruction_desc =
Iend
| Iop of operation
| Ireturn
| Iifthenelse of test * float option * instruction * instruction
| Iswitch of int array * instruction array
| Icatch of Cmm.rec_flag * (int * instruction) list * instruction
| Iexit of int
| Itrywith of instruction * instruction
| Iraise of Lambda.raise_kind
type spacetime_part_of_shape =
| Direct_call_point of { callee : string; }
| Indirect_call_point
| Allocation_point
type spacetime_shape = (spacetime_part_of_shape * Cmm.label) list
type fundecl =
{ fun_name: string;
fun_args: Reg.t array;
fun_body: instruction;
fun_codegen_options : Cmm.codegen_option list;
fun_dbg : Debuginfo.t;
fun_spacetime_shape : spacetime_shape option;
fun_num_stack_slots: int array;
fun_contains_calls: bool;
}
let rec dummy_instr =
{ desc = Iend;
next = dummy_instr;
arg = [||];
res = [||];
dbg = Debuginfo.none;
live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let end_instr () =
{ desc = Iend;
next = dummy_instr;
arg = [||];
res = [||];
dbg = Debuginfo.none;
live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let instr_cons d a r n =
{ desc = d; next = n; arg = a; res = r;
dbg = Debuginfo.none; live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let instr_cons_debug d a r dbg n =
{ desc = d; next = n; arg = a; res = r; dbg = dbg; live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
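(* Illustrative sketch, not part of the original file: instruction sequences
   are built by consing onto a terminating [Iend], e.g.

     let seq = instr_cons (Iop Imove) [|src|] [|dst|] (end_instr ())

   where [src] and [dst] are assumed to be values of type [Reg.t]. *)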
let rec instr_iter f i =
match i.desc with
Iend -> ()
| _ ->
f i;
match i.desc with
Iend -> ()
| Ireturn | Iop(Itailcall_ind _) | Iop(Itailcall_imm _) -> ()
| Iifthenelse(_tst, _p, ifso, ifnot) ->
instr_iter f ifso; instr_iter f ifnot; instr_iter f i.next
| Iswitch(_index, cases) ->
for i = 0 to Array.length cases - 1 do
instr_iter f cases.(i)
done;
instr_iter f i.next
| Icatch(_, handlers, body) ->
instr_iter f body;
List.iter (fun (_n, handler) -> instr_iter f handler) handlers;
instr_iter f i.next
| Iexit _ -> ()
| Itrywith(body, handler) ->
instr_iter f body; instr_iter f handler; instr_iter f i.next
| Iraise _ -> ()
| _ ->
instr_iter f i.next
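(* Illustrative sketch, not part of the original file: [instr_iter] applies a
   function to every instruction reachable from a body, e.g. counting them:

     let count = ref 0 in
     instr_iter (fun _ -> incr count) fundecl.fun_body

   where [fundecl] is assumed to be a value of the [fundecl] type above. *)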
let spacetime_node_hole_pointer_is_live_before insn =
match insn.desc with
| Iop op ->
begin match op with
| Icall_ind _ | Icall_imm _ | Itailcall_ind _ | Itailcall_imm _ -> true
| Iextcall { alloc; } -> alloc
| Ialloc _ ->
(* Allocations are special: the call to [caml_call_gc] requires some
   instrumentation code immediately prior, but this is not inserted until
   the emitter (since the call is not visible prior to that in any IR).
   As such, none of the Mach / Linearize analyses will ever see that
   we use the node hole pointer for these, and we do not need to say
   that it is live at such points. *)
false
| Iintop op | Iintop_imm (op, _) ->
begin match op with
| Icheckbound _
(* [Icheckbound] doesn't need to return [true] for the same reason as
[Ialloc]. *)
| Iadd | Isub | Imul | Imulh | Idiv | Imod
| Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Icomp _ -> false
end
| Ispecific specific_op ->
Arch.spacetime_node_hole_pointer_is_live_before specific_op
| Imove | Ispill | Ireload | Iconst_int _ | Iconst_float _
| Iconst_symbol _ | Istackoffset _ | Iload _ | Istore _
| Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf
| Ifloatofint | Iintoffloat
| Iname_for_debugger _ -> false
end
| Iend | Ireturn | Iifthenelse _ | Iswitch _ | Icatch _
| Iexit _ | Itrywith _ | Iraise _ -> false
let operation_can_raise op =
match op with
| Icall_ind _ | Icall_imm _ | Iextcall _
| Iintop (Icheckbound _) | Iintop_imm (Icheckbound _, _)
| Ialloc _ -> true
| _ -> false
| null | https://raw.githubusercontent.com/nandor/llir-ocaml/9c019f15c444e30c825b1673cbe827e0497868fe/asmcomp/mach.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Representation of machine code by sequences of pseudoinstructions
[Icheckbound] doesn't need to return [true] for the same reason as
[Ialloc]. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type label = Cmm.label
type integer_comparison =
Isigned of Cmm.integer_comparison
| Iunsigned of Cmm.integer_comparison
type integer_operation =
Iadd | Isub | Imul | Imulh | Idiv | Imod
| Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Icomp of integer_comparison
| Icheckbound of { label_after_error : label option;
spacetime_index : int; }
type float_comparison = Cmm.float_comparison
type test =
Itruetest
| Ifalsetest
| Iinttest of integer_comparison
| Iinttest_imm of integer_comparison * int
| Ifloattest of float_comparison
| Ioddtest
| Ieventest
type operation =
Imove
| Ispill
| Ireload
| Iconst_int of nativeint
| Iconst_float of int64
| Iconst_symbol of string
| Icall_ind of { label_after : label; }
| Icall_imm of { func : string; label_after : label; }
| Itailcall_ind of { label_after : label; }
| Itailcall_imm of { func : string; label_after : label; }
| Iextcall of { func : string; alloc : bool; label_after : label; }
| Istackoffset of int
| Iload of Cmm.memory_chunk * Arch.addressing_mode
| Istore of Cmm.memory_chunk * Arch.addressing_mode * bool
| Ialloc of { bytes : int; label_after_call_gc : label option;
dbginfo : Debuginfo.alloc_dbginfo; spacetime_index : int; }
| Iintop of integer_operation
| Iintop_imm of integer_operation * int
| Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf
| Ifloatofint | Iintoffloat
| Ispecific of Arch.specific_operation
| Iname_for_debugger of { ident : Backend_var.t; which_parameter : int option;
provenance : unit option; is_assignment : bool; }
type instruction =
{ desc: instruction_desc;
next: instruction;
arg: Reg.t array;
res: Reg.t array;
dbg: Debuginfo.t;
mutable live: Reg.Set.t;
mutable available_before: Reg_availability_set.t;
mutable available_across: Reg_availability_set.t option;
}
and instruction_desc =
Iend
| Iop of operation
| Ireturn
| Iifthenelse of test * float option * instruction * instruction
| Iswitch of int array * instruction array
| Icatch of Cmm.rec_flag * (int * instruction) list * instruction
| Iexit of int
| Itrywith of instruction * instruction
| Iraise of Lambda.raise_kind
type spacetime_part_of_shape =
| Direct_call_point of { callee : string; }
| Indirect_call_point
| Allocation_point
type spacetime_shape = (spacetime_part_of_shape * Cmm.label) list
type fundecl =
{ fun_name: string;
fun_args: Reg.t array;
fun_body: instruction;
fun_codegen_options : Cmm.codegen_option list;
fun_dbg : Debuginfo.t;
fun_spacetime_shape : spacetime_shape option;
fun_num_stack_slots: int array;
fun_contains_calls: bool;
}
let rec dummy_instr =
{ desc = Iend;
next = dummy_instr;
arg = [||];
res = [||];
dbg = Debuginfo.none;
live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let end_instr () =
{ desc = Iend;
next = dummy_instr;
arg = [||];
res = [||];
dbg = Debuginfo.none;
live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let instr_cons d a r n =
{ desc = d; next = n; arg = a; res = r;
dbg = Debuginfo.none; live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let instr_cons_debug d a r dbg n =
{ desc = d; next = n; arg = a; res = r; dbg = dbg; live = Reg.Set.empty;
available_before = Reg_availability_set.Ok Reg_with_debug_info.Set.empty;
available_across = None;
}
let rec instr_iter f i =
match i.desc with
Iend -> ()
| _ ->
f i;
match i.desc with
Iend -> ()
| Ireturn | Iop(Itailcall_ind _) | Iop(Itailcall_imm _) -> ()
| Iifthenelse(_tst, _p, ifso, ifnot) ->
instr_iter f ifso; instr_iter f ifnot; instr_iter f i.next
| Iswitch(_index, cases) ->
for i = 0 to Array.length cases - 1 do
instr_iter f cases.(i)
done;
instr_iter f i.next
| Icatch(_, handlers, body) ->
instr_iter f body;
List.iter (fun (_n, handler) -> instr_iter f handler) handlers;
instr_iter f i.next
| Iexit _ -> ()
| Itrywith(body, handler) ->
instr_iter f body; instr_iter f handler; instr_iter f i.next
| Iraise _ -> ()
| _ ->
instr_iter f i.next
let spacetime_node_hole_pointer_is_live_before insn =
match insn.desc with
| Iop op ->
begin match op with
| Icall_ind _ | Icall_imm _ | Itailcall_ind _ | Itailcall_imm _ -> true
| Iextcall { alloc; } -> alloc
| Ialloc _ ->
(* Allocations are special: the call to [caml_call_gc] requires some
   instrumentation code immediately prior, but this is not inserted until
   the emitter (since the call is not visible prior to that in any IR).
   As such, none of the Mach / Linearize analyses will ever see that
   we use the node hole pointer for these, and we do not need to say
   that it is live at such points. *)
false
| Iintop op | Iintop_imm (op, _) ->
begin match op with
| Icheckbound _
| Iadd | Isub | Imul | Imulh | Idiv | Imod
| Iand | Ior | Ixor | Ilsl | Ilsr | Iasr
| Icomp _ -> false
end
| Ispecific specific_op ->
Arch.spacetime_node_hole_pointer_is_live_before specific_op
| Imove | Ispill | Ireload | Iconst_int _ | Iconst_float _
| Iconst_symbol _ | Istackoffset _ | Iload _ | Istore _
| Inegf | Iabsf | Iaddf | Isubf | Imulf | Idivf
| Ifloatofint | Iintoffloat
| Iname_for_debugger _ -> false
end
| Iend | Ireturn | Iifthenelse _ | Iswitch _ | Icatch _
| Iexit _ | Itrywith _ | Iraise _ -> false
let operation_can_raise op =
match op with
| Icall_ind _ | Icall_imm _ | Iextcall _
| Iintop (Icheckbound _) | Iintop_imm (Icheckbound _, _)
| Ialloc _ -> true
| _ -> false
|
c9a3d129491504b5f62304f18e4d0cd35a0ad00458d2daef4eb1b72a9dc44660 | ghollisjr/cl-ana | package.lisp | cl - ana is a Common Lisp data analysis library .
;;;; Copyright 2013, 2014 Gary Hollis
;;;;
;;;; This file is part of cl-ana.
;;;;
;;;; cl-ana is free software: you can redistribute it and/or modify it
;;;; under the terms of the GNU General Public License as published by
;;;; the Free Software Foundation, either version 3 of the License, or
;;;; (at your option) any later version.
;;;;
;;;; cl-ana is distributed in the hope that it will be useful, but
;;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; General Public License for more details.
;;;;
;;;; You should have received a copy of the GNU General Public License
;;;; along with cl-ana. If not, see <http://www.gnu.org/licenses/>.
;;;;
;;;; You may contact Gary Hollis (me!) via email at
;;;;
(defpackage #:cl-ana.reusable-table
(:use :cl
:cl-ana.table)
(:export :wrap-for-reuse
:reusable-table
:make-reusable-table
:internal-table
:reusable-table-opener-form))
| null | https://raw.githubusercontent.com/ghollisjr/cl-ana/5cb4c0b0c9c4957452ad2a769d6ff9e8d5df0b10/reusable-table/package.lisp | lisp |
cl-ana is free software: you can redistribute it and/or modify it
(at your option) any later version.
cl-ana is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with cl-ana. If not, see </>.
| cl - ana is a Common Lisp data analysis library .
Copyright 2013 , 2014
This file is part of cl - ana .
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
You may contact ( me ! ) via email at
(defpackage #:cl-ana.reusable-table
(:use :cl
:cl-ana.table)
(:export :wrap-for-reuse
:reusable-table
:make-reusable-table
:internal-table
:reusable-table-opener-form))
|
98c274174d2bca74c99858f5cfe729230cd38924a21212336deb24457ba9ffbc | blitz/stumpwm | surfraw.lisp | SURFRAW module for StumpWM .
;;
;; Copyright (C) 2008 Ivy Foster
;;
;; Maintainer: Ivy Foster
;;
;; This module is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
;;
;; This module is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with this software; see the file COPYING. If not, write to
;; the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
;; Boston, MA 02111-1307 USA
;;; Commentary:
;;
;; I like surfraw (). If you're
;; reading this, you probably like surfraw. I've got surfraw-related
;; code in my .stumpwmrc, and (judging from some judicious googling
;; for RC files early on in my use of stumpwm) I know that I'm not the
;; only one. So it seemed like a good idea to just put that code in
;; a library.
;;; Usage:
;;
;; Just add the following line to your .stumpwmrc file:
;;
;; (load "/path/to/stumpwm/contrib/surfraw.lisp")
;;
;; ...and then either call the functions here with "colon" (C-t ;) or
;; bind them to a key. I figure other people will probably have
;; different key preferences than I have, so I leave them entirely up
;; to you.
;;
;; If you want to use the bookmark functions, don't forget to tell
;; stumpwm where your *surfraw-bookmark-file* is.
;;
;; Note that there are also "surfraw-selection" variants on each
;; command that work on the X selection.
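;; For example, a possible setup could look like this (the key choices and
;; the bookmark path below are made-up illustrations, not recommendations):
;;
;; (setf *surfraw-bookmark-file* "/home/you/.surfraw.bookmarks")
;; (define-key *root-map* (kbd "s-g") "google")
;; (define-key *root-map* (kbd "s-G") "google-selection")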
;;; FIXME:
;;
;; - Not all elvi are supported yet. Do they need to be?
;; - It would be pretty cool to have a macro like the
;; surfraw-selection one but for regular surfraw commands.
;; Supported elvi (feel free to add more):
;;
;; - Alioth
;; - Amazon
;; - Archpkg
;; - BBCNews
;; - CDDB
;; - CNN
;; - DebBugs
;; - Deja
;; - Ebay
;; - Etym
;; - FreeBSD
;; - Freshmeat
;; - GenPkg
;; - Google
;; - Thesaurus
;; - Wayback
;; - Webster
;; - Wikipedia
;;; Code:
;;; Regular surfraw commands
(defcommand surfraw (engine search)
((:string "What engine? ") (:string "Search for what? "))
"Use SURFRAW to surf the net; reclaim heathen lands."
(check-type engine string)
(check-type search string)
(run-shell-command (concat "exec surfraw -g " engine " " search)))
(defcommand alioth (search)
((:string "Search Alioth: "))
(surfraw "alioth" search))
(defcommand amazon (search)
((:string "Search Amazon: "))
(surfraw "amazon" search))
(defcommand archpkg (search)
((:string "Search Arch Linux packages: "))
(surfraw "archpkg" search))
(defcommand bbcnews (search)
((:string "Search BBC News: "))
(surfraw "bbcnews" search))
(defcommand cddb (search)
((:string "Search the CDDB: "))
(surfraw "cddb" search))
(defcommand cnn (search)
((:string "Search CNN: "))
(surfraw "cnn" search))
(defcommand debbugs (search)
((:string "Search the Debian BTS: "))
(surfraw "debbugs" search))
(defcommand deja (search)
((:string "Search Google Groups: "))
(surfraw "deja" search))
(defcommand ebay (search)
((:string "Search Ebay: "))
(surfraw "ebay" search))
(defcommand etym (search)
((:string "Search Etymology Online: "))
(surfraw "etym" search))
(defcommand freebsd (search)
((:string "Search FreeBSD info: "))
(surfraw "freebsd" search))
(defcommand freshmeat (search)
((:string "Search Freshmeat: "))
(surfraw "freshmeat" search))
(defcommand genpkg (search)
((:string "Search Gentoo packages: "))
(surfraw "genpkg" search))
(defcommand google (search)
((:string "Search google: "))
(surfraw "google" search))
(defcommand thesaurus (search)
((:string "Search a thesaurus: "))
(surfraw "thesaurus" search))
(defcommand wayback (search)
((:string "Search wayback: "))
(surfraw "wayback" search))
(defcommand webster (search)
((:string "Search the Merriam-Webster Dictionary: "))
(surfraw "webster" search))
(defcommand wikipedia (search)
((:string "Search wikipedia: "))
(surfraw "wikipedia" search))
;;; X selection
(defmacro surfraw-selection (name engine)
`(defcommand ,name () ()
(surfraw ,engine (get-x-selection))))
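;; As an illustration (not part of the original file), the first call below,
;; (surfraw-selection alioth-selection "alioth"), expands to:
;;
;; (defcommand alioth-selection () ()
;;   (surfraw "alioth" (get-x-selection)))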
(surfraw-selection alioth-selection "alioth")
(surfraw-selection amazon-selection "amazon")
(surfraw-selection archpkg-selection "archpkg")
(surfraw-selection bbcnews-selection "bbcnews")
(surfraw-selection cddb-selection "cddb")
(surfraw-selection cnn-selection "cnn")
(surfraw-selection debbugs-selection "debbugs")
(surfraw-selection deja-selection "deja")
(surfraw-selection ebay-selection "ebay")
(surfraw-selection etym-selection "etym")
(surfraw-selection freebsd-selection "freebsd")
(surfraw-selection freshmeat-selection "freshmeat")
(surfraw-selection genpkg-selection "genpkg")
(surfraw-selection google-selection "google")
(surfraw-selection thesaurus-selection "thesaurus")
(surfraw-selection wayback-selection "wayback")
(surfraw-selection webster-selection "webster")
(surfraw-selection wikipedia-selection "wikipedia")
;;; Bookmarks
(defun display-file (file)
"Display a file in the message area."
(if (probe-file file)
(run-shell-command (concat "cat " file) t)
(message "The file ~a does not exist." file)))
(defvar *surfraw-bookmark-file* nil
"The surfraw bookmark file")
(defcommand sr-bookmark (bmk) ((:string "Bookmark: "))
(surfraw "" bmk))
(defcommand sr-bookmark-file-display () ()
(display-file *surfraw-bookmark-file*))
;;; surfraw.lisp ends here
| null | https://raw.githubusercontent.com/blitz/stumpwm/439180985920a628b18d4426f1a29b1c36576531/contrib/surfraw.lisp | lisp |
Maintainer: Ivy Foster
This module is free software; you can redistribute it and/or modify
either version 2 , or ( at your option )
any later version.
This module is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this software; see the file COPYING. If not, write to
Commentary:
I like surfraw (). If you're
reading this, you probably like surfraw. I've got surfraw-related
code in my .stumpwmrc, and (judging from some judicious googling
a library.
Usage:
Just add the following line to your .stumpwmrc file:
...and then either call the functions here with "colon" (C-t ;) or
bind them to a key. I figure other people will probably have
different key preferences than I have, so I leave them entirely up
to you.
If you want to use the bookmark functions, don't forget to tell
stumpwm where your *surfraw-bookmark-file* is.
Note that there are also "surfraw-selection" variants on each
command that work on the X selection.
FIXME:
- Not all elvi are supported yet. Do they need to be?
- It would be pretty cool to have a macro like the
surfraw-selection one but for regular surfraw commands.
Supported elvi (feel free to add more):
- Alioth
- Amazon
- Archpkg
- BBCNews
- CDDB
- DebBugs
- Deja
- Ebay
- Etym
- FreeBSD
- Freshmeat
- GenPkg
- Google
- Thesaurus
- Wayback
- Webster
- Wikipedia
Code:
Regular surfraw commands
X selection
Bookmarks
surfraw.lisp ends here | SURFRAW module for StumpWM .
;; Copyright (C) 2008 Ivy Foster
;; it under the terms of the GNU General Public License as published by
;; You should have received a copy of the GNU General Public License
;; the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
;; Boston, MA 02111-1307 USA
;; for RC files early on in my use of stumpwm) I know that I'm not the
;; only one. So it seemed like a good idea to just put that code in
;; (load "/path/to/stumpwm/contrib/surfraw.lisp")
;; - CNN
(defcommand surfraw (engine search)
((:string "What engine? ") (:string "Search for what? "))
"Use SURFRAW to surf the net; reclaim heathen lands."
(check-type engine string)
(check-type search string)
(run-shell-command (concat "exec surfraw -g " engine " " search)))
(defcommand alioth (search)
((:string "Search Alioth: "))
(surfraw "alioth" search))
(defcommand amazon (search)
((:string "Search Amazon: "))
(surfraw "amazon" search))
(defcommand archpkg (search)
((:string "Search Arch Linux packages: "))
(surfraw "archpkg" search))
(defcommand bbcnews (search)
((:string "Search BBC News: "))
(surfraw "bbcnews" search))
(defcommand cddb (search)
((:string "Search the CDDB: "))
(surfraw "cddb" search))
(defcommand cnn (search)
((:string "Search CNN: "))
(surfraw "cnn" search))
(defcommand debbugs (search)
((:string "Search the Debian BTS: "))
(surfraw "debbugs" search))
(defcommand deja (search)
((:string "Search Google Groups: "))
(surfraw "deja" search))
(defcommand ebay (search)
((:string "Search Ebay: "))
(surfraw "ebay" search))
(defcommand etym (search)
((:string "Search Etymology Online: "))
(surfraw "etym" search))
(defcommand freebsd (search)
((:string "Search FreeBSD info: "))
(surfraw "freebsd" search))
(defcommand freshmeat (search)
((:string "Search Freshmeat: "))
(surfraw "freshmeat" search))
(defcommand genpkg (search)
((:string "Search Gentoo packages: "))
(surfraw "genpkg" search))
(defcommand google (search)
((:string "Search google: "))
(surfraw "google" search))
(defcommand thesaurus (search)
((:string "Search a thesaurus: "))
(surfraw "thesaurus" search))
(defcommand wayback (search)
((:string "Search wayback: "))
(surfraw "wayback" search))
(defcommand webster (search)
((:string "Search the Merriam-Webster Dictionary: "))
(surfraw "webster" search))
(defcommand wikipedia (search)
((:string "Search wikipedia: "))
(surfraw "wikipedia" search))
(defmacro surfraw-selection (name engine)
`(defcommand ,name () ()
(surfraw ,engine (get-x-selection))))
(surfraw-selection alioth-selection "alioth")
(surfraw-selection amazon-selection "amazon")
(surfraw-selection archpkg-selection "archpkg")
(surfraw-selection bbcnews-selection "bbcnews")
(surfraw-selection cddb-selection "cddb")
(surfraw-selection cnn-selection "cnn")
(surfraw-selection debbugs-selection "debbugs")
(surfraw-selection deja-selection "deja")
(surfraw-selection ebay-selection "ebay")
(surfraw-selection etym-selection "etym")
(surfraw-selection freebsd-selection "freebsd")
(surfraw-selection freshmeat-selection "freshmeat")
(surfraw-selection genpkg-selection "genpkg")
(surfraw-selection google-selection "google")
(surfraw-selection thesaurus-selection "thesaurus")
(surfraw-selection wayback-selection "wayback")
(surfraw-selection webster-selection "webster")
(surfraw-selection wikipedia-selection "wikipedia")
(defun display-file (file)
"Display a file in the message area."
(if (probe-file file)
(run-shell-command (concat "cat " file) t)
(message "The file ~a does not exist." file)))
(defvar *surfraw-bookmark-file* nil
"The surfraw bookmark file")
(defcommand sr-bookmark (bmk) ((:string "Bookmark: "))
(surfraw "" bmk))
(defcommand sr-bookmark-file-display () ()
(display-file *surfraw-bookmark-file*))
|
0bb2885b4e5e5f0e8e6f9951647e2432df15562bdb5de2e7c712aedd9122b6a3 | fetburner/Coq2SML | reductionops.ml | (************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Pp
open Util
open Names
open Term
open Termops
open Univ
open Evd
open Declarations
open Environ
open Closure
open Esubst
open Reduction
exception Elimconst
(**********************************************************************)
(* The type of (machine) stacks (= lambda-bar-calculus' contexts) *)
type 'a stack_member =
| Zapp of 'a list
| Zcase of case_info * 'a * 'a array
| Zfix of 'a * 'a stack
| Zshift of int
| Zupdate of 'a
and 'a stack = 'a stack_member list
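(* Illustrative note, not part of the original file: an application [f a b]
   is handled as the state [(f, [Zapp [a; b]])]; [app_stack] below rebuilds
   the term, so [app_stack (f, [Zapp [a; b]])] is [applist (f, [a; b])]. *)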
let empty_stack = []
let append_stack_list l s =
match (l,s) with
| ([],s) -> s
| (l1, Zapp l :: s) -> Zapp (l1@l) :: s
| (l1, s) -> Zapp l1 :: s
let append_stack v s = append_stack_list (Array.to_list v) s
let rec stack_args_size = function
| Zapp l::s -> List.length l + stack_args_size s
| Zshift(_)::s -> stack_args_size s
| Zupdate(_)::s -> stack_args_size s
| _ -> 0
(* When used as an argument stack (only Zapp can appear) *)
let rec decomp_stack = function
| Zapp[v]::s -> Some (v, s)
| Zapp(v::l)::s -> Some (v, (Zapp l :: s))
| Zapp [] :: s -> decomp_stack s
| _ -> None
let array_of_stack s =
let rec stackrec = function
| [] -> []
| Zapp args :: s -> args :: (stackrec s)
| _ -> assert false
in Array.of_list (List.concat (stackrec s))
let rec list_of_stack = function
| [] -> []
| Zapp args :: s -> args @ (list_of_stack s)
| _ -> assert false
let rec app_stack = function
| f, [] -> f
| f, (Zapp [] :: s) -> app_stack (f, s)
| f, (Zapp args :: s) ->
app_stack (applist (f, args), s)
| _ -> assert false
let rec stack_assign s p c = match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then
Zapp args :: stack_assign s (p-q) c
else
(match list_chop p args with
(bef, _::aft) -> Zapp (bef@c::aft) :: s
| _ -> assert false)
| _ -> s
let rec stack_tail p s =
if p = 0 then s else
match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then stack_tail (p-q) s
else Zapp (list_skipn p args) :: s
| _ -> failwith "stack_tail"
let rec stack_nth s p = match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then stack_nth s (p-q)
else List.nth args p
| _ -> raise Not_found
(**************************************************************)
(* The type of (machine) states (= lambda-bar-calculus' cuts) *)
type state = constr * constr stack
type contextual_reduction_function = env -> evar_map -> constr -> constr
type reduction_function = contextual_reduction_function
type local_reduction_function = evar_map -> constr -> constr
type contextual_stack_reduction_function =
env -> evar_map -> constr -> constr * constr list
type stack_reduction_function = contextual_stack_reduction_function
type local_stack_reduction_function =
evar_map -> constr -> constr * constr list
type contextual_state_reduction_function =
env -> evar_map -> state -> state
type state_reduction_function = contextual_state_reduction_function
type local_state_reduction_function = evar_map -> state -> state
(*************************************)
(*** Reduction Functions Operators ***)
(*************************************)
let safe_evar_value sigma ev =
try Some (Evd.existential_value sigma ev)
with NotInstantiatedEvar | Not_found -> None
let rec whd_app_state sigma (x, stack as s) =
match kind_of_term x with
| App (f,cl) -> whd_app_state sigma (f, append_stack cl stack)
| Cast (c,_,_) -> whd_app_state sigma (c, stack)
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whd_app_state sigma (c,stack)
| _ -> s)
| _ -> s
let safe_meta_value sigma ev =
try Some (Evd.meta_value sigma ev)
with Not_found -> None
let appterm_of_stack (f,s) = (f,list_of_stack s)
let whd_stack sigma x =
appterm_of_stack (whd_app_state sigma (x, empty_stack))
let whd_castapp_stack = whd_stack
let strong whdfun env sigma t =
let rec strongrec env t =
map_constr_with_full_binders push_rel strongrec env (whdfun env sigma t) in
strongrec env t
let local_strong whdfun sigma =
let rec strongrec t = map_constr strongrec (whdfun sigma t) in
strongrec
let rec strong_prodspine redfun sigma c =
let x = redfun sigma c in
match kind_of_term x with
| Prod (na,a,b) -> mkProd (na,a,strong_prodspine redfun sigma b)
| _ -> x
(*************************************)
(*** Reduction using bindingss ***)
(*************************************)
(* This signature is very similar to Closure.RedFlagsSig except there
is eta but no per-constant unfolding *)
module type RedFlagsSig = sig
type flags
type flag
val fbeta : flag
val fdelta : flag
val feta : flag
val fiota : flag
val fzeta : flag
val mkflags : flag list -> flags
val red_beta : flags -> bool
val red_delta : flags -> bool
val red_eta : flags -> bool
val red_iota : flags -> bool
val red_zeta : flags -> bool
end
(* Compact Implementation *)
module RedFlags = (struct
type flag = int
type flags = int
let fbeta = 1
let fdelta = 2
let feta = 8
let fiota = 16
let fzeta = 32
let mkflags = List.fold_left (lor) 0
let red_beta f = f land fbeta <> 0
let red_delta f = f land fdelta <> 0
let red_eta f = f land feta <> 0
let red_iota f = f land fiota <> 0
let red_zeta f = f land fzeta <> 0
end : RedFlagsSig)
open RedFlags
(* Local *)
let beta = mkflags [fbeta]
let eta = mkflags [feta]
let zeta = mkflags [fzeta]
let betaiota = mkflags [fiota; fbeta]
let betaiotazeta = mkflags [fiota; fbeta;fzeta]
(* Contextual *)
let delta = mkflags [fdelta]
let betadelta = mkflags [fbeta;fdelta;fzeta]
let betadeltaeta = mkflags [fbeta;fdelta;fzeta;feta]
let betadeltaiota = mkflags [fbeta;fdelta;fzeta;fiota]
let betadeltaiota_nolet = mkflags [fbeta;fdelta;fiota]
let betadeltaiotaeta = mkflags [fbeta;fdelta;fzeta;fiota;feta]
let betaetalet = mkflags [fbeta;feta;fzeta]
let betalet = mkflags [fbeta;fzeta]
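(* Illustrative note, not part of the original file: flag sets are plain bit
   masks, e.g. [mkflags [fbeta; fiota]] is [1 lor 16 = 17], for which
   [red_beta] and [red_iota] hold while [red_delta] does not. *)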
(* Beta Reduction tools *)
let rec stacklam recfun env t stack =
match (decomp_stack stack,kind_of_term t) with
| Some (h,stacktl), Lambda (_,_,c) -> stacklam recfun (h::env) c stacktl
| _ -> recfun (substl env t, stack)
let beta_applist (c,l) =
stacklam app_stack [] c (append_stack_list l empty_stack)
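(* Illustrative sketch, not part of the original file: with [k] the term
   [fun x y -> x], i.e. [Lambda (x, tx, Lambda (y, ty, Rel 2))],
   [beta_applist (k, [a; b])] reduces to [a]. *)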
(* Iota reduction tools *)
type 'a miota_args = {
mP : constr; (* the result type *)
mconstr : constr; (* the constructor *)
mci : case_info; (* special info to re-build pattern *)
mcargs : 'a list; (* the constructor's arguments *)
mlf : 'a array } (* the branch code vector *)
let reducible_mind_case c = match kind_of_term c with
| Construct _ | CoFix _ -> true
| _ -> false
let contract_cofix (bodynum,(types,names,bodies as typedbodies)) =
let nbodies = Array.length bodies in
let make_Fi j = mkCoFix (nbodies-j-1,typedbodies) in
substl (list_tabulate make_Fi nbodies) bodies.(bodynum)
let reduce_mind_case mia =
match kind_of_term mia.mconstr with
| Construct (ind_sp,i) ->
(* let ncargs = (fst mia.mci).(i-1) in *)
let real_cargs = list_skipn mia.mci.ci_npar mia.mcargs in
applist (mia.mlf.(i-1),real_cargs)
| CoFix cofix ->
let cofix_def = contract_cofix cofix in
mkCase (mia.mci, mia.mP, applist(cofix_def,mia.mcargs), mia.mlf)
| _ -> assert false
(* contracts fix==FIX[nl;i](A1...Ak;[F1...Fk]{B1....Bk}) to produce
Bi[Fj --> FIX[nl;j](A1...Ak;[F1...Fk]{B1...Bk})] *)
let contract_fix ((recindices,bodynum),(types,names,bodies as typedbodies)) =
let nbodies = Array.length recindices in
let make_Fi j = mkFix ((recindices,nbodies-j-1),typedbodies) in
substl (list_tabulate make_Fi nbodies) bodies.(bodynum)
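(* Illustrative note, not part of the original file: for a single recursive
   function, [nbodies = 1] and the substitution replaces [Rel 1] (the
   recursive self-reference) by the fixpoint itself in its body, which is
   the usual one-step unfolding of [fix]. *)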
let fix_recarg ((recindices,bodynum),_) stack =
assert (0 <= bodynum & bodynum < Array.length recindices);
let recargnum = Array.get recindices bodynum in
try
Some (recargnum, stack_nth stack recargnum)
with Not_found ->
None
type fix_reduction_result = NotReducible | Reduced of state
let reduce_fix whdfun sigma fix stack =
match fix_recarg fix stack with
| None -> NotReducible
| Some (recargnum,recarg) ->
let (recarg'hd,_ as recarg') = whdfun sigma (recarg, empty_stack) in
let stack' = stack_assign stack recargnum (app_stack recarg') in
(match kind_of_term recarg'hd with
| Construct _ -> Reduced (contract_fix fix, stack')
| _ -> NotReducible)
(* Generic reduction function *)
(* There used to be a comment here:
   NB: this function allocates little; it is the call
     ``let (c,cargs) = whfun (recarg, empty_stack)''
                              -------------------
   that is expensive *)
let rec whd_state_gen flags ts env sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| Rel n when red_delta flags ->
(match lookup_rel n env with
| (_,Some body,_) -> whrec (lift n body, stack)
| _ -> s)
| Var id when red_delta flags ->
(match lookup_named id env with
| (_,Some body,_) -> whrec (body, stack)
| _ -> s)
| Evar ev ->
(match safe_evar_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Meta ev ->
(match safe_meta_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Const const when is_transparent_constant ts const ->
(match constant_opt_value env const with
| Some body -> whrec (body, stack)
| None -> s)
| LetIn (_,b,_,c) when red_zeta flags -> stacklam whrec [b] c stack
| Cast (c,_,_) -> whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (na,t,c) ->
(match decomp_stack stack with
| Some (a,m) when red_beta flags -> stacklam whrec [a] c m
| None when red_eta flags ->
let env' = push_rel (na,None,t) env in
let whrec' = whd_state_gen flags ts env' sigma in
(match kind_of_term (app_stack (whrec' (c, empty_stack))) with
| App (f,cl) ->
let napp = Array.length cl in
if napp > 0 then
let x', l' = whrec' (array_last cl, empty_stack) in
match kind_of_term x', decomp_stack l' with
| Rel 1, None ->
let lc = Array.sub cl 0 (napp-1) in
let u = if napp=1 then f else appvect (f,lc) in
if noccurn 1 u then (pop u,empty_stack) else s
| _ -> s
else s
| _ -> s)
| _ -> s)
| Case (ci,p,d,lf) when red_iota flags ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| Fix fix when red_iota flags ->
(match reduce_fix (fun _ -> whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| x -> s
in
whrec
let local_whd_state_gen flags sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| LetIn (_,b,_,c) when red_zeta flags -> stacklam whrec [b] c stack
| Cast (c,_,_) -> whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (_,_,c) ->
(match decomp_stack stack with
| Some (a,m) when red_beta flags -> stacklam whrec [a] c m
| None when red_eta flags ->
(match kind_of_term (app_stack (whrec (c, empty_stack))) with
| App (f,cl) ->
let napp = Array.length cl in
if napp > 0 then
let x', l' = whrec (array_last cl, empty_stack) in
match kind_of_term x', decomp_stack l' with
| Rel 1, None ->
let lc = Array.sub cl 0 (napp-1) in
let u = if napp=1 then f else appvect (f,lc) in
if noccurn 1 u then (pop u,empty_stack) else s
| _ -> s
else s
| _ -> s)
| _ -> s)
| Case (ci,p,d,lf) when red_iota flags ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| Fix fix when red_iota flags ->
(match reduce_fix (fun _ ->whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whrec (c,stack)
| None -> s)
| Meta ev ->
(match safe_meta_value sigma ev with
Some c -> whrec (c,stack)
| None -> s)
| x -> s
in
whrec
let stack_red_of_state_red f sigma x =
appterm_of_stack (f sigma (x, empty_stack))
let red_of_state_red f sigma x =
app_stack (f sigma (x,empty_stack))
(* 1. Beta Reduction Functions *)
let whd_beta_state = local_whd_state_gen beta
let whd_beta_stack = stack_red_of_state_red whd_beta_state
let whd_beta = red_of_state_red whd_beta_state
(* New! *)
let whd_betaetalet_state = local_whd_state_gen betaetalet
let whd_betaetalet_stack = stack_red_of_state_red whd_betaetalet_state
let whd_betaetalet = red_of_state_red whd_betaetalet_state
let whd_betalet_state = local_whd_state_gen betalet
let whd_betalet_stack = stack_red_of_state_red whd_betalet_state
let whd_betalet = red_of_state_red whd_betalet_state
(* 2. Delta Reduction Functions *)
let whd_delta_state e = whd_state_gen delta full_transparent_state e
let whd_delta_stack env = stack_red_of_state_red (whd_delta_state env)
let whd_delta env = red_of_state_red (whd_delta_state env)
let whd_betadelta_state e = whd_state_gen betadelta full_transparent_state e
let whd_betadelta_stack env =
stack_red_of_state_red (whd_betadelta_state env)
let whd_betadelta env =
red_of_state_red (whd_betadelta_state env)
let whd_betadeltaeta_state e =
whd_state_gen betadeltaeta full_transparent_state e
let whd_betadeltaeta_stack env =
stack_red_of_state_red (whd_betadeltaeta_state env)
let whd_betadeltaeta env =
red_of_state_red (whd_betadeltaeta_state env)
(* 3. Iota reduction Functions *)
let whd_betaiota_state = local_whd_state_gen betaiota
let whd_betaiota_stack = stack_red_of_state_red whd_betaiota_state
let whd_betaiota = red_of_state_red whd_betaiota_state
let whd_betaiotazeta_state = local_whd_state_gen betaiotazeta
let whd_betaiotazeta_stack = stack_red_of_state_red whd_betaiotazeta_state
let whd_betaiotazeta = red_of_state_red whd_betaiotazeta_state
let whd_betadeltaiota_state env =
whd_state_gen betadeltaiota full_transparent_state env
let whd_betadeltaiota_stack env =
stack_red_of_state_red (whd_betadeltaiota_state env)
let whd_betadeltaiota env =
red_of_state_red (whd_betadeltaiota_state env)
let whd_betadeltaiota_state_using ts env =
whd_state_gen betadeltaiota ts env
let whd_betadeltaiota_stack_using ts env =
stack_red_of_state_red (whd_betadeltaiota_state_using ts env)
let whd_betadeltaiota_using ts env =
red_of_state_red (whd_betadeltaiota_state_using ts env)
let whd_betadeltaiotaeta_state env =
whd_state_gen betadeltaiotaeta full_transparent_state env
let whd_betadeltaiotaeta_stack env =
stack_red_of_state_red (whd_betadeltaiotaeta_state env)
let whd_betadeltaiotaeta env =
red_of_state_red (whd_betadeltaiotaeta_state env)
let whd_betadeltaiota_nolet_state env =
whd_state_gen betadeltaiota_nolet full_transparent_state env
let whd_betadeltaiota_nolet_stack env =
stack_red_of_state_red (whd_betadeltaiota_nolet_state env)
let whd_betadeltaiota_nolet env =
red_of_state_red (whd_betadeltaiota_nolet_state env)
(* 4. Eta reduction Functions *)
let whd_eta c = app_stack (local_whd_state_gen eta Evd.empty (c,empty_stack))
(* 5. Zeta Reduction Functions *)
let whd_zeta c = app_stack (local_whd_state_gen zeta Evd.empty (c,empty_stack))
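(* Illustrative sketch, not part of the original file: each family above
   comes in three shapes, e.g. for betaiota:

     let t' = whd_betaiota sigma t in                        (* whnf term *)
     let (hd, args) = whd_betaiota_stack sigma t in          (* head + args *)
     let st = whd_betaiota_state sigma (t, empty_stack) in   (* on states *)
     ...
*)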
(****************************************************************************)
(* Reduction Functions *)
(****************************************************************************)
(* Replacing defined evars for error messages *)
let rec whd_evar sigma c =
match kind_of_term c with
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whd_evar sigma c
| None -> c)
| Sort s -> whd_sort_variable sigma c
| _ -> c
let nf_evar =
local_strong whd_evar
(* lazy reduction functions. The infos must be created for each term *)
(* Note by HH [oct 08]: why would it be the job of clos_norm_flags to add
   a [nf_evar] here *)
let clos_norm_flags flgs env sigma t =
try
norm_val
(create_clos_infos ~evars:(safe_evar_value sigma) flgs env)
(inject t)
with Anomaly _ -> error "Tried to normalized ill-typed term"
let nf_beta = clos_norm_flags Closure.beta empty_env
let nf_betaiota = clos_norm_flags Closure.betaiota empty_env
let nf_betadeltaiota env sigma =
clos_norm_flags Closure.betadeltaiota env sigma
(* Careful: reducing a beta-redex whose argument is not a variable
   can change the conversion time enormously during type checking:
     (fun x => x + x) M
*)
let rec whd_betaiota_preserving_vm_cast env sigma t =
let rec stacklam_var subst t stack =
match (decomp_stack stack,kind_of_term t) with
| Some (h,stacktl), Lambda (_,_,c) ->
begin match kind_of_term h with
| Rel i when not (evaluable_rel i env) ->
stacklam_var (h::subst) c stacktl
| Var id when not (evaluable_named id env)->
stacklam_var (h::subst) c stacktl
| _ -> whrec (substl subst t, stack)
end
| _ -> whrec (substl subst t, stack)
and whrec (x, stack as s) =
match kind_of_term x with
| Evar ev ->
(match safe_evar_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Cast (c,VMcast,t) ->
let c = app_stack (whrec (c,empty_stack)) in
let t = app_stack (whrec (t,empty_stack)) in
(mkCast(c,VMcast,t),stack)
| Cast (c,DEFAULTcast,_) ->
whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (na,t,c) ->
(match decomp_stack stack with
| Some (a,m) -> stacklam_var [a] c m
| _ -> s)
| Case (ci,p,d,lf) ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| x -> s
in
app_stack (whrec (t,empty_stack))
let nf_betaiota_preserving_vm_cast =
strong whd_betaiota_preserving_vm_cast
(********************************************************************)
(* Conversion *)
(********************************************************************)
(*
let fkey = Profile.declare_profile "fhnf";;
let fhnf info v = Profile.profile2 fkey fhnf info v;;
let fakey = Profile.declare_profile "fhnf_apply";;
let fhnf_apply info k h a = Profile.profile4 fakey fhnf_apply info k h a;;
*)
let is_transparent k =
Conv_oracle.get_strategy k <> Conv_oracle.Opaque
(* Conversion utility functions *)
type conversion_test = constraints -> constraints
let pb_is_equal pb = pb = CONV
let pb_equal = function
| CUMUL -> CONV
| CONV -> CONV
let sort_cmp = sort_cmp
let test_conversion (f: ?l2r:bool-> ?evars:'a->'b) env sigma x y =
try let _ =
f ~evars:(safe_evar_value sigma) env x y in true
with NotConvertible -> false
| Anomaly _ -> error "Conversion test raised an anomaly"
let is_conv env sigma = test_conversion Reduction.conv env sigma
let is_conv_leq env sigma = test_conversion Reduction.conv_leq env sigma
let is_fconv = function | CONV -> is_conv | CUMUL -> is_conv_leq
let test_trans_conversion (f: ?l2r:bool-> ?evars:'a->'b) reds env sigma x y =
try let _ = f ~evars:(safe_evar_value sigma) reds env x y in true
with NotConvertible -> false
| Anomaly _ -> error "Conversion test raised an anomaly"
let is_trans_conv reds env sigma = test_trans_conversion Reduction.trans_conv reds env sigma
let is_trans_conv_leq reds env sigma = test_trans_conversion Reduction.trans_conv_leq reds env sigma
let is_trans_fconv = function | CONV -> is_trans_conv | CUMUL -> is_trans_conv_leq
(********************************************************************)
(* Special-Purpose Reduction *)
(********************************************************************)
let whd_meta sigma c = match kind_of_term c with
| Meta p -> (try meta_value sigma p with Not_found -> c)
| _ -> c
(* Try to replace all metas. Does not replace metas in the metas' values
* Differs from (strong whd_meta). *)
let plain_instance s c =
let rec irec n u = match kind_of_term u with
| Meta p -> (try lift n (List.assoc p s) with Not_found -> u)
| App (f,l) when isCast f ->
let (f,_,t) = destCast f in
let l' = Array.map (irec n) l in
(match kind_of_term f with
| Meta p ->
(* Don't flatten application nodes: this is used to extract a
proof-term from a proof-tree and we want to keep the structure
of the proof-tree *)
(try let g = List.assoc p s in
match kind_of_term g with
| App _ ->
let h = id_of_string "H" in
mkLetIn (Name h,g,t,mkApp(mkRel 1,Array.map (lift 1) l'))
| _ -> mkApp (g,l')
with Not_found -> mkApp (f,l'))
| _ -> mkApp (irec n f,l'))
| Cast (m,_,_) when isMeta m ->
(try lift n (List.assoc (destMeta m) s) with Not_found -> u)
| _ ->
map_constr_with_binders succ irec n u
in
if s = [] then c else irec 0 c
(* [instance] is used for [res_pf]; the call to [local_strong whd_betaiota]
   has (unfortunately) different subtle side effects:

   - ** Order of subgoals **
     If the lemma is a case analysis with parameters, it will move the
     parameters as first subgoals (e.g. "case H" applied on
     "H:D->A/\B|-C" will present the subgoal |-D first while w/o
     betaiota the subgoal |-D would have come last).

   - ** Betaiota-contraction in statement **
     If the lemma has a parameter which is a function and this
     function is applied in the lemma, then the _strong_ betaiota will
     contract the application of the function to its argument (e.g.
     "apply (H (fun x => x))" in "H:forall f, f 0 = 0 |- 0=0" will
     result in applying the lemma 0=0 in which "(fun x => x) 0" has
     been contracted). A goal to rewrite may then fail or succeed
     differently.

   - ** Naming of hypotheses **
     If a lemma is a function of the form "fun H:(forall a:A, P a)
     => .. F H .." where the expected type of H is "forall b:A, P b",
     then, without reduction, the application of the lemma will
     generate a subgoal "forall a:A, P a" (and intro will use name
     "a"), while with reduction, it will generate a subgoal "forall
     b:A, P b" (and intro will use name "b").

   - ** First-order pattern-matching **
     If a lemma has the type "(fun x => p) t" then rewriting t may fail
     if the type of the lemma is first beta-reduced (this typically happens
     when rewriting a single variable and the type of the lemma is obtained
     by meta_instance (with empty map) which itself calls instance with this
     empty map). *)
let instance sigma s c =
(* if s = [] then c else *)
local_strong whd_betaiota sigma (plain_instance s c)
(* pseudo-reduction rule:
 * [hnf_prod_app env s (Prod(_,B)) N --> B[N]
 * with an HNF on the first argument to produce a product.
 * if this does not work, then we use the string S as part of our
 * error message. *)
let hnf_prod_app env sigma t n =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Prod (_,_,b) -> subst1 n b
| _ -> anomaly "hnf_prod_app: Need a product"
let hnf_prod_appvect env sigma t nl =
Array.fold_left (hnf_prod_app env sigma) t nl
let hnf_prod_applist env sigma t nl =
List.fold_left (hnf_prod_app env sigma) t nl
let hnf_lam_app env sigma t n =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Lambda (_,_,b) -> subst1 n b
| _ -> anomaly "hnf_lam_app: Need an abstraction"
let hnf_lam_appvect env sigma t nl =
Array.fold_left (hnf_lam_app env sigma) t nl
let hnf_lam_applist env sigma t nl =
List.fold_left (hnf_lam_app env sigma) t nl
let splay_prod env sigma =
let rec decrec env m c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Prod (n,a,c0) ->
decrec (push_rel (n,None,a) env)
((n,a)::m) c0
| _ -> m,t
in
decrec env []
let splay_lam env sigma =
let rec decrec env m c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Lambda (n,a,c0) ->
decrec (push_rel (n,None,a) env)
((n,a)::m) c0
| _ -> m,t
in
decrec env []
let splay_prod_assum env sigma =
let rec prodec_rec env l c =
let t = whd_betadeltaiota_nolet env sigma c in
match kind_of_term t with
| Prod (x,t,c) ->
prodec_rec (push_rel (x,None,t) env)
(add_rel_decl (x, None, t) l) c
| LetIn (x,b,t,c) ->
prodec_rec (push_rel (x, Some b, t) env)
(add_rel_decl (x, Some b, t) l) c
| Cast (c,_,_) -> prodec_rec env l c
| _ -> l,t
in
prodec_rec env empty_rel_context
let splay_arity env sigma c =
let l, c = splay_prod env sigma c in
match kind_of_term c with
| Sort s -> l,s
| _ -> invalid_arg "splay_arity"
let sort_of_arity env sigma c = snd (splay_arity env sigma c)
let splay_prod_n env sigma n =
let rec decrec env m ln c = if m = 0 then (ln,c) else
match kind_of_term (whd_betadeltaiota env sigma c) with
| Prod (n,a,c0) ->
decrec (push_rel (n,None,a) env)
(m-1) (add_rel_decl (n,None,a) ln) c0
| _ -> invalid_arg "splay_prod_n"
in
decrec env n empty_rel_context
let splay_lam_n env sigma n =
let rec decrec env m ln c = if m = 0 then (ln,c) else
match kind_of_term (whd_betadeltaiota env sigma c) with
| Lambda (n,a,c0) ->
decrec (push_rel (n,None,a) env)
(m-1) (add_rel_decl (n,None,a) ln) c0
| _ -> invalid_arg "splay_lam_n"
in
decrec env n empty_rel_context
exception NotASort
let decomp_sort env sigma t =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Sort s -> s
| _ -> raise NotASort
let is_sort env sigma arity =
try let _ = decomp_sort env sigma arity in true
with NotASort -> false
(* reduction to head-normal-form allowing delta/zeta only in argument
   of case/fix (heuristic used by evar_conv) *)
let whd_betaiota_deltazeta_for_iota_state ts env sigma s =
let rec whrec s =
let (t, stack as s) = whd_betaiota_state sigma s in
match kind_of_term t with
| Case (ci,p,d,lf) ->
let (cr,crargs) = whd_betadeltaiota_stack_using ts env sigma d in
let rslt = mkCase (ci, p, applist (cr,crargs), lf) in
if reducible_mind_case cr then
whrec (rslt, stack)
else
s
| Fix fix ->
(match
reduce_fix (whd_betadeltaiota_state_using ts env) sigma fix stack
with
| Reduced s -> whrec s
| NotReducible -> s)
| _ -> s
in whrec s
(* A reduction function like whd_betaiota but which keeps casts
 * and does not reduce redexes containing existential variables.
 * Used in Correctness.
 * Added by JCF, 29/1/98. *)
let whd_programs_stack env sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| App (f,cl) ->
let n = Array.length cl - 1 in
let c = cl.(n) in
if occur_existential c then
s
else
whrec (mkApp (f, Array.sub cl 0 n), append_stack [|c|] stack)
| LetIn (_,b,_,c) ->
if occur_existential b then
s
else
stacklam whrec [b] c stack
| Lambda (_,_,c) ->
(match decomp_stack stack with
| None -> s
| Some (a,m) -> stacklam whrec [a] c m)
| Case (ci,p,d,lf) ->
if occur_existential d then
s
else
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack(c,cargs), lf), stack)
| Fix fix ->
(match reduce_fix (fun _ ->whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| _ -> s
in
whrec
let whd_programs env sigma x =
app_stack (whd_programs_stack env sigma (x, empty_stack))
exception IsType
let find_conclusion env sigma =
let rec decrec env c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Prod (x,t,c0) -> decrec (push_rel (x,None,t) env) c0
| Lambda (x,t,c0) -> decrec (push_rel (x,None,t) env) c0
| t -> t
in
decrec env
let is_arity env sigma c =
match find_conclusion env sigma c with
| Sort _ -> true
| _ -> false
(*************************************)
(*                Metas              *)
(*************************************)
let meta_value evd mv =
let rec valrec mv =
match meta_opt_fvalue evd mv with
| Some (b,_) ->
instance evd
(List.map (fun mv' -> (mv',valrec mv')) (Metaset.elements b.freemetas))
b.rebus
| None -> mkMeta mv
in
valrec mv
let meta_instance sigma b =
let c_sigma =
List.map
(fun mv -> (mv,meta_value sigma mv)) (Metaset.elements b.freemetas)
in
if c_sigma = [] then b.rebus else instance sigma c_sigma b.rebus
let nf_meta sigma c = meta_instance sigma (mk_freelisted c)
(* Instantiate metas that create beta/iota redexes *)
let meta_reducible_instance evd b =
let fm = Metaset.elements b.freemetas in
let metas = List.fold_left (fun l mv ->
match (try meta_opt_fvalue evd mv with Not_found -> None) with
| Some (g,(_,s)) -> (mv,(g.rebus,s))::l
| None -> l) [] fm in
let rec irec u =
let u = whd_betaiota Evd.empty u in
match kind_of_term u with
| Case (ci,p,c,bl) when isMeta c or isCast c & isMeta (pi1 (destCast c)) ->
let m =
try destMeta c
with e when Errors.noncritical e -> destMeta (pi1 (destCast c))
in
(match
try
let g,s = List.assoc m metas in
if isConstruct g or s <> CoerceToType then Some g else None
with Not_found -> None
with
| Some g -> irec (mkCase (ci,p,g,bl))
| None -> mkCase (ci,irec p,c,Array.map irec bl))
| App (f,l) when isMeta f or isCast f & isMeta (pi1 (destCast f)) ->
let m =
try destMeta f
with e when Errors.noncritical e -> destMeta (pi1 (destCast f))
in
(match
try
let g,s = List.assoc m metas in
if isLambda g or s <> CoerceToType then Some g else None
with Not_found -> None
with
| Some g -> irec (mkApp (g,l))
| None -> mkApp (f,Array.map irec l))
| Meta m ->
(try let g,s = List.assoc m metas in if s<>CoerceToType then irec g else u
with Not_found -> u)
| _ -> map_constr irec u
in
if fm = [] then (* nf_betaiota? *) b.rebus else irec b.rebus
let head_unfold_under_prod ts env _ c =
let unfold cst =
if Cpred.mem cst (snd ts) then
match constant_opt_value env cst with
| Some c -> c
| None -> mkConst cst
else mkConst cst in
let rec aux c =
match kind_of_term c with
| Prod (n,t,c) -> mkProd (n,aux t, aux c)
| _ ->
let (h,l) = decompose_app c in
match kind_of_term h with
| Const cst -> beta_applist (unfold cst,l)
| _ -> c in
aux c
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/pretyping/reductionops.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
********************************************************************
The type of (machine) stacks (= lambda-bar-calculus' contexts)
************************************************************
The type of (machine) states (= lambda-bar-calculus' cuts)
***********************************
** Reduction Functions Operators **
***********************************
***********************************
***********************************
This signature is very similar to Closure.RedFlagsSig except there
is eta but no per-constant unfolding
Compact Implementation
Local
Contextual
Beta Reduction tools
the result type
the constructor
special info to re-build pattern
the constructor's arguments
the branch code vector
contracts fix==FIX[nl;i](A1...Ak;[F1...Fk]{B1....Bk}) to produce
Bi[Fj --> FIX[nl;j](A1...Ak;[F1...Fk]{B1...Bk})]
**************************************************************************
Reduction Functions
**************************************************************************
Replacing defined evars for error messages
lazy reduction functions. The infos must be created for each term
******************************************************************
Conversion
******************************************************************
let fkey = Profile.declare_profile "fhnf";;
let fhnf info v = Profile.profile2 fkey fhnf info v;;
let fakey = Profile.declare_profile "fhnf_apply";;
let fhnf_apply info k h a = Profile.profile4 fakey fhnf_apply info k h a;;
Conversion utility functions
******************************************************************
Special-Purpose Reduction
******************************************************************
Try to replace all metas. Does not replace metas in the metas' values
* Differs from (strong whd_meta).
Don't flatten application nodes: this is used to extract a
proof-term from a proof-tree and we want to keep the structure
of the proof-tree
if s = [] then c else
***********************************
Instantiate metas that create beta/iota redexes
nf_betaiota? | v * The Coq Proof Assistant / The Coq Development Team
open Pp
open Util
open Names
open Term
open Termops
open Univ
open Evd
open Declarations
open Environ
open Closure
open Esubst
open Reduction
exception Elimconst
type 'a stack_member =
| Zapp of 'a list
| Zcase of case_info * 'a * 'a array
| Zfix of 'a * 'a stack
| Zshift of int
| Zupdate of 'a
and 'a stack = 'a stack_member list
let empty_stack = []
let append_stack_list l s =
match (l,s) with
| ([],s) -> s
| (l1, Zapp l :: s) -> Zapp (l1@l) :: s
| (l1, s) -> Zapp l1 :: s
let append_stack v s = append_stack_list (Array.to_list v) s
let rec stack_args_size = function
| Zapp l::s -> List.length l + stack_args_size s
| Zshift(_)::s -> stack_args_size s
| Zupdate(_)::s -> stack_args_size s
| _ -> 0
(* When used as an argument stack (only Zapp can appear) *)
let rec decomp_stack = function
| Zapp[v]::s -> Some (v, s)
| Zapp(v::l)::s -> Some (v, (Zapp l :: s))
| Zapp [] :: s -> decomp_stack s
| _ -> None
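(* Illustration (added; not in the original source): with two arguments
   pushed as a single Zapp node,
     decomp_stack [Zapp [a1; a2]] = Some (a1, [Zapp [a2]])
   i.e. arguments are popped one at a time while the rest stay on the
   Zapp spine. *)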
let array_of_stack s =
let rec stackrec = function
| [] -> []
| Zapp args :: s -> args :: (stackrec s)
| _ -> assert false
in Array.of_list (List.concat (stackrec s))
let rec list_of_stack = function
| [] -> []
| Zapp args :: s -> args @ (list_of_stack s)
| _ -> assert false
let rec app_stack = function
| f, [] -> f
| f, (Zapp [] :: s) -> app_stack (f, s)
| f, (Zapp args :: s) ->
app_stack (applist (f, args), s)
| _ -> assert false
let rec stack_assign s p c = match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then
Zapp args :: stack_assign s (p-q) c
else
(match list_chop p args with
(bef, _::aft) -> Zapp (bef@c::aft) :: s
| _ -> assert false)
| _ -> s
let rec stack_tail p s =
if p = 0 then s else
match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then stack_tail (p-q) s
else Zapp (list_skipn p args) :: s
| _ -> failwith "stack_tail"
let rec stack_nth s p = match s with
| Zapp args :: s ->
let q = List.length args in
if p >= q then stack_nth s (p-q)
else List.nth args p
| _ -> raise Not_found
type state = constr * constr stack
type contextual_reduction_function = env -> evar_map -> constr -> constr
type reduction_function = contextual_reduction_function
type local_reduction_function = evar_map -> constr -> constr
type contextual_stack_reduction_function =
env -> evar_map -> constr -> constr * constr list
type stack_reduction_function = contextual_stack_reduction_function
type local_stack_reduction_function =
evar_map -> constr -> constr * constr list
type contextual_state_reduction_function =
env -> evar_map -> state -> state
type state_reduction_function = contextual_state_reduction_function
type local_state_reduction_function = evar_map -> state -> state
let safe_evar_value sigma ev =
try Some (Evd.existential_value sigma ev)
with NotInstantiatedEvar | Not_found -> None
let rec whd_app_state sigma (x, stack as s) =
match kind_of_term x with
| App (f,cl) -> whd_app_state sigma (f, append_stack cl stack)
| Cast (c,_,_) -> whd_app_state sigma (c, stack)
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whd_app_state sigma (c,stack)
| _ -> s)
| _ -> s
let safe_meta_value sigma ev =
try Some (Evd.meta_value sigma ev)
with Not_found -> None
let appterm_of_stack (f,s) = (f,list_of_stack s)
let whd_stack sigma x =
appterm_of_stack (whd_app_state sigma (x, empty_stack))
let whd_castapp_stack = whd_stack
let strong whdfun env sigma t =
let rec strongrec env t =
map_constr_with_full_binders push_rel strongrec env (whdfun env sigma t) in
strongrec env t
let local_strong whdfun sigma =
let rec strongrec t = map_constr strongrec (whdfun sigma t) in
strongrec
let rec strong_prodspine redfun sigma c =
let x = redfun sigma c in
match kind_of_term x with
| Prod (na,a,b) -> mkProd (na,a,strong_prodspine redfun sigma b)
| _ -> x
(** Reduction using ... **)
module type RedFlagsSig = sig
type flags
type flag
val fbeta : flag
val fdelta : flag
val feta : flag
val fiota : flag
val fzeta : flag
val mkflags : flag list -> flags
val red_beta : flags -> bool
val red_delta : flags -> bool
val red_eta : flags -> bool
val red_iota : flags -> bool
val red_zeta : flags -> bool
end
module RedFlags = (struct
type flag = int
type flags = int
let fbeta = 1
let fdelta = 2
let feta = 8
let fiota = 16
let fzeta = 32
let mkflags = List.fold_left (lor) 0
let red_beta f = f land fbeta <> 0
let red_delta f = f land fdelta <> 0
let red_eta f = f land feta <> 0
let red_iota f = f land fiota <> 0
let red_zeta f = f land fzeta <> 0
end : RedFlagsSig)
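(* Worked example (added): with the bit encoding above,
     mkflags [fbeta; fiota] = 1 lor 16 = 17
   so red_beta 17 = true and red_iota 17 = true, while
   red_delta 17 = false because 17 land 2 = 0.  The whd_* functions
   below consult their flag set exactly this way. *)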
open RedFlags
let beta = mkflags [fbeta]
let eta = mkflags [feta]
let zeta = mkflags [fzeta]
let betaiota = mkflags [fiota; fbeta]
let betaiotazeta = mkflags [fiota; fbeta;fzeta]
let delta = mkflags [fdelta]
let betadelta = mkflags [fbeta;fdelta;fzeta]
let betadeltaeta = mkflags [fbeta;fdelta;fzeta;feta]
let betadeltaiota = mkflags [fbeta;fdelta;fzeta;fiota]
let betadeltaiota_nolet = mkflags [fbeta;fdelta;fiota]
let betadeltaiotaeta = mkflags [fbeta;fdelta;fzeta;fiota;feta]
let betaetalet = mkflags [fbeta;feta;fzeta]
let betalet = mkflags [fbeta;fzeta]
let rec stacklam recfun env t stack =
match (decomp_stack stack,kind_of_term t) with
| Some (h,stacktl), Lambda (_,_,c) -> stacklam recfun (h::env) c stacktl
| _ -> recfun (substl env t, stack)
let beta_applist (c,l) =
stacklam app_stack [] c (append_stack_list l empty_stack)
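(* Illustration (added): beta_applist ((fun x y -> c), [a; b]) contracts the
   leading lambdas against the supplied arguments, yielding c[x:=a][y:=b];
   if more arguments than lambdas are supplied, stacklam leaves the surplus
   on the stack and app_stack re-applies it to the reduced head. *)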
(* Iota reduction tools *)
type 'a miota_args = {
  mP      : 'a;                (* the result type *)
  mconstr : 'a;                (* the constructor *)
  mci     : case_info;         (* special info to re-build pattern *)
  mcargs  : 'a list;           (* the constructor's arguments *)
  mlf     : 'a array }         (* the branch code vector *)
let reducible_mind_case c = match kind_of_term c with
| Construct _ | CoFix _ -> true
| _ -> false
let contract_cofix (bodynum,(types,names,bodies as typedbodies)) =
let nbodies = Array.length bodies in
let make_Fi j = mkCoFix (nbodies-j-1,typedbodies) in
substl (list_tabulate make_Fi nbodies) bodies.(bodynum)
let reduce_mind_case mia =
match kind_of_term mia.mconstr with
| Construct (ind_sp,i) ->
(* let ... = (fst mia.mci).(i-1) in *)
let real_cargs = list_skipn mia.mci.ci_npar mia.mcargs in
applist (mia.mlf.(i-1),real_cargs)
| CoFix cofix ->
let cofix_def = contract_cofix cofix in
mkCase (mia.mci, mia.mP, applist(cofix_def,mia.mcargs), mia.mlf)
| _ -> assert false
let contract_fix ((recindices,bodynum),(types,names,bodies as typedbodies)) =
let nbodies = Array.length recindices in
let make_Fi j = mkFix ((recindices,nbodies-j-1),typedbodies) in
substl (list_tabulate make_Fi nbodies) bodies.(bodynum)
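(* Note (added): contract_fix unfolds the selected body Bi, substituting each
   occurrence of a mutually defined fixpoint Fj by the corresponding mkFix
   term, i.e. the contraction
     Bi[Fj --> FIX[nl;j](A1...Ak;[F1...Fk]{B1...Bk})]
   described in this file's comments. *)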
let fix_recarg ((recindices,bodynum),_) stack =
assert (0 <= bodynum & bodynum < Array.length recindices);
let recargnum = Array.get recindices bodynum in
try
Some (recargnum, stack_nth stack recargnum)
with Not_found ->
None
type fix_reduction_result = NotReducible | Reduced of state
let reduce_fix whdfun sigma fix stack =
match fix_recarg fix stack with
| None -> NotReducible
| Some (recargnum,recarg) ->
let (recarg'hd,_ as recarg') = whdfun sigma (recarg, empty_stack) in
let stack' = stack_assign stack recargnum (app_stack recarg') in
(match kind_of_term recarg'hd with
| Construct _ -> Reduced (contract_fix fix, stack')
| _ -> NotReducible)
(* Generic reduction function *)

(* There used to be a comment for ... :

   NB: this function allocates little; it is the call
     ``let (c,cargs) = whfun (recarg, empty_stack)''
        -------------------
   that is the expensive part. *)
let rec whd_state_gen flags ts env sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| Rel n when red_delta flags ->
(match lookup_rel n env with
| (_,Some body,_) -> whrec (lift n body, stack)
| _ -> s)
| Var id when red_delta flags ->
(match lookup_named id env with
| (_,Some body,_) -> whrec (body, stack)
| _ -> s)
| Evar ev ->
(match safe_evar_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Meta ev ->
(match safe_meta_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Const const when is_transparent_constant ts const ->
(match constant_opt_value env const with
| Some body -> whrec (body, stack)
| None -> s)
| LetIn (_,b,_,c) when red_zeta flags -> stacklam whrec [b] c stack
| Cast (c,_,_) -> whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (na,t,c) ->
(match decomp_stack stack with
| Some (a,m) when red_beta flags -> stacklam whrec [a] c m
| None when red_eta flags ->
let env' = push_rel (na,None,t) env in
let whrec' = whd_state_gen flags ts env' sigma in
(match kind_of_term (app_stack (whrec' (c, empty_stack))) with
| App (f,cl) ->
let napp = Array.length cl in
if napp > 0 then
let x', l' = whrec' (array_last cl, empty_stack) in
match kind_of_term x', decomp_stack l' with
| Rel 1, None ->
let lc = Array.sub cl 0 (napp-1) in
let u = if napp=1 then f else appvect (f,lc) in
if noccurn 1 u then (pop u,empty_stack) else s
| _ -> s
else s
| _ -> s)
| _ -> s)
| Case (ci,p,d,lf) when red_iota flags ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| Fix fix when red_iota flags ->
(match reduce_fix (fun _ -> whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| x -> s
in
whrec
let local_whd_state_gen flags sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| LetIn (_,b,_,c) when red_zeta flags -> stacklam whrec [b] c stack
| Cast (c,_,_) -> whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (_,_,c) ->
(match decomp_stack stack with
| Some (a,m) when red_beta flags -> stacklam whrec [a] c m
| None when red_eta flags ->
(match kind_of_term (app_stack (whrec (c, empty_stack))) with
| App (f,cl) ->
let napp = Array.length cl in
if napp > 0 then
let x', l' = whrec (array_last cl, empty_stack) in
match kind_of_term x', decomp_stack l' with
| Rel 1, None ->
let lc = Array.sub cl 0 (napp-1) in
let u = if napp=1 then f else appvect (f,lc) in
if noccurn 1 u then (pop u,empty_stack) else s
| _ -> s
else s
| _ -> s)
| _ -> s)
| Case (ci,p,d,lf) when red_iota flags ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| Fix fix when red_iota flags ->
(match reduce_fix (fun _ ->whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whrec (c,stack)
| None -> s)
| Meta ev ->
(match safe_meta_value sigma ev with
Some c -> whrec (c,stack)
| None -> s)
| x -> s
in
whrec
let stack_red_of_state_red f sigma x =
appterm_of_stack (f sigma (x, empty_stack))
let red_of_state_red f sigma x =
app_stack (f sigma (x,empty_stack))
(* 1. Beta Reduction Functions *)
let whd_beta_state = local_whd_state_gen beta
let whd_beta_stack = stack_red_of_state_red whd_beta_state
let whd_beta = red_of_state_red whd_beta_state
(* New! *)
let whd_betaetalet_state = local_whd_state_gen betaetalet
let whd_betaetalet_stack = stack_red_of_state_red whd_betaetalet_state
let whd_betaetalet = red_of_state_red whd_betaetalet_state
let whd_betalet_state = local_whd_state_gen betalet
let whd_betalet_stack = stack_red_of_state_red whd_betalet_state
let whd_betalet = red_of_state_red whd_betalet_state
(* 2. Delta Reduction Functions *)
let whd_delta_state e = whd_state_gen delta full_transparent_state e
let whd_delta_stack env = stack_red_of_state_red (whd_delta_state env)
let whd_delta env = red_of_state_red (whd_delta_state env)
let whd_betadelta_state e = whd_state_gen betadelta full_transparent_state e
let whd_betadelta_stack env =
stack_red_of_state_red (whd_betadelta_state env)
let whd_betadelta env =
red_of_state_red (whd_betadelta_state env)
let whd_betadeltaeta_state e =
whd_state_gen betadeltaeta full_transparent_state e
let whd_betadeltaeta_stack env =
stack_red_of_state_red (whd_betadeltaeta_state env)
let whd_betadeltaeta env =
red_of_state_red (whd_betadeltaeta_state env)
(* 3. Iota reduction Functions *)
let whd_betaiota_state = local_whd_state_gen betaiota
let whd_betaiota_stack = stack_red_of_state_red whd_betaiota_state
let whd_betaiota = red_of_state_red whd_betaiota_state
let whd_betaiotazeta_state = local_whd_state_gen betaiotazeta
let whd_betaiotazeta_stack = stack_red_of_state_red whd_betaiotazeta_state
let whd_betaiotazeta = red_of_state_red whd_betaiotazeta_state
let whd_betadeltaiota_state env =
whd_state_gen betadeltaiota full_transparent_state env
let whd_betadeltaiota_stack env =
stack_red_of_state_red (whd_betadeltaiota_state env)
let whd_betadeltaiota env =
red_of_state_red (whd_betadeltaiota_state env)
let whd_betadeltaiota_state_using ts env =
whd_state_gen betadeltaiota ts env
let whd_betadeltaiota_stack_using ts env =
stack_red_of_state_red (whd_betadeltaiota_state_using ts env)
let whd_betadeltaiota_using ts env =
red_of_state_red (whd_betadeltaiota_state_using ts env)
let whd_betadeltaiotaeta_state env =
whd_state_gen betadeltaiotaeta full_transparent_state env
let whd_betadeltaiotaeta_stack env =
stack_red_of_state_red (whd_betadeltaiotaeta_state env)
let whd_betadeltaiotaeta env =
red_of_state_red (whd_betadeltaiotaeta_state env)
let whd_betadeltaiota_nolet_state env =
whd_state_gen betadeltaiota_nolet full_transparent_state env
let whd_betadeltaiota_nolet_stack env =
stack_red_of_state_red (whd_betadeltaiota_nolet_state env)
let whd_betadeltaiota_nolet env =
red_of_state_red (whd_betadeltaiota_nolet_state env)
(* 4. Eta reduction Functions *)
let whd_eta c = app_stack (local_whd_state_gen eta Evd.empty (c,empty_stack))
(* 5. Zeta Reduction Functions *)
let whd_zeta c = app_stack (local_whd_state_gen zeta Evd.empty (c,empty_stack))
let rec whd_evar sigma c =
match kind_of_term c with
| Evar ev ->
(match safe_evar_value sigma ev with
Some c -> whd_evar sigma c
| None -> c)
| Sort s -> whd_sort_variable sigma c
| _ -> c
let nf_evar =
local_strong whd_evar
(* Note by HH [oct 08]: why would it be the job of clos_norm_flags to add
   a [nf_evar] here *)
let clos_norm_flags flgs env sigma t =
try
norm_val
(create_clos_infos ~evars:(safe_evar_value sigma) flgs env)
(inject t)
with Anomaly _ -> error "Tried to normalized ill-typed term"
let nf_beta = clos_norm_flags Closure.beta empty_env
let nf_betaiota = clos_norm_flags Closure.betaiota empty_env
let nf_betadeltaiota env sigma =
clos_norm_flags Closure.betadeltaiota env sigma
(* Beware: reducing a beta-redex whose argument is not a variable can
   change conversion time enormously during type checking:
     (fun x => x + x) M
*)
let rec whd_betaiota_preserving_vm_cast env sigma t =
let rec stacklam_var subst t stack =
match (decomp_stack stack,kind_of_term t) with
| Some (h,stacktl), Lambda (_,_,c) ->
begin match kind_of_term h with
| Rel i when not (evaluable_rel i env) ->
stacklam_var (h::subst) c stacktl
| Var id when not (evaluable_named id env)->
stacklam_var (h::subst) c stacktl
| _ -> whrec (substl subst t, stack)
end
| _ -> whrec (substl subst t, stack)
and whrec (x, stack as s) =
match kind_of_term x with
| Evar ev ->
(match safe_evar_value sigma ev with
| Some body -> whrec (body, stack)
| None -> s)
| Cast (c,VMcast,t) ->
let c = app_stack (whrec (c,empty_stack)) in
let t = app_stack (whrec (t,empty_stack)) in
(mkCast(c,VMcast,t),stack)
| Cast (c,DEFAULTcast,_) ->
whrec (c, stack)
| App (f,cl) -> whrec (f, append_stack cl stack)
| Lambda (na,t,c) ->
(match decomp_stack stack with
| Some (a,m) -> stacklam_var [a] c m
| _ -> s)
| Case (ci,p,d,lf) ->
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack (c,cargs), lf), stack)
| x -> s
in
app_stack (whrec (t,empty_stack))
let nf_betaiota_preserving_vm_cast =
strong whd_betaiota_preserving_vm_cast
let is_transparent k =
Conv_oracle.get_strategy k <> Conv_oracle.Opaque
type conversion_test = constraints -> constraints
let pb_is_equal pb = pb = CONV
let pb_equal = function
| CUMUL -> CONV
| CONV -> CONV
let sort_cmp = sort_cmp
let test_conversion (f: ?l2r:bool-> ?evars:'a->'b) env sigma x y =
try let _ =
f ~evars:(safe_evar_value sigma) env x y in true
with NotConvertible -> false
| Anomaly _ -> error "Conversion test raised an anomaly"
let is_conv env sigma = test_conversion Reduction.conv env sigma
let is_conv_leq env sigma = test_conversion Reduction.conv_leq env sigma
let is_fconv = function | CONV -> is_conv | CUMUL -> is_conv_leq
let test_trans_conversion (f: ?l2r:bool-> ?evars:'a->'b) reds env sigma x y =
try let _ = f ~evars:(safe_evar_value sigma) reds env x y in true
with NotConvertible -> false
| Anomaly _ -> error "Conversion test raised an anomaly"
let is_trans_conv reds env sigma = test_trans_conversion Reduction.trans_conv reds env sigma
let is_trans_conv_leq reds env sigma = test_trans_conversion Reduction.trans_conv_leq reds env sigma
let is_trans_fconv = function | CONV -> is_trans_conv | CUMUL -> is_trans_conv_leq
let whd_meta sigma c = match kind_of_term c with
| Meta p -> (try meta_value sigma p with Not_found -> c)
| _ -> c
let plain_instance s c =
let rec irec n u = match kind_of_term u with
| Meta p -> (try lift n (List.assoc p s) with Not_found -> u)
| App (f,l) when isCast f ->
let (f,_,t) = destCast f in
let l' = Array.map (irec n) l in
(match kind_of_term f with
| Meta p ->
(try let g = List.assoc p s in
match kind_of_term g with
| App _ ->
let h = id_of_string "H" in
mkLetIn (Name h,g,t,mkApp(mkRel 1,Array.map (lift 1) l'))
| _ -> mkApp (g,l')
with Not_found -> mkApp (f,l'))
| _ -> mkApp (irec n f,l'))
| Cast (m,_,_) when isMeta m ->
(try lift n (List.assoc (destMeta m) s) with Not_found -> u)
| _ ->
map_constr_with_binders succ irec n u
in
if s = [] then c else irec 0 c
(* [instance] is used for [res_pf]; the call to [local_strong whd_betaiota]
   has (unfortunately) different subtle side effects:

   - ** Order of subgoals **
     If the lemma is a case analysis with parameters, it will move the
     parameters as first subgoals (e.g. "case H" applied on
     "H:D->A/\B|-C" will present the subgoal |-D first while w/o
     betaiota the subgoal |-D would have come last).

   - ** Betaiota-contraction in statement **
     If the lemma has a parameter which is a function and this
     function is applied in the lemma, then the _strong_ betaiota will
     contract the application of the function to its argument (e.g.
     "apply (H (fun x => x))" in "H:forall f, f 0 = 0 |- 0=0" will
     result in applying the lemma 0=0 in which "(fun x => x) 0" has
     been contracted). A goal to rewrite may then fail or succeed
     differently.

   - ** Naming of hypotheses **
     If a lemma is a function of the form "fun H:(forall a:A, P a)
     => .. F H .." where the expected type of H is "forall b:A, P b",
     then, without reduction, the application of the lemma will
     generate a subgoal "forall a:A, P a" (and intro will use name
     "a"), while with reduction, it will generate a subgoal "forall
     b:A, P b" (and intro will use name "b").

   - ** First-order pattern-matching **
     If a lemma has the type "(fun x => p) t" then rewriting t may fail
     if the type of the lemma is first beta-reduced (this typically happens
     when rewriting a single variable and the type of the lemma is obtained
     by meta_instance (with empty map) which itself calls instance with this
     empty map).
*)
let instance sigma s c =
local_strong whd_betaiota sigma (plain_instance s c)
(* pseudo-reduction rule:
 * [hnf_prod_app env s (Prod(_,B)) N --> B[N]
 * with an HNF on the first argument to produce a product.
 * if this does not work, then we use the string S as part of our
 * error message. *)
let hnf_prod_app env sigma t n =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Prod (_,_,b) -> subst1 n b
| _ -> anomaly "hnf_prod_app: Need a product"
let hnf_prod_appvect env sigma t nl =
Array.fold_left (hnf_prod_app env sigma) t nl
let hnf_prod_applist env sigma t nl =
List.fold_left (hnf_prod_app env sigma) t nl
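(* Example (added): if t whd-reduces to  forall (x:A) (y:B x), C x y  then
   hnf_prod_applist env sigma t [a; b] peels both products and returns
   C a b, i.e. the codomain with x:=a and y:=b substituted. *)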
let hnf_lam_app env sigma t n =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Lambda (_,_,b) -> subst1 n b
| _ -> anomaly "hnf_lam_app: Need an abstraction"
let hnf_lam_appvect env sigma t nl =
Array.fold_left (hnf_lam_app env sigma) t nl
let hnf_lam_applist env sigma t nl =
List.fold_left (hnf_lam_app env sigma) t nl
let splay_prod env sigma =
let rec decrec env m c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Prod (n,a,c0) ->
decrec (push_rel (n,None,a) env)
((n,a)::m) c0
| _ -> m,t
in
decrec env []
let splay_lam env sigma =
let rec decrec env m c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Lambda (n,a,c0) ->
decrec (push_rel (n,None,a) env)
((n,a)::m) c0
| _ -> m,t
in
decrec env []
let splay_prod_assum env sigma =
let rec prodec_rec env l c =
let t = whd_betadeltaiota_nolet env sigma c in
match kind_of_term t with
| Prod (x,t,c) ->
prodec_rec (push_rel (x,None,t) env)
(add_rel_decl (x, None, t) l) c
| LetIn (x,b,t,c) ->
prodec_rec (push_rel (x, Some b, t) env)
(add_rel_decl (x, Some b, t) l) c
| Cast (c,_,_) -> prodec_rec env l c
| _ -> l,t
in
prodec_rec env empty_rel_context
let splay_arity env sigma c =
let l, c = splay_prod env sigma c in
match kind_of_term c with
| Sort s -> l,s
| _ -> invalid_arg "splay_arity"
let sort_of_arity env sigma c = snd (splay_arity env sigma c)
let splay_prod_n env sigma n =
let rec decrec env m ln c = if m = 0 then (ln,c) else
match kind_of_term (whd_betadeltaiota env sigma c) with
| Prod (n,a,c0) ->
decrec (push_rel (n,None,a) env)
(m-1) (add_rel_decl (n,None,a) ln) c0
| _ -> invalid_arg "splay_prod_n"
in
decrec env n empty_rel_context
let splay_lam_n env sigma n =
let rec decrec env m ln c = if m = 0 then (ln,c) else
match kind_of_term (whd_betadeltaiota env sigma c) with
| Lambda (n,a,c0) ->
decrec (push_rel (n,None,a) env)
(m-1) (add_rel_decl (n,None,a) ln) c0
| _ -> invalid_arg "splay_lam_n"
in
decrec env n empty_rel_context
exception NotASort
let decomp_sort env sigma t =
match kind_of_term (whd_betadeltaiota env sigma t) with
| Sort s -> s
| _ -> raise NotASort
let is_sort env sigma arity =
try let _ = decomp_sort env sigma arity in true
with NotASort -> false
(* reduction to head-normal-form allowing delta/zeta only in argument
   of case/fix (heuristic used by evar_conv) *)
let whd_betaiota_deltazeta_for_iota_state ts env sigma s =
let rec whrec s =
let (t, stack as s) = whd_betaiota_state sigma s in
match kind_of_term t with
| Case (ci,p,d,lf) ->
let (cr,crargs) = whd_betadeltaiota_stack_using ts env sigma d in
let rslt = mkCase (ci, p, applist (cr,crargs), lf) in
if reducible_mind_case cr then
whrec (rslt, stack)
else
s
| Fix fix ->
(match
reduce_fix (whd_betadeltaiota_state_using ts env) sigma fix stack
with
| Reduced s -> whrec s
| NotReducible -> s)
| _ -> s
in whrec s
(* A reduction function like whd_betaiota but which keeps casts
 * and does not reduce redexes containing existential variables.
 * Used in Correctness.
 * Added by JCF, 29/1/98. *)
let whd_programs_stack env sigma =
let rec whrec (x, stack as s) =
match kind_of_term x with
| App (f,cl) ->
let n = Array.length cl - 1 in
let c = cl.(n) in
if occur_existential c then
s
else
whrec (mkApp (f, Array.sub cl 0 n), append_stack [|c|] stack)
| LetIn (_,b,_,c) ->
if occur_existential b then
s
else
stacklam whrec [b] c stack
| Lambda (_,_,c) ->
(match decomp_stack stack with
| None -> s
| Some (a,m) -> stacklam whrec [a] c m)
| Case (ci,p,d,lf) ->
if occur_existential d then
s
else
let (c,cargs) = whrec (d, empty_stack) in
if reducible_mind_case c then
whrec (reduce_mind_case
{mP=p; mconstr=c; mcargs=list_of_stack cargs;
mci=ci; mlf=lf}, stack)
else
(mkCase (ci, p, app_stack(c,cargs), lf), stack)
| Fix fix ->
(match reduce_fix (fun _ ->whrec) sigma fix stack with
| Reduced s' -> whrec s'
| NotReducible -> s)
| _ -> s
in
whrec
let whd_programs env sigma x =
app_stack (whd_programs_stack env sigma (x, empty_stack))
exception IsType
let find_conclusion env sigma =
let rec decrec env c =
let t = whd_betadeltaiota env sigma c in
match kind_of_term t with
| Prod (x,t,c0) -> decrec (push_rel (x,None,t) env) c0
| Lambda (x,t,c0) -> decrec (push_rel (x,None,t) env) c0
| t -> t
in
decrec env
let is_arity env sigma c =
match find_conclusion env sigma c with
| Sort _ -> true
| _ -> false
(*************************************)
(*                Metas              *)
(*************************************)
let meta_value evd mv =
let rec valrec mv =
match meta_opt_fvalue evd mv with
| Some (b,_) ->
instance evd
(List.map (fun mv' -> (mv',valrec mv')) (Metaset.elements b.freemetas))
b.rebus
| None -> mkMeta mv
in
valrec mv
let meta_instance sigma b =
let c_sigma =
List.map
(fun mv -> (mv,meta_value sigma mv)) (Metaset.elements b.freemetas)
in
if c_sigma = [] then b.rebus else instance sigma c_sigma b.rebus
let nf_meta sigma c = meta_instance sigma (mk_freelisted c)
let meta_reducible_instance evd b =
let fm = Metaset.elements b.freemetas in
let metas = List.fold_left (fun l mv ->
match (try meta_opt_fvalue evd mv with Not_found -> None) with
| Some (g,(_,s)) -> (mv,(g.rebus,s))::l
| None -> l) [] fm in
let rec irec u =
let u = whd_betaiota Evd.empty u in
match kind_of_term u with
| Case (ci,p,c,bl) when isMeta c or isCast c & isMeta (pi1 (destCast c)) ->
let m =
try destMeta c
with e when Errors.noncritical e -> destMeta (pi1 (destCast c))
in
(match
try
let g,s = List.assoc m metas in
if isConstruct g or s <> CoerceToType then Some g else None
with Not_found -> None
with
| Some g -> irec (mkCase (ci,p,g,bl))
| None -> mkCase (ci,irec p,c,Array.map irec bl))
| App (f,l) when isMeta f or isCast f & isMeta (pi1 (destCast f)) ->
let m =
try destMeta f
with e when Errors.noncritical e -> destMeta (pi1 (destCast f))
in
(match
try
let g,s = List.assoc m metas in
if isLambda g or s <> CoerceToType then Some g else None
with Not_found -> None
with
| Some g -> irec (mkApp (g,l))
| None -> mkApp (f,Array.map irec l))
| Meta m ->
(try let g,s = List.assoc m metas in if s<>CoerceToType then irec g else u
with Not_found -> u)
| _ -> map_constr irec u
  in
  if fm = [] then (* nf_betaiota? *) b.rebus else irec b.rebus
let head_unfold_under_prod ts env _ c =
let unfold cst =
if Cpred.mem cst (snd ts) then
match constant_opt_value env cst with
| Some c -> c
| None -> mkConst cst
else mkConst cst in
let rec aux c =
match kind_of_term c with
| Prod (n,t,c) -> mkProd (n,aux t, aux c)
| _ ->
let (h,l) = decompose_app c in
match kind_of_term h with
| Const cst -> beta_applist (unfold cst,l)
| _ -> c in
aux c
|
284e473a9795978b3830553ab3168fa719127f7af125df7395dc014cb1b178d4 | stephenpascoe/hs-arrow | TableBatchReader.hs |
|
Copyright  : Will Thompson, Iñaki García Etxebarria and Jonas Platte
License    : LGPL-2.1
Maintainer : Iñaki García Etxebarria ()

/No description available in the introspection data./
-}
#define ENABLE_OVERLOADING (MIN_VERSION_haskell_gi_overloading(1,0,0) \
&& !defined(__HADDOCK_VERSION__))
module GI.Arrow.Objects.TableBatchReader
(
-- * Exported types
TableBatchReader(..) ,
IsTableBatchReader ,
toTableBatchReader ,
noTableBatchReader ,
-- * Methods
-- ** new #method:new#
tableBatchReaderNew ,
) where
import Data.GI.Base.ShortPrelude
import qualified Data.GI.Base.ShortPrelude as SP
import qualified Data.GI.Base.Overloading as O
import qualified Prelude as P
import qualified Data.GI.Base.Attributes as GI.Attributes
import qualified Data.GI.Base.ManagedPtr as B.ManagedPtr
import qualified Data.GI.Base.GError as B.GError
import qualified Data.GI.Base.GVariant as B.GVariant
import qualified Data.GI.Base.GValue as B.GValue
import qualified Data.GI.Base.GParamSpec as B.GParamSpec
import qualified Data.GI.Base.CallStack as B.CallStack
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Foreign.Ptr as FP
import {-# SOURCE #-} qualified GI.Arrow.Objects.RecordBatchReader as Arrow.RecordBatchReader
import {-# SOURCE #-} qualified GI.Arrow.Objects.Table as Arrow.Table
import qualified GI.GObject.Objects.Object as GObject.Object
-- | Memory-managed wrapper type.
newtype TableBatchReader = TableBatchReader (ManagedPtr TableBatchReader)
foreign import ccall "garrow_table_batch_reader_get_type"
c_garrow_table_batch_reader_get_type :: IO GType
instance GObject TableBatchReader where
gobjectType _ = c_garrow_table_batch_reader_get_type
-- | Type class for types which can be safely cast to `TableBatchReader`, for instance with `toTableBatchReader`.
class GObject o => IsTableBatchReader o
#if MIN_VERSION_base(4,9,0)
instance {-# OVERLAPPABLE #-} (GObject a, O.UnknownAncestorError TableBatchReader a) =>
IsTableBatchReader a
#endif
instance IsTableBatchReader TableBatchReader
instance Arrow.RecordBatchReader.IsRecordBatchReader TableBatchReader
instance GObject.Object.IsObject TableBatchReader
| Cast to ` TableBatchReader ` , for types for which this is known to be safe . For general casts , use ` Data . . ManagedPtr.castTo ` .
toTableBatchReader :: (MonadIO m, IsTableBatchReader o) => o -> m TableBatchReader
toTableBatchReader = liftIO . unsafeCastTo TableBatchReader
-- | A convenience alias for `Nothing` :: `Maybe` `TableBatchReader`.
noTableBatchReader :: Maybe TableBatchReader
noTableBatchReader = Nothing
#if ENABLE_OVERLOADING
type family ResolveTableBatchReaderMethod (t :: Symbol) (o :: *) :: * where
ResolveTableBatchReaderMethod "bindProperty" o = GObject.Object.ObjectBindPropertyMethodInfo
ResolveTableBatchReaderMethod "bindPropertyFull" o = GObject.Object.ObjectBindPropertyFullMethodInfo
ResolveTableBatchReaderMethod "forceFloating" o = GObject.Object.ObjectForceFloatingMethodInfo
ResolveTableBatchReaderMethod "freezeNotify" o = GObject.Object.ObjectFreezeNotifyMethodInfo
ResolveTableBatchReaderMethod "getv" o = GObject.Object.ObjectGetvMethodInfo
ResolveTableBatchReaderMethod "isFloating" o = GObject.Object.ObjectIsFloatingMethodInfo
ResolveTableBatchReaderMethod "notify" o = GObject.Object.ObjectNotifyMethodInfo
ResolveTableBatchReaderMethod "notifyByPspec" o = GObject.Object.ObjectNotifyByPspecMethodInfo
ResolveTableBatchReaderMethod "readNext" o = Arrow.RecordBatchReader.RecordBatchReaderReadNextMethodInfo
ResolveTableBatchReaderMethod "readNextRecordBatch" o = Arrow.RecordBatchReader.RecordBatchReaderReadNextRecordBatchMethodInfo
ResolveTableBatchReaderMethod "ref" o = GObject.Object.ObjectRefMethodInfo
ResolveTableBatchReaderMethod "refSink" o = GObject.Object.ObjectRefSinkMethodInfo
ResolveTableBatchReaderMethod "runDispose" o = GObject.Object.ObjectRunDisposeMethodInfo
ResolveTableBatchReaderMethod "stealData" o = GObject.Object.ObjectStealDataMethodInfo
ResolveTableBatchReaderMethod "stealQdata" o = GObject.Object.ObjectStealQdataMethodInfo
ResolveTableBatchReaderMethod "thawNotify" o = GObject.Object.ObjectThawNotifyMethodInfo
ResolveTableBatchReaderMethod "unref" o = GObject.Object.ObjectUnrefMethodInfo
ResolveTableBatchReaderMethod "watchClosure" o = GObject.Object.ObjectWatchClosureMethodInfo
ResolveTableBatchReaderMethod "getData" o = GObject.Object.ObjectGetDataMethodInfo
ResolveTableBatchReaderMethod "getNextRecordBatch" o = Arrow.RecordBatchReader.RecordBatchReaderGetNextRecordBatchMethodInfo
ResolveTableBatchReaderMethod "getProperty" o = GObject.Object.ObjectGetPropertyMethodInfo
ResolveTableBatchReaderMethod "getQdata" o = GObject.Object.ObjectGetQdataMethodInfo
ResolveTableBatchReaderMethod "getSchema" o = Arrow.RecordBatchReader.RecordBatchReaderGetSchemaMethodInfo
ResolveTableBatchReaderMethod "setData" o = GObject.Object.ObjectSetDataMethodInfo
ResolveTableBatchReaderMethod "setProperty" o = GObject.Object.ObjectSetPropertyMethodInfo
ResolveTableBatchReaderMethod l o = O.MethodResolutionFailed l o
instance (info ~ ResolveTableBatchReaderMethod t TableBatchReader, O.MethodInfo info TableBatchReader p) => O.IsLabelProxy t (TableBatchReader -> p) where
fromLabelProxy _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#if MIN_VERSION_base(4,9,0)
instance (info ~ ResolveTableBatchReaderMethod t TableBatchReader, O.MethodInfo info TableBatchReader p) => O.IsLabel t (TableBatchReader -> p) where
#if MIN_VERSION_base(4,10,0)
fromLabel = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#else
fromLabel _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#endif
#endif
#endif
#if ENABLE_OVERLOADING
instance O.HasAttributeList TableBatchReader
type instance O.AttributeList TableBatchReader = TableBatchReaderAttributeList
type TableBatchReaderAttributeList = ('[ '("recordBatchReader", Arrow.RecordBatchReader.RecordBatchReaderRecordBatchReaderPropertyInfo)] :: [(Symbol, *)])
#endif
#if ENABLE_OVERLOADING
#endif
#if ENABLE_OVERLOADING
type instance O.SignalList TableBatchReader = TableBatchReaderSignalList
type TableBatchReaderSignalList = ('[ '("notify", GObject.Object.ObjectNotifySignalInfo)] :: [(Symbol, *)])
#endif
-- method TableBatchReader::new
-- method type : Constructor
-- Args : [Arg {argCName = "table", argType = TInterface (Name {namespace = "Arrow", name = "Table"}), direction = DirectionIn, mayBeNull = False, argDoc = Documentation {rawDocText = Just "The table to be read.", sinceVersion = Nothing}, argScope = ScopeTypeInvalid, argClosure = -1, argDestroy = -1, argCallerAllocates = False, transfer = TransferNothing}]
-- Lengths : []
-- returnType : Just (TInterface (Name {namespace = "Arrow", name = "TableBatchReader"}))
-- throws : False
-- Skip return : False
foreign import ccall "garrow_table_batch_reader_new" garrow_table_batch_reader_new ::
    Ptr Arrow.Table.Table ->                -- table : TInterface (Name {namespace = "Arrow", name = "Table"})
IO (Ptr TableBatchReader)
{- |
/No description available in the introspection data./

@since 0.8.0
-}
tableBatchReaderNew ::
(B.CallStack.HasCallStack, MonadIO m, Arrow.Table.IsTable a) =>
a
{- ^ /@table@/: The table to be read. -}
-> m TableBatchReader
{- ^ __Returns:__ A newly created 'GI.Arrow.Objects.TableBatchReader.TableBatchReader'. -}
tableBatchReaderNew table = liftIO $ do
table' <- unsafeManagedPtrCastPtr table
result <- garrow_table_batch_reader_new table'
checkUnexpectedReturnNULL "tableBatchReaderNew" result
result' <- (wrapObject TableBatchReader) result
touchManagedPtr table
return result'
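-- Hypothetical usage sketch (added; not part of the generated bindings).
-- The reader method name follows the haskell-gi conventions referenced in
-- the method-resolution table above:
--
-- > readFirstBatch table = do
-- >   reader <- tableBatchReaderNew table
-- >   Arrow.RecordBatchReader.recordBatchReaderReadNext reader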
#if ENABLE_OVERLOADING
#endif
| null | https://raw.githubusercontent.com/stephenpascoe/hs-arrow/86c7c452a8626b1d69a3cffd277078d455823271/gi-arrow/GI/Arrow/Objects/TableBatchReader.hs | haskell | * Exported types
* Methods
# SOURCE #
# SOURCE #
| Memory-managed wrapper type.
| Type class for types which can be safely cast to `TableBatchReader`, for instance with `toTableBatchReader`.
# OVERLAPPABLE #
| A convenience alias for `Nothing` :: `Maybe` `TableBatchReader`.
method TableBatchReader::new
method type : Constructor
Lengths : []
returnType : Just (TInterface (Name {namespace = "Arrow", name = "TableBatchReader"}))
throws : False
Skip return : False
^ /@table@/: The table to be read.
^ __Returns:__ A newly created 'GI.Arrow.Objects.TableBatchReader.TableBatchReader'. |
|
Copyright : , and
License : LGPL-2.1
Maintainer : ( )
/No description available in the introspection data./
Copyright : Will Thompson, Iñaki García Etxebarria and Jonas Platte
License : LGPL-2.1
Maintainer : Iñaki García Etxebarria ()
/No description available in the introspection data./
-}
#define ENABLE_OVERLOADING (MIN_VERSION_haskell_gi_overloading(1,0,0) \
&& !defined(__HADDOCK_VERSION__))
module GI.Arrow.Objects.TableBatchReader
(
TableBatchReader(..) ,
IsTableBatchReader ,
toTableBatchReader ,
noTableBatchReader ,
* * new # method : new #
tableBatchReaderNew ,
) where
import Data.GI.Base.ShortPrelude
import qualified Data.GI.Base.ShortPrelude as SP
import qualified Data.GI.Base.Overloading as O
import qualified Prelude as P
import qualified Data.GI.Base.Attributes as GI.Attributes
import qualified Data.GI.Base.ManagedPtr as B.ManagedPtr
import qualified Data.GI.Base.GError as B.GError
import qualified Data.GI.Base.GVariant as B.GVariant
import qualified Data.GI.Base.GValue as B.GValue
import qualified Data.GI.Base.GParamSpec as B.GParamSpec
import qualified Data.GI.Base.CallStack as B.CallStack
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Foreign.Ptr as FP
import qualified GI.GObject.Objects.Object as GObject.Object
newtype TableBatchReader = TableBatchReader (ManagedPtr TableBatchReader)
foreign import ccall "garrow_table_batch_reader_get_type"
c_garrow_table_batch_reader_get_type :: IO GType
instance GObject TableBatchReader where
gobjectType _ = c_garrow_table_batch_reader_get_type
class GObject o => IsTableBatchReader o
#if MIN_VERSION_base(4,9,0)
IsTableBatchReader a
#endif
instance IsTableBatchReader TableBatchReader
instance Arrow.RecordBatchReader.IsRecordBatchReader TableBatchReader
instance GObject.Object.IsObject TableBatchReader
| Cast to ` TableBatchReader ` , for types for which this is known to be safe . For general casts , use ` Data . . ManagedPtr.castTo ` .
toTableBatchReader :: (MonadIO m, IsTableBatchReader o) => o -> m TableBatchReader
toTableBatchReader = liftIO . unsafeCastTo TableBatchReader
noTableBatchReader :: Maybe TableBatchReader
noTableBatchReader = Nothing
#if ENABLE_OVERLOADING
type family ResolveTableBatchReaderMethod (t :: Symbol) (o :: *) :: * where
ResolveTableBatchReaderMethod "bindProperty" o = GObject.Object.ObjectBindPropertyMethodInfo
ResolveTableBatchReaderMethod "bindPropertyFull" o = GObject.Object.ObjectBindPropertyFullMethodInfo
ResolveTableBatchReaderMethod "forceFloating" o = GObject.Object.ObjectForceFloatingMethodInfo
ResolveTableBatchReaderMethod "freezeNotify" o = GObject.Object.ObjectFreezeNotifyMethodInfo
ResolveTableBatchReaderMethod "getv" o = GObject.Object.ObjectGetvMethodInfo
ResolveTableBatchReaderMethod "isFloating" o = GObject.Object.ObjectIsFloatingMethodInfo
ResolveTableBatchReaderMethod "notify" o = GObject.Object.ObjectNotifyMethodInfo
ResolveTableBatchReaderMethod "notifyByPspec" o = GObject.Object.ObjectNotifyByPspecMethodInfo
ResolveTableBatchReaderMethod "readNext" o = Arrow.RecordBatchReader.RecordBatchReaderReadNextMethodInfo
ResolveTableBatchReaderMethod "readNextRecordBatch" o = Arrow.RecordBatchReader.RecordBatchReaderReadNextRecordBatchMethodInfo
ResolveTableBatchReaderMethod "ref" o = GObject.Object.ObjectRefMethodInfo
ResolveTableBatchReaderMethod "refSink" o = GObject.Object.ObjectRefSinkMethodInfo
ResolveTableBatchReaderMethod "runDispose" o = GObject.Object.ObjectRunDisposeMethodInfo
ResolveTableBatchReaderMethod "stealData" o = GObject.Object.ObjectStealDataMethodInfo
ResolveTableBatchReaderMethod "stealQdata" o = GObject.Object.ObjectStealQdataMethodInfo
ResolveTableBatchReaderMethod "thawNotify" o = GObject.Object.ObjectThawNotifyMethodInfo
ResolveTableBatchReaderMethod "unref" o = GObject.Object.ObjectUnrefMethodInfo
ResolveTableBatchReaderMethod "watchClosure" o = GObject.Object.ObjectWatchClosureMethodInfo
ResolveTableBatchReaderMethod "getData" o = GObject.Object.ObjectGetDataMethodInfo
ResolveTableBatchReaderMethod "getNextRecordBatch" o = Arrow.RecordBatchReader.RecordBatchReaderGetNextRecordBatchMethodInfo
ResolveTableBatchReaderMethod "getProperty" o = GObject.Object.ObjectGetPropertyMethodInfo
ResolveTableBatchReaderMethod "getQdata" o = GObject.Object.ObjectGetQdataMethodInfo
ResolveTableBatchReaderMethod "getSchema" o = Arrow.RecordBatchReader.RecordBatchReaderGetSchemaMethodInfo
ResolveTableBatchReaderMethod "setData" o = GObject.Object.ObjectSetDataMethodInfo
ResolveTableBatchReaderMethod "setProperty" o = GObject.Object.ObjectSetPropertyMethodInfo
ResolveTableBatchReaderMethod l o = O.MethodResolutionFailed l o
instance (info ~ ResolveTableBatchReaderMethod t TableBatchReader, O.MethodInfo info TableBatchReader p) => O.IsLabelProxy t (TableBatchReader -> p) where
fromLabelProxy _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#if MIN_VERSION_base(4,9,0)
instance (info ~ ResolveTableBatchReaderMethod t TableBatchReader, O.MethodInfo info TableBatchReader p) => O.IsLabel t (TableBatchReader -> p) where
#if MIN_VERSION_base(4,10,0)
fromLabel = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#else
fromLabel _ = O.overloadedMethod (O.MethodProxy :: O.MethodProxy info)
#endif
#endif
#endif
#if ENABLE_OVERLOADING
instance O.HasAttributeList TableBatchReader
type instance O.AttributeList TableBatchReader = TableBatchReaderAttributeList
type TableBatchReaderAttributeList = ('[ '("recordBatchReader", Arrow.RecordBatchReader.RecordBatchReaderRecordBatchReaderPropertyInfo)] :: [(Symbol, *)])
#endif
#if ENABLE_OVERLOADING
#endif
#if ENABLE_OVERLOADING
type instance O.SignalList TableBatchReader = TableBatchReaderSignalList
type TableBatchReaderSignalList = ('[ '("notify", GObject.Object.ObjectNotifySignalInfo)] :: [(Symbol, *)])
#endif
: [ Arg { argCName = " table " , argType = TInterface ( Name { namespace = " Arrow " , name = " Table " } ) , direction = DirectionIn , mayBeNull = False , argDoc = Documentation { rawDocText = Just " The table to be read . " , sinceVersion = Nothing } , argScope = ScopeTypeInvalid , argClosure = -1 , argDestroy = -1 , argCallerAllocates = False , transfer = TransferNothing } ]
foreign import ccall "garrow_table_batch_reader_new" garrow_table_batch_reader_new ::
table : ( Name { namespace = " Arrow " , name = " Table " } )
IO (Ptr TableBatchReader)
|
/No description available in the introspection data./
@since 0.8.0
/No description available in the introspection data./
@since 0.8.0
-}
tableBatchReaderNew ::
(B.CallStack.HasCallStack, MonadIO m, Arrow.Table.IsTable a) =>
a
-> m TableBatchReader
tableBatchReaderNew table = liftIO $ do
table' <- unsafeManagedPtrCastPtr table
result <- garrow_table_batch_reader_new table'
checkUnexpectedReturnNULL "tableBatchReaderNew" result
result' <- (wrapObject TableBatchReader) result
touchManagedPtr table
return result'
#if ENABLE_OVERLOADING
#endif
|
e5f00de0fd74a080f0c3263367bda04971336f32ca6fab714bea3119b2eec564 | nuprl/gradual-typing-performance | cut-tail.rkt | #lang racket
(require "data.rkt")
(require racket/vector)
;; NeSegs is one of:
;; - (cons Posn empty)
;; - (cons Posn NeSegs)
;; cut-tail : NeSegs -> Segs
;; Cut off the tail.
(define (cut-tail segs)
(let ([r (vector-drop segs 1)])
(cond [(equal? '#() r) '#()]
[else (vector-append (vector (vector-ref segs 0)) (cut-tail r))])))
(provide
cut-tail)
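;; Minimal usage sketch (added; the symbols stand in for the Posn values that
;; real callers take from data.rkt): the last segment is dropped.
(module+ test
  (require rackunit)
  (check-equal? (cut-tail (vector 'p1 'p2 'p3)) (vector 'p1 'p2)))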
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/postmortem/experiments/vector-vs-list/snake-with-vectors/cut-tail.rkt | racket | - (cons Posn empty)
cut-tail : NeSegs -> Segs
Cut off the tail. | #lang racket
(require "data.rkt")
(require racket/vector)
NeSegs is one of :
- ( cons )
(define (cut-tail segs)
(let ([r (vector-drop segs 1)])
(cond [(equal? '#() r) '#()]
[else (vector-append (vector (vector-ref segs 0)) (cut-tail r))])))
(provide
cut-tail)
|
b152229fb5ee8d0233345bcd78cd3aed503b0c96b3ff63ac3974a840646e6769 | ktakashi/sagittarius-scheme | r6rs+.scm | ;; -*- scheme -*-
(import (rnrs)
(rnrs eval)
(rnrs mutable-pairs)
(core errors)
(srfi :64 testing))
(test-begin "R6RS+ functionality tests")
;; map
(test-equal "map with different length"
'((a . d) (b . e) (c . f))
(map cons '(a b c d e f) '(d e f)))
;; for-each
(test-equal "for-each with different length"
'((g . j) (h . k) (i . l))
(let* ((r `((a . d) (b . e) (c . f)))
(c r))
(guard (e (else (describe-condition e)))
(for-each (lambda (a b)
(let ((t (car c)))
(set! c (cdr c))
(set-car! t a)
(set-cdr! t b)))
'(g h i j k l) '(j k l))
r)))
(test-assert "string-ref fallback" (boolean? (string-ref "abc" 3 #f)))
(test-equal "string-copy"
"bcdef"
(string-copy "abcdef" 1))
(test-equal "string-copy"
"bcd"
(string-copy "abcdef" 1 4))
(define v '#(1 2 3 4 5 6))
(define l '(1 2 3 4 5 6))
;; fallback
(test-assert "vector fallback" (boolean? (vector-ref v 6 #f)))
(test-equal "vector->list with start"
'(2 3 4 5 6)
(vector->list v 1))
(test-equal "vector->list with start and end"
'(2 3)
(vector->list v 1 3))
(test-equal "list->vector with start"
'#(2 3 4 5 6)
(list->vector l 1))
(test-equal "list->vector with start and end"
'#(2 3)
(list->vector l 1 3))
(test-equal "vector-fill!"
'(#(1 1 1 #f #f #f #f #f #f #f)
#(#f #f #f 2 2 2 #f #f #f #f)
#(3 3 3 3 3 3 3 3 3 3))
(list (let ((v (make-vector 10 #f)))
(vector-fill! v 1 0 3)
v)
(let ((v (make-vector 10 #f)))
(vector-fill! v 2 3 6)
v)
(let ((v (make-vector 10 #f)))
(vector-fill! v 3)
v)
))
(test-equal "bytevector-copy"
#vu8(2 3 4 5 6)
(bytevector-copy #vu8(1 2 3 4 5 6) 1))
(test-equal "bytevector-copy"
#vu8(2 3 4 5)
(bytevector-copy #vu8(1 2 3 4 5 6) 1 5))
;; macro definition comes *after* the usage
;; this is actually not valid R6RS code
;; eval can't have define but we allow it
(test-equal "simple wrap" '(123)
(eval '(begin
(define (fun) (mac))
(fun)
(let-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_) (print 123))))))
(environment '(rnrs))))
(test-equal "let-syntax define" '(hoge)
(eval '(begin
(define (fun) (foo))
(let-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_)
(begin
(print 123)
(flush-output-port (current-output-port))))))
(define (foo)
(print 'hoge))
)
(fun))
(environment '(rnrs))))
(test-equal "simple wrap (letrec-syntax)" '(123)
(eval '(begin
(define (fun) (mac))
(fun)
(letrec-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_) (print 123))))))
(environment '(rnrs))))
(test-equal "let-syntax define (letrec-syntax)" '(hoge)
(eval '(begin
(define (fun) (foo))
(letrec-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_)
(begin
(print 123)
(flush-output-port (current-output-port))))))
(define (foo)
(print 'hoge))
)
(fun))
(environment '(rnrs))))
;; call #107
(let ()
(define-record-type <foo>
(fields a b))
(define foo-rcd (make-record-constructor-descriptor
(record-type-descriptor <foo>) #f
(lambda (p) (lambda () (p 1 2)))))
(define (foo-fields foo) (list (<foo>-a foo) (<foo>-b foo)))
(test-equal "multiple record constructor descriptor (1)"
'(a b)
(foo-fields
((record-constructor (record-constructor-descriptor <foo>))
'a 'b)))
(test-equal "multiple record constructor descriptor (2)"
'(1 2)
(foo-fields ((record-constructor foo-rcd))))
)
;; call #112
;; FIXME the test case should use approximate value
;; for now disabled
;;(test-equal "(cos 0+i)" 1.5430806348152437-0.0i (cos 0+i))
;;(test-equal "(tan 0+i)" 0.0+0.761594155955765i (tan 0+i))
;;(test-equal "(sin 0+i)" 0.0+1.1752011936438014i (sin 0+i))
;;(test-equal "(sin 0+i)" 0.0+1.1752011936438014i (sin 0+i))
;;(test-equal "(asin 0+i)" -0.0+0.8813735870195429i (asin 0+i))
;;(test-equal "(acos 0+i)" 1.5707963267948966-0.8813735870195429i (acos 0+i))
;;(test-equal "(atan 0+i)" 0.0+inf.0i (atan 0+i))
(test-error "(atan 0+i 0+i)" condition? (atan 0+i 0+i))
(test-error "(atan 0 0+i)" condition? (atan 0 0+i))
(test-error "(atan 0+i 0)" condition? (atan 0+i 0))
;; call #115
(test-error "u8-list->bytevector (non u8 list)" assertion-violation?
(u8-list->bytevector '(a)))
(test-error "u8-list->bytevector (error)" assertion-violation?
(u8-list->bytevector '(1 2 . 3)))
;; BOM for UTF16
;; it can only be done by using (utf-16-codec)
(test-equal "BOM"
;; internally, it will always big endign
#vu8(#xFE #xFF)
(string->bytevector "" (make-transcoder (utf-16-codec))))
(test-error "parent has custom protocol"
condition?
(eval
'(let ()
(define-record-type this-parent
(fields count elements)
(protocol
(lambda (p)
(lambda (size)
(p size (make-vector size))))))
;; error
(define-record-type child
(fields attr)
(parent this-parent)))
(environment '(rnrs))))
;; From Larceny
(let ()
(define (string~? s1 s2)
(define (replacement? c)
(char=? c #\xfffd))
(define (canonicalized s)
(let loop ((rchars (reverse (string->list s)))
(cchars '()))
(cond ((or (null? rchars) (null? (cdr rchars)))
(list->string cchars))
((and (replacement? (car rchars))
(replacement? (cadr rchars)))
(loop (cdr rchars) cchars))
(else
(loop (cdr rchars) (cons (car rchars) cchars))))))
(string=? (canonicalized s1) (canonicalized s2)))
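  ;; Illustration (added): runs of U+FFFD collapse before comparison, e.g.
  ;; (string~? "a\xfffd;\xfffd;b" "a\xfffd;b") => #t
  ;; so the UTF-8 decoding tests below accept either one replacement character
  ;; or several for a single malformed byte sequence.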
(test-assert "utf-8, errors 1"
(string~? (utf8->string '#vu8(#x61 ; a
#xc0 #x62 ; ?b
#xc1 #x63 ; ?c
#xc2 #x64 ; ?d
#x80 #x65 ; ?e
#xc0 #xc0 #x66 ; ??f
#xe0 #x67 ; ?g
))
"a\xfffd;b\xfffd;c\xfffd;d\xfffd;e\xfffd;\xfffd;f\xfffd;g"))
(test-assert "utf-8, errors 2"
(string~? (utf8->string '#vu8(#xe0 #x80 #x80 #x68 ; ???h
#xe0 #xc0 #x80 #x69 ; ???i
#xf0 #x6a ; ?j
))
"\xfffd;\xfffd;\xfffd;h\xfffd;\xfffd;\xfffd;i\xfffd;j"))
(test-assert "utf-8, errors 3"
(string~? (utf8->string '#vu8(#x61 ; a
#xf0 #x80 #x80 #x80 #x62 ; ????b
#xf0 #x90 #x80 #x80 #x63 ; .c
))
"a\xfffd;\xfffd;\xfffd;\xfffd;b\x10000;c"))
(test-assert "utf-8, errors 4"
(string~? (utf8->string '#vu8(#x61 ; a
#xf0 #xbf #xbf #xbf #x64 ; .d
#xf0 #xbf #xbf #x65 ; ?e
#xf0 #xbf #x66 ; ?f
))
"a\x3ffff;d\xfffd;e\xfffd;f"))
(test-assert "utf-8, errors 5"
(string~? (utf8->string '#vu8(#x61 ; a
#xf4 #x8f #xbf #xbf #x62 ; .b
#xf4 #x90 #x80 #x80 #x63 ; ????c
))
"a\x10ffff;b\xfffd;\xfffd;\xfffd;\xfffd;c"))
(test-assert "utf-8, errors 6"
(string~? (utf8->string '#vu8(#x61 ; a
#xf5 #x80 #x80 #x80 #x64 ; ????d
))
"a\xfffd;\xfffd;\xfffd;\xfffd;d")))
(let ()
(define buf-size 10)
(define bv (make-bytevector buf-size (char->integer #\a)))
(define (bytevector-append . bvs)
(let* ((len (fold-left (lambda (sum bv)
(+ (bytevector-length bv) sum)) 0 bvs))
(r (make-bytevector len)))
(fold-left (lambda (off bv)
(let ((len (bytevector-length bv)))
(bytevector-copy! bv 0 r off len)
(+ off len)))
0 bvs)
r))
(let ((bv2 (bytevector-append bv #vu8(#xe0 #x67 #x0a))))
(call-with-port (transcoded-port
(open-bytevector-input-port bv2)
(make-transcoder (utf-8-codec)
(eol-style lf)
(error-handling-mode replace)))
(lambda (in)
(get-string-n in (+ 1 buf-size))
(test-equal "read string after error code" "g\n"
(get-string-all in))))
(call-with-port (transcoded-port
(open-bytevector-input-port #vu8(#xe0 #x67 #x0a))
(make-transcoder (utf-8-codec)
(eol-style lf)
(error-handling-mode replace)))
(lambda (in)
(get-char in)
(test-equal "read char after error code" #\g (get-char in))))))
;; call #127
;; in $lref
(test-error "letrec (1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar bar))
bar))
(environment '(rnrs))))
;; in $asm
(test-error "letrec (2)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (cons 'a bar)))
bar))
(environment '(rnrs))))
;; in $call
(test-error "letrec (3)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (for-all (lambda (a) a) bar)))
bar))
(environment '(rnrs))))
;; in $let
(test-error "letrec (4)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (let ((bar2 bar)) bar2)))
bar))
(environment '(rnrs))))
(test-error "letrec (4.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (let ((bar2 'a)) bar)))
bar))
(environment '(rnrs))))
;; in $let ok
(test-assert "letrec (5)"
(eval '(lambda (bar)
(letrec ((bar (let ((bar 'foo)) bar)))
bar))
(environment '(rnrs))))
;; in $receive
(test-error "letrec (6)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (bar) (values bar) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (6.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (buz) (values a) bar)))
bar))
(environment '(rnrs))))
;; in $receive ok
(test-error "letrec (7)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (bar) (values 'a) bar)))
bar))
(environment '(rnrs))))
;; in $seq
(test-error "letrec (8)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (begin bar #t)))
bar))
(environment '(rnrs))))
;; in $list
(test-error "letrec (9)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (list bar #t)))
bar))
(environment '(rnrs))))
;; in $if
(test-error "letrec (10)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if bar #t #f)))
bar))
(environment '(rnrs))))
(test-error "letrec (10.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if #t bar #f)))
bar))
(environment '(rnrs))))
(test-error "letrec (10.2)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if #f #f bar)))
bar))
(environment '(rnrs))))
;; referred above
(test-error "letrec (11)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar buz)
(buz #t))
bar))
(environment '(rnrs))))
(test-error "letrec (11.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((buz #t)
(bar buz))
bar))
(environment '(rnrs))))
;; ok
(test-assert "letrec (12)"
(eval '(lambda (bar)
(letrec ((bar (lambda () bar)))
bar))
(environment '(rnrs))))
;; letrec*
;; in $lref
(test-error "letrec* (1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar bar))
bar))
(environment '(rnrs))))
;; in $asm
(test-error "letrec* (2)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (cons 'a bar)))
bar))
(environment '(rnrs))))
;; in $call
(test-error "letrec* (3)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (for-all (lambda (a) a) bar)))
bar))
(environment '(rnrs))))
;; in $let
(test-error "letrec* (4)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (let ((bar2 bar)) bar2)))
bar))
(environment '(rnrs))))
(test-error "letrec* (4.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (let ((bar2 'a)) bar)))
bar))
(environment '(rnrs))))
;; in $let ok
(test-assert "letrec* (5)"
(eval '(lambda (bar)
(letrec* ((bar (let ((bar 'foo)) bar)))
bar))
(environment '(rnrs))))
;; in $receive
(test-error "letrec* (6)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (bar) (values bar) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (6.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (buz) (values a) bar)))
bar))
(environment '(rnrs))))
;; in $receive ok
(test-error "letrec* (7)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (bar) (values 'a) bar)))
bar))
(environment '(rnrs))))
;; in $seq
(test-error "letrec* (8)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (begin bar #t)))
bar))
(environment '(rnrs))))
;; in $list
(test-error "letrec* (9)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (list bar #t)))
bar))
(environment '(rnrs))))
;; in $if
(test-error "letrec* (10)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if bar #t #f)))
bar))
(environment '(rnrs))))
(test-error "letrec* (10.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if #t bar #f)))
bar))
(environment '(rnrs))))
(test-error "letrec* (10.2)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if #f #f bar)))
bar))
(environment '(rnrs))))
;; referred above
(test-error "letrec* (11)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar buz)
(buz #t))
bar))
(environment '(rnrs))))
(test-assert "letrec* (11.1)"
(eval '(lambda (bar)
(letrec* ((buz #t)
(bar buz))
bar))
(environment '(rnrs))))
;; ok
(test-assert "letrec* (12)"
(eval '(lambda (bar)
(letrec* ((bar (lambda () bar)))
bar))
(environment '(rnrs))))
;; #e1@1
(test-error "#e1@1" implementation-restriction-violation?
(read (open-string-input-port "#e1@1")))
;; peek buffer issue
(define (test-port in peek read expected)
(test-equal (format "port position of ~a ~a" in expected)
expected
(let* ((c0 (peek in))
(pos (port-position in))
(c1 (begin
(set-port-position! in 1)
(read in))))
(list c0 pos c1)))
(close-port in))
(test-port (open-string-input-port "ab") lookahead-char get-char '(#\a 0 #\b))
(test-port (open-bytevector-input-port #vu8(1 2)) lookahead-u8 get-u8
'(1 0 2))
(let ((file "peek.tmp"))
(when (file-exists? file) (delete-file file))
(call-with-output-file file
(lambda (out) (put-string out "abcdefg\n")))
(test-port (open-file-input-port file) lookahead-u8 get-u8
'(97 0 98))
(test-port (open-file-input-port file
(file-options no-fail)
(buffer-mode block)
(native-transcoder))
lookahead-char get-char
'(#\a 0 #\b))
(delete-file file))
;; invalid identifer
(test-error "read identifier(quote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo'bar")))
(test-error "read identifier(unquote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo,bar")))
(test-error "read identifier(quasiquote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo`bar")))
;; negative result of gcd
(test-equal "gcd returns non-negative integer"
2182600451
(gcd -165543184715050652143983385447792 15946333291432216432322993695213691))
(test-equal 'b
(eval '(let ()
(let-syntax ()
(define foo 'b))
foo)
(environment '(rnrs))))
(test-equal 'b
(eval '(let ()
(letrec-syntax ()
(define foo 'b))
foo)
(environment '(rnrs))))
(test-assert (eval '(library (foo)
(export)
(import (rnrs))
(define-syntax foo
(syntax-rules ()
((foo)
(define set! 42))))
(foo))
(environment '(core) '(sagittarius))))
(test-end)
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/3971b131d463696297c320dbe595dffd08867dac/test/tests/r6rs%2B.scm | scheme | -*- scheme -*-
map
for-each
fallback
macro defininion comes *after* the usage
this actually not a valid R6RS code
eval can't have define but we allow it
FIXME the test case should use approximate value
for now disabled
(test-equal "(asin 0+i)" -0.0+0.8813735870195429i (asin 0+i))
BOM for UTF16
it can only be done by using (utf-16-codec)
internally, it will always big endign
error
From Larceny
a
?b
?c
?d
?e
??f
?g
???h
???i
?j
a
????b
.c
a
.d
?e
?f
a
.b
????c
a
????d
in $call
in $let
in $let ok
in $receive
in $receive ok
in $list
in $if
referred above
ok
letrec*
in $call
in $let
in $let ok
in $receive
in $receive ok
in $list
in $if
referred above
ok
#e1@1
peek buffer issue
invalid identifer | (import (rnrs)
(rnrs eval)
(rnrs mutable-pairs)
(core errors)
(srfi :64 testing))
(test-begin "R6RS+ functionality tests")
(test-equal "map with different length"
'((a . d) (b . e) (c . f))
(map cons '(a b c d e f) '(d e f)))
(test-equal "for-each with different length"
'((g . j) (h . k) (i . l))
(let* ((r `((a . d) (b . e) (c . f)))
(c r))
(guard (e (else (describe-condition e)))
(for-each (lambda (a b)
(let ((t (car c)))
(set! c (cdr c))
(set-car! t a)
(set-cdr! t b)))
'(g h i j k l) '(j k l))
r)))
(test-assert "string-ref fallback" (boolean? (string-ref "abc" 3 #f)))
(test-equal "string-copy"
"bcdef"
(string-copy "abcdef" 1))
(test-equal "string-copy"
"bcd"
(string-copy "abcdef" 1 4))
(define v '#(1 2 3 4 5 6))
(define l '(1 2 3 4 5 6))
(test-assert "vector fallback" (boolean? (vector-ref v 6 #f)))
(test-equal "vector->list with start"
'(2 3 4 5 6)
(vector->list v 1))
(test-equal "vector->list with start and end"
'(2 3)
(vector->list v 1 3))
(test-equal "list->vector with start"
'#(2 3 4 5 6)
(list->vector l 1))
(test-equal "list->vector with start and end"
'#(2 3)
(list->vector l 1 3))
(test-equal "vector-fill!"
'(#(1 1 1 #f #f #f #f #f #f #f)
#(#f #f #f 2 2 2 #f #f #f #f)
#(3 3 3 3 3 3 3 3 3 3))
(list (let ((v (make-vector 10 #f)))
(vector-fill! v 1 0 3)
v)
(let ((v (make-vector 10 #f)))
(vector-fill! v 2 3 6)
v)
(let ((v (make-vector 10 #f)))
(vector-fill! v 3)
v)
))
(test-equal "bytevector-copy"
#vu8(2 3 4 5 6)
(bytevector-copy #vu8(1 2 3 4 5 6) 1))
(test-equal "bytevector-copy"
#vu8(2 3 4 5)
(bytevector-copy #vu8(1 2 3 4 5 6) 1 5))
(test-equal "simple wrap" '(123)
(eval '(begin
(define (fun) (mac))
(fun)
(let-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_) (print 123))))))
(environment '(rnrs))))
(test-equal "let-syntax define" '(hoge)
(eval '(begin
(define (fun) (foo))
(let-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_)
(begin
(print 123)
(flush-output-port (current-output-port))))))
(define (foo)
(print 'hoge))
)
(fun))
(environment '(rnrs))))
(test-equal "simple wrap (letrec-syntax)" '(123)
(eval '(begin
(define (fun) (mac))
(fun)
(letrec-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_) (print 123))))))
(environment '(rnrs))))
(test-equal "let-syntax define (letrec-syntax)" '(hoge)
(eval '(begin
(define (fun) (foo))
(letrec-syntax ((print (syntax-rules ()
((_ o ...) (list o ...)))))
(define-syntax mac
(syntax-rules ()
((_)
(begin
(print 123)
(flush-output-port (current-output-port))))))
(define (foo)
(print 'hoge))
)
(fun))
(environment '(rnrs))))
;; call #107
(let ()
(define-record-type <foo>
(fields a b))
(define foo-rcd (make-record-constructor-descriptor
(record-type-descriptor <foo>) #f
(lambda (p) (lambda () (p 1 2)))))
(define (foo-fields foo) (list (<foo>-a foo) (<foo>-b foo)))
(test-equal "multiple record constructor descriptor (1)"
'(a b)
(foo-fields
((record-constructor (record-constructor-descriptor <foo>))
'a 'b)))
(test-equal "multiple record constructor descriptor (2)"
'(1 2)
(foo-fields ((record-constructor foo-rcd))))
)
;; call #112
( test - equal " ( cos 0+i ) " 1.5430806348152437 - 0.0i ( cos 0+i ) )
( test - equal " ( tan 0+i ) " 0.0 + 0.761594155955765i ( tan 0+i ) )
( test - equal " ( sin 0+i ) " 0.0 + 1.1752011936438014i ( sin 0+i ) )
( test - equal " ( sin 0+i ) " 0.0 + 1.1752011936438014i ( sin 0+i ) )
( test - equal " ( acos 0+i ) " 1.5707963267948966 - 0.8813735870195429i ( acos 0+i ) )
( test - equal " ( atan 0+i ) " 0.0+inf.0i ( atan 0+i ) )
(test-error "(atan 0+i 0+i)" condition? (atan 0+i 0+i))
(test-error "(atan 0 0+i)" condition? (atan 0 0+i))
(test-error "(atan 0+i 0)" condition? (atan 0+i 0))
;; call #115
(test-error "u8-list->bytevector (non u8 list)" assertion-violation?
(u8-list->bytevector '(a)))
(test-error "u8-list->bytevector (error)" assertion-violation?
(u8-list->bytevector '(1 2 . 3)))
(test-equal "BOM"
#vu8(#xFE #xFF)
(string->bytevector "" (make-transcoder (utf-16-codec))))
(test-error "parent has custom protocol"
condition?
(eval
'(let ()
(define-record-type this-parent
(fields count elements)
(protocol
(lambda (p)
(lambda (size)
(p size (make-vector size))))))
(define-record-type child
(fields attr)
(parent this-parent)))
(environment '(rnrs))))
(let ()
(define (string~? s1 s2)
(define (replacement? c)
(char=? c #\xfffd))
(define (canonicalized s)
(let loop ((rchars (reverse (string->list s)))
(cchars '()))
(cond ((or (null? rchars) (null? (cdr rchars)))
(list->string cchars))
((and (replacement? (car rchars))
(replacement? (cadr rchars)))
(loop (cdr rchars) cchars))
(else
(loop (cdr rchars) (cons (car rchars) cchars))))))
(string=? (canonicalized s1) (canonicalized s2)))
(test-assert "utf-8, errors 1"
))
"a\xfffd;b\xfffd;c\xfffd;d\xfffd;e\xfffd;\xfffd;f\xfffd;g"))
(test-assert "utf-8, errors 2"
))
"\xfffd;\xfffd;\xfffd;h\xfffd;\xfffd;\xfffd;i\xfffd;j"))
(test-assert "utf-8, errors 3"
))
"a\xfffd;\xfffd;\xfffd;\xfffd;b\x10000;c"))
(test-assert "utf-8, errors 4"
))
"a\x3ffff;d\xfffd;e\xfffd;f"))
(test-assert "utf-8, errors 5"
))
"a\x10ffff;b\xfffd;\xfffd;\xfffd;\xfffd;c"))
(test-assert "utf-8, errors 6"
))
"a\xfffd;\xfffd;\xfffd;\xfffd;d")))
(let ()
(define buf-size 10)
(define bv (make-bytevector buf-size (char->integer #\a)))
(define (bytevector-append . bvs)
(let* ((len (fold-left (lambda (sum bv)
(+ (bytevector-length bv) sum)) 0 bvs))
(r (make-bytevector len)))
(fold-left (lambda (off bv)
(let ((len (bytevector-length bv)))
(bytevector-copy! bv 0 r off len)
(+ off len)))
0 bvs)
r))
(let ((bv2 (bytevector-append bv #vu8(#xe0 #x67 #x0a))))
(call-with-port (transcoded-port
(open-bytevector-input-port bv2)
(make-transcoder (utf-8-codec)
(eol-style lf)
(error-handling-mode replace)))
(lambda (in)
(get-string-n in (+ 1 buf-size))
(test-equal "read string after error code" "g\n"
(get-string-all in))))
(call-with-port (transcoded-port
(open-bytevector-input-port #vu8(#xe0 #x67 #x0a))
(make-transcoder (utf-8-codec)
(eol-style lf)
(error-handling-mode replace)))
(lambda (in)
(get-char in)
(test-equal "read char after error code" #\g (get-char in))))))
;; call #127
;; in $lref
(test-error "letrec (1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar bar))
bar))
(environment '(rnrs))))
;; in $asm
(test-error "letrec (2)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (cons 'a bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (3)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (for-all (lambda (a) a) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (4)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (let ((bar2 bar)) bar2)))
bar))
(environment '(rnrs))))
(test-error "letrec (4.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (let ((bar2 'a)) bar)))
bar))
(environment '(rnrs))))
(test-assert "letrec (5)"
(eval '(lambda (bar)
(letrec ((bar (let ((bar 'foo)) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (6)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (bar) (values bar) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (6.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (buz) (values a) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (7)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (receive (bar) (values 'a) bar)))
bar))
(environment '(rnrs))))
;; in $seq
(test-error "letrec (8)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (begin bar #t)))
bar))
(environment '(rnrs))))
(test-error "letrec (9)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (list bar #t)))
bar))
(environment '(rnrs))))
(test-error "letrec (10)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if bar #t #f)))
bar))
(environment '(rnrs))))
(test-error "letrec (10.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if #t bar #f)))
bar))
(environment '(rnrs))))
(test-error "letrec (10.2)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar (if #f #f bar)))
bar))
(environment '(rnrs))))
(test-error "letrec (11)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((bar buz)
(buz #t))
bar))
(environment '(rnrs))))
(test-error "letrec (11.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec ((buz #t)
(bar buz))
bar))
(environment '(rnrs))))
(test-assert "letrec (12)"
(eval '(lambda (bar)
(letrec ((bar (lambda () bar)))
bar))
(environment '(rnrs))))
;; in $lref
(test-error "letrec* (1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar bar))
bar))
(environment '(rnrs))))
;; in $asm
(test-error "letrec* (2)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (cons 'a bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (3)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (for-all (lambda (a) a) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (4)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (let ((bar2 bar)) bar2)))
bar))
(environment '(rnrs))))
(test-error "letrec* (4.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (let ((bar2 'a)) bar)))
bar))
(environment '(rnrs))))
(test-assert "letrec* (5)"
(eval '(lambda (bar)
(letrec* ((bar (let ((bar 'foo)) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (6)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (bar) (values bar) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (6.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (buz) (values a) bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (7)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (receive (bar) (values 'a) bar)))
bar))
(environment '(rnrs))))
;; in $seq
(test-error "letrec* (8)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (begin bar #t)))
bar))
(environment '(rnrs))))
(test-error "letrec* (9)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (list bar #t)))
bar))
(environment '(rnrs))))
(test-error "letrec* (10)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if bar #t #f)))
bar))
(environment '(rnrs))))
(test-error "letrec* (10.1)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if #t bar #f)))
bar))
(environment '(rnrs))))
(test-error "letrec* (10.2)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar (if #f #f bar)))
bar))
(environment '(rnrs))))
(test-error "letrec* (11)"
syntax-violation?
(eval '(lambda (bar)
(letrec* ((bar buz)
(buz #t))
bar))
(environment '(rnrs))))
(test-assert "letrec* (11.1)"
(eval '(lambda (bar)
(letrec* ((buz #t)
(bar buz))
bar))
(environment '(rnrs))))
(test-assert "letrec* (12)"
(eval '(lambda (bar)
(letrec* ((bar (lambda () bar)))
bar))
(environment '(rnrs))))
(test-error "#e1@1" implementation-restriction-violation?
(read (open-string-input-port "#e1@1")))
(define (test-port in peek read expected)
(test-equal (format "port position of ~a ~a" in expected)
expected
(let* ((c0 (peek in))
(pos (port-position in))
(c1 (begin
(set-port-position! in 1)
(read in))))
(list c0 pos c1)))
(close-port in))
(test-port (open-string-input-port "ab") lookahead-char get-char '(#\a 0 #\b))
(test-port (open-bytevector-input-port #vu8(1 2)) lookahead-u8 get-u8
'(1 0 2))
(let ((file "peek.tmp"))
(when (file-exists? file) (delete-file file))
(call-with-output-file file
(lambda (out) (put-string out "abcdefg\n")))
(test-port (open-file-input-port file) lookahead-u8 get-u8
'(97 0 98))
(test-port (open-file-input-port file
(file-options no-fail)
(buffer-mode block)
(native-transcoder))
lookahead-char get-char
'(#\a 0 #\b))
(delete-file file))
(test-error "read identifier(quote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo'bar")))
(test-error "read identifier(unquote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo,bar")))
(test-error "read identifier(quasiquote)" lexical-violation?
(read (open-string-input-port "#!r6rs foo`bar")))
;; negative result of gcd
(test-equal "gcd returns non-negative integer"
2182600451
(gcd -165543184715050652143983385447792 15946333291432216432322993695213691))
(test-equal 'b
(eval '(let ()
(let-syntax ()
(define foo 'b))
foo)
(environment '(rnrs))))
(test-equal 'b
(eval '(let ()
(letrec-syntax ()
(define foo 'b))
foo)
(environment '(rnrs))))
(test-assert (eval '(library (foo)
(export)
(import (rnrs))
(define-syntax foo
(syntax-rules ()
((foo)
(define set! 42))))
(foo))
(environment '(core) '(sagittarius))))
(test-end)
|
23ed9e8f4c58d193de56e0bc66081133185b72459e17a95c580d0a412c706baf | funcool/catacumba | testing.clj | Copyright ( c ) 2015 < >
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are met:
;;
;; * Redistributions of source code must retain the above copyright notice, this
;; list of conditions and the following disclaimer.
;;
;; * Redistributions in binary form must reproduce the above copyright notice,
;; this list of conditions and the following disclaimer in the documentation
;; and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(ns catacumba.testing
"Testing facilities for catacuba."
(:require [catacumba.core :as ct]
[cognitect.transit :as transit])
(:import [java.io ByteArrayOutputStream ByteArrayInputStream]))
(defmacro with-server
"Evaluate code in context of running catacumba server."
[{:keys [handler sleep] :or {sleep 50} :as options} & body]
`(let [server# (ct/run-server ~handler (merge ~options {:debug true}))]
(try
~@body
(finally
(.stop server#)
(Thread/sleep ~sleep)))))
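;; Illustrative usage only (the handler var and the HTTP client call below are
;; hypothetical, not part of this namespace):
;;   (with-server {:handler app :port 5050}
;;     (client/get "http://localhost:5050/"))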
(defn data->transit
"Simple util to convert clojure data structures into transit"
([data]
(data->transit data :json))
([data encoding]
(with-open [out (ByteArrayOutputStream.)]
(let [w (transit/writer out encoding)]
(transit/write w data)
(.toByteArray out)))))
| null | https://raw.githubusercontent.com/funcool/catacumba/a493843176ee8defa2f3c6afa23c720f495d9341/src/clojure/catacumba/testing.clj | clojure | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | Copyright ( c ) 2015 < >
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
(ns catacumba.testing
"Testing facilities for catacuba."
(:require [catacumba.core :as ct]
[cognitect.transit :as transit])
(:import [java.io ByteArrayOutputStream ByteArrayInputStream]))
(defmacro with-server
"Evaluate code in context of running catacumba server."
[{:keys [handler sleep] :or {sleep 50} :as options} & body]
`(let [server# (ct/run-server ~handler (merge ~options {:debug true}))]
(try
~@body
(finally
(.stop server#)
(Thread/sleep ~sleep)))))
(defn data->transit
"Simple util to convert clojure data structures into transit"
([data]
(data->transit data :json))
([data encoding]
(with-open [out (ByteArrayOutputStream.)]
(let [w (transit/writer out encoding)]
(transit/write w data)
(.toByteArray out)))))
|
9369deabfb24d614553edefb7d98bf00f62a6e45d2a701ecaadf2036c5a32980 | cjdev/haskell-fundamentals | Stubs.hs | # LANGUAGE DataKinds #
{-# LANGUAGE ExplicitNamespaces #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
module Maintainability.Freer.Stubs where
import qualified Data.Text as T
import Control.Monad.Freer (Eff, interpret, reinterpret)
import Control.Monad.Freer.State (State, evalState, get, put)
import Control.Monad.Freer.Writer (runWriter, tell)
import Control.Natural (type (~>))
import Data.Text (Text)
import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime)
import Maintainability.Freer.Effects
--------------------------------------------------------------------------------
-- Simple effects
runArgumentsPure :: [Text] -> Eff (Arguments ': effs) ~> Eff effs
runArgumentsPure args = interpret $ \case
GetArgs -> pure args
runFileSystemPure :: [(Text, Text)] -> Eff (FileSystem ': effs) ~> Eff effs
runFileSystemPure fs = interpret $ \case
ReadFile path ->
maybe (fail $ "readFile: no such file ‘" ++ T.unpack path ++ "’")
pure (lookup path fs)
runLogPure :: Eff (Log ': effs) a -> Eff effs (a, [Text])
runLogPure = runWriter . reinterpret (\case
Log txt -> tell [txt])
--------------------------------------------------------------------------------
-- Time
data ClockState
= ClockStopped !UTCTime
| ClockTick !UTCTime ClockState
| ClockEndOfTime
deriving (Eq, Show)
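-- | Interprets the 'Time' effect against a scripted 'ClockState'
-- (stopped, ticking, or exhausted).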
runClockPure :: ClockState -> Eff (Time ': effs) ~> Eff effs
runClockPure initialState action = evalState initialState (handle action)
where
handle :: Eff (Time ': effs) ~> Eff (State ClockState ': effs)
handle = reinterpret $ \case
CurrentTime -> get >>= \case
ClockStopped t -> pure t
ClockTick t s -> put s >> pure t
ClockEndOfTime -> fail "currentTime: end of time"
-- | Runs a computation with a constant time that never changes.
runStoppedClockPure :: UTCTime -> Eff (Time ': effs) ~> Eff effs
runStoppedClockPure time = runClockPure (ClockStopped time)
-- | Runs a computation with a clock that advances by 1 second every time the
-- time is read.
runTickingClockPure :: UTCTime -> Eff (Time ': effs) ~> Eff effs
runTickingClockPure = runTickingClockPure' 1
-- | Runs a computation with a clock that advances by the given interval every
-- time the time is read.
runTickingClockPure' :: NominalDiffTime -> UTCTime -> Eff (Time ': effs) ~> Eff effs
runTickingClockPure' d t = runClockPure (ticks t)
where ticks t' = ClockTick t' (ticks (addUTCTime d t'))
-- | Runs a computation with a clock that replays the provided list of times, in
-- order. If the time list of times is exhausted, 'currentTime' will throw an
-- exception the next time it is called.
runPresetClockPure :: [UTCTime] -> Eff (Time ': effs) ~> Eff effs
runPresetClockPure ts = runClockPure (foldr ClockTick ClockEndOfTime ts)
| null | https://raw.githubusercontent.com/cjdev/haskell-fundamentals/03982d7b3ca140bfecc69fa4873fcc0e4fa54044/test/Maintainability/Freer/Stubs.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
------------------------------------------------------------------------------
Simple effects
------------------------------------------------------------------------------
| Runs a computation with a constant time that never changes.
time is read.
| Runs a computation with a clock that advances by the given interval every
time the time is read.
| Runs a computation with a clock that replays the provided list of times, in
order. If the time list of times is exhausted, 'currentTime' will throw an
exception the next time it is called. | # LANGUAGE DataKinds #
{-# LANGUAGE ExplicitNamespaces #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
module Maintainability.Freer.Stubs where
import qualified Data.Text as T
import Control.Monad.Freer (Eff, interpret, reinterpret)
import Control.Monad.Freer.State (State, evalState, get, put)
import Control.Monad.Freer.Writer (runWriter, tell)
import Control.Natural (type (~>))
import Data.Text (Text)
import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime)
import Maintainability.Freer.Effects
runArgumentsPure :: [Text] -> Eff (Arguments ': effs) ~> Eff effs
runArgumentsPure args = interpret $ \case
GetArgs -> pure args
runFileSystemPure :: [(Text, Text)] -> Eff (FileSystem ': effs) ~> Eff effs
runFileSystemPure fs = interpret $ \case
ReadFile path ->
maybe (fail $ "readFile: no such file ‘" ++ T.unpack path ++ "’")
pure (lookup path fs)
runLogPure :: Eff (Log ': effs) a -> Eff effs (a, [Text])
runLogPure = runWriter . reinterpret (\case
Log txt -> tell [txt])
-- Time
data ClockState
= ClockStopped !UTCTime
| ClockTick !UTCTime ClockState
| ClockEndOfTime
deriving (Eq, Show)
runClockPure :: ClockState -> Eff (Time ': effs) ~> Eff effs
runClockPure initialState action = evalState initialState (handle action)
where
handle :: Eff (Time ': effs) ~> Eff (State ClockState ': effs)
handle = reinterpret $ \case
CurrentTime -> get >>= \case
ClockStopped t -> pure t
ClockTick t s -> put s >> pure t
ClockEndOfTime -> fail "currentTime: end of time"
runStoppedClockPure :: UTCTime -> Eff (Time ': effs) ~> Eff effs
runStoppedClockPure time = runClockPure (ClockStopped time)
-- | Runs a computation with a clock that advances by 1 second every time the
runTickingClockPure :: UTCTime -> Eff (Time ': effs) ~> Eff effs
runTickingClockPure = runTickingClockPure' 1
runTickingClockPure' :: NominalDiffTime -> UTCTime -> Eff (Time ': effs) ~> Eff effs
runTickingClockPure' d t = runClockPure (ticks t)
where ticks t' = ClockTick t' (ticks (addUTCTime d t'))
runPresetClockPure :: [UTCTime] -> Eff (Time ': effs) ~> Eff effs
runPresetClockPure ts = runClockPure (foldr ClockTick ClockEndOfTime ts)
|
ab9cfb45119a7887ab4e2a3d620c797eaccb02bd15856cd066667be1a4e29ae9 | danprince/wireworld | controls.cljs | (ns wireworld.controls
"Collection of re-agent/hiccup components"
(:require [wireworld.actions :as act]
[wireworld.settings :as s]
[wireworld.encode :as encode]))
(def sprite-size 24)
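;; Draws one 24x24 tile from img/sprites.png by offsetting the
;; background-position of a fixed-size div; x and y are tile indices.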
(defn sprite
[x y]
[:div.sprite
{:style
{:background-image "url(img/sprites.png)"
:background-position-x (str (* x -1 sprite-size) "px")
:background-position-y (str (* y -1 sprite-size) "px")
:height sprite-size
:width sprite-size
:cursor :pointer
:display :inline-block}}])
(def play-icon [sprite 0 0])
(def pause-icon [sprite 1 0])
(def step-icon [sprite 2 0])
(def trash-icon [sprite 3 0])
(def download-icon [sprite 4 0])
(def info-icon [sprite 5 0])
(defn player
[state]
[:div
[:a.icon
{:title "What is Wireworld?"
:href ""}
info-icon]
(if (:paused @state)
[:span
[:span.icon
{:title "Enter"
:on-click #(swap! state act/play)}
play-icon]
[:span.icon
{:title "n"
:on-click #(swap! state act/tick)}
step-icon]]
[:span.icon
{:on-click #(swap! state act/pause)}
pause-icon])])
(defn tool
[state k title]
[:span.swatch
{:title title
:on-click #(swap! state act/select-tool k)
:data-selected (= (get @state :tool) k)
:style {:background (get s/colors k)}}])
(defn select-tool
[state]
[:div
[tool state :empty 1]
[tool state :wire 2]
[tool state :head 3]
[tool state :tail 4]])
(defn toolbar
[app-state]
[:div.toolbar
[:div.fixed.top.left
[player app-state]]
[:div.fixed.top.right
[select-tool app-state]]])
| null | https://raw.githubusercontent.com/danprince/wireworld/3bca90781fb958121208a0f70a7ff3e9154694d8/src/wireworld/controls.cljs | clojure | (ns wireworld.controls
"Collection of re-agent/hiccup components"
(:require [wireworld.actions :as act]
[wireworld.settings :as s]
[wireworld.encode :as encode]))
(def sprite-size 24)
(defn sprite
[x y]
[:div.sprite
{:style
{:background-image "url(img/sprites.png)"
:background-position-x (str (* x -1 sprite-size) "px")
:background-position-y (str (* y -1 sprite-size) "px")
:height sprite-size
:width sprite-size
:cursor :pointer
:display :inline-block}}])
(def play-icon [sprite 0 0])
(def pause-icon [sprite 1 0])
(def step-icon [sprite 2 0])
(def trash-icon [sprite 3 0])
(def download-icon [sprite 4 0])
(def info-icon [sprite 5 0])
(defn player
[state]
[:div
[:a.icon
{:title "What is Wireworld?"
:href ""}
info-icon]
(if (:paused @state)
[:span
[:span.icon
{:title "Enter"
:on-click #(swap! state act/play)}
play-icon]
[:span.icon
{:title "n"
:on-click #(swap! state act/tick)}
step-icon]]
[:span.icon
{:on-click #(swap! state act/pause)}
pause-icon])])
(defn tool
[state k title]
[:span.swatch
{:title title
:on-click #(swap! state act/select-tool k)
:data-selected (= (get @state :tool) k)
:style {:background (get s/colors k)}}])
(defn select-tool
[state]
[:div
[tool state :empty 1]
[tool state :wire 2]
[tool state :head 3]
[tool state :tail 4]])
(defn toolbar
[app-state]
[:div.toolbar
[:div.fixed.top.left
[player app-state]]
[:div.fixed.top.right
[select-tool app-state]]])
|
|
592e7b9312639dc277c4d31f971dcffc2ab13b71f89cd645d2890df5a67f84cb | UU-ComputerScience/uhc | copyFile.hs | module Main where
main = do s <- readFile "copyFile.hs"
writeFile "copyOfCopyFile.hs" s
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/demo/copyFile.hs | haskell | module Main where
main = do s <- readFile "copyFile.hs"
writeFile "copyOfCopyFile.hs" s
|
|
d769266b2f5f6bfaa054ad9ebedcdd8dafd54ee189598a46f4b89982b94b266c | binaryage/chromex | cpu.cljs | (ns chromex.app.system.cpu (:require-macros [chromex.app.system.cpu :refer [gen-wrap]])
(:require [chromex.core]))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get-info* [config]
(gen-wrap :function ::get-info config))
| null | https://raw.githubusercontent.com/binaryage/chromex/33834ba5dd4f4238a3c51f99caa0416f30c308c5/src/apps/chromex/app/system/cpu.cljs | clojure | (ns chromex.app.system.cpu (:require-macros [chromex.app.system.cpu :refer [gen-wrap]])
(:require [chromex.core]))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get-info* [config]
(gen-wrap :function ::get-info config))
|
|
1bcf2f1a4056cd9696f0426fb323cdc25d1db920ff6b773435f06cd4ed474e03 | ChrisPenner/wave-function-collapse | Grid.hs | # LANGUAGE TypeFamilies #
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DerivingStrategies #-}
module WFC.Grid where
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Functor.Rep as R
import Data.Distributive
import Control.Lens hiding (Context)
import qualified Data.Set.NonEmpty as NE
import qualified WFC.Graph as G
import qualified Data.HashMap.Strict as HM
import Data.Hashable
import GHC.Generics (Generic)
import Data.Functor
import Control.Arrow ((&&&))
import WFC.Types
type Coord = (Row, Col)
type Row = Int
type Col = Int
data Dir
= N
| NE
| E
| SE
| S
| SW
| W
| NW
| C
deriving (Eq, Show, Ord, Generic)
deriving anyclass Hashable
data Grid a =
Grid { _graph :: G.Graph Coord Dir a
, _rows :: Row
, _cols :: Col
}
deriving (Show)
makeLenses ''Grid
instance Functor Grid where
fmap f = graph . mapped %~ f
instance Distributive Option where
distribute = distributeRep
instance Representable Option where
type Rep Option = Dir
index (Option nw n ne w c e sw s se) d =
case d of
NW -> nw
N -> n
NE -> ne
E -> e
SE -> se
S -> s
SW -> sw
W -> w
C -> c
tabulate f =
Option
(f NW) (f N) (f NE)
(f W ) (f C) (f E )
(f SW) (f S) (f SE)
data Option a =
Option
a a a
a a a
a a a
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
gridFromText :: T.Text -> Grid Char
gridFromText txt = labeledGrid
where
nodeChars :: M.Map Coord Char
nodeChars = (labelChars rows)
grid = mkDiagGraph numRows numCols
(numRows, numCols, rows) = rectangularize txt
labeledGrid :: Grid Char
labeledGrid = (\c -> (M.findWithDefault ' ' c nodeChars)) <$> grid
rectangularize :: T.Text -> (Row, Col, [T.Text])
rectangularize txt = (length lines', maxLength, padded)
where
lines' = T.lines txt
maxLength = maximum . fmap T.length $ lines'
padded = T.justifyLeft maxLength ' ' <$> lines'
mkDiagGraph :: Row -> Col -> Grid Coord
mkDiagGraph rows cols =
Grid { _graph = G.newGraph vertices' edges'
, _rows = rows
, _cols = cols
}
where
vertices' :: [(Coord, Coord)]
vertices' = fmap (id &&& id) slots
slots :: [Coord]
slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
edges' :: [(Coord, Coord, Dir)]
edges' = do
(r, c) <- slots
(to', e) <- [ ((r - 1, c - 1), NW)
, ((r - 1, c ), N)
, ((r - 1, c + 1), NE)
, ((r , c + 1), E)
, ((r + 1, c + 1), SE)
, ((r + 1, c ), S)
, ((r + 1, c - 1), SW)
, ((r , c - 1), W)
]
return ((r, c), to', e)
-- mkDiagGraph :: Row -> Col -> Grid Coord
-- mkDiagGraph rows cols =
-- Grid { _graph = gr
-- , _rows = rows
-- , _cols = cols
-- }
-- where
( _ , ( nodemap , gr ) ) = NM.run G.empty $ do
-- let slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
for _ slots
for _ slots $ \(r , c ) - > do
-- let addE = addEdge (r, c)
-- addE NW (r - 1, c - 1)
-- addE N (r - 1, c )
-- addE NE (r - 1, c + 1)
-- addE E (r , c + 1)
addE SE ( r + 1 , c + 1 )
addE S ( r + 1 , c )
addE SW ( r + 1 , c - 1 )
addE ( r , c - 1 )
: : Row - > Col - > Grid Coord
rows cols =
-- Grid { _graph = gr
, _ nodeMap =
-- , _rows = rows
-- , _cols = cols
-- }
-- where
( _ , ( nodemap , gr ) ) = NM.run G.empty $ do
-- let slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
for _ slots
for _ slots $ \(r , c ) - > do
-- let addE = addEdge (r, c)
-- addE N (r - 1, c )
-- addE E (r , c + 1)
addE S ( r + 1 , c )
addE ( r , c - 1 )
labelChars :: [T.Text] -> M.Map Coord Char
labelChars rows = M.fromList $ concat labeled
where
lines' :: [String]
lines' = T.unpack <$> rows
labeled :: [[(Coord, Char)]]
labeled = do
(row, trow) <- zip [0..] (zip [0..] <$> lines')
return $ reassoc row <$> trow
reassoc row (col, c) = ((row, col), c)
gridToText :: Grid Char -> T.Text
gridToText grid = T.pack . unlines $ do
r <- [0..(grid ^. rows) - 1]
return $ do
c <- [0..(grid ^. cols) - 1]
return $ grid ^?! graph . G.valueAtKey (r, c)
printOption :: Option Char -> T.Text
printOption o = T.pack $ unlines (fmap (R.index o) <$>
[ [NW, N, NE]
, [W, C, E ]
, [SW, S, SE]])
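-- | A candidate tile is compatible with @choice@ in direction @dir@ when
-- choice's edge cell in that direction equals the candidate's centre and,
-- symmetrically, the candidate's opposite edge equals choice's centre.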
gridFilter :: (Eq a) => Option a -> Dir -> (Option a -> Bool)
gridFilter choice dir other =
R.index choice dir
== R.index other C
&& R.index other (flipDir dir)
== R.index choice C
flipDir :: Dir -> Dir
flipDir NW = SE
flipDir N = S
flipDir NE = SW
flipDir E = W
flipDir SE = NW
flipDir S = N
flipDir SW = NE
flipDir W = E
flipDir C = C
type Position = Option Char
collapseOption :: Option a -> a
collapseOption = flip R.index C
addMirrored :: SuperPos Position -> SuperPos Position
addMirrored (Unknown xs) = Unknown . NE.union xs . NE.map flipper $ xs
where
flipper (Option nw n ne
w c e
sw s se)
= Option ne n nw
e c w
se s sw
addMirrored x = x
collectSuperPositions :: Grid Char -> Maybe (SuperPos Position)
collectSuperPositions grid
= Unknown <$> NE.nonEmptySet allEdges
where
allEdges :: S.Set Position
allEdges =
grid ^. graph . G.edges . itraversed . withIndex . folding (uncurry buildOption) . to S.singleton
findEdge :: G.Vertex -> HM.HashMap Dir G.Vertex -> Dir -> Maybe G.Vertex
findEdge n _ C = Just n
findEdge _ m d = m ^? ix d
buildOption :: G.Vertex -> HM.HashMap Dir G.Vertex -> Maybe Position
buildOption n m = do
opts <- sequenceA $ tabulate (findEdge n m)
traverse (\n -> grid ^? graph . G.valueAt n ) ( opts)
generateGrid :: p -> Row -> Col -> Grid p
generateGrid positions rows cols = mkDiagGraph rows cols $> positions
showSuper :: G.Graph k e (SuperPos (Option Char)) -> G.Graph k e Char
showSuper = fmap force
where
force s | null s = 'X'
force (Unknown x) | length x == 1 = collapseOption $ NE.findMin x
| otherwise = ' '
force (Observed c) = collapseOption c
flatten :: G.Graph k e (Option p) -> G.Graph k e p
flatten = fmap collapseOption
showSuperPos :: SuperPos (Option Char) -> Char
showSuperPos (Observed o) = collapseOption o
showSuperPos (Unknown s) = collapseOption $ NE.findMin s
| null | https://raw.githubusercontent.com/ChrisPenner/wave-function-collapse/9ad25912680dc16b5fe0d125b77dd410e3415614/src/WFC/Grid.hs | haskell | # LANGUAGE DeriveTraversable #
# LANGUAGE DeriveAnyClass #
mkDiagGraph :: Row -> Col -> Grid Coord
mkDiagGraph rows cols =
Grid { _graph = gr
, _rows = rows
, _cols = cols
}
where
let slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
let addE = addEdge (r, c)
addE NW (r - 1, c - 1)
addE N (r - 1, c )
addE NE (r - 1, c + 1)
addE E (r , c + 1)
Grid { _graph = gr
, _rows = rows
, _cols = cols
}
where
let slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
let addE = addEdge (r, c)
addE N (r - 1, c )
addE E (r , c + 1) | # LANGUAGE TypeFamilies #
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
module WFC.Grid where
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Functor.Rep as R
import Data.Distributive
import Control.Lens hiding (Context)
import qualified Data.Set.NonEmpty as NE
import qualified WFC.Graph as G
import qualified Data.HashMap.Strict as HM
import Data.Hashable
import GHC.Generics (Generic)
import Data.Functor
import Control.Arrow ((&&&))
import WFC.Types
type Coord = (Row, Col)
type Row = Int
type Col = Int
data Dir
= N
| NE
| E
| SE
| S
| SW
| W
| NW
| C
deriving (Eq, Show, Ord, Generic)
deriving anyclass Hashable
data Grid a =
Grid { _graph :: G.Graph Coord Dir a
, _rows :: Row
, _cols :: Col
}
deriving (Show)
makeLenses ''Grid
instance Functor Grid where
fmap f = graph . mapped %~ f
instance Distributive Option where
distribute = distributeRep
instance Representable Option where
type Rep Option = Dir
index (Option nw n ne w c e sw s se) d =
case d of
NW -> nw
N -> n
NE -> ne
E -> e
SE -> se
S -> s
SW -> sw
W -> w
C -> c
tabulate f =
Option
(f NW) (f N) (f NE)
(f W ) (f C) (f E )
(f SW) (f S) (f SE)
data Option a =
Option
a a a
a a a
a a a
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
gridFromText :: T.Text -> Grid Char
gridFromText txt = labeledGrid
where
nodeChars :: M.Map Coord Char
nodeChars = (labelChars rows)
grid = mkDiagGraph numRows numCols
(numRows, numCols, rows) = rectangularize txt
labeledGrid :: Grid Char
labeledGrid = (\c -> (M.findWithDefault ' ' c nodeChars)) <$> grid
rectangularize :: T.Text -> (Row, Col, [T.Text])
rectangularize txt = (length lines', maxLength, padded)
where
lines' = T.lines txt
maxLength = maximum . fmap T.length $ lines'
padded = T.justifyLeft maxLength ' ' <$> lines'
mkDiagGraph :: Row -> Col -> Grid Coord
mkDiagGraph rows cols =
Grid { _graph = G.newGraph vertices' edges'
, _rows = rows
, _cols = cols
}
where
vertices' :: [(Coord, Coord)]
vertices' = fmap (id &&& id) slots
slots :: [Coord]
slots = [(r, c) | r <- [0..rows - 1], c <- [0..cols - 1]]
edges' :: [(Coord, Coord, Dir)]
edges' = do
(r, c) <- slots
(to', e) <- [ ((r - 1, c - 1), NW)
, ((r - 1, c ), N)
, ((r - 1, c + 1), NE)
, ((r , c + 1), E)
, ((r + 1, c + 1), SE)
, ((r + 1, c ), S)
, ((r + 1, c - 1), SW)
, ((r , c - 1), W)
]
return ((r, c), to', e)
( _ , ( nodemap , gr ) ) = NM.run G.empty $ do
for _ slots
for _ slots $ \(r , c ) - > do
addE SE ( r + 1 , c + 1 )
addE S ( r + 1 , c )
addE SW ( r + 1 , c - 1 )
addE ( r , c - 1 )
: : Row - > Col - > Grid Coord
rows cols =
, _ nodeMap =
( _ , ( nodemap , gr ) ) = NM.run G.empty $ do
for _ slots
for _ slots $ \(r , c ) - > do
addE S ( r + 1 , c )
addE ( r , c - 1 )
labelChars :: [T.Text] -> M.Map Coord Char
labelChars rows = M.fromList $ concat labeled
where
lines' :: [String]
lines' = T.unpack <$> rows
labeled :: [[(Coord, Char)]]
labeled = do
(row, trow) <- zip [0..] (zip [0..] <$> lines')
return $ reassoc row <$> trow
reassoc row (col, c) = ((row, col), c)
gridToText :: Grid Char -> T.Text
gridToText grid = T.pack . unlines $ do
r <- [0..(grid ^. rows) - 1]
return $ do
c <- [0..(grid ^. cols) - 1]
return $ grid ^?! graph . G.valueAtKey (r, c)
printOption :: Option Char -> T.Text
printOption o = T.pack $ unlines (fmap (R.index o) <$>
[ [NW, N, NE]
, [W, C, E ]
, [SW, S, SE]])
gridFilter :: (Eq a) => Option a -> Dir -> (Option a -> Bool)
gridFilter choice dir other =
R.index choice dir
== R.index other C
&& R.index other (flipDir dir)
== R.index choice C
flipDir :: Dir -> Dir
flipDir NW = SE
flipDir N = S
flipDir NE = SW
flipDir E = W
flipDir SE = NW
flipDir S = N
flipDir SW = NE
flipDir W = E
flipDir C = C
type Position = Option Char
collapseOption :: Option a -> a
collapseOption = flip R.index C
addMirrored :: SuperPos Position -> SuperPos Position
addMirrored (Unknown xs) = Unknown . NE.union xs . NE.map flipper $ xs
where
flipper (Option nw n ne
w c e
sw s se)
= Option ne n nw
e c w
se s sw
addMirrored x = x
collectSuperPositions :: Grid Char -> Maybe (SuperPos Position)
collectSuperPositions grid
= Unknown <$> NE.nonEmptySet allEdges
where
allEdges :: S.Set Position
allEdges =
grid ^. graph . G.edges . itraversed . withIndex . folding (uncurry buildOption) . to S.singleton
findEdge :: G.Vertex -> HM.HashMap Dir G.Vertex -> Dir -> Maybe G.Vertex
findEdge n _ C = Just n
findEdge _ m d = m ^? ix d
buildOption :: G.Vertex -> HM.HashMap Dir G.Vertex -> Maybe Position
buildOption n m = do
opts <- sequenceA $ tabulate (findEdge n m)
traverse (\n -> grid ^? graph . G.valueAt n ) ( opts)
generateGrid :: p -> Row -> Col -> Grid p
generateGrid positions rows cols = mkDiagGraph rows cols $> positions
showSuper :: G.Graph k e (SuperPos (Option Char)) -> G.Graph k e Char
showSuper = fmap force
where
force s | null s = 'X'
force (Unknown x) | length x == 1 = collapseOption $ NE.findMin x
| otherwise = ' '
force (Observed c) = collapseOption c
flatten :: G.Graph k e (Option p) -> G.Graph k e p
flatten = fmap collapseOption
showSuperPos :: SuperPos (Option Char) -> Char
showSuperPos (Observed o) = collapseOption o
showSuperPos (Unknown s) = collapseOption $ NE.findMin s
|
cffcc04bf2b4f66a2f0407c6248145a1066163506e61475054dd71d4b96146cc | eareese/htdp-exercises | 142-ill-sized.rkt | #lang htdp/bsl
(require 2htdp/image)
;; Exercise 142. Design ill-sized?. The function consumes a list of images loi and a positive number n. It produces the first image on loi that is not an n by n square; if it cannot find such an image, it produces #false.
;; Hint Use
;; ; ImageOrFalse is one of:
;; ; – Image
;; ; – #false
;; for the result part of the signature.
; A List-of-images is one of:
; - '()
; - (cons Image List-of-images)
; ImageOrFalse is one of:
; – Image
; – #false
; List-of-images PositiveNumber -> ImageOrFalse
; consumes a List-of-images loi and a positive number n, then it
; produces the first image on loi that is NOT an n x n square.
; If such an image is not found, it produces #false.
(define SQ25 (rectangle 25 25 "solid" "blue"))
(define SQ9 (rectangle 9 9 "solid" "blue"))
(define R5 (rectangle 5 25 "solid" "blue"))
(check-expect (ill-sized?
(cons SQ25 (cons SQ9 (cons R5 '())))
25)
SQ9)
(check-expect (ill-sized?
(cons R5 (cons SQ9 '())) 9) R5)
(check-expect (ill-sized?
(cons SQ9 '()) 9) #f)
(define (ill-sized? loi n)
(cond [(empty? loi) #f]
[(cons? loi)
(if (and
(not (eq? (image-width (first loi)) n))
(not (eq? (image-height (first loi)) n)))
(first loi)
(ill-sized? (rest loi) n))]))
| null | https://raw.githubusercontent.com/eareese/htdp-exercises/a85ff3111d459dda0e94d9b463d01a09accbf9bf/part02-arbitrarily-large-data/142-ill-sized.rkt | racket | if it can not find such an image , it produces # false .
Hint Use
; ImageOrFalse is one of:
; – Image
; – #false
for the result part of the signature.
- '()
ImageOrFalse is one of:
– Image
– #false
List-of-images PositiveNumber -> ImageOrFalse
consumes a List-of-images loi and a positive number n, then it
If such an image is not found, it produces #false. | #lang htdp/bsl
(require 2htdp/image)
; A List-of-images is one of:
; - (cons Image List-of-images)
; produces the first image on loi that is NOT an n x n square.
(define SQ25 (rectangle 25 25 "solid" "blue"))
(define SQ9 (rectangle 9 9 "solid" "blue"))
(define R5 (rectangle 5 25 "solid" "blue"))
(check-expect (ill-sized?
(cons SQ25 (cons SQ9 (cons R5 '())))
25)
SQ9)
(check-expect (ill-sized?
(cons R5 (cons SQ9 '())) 9) R5)
(check-expect (ill-sized?
(cons SQ9 '()) 9) #f)
(define (ill-sized? loi n)
(cond [(empty? loi) #f]
[(cons? loi)
(if (and
(not (eq? (image-width (first loi)) n))
(not (eq? (image-height (first loi)) n)))
(first loi)
(ill-sized? (rest loi) n))]))
|
9f0f7b7afe1f4023af3ed6f73b5c0e11830349a118a90c826f8ff2cf2aaed7cc | tsloughter/kuberl | kuberl_v1beta1_self_subject_rules_review_spec.erl | -module(kuberl_v1beta1_self_subject_rules_review_spec).
-export([encode/1]).
-export_type([kuberl_v1beta1_self_subject_rules_review_spec/0]).
-type kuberl_v1beta1_self_subject_rules_review_spec() ::
#{ 'namespace' => binary()
}.
encode(#{ 'namespace' := Namespace
}) ->
#{ 'namespace' => Namespace
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_self_subject_rules_review_spec.erl | erlang | -module(kuberl_v1beta1_self_subject_rules_review_spec).
-export([encode/1]).
-export_type([kuberl_v1beta1_self_subject_rules_review_spec/0]).
-type kuberl_v1beta1_self_subject_rules_review_spec() ::
#{ 'namespace' => binary()
}.
encode(#{ 'namespace' := Namespace
}) ->
#{ 'namespace' => Namespace
}.
|
|
7e1ee049fd5f9c233ddec373f1167d312b7346600759ca33937d28040e26df5f | racket/gui | info.rkt | #lang info
(define collection 'multi)
(define deps '("base" "string-constants-lib"))
(define build-deps '("racket-index"
"scheme-lib"
"draw-lib"
"racket-test"
"sgl"
"snip-lib"
"wxme-lib"
"gui-lib"
"syntax-color-lib"
"rackunit-lib"
"pconvert-lib"
"compatibility-lib"
"sandbox-lib"
"simple-tree-text-markup-lib"
"pict-lib"
"pict-snip-lib"))
(define update-implies '("gui-lib"))
(define pkg-desc "tests for \"gui\"")
(define pkg-authors '(mflatt robby))
(define license
'(Apache-2.0 OR MIT))
| null | https://raw.githubusercontent.com/racket/gui/0e9e69028bfb63fb6b968b4e100e231c54273d86/gui-test/info.rkt | racket | #lang info
(define collection 'multi)
(define deps '("base" "string-constants-lib"))
(define build-deps '("racket-index"
"scheme-lib"
"draw-lib"
"racket-test"
"sgl"
"snip-lib"
"wxme-lib"
"gui-lib"
"syntax-color-lib"
"rackunit-lib"
"pconvert-lib"
"compatibility-lib"
"sandbox-lib"
"simple-tree-text-markup-lib"
"pict-lib"
"pict-snip-lib"))
(define update-implies '("gui-lib"))
(define pkg-desc "tests for \"gui\"")
(define pkg-authors '(mflatt robby))
(define license
'(Apache-2.0 OR MIT))
|
|
5e0fef35baea1ab989c3a6a9f164f14eb9c5a598f24662a643788df9aa23aa39 | MyDataFlow/ttalk-server | mod_admin_extra_srg.erl | %%%-------------------------------------------------------------------
%%% File : mod_admin_extra_srg.erl
Author : > , < >
%%% Purpose : Contributed administrative functions and commands
Created : 10 Aug 2008 by >
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2008 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
%%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
%%% 02111-1307 USA
%%%
%%%-------------------------------------------------------------------
-module(mod_admin_extra_srg).
-author('').
-export([
commands/0,
srg_create/5,
srg_delete/2,
srg_list/1,
srg_get_info/2,
srg_get_members/2,
srg_user_add/4,
srg_user_del/4
]).
-include("ejabberd.hrl").
-include("ejabberd_commands.hrl").
-include("mod_roster.hrl").
-include("jlib.hrl").
-include_lib("exml/include/exml.hrl").
%%%
%%% Register commands
%%%
-spec commands() -> [ejabberd_commands:cmd(),...].
commands() ->
[
#ejabberd_commands{name = srg_create, tags = [shared_roster_group],
desc = "Create a Shared Roster Group",
longdesc = "If you want to specify several group "
"identifiers in the Display argument,\n"
"put \\ \" around the argument and\nseparate the "
"identifiers with \\ \\ n\n"
"For example:\n"
" mongooseimctl srg_create group3 localhost "
"name desc \\\"group1\\\\ngroup2\\\"",
module = ?MODULE, function = srg_create,
args = [{group, binary}, {host, binary},
{name, binary}, {description, binary}, {display, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_delete, tags = [shared_roster_group],
desc = "Delete a Shared Roster Group",
module = ?MODULE, function = srg_delete,
args = [{group, binary}, {host, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_list, tags = [shared_roster_group],
desc = "List the Shared Roster Groups in Host",
module = ?MODULE, function = srg_list,
args = [{host, binary}],
result = {groups, {list, {id, string}}}},
#ejabberd_commands{name = srg_get_info, tags = [shared_roster_group],
desc = "Get info of a Shared Roster Group",
module = ?MODULE, function = srg_get_info,
args = [{group, binary}, {host, binary}],
result = {informations, {list, {information, {tuple, [{key, string}, {value, string}]}}}}},
#ejabberd_commands{name = srg_get_members, tags = [shared_roster_group],
desc = "Get members of a Shared Roster Group",
module = ?MODULE, function = srg_get_members,
args = [{group, binary}, {host, binary}],
result = {members, {list, {member, string}}}},
#ejabberd_commands{name = srg_user_add, tags = [shared_roster_group],
desc = "Add the JID user@host to the Shared Roster Group",
module = ?MODULE, function = srg_user_add,
args = [{user, binary}, {host, binary}, {group, binary}, {grouphost, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_user_del, tags = [shared_roster_group],
desc = "Delete this JID user@host from the Shared Roster Group",
module = ?MODULE, function = srg_user_del,
args = [{user, binary}, {host, binary}, {group, binary}, {grouphost, binary}],
result = {res, rescode}}
].
%%%
Shared Roster Groups
%%%
-type group() :: binary().
-spec srg_create(group(), ejabberd:server(), ejabberd:user(),
Description :: binary(), Display :: binary() | []) -> 'ok'.
srg_create(Group, Host, Name, Description, Display) ->
DisplayList = case Display of
[] -> [];
_ -> binary:split(Display, <<"\\\\n">>)
end,
Opts = [{name, Name},
{displayed_groups, DisplayList},
{description, Description}],
{atomic, ok} = mod_shared_roster:create_group(Host, Group, Opts),
ok.
-spec srg_delete(group(), ejabberd:server()) -> 'ok'.
srg_delete(Group, Host) ->
{atomic, ok} = mod_shared_roster:delete_group(Host, Group),
ok.
-spec srg_list(ejabberd:server()) -> [group()].
srg_list(Host) ->
lists:sort(mod_shared_roster:list_groups(Host)).
-spec srg_get_info(group(), ejabberd:server()) -> [{string(), string()}].
srg_get_info(Group, Host) ->
Opts = mod_shared_roster:get_group_opts(Host,Group),
[{io_lib:format("~p", [Title]),
io_lib:format("~p", [Value])} || {Title, Value} <- Opts].
-spec srg_get_members(group(), ejabberd:server()) -> [binary()].
srg_get_members(Group, Host) ->
Members = mod_shared_roster:get_group_explicit_users(Host,Group),
[jid:to_binary(jid:make(MUser, MServer, <<"">>))
|| {MUser, MServer} <- Members].
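%% Sketch with hypothetical data: if the group has one explicit member
%% {<<"alice">>, <<"localhost">>}, the comprehension above returns
%% [<<"alice@localhost">>] (jid:make/3 with an empty resource, then
%% jid:to_binary/1).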
-spec srg_user_add(ejabberd:user(), ejabberd:server(),
group(), GroupHost :: ejabberd:server()) -> 'ok'.
srg_user_add(User, Host, Group, GroupHost) ->
{atomic, ok} = mod_shared_roster:add_user_to_group(GroupHost, {User, Host}, Group),
ok.
-spec srg_user_del(ejabberd:user(), ejabberd:server(),
group(), GroupHost :: ejabberd:server()) -> 'ok'.
srg_user_del(User, Host, Group, GroupHost) ->
{atomic, ok} = mod_shared_roster:remove_user_from_group(GroupHost, {User, Host}, Group),
ok.
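%% Sketch of a typical membership round trip (hypothetical names):
%%   ok = srg_user_add(<<"alice">>, <<"localhost">>, <<"group3">>, <<"localhost">>),
%%   ok = srg_user_del(<<"alice">>, <<"localhost">>, <<"group3">>, <<"localhost">>).
%% Both functions match on {atomic, ok}, so a storage failure crashes the
%% command instead of returning an error tuple.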
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/apps/ejabberd/src/mod_admin_extra_srg.erl | erlang | -------------------------------------------------------------------
File : mod_admin_extra_srg.erl
Purpose : Contributed administrative functions and commands
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program; if not, write to the Free Software
-------------------------------------------------------------------
Register commands
| Author : > , < >
Created : 10 Aug 2008 by >
ejabberd , Copyright ( C ) 2002 - 2008 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
-module(mod_admin_extra_srg).
-author('').
-export([
commands/0,
srg_create/5,
srg_delete/2,
srg_list/1,
srg_get_info/2,
srg_get_members/2,
srg_user_add/4,
srg_user_del/4
]).
-include("ejabberd.hrl").
-include("ejabberd_commands.hrl").
-include("mod_roster.hrl").
-include("jlib.hrl").
-include_lib("exml/include/exml.hrl").
-spec commands() -> [ejabberd_commands:cmd(),...].
commands() ->
[
#ejabberd_commands{name = srg_create, tags = [shared_roster_group],
desc = "Create a Shared Roster Group",
longdesc = "If you want to specify several group "
"identifiers in the Display argument,\n"
"put \\ \" around the argument and\nseparate the "
"identifiers with \\ \\ n\n"
"For example:\n"
" mongooseimctl srg_create group3 localhost "
"name desc \\\"group1\\\\ngroup2\\\"",
module = ?MODULE, function = srg_create,
args = [{group, binary}, {host, binary},
{name, binary}, {description, binary}, {display, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_delete, tags = [shared_roster_group],
desc = "Delete a Shared Roster Group",
module = ?MODULE, function = srg_delete,
args = [{group, binary}, {host, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_list, tags = [shared_roster_group],
desc = "List the Shared Roster Groups in Host",
module = ?MODULE, function = srg_list,
args = [{host, binary}],
result = {groups, {list, {id, string}}}},
#ejabberd_commands{name = srg_get_info, tags = [shared_roster_group],
desc = "Get info of a Shared Roster Group",
module = ?MODULE, function = srg_get_info,
args = [{group, binary}, {host, binary}],
result = {informations, {list, {information, {tuple, [{key, string}, {value, string}]}}}}},
#ejabberd_commands{name = srg_get_members, tags = [shared_roster_group],
desc = "Get members of a Shared Roster Group",
module = ?MODULE, function = srg_get_members,
args = [{group, binary}, {host, binary}],
result = {members, {list, {member, string}}}},
#ejabberd_commands{name = srg_user_add, tags = [shared_roster_group],
desc = "Add the JID user@host to the Shared Roster Group",
module = ?MODULE, function = srg_user_add,
args = [{user, binary}, {host, binary}, {group, binary}, {grouphost, binary}],
result = {res, rescode}},
#ejabberd_commands{name = srg_user_del, tags = [shared_roster_group],
desc = "Delete this JID user@host from the Shared Roster Group",
module = ?MODULE, function = srg_user_del,
args = [{user, binary}, {host, binary}, {group, binary}, {grouphost, binary}],
result = {res, rescode}}
].
Shared Roster Groups
-type group() :: binary().
-spec srg_create(group(), ejabberd:server(), ejabberd:user(),
Description :: binary(), Display :: binary() | []) -> 'ok'.
srg_create(Group, Host, Name, Description, Display) ->
DisplayList = case Display of
[] -> [];
_ -> binary:split(Display, <<"\\\\n">>)
end,
Opts = [{name, Name},
{displayed_groups, DisplayList},
{description, Description}],
{atomic, ok} = mod_shared_roster:create_group(Host, Group, Opts),
ok.
-spec srg_delete(group(), ejabberd:server()) -> 'ok'.
srg_delete(Group, Host) ->
{atomic, ok} = mod_shared_roster:delete_group(Host, Group),
ok.
-spec srg_list(ejabberd:server()) -> [group()].
srg_list(Host) ->
lists:sort(mod_shared_roster:list_groups(Host)).
-spec srg_get_info(group(), ejabberd:server()) -> [{string(), string()}].
srg_get_info(Group, Host) ->
Opts = mod_shared_roster:get_group_opts(Host,Group),
[{io_lib:format("~p", [Title]),
io_lib:format("~p", [Value])} || {Title, Value} <- Opts].
-spec srg_get_members(group(), ejabberd:server()) -> [binary()].
srg_get_members(Group, Host) ->
Members = mod_shared_roster:get_group_explicit_users(Host,Group),
[jid:to_binary(jid:make(MUser, MServer, <<"">>))
|| {MUser, MServer} <- Members].
-spec srg_user_add(ejabberd:user(), ejabberd:server(),
group(), GroupHost :: ejabberd:server()) -> 'ok'.
srg_user_add(User, Host, Group, GroupHost) ->
{atomic, ok} = mod_shared_roster:add_user_to_group(GroupHost, {User, Host}, Group),
ok.
-spec srg_user_del(ejabberd:user(), ejabberd:server(),
group(), GroupHost :: ejabberd:server()) -> 'ok'.
srg_user_del(User, Host, Group, GroupHost) ->
{atomic, ok} = mod_shared_roster:remove_user_from_group(GroupHost, {User, Host}, Group),
ok.
|
f29690cedce1bac8e1bbc2116a3f4606e755279f386b6fc47f75a05926370efe | tonsky/down-the-rabbit-hole | rabbit.clj | (ns down-the-rabbit-hole.rabbit
(:require
[down-the-rabbit-hole.core :as core])
(:import
[org.jetbrains.skija Canvas Point Rect]))
(defmethod core/render :renderer/rabbit [canvas db game entity]
(let [{:game/keys [now]} game
[x y] (cond
(core/in-phase? game :phase/player-enter) [276 -128]
(core/in-phase? game :phase/enemy-enter) [276 (core/lerp-phase game -128 48)]
(core/in-phase? game :phase/players-separate) [(core/lerp-phase game 276 326) 48]
:else [326 48])
sprite (-> (:game/now game) (/ 100) (mod 5) long)
dy (core/oscillation (:game/now game) 0 3000 10)
{:keys [center]} entity]
(core/draw-sprite canvas (* sprite 32) 128 32 64 x (+ y dy))
(when (core/in-phase? game :phase/items-enter :phase/player-turn :phase/player-items-leave :phase/enemy-turn :phase/enemy-items-leave)
(core/draw-text-centered canvas
(str "♥" (:health entity) " ⚡" (:energy entity))
(+ x 16) 140))))
(defn rabbit-tx []
[{:role :role/enemy
:renderer :renderer/rabbit
:z-index 200
   ; :center (Point. 292 -64)
   ; (Rect/makeXYWH 260 32 64 96)
; :hoverable true
:health 10
:energy 3}]) | null | https://raw.githubusercontent.com/tonsky/down-the-rabbit-hole/cca89725c14f3a316f6a8bf44fe3a1ee2b04617c/src/down_the_rabbit_hole/rabbit.clj | clojure | :hoverable true | (ns down-the-rabbit-hole.rabbit
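;; Illustrative only: rabbit-tx returns a plain vector of entity maps, so the
;; caller is expected to transact it into the game's DataScript-style db;
;; hypothetical connection/API: (d/transact! conn (rabbit-tx))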
(:require
[down-the-rabbit-hole.core :as core])
(:import
[org.jetbrains.skija Canvas Point Rect]))
(defmethod core/render :renderer/rabbit [canvas db game entity]
(let [{:game/keys [now]} game
[x y] (cond
(core/in-phase? game :phase/player-enter) [276 -128]
(core/in-phase? game :phase/enemy-enter) [276 (core/lerp-phase game -128 48)]
(core/in-phase? game :phase/players-separate) [(core/lerp-phase game 276 326) 48]
:else [326 48])
sprite (-> (:game/now game) (/ 100) (mod 5) long)
dy (core/oscillation (:game/now game) 0 3000 10)
{:keys [center]} entity]
(core/draw-sprite canvas (* sprite 32) 128 32 64 x (+ y dy))
(when (core/in-phase? game :phase/items-enter :phase/player-turn :phase/player-items-leave :phase/enemy-turn :phase/enemy-items-leave)
(core/draw-text-centered canvas
(str "♥" (:health entity) " ⚡" (:energy entity))
(+ x 16) 140))))
(defn rabbit-tx []
[{:role :role/enemy
:renderer :renderer/rabbit
:z-index 200
: center ( Point . 292 -64 )
: ( Rect / makeXYWH 260 32 64 96 )
:health 10
:energy 3}]) |
75dd340eb18cc7ef734473ec098a9a1c3bf5b019ae53ebaf92d9ec3f9f1f8ce6 | reactiveml/rml | global.ml | (**********************************************************************)
(* *)
(* ReactiveML *)
(* *)
(* *)
(* *)
(* Louis Mandel *)
(* *)
Copyright 2002 , 2007 . All rights reserved .
(* This file is distributed under the terms of the Q Public License *)
version 1.0 .
(* *)
(* ReactiveML has been done in the following labs: *)
- theme SPI , Laboratoire d'Informatique de Paris 6 ( 2002 - 2005 )
- Verimag , CNRS Grenoble ( 2005 - 2006 )
- projet , ( 2006 - 2007 )
(* *)
(**********************************************************************)
(* file: global.ml *)
(* Warning: *)
(* This file is based on the original version of globals.ml *)
from the CamlLight 0.75 distribution , INRIA
first modification : 2004 - 04 - 23
modified by :
$ Id$
open Rml_misc
(* values in the symbol table *)
type 'a global =
{ mutable gi: Global_ident.qualified_ident;
mutable info: 'a option }
let little_name_of_global g = Global_ident.little_name g.gi
let no_info() = None
let gi gl = gl.gi
let info gl =
match gl.info with
| None -> assert false
| Some i -> i
| null | https://raw.githubusercontent.com/reactiveml/rml/d3ac141bd9c6e3333b678716166d988ce04b5c80/compiler/global/global.ml | ocaml | ********************************************************************
ReactiveML
Louis Mandel
This file is distributed under the terms of the Q Public License
ReactiveML has been done in the following labs:
********************************************************************
file: global.ml
Warning:
This file is based on the original version of globals.ml
values in the symbol table | Copyright 2002 , 2007 . All rights reserved .
version 1.0 .
- theme SPI , Laboratoire d'Informatique de Paris 6 ( 2002 - 2005 )
- Verimag , CNRS Grenoble ( 2005 - 2006 )
- projet , ( 2006 - 2007 )
from the CamlLight 0.75 distribution , INRIA
first modification : 2004 - 04 - 23
modified by :
$ Id$
open Rml_misc
type 'a global =
{ mutable gi: Global_ident.qualified_ident;
mutable info: 'a option }
let little_name_of_global g = Global_ident.little_name g.gi
let no_info() = None
let gi gl = gl.gi
let info gl =
match gl.info with
| None -> assert false
| Some i -> i
|
d7ebe081b1b24d0f49120c530cd4b1e1a0e9a045190f698a647a639496c51f04 | oubiwann/star-traders | player_test.clj | (ns starlanes.player-test
(:require [clojure.test :refer :all]
[starlanes.game.base :as game]
[starlanes.player :as player]
[starlanes.util :as util]))
(deftest test-player-data-factory
(let [player (player/player-data-factory)]
(is (= (player :name) ""))
(is (= (player :cash) 0.0))
(is (= (player :stocks) nil)))) | null | https://raw.githubusercontent.com/oubiwann/star-traders/8d7b85ce9ad6446f3b766cced08c120787a82352/clojure/test/starlanes/player_test.clj | clojure | (ns starlanes.player-test
(:require [clojure.test :refer :all]
[starlanes.game.base :as game]
[starlanes.player :as player]
[starlanes.util :as util]))
(deftest test-player-data-factory
(let [player (player/player-data-factory)]
(is (= (player :name) ""))
(is (= (player :cash) 0.0))
(is (= (player :stocks) nil)))) |
|
2eb9b5c1a4aaf9cc61ba58821c9a83d5b4b21c03221f4feedbabe64cb677a3f3 | maximedenes/native-coq | wg_Find.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
class finder : GText.view ->
object
method coerce : GObj.widget
method hide : unit -> unit
method show_find : unit -> unit
method show_replace : unit -> unit
method replace : unit -> unit
method replace_all : unit -> unit
method find_backward : unit -> unit
method find_forward : unit -> unit
end
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/ide/wg_Find.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
class finder : GText.view ->
object
method coerce : GObj.widget
method hide : unit -> unit
method show_find : unit -> unit
method show_replace : unit -> unit
method replace : unit -> unit
method replace_all : unit -> unit
method find_backward : unit -> unit
method find_forward : unit -> unit
end
|
32584f6e1fa7d5293246e2764e63baa63be81d20f2520ebeb8eaa7d3fe10b1c8 | kraison/vivace-graph | auxfns.lisp | ;;; -*- Mode: Lisp; Syntax: Common-Lisp -*-
Code from Paradigms of AI Programming
Copyright ( c ) 1991
;;; File auxfns.lisp: Auxiliary functions used by all other programs
;;; Load this file before running any other programs.
;;;; Implementation-Specific Details
(eval-when (eval compile load)
;; Make it ok to place a function definition on a built-in LISP symbol.
#+(or Allegro EXCL)
(dolist (pkg '(excl common-lisp common-lisp-user))
(setf (excl:package-definition-lock (find-package pkg)) nil))
;; Don't warn if a function is defined in multiple files --
;; this happens often since we refine several programs.
#+Lispworks
(setq *PACKAGES-FOR-WARN-ON-REDEFINITION* nil)
#+LCL
(compiler-options :warnings nil)
)
;;;; REQUIRES
;;; The function REQUIRES is used in subsequent files to state dependencies
;;; between files. The current definition just loads the required files,
;;; assumming they match the pathname specified in *PAIP-DIRECTORY*.
;;; You should change that to match where you have stored the files.
;;; A more sophisticated REQUIRES would only load it if it has not yet
;;; been loaded, and would search in different directories if needed.
(defun requires (&rest files)
"The arguments are files that are required to run an application."
(mapc #'load-paip-file files))
(defvar *paip-files*
`("auxfns" "tutor" "examples"
"intro" "simple" "overview" "gps1" "gps" "eliza1" "eliza" "patmatch"
"eliza-pm" "search" "gps-srch" "student" "macsyma" "macsymar" "unify"
"prolog1" "prolog" "prologc1" "prologc2" "prologc" "prologcp"
"clos" "krep1" "krep2" "krep" "cmacsyma" "mycin" "mycin-r" "waltz"
"othello" "othello2" "syntax1" "syntax2" "syntax3" "unifgram"
"grammar" "lexicon" "interp1" "interp2" "interp3"
"compile1" "compile2" "compile3" "compopt"))
(defparameter *paip-directory*
(make-pathname :name nil :type nil
:defaults (or (and (boundp '*load-truename*) *load-truename*)
(truename ""))) ;;??? Maybe Change this
"The location of the source files for this book. If things don't work,
change it to reflect the location of the files on your computer.")
(defparameter *paip-source*
(make-pathname :name nil :type "lisp" ;;??? Maybe Change this
:defaults *paip-directory*))
(defparameter *paip-binary*
(make-pathname
:name nil
:type (first (list #+LCL (first *load-binary-pathname-types*)
#+Lispworks system::*binary-file-type*
#+MCL "fasl"
#+Allegro excl:*fasl-default-type*
#+(or AKCL KCL) "o"
#+CMU "sparcf"
#+CLISP "fas"
"bin")) ;;??? Maybe Change this
:directory (append (pathname-directory *paip-source*) '("bin"))
:defaults *paip-directory*))
(defun paip-pathname (name &optional (type :lisp))
(make-pathname :name name
:defaults (ecase type
((:lisp :source) *paip-source*)
((:binary :bin) *paip-binary*))))
(defun compile-all-paip-files ()
(mapc #'compile-paip-file *paip-files*))
(defun compile-paip-file (name)
(let ((path (paip-pathname name :lisp)))
(load path)
(compile-file path :output-file (paip-pathname name :binary))))
(defun load-paip-file (file)
"Load the binary file if it exists and is newer, else load the source."
(let* ((src (paip-pathname file :lisp))
(src-date (file-write-date src))
(bin (paip-pathname file :binary))
(bin-date (file-write-date bin)))
(load (if (and (probe-file bin) src-date bin-date (>= bin-date src-date))
bin
src))))
Macros ( formerly in auxmacs.lisp : that file no longer needed )
(eval-when (load eval compile)
(defmacro once-only (variables &rest body)
"Returns the code built by BODY. If any of VARIABLES
might have side effects, they are evaluated once and stored
in temporary variables that are then passed to BODY."
(assert (every #'symbolp variables))
(let ((temps nil))
(dotimes (i (length variables)) (push (gensym) temps))
`(if (every #'side-effect-free? (list .,variables))
(progn .,body)
(list 'let
,`(list ,@(mapcar #'(lambda (tmp var)
`(list ',tmp ,var))
temps variables))
(let ,(mapcar #'(lambda (var tmp) `(,var ',tmp))
variables temps)
.,body)))))
(defun side-effect-free? (exp)
"Is exp a constant, variable, or function,
or of the form (THE type x) where x is side-effect-free?"
(or (atom exp) (constantp exp)
(starts-with exp 'function)
(and (starts-with exp 'the)
(side-effect-free? (third exp)))))
(defmacro funcall-if (fn arg)
(once-only (fn)
`(if ,fn (funcall ,fn ,arg) ,arg)))
(defmacro read-time-case (first-case &rest other-cases)
"Do the first case, where normally cases are
specified with #+ or possibly #- marks."
(declare (ignore other-cases))
first-case)
(defun rest2 (x)
"The rest of a list after the first TWO elements."
(rest (rest x)))
(defun find-anywhere (item tree)
"Does item occur anywhere in tree?"
(if (atom tree)
(if (eql item tree) tree)
(or (find-anywhere item (first tree))
(find-anywhere item (rest tree)))))
(defun starts-with (list x)
"Is x a list whose first element is x?"
(and (consp list) (eql (first list) x)))
)
;;;; Auxiliary Functions
(setf (symbol-function 'find-all-if) #'remove-if-not)
(defun find-all (item sequence &rest keyword-args
&key (test #'eql) test-not &allow-other-keys)
"Find all those elements of sequence that match item,
according to the keywords. Doesn't alter sequence."
(if test-not
(apply #'remove item sequence
:test-not (complement test-not) keyword-args)
(apply #'remove item sequence
:test (complement test) keyword-args)))
(defun partition-if (pred list)
"Return 2 values: elements of list that satisfy pred,
and elements that don't."
(let ((yes-list nil)
(no-list nil))
(dolist (item list)
(if (funcall pred item)
(push item yes-list)
(push item no-list)))
(values (nreverse yes-list) (nreverse no-list))))
(defun maybe-add (op exps &optional if-nil)
"For example, (maybe-add 'and exps t) returns
t if exps is nil, exps if there is only one,
and (and exp1 exp2...) if there are several exps."
(cond ((null exps) if-nil)
((length=1 exps) (first exps))
(t (cons op exps))))
;;; ==============================
(defun seq-ref (seq index)
"Return code that indexes into a sequence, using
the pop-lists/aref-vectors strategy."
`(if (listp ,seq)
(prog1 (first ,seq)
(setq ,seq (the list (rest ,seq))))
(aref ,seq ,index)))
(defun maybe-set-fill-pointer (array new-length)
"If this is an array with a fill pointer, set it to
new-length, if that is longer than the current length."
(if (and (arrayp array)
(array-has-fill-pointer-p array))
(setf (fill-pointer array)
(max (fill-pointer array) new-length))))
;;; ==============================
;;; NOTE: In ANSI Common Lisp, the effects of adding a definition (or most
;;; anything else) to a symbol in the common-lisp package is undefined.
;;; Therefore, it would be best to rename the function SYMBOL to something
;;; else. This has not been done (for compatibility with the book).
(defun mk-symbol (&rest args)
"Concatenate symbols or strings to form an interned symbol"
(intern (format nil "~{~a~}" args)))
(defun new-symbol (&rest args)
"Concatenate symbols or strings to form an uninterned symbol"
(make-symbol (format nil "~{~a~}" args)))
(defun last1 (list)
"Return the last element (not last cons cell) of list"
(first (last list)))
;;; ==============================
(defun mappend (fn list)
"Append the results of calling fn on each element of list.
Like mapcon, but uses append instead of nconc."
(apply #'append (mapcar fn list)))
(defun mklist (x)
"If x is a list return it, otherwise return the list of x"
(if (listp x) x (list x)))
(defun flatten (exp)
"Get rid of imbedded lists (to one level only)."
(mappend #'mklist exp))
(defun random-elt (seq)
"Pick a random element out of a sequence."
(elt seq (random (length seq))))
;;; ==============================
(defun member-equal (item list)
(member item list :test #'equal))
;;; ==============================
(defun compose (&rest functions)
#'(lambda (x)
(reduce #'funcall functions :from-end t :initial-value x)))
The Debugging Output Facility :
(defvar *dbg-ids* nil "Identifiers used by dbg")
(defun dbg (id format-string &rest args)
"Print debugging info if (DEBUG ID) has been specified."
(when (member id *dbg-ids*)
(fresh-line *debug-io*)
(apply #'format *debug-io* format-string args)))
(defun my-debug (&rest ids)
"Start dbg output on the given ids."
(setf *dbg-ids* (union ids *dbg-ids*)))
(defun undebug (&rest ids)
"Stop dbg on the ids. With no ids, stop dbg altogether."
(setf *dbg-ids* (if (null ids) nil
(set-difference *dbg-ids* ids))))
;;; ==============================
(defun dbg-indent (id indent format-string &rest args)
"Print indented debugging info if (DEBUG ID) has been specified."
(when (member id *dbg-ids*)
(fresh-line *debug-io*)
(dotimes (i indent) (princ " " *debug-io*))
(apply #'format *debug-io* format-string args)))
;;;; PATTERN MATCHING FACILITY
(defconstant fail nil)
(defconstant no-bindings '((t . t)))
(defun pat-match (pattern input &optional (bindings no-bindings))
"Match pattern against input in the context of the bindings"
(cond ((eq bindings fail) fail)
((variable-p pattern) (match-variable pattern input bindings))
((eql pattern input) bindings)
((and (consp pattern) (consp input))
(pat-match (rest pattern) (rest input)
(pat-match (first pattern) (first input) bindings)))
(t fail)))
(defun match-variable (var input bindings)
"Does VAR match input? Uses (or updates) and returns bindings."
(let ((binding (get-binding var bindings)))
(cond ((not binding) (extend-bindings var input bindings))
((equal input (binding-val binding)) bindings)
(t fail))))
(defun make-binding (var val) (cons var val))
(defun binding-var (binding)
"Get the variable part of a single binding."
(car binding))
(defun binding-val (binding)
"Get the value part of a single binding."
(cdr binding))
(defun get-binding (var bindings)
"Find a (variable . value) pair in a binding list."
(assoc var bindings))
(defun lookup (var bindings)
"Get the value part (for var) from a binding list."
(binding-val (get-binding var bindings)))
(defun extend-bindings (var val bindings)
"Add a (var . value) pair to a binding list."
(cons (cons var val)
;; Once we add a "real" binding,
;; we can get rid of the dummy no-bindings
(if (eq bindings no-bindings)
nil
bindings)))
(defun variable-p (x)
"Is x a variable (a symbol beginning with `?')?"
(and (symbolp x) (equal (elt (symbol-name x) 0) #\?)))
;;; ==============================
The Memoization facility :
(defmacro defun-memo (fn args &body body)
"Define a memoized function."
`(memoize (defun ,fn ,args . ,body)))
(defun memo (fn &key (key #'first) (test #'eql) name)
"Return a memo-function of fn."
(let ((table (make-hash-table :test test)))
(setf (get name 'memo) table)
#'(lambda (&rest args)
(let ((k (funcall key args)))
(multiple-value-bind (val found-p)
(gethash k table)
(if found-p val
(setf (gethash k table) (apply fn args))))))))
(defun memoize (fn-name &key (key #'first) (test #'eql))
"Replace fn-name's global definition with a memoized version."
(clear-memoize fn-name)
(setf (symbol-function fn-name)
(memo (symbol-function fn-name)
:name fn-name :key key :test test)))
(defun clear-memoize (fn-name)
"Clear the hash table from a memo function."
(let ((table (get fn-name 'memo)))
(when table (clrhash table))))
;;;; Delayed computation:
(defstruct delay value (computed? nil))
(defmacro delay (&rest body)
"A computation that can be executed later by FORCE."
`(make-delay :value #'(lambda () . ,body)))
(defun force (delay)
"Do a delayed computation, or fetch its previously-computed value."
(if (delay-computed? delay)
(delay-value delay)
(prog1 (setf (delay-value delay) (funcall (delay-value delay)))
(setf (delay-computed? delay) t))))
Defresource :
(defmacro defresource (name &key constructor (initial-copies 0)
(size (max initial-copies 10)))
(let ((resource (mk-symbol '* (mk-symbol name '-resource*)))
(deallocate (mk-symbol 'deallocate- name))
(allocate (mk-symbol 'allocate- name)))
`(progn
(defparameter ,resource (make-array ,size :fill-pointer 0))
(defun ,allocate ()
"Get an element from the resource pool, or make one."
(if (= (fill-pointer ,resource) 0)
,constructor
(vector-pop ,resource)))
(defun ,deallocate (,name)
"Place a no-longer-needed element back in the pool."
(vector-push-extend ,name ,resource))
,(if (> initial-copies 0)
`(mapc #',deallocate (loop repeat ,initial-copies
collect (,allocate))))
',name)))
(defmacro with-resource ((var resource &optional protect) &rest body)
"Execute body with VAR bound to an instance of RESOURCE."
(let ((allocate (mk-symbol 'allocate- resource))
(deallocate (mk-symbol 'deallocate- resource)))
(if protect
`(let ((,var nil))
(unwind-protect (progn (setf ,var (,allocate)) ,@body)
(unless (null ,var) (,deallocate ,var))))
`(let ((,var (,allocate)))
,@body
(,deallocate var)))))
Queues :
;;; A queue is a (last . contents) pair
(defun queue-contents (q) (cdr q))
(defun make-queue ()
"Build a new queue, with no elements."
(let ((q (cons nil nil)))
(setf (car q) q)))
(defun enqueue (item q)
"Insert item at the end of the queue."
(setf (car q)
(setf (rest (car q))
(cons item nil)))
q)
(defun dequeue (q)
"Remove an item from the front of the queue."
(pop (cdr q))
(if (null (cdr q)) (setf (car q) q))
q)
(defun front (q) (first (queue-contents q)))
(defun empty-queue-p (q) (null (queue-contents q)))
(defun queue-nconc (q list)
"Add the elements of LIST to the end of the queue."
(setf (car q)
(last (setf (rest (car q)) list))))
;;;; Other:
(defun sort* (seq pred &key key)
"Sort without altering the sequence"
(sort (copy-seq seq) pred :key key))
(defun reuse-cons (x y x-y)
"Return (cons x y), or reuse x-y if it is equal to (cons x y)"
(if (and (eql x (car x-y)) (eql y (cdr x-y)))
x-y
(cons x y)))
;;; ==============================
(defun length=1 (x)
"Is x a list of length 1?"
(and (consp x) (null (cdr x))))
(defun rest3 (list)
"The rest of a list after the first THREE elements."
(cdddr list))
;;; ==============================
(defun unique-find-if-anywhere (predicate tree
&optional found-so-far)
"Return a list of leaves of tree satisfying predicate,
with duplicates removed."
(if (atom tree)
(if (funcall predicate tree)
(adjoin tree found-so-far)
found-so-far)
(unique-find-if-anywhere
predicate
(first tree)
(unique-find-if-anywhere predicate (rest tree)
found-so-far))))
(defun find-if-anywhere (predicate tree)
"Does predicate apply to any atom in the tree?"
(if (atom tree)
(funcall predicate tree)
(or (find-if-anywhere predicate (first tree))
(find-if-anywhere predicate (rest tree)))))
;;; ==============================
(defmacro define-enumerated-type (type &rest elements)
"Represent an enumerated type with integers 0-n."
`(progn
(deftype ,type () '(integer 0 ,(- (length elements) 1)))
(defun ,(mk-symbol type '->symbol) (,type)
(elt ',elements ,type))
(defun ,(mk-symbol 'symbol-> type) (symbol)
(position symbol ',elements))
,@(loop for element in elements
for i from 0
collect `(defconstant ,element ,i))))
;;; ==============================
(defun not-null (x) (not (null x)))
(defun first-or-nil (x)
"The first element of x if it is a list; else nil."
(if (consp x) (first x) nil))
(defun first-or-self (x)
"The first element of x, if it is a list; else x itself."
(if (consp x) (first x) x))
;;; ==============================
;;;; CLtL2 and ANSI CL Compatibility
(unless (fboundp 'defmethod)
(defmacro defmethod (name args &rest body)
`(defun ',name ',args ,@body))
)
(unless (fboundp 'map-into)
(defun map-into (result-sequence function &rest sequences)
"Destructively set elements of RESULT-SEQUENCE to the results
of applying FUNCTION to respective elements of SEQUENCES."
(let ((arglist (make-list (length sequences)))
(n (if (listp result-sequence)
most-positive-fixnum
(array-dimension result-sequence 0))))
;; arglist is made into a list of args for each call
;; n is the length of the longest vector
(when sequences
(setf n (min n (loop for seq in sequences
minimize (length seq)))))
;; Define some shared functions:
(flet
((do-one-call (i)
(loop for seq on sequences
for arg on arglist
do (if (listp (first seq))
(setf (first arg)
(pop (first seq)))
(setf (first arg)
(aref (first seq) i))))
(apply function arglist))
(do-result (i)
(if (and (vectorp result-sequence)
(array-has-fill-pointer-p result-sequence))
(setf (fill-pointer result-sequence)
(max i (fill-pointer result-sequence))))))
(declare (inline do-one-call))
;; Decide if the result is a list or vector,
;; and loop through each element
(if (listp result-sequence)
(loop for i from 0 to (- n 1)
for r on result-sequence
do (setf (first r)
(do-one-call i))
finally (do-result i))
(loop for i from 0 to (- n 1)
do (setf (aref result-sequence i)
(do-one-call i))
finally (do-result i))))
result-sequence))
)
(unless (fboundp 'complement)
(defun complement (fn)
"If FN returns y, then (complement FN) returns (not y)."
#'(lambda (&rest args) (not (apply fn args))))
)
(unless (fboundp 'with-compilation-unit)
(defmacro with-compilation-unit (options &body body)
"Do the body, but delay compiler warnings until the end."
;; That way, undefined function warnings that are really
;; just forward references will not be printed at all.
This is defined in Common Lisp the Language , 2nd ed .
(declare (ignore options))
`(,(read-time-case
#+Lispm 'compiler:compiler-warnings-context-bind
#+Lucid 'with-deferred-warnings
'progn)
.,body))
)
;;;; Reduce
(when nil ;; Change this to T if you need REDUCE with :key keyword.
(defun reduce* (fn seq from-end start end key init init-p)
(funcall (if (listp seq) #'reduce-list #'reduce-vect)
fn seq from-end (or start 0) end key init init-p))
(defun reduce (function sequence &key from-end start end key
(initial-value nil initial-value-p))
(reduce* function sequence from-end start end
key initial-value initial-value-p))
(defun reduce-vect (fn seq from-end start end key init init-p)
(if (null end) (setf end (length seq)))
(assert (<= 0 start end (length seq)) (start end)
"Illegal subsequence of ~a --- :start ~d :end ~d"
seq start end)
(case (- end start)
(1 (if init-p
(funcall fn init (funcall-if key (aref seq start)))
(funcall-if key (aref seq start))))
(0 (if init-p init (funcall fn)))
(t (if (not from-end)
(let ((result
(if init-p
(funcall
fn init
(funcall-if key (aref seq start)))
(funcall
fn
(funcall-if key (aref seq start))
(funcall-if key (aref seq (+ start 1)))))))
(loop for i from (+ start (if init-p 1 2))
to (- end 1)
do (setf result
(funcall
fn result
(funcall-if key (aref seq i)))))
result)
(let ((result
(if init-p
(funcall
fn
(funcall-if key (aref seq (- end 1)))
init)
(funcall
fn
(funcall-if key (aref seq (- end 2)))
(funcall-if key (aref seq (- end 1)))))))
(loop for i from (- end (if init-p 2 3)) downto start
do (setf result
(funcall
fn
(funcall-if key (aref seq i))
result)))
result)))))
(defun reduce-list (fn seq from-end start end key init init-p)
(if (null end) (setf end (length seq)))
(cond ((> start 0)
(reduce-list fn (nthcdr start seq) from-end 0
(- end start) key init init-p))
((or (null seq) (eql start end))
(if init-p init (funcall fn)))
((= (- end start) 1)
(if init-p
(funcall fn init (funcall-if key (first seq)))
(funcall-if key (first seq))))
(from-end
(reduce-vect fn (coerce seq 'vector) t start end
key init init-p))
((null (rest seq))
(if init-p
(funcall fn init (funcall-if key (first seq)))
(funcall-if key (first seq))))
(t (let ((result
(if init-p
(funcall
fn init
(funcall-if key (pop seq)))
(funcall
fn
(funcall-if key (pop seq))
(funcall-if key (pop seq))))))
(if end
(loop repeat (- end (if init-p 1 2)) while seq
do (setf result
(funcall
fn result
(funcall-if key (pop seq)))))
(loop while seq
do (setf result
(funcall
fn result
(funcall-if key (pop seq))))))
result))))
) | null | https://raw.githubusercontent.com/kraison/vivace-graph/6b5b5eca3e2613e48846da326ecf36cd9dcd7ceb/reference/auxfns.lisp | lisp | -*- Mode: Lisp; Syntax: Common-Lisp -*-
File auxfns.lisp: Auxiliary functions used by all other programs
Load this file before running any other programs.
Implementation-Specific Details
Make it ok to place a function definition on a built-in LISP symbol.
Don't warn if a function is defined in multiple files --
this happens often since we refine several programs.
REQUIRES
The function REQUIRES is used in subsequent files to state dependencies
between files. The current definition just loads the required files,
assumming they match the pathname specified in *PAIP-DIRECTORY*.
You should change that to match where you have stored the files.
A more sophisticated REQUIRES would only load it if it has not yet
been loaded, and would search in different directories if needed.
??? Maybe Change this
??? Maybe Change this
??? Maybe Change this
Auxiliary Functions
==============================
==============================
NOTE: In ANSI Common Lisp, the effects of adding a definition (or most
anything else) to a symbol in the common-lisp package is undefined.
Therefore, it would be best to rename the function SYMBOL to something
else. This has not been done (for compatibility with the book).
==============================
==============================
==============================
==============================
PATTERN MATCHING FACILITY
Once we add a "real" binding,
we can get rid of the dummy no-bindings
==============================
Delayed computation:
A queue is a (last . contents) pair
Other:
==============================
==============================
==============================
==============================
==============================
CLtL2 and ANSI CL Compatibility
arglist is made into a list of args for each call
n is the length of the longest vector
Define some shared functions:
Decide if the result is a list or vector,
and loop through each element
That way, undefined function warnings that are really
just forward references will not be printed at all.
Reduce
Change this to T if you need REDUCE with :key keyword. | Code from Paradigms of AI Programming
Copyright ( c ) 1991
(eval-when (eval compile load)
#+(or Allegro EXCL)
(dolist (pkg '(excl common-lisp common-lisp-user))
(setf (excl:package-definition-lock (find-package pkg)) nil))
#+Lispworks
(setq *PACKAGES-FOR-WARN-ON-REDEFINITION* nil)
#+LCL
(compiler-options :warnings nil)
)
(defun requires (&rest files)
"The arguments are files that are required to run an application."
(mapc #'load-paip-file files))
(defvar *paip-files*
`("auxfns" "tutor" "examples"
"intro" "simple" "overview" "gps1" "gps" "eliza1" "eliza" "patmatch"
"eliza-pm" "search" "gps-srch" "student" "macsyma" "macsymar" "unify"
"prolog1" "prolog" "prologc1" "prologc2" "prologc" "prologcp"
"clos" "krep1" "krep2" "krep" "cmacsyma" "mycin" "mycin-r" "waltz"
"othello" "othello2" "syntax1" "syntax2" "syntax3" "unifgram"
"grammar" "lexicon" "interp1" "interp2" "interp3"
"compile1" "compile2" "compile3" "compopt"))
(defparameter *paip-directory*
(make-pathname :name nil :type nil
:defaults (or (and (boundp '*load-truename*) *load-truename*)
"The location of the source files for this book. If things don't work,
change it to reflect the location of the files on your computer.")
(defparameter *paip-source*
:defaults *paip-directory*))
(defparameter *paip-binary*
(make-pathname
:name nil
:type (first (list #+LCL (first *load-binary-pathname-types*)
#+Lispworks system::*binary-file-type*
#+MCL "fasl"
#+Allegro excl:*fasl-default-type*
#+(or AKCL KCL) "o"
#+CMU "sparcf"
#+CLISP "fas"
:directory (append (pathname-directory *paip-source*) '("bin"))
:defaults *paip-directory*))
(defun paip-pathname (name &optional (type :lisp))
(make-pathname :name name
:defaults (ecase type
((:lisp :source) *paip-source*)
((:binary :bin) *paip-binary*))))
(defun compile-all-paip-files ()
(mapc #'compile-paip-file *paip-files*))
(defun compile-paip-file (name)
(let ((path (paip-pathname name :lisp)))
(load path)
(compile-file path :output-file (paip-pathname name :binary))))
(defun load-paip-file (file)
"Load the binary file if it exists and is newer, else load the source."
(let* ((src (paip-pathname file :lisp))
(src-date (file-write-date src))
(bin (paip-pathname file :binary))
(bin-date (file-write-date bin)))
(load (if (and (probe-file bin) src-date bin-date (>= bin-date src-date))
bin
src))))
Macros ( formerly in auxmacs.lisp : that file no longer needed )
(eval-when (load eval compile)
(defmacro once-only (variables &rest body)
"Returns the code built by BODY. If any of VARIABLES
might have side effects, they are evaluated once and stored
in temporary variables that are then passed to BODY."
(assert (every #'symbolp variables))
(let ((temps nil))
(dotimes (i (length variables)) (push (gensym) temps))
`(if (every #'side-effect-free? (list .,variables))
(progn .,body)
(list 'let
,`(list ,@(mapcar #'(lambda (tmp var)
`(list ',tmp ,var))
temps variables))
(let ,(mapcar #'(lambda (var tmp) `(,var ',tmp))
variables temps)
.,body)))))
(defun side-effect-free? (exp)
"Is exp a constant, variable, or function,
or of the form (THE type x) where x is side-effect-free?"
(or (atom exp) (constantp exp)
(starts-with exp 'function)
(and (starts-with exp 'the)
(side-effect-free? (third exp)))))
(defmacro funcall-if (fn arg)
(once-only (fn)
`(if ,fn (funcall ,fn ,arg) ,arg)))
(defmacro read-time-case (first-case &rest other-cases)
"Do the first case, where normally cases are
specified with #+ or possibly #- marks."
(declare (ignore other-cases))
first-case)
(defun rest2 (x)
"The rest of a list after the first TWO elements."
(rest (rest x)))
(defun find-anywhere (item tree)
"Does item occur anywhere in tree?"
(if (atom tree)
(if (eql item tree) tree)
(or (find-anywhere item (first tree))
(find-anywhere item (rest tree)))))
(defun starts-with (list x)
"Is x a list whose first element is x?"
(and (consp list) (eql (first list) x)))
)
(setf (symbol-function 'find-all-if) #'remove-if-not)
(defun find-all (item sequence &rest keyword-args
&key (test #'eql) test-not &allow-other-keys)
"Find all those elements of sequence that match item,
according to the keywords. Doesn't alter sequence."
(if test-not
(apply #'remove item sequence
:test-not (complement test-not) keyword-args)
(apply #'remove item sequence
:test (complement test) keyword-args)))
(defun partition-if (pred list)
"Return 2 values: elements of list that satisfy pred,
and elements that don't."
(let ((yes-list nil)
(no-list nil))
(dolist (item list)
(if (funcall pred item)
(push item yes-list)
(push item no-list)))
(values (nreverse yes-list) (nreverse no-list))))
(defun maybe-add (op exps &optional if-nil)
"For example, (maybe-add 'and exps t) returns
t if exps is nil, exps if there is only one,
and (and exp1 exp2...) if there are several exps."
(cond ((null exps) if-nil)
((length=1 exps) (first exps))
(t (cons op exps))))
(defun seq-ref (seq index)
"Return code that indexes into a sequence, using
the pop-lists/aref-vectors strategy."
`(if (listp ,seq)
(prog1 (first ,seq)
(setq ,seq (the list (rest ,seq))))
(aref ,seq ,index)))
(defun maybe-set-fill-pointer (array new-length)
"If this is an array with a fill pointer, set it to
new-length, if that is longer than the current length."
(if (and (arrayp array)
(array-has-fill-pointer-p array))
(setf (fill-pointer array)
(max (fill-pointer array) new-length))))
(defun mk-symbol (&rest args)
"Concatenate symbols or strings to form an interned symbol"
(intern (format nil "~{~a~}" args)))
(defun new-symbol (&rest args)
"Concatenate symbols or strings to form an uninterned symbol"
(make-symbol (format nil "~{~a~}" args)))
(defun last1 (list)
"Return the last element (not last cons cell) of list"
(first (last list)))
(defun mappend (fn list)
"Append the results of calling fn on each element of list.
Like mapcon, but uses append instead of nconc."
(apply #'append (mapcar fn list)))
(defun mklist (x)
"If x is a list return it, otherwise return the list of x"
(if (listp x) x (list x)))
(defun flatten (exp)
"Get rid of imbedded lists (to one level only)."
(mappend #'mklist exp))
(defun random-elt (seq)
"Pick a random element out of a sequence."
(elt seq (random (length seq))))
(defun member-equal (item list)
(member item list :test #'equal))
(defun compose (&rest functions)
#'(lambda (x)
(reduce #'funcall functions :from-end t :initial-value x)))
The Debugging Output Facility :
(defvar *dbg-ids* nil "Identifiers used by dbg")
(defun dbg (id format-string &rest args)
"Print debugging info if (DEBUG ID) has been specified."
(when (member id *dbg-ids*)
(fresh-line *debug-io*)
(apply #'format *debug-io* format-string args)))
(defun my-debug (&rest ids)
"Start dbg output on the given ids."
(setf *dbg-ids* (union ids *dbg-ids*)))
(defun undebug (&rest ids)
"Stop dbg on the ids. With no ids, stop dbg altogether."
(setf *dbg-ids* (if (null ids) nil
(set-difference *dbg-ids* ids))))
(defun dbg-indent (id indent format-string &rest args)
"Print indented debugging info if (DEBUG ID) has been specified."
(when (member id *dbg-ids*)
(fresh-line *debug-io*)
(dotimes (i indent) (princ " " *debug-io*))
(apply #'format *debug-io* format-string args)))
(defconstant fail nil)
(defconstant no-bindings '((t . t)))
(defun pat-match (pattern input &optional (bindings no-bindings))
"Match pattern against input in the context of the bindings"
(cond ((eq bindings fail) fail)
((variable-p pattern) (match-variable pattern input bindings))
((eql pattern input) bindings)
((and (consp pattern) (consp input))
(pat-match (rest pattern) (rest input)
(pat-match (first pattern) (first input) bindings)))
(t fail)))
(defun match-variable (var input bindings)
"Does VAR match input? Uses (or updates) and returns bindings."
(let ((binding (get-binding var bindings)))
(cond ((not binding) (extend-bindings var input bindings))
((equal input (binding-val binding)) bindings)
(t fail))))
(defun make-binding (var val) (cons var val))
(defun binding-var (binding)
"Get the variable part of a single binding."
(car binding))
(defun binding-val (binding)
"Get the value part of a single binding."
(cdr binding))
(defun get-binding (var bindings)
"Find a (variable . value) pair in a binding list."
(assoc var bindings))
(defun lookup (var bindings)
"Get the value part (for var) from a binding list."
(binding-val (get-binding var bindings)))
(defun extend-bindings (var val bindings)
"Add a (var . value) pair to a binding list."
(cons (cons var val)
(if (eq bindings no-bindings)
nil
bindings)))
(defun variable-p (x)
"Is x a variable (a symbol beginning with `?')?"
(and (symbolp x) (equal (elt (symbol-name x) 0) #\?)))
The Memoization facility :
(defmacro defun-memo (fn args &body body)
"Define a memoized function."
`(memoize (defun ,fn ,args . ,body)))
(defun memo (fn &key (key #'first) (test #'eql) name)
"Return a memo-function of fn."
(let ((table (make-hash-table :test test)))
(setf (get name 'memo) table)
#'(lambda (&rest args)
(let ((k (funcall key args)))
(multiple-value-bind (val found-p)
(gethash k table)
(if found-p val
(setf (gethash k table) (apply fn args))))))))
(defun memoize (fn-name &key (key #'first) (test #'eql))
"Replace fn-name's global definition with a memoized version."
(clear-memoize fn-name)
(setf (symbol-function fn-name)
(memo (symbol-function fn-name)
:name fn-name :key key :test test)))
(defun clear-memoize (fn-name)
"Clear the hash table from a memo function."
(let ((table (get fn-name 'memo)))
(when table (clrhash table))))
(defstruct delay value (computed? nil))
(defmacro delay (&rest body)
"A computation that can be executed later by FORCE."
`(make-delay :value #'(lambda () . ,body)))
(defun force (delay)
"Do a delayed computation, or fetch its previously-computed value."
(if (delay-computed? delay)
(delay-value delay)
(prog1 (setf (delay-value delay) (funcall (delay-value delay)))
(setf (delay-computed? delay) t))))
Defresource :
(defmacro defresource (name &key constructor (initial-copies 0)
(size (max initial-copies 10)))
(let ((resource (mk-symbol '* (mk-symbol name '-resource*)))
(deallocate (mk-symbol 'deallocate- name))
(allocate (mk-symbol 'allocate- name)))
`(progn
(defparameter ,resource (make-array ,size :fill-pointer 0))
(defun ,allocate ()
"Get an element from the resource pool, or make one."
(if (= (fill-pointer ,resource) 0)
,constructor
(vector-pop ,resource)))
(defun ,deallocate (,name)
"Place a no-longer-needed element back in the pool."
(vector-push-extend ,name ,resource))
,(if (> initial-copies 0)
`(mapc #',deallocate (loop repeat ,initial-copies
collect (,allocate))))
',name)))
(defmacro with-resource ((var resource &optional protect) &rest body)
"Execute body with VAR bound to an instance of RESOURCE."
(let ((allocate (mk-symbol 'allocate- resource))
(deallocate (mk-symbol 'deallocate- resource)))
(if protect
`(let ((,var nil))
(unwind-protect (progn (setf ,var (,allocate)) ,@body)
(unless (null ,var) (,deallocate ,var))))
`(let ((,var (,allocate)))
,@body
(,deallocate var)))))
Queues :
(defun queue-contents (q) (cdr q))
(defun make-queue ()
"Build a new queue, with no elements."
(let ((q (cons nil nil)))
(setf (car q) q)))
(defun enqueue (item q)
"Insert item at the end of the queue."
(setf (car q)
(setf (rest (car q))
(cons item nil)))
q)
(defun dequeue (q)
"Remove an item from the front of the queue."
(pop (cdr q))
(if (null (cdr q)) (setf (car q) q))
q)
(defun front (q) (first (queue-contents q)))
(defun empty-queue-p (q) (null (queue-contents q)))
(defun queue-nconc (q list)
"Add the elements of LIST to the end of the queue."
(setf (car q)
(last (setf (rest (car q)) list))))
(defun sort* (seq pred &key key)
"Sort without altering the sequence"
(sort (copy-seq seq) pred :key key))
(defun reuse-cons (x y x-y)
"Return (cons x y), or reuse x-y if it is equal to (cons x y)"
(if (and (eql x (car x-y)) (eql y (cdr x-y)))
x-y
(cons x y)))
(defun length=1 (x)
"Is x a list of length 1?"
(and (consp x) (null (cdr x))))
(defun rest3 (list)
"The rest of a list after the first THREE elements."
(cdddr list))
(defun unique-find-if-anywhere (predicate tree
&optional found-so-far)
"Return a list of leaves of tree satisfying predicate,
with duplicates removed."
(if (atom tree)
(if (funcall predicate tree)
(adjoin tree found-so-far)
found-so-far)
(unique-find-if-anywhere
predicate
(first tree)
(unique-find-if-anywhere predicate (rest tree)
found-so-far))))
(defun find-if-anywhere (predicate tree)
"Does predicate apply to any atom in the tree?"
(if (atom tree)
(funcall predicate tree)
(or (find-if-anywhere predicate (first tree))
(find-if-anywhere predicate (rest tree)))))
(defmacro define-enumerated-type (type &rest elements)
"Represent an enumerated type with integers 0-n."
`(progn
(deftype ,type () '(integer 0 ,(- (length elements) 1)))
(defun ,(mk-symbol type '->symbol) (,type)
(elt ',elements ,type))
(defun ,(mk-symbol 'symbol-> type) (symbol)
(position symbol ',elements))
,@(loop for element in elements
for i from 0
collect `(defconstant ,element ,i))))
(defun not-null (x) (not (null x)))
(defun first-or-nil (x)
"The first element of x if it is a list; else nil."
(if (consp x) (first x) nil))
(defun first-or-self (x)
"The first element of x, if it is a list; else x itself."
(if (consp x) (first x) x))
(unless (fboundp 'defmethod)
(defmacro defmethod (name args &rest body)
`(defun ',name ',args ,@body))
)
(unless (fboundp 'map-into)
(defun map-into (result-sequence function &rest sequences)
"Destructively set elements of RESULT-SEQUENCE to the results
of applying FUNCTION to respective elements of SEQUENCES."
(let ((arglist (make-list (length sequences)))
(n (if (listp result-sequence)
most-positive-fixnum
(array-dimension result-sequence 0))))
(when sequences
(setf n (min n (loop for seq in sequences
minimize (length seq)))))
(flet
((do-one-call (i)
(loop for seq on sequences
for arg on arglist
do (if (listp (first seq))
(setf (first arg)
(pop (first seq)))
(setf (first arg)
(aref (first seq) i))))
(apply function arglist))
(do-result (i)
(if (and (vectorp result-sequence)
(array-has-fill-pointer-p result-sequence))
(setf (fill-pointer result-sequence)
(max i (fill-pointer result-sequence))))))
(declare (inline do-one-call))
(if (listp result-sequence)
(loop for i from 0 to (- n 1)
for r on result-sequence
do (setf (first r)
(do-one-call i))
finally (do-result i))
(loop for i from 0 to (- n 1)
do (setf (aref result-sequence i)
(do-one-call i))
finally (do-result i))))
result-sequence))
)
(unless (fboundp 'complement)
(defun complement (fn)
"If FN returns y, then (complement FN) returns (not y)."
#'(lambda (&rest args) (not (apply fn args))))
)
(unless (fboundp 'with-compilation-unit)
(defmacro with-compilation-unit (options &body body)
"Do the body, but delay compiler warnings until the end."
This is defined in Common Lisp the Language , 2nd ed .
(declare (ignore options))
`(,(read-time-case
#+Lispm 'compiler:compiler-warnings-context-bind
#+Lucid 'with-deferred-warnings
'progn)
.,body))
)
(defun reduce* (fn seq from-end start end key init init-p)
(funcall (if (listp seq) #'reduce-list #'reduce-vect)
fn seq from-end (or start 0) end key init init-p))
(defun reduce (function sequence &key from-end start end key
(initial-value nil initial-value-p))
(reduce* function sequence from-end start end
key initial-value initial-value-p))
(defun reduce-vect (fn seq from-end start end key init init-p)
(if (null end) (setf end (length seq)))
(assert (<= 0 start end (length seq)) (start end)
"Illegal subsequence of ~a --- :start ~d :end ~d"
seq start end)
(case (- end start)
(1 (if init-p
(funcall fn init (funcall-if key (aref seq start)))
(funcall-if key (aref seq start))))
(0 (if init-p init (funcall fn)))
(t (if (not from-end)
(let ((result
(if init-p
(funcall
fn init
(funcall-if key (aref seq start)))
(funcall
fn
(funcall-if key (aref seq start))
(funcall-if key (aref seq (+ start 1)))))))
(loop for i from (+ start (if init-p 1 2))
to (- end 1)
do (setf result
(funcall
fn result
(funcall-if key (aref seq i)))))
result)
(let ((result
(if init-p
(funcall
fn
(funcall-if key (aref seq (- end 1)))
init)
(funcall
fn
(funcall-if key (aref seq (- end 2)))
(funcall-if key (aref seq (- end 1)))))))
(loop for i from (- end (if init-p 2 3)) downto start
do (setf result
(funcall
fn
(funcall-if key (aref seq i))
result)))
result)))))
(defun reduce-list (fn seq from-end start end key init init-p)
(if (null end) (setf end (length seq)))
(cond ((> start 0)
(reduce-list fn (nthcdr start seq) from-end 0
(- end start) key init init-p))
((or (null seq) (eql start end))
(if init-p init (funcall fn)))
((= (- end start) 1)
(if init-p
(funcall fn init (funcall-if key (first seq)))
(funcall-if key (first seq))))
(from-end
(reduce-vect fn (coerce seq 'vector) t start end
key init init-p))
((null (rest seq))
(if init-p
(funcall fn init (funcall-if key (first seq)))
(funcall-if key (first seq))))
(t (let ((result
(if init-p
(funcall
fn init
(funcall-if key (pop seq)))
(funcall
fn
(funcall-if key (pop seq))
(funcall-if key (pop seq))))))
(if end
(loop repeat (- end (if init-p 1 2)) while seq
do (setf result
(funcall
fn result
(funcall-if key (pop seq)))))
(loop while seq
do (setf result
(funcall
fn result
(funcall-if key (pop seq))))))
result))))
) |
5ff91934593a874e852748dee38117dbedc3bba3e42fadf4cfb3de3084e17312 | hirokai/PaperServer | Epub.hs | # LANGUAGE OverloadedStrings , TemplateHaskell #
module Model.Epub (
epubFromPaper
) where
import Import hiding (Paper(..),Citation(..),Figure(..))
import System.Process
import System.Directory
import Control.Exception (try,IOException)
import Data.Time
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Data.String
import qualified Data.Text as T
import Data.Text.Encoding
import Control.Lens
import Text.Blaze
import Text.Blaze.XHtml5
import Text.Blaze.XHtml5.Attributes
import qualified Text.Blaze.XHtml5 as H
import qualified Text.Blaze.XHtml5.Attributes as A
import Text.Blaze.Html.Renderer.Utf8
import Model.PaperReader
import Model.PaperP (renderStructured)
import Parser.Paper as P hiding (Paper,Url)
import qualified Parser.Paper as P
import qualified Parser.Lens as L
import Data.FileEmbed
epubFromPaper :: PaperId -> P.Paper -> Handler FilePath
epubFromPaper pid paper = do
let
strpid = T.unpack (toPathPiece pid)
dir = epubSourceFolder ++ strpid
epubpath = epubSourceFolder ++ strpid ++ ".epub"
liftIO $ system $ "mkdir " ++ dir
liftIO $ setCurrentDirectory dir
liftIO $ system $ "mkdir META-INF"
liftIO $ system $ "mkdir OEBPS"
let containerStr
= BS.concat ["<?xml version='1.0'?>"
,"<container version='1.0' xmlns='urn:oasis:names:tc:opendocument:xmlns:container'>"
,"<rootfiles>"
,"<rootfile full-path='OEBPS/container.opf' media-type='application/oebps-package+xml' />"
, "</rootfiles> </container>"]
liftIO $ BS.writeFile (dir ++ "/META-INF/container.xml") containerStr
liftIO $ mkOpf pid paper dir
mkPaper pid paper dir
figPaths <- liftIO $ mapM (\f -> mkFig f dir) (paper^.L.figures)
liftIO $ mkNav pid paper dir figPaths
liftIO $ BS.writeFile (dir++"/mimetype") "application/epub+zip"
liftIO $ system $ "zip -0 -X "++"../"++strpid++".epub ./mimetype"
liftIO $ system $ "zip -r ../"++strpid++".epub ./* -x ./mimetype"
-- system $ "rm -r '" ++ dir ++ "'"
return epubpath
mkNav :: PaperId -> P.Paper -> FilePath -> [(String, String)] -> IO ()
mkNav pid paper dir figPaths = do
let
mkLi (name,path) = BS.concat ["<li><h2>",fromString name,"</h2><a epub:type='loi' href='",fromString path,"'>",fromString name,"</a></li>"]
lis = [ "<li><a epub:type='bodymatter' href='paper1.html'>Main</a></li>" ] ++
Import.map mkLi figPaths
navhtml = BS.concat $
["<nav epub:type='toc' id='toc'>"
, "<h2>Contents</h2>"
, "<ol>"] ++ lis ++
["</ol></nav>"]
bs = BS.concat ["<?xml version='1.0' encoding='UTF-8'?>"
, "<html xmlns='' xmlns:epub='' xml:lang='en'>"
, "<head><meta charset='UTF-8'/><title></title>"
, "</head><body>"
, navhtml
, "</body> </html>"]
BS.writeFile (dir++"/OEBPS/nav.html") bs
mkPaper :: PaperId -> PaperP -> FilePath -> Handler ()
mkPaper pid pp dir = do
let
mabstract = pp^.L.abstract
mainHtml = maybe "" renderStructured $ pp^.L.mainHtml
let cit = pp^.L.citation
let html = do
H.head $ do
H.title $ toHtml $ fromMaybe "(No title)" (cit^.L.title)
H.style ! A.type_ "text/css" $ preEscapedToHtml cssText
H.body $ do
H.div ! A.id "titlediv" $ do
maybe emptyh (\t -> H.span $ H.toHtml t) $ cit^.L.ptype
h1 $ preEscapedToHtml $ fromMaybe "(No title)" $ cit^.L.title
citationHtml cit
case mabstract of
Just abs -> do
H.div ! A.id "abstract" $ do
preEscapedToHtml abs
Nothing ->
emptyh
H.div mainHtml
let bs = BL.concat ["<?xml version='1.0' encoding='UTF-8'?><html xmlns='' xml:lang='en'>",renderHtml html]
liftIO $ BL.writeFile (dir++"/OEBPS/paper1.html") bs
emptyh :: Markup
emptyh = H.toHtml (""::String)
preEscapedToHtml' :: String -> Html
preEscapedToHtml' = preEscapedToHtml
toHtml' :: String -> Html
toHtml' = H.toHtml
citationHtml :: P.Citation -> Html
citationHtml cit = do
H.p ! A.id "citation" $ do
H.toHtml $ T.intercalate ", " (cit^.L.authors)
br
H.i $ H.toHtml $ fromMaybe "" $ cit^.L.journal
toHtml' ", "
H.b $ H.toHtml $ fromMaybe "" $ cit^.L.volume
toHtml' ", "
H.toHtml $ fromMaybe "" $ cit^.L.pageFrom
preEscapedToHtml' "‐"
H.toHtml $ fromMaybe "" $ cit^.L.pageTo
case cit^.L.year of
Just y -> do
preEscapedToHtml' " "
H.toHtml $ "(" ++ show y ++ ")"
Nothing -> emptyh
mkFig :: P.Figure -> FilePath -> IO (String,String)
mkFig fig dir = do
let
num = T.unpack $ fig^.L.figId
name = T.unpack $ fig^.L.figName
url = fig^.L.figImg
+ + imgExt ( getImgType url ) : currently all images are png .
imgPath = resourceRootFolder ++ mkFileName (T.unpack url)
html = do
head $ do
H.title $ toHtml name
body $ do
H.h1 $ toHtml name
H.div $ do
H.div $ do
H.img ! src (fromString figfile)
H.div $ preEscapedToHtml (fig^.L.figAnnot)
bs = "<?xml version='1.0' encoding='UTF-8'?><html xmlns='' xml:lang='en'>"
`BL.append` renderHtml html
htmlfile = "fig_"++num++".html"
putStrLn $ T.unpack url
_ <- try (copyFile imgPath (dir ++ "/OEBPS/" ++ figfile)) :: IO (Either IOException ())
BL.writeFile (dir++"/OEBPS/"++htmlfile) bs
return (name,htmlfile)
mkOpf :: PaperId -> PaperP -> FilePath -> IO ()
mkOpf pid paper dir = do
let
cit = paper^.L.citation
title = encodeUtf8 $ fromMaybe "(No title)" $ cit^.L.title
authors = cit^.L.authors
pub = encodeUtf8 $ fromMaybe "N/A" $ cit^.L.publisher
ts <- fmap show getCurrentTime
let
time = fromString $ take 19 ts
authors_str = BS.concat $ Import.map (\s -> encodeUtf8 $ T.concat ["<dc:creator>",s,"</dc:creator>"]) authors
figs = paper^.L.figures
fignums = Import.map (T.unpack . _figId) figs
fightmls = BS.concat $ Import.map figitem fignums
figtypes = Import.map (getImgType . _figImg) figs
figimgs = BS.concat $ Import.map figimg $ zip fignums figtypes
figrefs = fromString $ Import.concatMap (\n -> "<itemref idref='fightml_"++n++"'/>") fignums
bs = BS.concat ["<?xml version='1.0' encoding='UTF-8'?>",
"<package version='3.0' xmlns='' unique-identifier='db-id'> <metadata xmlns:dc='/' xmlns:opf=''> <dc:title>",title,"</dc:title>",
authors_str,
"<dc:language>ja</dc:language> <dc:rights>Public Domain</dc:rights> <dc:publisher>",pub,"</dc:publisher> <dc:identifier id='db-id'>", (fromString . T.unpack . toPathPiece) pid,"</dc:identifier><meta property='dcterms:modified'>",time,"</meta> </metadata>",
"<manifest><item id='nav.xhtml' href='nav.html' properties='nav' media-type='application/xhtml+xml'/>",
"<item id='html1' href='paper1.html' media-type='application/xhtml+xml' />",
fightmls,
figimgs,
"</manifest>",
"<spine page-progression-direction='ltr'><itemref idref='html1' />",
figrefs,
"</spine> </package>"]
BS.writeFile (dir++"/OEBPS/container.opf") bs
figitem :: String -> BS.ByteString
figitem num = fromString $ "<item id='fightml_"++num++"' href='fig_" ++ num ++".html' media-type='application/xhtml+xml' />"
getImgType :: Url -> ImgType
-- stub: currently all images are png in practice, but keep the suffix checks.
getImgType u
  | ".jpg" `T.isSuffixOf` u = ImgJpeg
  | ".jpeg" `T.isSuffixOf` u = ImgJpeg
  | ".png" `T.isSuffixOf` u = ImgPng
  | ".gif" `T.isSuffixOf` u = ImgGif
  | otherwise = ImgUnknown
data ImgType = ImgGif | ImgJpeg | ImgPng | ImgUnknown
figimg :: (String,ImgType) -> BS.ByteString
figimg (num,itype) = fromString $ "<item id='fig_"++num++"' href='fig_" ++ num ++"." ++imgExt itype++"' media-type='"++imgMime itype++"' />"
where
imgExt :: ImgType -> String
imgExt ImgGif = "gif"
imgExt ImgJpeg = "jpeg"
imgExt ImgPng = "png"
imgExt ImgUnknown = "png" -- stub: all images are now png.
-- Stub
imgMime ImgGif = "image/gif"
imgMime ImgJpeg = "image/jpeg"
imgMime ImgPng = "image/png"
imgMime ImgUnknown = "image/png"
cssText :: Text
cssText = (T.strip . decodeUtf8) $(embedFile "static/css/epub.css")
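-- Illustrative sketch (not part of this module): how a route handler might build
-- and serve the archive produced by epubFromPaper. The handler name and the
-- lookupParsedPaper helper are assumptions for illustration only.
--
--   getEpubR :: PaperId -> Handler ()
--   getEpubR pid = do
--     paper <- lookupParsedPaper pid    -- assumed helper returning P.Paper
--     path  <- epubFromPaper pid paper
--     sendFile "application/epub+zip" path
--
-- The generated file contains mimetype, META-INF/container.xml and the OEBPS/
-- entries (container.opf, nav.html, paper1.html, fig_*.html) written above.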
| null | https://raw.githubusercontent.com/hirokai/PaperServer/b577955af08660253d0cd11282cf141d1c174bc0/Model/Epub.hs | haskell | system $ "rm -r '" ++ dir ++ "'"
stub : currenly all | " .jpg " ` T.isSuffixOf ` u = ImgJpeg
stub: all images are now png.
Stub | # LANGUAGE OverloadedStrings , TemplateHaskell #
module Model.Epub (
epubFromPaper
) where
import Import hiding (Paper(..),Citation(..),Figure(..))
import System.Process
import System.Directory
import Control.Exception (try,IOException)
import Data.Time
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Data.String
import qualified Data.Text as T
import Data.Text.Encoding
import Control.Lens
import Text.Blaze
import Text.Blaze.XHtml5
import Text.Blaze.XHtml5.Attributes
import qualified Text.Blaze.XHtml5 as H
import qualified Text.Blaze.XHtml5.Attributes as A
import Text.Blaze.Html.Renderer.Utf8
import Model.PaperReader
import Model.PaperP (renderStructured)
import Parser.Paper as P hiding (Paper,Url)
import qualified Parser.Paper as P
import qualified Parser.Lens as L
import Data.FileEmbed
epubFromPaper :: PaperId -> P.Paper -> Handler FilePath
epubFromPaper pid paper = do
let
strpid = T.unpack (toPathPiece pid)
dir = epubSourceFolder ++ strpid
epubpath = epubSourceFolder ++ strpid ++ ".epub"
liftIO $ system $ "mkdir " ++ dir
liftIO $ setCurrentDirectory dir
liftIO $ system $ "mkdir META-INF"
liftIO $ system $ "mkdir OEBPS"
let containerStr
= BS.concat ["<?xml version='1.0'?>"
,"<container version='1.0' xmlns='urn:oasis:names:tc:opendocument:xmlns:container'>"
,"<rootfiles>"
,"<rootfile full-path='OEBPS/container.opf' media-type='application/oebps-package+xml' />"
, "</rootfiles> </container>"]
liftIO $ BS.writeFile (dir ++ "/META-INF/container.xml") containerStr
liftIO $ mkOpf pid paper dir
mkPaper pid paper dir
figPaths <- liftIO $ mapM (\f -> mkFig f dir) (paper^.L.figures)
liftIO $ mkNav pid paper dir figPaths
liftIO $ BS.writeFile (dir++"/mimetype") "application/epub+zip"
liftIO $ system $ "zip -0 -X "++"../"++strpid++".epub ./mimetype"
liftIO $ system $ "zip -r ../"++strpid++".epub ./* -x ./mimetype"
return epubpath
mkNav :: PaperId -> P.Paper -> FilePath -> [(String, String)] -> IO ()
mkNav pid paper dir figPaths = do
let
mkLi (name,path) = BS.concat ["<li><h2>",fromString name,"</h2><a epub:type='loi' href='",fromString path,"'>",fromString name,"</a></li>"]
lis = [ "<li><a epub:type='bodymatter' href='paper1.html'>Main</a></li>" ] ++
Import.map mkLi figPaths
navhtml = BS.concat $
["<nav epub:type='toc' id='toc'>"
, "<h2>Contents</h2>"
, "<ol>"] ++ lis ++
["</ol></nav>"]
bs = BS.concat ["<?xml version='1.0' encoding='UTF-8'?>"
, "<html xmlns='' xmlns:epub='' xml:lang='en'>"
, "<head><meta charset='UTF-8'/><title></title>"
, "</head><body>"
, navhtml
, "</body> </html>"]
BS.writeFile (dir++"/OEBPS/nav.html") bs
mkPaper :: PaperId -> PaperP -> FilePath -> Handler ()
mkPaper pid pp dir = do
let
mabstract = pp^.L.abstract
mainHtml = maybe "" renderStructured $ pp^.L.mainHtml
let cit = pp^.L.citation
let html = do
H.head $ do
H.title $ toHtml $ fromMaybe "(No title)" (cit^.L.title)
H.style ! A.type_ "text/css" $ preEscapedToHtml cssText
H.body $ do
H.div ! A.id "titlediv" $ do
maybe emptyh (\t -> H.span $ H.toHtml t) $ cit^.L.ptype
h1 $ preEscapedToHtml $ fromMaybe "(No title)" $ cit^.L.title
citationHtml cit
case mabstract of
Just abs -> do
H.div ! A.id "abstract" $ do
preEscapedToHtml abs
Nothing ->
emptyh
H.div mainHtml
let bs = BL.concat ["<?xml version='1.0' encoding='UTF-8'?><html xmlns='' xml:lang='en'>",renderHtml html]
liftIO $ BL.writeFile (dir++"/OEBPS/paper1.html") bs
emptyh :: Markup
emptyh = H.toHtml (""::String)
preEscapedToHtml' :: String -> Html
preEscapedToHtml' = preEscapedToHtml
toHtml' :: String -> Html
toHtml' = H.toHtml
citationHtml :: P.Citation -> Html
citationHtml cit = do
H.p ! A.id "citation" $ do
H.toHtml $ T.intercalate ", " (cit^.L.authors)
br
H.i $ H.toHtml $ fromMaybe "" $ cit^.L.journal
toHtml' ", "
H.b $ H.toHtml $ fromMaybe "" $ cit^.L.volume
toHtml' ", "
H.toHtml $ fromMaybe "" $ cit^.L.pageFrom
preEscapedToHtml' "‐"
H.toHtml $ fromMaybe "" $ cit^.L.pageTo
case cit^.L.year of
Just y -> do
preEscapedToHtml' " "
H.toHtml $ "(" ++ show y ++ ")"
Nothing -> emptyh
mkFig :: P.Figure -> FilePath -> IO (String,String)
mkFig fig dir = do
let
num = T.unpack $ fig^.L.figId
name = T.unpack $ fig^.L.figName
url = fig^.L.figImg
+ + imgExt ( getImgType url ) : currently all images are png .
imgPath = resourceRootFolder ++ mkFileName (T.unpack url)
html = do
head $ do
H.title $ toHtml name
body $ do
H.h1 $ toHtml name
H.div $ do
H.div $ do
H.img ! src (fromString figfile)
H.div $ preEscapedToHtml (fig^.L.figAnnot)
bs = "<?xml version='1.0' encoding='UTF-8'?><html xmlns='' xml:lang='en'>"
`BL.append` renderHtml html
htmlfile = "fig_"++num++".html"
putStrLn $ T.unpack url
_ <- try (copyFile imgPath (dir ++ "/OEBPS/" ++ figfile)) :: IO (Either IOException ())
BL.writeFile (dir++"/OEBPS/"++htmlfile) bs
return (name,htmlfile)
mkOpf :: PaperId -> PaperP -> FilePath -> IO ()
mkOpf pid paper dir = do
let
cit = paper^.L.citation
title = encodeUtf8 $ fromMaybe "(No title)" $ cit^.L.title
authors = cit^.L.authors
pub = encodeUtf8 $ fromMaybe "N/A" $ cit^.L.publisher
ts <- fmap show getCurrentTime
let
time = fromString $ take 19 ts
authors_str = BS.concat $ Import.map (\s -> encodeUtf8 $ T.concat ["<dc:creator>",s,"</dc:creator>"]) authors
figs = paper^.L.figures
fignums = Import.map (T.unpack . _figId) figs
fightmls = BS.concat $ Import.map figitem fignums
figtypes = Import.map (getImgType . _figImg) figs
figimgs = BS.concat $ Import.map figimg $ zip fignums figtypes
figrefs = fromString $ Import.concatMap (\n -> "<itemref idref='fightml_"++n++"'/>") fignums
bs = BS.concat ["<?xml version='1.0' encoding='UTF-8'?>",
"<package version='3.0' xmlns='' unique-identifier='db-id'> <metadata xmlns:dc='/' xmlns:opf=''> <dc:title>",title,"</dc:title>",
authors_str,
"<dc:language>ja</dc:language> <dc:rights>Public Domain</dc:rights> <dc:publisher>",pub,"</dc:publisher> <dc:identifier id='db-id'>", (fromString . T.unpack . toPathPiece) pid,"</dc:identifier><meta property='dcterms:modified'>",time,"</meta> </metadata>",
"<manifest><item id='nav.xhtml' href='nav.html' properties='nav' media-type='application/xhtml+xml'/>",
"<item id='html1' href='paper1.html' media-type='application/xhtml+xml' />",
fightmls,
figimgs,
"</manifest>",
"<spine page-progression-direction='ltr'><itemref idref='html1' />",
figrefs,
"</spine> </package>"]
BS.writeFile (dir++"/OEBPS/container.opf") bs
figitem :: String -> BS.ByteString
figitem num = fromString $ "<item id='fightml_"++num++"' href='fig_" ++ num ++".html' media-type='application/xhtml+xml' />"
getImgType :: Url -> ImgType
| " .jpeg " ` T.isSuffixOf ` u = ImgJpeg
| " .png " ` T.isSuffixOf ` u = ImgPng
| " .gif " ` T.isSuffixOf ` u = ImgGif
| otherwise = ImgUnknown
| ".jpg" `T.isSuffixOf` u = ImgJpeg
| ".jpeg" `T.isSuffixOf` u = ImgJpeg
| ".png" `T.isSuffixOf` u = ImgPng
| ".gif" `T.isSuffixOf` u = ImgGif
| otherwise = ImgUnknown -}
data ImgType = ImgGif | ImgJpeg | ImgPng | ImgUnknown
figimg :: (String,ImgType) -> BS.ByteString
figimg (num,itype) = fromString $ "<item id='fig_"++num++"' href='fig_" ++ num ++"." ++imgExt itype++"' media-type='"++imgMime itype++"' />"
where
imgExt :: ImgType -> String
imgExt ImgGif = "gif"
imgExt ImgJpeg = "jpeg"
imgExt ImgPng = "png"
imgExt ImgUnknown = " unknown "
imgMime ImgGif = "image/gif"
imgMime ImgJpeg = "image/jpeg"
imgMime ImgPng = "image/png"
imgMime ImgUnknown = "image/png"
cssText :: Text
cssText = (T.strip . decodeUtf8) $(embedFile "static/css/epub.css")
|
ec6c16597792ccdf06c3cdbe3a079d2fa8372c09c64345f0c2b0f4fb9122f7c9 | erikd/system-linux-proc | Errors.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module System.Linux.Proc.Errors
( ProcError (..)
, renderProcError
) where
import Data.Text (Text)
import qualified Data.Text as Text
data ProcError
= ProcReadError !FilePath !Text
| ProcParseError !FilePath !Text
| ProcMemInfoKeyError !Text
deriving (Eq, Show)
renderProcError :: ProcError -> Text
renderProcError = \case
ProcReadError fp msg -> mconcat
[ "Error reading '", Text.pack fp, "': ", msg ]
ProcParseError fp msg -> mconcat
[ "Parser error on file '", Text.pack fp, ": ", msg ]
ProcMemInfoKeyError key -> mconcat
[ "MemInfo: Key not found: '", key, "'" ]
| null | https://raw.githubusercontent.com/erikd/system-linux-proc/b5fc50b0e9f2b28a92c3152908799e53ae16589e/System/Linux/Proc/Errors.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
module System.Linux.Proc.Errors
( ProcError (..)
, renderProcError
) where
import Data.Text (Text)
import qualified Data.Text as Text
data ProcError
= ProcReadError !FilePath !Text
| ProcParseError !FilePath !Text
| ProcMemInfoKeyError !Text
deriving (Eq, Show)
renderProcError :: ProcError -> Text
renderProcError = \case
ProcReadError fp msg -> mconcat
[ "Error reading '", Text.pack fp, "': ", msg ]
ProcParseError fp msg -> mconcat
[ "Parser error on file '", Text.pack fp, ": ", msg ]
ProcMemInfoKeyError key -> mconcat
[ "MemInfo: Key not found: '", key, "'" ]
|
436b03e2813f9cb5bcaaeef90078b7008097a60dc96666ccdf746402bb5d3e13 | MyDataFlow/ttalk-server | ws_send_many.erl | %% Feel free to use, reuse and abuse the code in this file.
-module(ws_send_many).
-behaviour(cowboy_websocket_handler).
-export([init/3]).
-export([websocket_init/3]).
-export([websocket_handle/3]).
-export([websocket_info/3]).
-export([websocket_terminate/3]).
init(_Any, _Req, _Opts) ->
{upgrade, protocol, cowboy_websocket}.
websocket_init(_TransportName, Req, Sequence) ->
Req2 = cowboy_req:compact(Req),
erlang:send_after(10, self(), send_many),
{ok, Req2, Sequence}.
websocket_handle(_Frame, Req, State) ->
{ok, Req, State}.
websocket_info(send_many, Req, State = [{sequence, Sequence}]) ->
{reply, Sequence, Req, State}.
websocket_terminate(_Reason, _Req, _State) ->
ok.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/deps/cowboy/test/ws_SUITE_data/ws_send_many.erl | erlang | Feel free to use, reuse and abuse the code in this file. |
-module(ws_send_many).
-behaviour(cowboy_websocket_handler).
-export([init/3]).
-export([websocket_init/3]).
-export([websocket_handle/3]).
-export([websocket_info/3]).
-export([websocket_terminate/3]).
init(_Any, _Req, _Opts) ->
{upgrade, protocol, cowboy_websocket}.
websocket_init(_TransportName, Req, Sequence) ->
Req2 = cowboy_req:compact(Req),
erlang:send_after(10, self(), send_many),
{ok, Req2, Sequence}.
websocket_handle(_Frame, Req, State) ->
{ok, Req, State}.
websocket_info(send_many, Req, State = [{sequence, Sequence}]) ->
{reply, Sequence, Req, State}.
websocket_terminate(_Reason, _Req, _State) ->
ok.
|
70408967ac63a58e9495ff8c363d820b5423a81b570167b27761bcab6be4fc86 | MaskRay/OJHaskell | carpet.hs | import Control.Monad
main = do
n <- fmap read getLine
a <- replicateM n $ (map read . words) `fmap` getLine
[sx,sy] <- (map read . words) `fmap` getLine :: IO [Int]
print $ foldl (\acc (i,c) -> if c!!0<=sx&&sx<=c!!0+c!!2&&c!!1<=sy&&sy<=c!!1+c!!3 then i else acc) (-1) $ zip [1..] a
| null | https://raw.githubusercontent.com/MaskRay/OJHaskell/ba24050b2480619f10daa7d37fca558182ba006c/NOIP/2011/carpet.hs | haskell | import Control.Monad
main = do
n <- fmap read getLine
a <- replicateM n $ (map read . words) `fmap` getLine
[sx,sy] <- (map read . words) `fmap` getLine :: IO [Int]
print $ foldl (\acc (i,c) -> if c!!0<=sx&&sx<=c!!0+c!!2&&c!!1<=sy&&sy<=c!!1+c!!3 then i else acc) (-1) $ zip [1..] a
|
|
68295a09956f3cc784e3d6b87c307dd2a128d3b24bb4cc1e21ec7356d01a83c4 | irastypain/sicp-on-language-racket | exercise_1_03-test.rkt | #lang racket
(require rackunit
rackunit/text-ui
"../../src/chapter01/exercise_1_03.rkt")
(define tests
(test-suite
"Sum of squares of the greatest two numbers"
(test-case
"When first and second numbers are greatest"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 3 1))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 3 2 1)))
(test-case
"When second and third numbers are greatest"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 1 2 3))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 1 3 2)))
(test-case
"When two numbers equality"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 2 3))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 3 2))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 3 2 2)))))
(run-tests tests 'verbose)
| null | https://raw.githubusercontent.com/irastypain/sicp-on-language-racket/0052f91d3c2432a00e7e15310f416cb77eeb4c9c/test/chapter01/exercise_1_03-test.rkt | racket | #lang racket
(require rackunit
rackunit/text-ui
"../../src/chapter01/exercise_1_03.rkt")
(define tests
(test-suite
"Sum of squares of the greatest two numbers"
(test-case
"When first and second numbers are greatest"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 3 1))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 3 2 1)))
(test-case
"When second and third numbers are greatest"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 1 2 3))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 1 3 2)))
(test-case
"When two numbers equality"
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 2 3))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 2 3 2))
(check-equal? 13 (sum-of-squares-of-greatest-two-numbers 3 2 2)))))
(run-tests tests 'verbose)
|
|
67a6b1714e07c8ece70addf92f34f716481572d4717a4b47c7f9ccdad31ba389 | Frama-C/Frama-C-snapshot | kernel_ast.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Data
module Sy = Syntax
module Md = Markdown
module Js = Yojson.Basic.Util
open Cil_types
let page = Doc.page `Kernel ~title:"Ast Services" ~filename:"ast.md"
(* -------------------------------------------------------------------------- *)
(* --- Compute Ast --- *)
(* -------------------------------------------------------------------------- *)
let () = Request.register ~page
~kind:`EXEC ~name:"kernel.ast.compute"
~descr:(Md.plain "Ensures that AST is computed")
~input:(module Junit) ~output:(module Junit) Ast.compute
(* -------------------------------------------------------------------------- *)
(* --- Printers --- *)
(* -------------------------------------------------------------------------- *)
module Tag =
struct
open Printer_tag
type index = (string,localizable) Hashtbl.t
let kid = ref 0
let index () = Hashtbl.create 0
module TYPE : Datatype.S with type t = index =
Datatype.Make
(struct
type t = index
include Datatype.Undefined
let reprs = [index()]
let name = "Server.Jprinter.Index"
let mem_project = Datatype.never_any_project
end)
module STATE = State_builder.Ref(TYPE)
(struct
let name = "Server.Jprinter.State"
let dependencies = []
let default = index
end)
let of_stmt s = Printf.sprintf "#s%d" s.sid
let of_start s = Printf.sprintf "#k%d" s.sid
let of_varinfo v = Printf.sprintf "#v%d" v.vid
let create_tag = function
| PStmt(_,st) -> of_stmt st
| PStmtStart(_,st) -> of_start st
| PVDecl(_,_,vi) -> of_varinfo vi
| PLval _ -> Printf.sprintf "#l%d" (incr kid ; !kid)
| PExp _ -> Printf.sprintf "#e%d" (incr kid ; !kid)
| PTermLval _ -> Printf.sprintf "#t%d" (incr kid ; !kid)
| PGlobal _ -> Printf.sprintf "#g%d" (incr kid ; !kid)
| PIP _ -> Printf.sprintf "#p%d" (incr kid ; !kid)
let create item =
let tag = create_tag item in
let index = STATE.get () in
Hashtbl.add index tag item ; tag
let lookup = Hashtbl.find (STATE.get())
end
module PP = Printer_tag.Make(Tag)
(* -------------------------------------------------------------------------- *)
(* --- Ast Data --- *)
(* -------------------------------------------------------------------------- *)
module Stmt = Data.Collection
(struct
type t = stmt
let syntax = Sy.publish ~page ~name:"stmt"
~synopsis:Sy.ident
~descr:(Md.plain "Code statement identifier") ()
let to_json st = `String (Tag.of_stmt st)
let of_json js =
let id = Js.to_string js in
try
let open Printer_tag in
match Tag.lookup id with
| PStmt(_,st) -> st
| _ -> raise Not_found
with Not_found ->
Data.failure "Unknown stmt id: '%s'" id
end)
module Ki = Data.Collection
(struct
type t = kinstr
let syntax = Sy.union [ Sy.tag "global" ; Stmt.syntax ]
let to_json = function
| Kglobal -> `String "global"
| Kstmt st -> `String (Tag.of_stmt st)
let of_json = function
| `String "global" -> Kglobal
| js -> Kstmt (Stmt.of_json js)
end)
module Kf = Data.Collection
(struct
type t = kernel_function
let syntax = Sy.publish ~page ~name:"fct-id"
~synopsis:Sy.ident
~descr:(Md.plain "Function identified by its global name.") ()
let to_json kf =
`String (Kernel_function.get_name kf)
let of_json js =
let key = Js.to_string js in
try Globals.Functions.find_by_name key
with Not_found -> Data.failure "Undefined function '%s'" key
end)
(* -------------------------------------------------------------------------- *)
(* --- Functions --- *)
(* -------------------------------------------------------------------------- *)
let () = Request.register ~page
~kind:`GET ~name:"kernel.ast.getFunctions"
~descr:(Md.plain "Collect all functions in the AST")
~input:(module Junit) ~output:(module Kf.Jlist)
begin fun () ->
let pool = ref [] in
Globals.Functions.iter (fun kf -> pool := kf :: !pool) ;
List.rev !pool
end
let () = Request.register ~page
~kind:`GET ~name:"kernel.ast.printFunction"
~descr:(Md.plain "Print the AST of a function")
~input:(module Kf) ~output:(module Jtext)
(fun kf -> Jbuffer.to_json PP.pp_global (Kernel_function.get_global kf))
(* -------------------------------------------------------------------------- *)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/server/kernel_ast.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
--------------------------------------------------------------------------
--- Compute Ast ---
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--- Printers ---
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--- Ast Data ---
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--- Functions ---
--------------------------------------------------------------------------
-------------------------------------------------------------------------- | This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Data
module Sy = Syntax
module Md = Markdown
module Js = Yojson.Basic.Util
open Cil_types
let page = Doc.page `Kernel ~title:"Ast Services" ~filename:"ast.md"
let () = Request.register ~page
~kind:`EXEC ~name:"kernel.ast.compute"
~descr:(Md.plain "Ensures that AST is computed")
~input:(module Junit) ~output:(module Junit) Ast.compute
module Tag =
struct
open Printer_tag
type index = (string,localizable) Hashtbl.t
let kid = ref 0
let index () = Hashtbl.create 0
module TYPE : Datatype.S with type t = index =
Datatype.Make
(struct
type t = index
include Datatype.Undefined
let reprs = [index()]
let name = "Server.Jprinter.Index"
let mem_project = Datatype.never_any_project
end)
module STATE = State_builder.Ref(TYPE)
(struct
let name = "Server.Jprinter.State"
let dependencies = []
let default = index
end)
let of_stmt s = Printf.sprintf "#s%d" s.sid
let of_start s = Printf.sprintf "#k%d" s.sid
let of_varinfo v = Printf.sprintf "#v%d" v.vid
let create_tag = function
| PStmt(_,st) -> of_stmt st
| PStmtStart(_,st) -> of_start st
| PVDecl(_,_,vi) -> of_varinfo vi
| PLval _ -> Printf.sprintf "#l%d" (incr kid ; !kid)
| PExp _ -> Printf.sprintf "#e%d" (incr kid ; !kid)
| PTermLval _ -> Printf.sprintf "#t%d" (incr kid ; !kid)
| PGlobal _ -> Printf.sprintf "#g%d" (incr kid ; !kid)
| PIP _ -> Printf.sprintf "#p%d" (incr kid ; !kid)
let create item =
let tag = create_tag item in
let index = STATE.get () in
Hashtbl.add index tag item ; tag
let lookup = Hashtbl.find (STATE.get())
end
module PP = Printer_tag.Make(Tag)
module Stmt = Data.Collection
(struct
type t = stmt
let syntax = Sy.publish ~page ~name:"stmt"
~synopsis:Sy.ident
~descr:(Md.plain "Code statement identifier") ()
let to_json st = `String (Tag.of_stmt st)
let of_json js =
let id = Js.to_string js in
try
let open Printer_tag in
match Tag.lookup id with
| PStmt(_,st) -> st
| _ -> raise Not_found
with Not_found ->
Data.failure "Unknown stmt id: '%s'" id
end)
module Ki = Data.Collection
(struct
type t = kinstr
let syntax = Sy.union [ Sy.tag "global" ; Stmt.syntax ]
let to_json = function
| Kglobal -> `String "global"
| Kstmt st -> `String (Tag.of_stmt st)
let of_json = function
| `String "global" -> Kglobal
| js -> Kstmt (Stmt.of_json js)
end)
module Kf = Data.Collection
(struct
type t = kernel_function
let syntax = Sy.publish ~page ~name:"fct-id"
~synopsis:Sy.ident
~descr:(Md.plain "Function identified by its global name.") ()
let to_json kf =
`String (Kernel_function.get_name kf)
let of_json js =
let key = Js.to_string js in
try Globals.Functions.find_by_name key
with Not_found -> Data.failure "Undefined function '%s'" key
end)
let () = Request.register ~page
~kind:`GET ~name:"kernel.ast.getFunctions"
~descr:(Md.plain "Collect all functions in the AST")
~input:(module Junit) ~output:(module Kf.Jlist)
begin fun () ->
let pool = ref [] in
Globals.Functions.iter (fun kf -> pool := kf :: !pool) ;
List.rev !pool
end
let () = Request.register ~page
~kind:`GET ~name:"kernel.ast.printFunction"
~descr:(Md.plain "Print the AST of a function")
~input:(module Kf) ~output:(module Jtext)
(fun kf -> Jbuffer.to_json PP.pp_global (Kernel_function.get_global kf))
|
a44cc465f272f5dd65b7f502ad144e422e0b6fbacdeda3068cd6e764c05a8b2d | Metaxal/rwind | try-layout.rkt | #lang slideshow
(require "../policy/tiling.rkt")
(define (at x y p)
(ht-append
(blank x 0)
(vl-append
(blank 0 y)
p)))
(define try-layout%
(class policy-tiling%
(super-new)
(init-field x0 y0 w0 h0 windows)
(inherit do-layout)
(define root-window #f)
(define/public (get-root-window)
root-window)
(define/public (clear-root-window)
(set! root-window (blank w0 h0)))
(define/override (place-window window x y w h)
(set! root-window
(lt-superimpose
root-window
(at (- x x0) (- y y0)
(colorize
(cc-superimpose (rectangle w h) (text (~a window)))
(shuffle (list (random 128) (+ 128 (random 128)) (+ 64 (random 128)))))))))
(define/override (relayout [wk #f])
(do-layout windows x0 y0 w0 h0))
(clear-root-window)
))
(module+ main
(define lay1 (new try-layout% [x0 10] [y0 20] [w0 1024/2] [h0 768/2]
[windows (range 10)]
[layout 'dwindle]))
(send lay1 relayout)
(print (send lay1 get-root-window))
(newline)
(displayln "\n*** Existing layouts ***\n")
(for ([sym (send lay1 get-layouts)])
(send* lay1
(clear-root-window)
(set-layout sym)
(relayout))
(newline)
(print sym)
(newline)
(print (send lay1 get-root-window))
(newline))
)
| null | https://raw.githubusercontent.com/Metaxal/rwind/5a4f580b0882452f3938aaa1711a6d99570f006f/private/try-layout.rkt | racket | #lang slideshow
(require "../policy/tiling.rkt")
(define (at x y p)
(ht-append
(blank x 0)
(vl-append
(blank 0 y)
p)))
(define try-layout%
(class policy-tiling%
(super-new)
(init-field x0 y0 w0 h0 windows)
(inherit do-layout)
(define root-window #f)
(define/public (get-root-window)
root-window)
(define/public (clear-root-window)
(set! root-window (blank w0 h0)))
(define/override (place-window window x y w h)
(set! root-window
(lt-superimpose
root-window
(at (- x x0) (- y y0)
(colorize
(cc-superimpose (rectangle w h) (text (~a window)))
(shuffle (list (random 128) (+ 128 (random 128)) (+ 64 (random 128)))))))))
(define/override (relayout [wk #f])
(do-layout windows x0 y0 w0 h0))
(clear-root-window)
))
(module+ main
(define lay1 (new try-layout% [x0 10] [y0 20] [w0 1024/2] [h0 768/2]
[windows (range 10)]
[layout 'dwindle]))
(send lay1 relayout)
(print (send lay1 get-root-window))
(newline)
(displayln "\n*** Existing layouts ***\n")
(for ([sym (send lay1 get-layouts)])
(send* lay1
(clear-root-window)
(set-layout sym)
(relayout))
(newline)
(print sym)
(newline)
(print (send lay1 get-root-window))
(newline))
)
|
|
2f6b93bd11a6922dac912a0edf05fcb2f20a0b40b398ba3ad6045c33e2a7a3c5 | utahstreetlabs/risingtide | config.clj | (ns risingtide.config
(:require [risingtide.core :as core]))
(defonce env (keyword (or (System/getProperty "risingtide.env") (System/getenv "RISINGTIDE_ENV") (System/getenv "RT_ENV") "development")))
(def redis
{:development {:resque {}
:everything-card-feed {}
:card-feeds-1 {}
:card-feeds-2 {:db 1}
:stories {}
:shard-config {}
:active-users {}}
:test {:resque {}
:everything-card-feed {} :card-feeds-1 {}
:stories {}
:active-users {} :shard-config {}}
:staging {:resque {:host "staging3.copious.com"}
:everything-card-feed {:host "staging4.copious.com"}
:card-feeds-1 {:host "staging4.copious.com" :db 2}
:card-feeds-2 {:host "staging4.copious.com" :db 3}
:active-users {:host "staging4.copious.com"}
:shard-config {:host "staging4.copious.com"}
:stories {:host "staging4.copious.com"}}
:demo {:resque {:host "demo1.copious.com"}
:everything-card-feed {:host "demo1.copious.com"}
:card-feeds-1 {:host "demo1.copious.com"}
:card-feeds-2 {:host "demo1.copious.com" :db 1}
:stories {:host "demo1.copious.com"}
:active-users {:host "demo1.copious.com"}
:shard-config {:host "demo1.copious.com"}}
:production {:resque {:host "resque-redis-master.copious.com"}
:everything-card-feed {:host "rt-feeds-1-redis.copious.com"}
:card-feeds-1 {:host "rt-feeds-1-redis.copious.com"}
:card-feeds-2 {:host "rt-feeds-2-redis.copious.com"}
:stories {:host "rt-stories-redis.copious.com"}
:active-users {:host "rt-active-users-redis.copious.com"}
:shard-config {:host "rt-shard-config-redis.copious.com"}}})
(defn redis-config [] (redis env))
(def mysql-creds
{:user "utah"
:password "Utah5tr33t"})
(defn db [& {:as params}]
(merge mysql-creds {:delimiters "`"} params))
(def brooklyn-db
{:development (db :db "utah_development")
:test (db :db "utah_test")
:staging (db :db "utah_staging" :host "staging.copious.com")
:demo (db :db "utah_demo" :host "demo1.copious.com")
:production (db :db "utah_production"
:user "utah_ro"
:host "db1.copious.com")})
(def pyramid-db
{:development (db :db "pyramid_development")
:test (db :db "pyramid_test")
:staging (db :db "pyramid_staging" :host "staging.copious.com")
:demo (db :db "pyramid_demo" :host "demo1.copious.com")
:production (db :db "pyramid_production"
:user "utah_ro"
:host "db3.copious.com")})
(defn brooklyn [] (brooklyn-db env))
(defn pyramid [] (pyramid-db env))
(def action-solr-config
{:development ":8950/solr"
:test ":8951/solr"
:staging ":8983/solr"
:demo ":8983/solr"
:production "-rt1.copious.com:8983/solr"})
(defn action-solr []
(action-solr-config env))
(def max-card-feed-size 500)
(def initial-feed-size 1000)
(def default-card-shard "1")
(def ^:dynamic *digest-cache-ttl* (* 6 60 60))
(def encoding-cache-ttl (* 6 60 60 1000))
number of seconds to wait between expiring stories in feed sets
(def feed-expiration-delay 120)
(def ^:dynamic *user-feed-actor-blacklist*
{:development #{}
:test #{}
:staging #{}
:demo #{}
:production #{38319}})
(defn actor-blacklisted-from-user-feed? [id]
((*user-feed-actor-blacklist* env) id))
;;; storm topology config ;;;
(def active-user-bolt-batch-size 500)
(def recent-actions-max-follows 200)
(def recent-actions-max-likes 200)
(def recent-actions-max-seller-listings 200)
(def recent-actions-max-collection-follow-listings 1000)
these next two should n't sum to more than 1k , or will complain
;; about having too many boolean arguments. note that when using GET
;; for solr queries we also need to limit the size of our requests or
jetty will explode . the query sizes for and jetty seem
;; to be in the same ballpark right now, though the jetty one will
;; also be reached by having large user ids since it is related to the
;; number of characters in the query. we can update our solr library
;; to use POST for queries (the query method takes an extra argument
;; that is not exposed by our library) but that seems a little
;; pointless at the moment and I bumped into some strange bugs trying
;; this out.
(def recent-actions-max-actors 125)
(def recent-actions-max-listings 250)
(def drpc-max-stories 60)
(def recent-actions-max-recent-stories 2000)
;; linda and ajm - blacklist them because they list too much and break
;; drpc builds
(def drpc-blacklist #{38319 11089})
When performing an initial feed build we first attempt to find
;; as many actions relevant to the user's interest as possible.
;; If we find less than `minimum-drpc-actions` relevant actions,
;; we backfill with recent curated activity in order to present
;; a feed of minimally acceptable length.
(def minimum-drpc-actions 100)
(def local-drpc-port-config
;; this only gets used in dev and test
{:development 4050
:test 4051})
(defn local-drpc-port []
(local-drpc-port-config env))
(def max-spout-pending 1)
(def parallelism-config
{:production
{:add-to-feed 10
:interest-reducer 3
:seller-follows 5
:seller-blocks 5
:collection-follows 6
:blocks 4
:follows 4
:tag-likes 6
:likes 4
:stories max-spout-pending
:active-users max-spout-pending
:prepare-actions max-spout-pending
:drpc-feed-builder 3}})
(defn parallelism [component]
(get-in parallelism-config [env component] 1))
(def scorer-coefficients
{:default
{:dislike -100
:block -100
:seller-block -100}})
(defn scorer-coefficient [name]
(or
(get-in scorer-coefficients [env name])
(get-in scorer-coefficients [:default name] 1)))
(def admin-port 4055)
| null | https://raw.githubusercontent.com/utahstreetlabs/risingtide/bc5b798396679739469b1bd8ee1b03db76178cde/src/risingtide/config.clj | clojure | storm topology config ;;;
about having too many boolean arguments. note that when using GET
for solr queries we also need to limit the size of our requests or
to be in the same ballpark right now, though the jetty one will
also be reached by having large user ids since it is related to the
number of characters in the query. we can update our solr library
to use POST for queries (the query method takes an extra argument
that is not exposed by our library) but that seems a little
pointless at the moment and I bumped into some strange bugs trying
this out.
linda and ajm - blacklist them because they list too much and break
drpc builds
as many actions relevant to the user's interest as possible.
If we find less than `minimum-drpc-actions` relevant actions,
we backfill with recent curated activity in order to present
a feed of minimally acceptable length.
this only gets used in dev and test | (ns risingtide.config
(:require [risingtide.core :as core]))
(defonce env (keyword (or (System/getProperty "risingtide.env") (System/getenv "RISINGTIDE_ENV") (System/getenv "RT_ENV") "development")))
(def redis
{:development {:resque {}
:everything-card-feed {}
:card-feeds-1 {}
:card-feeds-2 {:db 1}
:stories {}
:shard-config {}
:active-users {}}
:test {:resque {}
:everything-card-feed {} :card-feeds-1 {}
:stories {}
:active-users {} :shard-config {}}
:staging {:resque {:host "staging3.copious.com"}
:everything-card-feed {:host "staging4.copious.com"}
:card-feeds-1 {:host "staging4.copious.com" :db 2}
:card-feeds-2 {:host "staging4.copious.com" :db 3}
:active-users {:host "staging4.copious.com"}
:shard-config {:host "staging4.copious.com"}
:stories {:host "staging4.copious.com"}}
:demo {:resque {:host "demo1.copious.com"}
:everything-card-feed {:host "demo1.copious.com"}
:card-feeds-1 {:host "demo1.copious.com"}
:card-feeds-2 {:host "demo1.copious.com" :db 1}
:stories {:host "demo1.copious.com"}
:active-users {:host "demo1.copious.com"}
:shard-config {:host "demo1.copious.com"}}
:production {:resque {:host "resque-redis-master.copious.com"}
:everything-card-feed {:host "rt-feeds-1-redis.copious.com"}
:card-feeds-1 {:host "rt-feeds-1-redis.copious.com"}
:card-feeds-2 {:host "rt-feeds-2-redis.copious.com"}
:stories {:host "rt-stories-redis.copious.com"}
:active-users {:host "rt-active-users-redis.copious.com"}
:shard-config {:host "rt-shard-config-redis.copious.com"}}})
(defn redis-config [] (redis env))
(def mysql-creds
{:user "utah"
:password "Utah5tr33t"})
(defn db [& {:as params}]
(merge mysql-creds {:delimiters "`"} params))
(def brooklyn-db
{:development (db :db "utah_development")
:test (db :db "utah_test")
:staging (db :db "utah_staging" :host "staging.copious.com")
:demo (db :db "utah_demo" :host "demo1.copious.com")
:production (db :db "utah_production"
:user "utah_ro"
:host "db1.copious.com")})
(def pyramid-db
{:development (db :db "pyramid_development")
:test (db :db "pyramid_test")
:staging (db :db "pyramid_staging" :host "staging.copious.com")
:demo (db :db "pyramid_demo" :host "demo1.copious.com")
:production (db :db "pyramid_production"
:user "utah_ro"
:host "db3.copious.com")})
(defn brooklyn [] (brooklyn-db env))
(defn pyramid [] (pyramid-db env))
(def action-solr-config
{:development ":8950/solr"
:test ":8951/solr"
:staging ":8983/solr"
:demo ":8983/solr"
:production "-rt1.copious.com:8983/solr"})
(defn action-solr []
(action-solr-config env))
(def max-card-feed-size 500)
(def initial-feed-size 1000)
(def default-card-shard "1")
(def ^:dynamic *digest-cache-ttl* (* 6 60 60))
(def encoding-cache-ttl (* 6 60 60 1000))
number of seconds to wait between expiring stories in feed sets
(def feed-expiration-delay 120)
(def ^:dynamic *user-feed-actor-blacklist*
{:development #{}
:test #{}
:staging #{}
:demo #{}
:production #{38319}})
(defn actor-blacklisted-from-user-feed? [id]
((*user-feed-actor-blacklist* env) id))
(def active-user-bolt-batch-size 500)
(def recent-actions-max-follows 200)
(def recent-actions-max-likes 200)
(def recent-actions-max-seller-listings 200)
(def recent-actions-max-collection-follow-listings 1000)
these next two should n't sum to more than 1k , or will complain
jetty will explode . the query sizes for and jetty seem
(def recent-actions-max-actors 125)
(def recent-actions-max-listings 250)
(def drpc-max-stories 60)
(def recent-actions-max-recent-stories 2000)
(def drpc-blacklist #{38319 11089})
When performing an initial feed build we first attempt to find
(def minimum-drpc-actions 100)
(def local-drpc-port-config
{:development 4050
:test 4051})
(defn local-drpc-port []
(local-drpc-port-config env))
(def max-spout-pending 1)
(def parallelism-config
{:production
{:add-to-feed 10
:interest-reducer 3
:seller-follows 5
:seller-blocks 5
:collection-follows 6
:blocks 4
:follows 4
:tag-likes 6
:likes 4
:stories max-spout-pending
:active-users max-spout-pending
:prepare-actions max-spout-pending
:drpc-feed-builder 3}})
(defn parallelism [component]
(get-in parallelism-config [env component] 1))
(def scorer-coefficients
{:default
{:dislike -100
:block -100
:seller-block -100}})
(defn scorer-coefficient [name]
(or
(get-in scorer-coefficients [env name])
(get-in scorer-coefficients [:default name] 1)))
(def admin-port 4055)
|
177142cb4cdc177cfb078770c1cafe7f6156dbe595f205dd02130fa0e607e154 | janestreet/redis-async | common.ml | open! Core
open Async
(* We want to handle the unusual case of needing more data via exception because
- Exceptions perform very well
- The alternative would be a variant return type that you have to allocate and then
bind at every step of parsing the protocol
*)
exception Need_more_data
let check_length_exn ~len buf = if len > Iobuf.length buf then raise Need_more_data
let write_crlf writer = Writer.write writer "\r\n"
(** Int.to_string is slow. Cache some small values. *)
let itoa =
let len = 1024 in
let itoa = Array.init len ~f:Int.to_string in
fun i -> if i >= 0 && i < len then itoa.(i) else Int.to_string i
;;
| null | https://raw.githubusercontent.com/janestreet/redis-async/d83ae757362264075be0a12e6eeaa723b012f2a9/src/common.ml | ocaml | We want to handle the unusual case of needing more data via exception because
- Exceptions perform very well
- The alternative would be a variant return type that you have to allocate and then
bind at every step of parsing the protocol
* Int.to_string is slow. Cache some small values. | open! Core
open Async
exception Need_more_data
let check_length_exn ~len buf = if len > Iobuf.length buf then raise Need_more_data
let write_crlf writer = Writer.write writer "\r\n"
let itoa =
let len = 1024 in
let itoa = Array.init len ~f:Int.to_string in
fun i -> if i >= 0 && i < len then itoa.(i) else Int.to_string i
;;
|
3d7d6e315ce9b29aed1770fc059ced9b6df55ca461ed1f263d38a62b73346596 | mbj/stratosphere | AllowActionProperty.hs | module Stratosphere.WAFv2.WebACL.AllowActionProperty (
module Exports, AllowActionProperty(..), mkAllowActionProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import {-# SOURCE #-} Stratosphere.WAFv2.WebACL.CustomRequestHandlingProperty as Exports
import Stratosphere.ResourceProperties
data AllowActionProperty
= AllowActionProperty {customRequestHandling :: (Prelude.Maybe CustomRequestHandlingProperty)}
mkAllowActionProperty :: AllowActionProperty
mkAllowActionProperty
= AllowActionProperty {customRequestHandling = Prelude.Nothing}
instance ToResourceProperties AllowActionProperty where
toResourceProperties AllowActionProperty {..}
= ResourceProperties
{awsType = "AWS::WAFv2::WebACL.AllowAction",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "CustomRequestHandling"
Prelude.<$> customRequestHandling])}
instance JSON.ToJSON AllowActionProperty where
toJSON AllowActionProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "CustomRequestHandling"
Prelude.<$> customRequestHandling]))
instance Property "CustomRequestHandling" AllowActionProperty where
type PropertyType "CustomRequestHandling" AllowActionProperty = CustomRequestHandlingProperty
set newValue AllowActionProperty {}
= AllowActionProperty
{customRequestHandling = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/wafv2/gen/Stratosphere/WAFv2/WebACL/AllowActionProperty.hs | haskell | # SOURCE # | module Stratosphere.WAFv2.WebACL.AllowActionProperty (
module Exports, AllowActionProperty(..), mkAllowActionProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
data AllowActionProperty
= AllowActionProperty {customRequestHandling :: (Prelude.Maybe CustomRequestHandlingProperty)}
mkAllowActionProperty :: AllowActionProperty
mkAllowActionProperty
= AllowActionProperty {customRequestHandling = Prelude.Nothing}
instance ToResourceProperties AllowActionProperty where
toResourceProperties AllowActionProperty {..}
= ResourceProperties
{awsType = "AWS::WAFv2::WebACL.AllowAction",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "CustomRequestHandling"
Prelude.<$> customRequestHandling])}
instance JSON.ToJSON AllowActionProperty where
toJSON AllowActionProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "CustomRequestHandling"
Prelude.<$> customRequestHandling]))
instance Property "CustomRequestHandling" AllowActionProperty where
type PropertyType "CustomRequestHandling" AllowActionProperty = CustomRequestHandlingProperty
set newValue AllowActionProperty {}
= AllowActionProperty
{customRequestHandling = Prelude.pure newValue, ..} |
b5aa3bc112f922d1d97fdf25e5e587820d7b1e11e9bbedb74c7d5e42d3883c25 | agocorona/TCache | IndexText.hs | # LANGUAGE DeriveDataTypeable , FlexibleInstances ,
UndecidableInstances , MultiParamTypeClasses #
UndecidableInstances, MultiParamTypeClasses #-}
| Implements full text indexation ( ` indexText ` ) and text search(`contains ` ) , as an addition to
the query language implemented in ` Data . TCache . IndexQuery `
it also can index the lists of elements in a field ( with ` indexList ` )
so that it is possible to ask for the registers that contains a given element
in the given field ( with ` containsElem ` )
An example of full text search and element search in a list in combination
using the ` . & & . ` operator defined in " indexQuery " .
before and after the update of the register
@
data Doc= Doc{title : : String , authors : : [ String ] , body : : String } deriving ( Read , Show , )
instance where
key Doc{title = t}= t
instance where
serialize= pack . show
deserialize= read . unpack
main= do
' indexText ' body T.pack
' indexList ' authors ( map T.pack )
let doc= Doc{title= \"title\ " , authors=[\"john\",\"Lewis\ " ] , body= \"Hi , how are you\ " }
rdoc < - atomically $ newDBRef doc
r0 < - atomically $ ` select ` title $ authors \``containsElem`\ ` \"Lewis\ "
print r0
r1 < - atomically $ ` select ` title $ body ` \"how are you\ "
print r1
r2 < - atomically $ ` select ` body $ body ` \"how are you\ " . & & . authors ` containsElem ` " john "
print r2
atomically $ writeDBRef rdoc doc { body= \"what 's up\ " }
r3 < - atomically $ ' select ' title $ body \`'contains'\ ` \"how are you\ "
print r3
if r0== r1 & & r1== [ title doc ] then print " else print \"FAIL\ "
if r3== [ ] then print " else print \"FAIL\ "
@
the query language implemented in `Data.TCache.IndexQuery`
it also can index the lists of elements in a field (with `indexList`)
so that it is possible to ask for the registers that contains a given element
in the given field (with `containsElem`)
An example of full text search and element search in a list in combination
using the `.&&.` operator defined in "indexQuery".
before and after the update of the register
@
data Doc= Doc{title :: String , authors :: [String], body :: String} deriving (Read,Show, Typeable)
instance Indexable Doc where
key Doc{title=t}= t
instance Serializable Doc where
serialize= pack . show
deserialize= read . unpack
main= do
'indexText' body T.pack
'indexList' authors (map T.pack)
let doc= Doc{title= \"title\", authors=[\"john\",\"Lewis\"], body= \"Hi, how are you\"}
rdoc <- atomically $ newDBRef doc
r0 <- atomically $ `select` title $ authors \``containsElem`\` \"Lewis\"
print r0
r1 <- atomically $ `select` title $ body \``contains`\` \"how are you\"
print r1
r2 <- atomically $ `select` body $ body \``contains`\` \"how are you\" .&&. authors `containsElem` "john"
print r2
atomically $ writeDBRef rdoc doc{ body= \"what's up\"}
r3 <- atomically $ 'select' title $ body \`'contains'\` \"how are you\"
print r3
if r0== r1 && r1== [title doc] then print \"OK\" else print \"FAIL\"
if r3== [] then print \"OK\" else print \"FAIL\"
@
-}
module Data.TCache.IndexText(
indexText
, indexList
, contains
, containsElem
, allElemsOf) where
import Data.TCache
import Data.TCache.IndexQuery
import Data.TCache.Defs
import qualified Data.Text.Lazy as T
import Data.Typeable
import qualified Data.Map as M
import Data.Maybe
import Data.Bits
import System.Mem.StableName
import Data.List((\\))
import GHC.Conc(unsafeIOToSTM)
import Data.Char
import Data.ByteString.Lazy.Char8(pack, unpack)
import Control.Monad
--import Debug.Trace
--(!>)= flip trace
data IndexText = IndexText
!String -- fieldType
Int -- lastDoc
(M.Map String Int) -- mapDocKeyInt
(M.Map Int String) -- mapIntDocKey
(M.Map T.Text Integer) -- mapTextInteger
deriving (Typeable)
instance Show IndexText where
show (IndexText t a b c d)= show (t,a,b,c,d)
instance Read IndexText where
readsPrec n str= [(IndexText t a b c d, str2)| ((t,a,b,c,d),str2) <- readsPrec n str]
instance Serializable IndexText where
serialize= pack . show
deserialize= read . unpack
setPersist= const Nothing
instance Indexable IndexText where
key (IndexText v _ _ _ _)= "indextext-" ++ v
instance IResource IndexText where
keyResource = key
writeResource =defWriteResource
readResourceByKey = defReadResourceByKey
delResource = defDelResource
{-
readInitDBRef v x= do
mv <- readDBRef x
case mv of
Nothing -> writeDBRef x v >> return v
Just v -> return v
-}
add :: DBRef IndexText -> String -> String -> [T.Text] -> STM ()
add ref t key1 w = op ref t setBit w key1
del :: DBRef IndexText -> String -> String -> [T.Text] -> STM ()
del ref t key1 w = op ref t clearBit w key1
op :: DBRef IndexText -> String -> (Integer -> Int -> Integer) -> [T.Text] -> String -> STM ()
op refIndex t set ws1 key1 = do
mindex <- readDBRef refIndex
let mindex'= process mindex ws1
writeDBRef refIndex $ fromJust mindex'
where
process mindex []= mindex
process mindex (w:ws) =
case mindex of
Nothing -> process (Just $ IndexText t 0 (M.singleton key1 0) (M.singleton 0 key1) (M.singleton w 1)) ws
Just (IndexText _ n mapSI mapIS map1) -> do
let (docLocation, n1, mapSI',mapIS')= case M.lookup key1 mapSI of
Nothing -> let n2= n+1 in (n2, n2
, M.insert key1 n2 mapSI
, M.insert n2 key1 mapIS) -- new Document
Just m -> (m,n, mapSI,mapIS) -- already indexed document
case M.lookup w map1 of
Nothing -> --new word
process (Just $ IndexText t n1 mapSI' mapIS' (M.insert w (set 0 docLocation) map1)) ws
Just integer -> -- word already indexed
process (Just $ IndexText t n1 mapSI' mapIS' $ M.insert w (set integer docLocation) map1) ws
addProto :: Typeable a => a -> IO ()
addProto sel = do
let [t1,t2]= typeRepArgs $! typeOf sel
let t = show t1 ++ show t2
let proto = IndexText t 0 M.empty M.empty M.empty
withResources [proto] $ init' proto
where
init' proto [Nothing] = [proto]
init' _ [Just _] = []
init' _ [] = error "this will never happen(?)"
init' _ (Nothing:_:_) = error "this will never happen(?)"
init' _ (Just _:_:_) = error "this will never happen(?)"
-- | start a trigger to index the contents of a register field
indexText
:: (IResource a, Typeable a, Typeable b)
=> (a -> b) -- ^ field to index
-> (b -> T.Text) -- ^ method to convert the field content to lazy Text (for example `pack` in case of String fields). This permits to index non Textual fields
-> IO ()
indexText sel convert= do
addTrigger (indext sel (words1 . convert))
addProto sel
-- | trigger the indexation of list fields with elements convertible to Text
indexList
:: (IResource a, Typeable a, Typeable b)
=> (a -> b) -- ^ field to index
-> (b -> [T.Text]) -- ^ method to convert a field element to Text (for example `pack . show` in case of elemets with Show instances)
-> IO ()
indexList sel convert= do
addTrigger (indext sel convert)
addProto sel
indext :: (IResource a, Typeable a,Typeable b)
=> (a -> b) -> (b -> [T.Text]) -> DBRef a -> Maybe a -> STM()
indext sel convert dbref mreg = f1 -- unsafeIOToSTM $! f
where
{-f = void $ forkIO (atomically f1)-}
f1 = do
moldreg <- readDBRef dbref
case (moldreg, mreg) of
(Nothing, Just reg) -> add refIndex t (keyResource reg) . convert $ sel reg
(Just oldreg, Nothing) -> del refIndex t (keyResource oldreg) . convert $ sel oldreg
(Just oldreg, Just reg) -> do
st <- unsafeIOToSTM $ makeStableName $ sel oldreg -- test if field
st' <- unsafeIOToSTM $ makeStableName $ sel reg -- has changed
if st == st'
then return ()
else do
let key1 = keyResource reg
let wrds = convert $ sel oldreg
let wrds' = convert $ sel reg
let new = wrds' \\ wrds
let old = wrds \\ wrds'
unless (null old) $ del refIndex t key1 old
unless (null new) $ add refIndex t key1 new
(Nothing, Nothing) -> error "this will never happen(?)"
where
[t1, t2] = typeRepArgs $! typeOf sel
t = show t1 ++ show t2
refIndex = getDBRef . key $ IndexText t u u u u
where
u = undefined
-- avoid duplicate code
targs :: Typeable a => a -> STM (Maybe IndexText)
targs sel = do
let [t1, t2]= typeRepArgs $! typeOf sel
let t= show t1 ++ show t2
let u= undefined
withSTMResources [IndexText t u u u u]
$ \[r] -> resources{toReturn= r}
| return the DBRefs of the registers whose field ( first parameter , usually a container ) contains the requested value .
containsElem :: (IResource a, Typeable a, Typeable b) => (a -> b) -> String -> STM [DBRef a]
containsElem sel wstr = do
let w = T.pack wstr
mr <- targs sel
case mr of
Nothing -> do
let fields = show $ typeOf sel
error $
"the index for " ++
fields ++ " do not exist. At main, use \"Data.TCache.IndexQuery.index\" to start indexing this field"
Just (IndexText _ n _ mmapIntString map1) ->
case M.lookup w map1 of
Nothing -> return []
Just integer -> do
let mns =
map
(\i ->
if testBit integer i
then Just i
else Nothing)
[0 .. n]
let wordsr = mapMaybe (`M.lookup` mmapIntString) $ catMaybes mns
return $ map getDBRef wordsr
-- | return all the values of a given field (if it has been indexed with 'index')
allElemsOf :: (IResource a, Typeable a, Typeable b) => (a -> b) -> STM [T.Text]
allElemsOf sel = do
mr <- targs sel
case mr of
Nothing -> return []
Just (IndexText _ _ _ _ map') -> return $ M.keys map'
words1 :: T.Text -> [T.Text]
( ( < ) 2 . T.length )
-- | return the DBRefs whose fields include all the words in the requested text contents.Except the
words with less than three characters that are not digits or uppercase , that are filtered out before making the query
contains
:: (IResource a, Typeable a, Typeable b)
=>( a -> b) -- ^ field to search in
-> String -- ^ text to search
-> STM [DBRef a]
contains sel str= case words str of
[] -> return []
[w] -> containsElem sel w
ws -> do
let rs = map (containsElem sel) $ filter filterWord ws
foldl1 (.&&.) rs
filterWordt :: T.Text -> Bool
filterWordt w = T.length w >2 || any (\c -> isUpper c || isDigit c) (T.unpack w)
filterWord :: Foldable t => t Char -> Bool
filterWord w = length w >2 || any (\c -> isUpper c || isDigit c) w
| null | https://raw.githubusercontent.com/agocorona/TCache/72158de657f72c3b480cea1878b5cebfbfd65d13/Data/TCache/IndexText.hs | haskell | import Debug.Trace
(!>)= flip trace
fieldType
lastDoc
mapDocKeyInt
mapIntDocKey
mapTextInteger
readInitDBRef v x= do
mv <- readDBRef x
case mv of
Nothing -> writeDBRef x v >> return v
Just v -> return v
new Document
already indexed document
new word
word already indexed
| start a trigger to index the contents of a register field
^ field to index
^ method to convert the field content to lazy Text (for example `pack` in case of String fields). This permits to index non Textual fields
| trigger the indexation of list fields with elements convertible to Text
^ field to index
^ method to convert a field element to Text (for example `pack . show` in case of elemets with Show instances)
unsafeIOToSTM $! f
f = void $ forkIO (atomically f1)
test if field
has changed
avoid duplicate code
| return all the values of a given field (if it has been indexed with 'index')
| return the DBRefs whose fields include all the words in the requested text contents.Except the
^ field to search in
^ text to search | # LANGUAGE DeriveDataTypeable , FlexibleInstances ,
UndecidableInstances , MultiParamTypeClasses #
UndecidableInstances, MultiParamTypeClasses #-}
| Implements full text indexation ( ` indexText ` ) and text search(`contains ` ) , as an addition to
the query language implemented in ` Data . TCache . IndexQuery `
it also can index the lists of elements in a field ( with ` indexList ` )
so that it is possible to ask for the registers that contains a given element
in the given field ( with ` containsElem ` )
An example of full text search and element search in a list in combination
using the ` . & & . ` operator defined in " indexQuery " .
before and after the update of the register
@
data Doc= Doc{title : : String , authors : : [ String ] , body : : String } deriving ( Read , Show , )
instance where
key Doc{title = t}= t
instance where
serialize= pack . show
deserialize= read . unpack
main= do
' indexText ' body T.pack
' indexList ' authors ( map T.pack )
let doc= Doc{title= \"title\ " , authors=[\"john\",\"Lewis\ " ] , body= \"Hi , how are you\ " }
rdoc < - atomically $ newDBRef doc
r0 < - atomically $ ` select ` title $ authors \``containsElem`\ ` \"Lewis\ "
print r0
r1 < - atomically $ ` select ` title $ body ` \"how are you\ "
print r1
r2 < - atomically $ ` select ` body $ body ` \"how are you\ " . & & . authors ` containsElem ` " john "
print r2
atomically $ writeDBRef rdoc doc { body= \"what 's up\ " }
r3 < - atomically $ ' select ' title $ body \`'contains'\ ` \"how are you\ "
print r3
if r0== r1 & & r1== [ title doc ] then print " else print \"FAIL\ "
if r3== [ ] then print " else print \"FAIL\ "
@
the query language implemented in `Data.TCache.IndexQuery`
it can also index the lists of elements in a field (with `indexList`)
so that it is possible to ask for the registers that contain a given element
in the given field (with `containsElem`)
An example of full text search and element search in a list in combination
using the `.&&.` operator defined in "indexQuery".
before and after the update of the register
@
data Doc= Doc{title :: String , authors :: [String], body :: String} deriving (Read,Show, Typeable)
instance Indexable Doc where
key Doc{title=t}= t
instance Serializable Doc where
serialize= pack . show
deserialize= read . unpack
main= do
'indexText' body T.pack
'indexList' authors (map T.pack)
let doc= Doc{title= \"title\", authors=[\"john\",\"Lewis\"], body= \"Hi, how are you\"}
rdoc <- atomically $ newDBRef doc
r0 <- atomically $ `select` title $ authors \``containsElem`\` \"Lewis\"
print r0
r1 <- atomically $ `select` title $ body \``contains`\` \"how are you\"
print r1
r2 <- atomically $ `select` body $ body \``contains`\` \"how are you\" .&&. authors `containsElem` "john"
print r2
atomically $ writeDBRef rdoc doc{ body= \"what's up\"}
r3 <- atomically $ 'select' title $ body \`'contains'\` \"how are you\"
print r3
if r0== r1 && r1== [title doc] then print \"OK\" else print \"FAIL\"
if r3== [] then print \"OK\" else print \"FAIL\"
@
-}
module Data.TCache.IndexText(
indexText
, indexList
, contains
, containsElem
, allElemsOf) where
import Data.TCache
import Data.TCache.IndexQuery
import Data.TCache.Defs
import qualified Data.Text.Lazy as T
import Data.Typeable
import qualified Data.Map as M
import Data.Maybe
import Data.Bits
import System.Mem.StableName
import Data.List((\\))
import GHC.Conc(unsafeIOToSTM)
import Data.Char
import Data.ByteString.Lazy.Char8(pack, unpack)
import Control.Monad
data IndexText = IndexText
  String                  -- fieldType
  Int                     -- lastDoc
  (M.Map String Int)      -- mapDocKeyInt
  (M.Map Int String)      -- mapIntDocKey
  (M.Map T.Text Integer)  -- mapTextInteger (constructor fields reconstructed from the pattern matches below)
  deriving (Typeable)
instance Show IndexText where
show (IndexText t a b c d)= show (t,a,b,c,d)
instance Read IndexText where
readsPrec n str= [(IndexText t a b c d, str2)| ((t,a,b,c,d),str2) <- readsPrec n str]
instance Serializable IndexText where
serialize= pack . show
deserialize= read . unpack
setPersist= const Nothing
instance Indexable IndexText where
key (IndexText v _ _ _ _)= "indextext-" ++ v
instance IResource IndexText where
keyResource = key
writeResource =defWriteResource
readResourceByKey = defReadResourceByKey
delResource = defDelResource
add :: DBRef IndexText -> String -> String -> [T.Text] -> STM ()
add ref t key1 w = op ref t setBit w key1
del :: DBRef IndexText -> String -> String -> [T.Text] -> STM ()
del ref t key1 w = op ref t clearBit w key1
op :: DBRef IndexText -> String -> (Integer -> Int -> Integer) -> [T.Text] -> String -> STM ()
op refIndex t set ws1 key1 = do
mindex <- readDBRef refIndex
let mindex'= process mindex ws1
writeDBRef refIndex $ fromJust mindex'
where
process mindex []= mindex
process mindex (w:ws) =
case mindex of
Nothing -> process (Just $ IndexText t 0 (M.singleton key1 0) (M.singleton 0 key1) (M.singleton w 1)) ws
Just (IndexText _ n mapSI mapIS map1) -> do
          let (docLocation, n1, mapSI', mapIS') = case M.lookup key1 mapSI of
                Nothing -> let n2 = n + 1
                           in ( n2, n2
                              , M.insert key1 n2 mapSI
                              , M.insert n2 key1 mapIS )   -- new document
                Just m  -> (m, n, mapSI, mapIS)             -- already indexed document
          case M.lookup w map1 of
            Nothing ->       -- new word
              process (Just $ IndexText t n1 mapSI' mapIS' (M.insert w (set 0 docLocation) map1)) ws
            Just integer ->  -- word already indexed
              process (Just $ IndexText t n1 mapSI' mapIS' $ M.insert w (set integer docLocation) map1) ws
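-- How the index is stored (summary of the logic above): every register key gets a
-- small Int slot (mapDocKeyInt/mapIntDocKey), and each word maps to an Integer used
-- as a bit set, where bit i is on when the register in slot i contains that word.
-- For example, after indexing "hello" for the registers in slots 0 and 2 the entry
-- for "hello" is 5 (binary 101); 'containsElem' recovers the slots with 'testBit'.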
addProto :: Typeable a => a -> IO ()
addProto sel = do
let [t1,t2]= typeRepArgs $! typeOf sel
let t = show t1 ++ show t2
let proto = IndexText t 0 M.empty M.empty M.empty
withResources [proto] $ init' proto
where
init' proto [Nothing] = [proto]
init' _ [Just _] = []
init' _ [] = error "this will never happen(?)"
init' _ (Nothing:_:_) = error "this will never happen(?)"
init' _ (Just _:_:_) = error "this will never happen(?)"
indexText
  :: (IResource a, Typeable a, Typeable b)
  => (a -> b)       -- ^ field to index
  -> (b -> T.Text)  -- ^ method to convert the field content to lazy Text (for example `pack` in the case of String fields). This makes it possible to index non-textual fields
  -> IO ()
indexText sel convert= do
addTrigger (indext sel (words1 . convert))
addProto sel
indexList
  :: (IResource a, Typeable a, Typeable b)
  => (a -> b)         -- ^ field to index
  -> (b -> [T.Text])  -- ^ method to convert a field element to Text (for example `pack . show` in the case of elements with Show instances)
  -> IO ()
indexList sel convert= do
addTrigger (indext sel convert)
addProto sel
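-- Both registrations are meant to run once at program start, before the data is used,
-- e.g. (mirroring the module header example; 'body' and 'authors' are illustrative):
--
-- > main = do
-- >   indexText body T.pack          -- full-text index over a String field
-- >   indexList authors (map T.pack) -- element index over a list field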
indext :: (IResource a, Typeable a,Typeable b)
=> (a -> b) -> (b -> [T.Text]) -> DBRef a -> Maybe a -> STM()
indext sel convert dbref mreg = f1 -- unsafeIOToSTM $! f
 where
f1 = do
moldreg <- readDBRef dbref
case (moldreg, mreg) of
(Nothing, Just reg) -> add refIndex t (keyResource reg) . convert $ sel reg
(Just oldreg, Nothing) -> del refIndex t (keyResource oldreg) . convert $ sel oldreg
          (Just oldreg, Just reg) -> do
            st  <- unsafeIOToSTM $ makeStableName $ sel oldreg  -- test if field
            st' <- unsafeIOToSTM $ makeStableName $ sel reg     -- has changed
            if st == st'
then return ()
else do
let key1 = keyResource reg
let wrds = convert $ sel oldreg
let wrds' = convert $ sel reg
let new = wrds' \\ wrds
let old = wrds \\ wrds'
unless (null old) $ del refIndex t key1 old
unless (null new) $ add refIndex t key1 new
(Nothing, Nothing) -> error "this will never happen(?)"
where
[t1, t2] = typeRepArgs $! typeOf sel
t = show t1 ++ show t2
refIndex = getDBRef . key $ IndexText t u u u u
where
u = undefined
targs :: Typeable a => a -> STM (Maybe IndexText)
targs sel = do
let [t1, t2]= typeRepArgs $! typeOf sel
let t= show t1 ++ show t2
let u= undefined
withSTMResources [IndexText t u u u u]
$ \[r] -> resources{toReturn= r}
-- | Return the DBRefs of the registers whose field (first parameter, usually a container) contains the requested value.
containsElem :: (IResource a, Typeable a, Typeable b) => (a -> b) -> String -> STM [DBRef a]
containsElem sel wstr = do
let w = T.pack wstr
mr <- targs sel
case mr of
Nothing -> do
let fields = show $ typeOf sel
error $
"the index for " ++
fields ++ " do not exist. At main, use \"Data.TCache.IndexQuery.index\" to start indexing this field"
Just (IndexText _ n _ mmapIntString map1) ->
case M.lookup w map1 of
Nothing -> return []
Just integer -> do
let mns =
map
(\i ->
if testBit integer i
then Just i
else Nothing)
[0 .. n]
let wordsr = mapMaybe (`M.lookup` mmapIntString) $ catMaybes mns
return $ map getDBRef wordsr
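-- Usage sketch (illustrative names, mirroring the module header example): find the
-- registers whose 'authors' list contains a given author:
--
-- > byAuthor :: String -> STM [DBRef Doc]
-- > byAuthor a = authors `containsElem` a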
allElemsOf :: (IResource a, Typeable a, Typeable b) => (a -> b) -> STM [T.Text]
allElemsOf sel = do
mr <- targs sel
case mr of
Nothing -> return []
Just (IndexText _ _ _ _ map') -> return $ M.keys map'
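-- For example (illustrative field name), @atomically $ allElemsOf authors@ lists every
-- author value that has been indexed so far for the 'authors' field.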
words1 :: T.Text -> [T.Text]
-- The right-hand side of words1 was lost when this listing was extracted; only the
-- fragment "((<) 2 . T.length)" survives. A plausible reconstruction, consistent with
-- filterWordt below, is to split on whitespace and keep the accepted words:
words1 = filter filterWordt . T.words
-- | Return the DBRefs whose fields include all the words in the requested text contents, except
-- words shorter than three characters that contain neither digits nor uppercase letters; those are filtered out before making the query.
contains
  :: (IResource a, Typeable a, Typeable b)
  => (a -> b)  -- ^ field to search in
  -> String    -- ^ text to search
  -> STM [DBRef a]
contains sel str= case words str of
[] -> return []
[w] -> containsElem sel w
ws -> do
let rs = map (containsElem sel) $ filter filterWord ws
foldl1 (.&&.) rs
filterWordt :: T.Text -> Bool
filterWordt w = T.length w >2 || any (\c -> isUpper c || isDigit c) (T.unpack w)
filterWord :: Foldable t => t Char -> Bool
filterWord w = length w >2 || any (\c -> isUpper c || isDigit c) w
|
e578efa7e7e44120852347861b54fc94267a56f4467acec9fd4dadbb1caae42e | haskell-hint/hint | mk_extensions_mod.hs | import Language.Haskell.Extension
import Distribution.Text
import Text.PrettyPrint
main = writeFile "src/Hint/Extension.hs" $ render moduleDoc
moduleDoc :: Doc
moduleDoc =
vcat [
text "-- this module was automatically generated. do not edit!",
text "-- edit util/mk_extensions_mod.hs instead",
text "module Hint.Extension (",
text " Extension(..), supportedExtensions, availableExtensions, asExtension",
text ") where",
text "",
text "import qualified Hint.GHC as GHC",
text "",
text "supportedExtensions :: [String]",
text "supportedExtensions = map f GHC.xFlags",
text " where",
text " f = GHC.flagSpecName",
text "",
text "-- | List of the extensions known by the interpreter.",
text "availableExtensions :: [Extension]",
text "availableExtensions = map asExtension supportedExtensions",
text "",
text "asExtension :: String -> Extension",
text "asExtension s = if isKnown s",
text " then read s",
text " else let no_s = \"No\" ++ s",
text " in if isKnown no_s then read no_s",
text " else UnknownExtension s",
text " where isKnown e = e `elem` map show knownExtensions",
text "",
text "-- | This represents language extensions beyond Haskell 98",
text "-- that are supported by GHC (it was taken from",
text "-- Cabal's @Language.Haskell.Extension@)",
align "data Extension " $
punctuateL (text "| ") . onFirst (text "= ") $ known ++ [unknown],
nest 8 $ text "deriving (Eq, Show, Read)",
text "",
text "knownExtensions :: [Extension]",
align "knownExtensions = [" (punctuate comma known ++ [text "]"]),
text ""
]
allKnown :: [KnownExtension]
allKnown = [(minBound :: KnownExtension)..]
allPositive, allNegative :: [Extension]
allPositive = map EnableExtension allKnown
allNegative = map DisableExtension allKnown
known :: [Doc]
known = map disp (allPositive ++ allNegative)
unknown :: Doc
unknown = text "UnknownExtension String"
align :: String -> [Doc] -> Doc
align s [] = text s
align s (d:ds) = hang (text s <> d) (length s) (vcat ds)
-- punctuateL p [d1, ..., dn] = [d1, p <> d2, ..., p <> dn]
punctuateL :: Doc -> [Doc] -> [Doc]
punctuateL _ [] = []
punctuateL _ [d] = [d]
punctuateL p (d:ds) = d : map (p <>) ds
onFirst :: Doc -> [Doc] -> [Doc]
onFirst _ [] = []
onFirst p (d:ds) = p <> d : ds
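-- A small sketch of what the helpers above do (not part of the generator): for the
-- constructor list, @onFirst (text "= ")@ prefixes only the first alternative,
-- @punctuateL (text "| ")@ prefixes each later one, and @align@ hangs them under the
-- header, so the generated declaration comes out roughly as
--
-- > data Extension = OverlappingInstances
-- >                | UnliftedFFITypes
-- >                | ...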
| null | https://raw.githubusercontent.com/haskell-hint/hint/097e307dce86e39f5ccab044fd539b413348ece4/generate/mk_extensions_mod.hs | haskell | punctuateL p [d1, ..., dn] = [d1, p <> d2, ..., p <> dn] | import Language.Haskell.Extension
import Distribution.Text
import Text.PrettyPrint
main = writeFile "src/Hint/Extension.hs" $ render moduleDoc
moduleDoc :: Doc
moduleDoc =
vcat [
text "-- this module was automatically generated. do not edit!",
text "-- edit util/mk_extensions_mod.hs instead",
text "module Hint.Extension (",
text " Extension(..), supportedExtensions, availableExtensions, asExtension",
text ") where",
text "",
text "import qualified Hint.GHC as GHC",
text "",
text "supportedExtensions :: [String]",
text "supportedExtensions = map f GHC.xFlags",
text " where",
text " f = GHC.flagSpecName",
text "",
text "-- | List of the extensions known by the interpreter.",
text "availableExtensions :: [Extension]",
text "availableExtensions = map asExtension supportedExtensions",
text "",
text "asExtension :: String -> Extension",
text "asExtension s = if isKnown s",
text " then read s",
text " else let no_s = \"No\" ++ s",
text " in if isKnown no_s then read no_s",
text " else UnknownExtension s",
text " where isKnown e = e `elem` map show knownExtensions",
text "",
text "-- | This represents language extensions beyond Haskell 98",
text "-- that are supported by GHC (it was taken from",
text "-- Cabal's @Language.Haskell.Extension@)",
align "data Extension " $
punctuateL (text "| ") . onFirst (text "= ") $ known ++ [unknown],
nest 8 $ text "deriving (Eq, Show, Read)",
text "",
text "knownExtensions :: [Extension]",
align "knownExtensions = [" (punctuate comma known ++ [text "]"]),
text ""
]
allKnown :: [KnownExtension]
allKnown = [(minBound :: KnownExtension)..]
allPositive, allNegative :: [Extension]
allPositive = map EnableExtension allKnown
allNegative = map DisableExtension allKnown
known :: [Doc]
known = map disp (allPositive ++ allNegative)
unknown :: Doc
unknown = text "UnknownExtension String"
align :: String -> [Doc] -> Doc
align s [] = text s
align s (d:ds) = hang (text s <> d) (length s) (vcat ds)
punctuateL :: Doc -> [Doc] -> [Doc]
punctuateL _ [] = []
punctuateL _ [d] = [d]
punctuateL p (d:ds) = d : map (p <>) ds
onFirst :: Doc -> [Doc] -> [Doc]
onFirst _ [] = []
onFirst p (d:ds) = p <> d : ds
|
8dbc2443a9ce0c13de64f2c53a76815779c9b74eed9b90ab34a56d53d6213677 | christoff-buerger/racr | runtime-structure-example-slide.scm | ; This program and the accompanying materials are made available under the
; terms of the MIT license (X11 license) which accompanies this distribution.
; Author:
#!r6rs
(import (rnrs) (racr core) (composed-petrinets user-interface) (composed-petrinets analyses))
(define (run-tests)
(define net1
(petrinet:
net1 (p2) (p1)
((p1 'T1 'T1 'T1)
(p2 'T1 'T1))
(transition: trans1
((p1 (t1 (eq? t1 'T1)) (t2 (eq? t2 'T2)) (t3 (eq? t3 'T3)))
(p2 (t4 (eq? t4 'T1)) (t5 (eq? t5 'T3))))
((p2 'T1)))))
(define net2
(petrinet:
net2 (p1) (p1)
((p1 'T2))
(transition: trans1
((p1 (t1 (eq? t1 'T2)) (t2 (eq? t2 'T1))))
((p1 'T2)))
(transition: trans2
((p1 (t1 (eq? t1 'T3)) (t2 (eq? t2 'T3))))
((p1 'T2)))))
(define net3
(petrinet:
net3 (p2) (p1)
((p1 'T3 'T3)
(p2 'T3))
(transition: trans1
((p1 (t (eq? t 'T1))))
((p1 'T3)
(p2 'T3)))))
(define net1-net2-net3
(compose-petrinets:
net1
(compose-petrinets:
net2
net3
((net2 p1) (net3 p2)))
((net1 p1) (net2 p1))
((net3 p1) (net1 p2))))
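 ; Composition sketch: the two compose-petrinets: forms above glue net2's p1 to
 ; net3's p2, net1's p1 to net2's p1, and net3's p1 to net1's p2 -- exactly the
 ; fusion structure the assertions below check (three places fused for p1, two
 ; for p2).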
(assert
(equal? (=fused-places (=p-lookup net1 'p1))
(=fused-places (=p-lookup net2 'p1))))
(assert
(equal? (=fused-places (=p-lookup net1 'p1))
(=fused-places (=p-lookup net3 'p2))))
(assert
(equal? (=fused-places (=p-lookup net1 'p2))
(=fused-places (=p-lookup net3 'p1))))
(assert (= (length (=fused-places (=p-lookup net1 'p1))) 3))
(assert
(and
(member (=p-lookup net1 'p1)
(=fused-places (=p-lookup net1 'p1)))
(member (=p-lookup net2 'p1)
(=fused-places (=p-lookup net1 'p1)))
(member (=p-lookup net3 'p2)
(=fused-places (=p-lookup net1 'p1)))))
(assert (= (length (=fused-places (=p-lookup net1 'p2))) 2))
(assert
(and
(member (=p-lookup net1 'p2)
(=fused-places (=p-lookup net1 'p2)))
(member (=p-lookup net3 'p1)
(=fused-places (=p-lookup net1 'p2)))))
(assert-enabled net1-net2-net3
'(net1 trans1)
'(net2 trans1)
'(net3 trans1))
(fire-transition! (=t-lookup net2 'trans1))
(assert-enabled net1-net2-net3
'(net1 trans1)
'(net2 trans1)
'(net3 trans1))
(fire-transition! (=t-lookup net1 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3 trans1))
(fire-transition! (=t-lookup net3 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3 trans1))
(fire-transition! (=t-lookup net3 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2 trans2)
'(net3))
(fire-transition! (=t-lookup net2 'trans2))
(assert-enabled net1-net2-net3
'(net1)
'(net2 trans1)
'(net3))
(fire-transition! (=t-lookup net2 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3)))
(initialise-petrinet-language #t)
(run-tests) | null | https://raw.githubusercontent.com/christoff-buerger/racr/ecb2a9b8bf3f333550728cf45b97607a8298532f/examples/composed-petrinets/examples/runtime-structure-example-slide.scm | scheme | This program and the accompanying materials are made available under the | terms of the MIT license ( X11 license ) which accompanies this distribution .
Author :
#!r6rs
(import (rnrs) (racr core) (composed-petrinets user-interface) (composed-petrinets analyses))
(define (run-tests)
(define net1
(petrinet:
net1 (p2) (p1)
((p1 'T1 'T1 'T1)
(p2 'T1 'T1))
(transition: trans1
((p1 (t1 (eq? t1 'T1)) (t2 (eq? t2 'T2)) (t3 (eq? t3 'T3)))
(p2 (t4 (eq? t4 'T1)) (t5 (eq? t5 'T3))))
((p2 'T1)))))
(define net2
(petrinet:
net2 (p1) (p1)
((p1 'T2))
(transition: trans1
((p1 (t1 (eq? t1 'T2)) (t2 (eq? t2 'T1))))
((p1 'T2)))
(transition: trans2
((p1 (t1 (eq? t1 'T3)) (t2 (eq? t2 'T3))))
((p1 'T2)))))
(define net3
(petrinet:
net3 (p2) (p1)
((p1 'T3 'T3)
(p2 'T3))
(transition: trans1
((p1 (t (eq? t 'T1))))
((p1 'T3)
(p2 'T3)))))
(define net1-net2-net3
(compose-petrinets:
net1
(compose-petrinets:
net2
net3
((net2 p1) (net3 p2)))
((net1 p1) (net2 p1))
((net3 p1) (net1 p2))))
(assert
(equal? (=fused-places (=p-lookup net1 'p1))
(=fused-places (=p-lookup net2 'p1))))
(assert
(equal? (=fused-places (=p-lookup net1 'p1))
(=fused-places (=p-lookup net3 'p2))))
(assert
(equal? (=fused-places (=p-lookup net1 'p2))
(=fused-places (=p-lookup net3 'p1))))
(assert (= (length (=fused-places (=p-lookup net1 'p1))) 3))
(assert
(and
(member (=p-lookup net1 'p1)
(=fused-places (=p-lookup net1 'p1)))
(member (=p-lookup net2 'p1)
(=fused-places (=p-lookup net1 'p1)))
(member (=p-lookup net3 'p2)
(=fused-places (=p-lookup net1 'p1)))))
(assert (= (length (=fused-places (=p-lookup net1 'p2))) 2))
(assert
(and
(member (=p-lookup net1 'p2)
(=fused-places (=p-lookup net1 'p2)))
(member (=p-lookup net3 'p1)
(=fused-places (=p-lookup net1 'p2)))))
(assert-enabled net1-net2-net3
'(net1 trans1)
'(net2 trans1)
'(net3 trans1))
(fire-transition! (=t-lookup net2 'trans1))
(assert-enabled net1-net2-net3
'(net1 trans1)
'(net2 trans1)
'(net3 trans1))
(fire-transition! (=t-lookup net1 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3 trans1))
(fire-transition! (=t-lookup net3 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3 trans1))
(fire-transition! (=t-lookup net3 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2 trans2)
'(net3))
(fire-transition! (=t-lookup net2 'trans2))
(assert-enabled net1-net2-net3
'(net1)
'(net2 trans1)
'(net3))
(fire-transition! (=t-lookup net2 'trans1))
(assert-enabled net1-net2-net3
'(net1)
'(net2)
'(net3)))
(initialise-petrinet-language #t)
(run-tests) |
72f1737aaec2a1853468768e70cd12725ffd7fbe1d3f925483d098f8e7e5d6a0 | smuenzel/opile | path.mli | (**************************************************************************)
(*                                                                        *)
(*                                 OCaml                                  *)
(*                                                                        *)
(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)
(*                                                                        *)
(*   Copyright 1996 Institut National de Recherche en Informatique et     *)
(*     en Automatique.                                                    *)
(*                                                                        *)
(*   All rights reserved.  This file is distributed under the terms of    *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(*   special exception on linking described in the file LICENSE.          *)
(*                                                                        *)
(**************************************************************************)
(* Access paths *)
type t =
Pident of Ident.t
| Pdot of t * string
| Papply of t * t
val same: t -> t -> bool
val compare: t -> t -> int
val find_free_opt: Ident.t list -> t -> Ident.t option
val exists_free: Ident.t list -> t -> bool
val scope: t -> int
val flatten : t -> [ `Contains_apply | `Ok of Ident.t * string list ]
val name: ?paren:(string -> bool) -> t -> string
(* [paren] tells whether a path suffix needs parentheses *)
val head: t -> Ident.t
val print: Format.formatter -> t -> unit
val heads: t -> Ident.t list
val last: t -> string
type typath =
| Regular of t
| Ext of t * string
| LocalExt of Ident.t
| Cstr of t * string
val constructor_typath: t -> typath
val is_constructor_typath: t -> bool
module Map : Map.S with type key = t
module Set : Set.S with type elt = t
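(* Reading sketch: a dotted access path such as M.Sub.f is represented as
   Pdot (Pdot (Pident m, "Sub"), "f"), where m : Ident.t names the root module M;
   Papply represents functor applications such as F(X). *)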
| null | https://raw.githubusercontent.com/smuenzel/opile/190ca86df6440550d0fddf0222e16ba2e52bca64/raw-compiler/path.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Access paths
[paren] tells whether a path suffix needs parentheses | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type t =
Pident of Ident.t
| Pdot of t * string
| Papply of t * t
val same: t -> t -> bool
val compare: t -> t -> int
val find_free_opt: Ident.t list -> t -> Ident.t option
val exists_free: Ident.t list -> t -> bool
val scope: t -> int
val flatten : t -> [ `Contains_apply | `Ok of Ident.t * string list ]
val name: ?paren:(string -> bool) -> t -> string
val head: t -> Ident.t
val print: Format.formatter -> t -> unit
val heads: t -> Ident.t list
val last: t -> string
type typath =
| Regular of t
| Ext of t * string
| LocalExt of Ident.t
| Cstr of t * string
val constructor_typath: t -> typath
val is_constructor_typath: t -> bool
module Map : Map.S with type key = t
module Set : Set.S with type elt = t
|
1e6a5d3fdcf19b9b35e4bfa99af525dec523906b8c435b099c99f0785b36bbad | tfausak/exercism-solutions | Robot.hs | module Robot
( Bearing(..)
, Robot
, bearing
, coordinates
, mkRobot
, simulate
, turnLeft
, turnRight
) where
import Data.List (foldl')
type Point = (Int, Int)
data Bearing = North
| East
| South
| West
deriving (Bounded, Enum, Eq, Show)
data Robot = Robot
{ bearing :: Bearing
, coordinates :: Point
} deriving (Eq, Show)
mkRobot :: Bearing -> Point -> Robot
mkRobot = Robot
simulate :: Robot -> String -> Robot
simulate = foldl' simulate' where
simulate' r i = case i of
'A' -> advance r
'L' -> left r
'R' -> right r
advance r = mkRobot b c where
b = bearing r
(x, y) = coordinates r
c = case b of
North -> (x, y + 1)
East -> (x + 1, y)
South -> (x, y - 1)
West -> (x - 1, y)
left r = mkRobot (turnLeft (bearing r)) (coordinates r)
right r = mkRobot (turnRight (bearing r)) (coordinates r)
turn :: Int -> Bearing -> Bearing
turn x b = toEnum $ a `mod` n where
a = x + fromEnum b
n = 1 + fromEnum (maxBound :: Bearing)
turnLeft :: Bearing -> Bearing
turnLeft = turn (-1)
turnRight :: Bearing -> Bearing
turnRight = turn 1
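-- A worked example (not part of the exercise interface): starting at the origin
-- facing North, "AARA" advances twice, turns right and advances once more:
--
-- >>> simulate (mkRobot North (0, 0)) "AARA"
-- Robot {bearing = East, coordinates = (1,2)}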
| null | https://raw.githubusercontent.com/tfausak/exercism-solutions/5e6f93979cc61ef5e3b48a09ca316dfd7fcd319c/haskell/robot-simulator/Robot.hs | haskell | module Robot
( Bearing(..)
, Robot
, bearing
, coordinates
, mkRobot
, simulate
, turnLeft
, turnRight
) where
import Data.List (foldl')
type Point = (Int, Int)
data Bearing = North
| East
| South
| West
deriving (Bounded, Enum, Eq, Show)
data Robot = Robot
{ bearing :: Bearing
, coordinates :: Point
} deriving (Eq, Show)
mkRobot :: Bearing -> Point -> Robot
mkRobot = Robot
simulate :: Robot -> String -> Robot
simulate = foldl' simulate' where
simulate' r i = case i of
'A' -> advance r
'L' -> left r
'R' -> right r
advance r = mkRobot b c where
b = bearing r
(x, y) = coordinates r
c = case b of
North -> (x, y + 1)
East -> (x + 1, y)
South -> (x, y - 1)
West -> (x - 1, y)
left r = mkRobot (turnLeft (bearing r)) (coordinates r)
right r = mkRobot (turnRight (bearing r)) (coordinates r)
turn :: Int -> Bearing -> Bearing
turn x b = toEnum $ a `mod` n where
a = x + fromEnum b
n = 1 + fromEnum (maxBound :: Bearing)
turnLeft :: Bearing -> Bearing
turnLeft = turn (-1)
turnRight :: Bearing -> Bearing
turnRight = turn 1
|
|
38ab4e791b724a58f0d8b0db03a543cecf86b342386c19e129b2b81a0dd86b98 | anchpop/wise_mans_haskell | sillyMaybeDoTest.hs | result = do
x <- Just 0
x <- Just (x + 1)
x <- Just (x * 3)
x <- Just (show x)
Just ("And the answer is " ++ x) | null | https://raw.githubusercontent.com/anchpop/wise_mans_haskell/021ca3f3d96ebc0ecf2daf1a802fc33d067e24cc/haskelltests/should_compile/sillyMaybeDoTest.hs | haskell | result = do
x <- Just 0
x <- Just (x + 1)
x <- Just (x * 3)
x <- Just (show x)
Just ("And the answer is " ++ x) |
|
26fa077e0ec6ec2cec413bbd23761c165854d4c8a46db1027d8da485a4bb9a37 | awakesecurity/spectacle | Internal.hs | # LANGUAGE TypeFamilies #
-- |
-- Module : Language.Spectacle.Syntax.Error.Internal
-- Copyright : (c) Arista Networks, 2022-2023
-- License : Apache License 2.0, see LICENSE
--
-- Stability : stable
-- Portability : non-portable (GHC extensions)
--
-- TODO: docs
--
-- @since 1.0.0
module Language.Spectacle.Syntax.Error.Internal
( Error (ThrowE),
Effect (CatchE),
)
where
import Language.Spectacle.Lang (Effect, EffectK, ScopeK)
-- -------------------------------------------------------------------------------------------------
newtype Error e :: EffectK where
ThrowE :: e -> Error e a
data instance Effect (Error e) :: ScopeK where
CatchE :: m a -> (e -> m a) -> Effect (Error e) m a
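-- Reading sketch (the module docs are still TODO): 'ThrowE' aborts a computation
-- with an error of type @e@, while the scoped 'CatchE' effect pairs a computation
-- with a handler that is given the thrown error.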
| null | https://raw.githubusercontent.com/awakesecurity/spectacle/430680c28b26dabb50f466948180eb59ba72fc8e/src/Language/Spectacle/Syntax/Error/Internal.hs | haskell | |
Module : Language.Spectacle.Syntax.Error.Internal
Stability : stable
TODO: docs
@since 1.0.0
------------------------------------------------------------------------------------------------- | # LANGUAGE TypeFamilies #
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see LICENSE
Portability : non - portable ( GHC extensions )
module Language.Spectacle.Syntax.Error.Internal
( Error (ThrowE),
Effect (CatchE),
)
where
import Language.Spectacle.Lang (Effect, EffectK, ScopeK)
newtype Error e :: EffectK where
ThrowE :: e -> Error e a
data instance Effect (Error e) :: ScopeK where
CatchE :: m a -> (e -> m a) -> Effect (Error e) m a
|
5ba4c2df1b484b02be2adc0940f1be9209791c350664434df4be87c97a6dca4e | clojure/core.typed | base_env_common.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns clojure.core.typed.checker.base-env-common
"Utilities for all implementations of the type checker"
(:require [clojure.core.typed.checker.jvm.parse-unparse :as prs]
[clojure.tools.reader :as rdr]
[clojure.tools.reader.reader-types :as rdrs]))
(defmacro delay-and-cache-env [sym & body]
(let [generator-sym (symbol (str "generator-" sym))
cache-sym (symbol (str "cache-" sym))
thread-bindings (symbol (str "thread-bindings-" sym))
interface-sym sym]
`(do
(def ~thread-bindings (get-thread-bindings))
(defn ~(with-meta generator-sym {:private true}) []
; switch namespace to where this def is defined
; Also helps parse CLJS syntax.
(let [r# (with-bindings ~thread-bindings
~@body)]
;(prn "r" r#)
r#))
; cache is originally nil, then is updated only once
(def ~(with-meta cache-sym {:private true})
(atom nil))
(defn ~interface-sym []
(if-let [hit# (deref ~cache-sym)]
hit#
(let [calc# (~generator-sym)]
(reset! ~cache-sym calc#)))))))
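;; Usage sketch (hypothetical name): the macro expands into a private generator fn,
;; a private cache atom and a public accessor of the given name, so
;;   (delay-and-cache-env my-env (build-env))
;; defines (my-env), which runs (build-env) under the captured thread bindings the
;; first time it is called and returns the cached result afterwards.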
(defn parse-cljs-ann-map
[ann-map]
(into {}
(map (fn [[sym ann]]
[(symbol "cljs.core" (name sym))
(prs/parse-type ann)])
ann-map)))
(defn parse-clj-ann-map
[ann-map]
(let [conveyed-parse (bound-fn* prs/parse-type)]
(into {}
(map (fn [[sym ann]]
[sym (delay (conveyed-parse ann))])
ann-map))))
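;; For example (illustrative annotation),
;;   (parse-clj-ann-map {'clojure.core/inc '[Number -> Number]})
;; yields a map from the same symbol to a Delay that parses the type the first time it
;; is forced, whereas parse-cljs-ann-map above parses eagerly and requalifies the keys
;; into cljs.core.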
;;these annotations can be parsed in either {cljs,clojure}.core.typed
;;and have the same meaning.
;;ordered the same as in cljs.core
(def common-ann*
"
clojure.core/*1 Any
clojure.core/*2 Any
clojure.core/*3 Any
clojure.core/identical? [Any Any -> Boolean]
clojure.core/number? (Pred Number)
clojure.core/not [Any -> Boolean]
clojure.core/string? (Pred String)
clojure.core/type [Any -> Any]
clojure.core/aclone (All [x] [(ReadOnlyArray x) -> (Array x)])
clojure.core/aget
(All [x]
(IFn [(ReadOnlyArray x)
AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray x))
AnyInteger AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x)))
AnyInteger AnyInteger AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x))))
AnyInteger AnyInteger AnyInteger AnyInteger -> x]
; don't support unsound cases
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x)))))
AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger -> x]))
clojure.core/aset
(All [x]
(IFn
[(Array x) AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]))
clojure.core/alength [(ReadOnlyArray Any) -> AnyInteger]
;clojure.core/instance? [Class Any -> Boolean]
clojure.core/symbol? (Pred Symbol)
clojure.core/symbol (IFn [(U Symbol String) -> Symbol]
[(U nil String) String -> Symbol])
clojure.core/seq (All [x]
(IFn
[(NonEmptyColl x) -> (NonEmptyASeq x)]
[(Option (Coll x)) -> (Option (NonEmptyASeq x))
:filters {:then (& (is NonEmptyCount 0)
(! nil 0))
:else (| (is nil 0)
(is EmptyCount 0))}]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))]))
clojure.core/first (All [x]
(IFn [(HSequential [x Any *]) -> x
:object {:id 0 :path [(Nth 0)]}]
[(Option (EmptySeqable x)) -> nil]
[(NonEmptySeqable x) -> x]
[(Option (Seqable x)) -> (Option x)]))
clojure.core/rest (All [x]
[(Option (Seqable x)) -> (ASeq x)])
clojure.core/next (All [x]
(IFn [(Option (Coll x)) -> (Option (NonEmptyASeq x))
:filters {:then (& (is (CountRange 2) 0)
(! nil 0))
:else (| (is (CountRange 0 1) 0)
(is nil 0))}]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))]))
clojure.core/= [Any Any * -> Boolean]
;clojure.core/reduced (All [x] [x -> (Reduced x)])
;clojure.core/reduced? (Pred (Reduced Any))
clojure.core/second
(All [x]
(IFn [(HSequential [Any x Any *]) -> x
:object {:id 0 :path [(Nth 1)]}]
[(Option (I (Seqable x) (CountRange 0 1))) -> nil]
[(I (Seqable x) (CountRange 2)) -> x]
[(Option (Seqable x)) -> (Option x)]))
clojure.core/ffirst (All [x]
[(Option (Seqable (U nil (Seqable x)))) -> (Option x)])
;clojure.core/nfirst
#_(All [x]
[(Option (Seqable (Option (Seqable x))))
-> (Option (NonEmptyASeq x))])
;clojure.core/fnext
#_(All [x]
[(Option (Seqable (Option (Seqable x)))) -> (Option x)])
clojure.core/nnext (All [x]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))])
clojure.core/last (All [x]
(IFn [(NonEmptySeqable x) -> x]
[(Option (Seqable x)) -> (U nil x)]))
clojure.core/conj (All [x y]
(IFn [(IPersistentVector x) x x * -> (IPersistentVector x)]
[(APersistentMap x y)
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y))
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y)) *
-> (APersistentMap x y)]
[(IPersistentMap x y)
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y))
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y)) * -> (IPersistentMap x y)]
[(IPersistentSet x) x x * -> (IPersistentSet x)]
[(ASeq x) x x * -> (ASeq x)]
;[nil x x * -> (clojure.lang.PersistentList x)]
[(Coll Any) Any Any * -> (Coll Any)]))
clojure.core/get (All [x y]
(IFn
;;no default
[(U nil (Set x) (ILookup Any x)) Any -> (Option x)]
;[(Option String) Any -> (Option Character)]
;;default
[(U nil (Set x) (ILookup Any x)) Any y -> (U y x)]
[(Option (Map x y)) x -> (Option y)]
[(Option (Map x y)) x y -> y]
[(Option (Map Any Any)) Any -> (Option Any)]
[(Option (Map Any Any)) Any y -> (U y Any)]
;[(Option String) Any y -> (U y Character)]
))
clojure.core/assoc (All [b c d]
(IFn [(Map b c) b c -> (Map b c)]
[(Vec d) AnyInteger d -> (Vec d)]))
clojure.core/dissoc (All [k v]
(IFn [(Map k v) Any * -> (Map k v)]))
clojure.core/with-meta
#_(All [[x :< clojure.lang.IObj]]
[x (U nil (Map Any Any)) -> x])
clojure.core/meta [Any -> (U nil (Map Any Any))]
clojure.core/peek
(All [x]
(IFn [(I NonEmptyCount (Stack x)) -> x]
[(Stack x) -> x]))
;; (All [x]
;; (IFn
;; [(List x) -> (List x)]
;; [(Vec x) -> (Vec x)]
;; [(Stack x) -> (Stack x)]))
clojure.core/disj
(All [x]
(IFn #_[(SortedSet x) Any Any * -> (SortedSet x)]
[(Set x) Any Any * -> (Set x)]))
clojure.core/hash [Any -> AnyInteger]
clojure.core/empty? (IFn [(Option (HSequential [Any *])) -> Boolean
:filters {:then (| (is EmptyCount 0)
(is nil 0))
:else (is NonEmptyCount 0)}]
[(Option (Coll Any)) -> Boolean
:filters {:then (| (is EmptyCount 0)
(is nil 0))
:else (is NonEmptyCount 0)}]
[(Option (Seqable Any)) -> Boolean])
clojure.core/coll? (Pred (Coll Any))
;clojure.core/set? (Pred (Set Any))
;clojure.core/associative? (Pred (clojure.lang.Associative Any Any Any))
;clojure.core/sequential? (Pred clojure.lang.Sequential)
;clojure.core/sorted? (Pred Sorted)
clojure.core/map? (Pred (Map Any Any))
clojure.core/vector? (Pred (Vec Any))
clojure.core/chunked-seq? [Any -> Any]
clojure.core/false? (Pred false)
clojure.core/true? (Pred true)
clojure.core/seq? (Pred (Seq Any))
clojure.core/boolean [Any -> Boolean]
clojure.core/integer? (Pred AnyInteger)
clojure.core/contains? [(Option (Seqable Any)) Any -> Boolean]
clojure.core/find (All [x y]
[(U nil (Associative Any x y)) Any -> (U nil (HVec [x y]))])
clojure.core/distinct? [Any Any * -> Boolean]
clojure.core/compare [Any Any -> Number]
clojure.core/sort (All [x]
(IFn [(U nil (Seqable x)) -> (U nil (ASeq x))]
[[x x -> AnyInteger] (U nil (Seqable x)) -> (U nil (ASeq x))]))
clojure.core/shuffle (All [x]
(IFn [(I (Collection x) (Seqable x)) -> (Vec x)]
[(Collection x) -> (Vec x)]))
clojure.core/reduce
(All [a c]
(IFn
;;Without accumulator
;; default
;; (reduce + my-coll)
[[a c -> (U (Reduced a) a)] (NonEmptySeqable c) -> a]
[(IFn [a c -> (U (Reduced a) a)] [-> (U (Reduced a) a)]) (Option (Seqable c)) -> a]
;; default
;; (reduce + 3 my-coll)
[[a c -> (U (Reduced a) a)] a (Option (Seqable c)) -> a]))
clojure.core/reduce-kv
(All [a c k v]
[[a k v -> (U (Reduced a) a)] a (Option (Associative Any k v)) -> a])
clojure.core/< [Number Number * -> Boolean]
clojure.core/<= [Number Number * -> Boolean]
clojure.core/> [Number Number * -> Boolean]
clojure.core/>= [Number Number * -> Boolean]
clojure.core/== [Number Number * -> Boolean]
clojure.core/max [Number Number * -> Number]
clojure.core/min [Number Number * -> Number]
clojure.core/int (IFn [Number -> Integer]
;[Character -> Integer]
)
clojure.core/booleans [Any -> (Array boolean)]
clojure.core/ints [Any -> (Array int)]
clojure.core/mod (IFn [AnyInteger AnyInteger -> AnyInteger]
[Number Number -> Number])
clojure.core/rand (IFn [-> Number]
[Number -> Number])
clojure.core/rand-int [Int -> Int]
clojure.core/bit-xor [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-or [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-and-not [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-clear [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-flip [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-not [AnyInteger -> AnyInteger]
clojure.core/bit-set [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-test [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-shift-left [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-shift-right [AnyInteger AnyInteger -> AnyInteger]
clojure.core/pos? (IFn [Number -> Boolean])
clojure.core/neg? (IFn [Number -> Boolean])
clojure.core/nthnext
(All [x]
(IFn [nil AnyInteger -> nil]
[(Option (Seqable x)) AnyInteger -> (Option (NonEmptyASeq x))]))
clojure.core/str [Any * -> String]
clojure.core/subs (IFn [String AnyInteger -> String]
[String AnyInteger AnyInteger -> String])
clojure.core/hash-combine [AnyInteger Any -> AnyInteger]
;clojure.core/rseq
#_(All [x]
[(Reversible x) -> (Option (NonEmptyASeq x))])
clojure.core/reverse (All [x]
[(Option (Seqable x)) -> (ASeq x)])
clojure.core/list (All [x] [x * -> (PersistentList x)])
clojure.core/cons (All [x]
[x (Option (Seqable x)) -> (ASeq x)])
clojure.core/list? (Pred (List Any))
clojure.core/keyword? (Pred Keyword)
clojure.core/namespace [(U Symbol String Keyword) -> (Option String)]
clojure.core/keyword (IFn [(U Keyword Symbol String) -> Keyword]
[String String -> Keyword])
#_clojure.core/chunk-cons
#_(All [x]
[(clojure.lang.IChunk x) (Option (Seqable x)) -> (Option (Seqable x))])
#_clojure.core/chunk-append
#_(All [x]
[(clojure.lang.ChunkBuffer x) x -> Any])
#_clojure.core/chunk
#_(All [x]
[(clojure.lang.ChunkBuffer x) -> (clojure.lang.IChunk x)])
#_clojure.core/chunk-first #_(All [x]
;;should be IChunkedSeq -> IChunk
[(Seqable x) -> (clojure.lang.IChunk x)])
#_clojure.core/chunk-rest
#_(All [x]
;;should be IChunkRest -> Seq
[(clojure.lang.Seqable x) -> (ASeq x)])
clojure.core/int-array
(IFn [(U nil Number (Seqable Number)) -> (Array int)]
[Number (U nil Number (Seqable Number)) -> (Array int)])
clojure.core/concat (All [x] [(Option (Seqable x)) * -> (ASeq x)])
clojure.core/list*
(All [x]
(IFn [(U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]))
clojure.core/apply
(All [y a b c d r z ...]
(IFn [[z ... z -> y] (HSequential [z ... z]) -> y]
[[a z ... z -> y] a (HSequential [z ... z]) -> y]
[[a b z ... z -> y] a b (HSequential [z ... z]) -> y]
[[a b c z ... z -> y] a b c (HSequential [z ... z]) -> y]
[[a b c d z ... z -> y] a b c d (HSequential [z ... z]) -> y]
[[r * -> y] (U nil (Seqable r)) -> y]
[[a r * -> y] a (U nil (Seqable r)) -> y]
[[a b r * -> y] a b (U nil (Seqable r)) -> y]
[[a b c r * -> y] a b c (U nil (Seqable r)) -> y]
[[a b c d r * -> y] a b c d (U nil (Seqable r)) -> y]))
;clojure.core/vary-meta
#_(All [[x :< clojure.lang.IObj] b ...]
[x [(U nil (Map Any Any)) b ... b -> (U nil (Map Any Any))] b ... b -> x])
clojure.core/not= [Any Any * -> Boolean]
clojure.core/every?
(All [x y]
(IFn [[x -> Any :filters {:then (is y 0)}] (Coll x) -> Boolean
:filters {:then (is (Coll y) 1)}]
; argument could be nil
[[x -> Any :filters {:then (is y 0)}] (U nil (Coll x)) -> Boolean
:filters {:then (is (U nil (Coll y)) 1)}]
[[x -> Any] (U nil (Seqable x)) -> Boolean]))
clojure.core/some (All [x y] [[x -> y] (Option (Seqable x)) -> (Option y)])
clojure.core/even? [AnyInteger -> Boolean]
clojure.core/odd? [AnyInteger -> Boolean]
clojure.core/identity (All [x] [x -> x
:filters {:then (! (U nil false) 0)
:else (is (U nil false) 0)}
:object {:id 0}])
clojure.core/complement (All [x] [[x -> Any] -> [x -> Boolean]])
clojure.core/constantly (All [x] [x -> [Any * -> x]])
clojure.core/comp (All [x y b ...]
[[x -> y] [b ... b -> x] -> [b ... b -> y]])
clojure.core/partial
(All [y a b c d z ...]
(IFn [[z ... z -> y] -> [z ... z -> y]]
[[a z ... z -> y] a -> [z ... z -> y]]
[[a b z ... z -> y] a b -> [z ... z -> y]]
[[a b c z ... z -> y] a b c -> [z ... z -> y]]
[[a b c d z ... z -> y] a b c d -> [z ... z -> y]]
[[a * -> y] a * -> [a * -> y]]))
clojure.core/fnil
(All [x y z a b ...]
(IFn [[x b ... b -> a] x -> [(U nil x) b ... b -> a]]
[[x y b ... b -> a] x y -> [(U nil x) (U nil y) b ... b -> a]]
[[x y z b ... b -> a] x y z -> [(U nil x) (U nil y) (U nil z) b ... b -> a]]))
clojure.core/map-indexed
(All [x y] [[AnyInteger x -> y] (Option (Seqable x)) -> (Seqable y)])
clojure.core/every-pred
(All [t0 t1 t2 t3 t4 t5]
(IFn [[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
-> (IFn [Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1) 0) :else (! (I t0 t1) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2) 0) :else (! (I t0 t1 t2) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Boolean :filters {:then (is t3 0) :else (! t3 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3) 0) :else (! (I t0 t1 t2 t3) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Boolean :filters {:then (is t3 0) :else (! t3 0)}]
[Any -> Boolean :filters {:then (is t4 0) :else (! t4 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3 t4) 0) :else (! (I t0 t1 t2 t3 t4) 0)}]
[Any * -> Any])]
[[Any -> Any :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Any :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Any :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Any :filters {:then (is t3 0) :else (! t3 0)}]
[Any -> Any :filters {:then (is t4 0) :else (! t4 0)}]
[Any -> Any :filters {:then (is t5 0) :else (! t5 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3 t4 t5) 0) :else (! (I t0 t1 t2 t3 t4 t5) 0)}]
[Any * -> Any])]
[[Any -> Any] [Any -> Any] * -> [Any * -> Any]]))
clojure.core/map
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyASeq c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (ASeq c)]))
clojure.core/mapcat
(All [c b ...]
[[b ... b -> (Option (Seqable c))] (Option (Seqable b)) ... b -> (ASeq c)])
clojure.core/pmap
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyASeq c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (ASeq c)]))
clojure.core/take (All [x]
[AnyInteger (Seqable x) -> (ASeq x)])
clojure.core/drop (All [x]
[AnyInteger (Seqable x) -> (ASeq x)])
clojure.core/drop-last (All [x]
[AnyInteger (Seqable x) -> (NilableNonEmptyASeq x)])
clojure.core/take-last (All [x]
[AnyInteger (Seqable x) -> (NilableNonEmptyASeq x)])
clojure.core/drop-while (All [x]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)])
clojure.core/cycle (All [x]
[(U nil (Seqable x)) -> (ASeq x)])
clojure.core/split-at
(All [x y z]
[AnyInteger (Option (Seqable x)) -> '[(ASeq x) (ASeq x)]])
clojure.core/repeat (All [x]
(IFn [x -> (ASeq x)]
[AnyInteger x -> (ASeq x)]))
clojure.core/repeatedly (All [x]
(IFn [[-> x] -> (ASeq x)]
[AnyInteger [-> x] -> (ASeq x)]))
clojure.core/iterate (All [x]
[[x -> x] x -> (ASeq x)])
clojure.core/interleave (All [x] [(Option (Seqable x)) (Option (Seqable x)) (Option (Seqable x)) * -> (ASeq x)])
clojure.core/interpose (All [x] (IFn [x (Option (Seqable x)) -> (ASeq x)]))
clojure.core/filter
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any :filters {:then (! y 0)}] (Option (Seqable x)) -> (ASeq (I x (Not y)))]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/remove
(All [x y]
(IFn
[[x -> Any :filters {:else (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any :filters {:else (! y 0)}] (Option (Seqable x)) -> (ASeq (I x (Not y)))]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/flatten [Any -> Any]
clojure.core/into
(All [x y]
(IFn [(Map x y) (U nil (Seqable (U nil (Seqable (IMapEntry x y)) (IMapEntry x y) '[x y]))) -> (Map x y)]
[(Vec x) (U nil (Seqable x)) -> (Vec x)]
[(Set x) (U nil (Seqable x)) -> (Set x)]
[(Coll Any) (U nil (Seqable Any)) -> (Coll Any)]))
clojure.core/mapv
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyAVec c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (AVec c)]))
clojure.core/filterv
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (AVec y)]
[[x -> Any] (Option (Seqable x)) -> (AVec x)]))
clojure.core/get-in
(IFn [Any (U nil (Seqable Any)) -> Any]
[Any (U nil (Seqable Any)) Any -> Any])
clojure.core/assoc-in
[(U nil (Associative Any Any Any)) (Seqable Any) Any -> Any]
clojure.core/vec (All [x] [(Option (Seqable x)) -> (AVec x)])
clojure.core/vector (All [r b ...]
(IFn [b ... b -> '[b ... b]]
[r * -> (AVec r)]))
clojure.core/subvec (All [x]
(IFn [(Vec x) AnyInteger -> (Vec x)]
[(Vec x) AnyInteger AnyInteger -> (Vec x)]))
clojure.core/keys
(All [k]
[(Map k Any) -> (ASeq k) :object {:id 0 :path [Keys]}])
clojure.core/key (All [x]
[(IMapEntry x Any) -> x])
clojure.core/vals
(All [v]
[(Map Any v) -> (ASeq v) :object {:id 0 :path [Vals]}])
clojure.core/val (All [x]
[(IMapEntry Any x) -> x])
clojure.core/merge
(All [k v]
(IFn [nil * -> nil]
[(IPersistentMap k v) (IPersistentMap k v) * -> (IPersistentMap k v)]
[(Option (IPersistentMap k v)) * -> (Option (IPersistentMap k v))]))
clojure.core/merge-with
(All [k v]
(IFn [[v v -> v] nil * -> nil]
[[v v -> v] (Map k v) * -> (Map k v)]
[[v v -> v] (Option (Map k v)) * -> (Option (Map k v))]))
clojure.core/select-keys (All [k v] [(Map k v) (U nil (Seqable Any))
-> (Map k v)])
;clojure.core/set (All [x] [(Option (Seqable x)) -> (PersistentHashSet x)])
;clojure.core/hash-set (All [x] [x * -> (PersistentHashSet x)])
;clojure.core/sorted-set (All [x] [x * -> (PersistentTreeSet x)])
;clojure.core/sorted-set-by (All [x] [[x x -> AnyInteger] x * -> (PersistentTreeSet x)])
clojure.core/distinct (All [x] [(U nil (Seqable x)) -> (ASeq x)])
clojure.core/butlast (All [x]
[(Option (Seqable x)) -> (ASeq x)])
;clojure.core/name [(U String Named) -> String]
clojure.core/zipmap
(All [k v]
[(U nil (Seqable k)) (U nil (Seqable v)) -> (APersistentMap k v)])
clojure.core/max-key (All [x]
[[x -> Number] x x x * -> x])
clojure.core/min-key (All [x]
[[x -> Number] x x x * -> x])
clojure.core/partition-all
(All [x]
(IFn [Int (Nilable (Seqable x)) -> (ASeq (ASeq x))]
[Int Int (Nilable (Seqable x)) -> (ASeq (ASeq x))]))
clojure.core/take-while
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/range
(IFn [-> (ASeq AnyInteger)]
[Number -> (ASeq AnyInteger)]
[AnyInteger Number -> (ASeq AnyInteger)]
[Number Number -> (ASeq Number)]
[AnyInteger Number AnyInteger -> (ASeq AnyInteger)]
[Number Number Number -> (ASeq Number)])
clojure.core/take-nth (All [x] [AnyInteger (U nil (Seqable x)) -> (ASeq x)])
clojure.core/split-with
(All [x y z]
(IFn
[[x -> Any :filters {:then (is y 0), :else (is z 0)}] (Option (Seqable x)) -> '[(ASeq y) (ASeq z)]]
[[x -> Any] (Option (Seqable x)) -> '[(ASeq x) (ASeq x)]]))
clojure.core/dorun (IFn [(U nil (Seqable Any)) -> nil]
[AnyInteger (U nil (Seqable Any)) -> nil])
clojure.core/doall (All [[c :< (U nil (Seqable Any))]]
(IFn [c -> c]
[AnyInteger c -> c]))
clojure.core/newline [-> nil]
clojure.core/prn-str [Any * -> String]
clojure.core/pr-str [Any * -> String]
clojure.core/pr [Any * -> nil]
clojure.core/print [Any * -> nil]
clojure.core/println [Any * -> nil]
clojure.core/print-str [Any * -> String]
clojure.core/println-str [Any * -> String]
clojure.core/prn [Any * -> nil]
clojure.core/atom
(All [x]
[x & :optional {:validator (U nil [x -> Any]) :meta Any} -> (Atom2 x x)])
clojure.core/reset! (All [w r]
[(Atom2 w r) w -> w])
;clojure.core/deref
#_(All [x y]
(IFn
[(Deref x) -> x]
[(U (Deref Any) java.util.concurrent.Future) -> Any]
[(BlockingDeref x) AnyInteger y -> (U x y)]
[(U java.util.concurrent.Future (BlockingDeref Any)) AnyInteger Any -> Any]))
clojure.core/swap! (All [w r b ...]
[(Atom2 w r) [r b ... b -> w] b ... b -> w])
clojure.core/compare-and-set!
(All [w]
[(Atom2 w Any) Any w -> Boolean])
;clojure.core/set-validator!
#_(All [w]
[(clojure.lang.IRef w Any) (U nil [w -> Any]) -> Any])
;clojure.core/get-validator
#_(All [w]
[(clojure.lang.IRef w Any) -> (U nil [w -> Any])])
;clojure.core/alter-meta!
#_(All [b ...]
[clojure.lang.IReference [(U nil (Map Any Any)) b ... b -> (U nil (Map Any Any))] b ... b -> (U nil (Map Any Any))])
;clojure.core/reset-meta! [clojure.lang.IReference (U nil (Map Any Any)) -> (U nil (Map Any Any))]
;clojure.core/add-watch
#_(All [x [a :< (IRef Nothing x)]]
(IFn
;; this arity remembers the type of reference we pass to the function
[a Any [Any a x x -> Any] -> Any]
;; if the above cannot be inferred,
[(IRef Nothing x) Any [Any (IRef Nothing x) x x -> Any] -> Any]))
;clojure.core/remove-watch [(IRef Nothing Any) Any -> Any]
clojure.core/gensym (IFn [-> Symbol]
[(U Symbol String) -> Symbol])
clojure.core/delay? (Pred (Delay Any))
clojure.core/force (All [x]
(IFn [(Delay x) -> x]
[Any -> Any]))
;clojure.core/realized? [clojure.lang.IPending -> Boolean]
clojure.core/memoize (All [x y ...]
[[y ... y -> x] -> [y ... y -> x]])
clojure.core/trampoline
(All [r b ...]
[[b ... b -> (Rec [f] (U r [-> (U f r)]))]
b ... b -> r])
;clojure.core/make-hierarchy [-> Hierarchy]
clojure.core/isa? (IFn [Any Any -> Boolean]
;[Hierarchy Any Any -> Boolean]
)
;clojure.core/derive
#_(IFn [(U Symbol Keyword Class) (U Symbol Keyword) -> nil]
;[Hierarchy (U Symbol Keyword Class) (U Symbol Keyword) -> Hierarchy]
)
;clojure.core/prefer-method [Multi Any Any -> Any]
;clojure.core/methods [Multi -> (Map Any Any)]
;clojure.core/ex-info
#_(IFn [(U nil String) (Map Any Any) -> ExInfo]
[(U nil String) (Map Any Any) (U nil Throwable) -> ExInfo])
;clojure.core/ex-data
#_(IFn [ExInfo -> (Map Any Any)]
[Any -> (U nil (Map Any Any))])
clojure.core/special-symbol? [Any -> Boolean]
;;;;;;;; add-hook annotations just to improve coverage. correctness isn't assured
clojure.core/reductions (All [a b] (IFn [[a b -> a] (Seqable b) -> (ASeq a)]
[[a b -> a] a (Seqable b) -> (ASeq a)]))
clojure.core/reduced? [Any -> Boolean]
clojure.core/sequence (All [a] [(U nil (Seqable a)) -> (ASeq a)])
clojure.core/dec [Number -> Number]
clojure.core/inc [Number -> Number]
clojure.core/set (All [a] [(Coll a) -> (Set a)])
clojure.core/nfirst (All [a b c] [(Seqable (Seqable a)) -> (ASeq a)])
clojure.core/keep (All [a b] [[a -> (Option b)] (Coll a) -> (Option (ASeq b))])
clojure.core/seqable? (Pred (Seqable Any))
clojure.core/sort-by (All [a] (IFn [(Coll a) -> (ASeq a)]
[[a -> Number] (Coll a) -> (ASeq a)]))
clojure.core/replicate (All [a] [Number a -> (ASeq a)])
clojure.core/quot [Number Number -> Number]
clojure.core/partition (All [a] (IFn [Number (Coll a) -> (ASeq (ASeq a))]
[Number Number (Coll a) -> (ASeq (ASeq a))]
[Number Number Number (Coll a) -> (ASeq (ASeq a))]))
clojure.core/name [(U Keyword String Symbol) -> String]
;todo clojure.core/replace
clojure.core/fnext (All [a] [(Seqable a) -> a])
clojure.core/rem [Number Number -> Number]
clojure.core/frequencies (All [a] [(Seqable a) -> (Map a Int)])")
(def common-var-annotations
(delay
(let [r (rdrs/string-push-back-reader common-ann*)
eof (Object.)
os (loop [os []]
(let [a (rdr/read r false eof)]
(if (identical? eof a)
os
(recur (conj os a)))))
_ (assert (even? (count os)))]
(apply hash-map os))))
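;; The string above is read back as alternating var/annotation forms, so after the
;; delay is forced, e.g. (get @common-var-annotations 'clojure.core/even?) returns the
;; form [AnyInteger -> Boolean], ready to be parsed by the individual implementations.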
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/checker.jvm/src/clojure/core/typed/checker/base_env_common.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
switch namespace to where this def is defined
Also helps parse CLJS syntax.
(prn "r" r#)
cache is original nil, then is updated only once
these annotations can be parsed in either {cljs,clojure}.core.typed
and have the same meaning.
ordered the same as in cljs.core
don't support unsound cases
clojure.core/instance? [Class Any -> Booelan]
clojure.core/reduced (All [x] [x -> (Reduced x)])
clojure.core/reduced? (Pred (Reduced Any))
clojure.core/nfirst
clojure.core/fnext
no default
[(Option String) Any -> (Option Character)]
default
[(Option String) Any y -> (U y Character)]
(IFn
[(List x) -> (List x)]
[(Stack x) -> (Stack x)]))
clojure.core/set? (Pred (Set Any))
clojure.core/associative? (Pred (clojure.lang.Associative Any Any Any))
clojure.core/sequential? (Pred clojure.lang.Sequential)
clojure.core/sorted? (Pred Sorted)
Without accumulator
default
(reduce + my-coll)
default
(reduce + 3 my-coll)
[Character -> Integer]
clojure.core/rseq
should be IChunkedSeq -> IChunk
should be IChunkRest -> Seq
clojure.core/vary-meta
argument could be nil
clojure.core/set (All [x] [(Option (Seqable x)) -> (PersistentHashSet x)])
clojure.core/hash-set (All [x] [x * -> (PersistentHashSet x)])
clojure.core/sorted-set (All [x] [x * -> (PersistentTreeSet x)])
clojure.core/name [(U String Named) -> String]
clojure.core/deref
clojure.core/set-validator!
clojure.core/get-validator
clojure.core/alter-meta!
clojure.core/add-watch
this arity remembers the type of reference we pass to the function
if the above cannot be inferred,
clojure.core/realized? [clojure.lang.IPending -> Boolean]
clojure.core/make-hierarchy [-> Hierarchy]
[Hierarchy Any Any -> Boolean]
clojure.core/derive
clojure.core/methods [Multi -> (Map Any Any)]
clojure.core/ex-info
clojure.core/ex-data
add-hook annotations just to improve coverage. correctness isn't assured
todo clojure.core/replace | Copyright ( c ) , contributors .
(ns clojure.core.typed.checker.base-env-common
"Utilities for all implementations of the type checker"
(:require [clojure.core.typed.checker.jvm.parse-unparse :as prs]
[clojure.tools.reader :as rdr]
[clojure.tools.reader.reader-types :as rdrs]))
(defmacro delay-and-cache-env [sym & body]
(let [generator-sym (symbol (str "generator-" sym))
cache-sym (symbol (str "cache-" sym))
thread-bindings (symbol (str "thread-bindings-" sym))
interface-sym sym]
`(do
(def ~thread-bindings (get-thread-bindings))
(defn ~(with-meta generator-sym {:private true}) []
(let [r# (with-bindings ~thread-bindings
~@body)]
r#))
(def ~(with-meta cache-sym {:private true})
(atom nil))
(defn ~interface-sym []
(if-let [hit# (deref ~cache-sym)]
hit#
(let [calc# (~generator-sym)]
(reset! ~cache-sym calc#)))))))
(defn parse-cljs-ann-map
[ann-map]
(into {}
(map (fn [[sym ann]]
[(symbol "cljs.core" (name sym))
(prs/parse-type ann)])
ann-map)))
(defn parse-clj-ann-map
[ann-map]
(let [conveyed-parse (bound-fn* prs/parse-type)]
(into {}
(map (fn [[sym ann]]
[sym (delay (conveyed-parse ann))])
ann-map))))
(def common-ann*
"
clojure.core/*1 Any
clojure.core/*2 Any
clojure.core/*3 Any
clojure.core/identical? [Any Any -> Boolean]
clojure.core/number? (Pred Number)
clojure.core/not [Any -> Boolean]
clojure.core/string? (Pred String)
clojure.core/type [Any -> Any]
clojure.core/aclone (All [x] [(ReadOnlyArray x) -> (Array x)])
clojure.core/aget
(All [x]
(IFn [(ReadOnlyArray x)
AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray x))
AnyInteger AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x)))
AnyInteger AnyInteger AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x))))
AnyInteger AnyInteger AnyInteger AnyInteger -> x]
[(ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray (ReadOnlyArray x)))))
AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger -> x]))
clojure.core/aset
(All [x]
(IFn
[(Array x) AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]
[(Array x) AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger AnyInteger x -> x]))
clojure.core/alength [(ReadOnlyArray Any) -> AnyInteger]
clojure.core/symbol? (Pred Symbol)
clojure.core/symbol (IFn [(U Symbol String) -> Symbol]
[(U nil String) String -> Symbol])
clojure.core/seq (All [x]
(IFn
[(NonEmptyColl x) -> (NonEmptyASeq x)]
[(Option (Coll x)) -> (Option (NonEmptyASeq x))
:filters {:then (& (is NonEmptyCount 0)
(! nil 0))
:else (| (is nil 0)
(is EmptyCount 0))}]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))]))
clojure.core/first (All [x]
(IFn [(HSequential [x Any *]) -> x
:object {:id 0 :path [(Nth 0)]}]
[(Option (EmptySeqable x)) -> nil]
[(NonEmptySeqable x) -> x]
[(Option (Seqable x)) -> (Option x)]))
clojure.core/rest (All [x]
[(Option (Seqable x)) -> (ASeq x)])
clojure.core/next (All [x]
(IFn [(Option (Coll x)) -> (Option (NonEmptyASeq x))
:filters {:then (& (is (CountRange 2) 0)
(! nil 0))
:else (| (is (CountRange 0 1) 0)
(is nil 0))}]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))]))
clojure.core/= [Any Any * -> Boolean]
clojure.core/second
(All [x]
(IFn [(HSequential [Any x Any *]) -> x
:object {:id 0 :path [(Nth 1)]}]
[(Option (I (Seqable x) (CountRange 0 1))) -> nil]
[(I (Seqable x) (CountRange 2)) -> x]
[(Option (Seqable x)) -> (Option x)]))
clojure.core/ffirst (All [x]
[(Option (Seqable (U nil (Seqable x)))) -> (Option x)])
#_(All [x]
[(Option (Seqable (Option (Seqable x))))
-> (Option (NonEmptyASeq x))])
#_(All [x]
[(Option (Seqable (Option (Seqable x)))) -> (Option x)])
clojure.core/nnext (All [x]
[(Option (Seqable x)) -> (Option (NonEmptyASeq x))])
clojure.core/last (All [x]
(IFn [(NonEmptySeqable x) -> x]
[(Option (Seqable x)) -> (U nil x)]))
clojure.core/conj (All [x y]
(IFn [(IPersistentVector x) x x * -> (IPersistentVector x)]
[(APersistentMap x y)
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y))
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y)) *
-> (APersistentMap x y)]
[(IPersistentMap x y)
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y))
(U nil (Seqable (IMapEntry x y))
(IMapEntry x y) '[x y] (Map x y)) * -> (IPersistentMap x y)]
[(IPersistentSet x) x x * -> (IPersistentSet x)]
[(ASeq x) x x * -> (ASeq x)]
[nil x x * -> (clojure.lang.PersistentList x)]
[(Coll Any) Any Any * -> (Coll Any)]))
clojure.core/get (All [x y]
(IFn
[(U nil (Set x) (ILookup Any x)) Any -> (Option x)]
[(U nil (Set x) (ILookup Any x)) Any y -> (U y x)]
[(Option (Map x y)) x -> (Option y)]
[(Option (Map x y)) x y -> y]
[(Option (Map Any Any)) Any -> (Option Any)]
[(Option (Map Any Any)) Any y -> (U y Any)]
))
clojure.core/assoc (All [b c d]
(IFn [(Map b c) b c -> (Map b c)]
[(Vec d) AnyInteger d -> (Vec d)]))
clojure.core/dissoc (All [k v]
(IFn [(Map k v) Any * -> (Map k v)]))
#_clojure.core/with-meta
#_(All [[x :< clojure.lang.IObj]]
[x (U nil (Map Any Any)) -> x])
clojure.core/meta [Any -> (U nil (Map Any Any))]
clojure.core/peek
(All [x]
(IFn [(I NonEmptyCount (Stack x)) -> x]
[(Stack x) -> x]))
#_(All [x]
      [() -> ()])
clojure.core/disj
(All [x]
(IFn #_[(SortedSet x) Any Any * -> (SortedSet x)]
[(Set x) Any Any * -> (Set x)]))
clojure.core/hash [Any -> AnyInteger]
clojure.core/empty? (IFn [(Option (HSequential [Any *])) -> Boolean
:filters {:then (| (is EmptyCount 0)
(is nil 0))
:else (is NonEmptyCount 0)}]
[(Option (Coll Any)) -> Boolean
:filters {:then (| (is EmptyCount 0)
(is nil 0))
:else (is NonEmptyCount 0)}]
[(Option (Seqable Any)) -> Boolean])
clojure.core/coll? (Pred (Coll Any))
clojure.core/map? (Pred (Map Any Any))
clojure.core/vector? (Pred (Vec Any))
clojure.core/chunked-seq? [Any -> Any]
clojure.core/false? (Pred false)
clojure.core/true? (Pred true)
clojure.core/seq? (Pred (Seq Any))
clojure.core/boolean [Any -> Boolean]
clojure.core/integer? (Pred AnyInteger)
clojure.core/contains? [(Option (Seqable Any)) Any -> Boolean]
clojure.core/find (All [x y]
[(U nil (Associative Any x y)) Any -> (U nil (HVec [x y]))])
clojure.core/distinct? [Any Any * -> Boolean]
clojure.core/compare [Any Any -> Number]
clojure.core/sort (All [x]
(IFn [(U nil (Seqable x)) -> (U nil (ASeq x))]
[[x x -> AnyInteger] (U nil (Seqable x)) -> (U nil (ASeq x))]))
clojure.core/shuffle (All [x]
(IFn [(I (Collection x) (Seqable x)) -> (Vec x)]
[(Collection x) -> (Vec x)]))
clojure.core/reduce
(All [a c]
(IFn
[[a c -> (U (Reduced a) a)] (NonEmptySeqable c) -> a]
[(IFn [a c -> (U (Reduced a) a)] [-> (U (Reduced a) a)]) (Option (Seqable c)) -> a]
[[a c -> (U (Reduced a) a)] a (Option (Seqable c)) -> a]))
clojure.core/reduce-kv
(All [a c k v]
[[a k v -> (U (Reduced a) a)] a (Option (Associative Any k v)) -> a])
clojure.core/< [Number Number * -> Boolean]
clojure.core/<= [Number Number * -> Boolean]
clojure.core/> [Number Number * -> Boolean]
clojure.core/>= [Number Number * -> Boolean]
clojure.core/== [Number Number * -> Boolean]
clojure.core/max [Number Number * -> Number]
clojure.core/min [Number Number * -> Number]
clojure.core/int (IFn [Number -> Integer]
)
clojure.core/booleans [Any -> (Array boolean)]
clojure.core/ints [Any -> (Array int)]
clojure.core/mod (IFn [AnyInteger AnyInteger -> AnyInteger]
[Number Number -> Number])
clojure.core/rand (IFn [-> Number]
[Number -> Number])
clojure.core/rand-int [Int -> Int]
clojure.core/bit-xor [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-or [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-and-not [AnyInteger AnyInteger AnyInteger * -> AnyInteger]
clojure.core/bit-clear [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-flip [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-not [AnyInteger -> AnyInteger]
clojure.core/bit-set [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-test [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-shift-left [AnyInteger AnyInteger -> AnyInteger]
clojure.core/bit-shift-right [AnyInteger AnyInteger -> AnyInteger]
clojure.core/pos? (IFn [Number -> Boolean])
clojure.core/neg? (IFn [Number -> Boolean])
clojure.core/nthnext
(All [x]
(IFn [nil AnyInteger -> nil]
[(Option (Seqable x)) AnyInteger -> (Option (NonEmptyASeq x))]))
clojure.core/str [Any * -> String]
clojure.core/subs (IFn [String AnyInteger -> String]
[String AnyInteger AnyInteger -> String])
clojure.core/hash-combine [AnyInteger Any -> AnyInteger]
#_(All [x]
[(Reversible x) -> (Option (NonEmptyASeq x))])
clojure.core/reverse (All [x]
[(Option (Seqable x)) -> (ASeq x)])
clojure.core/list (All [x] [x * -> (PersistentList x)])
clojure.core/cons (All [x]
[x (Option (Seqable x)) -> (ASeq x)])
clojure.core/list? (Pred (List Any))
clojure.core/keyword? (Pred Keyword)
clojure.core/namespace [(U Symbol String Keyword) -> (Option String)]
clojure.core/keyword (IFn [(U Keyword Symbol String) -> Keyword]
[String String -> Keyword])
#_clojure.core/chunk-cons
#_(All [x]
[(clojure.lang.IChunk x) (Option (Seqable x)) -> (Option (Seqable x))])
#_clojure.core/chunk-append
#_(All [x]
[(clojure.lang.ChunkBuffer x) x -> Any])
#_clojure.core/chunk
#_(All [x]
[(clojure.lang.ChunkBuffer x) -> (clojure.lang.IChunk x)])
#_clojure.core/chunk-first #_(All [x]
[(Seqable x) -> (clojure.lang.IChunk x)])
#_clojure.core/chunk-rest
#_(All [x]
[(clojure.lang.Seqable x) -> (ASeq x)])
clojure.core/int-array
(IFn [(U nil Number (Seqable Number)) -> (Array int)]
[Number (U nil Number (Seqable Number)) -> (Array int)])
clojure.core/concat (All [x] [(Option (Seqable x)) * -> (ASeq x)])
clojure.core/list*
(All [x]
(IFn [(U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]
[x x x x x x x x x x (U nil (Seqable x)) -> (NilableNonEmptyASeq x)]))
clojure.core/apply
(All [y a b c d r z ...]
(IFn [[z ... z -> y] (HSequential [z ... z]) -> y]
[[a z ... z -> y] a (HSequential [z ... z]) -> y]
[[a b z ... z -> y] a b (HSequential [z ... z]) -> y]
[[a b c z ... z -> y] a b c (HSequential [z ... z]) -> y]
[[a b c d z ... z -> y] a b c d (HSequential [z ... z]) -> y]
[[r * -> y] (U nil (Seqable r)) -> y]
[[a r * -> y] a (U nil (Seqable r)) -> y]
[[a b r * -> y] a b (U nil (Seqable r)) -> y]
[[a b c r * -> y] a b c (U nil (Seqable r)) -> y]
[[a b c d r * -> y] a b c d (U nil (Seqable r)) -> y]))
#_(All [[x :< clojure.lang.IObj] b ...]
[x [(U nil (Map Any Any)) b ... b -> (U nil (Map Any Any))] b ... b -> x])
clojure.core/not= [Any Any * -> Boolean]
clojure.core/every?
(All [x y]
(IFn [[x -> Any :filters {:then (is y 0)}] (Coll x) -> Boolean
:filters {:then (is (Coll y) 1)}]
[[x -> Any :filters {:then (is y 0)}] (U nil (Coll x)) -> Boolean
:filters {:then (is (U nil (Coll y)) 1)}]
[[x -> Any] (U nil (Seqable x)) -> Boolean]))
clojure.core/some (All [x y] [[x -> y] (Option (Seqable x)) -> (Option y)])
clojure.core/even? [AnyInteger -> Boolean]
clojure.core/odd? [AnyInteger -> Boolean]
clojure.core/identity (All [x] [x -> x
:filters {:then (! (U nil false) 0)
:else (is (U nil false) 0)}
:object {:id 0}])
clojure.core/complement (All [x] [[x -> Any] -> [x -> Boolean]])
clojure.core/constantly (All [x] [x -> [Any * -> x]])
clojure.core/comp (All [x y b ...]
[[x -> y] [b ... b -> x] -> [b ... b -> y]])
clojure.core/partial
(All [y a b c d z ...]
(IFn [[z ... z -> y] -> [z ... z -> y]]
[[a z ... z -> y] a -> [z ... z -> y]]
[[a b z ... z -> y] a b -> [z ... z -> y]]
[[a b c z ... z -> y] a b c -> [z ... z -> y]]
[[a b c d z ... z -> y] a b c d -> [z ... z -> y]]
[[a * -> y] a * -> [a * -> y]]))
clojure.core/fnil
(All [x y z a b ...]
(IFn [[x b ... b -> a] x -> [(U nil x) b ... b -> a]]
[[x y b ... b -> a] x y -> [(U nil x) (U nil y) b ... b -> a]]
[[x y z b ... b -> a] x y z -> [(U nil x) (U nil y) (U nil z) b ... b -> a]]))
clojure.core/map-indexed
(All [x y] [[AnyInteger x -> y] (Option (Seqable x)) -> (Seqable y)])
clojure.core/every-pred
(All [t0 t1 t2 t3 t4 t5]
(IFn [[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
-> (IFn [Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1) 0) :else (! (I t0 t1) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2) 0) :else (! (I t0 t1 t2) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Boolean :filters {:then (is t3 0) :else (! t3 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3) 0) :else (! (I t0 t1 t2 t3) 0)}]
[Any * -> Any])]
[[Any -> Boolean :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Boolean :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Boolean :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Boolean :filters {:then (is t3 0) :else (! t3 0)}]
[Any -> Boolean :filters {:then (is t4 0) :else (! t4 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3 t4) 0) :else (! (I t0 t1 t2 t3 t4) 0)}]
[Any * -> Any])]
[[Any -> Any :filters {:then (is t0 0) :else (! t0 0)}]
[Any -> Any :filters {:then (is t1 0) :else (! t1 0)}]
[Any -> Any :filters {:then (is t2 0) :else (! t2 0)}]
[Any -> Any :filters {:then (is t3 0) :else (! t3 0)}]
[Any -> Any :filters {:then (is t4 0) :else (! t4 0)}]
[Any -> Any :filters {:then (is t5 0) :else (! t5 0)}]
-> (IFn [Any -> Boolean :filters {:then (is (I t0 t1 t2 t3 t4 t5) 0) :else (! (I t0 t1 t2 t3 t4 t5) 0)}]
[Any * -> Any])]
[[Any -> Any] [Any -> Any] * -> [Any * -> Any]]))
clojure.core/map
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyASeq c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (ASeq c)]))
clojure.core/mapcat
(All [c b ...]
[[b ... b -> (Option (Seqable c))] (Option (Seqable b)) ... b -> (ASeq c)])
clojure.core/pmap
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyASeq c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (ASeq c)]))
clojure.core/take (All [x]
[AnyInteger (Seqable x) -> (ASeq x)])
clojure.core/drop (All [x]
[AnyInteger (Seqable x) -> (ASeq x)])
clojure.core/drop-last (All [x]
[AnyInteger (Seqable x) -> (NilableNonEmptyASeq x)])
clojure.core/take-last (All [x]
[AnyInteger (Seqable x) -> (NilableNonEmptyASeq x)])
clojure.core/drop-while (All [x]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)])
clojure.core/cycle (All [x]
[(U nil (Seqable x)) -> (ASeq x)])
clojure.core/split-at
(All [x y z]
[AnyInteger (Option (Seqable x)) -> '[(ASeq x) (ASeq x)]])
clojure.core/repeat (All [x]
(IFn [x -> (ASeq x)]
[AnyInteger x -> (ASeq x)]))
clojure.core/repeatedly (All [x]
(IFn [[-> x] -> (ASeq x)]
[AnyInteger [-> x] -> (ASeq x)]))
clojure.core/iterate (All [x]
[[x -> x] x -> (ASeq x)])
clojure.core/interleave (All [x] [(Option (Seqable x)) (Option (Seqable x)) (Option (Seqable x)) * -> (ASeq x)])
clojure.core/interpose (All [x] (IFn [x (Option (Seqable x)) -> (ASeq x)]))
clojure.core/filter
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any :filters {:then (! y 0)}] (Option (Seqable x)) -> (ASeq (I x (Not y)))]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/remove
(All [x y]
(IFn
[[x -> Any :filters {:else (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any :filters {:else (! y 0)}] (Option (Seqable x)) -> (ASeq (I x (Not y)))]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/flatten [Any -> Any]
clojure.core/into
(All [x y]
(IFn [(Map x y) (U nil (Seqable (U nil (Seqable (IMapEntry x y)) (IMapEntry x y) '[x y]))) -> (Map x y)]
[(Vec x) (U nil (Seqable x)) -> (Vec x)]
[(Set x) (U nil (Seqable x)) -> (Set x)]
[(Coll Any) (U nil (Seqable Any)) -> (Coll Any)]))
clojure.core/mapv
(All [c a b ...]
(IFn [[a b ... b -> c] (NonEmptySeqable a) (NonEmptySeqable b) ... b -> (NonEmptyAVec c)]
[[a b ... b -> c] (U nil (Seqable a)) (U nil (Seqable b)) ... b -> (AVec c)]))
clojure.core/filterv
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (AVec y)]
[[x -> Any] (Option (Seqable x)) -> (AVec x)]))
clojure.core/get-in
(IFn [Any (U nil (Seqable Any)) -> Any]
[Any (U nil (Seqable Any)) Any -> Any])
clojure.core/assoc-in
[(U nil (Associative Any Any Any)) (Seqable Any) Any -> Any]
clojure.core/vec (All [x] [(Option (Seqable x)) -> (AVec x)])
clojure.core/vector (All [r b ...]
(IFn [b ... b -> '[b ... b]]
[r * -> (AVec r)]))
clojure.core/subvec (All [x]
(IFn [(Vec x) AnyInteger -> (Vec x)]
[(Vec x) AnyInteger AnyInteger -> (Vec x)]))
clojure.core/keys
(All [k]
[(Map k Any) -> (ASeq k) :object {:id 0 :path [Keys]}])
clojure.core/key (All [x]
[(IMapEntry x Any) -> x])
clojure.core/vals
(All [v]
[(Map Any v) -> (ASeq v) :object {:id 0 :path [Vals]}])
clojure.core/val (All [x]
[(IMapEntry Any x) -> x])
clojure.core/merge
(All [k v]
(IFn [nil * -> nil]
[(IPersistentMap k v) (IPersistentMap k v) * -> (IPersistentMap k v)]
[(Option (IPersistentMap k v)) * -> (Option (IPersistentMap k v))]))
clojure.core/merge-with
(All [k v]
(IFn [[v v -> v] nil * -> nil]
[[v v -> v] (Map k v) * -> (Map k v)]
[[v v -> v] (Option (Map k v)) * -> (Option (Map k v))]))
clojure.core/select-keys (All [k v] [(Map k v) (U nil (Seqable Any))
-> (Map k v)])
#_(All [x] [[x x -> AnyInteger] x * -> (PersistentTreeSet x)])
clojure.core/distinct (All [x] [(U nil (Seqable x)) -> (ASeq x)])
clojure.core/butlast (All [x]
[(Option (Seqable x)) -> (ASeq x)])
clojure.core/zipmap
(All [k v]
[(U nil (Seqable k)) (U nil (Seqable v)) -> (APersistentMap k v)])
clojure.core/max-key (All [x]
[[x -> Number] x x x * -> x])
clojure.core/min-key (All [x]
[[x -> Number] x x x * -> x])
clojure.core/partition-all
(All [x]
(IFn [Int (Nilable (Seqable x)) -> (ASeq (ASeq x))]
[Int Int (Nilable (Seqable x)) -> (ASeq (ASeq x))]))
clojure.core/take-while
(All [x y]
(IFn
[[x -> Any :filters {:then (is y 0)}] (Option (Seqable x)) -> (ASeq y)]
[[x -> Any] (Option (Seqable x)) -> (ASeq x)]))
clojure.core/range
(IFn [-> (ASeq AnyInteger)]
[Number -> (ASeq AnyInteger)]
[AnyInteger Number -> (ASeq AnyInteger)]
[Number Number -> (ASeq Number)]
[AnyInteger Number AnyInteger -> (ASeq AnyInteger)]
[Number Number Number -> (ASeq Number)])
clojure.core/take-nth (All [x] [AnyInteger (U nil (Seqable x)) -> (ASeq x)])
clojure.core/split-with
(All [x y z]
(IFn
[[x -> Any :filters {:then (is y 0), :else (is z 0)}] (Option (Seqable x)) -> '[(ASeq y) (ASeq z)]]
[[x -> Any] (Option (Seqable x)) -> '[(ASeq x) (ASeq x)]]))
clojure.core/dorun (IFn [(U nil (Seqable Any)) -> nil]
[AnyInteger (U nil (Seqable Any)) -> nil])
clojure.core/doall (All [[c :< (U nil (Seqable Any))]]
(IFn [c -> c]
[AnyInteger c -> c]))
clojure.core/newline [-> nil]
clojure.core/prn-str [Any * -> String]
clojure.core/pr-str [Any * -> String]
clojure.core/pr [Any * -> nil]
clojure.core/print [Any * -> nil]
clojure.core/println [Any * -> nil]
clojure.core/print-str [Any * -> String]
clojure.core/println-str [Any * -> String]
clojure.core/prn [Any * -> nil]
clojure.core/atom
(All [x]
[x & :optional {:validator (U nil [x -> Any]) :meta Any} -> (Atom2 x x)])
clojure.core/reset! (All [w r]
[(Atom2 w r) w -> w])
#_(All [x y]
(IFn
[(Deref x) -> x]
[(U (Deref Any) java.util.concurrent.Future) -> Any]
[(BlockingDeref x) AnyInteger y -> (U x y)]
[(U java.util.concurrent.Future (BlockingDeref Any)) AnyInteger Any -> Any]))
clojure.core/swap! (All [w r b ...]
[(Atom2 w r) [r b ... b -> w] b ... b -> w])
clojure.core/compare-and-set!
(All [w]
[(Atom2 w Any) Any w -> Boolean])
#_(All [w]
[(clojure.lang.IRef w Any) (U nil [w -> Any]) -> Any])
#_(All [w]
[(clojure.lang.IRef w Any) -> (U nil [w -> Any])])
#_(All [b ...]
[clojure.lang.IReference [(U nil (Map Any Any)) b ... b -> (U nil (Map Any Any))] b ... b -> (U nil (Map Any Any))])
clojure.core/reset-meta! [clojure.lang.IReference (U nil (Map Any Any)) -> (U nil (Map Any Any))]
#_(All [x [a :< (IRef Nothing x)]]
(IFn
[a Any [Any a x x -> Any] -> Any]
[(IRef Nothing x) Any [Any (IRef Nothing x) x x -> Any] -> Any]))
clojure.core/remove-watch [(IRef Nothing Any) Any -> Any]
clojure.core/gensym (IFn [-> Symbol]
[(U Symbol String) -> Symbol])
clojure.core/delay? (Pred (Delay Any))
clojure.core/force (All [x]
(IFn [(Delay x) -> x]
[Any -> Any]))
clojure.core/memoize (All [x y ...]
[[y ... y -> x] -> [y ... y -> x]])
clojure.core/trampoline
(All [r b ...]
[[b ... b -> (Rec [f] (U r [-> (U f r)]))]
b ... b -> r])
clojure.core/isa? (IFn [Any Any -> Boolean]
)
#_(IFn [(U Symbol Keyword Class) (U Symbol Keyword) -> nil]
[Hierarchy (U Symbol Keyword Class) (U Symbol Keyword) -> Hierarchy]
)
clojure.core/prefer-method [Multi Any Any -> Any]
#_(IFn [(U nil String) (Map Any Any) -> ExInfo]
[(U nil String) (Map Any Any) (U nil Throwable) -> ExInfo])
#_(IFn [ExInfo -> (Map Any Any)]
[Any -> (U nil (Map Any Any))])
clojure.core/special-symbol? [Any -> Boolean]
clojure.core/reductions (All [a b] (IFn [[a b -> a] (Seqable b) -> (ASeq a)]
[[a b -> a] a (Seqable b) -> (ASeq a)]))
clojure.core/reduced? [Any -> Boolean]
clojure.core/sequence (All [a] [(U nil (Seqable a)) -> (ASeq a)])
clojure.core/dec [Number -> Number]
clojure.core/inc [Number -> Number]
clojure.core/set (All [a] [(Coll a) -> (Set a)])
clojure.core/nfirst (All [a b c] [(Seqable (Seqable a)) -> (ASeq a)])
clojure.core/keep (All [a b] [[a -> (Option b)] (Coll a) -> (Option (ASeq b))])
clojure.core/seqable? (Pred (Seqable Any))
clojure.core/sort-by (All [a] (IFn [(Coll a) -> (ASeq a)]
[[a -> Number] (Coll a) -> (ASeq a)]))
clojure.core/replicate (All [a] [Number a -> (ASeq a)])
clojure.core/quot [Number Number -> Number]
clojure.core/partition (All [a] (IFn [Number (Coll a) -> (ASeq (ASeq a))]
[Number Number (Coll a) -> (ASeq (ASeq a))]
[Number Number Number (Coll a) -> (ASeq (ASeq a))]))
clojure.core/name [(U Keyword String Symbol) -> String]
clojure.core/fnext (All [a] [(Seqable a) -> a])
clojure.core/rem [Number Number -> Number]
clojure.core/frequencies (All [a] [(Seqable a) -> (Map a Int)])")
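;; Read the string above form by form (symbol, type, symbol, type, ...) and
;; turn the pairs into a map from var symbol to its unparsed type syntax.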
(def common-var-annotations
(delay
(let [r (rdrs/string-push-back-reader common-ann*)
eof (Object.)
os (loop [os []]
(let [a (rdr/read r false eof)]
(if (identical? eof a)
os
(recur (conj os a)))))
_ (assert (even? (count os)))]
(apply hash-map os))))
|
8771fabd642a3d0513f3aa55b6376552ec33a028672603b25b492ba5e6505a46 | jasonstolaruk/CurryMUD | ActionParams.hs | {-# OPTIONS_GHC -Wno-unused-do-bind #-}
{-# LANGUAGE OverloadedStrings, PatternSynonyms, RebindableSyntax, RecordWildCards, ViewPatterns #-}
module Mud.Data.State.ActionParams.ActionParams ( ActionParams( ..
, AdviseNoArgs
, AdviseOneArg
, Advising
, Ignoring
, Lower
, Lower'
, LowerNub
, LowerNub'
, Msg
, Msg'
, MsgWithTarget
, NoArgs
, NoArgs'
, NoArgs''
, OneArg
, OneArg'
, OneArgLower
, OneArgLower'
, OneArgNubbed
, WithArgs
, WithTarget )
, Cols ) where
import Mud.Data.State.ActionParams.Misc
import Mud.Data.State.MsgQueue
import Mud.Util.List
import Mud.Util.Quoting
import Data.List (nub)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as T
import Formatting ((%), sformat)
import Formatting.Formatters (string)
import Prelude hiding ((>>))
type Id = Int
type Cols = Int
type Args = [Text]
data ActionParams = ActionParams { myId :: Id
, plaMsgQueue :: MsgQueue
, plaCols :: Cols
, args :: Args }
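-- The message queue is elided from the Show output below; with RebindableSyntax
-- the local (>>) splices a 'string' hole between each pair of literals, so the
-- do-block builds the format string handed to sformat.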
instance Show ActionParams where
show ActionParams { .. } = showIt (show myId) (show plaCols) (show args)
where
showIt i cols = T.unpack . sformat m i cols
m = do "ActionParams {myId = "
", plaMsgQueue = elided, plaCols = "
", args = "
"}"
a >> b = a % string % b
-- ==================================================
-- Patterns matching type "ActionParams":
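-- Each synonym below destructures ActionParams (e.g. lower-casing or nubbing
-- the argument list) so command code can match directly on the shape it needs.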
pattern AdviseNoArgs :: ActionParams
pattern AdviseNoArgs <- NoArgs' _ _
pattern AdviseOneArg :: ActionParams
pattern AdviseOneArg <- WithArgs _ _ _ [_]
pattern Advising :: MsgQueue -> Cols -> ActionParams
pattern Advising mq cols <- WithArgs _ mq cols _
pattern Ignoring :: MsgQueue -> Cols -> Text -> ActionParams
pattern Ignoring mq cols as <- WithArgs _ mq cols (dblQuote . T.unwords -> as)
pattern Lower :: Id -> MsgQueue -> Cols -> [Text] -> ActionParams
pattern Lower i mq cols as <- WithArgs i mq cols (map T.toLower -> as)
pattern Lower' :: Id -> [Text] -> ActionParams
pattern Lower' i as <- Lower i _ _ as
pattern LowerNub :: Id -> MsgQueue -> Cols -> [Text] -> ActionParams
pattern LowerNub i mq cols as <- WithArgs i mq cols (nub . map T.toLower -> as)
pattern LowerNub' :: Id -> [Text] -> ActionParams
pattern LowerNub' i as <- LowerNub i _ _ as
pattern Msg :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern Msg i mq cols msg <- WithArgs i mq cols (formatMsgArgs -> msg)
pattern Msg' :: Id -> MsgQueue -> Text -> ActionParams
pattern Msg' i mq msg <- Msg i mq _ msg
pattern MsgWithTarget :: Id -> MsgQueue -> Cols -> Text -> Text -> ActionParams
pattern MsgWithTarget i mq cols target msg <- WithArgs i mq cols (formatMsgWithTargetArgs -> (target, msg))
pattern NoArgs :: Id -> MsgQueue -> Cols -> ActionParams
pattern NoArgs i mq cols = WithArgs i mq cols []
pattern NoArgs' :: Id -> MsgQueue -> ActionParams
pattern NoArgs' i mq <- NoArgs i mq _
pattern NoArgs'' :: Id -> ActionParams
pattern NoArgs'' i <- NoArgs' i _
pattern OneArg :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArg i mq cols a <- WithArgs i mq cols [a]
pattern OneArg' :: Id -> Text -> ActionParams
pattern OneArg' i a <- OneArg i _ _ a
pattern OneArgLower :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArgLower i mq cols a <- OneArg i mq cols (T.toLower -> a)
pattern OneArgLower' :: Id -> Text -> ActionParams
pattern OneArgLower' i a <- OneArgLower i _ _ a
pattern OneArgNubbed :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArgNubbed i mq cols a <- WithArgs i mq cols (nub . map T.toLower -> [a])
pattern WithArgs :: Id -> MsgQueue -> Cols -> Args -> ActionParams
pattern WithArgs i mq cols as = ActionParams { myId = i
, plaMsgQueue = mq
, plaCols = cols
, args = as }
pattern WithTarget :: Id -> MsgQueue -> Cols -> Text -> Text -> ActionParams
pattern WithTarget i mq cols target rest <- WithArgs i mq cols (headTail -> (target, T.unwords -> rest))
| null | https://raw.githubusercontent.com/jasonstolaruk/CurryMUD/f9775fb3ede08610f33f27bb1fb5fc0565e98266/lib/Mud/Data/State/ActionParams/ActionParams.hs | haskell | # OPTIONS_GHC -Wno-unused-do-bind #
==================================================
Patterns matching type "ActionParams": | # LANGUAGE OverloadedStrings , PatternSynonyms , RebindableSyntax , RecordWildCards , ViewPatterns #
module Mud.Data.State.ActionParams.ActionParams ( ActionParams( ..
, AdviseNoArgs
, AdviseOneArg
, Advising
, Ignoring
, Lower
, Lower'
, LowerNub
, LowerNub'
, Msg
, Msg'
, MsgWithTarget
, NoArgs
, NoArgs'
, NoArgs''
, OneArg
, OneArg'
, OneArgLower
, OneArgLower'
, OneArgNubbed
, WithArgs
, WithTarget )
, Cols ) where
import Mud.Data.State.ActionParams.Misc
import Mud.Data.State.MsgQueue
import Mud.Util.List
import Mud.Util.Quoting
import Data.List (nub)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as T
import Formatting ((%), sformat)
import Formatting.Formatters (string)
import Prelude hiding ((>>))
type Id = Int
type Cols = Int
type Args = [Text]
data ActionParams = ActionParams { myId :: Id
, plaMsgQueue :: MsgQueue
, plaCols :: Cols
, args :: Args }
instance Show ActionParams where
show ActionParams { .. } = showIt (show myId) (show plaCols) (show args)
where
showIt i cols = T.unpack . sformat m i cols
m = do "ActionParams {myId = "
", plaMsgQueue = elided, plaCols = "
", args = "
"}"
a >> b = a % string % b
pattern AdviseNoArgs :: ActionParams
pattern AdviseNoArgs <- NoArgs' _ _
pattern AdviseOneArg :: ActionParams
pattern AdviseOneArg <- WithArgs _ _ _ [_]
pattern Advising :: MsgQueue -> Cols -> ActionParams
pattern Advising mq cols <- WithArgs _ mq cols _
pattern Ignoring :: MsgQueue -> Cols -> Text -> ActionParams
pattern Ignoring mq cols as <- WithArgs _ mq cols (dblQuote . T.unwords -> as)
pattern Lower :: Id -> MsgQueue -> Cols -> [Text] -> ActionParams
pattern Lower i mq cols as <- WithArgs i mq cols (map T.toLower -> as)
pattern Lower' :: Id -> [Text] -> ActionParams
pattern Lower' i as <- Lower i _ _ as
pattern LowerNub :: Id -> MsgQueue -> Cols -> [Text] -> ActionParams
pattern LowerNub i mq cols as <- WithArgs i mq cols (nub . map T.toLower -> as)
pattern LowerNub' :: Id -> [Text] -> ActionParams
pattern LowerNub' i as <- LowerNub i _ _ as
pattern Msg :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern Msg i mq cols msg <- WithArgs i mq cols (formatMsgArgs -> msg)
pattern Msg' :: Id -> MsgQueue -> Text -> ActionParams
pattern Msg' i mq msg <- Msg i mq _ msg
pattern MsgWithTarget :: Id -> MsgQueue -> Cols -> Text -> Text -> ActionParams
pattern MsgWithTarget i mq cols target msg <- WithArgs i mq cols (formatMsgWithTargetArgs -> (target, msg))
pattern NoArgs :: Id -> MsgQueue -> Cols -> ActionParams
pattern NoArgs i mq cols = WithArgs i mq cols []
pattern NoArgs' :: Id -> MsgQueue -> ActionParams
pattern NoArgs' i mq <- NoArgs i mq _
pattern NoArgs'' :: Id -> ActionParams
pattern NoArgs'' i <- NoArgs' i _
pattern OneArg :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArg i mq cols a <- WithArgs i mq cols [a]
pattern OneArg' :: Id -> Text -> ActionParams
pattern OneArg' i a <- OneArg i _ _ a
pattern OneArgLower :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArgLower i mq cols a <- OneArg i mq cols (T.toLower -> a)
pattern OneArgLower' :: Id -> Text -> ActionParams
pattern OneArgLower' i a <- OneArgLower i _ _ a
pattern OneArgNubbed :: Id -> MsgQueue -> Cols -> Text -> ActionParams
pattern OneArgNubbed i mq cols a <- WithArgs i mq cols (nub . map T.toLower -> [a])
pattern WithArgs :: Id -> MsgQueue -> Cols -> Args -> ActionParams
pattern WithArgs i mq cols as = ActionParams { myId = i
, plaMsgQueue = mq
, plaCols = cols
, args = as }
pattern WithTarget :: Id -> MsgQueue -> Cols -> Text -> Text -> ActionParams
pattern WithTarget i mq cols target rest <- WithArgs i mq cols (headTail -> (target, T.unwords -> rest))
|
4d8c240f01ad9858d7b5cb62fd8af15c7b3009e8e35f5283ce44c86f90bada7d | timgilbert/haunting-refrain-posh | seed.cljs | (ns haunting-refrain.datascript.seed
(:require [shodan.console :as console]
[datascript.core :as d]
[haunting-refrain.model.input :as input]
[haunting-refrain.datascript.util :as u]))
;; Every checkin has a list of fields, one of which can be selected at random to use as a
;; search string. Every randomly-selected field is a single datum, which we use to search
;; music providers for songs. We store these datums as seeds and hang a list of songs off
;; of them. When the checkin list is shuffled, we re-generate seeds for any fields we
;; haven't seen yet.
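;; Pick a random checkin field for the given track and either reuse the seed
;; entity that already holds that datum or create a fresh one linked to the track.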
(defn generate-seed [db track-eid]
(let [track (d/pull db [:db/id {:track/checkin [:*]} {:track/seed [:*]}] track-eid)
checkin (d/entity db (-> track :track/checkin :db/id))
field (input/random-field checkin)
datum (input/sanitize-datum field (get checkin field))
extant (d/q '[:find ?d .
:in $ ?datum
:where [?d :seed/datum ?datum]]
db datum)]
(if extant
;; If a field already exists, just update the track to use it
{:db/id extant
:track/_seed track-eid}
;; Otherwise create a new seed element and assign it to the track
{:db/id (d/tempid :db.part/user)
:seed/checkin (:db/id checkin)
:seed/field field
:seed/datum datum
:track/_seed track-eid})))
(defn attach-seeds-to-playlist!
"Given a playlist, attach a random seed to each of its tracks that doesn't already have one
by selecting a random field from the checkin data associated with the track."
[conn playlist-eid]
(let [db (d/db conn)
tr (u/get-playlist-tracks conn playlist-eid)
seeds (map (partial generate-seed db) tr)]
(d/transact! conn seeds)))
| null | https://raw.githubusercontent.com/timgilbert/haunting-refrain-posh/99a7daafe54c5905a3d1b0eff691b5c602ad6d8d/src/cljs/haunting_refrain/datascript/seed.cljs | clojure | search string. Every randomly-selected field is a single datum, which we use to search
music providers for songs. We store these datums as seeds and hang a list of songs off
of them. When the checkin list is shuffled, we re-generate seeds for any fields we
haven't seen yet.
If a field already exists, just update the track to use it
Otherwise create a new seed element and assign it to the track | (ns haunting-refrain.datascript.seed
(:require [shodan.console :as console]
[datascript.core :as d]
[haunting-refrain.model.input :as input]
[haunting-refrain.datascript.util :as u]))
Every checkin has a list of fields , one of which can be selected at random to use as a
(defn generate-seed [db track-eid]
(let [track (d/pull db [:db/id {:track/checkin [:*]} {:track/seed [:*]}] track-eid)
checkin (d/entity db (-> track :track/checkin :db/id))
field (input/random-field checkin)
datum (input/sanitize-datum field (get checkin field))
extant (d/q '[:find ?d .
:in $ ?datum
:where [?d :seed/datum ?datum]]
db datum)]
(if extant
{:db/id extant
:track/_seed track-eid}
{:db/id (d/tempid :db.part/user)
:seed/checkin (:db/id checkin)
:seed/field field
:seed/datum datum
:track/_seed track-eid})))
(defn attach-seeds-to-playlist!
"Given a playlist, attach a random seed to each of its tracks that doesn't already have one
by selecting a random field from the checkin data associated with the track."
[conn playlist-eid]
(let [db (d/db conn)
tr (u/get-playlist-tracks conn playlist-eid)
seeds (map (partial generate-seed db) tr)]
(d/transact! conn seeds)))
|
0debb74d024d6bdb909efdaf69cdefe11d87d9bcbdc1b8f9eaa9973e8b3154f5 | ragkousism/Guix-on-Hurd | llvm.scm | ;;; GNU Guix --- Functional package management for GNU
;;; Copyright © 2014, 2016 < >
;;; Copyright © 2015 < >
;;; Copyright © 2015, 2017 < >
;;; Copyright © 2016 < >
;;; Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
;;; GNU Guix is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Guix.  If not, see <http://www.gnu.org/licenses/>.
(define-module (gnu packages llvm)
#:use-module (guix packages)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cmake)
#:use-module (gnu packages)
#:use-module (gnu packages gcc)
#:use-module (gnu packages bootstrap) ;glibc-dynamic-linker
#:use-module (gnu packages compression)
#:use-module (gnu packages libffi)
#:use-module (gnu packages perl)
#:use-module (gnu packages python)
#:use-module (gnu packages xml))
(define-public llvm
(package
(name "llvm")
(version "3.8.1")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"1ybmnid4pw2hxn12ax5qa5kl1ldfns0njg8533y3mzslvd5cx0kf"))))
(build-system cmake-build-system)
(native-inputs
`(("python" ,python-2) ;bytes->str conversion in clang>=3.7 needs python-2
("perl" ,perl)))
(inputs
`(("libffi" ,libffi)))
(propagated-inputs
`(("zlib" ,zlib))) ;to use output from llvm-config
(arguments
`(#:configure-flags '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE"
"-DBUILD_SHARED_LIBS:BOOL=TRUE"
"-DLLVM_ENABLE_FFI:BOOL=TRUE"
"-DLLVM_INSTALL_UTILS=ON") ; Needed for rustc.
;; Don't use '-g' during the build, to save space.
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-before 'build 'shared-lib-workaround
;; Even with CMAKE_SKIP_BUILD_RPATH=FALSE, llvm-tblgen
;; doesn't seem to get the correct rpath to be able to run
;; from the build directory. Set LD_LIBRARY_PATH as a
;; workaround.
(lambda _
(setenv "LD_LIBRARY_PATH"
(string-append (getcwd) "/lib"))
#t)))))
(home-page "")
(synopsis "Optimizing compiler infrastructure")
(description
"LLVM is a compiler infrastructure designed for compile-time, link-time,
runtime, and idle-time optimization of programs from arbitrary programming
languages. It currently supports compilation of C and C++ programs, using
front-ends derived from GCC 4.0.1. A new front-end for the C family of
languages is in development. The compiler infrastructure includes mirror sets
of programming tools as well as libraries with equivalent functionality.")
(license license:ncsa)))
(define-public llvm-with-rtti
(package (inherit llvm)
(name "llvm-with-rtti")
(arguments
(substitute-keyword-arguments (package-arguments llvm)
((#:configure-flags flags)
`(append '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE"
"-DLLVM_REQUIRES_RTTI=1")
,flags))))))
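;; Build a compiler-rt ("clang-runtime") package that matches the version of the
;; given LLVM package; HASH is the sha256 of the corresponding source tarball.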
(define (clang-runtime-from-llvm llvm hash)
(package
(name "clang-runtime")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/compiler-rt-" version ".src.tar.xz"))
(sha256 (base32 hash))))
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("llvm" ,llvm)))
(arguments
`(;; Don't use '-g' during the build to save space.
#:build-type "Release"
#:tests? #f)) ; Tests require gtest
(home-page "-rt.llvm.org")
(synopsis "Runtime library for Clang/LLVM")
(description
"The \"clang-runtime\" library provides the implementations of run-time
functions for C and C++ programs. It also provides header files that allow C
and C++ source code to interface with the \"sanitization\" passes of the clang
compiler. In LLVM this library is called \"compiler-rt\".")
(license license:ncsa)
;; <http://compiler-rt.llvm.org/> doesn't list MIPS as supported.
(supported-systems (delete "mips64el-linux" %supported-systems))))
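;; Build a Clang package against the given LLVM and clang-runtime; PATCHES lets
;; individual versions override the default libc search-path patch.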
(define* (clang-from-llvm llvm clang-runtime hash
#:key (patches '("clang-libc-search-path.patch")))
(package
(name "clang")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/cfe-" version ".src.tar.xz"))
(sha256 (base32 hash))
(patches (map search-patch patches))))
;; Using cmake allows us to treat llvm as an external library.  There
;; doesn't seem to be any way to do this with clang's autotools-based
;; build system.
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("libxml2" ,libxml2)
("gcc-lib" ,gcc "lib")
,@(package-inputs llvm)))
(propagated-inputs
`(("llvm" ,llvm)
("clang-runtime" ,clang-runtime)))
(arguments
`(#:configure-flags
(list "-DCLANG_INCLUDE_TESTS=True"
;; Find libgcc_s, crtbegin.o, and crtend.o.
(string-append "-DGCC_INSTALL_PREFIX="
(assoc-ref %build-inputs "gcc-lib"))
;; Use a sane default include directory.
(string-append "-DC_INCLUDE_DIRS="
(assoc-ref %build-inputs "libc")
"/include"))
;; Don't use '-g' during the build to save space.
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-after
'unpack 'set-glibc-file-names
(lambda* (#:key inputs #:allow-other-keys)
(let ((libc (assoc-ref inputs "libc"))
(compiler-rt (assoc-ref inputs "clang-runtime")))
(substitute* "lib/Driver/Tools.cpp"
;; Patch the 'getLinuxDynamicLinker' function so that
;; it uses the right dynamic linker file name.
(("/lib64/ld-linux-x86-64.so.2")
(string-append libc
,(glibc-dynamic-linker)))
;; Link to files from clang-runtime.
(("TC\\.getDriver\\(\\)\\.ResourceDir")
(string-append "\"" compiler-rt "\"")))
;; Same for libc's, to allow crt1.o & co. to be
;; found.
(substitute* "lib/Driver/ToolChains.cpp"
(("@GLIBC_LIBDIR@")
(string-append libc "/lib")))))))))
;; Clang supports the same environment variables as GCC.
(native-search-paths
(list (search-path-specification
(variable "CPATH")
(files '("include")))
(search-path-specification
(variable "LIBRARY_PATH")
(files '("lib" "lib64")))))
(home-page "")
(synopsis "C language family frontend for LLVM")
(description
"Clang is a compiler front end for the C, C++, Objective-C and
Objective-C++ programming languages. It uses LLVM as its back end. The Clang
project includes the Clang front end, the Clang static analyzer, and several
code analysis tools.")
(license license:ncsa)))
(define-public clang-runtime
(clang-runtime-from-llvm
llvm
"0p0y85c7izndbpg2l816z7z7558axq11d5pwkm4h11sdw7d13w0d"))
(define-public clang
(clang-from-llvm llvm clang-runtime
"1prc72xmkgx8wrzmrr337776676nhsp1qd3mw2bvb22bzdnq7lsc"
#:patches '("clang-3.8-libc-search-path.patch")))
(define-public llvm-3.7
(package (inherit llvm)
(version "3.7.1")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"1masakdp9g2dan1yrazg7md5am2vacbkb3nahb3dchpc1knr8xxy"))))))
(define-public clang-runtime-3.7
(clang-runtime-from-llvm
llvm-3.7
"10c1mz2q4bdq9bqfgr3dirc6hz1h3sq8573srd5q5lr7m7j6jiwx"))
(define-public clang-3.7
(clang-from-llvm llvm-3.7 clang-runtime-3.7
"0x065d0w9b51xvdjxwfzjxng0gzpbx45fgiaxpap45ragi61dqjn"))
(define-public llvm-3.6
(package (inherit llvm)
(version "3.6.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"153vcvj8gvgwakzr4j0kndc0b7wn91c2g1vy2vg24s6spxcc23gn"))))))
(define-public clang-runtime-3.6
(clang-runtime-from-llvm
llvm-3.6
"11qx8d3pbfqjaj2x207pvlvzihbs1z2xbw4crpz7aid6h1yz6bqg"))
(define-public clang-3.6
(clang-from-llvm llvm-3.6 clang-runtime-3.6
"1wwr8s6lzr324hv4s1k6na4j5zv6n9kdhi14s4kb9b13d93814df"))
(define-public llvm-3.5
(package (inherit llvm)
(version "3.5.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"0xf5q17kkxsrm2gsi93h4pwlv663kji73r2g4asb97klsmb626a4"))))))
(define-public clang-runtime-3.5
(clang-runtime-from-llvm
llvm-3.5
"1hsdnzzdr5kglz6fnv3lcsjs222zjsy14y8ax9dy6zqysanplbal"))
(define-public clang-3.5
(clang-from-llvm llvm-3.5 clang-runtime-3.5
"0846h8vn3zlc00jkmvrmy88gc6ql6014c02l4jv78fpvfigmgssg"))
(define-public llvm-for-extempore
(package (inherit llvm-3.7)
(name "llvm-for-extempore")
(source
(origin
(inherit (package-source llvm-3.7))
(patches (list (search-patch "llvm-for-extempore.patch")))))
;; Extempore refuses to build on architectures other than x86_64.
(supported-systems '("x86_64-linux"))))
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/gnu/packages/llvm.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
glibc-dynamic-linker
bytes->str conversion in clang>=3.7 needs python-2
Needed for rustc.
Don't use '-g' during the build, to save space.
doesn't seem to get the correct rpath to be able to run
from the build directory. Set LD_LIBRARY_PATH as a
workaround.
Don't use '-g' during the build to save space.
build system.
Find libgcc_s, crtbegin.o, and crtend.o.
Use a sane default include directory.
Don't use '-g' during the build to save space.
Patch the 'getLinuxDynamicLinker' function to that
it uses the right dynamic linker file name.
found. | Copyright © 2014 , 2016 < >
Copyright © 2015 < >
Copyright © 2015 , 2017 < >
Copyright © 2016 < >
Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages llvm)
#:use-module (guix packages)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cmake)
#:use-module (gnu packages)
#:use-module (gnu packages gcc)
#:use-module (gnu packages compression)
#:use-module (gnu packages libffi)
#:use-module (gnu packages perl)
#:use-module (gnu packages python)
#:use-module (gnu packages xml))
(define-public llvm
(package
(name "llvm")
(version "3.8.1")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"1ybmnid4pw2hxn12ax5qa5kl1ldfns0njg8533y3mzslvd5cx0kf"))))
(build-system cmake-build-system)
(native-inputs
("perl" ,perl)))
(inputs
`(("libffi" ,libffi)))
(propagated-inputs
to use output from - config
(arguments
`(#:configure-flags '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE"
"-DBUILD_SHARED_LIBS:BOOL=TRUE"
"-DLLVM_ENABLE_FFI:BOOL=TRUE"
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-before 'build 'shared-lib-workaround
Even with CMAKE_SKIP_BUILD_RPATH = FALSE ,
(lambda _
(setenv "LD_LIBRARY_PATH"
(string-append (getcwd) "/lib"))
#t)))))
(home-page "")
(synopsis "Optimizing compiler infrastructure")
(description
"LLVM is a compiler infrastructure designed for compile-time, link-time,
runtime, and idle-time optimization of programs from arbitrary programming
languages. It currently supports compilation of C and C++ programs, using
front-ends derived from GCC 4.0.1. A new front-end for the C family of
languages is in development. The compiler infrastructure includes mirror sets
of programming tools as well as libraries with equivalent functionality.")
(license license:ncsa)))
(define-public llvm-with-rtti
(package (inherit llvm)
(name "llvm-with-rtti")
(arguments
(substitute-keyword-arguments (package-arguments llvm)
((#:configure-flags flags)
`(append '("-DCMAKE_SKIP_BUILD_RPATH=FALSE"
"-DCMAKE_BUILD_WITH_INSTALL_RPATH=FALSE"
"-DLLVM_REQUIRES_RTTI=1")
,flags))))))
(define (clang-runtime-from-llvm llvm hash)
(package
(name "clang-runtime")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/compiler-rt-" version ".src.tar.xz"))
(sha256 (base32 hash))))
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("llvm" ,llvm)))
(arguments
#:build-type "Release"
Tests require gtest
(home-page "-rt.llvm.org")
(synopsis "Runtime library for Clang/LLVM")
(description
"The \"clang-runtime\" library provides the implementations of run-time
functions for C and C++ programs. It also provides header files that allow C
and C++ source code to interface with the \"sanitization\" passes of the clang
compiler. In LLVM this library is called \"compiler-rt\".")
(license license:ncsa)
< -rt.llvm.org/ > does n't list MIPS as supported .
(supported-systems (delete "mips64el-linux" %supported-systems))))
(define* (clang-from-llvm llvm clang-runtime hash
#:key (patches '("clang-libc-search-path.patch")))
(package
(name "clang")
(version (package-version llvm))
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/cfe-" version ".src.tar.xz"))
(sha256 (base32 hash))
(patches (map search-patch patches))))
Using cmake allows us to treat as an external library . There
does n't seem to be any way to do this with clang 's - based
(build-system cmake-build-system)
(native-inputs (package-native-inputs llvm))
(inputs
`(("libxml2" ,libxml2)
("gcc-lib" ,gcc "lib")
,@(package-inputs llvm)))
(propagated-inputs
`(("llvm" ,llvm)
("clang-runtime" ,clang-runtime)))
(arguments
`(#:configure-flags
(list "-DCLANG_INCLUDE_TESTS=True"
(string-append "-DGCC_INSTALL_PREFIX="
(assoc-ref %build-inputs "gcc-lib"))
(string-append "-DC_INCLUDE_DIRS="
(assoc-ref %build-inputs "libc")
"/include"))
#:build-type "Release"
#:phases (modify-phases %standard-phases
(add-after
'unpack 'set-glibc-file-names
(lambda* (#:key inputs #:allow-other-keys)
(let ((libc (assoc-ref inputs "libc"))
(compiler-rt (assoc-ref inputs "clang-runtime")))
(substitute* "lib/Driver/Tools.cpp"
(("/lib64/ld-linux-x86-64.so.2")
(string-append libc
,(glibc-dynamic-linker)))
Link to files from clang - runtime .
(("TC\\.getDriver\\(\\)\\.ResourceDir")
(string-append "\"" compiler-rt "\"")))
Same for libc 's , to allow crt1.o & co. to be
(substitute* "lib/Driver/ToolChains.cpp"
(("@GLIBC_LIBDIR@")
(string-append libc "/lib")))))))))
Clang supports the same environment variables as GCC .
(native-search-paths
(list (search-path-specification
(variable "CPATH")
(files '("include")))
(search-path-specification
(variable "LIBRARY_PATH")
(files '("lib" "lib64")))))
(home-page "")
(synopsis "C language family frontend for LLVM")
(description
"Clang is a compiler front end for the C, C++, Objective-C and
Objective-C++ programming languages. It uses LLVM as its back end. The Clang
project includes the Clang front end, the Clang static analyzer, and several
code analysis tools.")
(license license:ncsa)))
(define-public clang-runtime
(clang-runtime-from-llvm
llvm
"0p0y85c7izndbpg2l816z7z7558axq11d5pwkm4h11sdw7d13w0d"))
(define-public clang
(clang-from-llvm llvm clang-runtime
"1prc72xmkgx8wrzmrr337776676nhsp1qd3mw2bvb22bzdnq7lsc"
#:patches '("clang-3.8-libc-search-path.patch")))
(define-public llvm-3.7
(package (inherit llvm)
(version "3.7.1")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"1masakdp9g2dan1yrazg7md5am2vacbkb3nahb3dchpc1knr8xxy"))))))
(define-public clang-runtime-3.7
(clang-runtime-from-llvm
llvm-3.7
"10c1mz2q4bdq9bqfgr3dirc6hz1h3sq8573srd5q5lr7m7j6jiwx"))
(define-public clang-3.7
(clang-from-llvm llvm-3.7 clang-runtime-3.7
"0x065d0w9b51xvdjxwfzjxng0gzpbx45fgiaxpap45ragi61dqjn"))
(define-public llvm-3.6
(package (inherit llvm)
(version "3.6.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"153vcvj8gvgwakzr4j0kndc0b7wn91c2g1vy2vg24s6spxcc23gn"))))))
(define-public clang-runtime-3.6
(clang-runtime-from-llvm
llvm-3.6
"11qx8d3pbfqjaj2x207pvlvzihbs1z2xbw4crpz7aid6h1yz6bqg"))
(define-public clang-3.6
(clang-from-llvm llvm-3.6 clang-runtime-3.6
"1wwr8s6lzr324hv4s1k6na4j5zv6n9kdhi14s4kb9b13d93814df"))
(define-public llvm-3.5
(package (inherit llvm)
(version "3.5.2")
(source
(origin
(method url-fetch)
(uri (string-append "/"
version "/llvm-" version ".src.tar.xz"))
(sha256
(base32
"0xf5q17kkxsrm2gsi93h4pwlv663kji73r2g4asb97klsmb626a4"))))))
(define-public clang-runtime-3.5
(clang-runtime-from-llvm
llvm-3.5
"1hsdnzzdr5kglz6fnv3lcsjs222zjsy14y8ax9dy6zqysanplbal"))
(define-public clang-3.5
(clang-from-llvm llvm-3.5 clang-runtime-3.5
"0846h8vn3zlc00jkmvrmy88gc6ql6014c02l4jv78fpvfigmgssg"))
(define-public llvm-for-extempore
(package (inherit llvm-3.7)
(name "llvm-for-extempore")
(source
(origin
(inherit (package-source llvm-3.7))
(patches (list (search-patch "llvm-for-extempore.patch")))))
Extempore refuses to build on architectures other than x86_64
(supported-systems '("x86_64-linux"))))
|
f49b0fa2f6d7333b2cfb08d9f4009cf790258aed4236db652af421c4e4a5e0f9 | ShamoX/cash | signal_3_9.ml | (***********************************************************************)
(* Cash *)
(* *)
(* , projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 2002 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the GNU Lesser General Public License. *)
(* *)
(* Cash is based on , by . *)
(***********************************************************************)
value signal_process proc = Unix.kill proc.Procobj.p_id;
value signal_process_pid = Unix.kill;
value signal_process_group procgroup = Unix.kill (- procgroup.Procobj.p_id);
value signal_process_group_pgrp prgrp = Unix.kill (- prgrp);
value pause_until_interrupt = Unix.pause;
value itimer ?newstat timer =
match newstat with
[ None -> Unix.getitimer timer
| Some stat -> Unix.setitimer timer stat ]
;
value sleep = Unix.sleep;
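(* Sleep until an absolute Unix time, retrying whenever select is interrupted. *)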
value rec sleep_until time =
let now = Unix.time () in
let delta = time -. now in
if delta <= 0. then ()
else
let retry = try do { ignore (Unix.select [] [] [] delta); False } with _ -> True in
if retry then sleep_until time else ()
;
| null | https://raw.githubusercontent.com/ShamoX/cash/aa97231154c3f64c9d0a62823e1ed71e32ab8718/signal_3_9.ml | ocaml | *********************************************************************
Cash
under the terms of the GNU Lesser General Public License.
********************************************************************* | , projet Cristal , INRIA Rocquencourt
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
Cash is based on , by .
value signal_process proc = Unix.kill proc.Procobj.p_id;
value signal_process_pid = Unix.kill;
value signal_process_group procgroup = Unix.kill (- procgroup.Procobj.p_id);
value signal_process_group_pgrp prgrp = Unix.kill (- prgrp);
value pause_until_interrupt = Unix.pause;
value itimer ?newstat timer =
match newstat with
[ None -> Unix.getitimer timer
| Some stat -> Unix.setitimer timer stat ]
;
value sleep = Unix.sleep;
value rec sleep_until time =
let now = Unix.time () in
let delta = time -. now in
if delta <= 0. then ()
else
let retry = try do { ignore (Unix.select [] [] [] delta); False } with _ -> True in
if retry then sleep_until time else ()
;
|
c91fbb70f7e90ecac1433bace2c943ff398fd46b0e109c83665780bffbd5045a | erdos/stencil | util_test.clj | (ns stencil.util-test
(:require [clojure.test :refer [deftest testing is are]]
[clojure.zip :as zip]
[stencil.util :refer :all]))
(deftest stacks-difference-test
(testing "Empty cases"
(is (= [[] []] (stacks-difference-key identity nil nil)))
(is (= [[] []] (stacks-difference-key identity () ())))
(is (= [[] []] (stacks-difference-key identity '(:a :b :c) '(:a :b :c)))))
(testing "simple cases"
(is (= [[:a :b] []] (stacks-difference-key identity '(:a :b) ())))
(is (= [[] [:a :b]] (stacks-difference-key identity '() '(:a :b))))
(is (= [[:a] [:b]] (stacks-difference-key identity '(:a :x :y) '(:b :x :y))))
(is (= [[:a] []] (stacks-difference-key identity '(:a :x :y) '(:x :y))))
(is (= [[] [:b]] (stacks-difference-key identity '(:x :y) '(:b :x :y))))))
(deftest mod-stack-top-last-test
(testing "Invalid input"
(is (thrown? IllegalStateException (mod-stack-top-last '([]) inc)))
(is (thrown? IllegalStateException (mod-stack-top-last '() inc))))
(testing "simple cases"
(is (= '([3]) (mod-stack-top-last '([2]) inc)))
(is (= '([1 1 2] [1 1 1])
(mod-stack-top-last '([1 1 1] [1 1 1]) inc)))))
(deftest mod-stack-top-conj-test
(testing "empty input"
(is (thrown? IllegalStateException (mod-stack-top-conj '() 2)))
(is (= '([2]) (mod-stack-top-conj '([]) 2))))
(testing "simple cases"
(is (= '([1 2]) (mod-stack-top-conj '([1]) 2)))
(is (= '([1 1 1] [2 2] [3 3])
(mod-stack-top-conj '([1 1] [2 2] [3 3]) 1)))
(is (= '([1 1 1 2 3] [2 2] [3 3])
(mod-stack-top-conj '([1 1] [2 2] [3 3]) 1 2 3)))))
(deftest update-peek-test
(testing "simple cases"
(is (thrown? IllegalStateException (update-peek [] inc)))
(is (= [1 1 1 2] (update-peek [1 1 1 1] inc)))))
(deftest xml-zip-test
(testing "XML nodes are always branches"
(testing "Clojure Core xml-zip"
(is (zip/branch? (zip/xml-zip {:tag "A"})))
(is (not (zip/branch? (zip/xml-zip "child"))))
(is (zip/branch? (zip/xml-zip 42))))
(testing "Stencil's xml-zip"
(is (zip/branch? (xml-zip {:tag "A"})))
(is (not (zip/branch? (xml-zip "child"))))
(testing "Difference clojure core"
(is (not (zip/branch? (xml-zip 42))))))))
(deftest test-suffixes
(is (= [] (suffixes nil)))
(is (= [] (suffixes [])))
(is (= [[1]] (suffixes [1])))
(is (= [[1 2 3] [2 3] [3]] (suffixes [1 2 3]))))
(deftest test-prefixes
(is (= [] (prefixes nil)))
(is (= [] (prefixes [])))
(is (= [[1]] (prefixes [1])))
(is (= [[1 2 3] [1 2] [1]] (prefixes [1 2 3]))))
(deftest test-->int
(is (= nil (->int nil)))
(is (= 23 (->int 23)))
(is (= 23 (->int "23")))
(is (= 23 (->int 23.2)))
(is (thrown? clojure.lang.ExceptionInfo (->int :asdf))))
(deftest update-some-test
(is (= nil (update-some nil [:a] inc)))
(is (= {:a 1} (update-some {:a 1} [:b] inc)))
(is (= {:a 2 :x 1} (update-some {:a 1 :x 1} [:a] inc)))
(is (= {:a 1 :x 1} (update-some {:a 1 :x 1} [:a] #{}))))
(deftest fixpt-test
(is (= nil (fixpt first [])))
(is (= :a (fixpt {:a :a :b :a :c :b} :c))))
(deftest find-first-test
(is (= 1 (find-first odd? [0 1 2 3 4])))
(is (= nil (find-first odd? [0 2 4])))
(is (= nil (find-first odd? []) (find-first odd? nil))))
(deftest find-last-test
(is (= 3 (find-last odd? [0 1 2 3 4])))
(is (= nil (find-last odd? [0 2 4])))
(is (= nil (find-last odd? []) (find-last odd? nil))))
(deftest fail-test
(is (thrown? clojure.lang.ExceptionInfo (fail "test error" {}))))
(deftest prefixes-test
(is (= [] (prefixes []) (prefixes nil)))
(is (= [[1 2 3] [1 2] [1]] (prefixes [1 2 3]))))
(deftest suffixes-test
(is (= [] (suffixes []) (suffixes nil)))
(is (= [[1 2 3] [2 3] [3]] (suffixes [1 2 3]))))
(deftest whitespace?-test
(is (= true (whitespace? \space)))
(is (= true (whitespace? \tab)))
(is (= false (whitespace? " ")))
(is (= false (whitespace? \A))))
(deftest trim-test
(are [input] (= "" (trim input))
"", " ", "\t\t\n")
(are [input] (= "abc" (trim input))
"abc", " abc", "abc ", " \t \n abc \t")
(is (= "a b c" (trim " a b c \t"))))
| null | https://raw.githubusercontent.com/erdos/stencil/a7dc048dbe57f1d7ffe2ef8b34e44ba9422258f8/test/stencil/util_test.clj | clojure | (ns stencil.util-test
(:require [clojure.test :refer [deftest testing is are]]
[clojure.zip :as zip]
[stencil.util :refer :all]))
(deftest stacks-difference-test
(testing "Empty cases"
(is (= [[] []] (stacks-difference-key identity nil nil)))
(is (= [[] []] (stacks-difference-key identity () ())))
(is (= [[] []] (stacks-difference-key identity '(:a :b :c) '(:a :b :c)))))
(testing "simple cases"
(is (= [[:a :b] []] (stacks-difference-key identity '(:a :b) ())))
(is (= [[] [:a :b]] (stacks-difference-key identity '() '(:a :b))))
(is (= [[:a] [:b]] (stacks-difference-key identity '(:a :x :y) '(:b :x :y))))
(is (= [[:a] []] (stacks-difference-key identity '(:a :x :y) '(:x :y))))
(is (= [[] [:b]] (stacks-difference-key identity '(:x :y) '(:b :x :y))))))
(deftest mod-stack-top-last-test
(testing "Invalid input"
(is (thrown? IllegalStateException (mod-stack-top-last '([]) inc)))
(is (thrown? IllegalStateException (mod-stack-top-last '() inc))))
(testing "simple cases"
(is (= '([3]) (mod-stack-top-last '([2]) inc)))
(is (= '([1 1 2] [1 1 1])
(mod-stack-top-last '([1 1 1] [1 1 1]) inc)))))
(deftest mod-stack-top-conj-test
(testing "empty input"
(is (thrown? IllegalStateException (mod-stack-top-conj '() 2)))
(is (= '([2]) (mod-stack-top-conj '([]) 2))))
(testing "simple cases"
(is (= '([1 2]) (mod-stack-top-conj '([1]) 2)))
(is (= '([1 1 1] [2 2] [3 3])
(mod-stack-top-conj '([1 1] [2 2] [3 3]) 1)))
(is (= '([1 1 1 2 3] [2 2] [3 3])
(mod-stack-top-conj '([1 1] [2 2] [3 3]) 1 2 3)))))
(deftest update-peek-test
(testing "simple cases"
(is (thrown? IllegalStateException (update-peek [] inc)))
(is (= [1 1 1 2] (update-peek [1 1 1 1] inc)))))
(deftest xml-zip-test
(testing "XML nodes are always branches"
(testing "Clojure Core xml-zip"
(is (zip/branch? (zip/xml-zip {:tag "A"})))
(is (not (zip/branch? (zip/xml-zip "child"))))
(is (zip/branch? (zip/xml-zip 42))))
(testing "Stencil's xml-zip"
(is (zip/branch? (xml-zip {:tag "A"})))
(is (not (zip/branch? (xml-zip "child"))))
(testing "Difference clojure core"
(is (not (zip/branch? (xml-zip 42))))))))
(deftest test-suffixes
(is (= [] (suffixes nil)))
(is (= [] (suffixes [])))
(is (= [[1]] (suffixes [1])))
(is (= [[1 2 3] [2 3] [3]] (suffixes [1 2 3]))))
(deftest test-prefixes
(is (= [] (prefixes nil)))
(is (= [] (prefixes [])))
(is (= [[1]] (prefixes [1])))
(is (= [[1 2 3] [1 2] [1]] (prefixes [1 2 3]))))
(deftest test-->int
(is (= nil (->int nil)))
(is (= 23 (->int 23)))
(is (= 23 (->int "23")))
(is (= 23 (->int 23.2)))
(is (thrown? clojure.lang.ExceptionInfo (->int :asdf))))
(deftest update-some-test
(is (= nil (update-some nil [:a] inc)))
(is (= {:a 1} (update-some {:a 1} [:b] inc)))
(is (= {:a 2 :x 1} (update-some {:a 1 :x 1} [:a] inc)))
(is (= {:a 1 :x 1} (update-some {:a 1 :x 1} [:a] #{}))))
(deftest fixpt-test
(is (= nil (fixpt first [])))
(is (= :a (fixpt {:a :a :b :a :c :b} :c))))
(deftest find-first-test
(is (= 1 (find-first odd? [0 1 2 3 4])))
(is (= nil (find-first odd? [0 2 4])))
(is (= nil (find-first odd? []) (find-first odd? nil))))
(deftest find-last-test
(is (= 3 (find-last odd? [0 1 2 3 4])))
(is (= nil (find-last odd? [0 2 4])))
(is (= nil (find-last odd? []) (find-last odd? nil))))
(deftest fail-test
(is (thrown? clojure.lang.ExceptionInfo (fail "test error" {}))))
(deftest prefixes-test
(is (= [] (prefixes []) (prefixes nil)))
(is (= [[1 2 3] [1 2] [1]] (prefixes [1 2 3]))))
(deftest suffixes-test
(is (= [] (suffixes []) (suffixes nil)))
(is (= [[1 2 3] [2 3] [3]] (suffixes [1 2 3]))))
(deftest whitespace?-test
(is (= true (whitespace? \space)))
(is (= true (whitespace? \tab)))
(is (= false (whitespace? " ")))
(is (= false (whitespace? \A))))
(deftest trim-test
(are [input] (= "" (trim input))
"", " ", "\t\t\n")
(are [input] (= "abc" (trim input))
"abc", " abc", "abc ", " \t \n abc \t")
(is (= "a b c" (trim " a b c \t"))))
|
|
28fd98df037fe60766a14c8a64976f8ad3755f30b3dc5d9f9c194b4a0cc42a33 | exoscale/clojure-kubernetes-client | extensions_v1beta1_se_linux_strategy_options.clj | (ns clojure-kubernetes-client.specs.extensions-v1beta1-se-linux-strategy-options
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-se-linux-options :refer :all]
)
(:import (java.io File)))
(declare extensions-v1beta1-se-linux-strategy-options-data extensions-v1beta1-se-linux-strategy-options)
(def extensions-v1beta1-se-linux-strategy-options-data
{
(ds/req :rule) string?
(ds/opt :seLinuxOptions) v1-se-linux-options
})
(def extensions-v1beta1-se-linux-strategy-options
(ds/spec
{:name ::extensions-v1beta1-se-linux-strategy-options
:spec extensions-v1beta1-se-linux-strategy-options-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/extensions_v1beta1_se_linux_strategy_options.clj | clojure | (ns clojure-kubernetes-client.specs.extensions-v1beta1-se-linux-strategy-options
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-se-linux-options :refer :all]
)
(:import (java.io File)))
(declare extensions-v1beta1-se-linux-strategy-options-data extensions-v1beta1-se-linux-strategy-options)
(def extensions-v1beta1-se-linux-strategy-options-data
{
(ds/req :rule) string?
(ds/opt :seLinuxOptions) v1-se-linux-options
})
(def extensions-v1beta1-se-linux-strategy-options
(ds/spec
{:name ::extensions-v1beta1-se-linux-strategy-options
:spec extensions-v1beta1-se-linux-strategy-options-data}))
|
|
ebedf29aeeee9dcd5f2ed0ace3a9b6b9727b0193eebeefab5a6baaafdd8d1934 | tomjridge/tjr_simple_earley | misc.ml | let iter_opt (f:'a -> 'a option) =
let rec loop x =
match f x with
| None -> x
| Some x -> loop x
in
fun x -> loop x
let rev_filter_map f xs =
([],xs) |> iter_opt (function
| _,[] -> None
| xs',x::xs ->
f x |> function
| None -> Some(xs',xs)
| Some y -> Some(y::xs',xs))
|> fun (xs',[]) -> xs'
let _ = rev_filter_map
let string_matches_at ~string ~sub ~pos =
let len = String.length sub in
try
String.sub string pos len = sub
with Invalid_argument _ -> false
(* iterate over a list until the first Some x; return this (or None if no such elt) *)
let iter_till_some (f: 'a -> 'b option) xs =
(None,xs) |> iter_opt (fun (ret,xs) ->
match ret with
| Some x -> None
| None -> (
match xs with
| [] -> None
| x::xs ->
f x |> function
| None -> Some (None,xs)
| Some ret -> Some(Some ret,[])))
|> function (ret,_) -> ret
let _ : ('a -> 'b option) -> 'a list -> 'b option = iter_till_some
module Int_set = Set.Make(
struct type t = int let compare: t -> t -> int = Int.compare end)
(** {2 Logging}
NOTE logging is enabled/disabled by a ppx_optcomp flag. *)
[%%import "earley_optcomp_config.ml"]
[%%if LOGGING_ENABLED]
let log = fun (x:unit Lazy.t) -> Lazy.force x [@@inline]
[%%else]
let log = fun (x:unit Lazy.t) -> () [@@inline]
[%%endif]
| null | https://raw.githubusercontent.com/tomjridge/tjr_simple_earley/ca558e0e7f4ddba4cd6573bf180710cd02f25ba4/src/misc.ml | ocaml | let iter_opt (f:'a -> 'a option) =
let rec loop x =
match f x with
| None -> x
| Some x -> loop x
in
fun x -> loop x
let rev_filter_map f xs =
([],xs) |> iter_opt (function
| _,[] -> None
| xs',x::xs ->
f x |> function
| None -> Some(xs',xs)
| Some y -> Some(y::xs',xs))
|> fun (xs',[]) -> xs'
let _ = rev_filter_map
let string_matches_at ~string ~sub ~pos =
let len = String.length sub in
try
String.sub string pos len = sub
with Invalid_argument _ -> false
(* iterate over a list until the first Some x; return this (or None if no such elt) *)
let iter_till_some (f: 'a -> 'b option) xs =
(None,xs) |> iter_opt (fun (ret,xs) ->
match ret with
| Some x -> None
| None -> (
match xs with
| [] -> None
| x::xs ->
f x |> function
| None -> Some (None,xs)
| Some ret -> Some(Some ret,[])))
|> function (ret,_) -> ret
let _ : ('a -> 'b option) -> 'a list -> 'b option = iter_till_some
module Int_set = Set.Make(
struct type t = int let compare: t -> t -> int = Int.compare end)
(** {2 Logging}
NOTE logging is enabled/disabled by a ppx_optcomp flag. *)
[%%import "earley_optcomp_config.ml"]
[%%if LOGGING_ENABLED]
let log = fun (x:unit Lazy.t) -> Lazy.force x [@@inline]
[%%else]
let log = fun (x:unit Lazy.t) -> () [@@inline]
[%%endif]
|
|
84a5239ebe32819f944f6445d9443ae9a47050924d9f46734de3894f6c485d34 | oliyh/superlifter | project.clj | (defproject example "0.0.1-SNAPSHOT"
:description "An example use of superlifter for lacinia"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[io.pedestal/pedestal.service "0.5.7"]
[io.pedestal/pedestal.jetty "0.5.7"]
[ch.qos.logback/logback-classic "1.2.3" :exclusions [org.slf4j/slf4j-api]]
[org.slf4j/jul-to-slf4j "1.7.26"]
[org.slf4j/jcl-over-slf4j "1.7.26"]
[org.slf4j/log4j-over-slf4j "1.7.26"]
[com.walmartlabs/lacinia-pedestal "0.13.0-alpha-1"]
[funcool/promesa "4.0.2"]
[superlifter "0.1.3-SNAPSHOT"]]
:min-lein-version "2.0.0"
:source-paths ["src" "../src"]
:resource-paths ["config", "resources"]
If you use HTTP/2 or ALPN , use the java - agent to pull in the correct alpn - boot dependency
: java - agents [ [ org.mortbay.jetty.alpn/jetty-alpn-agent " 2.0.5 " ] ]
:profiles {:dev {:aliases {"run-dev" ["trampoline" "run" "-m" "example.server/run-dev"]}
:dependencies [[io.pedestal/pedestal.service-tools "0.5.7"]
[clj-http "3.10.0"]]}
:uberjar {:aot [example.server]}}
:main ^{:skip-aot true} example.server)
| null | https://raw.githubusercontent.com/oliyh/superlifter/d0baf9538f1dac712415323a2f2a6578c181bd97/example/project.clj | clojure | (defproject example "0.0.1-SNAPSHOT"
:description "An example use of superlifter for lacinia"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[io.pedestal/pedestal.service "0.5.7"]
[io.pedestal/pedestal.jetty "0.5.7"]
[ch.qos.logback/logback-classic "1.2.3" :exclusions [org.slf4j/slf4j-api]]
[org.slf4j/jul-to-slf4j "1.7.26"]
[org.slf4j/jcl-over-slf4j "1.7.26"]
[org.slf4j/log4j-over-slf4j "1.7.26"]
[com.walmartlabs/lacinia-pedestal "0.13.0-alpha-1"]
[funcool/promesa "4.0.2"]
[superlifter "0.1.3-SNAPSHOT"]]
:min-lein-version "2.0.0"
:source-paths ["src" "../src"]
:resource-paths ["config", "resources"]
If you use HTTP/2 or ALPN , use the java - agent to pull in the correct alpn - boot dependency
: java - agents [ [ org.mortbay.jetty.alpn/jetty-alpn-agent " 2.0.5 " ] ]
:profiles {:dev {:aliases {"run-dev" ["trampoline" "run" "-m" "example.server/run-dev"]}
:dependencies [[io.pedestal/pedestal.service-tools "0.5.7"]
[clj-http "3.10.0"]]}
:uberjar {:aot [example.server]}}
:main ^{:skip-aot true} example.server)
|
|
517d93ff675b8cb4f704c6d1d2c8bd9e18b76d370acf815f3a6f64adb0bf344b | tisnik/clojure-examples | dependencies.clj | {[clojure-complete "0.2.5" :exclusions [[org.clojure/clojure]]] nil,
[nrepl "0.7.0" :exclusions [[org.clojure/clojure]]] nil,
[org.clojure/clojure "1.10.1"]
{[org.clojure/core.specs.alpha "0.2.44"] nil,
[org.clojure/spec.alpha "0.2.176"] nil},
[seesaw "1.4.5"]
{[com.fifesoft/rsyntaxtextarea "2.5.6"] nil,
[com.jgoodies/forms "1.2.1"] nil,
[com.miglayout/miglayout "3.7.4"] nil,
[j18n "1.0.2"] nil,
[org.swinglabs.swingx/swingx-core "1.6.3"]
{[org.swinglabs.swingx/swingx-action "1.6.3"] nil,
[org.swinglabs.swingx/swingx-autocomplete "1.6.3"]
{[org.swinglabs.swingx/swingx-common "1.6.3"] nil},
[org.swinglabs.swingx/swingx-painters "1.6.3"] nil,
[org.swinglabs.swingx/swingx-plaf "1.6.3"] nil}},
[venantius/ultra "0.6.0"]
{[grimradical/clj-semver "0.3.0" :exclusions [[org.clojure/clojure]]]
nil,
[io.aviso/pretty "0.1.35"] nil,
[mvxcvi/puget "1.1.0"]
{[fipp "0.6.14"] {[org.clojure/core.rrb-vector "0.0.13"] nil},
[mvxcvi/arrangement "1.1.1"] nil},
[mvxcvi/whidbey "2.1.0"] {[org.clojure/data.codec "0.1.1"] nil},
[org.clojars.brenton/google-diff-match-patch "0.1"] nil,
[robert/hooke "1.3.0"] nil,
[venantius/glow "0.1.5" :exclusions [[hiccup] [garden]]]
{[clj-antlr "0.2.3"]
{[org.antlr/antlr4-runtime "4.5.3"] nil,
[org.antlr/antlr4 "4.5.3"] nil},
[instaparse "1.4.1"] nil}}}
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/15a3bcbd5c4decffa3aa45f1faee127e2f65c7d7/seesaw6/doc/dependencies.clj | clojure | {[clojure-complete "0.2.5" :exclusions [[org.clojure/clojure]]] nil,
[nrepl "0.7.0" :exclusions [[org.clojure/clojure]]] nil,
[org.clojure/clojure "1.10.1"]
{[org.clojure/core.specs.alpha "0.2.44"] nil,
[org.clojure/spec.alpha "0.2.176"] nil},
[seesaw "1.4.5"]
{[com.fifesoft/rsyntaxtextarea "2.5.6"] nil,
[com.jgoodies/forms "1.2.1"] nil,
[com.miglayout/miglayout "3.7.4"] nil,
[j18n "1.0.2"] nil,
[org.swinglabs.swingx/swingx-core "1.6.3"]
{[org.swinglabs.swingx/swingx-action "1.6.3"] nil,
[org.swinglabs.swingx/swingx-autocomplete "1.6.3"]
{[org.swinglabs.swingx/swingx-common "1.6.3"] nil},
[org.swinglabs.swingx/swingx-painters "1.6.3"] nil,
[org.swinglabs.swingx/swingx-plaf "1.6.3"] nil}},
[venantius/ultra "0.6.0"]
{[grimradical/clj-semver "0.3.0" :exclusions [[org.clojure/clojure]]]
nil,
[io.aviso/pretty "0.1.35"] nil,
[mvxcvi/puget "1.1.0"]
{[fipp "0.6.14"] {[org.clojure/core.rrb-vector "0.0.13"] nil},
[mvxcvi/arrangement "1.1.1"] nil},
[mvxcvi/whidbey "2.1.0"] {[org.clojure/data.codec "0.1.1"] nil},
[org.clojars.brenton/google-diff-match-patch "0.1"] nil,
[robert/hooke "1.3.0"] nil,
[venantius/glow "0.1.5" :exclusions [[hiccup] [garden]]]
{[clj-antlr "0.2.3"]
{[org.antlr/antlr4-runtime "4.5.3"] nil,
[org.antlr/antlr4 "4.5.3"] nil},
[instaparse "1.4.1"] nil}}}
|
|
1bb5f91fc828bab9755012a37c82ad7b1dd4e6c40bc84d848e635b591b1c4102 | boris-ci/boris | test.hs | # LANGUAGE NoImplicitPrelude #
import Control.Monad ((>>=), (>>), when, mapM)
import Prelude (($), (.), not, all, id)
import qualified System.Exit as Exit
import System.IO (IO)
import qualified System.IO as IO
import qualified Test.Boris.Core.Data.Build
import qualified Test.Boris.Core.Serial.Command
import qualified Test.Boris.Core.Serial.Ref
main :: IO ()
main =
IO.hSetBuffering IO.stdout IO.LineBuffering >> mapM id [
Test.Boris.Core.Data.Build.tests
, Test.Boris.Core.Serial.Ref.tests
, Test.Boris.Core.Serial.Command.tests
] >>= \rs -> when (not . all id $ rs) Exit.exitFailure
| null | https://raw.githubusercontent.com/boris-ci/boris/c321187490afc889bf281442ac4ef9398b77b200/boris-core/test/test.hs | haskell | # LANGUAGE NoImplicitPrelude #
import Control.Monad ((>>=), (>>), when, mapM)
import Prelude (($), (.), not, all, id)
import qualified System.Exit as Exit
import System.IO (IO)
import qualified System.IO as IO
import qualified Test.Boris.Core.Data.Build
import qualified Test.Boris.Core.Serial.Command
import qualified Test.Boris.Core.Serial.Ref
main :: IO ()
main =
IO.hSetBuffering IO.stdout IO.LineBuffering >> mapM id [
Test.Boris.Core.Data.Build.tests
, Test.Boris.Core.Serial.Ref.tests
, Test.Boris.Core.Serial.Command.tests
] >>= \rs -> when (not . all id $ rs) Exit.exitFailure
|
|
db61fe0731955a999c5385c88423611f8fea3788546f0d3bd13479f69b167dc0 | pirapira/coq2rust | reductionops.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2012     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Names
open Term
open Context
open Univ
open Evd
open Environ
(** Reduction Functions. *)
exception Elimconst
(** Machinery to customize the behavior of the reduction *)
module ReductionBehaviour : sig
type flag = [ `ReductionDontExposeCase | `ReductionNeverUnfold ]
* [ set is_local ref ( recargs , , flags ) ]
val set :
bool -> Globnames.global_reference -> (int list * int * flag list) -> unit
val get :
Globnames.global_reference -> (int list * int * flag list) option
val print : Globnames.global_reference -> Pp.std_ppcmds
end
(** {6 Machinery about a stack of unfolded constant }
cst applied to params must convertible to term of the state applied to args *)
module Cst_stack : sig
type t
val empty : t
val add_param : constr -> t -> t
val add_args : constr array -> t -> t
val add_cst : constr -> t -> t
val best_cst : t -> (constr * constr list) option
val best_replace : constr -> t -> constr -> constr
val reference : t -> Constant.t option
val pr : t -> Pp.std_ppcmds
end
module Stack : sig
type 'a app_node
val pr_app_node : ('a -> Pp.std_ppcmds) -> 'a app_node -> Pp.std_ppcmds
type cst_member =
| Cst_const of pconstant
| Cst_proj of projection
type 'a member =
| App of 'a app_node
| Case of case_info * 'a * 'a array * Cst_stack.t
| Proj of int * int * projection * Cst_stack.t
| Fix of fixpoint * 'a t * Cst_stack.t
| Cst of cst_member * int (** current foccussed arg *) * int list (** remaining args *)
* 'a t * Cst_stack.t
| Shift of int
| Update of 'a
and 'a t = 'a member list
val pr : ('a -> Pp.std_ppcmds) -> 'a t -> Pp.std_ppcmds
val empty : 'a t
val is_empty : 'a t -> bool
val append_app : 'a array -> 'a t -> 'a t
val decomp : 'a t -> ('a * 'a t) option
val decomp_node_last : 'a app_node -> 'a t -> ('a * 'a t)
val compare_shape : 'a t -> 'a t -> bool
(** [fold2 f x sk1 sk2] folds [f] on any pair of term in [(sk1,sk2)].
@return the result and the lifts to apply on the terms *)
val fold2 : ('a -> Term.constr -> Term.constr -> 'a) -> 'a ->
Term.constr t -> Term.constr t -> 'a * int * int
val map : (Term.constr -> Term.constr) -> Term.constr t -> Term.constr t
val append_app_list : 'a list -> 'a t -> 'a t
(** if [strip_app s] = [(a,b)], then [s = a @ b] and [b] does not
start by App or Shift *)
val strip_app : 'a t -> 'a t * 'a t
* @return ( the nth first elements , the ( n+1)th element , the remaining stack )
val strip_n_app : int -> 'a t -> ('a t * 'a * 'a t) option
val not_purely_applicative : 'a t -> bool
val list_of_app_stack : constr t -> constr list option
val assign : 'a t -> int -> 'a -> 'a t
val args_size : 'a t -> int
val tail : int -> 'a t -> 'a t
val nth : 'a t -> int -> 'a
val best_state : constr * constr t -> Cst_stack.t -> constr * constr t
val zip : ?refold:bool -> constr * constr t -> constr
end
(************************************************************************)
type state = constr * constr Stack.t
type contextual_reduction_function = env -> evar_map -> constr -> constr
type reduction_function = contextual_reduction_function
type local_reduction_function = evar_map -> constr -> constr
type e_reduction_function = env -> evar_map -> constr -> evar_map * constr
type contextual_stack_reduction_function =
env -> evar_map -> constr -> constr * constr list
type stack_reduction_function = contextual_stack_reduction_function
type local_stack_reduction_function =
evar_map -> constr -> constr * constr list
type contextual_state_reduction_function =
env -> evar_map -> state -> state
type state_reduction_function = contextual_state_reduction_function
type local_state_reduction_function = evar_map -> state -> state
val pr_state : state -> Pp.std_ppcmds
* { 6 Reduction Function Operators }
val strong : reduction_function -> reduction_function
val local_strong : local_reduction_function -> local_reduction_function
val strong_prodspine : local_reduction_function -> local_reduction_function
(*i
val stack_reduction_of_reduction :
'a reduction_function -> 'a state_reduction_function
i*)
val stacklam : (state -> 'a) -> constr list -> constr -> constr Stack.t -> 'a
val whd_state_gen : ?csts:Cst_stack.t -> bool -> Closure.RedFlags.reds ->
Environ.env -> Evd.evar_map -> state -> state * Cst_stack.t
val iterate_whd_gen : bool -> Closure.RedFlags.reds ->
Environ.env -> Evd.evar_map -> Term.constr -> Term.constr
* { 6 Generic Optimized Reduction Function using Closures }
val clos_norm_flags : Closure.RedFlags.reds -> reduction_function
(** Same as [(strong whd_beta[delta][iota])], but much faster on big terms *)
val nf_beta : local_reduction_function
val nf_betaiota : local_reduction_function
val nf_betaiotazeta : local_reduction_function
val nf_betadeltaiota : reduction_function
val nf_evar : evar_map -> constr -> constr
(** Lazy strategy, weak head reduction *)
val whd_evar : evar_map -> constr -> constr
val whd_nored : local_reduction_function
val whd_beta : local_reduction_function
val whd_betaiota : local_reduction_function
val whd_betaiotazeta : local_reduction_function
val whd_betadeltaiota : contextual_reduction_function
val whd_betadeltaiota_nolet : contextual_reduction_function
val whd_betaetalet : local_reduction_function
val whd_betalet : local_reduction_function
(** Removes cast and put into applicative form *)
val whd_nored_stack : local_stack_reduction_function
val whd_beta_stack : local_stack_reduction_function
val whd_betaiota_stack : local_stack_reduction_function
val whd_betaiotazeta_stack : local_stack_reduction_function
val whd_betadeltaiota_stack : contextual_stack_reduction_function
val whd_betadeltaiota_nolet_stack : contextual_stack_reduction_function
val whd_betaetalet_stack : local_stack_reduction_function
val whd_betalet_stack : local_stack_reduction_function
val whd_nored_state : local_state_reduction_function
val whd_beta_state : local_state_reduction_function
val whd_betaiota_state : local_state_reduction_function
val whd_betaiotazeta_state : local_state_reduction_function
val whd_betadeltaiota_state : contextual_state_reduction_function
val whd_betadeltaiota_nolet_state : contextual_state_reduction_function
val whd_betaetalet_state : local_state_reduction_function
val whd_betalet_state : local_state_reduction_function
* { 6 Head normal forms }
val whd_delta_stack : stack_reduction_function
val whd_delta_state : state_reduction_function
val whd_delta : reduction_function
val whd_betadelta_stack : stack_reduction_function
val whd_betadelta_state : state_reduction_function
val whd_betadelta : reduction_function
val whd_betadeltaeta_stack : stack_reduction_function
val whd_betadeltaeta_state : state_reduction_function
val whd_betadeltaeta : reduction_function
val whd_betadeltaiotaeta_stack : stack_reduction_function
val whd_betadeltaiotaeta_state : state_reduction_function
val whd_betadeltaiotaeta : reduction_function
val whd_eta : constr -> constr
val whd_zeta : constr -> constr
(** Various reduction functions *)
val safe_evar_value : evar_map -> existential -> constr option
val beta_applist : constr * constr list -> constr
val hnf_prod_app : env -> evar_map -> constr -> constr -> constr
val hnf_prod_appvect : env -> evar_map -> constr -> constr array -> constr
val hnf_prod_applist : env -> evar_map -> constr -> constr list -> constr
val hnf_lam_app : env -> evar_map -> constr -> constr -> constr
val hnf_lam_appvect : env -> evar_map -> constr -> constr array -> constr
val hnf_lam_applist : env -> evar_map -> constr -> constr list -> constr
val splay_prod : env -> evar_map -> constr -> (Name.t * constr) list * constr
val splay_lam : env -> evar_map -> constr -> (Name.t * constr) list * constr
val splay_arity : env -> evar_map -> constr -> (Name.t * constr) list * sorts
val sort_of_arity : env -> evar_map -> constr -> sorts
val splay_prod_n : env -> evar_map -> int -> constr -> rel_context * constr
val splay_lam_n : env -> evar_map -> int -> constr -> rel_context * constr
val splay_prod_assum :
env -> evar_map -> constr -> rel_context * constr
val is_sort : env -> evar_map -> types -> bool
type 'a miota_args = {
mP : constr; (** the result type *)
mconstr : constr; (** the constructor *)
mci : case_info; (** special info to re-build pattern *)
mcargs : 'a list; (** the constructor's arguments *)
mlf : 'a array } (** the branch code vector *)
val reducible_mind_case : constr -> bool
val reduce_mind_case : constr miota_args -> constr
val find_conclusion : env -> evar_map -> constr -> (constr,constr) kind_of_term
val is_arity : env -> evar_map -> constr -> bool
val is_sort : env -> evar_map -> types -> bool
val contract_fix : ?env:Environ.env -> ?reference:Constant.t -> fixpoint -> constr
val fix_recarg : fixpoint -> constr Stack.t -> (int * constr) option
* { 6 Querying the kernel conversion oracle : opaque / transparent constants }
val is_transparent : Environ.env -> constant tableKey -> bool
(** {6 Conversion Functions (uses closures, lazy strategy) } *)
type conversion_test = constraints -> constraints
val pb_is_equal : conv_pb -> bool
val pb_equal : conv_pb -> conv_pb
val sort_cmp : env -> conv_pb -> sorts -> sorts -> universes -> unit
val is_conv : env -> evar_map -> constr -> constr -> bool
val is_conv_leq : env -> evar_map -> constr -> constr -> bool
val is_fconv : conv_pb -> env -> evar_map -> constr -> constr -> bool
val is_trans_conv : transparent_state -> env -> evar_map -> constr -> constr -> bool
val is_trans_conv_leq : transparent_state -> env -> evar_map -> constr -> constr -> bool
val is_trans_fconv : conv_pb -> transparent_state -> env -> evar_map -> constr -> constr -> bool
(** [check_conv] Checks universe constraints only.
pb defaults to CUMUL and ts to a full transparent state. *)
val check_conv : ?pb:conv_pb -> ?ts:transparent_state -> env -> evar_map -> constr -> constr -> bool
(** [infer_fconv] Adds necessary universe constraints to the evar map.
pb defaults to CUMUL and ts to a full transparent state. *)
val infer_conv : ?pb:conv_pb -> ?ts:transparent_state -> env -> evar_map -> constr -> constr ->
evar_map * bool
* { 6 Special - Purpose Reduction Functions }
val whd_meta : evar_map -> constr -> constr
val plain_instance : constr Metamap.t -> constr -> constr
val instance : evar_map -> constr Metamap.t -> constr -> constr
val head_unfold_under_prod : transparent_state -> reduction_function
* { 6 Heuristic for Conversion with Evar }
val whd_betaiota_deltazeta_for_iota_state :
transparent_state -> Environ.env -> Evd.evar_map -> Cst_stack.t -> state ->
state * Cst_stack.t
* { 6 Meta - related reduction functions }
val meta_instance : evar_map -> constr freelisted -> constr
val nf_meta : evar_map -> constr -> constr
val meta_reducible_instance : evar_map -> constr freelisted -> constr
| null | https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/pretyping/reductionops.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Reduction Functions.
* Machinery to customize the behavior of the reduction
* current foccussed arg
* remaining args
* [fold2 f x sk1 sk2] folds [f] on any pair of term in [(sk1,sk2)].
@return the result and the lifts to apply on the terms
* if [strip_app s] = [(a,b)], then [s = a @ b] and [b] does not
start by App or Shift
**********************************************************************
i
val stack_reduction_of_reduction :
'a reduction_function -> 'a state_reduction_function
i
* Same as [(strong whd_beta[delta][iota])], but much faster on big terms
* Lazy strategy, weak head reduction
* Removes cast and put into applicative form
* Various reduction functions
* the result type
* the constructor
* special info to re-build pattern
* the constructor's arguments
* the branch code vector
* {6 Conversion Functions (uses closures, lazy strategy) } | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Names
open Term
open Context
open Univ
open Evd
open Environ
exception Elimconst
module ReductionBehaviour : sig
type flag = [ `ReductionDontExposeCase | `ReductionNeverUnfold ]
* [ set is_local ref ( recargs , , flags ) ]
val set :
bool -> Globnames.global_reference -> (int list * int * flag list) -> unit
val get :
Globnames.global_reference -> (int list * int * flag list) option
val print : Globnames.global_reference -> Pp.std_ppcmds
end
* { 6 Machinery about a stack of unfolded constant }
cst applied to params must convertible to term of the state applied to args
cst applied to params must convertible to term of the state applied to args
*)
module Cst_stack : sig
type t
val empty : t
val add_param : constr -> t -> t
val add_args : constr array -> t -> t
val add_cst : constr -> t -> t
val best_cst : t -> (constr * constr list) option
val best_replace : constr -> t -> constr -> constr
val reference : t -> Constant.t option
val pr : t -> Pp.std_ppcmds
end
module Stack : sig
type 'a app_node
val pr_app_node : ('a -> Pp.std_ppcmds) -> 'a app_node -> Pp.std_ppcmds
type cst_member =
| Cst_const of pconstant
| Cst_proj of projection
type 'a member =
| App of 'a app_node
| Case of case_info * 'a * 'a array * Cst_stack.t
| Proj of int * int * projection * Cst_stack.t
| Fix of fixpoint * 'a t * Cst_stack.t
* 'a t * Cst_stack.t
| Shift of int
| Update of 'a
and 'a t = 'a member list
val pr : ('a -> Pp.std_ppcmds) -> 'a t -> Pp.std_ppcmds
val empty : 'a t
val is_empty : 'a t -> bool
val append_app : 'a array -> 'a t -> 'a t
val decomp : 'a t -> ('a * 'a t) option
val decomp_node_last : 'a app_node -> 'a t -> ('a * 'a t)
val compare_shape : 'a t -> 'a t -> bool
val fold2 : ('a -> Term.constr -> Term.constr -> 'a) -> 'a ->
Term.constr t -> Term.constr t -> 'a * int * int
val map : (Term.constr -> Term.constr) -> Term.constr t -> Term.constr t
val append_app_list : 'a list -> 'a t -> 'a t
val strip_app : 'a t -> 'a t * 'a t
* @return ( the nth first elements , the ( n+1)th element , the remaining stack )
val strip_n_app : int -> 'a t -> ('a t * 'a * 'a t) option
val not_purely_applicative : 'a t -> bool
val list_of_app_stack : constr t -> constr list option
val assign : 'a t -> int -> 'a -> 'a t
val args_size : 'a t -> int
val tail : int -> 'a t -> 'a t
val nth : 'a t -> int -> 'a
val best_state : constr * constr t -> Cst_stack.t -> constr * constr t
val zip : ?refold:bool -> constr * constr t -> constr
end
type state = constr * constr Stack.t
type contextual_reduction_function = env -> evar_map -> constr -> constr
type reduction_function = contextual_reduction_function
type local_reduction_function = evar_map -> constr -> constr
type e_reduction_function = env -> evar_map -> constr -> evar_map * constr
type contextual_stack_reduction_function =
env -> evar_map -> constr -> constr * constr list
type stack_reduction_function = contextual_stack_reduction_function
type local_stack_reduction_function =
evar_map -> constr -> constr * constr list
type contextual_state_reduction_function =
env -> evar_map -> state -> state
type state_reduction_function = contextual_state_reduction_function
type local_state_reduction_function = evar_map -> state -> state
val pr_state : state -> Pp.std_ppcmds
* { 6 Reduction Function Operators }
val strong : reduction_function -> reduction_function
val local_strong : local_reduction_function -> local_reduction_function
val strong_prodspine : local_reduction_function -> local_reduction_function
val stacklam : (state -> 'a) -> constr list -> constr -> constr Stack.t -> 'a
val whd_state_gen : ?csts:Cst_stack.t -> bool -> Closure.RedFlags.reds ->
Environ.env -> Evd.evar_map -> state -> state * Cst_stack.t
val iterate_whd_gen : bool -> Closure.RedFlags.reds ->
Environ.env -> Evd.evar_map -> Term.constr -> Term.constr
* { 6 Generic Optimized Reduction Function using Closures }
val clos_norm_flags : Closure.RedFlags.reds -> reduction_function
val nf_beta : local_reduction_function
val nf_betaiota : local_reduction_function
val nf_betaiotazeta : local_reduction_function
val nf_betadeltaiota : reduction_function
val nf_evar : evar_map -> constr -> constr
val whd_evar : evar_map -> constr -> constr
val whd_nored : local_reduction_function
val whd_beta : local_reduction_function
val whd_betaiota : local_reduction_function
val whd_betaiotazeta : local_reduction_function
val whd_betadeltaiota : contextual_reduction_function
val whd_betadeltaiota_nolet : contextual_reduction_function
val whd_betaetalet : local_reduction_function
val whd_betalet : local_reduction_function
val whd_nored_stack : local_stack_reduction_function
val whd_beta_stack : local_stack_reduction_function
val whd_betaiota_stack : local_stack_reduction_function
val whd_betaiotazeta_stack : local_stack_reduction_function
val whd_betadeltaiota_stack : contextual_stack_reduction_function
val whd_betadeltaiota_nolet_stack : contextual_stack_reduction_function
val whd_betaetalet_stack : local_stack_reduction_function
val whd_betalet_stack : local_stack_reduction_function
val whd_nored_state : local_state_reduction_function
val whd_beta_state : local_state_reduction_function
val whd_betaiota_state : local_state_reduction_function
val whd_betaiotazeta_state : local_state_reduction_function
val whd_betadeltaiota_state : contextual_state_reduction_function
val whd_betadeltaiota_nolet_state : contextual_state_reduction_function
val whd_betaetalet_state : local_state_reduction_function
val whd_betalet_state : local_state_reduction_function
* { 6 Head normal forms }
val whd_delta_stack : stack_reduction_function
val whd_delta_state : state_reduction_function
val whd_delta : reduction_function
val whd_betadelta_stack : stack_reduction_function
val whd_betadelta_state : state_reduction_function
val whd_betadelta : reduction_function
val whd_betadeltaeta_stack : stack_reduction_function
val whd_betadeltaeta_state : state_reduction_function
val whd_betadeltaeta : reduction_function
val whd_betadeltaiotaeta_stack : stack_reduction_function
val whd_betadeltaiotaeta_state : state_reduction_function
val whd_betadeltaiotaeta : reduction_function
val whd_eta : constr -> constr
val whd_zeta : constr -> constr
val safe_evar_value : evar_map -> existential -> constr option
val beta_applist : constr * constr list -> constr
val hnf_prod_app : env -> evar_map -> constr -> constr -> constr
val hnf_prod_appvect : env -> evar_map -> constr -> constr array -> constr
val hnf_prod_applist : env -> evar_map -> constr -> constr list -> constr
val hnf_lam_app : env -> evar_map -> constr -> constr -> constr
val hnf_lam_appvect : env -> evar_map -> constr -> constr array -> constr
val hnf_lam_applist : env -> evar_map -> constr -> constr list -> constr
val splay_prod : env -> evar_map -> constr -> (Name.t * constr) list * constr
val splay_lam : env -> evar_map -> constr -> (Name.t * constr) list * constr
val splay_arity : env -> evar_map -> constr -> (Name.t * constr) list * sorts
val sort_of_arity : env -> evar_map -> constr -> sorts
val splay_prod_n : env -> evar_map -> int -> constr -> rel_context * constr
val splay_lam_n : env -> evar_map -> int -> constr -> rel_context * constr
val splay_prod_assum :
env -> evar_map -> constr -> rel_context * constr
val is_sort : env -> evar_map -> types -> bool
type 'a miota_args = {
val reducible_mind_case : constr -> bool
val reduce_mind_case : constr miota_args -> constr
val find_conclusion : env -> evar_map -> constr -> (constr,constr) kind_of_term
val is_arity : env -> evar_map -> constr -> bool
val is_sort : env -> evar_map -> types -> bool
val contract_fix : ?env:Environ.env -> ?reference:Constant.t -> fixpoint -> constr
val fix_recarg : fixpoint -> constr Stack.t -> (int * constr) option
* { 6 Querying the kernel conversion oracle : opaque / transparent constants }
val is_transparent : Environ.env -> constant tableKey -> bool
type conversion_test = constraints -> constraints
val pb_is_equal : conv_pb -> bool
val pb_equal : conv_pb -> conv_pb
val sort_cmp : env -> conv_pb -> sorts -> sorts -> universes -> unit
val is_conv : env -> evar_map -> constr -> constr -> bool
val is_conv_leq : env -> evar_map -> constr -> constr -> bool
val is_fconv : conv_pb -> env -> evar_map -> constr -> constr -> bool
val is_trans_conv : transparent_state -> env -> evar_map -> constr -> constr -> bool
val is_trans_conv_leq : transparent_state -> env -> evar_map -> constr -> constr -> bool
val is_trans_fconv : conv_pb -> transparent_state -> env -> evar_map -> constr -> constr -> bool
* [ check_conv ] Checks universe constraints only .
pb defaults to CUMUL and ts to a full transparent state .
pb defaults to CUMUL and ts to a full transparent state.
*)
val check_conv : ?pb:conv_pb -> ?ts:transparent_state -> env -> evar_map -> constr -> constr -> bool
* [ infer_fconv ] Adds necessary universe constraints to the evar map .
pb defaults to CUMUL and ts to a full transparent state .
pb defaults to CUMUL and ts to a full transparent state.
*)
val infer_conv : ?pb:conv_pb -> ?ts:transparent_state -> env -> evar_map -> constr -> constr ->
evar_map * bool
* { 6 Special - Purpose Reduction Functions }
val whd_meta : evar_map -> constr -> constr
val plain_instance : constr Metamap.t -> constr -> constr
val instance : evar_map -> constr Metamap.t -> constr -> constr
val head_unfold_under_prod : transparent_state -> reduction_function
* { 6 Heuristic for Conversion with Evar }
val whd_betaiota_deltazeta_for_iota_state :
transparent_state -> Environ.env -> Evd.evar_map -> Cst_stack.t -> state ->
state * Cst_stack.t
* { 6 Meta - related reduction functions }
val meta_instance : evar_map -> constr freelisted -> constr
val nf_meta : evar_map -> constr -> constr
val meta_reducible_instance : evar_map -> constr freelisted -> constr
|
c344e1d277ae82300b8460dd334f4a7d99dd7fe9f46f01d614ee7c5274fd690a | ailisp/Graphic-Forms | scrolling-helper.lisp | (in-package :graphic-forms.uitoolkit.widgets)
;;;
;;; helper functions
;;;
(defun clamp-scroll-pos (pos total-steps page-size)
(setf pos (min pos (- total-steps page-size)))
(max pos 0))
(defun update-scrollbar (scrollbar step-size detail)
(let ((page-size (page-increment scrollbar))
(limit (outer-limit scrollbar))
(curr-pos (thumb-position scrollbar)))
(let ((new-pos (case detail
(:start 0)
(:end limit)
(:step-back (- curr-pos step-size))
(:step-forward (+ curr-pos step-size))
(:page-back (- curr-pos page-size))
(:page-forward (+ curr-pos page-size))
(:thumb-position curr-pos)
(:thumb-track (thumb-track-position scrollbar))
(otherwise curr-pos))))
(setf new-pos (clamp-scroll-pos new-pos limit page-size))
(setf (thumb-position scrollbar) new-pos)
new-pos)))
(defun update-scrolling-state (window axis &optional detail)
(unless axis
(return-from update-scrolling-state nil))
(unless detail
(setf detail :thumb-position))
(let ((disp (dispatcher window))
(hscrollbar (obtain-horizontal-scrollbar window))
(vscrollbar (obtain-vertical-scrollbar window)))
(let ((child (obtain-top-child window))
(origin (slot-value disp 'viewport-origin))
(h-step (gfs:size-width (step-increments disp)))
(v-step (gfs:size-height (step-increments disp)))
(new-hpos 0)
(new-vpos 0))
(cond
((eql axis :horizontal)
(setf new-hpos (update-scrollbar hscrollbar h-step detail))
(setf new-vpos (thumb-position vscrollbar)))
((eql axis :vertical)
(setf new-hpos (thumb-position hscrollbar))
(setf new-vpos (update-scrollbar vscrollbar v-step detail)))
((eql axis :both)
(setf new-hpos (update-scrollbar hscrollbar h-step detail))
(setf new-vpos (update-scrollbar vscrollbar v-step detail))))
(let ((new-x (* (floor new-hpos h-step) h-step))
(new-y (* (floor new-vpos v-step) v-step)))
(scroll child (- (gfs:point-x origin) new-x) (- (gfs:point-y origin) new-y) nil 0)
(setf (gfs:point-x origin) new-x)
(setf (gfs:point-y origin) new-y))))
detail)
(defun validate-step-values (amounts)
(if (or (<= (gfs:size-width amounts) 0) (<= (gfs:size-height amounts) 0))
(error 'gfs:toolkit-error :detail "invalid step increment")))
(defun update-scrollbar-page-sizes (window)
(setf (page-increment (obtain-vertical-scrollbar window))
(gfs:size-height (client-size window)))
(setf (page-increment (obtain-horizontal-scrollbar window))
(gfs:size-width (client-size window)))) ; recalculate client size on purpose
(defun update-viewport-origin-for-resize (window)
(let* ((top (obtain-top-child window))
(viewport-size (client-size window))
(hscrollbar (obtain-horizontal-scrollbar window))
(vscrollbar (obtain-vertical-scrollbar window))
(origin (slot-value (dispatcher window) 'viewport-origin))
(saved-x (gfs:point-x origin))
(saved-y (gfs:point-y origin))
(delta-x (- (+ (gfs:size-width viewport-size) saved-x)
(outer-limit hscrollbar)))
(delta-y (- (+ (gfs:size-height viewport-size) saved-y)
(outer-limit vscrollbar))))
(if (and (> delta-x 0) (> saved-x 0))
(setf (gfs:point-x origin) (max 0 (- saved-x delta-x)))
(setf delta-x 0))
(if (and (> delta-y 0) (> saved-y 0))
(setf (gfs:point-y origin) (max 0 (- saved-y delta-y)))
(setf delta-y 0))
(if (or (and (zerop (gfs:point-x origin)) (/= saved-x 0))
(and (zerop (gfs:point-y origin)) (/= saved-y 0)))
(progn
(redraw top)
(update top))
(scroll top delta-x delta-y nil 0))
origin))
;;;
;;; methods
;;;
(defmethod event-pre-resize ((disp scrolling-helper) (window window) rect type)
(let ((h-step (gfs:size-width (step-increments disp)))
(v-step (gfs:size-height (step-increments disp)))
(outer-size (gfw:size window))
(client-size (gfw:client-size window))
(pnt (gfs:location rect))
(size (gfs:size rect)))
(when (/= h-step 1)
(let* ((width-diff (- (gfs:size-width outer-size) (gfs:size-width client-size)))
(amount (+ (* (floor (- (gfs:size-width size) width-diff) h-step) h-step)
width-diff)))
(if (find type '(:bottom-left :left :top-left))
(decf (gfs:point-x pnt) (- amount (gfs:size-width size))))
(setf (gfs:size-width size) amount)))
(when (/= v-step 1)
(let* ((height-diff (- (gfs:size-height outer-size) (gfs:size-height client-size)))
(amount (+ (* (floor (- (gfs:size-height size) height-diff) v-step) v-step)
height-diff)))
(if (find type '(:top-left :top :top-right))
(decf (gfs:point-y pnt) (- amount (gfs:size-height size))))
(setf (gfs:size-height size) amount)))
(setf (gfs:size rect) size)))
(defmethod event-resize ((disp scrolling-helper) (window window) size type)
(declare (ignore size type))
(call-next-method)
(when (typep (layout-of window) 'heap-layout)
(update-scrollbar-page-sizes window)
(update-viewport-origin-for-resize window)))
(defmethod event-scroll ((disp scrolling-helper) (window window) axis detail)
(declare (ignore disp))
(when (typep (layout-of window) 'heap-layout)
(update-scrolling-state window axis detail)))
(defmethod initialize-instance :after ((self scrolling-helper) &key)
(validate-step-values (step-increments self)))
(defmethod print-object ((self scrolling-helper) stream)
(print-unreadable-object (self stream :type t)
(format stream "horizontal policy: ~a " (horizontal-policy-of self))
(format stream "vertical policy: ~a " (vertical-policy-of self))
(format stream "step increments: ~a" (step-increments self))))
(defmethod (setf step-increment) :after (amounts (self scrolling-helper))
(validate-step-values amounts)
(setf (slot-value self 'step-increment) (gfs:copy-size amounts)))
| null | https://raw.githubusercontent.com/ailisp/Graphic-Forms/1e0723d07e1e4e02b8ae375db8f3d65d1b444f11/src/uitoolkit/widgets/scrolling-helper.lisp | lisp |
helper functions
recalculate client size on purpose
methods
| (in-package :graphic-forms.uitoolkit.widgets)
(defun clamp-scroll-pos (pos total-steps page-size)
(setf pos (min pos (- total-steps page-size)))
(max pos 0))
(defun update-scrollbar (scrollbar step-size detail)
(let ((page-size (page-increment scrollbar))
(limit (outer-limit scrollbar))
(curr-pos (thumb-position scrollbar)))
(let ((new-pos (case detail
(:start 0)
(:end limit)
(:step-back (- curr-pos step-size))
(:step-forward (+ curr-pos step-size))
(:page-back (- curr-pos page-size))
(:page-forward (+ curr-pos page-size))
(:thumb-position curr-pos)
(:thumb-track (thumb-track-position scrollbar))
(otherwise curr-pos))))
(setf new-pos (clamp-scroll-pos new-pos limit page-size))
(setf (thumb-position scrollbar) new-pos)
new-pos)))
(defun update-scrolling-state (window axis &optional detail)
(unless axis
(return-from update-scrolling-state nil))
(unless detail
(setf detail :thumb-position))
(let ((disp (dispatcher window))
(hscrollbar (obtain-horizontal-scrollbar window))
(vscrollbar (obtain-vertical-scrollbar window)))
(let ((child (obtain-top-child window))
(origin (slot-value disp 'viewport-origin))
(h-step (gfs:size-width (step-increments disp)))
(v-step (gfs:size-height (step-increments disp)))
(new-hpos 0)
(new-vpos 0))
(cond
((eql axis :horizontal)
(setf new-hpos (update-scrollbar hscrollbar h-step detail))
(setf new-vpos (thumb-position vscrollbar)))
((eql axis :vertical)
(setf new-hpos (thumb-position hscrollbar))
(setf new-vpos (update-scrollbar vscrollbar v-step detail)))
((eql axis :both)
(setf new-hpos (update-scrollbar hscrollbar h-step detail))
(setf new-vpos (update-scrollbar vscrollbar v-step detail))))
(let ((new-x (* (floor new-hpos h-step) h-step))
(new-y (* (floor new-vpos v-step) v-step)))
(scroll child (- (gfs:point-x origin) new-x) (- (gfs:point-y origin) new-y) nil 0)
(setf (gfs:point-x origin) new-x)
(setf (gfs:point-y origin) new-y))))
detail)
(defun validate-step-values (amounts)
(if (or (<= (gfs:size-width amounts) 0) (<= (gfs:size-height amounts) 0))
(error 'gfs:toolkit-error :detail "invalid step increment")))
(defun update-scrollbar-page-sizes (window)
(setf (page-increment (obtain-vertical-scrollbar window))
(gfs:size-height (client-size window)))
(setf (page-increment (obtain-horizontal-scrollbar window))
(defun update-viewport-origin-for-resize (window)
(let* ((top (obtain-top-child window))
(viewport-size (client-size window))
(hscrollbar (obtain-horizontal-scrollbar window))
(vscrollbar (obtain-vertical-scrollbar window))
(origin (slot-value (dispatcher window) 'viewport-origin))
(saved-x (gfs:point-x origin))
(saved-y (gfs:point-y origin))
(delta-x (- (+ (gfs:size-width viewport-size) saved-x)
(outer-limit hscrollbar)))
(delta-y (- (+ (gfs:size-height viewport-size) saved-y)
(outer-limit vscrollbar))))
(if (and (> delta-x 0) (> saved-x 0))
(setf (gfs:point-x origin) (max 0 (- saved-x delta-x)))
(setf delta-x 0))
(if (and (> delta-y 0) (> saved-y 0))
(setf (gfs:point-y origin) (max 0 (- saved-y delta-y)))
(setf delta-y 0))
(if (or (and (zerop (gfs:point-x origin)) (/= saved-x 0))
(and (zerop (gfs:point-y origin)) (/= saved-y 0)))
(progn
(redraw top)
(update top))
(scroll top delta-x delta-y nil 0))
origin))
(defmethod event-pre-resize ((disp scrolling-helper) (window window) rect type)
(let ((h-step (gfs:size-width (step-increments disp)))
(v-step (gfs:size-height (step-increments disp)))
(outer-size (gfw:size window))
(client-size (gfw:client-size window))
(pnt (gfs:location rect))
(size (gfs:size rect)))
(when (/= h-step 1)
(let* ((width-diff (- (gfs:size-width outer-size) (gfs:size-width client-size)))
(amount (+ (* (floor (- (gfs:size-width size) width-diff) h-step) h-step)
width-diff)))
(if (find type '(:bottom-left :left :top-left))
(decf (gfs:point-x pnt) (- amount (gfs:size-width size))))
(setf (gfs:size-width size) amount)))
(when (/= v-step 1)
(let* ((height-diff (- (gfs:size-height outer-size) (gfs:size-height client-size)))
(amount (+ (* (floor (- (gfs:size-height size) height-diff) v-step) v-step)
height-diff)))
(if (find type '(:top-left :top :top-right))
(decf (gfs:point-y pnt) (- amount (gfs:size-height size))))
(setf (gfs:size-height size) amount)))
(setf (gfs:size rect) size)))
(defmethod event-resize ((disp scrolling-helper) (window window) size type)
(declare (ignore size type))
(call-next-method)
(when (typep (layout-of window) 'heap-layout)
(update-scrollbar-page-sizes window)
(update-viewport-origin-for-resize window)))
(defmethod event-scroll ((disp scrolling-helper) (window window) axis detail)
(declare (ignore disp))
(when (typep (layout-of window) 'heap-layout)
(update-scrolling-state window axis detail)))
(defmethod initialize-instance :after ((self scrolling-helper) &key)
(validate-step-values (step-increments self)))
(defmethod print-object ((self scrolling-helper) stream)
(print-unreadable-object (self stream :type t)
(format stream "horizontal policy: ~a " (horizontal-policy-of self))
(format stream "vertical policy: ~a " (vertical-policy-of self))
(format stream "step increments: ~a" (step-increments self))))
(defmethod (setf step-increment) :after (amounts (self scrolling-helper))
(validate-step-values amounts)
(setf (slot-value self 'step-increment) (gfs:copy-size amounts)))
|
9c51b8dccf841b1410c8c817f43db34fe737bc0aa426e6cb0aafa2b55db7d2e5 | theodormoroianu/SecondYearCourses | LambdaChurch_20210415165424.hs | module LambdaChurch where
import Data.Char (isLetter)
import Data.List ( nub )
class ShowNice a where
showNice :: a -> String
class ReadNice a where
readNice :: String -> (a, String)
data Variable
= Variable
{ name :: String
, count :: Int
}
deriving (Show, Eq, Ord)
var :: String -> Variable
var x = Variable x 0
instance ShowNice Variable where
showNice (Variable x 0) = x
showNice (Variable x cnt) = x <> "_" <> show cnt
instance ReadNice Variable where
readNice s
| null x = error $ "expected variable but found " <> s
| otherwise = (var x, s')
where
(x, s') = span isLetter s
freshVariable :: Variable -> [Variable] -> Variable
freshVariable var vars = Variable x (cnt + 1)
where
x = name var
varsWithName = filter ((== x) . name) vars
Variable _ cnt = maximum (var : varsWithName)
data Term
= V Variable
| App Term Term
| Lam Variable Term
deriving (Show)
-- alpha-equivalence
aEq :: Term -> Term -> Bool
aEq (V x) (V x') = x == x'
aEq (App t1 t2) (App t1' t2') = aEq t1 t1' && aEq t2 t2'
aEq (Lam x t) (Lam x' t')
| x == x' = aEq t t'
| otherwise = aEq (subst (V y) x t) (subst (V y) x' t')
where
fvT = freeVars t
fvT' = freeVars t'
allFV = nub ([x, x'] ++ fvT ++ fvT')
y = freshVariable x allFV
aEq _ _ = False
v :: String -> Term
v x = V (var x)
lam :: String -> Term -> Term
lam x = Lam (var x)
lams :: [String] -> Term -> Term
lams xs t = foldr lam t xs
($$) :: Term -> Term -> Term
($$) = App
infixl 9 $$
instance ShowNice Term where
showNice (V var) = showNice var
showNice (App t1 t2) = "(" <> showNice t1 <> " " <> showNice t2 <> ")"
showNice (Lam var t) = "(" <> "\\" <> showNice var <> "." <> showNice t <> ")"
instance ReadNice Term where
readNice [] = error "Nothing to read"
readNice ('(' : '\\' : s) = (Lam var t, s'')
where
(var, '.' : s') = readNice s
(t, ')' : s'') = readNice s'
readNice ('(' : s) = (App t1 t2, s'')
where
(t1, ' ' : s') = readNice s
(t2, ')' : s'') = readNice s'
readNice s = (V var, s')
where
(var, s') = readNice s
freeVars :: Term -> [Variable]
freeVars (V var) = [var]
freeVars (App t1 t2) = nub $ freeVars t1 ++ freeVars t2
freeVars (Lam var t) = filter (/= var) (freeVars t)
-- subst u x t defines [u/x]t, i.e., substituting u for x in t
-- for example [3/x](x + x) == 3 + 3
-- This substitution avoids variable captures so it is safe to be used when
-- reducing terms with free variables (e.g., if evaluating inside lambda abstractions)
subst
:: Term -- ^ substitution term
-> Variable -- ^ variable to be substitutes
-> Term -- ^ term in which the substitution occurs
-> Term
subst u x (V y)
| x == y = u
| otherwise = V y
subst u x (App t1 t2) = App (subst u x t1) (subst u x t2)
subst u x (Lam y t)
| x == y = Lam y t
| y `notElem` fvU = Lam y (subst u x t)
| x `notElem` fvT = Lam y t
| otherwise = Lam y' (subst u x (subst (V y') y t))
where
fvT = freeVars t
fvU = freeVars u
allFV = nub ([x] ++ fvU ++ fvT)
y' = freshVariable y allFV
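-- Illustrative check (the example* name below is an addition, not part of the lab file):
-- substituting y for x under a binder that is itself called y must rename that binder,
-- so the result is alpha-equivalent to a lambda whose body is the free y.
exampleSubstCapture :: Bool
exampleSubstCapture =
  aEq (subst (v "y") (var "x") (lam "y" (v "x"))) (lam "z" (v "y"))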
-- Normal order reduction
-- - like call by name
-- - but also reduce under lambda abstractions if no application is possible
-- - guarantees reaching a normal form if it exists
normalReduceStep :: Term -> Maybe Term
normalReduceStep (App (Lam v t) t2) = Just $ subst t2 v t
normalReduceStep (App t1 t2)
| Just t1' <- normalReduceStep t1 = Just $ App t1' t2
| Just t2' <- normalReduceStep t2 = Just $ App t1 t2'
normalReduceStep (Lam x t)
| Just t' <- normalReduceStep t = Just $ Lam x t'
normalReduceStep _ = Nothing
normalReduce :: Term -> Term
normalReduce t
| Just t' <- normalReduceStep t = normalReduce t'
| otherwise = t
reduce :: Term -> Term
reduce = normalReduce
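-- Small illustrative check (new helper name): normal order also reduces under a
-- lambda, so a redex sitting in the body disappears as well.
exampleReduceUnderLambda :: Bool
exampleReduceUnderLambda =
  aEq (reduce (lam "x" (lam "y" (v "y") $$ v "x"))) (lam "x" (v "x"))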
-- alpha-beta equivalence (for strongly normalizing terms) is obtained by
-- fully evaluating the terms using beta-reduction, then checking their
-- alpha-equivalence.
abEq :: Term -> Term -> Bool
abEq t1 t2 = aEq (reduce t1) (reduce t2)
evaluate :: String -> String
evaluate s = showNice (reduce t)
where
(t, "") = readNice s
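-- For instance (illustrative, assuming the concrete syntax read by readNice and
-- printed by showNice): applying the self-application function to the identity
-- evaluates to the identity.
exampleEvaluate :: String
exampleEvaluate = evaluate "((\\x.(x x)) (\\y.y))"  -- reduces to the identity, printed as (\y.y)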
-- Church Encodings in Lambda
------------
--BOOLEANS--
------------
-- A boolean is any way to choose between two alternatives (t -> t -> t)
-- The boolean constant true always chooses the first alternative
cTrue :: Term
cTrue = lams ["t", "f"] (v "t")
-- The boolean constant false always chooses the second alternative
cFalse :: Term
cFalse = lams ["t", "f"] (v "f")
--If is not really needed because we can use the booleans themselves, but...
cIf :: Term
cIf = lams ["c", "then", "else"] (v "c" $$ v "then" $$ v "else")
--The boolean negation switches the alternatives
cNot :: Term
cNot = lam "b" (v "b" $$ cFalse $$ cTrue)
--The boolean conjunction can be built as a conditional
cAnd :: Term
cAnd = lams ["b1", "b2"] (v "b1" $$ v "b2" $$ cFalse)
--The boolean disjunction can be built as a conditional
cOr :: Term
cOr = lams ["b1", "b2"] (v "b1" $$ cTrue $$ v "b2")
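-- A few sanity checks for the Church booleans above (illustrative; the example*
-- name is an addition). Every entry should come out True.
exampleBoolChecks :: [Bool]
exampleBoolChecks =
  [ abEq (cNot $$ cTrue) cFalse
  , abEq (cAnd $$ cTrue $$ cFalse) cFalse
  , abEq (cOr $$ cFalse $$ cTrue) cTrue
  , abEq (cIf $$ cFalse $$ cTrue $$ cFalse) cFalse
  ]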
----------
-- PAIRS--
----------
-- a pair with components of type a and b is a way to compute something based
-- on the values contained within the pair (a -> b -> c) -> c
-- builds a pair out of two values as an object which, when given
--a function to be applied on the values, it will apply it on them.
cPair :: Term
cPair = lams ["f", "s", "action"] (v "action" $$ v "f" $$ v "s")
-- first projection uses the function selecting first component on a pair
cFst :: Term
cFst = lam "pair" (v "pair" $$ cTrue)
cSnd :: Term
cSnd = lam "pair" (v "pair" $$ cFalse)
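-- Pair sanity checks (illustrative addition): projecting out of a freshly built pair.
examplePairChecks :: [Bool]
examplePairChecks =
  [ abEq (cFst $$ (cPair $$ cTrue $$ cFalse)) cTrue
  , abEq (cSnd $$ (cPair $$ cTrue $$ cFalse)) cFalse
  ]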
c0 :: Term
c0 = lams ["s", "z"] (v "z") -- note that it's the same as cFalse
c1 :: Term
c1 = lams ["s", "z"] (v "s" $$ v "z")
c2 :: Term
c2 = lams ["s", "z"] (v "s" $$ (v "s" $$ v "z"))
cS :: Term
cS = lams ["t","s","z"] (v "s" $$ (v "t" $$ v "s" $$ v "z"))
cNat :: Integer -> Term
cNat n
  | n <= 0 = c0
  | otherwise = cS $$ cNat (n - 1)
cPlus :: Term
cPlus = lams ["n", "m", "s", "z"] (v "n" $$ v "s" $$ (v "m" $$ v "s" $$ v "z"))
cPlus' :: Term
cPlus' = lams ["n", "m"] (v "n" $$ cS $$ v "m")
cMul :: Term
cMul = lams ["n", "m", "s"] (v "n" $$ (v "m" $$ v "s"))
cMul' :: Term
cMul' = lams ["n", "m"] (v "n" $$ (cPlus' $$ v "m") $$ c0)
cPow :: Term
cPow = lams ["m", "n"] (v "n" $$ v "m")
cPow' :: Term
cPow' = lams ["m", "n"] (v "n" $$ (cMul' $$ v "m") $$ c1)
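-- Arithmetic sanity checks on Church numerals (illustrative addition); all should be True.
exampleArithChecks :: [Bool]
exampleArithChecks =
  [ abEq (cPlus $$ c1 $$ c1) c2
  , abEq (cPlus' $$ c1 $$ c1) c2
  , abEq (cMul $$ c2 $$ c1) c2
  , abEq (cPow $$ c2 $$ c1) c2
  ]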
cIs0 :: Term
cIs0 = lam "n" (v "n" $$ (cAnd $$ cFalse) $$ cTrue)
cS' :: Term
cS' = lam "n" (v "n" $$ cS $$ c1)
cS'Rev0 :: Term
cS'Rev0 = lams ["s","z"] c0
cPred :: Term
cPred =
lam "n"
(cIf
$$ (cIs0 $$ v "n")
$$ c0
$$ (v "n" $$ cS' $$ cS'Rev0))
cSub :: Term
cSub = lams ["m", "n"] (v "n" $$ cPred $$ v "m")
cLte :: Term
cLte = lams ["m", "n"] (cIs0 $$ (cSub $$ v "m" $$ v "n"))
cGte :: Term
cGte = lams ["m", "n"] (cLte $$ v "n" $$ v "m")
cLt :: Term
cLt = lams ["m", "n"] (cNot $$ (cGte $$ v "m" $$ v "n"))
cGt :: Term
cGt = lams ["m", "n"] (cLt $$ v "n" $$ v "m")
cEq :: Term
cEq = lams ["m", "n"] (cAnd $$ (cLte $$ v "m" $$ v "n") $$ (cLte $$ v "n" $$ v "m"))
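-- Comparison and predecessor sanity checks (illustrative addition); all should be True.
exampleCompareChecks :: [Bool]
exampleCompareChecks =
  [ abEq (cIs0 $$ c0) cTrue
  , abEq (cPred $$ c2) c1
  , abEq (cSub $$ c2 $$ c1) c1
  , abEq (cLte $$ c1 $$ c2) cTrue
  , abEq (cEq $$ c2 $$ c2) cTrue
  ]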
cPred' :: Term
cPred' = lam "n" (cFst $$
(v "n"
$$ lam "p" (lam "x" (cPair $$ v "x" $$ (cS $$ v "x"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ c0 $$ c0)
))
cFactorial :: Term
cFactorial = lam "n" (cSnd $$
(v "n"
$$ lam "p"
(cPair
$$ (cS $$ (cFst $$ v "p"))
$$ (cMul $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c1 $$ c1)
))
cFibonacci :: Term
cFibonacci = lam "n" (cFst $$
(v "n"
$$ lam "p"
(cPair
$$ (cSnd $$ v "p")
$$ (cPlus $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c0 $$ c1)
))
cDivMod :: Term
cDivMod =
lams ["m", "n"]
(v "m"
$$ lam "pair"
(cIf
$$ (cLte $$ v "n" $$ (cSnd $$ v "pair"))
$$ (cPair
$$ (cS $$ (cFst $$ v "pair"))
$$ (cSub
$$ (cSnd $$ v "pair")
$$ v "n"
)
)
$$ v "pair"
)
$$ (cPair $$ c0 $$ v "m")
)
cNil :: Term
cNil = lams ["agg", "init"] (v "init")
cCons :: Term
cCons = lams ["x","l","agg", "init"]
(v "agg"
$$ v "x"
$$ (v "l" $$ v "agg" $$ v "init")
)
cList :: [Term] -> Term
cList = foldr (\x l -> cCons $$ x $$ l) cNil
cNatList :: [Integer] -> Term
cNatList = cList . map cNat
cSum :: Term
cSum = lam "l" (v "l" $$ cPlus $$ c0)
cIsNil :: Term
cIsNil = lam "l" (v "l" $$ lams ["x", "a"] cFalse $$ cTrue)
cHead :: Term
cHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
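-- List sanity checks (illustrative addition); cTail is defined just below, so it is
-- left out here. All entries should be True.
exampleListChecks :: [Bool]
exampleListChecks =
  [ abEq (cIsNil $$ cNil) cTrue
  , abEq (cIsNil $$ cList [c1]) cFalse
  , abEq (cSum $$ cList [c1, c1]) c2
  , abEq (cHead $$ cList [c2] $$ c0) c2
  ]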
cTail :: Term
cTail = lam "l" (cFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (cPair $$ v "t" $$ (cCons $$ v "x" $$ v "t"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ cNil $$ cNil)
))
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
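-- The fixed-point combinator lets the recursive definitions below (cDivMod', cSudan,
-- cAckermann) unfold only as far as normal-order reduction demands. Illustrative
-- base-case check (new helper name): Ackermann(0, n) = n + 1.
exampleFixCheck :: Bool
exampleFixCheck = abEq (cAckermann $$ c0 $$ c1) c2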
cDivMod' :: Term
cDivMod' = lams ["m", "n"]
(cIs0 $$ v "n"
$$ (cPair $$ c0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(cIs0 $$ v "x"
$$ (cLte $$ v "n" $$ (cSnd $$ v "p")
$$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ c0)
$$ v "p"
)
$$ (v "f" $$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ v "x"))
)
$$ (cSub $$ (cSnd $$ v "p") $$ v "n")
)
$$ (cPair $$ c0 $$ v "m")
)
)
cSudan :: Term
cSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(cIs0 $$ v "n"
$$ (cPlus $$ v "x" $$ v "y")
$$ (cIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (cPred $$ v "n")
$$ v "fnpy"
$$ (cPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (cPred $$ v "y"))
)
)
))
cAckermann :: Term
cAckermann = fix $$ lam "A" (lams ["m", "n"]
(cIs0 $$ v "m"
$$ (cS $$ v "n")
$$ (cIs0 $$ v "n"
$$ (v "A" $$ (cPred $$ v "m") $$ c1)
$$ (v "A" $$ (cPred $$ v "m")
$$ (v "A" $$ v "m" $$ (cPred $$ v "n")))
)
))
| null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/LambdaChurch_20210415165424.hs | haskell | alpha-equivalence
subst u x t defines [u/x]t, i.e., substituting u for x in t
This substitution avoids variable captures so it is safe to be used when
reducing terms with free variables (e.g., if evaluating inside lambda abstractions)
^ substitution term
^ variable to be substitutes
^ term in which the substitution occurs
Normal order reduction
- like call by name
- but also reduce under lambda abstractions if no application is possible
- guarantees reaching a normal form if it exists
alpha-beta equivalence (for strongly normalizing terms) is obtained by
fully evaluating the terms using beta-reduction, then checking their
alpha-equivalence.
Church Encodings in Lambda
----------
BOOLEANS--
----------
If is not really needed because we can use the booleans themselves, but...
The boolean negation switches the alternatives
The boolean conjunction can be built as a conditional
The boolean disjunction can be built as a conditional
--------
PAIRS--
--------
a pair with components of type a and b is a way to compute something based
on the values contained within the pair (a -> b -> c) -> c
a function to be applied on the values, it will apply it on them.
note that it's the same as cFalse | module LambdaChurch where
import Data.Char (isLetter)
import Data.List ( nub )
class ShowNice a where
showNice :: a -> String
class ReadNice a where
readNice :: String -> (a, String)
data Variable
= Variable
{ name :: String
, count :: Int
}
deriving (Show, Eq, Ord)
var :: String -> Variable
var x = Variable x 0
instance ShowNice Variable where
showNice (Variable x 0) = x
showNice (Variable x cnt) = x <> "_" <> show cnt
instance ReadNice Variable where
readNice s
| null x = error $ "expected variable but found " <> s
| otherwise = (var x, s')
where
(x, s') = span isLetter s
freshVariable :: Variable -> [Variable] -> Variable
freshVariable var vars = Variable x (cnt + 1)
where
x = name var
varsWithName = filter ((== x) . name) vars
Variable _ cnt = maximum (var : varsWithName)
data Term
= V Variable
| App Term Term
| Lam Variable Term
deriving (Show)
aEq :: Term -> Term -> Bool
aEq (V x) (V x') = x == x'
aEq (App t1 t2) (App t1' t2') = aEq t1 t1' && aEq t2 t2'
aEq (Lam x t) (Lam x' t')
| x == x' = aEq t t'
| otherwise = aEq (subst (V y) x t) (subst (V y) x' t')
where
fvT = freeVars t
fvT' = freeVars t'
allFV = nub ([x, x'] ++ fvT ++ fvT')
y = freshVariable x allFV
aEq _ _ = False
v :: String -> Term
v x = V (var x)
lam :: String -> Term -> Term
lam x = Lam (var x)
lams :: [String] -> Term -> Term
lams xs t = foldr lam t xs
($$) :: Term -> Term -> Term
($$) = App
infixl 9 $$
instance ShowNice Term where
showNice (V var) = showNice var
showNice (App t1 t2) = "(" <> showNice t1 <> " " <> showNice t2 <> ")"
showNice (Lam var t) = "(" <> "\\" <> showNice var <> "." <> showNice t <> ")"
instance ReadNice Term where
readNice [] = error "Nothing to read"
readNice ('(' : '\\' : s) = (Lam var t, s'')
where
(var, '.' : s') = readNice s
(t, ')' : s'') = readNice s'
readNice ('(' : s) = (App t1 t2, s'')
where
(t1, ' ' : s') = readNice s
(t2, ')' : s'') = readNice s'
readNice s = (V var, s')
where
(var, s') = readNice s
freeVars :: Term -> [Variable]
freeVars (V var) = [var]
freeVars (App t1 t2) = nub $ freeVars t1 ++ freeVars t2
freeVars (Lam var t) = filter (/= var) (freeVars t)
-- for example [3/x](x + x) == 3 + 3
subst
  :: Term     -- ^ substitution term
  -> Variable -- ^ variable to be substituted
  -> Term     -- ^ term in which the substitution occurs
  -> Term
subst u x (V y)
| x == y = u
| otherwise = V y
subst u x (App t1 t2) = App (subst u x t1) (subst u x t2)
subst u x (Lam y t)
| x == y = Lam y t
| y `notElem` fvU = Lam y (subst u x t)
| x `notElem` fvT = Lam y t
| otherwise = Lam y' (subst u x (subst (V y') y t))
where
fvT = freeVars t
fvU = freeVars u
allFV = nub ([x] ++ fvU ++ fvT)
y' = freshVariable y allFV
normalReduceStep :: Term -> Maybe Term
normalReduceStep (App (Lam v t) t2) = Just $ subst t2 v t
normalReduceStep (App t1 t2)
| Just t1' <- normalReduceStep t1 = Just $ App t1' t2
| Just t2' <- normalReduceStep t2 = Just $ App t1 t2'
normalReduceStep (Lam x t)
| Just t' <- normalReduceStep t = Just $ Lam x t'
normalReduceStep _ = Nothing
normalReduce :: Term -> Term
normalReduce t
| Just t' <- normalReduceStep t = normalReduce t'
| otherwise = t
reduce :: Term -> Term
reduce = normalReduce
abEq :: Term -> Term -> Bool
abEq t1 t2 = aEq (reduce t1) (reduce t2)
evaluate :: String -> String
evaluate s = showNice (reduce t)
where
(t, "") = readNice s
-- A boolean is any way to choose between two alternatives (t -> t -> t)
-- The boolean constant true always chooses the first alternative
cTrue :: Term
cTrue = lams ["t", "f"] (v "t")
-- The boolean constant false always chooses the second alternative
cFalse :: Term
cFalse = lams ["t", "f"] (v "f")
cIf :: Term
cIf = lams ["c", "then", "else"] (v "c" $$ v "then" $$ v "else")
cNot :: Term
cNot = lam "b" (v "b" $$ cFalse $$ cTrue)
cAnd :: Term
cAnd = lams ["b1", "b2"] (v "b1" $$ v "b2" $$ cFalse)
cOr :: Term
cOr = lams ["b1", "b2"] (v "b1" $$ cTrue $$ v "b2")
-- builds a pair out of two values as an object which, when given
-- a function to be applied on the values, it will apply it on them.
cPair :: Term
cPair = lams ["f", "s", "action"] (v "action" $$ v "f" $$ v "s")
-- first projection uses the function selecting first component on a pair
cFst :: Term
cFst = lam "pair" (v "pair" $$ cTrue)
cSnd :: Term
cSnd = lam "pair" (v "pair" $$ cFalse)
c0 :: Term
c0 = cFalse -- note that it's the same as cFalse
c1 :: Term
c1 = lams ["s", "z"] (v "s" $$ v "z")
c2 :: Term
c2 = lams ["s", "z"] (v "s" $$ (v "s" $$ v "z"))
cS :: Term
cS = lams ["t","s","z"] (v "s" $$ (v "t" $$ v "s" $$ v "z"))
cNat :: Integer -> Term
-- the original left cNat undefined; this is one straightforward way to build
-- the n-fold application s (s (... (s z)))
cNat n = lams ["s", "z"] (iterate (v "s" $$) (v "z") !! fromInteger n)
cPlus :: Term
cPlus = lams ["n", "m", "s", "z"] (v "n" $$ v "s" $$ (v "m" $$ v "s" $$ v "z"))
cPlus' :: Term
cPlus' = lams ["n", "m"] (v "n" $$ cS $$ v "m")
cMul :: Term
cMul = lams ["n", "m", "s"] (v "n" $$ (v "m" $$ v "s"))
cMul' :: Term
cMul' = lams ["n", "m"] (v "n" $$ (cPlus' $$ v "m") $$ c0)
cPow :: Term
cPow = lams ["m", "n"] (v "n" $$ v "m")
cPow' :: Term
cPow' = lams ["m", "n"] (v "n" $$ (cMul' $$ v "m") $$ c1)
cIs0 :: Term
cIs0 = lam "n" (v "n" $$ (cAnd $$ cFalse) $$ cTrue)
cS' :: Term
cS' = lam "n" (v "n" $$ cS $$ c1)
cS'Rev0 :: Term
cS'Rev0 = lams ["s","z"] c0
cPred :: Term
cPred =
lam "n"
(cIf
$$ (cIs0 $$ v "n")
$$ c0
$$ (v "n" $$ cS' $$ cS'Rev0))
cSub :: Term
cSub = lams ["m", "n"] (v "n" $$ cPred $$ v "m")
cLte :: Term
cLte = lams ["m", "n"] (cIs0 $$ (cSub $$ v "m" $$ v "n"))
cGte :: Term
cGte = lams ["m", "n"] (cLte $$ v "n" $$ v "m")
cLt :: Term
cLt = lams ["m", "n"] (cNot $$ (cGte $$ v "m" $$ v "n"))
cGt :: Term
cGt = lams ["m", "n"] (cLt $$ v "n" $$ v "m")
cEq :: Term
cEq = lams ["m", "n"] (cAnd $$ (cLte $$ v "m" $$ v "n") $$ (cLte $$ v "n" $$ v "m"))
cPred' :: Term
cPred' = lam "n" (cFst $$
(v "n"
$$ lam "p" (lam "x" (cPair $$ v "x" $$ (cS $$ v "x"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ c0 $$ c0)
))
cFactorial :: Term
cFactorial = lam "n" (cSnd $$
(v "n"
$$ lam "p"
(cPair
$$ (cS $$ (cFst $$ v "p"))
$$ (cMul $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c1 $$ c1)
))
cFibonacci :: Term
cFibonacci = lam "n" (cFst $$
(v "n"
$$ lam "p"
(cPair
$$ (cSnd $$ v "p")
$$ (cPlus $$ (cFst $$ v "p") $$ (cSnd $$ v "p"))
)
$$ (cPair $$ c0 $$ c1)
))
cDivMod :: Term
cDivMod =
lams ["m", "n"]
(v "m"
$$ lam "pair"
(cIf
$$ (cLte $$ v "n" $$ (cSnd $$ v "pair"))
$$ (cPair
$$ (cS $$ (cFst $$ v "pair"))
$$ (cSub
$$ (cSnd $$ v "pair")
$$ v "n"
)
)
$$ v "pair"
)
$$ (cPair $$ c0 $$ v "m")
)
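-- (Added note) cDivMod m n folds m times over the pair (quotient, remainder),
-- starting from (0, m): whenever the remainder is still at least n it bumps the
-- quotient and subtracts n, so e.g. dividing 7 by 2 ends in the pair (3, 1).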
cNil :: Term
cNil = lams ["agg", "init"] (v "init")
cCons :: Term
cCons = lams ["x","l","agg", "init"]
(v "agg"
$$ v "x"
$$ (v "l" $$ v "agg" $$ v "init")
)
cList :: [Term] -> Term
cList = foldr (\x l -> cCons $$ x $$ l) cNil
cNatList :: [Integer] -> Term
cNatList = cList . map cNat
cSum :: Term
cSum = lam "l" (v "l" $$ cPlus $$ c0)
cIsNil :: Term
cIsNil = lam "l" (v "l" $$ lams ["x", "a"] cFalse $$ cTrue)
cHead :: Term
cHead = lams ["l", "default"] (v "l" $$ lams ["x", "a"] (v "x") $$ v "default")
cTail :: Term
cTail = lam "l" (cFst $$
(v "l"
$$ lams ["x","p"] (lam "t" (cPair $$ v "t" $$ (cCons $$ v "x" $$ v "t"))
$$ (cSnd $$ v "p"))
$$ (cPair $$ cNil $$ cNil)
))
fix :: Term
fix = lam "f" (lam "x" (v "f" $$ (v "x" $$ v "x")) $$ lam "x" (v "f" $$ (v "x" $$ v "x")))
cDivMod' :: Term
cDivMod' = lams ["m", "n"]
(cIs0 $$ v "n"
$$ (cPair $$ c0 $$ v "m")
$$ (fix
$$ lams ["f", "p"]
(lam "x"
(cIs0 $$ v "x"
$$ (cLte $$ v "n" $$ (cSnd $$ v "p")
$$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ c0)
$$ v "p"
)
$$ (v "f" $$ (cPair $$ (cS $$ (cFst $$ v "p")) $$ v "x"))
)
$$ (cSub $$ (cSnd $$ v "p") $$ v "n")
)
$$ (cPair $$ c0 $$ v "m")
)
)
cSudan :: Term
cSudan = fix $$ lam "f" (lams ["n", "x", "y"]
(cIs0 $$ v "n"
$$ (cPlus $$ v "x" $$ v "y")
$$ (cIs0 $$ v "y"
$$ v "x"
$$ (lam "fnpy"
(v "f" $$ (cPred $$ v "n")
$$ v "fnpy"
$$ (cPlus $$ v "fnpy" $$ v "y")
)
$$ (v "f" $$ v "n" $$ v "x" $$ (cPred $$ v "y"))
)
)
))
cAckermann :: Term
cAckermann = fix $$ lam "A" (lams ["m", "n"]
(cIs0 $$ v "m"
$$ (cS $$ v "n")
$$ (cIs0 $$ v "n"
$$ (v "A" $$ (cPred $$ v "m") $$ c1)
$$ (v "A" $$ (cPred $$ v "m")
$$ (v "A" $$ v "m" $$ (cPred $$ v "n")))
)
))
|
af69a2f8916ce24f4cbb2f6cb94491123cb7e09a8e1c4296368572c96b9f8f47 | opencog/opencog | rules.scm | Copyright ( C ) 2016 OpenCog Foundation
; --------------------------------------------------------------
; If you want to run this in guile without installing,
; 1. run cmake in the build directory
; 2. run (add-to-load-path "/absolute/path/to/build/opencog/scm")
;
; NOTE:
; 1. The context of the rules are created so as to test possible generic
;    patterns during application development, thus the semantics might
;    not make sense.
; 2. The numbering of the demands, context, action, and goals are used for
;    differentiating and are not necessarily related with the number of tests.
; --------------------------------------------------------------
(use-modules (opencog) (opencog openpsi))
; --------------------------------------------------------------
; Rule - 1
; --------------------------------------------------------------
(define context-1
(list
(ListLink
(VariableNode "x1")
(VariableNode "y1")
(ConceptNode "Required constant for DualLink-1")
(VariableNode "z1"))
(InheritanceLink
(VariableNode "x1")
(VariableNode "z1"))
(EqualLink
(VariableNode "x1")
(VariableNode "y1"))
))
(define (context-1-cpp) (List context-1))
(define action-1
(ExecutionOutput
(GroundedSchema "scm: act-1")
(ListLink (Variable "$abc"))))
(define goal-1 (Concept "goal-1"))
(define (act-1 groundings)
(ConceptNode "act-1")
)
(define (component-1) (psi-component "component-1"))
(define goal-1 (psi-goal "goal-1" .1))
(define (test-update-tv node strength)
(cog-set-tv! node
(stv (cog-number strength) (cog-confidence node)))
(stv 1 1)
)
(define (rule-1) (psi-rule context-1 action-1 goal-1 (stv 1 1) (component-1)))
(define (rule-1-cpp)
(ImplicationLink (stv 1 1)
(AndLink
(ListLink
(VariableNode "x1")
(VariableNode "y1")
(ConceptNode "Required constant for DualLink-1")
(VariableNode "z1")
)
(InheritanceLink
(VariableNode "x1")
(VariableNode "z1")
)
(EqualLink
(VariableNode "x1")
(VariableNode "y1")
)
(ExecutionOutputLink
(GroundedSchemaNode "scm: act-1")
(ListLink
(VariableNode "$abc")
)
)
)
(ConceptNode "goal-1")
)
)
(define (groundable-content-1)
(list
(ListLink
(NumberNode 1)
(NumberNode 1)
(ConceptNode "Required constant for DualLink-1")
(PredicateNode "z"))
(InheritanceLink
(NumberNode 1)
(PredicateNode "z")))
)
; --------------------------------------------------------------
; Rule - 2
; --------------------------------------------------------------
(define context-2
(list ; They are in a list so as to simplify removal.
(ListLink
(VariableNode "x2")
(ConceptNode "Required constant for DualLink-2")
(VariableNode "z2"))
(InheritanceLink
(VariableNode "x2")
(VariableNode "z2"))
(NotLink (EqualLink
(VariableNode "x2")
(VariableNode "z2")))
))
(define (context-2-cpp) (List context-2))
(define action-2
(ExecutionOutput
(GroundedSchema "scm: act-2")
(ListLink (Variable "$abc"))))
(define (act-2 groundings)
(ConceptNode "act-2")
)
(define (component-2) (psi-component "component-2"))
(define goal-2 (psi-goal "goal-2" .2 .5))
(define (rule-2) (psi-rule context-2 action-2 goal-2 (stv 1 1) (component-2)))
(define (rule-2-cpp)
(ImplicationLink (stv 1 1)
(AndLink
(ListLink
(VariableNode "x2")
(ConceptNode "Required constant for DualLink-2")
(VariableNode "z2")
)
(InheritanceLink
(VariableNode "x2")
(VariableNode "z2")
)
(NotLink
(EqualLink
(VariableNode "x2")
(VariableNode "z2")
)
)
(ExecutionOutputLink
(GroundedSchemaNode "scm: act-2")
(ListLink
(VariableNode "$abc")
)
)
)
(ConceptNode "goal-2")
)
)
(define (groundable-content-2)
(list ; They are in a list so as to simplify removal.
(ListLink
(NumberNode 1)
(ConceptNode "Required constant for DualLink-2")
(NumberNode 2))
(InheritanceLink
(NumberNode 1)
(NumberNode 2)))
)
; --------------------------------------------------------------
; Rule - 3
; --------------------------------------------------------------
(define context-3
(list ; They are in a list so as to simplify removal.
(List
(Concept "For PlusLink") ; Avoid error during pattern matching
(Variable "x")
(Variable "y"))
(Equal
(Plus (Variable "x") (Variable "y"))
(Number 5))
))
(define action-3
(ExecutionOutput
(GroundedSchema "scm: act-3")
(List (Variable "$abc"))))
(define (act-3 groundings)
(Concept "act-3")
)
(define (component-3) (psi-component "component-3"))
(define goal-3 (psi-goal "goal-3" .3))
(define (groundable-content-3)
(list ; They are in a list so as to simplify removal.
(List
(Concept "For PlusLink")
(Number 3)
(Number 2)))
)
(define (rule-3) (psi-rule context-3 action-3 goal-3 (stv 1 1) (component-3)))
; --------------------------------------------------------------
; Rule - 4
; --------------------------------------------------------------
(define context-4
(list ; They are in a list so as to simplify removal.
(List
(Concept "For PlusLink") ; Avoid error during pattern matching
(Variable "x")
(Variable "y"))
(Equal
(Plus (Variable "x") (Variable "y"))
(Number 7))
))
(define action-4
(ExecutionOutput
(GroundedSchema "scm: act-4")
(List (Variable "$abc"))))
(define (act-4 groundings)
(Concept "act-4")
)
(define (component-4) (psi-component "component-4"))
(define goal-4 (psi-goal "goal-4" .4))
(define (groundable-content-4)
(list ; They are in a list so as to simplify removal.
(List
(Concept "For PlusLink")
(Number 4)
(Number 3)))
)
(define (rule-4) (psi-rule context-4 action-4 goal-4 (stv 1 1) (component-4)))
; --------------------------------------------------------------
; Helper functions for `OpenPsiUTest::test_psi_related_goals`
(define (test_psi_related_goals_1)
(equal? goal-1 (car (psi-related-goals action-1)))
)
(define (test_psi_related_goals_2)
(if (and
(member goal-1 (psi-related-goals action-2))
(member goal-2 (psi-related-goals action-2)))
#t
#f
)
)
; --------------------------------------------------------------
; Helper functions for `OpenPsiUTest::test_psi_step_*
(define (act-3-present?) (cog-node? (cog-node 'ConceptNode "act-3")))
(define (act-4-present?) (cog-node? (cog-node 'ConceptNode "act-4")))
(define (demand-value demand-node)
"
Returns the strength of the demand-node to two decimal places.
"
(/ (round (* 100 (cog-mean demand-node)) ) 100)
)
(define (do_psi_step)
(psi-step (component-3))
(psi-step (component-4))
)
; --------------------------------------------------------------
(define (component-5) (psi-component "component-5"))
(define (component-6) (psi-component "component-6"))
(define (test-psi-run)
"
If the loop-count is increasing then it means the loop is running
"
(psi-run d1)
; The delay is b/c it is more likely that different components will
; not be started at the same time.
(sleep 1)
(psi-run d2)
(let ((l1 (psi-loop-count d1))
(l2 (psi-loop-count d2)))
; Wait for a while to be sure
(sleep 1)
(and
(< 50 (- (psi-loop-count d1) l1))
(< 50 (- (psi-loop-count d2) l2)))
)
)
(define (test-psi-halt)
"
If the loop-count is not changing then the loop has stopped.
"
(psi-halt d1)
; The delay is b/c it is more likely that different components will
; not be stopped at the same time.
(sleep 1)
(psi-halt d2)
(let ((l1 (psi-loop-count d1))
(l2 (psi-loop-count d2)))
; Wait for a while to be sure
(sleep 1)
(and
(equal? l1 (psi-loop-count d1))
(equal? l2 (psi-loop-count d2)))
)
)
; --------------------------------------------------------------
( define ( ) ( psi - component " component-5 " ) )
;(define (rule-5)
; (psi-rule
; (list context-1 (groundable-content-4))
action-1 goal-2 ( stv 1 1 ) ( ) )
;)
( define ( )
; (equal? (car (psi-get-dual-rules (car (groundable-content-4)))) (rule-4))
;)
;
;(define (test_psi_get_dual_rules_1_2)
( equal ? ( car ( psi - get - dual - rules ( cadr ( groundable - content-4 ) ) ) ) ( rule-4 ) )
;)
;
;(define (test_psi_get_dual_rules_2_1)
; (length (psi-get-dual-rules (car (groundable-content-1))))
;)
;
;(define (test_psi_get_dual_rules_2_2)
; (if (and
; (member (rule-1) (psi-get-dual-rules (car (groundable-content-1))))
( member ( rule-5 ) ( psi - get - dual - rules ( car ( groundable - content-1 ) ) ) ) )
; #t
; #f
; )
;)
; --------------------------------------------------------------
(define (test_psi_get_action_1)
(equal? action-1 (psi-get-action (rule-1)))
)
(define (test_psi_get_context_1)
(equal? (Set context-1) (Set (psi-get-context (rule-1))))
)
(define (test_psi_get_goal_1)
(equal? goal-1 (psi-get-goal (rule-1)))
)
; --------------------------------------------------------------
(define (test_psi_goal_functions_1)
"
Test psi-increase-urge function. The urge should increase in magnitude to
a maximum of 1.
Run before test_psi_goal_functions_2, so as to explore the range of values.
"
(let ((loop 8))
(while (not (equal? 0 loop))
(psi-increase-urge goal-1 0.2)
(set! loop (- loop 1)))
)
(= 1 (psi-urge goal-1))
)
(define (test_psi_goal_functions_2)
"
Test psi-decrease-urge function. The urge should decrease to a minimum of 0.
"
(let ((loop 8))
(while (not (equal? 0 loop))
(psi-decrease-urge goal-1 0.2)
(set! loop (- loop 1)))
)
(= 0.0 (psi-urge goal-1))
)
; --------------------------------------------------------------
| null | https://raw.githubusercontent.com/opencog/opencog/53f2c2c8e26160e3321b399250afb0e3dbc64d4c/tests/openpsi/rules.scm | scheme | --------------------------------------------------------------
If you want to run this in guile without installing,
NOTE:
not make sense.
differentiating and are not necessarily related with the number of tests.
--------------------------------------------------------------
--------------------------------------------------------------
Rule - 1
--------------------------------------------------------------
--------------------------------------------------------------
Rule - 2
--------------------------------------------------------------
They are in a list so as to simplify removal.
They are in a list so as to simplify removal.
--------------------------------------------------------------
Rule - 3
--------------------------------------------------------------
They are in a list so as to simplify removal.
Avoid error during pattern matching
They are in a list so as to simplify removal.
--------------------------------------------------------------
Rule - 4
--------------------------------------------------------------
They are in a list so as to simplify removal.
Avoid error during pattern matching
They are in a list so as to simplify removal.
--------------------------------------------------------------
Helper functions for `OpenPsiUTest::test_psi_related_goals`
--------------------------------------------------------------
Helper functions for `OpenPsiUTest::test_psi_step_*
--------------------------------------------------------------
The delay is b/c it is more likely that different components will
not be started at the same time.
Wait for a while to be sure
The delay is b/c it is more likely that different components will
not be stopped at the same time.
Wait for a while to be sure
--------------------------------------------------------------
(define (rule-5)
(psi-rule
(list context-1 (groundable-content-4))
)
(equal? (car (psi-get-dual-rules (car (groundable-content-4)))) (rule-4))
)
(define (test_psi_get_dual_rules_1_2)
)
(define (test_psi_get_dual_rules_2_1)
(length (psi-get-dual-rules (car (groundable-content-1))))
)
(define (test_psi_get_dual_rules_2_2)
(if (and
(member (rule-1) (psi-get-dual-rules (car (groundable-content-1))))
#t
#f
)
)
--------------------------------------------------------------
--------------------------------------------------------------
-------------------------------------------------------------- | Copyright ( C ) 2016 OpenCog Foundation
1 . run cmake in the build directory
2 . run ( add - to - load - path " /absolute / path / to / build / opencog / scm " )
1 . The context of the rules are created so as to test possible generic
patterns during application development , thus the semantics might
2 . The numbering of the demands , context , action , and goals are used for
(use-modules (opencog) (opencog openpsi))
(define context-1
(list
(ListLink
(VariableNode "x1")
(VariableNode "y1")
(ConceptNode "Required constant for DualLink-1")
(VariableNode "z1"))
(InheritanceLink
(VariableNode "x1")
(VariableNode "z1"))
(EqualLink
(VariableNode "x1")
(VariableNode "y1"))
))
(define (context-1-cpp) (List context-1))
(define action-1
(ExecutionOutput
(GroundedSchema "scm: act-1")
(ListLink (Variable "$abc"))))
(define goal-1 (Concept "goal-1"))
(define (act-1 groundings)
(ConceptNode "act-1")
)
(define (component-1) (psi-component "component-1"))
(define goal-1 (psi-goal "goal-1" .1))
(define (test-update-tv node strength)
(cog-set-tv! node
(stv (cog-number strength) (cog-confidence node)))
(stv 1 1)
)
(define (rule-1) (psi-rule context-1 action-1 goal-1 (stv 1 1) (component-1)))
(define (rule-1-cpp)
(ImplicationLink (stv 1 1)
(AndLink
(ListLink
(VariableNode "x1")
(VariableNode "y1")
(ConceptNode "Required constant for DualLink-1")
(VariableNode "z1")
)
(InheritanceLink
(VariableNode "x1")
(VariableNode "z1")
)
(EqualLink
(VariableNode "x1")
(VariableNode "y1")
)
(ExecutionOutputLink
(GroundedSchemaNode "scm: act-1")
(ListLink
(VariableNode "$abc")
)
)
)
(ConceptNode "goal-1")
)
)
(define (groundable-content-1)
(list
(ListLink
(NumberNode 1)
(NumberNode 1)
(ConceptNode "Required constant for DualLink-1")
(PredicateNode "z"))
(InheritanceLink
(NumberNode 1)
(PredicateNode "z")))
)
(define context-2
  (list
    (ListLink
(VariableNode "x2")
(ConceptNode "Required constant for DualLink-2")
(VariableNode "z2"))
(InheritanceLink
(VariableNode "x2")
(VariableNode "z2"))
(NotLink (EqualLink
(VariableNode "x2")
(VariableNode "z2")))
))
(define (context-2-cpp) (List context-2))
(define action-2
(ExecutionOutput
(GroundedSchema "scm: act-2")
(ListLink (Variable "$abc"))))
(define (act-2 groundings)
(ConceptNode "act-2")
)
(define (component-2) (psi-component "component-2"))
(define goal-2 (psi-goal "goal-2" .2 .5))
(define (rule-2) (psi-rule context-2 action-2 goal-2 (stv 1 1) (component-2)))
(define (rule-2-cpp)
(ImplicationLink (stv 1 1)
(AndLink
(ListLink
(VariableNode "x2")
(ConceptNode "Required constant for DualLink-2")
(VariableNode "z2")
)
(InheritanceLink
(VariableNode "x2")
(VariableNode "z2")
)
(NotLink
(EqualLink
(VariableNode "x2")
(VariableNode "z2")
)
)
(ExecutionOutputLink
(GroundedSchemaNode "scm: act-2")
(ListLink
(VariableNode "$abc")
)
)
)
(ConceptNode "goal-2")
)
)
(define (groundable-content-2)
  (list
    (ListLink
(NumberNode 1)
(ConceptNode "Required constant for DualLink-2")
(NumberNode 2))
(InheritanceLink
(NumberNode 1)
(NumberNode 2)))
)
(define context-3
  (list
    (List
      (Concept "For PlusLink")
(Variable "x")
(Variable "y"))
(Equal
(Plus (Variable "x") (Variable "y"))
(Number 5))
))
(define action-3
(ExecutionOutput
(GroundedSchema "scm: act-3")
(List (Variable "$abc"))))
(define (act-3 groundings)
(Concept "act-3")
)
(define (component-3) (psi-component "component-3"))
(define goal-3 (psi-goal "goal-3" .3))
(define (groundable-content-3)
  (list
    (List
(Concept "For PlusLink")
(Number 3)
(Number 2)))
)
(define (rule-3) (psi-rule context-3 action-3 goal-3 (stv 1 1) (component-3)))
(define context-4
  (list
    (List
      (Concept "For PlusLink")
(Variable "x")
(Variable "y"))
(Equal
(Plus (Variable "x") (Variable "y"))
(Number 7))
))
(define action-4
(ExecutionOutput
(GroundedSchema "scm: act-4")
(List (Variable "$abc"))))
(define (act-4 groundings)
(Concept "act-4")
)
(define (component-4) (psi-component "component-4"))
(define goal-4 (psi-goal "goal-4" .4))
(define (groundable-content-4)
  (list
    (List
(Concept "For PlusLink")
(Number 4)
(Number 3)))
)
(define (rule-4) (psi-rule context-4 action-4 goal-4 (stv 1 1) (component-4)))
(define (test_psi_related_goals_1)
(equal? goal-1 (car (psi-related-goals action-1)))
)
(define (test_psi_related_goals_2)
(if (and
(member goal-1 (psi-related-goals action-2))
(member goal-2 (psi-related-goals action-2)))
#t
#f
)
)
(define (act-3-present?) (cog-node? (cog-node 'ConceptNode "act-3")))
(define (act-4-present?) (cog-node? (cog-node 'ConceptNode "act-4")))
(define (demand-value demand-node)
"
Returns the strength of the demand-node to two decimal places.
"
(/ (round (* 100 (cog-mean demand-node)) ) 100)
)
(define (do_psi_step)
(psi-step (component-3))
(psi-step (component-4))
)
(define (component-5) (psi-component "component-5"))
(define (component-6) (psi-component "component-6"))
(define (test-psi-run)
"
If the loop-count is increasing then it means the loop is running
"
(psi-run d1)
(sleep 1)
(psi-run d2)
(let ((l1 (psi-loop-count d1))
(l2 (psi-loop-count d2)))
(sleep 1)
(and
(< 50 (- (psi-loop-count d1) l1))
(< 50 (- (psi-loop-count d2) l2)))
)
)
(define (test-psi-halt)
"
If the loop-count is not changing then the loop has stopped.
"
(psi-halt d1)
(sleep 1)
(psi-halt d2)
(let ((l1 (psi-loop-count d1))
(l2 (psi-loop-count d2)))
(sleep 1)
(and
(equal? l1 (psi-loop-count d1))
(equal? l2 (psi-loop-count d2)))
)
)
( define ( ) ( psi - component " component-5 " ) )
action-1 goal-2 ( stv 1 1 ) ( ) )
( define ( )
( equal ? ( car ( psi - get - dual - rules ( cadr ( groundable - content-4 ) ) ) ) ( rule-4 ) )
( member ( rule-5 ) ( psi - get - dual - rules ( car ( groundable - content-1 ) ) ) ) )
(define (test_psi_get_action_1)
(equal? action-1 (psi-get-action (rule-1)))
)
(define (test_psi_get_context_1)
(equal? (Set context-1) (Set (psi-get-context (rule-1))))
)
(define (test_psi_get_goal_1)
(equal? goal-1 (psi-get-goal (rule-1)))
)
(define (test_psi_goal_functions_1)
"
Test psi-increase-urge function. The urge should increase in magnitude to
a maximum of 1.
Run before test_psi_goal_functions_2, so as to explore the range of values.
"
(let ((loop 8))
(while (not (equal? 0 loop))
(psi-increase-urge goal-1 0.2)
(set! loop (- loop 1)))
)
(= 1 (psi-urge goal-1))
)
(define (test_psi_goal_functions_2)
"
Test psi-decrease-urge function. The urge should decrease to a minimum of 0.
"
(let ((loop 8))
(while (not (equal? 0 loop))
(psi-decrease-urge goal-1 0.2)
(set! loop (- loop 1)))
)
(= 0.0 (psi-urge goal-1))
)
|
61d1c509ec65b7303c5a9582b8064f65aa463a0b115f229d46cbc12f4637fd74 | OCamlPro/ocplib-endian | endianBigstring.cppo.mli | (************************************************************************)
(* ocplib-endian *)
(* *)
(* Copyright 2012 OCamlPro *)
(* *)
(* This file is distributed under the terms of the GNU Lesser General *)
(*  Public License as published by the Free Software Foundation; either    *)
(*  version 2.1 of the License, or (at your option) any later version,     *)
(* with the OCaml static compilation exception. *)
(* *)
(* ocplib-endian is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
(************************************************************************)
open Bigarray
type bigstring = (char, int8_unsigned_elt, c_layout) Array1.t
module type EndianBigstringSig = sig
  (** Functions reading according to Big Endian byte order *)

  val get_char : bigstring -> int -> char
  (** [get_char buff i] reads 1 byte at offset i as a char *)

  val get_uint8 : bigstring -> int -> int
  (** [get_uint8 buff i] reads 1 byte at offset i as an unsigned int of 8
      bits. i.e. It returns a value between 0 and 2^8-1 *)

  val get_int8 : bigstring -> int -> int
  (** [get_int8 buff i] reads 1 byte at offset i as a signed int of 8
      bits. i.e. It returns a value between -2^7 and 2^7-1 *)

  val get_uint16 : bigstring -> int -> int
  (** [get_uint16 buff i] reads 2 bytes at offset i as an unsigned int
      of 16 bits. i.e. It returns a value between 0 and 2^16-1 *)

  val get_int16 : bigstring -> int -> int
  (** [get_int16 buff i] reads 2 byte at offset i as a signed int of
      16 bits. i.e. It returns a value between -2^15 and 2^15-1 *)

  val get_int32 : bigstring -> int -> int32
  (** [get_int32 buff i] reads 4 bytes at offset i as an int32. *)

  val get_int64 : bigstring -> int -> int64
  (** [get_int64 buff i] reads 8 bytes at offset i as an int64. *)

  val get_float : bigstring -> int -> float
  (** [get_float buff i] is equivalent to
      [Int32.float_of_bits (get_int32 buff i)] *)

  val get_double : bigstring -> int -> float
  (** [get_double buff i] is equivalent to
      [Int64.float_of_bits (get_int64 buff i)] *)

  val set_char : bigstring -> int -> char -> unit
  (** [set_char buff i v] writes [v] to [buff] at offset [i] *)

  val set_int8 : bigstring -> int -> int -> unit
  (** [set_int8 buff i v] writes the least significant 8 bits of [v]
      to [buff] at offset [i] *)

  val set_int16 : bigstring -> int -> int -> unit
  (** [set_int16 buff i v] writes the least significant 16 bits of [v]
      to [buff] at offset [i] *)

  val set_int32 : bigstring -> int -> int32 -> unit
  (** [set_int32 buff i v] writes [v] to [buff] at offset [i] *)

  val set_int64 : bigstring -> int -> int64 -> unit
  (** [set_int64 buff i v] writes [v] to [buff] at offset [i] *)

  val set_float : bigstring -> int -> float -> unit
  (** [set_float buff i v] is equivalent to
      [set_int32 buff i (Int32.bits_of_float v)] *)

  val set_double : bigstring -> int -> float -> unit
  (** [set_double buff i v] is equivalent to
      [set_int64 buff i (Int64.bits_of_float v)] *)
end
module BigEndian : sig
  (** Functions reading according to Big Endian byte order without
      checking for overflow *)
  include EndianBigstringSig
end

module BigEndian_unsafe : sig
  (** Functions reading according to Big Endian byte order without
      checking for overflow *)
  include EndianBigstringSig
end

module LittleEndian : sig
  (** Functions reading according to Little Endian byte order *)
  include EndianBigstringSig
end

module LittleEndian_unsafe : sig
  (** Functions reading according to Big Endian byte order without
      checking for overflow *)
  include EndianBigstringSig
end
module NativeEndian : sig
(** Functions reading according to machine endianness *)
include EndianBigstringSig
end
module NativeEndian_unsafe : sig
(** Functions reading according to machine endianness without
checking for overflow *)
include EndianBigstringSig
end
| null | https://raw.githubusercontent.com/OCamlPro/ocplib-endian/4f9fc814497b09da84f94ddf85cb20c9777887c2/src/endianBigstring.cppo.mli | ocaml | **********************************************************************
ocplib-endian
Copyright 2012 OCamlPro
This file is distributed under the terms of the GNU Lesser General
with the OCaml static compilation exception.
ocplib-endian is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
**********************************************************************
* Functions reading according to machine endianness
* Functions reading according to machine endianness without
checking for overflow | Public License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version ,
open Bigarray
type bigstring = (char, int8_unsigned_elt, c_layout) Array1.t
module type EndianBigstringSig = sig
* Functions reading according to Big Endian byte order
val get_char : bigstring -> int -> char
* [ i ] reads 1 byte at offset i as a char
val get_uint8 : bigstring -> int -> int
* [ get_uint8 buff i ] reads 1 byte at offset i as an unsigned int of 8
bits . i.e. It returns a value between 0 and 2 ^ 8 - 1
bits. i.e. It returns a value between 0 and 2^8-1 *)
val get_int8 : bigstring -> int -> int
* [ get_int8 buff i ] reads 1 byte at offset i as a signed int of 8
bits . i.e. It returns a value between -2 ^ 7 and 2 ^ 7 - 1
bits. i.e. It returns a value between -2^7 and 2^7-1 *)
val get_uint16 : bigstring -> int -> int
* [ i ] reads 2 bytes at offset i as an unsigned int
of 16 bits . i.e. It returns a value between 0 and 2 ^ 16 - 1
of 16 bits. i.e. It returns a value between 0 and 2^16-1 *)
val get_int16 : bigstring -> int -> int
* [ get_int16 i ] reads 2 byte at offset i as a signed int of
16 bits . i.e. It returns a value between -2 ^ 15 and 2 ^ 15 - 1
16 bits. i.e. It returns a value between -2^15 and 2^15-1 *)
val get_int32 : bigstring -> int -> int32
* [ get_int32 buff i ] reads 4 bytes at offset i as an int32 .
val get_int64 : bigstring -> int -> int64
* [ get_int64 buff i ] reads 8 bytes at offset i as an int64 .
val get_float : bigstring -> int -> float
* [ i ] is equivalent to
[ Int32.float_of_bits ( get_int32 buff i ) ]
[Int32.float_of_bits (get_int32 buff i)] *)
val get_double : bigstring -> int -> float
* [ get_double buff i ] is equivalent to
[ Int64.float_of_bits ( get_int64 buff i ) ]
[Int64.float_of_bits (get_int64 buff i)] *)
val set_char : bigstring -> int -> char -> unit
* [ set_char buff i v ] writes [ v ] to [ buff ] at offset [ i ]
val set_int8 : bigstring -> int -> int -> unit
* [ set_int8 buff i v ] writes the least significant 8 bits of [ v ]
to [ buff ] at offset [ i ]
to [buff] at offset [i] *)
val set_int16 : bigstring -> int -> int -> unit
* [ i v ] writes the least significant 16 bits of [ v ]
to [ buff ] at offset [ i ]
to [buff] at offset [i] *)
val set_int32 : bigstring -> int -> int32 -> unit
* [ set_int32 buff i v ] writes [ v ] to [ buff ] at offset [ i ]
val set_int64 : bigstring -> int -> int64 -> unit
* [ i v ] writes [ v ] to [ buff ] at offset [ i ]
val set_float : bigstring -> int -> float -> unit
* [ i v ] is equivalent to
[ set_int32 buff i ( Int32.bits_of_float v ) ]
[set_int32 buff i (Int32.bits_of_float v)] *)
val set_double : bigstring -> int -> float -> unit
* [ set_double buff i v ] is equivalent to
[ i ( Int64.bits_of_float v ) ]
[set_int64 buff i (Int64.bits_of_float v)] *)
end
module BigEndian : sig
* Functions reading according to Big Endian byte order without
checking for overflow
checking for overflow *)
include EndianBigstringSig
end
module BigEndian_unsafe : sig
* Functions reading according to Big Endian byte order without
checking for overflow
checking for overflow *)
include EndianBigstringSig
end
module LittleEndian : sig
* Functions reading according to Little Endian byte order
include EndianBigstringSig
end
module LittleEndian_unsafe : sig
* Functions reading according to Big Endian byte order without
checking for overflow
checking for overflow *)
include EndianBigstringSig
end
module NativeEndian : sig
include EndianBigstringSig
end
module NativeEndian_unsafe : sig
include EndianBigstringSig
end
|
9cd5c331a3f39db276f10dc033ea54b366df8212775430eb482355a2a18653c0 | afiniate/aws_async | sqs_deletemessage.ml | open Core.Std
open Async.Std
open Deferred.Result.Monad_infix
type result = {request_id:String.t} with sexp
let parse_result xml_string =
let open Result.Monad_infix in
Sqs_xml.expect_dtd @@ Xmlm.make_input (`String (0, xml_string))
>>= Sqs_xml.expect_tag ~name:"DeleteMessageResponse"
>>= Sqs_util.parse_response_metadata
>>= fun (channel, request_id) ->
Ok {request_id = request_id}
let exec sys
url
receipt_handle =
let rh = Sqs_receivemessage.string_of_receipt_handle receipt_handle in
let uri = url
|> Sqs_util.add_standard_param ~name:"Action"
~value:"DeleteMessage"
|> Sqs_util.add_standard_param ~name:"ReceiptHandle"
~value:rh in
Sqs_request.get sys.Sqs_system.auth "" uri
>>= fun (auth, body) ->
match parse_result body with
| Ok response ->
return @@ Ok ({sys with auth}, response)
| Error err ->
return @@ Result.fail err
let name_exec sys
queue_name
receipt_handle =
Sqs_getqueueurl.exec sys queue_name
>>= fun (sys', {Sqs_getqueueurl.queue_url = queue_url}) ->
exec sys' queue_url receipt_handle
| null | https://raw.githubusercontent.com/afiniate/aws_async/44c27bf9f18f76e9e6405c2252098c4aa3d9a8bc/lib/sqs/sqs_deletemessage.ml | ocaml | open Core.Std
open Async.Std
open Deferred.Result.Monad_infix
type result = {request_id:String.t} with sexp
let parse_result xml_string =
let open Result.Monad_infix in
Sqs_xml.expect_dtd @@ Xmlm.make_input (`String (0, xml_string))
>>= Sqs_xml.expect_tag ~name:"DeleteMessageResponse"
>>= Sqs_util.parse_response_metadata
>>= fun (channel, request_id) ->
Ok {request_id = request_id}
let exec sys
url
receipt_handle =
let rh = Sqs_receivemessage.string_of_receipt_handle receipt_handle in
let uri = url
|> Sqs_util.add_standard_param ~name:"Action"
~value:"DeleteMessage"
|> Sqs_util.add_standard_param ~name:"ReceiptHandle"
~value:rh in
Sqs_request.get sys.Sqs_system.auth "" uri
>>= fun (auth, body) ->
match parse_result body with
| Ok response ->
return @@ Ok ({sys with auth}, response)
| Error err ->
return @@ Result.fail err
let name_exec sys
queue_name
receipt_handle =
Sqs_getqueueurl.exec sys queue_name
>>= fun (sys', {Sqs_getqueueurl.queue_url = queue_url}) ->
exec sys' queue_url receipt_handle
|
|
635cda74e2089da9746ba5d95b98ef92dbf314151d206744b84c278368899a6f | haskell-compat/base-compat | Compat.hs | # LANGUAGE CPP , NoImplicitPrelude , PackageImports #
#if __GLASGOW_HASKELL__ >= 708
{-# LANGUAGE PatternSynonyms #-}
#endif
-- | This uses the @OneTuple@ compatibility library to backport 'Solo' to old
-- versions of GHC. Note that @OneTuple@ makes use of pattern synonyms, which
-- cannot be defined on pre-7.8 versions of GHC. As such, it is not feasible
-- to backport the @Solo@ data constructor on pre-7.8 versions of GHC, as
-- @OneTuple@ defines this as a pattern synonym.
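--
-- A small usage sketch (added for illustration; it is not part of the original
-- module). With this compat layer the same construction and pattern match work
-- on every supported GHC:
--
-- > import Data.Tuple.Compat
-- >
-- > one :: Solo Int
-- > one = MkSolo 1
-- >
-- > unSolo :: Solo a -> a
-- > unSolo (MkSolo x) = x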
module Data.Tuple.Compat
(
#if MIN_VERSION_ghc_prim(0,10,0)
Solo(MkSolo, Solo)
#elif MIN_VERSION_ghc_prim(0,7,0)
Solo(Solo)
, pattern MkSolo
#elif __GLASGOW_HASKELL__ >= 800
Solo(MkSolo, Solo)
#elif __GLASGOW_HASKELL__ >= 708
Solo(MkSolo)
, pattern Solo
#else
Solo(MkSolo)
#endif
, fst
, snd
, curry
, uncurry
, swap
) where
#if MIN_VERSION_ghc_prim(0,10,0)
import "base-compat" Data.Tuple.Compat
#elif MIN_VERSION_ghc_prim(0,7,0)
import "base-compat" Data.Tuple.Compat
import "OneTuple" Data.Tuple.Solo (pattern MkSolo)
#else
import "base" Data.Tuple
import "OneTuple" Data.Tuple.Solo
#endif
| null | https://raw.githubusercontent.com/haskell-compat/base-compat/e18c4664d784542505966a7610bbac43652afda6/base-compat-batteries/src/Data/Tuple/Compat.hs | haskell | | This uses the @OneTuple@ compatibility library to backport 'Solo' to old
@OneTuple@ defines this as a pattern synonym. | # LANGUAGE CPP , NoImplicitPrelude , PackageImports #
#if __GLASGOW_HASKELL__ >= 708
# LANGUAGE PatternSynonyms #
#endif
versions of GHC . Note that @OneTuple@ makes use of pattern synonyms , which
can not be defined on pre-7.8 versions of GHC . As such , it is not feasible
to backport the @Solo@ data constructor on pre-7.8 versions of GHC , as
module Data.Tuple.Compat
(
#if MIN_VERSION_ghc_prim(0,10,0)
Solo(MkSolo, Solo)
#elif MIN_VERSION_ghc_prim(0,7,0)
Solo(Solo)
, pattern MkSolo
#elif __GLASGOW_HASKELL__ >= 800
Solo(MkSolo, Solo)
#elif __GLASGOW_HASKELL__ >= 708
Solo(MkSolo)
, pattern Solo
#else
Solo(MkSolo)
#endif
, fst
, snd
, curry
, uncurry
, swap
) where
#if MIN_VERSION_ghc_prim(0,10,0)
import "base-compat" Data.Tuple.Compat
#elif MIN_VERSION_ghc_prim(0,7,0)
import "base-compat" Data.Tuple.Compat
import "OneTuple" Data.Tuple.Solo (pattern MkSolo)
#else
import "base" Data.Tuple
import "OneTuple" Data.Tuple.Solo
#endif
|
6595a729956fdc140bff2bdf235f8c3a6e87cffdd10365dadd797ea2ae974183 | takikawa/racket-clojure | string.rkt | #lang racket/base
;; String functions
(module+ test (require rackunit))
(define upper-case string-upcase)
(define lower-case string-downcase)
;; String -> String
capitalize the first character and down the rest
(define (capitalize str)
(cond [(= (string-length str) 0)
str]
[(= (string-length str) 1)
(string-upcase str)]
[(> (string-length str) 1)
(string-append (string-upcase (substring str 0 1))
(string-downcase (substring str 1)))]))
(module+ test
(check-equal? (capitalize "") "")
(check-equal? (capitalize "a") "A")
(check-equal? (capitalize "MiXeD cAsE") "Mixed case")
(check-equal? (capitalize "mIxEd CaSe") "Mixed case"))
| null | https://raw.githubusercontent.com/takikawa/racket-clojure/6a65b4348770dee984bd6fe8d0e33445887fff17/clojure/string.rkt | racket | String functions
String -> String | #lang racket/base
(module+ test (require rackunit))
(define upper-case string-upcase)
(define lower-case string-downcase)
capitalize the first character and down the rest
(define (capitalize str)
(cond [(= (string-length str) 0)
str]
[(= (string-length str) 1)
(string-upcase str)]
[(> (string-length str) 1)
(string-append (string-upcase (substring str 0 1))
(string-downcase (substring str 1)))]))
(module+ test
(check-equal? (capitalize "") "")
(check-equal? (capitalize "a") "A")
(check-equal? (capitalize "MiXeD cAsE") "Mixed case")
(check-equal? (capitalize "mIxEd CaSe") "Mixed case"))
|
0978b6738d4e4f21482989e1eefdd3afde21ecf64271561b8f1ab583e22ffccc | mfelleisen/RacketSchool | 5-form.rkt | #lang racket
(require "gui.rkt"
"ops.rkt")
(provide form
(rename-out [boolean-widget boolean]
[money-widget money]
[-/coerce -]
[>/coerce >]
[</coerce <]
[=/coerce =]
[and/coerce and]
[or/coerce or])
when
#%app
#%datum
#%module-begin)
(define-syntax-rule (form name clause ...)
(begin
(define name (make-gui 'name)) ; create a container
(form-clause name #t clause) ...
(send name start))) ; show the container
(define-syntax form-clause
(syntax-rules (when)
[(_ form-name guard-expr [id question type])
(form-clause* form-name guard-expr [id question type #f])]
[(_ form-name guard-expr [id question type compute-expr])
(form-clause* form-name guard-expr [id question type (lambda () compute-expr)])]
[(_ form-name guard-expr (when expr clause ...))
(begin
(form-clause form-name (and guard-expr (?/coerce expr)) clause)
...)]))
(define-syntax-rule (form-clause* form-name guard-expr [id question type compute-expr])
(begin
(define id undefined)
(gui-add! form-name ; container
type ; widget
question ; label
(lambda () guard-expr) ;guard
(lambda (v) (set! id v)) ; set value
compute-expr)))
| null | https://raw.githubusercontent.com/mfelleisen/RacketSchool/ada599f31d548a538a37d998b32d80aa881d699a/Plan/ql/5-form.rkt | racket | create a container
show the container
container
widget
label
guard
set value | #lang racket
(require "gui.rkt"
"ops.rkt")
(provide form
(rename-out [boolean-widget boolean]
[money-widget money]
[-/coerce -]
[>/coerce >]
[</coerce <]
[=/coerce =]
[and/coerce and]
[or/coerce or])
when
#%app
#%datum
#%module-begin)
(define-syntax-rule (form name clause ...)
  (begin
    (define name (make-gui 'name))
    (form-clause name #t clause) ...
    (send name start)))
(define-syntax form-clause
(syntax-rules (when)
[(_ form-name guard-expr [id question type])
(form-clause* form-name guard-expr [id question type #f])]
[(_ form-name guard-expr [id question type compute-expr])
(form-clause* form-name guard-expr [id question type (lambda () compute-expr)])]
[(_ form-name guard-expr (when expr clause ...))
(begin
(form-clause form-name (and guard-expr (?/coerce expr)) clause)
...)]))
(define-syntax-rule (form-clause* form-name guard-expr [id question type compute-expr])
(begin
    (define id undefined)
    (gui-add! form-name
              type
              question
              (lambda () guard-expr)
              (lambda (v) (set! id v))
              compute-expr)))
|
fcaa4aa9c490458017bbd050c923077bd0cf2d6c8d8b0cb73fa27f925fcbee2a | quchen/articles | HindleyMilner.hs | # LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module is an extensively documented walkthrough for typechecking a
-- basic functional language using the Hindley-Damas-Milner algorithm.
--
-- In the end, we'll be able to infer the type of expressions like
--
-- @
-- find (λx. (>) x 0)
-- :: [Integer] -> Either () Integer
-- @
--
-- It can be used in multiple different forms:
--
-- * The source is written in literate programming style, so you can almost
-- read it from top to bottom, minus some few references to later topics.
-- * /Loads/ of doctests (runnable and verified code examples) are included
-- * The code is runnable in GHCi, all definitions are exposed.
-- * A small main module that gives many examples of what you might try out in
-- GHCi is also included.
-- * The Haddock output yields a nice overview over the definitions given, with
--   a nice rendering of a truckload of Haddock comments.
module HindleyMilner where
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Control.Monad.Trans.State
import Data.Map (Map)
import qualified Data.Map as M
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
-- $setup
--
-- For running doctests:
--
-- >>> :set -XOverloadedStrings
-- >>> :set -XOverloadedLists
-- >>> :set -XLambdaCase
-- >>> import qualified Data.Text.IO as T
-- >>> let putPprLn = T.putStrLn . ppr
-- #############################################################################
-- #############################################################################
-- * Preliminaries
-- #############################################################################
-- #############################################################################
-- #############################################################################
-- ** Prettyprinting
-- #############################################################################
-- | A prettyprinter class. Similar to 'Show', but with a focus on having
-- human-readable output as opposed to being valid Haskell.
class Pretty a where
ppr :: a -> Text
-- #############################################################################
-- ** Names
-- #############################################################################
-- | A 'name' is an identifier in the language we're going to typecheck.
-- Variables on both the term and type level have 'Name's, for example.
newtype Name = Name Text
deriving (Eq, Ord, Show)
-- | >>> "lorem" :: Name
-- Name "lorem"
instance IsString Name where
fromString = Name . T.pack
-- | >>> putPprLn (Name "var")
-- var
instance Pretty Name where
ppr (Name n) = n
-- #############################################################################
-- ** Monotypes
-- #############################################################################
-- | A monotype is an unquantified/unparametric type, in other words it contains
-- no @forall@s. Monotypes are the inner building blocks of all types. Examples
-- of monotypes are @Int@, @a@, @a -> b@.
--
-- In formal notation, 'MType's are often called τ (tau) types.
data MType = TVar Name -- ^ @a@
| TFun MType MType -- ^ @a -> b@
           | TConst Name         -- ^ @Int@, @()@, …
-- Since we can't declare our own types in our simple type system
-- here, we'll hard-code certain basic ones so we can typecheck some
-- familiar functions that use them later.
| TList MType -- ^ @[a]@
           | TEither MType MType -- ^ @Either a b@
| TTuple MType MType -- ^ @(a,b)@
deriving Show
-- | >>> putPprLn (TFun (TEither (TVar "a") (TVar "b")) (TFun (TVar "c") (TVar "d")))
-- Either a b → c → d
--
-- Using the 'IsString' instance:
--
-- >>> putPprLn (TFun (TEither "a" "b") (TFun "c" "d"))
-- Either a b → c → d
instance Pretty MType where
ppr = go False
where
go _ (TVar name) = ppr name
go _ (TList a) = "[" <> ppr a <> "]"
go _ (TEither l r) = "Either " <> ppr l <> " " <> ppr r
go _ (TTuple a b) = "(" <> ppr a <> ", " <> ppr b <> ")"
go _ (TConst name) = ppr name
go parenthesize (TFun a b)
| parenthesize = "(" <> lhs <> " → " <> rhs <> ")"
| otherwise = lhs <> " → " <> rhs
where lhs = go True a
rhs = go False b
-- | >>> "var" :: MType
-- TVar (Name "var")
instance IsString MType where
fromString = TVar . fromString
| The free variables of an ' MType ' . This is simply the collection of all the
-- individual type variables occurring inside of it.
--
-- __Example:__ The free variables of @a -> b@ are @a@ and @b@.
freeMType :: MType -> Set Name
freeMType = \case
TVar a -> [a]
TFun a b -> freeMType a <> freeMType b
TList a -> freeMType a
TEither l r -> freeMType l <> freeMType r
TTuple a b -> freeMType a <> freeMType b
TConst _ -> []
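-- A quick (added) example; it assumes the 'IsString' instance above, so "a"
-- stands for @TVar (Name "a")@:
--
-- >>> S.toList (freeMType (TFun "a" (TTuple "b" "a")))
-- [Name "a",Name "b"]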
-- | Substitute all the contained type variables mentioned in the substitution,
-- and leave everything else alone.
instance Substitutable MType where
applySubst s = \case
TVar a -> let Subst s' = s
in M.findWithDefault (TVar a) a s'
TFun f x -> TFun (applySubst s f) (applySubst s x)
TList a -> TList (applySubst s a)
TEither l r -> TEither (applySubst s l) (applySubst s r)
TTuple a b -> TTuple (applySubst s a) (applySubst s b)
c@TConst {} -> c
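-- For instance (added example, not from the original article), substituting
-- @a ↦ Int@ into @a → a@ gives @Int → Int@:
--
-- >>> putPprLn (applySubst (Subst [("a", TConst "Int")]) (TFun "a" "a"))
-- Int → Int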
-- #############################################################################
-- ** Polytypes
-- #############################################################################
-- | A polytype is a monotype universally quantified over a number of type
-- variables. In Haskell, all definitions have polytypes, but since the @forall@
-- is implicit they look a bit like monotypes, maybe confusingly so. For
-- example, the type of @1 :: Int@ is actually @forall <nothing> . Int@, and the
-- type of @id@ is @forall a. a -> a@, although GHC displays it as @a -> a@.
--
-- A polytype claims to work "for all imaginable type parameters", very similar
-- to how a lambda claims to work "for all imaginable value parameters". We can
-- insert a value into a lambda's parameter to evaluate it to a new value, and
-- similarly we'll later insert types into a polytype's quantified variables to
-- gain new types.
--
-- __Example:__ in a definition @id :: forall a. a -> a@, the @a@ after the
-- ∀ ("forall") is the collection of type variables, and @a -> a@ is the 'MType'
-- quantified over. When we have such an @id@, we also have its specialized
-- version @Int -> Int@ available. This process will be the topic of the type
-- inference/unification algorithms.
--
-- In formal notation, 'PType's are often called σ (sigma) types.
--
-- The purpose of having monotypes and polytypes is that we'd like to only have
-- universal quantification at the top level, restricting our language to rank-1
-- polymorphism, where type inference is total (all types can be inferred) and
-- simple (only a handful of typing rules). Weakening this constraint would be
-- easy: if we allowed universal quantification within function types we would
-- get rank-N polymorphism. Taking it even further to allow it anywhere,
-- effectively replacing all occurrences of 'MType' with 'PType', yields
-- impredicative types. Both these extensions make the type system
-- *significantly* more complex though.
data PType = Forall (Set Name) MType -- ^ ∀{α}. τ
-- | >>> putPprLn (Forall ["a"] (TFun "a" "a"))
-- ∀a. a → a
instance Pretty PType where
ppr (Forall qs mType) = "∀" <> pprUniversals <> ". " <> ppr mType
where
pprUniversals
| S.null qs = "∅"
| otherwise = (T.intercalate " " . map ppr . S.toList) qs
-- | The free variables of a 'PType' are the free variables of the contained
-- 'MType', except those universally quantified.
--
-- >>> let sigma = Forall ["a"] (TFun "a" (TFun (TTuple "b" "a") "c"))
-- >>> putPprLn sigma
-- ∀a. a → (b, a) → c
-- >>> let display = T.putStrLn . T.intercalate ", " . foldMap (\x -> [ppr x])
-- >>> display (freePType sigma)
-- b, c
freePType :: PType -> Set Name
freePType (Forall qs mType) = freeMType mType `S.difference` qs
-- | Substitute all the free type variables.
instance Substitutable PType where
applySubst (Subst subst) (Forall qs mType) =
let qs' = M.fromSet (const ()) qs
subst' = Subst (subst `M.difference` qs')
in Forall qs (applySubst subst' mType)
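-- An (added) example of why the quantified variables are deleted from the
-- substitution first: the bound @a@ is left untouched,
--
-- >>> putPprLn (applySubst (Subst [("a", TConst "Int")]) (Forall ["a"] (TFun "a" "a")))
-- ∀a. a → a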
-- #############################################################################
-- ** The environment
-- #############################################################################
-- | The environment consists of all the values available in scope, and their
-- associated polytypes. Other common names for it include "(typing) context",
-- and because of the commonly used symbol for it sometimes directly @Γ@.
--
-- There are two kinds of membership in an environment,
--
-- - @∈@: an environment @Γ@ can be viewed as a set of @(value, type)@ pairs,
--   and we can test whether something is /literally contained/ by it via
--   @x:σ ∈ Γ@
-- - @⊢@, pronounced /entails/, describes all the things that are well-typed,
--   given an environment @Γ@. @Γ ⊢ x:τ@ can thus be seen as a judgement that
--   @x:τ@ is /figuratively contained/ in @Γ@.
--
-- For example, the environment @{x:Int}@ literally contains @x@, but given
-- this, it also entails @λy. x@, @λy z. x@, @let id = λy. y in id x@ and so on.
--
-- In Haskell terms, the environment consists of all the things you currently
-- have available, or that can be built by combining them. If you import the
-- Prelude, your environment entails
--
-- @
-- id → ∀a. a→a
-- map → ∀a b. (a→b) → [a] → [b]
-- putStrLn → ∀∅. String → IO ()
-- …
-- id map → ∀a b. (a→b) → [a] → [b]
-- map putStrLn → ∀∅. [String] -> [IO ()]
-- …
-- @
newtype Env = Env (Map Name PType)
-- | >>> :{
-- putPprLn (Env
--   [ ("id", Forall ["a"] (TFun "a" "a"))
--   , ("const", Forall ["a", "b"] (TFun "a" (TFun "b" "a"))) ])
-- :}
-- Γ = { const : ∀a b. a → b → a
-- , id : ∀a. a → a }
instance Pretty Env where
ppr (Env env) = "Γ = { " <> T.intercalate "\n , " pprBindings <> " }"
where
bindings = M.assocs env
pprBinding (name, pType) = ppr name <> " : " <> ppr pType
pprBindings = map pprBinding bindings
-- | The free variables of an 'Env'ironment are all the free variables of the
-- 'PType's it contains.
freeEnv :: Env -> Set Name
freeEnv (Env env) = let allPTypes = M.elems env
in S.unions (map freePType allPTypes)
-- | Performing a 'Subst'itution in an 'Env'ironment means performing that
-- substitution on all the contained 'PType's.
instance Substitutable Env where
applySubst s (Env env) = Env (M.map (applySubst s) env)
-- #############################################################################
-- ** Substitutions
-- #############################################################################
-- | A substitution is a mapping from type variables to 'MType's. Applying a
-- substitution means applying those replacements. For example, the substitution
-- @a -> Int@ applied to @a -> a@ yields the result @Int -> Int@.
--
-- A key concept behind Hindley-Milner is that once we dive deeper into an
-- expression, we learn more about our type variables. We might learn that @a@
-- has to be specialized to @b -> b@, and then later on that @b@ is actually
-- @Int@. Substitutions are an organized way of carrying this information along.
newtype Subst = Subst (Map Name MType)
-- | We're going to apply substitutions to a variety of other values that
-- somehow contain type variables, so we overload this application operation in
-- a class here.
--
-- Laws:
--
-- @
-- 'applySubst' 'mempty' ≡ 'id'
-- 'applySubst' (s1 '<>' s2) ≡ 'applySubst' s1 . 'applySubst' s2
-- @
class Substitutable a where
applySubst :: Subst -> a -> a
instance (Substitutable a, Substitutable b) => Substitutable (a,b) where
applySubst s (x,y) = (applySubst s x, applySubst s y)
-- | @'applySubst' s1 s2@ applies one substitution to another, replacing all the
-- bindings in the second argument @s2@ with their values mentioned in the first
-- one (@s1@).
instance Substitutable Subst where
applySubst s (Subst target) = Subst (fmap (applySubst s) target)
-- | >>> :{
-- putPprLn (Subst
--   [ ("a", TFun "b" "b")
--   , ("b", TEither "c" "d") ])
-- :}
-- { a ––> b → b
-- , b ––> Either c d }
instance Pretty Subst where
ppr (Subst s) = "{ " <> T.intercalate "\n, " [ ppr k <> " ––> " <> ppr v | (k,v) <- M.toList s ] <> " }"
-- | Combine two substitutions by applying all substitutions mentioned in the
-- first argument to the type variables contained in the second.
instance Monoid Subst where
  -- Considering that all we can really do with a substitution is apply it, we
  -- can use one of 'Substitutable's laws to show that substitutions
  -- combine associatively,
  --
  -- @
  --     applySubst (compose s1 (compose s2 s3))
  --   = applySubst s1 . applySubst (compose s2 s3)
  --   = applySubst s1 . applySubst s2 . applySubst s3
  --   = applySubst (compose s1 s2) . applySubst s3
  --   = applySubst (compose (compose s1 s2) s3)
  -- @
mappend subst1 subst2 = Subst (s1 `M.union` s2)
where
Subst s1 = subst1
Subst s2 = applySubst subst1 subst2
mempty = Subst M.empty
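-- An (added) illustration of the ordering: the left substitution is applied to
-- the right one before the two maps are merged,
--
-- >>> putPprLn (mappend (Subst [("a", TConst "Int")]) (Subst [("b", TFun "a" "a")]))
-- { a ––> Int
-- , b ––> Int → Int }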
-- #############################################################################
-- #############################################################################
-- * Typechecking
-- #############################################################################
-- #############################################################################
$ Typechecking does two things :
--
1 . If two types are not immediately identical , attempt to ' unify ' them
-- to get a type compatible with both of them
2 . ' infer ' the most general type of a value by comparing the values in its
-- definition with the 'Env'ironment
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-- ** Inference context
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-- | The inference type holds a supply of unique names, and can fail with a
-- descriptive error if something goes wrong.
--
-- /Invariant:/ the supply must be infinite, or we might run out of names to
-- give to things.
newtype Infer a = Infer (ExceptT InferError (State [Name]) a)
deriving (Functor, Applicative, Monad)
-- | Errors that can happen during the type inference process.
data InferError =
-- | Two types that don't match were attempted to be unified.
--
-- For example, @a -> a@ and @Int@ do not unify.
--
-- >>> putPprLn (CannotUnify (TFun "a" "a") (TConst "Int"))
-- Cannot unify a → a with Int
CannotUnify MType MType
-- | A 'TVar' is bound to an 'MType' that already contains it.
--
-- The canonical example of this is @λx. x x@, where the first @x@
-- in the body has to have type @a -> b@, and the second one @a@. Since
-- they're both the same @x@, this requires unification of @a@ with
-- @a -> b@, which only works if @a = a -> b = (a -> b) -> b = …@, yielding
-- an infinite type.
--
-- >>> putPprLn (OccursCheckFailed "a" (TFun "a" "a"))
-- Occurs check failed: a already appears in a → a
| OccursCheckFailed Name MType
-- | The value of an unknown identifier was read.
--
-- >>> putPprLn (UnknownIdentifier "a")
-- Unknown identifier: a
| UnknownIdentifier Name
deriving Show
-- | >>> putPprLn (CannotUnify (TEither "a" "b") (TTuple "a" "b"))
-- Cannot unify Either a b with (a, b)
instance Pretty InferError where
ppr = \case
CannotUnify t1 t2 ->
"Cannot unify " <> ppr t1 <> " with " <> ppr t2
OccursCheckFailed name ty ->
"Occurs check failed: " <> ppr name <> " already appears in " <> ppr ty
UnknownIdentifier name ->
"Unknown identifier: " <> ppr name
-- | Evaluate a value in an 'Infer'ence context.
--
-- >>> let expr = EAbs "f" (EAbs "g" (EAbs "x" (EApp (EApp "f" "x") (EApp "g" "x"))))
-- >>> putPprLn expr
-- λf g x. f x (g x)
-- >>> let inferred = runInfer (infer (Env []) expr)
-- >>> let demonstrate = \case Right (_, ty) -> T.putStrLn (":: " <> ppr ty)
-- >>> demonstrate inferred
-- :: (c → e → f) → (c → e) → c → f
runInfer :: Infer a -- ^ Inference data
-> Either InferError a
runInfer (Infer inf) =
evalState (runExceptT inf) (map Name (infiniteSupply alphabet))
where
alphabet = map T.singleton ['a'..'z']
-- [a, b, c] ==> [a,b,c, a1,b1,c1, a2,b2,c2, …]
infiniteSupply supply = supply <> addSuffixes supply (1 :: Integer)
where
addSuffixes xs n = map (\x -> addSuffix x n) xs <> addSuffixes xs (n+1)
addSuffix x n = x <> T.pack (show n)
-- | Throw an 'InferError' in an 'Infer'ence context.
--
-- >>> case runInfer (throw (UnknownIdentifier "var")) of Left err -> putPprLn err
-- Unknown identifier: var
throw :: InferError -> Infer a
throw = Infer . throwE
-- #############################################################################
-- ** Unification
-- #############################################################################
-- $ Unification describes the process of making two different types compatible
-- by specializing them where needed. A desirable property to have here is being
-- able to find the most general unifier. Luckily, we'll be able to do that in
-- our type system.
-- | The unification of two 'MType's is the most general substitution that can be
-- applied to both of them in order to yield the same result.
--
-- >>> let m1 = TFun "a" "b"
-- >>> putPprLn m1
-- a → b
-- >>> let m2 = TFun "c" (TEither "d" "e")
-- >>> putPprLn m2
-- c → Either d e
-- >>> let inferSubst = unify (m1, m2)
-- >>> case runInfer inferSubst of Right subst -> putPprLn subst
-- { a ––> c
-- , b ––> Either d e }
unify :: (MType, MType) -> Infer Subst
unify = \case
(TFun a b, TFun x y) -> unifyBinary (a,b) (x,y)
(TVar v, x) -> v `bindVariableTo` x
(x, TVar v) -> v `bindVariableTo` x
(TConst a, TConst b) | a == b -> pure mempty
(TList a, TList b) -> unify (a,b)
(TEither a b, TEither x y) -> unifyBinary (a,b) (x,y)
(TTuple a b, TTuple x y) -> unifyBinary (a,b) (x,y)
(a, b) -> throw (CannotUnify a b)
where
-- Unification of binary type constructors, such as functions and Either.
-- Unification is first done for the first operand, and assuming the
-- required substitution, for the second one.
unifyBinary :: (MType, MType) -> (MType, MType) -> Infer Subst
unifyBinary (a,b) (x,y) = do
s1 <- unify (a, x)
s2 <- unify (applySubst s1 (b, y))
pure (s1 <> s2)
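--
-- For a failing case, two structurally different types report a 'CannotUnify'
-- error; a quick check one might run in GHCi:
--
-- >>> case runInfer (unify (TConst "Int", TFun "a" "b")) of Left err -> putPprLn err
-- Cannot unify Int with a → b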
-- | Build a 'Subst'itution that binds a 'Name' of a 'TVar' to an 'MType'. The
-- resulting substitution should be idempotent, i.e. applying it more than once
-- to something should not be any different from applying it only once.
--
-- - In the simplest case, this just means building a substitution that just
-- does that.
-- - Substituting a 'Name' with a 'TVar' with the same name unifies a type
-- variable with itself, and the resulting substitution does nothing new.
-- - If the 'Name' we're trying to bind to an 'MType' already occurs in that
-- 'MType', the resulting substitution would not be idempotent: the 'MType'
-- would be replaced again, yielding a different result. This is known as the
-- Occurs Check.
bindVariableTo :: Name -> MType -> Infer Subst
bindVariableTo name (TVar v) | boundToSelf = pure mempty
where
boundToSelf = name == v
bindVariableTo name mType | name `occursIn` mType = throw (OccursCheckFailed name mType)
where
n `occursIn` ty = n `S.member` freeMType ty
bindVariableTo name mType = pure (Subst (M.singleton name mType))
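--
-- For example, binding @a@ to @a → a@ trips the occurs check described above;
-- as a quick GHCi check:
--
-- >>> case runInfer (bindVariableTo "a" (TFun "a" "a")) of Left err -> putPprLn err
-- Occurs check failed: a already appears in a → a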
-- #############################################################################
-- ** Type inference
-- #############################################################################
-- $ Type inference is the act of finding out a value's type by looking at the
-- environment it is in, in order to make it compatible with it.
--
-- In literature, the Hindley-Damas-Milner inference algorithm ("Algorithm W")
-- is often presented in the style of logical formulas, and below you'll find
-- that version along with code that actually does what they say.
--
-- These formulas look a bit like fractions, where the "numerator" is a
-- collection of premises, and the denominator is the consequence if all of them
-- hold.
--
-- __Example:__
--
-- @
-- Γ ⊢ even : Int → Bool        Γ ⊢ 1 : Int
-- –––––––––––––––––––––––––––––––––––
-- Γ ⊢ even 1 : Bool
-- @
--
-- means that if we have a value of type @Int -> Bool@ called "even" and a value
-- of type @Int@ called @1@, then we also have a value of type @Bool@ via
-- @even 1@ available to us.
--
-- The actual inference rules are polymorphic versions of this example, and
-- the code comments will explain each step in detail.
-- -----------------------------------------------------------------------------
-- *** The language: typed lambda calculus
-- -----------------------------------------------------------------------------
-- | The syntax tree of the language we'd like to typecheck. You can view it as
-- a close relative to simply typed lambda calculus, having only the most
-- necessary syntax elements.
--
-- Since 'ELet' is non-recursive, the usual fixed-point function
-- @fix : (a → a) → a@ can be introduced to allow recursive definitions.
data Exp = ELit Lit -- ^ True, 1
| EVar Name -- ^ @x@
| EApp Exp Exp -- ^ @f x@
| EAbs Name Exp -- ^ @λx. e@
| ELet Name Exp Exp -- ^ @let x = e in e'@ (non-recursive)
deriving Show
-- | Literals we'd like to support. Since we can't define new data types in our
-- simple type system, we'll have to hard-code the possible ones here.
data Lit = LBool Bool
| LInteger Integer
deriving Show
-- | >>> putPprLn (EAbs "f" (EAbs "g" (EAbs "x" (EApp (EApp "f" "x") (EApp "g" "x")))))
-- λf g x. f x (g x)
instance Pretty Exp where
ppr (ELit lit) = ppr lit
ppr (EVar name) = ppr name
ppr (EApp f x) = pprApp1 f <> " " <> pprApp2 x
where
pprApp1 = \case
eLet@ELet{} -> "(" <> ppr eLet <> ")"
eLet@EAbs{} -> "(" <> ppr eLet <> ")"
e -> ppr e
pprApp2 = \case
eApp@EApp{} -> "(" <> ppr eApp <> ")"
e -> pprApp1 e
ppr x@EAbs{} = pprAbs True x
where
pprAbs True (EAbs name expr) = "λ" <> ppr name <> pprAbs False expr
pprAbs False (EAbs name expr) = " " <> ppr name <> pprAbs False expr
pprAbs _ expr = ". " <> ppr expr
ppr (ELet name value body) =
"let " <> ppr name <> " = " <> ppr value <> " in " <> ppr body
-- | >>> putPprLn (LBool True)
-- True
--
-- >>> putPprLn (LInteger 127)
-- 127
instance Pretty Lit where
ppr = \case
LBool b -> showT b
LInteger i -> showT i
where
showT :: Show a => a -> Text
showT = T.pack . show
-- | >>> "var" :: Exp
-- EVar (Name "var")
instance IsString Exp where
fromString = EVar . fromString
-- -----------------------------------------------------------------------------
-- *** Some useful definitions
-- -----------------------------------------------------------------------------
-- | Generate a fresh 'Name' in a type 'Infer'ence context. An example use case
-- of this is η-expansion, which transforms @f@ into @λx. f x@, where "x" is a
-- new name, i.e. unbound in the current context.
fresh :: Infer MType
fresh = drawFromSupply >>= \case
Right name -> pure (TVar name)
Left err -> throw err
where
drawFromSupply :: Infer (Either InferError Name)
drawFromSupply = Infer (do
s:upply <- lift get
lift (put upply)
pure (Right s) )
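--
-- Drawing two fresh variables in a row simply yields the first two names from
-- the supply; illustrated here as a small GHCi check:
--
-- >>> case runInfer ((,) <$> fresh <*> fresh) of Right (x, y) -> putPprLn x >> putPprLn y
-- a
-- b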
-- | Add a new binding to the environment.
--
-- The Haskell equivalent would be defining a new value, for example in module
-- scope or in a @let@ block. This corresponds to the "comma" operation used in
-- formal notation,
--
-- @
-- Γ, x:σ ≡ extendEnv Γ (x,σ)
-- @
extendEnv :: Env -> (Name, PType) -> Env
extendEnv (Env env) (name, pType) = Env (M.insert name pType env)
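--
-- For example, extending the empty environment with a monomorphic binding
-- (a small GHCi check):
--
-- >>> putPprLn (extendEnv (Env []) ("x", Forall [] "a"))
-- Γ = { x : ∀∅. a }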
-- -----------------------------------------------------------------------------
-- *** Inferring the types of all language constructs
-- -----------------------------------------------------------------------------
-- | Infer the type of an 'Exp'ression in an 'Env'ironment, resulting in the
-- 'Exp's 'MType' along with a substitution that has to be done in order to reach
-- this goal.
--
-- This is widely known as /Algorithm W/.
infer :: Env -> Exp -> Infer (Subst, MType)
infer env = \case
ELit lit -> inferLit lit
EVar name -> inferVar env name
EApp f x -> inferApp env f x
EAbs x e -> inferAbs env x e
ELet x e e' -> inferLet env x e e'
-- | Literals such as 'True' and '1' have their types hard-coded.
inferLit :: Lit -> Infer (Subst, MType)
inferLit lit = pure (mempty, TConst litTy)
where
litTy = case lit of
LBool {} -> "Bool"
LInteger {} -> "Integer"
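--
-- For example (a small GHCi check):
--
-- >>> case runInfer (inferLit (LBool True)) of Right (_, ty) -> putPprLn ty
-- Bool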
-- | Inferring the type of a variable is done via
--
-- @
-- x:σ ∈ Γ      τ = instantiate(σ)
-- –––––––––––––––––––––––––––– [Var]
-- Γ ⊢ x:τ
-- @
--
-- This means that if @Γ@ /literally contains/ (@∈@) a value, then it also
-- /entails it/ (@⊢@) in all its instantiations.
inferVar :: Env -> Name -> Infer (Subst, MType)
inferVar env name = do
sigma <- lookupEnv env name -- x:σ ∈ Γ
tau <- instantiate sigma -- τ = instantiate(σ)
-- ------------------
pure (mempty, tau) -- Γ ⊢ x:τ
-- | Look up the 'PType' of a 'Name' in the 'Env'ironment.
--
-- This checks whether @x:σ@ is /literally contained/ in @Γ@. For more details
-- about this, see the documentation of 'Env'.
--
-- To give a Haskell analogon, looking up @id@ when @Prelude@ is loaded, the
-- resulting 'PType' would be @id@'s type, namely @forall a. a -> a@.
lookupEnv :: Env -> Name -> Infer PType
lookupEnv (Env env) name = case M.lookup name env of
Just x -> pure x
Nothing -> throw (UnknownIdentifier name)
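--
-- Looking up a name the environment does not contain fails with
-- 'UnknownIdentifier'; as a quick GHCi check:
--
-- >>> case runInfer (lookupEnv (Env []) "foo") of Left err -> putPprLn err
-- Unknown identifier: foo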
-- | Bind all quantified variables of a 'PType' to 'fresh' type variables.
--
-- __Example:__ instantiating @forall a. a -> b -> a@ results in the 'MType'
-- @c -> b -> c@, where @c@ is a fresh name (to avoid shadowing issues).
--
-- You can picture the 'PType' to be the prototype converted to an instantiated
-- 'MType', which can now be used in the unification process.
--
-- Another way of looking at it is by simply forgetting which variables were
-- quantified, carefully avoiding name clashes when doing so.
--
-- 'instantiate' can also be seen as the opposite of 'generalize', which we'll
-- need later to convert an 'MType' to a 'PType'.
instantiate :: PType -> Infer MType
instantiate (Forall qs t) = do
subst <- substituteAllWithFresh qs
pure (applySubst subst t)
where
-- For each given name, add a substitution from that name to a fresh type
-- variable to the result.
substituteAllWithFresh :: Set Name -> Infer Subst
substituteAllWithFresh xs = do
let freshSubstActions = M.fromSet (const fresh) xs
freshSubsts <- sequenceA freshSubstActions
pure (Subst freshSubsts)
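--
-- For example, instantiating @∀z. z → z@ replaces @z@ with the first fresh
-- name drawn from the supply; as a small GHCi check:
--
-- >>> case runInfer (instantiate (Forall ["z"] (TFun "z" "z"))) of Right ty -> putPprLn ty
-- a → a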
-- | Function application captures the fact that if we have a function and an
-- argument we can give to that function, we also have the result value of the
-- result type available to us.
--
-- @
-- Γ ⊢ f : fτ      Γ ⊢ x : xτ      fxτ = fresh      unify(fτ, xτ → fxτ)
-- ––––––––––––––––––––––––––––––––––––––––––––––––––––––––––– [App]
-- Γ ⊢ f x : fxτ
-- @
--
-- This rule says that given a function and a value with a type, the function
-- type has to unify with a function type that allows the value type to be its
-- argument.
inferApp
:: Env
-> Exp -- ^ __f__ x
-> Exp -- ^ f __x__
-> Infer (Subst, MType)
inferApp env f x = do
(s1, fTau) <- infer env f -- f : fτ
(s2, xTau) <- infer (applySubst s1 env) x -- x : xτ
fxTau <- fresh -- fxτ = fresh
s3 <- unify (applySubst s2 fTau, TFun xTau fxTau) -- unify(fτ, xτ → fxτ)
let s = s3 <> s2 <> s1 -- --------------------
pure (s, applySubst s3 fxTau) -- f x : fxτ
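--
-- As a small end-to-end illustration, applying the identity lambda to a
-- literal yields the literal's type (the name @expr@ is ad hoc):
--
-- >>> let expr = EApp (EAbs "x" "x") (ELit (LBool True))
-- >>> case runInfer (infer (Env []) expr) of Right (_, ty) -> putPprLn ty
-- Bool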
-- | Lambda abstraction is based on the fact that when we introduce a new
-- variable, the resulting lambda maps from that variable's type to the type of
-- the body.
--
-- @
-- τ = fresh        σ = ∀∅. τ        Γ, x:σ ⊢ e:τ'
-- ––––––––––––––––––––––––––––––––––––– [Abs]
-- Γ ⊢ λx.e : τ→τ'
-- @
--
-- Here, @Γ, x:τ@ is @Γ@ extended by one additional mapping, namely @x:τ@.
--
-- Abstraction is typed by extending the environment by a new 'MType', and if
-- under this assumption we can construct a function mapping to a value of that
-- type, we can say that the lambda takes a value and maps to it.
inferAbs
:: Env
-> Name -- ^ λ__x__. e
-> Exp -- ^ λx. __e__
-> Infer (Subst, MType)
inferAbs env x e = do
tau <- fresh -- τ = fresh
let sigma = Forall [] tau -- σ = ∀∅. τ
env' = extendEnv env (x, sigma) -- Γ, x:σ …
(s, tau') <- infer env' e -- … ⊢ e:τ'
-- ---------------
pure (s, TFun (applySubst s tau) tau') -- λx.e : τ→τ'
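--
-- For example, a lambda that ignores its argument and returns a literal maps
-- a fresh type variable to the literal's type; as a small GHCi check:
--
-- >>> case runInfer (infer (Env []) (EAbs "x" (ELit (LInteger 1)))) of Right (_, ty) -> putPprLn ty
-- a → Integer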
-- | A let binding allows extending the environment with new bindings in a
-- principled manner. To do this, we first have to typecheck the expression to
-- be introduced. The result of this is then generalized to a 'PType', since let
-- bindings introduce new polymorphic values, which are then added to the
-- environment. Now we can finally typecheck the body of the "in" part of the
-- let binding.
--
-- Note that in our simple language, let is non-recursive, but recursion can be
-- introduced as usual by adding a primitive @fix : (a → a) → a@ if desired.
--
-- @
-- Γ ⊢ e:τ      σ = gen(Γ,τ)      Γ, x:σ ⊢ e':τ'
-- ––––––––––––––––––––––––––––––––––––––– [Let]
-- Γ ⊢ let x = e in e' : τ'
-- @
inferLet
:: Env
-> Name -- ^ let __x__ = e in e'
-> Exp -- ^ let x = __e__ in e'
-> Exp -- ^ let x = e in __e'__
-> Infer (Subst, MType)
inferLet env x e e' = do
(s1, tau) <- infer env e -- Γ ⊢ e:τ
let env' = applySubst s1 env
let sigma = generalize env' tau -- σ = gen(Γ,τ)
env'' = extendEnv env' (x, sigma) -- Γ, x:σ
(s2, tau') <- infer env'' e' -- Γ ⊢ …
-- --------------------------
pure (s2 <> s1, tau') -- … let x = e in e' : τ'
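--
-- For example, a let-bound identity is generalized and can then be applied to
-- a concrete argument in the body (the name @letExpr@ is ad hoc):
--
-- >>> let letExpr = ELet "id" (EAbs "x" "x") (EApp "id" (ELit (LBool True)))
-- >>> case runInfer (infer (Env []) letExpr) of Right (_, ty) -> putPprLn ty
-- Bool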
-- | Generalize an 'MType' to a 'PType' by universally quantifying over all the
-- type variables contained in it, except those already free in the environment.
--
-- >>> let tau = TFun "a" (TFun "b" "a")
-- >>> putPprLn tau
-- a → b → a
-- >>> putPprLn (generalize (Env [("x", Forall [] "b")]) tau)
-- ∀a. a → b → a
--
-- In more formal notation,
--
-- @
-- gen(Γ,τ) = ∀{α}. τ
-- where {α} = free(τ) – free(Γ)
-- @
--
-- 'generalize' can also be seen as the opposite of 'instantiate', which
-- converts a 'PType' to an 'MType'.
generalize :: Env -> MType -> PType
generalize env mType = Forall qs mType
where
qs = freeMType mType `S.difference` freeEnv env
| null | https://raw.githubusercontent.com/quchen/articles/8d5ca414b10d22b418b6b3d1fd57e8d45cef1486/hindley-milner/src/HindleyMilner.hs | haskell | # LANGUAGE OverloadedStrings #
| This module is an extensively documented walkthrough for typechecking a
In the end, we'll be able to infer the type of expressions like
@
find (λx. (>) x 0)
:: [Integer] -> Either () Integer
@
It can be used in multiple different forms:
* The source is written in literate programming style, so you can almost
read it from top to bottom, minus some few references to later topics.
* /Loads/ of doctests (runnable and verified code examples) are included
* The code is runnable in GHCi, all definitions are exposed.
* A small main module that gives many examples of what you might try out in
GHCi is also included.
$setup
For running doctests:
>>> :set -XOverloadedStrings
>>> :set -XOverloadedLists
>>> :set -XLambdaCase
>>> import qualified Data.Text.IO as T
* Preliminaries
** Prettyprinting
| A prettyprinter class. Similar to 'Show', but with a focus on having
** Names
| A 'name' is an identifier in the language we're going to typecheck.
Variables on both the term and type level have 'Name's, for example.
| >>> "lorem" :: Name
Name "lorem"
| >>> putPprLn (Name "var")
var
| A monotype is an unquantified/unparametric type, in other words it contains
of monotypes are @Int@, @a@, @a -> b@.
^ @a@
^ @a -> b@
Since we can't declare our own types in our simple type system
here, we'll hard-code certain basic ones so we can typecheck some
familiar functions that use them later.
^ @[a]@
^ @(a,b)@
Either a b → c → d
Either a b → c → d
individual type variables occurring inside of it.
| Substitute all the contained type variables mentioned in the substitution,
and leave everything else alone.
** Polytypes
is implicit they look a bit like monotypes, maybe confusingly so. For
to how a lambda claims to work "for all imaginable value parameters". We can
insert a value into a lambda's parameter to evaluate it to a new value, and
gain new types.
__Example:__ in a definition @id :: forall a. a -> a@, the @a@ after the
quantified over. When we have such an @id@, we also have its specialized
inference/unification algorithms.
The purpose of having monotypes and polytypes is that we'd like to only have
universal quantification at the top level, restricting our language to rank-1
simple (only a handful of typing rules). Weakening this constraint would be
easy: if we allowed universal quantification within function types we would
get rank-N polymorphism. Taking it even further to allow it anywhere,
impredicative types. Both these extensions make the type system
*significantly* more complex though.
^ ∀{α}. τ
>>> putPprLn sigma
b, c
| Substitute all the free type variables.
** The environment
| The environment consists of all the values available in scope, and their
associated polytypes. Other common names for it include "(typing) context",
and because of the commonly used symbol for it sometimes directly
- @∈@: an environment @Γ@ can be viewed as a set of @(value, type)@ pairs,
and we can test whether something is /literally contained/ by it via
- @⊢@, pronounced /entails/, describes all the things that are well-typed,
@x:τ@ is /figuratively contained/ in @Γ@.
For example, the environment @{x:Int}@ literally contains @x@, but given
have available, or that can be built by combining them. If you import the
@
id → ∀a. a→a
map → ∀a b. (a→b) → [a] → [b]
putStrLn → ∀∅. String → IO ()
…
id map → ∀a b. (a→b) → [a] → [b]
map putStrLn → ∀∅. [String] -> [IO ()]
…
@
| >>> :{
putPprLn (Env
:}
Γ = { const : ∀a b. a → b → a
, id : ∀a. a → a }
| The free variables of an 'Env'ironment are all the free variables of the
| Performing a 'Subst'itution in an 'Env'ironment means performing that
** Substitutions
substitution means applying those replacements. For example, the substitution
expression, we learn more about our type variables. We might learn that @a@
@Int@. Substitutions are an organized way of carrying this information along.
| We're going to apply substitutions to a variety of other values that
somehow contain type variables, so we overload this application operation in
a class here.
Laws:
@
@
one (@s1@).
| >>> :{
:}
{ a ––> b → b
, b ––> Either c d }
Considering that all we can really do with a substitution is apply it, we
combine associatively,
@
@
* Typechecking
to get a type compatible with both of them
definition with the 'Env'ironment
** Inference context
| The inference type holds a supply of unique names, and can fail with a
descriptive error if something goes wrong.
/Invariant:/ the supply must be infinite, or we might run out of names to
give to things.
| Errors that can happen during the type inference process.
| Two types that don't match were attempted to be unified.
>>> putPprLn (CannotUnify (TFun "a" "a") (TConst "Int"))
Cannot unify a → a with Int
they're both the same @x@, this requires unification of @a@ with
an infinite type.
Occurs check failed: a already appears in a → a
| The value of an unknown identifier was read.
Unknown identifier: a
Cannot unify Either a b with (a, b)
| Evaluate a value in an 'Infer'ence context.
>>> putPprLn expr
λf g x. f x (g x)
>>> let inferred = runInfer (infer (Env []) expr)
>>> let demonstrate = \case Right (_, ty) -> T.putStrLn (":: " <> ppr ty)
>>> demonstrate inferred
:: (c → e → f) → (c → e) → c → f
^ Inference data
[a, b, c] ==> [a,b,c, a1,b1,c1, a2,b2,c2, …]
| Throw an 'InferError' in an 'Infer'ence context.
Unknown identifier: var
** Unification
by specializing them where needed. A desirable property to have here is being
able to find the most general unifier. Luckily, we'll be able to do that in
our type system.
applied to both of them in order to yield the same result.
>>> putPprLn m1
a → b
c → Either d e
>>> let inferSubst = unify (m1, m2)
{ a ––> c
, b ––> Either d e }
Unification of binary type constructors, such as functions and Either.
resulting substitution should be idempotent, i.e. applying it more than once
to something should not be any different from applying it only once.
- In the simplest case, this just means building a substitution that just
does that.
variable with itself, and the resulting substitution does nothing new.
would be replaced again, yielding a different result. This is known as the
Occurs Check.
** Type inference
$ Type inference is the act of finding out a value's type by looking at the
environment it is in, in order to make it compatible with it.
is often presented in the style of logical formulas, and below you'll find
that version along with code that actually does what they say.
These formulas look a bit like fractions, where the "numerator" is a
collection of premises, and the denominator is the consequence if all of them
hold.
__Example:__
@
–––––––––––––––––––––––––––––––––––
@
means that if we have a value of type @Int -> Bool@ called "even" and a value
@even 1@ available to us.
The actual inference rules are polymorphic versions of this example, and
the code comments will explain each step in detail.
-----------------------------------------------------------------------------
*** The language: typed lambda calculus
-----------------------------------------------------------------------------
| The syntax tree of the language we'd like to typecheck. You can view it as
a close relative to simply typed lambda calculus, having only the most
necessary syntax elements.
@fix : (a → a) → a@ can be introduced to allow recursive definitions.
^ @x@
^ @f x@
^ @λx. e@
| Literals we'd like to support. Since we can't define new data types in our
simple type system, we'll have to hard-code the possible ones here.
λf g x. f x (g x)
| >>> putPprLn (LBool True)
True
| >>> "var" :: Exp
-----------------------------------------------------------------------------
*** Some useful definitions
-----------------------------------------------------------------------------
| Generate a fresh 'Name' in a type 'Infer'ence context. An example use case
new name, i.e. unbound in the current context.
| Add a new binding to the environment.
formal notation,
@
@
-----------------------------------------------------------------------------
*** Inferring the types of all language constructs
-----------------------------------------------------------------------------
| Infer the type of an 'Exp'ression in an 'Env'ironment, resulting in the
this goal.
This is widely known as /Algorithm W/.
| Literals such as 'True' and '1' have their types hard-coded.
| Inferring the type of a variable is done via
@
–––––––––––––––––––––––––––– [Var]
Γ ⊢ x:τ
@
τ = instantiate(σ)
------------------
about this, see the documentation of 'Env'.
Another way of looking at it is by simply forgetting which variables were
quantified, carefully avoiding name clashes when doing so.
'instantiate' can also be seen as the opposite of 'generalize', which we'll
For each given name, add a substitution from that name to a fresh type
variable to the result.
| Function application captures the fact that if we have a function and an
argument we can give to that function, we also have the result value of the
result type available to us.
@
––––––––––––––––––––––––––––––––––––––––––––––––––––––––––– [App]
@
This rule says that given a function and a value with a type, the function
type has to unify with a function type that allows the value type to be its
argument.
^ __f__ x
^ f __x__
f : fτ
x : xτ
fxτ = fresh
--------------------
| Lambda abstraction is based on the fact that when we introduce a new
variable, the resulting lambda maps from that variable's type to the type of
the body.
@
––––––––––––––––––––––––––––––––––––– [Abs]
@
under this assumption we can construct a function mapping to a value of that
type, we can say that the lambda takes a value and maps to it.
^ λ__x__. e
^ λx. __e__
τ = fresh
σ = ∀∅. τ
… ⊢ e:τ'
---------------
| A let binding allows extending the environment with new bindings in a
bindings introduce new polymorphic values, which are then added to the
environment. Now we can finally typecheck the body of the "in" part of the
let binding.
Note that in our simple language, let is non-recursive, but recursion can be
introduced as usual by adding a primitive @fix : (a → a) → a@ if desired.
@
––––––––––––––––––––––––––––––––––––––– [Let]
Γ ⊢ let x = e in e' : τ'
@
^ let __x__ = e in e'
^ let x = __e__ in e'
^ let x = e in __e'__
Γ ⊢ e:τ
σ = gen(Γ,τ)
Γ ⊢ …
--------------------------
… let x = e in e' : τ'
type variables contained in it, except those already free in the environment.
>>> putPprLn tau
a → b → a
>>> putPprLn (generalize (Env [("x", Forall [] "b")]) tau)
In more formal notation,
@
gen(Γ,τ) = ∀{α}. τ
@
'generalize' can also be seen as the opposite of 'instantiate', which | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE OverloadedLists #
basic functional language using the - Damas - Milner algorithm .
* The output yields a nice overview over the definitions given , with
a nice rendering of a truckload of comments .
module HindleyMilner where
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Control.Monad.Trans.State
import Data.Map (Map)
import qualified Data.Map as M
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
> > > let putPprLn = T.putStrLn . ppr
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
human - readable output as opposed to being valid .
class Pretty a where
ppr :: a -> Text
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
newtype Name = Name Text
deriving (Eq, Ord, Show)
instance IsString Name where
fromString = Name . T.pack
instance Pretty Name where
ppr (Name n) = n
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
* *
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
no @forall@s . are the inner building blocks of all types . Examples
In formal notation , ' MType 's are often called τ ( tau ) types .
^ @Int@ , @()@ , …
^ a b@
deriving Show
| > > > putPprLn ( TFun ( ( TVar " a " ) ( TVar " b " ) ) ( TFun ( TVar " c " ) ( TVar " d " ) ) )
Using the ' IsString ' instance :
> > > putPprLn ( TFun ( " a " " b " ) ( TFun " c " " d " ) )
instance Pretty MType where
ppr = go False
where
go _ (TVar name) = ppr name
go _ (TList a) = "[" <> ppr a <> "]"
go _ (TEither l r) = "Either " <> ppr l <> " " <> ppr r
go _ (TTuple a b) = "(" <> ppr a <> ", " <> ppr b <> ")"
go _ (TConst name) = ppr name
go parenthesize (TFun a b)
| parenthesize = "(" <> lhs <> " → " <> rhs <> ")"
| otherwise = lhs <> " → " <> rhs
where lhs = go True a
rhs = go False b
| > > > " var " : : MType
TVar ( Name " var " )
instance IsString MType where
fromString = TVar . fromString
| The free variables of an ' MType ' . This is simply the collection of all the
_ _ Example : _ _ The free variables of @a - > b@ are @a@ and
freeMType :: MType -> Set Name
freeMType = \case
TVar a -> [a]
TFun a b -> freeMType a <> freeMType b
TList a -> freeMType a
TEither l r -> freeMType l <> freeMType r
TTuple a b -> freeMType a <> freeMType b
TConst _ -> []
instance Substitutable MType where
applySubst s = \case
TVar a -> let Subst s' = s
in M.findWithDefault (TVar a) a s'
TFun f x -> TFun (applySubst s f) (applySubst s x)
TList a -> TList (applySubst s a)
TEither l r -> TEither (applySubst s l) (applySubst s r)
TTuple a b -> TTuple (applySubst s a) (applySubst s b)
c@TConst {} -> c
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
| A polytype is a monotype universally quantified over a number of type
variables . In Haskell , all definitions have polytypes , but since the @forall@
example , the type of @1 : : Int@ is actually @forall < nothing > . Int@ , and the
type of @id@ is @forall a. a - > a@ , although GHC displays it as @a - > a@.
A polytype claims to work " for all imaginable type parameters " , very similar
similarly we 'll later insert types into a polytype 's quantified variables to
∀ ( " forall " ) is the collection of type variables , and @a - > a@ is the ' MType '
version @Int - > Int@ available . This process will be the topic of the type
In formal notation , ' PType 's are often called σ ( sigma ) types .
polymorphism , where type inferece is total ( all types can be inferred ) and
effectively replacing all occurrences of ' MType ' with ' PType ' , yields
| > > > putPprLn ( Forall [ " a " ] ( TFun " a " " a " ) )
∀a . a → a
instance Pretty PType where
ppr (Forall qs mType) = "∀" <> pprUniversals <> ". " <> ppr mType
where
pprUniversals
| S.null qs = "∅"
| otherwise = (T.intercalate " " . map ppr . S.toList) qs
| The free variables of a ' PType ' are the free variables of the contained
' MType ' , except those universally quantified .
> > > let sigma = Forall [ " a " ] ( TFun " a " ( TFun ( TTuple " b " " a " ) " c " ) )
∀a . a → ( b , a ) → c
> > > let display = T.putStrLn . T.intercalate " , " . foldMap ( \x - > [ ppr x ] )
> > > display ( freePType sigma )
freePType :: PType -> Set Name
freePType (Forall qs mType) = freeMType mType `S.difference` qs
instance Substitutable PType where
applySubst (Subst subst) (Forall qs mType) =
let qs' = M.fromSet (const ()) qs
subst' = Subst (subst `M.difference` qs')
in Forall qs (applySubst subst' mType)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
There are two kinds of membership in an environment ,
x : σ ∈ Γ
given an environment @Γ@. @Γ : τ@ can thus be seen as a judgement that
this , it also entails @λy . , , @let i d = λy . y in i d and so on .
In terms , the environment consists of all the things you currently
Prelude , your environment entails
newtype Env = Env (Map Name PType)
[ ( " i d " , Forall [ " a " ] ( TFun " a " " a " ) )
, ( " const " , Forall [ " a " , " b " ] ( TFun " a " ( TFun " b " " a " ) ) ) ] )
instance Pretty Env where
ppr (Env env) = "Γ = { " <> T.intercalate "\n , " pprBindings <> " }"
where
bindings = M.assocs env
pprBinding (name, pType) = ppr name <> " : " <> ppr pType
pprBindings = map pprBinding bindings
' PType 's it contains .
freeEnv :: Env -> Set Name
freeEnv (Env env) = let allPTypes = M.elems env
in S.unions (map freePType allPTypes)
substituion on all the contained ' PType 's .
instance Substitutable Env where
applySubst s (Env env) = Env (M.map (applySubst s) env)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
| A substitution is a mapping from type variables to ' MType 's . Applying a
@a - > Int@ applied to @a - > a@ yields the result @Int - > Int@.
A key concept behind Hindley - Milner is that once we dive deeper into an
has to be specialized to @b - > b@ , and then later on that is actually
newtype Subst = Subst (Map Name MType)
' applySubst ' ' ' ≡ ' i d '
' applySubst ' ( s1 ' < > ' s2 ) ≡ ' applySubst ' s1 . ' applySubst ' s2
class Substitutable a where
applySubst :: Subst -> a -> a
instance (Substitutable a, Substitutable b) => Substitutable (a,b) where
applySubst s (x,y) = (applySubst s x, applySubst s y)
| @'applySubst ' s1 s2@ applies one substitution to another , replacing all the
bindings in the second argument @s2@ with their values mentioned in the first
instance Substitutable Subst where
applySubst s (Subst target) = Subst (fmap (applySubst s) target)
putPprLn ( Subst
[ ( " a " , TFun " b " " b " )
, ( " b " , " c " " d " ) ] )
instance Pretty Subst where
ppr (Subst s) = "{ " <> T.intercalate "\n, " [ ppr k <> " ––> " <> ppr v | (k,v) <- M.toList s ] <> " }"
| Combine two substitutions by applying all substitutions mentioned in the
first argument to the type variables contained in the second .
instance Monoid Subst where
can use one of ' Substitutable 's laws to show that substitutions
applySubst ( compose s1 ( compose s2 s3 ) )
= applySubst s1 . ( compose s2 s3 )
= applySubst s1 . applySubst s2 . applySubst s3
= applySubst ( compose s1 s2 ) . applySubst s3
= applySubst ( compose ( compose s1 s2 ) s3 )
mappend subst1 subst2 = Subst (s1 `M.union` s2)
where
Subst s1 = subst1
Subst s2 = applySubst subst1 subst2
mempty = Subst M.empty
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
$ Typechecking does two things :
1 . If two types are not immediately identical , attempt to ' unify ' them
2 . ' infer ' the most general type of a value by comparing the values in its
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
newtype Infer a = Infer (ExceptT InferError (State [Name]) a)
deriving (Functor, Applicative, Monad)
data InferError =
For example , @a - > a@ and @Int@ do not unify .
CannotUnify MType MType
| A ' TVar ' is bound to an ' MType ' that already contains it .
The canonical example of this is @λx . x , where the first @x@
in the body has to have type @a - > b@ , and the second one Since
@a - > b@ , which only works if @a = a - > b = ( a - > b ) - > b = … @ , yielding
> > > putPprLn ( OccursCheckFailed " a " ( TFun " a " " a " ) )
| OccursCheckFailed Name MType
> > > putPprLn ( UnknownIdentifier " a " )
| UnknownIdentifier Name
deriving Show
| > > > putPprLn ( CannotUnify ( " a " " b " ) ( TTuple " a " " b " ) )
instance Pretty InferError where
ppr = \case
CannotUnify t1 t2 ->
"Cannot unify " <> ppr t1 <> " with " <> ppr t2
OccursCheckFailed name ty ->
"Occurs check failed: " <> ppr name <> " already appears in " <> ppr ty
UnknownIdentifier name ->
"Unknown identifier: " <> ppr name
> > > let expr = EAbs " f " ( EAbs " g " ( EAbs " x " ( EApp ( EApp " f " " x " ) ( EApp " g " " x " ) ) ) )
-> Either InferError a
runInfer (Infer inf) =
evalState (runExceptT inf) (map Name (infiniteSupply alphabet))
where
alphabet = map T.singleton ['a'..'z']
infiniteSupply supply = supply <> addSuffixes supply (1 :: Integer)
where
addSuffixes xs n = map (\x -> addSuffix x n) xs <> addSuffixes xs (n+1)
addSuffix x n = x <> T.pack (show n)
> > > case ( throw ( UnknownIdentifier " var " ) ) of Left err - > putPprLn err
throw :: InferError -> Infer a
throw = Infer . throwE
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
$ Unification describes the process of making two different types compatible
| The unification of two ' MType 's is the most general substitution that can be
> > > let m1 = TFun " a " " b "
> > > let m2 = TFun " c " ( " d " " e " )
> > >
> > > case inferSubst of Right subst - > putPprLn subst
unify :: (MType, MType) -> Infer Subst
unify = \case
(TFun a b, TFun x y) -> unifyBinary (a,b) (x,y)
(TVar v, x) -> v `bindVariableTo` x
(x, TVar v) -> v `bindVariableTo` x
(TConst a, TConst b) | a == b -> pure mempty
(TList a, TList b) -> unify (a,b)
(TEither a b, TEither x y) -> unifyBinary (a,b) (x,y)
(TTuple a b, TTuple x y) -> unifyBinary (a,b) (x,y)
(a, b) -> throw (CannotUnify a b)
where
Unification is first done for the first operand , and assuming the
required substitution , for the second one .
unifyBinary :: (MType, MType) -> (MType, MType) -> Infer Subst
unifyBinary (a,b) (x,y) = do
s1 <- unify (a, x)
s2 <- unify (applySubst s1 (b, y))
pure (s1 <> s2)
| Build a ' Subst'itution that binds a ' Name ' of a ' TVar ' to an ' MType ' . The
- Substituting a ' Name ' with a ' TVar ' with the same name unifies a type
- If the ' Name ' we 're trying to bind to an ' MType ' already occurs in that
' MType ' , the resulting substitution would not be idempotent : the ' MType '
bindVariableTo :: Name -> MType -> Infer Subst
bindVariableTo name (TVar v) | boundToSelf = pure mempty
where
boundToSelf = name == v
bindVariableTo name mType | name `occursIn` mType = throw (OccursCheckFailed name mType)
where
n `occursIn` ty = n `S.member` freeMType ty
bindVariableTo name mType = pure (Subst (M.singleton name mType))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
In literature , the - Damas - Milner inference algorithm ( " Algorithm W " )
Γ ⊢ even : Int → Bool Γ ⊢ 1 : Int
Γ ⊢ even 1 :
of type @Int@ called @1@ , then we also have a value of type @Bool@ via
Since ' ELet ' is non - recursive , the usual fixed - point function
^ True , 1
^ @let x = e in e'@ ( non - recursive )
deriving Show
data Lit = LBool Bool
| LInteger Integer
deriving Show
| > > > putPprLn ( EAbs " f " ( EAbs " g " ( EAbs " x " ( EApp ( EApp " f " " x " ) ( EApp " g " " x " ) ) ) ) )
instance Pretty Exp where
ppr (ELit lit) = ppr lit
ppr (EVar name) = ppr name
ppr (EApp f x) = pprApp1 f <> " " <> pprApp2 x
where
pprApp1 = \case
eLet@ELet{} -> "(" <> ppr eLet <> ")"
eLet@EAbs{} -> "(" <> ppr eLet <> ")"
e -> ppr e
pprApp2 = \case
eApp@EApp{} -> "(" <> ppr eApp <> ")"
e -> pprApp1 e
ppr x@EAbs{} = pprAbs True x
where
pprAbs True (EAbs name expr) = "λ" <> ppr name <> pprAbs False expr
pprAbs False (EAbs name expr) = " " <> ppr name <> pprAbs False expr
pprAbs _ expr = ". " <> ppr expr
ppr (ELet name value body) =
"let " <> ppr name <> " = " <> ppr value <> " in " <> ppr body
> > > putPprLn ( LInteger 127 )
127
instance Pretty Lit where
ppr = \case
LBool b -> showT b
LInteger i -> showT i
where
showT :: Show a => a -> Text
showT = T.pack . show
EVar ( Name " var " )
instance IsString Exp where
fromString = EVar . fromString
of this is η expansion , which transforms @f@ into @λx . f , where " x " is a
fresh :: Infer MType
fresh = drawFromSupply >>= \case
Right name -> pure (TVar name)
Left err -> throw err
where
drawFromSupply :: Infer (Either InferError Name)
drawFromSupply = Infer (do
s:upply <- lift get
lift (put upply)
pure (Right s) )
The equivalent would be defining a new value , for example in module
scope or in a block . This corresponds to the " comma " operation used in
Γ , x : σ ≡ extendEnv Γ ( x , σ )
extendEnv :: Env -> (Name, PType) -> Env
extendEnv (Env env) (name, pType) = Env (M.insert name pType env)
' Exp 's ' MType ' along with a substitution that has to be done in order to reach
infer :: Env -> Exp -> Infer (Subst, MType)
infer env = \case
ELit lit -> inferLit lit
EVar name -> inferVar env name
EApp f x -> inferApp env f x
EAbs x e -> inferAbs env x e
ELet x e e' -> inferLet env x e e'
inferLit :: Lit -> Infer (Subst, MType)
inferLit lit = pure (mempty, TConst litTy)
where
litTy = case lit of
LBool {} -> "Bool"
LInteger {} -> "Integer"
x : σ ∈ Γ τ = instantiate(σ )
This means that if @Γ@ /literally contains/ ( @∈@ ) a value , then it also
/entails it/ ( @⊢@ ) in all its instantiations .
inferVar :: Env -> Name -> Infer (Subst, MType)
inferVar env name = do
x : σ ∈ Γ
Γ ⊢ x : τ
| Look up the ' PType ' of a ' Name ' in the ' Env'ironment .
This checks whether @x : σ@ is /literally contained/ in @Γ@. For more details
To give a Haskell analogon , looking up @id@ when @Prelude@ is loaded , the
resulting ' PType ' would be @id@ 's type , namely @forall a. a - > a@.
lookupEnv :: Env -> Name -> Infer PType
lookupEnv (Env env) name = case M.lookup name env of
Just x -> pure x
Nothing -> throw (UnknownIdentifier name)
| Bind all quantified variables of a ' PType ' to ' fresh ' type variables .
_ _ Example : _ _ instantiating @forall a. a - > b - > a@ results in the ' MType '
@c - > b - > c@ , where @c@ is a fresh name ( to avoid shadowing issues ) .
You can picture the ' PType ' to be the prototype converted to an instantiated
' MType ' , which can now be used in the unification process .
need later to convert an ' MType ' to a ' PType ' .
instantiate :: PType -> Infer MType
instantiate (Forall qs t) = do
subst <- substituteAllWithFresh qs
pure (applySubst subst t)
where
substituteAllWithFresh :: Set Name -> Infer Subst
substituteAllWithFresh xs = do
let freshSubstActions = M.fromSet (const fresh) xs
freshSubsts <- sequenceA freshSubstActions
pure (Subst freshSubsts)
Γ ⊢ f : fτ Γ ⊢ x : xτ fxτ = fresh unify(fτ , xτ )
Γ ⊢ f x : fxτ
inferApp
:: Env
-> Infer (Subst, MType)
inferApp env f x = do
unify ( fτ , xτ )
f x : fxτ
τ = fresh σ = ∀∅. τ Γ , x : σ ⊢ e : τ '
: τ→τ '
Here , @Γ , x : τ@ is @Γ@ extended by one additional mapping , namely @x : τ@.
Abstraction is typed by extending the environment by a new ' MType ' , and if
inferAbs
:: Env
-> Infer (Subst, MType)
inferAbs env x e = do
Γ , x : σ …
λx.e : τ→τ '
principled manner . To do this , we first have to typecheck the expression to
be introduced . The result of this is then generalized to a ' PType ' , since let
Γ ⊢ e : τ σ = gen(Γ , τ ) Γ , x : σ ⊢ e':τ '
inferLet
:: Env
-> Infer (Subst, MType)
inferLet env x e e' = do
let env' = applySubst s1 env
Γ , x : σ
| an ' MType ' to a ' PType ' by universally quantifying over all the
> > > let tau = TFun " a " ( TFun " b " " a " )
∀a . a → b → a
where { α } = free(τ ) – )
converts a ' PType ' to an ' MType ' .
generalize :: Env -> MType -> PType
generalize env mType = Forall qs mType
where
qs = freeMType mType `S.difference` freeEnv env
|
625fb4b74ccb6da4860b92bc5ecb45c868a7859a48bdc4807263389ba814c3bb | ariesteam/aries | varprop.clj | Copyright 2010
;;;
;;; This file is part of clj-misc.
;;;
;;; clj-misc is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation , either version 3 of the License ,
;;; or (at your option) any later version.
;;;
;;; clj-misc is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with clj-misc. If not, see </>.
;;;
;;;-------------------------------------------------------------------
;;;
;;; This namespace defines functions for creating, querying, and
;;; manipulating fuzzy numbers, which are defined to be pairs of [mean
;;; var].
(ns clj-misc.varprop
(:use [clj-misc.utils :only [replace-all my-partition-all]])
(:import (clojure.lang PersistentStructMap)))
(defstruct FuzzyNumber :mean :var)
(defn fuzzy-number
"Constructs a FuzzyNumber."
[mean var]
(struct FuzzyNumber mean var))
(defn create-from-states
"Constructs a FuzzyNumber from n states and n probs, corresponding
to a finite discrete distribution."
[states probs]
(let [mean (reduce + (map * states probs))
var (reduce + (map (fn [x p] (* (Math/pow (- x mean) 2) p)) states probs))]
(fuzzy-number mean var)))
(defn create-from-ranges
"Constructs a FuzzyNumber from n bounds and n-1 probs corresponding
to a piecewise continuous uniform distribution with
discontinuities (i.e. jumps) at the bounds. prob i represents the
probability of being between bound i and bound i+1."
[bounds probs]
(let [midpoints (map (fn [next prev] (/ (+ next prev) 2.0)) (rest bounds) bounds)
mean (reduce + (map * midpoints probs))
second-moment (* 1/3 (reduce + (map (fn [p1 p2 bp] (* (Math/pow bp 3) (- p1 p2)))
(cons 0 probs)
(concat probs [0])
bounds)))
var (- second-moment (* mean mean))]
(fuzzy-number mean var)))
(def #^{:doc "A FuzzyNumber with mean and variance of 0."} _0_ (fuzzy-number 0.0 0.0))
(defn _+_
"Returns the sum of two or more FuzzyNumbers."
([X Y]
(fuzzy-number (+ (:mean X) (:mean Y)) (+ (:var X) (:var Y))))
([X Y & more]
(reduce _+_ (_+_ X Y) more)))
(defn _-_
"Returns the difference of two or more FuzzyNumbers."
([X Y]
(fuzzy-number (- (:mean X) (:mean Y)) (+ (:var X) (:var Y))))
([X Y & more]
(reduce _-_ (_-_ X Y) more)))
(defn _*_
"Returns the product of two or more FuzzyNumbers."
([{mx :mean, vx :var} {my :mean, vy :var}]
(fuzzy-number (* mx my) (+ (* vx vy) (* mx mx vy) (* my my vx))))
([X Y & more]
(reduce _*_ (_*_ X Y) more)))
(declare d_)
(defn _d_
"Returns the quotient of two or more FuzzyNumbers."
([X Y]
(_*_ X (d_ 1 Y)))
([X Y & more]
(reduce _d_ (_d_ X Y) more)))
(defn _<_
"Compares two or more FuzzyNumbers and returns true if P(X_i < X_i+1) > 0.5 for all i in [1,n]."
([X Y]
(< (:mean X) (:mean Y)))
([X Y & more]
(every? (fn [[X Y]] (_<_ X Y)) (partition 2 1 (list* X Y more)))))
(defn _>_
"Compares two or more FuzzyNumbers and returns true if P(X_i > X_i+1) > 0.5 for all i in [1,n]."
([X Y]
(> (:mean X) (:mean Y)))
([X Y & more]
(every? (fn [[X Y]] (_>_ X Y)) (partition 2 1 (list* X Y more)))))
(defn _min_
"Returns the smallest of two or more FuzzyNumbers using _<_."
([X Y]
(if (_<_ X Y) X Y))
([X Y & more]
(reduce _min_ (_min_ X Y) more)))
(defn _max_
"Returns the greatest of two or more FuzzyNumbers using _>_."
([X Y]
(if (_>_ X Y) X Y))
([X Y & more]
(reduce _max_ (_max_ X Y) more)))
(defn _+
"Returns the sum of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (+ (:mean X) y) (:var X)))
([X y & more]
(reduce _+ (_+ X y) more)))
(defn _-
"Returns the difference of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (- (:mean X) y) (:var X)))
([X y & more]
(reduce _- (_- X y) more)))
(defn _*
"Returns the product of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (* (:mean X) y) (* (:var X) y y)))
([X y & more]
(reduce _* (_* X y) more)))
(defn _d
"Returns the quotient of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (/ (:mean X) y) (/ (:var X) y y)))
([X y & more]
(reduce _d (_d X y) more)))
(defn _<
"Compares a FuzzyNumber and one or more scalars and returns true if
P(X < y_1) > 0.5 and all ys are in monotonically increasing order."
([X y]
(< (:mean X) y))
([X y & more]
(and (_< X y)
(apply < (cons y more)))))
(defn _>
"Compares a FuzzyNumber and one or more scalars and returns true if
P(X > y_1) > 0.5 and all ys are in monotonically decreasing order."
([X y]
(> (:mean X) y))
([X y & more]
(and (_> X y)
(apply > (cons y more)))))
(defn _min
"Returns the smallest of a FuzzyNumber and one or more scalars using _<."
([X y]
(if (_< X y) X y))
([X y & more]
(_min X (reduce min y more))))
(defn _max
"Returns the greatest of a FuzzyNumber and one or more scalars using _>."
([X y]
(if (_> X y) X y))
([X y & more]
(_max X (reduce max y more))))
(defn +_
"Returns the sum of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (+ x (:mean Y)) (:var Y)))
([x Y & more]
(reduce _+_ (+_ x Y) more)))
(defn -_
"Returns the difference of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (- x (:mean Y)) (:var Y)))
([x Y & more]
(reduce _-_ (-_ x Y) more)))
(defn *_
"Returns the product of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (* x (:mean Y)) (* x x (:var Y))))
([x Y & more]
(reduce _*_ (*_ x Y) more)))
(defn d_
"Returns the quotient of a scalar and one or more FuzzyNumbers."
([x {:keys [mean var]}]
(fuzzy-number (/ x mean) (/ (* x x var) (Math/pow mean 4))))
([x Y & more]
(reduce _d_ (d_ x Y) more)))
(defn <_
"Compares a scalar and one or more FuzzyNumbers and returns true if
P(Y > x) > 0.5 and all Ys are in monotonically increasing order by
_<_."
([x Y]
(< x (:mean Y)))
([x Y & more]
(and (<_ x Y)
(apply _<_ (cons Y more)))))
(defn >_
"Compares a scalar and one or more FuzzyNumbers and returns true if
P(Y < x) > 0.5 and all Ys are in monotonically decreasing order by
_>_."
([x Y]
(> x (:mean Y)))
([x Y & more]
(and (>_ x Y)
(apply _>_ (cons Y more)))))
(defn min_
"Returns the smallest of a scalar and one or more FuzzyNumbers using <_."
([x Y]
(if (<_ x Y) x Y))
([x Y & more]
(min_ x (reduce _min_ Y more))))
(defn max_
"Returns the greatest of a scalar and one or more FuzzyNumbers using >_."
([x Y]
(if (>_ x Y) x Y))
([x Y & more]
(max_ x (reduce _max_ Y more))))
(defmulti ?+?
" Returns the sum of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?+? [PersistentStructMap PersistentStructMap] [X Y & _] (_+_ X Y))
(defmethod ?+? [PersistentStructMap Number] [X Y & _] (_+ X Y))
(defmethod ?+? [Number PersistentStructMap] [X Y & _] ( +_ X Y))
(defmethod ?+? [Number Number] [X Y & _] ( + X Y))
(defmethod ?+? :more [X Y & more] (reduce ?+? (?+? X Y) more))
(defmulti ?-?
" Returns the difference of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?-? [PersistentStructMap PersistentStructMap] [X Y & _] (_-_ X Y))
(defmethod ?-? [PersistentStructMap Number] [X Y & _] (_- X Y))
(defmethod ?-? [Number PersistentStructMap] [X Y & _] ( -_ X Y))
(defmethod ?-? [Number Number] [X Y & _] ( - X Y))
(defmethod ?-? :more [X Y & more] (reduce ?-? (?-? X Y) more))
(defmulti ?*?
" Returns the product of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?*? [PersistentStructMap PersistentStructMap] [X Y & _] (_*_ X Y))
(defmethod ?*? [PersistentStructMap Number] [X Y & _] (_* X Y))
(defmethod ?*? [Number PersistentStructMap] [X Y & _] ( *_ X Y))
(defmethod ?*? [Number Number] [X Y & _] ( * X Y))
(defmethod ?*? :more [X Y & more] (reduce ?*? (?*? X Y) more))
(defmulti ?d?
" Returns the quotient of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?d? [PersistentStructMap PersistentStructMap] [X Y & _] (_d_ X Y))
(defmethod ?d? [PersistentStructMap Number] [X Y & _] (_d X Y))
(defmethod ?d? [Number PersistentStructMap] [X Y & _] ( d_ X Y))
(defmethod ?d? [Number Number] [X Y & _] ( / X Y))
(defmethod ?d? :more [X Y & more] (reduce ?d? (?d? X Y) more))
(defmulti ?<?
" Compares two or more values , which may be FuzzyNumbers or
;; scalars, and returns true if X_i < X_i+1 for all i in [1,n]. Uses
;; reflection."
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?<? [PersistentStructMap PersistentStructMap] [X Y & _] (_<_ X Y))
(defmethod ?<? [PersistentStructMap Number] [X Y & _] (_< X Y))
(defmethod ?<? [Number PersistentStructMap] [X Y & _] ( <_ X Y))
(defmethod ?<? [Number Number] [X Y & _] ( < X Y))
(defmethod ?<? :more [X Y & more] (every? (fn [[X Y]] (?<? X Y)) (partition 2 1 (list* X Y more))))
(defmulti ?>?
" Compares two or more values , which may be FuzzyNumbers or
;; scalars, and returns true if X_i > X_i+1 for all i in [1,n]. Uses
;; reflection."
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?>? [PersistentStructMap PersistentStructMap] [X Y & _] (_>_ X Y))
(defmethod ?>? [PersistentStructMap Number] [X Y & _] (_> X Y))
(defmethod ?>? [Number PersistentStructMap] [X Y & _] ( >_ X Y))
(defmethod ?>? [Number Number] [X Y & _] ( > X Y))
(defmethod ?>? :more [X Y & more] (every? (fn [[X Y]] (?>? X Y)) (partition 2 1 (list* X Y more))))
(defmulti ?min?
" Returns the smallest of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?min? [PersistentStructMap PersistentStructMap] [X Y & _] (_min_ X Y))
(defmethod ?min? [PersistentStructMap Number] [X Y & _] (_min X Y))
(defmethod ?min? [Number PersistentStructMap] [X Y & _] ( min_ X Y))
(defmethod ?min? [Number Number] [X Y & _] ( min X Y))
(defmethod ?min? :more [X Y & more] (reduce ?min? (?min? X Y) more))
(defmulti ?max?
" Returns the largest of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?max? [PersistentStructMap PersistentStructMap] [X Y & _] (_max_ X Y))
(defmethod ?max? [PersistentStructMap Number] [X Y & _] (_max X Y))
(defmethod ?max? [Number PersistentStructMap] [X Y & _] ( max_ X Y))
(defmethod ?max? [Number Number] [X Y & _] ( max X Y))
(defmethod ?max? :more [X Y & more] (reduce ?max? (?max? X Y) more))
(def fuzzy-arithmetic-mapping
'{+ clj-misc.varprop/?+?
- clj-misc.varprop/?-?
* clj-misc.varprop/?*?
/ clj-misc.varprop/?d?
< clj-misc.varprop/?<?
> clj-misc.varprop/?>?
min clj-misc.varprop/?min?
max clj-misc.varprop/?max?})
(defn ensure-fuzzy
"If value is a FuzzyNumber, return it. Otherwise, make it into a
FuzzyNumber."
[value]
(if (number? value)
(fuzzy-number value 0.0)
value))
(defn fuzzify-fn
"Transforms f into its fuzzy arithmetic equivalent, using the
mappings defined in fuzzy-arithmetic-mapping."
[f]
(if-let [new-f (fuzzy-arithmetic-mapping f)]
new-f
(if (list? f)
(let [[lambda args & body] f]
(if (and (= lambda 'fn) (vector? args))
`(comp ensure-fuzzy (~lambda ~args ~@(replace-all fuzzy-arithmetic-mapping body)))
f))
f)))
;; (defmacro rv-fn
;;   "Transforms f into its fuzzy arithmetic equivalent, fuzzy-f, and
;;    calls (apply fuzzy-f Xs). Uses reflection on the types of Xs as
;;    well as any numeric literal values used in f."
;;   [f & Xs]
;;   `(~(fuzzify-fn f) ~@Xs))
(defn rv-fn
"Transforms f into its fuzzy arithmetic equivalent, fuzzy-f, and
calls (apply fuzzy-f Xs). Uses reflection on the types of Xs as
well as any numeric literal values used in f."
[f & Xs]
(apply (eval (fuzzify-fn f)) Xs))
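;; Usage sketch (an illustrative addition, not part of the original source):
;; fuzzify-fn rewrites the scalar operators named in fuzzy-arithmetic-mapping
;; inside a quoted (fn [...] ...) form, and rv-fn evals the rewritten form and
;; applies it. This assumes replace-all (from clj-misc.utils) substitutes the
;; mapped symbols as the docstrings describe; values shown are approximate.
(comment
  (fuzzify-fn '+)
  ;; => clj-misc.varprop/?+?
  (fuzzify-fn '(fn [a b] (+ a b)))
  ;; => (comp ensure-fuzzy (fn [a b] (?+? a b))), with namespace-qualified symbols
  (rv-fn '(fn [a b] (+ a b)) (fuzzy-number 1.0 0.25) 2)
  ;; => {:mean 3.0 :var 0.25}, via the [PersistentStructMap Number] method of ?+?
  )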
(defn rv-mean
"Returns the mean of a FuzzyNumber."
[X]
(:mean X))
(defn rv-variance
"Returns the variance of a FuzzyNumber."
[X]
(:var X))
(defn rv-stdev
"Returns the standard deviation of a FuzzyNumber."
[X]
(Math/sqrt (:var X)))
(defn rv-sum
"Returns the sum of a sequence of FuzzyNumbers using _+_."
[Xs]
(cond (empty? Xs)
_0_
(== (count Xs) 1)
(first Xs)
(<= (count Xs) 20)
(reduce _+_ Xs)
:otherwise
(recur (pmap rv-sum
(my-partition-all 20 Xs)))))
(defn rv-extensive-sampler
"Returns the extensive weighted sum of a coverage (i.e. a sequence
of pairs of [value fraction-covered])."
[coverage]
(rv-sum (map (fn [[val frac]] (_* val frac)) coverage)))
(defn rv-intensive-sampler
"Returns the intensive weighted sum of a coverage (i.e. a sequence
of pairs of [value fraction-covered])."
[coverage]
(let [frac-sum (reduce + (map second coverage))]
(rv-sum (map (fn [[val frac]] (_* val (/ frac frac-sum))) coverage))))
(defn rv-distribution-sampler
"Returns the distribution of the means of a coverage (i.e. a
sequence of pairs of [value fraction-covered])."
[coverage]
(let [frac-sum (reduce + (map second coverage))
states (map (comp rv-mean first) coverage)
probs (map #(/ (second %) frac-sum) coverage)]
(create-from-states states probs)))
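;; Illustrative sketch (added, not in the original source): a "coverage" is a
;; sequence of [value fraction-covered] pairs. The extensive sampler weights
;; each value by its fraction; the intensive sampler first normalizes the
;; fractions so they sum to 1. Numbers below are approximate.
(comment
  (def coverage [[(fuzzy-number 10.0 1.0) 0.25]
                 [(fuzzy-number 20.0 4.0) 0.75]])
  (rv-extensive-sampler coverage)
  ;; => {:mean 17.5 :var 2.3125}, since 0.25*10 + 0.75*20 = 17.5
  (rv-intensive-sampler coverage)
  ;; => the same result here, because the fractions already sum to 1
  )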
(let [stored-val (atom nil)]
(defn marsaglia-normal
"Returns a value from X~N(0,1). Uses the Marsaglia polar
method. Memoizes extra computed values for quicker lookups on
even calls."
[]
(when-let [normal-val @stored-val]
(swap! stored-val (constantly nil))
normal-val)
(let [v1 (dec (* 2.0 (rand)))
v2 (dec (* 2.0 (rand)))
s (+ (* v1 v1) (* v2 v2))]
(if (and (not= s 0.0) (< s 1.0))
(let [theta (Math/sqrt (/ (* -2.0 (Math/log s)) s))]
(swap! stored-val (constantly (* v1 theta)))
(* v2 theta))
(recur)))))
(let [stored-val (atom nil)]
(defn box-muller-normal
"Returns a value from X~N(0,1). Uses the Box-Muller
transform. Memoizes extra computed values for quicker lookups on
even calls."
[]
(when-let [normal-val @stored-val]
(swap! stored-val (constantly nil))
normal-val)
    (let [u1 (+ (rand) 1e-6) ;; adding delta=1e-6 to prevent computing log(0) below
u2 (rand)
r (Math/sqrt (* -2.0 (Math/log u1)))
theta (* 2.0 Math/PI u2)
n1 (* r (Math/cos theta))
n2 (* r (Math/sin theta))]
(swap! stored-val (constantly n1))
n2)))
(defn draw
"Extracts a deterministic value from a FuzzyNumber by modelling it
as a normal distribution."
[X]
(+ (* (marsaglia-normal) (Math/sqrt (:var X))) (:mean X)))
(defn draw-repeatedly
"Takes a FuzzyNumber X, and returns an infinite lazy sequence of
normally-distributed, pseudo-random numbers that match the
parameters of X, (or a finite sequence of length n, if an integer n
is provided)."
([{:keys [mean var]}]
(let [sigma (Math/sqrt var)]
(map #(+ (* sigma %) mean) (repeatedly marsaglia-normal))))
([n X]
(take n (draw-repeatedly X))))
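;; End-to-end sketch (added for illustration, not part of the original file):
;; build a FuzzyNumber from a discrete distribution, combine it with a scalar,
;; and sample it. Values are approximate (floating point).
(comment
  (def X (create-from-states [1 2 3] [0.2 0.5 0.3]))
  ;; => {:mean 2.1 :var 0.49}
  (rv-mean (?+? X 1))          ;; => 3.1
  (rv-stdev X)                 ;; => 0.7
  (take 3 (draw-repeatedly X)) ;; => three draws from approximately N(2.1, 0.49)
  )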
| null | https://raw.githubusercontent.com/ariesteam/aries/b3fafd4640f4e7950fff3791bc4ea4c06ee4dcdf/plugins/org.integratedmodelling.aries.core/bindings/clojure/clj_misc/varprop.clj | clojure |
This file is part of clj-misc.
clj-misc is free software: you can redistribute it and/or modify
or (at your option) any later version.
clj-misc is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with clj-misc. If not, see </>.
-------------------------------------------------------------------
This namespace defines functions for creating, querying, and
manipulating fuzzy numbers, which are defined to be pairs of [mean
var].
scalars, and returns true if X_i < X_i+1 for all i in [1,n]. Uses
reflection."
scalars, and returns true if X_i > X_i+1 for all i in [1,n]. Uses
reflection."
"Transforms f into its fuzzy arithmetic equivalent, fuzzy-f, and
[f & Xs]
`(~(fuzzify-fn f) ~@Xs)) | Copyright 2010
it under the terms of the GNU General Public License as published
by the Free Software Foundation , either version 3 of the License ,
You should have received a copy of the GNU General Public License
(ns clj-misc.varprop
(:use [clj-misc.utils :only [replace-all my-partition-all]])
(:import (clojure.lang PersistentStructMap)))
(defstruct FuzzyNumber :mean :var)
(defn fuzzy-number
"Constructs a FuzzyNumber."
[mean var]
(struct FuzzyNumber mean var))
(defn create-from-states
"Constructs a FuzzyNumber from n states and n probs, corresponding
to a finite discrete distribution."
[states probs]
(let [mean (reduce + (map * states probs))
var (reduce + (map (fn [x p] (* (Math/pow (- x mean) 2) p)) states probs))]
(fuzzy-number mean var)))
(defn create-from-ranges
"Constructs a FuzzyNumber from n bounds and n-1 probs corresponding
to a piecewise continuous uniform distribution with
discontinuities (i.e. jumps) at the bounds. prob i represents the
probability of being between bound i and bound i+1."
[bounds probs]
(let [midpoints (map (fn [next prev] (/ (+ next prev) 2.0)) (rest bounds) bounds)
mean (reduce + (map * midpoints probs))
second-moment (* 1/3 (reduce + (map (fn [p1 p2 bp] (* (Math/pow bp 3) (- p1 p2)))
(cons 0 probs)
(concat probs [0])
bounds)))
var (- second-moment (* mean mean))]
(fuzzy-number mean var)))
(def #^{:doc "A FuzzyNumber with mean and variance of 0."} _0_ (fuzzy-number 0.0 0.0))
(defn _+_
"Returns the sum of two or more FuzzyNumbers."
([X Y]
(fuzzy-number (+ (:mean X) (:mean Y)) (+ (:var X) (:var Y))))
([X Y & more]
(reduce _+_ (_+_ X Y) more)))
(defn _-_
"Returns the difference of two or more FuzzyNumbers."
([X Y]
(fuzzy-number (- (:mean X) (:mean Y)) (+ (:var X) (:var Y))))
([X Y & more]
(reduce _-_ (_-_ X Y) more)))
(defn _*_
"Returns the product of two or more FuzzyNumbers."
([{mx :mean, vx :var} {my :mean, vy :var}]
(fuzzy-number (* mx my) (+ (* vx vy) (* mx mx vy) (* my my vx))))
([X Y & more]
(reduce _*_ (_*_ X Y) more)))
(declare d_)
(defn _d_
"Returns the quotient of two or more FuzzyNumbers."
([X Y]
(_*_ X (d_ 1 Y)))
([X Y & more]
(reduce _d_ (_d_ X Y) more)))
(defn _<_
"Compares two or more FuzzyNumbers and returns true if P(X_i < X_i+1) > 0.5 for all i in [1,n]."
([X Y]
(< (:mean X) (:mean Y)))
([X Y & more]
(every? (fn [[X Y]] (_<_ X Y)) (partition 2 1 (list* X Y more)))))
(defn _>_
"Compares two or more FuzzyNumbers and returns true if P(X_i > X_i+1) > 0.5 for all i in [1,n]."
([X Y]
(> (:mean X) (:mean Y)))
([X Y & more]
(every? (fn [[X Y]] (_>_ X Y)) (partition 2 1 (list* X Y more)))))
(defn _min_
"Returns the smallest of two or more FuzzyNumbers using _<_."
([X Y]
(if (_<_ X Y) X Y))
([X Y & more]
(reduce _min_ (_min_ X Y) more)))
(defn _max_
"Returns the greatest of two or more FuzzyNumbers using _>_."
([X Y]
(if (_>_ X Y) X Y))
([X Y & more]
(reduce _max_ (_max_ X Y) more)))
(defn _+
"Returns the sum of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (+ (:mean X) y) (:var X)))
([X y & more]
(reduce _+ (_+ X y) more)))
(defn _-
"Returns the difference of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (- (:mean X) y) (:var X)))
([X y & more]
(reduce _- (_- X y) more)))
(defn _*
"Returns the product of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (* (:mean X) y) (* (:var X) y y)))
([X y & more]
(reduce _* (_* X y) more)))
(defn _d
"Returns the quotient of a FuzzyNumber and one or more scalars."
([X y]
(fuzzy-number (/ (:mean X) y) (/ (:var X) y y)))
([X y & more]
(reduce _d (_d X y) more)))
(defn _<
"Compares a FuzzyNumber and one or more scalars and returns true if
P(X < y_1) > 0.5 and all ys are in monotonically increasing order."
([X y]
(< (:mean X) y))
([X y & more]
(and (_< X y)
(apply < (cons y more)))))
(defn _>
"Compares a FuzzyNumber and one or more scalars and returns true if
P(X > y_1) > 0.5 and all ys are in monotonically decreasing order."
([X y]
(> (:mean X) y))
([X y & more]
(and (_> X y)
(apply > (cons y more)))))
(defn _min
"Returns the smallest of a FuzzyNumber and one or more scalars using _<."
([X y]
(if (_< X y) X y))
([X y & more]
(_min X (reduce min y more))))
(defn _max
"Returns the greatest of a FuzzyNumber and one or more scalars using _>."
([X y]
(if (_> X y) X y))
([X y & more]
(_max X (reduce max y more))))
(defn +_
"Returns the sum of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (+ x (:mean Y)) (:var Y)))
([x Y & more]
(reduce _+_ (+_ x Y) more)))
(defn -_
"Returns the difference of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (- x (:mean Y)) (:var Y)))
([x Y & more]
(reduce _-_ (-_ x Y) more)))
(defn *_
"Returns the product of a scalar and one or more FuzzyNumbers."
([x Y]
(fuzzy-number (* x (:mean Y)) (* x x (:var Y))))
([x Y & more]
(reduce _*_ (*_ x Y) more)))
(defn d_
"Returns the quotient of a scalar and one or more FuzzyNumbers."
([x {:keys [mean var]}]
(fuzzy-number (/ x mean) (/ (* x x var) (Math/pow mean 4))))
([x Y & more]
(reduce _d_ (d_ x Y) more)))
(defn <_
"Compares a scalar and one or more FuzzyNumbers and returns true if
P(Y > x) > 0.5 and all Ys are in monotonically increasing order by
_<_."
([x Y]
(< x (:mean Y)))
([x Y & more]
(and (<_ x Y)
(apply _<_ (cons Y more)))))
(defn >_
"Compares a scalar and one or more FuzzyNumbers and returns true if
P(Y < x) > 0.5 and all Ys are in monotonically decreasing order by
_>_."
([x Y]
(> x (:mean Y)))
([x Y & more]
(and (>_ x Y)
(apply _>_ (cons Y more)))))
(defn min_
"Returns the smallest of a scalar and one or more FuzzyNumbers using <_."
([x Y]
(if (<_ x Y) x Y))
([x Y & more]
(min_ x (reduce _min_ Y more))))
(defn max_
"Returns the greatest of a scalar and one or more FuzzyNumbers using >_."
([x Y]
(if (>_ x Y) x Y))
([x Y & more]
(max_ x (reduce _max_ Y more))))
(defmulti ?+?
" Returns the sum of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?+? [PersistentStructMap PersistentStructMap] [X Y & _] (_+_ X Y))
(defmethod ?+? [PersistentStructMap Number] [X Y & _] (_+ X Y))
(defmethod ?+? [Number PersistentStructMap] [X Y & _] ( +_ X Y))
(defmethod ?+? [Number Number] [X Y & _] ( + X Y))
(defmethod ?+? :more [X Y & more] (reduce ?+? (?+? X Y) more))
(defmulti ?-?
" Returns the difference of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?-? [PersistentStructMap PersistentStructMap] [X Y & _] (_-_ X Y))
(defmethod ?-? [PersistentStructMap Number] [X Y & _] (_- X Y))
(defmethod ?-? [Number PersistentStructMap] [X Y & _] ( -_ X Y))
(defmethod ?-? [Number Number] [X Y & _] ( - X Y))
(defmethod ?-? :more [X Y & more] (reduce ?-? (?-? X Y) more))
(defmulti ?*?
" Returns the product of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?*? [PersistentStructMap PersistentStructMap] [X Y & _] (_*_ X Y))
(defmethod ?*? [PersistentStructMap Number] [X Y & _] (_* X Y))
(defmethod ?*? [Number PersistentStructMap] [X Y & _] ( *_ X Y))
(defmethod ?*? [Number Number] [X Y & _] ( * X Y))
(defmethod ?*? :more [X Y & more] (reduce ?*? (?*? X Y) more))
(defmulti ?d?
" Returns the quotient of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?d? [PersistentStructMap PersistentStructMap] [X Y & _] (_d_ X Y))
(defmethod ?d? [PersistentStructMap Number] [X Y & _] (_d X Y))
(defmethod ?d? [Number PersistentStructMap] [X Y & _] ( d_ X Y))
(defmethod ?d? [Number Number] [X Y & _] ( / X Y))
(defmethod ?d? :more [X Y & more] (reduce ?d? (?d? X Y) more))
(defmulti ?<?
" Compares two or more values , which may be FuzzyNumbers or
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?<? [PersistentStructMap PersistentStructMap] [X Y & _] (_<_ X Y))
(defmethod ?<? [PersistentStructMap Number] [X Y & _] (_< X Y))
(defmethod ?<? [Number PersistentStructMap] [X Y & _] ( <_ X Y))
(defmethod ?<? [Number Number] [X Y & _] ( < X Y))
(defmethod ?<? :more [X Y & more] (every? (fn [[X Y]] (?<? X Y)) (partition 2 1 (list* X Y more))))
(defmulti ?>?
" Compares two or more values , which may be FuzzyNumbers or
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?>? [PersistentStructMap PersistentStructMap] [X Y & _] (_>_ X Y))
(defmethod ?>? [PersistentStructMap Number] [X Y & _] (_> X Y))
(defmethod ?>? [Number PersistentStructMap] [X Y & _] ( >_ X Y))
(defmethod ?>? [Number Number] [X Y & _] ( > X Y))
(defmethod ?>? :more [X Y & more] (every? (fn [[X Y]] (?>? X Y)) (partition 2 1 (list* X Y more))))
(defmulti ?min?
" Returns the smallest of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?min? [PersistentStructMap PersistentStructMap] [X Y & _] (_min_ X Y))
(defmethod ?min? [PersistentStructMap Number] [X Y & _] (_min X Y))
(defmethod ?min? [Number PersistentStructMap] [X Y & _] ( min_ X Y))
(defmethod ?min? [Number Number] [X Y & _] ( min X Y))
(defmethod ?min? :more [X Y & more] (reduce ?min? (?min? X Y) more))
(defmulti ?max?
" Returns the largest of two or more values , which may be FuzzyNumbers or scalars . Uses reflection . "
(fn [X Y & more] (if (seq more)
:more
[(class X) (class Y)])))
(defmethod ?max? [PersistentStructMap PersistentStructMap] [X Y & _] (_max_ X Y))
(defmethod ?max? [PersistentStructMap Number] [X Y & _] (_max X Y))
(defmethod ?max? [Number PersistentStructMap] [X Y & _] ( max_ X Y))
(defmethod ?max? [Number Number] [X Y & _] ( max X Y))
(defmethod ?max? :more [X Y & more] (reduce ?max? (?max? X Y) more))
(def fuzzy-arithmetic-mapping
'{+ clj-misc.varprop/?+?
- clj-misc.varprop/?-?
* clj-misc.varprop/?*?
/ clj-misc.varprop/?d?
< clj-misc.varprop/?<?
> clj-misc.varprop/?>?
min clj-misc.varprop/?min?
max clj-misc.varprop/?max?})
(defn ensure-fuzzy
"If value is a FuzzyNumber, return it. Otherwise, make it into a
FuzzyNumber."
[value]
(if (number? value)
(fuzzy-number value 0.0)
value))
(defn fuzzify-fn
"Transforms f into its fuzzy arithmetic equivalent, using the
mappings defined in fuzzy-arithmetic-mapping."
[f]
(if-let [new-f (fuzzy-arithmetic-mapping f)]
new-f
(if (list? f)
(let [[lambda args & body] f]
(if (and (= lambda 'fn) (vector? args))
`(comp ensure-fuzzy (~lambda ~args ~@(replace-all fuzzy-arithmetic-mapping body)))
f))
f)))
;; (defmacro rv-fn
;;   "Transforms f into its fuzzy arithmetic equivalent, fuzzy-f, and
;;    calls (apply fuzzy-f Xs). Uses reflection on the types of Xs as
;;    well as any numeric literal values used in f."
;;   [f & Xs]
;;   `(~(fuzzify-fn f) ~@Xs))
(defn rv-fn
"Transforms f into its fuzzy arithmetic equivalent, fuzzy-f, and
calls (apply fuzzy-f Xs). Uses reflection on the types of Xs as
well as any numeric literal values used in f."
[f & Xs]
(apply (eval (fuzzify-fn f)) Xs))
(defn rv-mean
"Returns the mean of a FuzzyNumber."
[X]
(:mean X))
(defn rv-variance
"Returns the variance of a FuzzyNumber."
[X]
(:var X))
(defn rv-stdev
"Returns the standard deviation of a FuzzyNumber."
[X]
(Math/sqrt (:var X)))
(defn rv-sum
"Returns the sum of a sequence of FuzzyNumbers using _+_."
[Xs]
(cond (empty? Xs)
_0_
(== (count Xs) 1)
(first Xs)
(<= (count Xs) 20)
(reduce _+_ Xs)
:otherwise
(recur (pmap rv-sum
(my-partition-all 20 Xs)))))
(defn rv-extensive-sampler
"Returns the extensive weighted sum of a coverage (i.e. a sequence
of pairs of [value fraction-covered])."
[coverage]
(rv-sum (map (fn [[val frac]] (_* val frac)) coverage)))
(defn rv-intensive-sampler
"Returns the intensive weighted sum of a coverage (i.e. a sequence
of pairs of [value fraction-covered])."
[coverage]
(let [frac-sum (reduce + (map second coverage))]
(rv-sum (map (fn [[val frac]] (_* val (/ frac frac-sum))) coverage))))
(defn rv-distribution-sampler
"Returns the distribution of the means of a coverage (i.e. a
sequence of pairs of [value fraction-covered])."
[coverage]
(let [frac-sum (reduce + (map second coverage))
states (map (comp rv-mean first) coverage)
probs (map #(/ (second %) frac-sum) coverage)]
(create-from-states states probs)))
(let [stored-val (atom nil)]
(defn marsaglia-normal
"Returns a value from X~N(0,1). Uses the Marsaglia polar
method. Memoizes extra computed values for quicker lookups on
even calls."
[]
(when-let [normal-val @stored-val]
(swap! stored-val (constantly nil))
normal-val)
(let [v1 (dec (* 2.0 (rand)))
v2 (dec (* 2.0 (rand)))
s (+ (* v1 v1) (* v2 v2))]
(if (and (not= s 0.0) (< s 1.0))
(let [theta (Math/sqrt (/ (* -2.0 (Math/log s)) s))]
(swap! stored-val (constantly (* v1 theta)))
(* v2 theta))
(recur)))))
(let [stored-val (atom nil)]
(defn box-muller-normal
"Returns a value from X~N(0,1). Uses the Box-Muller
transform. Memoizes extra computed values for quicker lookups on
even calls."
[]
(when-let [normal-val @stored-val]
(swap! stored-val (constantly nil))
normal-val)
    (let [u1 (+ (rand) 1e-6) ;; adding delta=1e-6 to prevent computing log(0) below
u2 (rand)
r (Math/sqrt (* -2.0 (Math/log u1)))
theta (* 2.0 Math/PI u2)
n1 (* r (Math/cos theta))
n2 (* r (Math/sin theta))]
(swap! stored-val (constantly n1))
n2)))
(defn draw
"Extracts a deterministic value from a FuzzyNumber by modelling it
as a normal distribution."
[X]
(+ (* (marsaglia-normal) (Math/sqrt (:var X))) (:mean X)))
(defn draw-repeatedly
"Takes a FuzzyNumber X, and returns an infinite lazy sequence of
normally-distributed, pseudo-random numbers that match the
parameters of X, (or a finite sequence of length n, if an integer n
is provided)."
([{:keys [mean var]}]
(let [sigma (Math/sqrt var)]
(map #(+ (* sigma %) mean) (repeatedly marsaglia-normal))))
([n X]
(take n (draw-repeatedly X))))
|
2c9e3b56e9a12553b6111ef9379b5bba9b3e96ff16d2bbd728afe0f265acf61a | deadtrickster/prometheus.cl | summary.lisp | (in-package :prometheus.test)
(plan 2)
(subtest "Simple Summary"
(subtest "Errors & Validations"
(is-error-report (prom:summary.observe nil "qwe") prom:invalid-value-error "Value \"qwe\" is invalid. Reason: value is not a number")
(is-error-report (prom:make-summary :name "qwe" :help "" :value 12 :labels '("qwe") :registry nil) prom:invalid-value-error "Value 12 is invalid. Reason: can only specify at most one of value and labels"))
(subtest "OBSERVE"
(let* ((s (prom:make-simple-summary :name "qwe" :help "" :value 12 :count 2 :registry nil))
(nlm (prom:get-metric s nil)))
(is (prom:summary-sum nlm) 12)
(is (prom:summary-count nlm) 2)
(prom:summary.observe nlm 2)
(is (prom:summary-sum nlm) 14)
(is (prom:summary-count nlm) 3)))
(subtest "TIME"
(let* ((s (prom:make-simple-summary :name "qwe" :help "" :registry nil))
(nlm (prom:get-metric s nil)))
(prom:summary.time nlm (sleep 0.5))
(ok (and (>= (prom:summary-sum nlm) 500)
(< (prom:summary-sum nlm) 510)))
(is (prom:summary-count nlm) 1)))
(subtest "REGISTRY"
(with-fresh-registry
(let ((s (prom:make-simple-summary :name "qwe" :help "" :value 12)))
(is (prom:registeredp s prom:*default-registry*) s)))))
(subtest "Summary"
(subtest "Errors & Validations"
(is-error-report (prom:summary.observe nil "qwe") prom:invalid-value-error "Value \"qwe\" is invalid. Reason: value is not a number")
(is-error-report (prom:make-summary :name "qwe" :help "" :value 12 :labels '("qwe") :registry nil) prom:invalid-value-error "Value 12 is invalid. Reason: can only specify at most one of value and labels"))
(subtest "OBSERVE"
(let* ((s (prom:make-summary :name "qwe" :help "" :value 3 :registry nil))
(nlm (prom:get-metric s nil)))
(is (prom:summary-sum nlm) 3)
(is (prom:summary-count nlm) 1)
(prom:summary.observe nlm 5.2)
(prom:summary.observe nlm 13)
(prom:summary.observe nlm 4)
(is (prom:summary-sum nlm) 25.2)
(is (prom:summary-count nlm) 4)
(is (prom:summary-quantiles nlm) '((0.5d0 . 4) (0.9d0 . 5.2) (0.99d0 . 5.2)))))
(subtest "TIME"
(let* ((s (prom:make-summary :name "qwe" :help "" :registry nil))
(nlm (prom:get-metric s nil)))
(prom:summary.time nlm (sleep 0.5))
(ok (and (>= (prom:summary-sum nlm) 500)
(< (prom:summary-sum nlm) 510)))
(is (prom:summary-count nlm) 1)))
(subtest "REGISTRY"
(with-fresh-registry
(let ((s (prom:make-simple-summary :name "qwe" :help "" :value 12)))
(is (prom:registeredp s prom:*default-registry*) s)))))
(finalize)
| null | https://raw.githubusercontent.com/deadtrickster/prometheus.cl/60572b793135e8ab5a857d47cc1a5fe0af3a2d53/t/prometheus/summary.lisp | lisp | (in-package :prometheus.test)
(plan 2)
(subtest "Simple Summary"
(subtest "Errors & Validations"
(is-error-report (prom:summary.observe nil "qwe") prom:invalid-value-error "Value \"qwe\" is invalid. Reason: value is not a number")
(is-error-report (prom:make-summary :name "qwe" :help "" :value 12 :labels '("qwe") :registry nil) prom:invalid-value-error "Value 12 is invalid. Reason: can only specify at most one of value and labels"))
(subtest "OBSERVE"
(let* ((s (prom:make-simple-summary :name "qwe" :help "" :value 12 :count 2 :registry nil))
(nlm (prom:get-metric s nil)))
(is (prom:summary-sum nlm) 12)
(is (prom:summary-count nlm) 2)
(prom:summary.observe nlm 2)
(is (prom:summary-sum nlm) 14)
(is (prom:summary-count nlm) 3)))
(subtest "TIME"
(let* ((s (prom:make-simple-summary :name "qwe" :help "" :registry nil))
(nlm (prom:get-metric s nil)))
(prom:summary.time nlm (sleep 0.5))
(ok (and (>= (prom:summary-sum nlm) 500)
(< (prom:summary-sum nlm) 510)))
(is (prom:summary-count nlm) 1)))
(subtest "REGISTRY"
(with-fresh-registry
(let ((s (prom:make-simple-summary :name "qwe" :help "" :value 12)))
(is (prom:registeredp s prom:*default-registry*) s)))))
(subtest "Summary"
(subtest "Errors & Validations"
(is-error-report (prom:summary.observe nil "qwe") prom:invalid-value-error "Value \"qwe\" is invalid. Reason: value is not a number")
(is-error-report (prom:make-summary :name "qwe" :help "" :value 12 :labels '("qwe") :registry nil) prom:invalid-value-error "Value 12 is invalid. Reason: can only specify at most one of value and labels"))
(subtest "OBSERVE"
(let* ((s (prom:make-summary :name "qwe" :help "" :value 3 :registry nil))
(nlm (prom:get-metric s nil)))
(is (prom:summary-sum nlm) 3)
(is (prom:summary-count nlm) 1)
(prom:summary.observe nlm 5.2)
(prom:summary.observe nlm 13)
(prom:summary.observe nlm 4)
(is (prom:summary-sum nlm) 25.2)
(is (prom:summary-count nlm) 4)
(is (prom:summary-quantiles nlm) '((0.5d0 . 4) (0.9d0 . 5.2) (0.99d0 . 5.2)))))
(subtest "TIME"
(let* ((s (prom:make-summary :name "qwe" :help "" :registry nil))
(nlm (prom:get-metric s nil)))
(prom:summary.time nlm (sleep 0.5))
(ok (and (>= (prom:summary-sum nlm) 500)
(< (prom:summary-sum nlm) 510)))
(is (prom:summary-count nlm) 1)))
(subtest "REGISTRY"
(with-fresh-registry
(let ((s (prom:make-simple-summary :name "qwe" :help "" :value 12)))
(is (prom:registeredp s prom:*default-registry*) s)))))
(finalize)
|
|
f00a0694af32c9277305a78d377471948aacd1e678df0e3c28b7b1691f017542 | tfausak/blunt | Main.hs | module Main (module Blunt) where
import Blunt (main)
| null | https://raw.githubusercontent.com/tfausak/blunt/a44af0e06138bae94d7fd51330eb5602e7f3d5a3/executable/Main.hs | haskell | module Main (module Blunt) where
import Blunt (main)
|
|
43f1322fefc8dc6aa8519ef90f7dba8ea2f8ffa0019c4abf84eae569754bed19 | facebook/pyre-check | log.ml |
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* TODO(T132410158) Add a module-level doc comment. *)
open Core
type section =
[ `Check
| `Debug
| `Dependencies
| `DependencyGraph
| `Dump
| `Environment
| `Error
| `Event
| `Fixpoint
| `Infer
| `Info
| `Interprocedural
| `Memory
| `Performance
| `Progress
| `Protocols
| `Server
| `Taint
| `Warning
]
let section_to_string = function
| `Check -> "Check"
| `Debug -> "Debug"
| `Dependencies -> "Dependencies"
| `DependencyGraph -> "DependencyGraph"
| `Dump -> "Dump"
| `Environment -> "Environment"
| `Error -> "Error"
| `Event -> "Event"
| `Fixpoint -> "Fixpoint"
| `Info -> "Info"
| `Infer -> "Infer"
| `Interprocedural -> "Interprocedural"
| `Memory -> "Memory"
| `Performance -> "Performance"
| `Progress -> "Progress"
| `Protocols -> "Protocols"
| `Server -> "Server"
| `Taint -> "Taint"
| `Warning -> "Warning"
module GlobalState = struct
let enabled =
String.Hash_set.of_list
["Dump"; "Error"; "Info"; "Memory"; "Progress"; "Performance"; "Warning"]
let initialize ~debug ~sections =
if debug then
Hash_set.add enabled "Debug";
let handle_section section =
let normalize section = String.lowercase section |> String.capitalize in
match String.chop_prefix ~prefix:"-" section with
| Some section -> normalize section |> Hash_set.remove enabled
| None -> normalize section |> Hash_set.add enabled
in
List.iter ~f:handle_section sections
let initialize_for_tests () =
Hash_set.clear enabled;
Hash_set.add enabled "Dump"
type t = string list
let get () = Hash_set.to_list enabled
let restore saved_state =
Hash_set.clear enabled;
List.iter saved_state ~f:(Hash_set.add enabled)
end
let is_enabled section = Hash_set.mem GlobalState.enabled (section_to_string section)
let time_zone = ref None
(* A safer version of Time_unix.Zone.local, which defaults to UTC instead of throwing an exception
   if we cannot figure out local time. See for one
   example when this can happen *)
let get_time_zone () =
match !time_zone with
| Some zone -> zone
| None ->
let zone =
try force Time_unix.Zone.local with
| _ -> Time_unix.Zone.utc
in
time_zone := Some zone;
zone
let log ~section format =
let section = section_to_string section in
if Hash_set.mem GlobalState.enabled section then
let zone = get_time_zone () in
Format.fprintf
Format.err_formatter
("%s %s " ^^ format ^^ "\n%!")
(Time_unix.format ~zone (Time_unix.now ()) "%Y-%m-%d %H:%M:%S")
(String.uppercase section)
else
Format.ifprintf Format.err_formatter format
let debug format = log ~section:`Debug format
let dump format = log ~section:`Dump format
let info format = log ~section:`Info format
let error format = log ~section:`Error format
let warning format = log ~section:`Warning format
let print format = Printf.printf format
let log_unix_error ?(section = `Error) (error_kind, name, parameters) =
log ~section "Unix error %s: %s(%s)" (Core_unix.error_message error_kind) name parameters
let log_exception message exception_to_log backtrace =
error
"%s\nException: %s\nBacktrace:\n%s"
message
(Exn.to_string exception_to_log)
(Caml.Printexc.raw_backtrace_to_string backtrace)
module Color = struct
let cyan string = Format.asprintf "\027[36m%s\027[0m" string
let red string = Format.asprintf "\027[31m%s\027[0m" string
let yellow string = Format.asprintf "\027[33m%s\027[0m" string
end
let truncate ~size message =
let drop_size = String.length message - Int.max 0 size in
if drop_size <= 0 then
message
else
let truncated = String.drop_suffix message drop_size in
Format.sprintf "%s..(truncated %d bytes)" truncated drop_size
| null | https://raw.githubusercontent.com/facebook/pyre-check/c03d27aceea9f2ed911a18a81615721614c32b31/source/log.ml | ocaml | TODO(T132410158) Add a module-level doc comment. |
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Core
type section =
[ `Check
| `Debug
| `Dependencies
| `DependencyGraph
| `Dump
| `Environment
| `Error
| `Event
| `Fixpoint
| `Infer
| `Info
| `Interprocedural
| `Memory
| `Performance
| `Progress
| `Protocols
| `Server
| `Taint
| `Warning
]
let section_to_string = function
| `Check -> "Check"
| `Debug -> "Debug"
| `Dependencies -> "Dependencies"
| `DependencyGraph -> "DependencyGraph"
| `Dump -> "Dump"
| `Environment -> "Environment"
| `Error -> "Error"
| `Event -> "Event"
| `Fixpoint -> "Fixpoint"
| `Info -> "Info"
| `Infer -> "Infer"
| `Interprocedural -> "Interprocedural"
| `Memory -> "Memory"
| `Performance -> "Performance"
| `Progress -> "Progress"
| `Protocols -> "Protocols"
| `Server -> "Server"
| `Taint -> "Taint"
| `Warning -> "Warning"
module GlobalState = struct
let enabled =
String.Hash_set.of_list
["Dump"; "Error"; "Info"; "Memory"; "Progress"; "Performance"; "Warning"]
let initialize ~debug ~sections =
if debug then
Hash_set.add enabled "Debug";
let handle_section section =
let normalize section = String.lowercase section |> String.capitalize in
match String.chop_prefix ~prefix:"-" section with
| Some section -> normalize section |> Hash_set.remove enabled
| None -> normalize section |> Hash_set.add enabled
in
List.iter ~f:handle_section sections
let initialize_for_tests () =
Hash_set.clear enabled;
Hash_set.add enabled "Dump"
type t = string list
let get () = Hash_set.to_list enabled
let restore saved_state =
Hash_set.clear enabled;
List.iter saved_state ~f:(Hash_set.add enabled)
end
let is_enabled section = Hash_set.mem GlobalState.enabled (section_to_string section)
let time_zone = ref None
(* A safer version of Time_unix.Zone.local, which defaults to UTC instead of throwing an exception
   if we cannot figure out local time. See for one
   example when this can happen *)
let get_time_zone () =
match !time_zone with
| Some zone -> zone
| None ->
let zone =
try force Time_unix.Zone.local with
| _ -> Time_unix.Zone.utc
in
time_zone := Some zone;
zone
let log ~section format =
let section = section_to_string section in
if Hash_set.mem GlobalState.enabled section then
let zone = get_time_zone () in
Format.fprintf
Format.err_formatter
("%s %s " ^^ format ^^ "\n%!")
(Time_unix.format ~zone (Time_unix.now ()) "%Y-%m-%d %H:%M:%S")
(String.uppercase section)
else
Format.ifprintf Format.err_formatter format
let debug format = log ~section:`Debug format
let dump format = log ~section:`Dump format
let info format = log ~section:`Info format
let error format = log ~section:`Error format
let warning format = log ~section:`Warning format
let print format = Printf.printf format
let log_unix_error ?(section = `Error) (error_kind, name, parameters) =
log ~section "Unix error %s: %s(%s)" (Core_unix.error_message error_kind) name parameters
let log_exception message exception_to_log backtrace =
error
"%s\nException: %s\nBacktrace:\n%s"
message
(Exn.to_string exception_to_log)
(Caml.Printexc.raw_backtrace_to_string backtrace)
module Color = struct
let cyan string = Format.asprintf "\027[36m%s\027[0m" string
let red string = Format.asprintf "\027[31m%s\027[0m" string
let yellow string = Format.asprintf "\027[33m%s\027[0m" string
end
let truncate ~size message =
let drop_size = String.length message - Int.max 0 size in
if drop_size <= 0 then
message
else
let truncated = String.drop_suffix message drop_size in
Format.sprintf "%s..(truncated %d bytes)" truncated drop_size
|
3086de5992f28b19f06e44a4126b3df1d0b3ade222f90859ea4cf409ca08c3d1 | apache/couchdb-couch | couch_passwords.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
%     http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_passwords).
-export([simple/2, pbkdf2/3, pbkdf2/4, verify/2]).
-export([hash_admin_password/1, get_unhashed_admins/0]).
-include_lib("couch/include/couch_db.hrl").
-define(MAX_DERIVED_KEY_LENGTH, (1 bsl 32 - 1)).
-define(SHA1_OUTPUT_LENGTH, 20).
%% legacy scheme, not used for new passwords.
-spec simple(binary(), binary()) -> binary().
simple(Password, Salt) when is_binary(Password), is_binary(Salt) ->
?l2b(couch_util:to_hex(couch_crypto:hash(sha, <<Password/binary, Salt/binary>>))).
%% CouchDB utility functions
-spec hash_admin_password(binary() | list()) -> binary().
hash_admin_password(ClearPassword) when is_list(ClearPassword) ->
hash_admin_password(?l2b(ClearPassword));
hash_admin_password(ClearPassword) when is_binary(ClearPassword) ->
%% Support both schemes to smooth migration from legacy scheme
Scheme = config:get("couch_httpd_auth", "password_scheme", "pbkdf2"),
hash_admin_password(Scheme, ClearPassword).
hash_admin_password("simple", ClearPassword) -> % deprecated
Salt = couch_uuids:random(),
Hash = couch_crypto:hash(sha, <<ClearPassword/binary, Salt/binary>>),
?l2b("-hashed-" ++ couch_util:to_hex(Hash) ++ "," ++ ?b2l(Salt));
hash_admin_password("pbkdf2", ClearPassword) ->
Iterations = config:get("couch_httpd_auth", "iterations", "10000"),
Salt = couch_uuids:random(),
DerivedKey = couch_passwords:pbkdf2(couch_util:to_binary(ClearPassword),
Salt ,list_to_integer(Iterations)),
?l2b("-pbkdf2-" ++ ?b2l(DerivedKey) ++ ","
++ ?b2l(Salt) ++ ","
++ Iterations).
-spec get_unhashed_admins() -> list().
get_unhashed_admins() ->
lists:filter(
fun({_User, "-hashed-" ++ _}) ->
false; % already hashed
({_User, "-pbkdf2-" ++ _}) ->
false; % already hashed
({_User, _ClearPassword}) ->
true
end,
config:get("admins")).
%% Current scheme, much stronger.
-spec pbkdf2(binary(), binary(), integer()) -> binary().
pbkdf2(Password, Salt, Iterations) when is_binary(Password),
is_binary(Salt),
is_integer(Iterations),
Iterations > 0 ->
{ok, Result} = pbkdf2(Password, Salt, Iterations, ?SHA1_OUTPUT_LENGTH),
Result.
-spec pbkdf2(binary(), binary(), integer(), integer())
-> {ok, binary()} | {error, derived_key_too_long}.
pbkdf2(_Password, _Salt, _Iterations, DerivedLength)
when DerivedLength > ?MAX_DERIVED_KEY_LENGTH ->
{error, derived_key_too_long};
pbkdf2(Password, Salt, Iterations, DerivedLength) when is_binary(Password),
is_binary(Salt),
is_integer(Iterations),
Iterations > 0,
is_integer(DerivedLength) ->
L = ceiling(DerivedLength / ?SHA1_OUTPUT_LENGTH),
<<Bin:DerivedLength/binary,_/binary>> =
iolist_to_binary(pbkdf2(Password, Salt, Iterations, L, 1, [])),
{ok, ?l2b(couch_util:to_hex(Bin))}.
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(), iolist())
-> iolist().
pbkdf2(_Password, _Salt, _Iterations, BlockCount, BlockIndex, Acc)
when BlockIndex > BlockCount ->
lists:reverse(Acc);
pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex, Acc) ->
Block = pbkdf2(Password, Salt, Iterations, BlockIndex, 1, <<>>, <<>>),
pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex + 1, [Block|Acc]).
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(),
binary(), binary()) -> binary().
pbkdf2(_Password, _Salt, Iterations, _BlockIndex, Iteration, _Prev, Acc)
when Iteration > Iterations ->
Acc;
pbkdf2(Password, Salt, Iterations, BlockIndex, 1, _Prev, _Acc) ->
InitialBlock = couch_crypto:hmac(sha, Password,
<<Salt/binary,BlockIndex:32/integer>>),
pbkdf2(Password, Salt, Iterations, BlockIndex, 2,
InitialBlock, InitialBlock);
pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration, Prev, Acc) ->
Next = couch_crypto:hmac(sha, Password, Prev),
pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration + 1,
Next, crypto:exor(Next, Acc)).
%% verify two lists for equality without short-circuits to avoid timing attacks.
-spec verify(string(), string(), integer()) -> boolean().
verify([X|RestX], [Y|RestY], Result) ->
verify(RestX, RestY, (X bxor Y) bor Result);
verify([], [], Result) ->
Result == 0.
-spec verify(binary(), binary()) -> boolean();
(list(), list()) -> boolean().
verify(<<X/binary>>, <<Y/binary>>) ->
verify(?b2l(X), ?b2l(Y));
verify(X, Y) when is_list(X) and is_list(Y) ->
case length(X) == length(Y) of
true ->
verify(X, Y, 0);
false ->
false
end;
verify(_X, _Y) -> false.
-spec ceiling(number()) -> integer().
ceiling(X) ->
T = erlang:trunc(X),
case (X - T) of
Neg when Neg < 0 -> T;
Pos when Pos > 0 -> T + 1;
_ -> T
end.
| null | https://raw.githubusercontent.com/apache/couchdb-couch/21c8d37ac6ee1a7fed1de1f54f95a4d3cd9f5248/src/couch_passwords.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
legacy scheme, not used for new passwords.
CouchDB utility functions
Support both schemes to smooth migration from legacy scheme
deprecated
already hashed
already hashed
Current scheme, much stronger. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(couch_passwords).
-export([simple/2, pbkdf2/3, pbkdf2/4, verify/2]).
-export([hash_admin_password/1, get_unhashed_admins/0]).
-include_lib("couch/include/couch_db.hrl").
-define(MAX_DERIVED_KEY_LENGTH, (1 bsl 32 - 1)).
-define(SHA1_OUTPUT_LENGTH, 20).
-spec simple(binary(), binary()) -> binary().
simple(Password, Salt) when is_binary(Password), is_binary(Salt) ->
?l2b(couch_util:to_hex(couch_crypto:hash(sha, <<Password/binary, Salt/binary>>))).
-spec hash_admin_password(binary() | list()) -> binary().
hash_admin_password(ClearPassword) when is_list(ClearPassword) ->
hash_admin_password(?l2b(ClearPassword));
hash_admin_password(ClearPassword) when is_binary(ClearPassword) ->
Scheme = config:get("couch_httpd_auth", "password_scheme", "pbkdf2"),
hash_admin_password(Scheme, ClearPassword).
Salt = couch_uuids:random(),
Hash = couch_crypto:hash(sha, <<ClearPassword/binary, Salt/binary>>),
?l2b("-hashed-" ++ couch_util:to_hex(Hash) ++ "," ++ ?b2l(Salt));
hash_admin_password("pbkdf2", ClearPassword) ->
Iterations = config:get("couch_httpd_auth", "iterations", "10000"),
Salt = couch_uuids:random(),
DerivedKey = couch_passwords:pbkdf2(couch_util:to_binary(ClearPassword),
Salt ,list_to_integer(Iterations)),
?l2b("-pbkdf2-" ++ ?b2l(DerivedKey) ++ ","
++ ?b2l(Salt) ++ ","
++ Iterations).
-spec get_unhashed_admins() -> list().
get_unhashed_admins() ->
lists:filter(
fun({_User, "-hashed-" ++ _}) ->
({_User, "-pbkdf2-" ++ _}) ->
({_User, _ClearPassword}) ->
true
end,
config:get("admins")).
-spec pbkdf2(binary(), binary(), integer()) -> binary().
pbkdf2(Password, Salt, Iterations) when is_binary(Password),
is_binary(Salt),
is_integer(Iterations),
Iterations > 0 ->
{ok, Result} = pbkdf2(Password, Salt, Iterations, ?SHA1_OUTPUT_LENGTH),
Result.
-spec pbkdf2(binary(), binary(), integer(), integer())
-> {ok, binary()} | {error, derived_key_too_long}.
pbkdf2(_Password, _Salt, _Iterations, DerivedLength)
when DerivedLength > ?MAX_DERIVED_KEY_LENGTH ->
{error, derived_key_too_long};
pbkdf2(Password, Salt, Iterations, DerivedLength) when is_binary(Password),
is_binary(Salt),
is_integer(Iterations),
Iterations > 0,
is_integer(DerivedLength) ->
L = ceiling(DerivedLength / ?SHA1_OUTPUT_LENGTH),
<<Bin:DerivedLength/binary,_/binary>> =
iolist_to_binary(pbkdf2(Password, Salt, Iterations, L, 1, [])),
{ok, ?l2b(couch_util:to_hex(Bin))}.
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(), iolist())
-> iolist().
pbkdf2(_Password, _Salt, _Iterations, BlockCount, BlockIndex, Acc)
when BlockIndex > BlockCount ->
lists:reverse(Acc);
pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex, Acc) ->
Block = pbkdf2(Password, Salt, Iterations, BlockIndex, 1, <<>>, <<>>),
pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex + 1, [Block|Acc]).
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(),
binary(), binary()) -> binary().
pbkdf2(_Password, _Salt, Iterations, _BlockIndex, Iteration, _Prev, Acc)
when Iteration > Iterations ->
Acc;
pbkdf2(Password, Salt, Iterations, BlockIndex, 1, _Prev, _Acc) ->
InitialBlock = couch_crypto:hmac(sha, Password,
<<Salt/binary,BlockIndex:32/integer>>),
pbkdf2(Password, Salt, Iterations, BlockIndex, 2,
InitialBlock, InitialBlock);
pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration, Prev, Acc) ->
Next = couch_crypto:hmac(sha, Password, Prev),
pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration + 1,
Next, crypto:exor(Next, Acc)).
%% verify two lists for equality without short-circuits to avoid timing attacks.
-spec verify(string(), string(), integer()) -> boolean().
verify([X|RestX], [Y|RestY], Result) ->
verify(RestX, RestY, (X bxor Y) bor Result);
verify([], [], Result) ->
Result == 0.
-spec verify(binary(), binary()) -> boolean();
(list(), list()) -> boolean().
verify(<<X/binary>>, <<Y/binary>>) ->
verify(?b2l(X), ?b2l(Y));
verify(X, Y) when is_list(X) and is_list(Y) ->
case length(X) == length(Y) of
true ->
verify(X, Y, 0);
false ->
false
end;
verify(_X, _Y) -> false.
-spec ceiling(number()) -> integer().
ceiling(X) ->
T = erlang:trunc(X),
case (X - T) of
Neg when Neg < 0 -> T;
Pos when Pos > 0 -> T + 1;
_ -> T
end.
|
7263d780e3a662d449497bc4fa4a28764dbaca486fa736afe505a83a5dcae3ae | GaloisInc/pads-haskell | Pnm.hs | # LANGUAGE TypeSynonymInstances , TemplateHaskell , QuasiQuotes , MultiParamTypeClasses , FlexibleInstances , DeriveDataTypeable , NamedFieldPuns , ScopedTypeVariables #
module Pnm where
import qualified Data.Char as Char
import Language.Pads.Padsc
import Control.Monad
_ws = one_or_more Char.isSpace
where one_or_more = undefined
ws, wsnl, whitechar :: RE
ws = REd "[ \t\n\r]+" " " -- whitespace
wsnl = let REd wplus _ = ws in REd wplus "\n" -- whitespace output as \n
-- one white character
[pads|
data PGMx a = PGM "P5" ws Header whitechar (Pixmap a)
data Header = Header -- fields should be separated by whitespace
{ width :: Int
ws , height :: Int
wsnl , constrain denominator :: Int
where <| 0 <= denominator && denominator < 65536 |>
}
data Pixmap a (h::Header) = Rows [Row a h | wsnl] length <| height h |>
data Row a (h::Header) = Pixels [a h | ws] length <| width h |>
newtype Greypix (h::Header) =
G constrain g::Int16 where <| 0 <= g && g <= denominator h |>
data PGM = PGMx Int16 Greypix
|]
pgm file = do (rep, md) <- parseFile file
return rep | null | https://raw.githubusercontent.com/GaloisInc/pads-haskell/ffa01030cd1c1edf5f50df53f3783ff28bf43c7e/Examples/Pnm.hs | haskell | whitespace
whitespace output as \n
fields should be separated by whitespace | # LANGUAGE TypeSynonymInstances , TemplateHaskell , QuasiQuotes , MultiParamTypeClasses , FlexibleInstances , DeriveDataTypeable , NamedFieldPuns , ScopedTypeVariables #
module Pnm where
import qualified Data.Char as Char
import Language.Pads.Padsc
import Control.Monad
_ws = one_or_more Char.isSpace
where one_or_more = undefined
ws, wsnl, whitechar :: RE
-- one white character
[pads|
data PGMx a = PGM "P5" ws Header whitechar (Pixmap a)
{ width :: Int
ws , height :: Int
wsnl , constrain denominator :: Int
where <| 0 <= denominator && denominator < 65536 |>
}
data Pixmap a (h::Header) = Rows [Row a h | wsnl] length <| height h |>
data Row a (h::Header) = Pixels [a h | ws] length <| width h |>
newtype Greypix (h::Header) =
G constrain g::Int16 where <| 0 <= g && g <= denominator h |>
data PGM = PGMx Int16 Greypix
|]
pgm file = do (rep, md) <- parseFile file
return rep |
c28802519f3a1d62dc4bcfcab375d1dd815ebcaab1a34c5e2959fd05b755f929 | jobjo/popper | search.mli | type 'a result =
{ num_attempts : int
; num_explored : int
; node : 'a
}
module type Config = sig
type t
val max_tries : int
val compare : t -> t -> int
val keep : t -> t option
val modify : t -> t Random.t
end
module type S = sig
type t
val search : t -> t result Random.t
end
module Make (C : Config) : S with type t = C.t
| null | https://raw.githubusercontent.com/jobjo/popper/33da372946d1d842f75994e086fa81c8cf62986e/src/lib/search.mli | ocaml | type 'a result =
{ num_attempts : int
; num_explored : int
; node : 'a
}
module type Config = sig
type t
val max_tries : int
val compare : t -> t -> int
val keep : t -> t option
val modify : t -> t Random.t
end
module type S = sig
type t
val search : t -> t result Random.t
end
module Make (C : Config) : S with type t = C.t
|
|
ce0fd033b23920526ea44383ddb1706c765d0a9c8804425ba056cd78b4cea6dd | stchang/macrotypes | dep-ind-cur2+eq.rkt | #lang s-exp "dep-ind-cur2.rkt"
(require (only-in turnstile+ struct #%app- void- define-typed-syntax ⇒ ⇐ ≫ ⊢ ≻)
"dep-ind-cur2+sugar.rkt")
;; eq lib for
(provide = eq-refl eq-elim)
;; equality -------------------------------------------------------------------
(struct =- (l r) #:transparent)
(define-typed-syntax (= t1 t2) ≫
[⊢ t1 ≫ t1- ⇒ ty]
[⊢ t2 ≫ t2- ⇐ ty]
---------------------
[⊢ (#%app- =- t1- t2-) ⇒ Type])
(define-typed-syntax (eq-refl e) ≫
[⊢ e ≫ e- ⇒ _ (⇒ ~Type)]
----------
[⊢ (#%app- void-) ⇒ (= e- e-)])
;; eq-elim: t : T
;; P : (T -> Type)
;; pt : (P t)
;; w : T
;; peq : (= t w)
;; -> (P w)
(define-typed-syntax (eq-elim t P pt w peq) ≫
[⊢ t ≫ t- ⇒ ty]
[⊢ P ≫ P- ⇐ (→ ty Type)]
[⊢ pt ≫ pt- ⇐ (P- t-)]
[⊢ w ≫ w- ⇐ ty]
[⊢ peq ≫ peq- ⇐ (= t- w-)]
--------------
[⊢ pt- ⇒ (P- w-)])
| null | https://raw.githubusercontent.com/stchang/macrotypes/05ec31f2e1fe0ddd653211e041e06c6c8071ffa6/turnstile-example/turnstile/examples/dep/dep-ind-cur2%2Beq.rkt | racket | equality -------------------------------------------------------------------
eq-elim: t : T
P : (T -> Type)
pt : (P t)
w : T
-> (P w) | #lang s-exp "dep-ind-cur2.rkt"
(require (only-in turnstile+ struct #%app- void- define-typed-syntax ⇒ ⇐ ≫ ⊢ ≻)
"dep-ind-cur2+sugar.rkt")
eq lib for
(provide = eq-refl eq-elim)
(struct =- (l r) #:transparent)
(define-typed-syntax (= t1 t2) ≫
[⊢ t1 ≫ t1- ⇒ ty]
[⊢ t2 ≫ t2- ⇐ ty]
---------------------
[⊢ (#%app- =- t1- t2-) ⇒ Type])
(define-typed-syntax (eq-refl e) ≫
[⊢ e ≫ e- ⇒ _ (⇒ ~Type)]
----------
[⊢ (#%app- void-) ⇒ (= e- e-)])
peq : (= t w )
(define-typed-syntax (eq-elim t P pt w peq) ≫
[⊢ t ≫ t- ⇒ ty]
[⊢ P ≫ P- ⇐ (→ ty Type)]
[⊢ pt ≫ pt- ⇐ (P- t-)]
[⊢ w ≫ w- ⇐ ty]
[⊢ peq ≫ peq- ⇐ (= t- w-)]
--------------
[⊢ pt- ⇒ (P- w-)])
|
64446f5ee59f5b06f2bda3375b7c0b962e6b3146b4ad0ca024c102ed6b976f1d | heyoka/faxe | esp_bottom.erl | Date : 09.12.16 - 18:02
%% Ⓒ 2016 heyoka
-module(esp_bottom).
-author("Alexander Minichmair").
-include("faxe.hrl").
-behavior(esp_stats).
%% API
-export([execute/2, options/0]).
options() ->
esp_stats:get_options() ++ [{num, integer, 1},{module, atom, ?MODULE}].
execute({Tss, Values}, #{num := Num}) when is_list(Values) ->
lager:debug("execute with: ~p",[Values]),
New = lists:zip(Tss, Values),
Sorted = lists:usort( fun({_Ts, V1}, {_Ts2, V2}) -> V2 =< V1 end, New),
Len = length(Sorted),
RLen = case Len >= Num of true -> Len-Num; false -> 0 end,
lists:unzip(lists:reverse(lists:nthtail(RLen, Sorted))).
-ifdef(TEST).
basic_test() ->
?assertEqual(
{[8,5,2,4,6],[319,322,326,328,331]},
esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>5})
),
?assertEqual(
{[8,5,2,4,6,3,9,7,1],[319,322,326,328,331,354,377,388,399]},
esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>65})
).
-endif. | null | https://raw.githubusercontent.com/heyoka/faxe/e539afe8b62790a6037914751deef7d815be11a2/apps/faxe/src/components/stats/esp_bottom.erl | erlang | API | Date : 09.12.16 - 18:02
Ⓒ 2016 heyoka
-module(esp_bottom).
-author("Alexander Minichmair").
-include("faxe.hrl").
-behavior(esp_stats).
-export([execute/2, options/0]).
options() ->
esp_stats:get_options() ++ [{num, integer, 1},{module, atom, ?MODULE}].
execute({Tss, Values}, #{num := Num}) when is_list(Values) ->
lager:debug("execute with: ~p",[Values]),
New = lists:zip(Tss, Values),
Sorted = lists:usort( fun({_Ts, V1}, {_Ts2, V2}) -> V2 =< V1 end, New),
Len = length(Sorted),
RLen = case Len >= Num of true -> Len-Num; false -> 0 end,
lists:unzip(lists:reverse(lists:nthtail(RLen, Sorted))).
-ifdef(TEST).
basic_test() ->
?assertEqual(
{[8,5,2,4,6],[319,322,326,328,331]},
esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>5})
),
?assertEqual(
{[8,5,2,4,6,3,9,7,1],[319,322,326,328,331,354,377,388,399]},
esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>65})
).
-endif. |
1498fe7a373a2c4f2c0fd7db477dd4a572a37b3a8209fa198ec6bb9e3ebc9392 | amnh/PCG | Tree.hs | {-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Test.Custom.Tree
( SimpleTree()
, createBinary
, createCherry
, createSimpleTree
, simpleTreeCharacterDecorationEqualityAssertion
) where
import Bio.Character.Encodable
import qualified Bio.PhyloGraph.Network as N
import qualified Bio.PhyloGraph.Node.Encoded as EN
import qualified Bio.PhyloGraph.Node.Final as FN
import qualified Bio.PhyloGraph.Node.ImpliedAlign as IN
import qualified Bio.PhyloGraph.Node.Preliminary as RN
import Bio.PhyloGraph.Node.Referential ()
import Bio.PhyloGraph.Tree.Binary
import qualified Bio.PhyloGraph.Tree.Referential as RT
import Bio.PhyloGraph.Tree.Rose
import Control.Applicative ((<|>))
import Control.Monad ((<=<))
import Data.Alphabet
import Data.Bifunctor (second)
import Data.Foldable
import Data.IntMap (insertWith)
import qualified Data.IntSet as IS
import Data.Key hiding (zipWith)
import Data.List (intercalate)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty as NE
import Data.List.Utility (chunksOf)
import Data.Maybe
import Data.MonoTraversable
import Data.Monoid
import Data.Ord (comparing)
import qualified Data.Set as S
import Data.Tree
import Data.Vector (Vector)
import Safe (tailMay)
import Test.QuickCheck
import Test.Tasty.HUnit
createSimpleTree
:: Foldable t
=> Int -- ^ Root node reference
-> String -- ^ Alphabet symbols
-> t (Int, String, [Int]) -- ^ (Node Reference, sequence of dynamic characters, child nodes)
-> SimpleTree
createSimpleTree rootRef symbols xs = TT . setRefIds $ unfoldTree buildTree rootRef
where
alphabet = fromSymbols $ pure <$> symbols
-- mapping :: (Foldable a, Foldable c, Foldable v) => IntMap (v (c (a String)), )
mapping = foldl' f mempty xs
where
f m (i, strChar, adjacency) = insertWith g i (strChar, IS.fromList adjacency) m
where
g (newSeq, lhs) (_, rhs) = (newSeq, lhs <> rhs)
buildTree :: Int -> (TestingDecoration, [Int])
buildTree i = (def { dEncoded = encodedSequence, suppliedAlphabet = Just alphabet }, otoList children)
where
encodedSequence =
if null strChar
then mempty
else pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> strChar
(strChar, children) = mapping ! i
setRefIds :: Tree TestingDecoration -> Tree TestingDecoration
setRefIds = snd . f 0
where
f :: Int -> Tree TestingDecoration -> (Int, Tree TestingDecoration)
f counter root = (counter', root')
where
root' = Node decoration' children'
decoration' = (rootLabel root) { refEquality = counter }
(counter', children') = foldr g (counter + 1, []) $ subForest root
g e (n, ys) = second (:ys) $ f n e
createCherry :: String -> String -> String -> SimpleTree
createCherry rootCharacter leftCharacter rightCharacter = createSimpleTree 0 alphabet [(0,rootCharacter,[1,2]), (1,leftCharacter,[]), (2,rightCharacter,[])]
where
alphabet = toList $ foldMap S.fromList [rootCharacter, leftCharacter, rightCharacter]
createBinary :: Foldable t => t String -> SimpleTree
createBinary leafCharacters = TT . setRefIds . createBinary' $ createCherry' <$> chunksOf 2 leafCharacters
where
symbols = toList $ foldMap S.fromList leafCharacters
alphabet = fromSymbols $ pure <$> symbols
strToLeaf :: String -> Tree TestingDecoration
strToLeaf str = Node (def { dEncoded = pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> str }) []
createCherry' :: [String] -> Tree TestingDecoration
createCherry' [x] = strToLeaf x
createCherry' xs = Node def (strToLeaf <$> xs)
createBinary' :: [Tree TestingDecoration] -> Tree TestingDecoration
createBinary' [x] = x
createBinary' xs = createBinary' $ f <$> chunksOf 2 xs
where
f [y] = y
f ys = Node def ys
newtype SimpleTree = TT (Tree TestingDecoration)
deriving stock (Eq)
data TestingDecoration
= Decorations
{ dEncoded :: Vector DynamicCharacter
, dSingle :: Vector DynamicCharacter
, dFinal :: Vector DynamicCharacter
, dGapped :: Vector DynamicCharacter
, dPreliminary :: Vector DynamicCharacter
, dLeftAlignment :: Vector DynamicCharacter
, dRightAlignment :: Vector DynamicCharacter
, dAligned :: Vector DynamicCharacter
, dTemporary :: Vector DynamicCharacter
, dLocalCost :: Double
, dTotalCost :: Double
, dIaHomology :: IN.HomologyTrace
, dImpliedAlignment :: Vector DynamicCharacter
, refEquality :: Int
, suppliedAlphabet :: Maybe (Alphabet String)
} deriving (Eq)
def :: TestingDecoration
def = Decorations
{ dEncoded = mempty
, dSingle = mempty
, dFinal = mempty
, dGapped = mempty
, dPreliminary = mempty
, dLeftAlignment = mempty
, dRightAlignment = mempty
, dAligned = mempty
, dTemporary = mempty
, dLocalCost = 0.0
, dTotalCost = 0.0
, dIaHomology = mempty
, dImpliedAlignment = mempty
, refEquality = -1
, suppliedAlphabet = Nothing
}
sameRef :: SimpleTree -> SimpleTree -> Bool
sameRef x y = nodeRef x == nodeRef y
nodeRef :: SimpleTree -> Int
nodeRef (TT x) = refEquality $ rootLabel x
instance Show SimpleTree where
show (TT x) = drawTreeMultiLine $ show <$> x
instance Show TestingDecoration where
show decoration = intercalate "\n" $ catMaybes renderings
where
renderings = fold [renderedId, renderedCosts, renderedDecorations]
renderedId = pure . pure $ "Node ( " <> show (refEquality decoration) <> " )"
renderedCosts =
[ pure $ "LocalCost " <> show (dLocalCost decoration)
, pure $ "TotalCost " <> show (dTotalCost decoration)
[ ( " LocalCost " < > ) < $ > h dLocalCost
-- , ("TotalCost " <>) <$> h dTotalCost
]
renderedDecorations =
[ g "Encoded " <$> f dEncoded
, g "Single " <$> f dSingle
, g "Final Ungapped " <$> f dFinal
, g "Final Gapped " <$> f dGapped
, g "Preliminary Ungapped " <$> f dPreliminary
, g "Preliminary Gapped " <$> f dAligned
, g "Left Child-wise Alignment" <$> f dLeftAlignment
, g "Right Child-wise Alignment" <$> f dRightAlignment
, g "Implied Alignment " <$> f dImpliedAlignment
]
alphabetToken = suppliedAlphabet decoration
f x = renderDynamicCharacter alphabetToken <$> headMay (x decoration)
intercalate " \n " $ ( prefix < > " : " < > y ) : ( ( " " < > ) < $ > zs )
-- where
-- (x:y:zs) = lines shown :: [String]
{-
      h x
        | x decoration == 0.0 = Nothing
        | otherwise = Just . show $ x decoration
-}
-- |
-- Neat 2-dimensional drawing of a tree.
drawTreeMultiLine :: Tree String -> String
drawTreeMultiLine = unlines . draw
draw :: Tree String -> [String]
draw (Node x xs) = lines x <> drawSubTrees xs
where
drawSubTrees [] = []
drawSubTrees [t] =
"|" : shift "`- " " " (draw t)
drawSubTrees (t:ts) =
"|" : shift "+- " "| " (draw t) <> drawSubTrees ts
shift first other = Prelude.zipWith (<>) (first : repeat other)
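-- | Render a dynamic character as a string, using the supplied alphabet when available and an arbitrary default alphabet otherwise.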
renderDynamicCharacter :: Maybe (Alphabet String) -> DynamicCharacter -> String
renderDynamicCharacter alphabetMay char
| onull char = ""
| otherwise = concatMap (f . toList) $ decodeStream alphabet char
where
numSymbols = symbolCount char
symbols = take numSymbols arbitrarySymbols
defaultAlphabet = fromSymbols symbols
alphabet = fromMaybe defaultAlphabet alphabetMay
f :: [String] -> String
f [x] = x
f ambiguityGroup = "[" <> concat ambiguityGroup <> "]"
arbitrarySymbols :: [String]
arbitrarySymbols = fmap pure . ('-' :) $ ['0'..'9'] <> ['A'..'Z'] <> ['a'..'z']
instance Arbitrary SimpleTree where
-- Arbitrary Cherry
arbitrary = do
let defaultSymbols = ['0'..'9'] <> ['A'..'Z'] <> ['a'..'z']
alphabetLength <- choose (1, length defaultSymbols) -- Inclusive bounds
let defaultAlphabetSymbols = take alphabetLength defaultSymbols
leafNodeCount <- choose (2, 16) -- Inclusive bounds
let leafNodeCharGen = listOf1 (elements defaultAlphabetSymbols)
createBinary <$> vectorOf leafNodeCount leafNodeCharGen
type instance Element SimpleTree = SimpleTree
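-- | Flatten a tree into a pre-order list of all of its subtrees.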
treeFold :: SimpleTree -> [SimpleTree]
treeFold x@(TT root) = (x :) . concatMap (treeFold . TT) $ subForest root
instance MonoFoldable SimpleTree where
{-# INLINE ofoldMap #-}
ofoldMap f = foldr (mappend . f) mempty . treeFold
{-# INLINE ofoldr #-}
ofoldr f e = foldr f e . treeFold
{-# INLINE ofoldl' #-}
ofoldl' f e = foldl' f e . treeFold
{-# INLINE ofoldr1Ex #-}
ofoldr1Ex f = foldr1 f . treeFold
{-# INLINE ofoldl1Ex' #-}
ofoldl1Ex' f = foldl1 f . treeFold
instance EN.EncodedNode SimpleTree DynamicCharacter where
getEncoded (TT n) = dEncoded $ rootLabel n
setEncoded (TT n) x = TT $ n { rootLabel = decoration { dEncoded = x } }
where
decoration = rootLabel n
instance FN.FinalNode SimpleTree DynamicCharacter where
getFinal (TT n) = dFinal $ rootLabel n
setFinal x (TT n) = TT $ n { rootLabel = decoration { dFinal = x } }
where
decoration = rootLabel n
getFinalGapped (TT n) = dGapped $ rootLabel n
setFinalGapped x (TT n) = TT $ n { rootLabel = decoration { dGapped = x } }
where
decoration = rootLabel n
getSingle (TT n) = dSingle $ rootLabel n
setSingle x (TT n) = TT $ n { rootLabel = decoration { dSingle = x } }
where
decoration = rootLabel n
instance RN.PreliminaryNode SimpleTree DynamicCharacter where
getPreliminaryUngapped (TT n) = dPreliminary $ rootLabel n
setPreliminaryUngapped x (TT n) = TT $ n { rootLabel = decoration { dPreliminary = x } }
where
decoration = rootLabel n
getPreliminaryGapped (TT n) = dAligned $ rootLabel n
setPreliminaryGapped x (TT n) = TT $ n { rootLabel = decoration { dAligned = x } }
where
decoration = rootLabel n
getLeftAlignment (TT n) = dLeftAlignment $ rootLabel n
setLeftAlignment x (TT n) = TT $ n { rootLabel = decoration { dLeftAlignment = x } }
where
decoration = rootLabel n
getRightAlignment (TT n) = dRightAlignment $ rootLabel n
setRightAlignment x (TT n) = TT $ n { rootLabel = decoration { dRightAlignment = x } }
where
decoration = rootLabel n
getLocalCost (TT n) = dLocalCost $ rootLabel n
setLocalCost x (TT n) = TT $ n { rootLabel = decoration { dLocalCost = x } }
where
decoration = rootLabel n
getTotalCost (TT n) = dTotalCost $ rootLabel n
setTotalCost x (TT n) = TT $ n { rootLabel = decoration { dTotalCost = x } }
where
decoration = rootLabel n
instance IN.IANode SimpleTree where
getHomologies (TT n) = dIaHomology $ rootLabel n
setHomologies (TT n) x = TT $ n { rootLabel = decoration { dIaHomology = x } }
where
decoration = rootLabel n
instance IN.IANode' SimpleTree DynamicCharacter where
getHomologies' (TT n) = dImpliedAlignment $ rootLabel n
setHomologies' (TT n) x = TT $ n { rootLabel = decoration { dImpliedAlignment = x } }
where
decoration = rootLabel n
instance N.Network SimpleTree SimpleTree where
-- Not efficient but correct.
parents (TT node) (TT tree)
| node == tree = []
| otherwise = foldMap (f tree) $ subForest tree
where
f parentNode childNode
| childNode == node = [TT parentNode]
| otherwise = foldMap (f childNode) $ subForest childNode
root tree = tree
children (TT x) _ = TT <$> subForest x
numNodes (TT x) = length x
addNode _ _ = error "addNode called on a TestingTree. Not implemented, don't call it!" -- Just don't call this!
update (TT root) nodes = TT $ modifyTopology root'
where
-- Step 1: We apply the new decorations to all the nodes in the original tree
root' :: Tree TestingDecoration
root' = modifyDecoration <$> root
where
modifyDecoration :: TestingDecoration -> TestingDecoration
modifyDecoration decoration =
case find (idMatches (refEquality decoration)) nodes of
Nothing -> decoration
Just (TT x) -> rootLabel x
-- Step 2: We apply the new decorations to the subtrees in the input list of updated nodes
nodes' :: [SimpleTree]
nodes' = f <$> nodes
where
f :: SimpleTree -> SimpleTree
f node@(TT internal) = TT $ internal { rootLabel = decoration', subForest = children' }
where
children' = (\(TT x) -> x) . f . TT <$> subForest internal
decoration' =
case findNode (sameRef node) (TT root') of
Nothing -> rootLabel internal
Just (TT x) -> rootLabel x
-- Step 3: We rebuild the tree applying the updated subtrees to the existing topology
modifyTopology :: Tree TestingDecoration -> Tree TestingDecoration
modifyTopology = unfoldTree f
where
f :: Tree TestingDecoration -> (TestingDecoration, [Tree TestingDecoration])
f node = (rootLabel node, children')
where
children' = subForest . maybe node (\(TT x) -> x) $ find (sameRef (TT node)) nodes'
instance RT.ReferentialTree SimpleTree SimpleTree where
getNodeIdx (TT node) (TT root) = snd $ foldl' f (0, Nothing) root
where
target = refEquality $ rootLabel node
f :: (Int, Maybe Int) -> TestingDecoration -> (Int, Maybe Int)
f (counter, done) e
| isJust done = (counter , done )
| refEquality e == target = (counter , Just counter)
| otherwise = (counter + 1, Nothing )
getNthNode tree@(TT root) pos =
case foldl' f (0, Nothing) root of
(outerBound, Nothing ) -> error $ fold ["Could not get node at position ", show pos, "! Valid range is [0,", show $ outerBound - 1, "]."]
(_ , Just decoration ) -> fromJust $ findNode (idMatches (refEquality decoration)) tree
where
f (counter, found) e
| isJust found = (counter , found )
| counter == pos = (counter + 1, Just e )
| otherwise = (counter + 1, Nothing )
instance BinaryTree SimpleTree SimpleTree where
leftChild (TT internal) _ = fmap TT . headMay $ subForest internal
rightChild (TT internal) _ = fmap TT . (headMay <=< tailMay) $ subForest internal
verifyBinary = isNothing . findNode isNotBinaryNode
where
isNotBinaryNode (TT node) = (> 2) . length $ subForest node
instance RoseTree SimpleTree SimpleTree where
parent node = findNode isParent
where
isParent (TT internal) = any (sameRef node . TT) $ subForest internal
idMatches :: Int -> SimpleTree -> Bool
idMatches target (TT internal) = refEquality (rootLabel internal) == target
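-- | Depth-first search for the first node in the tree that satisfies the predicate.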
findNode :: (SimpleTree -> Bool) -> SimpleTree -> Maybe SimpleTree
findNode f tree@(TT x)
| f tree = Just tree
| otherwise = foldl' (<|>) Nothing $ findNode f . TT <$> subForest x
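-- | The outcome of comparing the character decorations of two trees.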
data CharacterValueComparison
= AllCharactersMatched
| MismatchedCharacterValues [String]
deriving (Eq, Show)
mismatches :: CharacterValueComparison -> [String]
mismatches AllCharactersMatched = []
mismatches (MismatchedCharacterValues xs) = xs
simpleTreeCharacterDecorationEqualityAssertion :: Foldable t
=> Int -- ^ Root node reference
-> String -- ^ Alphabet symbols
-> (SimpleTree -> SimpleTree) -- ^ Topology invariant tree transformation.
-> (SimpleTree -> Vector DynamicCharacter) -- ^ Node accessing function
-> t (Int, String, [String], [Int]) -- ^ (Node Reference, sequence of dynamic characters, expected values, child nodes)
-> Assertion
simpleTreeCharacterDecorationEqualityAssertion rootRef symbols transformation accessor spec =
assertMinimalFailure $ compareTree outputTree <$> valueTrees
where
assertMinimalFailure :: [CharacterValueComparison] -> Assertion
assertMinimalFailure comparisons =
case minimumBy (comparing length) $ mismatches <$> comparisons of
[] -> True @=? True
ys -> assertFailure . (<> suffix) $ unlines ys
where
suffix = "In the transformed tree: \n" <> indentBlock (show outputTree)
compareTree :: SimpleTree -> SimpleTree -> CharacterValueComparison
compareTree actualValueTree expectedValueTree =
case catMaybes $ checkTree' actualValueTree expectedValueTree of
[] -> AllCharactersMatched
es -> MismatchedCharacterValues es
where
checkTree' :: SimpleTree -> SimpleTree -> [Maybe String]
checkTree' actualValueNode expectedValueNode
| notEqualReference ||
length xs /= length ys = [Just "The tree topology changed!"]
| actual /= expected = Just failureMessage : recursiveFailures
| otherwise = Nothing : recursiveFailures
where
recursiveFailures = concat $ zipWith checkTree' xs ys
xs = N.children actualValueNode actualValueNode
ys = N.children expectedValueNode expectedValueNode
expected = EN.getEncoded expectedValueNode
actual = accessor actualValueNode
notEqualReference = not $ expectedValueNode `sameRef` actualValueNode
nodeAlphabet = suppliedAlphabet . rootLabel $ (\(TT x) -> x) actualValueNode
failureMessage = "For Node ( " <> show (nodeRef actualValueNode) <> " )\n" <>
(indentBlock . unlines)
[ "Expected value: " <> seqShow expected
, "Actual value : " <> seqShow actual
]
where
seqShow = indentLine . maybe "Empty sequence" (renderDynamicCharacter nodeAlphabet) . headMay
indentLine = (" " <>)
indentBlock = unlines . fmap indentLine . lines
-- Construct the tree to be transformed.
inputTree = createSimpleTree rootRef symbols . fmap (\(x,y,_,z) -> (x,y,z)) $ toList spec
-- The result of the tree transformation.
outputTree = transformation inputTree
-- A list of "comparison" trees, each tree in the list is a possible correct decoration after the tree transformation.
valueTrees = toValueTree <$> [0 .. valueTreeCount -1]
valueTreeCount = length . fst . head $ otoList mapping
-- Takes an index from the list of possible tree decorations and constructs a "comparison" tree.
toValueTree j = TT . setRefIds $ unfoldTree buildExpectedTree rootRef
where
buildExpectedTree :: Int -> (TestingDecoration, [Int])
buildExpectedTree i = (def { dEncoded = encodedSequence }, otoList children)
where
encodedSequence
| null (expectedChar ! j) = mempty
| otherwise = pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> (expectedChar ! j)
(expectedChar, children) = mapping ! i
--
alphabet = fromSymbols $ pure <$> symbols
-- mapping :: (Foldable a, Foldable c, Foldable v) => IntMap (v (c (a String)), IntSet)
mapping = foldl' f mempty spec
where
f m (i, _, valChar, adjacency) = insertWith g i (valChar, IS.fromList adjacency) m
where
g (newSeq, lhs) (_, rhs) = (newSeq, lhs <> rhs)
| null | https://raw.githubusercontent.com/amnh/PCG/3a1b6bdde273ed4dc09717623986e1144b006904/lib/core/data-structures/src/Test/Custom/Tree.hs | haskell | # LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeSynonymInstances #
^ Root node reference
^ Alphabet symbols
^ (Node Reference, sequence of dynamic characters, child nodes)
, ("TotalCost " <>) <$> h dTotalCost
where
(x:y:zs) = lines shown :: [String]
|
Neat 2-dimensional drawing of a tree.
Arbitrary Cherry
Inclusive bounds
Inclusive bounds
# INLINE ofoldl' #
Not efficient but correct.
Just don't call this!
Step 1: We apply the new decorations to all the nodes in the original tree
^ Root node reference
^ Alphabet symbols
^ Topology invariant tree transformation.
^ Node accessing function
^ (Node Reference, sequence of dynamic characters, expected values, child nodes)
The result of the tree transformation.
A list of "comparison" trees, each tree in the list is a possible correct decoration after the tree transformation.
Takes an index from the list of possible tree decorations and constructs a "comparison" tree.
| {-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module Test.Custom.Tree
( SimpleTree()
, createBinary
, createCherry
, createSimpleTree
, simpleTreeCharacterDecorationEqualityAssertion
) where
import Bio.Character.Encodable
import qualified Bio.PhyloGraph.Network as N
import qualified Bio.PhyloGraph.Node.Encoded as EN
import qualified Bio.PhyloGraph.Node.Final as FN
import qualified Bio.PhyloGraph.Node.ImpliedAlign as IN
import qualified Bio.PhyloGraph.Node.Preliminary as RN
import Bio.PhyloGraph.Node.Referential ()
import Bio.PhyloGraph.Tree.Binary
import qualified Bio.PhyloGraph.Tree.Referential as RT
import Bio.PhyloGraph.Tree.Rose
import Control.Applicative ((<|>))
import Control.Monad ((<=<))
import Data.Alphabet
import Data.Bifunctor (second)
import Data.Foldable
import Data.IntMap (insertWith)
import qualified Data.IntSet as IS
import Data.Key hiding (zipWith)
import Data.List (intercalate)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty as NE
import Data.List.Utility (chunksOf)
import Data.Maybe
import Data.MonoTraversable
import Data.Monoid
import Data.Ord (comparing)
import qualified Data.Set as S
import Data.Tree
import Data.Vector (Vector)
import Safe (tailMay)
import Test.QuickCheck
import Test.Tasty.HUnit
createSimpleTree
  :: Foldable t
  => Int
  -> String
  -> t (Int, String, [Int])
  -> SimpleTree
createSimpleTree rootRef symbols xs = TT . setRefIds $ unfoldTree buildTree rootRef
where
alphabet = fromSymbols $ pure <$> symbols
-- mapping :: (Foldable a, Foldable c, Foldable v) => IntMap (v (c (a String)), IntSet)
mapping = foldl' f mempty xs
where
f m (i, strChar, adjacency) = insertWith g i (strChar, IS.fromList adjacency) m
where
g (newSeq, lhs) (_, rhs) = (newSeq, lhs <> rhs)
buildTree :: Int -> (TestingDecoration, [Int])
buildTree i = (def { dEncoded = encodedSequence, suppliedAlphabet = Just alphabet }, otoList children)
where
encodedSequence =
if null strChar
then mempty
else pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> strChar
(strChar, children) = mapping ! i
setRefIds :: Tree TestingDecoration -> Tree TestingDecoration
setRefIds = snd . f 0
where
f :: Int -> Tree TestingDecoration -> (Int, Tree TestingDecoration)
f counter root = (counter', root')
where
root' = Node decoration' children'
decoration' = (rootLabel root) { refEquality = counter }
(counter', children') = foldr g (counter + 1, []) $ subForest root
g e (n, ys) = second (:ys) $ f n e
createCherry :: String -> String -> String -> SimpleTree
createCherry rootCharacter leftCharacter rightCharacter = createSimpleTree 0 alphabet [(0,rootCharacter,[1,2]), (1,leftCharacter,[]), (2,rightCharacter,[])]
where
alphabet = toList $ foldMap S.fromList [rootCharacter, leftCharacter, rightCharacter]
createBinary :: Foldable t => t String -> SimpleTree
createBinary leafCharacters = TT . setRefIds . createBinary' $ createCherry' <$> chunksOf 2 leafCharacters
where
symbols = toList $ foldMap S.fromList leafCharacters
alphabet = fromSymbols $ pure <$> symbols
strToLeaf :: String -> Tree TestingDecoration
strToLeaf str = Node (def { dEncoded = pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> str }) []
createCherry' :: [String] -> Tree TestingDecoration
createCherry' [x] = strToLeaf x
createCherry' xs = Node def (strToLeaf <$> xs)
createBinary' :: [Tree TestingDecoration] -> Tree TestingDecoration
createBinary' [x] = x
createBinary' xs = createBinary' $ f <$> chunksOf 2 xs
where
f [y] = y
f ys = Node def ys
newtype SimpleTree = TT (Tree TestingDecoration)
deriving stock (Eq)
data TestingDecoration
= Decorations
{ dEncoded :: Vector DynamicCharacter
, dSingle :: Vector DynamicCharacter
, dFinal :: Vector DynamicCharacter
, dGapped :: Vector DynamicCharacter
, dPreliminary :: Vector DynamicCharacter
, dLeftAlignment :: Vector DynamicCharacter
, dRightAlignment :: Vector DynamicCharacter
, dAligned :: Vector DynamicCharacter
, dTemporary :: Vector DynamicCharacter
, dLocalCost :: Double
, dTotalCost :: Double
, dIaHomology :: IN.HomologyTrace
, dImpliedAlignment :: Vector DynamicCharacter
, refEquality :: Int
, suppliedAlphabet :: Maybe (Alphabet String)
} deriving (Eq)
def :: TestingDecoration
def = Decorations
{ dEncoded = mempty
, dSingle = mempty
, dFinal = mempty
, dGapped = mempty
, dPreliminary = mempty
, dLeftAlignment = mempty
, dRightAlignment = mempty
, dAligned = mempty
, dTemporary = mempty
, dLocalCost = 0.0
, dTotalCost = 0.0
, dIaHomology = mempty
, dImpliedAlignment = mempty
, refEquality = -1
, suppliedAlphabet = Nothing
}
sameRef :: SimpleTree -> SimpleTree -> Bool
sameRef x y = nodeRef x == nodeRef y
nodeRef :: SimpleTree -> Int
nodeRef (TT x) = refEquality $ rootLabel x
instance Show SimpleTree where
show (TT x) = drawTreeMultiLine $ show <$> x
instance Show TestingDecoration where
show decoration = intercalate "\n" $ catMaybes renderings
where
renderings = fold [renderedId, renderedCosts, renderedDecorations]
renderedId = pure . pure $ "Node ( " <> show (refEquality decoration) <> " )"
renderedCosts =
[ pure $ "LocalCost " <> show (dLocalCost decoration)
, pure $ "TotalCost " <> show (dTotalCost decoration)
-- [ ("LocalCost " <>) <$> h dLocalCost
]
renderedDecorations =
[ g "Encoded " <$> f dEncoded
, g "Single " <$> f dSingle
, g "Final Ungapped " <$> f dFinal
, g "Final Gapped " <$> f dGapped
, g "Preliminary Ungapped " <$> f dPreliminary
, g "Preliminary Gapped " <$> f dAligned
, g "Left Child-wise Alignment" <$> f dLeftAlignment
, g "Right Child-wise Alignment" <$> f dRightAlignment
, g "Implied Alignment " <$> f dImpliedAlignment
]
alphabetToken = suppliedAlphabet decoration
f x = renderDynamicCharacter alphabetToken <$> headMay (x decoration)
g prefix shown = intercalate "\n" $ (prefix <> ": " <> y) : (("    " <>) <$> zs)
  where
    (x:y:zs) = lines shown :: [String]
{-
h x
  | x decoration == 0.0 = Nothing
  | otherwise = Just . show $ x decoration
-}
drawTreeMultiLine :: Tree String -> String
drawTreeMultiLine = unlines . draw
draw :: Tree String -> [String]
draw (Node x xs) = lines x <> drawSubTrees xs
where
drawSubTrees [] = []
drawSubTrees [t] =
"|" : shift "`- " " " (draw t)
drawSubTrees (t:ts) =
"|" : shift "+- " "| " (draw t) <> drawSubTrees ts
shift first other = Prelude.zipWith (<>) (first : repeat other)
renderDynamicCharacter :: Maybe (Alphabet String) -> DynamicCharacter -> String
renderDynamicCharacter alphabetMay char
| onull char = ""
| otherwise = concatMap (f . toList) $ decodeStream alphabet char
where
numSymbols = symbolCount char
symbols = take numSymbols arbitrarySymbols
defaultAlphabet = fromSymbols symbols
alphabet = fromMaybe defaultAlphabet alphabetMay
f :: [String] -> String
f [x] = x
f ambiguityGroup = "[" <> concat ambiguityGroup <> "]"
arbitrarySymbols :: [String]
arbitrarySymbols = fmap pure . ('-' :) $ ['0'..'9'] <> ['A'..'Z'] <> ['a'..'z']
instance Arbitrary SimpleTree where
arbitrary = do
let defaultSymbols = ['0'..'9'] <> ['A'..'Z'] <> ['a'..'z']
alphabetLength <- choose (1, length defaultSymbols)
let defaultAlphabetSymbols = take alphabetLength defaultSymbols
leafNodeCount <- choose (2, 16)
let leafNodeCharGen = listOf1 (elements defaultAlphabetSymbols)
createBinary <$> vectorOf leafNodeCount leafNodeCharGen
type instance Element SimpleTree = SimpleTree
treeFold :: SimpleTree -> [SimpleTree]
treeFold x@(TT root) = (x :) . concatMap (treeFold . TT) $ subForest root
instance MonoFoldable SimpleTree where
{-# INLINE ofoldMap #-}
ofoldMap f = foldr (mappend . f) mempty . treeFold
{-# INLINE ofoldr #-}
ofoldr f e = foldr f e . treeFold
ofoldl' f e = foldl' f e . treeFold
{-# INLINE ofoldr1Ex #-}
ofoldr1Ex f = foldr1 f . treeFold
{-# INLINE ofoldl1Ex' #-}
ofoldl1Ex' f = foldl1 f . treeFold
instance EN.EncodedNode SimpleTree DynamicCharacter where
getEncoded (TT n) = dEncoded $ rootLabel n
setEncoded (TT n) x = TT $ n { rootLabel = decoration { dEncoded = x } }
where
decoration = rootLabel n
instance FN.FinalNode SimpleTree DynamicCharacter where
getFinal (TT n) = dFinal $ rootLabel n
setFinal x (TT n) = TT $ n { rootLabel = decoration { dFinal = x } }
where
decoration = rootLabel n
getFinalGapped (TT n) = dGapped $ rootLabel n
setFinalGapped x (TT n) = TT $ n { rootLabel = decoration { dGapped = x } }
where
decoration = rootLabel n
getSingle (TT n) = dSingle $ rootLabel n
setSingle x (TT n) = TT $ n { rootLabel = decoration { dSingle = x } }
where
decoration = rootLabel n
instance RN.PreliminaryNode SimpleTree DynamicCharacter where
getPreliminaryUngapped (TT n) = dPreliminary $ rootLabel n
setPreliminaryUngapped x (TT n) = TT $ n { rootLabel = decoration { dPreliminary = x } }
where
decoration = rootLabel n
getPreliminaryGapped (TT n) = dAligned $ rootLabel n
setPreliminaryGapped x (TT n) = TT $ n { rootLabel = decoration { dAligned = x } }
where
decoration = rootLabel n
getLeftAlignment (TT n) = dLeftAlignment $ rootLabel n
setLeftAlignment x (TT n) = TT $ n { rootLabel = decoration { dLeftAlignment = x } }
where
decoration = rootLabel n
getRightAlignment (TT n) = dRightAlignment $ rootLabel n
setRightAlignment x (TT n) = TT $ n { rootLabel = decoration { dRightAlignment = x } }
where
decoration = rootLabel n
getLocalCost (TT n) = dLocalCost $ rootLabel n
setLocalCost x (TT n) = TT $ n { rootLabel = decoration { dLocalCost = x } }
where
decoration = rootLabel n
getTotalCost (TT n) = dTotalCost $ rootLabel n
setTotalCost x (TT n) = TT $ n { rootLabel = decoration { dTotalCost = x } }
where
decoration = rootLabel n
instance IN.IANode SimpleTree where
getHomologies (TT n) = dIaHomology $ rootLabel n
setHomologies (TT n) x = TT $ n { rootLabel = decoration { dIaHomology = x } }
where
decoration = rootLabel n
instance IN.IANode' SimpleTree DynamicCharacter where
getHomologies' (TT n) = dImpliedAlignment $ rootLabel n
setHomologies' (TT n) x = TT $ n { rootLabel = decoration { dImpliedAlignment = x } }
where
decoration = rootLabel n
instance N.Network SimpleTree SimpleTree where
parents (TT node) (TT tree)
| node == tree = []
| otherwise = foldMap (f tree) $ subForest tree
where
f parentNode childNode
| childNode == node = [TT parentNode]
| otherwise = foldMap (f childNode) $ subForest childNode
root tree = tree
children (TT x) _ = TT <$> subForest x
numNodes (TT x) = length x
update (TT root) nodes = TT $ modifyTopology root'
where
root' :: Tree TestingDecoration
root' = modifyDecoration <$> root
where
modifyDecoration :: TestingDecoration -> TestingDecoration
modifyDecoration decoration =
case find (idMatches (refEquality decoration)) nodes of
Nothing -> decoration
Just (TT x) -> rootLabel x
-- Step 2: We apply the new decorations to the subtrees in the input list of updated nodes
nodes' :: [SimpleTree]
nodes' = f <$> nodes
where
f :: SimpleTree -> SimpleTree
f node@(TT internal) = TT $ internal { rootLabel = decoration', subForest = children' }
where
children' = (\(TT x) -> x) . f . TT <$> subForest internal
decoration' =
case findNode (sameRef node) (TT root') of
Nothing -> rootLabel internal
Just (TT x) -> rootLabel x
-- Step 3: We rebuild the tree applying the updated subtrees to the existing topology
modifyTopology :: Tree TestingDecoration -> Tree TestingDecoration
modifyTopology = unfoldTree f
where
f :: Tree TestingDecoration -> (TestingDecoration, [Tree TestingDecoration])
f node = (rootLabel node, children')
where
children' = subForest . maybe node (\(TT x) -> x) $ find (sameRef (TT node)) nodes'
instance RT.ReferentialTree SimpleTree SimpleTree where
getNodeIdx (TT node) (TT root) = snd $ foldl' f (0, Nothing) root
where
target = refEquality $ rootLabel node
f :: (Int, Maybe Int) -> TestingDecoration -> (Int, Maybe Int)
f (counter, done) e
| isJust done = (counter , done )
| refEquality e == target = (counter , Just counter)
| otherwise = (counter + 1, Nothing )
getNthNode tree@(TT root) pos =
case foldl' f (0, Nothing) root of
(outerBound, Nothing ) -> error $ fold ["Could not get node at position ", show pos, "! Valid range is [0,", show $ outerBound - 1, "]."]
(_ , Just decoration ) -> fromJust $ findNode (idMatches (refEquality decoration)) tree
where
f (counter, found) e
| isJust found = (counter , found )
| counter == pos = (counter + 1, Just e )
| otherwise = (counter + 1, Nothing )
instance BinaryTree SimpleTree SimpleTree where
leftChild (TT internal) _ = fmap TT . headMay $ subForest internal
rightChild (TT internal) _ = fmap TT . (headMay <=< tailMay) $ subForest internal
verifyBinary = isNothing . findNode isNotBinaryNode
where
isNotBinaryNode (TT node) = (> 2) . length $ subForest node
instance RoseTree SimpleTree SimpleTree where
parent node = findNode isParent
where
isParent (TT internal) = any (sameRef node . TT) $ subForest internal
idMatches :: Int -> SimpleTree -> Bool
idMatches target (TT internal) = refEquality (rootLabel internal) == target
findNode :: (SimpleTree -> Bool) -> SimpleTree -> Maybe SimpleTree
findNode f tree@(TT x)
| f tree = Just tree
| otherwise = foldl' (<|>) Nothing $ findNode f . TT <$> subForest x
data CharacterValueComparison
= AllCharactersMatched
| MismatchedCharacterValues [String]
deriving (Eq, Show)
mismatches :: CharacterValueComparison -> [String]
mismatches AllCharactersMatched = []
mismatches (MismatchedCharacterValues xs) = xs
simpleTreeCharacterDecorationEqualityAssertion :: Foldable t
  => Int
  -> String
  -> (SimpleTree -> SimpleTree)
  -> (SimpleTree -> Vector DynamicCharacter)
  -> t (Int, String, [String], [Int])
  -> Assertion
simpleTreeCharacterDecorationEqualityAssertion rootRef symbols transformation accessor spec =
assertMinimalFailure $ compareTree outputTree <$> valueTrees
where
assertMinimalFailure :: [CharacterValueComparison] -> Assertion
assertMinimalFailure comparisons =
case minimumBy (comparing length) $ mismatches <$> comparisons of
[] -> True @=? True
ys -> assertFailure . (<> suffix) $ unlines ys
where
suffix = "In the transformed tree: \n" <> indentBlock (show outputTree)
compareTree :: SimpleTree -> SimpleTree -> CharacterValueComparison
compareTree actualValueTree expectedValueTree =
case catMaybes $ checkTree' actualValueTree expectedValueTree of
[] -> AllCharactersMatched
es -> MismatchedCharacterValues es
where
checkTree' :: SimpleTree -> SimpleTree -> [Maybe String]
checkTree' actualValueNode expectedValueNode
| notEqualReference ||
length xs /= length ys = [Just "The tree topology changed!"]
| actual /= expected = Just failureMessage : recursiveFailures
| otherwise = Nothing : recursiveFailures
where
recursiveFailures = concat $ zipWith checkTree' xs ys
xs = N.children actualValueNode actualValueNode
ys = N.children expectedValueNode expectedValueNode
expected = EN.getEncoded expectedValueNode
actual = accessor actualValueNode
notEqualReference = not $ expectedValueNode `sameRef` actualValueNode
nodeAlphabet = suppliedAlphabet . rootLabel $ (\(TT x) -> x) actualValueNode
failureMessage = "For Node ( " <> show (nodeRef actualValueNode) <> " )\n" <>
(indentBlock . unlines)
[ "Expected value: " <> seqShow expected
, "Actual value : " <> seqShow actual
]
where
seqShow = indentLine . maybe "Empty sequence" (renderDynamicCharacter nodeAlphabet) . headMay
indentLine = (" " <>)
indentBlock = unlines . fmap indentLine . lines
-- Construct the tree to be transformed.
inputTree = createSimpleTree rootRef symbols . fmap (\(x,y,_,z) -> (x,y,z)) $ toList spec
outputTree = transformation inputTree
valueTrees = toValueTree <$> [0 .. valueTreeCount -1]
valueTreeCount = length . fst . head $ otoList mapping
toValueTree j = TT . setRefIds $ unfoldTree buildExpectedTree rootRef
where
buildExpectedTree :: Int -> (TestingDecoration, [Int])
buildExpectedTree i = (def { dEncoded = encodedSequence }, otoList children)
where
encodedSequence
| null (expectedChar ! j) = mempty
| otherwise = pure . encodeStream alphabet . NE.fromList $ (\c -> [c]:|[]) <$> (expectedChar ! j)
(expectedChar, children) = mapping ! i
alphabet = fromSymbols $ pure <$> symbols
-- mapping :: (Foldable a, Foldable c, Foldable v) => IntMap (v (c (a String)), IntSet)
mapping = foldl' f mempty spec
where
f m (i, _, valChar, adjacency) = insertWith g i (valChar, IS.fromList adjacency) m
where
g (newSeq, lhs) (_, rhs) = (newSeq, lhs <> rhs)
|
ed87e9c9976d53f0336994c3d4db514f9a96cb1b0af6b1500bfc2c7a818f18a0 | ocaml-explore/explore | explore.ml | module Collection = Collection
module Page = Page
module Build = Build
module Files = Files
module Toc = Toc
module Utils = Utils
| null | https://raw.githubusercontent.com/ocaml-explore/explore/117768b378959f18a8d818f37779a750e90f3438/explore/lib/explore.ml | ocaml | module Collection = Collection
module Page = Page
module Build = Build
module Files = Files
module Toc = Toc
module Utils = Utils
|
|
03aa4566457311a17bff1211da2bc0726f4f999bca835931405e9316a6cd07a0 | dizzyd/retest | retest_SUITE.erl | -module(retest_SUITE).
-compile([export_all]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
init_per_testcase(_Func, Config) ->
Config.
end_per_testcase(_Func, Config) ->
Config.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
suite() -> [].
all() ->
[basic_run, basic_run_all_args, directory_does_not_exist,
wrong_arguments, test_exceeds_timeout, test_exceeds_custom_timeout,
setup, create, copy, template, replace, touch, create_dir,
logging, shell_api, shell_async_api, shell_async_api].
groups() ->
[].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
init_per_suite(Config) ->
Config.
end_per_suite(Config) ->
Config.
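%% Helpers: run retest on a test directory under this suite's data_dir, optionally with extra command-line arguments.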
retest_run(Dir, Config) ->
retest_run(Dir, [], Config).
retest_run(Dir, Args, Config) ->
DataDir = proplists:get_value(data_dir, Config),
retest_core:run(Args ++ [DataDir ++ "/" ++ Dir]).
basic_run(doc) -> ["Does retest run?"];
basic_run(suite) -> [];
basic_run(Config) when is_list(Config) ->
ok = retest_run("basic", Config).
basic_run_all_args(doc) -> ["Does retest run?"];
basic_run_all_args(suite) -> [];
basic_run_all_args(Config) when is_list(Config) ->
ok = retest_run("basic", ["--verbose", "--outdir", "out_dir",
"--loglevel", "debug"], Config).
directory_does_not_exist(doc) -> ["Does retest fail when provided with non-existent test files?"];
directory_does_not_exist(suite) -> [];
directory_does_not_exist(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("non_existent", Config)).
wrong_arguments(doc) -> ["Does retest correctly handle wrong arguments"];
wrong_arguments(suite) -> [];
wrong_arguments(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("basic", ["--verbos"], Config)).
test_exceeds_timeout(doc) -> ["Does retest correctly handle tests that exceed the configured timeout"];
test_exceeds_timeout(suite) -> [];
test_exceeds_timeout(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("long_test", Config)).
test_exceeds_custom_timeout(doc) -> ["Does retest correctly handle tests that exceed the custom timeout passed on the command line"];
test_exceeds_custom_timeout(suite) -> [];
test_exceeds_custom_timeout(Config) when is_list(Config) ->
{_, S0, _} = os:timestamp(),
?assertException(throw, abort, retest_run("long_test_custom_timeout",
["--timeout", "2000"], Config)),
{_, S1, _} = os:timestamp(),
%% there should only have elapsed 2s
2 = S1 - S0.
copy(doc) -> ["Test copy directive"];
copy(suite) -> [];
copy(Config) when is_list(Config)->
ok = retest_run("copy", Config).
create(doc) -> ["Test create directive"];
create(suite) -> [];
create(Config) when is_list(Config)->
ok = retest_run("create", Config).
template(doc) -> ["Test template directive"];
template(suite) -> [];
template(Config) when is_list(Config)->
ok = retest_run("template", Config).
replace(doc) -> ["Test replace directive"];
replace(suite) -> [];
replace(Config) when is_list(Config)->
ok = retest_run("replace", Config).
touch(doc) -> ["Test touch directive"];
touch(suite) -> [];
touch(Config) when is_list(Config)->
ok = retest_run("touch", Config).
create_dir(doc) -> ["Test create_dir directive"];
create_dir(suite) -> [];
create_dir(Config) when is_list(Config)->
ok = retest_run("create_dir", Config).
logging(doc) -> ["Test logging"];
logging(suite) -> [];
logging(Config) when is_list(Config)->
ok = retest:log(debug, "debug message"),
ok = retest:log(debug, "debug message: ~p", ["args"]),
ok = retest:log(info, "info message"),
ok = retest:log(info, "info message: ~p", ["args"]),
ok = retest:log(warn, "warn message"),
ok = retest:log(warn, "warn message: ~p", ["args"]),
ok = retest:log(error, "error message"),
ok = retest:log(error, "error message: ~p", ["args"]).
shell_api(doc) -> ["Test Shell API"];
shell_api(suite) -> [];
shell_api(Config) when is_list(Config)->
{ok, [_Pid, "test"]} = retest:sh("echo test").
shell_async_api(doc) -> ["Test Shell async API"];
shell_async_api(suite) -> [];
shell_async_api(Config) when is_list(Config)->
Ref1 = retest:sh("echo test1", [async]),
{ok, [{0,5}]} = retest:sh_expect(Ref1, "test1", []),
_Ref2 = retest:sh("sleep 5", [async]),
timer:sleep(1000),
ok = retest_sh:stop_all().
setup(doc) -> ["Test setup optional callback"];
setup(suite) -> [];
setup(Config) when is_list(Config)->
ok = retest_run("setup", Config).
| null | https://raw.githubusercontent.com/dizzyd/retest/6b33038bd4df4aab1dc15bb59076512ba8fbb113/test/retest_SUITE.erl | erlang | -module(retest_SUITE).
-compile([export_all]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
init_per_testcase(_Func, Config) ->
Config.
end_per_testcase(_Func, Config) ->
Config.
suite() -> [].
all() ->
[basic_run, basic_run_all_args, directory_does_not_exist,
wrong_arguments, test_exceeds_timeout, test_exceeds_custom_timeout,
setup, create, copy, template, replace, touch, create_dir,
logging, shell_api, shell_async_api, shell_async_api].
groups() ->
[].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
init_per_suite(Config) ->
Config.
end_per_suite(Config) ->
Config.
retest_run(Dir, Config) ->
retest_run(Dir, [], Config).
retest_run(Dir, Args, Config) ->
DataDir = proplists:get_value(data_dir, Config),
retest_core:run(Args ++ [DataDir ++ "/" ++ Dir]).
basic_run(doc) -> ["Does retest run?"];
basic_run(suite) -> [];
basic_run(Config) when is_list(Config) ->
ok = retest_run("basic", Config).
basic_run_all_args(doc) -> ["Does retest run?"];
basic_run_all_args(suite) -> [];
basic_run_all_args(Config) when is_list(Config) ->
ok = retest_run("basic", ["--verbose", "--outdir", "out_dir",
"--loglevel", "debug"], Config).
directory_does_not_exist(doc) -> ["Does retest fail when provided with non existing test files?"];
directory_does_not_exist(suite) -> [];
directory_does_not_exist(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("non_existent", Config)).
wrong_arguments(doc) -> ["Does retest correctly handle wrong arguments"];
wrong_arguments(suite) -> [];
wrong_arguments(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("basic", ["--verbos"], Config)).
test_exceeds_timeout(doc) -> ["Does retest correctly handle tests that exceed the configured timeout"];
test_exceeds_timeout(suite) -> [];
test_exceeds_timeout(Config) when is_list(Config) ->
?assertException(throw, abort, retest_run("long_test", Config)).
test_exceeds_custom_timeout(doc) -> ["Does retest correctly handle tests that exceed the custom timeout passed on the command line"];
test_exceeds_custom_timeout(suite) -> [];
test_exceeds_custom_timeout(Config) when is_list(Config) ->
{_, S0, _} = os:timestamp(),
?assertException(throw, abort, retest_run("long_test_custom_timeout",
["--timeout", "2000"], Config)),
{_, S1, _} = os:timestamp(),
%% there should only have elapsed 2s
2 = S1 - S0.
copy(doc) -> ["Test copy directive"];
copy(suite) -> [];
copy(Config) when is_list(Config)->
ok = retest_run("copy", Config).
create(doc) -> ["Test create directive"];
create(suite) -> [];
create(Config) when is_list(Config)->
ok = retest_run("create", Config).
template(doc) -> ["Test template directive"];
template(suite) -> [];
template(Config) when is_list(Config)->
ok = retest_run("template", Config).
replace(doc) -> ["Test replace directive"];
replace(suite) -> [];
replace(Config) when is_list(Config)->
ok = retest_run("replace", Config).
touch(doc) -> ["Test touch directive"];
touch(suite) -> [];
touch(Config) when is_list(Config)->
ok = retest_run("touch", Config).
create_dir(doc) -> ["Test create_dir directive"];
create_dir(suite) -> [];
create_dir(Config) when is_list(Config)->
ok = retest_run("create_dir", Config).
logging(doc) -> ["Test logging"];
logging(suite) -> [];
logging(Config) when is_list(Config)->
ok = retest:log(debug, "debug message"),
ok = retest:log(debug, "debug message: ~p", ["args"]),
ok = retest:log(info, "info message"),
ok = retest:log(info, "info message: ~p", ["args"]),
ok = retest:log(warn, "warn message"),
ok = retest:log(warn, "warn message: ~p", ["args"]),
ok = retest:log(error, "error message"),
ok = retest:log(error, "error message: ~p", ["args"]).
shell_api(doc) -> ["Test Shell API"];
shell_api(suite) -> [];
shell_api(Config) when is_list(Config)->
{ok, [_Pid, "test"]} = retest:sh("echo test").
shell_async_api(doc) -> ["Test Shell async API"];
shell_async_api(suite) -> [];
shell_async_api(Config) when is_list(Config)->
Ref1 = retest:sh("echo test1", [async]),
{ok, [{0,5}]} = retest:sh_expect(Ref1, "test1", []),
_Ref2 = retest:sh("sleep 5", [async]),
timer:sleep(1000),
ok = retest_sh:stop_all().
setup(doc) -> ["Test setup optional callback"];
setup(suite) -> [];
setup(Config) when is_list(Config)->
ok = retest_run("setup", Config).
|