/* Project the ntuple stored in test.dat into a histogram: for every row
   (x, y, z) with E = x*x + y*y + z*z and E/scale > 1, the value E is
   added to the histogram, which is then printed to stdout.  */

#include <math.h>
#include <gsl/gsl_ntuple.h>
#include <gsl/gsl_histogram.h>

struct data
{
  double x;
  double y;
  double z;
};

int sel_func (void *ntuple_data, void *params);
double val_func (void *ntuple_data, void *params);

int
main (void)
{
  struct data ntuple_row;

  /* Bind each row read from test.dat to ntuple_row.  */
  gsl_ntuple *ntuple = gsl_ntuple_open ("test.dat", &ntuple_row,
                                        sizeof (ntuple_row));

  /* 100 uniform bins covering the range [0, 10).  */
  gsl_histogram *h = gsl_histogram_calloc_uniform (100, 0., 10.);

  gsl_ntuple_select_fn S;
  gsl_ntuple_value_fn V;

  double scale = 1.5;

  S.function = &sel_func;
  S.params = &scale;

  V.function = &val_func;
  V.params = 0;

  /* Histogram val_func of every row accepted by sel_func.  */
  gsl_ntuple_project (h, ntuple, &V, &S);

  gsl_histogram_fprintf (stdout, h, "%f", "%f");
  gsl_histogram_free (h);
  gsl_ntuple_close (ntuple);

  return 0;
}

/* Selection function: accept a row when E = x*x + y*y + z*z exceeds scale.  */
int
sel_func (void *ntuple_data, void *params)
{
  double x, y, z, E, scale;

  scale = *(double *) params;

  x = ((struct data *) ntuple_data)->x;
  y = ((struct data *) ntuple_data)->y;
  z = ((struct data *) ntuple_data)->z;

  E = x * x + y * y + z * z;

  return E / scale > 1;
}

/* Value function: the quantity to histogram, E = x*x + y*y + z*z.  */
double
val_func (void *ntuple_data, void *params)
{
  double x, y, z;

  (void) params;               /* unused */

  x = ((struct data *) ntuple_data)->x;
  y = ((struct data *) ntuple_data)->y;
  z = ((struct data *) ntuple_data)->z;

  return x * x + y * y + z * z;
}
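For context, here is a minimal companion sketch (not taken from the listing above; the 10000-row count and the uniform random values are arbitrary) showing one way a file like test.dat with the same row layout could be produced, using GSL's gsl_ntuple_create and gsl_ntuple_write:

#include <gsl/gsl_ntuple.h>
#include <gsl/gsl_rng.h>

struct data
{
  double x;
  double y;
  double z;
};

int
main (void)
{
  struct data ntuple_row;
  int i;

  /* Create (or truncate) test.dat and bind ntuple_row to it.  */
  gsl_ntuple *ntuple = gsl_ntuple_create ("test.dat", &ntuple_row,
                                          sizeof (ntuple_row));

  gsl_rng_env_setup ();
  gsl_rng *r = gsl_rng_alloc (gsl_rng_default);

  for (i = 0; i < 10000; i++)
    {
      /* Fill the bound row and append it to the file.  */
      ntuple_row.x = gsl_rng_uniform (r);
      ntuple_row.y = gsl_rng_uniform (r);
      ntuple_row.z = gsl_rng_uniform (r);
      gsl_ntuple_write (ntuple);
    }

  gsl_rng_free (r);
  gsl_ntuple_close (ntuple);

  return 0;
}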
|
-- Andreas, 2014-04-11, issue reported by James Chapman
-- {-# OPTIONS -v tc.decl.ax:100 #-}
-- {-# OPTIONS -v tc.polarity:100 #-}
{-# OPTIONS --copatterns --sized-types #-}
module _ where
open import Common.Size
module Works where
  mutual
    data Delay i (A : Set) : Set where
      now   : A → Delay i A
      later : ∞Delay i A → Delay i A

    record ∞Delay i A : Set where
      coinductive
      field force : {j : Size< i} → Delay j A

  open ∞Delay

  mutual
    _=<<_ : ∀{i A B} → Delay i A → (A → Delay i B) → Delay i B
    now x   =<< f = f x
    later x =<< f = later (x ∞=<< f)

    _∞=<<_ : ∀{i A B} → ∞Delay i A → (A → Delay i B) → ∞Delay i B
    force (c ∞=<< f) = force c =<< f
-- Polarity of Issue1099.Delay from positivity: [Contravariant,Covariant]
-- Refining polarity with type Size → Set → Set
-- Polarity of Issue1099.Delay: [Contravariant,Covariant]
module Fails where
  mutual
    data Delay i A : Set where
      now   : A → Delay i A
      later : ∞Delay i A → Delay i A

    record ∞Delay i A : Set where
      coinductive
      field force : {j : Size< i} → Delay j A

  open ∞Delay

  mutual
    _=<<_ : ∀{i A B} → Delay i A → (A → Delay i B) → Delay i B
    now x   =<< f = f x
    later x =<< f = later (x ∞=<< f)

    _∞=<<_ : ∀{i A B} → ∞Delay i A → (A → Delay i B) → ∞Delay i B
    force (c ∞=<< f) = force c =<< f
-- Polarity of Issue1099.Delay from positivity: [Contravariant,Covariant]
-- Refining polarity with type (i₁ : Size) → Set → Set
-- WAS: Polarity of Issue1099.Delay: [Invariant,Covariant]
-- NOW: Polarity of Issue1099.Delay: [Contravariant,Covariant]
-- Polarity refinement calls free variable analysis, which is not in the
-- monad. Thus, need to instantiate metas before polarity refinement.
|
section \<open>@{term "\<R>"} is not in BC\label{s:r1_bc}\<close>
theory R1_BC
imports Lemma_R
CP_FIN_NUM (* for V0 *)
begin
text \<open>We show that @{term "U\<^sub>0 \<union> V\<^sub>0"} is not in BC,
which implies @{term "\<R> \<notin> BC"}.
The proof is by contradiction. Assume there is a strategy $S$ learning @{term
"U\<^sub>0 \<union> V\<^sub>0"} behaviorally correct in the limit with respect to our
standard Gödel numbering $\varphi$. Thanks to Lemma~R for BC we can assume
$S$ to be total. Then we construct a function in @{term "U\<^sub>0 \<union> V\<^sub>0"} for
which $S$ fails.
As usual, there is a computable process building prefixes of functions
$\psi_j$. For every $j$ it starts with the singleton prefix $b = [j]$ and
computes the next prefix from a given prefix $b$ as follows:
\begin{enumerate}
\item Simulate $\varphi_{S(b0^k)}(|b| + k)$ for increasing $k$ for an
increasing number of steps.
\item Once a $k$ with $\varphi_{S(b0^k)}(|b| + k) = 0$ is found, extend the
prefix by $0^k1$.
\end{enumerate}
There is always such a $k$ because by assumption $S$ learns $b0^\infty \in
U_0$ and thus outputs a correct hypothesis for $b0^\infty$ on almost all of its
prefixes. Therefore for almost all prefixes of the form $b0^k$, we have
$\varphi_{S(b0^k)} = b0^\infty$ and hence $\varphi_{S(b0^k)}(|b| + k) = 0$.
But Step~2 constructs $\psi_j$ such that $\psi_j(|b| + k) = 1$. Therefore $S$
does not hypothesize $\psi_j$ on the prefix $b0^k$ of $\psi_j$. And since the
process runs forever, $S$ outputs infinitely many incorrect hypotheses for
$\psi_j$ and thus does not learn $\psi_j$.
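For illustration, consider a single iteration with made-up numbers: starting
from the prefix $b = [j]$, suppose the simulation in Step~1 first succeeds for
$k = 2$, that is, $\varphi_{S(b00)}(|b| + 2) = 0$. Step~2 then extends the
prefix to $b001$, so the function under construction has value $1$ at argument
$|b| + 2$, whereas the hypothesis $S(b00)$ predicts $0$ there and is therefore
incorrect for $\psi_j$. The next iteration repeats this argument with $b001$
in place of $b$.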
Applying Kleene's fixed-point theorem to @{term "\<psi> \<in> \<R>\<^sup>2"}
yields a $j$ with $\varphi_j = \psi_j$ and thus $\psi_j \in V_0$. But $S$
does not learn any $\psi_j$, contradicting our assumption.
The result @{prop "\<R> \<notin> BC"} can be obtained more directly by
running the process with the empty prefix, thereby constructing only one
function instead of a numbering. This function is in @{term R1}, and $S$
fails to learn it by the same reasoning as above. The stronger statement
about @{term "U\<^sub>0 \<union> V\<^sub>0"} will be exploited in
Section~\ref{s:union}.
In the following locale the assumption that $S$ learns @{term "U\<^sub>0"}
suffices for analyzing the process. However, in order to arrive at the
desired contradiction this assumption is too weak because the functions built
by the process are not in @{term "U\<^sub>0"}.\<close>
locale r1_bc =
fixes s :: partial1
assumes s_in_R1: "s \<in> \<R>" and s_learn_U0: "learn_bc \<phi> U\<^sub>0 s"
begin
lemma s_learn_prenum: "\<And>b. learn_bc \<phi> {prenum b} s"
using s_learn_U0 U0_altdef learn_bc_closed_subseteq by blast
text \<open>A @{typ recf} for the strategy:\<close>
definition r_s :: recf where
"r_s \<equiv> SOME rs. recfn 1 rs \<and> total rs \<and> s = (\<lambda>x. eval rs [x])"
lemma r_s_recfn [simp]: "recfn 1 r_s"
and r_s_total: "\<And>x. eval r_s [x] \<down>"
and eval_r_s: "\<And>x. s x = eval r_s [x]"
using r_s_def R1_SOME[OF s_in_R1, of r_s] by simp_all
text \<open>We begin with the function that finds the $k$ from Step~1 of the
construction of $\psi$.\<close>
definition "r_find_k \<equiv>
let k = Cn 2 r_pdec1 [Id 2 0];
r = Cn 2 r_result1
[Cn 2 r_pdec2 [Id 2 0],
Cn 2 r_s [Cn 2 r_append_zeros [Id 2 1, k]],
Cn 2 r_add [Cn 2 r_length [Id 2 1], k]]
in Cn 1 r_pdec1 [Mn 1 (Cn 2 r_eq [r, r_constn 1 1])]"
lemma r_find_k_recfn [simp]: "recfn 1 r_find_k"
unfolding r_find_k_def by (simp add: Let_def)
text \<open>There is always a suitable $k$, since the strategy learns
$b0^\infty$ for all $b$.\<close>
lemma learn_bc_prenum_eventually_zero:
"\<exists>k. \<phi> (the (s (e_append_zeros b k))) (e_length b + k) \<down>= 0"
proof -
let ?f = "prenum b"
have "\<exists>n\<ge>e_length b. \<phi> (the (s (?f \<triangleright> n))) = ?f"
using learn_bcE s_learn_prenum by (meson le_cases singletonI)
then obtain n where n: "n \<ge> e_length b" "\<phi> (the (s (?f \<triangleright> n))) = ?f"
by auto
define k where "k = Suc n - e_length b"
let ?e = "e_append_zeros b k"
have len: "e_length ?e = Suc n"
using k_def n e_append_zeros_length by simp
have "?f \<triangleright> n = ?e"
proof -
have "e_length ?e > 0"
using len n(1) by simp
moreover have "?f x \<down>= e_nth ?e x" for x
proof (cases "x < e_length b")
case True
then show ?thesis using e_nth_append_zeros by simp
next
case False
then have "?f x \<down>= 0" by simp
moreover from False have "e_nth ?e x = 0"
using e_nth_append_zeros_big by simp
ultimately show ?thesis by simp
qed
ultimately show ?thesis using initI[of "?e"] len by simp
qed
with n(2) have "\<phi> (the (s ?e)) = ?f" by simp
then have "\<phi> (the (s ?e)) (e_length ?e) \<down>= 0"
using len n(1) by auto
then show ?thesis using e_append_zeros_length by auto
qed
lemma if_eq_eq: "(if v = 1 then (0 :: nat) else 1) = 0 \<Longrightarrow> v = 1"
by presburger
lemma r_find_k:
shows "eval r_find_k [b] \<down>"
and "let k = the (eval r_find_k [b])
in \<phi> (the (s (e_append_zeros b k))) (e_length b + k) \<down>= 0"
proof -
let ?k = "Cn 2 r_pdec1 [Id 2 0]"
let ?argt = "Cn 2 r_pdec2 [Id 2 0]"
let ?argi = "Cn 2 r_s [Cn 2 r_append_zeros [Id 2 1, ?k]]"
let ?argx = "Cn 2 r_add [Cn 2 r_length [Id 2 1], ?k]"
let ?r = "Cn 2 r_result1 [?argt, ?argi, ?argx]"
define f where "f \<equiv>
let k = Cn 2 r_pdec1 [Id 2 0];
r = Cn 2 r_result1
[Cn 2 r_pdec2 [Id 2 0],
Cn 2 r_s [Cn 2 r_append_zeros [Id 2 1, k]],
Cn 2 r_add [Cn 2 r_length [Id 2 1], k]]
in Cn 2 r_eq [r, r_constn 1 1]"
then have "recfn 2 f" by (simp add: Let_def)
have "total r_s"
by (simp add: r_s_total totalI1)
then have "total f"
unfolding f_def using Cn_total Mn_free_imp_total by (simp add: Let_def)
have "eval ?argi [z, b] = s (e_append_zeros b (pdec1 z))" for z
using r_append_zeros \<open>recfn 2 f\<close> eval_r_s by auto
then have "eval ?argi [z, b] \<down>= the (s (e_append_zeros b (pdec1 z)))" for z
using eval_r_s r_s_total by simp
moreover have "recfn 2 ?r" using \<open>recfn 2 f\<close> by auto
ultimately have r: "eval ?r [z, b] =
eval r_result1 [pdec2 z, the (s (e_append_zeros b (pdec1 z))), e_length b + pdec1 z]"
for z
by simp
then have f: "eval f [z, b] \<down>= (if the (eval ?r [z, b]) = 1 then 0 else 1)" for z
using f_def \<open>recfn 2 f\<close> prim_recfn_total by (auto simp add: Let_def)
have "\<exists>k. \<phi> (the (s (e_append_zeros b k))) (e_length b + k) \<down>= 0"
using s_learn_prenum learn_bc_prenum_eventually_zero by auto
then obtain k where "\<phi> (the (s (e_append_zeros b k))) (e_length b + k) \<down>= 0"
by auto
then obtain t where "eval r_result1 [t, the (s (e_append_zeros b k)), e_length b + k] \<down>= Suc 0"
using r_result1_converg_phi(1) by blast
then have t: "eval r_result1 [t, the (s (e_append_zeros b k)), e_length b + k] \<down>= Suc 0"
by simp
let ?z = "prod_encode (k, t)"
have "eval ?r [?z, b] \<down>= Suc 0"
using t r by (metis fst_conv prod_encode_inverse snd_conv)
with f have fzb: "eval f [?z, b] \<down>= 0" by simp
moreover have "eval (Mn 1 f) [b] =
(if (\<exists>z. eval f ([z, b]) \<down>= 0)
then Some (LEAST z. eval f [z, b] \<down>= 0)
else None)"
using eval_Mn_total[of 1 f "[b]"] \<open>total f\<close> \<open>recfn 2 f\<close> by simp
ultimately have mn1f: "eval (Mn 1 f) [b] \<down>= (LEAST z. eval f [z, b] \<down>= 0)"
by auto
with fzb have "eval f [the (eval (Mn 1 f) [b]), b] \<down>= 0" (is "eval f [?zz, b] \<down>= 0")
using \<open>total f\<close> \<open>recfn 2 f\<close> LeastI_ex[of "%z. eval f [z, b] \<down>= 0"] by auto
moreover have "eval f [?zz, b] \<down>= (if the (eval ?r [?zz, b]) = 1 then 0 else 1)"
using f by simp
ultimately have "(if the (eval ?r [?zz, b]) = 1 then (0 :: nat) else 1) = 0" by auto
then have "the (eval ?r [?zz, b]) = 1"
using if_eq_eq[of "the (eval ?r [?zz, b])"] by simp
then have
"eval r_result1
[pdec2 ?zz, the (s (e_append_zeros b (pdec1 ?zz))), e_length b + pdec1 ?zz] \<down>=
1"
using r r_result1_total r_result1_prim totalE
by (metis length_Cons list.size(3) numeral_3_eq_3 option.collapse)
then have *: "\<phi> (the (s (e_append_zeros b (pdec1 ?zz)))) (e_length b + pdec1 ?zz) \<down>= 0"
by (simp add: r_result1_some_phi)
define Mn1f where "Mn1f = Mn 1 f"
then have "eval Mn1f [b] \<down>= ?zz"
using mn1f by auto
moreover have "recfn 1 (Cn 1 r_pdec1 [Mn1f])"
using \<open>recfn 2 f\<close> Mn1f_def by simp
ultimately have "eval (Cn 1 r_pdec1 [Mn1f]) [b] = eval r_pdec1 [the (eval (Mn1f) [b])]"
by auto
then have "eval (Cn 1 r_pdec1 [Mn1f]) [b] = eval r_pdec1 [?zz]"
using Mn1f_def by blast
then have 1: "eval (Cn 1 r_pdec1 [Mn1f]) [b] \<down>= pdec1 ?zz"
by simp
moreover have "recfn 1 (Cn 1 S [Cn 1 r_pdec1 [Mn1f]])"
using \<open>recfn 2 f\<close> Mn1f_def by simp
ultimately have "eval (Cn 1 S [Cn 1 r_pdec1 [Mn1f]]) [b] =
eval S [the (eval (Cn 1 r_pdec1 [Mn1f]) [b])]"
by simp
then have "eval (Cn 1 S [Cn 1 r_pdec1 [Mn1f]]) [b] = eval S [pdec1 ?zz]"
using 1 by simp
then have "eval (Cn 1 S [Cn 1 r_pdec1 [Mn1f]]) [b] \<down>= Suc (pdec1 ?zz)"
by simp
moreover have "eval r_find_k [b] = eval (Cn 1 r_pdec1 [Mn1f]) [b]"
unfolding r_find_k_def Mn1f_def f_def by metis
ultimately have r_find_ksb: "eval r_find_k [b] \<down>= pdec1 ?zz"
using 1 by simp
then show "eval r_find_k [b] \<down>" by simp_all
from r_find_ksb have "the (eval r_find_k [b]) = pdec1 ?zz"
by simp
moreover have "\<phi> (the (s (e_append_zeros b (pdec1 ?zz)))) (e_length b + pdec1 ?zz) \<down>= 0"
using * by simp
ultimately show "let k = the (eval r_find_k [b])
in \<phi> (the (s (e_append_zeros b k))) (e_length b + k) \<down>= 0"
by simp
qed
lemma r_find_k_total: "total r_find_k"
by (simp add: s_learn_prenum r_find_k(1) totalI1)
text \<open>The following function represents one iteration of the
process.\<close>
abbreviation "r_next \<equiv>
Cn 3 r_snoc [Cn 3 r_append_zeros [Id 3 1, Cn 3 r_find_k [Id 3 1]], r_constn 2 1]"
text \<open>Using @{term r_next} we define the function @{term r_prefixes}
that computes the prefix after every iteration of the process.\<close>
definition r_prefixes :: recf where
"r_prefixes \<equiv> Pr 1 r_singleton_encode r_next"
lemma r_prefixes_total: "total r_prefixes"
proof -
have "recfn 3 r_next" by simp
then have "total r_next"
using \<open>recfn 3 r_next\<close> r_find_k_total Cn_total Mn_free_imp_total by auto
then show ?thesis
by (simp add: Mn_free_imp_total Pr_total r_prefixes_def)
qed
lemma r_prefixes_0: "eval r_prefixes [0, j] \<down>= list_encode [j]"
unfolding r_prefixes_def by simp
lemma r_prefixes_Suc:
"eval r_prefixes [Suc n, j] \<down>=
(let b = the (eval r_prefixes [n, j])
in e_snoc (e_append_zeros b (the (eval r_find_k [b]))) 1)"
proof -
have "recfn 3 r_next" by simp
then have "total r_next"
using \<open>recfn 3 r_next\<close> r_find_k_total Cn_total Mn_free_imp_total by auto
have eval_next: "eval r_next [t, v, j] \<down>=
e_snoc (e_append_zeros v (the (eval r_find_k [v]))) 1"
for t v j
using r_find_k_total \<open>recfn 3 r_next\<close> r_append_zeros by simp
then have "eval r_prefixes [Suc n, j] = eval r_next [n, the (eval r_prefixes [n, j]), j]"
using r_prefixes_total by (simp add: r_prefixes_def)
then show "eval r_prefixes [Suc n, j] \<down>=
(let b = the (eval r_prefixes [n, j])
in e_snoc (e_append_zeros b (the (eval r_find_k [b]))) 1)"
using eval_next by metis
qed
text \<open>Since @{term r_prefixes} is total, we can get away with
introducing a total function.\<close>
definition prefixes :: "nat \<Rightarrow> nat \<Rightarrow> nat" where
"prefixes j t \<equiv> the (eval r_prefixes [t, j])"
lemma prefixes_Suc:
"prefixes j (Suc t) =
e_snoc (e_append_zeros (prefixes j t) (the (eval r_find_k [prefixes j t]))) 1"
unfolding prefixes_def using r_prefixes_Suc by (simp_all add: Let_def)
lemma prefixes_Suc_length:
"e_length (prefixes j (Suc t)) =
Suc (e_length (prefixes j t) + the (eval r_find_k [prefixes j t]))"
using e_append_zeros_length prefixes_Suc by simp
lemma prefixes_length_mono: "e_length (prefixes j t) < e_length (prefixes j (Suc t))"
using prefixes_Suc_length by simp
lemma prefixes_length_mono': "e_length (prefixes j t) \<le> e_length (prefixes j (t + d))"
proof (induction d)
case 0
then show ?case by simp
next
case (Suc d)
then show ?case using prefixes_length_mono le_less_trans by fastforce
qed
lemma prefixes_length_lower_bound: "e_length (prefixes j t) \<ge> Suc t"
proof (induction t)
case 0
then show ?case by (simp add: prefixes_def r_prefixes_0)
next
case (Suc t)
moreover have "Suc (e_length (prefixes j t)) \<le> e_length (prefixes j (Suc t))"
using prefixes_length_mono by (simp add: Suc_leI)
ultimately show ?case by simp
qed
lemma prefixes_Suc_nth:
assumes "x < e_length (prefixes j t)"
shows "e_nth (prefixes j t) x = e_nth (prefixes j (Suc t)) x"
proof -
define k where "k = the (eval r_find_k [prefixes j t])"
let ?u = "e_append_zeros (prefixes j t) k"
have "prefixes j (Suc t) =
e_snoc (e_append_zeros (prefixes j t) (the (eval r_find_k [prefixes j t]))) 1"
using prefixes_Suc by simp
with k_def have "prefixes j (Suc t) = e_snoc ?u 1"
by simp
then have "e_nth (prefixes j (Suc t)) x = e_nth (e_snoc ?u 1) x"
by simp
moreover have "x < e_length ?u"
using assms e_append_zeros_length by auto
ultimately have "e_nth (prefixes j (Suc t)) x = e_nth ?u x"
using e_nth_snoc_small by simp
moreover have "e_nth ?u x = e_nth (prefixes j t) x"
using assms e_nth_append_zeros by simp
ultimately show "e_nth (prefixes j t) x = e_nth (prefixes j (Suc t)) x"
by simp
qed
lemma prefixes_Suc_last: "e_nth (prefixes j (Suc t)) (e_length (prefixes j (Suc t)) - 1) = 1"
using prefixes_Suc by simp
lemma prefixes_le_nth:
assumes "x < e_length (prefixes j t)"
shows "e_nth (prefixes j t) x = e_nth (prefixes j (t + d)) x"
proof (induction d)
case 0
then show ?case by simp
next
case (Suc d)
have "x < e_length (prefixes j (t + d))"
using s_learn_prenum assms prefixes_length_mono'
by (simp add: less_eq_Suc_le order_trans_rules(23))
then have "e_nth (prefixes j (t + d)) x = e_nth (prefixes j (t + Suc d)) x"
using prefixes_Suc_nth by simp
with Suc show ?case by simp
qed
text \<open>The numbering $\psi$ is defined via @{term[names_short] prefixes}:
the value $\psi_j(x)$ is read off position $x$ of the prefix built after $x + 1$
iterations, which is long enough because every iteration appends at least one
element.\<close>
definition psi :: partial2 ("\<psi>") where
"\<psi> j x \<equiv> Some (e_nth (prefixes j (Suc x)) x)"
lemma psi_in_R2: "\<psi> \<in> \<R>\<^sup>2"
proof
define r where "r \<equiv> Cn 2 r_nth [Cn 2 r_prefixes [Cn 2 S [Id 2 1], Id 2 0], Id 2 1]"
then have "recfn 2 r"
using r_prefixes_recfn by simp
then have "eval r [j, x] \<down>= e_nth (prefixes j (Suc x)) x" for j x
unfolding r_def prefixes_def using r_prefixes_total r_prefixes_recfn e_nth by simp
then have "eval r [j, x] = \<psi> j x" for j x
unfolding psi_def by simp
then show "\<psi> \<in> \<P>\<^sup>2"
using \<open>recfn 2 r\<close> by auto
show "total2 \<psi>"
unfolding psi_def by auto
qed
lemma psi_eq_nth_prefixes:
assumes "x < e_length (prefixes j t)"
shows "\<psi> j x \<down>= e_nth (prefixes j t) x"
proof (cases "Suc x < t")
case True
have "x \<le> e_length (prefixes j x)"
using prefixes_length_lower_bound by (simp add: Suc_leD)
also have "... < e_length (prefixes j (Suc x))"
using prefixes_length_mono s_learn_prenum by simp
finally have "x < e_length (prefixes j (Suc x))" .
with True have "e_nth (prefixes j (Suc x)) x = e_nth (prefixes j t) x"
using prefixes_le_nth[of x j "Suc x" "t - Suc x"] by simp
then show ?thesis using psi_def by simp
next
case False
then have "e_nth (prefixes j (Suc x)) x = e_nth (prefixes j t) x"
using prefixes_le_nth[of x j t "Suc x - t"] assms by simp
then show ?thesis using psi_def by simp
qed
lemma psi_at_0: "\<psi> j 0 \<down>= j"
using psi_eq_nth_prefixes[of 0 j 0] prefixes_length_lower_bound[of 0 j]
by (simp add: prefixes_def r_prefixes_0)
text \<open>The prefixes output by the process @{term[names_short] "prefixes j"} are
indeed prefixes of $\psi_j$.\<close>
lemma prefixes_init_psi: "\<psi> j \<triangleright> (e_length (prefixes j (Suc t)) - 1) = prefixes j (Suc t)"
proof (rule initI[of "prefixes j (Suc t)"])
let ?e = "prefixes j (Suc t)"
show "e_length ?e > 0"
using prefixes_length_lower_bound[of "Suc t" j] by auto
show "\<And>x. x < e_length ?e \<Longrightarrow> \<psi> j x \<down>= e_nth ?e x"
using prefixes_Suc_nth psi_eq_nth_prefixes by simp
qed
text \<open>Every prefix of $\psi_j$ generated by the process
@{term[names_short] "prefixes j"} (except for the initial one) is of the form
$b0^k1$. But $k$ is chosen such that $\varphi_{S(b0^k)}(|b|+k) = 0 \neq 1 =
b0^k1_{|b|+k}$. Therefore the hypothesis $S(b0^k)$ is incorrect for
$\psi_j$.\<close>
lemma hyp_wrong_at_last:
"\<phi> (the (s (e_butlast (prefixes j (Suc t))))) (e_length (prefixes j (Suc t)) - 1) \<noteq>
\<psi> j (e_length (prefixes j (Suc t)) - 1)"
(is "?lhs \<noteq> ?rhs")
proof -
let ?b = "prefixes j t"
let ?k = "the (eval r_find_k [?b])"
let ?x = "e_length (prefixes j (Suc t)) - 1"
have "e_butlast (prefixes j (Suc t)) = e_append_zeros ?b ?k"
using s_learn_prenum prefixes_Suc by simp
then have "?lhs = \<phi> (the (s (e_append_zeros ?b ?k))) ?x"
by simp
moreover have "?x = e_length ?b + ?k"
using prefixes_Suc_length by simp
ultimately have "?lhs = \<phi> (the (s (e_append_zeros ?b ?k))) (e_length ?b + ?k)"
by simp
then have "?lhs \<down>= 0"
using r_find_k(2) r_s_total s_learn_prenum by metis
moreover have "?x < e_length (prefixes j (Suc t))"
using prefixes_length_lower_bound le_less_trans linorder_not_le s_learn_prenum
by fastforce
ultimately have "?rhs \<down>= e_nth (prefixes j (Suc t)) ?x"
using psi_eq_nth_prefixes[of ?x j "Suc t"] by simp
moreover have "e_nth (prefixes j (Suc t)) ?x = 1"
using prefixes_Suc prefixes_Suc_last by simp
ultimately have "?rhs \<down>= 1" by simp
with \<open>?lhs \<down>= 0\<close> show ?thesis by simp
qed
corollary hyp_wrong: "\<phi> (the (s (e_butlast (prefixes j (Suc t))))) \<noteq> \<psi> j"
using hyp_wrong_at_last[of j t] by auto
text \<open>For all $j$, the strategy $S$ outputs infinitely many wrong hypotheses for
$\psi_j$.\<close>
lemma infinite_hyp_wrong: "\<exists>m>n. \<phi> (the (s (\<psi> j \<triangleright> m))) \<noteq> \<psi> j"
proof -
let ?b = "prefixes j (Suc (Suc n))"
let ?bb = "e_butlast ?b"
have len_b: "e_length ?b > Suc (Suc n)"
using prefixes_length_lower_bound by (simp add: Suc_le_lessD)
then have len_bb: "e_length ?bb > Suc n" by simp
define m where "m = e_length ?bb - 1"
with len_bb have "m > n" by simp
have "\<psi> j \<triangleright> m = ?bb"
proof -
have "\<psi> j \<triangleright> (e_length ?b - 1) = ?b"
using prefixes_init_psi by simp
then have "\<psi> j \<triangleright> (e_length ?b - 2) = ?bb"
using init_butlast_init psi_in_R2 R2_proj_R1 R1_imp_total1 len_bb length_init
by (metis Suc_1 diff_diff_left length_butlast length_greater_0_conv
list.size(3) list_decode_encode not_less0 plus_1_eq_Suc)
then show ?thesis by (metis diff_Suc_1 length_init m_def)
qed
moreover have "\<phi> (the (s ?bb)) \<noteq> \<psi> j"
using hyp_wrong by simp
ultimately have "\<phi> (the (s (\<psi> j \<triangleright> m))) \<noteq> \<psi> j"
by simp
with \<open>m > n\<close> show ?thesis by auto
qed
lemma U0_V0_not_learn_bc: "\<not> learn_bc \<phi> (U\<^sub>0 \<union> V\<^sub>0) s"
proof -
obtain j where j: "\<phi> j = \<psi> j"
using R2_imp_P2 kleene_fixed_point psi_in_R2 by blast
moreover have "\<exists>m>n. \<phi> (the (s ((\<psi> j) \<triangleright> m))) \<noteq> \<psi> j" for n
using infinite_hyp_wrong[of _ j] by simp
ultimately have "\<not> learn_bc \<phi> {\<psi> j} s"
using infinite_hyp_wrong_not_BC by simp
moreover have "\<psi> j \<in> V\<^sub>0"
proof -
have "\<psi> j \<in> \<R>" (is "?f \<in> \<R>")
using psi_in_R2 by simp
moreover have "\<phi> (the (?f 0)) = ?f"
using j psi_at_0[of j] by simp
ultimately show ?thesis by (simp add: V0_def)
qed
ultimately show "\<not> learn_bc \<phi> (U\<^sub>0 \<union> V\<^sub>0) s"
using learn_bc_closed_subseteq by auto
qed
end
lemma U0_V0_not_in_BC: "U\<^sub>0 \<union> V\<^sub>0 \<notin> BC"
proof
assume in_BC: "U\<^sub>0 \<union> V\<^sub>0 \<in> BC"
then have "U\<^sub>0 \<union> V\<^sub>0 \<in> BC_wrt \<phi>"
using BC_wrt_phi_eq_BC by simp
then obtain s where "learn_bc \<phi> (U\<^sub>0 \<union> V\<^sub>0) s"
using BC_wrt_def by auto
then obtain s' where s': "s' \<in> \<R>" "learn_bc \<phi> (U\<^sub>0 \<union> V\<^sub>0) s'"
using lemma_R_for_BC_simple by blast
then have learn_U0: "learn_bc \<phi> U\<^sub>0 s'"
using learn_bc_closed_subseteq[of \<phi> "U\<^sub>0 \<union> V\<^sub>0" "s'"] by simp
then interpret r1_bc s'
by (simp add: r1_bc_def s'(1))
have "\<not> learn_bc \<phi> (U\<^sub>0 \<union> V\<^sub>0) s'"
using learn_bc_closed_subseteq U0_V0_not_learn_bc by simp
with s'(2) show False by simp
qed
theorem R1_not_in_BC: "\<R> \<notin> BC"
proof -
have "U\<^sub>0 \<union> V\<^sub>0 \<subseteq> \<R>"
using V0_def U0_in_NUM by auto
then show ?thesis
using U0_V0_not_in_BC BC_closed_subseteq by auto
qed
end |
[GOAL]
α : Type u_1
inst✝ : StrictOrderedSemiring α
n : ℕ
⊢ ↑n < ↑(n + 1)
[PROOFSTEP]
rw [Nat.cast_succ]
[GOAL]
α : Type u_1
inst✝ : StrictOrderedSemiring α
n : ℕ
⊢ ↑n < ↑n + 1
[PROOFSTEP]
apply lt_add_one
|
[STATEMENT]
lemma less_le_diff:"x < n \<Longrightarrow> x \<le> n - Suc 0"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. x < n \<Longrightarrow> x \<le> n - Suc 0
[PROOF STEP]
by arith |
On August 5, the KPA's 12th Division pushed back the ROK Capital Division in the Ch<unk>-Kigye area, and linked up with elements of the 766th which had infiltrated the area of <unk>. Unopposed, they began to prepare to attack P'ohang to secure entry into the UN's newly established Pusan Perimeter. The Regiment was ordered to begin an attack in coordination with the KPA's 5th Division. The Korean People's Army planned simultaneous offensives across the entire Perimeter, including a flanking maneuver by the 766th and the 5th Division to envelop UN troops and push them back to Pusan. The 766th was not reinforced; North Korean planners intended it to move unseen around the UN lines while the majority of the UN and North Korean troops were locked in fighting around Taegu and the Naktong Bulge.
|
State Before: α : Type u_2
β : Type u_3
γ : Type ?u.11121
δ : Type ?u.11124
ι : Sort u_1
s : Set α
t : Set β
f✝ : Filter α
g✝ : Filter β
inst✝ : Nonempty ι
f : ι → Filter α
g : Filter β
⊢ (⨅ (i : ι), f i) ×ˢ g = ⨅ (i : ι), f i ×ˢ g State After: α : Type u_2
β : Type u_3
γ : Type ?u.11121
δ : Type ?u.11124
ι : Sort u_1
s : Set α
t : Set β
f✝ : Filter α
g✝ : Filter β
inst✝ : Nonempty ι
f : ι → Filter α
g : Filter β
⊢ Filter.prod (⨅ (i : ι), f i) g = ⨅ (i : ι), Filter.prod (f i) g Tactic: dsimp only [SProd.sprod] State Before: α : Type u_2
β : Type u_3
γ : Type ?u.11121
δ : Type ?u.11124
ι : Sort u_1
s : Set α
t : Set β
f✝ : Filter α
g✝ : Filter β
inst✝ : Nonempty ι
f : ι → Filter α
g : Filter β
⊢ Filter.prod (⨅ (i : ι), f i) g = ⨅ (i : ι), Filter.prod (f i) g State After: α : Type u_2
β : Type u_3
γ : Type ?u.11121
δ : Type ?u.11124
ι : Sort u_1
s : Set α
t : Set β
f✝ : Filter α
g✝ : Filter β
inst✝ : Nonempty ι
f : ι → Filter α
g : Filter β
⊢ (⨅ (x : ι), comap Prod.fst (f x) ⊓ comap Prod.snd g) = ⨅ (i : ι), Filter.prod (f i) g Tactic: rw [Filter.prod, comap_iInf, iInf_inf] State Before: α : Type u_2
β : Type u_3
γ : Type ?u.11121
δ : Type ?u.11124
ι : Sort u_1
s : Set α
t : Set β
f✝ : Filter α
g✝ : Filter β
inst✝ : Nonempty ι
f : ι → Filter α
g : Filter β
⊢ (⨅ (x : ι), comap Prod.fst (f x) ⊓ comap Prod.snd g) = ⨅ (i : ι), Filter.prod (f i) g State After: no goals Tactic: simp only [Filter.prod, eq_self_iff_true] |
/-
Copyright (c) 2022 Yuma Mizuno. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yuma Mizuno, Junyan Xu
! This file was ported from Lean 3 source module category_theory.bicategory.coherence
! leanprover-community/mathlib commit f187f1074fa1857c94589cc653c786cadc4c35ff
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathbin.CategoryTheory.PathCategory
import Mathbin.CategoryTheory.Functor.FullyFaithful
import Mathbin.CategoryTheory.Bicategory.Free
import Mathbin.CategoryTheory.Bicategory.LocallyDiscrete
/-!
# The coherence theorem for bicategories
In this file, we prove the coherence theorem for bicategories, stated in the following form: the
free bicategory over any quiver is locally thin.
The proof is almost the same as the proof of the coherence theorem for monoidal categories that
has been previously formalized in mathlib, which is based on the proof described by Ilya Beylin
and Peter Dybjer. The idea is to view a path on a quiver as a normal form of a 1-morphism in the
free bicategory on the same quiver. A normalization procedure is then described by
`normalize : pseudofunctor (free_bicategory B) (locally_discrete (paths B))`, which is a
pseudofunctor from the free bicategory to the locally discrete bicategory on the path category.
It turns out that this pseudofunctor is locally an equivalence of categories, and the coherence
theorem follows immediately from this fact.
## Main statements
* `locally_thin` : the free bicategory is locally thin, that is, there is at most one
2-morphism between two fixed 1-morphisms.
## References
* [Ilya Beylin and Peter Dybjer, Extracting a proof of coherence for monoidal categories from a
proof of normalization for monoids][beylin1996]
-/
open Quiver (Path)
open Quiver.Path
namespace CategoryTheory
open Bicategory Category
open Bicategory
universe v u
namespace FreeBicategory
variable {B : Type u} [Quiver.{v + 1} B]
/- warning: category_theory.free_bicategory.inclusion_path_aux -> CategoryTheory.FreeBicategory.inclusionPathAux is a dubious translation:
lean 3 declaration is
forall {B : Type.{u2}} [_inst_1 : Quiver.{succ u1, u2} B] {a : B} {b : B}, (Quiver.Path.{succ u1, u2} B _inst_1 a b) -> (CategoryTheory.FreeBicategory.Hom.{u1, u2} B _inst_1 a b)
but is expected to have type
forall {B : Type.{u1}} [_inst_1 : Quiver.{succ u2, u1} B] {a : B} {b : B}, (Quiver.Path.{succ u2, u1} B _inst_1 a b) -> (CategoryTheory.FreeBicategory.Hom.{u2, u1} B _inst_1 a b)
Case conversion may be inaccurate. Consider using '#align category_theory.free_bicategory.inclusion_path_aux CategoryTheory.FreeBicategory.inclusionPathAuxₓ'. -/
/-- Auxiliary definition for `inclusion_path`. -/
@[simp]
def inclusionPathAux {a : B} : ∀ {b : B}, Path a b → Hom a b
| _, nil => Hom.id a
| _, cons p f => (inclusion_path_aux p).comp (Hom.of f)
#align category_theory.free_bicategory.inclusion_path_aux CategoryTheory.FreeBicategory.inclusionPathAux
/-- The discrete category on the paths includes into the category of 1-morphisms in the free
bicategory.
-/
def inclusionPath (a b : B) : Discrete (Path.{v + 1} a b) ⥤ Hom a b :=
Discrete.functor inclusionPathAux
#align category_theory.free_bicategory.inclusion_path CategoryTheory.FreeBicategory.inclusionPath
/-- The inclusion from the locally discrete bicategory on the path category into the free bicategory
as a prelax functor. This will be promoted to a pseudofunctor after proving the coherence theorem.
See `inclusion`.
-/
def preinclusion (B : Type u) [Quiver.{v + 1} B] :
PrelaxFunctor (LocallyDiscrete (Paths B)) (FreeBicategory B)
where
obj := id
map a b := (inclusionPath a b).obj
zipWith a b f g η := (inclusionPath a b).map η
#align category_theory.free_bicategory.preinclusion CategoryTheory.FreeBicategory.preinclusion
@[simp]
theorem preinclusion_obj (a : B) : (preinclusion B).obj a = a :=
rfl
#align category_theory.free_bicategory.preinclusion_obj CategoryTheory.FreeBicategory.preinclusion_obj
@[simp]
theorem preinclusion_map₂ {a b : B} (f g : Discrete (Path.{v + 1} a b)) (η : f ⟶ g) :
(preinclusion B).zipWith η = eqToHom (congr_arg _ (Discrete.ext _ _ (Discrete.eq_of_hom η))) :=
by
rcases η with ⟨⟨⟩⟩
cases discrete.ext _ _ η
exact (inclusion_path a b).map_id _
#align category_theory.free_bicategory.preinclusion_map₂ CategoryTheory.FreeBicategory.preinclusion_map₂
/- warning: category_theory.free_bicategory.normalize_aux -> CategoryTheory.FreeBicategory.normalizeAux is a dubious translation:
lean 3 declaration is
forall {B : Type.{u2}} [_inst_1 : Quiver.{succ u1, u2} B] {a : B} {b : B} {c : B}, (Quiver.Path.{succ u1, u2} B _inst_1 a b) -> (CategoryTheory.FreeBicategory.Hom.{u1, u2} B _inst_1 b c) -> (Quiver.Path.{succ u1, u2} B _inst_1 a c)
but is expected to have type
forall {B : Type.{u1}} [_inst_1 : Quiver.{succ u2, u1} B] {a : B} {b : B} {c : B}, (Quiver.Path.{succ u2, u1} B _inst_1 a b) -> (CategoryTheory.FreeBicategory.Hom.{u2, u1} B _inst_1 b c) -> (Quiver.Path.{succ u2, u1} B _inst_1 a c)
Case conversion may be inaccurate. Consider using '#align category_theory.free_bicategory.normalize_aux CategoryTheory.FreeBicategory.normalizeAuxₓ'. -/
/-- The normalization of the composition of `p : path a b` and `f : hom b c`.
`p` will eventually be taken to be `nil` and we then get the normalization
of `f` alone, but the auxiliary `p` is necessary for Lean to accept the definition of
`normalize_iso` and the `whisker_left` case of `normalize_aux_congr` and `normalize_naturality`.
-/
@[simp]
def normalizeAux {a : B} : ∀ {b c : B}, Path a b → Hom b c → Path a c
| _, _, p, hom.of f => p.cons f
| _, _, p, hom.id b => p
| _, _, p, hom.comp f g => normalize_aux (normalize_aux p f) g
#align category_theory.free_bicategory.normalize_aux CategoryTheory.FreeBicategory.normalizeAux
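/- Illustrative unfolding of the equations above (an informal sketch): taking
`p := nil`,
  `normalizeAux nil ((Hom.of f).comp (Hom.id _))`
    `= normalizeAux (normalizeAux nil (Hom.of f)) (Hom.id _)`
    `= normalizeAux (nil.cons f) (Hom.id _)`
    `= nil.cons f`,
so composing with an identity does not change the normal form, which is just a
path in the underlying quiver. -/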
/- warning: category_theory.free_bicategory.normalize_iso -> CategoryTheory.FreeBicategory.normalizeIso is a dubious translation:
lean 3 declaration is
forall {B : Type.{u2}} [_inst_1 : Quiver.{succ u1, u2} B] {a : B} {b : B} {c : B} (p : Quiver.Path.{succ u1, u2} B _inst_1 a b) (f : CategoryTheory.FreeBicategory.Hom.{u1, u2} B _inst_1 b c), CategoryTheory.Iso.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) 
(CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) c) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} B _inst_1 (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) 
(CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) c) (CategoryTheory.CategoryStruct.comp.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1)) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), 
u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} 
(CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) b) c (CategoryTheory.PrelaxFunctor.map.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} 
(CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a b (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b) p)) f) (CategoryTheory.PrelaxFunctor.map.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) 
(CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a c (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a c) (CategoryTheory.FreeBicategory.normalizeAux.{u1, u2} B _inst_1 a b c p f)))
but is expected to have type
PUnit.{max (succ (succ u1)) (succ (succ u2))}
Case conversion may be inaccurate. Consider using '#align category_theory.free_bicategory.normalize_iso CategoryTheory.FreeBicategory.normalizeIsoₓ'. -/
/-
We may define
```
def normalize_aux' : ∀ {a b : B}, hom a b → path a b
| _ _ (hom.of f) := f.to_path
| _ _ (hom.id b) := nil
| _ _ (hom.comp f g) := (normalize_aux' f).comp (normalize_aux' g)
```
and define `normalize_aux p f` to be `p.comp (normalize_aux' f)` and this will be
equal to the above definition, but the equality proof requires `comp_assoc`, and it
thus lacks the correct definitional property to make the definition of `normalize_iso`
typecheck.
```
example {a b c : B} (p : path a b) (f : hom b c) :
normalize_aux p f = p.comp (normalize_aux' f) :=
by { induction f, refl, refl,
case comp : _ _ _ _ _ ihf ihg { rw [normalize_aux, ihf, ihg], apply comp_assoc } }
```
-/
/-- A 2-isomorphism between a partially-normalized 1-morphism in the free bicategory to the
fully-normalized 1-morphism.
-/
@[simp]
def normalizeIso {a : B} :
∀ {b c : B} (p : Path a b) (f : Hom b c),
(preinclusion B).map ⟨p⟩ ≫ f ≅ (preinclusion B).map ⟨normalizeAux p f⟩
| _, _, p, hom.of f => Iso.refl _
| _, _, p, hom.id b => ρ_ _
| _, _, p, hom.comp f g =>
(α_ _ _ _).symm ≪≫ whiskerRightIso (normalize_iso p f) g ≪≫ normalize_iso (normalizeAux p f) g
#align category_theory.free_bicategory.normalize_iso CategoryTheory.FreeBicategory.normalizeIso
/-- Given a 2-morphism between `f` and `g` in the free bicategory, we have the equality
`normalize_aux p f = normalize_aux p g`.
-/
theorem normalizeAux_congr {a b c : B} (p : Path a b) {f g : Hom b c} (η : f ⟶ g) :
normalizeAux p f = normalizeAux p g :=
by
rcases η with ⟨⟩
apply @congr_fun _ _ fun p => normalize_aux p f
clear p
induction η
case vcomp => apply Eq.trans <;> assumption
  -- p ≠ nil required! See the docstring of `normalizeAux`.
case whisker_left _ _ _ _ _ _ _ ih => funext; apply congr_fun ih
case whisker_right _ _ _ _ _ _ _ ih => funext; apply congr_arg₂ _ (congr_fun ih p) rfl
all_goals funext; rfl
#align category_theory.free_bicategory.normalize_aux_congr CategoryTheory.FreeBicategory.normalizeAux_congr
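-- A minimal usage sketch: the normal form of a 1-morphism is invariant under 2-morphisms,
-- which is exactly `normalizeAux_congr` applied to `p` and `η`.
example {a b c : B} (p : Path a b) {f g : Hom b c} (η : f ⟶ g) :
    normalizeAux p f = normalizeAux p g :=
  normalizeAux_congr p η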
/-- The 2-isomorphism `normalizeIso p f` is natural in `f`. -/
theorem normalize_naturality {a b c : B} (p : Path a b) {f g : Hom b c} (η : f ⟶ g) :
    (preinclusion B).map ⟨p⟩ ◁ η ≫ (normalizeIso p g).Hom =
      (normalizeIso p f).Hom ≫
        (preinclusion B).map₂ (eqToHom (Discrete.ext _ _ (normalizeAux_congr p η))) :=
by
rcases η with ⟨⟩; induction η
case id => simp
  case vcomp _ _ _ _ _ _ _ ihf ihg =>
    rw [mk_vcomp, Bicategory.whiskerLeft_comp]
    slice_lhs 2 3 => rw [ihg]
    slice_lhs 1 2 => rw [ihf]
    simp
  case whisker_left _ _ _ _ _ _ _ ih =>
    -- p ≠ nil required! See the docstring of `normalizeAux`.
    dsimp
    simp_rw [associator_inv_naturality_right_assoc, whisker_exchange_assoc, ih, assoc]
  case whisker_right _ _ _ _ _ h η ih =>
    dsimp
    rw [associator_inv_naturality_middle_assoc, ← comp_whiskerRight_assoc, ih, comp_whiskerRight]
    have := dcongr_arg (fun x => (normalizeIso x h).Hom) (normalizeAux_congr p (Quot.mk _ η))
    dsimp at this; simp [this]
  all_goals dsimp; dsimp [id_def, comp_def]; simp
#align category_theory.free_bicategory.normalize_naturality CategoryTheory.FreeBicategory.normalize_naturality
@[simp]
theorem normalizeAux_nil_comp {a b c : B} (f : Hom a b) (g : Hom b c) :
normalizeAux nil (f.comp g) = (normalizeAux nil f).comp (normalizeAux nil g) :=
by
induction g generalizing a
case id => rfl
case of => rfl
case comp _ _ _ g _ ihf ihg => erw [ihg (f.comp g), ihf f, ihg g, comp_assoc]
#align category_theory.free_bicategory.normalize_aux_nil_comp CategoryTheory.FreeBicategory.normalizeAux_nil_comp
/-- The normalization pseudofunctor for the free bicategory on a quiver `B`. -/
def normalize (B : Type u) [Quiver.{v + 1} B] :
Pseudofunctor (FreeBicategory B) (LocallyDiscrete (Paths B))
where
obj := id
map a b f := ⟨normalizeAux nil f⟩
  map₂ a b f g η := eqToHom <| Discrete.ext _ _ <| normalizeAux_congr nil η
  mapId a := eqToIso <| Discrete.ext _ _ rfl
  mapComp a b c f g := eqToIso <| Discrete.ext _ _ <| normalizeAux_nil_comp f g
#align category_theory.free_bicategory.normalize CategoryTheory.FreeBicategory.normalize
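-- A minimal sketch: on objects the normalization pseudofunctor does nothing (this relies on
-- `FreeBicategory`, `Paths`, and `LocallyDiscrete` all being definitional type synonyms for `B`).
example (a : FreeBicategory B) : (normalize B).obj a = a :=
  rfl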
/-- Auxiliary definition for `normalize_equiv`. -/
def normalizeUnitIso (a b : FreeBicategory B) :
𝟭 (a ⟶ b) ≅ (normalize B).mapFunctor a b ⋙ inclusionPath a b :=
NatIso.ofComponents (fun f => (λ_ f).symm ≪≫ normalizeIso nil f)
(by
intro f g η
      erw [leftUnitor_inv_naturality_assoc, assoc]
congr 1
exact normalize_naturality nil η)
#align category_theory.free_bicategory.normalize_unit_iso CategoryTheory.FreeBicategory.normalizeUnitIso
/-- Normalization as an equivalence of categories. -/
def normalizeEquiv (a b : B) : Hom a b ≌ Discrete (Path.{v + 1} a b) :=
Equivalence.mk ((normalize _).mapFunctor a b) (inclusionPath a b) (normalizeUnitIso a b)
(Discrete.natIso fun f => eqToIso (by induction f <;> induction f <;> tidy))
#align category_theory.free_bicategory.normalize_equiv CategoryTheory.FreeBicategory.normalizeEquiv
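-- A minimal usage sketch: `normalizeEquiv` sends a 1-morphism to its normal form, viewed as
-- an object of the discrete category of paths.
example {a b : B} (f : Hom a b) : Discrete (Path.{v + 1} a b) :=
  (normalizeEquiv a b).Functor.obj f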
/-- The coherence theorem for bicategories. -/
instance locally_thin {a b : FreeBicategory B} : Quiver.IsThin (a ⟶ b) := fun _ _ =>
⟨fun η θ => (normalizeEquiv a b).Functor.map_injective (Subsingleton.elim _ _)⟩
#align category_theory.free_bicategory.locally_thin CategoryTheory.FreeBicategory.locally_thin
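-- A minimal sketch of the coherence theorem in use (assuming, as in mathlib, that
-- `Quiver.IsThin` unfolds to a `Subsingleton` instance on each 2-morphism type): any two
-- parallel 2-morphisms in the free bicategory are equal.
example {a b : FreeBicategory B} {f g : a ⟶ b} (η θ : f ⟶ g) : η = θ :=
  Subsingleton.elim η θ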
/- warning: category_theory.free_bicategory.inclusion_map_comp_aux -> CategoryTheory.FreeBicategory.inclusionMapCompAux is a dubious translation:
lean 3 declaration is
forall {B : Type.{u2}} [_inst_1 : Quiver.{succ u1, u2} B] {a : B} {b : B} {c : B} (f : Quiver.Path.{succ u1, u2} B _inst_1 a b) (g : Quiver.Path.{succ u1, u2} B _inst_1 b c), CategoryTheory.Iso.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) 
(CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) 
(CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) c)) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} B _inst_1 (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) 
(CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) 
(CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) c)) (CategoryTheory.PrelaxFunctor.map.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) 
(CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a c (CategoryTheory.CategoryStruct.comp.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b c (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b) f) (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) b c) g))) (CategoryTheory.CategoryStruct.comp.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1)) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) b) (CategoryTheory.PrelaxFunctor.obj.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) c) (CategoryTheory.PrelaxFunctor.map.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) a b (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b) f)) (CategoryTheory.PrelaxFunctor.map.{max u2 u1, max u2 u1, max u2 u1, max u2 u1, u2, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (b : CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) 
(CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.LocallyDiscrete.{u2} (CategoryTheory.Paths.{u2} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u2} B) (b : CategoryTheory.FreeBicategory.{u2} B) => CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u2 u1, max u2 u1, u2} (CategoryTheory.FreeBicategory.{u2} B) (CategoryTheory.FreeBicategory.bicategory.{u1, u2} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u1, u2} (CategoryTheory.FreeBicategory.{u2} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u1, u2} B _inst_1) b c (CategoryTheory.Discrete.mk.{max u2 u1} (Quiver.Hom.{succ (max u2 u1), u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.CategoryStruct.toQuiver.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Category.toCategoryStruct.{max u2 u1, u2} (CategoryTheory.Paths.{u2} B) (CategoryTheory.Paths.categoryPaths.{u1, u2} B _inst_1))) b c) g)))
but is expected to have type
forall {B : Type.{u1}} [_inst_1 : Quiver.{succ u2, u1} B] {a : B} {b : B} {c : B} (f : Quiver.Path.{succ u2, u1} B _inst_1 a b) (g : Quiver.Path.{succ u2, u1} B _inst_1 b c), CategoryTheory.Iso.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) 
(CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) 
(CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) c)) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} B _inst_1 (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) 
(CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) 
(CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) c)) (CategoryTheory.PrelaxFunctor.map.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) 
(CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) a c (CategoryTheory.CategoryStruct.comp.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b c (CategoryTheory.Discrete.mk.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b) f) (CategoryTheory.Discrete.mk.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) b c) g))) (CategoryTheory.CategoryStruct.comp.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1)) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) a) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) b) (CategoryTheory.PrelaxFunctor.obj.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) c) (CategoryTheory.PrelaxFunctor.map.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) 
(CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) a b (CategoryTheory.Discrete.mk.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b) f)) (CategoryTheory.PrelaxFunctor.map.{max u1 u2, max u1 u2, max u1 u2, max u1 u2, u1, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) (fun (a : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (b : CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) 
(CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.LocallyDiscrete.{u1} (CategoryTheory.Paths.{u1} B)) (CategoryTheory.LocallyDiscrete.CategoryTheory.categoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1)))) a b) (CategoryTheory.discreteCategory.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) a b)))) (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) (fun (a : CategoryTheory.FreeBicategory.{u1} B) (b : CategoryTheory.FreeBicategory.{u1} B) => CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.Bicategory.toCategoryStruct.{max u1 u2, max u1 u2, u1} (CategoryTheory.FreeBicategory.{u1} B) (CategoryTheory.FreeBicategory.bicategory.{u2, u1} B _inst_1))) a b) (CategoryTheory.FreeBicategory.homCategory.{u2, u1} (CategoryTheory.FreeBicategory.{u1} B) _inst_1 a b))) (CategoryTheory.FreeBicategory.preinclusion.{u2, u1} B _inst_1) b c (CategoryTheory.Discrete.mk.{max u1 u2} (Quiver.Hom.{succ (max u1 u2), u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Category.toCategoryStruct.{max u1 u2, u1} (CategoryTheory.Paths.{u1} B) (CategoryTheory.Paths.categoryPaths.{u2, u1} B _inst_1))) b c) g)))
Case conversion may be inaccurate. Consider using '#align category_theory.free_bicategory.inclusion_map_comp_aux CategoryTheory.FreeBicategory.inclusionMapCompAuxₓ'. -/
/-- Auxiliary definition for `inclusion`. -/
def inclusionMapCompAux {a b : B} :
∀ {c : B} (f : Path a b) (g : Path b c),
(preinclusion _).map (⟨f⟩ ≫ ⟨g⟩) ≅ (preinclusion _).map ⟨f⟩ ≫ (preinclusion _).map ⟨g⟩
| _, f, nil => (ρ_ ((preinclusion _).map ⟨f⟩)).symm
| _, f, cons g₁ g₂ => whiskerRightIso (inclusionMapCompAux f g₁) (Hom.of g₂) ≪≫ α_ _ _ _
#align category_theory.free_bicategory.inclusion_map_comp_aux CategoryTheory.FreeBicategory.inclusionMapCompAux
/-- The inclusion pseudofunctor from the locally discrete bicategory on the path category into the
free bicategory.
-/
def inclusion (B : Type u) [Quiver.{v + 1} B] :
Pseudofunctor (LocallyDiscrete (Paths B)) (FreeBicategory B) :=
{ -- All the conditions for 2-morphisms are trivial thanks to the coherence theorem!
  preinclusion B with
  map_id := fun a => Iso.refl (𝟙 a)
  map_comp := fun a b c f g => inclusionMapCompAux f.as g.as }
#align category_theory.free_bicategory.inclusion CategoryTheory.FreeBicategory.inclusion
end FreeBicategory
end CategoryTheory
|
(* Title: HOL/Auth/n_germanSimp_lemma_inv__14_on_rules.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_germanSimp Protocol Case Study*}
theory n_germanSimp_lemma_inv__14_on_rules imports n_germanSimp_lemma_on_inv__14
begin
section{*All lemmas on the causal relation between inv__14 and rules*}
lemma lemma_inv__14_on_rules:
assumes b1: "r \<in> rules N" and b2: "(\<exists> p__Inv3 p__Inv4. p__Inv3\<le>N\<and>p__Inv4\<le>N\<and>p__Inv3~=p__Inv4\<and>f=inv__14 p__Inv3 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
proof -
have c1: "(\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqS N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__0 N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__1 N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendGntS i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)"
apply (cut_tac b1, auto) done
moreover {
assume d1: "(\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_StoreVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqS N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqSVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__0 N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqE__part__0Vsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__1 N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqE__part__1Vsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInv__part__0Vsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInv__part__1Vsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInvAckVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvInvAckVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendGntS i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendGntSVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendGntEVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvGntSVsinv__14) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvGntEVsinv__14) done
}
ultimately show "invHoldForRule s f r (invariants N)"
by satx
qed
end
|
{-# OPTIONS --cubical --no-import-sorts --safe #-}
module Cubical.Algebra.Monoid.Base where
open import Cubical.Foundations.Prelude
open import Cubical.Foundations.Equiv
open import Cubical.Foundations.Equiv.HalfAdjoint
open import Cubical.Foundations.Function
open import Cubical.Foundations.HLevels
open import Cubical.Foundations.Isomorphism
open import Cubical.Foundations.Univalence
open import Cubical.Foundations.Transport
open import Cubical.Foundations.SIP
open import Cubical.Data.Sigma
open import Cubical.Structures.Axioms
open import Cubical.Structures.Auto
open import Cubical.Structures.Record
open import Cubical.Algebra.Semigroup
open import Cubical.Reflection.StrictEquiv
open Iso
private
variable
ℓ : Level
record IsMonoid {A : Type ℓ} (ε : A) (_·_ : A → A → A) : Type ℓ where
constructor ismonoid
field
isSemigroup : IsSemigroup _·_
identity : (x : A) → (x · ε ≡ x) × (ε · x ≡ x)
open IsSemigroup isSemigroup public
lid : (x : A) → ε · x ≡ x
lid x = identity x .snd
rid : (x : A) → x · ε ≡ x
rid x = identity x .fst
record MonoidStr (A : Type ℓ) : Type (ℓ-suc ℓ) where
constructor monoidstr
field
ε : A
_·_ : A → A → A
isMonoid : IsMonoid ε _·_
infixl 7 _·_
open IsMonoid isMonoid public
-- semigrp : Semigroup
-- semigrp = record { isSemigroup = isSemigroup }
-- open Semigroup semigrp public
Monoid : Type (ℓ-suc ℓ)
Monoid = TypeWithStr _ MonoidStr
monoid : (A : Type ℓ) (ε : A) (_·_ : A → A → A) (h : IsMonoid ε _·_) → Monoid
monoid A ε _·_ h = A , monoidstr ε _·_ h
-- Easier to use constructors
makeIsMonoid : {M : Type ℓ} {ε : M} {_·_ : M → M → M}
(is-setM : isSet M)
(assoc : (x y z : M) → x · (y · z) ≡ (x · y) · z)
(rid : (x : M) → x · ε ≡ x)
(lid : (x : M) → ε · x ≡ x)
→ IsMonoid ε _·_
IsMonoid.isSemigroup (makeIsMonoid is-setM assoc rid lid) = issemigroup is-setM assoc
IsMonoid.identity (makeIsMonoid is-setM assoc rid lid) = λ x → rid x , lid x
makeMonoid : {M : Type ℓ} (ε : M) (_·_ : M → M → M)
(is-setM : isSet M)
(assoc : (x y z : M) → x · (y · z) ≡ (x · y) · z)
(rid : (x : M) → x · ε ≡ x)
(lid : (x : M) → ε · x ≡ x)
→ Monoid
makeMonoid ε _·_ is-setM assoc rid lid =
monoid _ ε _·_ (makeIsMonoid is-setM assoc rid lid)
record MonoidEquiv (M N : Monoid {ℓ}) (e : ⟨ M ⟩ ≃ ⟨ N ⟩) : Type ℓ where
constructor monoidiso
private
module M = MonoidStr (snd M)
module N = MonoidStr (snd N)
field
presε : equivFun e M.ε ≡ N.ε
isHom : (x y : ⟨ M ⟩) → equivFun e (x M.· y) ≡ equivFun e x N.· equivFun e y
module MonoidΣTheory {ℓ} where
RawMonoidStructure : Type ℓ → Type ℓ
RawMonoidStructure X = X × (X → X → X)
RawMonoidEquivStr = AutoEquivStr RawMonoidStructure
rawMonoidUnivalentStr : UnivalentStr _ RawMonoidEquivStr
rawMonoidUnivalentStr = autoUnivalentStr RawMonoidStructure
MonoidAxioms : (M : Type ℓ) → RawMonoidStructure M → Type ℓ
MonoidAxioms M (e , _·_) = IsSemigroup _·_
× ((x : M) → (x · e ≡ x) × (e · x ≡ x))
MonoidStructure : Type ℓ → Type ℓ
MonoidStructure = AxiomsStructure RawMonoidStructure MonoidAxioms
MonoidΣ : Type (ℓ-suc ℓ)
MonoidΣ = TypeWithStr ℓ MonoidStructure
isPropMonoidAxioms : (M : Type ℓ) (s : RawMonoidStructure M) → isProp (MonoidAxioms M s)
isPropMonoidAxioms M (e , _·_) =
isPropΣ (isPropIsSemigroup _·_)
λ α → isPropΠ λ _ → isProp× (IsSemigroup.is-set α _ _) (IsSemigroup.is-set α _ _)
MonoidEquivStr : StrEquiv MonoidStructure ℓ
MonoidEquivStr = AxiomsEquivStr RawMonoidEquivStr MonoidAxioms
MonoidAxiomsIsoIsMonoid : {M : Type ℓ} (s : RawMonoidStructure M)
→ Iso (MonoidAxioms M s) (IsMonoid (s .fst) (s .snd))
fun (MonoidAxiomsIsoIsMonoid s) (x , y) = ismonoid x y
inv (MonoidAxiomsIsoIsMonoid s) a = (IsMonoid.isSemigroup a) , IsMonoid.identity a
rightInv (MonoidAxiomsIsoIsMonoid s) _ = refl
leftInv (MonoidAxiomsIsoIsMonoid s) _ = refl
MonoidAxioms≡IsMonoid : {M : Type ℓ} (s : RawMonoidStructure M)
→ MonoidAxioms M s ≡ IsMonoid (s .fst) (s .snd)
MonoidAxioms≡IsMonoid s = ua (strictIsoToEquiv (MonoidAxiomsIsoIsMonoid s))
open MonoidStr
Monoid→MonoidΣ : Monoid → MonoidΣ
Monoid→MonoidΣ (A , M) =
A , (ε M , _·_ M) , MonoidAxiomsIsoIsMonoid (ε M , _·_ M) .inv (isMonoid M)
MonoidΣ→Monoid : MonoidΣ → Monoid
MonoidΣ→Monoid (M , (ε , _·_) , isMonoidΣ) =
monoid M ε _·_ (MonoidAxiomsIsoIsMonoid (ε , _·_) .fun isMonoidΣ)
MonoidIsoMonoidΣ : Iso Monoid MonoidΣ
MonoidIsoMonoidΣ =
iso Monoid→MonoidΣ MonoidΣ→Monoid (λ _ → refl) (λ _ → refl)
monoidUnivalentStr : UnivalentStr MonoidStructure MonoidEquivStr
monoidUnivalentStr = axiomsUnivalentStr _ isPropMonoidAxioms rawMonoidUnivalentStr
MonoidΣPath : (M N : MonoidΣ) → (M ≃[ MonoidEquivStr ] N) ≃ (M ≡ N)
MonoidΣPath = SIP monoidUnivalentStr
MonoidEquivΣ : (M N : Monoid) → Type ℓ
MonoidEquivΣ M N = Monoid→MonoidΣ M ≃[ MonoidEquivStr ] Monoid→MonoidΣ N
MonoidIsoΣPath : {M N : Monoid} → Iso (Σ[ e ∈ ⟨ M ⟩ ≃ ⟨ N ⟩ ] (MonoidEquiv M N e)) (MonoidEquivΣ M N)
fun MonoidIsoΣPath (e , monoidiso h1 h2) = (e , h1 , h2)
inv MonoidIsoΣPath (e , h1 , h2) = (e , monoidiso h1 h2)
rightInv MonoidIsoΣPath _ = refl
leftInv MonoidIsoΣPath _ = refl
MonoidPath : (M N : Monoid {ℓ}) → (Σ[ e ∈ ⟨ M ⟩ ≃ ⟨ N ⟩ ] (MonoidEquiv M N e)) ≃ (M ≡ N)
MonoidPath M N =
Σ[ e ∈ ⟨ M ⟩ ≃ ⟨ N ⟩ ] MonoidEquiv M N e ≃⟨ isoToEquiv MonoidIsoΣPath ⟩
MonoidEquivΣ M N ≃⟨ MonoidΣPath _ _ ⟩
Monoid→MonoidΣ M ≡ Monoid→MonoidΣ N ≃⟨ isoToEquiv (invIso (congIso MonoidIsoMonoidΣ)) ⟩
M ≡ N ■
RawMonoidΣ : Type (ℓ-suc ℓ)
RawMonoidΣ = TypeWithStr ℓ RawMonoidStructure
Monoid→RawMonoidΣ : Monoid → RawMonoidΣ
Monoid→RawMonoidΣ (A , M) = A , (ε M) , (_·_ M)
InducedMonoid : (M : Monoid) (N : RawMonoidΣ) (e : M .fst ≃ N .fst)
→ RawMonoidEquivStr (Monoid→RawMonoidΣ M) N e → Monoid
InducedMonoid M N e r =
MonoidΣ→Monoid (inducedStructure rawMonoidUnivalentStr (Monoid→MonoidΣ M) N (e , r))
InducedMonoidPath : (M : Monoid {ℓ}) (N : RawMonoidΣ) (e : M .fst ≃ N .fst)
(E : RawMonoidEquivStr (Monoid→RawMonoidΣ M) N e)
→ M ≡ InducedMonoid M N e E
InducedMonoidPath M N e E =
MonoidPath M (InducedMonoid M N e E) .fst (e , monoidiso (E .fst) (E .snd))
-- We now extract the important results from the above module
isPropIsMonoid : {M : Type ℓ} (ε : M) (_·_ : M → M → M) → isProp (IsMonoid ε _·_)
isPropIsMonoid ε _·_ =
subst isProp (MonoidΣTheory.MonoidAxioms≡IsMonoid (ε , _·_))
(MonoidΣTheory.isPropMonoidAxioms _ (ε , _·_))
MonoidPath : (M N : Monoid {ℓ}) → (Σ[ e ∈ ⟨ M ⟩ ≃ ⟨ N ⟩ ] MonoidEquiv M N e) ≃ (M ≡ N)
MonoidPath {ℓ = ℓ} =
SIP
(autoUnivalentRecord
(autoRecordSpec (MonoidStr {ℓ}) MonoidEquiv
(fields:
data[ ε ∣ presε ]
data[ _·_ ∣ isHom ]
prop[ isMonoid ∣ (λ _ → isPropIsMonoid _ _) ]))
_ _)
where
open MonoidStr
open MonoidEquiv
InducedMonoid : (M : Monoid {ℓ}) (N : MonoidΣTheory.RawMonoidΣ) (e : M .fst ≃ N .fst)
→ MonoidΣTheory.RawMonoidEquivStr (MonoidΣTheory.Monoid→RawMonoidΣ M) N e
→ Monoid
InducedMonoid = MonoidΣTheory.InducedMonoid
InducedMonoidPath : (M : Monoid {ℓ}) (N : MonoidΣTheory.RawMonoidΣ) (e : M .fst ≃ N .fst)
(E : MonoidΣTheory.RawMonoidEquivStr (MonoidΣTheory.Monoid→RawMonoidΣ M) N e)
→ M ≡ InducedMonoid M N e E
InducedMonoidPath = MonoidΣTheory.InducedMonoidPath
module MonoidTheory {ℓ} (M : Monoid {ℓ}) where
open MonoidStr (snd M)
-- Added for its use in groups
-- If there exists an inverse of an element, it is unique
inv-lemma : (x y z : ⟨ M ⟩) → y · x ≡ ε → x · z ≡ ε → y ≡ z
inv-lemma x y z left-inverse right-inverse =
y ≡⟨ sym (rid y) ⟩
y · ε ≡⟨ cong (λ - → y · -) (sym right-inverse) ⟩
y · (x · z) ≡⟨ assoc y x z ⟩
(y · x) · z ≡⟨ cong (λ - → - · z) left-inverse ⟩
ε · z ≡⟨ lid z ⟩
z ∎
|
# A more complex example for modeling drug-protein interactions with asymmetric RAF kinase dimerization and two RAF inhibitors
Here we present a more complex example of modeling RAF kinases and RAF inhibitors: we reproduce a model of asymmetric RAF kinase dimerization combined with two RAF inhibitors. The model was originally developed in *Kholodenko BN (2015) Drug Resistance Resulting from Kinase Dimerization Is Rationalized by Thermodynamic Factors Describing Allosteric Inhibitor Effects. Cell Rep 12: 1939–1949*; in that paper the model is shown in *Figure 3* and the corresponding rate derivations in *Reaction List S3*. Whereas the cooperative thermodynamic reaction rates were derived manually in the original work, here we use energy PySB (via energy BNG) to recreate the same reaction network structure.
First, we define the model:
```python
from pysb import Model, Monomer, Parameter, Expression, Rule, Observable, Initial, Annotation, EnergyPattern, ANY
from pysb.bng import generate_equations
from pysb.export import export
from pysb.core import as_complex_pattern, ComplexPattern
from sympy import exp, log
Model();
model.name='complex_example_asym_RAF_two_RAFi';
#define RAF kinase with a binding site for RAF (r) and one for a drug (i); the kinase can adopt one of two configurations when dimerized (R1, R2)
Monomer('R', ['r', 'i', 'state'], {'state':['none', 'R1', 'R2']});
#define first RAF inhibitor with binding site for RAF
Monomer('I1',['r']);
#define second RAF inhibitor with binding site for RAF
Monomer('I2',['r']);
#define the initial conditions for R and I1,I2
Parameter('R_0',0.01); # uM
Parameter('I1_0',0.0); # uM
Parameter('I2_0',0.0); # uM
Initial(R(r=None, i=None, state='none'), R_0);
Initial(I1(r=None), I1_0);
Initial(I2(r=None), I2_0);
#define kinetic parameters and rules for independent binding
#RAF dimerization
#define dissociation constant (Kd), forward rate (kf) and rate-distribution parameter (phi) for RAF dimerization
Parameter('kr_1',10); #/s
Parameter('kf_1',1.0); #/s/uM
Expression('Kd_1', kr_1 / kf_1);
Parameter('phi_1',1.0); #unitless
#convert kinetic parameters into energies for RAF dimerization
Expression('Gf_1', log(kr_1/kf_1)); #unitless
Expression('Ea0_1',-phi_1*log(kr_1/kf_1)-log(kf_1)); #unitless
# define energy in bond between R and R
EnergyPattern('ep_1',R(r=1)%R(r=1),Gf_1);
#define RAF dimerization reaction with asymmetry of configurations R1 and R2
Rule('R1', R(r=None, state='none')+R(r=None, state='none') | R(r=1, state='R1')%R(r=1, state='R2') , phi_1, Ea0_1, energy=True);
#I1 binding to RAF
#define dissociation constant (Kd), forward rate (kf) and rate-distribution parameter (phi) for drug binding to RAF
Parameter('kr_2a',0.1); #/s
Parameter('kf_2a',1.0); #/s/uM
Expression('Kd_2a', kr_2a / kf_2a);
Parameter('phi_2a',1.0); #unitless
#convert kinetic parameters into energies for drug binding to RAF
Expression('Gf_2a', log(kr_2a/kf_2a)); #unitless
Expression('Ea0_2a',-phi_2a*log(kr_2a/kf_2a)-log(kf_2a)); #unitless
# define energy in bond between R and I
EnergyPattern('ep_2a',R(i=1)%I1(r=1),Gf_2a);
#define drug binding to RAF reaction
Rule('R2a', R(i=None)+I1(r=None) | R(i=1)%I1(r=1) , phi_2a, Ea0_2a, energy=True);
#I2 binding to RAF
#define dissociation constant (Kd), forward rate (kf) and rate-distribution parameter (phi) for drug binding to RAF
Parameter('kr_2b',0.1); #/s
Parameter('kf_2b',1.0); #/s/uM
Expression('Kd_2b', kr_2b / kf_2b);
Parameter('phi_2b',1.0); #unitless
#convert kinetic parameters into energies for drug binding to RAF
Expression('Gf_2b', log(kr_2b/kf_2b)); #unitless
Expression('Ea0_2b',-phi_2b*log(kr_2b/kf_2b)-log(kf_2b)); #unitless
# define energy in bond between R and I
EnergyPattern('ep_3',R(i=1)%I2(r=1),Gf_2b);
#define drug binding to RAF reaction
Rule('R2b', R(i=None)+I2(r=None) | R(i=1)%I2(r=1) , phi_2b, Ea0_2b, energy=True);
```
Next, we define the thermodynamic parameters and impose energy patterns to model cooperativity in drug-protein interactions.
The thermodynamic parameters quantify the following (a short sketch of how these factors translate into the generated rates follows the list):
* *f_a*: The Kd ratio of I1 binding to the free monomer R versus the protomer R1 in the R1-R2 dimer (yielding R1I1-R2)
* *f_b*: The Kd ratio of I2 binding to the free monomer R versus the protomer R1 in the R1-R2 dimer (yielding R1I2-R2)
* *g_1a*: The Kd ratio of I1 binding to the protomer R1 versus the protomer R2 in the R1-R2 dimer (yielding R1-R2I1)
* *g_1b*: The Kd ratio of I2 binding to the protomer R1 versus the protomer R2 in the R1-R2 dimer (yielding R1-R2I2)
* *g_2a*: The Kd ratio of I1 binding to the free monomer R versus the protomer R1 in the R1-R2I1 dimer (yielding R1I1-R2I1)
* *g_2b*: The Kd ratio of I2 binding to the free monomer R versus the protomer R1 in the R1-R2I2 dimer (yielding R1I2-R2I2)
* *g_3a*: The Kd ratio of I1 binding to the free monomer R versus the protomer R1 in the R1-R2I2 dimer (yielding R1I1-R2I2)
* *g_3b*: The Kd ratio of I2 binding to the free monomer R versus the protomer R1 in the R1-R2I1 dimer (yielding R1I2-R2I1)
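
To make explicit how these factors enter the generated rates, here is a short sketch of the energy-BNG rate convention we rely on above (this is our summary of the convention, checked against the generated reaction table further below, not a derivation taken from the original paper). A reversible rule with rate-distribution parameter $\phi$ and baseline activation energy $Ea_0$ converts the total pattern energy change $\Delta G$ of a reaction into forward and backward rates:

$$
k_{+} = e^{-\left(Ea_0 + \phi\,\Delta G\right)}, \qquad
k_{-} = e^{-\left(Ea_0 + (\phi-1)\,\Delta G\right)}, \qquad
K_d = \frac{k_{-}}{k_{+}} = e^{\Delta G}.
$$

For example, when I1 binds the R1 protomer of an inhibitor-free dimer, the bond energy $\ln(kr_{2a}/kf_{2a})$ is supplemented by the `ep_fa_R2R1I1` pattern energy $\ln f_a$, so with $Ea_{0,2a} = -\phi_{2a}\ln(kr_{2a}/kf_{2a}) - \ln kf_{2a}$ the formulas above give

$$
k_{+} = f_a^{-\phi_{2a}}\, kf_{2a}, \qquad
k_{-} = f_a^{\,1-\phi_{2a}}\, kr_{2a}, \qquad
K_d = f_a\,\frac{kr_{2a}}{kf_{2a}},
$$

which is exactly how $f_a$ appears in the generated reaction table shown further below.
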
```python
#define thermodynamic factors
Parameter('f_a',1.0); #unitless
Parameter('f_b',1.0); #unitless
Parameter('g_1a',1.0); #unitless
Parameter('g_1b',1.0); #unitless
Parameter('g_2a',1.0); #unitless
Parameter('g_2b',1.0); #unitless
Parameter('g_3a',1.0); #unitless
Parameter('g_3b',1.0); #unitless
#convert thermodynamic factors in energies
Expression('Gf_fa',log(f_a)); #unitless
Expression('Gf_fb',log(f_b)); #unitless
Expression('Gf_g1a',log(g_1a)); #unitless
Expression('Gf_g1b',log(g_1b)); #unitless
Expression('Gf_g2a',log(g_2a)); #unitless
Expression('Gf_g2b',log(g_2b)); #unitless
Expression('Gf_g3a',log(g_3a)); #unitless
Expression('Gf_g3b',log(g_3b)); #unitless
#define energy patterns for fa
EnergyPattern('ep_fa_R2R1I1',R(r=1,state='R2')%R(r=1,i=2,state='R1')%I1(r=2), Gf_fa);
#define energy patterns for fb
EnergyPattern('ep_fb_R2R1I2',R(r=1,state='R2')%R(r=1,i=2,state='R1')%I2(r=2), Gf_fb);
#define energy patterns for g1a
Expression('Gf_g1a_fa', Gf_g1a + Gf_fa);
EnergyPattern('ep_g1a_R1R2I1',R(r=1,state='R1')%R(r=1,i=2,state='R2')%I1(r=2), Gf_g1a_fa);
#define energy patterns for g1b
Expression('Gf_g1b_fb', Gf_g1b + Gf_fb);
EnergyPattern('ep_g1b_R1R2I2',R(r=1,state='R1')%R(r=1,i=2,state='R2')%I2(r=2), Gf_g1b_fb);
#define energy pattern for g2a
Expression('Gf_g2a_fa', Gf_g2a - Gf_fa);
EnergyPattern('ep_g2a_I1R1R2I1',I1(r=3)%R(r=1,i=3,state='R1')%R(r=1,i=2,state='R2')%I1(r=2), Gf_g2a_fa);
#define energy pattern for g2b
Expression('Gf_g2b_fb', Gf_g2b - Gf_fb);
EnergyPattern('ep_g2b_I2R1R2I2',I2(r=3)%R(r=1,i=3,state='R1')%R(r=1,i=2,state='R2')%I2(r=2), Gf_g2b_fb);
#define energy pattern for g3a
Expression('Gf_g3a_fa', Gf_g3a - Gf_fa);
EnergyPattern('ep_g3a_I1R1R2I2',I2(r=3)%R(r=1,i=3,state='R2')%R(r=1,i=2,state='R1')%I1(r=2), Gf_g3a_fa);
#define energy pattern for g3b
Expression('Gf_g3b_fb', Gf_g3b - Gf_fb);
EnergyPattern('ep_g3b_I2R1R2I1',I1(r=3)%R(r=1,i=3,state='R2')%R(r=1,i=2,state='R1')%I2(r=2), Gf_g3b_fb);
#define the observable for total kinase activity (proportional to the weighted sum of the concentrations of
#inhibitor-free monomers and dimers and partially inhibited dimers: we assume lambda = 10-15, p1=p2=0.5, see Section S2.5
#of the original paper)
Parameter('kcat',1.0);
Parameter('plambda',10.0);
Parameter('p1', 0.5);
Parameter('p2', 0.5);
Observable('R_obs', R(i=None, r=None));
Observable('RR_obs', R(i=None, r=1)%R(i=None, r=1));
Observable('R1R2I_obs', R(i=ANY, r=1, state='R1')%R(i=None, r=1, state='R2'));
Observable('R2R1I_obs', R(i=ANY, r=1, state='R2')%R(i=None, r=1, state='R1'));
Observable('IRRI_obs', R(i=ANY, r=1,)%R(i=ANY, r=1));
Expression('R_active', kcat * (R_obs + plambda * ( RR_obs + p1 * (R1R2I_obs) + p2 * (R2R1I_obs) ) ));
```
We generate the kinetic model:
```python
from util_display import display_model_info
# generate the model equations
generate_equations(model)
#display model informations
display_model_info(model)
#save the generated model in PySB and BNG format
generated_model_code = export(model, 'pysb_flat')
with open(model.name+'.py', 'wt') as f:
f.write(generated_model_code);
generated_model_code = export(model, 'bngl')
with open(model.name+'.bngl', 'wt') as f:
f.write(generated_model_code);
```
    Model information
    Species: 14
    Parameters: 27
    Expressions: 24
    Observables: 5
    Total Rules: 3
    Energy Rules: 3
    Non-energy Rules: 0
    Energy Patterns: 11
    Reactions: 46
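
As a quick sanity check (not part of the original notebook), the generated model can be simulated directly. The sketch below is a minimal example assuming PySB's `ScipyOdeSimulator`; the time span and the I1 titration values are arbitrary illustrative choices, and `R_active` is the expression defined above.

```python
# Minimal simulation sketch (assumption: PySB's ScipyOdeSimulator is used;
# the time span and inhibitor doses are arbitrary illustrative choices).
import numpy as np
from pysb.simulator import ScipyOdeSimulator

tspan = np.linspace(0, 1000, 201)  # seconds, long enough to approach steady state
sim = ScipyOdeSimulator(model, tspan=tspan)

for i1 in [0.0, 0.01, 0.1, 1.0]:  # uM of inhibitor I1, with I2 absent
    res = sim.run(param_values={'I1_0': i1, 'I2_0': 0.0})
    # res.expressions is a record array; take the last time point of R_active
    print(f"I1_0 = {i1:5.2f} uM -> R_active(t_end) = {res.expressions['R_active'][-1]:.4g}")
```

With all thermodynamic factors left at 1.0, the energy patterns contribute nothing and inhibitor binding is independent of dimerization; cooperative effects only appear once the `f`/`g` parameters are moved away from 1.
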
We inspect the species and reaction rates automatically generated by the energy-based model formulation:
```python
from util_display import format_species_reactions, display_table
import pandas as pd
# Prevent pandas from truncating long LaTeX expressions when rendering.
pd.options.display.max_colwidth=None
#obtain dataframes with LaTeX-formatted species and reactions for display
(speciesdisp, reactionsdisp)=format_species_reactions(model);
#display species with the right style
display_table(speciesdisp, caption='SPECIES');
#display reactions in the same order as Reaction List S3 in original publication
reaction_order=[0,1,2,11,13, 12, 14, 5, 8, 3, 4, 17, 22, 15, 20, 6, 10, 7, 21, 16, 9, 18, 19];
display_table(reactionsdisp.iloc[reaction_order], caption='REACTIONS');
```
<table id="T_50483_" ><caption>SPECIES</caption><thead> <tr> <th class="col_heading level0 col0" >ID</th> <th class="col_heading level0 col1" >Pattern</th> </tr></thead><tbody>
<tr>
<td id="T_50483_row0_col0" class="data row0 col0" >\[s_{0}\]</td>
<td id="T_50483_row0_col1" class="data row0 col1" >R(r=None, i=None, state='none')</td>
</tr>
<tr>
<td id="T_50483_row1_col0" class="data row1 col0" >\[s_{1}\]</td>
<td id="T_50483_row1_col1" class="data row1 col1" >I1(r=None)</td>
</tr>
<tr>
<td id="T_50483_row2_col0" class="data row2 col0" >\[s_{2}\]</td>
<td id="T_50483_row2_col1" class="data row2 col1" >I2(r=None)</td>
</tr>
<tr>
<td id="T_50483_row3_col0" class="data row3 col0" >\[s_{3}\]</td>
<td id="T_50483_row3_col1" class="data row3 col1" >R(r=1, i=None, state='R1') % R(r=1, i=None, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row4_col0" class="data row4 col0" >\[s_{4}\]</td>
<td id="T_50483_row4_col1" class="data row4 col1" >I1(r=1) % R(r=None, i=1, state='none')</td>
</tr>
<tr>
<td id="T_50483_row5_col0" class="data row5 col0" >\[s_{5}\]</td>
<td id="T_50483_row5_col1" class="data row5 col1" >I2(r=1) % R(r=None, i=1, state='none')</td>
</tr>
<tr>
<td id="T_50483_row6_col0" class="data row6 col0" >\[s_{6}\]</td>
<td id="T_50483_row6_col1" class="data row6 col1" >I1(r=1) % R(r=2, i=1, state='R2') % R(r=2, i=None, state='R1')</td>
</tr>
<tr>
<td id="T_50483_row7_col0" class="data row7 col0" >\[s_{7}\]</td>
<td id="T_50483_row7_col1" class="data row7 col1" >I2(r=1) % R(r=2, i=1, state='R2') % R(r=2, i=None, state='R1')</td>
</tr>
<tr>
<td id="T_50483_row8_col0" class="data row8 col0" >\[s_{8}\]</td>
<td id="T_50483_row8_col1" class="data row8 col1" >I1(r=1) % R(r=2, i=1, state='R1') % R(r=2, i=None, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row9_col0" class="data row9 col0" >\[s_{9}\]</td>
<td id="T_50483_row9_col1" class="data row9 col1" >I1(r=1) % I1(r=2) % R(r=3, i=1, state='R1') % R(r=3, i=2, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row10_col0" class="data row10 col0" >\[s_{10}\]</td>
<td id="T_50483_row10_col1" class="data row10 col1" >I1(r=1) % I2(r=2) % R(r=3, i=1, state='R1') % R(r=3, i=2, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row11_col0" class="data row11 col0" >\[s_{11}\]</td>
<td id="T_50483_row11_col1" class="data row11 col1" >I2(r=1) % R(r=2, i=1, state='R1') % R(r=2, i=None, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row12_col0" class="data row12 col0" >\[s_{12}\]</td>
<td id="T_50483_row12_col1" class="data row12 col1" >I1(r=1) % I2(r=2) % R(r=3, i=2, state='R1') % R(r=3, i=1, state='R2')</td>
</tr>
<tr>
<td id="T_50483_row13_col0" class="data row13 col0" >\[s_{13}\]</td>
<td id="T_50483_row13_col1" class="data row13 col1" >I2(r=1) % I2(r=2) % R(r=3, i=1, state='R1') % R(r=3, i=2, state='R2')</td>
</tr>
</tbody></table>
<table id="T_69a63_" ><caption>REACTIONS</caption><thead> <tr> <th class="col_heading level0 col0" >ID</th> <th class="col_heading level0 col1" >Base rule</th> <th class="col_heading level0 col2" >Reaction</th> <th class="col_heading level0 col3" >Forward rate ($k$ )</th> <th class="col_heading level0 col4" >Backward rate ($kr$ )</th> <th class="col_heading level0 col5" >Dissociation constant ($K_d$ )</th> </tr></thead><tbody>
<tr>
<td id="T_69a63_row0_col0" class="data row0 col0" >\[R_{0}\]</td>
<td id="T_69a63_row0_col1" class="data row0 col1" >R1</td>
<td id="T_69a63_row0_col2" class="data row0 col2" >$s_{0}$ + $s_{0}$ ⇄ $s_{3}$</td>
<td id="T_69a63_row0_col3" class="data row0 col3" >\[k_{1}\]</td>
<td id="T_69a63_row0_col4" class="data row0 col4" >\[kr_{1}\]</td>
<td id="T_69a63_row0_col5" class="data row0 col5" >\[\frac{kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row1_col0" class="data row1 col0" >\[R_{1}\]</td>
<td id="T_69a63_row1_col1" class="data row1 col1" >R2a</td>
<td id="T_69a63_row1_col2" class="data row1 col2" >$s_{0}$ + $s_{1}$ ⇄ $s_{4}$</td>
<td id="T_69a63_row1_col3" class="data row1 col3" >\[k_{2a}\]</td>
<td id="T_69a63_row1_col4" class="data row1 col4" >\[kr_{2a}\]</td>
<td id="T_69a63_row1_col5" class="data row1 col5" >\[\frac{kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row2_col0" class="data row2 col0" >\[R_{2}\]</td>
<td id="T_69a63_row2_col1" class="data row2 col1" >R2b</td>
<td id="T_69a63_row2_col2" class="data row2 col2" >$s_{0}$ + $s_{2}$ ⇄ $s_{5}$</td>
<td id="T_69a63_row2_col3" class="data row2 col3" >\[k_{2b}\]</td>
<td id="T_69a63_row2_col4" class="data row2 col4" >\[kr_{2b}\]</td>
<td id="T_69a63_row2_col5" class="data row2 col5" >\[\frac{kr_{2b}}{k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row3_col0" class="data row3 col0" >\[R_{11}\]</td>
<td id="T_69a63_row3_col1" class="data row3 col1" >R2a</td>
<td id="T_69a63_row3_col2" class="data row3 col2" >$s_{1}$ + $s_{3}$ ⇄ $s_{8}$</td>
<td id="T_69a63_row3_col3" class="data row3 col3" >\[f_{a}^{- \phi_{2a}} \cdot k_{2a}\]</td>
<td id="T_69a63_row3_col4" class="data row3 col4" >\[f_{a}^{1 - \phi_{2a}} \cdot kr_{2a}\]</td>
<td id="T_69a63_row3_col5" class="data row3 col5" >\[\frac{f_{a} \cdot kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row4_col0" class="data row4 col0" >\[R_{13}\]</td>
<td id="T_69a63_row4_col1" class="data row4 col1" >R2b</td>
<td id="T_69a63_row4_col2" class="data row4 col2" >$s_{2}$ + $s_{3}$ ⇄ $s_{11}$</td>
<td id="T_69a63_row4_col3" class="data row4 col3" >\[f_{b}^{- \phi_{2b}} \cdot k_{2b}\]</td>
<td id="T_69a63_row4_col4" class="data row4 col4" >\[f_{b}^{1 - \phi_{2b}} \cdot kr_{2b}\]</td>
<td id="T_69a63_row4_col5" class="data row4 col5" >\[\frac{f_{b} \cdot kr_{2b}}{k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row5_col0" class="data row5 col0" >\[R_{12}\]</td>
<td id="T_69a63_row5_col1" class="data row5 col1" >R2a</td>
<td id="T_69a63_row5_col2" class="data row5 col2" >$s_{1}$ + $s_{3}$ ⇄ $s_{6}$</td>
<td id="T_69a63_row5_col3" class="data row5 col3" >\[k_{2a} \cdot \left(\frac{1}{f_{a} \cdot g_{1a}}\right)^{\phi_{2a}}\]</td>
<td id="T_69a63_row5_col4" class="data row5 col4" >\[kr_{2a} \cdot \left(f_{a} \cdot g_{1a}\right)^{1 - \phi_{2a}}\]</td>
<td id="T_69a63_row5_col5" class="data row5 col5" >\[\frac{f_{a} \cdot g_{1a} \cdot kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row6_col0" class="data row6 col0" >\[R_{14}\]</td>
<td id="T_69a63_row6_col1" class="data row6 col1" >R2b</td>
<td id="T_69a63_row6_col2" class="data row6 col2" >$s_{2}$ + $s_{3}$ ⇄ $s_{7}$</td>
<td id="T_69a63_row6_col3" class="data row6 col3" >\[k_{2b} \cdot \left(\frac{1}{f_{b} \cdot g_{1b}}\right)^{\phi_{2b}}\]</td>
<td id="T_69a63_row6_col4" class="data row6 col4" >\[kr_{2b} \cdot \left(f_{b} \cdot g_{1b}\right)^{1 - \phi_{2b}}\]</td>
<td id="T_69a63_row6_col5" class="data row6 col5" >\[\frac{f_{b} \cdot g_{1b} \cdot kr_{2b}}{k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row7_col0" class="data row7 col0" >\[R_{5}\]</td>
<td id="T_69a63_row7_col1" class="data row7 col1" >R1</td>
<td id="T_69a63_row7_col2" class="data row7 col2" >$s_{0}$ + $s_{4}$ ⇄ $s_{8}$</td>
<td id="T_69a63_row7_col3" class="data row7 col3" >\[f_{a}^{- \phi_{1}} \cdot k_{1}\]</td>
<td id="T_69a63_row7_col4" class="data row7 col4" >\[f_{a}^{1 - \phi_{1}} \cdot kr_{1}\]</td>
<td id="T_69a63_row7_col5" class="data row7 col5" >\[\frac{f_{a} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row8_col0" class="data row8 col0" >\[R_{8}\]</td>
<td id="T_69a63_row8_col1" class="data row8 col1" >R1</td>
<td id="T_69a63_row8_col2" class="data row8 col2" >$s_{0}$ + $s_{5}$ ⇄ $s_{11}$</td>
<td id="T_69a63_row8_col3" class="data row8 col3" >\[f_{b}^{- \phi_{1}} \cdot k_{1}\]</td>
<td id="T_69a63_row8_col4" class="data row8 col4" >\[f_{b}^{1 - \phi_{1}} \cdot kr_{1}\]</td>
<td id="T_69a63_row8_col5" class="data row8 col5" >\[\frac{f_{b} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row9_col0" class="data row9 col0" >\[R_{3}\]</td>
<td id="T_69a63_row9_col1" class="data row9 col1" >R1</td>
<td id="T_69a63_row9_col2" class="data row9 col2" >$s_{0}$ + $s_{4}$ ⇄ $s_{6}$</td>
<td id="T_69a63_row9_col3" class="data row9 col3" >\[k_{1} \cdot \left(\frac{1}{f_{a} \cdot g_{1a}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row9_col4" class="data row9 col4" >\[kr_{1} \cdot \left(f_{a} \cdot g_{1a}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row9_col5" class="data row9 col5" >\[\frac{f_{a} \cdot g_{1a} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row10_col0" class="data row10 col0" >\[R_{4}\]</td>
<td id="T_69a63_row10_col1" class="data row10 col1" >R1</td>
<td id="T_69a63_row10_col2" class="data row10 col2" >$s_{0}$ + $s_{5}$ ⇄ $s_{7}$</td>
<td id="T_69a63_row10_col3" class="data row10 col3" >\[k_{1} \cdot \left(\frac{1}{f_{b} \cdot g_{1b}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row10_col4" class="data row10 col4" >\[kr_{1} \cdot \left(f_{b} \cdot g_{1b}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row10_col5" class="data row10 col5" >\[\frac{f_{b} \cdot g_{1b} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row11_col0" class="data row11 col0" >\[R_{17}\]</td>
<td id="T_69a63_row11_col1" class="data row11 col1" >R2a</td>
<td id="T_69a63_row11_col2" class="data row11 col2" >$s_{1}$ + $s_{8}$ ⇄ $s_{9}$</td>
<td id="T_69a63_row11_col3" class="data row11 col3" >\[k_{2a} \cdot \left(\frac{1}{g_{1a} \cdot g_{2a}}\right)^{\phi_{2a}}\]</td>
<td id="T_69a63_row11_col4" class="data row11 col4" >\[kr_{2a} \cdot \left(g_{1a} \cdot g_{2a}\right)^{1 - \phi_{2a}}\]</td>
<td id="T_69a63_row11_col5" class="data row11 col5" >\[\frac{g_{1a} \cdot g_{2a} \cdot kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row12_col0" class="data row12 col0" >\[R_{22}\]</td>
<td id="T_69a63_row12_col1" class="data row12 col1" >R2b</td>
<td id="T_69a63_row12_col2" class="data row12 col2" >$s_{2}$ + $s_{11}$ ⇄ $s_{13}$</td>
<td id="T_69a63_row12_col3" class="data row12 col3" >\[k_{2b} \cdot \left(\frac{1}{g_{1b} \cdot g_{2b}}\right)^{\phi_{2b}}\]</td>
<td id="T_69a63_row12_col4" class="data row12 col4" >\[kr_{2b} \cdot \left(g_{1b} \cdot g_{2b}\right)^{1 - \phi_{2b}}\]</td>
<td id="T_69a63_row12_col5" class="data row12 col5" >\[\frac{g_{1b} \cdot g_{2b} \cdot kr_{2b}}{k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row13_col0" class="data row13 col0" >\[R_{15}\]</td>
<td id="T_69a63_row13_col1" class="data row13 col1" >R2a</td>
<td id="T_69a63_row13_col2" class="data row13 col2" >$s_{1}$ + $s_{6}$ ⇄ $s_{9}$</td>
<td id="T_69a63_row13_col3" class="data row13 col3" >\[g_{2a}^{- \phi_{2a}} \cdot k_{2a}\]</td>
<td id="T_69a63_row13_col4" class="data row13 col4" >\[g_{2a}^{1 - \phi_{2a}} \cdot kr_{2a}\]</td>
<td id="T_69a63_row13_col5" class="data row13 col5" >\[\frac{g_{2a} \cdot kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row14_col0" class="data row14 col0" >\[R_{20}\]</td>
<td id="T_69a63_row14_col1" class="data row14 col1" >R2b</td>
<td id="T_69a63_row14_col2" class="data row14 col2" >$s_{2}$ + $s_{7}$ ⇄ $s_{13}$</td>
<td id="T_69a63_row14_col3" class="data row14 col3" >\[g_{2b}^{- \phi_{2b}} \cdot k_{2b}\]</td>
<td id="T_69a63_row14_col4" class="data row14 col4" >\[g_{2b}^{1 - \phi_{2b}} \cdot kr_{2b}\]</td>
<td id="T_69a63_row14_col5" class="data row14 col5" >\[\frac{g_{2b} \cdot kr_{2b}}{k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row15_col0" class="data row15 col0" >\[R_{6}\]</td>
<td id="T_69a63_row15_col1" class="data row15 col1" >R1</td>
<td id="T_69a63_row15_col2" class="data row15 col2" >$s_{4}$ + $s_{4}$ ⇄ $s_{9}$</td>
<td id="T_69a63_row15_col3" class="data row15 col3" >\[k_{1} \cdot \left(\frac{1}{f_{a} \cdot g_{1a} \cdot g_{2a}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row15_col4" class="data row15 col4" >\[kr_{1} \cdot \left(f_{a} \cdot g_{1a} \cdot g_{2a}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row15_col5" class="data row15 col5" >\[\frac{f_{a} \cdot g_{1a} \cdot g_{2a} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row16_col0" class="data row16 col0" >\[R_{10}\]</td>
<td id="T_69a63_row16_col1" class="data row16 col1" >R1</td>
<td id="T_69a63_row16_col2" class="data row16 col2" >$s_{5}$ + $s_{5}$ ⇄ $s_{13}$</td>
<td id="T_69a63_row16_col3" class="data row16 col3" >\[k_{1} \cdot \left(\frac{1}{f_{b} \cdot g_{1b} \cdot g_{2b}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row16_col4" class="data row16 col4" >\[kr_{1} \cdot \left(f_{b} \cdot g_{1b} \cdot g_{2b}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row16_col5" class="data row16 col5" >\[\frac{f_{b} \cdot g_{1b} \cdot g_{2b} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row17_col0" class="data row17 col0" >\[R_{7}\]</td>
<td id="T_69a63_row17_col1" class="data row17 col1" >R1</td>
<td id="T_69a63_row17_col2" class="data row17 col2" >$s_{4}$ + $s_{5}$ ⇄ $s_{10}$</td>
<td id="T_69a63_row17_col3" class="data row17 col3" >\[k_{1} \cdot \left(\frac{1}{f_{b} \cdot g_{1b} \cdot g_{3a}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row17_col4" class="data row17 col4" >\[kr_{1} \cdot \left(f_{b} \cdot g_{1b} \cdot g_{3a}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row17_col5" class="data row17 col5" >\[\frac{f_{b} \cdot g_{1b} \cdot g_{3a} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row18_col0" class="data row18 col0" >\[R_{21}\]</td>
<td id="T_69a63_row18_col1" class="data row18 col1" >R2b</td>
<td id="T_69a63_row18_col2" class="data row18 col2" >$s_{2}$ + $s_{8}$ ⇄ $s_{10}$</td>
<td id="T_69a63_row18_col3" class="data row18 col3" >\[k_{2b}^{1 - \phi_{2b}} \cdot \left(\frac{f_{a} \cdot k_{2b}}{f_{b} \cdot g_{1b} \cdot g_{3a}}\right)^{\phi_{2b}}\]</td>
<td id="T_69a63_row18_col4" class="data row18 col4" >\[k_{2b} \cdot \left(\frac{kr_{2b}}{k_{2b}}\right)^{\phi_{2b}} \cdot \left(\frac{f_{b} \cdot g_{1b} \cdot g_{3a} \cdot kr_{2b}}{f_{a} \cdot k_{2b}}\right)^{1 - \phi_{2b}}\]</td>
<td id="T_69a63_row18_col5" class="data row18 col5" >\[\frac{f_{b} \cdot g_{1b} \cdot g_{3a} \cdot kr_{2b}}{f_{a} \cdot k_{2b}}\]</td>
</tr>
<tr>
<td id="T_69a63_row19_col0" class="data row19 col0" >\[R_{16}\]</td>
<td id="T_69a63_row19_col1" class="data row19 col1" >R2a</td>
<td id="T_69a63_row19_col2" class="data row19 col2" >$s_{1}$ + $s_{7}$ ⇄ $s_{10}$</td>
<td id="T_69a63_row19_col3" class="data row19 col3" >\[g_{3a}^{- \phi_{2a}} \cdot k_{2a}\]</td>
<td id="T_69a63_row19_col4" class="data row19 col4" >\[g_{3a}^{1 - \phi_{2a}} \cdot kr_{2a}\]</td>
<td id="T_69a63_row19_col5" class="data row19 col5" >\[\frac{g_{3a} \cdot kr_{2a}}{k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row20_col0" class="data row20 col0" >\[R_{9}\]</td>
<td id="T_69a63_row20_col1" class="data row20 col1" >R1</td>
<td id="T_69a63_row20_col2" class="data row20 col2" >$s_{4}$ + $s_{5}$ ⇄ $s_{12}$</td>
<td id="T_69a63_row20_col3" class="data row20 col3" >\[k_{1} \cdot \left(\frac{1}{f_{a} \cdot g_{1a} \cdot g_{3b}}\right)^{\phi_{1}}\]</td>
<td id="T_69a63_row20_col4" class="data row20 col4" >\[kr_{1} \cdot \left(f_{a} \cdot g_{1a} \cdot g_{3b}\right)^{1 - \phi_{1}}\]</td>
<td id="T_69a63_row20_col5" class="data row20 col5" >\[\frac{f_{a} \cdot g_{1a} \cdot g_{3b} \cdot kr_{1}}{k_{1}}\]</td>
</tr>
<tr>
<td id="T_69a63_row21_col0" class="data row21 col0" >\[R_{18}\]</td>
<td id="T_69a63_row21_col1" class="data row21 col1" >R2a</td>
<td id="T_69a63_row21_col2" class="data row21 col2" >$s_{1}$ + $s_{11}$ ⇄ $s_{12}$</td>
<td id="T_69a63_row21_col3" class="data row21 col3" >\[k_{2a}^{1 - \phi_{2a}} \cdot \left(\frac{f_{b} \cdot k_{2a}}{f_{a} \cdot g_{1a} \cdot g_{3b}}\right)^{\phi_{2a}}\]</td>
<td id="T_69a63_row21_col4" class="data row21 col4" >\[k_{2a} \cdot \left(\frac{kr_{2a}}{k_{2a}}\right)^{\phi_{2a}} \cdot \left(\frac{f_{a} \cdot g_{1a} \cdot g_{3b} \cdot kr_{2a}}{f_{b} \cdot k_{2a}}\right)^{1 - \phi_{2a}}\]</td>
<td id="T_69a63_row21_col5" class="data row21 col5" >\[\frac{f_{a} \cdot g_{1a} \cdot g_{3b} \cdot kr_{2a}}{f_{b} \cdot k_{2a}}\]</td>
</tr>
<tr>
<td id="T_69a63_row22_col0" class="data row22 col0" >\[R_{19}\]</td>
<td id="T_69a63_row22_col1" class="data row22 col1" >R2b</td>
<td id="T_69a63_row22_col2" class="data row22 col2" >$s_{2}$ + $s_{6}$ ⇄ $s_{12}$</td>
<td id="T_69a63_row22_col3" class="data row22 col3" >\[g_{3b}^{- \phi_{2b}} \cdot k_{2b}\]</td>
<td id="T_69a63_row22_col4" class="data row22 col4" >\[g_{3b}^{1 - \phi_{2b}} \cdot kr_{2b}\]</td>
<td id="T_69a63_row22_col5" class="data row22 col5" >\[\frac{g_{3b} \cdot kr_{2b}}{k_{2b}}\]</td>
</tr>
</tbody></table>
Having generated the model, we can now study the efficacy of RAF inhibitors with different thermodynamic properties using model simulations. First, we set up parameters to reproduce Figures 3B-C and S3 of the original paper, which show the synergy between I1 and I2 under certain conditions of cooperativity. We simulate the response of the model for each combination of parameters and the selected RAF inhibitor concentrations:
```python
from tqdm.notebook import tqdm, trange
from util_simulation import equilibrate
from pysb.simulator import ScipyOdeSimulator
import numpy as np
#set initial conditions
model.parameters['R_0'].value=1.0;
model.parameters['I1_0'].value=0.0;
model.parameters['I2_0'].value=0.0;
#set kinetic parameters
model.parameters['kf_1'].value=1.0; #Kd_1= 100
model.parameters['kr_1'].value=100.0;
model.parameters['kf_2a'].value=1.0; #Kd_2a= 1
model.parameters['kr_2a'].value=1.0;
model.parameters['kf_2b'].value=2.0; #Kd_2b= 2
model.parameters['kr_2b'].value=1.0;
#define observables and expression to extract
ext_species=['R_active', 'R_obs', 'RR_obs', 'R1R2I_obs', 'R2R1I_obs', 'IRRI_obs'];
ext_species_names=['RAF active', 'R', 'RR', 'R1R2I', 'R2R1I', 'IRRI'];
#define cooperativity parameters in different scenarios to be simulated
param_names= [ 'f_a', 'f_b', 'g_1a', 'g_1b', 'g_2a', 'g_2b', 'g_3a', 'g_3b'];
param_values=[ [0.01, 0.005, 2, 5, 10, 20, 0.01, 10 ], #synergistic (Fig. 3C)
[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]]; #no cooperativity
params= pd.DataFrame(np.array(param_values), columns=param_names);
#set the dilution range for the RAF inhibitors
I1_doses=np.concatenate((np.array([0]), np.logspace(-2, 1, 25, base=10.0)))
I2_doses=np.concatenate((np.array([0]), np.logspace(-2, 1, 25, base=10.0)))
#define conditions of simulation
doses=pd.merge(pd.Series(I1_doses, name="I1_0"), pd.Series(I2_doses, name="I2_0"), how="cross")
conditions = pd.merge(params, doses, how="cross")
ss_v = pd.DataFrame(
index=pd.MultiIndex.from_frame(conditions), columns=ext_species, dtype=float
)
#set up the ODE simulator for the model
sim = ScipyOdeSimulator(model);
#for each drug dose combination
for i, r in tqdm(conditions.iterrows(), desc="Simulation progress", total=len(conditions)):
#simulate the model to steady state
res = equilibrate(sim, param_values= (r.to_dict()))
#extract end of simulation for each selected species
ss_v.loc[tuple(r)] = res.dataframe[ext_species].iloc[-1]
```
We plot the results as dilution dose responses for I1 alone, I2 alone, and I2 combined with I1 fixed at its IC50 dose (Fig 3B):
```python
%matplotlib notebook
import matplotlib.pyplot as plt
from matplotlib.colors import DivergingNorm
#calculate IC50 of the drugs
I1_IC50 = model.parameters['kr_2a'].value / model.parameters['kf_2a'].value
I2_IC50 = model.parameters['kr_2b'].value / model.parameters['kf_2b'].value
#for each parameter set plot the dose responses
fig, ax = plt.subplots(1, len(params), sharey=True)
for i in range(len(params)):
#extract datastructure for that parameter set
sel_param=tuple(params.iloc[i].values)
ss_v_param=ss_v.loc[sel_param]
#extract reference condition at (0,0) drugs
R_tot = ss_v_param.loc[(0, 0), "R_active"]
#extract active RAFs for dose responses
I1_response = ss_v_param.xs(0.0, level="I2_0")["R_active"]
I2_response = ss_v_param.xs(0.0, level="I1_0")["R_active"]
I2_I1IC50_response = ss_v_param.xs(I1_IC50, level="I1_0")["R_active"]
#for I1 only
ax[i].plot(I1_response.index, I1_response / R_tot, ".-k")
#for I2 only
ax[i].plot(I2_response.index, I2_response / R_tot, ".-b")
#for I2 at fixed I1 dose
ax[i].plot(I2_I1IC50_response.index, I2_I1IC50_response / R_tot, ".-r");
ax[i].set_xlabel('Inhibitor dose');
ax[i].set_ylabel('Normalized Total Activity');
plt.tight_layout()
```
The plot above shows the synergy of the two drugs: as single agents they do not inhibit RAF signaling, but in combination with a fixed dose of I1 they block it. To fully quantify synergy, we show the isobologram analysis of I1 and I2 drug-dose combinations (Fig 3C) and also calculate the degree of synergy according to Bliss independence:
```python
#import synergy package
from synergy.combination import Bliss
#compare synergies among set of cooperative parameters
fig, ax = plt.subplots(len(params), 3)
plt_titles=['Model simulations', 'Bliss null model', 'Bliss excess'];
for i in range(len(params)):
#extract datastructure for that parameter set
sel_param=tuple(params.iloc[i].values)
ss_v_param=ss_v.loc[sel_param]
#prepare data to use synergy package
E_sim=ss_v_param['R_active'].to_numpy()/R_tot;
D1=ss_v_param.index.get_level_values(0).to_numpy();
D2=ss_v_param.index.get_level_values(1).to_numpy();
#create and fit the Bliss models
bliss = Bliss()
bliss.fit(D1, D2, E_sim);
#reshape E_sim to matrix
E_matrix_sim=ss_v_param["R_active"].unstack("I2_0").to_numpy()/R_tot;
E_matrix_sim=np.rot90(E_matrix_sim);
#extract bliss reference and bliss excess
E_mat_bliss_ref=np.rot90(bliss.reference.reshape(len(I1_doses),len(I2_doses)));
E_mat_bliss_syn=np.rot90(bliss.synergy.reshape(len(I1_doses),len(I2_doses)));
Emax=np.maximum(E_mat_bliss_ref.max(), E_matrix_sim.max());
Es=[E_matrix_sim, E_mat_bliss_ref, E_mat_bliss_syn];
for j in range(len(plt_titles)):
im=ax[i,j].imshow(Es[j], extent=[0, 1, 0, 1])
fig.colorbar(im, ax=ax[i,j], fraction=0.046, pad=0.04)
ax[i,j].set_xticks([])
ax[i,j].set_yticks([])
ax[i,j].title.set_text(plt_titles[j])
if (i==len(params)-1):
ax[i,j].set_xlabel('Drug 2')
if (j==0):
ax[i,j].set_ylabel('Drug 1')
plt.tight_layout()
```
These plots show that, with certain cooperativity parameters, there exist dose ranges of drugs 1 and 2 that induce synergistic inhibition of RAF signaling (first row). This synergy is not observed if the drugs have no cooperative interactions with RAF dimerization status (second row).
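For reference, here is a minimal sketch of the Bliss calculation on normalized activities. The helper below is ours, not part of the original notebook or the `synergy` package; it assumes, as in the plots above, that an activity of 1 means untreated and 0 means full inhibition, and it reports synergy as excess inhibition over the independence expectation:
```python
import numpy as np

def bliss_excess(activity_1, activity_2, activity_combo):
    """Bliss analysis on normalized activities (1 = untreated, 0 = fully inhibited)."""
    # Treat the inhibited fractions as independent probabilities: the expected combined
    # inhibition is f1 + f2 - f1*f2, i.e. the expected remaining activity is the product.
    expected_activity = np.asarray(activity_1) * np.asarray(activity_2)
    # Positive excess = less activity observed than expected = synergy (by this convention)
    return expected_activity - np.asarray(activity_combo)

# Example: each drug alone leaves 80% activity, the combination leaves 30%
print(bliss_excess(0.8, 0.8, 0.3))  # ~0.34 -> synergistic
```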
To understand the reason for drug synergy, we plot observables that report the status of R dimerization and drug binding:
```python
#plot each species extract from simulations
fig, ax = plt.subplots(len(params),len(ext_species))
#for each parameter setting
for i in range(len(params)):
#extract datastructure for that parameter set
sel_param=tuple(params.iloc[i].values)
ss_v_param=ss_v.loc[sel_param]
#for each specie
for j in range(len(ext_species)):
E_matrix_sim=ss_v_param[ext_species[j]].unstack("I2_0").to_numpy();
E_matrix_sim=np.rot90(E_matrix_sim);
im=ax[i,j].imshow(E_matrix_sim, extent=[0, 1, 0, 1])
ax[i,j].set_xticks([])
ax[i,j].set_yticks([])
ax[i,j].title.set_text(ext_species_names[j])
if (i==len(params)-1):
ax[i,j].set_xlabel('Drug 2')
if (j==0):
ax[i,j].set_ylabel('Drug 1')
plt.tight_layout()
```
These simulations show that, in the synergistic case, there are concentrations of drugs 1 and 2 that block both the R1 and R2 configurations of RAF dimers, at concentrations at which, as single agents, both drugs promote RAF dimerization and only partially inhibit one protomer in RAF dimers (first row). In the absence of cooperativity, the drugs show no synergy: drugs 1 and 2 shut down RAF signaling at the same concentration as single agents (second row).
|
[STATEMENT]
lemma valid_length_mmuaux:
assumes "valid_mmuaux args cur aux auxlist"
shows "length_mmuaux args aux = length auxlist"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. length_mmuaux args aux = length auxlist
[PROOF STEP]
using assms
[PROOF STATE]
proof (prove)
using this:
valid_mmuaux args cur aux auxlist
goal (1 subgoal):
1. length_mmuaux args aux = length auxlist
[PROOF STEP]
by (cases aux) (auto simp add: valid_mmuaux_def dest: list_all2_lengthD) |
#script to create maps for comparison of modelled vs observed land cover
#reads file created by CRAFTYsummary_5LCs.r
rm(list=ls())
#script assumes there are three calibration years (2005, 2010, 2015); edit next line if that is not correct
calib_yrs <- c(2005, 2010, 2015)
#maps can be plotted to pdf by setting following variable appropriately (TRUE/FALSE)
library(tidyverse)
library(sf)
library(RColorBrewer) #for plotting
#this directory should exist and contain the CRAFTYmunisServCap.csv
scenario <- "Testing_2018-08-23l"
runID <- "0-0"
#input/output variables
output_name <- paste0("Data/",scenario,"/",runID,"/",scenario,"_LCcomparisonMaps.pdf")
cDat <- readr::read_csv(paste0("Data/",scenario,"/",runID,"/",scenario,"_CRAFTYmunisLC.csv"),
col_types = cols(Year = col_integer(), diffcProp3 = col_double())) #needed to ensure correct import (many zeros in diffcProp3 at top of file)
#note following shp was created using simplyfying_shapefiles.r
BRmunis <- st_read("Data/Vector/BRmunis_sim10_simple2.shp")
## Maps
#loop through years, printing maps
for(yr in calib_yrs){
cDat_map <- left_join(BRmunis, filter(cDat, Year == yr), by = c("CD_GEOCMUn" ="muniID"))
#create land cover palette
map_pal <- c("forestgreen", "darkcyan", "wheat2", "black", "orange2")
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_ObsMode",yr,".png"))
#plot observed vs modelled modal muni land cover
plot(cDat_map["ObsMode"], pal = map_pal, graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Observed Mode LC"), key.pos = NULL)
legend("bottomright", cex = 1.3, c("Nature", "Other Agri", "Agriculture", "Other", "Pasture"), fill = map_pal, bg = "white")
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_ModMode",yr,".png"))
plot(cDat_map["ModMode"], pal = map_pal, graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Modelled Mode LC"), key.pos = NULL)
legend("bottomright", cex = 1.3, c("Nature", "Other Agri", "Agriculture", "Other", "Pasture"), fill = map_pal, bg = "white")
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcMode",yr,".png"))
#map of muni mode correct/incorrect
plot(cDat_map["diffcMode"], pal = c("darkgreen","red"), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Model vs Obs Mode Comparison"), key.pos = NULL)
legend("bottomright", cex = 1.3, c("Correct", "Incorrect"), fill = c("darkgreen","red"), bg = "white")
dev.off()
#get max value for colour breaks below
errorMax <- max(filter(cDat, Year == yr)$cellDiffcCount)
#for cell accuracy maps
cell_pal <- brewer.pal(8, "Reds")
#total count of cells incorrect
plot(cDat_map["cellDiffcCount"], pal = cell_pal, breaks = seq(0,errorMax, length.out = length(cell_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Count Incorrect Cells"))
#proportion of cells incorrect
plot(cDat_map["cellDiffcProp"], pal = cell_pal, breaks = seq(0,1, length.out = length(cell_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Prop Incorrect Cells"))
#for LC proportion accuracy maps
prop_pal <- brewer.pal(11, "RdYlGn")
#difference in proportion predictions (negative is under-prediction by model, positive over-prediction)
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcProp1",yr,".png"))
plot(cDat_map["diffcProp1"], pal = prop_pal, breaks = seq(-1,1, length.out = length(prop_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Nature Prop Diffc"))
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcProp2",yr,".png"))
plot(cDat_map["diffcProp2"], pal = prop_pal, breaks = seq(-1,1, length.out = length(prop_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Other Agri Prop Diffc"))
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcProp3",yr,".png"))
plot(cDat_map["diffcProp3"], pal = prop_pal, breaks = seq(-1,1, length.out = length(prop_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Agriculture Prop Diffc"))
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcProp4",yr,".png"))
plot(cDat_map["diffcProp5"], pal = prop_pal, breaks = seq(-1,1, length.out = length(prop_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Pasture Prop Diffc"))
dev.off()
png(paste0("Data/",scenario,"/",runID,"/",scenario,"_diffcProp5",yr,".png"))
plot(cDat_map["diffcProp4"], pal = prop_pal, breaks = seq(-1,1, length.out = length(prop_pal)+1), graticule = st_crs(cDat_map), axes = TRUE, lty = 0, main = paste(yr,"Other Prop Diffc"))
dev.off()
par(mfrow=c(1,1)) #needed to ensure plotting plays nicely with key.pos = NULL above
}
|
--{-# OPTIONS --allow-unsolved-metas #-}
module StateSizedIO.GUI.Prelude where
open import Size public renaming (Size to AgdaSize)
open import Data.Nat.Base public
open import Data.Bool.Base hiding (_≟_) public
open import Data.List.Base public
open import Function public
open import Data.Integer.Base public hiding (_*_; _+_; _-_; _⊓_; _⊔_; pred; suc;_≤_)
open import Agda.Builtin.Equality public
open import Data.Product public using (_×_; _,_)
open import NativeIO public
open import StateSizedIO.GUI.WxBindingsFFI public
open import StateSizedIO.GUI.VariableList public
open import SizedIO.Base public
open import StateSizedIO.GUI.BaseStateDependent public
|
#
# These procedures come from the source code of GFun.
#
getname:=proc(yofz::function(name), y, z)
y:=op(0,yofz);
if type(y,'procedure') then error `not an unassigned name`,y fi;
z:=op(yofz)
end proc:
#
# returns the smallest i such that u(n+i) appears in a recurrence
#
minindex := proc(rec,u,n)
min(op(map(op,indets(rec,'specfunc'('linear'(n),u)))))-n
end proc:
#
# returns the largest i such that u(n+i) appears in a recurrence
#
maxindex := proc(rec,u,n)
max(op(map(op,indets(rec,'specfunc'('linear'(n),u)))))-n
end proc:
#
# A recurrence of the form a(n+d) = p(n)/q(n) a(n) is represented through a record:
# OneTermRecurrence : record(order, numerator, denominator)
#
`type/OneTermRecurrence` := 'record(order, numerator, denominator)':
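#
# Illustrative example (ours, not used elsewhere): the recurrence
#    a(n+1) = (n+1)/(2*n+3) * a(n)
# would be represented as
#    Record('order' = 1, 'numerator' = n+1, 'denominator' = 2*n+3)
#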
#
#checkOneTermRecurrence
# Input: a recurrence rec (either with or without initial conditions).
# If it has initial conditions, they are ignored.
# a(n): the name of the sequence and the name of the variable.
#
# Output:
# This procedure checks that rec is a recurrence of the form a(n+d) = p(n)/q(n) a(n)
# If the check succeeds, it returns the corresponding record. If it fails, an error is
# returned.
#
checkOneTermRecurrence := proc(rec, aofn)::OneTermRecurrence;
local r, d, a, n, term1, term2, res;
getname(aofn, a, n):
if type(rec, 'set') then
r:=select(has, rec, n);
if nops(r)>1
then error `invalid recurrence`, rec
fi:
if nops(r)=0
then error "%1 does not appear in the recurrence", n
fi:
r := op(r):
else r:=rec:
fi:
if type(r,'`=`')
then r:=op(1,r)-op(2,r)
fi:
if indets(r,'specfunc'('anything',a)) <> indets(r,'specfunc'('linear'(n),a))
then error "the recurrence contains elements that are not linear in %1", n
fi:
if nops(r) <> 2
then error "the recurrence contains %1 terms (expected 2)", nops(r)
fi:
r := subs(n=n-minindex(r, a, n), r):
d := maxindex(r, a, n):
term1 := select(has, r, a(n)):
term2 := select(has, r, a(n+d)):
res := factor( -(term1/a(n)) / (term2/a(n+d)) ):
Record( 'order'=d, 'numerator' = numer(res), 'denominator' = denom(res) )
end proc:
#
# my_factors factorizes p the same way as factors(p) would do except that the constant part is computed
# differently. We assume here that p has integer coefficients, and we want to factorize it over polynomials
# with integer coefficients. my_factors ensures that the factors have integer coefficients.
#
my_factors := proc(p)
local L, c, fact, i, my_c, my_fact, q:
L := factors(p):
c := L[1]: fact := L[2]:
my_c := c: my_fact := []:
for i from 1 to nops(fact) do
q := denom(fact[i][1]):
my_fact := [ op(my_fact), [ fact[i][1]*q, fact[i][2] ] ]:
my_c := my_c / (q^fact[i][2]):
od:
[ my_c, my_fact]:
end proc:
#
# This procedure decomposes a one-term recurrence with the following form:
# a(n+d) = c * s1(n)/s1(n+d) * s2(n+d)/s2(n) * p(n)/q(n) * a(n)
#
# Known issue: this procedure assumes that the only variables involved are n and x with their usual meaning.
#
decomposeOneTermRecurrence := proc(formalRec::OneTermRecurrence, res_cste, res_s1, res_s2, res_p, res_q)
local p, q, cste, s1, s2, d, L, i, tmp, exponent, r, polyring;
p := formalRec:-numerator:
q := formalRec:-denominator:
d := formalRec:-order:
s1 := 1:
L := op(2,my_factors(p)): # L contains the non trivial factors of p
for i from 1 to nops(L) do
tmp := L[i][1]: exponent := L[i][2]:
r := gcd(tmp^exponent, subs(n=n-d, q)):
p := quo(p,r,n): q := quo(q, subs(n=n+d, r),n): s1 := s1 * r:
od:
s2 := 1:
L := op(2,my_factors(p)): # L contains the *remaining* non trivial factors of p
for i from 1 to nops(L) do
tmp := L[i][1]: exponent := L[i][2]:
r := gcd(tmp^exponent, subs(n=n+d, q)):
p := quo(p, r, n): q := quo(q, subs(n=n-d, r), n): s2 := s2 * r:
od:
# Finally we look for the constant part (with respect to n) of p/q
cste := op(1, my_factors(p))/op(1, my_factors(q)):
p := p/op(1, my_factors(p)): q := q/op(1, my_factors(q)):
polyring := RegularChains[PolynomialRing]([n,x]):
L := op(2, my_factors(p)):
for i from 1 to nops(L) do
if RegularChains[MainVariable](L[i][1], polyring) = x
then cste := cste * L[i][1]^L[i][2]: p := quo(p,L[i][1]^L[i][2],x):
fi:
od:
L := op(2, my_factors(q)):
for i from 1 to nops(L) do
if RegularChains[MainVariable](L[i][1], polyring) = x
then cste := cste / L[i][1]^L[i][2]: q := quo(q,L[i][1]^L[i][2],x):
fi:
od:
res_cste := cste;
res_s1 := s1;
res_s2 := s2;
res_p := simplify(p);
res_q := simplify(q);
end proc:
#
#coeffrecToTermsrec
# Input: a linear recurrence rec (either with or without initial conditions).
# a(n): the name of the sequence and the name of the variable.
# x: a value or symbolic name
#
# Output:
# The recurrence satisfied by a(n)*x^n. Note that this recurrence is also denoted by a(n).
# If initial conditions were provided, corresponding initial conditions are computed.
#
coeffrecToTermsrec := proc(rec, aofn, x)
local a,n,L,r,cond,d,i,tmp,c,res;
getname(aofn, a, n):
if type(rec, 'set') then
L := selectremove(has, rec, n):
r := L[1]:
if nops(r)>1
then error `invalid recurrence`, rec
fi:
if nops(r)=0
then error "%1 does not appear in the recurrence", n
fi:
r := op(r):
cond := L[2]:
else r := rec:
fi:
d := maxindex(r, a, n):
L := indets(r,'specfunc'('linear'(n),a)):
if indets(r,'specfunc'('anything',a)) <> L
then error "the recurrence contains elements that are not linear in %1", n
fi:
L := map(op, L):
for i from 1 to nops(L) do
r := subs(a(op(i,L))=a(op(i,L))*x^(d-op(i,L)+n), r):
od:
if cond<>'cond' then
c := {}:
for i from 1 to nops(cond) do
tmp := op(i, cond): # tmp should have the form 'a(k) = cste'
if not type(tmp,'`=`') then error "Invalid initial condition: %1", tmp: fi:
L := selectremove(has, {op(tmp)}, a):
if (nops(L[1]) <> 1) or (nops(L[2])<>1)
then error "Invalid initial condition: %1", tmp:
fi:
tmp := op(1, L[1]): # tmp has the form 'a(k)'
c := {op(c), tmp = op(1, L[2])*x^op(tmp)}:
od:
res := {r, op(c)}:
else res := r:
fi:
res:
end proc:
#
# This procedure removes the conditions of the form a(k)=0 from the initial conditions of rec
# It returns a list L = [L1, L2, ...] where Li = [k, expr] representing the condition a(k)=expr.
# Moreover, it asserts that the Li are ordered by increasing k.
#
removeTrivialConditions := proc(rec, aofn)
local a,n,i,L,tmp,c,cond,k:
getname(aofn, a, n):
if not type(rec, 'set') then
error "%1 is not a recurrence with initial conditions", rec
else
L := selectremove(has, rec, n):
cond := L[2]:
if nops(cond)=0
then error "%1 does not contain initial conditions", rec
fi:
fi:
c := []:
for i from 1 to nops(cond) do
tmp := op(i, cond): # tmp should have the form 'a(k) = cste'
if not type(tmp,'`=`') then error "Invalid initial condition: %1", tmp: fi:
L := selectremove(has, {op(tmp)}, a):
if (nops(L[1]) <> 1) or (nops(L[2])<>1)
then error "Invalid initial condition: %1", tmp:
fi:
if op(1, L[2])<>0 then c := [op(c), [op(op(1, L[1])), op(1, L[2])]]: fi:
od:
# We check that the conditions are ordered by increasing k.
if (nops(c)=0) then return c: fi:
k := c[1][1]:
for i from 2 to nops(c) do
if (c[i][1]<=k)
then error "Unexpected error in removeTrivialConditions: the conditions are not correctly ordered (%1)\n", c
else k := c[i][1]
fi:
od:
c:
end proc:
#
# findFixpointOfDifferences: takes a set L of integers and returns the smallest set S
# containing L and such that for each i, S[i]-S[i-1] \in S
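# Example (ours): findFixpointOfDifferences({3,5}) first adds 5-3 = 2, then on
# {2,3,5} adds the consecutive differences 1 and 2, and stabilises at {1,2,3,5}.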
findFixpointOfDifferences := proc(L)
local res, i:
res := L:
for i from 2 to nops(L) do
res := { op(res), L[i]-L[i-1] }:
od:
if (res=L) then return res else return findFixpointOfDifferences(res) fi:
end proc:
#
# error_counter functions allows one to follow the accumulation of errors in each variable.
# an error_counter is a list of the form [[var1, c1], [var2, c2], ... ]
# where the vari are variable names and the ci indicate how many approximation errors
# are accumulated in vari.
#
#
# This procedure initializes the counter associated with variable var to 1 (and creates it if needed.)
# It returns an up-to-date error_counter.
init_error_counter := proc (var, error_counter)
local i, res:
res := error_counter:
for i from 1 to nops(res) do
if (res[i][1]=var)
then res[i][2] := 1:
return res:
fi
od:
res := [op(res), [var, 1]]:
end:
#
# This procedure adds a given number to the counter associated with variable var.
# It returns an up-to-date error_counter.
add_to_error_counter := proc (var, n, error_counter)
local i, res:
res := error_counter:
for i from 1 to nops(res) do
if (res[i][1]=var)
then res[i][2] := res[i][2]+n:
return res:
fi
od:
res := [op(res), [var, n]]:
end proc:
#
# This procedure sets the value of the counter associated with variable var.
# It returns an up-to-date error_counter.
set_error_counter := proc(var, n, error_counter)
local i,err:
err := error_counter:
for i from 1 to nops(err) do
if (err[i][1]=var)
then err[i][2] := n:
return err:
fi
od:
err := [op(err), [var, n]]:
end proc:
#
# This procedure initializes the counter associated to the multiplication of var2 and var3,
# putting the result in variable var1.
# It returns an up-to-date error_counter.
error_counter_of_a_multiplication := proc (var1, var2, var3, error_counter)
local i, res, c2, c3:
c2 := 0: c3 := 0:
for i from 1 to nops(error_counter) do
if (error_counter[i][1]=var2) then c2 := error_counter[i][2] fi:
if (error_counter[i][1]=var3) then c3 := error_counter[i][2] fi:
if (error_counter[i][1]=var1)
then
res := [ op(error_counter[1..i-1]), op(error_counter[i+1..nops(error_counter)]) ]
fi:
od:
if (res = 'res') then res := error_counter fi:
res := [op(res), [var1, c2+c3+1]]:
end:
#
# Copies the error counter of var2 into var1
error_counter_on_copy := proc(var1, var2, error_counter)
local i, err, c2:
c2 := 0:
for i from 1 to nops(error_counter) do
if (error_counter[i][1] = var2) then c2 := error_counter[i][2] fi:
if (error_counter[i][1] = var1)
then
err := [ op(error_counter[1..i-1]), op(error_counter[i+1..nops(error_counter)]) ]
fi:
od:
if (err = 'err') then err := error_counter fi:
  if (c2 <> 0) then err := [op(err), [var1, c2]] fi:
end proc:
#
# Returns the value of the error counter associated to a variable
find_in_error_counter := proc(var, error_counter)
local i:
for i from 1 to nops(error_counter) do
if (error_counter[i][1] = var) then return error_counter[i][2] fi:
od:
return 0:
end proc:
#
# generate_multiply_rational(fd, var1, var2, r, error_counter, indent) generates code for performing
# var1 = var2*r in MPFR
# fd is the file descriptor in which the code shall be produced.
# var1 and var2 are strings representing variable names. r is a Maple rational number.
# error_counter is an error_counter (as described above).
# indent is an optional argument. It is a string used to correctly indent the code. It is prefixed to any
# generated line. Hence, if indent=" ", the generated code will be indented by 2 spaces.
# An up-to-date error_counter is returned.
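# Example (ours, for illustration): with var1 = "y", var2 = "x", r = 3/2 and an
# empty indent, the generated code is
#    mpfr_mul_si (y, x, 3, MPFR_RNDN);
#    mpfr_div_si (y, y, 2, MPFR_RNDN);
#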
generate_multiply_rational := proc(fd, var1, var2, r, error_counter, indent:="")
local p,q,err:
err := error_counter:
if (whattype(r)<>'fraction') and (whattype(r)<>'integer')
then error "generate_multiply_rational used with non rational number %1", r: fi:
if (abs(r)=1)
then
if (var1=var2)
then
if (r<>1) then fprintf(fd, "%sMPFR_CHANGE_SIGN (%s);\n", indent, var1) fi:
return err:
else
if (r=1)
then fprintf(fd, "%smpfr_set (%s, %s, MPFR_RNDN);\n", indent, var1, var2):
else fprintf(fd, "%smpfr_neg (%s, %s, MPFR_RNDN);\n", indent, var1, var2):
fi:
return error_counter_on_copy(var1, var2, err):
fi
fi:
# Now, r is a rational number different from 1.
p := numer(r): q := denom(r):
if (abs(p)<>1)
then
fprintf(fd, "%smpfr_mul_si (%s, %s, %d, MPFR_RNDN);\n", indent, var1, var2, p):
err := error_counter_of_a_multiplication(var1, var2, "", err):
if(q<>1)
then
fprintf(fd, "%smpfr_div_si (%s, %s, %d, MPFR_RNDN);\n", indent, var1, var1, q):
err := error_counter_of_a_multiplication(var1, var1, "", err):
fi:
else
fprintf(fd, "%smpfr_div_si (%s, %s, %d, MPFR_RNDN);\n", indent, var1, var2, p*q):
err := error_counter_of_a_multiplication(var1, var2, "", err):
fi:
return err:
end proc:
#
# generate_multiply_poly is the same as generate_multiply_rational but when r is a rational fraction.
# The fraction r must have the form p/q where p and q are polynomials with integer coefficients.
# Moreover, the gcd of the coefficients of p must be 1. Idem for q.
# The procedure returns a list [m, d, err] where m is the set of indices k such that
# an mpfr_mul_sik function is needed and idem for d with mpfr_div_sik.
# err is an up-to-date error counter.
generate_multiply_poly := proc(fd, var1, var2, r, error_counter, indent:="")
local p,q,Lp,Lq,n,i,j,var, required_mulsi, required_divsi, err:
err := error_counter:
required_mulsi := {}:
required_divsi := {}:
p := numer(r): q := denom(r):
Lp := my_factors(p): Lq := my_factors(q):
if (Lp[1] <> 1)
then error "generate_multiply_poly: an integer can be factored out of %1", p:
fi:
if (Lq[1] <> 1)
then error "generate_multiply_poly: an integer can be factored out of %1", q:
fi:
Lp := Lp[2]: Lq := Lq[2]:
var := var2:
if (nops(Lp) <> 0)
then
n := 0:
for i from 1 to nops(Lp) do n := n + Lp[i][2] od:
if (n=1)
then
fprintf(fd, "%smpfr_mul_si (%s, %s", indent, var1, var):
else
required_mulsi := { op(required_mulsi), n }:
fprintf(fd, "%smpfr_mul_si%d (%s, %s", indent, n, var1, var):
fi:
for i from 1 to nops(Lp) do
for j from 1 to Lp[i][2] do
fprintf(fd, ", %a", Lp[i][1]):
od:
od:
fprintf(fd, ", MPFR_RNDN);\n"):
err := set_error_counter(var1, n+find_in_error_counter(var, err) , err):
var := var1:
fi:
if (nops(Lq) <> 0)
then
n := 0:
for i from 1 to nops(Lq) do n := n + Lq[i][2] od:
if (n=1)
then
fprintf(fd, "%smpfr_div_si (%s, %s", indent, var1, var):
else
required_divsi := { op(required_divsi), n }:
fprintf(fd, "%smpfr_div_si%d (%s, %s", indent, n, var1, var)
fi:
for i from 1 to nops(Lq) do
for j from 1 to Lq[i][2] do
fprintf(fd, ", %a", Lq[i][1])
od:
od:
fprintf(fd, ", MPFR_RNDN);\n"):
err := set_error_counter(var1, n+find_in_error_counter(var, err) , err):
var := var1:
fi:
if (var1 <> var) then
fprintf(fd, "%smpfr_set (%s, %s, MPFR_RNDN);\n", indent, var1, var):
err := set_error_counter(var1, find_in_error_counter(var, err) , err):
fi:
return [required_mulsi, required_divsi, err]:
end proc:
#
# This function generates the code of a procedure mpfr_mul_si<n> or mpfr_div_si<n>
#
generate_muldivsin := proc(op, n)
local i, var:
if ((op <> "mul") and (op <> "div"))
then error "Invalid argument to generate_muldivuin (%1). Must be 'mul' or 'div'", op
fi:
if (whattype(n) <> 'integer')
then error "Invalid argument to generate_muldivuin (%1). Must be an integer.", n
fi:
if (op="mul") then var := "MUL" else var := "DIV" fi:
printf("__MPFR_DECLSPEC void mpfr_div_si%d _MPFR_PROTO((mpfr_ptr, mpfr_srcptr,\n", n):
for i from n to 2 by -2 do
printf(" long int, long int,\n"):
od:
if (i=1)
then
printf(" long int, mpfr_rnd_t));\n"):
else
printf(" mpfr_rnd_t));\n")
fi:
printf("\n\n\n"):
printf("void\n"):
printf("mpfr_%s_si%d (mpfr_ptr y, mpfr_srcptr x,\n", op, n):
for i from n to 2 by -2 do
printf(" long int v%d, long int v%d,\n", n-i+1, n-i+2):
od:
if (i=1)
then
printf(" long int v%d, mpfr_rnd_t mode)\n", n):
else
printf(" mpfr_rnd_t mode)\n")
fi:
printf("{\n"):
printf(" long int acc = v1;\n"):
printf(" mpfr_set (y, x, mode);\n"):
for i from 2 to n do
printf(" MPFR_ACC_OR_%s (v%d);\n", var, i):
od:
printf(" mpfr_%s_si (y, y, acc, mode);\n", op):
printf("}\n"):
return:
end proc:
|
(* Property from Case-Analysis for Rippling and Inductive Proof,
Moa Johansson, Lucas Dixon and Alan Bundy, ITP 2010.
This Isabelle theory is produced using the TIP tool offered at the following website:
https://github.com/tip-org/tools
This file was originally provided as part of TIP benchmark at the following website:
https://github.com/tip-org/benchmarks
Yutaka Nagashima at CIIRC, CTU changed the TIP output theory file slightly
to make it compatible with Isabelle2017.
Some proofs were added by Yutaka Nagashima.*)
theory TIP_prop_73
imports "../../Test_Base"
begin
datatype 'a list = nil2 | cons2 "'a" "'a list"
fun x :: "'a list => 'a list => 'a list" where
"x (nil2) z = z"
| "x (cons2 z2 xs) z = cons2 z2 (x xs z)"
fun rev :: "'a list => 'a list" where
"rev (nil2) = nil2"
| "rev (cons2 z xs) = x (rev xs) (cons2 z (nil2))"
fun filter :: "('a => bool) => 'a list => 'a list" where
"filter y (nil2) = nil2"
| "filter y (cons2 z2 xs) =
(if y z2 then cons2 z2 (filter y xs) else filter y xs)"
theorem property0 :
"((rev (filter p xs)) = (filter p (rev xs)))"
apply(induct xs arbitrary: p)(*arbitrary: p is optional*)
apply auto[1]
apply(subst filter.simps)
apply(subst rev.simps)
apply(case_tac "p x1")
apply simp
apply(subgoal_tac "\<And>x1 xs revxs.
TIP_prop_73.rev (TIP_prop_73.filter p xs) = TIP_prop_73.filter p revxs \<Longrightarrow>
p x1 \<Longrightarrow>
x (TIP_prop_73.filter p revxs) (cons2 x1 nil2) =
TIP_prop_73.filter p (x revxs (cons2 x1 nil2))")
apply fastforce
apply(induct_tac rule:x.induct)
oops
end
|
#if !defined(PETIGAFTN_H)
#define PETIGAFTN_H
#include <petsc.h>
EXTERN_C_BEGIN
extern void IGA_Quadrature_1D(PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_Quadrature_2D(PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_Quadrature_3D(PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscInt,const PetscReal[],const PetscReal[],const PetscReal*,
PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
EXTERN_C_BEGIN
extern void IGA_BasisFuns_1D(PetscInt,
PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_BasisFuns_2D(PetscInt,
PetscInt,PetscInt,const PetscReal[],
PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_BasisFuns_3D(PetscInt,
PetscInt,PetscInt,const PetscReal[],
PetscInt,PetscInt,const PetscReal[],
PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
EXTERN_C_BEGIN
extern void IGA_Rationalize_1D(PetscInt,PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_Rationalize_2D(PetscInt,PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_Rationalize_3D(PetscInt,PetscInt,PetscInt,const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
EXTERN_C_BEGIN
extern void IGA_GeometryMap_1D(PetscInt,PetscInt,PetscInt,const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_GeometryMap_2D(PetscInt,PetscInt,PetscInt,const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_GeometryMap_3D(PetscInt,PetscInt,PetscInt,const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_GeometryMap (PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
EXTERN_C_BEGIN
extern void IGA_InverseMap_1D(PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_InverseMap_2D(PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_InverseMap_3D(PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
EXTERN_C_BEGIN
extern void IGA_ShapeFuns_1D(PetscInt,PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_ShapeFuns_2D(PetscInt,PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
extern void IGA_ShapeFuns_3D(PetscInt,PetscInt,PetscInt,
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
const PetscReal[],const PetscReal[],const PetscReal[],const PetscReal[],
PetscReal[],PetscReal[],PetscReal[],PetscReal[]);
EXTERN_C_END
#endif/*PETIGAFTN_H*/
|
function [Gc,Kp,Ti,Td,H]=optPID(key,typ,vars)
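% optPID designs a PI/PID controller; the argument conventions below are our
% reading of the code and may not match the original documentation exactly.
%   key  - controller structure: 2 = PI, 3 = PID (with derivative filter N),
%          4 = PID realised as a PI controller Gc plus a prefilter H
%   typ  - not used in this function body
%   vars - [k, L, T, N, iC] for a first-order-plus-dead-time model
%          (gain k, delay L, time constant T, filter N, criterion index iC), or
%          [k, L, T, N, Kc, Tc, kappa] when designing from critical-point data
% Outputs: controller Gc, parameters Kp, Ti, Td and prefilter H (H = 1 unless key = 4).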
k=vars(1); L=vars(2); T=vars(3); N=vars(4);
Td=[];H=1;
if length(vars)==5, iC=vars(5); tt=0;
else,
Kc=vars(5); Tc=vars(6); kappa=vars(7); tt=1;
end
if tt==0
if key==2
PIDtab=[0.980, 0.712, 0.569, 1.072, 0.786, 0.628;
-0.892,-0.921,-0.951,-0.560,-0.559,-0.583;
0.690, 0.968, 1.023, 0.648, 0.883, 1.007;
-0.155,-0.247,-0.179,-0.114,-0.158,-0.167];
elseif key==3
PIDtab=[1.048, 1.042, 0.968, 1.154, 1.142, 1.061;
-0.897,-0.897,-0.904,-0.567,-0.579,-0.583;
1.195, 0.987, 0.977, 1.047, 0.919, 0.892;
-0.368,-0.238,-0.253,-0.220,-0.172,-0.165;
0.489, 0.385, 0.316, 0.490, 0.384, 0.315;
0.888, 0.906, 0.892, 0.708, 0.839, 0.832];
elseif key==4
PIDtab=[1.260, 1.053, 0.942, 1.295, 1.120, 1.001;
-0.887,-0.930,-0.933,-0.619,-0.625,-0.624;
0.701, 0.736, 0.770, 0.661, 0.720, 0.754;
-0.147,-0.126,-0.130,-0.110,-0.114,-0.116;
0.375, 0.349, 0.308, 0.378, 0.350, 0.308;
0.886, 0.907, 0.897, 0.756, 0.811, 0.813];
end
ii=0; if (L/T>1) ii=3; end; tt=L/T;
a1=PIDtab(1,ii+iC); b1=PIDtab(2,ii+iC);
a2=PIDtab(3,ii+iC); b2=PIDtab(4,ii+iC);
Kp=a1/k*tt^b1; Ti=T/(a2+b2*tt);
if key==3| key==4
a3=PIDtab(5,ii+iC); b3=PIDtab(6,ii+iC);
Td=a3*T*tt^b3;
end
else
if key==2,
Kp=0.361*Kc; Ti=0.083*(1.935*kappa+1)*Tc;
elseif key==3,
Kp=0.509*Kc; Td=0.125*Tc;
Ti=0.051*(3.302*kappa+1)*Tc;
elseif key==4,
Kp=(4.437*kappa-1.587)...
/(8.024*kappa-1.435)*Kc;
Ti=0.037*(5.89*kappa+1)*Tc;
Td=0.112*Tc;
end
end
if key==2, Gc=tf(Kp*[Ti,1],[Ti,0]);
elseif key==3
nn=[Kp*Ti*Td*(N+1)/N, Kp*(Ti+Td/N), Kp];
dd=Ti*[Td/N,1,0]; Gc=tf(nn,dd);
elseif key==4
Gc=tf(Kp*[Ti,1],[Ti,0]);
nH=[(1+Kp/N)*Ti*Td, Kp*(Ti+Td/N), Kp];
dH=Kp*conv([Ti,1],[Td/N,1]); H=tf(nH,dH);
end
|
(* Title: HOL/Auth/n_germanSimp_lemma_inv__38_on_rules.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_germanSimp Protocol Case Study*}
theory n_germanSimp_lemma_inv__38_on_rules imports n_germanSimp_lemma_on_inv__38
begin
section{*All lemmas on causal relation between inv__38*}
lemma lemma_inv__38_on_rules:
assumes b1: "r \<in> rules N" and b2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__38 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
proof -
have c1: "(\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqS N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__0 N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__1 N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendGntS i)\<or>
(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)\<or>
(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)"
apply (cut_tac b1, auto) done
moreover {
assume d1: "(\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_StoreVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqS N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqSVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__0 N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqE__part__0Vsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvReqE__part__1 N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvReqE__part__1Vsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInv__part__0Vsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInv__part__1Vsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendInvAckVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvInvAckVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendGntS i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendGntSVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_SendGntEVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvGntSVsinv__38) done
}
moreover {
assume d1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)"
have "invHoldForRule s f r (invariants N)"
apply (cut_tac b2 d1, metis n_RecvGntEVsinv__38) done
}
ultimately show "invHoldForRule s f r (invariants N)"
by satx
qed
end
|
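# Graphs on a vertex set V are represented here by their edge relation E: a
# symmetric, irreflexive relation on V, so each undirected edge {u,v} appears
# as both ordered pairs [u,v] and [v,u] (see `is_element/graphs` below).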
`is_element/graphs` := (V::set) -> proc(E)
if not(`is_element/autorel`(V)(E)) then
return false;
fi;
if not(`is_symmetric/autorel`(V)(E)) then
return false;
fi;
if not(`is_irreflexive/autorel`(V)(E)) then
return false;
fi;
return true;
end:
`is_leq/graphs` := (V::set) -> (E0,E1) -> evalb(E0 minus E1 = {}):
`list_elements/graphs` := proc(V::set)
local n,E0,EE,i,j;
n := nops(V);
E0 := {seq(seq([V[i],V[j]],j=i+1..n),i=1..n-1)};
EE := `list_elements/subsets`(E0);
EE := map(E -> E union `op/autorel`(V)(E),EE);
return EE;
end:
`count_elements/graphs` := (V::set) -> 2^(nops(V)*(nops(V)-1)/2);
`random_element/graphs` := (V::set) -> proc()
local n,E0,E,i,j;
n := nops(V);
E0 := {seq(seq([V[i],V[j]],j=i+1..n),i=1..n-1)};
E := `random_element/subsets`(E0)();
E := E union `op/autorel`(V)(E);
return E;
end:
`neighbour_table/graphs` := (V) -> proc(E)
local N,v,e;
N := table();
for v in V do N[v] := {}; od:
for e in E do
N[e[1]] := {op(N[e[1]]),e[2]};
od:
return eval(N);
end:
`component_relation/graphs` := (V) -> (E) ->
`transitive_closure/autorel`(V)(
E union `id/autorel`(V)
);
`components/graphs` := (V) -> (E) ->
`block_partition/equiv`(`component_relation/graphs`(V)(E));
`is_connected/graphs` := (V) -> (E) ->
evalb(`component_relation/graphs`(V)(E) = `top/autorel`(V));
`is_forest/graphs` := (V) -> proc(E)
local CC,V0,E0;
CC := `components/graphs`(V)(E);
for V0 in CC do
E0 := select(e -> member(e[1],V0),V0);
if nops(E0) <> nops(V0) - 1 then
return false;
fi;
od:
return true;
end:
`is_tree/graphs` := (V) -> proc(E)
`is_connected/graphs`(V)(E) and
nops(E) = nops(V) - 1;
end:
`skeleton/graphs` := (V::set) -> proc(E)
local n,V0,E0,ix,i;
n := nops(V);
V0 := {seq(i,i=1..n)};
ix := table():
for i from 1 to n do ix[V[i]] := i; od;
E0 := map(e -> [ix[e[1]],ix[e[2]]],E);
return [V0,E0];
end:
`is_element/paths` := (V) -> (E) -> proc(p)
local i,n;
if not type(p,list) then
return false;
fi;
n := nops(p) - 1;
if n < 0 then
return false;
fi;
if {op(p)} minus V <> {} then
return false;
fi;
for i from 1 to n do
if not(member([p[i],p[i+1]],E)) then
return false;
fi;
od;
return true;
end:
`is_small_element/paths` := (V) -> (E) -> (n::nonnegint) -> proc(p)
return evalb(`is_element/paths`(V)(E)(p) and (nops(p) = n+1));
end:
`random_small_element/paths` := (V) -> (E) -> (n::nonnegint) -> proc()
local v,p,e,i;
if E = {} then
if V = {} or n > 0 then
return FAIL;
else
return [random_element_of(V)()];
fi;
fi;
v := random_element_of(E)()[1];
p := v;
for i from 1 to n do
e := random_element_of(select(e -> member(v,e),E))();
v := op({op(e)} minus {v});
p := p,v;
od;
return [p];
end:
`list_small_elements/paths` := (V) -> (E) -> proc(n::nonnegint)
option remember;
local P,Q,p,v,F,e;
if n = 0 then
return map(v -> [v],V);
else
P := `list_small_elements/paths`(V)(E)(n-1);
Q := NULL;
for p in P do
v := p[1];
F := select(e -> e[2] = v,E);
Q := Q,seq([e[1],op(p)],e in F);
od:
return [Q];
fi;
end:
`is_leq/paths` := NULL;
`count_elements/paths` := NULL:
`count_small_elements/paths` := NULL:
`length/paths` := (V) -> (E) -> (p) -> nops(p) - 1;
`is_trail/paths` := proc(p)
local n,e,i;
n := nops(p) - 1;
e := {seq([p[i],p[i+1]],i=1..n)};
return evalb(nops(e) = n);
end:
`is_cycle/paths` := proc(p)
local n,i;
n := nops(p) - 1;
return evalb(p[1] = p[n+1] and nops({seq(p[i],i=1..n)}) = n);
end:
`is_small_element/trails` := (V) -> (E) -> (n) -> proc(p)
if not `is_small_element/paths`(V)(E)(n)(p) then
return false;
fi;
return `is_trail/paths`(p);
end:
`is_element/trails` := (V) -> (E) -> proc(p)
if not `is_element/paths`(V)(E)(p) then
return false;
fi;
return `is_trail/paths`(p);
end:
`list_small_elements/trails` := (V) -> (E) -> proc(n)
option remember;
local E0,E1,P,Q,p,v,F,i,e;
if n = 0 then
return map(v -> [v],V);
else
P := `list_small_elements/trails`(V)(E)(n-1);
Q := NULL;
for p in P do
E0 := {seq([p[i],p[i+1]],i=1..n-1),seq([p[i+1],p[i]],i=1..n-1)};
E1 := E minus E0;
v := p[1];
F := select(e -> e[2] = v,E1);
Q := Q,seq([e[1],op(p)],e in F);
od:
return [Q];
fi;
end:
`random_small_element/trails` := (V) -> (E) -> (n) -> proc(num_tries := 10)
local k,p,E0,E1,v,F,e,i;
k := num_tries;
if n = 0 then
if V = {} then
return FAIL;
else
return [random_element_of(V)()];
fi;
elif n = 1 then
if E = {} then
return FAIL;
else
return random_element_of(E)();
fi;
fi;
while k > 0 do
p := `random_small_element/trails`(V)(E)(n-1)();
k := k - 1;
if p <> FAIL then
E0 := {seq([p[i],p[i+1]],i=1..n-1),seq([p[i+1],p[i]],i=1..n-1)};
E1 := E minus E0;
v := p[1];
F := select(e -> e[2] = v,E1);
if F <> {} then
e := random_element_of(F)();
return [e[1],op(p)];
fi;
fi;
od;
return FAIL;
end:
`is_leq/trails` := NULL:
`count_elements/trails` := NULL:
`count_small_elements/trails` := NULL:
###################################################################### |
[STATEMENT]
lemma part_reflexivity: "P x x"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. P x x
[PROOF STEP]
proof -
[PROOF STATE]
proof (state)
goal (1 subgoal):
1. P x x
[PROOF STEP]
have "x = x"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. x = x
[PROOF STEP]
..
[PROOF STATE]
proof (state)
this:
x = x
goal (1 subgoal):
1. P x x
[PROOF STEP]
hence "PP x x \<or> x = x"
[PROOF STATE]
proof (prove)
using this:
x = x
goal (1 subgoal):
1. PP x x \<or> x = x
[PROOF STEP]
..
[PROOF STATE]
proof (state)
this:
PP x x \<or> x = x
goal (1 subgoal):
1. P x x
[PROOF STEP]
with part_eq
[PROOF STATE]
proof (chain)
picking this:
P ?x ?y = (PP ?x ?y \<or> ?x = ?y)
PP x x \<or> x = x
[PROOF STEP]
show "P x x"
[PROOF STATE]
proof (prove)
using this:
P ?x ?y = (PP ?x ?y \<or> ?x = ?y)
PP x x \<or> x = x
goal (1 subgoal):
1. P x x
[PROOF STEP]
..
[PROOF STATE]
proof (state)
this:
P x x
goal:
No subgoals!
[PROOF STEP]
qed |
(*
* Copyright 2014, General Dynamics C4 Systems
*
* SPDX-License-Identifier: GPL-2.0-only
*)
theory Ipc_R
imports Finalise_R
begin
context begin interpretation Arch . (*FIXME: arch_split*)
lemmas lookup_slot_wrapper_defs'[simp] =
lookupSourceSlot_def lookupTargetSlot_def lookupPivotSlot_def
lemma get_mi_corres: "corres ((=) \<circ> message_info_map)
(tcb_at t) (tcb_at' t)
(get_message_info t) (getMessageInfo t)"
apply (rule corres_guard_imp)
apply (unfold get_message_info_def getMessageInfo_def fun_app_def)
apply (simp add: X64_H.msgInfoRegister_def
X64.msgInfoRegister_def X64_A.msg_info_register_def)
apply (rule corres_split_eqr [OF _ user_getreg_corres])
apply (rule corres_trivial, simp add: message_info_from_data_eqv)
apply (wp | simp)+
done
lemma get_mi_inv'[wp]: "\<lbrace>I\<rbrace> getMessageInfo a \<lbrace>\<lambda>x. I\<rbrace>"
by (simp add: getMessageInfo_def, wp)
definition
"get_send_cap_relation rv rv' \<equiv>
(case rv of Some (c, cptr) \<Rightarrow> (\<exists>c' cptr'. rv' = Some (c', cptr') \<and>
cte_map cptr = cptr' \<and>
cap_relation c c')
| None \<Rightarrow> rv' = None)"
lemma cap_relation_mask:
"\<lbrakk> cap_relation c c'; msk' = rights_mask_map msk \<rbrakk> \<Longrightarrow>
cap_relation (mask_cap msk c) (maskCapRights msk' c')"
by simp
lemma lsfco_cte_at':
"\<lbrace>valid_objs' and valid_cap' cap\<rbrace>
lookupSlotForCNodeOp f cap idx depth
\<lbrace>\<lambda>rv. cte_at' rv\<rbrace>, -"
apply (simp add: lookupSlotForCNodeOp_def)
apply (rule conjI)
prefer 2
apply clarsimp
apply (wp)
apply (clarsimp simp: split_def unlessE_def
split del: if_split)
apply (wp hoare_drop_imps throwE_R)
done
declare unifyFailure_wp [wp]
(* FIXME: move *)
lemma unifyFailure_wp_E [wp]:
"\<lbrace>P\<rbrace> f -, \<lbrace>\<lambda>_. E\<rbrace> \<Longrightarrow> \<lbrace>P\<rbrace> unifyFailure f -, \<lbrace>\<lambda>_. E\<rbrace>"
unfolding validE_E_def
by (erule unifyFailure_wp)+
(* FIXME: move *)
lemma unifyFailure_wp2 [wp]:
assumes x: "\<lbrace>P\<rbrace> f \<lbrace>\<lambda>_. Q\<rbrace>"
shows "\<lbrace>P\<rbrace> unifyFailure f \<lbrace>\<lambda>_. Q\<rbrace>"
by (wp x, simp)
definition
ct_relation :: "captransfer \<Rightarrow> cap_transfer \<Rightarrow> bool"
where
"ct_relation ct ct' \<equiv>
ct_receive_root ct = to_bl (ctReceiveRoot ct')
\<and> ct_receive_index ct = to_bl (ctReceiveIndex ct')
\<and> ctReceiveDepth ct' = unat (ct_receive_depth ct)"
(* MOVE *)
lemma valid_ipc_buffer_ptr_aligned_word_size_bits:
"\<lbrakk>valid_ipc_buffer_ptr' a s; is_aligned y word_size_bits \<rbrakk> \<Longrightarrow> is_aligned (a + y) word_size_bits"
unfolding valid_ipc_buffer_ptr'_def
apply clarsimp
apply (erule (1) aligned_add_aligned)
apply (simp add: msg_align_bits word_size_bits_def)
done
(* MOVE *)
lemma valid_ipc_buffer_ptr'D2:
"\<lbrakk>valid_ipc_buffer_ptr' a s; y < max_ipc_words * word_size; is_aligned y word_size_bits\<rbrakk> \<Longrightarrow> typ_at' UserDataT (a + y && ~~ mask pageBits) s"
unfolding valid_ipc_buffer_ptr'_def
apply clarsimp
apply (subgoal_tac "(a + y) && ~~ mask pageBits = a && ~~ mask pageBits")
apply simp
apply (rule mask_out_first_mask_some [where n = msg_align_bits])
apply (erule is_aligned_add_helper [THEN conjunct2])
apply (erule order_less_le_trans)
apply (simp add: msg_align_bits max_ipc_words word_size_def)
apply simp
done
lemma load_ct_corres:
notes msg_max_words_simps = max_ipc_words_def msgMaxLength_def msgMaxExtraCaps_def msgLengthBits_def
capTransferDataSize_def msgExtraCapBits_def
shows
"corres ct_relation \<top> (valid_ipc_buffer_ptr' buffer) (load_cap_transfer buffer) (loadCapTransfer buffer)"
apply (simp add: load_cap_transfer_def loadCapTransfer_def
captransfer_from_words_def
capTransferDataSize_def capTransferFromWords_def
msgExtraCapBits_def word_size add.commute add.left_commute
msg_max_length_def msg_max_extra_caps_def word_size_def
msgMaxLength_def msgMaxExtraCaps_def msgLengthBits_def wordSize_def wordBits_def
del: upt.simps)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ load_word_corres])
apply (rule corres_split [OF _ load_word_corres])
apply (rule corres_split [OF _ load_word_corres])
apply (rule_tac P=\<top> and P'=\<top> in corres_inst)
apply (clarsimp simp: ct_relation_def)
apply (wp no_irq_loadWord)+
apply simp
apply (simp add: conj_comms)
apply safe
apply (erule valid_ipc_buffer_ptr_aligned_word_size_bits, simp add: is_aligned_def word_size_bits_def)+
apply (erule valid_ipc_buffer_ptr'D2,
simp add: msg_max_words_simps word_size_def word_size_bits_def,
simp add: word_size_bits_def is_aligned_def)+
done
lemma get_recv_slot_corres:
"corres (\<lambda>xs ys. ys = map cte_map xs)
(tcb_at receiver and valid_objs and pspace_aligned)
(tcb_at' receiver and valid_objs' and pspace_aligned' and pspace_distinct' and
case_option \<top> valid_ipc_buffer_ptr' recv_buf)
(get_receive_slots receiver recv_buf)
(getReceiveSlots receiver recv_buf)"
apply (cases recv_buf)
apply (simp add: getReceiveSlots_def)
apply (simp add: getReceiveSlots_def split_def)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ load_ct_corres])
apply (rule corres_empty_on_failure)
apply (rule corres_splitEE)
prefer 2
apply (rule corres_unify_failure)
apply (rule lookup_cap_corres)
apply (simp add: ct_relation_def)
apply simp
apply (rule corres_splitEE)
prefer 2
apply (rule corres_unify_failure)
apply (simp add: ct_relation_def)
apply (erule lsfc_corres [OF _ refl])
apply simp
apply (simp add: split_def liftE_bindE unlessE_whenE)
apply (rule corres_split [OF _ get_cap_corres])
apply (rule corres_split_norE)
apply (rule corres_trivial, simp add: returnOk_def)
apply (rule corres_whenE)
apply (case_tac cap, auto)[1]
apply (rule corres_trivial, simp)
apply simp
apply (wp lookup_cap_valid lookup_cap_valid' lsfco_cte_at | simp)+
done
lemma get_recv_slot_inv'[wp]:
"\<lbrace> P \<rbrace> getReceiveSlots receiver buf \<lbrace>\<lambda>rv'. P \<rbrace>"
apply (case_tac buf)
apply (simp add: getReceiveSlots_def)
apply (simp add: getReceiveSlots_def
split_def unlessE_def)
apply (wp | simp)+
done
lemma get_rs_cte_at'[wp]:
"\<lbrace>\<top>\<rbrace>
getReceiveSlots receiver recv_buf
\<lbrace>\<lambda>rv s. \<forall>x \<in> set rv. cte_wp_at' (\<lambda>c. cteCap c = capability.NullCap) x s\<rbrace>"
apply (cases recv_buf)
apply (simp add: getReceiveSlots_def)
apply (wp,simp)
apply (clarsimp simp add: getReceiveSlots_def
split_def whenE_def unlessE_whenE)
apply wp
apply simp
apply (rule getCTE_wp)
apply (simp add: cte_wp_at_ctes_of cong: conj_cong)
apply wp+
apply simp
done
lemma get_rs_real_cte_at'[wp]:
"\<lbrace>valid_objs'\<rbrace>
getReceiveSlots receiver recv_buf
\<lbrace>\<lambda>rv s. \<forall>x \<in> set rv. real_cte_at' x s\<rbrace>"
apply (cases recv_buf)
apply (simp add: getReceiveSlots_def)
apply (wp,simp)
apply (clarsimp simp add: getReceiveSlots_def
split_def whenE_def unlessE_whenE)
apply wp
apply simp
apply (wp hoare_drop_imps)[1]
apply simp
apply (wp lookup_cap_valid')+
apply simp
done
declare word_div_1 [simp]
declare word_minus_one_le [simp]
declare word64_minus_one_le [simp]
lemma load_word_offs_corres':
"\<lbrakk> y < unat max_ipc_words; y' = of_nat y * 8 \<rbrakk> \<Longrightarrow>
corres (=) \<top> (valid_ipc_buffer_ptr' a) (load_word_offs a y) (loadWordUser (a + y'))"
apply simp
apply (erule load_word_offs_corres)
done
declare loadWordUser_inv [wp]
lemma getExtraCptrs_inv[wp]:
"\<lbrace>P\<rbrace> getExtraCPtrs buf mi \<lbrace>\<lambda>rv. P\<rbrace>"
apply (cases mi, cases buf, simp_all add: getExtraCPtrs_def)
apply (wp dmo_inv' mapM_wp' loadWord_inv)
done
lemma getSlotCap_cte_wp_at_rv:
"\<lbrace>cte_wp_at' (\<lambda>cte. P (cteCap cte) cte) p\<rbrace>
getSlotCap p
\<lbrace>\<lambda>rv. cte_wp_at' (P rv) p\<rbrace>"
apply (simp add: getSlotCap_def)
apply (wp getCTE_ctes_wp)
apply (clarsimp simp: cte_wp_at_ctes_of)
done
lemma badge_derived_mask [simp]:
"badge_derived' (maskCapRights R c) c' = badge_derived' c c'"
by (simp add: badge_derived'_def)
declare derived'_not_Null [simp]
lemma maskCapRights_vsCapRef[simp]:
"vsCapRef (maskCapRights msk cap) = vsCapRef cap"
unfolding vsCapRef_def
apply (cases cap, simp_all add: maskCapRights_def isCap_simps Let_def)
apply (rename_tac arch_capability)
apply (case_tac arch_capability;
simp add: maskCapRights_def X64_H.maskCapRights_def isCap_simps Let_def)
done
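
(* Writing an extra badge to the IPC buffer corresponds on both levels,
   provided the badge's word index stays below max_ipc_words. *)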
lemma corres_set_extra_badge:
"b' = b \<Longrightarrow>
corres dc (in_user_frame buffer)
(valid_ipc_buffer_ptr' buffer and
(\<lambda>_. msg_max_length + 2 + n < unat max_ipc_words))
(set_extra_badge buffer b n) (setExtraBadge buffer b' n)"
apply (rule corres_gen_asm2)
apply (drule store_word_offs_corres [where a=buffer and w=b])
apply (simp add: set_extra_badge_def setExtraBadge_def buffer_cptr_index_def
bufferCPtrOffset_def Let_def)
apply (simp add: word_size word_size_def wordSize_def wordBits_def
bufferCPtrOffset_def buffer_cptr_index_def msgMaxLength_def
msg_max_length_def msgLengthBits_def store_word_offs_def
add.commute add.left_commute)
done
crunch typ_at': setExtraBadge "\<lambda>s. P (typ_at' T p s)"
lemmas setExtraBadge_typ_ats' [wp] = typ_at_lifts [OF setExtraBadge_typ_at']
crunch valid_pspace' [wp]: setExtraBadge valid_pspace'
crunch cte_wp_at' [wp]: setExtraBadge "cte_wp_at' P p"
crunch ipc_buffer' [wp]: setExtraBadge "valid_ipc_buffer_ptr' buffer"
crunch inv'[wp]: getExtraCPtr P (wp: dmo_inv' loadWord_inv)
lemmas unifyFailure_discard2
= corres_injection[OF id_injection unifyFailure_injection, simplified]
lemma deriveCap_not_null:
"\<lbrace>\<top>\<rbrace> deriveCap slot cap \<lbrace>\<lambda>rv. K (rv \<noteq> NullCap \<longrightarrow> cap \<noteq> NullCap)\<rbrace>,-"
apply (simp add: deriveCap_def split del: if_split)
apply (case_tac cap)
apply (simp_all add: Let_def isCap_simps)
apply wp
apply simp
done
lemma deriveCap_derived_foo:
"\<lbrace>\<lambda>s. \<forall>cap'. (cte_wp_at' (\<lambda>cte. badge_derived' cap (cteCap cte)
\<and> capASID cap = capASID (cteCap cte) \<and> cap_asid_base' cap = cap_asid_base' (cteCap cte)
\<and> cap_vptr' cap = cap_vptr' (cteCap cte)) slot s
\<and> valid_objs' s \<and> cap' \<noteq> NullCap \<longrightarrow> cte_wp_at' (is_derived' (ctes_of s) slot cap' \<circ> cteCap) slot s)
\<and> (cte_wp_at' (untyped_derived_eq cap \<circ> cteCap) slot s
\<longrightarrow> cte_wp_at' (untyped_derived_eq cap' \<circ> cteCap) slot s)
\<and> (s \<turnstile>' cap \<longrightarrow> s \<turnstile>' cap') \<and> (cap' \<noteq> NullCap \<longrightarrow> cap \<noteq> NullCap) \<longrightarrow> Q cap' s\<rbrace>
deriveCap slot cap \<lbrace>Q\<rbrace>,-"
using deriveCap_derived[where slot=slot and c'=cap] deriveCap_valid[where slot=slot and c=cap]
deriveCap_untyped_derived[where slot=slot and c'=cap] deriveCap_not_null[where slot=slot and cap=cap]
apply (clarsimp simp: validE_R_def validE_def valid_def split: sum.split)
apply (frule in_inv_by_hoareD[OF deriveCap_inv])
apply (clarsimp simp: o_def)
apply (drule spec, erule mp)
apply safe
apply fastforce
apply (drule spec, drule(1) mp)
apply fastforce
apply (drule spec, drule(1) mp)
apply fastforce
apply (drule spec, drule(1) bspec, simp)
done
lemma valid_mdb_untyped_incD':
"valid_mdb' s \<Longrightarrow> untyped_inc' (ctes_of s)"
by (simp add: valid_mdb'_def valid_mdb_ctes_def)
lemma cteInsert_cte_wp_at:
"\<lbrace>\<lambda>s. cte_wp_at' (\<lambda>c. is_derived' (ctes_of s) src cap (cteCap c)) src s
\<and> valid_mdb' s \<and> valid_objs' s
\<and> (if p = dest then P cap
else cte_wp_at' (\<lambda>c. P (maskedAsFull (cteCap c) cap)) p s)\<rbrace>
cteInsert cap src dest
\<lbrace>\<lambda>uu. cte_wp_at' (\<lambda>c. P (cteCap c)) p\<rbrace>"
apply (simp add: cteInsert_def)
apply (wp updateMDB_weak_cte_wp_at updateCap_cte_wp_at_cases getCTE_wp static_imp_wp
| clarsimp simp: comp_def
| unfold setUntypedCapAsFull_def)+
apply (drule cte_at_cte_wp_atD)
apply (elim exE)
apply (rule_tac x=cte in exI)
apply clarsimp
apply (drule cte_at_cte_wp_atD)
apply (elim exE)
apply (rule_tac x=ctea in exI)
apply clarsimp
apply (cases "p=dest")
apply (clarsimp simp: cte_wp_at'_def)
apply (cases "p=src")
apply clarsimp
apply (intro conjI impI)
apply ((clarsimp simp: cte_wp_at'_def maskedAsFull_def split: if_split_asm)+)[2]
apply clarsimp
apply (rule conjI)
apply (clarsimp simp: maskedAsFull_def cte_wp_at_ctes_of split:if_split_asm)
apply (erule disjE) prefer 2 apply simp
apply (clarsimp simp: is_derived'_def isCap_simps)
apply (drule valid_mdb_untyped_incD')
apply (case_tac cte, case_tac cteb, clarsimp)
apply (drule untyped_incD', (simp add: isCap_simps)+)
apply (frule(1) ctes_of_valid'[where p = p])
apply (clarsimp simp:valid_cap'_def capAligned_def split:if_splits)
apply (drule_tac y ="of_nat fb" in word_plus_mono_right[OF _ is_aligned_no_overflow',rotated])
apply simp+
apply (rule word_of_nat_less)
apply simp
apply (simp add:p_assoc_help)
apply (simp add: max_free_index_def)
apply (clarsimp simp: maskedAsFull_def is_derived'_def badge_derived'_def
isCap_simps capMasterCap_def cte_wp_at_ctes_of
split: if_split_asm capability.splits)
done
lemma cteInsert_weak_cte_wp_at3:
assumes imp: "\<And>c. P c \<Longrightarrow> \<not> isUntypedCap c"
shows "\<lbrace>\<lambda>s. if p = dest then P cap
else cte_wp_at' (\<lambda>c. P (cteCap c)) p s\<rbrace>
cteInsert cap src dest
\<lbrace>\<lambda>uu. cte_wp_at' (\<lambda>c. P (cteCap c)) p\<rbrace>"
by (wp updateMDB_weak_cte_wp_at updateCap_cte_wp_at_cases getCTE_wp' static_imp_wp
| clarsimp simp: comp_def cteInsert_def
| unfold setUntypedCapAsFull_def
| auto simp: cte_wp_at'_def dest!: imp)+
lemma maskedAsFull_null_cap[simp]:
"(maskedAsFull x y = capability.NullCap) = (x = capability.NullCap)"
"(capability.NullCap = maskedAsFull x y) = (x = capability.NullCap)"
by (case_tac x, auto simp:maskedAsFull_def isCap_simps )
lemma maskCapRights_eq_null:
"(RetypeDecls_H.maskCapRights r xa = capability.NullCap) =
(xa = capability.NullCap)"
apply (cases xa; simp add: maskCapRights_def isCap_simps)
apply (rename_tac arch_capability)
apply (case_tac arch_capability)
apply (simp_all add: X64_H.maskCapRights_def isCap_simps)
done
lemma cte_refs'_maskedAsFull[simp]:
"cte_refs' (maskedAsFull a b) = cte_refs' a"
apply (rule ext)+
apply (case_tac a)
apply (clarsimp simp:maskedAsFull_def isCap_simps)+
done
lemma tc_loop_corres:
"\<lbrakk> list_all2 (\<lambda>(cap, slot) (cap', slot'). cap_relation cap cap'
\<and> slot' = cte_map slot) caps caps';
mi' = message_info_map mi \<rbrakk> \<Longrightarrow>
corres ((=) \<circ> message_info_map)
(\<lambda>s. valid_objs s \<and> pspace_aligned s \<and> pspace_distinct s \<and> valid_mdb s
\<and> valid_list s
\<and> (case ep of Some x \<Rightarrow> ep_at x s | _ \<Rightarrow> True)
\<and> (\<forall>x \<in> set slots. cte_wp_at (\<lambda>cap. cap = cap.NullCap) x s \<and>
real_cte_at x s)
\<and> (\<forall>(cap, slot) \<in> set caps. valid_cap cap s \<and>
cte_wp_at (\<lambda>cp'. (cap \<noteq> cap.NullCap \<longrightarrow> cp'\<noteq>cap \<longrightarrow> cp' = masked_as_full cap cap )) slot s )
\<and> distinct slots
\<and> in_user_frame buffer s)
(\<lambda>s. valid_pspace' s
\<and> (case ep of Some x \<Rightarrow> ep_at' x s | _ \<Rightarrow> True)
\<and> (\<forall>x \<in> set (map cte_map slots).
cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) x s
\<and> real_cte_at' x s)
\<and> distinct (map cte_map slots)
\<and> valid_ipc_buffer_ptr' buffer s
\<and> (\<forall>(cap, slot) \<in> set caps'. valid_cap' cap s \<and>
cte_wp_at' (\<lambda>cte. cap \<noteq> NullCap \<longrightarrow> cteCap cte \<noteq> cap \<longrightarrow> cteCap cte = maskedAsFull cap cap) slot s)
\<and> 2 + msg_max_length + n + length caps' < unat max_ipc_words)
(transfer_caps_loop ep buffer n caps slots mi)
(transferCapsToSlots ep buffer n caps'
(map cte_map slots) mi')"
(is "\<lbrakk> list_all2 ?P caps caps'; ?v \<rbrakk> \<Longrightarrow> ?corres")
proof (induct caps caps' arbitrary: slots n mi mi' rule: list_all2_induct)
case Nil
show ?case using Nil.prems by (case_tac mi, simp)
next
case (Cons x xs y ys slots n mi mi')
note if_weak_cong[cong] if_cong [cong del]
assume P: "?P x y"
show ?case using Cons.prems P
apply (clarsimp split del: if_split)
apply (simp add: Let_def split_def word_size liftE_bindE
word_bits_conv[symmetric] split del: if_split)
apply (rule corres_const_on_failure)
apply (simp add: dc_def[symmetric] split del: if_split)
apply (rule corres_guard_imp)
apply (rule corres_if2)
apply (case_tac "fst x", auto simp add: isCap_simps)[1]
apply (rule corres_split [OF _ corres_set_extra_badge])
apply (drule conjunct1)
apply simp
apply (rule corres_rel_imp, rule Cons.hyps, simp_all)[1]
apply (case_tac mi, simp)
apply (clarsimp simp: is_cap_simps)
apply (simp add: split_def)
apply (wp hoare_vcg_const_Ball_lift)
apply (subgoal_tac "obj_ref_of (fst x) = capEPPtr (fst y)")
prefer 2
apply (clarsimp simp: is_cap_simps)
apply (simp add: split_def)
apply (wp hoare_vcg_const_Ball_lift)
apply (rule_tac P="slots = []" and Q="slots \<noteq> []" in corres_disj_division)
apply simp
apply (rule corres_trivial, simp add: returnOk_def)
apply (case_tac mi, simp)
apply (simp add: list_case_If2 split del: if_split)
apply (rule corres_splitEE)
prefer 2
apply (rule unifyFailure_discard2)
apply (case_tac mi, clarsimp)
apply (rule derive_cap_corres)
apply (simp add: remove_rights_def)
apply clarsimp
apply (rule corres_split_norE)
apply (simp add: liftE_bindE)
apply (rule corres_split_nor)
prefer 2
apply (rule cins_corres, simp_all add: hd_map)[1]
apply (simp add: tl_map)
apply (rule corres_rel_imp, rule Cons.hyps, simp_all)[1]
apply (wp valid_case_option_post_wp hoare_vcg_const_Ball_lift
hoare_vcg_const_Ball_lift cap_insert_weak_cte_wp_at)
apply (wp hoare_vcg_const_Ball_lift | simp add:split_def del: imp_disj1)+
apply (wp cap_insert_cte_wp_at)
apply (wp valid_case_option_post_wp hoare_vcg_const_Ball_lift
cteInsert_valid_pspace
| simp add: split_def)+
apply (wp cteInsert_weak_cte_wp_at hoare_valid_ipc_buffer_ptr_typ_at')+
apply (wp hoare_vcg_const_Ball_lift cteInsert_cte_wp_at valid_case_option_post_wp
| simp add:split_def)+
apply (rule corres_whenE)
apply (case_tac cap', auto)[1]
apply (rule corres_trivial, simp)
apply (case_tac mi, simp)
apply simp
apply (unfold whenE_def)
apply wp+
apply (clarsimp simp: conj_comms ball_conj_distrib split del: if_split)
apply (rule_tac Q' ="\<lambda>cap' s. (cap'\<noteq> cap.NullCap \<longrightarrow>
cte_wp_at (is_derived (cdt s) (a, b) cap') (a, b) s
\<and> QM s cap')" for QM
in hoare_post_imp_R)
prefer 2
apply clarsimp
apply assumption
apply (subst imp_conjR)
apply (rule hoare_vcg_conj_liftE_R)
apply (rule derive_cap_is_derived)
apply (wp derive_cap_is_derived_foo)+
apply (simp split del: if_split)
apply (rule_tac Q' ="\<lambda>cap' s. (cap'\<noteq> capability.NullCap \<longrightarrow>
cte_wp_at' (\<lambda>c. is_derived' (ctes_of s) (cte_map (a, b)) cap' (cteCap c)) (cte_map (a, b)) s
\<and> QM s cap')" for QM
in hoare_post_imp_R)
prefer 2
apply clarsimp
apply assumption
apply (subst imp_conjR)
apply (rule hoare_vcg_conj_liftE_R)
apply (rule hoare_post_imp_R[OF deriveCap_derived])
apply (clarsimp simp:cte_wp_at_ctes_of)
apply (wp deriveCap_derived_foo)
apply (clarsimp simp: cte_wp_at_caps_of_state remove_rights_def
real_cte_tcb_valid if_apply_def2
split del: if_split)
apply (rule conjI, (clarsimp split del: if_split)+)
apply (clarsimp simp:conj_comms split del:if_split)
apply (intro conjI allI)
apply (clarsimp split:if_splits)
apply (case_tac "cap = fst x",simp+)
apply (clarsimp simp:masked_as_full_def is_cap_simps cap_master_cap_simps)
apply (clarsimp split del: if_split)
apply (intro conjI)
apply (clarsimp simp:neq_Nil_conv)
apply (drule hd_in_set)
apply (drule(1) bspec)
apply (clarsimp split:if_split_asm)
apply (fastforce simp:neq_Nil_conv)
apply (intro ballI conjI)
apply (clarsimp simp:neq_Nil_conv)
apply (intro impI)
apply (drule(1) bspec[OF _ subsetD[rotated]])
apply (clarsimp simp:neq_Nil_conv)
apply (clarsimp split:if_splits)
apply clarsimp
apply (intro conjI)
apply (drule(1) bspec,clarsimp)+
subgoal for \<dots> aa _ _ capa
by (case_tac "capa = aa"; clarsimp split:if_splits simp:masked_as_full_def is_cap_simps)
apply (case_tac "isEndpointCap (fst y) \<and> capEPPtr (fst y) = the ep \<and> (\<exists>y. ep = Some y)")
apply (clarsimp simp:conj_comms split del:if_split)
apply (subst if_not_P)
apply clarsimp
apply (clarsimp simp:valid_pspace'_def cte_wp_at_ctes_of split del:if_split)
apply (intro conjI)
apply (case_tac "cteCap cte = fst y",clarsimp simp: badge_derived'_def)
apply (clarsimp simp: maskCapRights_eq_null maskedAsFull_def badge_derived'_def isCap_simps
split: if_split_asm)
apply (clarsimp split del: if_split)
apply (case_tac "fst y = capability.NullCap")
apply (clarsimp simp: neq_Nil_conv split del: if_split)+
apply (intro allI impI conjI)
apply (clarsimp split:if_splits)
apply (clarsimp simp:image_def)+
apply (thin_tac "\<forall>x\<in>set ys. Q x" for Q)
apply (drule(1) bspec)+
apply clarsimp+
apply (drule(1) bspec)
apply (rule conjI)
apply clarsimp+
apply (case_tac "cteCap cteb = ab")
by (clarsimp simp: isCap_simps maskedAsFull_def split:if_splits)+
qed
declare constOnFailure_wp [wp]
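
(* Generic preservation rule for the transfer loop: any predicate preserved by
   cteInsert and setExtraBadge is preserved by transferCapsToSlots. *)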
lemma transferCapsToSlots_pres1[crunch_rules]:
assumes x: "\<And>cap src dest. \<lbrace>P\<rbrace> cteInsert cap src dest \<lbrace>\<lambda>rv. P\<rbrace>"
assumes eb: "\<And>b n. \<lbrace>P\<rbrace> setExtraBadge buffer b n \<lbrace>\<lambda>_. P\<rbrace>"
shows "\<lbrace>P\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. P\<rbrace>"
apply (induct caps arbitrary: slots n mi)
apply simp
apply (simp add: Let_def split_def whenE_def
cong: if_cong list.case_cong
split del: if_split)
apply (rule hoare_pre)
apply (wp x eb | assumption | simp split del: if_split | wpc
| wp (once) hoare_drop_imps)+
done
lemma cteInsert_cte_cap_to':
"\<lbrace>ex_cte_cap_to' p and cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) dest\<rbrace>
cteInsert cap src dest
\<lbrace>\<lambda>rv. ex_cte_cap_to' p\<rbrace>"
apply (simp add: ex_cte_cap_to'_def)
apply (rule hoare_pre)
apply (rule hoare_use_eq_irq_node' [OF cteInsert_ksInterruptState])
apply (clarsimp simp:cteInsert_def)
apply (wp hoare_vcg_ex_lift updateMDB_weak_cte_wp_at updateCap_cte_wp_at_cases
setUntypedCapAsFull_cte_wp_at getCTE_wp static_imp_wp)
apply (clarsimp simp:cte_wp_at_ctes_of)
apply (rule_tac x = "cref" in exI)
apply (rule conjI)
apply clarsimp+
done
declare maskCapRights_eq_null[simp]
crunch ex_cte_cap_wp_to' [wp]: setExtraBadge "ex_cte_cap_wp_to' P p"
(rule: ex_cte_cap_to'_pres)
crunch valid_objs' [wp]: setExtraBadge valid_objs'
crunch aligned' [wp]: setExtraBadge pspace_aligned'
crunch distinct' [wp]: setExtraBadge pspace_distinct'
lemma cteInsert_assume_Null:
"\<lbrace>P\<rbrace> cteInsert cap src dest \<lbrace>Q\<rbrace> \<Longrightarrow>
\<lbrace>\<lambda>s. cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) dest s \<longrightarrow> P s\<rbrace>
cteInsert cap src dest
\<lbrace>Q\<rbrace>"
apply (rule hoare_name_pre_state)
apply (erule impCE)
apply (simp add: cteInsert_def)
apply (rule hoare_seq_ext[OF _ getCTE_sp])+
apply (rule hoare_name_pre_state)
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (erule hoare_pre(1))
apply simp
done
crunch mdb'[wp]: setExtraBadge valid_mdb'
lemma cteInsert_weak_cte_wp_at2:
assumes weak: "\<And>c cap. P (maskedAsFull c cap) = P c"
shows
"\<lbrace>\<lambda>s. if p = dest then P cap else cte_wp_at' (\<lambda>c. P (cteCap c)) p s\<rbrace>
cteInsert cap src dest
\<lbrace>\<lambda>uu. cte_wp_at' (\<lambda>c. P (cteCap c)) p\<rbrace>"
apply (rule hoare_pre)
apply (rule hoare_use_eq_irq_node' [OF cteInsert_ksInterruptState])
apply (clarsimp simp:cteInsert_def)
apply (wp hoare_vcg_ex_lift updateMDB_weak_cte_wp_at updateCap_cte_wp_at_cases
setUntypedCapAsFull_cte_wp_at getCTE_wp static_imp_wp)
apply (clarsimp simp:cte_wp_at_ctes_of weak)
apply auto
done
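
(* Parameterised preservation rule. The flags emx, vo, drv and pad select which
   side conditions (empty target slots with ex_cte_cap_to', validity of objects
   and the inserted cap, derivedness of the inserted cap, and pspace
   alignment/distinctness) are made available in the precondition of the
   cteInsert step. *)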
lemma transferCapsToSlots_presM:
assumes x: "\<And>cap src dest. \<lbrace>\<lambda>s. P s \<and> (emx \<longrightarrow> cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) dest s \<and> ex_cte_cap_to' dest s)
\<and> (vo \<longrightarrow> valid_objs' s \<and> valid_cap' cap s \<and> real_cte_at' dest s)
\<and> (drv \<longrightarrow> cte_wp_at' (is_derived' (ctes_of s) src cap \<circ> cteCap) src s
\<and> cte_wp_at' (untyped_derived_eq cap o cteCap) src s
\<and> valid_mdb' s)
\<and> (pad \<longrightarrow> pspace_aligned' s \<and> pspace_distinct' s)\<rbrace>
cteInsert cap src dest \<lbrace>\<lambda>rv. P\<rbrace>"
assumes eb: "\<And>b n. \<lbrace>P\<rbrace> setExtraBadge buffer b n \<lbrace>\<lambda>_. P\<rbrace>"
shows "\<lbrace>\<lambda>s. P s
\<and> (emx \<longrightarrow> (\<forall>x \<in> set slots. ex_cte_cap_to' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) x s) \<and> distinct slots)
\<and> (vo \<longrightarrow> valid_objs' s \<and> (\<forall>x \<in> set slots. real_cte_at' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) x s)
\<and> (\<forall>x \<in> set caps. s \<turnstile>' fst x ) \<and> distinct slots)
\<and> (pad \<longrightarrow> pspace_aligned' s \<and> pspace_distinct' s)
\<and> (drv \<longrightarrow> vo \<and> pspace_aligned' s \<and> pspace_distinct' s \<and> valid_mdb' s
\<and> length slots \<le> 1
\<and> (\<forall>x \<in> set caps. s \<turnstile>' fst x \<and> (slots \<noteq> []
\<longrightarrow> cte_wp_at' (\<lambda>cte. fst x \<noteq> NullCap \<longrightarrow> cteCap cte = fst x) (snd x) s)))\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. P\<rbrace>"
apply (induct caps arbitrary: slots n mi)
apply (simp, wp, simp)
apply (simp add: Let_def split_def whenE_def
cong: if_cong list.case_cong split del: if_split)
apply (rule hoare_pre)
apply (wp eb hoare_vcg_const_Ball_lift hoare_vcg_const_imp_lift
| assumption | wpc)+
apply (rule cteInsert_assume_Null)
apply (wp x hoare_vcg_const_Ball_lift cteInsert_cte_cap_to' static_imp_wp)
apply (rule cteInsert_weak_cte_wp_at2,clarsimp)
apply (wp hoare_vcg_const_Ball_lift static_imp_wp)+
apply (rule cteInsert_weak_cte_wp_at2,clarsimp)
apply (wp hoare_vcg_const_Ball_lift cteInsert_cte_wp_at static_imp_wp
deriveCap_derived_foo)+
apply (thin_tac "\<And>slots. PROP P slots" for P)
apply (clarsimp simp: cte_wp_at_ctes_of remove_rights_def
real_cte_tcb_valid if_apply_def2
split del: if_split)
apply (rule conjI)
apply (clarsimp simp:cte_wp_at_ctes_of untyped_derived_eq_def)
apply (intro conjI allI)
apply (clarsimp simp:Fun.comp_def cte_wp_at_ctes_of)+
apply (clarsimp simp:valid_capAligned)
done
lemmas transferCapsToSlots_pres2
= transferCapsToSlots_presM[where vo=False and emx=True
and drv=False and pad=False, simplified]
crunch pspace_aligned'[wp]: transferCapsToSlots pspace_aligned'
crunch pspace_canonical'[wp]: transferCapsToSlots pspace_canonical'
crunch pspace_in_kernel_mappings'[wp]: transferCapsToSlots pspace_in_kernel_mappings'
crunch pspace_distinct'[wp]: transferCapsToSlots pspace_distinct'
lemma transferCapsToSlots_typ_at'[wp]:
"\<lbrace>\<lambda>s. P (typ_at' T p s)\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv s. P (typ_at' T p s)\<rbrace>"
by (wp transferCapsToSlots_pres1 setExtraBadge_typ_at')
lemma transferCapsToSlots_valid_objs[wp]:
"\<lbrace>valid_objs' and valid_mdb' and (\<lambda>s. \<forall>x \<in> set slots. real_cte_at' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
and (\<lambda>s. \<forall>x \<in> set caps. s \<turnstile>' fst x) and K(distinct slots)\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_objs'\<rbrace>"
apply (rule hoare_pre)
apply (rule transferCapsToSlots_presM[where vo=True and emx=False and drv=False and pad=False])
apply (wp | simp)+
done
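
(* Abbreviation for the source-slot condition used below: every transferred cap,
   unless it is NullCap, is still present in its source slot. *)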
abbreviation(input)
"transferCaps_srcs caps s \<equiv> \<forall>x\<in>set caps. cte_wp_at' (\<lambda>cte. fst x \<noteq> NullCap \<longrightarrow> cteCap cte = fst x) (snd x) s"
lemma transferCapsToSlots_mdb[wp]:
"\<lbrace>\<lambda>s. valid_pspace' s \<and> distinct slots
\<and> length slots \<le> 1
\<and> (\<forall>x \<in> set slots. ex_cte_cap_to' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
\<and> (\<forall>x \<in> set slots. real_cte_at' x s)
\<and> transferCaps_srcs caps s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_mdb'\<rbrace>"
apply (wp transferCapsToSlots_presM[where drv=True and vo=True and emx=True and pad=True])
apply clarsimp
apply (frule valid_capAligned)
apply (clarsimp simp: cte_wp_at_ctes_of is_derived'_def badge_derived'_def)
apply wp
apply (clarsimp simp: valid_pspace'_def)
apply (clarsimp simp:cte_wp_at_ctes_of)
apply (drule(1) bspec,clarify)
apply (case_tac cte)
apply (clarsimp dest!:ctes_of_valid_cap' split:if_splits)
apply (fastforce simp:valid_cap'_def)
done
crunch no_0' [wp]: setExtraBadge no_0_obj'
lemma transferCapsToSlots_no_0_obj' [wp]:
"\<lbrace>no_0_obj'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. no_0_obj'\<rbrace>"
by (wp transferCapsToSlots_pres1)
lemma transferCapsToSlots_vp[wp]:
"\<lbrace>\<lambda>s. valid_pspace' s \<and> distinct slots
\<and> length slots \<le> 1
\<and> (\<forall>x \<in> set slots. ex_cte_cap_to' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
\<and> (\<forall>x \<in> set slots. real_cte_at' x s)
\<and> transferCaps_srcs caps s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_pspace'\<rbrace>"
apply (rule hoare_pre)
apply (simp add: valid_pspace'_def | wp)+
apply (fastforce simp: cte_wp_at_ctes_of dest: ctes_of_valid')
done
crunches setExtraBadge, doIPCTransfer
for sch_act [wp]: "\<lambda>s. P (ksSchedulerAction s)"
(wp: crunch_wps mapME_wp' simp: zipWithM_x_mapM)
crunches setExtraBadge
for pred_tcb_at' [wp]: "\<lambda>s. pred_tcb_at' proj P p s"
and ksCurThread[wp]: "\<lambda>s. P (ksCurThread s)"
and ksCurDomain[wp]: "\<lambda>s. P (ksCurDomain s)"
and obj_at' [wp]: "\<lambda>s. P' (obj_at' P p s)"
and queues [wp]: "\<lambda>s. P (ksReadyQueues s)"
and queuesL1 [wp]: "\<lambda>s. P (ksReadyQueuesL1Bitmap s)"
and queuesL2 [wp]: "\<lambda>s. P (ksReadyQueuesL2Bitmap s)"
(simp: storeWordUser_def)
lemma tcts_sch_act[wp]:
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
by (wp sch_act_wf_lift tcb_in_cur_domain'_lift transferCapsToSlots_pres1)
lemma tcts_vq[wp]:
"\<lbrace>Invariants_H.valid_queues\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
by (wp valid_queues_lift transferCapsToSlots_pres1)
lemma tcts_vq'[wp]:
"\<lbrace>valid_queues'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
by (wp valid_queues_lift' transferCapsToSlots_pres1)
crunch state_refs_of' [wp]: setExtraBadge "\<lambda>s. P (state_refs_of' s)"
lemma tcts_state_refs_of'[wp]:
"\<lbrace>\<lambda>s. P (state_refs_of' s)\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv s. P (state_refs_of' s)\<rbrace>"
by (wp transferCapsToSlots_pres1)
crunch if_live' [wp]: setExtraBadge if_live_then_nonz_cap'
lemma tcts_iflive[wp]:
"\<lbrace>\<lambda>s. if_live_then_nonz_cap' s \<and> distinct slots \<and>
(\<forall>x\<in>set slots.
ex_cte_cap_to' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. if_live_then_nonz_cap'\<rbrace>"
by (wp transferCapsToSlots_pres2 | simp)+
crunch if_unsafe' [wp]: setExtraBadge if_unsafe_then_cap'
lemma tcts_ifunsafe[wp]:
"\<lbrace>\<lambda>s. if_unsafe_then_cap' s \<and> distinct slots \<and>
(\<forall>x\<in>set slots. cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s \<and>
ex_cte_cap_to' x s)\<rbrace> transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. if_unsafe_then_cap'\<rbrace>"
by (wp transferCapsToSlots_pres2 | simp)+
crunch it[wp]: ensureNoChildren "\<lambda>s. P (ksIdleThread s)"
crunch idle'[wp]: deriveCap "valid_idle'"
crunch valid_idle' [wp]: setExtraBadge valid_idle'
lemma tcts_idle'[wp]:
"\<lbrace>\<lambda>s. valid_idle' s\<rbrace> transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_idle'\<rbrace>"
apply (rule hoare_pre)
apply (wp transferCapsToSlots_pres1)
apply simp
done
lemma tcts_ct[wp]:
"\<lbrace>cur_tcb'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. cur_tcb'\<rbrace>"
by (wp transferCapsToSlots_pres1 cur_tcb_lift)
crunch valid_arch_state' [wp]: setExtraBadge valid_arch_state'
lemma transferCapsToSlots_valid_arch [wp]:
"\<lbrace>valid_arch_state'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. valid_arch_state'\<rbrace>"
by (rule transferCapsToSlots_pres1; wp)
crunch valid_global_refs' [wp]: setExtraBadge valid_global_refs'
lemma transferCapsToSlots_valid_globals [wp]:
"\<lbrace>valid_global_refs' and valid_objs' and valid_mdb' and pspace_distinct' and pspace_aligned' and K (distinct slots)
and K (length slots \<le> 1)
and (\<lambda>s. \<forall>x \<in> set slots. real_cte_at' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
and transferCaps_srcs caps\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_global_refs'\<rbrace>"
apply (wp transferCapsToSlots_presM[where vo=True and emx=False and drv=True and pad=True] | clarsimp)+
apply (clarsimp simp:cte_wp_at_ctes_of)
apply (drule(1) bspec,clarsimp)
apply (case_tac cte,clarsimp)
apply (frule(1) CSpace_I.ctes_of_valid_cap')
apply (fastforce simp:valid_cap'_def)
done
crunch irq_node' [wp]: setExtraBadge "\<lambda>s. P (irq_node' s)"
lemma transferCapsToSlots_irq_node'[wp]:
"\<lbrace>\<lambda>s. P (irq_node' s)\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv s. P (irq_node' s)\<rbrace>"
by (wp transferCapsToSlots_pres1)
lemma valid_irq_handlers_ctes_ofD:
"\<lbrakk> ctes_of s p = Some cte; cteCap cte = IRQHandlerCap irq; valid_irq_handlers' s \<rbrakk>
\<Longrightarrow> irq_issued' irq s"
by (auto simp: valid_irq_handlers'_def cteCaps_of_def ran_def)
crunch valid_irq_handlers' [wp]: setExtraBadge valid_irq_handlers'
lemma transferCapsToSlots_irq_handlers[wp]:
"\<lbrace>valid_irq_handlers' and valid_objs' and valid_mdb' and pspace_distinct' and pspace_aligned'
and K(distinct slots \<and> length slots \<le> 1)
and (\<lambda>s. \<forall>x \<in> set slots. real_cte_at' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
and transferCaps_srcs caps\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_irq_handlers'\<rbrace>"
apply (wp transferCapsToSlots_presM[where vo=True and emx=False and drv=True and pad=False])
apply (clarsimp simp: is_derived'_def cte_wp_at_ctes_of badge_derived'_def)
apply (erule(2) valid_irq_handlers_ctes_ofD)
apply wp
apply (clarsimp simp:cte_wp_at_ctes_of | intro ballI conjI)+
apply (drule(1) bspec,clarsimp)
apply (case_tac cte,clarsimp)
apply (frule(1) CSpace_I.ctes_of_valid_cap')
apply (fastforce simp:valid_cap'_def)
done
crunch ioports'[wp]: setExtraBadge valid_ioports'
lemma valid_ioports'_derivedD:
"\<lbrakk>valid_ioports' s; cte_wp_at' (is_derived' (ctes_of s) src cap \<circ> cteCap) src s\<rbrakk> \<Longrightarrow>
safe_ioport_insert' cap NullCap s"
apply (clarsimp simp: is_derived'_def cte_wp_at_ctes_of safe_ioport_insert'_def badge_derived'_def)
apply (case_tac cap; clarsimp)
apply (rename_tac acap, case_tac acap; clarsimp simp: isCap_simps)
apply (clarsimp simp: valid_ioports'_def cteCaps_of_def
elim!: ranE
split: capability.splits arch_capability.splits)
apply (rule conjI, force simp: ioports_no_overlap'_def ran_def split: if_splits)
apply (force simp: ran_def issued_ioports'_def all_ioports_issued'_def split: if_splits)
done
lemma transferCapsToSlots_ioports'[wp]:
"\<lbrace>valid_ioports' and valid_objs' and valid_mdb' and pspace_distinct' and pspace_aligned'
and K(distinct slots \<and> length slots \<le> 1)
and (\<lambda>s. \<forall>x \<in> set slots. real_cte_at' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
and transferCaps_srcs caps\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. valid_ioports'\<rbrace>"
apply (wp transferCapsToSlots_presM[where vo=True and emx=False and drv=True and pad=False])
apply (clarsimp simp: valid_ioports'_derivedD)
apply wp
apply (clarsimp simp:cte_wp_at_ctes_of | intro ballI conjI)+
apply (drule(1) bspec,clarsimp)
apply (case_tac cte,clarsimp)
apply (frule(1) CSpace_I.ctes_of_valid_cap')
apply (fastforce simp:valid_cap'_def)
done
crunch irq_state' [wp]: setExtraBadge "\<lambda>s. P (ksInterruptState s)"
lemma setExtraBadge_irq_states'[wp]:
"\<lbrace>valid_irq_states'\<rbrace> setExtraBadge buffer b n \<lbrace>\<lambda>_. valid_irq_states'\<rbrace>"
apply (wp valid_irq_states_lift')
apply (simp add: setExtraBadge_def storeWordUser_def)
apply (wpsimp wp: no_irq dmo_lift' no_irq_storeWord)
apply assumption
done
lemma transferCapsToSlots_irq_states' [wp]:
"\<lbrace>valid_irq_states'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>_. valid_irq_states'\<rbrace>"
by (wp transferCapsToSlots_pres1)
lemma transferCapsToSlots_irqs_masked'[wp]:
"\<lbrace>irqs_masked'\<rbrace> transferCapsToSlots ep buffer n caps slots mi \<lbrace>\<lambda>rv. irqs_masked'\<rbrace>"
by (wp transferCapsToSlots_pres1 irqs_masked_lift)
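
(* storeWordUser only writes to a user frame, so the machine-state invariant is
   preserved; the auxiliary fact below shows that adding a small word-aligned
   offset stays within the same page. *)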
lemma storeWordUser_vms'[wp]:
"\<lbrace>valid_machine_state'\<rbrace> storeWordUser a w \<lbrace>\<lambda>_. valid_machine_state'\<rbrace>"
proof -
have aligned_offset_ignore:
"\<And>(l::machine_word) (p::machine_word) sz. l<8 \<Longrightarrow> p && mask 3 = 0 \<Longrightarrow>
p+l && ~~ mask pageBits = p && ~~ mask pageBits"
proof -
fix l p sz
assume al: "(p::machine_word) && mask 3 = 0"
assume "(l::machine_word) < 8" hence less: "l<2^3" by simp
have le: "3 \<le> pageBits" by (simp add: pageBits_def)
show "?thesis l p sz"
by (rule is_aligned_add_helper[simplified is_aligned_mask,
THEN conjunct2, THEN mask_out_first_mask_some,
where n=3, OF al less le])
qed
show ?thesis
apply (simp add: valid_machine_state'_def storeWordUser_def
doMachineOp_def split_def)
apply wp
apply clarsimp
apply (drule use_valid)
apply (rule_tac x=p in storeWord_um_inv, simp+)
apply (drule_tac x=p in spec)
apply (erule disjE, simp_all)
apply (erule conjE)
apply (erule disjE, simp)
apply (simp add: pointerInUserData_def word_size)
apply (subgoal_tac "a && ~~ mask pageBits = p && ~~ mask pageBits", simp)
apply (simp only: is_aligned_mask[of _ 3])
apply (elim disjE, simp_all)
apply (rule aligned_offset_ignore[symmetric], simp+)+
done
qed
lemma setExtraBadge_vms'[wp]:
"\<lbrace>valid_machine_state'\<rbrace> setExtraBadge buffer b n \<lbrace>\<lambda>_. valid_machine_state'\<rbrace>"
by (simp add: setExtraBadge_def) wp
lemma transferCapsToSlots_vms[wp]:
"\<lbrace>\<lambda>s. valid_machine_state' s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>_ s. valid_machine_state' s\<rbrace>"
by (wp transferCapsToSlots_pres1)
crunches setExtraBadge, transferCapsToSlots
for pspace_domain_valid[wp]: "pspace_domain_valid"
crunch ct_not_inQ[wp]: setExtraBadge "ct_not_inQ"
lemma tcts_ct_not_inQ[wp]:
"\<lbrace>ct_not_inQ\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>_. ct_not_inQ\<rbrace>"
by (wp transferCapsToSlots_pres1)
crunch gsUntypedZeroRanges[wp]: setExtraBadge "\<lambda>s. P (gsUntypedZeroRanges s)"
crunch ctes_of[wp]: setExtraBadge "\<lambda>s. P (ctes_of s)"
lemma tcts_zero_ranges[wp]:
"\<lbrace>\<lambda>s. untyped_ranges_zero' s \<and> valid_pspace' s \<and> distinct slots
\<and> (\<forall>x \<in> set slots. ex_cte_cap_to' x s \<and> cte_wp_at' (\<lambda>cte. cteCap cte = capability.NullCap) x s)
\<and> (\<forall>x \<in> set slots. real_cte_at' x s)
\<and> length slots \<le> 1
\<and> transferCaps_srcs caps s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. untyped_ranges_zero'\<rbrace>"
apply (wp transferCapsToSlots_presM[where emx=True and vo=True
and drv=True and pad=True])
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (simp add: cteCaps_of_def)
apply (rule hoare_pre, wp untyped_ranges_zero_lift)
apply (simp add: o_def)
apply (clarsimp simp: valid_pspace'_def ball_conj_distrib[symmetric])
apply (drule(1) bspec)
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (case_tac cte, clarsimp)
apply (frule(1) ctes_of_valid_cap')
apply auto[1]
done
crunch ct_idle_or_in_cur_domain'[wp]: setExtraBadge ct_idle_or_in_cur_domain'
crunch ct_idle_or_in_cur_domain'[wp]: transferCapsToSlots ct_idle_or_in_cur_domain'
crunch ksCurDomain[wp]: transferCapsToSlots "\<lambda>s. P (ksCurDomain s)"
crunch ksDomSchedule[wp]: setExtraBadge "\<lambda>s. P (ksDomSchedule s)"
crunch ksDomScheduleIdx[wp]: setExtraBadge "\<lambda>s. P (ksDomScheduleIdx s)"
crunch ksDomSchedule[wp]: transferCapsToSlots "\<lambda>s. P (ksDomSchedule s)"
crunch ksDomScheduleIdx[wp]: transferCapsToSlots "\<lambda>s. P (ksDomScheduleIdx s)"
lemma transferCapsToSlots_invs[wp]:
"\<lbrace>\<lambda>s. invs' s \<and> distinct slots
\<and> (\<forall>x \<in> set slots. cte_wp_at' (\<lambda>cte. cteCap cte = NullCap) x s)
\<and> (\<forall>x \<in> set slots. ex_cte_cap_to' x s)
\<and> (\<forall>x \<in> set slots. real_cte_at' x s)
\<and> length slots \<le> 1
\<and> transferCaps_srcs caps s\<rbrace>
transferCapsToSlots ep buffer n caps slots mi
\<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: invs'_def valid_state'_def)
apply (wp valid_irq_node_lift)
apply fastforce
done
lemma grs_distinct'[wp]:
"\<lbrace>\<top>\<rbrace> getReceiveSlots t buf \<lbrace>\<lambda>rv s. distinct rv\<rbrace>"
apply (cases buf, simp_all add: getReceiveSlots_def
split_def unlessE_def)
apply (wp, simp)
apply (wp | simp only: distinct.simps list.simps empty_iff)+
apply simp
done
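
(* Top-level correspondence for capability transfer, combining the receive-slot
   lookup with the transfer-loop correspondence above. *)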
lemma tc_corres:
"\<lbrakk> info' = message_info_map info;
list_all2 (\<lambda>x y. cap_relation (fst x) (fst y) \<and> snd y = cte_map (snd x))
caps caps' \<rbrakk>
\<Longrightarrow>
corres ((=) \<circ> message_info_map)
(tcb_at receiver and valid_objs and
pspace_aligned and pspace_distinct and valid_mdb
and valid_list
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at x s | _ \<Rightarrow> True)
and case_option \<top> in_user_frame recv_buf
and (\<lambda>s. valid_message_info info)
and transfer_caps_srcs caps)
(tcb_at' receiver and valid_objs' and
pspace_aligned' and pspace_distinct' and pspace_canonical' and pspace_in_kernel_mappings'
and no_0_obj' and valid_mdb'
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at' x s | _ \<Rightarrow> True)
and case_option \<top> valid_ipc_buffer_ptr' recv_buf
and transferCaps_srcs caps'
and (\<lambda>s. length caps' \<le> msgMaxExtraCaps))
(transfer_caps info caps ep receiver recv_buf)
(transferCaps info' caps' ep receiver recv_buf)"
apply (simp add: transfer_caps_def transferCaps_def
getThreadCSpaceRoot)
apply (rule corres_assume_pre)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ get_recv_slot_corres])
apply (rule_tac x=recv_buf in option_corres)
apply (rule_tac P=\<top> and P'=\<top> in corres_inst)
apply (case_tac info, simp)
apply simp
apply (rule corres_rel_imp, rule tc_loop_corres,
simp_all add: split_def)[1]
apply (case_tac info, simp)
apply (wp hoare_vcg_all_lift get_rs_cte_at static_imp_wp
| simp only: ball_conj_distrib)+
apply (simp add: cte_map_def tcb_cnode_index_def split_def)
apply (clarsimp simp: valid_pspace'_def valid_ipc_buffer_ptr'_def2
split_def
cong: option.case_cong)
apply (drule(1) bspec)
apply (clarsimp simp:cte_wp_at_caps_of_state)
apply (frule(1) Invariants_AI.caps_of_state_valid)
apply (fastforce simp:valid_cap_def)
apply (cases info)
apply (clarsimp simp: msg_max_extra_caps_def valid_message_info_def
max_ipc_words msg_max_length_def
msgMaxExtraCaps_def msgExtraCapBits_def
shiftL_nat valid_pspace'_def)
apply (drule(1) bspec)
apply (clarsimp simp:cte_wp_at_ctes_of)
apply (case_tac cte,clarsimp)
apply (frule(1) ctes_of_valid_cap')
apply (fastforce simp:valid_cap'_def)
done
crunch typ_at'[wp]: transferCaps "\<lambda>s. P (typ_at' T p s)"
lemmas transferCaps_typ_ats[wp] = typ_at_lifts [OF transferCaps_typ_at']
declare maskCapRights_Reply [simp]
lemma isIRQControlCap_mask [simp]:
"isIRQControlCap (maskCapRights R c) = isIRQControlCap c"
apply (case_tac c)
apply (clarsimp simp: isCap_simps maskCapRights_def Let_def)+
apply (rename_tac arch_capability)
apply (case_tac arch_capability)
apply (clarsimp simp: isCap_simps X64_H.maskCapRights_def
maskCapRights_def Let_def)+
done
lemma isIOPortControlCap'_mask [simp]:
"isIOPortControlCap' (maskCapRights R c) = isIOPortControlCap' c"
apply (case_tac c)
apply (clarsimp simp: isCap_simps maskCapRights_def Let_def)+
apply (rename_tac arch_capability)
apply (case_tac arch_capability)
apply (clarsimp simp: isCap_simps X64_H.maskCapRights_def
maskCapRights_def Let_def)+
done
lemma isPageCap_maskCapRights[simp]:
" isArchCap isPageCap (RetypeDecls_H.maskCapRights R c) = isArchCap isPageCap c"
apply (case_tac c; simp add: isCap_simps isArchCap_def maskCapRights_def)
apply (rename_tac arch_capability)
apply (case_tac arch_capability; simp add: isCap_simps X64_H.maskCapRights_def)
done
lemma capReplyMaster_mask[simp]:
"isReplyCap c \<Longrightarrow> capReplyMaster (maskCapRights R c) = capReplyMaster c"
by (clarsimp simp: isCap_simps maskCapRights_def)
lemma is_derived_mask' [simp]:
"is_derived' m p (maskCapRights R c) = is_derived' m p c"
apply (rule ext)
apply (simp add: is_derived'_def badge_derived'_def)
done
lemma updateCapData_ordering:
"\<lbrakk> (x, capBadge cap) \<in> capBadge_ordering P; updateCapData p d cap \<noteq> NullCap \<rbrakk>
\<Longrightarrow> (x, capBadge (updateCapData p d cap)) \<in> capBadge_ordering P"
apply (cases cap, simp_all add: updateCapData_def isCap_simps Let_def
capBadge_def X64_H.updateCapData_def
split: if_split_asm)
apply fastforce+
done
lemma updateCapData_capReplyMaster:
"isReplyCap cap \<Longrightarrow> capReplyMaster (updateCapData p d cap) = capReplyMaster cap"
by (clarsimp simp: isCap_simps updateCapData_def split del: if_split)
lemma updateCapData_is_Reply[simp]:
"(updateCapData p d cap = ReplyCap x y z) = (cap = ReplyCap x y z)"
by (rule ccontr,
clarsimp simp: isCap_simps updateCapData_def Let_def
X64_H.updateCapData_def
split del: if_split
split: if_split_asm)
lemma updateCapDataIRQ:
"updateCapData p d cap \<noteq> NullCap \<Longrightarrow>
isIRQControlCap (updateCapData p d cap) = isIRQControlCap cap"
apply (cases cap, simp_all add: updateCapData_def isCap_simps Let_def
X64_H.updateCapData_def
split: if_split_asm)
done
lemma updateCapDataIOPortC:
"updateCapData p d cap \<noteq> NullCap \<Longrightarrow>
isIOPortControlCap' (updateCapData p d cap) = isIOPortControlCap' cap"
apply (cases cap, simp_all add: updateCapData_def isCap_simps Let_def
X64_H.updateCapData_def
split: if_split_asm)
done
lemma updateCapData_vsCapRef[simp]:
"vsCapRef (updateCapData pr D c) = vsCapRef c"
by (rule ccontr,
clarsimp simp: isCap_simps updateCapData_def Let_def
X64_H.updateCapData_def
vsCapRef_def
split del: if_split
split: if_split_asm)
lemma isPageCap_updateCapData[simp]:
"isArchCap isPageCap (updateCapData pr D c) = isArchCap isPageCap c"
apply (case_tac c; simp add:updateCapData_def isCap_simps isArchCap_def)
apply (rename_tac arch_capability)
apply (case_tac arch_capability; simp add: X64_H.updateCapData_def isCap_simps isArchCap_def)
apply (clarsimp split:capability.splits simp:Let_def)
done
lemma lookup_cap_to'[wp]:
"\<lbrace>\<top>\<rbrace> lookupCap t cref \<lbrace>\<lambda>rv s. \<forall>r\<in>cte_refs' rv (irq_node' s). ex_cte_cap_to' r s\<rbrace>,-"
by (simp add: lookupCap_def lookupCapAndSlot_def | wp)+
lemma grs_cap_to'[wp]:
"\<lbrace>\<top>\<rbrace> getReceiveSlots t buf \<lbrace>\<lambda>rv s. \<forall>x \<in> set rv. ex_cte_cap_to' x s\<rbrace>"
apply (cases buf; simp add: getReceiveSlots_def split_def unlessE_def)
apply (wp, simp)
apply (wp | simp | rule hoare_drop_imps)+
done
lemma grs_length'[wp]:
"\<lbrace>\<lambda>s. 1 \<le> n\<rbrace> getReceiveSlots receiver recv_buf \<lbrace>\<lambda>rv s. length rv \<le> n\<rbrace>"
apply (simp add: getReceiveSlots_def split_def unlessE_def)
apply (rule hoare_pre)
apply (wp | wpc | simp)+
done
lemma transferCaps_invs' [wp]:
"\<lbrace>invs' and transferCaps_srcs caps\<rbrace>
transferCaps mi caps ep receiver recv_buf
\<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: transferCaps_def Let_def split_def)
apply (wp get_rs_cte_at' hoare_vcg_const_Ball_lift
| wpcw | clarsimp)+
done
lemma get_mrs_inv'[wp]:
"\<lbrace>P\<rbrace> getMRs t buf info \<lbrace>\<lambda>rv. P\<rbrace>"
by (simp add: getMRs_def load_word_offs_def getRegister_def
| wp dmo_inv' loadWord_inv mapM_wp'
asUser_inv det_mapM[where S=UNIV] | wpc)+
lemma copyMRs_typ_at':
"\<lbrace>\<lambda>s. P (typ_at' T p s)\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv s. P (typ_at' T p s)\<rbrace>"
by (simp add: copyMRs_def | wp mapM_wp [where S=UNIV, simplified] | wpc)+
lemmas copyMRs_typ_at_lifts[wp] = typ_at_lifts [OF copyMRs_typ_at']
lemma copy_mrs_invs'[wp]:
"\<lbrace> invs' and tcb_at' s and tcb_at' r \<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. invs' \<rbrace>"
including no_pre
apply (simp add: copyMRs_def)
apply (wp dmo_invs' no_irq_mapM no_irq_storeWord|
simp add: split_def)
apply (case_tac sb, simp_all)[1]
apply wp+
apply (case_tac rb, simp_all)[1]
apply (wp mapM_wp dmo_invs' no_irq_mapM no_irq_storeWord no_irq_loadWord)
apply blast
apply (rule hoare_strengthen_post)
apply (rule mapM_wp)
apply (wp | simp | blast)+
done
crunch aligned'[wp]: transferCaps pspace_aligned'
(wp: crunch_wps simp: zipWithM_x_mapM)
crunch distinct'[wp]: transferCaps pspace_distinct'
(wp: crunch_wps simp: zipWithM_x_mapM)
crunch aligned'[wp]: setMRs pspace_aligned'
(wp: crunch_wps simp: crunch_simps)
crunch distinct'[wp]: setMRs pspace_distinct'
(wp: crunch_wps simp: crunch_simps)
crunch aligned'[wp]: copyMRs pspace_aligned'
(wp: crunch_wps simp: crunch_simps)
crunch pspace_canonical'[wp]: copyMRs pspace_canonical'
(wp: crunch_wps simp: crunch_simps)
crunch pspace_in_kernel_mappings'[wp]: copyMRs pspace_in_kernel_mappings'
(wp: crunch_wps simp: crunch_simps)
crunch distinct'[wp]: copyMRs pspace_distinct'
(wp: crunch_wps simp: crunch_simps)
crunch aligned'[wp]: setMessageInfo pspace_aligned'
(wp: crunch_wps simp: crunch_simps)
crunch distinct'[wp]: setMessageInfo pspace_distinct'
(wp: crunch_wps simp: crunch_simps)
crunch valid_objs'[wp]: storeWordUser valid_objs'
crunch valid_pspace'[wp]: storeWordUser valid_pspace'
lemma set_mrs_valid_objs' [wp]:
"\<lbrace>valid_objs'\<rbrace> setMRs t a msgs \<lbrace>\<lambda>rv. valid_objs'\<rbrace>"
apply (simp add: setMRs_def zipWithM_x_mapM split_def)
apply (wp asUser_valid_objs crunch_wps)
done
crunch valid_objs'[wp]: copyMRs valid_objs'
(wp: crunch_wps simp: crunch_simps)
crunch valid_queues'[wp]: asUser "Invariants_H.valid_queues'"
(simp: crunch_simps wp: hoare_drop_imps)
lemma setMRs_invs_bits[wp]:
"\<lbrace>valid_pspace'\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. valid_pspace'\<rbrace>"
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s\<rbrace>
setMRs t buf mrs \<lbrace>\<lambda>rv s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>
setMRs t buf mrs \<lbrace>\<lambda>rv s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
"\<lbrace>Invariants_H.valid_queues\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
"\<lbrace>valid_queues'\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
"\<lbrace>\<lambda>s. P (state_refs_of' s)\<rbrace>
setMRs t buf mrs
\<lbrace>\<lambda>rv s. P (state_refs_of' s)\<rbrace>"
"\<lbrace>if_live_then_nonz_cap'\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. if_live_then_nonz_cap'\<rbrace>"
"\<lbrace>ex_nonz_cap_to' p\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. ex_nonz_cap_to' p\<rbrace>"
"\<lbrace>cur_tcb'\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. cur_tcb'\<rbrace>"
"\<lbrace>if_unsafe_then_cap'\<rbrace> setMRs t buf mrs \<lbrace>\<lambda>rv. if_unsafe_then_cap'\<rbrace>"
by (simp add: setMRs_def zipWithM_x_mapM split_def storeWordUser_def | wp crunch_wps)+
crunch no_0_obj'[wp]: setMRs no_0_obj'
(wp: crunch_wps simp: crunch_simps)
lemma copyMRs_invs_bits[wp]:
"\<lbrace>valid_pspace'\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. valid_pspace'\<rbrace>"
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s\<rbrace> copyMRs s sb r rb n
\<lbrace>\<lambda>rv s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
"\<lbrace>Invariants_H.valid_queues\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
"\<lbrace>valid_queues'\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
"\<lbrace>\<lambda>s. P (state_refs_of' s)\<rbrace>
copyMRs s sb r rb n
\<lbrace>\<lambda>rv s. P (state_refs_of' s)\<rbrace>"
"\<lbrace>if_live_then_nonz_cap'\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. if_live_then_nonz_cap'\<rbrace>"
"\<lbrace>ex_nonz_cap_to' p\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. ex_nonz_cap_to' p\<rbrace>"
"\<lbrace>cur_tcb'\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. cur_tcb'\<rbrace>"
"\<lbrace>if_unsafe_then_cap'\<rbrace> copyMRs s sb r rb n \<lbrace>\<lambda>rv. if_unsafe_then_cap'\<rbrace>"
by (simp add: copyMRs_def storeWordUser_def | wp mapM_wp' | wpc)+
crunch no_0_obj'[wp]: copyMRs no_0_obj'
(wp: crunch_wps simp: crunch_simps)
lemma mi_map_length[simp]: "msgLength (message_info_map mi) = mi_length mi"
by (cases mi, simp)
crunch cte_wp_at'[wp]: copyMRs "cte_wp_at' P p"
(wp: crunch_wps)
lemma lookupExtraCaps_srcs[wp]:
"\<lbrace>\<top>\<rbrace> lookupExtraCaps thread buf info \<lbrace>transferCaps_srcs\<rbrace>,-"
apply (simp add: lookupExtraCaps_def lookupCapAndSlot_def
split_def lookupSlotForThread_def
getSlotCap_def)
apply (wp mapME_set[where R=\<top>] getCTE_wp')
apply (rule_tac P=\<top> in hoare_trivE_R)
apply (simp add: cte_wp_at_ctes_of)
apply (wp | simp)+
done
crunch inv[wp]: lookupExtraCaps "P"
(wp: crunch_wps mapME_wp' simp: crunch_simps)
lemma invs_mdb_strengthen':
"invs' s \<longrightarrow> valid_mdb' s" by auto
lemma lookupExtraCaps_length:
"\<lbrace>\<lambda>s. unat (msgExtraCaps mi) \<le> n\<rbrace> lookupExtraCaps thread send_buf mi \<lbrace>\<lambda>rv s. length rv \<le> n\<rbrace>,-"
apply (simp add: lookupExtraCaps_def getExtraCPtrs_def)
apply (rule hoare_pre)
apply (wp mapME_length | wpc)+
apply (clarsimp simp: upto_enum_step_def Suc_unat_diff_1 word_le_sub1)
done
lemma getMessageInfo_msgExtraCaps[wp]:
"\<lbrace>\<top>\<rbrace> getMessageInfo t \<lbrace>\<lambda>rv s. unat (msgExtraCaps rv) \<le> msgMaxExtraCaps\<rbrace>"
apply (simp add: getMessageInfo_def)
apply wp
apply (simp add: messageInfoFromWord_def Let_def msgMaxExtraCaps_def
shiftL_nat)
apply (subst nat_le_Suc_less_imp)
apply (rule unat_less_power)
apply (simp add: word_bits_def msgExtraCapBits_def)
apply (rule and_mask_less'[unfolded mask_2pm1])
apply (simp add: msgExtraCapBits_def)
apply wpsimp+
done
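
(* Looking up a single cap together with its slot corresponds on both levels;
   slots are again related by cte_map. *)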
lemma lcs_corres:
"cptr = to_bl cptr' \<Longrightarrow>
corres (lfr \<oplus> (\<lambda>a b. cap_relation (fst a) (fst b) \<and> snd b = cte_map (snd a)))
(valid_objs and pspace_aligned and tcb_at thread)
(valid_objs' and pspace_distinct' and pspace_aligned' and tcb_at' thread)
(lookup_cap_and_slot thread cptr) (lookupCapAndSlot thread cptr')"
unfolding lookup_cap_and_slot_def lookupCapAndSlot_def
apply (simp add: liftE_bindE split_def)
apply (rule corres_guard_imp)
apply (rule_tac r'="\<lambda>rv rv'. rv' = cte_map (fst rv)"
in corres_splitEE)
apply (rule corres_split[OF _ getSlotCap_corres])
apply (rule corres_returnOkTT, simp)
apply simp
apply wp+
apply (rule corres_rel_imp, rule lookup_slot_corres)
apply (simp add: split_def)
apply (wp | simp add: liftE_bindE[symmetric])+
done
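
(* Looking up the extra caps named by the message info corresponds on both
   levels, reading the cap pointers from the (optional) IPC buffer. *)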
lemma lec_corres:
"\<lbrakk> info' = message_info_map info; buffer = buffer'\<rbrakk> \<Longrightarrow>
corres (fr \<oplus> list_all2 (\<lambda>x y. cap_relation (fst x) (fst y) \<and> snd y = cte_map (snd x)))
(valid_objs and pspace_aligned and tcb_at thread and (\<lambda>_. valid_message_info info))
(valid_objs' and pspace_distinct' and pspace_aligned' and tcb_at' thread
and case_option \<top> valid_ipc_buffer_ptr' buffer')
(lookup_extra_caps thread buffer info) (lookupExtraCaps thread buffer' info')"
unfolding lookupExtraCaps_def lookup_extra_caps_def
apply (rule corres_gen_asm)
apply (cases "mi_extra_caps info = 0")
apply (cases info)
apply (simp add: Let_def returnOk_def getExtraCPtrs_def
liftE_bindE upto_enum_step_def mapM_def
sequence_def doMachineOp_return mapME_Nil
split: option.split)
apply (cases info)
apply (rename_tac w1 w2 w3 w4)
apply (simp add: Let_def liftE_bindE)
apply (cases buffer')
apply (simp add: getExtraCPtrs_def mapME_Nil)
apply (rule corres_returnOk)
apply simp
apply (simp add: msgLengthBits_def msgMaxLength_def word_size field_simps
getExtraCPtrs_def upto_enum_step_def upto_enum_word
word_size_def msg_max_length_def liftM_def
Suc_unat_diff_1 word_le_sub1 mapM_map_simp
upt_lhs_sub_map[where x=buffer_cptr_index]
wordSize_def wordBits_def
del: upt.simps)
apply (rule corres_guard_imp)
apply (rule corres_split')
apply (rule_tac S = "\<lambda>x y. x = y \<and> x < unat w2"
in corres_mapM_list_all2
[where Q = "\<lambda>_. valid_objs and pspace_aligned and tcb_at thread" and r = "(=)"
and Q' = "\<lambda>_. valid_objs' and pspace_aligned' and pspace_distinct' and tcb_at' thread
and case_option \<top> valid_ipc_buffer_ptr' buffer'" and r'="(=)" ])
apply simp
apply simp
apply simp
apply (rule corres_guard_imp)
apply (rule load_word_offs_corres')
apply (clarsimp simp: buffer_cptr_index_def msg_max_length_def
max_ipc_words valid_message_info_def
msg_max_extra_caps_def word_le_nat_alt)
apply (simp add: buffer_cptr_index_def msg_max_length_def)
apply simp
apply simp
apply (simp add: load_word_offs_word_def)
apply (wp | simp)+
apply (subst list_all2_same)
apply (clarsimp simp: max_ipc_words field_simps)
apply (simp add: mapME_def, fold mapME_def)[1]
apply (rule corres_mapME [where S = Id and r'="(\<lambda>x y. cap_relation (fst x) (fst y) \<and> snd y = cte_map (snd x))"])
apply simp
apply simp
apply simp
apply (rule corres_cap_fault [OF lcs_corres])
apply simp
apply simp
apply (wp | simp)+
apply (simp add: set_zip_same Int_lower1)
apply (wp mapM_wp [OF _ subset_refl] | simp)+
done
crunch ctes_of[wp]: copyMRs "\<lambda>s. P (ctes_of s)"
(ignore: threadSet
wp: threadSet_ctes_of crunch_wps)
lemma copyMRs_valid_mdb[wp]:
"\<lbrace>valid_mdb'\<rbrace> copyMRs t buf t' buf' n \<lbrace>\<lambda>rv. valid_mdb'\<rbrace>"
by (simp add: valid_mdb'_def copyMRs_ctes_of)
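
(* Correspondence of the normal (non-fault) IPC transfer: message info, extra
   caps, message registers and the badge register are transferred in step on
   both levels. *)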
lemma do_normal_transfer_corres:
"corres dc
(tcb_at sender and tcb_at receiver and (pspace_aligned:: det_state \<Rightarrow> bool)
and valid_objs and cur_tcb and valid_mdb and valid_list and pspace_distinct
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at x s | _ \<Rightarrow> True)
and case_option \<top> in_user_frame send_buf
and case_option \<top> in_user_frame recv_buf)
(tcb_at' sender and tcb_at' receiver and valid_objs'
and pspace_aligned' and pspace_distinct' and pspace_canonical' and cur_tcb'
and valid_mdb' and no_0_obj' and pspace_in_kernel_mappings'
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at' x s | _ \<Rightarrow> True)
and case_option \<top> valid_ipc_buffer_ptr' send_buf
and case_option \<top> valid_ipc_buffer_ptr' recv_buf)
(do_normal_transfer sender send_buf ep badge can_grant receiver recv_buf)
(doNormalTransfer sender send_buf ep badge can_grant receiver recv_buf)"
apply (simp add: do_normal_transfer_def doNormalTransfer_def)
apply (rule corres_guard_imp)
apply (rule corres_split_mapr [OF _ get_mi_corres])
apply (rule_tac F="valid_message_info mi" in corres_gen_asm)
apply (rule_tac r'="list_all2 (\<lambda>x y. cap_relation (fst x) (fst y) \<and> snd y = cte_map (snd x))"
in corres_split)
prefer 2
apply (rule corres_if[OF refl])
apply (rule corres_split_catch)
apply (rule corres_trivial, simp)
apply (rule lec_corres, simp+)
apply wp+
apply (rule corres_trivial, simp)
apply simp
apply (rule corres_split_eqr [OF _ copy_mrs_corres])
apply (rule corres_split [OF _ tc_corres])
apply (rename_tac mi' mi'')
apply (rule_tac F="mi_label mi' = mi_label mi"
in corres_gen_asm)
apply (rule corres_split_nor [OF _ set_mi_corres])
apply (simp add: badge_register_def badgeRegister_def)
apply (fold dc_def)
apply (rule user_setreg_corres)
apply (case_tac mi', clarsimp)
apply wp
apply simp+
apply ((wp valid_case_option_post_wp hoare_vcg_const_Ball_lift
hoare_case_option_wp
hoare_valid_ipc_buffer_ptr_typ_at' copyMRs_typ_at'
hoare_vcg_const_Ball_lift lookupExtraCaps_length
| simp add: if_apply_def2)+)
apply (wp static_imp_wp | strengthen valid_msg_length_strengthen)+
apply clarsimp
apply auto
done
lemma corres_liftE_lift:
"corres r1 P P' m m' \<Longrightarrow>
corres (f1 \<oplus> r1) P P' (liftE m) (withoutFailure m')"
by simp
lemmas corres_ipc_thread_helper =
corres_split_eqrE [OF _ corres_liftE_lift [OF gct_corres]]
lemmas corres_ipc_info_helper =
corres_split_maprE [where f = message_info_map, OF _
corres_liftE_lift [OF get_mi_corres]]
crunch typ_at'[wp]: doNormalTransfer "\<lambda>s. P (typ_at' T p s)"
lemmas doNormal_lifts[wp] = typ_at_lifts [OF doNormalTransfer_typ_at']
lemma doNormal_invs'[wp]:
"\<lbrace>tcb_at' sender and tcb_at' receiver and invs'\<rbrace>
doNormalTransfer sender send_buf ep badge
can_grant receiver recv_buf \<lbrace>\<lambda>r. invs'\<rbrace>"
apply (simp add: doNormalTransfer_def)
apply (wp hoare_vcg_const_Ball_lift | simp)+
done
crunch aligned'[wp]: doNormalTransfer pspace_aligned'
(wp: crunch_wps)
crunch distinct'[wp]: doNormalTransfer pspace_distinct'
(wp: crunch_wps)
lemma transferCaps_urz[wp]:
"\<lbrace>untyped_ranges_zero' and valid_pspace'
and (\<lambda>s. (\<forall>x\<in>set caps. cte_wp_at' (\<lambda>cte. fst x \<noteq> capability.NullCap \<longrightarrow> cteCap cte = fst x) (snd x) s))\<rbrace>
transferCaps tag caps ep receiver recv_buf
\<lbrace>\<lambda>r. untyped_ranges_zero'\<rbrace>"
apply (simp add: transferCaps_def)
apply (rule hoare_pre)
apply (wp hoare_vcg_all_lift hoare_vcg_const_imp_lift
| wpc
| simp add: ball_conj_distrib)+
apply clarsimp
done
crunch gsUntypedZeroRanges[wp]: doNormalTransfer "\<lambda>s. P (gsUntypedZeroRanges s)"
(wp: crunch_wps transferCapsToSlots_pres1 ignore: constOnFailure)
lemmas asUser_urz = untyped_ranges_zero_lift[OF asUser_gsUntypedZeroRanges]
crunch urz[wp]: doNormalTransfer "untyped_ranges_zero'"
(ignore: asUser wp: crunch_wps asUser_urz hoare_vcg_const_Ball_lift)
lemma msgFromLookupFailure_map[simp]:
"msgFromLookupFailure (lookup_failure_map lf)
= msg_from_lookup_failure lf"
by (cases lf, simp_all add: lookup_failure_map_def msgFromLookupFailure_def)
lemma getRestartPCs_corres:
"corres (=) (tcb_at t) (tcb_at' t)
(as_user t getRestartPC) (asUser t getRestartPC)"
apply (rule corres_as_user')
apply (rule corres_Id, simp, simp)
apply (rule no_fail_getRestartPC)
done
lemma user_mapM_getRegister_corres:
"corres (=) (tcb_at t) (tcb_at' t)
(as_user t (mapM getRegister regs))
(asUser t (mapM getRegister regs))"
apply (rule corres_as_user')
apply (rule corres_Id [OF refl refl])
apply (rule no_fail_mapM)
apply (simp add: getRegister_def)
done
lemma make_arch_fault_msg_corres:
"corres (=) (tcb_at t) (tcb_at' t)
(make_arch_fault_msg f t)
(makeArchFaultMessage (arch_fault_map f) t)"
apply (cases f, clarsimp simp: makeArchFaultMessage_def split: arch_fault.split)
apply (rule corres_guard_imp)
apply (rule corres_split_eqr[OF _ getRestartPCs_corres])
apply (rule corres_trivial, simp add: arch_fault_map_def)
apply (wp+, auto)
done
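
(* Fault messages built on the two levels agree, by case analysis on the
   fault type. *)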
lemma mk_ft_msg_corres:
"corres (=) (tcb_at t) (tcb_at' t)
(make_fault_msg ft t)
(makeFaultMessage (fault_map ft) t)"
apply (cases ft, simp_all add: makeFaultMessage_def split del: if_split)
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ getRestartPCs_corres])
apply (rule corres_trivial, simp add: fromEnum_def enum_bool)
apply (wp | simp)+
apply (simp add: X64_H.syscallMessage_def)
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ user_mapM_getRegister_corres])
apply (rule corres_trivial, simp)
apply (wp | simp)+
apply (simp add: X64_H.exceptionMessage_def)
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ user_mapM_getRegister_corres])
apply (rule corres_trivial, simp)
apply (wp | simp)+
apply (rule make_arch_fault_msg_corres)
done
lemma makeFaultMessage_inv[wp]:
"\<lbrace>P\<rbrace> makeFaultMessage ft t \<lbrace>\<lambda>rv. P\<rbrace>"
apply (cases ft, simp_all add: makeFaultMessage_def)
apply (wp asUser_inv mapM_wp' det_mapM[where S=UNIV]
det_getRestartPC getRestartPC_inv
| clarsimp simp: getRegister_def makeArchFaultMessage_def
split: arch_fault.split)+
done
lemmas threadget_fault_corres =
threadget_corres [where r = fault_rel_optionation
and f = tcb_fault and f' = tcbFault,
simplified tcb_relation_def, simplified]
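(* Fault transfer: both levels build the message for the sender's pending fault, write it to the
   receiver's message registers and IPC buffer, and set the message info and badge register. *)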
lemma do_fault_transfer_corres:
"corres dc
(obj_at (\<lambda>ko. \<exists>tcb ft. ko = TCB tcb \<and> tcb_fault tcb = Some ft) sender
and tcb_at receiver and case_option \<top> in_user_frame recv_buf)
(tcb_at' sender and tcb_at' receiver and
case_option \<top> valid_ipc_buffer_ptr' recv_buf)
(do_fault_transfer badge sender receiver recv_buf)
(doFaultTransfer badge sender receiver recv_buf)"
apply (clarsimp simp: do_fault_transfer_def doFaultTransfer_def split_def
X64_H.badgeRegister_def badge_register_def)
apply (rule_tac Q="\<lambda>fault. K (\<exists>f. fault = Some f) and
tcb_at sender and tcb_at receiver and
case_option \<top> in_user_frame recv_buf"
and Q'="\<lambda>fault'. tcb_at' sender and tcb_at' receiver and
case_option \<top> valid_ipc_buffer_ptr' recv_buf"
in corres_split')
apply (rule corres_guard_imp)
apply (rule threadget_fault_corres)
apply (clarsimp simp: obj_at_def is_tcb)+
apply (rule corres_assume_pre)
apply (fold assert_opt_def | unfold haskell_fail_def)+
apply (rule corres_assert_opt_assume)
apply (clarsimp split: option.splits
simp: fault_rel_optionation_def assert_opt_def
map_option_case)
defer
defer
apply (clarsimp simp: fault_rel_optionation_def)
apply (wp thread_get_wp)
apply (clarsimp simp: obj_at_def is_tcb)
apply wp
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ mk_ft_msg_corres])
apply (rule corres_split_eqr [OF _ set_mrs_corres [OF refl]])
apply (rule corres_split_nor [OF _ set_mi_corres])
apply (rule user_setreg_corres)
apply simp
apply (wp | simp)+
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ mk_ft_msg_corres])
apply (rule corres_split_eqr [OF _ set_mrs_corres [OF refl]])
apply (rule corres_split_nor [OF _ set_mi_corres])
apply (rule user_setreg_corres)
apply simp
apply (wp | simp)+
done
lemma doFaultTransfer_invs[wp]:
"\<lbrace>invs' and tcb_at' receiver\<rbrace>
doFaultTransfer badge sender receiver recv_buf
\<lbrace>\<lambda>rv. invs'\<rbrace>"
by (simp add: doFaultTransfer_def split_def | wp
| clarsimp split: option.split)+
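(* Under valid_objs', looking up a thread's IPC buffer returns either None or a pointer
   satisfying valid_ipc_buffer_ptr'. *)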
lemma lookupIPCBuffer_valid_ipc_buffer [wp]:
"\<lbrace>valid_objs'\<rbrace> VSpace_H.lookupIPCBuffer b s \<lbrace>case_option \<top> valid_ipc_buffer_ptr'\<rbrace>"
unfolding lookupIPCBuffer_def X64_H.lookupIPCBuffer_def
apply (simp add: Let_def getSlotCap_def getThreadBufferSlot_def
locateSlot_conv threadGet_def comp_def)
apply (wp getCTE_wp getObject_tcb_wp | wpc)+
apply (clarsimp simp del: imp_disjL)
apply (drule obj_at_ko_at')
apply (clarsimp simp del: imp_disjL)
apply (rule_tac x = ko in exI)
apply (frule ko_at_cte_ipcbuffer)
apply (clarsimp simp: cte_wp_at_ctes_of simp del: imp_disjL)
apply (clarsimp simp: valid_ipc_buffer_ptr'_def)
apply (frule (1) ko_at_valid_objs')
apply (clarsimp simp: projectKO_opts_defs split: kernel_object.split_asm)
apply (clarsimp simp add: valid_obj'_def valid_tcb'_def
isCap_simps cte_level_bits_def field_simps)
apply (drule bspec [OF _ ranI [where a = "0x80"]])
apply simp
apply (clarsimp simp add: valid_cap'_def)
apply (rule conjI)
apply (rule aligned_add_aligned)
apply (clarsimp simp add: capAligned_def)
apply assumption
apply (erule is_aligned_andI1)
apply (case_tac xd, simp_all add: msg_align_bits bit_simps)[1]
apply (clarsimp simp: capAligned_def)
apply (drule_tac x =
"(tcbIPCBuffer ko && mask (pageBitsForSize xd)) >> pageBits" in spec)
apply (subst(asm) mult.commute mult.left_commute, subst(asm) shiftl_t2n[symmetric])
apply (simp add: shiftr_shiftl1 )
apply (subst (asm) mask_out_add_aligned)
apply (erule is_aligned_weaken [OF _ pbfs_atleast_pageBits])
apply (erule mp)
apply (rule shiftr_less_t2n)
apply (clarsimp simp: pbfs_atleast_pageBits)
apply (rule and_mask_less')
apply (simp add: word_bits_conv)
done
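(* Main IPC transfer correspondence: do_ipc_transfer/doIPCTransfer perform a normal transfer
   when the sender has no pending fault and a fault transfer otherwise. *)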
lemma dit_corres:
"corres dc
(tcb_at s and tcb_at r and valid_objs and pspace_aligned
and valid_list
and pspace_distinct and valid_mdb and cur_tcb
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at x s | _ \<Rightarrow> True))
(tcb_at' s and tcb_at' r and valid_pspace' and cur_tcb'
and (\<lambda>s. case ep of Some x \<Rightarrow> ep_at' x s | _ \<Rightarrow> True))
(do_ipc_transfer s ep bg grt r)
(doIPCTransfer s ep bg grt r)"
apply (simp add: do_ipc_transfer_def doIPCTransfer_def)
apply (rule_tac Q="%receiveBuffer sa. tcb_at s sa \<and> valid_objs sa \<and>
pspace_aligned sa \<and> tcb_at r sa \<and>
cur_tcb sa \<and> valid_mdb sa \<and> valid_list sa \<and> pspace_distinct sa \<and>
(case ep of None \<Rightarrow> True | Some x \<Rightarrow> ep_at x sa) \<and>
case_option (\<lambda>_. True) in_user_frame receiveBuffer sa \<and>
obj_at (\<lambda>ko. \<exists>tcb. ko = TCB tcb
\<comment> \<open>\<exists>ft. tcb_fault tcb = Some ft\<close>) s sa"
in corres_split')
apply (rule corres_guard_imp)
apply (rule lipcb_corres')
apply auto[2]
apply (rule corres_split' [OF _ _ thread_get_sp threadGet_inv])
apply (rule corres_guard_imp)
apply (rule threadget_fault_corres)
apply simp
defer
apply (rule corres_guard_imp)
apply (subst case_option_If)+
apply (rule corres_if2)
apply (simp add: fault_rel_optionation_def)
apply (rule corres_split_eqr [OF _ lipcb_corres'])
apply (simp add: dc_def[symmetric])
apply (rule do_normal_transfer_corres)
apply (wp | simp add: valid_pspace'_def)+
apply (simp add: dc_def[symmetric])
apply (rule do_fault_transfer_corres)
apply (clarsimp simp: obj_at_def)
apply (erule ignore_if)
apply (wp|simp add: obj_at_def is_tcb valid_pspace'_def)+
done
crunch ifunsafe[wp]: doIPCTransfer "if_unsafe_then_cap'"
(wp: crunch_wps hoare_vcg_const_Ball_lift get_rs_cte_at' ignore: transferCapsToSlots
simp: zipWithM_x_mapM ball_conj_distrib )
crunch iflive[wp]: doIPCTransfer "if_live_then_nonz_cap'"
(wp: crunch_wps hoare_vcg_const_Ball_lift get_rs_cte_at' ignore: transferCapsToSlots
simp: zipWithM_x_mapM ball_conj_distrib )
lemma valid_pspace_valid_objs'[elim!]:
"valid_pspace' s \<Longrightarrow> valid_objs' s"
by (simp add: valid_pspace'_def)
crunch vp[wp]: doIPCTransfer "valid_pspace'"
(wp: crunch_wps hoare_vcg_const_Ball_lift get_rs_cte_at' wp: transferCapsToSlots_vp simp:ball_conj_distrib )
crunch sch_act_wf[wp]: doIPCTransfer "\<lambda>s. sch_act_wf (ksSchedulerAction s) s"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch vq[wp]: doIPCTransfer "Invariants_H.valid_queues"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch vq'[wp]: doIPCTransfer "valid_queues'"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch state_refs_of[wp]: doIPCTransfer "\<lambda>s. P (state_refs_of' s)"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch ct[wp]: doIPCTransfer "cur_tcb'"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch idle'[wp]: doIPCTransfer "valid_idle'"
(wp: crunch_wps get_rs_cte_at' ignore: transferCapsToSlots simp: zipWithM_x_mapM)
crunch typ_at'[wp]: doIPCTransfer "\<lambda>s. P (typ_at' T p s)"
(wp: crunch_wps simp: zipWithM_x_mapM)
lemmas dit'_typ_ats[wp] = typ_at_lifts [OF doIPCTransfer_typ_at']
crunch irq_node'[wp]: doIPCTransfer "\<lambda>s. P (irq_node' s)"
(wp: crunch_wps simp: crunch_simps)
lemmas dit_irq_node'[wp]
= valid_irq_node_lift [OF doIPCTransfer_irq_node' doIPCTransfer_typ_at']
crunch valid_arch_state'[wp]: doIPCTransfer "valid_arch_state'"
(wp: crunch_wps simp: crunch_simps)
(* Levity: added (20090126 19:32:26) *)
declare asUser_global_refs' [wp]
lemma lec_valid_cap' [wp]:
"\<lbrace>valid_objs'\<rbrace> lookupExtraCaps thread xa mi \<lbrace>\<lambda>rv s. (\<forall>x\<in>set rv. s \<turnstile>' fst x)\<rbrace>, -"
apply (rule hoare_pre, rule hoare_post_imp_R)
apply (rule hoare_vcg_conj_lift_R[where R=valid_objs' and S="\<lambda>_. valid_objs'"])
apply (rule lookupExtraCaps_srcs)
apply wp
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (fastforce elim: ctes_of_valid')
apply simp
done
crunch objs'[wp]: doIPCTransfer "valid_objs'"
( wp: crunch_wps hoare_vcg_const_Ball_lift
transferCapsToSlots_valid_objs
simp: zipWithM_x_mapM ball_conj_distrib )
crunch global_refs'[wp]: doIPCTransfer "valid_global_refs'"
(wp: crunch_wps hoare_vcg_const_Ball_lift threadSet_global_refsT
transferCapsToSlots_valid_globals
simp: zipWithM_x_mapM ball_conj_distrib)
declare asUser_irq_handlers' [wp]
crunch irq_handlers'[wp]: doIPCTransfer "valid_irq_handlers'"
(wp: crunch_wps hoare_vcg_const_Ball_lift threadSet_irq_handlers'
transferCapsToSlots_irq_handlers
simp: zipWithM_x_mapM ball_conj_distrib )
crunch ioports'[wp]: doIPCTransfer "valid_ioports'"
(wp: crunch_wps hoare_vcg_const_Ball_lift
simp: zipWithM_x_mapM ball_conj_distrib)
crunch irq_states'[wp]: doIPCTransfer "valid_irq_states'"
(wp: crunch_wps no_irq no_irq_mapM no_irq_storeWord no_irq_loadWord
no_irq_case_option simp: crunch_simps zipWithM_x_mapM)
crunch irqs_masked'[wp]: doIPCTransfer "irqs_masked'"
(wp: crunch_wps simp: crunch_simps rule: irqs_masked_lift)
lemma doIPCTransfer_invs[wp]:
"\<lbrace>invs' and tcb_at' s and tcb_at' r\<rbrace>
doIPCTransfer s ep bg grt r
\<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: doIPCTransfer_def)
apply (wpsimp wp: hoare_drop_imp)
done
crunch nosch[wp]: doIPCTransfer "\<lambda>s. P (ksSchedulerAction s)"
(wp: hoare_drop_imps hoare_vcg_split_case_option mapM_wp'
simp: split_def zipWithM_x_mapM)
lemma sanitise_register_corres:
"foldl (\<lambda>s (a, b). UserContext (fpu_state s) ((user_regs s)(a := sanitise_register x a b))) s
(zip msg_template msg) =
foldl (\<lambda>s (a, b). UserContext (fpu_state s) ((user_regs s)(a := sanitiseRegister y a b))) s
(zip msg_template msg)"
apply (rule foldl_cong)
apply simp
apply simp
apply (clarsimp)
apply (rule arg_cong)
apply (clarsimp simp: sanitise_register_def sanitiseRegister_def)
by (auto simp: sanitise_or_flags_def sanitise_and_flags_def user_vtop_def mask_def
sanitiseOrFlags_def sanitiseAndFlags_def)
lemma handle_fault_reply_registers_corres:
"corres (=) (tcb_at t) (tcb_at' t)
(do t' \<leftarrow> arch_get_sanitise_register_info t;
y \<leftarrow> as_user t
(zipWithM_x
(\<lambda>r v. setRegister r
(sanitise_register t' r v))
msg_template msg);
return (label = 0)
od)
(do t' \<leftarrow> getSanitiseRegisterInfo t;
y \<leftarrow> asUser t
(zipWithM_x
(\<lambda>r v. setRegister r (sanitiseRegister t' r v))
msg_template msg);
return (label = 0)
od)"
apply (rule corres_guard_imp)
apply (clarsimp simp: arch_get_sanitise_register_info_def getSanitiseRegisterInfo_def)
apply (rule corres_split)
apply (rule corres_trivial, simp)
apply (rule corres_as_user')
apply(simp add: setRegister_def syscallMessage_def)
apply(subst zipWithM_x_modify)+
apply(rule corres_modify')
apply (clarsimp simp: sanitise_register_corres|wp)+
done
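(* handle_fault_reply/handleFaultReply write the sanitised reply registers back to the faulting
   thread and agree on the returned restart decision (label = 0). *)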
lemma handle_fault_reply_corres:
"ft' = fault_map ft \<Longrightarrow>
corres (=) (tcb_at t) (tcb_at' t)
(handle_fault_reply ft t label msg)
(handleFaultReply ft' t label msg)"
apply (cases ft)
apply(simp_all add: handleFaultReply_def
handle_arch_fault_reply_def handleArchFaultReply_def
syscallMessage_def exceptionMessage_def
split: arch_fault.split)
by (rule handle_fault_reply_registers_corres)+
crunch typ_at'[wp]: handleFaultReply "\<lambda>s. P (typ_at' T p s)"
lemmas hfr_typ_ats[wp] = typ_at_lifts [OF handleFaultReply_typ_at']
crunch ct'[wp]: handleFaultReply "\<lambda>s. P (ksCurThread s)"
lemma doIPCTransfer_sch_act_simple [wp]:
"\<lbrace>sch_act_simple\<rbrace> doIPCTransfer sender endpoint badge grant receiver \<lbrace>\<lambda>_. sch_act_simple\<rbrace>"
by (simp add: sch_act_simple_def, wp)
lemma possibleSwitchTo_invs'[wp]:
"\<lbrace>invs' and st_tcb_at' runnable' t
and (\<lambda>s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t)\<rbrace>
possibleSwitchTo t \<lbrace>\<lambda>_. invs'\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def)
apply (wp tcbSchedEnqueue_invs' ssa_invs')
apply (rule hoare_post_imp[OF _ rescheduleRequired_sa_cnt])
apply (wpsimp wp: ssa_invs' threadGet_wp)+
apply (clarsimp dest!: obj_at_ko_at' simp: tcb_in_cur_domain'_def obj_at'_def)
done
crunch cur' [wp]: isFinalCapability "\<lambda>s. P (cur_tcb' s)"
(simp: crunch_simps unless_when
wp: crunch_wps getObject_inv loadObject_default_inv)
crunch ct' [wp]: deleteCallerCap "\<lambda>s. P (ksCurThread s)"
(simp: crunch_simps unless_when
wp: crunch_wps getObject_inv loadObject_default_inv)
lemma getThreadCallerSlot_inv:
"\<lbrace>P\<rbrace> getThreadCallerSlot t \<lbrace>\<lambda>_. P\<rbrace>"
by (simp add: getThreadCallerSlot_def, wp)
lemma deleteCallerCap_ct_not_ksQ:
"\<lbrace>invs' and ct_in_state' simple' and sch_act_sane
and (\<lambda>s. ksCurThread s \<notin> set (ksReadyQueues s p))\<rbrace>
deleteCallerCap t
\<lbrace>\<lambda>rv s. ksCurThread s \<notin> set (ksReadyQueues s p)\<rbrace>"
apply (simp add: deleteCallerCap_def getSlotCap_def getThreadCallerSlot_def locateSlot_conv)
apply (wp getThreadCallerSlot_inv cteDeleteOne_ct_not_ksQ getCTE_wp)
apply (clarsimp simp: cte_wp_at_ctes_of)
done
crunch tcb_at'[wp]: unbindNotification "tcb_at' x"
lemma finaliseCapTrue_standin_tcb_at' [wp]:
"\<lbrace>tcb_at' x\<rbrace> finaliseCapTrue_standin cap v2 \<lbrace>\<lambda>_. tcb_at' x\<rbrace>"
apply (simp add: finaliseCapTrue_standin_def Let_def)
apply (safe)
apply (wp getObject_ntfn_inv
| wpc
| simp)+
done
lemma finaliseCapTrue_standin_cur':
"\<lbrace>\<lambda>s. cur_tcb' s\<rbrace> finaliseCapTrue_standin cap v2 \<lbrace>\<lambda>_ s'. cur_tcb' s'\<rbrace>"
apply (simp add: cur_tcb'_def)
apply (rule hoare_lift_Pf2 [OF _ finaliseCapTrue_standin_ct'])
apply (wp)
done
lemma cteDeleteOne_cur' [wp]:
"\<lbrace>\<lambda>s. cur_tcb' s\<rbrace> cteDeleteOne slot \<lbrace>\<lambda>_ s'. cur_tcb' s'\<rbrace>"
apply (simp add: cteDeleteOne_def unless_def when_def)
apply (wp hoare_drop_imps finaliseCapTrue_standin_cur' isFinalCapability_cur'
| simp add: split_def | wp (once) cur_tcb_lift)+
done
lemma handleFaultReply_cur' [wp]:
"\<lbrace>\<lambda>s. cur_tcb' s\<rbrace> handleFaultReply x0 thread label msg \<lbrace>\<lambda>_ s'. cur_tcb' s'\<rbrace>"
apply (clarsimp simp add: cur_tcb'_def)
apply (rule hoare_lift_Pf2 [OF _ handleFaultReply_ct'])
apply (wp)
done
lemma capClass_Reply:
"capClass cap = ReplyClass tcb \<Longrightarrow> isReplyCap cap \<and> capTCBPtr cap = tcb"
apply (cases cap, simp_all add: isCap_simps)
apply (rename_tac arch_capability)
apply (case_tac arch_capability, simp_all)
done
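(* A reply cap sits at the end of its MDB chain: its slot has no successor and its predecessor
   holds the corresponding master reply cap. This is used in do_reply_transfer_corres below to
   discharge the assertions about the reply slot. *)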
lemma reply_cap_end_mdb_chain:
"\<lbrakk> cte_wp_at (is_reply_cap_to t) slot s; invs s;
invs' s';
(s, s') \<in> state_relation; ctes_of s' (cte_map slot) = Some cte \<rbrakk>
\<Longrightarrow> (mdbPrev (cteMDBNode cte) \<noteq> nullPointer
\<and> mdbNext (cteMDBNode cte) = nullPointer)
\<and> cte_wp_at' (\<lambda>cte. isReplyCap (cteCap cte) \<and> capReplyMaster (cteCap cte))
(mdbPrev (cteMDBNode cte)) s'"
apply (clarsimp simp only: cte_wp_at_reply_cap_to_ex_rights)
apply (frule(1) pspace_relation_ctes_ofI[OF state_relation_pspace_relation],
clarsimp+)
apply (subgoal_tac "\<exists>slot' rights'. caps_of_state s slot' = Some (cap.ReplyCap t True rights')
\<and> descendants_of slot' (cdt s) = {slot}")
apply (elim state_relationE exE)
apply (clarsimp simp: cdt_relation_def
simp del: split_paired_All)
apply (drule spec, drule(1) mp[OF _ caps_of_state_cte_at])
apply (frule(1) pspace_relation_cte_wp_at[OF _ caps_of_state_cteD],
clarsimp+)
apply (clarsimp simp: descendants_of'_def cte_wp_at_ctes_of)
apply (frule_tac f="\<lambda>S. cte_map slot \<in> S" in arg_cong, simp(no_asm_use))
apply (frule invs_mdb'[unfolded valid_mdb'_def])
apply (rule context_conjI)
apply (clarsimp simp: nullPointer_def valid_mdb_ctes_def)
apply (erule(4) subtree_prev_0)
apply (rule conjI)
apply (rule ccontr)
apply (frule valid_mdb_no_loops, simp add: no_loops_def)
apply (drule_tac x="cte_map slot" in spec)
apply (erule notE, rule r_into_trancl, rule ccontr)
apply (clarsimp simp: mdb_next_unfold valid_mdb_ctes_def nullPointer_def)
apply (rule valid_dlistEn, assumption+)
apply (subgoal_tac "ctes_of s' \<turnstile> cte_map slot \<leadsto> mdbNext (cteMDBNode cte)")
apply (frule(3) class_linksD)
apply (clarsimp simp: isCap_simps dest!: capClass_Reply[OF sym])
apply (drule_tac f="\<lambda>S. mdbNext (cteMDBNode cte) \<in> S" in arg_cong)
apply (simp, erule notE, rule subtree.trans_parent, assumption+)
apply (case_tac ctea, case_tac cte')
apply (clarsimp simp add: parentOf_def isMDBParentOf_CTE)
apply (simp add: sameRegionAs_def2 isCap_simps)
apply (erule subtree.cases)
apply (clarsimp simp: parentOf_def isMDBParentOf_CTE)
apply (clarsimp simp: parentOf_def isMDBParentOf_CTE)
apply (simp add: mdb_next_unfold)
apply (erule subtree.cases)
apply (clarsimp simp: valid_mdb_ctes_def)
apply (erule_tac cte=ctea in valid_dlistEn, assumption)
apply (simp add: mdb_next_unfold)
apply (clarsimp simp: mdb_next_unfold isCap_simps)
apply (drule_tac f="\<lambda>S. c' \<in> S" in arg_cong)
apply (clarsimp simp: no_loops_direct_simp valid_mdb_no_loops)
apply (frule invs_mdb)
apply (drule invs_valid_reply_caps)
apply (clarsimp simp: valid_mdb_def reply_mdb_def
valid_reply_caps_def reply_caps_mdb_def
cte_wp_at_caps_of_state
simp del: split_paired_All)
apply (erule_tac x=slot in allE, erule_tac x=t in allE, erule impE, fast)
apply (elim exEI)
apply clarsimp
apply (subgoal_tac "P" for P, rule sym, rule equalityI, assumption)
apply clarsimp
apply (erule(4) unique_reply_capsD)
apply (simp add: descendants_of_def)
apply (rule r_into_trancl)
apply (simp add: cdt_parent_rel_def is_cdt_parent_def)
done
lemma unbindNotification_valid_objs'_strengthen:
"valid_tcb' tcb s \<longrightarrow> valid_tcb' (tcbBoundNotification_update Map.empty tcb) s"
"valid_ntfn' ntfn s \<longrightarrow> valid_ntfn' (ntfnBoundTCB_update Map.empty ntfn) s"
by (simp_all add: valid_tcb'_def valid_ntfn'_def valid_bound_tcb'_def valid_tcb_state'_def tcb_cte_cases_def split: ntfn.splits)
crunch valid_objs'[wp]: cteDeleteOne "valid_objs'"
(simp: crunch_simps unless_def
wp: crunch_wps getObject_inv loadObject_default_inv)
crunch nosch[wp]: handleFaultReply "\<lambda>s. P (ksSchedulerAction s)"
lemma emptySlot_weak_sch_act[wp]:
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>
emptySlot slot irq
\<lbrace>\<lambda>_ s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
by (wp weak_sch_act_wf_lift tcb_in_cur_domain'_lift)
lemma cancelAllIPC_weak_sch_act_wf[wp]:
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>
cancelAllIPC epptr
\<lbrace>\<lambda>_ s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: cancelAllIPC_def)
apply (wp rescheduleRequired_weak_sch_act_wf hoare_drop_imp | wpc | simp)+
done
lemma cancelAllSignals_weak_sch_act_wf[wp]:
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>
cancelAllSignals ntfnptr
\<lbrace>\<lambda>_ s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: cancelAllSignals_def)
apply (wp rescheduleRequired_weak_sch_act_wf hoare_drop_imp | wpc | simp)+
done
crunch weak_sch_act_wf[wp]: finaliseCapTrue_standin "\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s"
(ignore: setThreadState
simp: crunch_simps
wp: crunch_wps getObject_inv loadObject_default_inv)
lemma cteDeleteOne_weak_sch_act[wp]:
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>
cteDeleteOne sl
\<lbrace>\<lambda>_ s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: cteDeleteOne_def unless_def)
apply (wp hoare_drop_imps finaliseCapTrue_standin_cur' isFinalCapability_cur'
| simp add: split_def)+
done
crunch weak_sch_act_wf[wp]: emptySlot "\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s"
crunch pred_tcb_at'[wp]: handleFaultReply "pred_tcb_at' proj P t"
crunch valid_queues[wp]: handleFaultReply "Invariants_H.valid_queues"
crunch valid_queues'[wp]: handleFaultReply "valid_queues'"
crunch tcb_in_cur_domain'[wp]: handleFaultReply "tcb_in_cur_domain' t"
crunch sch_act_wf[wp]: unbindNotification "\<lambda>s. sch_act_wf (ksSchedulerAction s) s"
(wp: sbn_sch_act')
crunch valid_queues'[wp]: cteDeleteOne valid_queues'
(simp: crunch_simps unless_def inQ_def
wp: crunch_wps sts_st_tcb' getObject_inv loadObject_default_inv
threadSet_valid_queues' rescheduleRequired_valid_queues'_weak)
lemma cancelSignal_valid_queues'[wp]:
"\<lbrace>valid_queues'\<rbrace> cancelSignal t ntfn \<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
apply (simp add: cancelSignal_def)
apply (rule hoare_pre)
apply (wp getNotification_wp| wpc | simp)+
done
lemma cancelIPC_valid_queues'[wp]:
"\<lbrace>valid_queues' and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s) \<rbrace> cancelIPC t \<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
apply (simp add: cancelIPC_def Let_def getThreadReplySlot_def locateSlot_conv liftM_def)
apply (rule hoare_seq_ext[OF _ gts_sp'])
apply (case_tac state, simp_all) defer 2
apply (rule hoare_pre)
apply ((wp getEndpoint_wp getCTE_wp | wpc | simp)+)[8]
apply (wp cteDeleteOne_valid_queues')
apply (rule_tac Q="\<lambda>_. valid_queues' and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s)" in hoare_post_imp)
apply (clarsimp simp: capHasProperty_def cte_wp_at_ctes_of)
apply (wp threadSet_valid_queues' threadSet_sch_act| simp)+
apply (clarsimp simp: inQ_def)
done
crunch valid_objs'[wp]: handleFaultReply valid_objs'
lemma valid_tcb'_tcbFault_update[simp]: "\<And>tcb s. valid_tcb' tcb s \<Longrightarrow> valid_tcb' (tcbFault_update f tcb) s"
by (clarsimp simp: valid_tcb'_def tcb_cte_cases_def)
lemma cte_wp_at_is_reply_cap_toI:
"cte_wp_at ((=) (cap.ReplyCap t False rights)) ptr s
\<Longrightarrow> cte_wp_at (is_reply_cap_to t) ptr s"
by (fastforce simp: cte_wp_at_reply_cap_to_ex_rights)
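(* Reply transfer correspondence: depending on whether the receiver has a stored fault, the
   reply either performs a full IPC transfer or feeds the reply message to handle_fault_reply
   and clears the fault; in both cases the reply cap is deleted and the receiver is made
   schedulable again as appropriate. *)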
lemma do_reply_transfer_corres:
"corres dc
(einvs and tcb_at receiver and tcb_at sender
and cte_wp_at ((=) (cap.ReplyCap receiver False rights)) slot)
(invs' and tcb_at' sender and tcb_at' receiver
and valid_pspace' and cte_at' (cte_map slot))
(do_reply_transfer sender receiver slot grant)
(doReplyTransfer sender receiver (cte_map slot) grant)"
apply (simp add: do_reply_transfer_def doReplyTransfer_def cong: option.case_cong)
apply (rule corres_split' [OF _ _ gts_sp gts_sp'])
apply (rule corres_guard_imp)
apply (rule gts_corres, (clarsimp simp add: st_tcb_at_tcb_at)+)
apply (rule_tac F = "awaiting_reply state" in corres_req)
apply (clarsimp simp add: st_tcb_at_def obj_at_def is_tcb)
apply (fastforce simp: invs_def valid_state_def intro: has_reply_cap_cte_wpD
dest: has_reply_cap_cte_wpD
dest!: valid_reply_caps_awaiting_reply cte_wp_at_is_reply_cap_toI)
apply (case_tac state, simp_all add: bind_assoc)
apply (simp add: isReply_def liftM_def)
apply (rule corres_symb_exec_r[OF _ getCTE_sp getCTE_inv, rotated])
apply (rule no_fail_pre, wp)
apply clarsimp
apply (rename_tac mdbnode)
apply (rule_tac P="Q" and Q="Q" and P'="Q'" and Q'="(\<lambda>s. Q' s \<and> R' s)" for Q Q' R'
in stronger_corres_guard_imp[rotated])
apply assumption
apply (rule conjI, assumption)
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (drule cte_wp_at_is_reply_cap_toI)
apply (erule(4) reply_cap_end_mdb_chain)
apply (rule corres_assert_assume[rotated], simp)
apply (simp add: getSlotCap_def)
apply (rule corres_symb_exec_r[OF _ getCTE_sp getCTE_inv, rotated])
apply (rule no_fail_pre, wp)
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (rule corres_assert_assume[rotated])
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ threadget_fault_corres])
apply (case_tac rv, simp_all add: fault_rel_optionation_def bind_assoc)[1]
apply (rule corres_split [OF _ dit_corres])
apply (rule corres_split [OF _ cap_delete_one_corres])
apply (rule corres_split [OF _ sts_corres])
apply (rule possibleSwitchTo_corres)
apply simp
apply (wp set_thread_state_runnable_valid_sched set_thread_state_runnable_weak_valid_sched_action sts_st_tcb_at' sts_st_tcb' sts_valid_queues sts_valid_objs' delete_one_tcbDomain_obj_at'
| simp add: valid_tcb_state'_def)+
apply (strengthen cte_wp_at_reply_cap_can_fast_finalise)
apply (wp hoare_vcg_conj_lift)
apply (rule hoare_strengthen_post [OF do_ipc_transfer_non_null_cte_wp_at])
prefer 2
apply (erule cte_wp_at_weakenE)
apply (fastforce)
apply (clarsimp simp:is_cap_simps)
apply (wp weak_valid_sched_action_lift)+
apply (rule_tac Q="\<lambda>_. valid_queues' and valid_objs' and cur_tcb' and tcb_at' receiver and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s)" in hoare_post_imp, simp add: sch_act_wf_weak)
apply (wp tcb_in_cur_domain'_lift)
defer
apply (simp)
apply (wp)+
apply (clarsimp)
apply (rule conjI, erule invs_valid_objs)
apply (rule conjI, clarsimp)+
apply (rule conjI)
apply (erule cte_wp_at_weakenE)
apply (clarsimp)
apply (rule conjI, rule refl)
apply (fastforce)
apply (clarsimp simp: invs_def valid_sched_def valid_sched_action_def)
apply (simp)
apply (auto simp: invs'_def valid_state'_def)[1]
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ cap_delete_one_corres])
apply (rule corres_split_mapr [OF _ get_mi_corres])
apply (rule corres_split_eqr [OF _ lipcb_corres'])
apply (rule corres_split_eqr [OF _ get_mrs_corres])
apply (simp(no_asm) del: dc_simp)
apply (rule corres_split_eqr [OF _ handle_fault_reply_corres])
apply (rule corres_split [OF _ threadset_corresT])
apply (rule_tac Q="valid_sched and cur_tcb and tcb_at receiver"
and Q'="tcb_at' receiver and cur_tcb'
and (\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s)
and Invariants_H.valid_queues and valid_queues' and valid_objs'"
in corres_guard_imp)
apply (case_tac rvb, simp_all)[1]
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ sts_corres])
apply (fold dc_def, rule possibleSwitchTo_corres)
apply simp
apply (wp static_imp_wp static_imp_conj_wp set_thread_state_runnable_weak_valid_sched_action sts_st_tcb_at'
sts_st_tcb' sts_valid_queues | simp | force simp: valid_sched_def valid_sched_action_def valid_tcb_state'_def)+
apply (rule corres_guard_imp)
apply (rule sts_corres)
apply (simp_all)[20]
apply (clarsimp simp add: tcb_relation_def fault_rel_optionation_def
tcb_cap_cases_def tcb_cte_cases_def exst_same_def)+
apply (wp threadSet_cur weak_sch_act_wf_lift_linear threadSet_pred_tcb_no_state
thread_set_not_state_valid_sched threadSet_valid_queues threadSet_valid_queues'
threadSet_tcbDomain_triv threadSet_valid_objs'
| simp add: valid_tcb_state'_def)+
apply (wp threadSet_cur weak_sch_act_wf_lift_linear threadSet_pred_tcb_no_state
thread_set_not_state_valid_sched threadSet_valid_queues threadSet_valid_queues'
| simp add: runnable_def inQ_def valid_tcb'_def)+
apply (rule_tac Q="\<lambda>_. valid_sched and cur_tcb and tcb_at sender and tcb_at receiver and valid_objs and pspace_aligned"
in hoare_strengthen_post [rotated], clarsimp)
apply (wp)
apply (rule hoare_chain [OF cap_delete_one_invs])
apply (assumption)
apply (rule conjI, clarsimp)
apply (clarsimp simp add: invs_def valid_state_def)
apply (rule_tac Q="\<lambda>_. tcb_at' sender and tcb_at' receiver and invs'"
in hoare_strengthen_post [rotated])
apply (solves\<open>auto simp: invs'_def valid_state'_def\<close>)
apply wp
apply clarsimp
apply (rule conjI)
apply (erule cte_wp_at_weakenE)
apply (clarsimp simp add: can_fast_finalise_def)
apply (erule(1) emptyable_cte_wp_atD)
apply (rule allI, rule impI)
apply (clarsimp simp add: is_master_reply_cap_def)
apply (clarsimp)
done
(* when we cannot talk about reply cap rights explicitly (for instance, when a schematic ?rights
   would be generated too early) *)
lemma do_reply_transfer_corres':
"corres dc
(einvs and tcb_at receiver and tcb_at sender
and cte_wp_at (is_reply_cap_to receiver) slot)
(invs' and tcb_at' sender and tcb_at' receiver
and valid_pspace' and cte_at' (cte_map slot))
(do_reply_transfer sender receiver slot grant)
(doReplyTransfer sender receiver (cte_map slot) grant)"
using do_reply_transfer_corres[of receiver sender _ slot]
by (fastforce simp add: cte_wp_at_reply_cap_to_ex_rights corres_underlying_def)
lemma valid_pspace'_splits[elim!]:
"valid_pspace' s \<Longrightarrow> valid_objs' s"
"valid_pspace' s \<Longrightarrow> pspace_aligned' s"
"valid_pspace' s \<Longrightarrow> pspace_canonical' s"
"valid_pspace' s \<Longrightarrow> pspace_in_kernel_mappings' s"
"valid_pspace' s \<Longrightarrow> pspace_distinct' s"
"valid_pspace' s \<Longrightarrow> valid_mdb' s"
"valid_pspace' s \<Longrightarrow> no_0_obj' s"
by (simp add: valid_pspace'_def)+
lemma sts_valid_pspace_hangers:
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. valid_objs'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. pspace_distinct'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. pspace_aligned'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. pspace_canonical'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. pspace_in_kernel_mappings'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. valid_mdb'\<rbrace>"
"\<lbrace>valid_pspace' and tcb_at' t and valid_tcb_state' st\<rbrace> setThreadState st t \<lbrace>\<lambda>rv. no_0_obj'\<rbrace>"
by (safe intro!: hoare_strengthen_post [OF sts'_valid_pspace'_inv])
declare no_fail_getSlotCap [wp]
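(* setup_caller_cap/setupCallerCap install a reply cap, derived from the sender's master reply
   slot, in the receiver's caller slot, and block the sender on the reply. *)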
lemma setup_caller_corres:
"corres dc
(st_tcb_at (Not \<circ> halted) sender and tcb_at receiver and
st_tcb_at (Not \<circ> awaiting_reply) sender and valid_reply_caps and
valid_objs and pspace_distinct and pspace_aligned and valid_mdb
and valid_list and
valid_reply_masters and cte_wp_at (\<lambda>c. c = cap.NullCap) (receiver, tcb_cnode_index 3))
(tcb_at' sender and tcb_at' receiver and valid_pspace'
and (\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s))
(setup_caller_cap sender receiver grant)
(setupCallerCap sender receiver grant)"
supply if_split[split del]
apply (simp add: setup_caller_cap_def setupCallerCap_def
getThreadReplySlot_def locateSlot_conv
getThreadCallerSlot_def)
apply (rule stronger_corres_guard_imp)
apply (rule corres_split_nor)
apply (rule corres_symb_exec_r)
apply (rule_tac F="\<exists>r. cteCap masterCTE = capability.ReplyCap sender True r
\<and> mdbNext (cteMDBNode masterCTE) = nullPointer"
in corres_gen_asm2, clarsimp simp add: isCap_simps)
apply (rule corres_symb_exec_r)
apply (rule_tac F="rv = capability.NullCap"
in corres_gen_asm2, simp)
apply (rule cins_corres)
apply (simp split: if_splits)
apply (simp add: cte_map_def tcbReplySlot_def
tcb_cnode_index_def cte_level_bits_def)
apply (simp add: cte_map_def tcbCallerSlot_def
tcb_cnode_index_def cte_level_bits_def)
apply (rule_tac Q="\<lambda>rv. cte_at' (receiver + 2 ^ cte_level_bits * tcbCallerSlot)"
in valid_prove_more)
apply (wp, (wp getSlotCap_wp)+)
apply blast
apply (rule no_fail_pre, wp)
apply (clarsimp simp: cte_wp_at'_def cte_at'_def)
apply (rule_tac Q="\<lambda>rv. cte_at' (sender + 2 ^ cte_level_bits * tcbReplySlot)"
in valid_prove_more)
apply (wp, (wp getCTE_wp')+)
apply blast
apply (rule no_fail_pre, wp)
apply (clarsimp simp: cte_wp_at_ctes_of)
apply (rule sts_corres)
apply (simp split: option.split)
apply (wp sts_valid_pspace_hangers
| simp add: cte_wp_at_ctes_of)+
apply (clarsimp simp: valid_tcb_state_def st_tcb_at_reply_cap_valid
st_tcb_at_tcb_at st_tcb_at_caller_cap_null
split: option.split)
apply (clarsimp simp: valid_tcb_state'_def valid_cap'_def capAligned_reply_tcbI)
apply (frule(1) st_tcb_at_reply_cap_valid, simp, clarsimp)
apply (clarsimp simp: cte_wp_at_ctes_of cte_wp_at_caps_of_state)
apply (drule pspace_relation_cte_wp_at[rotated, OF caps_of_state_cteD],
erule valid_pspace'_splits, clarsimp+)+
apply (clarsimp simp: cte_wp_at_ctes_of cte_map_def tcbReplySlot_def
tcbCallerSlot_def tcb_cnode_index_def
is_cap_simps)
apply (auto intro: reply_no_descendants_mdbNext_null[OF not_waiting_reply_slot_no_descendants]
simp: cte_index_repair)
done
crunch tcb_at'[wp]: getThreadCallerSlot "tcb_at' t"
lemma getThreadReplySlot_tcb_at'[wp]:
"\<lbrace>tcb_at' t\<rbrace> getThreadReplySlot tcb \<lbrace>\<lambda>_. tcb_at' t\<rbrace>"
by (simp add: getThreadReplySlot_def, wp)
lemma setupCallerCap_tcb_at'[wp]:
"\<lbrace>tcb_at' t\<rbrace> setupCallerCap sender receiver grant \<lbrace>\<lambda>_. tcb_at' t\<rbrace>"
by (simp add: setupCallerCap_def, wp hoare_drop_imp)
crunch ct'[wp]: setupCallerCap "\<lambda>s. P (ksCurThread s)"
(wp: crunch_wps)
lemma cteInsert_sch_act_wf[wp]:
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s\<rbrace>
cteInsert newCap srcSlot destSlot
\<lbrace>\<lambda>_ s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
by (wp sch_act_wf_lift tcb_in_cur_domain'_lift)
lemma setupCallerCap_sch_act [wp]:
"\<lbrace>\<lambda>s. sch_act_not t s \<and> sch_act_wf (ksSchedulerAction s) s\<rbrace>
setupCallerCap t r g \<lbrace>\<lambda>_ s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: setupCallerCap_def getSlotCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv)
apply (wp getCTE_wp' sts_sch_act' hoare_drop_imps hoare_vcg_all_lift)
apply clarsimp
done
lemma possibleSwitchTo_weak_sch_act_wf[wp]:
"\<lbrace>\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s \<and> st_tcb_at' runnable' t s\<rbrace>
possibleSwitchTo t \<lbrace>\<lambda>rv s. weak_sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: possibleSwitchTo_def setSchedulerAction_def threadGet_def curDomain_def
bitmap_fun_defs)
apply (wp rescheduleRequired_weak_sch_act_wf
weak_sch_act_wf_lift_linear[where f="tcbSchedEnqueue t"]
getObject_tcb_wp static_imp_wp
| wpc)+
apply (clarsimp simp: obj_at'_def projectKOs weak_sch_act_wf_def ps_clear_def tcb_in_cur_domain'_def)
done
lemmas transferCapsToSlots_pred_tcb_at' =
transferCapsToSlots_pres1 [OF cteInsert_pred_tcb_at']
crunches doIPCTransfer, possibleSwitchTo
for pred_tcb_at'[wp]: "pred_tcb_at' proj P t"
(wp: mapM_wp' crunch_wps simp: zipWithM_x_mapM)
(* FIXME move *)
lemma tcb_in_cur_domain'_ksSchedulerAction_update[simp]:
"tcb_in_cur_domain' t (ksSchedulerAction_update f s) = tcb_in_cur_domain' t s"
by (simp add: tcb_in_cur_domain'_def)
(* FIXME move *)
lemma ct_idle_or_in_cur_domain'_ksSchedulerAction_update[simp]:
"b\<noteq> ResumeCurrentThread \<Longrightarrow>
ct_idle_or_in_cur_domain' (s\<lparr>ksSchedulerAction := b\<rparr>)"
apply (clarsimp simp add: ct_idle_or_in_cur_domain'_def)
done
lemma setSchedulerAction_ct_in_domain:
"\<lbrace>\<lambda>s. ct_idle_or_in_cur_domain' s
\<and> p \<noteq> ResumeCurrentThread \<rbrace> setSchedulerAction p
\<lbrace>\<lambda>_. ct_idle_or_in_cur_domain'\<rbrace>"
by (simp add:setSchedulerAction_def | wp)+
crunches setupCallerCap, doIPCTransfer, possibleSwitchTo
for ct_idle_or_in_cur_domain'[wp]: ct_idle_or_in_cur_domain'
and ksCurDomain[wp]: "\<lambda>s. P (ksCurDomain s)"
and ksDomSchedule[wp]: "\<lambda>s. P (ksDomSchedule s)"
(wp: crunch_wps setSchedulerAction_ct_in_domain simp: zipWithM_x_mapM)
crunch tcbDomain_obj_at'[wp]: doIPCTransfer "obj_at' (\<lambda>tcb. P (tcbDomain tcb)) t"
(wp: crunch_wps constOnFailure_wp simp: crunch_simps)
crunch tcb_at'[wp]: possibleSwitchTo "tcb_at' t"
(wp: crunch_wps)
crunch valid_pspace'[wp]: possibleSwitchTo valid_pspace'
(wp: crunch_wps)
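(* send_ipc/sendIPC correspondence, by case analysis on the endpoint: a blocking sender is
   queued on an IdleEP/SendEP, while a waiting receiver on a RecvEP has the message transferred
   to it, with a caller cap set up when this is a call (subject to the grant rights). *)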
lemma send_ipc_corres:
(* call is only true when invoked from handleSyscall for SysCall, which
   is always blocking. *)
assumes "call \<longrightarrow> bl"
shows
"corres dc (einvs and st_tcb_at active t and ep_at ep and ex_nonz_cap_to t)
(invs' and sch_act_not t and tcb_at' t and ep_at' ep)
(send_ipc bl call bg cg cgr t ep) (sendIPC bl call bg cg cgr t ep)"
proof -
show ?thesis
apply (insert assms)
apply (unfold send_ipc_def sendIPC_def Let_def)
apply (case_tac bl)
apply clarsimp
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ get_ep_corres,
where
R="\<lambda>rv. einvs and st_tcb_at active t and ep_at ep and
valid_ep rv and obj_at (\<lambda>ob. ob = Endpoint rv) ep
and ex_nonz_cap_to t"
and
R'="\<lambda>rv'. invs' and tcb_at' t and sch_act_not t
and ep_at' ep and valid_ep' rv'"])
apply (case_tac rv)
apply (simp add: ep_relation_def)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ sts_corres])
apply (rule set_ep_corres)
apply (simp add: ep_relation_def)
apply (simp add: fault_rel_optionation_def)
apply wp+
apply (clarsimp simp: st_tcb_at_tcb_at valid_tcb_state_def)
apply clarsimp
\<comment> \<open>concludes the IdleEP case of the blocking (bl) branch\<close>
apply (simp add: ep_relation_def)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ sts_corres])
apply (rule set_ep_corres)
apply (simp add: ep_relation_def)
apply (simp add: fault_rel_optionation_def)
apply wp+
apply (clarsimp simp: st_tcb_at_tcb_at valid_tcb_state_def)
apply clarsimp
\<comment> \<open>concludes the SendEP case of the blocking (bl) branch\<close>
apply (simp add: ep_relation_def)
apply (rename_tac list)
apply (rule_tac F="list \<noteq> []" in corres_req)
apply (simp add: valid_ep_def)
apply (case_tac list)
apply simp
apply (clarsimp split del: if_split)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ set_ep_corres])
apply (simp add: isReceive_def split del:if_split)
apply (rule corres_split [OF _ gts_corres])
apply (rule_tac
F="\<exists>data. recv_state = Structures_A.BlockedOnReceive ep data"
in corres_gen_asm)
apply (clarsimp simp: case_bool_If case_option_If if3_fold
simp del: dc_simp split del: if_split cong: if_cong)
apply (rule corres_split [OF _ dit_corres])
apply (rule corres_split [OF _ sts_corres])
apply (rule corres_split [OF _ possibleSwitchTo_corres])
apply (fold when_def)[1]
apply (rule_tac P="call" and P'="call"
in corres_symmetric_bool_cases, blast)
apply (simp add: when_def dc_def[symmetric] split del: if_split)
apply (rule corres_if2, simp)
apply (rule setup_caller_corres)
apply (rule sts_corres, simp)
apply (rule corres_trivial)
apply (simp add: when_def dc_def[symmetric] split del: if_split)
apply (simp split del: if_split add: if_apply_def2)
apply (wp hoare_drop_imps)[1]
apply (simp split del: if_split add: if_apply_def2)
apply (wp hoare_drop_imps)[1]
apply (wp | simp)+
apply (wp sts_cur_tcb set_thread_state_runnable_weak_valid_sched_action sts_st_tcb_at_cases)
apply (wp setThreadState_valid_queues' sts_valid_queues sts_weak_sch_act_wf
sts_cur_tcb' setThreadState_tcb' sts_st_tcb_at'_cases)[1]
apply (simp add: valid_tcb_state_def pred_conj_def)
apply (strengthen reply_cap_doesnt_exist_strg disjI2_strg)
apply ((wp hoare_drop_imps do_ipc_transfer_tcb_caps weak_valid_sched_action_lift
| clarsimp simp: is_cap_simps)+)[1]
apply (simp add: pred_conj_def)
apply (strengthen sch_act_wf_weak)
apply (simp add: valid_tcb_state'_def)
apply (wp weak_sch_act_wf_lift_linear tcb_in_cur_domain'_lift hoare_drop_imps)[1]
apply (wp gts_st_tcb_at)+
apply (simp add: ep_relation_def split: list.split)
apply (simp add: pred_conj_def cong: conj_cong)
apply (wp hoare_post_taut)
apply (simp)
apply (wp weak_sch_act_wf_lift_linear set_ep_valid_objs' setEndpoint_valid_mdb')+
apply (clarsimp simp add: invs_def valid_state_def valid_pspace_def ep_redux_simps
ep_redux_simps' st_tcb_at_tcb_at valid_ep_def
cong: list.case_cong)
apply (drule(1) sym_refs_obj_atD[where P="\<lambda>ob. ob = e" for e])
apply (clarsimp simp: st_tcb_at_refs_of_rev st_tcb_at_reply_cap_valid
st_tcb_def2 valid_sched_def valid_sched_action_def)
apply (force simp: st_tcb_def2 dest!: st_tcb_at_caller_cap_null[simplified,rotated])
subgoal by (auto simp: valid_ep'_def invs'_def valid_state'_def split: list.split)
apply wp+
apply (clarsimp simp: ep_at_def2)+
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ get_ep_corres,
where
R="\<lambda>rv. einvs and st_tcb_at active t and ep_at ep and
valid_ep rv and obj_at (\<lambda>k. k = Endpoint rv) ep"
and
R'="\<lambda>rv'. invs' and tcb_at' t and sch_act_not t
and ep_at' ep and valid_ep' rv'"])
apply (rename_tac rv rv')
apply (case_tac rv)
apply (simp add: ep_relation_def)
\<comment> \<open>concludes the IdleEP branch when not bl and there is no ft\<close>
apply (simp add: ep_relation_def)
\<comment> \<open>concludes the SendEP branch when not bl and there is no ft\<close>
apply (simp add: ep_relation_def)
apply (rename_tac list)
apply (rule_tac F="list \<noteq> []" in corres_req)
apply (simp add: valid_ep_def)
apply (case_tac list)
apply simp
apply (rule_tac F="a \<noteq> t" in corres_req)
apply (clarsimp simp: invs_def valid_state_def
valid_pspace_def)
apply (drule(1) sym_refs_obj_atD[where P="\<lambda>ob. ob = e" for e])
apply (clarsimp simp: st_tcb_at_def obj_at_def tcb_bound_refs_def2)
apply fastforce
apply (clarsimp split del: if_split)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ set_ep_corres])
apply (rule corres_split [OF _ gts_corres])
apply (rule_tac
F="\<exists>data. recv_state = Structures_A.BlockedOnReceive ep data"
in corres_gen_asm)
apply (clarsimp simp: isReceive_def case_bool_If
split del: if_split cong: if_cong)
apply (rule corres_split [OF _ dit_corres])
apply (rule corres_split [OF _ sts_corres])
apply (rule possibleSwitchTo_corres)
apply (simp add: if_apply_def2)
apply (wp hoare_drop_imps)
apply (simp add: if_apply_def2)
apply ((wp sts_cur_tcb set_thread_state_runnable_weak_valid_sched_action sts_st_tcb_at_cases |
simp add: if_apply_def2 split del: if_split)+)[1]
apply (wp setThreadState_valid_queues' sts_valid_queues sts_weak_sch_act_wf
sts_cur_tcb' setThreadState_tcb' sts_st_tcb_at'_cases)
apply (simp add: valid_tcb_state_def pred_conj_def)
apply ((wp hoare_drop_imps do_ipc_transfer_tcb_caps weak_valid_sched_action_lift
| clarsimp simp:is_cap_simps)+)[1]
apply (simp add: valid_tcb_state'_def pred_conj_def)
apply (strengthen sch_act_wf_weak)
apply (wp weak_sch_act_wf_lift_linear hoare_drop_imps)
apply (wp gts_st_tcb_at)+
apply (simp add: ep_relation_def split: list.split)
apply (simp add: pred_conj_def cong: conj_cong)
apply (wp hoare_post_taut)
apply simp
apply (wp weak_sch_act_wf_lift_linear set_ep_valid_objs' setEndpoint_valid_mdb')
apply (clarsimp simp add: invs_def valid_state_def
valid_pspace_def ep_redux_simps ep_redux_simps'
st_tcb_at_tcb_at
cong: list.case_cong)
apply (clarsimp simp: valid_ep_def)
apply (drule(1) sym_refs_obj_atD[where P="\<lambda>ob. ob = e" for e])
apply (clarsimp simp: st_tcb_at_refs_of_rev st_tcb_at_reply_cap_valid
st_tcb_at_caller_cap_null)
apply (fastforce simp: st_tcb_def2 valid_sched_def valid_sched_action_def)
subgoal by (auto simp: valid_ep'_def
split: list.split;
clarsimp simp: invs'_def valid_state'_def)
apply wp+
apply (clarsimp simp: ep_at_def2)+
done
qed
crunch typ_at'[wp]: setMessageInfo "\<lambda>s. P (typ_at' T p s)"
lemmas setMessageInfo_typ_ats[wp] = typ_at_lifts [OF setMessageInfo_typ_at']
(* Annotation added by Simon Winwood (Thu Jul 1 20:54:41 2010) using taint-mode *)
declare tl_drop_1[simp]
crunch cur[wp]: cancel_ipc "cur_tcb"
(wp: select_wp crunch_wps simp: crunch_simps)
crunch valid_objs'[wp]: asUser "valid_objs'"
lemma valid_sched_weak_strg:
"valid_sched s \<longrightarrow> weak_valid_sched_action s"
by (simp add: valid_sched_def valid_sched_action_def)
crunch weak_valid_sched_action[wp]: as_user weak_valid_sched_action
(wp: weak_valid_sched_action_lift)
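(* send_signal/sendSignal correspondence, by case analysis on the notification object:
   idle (possibly with a bound, blocked TCB to wake), waiting queue, or already active. *)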
lemma send_signal_corres:
"corres dc (einvs and ntfn_at ep) (invs' and ntfn_at' ep)
(send_signal ep bg) (sendSignal ep bg)"
apply (simp add: send_signal_def sendSignal_def Let_def)
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ get_ntfn_corres,
where
R = "\<lambda>rv. einvs and ntfn_at ep and valid_ntfn rv and
ko_at (Structures_A.Notification rv) ep" and
R' = "\<lambda>rv'. invs' and ntfn_at' ep and
valid_ntfn' rv' and ko_at' rv' ep"])
defer
apply (wp get_simple_ko_ko_at get_ntfn_ko')+
apply (simp add: invs_valid_objs)+
apply (case_tac "ntfn_obj ntfn")
\<comment> \<open>IdleNtfn\<close>
apply (clarsimp simp add: ntfn_relation_def)
apply (case_tac "ntfnBoundTCB nTFN")
apply clarsimp
apply (rule corres_guard_imp[OF set_ntfn_corres])
apply (clarsimp simp add: ntfn_relation_def)+
apply (rule corres_guard_imp)
apply (rule corres_split[OF _ gts_corres])
apply (rule corres_if)
apply (fastforce simp: receive_blocked_def receiveBlocked_def
thread_state_relation_def
split: Structures_A.thread_state.splits
Structures_H.thread_state.splits)
apply (rule corres_split[OF _ cancel_ipc_corres])
apply (rule corres_split[OF _ sts_corres])
apply (simp add: badgeRegister_def badge_register_def)
apply (rule corres_split[OF _ user_setreg_corres])
apply (rule possibleSwitchTo_corres)
apply wp
apply (clarsimp simp: thread_state_relation_def)
apply (wp set_thread_state_runnable_weak_valid_sched_action sts_st_tcb_at'
sts_valid_queues sts_st_tcb' hoare_disjI2
cancel_ipc_cte_wp_at_not_reply_state
| strengthen invs_vobjs_strgs invs_psp_aligned_strg valid_sched_weak_strg
| simp add: valid_tcb_state_def)+
apply (rule_tac Q="\<lambda>rv. invs' and tcb_at' a" in hoare_strengthen_post)
apply wp
apply (clarsimp simp: invs'_def valid_state'_def sch_act_wf_weak
valid_tcb_state'_def)
apply (rule set_ntfn_corres)
apply (clarsimp simp add: ntfn_relation_def)
apply (wp gts_wp gts_wp' | clarsimp)+
apply (auto simp: valid_ntfn_def receive_blocked_def valid_sched_def invs_cur
elim: pred_tcb_weakenE
intro: st_tcb_at_reply_cap_valid
split: Structures_A.thread_state.splits)[1]
apply (clarsimp simp: valid_ntfn'_def invs'_def valid_state'_def valid_pspace'_def sch_act_wf_weak)
\<comment> \<open>WaitingNtfn\<close>
apply (clarsimp simp add: ntfn_relation_def Let_def)
apply (simp add: update_waiting_ntfn_def)
apply (rename_tac list)
apply (case_tac "tl list = []")
\<comment> \<open>tl list = []\<close>
apply (rule corres_guard_imp)
apply (rule_tac F="list \<noteq> []" in corres_gen_asm)
apply (simp add: list_case_helper split del: if_split)
apply (rule corres_split [OF _ set_ntfn_corres])
apply (rule corres_split [OF _ sts_corres])
apply (simp add: badgeRegister_def badge_register_def)
apply (rule corres_split [OF _ user_setreg_corres])
apply (rule possibleSwitchTo_corres)
apply ((wp | simp)+)[1]
apply (rule_tac Q="\<lambda>_. Invariants_H.valid_queues and valid_queues' and
(\<lambda>s. sch_act_wf (ksSchedulerAction s) s) and
cur_tcb' and
st_tcb_at' runnable' (hd list) and valid_objs'"
in hoare_post_imp, clarsimp simp: pred_tcb_at' elim!: sch_act_wf_weak)
apply (wp | simp)+
apply (wp sts_st_tcb_at' set_thread_state_runnable_weak_valid_sched_action
| simp)+
apply (wp sts_st_tcb_at'_cases sts_valid_queues setThreadState_valid_queues'
setThreadState_st_tcb
| simp)+
apply (simp add: ntfn_relation_def)
apply (wp set_simple_ko_valid_objs set_ntfn_aligned' set_ntfn_valid_objs'
hoare_vcg_disj_lift weak_sch_act_wf_lift_linear
| simp add: valid_tcb_state_def valid_tcb_state'_def)+
apply (clarsimp simp: invs_def valid_state_def valid_ntfn_def
valid_pspace_def ntfn_queued_st_tcb_at valid_sched_def
valid_sched_action_def)
apply (auto simp: valid_ntfn'_def )[1]
apply (clarsimp simp: invs'_def valid_state'_def)
\<comment> \<open>tl list \<noteq> []\<close>
apply (rule corres_guard_imp)
apply (rule_tac F="list \<noteq> []" in corres_gen_asm)
apply (simp add: list_case_helper)
apply (rule corres_split [OF _ set_ntfn_corres])
apply (rule corres_split [OF _ sts_corres])
apply (simp add: badgeRegister_def badge_register_def)
apply (rule corres_split [OF _ user_setreg_corres])
apply (rule possibleSwitchTo_corres)
apply (wp cur_tcb_lift | simp)+
apply (wp sts_st_tcb_at' set_thread_state_runnable_weak_valid_sched_action
| simp)+
apply (wp sts_st_tcb_at'_cases sts_valid_queues setThreadState_valid_queues'
setThreadState_st_tcb
| simp)+
apply (simp add: ntfn_relation_def split:list.splits)
apply (wp set_ntfn_aligned' set_simple_ko_valid_objs set_ntfn_valid_objs'
hoare_vcg_disj_lift weak_sch_act_wf_lift_linear
| simp add: valid_tcb_state_def valid_tcb_state'_def)+
apply (clarsimp simp: invs_def valid_state_def valid_ntfn_def
valid_pspace_def neq_Nil_conv
ntfn_queued_st_tcb_at valid_sched_def valid_sched_action_def
split: option.splits)
apply (auto simp: valid_ntfn'_def neq_Nil_conv invs'_def valid_state'_def
weak_sch_act_wf_def
split: option.splits)[1]
\<comment> \<open>ActiveNtfn\<close>
apply (clarsimp simp add: ntfn_relation_def)
apply (rule corres_guard_imp)
apply (rule set_ntfn_corres)
apply (simp add: ntfn_relation_def combine_ntfn_badges_def
combine_ntfn_msgs_def)
apply (simp add: invs_def valid_state_def valid_ntfn_def)
apply (simp add: invs'_def valid_state'_def valid_ntfn'_def)
done
lemma valid_Running'[simp]:
"valid_tcb_state' Running = \<top>"
by (rule ext, simp add: valid_tcb_state'_def)
crunch typ'[wp]: setMRs "\<lambda>s. P (typ_at' T p s)"
(wp: crunch_wps simp: zipWithM_x_mapM)
lemma possibleSwitchTo_sch_act[wp]:
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s \<and> st_tcb_at' runnable' t s\<rbrace>
possibleSwitchTo t
\<lbrace>\<lambda>rv s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def bitmap_fun_defs)
apply (wp static_imp_wp threadSet_sch_act setQueue_sch_act threadGet_wp
| simp add: unless_def | wpc)+
apply (auto simp: obj_at'_def projectKOs tcb_in_cur_domain'_def)
done
lemma possibleSwitchTo_valid_queues[wp]:
"\<lbrace>Invariants_H.valid_queues and valid_objs' and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s) and st_tcb_at' runnable' t\<rbrace>
possibleSwitchTo t
\<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def bitmap_fun_defs)
apply (wp hoare_drop_imps | wpc | simp)+
apply (auto simp: valid_tcb'_def weak_sch_act_wf_def
dest: pred_tcb_at'
elim!: valid_objs_valid_tcbE)
done
lemma possibleSwitchTo_ksQ':
"\<lbrace>(\<lambda>s. t' \<notin> set (ksReadyQueues s p) \<and> sch_act_not t' s) and K(t' \<noteq> t)\<rbrace>
possibleSwitchTo t
\<lbrace>\<lambda>_ s. t' \<notin> set (ksReadyQueues s p)\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def bitmap_fun_defs)
apply (wp static_imp_wp rescheduleRequired_ksQ' tcbSchedEnqueue_ksQ threadGet_wp
| wpc
| simp split del: if_split)+
apply (auto simp: obj_at'_def)
done
lemma possibleSwitchTo_valid_queues'[wp]:
"\<lbrace>valid_queues' and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s)
and st_tcb_at' runnable' t\<rbrace>
possibleSwitchTo t
\<lbrace>\<lambda>rv. valid_queues'\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def bitmap_fun_defs)
apply (wp static_imp_wp threadGet_wp | wpc | simp)+
apply (auto simp: obj_at'_def)
done
crunches possibleSwitchTo
for st_refs_of'[wp]: "\<lambda>s. P (state_refs_of' s)"
and cap_to'[wp]: "ex_nonz_cap_to' p"
and objs'[wp]: valid_objs'
and ct[wp]: cur_tcb'
(wp: cur_tcb_lift crunch_wps)
lemma possibleSwitchTo_iflive[wp]:
"\<lbrace>if_live_then_nonz_cap' and ex_nonz_cap_to' t
and (\<lambda>s. sch_act_wf (ksSchedulerAction s) s)\<rbrace>
possibleSwitchTo t
\<lbrace>\<lambda>rv. if_live_then_nonz_cap'\<rbrace>"
apply (simp add: possibleSwitchTo_def curDomain_def)
apply (wp | wpc | simp)+
apply (simp only: imp_conv_disj, wp hoare_vcg_all_lift hoare_vcg_disj_lift)
apply (wp threadGet_wp)+
apply (auto simp: obj_at'_def projectKOs)
done
crunches possibleSwitchTo
for ifunsafe[wp]: if_unsafe_then_cap'
and idle'[wp]: valid_idle'
and global_refs'[wp]: valid_global_refs'
and arch_state'[wp]: valid_arch_state'
and irq_node'[wp]: "\<lambda>s. P (irq_node' s)"
and typ_at'[wp]: "\<lambda>s. P (typ_at' T p s)"
and irq_handlers'[wp]: valid_irq_handlers'
and irq_states'[wp]: valid_irq_states'
(simp: unless_def tcb_cte_cases_def wp: crunch_wps)
crunches sendSignal
for ct'[wp]: "\<lambda>s. P (ksCurThread s)"
and it'[wp]: "\<lambda>s. P (ksIdleThread s)"
(wp: crunch_wps simp: crunch_simps o_def)
crunches sendSignal, setBoundNotification
for irqs_masked'[wp]: "irqs_masked'"
(wp: crunch_wps getObject_inv loadObject_default_inv
simp: crunch_simps unless_def o_def
rule: irqs_masked_lift)
lemma sts_running_valid_queues:
"runnable' st \<Longrightarrow> \<lbrace> Invariants_H.valid_queues \<rbrace> setThreadState st t \<lbrace>\<lambda>_. Invariants_H.valid_queues \<rbrace>"
by (wp sts_valid_queues, clarsimp)
lemma ct_in_state_activatable_imp_simple'[simp]:
"ct_in_state' activatable' s \<Longrightarrow> ct_in_state' simple' s"
apply (simp add: ct_in_state'_def)
apply (erule pred_tcb'_weakenE)
apply (case_tac st; simp)
done
lemma setThreadState_nonqueued_state_update:
"\<lbrace>\<lambda>s. invs' s \<and> st_tcb_at' simple' t s
\<and> st \<in> {Inactive, Running, Restart, IdleThreadState}
\<and> (st \<noteq> Inactive \<longrightarrow> ex_nonz_cap_to' t s)
\<and> (t = ksIdleThread s \<longrightarrow> idle' st)
\<and> (\<not> runnable' st \<longrightarrow> sch_act_simple s)
\<and> (\<not> runnable' st \<longrightarrow> (\<forall>p. t \<notin> set (ksReadyQueues s p)))\<rbrace>
setThreadState st t \<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre, wp valid_irq_node_lift
sts_valid_queues
setThreadState_ct_not_inQ)
apply (clarsimp simp: pred_tcb_at')
apply (rule conjI, fastforce simp: valid_tcb_state'_def)
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (drule bound_tcb_at_state_refs_ofD')
apply (rule conjI, fastforce)
apply clarsimp
apply (erule delta_sym_refs)
apply (fastforce split: if_split_asm)
apply (fastforce simp: symreftype_inverse' tcb_bound_refs'_def
split: if_split_asm)
done
lemma cteDeleteOne_reply_cap_to'[wp]:
"\<lbrace>ex_nonz_cap_to' p and
cte_wp_at' (\<lambda>c. isReplyCap (cteCap c)) slot\<rbrace>
cteDeleteOne slot
\<lbrace>\<lambda>rv. ex_nonz_cap_to' p\<rbrace>"
apply (simp add: cteDeleteOne_def ex_nonz_cap_to'_def unless_def)
apply (rule hoare_seq_ext [OF _ getCTE_sp])
apply (rule hoare_assume_pre)
apply (subgoal_tac "isReplyCap (cteCap cte)")
apply (wp hoare_vcg_ex_lift emptySlot_cte_wp_cap_other isFinalCapability_inv
| clarsimp simp: finaliseCap_def isCap_simps
| wp (once) hoare_drop_imps)+
apply (fastforce simp: cte_wp_at_ctes_of)
apply (clarsimp simp: cte_wp_at_ctes_of isCap_simps)
done
crunches setupCallerCap, possibleSwitchTo, asUser, doIPCTransfer
for vms'[wp]: "valid_machine_state'"
(wp: crunch_wps simp: zipWithM_x_mapM_x)
crunch nonz_cap_to'[wp]: cancelSignal "ex_nonz_cap_to' p"
(wp: crunch_wps simp: crunch_simps)
lemma cancelIPC_nonz_cap_to'[wp]:
"\<lbrace>ex_nonz_cap_to' p\<rbrace> cancelIPC t \<lbrace>\<lambda>rv. ex_nonz_cap_to' p\<rbrace>"
apply (simp add: cancelIPC_def getThreadReplySlot_def Let_def
capHasProperty_def)
apply (wp threadSet_cap_to'
| wpc
| simp
| clarsimp elim!: cte_wp_at_weakenE'
| rule hoare_post_imp[where Q="\<lambda>rv. ex_nonz_cap_to' p"])+
done
crunches activateIdleThread, getThreadReplySlot, isFinalCapability
for nosch[wp]: "\<lambda>s. P (ksSchedulerAction s)"
(simp: Let_def)
crunches setupCallerCap, asUser, setMRs, doIPCTransfer, possibleSwitchTo
for pspace_domain_valid[wp]: "pspace_domain_valid"
(wp: crunch_wps simp: zipWithM_x_mapM_x)
crunches setupCallerCap, doIPCTransfer, possibleSwitchTo
for ksDomScheduleIdx[wp]: "\<lambda>s. P (ksDomScheduleIdx s)"
(wp: crunch_wps simp: zipWithM_x_mapM)
lemma setThreadState_not_rct[wp]:
"\<lbrace>\<lambda>s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>
setThreadState st t
\<lbrace>\<lambda>_ s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>"
apply (simp add: setThreadState_def)
apply (wp)
apply (rule hoare_post_imp [OF _ rescheduleRequired_notresume], simp)
apply (simp)
apply (wp)+
apply simp
done
lemma cancelAllIPC_not_rct[wp]:
"\<lbrace>\<lambda>s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>
cancelAllIPC epptr
\<lbrace>\<lambda>_ s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>"
apply (simp add: cancelAllIPC_def)
apply (wp | wpc)+
apply (rule hoare_post_imp [OF _ rescheduleRequired_notresume], simp)
apply simp
apply (rule mapM_x_wp_inv)
apply (wp)+
apply (rule hoare_post_imp [OF _ rescheduleRequired_notresume], simp)
apply simp
apply (rule mapM_x_wp_inv)
apply (wp)+
apply (wp hoare_vcg_all_lift hoare_drop_imp)
apply (simp_all)
done
lemma cancelAllSignals_not_rct[wp]:
"\<lbrace>\<lambda>s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>
cancelAllSignals epptr
\<lbrace>\<lambda>_ s. ksSchedulerAction s \<noteq> ResumeCurrentThread \<rbrace>"
apply (simp add: cancelAllSignals_def)
apply (wp | wpc)+
apply (rule hoare_post_imp [OF _ rescheduleRequired_notresume], simp)
apply simp
apply (rule mapM_x_wp_inv)
apply (wp)+
apply (wp hoare_vcg_all_lift hoare_drop_imp)
apply (simp_all)
done
crunch not_rct[wp]: finaliseCapTrue_standin "\<lambda>s. ksSchedulerAction s \<noteq> ResumeCurrentThread"
(simp: Let_def)
declare setEndpoint_ct' [wp]
lemma cancelIPC_ResumeCurrentThread_imp_notct[wp]:
"\<lbrace>\<lambda>s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>
cancelIPC t
\<lbrace>\<lambda>_ s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>"
(is "\<lbrace>?PRE t'\<rbrace> _ \<lbrace>_\<rbrace>")
proof -
have aipc: "\<And>t t' ntfn.
\<lbrace>\<lambda>s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>
cancelSignal t ntfn
\<lbrace>\<lambda>_ s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>"
apply (simp add: cancelSignal_def)
apply (wp)[1]
apply (wp hoare_convert_imp)+
apply (rule_tac P="\<lambda>s. ksSchedulerAction s \<noteq> ResumeCurrentThread"
in hoare_weaken_pre)
apply (wpc)
apply (wp | simp)+
apply (wpc, wp+)
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply (wp)
apply simp
done
have cdo: "\<And>t t' slot.
\<lbrace>\<lambda>s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>
cteDeleteOne slot
\<lbrace>\<lambda>_ s. ksSchedulerAction s = ResumeCurrentThread \<longrightarrow> ksCurThread s \<noteq> t'\<rbrace>"
apply (simp add: cteDeleteOne_def unless_def split_def)
apply (wp)
apply (wp hoare_convert_imp)[1]
apply (wp)
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply (wp hoare_convert_imp | simp)+
done
show ?thesis
apply (simp add: cancelIPC_def Let_def)
apply (wp, wpc)
prefer 4 \<comment> \<open>state = Running\<close>
apply wp
prefer 7 \<comment> \<open>state = Restart\<close>
apply wp
apply (wp)+
apply (wp hoare_convert_imp)[1]
apply (wpc, wp+)
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply (wp cdo)+
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply ((wp aipc hoare_convert_imp)+)[6]
apply (wp)
apply (wp hoare_convert_imp)[1]
apply (wpc, wp+)
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply (wp)
apply (rule_tac Q="\<lambda>_. ?PRE t'" in hoare_post_imp, clarsimp)
apply (wp)
apply simp
done
qed
crunch nosch[wp]: setMRs "\<lambda>s. P (ksSchedulerAction s)"
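(* sendSignal preserves invs'.  The proof below goes by cases on the state of the
   notification object and, when the notification is idle, on whether a TCB is
   bound to it. *)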
lemma sai_invs'[wp]:
"\<lbrace>invs' and ex_nonz_cap_to' ntfnptr\<rbrace>
sendSignal ntfnptr badge \<lbrace>\<lambda>y. invs'\<rbrace>"
unfolding sendSignal_def
apply (rule hoare_seq_ext[OF _ get_ntfn_sp'])
apply (case_tac "ntfnObj nTFN", simp_all)
prefer 3
apply (rename_tac list)
apply (case_tac list,
simp_all split del: if_split
add: setMessageInfo_def)[1]
apply (wp hoare_convert_imp [OF asUser_nosch]
hoare_convert_imp [OF setMRs_sch_act])+
apply (clarsimp simp:conj_comms)
apply (simp add: invs'_def valid_state'_def)
apply (wp valid_irq_node_lift sts_valid_objs' setThreadState_ct_not_inQ
sts_valid_queues [where st="Structures_H.thread_state.Running", simplified]
set_ntfn_valid_objs' cur_tcb_lift sts_st_tcb'
hoare_convert_imp [OF setNotification_nosch]
| simp split del: if_split)+
apply (intro conjI[rotated];
(solves \<open>clarsimp simp: invs'_def valid_state'_def valid_pspace'_def\<close>)?)
apply (clarsimp simp: invs'_def valid_state'_def split del: if_split)
apply (drule(1) ct_not_in_ntfnQueue, simp+)
apply clarsimp
apply (frule ko_at_valid_objs', clarsimp)
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def
split: list.splits)
apply (clarsimp simp: invs'_def valid_state'_def)
apply (clarsimp simp: st_tcb_at_refs_of_rev' valid_idle'_def pred_tcb_at'_def
dest!: sym_refs_ko_atD' sym_refs_st_tcb_atD' sym_refs_obj_atD'
split: list.splits)
apply (clarsimp simp: invs'_def valid_state'_def valid_pspace'_def)
apply (frule(1) ko_at_valid_objs')
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def
split: list.splits option.splits)
apply (clarsimp elim!: if_live_then_nonz_capE' simp:invs'_def valid_state'_def)
apply (drule(1) sym_refs_ko_atD')
apply (clarsimp elim!: ko_wp_at'_weakenE
intro!: refs_of_live')
apply (clarsimp split del: if_split)+
apply (frule ko_at_valid_objs', clarsimp)
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def split del: if_split)
apply (frule invs_sym')
apply (drule(1) sym_refs_obj_atD')
apply (clarsimp split del: if_split cong: if_cong
simp: st_tcb_at_refs_of_rev' ep_redux_simps' ntfn_bound_refs'_def)
apply (frule st_tcb_at_state_refs_ofD')
apply (erule delta_sym_refs)
apply (fastforce simp: split: if_split_asm)
apply (fastforce simp: tcb_bound_refs'_def set_eq_subset symreftype_inverse'
split: if_split_asm)
apply (clarsimp simp:invs'_def)
apply (frule ko_at_valid_objs')
apply (clarsimp simp: valid_pspace'_def valid_state'_def)
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def split del: if_split)
apply (clarsimp simp:invs'_def valid_state'_def valid_pspace'_def)
apply (frule(1) ko_at_valid_objs')
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def
split: list.splits option.splits)
apply (case_tac "ntfnBoundTCB nTFN", simp_all)
apply (wp set_ntfn_minor_invs')
apply (fastforce simp: valid_ntfn'_def invs'_def valid_state'_def
elim!: obj_at'_weakenE
dest!: global'_no_ex_cap)
apply (wp add: hoare_convert_imp [OF asUser_nosch]
hoare_convert_imp [OF setMRs_sch_act]
setThreadState_nonqueued_state_update sts_st_tcb'
del: cancelIPC_simple)
apply (clarsimp | wp cancelIPC_ct')+
apply (wp set_ntfn_minor_invs' gts_wp' | clarsimp)+
apply (frule pred_tcb_at')
by (wp set_ntfn_minor_invs'
| rule conjI
| clarsimp elim!: st_tcb_ex_cap''
| fastforce simp: invs'_def valid_state'_def receiveBlocked_def projectKOs
valid_obj'_def valid_ntfn'_def
split: thread_state.splits
dest!: global'_no_ex_cap st_tcb_ex_cap'' ko_at_valid_objs'
| fastforce simp: receiveBlocked_def projectKOs pred_tcb_at'_def obj_at'_def
dest!: invs_rct_ct_activatable'
split: thread_state.splits)+
lemma rfk_corres:
"corres dc (tcb_at t and invs) (tcb_at' t and invs')
(reply_from_kernel t r) (replyFromKernel t r)"
apply (case_tac r)
apply (clarsimp simp: replyFromKernel_def reply_from_kernel_def
badge_register_def badgeRegister_def)
apply (rule corres_guard_imp)
apply (rule corres_split_eqr [OF _ lipcb_corres])
apply (rule corres_split [OF _ user_setreg_corres])
apply (rule corres_split_eqr [OF _ set_mrs_corres])
apply (rule set_mi_corres)
apply (wp hoare_case_option_wp hoare_valid_ipc_buffer_ptr_typ_at'
| clarsimp)+
done
lemma rfk_invs':
"\<lbrace>invs' and tcb_at' t\<rbrace> replyFromKernel t r \<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: replyFromKernel_def)
apply (cases r)
apply (wp | clarsimp)+
done
crunch nosch[wp]: replyFromKernel "\<lambda>s. P (ksSchedulerAction s)"
lemma complete_signal_corres:
"corres dc (ntfn_at ntfnptr and tcb_at tcb and pspace_aligned and valid_objs
\<comment> \<open>and obj_at (\<lambda>ko. ko = Notification ntfn \<and> Ipc_A.isActive ntfn) ntfnptr\<close>)
(ntfn_at' ntfnptr and tcb_at' tcb and valid_pspace' and obj_at' isActive ntfnptr)
(complete_signal ntfnptr tcb) (completeSignal ntfnptr tcb)"
apply (simp add: complete_signal_def completeSignal_def)
apply (rule corres_guard_imp)
apply (rule_tac R'="\<lambda>ntfn. ntfn_at' ntfnptr and tcb_at' tcb and valid_pspace'
and valid_ntfn' ntfn and (\<lambda>_. isActive ntfn)"
in corres_split [OF _ get_ntfn_corres])
apply (rule corres_gen_asm2)
apply (case_tac "ntfn_obj rv")
apply (clarsimp simp: ntfn_relation_def isActive_def
split: ntfn.splits Structures_H.notification.splits)+
apply (rule corres_guard2_imp)
apply (simp add: badgeRegister_def badge_register_def)
apply (rule corres_split[OF set_ntfn_corres user_setreg_corres])
apply (clarsimp simp: ntfn_relation_def)
apply (wp set_simple_ko_valid_objs get_simple_ko_wp getNotification_wp | clarsimp simp: valid_ntfn'_def)+
apply (clarsimp simp: valid_pspace'_def)
apply (frule_tac P="(\<lambda>k. k = ntfn)" in obj_at_valid_objs', assumption)
apply (clarsimp simp: projectKOs valid_obj'_def valid_ntfn'_def obj_at'_def)
done
lemma do_nbrecv_failed_transfer_corres:
"corres dc (tcb_at thread)
(tcb_at' thread)
(do_nbrecv_failed_transfer thread)
(doNBRecvFailedTransfer thread)"
unfolding do_nbrecv_failed_transfer_def doNBRecvFailedTransfer_def
by (simp add: badgeRegister_def badge_register_def, rule user_setreg_corres)
lemma receive_ipc_corres:
assumes "is_ep_cap cap" and "cap_relation cap cap'"
shows "
corres dc (einvs and valid_sched and tcb_at thread and valid_cap cap and ex_nonz_cap_to thread
and cte_wp_at (\<lambda>c. c = cap.NullCap) (thread, tcb_cnode_index 3))
(invs' and tcb_at' thread and valid_cap' cap')
(receive_ipc thread cap isBlocking) (receiveIPC thread cap' isBlocking)"
apply (insert assms)
apply (simp add: receive_ipc_def receiveIPC_def
split del: if_split)
apply (case_tac cap, simp_all add: isEndpointCap_def)
apply (rename_tac word1 word2 right)
apply clarsimp
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ get_ep_corres])
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ gbn_corres])
apply (rule_tac r'="ntfn_relation" in corres_split)
apply (rule corres_if)
apply (clarsimp simp: ntfn_relation_def Ipc_A.isActive_def Endpoint_H.isActive_def
split: Structures_A.ntfn.splits Structures_H.notification.splits)
apply clarsimp
apply (rule complete_signal_corres)
apply (rule_tac P="einvs and valid_sched and tcb_at thread and
ep_at word1 and valid_ep ep and
obj_at (\<lambda>k. k = Endpoint ep) word1
and cte_wp_at (\<lambda>c. c = cap.NullCap) (thread, tcb_cnode_index 3)
and ex_nonz_cap_to thread" and
P'="invs' and tcb_at' thread and ep_at' word1 and
valid_ep' epa"
in corres_inst)
apply (case_tac ep)
\<comment> \<open>IdleEP\<close>
apply (simp add: ep_relation_def)
apply (rule corres_guard_imp)
apply (case_tac isBlocking; simp)
apply (rule corres_split [OF _ sts_corres])
apply (rule set_ep_corres)
apply (simp add: ep_relation_def)
apply simp
apply wp+
apply (rule corres_guard_imp, rule do_nbrecv_failed_transfer_corres, simp)
apply simp
apply (clarsimp simp add: invs_def valid_state_def valid_pspace_def
valid_tcb_state_def st_tcb_at_tcb_at)
apply auto[1]
\<comment> \<open>SendEP\<close>
apply (simp add: ep_relation_def)
apply (rename_tac list)
apply (rule_tac F="list \<noteq> []" in corres_req)
apply (clarsimp simp: valid_ep_def)
apply (case_tac list, simp_all split del: if_split)[1]
apply (rule corres_guard_imp)
apply (rule corres_split [OF _ set_ep_corres])
apply (rule corres_split [OF _ gts_corres])
apply (rule_tac
F="\<exists>data.
sender_state =
Structures_A.thread_state.BlockedOnSend word1 data"
in corres_gen_asm)
apply (clarsimp simp: isSend_def case_bool_If
case_option_If if3_fold
split del: if_split cong: if_cong)
apply (rule corres_split [OF _ dit_corres])
apply (simp split del: if_split cong: if_cong)
apply (fold dc_def)[1]
apply (rule_tac P="valid_objs and valid_mdb and valid_list
and valid_sched
and cur_tcb
and valid_reply_caps
and pspace_aligned and pspace_distinct
and st_tcb_at (Not \<circ> awaiting_reply) a
and st_tcb_at (Not \<circ> halted) a
and tcb_at thread and valid_reply_masters
and cte_wp_at (\<lambda>c. c = cap.NullCap)
(thread, tcb_cnode_index 3)"
and P'="tcb_at' a and tcb_at' thread and cur_tcb'
and Invariants_H.valid_queues
and valid_queues'
and valid_pspace'
and valid_objs'
and (\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s)"
in corres_guard_imp [OF corres_if])
apply (simp add: fault_rel_optionation_def)
apply (rule corres_if2 [OF _ setup_caller_corres sts_corres])
apply simp
apply simp
apply (rule corres_split [OF _ sts_corres])
apply (rule possibleSwitchTo_corres)
apply simp
apply (wp sts_st_tcb_at' set_thread_state_runnable_weak_valid_sched_action
| simp)+
apply (wp sts_st_tcb_at'_cases sts_valid_queues setThreadState_valid_queues'
setThreadState_st_tcb
| simp)+
apply (clarsimp simp: st_tcb_at_tcb_at st_tcb_def2 valid_sched_def
valid_sched_action_def)
apply (clarsimp split: if_split_asm)
apply (clarsimp | wp do_ipc_transfer_tcb_caps)+
apply (rule_tac Q="\<lambda>_ s. sch_act_wf (ksSchedulerAction s) s"
in hoare_post_imp, erule sch_act_wf_weak)
apply (wp sts_st_tcb' gts_st_tcb_at | simp)+
apply (case_tac lista, simp_all add: ep_relation_def)[1]
apply (simp cong: list.case_cong)
apply wp
apply simp
apply (wp weak_sch_act_wf_lift_linear setEndpoint_valid_mdb' set_ep_valid_objs')
apply (clarsimp split: list.split)
apply (clarsimp simp add: invs_def valid_state_def st_tcb_at_tcb_at)
apply (clarsimp simp add: valid_ep_def valid_pspace_def)
apply (drule(1) sym_refs_obj_atD[where P="\<lambda>ko. ko = Endpoint e" for e])
apply (fastforce simp: st_tcb_at_refs_of_rev elim: st_tcb_weakenE)
apply (auto simp: valid_ep'_def invs'_def valid_state'_def split: list.split)[1]
\<comment> \<open>RecvEP\<close>
apply (simp add: ep_relation_def)
apply (rule_tac corres_guard_imp)
apply (case_tac isBlocking; simp)
apply (rule corres_split [OF _ sts_corres])
apply (rule set_ep_corres)
apply (simp add: ep_relation_def)
apply simp
apply wp+
apply (rule corres_guard_imp, rule do_nbrecv_failed_transfer_corres, simp)
apply simp
apply (clarsimp simp: valid_tcb_state_def)
apply (clarsimp simp add: valid_tcb_state'_def)
apply (rule corres_option_split[rotated 2])
apply (rule get_ntfn_corres)
apply clarsimp
apply (rule corres_trivial, simp add: ntfn_relation_def default_notification_def
default_ntfn_def)
apply (wp get_simple_ko_wp[where f=Notification] getNotification_wp gbn_wp gbn_wp'
hoare_vcg_all_lift hoare_vcg_imp_lift hoare_vcg_if_lift
| wpc | simp add: ep_at_def2[symmetric, simplified] | clarsimp)+
apply (clarsimp simp: valid_cap_def invs_psp_aligned invs_valid_objs pred_tcb_at_def
valid_obj_def valid_tcb_def valid_bound_ntfn_def
dest!: invs_valid_objs
elim!: obj_at_valid_objsE
split: option.splits)
apply (auto simp: valid_cap'_def invs_valid_pspace' valid_obj'_def valid_tcb'_def
valid_bound_ntfn'_def obj_at'_def projectKOs pred_tcb_at'_def
dest!: invs_valid_objs' obj_at_valid_objs'
split: option.splits)
done
lemma receive_signal_corres:
"\<lbrakk> is_ntfn_cap cap; cap_relation cap cap' \<rbrakk> \<Longrightarrow>
corres dc (invs and st_tcb_at active thread and valid_cap cap and ex_nonz_cap_to thread)
(invs' and tcb_at' thread and valid_cap' cap')
(receive_signal thread cap isBlocking) (receiveSignal thread cap' isBlocking)"
apply (simp add: receive_signal_def receiveSignal_def)
apply (case_tac cap, simp_all add: isEndpointCap_def)
apply (rename_tac word1 word2 rights)
apply (rule corres_guard_imp)
apply (rule_tac R="\<lambda>rv. invs and tcb_at thread and st_tcb_at active thread and
ntfn_at word1 and ex_nonz_cap_to thread and
valid_ntfn rv and
obj_at (\<lambda>k. k = Notification rv) word1" and
R'="\<lambda>rv'. invs' and tcb_at' thread and ntfn_at' word1 and
valid_ntfn' rv'"
in corres_split [OF _ get_ntfn_corres])
apply clarsimp
apply (case_tac "ntfn_obj rv")
\<comment> \<open>IdleNtfn\<close>
apply (simp add: ntfn_relation_def)
apply (rule corres_guard_imp)
apply (case_tac isBlocking; simp)
apply (rule corres_split [OF _ sts_corres])
apply (rule set_ntfn_corres)
apply (simp add: ntfn_relation_def)
apply simp
apply wp+
apply (rule corres_guard_imp, rule do_nbrecv_failed_transfer_corres, simp+)
\<comment> \<open>WaitingNtfn\<close>
apply (simp add: ntfn_relation_def)
apply (rule corres_guard_imp)
apply (case_tac isBlocking; simp)
apply (rule corres_split[OF _ sts_corres])
apply (rule set_ntfn_corres)
apply (simp add: ntfn_relation_def)
apply simp
apply wp+
apply (rule corres_guard_imp)
apply (rule do_nbrecv_failed_transfer_corres, simp+)
\<comment> \<open>ActiveNtfn\<close>
apply (simp add: ntfn_relation_def)
apply (rule corres_guard_imp)
apply (simp add: badgeRegister_def badge_register_def)
apply (rule corres_split [OF _ user_setreg_corres])
apply (rule set_ntfn_corres)
apply (simp add: ntfn_relation_def)
apply wp+
apply (fastforce simp: invs_def valid_state_def valid_pspace_def
elim!: st_tcb_weakenE)
apply (clarsimp simp: invs'_def valid_state'_def valid_pspace'_def)
apply wp+
apply (clarsimp simp add: ntfn_at_def2 valid_cap_def st_tcb_at_tcb_at)
apply (clarsimp simp add: valid_cap'_def)
done
lemma tg_sp':
"\<lbrace>P\<rbrace> threadGet f p \<lbrace>\<lambda>t. obj_at' (\<lambda>t'. f t' = t) p and P\<rbrace>"
including no_pre
apply (simp add: threadGet_def)
apply wp
apply (rule hoare_strengthen_post)
apply (rule getObject_tcb_sp)
apply clarsimp
apply (erule obj_at'_weakenE)
apply simp
done
declare lookup_cap_valid' [wp]
lemma send_fault_ipc_corres:
"valid_fault f \<Longrightarrow> fr f f' \<Longrightarrow>
corres (fr \<oplus> dc)
(einvs and st_tcb_at active thread and ex_nonz_cap_to thread)
(invs' and sch_act_not thread and tcb_at' thread)
(send_fault_ipc thread f) (sendFaultIPC thread f')"
apply (simp add: send_fault_ipc_def sendFaultIPC_def
liftE_bindE Let_def)
apply (rule corres_guard_imp)
apply (rule corres_split [where r'="\<lambda>fh fh'. fh = to_bl fh'"])
apply simp
apply (rule corres_splitEE)
prefer 2
apply (rule corres_cap_fault)
apply (rule lookup_cap_corres, rule refl)
apply (rule_tac P="einvs and st_tcb_at active thread
and valid_cap handler_cap and ex_nonz_cap_to thread"
and P'="invs' and tcb_at' thread and sch_act_not thread
and valid_cap' handlerCap"
in corres_inst)
apply (case_tac handler_cap,
simp_all add: isCap_defs lookup_failure_map_def
case_bool_If If_rearrage
split del: if_split cong: if_cong)[1]
apply (rule corres_guard_imp)
apply (rule corres_if2 [OF refl])
apply (simp add: dc_def[symmetric])
apply (rule corres_split [OF send_ipc_corres threadset_corres], simp_all)[1]
apply (simp add: tcb_relation_def fault_rel_optionation_def exst_same_def)+
apply (wp thread_set_invs_trivial thread_set_no_change_tcb_state
thread_set_typ_at ep_at_typ_at ex_nonz_cap_to_pres
thread_set_cte_wp_at_trivial thread_set_not_state_valid_sched
| simp add: tcb_cap_cases_def)+
apply ((wp threadSet_invs_trivial threadSet_tcb'
| simp add: tcb_cte_cases_def
| wp (once) sch_act_sane_lift)+)[1]
apply (rule corres_trivial, simp add: lookup_failure_map_def)
apply (clarsimp simp: st_tcb_at_tcb_at split: if_split)
apply (simp add: valid_cap_def)
apply (clarsimp simp: valid_cap'_def inQ_def)
apply auto[1]
apply (clarsimp simp: lookup_failure_map_def)
apply wp+
apply (rule threadget_corres)
apply (simp add: tcb_relation_def)
apply wp+
apply (fastforce elim: st_tcb_at_tcb_at)
apply fastforce
done
lemma gets_the_noop_corres:
assumes P: "\<And>s. P s \<Longrightarrow> f s \<noteq> None"
shows "corres dc P P' (gets_the f) (return x)"
apply (clarsimp simp: corres_underlying_def gets_the_def
return_def gets_def bind_def get_def)
apply (clarsimp simp: assert_opt_def return_def dest!: P)
done
lemma hdf_corres:
"corres dc (tcb_at thread)
(tcb_at' thread and (\<lambda>s. weak_sch_act_wf (ksSchedulerAction s) s))
(handle_double_fault thread f ft)
(handleDoubleFault thread f' ft')"
apply (simp add: handle_double_fault_def handleDoubleFault_def)
apply (rule corres_guard_imp)
apply (subst bind_return [symmetric],
rule corres_split' [OF sts_corres])
apply simp
apply (rule corres_noop2)
apply (simp add: exs_valid_def return_def)
apply (rule hoare_eq_P)
apply wp
apply (rule asUser_inv)
apply (rule getRestartPC_inv)
apply (wp no_fail_getRestartPC)+
apply (wp|simp)+
done
crunch tcb' [wp]: sendFaultIPC "tcb_at' t" (wp: crunch_wps)
crunch typ_at'[wp]: receiveIPC "\<lambda>s. P (typ_at' T p s)"
(wp: crunch_wps)
lemmas receiveIPC_typ_ats[wp] = typ_at_lifts [OF receiveIPC_typ_at']
crunch typ_at'[wp]: receiveSignal "\<lambda>s. P (typ_at' T p s)"
(wp: crunch_wps)
lemmas receiveAIPC_typ_ats[wp] = typ_at_lifts [OF receiveSignal_typ_at']
declare cart_singleton_empty[simp]
declare cart_singleton_empty2[simp]
crunch aligned'[wp]: setupCallerCap "pspace_aligned'"
(wp: crunch_wps)
crunch distinct'[wp]: setupCallerCap "pspace_distinct'"
(wp: crunch_wps)
crunch cur_tcb[wp]: setupCallerCap "cur_tcb'"
(wp: crunch_wps)
lemma setupCallerCap_state_refs_of[wp]:
"\<lbrace>\<lambda>s. P ((state_refs_of' s) (sender := {r \<in> state_refs_of' s sender. snd r = TCBBound}))\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv s. P (state_refs_of' s)\<rbrace>"
apply (simp add: setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def)
apply (wp hoare_drop_imps)
apply (simp add: fun_upd_def cong: if_cong)
done
crunch sch_act_wf: setupCallerCap
"\<lambda>s. sch_act_wf (ksSchedulerAction s) s"
(wp: crunch_wps ssa_sch_act sts_sch_act rule: sch_act_wf_lift)
lemma setCTE_valid_queues[wp]:
"\<lbrace>Invariants_H.valid_queues\<rbrace> setCTE ptr val \<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
by (wp valid_queues_lift setCTE_pred_tcb_at')
crunch vq[wp]: cteInsert "Invariants_H.valid_queues"
(wp: crunch_wps)
crunch vq[wp]: getThreadCallerSlot "Invariants_H.valid_queues"
(wp: crunch_wps)
crunch vq[wp]: getThreadReplySlot "Invariants_H.valid_queues"
(wp: crunch_wps)
lemma setupCallerCap_vq[wp]:
"\<lbrace>Invariants_H.valid_queues and (\<lambda>s. \<forall>p. send \<notin> set (ksReadyQueues s p))\<rbrace>
setupCallerCap send recv grant \<lbrace>\<lambda>_. Invariants_H.valid_queues\<rbrace>"
apply (simp add: setupCallerCap_def)
apply (wp crunch_wps sts_valid_queues)
apply (fastforce simp: valid_queues_def obj_at'_def inQ_def)
done
crunch vq'[wp]: setupCallerCap "valid_queues'"
(wp: crunch_wps)
lemma is_derived_ReplyCap' [simp]:
"\<And>m p g. is_derived' m p (capability.ReplyCap t False g) =
(\<lambda>c. \<exists> g. c = capability.ReplyCap t True g)"
apply (subst fun_eq_iff)
apply clarsimp
apply (case_tac x, simp_all add: is_derived'_def isCap_simps
badge_derived'_def
vsCapRef_def)
done
lemma unique_master_reply_cap':
"\<And>c t. isReplyCap c \<and> capReplyMaster c \<and> capTCBPtr c = t \<longleftrightarrow>
(\<exists>g . c = capability.ReplyCap t True g)"
by (fastforce simp: isCap_simps conj_comms)
lemma getSlotCap_cte_wp_at:
"\<lbrace>\<top>\<rbrace> getSlotCap sl \<lbrace>\<lambda>rv. cte_wp_at' (\<lambda>c. cteCap c = rv) sl\<rbrace>"
apply (simp add: getSlotCap_def)
apply (wp getCTE_wp)
apply (clarsimp simp: cte_wp_at_ctes_of)
done
crunch no_0_obj'[wp]: setThreadState no_0_obj'
lemma setupCallerCap_vp[wp]:
"\<lbrace>valid_pspace' and tcb_at' sender and tcb_at' rcvr\<rbrace>
setupCallerCap sender rcvr grant \<lbrace>\<lambda>rv. valid_pspace'\<rbrace>"
apply (simp add: valid_pspace'_def setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv getSlotCap_def)
apply (wp getCTE_wp)
apply (rule_tac Q="\<lambda>_. valid_pspace' and
tcb_at' sender and tcb_at' rcvr"
in hoare_post_imp)
apply (clarsimp simp: valid_cap'_def o_def cte_wp_at_ctes_of isCap_simps
valid_pspace'_def)
apply (frule(1) ctes_of_valid', simp add: valid_cap'_def capAligned_def)
apply clarsimp
apply (wp | simp add: valid_pspace'_def valid_tcb_state'_def)+
done
declare haskell_assert_inv[wp del]
lemma setupCallerCap_iflive[wp]:
"\<lbrace>if_live_then_nonz_cap' and ex_nonz_cap_to' sender\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv. if_live_then_nonz_cap'\<rbrace>"
unfolding setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv
by (wp getSlotCap_cte_wp_at
| simp add: unique_master_reply_cap'
| strengthen eq_imp_strg
| wp (once) hoare_drop_imp[where f="getCTE rs" for rs])+
lemma setupCallerCap_ifunsafe[wp]:
"\<lbrace>if_unsafe_then_cap' and valid_objs' and
ex_nonz_cap_to' rcvr and tcb_at' rcvr\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv. if_unsafe_then_cap'\<rbrace>"
unfolding setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv
apply (wp getSlotCap_cte_wp_at
| simp add: unique_master_reply_cap' | strengthen eq_imp_strg
| wp (once) hoare_drop_imp[where f="getCTE rs" for rs])+
apply (rule_tac Q="\<lambda>rv. valid_objs' and tcb_at' rcvr and ex_nonz_cap_to' rcvr"
in hoare_post_imp)
apply (clarsimp simp: ex_nonz_tcb_cte_caps' tcbCallerSlot_def
objBits_def objBitsKO_def dom_def cte_level_bits_def)
apply (wp sts_valid_objs' | simp)+
apply (clarsimp simp: valid_tcb_state'_def)+
done
lemma setupCallerCap_global_refs'[wp]:
"\<lbrace>valid_global_refs'\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv. valid_global_refs'\<rbrace>"
unfolding setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv
apply (wp getSlotCap_cte_wp_at
| simp add: o_def unique_master_reply_cap'
| strengthen eq_imp_strg
| wp (once) getCTE_wp | clarsimp simp: cte_wp_at_ctes_of)+
(* at setThreadState *)
apply (rule_tac Q="\<lambda>_. valid_global_refs'" in hoare_post_imp, wpsimp+)
done
crunch valid_arch'[wp]: setupCallerCap "valid_arch_state'"
(wp: hoare_drop_imps)
crunch typ'[wp]: setupCallerCap "\<lambda>s. P (typ_at' T p s)"
crunch irq_node'[wp]: setupCallerCap "\<lambda>s. P (irq_node' s)"
(wp: hoare_drop_imps)
lemma setupCallerCap_irq_handlers'[wp]:
"\<lbrace>valid_irq_handlers'\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv. valid_irq_handlers'\<rbrace>"
unfolding setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv
by (wp hoare_drop_imps | simp)+
lemma setupCallerCap_ioports'[wp]:
"\<lbrace>valid_ioports'\<rbrace>
setupCallerCap sender rcvr grant
\<lbrace>\<lambda>rv. valid_ioports'\<rbrace>"
unfolding setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def locateSlot_conv
by (wp hoare_drop_imps | simp add: isCap_simps)+
lemma cteInsert_cap_to':
"\<lbrace>ex_nonz_cap_to' p and cte_wp_at' (\<lambda>c. cteCap c = NullCap) dest\<rbrace>
cteInsert cap src dest
\<lbrace>\<lambda>rv. ex_nonz_cap_to' p\<rbrace>"
apply (simp add: cteInsert_def ex_nonz_cap_to'_def
updateCap_def setUntypedCapAsFull_def
split del: if_split)
apply (rule hoare_pre, rule hoare_vcg_ex_lift)
apply (wp updateMDB_weak_cte_wp_at
setCTE_weak_cte_wp_at
| simp
| rule hoare_drop_imps)+
apply (wp getCTE_wp)
apply clarsimp
apply (rule_tac x=cref in exI)
apply (rule conjI)
apply (clarsimp simp: cte_wp_at_ctes_of)+
done
crunch cap_to'[wp]: setExtraBadge "ex_nonz_cap_to' p"
crunch cap_to'[wp]: doIPCTransfer "ex_nonz_cap_to' p"
(ignore: transferCapsToSlots
wp: crunch_wps transferCapsToSlots_pres2 cteInsert_cap_to' hoare_vcg_const_Ball_lift
simp: zipWithM_x_mapM ball_conj_distrib)
lemma st_tcb_idle':
"\<lbrakk>valid_idle' s; st_tcb_at' P t s\<rbrakk> \<Longrightarrow>
(t = ksIdleThread s) \<longrightarrow> P IdleThreadState"
by (clarsimp simp: valid_idle'_def pred_tcb_at'_def obj_at'_def)
crunch idle'[wp]: getThreadCallerSlot "valid_idle'"
crunch idle'[wp]: getThreadReplySlot "valid_idle'"
crunch it[wp]: setupCallerCap "\<lambda>s. P (ksIdleThread s)"
(simp: updateObject_cte_inv wp: crunch_wps)
lemma setupCallerCap_idle'[wp]:
"\<lbrace>valid_idle' and valid_pspace' and
(\<lambda>s. st \<noteq> ksIdleThread s \<and> rt \<noteq> ksIdleThread s)\<rbrace>
setupCallerCap st rt gr
\<lbrace>\<lambda>_. valid_idle'\<rbrace>"
by (simp add: setupCallerCap_def capRange_def | wp hoare_drop_imps)+
crunch idle'[wp]: doIPCTransfer "valid_idle'"
(wp: crunch_wps simp: crunch_simps ignore: transferCapsToSlots)
crunch it[wp]: setExtraBadge "\<lambda>s. P (ksIdleThread s)"
crunch it[wp]: receiveIPC "\<lambda>s. P (ksIdleThread s)"
(ignore: transferCapsToSlots
wp: transferCapsToSlots_pres2 crunch_wps hoare_vcg_const_Ball_lift
simp: crunch_simps ball_conj_distrib)
crunch irq_states' [wp]: setupCallerCap valid_irq_states'
(wp: crunch_wps)
crunch irqs_masked' [wp]: receiveIPC "irqs_masked'"
(wp: crunch_wps rule: irqs_masked_lift)
crunch ct_not_inQ[wp]: getThreadCallerSlot "ct_not_inQ"
crunch ct_not_inQ[wp]: getThreadReplySlot "ct_not_inQ"
lemma setupCallerCap_ct_not_inQ[wp]:
"\<lbrace>ct_not_inQ\<rbrace> setupCallerCap sender receiver grant \<lbrace>\<lambda>_. ct_not_inQ\<rbrace>"
apply (simp add: setupCallerCap_def)
apply (wp hoare_drop_imp setThreadState_ct_not_inQ)
done
crunch ksQ'[wp]: copyMRs "\<lambda>s. P (ksReadyQueues s)"
(wp: mapM_wp' hoare_drop_imps simp: crunch_simps)
crunch ksQ[wp]: doIPCTransfer "\<lambda>s. P (ksReadyQueues s)"
(wp: hoare_drop_imps hoare_vcg_split_case_option
mapM_wp'
simp: split_def zipWithM_x_mapM)
crunch ct'[wp]: doIPCTransfer "\<lambda>s. P (ksCurThread s)"
(wp: hoare_drop_imps hoare_vcg_split_case_option
mapM_wp'
simp: split_def zipWithM_x_mapM)
lemma asUser_ct_not_inQ[wp]:
"\<lbrace>ct_not_inQ\<rbrace> asUser t m \<lbrace>\<lambda>rv. ct_not_inQ\<rbrace>"
apply (simp add: asUser_def split_def)
apply (wp hoare_drop_imps threadSet_not_inQ | simp)+
done
crunch ct_not_inQ[wp]: copyMRs "ct_not_inQ"
(wp: mapM_wp' hoare_drop_imps simp: crunch_simps)
crunch ct_not_inQ[wp]: doIPCTransfer "ct_not_inQ"
(ignore: getRestartPC setRegister transferCapsToSlots
wp: hoare_drop_imps hoare_vcg_split_case_option
mapM_wp'
simp: split_def zipWithM_x_mapM)
lemma ntfn_q_refs_no_bound_refs': "rf : ntfn_q_refs_of' (ntfnObj ob) \<Longrightarrow> rf ~: ntfn_bound_refs' (ntfnBoundTCB ob')"
by (auto simp add: ntfn_q_refs_of'_def ntfn_bound_refs'_def
split: Structures_H.ntfn.splits)
lemma completeSignal_invs:
"\<lbrace>invs' and tcb_at' tcb\<rbrace>
completeSignal ntfnptr tcb
\<lbrace>\<lambda>_. invs'\<rbrace>"
apply (simp add: completeSignal_def)
apply (rule hoare_seq_ext[OF _ get_ntfn_sp'])
apply (rule hoare_pre)
apply (wp set_ntfn_minor_invs' | wpc | simp)+
apply (rule_tac Q="\<lambda>_ s. (state_refs_of' s ntfnptr = ntfn_bound_refs' (ntfnBoundTCB ntfn))
\<and> ntfn_at' ntfnptr s
\<and> valid_ntfn' (ntfnObj_update (\<lambda>_. Structures_H.ntfn.IdleNtfn) ntfn) s
\<and> ((\<exists>y. ntfnBoundTCB ntfn = Some y) \<longrightarrow> ex_nonz_cap_to' ntfnptr s)
\<and> ntfnptr \<noteq> ksIdleThread s"
in hoare_strengthen_post)
apply ((wp hoare_vcg_ex_lift static_imp_wp | wpc | simp add: valid_ntfn'_def)+)[1]
apply (clarsimp simp: obj_at'_def state_refs_of'_def typ_at'_def ko_wp_at'_def projectKOs split: option.splits)
apply (blast dest: ntfn_q_refs_no_bound_refs')
apply wp
apply (subgoal_tac "valid_ntfn' ntfn s")
apply (subgoal_tac "ntfnptr \<noteq> ksIdleThread s")
apply (fastforce simp: valid_ntfn'_def valid_bound_tcb'_def projectKOs ko_at_state_refs_ofD'
elim: obj_at'_weakenE
if_live_then_nonz_capD'[OF invs_iflive'
obj_at'_real_def[THEN meta_eq_to_obj_eq,
THEN iffD1]])
apply (fastforce simp: valid_idle'_def pred_tcb_at'_def obj_at'_def projectKOs
dest!: invs_valid_idle')
apply (fastforce dest: invs_valid_objs' ko_at_valid_objs'
simp: valid_obj'_def projectKOs)[1]
done
lemma setupCallerCap_urz[wp]:
"\<lbrace>untyped_ranges_zero' and valid_pspace' and tcb_at' sender\<rbrace>
setupCallerCap sender t g \<lbrace>\<lambda>rv. untyped_ranges_zero'\<rbrace>"
apply (simp add: setupCallerCap_def getSlotCap_def
getThreadCallerSlot_def getThreadReplySlot_def
locateSlot_conv)
apply (wp getCTE_wp')
apply (rule_tac Q="\<lambda>_. untyped_ranges_zero' and valid_mdb' and valid_objs'" in hoare_post_imp)
apply (clarsimp simp: cte_wp_at_ctes_of cteCaps_of_def untyped_derived_eq_def
isCap_simps)
apply (wp sts_valid_pspace_hangers)
apply (clarsimp simp: valid_tcb_state'_def)
done
lemmas threadSet_urz = untyped_ranges_zero_lift[where f="cteCaps_of", OF _ threadSet_cteCaps_of]
crunch urz[wp]: doIPCTransfer "untyped_ranges_zero'"
(ignore: threadSet wp: threadSet_urz crunch_wps simp: zipWithM_x_mapM)
crunch gsUntypedZeroRanges[wp]: receiveIPC "\<lambda>s. P (gsUntypedZeroRanges s)"
(wp: crunch_wps transferCapsToSlots_pres1 simp: zipWithM_x_mapM ignore: constOnFailure)
crunch ctes_of[wp]: possibleSwitchTo "\<lambda>s. P (ctes_of s)"
(wp: crunch_wps ignore: constOnFailure)
lemmas possibleSwitchToTo_cteCaps_of[wp]
= cteCaps_of_ctes_of_lift[OF possibleSwitchTo_ctes_of]
crunches possibleSwitchTo
for ksArch[wp]: "\<lambda>s. P (ksArchState s)"
and ioports'[wp]: valid_ioports'
(wp: valid_ioports_lift' possibleSwitchTo_ctes_of crunch_wps ignore: constOnFailure)
(* t = ksCurThread s *)
lemma ri_invs' [wp]:
"\<lbrace>invs' and sch_act_not t
and ct_in_state' simple'
and st_tcb_at' simple' t
and (\<lambda>s. \<forall>p. t \<notin> set (ksReadyQueues s p))
and ex_nonz_cap_to' t
and (\<lambda>s. \<forall>r \<in> zobj_refs' cap. ex_nonz_cap_to' r s)\<rbrace>
receiveIPC t cap isBlocking
\<lbrace>\<lambda>_. invs'\<rbrace>" (is "\<lbrace>?pre\<rbrace> _ \<lbrace>_\<rbrace>")
apply (clarsimp simp: receiveIPC_def)
apply (rule hoare_seq_ext [OF _ get_ep_sp'])
apply (rule hoare_seq_ext [OF _ gbn_sp'])
apply (rule hoare_seq_ext)
(* set up precondition for old proof *)
apply (rule_tac R="ko_at' ep (capEPPtr cap) and ?pre" in hoare_vcg_if_split)
apply (wp completeSignal_invs)
apply (case_tac ep)
\<comment> \<open>endpoint = RecvEP\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre, wpc, wp valid_irq_node_lift)
apply (simp add: valid_ep'_def)
apply (wp sts_sch_act' hoare_vcg_const_Ball_lift valid_irq_node_lift
sts_valid_queues setThreadState_ct_not_inQ
asUser_urz
| simp add: doNBRecvFailedTransfer_def cteCaps_of_def)+
apply (clarsimp simp: valid_tcb_state'_def pred_tcb_at' o_def)
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (frule obj_at_valid_objs')
apply (clarsimp simp: valid_pspace'_def)
apply (drule(1) sym_refs_ko_atD')
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (drule bound_tcb_at_state_refs_ofD')
apply (clarsimp simp: st_tcb_at_refs_of_rev' valid_ep'_def
valid_obj'_def projectKOs tcb_bound_refs'_def
dest!: isCapDs)
apply (rule conjI, clarsimp)
apply (drule (1) bspec)
apply (clarsimp dest!: st_tcb_at_state_refs_ofD')
apply (clarsimp simp: set_eq_subset)
apply (rule conjI, erule delta_sym_refs)
apply (clarsimp split: if_split_asm)
apply ((case_tac tp; fastforce elim: nonempty_cross_distinct_singleton_elim)+)[2]
apply (clarsimp split: if_split_asm)
apply (fastforce simp: valid_pspace'_def global'_no_ex_cap idle'_not_queued)
\<comment> \<open>endpoint = IdleEP\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre, wpc, wp valid_irq_node_lift)
apply (simp add: valid_ep'_def)
apply (wp sts_sch_act' valid_irq_node_lift
sts_valid_queues setThreadState_ct_not_inQ
asUser_urz
| simp add: doNBRecvFailedTransfer_def cteCaps_of_def)+
apply (clarsimp simp: pred_tcb_at' valid_tcb_state'_def o_def)
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (subgoal_tac "t \<noteq> capEPPtr cap")
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (drule ko_at_state_refs_ofD')
apply (drule bound_tcb_at_state_refs_ofD')
apply (clarsimp dest!: isCapDs)
apply (rule conjI, erule delta_sym_refs)
apply (clarsimp split: if_split_asm)
apply (clarsimp simp: tcb_bound_refs'_def
dest: symreftype_inverse'
split: if_split_asm)
apply (fastforce simp: global'_no_ex_cap)
apply (clarsimp simp: obj_at'_def pred_tcb_at'_def projectKOs)
\<comment> \<open>endpoint = SendEP\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rename_tac list)
apply (case_tac list, simp_all split del: if_split)
apply (rename_tac sender queue)
apply (rule hoare_pre)
apply (wp valid_irq_node_lift hoare_drop_imps setEndpoint_valid_mdb'
set_ep_valid_objs' sts_st_tcb' sts_sch_act' sts_valid_queues
setThreadState_ct_not_inQ possibleSwitchTo_valid_queues
possibleSwitchTo_valid_queues'
possibleSwitchTo_ct_not_inQ hoare_vcg_all_lift
setEndpoint_ksQ setEndpoint_ct'
| simp add: valid_tcb_state'_def case_bool_If
case_option_If
split del: if_split cong: if_cong
| wp (once) sch_act_sane_lift hoare_vcg_conj_lift hoare_vcg_all_lift
untyped_ranges_zero_lift)+
apply (clarsimp split del: if_split simp: pred_tcb_at')
apply (frule obj_at_valid_objs')
apply (clarsimp simp: valid_pspace'_def)
apply (frule(1) ct_not_in_epQueue, clarsimp, clarsimp)
apply (drule(1) sym_refs_ko_atD')
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (clarsimp simp: projectKOs valid_obj'_def valid_ep'_def
st_tcb_at_refs_of_rev' conj_ac
split del: if_split
cong: if_cong)
apply (frule_tac t=sender in valid_queues_not_runnable'_not_ksQ)
apply (erule pred_tcb'_weakenE, clarsimp)
apply (subgoal_tac "sch_act_not sender s")
prefer 2
apply (clarsimp simp: pred_tcb_at'_def obj_at'_def)
apply (drule st_tcb_at_state_refs_ofD')
apply (simp only: conj_ac(1, 2)[where Q="sym_refs R" for R])
apply (subgoal_tac "distinct (ksIdleThread s # capEPPtr cap # t # sender # queue)")
apply (rule conjI)
apply (clarsimp simp: ep_redux_simps' cong: if_cong)
apply (erule delta_sym_refs)
apply (clarsimp split: if_split_asm)
apply (fastforce simp: tcb_bound_refs'_def
dest: symreftype_inverse'
split: if_split_asm)
apply (clarsimp simp: singleton_tuple_cartesian split: list.split
| rule conjI | drule(1) bspec
| drule st_tcb_at_state_refs_ofD' bound_tcb_at_state_refs_ofD'
| clarsimp elim!: if_live_state_refsE)+
apply (case_tac cap, simp_all add: isEndpointCap_def)
apply (clarsimp simp: global'_no_ex_cap)
apply (rule conjI
| clarsimp simp: singleton_tuple_cartesian split: list.split
| clarsimp elim!: if_live_state_refsE
| clarsimp simp: global'_no_ex_cap idle'_not_queued' idle'_no_refs tcb_bound_refs'_def
| drule(1) bspec | drule st_tcb_at_state_refs_ofD'
| clarsimp simp: set_eq_subset dest!: bound_tcb_at_state_refs_ofD' )+
apply (rule hoare_pre)
apply (wp getNotification_wp | wpc | clarsimp)+
done
(* t = ksCurThread s *)
lemma rai_invs'[wp]:
"\<lbrace>invs' and sch_act_not t
and st_tcb_at' simple' t
and (\<lambda>s. \<forall>p. t \<notin> set (ksReadyQueues s p))
and ex_nonz_cap_to' t
and (\<lambda>s. \<forall>r \<in> zobj_refs' cap. ex_nonz_cap_to' r s)
and (\<lambda>s. \<exists>ntfnptr. isNotificationCap cap
\<and> capNtfnPtr cap = ntfnptr
\<and> obj_at' (\<lambda>ko. ntfnBoundTCB ko = None \<or> ntfnBoundTCB ko = Some t)
ntfnptr s)\<rbrace>
receiveSignal t cap isBlocking
\<lbrace>\<lambda>_. invs'\<rbrace>"
apply (simp add: receiveSignal_def)
apply (rule hoare_seq_ext [OF _ get_ntfn_sp'])
apply (rename_tac ep)
apply (case_tac "ntfnObj ep")
\<comment> \<open>ep = IdleNtfn\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre)
apply (wp valid_irq_node_lift sts_sch_act' typ_at_lifts
sts_valid_queues setThreadState_ct_not_inQ
asUser_urz
| simp add: valid_ntfn'_def doNBRecvFailedTransfer_def | wpc)+
apply (clarsimp simp: pred_tcb_at' valid_tcb_state'_def)
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (subgoal_tac "capNtfnPtr cap \<noteq> t")
apply (frule valid_pspace_valid_objs')
apply (frule (1) ko_at_valid_objs')
apply (clarsimp simp: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def)
apply (rule conjI, clarsimp simp: obj_at'_def split: option.split)
apply (drule simple_st_tcb_at_state_refs_ofD'
ko_at_state_refs_ofD' bound_tcb_at_state_refs_ofD')+
apply (clarsimp dest!: isCapDs)
apply (rule conjI, erule delta_sym_refs)
apply (clarsimp split: if_split_asm)
apply (fastforce simp: tcb_bound_refs'_def symreftype_inverse'
split: if_split_asm)
apply (clarsimp dest!: global'_no_ex_cap)
apply (clarsimp simp: pred_tcb_at'_def obj_at'_def projectKOs)
\<comment> \<open>ep = ActiveNtfn\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre)
apply (wp valid_irq_node_lift sts_valid_objs' typ_at_lifts static_imp_wp
asUser_urz
| simp add: valid_ntfn'_def)+
apply (clarsimp simp: pred_tcb_at' valid_pspace'_def)
apply (frule (1) ko_at_valid_objs')
apply (clarsimp simp: projectKOs)
apply (clarsimp simp: valid_obj'_def valid_ntfn'_def isCap_simps)
apply (drule simple_st_tcb_at_state_refs_ofD'
ko_at_state_refs_ofD')+
apply (erule delta_sym_refs)
apply (clarsimp split: if_split_asm simp: global'_no_ex_cap)+
\<comment> \<open>ep = WaitingNtfn\<close>
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre)
apply (wp hoare_vcg_const_Ball_lift valid_irq_node_lift sts_sch_act'
sts_valid_queues setThreadState_ct_not_inQ typ_at_lifts
asUser_urz
| simp add: valid_ntfn'_def doNBRecvFailedTransfer_def | wpc)+
apply (clarsimp simp: valid_tcb_state'_def)
apply (frule_tac t=t in not_in_ntfnQueue)
apply (simp)
apply (simp)
apply (erule pred_tcb'_weakenE, clarsimp)
apply (frule ko_at_valid_objs')
apply (clarsimp simp: valid_pspace'_def)
apply (simp add: projectKOs)
apply (clarsimp simp: valid_obj'_def)
apply (clarsimp simp: valid_ntfn'_def pred_tcb_at')
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (rule conjI, clarsimp simp: obj_at'_def split: option.split)
apply (drule(1) sym_refs_ko_atD')
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (drule bound_tcb_at_state_refs_ofD')
apply (clarsimp simp: st_tcb_at_refs_of_rev'
dest!: isCapDs)
apply (rule conjI, erule delta_sym_refs)
apply (clarsimp split: if_split_asm)
apply (rename_tac list one two three four five six seven eight nine)
apply (subgoal_tac "set list \<times> {NTFNSignal} \<noteq> {}")
apply safe[1]
apply (auto simp: symreftype_inverse' ntfn_bound_refs'_def tcb_bound_refs'_def)[5]
apply (fastforce simp: tcb_bound_refs'_def
split: if_split_asm)
apply (clarsimp dest!: global'_no_ex_cap)
done
lemma getCTE_cap_to_refs[wp]:
"\<lbrace>\<top>\<rbrace> getCTE p \<lbrace>\<lambda>rv s. \<forall>r\<in>zobj_refs' (cteCap rv). ex_nonz_cap_to' r s\<rbrace>"
apply (rule hoare_strengthen_post [OF getCTE_sp])
apply (clarsimp simp: ex_nonz_cap_to'_def)
apply (fastforce elim: cte_wp_at_weakenE')
done
lemma lookupCap_cap_to_refs[wp]:
"\<lbrace>\<top>\<rbrace> lookupCap t cref \<lbrace>\<lambda>rv s. \<forall>r\<in>zobj_refs' rv. ex_nonz_cap_to' r s\<rbrace>,-"
apply (simp add: lookupCap_def lookupCapAndSlot_def split_def
getSlotCap_def)
apply (wp | simp)+
done
crunch valid_objs'[wp]: setVMRoot valid_objs'
(wp: crunch_wps simp: crunch_simps)
lemma arch_stt_objs' [wp]:
"\<lbrace>valid_objs'\<rbrace> Arch.switchToThread t \<lbrace>\<lambda>rv. valid_objs'\<rbrace>"
apply (simp add: X64_H.switchToThread_def)
apply wp
done
declare zipWithM_x_mapM [simp]
lemma cteInsert_invs_bits[wp]:
"\<lbrace>\<lambda>s. sch_act_wf (ksSchedulerAction s) s\<rbrace>
cteInsert a b c
\<lbrace>\<lambda>rv s. sch_act_wf (ksSchedulerAction s) s\<rbrace>"
"\<lbrace>Invariants_H.valid_queues\<rbrace> cteInsert a b c \<lbrace>\<lambda>rv. Invariants_H.valid_queues\<rbrace>"
"\<lbrace>cur_tcb'\<rbrace> cteInsert a b c \<lbrace>\<lambda>rv. cur_tcb'\<rbrace>"
"\<lbrace>\<lambda>s. P (state_refs_of' s)\<rbrace>
cteInsert a b c
\<lbrace>\<lambda>rv s. P (state_refs_of' s)\<rbrace>"
apply (wp sch_act_wf_lift valid_queues_lift
cur_tcb_lift tcb_in_cur_domain'_lift)+
done
lemma possibleSwitchTo_sch_act_not:
"\<lbrace>sch_act_not t' and K (t \<noteq> t')\<rbrace> possibleSwitchTo t \<lbrace>\<lambda>rv. sch_act_not t'\<rbrace>"
apply (simp add: possibleSwitchTo_def setSchedulerAction_def curDomain_def)
apply (wp hoare_drop_imps | wpc | simp)+
done
crunch vms'[wp]: possibleSwitchTo valid_machine_state'
crunch pspace_domain_valid[wp]: possibleSwitchTo pspace_domain_valid
crunch ct_idle_or_in_cur_domain'[wp]: possibleSwitchTo ct_idle_or_in_cur_domain'
crunch ct'[wp]: possibleSwitchTo "\<lambda>s. P (ksCurThread s)"
crunch it[wp]: possibleSwitchTo "\<lambda>s. P (ksIdleThread s)"
crunch irqs_masked'[wp]: possibleSwitchTo "irqs_masked'"
crunch urz[wp]: possibleSwitchTo "untyped_ranges_zero'"
(simp: crunch_simps unless_def wp: crunch_wps)
lemma si_invs'[wp]:
"\<lbrace>invs' and st_tcb_at' simple' t
and (\<lambda>s. \<forall>p. t \<notin> set (ksReadyQueues s p))
and sch_act_not t
and ex_nonz_cap_to' ep and ex_nonz_cap_to' t\<rbrace>
sendIPC bl call ba cg cgr t ep
\<lbrace>\<lambda>rv. invs'\<rbrace>"
supply if_split[split del]
apply (simp add: sendIPC_def split del: if_split)
apply (rule hoare_seq_ext [OF _ get_ep_sp'])
apply (case_tac epa)
\<comment> \<open>epa = RecvEP\<close>
apply simp
apply (rename_tac list)
apply (case_tac list)
apply simp
apply (simp split del: if_split add: invs'_def valid_state'_def)
apply (rule hoare_pre)
apply (rule_tac P="a\<noteq>t" in hoare_gen_asm)
apply (wp valid_irq_node_lift
sts_valid_objs' set_ep_valid_objs' setEndpoint_valid_mdb' sts_st_tcb' sts_sch_act'
possibleSwitchTo_sch_act_not sts_valid_queues setThreadState_ct_not_inQ
possibleSwitchTo_ksQ' possibleSwitchTo_ct_not_inQ hoare_vcg_all_lift sts_ksQ'
hoare_convert_imp [OF doIPCTransfer_sch_act doIPCTransfer_ct']
hoare_convert_imp [OF setEndpoint_nosch setEndpoint_ct']
hoare_drop_imp [where f="threadGet tcbFault t"]
| rule_tac f="getThreadState a" in hoare_drop_imp
| wp (once) hoare_drop_imp[where R="\<lambda>_ _. call"]
hoare_drop_imp[where R="\<lambda>_ _. \<not> call"]
hoare_drop_imp[where R="\<lambda>_ _. cg"]
| simp add: valid_tcb_state'_def case_bool_If
case_option_If
cong: if_cong
split del: if_split
| wp (once) sch_act_sane_lift tcb_in_cur_domain'_lift hoare_vcg_const_imp_lift)+
apply (clarsimp simp: pred_tcb_at' cong: conj_cong imp_cong
split del: if_split)
apply (frule obj_at_valid_objs', clarsimp)
apply (frule(1) sym_refs_ko_atD')
apply (clarsimp simp: projectKOs valid_obj'_def valid_ep'_def
st_tcb_at_refs_of_rev' pred_tcb_at'
conj_comms fun_upd_def[symmetric]
split del: if_split)
apply (frule pred_tcb_at')
apply (drule simple_st_tcb_at_state_refs_ofD' st_tcb_at_state_refs_ofD')+
apply (clarsimp simp: valid_pspace'_splits)
apply (subst fun_upd_idem[where x=t])
apply (clarsimp split: if_split)
apply (rule conjI, clarsimp simp: obj_at'_def projectKOs)
apply (drule bound_tcb_at_state_refs_ofD')
apply (fastforce simp: tcb_bound_refs'_def)
apply (subgoal_tac "ex_nonz_cap_to' a s")
prefer 2
apply (clarsimp elim!: if_live_state_refsE)
apply clarsimp
apply (rule conjI)
apply (drule bound_tcb_at_state_refs_ofD')
apply (fastforce simp: tcb_bound_refs'_def set_eq_subset)
apply (clarsimp simp: conj_ac)
apply (rule conjI, clarsimp simp: idle'_no_refs)
apply (rule conjI, clarsimp simp: global'_no_ex_cap)
apply (rule conjI)
apply (rule impI)
apply (frule(1) ct_not_in_epQueue, clarsimp, clarsimp)
apply (clarsimp)
apply (simp add: ep_redux_simps')
apply (rule conjI, clarsimp split: if_split)
apply (rule conjI, fastforce simp: tcb_bound_refs'_def set_eq_subset)
apply (clarsimp, erule delta_sym_refs;
solves\<open>auto simp: symreftype_inverse' tcb_bound_refs'_def split: if_split_asm\<close>)
apply (solves\<open>clarsimp split: list.splits\<close>)
\<comment> \<open>epa = IdleEP\<close>
apply (cases bl)
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre, wp valid_irq_node_lift)
apply (simp add: valid_ep'_def)
apply (wp valid_irq_node_lift sts_sch_act' sts_valid_queues
setThreadState_ct_not_inQ)
apply (clarsimp simp: valid_tcb_state'_def pred_tcb_at')
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (subgoal_tac "ep \<noteq> t")
apply (drule simple_st_tcb_at_state_refs_ofD' ko_at_state_refs_ofD'
bound_tcb_at_state_refs_ofD')+
apply (rule conjI, erule delta_sym_refs)
apply (auto simp: tcb_bound_refs'_def symreftype_inverse'
split: if_split_asm)[2]
apply (fastforce simp: global'_no_ex_cap)
apply (clarsimp simp: pred_tcb_at'_def obj_at'_def projectKOs)
apply simp
apply wp
apply simp
\<comment> \<open>epa = SendEP\<close>
apply (cases bl)
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre, wp valid_irq_node_lift)
apply (simp add: valid_ep'_def)
apply (wp hoare_vcg_const_Ball_lift valid_irq_node_lift sts_sch_act'
sts_valid_queues setThreadState_ct_not_inQ)
apply (clarsimp simp: valid_tcb_state'_def pred_tcb_at')
apply (rule conjI, clarsimp elim!: obj_at'_weakenE)
apply (frule obj_at_valid_objs', clarsimp)
apply (frule(1) sym_refs_ko_atD')
apply (frule pred_tcb_at')
apply (drule simple_st_tcb_at_state_refs_ofD')
apply (drule bound_tcb_at_state_refs_ofD')
apply (clarsimp simp: valid_obj'_def valid_ep'_def
projectKOs st_tcb_at_refs_of_rev')
apply (rule conjI, clarsimp)
apply (drule (1) bspec)
apply (clarsimp dest!: st_tcb_at_state_refs_ofD' bound_tcb_at_state_refs_ofD'
simp: tcb_bound_refs'_def)
apply (clarsimp simp: set_eq_subset)
apply (rule conjI, erule delta_sym_refs)
subgoal by (fastforce simp: obj_at'_def projectKOs symreftype_inverse'
split: if_split_asm)
apply (fastforce simp: tcb_bound_refs'_def symreftype_inverse'
split: if_split_asm)
apply (fastforce simp: global'_no_ex_cap idle'_not_queued)
apply (simp | wp)+
done
lemma sfi_invs_plus':
"\<lbrace>invs' and st_tcb_at' simple' t
and sch_act_not t
and (\<lambda>s. \<forall>p. t \<notin> set (ksReadyQueues s p))
and ex_nonz_cap_to' t\<rbrace>
sendFaultIPC t f
\<lbrace>\<lambda>rv. invs'\<rbrace>, \<lbrace>\<lambda>rv. invs' and st_tcb_at' simple' t
and (\<lambda>s. \<forall>p. t \<notin> set (ksReadyQueues s p))
and sch_act_not t and (\<lambda>s. ksIdleThread s \<noteq> t)\<rbrace>"
apply (simp add: sendFaultIPC_def)
apply (wp threadSet_invs_trivial threadSet_pred_tcb_no_state
threadSet_cap_to'
| wpc | simp)+
apply (rule_tac Q'="\<lambda>rv s. invs' s \<and> sch_act_not t s
\<and> st_tcb_at' simple' t s
\<and> (\<forall>p. t \<notin> set (ksReadyQueues s p))
\<and> ex_nonz_cap_to' t s
\<and> t \<noteq> ksIdleThread s
\<and> (\<forall>r\<in>zobj_refs' rv. ex_nonz_cap_to' r s)"
in hoare_post_imp_R)
apply wp
apply (clarsimp simp: inQ_def pred_tcb_at')
apply (wp | simp)+
apply (clarsimp simp: eq_commute)
apply (subst(asm) global'_no_ex_cap, auto)
done
lemma hf_corres:
"fr f f' \<Longrightarrow>
corres dc (einvs and st_tcb_at active thread and ex_nonz_cap_to thread
and (%_. valid_fault f))
(invs' and sch_act_not thread
and (\<lambda>s. \<forall>p. thread \<notin> set(ksReadyQueues s p))
and st_tcb_at' simple' thread and ex_nonz_cap_to' thread)
(handle_fault thread f) (handleFault thread f')"
apply (simp add: handle_fault_def handleFault_def)
apply (rule corres_guard_imp)
apply (subst return_bind [symmetric],
rule corres_split [where P="tcb_at thread",
OF _ gets_the_noop_corres [where x="()"]])
apply (rule corres_split_catch)
apply (rule hdf_corres)
apply (rule_tac F="valid_fault f" in corres_gen_asm)
apply (rule send_fault_ipc_corres, assumption)
apply simp
apply wp+
apply (rule hoare_post_impErr, rule sfi_invs_plus', simp_all)[1]
apply clarsimp
apply (simp add: tcb_at_def)
apply wp+
apply (clarsimp simp: st_tcb_at_tcb_at st_tcb_def2 invs_def
valid_state_def valid_idle_def)
apply auto
done
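(* setThreadState preserves invs' under side conditions tying the new state st to
   the old thread state (same st_tcb references), to the idle thread, to the ready
   queues and to the scheduler action; used by hf_invs' below. *)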
lemma sts_invs_minor'':
"\<lbrace>st_tcb_at' (\<lambda>st'. tcb_st_refs_of' st' = tcb_st_refs_of' st
\<and> (st \<noteq> Inactive \<and> \<not> idle' st \<longrightarrow>
st' \<noteq> Inactive \<and> \<not> idle' st')) t
and (\<lambda>s. t = ksIdleThread s \<longrightarrow> idle' st)
and (\<lambda>s. (\<exists>p. t \<in> set (ksReadyQueues s p)) \<longrightarrow> runnable' st)
and (\<lambda>s. runnable' st \<and> obj_at' tcbQueued t s
\<longrightarrow> st_tcb_at' runnable' t s)
and (\<lambda>s. \<not> runnable' st \<longrightarrow> sch_act_not t s)
and invs'\<rbrace>
setThreadState st t
\<lbrace>\<lambda>rv. invs'\<rbrace>"
apply (simp add: invs'_def valid_state'_def)
apply (rule hoare_pre)
apply (wp valid_irq_node_lift sts_sch_act' sts_valid_queues
setThreadState_ct_not_inQ)
apply clarsimp
apply (rule conjI)
apply fastforce
apply (rule conjI)
apply (clarsimp simp: pred_tcb_at'_def)
apply (drule obj_at_valid_objs')
apply (clarsimp simp: valid_pspace'_def)
apply (clarsimp simp: valid_obj'_def valid_tcb'_def projectKOs)
subgoal by (cases st, auto simp: valid_tcb_state'_def
split: Structures_H.thread_state.splits)[1]
apply (rule conjI)
apply (clarsimp dest!: st_tcb_at_state_refs_ofD'
elim!: rsubst[where P=sym_refs]
intro!: ext)
apply (clarsimp elim!: st_tcb_ex_cap'')
done
lemma hf_invs' [wp]:
"\<lbrace>invs' and sch_act_not t
and (\<lambda>s. \<forall>p. t \<notin> set(ksReadyQueues s p))
and st_tcb_at' simple' t
and ex_nonz_cap_to' t and (\<lambda>s. t \<noteq> ksIdleThread s)\<rbrace>
handleFault t f \<lbrace>\<lambda>r. invs'\<rbrace>"
apply (simp add: handleFault_def)
apply wp
apply (simp add: handleDoubleFault_def)
apply (wp sts_invs_minor'' dmo_invs')+
apply (rule hoare_post_impErr, rule sfi_invs_plus',
simp_all)
apply (strengthen no_refs_simple_strg')
apply clarsimp
done
declare zipWithM_x_mapM [simp del]
lemma gts_st_tcb':
"\<lbrace>\<top>\<rbrace> getThreadState t \<lbrace>\<lambda>r. st_tcb_at' (\<lambda>st. st = r) t\<rbrace>"
apply (rule hoare_strengthen_post)
apply (rule gts_sp')
apply simp
done
declare setEndpoint_ct' [wp]
lemma setupCallerCap_pred_tcb_unchanged:
"\<lbrace>pred_tcb_at' proj P t and K (t \<noteq> t')\<rbrace>
setupCallerCap t' t'' g
\<lbrace>\<lambda>rv. pred_tcb_at' proj P t\<rbrace>"
apply (simp add: setupCallerCap_def getThreadCallerSlot_def
getThreadReplySlot_def)
apply (wp sts_pred_tcb_neq' hoare_drop_imps)
apply clarsimp
done
lemma si_blk_makes_simple':
"\<lbrace>st_tcb_at' simple' t and K (t \<noteq> t')\<rbrace>
sendIPC True call bdg x x' t' ep
\<lbrace>\<lambda>rv. st_tcb_at' simple' t\<rbrace>"
apply (simp add: sendIPC_def)
apply (rule hoare_seq_ext [OF _ get_ep_inv'])
apply (case_tac xa, simp_all)
apply (rename_tac list)
apply (case_tac list, simp_all add: case_bool_If case_option_If
split del: if_split cong: if_cong)
apply (rule hoare_pre)
apply (wp sts_st_tcb_at'_cases setupCallerCap_pred_tcb_unchanged
hoare_drop_imps)
apply (clarsimp simp: pred_tcb_at' del: disjCI)
apply (wp sts_st_tcb_at'_cases)
apply clarsimp
apply (wp sts_st_tcb_at'_cases)
apply clarsimp
done
lemma si_blk_makes_runnable':
"\<lbrace>st_tcb_at' runnable' t and K (t \<noteq> t')\<rbrace>
sendIPC True call bdg x x' t' ep
\<lbrace>\<lambda>rv. st_tcb_at' runnable' t\<rbrace>"
apply (simp add: sendIPC_def)
apply (rule hoare_seq_ext [OF _ get_ep_inv'])
apply (case_tac xa, simp_all)
apply (rename_tac list)
apply (case_tac list, simp_all add: case_bool_If case_option_If
split del: if_split cong: if_cong)
apply (rule hoare_pre)
apply (wp sts_st_tcb_at'_cases setupCallerCap_pred_tcb_unchanged
hoare_vcg_const_imp_lift hoare_drop_imps
| simp)+
apply (clarsimp del: disjCI simp: pred_tcb_at' elim!: pred_tcb'_weakenE)
apply (wp sts_st_tcb_at'_cases)
apply clarsimp
apply (wp sts_st_tcb_at'_cases)
apply clarsimp
done
lemma sfi_makes_simple':
"\<lbrace>st_tcb_at' simple' t and K (t \<noteq> t')\<rbrace>
sendFaultIPC t' ft
\<lbrace>\<lambda>rv. st_tcb_at' simple' t\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: sendFaultIPC_def
cong: if_cong capability.case_cong bool.case_cong)
apply (wpsimp wp: si_blk_makes_simple' threadSet_pred_tcb_no_state hoare_drop_imps
hoare_vcg_all_lift_R)
done
lemma sfi_makes_runnable':
"\<lbrace>st_tcb_at' runnable' t and K (t \<noteq> t')\<rbrace>
sendFaultIPC t' ft
\<lbrace>\<lambda>rv. st_tcb_at' runnable' t\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: sendFaultIPC_def
cong: if_cong capability.case_cong bool.case_cong)
apply (wpsimp wp: si_blk_makes_runnable' threadSet_pred_tcb_no_state hoare_drop_imps
hoare_vcg_all_lift_R)
done
lemma hf_makes_runnable_simple':
"\<lbrace>st_tcb_at' P t' and K (t \<noteq> t') and K (P = runnable' \<or> P = simple')\<rbrace>
handleFault t ft
\<lbrace>\<lambda>rv. st_tcb_at' P t'\<rbrace>"
apply (safe intro!: hoare_gen_asm)
apply (simp_all add: handleFault_def handleDoubleFault_def)
apply (wp sfi_makes_runnable' sfi_makes_simple' sts_st_tcb_at'_cases
| simp add: handleDoubleFault_def)+
done
crunches possibleSwitchTo, completeSignal
for pred_tcb_at'[wp]: "pred_tcb_at' proj P t"
lemma ri_makes_runnable_simple':
"\<lbrace>st_tcb_at' P t' and K (t \<noteq> t') and K (P = runnable' \<or> P = simple')\<rbrace>
receiveIPC t cap isBlocking
\<lbrace>\<lambda>rv. st_tcb_at' P t'\<rbrace>"
including no_pre
apply (rule hoare_gen_asm)+
apply (simp add: receiveIPC_def)
apply (case_tac cap, simp_all add: isEndpointCap_def)
apply (rule hoare_seq_ext [OF _ get_ep_inv'])
apply (rule hoare_seq_ext [OF _ gbn_sp'])
apply wp
apply (rename_tac ep q r)
apply (case_tac ep, simp_all)
apply (wp sts_st_tcb_at'_cases | wpc | simp add: doNBRecvFailedTransfer_def)+
apply (rename_tac list)
apply (case_tac list, simp_all add: case_bool_If case_option_If
split del: if_split cong: if_cong)
apply (rule hoare_pre)
apply (wp sts_st_tcb_at'_cases setupCallerCap_pred_tcb_unchanged
hoare_vcg_const_imp_lift)+
apply (simp, simp only: imp_conv_disj)
apply (wp hoare_vcg_disj_lift)+
apply (clarsimp simp: pred_tcb_at'_def obj_at'_def projectKOs)
apply (fastforce simp: pred_tcb_at'_def obj_at'_def isSend_def
split: Structures_H.thread_state.split_asm)
apply (rule hoare_pre)
apply wpsimp+
done
lemma rai_makes_runnable_simple':
"\<lbrace>st_tcb_at' P t' and K (t \<noteq> t') and K (P = runnable' \<or> P = simple')\<rbrace>
receiveSignal t cap isBlocking
\<lbrace>\<lambda>rv. st_tcb_at' P t'\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: receiveSignal_def)
apply (rule hoare_pre)
by (wp sts_st_tcb_at'_cases getNotification_wp | wpc | simp add: doNBRecvFailedTransfer_def)+
lemma sendSignal_st_tcb'_Running:
"\<lbrace>st_tcb_at' (\<lambda>st. st = Running \<or> P st) t\<rbrace>
sendSignal ntfnptr bdg
\<lbrace>\<lambda>_. st_tcb_at' (\<lambda>st. st = Running \<or> P st) t\<rbrace>"
apply (simp add: sendSignal_def)
apply (wp sts_st_tcb_at'_cases cancelIPC_st_tcb_at' gts_wp' getNotification_wp static_imp_wp
| wpc | clarsimp simp: pred_tcb_at')+
done
lemma sai_st_tcb':
"\<lbrace>st_tcb_at' P t and K (P Running)\<rbrace>
sendSignal ntfn bdg
\<lbrace>\<lambda>rv. st_tcb_at' P t\<rbrace>"
apply (rule hoare_gen_asm)
apply (subgoal_tac "\<exists>Q. P = (\<lambda>st. st = Running \<or> Q st)")
apply (clarsimp intro!: sendSignal_st_tcb'_Running)
apply (fastforce intro!: exI[where x=P])
done
end
end
|
Formal statement is: lemma dist_nz: "x \<noteq> y \<longleftrightarrow> 0 < dist x y" Informal statement is: $x \neq y$ if and only if $0 < \text{dist}(x, y)$.
|
module Verified.Monad
import Verified.Applicative
%default total
class (Monad m, VerifiedApplicative m) => VerifiedMonad (m : Type -> Type) where
monadApplicative : (mf : m (a -> b)) -> (mx : m a) ->
mf <*> mx = mf >>= \f =>
mx >>= \x =>
pure (f x)
monadLeftIdentity : (x : a) -> (f : a -> m b) -> return x >>= f = f x
monadRightIdentity : (mx : m a) -> mx >>= return = mx
monadAssociativity : (mx : m a) -> (f : a -> m b) -> (g : b -> m c) ->
(mx >>= f) >>= g = mx >>= (\x => f x >>= g)
|
-- --------------------------------------------------------------- [ Error.idr ]
-- Module : Error.idr
-- Copyright : (c) Jan de Muijnck-Hughes
-- License : see LICENSE
-- --------------------------------------------------------------------- [ EOH ]
module Toolkit.Options.ArgParse.Error
import Data.String
import System.File
import Toolkit.Data.Location
import Toolkit.Options.ArgParse.Model
import Toolkit.Options.ArgParse.Lexer
import Toolkit.Options.ArgParse.Parser
%default total
public export
data ArgParseError : Type where
InvalidOption : Arg -> ArgParseError
MalformedOption : ParseError Token -> ArgParseError
export
(Show Arg) => Show ArgParseError where
show (InvalidOption o)
= "Invalid Option " ++ show o
show (MalformedOption err)
= "Malformed Option " ++ show err
-- --------------------------------------------------------------------- [ EOF ]
|
import deduction semantics lindenbaum
open encodable
namespace fopl
variables {L : language.{0}}
local infix ` ≃₁ `:80 := ((≃) : term L → term L → formula L)
local prefix `∏₁ `:64 := (has_univ_quantifier.univ : formula L → formula L)
local prefix `∐₁ `:64 := (has_exists_quantifier.ex : formula L → formula L)
variables [decidable_eq (formula L)]
@[simp] def formula.arrow : formula L → option (formula L × formula L)
| (p ⟶ q) := some (p, q)
| _ := none
lemma arrow_eq {p : formula L} {v} : p.arrow = some v → p = v.1 ⟶ v.2 :=
by { cases p; simp[show ∀ x y : term L, (x ≃ y : formula L).arrow = none, from λ _ _, rfl,
show ∀ p : formula L, (⁻p).arrow = none, from λ _, rfl,
show ∀ p : formula L, (∏ p : formula L).arrow = none, from λ _, rfl], intros h, simp[←h] }
inductive proof (L : language.{0}) : Type
| root : formula L → proof
| ge : proof → proof
| mp : proof → proof → proof
@[simp] def proof.conseq : proof L → option (formula L)
| (proof.root p) := some p
| (proof.ge φ) := φ.conseq.map (λ p, ∏ p)
| (proof.mp φ ψ) :=
if (φ.conseq >>= formula.arrow).map prod.fst = ψ.conseq then (φ.conseq >>= formula.arrow).map prod.snd
else ψ.conseq
inductive formula.is_axiom (T : theory L) (i : ℕ) : formula L → Prop
| p1 {p q} : formula.is_axiom (p ⟶ q ⟶ p)
| p2 {p q r} : formula.is_axiom ((p ⟶ q ⟶ r) ⟶ (p ⟶ q) ⟶ p ⟶ r)
| p3 {p q} : formula.is_axiom ((⁻p ⟶ ⁻q) ⟶ q ⟶ p)
| q1 {p t} : formula.is_axiom (∏₁ p ⟶ p.rew ι[0 ⇝ t])
| q2 {p q} : formula.is_axiom (∏₁ (p ⟶ q) ⟶ ∏₁ p ⟶ ∏₁ q)
| q3 {p} : formula.is_axiom (p ⟶ ∏₁ (p^1))
| e1 : formula.is_axiom ∏₁ #0 ≃₁ #0
| e2 : formula.is_axiom ∏₁ ∏₁ (#0 ≃₁ #1 ⟶ #1 ≃₁ #0)
| e3 : formula.is_axiom ∏₁ ∏₁ ∏₁ (#0 ≃₁ #1 ⟶ #1 ≃₁ #2 ⟶ #0 ≃₁ #2)
| e4 {n} {f : L.fn n} : formula.is_axiom (eq_axiom4 f)
| e5 {n} {r : L.pr n} : formula.is_axiom (eq_axiom5 r)
| by_axiom {p} : p ∈ T^i → formula.is_axiom p
@[simp] def proof.proper (T : theory L) : ℕ → proof L → Prop
| i (proof.root p) := p.is_axiom T i
| i (proof.ge φ) := φ.proper (i + 1)
| i (proof.mp φ ψ) := (φ.proper i) ∧ (ψ.proper i)
def proof.of (T : theory L) (i : ℕ) (p : formula L) (φ : proof L) : Prop := φ.proper T i ∧ φ.conseq = some p
namespace proof
open nat
variables {T : theory L} {i : ℕ}
lemma provable_of_is_axiom {p} (h : is_axiom T i p) : T^i ⊢ p :=
begin
cases h; try {simp}, { exact provable.e4 }, { exact provable.e5 },
{ exact provable.AX (by simp*) }
end
lemma sound {T : theory L} {i} {p} {φ} : proof.of T i p φ → T^i ⊢ p :=
begin
induction φ generalizing p i; simp[proof.of],
case root : i p { rintros h rfl, exact provable_of_is_axiom h },
case ge : φ IH p { rintros proper q conseq rfl, exact provable.generalize (IH ⟨proper, conseq⟩) },
case mp : φ ψ IHφ IHψ
{ cases φ_conseq : φ.conseq with cφ; cases ψ_conseq : ψ.conseq with cψ; simp[φ_conseq, ψ_conseq],
{ rintros pφ pψ rfl, exact IHψ ⟨pψ, ψ_conseq⟩ },
{ intros pφ pψ, simp[show (∀ (a b a_1 : formula L), cφ = a_1 → ¬a_1.arrow = some (a, b)) ↔ cφ.arrow = none,
from ⟨λ h, by { cases C : cφ.arrow with v; simp, exact h v.1 v.2 cφ rfl (by simp[C]) },
by { rintros h a b _ rfl, simp[h] }⟩],
cases C : cφ.arrow with v; simp[C] },
{ rintros pφ pψ, cases C : cφ.arrow with v; simp, { rintros rfl, exact IHψ ⟨pψ, ψ_conseq⟩ },
{ by_cases C₂ : v.1 = cψ,
{ simp[←C₂, show ∃ a, v = (v.fst, a), from ⟨v.2, by simp⟩],
rintros rfl, rcases C₂ with rfl, rcases arrow_eq C with rfl,
exact classical_logic.modus_ponens (IHφ ⟨pφ, φ_conseq⟩) (IHψ ⟨pψ, ψ_conseq⟩) },
{ simp[show ¬∃ (a : formula L), v = (cψ, a), by { simp, rintros s rfl, simp at C₂, contradiction }],
rintros rfl, exact IHψ ⟨pψ, ψ_conseq⟩ } } } }
end
lemma complete {T : theory L} {i} (p : formula L) : T^i ⊢ p ↔ ∃ φ, of T i p φ :=
⟨λ h,
begin
apply fopl.provable.rec_on' h,
{ rintros i p _ ⟨φ, φ_proper, φ_conseq⟩, refine ⟨φ.ge, _, _⟩; simp* },
{ rintros i p q _ _ ⟨φ, φ_proper, φ_conseq⟩ ⟨ψ, ψ_proper, ψ_conseq⟩,
refine ⟨φ.mp ψ, _, _⟩; simp[*, (>>=)] },
{ intros i p _, refine ⟨root p, _, _⟩; simp, exact formula.is_axiom.by_axiom mem },
{ intros i p q, refine ⟨root (p ⟶ q ⟶ p), _, _⟩; simp, exact formula.is_axiom.p1 },
{ intros i p q r, refine ⟨root ((p ⟶ q ⟶ r) ⟶ (p ⟶ q) ⟶ p ⟶ r), _, _⟩; simp, exact formula.is_axiom.p2 },
{ intros i p q, refine ⟨root ((⁻p ⟶ ⁻q) ⟶ q ⟶ p), _, _⟩; simp, exact formula.is_axiom.p3 },
{ intros i p t, refine ⟨root (∏ p ⟶ formula.rew ι[0 ⇝ t] p), _, _⟩; simp, exact formula.is_axiom.q1 },
{ intros i p q, refine ⟨root (∏ (p ⟶ q) ⟶ ∏ p ⟶ ∏ q), _, _⟩; simp, exact formula.is_axiom.q2 },
{ intros i p, refine ⟨root (p ⟶ ∏ p ^ 1), _, _⟩; simp, exact formula.is_axiom.q3 },
{ intros i, refine ⟨root (∏₁ #0 ≃₁ #0), _, _⟩; simp, exact formula.is_axiom.e1 },
{ intros i, refine ⟨root (∏₁ ∏₁ (#0 ≃₁ #1 ⟶ #1 ≃₁ #0)), _, _⟩; simp, exact formula.is_axiom.e2 },
{ intros i, refine ⟨root (∏₁ ∏₁ ∏₁ (#0 ≃₁ #1 ⟶ #1 ≃₁ #2 ⟶ #0 ≃₁ #2)), _, _⟩; simp, exact formula.is_axiom.e3 },
{ intros i m f, refine ⟨root (eq_axiom4 f), _, _⟩; simp, exact formula.is_axiom.e4 },
{ intros i m p, refine ⟨root (eq_axiom5 p), _, _⟩; simp, exact formula.is_axiom.e5 }
end, λ ⟨φ, h⟩, sound h⟩
variables [primcodable (formula L)]
@[simp] def encode_pcode : proof L → ℕ
| (root p) := (bit0 $ encode p) + 1
| (ge φ) := (bit1 $ bit0 $ encode_pcode φ) + 1
| (mp φ ψ) := (bit1 $ bit1 $ nat.mkpair (encode_pcode φ) (encode_pcode ψ)) + 1
@[simp] def decode_pcode : ℕ → option (proof L)
| 0 := none
| (n + 1) :=
have div4 : n.div2.div2 ≤ n :=
by { simp[nat.div2_val], exact le_trans (nat.div_le_self (n / 2) 2) (nat.div_le_self n 2) },
have n.div2.div2 < n + 1, from nat.lt_succ_iff.mpr div4,
have n.div2.div2.unpair.1 < n + 1,
from nat.lt_succ_iff.mpr (le_trans (nat.unpair_left_le n.div2.div2) div4),
have n.div2.div2.unpair.2 < n + 1,
from nat.lt_succ_iff.mpr (le_trans (nat.unpair_right_le n.div2.div2) div4),
match n.bodd, n.div2.bodd with
| ff, _ := (decode (formula L) n.div2).map root
| tt, ff := (decode_pcode n.div2.div2).map proof.ge
| tt, tt := proof.mp <$> (decode_pcode n.div2.div2.unpair.1) <*> (decode_pcode n.div2.div2.unpair.2)
end
instance : encodable (proof L) :=
{ encode := encode_pcode,
decode := decode_pcode,
encodek := by { intros φ, induction φ; simp[encode_pcode, decode_pcode, *] } }
noncomputable def of_n (T : theory L) (p : ℕ) (s : ℕ) : ℕ :=
encode
( do p ← decode (formula L) p,
φ ← decode (proof L) s,
some (to_bool (proof.of T 0 p φ)))
lemma of_n_complete (p : formula L) :
T ⊢ p ↔ ∃ s, of_n T (encode p) s = encode (some tt) :=
by { have : T ⊢ p ↔ ∃ φ, of T 0 p φ, rw[show T = T^0, by simp], from complete p,
simp[this, of_n],
split,
{ rintros ⟨φ, of⟩, refine ⟨encode φ, _⟩, simp[of] },
{ rintros ⟨s, of⟩, cases C : decode (proof L) s with φ; simp[C] at of,
{ contradiction },
{ refine ⟨φ, _⟩, by_cases C₂ : fopl.proof.of T 0 p φ; simp[C₂] at of ⊢, { contradiction } } } }
end proof
variables [primcodable (formula L)] [primcodable (proof L)]
class primrec_theory (T : theory L) :=
(prim : primrec₂ (λ p φ, proof.of_n T p φ))
variables {T : theory L} [primrec_theory T]
#eval encode (some tt)
end fopl |
Require Import Coq.Program.Equality.
Require Export Memory.
Definition is_preorder {Conf} (pre : Conf -> Conf -> Prop) : Prop
:= (forall c, pre c c) /\ (forall c1 c2 c3, pre c1 c2 -> pre c2 c3 -> pre c1 c3).
Definition monotonicity {Conf}
(pre : Conf -> Conf -> Prop) (vm : Conf -> Conf -> Prop) :
Prop := forall (C1 C1' C2 : Conf),
pre C1 C1' ->
vm C1 C2 ->
exists C2', vm C1' C2' /\ pre C2 C2'.
Ltac prove_preorder :=
split;[
intros c; destruct c;
repeat
(match goal with
| [H : _ * _ |- _] => destruct H
end);
eauto with memory
| intros c1 c2 c3 L1 L2; destruct c1, c2, c3;
inversion L1; inversion L2; subst;
repeat
(match goal with
| [H : (_, _) = (_, _) |- _] => inversion H; clear H
end); subst;
eauto with memory].
Ltac prove_monotonicity1 :=
do 3 intro; intros Hle Step;
dependent destruction Step; inversion Hle.
Ltac prove_monotonicity2 := subst;
eexists; (split; [try solve [econstructor; eauto with memory]| eauto with memory]).
Ltac prove_monotonicity := prove_monotonicity1; prove_monotonicity2.
Module Type Machine.
Parameter Conf : Type.
Parameter Pre : Conf -> Conf -> Prop.
Parameter Rel : Conf -> Conf -> Prop.
Parameter preorder : is_preorder Pre.
Parameter monotone : monotonicity Pre Rel.
End Machine.
Require Import List.
Require Import Relations.
Module MetaTheory (machine : Machine).
Export machine.
Import ListNotations.
Declare Scope machine_scope.
Infix "==>" := Rel(at level 80, no associativity) : machine_scope.
Definition trc := clos_refl_trans Conf Rel.
Infix "=>>" := trc (at level 80, no associativity) : machine_scope.
Open Scope machine_scope.
Lemma trc_refl c : c =>> c.
Proof. apply rt_refl. Qed.
Lemma trc_step c1 c2 : c1 ==> c2 -> c1 =>> c2.
Proof. apply rt_step. Qed.
Lemma trc_step_trans c1 c2 c3 : c1 =>> c2 -> c2 ==> c3 -> c1 =>> c3.
Proof. intros. eapply rt_trans; eauto using rt_step. Qed.
Lemma trc_step_trans' c1 c2 c3 : c1 ==> c2 -> c2 =>> c3 -> c1 =>> c3.
Proof. intros. eapply rt_trans; eauto using rt_step. Qed.
Lemma trc_trans c1 c2 c3 : c1 =>> c2 -> c2 =>> c3 -> c1 =>> c3.
Proof. apply rt_trans. Qed.
Hint Resolve trc_step trc_step_trans : core.
Hint Immediate trc_refl : core.
Lemma trc_ind' :
forall P : Conf -> Conf -> Prop,
(forall c : Conf, P c c) ->
(forall c1 c2 c3 : Conf, c1 ==> c2 -> c2 =>> c3 -> P c2 c3 -> P c1 c3) ->
forall c c0 : Conf, c =>> c0 -> P c c0.
Proof.
intros X Y Z c1 c2 S. unfold trc in S. rewrite -> clos_rt_rt1n_iff in S.
induction S; eauto. rewrite <- clos_rt_rt1n_iff in S. eauto.
Qed.
Infix "⊑" := Pre (at level 70) : machine_scope.
Notation "x ⊒ y" := (Pre y x) (at level 70) : machine_scope.
Lemma cle_trans (C1 C2 C3 : Conf) : C1 ⊑ C2 -> C2 ⊑ C3 -> C1 ⊑ C3.
Proof.
intros. pose preorder as P. unfold is_preorder in *. destruct P. eauto.
Qed.
Lemma cle_refl (C : Conf) : C ⊑ C.
Proof.
intros. pose preorder as P. unfold is_preorder in *. destruct P. eauto.
Qed.
Hint Resolve cle_refl : core.
Lemma monotone_step (C1 C1' C2 : Conf) :
C1 ⊑ C1' ->
C1 ==> C2 ->
exists C2', C1' ==> C2' /\ C2 ⊑ C2' .
Proof.
intros. pose monotone as M. unfold monotonicity in M. eauto.
Qed.
Lemma monotone_machine (C1 C1' C2 : Conf) :
C1 ⊑ C1' ->
C1 =>> C2 ->
exists C2', C1' =>> C2' /\ C2 ⊑ C2' .
Proof.
intros I M. generalize dependent C1'. dependent induction M using trc_ind';intros.
- exists C1'. split; eauto.
- eapply monotone_step in I; eauto. destruct I as [m2' HS]. destruct HS as [S Ic'].
apply IHM in Ic'. destruct Ic'. destruct H0. eexists. split. eapply trc_step_trans'; eassumption. assumption.
Qed.
Definition Reach (C1 C2 : Conf) : Prop := exists C, C1 =>> C /\ C ⊒ C2.
Infix "=|>" := Reach (at level 80, no associativity).
Lemma Reach_refl C : C =|> C.
Proof.
exists C. split; auto.
Qed.
Hint Resolve Reach_refl : core.
Lemma Reach_eq C1 C2 : C1 = C2 -> C1 =|> C2.
Proof.
intros. subst. auto.
Qed.
Lemma Reach_trans C1 C2 C3 : C1 =|> C2 -> C2 =|> C3 -> C1 =|> C3.
Proof.
intros L1 L2.
destruct L1 as [C1' L1]. destruct L1 as [S1 M1].
destruct L2 as [C2' L2]. destruct L2 as [S2 M2].
eapply monotone_machine in S2;[|eassumption]. destruct S2 as [C3' G]. destruct G as [S2' M2'].
eexists. split.
- eapply trc_trans. apply S1. apply S2'.
- eapply cle_trans;eassumption.
Qed.
Lemma Reach_cle C1 C2 : C1 ⊒ C2 -> C1 =|> C2.
Proof.
intros L. eexists. split. apply trc_refl. assumption.
Qed.
Lemma Reach_trc C1 C2 : C1 =>> C2 -> C1 =|> C2.
Proof.
intros L. eexists. split. eassumption. apply cle_refl.
Qed.
Lemma Reach_vm C1 C2 : C1 ==> C2 -> C1 =|> C2.
Proof.
intros L. apply Reach_trc. apply trc_step. assumption.
Qed.
Definition determ {A} (R : A -> A -> Prop) : Prop := forall C C1 C2, R C C1 -> R C C2 -> C1 = C2.
Definition trc' C C' := C =>> C' /\ ~ exists C'', C' ==> C''.
Notation "x =>>! y" := (trc' x y) (at level 80, no associativity).
Lemma determ_factor C1 C2 C3 : determ Rel -> C1 ==> C2 -> C1 =>>! C3 -> C2 =>> C3.
Proof.
unfold determ. intros. destruct H1.
destruct H1 using trc_ind'.
- exfalso. apply H2. eexists. eassumption.
- assert (c2 = C2). eapply H. apply H1. apply H0. subst. auto.
Qed.
Lemma determ_trc : determ Rel -> determ trc'.
Proof.
intros. unfold determ. intros. destruct H0. induction H0 using trc_ind'.
- destruct H1. destruct H0 using trc_ind'. reflexivity. exfalso. apply H2. eexists. eassumption.
- apply IHtrc. apply H2. split. eapply determ_factor; eassumption. destruct H1. assumption.
Qed.
End MetaTheory.
|
/*****************
* @file leastsq.h
* @date 4/1/08
* @author Douglas Applegate
*
* @brief Provides a wrapper around the GSL libraries for fitting functions
*****************/
//CVSID $Id: leastsq.h,v 1.1 2008-04-04 01:43:12 mallen Exp $
////////////////////////////////////////////////////
#ifndef LEASTSQ_H
#define LEASTSQ_H
#include <gsl/gsl_vector.h>
#include <gsl/gsl_matrix.h>
/**
* Function prototype for fitting function and its derivative
* @param double x position at which to evaluate function
* @param const double* parameter values to use in evaluation
*/
typedef double (*FitFunction_t)(double, const double*);
/**
* Provides non-linear fitting functionality
*/
class Leastsq {
public:
/**
* Initializes object with basic fit information
* @param fitFunc function to fit to data
* @param dfitFunc array of fit function derivatives, one for each param
* @param nparams number of parameters that fitFunc and dfitFunc accept
*/
Leastsq(FitFunction_t fitFunc,
FitFunction_t* dfitFunc,
int nparams);
/**
* Default constructor
*/
virtual ~Leastsq();
/**
* Fits function to the given data, starting at initial guess
* @param x x values of data
* @param y y values of data
* @param ndata number of data points
* @param guess initial parameter values for fit
*
   * @returns status code for the fit; 1 == fit ran fine
*/
int doFit(const double* x, const double* y, int ndata, double* guess);
/////GETTERS
double numParams() const {return _nparams;};
const double** covarMatrix() const {
return const_cast<const double**>(_covar);
};
double chisq() const {return _chisq;};
const double* parameters() const {
return const_cast<const double*>(_parameters);
};
protected:
/////INSTANCE METHODS
int gslFitFunction(const gsl_vector* x,
void* params,
gsl_vector* result);
int gslDFitFunction(const gsl_vector* x,
void* params,
gsl_matrix* J);
/////Instance Variables
FitFunction_t _fitFunc; ///< Function to fit to data
FitFunction_t* _dfitFunc; ///< Analytical derivative of fitFunc
int _nparams; ///< Number of parameters in fitFunc and dfitFunc
double** _covar; ///< Covariance Matrix
double _chisq; ///< Chisq or residual from fit
double* _parameters; ///< Best fit parameters
const double* _x; ///< x values of data
const double* _y; ///< y values of data
int _ndata; ///< number of data
};
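// A minimal usage sketch (illustrative only -- the exponential model and the
// data arrays xdata/ydata/ndata below are hypothetical, not part of this header):
//
//   double model(double x, const double* p)   { return p[0] * exp(p[1] * x); }
//   double dmodel0(double x, const double* p) { return exp(p[1] * x); }
//   double dmodel1(double x, const double* p) { return p[0] * x * exp(p[1] * x); }
//
//   FitFunction_t derivs[] = { dmodel0, dmodel1 };
//   Leastsq fitter(model, derivs, 2);
//   double guess[] = { 1.0, 0.1 };
//   int status = fitter.doFit(xdata, ydata, ndata, guess); // 1 == fit ran fine
//   const double* best = fitter.parameters();
//   double residual    = fitter.chisq();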
#endif
|
There Are Hundreds of Practicing Exorcists in the U.S.
A recent survey of modern exorcists reveals a surprising lack of drama in the business of casting out demons.
This Halloween, just shy of the 40th anniversary of the movie The Exorcist, has seen the novel’s author and the movie’s producer, William Peter Blatty, get an extra dollop of the attention he gets around this holiday. Not that people aren’t already obsessed by the subject—The Exorcist remains, in inflation-adjusted box office, the most popular R-rated film ever.
At my own Catholic high school, the Christian Brother teachers (yeah, the brandy guys) could be remarkably post-Vatican II flip and modern about most things doctrinal, but they got sober really fast about not messing around with demon-y things. Their sudden seriousness always made me wonder if they knew something. Every once in a while, I still wonder about that.
I’m not the only one. Writing last year in The Journal of Christian Ministry, University of Kentucky psychometrician Kenneth D. Royal described his survey of modern American exorcists.
Those last three words, outside of Hollywood, might sound like an oxymoron. Besides the odd story about some loony parents whose child dies while they ham-handedly cast out demons, surely any instance of demonic possession is just a certain kind of psychological malady, and exorcism just a ridiculous exercise. If somebody’s better after an exorcism, light a candle to Saint Placebo. Plus, garden-variety evil seems to be doing a damned good job without extra-curricular shenanigans.
In such cases good communication with priests is recommended, but we are surprised that in 21st century and in Europe, there are still experts and clerics who believe that some types of schizophrenia are due to demonic possession. Our intention was to ask an expert cleric from the Madrid archdiocese to try to convince the patient that her symptoms were due to a mental disorder, in an effort to improve her insight. To our surprise, clerics assumed that the patient's psychotic symptoms were due to a malign presence.
In the U.S., exorcism takes on a mostly Christian cast, although even non-Abrahamic religions recognize the practice. The bias here is probably because culturally we’re mostly Christian and the New Testament is chock-full of demons being cast out, usually by Jesus but sometimes by his followers in his name. But there are demons in the Old Testament, too, and in Jesus’ day casting them out was a recognized job in the Jewish community. Professional exorcists also were acknowledged in the early days of the Christian church.
Two millennia later, in 1972, Pope Paul VI abolished the minor order of exorcist, but that was more of a human resources reshuffling of roles (porter and catechist also got the papal boot) than a statement that exorcism was passé. Nonetheless, in 1999 the Vatican updated its 300-plus-year-old exorcism regulations and said an allegedly possessed person should be given a thorough medical and psychological exam before phoning the Jesuit hotline.
Through conversations with Christian leaders and authors of texts on exorcism, plus an Internet posting, Royal was able to reach 316 (!) American exorcists. Some 170 answered his survey. Fifteen of these 170 took part in an additional personal interview. Most of the larger set were men, most were Protestant (all the big denominations, including the so-called mainline ones, were represented), and most had “set demonized people free” between 11 and 1,000 times.
Furthermore, most practicing exorcists (but not Catholic ones) believe that “any mature Christian” has the necessary authority to cast out demons.
Nonetheless, fasting, along with being patient and humble, was seen as a necessary or at least useful preparation for the casting out. “Satan,” one interviewee told Royal, “has no equipment to fight humility.” That might be a useful prescription for fighting any sort of devil these days.
|
import hom.basic group_theory.congruence
namespace mygroup
namespace quotient
-- We will in this file define quotient groups using congruences
/- We define `group_con` as an extension of `con` which respects the inverse
operation -/
structure group_con (G : Type) [group G] extends con G :=
(inv' : ∀ {x y}, r x y → r x⁻¹ y⁻¹)
-- A `group_con G` induces a group structure with its congruence classes
variables {G : Type} [group G]
-- We define a coercion from `group_con G` to `setoid G`
instance has_coe_to_setoid : has_coe (group_con G) (setoid G) :=
⟨λ R, R.to_con.to_setoid⟩
/- Coercion from a `group_con G` to its underlying binary relation -/
instance : has_coe_to_fun (group_con G) := ⟨_, λ R, λ x y, R.r x y⟩
lemma mul {R : group_con G} {x₀ x₁ y₀ y₁ : G} :
R x₀ x₁ → R y₀ y₁ → R (x₀ * y₀) (x₁ * y₁) := by apply R.mul'
lemma inv {R : group_con G} {x y : G} : R x y → R x⁻¹ y⁻¹ := by apply R.inv'
-- A quotient on a `group_con G` is the quotient on its coerced setoid
def quotient (R : group_con G) := quotient (R : setoid G)
variables {R : group_con G}
-- Coercion from a group to its quotient
instance : has_coe_t G (quotient R) := ⟨quotient.mk'⟩
-- We can think of the coercion above as making a term of `G` into its
-- equivalence class. So two elements of type `quotient R` are equal iff
-- they are related by the binary relation `R`
lemma eq {x y : G} : (x : quotient R) = y ↔ R x y := quotient.eq'
def lift_on {β} {R : group_con G} (x : quotient R) (f : G → β)
(h : ∀ x y, R x y → f x = f y) : β := quotient.lift_on' x f h
def lift_on₂ {β} {R : group_con G} (x y : quotient R) (f : G → G → β)
(h : ∀ a₁ a₂ b₁ b₂, R a₁ b₁ → R a₂ b₂ → f a₁ a₂ = f b₁ b₂) : β :=
quotient.lift_on₂' x y f h
-- Mathematically, we define mul for `quotient R` by taking two congruence
-- classes and outputting the congruence class of their product, i.e.
-- (*) : (⟦g⟧, ⟦h⟧) ↦ ⟦g * h⟧
-- In Lean, we achieve this by using `lift_on₂` where given some type `β`
-- (in this case `quotient R`), two elements of `quotient R` and a function
-- `f : G → G → β` that respects `R`, it returns a term of `β`.
instance : has_mul (quotient R) :=
{ mul := λ x y, lift_on₂ x y (λ x y, ((x * y : G) : quotient R))
$ λ _ _ _ _ h₁ h₂, eq.2 (mul h₁ h₂) }
-- Similar story for the inverse in which we use `lift_on` instead.
-- Mathematically, the inverse is defined to be the congruence class of the
-- inverse, i.e. (⁻¹) : ⟦g⟧ ↦ ⟦g⁻¹⟧
instance : has_inv (quotient R) :=
⟨λ x, lift_on x (λ x, ((x⁻¹ : G) : quotient R)) $ λ _ _ h, eq.2 (inv h)⟩
instance : has_one (quotient R) := ⟨((1 : G) : quotient R)⟩
lemma coe (x : G) : quotient.mk' x = (x : quotient R) := rfl
lemma coe_mul (x y : G) : (x : quotient R) * y = ((x * y : G) : quotient R) := rfl
lemma coe_inv (x : G) : (x : quotient R)⁻¹ = ((x⁻¹ : G): quotient R) := rfl
lemma coe_one : ((1 : G) : quotient R) = 1 := rfl
-- I think the rhs is more desirable in most cases so I will make simp use them
attribute [simp] coe coe_mul coe_inv coe_one
-- We now simply need to prove all the group axioms
-- To prove propositions regarding elements of `quotient R` we need to use the
-- induction principle for quotients `quotient.induction_on`.
-- In this case we are using the variant of this induction principle with
-- three arguments.
-- Essentially, to prove a proposition true for all `x : quotient R`, it
-- suffices to prove that the proposition is true for all `(g : G) : quotient R`
lemma mul_assoc' {a b c : quotient R} : a * b * c = a * (b * c) :=
begin
apply quotient.induction_on₃' a b c,
intros _ _ _,
iterate 3 { rw coe },
iterate 4 { rw coe_mul },
rw group.mul_assoc
end
lemma one_mul' {a : quotient R} : 1 * a = a :=
begin
apply quotient.induction_on' a,
intro x, rw [coe, ← coe_one, coe_mul, group.one_mul]
end
lemma mul_left_inv' {a : quotient R} : a⁻¹ * a = 1 :=
begin
apply quotient.induction_on' a,
intro x, rw [coe, coe_inv, coe_mul, group.mul_left_inv, coe_one]
end
-- With that we find `quotient R` form a group
instance of_quotient : group (quotient R) :=
{ mul := (*), one := (1), inv := has_inv.inv,
mul_assoc := λ _ _ _, mul_assoc',
one_mul := λ _, one_mul',
mul_left_inv := λ _, mul_left_inv' }
-- But this is not how most of us learnt quotient groups. For us, quotient groups
-- are defined by creating a group structure on the set of coests of a normal
-- subgroup. We will show that the equivalence relation `lcoset_rel H` in which
-- `x ~ y ↔ x ⋆ H = y ⋆ H` is a group congruence if and only if H is normal.
/- The main proposition we will prove is that given a subgroup H of the group G,
the equivalence relation ~ : (g, k) ↦ g H = k H on G is a group congruence if
and only if H is normal. -/
open mygroup.subgroup lagrange function
variables {H : subgroup G} {N : normal G}
def lcoset_rel (H : subgroup G) := λ x y, x ⋆ H = y ⋆ H
local notation x ` ~ `:70 y := lcoset_rel H x y
local notation x ` ~[ `:70 H:70 ` ] `:0 y:0 := lcoset_rel H x y
lemma lcoset_rel_def (x y : G) : x ~ y ↔ x ⋆ H = y ⋆ H := iff.rfl
lemma lcoset_iseqv (H : subgroup G) : equivalence (lcoset_rel H) :=
begin
refine ⟨by tauto, λ _ _ hxy, hxy.symm, _⟩,
intros _ _ _ hxy hyz, unfold lcoset_rel at *, rw [hxy, hyz]
end
lemma lcoset_mul {x₀ x₁ y₀ y₁ : G}
(hx : x₀ ~[↑N] x₁) (hy : y₀ ~[↑N] y₁) : (x₀ * y₀) ~[↑N] (x₁ * y₁) :=
begin
rw [lcoset_rel_def, lcoset_eq] at *,
have := N.conj_mem _ hx y₁⁻¹,
rw group.inv_inv at this,
replace this := N.mul_mem' this hy,
rw [← group.mul_assoc, group.mul_assoc (y₁⁻¹ * (x₁⁻¹ * x₀)),
group.mul_right_inv, group.mul_one, ← group.mul_assoc] at this,
rwa [group.inv_mul, ← group.mul_assoc],
end
lemma lcoset_inv {x y : G} (hxy : x ~[↑N] y) : (x⁻¹ ~[↑N] y⁻¹) :=
begin
rw [lcoset_rel_def, lcoset_eq] at *,
rw ← group.inv_mul,
apply N.inv_mem',
convert N.conj_mem' _ hxy y,
simp [← group.mul_assoc]
end
/-- If `H` is normal, then `lcoset_rel H` is a group congruence -/
def con_of_normal (G : Type) [group G] (N : normal G) : group_con G :=
{ r := lcoset_rel N,
iseqv := lcoset_iseqv N,
mul' := λ x₀ x₁ y₀ y₁ hx hy, lcoset_mul hx hy,
inv' := λ x y hxy, lcoset_inv hxy }
lemma con_of_normal_def {G : Type} [group G] {N : normal G} (x y : G) :
con_of_normal G N x y ↔ x ~[↑N] y := iff.rfl
lemma con_one_of_mem : ∀ h ∈ H, h ~ 1 :=
begin
intros h hh,
rw [lcoset_rel_def, lcoset_eq], simpa
end
lemma mem_of_con_one {g : G} (hg : g ~ 1) : g ∈ H :=
by rwa [lcoset_rel_def, lcoset_eq, group.one_inv, group.one_mul] at hg
/-- If `lcoset_rel H` is a congruence then `H` is normal -/
def normal_of_con (H : subgroup G) {R : group_con G}
(hR : R.r = lcoset_rel H) : normal G :=
{ conj_mem' := λ n hn g, mem_of_con_one $
begin
rw [← hR, (show (1 : G) = g * 1 * g⁻¹, by simp)],
refine R.mul' (R.mul' (R.iseqv.1 _) _) (R.iseqv.1 _),
{ rw hR, exact con_one_of_mem _ hn }
end .. H }
lemma con_one_iff_mem (h : G) : h ~ 1 ↔ h ∈ H := ⟨mem_of_con_one, con_one_of_mem h⟩
-- So now, whenever we would like to work with "normal" quotient groups of
-- a group `G` over a normal subgroup `N`, we write `quotient (con_of_normal G N)`
notation G ` /ₘ `:70 N:70 := quotient (con_of_normal G N)
/-- For all elements `c : G /ₘ N`, there is some `g : G` such that `⟦g⟧ = c`-/
lemma exists_mk {N : normal G} (c : G /ₘ N) : ∃ g : G, (g : G /ₘ N) = c :=
@quotient.exists_rep G (con_of_normal G N) c
/-- `(⟦p⟧ : G /ₘ N) = ⟦q⟧` iff `p ⋆ N = q ⋆ N` where `p q : G` -/
lemma mk_eq {p q : G} : (p : G /ₘ N) = q ↔ p ⋆ N = q ⋆ N :=
⟨λ h, quotient.eq.1 h, λ h, quotient.eq.2 h⟩
lemma mk_eq' {p q : G} : (p : G /ₘ N) = q ↔ q⁻¹ * p ∈ N :=
begin
rw mk_eq,
exact lcoset_eq
end
lemma coe_pow (g : G) (n : ℤ) : (g : G /ₘ N) ^ n = ((g ^ n : G) : G /ₘ N) :=
begin
apply int.induction_on n,
{ refl },
{ intros _ hi,
rw [group.pow_add, group.pow_add, ← quotient.coe_mul, hi], congr },
{ intros _ hi,
rw [group.pow_sub, group.pow_sub, group.pow_neg_one_inv,
group.pow_neg_one_inv, hi, ← quotient.coe_mul, quotient.coe_inv] }
end
end quotient
end mygroup |
%% Transient diffusion equation
%% PDE and boundary conditions
% The transient diffusion equation reads
%
% $$\alpha\frac{\partial c}{\partial t}+\nabla.\left(-D\nabla c\right)=0,$$
%
% where $c$ is the independent variable (concentration, temperature, etc)
% , $D$ is the diffusion coefficient, and $\alpha$ is a constant.
% Written by Ali A. Eftekhari
% Last checked: June 2021
clc
%% Define the domain and create a mesh structure
L = 50; % domain length
Nx = 20; % number of cells
m = createMesh2D(Nx,Nx, L,L);
%% Create the boundary condition structure
BC = createBC(m); % all Neumann boundary condition structure (default; overridden below)
BC.left.a(:) = 0; BC.left.b(:)=1; BC.left.c(:)=0; % left boundary (Dirichlet, c = 0)
BC.right.a(:) = 0; BC.right.b(:)=1; BC.right.c(:)=0; % right boundary (Dirichlet, c = 0)
BC.top.a(:) = 0; BC.top.b(:)=1; BC.top.c(:)=0; % top boundary (Dirichlet, c = 0)
BC.bottom.a(:) = 0; BC.bottom.b(:)=1; BC.bottom.c(:)=0; % bottom boundary (Dirichlet, c = 0)
%% define the transfer coeffs
D_val = 1;
D = createCellVariable(m, D_val);
alfa = createCellVariable(m, 1);
%% define initial values
c_init = 1;
c_old = createCellVariable(m, c_init,BC); % initial values
c = c_old; % assign the old value of the cells to the current values
%% loop
dt = 1; % time step
final_t = 100;
for t=dt:dt:final_t
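% assemble and solve the implicit linear system
% (M_trans - Mdiff + Mbc)*c = RHS_trans + RHSbc for the new concentration field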
[M_trans, RHS_trans] = transientTerm(c_old, dt, alfa);
Dave = harmonicMean(D);
Mdiff = diffusionTerm(Dave);
[Mbc, RHSbc] = boundaryCondition(BC);
M = M_trans-Mdiff+Mbc;
RHS = RHS_trans+RHSbc;
c = solvePDE(m,M, RHS);
c_old = c;
figure(1);visualizeCells(c);drawnow;
end
|
(* Title: HOL/Auth/n_german_lemma_on_inv__29.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_german Protocol Case Study*}
theory n_german_lemma_on_inv__29 imports n_german_base
begin
section{*All lemmas on causal relation between inv__29 and some rule r*}
lemma n_RecvReqVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvReq N i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvReq N i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInvEVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInvE i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInvE i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInvSVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInvS i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInvS i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvInvAckVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvInvAck i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Ident ''InvSet'') p__Inv4)) (Const true)) (eqn (IVar (Field (Para (Ident ''Chan3'') p__Inv4) ''Cmd'')) (Const InvAck))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntSVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntS i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntS i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntEVsinv__29:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntE N i" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__29 p__Inv4" apply fastforce done
have "(i=p__Inv4)\<or>(i~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_StoreVsinv__29:
assumes a1: "\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqESVsinv__29:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqES i" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvGntSVsinv__29:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvGntS i" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvGntEVsinv__29:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvGntE i" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendInvAckVsinv__29:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendInvAck i" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqSVsinv__29:
assumes a1: "\<exists> j. j\<le>N\<and>r=n_SendReqS j" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqEIVsinv__29:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqEI i" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__29 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
end
|
(* Property from Case-Analysis for Rippling and Inductive Proof,
Moa Johansson, Lucas Dixon and Alan Bundy, ITP 2010.
This Isabelle theory is produced using the TIP tool offered at the following website:
https://github.com/tip-org/tools
This file was originally provided as part of TIP benchmark at the following website:
https://github.com/tip-org/benchmarks
Yutaka Nagashima at CIIRC, CTU changed the TIP output theory file slightly
to make it compatible with Isabelle2017.
Some proofs were added by Yutaka Nagashima.*)
theory TIP_prop_29
imports "../../Test_Base"
begin
datatype 'a list = nil2 | cons2 "'a" "'a list"
datatype Nat = Z | S "Nat"
fun x :: "Nat => Nat => bool" where
"x (Z) (Z) = True"
| "x (Z) (S z2) = False"
| "x (S x2) (Z) = False"
| "x (S x2) (S y2) = x x2 y2"
fun ins1 :: "Nat => Nat list => Nat list" where
"ins1 y (nil2) = cons2 y (nil2)"
| "ins1 y (cons2 z2 xs) =
(if x y z2 then cons2 z2 xs else cons2 z2 (ins1 y xs))"
fun elem :: "Nat => Nat list => bool" where
"elem y (nil2) = False"
| "elem y (cons2 z2 xs) = (if x y z2 then True else elem y xs)"
theorem property0 :
"elem y (ins1 y xs)"
(*"arbitrary:y" also works well.*)
apply(induct xs (*arbitrary: y*))
apply clarsimp
apply(induct_tac y)
by auto
end |
using Sialia
using Test
@testset "Sialia.jl" begin
include("eventdispatcher_tests.jl")
include("eventstore_tests.jl")
end
|
Notaries in Bridgeton, MO are listed below.
I am a former loan officer and title officer, with 12 years of experience in the mortgage closing industry.
I'm a certified notary and have been for over 12 years. I have experience with legal & mortgage documents, etc. |
State Before: α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s ∪ t + s ∩ t = s + t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s ∪ t + s ∩ t ≤ s + t
case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + t ≤ s ∪ t + s ∩ t Tactic: apply _root_.le_antisymm State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s ∪ t + s ∩ t ≤ s + t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + s ∩ t ∪ (t + s ∩ t) ≤ s + t Tactic: rw [union_add_distrib] State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + s ∩ t ∪ (t + s ∩ t) ≤ s + t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ∩ t ≤ s + t Tactic: refine' union_le (add_le_add_left (inter_le_right _ _) _) _ State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ∩ t ≤ s + t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s ∩ t + t ≤ s + t Tactic: rw [add_comm] State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s ∩ t + t ≤ s + t State After: no goals Tactic: exact add_le_add_right (inter_le_left _ _) _ State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + t ≤ s ∪ t + s ∩ t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ≤ (s ∪ t + s) ∩ (s ∪ t + t) Tactic: rw [add_comm, add_inter_distrib] State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ≤ (s ∪ t + s) ∩ (s ∪ t + t) State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ≤ s ∪ t + t Tactic: refine' le_inter (add_le_add_right (le_union_right _ _) _) _ State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ t + s ≤ s ∪ t + t State After: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + t ≤ s ∪ t + t Tactic: rw [add_comm] State Before: case a
α : Type u_1
β : Type ?u.190902
γ : Type ?u.190905
inst✝ : DecidableEq α
s✝ t✝ u : Multiset α
a b : α
s t : Multiset α
⊢ s + t ≤ s ∪ t + t State After: no goals Tactic: exact add_le_add_right (le_union_left _ _) _ |
function f=burger(u,deriv)
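% Flux for the inviscid Burgers equation: f(u) = u.^2/2.
% If deriv is nonzero, the derivative f'(u) = u is returned instead.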
if nargin<2,
deriv=0;
end;
if deriv==0,
f=0.5*u.^2;
else
f=u;
end; |
(* Version that defines comp := fun f g x => f (g x) *)
(**
Singleton
  type of objects    unit
  example objects    tt (only)
  type of morphisms  Hom := ∀a b : unit, nat
  type of morphisms  Hom tt tt (= nat)
  example morphisms  1, 2
Category of sets
  type of objects    Set
  example objects    nat (only)
  type of morphisms  Hom := ∀A B : Set, A -> B
  type of morphisms  Hom nat nat (= nat -> nat)
  example morphisms  plus 1, plus 2
Poset category
  type of objects    nat
  example objects    3, 4
  type of morphisms  Hom := ∀m n : nat, m <= n
  type of morphisms  Hom 3 4 (= 3 <= 4)
  example morphisms  le3_4
Shiritori (word-chain) category
  type of objects    Hira
  example objects    こ, ぶ, た
  type of morphisms  (an Inductive definition)
  example morphisms  こ ぶ た
Category of types
  type of objects    Type
  example objects    nat
  type of morphisms  Hom := ∀A B : Type, A -> B
  type of morphisms  Hom nat nat (= nat -> nat)
  example morphisms  plus 1, plus 2 (terms)
Category of propositions
  type of objects    Prop
  example objects    1=2, 2=1
  type of morphisms  Hom := ∀A B : Prop, A -> B
  type of morphisms  Hom (1=2) (2=1)
  example morphisms  1=2 -> 2=1 (a deduction / proof tree)
*)
Require Import Omega.
Require Import Coq.Logic.ProofIrrelevance.
(* Axiom proof_irrelevance : forall (P : Prop) (p1 p2 : P), p1 = p2. *)
Set Implicit Arguments.
Generalizable Variables Obj.
Class Category `(Hom : Obj -> Obj -> Set) : Type :=
{
Hom := Hom;
Obj := Obj;
id : forall {A : Obj}, Hom A A;
comp : forall {A B C : Obj}, Hom B C -> Hom A B -> Hom A C; (* *** *)
left_identity : forall {A B : Obj} {f : Hom A B}, comp id f = f;
right_identity : forall {A B : Obj} {f : Hom A B}, comp f id = f;
associativity : forall {A B C D : Obj} {f : Hom C D} {g : Hom B C} {h : Hom A B},
comp f (comp g h) = comp (comp f g) h
}.
Notation "A ~> B" := (Hom A B) (at level 51, left associativity).
Notation "f \o g" := (comp f g) (at level 51, left associativity).
(* *********** *)
(* シングルトン *)
(* *********** *)
Definition Hom0 (A B : unit) : Set := nat.
Program Instance SINGLETON : @Category unit Hom0 :=
{|
id _ := 0;
comp _ _ _ := fun m n => m + n
|}.
Obligation 3.
Proof.
now apply plus_assoc.
Qed.
(* Examples *)
Check Hom : unit -> unit -> Set.
Check comp 2 3 : Hom tt tt.
Compute comp 2 3.
Check 2 \o 3 : tt ~> tt.
(* ******** *)
(* Category of sets *)
(* ******** *)
Definition Hom1 (A B : Set) : Set := A -> B.
Program Instance SETS : @Category Set Hom1 :=
{|
id _ := fun x => x;
comp _ _ _ := fun f g x => f (g x) (* *** *)
|}.
(* Examples *)
Check Hom : Set -> Set -> Set.
Check comp (plus 2) (plus 3) : Hom nat nat.
Compute comp (plus 2) (plus 3).
Check (plus 2) \o (plus 3) : nat ~> nat.
(* ************* *)
(* Poset category *)
(* ************* *)
Definition Hom2 (m n : nat) : Set := m <= n.
Definition id2 (n : nat) : Hom2 n n.
Proof.
  unfold Hom2. easy. (* easy, not omega *)
Defined.
Definition comp2 {m n p} H2 H1 := le_trans m n p H1 H2.
Program Instance NAT : @Category nat Hom2 :=
{|
id := id2;
comp nat _ _ := comp2
|}.
Obligation 1.
Proof.
unfold Hom2 in *.
apply proof_irrelevance.
Qed.
Obligation 2.
Proof.
unfold Hom2 in *.
apply proof_irrelevance.
Qed.
Obligation 3.
Proof.
unfold Hom2 in *.
apply proof_irrelevance.
Qed.
(* Examples *)
Definition le34 : Hom 3 4. Proof. unfold Hom, Hom2. omega. Defined.
Definition le45 : Hom 4 5. Proof. unfold Hom, Hom2. omega. Defined.
Check comp le45 le34 : Hom 3 5.
Compute comp le45 le34.
Check le45 \o le34 : 3 ~> 5.
(* *********** *)
(* Shiritori (word-chain) category *)
(* *********** *)
(* http://yosh.hateblo.jp/entry/20090425/p1 *)
(* comp takes its arguments in the same order as function composition. *)
Inductive Hira : Set := こ | ぶ | た | ぬ | き | つ | ね.
Inductive Hom3 : Hira -> Hira -> Set :=
| single : forall A, Hom3 A A
| cons : forall {A' B : Hira} (A : Hira) (tl : Hom3 A' B), Hom3 A B.
Definition comp3 {A B C : Hira} (f : Hom3 B C) (g : Hom3 A B) : Hom3 A C.
Proof.
intros.
induction g.
+ easy.
+ Check (cons A (IHg f)).
apply (cons A (IHg f)).
Defined.
Program Instance SIRI : @Category Hira Hom3 :=
{|
id := single;
comp _ _ _ := comp3
|}.
Obligation 1.
induction f.
- simpl.
reflexivity.
- simpl.
rewrite IHf.
reflexivity.
Qed.
Obligation 3.
Proof.
induction h.
+ easy.
+ simpl.
now rewrite IHh.
Qed.
(* Examples *)
Definition こぶた := cons こ (cons ぶ (single た)) : Hom こ た.
Definition たぬき := cons た (cons ぬ (single き)) : Hom た き.
Check comp たぬき こぶた : Hom こ き.
Compute comp たぬき こぶた. (* こ ぶ た ぬ き : Hom こ き *)
Check たぬき \o こぶた : こ ~> き.
(* END *)
|
{-# OPTIONS --cubical --safe --postfix-projections #-}
open import Cubical.Core.Everything
open import Cubical.Foundations.Embedding
open import Cubical.Foundations.Equiv
open import Cubical.Foundations.Function
open import Cubical.Foundations.Isomorphism
open import Cubical.Foundations.Prelude
open import Cubical.Foundations.Univalence
-- A helper module for deriving univalence for a higher inductive-recursive
-- universe.
--
-- U is the type of codes
-- El is the decoding
-- uaf is a higher constructor that requires paths between codes to exist
-- for equivalences of decodings
-- comp is intended to be the computational behavior of El on uaf, although
-- it seems that being a path is sufficient.
-- ret is a higher constructor that fills out the equivalence structure
-- for uaf and the computational behavior of El.
--
-- Given a universe defined as above, it's possible to show that the path
-- space of the code type is equivalent to the path space of the actual
-- decodings, which are themselves determined by equivalences.
--
-- The levels are left independent, but of course it will generally be
-- impossible to define this sort of universe unless ℓ' < ℓ, because El will
-- be too big to go in a constructor of U. The exception would be if U could
-- be defined independently of El, though it might be tricky to get the right
-- higher structure in such a case.
module Cubical.Foundations.Univalence.Universe {ℓ ℓ'}
(U : Type ℓ)
(El : U → Type ℓ')
(uaf : ∀{s t} → El s ≃ El t → s ≡ t)
(comp : ∀{s t} (e : El s ≃ El t) → cong El (uaf e) ≡ ua e)
(ret : ∀{s t : U} → (p : s ≡ t) → uaf (lineToEquiv (λ i → El (p i))) ≡ p)
where
minivalence : ∀{s t} → (s ≡ t) ≃ (El s ≡ El t)
minivalence {s} {t} = isoToEquiv mini
where
open Iso
mini : Iso (s ≡ t) (El s ≡ El t)
mini .fun = cong El
mini .inv = uaf ∘ pathToEquiv
mini .rightInv p = comp (pathToEquiv p) ∙ uaη p
mini .leftInv = ret
isEmbeddingEl : isEmbedding El
isEmbeddingEl s t = snd minivalence
|
/-
Copyright (c) 2018 Johannes Hölzl. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Johannes Hölzl, Kenny Lau, Johan Commelin, Mario Carneiro, Kevin Buzzard
-/
import group_theory.submonoid.basic
import algebra.big_operators.basic
import deprecated.group
/-!
# Unbundled submonoids (deprecated)
> THIS FILE IS SYNCHRONIZED WITH MATHLIB4.
> Any changes to this file require a corresponding PR to mathlib4.
This file is deprecated, and is no longer imported by anything in mathlib other than other
deprecated files, and test files. You should not need to import it.
This file defines unbundled multiplicative and additive submonoids. Instead of using this file,
please use `submonoid G` and `add_submonoid A`, defined in `group_theory.submonoid.basic`.
## Main definitions
`is_add_submonoid (S : set M)` : the predicate that `S` is the underlying subset of an additive
submonoid of `M`. The bundled variant `add_submonoid M` should be used in preference to this.
`is_submonoid (S : set M)` : the predicate that `S` is the underlying subset of a submonoid
of `M`. The bundled variant `submonoid M` should be used in preference to this.
## Tags
submonoid, submonoids, is_submonoid
-/
open_locale big_operators
variables {M : Type*} [monoid M] {s : set M}
variables {A : Type*} [add_monoid A] {t : set A}
/-- `s` is an additive submonoid: a set containing 0 and closed under addition.
Note that this structure is deprecated, and the bundled variant `add_submonoid A` should be
preferred. -/
structure is_add_submonoid (s : set A) : Prop :=
(zero_mem : (0:A) ∈ s)
(add_mem {a b} : a ∈ s → b ∈ s → a + b ∈ s)
/-- `s` is a submonoid: a set containing 1 and closed under multiplication.
Note that this structure is deprecated, and the bundled variant `submonoid M` should be
preferred. -/
@[to_additive]
structure is_submonoid (s : set M) : Prop :=
(one_mem : (1:M) ∈ s)
(mul_mem {a b} : a ∈ s → b ∈ s → a * b ∈ s)
lemma additive.is_add_submonoid
{s : set M} : ∀ (is : is_submonoid s), @is_add_submonoid (additive M) _ s
| ⟨h₁, h₂⟩ := ⟨h₁, @h₂⟩
theorem additive.is_add_submonoid_iff
{s : set M} : @is_add_submonoid (additive M) _ s ↔ is_submonoid s :=
⟨λ ⟨h₁, h₂⟩, ⟨h₁, @h₂⟩, additive.is_add_submonoid⟩
lemma multiplicative.is_submonoid
{s : set A} : ∀ (is : is_add_submonoid s), @is_submonoid (multiplicative A) _ s
| ⟨h₁, h₂⟩ := ⟨h₁, @h₂⟩
theorem multiplicative.is_submonoid_iff
{s : set A} : @is_submonoid (multiplicative A) _ s ↔ is_add_submonoid s :=
⟨λ ⟨h₁, h₂⟩, ⟨h₁, @h₂⟩, multiplicative.is_submonoid⟩
/-- The intersection of two submonoids of a monoid `M` is a submonoid of `M`. -/
@[to_additive "The intersection of two `add_submonoid`s of an `add_monoid` `M` is
an `add_submonoid` of M."]
lemma is_submonoid.inter {s₁ s₂ : set M} (is₁ : is_submonoid s₁) (is₂ : is_submonoid s₂) :
is_submonoid (s₁ ∩ s₂) :=
{ one_mem := ⟨is₁.one_mem, is₂.one_mem⟩,
mul_mem := λ x y hx hy,
⟨is₁.mul_mem hx.1 hy.1, is₂.mul_mem hx.2 hy.2⟩ }
/-- The intersection of an indexed set of submonoids of a monoid `M` is a submonoid of `M`. -/
@[to_additive "The intersection of an indexed set of `add_submonoid`s of an `add_monoid` `M` is
an `add_submonoid` of `M`."]
lemma is_submonoid.Inter {ι : Sort*} {s : ι → set M} (h : ∀ y : ι, is_submonoid (s y)) :
is_submonoid (set.Inter s) :=
{ one_mem := set.mem_Inter.2 $ λ y, (h y).one_mem,
mul_mem := λ x₁ x₂ h₁ h₂, set.mem_Inter.2 $
λ y, (h y).mul_mem (set.mem_Inter.1 h₁ y) (set.mem_Inter.1 h₂ y) }
/-- The union of an indexed, directed, nonempty set of submonoids of a monoid `M` is a submonoid
of `M`. -/
@[to_additive "The union of an indexed, directed, nonempty set
of `add_submonoid`s of an `add_monoid` `M` is an `add_submonoid` of `M`. "]
lemma is_submonoid_Union_of_directed {ι : Type*} [hι : nonempty ι]
{s : ι → set M} (hs : ∀ i, is_submonoid (s i))
(directed : ∀ i j, ∃ k, s i ⊆ s k ∧ s j ⊆ s k) :
is_submonoid (⋃i, s i) :=
{ one_mem := let ⟨i⟩ := hι in set.mem_Union.2 ⟨i, (hs i).one_mem⟩,
mul_mem := λ a b ha hb,
let ⟨i, hi⟩ := set.mem_Union.1 ha in
let ⟨j, hj⟩ := set.mem_Union.1 hb in
let ⟨k, hk⟩ := directed i j in
set.mem_Union.2 ⟨k, (hs k).mul_mem (hk.1 hi) (hk.2 hj)⟩ }
section powers
/-- The set of natural number powers `1, x, x², ...` of an element `x` of a monoid. -/
@[to_additive multiples
"The set of natural number multiples `0, x, 2x, ...` of an element `x` of an `add_monoid`."]
def powers (x : M) : set M := {y | ∃ n:ℕ, x^n = y}
/-- 1 is in the set of natural number powers of an element of a monoid. -/
@[to_additive "0 is in the set of natural number multiples of an element of an `add_monoid`."]
lemma powers.one_mem {x : M} : (1 : M) ∈ powers x := ⟨0, pow_zero _⟩
/-- An element of a monoid is in the set of that element's natural number powers. -/
@[to_additive
"An element of an `add_monoid` is in the set of that element's natural number multiples."]
lemma powers.self_mem {x : M} : x ∈ powers x := ⟨1, pow_one _⟩
/-- The set of natural number powers of an element of a monoid is closed under multiplication. -/
@[to_additive
"The set of natural number multiples of an element of an `add_monoid` is closed under addition."]
lemma powers.mul_mem {x y z : M} : (y ∈ powers x) → (z ∈ powers x) → (y * z ∈ powers x) :=
λ ⟨n₁, h₁⟩ ⟨n₂, h₂⟩, ⟨n₁ + n₂, by simp only [pow_add, *]⟩
/-- The set of natural number powers of an element of a monoid `M` is a submonoid of `M`. -/
@[to_additive "The set of natural number multiples of an element of
an `add_monoid` `M` is an `add_submonoid` of `M`."]
lemma powers.is_submonoid (x : M) : is_submonoid (powers x) :=
{ one_mem := powers.one_mem,
mul_mem := λ y z, powers.mul_mem }
/-- A monoid is a submonoid of itself. -/
@[to_additive "An `add_monoid` is an `add_submonoid` of itself."]
lemma univ.is_submonoid : is_submonoid (@set.univ M) := by split; simp
/-- The preimage of a submonoid under a monoid hom is a submonoid of the domain. -/
@[to_additive "The preimage of an `add_submonoid` under an `add_monoid` hom is
an `add_submonoid` of the domain."]
lemma is_submonoid.preimage {N : Type*} [monoid N] {f : M → N} (hf : is_monoid_hom f)
{s : set N} (hs : is_submonoid s) : is_submonoid (f ⁻¹' s) :=
{ one_mem := show f 1 ∈ s, by rw is_monoid_hom.map_one hf; exact hs.one_mem,
mul_mem := λ a b (ha : f a ∈ s) (hb : f b ∈ s),
show f (a * b) ∈ s, by rw is_monoid_hom.map_mul hf; exact hs.mul_mem ha hb }
/-- The image of a submonoid under a monoid hom is a submonoid of the codomain. -/
@[to_additive "The image of an `add_submonoid` under an `add_monoid`
hom is an `add_submonoid` of the codomain."]
lemma is_submonoid.image {γ : Type*} [monoid γ] {f : M → γ} (hf : is_monoid_hom f)
{s : set M} (hs : is_submonoid s) : is_submonoid (f '' s) :=
{ one_mem := ⟨1, hs.one_mem, hf.map_one⟩,
mul_mem := λ a b ⟨x, hx⟩ ⟨y, hy⟩, ⟨x * y, hs.mul_mem hx.1 hy.1,
by rw [hf.map_mul, hx.2, hy.2]⟩ }
/-- The image of a monoid hom is a submonoid of the codomain. -/
@[to_additive "The image of an `add_monoid` hom is an `add_submonoid`
of the codomain."]
lemma range.is_submonoid {γ : Type*} [monoid γ] {f : M → γ} (hf : is_monoid_hom f) :
is_submonoid (set.range f) :=
by { rw ← set.image_univ, exact univ.is_submonoid.image hf }
/-- Submonoids are closed under natural powers. -/
@[to_additive is_add_submonoid.smul_mem
"An `add_submonoid` is closed under multiplication by naturals."]
lemma is_submonoid.pow_mem {a : M} (hs : is_submonoid s) (h : a ∈ s) : ∀ {n : ℕ}, a ^ n ∈ s
| 0 := by { rw pow_zero, exact hs.one_mem }
| (n + 1) := by { rw pow_succ, exact hs.mul_mem h is_submonoid.pow_mem }
/-- The set of natural number powers of an element of a submonoid is a subset of the submonoid. -/
@[to_additive is_add_submonoid.multiples_subset "The set of natural number multiples of an element
of an `add_submonoid` is a subset of the `add_submonoid`."]
lemma is_submonoid.power_subset {a : M} (hs : is_submonoid s) (h : a ∈ s) : powers a ⊆ s :=
assume x ⟨n, hx⟩, hx ▸ hs.pow_mem h
end powers
namespace is_submonoid
/-- The product of a list of elements of a submonoid is an element of the submonoid. -/
@[to_additive "The sum of a list of elements of an `add_submonoid` is an element of the
`add_submonoid`."]
lemma list_prod_mem (hs : is_submonoid s) : ∀{l : list M}, (∀x∈l, x ∈ s) → l.prod ∈ s
| [] h := hs.one_mem
| (a::l) h :=
suffices a * l.prod ∈ s, by simpa,
have a ∈ s ∧ (∀x∈l, x ∈ s), by simpa using h,
hs.mul_mem this.1 (list_prod_mem this.2)
/-- The product of a multiset of elements of a submonoid of a `comm_monoid` is an element of
the submonoid. -/
@[to_additive "The sum of a multiset of elements of an `add_submonoid` of an `add_comm_monoid`
is an element of the `add_submonoid`. "]
lemma multiset_prod_mem {M} [comm_monoid M] {s : set M} (hs : is_submonoid s) (m : multiset M) :
(∀a∈m, a ∈ s) → m.prod ∈ s :=
begin
refine quotient.induction_on m (assume l hl, _),
rw [multiset.quot_mk_to_coe, multiset.coe_prod],
exact list_prod_mem hs hl
end
/-- The product of elements of a submonoid of a `comm_monoid` indexed by a `finset` is an element
of the submonoid. -/
@[to_additive "The sum of elements of an `add_submonoid` of an `add_comm_monoid` indexed by
a `finset` is an element of the `add_submonoid`."]
lemma finset_prod_mem {M A} [comm_monoid M] {s : set M} (hs : is_submonoid s) (f : A → M) :
∀(t : finset A), (∀b∈t, f b ∈ s) → ∏ b in t, f b ∈ s
| ⟨m, hm⟩ _ := multiset_prod_mem hs _ (by simpa)
end is_submonoid
namespace add_monoid
/-- The inductively defined membership predicate for the `add_submonoid` generated by a subset of
an `add_monoid`. -/
inductive in_closure (s : set A) : A → Prop
| basic {a : A} : a ∈ s → in_closure a
| zero : in_closure 0
| add {a b : A} : in_closure a → in_closure b → in_closure (a + b)
end add_monoid
namespace monoid
/-- The inductively defined membership predicate for the `submonoid` generated by a subset of a
monoid. -/
@[to_additive]
inductive in_closure (s : set M) : M → Prop
| basic {a : M} : a ∈ s → in_closure a
| one : in_closure 1
| mul {a b : M} : in_closure a → in_closure b → in_closure (a * b)
/-- The inductively defined submonoid generated by a subset of a monoid. -/
@[to_additive "The inductively defined `add_submonoid` generated by a subset of an `add_monoid`."]
def closure (s : set M) : set M := {a | in_closure s a }
@[to_additive]
lemma closure.is_submonoid (s : set M) : is_submonoid (closure s) :=
{ one_mem := in_closure.one, mul_mem := assume a b, in_closure.mul }
/-- A subset of a monoid is contained in the submonoid it generates. -/
@[to_additive "A subset of an `add_monoid` is contained in the `add_submonoid` it generates."]
theorem subset_closure {s : set M} : s ⊆ closure s :=
assume a, in_closure.basic
/-- The submonoid generated by a set is contained in any submonoid that contains the set. -/
@[to_additive "The `add_submonoid` generated by a set is contained in any `add_submonoid` that
contains the set."]
theorem closure_subset {s t : set M} (ht : is_submonoid t) (h : s ⊆ t) : closure s ⊆ t :=
assume a ha, by induction ha; simp [h _, *, is_submonoid.one_mem, is_submonoid.mul_mem]
/-- Given subsets `t` and `s` of a monoid `M`, if `s ⊆ t`, the submonoid of `M` generated by `s` is
contained in the submonoid generated by `t`. -/
@[to_additive "Given subsets `t` and `s` of an `add_monoid M`, if `s ⊆ t`, the `add_submonoid`
of `M` generated by `s` is contained in the `add_submonoid` generated by `t`."]
theorem closure_mono {s t : set M} (h : s ⊆ t) : closure s ⊆ closure t :=
closure_subset (closure.is_submonoid t) $ set.subset.trans h subset_closure
/-- The submonoid generated by an element of a monoid equals the set of natural number powers of
the element. -/
@[to_additive "The `add_submonoid` generated by an element of an `add_monoid` equals the set of
natural number multiples of the element."]
theorem closure_singleton {x : M} : closure ({x} : set M) = powers x :=
set.eq_of_subset_of_subset (closure_subset (powers.is_submonoid x) $ set.singleton_subset_iff.2 $
powers.self_mem) $ is_submonoid.power_subset (closure.is_submonoid _) $
set.singleton_subset_iff.1 $ subset_closure
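/- Illustrative corollary (an added sketch, not part of the original file; it relies on the
ambient `variables {M : Type*} [monoid M]`): by `closure_singleton`, every natural power of `x`
lies in the submonoid generated by `{x}`. -/
example {x : M} (n : ℕ) : x ^ n ∈ closure ({x} : set M) :=
by { rw closure_singleton, exact ⟨n, rfl⟩ }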
/-- The image under a monoid hom of the submonoid generated by a set equals the submonoid generated
by the image of the set under the monoid hom. -/
@[to_additive "The image under an `add_monoid` hom of the `add_submonoid` generated by a set equals
the `add_submonoid` generated by the image of the set under the `add_monoid` hom."]
lemma image_closure {A : Type*} [monoid A] {f : M → A} (hf : is_monoid_hom f) (s : set M) :
f '' closure s = closure (f '' s) :=
le_antisymm
begin
rintros _ ⟨x, hx, rfl⟩,
apply in_closure.rec_on hx; intros,
{ solve_by_elim [subset_closure, set.mem_image_of_mem] },
{ rw [hf.map_one], apply is_submonoid.one_mem (closure.is_submonoid (f '' s))},
{ rw [hf.map_mul], solve_by_elim [(closure.is_submonoid _).mul_mem] }
end
(closure_subset (is_submonoid.image hf (closure.is_submonoid _)) $
set.image_subset _ subset_closure)
/-- Given an element `a` of the submonoid of a monoid `M` generated by a set `s`, there exists
a list of elements of `s` whose product is `a`. -/
@[to_additive "Given an element `a` of the `add_submonoid` of an `add_monoid M` generated by
a set `s`, there exists a list of elements of `s` whose sum is `a`."]
theorem exists_list_of_mem_closure {s : set M} {a : M} (h : a ∈ closure s) :
(∃l:list M, (∀x∈l, x ∈ s) ∧ l.prod = a) :=
begin
induction h,
case in_closure.basic : a ha { existsi ([a]), simp [ha] },
case in_closure.one { existsi ([]), simp },
case in_closure.mul : a b _ _ ha hb
{ rcases ha with ⟨la, ha, eqa⟩,
rcases hb with ⟨lb, hb, eqb⟩,
existsi (la ++ lb),
simp [eqa.symm, eqb.symm, or_imp_distrib],
exact assume a, ⟨ha a, hb a⟩ }
end
/-- Given sets `s, t` of a commutative monoid `M`, `x ∈ M` is in the submonoid of `M` generated by
`s ∪ t` iff there exists an element of the submonoid generated by `s` and an element of the
submonoid generated by `t` whose product is `x`. -/
@[to_additive "Given sets `s, t` of a commutative `add_monoid M`, `x ∈ M` is in the `add_submonoid`
of `M` generated by `s ∪ t` iff there exists an element of the `add_submonoid` generated by `s`
and an element of the `add_submonoid` generated by `t` whose sum is `x`."]
theorem mem_closure_union_iff {M : Type*} [comm_monoid M] {s t : set M} {x : M} :
x ∈ closure (s ∪ t) ↔ ∃ y ∈ closure s, ∃ z ∈ closure t, y * z = x :=
⟨λ hx, let ⟨L, HL1, HL2⟩ := exists_list_of_mem_closure hx in HL2 ▸
list.rec_on L (λ _, ⟨1, (closure.is_submonoid _).one_mem, 1,
(closure.is_submonoid _).one_mem, mul_one _⟩)
(λ hd tl ih HL1, let ⟨y, hy, z, hz, hyzx⟩ := ih (list.forall_mem_of_forall_mem_cons HL1) in
or.cases_on (HL1 hd $ list.mem_cons_self _ _)
(λ hs, ⟨hd * y, (closure.is_submonoid _).mul_mem (subset_closure hs) hy, z, hz,
by rw [mul_assoc, list.prod_cons, ← hyzx]; refl⟩)
(λ ht, ⟨y, hy, z * hd, (closure.is_submonoid _).mul_mem hz (subset_closure ht),
by rw [← mul_assoc, list.prod_cons, ← hyzx, mul_comm hd]; refl⟩)) HL1,
λ ⟨y, hy, z, hz, hyzx⟩, hyzx ▸ (closure.is_submonoid _).mul_mem
(closure_mono (set.subset_union_left _ _) hy)
(closure_mono (set.subset_union_right _ _) hz)⟩
end monoid
/-- Create a bundled submonoid from a set `s` and `[is_submonoid s]`. -/
@[to_additive "Create a bundled additive submonoid from a set `s` and `[is_add_submonoid s]`."]
def submonoid.of {s : set M} (h : is_submonoid s) : submonoid M := ⟨s, λ _ _, h.2, h.1⟩
@[to_additive]
lemma submonoid.is_submonoid (S : submonoid M) : is_submonoid (S : set M) := ⟨S.3, λ _ _, S.2⟩
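/- Usage sketch (not in the original file; it relies on the same ambient `[monoid M]` instance):
the universal set, via `univ.is_submonoid`, bundles to a `submonoid M` with `submonoid.of`. -/
example : submonoid M := submonoid.of univ.is_submonoid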
|
[STATEMENT]
lemma (in linorder_topology) eventually_at_split:
"eventually P (at x) \<longleftrightarrow> eventually P (at_left x) \<and> eventually P (at_right x)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. eventually P (at x) = (eventually P (at_left x) \<and> eventually P (at_right x))
[PROOF STEP]
by (subst at_eq_sup_left_right) (simp add: eventually_sup) |
Formal statement is: lemma less_degree_imp: "n < degree p \<Longrightarrow> \<exists>i>n. coeff p i \<noteq> 0" Informal statement is: If $n$ is less than the degree of a polynomial $p$, then there exists an index $i > n$ such that the coefficient of $x^i$ in $p$ is nonzero. |
module Oscar.Function where
open import Function public using (id; _∘_; _∘′_; flip; _on_; _$_) renaming (const to const_)
infix -1 _∋_
_∋_ : ∀ {a} (A : Set a) → A → A
_ ∋ x = x
open import Prelude.Function public using (it)
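-- Usage note (an added sketch): `A ∋ x` ascribes the explicitly given type `A` to `x` and
-- returns `x` unchanged, which is handy for pinning down implicit arguments at a use site.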
|
(*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
Require Import Bool.
Require Import String.
Require Import List.
Require Import Eqdep_dec.
Require Import RelationClasses.
Require Import Utils.
Require Import ForeignType.
Require Import RType.
Require Import BrandRelation.
Section RSubtype.
Context {ftype:foreign_type}.
Context {br:brand_relation}.
Inductive subtype : rtype -> rtype -> Prop :=
| STop r : subtype r ⊤
| SBottom r : subtype ⊥ r
| SRefl r : subtype r r
| SColl r1 r2 : subtype r1 r2 -> subtype (Coll r1) (Coll r2)
(** Allow width subtyping of open records and depth subtyping of both types of records. Also, a closed record can be a subtype of an open record (but not vice versa) *)
| SRec k1 k2 rl1 rl2 pf1 pf2 : (forall s r',
lookup string_dec rl2 s = Some r' ->
exists r, lookup string_dec rl1 s = Some r /\
subtype r r') ->
(k2 = Closed -> k1 = Closed /\
(forall s, In s (domain rl1) -> In s (domain rl2))) ->
subtype (Rec k1 rl1 pf1) (Rec k2 rl2 pf2)
| SEither l1 l2 r1 r2 :
subtype l1 l2 ->
subtype r1 r2 ->
subtype (Either l1 r1) (Either l2 r2)
| SArrow in1 in2 out1 out2:
subtype in2 in1 ->
subtype out1 out2 ->
subtype (Arrow in1 out1) (Arrow in2 out2)
| SBrand b1 b2 : sub_brands brand_relation_brands b1 b2 -> subtype (Brand b1) (Brand b2)
| SForeign ft1 ft2 :
foreign_type_sub ft1 ft2 ->
subtype (Foreign ft1) (Foreign ft2)
.
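  (* Sanity-check example (an added sketch, not part of the original development):
     the bottom type is a subtype of the top type, directly by the constructors above. *)
  Example bottom_sub_top : subtype ⊥ ⊤.
  Proof.
    apply SBottom.
  Qed.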
Lemma SRec_open k1 rl1 rl2 pf1 pf2 :
(forall s r',
lookup string_dec rl2 s = Some r' ->
exists r, lookup string_dec rl1 s = Some r /\
subtype r r') ->
subtype (Rec k1 rl1 pf1) (Rec Open rl2 pf2).
Proof.
intros; constructor; intuition; discriminate.
Qed.
Lemma SRec_closed_in_domain {k1 k2 rl1 rl2 pf1 pf2} :
subtype (Rec k1 rl1 pf1) (Rec k2 rl2 pf2) ->
(forall x, In x (domain rl2) -> In x (domain rl1)).
Proof.
inversion 1; rtype_equalizer; subst; intuition.
destruct (in_dom_lookup string_dec H0).
destruct (H3 _ _ H1) as [? [inn ?]].
apply lookup_in in inn.
apply in_dom in inn.
trivial.
Qed.
Lemma SRec_closed_equiv_domain {k1 rl1 rl2 pf1 pf2} :
subtype (Rec k1 rl1 pf1) (Rec Closed rl2 pf2) ->
(forall x, In x (domain rl1) <-> In x (domain rl2)).
Proof.
inversion 1; rtype_equalizer; subst; intuition.
eapply SRec_closed_in_domain; eauto.
Qed.
Lemma UIP_refl_dec
{A:Type}
(dec:forall x y:A, {x = y} + {x <> y})
{x:A}
(p1:x = x) : p1 = eq_refl x.
Proof.
intros. apply (UIP_dec); auto.
Qed.
(** This follows trivially from the consistency of join and subtype.
      However, this version should have better computational properties. *)
Hint Constructors subtype : qcert.
Lemma subtype_both_dec x y :
(prod ({subtype x y} + {~ subtype x y}) ({subtype y x} + {~ subtype y x})).
Proof.
Ltac simp := match goal with
| [H:(@eq bool ?x ?x) |- _ ] => generalize (UIP_refl_dec bool_dec H); intro; subst H
end.
destruct x.
revert y; induction x
; intros y; destruct y as [y pfy]; destruct y; constructor;
try solve[right; inversion 1 | left; simpl in *; repeat simp; eauto with qcert ].
- destruct (IHx e (exist _ y pfy)) as [[?|?]_].
+ left. repeat rewrite (Coll_canon).
auto with qcert.
+ right. intro ss; apply n. inversion ss; subst.
* erewrite (rtype_ext); eauto with qcert.
* destruct r1; destruct r2; simpl in *.
erewrite (rtype_ext e); erewrite (rtype_ext pfy); eauto.
- destruct (IHx e (exist _ y pfy)) as [_[?|?]].
+ left. repeat rewrite (Coll_canon).
auto with qcert.
+ right. intro ss; apply n. inversion ss; subst.
* erewrite (rtype_ext); eauto with qcert.
* destruct r1; destruct r2; simpl in *.
erewrite (rtype_ext e); erewrite (rtype_ext pfy); eauto.
- rename srl into srl0; rename r into srl.
assert (sub:{forall s r' pf',
lookup string_dec srl0 s = Some r' ->
exists r pf, lookup string_dec srl s = Some r /\
subtype (exist _ r pf) (exist _ r' pf')} + {~ (forall s r' pf',
lookup string_dec srl0 s = Some r' ->
exists r pf, lookup string_dec srl s = Some r /\
subtype (exist _ r pf) (exist _ r' pf'))}).
+ induction srl0; simpl; [left; intros; discriminate | ].
destruct a.
case_eq (lookup string_dec srl s).
* intros ? inn.
assert (wfr:wf_rtype₀ r = true)
by (eapply (wf_rtype₀_Rec_In pfy); simpl; left; reflexivity).
destruct (Forallt_In H _ (lookup_in string_dec _ inn) (wf_rtype₀_Rec_In e _ _ ((lookup_in string_dec _ inn))) (exist _ r wfr)) as [[?|?]_].
destruct (IHsrl0 (wf_rtype₀_cons_tail pfy)).
left. intros ? ? ? eqq. match_destr_in eqq; subst; eauto 2.
inversion eqq; subst.
rewrite (rtype_ext pf' wfr); eauto.
right; intro nin. apply n; intros ss rr rrpf sin.
specialize (nin ss rr).
match_destr_in nin; [| intuition ].
subst.
apply wf_rtype₀_cons_nin in pfy.
congruence.
right; intro nin. specialize (nin s r).
match_destr_in nin; [| intuition ].
destruct (nin wfr (eq_refl _)) as [? [?[??]]]; simpl in *.
rewrite inn in H0. inversion H0; subst.
apply n.
rewrite (rtype_ext _ x0). trivial.
* right; intro nin. specialize (nin s r).
match_destr_in nin; [| intuition ].
assert (wfr:wf_rtype₀ r = true)
by (eapply (wf_rtype₀_Rec_In pfy); simpl; left; reflexivity).
destruct (nin wfr (eq_refl _)) as [? [?[??]]].
congruence.
+ destruct sub.
* destruct k0.
left.
destruct (from_Rec₀ srl e) as [? [?[??]]]; subst.
rewrite <- H1.
destruct (from_Rec₀ _ pfy) as [? [?[??]]]; subst.
rewrite <- H2.
econstructor; try discriminate.
intros ? ? lo. destruct r'.
apply lookup_map_some' in lo.
destruct (e0 _ _ e1 lo) as [?[?[??]]].
rewrite <- (lookup_map_some' _ _ _ x5) in H0.
exists (exist _ x4 x5). intuition.
destruct k.
right; inversion 1; intuition; discriminate.
destruct (incl_list_dec string_dec (domain srl) (domain srl0)).
left.
destruct (from_Rec₀ srl e) as [? [?[??]]]; subst.
rewrite <- H1.
destruct (from_Rec₀ _ pfy) as [? [?[??]]]; subst.
rewrite <- H2.
constructor; intros.
destruct r'.
rewrite (lookup_map_some' _ _ _ e1) in H0.
destruct (e0 _ _ e1 H0) as [?[?[??]]].
rewrite <- (lookup_map_some' _ _ _ x5) in H3.
exists (exist _ x4 x5). intuition.
intuition.
specialize (i s).
unfold domain in i; repeat rewrite map_map in i.
simpl in i.
auto.
right; inversion 1; rtype_equalizer; subst; eauto 2.
intuition. apply n.
intros ? .
unfold domain; repeat rewrite map_map.
simpl.
auto.
* right; inversion 1; apply n; rtype_equalizer; subst; eauto with qcert.
intros.
rewrite <- (lookup_map_some' _ _ _ pf') in H1.
destruct (H4 _ _ H1) as [? [??]].
destruct x.
exists x; exists e0.
rewrite <- (lookup_map_some' _ _ _ e0).
intuition.
- rename srl into srl0; rename r into srl.
assert (sub:{forall s r' pf',
lookup string_dec srl s = Some r' ->
exists r pf, lookup string_dec srl0 s = Some r /\
subtype (exist _ r pf) (exist _ r' pf')} + {~ (forall s r' pf',
lookup string_dec srl s = Some r' ->
exists r pf, lookup string_dec srl0 s = Some r /\
subtype (exist _ r pf) (exist _ r' pf'))}).
+ induction srl; simpl; [left; intros; discriminate | ].
destruct a.
case_eq (lookup string_dec srl0 s).
* intros ? inn.
assert (wfr0:wf_rtype₀ r0 = true)
by (eapply (wf_rtype₀_Rec_In pfy); eapply lookup_in; eauto).
assert (wfr:wf_rtype₀ r = true)
by (eapply (wf_rtype₀_Rec_In e); simpl; left; reflexivity).
invcs H.
simpl in H2.
destruct (H2 wfr (exist _ r0 wfr0)) as [_ issub].
{ destruct issub.
- destruct (IHsrl H3 (wf_rtype₀_cons_tail e)).
+ left; intros ? ? ? eqq.
match_destr_in eqq; subst; eauto 2.
inversion eqq; subst.
rewrite (rtype_ext pf' wfr); eauto.
+ right; intro nin. apply n; intros ss rr rrpf sin.
specialize (nin ss rr).
match_destr_in nin; [| intuition ].
subst.
apply wf_rtype₀_cons_nin in e.
congruence.
- right; intro nin. specialize (nin s r).
match_destr_in nin; [| intuition ].
destruct (nin wfr (eq_refl _)) as [? [?[??]]]; simpl in *.
rewrite inn in H. inversion H; subst.
apply n.
rewrite (rtype_ext _ x0). trivial.
}
* right; intro nin. specialize (nin s r).
match_destr_in nin; [| intuition ].
assert (wfr:wf_rtype₀ r = true)
by (eapply (wf_rtype₀_Rec_In e); simpl; left; reflexivity).
destruct (nin wfr (eq_refl _)) as [? [?[??]]].
congruence.
+ destruct sub.
* {destruct k.
- left.
destruct (from_Rec₀ srl e) as [? [?[??]]]; subst.
rewrite <- H1.
destruct (from_Rec₀ _ pfy) as [? [?[??]]]; subst.
rewrite <- H2.
econstructor; try discriminate.
intros ? ? lo. destruct r'.
apply lookup_map_some' in lo.
destruct (e0 _ _ e1 lo) as [?[?[??]]].
rewrite <- (lookup_map_some' _ _ _ x5) in H0.
exists (exist _ x4 x5). intuition.
- destruct k0.
+ right.
inversion 1; subst.
intuition; discriminate.
+ destruct (incl_list_dec string_dec (domain srl0) (domain srl)).
* left.
destruct (from_Rec₀ srl e) as [? [?[??]]]; subst.
rewrite <- H1.
destruct (from_Rec₀ _ pfy) as [? [?[??]]]; subst.
rewrite <- H2.
{ constructor; intros.
- destruct r'.
rewrite (lookup_map_some' _ _ _ e1) in H0.
destruct (e0 _ _ e1 H0) as [?[?[??]]].
rewrite <- (lookup_map_some' _ _ _ x5) in H3.
exists (exist _ x4 x5). intuition.
- intuition.
specialize (i s).
unfold domain in i; repeat rewrite map_map in i.
simpl in i.
auto.
}
* right; inversion 1; rtype_equalizer; subst; eauto 2.
intuition. apply n.
intros ? .
unfold domain; repeat rewrite map_map.
simpl.
auto.
}
* right; inversion 1; apply n; rtype_equalizer; subst; eauto with qcert.
intros.
rewrite <- (lookup_map_some' _ _ _ pf') in H1.
destruct (H4 _ _ H1) as [? [??]].
destruct x.
exists x; exists e0.
rewrite <- (lookup_map_some' _ _ _ e0).
intuition.
- destruct (Either₀_wf_inv e) as [pfl1 pfr1].
destruct (Either₀_wf_inv pfy) as [pfl2 pfr2].
destruct (IHx1 pfl1 (exist _ _ pfl2)) as [[?|?]_].
+ destruct (IHx2 pfr1 (exist _ _ pfr2)) as [[?|?]_].
* left.
rewrite (Either_canon _ _ _ pfl1 pfr1).
rewrite (Either_canon _ _ _ pfl2 pfr2).
eauto with qcert.
* right; inversion 1; subst.
apply n. rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
rewrite (Either_canon _ _ _ pfl1 pfr1) in H.
rewrite (Either_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H; rtype_equalizer; subst.
rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
subst.
rewrite (rtype_ext pfr1 (proj2_sig r1)).
rewrite (rtype_ext pfr2 (proj2_sig r2)).
destruct r1; destruct r2. simpl in *.
trivial.
+ right; inversion 1; subst.
apply n. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rewrite (Either_canon _ _ _ pfl1 pfr1) in H.
rewrite (Either_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H. rtype_equalizer.
subst. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
subst.
rewrite (rtype_ext pfl1 (proj2_sig l1)).
rewrite (rtype_ext pfl2 (proj2_sig l2)).
destruct l1; destruct l2. simpl in *.
trivial.
- destruct (Either₀_wf_inv e) as [pfl1 pfr1].
destruct (Either₀_wf_inv pfy) as [pfl2 pfr2].
destruct (IHx1 pfl1 (exist _ _ pfl2)) as [_[?|?]].
+ destruct (IHx2 pfr1 (exist _ _ pfr2)) as [_[?|?]].
* left.
rewrite (Either_canon _ _ _ pfl1 pfr1).
rewrite (Either_canon _ _ _ pfl2 pfr2).
eauto with qcert.
* right; inversion 1; subst.
apply n. rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
rewrite (Either_canon _ _ _ pfl1 pfr1) in H.
rewrite (Either_canon _ _ _ pfl2 pfr2) in H.
apply n.
{ inversion H; rtype_equalizer; subst.
- rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
- subst.
rewrite (rtype_ext pfr2 (proj2_sig r1)).
rewrite (rtype_ext pfr1 (proj2_sig r2)).
destruct r1; destruct r2. simpl in *.
trivial.
}
+ right; inversion 1; subst.
apply n. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rewrite (Either_canon _ _ _ pfl1 pfr1) in H.
rewrite (Either_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H. rtype_equalizer.
subst. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
subst.
rewrite (rtype_ext pfl2 (proj2_sig l1)).
rewrite (rtype_ext pfl1 (proj2_sig l2)).
destruct l1; destruct l2. simpl in *.
trivial.
- destruct (Arrow₀_wf_inv e) as [pfl1 pfr1].
destruct (Arrow₀_wf_inv pfy) as [pfl2 pfr2].
destruct (IHx1 pfl1 (exist _ _ pfl2)) as [?[?|?]].
+ destruct (IHx2 pfr1 (exist _ _ pfr2)) as [[?|?]?].
* left.
rewrite (Arrow_canon _ _ _ pfl1 pfr1).
rewrite (Arrow_canon _ _ _ pfl2 pfr2).
econstructor; eauto.
* right; inversion 1; subst.
apply n. rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
rewrite (Arrow_canon _ _ _ pfl1 pfr1) in H.
rewrite (Arrow_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H; rtype_equalizer; subst.
rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
subst.
rewrite (rtype_ext pfr1 (proj2_sig out1)).
rewrite (rtype_ext pfr2 (proj2_sig out2)).
destruct out1; destruct out2. simpl in *.
trivial.
+ right; inversion 1; subst.
apply n. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rewrite (Arrow_canon _ _ _ pfl1 pfr1) in H.
rewrite (Arrow_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H. rtype_equalizer.
subst. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rtype_equalizer.
subst.
rewrite (rtype_ext pfl1 (proj2_sig in1)).
rewrite (rtype_ext pfl2 (proj2_sig in2)).
destruct in1; destruct in2. simpl in *.
trivial.
- destruct (Arrow₀_wf_inv e) as [pfl1 pfr1].
destruct (Arrow₀_wf_inv pfy) as [pfl2 pfr2].
destruct (IHx1 pfl1 (exist _ _ pfl2)) as [[?|?]?].
+ destruct (IHx2 pfr1 (exist _ _ pfr2)) as [?[?|?]].
* left.
rewrite (Arrow_canon _ _ _ pfl1 pfr1).
rewrite (Arrow_canon _ _ _ pfl2 pfr2).
econstructor; eauto.
* right; inversion 1; subst.
apply n. rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
rewrite (Arrow_canon _ _ _ pfl1 pfr1) in H.
rewrite (Arrow_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H; rtype_equalizer; subst.
rewrite (rtype_ext pfr1 pfr2). eauto with qcert.
subst.
rewrite (rtype_ext pfr1 (proj2_sig out2)).
rewrite (rtype_ext pfr2 (proj2_sig out1)).
destruct out1; destruct out2. simpl in *.
trivial.
+ right; inversion 1; subst.
apply n. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rewrite (Arrow_canon _ _ _ pfl1 pfr1) in H.
rewrite (Arrow_canon _ _ _ pfl2 pfr2) in H.
apply n.
inversion H. rtype_equalizer.
subst. rewrite (rtype_ext pfl1 pfl2). eauto with qcert.
rtype_equalizer.
subst.
rewrite (rtype_ext pfl1 (proj2_sig in2)).
rewrite (rtype_ext pfl2 (proj2_sig in1)).
destruct in1; destruct in2. simpl in *.
trivial.
- destruct (sub_brands_dec brand_relation_brands b b0).
+ left; repeat rewrite Brand_canon; eauto with qcert.
+ right. inversion 1; subst; eauto 2.
* intuition.
* apply n.
repeat rewrite (canon_brands_equiv).
trivial.
- destruct (sub_brands_dec brand_relation_brands b0 b).
+ left; repeat rewrite Brand_canon; eauto with qcert.
+ right. inversion 1; subst; eauto 2.
* intuition.
* apply n.
repeat rewrite (canon_brands_equiv).
trivial.
- destruct (foreign_type_sub_dec ft ft0).
+ left. repeat rewrite Foreign_canon.
apply SForeign; trivial.
+ right; intros sub.
invcs sub.
* apply n; reflexivity.
* intuition.
- destruct (foreign_type_sub_dec ft0 ft).
+ left. repeat rewrite Foreign_canon.
apply SForeign; trivial.
+ right; intros sub.
invcs sub.
* apply n; reflexivity.
* intuition.
Defined.
Theorem subtype_dec x y : {subtype x y} + {~ subtype x y}.
Proof.
destruct (subtype_both_dec x y) as [? _].
trivial.
Defined.
End RSubtype.
Section Misc.
Context {ftype:foreign_type}.
Context {br:brand_relation}.
Lemma subtype_ext {a b pfa pfb} :
subtype (exist _ a pfa) (exist _ b pfb) ->
forall pfa' pfb',
subtype (exist _ a pfa') (exist _ b pfb').
Proof.
intros.
rewrite (rtype_ext pfa' pfa).
rewrite (rtype_ext pfb' pfb).
trivial.
Qed.
Lemma subtype_Either_inv {τl τr τl' τr'} :
subtype (Either τl τr) (Either τl' τr') ->
subtype τl τl' /\
subtype τr τr'.
Proof.
inversion 1; rtype_equalizer; subst.
- subst; split; econstructor.
- subst. intuition.
Qed.
Lemma subtype_Arrow_inv {τl τr τl' τr'} :
subtype (Arrow τl τr) (Arrow τl' τr') ->
subtype τl' τl /\
subtype τr τr'.
Proof.
inversion 1; rtype_equalizer; subst.
- subst; split; econstructor.
- subst. intuition.
Qed.
Definition check_subtype_pairs (l:list (rtype*rtype)) : bool
:= forallb (fun τs => if subtype_dec (fst τs) (snd τs) then true else false) l.
Definition enforce_unary_op_schema (ts1:rtype*rtype) (tr:rtype)
: option (rtype*rtype)
:= if check_subtype_pairs (ts1::nil)
then Some (tr, (snd ts1))
else None.
Definition enforce_binary_op_schema (ts1:rtype*rtype) (ts2:rtype*rtype) (tr:rtype)
: option (rtype*rtype*rtype)
:= if check_subtype_pairs (ts1::ts2::nil)
then Some (tr, (snd ts1), (snd ts2))
else None.
End Misc.
Notation "r1 <: r2" := (subtype r1 r2) (at level 70).
|
import Data.Vect
%default total
-- variable
-- function1 incr
-- function2 add
-- adt sum type
-- function adt show
-- Nat
-- mylist
-- variable xs vect
-- vtake
-- vtake to string
-- vappend
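-- a sketch of the planned vappend (standard Vect append; the name comes from the TODO above):
-- vappend : Vect n a -> Vect m a -> Vect (n + m) a
-- vappend [] ys = ys
-- vappend (x :: xs) ys = x :: vappend xs ys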
|
[STATEMENT]
lemma (in Module) unique_expression7_1:"\<lbrakk>free_generator R M H;
f \<in> {j. j \<le> (n::nat)} \<rightarrow> H; inj_on f {j. j \<le> n};
s \<in> {j. j \<le> n} \<rightarrow> carrier R;
g \<in> {j. j \<le> (m::nat)} \<rightarrow> H; inj_on g {j. j \<le> m};
t \<in> {j. j \<le> m} \<rightarrow> carrier R;
l_comb R M n s f = l_comb R M m t g;
\<forall>j \<in> {j. j \<le> n}. s j \<noteq> \<zero>\<^bsub>R\<^esub>; \<forall>k\<in>{j. j \<le> m}. t k \<noteq> \<zero>\<^bsub>R\<^esub>\<rbrakk> \<Longrightarrow> n = m"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>free_generator R M H; f \<in> {j. j \<le> n} \<rightarrow> H; inj_on f {j. j \<le> n}; s \<in> {j. j \<le> n} \<rightarrow> carrier R; g \<in> {j. j \<le> m} \<rightarrow> H; inj_on g {j. j \<le> m}; t \<in> {j. j \<le> m} \<rightarrow> carrier R; l_comb R M n s f = l_comb R M m t g; \<forall>j\<in>{j. j \<le> n}. s j \<noteq> \<zero>\<^bsub>R\<^esub>; \<forall>k\<in>{j. j \<le> m}. t k \<noteq> \<zero>\<^bsub>R\<^esub>\<rbrakk> \<Longrightarrow> n = m
[PROOF STEP]
apply (frule_tac A = "{j. j \<le> n}" and f = f in card_image,
frule_tac A = "{j. j \<le> m}" and f = g in card_image)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>free_generator R M H; f \<in> {j. j \<le> n} \<rightarrow> H; inj_on f {j. j \<le> n}; s \<in> {j. j \<le> n} \<rightarrow> carrier R; g \<in> {j. j \<le> m} \<rightarrow> H; inj_on g {j. j \<le> m}; t \<in> {j. j \<le> m} \<rightarrow> carrier R; l_comb R M n s f = l_comb R M m t g; \<forall>j\<in>{j. j \<le> n}. s j \<noteq> \<zero>\<^bsub>R\<^esub>; \<forall>k\<in>{j. j \<le> m}. t k \<noteq> \<zero>\<^bsub>R\<^esub>; card (f ` {j. j \<le> n}) = card {j. j \<le> n}; card (g ` {j. j \<le> m}) = card {j. j \<le> m}\<rbrakk> \<Longrightarrow> n = m
[PROOF STEP]
apply (frule_tac H = H and f = f and n = n and s = s and g = g and t = t and
m = m in unique_expression6, assumption+)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>free_generator R M H; f \<in> {j. j \<le> n} \<rightarrow> H; inj_on f {j. j \<le> n}; s \<in> {j. j \<le> n} \<rightarrow> carrier R; g \<in> {j. j \<le> m} \<rightarrow> H; inj_on g {j. j \<le> m}; t \<in> {j. j \<le> m} \<rightarrow> carrier R; l_comb R M n s f = l_comb R M m t g; \<forall>j\<in>{j. j \<le> n}. s j \<noteq> \<zero>\<^bsub>R\<^esub>; \<forall>k\<in>{j. j \<le> m}. t k \<noteq> \<zero>\<^bsub>R\<^esub>; card (f ` {j. j \<le> n}) = card {j. j \<le> n}; card (g ` {j. j \<le> m}) = card {j. j \<le> m}; f ` {j. j \<le> n} = g ` {j. j \<le> m}\<rbrakk> \<Longrightarrow> n = m
[PROOF STEP]
apply (rotate_tac -3, frule sym,
thin_tac "card (f ` {j. j \<le> n}) = card ({j. j \<le> n})")
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>card (g ` {j. j \<le> m}) = card {j. j \<le> m}; f ` {j. j \<le> n} = g ` {j. j \<le> m}; free_generator R M H; f \<in> {j. j \<le> n} \<rightarrow> H; inj_on f {j. j \<le> n}; s \<in> {j. j \<le> n} \<rightarrow> carrier R; g \<in> {j. j \<le> m} \<rightarrow> H; inj_on g {j. j \<le> m}; t \<in> {j. j \<le> m} \<rightarrow> carrier R; l_comb R M n s f = l_comb R M m t g; \<forall>j\<in>{j. j \<le> n}. s j \<noteq> \<zero>\<^bsub>R\<^esub>; \<forall>k\<in>{j. j \<le> m}. t k \<noteq> \<zero>\<^bsub>R\<^esub>; card {j. j \<le> n} = card (f ` {j. j \<le> n})\<rbrakk> \<Longrightarrow> n = m
[PROOF STEP]
apply simp
[PROOF STATE]
proof (prove)
goal:
No subgoals!
[PROOF STEP]
done |
module STLC.Type.Relation where
open import Data.Nat using (ℕ)
open import Data.Fin using (Fin)
open import STLC.Term
open import STLC.Type
open import STLC.Type.Context using (Ctxt)
open import Data.Vec using (_∷_; lookup)
open import Relation.Nullary using (¬_)
open import Relation.Binary.PropositionalEquality as Eq
using (refl; _≡_; sym; cong)
open Eq.≡-Reasoning using (begin_; _≡⟨⟩_; step-≡; _∎)
infix 4 _⊢_⦂_
data _⊢_⦂_ { m n } (Γ : Ctxt m n) : Term m -> Type n -> Set where
⊢# : ∀ { x : Fin m }
-- ---------------
-> Γ ⊢ # x ⦂ lookup Γ x
⊢ƛ : ∀ { t τ₁ τ₂ }
-> τ₁ ∷ Γ ⊢ t ⦂ τ₂
-- -----------------
-> Γ ⊢ ƛ t ⦂ τ₁ ⇒ τ₂
⊢_·_ : ∀ { t₁ t₂ τ₁ τ₂ }
-> Γ ⊢ t₁ ⦂ τ₁ ⇒ τ₂
-> Γ ⊢ t₂ ⦂ τ₁
-- ------------------
-> Γ ⊢ t₁ · t₂ ⦂ τ₂
_⊬_⦂_ : ∀ { m n } -> Ctxt m n -> Term m -> Type n -> Set
Γ ⊬ t ⦂ τ = ¬ (Γ ⊢ t ⦂ τ)
module Lemmas₀ where
⊢-subst : ∀ { m n } { Γ₁ Γ₂ : Ctxt m n } { t₁ t₂ : Term m } { τ₁ τ₂ : Type n }
-> Γ₁ ≡ Γ₂ -> t₁ ≡ t₂ -> τ₁ ≡ τ₂ -> Γ₁ ⊢ t₁ ⦂ τ₁ -> Γ₂ ⊢ t₂ ⦂ τ₂
⊢-subst refl refl refl hyp = hyp
⊢-Γ-subst : ∀ { m n } { Γ₁ Γ₂ : Ctxt m n } { t : Term m } { τ : Type n }
-> Γ₁ ≡ Γ₂ -> Γ₁ ⊢ t ⦂ τ -> Γ₂ ⊢ t ⦂ τ
⊢-Γ-subst ≡-Γ hyp = ⊢-subst ≡-Γ refl refl hyp
⊢-τ-subst : ∀ { m n } { Γ : Ctxt m n } { t : Term m } { τ₁ τ₂ : Type n }
-> τ₁ ≡ τ₂ -> Γ ⊢ t ⦂ τ₁ -> Γ ⊢ t ⦂ τ₂
⊢-τ-subst ≡-τ hyp = ⊢-subst refl refl ≡-τ hyp
-- TODO: Substitutions on typing derivations
-- TODO: Equivalent for Value e.g. value ⦂ τ relation w/ some lemmas :)
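  -- Worked example (an added sketch): for any τ and Γ, the identity term `ƛ (# zero)` has type
  -- `τ ⇒ τ` via `⊢ƛ ⊢#`, since `lookup (τ ∷ Γ) zero` reduces to `τ`.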
|
If $f$ and $g$ are continuous functions at $a$ and $g(a) \neq 0$, then the function $f/g$ is continuous at $a$. |
{-# OPTIONS --cubical --safe #-}
module Cubical.Structures.AbGroup where
open import Cubical.Foundations.Prelude
open import Cubical.Foundations.Equiv
open import Cubical.Foundations.HLevels
open import Cubical.Data.Sigma
open import Cubical.Foundations.SIP renaming (SNS-PathP to SNS)
open import Cubical.Structures.NAryOp
open import Cubical.Structures.Group hiding (⟨_⟩)
private
variable
ℓ : Level
abelian-group-axioms : (X : Type ℓ) → raw-group-structure X → Type ℓ
abelian-group-axioms X _·_ = group-axioms X _·_ ×
((x y : X) → x · y ≡ y · x)
abelian-group-structure : Type ℓ → Type ℓ
abelian-group-structure = add-to-structure raw-group-structure abelian-group-axioms
AbGroup : Type (ℓ-suc ℓ)
AbGroup {ℓ} = TypeWithStr ℓ abelian-group-structure
abelian-group-iso : StrIso abelian-group-structure ℓ
abelian-group-iso = add-to-iso (nAryFunIso 2) abelian-group-axioms
abelian-group-axioms-isProp : (X : Type ℓ)
→ (s : raw-group-structure X)
→ isProp (abelian-group-axioms X s)
abelian-group-axioms-isProp X _·_ = isPropΣ (group-axioms-isProp X _·_)
λ { ((isSetX , _) , _) → isPropΠ2 λ _ _ → isSetX _ _}
abelian-group-is-SNS : SNS {ℓ} abelian-group-structure abelian-group-iso
abelian-group-is-SNS = add-axioms-SNS _ abelian-group-axioms-isProp raw-group-is-SNS
AbGroupPath : (M N : AbGroup {ℓ}) → (M ≃[ abelian-group-iso ] N) ≃ (M ≡ N)
AbGroupPath = SIP abelian-group-is-SNS
-- Abelian group is group
AbGroup→Group : AbGroup {ℓ} → Group
AbGroup→Group (G , _·_ , isGroup , ·comm) = G , _·_ , isGroup
-- Abelian group extractors
⟨_⟩ : AbGroup {ℓ} → Type ℓ
⟨ G , _ ⟩ = G
module _ (G : AbGroup {ℓ}) where
abgroup-operation = group-operation (AbGroup→Group G)
abgroup-is-set = group-is-set (AbGroup→Group G)
abgroup-assoc = group-assoc (AbGroup→Group G)
abgroup-id = group-id (AbGroup→Group G)
abgroup-rid = group-rid (AbGroup→Group G)
abgroup-lid = group-lid (AbGroup→Group G)
abgroup-inv = group-inv (AbGroup→Group G)
abgroup-rinv = group-rinv (AbGroup→Group G)
abgroup-linv = group-linv (AbGroup→Group G)
module abgroup-operation-syntax where
abgroup-operation-syntax : (G : AbGroup {ℓ}) → ⟨ G ⟩ → ⟨ G ⟩ → ⟨ G ⟩
abgroup-operation-syntax G = abgroup-operation G
infixr 20 abgroup-operation-syntax
syntax abgroup-operation-syntax G x y = x ·⟨ G ⟩ y
open abgroup-operation-syntax
abgroup-comm : (G : AbGroup {ℓ}) (x y : ⟨ G ⟩) → x ·⟨ G ⟩ y ≡ y ·⟨ G ⟩ x
abgroup-comm (_ , _ , _ , P) = P
-- AbGroup ·syntax
module abgroup-·syntax (G : AbGroup {ℓ}) where
open group-·syntax (AbGroup→Group G) public
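-- Usage note (an added sketch): with `abgroup-·syntax` opened, `abgroup-comm G x y` is a path
-- `x ·⟨ G ⟩ y ≡ y ·⟨ G ⟩ x`, while the group laws are inherited through `AbGroup→Group`.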
|
Formal statement is: lemma contour_integrable_div: "f contour_integrable_on g \<Longrightarrow> (\<lambda>x. f x / c) contour_integrable_on g" Informal statement is: If $f$ is contour-integrable on a contour $g$, then $f/c$ is contour-integrable on $g$ for any constant $c$. |
theory T118
imports Main
begin
lemma "(
(\<forall> x::nat. \<forall> y::nat. meet(x, y) = meet(y, x)) &
(\<forall> x::nat. \<forall> y::nat. join(x, y) = join(y, x)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, meet(y, z)) = meet(meet(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(x, join(y, z)) = join(join(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. meet(x, join(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. join(x, meet(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, join(y, z)) = join(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(join(x, y), z) = join(mult(x, z), mult(y, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, over(join(mult(x, y), z), y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(y, undr(x, join(mult(x, y), z))) = y) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(over(x, y), y), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(y, undr(y, x)), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, meet(y, z)) = meet(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(meet(x, y), z) = meet(mult(x, z), mult(y, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. undr(x, join(y, z)) = join(undr(x, y), undr(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. over(x, meet(y, z)) = join(over(x, y), over(x, z))) &
(\<forall> x::nat. \<forall> y::nat. invo(join(x, y)) = meet(invo(x), invo(y))) &
(\<forall> x::nat. \<forall> y::nat. invo(meet(x, y)) = join(invo(x), invo(y))) &
(\<forall> x::nat. invo(invo(x)) = x)
) \<longrightarrow>
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. over(join(x, y), z) = join(over(x, z), over(y, z)))
"
nitpick[card nat=8,timeout=86400]
oops
end |
{-# OPTIONS --safe #-}
module Cubical.Algebra.MonoidSolver.Examples where
open import Cubical.Foundations.Prelude
open import Cubical.Algebra.Monoid.Base
open import Cubical.Algebra.CommMonoid.Base
open import Cubical.Algebra.MonoidSolver.Reflection
private
variable
ℓ : Level
module ExamplesMonoid (M : Monoid ℓ) where
open MonoidStr (snd M)
_ : ε ≡ ε
_ = solveMonoid M
_ : ε · ε · ε ≡ ε
_ = solveMonoid M
_ : ∀ x → ε · x ≡ x
_ = solveMonoid M
_ : ∀ x y z → (x · y) · z ≡ x · (y · z)
_ = solveMonoid M
_ : ∀ x y z → z · (x · y) · ε · z ≡ z · x · (y · z)
_ = solveMonoid M
module ExamplesCommMonoid (M : CommMonoid ℓ) where
open CommMonoidStr (snd M)
_ : ε ≡ ε
_ = solveCommMonoid M
_ : ε · ε · ε ≡ ε
_ = solveCommMonoid M
_ : ∀ x → ε · x ≡ x
_ = solveCommMonoid M
_ : ∀ x y z → (x · z) · y ≡ x · (y · z)
_ = solveCommMonoid M
_ : ∀ x y → (x · y) · y · x · (x · y) ≡ x · x · x · (y · y · y)
_ = solveCommMonoid M
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : findFootball.py
@Contact : [email protected]
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
@Modify Time      @Author    @Version    @Description
------------ ------- -------- -----------
2021/2/25 0025 22:19 gxrao 1.0 None
'''
import cv2
import numpy as np
import math
from shapely.geometry import Point
# Football detection in images. External interface: set the ROI region and the video parameters.
# Internal interface:
class findball():
def __init__(self,lower,higher,roi):
self.lower, self.higher=lower,higher
self.roi=roi
def setValue(self):
        # convert the HSV colour bounds to numpy arrays so cv2.inRange accepts them
        self.lower = np.array(self.lower)
        self.higher = np.array(self.higher)
        return self.lower, self.higher
# def isArea(self,x,y,roi):
# point = Point([x, y])
# polygon = Polygon(roi)
# return polygon.contains(point)
#
# return 0
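    # Usage sketch (the HSV bounds and ROI polygon below are illustrative values only;
    # Polygon would come from shapely.geometry):
    #   ball_finder = findball(lower=[20, 100, 100], higher=[30, 255, 255],
    #                          roi=Polygon([(0, 0), (640, 0), (640, 480), (0, 480)]))
    #   box = ball_finder.imageProcess(frame)   # [] when nothing is found, else [x, y, w, h]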
def imageProcess(self,frame):
        # threshold the frame in HSV space using the configured colour bounds
        lower_bound, upper_bound = self.setValue()
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        mask = cv2.inRange(hsv, lower_bound, upper_bound)  # binary mask of the target colour
        res = cv2.bitwise_and(frame, frame, mask=mask)
        res = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)  # grayscale for thresholding and contour search
ret, binary = cv2.threshold(res, 20, 255, cv2.THRESH_BINARY)
binary = cv2.erode(binary, None, iterations=1)
binary=cv2.dilate(binary,None,iterations=2)
contours, hierarchy = cv2.findContours(binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if len(contours)<1:
return []
for con in contours:
            x, y, w, h = cv2.boundingRect(con)  # bounding box: top-left corner plus width and height
if self.roi.contains(Point(x,y)):
area=cv2.contourArea(con)
                if 50 < area < 60*60*4:
                    # draw a rectangle on the frame: top-left corner, bottom-right corner, colour, line width
                    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 3)
return [x,y,w,h]
else:
return [] |
Formal statement is: lemma continuous_at_Inf_mono: fixes f :: "'a::{linorder_topology,conditionally_complete_linorder} \<Rightarrow> 'b::{linorder_topology,conditionally_complete_linorder}" assumes "mono f" and cont: "continuous (at_right (Inf S)) f" and S: "S \<noteq> {}" "bdd_below S" shows "f (Inf S) = (INF s\<in>S. f s)" Informal statement is: If $f$ is a monotone function and $f$ is continuous at the right of the infimum of a nonempty set $S$ that is bounded below, then $f$ is continuous at the infimum of $S$. |
(* Title: JinjaThreads/J/SmallStep.thy
Author: Tobias Nipkow, Andreas Lochbihler
*)
section \<open>Small Step Semantics\<close>
theory SmallStep
imports
Expr
State
JHeap
begin
type_synonym
('addr, 'thread_id, 'heap) J_thread_action =
"('addr, 'thread_id, 'addr expr \<times> 'addr locals,'heap) Jinja_thread_action"
type_synonym
('addr, 'thread_id, 'heap) J_state =
"('addr,'thread_id,'addr expr \<times> 'addr locals,'heap,'addr) state"
(* pretty printing for J_thread_action type *)
print_translation \<open>
let
fun tr'
[a1, t
, Const (@{type_syntax "prod"}, _) $
(Const (@{type_syntax "exp"}, _) $
Const (@{type_syntax "String.literal"}, _) $ Const (@{type_syntax "unit"}, _) $ a2) $
(Const (@{type_syntax "fun"}, _) $
Const (@{type_syntax "String.literal"}, _) $
(Const (@{type_syntax "option"}, _) $
(Const (@{type_syntax "val"}, _) $ a3)))
, h] =
if a1 = a2 andalso a2 = a3 then Syntax.const @{type_syntax "J_thread_action"} $ a1 $ t $ h
else raise Match;
in [(@{type_syntax "Jinja_thread_action"}, K tr')]
end
\<close>
typ "('addr,'thread_id,'heap) J_thread_action"
(* pretty printing for J_state type *)
print_translation \<open>
let
fun tr'
[a1, t
, Const (@{type_syntax "prod"}, _) $
(Const (@{type_syntax "exp"}, _) $
Const (@{type_syntax "String.literal"}, _) $ Const (@{type_syntax "unit"}, _) $ a2) $
(Const (@{type_syntax "fun"}, _) $
Const (@{type_syntax "String.literal"}, _) $
(Const (@{type_syntax "option"}, _) $
(Const (@{type_syntax "val"}, _) $ a3)))
, h, a4] =
if a1 = a2 andalso a2 = a3 andalso a3 = a4 then Syntax.const @{type_syntax "J_state"} $ a1 $ t $ h
else raise Match;
in [(@{type_syntax "state"}, K tr')]
end
\<close>
typ "('addr, 'thread_id, 'heap) J_state"
definition extNTA2J :: "'addr J_prog \<Rightarrow> (cname \<times> mname \<times> 'addr) \<Rightarrow> 'addr expr \<times> 'addr locals"
where "extNTA2J P = (\<lambda>(C, M, a). let (D,Ts,T,meth) = method P C M; (pns,body) = the meth
in ({this:Class D=\<lfloor>Addr a\<rfloor>; body}, Map.empty))"
abbreviation J_local_start ::
"cname \<Rightarrow> mname \<Rightarrow> ty list \<Rightarrow> ty \<Rightarrow> 'addr J_mb \<Rightarrow> 'addr val list
\<Rightarrow> 'addr expr \<times> 'addr locals"
where
"J_local_start \<equiv>
\<lambda>C M Ts T (pns, body) vs.
(blocks (this # pns) (Class C # Ts) (Null # vs) body, Map.empty)"
abbreviation (in J_heap_base)
J_start_state :: "'addr J_prog \<Rightarrow> cname \<Rightarrow> mname \<Rightarrow> 'addr val list \<Rightarrow> ('addr, 'thread_id, 'heap) J_state"
where
"J_start_state \<equiv> start_state J_local_start"
lemma extNTA2J_iff [simp]:
"extNTA2J P (C, M, a) = ({this:Class (fst (method P C M))=\<lfloor>Addr a\<rfloor>; snd (the (snd (snd (snd (method P C M)))))}, Map.empty)"
by(simp add: extNTA2J_def split_beta)
abbreviation extTA2J ::
"'addr J_prog \<Rightarrow> ('addr, 'thread_id, 'heap) external_thread_action \<Rightarrow> ('addr, 'thread_id, 'heap) J_thread_action"
where "extTA2J P \<equiv> convert_extTA (extNTA2J P)"
lemma extTA2J_\<epsilon>: "extTA2J P \<epsilon> = \<epsilon>"
by(simp)
text\<open>Locking mechanism:
The expression on which the thread synchronizes is first evaluated to a value.
If this expression evaluates to null, a NullPointer exception is thrown.
If it evaluates to an address, a lock must be obtained on this address and the
sync expression is rewritten to insync.
For insync expressions, the body expression may be evaluated.
Once the body expression has been reduced to a value or a thrown exception, the lock is released and
the synchronized expression reduces to that result. This is the normal Java semantics,
not the one presented in LNCS 1523, Cenciarelli/Knapp/Reus/Wirsing. There,
the expression on which the thread synchronizes is evaluated except for the last step.
If the thread can obtain the lock on the object immediately after the last evaluation step, the evaluation is
completed and the lock acquired. If the lock cannot be obtained, the evaluation step is discarded. If another thread
changes the evaluation result of this last step, the thread will then try to synchronize on the new object.\<close>
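(* Illustration (an added sketch, not part of the original theory): when the monitor expression
   reduces to an address a and the body to a value v, the rules below compose as follows:
     sync(o) e   --SynchronizedRed1 steps-->   sync(addr a) e
                 --LockSynchronized (Lock->a, SyncLock a)-->   insync(a) e
                 --SynchronizedRed2 steps-->   insync(a) (Val v)
                 --UnlockSynchronized (Unlock->a, SyncUnlock a)-->   Val v
   If the monitor expression instead reduces to null, SynchronizedNull throws a NullPointer
   exception. *)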
context J_heap_base begin
inductive red ::
"(('addr, 'thread_id, 'heap) external_thread_action \<Rightarrow> ('addr, 'thread_id, 'x,'heap) Jinja_thread_action)
\<Rightarrow> 'addr J_prog \<Rightarrow> 'thread_id
\<Rightarrow> 'addr expr \<Rightarrow> ('addr, 'heap) Jstate
\<Rightarrow> ('addr, 'thread_id, 'x,'heap) Jinja_thread_action
\<Rightarrow> 'addr expr \<Rightarrow> ('addr, 'heap) Jstate \<Rightarrow> bool"
("_,_,_ \<turnstile> ((1\<langle>_,/_\<rangle>) -_\<rightarrow>/ (1\<langle>_,/_\<rangle>))" [51,51,0,0,0,0,0,0] 81)
and reds ::
"(('addr, 'thread_id, 'heap) external_thread_action \<Rightarrow> ('addr, 'thread_id, 'x,'heap) Jinja_thread_action)
\<Rightarrow> 'addr J_prog \<Rightarrow> 'thread_id
\<Rightarrow> 'addr expr list \<Rightarrow> ('addr, 'heap) Jstate
\<Rightarrow> ('addr, 'thread_id, 'x,'heap) Jinja_thread_action
\<Rightarrow> 'addr expr list \<Rightarrow> ('addr, 'heap) Jstate \<Rightarrow> bool"
("_,_,_ \<turnstile> ((1\<langle>_,/_\<rangle>) [-_\<rightarrow>]/ (1\<langle>_,/_\<rangle>))" [51,51,0,0,0,0,0,0] 81)
for extTA :: "('addr, 'thread_id, 'heap) external_thread_action \<Rightarrow> ('addr, 'thread_id, 'x, 'heap) Jinja_thread_action"
and P :: "'addr J_prog" and t :: 'thread_id
where
RedNew:
"(h', a) \<in> allocate h (Class_type C)
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>new C, (h, l)\<rangle> -\<lbrace>NewHeapElem a (Class_type C)\<rbrace>\<rightarrow> \<langle>addr a, (h', l)\<rangle>"
| RedNewFail:
"allocate h (Class_type C) = {}
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>new C, (h, l)\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW OutOfMemory, (h, l)\<rangle>"
| NewArrayRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>newA T\<lfloor>e\<rceil>, s\<rangle> -ta\<rightarrow> \<langle>newA T\<lfloor>e'\<rceil>, s'\<rangle>"
| RedNewArray:
"\<lbrakk> 0 <=s i; (h', a) \<in> allocate h (Array_type T (nat (sint i))) \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>newA T\<lfloor>Val (Intg i)\<rceil>, (h, l)\<rangle> -\<lbrace>NewHeapElem a (Array_type T (nat (sint i)))\<rbrace>\<rightarrow> \<langle>addr a, (h', l)\<rangle>"
| RedNewArrayNegative:
"i <s 0 \<Longrightarrow> extTA,P,t \<turnstile> \<langle>newA T\<lfloor>Val (Intg i)\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NegativeArraySize, s\<rangle>"
| RedNewArrayFail:
"\<lbrakk> 0 <=s i; allocate h (Array_type T (nat (sint i))) = {} \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>newA T\<lfloor>Val (Intg i)\<rceil>, (h, l)\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW OutOfMemory, (h, l)\<rangle>"
| CastRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>Cast C e, s\<rangle> -ta\<rightarrow> \<langle>Cast C e', s'\<rangle>"
| RedCast:
"\<lbrakk> typeof\<^bsub>hp s\<^esub> v = \<lfloor>U\<rfloor>; P \<turnstile> U \<le> T \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>Cast T (Val v), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val v, s\<rangle>"
| RedCastFail:
"\<lbrakk> typeof\<^bsub>hp s\<^esub> v = \<lfloor>U\<rfloor>; \<not> P \<turnstile> U \<le> T \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>Cast T (Val v), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW ClassCast, s\<rangle>"
| InstanceOfRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e instanceof T, s\<rangle> -ta\<rightarrow> \<langle>e' instanceof T, s'\<rangle>"
| RedInstanceOf:
"\<lbrakk> typeof\<^bsub>hp s\<^esub> v = \<lfloor>U\<rfloor>; b \<longleftrightarrow> v \<noteq> Null \<and> P \<turnstile> U \<le> T \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val v) instanceof T, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val (Bool b), s\<rangle>"
| BinOpRed1:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e \<guillemotleft>bop\<guillemotright> e2, s\<rangle> -ta\<rightarrow> \<langle>e' \<guillemotleft>bop\<guillemotright> e2, s'\<rangle>"
| BinOpRed2:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val v) \<guillemotleft>bop\<guillemotright> e, s\<rangle> -ta\<rightarrow> \<langle>(Val v) \<guillemotleft>bop\<guillemotright> e', s'\<rangle>"
| RedBinOp:
"binop bop v1 v2 = Some (Inl v) \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>(Val v1) \<guillemotleft>bop\<guillemotright> (Val v2), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val v, s\<rangle>"
| RedBinOpFail:
"binop bop v1 v2 = Some (Inr a) \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>(Val v1) \<guillemotleft>bop\<guillemotright> (Val v2), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| RedVar:
"lcl s V = Some v \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>Var V, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val v, s\<rangle>"
| LAssRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>V:=e, s\<rangle> -ta\<rightarrow> \<langle>V:=e', s'\<rangle>"
| RedLAss:
"extTA,P,t \<turnstile> \<langle>V:=(Val v), (h, l)\<rangle> -\<epsilon>\<rightarrow> \<langle>unit, (h, l(V \<mapsto> v))\<rangle>"
| AAccRed1:
"extTA,P,t \<turnstile> \<langle>a, s\<rangle> -ta\<rightarrow> \<langle>a', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>a\<lfloor>i\<rceil>, s\<rangle> -ta\<rightarrow> \<langle>a'\<lfloor>i\<rceil>, s'\<rangle>"
| AAccRed2:
"extTA,P,t \<turnstile> \<langle>i, s\<rangle> -ta\<rightarrow> \<langle>i', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val a)\<lfloor>i\<rceil>, s\<rangle> -ta\<rightarrow> \<langle>(Val a)\<lfloor>i'\<rceil>, s'\<rangle>"
| RedAAccNull:
"extTA,P,t \<turnstile> \<langle>null\<lfloor>Val i\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| RedAAccBounds:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>Array_type T n\<rfloor>; i <s 0 \<or> sint i \<ge> int n \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<lfloor>Val (Intg i)\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW ArrayIndexOutOfBounds, s\<rangle>"
| RedAAcc:
"\<lbrakk> typeof_addr h a = \<lfloor>Array_type T n\<rfloor>; 0 <=s i; sint i < int n;
heap_read h a (ACell (nat (sint i))) v \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<lfloor>Val (Intg i)\<rceil>, (h, l)\<rangle> -\<lbrace>ReadMem a (ACell (nat (sint i))) v\<rbrace>\<rightarrow> \<langle>Val v, (h, l)\<rangle>"
| AAssRed1:
"extTA,P,t \<turnstile> \<langle>a, s\<rangle> -ta\<rightarrow> \<langle>a', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>a\<lfloor>i\<rceil> := e, s\<rangle> -ta\<rightarrow> \<langle>a'\<lfloor>i\<rceil> := e, s'\<rangle>"
| AAssRed2:
"extTA,P,t \<turnstile> \<langle>i, s\<rangle> -ta\<rightarrow> \<langle>i', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val a)\<lfloor>i\<rceil> := e, s\<rangle> -ta\<rightarrow> \<langle>(Val a)\<lfloor>i'\<rceil> := e, s'\<rangle>"
| AAssRed3:
"extTA,P,t \<turnstile> \<langle>(e::'addr expr), s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val a)\<lfloor>Val i\<rceil> := e, s\<rangle> -ta\<rightarrow> \<langle>(Val a)\<lfloor>Val i\<rceil> := e', s'\<rangle>"
| RedAAssNull:
"extTA,P,t \<turnstile> \<langle>null\<lfloor>Val i\<rceil> := (Val e::'addr expr), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| RedAAssBounds:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>Array_type T n\<rfloor>; i <s 0 \<or> sint i \<ge> int n \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<lfloor>Val (Intg i)\<rceil> := (Val e::'addr expr), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW ArrayIndexOutOfBounds, s\<rangle>"
| RedAAssStore:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>Array_type T n\<rfloor>; 0 <=s i; sint i < int n;
typeof\<^bsub>hp s\<^esub> w = \<lfloor>U\<rfloor>; \<not> (P \<turnstile> U \<le> T) \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<lfloor>Val (Intg i)\<rceil> := (Val w::'addr expr), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW ArrayStore, s\<rangle>"
| RedAAss:
"\<lbrakk> typeof_addr h a = \<lfloor>Array_type T n\<rfloor>; 0 <=s i; sint i < int n; typeof\<^bsub>h\<^esub> w = Some U; P \<turnstile> U \<le> T;
heap_write h a (ACell (nat (sint i))) w h' \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<lfloor>Val (Intg i)\<rceil> := Val w::'addr expr, (h, l)\<rangle> -\<lbrace>WriteMem a (ACell (nat (sint i))) w\<rbrace>\<rightarrow> \<langle>unit, (h', l)\<rangle>"
| ALengthRed:
"extTA,P,t \<turnstile> \<langle>a, s\<rangle> -ta\<rightarrow> \<langle>a', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>a\<bullet>length, s\<rangle> -ta\<rightarrow> \<langle>a'\<bullet>length, s'\<rangle>"
| RedALength:
"typeof_addr h a = \<lfloor>Array_type T n\<rfloor>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>addr a\<bullet>length, (h, l)\<rangle> -\<epsilon>\<rightarrow> \<langle>Val (Intg (word_of_int (int n))), (h, l)\<rangle>"
| RedALengthNull:
"extTA,P,t \<turnstile> \<langle>null\<bullet>length, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| FAccRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e\<bullet>F{D}, s\<rangle> -ta\<rightarrow> \<langle>e'\<bullet>F{D}, s'\<rangle>"
| RedFAcc:
"heap_read h a (CField D F) v
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>F{D}, (h, l)\<rangle> -\<lbrace>ReadMem a (CField D F) v\<rbrace>\<rightarrow> \<langle>Val v, (h, l)\<rangle>"
| RedFAccNull:
"extTA,P,t \<turnstile> \<langle>null\<bullet>F{D}, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| FAssRed1:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e\<bullet>F{D}:=e2, s\<rangle> -ta\<rightarrow> \<langle>e'\<bullet>F{D}:=e2, s'\<rangle>"
| FAssRed2:
"extTA,P,t \<turnstile> \<langle>(e::'addr expr), s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>Val v\<bullet>F{D}:=e, s\<rangle> -ta\<rightarrow> \<langle>Val v\<bullet>F{D}:=e', s'\<rangle>"
| RedFAss:
"heap_write h a (CField D F) v h' \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>F{D}:= Val v, (h, l)\<rangle> -\<lbrace>WriteMem a (CField D F) v\<rbrace>\<rightarrow> \<langle>unit, (h', l)\<rangle>"
| RedFAssNull:
"extTA,P,t \<turnstile> \<langle>null\<bullet>F{D}:=Val v::'addr expr, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| CASRed1:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>e\<bullet>compareAndSwap(D\<bullet>F, e2, e3), s\<rangle> -ta\<rightarrow> \<langle>e'\<bullet>compareAndSwap(D\<bullet>F, e2, e3), s'\<rangle>"
| CASRed2:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, e, e3), s\<rangle> -ta\<rightarrow> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, e', e3), s'\<rangle>"
| CASRed3:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, Val v', e), s\<rangle> -ta\<rightarrow> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, Val v', e'), s'\<rangle>"
| CASNull:
"extTA,P,t \<turnstile> \<langle>null\<bullet>compareAndSwap(D\<bullet>F, Val v, Val v'), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| RedCASSucceed:
"\<lbrakk> heap_read h a (CField D F) v; heap_write h a (CField D F) v' h' \<rbrakk> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>addr a\<bullet>compareAndSwap(D\<bullet>F, Val v, Val v'), (h, l)\<rangle>
-\<lbrace>ReadMem a (CField D F) v, WriteMem a (CField D F) v'\<rbrace>\<rightarrow>
\<langle>true, (h', l)\<rangle>"
| RedCASFail:
"\<lbrakk> heap_read h a (CField D F) v''; v \<noteq> v'' \<rbrakk> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>addr a\<bullet>compareAndSwap(D\<bullet>F, Val v, Val v'), (h, l)\<rangle>
-\<lbrace>ReadMem a (CField D F) v''\<rbrace>\<rightarrow>
\<langle>false, (h, l)\<rangle>"
| CallObj:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e\<bullet>M(es), s\<rangle> -ta\<rightarrow> \<langle>e'\<bullet>M(es), s'\<rangle>"
| CallParams:
"extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es',s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>(Val v)\<bullet>M(es),s\<rangle> -ta\<rightarrow> \<langle>(Val v)\<bullet>M(es'),s'\<rangle>"
| RedCall:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>hU\<rfloor>; P \<turnstile> class_type_of hU sees M:Ts\<rightarrow>T = \<lfloor>(pns,body)\<rfloor> in D;
size vs = size pns; size Ts = size pns \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>M(map Val vs), s\<rangle> -\<epsilon>\<rightarrow> \<langle>blocks (this # pns) (Class D # Ts) (Addr a # vs) body, s\<rangle>"
| RedCallExternal:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>hU\<rfloor>; P \<turnstile> class_type_of hU sees M:Ts\<rightarrow>T = Native in D;
P,t \<turnstile> \<langle>a\<bullet>M(vs), hp s\<rangle> -ta\<rightarrow>ext \<langle>va, h'\<rangle>;
ta' = extTA ta; e' = extRet2J ((addr a)\<bullet>M(map Val vs)) va; s' = (h', lcl s) \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>M(map Val vs), s\<rangle> -ta'\<rightarrow> \<langle>e', s'\<rangle>"
| RedCallNull:
"extTA,P,t \<turnstile> \<langle>null\<bullet>M(map Val vs), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| BlockRed:
"extTA,P,t \<turnstile> \<langle>e, (h, l(V:=vo))\<rangle> -ta\<rightarrow> \<langle>e', (h', l')\<rangle>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>{V:T=vo; e}, (h, l)\<rangle> -ta\<rightarrow> \<langle>{V:T=l' V; e'}, (h', l'(V := l V))\<rangle>"
| RedBlock:
"extTA,P,t \<turnstile> \<langle>{V:T=vo; Val u}, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val u, s\<rangle>"
| SynchronizedRed1:
"extTA,P,t \<turnstile> \<langle>o', s\<rangle> -ta\<rightarrow> \<langle>o'', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>sync(o') e, s\<rangle> -ta\<rightarrow> \<langle>sync(o'') e, s'\<rangle>"
| SynchronizedNull:
"extTA,P,t \<turnstile> \<langle>sync(null) e, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| LockSynchronized:
"extTA,P,t \<turnstile> \<langle>sync(addr a) e, s\<rangle> -\<lbrace>Lock\<rightarrow>a, SyncLock a\<rbrace>\<rightarrow> \<langle>insync(a) e, s\<rangle>"
| SynchronizedRed2:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>insync(a) e, s\<rangle> -ta\<rightarrow> \<langle>insync(a) e', s'\<rangle>"
| UnlockSynchronized:
"extTA,P,t \<turnstile> \<langle>insync(a) (Val v), s\<rangle> -\<lbrace>Unlock\<rightarrow>a, SyncUnlock a\<rbrace>\<rightarrow> \<langle>Val v, s\<rangle>"
| SeqRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e;;e2, s\<rangle> -ta\<rightarrow> \<langle>e';;e2, s'\<rangle>"
| RedSeq:
"extTA,P,t \<turnstile> \<langle>(Val v);;e, s\<rangle> -\<epsilon>\<rightarrow> \<langle>e, s\<rangle>"
| CondRed:
"extTA,P,t \<turnstile> \<langle>b, s\<rangle> -ta\<rightarrow> \<langle>b', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>if (b) e1 else e2, s\<rangle> -ta\<rightarrow> \<langle>if (b') e1 else e2, s'\<rangle>"
| RedCondT:
"extTA,P,t \<turnstile> \<langle>if (true) e1 else e2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>e1, s\<rangle>"
| RedCondF:
"extTA,P,t \<turnstile> \<langle>if (false) e1 else e2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>e2, s\<rangle>"
| RedWhile:
"extTA,P,t \<turnstile> \<langle>while(b) c, s\<rangle> -\<epsilon>\<rightarrow> \<langle>if (b) (c;;while(b) c) else unit, s\<rangle>"
| ThrowRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>throw e, s\<rangle> -ta\<rightarrow> \<langle>throw e', s'\<rangle>"
| RedThrowNull:
"extTA,P,t \<turnstile> \<langle>throw null, s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
| TryRed:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>try e catch(C V) e2, s\<rangle> -ta\<rightarrow> \<langle>try e' catch(C V) e2, s'\<rangle>"
| RedTry:
"extTA,P,t \<turnstile> \<langle>try (Val v) catch(C V) e2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Val v, s\<rangle>"
| RedTryCatch:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>Class_type D\<rfloor>; P \<turnstile> D \<preceq>\<^sup>* C \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>try (Throw a) catch(C V) e2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>{V:Class C=\<lfloor>Addr a\<rfloor>; e2}, s\<rangle>"
| RedTryFail:
"\<lbrakk> typeof_addr (hp s) a = \<lfloor>Class_type D\<rfloor>; \<not> P \<turnstile> D \<preceq>\<^sup>* C \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>try (Throw a) catch(C V) e2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| ListRed1:
"extTA,P,t \<turnstile> \<langle>e,s\<rangle> -ta\<rightarrow> \<langle>e',s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>e#es,s\<rangle> [-ta\<rightarrow>] \<langle>e'#es,s'\<rangle>"
| ListRed2:
"extTA,P,t \<turnstile> \<langle>es,s\<rangle> [-ta\<rightarrow>] \<langle>es',s'\<rangle> \<Longrightarrow>
extTA,P,t \<turnstile> \<langle>Val v # es,s\<rangle> [-ta\<rightarrow>] \<langle>Val v # es',s'\<rangle>"
\<comment> \<open>Exception propagation\<close>
| NewArrayThrow: "extTA,P,t \<turnstile> \<langle>newA T\<lfloor>Throw a\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CastThrow: "extTA,P,t \<turnstile> \<langle>Cast C (Throw a), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| InstanceOfThrow: "extTA,P,t \<turnstile> \<langle>(Throw a) instanceof T, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| BinOpThrow1: "extTA,P,t \<turnstile> \<langle>(Throw a) \<guillemotleft>bop\<guillemotright> e\<^sub>2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| BinOpThrow2: "extTA,P,t \<turnstile> \<langle>(Val v\<^sub>1) \<guillemotleft>bop\<guillemotright> (Throw a), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| LAssThrow: "extTA,P,t \<turnstile> \<langle>V:=(Throw a), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| AAccThrow1: "extTA,P,t \<turnstile> \<langle>(Throw a)\<lfloor>i\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| AAccThrow2: "extTA,P,t \<turnstile> \<langle>(Val v)\<lfloor>Throw a\<rceil>, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| AAssThrow1: "extTA,P,t \<turnstile> \<langle>(Throw a)\<lfloor>i\<rceil> := e, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| AAssThrow2: "extTA,P,t \<turnstile> \<langle>(Val v)\<lfloor>Throw a\<rceil> := e, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| AAssThrow3: "extTA,P,t \<turnstile> \<langle>(Val v)\<lfloor>Val i\<rceil> := Throw a :: 'addr expr, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| ALengthThrow: "extTA,P,t \<turnstile> \<langle>(Throw a)\<bullet>length, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| FAccThrow: "extTA,P,t \<turnstile> \<langle>(Throw a)\<bullet>F{D}, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| FAssThrow1: "extTA,P,t \<turnstile> \<langle>(Throw a)\<bullet>F{D}:=e\<^sub>2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| FAssThrow2: "extTA,P,t \<turnstile> \<langle>Val v\<bullet>F{D}:=(Throw a::'addr expr), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CASThrow: "extTA,P,t \<turnstile> \<langle>Throw a\<bullet>compareAndSwap(D\<bullet>F, e2, e3), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CASThrow2: "extTA,P,t \<turnstile> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, Throw a, e3), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CASThrow3: "extTA,P,t \<turnstile> \<langle>Val v\<bullet>compareAndSwap(D\<bullet>F, Val v', Throw a), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CallThrowObj: "extTA,P,t \<turnstile> \<langle>(Throw a)\<bullet>M(es), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CallThrowParams: "\<lbrakk> es = map Val vs @ Throw a # es' \<rbrakk> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val v)\<bullet>M(es), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| BlockThrow: "extTA,P,t \<turnstile> \<langle>{V:T=vo; Throw a}, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| SynchronizedThrow1: "extTA,P,t \<turnstile> \<langle>sync(Throw a) e, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| SynchronizedThrow2: "extTA,P,t \<turnstile> \<langle>insync(a) Throw ad, s\<rangle> -\<lbrace>Unlock\<rightarrow>a, SyncUnlock a\<rbrace>\<rightarrow> \<langle>Throw ad, s\<rangle>"
| SeqThrow: "extTA,P,t \<turnstile> \<langle>(Throw a);;e\<^sub>2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| CondThrow: "extTA,P,t \<turnstile> \<langle>if (Throw a) e\<^sub>1 else e\<^sub>2, s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
| ThrowThrow: "extTA,P,t \<turnstile> \<langle>throw(Throw a), s\<rangle> -\<epsilon>\<rightarrow> \<langle>Throw a, s\<rangle>"
inductive_cases red_cases:
"extTA,P,t \<turnstile> \<langle>new C, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>newA T\<lfloor>e\<rceil>, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>Cast T e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e instanceof T, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e \<guillemotleft>bop\<guillemotright> e', s\<rangle> -ta\<rightarrow> \<langle>e'', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>Var V, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>V:=e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>a\<lfloor>i\<rceil>, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>a\<lfloor>i\<rceil> := e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>a\<bullet>length, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e\<bullet>F{D}, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e\<bullet>F{D} := e', s\<rangle> -ta\<rightarrow> \<langle>e'', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e\<bullet>compareAndSwap(D\<bullet>F, e', e''), s\<rangle> -ta\<rightarrow> \<langle>e''', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e\<bullet>M(es), s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>{V:T=vo; e}, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>sync(o') e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>insync(a) e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>e;;e', s\<rangle> -ta\<rightarrow> \<langle>e'', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>if (b) e1 else e2, s \<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>while (b) e, s \<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>throw e, s \<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
"extTA,P,t \<turnstile> \<langle>try e catch(C V) e', s\<rangle> -ta\<rightarrow> \<langle>e'', s'\<rangle>"
inductive_cases reds_cases:
"extTA,P,t \<turnstile> \<langle>e # es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle>"
abbreviation red' ::
"'addr J_prog \<Rightarrow> 'thread_id \<Rightarrow> 'addr expr \<Rightarrow> ('heap \<times> 'addr locals)
\<Rightarrow> ('addr, 'thread_id, 'heap) J_thread_action \<Rightarrow> 'addr expr \<Rightarrow> ('heap \<times> 'addr locals) \<Rightarrow> bool"
("_,_ \<turnstile> ((1\<langle>_,/_\<rangle>) -_\<rightarrow>/ (1\<langle>_,/_\<rangle>))" [51,0,0,0,0,0,0] 81)
where "red' P \<equiv> red (extTA2J P) P"
abbreviation reds' ::
"'addr J_prog \<Rightarrow> 'thread_id \<Rightarrow> 'addr expr list \<Rightarrow> ('heap \<times> 'addr locals)
\<Rightarrow> ('addr, 'thread_id, 'heap) J_thread_action \<Rightarrow> 'addr expr list \<Rightarrow> ('heap \<times> 'addr locals) \<Rightarrow> bool"
("_,_ \<turnstile> ((1\<langle>_,/_\<rangle>) [-_\<rightarrow>]/ (1\<langle>_,/_\<rangle>))" [51,0,0,0,0,0,0] 81)
where "reds' P \<equiv> reds (extTA2J P) P"
subsection\<open>Some easy lemmas\<close>
lemma red_no_val [dest]:
"\<lbrakk> extTA,P,t \<turnstile> \<langle>e, s\<rangle> -tas\<rightarrow> \<langle>e', s'\<rangle>; is_val e \<rbrakk> \<Longrightarrow> False"
by(auto)
lemma [iff]: "\<not> extTA,P,t \<turnstile> \<langle>Throw a, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>"
by(fastforce elim: red_cases)
lemma reds_map_Val_Throw:
"extTA,P,t \<turnstile> \<langle>map Val vs @ Throw a # es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<longleftrightarrow> False"
by(induct vs arbitrary: es')(auto elim!: reds_cases)
lemma reds_preserves_len:
"extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<Longrightarrow> length es' = length es"
by(induct es arbitrary: es')(auto elim: reds.cases)
lemma red_lcl_incr: "extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> dom (lcl s) \<subseteq> dom (lcl s')"
and reds_lcl_incr: "extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<Longrightarrow> dom (lcl s) \<subseteq> dom (lcl s')"
apply(induct rule:red_reds.inducts)
apply(auto simp del: fun_upd_apply split: if_split_asm)
done
lemma red_lcl_add_aux:
"extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e, (hp s, l0 ++ lcl s)\<rangle> -ta\<rightarrow> \<langle>e', (hp s', l0 ++ lcl s')\<rangle>"
and reds_lcl_add_aux:
"extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>es, (hp s, l0 ++ lcl s)\<rangle> [-ta\<rightarrow>] \<langle>es', (hp s', l0 ++ lcl s')\<rangle>"
proof (induct arbitrary: l0 and l0 rule:red_reds.inducts)
case (BlockRed e h x V vo ta e' h' x' T)
note IH = \<open>\<And>l0. extTA,P,t \<turnstile> \<langle>e,(hp (h, x(V := vo)), l0 ++ lcl (h, x(V := vo)))\<rangle> -ta\<rightarrow> \<langle>e',(hp (h', x'), l0 ++ lcl (h', x'))\<rangle>\<close>[simplified]
have lrew: "\<And>x x'. x(V := vo) ++ x'(V := vo) = (x ++ x')(V := vo)"
by(simp add:fun_eq_iff map_add_def)
have lrew1: "\<And>X X' X'' vo. (X(V := vo) ++ X')(V := (X ++ X'') V) = X ++ X'(V := X'' V)"
by(simp add: fun_eq_iff map_add_def)
have lrew2: "\<And>X X'. (X(V := None) ++ X') V = X' V"
by(simp add: map_add_def)
show ?case
proof(cases vo)
case None
from IH[of "l0(V := vo)"]
show ?thesis
apply(simp del: fun_upd_apply add: lrew)
apply(drule red_reds.BlockRed)
by(simp only: lrew1 None lrew2)
next
case (Some v)
with \<open>extTA,P,t \<turnstile> \<langle>e,(h, x(V := vo))\<rangle> -ta\<rightarrow> \<langle>e',(h', x')\<rangle>\<close>
have "x' V \<noteq> None"
by -(drule red_lcl_incr, auto split: if_split_asm)
with IH[of "l0(V := vo)"]
show ?thesis
apply(clarsimp simp del: fun_upd_apply simp add: lrew)
apply(drule red_reds.BlockRed)
by(simp add: lrew1 Some del: fun_upd_apply)
qed
next
case RedTryFail thus ?case
by(auto intro: red_reds.RedTryFail)
qed(fastforce intro:red_reds.intros simp del: fun_upd_apply)+
lemma red_lcl_add: "extTA,P,t \<turnstile> \<langle>e, (h, l)\<rangle> -ta\<rightarrow> \<langle>e', (h', l')\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e, (h, l0 ++ l)\<rangle> -ta\<rightarrow> \<langle>e', (h', l0 ++ l')\<rangle>"
and reds_lcl_add: "extTA,P,t \<turnstile> \<langle>es, (h, l)\<rangle> [-ta\<rightarrow>] \<langle>es', (h', l')\<rangle> \<Longrightarrow> extTA,P,t \<turnstile> \<langle>es, (h, l0 ++ l)\<rangle> [-ta\<rightarrow>] \<langle>es', (h', l0 ++ l')\<rangle>"
by(auto dest:red_lcl_add_aux reds_lcl_add_aux)
lemma reds_no_val [dest]:
"\<lbrakk> extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle>; is_vals es \<rbrakk> \<Longrightarrow> False"
apply(induct es arbitrary: s ta es' s')
apply(blast elim: reds.cases)
apply(erule reds.cases)
apply(auto, blast)
done
lemma red_no_Throw [dest!]:
"extTA,P,t \<turnstile> \<langle>Throw a, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> False"
by(auto elim!: red_cases)
lemma red_lcl_sub:
"\<lbrakk> extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>; fv e \<subseteq> W \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>e, (hp s, (lcl s)|`W)\<rangle> -ta\<rightarrow> \<langle>e', (hp s', (lcl s')|`W)\<rangle>"
and reds_lcl_sub:
"\<lbrakk> extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle>; fvs es \<subseteq> W \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>es, (hp s, (lcl s)|`W)\<rangle> [-ta\<rightarrow>] \<langle>es', (hp s', (lcl s')|`W)\<rangle>"
proof(induct arbitrary: W and W rule: red_reds.inducts)
case (RedLAss V v h l W)
have "extTA,P,t \<turnstile> \<langle>V:=Val v,(h, l |` W)\<rangle> -\<epsilon>\<rightarrow> \<langle>unit,(h, (l |`W)(V \<mapsto> v))\<rangle>"
by(rule red_reds.RedLAss)
with RedLAss show ?case by(simp del: fun_upd_apply)
next
case (BlockRed e h x V vo ta e' h' x' T)
have IH: "\<And>W. fv e \<subseteq> W \<Longrightarrow> extTA,P,t \<turnstile> \<langle>e,(hp (h, x(V := vo)), lcl (h, x(V := vo)) |` W)\<rangle> -ta\<rightarrow> \<langle>e',(hp (h', x'), lcl (h', x') |` W)\<rangle>" by fact
from \<open>fv {V:T=vo; e} \<subseteq> W\<close> have fve: "fv e \<subseteq> insert V W" by auto
show ?case
proof(cases "V \<in> W")
case True
with fve have "fv e \<subseteq> W" by auto
from True IH[OF this] have "extTA,P,t \<turnstile> \<langle>e,(h, (x |` W )(V := vo))\<rangle> -ta\<rightarrow> \<langle>e',(h', x' |` W)\<rangle>" by(simp)
with True have "extTA,P,t \<turnstile> \<langle>{V:T=vo; e},(h, x |` W)\<rangle> -ta\<rightarrow> \<langle>{V:T=x' V; e'},(h', (x' |` W)(V := x V))\<rangle>"
by -(drule red_reds.BlockRed[where T=T], simp)
with True show ?thesis by(simp del: fun_upd_apply)
next
case False
with IH[OF fve] have "extTA,P,t \<turnstile> \<langle>e,(h, (x |` W)(V := vo))\<rangle> -ta\<rightarrow> \<langle>e',(h', x' |` insert V W)\<rangle>" by(simp)
with False have "extTA,P,t \<turnstile> \<langle>{V:T=vo; e},(h, x |` W)\<rangle> -ta\<rightarrow> \<langle>{V:T=x' V; e'},(h', (x' |` W))\<rangle>"
by -(drule red_reds.BlockRed[where T=T],simp)
with False show ?thesis by(simp del: fun_upd_apply)
qed
next
case RedTryFail thus ?case by(auto intro: red_reds.RedTryFail)
qed(fastforce intro: red_reds.intros)+
lemma red_notfree_unchanged: "\<lbrakk> extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>; V \<notin> fv e \<rbrakk> \<Longrightarrow> lcl s' V = lcl s V"
and reds_notfree_unchanged: "\<lbrakk> extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle>; V \<notin> fvs es \<rbrakk> \<Longrightarrow> lcl s' V = lcl s V"
apply(induct rule: red_reds.inducts)
apply(fastforce)+
done
lemma red_dom_lcl: "extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> dom (lcl s') \<subseteq> dom (lcl s) \<union> fv e"
and reds_dom_lcl: "extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<Longrightarrow> dom (lcl s') \<subseteq> dom (lcl s) \<union> fvs es"
proof (induct rule:red_reds.inducts)
case (BlockRed e h x V vo ta e' h' x' T)
thus ?case by(clarsimp)(fastforce split:if_split_asm)
qed auto
lemma red_Suspend_is_call:
"\<lbrakk> convert_extTA extNTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>; Suspend w \<in> set \<lbrace>ta\<rbrace>\<^bsub>w\<^esub> \<rbrakk>
\<Longrightarrow> \<exists>a vs hT Ts Tr D. call e' = \<lfloor>(a, wait, vs)\<rfloor> \<and> typeof_addr (hp s) a = \<lfloor>hT\<rfloor> \<and> P \<turnstile> class_type_of hT sees wait:Ts\<rightarrow>Tr = Native in D"
and reds_Suspend_is_calls:
"\<lbrakk> convert_extTA extNTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle>; Suspend w \<in> set \<lbrace>ta\<rbrace>\<^bsub>w\<^esub> \<rbrakk>
\<Longrightarrow> \<exists>a vs hT Ts Tr D. calls es' = \<lfloor>(a, wait, vs)\<rfloor> \<and> typeof_addr (hp s) a = \<lfloor>hT\<rfloor> \<and> P \<turnstile> class_type_of hT sees wait:Ts\<rightarrow>Tr = Native in D"
proof(induct rule: red_reds.inducts)
case RedCallExternal
thus ?case
apply clarsimp
apply(frule red_external_Suspend_StaySame, simp)
apply(drule red_external_Suspend_waitD, fastforce+)
done
qed auto
end
context J_heap begin
lemma red_hext_incr: "extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle> \<Longrightarrow> hp s \<unlhd> hp s'"
and reds_hext_incr: "extTA,P,t \<turnstile> \<langle>es, s\<rangle> [-ta\<rightarrow>] \<langle>es', s'\<rangle> \<Longrightarrow> hp s \<unlhd> hp s'"
by(induct rule:red_reds.inducts)(auto intro: hext_heap_ops red_external_hext)
lemma red_preserves_tconf: "\<lbrakk> extTA,P,t \<turnstile> \<langle>e, s\<rangle> -ta\<rightarrow> \<langle>e', s'\<rangle>; P,hp s \<turnstile> t \<surd>t \<rbrakk> \<Longrightarrow> P,hp s' \<turnstile> t \<surd>t"
by(drule red_hext_incr)(rule tconf_hext_mono)
end
subsection \<open>Code generation\<close>
context J_heap_base begin
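text \<open>
  The following rule variants replace the pattern \<open>map Val vs\<close> of the original
  call rules by an executable \<open>is_vals\<close> test (and use \<open>is_Throws\<close> for exception
  propagation in the parameter list), which allows the predicate compiler below
  to process them.
\<close>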
lemma RedCall_code:
"\<lbrakk> is_vals es; typeof_addr (hp s) a = \<lfloor>hU\<rfloor>; P \<turnstile> class_type_of hU sees M:Ts\<rightarrow>T = \<lfloor>(pns,body)\<rfloor> in D;
size es = size pns; size Ts = size pns \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>M(es), s\<rangle> -\<epsilon>\<rightarrow> \<langle>blocks (this # pns) (Class D # Ts) (Addr a # map the_Val es) body, s\<rangle>"
and RedCallExternal_code:
"\<lbrakk> is_vals es; typeof_addr (hp s) a = \<lfloor>hU\<rfloor>; P \<turnstile> class_type_of hU sees M:Ts\<rightarrow>T = Native in D;
P,t \<turnstile> \<langle>a\<bullet>M(map the_Val es), hp s\<rangle> -ta\<rightarrow>ext \<langle>va, h'\<rangle> \<rbrakk>
\<Longrightarrow> extTA,P,t \<turnstile> \<langle>(addr a)\<bullet>M(es), s\<rangle> -extTA ta\<rightarrow> \<langle>extRet2J ((addr a)\<bullet>M(es)) va, (h', lcl s)\<rangle>"
and RedCallNull_code:
"is_vals es \<Longrightarrow> extTA,P,t \<turnstile> \<langle>null\<bullet>M(es), s\<rangle> -\<epsilon>\<rightarrow> \<langle>THROW NullPointer, s\<rangle>"
and CallThrowParams_code:
"is_Throws es \<Longrightarrow> extTA,P,t \<turnstile> \<langle>(Val v)\<bullet>M(es), s\<rangle> -\<epsilon>\<rightarrow> \<langle>hd (dropWhile is_val es), s\<rangle>"
apply(auto simp add: is_vals_conv is_Throws_conv o_def intro: RedCall RedCallExternal RedCallNull simp del: blocks.simps)
apply(subst dropWhile_append2)
apply(auto intro: CallThrowParams)
done
end
lemmas [code_pred_intro] =
J_heap_base.RedNew[folded Predicate_Compile.contains_def] J_heap_base.RedNewFail J_heap_base.NewArrayRed
J_heap_base.RedNewArray[folded Predicate_Compile.contains_def]
J_heap_base.RedNewArrayNegative J_heap_base.RedNewArrayFail
J_heap_base.CastRed J_heap_base.RedCast J_heap_base.RedCastFail J_heap_base.InstanceOfRed
J_heap_base.RedInstanceOf J_heap_base.BinOpRed1 J_heap_base.BinOpRed2 J_heap_base.RedBinOp J_heap_base.RedBinOpFail
J_heap_base.RedVar J_heap_base.LAssRed J_heap_base.RedLAss
J_heap_base.AAccRed1 J_heap_base.AAccRed2 J_heap_base.RedAAccNull
J_heap_base.RedAAccBounds J_heap_base.RedAAcc J_heap_base.AAssRed1 J_heap_base.AAssRed2 J_heap_base.AAssRed3
J_heap_base.RedAAssNull J_heap_base.RedAAssBounds J_heap_base.RedAAssStore J_heap_base.RedAAss J_heap_base.ALengthRed
J_heap_base.RedALength J_heap_base.RedALengthNull J_heap_base.FAccRed J_heap_base.RedFAcc J_heap_base.RedFAccNull
J_heap_base.FAssRed1 J_heap_base.FAssRed2 J_heap_base.RedFAss J_heap_base.RedFAssNull
J_heap_base.CASRed1 J_heap_base.CASRed2 J_heap_base.CASRed3 J_heap_base.CASNull J_heap_base.RedCASSucceed J_heap_base.RedCASFail
J_heap_base.CallObj J_heap_base.CallParams
declare
J_heap_base.RedCall_code[code_pred_intro RedCall_code]
J_heap_base.RedCallExternal_code[code_pred_intro RedCallExternal_code]
J_heap_base.RedCallNull_code[code_pred_intro RedCallNull_code]
lemmas [code_pred_intro] =
J_heap_base.BlockRed J_heap_base.RedBlock J_heap_base.SynchronizedRed1 J_heap_base.SynchronizedNull
J_heap_base.LockSynchronized J_heap_base.SynchronizedRed2 J_heap_base.UnlockSynchronized
J_heap_base.SeqRed J_heap_base.RedSeq J_heap_base.CondRed J_heap_base.RedCondT J_heap_base.RedCondF J_heap_base.RedWhile
J_heap_base.ThrowRed
declare
J_heap_base.RedThrowNull[code_pred_intro RedThrowNull']
lemmas [code_pred_intro] =
J_heap_base.TryRed J_heap_base.RedTry J_heap_base.RedTryCatch
J_heap_base.RedTryFail J_heap_base.ListRed1 J_heap_base.ListRed2
J_heap_base.NewArrayThrow J_heap_base.CastThrow J_heap_base.InstanceOfThrow J_heap_base.BinOpThrow1 J_heap_base.BinOpThrow2
J_heap_base.LAssThrow J_heap_base.AAccThrow1 J_heap_base.AAccThrow2 J_heap_base.AAssThrow1 J_heap_base.AAssThrow2
J_heap_base.AAssThrow3 J_heap_base.ALengthThrow J_heap_base.FAccThrow J_heap_base.FAssThrow1 J_heap_base.FAssThrow2
J_heap_base.CASThrow J_heap_base.CASThrow2 J_heap_base.CASThrow3
J_heap_base.CallThrowObj
declare
J_heap_base.CallThrowParams_code[code_pred_intro CallThrowParams_code]
lemmas [code_pred_intro] =
J_heap_base.BlockThrow J_heap_base.SynchronizedThrow1 J_heap_base.SynchronizedThrow2 J_heap_base.SeqThrow
J_heap_base.CondThrow
declare
J_heap_base.ThrowThrow[code_pred_intro ThrowThrow']
code_pred
(modes:
J_heap_base.red: i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> (i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> bool) \<Rightarrow> (i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> bool) \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> bool
and
J_heap_base.reds: i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> (i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> bool) \<Rightarrow> (i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> bool) \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> i \<Rightarrow> o \<Rightarrow> o \<Rightarrow> o \<Rightarrow> bool)
[detect_switches, skip_proof] \<comment> \<open>proofs are possible, but take veeerry long\<close>
J_heap_base.red
proof -
case red
from red.prems show thesis
proof(cases rule: J_heap_base.red.cases[consumes 1, case_names
RedNew RedNewFail NewArrayRed RedNewArray RedNewArrayNegative RedNewArrayFail CastRed RedCast RedCastFail InstanceOfRed
RedInstanceOf BinOpRed1 BinOpRed2 RedBinOp RedBinOpFail RedVar LAssRed RedLAss
AAccRed1 AAccRed2 RedAAccNull RedAAccBounds RedAAcc
AAssRed1 AAssRed2 AAssRed3 RedAAssNull RedAAssBounds RedAAssStore RedAAss ALengthRed RedALength RedALengthNull FAccRed
RedFAcc RedFAccNull FAssRed1 FAssRed2 RedFAss RedFAssNull CASRed1 CASRed2 CASRed3 RedCASNull RedCASSucceed RedCASFail
CallObj CallParams RedCall RedCallExternal RedCallNull
BlockRed RedBlock SynchronizedRed1 SynchronizedNull LockSynchronized SynchronizedRed2 UnlockSynchronized SeqRed
RedSeq CondRed RedCondT RedCondF RedWhile ThrowRed RedThrowNull TryRed RedTry RedTryCatch RedTryFail
NewArrayThrow CastThrow InstanceOfThrow BinOpThrow1 BinOpThrow2 LAssThrow AAccThrow1 AAccThrow2 AAssThrow1 AAssThrow2
AAssThrow3 ALengthThrow FAccThrow FAssThrow1 FAssThrow2 CASThrow CASThrow2 CASThrow3
CallThrowObj CallThrowParams BlockThrow SynchronizedThrow1
SynchronizedThrow2 SeqThrow CondThrow ThrowThrow])
case (RedCall s a U M Ts T pns body D vs)
with red.RedCall_code[OF refl refl refl refl refl refl refl refl refl refl refl, of a M "map Val vs" s pns D Ts body U T]
show ?thesis by(simp add: o_def)
next
case (RedCallExternal s a U M Ts T D vs ta va h' ta' e' s')
with red.RedCallExternal_code[OF refl refl refl refl refl refl refl refl refl refl refl, of a M "map Val vs" s ta va h' U Ts T D]
show ?thesis by(simp add: o_def)
next
case (RedCallNull M vs s)
with red.RedCallNull_code[OF refl refl refl refl refl refl refl refl refl refl refl, of M "map Val vs" s]
show ?thesis by(simp add: o_def)
next
case (CallThrowParams es vs a es' v M s)
with red.CallThrowParams_code[OF refl refl refl refl refl refl refl refl refl refl refl, of v M "map Val vs @ Throw a # es'" s]
show ?thesis
apply(auto simp add: is_Throws_conv)
apply(erule meta_impE)
apply(subst dropWhile_append2)
apply auto
done
next
case RedThrowNull thus ?thesis
by-(erule (4) red.RedThrowNull'[OF refl refl refl refl refl refl refl refl refl refl refl])
next
case ThrowThrow thus ?thesis
by-(erule (4) red.ThrowThrow'[OF refl refl refl refl refl refl refl refl refl refl refl])
qed(assumption|erule (4) red.that[unfolded Predicate_Compile.contains_def, OF refl refl refl refl refl refl refl refl refl refl refl])+
next
case reds
from reds.prems show thesis
by(rule J_heap_base.reds.cases)(assumption|erule (4) reds.that[OF refl refl refl refl refl refl refl refl refl refl refl])+
qed
end
|
Formal statement is: lemma mem_cball [simp, metric_unfold]: "y \<in> cball x e \<longleftrightarrow> dist x y \<le> e" Informal statement is: A point $y$ is in the closed ball of radius $e$ centered at $x$ if and only if the distance between $x$ and $y$ is less than or equal to $e$. |
lemma linear_imp_differentiable: fixes f :: "'a::euclidean_space \<Rightarrow> 'b::real_normed_vector" shows "linear f \<Longrightarrow> f differentiable net" |
lemma tendsto_at_botI_sequentially: fixes f :: "real \<Rightarrow> 'b::first_countable_topology" assumes *: "\<And>X. filterlim X at_bot sequentially \<Longrightarrow> (\<lambda>n. f (X n)) \<longlonglongrightarrow> y" shows "(f \<longlongrightarrow> y) at_bot" |
(* Title: HOL/Auth/n_german_lemma_on_inv__31.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_german Protocol Case Study*}
theory n_german_lemma_on_inv__31 imports n_german_base
begin
section{*All lemmas on causal relation between inv__31 and some rule r*}
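text{* Each of the following lemmas shows that a protocol rule r preserves the
invariant inv__31, by a case distinction on whether the rule parameter i
coincides with the invariant parameter p__Inv2; rules that cannot affect the
invariant are discharged directly with the rule noEffectOnRule. *}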
lemma n_SendInv__part__0Vsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInv__part__0 i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInv__part__1Vsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInv__part__1 i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInvAckVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInvAck i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvInvAckVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvInvAck i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvInvAck i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "((formEval (eqn (IVar (Ident ''ExGntd'')) (Const true)) s))\<or>((formEval (neg (eqn (IVar (Ident ''ExGntd'')) (Const true))) s))" by auto
moreover {
assume c1: "((formEval (eqn (IVar (Ident ''ExGntd'')) (Const true)) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (neg (eqn (IVar (Ident ''ExGntd'')) (Const true))) s))"
have "?P2 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(i~=p__Inv2)"
have "((formEval (eqn (IVar (Ident ''ExGntd'')) (Const true)) s))\<or>((formEval (neg (eqn (IVar (Ident ''ExGntd'')) (Const true))) s))" by auto
moreover {
assume c1: "((formEval (eqn (IVar (Ident ''ExGntd'')) (Const true)) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (neg (eqn (IVar (Ident ''ExGntd'')) (Const true))) s))"
have "?P2 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntSVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntS i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntS i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntEVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntE N i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Ident ''Chan2'') p__Inv2) ''Cmd'')) (Const GntS)) (eqn (IVar (Para (Ident ''ShrSet'') p__Inv2)) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvGntSVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvGntS i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvGntEVsinv__31:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvGntE i" apply fastforce done
from a2 obtain p__Inv2 where a2:"p__Inv2\<le>N\<and>f=inv__31 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendReqE__part__1Vsinv__31:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqE__part__1 i" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_StoreVsinv__31:
assumes a1: "\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvReqEVsinv__31:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvReqE N i" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqE__part__0Vsinv__31:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqE__part__0 i" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqSVsinv__31:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqS i" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvReqSVsinv__31:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvReqS N i" and
a2: "(\<exists> p__Inv2. p__Inv2\<le>N\<and>f=inv__31 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
end
|
(* Title: HOL/Auth/n_german_lemma_on_inv__13.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_german Protocol Case Study*}
theory n_german_lemma_on_inv__13 imports n_german_base
begin
section{*All lemmas on causal relation between inv__13 and some rule r*}
lemma n_SendInv__part__0Vsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__0 i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInv__part__0 i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInv__part__1Vsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInv__part__1 i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInv__part__1 i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendInvAckVsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendInvAck i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendInvAck i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntSVsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntS i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntS i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Ident ''ExGntd'')) (Const false)) (eqn (IVar (Field (Para (Ident ''Cache'') p__Inv1) ''State'')) (Const E))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendGntEVsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_SendGntE N i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_SendGntE N i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvGntSVsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntS i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvGntS i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_RecvGntEVsinv__13:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_RecvGntE i)" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_RecvGntE i" apply fastforce done
from a2 obtain p__Inv1 p__Inv2 where a2:"p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2" apply fastforce done
have "(i=p__Inv2)\<or>(i=p__Inv1)\<or>(i~=p__Inv1\<and>i~=p__Inv2)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv2)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i=p__Inv1)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Ident ''Chan2'') p__Inv2) ''Cmd'')) (Const GntS)) (eqn (IVar (Field (Para (Ident ''Chan2'') p__Inv1) ''Cmd'')) (Const GntE))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv1\<and>i~=p__Inv2)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_SendReqE__part__1Vsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqE__part__1 i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_StoreVsinv__13:
assumes a1: "\<exists> i d. i\<le>N\<and>d\<le>N\<and>r=n_Store i d" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvInvAckVsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvInvAck i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvReqEVsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvReqE N i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqE__part__0Vsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqE__part__0 i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_SendReqSVsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_SendReqS i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_RecvReqSVsinv__13:
assumes a1: "\<exists> i. i\<le>N\<and>r=n_RecvReqS N i" and
a2: "(\<exists> p__Inv1 p__Inv2. p__Inv1\<le>N\<and>p__Inv2\<le>N\<and>p__Inv1~=p__Inv2\<and>f=inv__13 p__Inv1 p__Inv2)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
end
|
lemma eq_cbox: "cbox a b = cbox c d \<longleftrightarrow> cbox a b = {} \<and> cbox c d = {} \<or> a = c \<and> b = d" (is "?lhs = ?rhs") |
theory Boolean_Expression_Example
imports Boolean_Expression_Checkers
begin
section{* Example *}
text {* Example usage of checkers. We have our own type of boolean expressions
with its own evaluation function: *}
datatype 'a bexp =
Const bool |
Atom 'a |
Neg "'a bexp" |
And "'a bexp" "'a bexp"
fun bval where
"bval (Const b) s = b" |
"bval (Atom a) s = s a" |
"bval (Neg b) s = (\<not> bval b s)" |
"bval (And b1 b2) s = (bval b1 s \<and> bval b2 s)"
text{* Now we translate into the datatype provided by the checkers interface
and show that the semantics remains the same: *}
fun bool_expr_of_bexp :: "'a bexp \<Rightarrow> 'a bool_expr" where
"bool_expr_of_bexp (Const b) = Const_bool_expr b" |
"bool_expr_of_bexp (Atom a) = Atom_bool_expr a" |
"bool_expr_of_bexp (Neg b) = Neg_bool_expr(bool_expr_of_bexp b)" |
"bool_expr_of_bexp (And b1 b2) = And_bool_expr (bool_expr_of_bexp b1) (bool_expr_of_bexp b2)"
lemma val_preservation: "val_bool_expr(bool_expr_of_bexp b) s = bval b s"
by(induction b) auto
text{* Trivial tautology checker and its correctness: *}
definition "my_taut_test = taut_test o bool_expr_of_bexp"
corollary my_taut_test: "my_taut_test b = (\<forall>s. bval b s)"
by(simp add: my_taut_test_def val_preservation taut_test)
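text{* Another small instance (illustrative only): by the correctness
corollary above, simp shows that the negated contradiction
Neg (And (Atom a) (Neg (Atom a))) is a tautology for an arbitrary atom a. *}

lemma "my_taut_test (Neg (And (Atom a) (Neg (Atom a))))"
by(simp add: my_taut_test)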
text{* Test: pigeonhole formulas *}
definition "Or b1 b2 == Neg (And (Neg b1) (Neg b2))"
definition "ors = foldl Or (Const False)"
definition "ands = foldl And (Const True)"
definition "pc n = ands[ors[Atom(i,j). j <- [1..<n+1]]. i <- [1..<n+2]]"
definition "nc n = ands[Or (Neg(Atom(i,k))) (Neg(Atom(j,k))).
k <- [1..<n+1], i <- [1..<n+1], j <- [i+1..<n+2]]"
definition "php n = Neg(And (pc n) (nc n))"
text{* Takes about 5 secs; with 7 instead of 6 it takes about 4 mins (2014). *}
lemma "my_taut_test (php 6)"
by eval
end
|
(*
Copyright (C) 2017 M.A.L. Marques
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*)
(* type: gga_exc *)
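(* The definitions below implement the Wu-Cohen (WC) exchange enhancement
   factor: MU_GE is the second-order gradient-expansion coefficient 10/81,
   wc_mu the PBE value of mu, and wc_f0 the WC form of the enhancement
   factor F_x. *)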
wc_mu := 0.2195149727645171:
wc_c := (146/2025)*(4/9) - (73/405)*(2/3) + (wc_mu - MU_GE):
wc_kappa := KAPPA_PBE:
wc_f0_aux := s -> wc_kappa + MU_GE * s^2 + (wc_mu - MU_GE) * s^2 * exp(-s^2) + log(1 + wc_c * s^4):
wc_f0 := s -> 1 + wc_kappa*(1 - wc_kappa/wc_f0_aux(s)):
wc_f := x -> wc_f0(X2S*x):
f := (rs, z, xt, xs0, xs1) -> gga_exchange(wc_f, rs, z, xs0, xs1):
|
[STATEMENT]
lemma isolated_verts_dv: "H.isolated_verts = isolated_verts - {u}"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. H.isolated_verts = isolated_verts - {u}
[PROOF STEP]
by (auto simp: isolated_verts_def H.isolated_verts_def verts_del_vert out_arcs_dv) |
------------------------------------------------------------------------
-- A semantics which uses continuation-passing style
------------------------------------------------------------------------
module TotalParserCombinators.Semantics.Continuation where
open import Algebra
open import Codata.Musical.Notation
open import Data.List
import Data.List.Properties as ListProp
open import Data.List.Relation.Binary.BagAndSetEquality
using (bag) renaming (_∼[_]_ to _List-∼[_]_)
open import Data.Maybe using (Maybe)
open import Data.Product as Prod
open import Function
open import Relation.Binary.PropositionalEquality as P using (_≡_)
private
module LM {Tok : Set} = Monoid (ListProp.++-monoid Tok)
open import TotalParserCombinators.Parser
open import TotalParserCombinators.Semantics as S
hiding ([_-_]_⊛_; [_-_]_>>=_)
-- The statement x ⊕ s₂ ∈ p · s means that there is some s₁ such that
-- s ≡ s₁ ++ s₂ and x ∈ p · s₁. This variant of the semantics is
-- perhaps harder to understand, but sometimes easier to work with
-- (and it is proved to be language equivalent to the semantics in
-- TotalParserCombinators.Semantics).
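-- For instance (restating the token constructor below): the token parser
-- satisfies x ⊕ s ∈ token · x ∷ s, i.e. it consumes exactly the first input
-- token x and passes the remaining input s on unchanged.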
infix 60 <$>_
infixl 50 [_-_]_⊛_
infixl 10 [_-_]_>>=_
infix 4 _⊕_∈_·_
data _⊕_∈_·_ {Tok} : ∀ {R xs} → R → List Tok →
Parser Tok R xs → List Tok → Set₁ where
return : ∀ {R} {x : R} {s} → x ⊕ s ∈ return x · s
token : ∀ {x s} → x ⊕ s ∈ token · x ∷ s
∣-left : ∀ {R x xs₁ xs₂ s s₁}
{p₁ : Parser Tok R xs₁} {p₂ : Parser Tok R xs₂}
(x∈p₁ : x ⊕ s₁ ∈ p₁ · s) → x ⊕ s₁ ∈ p₁ ∣ p₂ · s
∣-right : ∀ {R x xs₂ s s₁} xs₁
{p₁ : Parser Tok R xs₁} {p₂ : Parser Tok R xs₂}
(x∈p₂ : x ⊕ s₁ ∈ p₂ · s) → x ⊕ s₁ ∈ p₁ ∣ p₂ · s
<$>_ : ∀ {R₁ R₂ x s s₁ xs} {p : Parser Tok R₁ xs} {f : R₁ → R₂}
(x∈p : x ⊕ s₁ ∈ p · s) → f x ⊕ s₁ ∈ f <$> p · s
[_-_]_⊛_ : ∀ {R₁ R₂ f x s s₁ s₂} xs fs
{p₁ : ∞⟨ xs ⟩Parser Tok (R₁ → R₂) (flatten fs)}
{p₂ : ∞⟨ fs ⟩Parser Tok R₁ (flatten xs)} →
(f∈p₁ : f ⊕ s₁ ∈ ♭? p₁ · s)
(x∈p₂ : x ⊕ s₂ ∈ ♭? p₂ · s₁) →
f x ⊕ s₂ ∈ p₁ ⊛ p₂ · s
[_-_]_>>=_ : ∀ {R₁ R₂ x y s s₁ s₂} (f : Maybe (R₁ → List R₂)) xs
{p₁ : ∞⟨ f ⟩Parser Tok R₁ (flatten xs)}
{p₂ : (x : R₁) → ∞⟨ xs ⟩Parser Tok R₂ (apply f x)} →
(x∈p₁ : x ⊕ s₁ ∈ ♭? p₁ · s)
(y∈p₂x : y ⊕ s₂ ∈ ♭? (p₂ x) · s₁) →
y ⊕ s₂ ∈ p₁ >>= p₂ · s
nonempty : ∀ {R xs x y s₂} s₁ {p : Parser Tok R xs}
(x∈p : y ⊕ s₂ ∈ p · x ∷ s₁ ++ s₂) →
y ⊕ s₂ ∈ nonempty p · x ∷ s₁ ++ s₂
cast : ∀ {R xs₁ xs₂ x s₁ s₂} {xs₁≈xs₂ : xs₁ List-∼[ bag ] xs₂}
{p : Parser Tok R xs₁}
(x∈p : x ⊕ s₂ ∈ p · s₁) → x ⊕ s₂ ∈ cast xs₁≈xs₂ p · s₁
-- A simple cast lemma.
private
cast∈′ : ∀ {Tok R xs} {p : Parser Tok R xs} {x s s′ s₁} →
s ≡ s′ → x ⊕ s₁ ∈ p · s → x ⊕ s₁ ∈ p · s′
cast∈′ P.refl x∈ = x∈
-- The definition is sound and complete with respect to the one in
-- TotalParserCombinators.Semantics.
sound′ : ∀ {Tok R xs x s₂ s} {p : Parser Tok R xs} →
x ⊕ s₂ ∈ p · s → ∃ λ s₁ → s ≡ s₁ ++ s₂ × x ∈ p · s₁
sound′ return = ([] , P.refl , return)
sound′ {x = x} token = ([ x ] , P.refl , token)
sound′ (∣-left x∈p₁) = Prod.map id (Prod.map id ∣-left) (sound′ x∈p₁)
sound′ (∣-right e₁ x∈p₁) = Prod.map id (Prod.map id (∣-right e₁)) (sound′ x∈p₁)
sound′ (<$> x∈p) = Prod.map id (Prod.map id (<$>_)) (sound′ x∈p)
sound′ ([ xs - fs ] f∈p₁ ⊛ x∈p₂) with sound′ f∈p₁ | sound′ x∈p₂
sound′ ([ xs - fs ] f∈p₁ ⊛ x∈p₂) | (s₁ , P.refl , f∈p₁′) | (s₂ , P.refl , x∈p₂′) =
(s₁ ++ s₂ , P.sym (LM.assoc s₁ s₂ _) ,
S.[_-_]_⊛_ xs fs f∈p₁′ x∈p₂′)
sound′ (nonempty s₁ x∈p) with sound′ x∈p
sound′ (nonempty s₁ x∈p) | (y ∷ s , eq , x∈p′) = (y ∷ s , eq , nonempty x∈p′)
sound′ (nonempty s₁ x∈p) | ([] , eq , x∈p′)
with ListProp.++-identityˡ-unique (_ ∷ s₁) (P.sym eq)
sound′ (nonempty s₁ x∈p) | ([] , eq , x∈p′) | ()
sound′ (cast x∈p) = Prod.map id (Prod.map id cast) (sound′ x∈p)
sound′ ([ f - xs ] x∈p₁ >>= y∈p₂x) with sound′ x∈p₁ | sound′ y∈p₂x
sound′ ([ f - xs ] x∈p₁ >>= y∈p₂x) | (s₁ , P.refl , x∈p₁′) | (s₂ , P.refl , y∈p₂x′) =
(s₁ ++ s₂ , P.sym (LM.assoc s₁ s₂ _) , S.[_-_]_>>=_ f xs x∈p₁′ y∈p₂x′)
sound : ∀ {Tok R xs x s} {p : Parser Tok R xs} →
x ⊕ [] ∈ p · s → x ∈ p · s
sound x∈p with sound′ x∈p
sound x∈p | (s , P.refl , x∈p′) with s ++ [] | Prod.proj₂ LM.identity s
sound x∈p | (s , P.refl , x∈p′) | .s | P.refl = x∈p′
extend : ∀ {Tok R xs x s s′ s″} {p : Parser Tok R xs} →
x ⊕ s′ ∈ p · s → x ⊕ s′ ++ s″ ∈ p · s ++ s″
extend return = return
extend token = token
extend (∣-left x∈p₁) = ∣-left (extend x∈p₁)
extend (∣-right e₁ x∈p₂) = ∣-right e₁ (extend x∈p₂)
extend (<$> x∈p) = <$> extend x∈p
extend ([ xs - fs ] f∈p₁ ⊛ x∈p₂) = [ xs - fs ] extend f∈p₁ ⊛ extend x∈p₂
extend ([ f - xs ] x∈p₁ >>= y∈p₂x) = [ f - xs ] extend x∈p₁ >>= extend y∈p₂x
extend (cast x∈p) = cast (extend x∈p)
extend (nonempty s₁ x∈p) = cast₂ (nonempty s₁ (cast₁ (extend x∈p)))
where
lem = LM.assoc (_ ∷ s₁) _ _
cast₁ = cast∈′ lem
cast₂ = cast∈′ (P.sym lem)
complete : ∀ {Tok R xs x s} {p : Parser Tok R xs} →
x ∈ p · s → x ⊕ [] ∈ p · s
complete return = return
complete token = token
complete (∣-left x∈p₁) = ∣-left (complete x∈p₁)
complete (∣-right e₁ x∈p₂) = ∣-right e₁ (complete x∈p₂)
complete (<$> x∈p) = <$> complete x∈p
complete (_⊛_ {fs = fs} {xs = xs} f∈p₁ x∈p₂) = [ xs - fs ] extend (complete f∈p₁) ⊛ complete x∈p₂
complete (_>>=_ {xs = xs} {f = f} x∈p₁ y∈p₂x) = [ f - xs ] extend (complete x∈p₁) >>= complete y∈p₂x
complete (cast x∈p) = cast (complete x∈p)
complete (nonempty {s = s} x∈p) = cast₂ (nonempty s (cast₁ (complete x∈p)))
where
lem = Prod.proj₂ LM.identity _
cast₁ = cast∈′ (P.sym lem)
cast₂ = cast∈′ lem
complete′ : ∀ {Tok R xs x s₂ s} {p : Parser Tok R xs} →
(∃ λ s₁ → s ≡ s₁ ++ s₂ × x ∈ p · s₁) →
x ⊕ s₂ ∈ p · s
complete′ (s₁ , P.refl , x∈p) = extend (complete x∈p)
|
lemma closedin_component: "C \<in> components s \<Longrightarrow> closedin (top_of_set s) C" |
Formal statement is: lemma continuous_on_avoid: fixes f :: "'a::metric_space \<Rightarrow> 'b::t1_space" assumes "continuous_on s f" and "x \<in> s" and "f x \<noteq> a" shows "\<exists>e>0. \<forall>y \<in> s. dist x y < e \<longrightarrow> f y \<noteq> a" Informal statement is: If $f$ is a continuous function from a metric space to a T1 space, and $f(x) \neq a$, then there exists an open neighborhood of $x$ on which $f$ does not take the value $a$. |
/-
Homework 8
Sina Hazratpour
Introduction to Proof
MATH 301, Johns Hopkins University, Fall 2022
-/
import ..prooflab
import lectures.lec11_type_classes
/-! # Homework 8: ...
Homework must be done individually.
Replace the placeholders (e.g., `:= sorry`) with your solutions.
You are allowed to use all the tactics we have learned so far.
-/
variables {X Y : Type} {f : X → Y} {p : Y → X}
open PROOFS
open PROOFS.STR
open PROOFS.type_classes
local notation `𝟙` := unit -- type as \b1
local notation `⋆` := unit.star
lemma unit_unique (x : 𝟙) :
x = ⋆ :=
begin
exact unit.ext,
end
/-
A map is __constant__ if it maps all points in the domain to the same point in the codomain.
-/
def is_constant (f : X → Y) := ∀ x x' : X, f x = f x'
/-
A map `f : X → Y` is __constant at a point__ `y` of `Y` if `f` maps all points in `X` to `y`.
-/
def is_constant_at (f : X → Y) (y : Y) := ∀ x : X, f x = y
/-! ## Question 1 (20 pts):
Show that if a function is constant at a point then it is constant.
-/
lemma constant_at_point_implies_constant {f : X → Y} :
(∃ y : Y, is_constant_at f y) → is_constant f :=
begin
sorry,
end
/-! ## Question 2 (20 pts):
Prove that a function which factors through a type which is equivalent to the one-point type is constant.
Feel free to use the lemma `ptwise.left_inv`
-/
#check @ptwise.left_inv
theorem constant_of_factor_unit {f : X → Y} {Φ : fun_fact f} {α : fun_equiv Φ.node 𝟙} :
is_constant f :=
begin
  unfold is_constant,
  sorry,
end
/- For every type `X` there is a unique function
from `X` to `𝟙` which takes all points of `X` to `⋆`.
-/
@[simp]
def to_terminal (X : Type) : X → 𝟙 := λ x, ⋆
notation ` ! ` := to_terminal
infix ` ≅ `:35 := fun_equiv
/-! ## Question 3 (20 pts):
Prove that the unique function `X → 𝟙` is surjective iff `X` is pointed by filling the `sorry` placeholder.
-/
def is_surj_of_pointed_type {X : pointed_type} :
is_surjective (! X.type) :=
begin
sorry,
end
/-
**Formalize the converse**, that is if `! X : X → 𝟙` is surjective then `X` is pointed (i.e. it admits the structure of a pointed type). Then **prove** the converse statement.
-/
#check classical.some
noncomputable
def is_pointed_of_surj {X : Type} {h : is_surjective (! X)} : pointed_type :=
{
type := X,
point := let h' : (∃ x : X, true) := by {unfold is_surjective at h,
simp at *, assumption} in classical.some h' ,
}
/-! ## Question 4 (20 pts):
Prove that the image of the unique function `X → 𝟙` is equivalent to `𝟙` if `X` is pointed.
Feel free to use the lemma `ptwise.left_inv`
-/
def truncation_of_pointed_type {X : pointed_type} :
𝟙 ≅ (fun_image (! X.type)) :=
{
to_fun := sorry,
inv_fun := sorry,
left_inv := by {sorry},
right_inv := by {sorry},
}
/-
We say a type is __inhabited__ if there is some element in it.
-/
@[simp]
def is_inhabited (X : Type) := ∃ x : X, true
/-
The __fibre at a point__ `x : X` of a function `p : Y → X` is the preimage of `x` under `p`.
-/
@[simp]
def fibre_at (x : X) := { y : Y // p y = x}
#check @fibre_at
local notation ` p⁻¹ ` : 15 := λ x, @fibre_at X Y p x
#check p⁻¹
/-! ## Question 5 (20 pts):
Let `p : Y → X` be a function. Prove that if all the fibres of `p` are inhabited then `p` is surjective.
-/
def surj_of_pointed_fibres {ptd_fibres : ∀ x : X, is_inhabited (p⁻¹ x) } : is_surjective p :=
begin
sorry,
end
|
module Main
import Data.Array16Spec
import Data.HashMapSpec
main : IO ()
main = do
ignore $ specArray16
ignore $ specHashMap |
{-# OPTIONS --safe #-}
module Cubical.Algebra.CommAlgebra.Base where
open import Cubical.Foundations.Prelude
open import Cubical.Foundations.Equiv
open import Cubical.Foundations.HLevels
open import Cubical.Foundations.SIP
open import Cubical.Data.Sigma
open import Cubical.Algebra.Semigroup
open import Cubical.Algebra.Monoid
open import Cubical.Algebra.CommRing
open import Cubical.Algebra.Ring
open import Cubical.Algebra.Algebra
open import Cubical.Displayed.Base
open import Cubical.Displayed.Auto
open import Cubical.Displayed.Record
open import Cubical.Displayed.Universe
open import Cubical.Reflection.RecordEquiv
private
variable
ℓ ℓ' : Level
record IsCommAlgebra (R : CommRing ℓ) {A : Type ℓ'}
(0a : A) (1a : A)
(_+_ : A → A → A) (_·_ : A → A → A) (-_ : A → A)
(_⋆_ : ⟨ R ⟩ → A → A) : Type (ℓ-max ℓ ℓ') where
constructor iscommalgebra
field
isAlgebra : IsAlgebra (CommRing→Ring R) 0a 1a _+_ _·_ -_ _⋆_
·-comm : (x y : A) → x · y ≡ y · x
open IsAlgebra isAlgebra public
unquoteDecl IsCommAlgebraIsoΣ = declareRecordIsoΣ IsCommAlgebraIsoΣ (quote IsCommAlgebra)
record CommAlgebraStr (R : CommRing ℓ) (A : Type ℓ') : Type (ℓ-max ℓ ℓ') where
constructor commalgebrastr
field
0a : A
1a : A
_+_ : A → A → A
_·_ : A → A → A
-_ : A → A
_⋆_ : ⟨ R ⟩ → A → A
isCommAlgebra : IsCommAlgebra R 0a 1a _+_ _·_ -_ _⋆_
open IsCommAlgebra isCommAlgebra public
infix 8 -_
infixl 7 _·_
infixl 7 _⋆_
infixl 6 _+_
CommAlgebra : (R : CommRing ℓ) → ∀ ℓ' → Type (ℓ-max ℓ (ℓ-suc ℓ'))
CommAlgebra R ℓ' = Σ[ A ∈ Type ℓ' ] CommAlgebraStr R A
module _ {R : CommRing ℓ} where
open CommRingStr (snd R) using (1r) renaming (_+_ to _+r_; _·_ to _·s_)
CommAlgebraStr→AlgebraStr : {A : Type ℓ'} → CommAlgebraStr R A → AlgebraStr (CommRing→Ring R) A
CommAlgebraStr→AlgebraStr (commalgebrastr _ _ _ _ _ _ (iscommalgebra isAlgebra ·-comm)) =
algebrastr _ _ _ _ _ _ isAlgebra
CommAlgebra→Algebra : (A : CommAlgebra R ℓ') → Algebra (CommRing→Ring R) ℓ'
CommAlgebra→Algebra (_ , str) = (_ , CommAlgebraStr→AlgebraStr str)
CommAlgebra→CommRing : (A : CommAlgebra R ℓ') → CommRing ℓ'
CommAlgebra→CommRing (_ , commalgebrastr _ _ _ _ _ _ (iscommalgebra isAlgebra ·-comm)) =
_ , commringstr _ _ _ _ _ (iscommring (IsAlgebra.isRing isAlgebra) ·-comm)
isSetCommAlgebra : (A : CommAlgebra R ℓ') → isSet ⟨ A ⟩
isSetCommAlgebra A = isSetAlgebra (CommAlgebra→Algebra A)
makeIsCommAlgebra : {A : Type ℓ'} {0a 1a : A}
{_+_ _·_ : A → A → A} { -_ : A → A} {_⋆_ : ⟨ R ⟩ → A → A}
(isSet-A : isSet A)
(+-assoc : (x y z : A) → x + (y + z) ≡ (x + y) + z)
(+-rid : (x : A) → x + 0a ≡ x)
(+-rinv : (x : A) → x + (- x) ≡ 0a)
(+-comm : (x y : A) → x + y ≡ y + x)
(·-assoc : (x y z : A) → x · (y · z) ≡ (x · y) · z)
(·-lid : (x : A) → 1a · x ≡ x)
(·-ldist-+ : (x y z : A) → (x + y) · z ≡ (x · z) + (y · z))
(·-comm : (x y : A) → x · y ≡ y · x)
(⋆-assoc : (r s : ⟨ R ⟩) (x : A) → (r ·s s) ⋆ x ≡ r ⋆ (s ⋆ x))
(⋆-ldist : (r s : ⟨ R ⟩) (x : A) → (r +r s) ⋆ x ≡ (r ⋆ x) + (s ⋆ x))
(⋆-rdist : (r : ⟨ R ⟩) (x y : A) → r ⋆ (x + y) ≡ (r ⋆ x) + (r ⋆ y))
(⋆-lid : (x : A) → 1r ⋆ x ≡ x)
(⋆-lassoc : (r : ⟨ R ⟩) (x y : A) → (r ⋆ x) · y ≡ r ⋆ (x · y))
→ IsCommAlgebra R 0a 1a _+_ _·_ -_ _⋆_
makeIsCommAlgebra {A = A} {0a} {1a} {_+_} {_·_} { -_} {_⋆_} isSet-A
+-assoc +-rid +-rinv +-comm
·-assoc ·-lid ·-ldist-+ ·-comm
⋆-assoc ⋆-ldist ⋆-rdist ⋆-lid ⋆-lassoc
= iscommalgebra
(makeIsAlgebra
isSet-A
+-assoc +-rid +-rinv +-comm
·-assoc
(λ x → x · 1a ≡⟨ ·-comm _ _ ⟩ 1a · x ≡⟨ ·-lid _ ⟩ x ∎)
·-lid
(λ x y z → x · (y + z) ≡⟨ ·-comm _ _ ⟩
(y + z) · x ≡⟨ ·-ldist-+ _ _ _ ⟩
(y · x) + (z · x) ≡⟨ cong (λ u → (y · x) + u) (·-comm _ _) ⟩
(y · x) + (x · z) ≡⟨ cong (λ u → u + (x · z)) (·-comm _ _) ⟩
(x · y) + (x · z) ∎)
·-ldist-+
⋆-assoc
⋆-ldist
⋆-rdist
⋆-lid
⋆-lassoc
λ r x y → r ⋆ (x · y) ≡⟨ cong (λ u → r ⋆ u) (·-comm _ _) ⟩
r ⋆ (y · x) ≡⟨ sym (⋆-lassoc _ _ _) ⟩
(r ⋆ y) · x ≡⟨ ·-comm _ _ ⟩
x · (r ⋆ y) ∎)
·-comm
module _ (S : CommRing ℓ) where
open CommRingStr (snd S) renaming (1r to 1S)
open CommRingStr (snd R) using () renaming (_·_ to _·R_; _+_ to _+R_; 1r to 1R)
commAlgebraFromCommRing :
(_⋆_ : fst R → fst S → fst S)
→ ((r s : fst R) (x : fst S) → (r ·R s) ⋆ x ≡ r ⋆ (s ⋆ x))
→ ((r s : fst R) (x : fst S) → (r +R s) ⋆ x ≡ (r ⋆ x) + (s ⋆ x))
→ ((r : fst R) (x y : fst S) → r ⋆ (x + y) ≡ (r ⋆ x) + (r ⋆ y))
→ ((x : fst S) → 1R ⋆ x ≡ x)
→ ((r : fst R) (x y : fst S) → (r ⋆ x) · y ≡ r ⋆ (x · y))
→ CommAlgebra R ℓ
commAlgebraFromCommRing _⋆_ ·Assoc⋆ ⋆DistR ⋆DistL ⋆Lid ⋆Assoc· = fst S ,
commalgebrastr 0r 1S _+_ _·_ -_ _⋆_
(makeIsCommAlgebra is-set +Assoc +Rid +Rinv +Comm ·Assoc ·Lid ·Ldist+ ·Comm
·Assoc⋆ ⋆DistR ⋆DistL ⋆Lid ⋆Assoc·)
IsCommAlgebraEquiv : {A B : Type ℓ'}
(M : CommAlgebraStr R A) (e : A ≃ B) (N : CommAlgebraStr R B)
→ Type (ℓ-max ℓ ℓ')
IsCommAlgebraEquiv M e N =
IsAlgebraHom (CommAlgebraStr→AlgebraStr M) (e .fst) (CommAlgebraStr→AlgebraStr N)
CommAlgebraEquiv : (M N : CommAlgebra R ℓ') → Type (ℓ-max ℓ ℓ')
CommAlgebraEquiv M N = Σ[ e ∈ ⟨ M ⟩ ≃ ⟨ N ⟩ ] IsCommAlgebraEquiv (M .snd) e (N .snd)
IsCommAlgebraHom : {A B : Type ℓ'}
(M : CommAlgebraStr R A) (f : A → B) (N : CommAlgebraStr R B)
→ Type (ℓ-max ℓ ℓ')
IsCommAlgebraHom M f N =
IsAlgebraHom (CommAlgebraStr→AlgebraStr M) f (CommAlgebraStr→AlgebraStr N)
CommAlgebraHom : (M N : CommAlgebra R ℓ') → Type (ℓ-max ℓ ℓ')
CommAlgebraHom M N = Σ[ f ∈ (⟨ M ⟩ → ⟨ N ⟩) ] IsCommAlgebraHom (M .snd) f (N .snd)
module _ {M N : CommAlgebra R ℓ'} where
open CommAlgebraStr {{...}}
open IsAlgebraHom
private
instance
_ = snd M
_ = snd N
makeCommAlgebraHom : (f : fst M → fst N)
→ (fPres1 : f 1a ≡ 1a)
→ (fPres+ : (x y : fst M) → f (x + y) ≡ f x + f y)
→ (fPres· : (x y : fst M) → f (x · y) ≡ f x · f y)
→ (fPres⋆ : (r : fst R) (x : fst M) → f (r ⋆ x) ≡ r ⋆ f x)
→ CommAlgebraHom M N
makeCommAlgebraHom f fPres1 fPres+ fPres· fPres⋆ = f , isHom
where fPres0 =
f 0a ≡⟨ sym (+-rid _) ⟩
f 0a + 0a ≡⟨ cong (λ u → f 0a + u) (sym (+-rinv (f 0a))) ⟩
f 0a + (f 0a - f 0a) ≡⟨ +-assoc (f 0a) (f 0a) (- f 0a) ⟩
(f 0a + f 0a) - f 0a ≡⟨ cong (λ u → u - f 0a) (sym (fPres+ 0a 0a)) ⟩
f (0a + 0a) - f 0a ≡⟨ cong (λ u → f u - f 0a) (+-lid 0a) ⟩
f 0a - f 0a ≡⟨ +-rinv (f 0a) ⟩
0a ∎
isHom : IsCommAlgebraHom (snd M) f (snd N)
pres0 isHom = fPres0
pres1 isHom = fPres1
pres+ isHom = fPres+
pres· isHom = fPres·
pres- isHom = (λ x →
f (- x) ≡⟨ sym (+-rid _) ⟩
(f (- x) + 0a) ≡⟨ cong (λ u → f (- x) + u) (sym (+-rinv (f x))) ⟩
(f (- x) + (f x - f x)) ≡⟨ +-assoc _ _ _ ⟩
((f (- x) + f x) - f x) ≡⟨ cong (λ u → u - f x) (sym (fPres+ _ _)) ⟩
(f ((- x) + x) - f x) ≡⟨ cong (λ u → f u - f x) (+-linv x) ⟩
(f 0a - f x) ≡⟨ cong (λ u → u - f x) fPres0 ⟩
(0a - f x) ≡⟨ +-lid _ ⟩ (- f x) ∎)
pres⋆ isHom = fPres⋆
isPropIsCommAlgebraHom : (f : fst M → fst N) → isProp (IsCommAlgebraHom (snd M) f (snd N))
isPropIsCommAlgebraHom f = isPropIsAlgebraHom
(CommRing→Ring R)
(snd (CommAlgebra→Algebra M))
f
(snd (CommAlgebra→Algebra N))
isPropIsCommAlgebra : (R : CommRing ℓ) {A : Type ℓ'}
(0a 1a : A)
(_+_ _·_ : A → A → A)
(-_ : A → A)
(_⋆_ : ⟨ R ⟩ → A → A)
→ isProp (IsCommAlgebra R 0a 1a _+_ _·_ -_ _⋆_)
isPropIsCommAlgebra R _ _ _ _ _ _ =
isOfHLevelRetractFromIso 1 IsCommAlgebraIsoΣ
(isPropΣ (isPropIsAlgebra _ _ _ _ _ _ _)
(λ alg → isPropΠ2 λ _ _ → alg .IsAlgebra.is-set _ _))
𝒮ᴰ-CommAlgebra : (R : CommRing ℓ) → DUARel (𝒮-Univ ℓ') (CommAlgebraStr R) (ℓ-max ℓ ℓ')
𝒮ᴰ-CommAlgebra R =
𝒮ᴰ-Record (𝒮-Univ _) (IsCommAlgebraEquiv {R = R})
(fields:
data[ 0a ∣ nul ∣ pres0 ]
data[ 1a ∣ nul ∣ pres1 ]
data[ _+_ ∣ bin ∣ pres+ ]
data[ _·_ ∣ bin ∣ pres· ]
data[ -_ ∣ autoDUARel _ _ ∣ pres- ]
data[ _⋆_ ∣ autoDUARel _ _ ∣ pres⋆ ]
prop[ isCommAlgebra ∣ (λ _ _ → isPropIsCommAlgebra _ _ _ _ _ _ _) ])
where
open CommAlgebraStr
open IsAlgebraHom
-- faster with some sharing
nul = autoDUARel (𝒮-Univ _) (λ A → A)
bin = autoDUARel (𝒮-Univ _) (λ A → A → A → A)
CommAlgebraPath : (R : CommRing ℓ) → (A B : CommAlgebra R ℓ') → (CommAlgebraEquiv A B) ≃ (A ≡ B)
CommAlgebraPath R = ∫ (𝒮ᴰ-CommAlgebra R) .UARel.ua
isGroupoidCommAlgebra : {R : CommRing ℓ} → isGroupoid (CommAlgebra R ℓ')
isGroupoidCommAlgebra A B = isOfHLevelRespectEquiv 2 (CommAlgebraPath _ _ _) (isSetAlgebraEquiv _ _)
|
Using codeine together with ezogabine may increase side effects such as dizziness, drowsiness, confusion, and difficulty concentrating. Some people, especially the elderly, may also experience impairment in thinking, judgment, and motor coordination. You should avoid or limit the use of alcohol while being treated with these medications. Also avoid activities requiring mental alertness such as driving or operating hazardous machinery until you know how the medications affect you. Talk to your doctor if you have any questions or concerns. It is important to tell your doctor about all other medications you use, including vitamins and herbs. Do not stop using any medications without first talking to your doctor.
Using dexbrompheniramine together with ezogabine may increase side effects such as dizziness, drowsiness, confusion, and difficulty concentrating. Some people, especially the elderly, may also experience impairment in thinking, judgment, and motor coordination. You should avoid or limit the use of alcohol while being treated with these medications. Also avoid activities requiring mental alertness such as driving or operating hazardous machinery until you know how the medications affect you. Talk to your doctor if you have any questions or concerns. It is important to tell your doctor about all other medications you use, including vitamins and herbs. Do not stop using any medications without first talking to your doctor. |
[GOAL]
β : Type u_1
M : Type ?u.36709
α : Type ?u.36708
inst✝² : SMul M α
inst✝¹ : FaithfulSMul M α
inst✝ : Nontrivial β
c₁ c₂ : Mᵈᵐᵃ
h : ∀ (a : α → β), c₁ • a = c₂ • a
a : α
⊢ ↑mk.symm c₁ • a = ↑mk.symm c₂ • a
[PROOFSTEP]
rcases exists_pair_ne β with ⟨x, y, hne⟩
[GOAL]
case intro.intro
β : Type u_1
M : Type ?u.36709
α : Type ?u.36708
inst✝² : SMul M α
inst✝¹ : FaithfulSMul M α
inst✝ : Nontrivial β
c₁ c₂ : Mᵈᵐᵃ
h : ∀ (a : α → β), c₁ • a = c₂ • a
a : α
x y : β
hne : x ≠ y
⊢ ↑mk.symm c₁ • a = ↑mk.symm c₂ • a
[PROOFSTEP]
contrapose! hne
[GOAL]
case intro.intro
β : Type u_1
M : Type ?u.36709
α : Type ?u.36708
inst✝² : SMul M α
inst✝¹ : FaithfulSMul M α
inst✝ : Nontrivial β
c₁ c₂ : Mᵈᵐᵃ
h : ∀ (a : α → β), c₁ • a = c₂ • a
a : α
x y : β
hne : ↑mk.symm c₁ • a ≠ ↑mk.symm c₂ • a
⊢ x = y
[PROOFSTEP]
haveI := Classical.decEq α
[GOAL]
case intro.intro
β : Type u_1
M : Type ?u.36709
α : Type ?u.36708
inst✝² : SMul M α
inst✝¹ : FaithfulSMul M α
inst✝ : Nontrivial β
c₁ c₂ : Mᵈᵐᵃ
h : ∀ (a : α → β), c₁ • a = c₂ • a
a : α
x y : β
hne : ↑mk.symm c₁ • a ≠ ↑mk.symm c₂ • a
this : DecidableEq α
⊢ x = y
[PROOFSTEP]
replace h := congr_fun (h (update (const α x) (mk.symm c₂ • a) y)) a
[GOAL]
case intro.intro
β : Type u_1
M : Type ?u.36709
α : Type ?u.36708
inst✝² : SMul M α
inst✝¹ : FaithfulSMul M α
inst✝ : Nontrivial β
c₁ c₂ : Mᵈᵐᵃ
a : α
x y : β
hne : ↑mk.symm c₁ • a ≠ ↑mk.symm c₂ • a
this : DecidableEq α
h : (c₁ • update (const α x) (↑mk.symm c₂ • a) y) a = (c₂ • update (const α x) (↑mk.symm c₂ • a) y) a
⊢ x = y
[PROOFSTEP]
simpa [smul_apply, hne] using h
|
! *****************************COPYRIGHT*******************************
! (C) Crown copyright Met Office. All rights reserved.
! For further details please refer to the file COPYRIGHT.txt
! which you should have received as part of this distribution.
! *****************************COPYRIGHT*******************************
!
! This file is part of the UM Shared Library project.
!
! The UM Shared Library is free software: you can redistribute it
! and/or modify it under the terms of the Modified BSD License, as
! published by the Open Source Initiative.
!
! The UM Shared Library is distributed in the hope that it will be
! useful, but WITHOUT ANY WARRANTY; without even the implied warranty
! of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! Modified BSD License for more details.
!
! You should have received a copy of the Modified BSD License
! along with the UM Shared Library.
! If not, see <http://opensource.org/licenses/BSD-3-Clause>.
!
!*******************************************************************************
!
! Description: Global standard invariant physical constants/conversions
MODULE f_shum_conversions_mod
USE, INTRINSIC :: ISO_C_BINDING, ONLY: &
C_INT64_T, C_INT32_T, C_FLOAT, C_DOUBLE
IMPLICIT NONE
PRIVATE
PUBLIC :: shum_rsec_per_day_const, shum_isec_per_day_const, &
shum_rsec_per_hour_const, shum_isec_per_hour_const, &
shum_isec_per_min_const, shum_rhour_per_day_const, &
shum_ihour_per_day_const, shum_rhour_per_sec_const, &
shum_rday_per_hour_const, shum_pi_const, &
shum_pi_over_180_const, shum_180_over_pi_const, &
shum_zerodegc_const, shum_kt2ms_const, &
shum_ft2m_const, shum_rsec_per_min_const, &
shum_rsec_per_day_const_32, shum_isec_per_day_const_32, &
shum_rsec_per_hour_const_32, shum_isec_per_hour_const_32, &
shum_isec_per_min_const_32, shum_rhour_per_day_const_32, &
shum_ihour_per_day_const_32, shum_rhour_per_sec_const_32, &
shum_rday_per_hour_const_32, shum_pi_const_32, &
shum_pi_over_180_const_32, shum_180_over_pi_const_32, &
shum_zerodegc_const_32, shum_kt2ms_const_32, &
shum_ft2m_const_32, shum_rsec_per_min_const_32
!------------------------------------------------------------------------------!
! We're going to use the types from the ISO_C_BINDING module, since although !
! the REALs aren't 100% guaranteed to correspond to the sizes we want to !
! enforce, they should be good enough on the majority of systems. !
! !
! Additional protection for the case that FLOAT/DOUBLE do not conform to the !
! sizes we expect is provided via the "precision_bomb" macro-file !
!------------------------------------------------------------------------------!
INTEGER, PARAMETER :: int64 = C_INT64_T
INTEGER, PARAMETER :: int32 = C_INT32_T
INTEGER, PARAMETER :: real64 = C_DOUBLE
INTEGER, PARAMETER :: real32 = C_FLOAT
!------------------------------------------------------------------------------!
!------------------------------------------------------------------------------!
!  64 Bit Conversion Parameters                                                !
!------------------------------------------------------------------------------!
! Number of seconds in one day - now rsec_per_day and isec_per_day
! which will replace magic number 86400 wherever possible
REAL(KIND=real64), PARAMETER :: shum_rsec_per_day_const = 86400.0_real64
INTEGER(KIND=int64), PARAMETER :: shum_isec_per_day_const = 86400_int64
REAL(KIND=real64), PARAMETER :: shum_rsec_per_hour_const = 3600.0_real64
INTEGER(KIND=int64), PARAMETER :: shum_isec_per_hour_const = 3600_int64
REAL(KIND=real64), PARAMETER :: shum_rsec_per_min_const = 60.0_real64
INTEGER(KIND=int64), PARAMETER :: shum_isec_per_min_const = 60_int64
REAL(KIND=real64), PARAMETER :: shum_rhour_per_day_const = 24.0_real64
INTEGER(KIND=int64), PARAMETER :: shum_ihour_per_day_const = 24_int64
REAL(KIND=real64), PARAMETER :: &
shum_rhour_per_sec_const = 1.0_real64/shum_rsec_per_hour_const, &
shum_rday_per_hour_const = 1.0_real64/shum_rhour_per_day_const
! Pi
REAL(KIND=real64), PARAMETER :: &
shum_pi_const = 3.14159265358979323846_real64
! Conversion factor degrees to radians
REAL(KIND=real64), PARAMETER :: &
shum_pi_over_180_const = shum_pi_const/180.0_real64
! Conversion factor radians to degrees
REAL(KIND=real64), PARAMETER :: &
shum_180_over_pi_const = 180.0_real64/shum_pi_const
! zerodegc is a conversion between degrees centigrade and kelvin
REAL(KIND=real64), PARAMETER :: shum_zerodegc_const = 273.15_real64
! Knots to m/s conversion
REAL(KIND=real64), PARAMETER :: shum_kt2ms_const = 1852.0_real64/3600.0_real64
! Feet to metres conversion
REAL(KIND=real64), PARAMETER :: shum_ft2m_const = 0.3048_real64
!------------------------------------------------------------------------------!
!  32 Bit Conversion Parameters (as above but in 32-bit types)                 !
!------------------------------------------------------------------------------!
REAL(KIND=real32), PARAMETER :: &
shum_rsec_per_day_const_32 = REAL(shum_rsec_per_day_const,real32)
INTEGER(KIND=int32), PARAMETER :: &
shum_isec_per_day_const_32 = INT(shum_isec_per_day_const,int32)
REAL(KIND=real32), PARAMETER :: &
shum_rsec_per_hour_const_32 = REAL(shum_rsec_per_hour_const,real32)
INTEGER(KIND=int32), PARAMETER :: &
shum_isec_per_hour_const_32 = INT(shum_isec_per_hour_const,int32)
REAL(KIND=real32), PARAMETER :: &
shum_rsec_per_min_const_32 = REAL(shum_rsec_per_min_const,real32)
INTEGER(KIND=int32), PARAMETER :: &
shum_isec_per_min_const_32 = INT(shum_isec_per_min_const,int32)
REAL(KIND=real32), PARAMETER :: &
shum_rhour_per_day_const_32 = REAL(shum_rhour_per_day_const,real32)
INTEGER(KIND=int32), PARAMETER :: &
shum_ihour_per_day_const_32 = INT(shum_ihour_per_day_const,int32)
REAL(KIND=real32), PARAMETER :: &
shum_rhour_per_sec_const_32 = 1.0_real32/shum_rsec_per_hour_const_32
REAL(KIND=real32), PARAMETER :: &
shum_rday_per_hour_const_32 = 1.0_real32/shum_rhour_per_day_const_32
REAL(KIND=real32), PARAMETER :: &
shum_pi_const_32 = 3.14159265358979323846_real32
REAL(KIND=real32), PARAMETER :: &
shum_pi_over_180_const_32 = shum_pi_const_32/180.0_real32
REAL(KIND=real32), PARAMETER :: &
shum_180_over_pi_const_32 = 180.0_real32/shum_pi_const_32
REAL(KIND=real32), PARAMETER :: &
shum_zerodegc_const_32 = 273.15_real32
REAL(KIND=real32), PARAMETER :: &
shum_kt2ms_const_32 = 1852.0_real32/3600.0_real32
REAL(KIND=real32), PARAMETER :: &
shum_ft2m_const_32 = 0.3048_real32
!------------------------------------------------------------------------------!
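! Example use from client code (illustrative sketch only; assumes the caller
! obtains a C_DOUBLE kind, e.g. from ISO_C_BINDING):
!   USE f_shum_conversions_mod, ONLY: shum_zerodegc_const, shum_rsec_per_hour_const
!   REAL(KIND=C_DOUBLE) :: temp_k, hours
!   temp_k = 20.0_C_DOUBLE + shum_zerodegc_const    ! 20 degC expressed in kelvin
!   hours  = 7200.0_C_DOUBLE / shum_rsec_per_hour_const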
END MODULE f_shum_conversions_mod
|
-- {-# OPTIONS -v tc.mod.apply:80 #-}
module Issue1985 where
module Def where
postulate A : Set
module Par (X : Set₁) where
postulate B : Set
open Def public
-- module Works where
-- module Ren B = Par B
-- module App = Ren Set
module Fails where
module RenP (X : Set₁) = Par X
module Ren = Par
-- Like RenP, Ren should contain
-- A : (B : Set) → Set
-- A B = Par.A B
-- but it incorrectly contained
-- A : Set
-- A = Par.A
A₁ A₂ B₁ B₂ : Set₁ → Set
A₁ = RenP.A
A₂ = Ren.A
B₁ = RenP.B
B₂ = Ren.B
module App = Ren Set
A₃ B₃ : Set
A₃ = App.A
B₃ = App.B
|
import numpy as np
import numpy.linalg as LA
from typing import List, Dict, Tuple
from math import log2, log, pi
from sklearn.cluster import KMeans
import math
from .graph import Graph, AbstractGraph
from .node import Node
from .cover import Cover, UniformCover, AbstractCover, CentroidCover
from .oracle import _check_clustering_object, map_overlap_cluster_to_interval
from .mapper import generate_mapper_graph
from .converter import graph_to_networkx
def adaptive_cover_graph(X: np.ndarray, lens: np.ndarray, cover: Cover, clusterer: object, per_interval_aggregator):
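    """Build a mapper graph over the supplied cover and report its size and loss.

    The points falling into each cover interval are clustered with ``clusterer``;
    ``per_interval_aggregator`` is evaluated on every interval's points, cluster
    assignments, and the clusterer, and summed into ``graph_loss``.  Each cluster
    becomes a node, and clusters in consecutive intervals that share points are
    joined by an edge.  Returns ``(number_of_nodes, graph_loss)``.
    """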
# Quick checks to fail if input is malformed
_check_clustering_object(clusterer)
if len(lens.shape) == 2:
assert lens.shape[1] == 1, 'Only 1D mapper is supported!'
lens = lens.reshape(-1)
interval_clusterings: List[List[int]] = []
graph: Graph = Graph()
interval_clusterings = []
graph_loss = 0
for i, interval_members in enumerate(cover.fit_intervals(lens)):
interval_clusterings.append([])
if len(interval_members) == 0:
continue
assignments: np.ndarray = clusterer.fit_predict(X[interval_members])
graph_loss += per_interval_aggregator(X[interval_members], assignments, clusterer)
num_clusters: int = assignments.max() + 1 # if 3 is a cluster, then there are 4 clusters: see dbscan sklearn docs
for cluster in range(num_clusters):
cluster_members = interval_members[assignments == cluster]
interval_clusterings[i].append(cluster_members)
if len(cluster_members) == 0:
continue
node = Node(i, cluster, cluster_members)
graph.add_node(node)
if i > 0: # beyond first interval
lower_interval_clusters = interval_clusterings[i - 1]
map_down = map_overlap_cluster_to_interval(cluster_members, lower_interval_clusters)
for down in map_down:
node2 = graph.get_node(i - 1, down)
graph.add_edge(node, node2)
return len(graph.nodes), graph_loss
def compute_centroids(X, graph):
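    """Return per-node centroids of ``X`` under the graph's hard clustering.

    ``dists`` accumulates the Euclidean distance of every clustered point to its
    cluster centroid; points left unassigned (label ``-1``) contribute their
    distance to the nearest centroid instead.  Returns ``(centroids, dists)``.
    """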
hard_cluster = graph.to_hard_clustering_set(X)
n_clusters = len(graph.nodes)
centroids = []
dists = 0
for c in range(n_clusters):
members = [i for i, val in enumerate(hard_cluster) if val == c]
centroid = np.mean(X[members], axis=0)
for m in members:
dists += np.linalg.norm(centroid - X[m])
centroids.append(centroid)
for m, val in enumerate(hard_cluster):
if val == -1 and len(centroids) != 0:
            min_dist = np.linalg.norm(centroids[0] - X[m])
            for centroid in centroids:
                min_dist = min(min_dist, np.linalg.norm(centroid - X[m]))
dists += min_dist
return centroids, dists
def AIC_Cover_Centroid(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
print("DEPRECATED - INCORRECT AIC CALCULATION")
    # Returns the AIC cost for each evaluated number of intervals, together with those interval counts
costs = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
num_clusters = []
def _aic(p, llh):
        return 2 * p - 2 * llh
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
n_clusters = len(graph.nodes)
num_clusters.append(n_clusters)
centroids, var = compute_centroids(X, graph)
k = len(centroids)
var = var / (X.shape[0] - k)
p = (k-1) + (k * X.shape[1]) + 1
membership = assign_membership(X, centroids)
llh = 0
for c in range(len(centroids)):
cluster_membership = X[membership == c]
if cluster_membership.shape[0] == 0:
continue
llh += xmeans_log_likelyhood(cluster_membership.shape[0], X.shape[1], var, k, X.shape[0])
costs.append(_aic(p, llh))
return costs, intervals
def AIC_normal_pdf(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
costs = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
cost = 0
nodes = list(graph.nodes)
max_vals = [lens[n.members].max() for n in nodes]
min_vals = [lens[n.members].min() for n in nodes]
counts = [len(n.members) for n in nodes]
for i in range(X.shape[0]):
function_value = lens[i]
C_y = 0
for ub, lb, c in zip(max_vals, min_vals, counts):
if ub >= function_value and lb <= function_value:
C_y += c
if C_y != 0:
cost = cost + log2(C_y)
cost = cost - len(graph.nodes) * 3
costs.append(cost)
return costs, intervals
def BIC_normal_pdf(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
costs = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
cost = 0
nodes = list(graph.nodes)
max_vals = [lens[n.members].max() for n in nodes]
min_vals = [lens[n.members].min() for n in nodes]
counts = [len(n.members) for n in nodes]
for i in range(X.shape[0]):
function_value = lens[i]
C_y = 0
for ub, lb, c in zip(max_vals, min_vals, counts):
if ub >= function_value and lb <= function_value:
C_y += c
if C_y != 0:
cost = cost + log2(C_y)
cost = cost - len(graph.nodes) * 3 * log2(X.shape[0])
costs.append(cost)
return costs, intervals
def Adj_Entropy(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer, weighted=True):
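    """Entropy of the mapper graph's edge distribution for each interval count.

    For every number of intervals in the sweep, a mapper graph is built and the
    Shannon entropy of its (optionally overlap-weighted) edge distribution is
    recorded.  Returns ``(entropies, intervals)``.
    """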
entropies = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
edges = list(graph.edges)
probabilities = []
for e in edges:
n1, n2 = e
n1 = n1.members
n2 = n2.members
if weighted:
probabilities.append(len(set(n1).intersection(set(n2))))
else:
probabilities.append(1)
probabilities = np.asarray(probabilities)
probabilities = probabilities / probabilities.sum()
entropy = 0
for p in probabilities:
entropy = entropy + p * log2(p)
entropy = entropy * -1
entropies.append(entropy)
return entropies, intervals
def Adj_Entropy_Pointwise(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer, weighted=True):
entropies = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
print(interval)
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
edges = list(graph.edges)
probabilities = np.zeros((X.shape[0], X.shape[0]))
total_weight = 0
for e in edges:
n1, n2 = e
n1 = n1.members
n2 = n2.members
weight = len(set(n1).intersection(set(n2))) if weighted else 1
unique_n1 = list(set(n1).difference(set(n2)))
unique_n2 = list(set(n2).difference(set(n1)))
for m in unique_n2:
probabilities[unique_n1, m] = weight
total_weight = total_weight + weight * len(unique_n1)
probabilities = probabilities.flatten()
probabilities = probabilities[probabilities.nonzero()]
probabilities = probabilities / probabilities.sum()
entropy = 0
for p in probabilities:
entropy = entropy + p * log2(p)
entropy = entropy * -1
entropies.append(entropy)
return entropies, intervals
def KL_adj(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
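    """KL-style divergence between edge-membership matrices of consecutive sweeps.

    For each number of intervals, a point-to-point adjacency indicator matrix is
    built from the mapper graph's edges and scaled by the total edge membership;
    the divergence between the matrices of consecutive interval counts is
    recorded (so the first interval count yields no entry).
    Returns ``(divergences, intervals)``.
    """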
divergences = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
prev = None
current = None
def _kl(a, b):
entropy = 0
for i in range(a.shape[0]):
for j in range(i, a.shape[0]):
if a[i][j] != 0 and b[i][j] != 0:
entropy = entropy + a[i][j] * log2(b[i][j] / a[i][j])
return entropy * -1
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
edges = list(graph.edges)
probabilities = np.zeros((X.shape[0], X.shape[0]))
total_weight = 0
for e in edges:
n1, n2 = e
n1 = n1.members
n2 = n2.members
weight = len(set(n1).intersection(set(n2)))
for m in n2:
probabilities[n1, m] = 1
# total_weight = total_weight + weight * len(n1)
total_weight = total_weight + len(n1)
probabilities = probabilities / total_weight
if current is None:
current = probabilities
else:
prev = current
current = probabilities
divergences.append(_kl(prev, current))
return divergences, intervals
def f_Entropy(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
entropies = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
edges = list(graph.edges)
total = 0
for n in graph.nodes:
total += len(n.members)
probabilities = []
nodes = list(graph.nodes)
max_vals = [lens[n.members].max() for n in nodes]
min_vals = [lens[n.members].min() for n in nodes]
counts = [len(n.members) for n in nodes]
for i in range(X.shape[0]):
function_value = lens[i]
C_y = 0
for ub, lb, c in zip(max_vals, min_vals, counts):
if ub >= function_value and lb <= function_value:
C_y += c
if C_y != 0:
probabilities.append(C_y)
entropy = 0
probabilities = np.asarray(probabilities)
probabilities = probabilities / probabilities.sum()
for p in probabilities:
entropy = entropy + p * log2(p)
entropy = entropy * -1
entropies.append(entropy)
return entropies, intervals
def f_unique_Entropy(X, lens, perc_overlap, min_intervals, max_intervals, interval_step, clusterer):
entropies = []
intervals = [i for i in range(min_intervals, max_intervals, interval_step)]
for interval in intervals:
current_cover = Cover(num_intervals=interval, percent_overlap=perc_overlap, enhanced=False)
graph = generate_mapper_graph(X, lens, current_cover, clusterer)
edges = list(graph.edges)
total = X.shape[0]
probabilities = []
nodes = list(graph.nodes)
max_vals = [lens[n.members].max() for n in nodes]
min_vals = [lens[n.members].min() for n in nodes]
for i in range(X.shape[0]):
function_value = lens[i]
mem = []
for ub, lb, n in zip(max_vals, min_vals, nodes):
if ub >= function_value and lb <= function_value:
mem = mem + n.members.tolist()
if len(mem) != 0:
probabilities.append(len(set(mem)))
entropy = 0
probabilities = np.asarray(probabilities)
probabilities = probabilities / probabilities.sum()
for p in probabilities:
entropy = entropy + p * log2(p)
entropy = entropy * -1
entropies.append(entropy)
return entropies, intervals
|
I have been busy baking Christmas cookies for the last couple of days. This is one of the first ones I did. It is a butter sugar cookie that you can flavor any way you like and color any way you like. I had intended to make these redder than they are, but after about 10 drops of food coloring I decided that was enough. When I make these again I will use paste food coloring to get a deeper, more intense color like a Christmas red. I like pink and green together, so it worked for me. Store these in an airtight container and they should last for days.
Confused as to what type of butter, shortening or margarine to use, and whether it makes a difference? You bet it does. Vegetable shortening, margarine and butter are interchangeable in some recipes. Ask most experienced bakers, though, and they will tell you that one of their secret weapons is plain, pure butter. Butter serves several purposes in cookie baking: it tenderizes and conveys the flavor of the cookie.
Compare a shortbread cookie with biscotti. Shortbread cookies seem to almost melt in your mouth, whereas biscotti have a much lower butter content and are hard and crunchy. If a cookie recipe calls for butter, I’d use the butter, especially in cookies where butter is the flavor of the cookie. On the other hand, for cookies such as molasses or gingerbread, I wouldn’t hesitate to use shortening. Do not use “reduced fat” or whipped butter products when you bake cookies; they can contain up to 58% water.
You want the cookies to spread more: Use all butter or add 1 to 2 tablespoons liquid (water, milk or cream) or use a low-protein flour such as bleached all-purpose (but not one that is chlorinated) or add 1 to 2 tablespoons sugar.
You want the cookies to spread less: Cut the sugar by a few tablespoons or add 1/4-1/2 cup additional flour.
You want the cookies to have a chewy quality: Melt the butter instead of simply using it at room temperature.
Brightly colored Christmas Pinwheel Cookies are perfect on the holiday cookie tray.
In your mixer bowl combine butter and powdered sugar. When combined add the egg and flavoring.
Add the flour, baking soda and salt. Mix well. Divide dough in half.
Add food coloring of choice to each half. Fold the dough over and over until it is mixed in.
Take a tablespoon of each dough and put together to form a ball. Roll the dough into a long snake about 6 inches long. Starting at one end roll the dough into a cylinder. Dip the edges into the sprinkles and place on a greased baking sheet.
Bake for about 8 minutes. Do not overbake. Let set on cookie sheet for about 5 minutes when the cookies come out of the oven. Remove to a wire rack to cool.
Would you like to try another recipe? |
module TryIndex
import Data.Vect
%default total
tryIndex : { n : _ } -> Integer -> Vect n elem -> Maybe elem
tryIndex m xs = case integerToFin m n of
Nothing => Nothing
Just idx => Just $ index idx xs
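-- For example, `tryIndex 2 vi` (with `vi` defined below) yields `Just 3`,
-- while any index outside 0..4 yields `Nothing`.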
vi : Vect 5 Int
vi = [1, 2, 3, 4, 5] |
Require Export Limits.
Require Import Common Notations DiscreteCategory DiscreteCategoryFunctors.
Set Implicit Arguments.
Generalizable All Variables.
Set Asymmetric Patterns.
Set Universe Polymorphism.
Section Products.
Context `{C : @SpecializedCategory objC}.
Variable I : Type.
Variable f : I -> C.
Definition Product := Limit (InducedDiscreteFunctor C f).
Definition Coproduct := Colimit (InducedDiscreteFunctor C f).
End Products.
(* XXX: [Reserved Notation] doesn't work here? *)
Notation "∏_{ x } f" := (@Product _ _ _ (fun x => f)) (at level 0, x at level 99).
Notation "∏_{ x : A } f" := (@Product _ _ A (fun x : A => f)) (at level 0, x at level 99).
Notation "∐_{ x } f" := (@Coproduct _ _ _ (fun x => f)) (at level 0, x at level 99).
Notation "∐_{ x : A } f" := (@Coproduct _ _ A (fun x : A => f)) (at level 0, x at level 99).
|
lemma infnorm_eq_0: fixes x :: "'a::euclidean_space" shows "infnorm x = 0 \<longleftrightarrow> x = 0" |
Formal statement is: lemma open_contains_cball: "open S \<longleftrightarrow> (\<forall>x\<in>S. \<exists>e>0. cball x e \<subseteq> S)" Informal statement is: A set $S$ is open if and only if for every $x \in S$, there exists an $\epsilon > 0$ such that the open ball of radius $\epsilon$ centered at $x$ is contained in $S$. |
# **Monitoring and Optimizing Quantum Circuits**
```python
import numpy as np
# Importing standard Qiskit libraries
from qiskit import QuantumCircuit, transpile, Aer, IBMQ, execute
from qiskit.tools.jupyter import *
from qiskit.visualization import *
from ibm_quantum_widgets import *
from qiskit.providers.aer import QasmSimulator
# Loading your IBM Quantum account(s)
provider = IBMQ.load_account()
```
## **Monitoring and Tracking Jobs**
```python
# Import the Qiskit Jupyter tools
from qiskit.tools import jupyter
```
```python
# Initialize the job tracker to automatically track all jobs
%qiskit_job_watcher
```
Accordion(children=(VBox(layout=Layout(max_width='710px', min_width='710px')),), layout=Layout(max_height='500…
<IPython.core.display.Javascript object>
```python
# Let's run a simple circuit on the least busy quantum device
# and check the job watcher widget.
from qiskit.providers.ibmq import least_busy
backend = least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= (2) and
not x.configuration().simulator
and x.status().operational==True))
#Create a simple circuit
qc = QuantumCircuit(1)
qc.h(0)
qc.measure_all()
#Execute the circuit on the backend
job = execute(qc, backend)
```
```python
#Disable the job watcher
%qiskit_disable_job_watcher
```
```python
#Display the list of all available backends and provide
#a brief overview of each
%qiskit_backend_overview
```
VBox(children=(HTML(value="<h2 style ='color:#ffffff; background-color:#000000;padding-top: 1%; padding-bottom…
## **Transpiling a Circuit**
```python
# Import the transpiler passes object
from qiskit.transpiler import passes
# List out all the passes available
print(dir(passes))
```
['ALAPSchedule', 'ASAPSchedule', 'AlignMeasures', 'ApplyLayout', 'BIPMapping', 'BarrierBeforeFinalMeasurements', 'BasicSwap', 'BasisTranslator', 'CSPLayout', 'CXCancellation', 'CXDirection', 'CheckCXDirection', 'CheckGateDirection', 'CheckMap', 'Collect1qRuns', 'Collect2qBlocks', 'CollectMultiQBlocks', 'CommutationAnalysis', 'CommutativeCancellation', 'ConsolidateBlocks', 'ContainsInstruction', 'CountOps', 'CountOpsLongestPath', 'CrosstalkAdaptiveSchedule', 'DAGFixedPoint', 'DAGLongestPath', 'Decompose', 'DenseLayout', 'Depth', 'DynamicalDecoupling', 'EchoRZXWeylDecomposition', 'EnlargeWithAncilla', 'Error', 'FixedPoint', 'FullAncillaAllocation', 'GateDirection', 'GatesInBasis', 'HoareOptimizer', 'InverseCancellation', 'Layout2qDistance', 'LayoutTransformation', 'LookaheadSwap', 'MergeAdjacentBarriers', 'NoiseAdaptiveLayout', 'NumTensorFactors', 'Optimize1qGates', 'Optimize1qGatesDecomposition', 'Optimize1qGatesSimpleCommutation', 'OptimizeSwapBeforeMeasure', 'PulseGates', 'RZXCalibrationBuilder', 'RZXCalibrationBuilderNoEcho', 'RemoveBarriers', 'RemoveDiagonalGatesBeforeMeasure', 'RemoveFinalMeasurements', 'RemoveResetInZeroState', 'ResourceEstimation', 'SabreLayout', 'SabreSwap', 'SetLayout', 'Size', 'StochasticSwap', 'TemplateOptimization', 'TimeUnitConversion', 'TrivialLayout', 'UnitarySynthesis', 'Unroll3qOrMore', 'UnrollCustomDefinitions', 'Unroller', 'VF2Layout', 'ValidatePulseGates', 'Width', '__builtins__', '__cached__', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__path__', '__spec__', 'analysis', 'basis', 'calibration', 'layout', 'optimization', 'routing', 'scheduling', 'synthesis', 'unitary_synthesis_plugin_names', 'utils']
```python
#Basic Toffoli gate,
qc = QuantumCircuit(3)
qc.ccx(0,1,2)
qc.draw()
```
```python
qc_decomposed = qc.decompose()
qc_decomposed.draw()
```
```python
#Basic circuit with a single and multi-qubit gates
qc = QuantumCircuit(4)
qc.h(0)
qc.cx(0,1)
qc.cx(0,2)
qc.cx(0,3)
qc.draw()
```
```python
#Print the depth of both initial and decomposed circuits
print('Initial circuit depth: ', qc.depth())
print('Decomposed circuit depth: ', qc_decomposed.depth())
#Get the number of operators in initial circuit
print('Initial circuit operation count: ', qc.count_ops())
#Get the number of operators in decomposed circuit
print('Decomposed circuit operation count: ', qc_decomposed.count_ops())
```
Initial circuit depth: 4
Decomposed circuit depth: 11
Initial circuit operation count: OrderedDict([('cx', 3), ('h', 1)])
Decomposed circuit operation count: OrderedDict([('cx', 6), ('t', 4), ('tdg', 3), ('h', 2)])
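A quick way to see how much the preset passes buy you is to sweep all four optimization levels on the same circuit. The sketch below assumes the `qc` and `backend` objects from the earlier cells are still in scope, and simply reports depth and gate counts per level:
```python
# Compare the preset transpiler optimization levels (illustrative sketch)
for level in range(4):
    tqc = transpile(qc, backend, seed_transpiler=10258, optimization_level=level)
    print(f"Level {level}: depth = {tqc.depth()}, ops = {dict(tqc.count_ops())}")
```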
## **Configuration and Optimization**
```python
# Get the backend device: ibmq_santiago
backend_santiago = provider.get_backend('ibmq_santiago')
# Launch backend viewer of ibmq_santiago
backend_santiago
```
VBox(children=(HTML(value="<h1 style='color:#ffffff;background-color:#000000;padding-top: 1%;padding-bottom: 1…
<IBMQBackend('ibmq_santiago') from IBMQ(hub='ibm-q', group='open', project='main')>
```python
# Get the backend device: ibmq_lima
backend_lima = provider.get_backend('ibmq_lima')
# Launch backend viewer of ibmq_lima
backend_lima
```
VBox(children=(HTML(value="<h1 style='color:#ffffff;background-color:#000000;padding-top: 1%;padding-bottom: 1…
<IBMQBackend('ibmq_lima') from IBMQ(hub='ibm-q', group='open', project='main')>
```python
# Visualize the coupling directional map between the qubits
plot_gate_map(backend_santiago, plot_directed=True)
```
```python
# Visualize the coupling directional map between the qubits
plot_gate_map(backend_lima, plot_directed=True)
```
```python
# Quantum circuit with a single and multi-qubit gates
qc = QuantumCircuit(4)
qc.h(0)
qc.cx(0,1)
qc.cx(0,2)
qc.cx(0,3)
qc.draw()
```
```python
# Transpile the circuit with an optimization level = 0
qc_santiago_0 = transpile(qc, backend_santiago,
seed_transpiler=10258, optimization_level=0)
# Print out the depth of the circuit
print('Depth:', qc_santiago_0.depth())
# Plot the resulting layout of the quantum circuit after Layout
plot_circuit_layout(qc_santiago_0, backend_santiago)
```
```python
# Draw the transpiled circuit pertaining to Santiago
qc_santiago_0.draw()
```
```python
# View the transpiled circuit with an optimization level = 0
qc_lima_0 = transpile(qc, backend_lima, seed_transpiler=10258, optimization_level=0)
print('Depth:', qc_lima_0.depth())
plot_circuit_layout(qc_lima_0, backend_lima)
```
```python
# Draw the transpiled circuit pertaining to Lima
qc_lima_0.draw()
```
```python
# Transpile the circuit with the optimization level = 3
qc_transpiled_santiago = transpile(qc, backend_santiago, optimization_level=3)
# Print the depth of the transpiled circuit
print('Depth:', qc_transpiled_santiago.depth())
# Print the number of operations of the transpiled circuit
print('Ops count: ', qc_transpiled_santiago.count_ops())
# Plot the layout mapping of the transpiled circuit
plot_circuit_layout(qc_transpiled_santiago, backend_santiago)
```
```python
# Redraw the transpiled circuit at new level
qc_transpiled_santiago.draw()
```
```python
# Transpile the quantum circuit with the optimization level = 3
qc_transpiled_lima = transpile(qc, backend_lima, optimization_level=3)
# Get the depth and operation count of the transpiled circuit.
print('Depth:', qc_transpiled_lima.depth())
print('Ops count: ', qc_transpiled_lima.count_ops())
# Print the circuit layout
plot_circuit_layout(qc_transpiled_lima, backend_lima)
```
```python
# View the ibmq_quito backend device configuration and properties
backend = provider.get_backend('ibmq_quito')
backend
```
VBox(children=(HTML(value="<h1 style='color:#ffffff;background-color:#000000;padding-top: 1%;padding-bottom: 1…
<IBMQBackend('ibmq_quito') from IBMQ(hub='ibm-q', group='open', project='main')>
```python
# View the backend coupling map, displayed as CNOTs (Control-Target)
backend = provider.get_backend('ibmq_quito')
# Extract the coupling map from the backend
ibmqquito_coupling_map = backend.configuration().coupling_map
# List out the extracted coupling map
ibmqquito_coupling_map
```
[[0, 1], [1, 0], [1, 2], [1, 3], [2, 1], [3, 1], [3, 4], [4, 3]]
```python
# Transpile a custom circuit using only the coupling map.
# Set the backend to ‘None’ so it will force using the coupling map provided.
qc_custom = transpile(qc, backend=None,
coupling_map=ibmqquito_coupling_map)
# Draw the resulting custom topology circuit.
qc_custom.draw()
```
```python
# Create our own coupling map (custom topology)
custom_linear_topology = [[0,1],[1,2],[2,3],[3,4]]
# Set the coupling map to our custom linear topology
qc_custom = transpile(qc, backend=None, coupling_map=custom_linear_topology)
# Draw the resulting circuit.
qc_custom.draw()
```
```python
# Import the PassManager and a few Passes
from qiskit.transpiler import PassManager, CouplingMap
from qiskit.transpiler.passes import TrivialLayout, BasicSwap
# Create a TrivialLayout based on the ibmq_quito coupling map
trivial = TrivialLayout(CouplingMap(ibmqquito_coupling_map))
pm = PassManager()
# Append the TrivialLayout to the PassManager
pm.append(trivial)
# Run the PassManager and draw the resulting circuit
tv_qc = pm.run(qc)
tv_qc.draw()
```
```python
# Create a BasicSwap based on the ibmq_quito coupling map we used earlier
basic_swap = BasicSwap(CouplingMap(ibmqquito_coupling_map))
#Add the BasicSwap to the PassManager
pm = PassManager(basic_swap)
# Run the PassManager and draw the results
new_qc = pm.run(qc)
new_qc.draw()
```
```python
# Sample quantum circuit
qc = QuantumCircuit(4)
qc.h(0)
qc.cx(0,1)
qc.barrier()
qc.cx(0,2)
qc.cx(0,3)
qc.barrier()
qc.cz(3,0)
qc.h(0)
qc.measure_all()
# Draw the circuit using the default renderer
qc.draw()
```
```python
qc.draw('latex')
```
```python
```
|
theory P20 imports Main begin
datatype 'a tree = Tip | Node "'a tree" 'a "'a tree"
primrec preorder :: "'a tree \<Rightarrow> 'a list" where
"preorder Tip = Nil" |
"preorder (Node l x r) = x # (preorder l) @ (preorder r)"
primrec postorder :: "'a tree \<Rightarrow> 'a list" where
"postorder Tip = Nil" |
"postorder (Node l x r) = (postorder l) @ (postorder r) @ [x]"
primrec postorder_acc :: "'a tree \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"postorder_acc Tip xs = xs" |
"postorder_acc (Node l x r) xs = postorder_acc l (postorder_acc r (x # xs))"
lemma "postorder_acc t xs = (postorder t) @ xs"
apply (induct t arbitrary: xs)
apply auto
done
primrec foldl_tree :: "('b => 'a => 'b) \<Rightarrow> 'b \<Rightarrow> 'a tree \<Rightarrow> 'b" where
"foldl_tree f b Tip = b" |
"foldl_tree f b (Node l x r) = foldl_tree f (foldl_tree f (f b x) r) l"
lemma "\<forall> a. postorder_acc t a = foldl_tree (\<lambda> xs x. x # xs) a t"
apply (induct t)
apply auto
done
primrec tree_sum :: "nat tree \<Rightarrow> nat" where
"tree_sum Tip = 0" |
"tree_sum (Node l x r) = x + (tree_sum l) + (tree_sum r)"
primrec list_sum :: "nat list \<Rightarrow> nat" where
"list_sum Nil = 0" |
"list_sum (x # xs) = x + list_sum xs"
lemma partition_sum: "list_sum (xs @ ys) = list_sum xs + list_sum ys"
apply (induct xs)
apply auto
done
lemma "tree_sum t = list_sum (preorder t)"
apply (induct t)
apply (auto simp add: partition_sum)
done
end |
/-
Copyright (c) 2022 Yaël Dillies, Sara Rousta. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yaël Dillies
! This file was ported from Lean 3 source module order.upper_lower.hom
! leanprover-community/mathlib commit 0a0ec35061ed9960bf0e7ffb0335f44447b58977
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathlib.Order.UpperLower.Basic
import Mathlib.Order.Hom.CompleteLattice
/-!
# `UpperSet.Ici` etc as `Sup`/`Supₛ`/`Inf`/`Infₛ`-homomorphisms
In this file we define `UpperSet.iciSupHom` etc. These functions are `UpperSet.Ici` and
`LowerSet.Iic` bundled as `SupHom`s, `InfHom`s, `SupₛHom`s, or `InfₛHom`s.
-/
variable {α : Type _}
open OrderDual
namespace UpperSet
section SemilatticeSup
variable [SemilatticeSup α]
/-- `UpperSet.Ici` as a `SupHom`. -/
def iciSupHom : SupHom α (UpperSet α) :=
⟨Ici, Ici_sup⟩
#align upper_set.Ici_sup_hom UpperSet.iciSupHom
@[simp]
theorem coe_iciSupHom : (iciSupHom : α → UpperSet α) = Ici :=
rfl
#align upper_set.coe_Ici_sup_hom UpperSet.coe_iciSupHom
@[simp]
theorem iciSupHom_apply (a : α) : iciSupHom a = Ici a :=
rfl
#align upper_set.Ici_sup_hom_apply UpperSet.iciSupHom_apply
end SemilatticeSup
variable [CompleteLattice α]
/-- `UpperSet.Ici` as a `SupₛHom`. -/
def iciSupₛHom : SupₛHom α (UpperSet α) :=
⟨Ici, fun s => (Ici_supₛ s).trans supₛ_image.symm⟩
-- Porting note: `ₓ` because typeclass assumption changed
#align upper_set.Ici_Sup_hom UpperSet.iciSupₛHomₓ
@[simp]
theorem coe_iciSupₛHom : (iciSupₛHom : α → UpperSet α) = Ici :=
rfl
-- Porting note: `ₓ` because typeclass assumption changed
#align upper_set.coe_Ici_Sup_hom UpperSet.coe_iciSupₛHomₓ
@[simp]
theorem iciSupₛHom_apply (a : α) : iciSupₛHom a = Ici a :=
rfl
-- Porting note: `ₓ` because typeclass assumption changed
#align upper_set.Ici_Sup_hom_apply UpperSet.iciSupₛHom_applyₓ
end UpperSet
namespace LowerSet
section SemilatticeInf
variable [SemilatticeInf α]
/-- `LowerSet.Iic` as an `InfHom`. -/
def iicInfHom : InfHom α (LowerSet α) :=
⟨Iic, Iic_inf⟩
#align lower_set.Iic_inf_hom LowerSet.iicInfHom
@[simp]
theorem coe_iicInfHom : (iicInfHom : α → LowerSet α) = Iic :=
rfl
#align lower_set.coe_Iic_inf_hom LowerSet.coe_iicInfHom
@[simp]
theorem iicInfHom_apply (a : α) : iicInfHom a = Iic a :=
rfl
#align lower_set.Iic_inf_hom_apply LowerSet.iicInfHom_apply
end SemilatticeInf
variable [CompleteLattice α]
/-- `LowerSet.Iic` as an `InfₛHom`. -/
def iicInfₛHom : InfₛHom α (LowerSet α) :=
⟨Iic, fun s => (Iic_infₛ s).trans infₛ_image.symm⟩
-- Porting note: `ₓ` because typeclass assumption changed
#align lower_set.Iic_Inf_hom LowerSet.iicInfₛHomₓ
@[simp]
theorem coe_iicInfₛHom : (iicInfₛHom : α → LowerSet α) = Iic :=
rfl
-- Porting note: `ₓ` because typeclass assumption changed
#align lower_set.coe_Iic_Inf_hom LowerSet.coe_iicInfₛHomₓ
@[simp]
theorem iicInfₛHom_apply (a : α) : iicInfₛHom a = Iic a :=
rfl
-- Porting note: `ₓ` because typeclass assumption changed
#align lower_set.Iic_Inf_hom_apply LowerSet.iicInfₛHom_applyₓ
end LowerSet
|
theory T155
imports Main
begin
lemma "(
(\<forall> x::nat. \<forall> y::nat. meet(x, y) = meet(y, x)) &
(\<forall> x::nat. \<forall> y::nat. join(x, y) = join(y, x)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, meet(y, z)) = meet(meet(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(x, join(y, z)) = join(join(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. meet(x, join(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. join(x, meet(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, join(y, z)) = join(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(join(x, y), z) = join(mult(x, z), mult(y, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, over(join(mult(x, y), z), y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(y, undr(x, join(mult(x, y), z))) = y) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(over(x, y), y), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(y, undr(y, x)), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, meet(y, z)) = meet(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. invo(join(x, y)) = meet(invo(x), invo(y))) &
(\<forall> x::nat. \<forall> y::nat. invo(meet(x, y)) = join(invo(x), invo(y))) &
(\<forall> x::nat. invo(invo(x)) = x)
) \<longrightarrow>
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. undr(meet(x, y), z) = join(undr(x, z), undr(y, z)))
"
nitpick[card nat=4,timeout=86400]
oops
end |
from itertools import product
import numpy as np
import pytest
from mrrt.mri.sim import mri_object_2d, mri_object_3d
from mrrt.mri import mri_partial_fourier_nd
from mrrt.utils import config, ifftnc, ImageGeometry
all_xp = [np]
if config.have_cupy:
import cupy
all_xp += [cupy]
# TODO: add test case with non-zero phase
@pytest.mark.parametrize(
"xp, dtype", product(all_xp, [np.complex64, np.complex128])
)
def test_partial_fourier_2d(xp, dtype):
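    """Partial-Fourier recon of a 2-D phantom should be much closer (in MSE) to
    the fully sampled recon than naive zero-filled reconstruction is."""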
shape = (256, 192)
ig = ImageGeometry(shape, distances=(1, 1), offsets="dsp")
obj = mri_object_2d(ig.fov)
coords = ig.fgrid()
# fully-sampled k-space
kspace_full = xp.asarray(obj.kspace(*coords), dtype=dtype)
# partial-Fourier k-space
pf_fractions = (0.6, 0.7)
nkeep = [int(f * s) for f, s in zip(pf_fractions, shape)]
    pf_mask = xp.zeros(shape, dtype=bool)
pf_mask[: nkeep[0], : nkeep[1]] = 1
kspace_pf = kspace_full[: nkeep[0], : nkeep[1]]
# direct reconstruction using zero-filled k-space
direct_recon = ifftnc(kspace_full * pf_mask)
# partial Fourier reconstruction
pf_recon = mri_partial_fourier_nd(kspace_pf, pf_mask)
# dtype is preserved
assert pf_recon.dtype == dtype
# ground truth image
# x_true = xp.asarray(obj.image(*ig.grid()))
# recon from fully sampled k-space
x_full = xp.asarray(ifftnc(kspace_full))
# Error of partial-Fourier recon should be much less than for zero-filling
mse_pf = xp.mean(xp.abs(x_full - pf_recon) ** 2)
mse_direct = xp.mean(xp.abs(x_full - direct_recon) ** 2)
assert mse_pf < 0.2 * mse_direct
@pytest.mark.parametrize(
"xp, dtype, pf_fractions",
product(
all_xp,
[np.complex64, np.complex128],
[(1, 0.6, 1), (0.6, 0.7, 1), (0.65, 0.65, 0.65)],
),
)
def test_partial_fourier_3d(xp, dtype, pf_fractions):
shape = (128, 128, 64)
ig = ImageGeometry(shape, distances=(1,) * len(shape), offsets="dsp")
obj = mri_object_3d(ig.fov)
coords = ig.fgrid()
# fully-sampled k-space
kspace_full = xp.asarray(obj.kspace(*coords), dtype=dtype)
# partial-Fourier k-space
nkeep = [int(f * s) for f, s in zip(pf_fractions, shape)]
    pf_mask = xp.zeros(shape, dtype=bool)
    pf_mask[: nkeep[0], : nkeep[1], : nkeep[2]] = 1
    kspace_pf = kspace_full[: nkeep[0], : nkeep[1], : nkeep[2]]
# direct reconstruction using zero-filled k-space
direct_recon = ifftnc(kspace_full * pf_mask)
# partial Fourier reconstruction
pf_recon = mri_partial_fourier_nd(kspace_pf, pf_mask)
# dtype is preserved
assert pf_recon.dtype == dtype
# ground truth image
# x_true = xp.asarray(obj.image(*ig.grid()))
# recon from fully sampled k-space
x_full = xp.asarray(ifftnc(kspace_full))
# Error of partial-Fourier recon should be much less than for zero-filling
mse_pf = xp.mean(xp.abs(x_full - pf_recon) ** 2)
mse_direct = xp.mean(xp.abs(x_full - direct_recon) ** 2)
assert mse_pf < 0.25 * mse_direct
|
= = = Names and epithets = = =
|
[STATEMENT]
lemma grey_protects_whiteD[dest]:
"(g grey_protects_white w) s \<Longrightarrow> grey g s \<and> (g = w \<or> white w s)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (g grey_protects_white w) s \<Longrightarrow> grey g s \<and> (g = w \<or> white w s)
[PROOF STEP]
by (auto simp: grey_protects_white_def) |
section \<open>Lens algebraic operators\<close>
theory Lens_Algebra
imports Lens_Laws
begin
subsection \<open>Lens composition, plus, unit, and identity\<close>
text \<open>We introduce the algebraic lens operators; for more information please see our paper~\cite{Foster16a}.
Lens composition constructs a lens by composing the source of one lens with the view of another.\<close>
definition lens_comp :: "('a \<Longrightarrow> 'b) \<Rightarrow> ('b \<Longrightarrow> 'c) \<Rightarrow> ('a \<Longrightarrow> 'c)" (infixr ";\<^sub>L" 80) where
[lens_defs]: "lens_comp Y X = \<lparr> lens_get = lens_get Y \<circ> lens_get X
, lens_put = (\<lambda> \<sigma> v. lens_put X \<sigma> (lens_put Y (lens_get X \<sigma>) v)) \<rparr>"
text \<open>Lens plus parallel composes two independent lenses, resulting in a lens whose view is the
product of the two underlying lens views.\<close>
definition lens_plus :: "('a \<Longrightarrow> 'c) \<Rightarrow> ('b \<Longrightarrow> 'c) \<Rightarrow> 'a \<times> 'b \<Longrightarrow> 'c" (infixr "+\<^sub>L" 75) where
[lens_defs]: "X +\<^sub>L Y = \<lparr> lens_get = (\<lambda> \<sigma>. (lens_get X \<sigma>, lens_get Y \<sigma>))
, lens_put = (\<lambda> \<sigma> (u, v). lens_put X (lens_put Y \<sigma> v) u) \<rparr>"
text \<open>The product functor lens similarly parallel composes two lenses, but in this case the lenses
have different sources and so the resulting source is also a product.\<close>
definition lens_prod :: "('a \<Longrightarrow> 'c) \<Rightarrow> ('b \<Longrightarrow> 'd) \<Rightarrow> ('a \<times> 'b \<Longrightarrow> 'c \<times> 'd)" (infixr "\<times>\<^sub>L" 85) where
[lens_defs]: "lens_prod X Y = \<lparr> lens_get = map_prod get\<^bsub>X\<^esub> get\<^bsub>Y\<^esub>
, lens_put = \<lambda> (u, v) (x, y). (put\<^bsub>X\<^esub> u x, put\<^bsub>Y\<^esub> v y) \<rparr>"
text \<open>The $\lfst$ and $\lsnd$ lenses project the first and second elements, respectively, of a
product source type.\<close>
definition fst_lens :: "'a \<Longrightarrow> 'a \<times> 'b" ("fst\<^sub>L") where
[lens_defs]: "fst\<^sub>L = \<lparr> lens_get = fst, lens_put = (\<lambda> (\<sigma>, \<rho>) u. (u, \<rho>)) \<rparr>"
definition snd_lens :: "'b \<Longrightarrow> 'a \<times> 'b" ("snd\<^sub>L") where
[lens_defs]: "snd\<^sub>L = \<lparr> lens_get = snd, lens_put = (\<lambda> (\<sigma>, \<rho>) u. (\<sigma>, u)) \<rparr>"
lemma get_fst_lens [simp]: "get\<^bsub>fst\<^sub>L\<^esub> (x, y) = x"
by (simp add: fst_lens_def)
lemma get_snd_lens [simp]: "get\<^bsub>snd\<^sub>L\<^esub> (x, y) = y"
by (simp add: snd_lens_def)
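text \<open>As a small sanity check, composition and the projection lenses can be evaluated on
  concrete values. The following lemmas are illustrative sketches only; the proofs are expected
  to go through by unfolding the definitions above.\<close>
lemma "lens_get (fst\<^sub>L ;\<^sub>L snd\<^sub>L) (True, (1::nat, 2::int)) = 1"
  by (simp add: lens_comp_def fst_lens_def snd_lens_def)
lemma "lens_put fst\<^sub>L (1::nat, True) 2 = (2, True)"
  by (simp add: fst_lens_def)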
text \<open>The swap lens is a bijective lens which swaps over the elements of the product source type.\<close>
abbreviation swap_lens :: "'a \<times> 'b \<Longrightarrow> 'b \<times> 'a" ("swap\<^sub>L") where
"swap\<^sub>L \<equiv> snd\<^sub>L +\<^sub>L fst\<^sub>L"
text \<open>The zero lens is an ineffectual lens whose view is a unit type. This means the zero lens
cannot distinguish or change the source type.\<close>
definition zero_lens :: "unit \<Longrightarrow> 'a" ("0\<^sub>L") where
[lens_defs]: "0\<^sub>L = \<lparr> lens_get = (\<lambda> _. ()), lens_put = (\<lambda> \<sigma> x. \<sigma>) \<rparr>"
text \<open>The identity lens is a bijective lens where the source and view type are the same.\<close>
definition id_lens :: "'a \<Longrightarrow> 'a" ("1\<^sub>L") where
[lens_defs]: "1\<^sub>L = \<lparr> lens_get = id, lens_put = (\<lambda> _. id) \<rparr>"
text \<open>The quotient operator $X \lquot Y$ shortens lens $X$ by cutting off $Y$ from the end. It is
thus the dual of the composition operator.\<close>
definition lens_quotient :: "('a \<Longrightarrow> 'c) \<Rightarrow> ('b \<Longrightarrow> 'c) \<Rightarrow> 'a \<Longrightarrow> 'b" (infixr "'/\<^sub>L" 90) where
[lens_defs]: "X /\<^sub>L Y = \<lparr> lens_get = \<lambda> \<sigma>. get\<^bsub>X\<^esub> (create\<^bsub>Y\<^esub> \<sigma>)
, lens_put = \<lambda> \<sigma> v. get\<^bsub>Y\<^esub> (put\<^bsub>X\<^esub> (create\<^bsub>Y\<^esub> \<sigma>) v) \<rparr>"
text \<open>Lens override uses a lens to override part of a source type.\<close>
definition lens_override :: "'a \<Rightarrow> 'a \<Rightarrow> ('b \<Longrightarrow> 'a) \<Rightarrow> 'a" ("_ \<oplus>\<^sub>L _ on _" [95,0,96] 95) where
[lens_defs]: "S\<^sub>1 \<oplus>\<^sub>L S\<^sub>2 on X = put\<^bsub>X\<^esub> S\<^sub>1 (get\<^bsub>X\<^esub> S\<^sub>2)"
text \<open>Lens inversion take a bijective lens and swaps the source and view types.\<close>
definition lens_inv :: "('a \<Longrightarrow> 'b) \<Rightarrow> ('b \<Longrightarrow> 'a)" ("inv\<^sub>L") where
[lens_defs]: "lens_inv x = \<lparr> lens_get = create\<^bsub>x\<^esub>, lens_put = \<lambda> \<sigma>. get\<^bsub>x\<^esub> \<rparr>"
subsection \<open>Closure properties\<close>
lemma id_wb_lens: "wb_lens 1\<^sub>L"
by (unfold_locales, simp_all add: id_lens_def)
lemma unit_wb_lens: "wb_lens 0\<^sub>L"
by (unfold_locales, simp_all add: zero_lens_def)
lemma comp_wb_lens: "\<lbrakk> wb_lens x; wb_lens y \<rbrakk> \<Longrightarrow> wb_lens (x ;\<^sub>L y)"
by (unfold_locales, simp_all add: lens_comp_def)
lemma comp_mwb_lens: "\<lbrakk> mwb_lens x; mwb_lens y \<rbrakk> \<Longrightarrow> mwb_lens (x ;\<^sub>L y)"
by (unfold_locales, simp_all add: lens_comp_def)
lemma id_vwb_lens: "vwb_lens 1\<^sub>L"
by (unfold_locales, simp_all add: id_lens_def)
lemma unit_vwb_lens: "vwb_lens 0\<^sub>L"
by (unfold_locales, simp_all add: zero_lens_def)
lemma comp_vwb_lens: "\<lbrakk> vwb_lens x; vwb_lens y \<rbrakk> \<Longrightarrow> vwb_lens (x ;\<^sub>L y)"
by (unfold_locales, simp_all add: lens_comp_def)
lemma unit_ief_lens: "ief_lens 0\<^sub>L"
by (unfold_locales, simp_all add: zero_lens_def)
lemma plus_mwb_lens:
assumes "mwb_lens x" "mwb_lens y" "x \<bowtie> y"
shows "mwb_lens (x +\<^sub>L y)"
using assms
apply (unfold_locales)
apply (simp_all add: lens_plus_def prod.case_eq_if lens_indep_sym)
apply (simp add: lens_indep_comm)
done
lemma plus_wb_lens:
assumes "wb_lens x" "wb_lens y" "x \<bowtie> y"
shows "wb_lens (x +\<^sub>L y)"
using assms
apply (unfold_locales, simp_all add: lens_plus_def)
apply (simp add: lens_indep_sym prod.case_eq_if)
done
lemma plus_vwb_lens:
assumes "vwb_lens x" "vwb_lens y" "x \<bowtie> y"
shows "vwb_lens (x +\<^sub>L y)"
using assms
apply (unfold_locales, simp_all add: lens_plus_def)
apply (simp add: lens_indep_sym prod.case_eq_if)
apply (simp add: lens_indep_comm prod.case_eq_if)
done
lemma prod_mwb_lens:
"\<lbrakk> mwb_lens X; mwb_lens Y \<rbrakk> \<Longrightarrow> mwb_lens (X \<times>\<^sub>L Y)"
by (unfold_locales, simp_all add: lens_prod_def prod.case_eq_if)
lemma prod_wb_lens:
"\<lbrakk> wb_lens X; wb_lens Y \<rbrakk> \<Longrightarrow> wb_lens (X \<times>\<^sub>L Y)"
by (unfold_locales, simp_all add: lens_prod_def prod.case_eq_if)
lemma prod_vwb_lens:
"\<lbrakk> vwb_lens X; vwb_lens Y \<rbrakk> \<Longrightarrow> vwb_lens (X \<times>\<^sub>L Y)"
by (unfold_locales, simp_all add: lens_prod_def prod.case_eq_if)
lemma prod_bij_lens:
"\<lbrakk> bij_lens X; bij_lens Y \<rbrakk> \<Longrightarrow> bij_lens (X \<times>\<^sub>L Y)"
by (unfold_locales, simp_all add: lens_prod_def prod.case_eq_if)
lemma fst_vwb_lens: "vwb_lens fst\<^sub>L"
by (unfold_locales, simp_all add: fst_lens_def prod.case_eq_if)
lemma snd_vwb_lens: "vwb_lens snd\<^sub>L"
by (unfold_locales, simp_all add: snd_lens_def prod.case_eq_if)
lemma id_bij_lens: "bij_lens 1\<^sub>L"
by (unfold_locales, simp_all add: id_lens_def)
lemma inv_id_lens: "inv\<^sub>L 1\<^sub>L = 1\<^sub>L"
by (auto simp add: lens_inv_def id_lens_def lens_create_def)
lemma lens_inv_bij: "bij_lens X \<Longrightarrow> bij_lens (inv\<^sub>L X)"
by (unfold_locales, simp_all add: lens_inv_def lens_create_def)
lemma swap_bij_lens: "bij_lens swap\<^sub>L"
by (unfold_locales, simp_all add: lens_plus_def prod.case_eq_if fst_lens_def snd_lens_def)
subsection \<open>Composition laws\<close>
lemma lens_comp_assoc: "(X ;\<^sub>L Y) ;\<^sub>L Z = X ;\<^sub>L (Y ;\<^sub>L Z)"
by (auto simp add: lens_comp_def)
lemma lens_comp_left_id [simp]: "1\<^sub>L ;\<^sub>L X = X"
by (simp add: id_lens_def lens_comp_def)
lemma lens_comp_right_id [simp]: "X ;\<^sub>L 1\<^sub>L = X"
by (simp add: id_lens_def lens_comp_def)
lemma lens_comp_anhil [simp]: "wb_lens X \<Longrightarrow> 0\<^sub>L ;\<^sub>L X = 0\<^sub>L"
by (simp add: zero_lens_def lens_comp_def comp_def)
subsection \<open>Independence laws\<close>
lemma zero_lens_indep: "0\<^sub>L \<bowtie> X"
by (auto simp add: zero_lens_def lens_indep_def)
lemma lens_indep_quasi_irrefl: "\<lbrakk> wb_lens x; eff_lens x \<rbrakk> \<Longrightarrow> \<not> (x \<bowtie> x)"
by (auto simp add: lens_indep_def ief_lens_def ief_lens_axioms_def, metis (full_types) wb_lens.get_put)
lemma lens_indep_left_comp [simp]:
"\<lbrakk> mwb_lens z; x \<bowtie> y \<rbrakk> \<Longrightarrow> (x ;\<^sub>L z) \<bowtie> (y ;\<^sub>L z)"
apply (rule lens_indepI)
apply (auto simp add: lens_comp_def)
apply (simp add: lens_indep_comm)
apply (simp add: lens_indep_sym)
done
lemma lens_indep_right_comp:
"y \<bowtie> z \<Longrightarrow> (x ;\<^sub>L y) \<bowtie> (x ;\<^sub>L z)"
apply (auto intro!: lens_indepI simp add: lens_comp_def)
using lens_indep_comm lens_indep_sym apply fastforce
apply (simp add: lens_indep_sym)
done
lemma lens_indep_left_ext [intro]:
"y \<bowtie> z \<Longrightarrow> (x ;\<^sub>L y) \<bowtie> z"
apply (auto intro!: lens_indepI simp add: lens_comp_def)
apply (simp add: lens_indep_comm)
apply (simp add: lens_indep_sym)
done
lemma lens_indep_right_ext [intro]:
"x \<bowtie> z \<Longrightarrow> x \<bowtie> (y ;\<^sub>L z)"
by (simp add: lens_indep_left_ext lens_indep_sym)
lemma fst_snd_lens_indep:
"fst\<^sub>L \<bowtie> snd\<^sub>L"
by (simp add: lens_indep_def fst_lens_def snd_lens_def)
lemma split_prod_lens_indep:
assumes "mwb_lens X"
shows "(fst\<^sub>L ;\<^sub>L X) \<bowtie> (snd\<^sub>L ;\<^sub>L X)"
using assms fst_snd_lens_indep lens_indep_left_comp vwb_lens_mwb by blast
lemma plus_pres_lens_indep: "\<lbrakk> X \<bowtie> Z; Y \<bowtie> Z \<rbrakk> \<Longrightarrow> (X +\<^sub>L Y) \<bowtie> Z"
apply (rule lens_indepI)
apply (simp_all add: lens_plus_def prod.case_eq_if)
apply (simp add: lens_indep_comm)
apply (simp add: lens_indep_sym)
done
lemma lens_comp_indep_cong_left:
"\<lbrakk> mwb_lens Z; X ;\<^sub>L Z \<bowtie> Y ;\<^sub>L Z \<rbrakk> \<Longrightarrow> X \<bowtie> Y"
apply (rule lens_indepI)
apply (rename_tac u v \<sigma>)
apply (drule_tac u=u and v=v and \<sigma>="create\<^bsub>Z\<^esub> \<sigma>" in lens_indep_comm)
apply (simp add: lens_comp_def)
apply (meson mwb_lens_weak weak_lens.view_determination)
apply (rename_tac v \<sigma>)
apply (drule_tac v=v and \<sigma>="create\<^bsub>Z\<^esub> \<sigma>" in lens_indep_get)
apply (simp add: lens_comp_def)
apply (drule lens_indep_sym)
apply (rename_tac u \<sigma>)
apply (drule_tac v=u and \<sigma>="create\<^bsub>Z\<^esub> \<sigma>" in lens_indep_get)
apply (simp add: lens_comp_def)
done
lemma lens_comp_indep_cong:
"mwb_lens Z \<Longrightarrow> (X ;\<^sub>L Z) \<bowtie> (Y ;\<^sub>L Z) \<longleftrightarrow> X \<bowtie> Y"
using lens_comp_indep_cong_left lens_indep_left_comp by blast
lemma lens_indep_prod:
"\<lbrakk> X\<^sub>1 \<bowtie> X\<^sub>2; Y\<^sub>1 \<bowtie> Y\<^sub>2 \<rbrakk> \<Longrightarrow> X\<^sub>1 \<times>\<^sub>L Y\<^sub>1 \<bowtie> X\<^sub>2 \<times>\<^sub>L Y\<^sub>2"
apply (rule lens_indepI)
apply (auto simp add: lens_prod_def prod.case_eq_if lens_indep_comm map_prod_def)
apply (simp_all add: lens_indep_sym)
done
subsection \<open>Algebraic laws\<close>
lemma fst_lens_plus:
"wb_lens y \<Longrightarrow> fst\<^sub>L ;\<^sub>L (x +\<^sub>L y) = x"
by (simp add: fst_lens_def lens_plus_def lens_comp_def comp_def)
text \<open>The second law requires independence, since x has to be applied first, before y.\<close>
lemma snd_lens_plus:
"\<lbrakk> wb_lens x; x \<bowtie> y \<rbrakk> \<Longrightarrow> snd\<^sub>L ;\<^sub>L (x +\<^sub>L y) = y"
apply (simp add: snd_lens_def lens_plus_def lens_comp_def comp_def)
apply (subst lens_indep_comm)
apply (simp_all)
done
lemma lens_plus_swap:
"X \<bowtie> Y \<Longrightarrow> (snd\<^sub>L +\<^sub>L fst\<^sub>L) ;\<^sub>L (X +\<^sub>L Y) = (Y +\<^sub>L X)"
by (auto simp add: lens_plus_def fst_lens_def snd_lens_def id_lens_def lens_comp_def lens_indep_comm)
lemma prod_as_plus: "X \<times>\<^sub>L Y = X ;\<^sub>L fst\<^sub>L +\<^sub>L Y ;\<^sub>L snd\<^sub>L"
by (auto simp add: lens_prod_def fst_lens_def snd_lens_def lens_comp_def lens_plus_def)
lemma prod_lens_id_equiv:
"1\<^sub>L \<times>\<^sub>L 1\<^sub>L = 1\<^sub>L"
by (auto simp add: lens_prod_def id_lens_def)
lemma prod_lens_comp_plus:
"X\<^sub>2 \<bowtie> Y\<^sub>2 \<Longrightarrow> ((X\<^sub>1 \<times>\<^sub>L Y\<^sub>1) ;\<^sub>L (X\<^sub>2 +\<^sub>L Y\<^sub>2)) = (X\<^sub>1 ;\<^sub>L X\<^sub>2) +\<^sub>L (Y\<^sub>1 ;\<^sub>L Y\<^sub>2)"
by (auto simp add: lens_comp_def lens_plus_def lens_prod_def prod.case_eq_if fun_eq_iff)
lemma fst_snd_id_lens: "fst\<^sub>L +\<^sub>L snd\<^sub>L = 1\<^sub>L"
by (auto simp add: lens_plus_def fst_lens_def snd_lens_def id_lens_def)
lemma swap_lens_idem: "swap\<^sub>L ;\<^sub>L swap\<^sub>L = 1\<^sub>L"
by (simp add: fst_snd_id_lens fst_snd_lens_indep lens_indep_sym lens_plus_swap)
lemma swap_lens_fst: "fst\<^sub>L ;\<^sub>L swap\<^sub>L = snd\<^sub>L"
by (simp add: fst_lens_plus fst_vwb_lens)
lemma swap_lens_snd: "snd\<^sub>L ;\<^sub>L swap\<^sub>L = fst\<^sub>L"
by (simp add: fst_snd_lens_indep lens_indep_sym snd_lens_plus snd_vwb_lens)
end |
[STATEMENT]
lemma not_getbase_shares:
"D \<notin> set (map getbase xs) \<Longrightarrow> Shares D \<notin> set xs"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. D \<notin> set (map getbase xs) \<Longrightarrow> Shares D \<notin> set xs
[PROOF STEP]
by (induct rule: list.induct, auto) |
(* Title: HOL/Auth/n_mutualEx_lemma_on_inv__4.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_mutualEx Protocol Case Study*}
theory n_mutualEx_lemma_on_inv__4 imports n_mutualEx_base
begin
section{*All lemmas on causal relation between inv__4 and some rule r*}
lemma n_TryVsinv__4:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_Try i)" and
a2: "(\<exists> p__Inv0. p__Inv0\<le>N\<and>f=inv__4 p__Inv0)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_Try i" apply fastforce done
from a2 obtain p__Inv0 where a2:"p__Inv0\<le>N\<and>f=inv__4 p__Inv0" apply fastforce done
have "(i=p__Inv0)\<or>(i~=p__Inv0)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv0)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv0)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_CritVsinv__4:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_Crit i)" and
a2: "(\<exists> p__Inv0. p__Inv0\<le>N\<and>f=inv__4 p__Inv0)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_Crit i" apply fastforce done
from a2 obtain p__Inv0 where a2:"p__Inv0\<le>N\<and>f=inv__4 p__Inv0" apply fastforce done
have "(i=p__Inv0)\<or>(i~=p__Inv0)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv0)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv0)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_ExitVsinv__4:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_Exit i)" and
a2: "(\<exists> p__Inv0. p__Inv0\<le>N\<and>f=inv__4 p__Inv0)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_Exit i" apply fastforce done
from a2 obtain p__Inv0 where a2:"p__Inv0\<le>N\<and>f=inv__4 p__Inv0" apply fastforce done
have "(i=p__Inv0)\<or>(i~=p__Inv0)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv0)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Ident ''n'') p__Inv0)) (Const C)) (eqn (IVar (Ident ''x'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv0)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_IdleVsinv__4:
assumes a1: "(\<exists> i. i\<le>N\<and>r=n_Idle i)" and
a2: "(\<exists> p__Inv0. p__Inv0\<le>N\<and>f=inv__4 p__Inv0)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain i where a1:"i\<le>N\<and>r=n_Idle i" apply fastforce done
from a2 obtain p__Inv0 where a2:"p__Inv0\<le>N\<and>f=inv__4 p__Inv0" apply fastforce done
have "(i=p__Inv0)\<or>(i~=p__Inv0)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(i=p__Inv0)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(i~=p__Inv0)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Ident ''n'') p__Inv0)) (Const E)) (eqn (IVar (Para (Ident ''n'') i)) (Const E))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
end
|
Formal statement is: lemma topological_basis: "topological_basis B \<longleftrightarrow> (\<forall>x. open x \<longleftrightarrow> (\<exists>B'. B' \<subseteq> B \<and> \<Union>B' = x))" Informal statement is: A set $B$ is a topological basis if and only if every open set is a union of elements of $B$. |
[STATEMENT]
lemma class_add_classes_above:
assumes ns: "\<not> is_class P C" and "\<not>P \<turnstile> D \<preceq>\<^sup>* C"
shows "classes_above (class_add P (C, cdec)) D = classes_above P D"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. classes_above (class_add P (C, cdec)) D = classes_above P D
[PROOF STEP]
using assms
[PROOF STATE]
proof (prove)
using this:
\<not> is_class P C
(D, C) \<notin> (subcls1 P)\<^sup>*
goal (1 subgoal):
1. classes_above (class_add P (C, cdec)) D = classes_above P D
[PROOF STEP]
by(auto intro: class_add_subcls class_add_subcls_rev) |
theory T48
imports Main
begin
lemma "(
(\<forall> x::nat. \<forall> y::nat. meet(x, y) = meet(y, x)) &
(\<forall> x::nat. \<forall> y::nat. join(x, y) = join(y, x)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, meet(y, z)) = meet(meet(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(x, join(y, z)) = join(join(x, y), z)) &
(\<forall> x::nat. \<forall> y::nat. meet(x, join(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. join(x, meet(x, y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, join(y, z)) = join(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(join(x, y), z) = join(mult(x, z), mult(y, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(x, over(join(mult(x, y), z), y)) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. meet(y, undr(x, join(mult(x, y), z))) = y) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(over(x, y), y), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. join(mult(y, undr(y, x)), x) = x) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(x, meet(y, z)) = meet(mult(x, y), mult(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. undr(x, join(y, z)) = join(undr(x, y), undr(x, z))) &
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. undr(meet(x, y), z) = join(undr(x, z), undr(y, z))) &
(\<forall> x::nat. \<forall> y::nat. invo(join(x, y)) = meet(invo(x), invo(y))) &
(\<forall> x::nat. \<forall> y::nat. invo(meet(x, y)) = join(invo(x), invo(y))) &
(\<forall> x::nat. invo(invo(x)) = x)
) \<longrightarrow>
(\<forall> x::nat. \<forall> y::nat. \<forall> z::nat. mult(meet(x, y), z) = meet(mult(x, z), mult(y, z)))
"
nitpick[card nat=8,timeout=86400]
oops
end |
Tertiary structure: the overall shape of a single protein molecule; the spatial relationship of the secondary structures to one another. Tertiary structure is generally stabilized by nonlocal interactions, most commonly the formation of a hydrophobic core, but also through salt bridges, hydrogen bonds, disulfide bonds, and even posttranslational modifications. The term "tertiary structure" is often used as synonymous with the term fold. The tertiary structure is what controls the basic function of the protein.
|
Describe Users/NorCalFoodLover here.
20100827 06:35:07 Welcome to the Wiki! Thanks for the restaurant reviews. Users/TomGarberson
|
#include <iostream>
#include "socks_server.hpp"
#include <boost/program_options.hpp>
namespace po = boost::program_options;
int main(int argc, char **argv)
{
try {
unsigned short port;
std::string address;
po::options_description desc("Options");
desc.add_options()
("help,h", "Help message.")
("version", "Current version.")
("port", po::value<unsigned short>(&port)->default_value(1080), "Socks porxy port.")
("address", po::value<std::string>(&address)->default_value("127.0.0.1"), "Socks listen bind address.")
;
// Parse the command-line options.
po::variables_map vm;
po::store(po::parse_command_line(argc, argv, desc), vm);
po::notify(vm);
// Print the help message.
if (vm.count("help") || argc == 1)
{
std::cout << desc;
return 0;
}
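// Start the proxy: build the io_context and the socks server, then block in run()
// until the server stops or an exception propagates.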
boost::asio::io_context io;
socks::socks_server s(io, port, address);
io.run();
}
catch (std::exception& e)
{
std::cerr << e.what() << std::endl;
}
return 0;
}
|
import numpy as np
import cv2
import image_funcs as imf
img = cv2.imread("Images/black_board.JPG")
img = imf.scale(img, 500, 500)
cv2.imshow("small", img)
cv2.waitKey()
Z = img.reshape((-1, 3))
# convert to np.float32
Z = np.float32(Z)
# define criteria, number of clusters(K) and apply kmeans()
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
K = 4
print("start kmeans")
ret, label, center = cv2.kmeans(Z, K, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)
print("finish kmeans")
# Now convert back into uint8, and make original image
center = np.uint8(center)
res = center[label.flatten()]
res2 = res.reshape(img.shape)
cv2.imshow('res2', res2)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// The following only applies to changes made to this file as part of YugaByte development.
//
// Portions Copyright (c) YugaByte, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations
// under the License.
//
#include "yb/util/trace.h"
#include <iomanip>
#include <ios>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <boost/range/iterator_range.hpp>
#include <boost/range/adaptor/indirected.hpp>
#include "yb/gutil/strings/substitute.h"
#include "yb/gutil/walltime.h"
#include "yb/util/memory/arena.h"
#include "yb/util/memory/memory.h"
#include "yb/util/object_pool.h"
#include "yb/util/size_literals.h"
DEFINE_bool(enable_tracing, false, "Flag to enable/disable tracing across the code.");
namespace yb {
using strings::internal::SubstituteArg;
__thread Trace* Trace::threadlocal_trace_;
namespace {
// Get the part of filepath after the last path separator.
// (Doesn't modify filepath, contrary to basename() in libgen.h.)
// Borrowed from glog.
const char* const_basename(const char* filepath) {
const char* base = strrchr(filepath, '/');
return base ? (base + 1) : filepath;
}
template<class Children>
void DumpChildren(std::ostream* out, bool include_time_deltas, const Children* children) {
for (auto &child_trace : *children) {
*out << "Related trace:" << std::endl;
*out << child_trace->DumpToString(include_time_deltas);
}
}
void DumpChildren(std::ostream* out, bool include_time_deltas, std::nullptr_t children) {
}
template<class Entries>
void DumpEntries(std::ostream* out,
bool include_time_deltas,
int64_t start,
const Entries& entries) {
if (entries.empty()) {
return;
}
auto time_usec = entries.begin()->timestamp.GetDeltaSinceMin().ToMicroseconds();
const int64_t time_correction_usec = start - time_usec;
int64_t prev_usecs = time_usec;
for (const auto& e : entries) {
time_usec = e.timestamp.GetDeltaSinceMin().ToMicroseconds();
const int64_t usecs_since_prev = time_usec - prev_usecs;
prev_usecs = time_usec;
const auto absolute_time_usec = time_usec + time_correction_usec;
const time_t secs_since_epoch = absolute_time_usec / 1000000;
const int usecs = absolute_time_usec % 1000000;
struct tm tm_time;
localtime_r(&secs_since_epoch, &tm_time);
// Log format borrowed from glog/logging.cc
using std::setw;
out->fill('0');
*out << setw(2) << (1 + tm_time.tm_mon)
<< setw(2) << tm_time.tm_mday
<< ' '
<< setw(2) << tm_time.tm_hour << ':'
<< setw(2) << tm_time.tm_min << ':'
<< setw(2) << tm_time.tm_sec << '.'
<< setw(6) << usecs << ' ';
if (include_time_deltas) {
out->fill(' ');
*out << "(+" << setw(6) << usecs_since_prev << "us) ";
}
e.Dump(out);
*out << std::endl;
}
}
template<class Entries, class Children>
void DoDump(std::ostream* out,
bool include_time_deltas,
int64_t start,
const Entries& entries,
Children children) {
// Save original flags.
std::ios::fmtflags save_flags(out->flags());
DumpEntries(out, include_time_deltas, start, entries);
DumpChildren(out, include_time_deltas, children);
// Restore stream flags.
out->flags(save_flags);
}
std::once_flag init_get_current_micros_fast_flag;
int64_t initial_micros_offset;
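// Compute, once, the offset between wall-clock microseconds and the monotonic clock so that
// GetCurrentMicrosFast() can convert a MonoTime to an absolute time without extra syscalls.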
void InitGetCurrentMicrosFast() {
auto before = MonoTime::Now();
initial_micros_offset = GetCurrentTimeMicros();
auto after = MonoTime::Now();
auto mid = after.GetDeltaSinceMin().ToMicroseconds();
mid += before.GetDeltaSinceMin().ToMicroseconds();
mid /= 2;
initial_micros_offset -= mid;
}
int64_t GetCurrentMicrosFast(MonoTime now) {
std::call_once(init_get_current_micros_fast_flag, InitGetCurrentMicrosFast);
return initial_micros_offset + now.GetDeltaSinceMin().ToMicroseconds();
}
} // namespace
ScopedAdoptTrace::ScopedAdoptTrace(Trace* t)
: old_trace_(Trace::threadlocal_trace_), is_enabled_(GetAtomicFlag(&FLAGS_enable_tracing)) {
if (is_enabled_) {
trace_ = t;
Trace::threadlocal_trace_ = t;
DFAKE_SCOPED_LOCK_THREAD_LOCKED(ctor_dtor_);
}
}
ScopedAdoptTrace::~ScopedAdoptTrace() {
if (is_enabled_) {
Trace::threadlocal_trace_ = old_trace_;
// It's critical that we Release() the reference count on 't' only
// after we've unset the thread-local variable. Otherwise, we can hit
// a nasty interaction with tcmalloc contention profiling. Consider
// the following sequence:
//
// 1. threadlocal_trace_ has refcount = 1
// 2. we call threadlocal_trace_->Release() which decrements refcount to 0
// 3. this calls 'delete' on the Trace object
// 3a. this calls tcmalloc free() on the Trace and various sub-objects
// 3b. the free() calls may end up experiencing contention in tcmalloc
// 3c. we try to account the contention in threadlocal_trace_'s TraceMetrics,
// but it has already been freed.
//
// In the best case, we just scribble into some free tcmalloc memory. In the
// worst case, tcmalloc would have already re-used this memory for a new
// allocation on another thread, and we end up overwriting someone else's memory.
//
// Waiting to Release() only after 'unpublishing' the trace solves this.
trace_.reset();
DFAKE_SCOPED_LOCK_THREAD_LOCKED(ctor_dtor_);
}
}
// Struct which precedes each entry in the trace.
struct TraceEntry {
MonoTime timestamp;
// The source file and line number which generated the trace message.
const char* file_path;
int line_number;
uint32_t message_len;
TraceEntry* next;
char message[0];
void Dump(std::ostream* out) const {
*out << const_basename(file_path) << ':' << line_number
<< "] ";
out->write(message, message_len);
}
};
Trace::Trace() {
}
ThreadSafeObjectPool<ThreadSafeArena>& ArenaPool() {
static ThreadSafeObjectPool<ThreadSafeArena> result([] {
return new ThreadSafeArena(8_KB, 128_KB);
});
return result;
}
Trace::~Trace() {
auto* arena = arena_.load(std::memory_order_acquire);
if (arena) {
arena->Reset();
ArenaPool().Release(arena);
}
}
ThreadSafeArena* Trace::GetAndInitArena() {
auto* arena = arena_.load(std::memory_order_acquire);
if (arena == nullptr) {
arena = ArenaPool().Take();
ThreadSafeArena* existing_arena = nullptr;
if (arena_.compare_exchange_strong(existing_arena, arena, std::memory_order_release)) {
return arena;
} else {
ArenaPool().Release(arena);
return existing_arena;
}
}
return arena;
}
void Trace::SubstituteAndTrace(
const char* file_path, int line_number, MonoTime now, StringPiece format) {
int msg_len = format.size();
DCHECK_NE(msg_len, 0) << "Bad format specification";
TraceEntry* entry = NewEntry(msg_len, file_path, line_number, now);
if (entry == nullptr) return;
memcpy(entry->message, format.data(), msg_len);
AddEntry(entry);
}
void Trace::SubstituteAndTrace(const char* file_path,
int line_number,
MonoTime now,
StringPiece format,
const SubstituteArg& arg0, const SubstituteArg& arg1,
const SubstituteArg& arg2, const SubstituteArg& arg3,
const SubstituteArg& arg4, const SubstituteArg& arg5,
const SubstituteArg& arg6, const SubstituteArg& arg7,
const SubstituteArg& arg8, const SubstituteArg& arg9) {
const SubstituteArg* const args_array[] = {
&arg0, &arg1, &arg2, &arg3, &arg4, &arg5, &arg6, &arg7, &arg8, &arg9, nullptr
};
int msg_len = strings::internal::SubstitutedSize(format, args_array);
DCHECK_NE(msg_len, 0) << "Bad format specification";
TraceEntry* entry = NewEntry(msg_len, file_path, line_number, now);
if (entry == nullptr) return;
SubstituteToBuffer(format, args_array, entry->message);
AddEntry(entry);
}
TraceEntry* Trace::NewEntry(int msg_len, const char* file_path, int line_number, MonoTime now) {
auto* arena = GetAndInitArena();
size_t size = offsetof(TraceEntry, message) + msg_len;
void* dst = arena->AllocateBytesAligned(size, alignof(TraceEntry));
if (dst == nullptr) {
LOG(ERROR) << "NewEntry(msg_len, " << file_path << ", " << line_number
<< ") received nullptr from AllocateBytes.\n So far:" << DumpToString(true);
return nullptr;
}
TraceEntry* entry = new (dst) TraceEntry;
entry->timestamp = now;
entry->message_len = msg_len;
entry->file_path = file_path;
entry->line_number = line_number;
return entry;
}
void Trace::AddEntry(TraceEntry* entry) {
std::lock_guard<simple_spinlock> l(lock_);
entry->next = nullptr;
if (entries_tail_ != nullptr) {
entries_tail_->next = entry;
} else {
DCHECK(entries_head_ == nullptr);
entries_head_ = entry;
trace_start_time_usec_ = GetCurrentMicrosFast(entry->timestamp);
}
entries_tail_ = entry;
}
void Trace::Dump(std::ostream *out, bool include_time_deltas) const {
// Gather a copy of the list of entries under the lock. This is fast
// enough that we aren't worried about stalling concurrent tracers
// (whereas doing the logging itself while holding the lock might be
// too slow, if the output stream is a file, for example).
vector<TraceEntry*> entries;
vector<scoped_refptr<Trace> > child_traces;
decltype(trace_start_time_usec_) trace_start_time_usec;
{
std::lock_guard<simple_spinlock> l(lock_);
for (TraceEntry* cur = entries_head_;
cur != nullptr;
cur = cur->next) {
entries.push_back(cur);
}
child_traces = child_traces_;
trace_start_time_usec = trace_start_time_usec_;
}
DoDump(out,
include_time_deltas,
trace_start_time_usec,
entries | boost::adaptors::indirected,
&child_traces);
}
string Trace::DumpToString(bool include_time_deltas) const {
std::stringstream s;
Dump(&s, include_time_deltas);
return s.str();
}
void Trace::DumpCurrentTrace() {
Trace* t = CurrentTrace();
if (t == nullptr) {
LOG(INFO) << "No trace is currently active.";
return;
}
t->Dump(&std::cerr, true);
}
void Trace::AddChildTrace(Trace* child_trace) {
CHECK_NOTNULL(child_trace);
{
std::lock_guard<simple_spinlock> l(lock_);
scoped_refptr<Trace> ptr(child_trace);
child_traces_.push_back(ptr);
}
CHECK(!child_trace->HasOneRef());
}
PlainTrace::PlainTrace() {
}
void PlainTrace::Trace(const char *file_path, int line_number, const char *message) {
auto timestamp = MonoTime::Now();
{
std::lock_guard<decltype(mutex_)> lock(mutex_);
if (size_ < kMaxEntries) {
if (size_ == 0) {
trace_start_time_usec_ = GetCurrentMicrosFast(timestamp);
}
entries_[size_] = {file_path, line_number, message, timestamp};
++size_;
}
}
}
void PlainTrace::Dump(std::ostream *out, bool include_time_deltas) const {
size_t size;
decltype(trace_start_time_usec_) trace_start_time_usec;
{
std::lock_guard<decltype(mutex_)> lock(mutex_);
size = size_;
trace_start_time_usec = trace_start_time_usec_;
}
auto entries = boost::make_iterator_range(entries_, entries_ + size);
DoDump(out, include_time_deltas, trace_start_time_usec, entries, /* children */ nullptr);
}
std::string PlainTrace::DumpToString(bool include_time_deltas) const {
std::stringstream s;
Dump(&s, include_time_deltas);
return s.str();
}
void PlainTrace::Entry::Dump(std::ostream *out) const {
*out << const_basename(file_path) << ':' << line_number << "] " << message;
}
} // namespace yb
|
Formal statement is: lemma BseqI: "0 < K \<Longrightarrow> \<forall>n. norm (X n) \<le> K \<Longrightarrow> Bseq X" Informal statement is: If $X_n$ is a sequence of complex numbers such that $|X_n| \leq K$ for all $n$, then $X_n$ is a bounded sequence. |
[STATEMENT]
lemma "normalized_nnf_match m \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf m)) a p = matches \<gamma> m a p"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. normalized_nnf_match m \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf m)) a p = matches \<gamma> m a p
[PROOF STEP]
proof(induction m rule: to_negation_type_nnf.induct)
[PROOF STATE]
proof (state)
goal (7 subgoals):
1. normalized_nnf_match MatchAny \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf MatchAny)) a p = matches \<gamma> MatchAny a p
2. \<And>aa. normalized_nnf_match (Match aa) \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (Match aa))) a p = matches \<gamma> (Match aa) a p
3. \<And>aa. normalized_nnf_match (MatchNot (Match aa)) \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (MatchNot (Match aa)))) a p = matches \<gamma> (MatchNot (Match aa)) a p
4. \<And>aa b. \<lbrakk>normalized_nnf_match aa \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf aa)) a p = matches \<gamma> aa a p; normalized_nnf_match b \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf b)) a p = matches \<gamma> b a p; normalized_nnf_match (MatchAnd aa b)\<rbrakk> \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (MatchAnd aa b))) a p = matches \<gamma> (MatchAnd aa b) a p
5. \<And>va. normalized_nnf_match (MatchNot (MatchNot va)) \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (MatchNot (MatchNot va)))) a p = matches \<gamma> (MatchNot (MatchNot va)) a p
6. \<And>va vb. normalized_nnf_match (MatchNot (MatchAnd va vb)) \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (MatchNot (MatchAnd va vb)))) a p = matches \<gamma> (MatchNot (MatchAnd va vb)) a p
7. normalized_nnf_match (MatchNot MatchAny) \<Longrightarrow> matches \<gamma> (alist_and (to_negation_type_nnf (MatchNot MatchAny))) a p = matches \<gamma> (MatchNot MatchAny) a p
[PROOF STEP]
qed(simp_all add: bunch_of_lemmata_about_matches alist_and_append) |