/-
Copyright (c) 2022 Yaël Dillies. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yaël Dillies
! This file was ported from Lean 3 source module topology.order.hom.basic
! leanprover-community/mathlib commit 0a0ec35061ed9960bf0e7ffb0335f44447b58977
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathlib.Order.Hom.Basic
import Mathlib.Topology.ContinuousFunction.Basic
/-!
# Continuous order homomorphisms
This file defines continuous order homomorphisms, that is, maps which are both continuous and
monotone. They are also called Priestley homomorphisms because they are the morphisms of the
category of Priestley spaces.
We use the `FunLike` design, so each type of morphism has a companion typeclass which is meant to
be satisfied by itself and all stricter types.
## Types of morphisms
* `ContinuousOrderHom`: Continuous monotone functions, aka Priestley homomorphisms.
## Typeclasses
* `ContinuousOrderHomClass`
-/
open Function
variable {F α β γ δ : Type _}
/-- The type of continuous monotone maps from `α` to `β`, aka Priestley homomorphisms. -/
structure ContinuousOrderHom (α β : Type _) [Preorder α] [Preorder β] [TopologicalSpace α]
[TopologicalSpace β] extends OrderHom α β where
continuous_toFun : Continuous toFun
#align continuous_order_hom ContinuousOrderHom
-- mathport name: «expr →Co »
infixr:25 " →Co " => ContinuousOrderHom
section
-- porting note: extending `ContinuousMapClass` instead of `OrderHomClass`
/-- `ContinuousOrderHomClass F α β` states that `F` is a type of continuous monotone maps.
You should extend this class when you extend `ContinuousOrderHom`. -/
class ContinuousOrderHomClass (F : Type _) (α β : outParam <| Type _) [Preorder α] [Preorder β]
[TopologicalSpace α] [TopologicalSpace β] extends
ContinuousMapClass F α β where
map_monotone (f : F) : Monotone f
#align continuous_order_hom_class ContinuousOrderHomClass
end
-- See note [lower instance priority]
instance (priority := 100) ContinuousOrderHomClass.toOrderHomClass {_ : Preorder α} {_ : Preorder β}
{_ : TopologicalSpace α} {_ : TopologicalSpace β} [ContinuousOrderHomClass F α β] :
OrderHomClass F α β :=
{ ‹ContinuousOrderHomClass F α β› with
map_rel := ContinuousOrderHomClass.map_monotone }
#align continuous_order_hom_class.to_continuous_map_class ContinuousOrderHomClass.toContinuousMapClass
instance [Preorder α] [Preorder β] [TopologicalSpace α] [TopologicalSpace β]
[ContinuousOrderHomClass F α β] : CoeTC F (α →Co β) :=
⟨fun f =>
{ toFun := f
monotone' := ContinuousOrderHomClass.map_monotone f
continuous_toFun := map_continuous f }⟩
/-! ### Continuous order homomorphisms -/
namespace ContinuousOrderHom
variable [TopologicalSpace α] [Preorder α] [TopologicalSpace β]
section Preorder
variable [Preorder β] [TopologicalSpace γ] [Preorder γ] [TopologicalSpace δ] [Preorder δ]
/-- Reinterpret a `ContinuousOrderHom` as a `ContinuousMap`. -/
def toContinuousMap (f : α →Co β) : C(α, β) :=
{ f with }
#align continuous_order_hom.to_continuous_map ContinuousOrderHom.toContinuousMap
instance : ContinuousOrderHomClass (α →Co β) α β where
coe f := f.toFun
coe_injective' f g h := by
obtain ⟨⟨_, _⟩, _⟩ := f
obtain ⟨⟨_, _⟩, _⟩ := g
congr
map_monotone f := f.monotone'
map_continuous f := f.continuous_toFun
-- porting note: new lemma
@[simp] theorem coe_toOrderHom (f : α →Co β) : ⇑f.toOrderHom = f := rfl
theorem toFun_eq_coe {f : α →Co β} : f.toFun = (f : α → β) := rfl
#align continuous_order_hom.to_fun_eq_coe ContinuousOrderHom.toFun_eq_coe
@[ext]
theorem ext {f g : α →Co β} (h : ∀ a, f a = g a) : f = g :=
FunLike.ext f g h
#align continuous_order_hom.ext ContinuousOrderHom.ext
/-- Copy of a `ContinuousOrderHom` with a new `ContinuousMap` equal to the old one. Useful to fix
definitional equalities. -/
protected def copy (f : α →Co β) (f' : α → β) (h : f' = f) : α →Co β :=
⟨f.toOrderHom.copy f' h, h.symm.subst f.continuous_toFun⟩
#align continuous_order_hom.copy ContinuousOrderHom.copy
@[simp]
theorem coe_copy (f : α →Co β) (f' : α → β) (h : f' = f) : ⇑(f.copy f' h) = f' :=
rfl
#align continuous_order_hom.coe_copy ContinuousOrderHom.coe_copy
theorem copy_eq (f : α →Co β) (f' : α → β) (h : f' = f) : f.copy f' h = f :=
FunLike.ext' h
#align continuous_order_hom.copy_eq ContinuousOrderHom.copy_eq
variable (α)
/-- `id` as a `ContinuousOrderHom`. -/
protected def id : α →Co α :=
⟨OrderHom.id, continuous_id⟩
#align continuous_order_hom.id ContinuousOrderHom.id
instance : Inhabited (α →Co α) :=
⟨ContinuousOrderHom.id _⟩
@[simp]
theorem coe_id : ⇑(ContinuousOrderHom.id α) = id :=
rfl
#align continuous_order_hom.coe_id ContinuousOrderHom.coe_id
variable {α}
@[simp]
theorem id_apply (a : α) : ContinuousOrderHom.id α a = a :=
rfl
#align continuous_order_hom.id_apply ContinuousOrderHom.id_apply
/-- Composition of `ContinuousOrderHom`s as a `ContinuousOrderHom`. -/
def comp (f : β →Co γ) (g : α →Co β) : ContinuousOrderHom α γ :=
⟨f.toOrderHom.comp g.toOrderHom, f.continuous_toFun.comp g.continuous_toFun⟩
#align continuous_order_hom.comp ContinuousOrderHom.comp
@[simp]
theorem coe_comp (f : β →Co γ) (g : α →Co β) : (f.comp g : α → γ) = f ∘ g :=
rfl
#align continuous_order_hom.coe_comp ContinuousOrderHom.coe_comp
@[simp]
theorem comp_apply (f : β →Co γ) (g : α →Co β) (a : α) : (f.comp g) a = f (g a) :=
rfl
#align continuous_order_hom.comp_apply ContinuousOrderHom.comp_apply
@[simp]
theorem comp_assoc (f : γ →Co δ) (g : β →Co γ) (h : α →Co β) :
(f.comp g).comp h = f.comp (g.comp h) :=
rfl
#align continuous_order_hom.comp_assoc ContinuousOrderHom.comp_assoc
@[simp]
theorem comp_id (f : α →Co β) : f.comp (ContinuousOrderHom.id α) = f :=
ext fun _ => rfl
#align continuous_order_hom.comp_id ContinuousOrderHom.comp_id
@[simp]
theorem id_comp (f : α →Co β) : (ContinuousOrderHom.id β).comp f = f :=
ext fun _ => rfl
#align continuous_order_hom.id_comp ContinuousOrderHom.id_comp
theorem cancel_right {g₁ g₂ : β →Co γ} {f : α →Co β} (hf : Surjective f) :
g₁.comp f = g₂.comp f ↔ g₁ = g₂ :=
⟨fun h => ext <| hf.forall.2 <| FunLike.ext_iff.1 h, fun h => congr_arg₂ _ h rfl⟩
#align continuous_order_hom.cancel_right ContinuousOrderHom.cancel_right
theorem cancel_left {g : β →Co γ} {f₁ f₂ : α →Co β} (hg : Injective g) :
g.comp f₁ = g.comp f₂ ↔ f₁ = f₂ :=
⟨fun h => ext fun a => hg <| by rw [← comp_apply, h, comp_apply], congr_arg _⟩
#align continuous_order_hom.cancel_left ContinuousOrderHom.cancel_left
instance : Preorder (α →Co β) :=
Preorder.lift ((↑) : (α →Co β) → α → β)
end Preorder
instance [PartialOrder β] : PartialOrder (α →Co β) :=
PartialOrder.lift ((↑) : (α →Co β) → α → β) FunLike.coe_injective
end ContinuousOrderHom
(* Title: JinjaThreads/Execute/J_Execute.thy
Author: Andreas Lochbihler
*)
header {* \isaheader{Executable semantics for J} *}
theory J_Execute
imports
SC_Schedulers
"../J/Threaded"
begin
interpretation sc!:
J_heap_base
"addr2thread_id"
"thread_id2addr"
"sc_spurious_wakeups"
"sc_empty"
"sc_allocate P"
"sc_typeof_addr"
"sc_heap_read"
"sc_heap_write"
for P .
abbreviation sc_red ::
"((addr, thread_id, heap) external_thread_action \<Rightarrow> (addr, thread_id, 'o, heap) Jinja_thread_action)
\<Rightarrow> addr J_prog \<Rightarrow> thread_id \<Rightarrow> addr expr \<Rightarrow> heap \<times> addr locals
\<Rightarrow> (addr, thread_id, 'o, heap) Jinja_thread_action \<Rightarrow> addr expr \<Rightarrow> heap \<times> addr locals \<Rightarrow> bool"
("_,_,_ \<turnstile>sc ((1\<langle>_,/_\<rangle>) -_\<rightarrow>/ (1\<langle>_,/_\<rangle>))" [51,51,0,0,0,0,0,0] 81)
where
"sc_red extTA P \<equiv> sc.red (TYPE(addr J_mb)) P extTA P"
fun sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o
where
"sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P t ((e, xs), h) =
red_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o
addr2thread_id thread_id2addr sc_spurious_wakeups
sc_empty (sc_allocate P) sc_typeof_addr sc_heap_read_i_i_i_o sc_heap_write_i_i_i_i_o
(extTA2J P) P t e (h, xs)
\<guillemotright>= (\<lambda>(ta, e, h, xs). Predicate.single (ta, (e, xs), h))"
abbreviation sc_J_start_state_refine ::
"addr J_prog \<Rightarrow> cname \<Rightarrow> mname \<Rightarrow> addr val list \<Rightarrow>
(addr, thread_id, heap, (thread_id, (addr expr \<times> addr locals) \<times> addr released_locks) rm, (thread_id, addr wait_set_status) rm, thread_id rs) state_refine"
where
"sc_J_start_state_refine \<equiv>
sc_start_state_refine
(rm_empty ()) rm_update (rm_empty ()) (rs_empty ())
(\<lambda>C M Ts T (pns, body) vs. (blocks (this # pns) (Class C # Ts) (Null # vs) body, empty))"
lemma eval_sc_red_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o:
"(\<lambda>t xm ta x'm'. Predicate.eval (sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P t xm) (ta, x'm')) =
(\<lambda>t ((e, xs), h) ta ((e', xs'), h'). extTA2J P,P,t \<turnstile>sc \<langle>e, (h, xs)\<rangle> -ta\<rightarrow> \<langle>e', (h', xs')\<rangle>)"
by(auto elim!: red_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_oE intro!: red_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_oI ext SUP1_I simp add: eval_sc_heap_write_i_i_i_i_o eval_sc_heap_read_i_i_i_o)
lemma sc_J_start_state_invar: "(\<lambda>_. True) (sc_state_\<alpha> (sc_J_start_state_refine P C M vs))"
by simp
subsection {* Round-robin scheduler *}
interpretation J_rr!:
sc_round_robin_base
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA Jinja_output
for P
.
definition sc_rr_J_start_state :: "nat \<Rightarrow> 'm prog \<Rightarrow> thread_id fifo round_robin"
where "sc_rr_J_start_state n0 P = J_rr.round_robin_start n0 (sc_start_tid P)"
definition exec_J_rr ::
"nat \<Rightarrow> addr J_prog \<Rightarrow> cname \<Rightarrow> mname \<Rightarrow> addr val list \<Rightarrow>
(thread_id \<times> (addr, thread_id) obs_event list,
(addr, thread_id) locks \<times> ((thread_id, (addr expr \<times> addr locals) \<times> addr released_locks) rm \<times> heap) \<times>
(thread_id, addr wait_set_status) rm \<times> thread_id rs) tllist"
where
"exec_J_rr n0 P C M vs = J_rr.exec P n0 (sc_rr_J_start_state n0 P) (sc_J_start_state_refine P C M vs)"
interpretation J_rr!:
sc_round_robin
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA Jinja_output
for P
by(unfold_locales)
interpretation J_rr!:
sc_scheduler
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA
"J_rr.round_robin P n0" Jinja_output "pick_wakeup_via_sel (\<lambda>s P. rm_sel s (\<lambda>(k,v). P k v))" J_rr.round_robin_invar
UNIV
for P n0
unfolding sc_scheduler_def
apply(rule J_rr.round_robin_scheduler)
apply(unfold eval_sc_red_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o)
apply(rule sc.red_mthr_deterministic[OF sc_deterministic_heap_ops])
apply(simp add: sc_spurious_wakeups)
done
subsection {* Random scheduler *}
interpretation J_rnd!:
sc_random_scheduler_base
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA Jinja_output
for P
.
definition sc_rnd_J_start_state :: "Random.seed \<Rightarrow> random_scheduler"
where "sc_rnd_J_start_state seed = seed"
definition exec_J_rnd ::
"Random.seed \<Rightarrow> addr J_prog \<Rightarrow> cname \<Rightarrow> mname \<Rightarrow> addr val list \<Rightarrow>
(thread_id \<times> (addr, thread_id) obs_event list,
(addr, thread_id) locks \<times> ((thread_id, (addr expr \<times> addr locals) \<times> addr released_locks) rm \<times> heap) \<times>
(thread_id, addr wait_set_status) rm \<times> thread_id rs) tllist"
where
"exec_J_rnd seed P C M vs = J_rnd.exec P (sc_rnd_J_start_state seed) (sc_J_start_state_refine P C M vs)"
interpretation J_rnd!:
sc_random_scheduler
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA Jinja_output
for P
by(unfold_locales)
interpretation J_rnd!:
sc_scheduler
final_expr "sc_red_i_i_i_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o P" convert_RA
"J_rnd.random_scheduler P" Jinja_output "pick_wakeup_via_sel (\<lambda>s P. rm_sel s (\<lambda>(k,v). P k v))" "\<lambda>_ _. True"
UNIV
for P
unfolding sc_scheduler_def
apply(rule J_rnd.random_scheduler_scheduler)
apply(unfold eval_sc_red_i_i_i_i_i_Fii_i_oB_Fii_i_i_oB_i_i_i_i_i_o_o_o)
apply(rule sc.red_mthr_deterministic[OF sc_deterministic_heap_ops])
apply(simp add: sc_spurious_wakeups)
done
ML_val {* @{code exec_J_rr} *}
ML_val {* @{code exec_J_rnd} *}
end
The fractional part of a sum is the sum of the fractional parts.
module ReferenceTests
using Test
using ImageCore
using Distances
using FileIO
using ImageInTerminal
using SHA
using DeepDiffs
using Random
export
@withcolor,
@io2str,
@test_reference,
psnr_equality
include("utils.jl")
include("test_reference.jl")
include("fileio.jl")
include("equality_metrics.jl")
include("render.jl")
end # module
```python
import numpy as np
import numba
import matplotlib.pyplot as plt
import sympy as sym
%matplotlib notebook
plt.style.use('presentation.mplstyle')
colors_cycle=plt.rcParams.get('axes.prop_cycle')
colors = [item['color'] for item in colors_cycle]
def d2np(d):
names = []
numbers = ()
dtypes = []
for item in d:
        names.append(item)
if type(d[item]) == float:
numbers += (d[item],)
dtypes += [(item,float)]
if type(d[item]) == int:
numbers += (d[item],)
dtypes += [(item,int)]
if type(d[item]) == np.ndarray:
numbers += (d[item],)
dtypes += [(item,np.float64,d[item].shape)]
return np.array([numbers],dtype=dtypes)
```
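As a quick illustration (not part of the original notebook), `d2np` packs a plain Python dictionary into a one-element NumPy structured array, so scalar parameters and array-valued states can later be accessed by field name inside the numba-compiled functions:
```python
# Minimal usage sketch for d2np (illustrative values only).
params = dict(R_s=0.01, N_pp=2, x=np.zeros((3, 1)))
s = d2np(params)
print(s[0]['R_s'])      # 0.01, stored as a float field
print(s[0]['x'].shape)  # (3, 1), stored as a fixed-shape array field
```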
## Steady state solution
```python
psi_ds,psi_qs,psi_dr,psi_qr = sym.symbols('psi_ds,psi_qs,psi_dr,psi_qr')
i_ds,i_qs,i_dr,i_qr = sym.symbols('i_ds,i_qs,i_dr,i_qr')
di_ds,di_qs,di_dr,di_qr = sym.symbols('di_ds,di_qs,di_dr,di_qr')
L_s,L_r,L_m = sym.symbols('L_s,L_r,L_m')
R_s,R_r = sym.symbols('R_s,R_r')
omega_s,omega_r,sigma = sym.symbols('omega_s,omega_r,sigma')
v_ds,v_qs,v_dr,v_qr = sym.symbols('v_ds,v_qs,v_dr,v_qr')
eq_ds = (L_s+L_m)*i_ds + L_m*i_dr - psi_ds
eq_qs = (L_s+L_m)*i_qs + L_m*i_qr - psi_qs
eq_dr = (L_r+L_m)*i_dr + L_m*i_ds - psi_dr
eq_qr = (L_r+L_m)*i_qr + L_m*i_qs - psi_qr
dpsi_ds = v_ds - R_s*i_ds + omega_s*psi_qs
dpsi_qs = v_qs - R_s*i_qs - omega_s*psi_ds
dpsi_dr = v_dr - R_r*i_dr + sigma*omega_s*psi_qr
dpsi_qr = v_qr - R_r*i_qr - sigma*omega_s*psi_dr
'''
s = sym.solve([ eq_dr, eq_qr, dpsi_ds, dpsi_qs, dpsi_dr, dpsi_qr],
[ i_ds, i_qs, psi_ds, psi_qs, v_dr, v_qr])
s = sym.solve([dpsi_ds,dpsi_qs,dpsi_dr,dpsi_qr],
[ i_ds, i_qs, i_dr, i_qr,
psi_ds, psi_qs, i_dr, psi_qr])
s = sym.solve([ eq_ds, eq_qs, eq_dr, eq_qr,
dpsi_ds,dpsi_qs,dpsi_dr,dpsi_qr],
[ i_ds, i_qs, v_dr, v_qr,
psi_ds, psi_qs, psi_dr, psi_qr])
'''
s = sym.solve([ eq_dr, eq_qr,
dpsi_ds,dpsi_qs,dpsi_dr,dpsi_qr],
[ i_ds, i_qs, v_dr, v_qr,
psi_dr, psi_qr])
s = sym.solve([ eq_ds, eq_qs, eq_dr, eq_qr,
dpsi_ds,dpsi_qs,dpsi_dr,dpsi_qr],
[ i_ds, i_qs, v_dr, v_qr,
psi_ds, psi_qs, psi_dr, psi_qr])
for item in s:
print(item, '=', sym.simplify(s[item]))
```
i_ds = (L_m*R_s*i_qr*omega_s - L_m*i_dr*omega_s**2*(L_m + L_s) + R_s*v_ds + omega_s*v_qs*(L_m + L_s))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
i_qs = (-L_m*R_s*i_dr*omega_s - L_m*i_qr*omega_s**2*(L_m + L_s) + R_s*v_qs - omega_s*v_ds*(L_m + L_s))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
v_dr = (L_m**2*R_s*i_dr*omega_s**2*sigma + L_m**2*i_qr*omega_s**3*sigma*(L_m + L_s) - L_m*R_s*omega_s*sigma*v_qs + L_m*omega_s**2*sigma*v_ds*(L_m + L_s) + R_r*i_dr*(R_s**2 + omega_s**2*(L_m + L_s)**2) - i_qr*omega_s*sigma*(L_m + L_r)*(R_s**2 + omega_s**2*(L_m + L_s)**2))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
v_qr = (L_m**2*R_s*i_qr*omega_s**2*sigma - L_m**2*i_dr*omega_s**3*sigma*(L_m + L_s) + L_m*R_s*omega_s*sigma*v_ds + L_m*omega_s**2*sigma*v_qs*(L_m + L_s) + R_r*i_qr*(R_s**2 + omega_s**2*(L_m + L_s)**2) + i_dr*omega_s*sigma*(L_m + L_r)*(R_s**2 + omega_s**2*(L_m + L_s)**2))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
psi_ds = (L_m*R_s**2*i_dr + L_m*R_s*i_qr*omega_s*(L_m + L_s) + R_s*v_ds*(L_m + L_s) + omega_s*v_qs*(L_m + L_s)**2)/(R_s**2 + omega_s**2*(L_m + L_s)**2)
psi_qs = (L_m*R_s**2*i_qr - L_m*R_s*i_dr*omega_s*(L_m + L_s) + R_s*v_qs*(L_m + L_s) - omega_s*v_ds*(L_m + L_s)**2)/(R_s**2 + omega_s**2*(L_m + L_s)**2)
psi_dr = (L_m**2*R_s*i_qr*omega_s - L_m**2*i_dr*omega_s**2*(L_m + L_s) + L_m*R_s*v_ds + L_m*omega_s*v_qs*(L_m + L_s) + i_dr*(L_m + L_r)*(R_s**2 + omega_s**2*(L_m + L_s)**2))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
psi_qr = (-L_m**2*R_s*i_dr*omega_s - L_m**2*i_qr*omega_s**2*(L_m + L_s) + L_m*R_s*v_qs - L_m*omega_s*v_ds*(L_m + L_s) + i_qr*(L_m + L_r)*(R_s**2 + omega_s**2*(L_m + L_s)**2))/(R_s**2 + omega_s**2*(L_m + L_s)**2)
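The printed expressions above are the ones hard-coded later in `dfim_alg_ctrl1`. As an optional cross-check (not in the original notebook), any of them can be turned into a numerical function with `sympy.lambdify`; the parameter values below are arbitrary placeholders, not the machine data defined further down:
```python
# Illustrative numerical evaluation of the symbolic steady-state current i_ds.
f_i_ds = sym.lambdify((R_s, L_s, L_m, omega_s, i_dr, i_qr, v_ds, v_qs), s[i_ds])
print(f_i_ds(0.0024, 7.6e-5, 2.3e-3, 2*np.pi*50, 0.0, 0.0, 563.0, 0.0))
```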
```python
# [1] T. Demiray, F. Milano, and G. Andersson,
# “Dynamic phasor modeling of the doubly-fed induction generator under unbalanced conditions,” 2007 IEEE Lausanne POWERTECH, Proc., no. 2, pp. 1049–1054, 2007.
@numba.jit(nopython=True, cache=True)
def dfim_alg_ctrl1(struct,i,m):
'''
    Doubly Fed Induction Machine with neglected dynamics and
rotor side converter and control level 1 already implemented.
i_rd = i_rd_ref and i_rq = i_rq_ref without dynamics
'''
x_idx = struct[i]['dfim_idx']
#psi_dr = float(struct[i]['x'][x_idx+0,0])
#psi_qr = float(struct[i]['x'][x_idx+1,0])
L_m = struct[i]['L_m']
L_r = struct[i]['L_r']
L_s = struct[i]['L_s']
R_r = struct[i]['R_r']
R_s = struct[i]['R_s']
N_pp = struct[i]['N_pp']
Dt = struct[i]['Dt']
i_dr_ref = struct[i]['i_dr_ref']
i_qr_ref = struct[i]['i_qr_ref']
i_dr = i_dr_ref
i_qr = i_qr_ref
v_ds = struct[i]['v_ds']
v_qs = struct[i]['v_qs']
omega_r = struct[i]['omega_r']
omega_s = struct[i]['omega_s']
sigma = (omega_s - omega_r)/omega_s
den = R_s**2 + omega_s**2*(L_m + L_s)**2
i_qs = (-L_m*R_s*i_dr*omega_s - L_m*i_qr*omega_s**2*(L_m + L_s) + R_s*v_qs - omega_s*v_ds*(L_m + L_s))/den
i_ds = ( L_m*R_s*i_qr*omega_s - L_m*i_dr*omega_s**2*(L_m + L_s) + R_s*v_ds + omega_s*v_qs*(L_m + L_s))/den
v_qr = R_r*i_qr + omega_s*sigma*(L_m*i_dr + L_m*i_ds + L_r*i_dr)
v_dr = R_r*i_dr - omega_s*sigma*(L_m*i_qr + L_m*i_qs + L_r*i_qr)
psi_dr = L_m*i_dr + L_m*i_ds + L_r*i_dr
psi_qs = (R_s*i_ds - v_ds)/omega_s
psi_ds = (-R_s*i_qs + v_qs)/omega_s
psi_qr = L_m*i_qr + L_m*i_qs + L_r*i_qr
tau_e = 3.0/2.0*N_pp*(psi_qr*i_dr - psi_dr*i_qr)
struct[i]['v_dr'] = v_dr
struct[i]['v_qr'] = v_qr
struct[i]['i_ds'] = i_ds
struct[i]['i_qs'] = i_qs
struct[i]['i_dr'] = i_dr
struct[i]['i_qr'] = i_qr
struct[i]['psi_ds'] = psi_ds
struct[i]['psi_qs'] = psi_qs
struct[i]['psi_dr'] = psi_dr
struct[i]['psi_qr'] = psi_qr
struct[i]['tau_e'] = tau_e
struct[i]['sigma'] = sigma
struct[i]['p_s'] = 3.0/2.0*(v_ds*i_ds + v_qs*i_qs)
struct[i]['q_s'] = 3.0/2.0*(v_ds*i_qs - v_qs*i_ds)
struct[i]['p_r'] = 3.0/2.0*(v_dr*i_dr + v_qr*i_qr)
struct[i]['q_r'] = 3.0/2.0*(v_dr*i_qr - v_qr*i_dr)
return tau_e
@numba.jit(nopython=True, cache=True)
def wecs_mech_1(struct,i,m):
x_idx = struct[i]['mech_idx']
omega_t = struct[i]['x'][x_idx,0] # rad/s
tau_t = struct[i]['tau_t']
tau_r = struct[i]['tau_r']
J_t = struct[i]['J_t']
N_tr = struct[i]['N_tr']
Dt = struct[i]['Dt']
domega_t = 1.0/J_t*(tau_t - N_tr*tau_r)
omega_r = N_tr*omega_t
struct[i]['f'][x_idx,0] = domega_t
struct[i]['omega_r'] = omega_r
struct[i]['omega_t'] = omega_t
return omega_t
```
## Controller level 2
```python
@numba.jit(nopython=True, cache=True)
def dfim_ctrl2(struct,i,m):
'''
Control level 2 for DFIM for stator active and reactive power.
'''
x_idx = struct[i]['ctrl2r_idx']
xi_p_s = float(struct[i]['x'][x_idx+0,0])
xi_q_s = float(struct[i]['x'][x_idx+1,0])
K_r_p = struct[i]['K_r_p']
K_r_i = struct[i]['K_r_i']
p_s_ref = struct[i]['p_s_ref']
q_s_ref = struct[i]['q_s_ref']
p_s = struct[i]['p_s']
q_s = struct[i]['q_s']
S_b = struct[i]['S_b']
omega_r = struct[i]['omega_r']
omega_s = struct[i]['omega_s']
R_r = struct[i]['R_r']
I_b = S_b/(np.sqrt(3)*690.0)
sigma = (omega_s - omega_r)/omega_s
error_p_s = (p_s_ref - p_s)/S_b
error_q_s = (q_s_ref - q_s)/S_b
dxi_p_s = error_p_s
dxi_q_s = error_q_s
struct[i]['f'][x_idx+0,0] = dxi_p_s
struct[i]['f'][x_idx+1,0] = dxi_q_s
struct[i]['i_dr_ref'] = -I_b*(K_r_p*error_p_s + K_r_i*xi_p_s)
struct[i]['i_qr_ref'] = -I_b*(K_r_p*error_q_s + K_r_i*xi_q_s)
    return struct[i]['i_dr_ref'], struct[i]['i_qr_ref']
```
```python
Omega_b = 2.0*np.pi*50.0
omega_r = Omega_b/2
2.0e6/omega_r
```
12732.395447351626
```python
Omega_b = 2.0*np.pi*50.0
S_b = 2.0e6
U_b = 690.0
Z_b = U_b**2/S_b
#nu_w =np.linspace(0.1,15,N)
H = 2.0
N_pp = 2
N_tr = 20
# H = 0.5*J*Omega_t_n**2/S_b
S_b = 2.0e6
Omega_t_n = Omega_b/N_pp/N_tr
J_t = 2*H*S_b/Omega_t_n**2
#Z_b = 1.0
#Omega_b = 1.0
d =dict(S_b = S_b,
Omega_b = Omega_b,
R_r = 0.01*Z_b,
R_s = 0.01*Z_b,
L_r = 0.08*Z_b/Omega_b,
L_s = 0.1*Z_b/Omega_b,
L_m = 3.0*Z_b/Omega_b,
N_pp = N_pp,
psi_ds = 0.0,
psi_qs = 0.0,
p_s = 0.0,
q_s = 0.0,
p_r = 0.0,
q_r = 0.0,
psi_dr = 0.0,
psi_qr = 0.0,
p_s_ref = 0.0,
q_s_ref = 0.0,
i_ds = 0.0,
i_qs = 0.0,
i_dr = 0.0,
i_qr = 0.0,
i_dr_ref = 0.0,
i_qr_ref = 0.0,
v_ds = 0.0,
v_qs = 0.0,
v_dr = 0.0,
v_qr = 0.0,
omega_r = Omega_b/N_pp,
omega_s = Omega_b/N_pp,
sigma = 0.0,
tau_e = 0.0,
x = np.zeros((3,1)),
f = np.zeros((3,1)),
Dt = 0.0,
J_t = J_t,
omega_t = 0.0,
tau_t = 0.0,
tau_r = 0.0,
N_tr = N_tr,
K_r_p = 0.02,
K_r_i = 20.0,
dfim_idx = 0,
mech_idx = 0,
ctrl2r_idx = 1
)
struct = d2np(d)
struct = np.hstack((struct[0],np.copy(struct[0])))
#wecs_mech_1(struct,0)
dfim_alg_ctrl1(struct,0,0)
dfim_ctrl2(struct,0,0)
dfim_alg_ctrl1(struct,1,0)
dfim_ctrl2(struct,1,0)
print(struct[0]['p_s']/1e6,struct[0]['q_s']/1e6,struct[0]['tau_e'])
print(struct[1]['p_s']/1e6,struct[1]['q_s']/1e6,struct[1]['tau_e'])
```
0.0 0.0 0.0
0.0 0.0 0.0
```python
struct = d2np(d)
struct = np.hstack((struct[0],np.copy(struct[0])))
sys_d = dict(x = np.zeros((6,1)),
f = np.zeros((6,1)))
sys_struct = d2np(sys_d)
@numba.jit(nopython=True, cache=True)
def f_eval(sys_struct,struct):
for i in range(2):
struct[i]['x'][:,0] = sys_struct[0]['x'][3*i:3*(i+1),0]
wecs_mech_1(struct,i,2)
dfim_ctrl2(struct,i,2)
dfim_alg_ctrl1(struct,i,2)
sys_struct[0]['f'][3*i:3*(i+1),:] = struct[i]['f']
return 0
```
```python
@numba.jit(nopython=True, cache=True)
def run(sys_struct,struct):
N_steps = 1000
N_states = 6
Dt = 10.0e-3
Omega_r = np.zeros((N_steps,1))
Omega_t = np.zeros((N_steps,1))
P_s_1 = np.zeros((N_steps,1))
Q_s_1 = np.zeros((N_steps,1))
P_r_1 = np.zeros((N_steps,1))
Q_r_1 = np.zeros((N_steps,1))
P_s_2 = np.zeros((N_steps,1))
Q_s_2 = np.zeros((N_steps,1))
P_r_2 = np.zeros((N_steps,1))
Q_r_2 = np.zeros((N_steps,1))
V_dr = np.zeros((N_steps,1))
V_qr = np.zeros((N_steps,1))
I_dr = np.zeros((N_steps,1))
I_qr = np.zeros((N_steps,1))
I_ds = np.zeros((N_steps,1))
I_qs = np.zeros((N_steps,1))
Tau_e = np.zeros((N_steps,1))
Tau_t = np.zeros((N_steps,1))
T = np.zeros((N_steps,1))
X = np.zeros((N_steps,N_states))
p_ref = 0.0
q_ref = 0.0
xi_p = 0.0
xi_q = 0.0
struct[0]['x'][:,0] = np.copy(sys_struct[0]['x'][0:3,0])
struct[1]['x'][:,0] = np.copy(sys_struct[0]['x'][3:6,0])
for it in range(N_steps):
t = Dt*float(it)
# perturbations and references
struct[0]['p_s_ref'] = 0.0
struct[1]['p_s_ref'] = 0.0
struct[0]['q_s_ref'] = 0.0
struct[1]['q_s_ref'] = 0.0
if t>1.0:
Omega_t_b = struct[0]['Omega_b']/struct[0]['N_tr']/struct[0]['N_pp']
struct[0]['tau_t'] = 1.5e6/Omega_t_b+np.random.normal(500e3,100e3)/Omega_t_b
if t>1.5:
struct[0]['p_s_ref'] = 1.0e6
if t>3.0:
struct[1]['p_s_ref'] = 1.50e6
if t>4.0:
struct[0]['q_s_ref'] = 0.5e6
if t>5.0:
struct[1]['q_s_ref'] = -0.7e6
        ## solver: Heun's method (explicit trapezoidal predictor-corrector)
f_eval(sys_struct,struct)
f1 = np.copy(sys_struct[0]['f'])
x1 = np.copy(sys_struct[0]['x'])
sys_struct[0]['x'][:]= np.copy(x1 + Dt*f1)
f_eval(sys_struct,struct)
f2 = np.copy(sys_struct[0]['f'])
sys_struct[0]['x'][:]= np.copy(x1 + 0.5*Dt*(f1 + f2))
for i in range(2):
struct[i]['x'][:,0] = sys_struct[0]['x'][3*i:3*(i+1),0]
struct[0]['tau_r'] = struct[0]['tau_e']
struct[1]['tau_r'] = struct[1]['tau_e']
T[it,0] = t
P_s_1[it,0] = float(struct[0]['p_s'])
Q_s_1[it,0] = float(struct[0]['q_s'])
P_r_1[it,0] = float(struct[0]['p_r'])
Q_r_1[it,0] = float(struct[0]['q_r'])
P_s_2[it,0] = float(struct[1]['p_s'])
Q_s_2[it,0] = float(struct[1]['q_s'])
P_r_2[it,0] = float(struct[1]['p_r'])
Q_r_2[it,0] = float(struct[1]['q_r'])
I_dr[it,0] = float(struct[0]['i_dr'])
I_qr[it,0] = float(struct[0]['i_qr'])
I_ds[it,0] = float(struct[0]['i_ds'])
I_qs[it,0] = float(struct[0]['i_qs'])
Omega_r[it,0] = float(struct[0]['omega_r'])
Omega_t[it,0] = float(struct[0]['omega_t'])
V_dr[it,0] = float(struct[0]['v_dr'])
V_qr[it,0] = float(struct[0]['v_qr'])
Tau_e[it,0] = float(struct[0]['tau_e'])
Tau_t[it,0] = float(struct[0]['tau_t'])
X[it,:] = sys_struct[0]['x'][:].T
return T,X,Tau_e,P_s_1,Q_s_1,P_r_1,Q_r_1,P_s_2,Q_s_2,P_r_2,Q_r_2,V_dr,V_qr,Omega_r,Omega_t,I_dr,I_qr,I_ds,I_qs,Tau_t
%timeit run(sys_struct, struct)
```
1.2 ms ± 74 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)
```python
sys_struct['x'][:]= np.zeros((6,1))
struct['v_qs'] = 0.0
struct['v_ds'] = 690.0*np.sqrt(2.0/3.0)
struct['tau_t'] = 0.0
sys_struct[0]['x'][0,0] = Omega_b*0.9/struct[0]['N_tr']/struct[0]['N_pp']
sys_struct[0]['x'][3,0] = Omega_b*1.1/struct[1]['N_tr']/struct[0]['N_pp']
#%timeit run(sys_struct, struct)
T,X,Tau_e,P_s_1,Q_s_1,P_r_1,Q_r_1,P_s_2,Q_s_2,P_r_2,Q_r_2,V_dr,V_qr,Omega_r,Omega_t,I_dr,I_qr,I_ds,I_qs,Tau_t = run(sys_struct, struct)
```
```python
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 4), sharex = True)
fig.set_tight_layout(False)
axes.plot(T,Tau_e/1e3)
axes.set_xlabel('Time (s)')
axes.set_ylabel('DFIM Torque (kNm)')
fig.savefig('dfim_tau_e.svg', bbox_inches='tight')
```
```python
from matplotlib import rcParams
plt.style.use('presentation.mplstyle')
#rcParams.update({'figure.tight_layout':False})
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 6), sharex = True)
fig.set_tight_layout(False)
axes[0].plot(T,P_s_1/1e6, label='$\sf p_{s1}$')
axes[0].plot(T,Q_s_1/1e6, label='$\sf q_{s1}$')
#axes[0].plot(T,P_s_2/1e6, label='$\sf p_{s2}$')
#axes[0].plot(T,Q_s_2/1e6, label='$\sf q_{s2}$')
axes[1].plot(T,P_r_1/1e6, label='$\sf p_{r1}$')
axes[1].plot(T,Q_r_1/1e6, label='$\sf q_{r1}$')
#axes[1].plot(T,P_r_2/1e6, label='$\sf p_{r2}$')
#axes[1].plot(T,Q_r_2/1e6, label='$\sf q_{r2}$')
axes[0].legend(loc='best')
axes[1].legend(loc='best')
axes[0].set_ylabel('Stator powers (MVA)')
axes[1].set_ylabel('Rotor powers (MVA)')
axes[1].set_xlabel('Time (s)')
axes[0].set_ylim([-0.1,1.1])
#axes[0].set_xlim([0,3.0])
fig.savefig('dfim_pq_s_pq_r.svg', bbox_inches='tight')
```
```python
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 6))
fig.set_tight_layout(False)
axes[0].plot(T,Omega_t*60/(2*np.pi))
axes[1].plot(T,Tau_t/20.0/1000, label='$\sf \\tau_t$')
axes[1].plot(T,Tau_e/1000, label='$\sf \\tau_e$')
#axes[1].plot(T,P_r_2/1e6, label='$\sf p_{r2}$')
#axes[1].plot(T,Q_r_2/1e6, label='$\sf q_{r2}$')
axes[1].legend(loc='best')
axes[0].set_ylabel('Rotor speed (rev/m)')
axes[1].set_ylabel('Torques (kNm)')
axes[1].set_xlabel('Time (s)')
#axes[0].set_ylim([0,2.5])
#axes[0].set_xlim([0,3.0])
fig.savefig('dfim_omega_taus.svg', bbox_inches='tight')
```
```python
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 6), sharex = True)
fig.set_tight_layout(False)
axes[0].plot(T,V_dr, label='$\sf v_{dr}$')
axes[0].plot(T,V_qr, label='$\sf v_{qr}$')
axes[1].plot(T,P_r_1/1e6, label='$\sf p_{r}$', color = colors[2])
axes[1].plot(T,Q_r_1/1e6, label='$\sf q_{r}$', color = colors[3])
axes[1].plot(T,(P_r_1**2+Q_r_1**2)**0.5/1e6, label='$\sf s_{r}$', color = colors[4])
axes[0].legend(loc='best')
axes[1].legend(loc='best')
axes[0].set_ylabel('Rotor voltages (V)')
axes[1].set_ylabel('Rotor powers (MVA)')
axes[1].set_xlabel('Time (s)')
#axes[0].set_ylim([0,2.5])
#axes[0].set_xlim([0,3.0])
fig.savefig('dfim_rotor_v_powers.svg', bbox_inches='tight')
```
```python
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 7), sharex = True)
fig.set_tight_layout(False)
axes[0].plot(T,I_dr/1000, label='$\sf i_{dr}$')
axes[0].plot(T,I_qr/1000, label='$\sf i_{qr}$')
axes[1].plot(T,I_ds/1000, label='$\sf i_{ds}$')
axes[1].plot(T,I_qs/1000, label='$\sf i_{qs}$')
axes[0].legend()
axes[1].legend()
axes[0].set_ylabel('Stator currents (kA)')
axes[1].set_ylabel('Rotor currents (kA)')
axes[1].set_xlabel('Time (s)')
#axes[0].set_ylim([0,2.5])
#axes[0].set_xlim([0,3.0])
fig.savefig('dfim_i_s_i_r.svg', bbox_inches='tight')
```
# Tutorial 1: Modeling sequences and encoding text
**Week 2, Day 3: Modern RNNs**
**By Neuromatch Academy**
__Content creators:__ Bhargav Srinivasa Desikan, Anis Zahedifard, James Evans
__Content reviewers:__ Lily Cheng, Melvin Selim Atay, Ezekiel Williams, Kelson Shilling-Scrivo
__Content editors:__ Nina Kudryashova, Spiros Chavlis
__Production editors:__ Roberto Guidotti, Spiros Chavlis
**Our 2021 Sponsors, including Presenting Sponsor Facebook Reality Labs**
----
# Tutorial objectives
Before we explore how RNNs excel at modelling sequences, we will look at some of the other ways we can model sequences, encode text, and make meaningful measurements using such encodings and embeddings.
```python
# @title Tutorial slides
# @markdown These are the slides for the videos in this tutorial
# @markdown If you want to locally download the slides, click [here](https://osf.io/n263c/download)
from IPython.display import IFrame
IFrame(src=f"https://mfr.ca-1.osf.io/render?url=https://osf.io/n263c/?direct%26mode=render%26action=download%26mode=render", width=854, height=480)
```
---
## Setup
```python
# @title Install dependencies
# @markdown There may be `Errors`/`Warnings` reported during the installation. However, they are to be ignored.
!pip install torchtext==0.4.0 --quiet
!pip install --upgrade gensim --quiet
!pip install unidecode --quiet
!pip install hmmlearn --quiet
!pip install fasttext --quiet
!pip install nltk --quiet
!pip install pandas --quiet
!pip install python-Levenshtein --quiet
!pip install git+https://github.com/NeuromatchAcademy/evaltools --quiet
from evaltools.airtable import AirtableForm
# generate airtable form
atform = AirtableForm('appn7VdPRseSoMXEG','W2D3_T1','https://portal.neuromatchacademy.org/api/redirect/to/9c55f6cb-cdf9-4429-ac1c-ec44fe64c303')
```
```python
# Imports
import time
import fasttext
import numpy as np
import pandas as pd
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torch.nn import functional as F
from hmmlearn import hmm
from scipy.sparse import dok_matrix
from torchtext import data, datasets
from torchtext.vocab import FastText
import nltk
from nltk import FreqDist
from nltk.corpus import brown
from nltk.tokenize import word_tokenize
from gensim.models import Word2Vec
from sklearn.manifold import TSNE
from sklearn.preprocessing import LabelEncoder
from tqdm import tqdm_notebook as tqdm
```
```python
# @title Figure Settings
import ipywidgets as widgets
%config InlineBackend.figure_format = 'retina'
plt.style.use("https://raw.githubusercontent.com/NeuromatchAcademy/content-creation/main/nma.mplstyle")
```
```python
# @title Load Dataset from `nltk`
# no critical warnings, so we suppress them
import warnings
warnings.simplefilter("ignore")
nltk.download('punkt')
nltk.download('averaged_perceptron_tagger')
nltk.download('brown')
nltk.download('webtext')
```
[nltk_data] Downloading package punkt to
[nltk_data] /home/[email protected]/nltk_data...
[nltk_data] Unzipping tokenizers/punkt.zip.
[nltk_data] Downloading package averaged_perceptron_tagger to
[nltk_data] /home/[email protected]/nltk_data...
[nltk_data] Unzipping taggers/averaged_perceptron_tagger.zip.
[nltk_data] Downloading package brown to
[nltk_data] /home/[email protected]/nltk_data...
[nltk_data] Unzipping corpora/brown.zip.
[nltk_data] Downloading package webtext to
[nltk_data] /home/[email protected]/nltk_data...
[nltk_data] Unzipping corpora/webtext.zip.
True
```python
# @title Helper functions
import requests
def cosine_similarity(vec_a, vec_b):
"""Compute cosine similarity between vec_a and vec_b"""
return np.dot(vec_a, vec_b) / (np.linalg.norm(vec_a) * np.linalg.norm(vec_b))
def tokenize(sentences):
#Tokenize the sentence
#from nltk.tokenize library use word_tokenize
token = word_tokenize(sentences)
return token
def plot_train_val(x, train, val, train_label, val_label, title, y_label,
color):
plt.plot(x, train, label=train_label, color=color)
plt.plot(x, val, label=val_label, color=color, linestyle='--')
plt.legend(loc='lower right')
plt.xlabel('epoch')
plt.ylabel(y_label)
plt.title(title)
def load_dataset(emb_vectors, sentence_length=50, seed=522):
TEXT = data.Field(sequential=True,
tokenize=tokenize,
lower=True,
include_lengths=True,
batch_first=True,
fix_length=sentence_length)
LABEL = data.LabelField(dtype=torch.float)
train_data, test_data = datasets.IMDB.splits(TEXT, LABEL)
TEXT.build_vocab(train_data, vectors=emb_vectors)
LABEL.build_vocab(train_data)
train_data, valid_data = train_data.split(split_ratio=0.7,
random_state=random.seed(seed))
train_iter, valid_iter, test_iter = data.BucketIterator.splits((train_data,
valid_data,
test_data),
batch_size=32,
sort_key=lambda x: len(x.text),
repeat=False,
shuffle=True)
vocab_size = len(TEXT.vocab)
print(f'Data are loaded. sentence length: {sentence_length} '
f'seed: {seed}')
return TEXT, vocab_size, train_iter, valid_iter, test_iter
def download_file_from_google_drive(id, destination):
URL = "https://docs.google.com/uc?export=download"
session = requests.Session()
response = session.get(URL, params={ 'id': id }, stream=True)
token = get_confirm_token(response)
if token:
params = { 'id': id, 'confirm': token }
response = session.get(URL, params=params, stream=True)
save_response_content(response, destination)
def get_confirm_token(response):
for key, value in response.cookies.items():
if key.startswith('download_warning'):
return value
return None
def save_response_content(response, destination):
CHUNK_SIZE = 32768
with open(destination, "wb") as f:
for chunk in response.iter_content(CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
```
```python
# @title Set random seed
# @markdown Executing `set_seed(seed=seed)` sets the random seed
# for DL it's critical to set the random seed so that students can have a
# baseline to compare their results to expected results.
# Read more here: https://pytorch.org/docs/stable/notes/randomness.html
# Call `set_seed` function in the exercises to ensure reproducibility.
import random
import torch
def set_seed(seed=None, seed_torch=True):
if seed is None:
seed = np.random.choice(2 ** 32)
random.seed(seed)
np.random.seed(seed)
if seed_torch:
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
torch.cuda.manual_seed(seed)
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
print(f'Random seed {seed} has been set.')
# In case that `DataLoader` is used
def seed_worker(worker_id):
worker_seed = torch.initial_seed() % 2**32
np.random.seed(worker_seed)
random.seed(worker_seed)
```
```python
# @title Set device (GPU or CPU). Execute `set_device()`
# inform the user if the notebook uses GPU or CPU.
def set_device():
device = "cuda" if torch.cuda.is_available() else "cpu"
if device != "cuda":
print("WARNING: For this notebook to perform best, "
"if possible, in the menu under `Runtime` -> "
"`Change runtime type.` select `GPU` ")
else:
print("GPU is enabled in this notebook.")
return device
```
```python
DEVICE = set_device()
SEED = 2021
set_seed(seed=SEED)
```
GPU is enabled in this notebook.
Random seed 2021 has been set.
---
# Section 1: Sequences, Markov Chains & HMMs
*Time estimate: ~45mins*
```python
# @title Video 1: Sequences & Markov Processes
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = "https://player.bilibili.com/player.html?bvid={0}&page={1}".format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id=f"BV1jg411774B", width=854, height=480, fs=1)
print("Video available at https://www.bilibili.com/video/{0}".format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id=f"ApkE7UFaJAQ", width=854, height=480, fs=1, rel=0)
print("Video available at https://youtube.com/watch?v=" + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
# add event to airtable
atform.add_event('Video 1: Sequences & Markov Processes')
display(out)
```
In this notebook we will be exploring the world of sequences - thinking of what kind of data can be thought of as sequences, and how these sequences can be represented as Markov Chains and Hidden Markov Models. These ideas and methods were an important part of natural language processing and language modelling, and serve as a useful way to ground ourselves before we dive into neural network methods.
## Why is this relevant? How are these sequences related to modern recurrent neural networks?
As we mentioned before, modelling sequences of data - in this particular case, **language** - is an ideal place to start. RNNs were designed with sequences in mind, and the need to model sequences over time is what inspired RNNs (and the LSTM and GRU families - we will see these in the next notebook).
Markov models and hidden Markov models serve as an introduction to these concepts because they were some of the earliest ways to think about sequences. They do not capture much of the complexity that RNNs can, but they are a useful way of thinking about sequences and probabilities, and about how these concepts can be used for tasks such as text generation or classification - tasks that RNNs excel at today.
Think of this section as an introduction to thinking with sequences and text data, and as a historical introduction to the world of modelling sequential data.
## Section 1.1: What data are sequences?
Native Sequences:
- Temporally occurring events (e.g., history, stock prices)
- Temporally processed events (e.g., communication)
- Topologically connected components (e.g., polymers, peptides)
Synthetic Sequences:
- Anything processed as a sequence (e.g., scanned pixels in an image)
Sequences can be represented as a Markov Process - since this notion of sequential data is intrinsically linked to RNNs, it is a good place for us to start, and natural language (text!) will be our sequence of choice.
We will be using the Brown corpus, which comes bundled with NLTK. Using the entire corpus requires a lot of RAM for some of the methods, so we recommend using a smaller subset of categories if you do not have enough RAM.
We will be using some of the code from this [tutorial](https://www.kdnuggets.com/2019/11/markov-chains-train-text-generation.html) and this [Jupyter notebook](https://github.com/StrikingLoo/ASOIAF-Markov/blob/master/ASOIAF.ipynb).
The first few cells of code all involve set-up; some of this code will be hidden because it is not necessary for understanding the ideas of Markov models, but the way the data is set up can be vital to how the model performs (something in common with neural network models!).
Let us start with loading our corpus.
```python
category = ['editorial', 'fiction', 'government', 'news', 'religion']
sentences = brown.sents(categories=category)
```
Now that we have our sentences, let us look at some statistics to get an idea of what we are dealing with.
```python
lengths = [len(sentence) for sentence in sentences]
lengths = pd.Series(lengths)
```
Find the 80th percentile: the smallest sentence length that is at least as long as 80% of the sentences in the *Brown corpus*.
```python
lengths.quantile(.8)
```
30.0
```python
lengths.describe()
```
count 16617.000000
mean 20.470723
std 13.656809
min 1.000000
25% 10.000000
50% 18.000000
75% 28.000000
max 180.000000
dtype: float64
```python
sentences[0:2]
```
[['The',
'Fulton',
'County',
'Grand',
'Jury',
'said',
'Friday',
'an',
'investigation',
'of',
"Atlanta's",
'recent',
'primary',
'election',
'produced',
'``',
'no',
'evidence',
"''",
'that',
'any',
'irregularities',
'took',
'place',
'.'],
['The',
'jury',
'further',
'said',
'in',
'term-end',
'presentments',
'that',
'the',
'City',
'Executive',
'Committee',
',',
'which',
'had',
'over-all',
'charge',
'of',
'the',
'election',
',',
'``',
'deserves',
'the',
'praise',
'and',
'thanks',
'of',
'the',
'City',
'of',
'Atlanta',
"''",
'for',
'the',
'manner',
'in',
'which',
'the',
'election',
'was',
'conducted',
'.']]
This gives us an idea of what our dataset looks like, along with some average lengths. This kind of quick data exploration can be very useful - we know how long different sequences are, and how we might want to collect these words.
Since we will be modelling words as sequences in sentences, let us first collect all the words in our corpus.
```python
corpus_words = []
for sentence in sentences:
for word in sentence:
if "''" not in word and "``" not in word:
corpus_words.append(word)
```
```python
print(f"Corpus length: {len(corpus_words)}")
```
Corpus length: 335766
```python
corpus_words[0:20]
```
['The',
'Fulton',
'County',
'Grand',
'Jury',
'said',
'Friday',
'an',
'investigation',
'of',
"Atlanta's",
'recent',
'primary',
'election',
'produced',
'no',
'evidence',
'that',
'any',
'irregularities']
We'll now get distinct (unique) words and create a matrix to represent all these words. This is necessary because we will be using this matrix to look at the probability of the words in sequences.
```python
# @title Creating Matrices and Distinct Words
distinct_words = list(set(corpus_words))
word_idx_dict = {word: i for i, word in enumerate(distinct_words)}
distinct_words_count = len(list(set(corpus_words)))
next_word_matrix = np.zeros([distinct_words_count, distinct_words_count])
```
```python
print("Number of distinct words: " + str(distinct_words_count))
```
Number of distinct words: 27485
In the following lines of code we are populating the matrix that tracks the next word in a sentence.
```python
# @title Populating Matrix that tracks next word
for i, word in enumerate(corpus_words[:-1]):
first_word_idx = word_idx_dict[word]
next_word_idx = word_idx_dict[corpus_words[i+1]]
next_word_matrix[first_word_idx][next_word_idx] +=1
```
Now we have the information ready to construct a Markov chain. The next-word matrix is crucial here, as it allows us to go from one word in the sequence to the next. We will soon see how this is used.
## Section 1.2: What is a Markov Chain or Model?
A Markov Chain (or Model) is:
- a stochastic model describing a sequence of possible events,
- in which the probability of each event depends only on the state attained in the previous event.
- A countably infinite sequence in which the chain moves state at discrete time steps gives a discrete-time Markov chain (DTMC) [vs. a continuous-time process, or CTMC].
- The classic formal language model is a Markov Model.
*Helpful explanations from [eric mjl's tutorial](https://ericmjl.github.io/essays-on-data-science/machine-learning/markov-models/#non-autoregressive-homoskedastic-emissions)*!
The simplest Markov models assume that we have a _system_ that contains a finite set of states,
and that the _system_ transitions between these states with some probability at each time step $t$,
thus generating a sequence of states over time.
Let's call these states $S$, where
\begin{equation}
S = \{s_1, s_2, ..., s_n\}
\end{equation}
To keep things simple, let's start with three states:
\begin{equation}
S = \{s_1, s_2, s_3\}
\end{equation}
A Markov model generates a sequence of states, with one possible realization being:
\begin{equation}
\{s_1, s_1, s_1, s_3, s_3, s_3, s_2, s_2, s_3, s_3, s_3, s_3, s_1, ...\}
\end{equation}
And generically, we represent it as a sequence of states $x_t, x_{t+1}... x_{t+n}$. (We have chosen a different symbol to not confuse the "generic" state with the specific realization.) Graphically, a plain and simple Markov model looks like the following:
*(Figure: a simple Markov model, drawn as a chain of states over time.)*
### Modelling transitions between states
To know how a system transitions between states, we now need a **transition matrix**.
The transition matrix describes the probability of transitioning from one state to another (The probability of staying in the same state is semantically equivalent to transitioning to the same state).
By convention, transition matrix rows correspond to the state at time $t$,
while columns correspond to state at time $t+1$.
Hence, row probabilities sum to one, because the probability of transitioning to the next state depends on only the current state, and all possible states are known and enumerated.
Let's call the transition matrix $P_{transition}$:
\begin{equation}
P_{transition} =
\begin{pmatrix}
p_{11} & p_{12} & p_{13} \\
p_{21} & p_{22} & p_{23} \\
p_{31} & p_{32} & p_{33} \\
\end{pmatrix}
\end{equation}
Using the transition matrix, we can express different behaviors of the system. For example:
1. by assigning larger probability mass to the diagonals, we can express that the system likes to stay in the current state;
2. by assigning larger probability mass to the off-diagonal, we can express that the system likes to transition out of its current state.
In our case, this matrix is created by measuring how often one word appeared after another.
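As a small worked example (not part of the original tutorial, with hand-picked probabilities), here is a three-state transition matrix and a few sampled steps; each row sums to one, and the next state is drawn from the row of the current state:
```python
# Illustrative 3-state Markov chain with hypothetical transition probabilities.
P = np.array([[0.8, 0.1, 0.1],   # from s1
              [0.2, 0.6, 0.2],   # from s2
              [0.3, 0.3, 0.4]])  # from s3
state = 0                        # start in s1
trajectory = [state]
for _ in range(10):
    # sample the next state from the row of the current state
    state = np.random.choice(3, p=P[state])
    trajectory.append(state)
print(trajectory)
```
The word-level chain below works in exactly the same way, except that the states are the 27,485 distinct words and the rows come from the counts in `next_word_matrix`.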
```python
# @title Function for most likely word
def most_likely_word_after(word):
# we check for the word most likely to occur using the matrix
most_likely = next_word_matrix[word_idx_dict[word]].argmax()
return distinct_words[most_likely]
```
Using our most likely word function, we can begin to create chains of words and create sequences. In the code below we create a naive chain that simply choses the most likely word.
```python
# @title Function for building Naive Chain
def naive_chain(word, length=15):
current_word = word
sentence = word
# we now build a naive chain by picking up the most likely word
for _ in range(length):
sentence += ' '
next_word = most_likely_word_after(current_word)
sentence += next_word
current_word = next_word
return sentence
```
Let us now use this naive chain to see what comes up, using some simple words.
```python
print(naive_chain('the'))
print(naive_chain('I'))
print(naive_chain('What'))
print(naive_chain('park'))
```
the United States , and the United States , and the United States , and the
I have been a new members of the United States , and the United States ,
What is a new members of the United States , and the United States , and
park . The new members of the United States , and the United States , and
We notice that after the word `the`, `United States` comes up each time. All the other sequences starting from other words also end up at `the` quite often. Since we use a *deterministic* Markov chain, the next word depends only on the previous one; therefore, once a sequence reaches `the`, it inevitably continues with `United States`.
We can now be a little more sophisticated, and return words in a sequence using a *weighted choice*, which randomly selects the next word from a set of words with some probability (weight).
```python
# @title Function for weighted choice
def weighted_choice(objects, weights):
"""
Returns randomly an element from the sequence of 'objects',
the likelihood of the objects is weighted according
to the sequence of 'weights', i.e. percentages.
"""
weights = np.array(weights, dtype=np.float64)
sum_of_weights = weights.sum()
# standardization:
    weights = np.multiply(weights, 1 / sum_of_weights)
weights = weights.cumsum()
x = random.random()
for i in range(len(weights)):
if x < weights[i]:
return objects[i]
```
```python
# @title Function for sampling next word with weights
def sample_next_word_after(word, alpha=0):
next_word_vector = next_word_matrix[word_idx_dict[word]] + alpha
likelihoods = next_word_vector/next_word_vector.sum()
return weighted_choice(distinct_words, likelihoods)
```
```python
sample_next_word_after('The')
```
'modest'
```python
sample_next_word_after('The')
```
'psalmist'
There! We don't see the same word twice, because of the added randomisation (i.e., stochasticity). Our algorithm calculates how likely it is to find a certain word after a given word (`The` in this case) in the corpus, and then generates 1 sample of the next word with a matching probability.
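If you are curious what the sampler is drawing from, you can inspect the empirical next-word distribution directly. This is a minimal sketch (not part of the original tutorial), reusing `next_word_matrix`, `word_idx_dict`, and `distinct_words` defined above:
```python
# Top-5 most likely words following 'The', according to the counts matrix.
row = next_word_matrix[word_idx_dict['The']]
probs = row / row.sum()
for idx in np.argsort(probs)[::-1][:5]:
    print(f"{distinct_words[idx]!r}: {probs[idx]:.3f}")
```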
In this example, we generated only one next word. Now, using this function, we'll build a chain.
```python
# @title Function for a stochastic chain using weighted choice
def stochastic_chain(word, length=15):
current_word = word
sentence = word
for _ in range(length):
sentence += ' '
next_word = sample_next_word_after(current_word)
sentence += next_word
current_word = next_word
return sentence
```
```python
stochastic_chain('Hospital')
```
'Hospital for the monk then by realtors in 1959 . Letters by promulgating separately films related'
Neat - we can create stochastic chains for a single word. For a more effective language model, we would want to model sets of words - in the following cells, we create sets of words to predict a chain after a sequence.
```python
k = 3
```
```python
def sequences_matrices(k=3):
# @title Code to build sets of words for more realistic sequences
sets_of_k_words = [' '.join(corpus_words[i:i+k]) for i, _ in enumerate(corpus_words[:-k])]
sets_count = len(list(set(sets_of_k_words)))
next_after_k_words_matrix = dok_matrix((sets_count, len(distinct_words)))
distinct_sets_of_k_words = list(set(sets_of_k_words))
k_words_idx_dict = {word: i for i, word in enumerate(distinct_sets_of_k_words)}
distinct_k_words_count = len(list(set(sets_of_k_words)))
for i, word in tqdm(enumerate(sets_of_k_words[:-k])):
word_sequence_idx = k_words_idx_dict[word]
next_word_idx = word_idx_dict[corpus_words[i+k]]
next_after_k_words_matrix[word_sequence_idx, next_word_idx] += 1
return k_words_idx_dict,distinct_sets_of_k_words,next_after_k_words_matrix
k_words_idx_dict, distinct_sets_of_k_words, next_after_k_words_matrix = sequences_matrices(k=k)
```
Let's have a look at what that bit of code did.
```python
distinct_sets_of_k_words[:10]
```
['Steve Hutchins of',
'national committee post',
'be nailed .',
'living together six',
'civil economy of',
'D. D. will',
"he won't !",
"' dust-settling '",
"Majdanek . I'm",
'interest or a']
Great! Now we are going to create a transition matrix for the sets of words.
```python
# @title Code to populate matrix of sets of words
for i, word in tqdm(enumerate(distinct_sets_of_k_words[:-k])):
word_sequence_idx = k_words_idx_dict[word]
next_word_idx = word_idx_dict[corpus_words[i+k]]
next_after_k_words_matrix[word_sequence_idx, next_word_idx] += 1
```
We now have what we need to build a stochastic chain over a `K` set of words.
```python
# @title Function for stochastic Chain for sets of words
def stochastic_chain_sequence(words, chain_length=15, k=2):
current_words = words.split(' ')
if len(current_words) != k:
raise ValueError(f'wrong number of words, expected {k}')
sentence = words
# pre-calculate seq embedding + transition matrix for a given k
matrices = sequences_matrices(k=k)
for _ in range(chain_length):
sentence += ' '
next_word = sample_next_word_after_sequence(matrices,' '.join(current_words))
sentence += next_word
current_words = current_words[1:]+[next_word]
return sentence
```
```python
# @title Function to sample next word after sequence
def sample_next_word_after_sequence(matrices, word_sequence, alpha=0):
# unpack a tuple of matrices
k_words_idx_dict,distinct_sets_of_k_words, next_after_k_words_matrix = matrices
next_word_vector = next_after_k_words_matrix[k_words_idx_dict[word_sequence]] + alpha
likelihoods = next_word_vector/next_word_vector.sum()
return weighted_choice(distinct_words, likelihoods.toarray())
```
```python
stochastic_chain_sequence('Judges under the', chain_length=3, k=3)
```
'Judges under the jurisdiction of the'
Great! This sentence was created using two of the techniques we recently saw - creating sets of words, and using a weighted average stochastic chain. Both of these methods contributed in making it a more meaningful sequence of words. Some of these notions are also captured by Recurrent Neural Networks!
### Think! 1.2: How does changing parameters affect the generated sentences?
Try using a set of words with a naive chain, and try a stochastic chain with a low value of k (e.g., 2) and a higher value (e.g., 5). How do these different configurations change the quality of the sequences produced? Below you have sample code to try these out.
```python
stochastic_chain_sequence(..., chain_length=..., k=...)
```
You should be able to use these matrices and the previous functions to be able to create the necessary configurations.
```python
stochastic_chain_sequence("The United States was", chain_length=10, k=4)
```
'The United States was engaged in a military attack on a peaceful , orderly'
```python
stochastic_chain_sequence("The", chain_length=10, k=1)
```
'The first person will reach a position ( 64-13 ) ;'
```python
# @title Student Response
from ipywidgets import widgets
text=widgets.Textarea(
value='Type your answer here and click on `Submit!`',
placeholder='Type something',
description='',
disabled=False
)
button = widgets.Button(description="Submit!")
display(text,button)
def on_button_clicked(b):
atform.add_answer('q1', text.value)
print("Submission successful!")
button.on_click(on_button_clicked)
```
## Section 1.3: What is a Hidden Markov Model?
A 1960s advance (by Leonard Baum and colleagues): Hidden Markov Models are:
- a Markov model in which the system modeled is assumed to be a Markov process/chain with unobservable ("hidden") states.
- HMM assumes there is another surrogate process whose behavior "depends" on the state--you learn about the state by observing the surrogate process.
- HMMs have successfully been applied in fields where the goal is to recover a data sequence not immediately observable (but other data that depend on the sequence are).
- The first dominant application: Speech and text processing (1970s)
In this sub-section we will use the Python library [hmmlearn](https://hmmlearn.readthedocs.io/en/latest/tutorial.html#training-hmm-parameters-and-inferring-the-hidden-states), which is part of the *scikit-learn* ecosystem. [nlg-with-hmmlearn](https://github.com/mfilej/nlg-with-hmmlearn) offers useful code snippets to adapt ```hmmlearn``` for text data. Because we are using a package that offers many out-of-the-box implementations for HMMs, we don't have to worry about setting up the states, transition matrices, and emission probabilities by hand.
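Before fitting an HMM on real text, it can help to see the moving parts on a toy model. The sketch below is not part of the original tutorial and assumes the older `hmmlearn` API used in this notebook (where `MultinomialHMM` has one categorical emission distribution per hidden state); it builds a two-state HMM by hand and samples a short observation sequence:
```python
# Toy 2-state HMM over 3 observation symbols, with hand-picked probabilities.
toy = hmm.MultinomialHMM(n_components=2)
toy.startprob_ = np.array([0.6, 0.4])           # initial state distribution
toy.transmat_ = np.array([[0.7, 0.3],           # hidden-state transitions
                          [0.4, 0.6]])
toy.emissionprob_ = np.array([[0.5, 0.4, 0.1],  # P(symbol | hidden state)
                              [0.1, 0.3, 0.6]])
obs, hidden = toy.sample(8)
print("observations:", obs.ravel())
print("hidden states:", hidden)
```
With real text we do not know these matrices, so in the cells below they are instead estimated from the encoded Brown corpus words.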
```python
# load the data
sentences = brown.sents(categories=category)
words = [word.lower() for sentence in sentences for word in sentence]
lengths = [len(sentence) for sentence in sentences]
alphabet = set(words)
# Encode words
le = LabelEncoder()
_ = le.fit(list(alphabet))
# Find word frequencies
seq = le.transform(words)
features = np.fromiter(seq, np.int64)
features = np.atleast_2d(features).T
fd = FreqDist(seq)
```
Now that we have our data set up, we can create our model. We use a multinomial HMM with 8 states, and can either do a random initialisation or use word frequencies. We recommend trying both options!
```python
# @title Function to create default Multinomial HMM model
def get_model(num_states):
print("Initial parameter estimation using built-in method")
model = hmm.MultinomialHMM(n_components=num_states, init_params='ste')
return model
```
```python
# @title Function to create a Multinomial HMM model initialised with relative word frequencies
def frequencies(num_states):
print("Initial parameter estimation using relative frequencies")
frequencies = np.fromiter((fd.freq(i) for i in range(len(alphabet))),
dtype=np.float64)
emission_prob = np.stack([frequencies]*num_states)
model = hmm.MultinomialHMM(n_components=num_states, init_params='st')
model.emissionprob_ = emission_prob
return model
print(frequencies(2))
```
Initial parameter estimation using relative frequencies
MultinomialHMM(init_params='st', n_components=2)
**Note:** Fitting the HMM below takes a long time (~17 mins for the default Brown corpus categories). If you do not have that time, skip the fit and instead download the pre-trained default model a few cells below.
**Note:** Use either the `model = run_model(...)` line or the `model = frequencies(...)` line, not both, as the variable `model` would otherwise be overwritten.
```python
## Fitting a default multinomial HMM. This is lengthy (~17 mins)
def run_model(features, lengths, num_states):
  model = get_model(num_states)
  model = model.fit(features, lengths)
  return model

num_states = 1

## Uncomment, if you have time!
model = run_model(features, lengths, num_states)

## Another way to get a model is to use default frequencies when initialising the model
# model = frequencies(num_states)
```
Initial parameter estimation using built-in method
Alternatively, you could use a saved model. Here is a [link](https://drive.google.com/file/d/1IymcmcO48V6q3x-6dhf7-OU5NByo5W2F/view?usp=sharing) to the default model, which you can download and then upload into Colab.
```python
# @markdown Execute this cell to download the saved model.
import pickle
import requests

url = "https://osf.io/5k6cs/download"
r = requests.get(url)
# Save the downloaded bytes (named `fout` to avoid shadowing the FreqDist `fd` above)
with open('model_w2d3_t1.pkl', 'wb') as fout:
  fout.write(r.content)
# Load the pickle file
with open("model_w2d3_t1.pkl", "rb") as file:
  model = pickle.load(file)
```
```python
# @title Function to generate words given a hmm model
def generate_text(model, num_lines=5, random_len=15):
  for _i in range(num_lines):
    set_seed(_i)
    symbols, _states = model.sample(random_len)
    output = le.inverse_transform(np.squeeze(symbols))
    for word in output:
      print(word, end=" ")
    print()
```
```python
generate_text(model, num_lines=20, random_len=20)
```
Random seed 0 has been set.
representative look one to for lack u.s. . the then that such on was greatly still mines of of police
Random seed 1 has been set.
rhode completed . e. lifelike power thing pay may and which proceeds to , thing had lay court the shrewdest
Random seed 2 has been set.
, he developing of business laid it system insisting . -- . not . effort and in for much put
Random seed 3 has been set.
received it to any heavy hooliganism can newman may grew production accrued such as values people the . mature at
Random seed 4 has been set.
loyalties reported arrested '' he and with not for week was their . alien go very 10 looked officially opening
Random seed 5 has been set.
theology traffic of jack and seen action by coconut much bureau better a when and announced success mr. it women
Random seed 6 has been set.
did , norm had of secret mrs. would granny the result secrets lines well as year's peacetime resources elios game's
Random seed 7 has been set.
such rolling life . irremediable the -- to his , was made `` sick if incurred in the countries calendar
Random seed 8 has been set.
which last ) game in local relax has wife as with daylight assume to without elfin special daily but ol'
Random seed 9 has been set.
is `` as been e. think , more parlor quo programs i'm the all was the and these so i
Random seed 10 has been set.
, should assumed some . we it of city report a period he it now jr. cuts communists the of
Random seed 11 has been set.
, ruin in in the 116 the of 676 act the currently . note in the number was doctor that
Random seed 12 has been set.
sensual leadership trailing , a of the jonathan's in administration , 6 peasants the scholars department winston was the his
Random seed 13 has been set.
baseball where his stowey risen comfort the play east etc. as town -- these she . it we'd been decorator
Random seed 14 has been set.
stepped '' well cubans as dramatic '' aptly fig. southward 187-mile in blues other day . , set would and
Random seed 15 has been set.
an exaggerated lap conference began brought their are , and felt in was i life liberalism experience on gave gilbert
Random seed 16 has been set.
just , asked after w. . action claus i him town in , local tell out potter if prices )
Random seed 17 has been set.
last . other most established , they or not categories meat tax-exempt a , city the . acts : tidelands
Random seed 18 has been set.
is and shrewd world , the , combatted the other emissaries the was park the clearly the and on morality
Random seed 19 has been set.
something a . the of made away ionosphere law president was of on shrilly record price mantle also , regarding
We see that a hidden Markov model also does well at generating text. We encourage you to try out different initialisations and hyperparameters to see how the model does.
### (Bonus) Exercise 1.3: Transition probabilities
We have seen how we can use sequences of text to form probability chains, as well as how we can use out-of-the-box models to generate text. In this exercise, you will use your own data to generate sequences with ```hmmlearn``` or any other implementation of a Markov model. Explore the transition probabilities in your corpus and generate sentences. For example, one such exploration could be: how does a model initialised with word frequencies compare to a default model?
Perform any one such comparison or exploration, and generate 3 sentences or 50 words using your model. You should be able to reuse the existing functions defined for this exercise. A hedged sketch of one possible approach follows the empty code cell below.
**Note:** We suggest doing this exercise after completing both tutorials.
```python
# load your own dataset and create a model using the frequencies based HMM model!
```
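If you want a starting point, here is a hedged sketch. It assumes you have rebuilt `features` and `lengths` from your own corpus exactly as in Section 1.3, and it reuses the helper functions defined above; fitting can be slow on large corpora.
```python
# Fit one default-initialised and one frequency-initialised HMM on your own data,
# then compare their learned transition matrices and the text they generate.
my_default = run_model(features, lengths, num_states=2)
my_freq = frequencies(num_states=2).fit(features, lengths)

print(my_default.transmat_)  # learned P(state_j | state_i), default initialisation
print(my_freq.transmat_)     # same, frequency-based initialisation

generate_text(my_freq, num_lines=3, random_len=17)  # roughly 3 "sentences" / ~50 words
```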
### Useful links for Markov Models and HMM:
Here are some useful links if you wish to explore this topic further.
- [Markov Chain Text](https://towardsdatascience.com/simulating-text-with-markov-chains-in-python-1a27e6d13fc6)
- [Python QuantEcon: Finite Markov Chains with Finance](https://python.quantecon.org/finite_markov.html)
- [Markov Models from the ground up, with python](https://ericmjl.github.io/essays-on-data-science/machine-learning/markov-models/)
- [GenTex](https://github.com/nareshkumar66675/GenTex)
- [HMM learn](https://hmmlearn.readthedocs.io/en/latest/tutorial.html)
---
# Section 2: Word Embeddings
*Time estimate: ~60mins*
```python
# @title Video 2: Textual Dimension Reduction
from ipywidgets import widgets

out2 = widgets.Output()
with out2:
  from IPython.display import IFrame
  class BiliVideo(IFrame):
    def __init__(self, id, page=1, width=400, height=300, **kwargs):
      self.id = id
      src = "https://player.bilibili.com/player.html?bvid={0}&page={1}".format(id, page)
      super(BiliVideo, self).__init__(src, width, height, **kwargs)

  video = BiliVideo(id=f"BV1oM4y1P7Mn", width=854, height=480, fs=1)
  print("Video available at https://www.bilibili.com/video/{0}".format(video.id))
  display(video)

out1 = widgets.Output()
with out1:
  from IPython.display import YouTubeVideo
  video = YouTubeVideo(id=f"kweySXAZ1os", width=854, height=480, fs=1, rel=0)
  print("Video available at https://youtube.com/watch?v=" + video.id)
  display(video)

out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')

# add event to airtable
atform.add_event('Video 2: Textual Dimension Reduction')

display(out)
```
Words or subword units such as morphemes are the basic units that we use to express meaning in language. The technique of mapping words to vectors of real numbers is known as word embedding.
Word2vec is based on theories of distributional semantics: words that appear around each other are more likely to mean similar things than words that do not. Keeping this in mind, our job is to create a high-dimensional space in which these semantic relations are preserved. The innovation in word2vec is the realisation that we can use unlabelled, running text as the input to a supervised learning algorithm, treating it as a self-supervision task. It is "supervised" because the words in a sentence themselves serve as the positive and negative examples. Let's break this down:
... "use the kitchen knife to chop the vegetables"…
**C1 C2 C3 T C4 C5 C6 C7**
Here, the target word (**T**) is *knife*, and the context words (**C1-C7**) are the ones in its immediate window.
The first word2vec method we'll see is called skipgram, where the task is to assign a probability to how likely a context word is to appear around the target word. In the training process, positive examples are samples of target words paired with their actual context words, and negative examples are created by sampling pairs of words that do not appear near one another.
This method of implementing word2vec is called skipgram with negative sampling. So while the algorithm tries to better learn which context words are likely to appear around a target word, it ends up pushing the embedded representations for every word so that they are located optimally (e.g., with minimal semantic distortion). In this process of adjusting embedding values, the algorithm brings semantically similar words close together in the resulting high dimensional space, and dissimilar words far away.
Another word2vec training method, Continuous Bag of Words (CBOW), works in a similar fashion but tries to predict the target word given the context. This is the converse of skipgram, which tries to predict the context given the target word. Skipgram represents rare words and phrases well, though it often requires more data for stable representations, while CBOW is several times faster to train and achieves slightly better accuracy for frequent words in its prediction task. The popular gensim implementation of word2vec includes both methods. A small sketch of how skipgram training pairs are built is shown below.
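To make the positive/negative sampling idea concrete, here is a minimal sketch in plain Python. The window size of 3 and the extra "noise" vocabulary are illustrative assumptions; gensim's actual implementation draws negatives from a smoothed unigram frequency distribution rather than uniformly.
```python
# Build skipgram training pairs with (toy) negative sampling from one sentence.
import random

sentence = "use the kitchen knife to chop the vegetables".split()
noise_vocab = ["banana", "galaxy", "violin"]  # stand-ins for words that never co-occur here
window = 3
rng = random.Random(0)

positive, negative = [], []
for i, target in enumerate(sentence):
  for j in range(max(0, i - window), min(len(sentence), i + window + 1)):
    if j == i:
      continue
    positive.append((target, sentence[j], 1))               # label 1: real co-occurrence
    negative.append((target, rng.choice(noise_vocab), 0))   # label 0: sampled non-neighbour

print(positive[:4])
print(negative[:4])
```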
## Section 2.1: Creating Word Embeddings
We will create embeddings for a subset of categories in the [Brown corpus](https://www1.essex.ac.uk/linguistics/external/clmt/w3c/corpus_ling/content/corpora/list/private/brown/brown.html). To do this we will use the [gensim](https://radimrehurek.com/gensim/) library to create word2vec embeddings. Gensim's word2vec expects a sequence of sentences as its input, where each sentence is a list of words.
Calling `Word2Vec(sentences, epochs=1)` (the parameter was named `iter` in gensim versions before 4.0) will run two passes over the sentences iterator (or, in general, `epochs + 1` passes). The first pass collects words and their frequencies to build an internal dictionary tree structure. The second and subsequent passes train the neural model.
`Word2vec` accepts several parameters that affect both training speed and quality.
One of them is for pruning the internal dictionary. Words that appear only once or twice in a billion-word corpus are probably uninteresting typos and garbage. In addition, there’s not enough data to make any meaningful training on those words, so it’s best to ignore them:
`model = Word2Vec(sentences, min_count=10) # default value is 5`
A reasonable value for min_count is between 0-100, depending on the size of your dataset.
Another parameter is the dimensionality of the embedding vectors (the size of the NN layer), which corresponds to the "degrees" of freedom the training algorithm has:
`model = Word2Vec(sentences, vector_size=200)  # default value is 100; called 'size' before gensim 4.0`
Bigger size values require more training data, but can lead to better (more accurate) models. Reasonable values are in the tens to hundreds.
The last of the major parameters (full list [here](https://radimrehurek.com/gensim/models/word2vec.html#gensim.models.word2vec.Word2Vec)) is for training parallelization, to speed up training:
`model = Word2Vec(sentences, workers=4) # default = 1 worker = no parallelization`
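Putting these together, a single call might look like the hedged sketch below; the specific values are illustrative, not recommendations.
```python
# One combined Word2Vec call using the parameters discussed above (gensim 4.x names).
from gensim.models import Word2Vec
from nltk.corpus import brown

sentences = brown.sents(categories='news')
model = Word2Vec(
    sentences,
    vector_size=200,  # embedding dimensionality (default 100)
    min_count=10,     # drop words seen fewer than 10 times (default 5)
    workers=4,        # parallel worker threads
    sg=1,             # 1 = skipgram, 0 = CBOW
)
print(model.wv.most_similar('time', topn=5))
```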
```python
category = ['editorial', 'fiction', 'government', 'mystery', 'news', 'religion',
'reviews', 'romance', 'science_fiction']
```
```python
def create_word2vec_model(category='news', size=50, sg=1, min_count=5):
  try:
    sentences = brown.sents(categories=category)
    model = Word2Vec(sentences, vector_size=size, sg=sg, min_count=min_count)
  except (AttributeError, TypeError):
    raise AssertionError('Input variable "category" should be a string or list,'
                         '"size", "sg", "min_count" should be integers')
  return model


def model_dictionary(model):
  words = list(model.wv.key_to_index)
  return words


def get_embedding(word, model):
  if word in model.wv.key_to_index:
    return model.wv[word]
  else:
    return None
```
```python
all_categories = brown.categories()
```
```python
all_categories
```
['adventure',
'belles_lettres',
'editorial',
'fiction',
'government',
'hobbies',
'humor',
'learned',
'lore',
'mystery',
'news',
'religion',
'reviews',
'romance',
'science_fiction']
```python
w2vmodel = create_word2vec_model(all_categories)
```
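With the model trained, a couple of quick hedged checks are useful; *knife* is assumed to survive the `min_count=5` cutoff when training on all Brown categories, so swap in another word if it is missing from your vocabulary.
```python
# Inspect a single embedding and its nearest neighbours in the learned space.
vec = get_embedding('knife', w2vmodel)
print(None if vec is None else vec.shape)          # (50,) with the default size above
print(w2vmodel.wv.most_similar('knife', topn=5))   # most similar words by cosine similarity
```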
```python
print(model_dictionary(w2vmodel))
```
['the', ',', '.', 'of', 'and', 'to', 'a', 'in', 'that', 'is', 'was', 'for', '``', "''", 'The', 'with', 'it', 'as', 'he', 'his', 'on', 'be', ';', 'I', 'by', 'had', 'at', '?', 'not', 'are', 'from', 'or', 'this', 'have', 'an', 'which', '--', 'were', 'but', 'He', 'her', 'one', 'they', 'you', 'all', 'would', 'him', 'their', 'been', ')', '(', 'has', 'will', 'who', 'more', 'out', 'It', 'we', 'she', 'said', 'there', 'up', 'In', ':', 'than', 'them', 'into', 'no', 'its', 'about', 'so', 'when', 'can', 'only', 'other', '!', 'could', 'time', 'if', 'what', 'some', 'But', 'A', 'two', 'any', 'may', 'do', 'first', 'like', 'these', 'over', 'such', 'This', 'me', 'my', 'man', 'our', 'made', 'new', 'most', 'now', 'then', 'must', 'also', 'Af', 'did', 'even', 'back', 'before', 'years', 'through', 'And', 'many', 'She', 'much', 'way', 'down', 'your', 'should', 'There', 'where', 'They', 'Mr.', 'after', 'because', 'people', 'too', 'little', 'those', 'very', 'own', 'make', 'good', 'each', 'well', 'work', 'just', 'men', 'If', 'still', 'see', 'get', 'between', 'long', 'being', 'world', 'know', 'same', 'We', 'life', 'us', 'might', 'never', 'year', 'under', 'For', 'both', 'last', 'off', 'how', 'day', 'came', 'against', 'used', 'great', 'here', 'go', 'himself', 'right', 'come', 'When', 'few', 'take', 'New', 'another', 'American', 'old', 'use', 'while', 'around', 'three', 'As', 'state', 'without', 'found', 'again', 'Mrs.', 'His', 'place', '1', 'home', 'You', 'small', 'thought', 'went', 'say', 'upon', 'What', 'left', 'got', 'number', 'part', 'course', 'United', 'high', 'since', 'during', 'away', 'always', 'fact', '2', 'does', 'States', 'every', 'until', 'water', 'think', 'less', 'enough', 'To', 'took', 'put', 'head', 'something', 'One', 'hand', 'school', 'once', 'told', 'far', 'At', 'set', 'almost', 'better', 'public', 'house', 'end', 'night', 'called', 'find', 'knew', 'system', "didn't", 'going', 'eyes', "don't", 'asked', 'however', 'group', 'toward', 'give', 'days', 'though', 'point', 'possible', 'program', 'given', 'face', 'per', 'present', 'side', 'looked', 'room', 'important', 'look', 'order', 'business', 'next', 'things', 'John', 'become', 'young', 'No', 'nothing', 'That', 'felt', 'later', 'social', 'Then', 'case', 'rather', 'large', 'need', 'form', 'saw', 'often', 'On', 'These', 'least', 'children', 'along', 'best', 'second', 'several', 'seemed', 'early', 'ever', 'Of', 'thing', 'want', 'power', 'four', 'mind', 'interest', 'within', 'turned', 'light', 'area', "'", 'done', 'big', 'among', 'problem', 'members', 'country', 'began', 'door', 'family', 'general', 'sense', 'development', 'kind', 'whole', 'different', 'matter', 'open', 'war', 'itself', 'certain', 'help', 'York', 'God', 'human', 'others', 'let', 'name', 'means', '3', 'action', 'example', 'hands', 'gave', 'yet', 'feet', 'line', 'taken', 'law', 'With', 'past', 'seen', 'All', 'above', 'across', 'local', 'either', 'experience', 'government', 'show', 'quite', 'car', 'themselves', 'words', 'Now', "I'm", 'body', 'history', 'really', 'anything', 'times', 'death', 'period', 'State', 'half', 'word', 'together', 'city', 'held', 'money', 'information', 'whether', 'cannot', 'having', 'already', 'week', 'President', 'shall', 'white', 'political', 'brought', 'making', 'seems', 'real', 'question', 'whose', 'keep', 'ago', 'moment', 'today', 'After', 'tell', 'known', 'service', 'became', 'behind', 'field', 'result', 'free', 'five', 'why', 'available', 'problems', 'heard', 'sure', 'reason', 'study', 'position', 'change', 'probably', 'job', 'boy', 'areas', 'individual', 
'special', 'Miss', 'So', 'close', 'seem', 'major', 'wife', 'wanted', 'turn', 'am', 'full', 'church', 'cost', 'necessary', 'policy', 'voice', 'economic', 'clear', 'company', 'air', 'woman', 'true', 'front', 'able', 'feel', 'age', 'future', 'perhaps', 'community', 'office', 'provide', 'love', 'effect', 'national', 'level', 'stood', 'Some', 'girl', 'How', 'child', 'rate', 'total', 'control', 'common', 'morning', 'short', '4', 'Washington', 'run', 'following', 'students', 'million', 'By', 'sound', 'evidence', 'town', 'force', 'value', 'mean', 'hard', 'believe', 'top', 'land', 'leave', 'surface', 'although', 'An', 'play', 'type', 'situation', 'plan', 'six', 'military', 'process', 'various', 'English', 'party', 'started', 'increase', 'lines', 'says', 'strong', 'further', 'music', 'America', 'minutes', 'idea', 'longer', 'Dr.', 'personal', 'society', 'House', 'tax', 'alone', 'mother', 'near', 'schools', 'outside', 'gone', 'usually', 'months', 'General', 'center', 'needed', 'Not', 'West', 'private', 'expected', 'kept', 'nature', 'values', 'Even', 'living', 'century', 'pressure', 'basis', 'ground', 'art', 'view', 'women', "I'll", 'wrote', 'required', 'greater', 'South', 'call', 'moved', 'modern', 'cut', 'conditions', 'complete', 'road', 'particular', 'return', 'support', 'attention', 'book', 'soon', 'else', 'education', 'live', 'material', 'late', "couldn't", 'lost', 'Since', 'stage', 'hours', 'single', 'followed', 'amount', '1960', 'costs', 'except', 'added', 'third', 'space', 'hundred', 'coming', 'However', 'board', 'dark', 'including', 'fire', 'tried', 'person', 'heart', 'miles', 'pay', 'reached', 'developed', 'move', 'feeling', 'act', 'recent', 'makes', 'read', 'shown', 'Thus', 'figure', 'simply', 'dead', 'hope', 'class', 'Department', 'St.', 'equipment', 'From', 'National', 'received', 'taking', 'basic', '&', 'doing', 'college', 'looking', 'trying', 'everything', 'sometimes', 'Why', 'inside', 'hold', 'picture', 'cold', 'sort', 'industry', 'U.S.', 'data', 'rest', 'spirit', "It's", 'low', 'father', 'terms', 'difficult', 'care', 'War', 'walked', 'nor', 'beginning', 'subject', 'states', 'getting', 'religious', 'passed', 'My', 'floor', 'beyond', 'bring', 'especially', 'black', 'therefore', 'cent', 'simple', 'range', 'England', 'property', 'paper', 'report', 'written', 'needs', '10', 'natural', 'Secretary', 'hear', 'final', "can't", 'Her', 'higher', 'growth', 'likely', 'talk', 'meeting', 'considered', 'Congress', 'friends', 'sat', 'answer', 'entire', 'fine', 'ten', 'forces', 'countries', 'story', 'difference', 'working', "wasn't", 'table', 'William', 'cases', 'building', 'hair', 'involved', 'meet', 'similar', 'Government', 'training', 'happened', 'street', 'thus', 'hour', 'effort', 'purpose', 'stand', 'Christian', 'issue', 'paid', 'North', 'sent', 'knowledge', 'earlier', 'points', 'market', 'Here', 'whom', 'ready', 'particularly', "it's", '5', 'decided', 'results', 'statement', 'increased', 'addition', 'showed', 'East', 'start', 'thinking', 'production', 'Kennedy', 'Well', 'letter', 'son', 'due', 'wall', 'weeks', 'French', 'moral', 'bad', 'girls', 'methods', 'size', 'reading', 'programs', 'ideas', 'stock', 'directly', 'color', 'understand', 'method', 'population', 'fall', 'Yet', 'Island', 'appeared', 'normal', 'lay', 'concerned', 'strength', 'foreign', 'food', 'police', 'Federal', 'City', 'deal', '1961', 'nearly', 'trade', 'record', 'questions', 'merely', 'continued', 'comes', 'peace', 'member', 'research', 'using', 'ran', 'influence', 'section', 'direction', 'During', 'summer', 'boys', 'below', 
'husband', 'finally', 'trouble', 'literature', 'physical', 'opened', 'suddenly', 'month', 'led', 'list', 'step', 'temperature', 'stopped', 'cause', 'met', 'piece', 'instead', 'George', 'chance', 'try', 'services', 'Soviet', 'worked', 'effective', 'evening', 'anyone', 'former', 'myself', 'wrong', 'provided', 'theory', 'actually', 'average', 'forms', 'ways', 'bed', 'defense', 'changes', 'lead', 'placed', "I've", 'groups', 'Church', 'sales', "wouldn't", 'efforts', 'president', 'degree', 'herself', 'friend', 'systems', 'manner', 'University', 'movement', 'lot', 'carried', 'meaning', 'truth', 'aid', 'wide', 'somewhat', 'Although', 'respect', 'fear', 'plant', 'series', 'throughout', 'treatment', 'direct', 'performance', 'game', 'beautiful', 'larger', 'works', 'red', 'approach', 'easy', 'reaction', 'Just', 'couple', 'numbers', 'C', 'While', 'charge', 'described', 'court', 'opportunity', 'remember', 'J.', 'indeed', 'decision', 'determined', 'served', 'First', 'eye', 'running', 'freedom', 'labor', 'generally', 'window', 'understanding', 'reported', 'clearly', 'Each', 'trial', 'hot', 'Europe', 'nation', 'medical', 'Aj', 'British', 'lower', 'S.', 'immediately', 'international', 'certainly', 'character', 'account', 'persons', 'image', 'nations', 'appear', 'feed', 'ask', 'fiscal', 'based', 'earth', 'responsibility', 'learned', 'Act', 'activity', 'steps', 'obtained', 'returned', 'volume', 'forward', 'audience', 'technical', 'arms', "man's", 'length', 'planning', 'ones', 'industrial', '6', 'types', 'gives', 'blood', 'function', 'saying', 'doubt', 'serious', 'test', 'straight', '15', 'lived', 'corner', 'quality', 'farm', 'plane', '30', 'latter', 'federal', 'hit', 'Such', 'according', 'Another', 'square', 'Brown', 'include', 'White', 'pattern', 'writing', 'horse', 'Our', 'extent', 'completely', 'choice', 'organization', 'A.', 'stop', 'Court', 'May', 'letters', 'moving', 'activities', 'nuclear', 'born', 'student', 'design', 'specific', 'progress', 'parts', 'cars', 'recently', 'wish', 'Oh', 'shot', 'pool', 'Southern', 'visit', 'lack', 'established', 'distance', 'firm', 'main', 'importance', 'effects', 'indicated', 'slowly', 'plans', 'expect', 'Their', 'Negro', 'principle', 'spring', 'hall', 'neither', 'stress', 'heavy', 'speak', 'stay', 'additional', 'operation', 'existence', 'cities', 'attitude', 'easily', 'remained', 'leaders', 'designed', 'ahead', 'consider', 'applied', '8', 'note', 'central', 'language', 'growing', 'science', 'write', 'reach', 'radio', 'afternoon', 'season', 'Many', 'continue', 'Rhode', 'closed', "I'd", 'covered', 'Do', 'Most', 'interested', 'spent', 'becomes', 'College', 'role', 'apparently', 'waiting', 'press', 'Union', "won't", 'analysis', 'faith', 'elements', 'serve', 'suggested', 'mouth', 'played', 'World', 'attack', 'Western', 'staff', 'administration', '1959', 'limited', 'reasons', 'scene', 'original', 'date', "Don't", 'B', 'prepared', 'factors', 'sun', 'Only', 'married', 'teeth', 'professional', 'rise', 'Sunday', 'bit', 'religion', 'exactly', 'built', 'current', 'Two', 'deep', 'raised', 'dropped', 'ball', 'James', 'demand', 'playing', 'price', 'meant', 'Thomas', 'rates', 'figures', 'race', 'filled', 'hardly', 'sides', 'talking', 'pretty', 'places', 'daily', 'gun', 'events', 'June', 'knows', "hadn't", 'machine', 'related', 'Is', 'Chicago', 'Street', 'entered', 'facilities', 'techniques', 'concern', 'claim', 'style', 'March', 'fight', 'income', 'blue', "That's", 'supply', 'caught', 'dollars', 'officer', 'Christ', '12', 'energy', 'glass', 'standing', 'Charles', 'actual', 'status', 
'usual', 'institutions', 'walk', 'popular', 'studies', 'products', 'attempt', 'trees', 'behavior', 'share', 'thousand', 'changed', '7', 'opinion', 'accepted', 'included', 'unit', 'materials', 'gas', 'considerable', 'seven', 'film', 'green', 'shows', 'remain', 'relations', 'drive', 'older', 'station', "you're", 'primary', 'eight', 'Perhaps', 'Old', 'Americans', 'C.', 'standard', 'successful', 'giving', 'practice', 'highly', 'Communist', 'books', '20', 'fell', 'marriage', 'middle', 'none', 'thin', 'obvious', 'proper', 'Jack', 'tradition', 'sitting', 'determine', 'pieces', 'poor', 'Let', 'Mike', 'worth', 'objective', 'arm', '1958', 'sign', 'structure', 'happy', 'radiation', 'project', 'dinner', 'weight', 'produced', 'kitchen', 'funds', 'entirely', 'caused', 'follow', 'heat', 'complex', 'source', 'purposes', 'balance', 'university', "that's", 'cattle', 'news', 'condition', 'London', 'annual', 'leadership', 'construction', 'goes', 'mass', 'regard', 'Yes', 'famous', 'noted', 'discussion', 'health', 'Or', 'develop', 'announced', 'Committee', 'unless', 'equal', 'management', 'principal', 'River', 'names', 'carry', "isn't", 'whatever', 'Mary', 'measure', 'clothes', 'laws', 'pain', 'oil', 'enemy', 'units', 'Where', 'quickly', 'obviously', 'Both', 'possibility', 'spoke', 'relationship', 'doctor', 'break', 'allowed', 'takes', 'pass', 'companies', 'finished', 'E.', 'Association', 'facts', 'inches', 'Lord', 'died', 'patient', 'touch', 'success', 'failure', 'German', 'require', 'loss', 'cover', 'marked', 'concept', 'records', 'frequently', 'build', "doesn't", 'More', 'Once', 'published', 'County', 'assistance', 'greatest', 'Corps', 'remains', 'stated', 'aware', 'previous', 'relatively', 'significant', 'appears', 'Catholic', 'carefully', 'base', 'variety', 'stations', 'clay', 'shape', 'bridge', 'W.', 'distribution', 'product', 'prevent', 'sight', 'learn', 'authority', 'Henry', 'hotel', 'capacity', 'interests', 'Robert', 'collection', 'offered', 'remembered', 'dance', 'sources', 'churches', 'opening', 'requirements', 'impossible', 'poetry', 'presented', '25', 'produce', 'active', 'instance', 'begin', 'broad', 'ship', 'circumstances', 'chief', 'named', 'proposed', 'slightly', 'fixed', 'trip', 'watched', 'operating', 'team', 'Germany', 'scientific', 'created', 'interesting', 'jazz', 'houses', 'provides', 'immediate', 'officers', 'cells', 'parents', 'financial', 'event', 'philosophy', 'sex', 'essential', 'key', 'agreed', 'recognized', 'fully', 'committee', 'explained', 'sea', 'bottom', 'subjects', 'capital', 'campaign', 'workers', 'indicate', 'leaving', 'yesterday', 'manager', 'agreement', 'musical', 'Russian', 'features', 'lives', 'speed', 'spread', 'operations', 'offer', 'forced', 'expression', 'desire', 'term', 'regular', 'drink', 'apartment', 'mentioned', 'studied', 'Sam', 'session', 'atmosphere', 'add', 'enter', 'teacher', 'reports', 'opposite', 'reality', 'fast', 'maximum', 'differences', 'union', 'Its', 'neck', 'picked', '-', 'citizens', 'someone', 'battle', 'bill', '100', 'laid', 'crisis', 'bright', 'secret', 'Providence', 'edge', 'poems', 'looks', 'river', 'district', 'traditional', 'smaller', 'anode', 'reduced', 'train', 'believed', 'favor', 'winter', 'director', 'strange', 'Club', 'editor', 'Commission', 'Council', 'watch', 'procedure', 'literary', 'beside', 'bottle', 'School', 'classes', 'index', 'formed', 'economy', 'B.', 'buildings', 'follows', 'fresh', 'response', 'rules', 'receive', 'separate', 'address', 'difficulty', 'France', 'file', 'treated', 'Red', 'memory', 'removed', 'killed', 
'stayed', 'hearing', 'justice', 'presence', 'permit', 'vote', 'Jr.', 'Virginia', 'hospital', "he'd", 'Other', 'coffee', 'expressed', 'pointed', 'tone', 'Jewish', 'quiet', 'observed', 'ability', 'check', 'fit', 'seeing', 'security', 'failed', 'November', 'Berlin', 'watching', 'De', 'Army', 'nine', 'hell', 'discovered', 'contrast', 'faculty', 'twenty', 'knife', 'frame', 'selected', 'advantage', 'gain', 'Louis', 'wait', 'pulled', 'youth', 'everyone', 'Morgan', 'resources', 'official', 'Russia', 'store', 'murder', 'Because', 'Company', 'election', 'personnel', 'individuals', 'Jones', 'planned', 'assumed', 'standards', 'T', 'positive', 'familiar', 'detail', 'proved', 'accept', 'smiled', 'increasing', 'forth', 'items', 'Richard', 'chosen', 'legal', 'region', 'factor', 'writer', 'broke', 'brother', 'H.', 'faces', 'bar', 'responsible', 'dog', 'allow', 'Also', 'send', 'Rome', 'Mercer', 'San', 'round', 'sharp', 'platform', 'solid', 'corporation', 'evil', 'brief', 'carrying', 'county', 'daughter', 'April', 'Service', 'rose', 'wants', 'buy', 'levels', 'increases', 'suppose', 'otherwise', 'fields', 'realize', 'realized', 'seek', 'walls', 'shelter', 'nice', 'rich', 'leader', 'waited', 'rights', 'constant', 'Texas', 'vocational', 'die', 'club', 'willing', 'boat', 'ordered', 'Board', 'won', 'Before', 'Monday', 'rule', 'characteristic', 'completed', 'Democratic', 'Khrushchev', 'historical', 'connection', 'minimum', 'animal', 'poet', 'danger', 'settled', 'flow', 'Those', 'dust', 'calls', 'principles', 'lips', 'drawn', 'horses', 'foot', 'sweet', 'writers', 'fair', 'compared', 'maybe', 'statements', 'wine', 'largely', 'answered', 'career', 'yourself', 'appropriate', 'Paris', 'actions', 'thick', 'excellent', 'powers', 'legs', 'beauty', 'asking', 'occurred', 'Saturday', 'plus', 'Moreover', 'camp', 'dogs', 'ordinary', 'ends', 'potential', 'Du', 'significance', 'background', 'unity', 'dress', 'ourselves', 'helped', 'policies', 'July', 'upper', 'issues', 'flat', 'practical', 'Hanover', 'independent', 'phase', 'dry', 'substantial', 'fingers', 'rain', 'relief', 'box', 'reference', 'initial', 'impact', 'signs', 'quick', 'block', 'intellectual', 'directed', 'division', 'chair', 'despite', 'ought', 'twice', 'emotional', 'declared', 'telephone', 'weather', 'membership', 'sections', "He's", 'Though', 'projects', 'D.', 'brown', 'plays', 'wore', 'educational', 'imagination', 'California', 'search', 'adequate', 'Maybe', 'rapidly', 'electronic', 'claims', 'employees', 'politics', 'gets', 'supposed', 'hung', 'estimated', 'sit', 'measured', 'Phil', 'Peace', 'leading', 'Bill', 'fashion', 'desk', 'beat', 'stands', 'warm', 'communication', 'matters', 'empty', 'gray', 'object', 'clean', 'objects', 'protection', 'families', 'wonder', 'Day', 'Laos', 'reasonable', 'capable', 'bodies', 'aircraft', 'application', 'teachers', 'discussed', 'grew', 'site', 'approximately', 'cell', 'happen', 'yards', 'drew', 'fighting', 'King', 'fifty', 'belief', 'birth', 'argument', 'Tom', 'China', 'aspects', 'located', 'spiritual', 'jury', 'message', 'Parker', 'ancient', 'broken', 'jobs', 'grow', 'explain', 'Jesus', 'universe', 'primarily', 'closely', 'holding', 'contemporary', 'typical', 'model', 'sleep', "one's", 'dominant', 'newspaper', 'arrived', 'assignment', 'unusual', 'relation', 'mission', 'recognize', 'passing', 'December', 'struggle', 'honor', 'turning', 'M.', '9', 'greatly', 'powerful', 'reduce', 'cutting', 'billion', 'highest', 'kill', 'towards', 'assume', 'dramatic', 'achieved', 'north', 'benefit', 'resolution', '11', 'fellow', 'drove', 
'Wilson', 'sufficient', 'affairs', 'pictures', 'portion', 'agencies', 'wind', 'narrow', 'friendly', 'Greek', 'properties', 'Boston', 'weapons', 'procedures', 'homes', 'post', 'department', '50', 'liberal', 'U.', 'Are', 'master', 'escape', 'prices', 'soft', 'starting', 'European', 'showing', 'conference', 'Sir', 'teaching', 'R.', 'page', 'speech', 'rising', 'column', 'stared', 'learning', 'machinery', 'refused', 'commercial', "he's", 'sets', 'everybody', 'experiment', 'goal', 'extreme', 'onto', 'vast', 'experiments', 'location', 'traffic', 'contained', 'careful', 'closer', 'feelings', 'Finally', 'pleasure', 'join', 'nose', 'dream', 'save', 'Who', 'Friday', 'fourth', "you'll", 'maintain', 'domestic', 'existing', 'sensitive', 'Under', 'F.', 'Bible', 'exchange', 'Sometimes', 'exist', 'score', 'task', 'shoulder', 'conclusion', 'heavily', 'finds', 'eat', 'ended', 'contract', 'struck', 'headed', 'equally', 'organizations', 'Negroes', 'south', 'tomorrow', 'Central', 'ideal', 'novel', 'rifle', 'maintenance', 'ultimate', 'setting', 'concerning', 'formula', 'technique', 'cool', "You're", 'India', 'tests', 'emphasis', 'circle', 'understood', 'metal', 'exercise', 'useful', 'safe', 'sexual', 'busy', 'lie', 'smile', 'stories', 'wondered', 'District', 'Nations', 'possibly', 'neighborhood', 'liked', 'command', 'parties', 'establish', 'Tuesday', 'slow', 'Roman', 'judgment', 'duty', 'occasion', 'Dallas', 'uses', 'courses', 'lose', 'etc.', 'Administration', 'talked', 'army', 'roof', 'plants', 'appeal', 'alive', 'streets', 'shook', 'details', 'fairly', 'Three', 'enjoyed', 'goods', 'associated', 'taste', 'songs', 'aside', 'guests', 'pale', 'appearance', 'drop', 'combination', 'somehow', 'apparent', 'Lewis', 'Fig.', 'keeping', 'fat', 'continuing', 'unique', 'contact', 'Section', 'Every', 'processes', 'requires', 'Senate', 'baby', 'painting', 'Congo', 'charged', 'permitted', 'Great', 'budget', 'causes', 'majority', 'achievement', 'chemical', 'electric', 'Man', 'solution', 'song', 'September', 'shop', 'loved', 'becoming', 'Chinese', 'truly', 'tree', 'signal', 'entrance', "There's", 'culture', 'sky', 'wage', 'avoid', 'tension', 'headquarters', 'bought', 'academic', 'apply', 'represented', 'repeated', '14', 'entitled', 'providing', 'Palmer', 'officials', 'organized', 'spite', 'advance', 'thirty', 'surprised', 'intensity', 'minds', 'competition', 'informed', 'Among', 'evident', 'spot', 'demands', 'begins', 'device', 'extended', 'Your', 'yes', 'scale', 'artist', 'regarded', 'afraid', 'Joseph', 'win', 'Britain', 'critical', 'replied', 'soldiers', 'truck', 'theme', 'perfect', "she'd", 'previously', 'credit', 'hat', 'mine', 'runs', 'Like', 'firms', 'wished', 'Martin', '18', 'Joe', 'kid', 'travel', 'identity', 'vision', 'joined', 'confidence', 'conducted', 'notice', 'sought', 'item', 'naturally', 'hole', 'wheel', 'rock', '16', 'components', 'dictionary', 'text', 'double', 'vital', 'rooms', 'unable', 'notes', 'pure', 'supported', 'improved', 'L.', 'bank', 'symbol', 'positions', 'Republican', 'attend', 'motion', 'Smith', 'roads', 'victory', 'seat', 'trained', 'worry', 'grounds', 'sample', 'Jews', 'divided', 'minute', 'games', 'prove', 'negative', 'conduct', 'bear', 'Nothing', 'flowers', 'suggest', 'absence', 'imagine', 'create', 'January', 'Japanese', 'properly', 'spend', 'orders', 'sin', 'inner', 'granted', 'assigned', 'leg', 'draw', 'depth', 'wonderful', "there's", 'percent', 'largest', 'disease', 'generation', 'description', 'motor', 'chest', 'experienced', 'experiences', 'windows', 'tall', 'beneath', 'loose', 'risk', 
'huge', 'pushed', 'p.m.', 'No.', 'Hudson', 'guess', 'Alfred', 'bedroom', 'yellow', 'U.N.', 'decisions', 'baseball', 'phone', 'soil', 'Indian', 'content', 'sounds', 'flesh', 'establishment', 'element', 'widely', 'vehicles', 'title', 'forget', 'acting', 'pick', 'dozen', 'payment', 'wagon', 'troops', 'introduced', 'grass', 'machines', 'bitter', 'crowd', 'August', 'centers', 'driving', 'developing', 'Los', 'plenty', 'laughed', 'waves', 'wild', 'conflict', 'nodded', 'situations', 'David', 'achieve', 'Protestant', 'snow', 'thoughts', 'handle', 'Still', 'necessarily', 'Arthur', 'animals', 'wet', 'stages', 'approval', 'slight', 'Table', 'putting', 'angle', 'characteristics', 'begun', 'agree', 'governments', 'views', 'raise', 'telling', 'correct', 'abroad', 'stream', 'shoulders', 'breakfast', 'October', 'wood', 'advanced', 'mark', 'Philadelphia', 'conversation', 'uniform', 'conviction', 'Have', 'extremely', 'Latin', 'housing', 'breath', 'expense', 'interpretation', 'Eisenhower', 'practices', 'elections', 'remove', 'easier', 'D', 'Did', 'detailed', 'mention', 'brilliant', 'extra', 'Massachusetts', 'proud', 'devoted', 'noticed', 'Warren', 'pull', 'pair', 'rear', 'faced', 'surprise', 'issued', 'desired', 'walking', 'advice', 'taught', 'silent', 'papers', 'millions', 'fill', 'publication', 'About', 'sick', 'convinced', 'cultural', 'employed', 'consideration', 'firmly', 'colors', 'childhood', 'minor', 'request', 'trust', 'depends', 'incident', 'television', 'hopes', 'moon', 'conventional', 'operator', 'tendency', 'Therefore', '13', 'towns', 'administrative', 'wooden', 'tiny', 'west', 'Co.', 'maintained', 'measurements', 'finding', 'opportunities', 'measures', 'operate', 'park', 'acceptance', 'impressive', 'tired', 'suit', 'Hearst', 'attitudes', 'Charlie', 'moments', 'valley', 'agency', 'gold', 'driver', 'remarks', 'knowing', 'speaking', 'formal', 'leaves', 'cry', 'swung', 'creative', 'estate', 'lady', 'chain', 'destroy', '24', 'advertising', 'chairman', 'comparison', 'choose', 'worse', 'efficiency', 'quietly', 'rolled', 'passage', '1957', 'female', 'de', 'fifteen', 'poem', 'hoped', 'Moscow', 'limit', 'plain', 'shooting', 'liquid', 'detective', 'soul', 'internal', 'P', 'denied', 'patterns', 'Angeles', 'integration', 'payments', 'fundamental', 'testimony', 'version', 'village', 'mere', 'examples', 'concentration', 'Black', '21', 'engine', 'somewhere', 'deny', 'burning', 'library', 'Fort', 'tragedy', 'drinking', 'dear', 'unknown', 'vacation', '23', 'aspect', 'Lincoln', 'boats', 'skin', 'railroad', 'Cuba', 'garden', 'screen', 'bonds', 'stone', 'sold', 'apart', 'silence', 'personality', 'practically', 'Italian', 'periods', 'remarkable', 'expenses', 'lights', 'Q', 'welfare', 'creation', 'conscious', 'partly', 'rural', 'League', 'congregation', 'wearing', 'hero', 'decade', '1954', 'anger', 'resistance', 'threw', 'minister', 'numerous', 'review', 'Wright', 'nobody', 'movements', '60', 'Mantle', 'contrary', 'difficulties', 'equivalent', 'strike', 'association', 'Blue', 'Civil', 'percentage', 'missile', 'centuries', 'till', 'models', 'fears', 'recommended', 'seriously', 'Junior', 'identified', 'humor', 'blind', 'independence', 'Park', 'losses', 'birds', 'artery', 'pink', 'trend', 'fiction', 'stairs', 'wave', 'executive', 'Georgia', 'article', 'interior', 'precisely', 'functions', '22', 'concrete', 'Frank', 'engaged', 'provision', 'Africa', 'opposition', 'intended', 'welcome', 'Hall', 'courts', 'Watson', 'sports', 'somebody', 'contain', 'civil', 'ride', 'please', 'medium', 'cast', 'February', 'feels', 'depend', 
'impression', 'target', 'containing', 'load', 'shut', 'shares', 'referred', 'sorry', 'promised', 'curve', 'governor', 'Alexander', 'Edward', 'preparation', 'honest', 'Society', 'encourage', 'Institute', 'twelve', 'diameter', 'Pennsylvania', 'luck', 'panels', 'curious', 'promise', 'adopted', 'safety', 'fought', 'meat', 'native', 'bills', 'precision', 'supplies', 'expansion', 'sheet', 'weakness', 'includes', 'See', 'Medical', 'Big', 'Lucy', 'amounts', 'milk', 'employment', 'intelligence', 'Hans', 'climbed', 'Asia', 'sum', 'arranged', 'mounted', 'expensive', 'thickness', 'suffering', 'cup', 'boards', 'lies', 'accomplished', 'path', 'cross', 'captain', 'relative', 'admitted', 'sale', 'instrument', 'offers', 'band', 'calling', 'taxes', 'thousands', 'mold', 'slaves', 'burden', 'opinions', 'considerably', 'qualities', 'pounds', 'elsewhere', 'charges', 'shoes', 'Manchester', 'answers', 'Instead', 'terrible', 'legislation', 'singing', 'odd', 'Five', 'agent', 'valuable', 'approached', 'Adam', 'dangerous', 'environment', 'confusion', 'lovely', 'resulting', 'comfort', 'Mama', 'Hotel', 'interference', 'fun', 'explanation', 'occur', 'recognition', 'drama', 'lawyer', 'expenditures', 'bond', 'grown', 'Little', 'reader', 'nearby', 'Motors', 'P.', 'coat', 'artists', 'violence', 'ice', 'saved', 'lifted', 'confused', 'insisted', 'rapid', 'contribute', 'Any', 'eventually', 'driven', 'separated', 'readily', 'collected', 'count', 'essentially', 'fired', "you've", 'consists', 'darkness', 'Him', 'Indeed', 'believes', 'gradually', 'purchase', 'warning', 'emotions', 'oxygen', 'investigation', 'riding', 'flight', 'Lake', 'warfare', 'revolution', 'Class', 'i.e.', 'Again', 'dollar', 'automobile', 'concerns', 'Figure', 'enjoy', 'guy', 'washing', 'recorded', 'Nevertheless', 'smooth', 'raw', 'suffered', 'exists', 'spirits', 'fund', 'origin', 'obtain', 'varied', 'bag', 'catch', 'crossed', "ain't", 'reactions', 'angry', 'shore', 'Without', 'tend', 'mile', 'excess', 'increasingly', 'reflected', 'dirt', 'east', 'wisdom', 'identification', 'Place', 'seeking', 'wages', 'snake', 'pilot', 'porch', 'cloth', 'Adams', 'particles', 'bound', 'appointed', 'La', 'liquor', 'brain', 'foods', 'heads', 'weapon', 'sufficiently', 'possibilities', 'touched', 'helping', 'hate', 'acres', 'pace', 'mail', 'G.', 'Anne', 'switch', 'arts', 'Linda', 'pocket', 'replaced', 'author', 'Time', 'distinct', 'core', 'Rachel', 'listed', 'editorial', 'continues', 'extensive', 'ease', 'involves', 'Carl', 'describe', 'ships', 'Defense', 'salary', 'varying', 'distinction', 'comment', 'residential', 'Captain', 'hundreds', 'Kate', 'dealing', 'insurance', "Let's", 'mental', 'errors', 'constantly', 'continuous', 'loan', "'em", 'wire', 'remaining', 'tour', 'favorite', 'Francisco', 'Henrietta', 'urban', 'match', 'developments', 'holds', 'distinguished', 'tissue', 'storage', 'teach', 'visited', 'Will', 'communities', 'volumes', 'dancing', 'Besides', 'salt', 'opposed', 'psychological', 'democratic', 'reduction', 'pleased', 'shift', 'brush', 'Division', 'spending', 'Office', '17', 'proposal', 'Come', 'bread', 'Illinois', 'manufacturers', 'limits', 'universal', 'neighbors', 'Commerce', 'pride', 'shouted', 'long-range', 'Last', 'stepped', 'shared', 'focus', 'atoms', 'liberty', 'ring', 'threat', 'removal', 'demanded', 'younger', 'guns', 'Walter', 'sell', 'prominent', 'smoke', 'finger', 'writes', 'virtually', 'Gross', 'talent', 'permanent', 'Later', 'societies', 'Bob', 'arc', 'thrown', 'Democrats', 'notion', 'burned', 'cooling', 'painted', 'composed', 'discover', 'indicates', 
'marine', 'shear', 'throw', 'skill', 'steady', 'rough', 'Papa', 'prior', 'joy', 'throat', '40', 'forest', 'Atlantic', 'prison', 'marketing', 'Winston', 'rare', 'Uncle', 'Does', 'Orleans', 'display', 'seed', 'changing', 'existed', 'investment', 'missed', 'brings', 'fed', 'representative', 'everywhere', 'orchestra', 'happens', 'parallel', 'rode', 'afford', 'exception', 'provisions', 'rarely', 'anybody', 'objectives', "o'clock", 'downtown', 'transportation', 'award', 'allotment', 'represents', 'Good', 'revealed', 'attractive', 'surely', 'Communists', 'TV', 'express', 'derived', 'visual', 'automatic', 'inch', 'assured', 'regarding', 'trials', 'recall', 'Rayburn', 'anxiety', 'bars', 'offices', 'electrical', 'striking', 'Judge', 'Long', 'colleges', 'Attorney', 'Justice', 'necessity', 'muscle', 'languages', 'secretary', 'deeply', 'Life', 'participation', 'Lawrence', 'stranger', 'defined', 'funny', 'musicians', 'Mississippi', 'sake', 'legislative', 'glance', 'intense', 'Avenue', 'engineering', 'destroyed', 'considering', 'customers', 'Air', 'determination', 'communism', 'combined', 'Several', 'contributed', 'Out', 'comparable', 'controlled', 'lots', 'Education', 'dependent', 'reaching', 'gained', 'turns', 'Constitution', 'selection', 'agents', 'tables', 'voices', 'judge', 'represent', 'theater', 'transfer', "father's", 'holes', 'Actually', 'mostly', 'handed', 'contains', 'nineteenth', 'copy', 'studying', 'International', 'assumption', 'bringing', 'Today', 'sudden', 'primitive', 'Within', 'candidates', 'institution', 'Robinson', 'engineer', 'leaned', 'soldier', 'Japan', 'Pike', 'Thompson', 'external', 'Bay', 'mankind', '31', 'brothers', 'tool', 'Cousin', 'relationships', 'decide', 'Treasury', 'criticism', 'knees', 'Research', 'goals', 'Despite', 'eleven', 'thoroughly', 'guidance', 'flying', 'steel', 'scheduled', 'satisfactory', 'Governor', 'atomic', "What's", 'Was', 'Ohio', '200', "God's", "We'll", 'sharply', 'doctrine', 'corresponding', 'hill', 'hydrogen', 'Paul', 'fail', 'ages', 'anyway', 'strongly', 'listening', 'Roberts', 'chapter', 'illusion', 'sleeping', 'comfortable', 'enormous', 'anywhere', "they're", 'Would', 'authorized', 'forever', 'drawing', 'Chief', 'innocent', 'species', 'stronger', 'Much', 'conscience', 'serving', 'skills', 'distant', 'atom', 'stomach', 'deeper', 'admit', 'Holmes', 'organic', 'ears', 'pressures', 'briefly', 'track', 'iron', 'tremendous', 'improvement', 'newspapers', 'E', 'devices', 'handsome', 'finish', 'Roy', 'Public', 'civilization', 'tested', 'reply', 'destruction', 'magic', 'assure', 'Susan', 'constructed', 'swimming', 'male', 'approved', 'Center', 'handling', 'blocks', 'forgotten', 'regional', 'profession', 'hills', 'Senator', 'R', 'delivered', 'arrangements', 'belong', 'improve', 'resulted', 'noise', 'formation', 'furniture', 'thyroid', 'questionnaire', 'foam', 'edges', 'stick', 'chose', 'comedy', 'acts', 'fool', 'prime', 'restaurant', 'signed', 'Furthermore', 'Times', 'Journal', 'attempts', 'Jackson', 'mystery', 'accompanied', 'host', 'peoples', 'intention', 'cooperation', 'contribution', 'moves', 'crew', 'equipped', 'authorities', 'Spanish', 'specifically', 'effectively', 'managed', 'occupied', 'mixed', 'High', 'instant', 'supplied', 'completion', 'definite', 'observations', 'kinds', 'error', 'cleaning', 'feature', 'images', 'Peter', 'symbols', 'maturity', 'staining', 'random', 'seeds', "year's", 'desirable', 'edition', 'release', 'People', 'substantially', 'Jim', 'plot', 'variable', 'waters', 'Hardy', 'license', 'severe', 'attended', 'districts', 'stored', 
'beings', 'dressed', 'accounts', 'push', 'aim', 'Nor', 'coating', 'mood', 'schedule', 'spoken', 'Maris', 'hurt', 'characters', 'ratio', 'Chapter', 'affected', 'glad', 'guide', 'serves', 'recreation', 'Stanley', 'Matsuo', 'Jane', 'Atlanta', 'bombs', 'Hospital', 'Republic', 'jumped', 'outstanding', 'wear', 'worried', 'over-all', 'sees', 'doors', 'calculated', 'grade', 'automatically', 'experts', 'Conference', 'personally', 'output', 'guard', 'outlook', 'Look', 'technology', 'Pope', 'probability', 'yield', 'sequence', 'installed', 'Pont', 'sympathetic', 'eggs', 'claimed', 'tough', 'classic', 'regions', 'waste', 'accurate', 'grant', 'producing', 'prepare', 'patients', 'satisfied', 'inevitably', 'pleasant', 'readers', 'consequences', 'concert', 'Clark', 'agricultural', 'conclusions', 'phenomenon', 'paint', 'tongue', 'visiting', 'dirty', "we're", 'sounded', "haven't", 'Wednesday', 'accuracy', 'mad', '1962', 'disappeared', 'symbolic', 'challenge', 'consumer', 'Part', 'western', 'visible', 'nationalism', 'binomial', 'scholarship', 'Hill', 'dignity', 'lying', 'tells', 'tools', 'washed', 'sacred', 'coast', 'definition', 'myth', 'owned', 'stores', 'affect', 'mistake', 'Harvard', 'presumably', 'patent', 'multiple', 'tonight', 'emotion', 'exposed', 'filling', 'Earth', 'estimate', 'frequent', 'urged', 'swept', 'height', 'reasonably', 'Lee', 'self', 'tied', 'performed', 'mountains', 'bus', 'owners', 'illustrated', 'sister', 'axis', 'sending', 'isolated', 'Sept.', 'Samuel', 'sad', 'visitors', 'candidate', 'camera', 'calm', 'sir', 'drunk', 'row', 'normally', 'blame', 'meets', 'worship', 'tends', 'latest', 'Johnson', 'clerk', 'Can', 'clouds', 'alternative', 'dispute', 'decades', "today's", 'guest', 'raising', 'thereby', 'lists', 'paintings', 'processing', 'vary', 'realistic', 'logical', 'Canada', 'Harry', 'Baker', 'extension', 'bigger', 'retired', 'Neither', 'openly', 'preserve', 'Italy', 'bent', 'protect', 'stretched', 'pencil', 'Four', 'smell', 'context', "mother's", 'reception', 'desegregation', 'mechanical', 'suggestion', 'genuine', 'sympathy', 'listen', 'guilt', 'discovery', 'Catholics', 'benefits', 'cash', 'findings', 'gate', 'tragic', 'worst', 'survive', 'Andy', 'badly', 'unlike', 'Prokofieff', 'Jess', 'blow', 'scheme', 'massive', 'involving', 'demonstrated', 'grave', 'vehicle', 'sentence', 'inevitable', 'examine', 'folk', 'fewer', 'determining', 'affair', 'cards', 'equation', 'Supreme', 'leads', 'fallen', 'Never', 'Be', 'wash', 'suitable', 'alert', 'parked', 'Early', 'hunting', 'advised', 'healthy', 'phrase', 'beach', 'bone', 'knee', 'neutral', 'route', 'attempted', 'chlorine', 'seldom', 'Thursday', 'arrangement', 'routine', 'succeeded', 'presents', 'sugar', 'quantity', 'emergency', 'missing', 'performances', 'Take', 'bomb', 'admission', 'owner', 'favorable', 'branches', 'dying', 'namely', 'wise', 'roles', 'yard', 'filed', 'jacket', 'Too', 'connected', 'experimental', 'rejected', 'Poland', 'damage', 'Player', 'crime', 'tears', 'slipped', 'strictly', 'accident', 'suffer', 'cooking', 'Mark', 'proof', 'hurry', 'courage', 'stems', 'occasional', 'weak', 'forty', 'returning', 'e.g.', 'adjustment', 'fruit', 'covers', 'crowded', 'Holy', 'financing', 'awareness', 'interview', '26', 'Second', 'kids', 'reporters', 'occasionally', 'efficient', 'presentation', 'magazine', 'household', 'collective', 'duties', 'partner', 'F', 'burst', 'Indians', 'violent', "world's", 'fly', 'promote', 'Lady', 'joint', 'Pat', 'lunch', 'chicken', 'Next', 'initiative', 'Harold', 'bride', '19', 'instructions', 'artistic', 'testing', 
'covering', 'reflection', 'regardless', 'remote', 'gathered', 'effectiveness', 'circles', 'beer', 'conception', 'currently', 'grace', 'simultaneously', 'island', 'Square', "child's", 'ocean', 'Chandler', 'variation', 'crazy', 'concluded', 'remarked', 'Howard', 'gift', 'smiling', 'Scotty', 'Castro', 'Williams', 'elected', 'gesture', 'Gorton', 'inadequate', 'measurement', 'highway', 'receiving', '**zg', "nation's", 'shouting', 'locking', 'knocked', 'slavery', 'nevertheless', 'perfectly', 'corn', 'ending', 'ceiling', 'pages', 'poets', 'elaborate', 'emission', 'allowing', 'Dave', 'tossed', 'thermal', 'N', 'stuff', 'mature', 'fish', 'density', 'considerations', 'whereas', 'columns', 'discussions', 'poetic', 'Field', 'involve', 'armed', 'decline', 'returns', 'composer', 'ruled', 'dancer', 'Via', 'Had', 'saline', 'survey', "aren't", 'remark', 'counties', 'winning', 'Poet', 'Russians', 'barely', 'successfully', 'mud', 'Navy', 'so-called', 'fallout', 'senior', 'nights', 'excitement', 'respectively', 'quarter', 'frequencies', 'muscles', 'chamber', 'panel', 'strain', 'Kansas', 'long-term', 'printed', 'Communism', 'Pacific', 'gently', 'identical', 'spots', 'wedding', 'precise', 'Christianity', 'depending', '1952', 'naked', 'tape', 'skywave', 'invariably', 'facing', 'checked', 'parking', 'secondary', 'Prince', 'counter', 'Curt', 'enterprise', 'protected', 'transition', 'worker', 'temporary', 'reducing', 'excessive', 'widespread', 'Alex', 'Force', 'softly', 'piano', 'blanket', 'expert', 'historian', 'campus', 'eating', 'impressed', 'meal', 'farther', 'dates', 'conservative', 'heading', 'Sharpe', 'Franklin', 'occurrence', 'listened', 'Something', 'shock', 'confronted', 'tube', 'encountered', 'lumber', 'troubled', 'suspect', 'letting', 'selling', 'Mayor', 'Miriam', 'starts', 'Eddie', 'concentrated', 'absolute', 'ranging', 'romantic', 'Louisiana', 'crucial', 'complicated', 'sponsored', 'Dartmouth', 'reveal', 'matching', 'Over', 'Shakespeare', 'cuts', 'net', 'Van', 'stable', 'divine', 'network', 'whenever', 'Through', "You'll", 'Business', 'Rector', 'surprising', 'Johnny', 'Labor', 'skilled', 'abstract', 'Get', 'supper', 'substance', 'stem', 'scientists', 'instances', 'universities', 'survival', 'slave', 'competitive', 'falling', 'preceding', 'films', 'Professor', 'Woodruff', 'articles', 'fence', 'football', 'et', 'flux', 'extraordinary', 'virtue', 'superior', 'basement', 'markets', 'roll', 'movies', 'suspended', 'beef', 'mixture', 'consequence', 'Felix', 'lightly', 'Sen.', 'secure', 'grand', 'trips', 'sciences', 'transferred', 'trail', 'fishing', 'buying', 'Carleton', 'perform', 'Armed', 'Sherman', 'loans', 'entertainment', 'sewage', 'grinned', 'argued', 'utility', 'guilty', 'suggests', 'suburban', 'morality', 'Republicans', 'Brannon', 'hence', 'creating', 'shadow', 'replace', 'shots', 'According', 'Hence', 'furnish', 'Juniors', 'glasses', 'Assembly', 'era', 'recording', 'exciting', 'divorce', 'ethical', 'dancers', 'residence', 'modest', 'poured', 'delight', 'junior', 'signals', 'ear', 'dealers', 'constitute', 'comments', 'proportion', 'argue', 'fate', 'pressed', 'ignored', 'mainly', 'Bank', 'Others', 'O.', 'lesson', 'unconscious', 'movie', 'tasks', 'asleep', 'Industrial', 'barn', 'threatened', 'Ann', 'helpful', 'p', 'Morse', 'colored', 'extend', 'shorts', 'radical', 'cotton', 'Albert', 'purely', 'helps', 'strip', 'magnitude', 'Mickey', 'reminded', 'Brooklyn', 'evaluation', 'hoping', 'scenes', 'measuring', 'missiles', 'sang', 'consciousness', 'controls', 'bronchial', 'split', 'Until', 'deliberately', 
'commonly', 'citizen', 'fabrics', 'colony', 'voting', 'mechanism', 'wound', 'locked', 'plastics', 'Harris', 'laugh', 'discuss', 'offering', 'anxious', 'calendar', "They're", 'happening', 'Hal', 'demonstrate', 'representing', 'meetings', 'Roosevelt', 'specified', 'ballet', 'Yankees', 'structures', 'extending', 'introduction', '1955', 'southern', 'examined', 'newly', 'sand', 'coal', 'surfaces', 'Oct.', 'debate', 'cure', 'Santa', 'encouraged', 'libraries', 'satisfaction', 'peculiar', 'consistent', "you'd", 'profit', 'tight', 'warmth', 'sovereign', 'plastic', 'jurisdiction', 'pot', 'verse', 'defeat', 'polynomial', 'quarters', 'electronics', 'fourteen', 'talents', 'root', 'committed', 'industries', 'drugs', 'sensitivity', 'closing', 'optimal', 'lighted', 'Suppose', 'branch', 'contributions', 'unions', 'African', 'absent', 'Whether', 'solutions', 'electron', 'propaganda', 'classical', 'dimensions', '1956', 'apparatus', 'gay', 'vigorous', 'bench', 'graduate', 'questioned', 'innocence', 'expanding', 'sovereignty', 'Irish', 'pound', 'Ramey', 'studio', "let's", 'carbon', 'fled', 'N.', 'false', 'silver', 'tons', 'cow', 'arise', 'losing', 'rendered', 'enthusiasm', 'marks', 'witness', 'encounter', 'bare', 'hated', 'paused', 'Nobody', 'Interior', 'shapes', 'Beach', 'clarity', 'sacrifice', 'laughing', 'minimal', 'powder', 'upstairs', 'refer', 'associations', 'finance', 'profound', 'Christmas', 'seconds', 'Fred', 'examination', 'exclusive', 'discipline', 'Legislature', 'shade', 'proposals', 'flew', 'responses', 'consisting', 'engineers', 'friendship', 'horizon', 'representatives', 'V', 'gentle', 'prefer', 'surplus', 'merit', 'tea', 'holder', 'Marshall', 'samples', 'Germans', 'sing', 'scope', 'formerly', 'T.', 'operated', 'Dan', 'delicate', 'shoot', 'prevented', 'municipal', 'recovery', 'subsequent', 'dining', 'a.m.', 'combat', 'pulmonary', 'observation', 'maintaining', 'voted', 'damn', 'Coast', 'gathering', 'Myra', 'utterly', 'requirement', 'fees', 'exact', 'professor', 'limitations', 'clinical', 'applying', 'Dean', 'advantages', 'commission', 'acquire', 'insist', 'Shayne', 'council', 'prestige', 'distributed', 'outer', 'Clayton', 'salvation', 'ecumenical', 'opera', 'Davis', 'regulations', 'passes', 'permits', 'imagined', 'absolutely', 'maid', 'scattered', 'rushed', 'Delaware', 'veteran', 'Newport', 'reputation', 'pistol', 'occurs', 'mirror', 'Jefferson', 'drying', 'ladies', 'mighty', 'territory', 'Greenwich', 'Revolution', 'Men', 'slept', 'mountain', 'Forces', 'marriages', 'golf', 'doctors', 'weekend', 'diplomatic', 'Cady', 'Julia', 'grain', 'upward', 'Hughes', 'thanks', 'Obviously', 'reserved', 'dull', 'pursuant', 'perception', 'fiber', 'permission', 'passion', 'restrictions', 'bullet', 'corps', 'pointing', 'released', 'lively', 'critics', 'butter', 'unhappy', 'Jew', 'dreams', 'delightful', 'velocity', 'Science', 'Certainly', 'Palfrey', 'handled', 'staring', 'humanity', 'submarine', 'frightened', 'appreciate', 'compare', 'altogether', 'preferred', 'storm', 'paying', 'frames', 'Almost', 'tangent', 'Johnnie', 'Pathology', 'customer', 'victim', 'phenomena', 'reform', 'Bureau', 'Virgin', 'Young', 'overcome', 'invited', 'reserve', "Kennedy's", 'journey', 'mysterious', 'Mitchell', '1953', 'perspective', 'presently', 'cellar', 'identify', 'belt', 'builder', 'marginal', 'eternal', 'Colonel', 'anti-trust', 'inherent', 'reorganization', 'ranch', 'Middle', 'acquired', 'instruction', '27', 'gyro', 'injury', "She's", 'fellowship', 'justify', 'sophisticated', 'appointment', 'recalled', 'concepts', 'Greg', 'placing', 
'Billy', 'disturbed', 'meals', 'legend', 'farmers', 'outcome', 'magnificent', 'Coolidge', 'climate', 'Eileen', 'stockholders', 'suspicion', "we'll", 'Patchen', 'vice', 'approaching', 'eager', 'gross', 'Arlene', 'disaster', 'funeral', 'enemies', "We're", 'royal', 'Forest', 'shopping', 'Zen', 'ours', 'Make', 'revenues', 'ill', 'attorney', 'variables', 'conceived', 'emerged', 'electricity', 'stained', 'aesthetic', 'shirt', 'quoted', 'threatening', 'cents', 'desperate', 'lacking', 'Helva', 'textile', 'Sarah', 'Agreement', 'frozen', 'literally', 'Ekstrohm', 'cried', 'card', 'judgments', 'establishing', 'trembling', 'controversy', 'Southerners', 'dried', 'publicly', 'players', 'sheets', 'stars', 'O', 'abandoned', 'painful', 'Probably', 'intelligent', 'volunteers', 'aboard', 'magazines', 'attacked', 'Eugene', 'Bang-Jensen', 'feeding', 'peaceful', 'continually', 'flexible', 'wars', 'SBA', 'scholars', 'Pittsburgh', 'exhibit', 'medicine', 'torn', 'traders', 'productive', 'exceptions', 'directions', 'rational', 'promptly', 'inspired', 'proceeded', 'golden', 'reflect', 'responsibilities', 'Speaker', 'Whatever', 'assist', 'observe', 'switches', 'Valley', 'gentleman', 'disk', 'Village', 'mothers', 'cloud', 'D.C.', 'generous', 'breaking', 'roughly', 'shelters', 'surrounding', 'eliminate', 'reaches', 'composition', 'destructive', 'subtle', 'Mother', "O'Banion", 'ritual', 'dawn', 'attached', 'sessions', 'landing', 'glanced', 'builders', 'oral', 'prevention', 'gear', 'Nick', 'laboratory', 'allowances', 'besides', 'entry', 'temperatures', 'Art', 'stretch', 'Use', 'intervals', 'communications', 'transformed', 'publicity', 'settlement', 'lonely', 'grateful', 'Son', 'Aug.', 'Carolina', 'demonstration', 'Nixon', 'bath', 'worthy', 'dilemma', 'component', 'Tim', 'attracted', '300', 'NATO', 'prospect', 'supporting', 'partially', 'bearing', 'suits', 'Wally', 'Tilghman', 'Houston', 'delayed', 'frontier', 'vivid', 'king', 'Owen', 'harm', 'hanging', 'approaches', 'vein', 'Director', 'bird', 'designs', 'charm', 'Alec', 'departments', 'repeat', 'fist', 'beard', 'Barton', 'shortly', 'instruments', 'mobile', 'accused', 'recommend', 'Title', '70', 'adult', 'aimed', 'parade', 'sitter', 'drill', 'contracts', 'commander', 'Broadway', 'expressing', 'Social', 'assembled', 'cycle', 'wishes', 'seized', 'Sure', 'pupils', 'specimen', 'Development', 'insure', 'rice', 'wines', 'exclusively', 'Economic', 'accomplish', 'continuity', 'woods', 'magnetic', 'Blanche', 'accurately', 'operational', 'nearest', 'announcement', 'retirement', 'Portland', 'Mexican', 'yours', 'faint', 'noon', 'tubes', 'nervous', 'probable', 'impressions', 'drivers', 'scarcely', 'absorbed', 'entering', 'fog', 'constitutional', "they'd", 'theological', 'arrive', 'speaker', 'march', 'rigid', 'wildly', 'hang', 'automobiles', 'viewed', 'tail', 'wings', 'interpreted', 'applications', 'firing', 'recommendation', 'prayer', 'cook', 'managers', 'chances', 'Independence', 'Mason', 'racial', 'promotion', 'functional', 'Further', 'Northern', 'access', 'peas', 'promising', 'backed', 'allies', 'critic', 'wherever', 'conductor', 'Small', 'detergent', 'realization', 'honored', 'wholly', 'nowhere', 'vague', 'Six', 'Puerto', 'Theresa', 'cited', 'Moore', 'cocktail', 'neighboring', 'Post', 'planes', 'hired', 'qualified', 'desires', 'contacts', 'correspondence', 'tended', 'phases', 'diffusion', 'Very', 'actor', 'consisted', 'Premier', 'contest', 'subjected', 'prospects', 'enthusiastic', 'chin', 'excuse', 'freight', 'dealer', 'categories', 'full-time', 'philosophical', 'tribute', 
"You've", 'Ford', 'slid', 'Jersey', 'Arts', 'heating', 'lawyers', 'Everything', 'dive', 'habit', 'lengths', 'cigarette', 'channels', 'cleared', 'refrigerator', 'registration', 'mathematical', 'unexpected', 'heaven', 'enable', 'factory', 'Casey', 'Hollywood', "Brown's", 'widow', 'hungry', 'beliefs', 'meaningful', 'discrimination', 'continuously', 'civilian', 'bore', 'Cook', 'furnished', 'imitation', 'twenty-five', 'sink', 'realism', 'exposure', 'dedicated', 'Jean', 'musician', 'empirical', 'Tennessee', 'compete', 'Suddenly', 'thinks', 'lands', 'arrival', 'Spencer', 'Quiney', 'norms', 'fortune', '29', "He'd", 'occupation', 'harder', 'jump', 'luncheon', 'relieved', 'Westminster', "President's", 'Soon', 'rocks', 'radar', 'weekly', 'suggestions', 'Onset', 'belly', 'genius', 'excited', 'anti-Semitism', 'pond', 'plug', 'Fogg', 'outdoor', 'incredible', 'coverage', 'grades', 'treat', 'generations', 'astronomy', 'controlling', 'killing', 'snakes', 'colleagues', 'regime', 'Presidential', 'Tell', 'fibers', 'historic', 'wounded', 'terror', 'falls', 'charming', 'noble', 'Gen.', 'northern', 'narrative', 'stressed', 'depression', 'bases', 'trace', 'hurried', 'saddle', 'Madison', 'barrel', 'part-time', 'midnight', 'giant', 'manufacturer', 'curiosity', 'System', 'stupid', 'leather', 'Foreign', 'Pete', 'folklore', 'justified', 'code', 'pack', 'assistant', 'stuck', 'league', 'whispered', 'Maggie', 'Symphony', 'explicit', 'lock', 'Ruth', 'settle', 'category', 'Daily', 'assumptions', 'estimates', 'wake', 'exercises', 'Douglas', 'adjusted', 'employee', 'authors', 'Press', 'cheap', 'substrate', 'Father', 'attempting', 'governmental', 'resumed', 'Testament', 'zero', 'trading', 'protest', 'Palace', 'Maryland', 'Should', 'reflects', 'degrees', 'searching', 'uneasy', 'novels', 'plaster', 'Law', 'overseas', 'cat', 'Anglo-Saxon', 'formulas', 'voluntary', 'appreciation', 'targets', 'casual', 'foams', 'dressing', 'drinks', 'chairs', 'reliable', 'uncertain', 'altered', 'troubles', 'mess', 'sharing', 'manufacturing', 'rank', 'adding', 'destiny', 'bridges', 'Spirit', 'adults', 'tractor', 'servants', 'Green', 'honey', 'bombers', 'Vienna', 'Victor', 'economical', 'worn', 'freely', 'exploration', 'deck', '0', 'amateur', 'Sandburg', 'thrust', 'implications', 'sighed', 'sticks', 'pressing', 'sentiment', 'souls', 'herd', "she's", 'hide', 'ward', 'successes', 'metropolitan', 'whisky', 'aids', 'Music', 'Article', 'laughter', 'carries', 'unfortunate', 'grows', 'melting', 'joke', 'conspiracy', 'valid', 'urethane', 'relevant', 'biggest', 'sphere', 'fault', 'convention', 'ambiguous', 'attending', 'traveled', 'tie', 'trucks', 'occasions', 'preparing', 'participate', 'Don', 'Affairs', 'surrender', 'registered', 'dealt', 'convenient', 'Student', 'expects', 'Bobbie', 'autumn', 'lift', 'displacement', 'frequency', 'bother', 'retained', 'possessed', "shouldn't", 'totally', 'resist', 'tip', 'obligations', 'plates', "we'd", 'posts', 'happiness', 'Rev.', 'steadily', 'Meanwhile', 'alienation', 'triumph', 'middle-class', 'respects', 'structural', 'assessment', 'democracy', 'anticipated', 'frank', 'rifles', 'earliest', 'Baltimore', 'pulling', 'Ralph', 'Deegan', 'select', 'belongs', 'spare', 'planets', 'capabilities', 'loaded', 'panic', 'Royal', 'yelled', 'eliminated', 'S', 'insects', 'suited', 'temple', 'intentions', 'ownership', 'describes', 'gang', 'complained', 'Miller', 'insight', 'fifth', 'Miami', 'engagement', 'sampling', 'evidently', 'Alaska', 'Elaine', 'warned', "America's", 'regularly', 'Century', 'oxidation', 'factories', "B'dikkat", 
'boating', 'ideological', 'strategy', 'killer', 'Russ', 'hen', 'Marshal', 'Utopia', 'defend', 'investigations', 'tale', 'wheels', 'milligrams', 'charter', 'Johnston', 'Program', 'availability', 'certainty', 'Vermont', 'accepting', 'questioning', 'desperately', 'H', 'Home', 'behalf', 'Hoag', 'drawings', 'craft', 'urgent', 'concerts', 'backward', 'spectacular', 'pause', 'Wait', 'painter', 'shining', 'Morris', 'originally', 'Above', 'planetary', 'Consider', 'delay', 'luxury', 'rang', 'fluid', 'fortunate', 'shame', 'envelope', 'preliminary', 'nuts', 'Faulkner', 'particle', 'possession', 'neat', 'Joyce', 'protein', 'occurring', 'Apparently', 'hesitated', "John's", "weren't", 'Detroit', 'Which', 'balanced', 'linear', '1948', 'Being', 'characterized', 'Cromwell', 'miss', "children's", 'dances', 'classification', 'moderate', 'Steele', 'pertinent', 'Police', 'thank', 'persuaded', 'Shu', 'Lo', 'chiefly', 'cope', 'farmer', 'Ben', 'Taylor', 'Book', 'forming', 'Everyone', 'Warwick', 'teams', 'stern', "patient's", 'crack', 'Di', 'Queen', 'doubtful', 'rent', 'definitely', 'Nation', 'stiff', 'Wall', 'moreover', 'planet', 'assembly', 'reveals', 'Bobby', 'Reverend', 'recommendations', 'skirt', 'profits', 'mode', 'harmony', 'saving', 'addresses', 'statistics', 'smart', 'Go', 'Rock', 'stumbled', 'Capitol', 'Keith', 'rhythm', 'unlikely', '28', 'Podger', 'passengers', 'seated', 'Rico', 'Aristotle', "We've", 'computed', 'garage', 'inclined', 'lowered', 'heels', 'keeps', 'Could', 'corporations', 'pile', 'thorough', 'submitted', 'Mills', 'dishes', 'mutual', 'Penny', 'Kay', 'races', 'hits', 'obliged', 'substitute', 'bid', 'Completion', 'mate', 'arbitrary', 'Eastern', 'Party', 'Report', 'relatives', 'respond', 'despair', 'Note', 'governing', 'intimate', 'regiment', 'liberals', '34', 'meanings', 'racing', 'launched', 'mines', 'hearts', 'Philip', 'roots', 'Nov.', 'complement', 'hypothalamic', 'dynamic', 'Missouri', 'suspected', 'cabin', 'Hell', 'dare', 'Francis', 'Bridget', '1950', 'traditions', 'Michigan', 'loyalty', 'seventeen', 'Katanga', 'respectable', 'habits', 'Belgians', 'masses', 'crises', 'educated', 'Having', 'soap', 'ugly', 'displays', 'Maude', 'enjoyment', 'prospective', 'Vernon', 'conferences', 'Wagner', 'fans', 'Poor', 'replacement', 'Plato', 'struggling', 'enforced', 'anticipation', 'Allen', 'trends', 'Orchestra', 'intervention', 'motives', 'listeners', 'landscape', 'guided', 'orderly', 'lean', 'Major', 'distinctive', 'cream', 'crossing', 'temporarily', 'binding', 'ranks', "hasn't", 'Family', 'invention', 'figured', 'conditioned', '/', 'refund', 'indirect', 'Skyros', 'physiological', 'conversion', 'worries', 'cheek', 'flash', 'devil', 'grants', 'El', 'punishment', 'Opera', 'Corporation', 'forgive', 'breathing', 'converted', 'speeches', 'dedication', 'repair', 'Laura', 'guys', 'revolutionary', 'Spring', 'illness', 'fraction', 'monthly', 'banks', 'warrant', 'inventory', 'leaped', 'puts', '1949', 'merger', 'Freddy', 'Alabama', 'Pip', 'Upon', 'disposal', 'nerves', 'confirmed', 'probabilities', 'decent', 'votes', 'rehabilitation', 'pursue', 'lucky', 'Pa', 'Oedipus', '1946', 'package', 'patience', 'quarrel', 'impulse', 'crop', 'displayed', 'sidewalk', 'consequently', 'grains', 'expanded', 'grip', 'creatures', 'newer', 'melody', 'savage', 'encouraging', 'Often', 'wealth', 'relating', 'Michelangelo', 'shadows', 'negotiations', 'passages', 'pipe', 'patrol', 'alike', "Christ's", 'Dolores', 'input', 'attain', 'Heaven', 'solve', 'amazing', 'colorful', 'Notte', 'promises', 'similarly', 'sauce', 'sweat', 'clock', 
'procurement', 'residents', 'queen', 'solely', 'lungs', 'Jan.', 'Fund', 'Volume', 'Swift', 'wit', 'gentlemen', 'naval', 'brushed', 'bowl', 'clothing', "company's", 'wondering', 'B.C.', 'Florida', 'brave', 'tent', 'Arnold', 'emphasize', 'crawled', 'greeted', 'Point', 'auto', 'sixth', 'grabbed', 'injured', 'fitted', 'transformation', 'Outside', 'heroic', 'vacuum', 'exhibition', 'urge', 'acceptable', 'motel', 'criminal', 'attacks', 'commodities', 'define', 'Feb.', 'Cathy', 'beam', 'Manhattan', 'Donald', 'Borden', 'crash', 'receives', 'Inc.', 'strongest', 'screw', 'pitch', 'pitcher', 'endless', 'suitcase', 'secrets', '1951', 'competent', 'blonde', 'Abel', 'diet', 'shaking', 'tire', 'taxpayers', 'bold', 'manage', 'helpless', 'fascinating', '75', 'accordance', 'responded', 'precious', 'peered', 'adjustments', "city's", 'suite', 'minority', 'slide', 'purchased', 'authentic', 'inspection', 'convictions', 'smelled', 'plate', 'surrounded', 'numerical', 'murderer', 'ultimately', 'cottage', 'pupil', 'whip', 'assessors', 'boots', 'theatre', 'trap', 'sailing', 'lighting', 'toes', 'Land', 'verbal', 'distinguish', 'addressed', 'bet', 'allows', 'dominated', 'rises', 'guards', 'lobby', 'Paula', 'demanding', 'explains', 'decisive', 'witnesses', 'affects', 'notable', 'register', 'retail', 'Seven', 'reporter', 'depths', 'stroke', 'bones', 'hotels', 'remainder', 'cholesterol', 'foil', 'bundle', 'respective', 'ignore', 'fractions', 'Killpath', 'Letch', 'Road', 'arrest', 'overwhelming', 'hatred', 'scared', 'Lane', 'motive', 'applies', 'happily', 'Shore', 'lacked', 'submarines', 'fever', 'reactivity', 'salesmen', 'curves', 'tactics', 'jet', 'theirs', 'devotion', 'rid', 'Throughout', 'vector', 'startled', 'Third', 'vertex', 'united', 'Allied', 'consistently', 'Doc', 'gardens', 'broader', 'bend', 'midst', 'Means', 'nude', 'mercy', 'winds', 'bullets', 'rush', "state's", 'optical', 'venture', 'packed', 'petitioner', 'Salem', 'companion', 'Rather', 'tsunami', 'startling', 'lid', 'hidden', 'tales', 'males', 'swing', 'glued', 'supervision', 'earnings', 'insistence', 'Gun', 'clubs', 'Vincent', 'dim', 'fatal', 'essay', 'ruling', 'dated', 'Form', 'loves', 'solved', 'hypothalamus', 'stake', 'intermediate', 'lesser', 'produces', 'canvas', 'compromise', 'lb.', 'Haney', 'therapist', 'whereby', '1927', 'ladder', 'discharge', 'prisoners', 'label', 'Memorial', 'ambition', 'Mobile', 'foundation', 'civic', 'timber', 'cancer', 'poverty', 'assurance', 'Accordingly', 'slope', 'drank', 'dreamed', 'combinations', 'architect', 'condemned', 'strategic', 'spell', 'Rourke', 'slip', 'revenue', 'imposed', 'specialists', 'bay', 'exceed', 'realm', 'stresses', 'Mars', 'politicians', 'neatly', 'tangible', 'preserved', 'savings', 'variations', 'victims', 'Calif.', 'Unfortunately', 'Gov.', 'Foundation', "State's", 'tetrachloride', 'individually', 'assert', 'awake', 'Frederick', 'nest', 'Mexico', 'sketches', 'Pamela', 'mechanics', 'Chairman', 'Wisconsin', 'harbor', 'sheep', 'achievements', 'rolling', 'wiped', 'avoided', 'Stadium', 'Cuban', 'likes', 'loud', 'twisted', 'Rousseau', 'improvements', 'patents', 'aroused', 'indication', 'filing', 'enforcement', 'sera', 'champion', 'arrested', 'immortality', 'physics', 'attract', 'fantastic', 'rubbed', 'pronounced', 'policeman', 'representation', 'partnership', 'brass', 'adolescence', 'dairy', 'observers', 'Beyond', 'monument', '1945', 'religions', 'parlor', 'Modern', 'ratios', 'Sergeant', 'ceremony', 'officially', 'Said', 'effluent', 'servant', 'marry', 'privilege', 'humble', 'nonspecific', 'Saxon', 
'autonomy', 'roared', 'proceedings', 'comprehensive', 'Academy', 'mechanisms', 'founded', 'Harbor', 'Dick', 'pioneer', 'resting', 'drops', 'stolen', 'earned', 'commerce', 'aged', 'congregations', 'relax', 'predicted', 'deadly', 'shipping', 'sons', 'ranged', 'applicable', 'specialized', 'Kowalski', 'terribly', 'Room', 'jungle', 'systematic', 'escaped', 'drift', 'physically', 'earnest', 'subjective', 'iodine', 'Stephen', 'Remember', 'instantly', 'hospitals', 'flower', 'gallery', 'bathroom', 'investigated', 'businesses', 'marble', 'attributed', 'consistency', '50%', 'sung', "doctor's", 'cooperative', 'manufacture', 'objection', 'favored', 'corporate', 'tooth', 'disposed', 'kicked', 'livestock', 'crops', 'interrupted', 'Griffith', 'wildlife', 'ridiculous', 'deliver', '400', 'designer', 'slender', 'buried', 'phrases', "Can't", 'supreme', 'splendid', 'snapped', 'Islands', 'emphasized', 'submit', 'styles', 'directors', 'plainly', 'folks', 'informal', 'repeatedly', 'brick', '35', 'defended', 'gather', 'exceptional', 'classroom', 'supplement', 'gains', 'makers', 'priority', 'coach', 'Oxford', 'Yankee', 'tensions', 'overhead', 'lamp', 'Upton', 'judges', 'Helion', 'lap', 'Austin', 'abrupt', 'workshop', 'string', 'player', 'spectacle', 'epic', 'bargaining', 'underground', 'camps', 'Tokyo', 'strikes', 'acted', 'grams', 'sank', 'circuit', 'causing', 'Cobb', 'Columbia', 'bat', 'locations', 'assignments', 'tournament', 'naive', 'theology', 'bunk', 'interval', 'straightened', '80', 'tray', 'invitation', 'emerge', 'Barco', 'theories', 'brightness', 'proportions', 'talks', 'glory', 'Harvey', 'Religion', 'cosmic', 'voters', 'Stevie', 'puzzled', 'Arkansas', 'refers', 'shrugged', 'competence', 'eighteenth', 'fats', 'Langford', "men's", 'Grand', 'costumes', 'Kitti', 'mustard', 'Jazz', 'boss', 'Tommy', 'proceed', 'hostile', 'sixty', 'affection', 'basically', 'simpler', 'Meredith', 'mathematics', 'Week', 'speaks', 'Andrei', 'rugged', 'Ah', 'Few', 'lip', 'technological', 'Madden', 'aluminum', 'Organization', 'exercised', 'Keys', 'faster', 'consumption', 'compelled', 'daytime', 'succession', 'Southeast', 'differ', 'commented', 'blowing', 'birthday', 'sixteen', 'corners', 'announce', 'arose', 'History', 'movable', 'denial', 'Ernie', 'drug', 'shifted', 'situated', 'prize', 'theoretical', 'Stevens', 'toast', 'remarkably', 'faded', 'vicious', 'transport', 'neglected', 'captured', 'Lizzie', 'Banks', 'cooled', 'Fire', 'Rob', 'stocks', 'sensed', 'abruptly', 'convenience', 'ammunition', 'inquiry', 'praise', 'Golden', 'shorter', 'destroying', 'sums', 'K.', 'admired', 'shocked', 'seal', 'associate', 'Community', 'Cleveland', 'Hank', 'Pierre', 'artificial', 'brains', 'accelerometer', 'and/or', 'codes', 'motions', 'Giffen', 'interaction', 'basket', 'prevailing', 'Health', 'hunger', 'pays', 'collar', 'pushing', 'chaos', 'delivery', 'Keep', 'tanks', 'screaming', 'screamed', 'Lalaurie', 'substances', 'Fortunately', 'assign', 'Angie', 'Milton', 'holy', 'chart', 'designated', 'copies', 'sport', 'Geneva', 'Listen', 'flood', 'customs', 'separation', 'advances', 'circular', 'Baptist', 'hay', 'flowing', 'Seventh', 'squeezed', 'Usually', 'renewed', 'print', 'alternatives', 'Lao', 'departure', 'preaching', 'desert', 'corridor', 'interviews', 'declined', 'expecting', 'infinite', 'Ballet', 'Gulf', 'bears', 'formidable', 'reporting', 'crystal', 'checks', 'accompanying', 'entries', 'guitar', 'diplomacy', 'defensive', 'acceleration', 'cracked', 'Show', 'Far', 'cap', 'barriers', 'balls', 'flame', 'Horn', 'editors', 'scientist', 'intersection', 
'Benson', 'murmured', 'acquisition', 'obscure', 'undertaken', 'compensation', 'undoubtedly', 'Lieutenant', 'rests', 'followers', 'violation', 'pastor', 'stretching', 'milling', 'Ma', 'reverse', 'shifts', 'present-day', 'loyal', 'enjoying', 'staying', 'supernatural', 'females', 'beloved', 'stall', 'Yeah', 'brand', 'indifference', 'transom', 'holiday', 'medieval', 'Otherwise', 'merchants', 'throwing', 'pockets', 'resentment', 'surprisingly', 'mistaken', 'hypothesis', 'campaigns', 'crown', 'wing', 'stripped', 'illustrate', 'progressive', 'molecular', 'historians', 'neighborhoods', 'Connecticut', 'chorus', 'uncle', 'lest', 'seasons', 'wagons', 'absurd', 'eighth', 'logic', 'aggressive', 'fan', 'visits', 'forgot', 'notions', 'implied', 'resolved', 'formulation', 'Security', 'dome', 'distances', 'counted', 'Raymond', 'Standard', 'cats', 'awful', 'anniversary', 'cops', 'consulted', 'Nadine', 'lit', "They'll", 'performing', 'capture', 'anonymous', 'Fulton', 'awards', 'quote', 'Dec.', 'persuade', 'god', 'Please', 'well-known', 'inquiries', 'gin', 'Special', 'sunlight', 'Frankie', 'rested', 'suicide', 'bureau', 'Hamilton', 'via', "woman's", 'friction', 'consist', 'v.', 'Carroll', 'intuition', 'jail', 'underlying', 'vaguely', 'Ada', 'Burma', 'sole', 'socialism', 'formally', 'vitality', 'popularity', 'BOD', 'awarded', 'explosive', 'belonged', 'Krim', 'Christians', 'Trevelyan', 'slowed', 'Love', 'loudly', 'Moreland', 'shaped', 'describing', 'Simms', 'kingdom', 'chip', 'bitterness', 'Anniston', 'erected', 'plantation', 'spontaneous', "husband's", 'TSH', 'useless', 'voyage', 'photograph', 'grasp', 'Cape', 'amendment', 'fuel', 'possess', 'palm', 'comparative', 'heroes', 'asks', 'candle', 'engines', 'preservation', 'muttered', 'damned', 'shops', 'injustice', 'import', 'Mission', 'Scott', 'flexibility', 'flag', 'instructed', 'Declaration', 'resume', 'bleeding', "boy's", 'Walker', 'Right', 'Anthony', 'Henri', 'mailed', 'youngsters', 'broadcast', 'ideals', '500', 'tracing', 'Mae', 'confession', 'confident', 'weary', "he'll", 'heated', 'asserted', 'secants', 'spreading', 'orange', 'reflecting', 'translated', 'detectives', 'keys', 'installations', 'Code', 'inquired', 'crouched', 'Free', 'Stein', 'allocation', 'I.', 'inserted', 'orientation', 'ticket', 'wanting', 'horror', 'qualifications', 'shell', "Hudson's", 'synthesis', 'priest', 'operand', 'contents', 'orbit', 'Willis', 'secular', 'fences', 'inability', 'Dickens', 'lessons', 'severely', 'pitching', 'overnight', 'matched', 'tap', 'wives', 'cylinder', 'gambling', 'Denver', 'satisfy', 'referring', 'delighted', 'chores', 'buffer', 'traveling', 'slammed', 'mistakes', 'uncertainty', 'Nelson', 'forehead', 'conformity', 'beautifully', 'adjust', 'earn', 'legislators', 'kiss', 'persistent', 'implies', 'unto', 'telephoned', 'collect', 'finest', 'trim', 'portrait', 'Me', 'fame', 'refuse', 'sketch', 'breed', 'Handley', 'legitimate', 'sites', 'trails', 'spotted', 'Payne', 'costly', 'rage', 'proposition', 'cockpit', 'selective', 'humorous', 'philosopher', 'improving', 'taxi', 'fee', 'Gray', 'influenced', 'raises', 'sins', 'museum', 'consent', 'draft', 'gown', 'hitting', 'Someone', '1910', 'graph', 'stride', 'financed', 'liver', 'gotten', 'partners', 'analytic', 'payroll', 'Hawaii', 'curriculum', 'Byron', 'outfit', "o'", 'Viet', 'aims', 'rows', 'disastrous', 'loop', 'Hammarskjold', 'lined', 'alien', 'clearing', 'bass', 'pen', 'statue', 'opens', 'mortgage', 'flavor', 'fires', 'tile', 'silently', 'metaphysical', 'confined', 'confirm', 'squad', 'thyroglobulin', 'bunks', 
'abuse', 'camping', 'minimize', 'respiratory', 'odd-lot', 'angular', 'successor', 'Brooks', 'notably', 'discussing', 'Up', 'operators', 'bounced', 'Trial', 'farms', 'compounds', 'readings', 'closet', 'illustration', 'alarm', 'thee', 'span', 'rail', 'graduates', 'chemistry', 'documents', 'excluding', 'ambitious', 'apartments', 'pottery', 'devised', 'jar', 'lover', 'ivory', 'Following', 'translation', 'Stuart', 'shake', 'foolish', 'Mount', 'Occasionally', 'Clay', 'leveling', 'Asked', 'abandon', 'hers', 'messages', 'dangers', 'bulk', 'deserves', 'Styka', 'thereof', 'occupy', 'obligation', 'spun', 'cavalry', 'unnecessary', 'Claire', 'commit', 'diseases', 'stating', 'viewpoint', 'Gavin', 'Power', 'Way', 'None', 'Revenue', 'switched', 'imaginary', 'correlation', 'vertical', 'M', 'conclude', 'isolation', '42', 'fringe', 'twist', 'Greece', 'bitterly', 'Cranston', 'serum', 'expectations', 'piled', 'link', 'compatible', 'Already', 'employers', 'clover', 'spray', 'endurance', 'appearing', 'translate', 'whites', 'priests', 'conjunction', 'photographs', 'anyhow', 'bunch', 'weighed', 'Geometric', 'titles', 'cowboy', 'strengthen', 'sober', 'justification', 'dusty', 'claiming', 'Nicolas', 'requiring', 'hiding', 'Back', 'fitting', 'Hanford', 'downward', 'cherished', 'Museum', 'audiences', 'nurse', 'inherited', 'allowance', 'star', 'morale', 'ample', 'gonna', 'suburbs', 'wasted', 'adequately', 'Ray', 'circulation', 'fancy', 'Montgomery', '1/2', 'optimum', 'Military', 'Superior', 'ruined', 'disturbing', 'halfway', 'Rose', 'maintains', 'jaw', 'voltage', 'Fifth', 'opium', 'waved', 'infectious', 'geographical', 'quest', 'insights', 'glimpse', 'Similarly', 'goodness', 'indirectly', 'steam', 'thereafter', 'differently', 'reads', 'capita', 'Sec.', 'Gilborn', 'ate', 'Line', 'simplicity', 'commitments', 'elementary', 'Temple', 'lyrics', 'preceded', "what's", 'tore', 'deals', 'Sea', 'stove', 'landed', 'disappointment', 'sentimental', 'staged', 'Vice', 'contours', '1944', 'segregated', 'Havana', 'joining', 'self-help', 'clue', 'Continental', 'governed', 'blast', 'crying', 'attraction', 'guerrillas', 'Englishman', 'daylight', 'pursuit', 'Common', 'forbidden', 'divisions', 'thru', 'disappointed', 'Berger', 'client', 'tense', 'cracking', 'Marine', 'stature', 'tourist', 'Northwest', 'feasible', 'essence', 'farming', 'missions', 'Amy', 'specialist', 'seemingly', 'restricted', 'miracle', 'cease', 'Waddell', 'ambitions', 'Sloan', "women's", 'dragged', 'trick', 'practicing', 'verdict', 'purchases', '1940', 'anchor', 'Ryan', 'optimism', 'Thank', 'agreements', 'Stevenson', 'penetration', 'ninth', 'crude', 'sturdy', 'needle', 'indicating', 'trains', 'Juanita', 'tightly', 'ties', 'battens', 'dug', 'decrease', 'deliberate', 'novelist', 'Roger', 'stimulus', 'stirring', 'nut', 'Bridge', 'Ulyate', 'Dutch', 'dimension', 'glorious', 'vanished', 'Ten', 'substituted', 'commanded', 'unemployment', 'Naturally', 'Selden', 'tract', 'burn', 'Cambridge', 'glow', 'muscular', 'afterward', 'exhibits', 'pools', 'sustained', 'textiles', 'networks', 'differential', 'discouraged', 'intensive', 'whiskey', 'Local', 'intend', 'imports', 'stirred', 'rope', 'soils', "Department's", 'touching', 'palace', 'vs.', 'batting', 'reward', 'devote', 'virtues', 'parks', 'explosion', 'Powell', 'assault', 'kissed', 'catcher', 'meaningless', 'dropping', 'Al', 'battery', 'reasoning', 'Norman', 'memories', 'Jerry', 'excellence', 'realities', 'nineteenth-century', 'tentative', 'preventive', "Drug's", 'biological', 'boost', 'validity', 'gradual', 'transmission', '1912', 
'fluids', 'plunged', 'constructive', 'shower', 'Steinberg', 'onion', 'ham', 'ashamed', 'bottles', 'potato', 'Morton', 'contributing', 'deserted', 'Elizabeth', 'Carla', 'kick', 'Dandy', "wife's", 'Oriental', 'ballistic', 'razor', 'wider', 'Aegean', 'dish', 'dragging', 'p.', 'printing', 'distinctions', 'defeated', 'productivity', 'sweep', 'shipments', "Pont's", 'oppose', 'Homeric', 'Stalin', 'apt', 'potatoes', 'prolonged', 'Katie', 'collage', 'sights', 'Determine', 'shortage', 'distress', 'heavier', 'comparatively', 'tones', 'bees', 'Laboratory', 'accustomed', 'domination', 'Buck', 'opponent', 'fabric', "Fromm's", 'alter', 'Eugenia', 'explanations', 'Dark', 'linked', 'senses', 'invasion', 'sheriff', 'Somehow', 'judged', 'coalition', 'texture', 'hire', 'onset', 'commonplace', "Hardy's", 'sponsor', 'guessed', 'Fromm', 'Congressman', 'misery', 'peak', 'denominations', 'refusal', 'railroads', 'considers', 'clever', 'heritage', 'alongside', 'exhausted', 'sooner', 'continuation', 'doorway', 'employes', 'Internal', 'Socialist', "country's", 'cared', 'cafe', 'Minister', 'Anything', 'Festival', 'pill', 'writings', 'lung', 'owed', 'operates', 'optimistic', 'Chamber', 'vegetables', 'packing', 'doubts', 'Burton', 'juvenile', 'tappet', 'flashed', 'remind', 'actors', 'reactionary', 'knock', 'competing', 'arranging', 'vessel', 'Blackman', 'dissolved', 'horrible', 'creature', 'parent', 'swinging', 'damp', 'originated', 'termed', 'Israel', 'cubic', 'react', 'silly', 'Biblical', 'proves', 'pin', 'petition', 'personalities', 'picnic', 'committees', 'Radio', 'Madame', "they'll", 'unpleasant', 'mentally', 'freezing', 'smashed', 'scored', 'lecture', 'qualify', 'helium', 'burns', 'drain', 'Lublin', 'physician', 'straw', 'arguments', 'arteries', 'Library', 'indispensable', 'sociology', 'socially', 'eighteen', "Russia's", 'literal', 'proportional', 'carved', 'wrapped', 'selections', 'nearer', 'eligible', 'driveway', 'bow', 'coupled', 'convicted', 'upset', 'survived', 'readiness', 'Marty', 'strips', '10,000', 'superiority', 'fails', 'cleaned', 'Between', 'centered', 'molding', 'loses', 'leap', 'threshold', 'deemed', 'fruits', 'producers', 'wax', 'fighters', 'dismissed', 'Diane', 'odor', 'province', 'drag', 'entertain', 'Model', 'highways', 'Emory', 'supports', 'succeed', 'appearances', 'careers', 'penetrating', 'lemon', 'valued', 'Fourth', 'commands', 'folded', 'one-third', 'bubbles', 'rescue', 'revolver', 'determines', 'Daniel', 'Speaking', 'width', 'Dominican', 'retreat', 'Churches', 'hats', 'expedition', 'rounded', 'oldest', 'flung', 'debut', 'celebrated', 'fury', 'attach', 'Eve', 'vulnerable', 'realtors', 'conceive', 'sweater', 'hastily', 'Swiss', 'influences', 'pit', 'Advisory', 'proceeds', 'boxes', "Plato's", 'combine', 'thoughtfully', 'seats', 'dislike', 'complaint', 'expressions', 'candy', 'ceremonies', 'mild', 'Command', 'Garryowen', 'scrutiny', 'connections', 'swiftly', 'reluctant', 'boundaries', 'Belgian', 'administered', 'achieving', 'heater', 'counsel', 'ignorance', 'policemen', 'Greville', 'coordination', 'Yalta', 'prescribed', 'stare', 'Amen', 'adventure', 'beaches', 'flights', 'sincere', 'capability', 'attic', 'Picasso', 'unfortunately', 'swore', 'generator', 'paths', "He'll", 'pie', 'Welch', 'criticized', 'AP', 'soup', 'feared', 'pine', 'pan', 'analyzed', 'Theodore', 'dared', 'threats', 'honors', 'python', 'usage', 'Behind', 'fix', 'delegates', 'woke', 'touches', 'preparations', 'intact', 'Nassau', 'bothered', 'cruel', 'lion', 'executed', 'short-term', 'Song', 'True', '15th', 'projected', 'border', 
'pairs', 'Beverly', 'marching', 'climax', 'momentum', '45', 'Water', 'illuminated', 'backs', 'toll', 'fork', 'cups', 'Eichmann', 'upright', 'Kent', 'pursued', 'cop', 'Okay', 'husbands', 'conjugates', 'chill', 'sizable', 'picking', 'administrator', 'borrowed', 'organize', 'Angels', 'Jessica', 'pickup', 'vigor', 'grim', 'statistical', 'tobacco', 'Beowulf', 'Edythe', 'Aunt', 'spectra', 'Start', 'Kentucky', 'blank', 'relaxed', 'Harlem', 'Renaissance', 'favorably', 'amended', 'prone', 'clung', 'uncomfortable', 'daughters', 'arises', 'Confederate', 'deviation', 'contempt', 'treats', 'Joan', 'painfully', 'indications', 'Jay', 'shallow', 'rancher', 'Export-Import', 'protective', 'accommodate', 'Shall', "industry's", 'ruin', 'annually', 'scenery', 'reforms', 'purchasing', 'profitable', 'eliminating', 'solitary', 'nails', 'bathing', 'colonial', 'Additional', 'bell', 'nursing', 'clearer', 'seventh', 'tickets', "Jess's", 'coordinated', 'bark', 'immense', 'aqueous', 'paragraphs', 'disapproval', '$1', 'summary', 'uniforms', 'heap', 'caution', 'intent', 'singular', 'counts', "Khrushchev's", '1947', 'publications', 'Ages', 'proteins', 'drums', 'pro', 'speeds', 'appealing', 'resonance', 'engage', 'Garth', 'employer', 'sensation', 'armies', 'Recently', 'evenings', 'pains', 'sustain', 'spectrum', 'yarn', 'Representatives', 'emerging', 'decomposition', 'pianist', 'fig.', 'manners', 'producer', 'organs', 'superb', 'breeze', 'draws', 'amusing', 'encouragement', 'rupees', 'V.', 'Alice', 'Control', 'penalty', 'Bari', 'bored', 'horn', 'alliance', 'Iliad', 'Egypt', 'passenger', 'frightening', 'locate', 'Protestants', 'Theater', 'Columbus', 'approve', 'framed', 'habitat', 'diagonalizable', 'applause', 'remembering', 'trustees', 'structured', 'conditioning', 'symptoms', 'significantly', 'sensible', 'participating', 'foundations', 'gradient', 'feathers', 'answering', 'instinct', 'discretion', 'disclosed', 'revised', 'irrelevant', 'professors', 'failing', 'fastened', "town's", 'Check', 'judicial', 'polished', 'monopoly', 'loving', 'enterprises', 'shells', 'shoe', 'Hodges', 'celebration', 'Planning', 'merits', 'transit', 'odds', 'lawn', 'cannery', 'restored', 'hopeless', 'gum', 'Irenaeus', "people's", 'apprentice', "day's", 'affixed', 'oils', 'rivers', 'cafeteria', 'countless', 'dwelling', 'smallest', 'Utopian', 'mileage', 'cousin', 'beaten', 'Space', 'eagerly', 'Put', 'strings', 'files', 'Below', 'superintendent', 'Gallery', 'arrow', 'Commissioner', 'lever', 'restrained', 'preferably', 'Tory', 'collecting', 'imply', 'solids', 'insert', 'dean', 'supplying', 'Nam', 'references', 'fictional', 'Ed', 'permanently', 'modified', 'Greene', 'ordering', 'Light', 'classified', 'tumbled', 'dumb', 'singers', 'Malraux', 'abilities', 'fits', 'blade', 'familiarity', 'averaged', 'Empire', 'conducting', 'relieve', 'pint', 'Nazi', 'Rep.', 'invitations', '65', 'monk', "we've", 'explaining', 'adopt', 'prayers', 'disposition', 'fists', 'bull', 'clarify', 'investigators', 'lend', 'Naval', 'moonlight', 'Telegraph', 'cluster', 'associates', 'remedy', 'separately', 'flames', 'gaining', 'understandable', 'Using', 'brutality', 'monstrous', 'involvement', 'statewide', 'port', 'miserable', 'Bishop', 'swift', 'Assistant', 'nighttime', 'cone', 'congressional', 'dictatorship', 'Birmingham', 'gates', 'acid', 'Word', 'Turning', 'utter', 'lowest', 'prose', 'Children', 'monitoring', 'depreciation', 'Oklahoma', 'pepper', 'derive', 'Truman', 'romance', 'reviewed', 'dough', 'Minnesota', 'Packard', 'suspicious', 'colt', 'Rod', 'rental', 'athletic', 
'casually', 'lasting', 'static', 'hormone', 'debt', 'Springs', 'plasma', 'par', 'owns', 'businessmen', 'rebellion', 'cows', 'Sox', 'imaginative', 'defending', 'hut', 'frieze', 'taxed', 'Comedie', 'Glendora', 'Helen', 'steep', 'implicit', 'fellows', 'appeals', 'impersonal', 'paramagnetic', 'sentences', 'Lucille', 'ideology', 'commitment', 'behave', 'statutory', 'behaved', 'Doctor', 'sinister', '800', 'pole', 'curled', 'Ireland', 'correctly', 'treasurer', 'ethics', 'economics', 'observer', 'Thayer', 'assets', 'Figures', 'handy', 'pending', 'barbecue', 'sells', 'waving', 'publishing', 'Garibaldi', 'accelerated', 'handful', 'walks', 'nuclei', 'manifold', 'streetcar', 'witnessed', 'gifted', 'Generally', 'Ambassador', 'roar', 'collapsed', 'stimulation', 'abundance', 'integral', 'curb', 'solar', 'McClellan', 'safely', '1900', 'Scotland', 'A.L.A.M.', 'stopping', 'breakdown', 'N.Y.', 'G', 'Copernicus', 'twentieth', 'Women', 'unaware', 'Ave.', 'litigation', 'maps', 'rely', 'sprang', 'Along', 'Somewhere', 'subsection', 'opposing', 'couples', 'omitted', 'Garden', 'satisfying', 'Piepsam', 'lantern', 'tries', 'carpet', 'undertake', 'mutually', 'abstraction', 'portable', 'leveled', 'liberalism', 'fond', 'Order', 'brooding', 'Jeep', 'assuming', 'proclaim', 'null', 'exerted', 'exaggerated', 'grin', 'beating', 'Building', 'spur', 'Sally', 'rounds', 'Arlen', 'programing', 'ethnic', 'Hart', 'tissues', 'evaluate', 'equilibrium', 'insane', 'rug', 'solidarity', 'typically', 'charcoal', 'Colorado', 'ton', 'crimes', 'treaty', 'restless', 'invented', 'forcing', 'Whenever', 'modernization', 'nursery', 'enters', 'autistic', 'outset', 'expand', 'biography', 'psychology', 'tips', 'convey', 'linguist', 'Dad', 'Death', 'champagne', 'Gardens', 'spokesman', 'beams', 'Democrat', 'Hand', 'rockets', 'Prairie', 'Night', 'doses', 'missionary', 'cruelty', 'protested', 'neighbor', 'Guard', 'Lester', 'Food', 'squares', 'fusion', 'defects', 'guaranteed', 'experimentation', 'tumor', 'enabling', 'Unless', 'radically', 'elderly', 'Forests', 'manpower', 'Members', 'Products', 'resemblance', 'perceive', 'rubber', 'conditioner', 'undue', 'sincerity', 'planners', '32', 'Yale', 'phosphor', 'repetition', 'belonging', 'Methodist', 'differed', 'acute', 'Somebody', 'begged', 'creates', 'stability', 'progressed', 'ace', 'Ludie', 'objections', 'lectures', 'casework', 'objected', 'dot', 'Hope', 'cake', 'scores', 'earnestly', 'underwater', 'Indiana', 'mix', 'Lou', 'customary', 'complexity', 'politician', 'toilet', 'invariant', 'GOP', 'unfair', 'placement', 'Argiento', 'sector', 'blockade', 'patch', 'screwed', 'posture', '$600', 'cereal', 'forgiveness', 'shattered', 'elegant', 'decay', "officer's", 'Athabascan', 'WTV', 'incomplete', 'matches', 'deepest', 'gloom', 'vigorously', 'appealed', 'afternoons', 'therapeutic', 'contradiction', 'selecting', 'Hino', 'sue', 'communicate', 'analogy', 'Kohnstamm-positive', 'Kohnstamm', 'scream', 'X', 'compulsivity', 'document', 'corruption', 'sealed', 'roleplaying', 'cheeks', 'visitor', 'participated', 'Lafayette', 'tempted', 'computer', 'thread', 'execution', 'tub', 'meanwhile', 'lagoon', 'Ill.', 'Mays', 'quit', 'vocal', 'embrace', 'propose', 'likewise', 'ultraviolet', 'praised', 'consumed', 'influential', 'Hills', 'drainage', 'compass', 'Oersted', 'detection', 'deputies', 'receiver', 'inhabitants', 'antenna', 'erect', "night's", 'Graham', 'graduated', 'promoting', 'bacterial', 'surviving', 'A.M.', 'infantry', 'remembers', 'standpoint', 'media', 'diverse', 'Open', 'Low', "Pip's", 'boundary', 'necessities', 
'sheer', 'suburb', 'ego', 'adapted', 'Inquirer', 'slightest', 'absorb', "She'd", 'induced', 'suggesting', 'Anderson', 'Tribune', 'Artists', 'lieutenant', 'clergy', 'explore', 'world-wide', 'independently', 'Ask', 'Market', 'realizing', 'whisper', 'compounded', 'Richardson', 'demographic', 'Cut', 'presumed', 'youngest', 'Persians', 'tilted', 'Winchester', 'swallowed', 'unstructured', 'Anyone', 'tourists', 'eaten', 'pray', 'Chris', 'Charter', 'tank', 'spokesmen', 'coincide', 'trailers', "Foundation's", 'deduct', 'cooler', 'lately', 'bastards', 'Income', 'prayed', 'Richmond', 'amid', 'Sansom', 'affirm', 'aspirations', 'blocked', 'performers', 'Monsieur', 'lens', 'inning', 'tennis', 'ridge', 'prosperity', 'harsh', 'solemn', 'gaze', 'employ', 'Switzerland', 'Rockefeller', 'strokes', 'Station', 'Hunter', 'downstairs', 'nilpotent', 'stereotype', 'crashed', 'mare', 'adventures', 'intersections', 'anti-slavery', 'McFeeley', 'Andrus', 'manned', 'gossip', 'leaning', 'exclaimed', 'blew', 'suspension', 'sisters', 'combustion', 'dolls', 'decides', 'pro-Western', 'magical', 'fulfillment', '2:36', 'purse', 'unlocked', 'ironic', 'hymen', 'meadow', 'equality', 'partisan', 'enacted', 'Electric', 'perceived', 'Garry', 'trot', 'welcomed', 'processed', 'Purdew', 'cane', 'Recent', 'intellectuals', 'fearful', 'Churchill', 'Friends', 'expended', 'buddy', 'dental', 'limp', 'cm.', 'Meltzer', '$500', 'festival', 'detected', 'joints', 'Data', 'fountain', 'Bryan', 'controversial', 'two-thirds', 'weep', 'farewell', 'luminous', 'yielded', 'entertaining', "Women's", 'Federation', 'unwed', 'believing', 'interpretations', 'choices', 'islands', 'unlimited', 'secant', 'Country', 'restaurants', 'halted', 'challenging', 'cumulative', 'fantasy', 'whipped', 'Brassnose', 'insect', 'Fine', 'acknowledged', 'installation', 'egg', 'wrinkled', 'functioning', 'legislature', '1943', 'Max', 'staggered', 'durable', 'dialogue', 'Grant', 'carryover', 'chapters', 'incest', 'passions', 'enabled', 'fleet', 'hairs', 'Tri-State', 'sizes', 'honestly', 'pension', 'bubble', 'purity', 'vessels', 'discount', 'seeming', 'beds', 'Better', 'landlord', 'Gabriel', 'kicking', 'diversity', 'refrigeration', 'blues', 'parochial', 'initiated', 'Korea', 'leaf', 'porous', "Lord's", 'Drive', 'virus', 'Cappy', 'overall', 'incentive', 'defenses', 'stones', 'inventories', 'Delphine', 'Andrew', 'extends', 'Farm', 'Southwest', 'Feathertop', 'stereo', 'pleading', 'pretending', 'Wayne', 'heights', 'liquidation', 'temptation', 'Try', 'cigarettes', 'Hetman', 'louder', 'deficit', 'Mexicans', 'baptized', '1913', 'cm', "Steele's", 'stubborn', 'elevator', 'cab', 'Guam', 'Yang', 'Seeing', 'focused', 'snap', 'foremost', 'idle', 'insoluble', 'distinctly', 'correspondent', 'terminate', 'statute', 'supposedly', 'quaint', 'incorporated', 'state-owned', 'absorption', 'contended', 'irony', 'blindness', 'resident', 'Ptolemaic', 'maker', 'Cooper', 'acknowledge', 'attendant', 'boil', 'urgency', 'Dwight', '20th', 'painters', 'dice', 'lighter', '104', 'alcohol', 'calculation', 'agriculture', 'composite', 'spectators', 'breaks', 'adolescent', 'episode', 'bastard', '$100', 'Bulletin', 'smoothly', 'adjacent', 'hardened', 'maids', "Trevelyan's", 'layer', 'terminal', 'map', 'blades', 'copper', 'arriving', 'Future', 'molded', 'loosely', 'Atlas', 'cabinet', 'Everybody', 'crossroads', 'floors', 'sweeping', 'neon', 'gods', 'newest', 'compact', 'diagnosis', 'transferor', 'mythological', 'hollow', 'masters', 'theatrical', 'formulaic', '1.5', 'undergoing', 'unadjusted', 'evidenced', 'hopeful', 
'furiously', 'Harmony', 'Benjamin', 'Monroe', 'infrared', 'grab', 'individualism', 'tappets', 'traced', 'everyday', 'swollen', 'Selkirk', 'requested', 'opponents', 'passionate', 'convert', 'territorial', 'Taking', 'shelf', 'Lumumba', 'Metropolitan', 'raced', 'chewing', 'ignorant', 'wealthy', 'grandfather', 'resolve', 'lime', 'sunset', 'consciously', 'dentist', 'villages', 'collaboration', 'Beethoven', 'transparent', 'nomination', 'rebels', 'Russell', 'populated', 'elite', 'youthful', 'evolution', 'stockade', 'ceased', 'lasted', "Roberts'", 'coatings', 'Turkish', '7th', 'retention', 'aloud', 'resultant', 'Masters', 'Grandma', 'tower', 'sunny', 'Age', 'Mass.', 'Parliament', 'dependence', 'Joel', 'icy', 'deserved', 'Trujillo', 'recalls', 'sorts', 'reacted', 'murders', 'Soviets', 'boot', 'antigen', 'counting', 'Nazis', 'doubtless', 'Pentagon', 'babies', 'acquainted', 'credited', 'ordinarily', 'turmoil', 'co-operation', 'ominous', 'wires', 'lyric', 'cavity', 'mercenaries', 'composers', 'strengthening', 'jerked', 'resort', 'sadly', 'antibody', 'guerrilla', 'vocabulary', 'effected', 'deserve', "kid's", 'dialysis', 'lightning', 'Emperor', 'accordingly', 'gauge', 'Torrio', 'nerve', 'trigger', 'crest', 'adjoining', 'News', 'nineteen', 'teen-agers', 'advisers', 'daring', 'phony', 'impulses', 'interviewed', 'Has', 'ballot', 'initially', 'Half', 'antique', 'violently', 'Hot', 'Essex', 'Inside', 'noting', 'circumstance', 'speakers', 'giants', 'chickens', 'couch', 'Charlotte', 'globe', 'Discovery', 'thunder', 'pity', 'Herbert', 'instituted', 'anaconda', 'stray', 'ribbon', 'fashionable', 'promoted', 'settlers', 'salesman', 'indictment', 'gripped', 'Shelley', 'Congolese', 'borders', 'Pearson', 'proven', 'eccentric', 'satisfactorily', '38', 'capitalism', 'embarrassing', 'evaluated', 'deficiency', 'Hesperus', 'weights', 'towels', 'muffled', "Scotty's", 'render', 'sodium', 'frighten', 'tear', 'Chapel', 'breast', 'beg', 'injuries', 'airplane', 'counterparts', 'bursting', 'Earl', 'happier', 'infancy', 'sexes', 'furnace', '1920', 'trailer', 'sixties', 'construct', 'constitution', 'Korean', 'presidential', 'curse', 'ribs', 'Bar', 'squeeze', 'juice', 'restraint', 'bits', 'Hillsboro', 'enzymes', 'marking', 'proprietor', 'Column', 'custom', 'ninety', 'Exchange', 'crawl', 'requests', 'purple', 'concentrate', 'electoral', 'sprawled', 'deciding', 'commercially', 'Fall', 'twenty-four', 'unusually', '$1,000', 'Vince', 'Knight', 'Boys', 'rabbit', 'threaten', 'successive', 'fixing', 'accumulation', 'rejects', '1941', 'limiting', 'Reserve', 'tender', 'Main', 'Pohl', 'towering', 'loading', 'civilized', 'fluorescence', 'compartment', 'drastic', 'enclosed', 'colonel', 'Timothy', 'comprise', 'retain', 'disappear', 'snatched', 'rubbing', 'commissioners', 'emotionally', 'passive', "Jack's", 'gifts', 'Hey', 'honeymoon', 'rotation', 'exert', 'Railroad', 'buys', 'debts', 'formulated', 'Progress', 'Consequently', 'shaken', 'Dillon', 'Australia', 'forthcoming', 'conclusive', 'Hogan', 'Herald', 'Moon', 'consultant', 'Edwin', 'angles', 'wonderfully', 'calendars', 'liking', 'choosing', 'cylindrical', 'invested', 'settling', 'necessitated', 'revealing', 'dreaming', 'respected', 'warn', 'duration', 'natives', 'mills', 'noticeable', 'divide', 'Tony', 'Jastrow', 'investigate', 'consult', 'acrylic', 'recover', 'moist', 'locating', 'Szold', 'Kremlin', 'planted', 'dose', 'Throat', 'grasped', 'rejection', 'chronic', 'arched', 'grill', 'declares', 'entities', 'enlisted', 'meats', 'lane', 'shades', 'poorly', 'apprehension', 'fur', 'Anglican', 
'pavement', 'Introduction', 'taxpayer', 'intelligible', 'comfortably', 'constitutes', 'lodge', 'foregoing', 'Heights', 'deaf', 'Dylan', 'formulations', 'keen', 'contrasts', 'cemetery', 'treatments', '90', 'bloom', 'bronze', 'spinning', 'indifferent', 'flies', 'Jimmy', 'historically', 'Midwest', 'McBride', 'deductions', 'metaphysics', 'frowning', 'mineral', 'conceivable', 'Looking', 'plow', 'yearly', 'gasoline', 'nucleus', 'eloquent', 'bankers', 'lake', 'crept', 'blond', 'script', 'revelation', 'shaft', 'floating', 'cheaper', 'Phillips', 'energies', 'founding', 'pose', 'Oregon', 'complain', 'endured', 'statesman', 'disguised', 'minerals', 'tracks', 'frantic', 'tires', 'orthodox', 'Brumidi', 'Got', 'memorable', 'streams', 'dies', 'therapy', 'pituitary', 'Ross', 'Barnett', 'discrepancy', 'hazard', 'nearing', 'mg.', 'develops', 'Similar', 'criticisms', 'Quint', 'Protestantism', 'L', 'inspect', 'Albany', 'epidemic', 'Agriculture', "individual's", 'Moritz', 'observing', 'wired', 'approximate', 'framework', 'posted', 'Dill', 'Presbyterian', 'Taliesin', "Eisenhower's", 'appreciated', 'Cubism', 'gap', 'surgeon', 'adopting', 'merchant', 'contrasting', 'portions', 'Sixties', 'twisting', 'awkward', 'amazed', 'motivation', 'trusted', 'partial', 'usefulness', 'shifting', 'tin', 'aerator', 'smiles', 'Services', 'thereto', 'specify', 'theaters', 'Willie', 'integrated', 'investors', 'eventual', 'teaches', 'infant', 'autonomic', 'housed', 'energetic', 'regretted', 'manufactured', 'Ever', 'conceded', 'Hiroshima', 'fulfilled', 'thoughtful', 'Stone', 'childish', 'Information', '1942', 'contraction', 'frustration', 'array', 'Piazza', 'transfers', 'preserves', "Shakespeare's", 'Spelman', 'spaces', 'sliding', 'Sons', 'peasants', 'photographic', 'McKinley', 'nod', 'coincidence', 'specimens', 'arising', 'criteria', 'lousy', 'veterans', 'dots', 'wart', 'assistants', 'symbolized', 'entertained', 'explored', 'Maria', 'summit', 'quantities', 'continent', 'Crombie', 'Mahzeer', 'electrons', 'cohesive', 'sanction', 'faithful', 'butt', 'ml', 'pollen', 'rendering', 'Reef', 'carriage', 'temper', 'privacy', 'fatigue', 'mist', 'plowing', 'climb', 'rival', 'meters', 'squarely', 'rebel', 'psychologists', 'Birds', 'incidents', 'narrator', 'actively', 'frankly', 'unified', 'organ', 'delegation', 'coexistence', 'sculpture', 'Easter', 'Less', '41', 'spit', 'gymnastics', 'neglect', 'interpret', 'grimly', 'unexpectedly', 'swim', 'tropical', 'UN', 'Plymouth', 'leisure', '1000', 'nominal', 'premium', 'politically', 'postwar', 'afforded', 'graduation', 'congruence', 'scholar', 'unprecedented', 'scratching', 'distorted', 'rhythms', 'journalism', 'expectation', 'lengthy', 'Venus', 'secured', 'constituted', '250', 'agrees', 'hardest', 'Viola', 'Porter', 'appropriated', 'tomb', 'Michael', 'anticipate', 'Hong', 'Eight', 'posse', 'varies', 'Kong', 'equitable', 'Kehl', 'Vivian', 'rider', 'strained', 'Hampshire', 'airport', 'Rev', 'mounting', 'Claude', 'log', '1,000', 'nigger', 'flush', 'outline', 'architecture', 'Master', 'titled', 'doubled', 'shed', 'barbed', 'cooperate', 'dominance', 'bands', 'undergraduate', 'appetite', 'attributes', 'Trustees', 'cult', 'silk', 'Corp.', 'Maxwell', 'sleeve', 'Mahayana', 'curtain', "earth's", 'analyses', 'uniformity', 'finite', 'carriers', 'convincing', 'aided', 'disliked', 'testified', 'surveys', 'rocking', 'locally', 'Town', 'trivial', 'Cross', 'Either', 'cultures', 'rotating', 'timing', 'detached', 'tribes', 'facility', 'proclamation', 'quo', 'possessions', 'microorganisms', 'obtainable', 'outcomes', 
'nailed', 'preacher', 'adverse', 'Orioles', 'U', 'perfection', 'dimensional', 'eastern', 'disciplined', 'Winslow', 'laying', 'repaired', 'blankets', 'oriented', 'Polish', 'Side', 'generated', 'beneficial', 'confess', 'treating', 'foreigners', 'schoolhouse', 'ghetto', 'slice', 'sigh', 'compound', '1930', 'vacant', 'waist', 'depressed', 'vapor', 'persisted', 'Pedersen', 'rally', 'urging', 'nitrogen', 'distributions', 'strenuous', 'Lilly', 'blessing', 'Governments', 'risen', 'puzzle', 'la', 'zone', 'halt', 'Gordon', 'descending', 'outward', 'wholesome', "brother's", 'insulation', 'screens', 'thermometer', 'button', 'fathers', 'comrades', 'utilize', 'deputy', 'Evans', 'Sturley', 'managerial', 'professionals', 'tasted', 'exceeds', 'instrumental', 'deficiencies', 'tremble', '17th', 'solidly', "latter's", 'spark', 'fleeing', 'projections', 'refrain', 'summoned', 'unite', 'Give', 'afterwards', 'vain', 'Fosdick', 'refusing', 'mice', 'expenditure', 'ration', 'poison', 'descent', 'glowing', 'Must', 'enjoys', '57', 'Prize', 'Except', 'deceased', 'greatness', 'plotted', 'mg/l', 'ranges', 'burial', 'duly', 'ripe', 'Adjusted', 'Herman', 'Drexel', 'tents', 'recruit', 'departing', "We'd", 'Best', 'A.D.', 'subsequently', 'Littlepage', 'Eleanor', 'Tax', 'merge', 'eighty', 'rake', 'reject', 'hysterical', 'economically', 'contributes', "girl's", 'diagram', 'sixteenth', "bull's-eye", 'ardent', 'compiled', 'Ramsey', 'Thanks', 'Authority', 'Margaret', 'drastically', 'VA', 'technicians', 'tide', 'coordinate', 'disabled', 'recordings', 'hip', "Alex's", 'Pathet', 'Skorich', 'elevated', 'mining', 'dash', 'warmed', 'cursed', 'luggage', 'assessing', 'waiter', 'feminine', 'Hawksley', 'secede', 'principally', 'Odyssey', 'akin', 'founder', 'indebted', 'Asian', 'vice-president', 'hesitate', 'specificity', 'declaring', 'hemisphere', 'vengeance', 'admire', 'Boris', 'editorials', 'avoiding', 'kennings', 'missionaries', 'Sweden', 'pasture', 'perfume', 'segment', 'slim', 'Telephone', 'deposited', 'Stacy', 'mount', 'creep', 'advocate', 'crushed', 'Eyes', 'Simmons', 'alternate', 'insists', 'theft', 'channel', 'slashed', 'lunar', 'radioactive', 'swelling', 'blooming', 'Huff', 'Lauren', 'gm.', 'rushing', 'forecast', 'philosophic', 'prostitution', 'indignant', 'intricate', 'mob', 'prediction', 'negligible', 'Hebrew', 'Depot', 'masterpiece', 'Howe', 'transducer', 'turnpikes', 'Slim', 'muddy', 'cigar', 'entity', 'Cold', 'saloon', 'rustling', 'freed', '36', 'defendants', 'synthetic', "Isn't", 'Riverside', 'imperative', 'subsistence', 'preoccupied', 'misleading', 'descriptions', 'Apart', 'brace', 'acreage', 'sermon', 'climbing', 'advisory', 'Stock', 'Autocoder', 'megatons', "Papa's", 'Persian', "Government's", 'representations', 'Newman', 'misfortune', 'buttons', 'dialect', 'legends', 'educator', 'paste', 'Cavalry', 'waking', 'Sun', 'sings', 'oily', 'utilized', 'clues', 'disagreement', 'coats', 'marital', 'privileges', 'psychologist', 'dozens', 'topics', 'disturb', 'muzzle', 'summers', 'bounds', 'coin', 'Rusk', '85', 'youths', 'framing', 'nickname', 'void', 'impatient', 'hull', 'drilling', 'marines', 'harvest', 'text-form', 'politely', 'cathode', 'robbed', 'albumin', "Chicago's", 'Certain', 'magnification', 'layers', 'misunderstanding', 'disturbance', 'catastrophe', 'Dumont', 'lease', 'Pratt', 'assuring', 'earthy', 'paragraph', 'shy', 'fertility', 'acquiring', 'simplest', 'huddled', 'journalist', 'applicants', '1859', 'devoting', 'poised', 'Bright', 'sectors', 'darling', "Johnson's", 'Northeast', 'Pops', 'loads', 'prejudice', 'Brandt', 
'arch', 'Figs.', 'hardware', 'allotted', 'Trig', 'confronting', 'strict', 'hideous', 'anchored', 'imagery', 'meantime', 'streak', 'pump', 'scar', 'consecutive', '1935', 'Isaac', 'sailed', 'handicapped', 'honesty', 'fragments', 'Romans', 'non-violent', 'genuinely', 'conscientious', 'integrity', 'adjusting', 'obstacle', 'criterion', 'arguing', 'screws', 'privileged', 'implication', '1937', 'Cap', 'diminished', 'patches', 'Foster', 'holster', 'admiration', 'mm.', 'bee', 'archaeology', 'Stop', 'Industry', 'underdeveloped', 'phonologic', 'unfamiliar', 'singled', 'Plains', 'calmly', 'accumulated', 'reservoir', 'semester', 'pleasing', 'Polaris', 'seas', 'Jet', 'tenure', 'reinforce', 'inorganic', 'spreads', 'commute', 'embodiment', 'Tar', 'Hub', 'occurrences', 'likelihood', 'incapable', 'typewriter', 'Steichen', 'decency', 'mortal', 'negotiate', 'allied', 'clients', 'mates', 'relevance', 'nowadays', 'accomplishments', 'theorem', 'mastery', 'learns', 'container', 'Review', 'denying', 'anionic', 'ads', 'paced', 'harness', 'vagina', 'adoption', 'Fla.', 'accuse', 'commercials', 'neurotic', 'argues', 'polynomials', 'danced', 'thirty-five', 'acre', 'schedules', 'elephants', 'doll', 'Goldberg', 'analyze', 'disarmament', 'Lucien', 'mixing', 'combining', 'ad', 'tyranny', 'privately', 'cement', 'Concord', 'Meeker', 'patriotic', 'alleged', 'Magnum', 'museums', 'logically', 'pious', 'identities', 'veto', 'Herr', 'lifetime', '48', 'willingness', 'ingenious', 'stole', "King's", 'compilation', 'prints', 'conversations', 'Leonard', "Who's", '72', 'parish', 'coarse', 'vegetable', 'uniquely', 'keelson', 'listener', 'Quite', 'bearded', 'margin', 'owe', 'longing', 'gigantic', 'airplanes', 'Larkin', 'Majdanek', 'Emma', 'grief', 'wheeled', 'sociological', 'bags', 'overt', 'constituents', 'sergeant', 'marijuana', 'compulsive', 'three-dimensional', 'licked', 'saint', 'bite', 'impatience', 'sensations', 'consulting', 'elemental', "Mary's", 'recognizes', 'admirable', 'accounting', 'abandonment', "Here's", 'chains', 'communion', 'Lloyd', 'Carmer', 'nationally', 'bloc', 'immigrants', 'Calhoun', 'guiding', 'amazement', 'Brodie', 'angels', 'detect', 'champions', 'bush', 'undesirable', "Greg's", 'chase', 'segments', 'Todman', 'Gazette', 'Aside', 'drum', 'compositions', 'boiled', 'crowds', 'employing', 'limitation', 'raid', '1917', 'scars', 'violin', 'mornings', 'Ken', 'costume', 'Richards', 'Fair', 'doomed', 'Conrad', 'Freeman', 'unanimously', 'segregation', 'Paso', 'AIA', 'Clearly', 'patronage', 'differs', 'rides', 'curiously', 'strode', 'favorites', 'Petitioner', 'completing', 'paints', 'tremendously', 'Route', '71', 'clergyman', 'thumb', 'rolls', 'scanned', 'tops', 'Blues', 'commuter', 'intensely', 'Fellowship', 'Appendix', 'holidays', 'Del', 'Bake', 'retrieved', 'Ernest', 'Ellen', 'conform', 'Whereas', 'aggression', 'symphony', 'Theatre', 'dreadful', 'Warsaw', 'extensively', 'Training', 'diagnostic', 'tunnel', 'awfully', 'cultivated', 'frustrated', 'fake', 'oh', 'Brenner', 'wipe', 'northwest', "Man's", 'Hansen', 'launch', 'preventing', 'southeast', 'melodies', 'Staff', 'swear', 'exports', 'judging', 'Istiqlal', 'poultry', 'oxen', 'sail', 'favors', 'feeds', 'fragile', 'pork', 'railway', 'Scottish', 'monks', 'Montero', 'interim', 'restrain', 'wool', 'conjugate', 'Barbara', 'nonsense', 'viscosity', 'no.', 'dimly', 'dramatically', 'specialization', 'token', 'jaws', 'witty', 'ye', 'waterfront', 'bolt', 'Julie', 'pleasantly', 'generals', 'Parents', 'adds', 'linguists', 'undergone', 'restrict', 'militia', 'boredom', 'introduce', 
'enduring', 'rhythmic', 'reunion', 'Chester', 'bronchioles', 'bleak', 'Phoenix', 'Ball', 'reckless', 'involution', 'spat', 'Av.', 'murdered', 'motivated', 'guarding', 'Pa.', 'impurities', 'ions', 'stiffly', 'dusk', 'Eromonga', 'Wyoming', 'skies', 'thirteen', 'Grosse', 'ditch', 'distal', 'cooked', 'Blatz', 'occupants', "governor's", 'Minneapolis', 'exhibited', 'Oscar', 'corrected', 'derives', 'chairmen', 'eyebrows', 'Hengesbach', 'Randolph', 'Schaffner', 'slower', 'Roebuck', 'duck', 'arrange', 'Gran', 'apportionment', 'Larry', 'delivering', 'Pastern', 'picket', 'f{t}', 'hard-surface', 'sponsors', 'designing', 'sped', 'aerated', 'Shea', 'outright', 'potent', 'proportionate', 'tearing', 'Partisan', 'generators', 'Jupiter', 'Living', "it'll", "other's", 'businessman', 'averages', 'Marvin', 'spy', 'whirling', 'escort', 'grammatical', 'antibodies', 'chapel', 'dissatisfaction', 'Elec', 'Gonzales', 'sewing', 'humans', 'menace', 'alveolar', 'Rankin', "It'll", 'tackle', 'attacking', 'biwa', "Georgia's", 'handkerchief', 'heel', 'half-man', 'handles', 'dinners', 'Garson', 'Skeletal', 'deposit', 'Interstate', 'Bears', 'Jubal', 'doubted', 'Bernard', 'ringing', 'Year', 'PBS', 'DEAE-cellulose', 'Trade', 'Shaw', 'Gene', '46', 'End', 'Executive', 'morphophonemics', 'bushes', 'Dogtown', 'discharged', 'drifting', 'resigned', 'Bermuda', 'Hague', 'steeple', 'Jenkins', 'irradiation', 'Giants', 'challenged', 'Credit', 'competitors', 'Moses', 'suds', 'Hohlbein', 'Savannah', 'coins', 'Kayabashi', 'collaborated', 'sore', 'majestic', 'skiff', 'AM', 'Wagner-Peyser', 'Douglass', 'reared', 'dumped', 'regulus', 'Holden', 'flooded', 'Sidney', 'fulfill', 'discharges', 'Esther', 'Cincinnati', 'al', 'Prevot', '1924', 'Profile', 'Freedom', "else's", 'palatability', 'Students', 'Kohnstamm-negative', 'Angel', 'taxation', 'subsystems', 'tastes', 'Braque', 'Christiana', 'Byrd', 'finishing', 'Muller', 'figs.', 'clerical', 'depended', 'Docherty', 'Partlow', 'vases', 'stillness', 'draped', 'provinces', 'graceful', 'interfere', 'elder', 'awoke', 'cave', 'hint', 'Mountains', 'sullen', 'slug', 'colleague', 'labeled', 'B-52', 'grease', 'Maine', 'positively', 'Higher', 'cheerful', 'Turnpike', 'Girl', 'departed', 'Eden', 'Australian', 'fox', 'earthquake', 'Into', "O'Banion's", "Island's", 'lazy', 'resisted', 'chromatic', 'occupational', 'flashes', 'ambassador', '1925', 'guts', 'Barnes', 'plea', 'homely', 'wilderness', 'groupings', 'exploited', "Peter's", 'admissible', 'Fiat', 'Plan', 'enables', 'abundant', 'stains', 'nicely', 'precipitated', 'Philharmonic', 'searched', 'Acting', 'anxiously', 'presenting', 'prosecution', 'voluntarily', 'recovered', 'illegal', 'seam', 'bounded', 'prohibition', 'hazards', 'brilliantly', 'ills', 'proudly', 'empire', 'spared', 'orthodontist', 'industrialized', 'pour', 'seams', 'projection', 'Shortly', 'proportionately', 'cheese', 'hastened', 'resource', 'marched', 'settings', 'classics', 'municipalities', 'vitally', '1/4', 'Frankfurter', 'await', 'quack', 'zinc', 'anecdote', 'locker', 'blown', '1933', 'tortured', "Gavin's", 'wheat', 'simplify', 'Walton', 'antagonism', 'radius', 'stretches', 'brood', "Palfrey's", 'retire', 'skillful', 'consumers', 'cites', '2:35', 'sterile', 'embassy', 'Thor', '150', 'advertised', 'gospel', 'simplified', 'circled', 'emerges', 'denoted', 'attainment', 'paradox', 'equations', 'knit', 'guarantee', 'earning', 'unconsciously', 'geometry', 'constants', 'begging', 'preoccupation', 'Kirby', 'indignation', 'computing', 'illustrations', 'tune', 'soaking', 'calculations', 'Minor', 
'destined', 'addressing', 'seeks', 'mask', 'Salter', 'roast', 'extremes', 'amused', 'twentieth-century', 'Harriet', 'Always', 'reins', 'elegance', "Wright's", 'picturesque', '10-year', '100,000', 'surveyed', 'Fighting', 'Indies', 'unquestionably', 'resin', 'attractions', 'sticky', 'Precious', 'cautious', 'Dulles', 'Duke', 'ally', 'rack', 'numbered', 'traditionally', 'spontaneously', 'choke', 'timely', 'smoked', 'scrambled', 'elders', 'closest', 'bouncing', 'Unlike', 'stepping', 'manifestations', 'resembles', 'discourage', 'vastly', 'heavens', 'atmospheric', 'angel', 'irresponsible', 'toe', 'tailored', 'cork', 'proceeding', 'assigning', 'Indonesia', 'scratch', 'viewing', 'Battle', '40,000', 'gland', 'institutional', 'Publications', 'defining', 'verses', 'etcetera', 'ensemble', 'plumbing', 'winding', 'salad', 'pineapple', 'offset', 'efficacy', 'theologians', 'hierarchy', 'extracted', 'Freud', 'looming', 'clause', 'wishful', 'currents', 'tenants', 'advancement', 'dazzling', 'Forbes', 'broadening', 'campers', 'quantitative', 'guides', 'Arizona', '47', 'manifest', 'dictates', 'beginnings', 'Ritter', 'borrow', 'ghost', 'Sundays', 'Congressional', 'regret', 'specially', 'inspiration', 'versus', 'anatomical', 'heartily', 'prevents', 'well-being', 'assessed', 'impose', 'trades', 'discoveries', 'immensely', 'similarity', '0.5', 'swayed', 'breathed', 'watercolor', 'utilization', 'gram', 'shaved', 'pants', 'agony', '0.1', 'additions', 'barrier', 'augmented', 'sweetheart', 'anatomy', 'persistence', 'Say', 'annoyance', 'destination', 'versions', 'butyrate', 'cellulose', 'counterpart', 'unification', 'marvelous', 'soloist', 'hillside', 'preference', 'gravel', 'commissions', 'pouring', 'restore', 'subordinates', 'forecasting', 'unchanged', 'comparing', 'summarized', 'Against', 'unimportant', 'rocky', 'caring', 'quill', 'pigment', 'Bronx', 'conflicts', 'creativity', 'intercourse', 'bin', 'Deal', 'grotesque', 'persuasion', 'conveyed', 'gracious', 'fighter', 'induce', 'conventions', 'satire', 'Mass', 'troop', 'commanding', "How's", 'Anyway', 'replies', 'overcast', 'conservatism', 'horizontal', 'ft.', '18th', 'blessed', 'sometime', 'conceptions', 'attendance', 'drifted', 'withdrew', 'indulge', 'supplementary', 'Left', 'ministers', 'untouched', 'tolerant', 'ratings', 'invite', 'engendered', 'filter', 'cousins', 'cage', "son's", 'enchanting', 'linguistic', 'indicators', 'publishers', 'sensory', 'mortar', 'irregular', 'reviewing', 'Claims', 'precedent', 'weird', 'Merchant', 'malaise', 'Rules', 'Coombs', 'gratitude', 'nightmare', 'Ferry', 'superbly', 'beans', 'dresses', 'Kingdom', 'yell', 'illumination', 'avocado', 'polls', 'Cotton', 'mill', 'Preparation', 'Presently', 'consolidation', 'employs', 'Factory', 'Unit', 'existential', 'spectator', 'latent', 'editions', 'Cadillac', 'protests', 'echo', '3,000', 'evils', 'melted', 'dual', 'improves', 'ingredients', 'pamphlets', 'retains', 'reconnaissance', 'formulate', 'snapping', 'proclaimed', 'export', 'weighing', 'battered', 'domain', 'eminent', 'nagging', 'Gentile', 'Luis', 'puzzling', 'philosophers', 'therein', 'spacious', 'Foods', 'miscellaneous', 'boiling', 'sorrow', 'vividly', 'Classes', 'declining', 'Wisman', 'Kirov', 'compressed', 'Cologne', 'molecules', '$10,000', 'revisions', 'Eventually', 'narrowed', 'Headquarters', 'Add', 'elimination', 'generalized', 'reviews', 'perceptions', 'Fathers', 'wrist', 'reservations', 'denominational', 'grunted', 'ragged', 'pathological', 'symbolize', 'glaze', 'permitting', 'monkey', "Mann's", 'Buddhism', 'contention', 
'chocolate', 'scholastic', 'catching', 'appalling', 'reportedly', 'mimesis', 'filly', 'virgin', 'purified', 'resumption', 'obedience', 'convertible', 'Hungarian', 'breasts', 'suggestive', 'one-half', 'postponed', 'Housing', 'Shann', 'rated', 'spatial', 'histories', 'facets', 'P.M.', 'Toynbee', 'accord', 'accent', 'bulky', 'workable', 'thesis', 'notices', 'questionable', 'warming', 'Falls', 'Boulevard', 'Youth', 'honorable', 'hardship', 'monetary', 'topic', 'thigh', 'tougher', 'patrons', 'seventeenth', 'obtaining', 'smells', 'systematically', 'shores', 'pulls', 'surge', 'deadlock', 'Germanic', 'enrolled', 'lessened', 'Group', 'Texans', 'steering', 'exploit', 'poll', 'adaptation', 'timed', 'tours', 'hints', 'counseling', 'averaging', 'centralized', '2%', 'invasions', 'rails', 'vent', 'developmental', 'ordinance', 'pulse', 'sewer', 'nephew', 'enormously', 'trifle', 'Push-Pull', 'Fiedler', 'outlet', 'warnings', 'conceivably', 'pansies', 'roaring', 'introducing', 'halfback', "Faulkner's", 'Drill', 'forests', 'badness', 'glove', 'Voltaire', 'Han', 'inward', 'worldly', 'steer', 'condensed', 'orbits', 'wept', 'rating', 'Rice', 'miniature', 'plywood', 'Relations', 'furnishings', 'metallic', 'deduction', 'vitamins', 'Norton', 'Kearton', 'pelts', 'K', 'moisture', 'redcoats', 'hauled', 'Bienville', 'concentrations', '55', 'incurred', 'shivering', 'swallow', 'Da', 'Schweitzer', 'dread', 'haste', 'yielding', 'Band', 'dig', 'Othon', 'balloon', 'dense', 'Star', 'disclose', 'pennant', 'Meek', 'Rogers', 'carefree', 'Milwaukee', 'acquaintance', 'homer', 'peering', 'Super-Set', 'wounds', 'irregularities', 'examinations', 'coupling', 'compelling', 'spindle', 'Von', 'concealed', 'removing', 'Gradually', 'veil', 'Doaty', 'Yin', 'Near', 'saloons', 'plots', 'Draft', 'Rhodes', 'upside', 'kills', 'ancestry', 'pathetic', 'Works', 'evolved', 'recreational', 'sailors', 'selfish', 'Robards', 'technically', 'corrupt', 'imported', 'skinny', 'Buzz', 'Papa-san', 'relates', 'scrap', 'diagrams', "Rayburn's", 'chooses', 'reciprocal', 'Gratt', 'symbolism', 'trio', 'jumping', 'chef', 'CD', 'Atomic', 'Buddha', 'fitness', '24-hr.', 'stamp', 'Gibson', 'exploded', 'periodic', 'yourselves', 'Pirates', '1931', 'crystals', 'focal', 'footsteps', 'umbrella', 'Dead', 'Marx', 'qualification', 'shaping', 'flu', 'mock', "Barco's", 'resemble', 'directional', 'withheld', 'accompaniment', 'neutralist', 'overly', 'apportioned', 'interdependent', 'equals', 'Connally', 'perilous', 'expose', 'Jacques', 'bacteria', 'revolt', 'ultrasonic', 'workbench', 'distilled', 'attained', 'Inspector', 'everlasting', 'strains', 'wins', 'Seminary', 'heed', 'dependable', 'palms', 'Guy', 'Guideposts', 'residue', 'Investigation', 'Leaving', 'analyzing', 'Catholicism', 'Babe', 'mentioning', 'Orthodox', 'irresistible', 'spirited', 'tolerance', 'shield', 'interpreter', 'festivities', 'sympathies', 'discontent', 'accelerometers', 'pumping', 'setup', 'debris', 'Angelo', 'differentiation', 'pH', 'ensure', 'accidental', 'Ryusenji', 'gaiety', 'foamed', 'census', 'Hamrick', 'insistent', 'avoidance', 'post-war', 'reminder', '**yf', 'Citizens', 'Walnut', 'shelves', 'cardinal', 'restorative', 'cynical', 'repel', 'Thornburg', 'scenic', 'Friend', 'oysters', 'knitted', 'inescapable', 'anguish', 'realistically', 'Confrontation', 'jokes', 'sophistication', 'compulsion', 'wicked', 'deeds', 'accidents', 'veranda', 'baroque', 'traces', 'distaste', 'downright', 'McCormick', 'generously', 'cares', 'deprived', 'microscope', 'Constantine', 'Newark', 'sticking', "Helva's", 'polish', 
'singer', 'agreeable', 'Nogol', 'niece', 'Whoever', 'Carnegie', 'Cruz', 'Dance', 'Showmanship', 'solemnly', 'quirt', 'apple', 'Holland', 'wrath', 'Macklin', 'unpaid', 'themes', 'Wendell', 'outgoing', 'stool', 'characterization', 'possesses', 'architects', 'dashed', 'surroundings', "enemy's", 'disadvantages', 'glancing', 'Surely', 'horns', '1921', 'witches', 'dispelled', 'non', 'metropolis', 'calcium', 'FBI', 'slack', 'consultation', 'relied', 'profoundly', 'Beauclerk', 'Stravinsky', '600', 'expresses', 'centimeters', 'supplements', 'canvases', 'ash', 'penetrated', 'kindly', 'inclination', 'flair', 'Matson', 'gracefully', 'charts', 'Studies', 'creeping', 'satellite', 'peer', 'emancipation', 'weeping', 'dwell', 'mingled', 'foreseen', 'motif', '2-56', 'Chien', 'Really', "poet's", 'Memphis', 'chuck', 'belts', 'peers', 'resent', 'haunting', 'barrels', 'cliff', 'Lolotte', 'stumbling', 'faulty', 'reminds', 'thrusting', 'skipped', 'observes', 'rocked', 'dock', "Didn't", 'interlocking', 'lone', 'logs', 'pills', 'Pretty', "1/4''", "1/2''", 'Hitler', 'scholarly', 'boast', 'shoved', 'bosom', 'deer', 'jig', 'Jed', 'planking', 'cooks', 'relish', 'snarled', 'Controller', "they've", 'descended', 'yeah', 'muttering', 'SAC', 'Artie', 'mahogany', 'offense', 'accusing', 'Liberal', 'Emerson', 'poles', "1920's", 'interruption', 'peaks', 'urges', 'Unitarian', 'Lucia', 'slapped', 'cushion', 'minus', 'half-hour', 'springs', 'pitched', 'curtains', 'perpetual', 'mounts', 'accommodations', 'winked', 'Need', 'rust', 'contented', 'Snow', 'absurdity', 'exterior', 'Scots', 'chuckled', 'compost', 'testify', 'radiant', 'Brandon', 'assemble', 'clamped', 'Saviour', 'comic', 'prominently', 'spaced', 'potentialities', 'Strange', 'stag', 'Faith', 'franks', 'echoes', 'buns', 'rousing', 'Chase', 'someday', 'Englishmen', 'Universe', 'feeble', 'tactical', '1861', 'inviting', 'elect', 'one-shot', "Ruth's", 'coals', 'analogous', 'importantly', 'excluded', 'courtyard', 'seriousness', "Curt's", 'Mosque', 'Across', 'frankfurter', 'impassioned', 'authenticity', 'unsuccessful', 'lotion', 'architectural', "who'd", 'trunk', 'obey', 'adherence', 'Fifteen', 'resented', 'Sonata', 'tangle', 'Piano', 'Creator', 'mortality', 'impatiently', 'Op.', 'clutching', 'mobility', 'spiral', 'decorations', 'preached', 'exceedingly', 'vinegar', 'enforce', 'ecclesiastical', 'embarrassment', 'hypothetical', 'bounce', 'Hugh', 'irrational', 'zeal', 'aperture', 'brethren', 'triumphantly', 'N.C.', 'negotiating', 'Embassy', 'clicked', 'knuckles', 'pregnant', 'Sullivan', 'automotive', 'conflicting', 'Camp', 'encounters', 'duplication', 'resembled', 'capillary', 'restriction', 'Garland', 'survivors', 'alibi', 'regulation', 'Proof', 'sorbed', 'exaggerate', 'actives', 'boycott', 'Rights', 'caliber', 'grips', 'delegate', "goin'", '**yc', 'confrontation', 'decreased', 'Barker', 'uncommon', 'Presidents', 'co-optation', 'unhappily', 'Charley', 'headlights', 'pillow', 'arcs', 'drawer', 'discarded', 'toys', 'revision', 'contests', 'civilizational', 'cameras', 'lower-class', 'Palazzo', 'benches', 'outsiders', 'blows', 'monotonous', 'staircase', 'wreath', 'aide', 'parameters', 'Expressway', 'simultaneous', 'kinetic', 'diminishing', 'mound', 'telegraph', 'grandmother', 'Victoria', 'paramount', 'engagements', 'radish', 'Traffic', 'athlete', 'Help', 'peril', 'Oak', 'Examiner', 'Lodge', 'dominate', 'conspicuously', 'ADC', 'Frenchman', 'boldly', 'mouse', 'stiffened', 'pet', '1938', 'concede', 'Airport', 'confided', 'illiterate', 'applicant', 'homogeneous', 'boom', 'subdivision', 
'bias', 'vines', 'sandwich', 'weaker', 'detectable', 'honoring', 'Mel', 'interlobular', 'strikingly', 'denomination', 'declaration', 'willow', 'micrometeorite', 'requesting', 'scraped', 'endure', 'meteorites', 'staffs', 'Highway', 'blunt', 'southward', 'artillery', '1922', '1914', 'Clarence', 'iodide', 'Lt.', 'taxing', 'afflicted', 'notorious', 'miracles', 'authoritative', 'peripheral', 'bumblebees', 'instructor', 'raged', 'Ivy', 'auditorium', 'Considering', 'Southerner', 'basketball', 'Technology', 'gait', 'knot', 'confront', '1040', 'preach', 'hunt', 'undergo', 'parliamentary', "Mercer's", 'Peterson', 'traded', 'sixty-one', "Hetman's", 'Tech', 'whereof', 'accompany', 'ankle', 'predispositions', 'unsatisfactory', 'shotgun', 'claimant', 'multiplicity', 'Oil', 'slaughter', '100%', 'Weston', 'trailed', 'Sioux', 'appliances', 'diamond', 'thieves', 'efficiently', 'Rural', 'dividends', 'taut', 'deed', 'precarious', 'collections', 'Price', 'Buffalo', 'appropriations', 'decoration', "Garibaldi's", "Malraux's", 'serene', 'hips', 'archaeological', 'retreated', 'fresco', 'haunted', 'science-fiction', "Krim's", 'bomber', 'Doctrine', 'farmhouse', 'dystopias', 'sanctuary', 'aerial', 'abolish', 'stupidity', 'Sales', '1896', 'prompt', 'Strasbourg', 'cleaner', 'traps', 'cleaners', "They've", 'surveying', 'congressmen', 'Athens', 'ruins', 'O.K.', 'newt', 'charging', "week's", 'Mamma', 'Copernican', 'crawling', 'termination', 'Judy', 'Manager', 'comforting', 'dismal', 'knelt', 'entails', 'Budapest', 'practiced', 'betrayed', 'allotments', 'foliage', 'recipe', 'baked', '99', 'Agency', 'Sheldon', 'Pollock', 'Murray', 'Le', 'wandered', 'freshman', "who's", 'Kern', 'potters', 'Francesca', 'chambers', 'disappearance', 'sunrise', 'strangers', 'leagues', 'agrarian', 'decreases', 'appraisal', 'cyclist', 'Achievement', 'Poetics', 'wardrobe', 'proprietorship', 'germanium', 'Noel', 'martyr', 'Strong', 'designers', 'ontological', 'utopian', 'republic', 'Grace', 'declare', 'tax-free', 'Ptolemy', 'pad', 'supporters', 'steak', 'immediacy', 'ceramic', 'rooted', 'reconstruction', 'isolate', 'prosperous', 'decorative', 'notify', 'varieties', 'undermine', 'Danish', 'flour', 'X-ray', 'exceptionally', 'mating', 'Results', 'Chancellor', 'fort', 'statesmen', 'Industries', 'restoration', 'predictable', 'struggled', 'releases', 'Bottom', 'presses', 'Ruanda-Urundi', 'repairs', 'stimulating', 'impartial', 'unstable', 'bankruptcy', 'divorced', 'peculiarly', 'crisp', 'Presidency', 'Hoover', 'slump', 'frowned', '4th', 'brighter', 'Series', 'slab', 'vacations', 'investigating', 'Dorset', 'rulers', 'consonantal', 'guarantees', 'devise', 'aunt', 'Burns', 'spends', 'panting', 'blindly', 'sleeves', 'humidity', 'C-plane', 'domes', 'clocks', 'telegram', 'rot', 'momentous', 'infection', 'bloody', 'undergraduates', 'Wolfe', 'worthwhile', 'precaution', 'complaints', 'renewal', 'melancholy', '$2', 'Remove', 'astonishing', 'reassurance', 'sentenced', 'financially', 'Jenny', 'Meeting', 'flashlight', 'populations', 'youngster', 'sprung', 'stacked', '1963', 'lb', 'respondents', 'respondent', 'thou', 'overboard', 'carrier', "You'd", 'jammed', 'smoking', '19th', 'La.', 'rigidly', 'extruded', 'guessing', 'predict', 'wreck', 'mayor', 'busily', 'social-class', '180', 'fabrication', 'foe', 'essays', 'tactual', 'revival', 'establishments', 'public-limit', 'territories', 'counters', 'intonation', 'contracted', 'blend', 'recommending', 'Vecchio', 'two-story', 'Barney', 'straightforward', 'junk', 'phonemic', 'creek', 'calf', 'Hemphill', 'revive', 'stunned', 
'Swadesh', 'usable', 'actuality', 'wonders', 'ambiguity', 'Imagine', 'discernible', 'scholarships', 'turtle', 'toss', 'kilometer', 'Pam', 'hearings', 'messenger', 'borne', 'toll-road', 'speculative', 'slumped', 'hostess', 'frantically', 'pig', 'Production', 'mouths', 'grinding', 'Wales', 'lovers', 'mourning', 'Port', 'embarrassed', 'coldly', 'celestial', 'sucking', 'broaden', 'orthodontic', 'Basic', 'catalogue', 'scrub', 'electrostatic', 'quacks', 'utilizing', 'tumors', 'Vocational', 'replacing', 'clarified', 'secrecy', 'bluff', 'premises', 'thyroxine', 'Deep', 'companions', '$300', 'modes', 'slum', 'succeeds', '1908', 'aborigines', 'bless', 'provincial', 'transmitted', '350', 'withdraw', 'Juet', 'inhibit', 'indicator', 'recruits', 'volunteer', 'unnatural', 'featured', 'propagation', 'mg', 'championship', 'sanctions', 'eased', 'advise', 'Mo.', 'hose', 'barred', 'prairie', 'crimson', 'conquest', 'westward', 'thief', "party's", 'Issue', 'Theorem', 'erotic', 'patiently', 'monuments', 'Victorian', 'Pulley', "years'", '2,000', 'terrain', 'militant', 'bloat', 'Twice', 'backing', 'articulate', 'forceful', 'Perry', 'Julian', 'buffalo', 'slips', 'deaths', 'bites', 'repeating', "Navy's", 'se', 'paradoxically', 'dispatch', 'fundamentally', 'granting', 'bacon', 'reversed', 'nominated', 'Prime', 'parasympathetic', 'assumes', '$50', 'avenue', 'recipient', 'enactment', 'constructing', 'trailing', 'Y.', 'half-mile', '110', 'pricing', 'Burke', 'radios', 'Same', 'stops', 'donated', 'Award', '37', 'statues', '$10', 'occupying', 'prevailed', 'enlarge', 'proposes', 'Tex.', 'collectors', 'wholesale', 'watered', 'merchandising', 'assertion', 'Questions', 'Happy', 'assisted', 'doubling', 'coping', 'confessed', 'legislator', 'issuance', 'retarded', 'ban', 'deterrent', 'teamsters', 'Welfare', 'Karns', 'Golf', 'retiring', 'waged', '8,000', 'Incidentally', 'lending', 'forum', 'fraud', 'resignation', 'dismissal', 'mails', 'Reno', 'probation', 'carcass', 'pleaded', 'Ted', 'exotic', 'switching', 'ankles', 'insult', 'Delta', 'Olympic', 'fruitful', 'credits', 'preserving', 'petitions', 'congenial', 'Orange', 'admitting', 'repay', 'Whipple', 'agreeing', 'expedient', 'Case', 'Grove', 'bowed', 'hailed', 'organizing', "car's", 'charity', 'Signal', 'awaited', 'overwhelmingly', 'Patricia', 'gubernatorial', 'solo', 'Deputy', 'grocery', 'norm', 'tragedies', 'con', 'Steel', 'statutes', 'Ala.', 'slums', 'bang', 'levy', 'brisk', 'enact', 'victories', 'Prior', 'troublesome', 'floated', 'contractual', 'violate', 'Question', 'crushing', 'Treaty', 'scandals', 'fare', 'labor-management', 'Martinelli', 'infield', 'Paradise', 'facto', 'cease-fire', 'Representative', 'Buckley', 'pitchers', 'Sr.', '$5', 'dwellings', '1923', 'dividing', 'utilities', 'certificate', 'distressing', '24th', 'cooperating', '9th', 'relaxation', 'enlarged', 'Maurice', 'Gorboduc', 'Nine', 'uncovered', 'lambs', 'provocative', 'inference', 'intensification', 'breathe', 'confines', 'distributor', 'vanity', 'economies', 'triggered', 'Caribbean', "Company's", 'examining', 'sanitation', 'builds', 'accreditation', 'Recreation', 'Thanksgiving', 'salami', 'unbroken', 'incidence', 'diarrhea', 'lessening', 'misplaced', 'anarchy', 'mercury', 'Italians', 'mouthpiece', 'submerged', "Oersted's", 'chatter', 'banished', 'dealings', 'voltaic', '1819', 'links', 'magnetism', 'sipping', 'Electrical', 'Present', 'consultants', 'Automobile', 'rosy', 'Success', 'nickel', 'charting', 'Things', 'impaired', 'relate', 'rehearsed', 'gazed', 'crashing', 'invisible', 'deja', 'vue', 'surged', 
'concurrent', 'nutrition', 'Mediterranean', 'uniformed', 'wiser', 'twenty-one', 'wandering', 'squat', 'repeal', 'Approximately', 'discriminating', 'multiplying', 'liability', 'styrene', 'dominates', 'acetate', 'openings', 'wary', 'plight', 'stud', 'hopelessly', 'unload', 'clinging', 'Follow', 'calculating', 'Horse', 'experimentally', 'wiry', 'curved', 'molds', 'envy', 'grudge', 'bending', 'echoed', 'grey', 'warmly', 'strand', 'Hold', 'Shadow', 'heretofore', 'glue', 'retaining', 'flooring', 'U.S.S.R.', 'axes', 'Competition', 'rag', 'drained', 'shrill', 'frankfurters', 'Score', 'gravely', 'predominantly', 'landmarks', 'formations', 'sunshine', 'Hardly', 'Sophia', 'diamonds', 'woven', 'old-fashioned', 'heir', 'chilled', 'knives', 'Years', 'endlessly', 'stranded', 'obscured', 'Body', 'breadth', 'postcard', "farmer's", 'sands', 'underneath', 'Key', 'Animals', 'slopes', 'skeptical', 'high-pitched', 'beset', 'fashioned', 'Problems', 'pinched', 'consensus', 'transform', 'moods', 'Dag', 'recognizable', 'symptomatic', 'Juan', 'innovation', 'hereby', 'penetrate', 'immigration', 'rigorous', 'Batista', 'gatherings', 'elephant', 'protesting', 'Tammany', 'brutal', 'dripping', 'Ellis', 'pipes', 'countryside', 'Joint', 'dunes', "Washington's", 'sighted', 'hurricane', 'Montpelier', 'blamed', 'Operation', 'biology', 'riot', 'wasteful', 'cosmetics', 'Cattle', 'ranchers', 'choked', 'generosity', 'accomplishment', 'cancel', 'regulated', 'reluctantly', 'stir', 'deliberations', 'discontinued', 'faintly', 'cracks', 'bruised', 'spin', "1930's", 'temperament', 'typing', 'salaries', 'Winter', 'brandy', 'evaluating', 'bucket', 'aeration', 'govern', 'inform', 'evoked', 'abuses', 'defective', 'attributable', 'Hanoverian', 'provoked', 'disputes', 'conveniently', 'Giles', 'Religious', 'pony', 'prevails', 'concessions', 'trapped', 'congregational', 'recruitment', 'nonetheless', 'summed', 'displeased', 'listing', 'Rooney', 'harmonies', 'peasant', 'tails', 'Menshikov', 'Kyoto', 'vulgar', 'Imperial', '7:30', 'uphold', 'strive', 'instinctively', 'callous', 'preferences', 'Corso', 'Nikita', 'haze', 'solving', 'pretend', 'postulated', 'sculptures', 'polar', 'deterministic', 'self-certainty', 'infringement', 'conceal', 'procedural', 'greet', 'royalty', 'withholding', 'Dei', 'facade', 'denounced', 'scraping', 'Somers', 'twins', 'revenge', 'barge', 'bargain', 'headlines', 'gaudy', 'exhaust', 'elastic', 'tapped', 'intuitive', 'Blood', 'Adlai', 'odors', 'socks', 'irons', 'rented', "ladies'", 'fierce', "Wasn't", 'skins', 'beatnik', 'disorder', 'Shaefer', 'commenced', 'gadgets', 'Tshombe', 'colonies', 'vaginal', 'indefinite', '25,000', 'orgasm', 'ruthless', 'Diana', 'commencing', 'ancestor', 'Harrington', 'Jelke', 'reign', 'lure', 'Conant', 'unavailable', 'kidding', 'regards', 'earthquakes', 'inferior', 'Mister', 'flatly', 'platoon', 'ambush', 'Study', "Year's", 'barefoot', 'Lexington', 'appreciably', 'fronts', 'erupted', 'Gore', 'graduating', 'bathed', 'exemption', 'rusty', 'Goulding', 'rattling', 'ascertain', 'chore', 'broadened', 'condemnation', 'incredibly', 'backgrounds', 'paces', 'fragmentary', 'cabins', 'twenty-three', 'Snelling', 'annoyed', 'Starting', 'laundering', "Sherman's", 'disappointing', 'lore', 'collector', 'analyst', 'ledger', 'Nashville', 'incidentally', 'bureaucracy', 'maneuvers', 'Napoleon', 'suspicions', 'calories', 'Molotov', 'spoilage', 'concentrates', 'powdered', 'inventor', 'publisher', 'coronary', 'prey', 'Twentieth', 'strangely', 'ugliness', 'portraits', 'fostered', 'fatty', 'morally', 'beast', 'liberties', 
'discourse', 'traveler', 'illusions', 'edited', 'reproduce', 'experimenting', 'communicating', 'McCarthy', 'hotter', 'Nice', 'mansion', 'mint', 'Swedish', 'Suite', 'broadcasts', 'weekends', 'lakes', 'Clerfayt', 'origins', '$200', 'correspond', 'exclude', 'popularly', 'implying', 'utmost', 'unmistakable', 'mapping', 'plentiful', 'thy', 'persists', 'reassuring', 'sympathize', 'yearning', 'lord', 'dominion', 'disobedience', 'generalizations', 'insufficient', 'concentrating', 'Taiwan', 'Mao', 'emptied', 'starvation', 'motionless', 'superstition', 'filthy', "Bultmann's", 'unavoidable', 'flock', 'despise', 'cycles', 'emergencies', 'rags', 'gestures', 'Whitehead', 'arrives', 'inhuman', 'Tibet', 'undertook', 'defy', 'flattered', "Charles'", 'oven', 'chord', 'Jen', 'Elizabethan', 'equivalents', 'superficial', 'blaze', 'overcomes', 'stamped', 'inexperienced', 'Together', 'raining', 'exuberant', 'Going', 'careless', 'myriad', 'battles', 'Victory', 'ballad', 'generate', 'novelties', 'tonal', 'whipping', 'symphonic', 'Kennan', 'revolutions', 'concluding', 'negotiated', "author's", 'mildly', 'noteworthy', 'endowed', 'Yorker', 'scratched', 'numbering', 'talented', 'torso', 'ballads', 'linger', 'digging', 'sensibility', 'idiom', 'large-scale', 'Ferguson', 'chronological', 'dignified', 'strides', 'Sponsor', 'assimilation', "artist's", 'Gloucester', 'mysteries', 'galaxies', 'hopefully', 'rays', 'tunes', 'gases', 'enclosure', 'responds', 'depart', 'Brahms', 'fortified', 'trusts', 'Spain', 'mores', 'tricks', 'lyrical', '1929', 'raucous', 'expressive', 'invent', 'topped', 'newcomer', 'wink', 'irritation', 'recorder', 'reinforced', 'conclusively', 'magnificently', 'Call', 'hesitation', 'bony', 'sincerely', 'Magazine', 'Pauling', 'intake', 'alteration', 'performer', 'tapered', 'Quartet', 'Congregational', 'comprehension', 'cite', 'fidelity', 'multiplied', 'easiest', 'discomfort', 'pacing', 'Scriptures', 'Finals', 'sensing', 'fishermen', 'recalling', 'nostalgia', 'IBM', 'oval', 'photography', 'tan', 'Large', 'phosphate', 'fifties', 'blossoms', 'reputable', 'narrower', 'acids', 'discs', 'terrifying', 'vulnerability', 'Titan', 'Turn', 'cerebral', 'disorders', 'crowned', 'airborne', 'brushing', 'brushes', 'motors', 'locks', 'terrified', 'Direct', '2:37', 'depicted', 'immature', 'antelope', 'coloring', 'Dear', 'shortened', 'Physical', 'hunch', 'tallyho', 'courtesy', 'enlightened', 'Heat', 'ink', 'cured', 'cruising', 'tablespoons', 'trotted', 'masculine', 'confinement', 'Hoot', 'prisoner', 'Sing', 'Manufacturers', 'nomenclature', 'Ring', 'sloping', "1/8''", 'unwilling', 'postpone', 'lifting', 'hysteria', 'fascinated', 'illustrates', 'cylinders', 'piston', 'participates', 'CDC', 'sprinkling', 'glaring', 'thighs', 'emperor', 'Supper', 'preparatory', 'premise', 'hymn', 'inactive', 'sentiments', 'install', 'exploding', 'exclusion', 'Schuylkill', 'polite', 'Jerusalem', 'alley', 'subway', 'reservation', 'prohibited', 'immunity', 'Membership', 'stimulated', 'contemplate', 'refuge', 'offspring', 'embodied', 'credo', 'corpse', 'factual', 'alarmed', 'spiritually', 'ministry', 'joys', 'amusement', 'Karl', 'hanged', 'dubious', 'diversion', 'graveyard', 'departures', 'sixty-five', 'prevail', 'terraces', 'symmetry', 'packaging', 'manipulation', 'monastic', 'beckoned', 'abolition', 'commend', 'desperation', 'hears', 'mailing', 'Marina', 'visions', 'Undoubtedly', 'skiing', 'distortion', 'wisely', 'straining', 'Bench', 'elbows', 'generating', 'Hopkins', 'precautions', 'Engineering', 'unrelated', 'Real', 'restrictive', 'securing', 
"client's", 'Twenty', 'blizzard', 'snows', 'swell', 'certified', 'burdens', 'self-determination', 'obeyed', 'disagree', 'futility', 'approximation', 'patriot', 'Celtic', 'proxy', 'tart', 'streaming', 'bolted', 'suppression', 'Fe', 'wearily', 'polyether', 'threads', 'Meynell', 'photocathode', 'roses', 'slippers', 'surrendered', 'chic', 'Rhine', 'rocket', 'darkened', 'persecution', 'managing', 'grinning', 'executives', 'shout', 'errand', 'ballroom', 'jointly', 'Carnival', "Shelley's", 'maximization', 'speck', 'choking', 'Seems', 'Saxons', 'overlooked', 'exit', 'Work', 'apron', 'Expectations', 'Princeton', 'hammer', 'spraying', 'elbow', 'Define', 'quarry', 'DIOCS', 'ponds', "Hitler's", 'algae', 'conservation', 'forgetting', 'Boy', 'Tessie', 'Annapolis', 'protozoa', 'stalked', 'damaged', 'middle-aged', 'pilots', 'Mines', 'clutched', 'Nicholas', 'Succession', 'terrestrial', 'experiencing', 'yields', '10%', 'Farmers', 'tractors', 'Va.', 'Kahler', 'Nearly', 'receipts', 'hawk', 'motels', 'discrete', 'lump', 'Molesworth', 'taxable', 'groped', 'spherical', 'softened', 'sporting', 'signing', 'speculate', 'bum', 'stack', 'hallway', 'Duclos', 'entirety', 'Rebel', 'attendants', "1960's", 'Rabbi', 'taller', 'Greater', 'invites', 'peaked', 'Troop', 'Col.', 'goddamn', 'bluntly', 'chimney', 'dessert', 'trousers', 'upturn', 'Heard', 'bundles', '40%', 'Shop', 'securities', 'Read', 'Carter', 'Lilian', 'eyed', 'lace', 'writ', 'awaken', 'Olgivanna', 'dystopian', 'inefficient', 'Flannagan', 'subdued', 'sporadic', 'integrate', 'centrifuged', 'Evanston', 'hebephrenic', 'chromatography', 'aerosol', 'semantic', 'lookup', 'meteoritic', 'photochemical', 'CTA', 'syllables', 'queens', 'declarative', 'soiled', 'mentions', 'surface-active', 'cursing', '5,000', 'arena', 'symmetric', 'pensions', 'posed', 'parameter', 'Yokuts', 'hr', 'beebread', 'Mussorgsky', 'marksman', 'vectors', 'dilution', 'offensive', 'Vientiane', 'confirmation', 'Murphy', 'gall', 'Seattle', 'subspace', 'Fidel', 'cf.', 'announcing', 'twin', 'Brandywine', 'Lemma', 'denotes', 'obstacles', 'Summary', 'affiliated', 'quadric', 'Funeral', 'equate', 'supportive', 'regression', 'Anna', 'Andrena', 'narcotics', 'verbs', 'Darling', 'wallpaper', 'pathology', 'oak', 'hereunto', 'Plantations', '69', 'terrace', 'follow-up', 'aggregate', "More's", '33', 'garbage', 'Interest', 'flatness', 'fiery', 'Costaggini', 'valuation', 'totaled', 'Fletcher', 'Kitty', 'Oliver', 'alterations', 'Hawk', 'hunted', "Patchen's", 'Simpkins', 'afloat', 'Vermejo', 'Illustration', 'reactor', 'consequent', 'Gibbs', 'indices', 'bidding', 'bottoms', 'fastest', 'directing', 'restraints', '203', 'referrals', 'reproduced', 'Burnside', 'trustee', 'Yugoslav', 'collision', 'diaphragm', 'bulb', 'currency', 'Beatrice', 'resistors', 'potentially', 'bail', 'booked', 'positivist', 'Aaron', 'Canadian', 'simulated', 'cortex', 'buses', 'warriors', 'Colonial', 'shrewd', "Today's", 'slick', 'punished', '1105', 'monotony', 'longed', 'Slowly', 'diffraction', 'Frankfurt', 'Palestine', 'exalted', 'arches', 'refugee', 'hinted', 'wrinkles', 'Woman', 'grove', 'eyelids', "Russell's", 'Leave', 'Willings', 'Everywhere', 'dispersed', 'Nobel', 'aborigine', 'boulder', 'n', 'Mullins', 'configuration', 'faults', "Meeker's", 'Solid', 'defiance', 'patted', 'Heiser', 'phones', 'injection', 'execute', "an'", '1832', 'embassies', "Mike's", 'Hez', 'Wells', 'Register', 'bestowed', 'Venice', 'Oso', 'Homer', 'buckskin', 'Summers', 'Leo', 'ambiguities', 'organisms', 'Hamm', 'excitedly', 'perceptual', 'continents', 'awaiting', 
'smoothed', 'combines', 'oats', 'Digby', 'Macneff', 'gapt', 'accessories', 'quivering', 'Days', 'exchanged', 'Siddo', 'wretched', 'urbanization', 'epithets', 'Pakistan', 'fifteenth', 'Donovan', 'hoofs', 'cautiously', 'comprised', 'Paxton', "Nick's", 'Gibby', 'Geely', 'accruing', 'frail', 'redhead', 'slacks', 'recession', 'imposing', 'Down', 'Benington', "club's", 'Base', 'trumpet', 'portrayal', 'Mousie', 'rendezvous', 'Shylock', 'sampled', 'dugout', 'restraining', 'Cardinal', 'Fudo', 'activation', 'Marcus', 'cough', 'collapse', 'seasonal', 'edged', 'subsidiary', 'knight', 'hugging', 'Catatonia', 'glare', "York's", 'Kodyke', 'cheer', 'pursuing', 'Sabella', 'screeching', 'squall', 'Regulation', '1815', 'seizure', 'Berry', 'totalitarian', 'bloomed', 'frivolous', 'overheard', 'Hawthorne', "Schnabel's", 'divides', 'Gyp', 'Ridge', "N'", 'initiation', 'oppression', 'thinner', 'Souvanna', 'all-out', 'ancestors', '7-1', 'zing', 'Sources', 'questionnaires', 'observance', 'maladjustment', 'Rossoff', 'manifested', 'thaw', 'tanned', 'all-Negro', 'Gansevoort', 'Count', 'pear', 'schizophrenic', 'Manuel', 'Hernandez', 'Orvis', 'slapping', 'shipped', 'compliance', 'Appeals', 'Granny', 'pegboard', 'agitation', 'Handler', 'Pearl', 'sportsmen', 'assures', 'Matilda', 'stint', 'brute', 'modify', 'noises', 'Shafer', "Welch's", 'Pels', 'relinquish', 'Aid', 'canyon', 'suspense', 'treason', 'sponge', 'sax', 'magnified', 'pessimistic', 'attacker', 'Doolin', "bull's-eyes", 'pansy', 'workmanship', 'exercising', 'elaborately', 'Weider', 'bodybuilder', 'contemporaries', 'refined', 'referral', 'concerto', 'Gladdy', 'gleaming', 'inexplicable', 'airfields', 'invalid', 'Tolley', 'compute', 'vowel', 'Set', 'arm-elevation', 'B-70', 'album', 'mandatory', 'writhing', "Spencer's", 'morphophonemic', 'orthography', "Doaty's", 'pleasures', 'Gunny', 'sensual', 'Conservation', 'dissatisfied', 'Des', 'freeze', 'candidacy', 'china', 'witnessing', 'inhabited', 'contemplation', 'commissioner', 'manure', 'f-plane', "mind's", 'boxcar', 'Alma', 'diluted', 'susceptible', 'handicap', 'Schnabel', 'inscribed', 'tolerated', 'steamed', 'demons', 'operetta', 'disseminated', 'GNP', 'descriptive', 'educators', 'single-valued', 'Text', 'Prieur', 'portray', 'materialism', 'tangents', 'mulch', '74', 'Around', 'propulsion', 'cliche', 'bells', 'Katharine', 'humming', 'Sixth', 'aligned', 'Schwarzkopf', 'Coughlin', 'smug', 'fingerprint', "They'd", 'redcoat', 'demythologization', 'darted', 'Think', 'universally', 'potency', "Black's", 'outlets', 'underworld', 'musket', 'characterize', 'Jake', 'Bonner', 'fucken', 'extract', 'bothering', 'sneaked', 'depicting', 'fold', 'throttle', 'bitch', 'Westfield', 'billions', 'decreasing', 'longest', "Killpath's", 'Roots', 'yelling', 'sleepy', 'licensed', 'merchandise', 'Bong', 'Continent', 'Hello', 'modernity', 'solace', 'dripped', 'Hirey', 'attaining', 'inspiring', 'routes', "baby's", 'winking', 'flushed', 'Gap', 'Mountain', 'Upper', 'Thelma', 'Dakota', 'lbs.', 'creaked', 'marinas', 'outboard', 'originate', 'mast', "Remarque's", 'grounded', 'treacherous', 'restoring', 'terminology', 'saturated', 'scarce', 'molecule', 'ingredient', 'okay', 'snoring', 'slowing', 'Mollie', 'Perrin', 'deposits', 'arterial', 'Jarrodsville', 'Roberta', 'Scientists', 'flopped', 'crutches', 'bartender', 'unbearable', 'fille', 'Says', 'distribute', 'Mose', 'Fritzie', 'blushed', 'absently', 'clergymen', 'identifies', 'Phedre', 'hostility', 'partition', 'interdependence', 'paradigm', "Thomas's", 'forefinger', 'unconcerned', 'bulletin', 
'tightened', 'DA', 'whatsoever', 'pastoral', 'rebut', 'evangelism', 'workings', 'DC', 'dipper', 'Kid', 'sinned', 'sword', 'amorphous', 'Rangoni', 'willed', 'Powers', 'frightful', 'Nineteen', 'annihilation', 'distinguishing', 'deduced', 'Rodgers', "Brumidi's", 'indefinitely', 'Presumably', 'delivers', 'mainland', 'elicited', 'Simultaneously', 'cathedral', 'Highlands', 'affectionate', 'charted', 'pre-war', 'plank', 'loneliness', 'pounding', 'Shell', 'Practically', 'portrayed', 'whereupon', 'blot', 'propriety', 'intensifiers', 'distrust', 'R-stage', 'suppress', 'responding', 'insisting', 'gyro-stabilized', 'rotor', 'gorgeous', 'amplifier', 'Nellie', 'grandeur', 'appoint', 'broadcasting', 'immortal', 'similitude', 'Negro-appeal', 'intensifier', 'foaming', 'shoving', 'clear-cut', 'Munich', 'continuance', 'forty-five', 'Narragansett', 'ailments', 'sweetly', 'Thou', 'concessionaires', 'Olga', 'surveillance', 'Test', 'Norberg', 'radiopasteurization', 'anew', 'Hesiometer', 'peeling', 'astonished', 'canons', 'garment', 'accepts', 'nation-state', 'firmer', 'Arp', 'forties', 'journal', 'tenuous', 'reversible', "captain's", 'affirmed', 'psychoanalytic', 'neurosis', 'comparisons', 'blurred', 'sensibilities', 'convict', 'betrayal', "Eichmann's", 'coasts', 'overcoming', 'strangled', 'adolescents', "family's", '1939', 'originality', 'reflective', 'J', 'contour', 'rat', 'straighten', 'dissolve', 'Multiply', 'calmed', 'axe', 'huts', 'ox', 'Leyte', 'ridden', 'trooper', 'organism', 'glowed', 'noisy', 'amplitude', 'gravity', 'Osaka', 'insolence', 'methodically', 'Capone', 'Saint', 'Amendment', 'definitions', 'indecent', 'myths', 'patriotism', 'drunken', 'Bellows', 'dodge', 'pretense', 'attorneys', 'trader', 'Examples', 'prompted', 'barley', 'Reich', 'flashing', 'metals', 'Burlington', 'complexion', 'whole-wheat', 'wastes', 'erosion', 'sandy', 'bizarre', '28th', 'momentary', 'Adoniram', 'cries', 'clambered', 'bellowed', 'chattering', 'decks', 'cargo', 'laden', 'psyche', 'Immediately', 'Whig', 'Tories', 'nodding', 'pretentious', 'anthropology', 'Cost', "Hearst's", 'brow', 'stairway', 'Manley', "Bang-Jensen's", "Copernicus'", 'scout', 'quackery', "Ptolemy's", 'kidney', 'epicycles', 'velocities', 'luxurious', 'Bey', 'astronomical', 'hid', 'prostitute', 'rabbi', 'submitting', 'sexually', 'Calderone', 'Kenneth', 'Jo', 'posterior', 'membrane', 'Huxley', 'Wexler', 'projecting', 'Travel', 'unhappiness', 'urgently', 'precinct', 'broadly', 'legendary', 'exclamation', 'Spade', 'verified', 'shaded', 'Della', 'piazza', 'renaissance', 'adjunct', 'blinked', 'interchange', 'disregard', 'glistening', 'explicitly', 'transcends', 'polarization', 'elementary-school', 'sniffed', 'vaults', 'attribute', 'lounge', 'Erikson', 'delinquent', 'Whigs', 'fuzzy', 'illustrative', 'testament', 'sidewise', 'whining', 'oddly', 'ponies', 'graves', 'starving', 'Agnese', 'whirled', 'uneasily', "Court's", 'internally', 'outbursts', 'Bourbons', 'socialist', 'feudal', 'complicity', 'reflections', 'abstractions', 'Nara', 'Masu', 'licensing', 'thinkers', 'Money', 'countenance', 'barren', 'oath', 'confidential', "Wisman's", 'Perier', 'engages', 'characteristically', 'rebuilding', 'Schwartz', 'sucked', 'sequences', 'robbers', 'construed', 'Antonio', 'misunderstood', 'slit', 'Emancipation', "William's", 'planter', 'Hays', 'Burgundy', 'practicable', 'churchyard', 'soaked', 'highroad', 'causal', 'merry', 'Heidenstam', 'healing', 'Parthenon', "parents'", 'patrolman', 'weakening', 'antiseptic', 'wrists', 'traversed', 'entrepreneur', "life's", 'Sherlock', 
'Cultural', 'informs', 'newspaperman', 'pretended', '300,000', 'sofa', 'unreal', 'travelers', 'defenders', "'im", 'small-town', 'unwanted', 'environmental', 'Collins', 'clad', 'syntax', 'Newton', 'Chapman', 'impelled', 'strays', 'Brazil', 'reconstruct', 'Seeds', 'Beckett', 'Waiting', 'corresponds', 'buzzing', 'dialectic', 'liberated', 'drafting', 'Operating', '1888', 'queer', 'scent', "master's", 'veiled', '$5,000', 'recurring', 'gunfire', 'tapping', 'Construction', 'Radiation', 'spear', 'tenor', 'spoiled', 'tablespoon', 'Contrary', 'rupee', 'groundwave', 'Vandiver', 'gloves', 'flares', 'yarns', 'rinse', 'emeralds', 'digital', 'manometer', '1895', 'Fountain', 'interfacial', 'Canyon', 'Zion', 'filtering', 'uneven', 'Pictures', 'Forge', 'authorizations', 'stain', 'greasy', 'users', 'cling', 'piling', 'buggy', 'drowned', 'Publication', 'conspicuous', 'thirty-four', 'calibration', 'Haven', 'Liberty', 'wander', 'Experiments', 'dusting', 'spectral', 'courteous', 'expandable', 'cottages', 'Linden', 'thermometers', 'clamps', '**yl', 'Needless', 'Run', 'sew', 'spoon', 'coil', 'Measurements', 'birthplace', 'polymerization', 'revolving', 'liquids', 'pleura', 'pleural', 'anastomoses', 'subtraction', 'File', 'progresses', 'Tables', 'slot', 'spacing', 'right-hand', 'arrows', 'ossification', 'connecting', 'levers', 'iodinated', 'notches', 'fibrosis', 'Microscopically', 'cord', 'Dowex-2-chloride', 'unduly', 'pseudophloem', 'Designers', 'horizons', 'tedious', 'NS', 'Engineers', 'compression', 'rpm', 'micelle', 'septa', 'Substances', 'pigs', 'strewn', 'sensors', 'intersect', 'meteorite', 'fried', 'pinch', 'minced', 'shave', 'porter', 'fragrance', 'Creek', 'Neutral', 'impetus', '1905', 'cocked', 'compares', 'carbine', 'agglutinin', 'cartridge', 'Deerstalker', 'Hi', 'larvae', 'boa', 'constrictor', 'Adios', 'annoying', 'Forensic', 'Hellenic', 'periodically', 'divan', 'denies', 'Suvorov', 'inhibition', 'Acropolis', 'shattering', 'assassin', 'severed', "Aristotle's", 'headaches', "Morgan's", 'Frequently', 'riflemen', 'ketosis', 'reproduction', 'enzyme', 'forty-four', 'one-fourth', 'abortion', '$2,000', '700', 'reduces', 'intercept', 'Drug', 'web', '13th', 'salts', 'comb', 'diction', 'place-name', 'firearms', "Milton's", 'carpenter', 'scaffold', 'packages', 'Officials', 'exhibiting', 'papal', 'quicker', 'sprayed', 'Venetian', 'adhesive', 'interplay', 'Lowell', 'low-cost', 'tubing', 'supplier', 'Ministry', 'lays', '$20', 'secretaries', 'remedies', "Thomas'", "public's", 'sundown', 'whistled', "SAAMI's", 'coordinates', 'jug', 'Poetry', 'intends', 'Rexroth', 'epoch', 'appointments', 'outraged', 'twenties', "Prokofieff's", 'Lauro', 'frontage', 'Bosis', 'swam', 'Contact', 'desolate', 'Geological', 'Offices', 'Cooperatives', 'condensation', 'BTU', 'focusing', 'Fixed', 'thermostat', 'filtered', 'upkeep', 'out-of-state', 'ducts', 'Symposium', 'pins', "pool's", 'centum', 'compose', 'profile', 'endorse', 'sunk', 'supplemented', 'Copenhagen', 'stimulate', 'educate', 'infiltration', 'veins', 'Macbeth', 'outlines', 'rotated', 'amplified', 'Shakespearean', 'Blake', 'replaces', 'compulsory', 'x-ray', 'staggering', 'streaks', 'bailiff', 'Stratford', 'clumsy', 'newborn', "Earth's", 'spontaneity', 'Breasted', 'expeditions', 'Trees', 'bewildered', 'loom', 'commodity', 'moderately', 'Turkey', 'deadline', 'pry', 'Border', 'aisle', 'dapper', 'Alliance', 'reliance', 'preferable', 'aiding', 'withdrawal', 'Multnomah', 'succeeding', 'unprepared', 'le', 'Individual', 'uranium', 'enrollment', 'discovering', 'successors', 'desegregated', 
'tentatively', 'treasures', 'prizes', 'first-class', 'smothered', 'lad', 'Lacking', 'flattened', 'investments', 'folly', 'turnpike', 'coincided', 'gripping', 'dramas', 'classify', 'flourished', 'Grasslands', 'zoo', 'roam', 'recognizing', 'wartime', 'repression', 'chanted', 'schemes', 'DeKalb', 'stance', 'retaliation', 'governors', 'resolutions', 'insured', 'budgeting', 'abandoning', 'geographically', 'understands', 'ever-present', 'apologetically', 'margins', 'outspoken', 'Elder', 'Fitzgerald', '1919', 'Beaverton', 'laymen', 'invoked', 'engaging', 'rationale', 'upheld', 'defendant', 'Aircraft', 'Nebraska', 'declines', 'esprit', "A's", 'well-informed', 'strengthened', 'episodes', 'Initially', 'throws', 'secretly', 'probe', 'weakened', 'debates', 'toughness', 'negotiation', 'announcements', 'heightened', 'disgusted', 'doubles', 'Ancient', 'pastors', 'advocating', 'guise', 'paired', 'aching', 'specifications', 'Farrell', 'photographers', "Miller's", 'Terry', 'grumble', 'drives', 'separating', 'downhill', 'contemptuous', 'rivalry', 'visibly', '120', 'putt', 'Augusta', 'disbelief', 'presiding', 'perennial', 'Humphrey', 'bodily', 'Colmer', 'noses', 'mountainous', 'upwards', 'invaded', 'presidents', 'banner', 'savages', 'fortunes', 'RCA', 'Records', 'inexpensive', 'presentations', 'interviewing', 'sculptor', 'manifestation', 'vantage', 'glands', 'experimented', 'shaving', 'toothbrush', 'throats', 'textures', 'airy', 'teachings', 'righteousness', 'Version', 'cohesion', 'gazing', 'academy', 'congressman', '1883', 'foes', 'bust', 'Specific', 'pasted', '1865', 'complexes', 'seller', 'Short', 'hardy', 'flowering', 'cordial', 'refreshing', 'inquire', 'Toward', 'omitting', 'laborer', 'packaged', 'cutters', 'riders', 'Voice', 'persist', 'angrily', 'costing', 'inception', 'canoe', 'accelerating', 'sack', "government's", 'devoid', 'wears', 'goat', 'fleeting', 'temptations', 'helper', 'subscribers', 'predecessors', 'mandate', 'futile', 'blossom', 'inaction', 'man-made', 'signature', 'floods', '1936', 'contractor', 'Nowhere', 'strife', 'refugees', 'outlined', 'sits', 'Survey', 'Udall', 'shoreline', 'keel', 'predicting', 'automation', 'breach', 'anti-Communist', 'Regardless', 'chancellor', 'assess', 'documented', '95', 'maze', 'stealing', 'canning', 'poise', 'forgiven', 'thinker', 'artificially', 'Iron', 'subordinate', 'unorthodox', 'Plans', 'clouded', 'assembling', 'fabulous', 'Reports', 'notch', 'versa', 'finances', 'out-of-town', 'heavenly', 'manhood', 'alternately', 'ambivalent', 'jam', 'Desegregation', 'towel', 'thanked', 'relic', 'feather', 'sadness', 'earthly', 'commotion', 'Confederacy', 'paneling', 'fireplace', 'storms', 'Jenks', 'Patrolman', '1934', 'pardon', 'Stewart', '$800', 'wavelengths', 'detecting', 'revived', 'navy', 'reactors', 'adviser', 'manuscript', 'wrapping', 'passport', 'patterned', 'hunter', 'Dreadnought', "Britain's", 'Various', 'abide', 'NBC', 'Elmer', 'finale', 'laborers', 'manual', "Mantle's", 'Johns', 'Prosecutor', 'Mile', '68', 'weaknesses', 'crippling', 'Indianapolis', 'Louisville', 'congestion', 'Turner', 'Pierce', 'Chuck', 'scratches', 'ripped', 'Wilmington', 'Wallace', 'tagged', 'arrears', 'bruises', 'creed', 'Teaching', 'Utah', 'tax-exempt', 'reflexes', 'Siberia', 'makeshift', 'Vietnamese', 'narrowly', 'northeast', 'Liston', 'Werner', 'ambulance', 'Abbey', 'feat', "She'll", 'athletics', 'morals', 'promotional', 'canceled', 'hooked', 'Honolulu', "summer's", 'buffet', 'Arms', 'idol', 'Cancer', 'Alvin', "Palmer's", 'possessing', 'spelled', 'flowed', 'sensational', 'Crystal', 
'super', 'Dame', 'Notre', 'inscription', 'healed', 'runaway', 'bats', 'Danny', 'slogan', 'bayonet', 'blazing', 'Harcourt', '$150', 'assaulted', '$20,000', 'robbery', 'Wiley', 'ashes', 'Auditorium', 'attends', 'Katherine', 'Sutherland', 'fraternity', 'Meyer', 'totals', 'doings', 'hospitality', 'Marie', 'Alpha', 'ants', 'accidentally', 'marvel', 'stadium', 'cookies', 'winner', 'Marr', 'Trophy', 'gala', '1926', 'grandchildren', 'caps', 'Stay', 'Stengel', 'residing', 'assisting', 'Pasadena', 'Browning', 'impeccable', 'pilgrimage', 'Getting', 'Bird', 'needles', 'linen', 'Robbins', 'Dunn', 'coconut', 'Hawaiian', 'ramp', 'vicinity', 'someplace', 'attire', 'Fresh', 'colts', 'tucked', 'snack', 'softening', 'outdoors', 'Heritage', 'sprinkle', "Sunday's", 'puppet', 'lamps', 'Yuri', 'Whitey', 'fore', 'windshield', 'wiping', 'gallons', 'popped', 'pitches', 'Channel', 'tease', 'boasted', 'senseless', 'auspices', 'Episcopal', 'manipulate', '86', 'assemblies', 'shortstop', 'lecturer', 'Workshop', 'productions', 'Lauderdale', 'props', 'workout', 'relegated', 'rookie', 'circus', 'Mostly', 'Forsythe', 'dividend', 'creator', 'Coach', 'ghastly', 'Quaker', 'm.p.h.', 'Allies', 'pollution', 'wrecked', 'plague', 'Convention', 'Kelsey', 'Buchheister', 'Sciences', 'opener', 'imprisonment', 'Riders', 'interfering', 'sinking', 'rescued', 'ashore', 'contributors', 'Workers', 'rings', 'wallet', 'lamb', 'assessments', 'ribbons', 'Nevada', 'Caesar', 'workmen', 'Leavitt', 'weaken', 'Greer', 'Butcher', 'Waters', 'Football', 'percentages', 'figuring', 'depletion', 'Toronto', '25%', '5%', 'borrowing', 'examiner', 'conversions', 'touchdown', 'debentures', 'liable', 'cautioned', 'pessimism', 'Janice', 'banquet', 'bearings', 'Moss', 'Gin', '56', 'Bonn', 'meter', "France's", '6th', 'rub', 'stunning', 'lacks', "Where's", 'booking', 'wrongs', 'demonstrates', 'splendor', 'vibrant', 'Lenin', 'imperial', 'Phase', 'subtly', 'paralysis', 'forthright', 'jackets', 'intimately', 'Guardian', 'Hough', 'Different', 'Writing', 'hymns', 'revulsion', 'continuum', 'adaptations', 'alienated', 'singly', 'Bates', 'unpopular', 'Especially', 'coffin', 'economist', 'spine', 'herds', 'excerpt', 'practitioners', 'atop', 'explode', 'ambassadors', 'proposing', 'grenades', 'poignant', 'brightly', 'spotlight', 'fairness', 'publicized', 'Eighth', 'scandal', 'soprano', 'gangs', 'disc', 'grudgingly', 'biting', 'saga', 'lash', 'casts', 'castle', 'uniformly', "leader's", 'evoke', 'Ethics', "Church's", 'virtuous', 'Emergency', 'indicative', 'Archbishop', 'volley', 'tingling', 'closes', 'abdomen', 'progressively', 'subjectively', "yesterday's", 'induction', 'alas', 'recourse', 'Abraham', 'idly', 'exceeding', 'non-Catholic', 'robe', 'glittering', 'hypocrisy', 'fascination', 'participants', 'communicative', 'complied', 'petty', 'twenty-two', 'frenzy', 'contend', 'superimposed', 'chords', 'slipping', 'Berman', 'Glenn', 'dreary', 'Armstrong', 'recital', 'juicy', 'Thurber', 'lapse', 'ecstasy', 'Hero', 'clip', 'operative', 'Active', 'auxiliary', 'twists', 'satellites', '$60', 'meticulously', 'mediocre', 'Ordinary', 'platforms', 'cock', 'Carey', 'buddies', 'Supply', 'probing', 'syndicate', 'Mando', 'huh', 'planks', 'Maintenance', 'brows', 'Orders', 'Burr', 'kindness', 'tribal', 'SMU', 'mastered', 'converse', 'supervise', 'Ronnie', 'Herford', 'allusions', 'monacle', 'showered', 'whoever', 'asserts', '77', 'Crisis', 'salesmanship', 'operatic', 'Page', 'Hayes', "Sloan's", 'shadowing', 'Sheriff', 'chronology', 'inflation', 'Clubs', 'Caravan', 'superseded', 'glared', 'hairy', 
'palette', 'complaining', 'squared', 'transactions', 'Baer', "lady's", 'Rebels', 'differentiated', 'sophomore', 'Loan', 'Blenheim', 'tattered', 'Grey', 'Investors', 'circulating', 'Letters', 'romantics', 'mergers', 'buyers', 'proximity', 'indecision', 'make-up', '1834', 'legacy', 'extraction', 'Gentile-Jewish', 'swirled', 'Emmett', 'piping', 'banister', 'brandishing', 'fruitless', 'devastating', 'telephones', 'entitle', 'thrusts', 'theologian', 'Trinity', 'Leesona', 'Eh', 'rotary', 'garages', 'Registry', 'cabinets', 'Remarks', 'self-sustaining', 'Sydney', 'richness', '3rd', 'Bud', 'Printed', 'Were', 'Rider', 'one-story', 'ghosts', 'Elman', 'pennies', 'dozed', 'disfigured', 'ghettos', 'Budd', 'celebrating', "months'", 'truthfully', 'excellently', 'out-of-doors', 'Gone', 'furs', 'Analysis', 'installment', 'reproducible', '1928', 'Arnolphe', 'speedy', 'interstellar', 'AFL-CIO', 'shutter', 'Rite', 'atmospheres', 'Larson', '$4', 'Seigner', 'Fifty', 'teenagers', 'mammalian', 'sensuality', 'morphological', 'Regions', 'baton', 'Sue', 'Millie', 'Pilgrims', 'Islanders', 'Pageant', 'eighty-sixth', 'buff', 'quarterly', 'Held', 'skepticism', 'Advocate', 'Reed', 'zest', 'Kemble', 'inject', 'grapes', 'rude', 'tangled', 'ratification', 'Round', 'Alpert', 'nasty', 'clerks', 'arbiter', "Edward's", 'thankful', 'Machine', 'Wild', 'periphery', "horse's", 'hurtling', 'kitchens', 'maneuver', 'airfield', 'Gosson', 'Thy', 'skimmed', 'exceeded', 'bequest', 'hemorrhage', 'Barber', 'Lappenberg', 'astonishingly', 'CTCA', 'Merchants', 'reserves', "Childhood's", "Wells's", 'banter', '1625', "Lewis's", 'Gantry', 'S.K.', 'interstate', 'adventurous', 'unseen', 'hys', 'Bancroft', 'mee', 'heroine', 'ther', 'yow', 'monei', 'estates', 'Apollo', 'gaunt', 'brigadier', 'immaculate', 'Audubon', 'pas', 'handing', 'selects', 'Ltd.', 'sparse', 'offenses', 'communes', 'hostilities', 'transitions', 'rounding', 'Cunningham', 'evolve', 'abused', 'Ching', 'intensities', 'serial', 'refinements', 'tapestry', 'coaches', 'choreographer', 'insofar', 'individualized', 'deference', 'ignores', 'ever-changing', '15%', 'astonishment', 'invaluable', 'Changes', 'option', 'plead', 'Personally', 'Petersburg', 'Byzantine', 'expired', 'Godot', 'merging', 'drugged', 'silhouettes', 'cunning', 'centrally', 'spire', 'enchanted', 'Md.', 'pots', 'tubs', 'Nathan', 'Las', 'Vegas', 'garments', 'Parks', 'enhanced', 'Drs.', 'negation', 'boarding', 'psychoanalysis', 'balcony', 'tame', 'mystique', 'Lipton', 'kneel', 'tweed', 'volatile', 'humility', 'sharpened', 'slides', 'puppets', 'Founding', 'authorize', 'staccato', 'continual', 'Cafe', 'stuffed', 'Nature', "Governor's", 'Late', 'elapsed', 'glinting', 'homeland', 'coincides', 'retailers', 'librarian', 'workshops', 'claimants', 'format', 'therefrom', "South's", 'vows', 'disrupted', 'unreconstructed', 'wrecking', "Army's", 'scornful', "states'", 'fundamentals', 'exploitation', 'Guest', 'capitalist', 'Knox', 'obnoxious', 'self-consciousness', 'nondescript', 'SR', 'cheerfully', 'avant-garde', 'fortress', 'senate', 'tying', 'alcoves', 'meager', 'upstream', 'relentless', 'lugged', 'enterprising', 'fella', 'decorator', 'planting', 'Flying', 'bivouac', 'Kings', 'self-conscious', 'Plus', 'displaying', 'swooped', 'thickened', 'batch', 'dissent', 'senators', 'fragment', 'guarded', 'Concerto', 'Jacoby', 'hatching', 'transaction', 'irritated', 'cradle', 'Lyford', 'particulars', 'habitual', 'Pendleton', '63', 'donor', 'Jason', 'Flemish', 'Ninth', 'sentry', 'enforcing', 'kerosene', 'diurnal', 'wasting', 'imposition', 'overrun', 
'fisherman', 'Grigorss', 'uncanny', 'whistling', 'contends', 'thrill', 'Reactionary', 'Treatment', 'prefers', 'Civilization', 'Ga.', 'receding', 'capacities', "Joe's", 'indulged', 'tact', 'roofs', 'adamant', 'maternal', 'slate', 'mattered', 'Constable', 'warrants', 'pads', 'Sadie', 'renting', 'unwillingness', 'saturation', 'Dietrich', 'mustache', '20%', "Harper's", 'monster', '1915', 'approximated', 'resultants', 'sojourn', 'logging', 'economists', 'distinguishes', 'Lyttleton', 'Scotch', 'deserts', 'Thinking', 'humiliation', 'Design', 'tiles', 'happenings', 'procession', 'Glen', 'coherent', 'illuminating', 'acquiescence', 'grouped', 'noblest', 'Cox', '1916', 'lawns', 'enroll', "Thompson's", 'sculptured', 'scepticism', 'Senators', 'interrelated', 'Parkhouse', 'intellect', 'authorizing', 'pedestal', 'nostalgic', 'slashing', 'catastrophes', 'schooling', 'disguise', 'downed', 'Called', 'advent', 'bout', "Gabriel's", 'competently', 'inheritance', 'powders', 'aberrant', 'commuting', 'Activities', "moment's", 'stimuli', 'actress', "corporation's", 'princess', 'Kerr', 'lets', 'hides', 'spitting', 'fervent', 'hamburger', 'Self', 'Morocco', 'Chicken', 'endeavors', 'measurable', 'Salt', 'seasoned', 'canned', 'Moriarty', 'crowding', "Watson's", 'gradients', 'cochannel', 'Wives', 'lavender', 'sparkling', 'Branch', 'hoarse', 'Moll', 'gasped', 'whispering', 'mumbled', 'Dodgers', 'satin', 'revivals', 'greatcoat', 'Mutton', 'Kappa', 'countrymen', 'drizzle', 'Shirley', 'Bern', 'Favre', 'flared', "Woman's", 'Thrift', 'Damn', 'Christopher', 'populous', "That'll", 'Couperin', 'Slater', 'Boyd', 'screeched', 'gasps', 'franchise', 'impinging', 'stockings', 'touring', 'knocking', 'scrawled', 'asset', 'Stowey', 'Salvation', 'shovel', 'froze', 'crib', 'smoothness', 'gulf', 'mat', 'steaming', 'chat', 'Meadow', 'implementation', 'informally', 'parole', "mustn't", 'striped', 'joyous', 'parted', 'overcoat', 'suitcases', 'Grabski', 'braced', 'scented', 'foster', 'shuddered', 'Baseball', 'Violet', 'Criticality', 'laundry', 'cu.', 'Simpson', 'shriek', 'baritone', 'three-year', 'accelerator', 'irradiated', 'proficient', 'radiosterilization', 'sterilization', 'ionizing', 'Currency', 'Properties', 'lay-offs', 'solvent', 'gallium', 'tektites', 'Eagles', 'hydrolysis', 'cliches', 'metabolite', 'chuckle', 'Cardinals', 'Arundel', 'discrepancies', 'Commissioners', 'tracts', 'template', 'embroidered', 'Drew', 'blackness', 'Monte', 'yanked', 'trembled', 'Jerome', 'dingy', 'Lynn', 'Louise', 'concertos', 'administrators', 'Rex', 'Seward', 'hosts', 'Musical', 'sweaty', 'coupler', 'phoned', 'belligerent', 'biscuits', 'sprawling', 'rococo', 'servo', 'torquer', 'torque', 'gyros', 'fringed', 'optimality', 'shakes', 'edging', 'Leona', 'down-to-earth', 'teens', 'kissing', "Kitty's", 'Teachers', 'cheekbones', 'downstream', 'metaphor', 'jagged', "Throat's", 'Manu', 'bleached', 'Hettie', 'Grafin', '130', 'Yugoslavia', 'Sante', 'DiMaggio', 'prescribe', 'glamorous', 'Nate', 'Burnsides', 'Arbuckle', 'gully', 'impeded', "bride's", 'Conchita', 'Maguire', 'Kruger', 'limbs', 'citation', 'vaudeville', 'Pompeii', 'Kizzie', 'utterance', '96', 'Bend', 'Kafka', 'Ruling', 'Hemus', 'Richert', 'Zenith', 'A-Z', 'furrow', 'rains', 'Acala', 'puny', 'twelfth', 'Tuxapoka', 'Thom', 'justly', 'John-and-Linda', 'complications', 'Hartweger', 'Walitzee', 'Askington', 'clutch', 'Reuveni', 'co-operative', "Roosevelt's", 'artistically', 'Diego', 'sunburn', 'Adelia', 'flicked', "Indian's", 'bale', 'kittens', 'mink', 'lush', 'plunge', 'Dolce', 'Vita', 'overalls', 'clapping', 
'plowed', 'dynamite', 'Loop', 'melodic', 'profess', 'Emile', 'virtual', "Garth's", 'technician', 'genial', 'tournaments', 'dived', 'Beginning', 'sponsorship', 'lurched', 'Guests', "driver's", 'Bradford', 'Carruthers', 'Writers', 'Homicide', 'Brian', "Mahzeer's", "Montero's", 'Ariz.', 'Aricaras', 'Fiske', 'incidental', 'half-breed', 'Molly', 'dromozoa', 'Florence', 'Earthmen', 'fairway', 'Towne', 'Martian', 'en', 'grok', 'admirably', 'fumbled', 'Anta', "Alec's", 'slogans', 'plumb', 'Gary', 'Jaguar', 'Silence', 'scoop', "Poet's", 'Calenda', 'Needham', 'Doris', 'Flower', 'DUF', 'Councilman', 'dispatched', 'bleachers', 'hatch', 'Turk', 'Human', 'catkins', 'pussy', 'Twins', 'Region', 'COAHR', 'eve', 'min', 'pledged', 'donors', 'Rh', 'ABO', 'uptown', 'electrophoresis', 'ulcer', 'inert', 'Survivors', 'otter', 'BW', 'intentional', 'Biological', 'microns', 'Nancy', 'inverse', 'sundry', 'basin', 'Slate', 'Psithyrus', 'rattlesnakes', 'Appeal', 'precincts', "Castro's", 'neutrophils', 'marrow', 'anemia', 'uptake', 'antithyroid', "University's", 'di-iodotyrosine', 'percussive', 'potassium', 'epiphysis', 'Milk', 'conforms', "Smith's", 'pictorial', 'signatures', 'Onsets', 'Chart', 'exempt', 'unanimous', 'shunts', 'Rall', 'artery-pulmonary', "president's", 'NE', 'hilum', 'pop', 'Leon', 'micrometeorites', 'Poynting-Robertson', 'inversely', '353', 'Fellows', 'constancy', 'one-', '111', 'Example', 'refinement', 'Sections', 'flaming', 'Webster', 'needing', 'suffrage', 'decrees', 'pinpoint', 'Patterson', 'Unifil', 'widened', 'shielding', 'Method', "Moliere's", 'Financial', 'backlog', 'Huntley', 'unofficial', 'Detective', 'hereinafter', 'culminates', 'Temperature', 'conductivity', 'Patrol', 'Uniconer', 'flask', 'ant', 'Pyrex', 'quantum', 'Ronald', 'interfaces', 'latch', 'accounted', 'lobes', 'Tucker', 'bunched', 'ion', 'disability', 'dipole', 'coordinator', 'curvature', 'Alumni', 'deformation', 'unfolding', 'elasticity', '30,000', 'Experimental', 'electrode', 'graphite', 'mm', 'reflector', 'Sloanaker', '20,000', 'Devey', 'retailing', 'mucosa', 'Legion', 'tribunal', 'sub', 'shreds', 'warrior', 'commanders', 'Marsden', "clerk's", 'Athletic', 'drugstore', 'masonry', 'whereabouts', 'Beautiful', 'dismounted', 'blackout', 'haunches', 'Baby', 'Clifford', 'priorities', 'Catskill', 'delicacy', 'marshal', 'widowed', 'plaintiff', '76', 'communist', "Secretary's", 'Xydis', 'timid', "mayor's", 'AWOC', 'Hartsfield', 'Cubist', 'relaxing', 'Poems', 'applaud', 'Index', 'two-digit', 'compiler', 'EQU', 'tread', 'shouts', '$15', 'attentive', 'soles', 'Gertrude', 'Tones', 'worrying', 'identifiable', 'lenses', 'Mityukh', 'gasping', 'barrage', 'Pimen', 'Capt.', 'Pozzatti', "university's", 'purposely', 'irritable', 'Dodge', 'Grady', 'amounted', 'Analytical', "Respondents'", "artists'", 'conjugated', 'triumphant', 'AIMO', 'subgroups', 'rebuild', 'Nagrin', 'helplessness', 'reassured', 'Sterling', 'pertains', "C'", 'Mt.', 'l', 'Elliott', 'Spahn', 'Cornell', 'Scale', 't', 'fumes', 'anymore', "D'", '**ya', 'Funds', 'Given', 'truce', 'neocortex', 'Delhi', 'verify', 'obscurity', 'Cubans', 'accents', 'I.Q.', 'Fifties', 'criminals', 'Gehrig', 'hunter-killer', 'loadings', 'clattered', 'U.N.F.P.', 'Balafrej', 'long-run', 'aberrations', 'nail', '53', 'racket', 'Pelham', 'aiming', 'phonology', 'mindful', 'peddler', 'Sandman', 'modifier', 'premiere', 'W-region', 'boarded', '30th', 'Herb', 'ensued', 'arresting', 'transposed', 'receptionist', 'Stickney', 'shapeless', 'Gas', 'scoring', 'comprehend', 'Estimate', 'housekeeping', 'rip', 'chines', 
'safeguard', 'one-inch', 'permissive', 'Hotei', 'postal', 'Letter', 'Occasional', 'suffused', 'marker', 'bids', 'candles', 'cans', 'endeavor', 'affiliations', 'lids', 'allegations', 'Creek-Turn', 'craters', 'holocaust', 'butchery', 'beaming', 'disciples', 'diplomat', 'arouse', 'girlish', 'installing', 'deceived', 'drains', 'Jonathan', 'navigation', 'intellectually', 'piers', '1793', '1810', 'aloof', 'rod', 'ridicule', 'prophecy', 'reminding', 'lent', 'Commonwealth', 'Deer', 'Salisbury', 'Available', 'Newburyport', 'Almighty', 'reversing', 'Feed', 'Cunard', 'slippery', 'securely', 'radial', 'bishops', 'inspector', 'timbers', 'pope', 'surprises', 'chunks', 'Makes', 'predecessor', 'subsided', 'midday', 'carriages', 'mischief', 'wrap', 'bedside', 'invaders', 'hopped', 'attachment', 'photo', 'expanse', 'chilly', 'hum', 'neutralism', 'peacefully', 'all-white', 'apples', 'pies', 'nuisance', 'walnuts', 'freeways', 'roasted', 'freeway', 'dormant', 'allocated', 'Thant', 'ripple', 'tilt', 'peninsula', 'primacy', 'signaling', 'avenues', 'self-contained', 'sauces', 'chili', 'popping', "Aren't", 'breathless', 'cartoons', 'forbids', 'sausages', 'Simple', 'unworthy', 'believers', 'unbreakable', 'hibachi', 'suffers', 'begotten', 'Istanbul', 'correspondents', 'hammock', 'towers', 'pillars', 'obelisk', 'caressing', 'Hippodrome', 'plastered', 'bazaar', 'harshly', 'Turks', 'Bosphorus', 'disappearing', 'salvage', 'Swing', 'roadway', '1868', 'lodging', 'Frontier', 'sane', 'surveyor', 'boyhood', 'persuading', 'Horace', 'receivers', 'sonar', 'Nugent', 'Barnard', 'viable', 'Leader', 'murderers', 'computation', 'collects', 'invade', 'microscopic', 'laboratories', 'sidewalks', 'inconsistent', 'attrition', 'abiding', 'Algerian', 'unwelcome', 'athletes', 'connotation', 'burnt', 'easel', 'oyster', 'unwittingly', 'bosses', 'Area', 'valleys', 'portrays', 'calves', 'milligram', 'infections', 'totaling', 'Unity', 'eternity', 'Aureomycin', 'severity', 'scours', 'Simply', 'glamour', 'polarity', 'divisive', 'Eastwick', 'fuse', 'faction', 'esteem', 'faculties', '39', 'reckon', 'institute', 'shortcomings', 'disorganized', 'fertile', "host's", 'regulars', 'envied', 'washes', 'temporal', 'Faget', 'freer', 'penance', 'Sargent', 'animated', 'Shriver', 'latitude', 'stagnant', 'adherents', 'enjoined', "Berger's", 'zoning', 'tiger', 'trunks', 'pertaining', 'conditioners', 'dictated', 'colder', 'preposterous', 'Channing', "another's", 'screening', 'bankrupt', 'carving', 'Grafton', 'haven', 'retreating', 'Philippi', 'clash', 'refreshed', 'reluctance', 'regulatory', 'laissez-faire', "Hammarskjold's", 'shabby', 'homogeneity', 'uncompromising', 'novelty', 'sting', 'provisional', 'fluent', 'haul', 'fission', 'captive', 'inhibited', 'mistress', 'sprouting', 'dismay', 'genetic', 'commendable', '$5000', 'checking', 'affinity', 'encroachment', 'caves', 'would-be', 'apprehensions', 'Oslo', 'indelible', 'hazardous', 'posterity', 'accelerate', 'unequivocally', 'consuming', 'Ephesians', '2-year-old', 'beauties', 'substitutes', 'Clean', 'uncertainties', 'Leg', 'glances', 'affords', 'defines', 'wry', 'fading', 'interfaith', 'affecting', 'occasioned', 'carts', 'authoritarian', 'gotta', 'barbell', 'Operations', 'ready-made', 'bravado', 'Courts', 'prominence', 'foresight', 'developer', 'Beth', 'astounding', 'brotherhood', 'instantaneous', 'encourages', 'confuse', 'depressing', 'dietary', 'Reply', 'symptom', 'perfected', 'baths', 'committing', 'corruptible', 'ideally', 'bordering', 'unscrupulous', 'stealth', 'Nuclear', 'vibration', 'enrich', 'vitamin', 
'Welcome', 'appreciable', 'Nehru', 'provocation', 'nutrients', 'Noting', 'Secretariat', 'refrigerated', 'pulp', 'avocados', 'buds', 'voiced', "Lincoln's", 'justifiably', 'amen', 'Words', 'warts', 'Veterans', 'denounce', 'certify', 'terrific', 'diplomats', 'cypress', 'swamp', 'professed', 'agenda', 'professions', 'stumps', 'equity', 'contingencies', 'Princess', 'Discussion', 'minimized', 'Realtors', 'respectability', 'stricken', 'Lower', 'oneself', 'ambivalence', "anyone's", 'self-evident', 'post-attack', 'lessen', 'clothed', 'cafes', 'incur', 'External', 'discouraging', 'counterpoint', 'worlds', 'arithmetic', 'SEATO', 'impractical', 'initiate', 'exasperation', 'sympathetically', 'inaugural', 'Ceylon', 'Top', 'doctrines', 'suffocating', 'pro-Communist', 'Cairo', 'insuring', 'prevalent', 'expelled', 'repelled', 'neutralized', 'fearless', 'risks', 'comply', 'greedy', 'Bertha', 'proprietors', 'imputed', 'Walt', 'inaccurate', 'lowering', 'generalize', 'escalation', 'threatens', 'equated', 'obsolete', 'Rich', 'Martha', 'Faber', 'revered', 'do-it-yourself', "Stalin's", 'fine-looking', 'psalmist', 'warlike', 'thrived', 'Caper', 'enlist', 'Fury', 'Dale', 'wailing', 'axle', 'irregularly', 'Carvey', 'thence', 'organizational', 'boring', 'Communese', 'multiplication', "'round", 'triangular', 'excerpts', 'accessible', 'Cabinet', 'unfavorable', 'baffled', 'Demon', '6,000', 'scabbard', '30%', 'misdeeds', 'medicines', 'Watch', 'hamper', "Richard's", 'orchards', 'duplicate', 'creaking', 'adapt', 'Colt', 'hunters', 'ingenuity', 'Rouge', 'incoming', 'brakes', 'single-shot', 'cartridges', 'recoil', 'Magnums', 'edible', 'Remington', 'scales', 'Baton', 'buck', 'flows', 'mania', 'traits', 'imitate', 'underside', 'slots', 'Miss.', 'congratulations', 'harmonious', 'Aids', 'tendencies', 'reverence', 'monumental', 'Dog', 'proving', 'vanish', 'Divinity', 'faithfully', 'archaic', 'Directions', 'contestants', 'activated', 'uttered', 'bishop', 'ascribed', 'embark', 'Curzon', 'elongated', 'Wine', 'pumps', 'Universal', 'sandals', 'priceless', 'padded', 'dip', 'Judging', 'kitten', 'Number', 'despotism', 'spacers', 'drilled', 'left-hand', 'intercontinental', "12''", 'injecting', 'modifications', 'widths', 'succumbed', '1821', 'Evening', 'Classical', 'dynasty', 'glitter', 'pets', "water's", 'headache', 'divergent', 'wherein', 'horsepower', 'paved', 'fullest', 'Train', 'fills', 'arbitrarily', 'woodwork', 'craftsmanship', 'symbolically', 'misgivings', 'confronts', 'wits', 'journals', 'poker', 'Tobacco', 'injunctions', 'necessitate', 'sickness', 'coolness', 'Fran', 'interfered', 'generates', 'academically', 'President-elect', 'ancestral', 'beards', 'exaggeration', "People's", 'chaotic', 'directs', 'seize', 'transports', 'Inauguration', 'Scientific', 'Educational', 'spans', 'siege', 'blackened', 'Election', 'shutters', 'thumping', 'rattle', 'mechanic', 'proverb', 'capsule', 'Koreans', 'supremacy', 'ax', 'linking', 'chopping', 'cropped', 'surround', 'tenant', 'shrine', 'debated', 'Ideas', 'appointees', 'apologized', 'Minutemen', 'furious', 'glycerine', 'rig', 'humane', 'Folklore', 'Pp.', 'one-tenth', 'chilling', 'allowable', 'dispose', 'inscrutable', 'liquidated', '1776', 'steeped', 'monopolies', 'decisively', 'backwoods', 'machinist', 'reputed', 'shudder', 'acutely', 'Coe', 'analysts', 'hasty', 'appestat', 'obsession', 'whichever', 'rationalize', 'Assemblies', 'awed', 'ass', 'stereotyped', 'insecurity', 'Parsons', 'shaky', 'bogey', 'Rebs', 'stationed', 'Yank', 'two-year', 'figurative', 'thicker', 'unmistakably', '1844', 
'Lucian', 'butts', 'aggressiveness', 'Alan', 'immoral', 'Cicero', 'lethal', 'Rule', 'abreast', 'oppressed', 'graphic', 'battlefield', 'hurling', 'lukewarm', 'leaked', 'smelling', 'troopers', 'Custer', 'swivel', 'recipients', 'phalanx', 'verbally', 'obsessed', 'anti-party', 'lingering', 'Medicine', 'clarification', 'editing', 'catastrophic', 'proliferation', 'buckle', 'midway', "O'Connor", '1909', 'waiters', 'bulwark', 'factions', 'punch', 'Penn', 'plagued', 'age-old', 'denoting', 'OK', 'Join', 'buzz', 'skillfully', 'Corinthian', 'aft', 'Columns', 'poking', 'Americana', 'Texan', 'Hanch', 'escaping', 'Off', 'O.E.C.D.', 'Regional', 'Brevard', 'crippled', 'Concerts', 'Palm', 'welcoming', 'Hail', 'tag', 'self-discipline', 'groping', 'liaison', 'completes', 'Hartman', 'Standing', 'uneasiness', 'overweight', 'commentary', 'gaily', 'Philosophy', 'espionage', 'triple', 'runners', 'soloists', 'Mail', 'indoors', '16th', 'tuition', 'harmless', 'harassed', 'unfriendly', 'upper-middle-class', 'fins', 'Staten', 'frigid', 'motifs', 'outreach', 'exploring', 'briskly', 'cart', 'austere', 'advantageous', 'registers', 'self-examination', 'Historically', 'Northerners', 'Breeding', "Nation's", 'Portago', 'constituent', 'salutary', 'minorities', 'formulae', 'Oriole', 'sacrifices', 'hosses', 'peanut', 'Fisher', 'achieves', 'stony', 'fibrous', 'locust', 'nourished', 'sparks', 'lieu', 'hiring', 'soybeans', 'consolidated', 'tenacity', 'intangible', 'hardships', 'rhetoric', 'definitive', 'Fleet', "Morse's", 'inquest', 'unloaded', 'corral', "givin'", 'resorted', 'trolley', 'limb', 'shone', 'Europeans', 'kidneys', 'exchanges', 'aimless', 'inn', 'Likewise', 'tariff', 'tuberculosis', 'scant', 'southpaw', 'intimated', 'Lillian', 'ducked', 'mystical', 'Ideally', 'stamping', 'Heywood', 'adversary', 'thickly', 'fetch', 'existent', 'insignificant', 'Loveless', 'second-rate', 'sway', 'hoss', 'transported', 'vices', 'chemically', 'Caution', 'cross-section', 'Barth', 'ventured', 'bombing', 'psychiatric', 'frontiers', 'linguistics', 'firemen', 'unmarried', 'perverse', 'programming', 'cute', 'nephews', 'seating', 'contemplated', 'extensions', 'liberation', 'Luther', 'lofty', 'layout', 'shrink', 'handwriting', 'bricks', 'high-priced', 'coolly', 'unloading', 'Mainland', 'unitized', 'Lumber', 'budgets', 'psychiatrists', 'psychotherapy', 'flags', 'overthrow', 'conferred', 'perpetuate', 'mortgages', '29th', 'Greeks', "sheriff's", 'wakeful', 'steal', 'richly', 'high-school', 'rape', 'dismiss', 'beacon', 'Beardens', 'drab', 'silenced', 'Seaman', 'paradise', 'Vesole', 'raids', 'Crosby', 'distracted', 'vile', 'poisonous', 'all-important', 'stew', 'menu', 'stressing', 'richest', 'cherish', 'Daer', 'librarians', 'migration', 'undeniable', 'peeled', 'undertaking', 'strengthens', 'UPI', 'alleviate', 'checkbook', 'removes', 'relying', 'vending', 'uninterrupted', 'shortsighted', 'refuses', 'pedestrian', 'extant', 'demonstrating', 'politeness', 'beverage', 'qualitative', 'Technical', 'extravagant', 'enhance', 'Increased', 'Reynolds', 'Capital', 'clusters', 'primeval', 'isolating', "Jefferson's", 'comforts', '260', 'entrenched', 'entail', 'opaque', '1965', 'Full', 'shielded', 'stays', 'polyester', 'periodicals', 'chipping', 'saves', 'bookkeeping', 'Schools', 're-enter', 'Story', 'fireworks', 'bugs', 'gallon', 'shocks', 'richer', 'Oakwood', 'evaluations', 'fuller', 'badge', 'Formosa', 'turbine', 'fights', 'needy', 'tenth', 'escorted', 'orator', 'abstention', 'scan', 'brother-in-law', 'ripples', 'greeting', 'hail', 'nationwide', 'locality', 'weeds', 
'Rotary', 'week-end', 'intrinsic', 'Cod', 'Z', 'empirically', 'inquiring', 'Conversely', 'evacuation', 'witch', 'experimenter', 'glimpsed', 'sly', 'disrupt', 'forecasts', 'appliance', 'Susie', 'suck', 'prosecuted', 'Stephens', 'prescription', 'twirling', 'disinterested', 'corpses', 'legally', 'Cerv', '160', 'Kingston', 'crystalline', 'Forks', 'awe', 'sworn', 'patched', 'spurred', "ship's", 'Pole', '43', 'fund-raising', 'flourish', 'landscapes', 'corpus', 'clenched', 'ballplayer', 'murky', 'volunteered', 'Arabic', 'materially', 'vowed', 'Final', 'Czechoslovakia', 'Circuit', 'greetings', 'recollection', 'catharsis', 'chloride', 'Ernst', '2000', 'siding', 'licenses', 'motivations', 'Kasavubu', 'Patrice', 'Find', 'fragmentation', 'serenity', 'hazy', 'Mayer', 'meadows', 'one-man', 'engulfed', '23d', 'safer', 'manifestly', 'Waco', 'intimidation', 'livelihood', 'Englander', 'Barre', 'owes', 'misuse', 'labored', 'northward', 'variously', 'Finding', 'adhered', 'Inter-American', 'forestall', 'needless', 'Reama', 'geography', 'disintegration', 'classrooms', 'evokes', 'evasive', 'lied', 'Liberals', 'decorated', 'designate', 'gamblers', 'gushed', 'Farouk', 'Egyptian', 'quarreling', 'innumerable', 'unrest', 'Private', 'ironing', 'Mutual', 'Insurance', 'bump', 'censorship', 'Devil', 'compassion', 'facilitate', 'anterior', 'physicians', 'caresses', 'fearing', '1845']
```python
print(get_embedding('weather', w2vmodel))
```
[-0.06657724 0.23923697 -0.22193056 -0.05053873 -0.15198396 -0.36812955
0.31793493 0.08513108 -0.22167377 -0.09185731 -0.07330202 -0.15558665
0.29993063 0.01376305 -0.2741381 0.11167396 -0.275254 0.20519948
-0.32458746 -0.5467078 0.17347142 0.41114295 0.50191754 -0.13568546
-0.02426466 0.0544681 0.15221758 0.21194571 -0.32719845 0.13250726
-0.31539917 -0.2656799 0.1510801 -0.13705443 0.0154411 -0.24956354
0.353087 0.13721961 0.03000911 -0.08374713 0.07330421 0.13896415
-0.20446609 0.17883033 0.4363915 0.37328458 0.12124036 -0.21962334
0.3051153 0.03175303]
## Section 2.2: Visualizing Word Embeddings
We can now obtain the word embeddings for any word in the dictionary using word2vec. Let's visualize these embeddings to get an intuition of what they mean. The word embeddings obtained from the word2vec model live in a high-dimensional space. We will use `tSNE` (t-distributed stochastic neighbor embedding), a statistical method for dimensionality reduction that allows us to visualize high-dimensional data in a 2D or 3D space. Here, we will use `tSNE` from the [`scikit-learn`](https://scikit-learn.org/stable/modules/generated/sklearn.manifold.TSNE.html) module (if you are not familiar with this method, think of `PCA`) to project our high-dimensional embeddings into 2D space.
For each word in `keys`, we pick the top 10 similar words (using cosine similarity) and plot them.
What should be the arrangement of similar words?
What should be the arrangement of the key clusters with respect to each other?
```python
keys = ['voters', 'magic', 'love', 'God', 'evidence', 'administration', 'governments']
```
```python
def get_cluster_embeddings(keys):
embedding_clusters = []
word_clusters = []
# find closest words and add them to cluster
for word in keys:
embeddings = []
words = []
if not word in w2vmodel.wv.key_to_index:
print('The word ', word, 'is not in the dictionary')
continue
for similar_word, _ in w2vmodel.wv.most_similar(word, topn=10):
words.append(similar_word)
embeddings.append(w2vmodel.wv[similar_word])
embedding_clusters.append(embeddings)
word_clusters.append(words)
# convert the clusters to an array and project them to 2D with tSNE
embedding_clusters = np.array(embedding_clusters)
n, m, k = embedding_clusters.shape
tsne_model_en_2d = TSNE(perplexity=10, n_components=2, init='pca', n_iter=3500, random_state=32)
embeddings_en_2d = np.array(tsne_model_en_2d.fit_transform(embedding_clusters.reshape(n * m, k))).reshape(n, m, 2)
return embeddings_en_2d, word_clusters
```
```python
def tsne_plot_similar_words(title, labels, embedding_clusters,
word_clusters, a, filename=None):
plt.figure(figsize=(16, 9))
colors = cm.rainbow(np.linspace(0, 1, len(labels)))
for label, embeddings, words, color in zip(labels, embedding_clusters, word_clusters, colors):
x = embeddings[:, 0]
y = embeddings[:, 1]
plt.scatter(x, y, color=color, alpha=a, label=label)
for i, word in enumerate(words):
plt.annotate(word,
alpha=0.5,
xy=(x[i], y[i]),
xytext=(5, 2),
textcoords='offset points',
ha='right',
va='bottom',
size=10)
plt.legend(loc="lower left")
plt.title(title)
plt.grid(True)
if filename:
plt.savefig(filename, format='png', dpi=150, bbox_inches='tight')
plt.show()
```
```python
embeddings_en_2d, word_clusters = get_cluster_embeddings(keys)
tsne_plot_similar_words('Similar words from Brown Corpus', keys, embeddings_en_2d, word_clusters, 0.7)
```
## Section 2.3: Exploring meaning with word embeddings
While word2vec was the method that started it all, research has since boomed, and we now have more sophisticated ways to represent words. One such method is FastText, developed at Facebook AI Research, which breaks words into sub-words: this also allows us to create embedding representations for unseen words. In this section, we will explore how semantics and meaning are captured using embeddings, after downloading a pre-trained FastText model. Downloading pre-trained models lets us plug in word embeddings and explore them without training them ourselves.
```python
# @title Download FastText English Embeddings of dimension 100
import os, io, zipfile
from urllib.request import urlopen
zipurl = 'https://osf.io/w9sr7/download'
print(f"Downloading and unzipping the file... Please wait.")
with urlopen(zipurl) as zipresp:
with zipfile.ZipFile(io.BytesIO(zipresp.read())) as zfile:
zfile.extractall('.')
print("Download completed!")
```
Downloading and unzipping the file... Please wait.
Download completed!
```python
# Load 100 dimension FastText Vectors using FastText library
ft_en_vectors = fasttext.load_model('cc.en.100.bin')
```
Warning : `load_model` does not return WordVectorModel or SupervisedModel any more, but a `FastText` object which is very similar.
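Since FastText composes a word's vector from character n-gram vectors, it can also return an embedding for a token it has never seen during training. A minimal sketch of this (the token `blorptastic` is made up purely for illustration):
```python
# FastText builds vectors from sub-word (character n-gram) embeddings,
# so even an out-of-vocabulary, made-up token gets a well-defined vector.
oov_vec = ft_en_vectors.get_word_vector('blorptastic')
print(oov_vec.shape)  # (100,) -- same dimensionality as in-vocabulary words
```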
```python
print(f"Length of the embedding is: {len(ft_en_vectors.get_word_vector('king'))}")
print(f"Embedding for the word King is: {ft_en_vectors.get_word_vector('king')}")
```
Length of the embedding is: 100
Embedding for the word King is: [-0.04045481 -0.10617249 -0.27222311 0.06879666 0.16408321 0.00276707
0.27080125 -0.05805573 -0.31865698 0.03748008 -0.00254088 0.13805169
-0.00182498 -0.08973497 0.00319015 -0.19619396 -0.09858181 -0.10103802
-0.08279888 0.0082208 0.13119364 -0.15956607 0.17203182 0.0315701
-0.25064597 0.06182072 0.03929246 0.05157393 0.03543638 0.13660161
0.05473648 0.06072914 -0.04709269 0.17394426 -0.02101276 -0.11402624
-0.24489872 -0.08576579 -0.00322696 -0.04509873 -0.00614253 -0.05772085
-0.073414 -0.06718913 -0.06057961 0.10963406 0.1245006 -0.04819863
0.11408057 0.11081408 0.06752145 -0.01689911 -0.01186301 -0.11716368
-0.01287614 0.10639337 -0.04243141 0.01057278 -0.0230855 -0.04930984
0.04717607 0.03696446 0.0015999 -0.02193867 -0.01331578 0.11102925
0.1686794 0.05814958 -0.00296521 -0.04252011 -0.00352389 0.06267346
-0.07747819 -0.08959802 -0.02445797 -0.08913022 0.13422231 0.1258949
-0.01296814 0.0531218 -0.00541025 -0.16908626 0.06323182 -0.11510128
-0.08352032 -0.07224389 0.01023453 0.08263734 -0.03859017 -0.00798539
-0.01498295 0.05448429 0.02708506 0.00549948 0.14634523 -0.12550676
0.04641578 -0.10164826 0.05370862 0.01217492]
Cosine similarity is used to measure the similarity between words; it is a scalar between -1 and 1 (and typically between 0 and 1 for related words).
Now let's find the 10 most similar words to "king":
```python
ft_en_vectors.get_nearest_neighbors("king", 10) # Most similar by key
```
[(0.8168574571609497, 'prince'),
(0.796097457408905, 'emperor'),
(0.7907207608222961, 'kings'),
(0.7655220627784729, 'lord'),
(0.7435404062271118, 'king-'),
(0.7394551634788513, 'chieftain'),
(0.7307553291320801, 'tyrant'),
(0.7226710319519043, 'conqueror'),
(0.719561755657196, 'kingly'),
(0.718187689781189, 'queen')]
### Word Similarity
```python
# @title Video 3: Semantic Measurements
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = "https://player.bilibili.com/player.html?bvid={0}&page={1}".format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id=f"BV15w411R7SW", width=854, height=480, fs=1)
print("Video available at https://www.bilibili.com/video/{0}".format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id=f"Y45KIAOw4OY", width=854, height=480, fs=1, rel=0)
print("Video available at https://youtube.com/watch?v=" + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
# add event to airtable
atform.add_event('Video 3: Semantic Measurements')
display(out)
```
Tab(children=(Output(), Output()), _titles={'0': 'Youtube', '1': 'Bilibili'})
More on similarity between words. Let's check how similar different pairs of words are. Feel free to play around.
```python
def getSimilarity(word1, word2):
v1 = ft_en_vectors.get_word_vector(word1)
v2 = ft_en_vectors.get_word_vector(word2)
return cosine_similarity(v1, v2)
print("Similarity between the words King and Queen: ", getSimilarity("king", "queen"))
print("Similarity between the words King and Knight: ", getSimilarity("king", "knight"))
print("Similarity between the words King and Rock: ", getSimilarity("king", "rock"))
print("Similarity between the words King and Twenty: ", getSimilarity("king", "twenty"))
## Try the same for two more pairs
# print("Similarity between the words ___ and ___: ", getSimilarity(...))
# print("Similarity between the words ___ and ___: ", getSimilarity(...))
# print("Similarity between the words ___ and ___: ", getSimilarity(...))
# print("Similarity between the words ___ and ___: ", getSimilarity(...))
```
Similarity between the words King and Queen: 0.71818775
Similarity between the words King and Knight: 0.6881009
Similarity between the words King and Rock: 0.28928384
Similarity between the words King and Twenty: 0.19655468
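The `cosine_similarity` helper used inside `getSimilarity` comes from the notebook's setup code; it is assumed to compute the standard cosine of the angle between two vectors, along these lines:
```python
import numpy as np

def cosine_similarity_sketch(u, v):
    # cos(theta) = (u . v) / (||u|| * ||v||)
    return np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))

v1 = ft_en_vectors.get_word_vector("king")
v2 = ft_en_vectors.get_word_vector("queen")
print(cosine_similarity_sketch(v1, v2))  # should be close to getSimilarity("king", "queen")
```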
### Homonym Words$^\dagger$
Find the similarity for homonym words with their different meanings. The first one has been implemented for you.
$^\dagger$: Two or more words having the same spelling or pronunciation but different meanings and origins are called *homonyms*, e.g., *cricket* (the insect) and *cricket* (the sport).
```python
####################### Words with multiple meanings ##########################
print("Similarity between the words Cricket and Insect: ", getSimilarity("cricket", "insect"))
print("Similarity between the words Cricket and Sport: ", getSimilarity("cricket", "sport"))
## Try the same for two more pairs
print("Similarity between the words deep and creep: ", getSimilarity("deep", "creep"))
print("Similarity between the words deep and linear: ", getSimilarity("deep", "regression"))
print("Similarity between the words buzzard and deep: ", getSimilarity("buzzard", "yard"))
print("Similarity between the words buzzard and linear: ", getSimilarity("buzzard", "dee"))
```
Similarity between the words Cricket and Insect: 0.40722153
Similarity between the words Cricket and Sport: 0.58123744
Similarity between the words deep and creep: 0.4388425
Similarity between the words deep and regression: 0.23548378
Similarity between the words buzzard and yard: 0.4081001
Similarity between the words buzzard and dee: 0.36218688
### Word Analogies
Embeddings can be used to find word analogies.
Let's try it:
1. Man : Woman :: King : _____
2. Germany: Berlin :: France : ______
3. Leaf : Tree :: Petal : _____
```python
## Use the get_analogies() function. The words have to be given in the order: Positive, Negative, Positive
# Man : Woman :: King : _____
# Positive=(woman, king), Negative=(man)
print(ft_en_vectors.get_analogies("woman", "man", "king",1))
# Germany: Berlin :: France : ______
# Positive=(berlin, france), Negative=(germany)
print(ft_en_vectors.get_analogies("berlin", "germany", "france",1))
# Leaf : Tree :: Petal : _____
# Positive=(tree, petal), Negative=(leaf)
print(ft_en_vectors.get_analogies("tree", "leaf", "petal",1))
# Hammer : Nail :: Comb : _____
# Positive=(nail, comb), Negative=(hammer)
print(ft_en_vectors.get_analogies("nail", "hammer", "comb",1))
```
[(0.8162637948989868, 'queen')]
[(0.8568049669265747, 'paris')]
[(0.7037209272384644, 'flower')]
[(0.6908746361732483, 'hair')]
But, does it always work?
1. Poverty : Wealth :: Sickness : _____
2. train : board :: horse : _____
```python
# Poverty : Wealth :: Sickness : _____
print(ft_en_vectors.get_analogies("wealth", "poverty", "sickness",1))
# train : board :: horse : _____
print(ft_en_vectors.get_analogies("board", "train", "horse",1))
```
[(0.615874171257019, 'affliction')]
[(0.5437814593315125, 'bull')]
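Analogy queries like these boil down to vector arithmetic in the embedding space: the vector for king − man + woman should land close to queen. A rough sketch that scores only a small hand-picked candidate list (rather than searching the whole vocabulary, and without the query-word exclusion that `get_analogies` performs):
```python
import numpy as np

def cos(u, v):
    return np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))

analogy_vec = (ft_en_vectors.get_word_vector("king")
               - ft_en_vectors.get_word_vector("man")
               + ft_en_vectors.get_word_vector("woman"))

# Compare the analogy vector against a few candidates (illustration only)
for candidate in ["queen", "prince", "horse", "sickness"]:
    print(candidate, cos(analogy_vec, ft_en_vectors.get_word_vector(candidate)))
```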
---
# Section 3: Neural Net with word embeddings
*Time estimate: ~16mins*
Let's use the pretrained FastText embeddings to train a neural network on the IMDB dataset.
To recap, the data consists of reviews and the sentiments attached to them. It is a binary classification task. As a simple preview of the upcoming neural networks, we are going to introduce a neural net with word embeddings. We'll see detailed networks in the next tutorial.
## Coding Exercise 3.1: Simple Feed Forward Net
This will load 300 dim FastText embeddings. It will take around 2-3 minutes.
Define a vanilla neural network with linear layers. Then average the word embeddings to get an embedding for the entire review.
The neural net will have one hidden layer of size 128.
```python
# @title Download embeddings and clear old variables to clean memory.
# @markdown #### Execute this cell!
if 'ft_en_vectors' in locals():
del ft_en_vectors
if 'w2vmodel' in locals():
del w2vmodel
embedding_fasttext = FastText('simple')
```
.vector_cache/wiki.simple.vec: 293MB [00:16, 17.9MB/s]
0%| | 0/111051 [00:00<?, ?it/s]Skipping token b'111051' with 1-dimensional vector [b'300']; likely a header
100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 111051/111051 [00:06<00:00, 17521.28it/s]
```python
# @markdown Load the Dataset
TEXT, vocab_size, train_iter, valid_iter, test_iter = load_dataset(embedding_fasttext, seed=SEED)
```
downloading aclImdb_v1.tar.gz
aclImdb_v1.tar.gz: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 84.1M/84.1M [00:04<00:00, 17.4MB/s]
Data are loaded. sentence length: 50 seed: 2021
```python
class NeuralNet(nn.Module):
def __init__(self, output_size, hidden_size, vocab_size, embedding_length,
word_embeddings):
super(NeuralNet, self).__init__()
self.word_embeddings = nn.Embedding(vocab_size, embedding_length)
self.word_embeddings.weight = nn.Parameter(word_embeddings,
requires_grad=False)
self.fc1 = nn.Linear(embedding_length, hidden_size)
self.fc2 = nn.Linear(hidden_size, output_size)
def forward(self, inputs):
input = self.word_embeddings(inputs) # convert text to embeddings
####################################################################
# Fill in missing code below (...)
#raise NotImplementedError("Fill in the Neural Net")
####################################################################
# Average the word embeddings in a sentence
# Use torch.nn.functional.avg_pool2d to compute the averages
pooled = torch.nn.functional.avg_pool2d(input, kernel_size=(input.shape[1], 1)).squeeze(1)
# Pass the embeddings through the neural net
# A fully-connected layer
x = self.fc1(pooled)
# ReLU activation
x = torch.nn.functional.relu(x)
# Another fully-connected layer
x = self.fc2(x)
output = F.log_softmax(x, dim=1)
return output
# add event to airtable
atform.add_event('Coding Exercise 3.1: Simple Feed Forward Net')
# Uncomment to check your code
nn_model = NeuralNet(2, 128, 100, 300, TEXT.vocab.vectors)
print(nn_model)
```
NeuralNet(
(word_embeddings): Embedding(100, 300)
(fc1): Linear(in_features=300, out_features=128, bias=True)
(fc2): Linear(in_features=128, out_features=2, bias=True)
)
[*Click for solution*](https://github.com/NeuromatchAcademy/course-content-dl/tree/main//tutorials/W2D3_ModernRecurrentNeuralNetworks/solutions/W2D3_Tutorial1_Solution_6b55212b.py)
```
NeuralNet(
(word_embeddings): Embedding(100, 300)
(fc1): Linear(in_features=300, out_features=128, bias=True)
(fc2): Linear(in_features=128, out_features=2, bias=True)
)
```
```python
# @title Training and Testing Functions
# @markdown #### `train(model, device, train_iter, valid_iter, epochs, learning_rate)`
# @markdown #### `test(model, device, test_iter)`
def train(model, device, train_iter, valid_iter, epochs, learning_rate):
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
train_loss, validation_loss = [], []
train_acc, validation_acc = [], []
for epoch in range(epochs):
# train
model.train()
running_loss = 0.
correct, total = 0, 0
steps = 0
for idx, batch in enumerate(train_iter):
text = batch.text[0]
target = batch.label
target = torch.autograd.Variable(target).long()
text, target = text.to(device), target.to(device)
# add micro for coding training loop
optimizer.zero_grad()
output = model(text)
loss = criterion(output, target)
loss.backward()
optimizer.step()
steps += 1
running_loss += loss.item()
# get accuracy
_, predicted = torch.max(output, 1)
total += target.size(0)
correct += (predicted == target).sum().item()
train_loss.append(running_loss/len(train_iter))
train_acc.append(correct/total)
print(f'Epoch: {epoch + 1}, '
f'Training Loss: {running_loss/len(train_iter):.4f}, '
f'Training Accuracy: {100*correct/total: .2f}%')
# evaluate on validation data
model.eval()
running_loss = 0.
correct, total = 0, 0
with torch.no_grad():
for idx, batch in enumerate(valid_iter):
text = batch.text[0]
target = batch.label
target = torch.autograd.Variable(target).long()
text, target = text.to(device), target.to(device)
optimizer.zero_grad()
output = model(text)
loss = criterion(output, target)
running_loss += loss.item()
# get accuracy
_, predicted = torch.max(output, 1)
total += target.size(0)
correct += (predicted == target).sum().item()
validation_loss.append(running_loss/len(valid_iter))
validation_acc.append(correct/total)
print (f'Validation Loss: {running_loss/len(valid_iter):.4f}, '
f'Validation Accuracy: {100*correct/total: .2f}%')
return train_loss, train_acc, validation_loss, validation_acc
def test(model, device, test_iter):
model.eval()
correct = 0
total = 0
with torch.no_grad():
for idx, batch in enumerate(test_iter):
text = batch.text[0]
target = batch.label
target = torch.autograd.Variable(target).long()
text, target = text.to(device), target.to(device)
outputs = model(text)
_, predicted = torch.max(outputs, 1)
total += target.size(0)
correct += (predicted == target).sum().item()
acc = 100 * correct / total
return acc
```
```python
# Model hyperparameters
learning_rate = 0.0003
output_size = 2
hidden_size = 128
embedding_length = 300
epochs = 15
word_embeddings = TEXT.vocab.vectors
vocab_size = len(TEXT.vocab)
# Model set-up
nn_model = NeuralNet(output_size,
hidden_size,
vocab_size,
embedding_length,
word_embeddings)
nn_model.to(DEVICE)
nn_start_time = time.time()
set_seed(522)
nn_train_loss, nn_train_acc, nn_validation_loss, nn_validation_acc = train(nn_model,
DEVICE,
train_iter,
valid_iter,
epochs,
learning_rate)
print(f"--- Time taken to train = {(time.time() - nn_start_time)} seconds ---")
test_accuracy = test(nn_model, DEVICE, test_iter)
print(f'\n\nTest Accuracy: {test_accuracy}%')
```
Random seed 522 has been set.
Epoch: 1, Training Loss: 0.6641, Training Accuracy: 60.79%
Validation Loss: 0.6393, Validation Accuracy: 64.45%
Epoch: 2, Training Loss: 0.6235, Training Accuracy: 66.10%
Validation Loss: 0.6082, Validation Accuracy: 67.89%
Epoch: 3, Training Loss: 0.6004, Training Accuracy: 68.30%
Validation Loss: 0.6025, Validation Accuracy: 67.81%
Epoch: 4, Training Loss: 0.5886, Training Accuracy: 68.96%
Validation Loss: 0.5989, Validation Accuracy: 67.93%
Epoch: 5, Training Loss: 0.5837, Training Accuracy: 69.45%
Validation Loss: 0.5825, Validation Accuracy: 69.40%
Epoch: 6, Training Loss: 0.5805, Training Accuracy: 69.49%
Validation Loss: 0.5844, Validation Accuracy: 68.80%
Epoch: 7, Training Loss: 0.5771, Training Accuracy: 69.87%
Validation Loss: 0.5791, Validation Accuracy: 69.80%
Epoch: 8, Training Loss: 0.5752, Training Accuracy: 70.07%
Validation Loss: 0.5786, Validation Accuracy: 70.09%
Epoch: 9, Training Loss: 0.5729, Training Accuracy: 70.09%
Validation Loss: 0.5794, Validation Accuracy: 69.43%
Epoch: 10, Training Loss: 0.5703, Training Accuracy: 70.07%
Validation Loss: 0.5775, Validation Accuracy: 70.09%
Epoch: 11, Training Loss: 0.5696, Training Accuracy: 70.65%
Validation Loss: 0.5756, Validation Accuracy: 70.37%
Epoch: 12, Training Loss: 0.5680, Training Accuracy: 70.49%
Validation Loss: 0.5777, Validation Accuracy: 69.15%
Epoch: 13, Training Loss: 0.5662, Training Accuracy: 70.72%
Validation Loss: 0.5741, Validation Accuracy: 70.27%
Epoch: 14, Training Loss: 0.5653, Training Accuracy: 70.27%
Validation Loss: 0.5749, Validation Accuracy: 70.48%
Epoch: 15, Training Loss: 0.5630, Training Accuracy: 70.59%
Validation Loss: 0.5711, Validation Accuracy: 70.03%
--- Time taken to train = 11.02334189414978 seconds ---
Test Accuracy: 69.912%
```python
# Plot accuracy curves
plt.figure()
plt.subplot(211)
plot_train_val(np.arange(0, epochs), nn_train_acc, nn_validation_acc,
'train accuracy', 'val accuracy',
'Neural Net on IMDB text classification', 'accuracy',
color='C0')
plt.legend(loc='upper left')
plt.subplot(212)
plot_train_val(np.arange(0, epochs), nn_train_loss,
nn_validation_loss,
'train loss', 'val loss',
'',
'loss [a.u.]',
color='C0')
plt.legend(loc='upper left')
plt.show()
```
---
# Summary
In this tutorial, we explored two different concepts linked to sequences, and text in particular, that will be the conceptual foundation for Recurrent Neural Networks.
The first concept was that of sequences and probabilities. We saw how we can model language as sequences of text, and use this analogy to generate text. Such a setup is also used to classify text or identify parts of speech. We can either build chains manually using simple Python and numerical computation, or use a package such as ```hmmlearn``` that allows us to train models much more easily. These notions of sequences and probabilities (i.e., creating language models!) are key to the internals of a recurrent neural network as well.
The second concept is that of word embeddings, now a mainstay of natural language processing. By using a neural network to predict the context of words, these networks learn internal representations of words that are a decent approximation of semantic meaning (i.e., embeddings!). We saw how these embeddings can be visualised, as well as how they capture meaning. We finally saw how they can be integrated into neural networks to better classify text documents.
```python
# @title Airtable Submission Link
from IPython import display as IPydisplay
IPydisplay.HTML(
f"""
<div>
<a href= "{atform.url()}" target="_blank">
</a>
</div>""" )
```
```python
```
|
# -*- coding: utf-8 -*-
"""
Image Cleanup Operations
========================
Functions to clean data for improved extraction.
author: Ed Beard
email: [email protected]
"""
import copy
import numpy as np
import warnings
from .ocr import read_label, read_diag_text
def find_repeating_unit(labels, diags, fig):
""" Identifies 'n' labels as repeating unit identifiers.
Removal only occurs when a label and diagram overlap
:param labels: List of Label objects
:param diags: List of Diagram objects
:param fig: Input Figure
:returns labels: List of cleaned label objects
:returns diags: List of diagram objects (flagged as repeating)
"""
ns = []
for diag in diags:
for cand in labels:
if diag.overlaps(cand):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
repeating_units = [token for sentence in read_label(fig, cand)[0].text for token in sentence.tokens if token.text == 'n']
if repeating_units:
ns.append(cand)
diag.repeating = True
labels = [label for label in labels if label not in ns]
return labels, diags
def remove_diagram_numbers(diags, fig):
""" Removes vertex numbers from diagrams for cleaner OSRA resolution"""
num_bbox = []
for diag in diags:
diag_text = read_diag_text(fig, diag)
# Simplify into list comprehension when working...
for token in diag_text:
if token.text in '123456789':
print("Numeral successfully extracted %s" % token.text)
num_bbox.append((diag.left + token.left, diag.left + token.right,
diag.top + token.top, diag.top + token.bottom))
# Make a cleaned copy of image to be used when resolving diagrams
diag_fig = copy.deepcopy(fig)
for bbox in num_bbox:
diag_fig.img[bbox[2]:bbox[3], bbox[0]:bbox[1]] = np.ones(3)
return diag_fig
def clean_output(text):
""" Remove whitespace and newline characters from input text."""
# text = text.replace(' ', '')
return text.replace('\n', '')
|
import tactic
namespace paulo
class group (G : Type) extends has_mul G, has_one G, has_inv G :=
(mul_assoc : ∀ (a b c : G), a * b * c = a * (b * c))
(one_mul : ∀ (a : G), 1 * a = a)
(mul_left_inv : ∀ (a : G), a⁻¹ * a = 1)
variables {G: Type} [group G]
namespace group
def is_abelian (G : Type) [group G] :=
∀ (a b : G), a * b = b * a
structure subgroup (G : Type) [group G] :=
(carrier : set G)
(one_mem' : (1 : G) ∈ carrier)
(mul_mem' {x y} : x ∈ carrier → y ∈ carrier → x * y ∈ carrier)
(inv_mem' {x} : x ∈ carrier → x⁻¹ ∈ carrier)
variables (H : subgroup G)
instance : has_mem G (subgroup G) := ⟨λ m H, m ∈ H.carrier⟩
instance : has_coe (subgroup G) (set G) := ⟨λ H, H.carrier⟩
@[simp] lemma mem_carrier
{g : G}
: g ∈ H.carrier ↔ g ∈ H :=
by refl
theorem one_mem :
(1 : G) ∈ H :=
by apply H.one_mem'
/-- A subgroup is closed under multiplication. -/
theorem mul_mem
{x y : G}
: x ∈ H → y ∈ H → x * y ∈ H :=
by apply H.mul_mem'
/-- A subgroup is closed under inverse -/
theorem inv_mem
{x : G}
: x ∈ H → x⁻¹ ∈ H :=
by apply H.inv_mem'
@[simp] lemma mem_coe
{g : G}
: g ∈ (↑H : set G) ↔ g ∈ H :=
by refl
namespace subgroup
def is_normal_subgroup :=
∀ (g : G), ∀ h, h ∈ H → g*h*g⁻¹ ∈ H
lemma abelian_subgroups_normal
{G₁ : Type}
[group G₁]
(hyp : is_abelian G₁)
(H₁ : subgroup G₁)
: is_normal_subgroup H₁ :=
begin
intros g h h_in,
rwa [hyp, ← mul_assoc, mul_left_inv, one_mul]
end
end subgroup
end group
end paulo
|
module Functors.Fin where
open import Library
open import Categories.Sets
open import Categories.Setoids
open import Categories
open import Categories.Initial
open import Categories.CoProducts
open import Functors
open import Isomorphism
open import Functors.FullyFaithful
open Cat
open Fun
open Iso
Nats : Cat {lzero}{lzero}
Nats = record{
Obj = ℕ;
Hom = λ m n → Fin m → Fin n;
iden = id;
comp = λ f g → f ∘ g;
idl = refl;
idr = refl;
ass = refl}
-- initial object
initN : Init Nats zero
initN = record {
i = λ ();
law = ext λ ()}
-- coproducts
extend : ∀ {m n} -> Fin m -> Fin (m + n)
extend zero = zero
extend (suc i) = suc (extend i)
lift : ∀ m {n} -> Fin n -> Fin (m + n)
lift zero i = i
lift (suc m) i = suc (lift m i)
case : ∀ (m : ℕ){n : ℕ}{X : Set} →
(Fin m → X) → (Fin n → X) → Fin (m + n) → X
case zero f g i = g i
case (suc m) f g zero = f zero
case (suc m) f g (suc i) = case m (f ∘ suc) g i
lem1 : ∀ A {B C}(f : Fin A → C) (g : Fin B → C)(i : Fin A) →
case A f g (extend i) ≅ f i
lem1 zero f g ()
lem1 (suc A) f g zero = refl
lem1 (suc A) f g (suc i) = lem1 A (f ∘ suc) g i
lem2 : ∀ A {B C} (f : Fin A → C) (g : Fin B → C)(i : Fin B) →
case A f g (lift A i) ≅ g i
lem2 zero f g zero = refl
lem2 zero f g (suc i) = refl
lem2 (suc A) f g i = lem2 A (f ∘ suc) g i
lem3 : ∀ A {B C}(f : Fin A → C) (g : Fin B → C)
(h : Fin (A + B) → C) →
(λ x → h (extend {A} x)) ≅ f →
(λ x → h (lift A x)) ≅ g → ∀ i → h i ≅ case A f g i
lem3 zero f g h p q i = fcong i q
lem3 (suc A) f g h p q zero = fcong zero p
lem3 (suc A) f g h p q (suc i) =
lem3 A (f ∘ suc) g (h ∘ suc) (ext (λ i → fcong (suc i) p)) q i
coprod : CoProd Nats
coprod = record
{ _+_ = _+_
; inl = extend
; inr = λ{m} → lift m
; [_,_] = λ{m} → case m
; law1 = λ{m} f g → ext (lem1 m f g)
; law2 = λ{m} f g → ext (lem2 m f g)
; law3 = λ{m} f g h p q → ext (lem3 m f g h p q)
}
--
FinF : Fun Nats Sets
FinF = record {
OMap = Fin;
HMap = id;
fid = refl;
fcomp = refl}
FinFoid : Fun Nats Setoids
FinFoid = record {
OMap = λ n → record {
set = Fin n ;
eq = λ i j → i ≅ j;
ref = refl;
sym' = sym;
trn = trans};
HMap = λ f → record {
fun = f; feq = cong f};
fid = SetoidFunEq refl (iext λ _ → iext λ _ → ext congid);
fcomp = λ{_ _ _ f g} →
SetoidFunEq refl (iext λ _ → iext λ _ → ext (congcomp f g))}
FinFF : FullyFaithful FinF
FinFF X Y = record {
fun = id;
inv = id;
law1 = λ _ → refl;
law2 = λ _ → refl}
open import Data.Bool
feq : forall {n} -> Fin n -> Fin n -> Bool
feq zero zero = true
feq zero (suc j) = false
feq (suc i) zero = false
feq (suc i) (suc j) = feq i j
|
# Optimization
- [Least squares](#Least-squares)
- [Gradient descent](#Gradient-descent)
- [Constraint optimization](#Constraint-optimization)
- [Global optimization](#Global-optimization)
## Intro
Biological research uses optimization when performing many types of machine learning, or when it interfaces with engineering. A particular example is metabolic engineering. As a topic in itself, optimization is extremely complex and useful, so much so that it touches to the core of mathematics and computing.
The complexity of an optimization problem depends on several factors, such as:
- Do you intend a local or a global optimization?
- Is the function linear or nonlinear?
- Is the function convex or not?
- Can a gradient be computed?
- Can the Hessian matrix be computed?
- Do we perform optimization under constraints?
- Are some variables constrained to take integer values?
- Is there a single objective or several?
Scipy does not cover all solvers efficiently, but there are several Python packages specialized for certain classes of optimization problems. In general though, the heavier optimization tasks are solved with dedicated programs, many of which have language bindings for Python.
## Least squares
In practical terms, the most basic application of optimization is computing the local or global minima of functions. We will exemplify this with the method of least squares. This method is used to fit the parameters of a function by minimizing an error measure.
**Problem context** Having a set of $m$ data points, $(x_1, y_1), (x_2, y_2),\dots,(x_m, y_m)$ and a curve (model function) $y=f(x, \boldsymbol \beta)$ that in addition to the variable $x$ also depends on $n$ parameters, $\boldsymbol \beta = (\beta_1, \beta_2, \dots, \beta_n)$ with $m\ge n$.
It is desired to find the vector $\boldsymbol \beta$ of parameters such that the curve fits best the given data in the least squares sense, that is, the sum of squares of the residuals is minimized:
$$ \min_{\boldsymbol \beta} \sum_{i=1}^{m}(y_i - f(x_i, \boldsymbol \beta))^2$$
Let us use an exercise similar to the basic linear regression performed in the statistics chapter, but fit a curve instead. That is to say, we are now performing a very basic form of nonlinear regression. While not strictly statistics related, this exercise can be useful, for example, if we want to decide how well a probability distribution fits our data. We will use least squares again, through the optimization module of scipy.
```python
%matplotlib inline
import numpy as np
import pylab as plt
from scipy import optimize
nsamp = 30
x = np.linspace(0,1,nsamp)
"""
y = -0.5*x**2 + 7*sin(x)
This is what we try to fit against. Suppose we know our function is generated
by this law and want to find the (-0.5, 7) parameters. Alternatively we might
not know anything about this dataset but just want to fit this curve to it.
"""
# define the normal function
f = lambda p, x: p[0]*x*x + p[1]*np.sin(x)
testp = (-0.5, 7)
print("True(unknown) parameter value:", testp)
y = f(testp,x)
yr = y + .5*np.random.normal(size=nsamp) # adding a small noise
# define the residual function
e = lambda p, x, y: (abs((f(p,x)-y))).sum()
p0 = (5, 20) # initial parameter value
print("Initial parameter value:", p0)
# uses the standard least squares algorithm
p_est1 = optimize.least_squares(e, p0, args=(x, yr))
print("Parameters estimated with least squares:",p_est1.x)
y_est1 = f(p_est1.x, x)
plt.plot(x,y_est1,'r-', x,yr,'o', x,y,'b-')
plt.show()
# uses a simplex algorithm
p_est2 = optimize.fmin(e, p0, args=(x,yr))
print("Parameters estimated with the simplex algorithm:",p_est2)
y_est2 = f(p_est2, x)
plt.plot(x,y_est2,'r-', x,yr,'o', x,y,'b-')
plt.show()
```
Exercises:
- Use a different nonlinear function.
- Define a normal Python function f() instead!
- Improve the LS fit by using nonstandard loss functions (soft_l1, cauchy)
- Improve the LS fit by using different methods {‘dogbox’, ‘lm’}
## Gradient descent
Note that least squares is not an optimization method per se; it is a way to frame regression in terms of an optimization problem. Gradient descent is the basic optimization method behind most of modern machine learning, and processors and numerical software today are often judged by how fast they can compute gradient descent. On GPUs, it sits at the foundation of Deep Learning and Reinforcement Learning.
The method takes iterative steps in the direction of the negative local gradient, until the step size falls below a chosen precision:
$$\mathbf{a}_{n+1} = \mathbf{a}_n-\gamma\nabla F(\mathbf{a}_n)$$
Here is the naive algorithm, adapted from Wikipedia:
```python
%matplotlib inline
import numpy as np
import pylab as plt
cur_x = 6 # The algorithm starts at x=6
gamma = 0.01 # step size multiplier
precision = 0.00001
previous_step_size = 1/precision; # some large value
f = lambda x: x**4 - 3 * x**3 + 2
df = lambda x: 4 * x**3 - 9 * x**2
x = np.linspace(-4,cur_x,100)
while previous_step_size > precision:
prev_x = cur_x
cur_x += -gamma * df(prev_x)
previous_step_size = abs(cur_x - prev_x)
print("The local minimum occurs at %f" % cur_x)
plt.plot(x,f(x),'b-')
```
The naive implementation suffers from many downsides, from slow convergence to oscillating across a valley. Another typical fault of the naive approach is assuming that the function to be optimized is smooth, varying only slightly for small changes in its parameters. Here is an example of how to perform gradient-based optimization with scipy on the Rosenbrock function, a function known to be ill-conditioned.
But first, here is some [practical advice from Scipy](https://www.scipy-lectures.org/advanced/mathematical_optimization/index.html):
- Gradient not known:
> In general, prefer BFGS or L-BFGS, even if you have to approximate numerically gradients. These are also the default if you omit the parameter method - depending if the problem has constraints or bounds. On well-conditioned problems, Powell and Nelder-Mead, both gradient-free methods, work well in high dimension, but they collapse for ill-conditioned problems.
- With knowledge of the gradient:
> BFGS or L-BFGS. Computational overhead of BFGS is larger than that L-BFGS, itself larger than that of conjugate gradient. On the other side, BFGS usually needs less function evaluations than CG. Thus conjugate gradient method is better than BFGS at optimizing computationally cheap functions.
- With the Hessian:
> If you can compute the Hessian, prefer the Newton method (Newton-CG or TCG).
- With noisy measurements:
> Use Nelder-Mead or Powell.
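To illustrate the advice above, the same ill-conditioned function used in the example below can also be minimized with BFGS through the generic `scipy.optimize.minimize` interface. A minimal sketch (when `jac` is omitted, the gradient is approximated numerically):
```python
import numpy as np
from scipy import optimize

def f(x):  # same Rosenbrock-style test function as in the example below
    return .5*(1 - x[0])**2 + (x[1] - x[0]**2)**2

# BFGS builds an approximation of the inverse Hessian from successive gradients
res = optimize.minimize(f, x0=[2, 2], method='BFGS')
print(res.x, res.nit)
```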
```python
import numpy as np
import scipy.optimize as optimize
def f(x): # The rosenbrock function
return .5*(1 - x[0])**2 + (x[1] - x[0]**2)**2
def fprime(x):
return np.array((-2*.5*(1 - x[0]) - 4*x[0]*(x[1] - x[0]**2), 2*(x[1] - x[0]**2)))
print(optimize.fmin_ncg(f, [2, 2], fprime=fprime))
def hessian(x): # Computed with sympy
return np.array(((1 - 4*x[1] + 12*x[0]**2, -4*x[0]), (-4*x[0], 2)))
print(optimize.fmin_ncg(f, [2, 2], fprime=fprime, fhess=hessian))
%matplotlib inline
from matplotlib import cm
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.gca(projection='3d')
X = np.arange(-1, 1, 0.005)
Y = np.arange(-1, 1, 0.005)
X, Y = np.meshgrid(X, Y)
Z = .5*(1 - X)**2 + (Y - X**2)**2
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
#ax.set_zlim(-1000.01, 1000.01)
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
```
## Constraint optimization
Problem definition:
$$
\begin{array}{rcll}
\min &~& f(\mathbf{x}) & \\
\mathrm{subject~to} &~& g_i(\mathbf{x}) = c_i &\text{for } i=1,\ldots,n \quad \text{Equality constraints} \\
&~& h_j(\mathbf{x}) \geqq d_j &\text{for } j=1,\ldots,m \quad \text{Inequality constraints}
\end{array}
$$
Let's take the particular case when the objective function and the constraints are linear, as in the canonical form:
$$\begin{align}
& \text{maximize} && \mathbf{c}^\mathrm{T} \mathbf{x}\\
& \text{subject to} && A \mathbf{x} \leq \mathbf{b} \\
& \text{and} && \mathbf{x} \ge \mathbf{0}
\end{align}$$
Scipy has methods for optimizing functions under constraints, including linear programming. Additionally, many (linear or nonlinear) constraint optimization problems can be turned into unconstrained optimization problems using Lagrange multipliers. We will learn how to solve linear problems with PuLP.
```python
"""
maximize: 4x + 3y
x > 0
y >= 2
x + 2y <= 25
2x - 4y <= 8
-2x + y <= -5
"""
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
x = np.linspace(0, 20, 2000)
y1 = (x*0) + 2
y2 = (25-x)/2.0
y3 = (2*x-8)/4.0
y4 = 2 * x -5
# Make plot
plt.plot(x, y1, label=r'$y\geq2$')
plt.plot(x, y2, label=r'$2y\leq25-x$')
plt.plot(x, y3, label=r'$4y\geq 2x - 8$')
plt.plot(x, y4, label=r'$y\leq 2x-5$')
plt.xlim((0, 16))
plt.ylim((0, 11))
plt.xlabel(r'$x$')
plt.ylabel(r'$y$')
# Fill feasible region
y5 = np.minimum(y2, y4)
y6 = np.maximum(y1, y3)
plt.fill_between(x, y5, y6, where=y5>y6, color='grey', alpha=0.5)
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
```
The solution to an optimization problem with linear constraints lies somewhere in the gray (feasible) region, and it is not necessarily unique. However, for a linear objective function the optimum is attained at one of the vertices. Now let us frame the problem with PuLP:
```
conda install -c conda-forge pulp
```
```python
import pulp
my_lp_problem = pulp.LpProblem("My LP Problem", pulp.LpMaximize)
x = pulp.LpVariable('x', lowBound=0, cat='Continuous')
y = pulp.LpVariable('y', lowBound=2, cat='Continuous')
# Objective function
my_lp_problem += 4 * x + 3 * y, "Z"
# Constraints
my_lp_problem += 2 * y <= 25 - x
my_lp_problem += 4 * y >= 2 * x - 8
my_lp_problem += y <= 2 * x - 5
my_lp_problem
```
My LP Problem:
MAXIMIZE
4*x + 3*y + 0
SUBJECT TO
_C1: x + 2 y <= 25
_C2: - 2 x + 4 y >= -8
_C3: - 2 x + y <= -5
VARIABLES
x Continuous
2 <= y Continuous
```python
my_lp_problem.solve()
print(pulp.LpStatus[my_lp_problem.status])
for variable in my_lp_problem.variables():
print("{} = {}".format(variable.name, variable.varValue))
print(pulp.value(my_lp_problem.objective))
```
Optimal
x = 14.5
y = 5.25
73.75
Further complications arise when some of the variables need to be integers, in which case the problem becomes known as Mixed Integer Linear Programming (MILP) and is computationally more expensive. Yet such problems are quite frequent, for example in metabolic engineering, where you need to deal with machines operating on discrete intervals, or when studying protein folding or DNA recombination. In such cases one can also install Python packages that deal with the specific problem, such as [cobrapy](https://cobrapy.readthedocs.io/en/latest/) for metabolic engineering.
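To see what changes syntactically, here is a minimal sketch of an integer-constrained variant of the earlier toy problem in PuLP; the only difference from the LP version is declaring the variables with `cat='Integer'` (the numbers are illustrative, not from a real application):
```python
import pulp

milp = pulp.LpProblem("Toy MILP", pulp.LpMaximize)
# Declaring variables as Integer turns the LP into a MILP
x = pulp.LpVariable('x', lowBound=0, cat='Integer')
y = pulp.LpVariable('y', lowBound=2, cat='Integer')

milp += 4 * x + 3 * y, "Z"   # objective
milp += 2 * y <= 25 - x      # constraints as before
milp += 4 * y >= 2 * x - 8
milp += y <= 2 * x - 5

milp.solve()
print(pulp.LpStatus[milp.status], pulp.value(x), pulp.value(y))
```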
Further reading:
- For LP with PuLP, I recommend this tutorial, which also uses some real life problems, and has a github link for the notebooks: [http://benalexkeen.com/linear-programming-with-python-and-pulp/](http://benalexkeen.com/linear-programming-with-python-and-pulp/)
- For a great list of LP solvers check [https://stackoverflow.com/questions/26305704/python-mixed-integer-linear-programming](https://stackoverflow.com/questions/26305704/python-mixed-integer-linear-programming)
## Global optimization
The most computation efficient optimization methods can only find local optima, while using different heuristics when needing to access global optima. In the class of global optimization algorithms a few methods to mention are:
- Grid search: These methods belong to the class of brute-force or greedy searches and check for solutions across multidimensional solution spaces. They are typically employed when having to find optimal parameter combinations for machine learning problems (hyperparameter search).
- Branch and Bound: This method, belonging to the more general class called dynamic programming, uses an optimal rooted tree, thus breaking the problem into smaller local optimization problems. It is used in LP/MILP for example, or by sequence alignment programs such as BLAST.
- Monte Carlo: This method belongs to the stochastic optimization class, which, instead of looking for an exact fit, uses random sampling and Bayesian statistics. These methods are expected to gain more traction with the evolution of computing power.
- Heuristics: Many methods in this class are nature-inspired, such as genetic programming, ant colony optimization, etc.; a small example with scipy's evolutionary optimizer follows below.
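As a small taste of the stochastic/heuristic family, scipy ships `differential_evolution`, an evolutionary global optimizer that only needs bounds on the variables. A minimal sketch on a multimodal test function:
```python
import numpy as np
from scipy import optimize

# A 1D function with many local minima; its global minimum is at x = 0
def multimodal(x):
    return np.sin(5 * x[0])**2 + 0.1 * x[0]**2

result = optimize.differential_evolution(multimodal, bounds=[(-10, 10)], seed=42)
print(result.x, result.fun)
```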
```python
import sys
sys.version
```
'3.6.5 |Anaconda, Inc.| (default, Mar 29 2018, 18:21:58) \n[GCC 7.2.0]'
```python
sys.path
```
['',
'/home/sergiu/programs/miniconda3/envs/pycourse/lib/python36.zip',
'/home/sergiu/programs/miniconda3/envs/pycourse/lib/python3.6',
'/home/sergiu/programs/miniconda3/envs/pycourse/lib/python3.6/lib-dynload',
'/home/sergiu/programs/miniconda3/envs/pycourse/lib/python3.6/site-packages',
'/home/sergiu/programs/miniconda3/envs/pycourse/lib/python3.6/site-packages/IPython/extensions',
'/home/sergiu/.ipython']
```python
```
|
(** * Generation of the interconnections between PDIs and TDIs *)
Require Import common.CoqLib.
Require Import common.GlobalTypes.
Require Import common.ListPlus.
Require Import common.ListDep.
Require Import common.StateAndErrorMonad.
Require Import String.
Require Import sitpn.Sitpn.
Require Import sitpn.SitpnTypes.
Require Import hvhdl.HVhdlTypes.
Require Import hvhdl.AbstractSyntax.
Require Import hvhdl.Petri.
Require Import hvhdl.Place.
Require Import hvhdl.Transition.
Require Import transformation.Sitpn2HVhdlTypes.
Require Import transformation.Sitpn2HVhdlUtils.
Set Implicit Arguments.
Section GenInter.
Variable sitpn : Sitpn.
(* Proof of decidability for the priority relation of [sitpn] *)
Variable decpr : forall x y : T sitpn, {x >~ y} + {~x >~ y}.
(* Alias for the state-and-error monad instantiated with the
compile-time state. *)
Definition CompileTimeState := @Mon (Sitpn2HVhdlState sitpn).
Local Open Scope abss_scope.
(** Retrieves the TDI [id__t] associated with transition [t], and
connects the [idx-th] element of the [itf] input port with the
actual part of the [fired] output port.
Returns the modified [i__p] input port map, and an incremented
index. *)
Definition connect_to_input_tdi
(i__p : inputmap)
(idx : nat)
(t : T sitpn) :
CompileTimeState (inputmap * nat) :=
do id__t <- get_tdi_id_from_binder t;
do tdi <- get_comp id__t;
let '(id__e, g__t, i__t, o__t) := tdi in
do a <- actual Transition.fired o__t;
match a with
| None => Err ("connect_to_input_tdi: The fired port of TDI " ++ $$id__t ++ " is open.")
| Some n =>
Ret (i__p ++ [ipa_ (Place.input_transitions_fired $[[idx]]) (e_name n)], (idx + 1)%nat)
end.
(** Iterates and calls the [connect_to_input_tdi] function over the
set of input transitions of a place [p]. *)
Definition connect_to_input_tdis
(pinfo : PlaceInfo sitpn)
(i__p : inputmap) :
CompileTimeState inputmap :=
do iidx <- ListMonad.fold_left (fun '(i, idx) => connect_to_input_tdi i idx) (tinputs pinfo) (i__p, 0%nat);
Ret (fst iidx).
(** Parameters:
Assume there is a place [p] such that:
- [pinfo] represents the information associated with [p].
- [i__p] and [o__p] represents the input and output port maps of a
PDI [id__p], associated with [p] through the [γ] binder.
- [t] is a conflicting output transition of [p].
Retrieves the TDI [id__t] associated with transition [t], and
connects elements of the input port map [i__p] and the output port
map [o__p] to elements of the input and output port maps of the
TDI [id__t].
Replaces the TDI [id__t] by its modified version in the
compile-time state behavior.
Returns the modified [i__p] input port map, the modified [o__p]
output port map, and an incremented index. *)
Definition connect_to_confl_tdi
(i__p : inputmap)
(o__p : outputmap)
(idx : nat)
(t : T sitpn) :
CompileTimeState (inputmap * outputmap * nat) :=
do id__t <- get_tdi_id_from_binder t;
do tdi <- get_comp id__t;
let '(id__e, g__t, i__t, o__t) := tdi in
(* Interconnects [o__p] to [i__t], and [i__p] to [o__t]. *)
do oi1 <- connect o__p i__t Place.output_arcs_valid idx Transition.input_arcs_valid;
do oi2 <- connect (fst oi1) (snd oi1) Place.reinit_transitions_time idx Transition.reinit_time;
do oi3 <- connect (fst oi2) (snd oi2) Place.priority_authorizations idx Transition.priority_authorizations;
let '(o__p3, i__t3) := oi3 in
(* Replaces TDI [id__t] by a new TDI in the compile-time state's behavior. *)
do _ <- put_comp id__t id__e g__t i__t3 o__t;
(* Last interconnection between [i__p] and [o__t]. *)
do a <- actual Transition.fired o__t;
match a with
| None => Err ("connect_to_input_tdi: The fired port of TDI " ++ $$id__t ++ " is open.")
| Some n =>
Ret (i__p ++ [ipa_ (Place.output_transitions_fired $[[idx]]) (e_name n)], o__p3, (idx + 1)%nat)
end.
(** Parameters:
Assume there is a place [p] such that:
- [pinfo] represents the information associated with [p].
- [i__p] and [o__p] represents the input and output port maps of a
PDI [id__p], associated with [p] through the [γ] binder.
- [t] is a non-conflicting output transition of [p].
Retrieves the TDI [id__t] associated with transition [t], and
connects elements of the input port map [i__p] and the output port
map [o__p] to elements of the input and output port maps of the
TDI [id__t].
Replaces the TDI [id__t] by its modified version in the
compile-time state behavior.
Returns the modified [i__p] input port map, the modified [o__p]
output port map, and an incremented index. *)
Definition connect_to_nconfl_tdi
(i__p : inputmap)
(o__p : outputmap)
(idx : nat)
(t : T sitpn) :
CompileTimeState (inputmap * outputmap * nat) :=
do id__t <- get_tdi_id_from_binder t;
do tdi <- get_comp id__t;
let '(id__e, g__t, i__t, o__t) := tdi in
(* Interconnects [o__p] to [i__t], and [i__p] to [o__t]. *)
do oi1 <- connect o__p i__t Place.output_arcs_valid idx Transition.input_arcs_valid;
do oi2 <- connect (fst oi1) (snd oi1) Place.reinit_transitions_time idx Transition.reinit_time;
(* Connects [pauths(idx)] to [true] in input port map [i__t2]. *)
let '(o__p2, i__t2) := oi2 in
do i__t3 <- cassoc_imap i__t2 Transition.priority_authorizations true;
(* Interconnects [pauths(idx)] to a newly generated but
unconnected internal signal [id__s] in output port map [o__p2]. *)
do id__s <- get_nextid;
do _ <- add_sig_decl (sdecl_ id__s tind_boolean);
do o__p3 <- Ret (o__p2 ++ [opa_idx Place.priority_authorizations idx ($id__s)]);
(* Replaces TDI [id__t] by a new TDI in the compile-time state's behavior. *)
do _ <- put_comp id__t id__e g__t i__t3 o__t;
(* Last interconnection between [i__p] and [o__t]. *)
do a <- actual Transition.fired o__t;
match a with
| None => Err ("connect_to_input_tdi: The fired port of TDI " ++ $$id__t ++ " is open.")
| Some n =>
Ret (i__p ++ [ipa_ (Place.output_transitions_fired $[[idx]]) (e_name n)], o__p3, (idx + 1)%nat)
end.
(** Iterates and calls the [connect_to_confl_tdi] and
      [connect_to_nconfl_tdi] functions over the sets of conflicting and
      non-conflicting output transitions of a place [p]. *)
Definition connect_to_output_tdis
(pinfo : PlaceInfo sitpn)
(i__p : inputmap) (o__p : outputmap) :
CompileTimeState (inputmap * outputmap) :=
do ioidx <- ListMonad.fold_left (fun '(i, o, idx) => connect_to_confl_tdi i o idx) (tconflict pinfo) (i__p, o__p, 0%nat);
let '(i__p1, o__p1, idx) := ioidx in
do ioidx1 <- ListMonad.fold_left (fun '(i, o, idx) => connect_to_nconfl_tdi i o idx) (toutputs pinfo) (i__p1, o__p1, idx);
Ret (fst ioidx1).
(** Retrieves the behavior [beh] (i.e. the currently generated
behavior) the PDI [id__p] associated with place [p] (i.e. γ(p) =
[id__p]), and connects the interface of the PDI [id__p] to the
interface of its input and output TDIs. Then, replaces the old
PDI [id__p] by the new in the compile-time state's behavior. *)
Definition connect_place (p : P sitpn) :
CompileTimeState unit :=
(* Retrieves some elements from the compile-time state, namely:
- The informations associated with place [p] in the
[SitpnInfos] structure.
- The identifier [id__p] associated with place [p] in the [γ] binder.
- The PDI [id__p] from the behavior [beh]. *)
do pinfo <- get_pinfo p;
do id__p <- get_pdi_id_from_binder p;
do pdi <- get_comp id__p;
let '(id__e, g, i, o) := pdi in
(* Connects the PDI [pdi] to the TDIs implementing the input
transitions of place [p]. *)
do i1 <- connect_to_input_tdis pinfo i;
(* Connects the PDI [pdi] to the TDIs implementing the output
transitions of place [p]. *)
do io2 <- connect_to_output_tdis pinfo i1 o;
(* Replaces the PDI [pdi] by a new PDI in the compile-time state's
behavior. *)
let '(i2, o2) := io2 in
put_comp id__p id__e g i2 o2.
(** Generates the interconnections between PDIs and TDIS by
modifying the compile-time state's behavior. *)
Definition generate_interconnections :
CompileTimeState unit :=
(* Calls connect_place on each place of sitpn. *)
do Plist <- get_lofPs; ListMonad.iter connect_place Plist.
End GenInter.
Arguments generate_interconnections {sitpn}.
|
using OpenQuantumBase, Test
c = ConstantCouplings(["ZI", "IZ"])
@test isequal(c.mats[1], 2π*σz⊗σi)
@test c.mats[2] == 2π*σi⊗σz
@test c[2](2.0) == 2π*σi⊗σz
res = c(0.2)
@test isequal(res[1], 2π*σz⊗σi)
@test res[2] == 2π*σi⊗σz
crot = rotate(c, σx⊗σi)
@test crot[1](0) ≈ -2π*σz⊗σi
@test crot[2](0.5) ≈ 2π*σi⊗σz
c = ConstantCouplings(["ZI", "IZ"], unit=:ħ)
@test isequal(c.mats[1], σz⊗σi)
@test isequal(c.mats[2], σi⊗σz)
res = c(0.2)
@test res[1] == σz⊗σi
@test res[2] == σi⊗σz
@test [op(0) for op in c] == [σz⊗σi, σi⊗σz]
c = ConstantCouplings([σz⊗σi, σi⊗σz], unit=:ħ)
@test isequal(c.mats[1], σz⊗σi)
@test isequal(c.mats[2], σi⊗σz)
res = c(0.2)
@test res[1] == σz⊗σi
@test res[2] == σi⊗σz
@test [op(0) for op in c] == [σz⊗σi, σi⊗σz]
c = ConstantCouplings(["ZI", "IZ"], sp=true)
@test isequal(c.mats[1], 2π*spσz⊗spσi)
@test isequal(c.mats[2], 2π*spσi⊗spσz)
c1 = TimeDependentCoupling([(s)->s], [σz], unit=:ħ)
@test c1(0.5) == 0.5σz
c2 = TimeDependentCoupling([(s)->s], [σx], unit=:ħ)
c = TimeDependentCouplings(c1, c2)
@test size(c) == (2, 2)
@test [op for op in c(0.5)] == [c1(0.5), c2(0.5)]
@test [op(0.5) for op in c] == [c1(0.5), c2(0.5)]
c = collective_coupling("Z", 2, unit=:ħ)
@test isequal(c(0.1), [σz⊗σi, σi⊗σz])
test_coupling = [(s)->s*σx, (s)->(1-s)*σz]
coupling = CustomCouplings(test_coupling, unit=:ħ)
@test size(coupling) == (2, 2)
@test coupling(0.5) == 0.5 * [σx, σz]
@test [c(0.2) for c in coupling] == [0.2*σx, 0.8*σz]
|
The product of a number and the negative of another number is the negative of the product of the two numbers. |
Ireland continued as a patchwork of rival kingdoms but, beginning in the 7th century AD, a concept of national kingship gradually became articulated through the concept of a High King of Ireland. Medieval Irish literature portrays an almost unbroken sequence of High Kings stretching back thousands of years but modern historians believe the scheme was constructed in the 8th century to justify the status of powerful political groupings by projecting the origins of their rule into the remote past.
|
-- Reflexive and Euclidean relations are equivalence relations
-- ===========================================================
-- ----------------------------------------------------
-- A binary relation (≈) is Euclidean if
--    ∀ {a b c}, a ≈ b → c ≈ b → a ≈ c
--
-- The goal of this theory is to prove that if a
-- relation is reflexive and Euclidean, then it is an
-- equivalence relation.
-- ----------------------------------------------------
import tactic
section
parameter {A : Type}
parameter (R : A → A → Prop)
local infix ≈ := R
parameter reflexivaR : reflexive (≈)
parameter euclideaR : ∀ {a b c}, a ≈ b → c ≈ b → a ≈ c
include reflexivaR euclideaR
-- ----------------------------------------------------
-- Exercise 1. Prove that reflexive and Euclidean
-- relations are symmetric.
-- ----------------------------------------------------
-- 1st proof
example : symmetric (≈) :=
begin
intros a b h,
exact euclideaR (reflexivaR b) h,
end
-- 2nd proof
example : symmetric (≈) :=
λ a b h, euclideaR (reflexivaR b) h
-- 3rd proof
lemma simetricaR : symmetric (≈) :=
assume a b (h1 : a ≈ b),
have h2 : b ≈ b, from (reflexivaR b),
show b ≈ a, from euclideaR h2 h1
-- ----------------------------------------------------
-- Exercise 2. Prove that reflexive and Euclidean
-- relations are transitive.
-- ----------------------------------------------------
-- 1st proof
example : transitive (≈) :=
begin
rintros a b c h1 h2,
apply euclideaR h1,
exact euclideaR (reflexivaR c) h2,
end
-- 2nd proof
lemma transitivaR : transitive (≈) :=
λ a b c h1 h2, (euclideaR h1) (euclideaR (reflexivaR c) h2)
-- 3rd proof
example : transitive (≈) :=
assume a b c (h1 : a ≈ b) (h2 : b ≈ c),
have h3 : c ≈ b, from euclideaR (reflexivaR c) h2,
show a ≈ c, from euclideaR h1 h3
-- ----------------------------------------------------
-- Exercise 3. Prove that reflexive and Euclidean
-- relations are equivalence relations.
-- ----------------------------------------------------
-- 1st proof
example : equivalence (≈) :=
begin
unfold equivalence,
exact ⟨reflexivaR, simetricaR, transitivaR⟩,
end
-- 2nd proof
example : equivalence (≈) :=
⟨reflexivaR, simetricaR, transitivaR⟩
end
|
```python
# Importing packages
import matplotlib.pyplot as plt
import pandas as pd
import ipywidgets as widgets
import numpy as np
import itertools
from ipywidgets import Layout
import seaborn as sns
import math
import time
from scipy import linalg
import scipy.optimize as optimize
import sympy as sm
from tkinter import *
import tkinter as tk
from data_gen import gen_df
```
In this project, we want to estimate the ratings of movies. That is, we will calculate the utility of a movie, given genres, duration, awards and so forth, and we will try to calculate the IMDB rating for each movie based on this utility. Finally, we will minimize the distance between our estimated ratings and the actual ratings.
Our utility function is given by:
$$ U_i = \sum_{k=1}^{23}(\alpha_k G_{ik}) + \sum_{k=1920s}^{2010s} (\beta_k D_{ik}) + \gamma N_i + \delta W_i + \rho L_i $$.
Based on this utility function, we will estimate the rating of each movie (on the 0-10 IMDB scale),
$$ R_i^{model} = 10 \cdot \frac{\exp(\omega x_i)}{1 + \exp(\omega x_i)} $$
where,
$$ x_i = \big[G_1, G_2, ..., G_n, D_{1920}, D_{1930}, ..., D_{2010}, N_i, W_i, L_i \big] $$
$$ \omega = \big[\alpha_1, \alpha_2, ..., \alpha_{23}, \beta_{1920s}, \beta_{1930s}, ..., \beta_{2010s}, \gamma, \delta, \rho \big] $$
We then use optimize methods to solve the following:
$$ \min_{\omega} \Big\{ \sum_{i=1}^{n} \left( R_i^{model} - R_i^{data} \right)^2 \Big\} $$
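As a quick illustration of the model before we apply it to the data, the sketch below fits $\omega$ on a tiny made-up design matrix (the numbers are purely illustrative and are not taken from the IMDB dataset); it uses the same logistic mapping to a 0-10 rating and the same sum-of-squared-differences objective as the code further down.
```python
# Toy illustration of the estimation idea above (made-up numbers, not the IMDB data)
import numpy as np
from scipy import optimize

X_toy = np.array([[1.0, 0.0, 110.0],   # e.g. [genre dummy, award dummy, duration]
                  [0.0, 1.0,  95.0],
                  [1.0, 1.0, 130.0]])
R_toy = np.array([7.2, 6.1, 8.0])      # "observed" ratings on the 0-10 scale

def rating_model(omega, X):
    util = X @ omega                                  # utility U_i = omega x_i
    return 10 * np.exp(util) / (1 + np.exp(util))     # logistic mapping scaled to 0-10

def sq_diff_sum(omega, X, R):
    return np.sum((rating_model(omega, X) - R) ** 2)  # sum of squared differences

res_toy = optimize.minimize(sq_diff_sum, x0=np.zeros(X_toy.shape[1]),
                            args=(X_toy, R_toy), method='Nelder-Mead')
print(res_toy.x)
```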
### The following code will optimize for each decade and plot all estimates
```python
decade_list = [1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000, 2010]
vars = ['Action','Adventure','Animation','Biography','Comedy','Crime','Documentary',
'Drama','Family','Fantasy','FilmNoir','History','Horror','Music','Musical',
'Mystery','Romance','SciFi','Short','Sport','Thriller','War','Western',
'nrOfNominations','nrOfWins','duration']
```
```python
## The following code defines a function that generates two dataframes,
## one with the x-variables and one with the y-variable. The dataframe
## with x-variables only contains movies from the specified decade.
def df_dec(decade):
""" Generates two dataframe, one with the x-variables (for the specified decade) and one with the y-variable
Args:
decade (int): The decade should be named as either 1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000 or 2010
Returns:
Two dataframes.
"""
# Import the IMDB dataset with information of ratings and X-variables
df = gen_df('imdb.csv')
# Dropping movies with less than 5000 ratings
df = df.loc[(df['ratingCount']>=5000)]
# Dropping genres with fewer than 20 observations
df.drop(columns=['Adult','GameShow','News','RealityTV','TalkShow'], inplace=True)
# Keeps movies from the specified decade
df = df.loc[df['decade'] == f'{decade}s']
df_X = df.copy() # Creating the dataframe for the x-variables
# Rearrange columns and keep the specified variables
df_X = df_X.reindex(['Action','Adventure','Animation','Biography','Comedy','Crime','Documentary',
'Drama','Family','Fantasy','FilmNoir','History','Horror','Music','Musical',
'Mystery','Romance','SciFi','Short','Sport','Thriller','War','Western',
'nrOfNominations','nrOfWins','duration'], axis=1)
# Creates a dataframe with the observed ratings
df_Y = pd.DataFrame(df[['imdbRating','index']].copy())
df_Y.rename(columns = {'imdbRating':'rat_data'}, inplace=True) # Rename variable
return df_X, df_Y # Return the two dataframes
```
```python
## The following code produces the parameter estimates where the sample is restricted to each decade,
## one at a time. The function returns a list which contains a list of the estimates for every decade.
## NB: The code will take about 3-4 minutes to run because the model is estimated 10 times!
def optimizer_dec():
""" Generates a list containing 10 list with estimates of the model for every decade.
Args:
No arguments are needed.
Returns:
A list.
Notice:
It will raise an error if the function is given an argument!
"""
# Defining a function to calculate the sum of squared differences
# between the ratings from the data and from the model.
def sqr_diff_sum(df_X,pars):
""" Generates a float of the sum of squared differences between the ratings from the data and the model.
Args:
df_X (DataFrame): DataFrame containing the variables in x for all observations.
pars (List): List of parameters in omega
Returns:
A float.
"""
util = df_X@pars # Calculate the matrix product between omega and X
df_Y['rat_model'] = 10*np.exp(util)/(1+np.exp(util)) # Scale the product so it is between 0 and 10. This is the R_model
df_Y['sqr_diff'] = (df_Y['rat_model']-df_Y['rat_data'])**2 # Calculate the squared difference between R_data and R_model
return df_Y['sqr_diff'].sum() # Returns the sum of the squared differences
result = [] # Empty list to store the estimated parameters
# Make a loop so the model is estimated for all ten decades
for decade in decade_list:
df_X, df_Y = df_dec(decade) # Call the function to generate the two dataframes
x0 = np.zeros(len(vars)) # Starting values
obj_fun = lambda x: sqr_diff_sum(df_X,x) # The objective function -> sum of squared differences
# Use Scipy optimizer to solve the model
result_i = optimize.minimize(obj_fun,x0,
method='Nelder-Mead',
options={"disp":True, "maxiter":50000}, # display the results
);
# Add the result for each decade to the result-list
result.append(list(result_i.x))
return result # Returns the result-list
# Call the optimize_dec function
result_dec = optimizer_dec()
```
Optimization terminated successfully.
Current function value: 5.649208
Iterations: 10176
Function evaluations: 12236
Optimization terminated successfully.
Current function value: 20.618022
Iterations: 7768
Function evaluations: 9337
Optimization terminated successfully.
Current function value: 23.340167
Iterations: 7874
Function evaluations: 9474
Optimization terminated successfully.
Current function value: 81.101081
Iterations: 11566
Function evaluations: 13883
Optimization terminated successfully.
Current function value: 114.676976
Iterations: 7507
Function evaluations: 9034
Optimization terminated successfully.
Current function value: 165.183683
Iterations: 6818
Function evaluations: 8234
Optimization terminated successfully.
Current function value: 401.211190
Iterations: 7408
Function evaluations: 8973
Optimization terminated successfully.
Current function value: 698.471642
Iterations: 11452
Function evaluations: 13733
Optimization terminated successfully.
Current function value: 1164.868912
Iterations: 7780
Function evaluations: 9371
Optimization terminated successfully.
Current function value: 412.781524
Iterations: 9187
Function evaluations: 11048
```python
## The following code produces a figure with the estimated parameters for each decade for a chosen variable
result_dec_mod = [] # Empty list for storage
# The loop changes the order of the result-list so the
# estimates are ordered by variable and sub-ordered by decade
# instead of being ordered by decade and sub-ordered by variable
for j,var in enumerate(vars):
temp = []
for i,dec in enumerate(decade_list):
temp.append(result_dec[i][j])
result_dec_mod.append(temp)
# Defining a figure to plot the estimates
def fig(var):
""" Generates a figure which plots estimated parameters for each decade for one variable
Args:
var (string): Should be one of the variables in the X-vector
Returns:
One interactive plot.
Notice:
The function is generated so that it can be called using widgets.interact.
Thus, it is not intended to be used on its own.
"""
fig = plt.figure(figsize=(8,6))
ax1 = fig.add_subplot(1,1,1)
ax1.bar(decade_list, result_dec_mod[vars.index(var)],width=6)
# Setting labels, ticks etc.
ax1.set_ylabel('Parameter estimates')
ax1.set_title(f'Parameter estimates for {var} per decade')
ax1.set_xticks(decade_list)
ax1.axhline(y=0,color='black',linewidth=1)
# Making the figure interactive so the estimates are shown for the chosen variable
widgets.interact(fig,
var = widgets.Dropdown(description='Variable', value='Action', options=vars,
),
);
```
## The following code will produce a plot with estimates for each parameter
```python
## From Seb's code
def df_s(keep_top=None):
global df
global decade_list
filename = 'imdb.csv'
df = gen_df(filename)
decade_list = [1920, 1930, 1940, 1950, 1960, 1970, 1980, 1990, 2000, 2010]
# Decade dummies
for i in decade_list:
df[f'decade_{i}'] = 0
df.loc[(df['decade'] == f'{i}s'),f'decade_{i}'] = 1
df = df.loc[(df['ratingCount']>=5000)]
df.drop(columns=['Adult','GameShow','News','RealityTV','TalkShow'], inplace=True)
if keep_top != None:
df = df.sort_values('imdbRating', ascending=False)
df = df.iloc[:keep_top]
df_X = df.copy()
df_Y = pd.DataFrame(df['imdbRating'].copy())
df_Y.rename(columns = {'imdbRating':'rat_data'}, inplace=True)
drops = ['index', 'tid', 'title', 'imdbRating', 'ratingCount', 'year',
'nrOfGenre', 'nrOfPhotos', 'nrOfNewsArticles', 'nrOfUserReviews', 'decade']
for i in drops:
df_X.drop(columns=[i], inplace=True)
# Rearrange columns
df_X = df_X.reindex(['Action','Adventure','Animation','Biography','Comedy','Crime','Documentary',
'Drama','Family','Fantasy','FilmNoir','History','Horror','Music','Musical',
'Mystery','Romance','SciFi','Short','Sport','Thriller','War','Western',
'decade_1920','decade_1930','decade_1940','decade_1950','decade_1960',
'decade_1970','decade_1980','decade_1990','decade_2000','decade_2010',
'nrOfNominations','nrOfWins','duration'], axis=1)
return df_X, df_Y
```
```python
## From Seb's code
vars2 = ['Action','Adventure','Animation','Biography','Comedy','Crime','Documentary',
'Drama','Family','Fantasy','FilmNoir','History','Horror','Music','Musical',
'Mystery','Romance','SciFi','Short','Sport','Thriller','War','Western',
'decade_1920','decade_1930','decade_1940','decade_1950','decade_1960',
'decade_1970','decade_1980','decade_1990','decade_2000','decade_2010',
'nrOfNominations','nrOfWins','duration']
def optimizer(keep_top=None):
global fs
global evals
global x0
global df
def sqr_diff_sum(df_X,pars):
util = df_X@pars
df_Y['rat_model'] = 10*np.exp(util)/(1+np.exp(util))
df_Y['sqr_diff'] = (df_Y['rat_model']-df_Y['rat_data'])**2
return df_Y['sqr_diff'].sum()
# Scipy minimize
def zeros(n):
zero_list = [0] * n  # avoid shadowing the built-in list
return zero_list
df_X, df_Y = df_s(keep_top)
x0 = zeros(len(vars2))
evals = 0
obj_fun = lambda x: sqr_diff_sum(df_X,x)
result = optimize.minimize(obj_fun,x0,
method="Nelder-Mead",
options={"disp":True, "maxiter":50000}, # display the results
)
return result
results = [] # Empty list to store results
# This loop solves the model for all movies and for the top 500, 1000, and 2000 movies
for i in [None, 500, 1000, 2000]:
res_temp = optimizer(keep_top=i)
temp = res_temp.x
results.append(temp) # Store the results in the 'results-list'
```
Optimization terminated successfully.
Current function value: 3735.382286
Iterations: 16566
Function evaluations: 19203
Optimization terminated successfully.
Current function value: 213.265323
Iterations: 17235
Function evaluations: 19895
Optimization terminated successfully.
Current function value: 582.831479
Iterations: 16138
Function evaluations: 18644
Optimization terminated successfully.
Current function value: 895.336144
Iterations: 19477
Function evaluations: 22457
[array([-0.02249121, -0.03089684, 0.17149985, -0.17152826, -0.02084888,
-0.03048913, 0.11009148, 0.12785315, -0.06051079, -0.32745048,
0.02199167, -0.07578793, -0.02139979, -0.03056553, -0.08347352,
-0.09390543, -0.03941612, -0.09520167, 0.12192359, -0.06356771,
-0.06100096, 0.19041382, 0.03038359, -0.04148942, 0.12249617,
0.17989531, -0.03093947, 0.30781892, 0.09645697, 0.00852061,
-0.2156607 , -0.2024172 , -0.21196039, 0.00395598, 0.01588309,
0.45167799]), array([-0.11877128, -0.10281598, 0.37591941, -0.34640937, 0.44963963,
0.01393978, 0.18374984, 0.30143242, -0.1955955 , -0.10595112,
-0.23594299, 0.36870347, -0.5404849 , -0.13993417, -0.40877283,
-0.10383054, -0.12874325, 0.13003553, 0.10715166, -0.21497945,
0.17173981, -0.06567007, 0.37923244, 0.11617889, -0.17575408,
0.01725559, -0.10747867, 0.06821961, -0.08781796, -0.13053973,
-0.05139849, -0.03877125, 0.19630215, -0.00240662, 0.00274183,
0.68502714]), array([ 0.1704907 , -0.03096311, -0.10803699, -0.09170437, 0.23683231,
-0.24947417, 0.05282689, 0.28486822, 0.38642667, -0.02400058,
-0.01892814, -0.05772398, -0.21109486, -0.09879497, -0.22273195,
0.31960029, 0.07178682, 0.32504088, 0.09569002, 0.31668506,
-0.06381533, -0.68753283, -0.19333604, -0.21751563, 0.34852917,
0.13482503, 0.01268436, -0.13773108, -0.03321873, 0.00730996,
-0.40534081, 0.02219064, -0.33867179, -0.00593339, 0.01095337,
0.65302193]), array([-0.33121987, -0.20515105, 0.3966847 , -0.08547312, -0.037707 ,
0.01622495, 0.15708472, 0.12690045, -0.06592735, 0.41234624,
-0.10957163, -0.36811232, 0.29573276, -0.22474644, 0.13497452,
-0.09475096, 0.05859084, -0.07161956, -0.17934684, -0.05673295,
0.06807332, -0.18819276, -0.50668664, -0.03372878, -0.12263855,
0.0367653 , 0.09566702, 0.13520308, -0.00541802, 0.40282482,
0.08692355, 0.10161355, 0.12506686, -0.00192629, 0.00361094,
0.58200293])]
[-0.11877128 -0.10281598 0.37591941 -0.34640937 0.44963963 0.01393978
0.18374984 0.30143242 -0.1955955 -0.10595112 -0.23594299 0.36870347
-0.5404849 -0.13993417 -0.40877283 -0.10383054 -0.12874325 0.13003553
0.10715166 -0.21497945 0.17173981 -0.06567007 0.37923244 0.11617889
-0.17575408 0.01725559 -0.10747867 0.06821961 -0.08781796 -0.13053973
-0.05139849 -0.03877125 0.19630215 -0.00240662 0.00274183 0.68502714]
```python
## The code creates an interactive plot of the estimated parameters for all variables.
## In the interactive part you can choose between estimates when the model is solved
## for all movies or just the top 500, 1000, or 2000 rated movies.
def fig_2(val):
""" Generates a figure which plots estimated parameters for all variables.
Args:
val (string or int): Should be one of the elements in the options-list
Returns:
One interactive plot.
Notice:
The function is generated so that it can be called using widgets.interact.
Thus, it is not intended to be used on its own.
"""
fig = plt.figure(figsize=(12,6))
ax1 = fig.add_subplot(1,1,1)
# Plots the estimated parameters for the chosen sample of movies
ax1.bar(vars2, results[options.index(val)], label=f'Estimates with {options[options.index(val)]} movies')
# Scatter plot with the estimated parameters for the entire sample
ax1.scatter(vars2, results[0], marker='D', s=15, zorder=2, label='Estimates with all movies')
# Legends and labels
ax1.legend(loc="lower right")
ax1.set_ylabel('Parameter estimates')
ax1.set_title(f'Parameter estimates for {options[options.index(val)]} movies')
ax1.set_ylim([-0.7,0.7])
ax1.axhline(y=0,color='black',linewidth=1)
for tick in ax1.get_xticklabels():
tick.set_rotation(90)
options = ['All', 500, 1000, 2000] # Option list
# Making the figure interactive so the estimates are shown for the chosen sample size
widgets.interact(fig_2,
val = widgets.Dropdown(description='Nr. of movies', value='All', options=options,
),
);
```
interactive(children=(Dropdown(description='Nr. of movies', options=('All', 500, 1000, 2000), value='All'), Ou…
|
From Test Require Import tactic.
Section FOFProblem.
Variable Universe : Set.
Variable UniverseElement : Universe.
Variable wd_ : Universe -> Universe -> Prop.
Variable col_ : Universe -> Universe -> Universe -> Prop.
Variable col_swap1_1 : (forall A B C : Universe, (col_ A B C -> col_ B A C)).
Variable col_swap2_2 : (forall A B C : Universe, (col_ A B C -> col_ B C A)).
Variable col_triv_3 : (forall A B : Universe, col_ A B B).
Variable wd_swap_4 : (forall A B : Universe, (wd_ A B -> wd_ B A)).
Variable col_trans_5 : (forall P Q A B C : Universe, ((wd_ P Q /\ (col_ P Q A /\ (col_ P Q B /\ col_ P Q C))) -> col_ A B C)).
Theorem pipo_6 : (forall A Bprime L11 L12 L21 L22 : Universe, ((wd_ L11 L12 /\ (wd_ L21 L22 /\ (col_ L11 L12 A /\ (col_ L11 L12 Bprime /\ (col_ L11 L21 L22 /\ col_ L12 L21 L22))))) -> col_ L21 L22 Bprime)).
Proof.
time tac.
Qed.
End FOFProblem.
|
[GOAL]
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
n : ℤ
h : ↑n = 0
⊢ n = 0
[PROOFSTEP]
cases n
[GOAL]
case ofNat
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
a✝ : ℕ
h : ↑(ofNat a✝) = 0
⊢ ofNat a✝ = 0
[PROOFSTEP]
erw [Int.cast_ofNat] at h
[GOAL]
case ofNat
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
a✝ : ℕ
h : ↑a✝ = 0
⊢ ofNat a✝ = 0
[PROOFSTEP]
exact congr_arg _ (Nat.cast_eq_zero.1 h)
[GOAL]
case negSucc
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
a✝ : ℕ
h : ↑-[a✝+1] = 0
⊢ -[a✝+1] = 0
[PROOFSTEP]
rw [cast_negSucc, neg_eq_zero, Nat.cast_eq_zero] at h
[GOAL]
case negSucc
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
a✝ : ℕ
h : a✝ + 1 = 0
⊢ -[a✝+1] = 0
[PROOFSTEP]
contradiction
[GOAL]
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
n : ℤ
h : n = 0
⊢ ↑n = 0
[PROOFSTEP]
rw [h, cast_zero]
[GOAL]
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
m n : ℤ
⊢ ↑m = ↑n ↔ m = n
[PROOFSTEP]
rw [← sub_eq_zero, ← cast_sub, cast_eq_zero, sub_eq_zero]
[GOAL]
α : Type u_1
inst✝¹ : AddGroupWithOne α
inst✝ : CharZero α
n : ℤ
⊢ ↑n = 1 ↔ n = 1
[PROOFSTEP]
rw [← cast_one, cast_inj]
[GOAL]
α : Type u_1
k : Type u_2
inst✝¹ : DivisionRing k
inst✝ : CharZero k
m n : ℤ
n_dvd : n ∣ m
⊢ ↑(m / n) = ↑m / ↑n
[PROOFSTEP]
rcases eq_or_ne n 0 with (rfl | hn)
[GOAL]
case inl
α : Type u_1
k : Type u_2
inst✝¹ : DivisionRing k
inst✝ : CharZero k
m : ℤ
n_dvd : 0 ∣ m
⊢ ↑(m / 0) = ↑m / ↑0
[PROOFSTEP]
simp [Int.ediv_zero]
[GOAL]
case inr
α : Type u_1
k : Type u_2
inst✝¹ : DivisionRing k
inst✝ : CharZero k
m n : ℤ
n_dvd : n ∣ m
hn : n ≠ 0
⊢ ↑(m / n) = ↑m / ↑n
[PROOFSTEP]
exact cast_div n_dvd (cast_ne_zero.mpr hn)
|
[STATEMENT]
lemma map_charpair_map_pairtrue_R:
"s\<in>S \<Longrightarrow> t\<in>S \<Longrightarrow>
map (charpair S) (pair_relator_list s t) = map pairtrue (pair_relator_list s t)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>s \<in> S; t \<in> S\<rbrakk> \<Longrightarrow> map (charpair S) (pair_relator_list s t) = map pairtrue (pair_relator_list s t)
[PROOF STEP]
using set_alternating_list map_charpair_uniform
[PROOF STATE]
proof (prove)
using this:
set (alternating_list ?n ?s ?t) \<subseteq> {?s, ?t}
?ss \<in> lists ?S \<Longrightarrow> map (charpair ?S) ?ss = map pairtrue ?ss
goal (1 subgoal):
1. \<lbrakk>s \<in> S; t \<in> S\<rbrakk> \<Longrightarrow> map (charpair S) (pair_relator_list s t) = map pairtrue (pair_relator_list s t)
[PROOF STEP]
by fastforce
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#pragma once
#include <algorithm>
#include <array>
#include <cstdint>
#include <memory>
#include <vector>
#include <gsl/gsl>
#include <assert.h>
#include <boost/optional.hpp>
#include "Data/Intrinsics.h"
#include "TimeUnits.h"
#include "MageSettings.h"
namespace mage
{
enum class TrackingState
{
INITIALIZING = 0,
TRACKING = 1,
RELOCALIZING = 2,
SKIPPED = 3
};
enum class FuserMode
{
Invalid = 0,
WaitForMageInit = 1,
WaitForGravityConverge = 2,
VisualTrackingLost = 3,
VisualTrackingReacquired = 4,
ScaleInit = 5,
Tracking = 6
};
enum class PixelFormat
{
GRAYSCALE8,
NV12
};
struct FrameId
{
std::uint64_t CorrelationId; //this is the number used to match frames either from the two stereo cameras, or from low-res to high-res in the mono case. Should be unique (and increasing) per camera.
CameraIdentity Camera;
FrameId()
: CorrelationId{ 0 }, Camera{ mage::CameraIdentity::MONO }
{}
FrameId(const std::uint64_t& id, CameraIdentity cam)
: CorrelationId{ id }, Camera{ cam }
{}
bool operator ==(const FrameId& other) const
{
return CorrelationId == other.CorrelationId && Camera == other.Camera;
}
bool operator !=(const FrameId& other) const
{
return !(*this == other);
}
bool operator <(const FrameId& other) const
{
if (CorrelationId < other.CorrelationId)
return true;
if (other.CorrelationId < CorrelationId)
return false;
return Camera < other.Camera;
}
};
struct Size
{
size_t Width;
size_t Height;
};
struct Rect
{
int X;
int Y;
size_t Width;
size_t Height;
};
struct Matrix
{
float M11, M12, M13, M14;
float M21, M22, M23, M24;
float M31, M32, M33, M34;
float M41, M42, M43, M44;
};
struct Position
{
float X;
float Y;
float Z;
std::array<float, 3> AsArray() const
{
return{ X, Y, Z };
}
};
struct ProjectedPoint
{
float X;
float Y;
float Depth;
int32_t Id;
ProjectedPoint(float x, float y, float depth, int32_t id)
: X{ x }, Y{ y }, Depth{ depth }, Id{ id }
{}
ProjectedPoint() = default;
};
struct AxisAlignedVolume
{
Position Min;
Position Max;
};
struct Direction
{
float X;
float Y;
float Z;
};
enum class DataFormat
{
BINARY,
PNG,
LIVE,
BOB,
MIDDLEBURY,
};
namespace calibration
{
enum class DistortionType
{
None,
Poly3k,
Rational6k
};
class CameraModel
{
public:
CameraModel() = default;
CameraModel(const Intrinsics& intrinsics, DistortionType distType) : m_intrinsics(intrinsics), m_distortionType(distType) {}
const Intrinsics& GetIntrinsics() const { return m_intrinsics; }
//not in opencv order
virtual gsl::span<const float> GetDistortionCoefficients() const = 0;
virtual DistortionType GetDistortionType() const { return m_distortionType; }
private:
Intrinsics m_intrinsics;
DistortionType m_distortionType;
};
class PinholeCameraModel : public CameraModel
{
public:
PinholeCameraModel() = delete;
PinholeCameraModel(const Intrinsics& intrinsics) : CameraModel(intrinsics, DistortionType::None) {}
gsl::span<const float> GetDistortionCoefficients() const { return {}; }
};
class Poly3KCameraModel : public CameraModel
{
public:
Poly3KCameraModel() = delete;
Poly3KCameraModel(const Intrinsics& intrinsics, std::vector<float> distortionCoefficients) :
CameraModel(intrinsics, DistortionType::Poly3k),
m_distortionCoefficients{ distortionCoefficients }
{
assert(m_distortionCoefficients.size()== 5 && "incorrect number of distortion coefficients. want k1, k2, k3, p1, p2");
}
//not in opencv order
gsl::span<const float> GetDistortionCoefficients() const { return m_distortionCoefficients; }
float GetK1() const { return m_distortionCoefficients[0];}
float GetK2() const { return m_distortionCoefficients[1];}
float GetK3() const { return m_distortionCoefficients[2];}
float GetP1() const { return m_distortionCoefficients[3];}
float GetP2() const { return m_distortionCoefficients[4];}
void SetK1(float k1) { m_distortionCoefficients[0] = k1; }
void SetK2(float k2) { m_distortionCoefficients[1] = k2; }
void SetK3(float k3) { m_distortionCoefficients[2] = k3; }
void SetP1(float p1) { m_distortionCoefficients[3] = p1; }
void SetP2(float p2) { m_distortionCoefficients[4] = p2; }
private:
// k1, k2, k3, p1, p2
std::vector<float> m_distortionCoefficients;
};
class Rational6KCameraModel : public CameraModel
{
public:
Rational6KCameraModel() = delete;
Rational6KCameraModel(const Intrinsics& intrinsics, std::vector<float> distortionCoefficients)
: CameraModel(intrinsics, DistortionType::Rational6k),
m_distortionCoefficients{ distortionCoefficients }
{
assert(m_distortionCoefficients.size() == 8 && "incorrect number of distortion coefficients. want k1, k2, k3, k4, k5, k6, p1, p2");
}
//not in opencv order
gsl::span<const float> GetDistortionCoefficients() const { return m_distortionCoefficients; }
float GetK1() const { return m_distortionCoefficients[0]; }
float GetK2() const { return m_distortionCoefficients[1]; }
float GetK3() const { return m_distortionCoefficients[2]; }
float GetK4() const { return m_distortionCoefficients[3]; }
float GetK5() const { return m_distortionCoefficients[4]; }
float GetK6() const { return m_distortionCoefficients[5]; }
float GetP1() const { return m_distortionCoefficients[6]; }
float GetP2() const { return m_distortionCoefficients[7]; }
void SetK1(float k1) { m_distortionCoefficients[0] = k1; }
void SetK2(float k2) { m_distortionCoefficients[1] = k2; }
void SetK3(float k3) { m_distortionCoefficients[2] = k3; }
void SetK4(float k4) { m_distortionCoefficients[3] = k4; }
void SetK5(float k5) { m_distortionCoefficients[4] = k5; }
void SetK6(float k6) { m_distortionCoefficients[5] = k6; }
void SetP1(float p1) { m_distortionCoefficients[6] = p1; }
void SetP2(float p2) { m_distortionCoefficients[7] = p2; }
private:
// k1, k2, k3, k4, k5, k6, p1, p2
std::vector<float> m_distortionCoefficients;
};
struct Line
{
float M;
float B;
};
struct Bounds
{
float Lower;
float Upper;
};
class LinearFocalLengthModel
{
public:
LinearFocalLengthModel(Line fx, Line fy, float cx, float cy, Bounds focalbounds, Size calibrationSize,
const std::vector<float>& distortionPoly3k = {},
const std::vector<float>& distortionRational6k = {}) :
m_fx(fx), m_fy(fy), m_cx(cx), m_cy(cy), m_focalBounds(focalbounds), m_calibrationSize(calibrationSize)
{
assert((distortionPoly3k.empty() || distortionRational6k.empty()) && "can't pass two types currently. calibrations have different intrinsics depending on model");
assert(cx >= 0 && cx <= 1 && "cx and cy need to be a ratio");
assert(cy >= 0 && cy <= 1 && "cx and cy need to be a ratio");
if (!distortionPoly3k.empty())
{
assert(distortionPoly3k.size() == 5 && "expecting k0, k1, k2, p0, p1, for poly3k distortion");
std::copy(distortionPoly3k.begin(), distortionPoly3k.end(), std::back_inserter(m_distortionPoly3k));
}
if (!distortionRational6k.empty())
{
assert(distortionRational6k.size() == 8 && "expecting k0, k1, k2, k3, k4, k5, p0, p1, for rational6k distortion");
std::copy(distortionRational6k.begin(), distortionRational6k.end(), std::back_inserter(m_distortionRational6k));
}
}
LinearFocalLengthModel()
{
m_fx = { 0,0 };
m_fy = { 0,0 };
m_cx = 0;
m_cy = 0;
m_focalBounds = { 0,0 };
m_calibrationSize = { 0, 0 };
}
Intrinsics CreateIntrinsics(boost::optional<uint32_t> lensPosition, size_t width, size_t height) const
{
assert(width / (float)height == m_calibrationSize.Width / (float)m_calibrationSize.Height && "aspect not equal need to crop to modify resolution");
assert(lensPosition || (m_fx.M == 0 && m_fy.M == 0));
float fx = m_fx.B;
float fy = m_fy.B;
if (lensPosition)
{
fx += m_fx.M * lensPosition.value();
fy += m_fy.M * lensPosition.value();
}
float FxInPixelCoordinates = fx * width;
float FyInPixelCoordinates = fy * height;
float CxInPixelCoordinates = m_cx * width;
float CyInPixelCoordinates = m_cy * height;
return Intrinsics(CxInPixelCoordinates, CyInPixelCoordinates,
FxInPixelCoordinates, FyInPixelCoordinates,
gsl::narrow<uint32_t>(width), gsl::narrow<uint32_t>(height));
}
std::shared_ptr<const PinholeCameraModel> CreatePinholeCameraModel(const boost::optional<uint32_t>& lensPosition, size_t width, size_t height) const
{
return std::make_shared<const PinholeCameraModel>(CreateIntrinsics(lensPosition, width, height));
}
std::shared_ptr<const Poly3KCameraModel> CreatePoly3kCameraModel(const boost::optional<uint32_t>& lensPosition, size_t width, size_t height) const
{
return std::make_shared<const Poly3KCameraModel>(CreateIntrinsics(lensPosition, width, height), m_distortionPoly3k);
}
std::shared_ptr<const Rational6KCameraModel> CreateRational6kCameraModel(const boost::optional<uint32_t>& lensPosition, size_t width, size_t height) const
{
return std::make_shared<const Rational6KCameraModel>(CreateIntrinsics(lensPosition, width, height), m_distortionRational6k);
}
std::shared_ptr<const CameraModel> GetCameraModel(const boost::optional<uint32_t>& lensPosition, size_t width, size_t height)
{
if (HasPoly3kModel())
{
return CreatePoly3kCameraModel(lensPosition, width, height);
}
else if (HasRational6kModel())
{
return CreateRational6kCameraModel(lensPosition, width, height);
}
else
{
return CreatePinholeCameraModel(lensPosition, width, height);
}
}
Line GetFx() const { return m_fx; }
Line GetFy() const { return m_fy; }
float GetCx() const { return m_cx; }
float GetCy() const { return m_cy; }
Bounds GetFocalBounds() const { return m_focalBounds; }
Size GetCalibrationSize() const { return m_calibrationSize; }
bool HasPoly3kModel()
{
return m_distortionPoly3k.size() > 0;
}
bool HasRational6kModel()
{
return m_distortionRational6k.size() > 0;
}
std::vector<float> GetDistortionPoly3k() const { return m_distortionPoly3k; }
std::vector<float> GetDistortionRational6k() const { return m_distortionRational6k; }
private:
//F = m*focusValue + b
Line m_fx, m_fy;
float m_cx, m_cy;
Bounds m_focalBounds;
Size m_calibrationSize;
//distortion
std::vector<float> m_distortionPoly3k;
std::vector<float> m_distortionRational6k;
};
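// Illustrative usage sketch (the numbers below are made-up placeholder values, not real calibration data):
// a model whose focal length grows linearly with the lens position, principal point at the image
// centre, calibrated at 1920x1080 and evaluated at the same resolution.
//
//   calibration::LinearFocalLengthModel model(
//       /*fx*/ { 1e-4f, 0.9f }, /*fy*/ { 1e-4f, 0.9f },
//       /*cx*/ 0.5f, /*cy*/ 0.5f,
//       /*focalBounds*/ { 0.8f, 1.2f }, /*calibrationSize*/ { 1920, 1080 });
//   Intrinsics intrinsics = model.CreateIntrinsics(400u, 1920, 1080);
//   auto camera = model.GetCameraModel(400u, 1920, 1080); // pinhole here, since no distortion was passed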
}
struct Depth
{
static constexpr float INVALID_DEPTH = -1.0f;
float NearPlaneDepth;
float FarPlaneDepth;
gsl::span<const ProjectedPoint> SparseDepth;
Depth(float nearDepth, float farDepth, gsl::span<const ProjectedPoint> sparse)
: NearPlaneDepth{ nearDepth }, FarPlaneDepth{ farDepth }, SparseDepth{ sparse }
{}
Depth()
: NearPlaneDepth{ INVALID_DEPTH }, FarPlaneDepth{ INVALID_DEPTH }
{}
};
}
|
(* Title: HOL/Auth/n_flash_lemma_on_inv__103.thy
Author: Yongjian Li and Kaiqiang Duan, State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
Copyright 2016 State Key Lab of Computer Science, Institute of Software, Chinese Academy of Sciences
*)
header{*The n_flash Protocol Case Study*}
theory n_flash_lemma_on_inv__103 imports n_flash_base
begin
section{*All lemmas on causal relation between inv__103 and some rule r*}
lemma n_PI_Remote_PutXVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_PI_Remote_PutX dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_PI_Remote_PutX dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_PI_Remote_ReplaceVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_PI_Remote_Replace src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_PI_Remote_Replace src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_Get_Put_HeadVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Put_Head N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_Get_Put_Head N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)) (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_Get_PutVsinv__103:
assumes a1: "(\<exists> src dst. src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_Get_Put src dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src dst where a1:"src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_Get_Put src dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>dst~=p__Inv4)\<or>(src~=p__Inv4\<and>dst=p__Inv4)\<or>(src~=p__Inv4\<and>dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_Get_Put_HomeVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_Get_Put_Home dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_NI_Remote_Get_Put_Home dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_1Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_1 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_1 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_2Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_2 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_2 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_3Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_3 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_3 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_4Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_4 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_4 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_5Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_5 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_5 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_6Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_6 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_6 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_7__part__0Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_7__part__0 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_7__part__0 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_7__part__1Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_7__part__1 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_7__part__1 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_7_NODE_Get__part__0Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_7_NODE_Get__part__0 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_7_NODE_Get__part__0 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_7_NODE_Get__part__1Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_7_NODE_Get__part__1 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_7_NODE_Get__part__1 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
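(* In the _Home variants of these rules the ShrVld/ShrSet branch is discharged
   via ?P3 with an auxiliary formula negating Sta.Dir.HomeShrSet = true, rather
   than a formula over the cache state of p__Inv4, and the HeadVld/HeadPtr branch
   is closed directly via ?P1. *)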
lemma n_NI_Local_GetX_PutX_8_HomeVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_8_Home N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_8_Home N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeShrSet'')) (Const true)))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_8_Home_NODE_GetVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_8_Home_NODE_Get N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_8_Home_NODE_Get N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeShrSet'')) (Const true)))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
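(* n_NI_Local_GetX_PutX_8 and the other two-parameter rules below
   (n_NI_Local_GetX_PutX_8_NODE_Get, n_NI_Local_GetX_PutX_10) carry a second node
   parameter pp, so the initial case analysis is three-way:
   src = p__Inv4 \<and> pp ~= p__Inv4, src ~= p__Inv4 \<and> pp = p__Inv4, and both
   distinct from p__Inv4.  The first case is closed directly via ?P1; the other
   two repeat the eight-way directory split of the single-parameter lemmas. *)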
lemma n_NI_Local_GetX_PutX_8Vsinv__103:
assumes a1: "(\<exists> src pp. src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_8 N src pp)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src pp where a1:"src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_8 N src pp" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>pp~=p__Inv4)\<or>(src~=p__Inv4\<and>pp=p__Inv4)\<or>(src~=p__Inv4\<and>pp~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>pp~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_8_NODE_GetVsinv__103:
assumes a1: "(\<exists> src pp. src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_8_NODE_Get N src pp)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src pp where a1:"src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_8_NODE_Get N src pp" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>pp~=p__Inv4)\<or>(src~=p__Inv4\<and>pp=p__Inv4)\<or>(src~=p__Inv4\<and>pp~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>pp~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_9__part__0Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_9__part__0 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_9__part__0 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
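(* Unlike __part__0 just above, in n_NI_Local_GetX_PutX_9__part__1 both positive
   directory branches need ?P3: the ShrVld/ShrSet branch uses an auxiliary
   formula over CacheState p__Inv4 and ShrVld, and the HeadVld/HeadPtr branch one
   over CacheState p__Inv4 and Dirty. *)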
lemma n_NI_Local_GetX_PutX_9__part__1Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_9__part__1 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_9__part__1 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
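(* n_NI_Local_GetX_PutX_10_Home follows the same pattern as
   n_NI_Local_GetX_PutX_8_Home above, reusing the negated HomeShrSet formula as
   the ?P3 witness in the ShrVld/ShrSet branch. *)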
lemma n_NI_Local_GetX_PutX_10_HomeVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_10_Home N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_10_Home N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeShrSet'')) (Const true)))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_10Vsinv__103:
assumes a1: "(\<exists> src pp. src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_10 N src pp)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src pp where a1:"src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_Local_GetX_PutX_10 N src pp" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>pp~=p__Inv4)\<or>(src~=p__Inv4\<and>pp=p__Inv4)\<or>(src~=p__Inv4\<and>pp~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>pp~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp~=p__Inv4)"
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 b1 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4)))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadVld'')) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_GetX_PutX_11Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_PutX_11 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Local_GetX_PutX_11 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_GetX_PutXVsinv__103:
assumes a1: "(\<exists> src dst. src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_GetX_PutX src dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src dst where a1:"src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_GetX_PutX src dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>dst~=p__Inv4)\<or>(src~=p__Inv4\<and>dst=p__Inv4)\<or>(src~=p__Inv4\<and>dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_GetX_PutX_HomeVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_GetX_PutX_Home dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_NI_Remote_GetX_PutX_Home dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_PutVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_Put dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_NI_Remote_Put dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "((formEval (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''InvMarked'')) (Const true)) s))\<or>((formEval (neg (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''InvMarked'')) (Const true))) s))" by auto
moreover {
assume c1: "((formEval (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''InvMarked'')) (Const true)) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (neg (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''InvMarked'')) (Const true))) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Remote_PutXVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_PutX dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_NI_Remote_PutX dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "?P3 s"
apply (cut_tac a1 a2 b1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''InvSet'') p__Inv4)) (Const true)) (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''UniMsg'') p__Inv4) ''Cmd'')) (Const UNI_PutX))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvVsinv__103:
assumes a1: "(\<exists> dst. dst\<le>N\<and>r=n_NI_Inv dst)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain dst where a1:"dst\<le>N\<and>r=n_NI_Inv dst" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(dst=p__Inv4)\<or>(dst~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(dst=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(dst~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvAck_exists_HomeVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_InvAck_exists_Home src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_InvAck_exists_Home src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvAck_existsVsinv__103:
assumes a1: "(\<exists> src pp. src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_InvAck_exists src pp)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src pp where a1:"src\<le>N\<and>pp\<le>N\<and>src~=pp\<and>r=n_NI_InvAck_exists src pp" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4\<and>pp~=p__Inv4)\<or>(src~=p__Inv4\<and>pp=p__Inv4)\<or>(src~=p__Inv4\<and>pp~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4\<and>pp~=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4\<and>pp~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvAck_1Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_InvAck_1 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_InvAck_1 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvAck_2Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_InvAck_2 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_InvAck_2 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_InvAck_3Vsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_InvAck_3 N src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_InvAck_3 N src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "?P1 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_ReplaceVsinv__103:
assumes a1: "(\<exists> src. src\<le>N\<and>r=n_NI_Replace src)" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a1 obtain src where a1:"src\<le>N\<and>r=n_NI_Replace src" apply fastforce done
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "(src=p__Inv4)\<or>(src~=p__Inv4)" apply (cut_tac a1 a2, auto) done
moreover {
assume b1: "(src=p__Inv4)"
have "((formEval (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) s))\<or>((formEval (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) s))" by auto
moreover {
assume c1: "((formEval (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) s))"
have "?P1 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) s))"
have "?P2 s"
proof(cut_tac a1 a2 b1 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately have "invHoldForRule s f r (invariants N)" by satx
}
moreover {
assume b1: "(src~=p__Inv4)"
have "?P2 s"
proof(cut_tac a1 a2 b1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_PI_Local_GetX_PutX_HeadVld__part__0Vsinv__103:
assumes a1: "(r=n_PI_Local_GetX_PutX_HeadVld__part__0 N )" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_PI_Local_GetX_PutX_HeadVld__part__1Vsinv__103:
assumes a1: "(r=n_PI_Local_GetX_PutX_HeadVld__part__1 N )" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))\<or>((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true)) (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''Dirty'')) (Const false))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HeadPtr'')) (Const (index p__Inv4))))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''ShrVld'')) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true))) (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''Dir'') ''HomeHeadPtr'')) (Const false)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_ShWbVsinv__103:
assumes a1: "(r=n_NI_ShWb N )" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)" (is "?P1 s \<or> ?P2 s \<or> ?P3 s")
proof -
from a2 obtain p__Inv4 where a2:"p__Inv4\<le>N\<and>f=inv__103 p__Inv4" apply fastforce done
have "((formEval (andForm (eqn (Const (index p__Inv4)) (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''Proc''))) (eqn (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''HomeProc'')) (Const false))) s))\<or>((formEval (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)) s))\<or>((formEval (andForm (neg (eqn (Const (index p__Inv4)) (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''Proc'')))) (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)))) s))\<or>((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''HomeProc'')) (Const false))) (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)))) s))" by auto
moreover {
assume c1: "((formEval (andForm (eqn (Const (index p__Inv4)) (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''Proc''))) (eqn (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''HomeProc'')) (Const false))) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E)) (eqn (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''Cmd'')) (Const SHWB_ShWb))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)) s))"
have "?P3 s"
apply (cut_tac a1 a2 c1, simp, rule_tac x="(neg (andForm (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)) (eqn (IVar (Field (Para (Field (Ident ''Sta'') ''Proc'') p__Inv4) ''CacheState'')) (Const CACHE_E))))" in exI, auto) done
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (Const (index p__Inv4)) (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''Proc'')))) (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
moreover {
assume c1: "((formEval (andForm (neg (eqn (IVar (Field (Field (Ident ''Sta'') ''ShWbMsg'') ''HomeProc'')) (Const false))) (neg (eqn (IVar (Para (Field (Field (Ident ''Sta'') ''Dir'') ''ShrSet'') p__Inv4)) (Const true)))) s))"
have "?P1 s"
proof(cut_tac a1 a2 c1, auto) qed
then have "invHoldForRule s f r (invariants N)" by auto
}
ultimately show "invHoldForRule s f r (invariants N)" by satx
qed
lemma n_NI_Local_Get_Get__part__1Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Get__part__1 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Remote_GetVsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_PI_Remote_Get src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_GetX_PutX__part__0Vsinv__103:
assumes a1: "r=n_PI_Local_GetX_PutX__part__0 " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_WbVsinv__103:
assumes a1: "r=n_NI_Wb " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_StoreVsinv__103:
assumes a1: "\<exists> src data. src\<le>N\<and>data\<le>N\<and>r=n_Store src data" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_GetX_GetX__part__1Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_GetX__part__1 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_GetX_GetX__part__1Vsinv__103:
assumes a1: "r=n_PI_Local_GetX_GetX__part__1 " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_GetX_GetX__part__0Vsinv__103:
assumes a1: "r=n_PI_Local_GetX_GetX__part__0 " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_Store_HomeVsinv__103:
assumes a1: "\<exists> data. data\<le>N\<and>r=n_Store_Home data" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_ReplaceVsinv__103:
assumes a1: "r=n_PI_Local_Replace " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_GetX_Nak__part__1Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_Nak__part__1 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_Nak__part__1Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Nak__part__1 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_Get__part__0Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Get__part__0 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_GetX_Nak__part__2Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_Nak__part__2 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_PutXVsinv__103:
assumes a1: "r=n_PI_Local_PutX " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_Nak__part__2Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Nak__part__2 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_GetX_GetX__part__0Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_GetX__part__0 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_Get_PutVsinv__103:
assumes a1: "r=n_PI_Local_Get_Put " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Remote_GetX_Nak_HomeVsinv__103:
assumes a1: "\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_GetX_Nak_Home dst" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_PutXAcksDoneVsinv__103:
assumes a1: "r=n_NI_Local_PutXAcksDone " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Remote_GetX_NakVsinv__103:
assumes a1: "\<exists> src dst. src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_GetX_Nak src dst" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_NakVsinv__103:
assumes a1: "\<exists> dst. dst\<le>N\<and>r=n_NI_Nak dst" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Remote_GetXVsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_PI_Remote_GetX src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_GetX_PutX__part__1Vsinv__103:
assumes a1: "r=n_PI_Local_GetX_PutX__part__1 " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Remote_Get_Nak_HomeVsinv__103:
assumes a1: "\<exists> dst. dst\<le>N\<and>r=n_NI_Remote_Get_Nak_Home dst" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_PutVsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Put src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_GetX_Nak__part__0Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_GetX_Nak__part__0 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Replace_HomeVsinv__103:
assumes a1: "r=n_NI_Replace_Home " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_PutVsinv__103:
assumes a1: "r=n_NI_Local_Put " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Remote_Get_NakVsinv__103:
assumes a1: "\<exists> src dst. src\<le>N\<and>dst\<le>N\<and>src~=dst\<and>r=n_NI_Remote_Get_Nak src dst" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Nak_ClearVsinv__103:
assumes a1: "r=n_NI_Nak_Clear " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_Put_DirtyVsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Put_Dirty src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Local_Get_Nak__part__0Vsinv__103:
assumes a1: "\<exists> src. src\<le>N\<and>r=n_NI_Local_Get_Nak__part__0 src" and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_PI_Local_Get_GetVsinv__103:
assumes a1: "r=n_PI_Local_Get_Get " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_Nak_HomeVsinv__103:
assumes a1: "r=n_NI_Nak_Home " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
lemma n_NI_FAckVsinv__103:
assumes a1: "r=n_NI_FAck " and
a2: "(\<exists> p__Inv4. p__Inv4\<le>N\<and>f=inv__103 p__Inv4)"
shows "invHoldForRule s f r (invariants N)"
apply (rule noEffectOnRule, cut_tac a1 a2, auto) done
end
|
/**************************************************************************/
/* Common definitions for the c vader routines */
/**************************************************************************/
/**************************************************************************/
/* General note on naming conventions: arrays without subscripts have */
/* indices that range from 0 to nr-1, with no ghost zones, and are cell- */
/* centered. Arrays with _h are edge centered and have indices that run */
/* from 0 to nr. Arrays with _g are cell-centered with one ghost zone, */
/* and have indices that go from 0 to nr+1; indices 0 and nr are the */
/* ghost zones. */
/**************************************************************************/
#ifndef _vader_common_h_
#define _vader_common_h_
#include <gsl/gsl_vector.h>
#include <float.h>
#include <stdbool.h>
#include <time.h>
/* Slope limit parameter */
#define SLOPELIMIT 0.1
/* Descriptor for a grid */
typedef struct {
unsigned long nr; /* Number of real cells */
bool linear; /* Is this a linear or logarithmic grid? */
double *r_g, *r_h; /* Cell center, edge locations */
double *dr_g; /* Cell sizes / log sizes */
double *area; /* Area of a zone */
double *vphi_g, *vphi_h; /* Rotation curve */
double *beta_g, *beta_h; /* Logarithmic index of rotation curve */
double *psiEff_g, *psiEff_h; /* Effective gravitational potential */
double *g_h; /* Factor appearing in derivatives */
} grid;
/* Workspace for calculations */
typedef struct {
double *pres_g, *presNew_g, *colNew, *colTmp;
double *alpha_g, *hint_g, *hintL_g, *hintR_g;
double *ppmwksp_g;
double *fmLast_h, *fmNew_h;
double *ftLast_h, *feLast_h;
double *massSrcLast, *massSrcNew, *intEnSrc;
double *eIntTmp, *eIntNew;
double *gammaLast, *deltaLast, *gammaNew, *deltaNew;
double *mSrc, *eSrc;
gsl_vector *ud_g, *ld_g, *diag_g, *rhs_g, *presTmp_g;
#if AA_M > 0
double *colHist, *presHist, *eIntHist;
double *colResid, *presResid, *eIntResid;
gsl_vector *constraint;
#endif
} wksp;
/* Pressure boundary condition types */
typedef enum { FIXED_MASS_FLUX, FIXED_TORQUE_FLUX, FIXED_TORQUE }
pres_bc_type;
/* Enthalpy boundary condition types */
typedef enum { FIXED_ENTHALPY_VALUE, FIXED_ENTHALPY_GRADIENT }
enth_bc_type;
/* IO status indicators */
typedef enum { GOOD_IO, IO_ERROR, ALLOCATION_ERROR } iostatus;
/* Startup status indicators */
typedef enum { GOOD_START, RESTART_ERROR, MEMORY_ERROR, FIRST_DT_ERROR }
setup_status;
/* Simulation status indicators */
typedef enum { RUNNING, NORMAL_EXIT, ZENO_ERROR, TOO_MANY_STEPS }
status;
/* Macros used various places in code */
#define SQR(x) ((x)*(x))
#define LARGE DBL_MAX
#define SMALL DBL_MIN
#endif
/* end _vader_common_h_ */
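/**************************************************************************/
/* Illustrative sketch only (not part of the vader library): a minimal   */
/* standalone program mirroring the indexing conventions described at    */
/* the top of this header -- r_h is edge-centered with indices 0..nr,    */
/* r_g is cell-centered with ghost zones at indices 0 and nr+1. The grid */
/* size and spacing below are made up purely for illustration.           */
/**************************************************************************/
#include <stdio.h>
#include <stdlib.h>

int main(void) {
  unsigned long nr = 8;                 /* number of real cells           */
  double rmin = 1.0, rmax = 2.0;        /* hypothetical radial extent     */
  double dr = (rmax - rmin) / nr;       /* uniform (linear) cell size     */
  double *r_h = malloc((nr + 1) * sizeof(double)); /* edges, 0..nr        */
  double *r_g = malloc((nr + 2) * sizeof(double)); /* centers + 2 ghosts  */

  for (unsigned long i = 0; i <= nr; i++)
    r_h[i] = rmin + i * dr;             /* cell edge positions            */
  for (unsigned long i = 1; i <= nr; i++)
    r_g[i] = 0.5 * (r_h[i - 1] + r_h[i]); /* real cell centers, 1..nr     */
  r_g[0] = r_g[1] - dr;                 /* inner ghost zone               */
  r_g[nr + 1] = r_g[nr] + dr;           /* outer ghost zone               */

  printf("inner ghost %g | first real %g | last real %g | outer ghost %g\n",
         r_g[0], r_g[1], r_g[nr], r_g[nr + 1]);

  free(r_h);
  free(r_g);
  return 0;
}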
|
# Partial Differential Equation and Gaussian Process
# 1 The Spatial Temporal Model
A model of post-transcriptional processing is formulated to describe the spatio-temporal Drosophila
protein expression data (Becker 2012). Protein production is assumed to depend linearly on the
concentration of mRNA at an earlier time point. The model also allows for diffusion of protein between
nuclei and linear protein decay; these processes are governed by the diffusion parameter and the protein
degradation rate, respectively.
\begin{equation}
a \frac{\partial ^2 y_{x,t}}{\partial x^2} + b \frac{\partial y_{x,t}}{\partial t} + c y_{x,t}= f_{x,t}
\end{equation}
The coefficients a, b and c are unknown. In this study, we use a Gaussian process with an RBF kernel as a prior over $y_{x,t}$. The multi-output Gaussian process is obtained by applying the partial differential operator to the spatio-temporal RBF kernel.
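Concretely, a minimal sketch of that construction (using the generic linear-operator identity for Gaussian processes, not the exact parameterisation coded inside "GPy.kern.ODE_st"): write the operator as $\mathcal{L} = a\,\partial^2/\partial x^2 + b\,\partial/\partial t + c$ and take a separable RBF prior covariance for $y$; the covariances of the joint process $(y, f)$ then follow by applying $\mathcal{L}$ to each argument. The symbols $\sigma_{Yx}, \sigma_{Yt}, \ell_{Yx}, \ell_{Yt}$ below are assumed to correspond to the kernel parameters variance_Yx, variance_Yt, lengthscale_Yx, lengthscale_Yt.
\begin{equation}
k_{yy}\big((x,t),(x',t')\big) = \sigma_{Yx}^2 \sigma_{Yt}^2
\exp\!\left(-\frac{(x-x')^2}{2\ell_{Yx}^2}\right)
\exp\!\left(-\frac{(t-t')^2}{2\ell_{Yt}^2}\right),
\qquad
k_{fy} = \mathcal{L}_{(x,t)}\, k_{yy},
\qquad
k_{ff} = \mathcal{L}_{(x,t)} \mathcal{L}_{(x',t')}\, k_{yy}.
\end{equation}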
```python
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
# model : a*d^2y/dx^2 + b * dy/dt + c * y = f
# lengthscale for Yt :
# lengthscale for Yx :
# variance for Yt :
# variance for Yx :
%pylab inline
%config InlineBackend.figure_format = 'svg'
import numpy as np
import pylab as pb
import GPy
```
Populating the interactive namespace from numpy and matplotlib
The spatio-temporal multi-output partial differential equation kernel is implemented in GPy under the name "GPy.kern.ODE_st". The inputs are one-dimensional spatial data, one-dimensional temporal data, and a one-dimensional index that indicates whether an observation belongs to f or to Y.
```python
KY = GPy.kern.ODE_st(input_dim=3,a=1.,b=1,c=1,variance_Yx=1.,variance_Yt=1.,
lengthscale_Yx=1.,lengthscale_Yt=1.)
nd =10
t1 = 10*np.random.rand(nd)[:,None]
x1 = 10*np.random.rand(nd)[:,None]
inx = np.zeros(nd**2)[:,None]
T = np.kron(t1,np.ones(nd)[:,None])
S = np.kron(np.ones(nd)[:,None],x1)
inx[nd**2//2:nd**2] = 1  # second half of the rows are the f observations (index 1)
X = np.hstack([T,S,inx])
#Y=np.sin(X[:,0:1])
#Y=np.sin(X[:,0:1])
Y = np.sin(X[:,0:1]) + np.cos(X[:,1:2])#*(1-X[:,1:2]) #+ np.random.randn(16,1)*0.1
Y[nd**2//2:nd**2] = np.sin(X[nd**2//2:nd**2,0:1]) + np.cos(X[nd**2//2:nd**2,0:1]) + 2*np.cos(X[nd**2//2:nd**2,1:2])
```
```python
m = GPy.models.GPRegression(X,Y,KY)
```
# 2 Model Output
In order to test the kernel, we define
$y_{x,t} = \cos(x) + \sin(t)$
$f_{x,t} = \sin(t) + \cos(t) + 2\cos(x)$
and set a, b and c equal to 1. With some arbitrary choices of kernel parameters we plot the random fields of f and y separately.
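As a quick hand check of these test functions (independent of the kernel implementation), the partial derivatives of the chosen $y_{x,t}$ are
\begin{equation}
\frac{\partial^2 y_{x,t}}{\partial x^2} = -\cos(x), \qquad
\frac{\partial y_{x,t}}{\partial t} = \cos(t),
\end{equation}
so with $a=b=c=1$ the combination $-y_{xx} + y_t + y = 2\cos(x) + \sin(t) + \cos(t)$ matches the $f_{x,t}$ generated in the code above, i.e. the test data corresponds to the diffusion term entering with a negative sign.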
The spatial variable can also be fixed to a particular value, which makes it easier to compare the true curve with our estimate.
```python
fixX = 5.
xplot = np.linspace(-2,12,200)
m.plot(fixed_inputs=[(2,0)], which_data_rows = slice(0,nd**2//2))
m.plot(fixed_inputs=[(1,fixX),(2,0)], which_data_rows = slice(0,nd**2//2))
pb.plot(xplot,np.cos(fixX) + np.sin(xplot))
m.plot(fixed_inputs=[(2,1)], which_data_rows = slice(nd**2//2,nd**2))
m.plot(fixed_inputs=[(1,fixX),(2,1)], which_data_rows = slice(nd**2//2,nd**2))
pb.plot(xplot,np.cos(xplot) + np.sin(xplot) + 2*np.cos(fixX))
```
/home/nbuser/anaconda2_501/lib/python2.7/site-packages/matplotlib/contour.py:967: UserWarning:The following kwargs were not used by contour: 'linewidth', 'label'
[<matplotlib.lines.Line2D at 0x7feaae4ae2d0>]
/home/nbuser/anaconda2_501/lib/python2.7/site-packages/matplotlib/figure.py:1999: UserWarning:This figure includes Axes that are not compatible with tight_layout, so results might be incorrect.
```python
m.optimize()
```
<paramz.optimization.optimization.opt_lbfgsb at 0x7feab2164050>
```python
print(m)
```
Name : GP regression
Objective : -316.891724445
Number of Parameters : 8
Number of Optimization Parameters : 8
Updates : True
Parameters:
  GP_regression.           |               value  |  constraints  |  priors
  ode_st.a                 |      0.927831276506  |      +ve      |
  ode_st.b                 |      0.878872529752  |      +ve      |
  ode_st.c                 |       1.07222979737  |      +ve      |
  ode_st.variance_Yt       |       5.87569800896  |      +ve      |
  ode_st.variance_Yx       |       5.87569800896  |      +ve      |
  ode_st.lengthscale_Yt    |        12.464230291  |      +ve      |
  ode_st.lengthscale_Yx    |        12.713259674  |      +ve      |
  Gaussian_noise.variance  |   1.74044285035e-16  |      +ve      |
After optimization, the estimated values of a, b and c are printed. They are quite close to the true parameters used to generate the test data. The corresponding random fields are plotted below.
```python
fixX = 5.
xplot = np.linspace(-2,12,200)
m.plot(fixed_inputs=[(2,0)], which_data_rows = slice(0,nd**2//2))
m.plot(fixed_inputs=[(1,fixX),(2,0)], which_data_rows = slice(0,nd**2//2))
pb.plot(xplot,np.cos(fixX) + np.sin(xplot))
m.plot(fixed_inputs=[(2,1)], which_data_rows = slice(nd**2//2,nd**2))
m.plot(fixed_inputs=[(1,fixX),(2,1)], which_data_rows = slice(nd**2//2,nd**2))
pb.plot(xplot,np.cos(xplot) + np.sin(xplot) + 2*np.cos(fixX))
```
[<matplotlib.lines.Line2D at 0x7feaadcfeb10>]
The true random field is plotted below.
```python
xplot = np.linspace(-1,10,200)
#pb.plot(xplot,np.sin(xplot))
PP = np.zeros((xplot.shape[0], xplot.shape[0]))
QQ = np.zeros((xplot.shape[0], xplot.shape[0]))
for i in range(0,200):
for j in range(0,200):
PP[i,j] = np.sin(xplot[i]) + np.cos(xplot[j])
QQ[i,j] = np.sin(xplot[i]) + np.cos(xplot[i]) + 2*np.cos(xplot[j])
pb.figure()
pb.imshow(PP)
```
<matplotlib.image.AxesImage at 0x7feaad8b8090>
|
[STATEMENT]
lemma oneBIT_step3y:
assumes "x\<noteq>y" "x : {x0,y0}" "y\<in>{x0,y0}"
shows "BIT_Step (type3 [x0, y0] x y) y = type0 [x0, y0] y x"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. type3 [x0, y0] x y \<bind> (\<lambda>s. BIT_step s y \<bind> (\<lambda>(a, is'). return_pmf (step (fst s) y a, is'))) = type0 [x0, y0] y x
[PROOF STEP]
using assms
[PROOF STATE]
proof (prove)
using this:
x \<noteq> y
x \<in> {x0, y0}
y \<in> {x0, y0}
goal (1 subgoal):
1. type3 [x0, y0] x y \<bind> (\<lambda>s. BIT_step s y \<bind> (\<lambda>(a, is'). return_pmf (step (fst s) y a, is'))) = type0 [x0, y0] y x
[PROOF STEP]
apply(simp add: type3_def BIT_step_def bind_assoc_pmf bind_return_pmf step_def mtf2_def)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>x \<noteq> y; x = x0 \<or> x = y0; y = x0 \<or> y = y0\<rbrakk> \<Longrightarrow> (y0 = y \<longrightarrow> (x0 = y \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (index (snd (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y) (fst (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [y, y] y x) \<and> (x0 \<noteq> y \<longrightarrow> (x0 = x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if xa \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! 
(index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (index (snd (snd (if xa \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y) (fst (snd (if xa \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if xa \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [x, y] y x) \<and> (x0 \<noteq> x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if [] ! 0 \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (index (snd (snd (if [] ! 0 \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y) (fst (snd (if [] ! 0 \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if [] ! 0 \<and> \<not> xb then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [x0, y] y x))) \<and> (y0 \<noteq> y \<longrightarrow> (x0 = y \<longrightarrow> (y0 = x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if xb \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! 
(index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (index (snd (snd (if xb \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y) (fst (snd (if xb \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if xb \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [y, x] y x) \<and> (y0 \<noteq> x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (index (snd (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y) (fst (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if [] ! 0 \<and> \<not> xa then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [y, y0] y x)) \<and> (x0 \<noteq> y \<longrightarrow> (y0 = x \<longrightarrow> (x0 = x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! 
(index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (length (snd (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))) (fst (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [x, x] y x) \<and> (x0 \<noteq> x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if xb \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! (index [y0] x + 1) \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (length (snd (snd (if xb \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))) (fst (snd (if xb \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if xb \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [x0, x] y x)) \<and> (y0 \<noteq> x \<longrightarrow> (x0 = x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf (if y \<in> set (fst (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) then swaps [index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y - (if fst (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))) ! index (snd (snd (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y then 0 else length (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))..<index (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) y] (swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! (index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))) else swaps [] (fst (if [xa, xb] ! 0 \<and> \<not> [xa, xb] ! 
(index [y0] y + 1) then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))), flip (length (snd (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0]))))) (fst (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))), snd (snd (if xa \<and> \<not> [] ! 0 then ([x, y], [xa, xb], [x0, y0]) else ([y, x], [xa, xb], [x0, y0])))))) = type0 [x, y0] y x) \<and> (x0 \<noteq> x \<longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xb. return_pmf ([y, x], [xa, xb], [x0, y0]))) = type0 [x0, y0] y x))))
[PROOF STEP]
apply(safe)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<lbrakk>x0 \<noteq> y0; x = x0; y = y0; x0 \<noteq> y0\<rbrakk> \<Longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if y0 \<in> set (fst (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0 - (if fst (snd (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0 then 0 else length (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0] (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))), flip (index (snd (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0) (fst (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))), snd (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))))) = type0 [x0, y0] y0 x0
2. \<lbrakk>y0 \<noteq> x0; x = y0; y = x0; y0 \<noteq> x0\<rbrakk> \<Longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if x0 \<in> set (fst (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 - (if fst (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 then 0 else length (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0] (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))), flip (index (snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0) (fst (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))), snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))))) = type0 [x0, y0] x0 y0
[PROOF STEP]
apply(rule pmf_eqI)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<And>i. \<lbrakk>x0 \<noteq> y0; x = x0; y = y0; x0 \<noteq> y0\<rbrakk> \<Longrightarrow> pmf (bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if y0 \<in> set (fst (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0 - (if fst (snd (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0 then 0 else length (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0] (swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! 0 \<and> \<not> [x, xa] ! (index [y0] y0 + 1) then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))), flip (index (snd (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))) y0) (fst (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0])))), snd (snd (if x \<and> \<not> xa then ([x0, y0], [x, xa], [x0, y0]) else ([y0, x0], [x, xa], [x0, y0]))))))) i = pmf (type0 [x0, y0] y0 x0) i
2. \<lbrakk>y0 \<noteq> x0; x = y0; y = x0; y0 \<noteq> x0\<rbrakk> \<Longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if x0 \<in> set (fst (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 - (if fst (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 then 0 else length (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0] (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))), flip (index (snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0) (fst (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))), snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))))) = type0 [x0, y0] x0 y0
[PROOF STEP]
apply(simp add: add.commute pmf_bind swap_def type0_def)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>y0 \<noteq> x0; x = y0; y = x0; y0 \<noteq> x0\<rbrakk> \<Longrightarrow> bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if x0 \<in> set (fst (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 - (if fst (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 then 0 else length (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0] (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))), flip (index (snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0) (fst (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))), snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))))) = type0 [x0, y0] x0 y0
[PROOF STEP]
apply(rule pmf_eqI)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<And>i. \<lbrakk>y0 \<noteq> x0; x = y0; y = x0; y0 \<noteq> x0\<rbrakk> \<Longrightarrow> pmf (bernoulli_pmf (1 / 2) \<bind> (\<lambda>x. bernoulli_pmf (1 / 2) \<bind> (\<lambda>xa. return_pmf (if x0 \<in> set (fst (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) then swaps [index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 - (if fst (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))) ! index (snd (snd (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0 then 0 else length (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))))..<index (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0] (swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) else swaps [] (fst (if [x, xa] ! (index [y0] y0 + 1) \<and> \<not> [x, xa] ! 0 then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))), flip (index (snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))) x0) (fst (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0])))), snd (snd (if xa \<and> \<not> x then ([y0, x0], [x, xa], [x0, y0]) else ([x0, y0], [x, xa], [x0, y0]))))))) i = pmf (type0 [x0, y0] x0 y0) i
[PROOF STEP]
apply(simp add: pmf_bind swap_def type0_def)
[PROOF STATE]
proof (prove)
goal:
No subgoals!
[PROOF STEP]
done |
Load LFindLoad.
From lfind Require Import LFind.
From QuickChick Require Import QuickChick.
From adtind Require Import goal33.
Derive Show for natural.
Derive Arbitrary for natural.
Instance Dec_Eq_natural : Dec_Eq natural.
Proof. dec_eq. Qed.
Lemma conj23synthconj2 : forall (lv0 : natural) (lv1 : natural) (lv2 : natural) (lv3 : natural), (@eq natural (Succ (plus (mult lv0 lv1) lv2)) (plus lv3 (plus (Succ lv1) Zero))).
Admitted.
QuickChick conj23synthconj2.
|
lemma linear_imp_has_derivative: fixes f :: "'a::euclidean_space \<Rightarrow> 'b::real_normed_vector" shows "linear f \<Longrightarrow> (f has_derivative f) net" |
!|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
module hydro_sections
!BOP
! !MODULE: hydro_sections
!
! !DESCRIPTION:
! This module computes data along hydrographic sections to compare
! with cruise data.
! NOTE: currently does not work. routines appended below are from old
! CM-5 version and must be re-done.
!
! !REVISION HISTORY:
! CVS:$Id: hydro_sections.F90,v 1.4 2002/04/16 15:22:56 pwjones Exp $
! CVS:$Name: POP_2_0_1 $
! !USES:
use kinds_mod
implicit none
private
save
!EOP
!BOC
!EOC
!***********************************************************************
contains
!***********************************************************************
!BOP
! !IROUTINE: init_hydro_sections
! !INTERFACE:
subroutine init_hydro_sections
! !DESCRIPTION:
! Initializes all variables to be used for hydrographic sections.
!
! !REVISION HISTORY:
! same as module
!EOP
!BOC
!-----------------------------------------------------------------------
!-----------------------------------------------------------------------
!EOC
end subroutine init_hydro_sections
!***********************************************************************
end module hydro_sections
!|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|
||| Copyright 2016 Google Inc.
|||
||| Licensed under the Apache License, Version 2.0 (the "License");
||| you may not use this file except in compliance with the License.
||| You may obtain a copy of the License at
|||
||| http://www.apache.org/licenses/LICENSE-2.0
|||
||| Unless required by applicable law or agreed to in writing, software
||| distributed under the License is distributed on an "AS IS" BASIS,
||| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
||| See the License for the specific language governing permissions and
||| limitations under the License.
module Protobuf.ParseUtils
import Protobuf.Core
%default total
public export singularTypeForField : FieldDescriptor -> Type
singularTypeForField (MkFieldDescriptor _ ty _ _) = interpFieldValue ty
-- Because fields can come in any order, parsing a message is done in two
-- phases. First, we parse all the fields into a list of pairs
-- (i : Fin k ** singularTypeForField (index i fields))
-- Second, for each field we scan through this list and fill in that field's
-- value based on the list (or create an error).
public export FieldList : Vect k FieldDescriptor -> Type
FieldList {k=k} fields = List (i : Fin k ** singularTypeForField (index i fields))
||| Takes a `FieldList`, and selects the elements which represent the 0th field
||| and puts these into a list, and also creates a list of the remaining
||| elements, which are mapped to be part of of a new `FieldList` for the
||| remaining fields.
reduceFieldList : FieldList (f :: fs) -> (List (singularTypeForField f), FieldList fs)
reduceFieldList Nil = (Nil, Nil)
reduceFieldList ((FZ ** x) :: xs) = let (ys, zs) = reduceFieldList xs in
(x :: ys, zs)
reduceFieldList (((FS k) ** x) :: xs) = let (ys, zs) = reduceFieldList xs in
(ys, (k ** x) :: zs)
optionalFieldFromList : List (interpFieldValue f) -> Maybe (interpFieldValue f)
optionalFieldFromList Nil = Nothing
optionalFieldFromList (x::Nil) = Just x
optionalFieldFromList (x::xs) = optionalFieldFromList xs
fieldFromFieldList : List (singularTypeForField d) -> Either String (interpField d)
fieldFromFieldList {d=MkFieldDescriptor Optional _ _ _} xs = Right (last' xs)
fieldFromFieldList {d=MkFieldDescriptor Required _ name _} xs = case (last' xs) of
Nothing => Left ("The required field " ++ (show name) ++ " was not set.")
(Just x) => Right x
fieldFromFieldList {d=MkFieldDescriptor Repeated _ _ _} xs = Right xs
export messageFromFieldList : FieldList fields -> Either String (InterpFields fields)
messageFromFieldList {fields=Nil} _ = Right Nil
messageFromFieldList {fields=f::fs} xs = let (ys, zs) = reduceFieldList xs in
case fieldFromFieldList ys of
Left err => Left err
Right first => case messageFromFieldList zs of
Left err => Left err
Right rest => Right (first :: rest)
|
\section{Finite State Automata}
\subsection{Alphabets \& Strings}
\begin{itemize}
\item Let $ A $ be a set; then $ A^m $ is the set of all finite sequences $ a_1 \dots a_m $ with $ a_i \in A $, $ 1 \le i \le m $
\begin{itemize}
\item Elements of $ A $ are \textit{letters} or \textit{symbols}
\item Elements of $ A^n $ are \textit{words} or \textit{strings} over $ A $ of length $ m $
\end{itemize}
\item $ \varepsilon $ is the special \textit{empty string}, the only string of length $ 0 $
\item $ A^+ = \bigcup_{m \ge 1} A^m $ -- the set of non-empty strings over $ A $ of any length
\item $ A^* = A^+ \cup \set{\varepsilon} = \bigcup_{m \ge 0} A^m $ -- the set of (possibly empty) strings over $ A $ of any length
\item If $ \alpha = a_1 \dots a_m $, $ \beta = b_1 \dots b_n \in A^* $, then define $ \alpha \beta $ to be $ a_1 \dots a_m b_1 \dots b_n \in A^{m + n} $. This gives a binary `product' or \textit{concatenation} on $ A^* $
\item For $ \alpha \in A^+ $, define $ \alpha^n, n \in \Nat $ by $ \alpha^0 = \varepsilon $, and $ \alpha^{n+1} = \alpha^n \alpha $
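\item Example: for $ A = \set{a, b} $, $ \alpha = ab $ and $ \beta = ba $, we have $ \alpha \beta = abba \in A^4 $, $ \alpha^2 = abab $ and $ \alpha^0 = \varepsilon $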
\item A \textit{language} with alphabet $ A $ is a subset of $ A^* $
\end{itemize}
\clearpage
\subsection{Definition of an FSA}
\begin{itemize}
\item A Finite State Automaton (FSA) is a tuple $ M = (Q, F, A, \tau, q_0) $
\begin{itemize}
\item $ Q $ is a finite set of states
\item $ F \subseteq Q $ is the set of final states
\item $ A $ is the alphabet
\item $ \tau \subseteq Q \times A \times Q $ is the set of transitions
\item $ q_0 \in Q $ is the initial state
\end{itemize}
\item The transition diagram of an FSA is a directed graph with:
\begin{itemize}
\item Vertex set $ Q $
\item An edge for each transition; $ (q, a, q') \in \tau $ corresponds to an edge from $ q $ to $ q' $ with label $ a $
\item Initial state $ q_0 $ labelled with $ - $
\item Final states labelled with $ + $
\item Example: a \textit{non-deterministic} `haha machine', with $ A = \set{h, a} $
\begin{tikzpicture}
\node[circle,thick,draw] (q0) at (0, 0) {$ - $};
\node[circle,thick,draw] (q1) at (2, 0) {$ \; $};
\node[circle,thick,draw] (q2) at (4, 0) {$ + $};
\draw[edge] (q0) to[bend left] node[above] {h} (q1);
\draw[edge] (q1) to[bend left] node[above] {a} (q2);
\draw[edge] (q1) to[bend left] node[below] {a} (q0);
\end{tikzpicture}
\end{itemize}
\item A \textit{computation} of $ M $ is a sequence $ q_0, a_1, q_1, a_2, \dots, a_n, q_n $ with $ n \ge 0 $ where $ (q_i, a_{i+1}, q_{i+1}) \in \tau $ for $ 0 \le i \le n - 1 $
\begin{itemize}
\item The \textit{label} on the computation is $ a_1 \dots a_n $
\item The computation is \textit{successful} if $ q_n \in F $
\item A string $ a_1 \dots a_n $ is \textit{accepted} by $ M $ if there is a successful computation with label $ a_1 \dots a_n $, and it is \textit{rejected} otherwise
\end{itemize}
\item The language recognised by $ M $ is $ \Lang(M) = \setcomp{w \in A^*}{w \text{ is accepted by } M} $
\item There is a one-to-one correspondence between computations of $ M $ and paths in the graph from $ q_0 $
\item Example: $ A = \set{a, b} $ of an FSA accepting only words with an odd number of 'a's
\begin{tikzpicture}
\node[circle,thick,draw] (q0) at (0, 0) {$ - $};
\node[circle,thick,draw] (q1) at (4, 0) {$ + $};
\draw[edge] (q0) to[bend left] node[above] {a} (q1);
\draw[edge] (q0) to[loop left] node[left] {b} (q0);
\draw[edge] (q1) to[bend left] node[above] {a} (q0);
\draw[edge] (q1) to[loop right] node[right] {b} (q1);
\end{tikzpicture}
\item An FSA is deterministic (a DFA) if for all $ q \in Q, a \in A $ there is exactly one $ q' \in Q $ such that $ (q, a, q') \in \tau $
\item Example: DFA for the `haha machine'
\begin{tikzpicture}
\node[circle,thick,draw] (q0) at (0, 0) {$ - $};
\node[circle,thick,draw] (q1) at (2, 0) {$ \; $};
\node[circle,thick,draw] (q2) at (4, 0) {$ + $};
\draw[edge] (q0) to[bend left] node[above] {h} (q1);
\draw[edge] (q1) to[bend left] node[above] {a} (q2);
\draw[edge] (q2) to[bend left] node[below] {h} (q1);
\end{tikzpicture}
\item Note this machine lacks a transition for $ a $ when in the initial state -- though technically required for a DFA, it is easily fixed by adding an `error state' to catch what would otherwise be missing transitions
\end{itemize}
\clearpage
\subsection{Deterministic FSAs}
\begin{itemize}
\item For a DFA $ M $, define the transition function $ \delta: Q \times A \to Q $ by $ q' = \delta(q, a) $, where $ q' $ is the unique element such that $ (q, a, q') \in \tau $
\item If $ \Lang $ is a language with alphabet $ A $, then the following are equivalent:
\begin{enumerate}
\item $ \Lang $ is recognised by an FSA
\item $ \Lang $ is recognised by a DFA
\end{enumerate}
\item Given a non-deterministic FSA $ M = (Q, F, A, \tau, q_0) $, an equivalent DFA $ M' = (Q', F', A, \tau', q_0') $ may be generated by the \textit{powerset method}:
\begin{itemize}
\item $ Q' = \powerset{Q} \setminus \set{\emptyset} $ (i.e. the set of all non-empty subsets of $ Q $)
\item $ F' = \setcomp{X \in Q'}{q \in X \text{ for some } q \in F} $
\item For $ X \in Q', a \in A $, define $ \delta(X, a) := \setcomp{q \in Q}{(x, a, q) \in \tau \text{ for some } x \in X} $
\item $ \tau' = \setcomp{(X, a, \delta(X, a))}{X \in Q', a \in A} $
\item $ q_0' = \set{q_0} $
\end{itemize}
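\item Example: applying the powerset method to the non-deterministic `haha machine' above (writing its states as $ q_0 $ for $ - $, $ q_1 $ for the middle state and $ q_2 $ for $ + $), the subsets reachable from $ \set{q_0} $ are $ \set{q_1} $ and $ \set{q_0, q_2} $ (final, since $ q_2 \in F $): $ \delta(\set{q_0}, h) = \set{q_1} $, $ \delta(\set{q_1}, a) = \set{q_0, q_2} $ because $ (q_1, a, q_0), (q_1, a, q_2) \in \tau $, and $ \delta(\set{q_0, q_2}, h) = \set{q_1} $. Missing cases such as $ \delta(\set{q_0}, a) $ play the role of the `error state' mentioned earlier, and the result is exactly the deterministic `haha machine' drawn above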
\item Proof: show that $ \Lang(M) = \Lang(M') $
\begin{itemize}
\item $ \Lang(M) \subseteq \Lang(M') $:
\begin{itemize}
\item Given $ w = a_1 \dots a_n \in \Lang(M) $, let $ q_0, a_1, q_1, \dots, a_n, q_n $ be a successful computation of $ M $ with label $ w $
\item Then define $ q_i' = \delta(q_{i-1}', a_i) $ for $ 1 \le i \le n $
\item $ q_0', a_1, q_1' \dots a_n, q_n' $ will be a successful computation of $ M' $
\item Therefore $ w \in \Lang(M') $
\end{itemize}
\item $ \Lang(M') \subseteq \Lang(M) $:
\begin{itemize}
\item Let $ w = a_1 \dots a_n \in \Lang(M') $, and $ q_0', a_1, q_1', a_2, \dots, a_n, q_n' $ be a successful computation of $ M' $
\item Each $ q_i' $ cannot be the empty set
\item By definition of $ \tau' $, $ \exists q_1 \in q_1' $ s.t. $ (q_0, a_1, q_1) \in \tau $
\item Then we can find $ q_i \in q_i' $ s.t. $ (q_{i-1}, a_i, q_i) \in \tau $ for $ 1 \le i \le n $
\item For $ q_n $ we further require $ q_n \in F $
\item Therefore, $ q_0, a_1, q_1, a_2, \dots a_n, q_n $ is a successful computation
\item Therefore $ w \in \Lang(M) $
\end{itemize}
\end{itemize}
\end{itemize}
\clearpage
\subsection{The Pumping Lemma}
\begin{itemize}
\item The Pumping Lemma says that for any $ \Lang $ recognised by an FSA $ M $, there is a word length beyond which every word of $ \Lang $ can be split as $ xyz $, where $ x y^i z $ is also in the language for all $ i \ge 0 $
\item Formally there is an integer $ p > 0 $ s.t. any word $ w \in L $ with $ \abs{w} \ge p $ is of the form $ w = xyz $, where $ \abs{y} > 0 $, $ \abs{xy} \le p $ and $ x y^i z \in \Lang $ for $ i \ge 0 $
\item Proof:
\begin{itemize}
\item Let $ p $ be the number of states in $ M $, and suppose $ w = a_1 \dots a_n \in \Lang $, where $ n \ge p $
\item A successful computation $ q_0, a_1, \dots, q_n $ has to pass through a certain state at least twice (by the pigeonhole principle)
\item Therefore, $ \exists r < s $ s.t. $ q_r = q_s $; choose minimal such $ s $
\item Now put $ x = a_1 \dots a_r $, $ y = a_{r+1} \dots a_s $ (note $ \abs{y} > 0$), and $ z = a_{s+1} \dots a_n $
\item By minimality of $ s $, $ q_0, \dots q_{s-1} $ are distinct, and $ \abs{xy} = s \le p $
\item Then, note that $ q_r, a_{r+1}, \dots, q_{s} $ is a loop, which may be validly repeated $ i \ge 0 $ times
\item Therefore, $ x y^i z \in \Lang $
\end{itemize}
\item Corollary: there exist languages which are not computable by an FSA
\item Example: there is no FSA which can recognise $ \Lang = \setcomp{a^n b^n}{n \in \Nat} $
\item Proof:
\begin{itemize}
\item Assume for a contradiction there exists an FSA $ M $ which can recognise $ \Lang $
\item Let $ p $ be the number from the pumping lemma, and choose $ n \ge p $ and consider $ w = a^n b^n $
\item By the pumping lemma, $ \exists x, y, z $ s.t. $ a^n b^n = xyz $, with $ \abs{y} \ge 1 $ and $ \abs{xy} \le p \le n $
\item Then $ y $ is written entirely in terms of the letter a, and $ \abs{y} \ge 1 $
\item By the pumping lemma, $ x y^i z \in \Lang $ for all $ i $
\item So choosing $ i = 0 $ gives $ xz = a^k b^n \in \Lang $ with $ k < n $, which is a contradiction
\end{itemize}
\end{itemize}
|
import io
import PIL.Image
import numpy as np
import botocore.exceptions
import pytest
from sm.engine import image_storage
from sm.engine.storage import get_s3_bucket
@pytest.fixture(autouse=True, scope='module')
def clean_storage(sm_config):
yield
get_s3_bucket(sm_config['image_storage']['bucket'], sm_config).objects.all().delete()
def make_test_image_bytes() -> bytes:
array = np.array([[0, 0], [1, 1], [2, 2]])
image = PIL.Image.fromarray(array.astype(np.uint16))
fp = io.BytesIO()
image.save(fp, format='PNG')
fp.seek(0)
return fp.read()
def test_post_get_image_success():
test_image_bytes = make_test_image_bytes()
image_id = image_storage.post_image(image_storage.ISO, "ds-id", test_image_bytes)
fetched_image_bytes = image_storage.get_image(image_storage.ISO, "ds-id", image_id)
assert fetched_image_bytes == test_image_bytes
def assert_no_image(image_id):
    # get_image must fail with NoSuchKey; a successful fetch means the image still exists
    with pytest.raises(botocore.exceptions.ClientError) as exc_info:
        image_storage.get_image(image_storage.ISO, "ds-id", image_id)
    assert exc_info.value.response['Error']['Code'] == 'NoSuchKey'
def test_get_image_wrong_id():
assert_no_image('wrong-id')
def test_delete_image_success():
test_image_bytes = make_test_image_bytes()
image_id = image_storage.post_image(image_storage.ISO, "ds-id", test_image_bytes)
image_storage.delete_image(image_storage.ISO, "ds-id", image_id)
assert_no_image(image_id)
# delete non-existing image should not raise exception
image_storage.delete_image(image_storage.ISO, "ds-id", image_id)
|
#pragma once
// STL and std libs
#include <map>
#include <set>
#include <vector>
#include <list>
#include <memory>
#include <functional>
#include <iostream>
#include <fstream>
#include <algorithm>
#include <chrono>
#include <type_traits>
// Guildline Support Library
#include <gsl.h>
//// boost and std extension
//#include <boost\signals2.hpp>
//#include <boost\any.hpp>
//#include <boost\range\iterator_range_core.hpp>
//#include <boost\range\adaptor\transformed.hpp>
//#include <boost\operators.hpp>
//#include <boost\format.hpp>
//#include <boost\filesystem.hpp>
#include <stride_range.h>
#include <tree.h>
#include <minmax>
#include "Math3D.h"
#include "SmartPointers.h"
#include "String.h"
//#if defined __AVX__
//#undef __AVX__ //#error Eigen have problem with AVX now
//#endif
//#define EIGEN_HAS_CXX11_MATH 1
//#define EIGEN_HAS_STD_RESULT_OF 1
//#define EIGEN_HAS_VARIADIC_TEMPLATES 1
//#include <Eigen\Dense>
namespace Causality
{
using time_seconds = std::chrono::duration<double>;
typedef uint64_t id_t;
using std::string;
using std::iterator_range;
//using boost::sub_range;
//namespace adaptors = boost::adaptors;
using gsl::owner;
using gsl::byte;
using gsl::not_null;
using stdx::tree_node;
using stdx::foward_tree_node;
using stdx::stride_range;
using stdx::stride_iterator;
using std::vector;
using std::map;
using std::function;
using std::unique_ptr;
using std::shared_ptr;
using std::list;
using std::weak_ptr;
} |
-- Non-strictly-positive recursion
data La : Type where
Lam : (La -> La) -> La
app : La -> La -> La
app (Lam f) x = f x
selfApp : La
selfApp = Lam \x => app x x
om : La
om = app selfApp selfApp
|
Technology 2: The course focuses on mastering vocabulary, grammar, and phrases within a set range and on preparing a presentation. A student will be able to discuss basic terminology of the oil industry, talk about technologies for protecting the environment, and read numbers and elementary mathematical operations.
MCCARTHY, M.,O´DELL, F. English Vocabulary in Use Elementary. Cambridge. Cambridge University Press 2010. ISBN 9780521136174. MURPHY, R. Cambridge English Grammar In Use Elementary. Cambridge: Cambridge University Press 2007. ISBN 978-0-521-67543-7. IBBOTSON, M. Professional English in Use Engineering. Cambridge: Cambridge University Press 2009. ISBN 9780521734882. MURPHY, R. English Grammar in Use 3rd Edition with Answers. Cambridge: Cambridge University Press 2004. ISBN 9780521537629. |
Today's crossword puzzle clue is a cryptic one: Type of party – ring mobile phone to get through. We will try to find the right answer to this particular crossword clue. Here are the possible solutions for the "Type of party – ring mobile phone to get through" clue. It was last seen in The Daily Telegraph cryptic crossword. We have 1 possible answer in our database.
We provide the likeliest answers for every crossword clue. Undoubtedly, there may be other solutions for Type of party – ring mobile phone to get through. If you discover one of these, please send it to us, and we'll add it to our database of clues and answers, so others can benefit from your research. |
If $f$ and $g$ are holomorphic functions on an open set $S'$ and agree on an open subset $S$ of $S'$, then $f$ and $g$ agree on all of $S'$. |
SUBROUTINE EXIO2
C
C EXIO2 COPIES SUBSTRUCTURE ITEMS BETWEEN THE SOF AND AN EXTERNAL
C TAPE USING FORTRAN FORMATTED IO. THE TAPE COULD HAVE BEEN CREATED
C OR COULD BE READ ON A DIFFERENT BRAND OF COMPUTER.
C
C
LOGICAL UNIVAC
INTEGER DRY ,XBLK ,UNAME ,POS ,
1 UNIT ,FORT ,NUM(32) ,SOFIN ,
2 SOFOUT ,REWI ,EQF
CHARACTER UFM*23 ,UWM*25
COMMON /XMSSG / UFM ,UWM
COMMON /BLANK / DRY ,XBLK ,DEVICE(2),UNAME(2) ,
1 FORMT(2) ,MODE(2) ,POS(2) ,DATYPE(2),
2 NAMES(10),UNIT ,UNIVAC ,LBUF ,
3 IADD
COMMON /SYSTEM/ SYSBUF ,NOUT ,X1(36) ,NBPC ,
1 NBPW ,NCPW
DATA FORT , SOFIN ,SOFOUT ,REWI ,EQF /
1 4HFORT, 4HSOFI ,4HSOFO ,4HREWI ,4HEOF /
DATA NUM /
1 2H1 , 2H2 ,2H3 ,2H4 ,2H5 ,
2 2H6 , 2H7 ,2H8 ,2H9 ,2H10 ,
3 2H11 , 2H12 ,2H13 ,2H14 ,2H15 ,
4 2H16 , 2H17 ,2H18 ,2H19 ,2H20 ,
5 2H21 , 2H22 ,2H23 ,2H24 ,2H25 ,
6 2H26 , 2H27 ,2H28 ,2H29 ,2H30 ,
7 2H31 , 2H32 /
C
C INITIALIZE
C
NOGO = 0
C
C DECODE FORTRAN UNIT
C
IF (UNAME(1) .NE. FORT) GO TO 20
DO 10 I = 1,32
UNIT = I
IF (UNAME(2) .EQ. NUM(UNIT)) GO TO 30
10 CONTINUE
20 NOGO = 1
CALL PAGE2 (-2)
WRITE (NOUT,6356) UWM,UNAME
C
C DECODE MODE OF OPERATION
C
30 IOMODE = 0
IF (MODE(1) .EQ. SOFOUT) IOMODE = 1
IF (MODE(1) .EQ. SOFIN ) IOMODE = 2
IF (IOMODE .GT. 0) GO TO 40
NOGO = 1
CALL PAGE2 (-2)
WRITE (NOUT,6338) UWM,MODE
C
C IF ERRORS THEN QUIT
C
40 IF (NOGO .EQ. 0) GO TO 50
DRY = -2
GO TO 300
C
C SET POSITION AND UNIVAC FLAGS
C
50 UNIVAC = .TRUE.
IF (XBLK .LE. 0) XBLK = 3960
XBLK = XBLK - MOD(XBLK,132)
LBUF = XBLK/NCPW
IF (MOD(XBLK,NCPW) .NE. 0) LBUF = LBUF + 1
IADD = 2
IF (POS(1) .EQ. REWI) IADD = 1
IF (POS(1) .EQ. EQF) IADD = 3
C
C BRANCH ON MODE OF OPERATION
C
GO TO (100,200), IOMODE
C
C SOFOUT
C
100 CALL EXO2
GO TO 300
C
C SOFIN
C
200 CALL EXI2
C
C NORMAL MODULE COMPLETION
C
300 RETURN
C
C MESSAGE TEXT
C
6338 FORMAT (A25,' 6338, ',2A4,' IS AN INVALID MODE PARAMETER FOR ',
1 'MODULE EXIO')
6356 FORMAT (A25,' 6356, ',2A4,' IS AN INVALID UNIT FOR MODULE EXIO,',
1 ' EXTERNAL FORMAT')
END
|
State Before: C : Type u_1
inst✝¹ : Category C
inst✝ : ConcreteCategory C
X Y : C
f g : X ⟶ Y
h : f = g
x : (forget C).obj X
⊢ (forget C).map f x = (forget C).map g x State After: no goals Tactic: rw [h] |
module StingerGraphs
# package code goes here
include("stinger_lib.jl")
include("stinger_graph.jl")
include("stinger_config.jl")
include("stinger_core.jl")
include("fields.jl")
include("traversal.jl")
include("algorithms/bfs.jl")
include("algorithms/parallelbfs.jl")
include("algorithms/kcore.jl")
include("generators/kronecker.jl")
end # module
|
Require Import Setoid.
Require Import ConstructiveEpsilon.
Definition iffT (X Y : Type) : Type := (X -> Y) * (Y -> X).
Notation "X <=> Y" := (iffT X Y) (at level 95, no associativity).
Definition surj {X Y} (f : X -> Y) := forall y, exists x, f x = y.
Definition inj {X Y} (f : X -> Y) := forall x x', f x = f x' -> x = x'.
Definition bij {X Y} (f : X -> Y) := inj f /\ surj f.
(** * Basic logical notions. *)
Definition definite P := P \/ ~P.
Definition Definite {X} p := forall x : X, definite (p x).
Definition LEM := forall P, definite P.
Definition stable P := ~~P -> P.
Definition Stable {X} p := forall x : X, stable (p x).
Definition DNE := forall P, stable P.
Definition MP := forall (f : nat -> nat), stable (exists n, f n = 0).
Definition UC X Y := forall R, (forall x:X, exists! y:Y, R x y) -> exists f, forall x, R x (f x).
Fact LEM_DNE :
(LEM <-> DNE) /\ (DNE -> MP).
Proof.
unfold LEM, DNE, MP. repeat split.
- intros lem p. now destruct (lem p).
- intros dne p. apply dne. unfold definite; tauto.
- intros dne f. apply dne.
Qed.
(* Lemmas for handling double negations. *)
Fact DN_remove {A B} :
~~A -> (A -> ~B) -> ~B.
Proof. tauto. Qed.
Fact DN_chaining {A B : Prop} :
~ ~ A -> ~ ~(A -> B) -> ~ ~ B.
Proof. tauto. Qed.
Fact DN {A : Prop} :
A -> ~~A.
Proof. tauto. Qed.
Fact NNN_N A :
~~~A <-> ~A.
Proof. tauto. Qed.
Fact DN_forall_stable {X} p :
Stable p -> (forall x : X, ~~p x) -> forall x, p x.
Proof. unfold Stable; firstorder. Qed.
(** * Definitions in synthetic computability. *)
Definition decider {X} p f := forall x : X, p x <-> f x = true.
Definition Dec {X} p := inhabited(forall x : X, {p x} + {~p x}).
Definition Dec_sigT {X} p := forall x : X, {p x} + {~p x}.
Definition dec (P : Prop) := {P} + {~P}.
Definition witnessing {X} (p : X -> Prop) := ex p -> sigT p.
Definition enumerable {X} p := exists f : nat -> option X, forall x, p x <-> exists n, f n = Some x.
Definition Enumerable X := exists f : nat -> option X, forall x, exists n, f n = Some x.
Definition Discrete X := Dec (fun p : X * X => fst p = snd p).
Definition Separated X := Dec (fun p : X * X => fst p <> snd p).
Definition Markov X := forall p : X -> Prop, Dec p -> (~~ ex p) -> ex p.
Definition Witnessing X := forall p : X -> Prop, Dec_sigT p -> witnessing p.
(** ** Equivalent characterizations *)
Fact Dec_decider {X} p :
Dec p <-> ex (@decider X p).
Proof.
split.
- intros [D].
exists (fun x => if D x then true else false).
intros x. destruct (D x); cbn; intuition congruence.
- intros [f decf]. constructor. intros x.
specialize (decf x).
destruct (f x) eqn:Hx; [left; tauto|right].
now intros ?%decf.
Qed.
Fact Dec_decider_nat p :
Dec p -> exists f : nat -> nat, forall x : nat, p x <-> f x = 0.
Proof.
intros [f decf]%Dec_decider.
exists (fun n => if f n then 0 else 1).
intros x. specialize (decf x).
destruct (f x) eqn:Hx; try tauto.
rewrite decf. split; congruence.
Qed.
Fact Dec_sigT_decider {X} p :
Dec_sigT p <=> sigT (@decider X p).
Proof.
split.
- intros D.
exists (fun x => if D x then true else false).
intros x. destruct (D x); cbn; intuition congruence.
- intros [f decf]. intros x.
specialize (decf x).
destruct (f x) eqn:Hx; [left; tauto|right].
now intros ?%decf.
Qed.
Fact dec_Dec_sig_T P :
dec P <=> Dec_sigT (fun _ : True => P).
Proof.
split.
- intros []; now constructor.
- intros []; unfold dec; tauto.
Qed.
Lemma DN_Dec_equiv X (p : X -> Prop) :
~ ~ Dec p <-> ((Dec_sigT p -> False) -> False).
Proof.
split.
- intros nnH. apply (DN_remove nnH).
intros [H]. intros nH. apply nH.
intros x. apply H.
- intros nnH nH. apply nnH. intros H.
apply nH. constructor. apply H.
Qed.
Lemma Witnessing_equiv X :
Witnessing X <=> forall (f : X -> bool), (exists x, f x = true) -> {x & f x = true}.
Proof.
split; intros H.
- intros f. apply H.
intros x. decide equality.
- intros p [f Hf]%Dec_sigT_decider Ex.
unfold decider in *.
destruct (H f).
+ destruct Ex as [x Hx]. exists x. now apply Hf.
+ exists x. now apply Hf.
Qed.
Definition Witnessing_nat : Witnessing nat.
Proof.
intros p Dec_p H.
specialize (constructive_indefinite_ground_description_nat p Dec_p H).
intros [x Hx]. now exists x.
Defined.
Fact Discrete_sum {X} :
Discrete X <-> inhabited(forall x y : X, {x = y} + {~ x = y}).
Proof.
split; intros [H]; constructor.
- intros x y. destruct (H (x,y)); cbn in *; tauto.
- intros [x y]; cbn. destruct (H x y); tauto.
Qed.
Fact Separated_sum {X} :
Separated X <-> inhabited(forall x y : X, {~ x = y} + {~~ x = y}).
Proof.
split; intros [H]; constructor.
- intros x y. destruct (H (x,y)); cbn in *; tauto.
- intros [x y]; cbn. destruct (H x y); tauto.
Qed.
Fact enumerable_nat p :
enumerable p -> exists f, forall x : nat, p x <-> exists n : nat, f n = S x.
Proof.
intros [f Hf].
exists (fun n => match f n with Some x => S x | _ => 0 end).
intros x. rewrite Hf. split; intros [n Hn]; exists n.
- now rewrite Hn.
- destruct (f n); congruence.
Qed.
Fact enumerable_equiv X :
Enumerable X <-> enumerable (fun x : X => True).
Proof.
split; intros [g Hg]; exists g; firstorder.
Qed.
Fact MP_Markov_nat :
MP <-> Markov nat.
Proof.
split.
- intros mp p [Dec_p] nnH.
specialize (mp (fun x => if Dec_p x then 0 else 1)).
destruct mp as [n Hn].
+ apply (DN_chaining nnH), DN.
intros [n Hn]. exists n.
destruct (Dec_p n) eqn:fn; congruence.
+ exists n. destruct (Dec_p n) eqn:?; congruence.
- intros markov f.
refine (markov (fun n => f n = 0) _).
constructor; intros ?.
decide equality.
Qed.
Lemma UC_Def_Dec X (p : X -> Prop) :
UC X bool -> Definite p -> Dec p.
Proof.
intros uc Def. apply Dec_decider.
refine (uc (fun x y => p x <-> y = true) _).
intros n. destruct (Def n) as [h|h].
- exists true; split; [tauto|].
intros []; try congruence.
intros H. now rewrite H in h.
- exists false; split.
+ split; try tauto; congruence.
+ intros []; try congruence.
intros H. now rewrite H in h.
Qed.
Fact MP_Dec_stable :
MP -> forall (p : nat -> Prop), Dec p -> stable (ex p).
Proof.
intros mp p [f Hf]%Dec_decider nnH.
destruct (mp (fun n => if f n then 0 else 1)) as [y Hy].
- apply (DN_chaining nnH), DN. intros [y Hy].
exists y. apply Hf in Hy. now destruct (f y).
- exists y. apply Hf. now destruct (f y).
Qed. |
!------------------------------------------------------------------------------
!! module: compute dns stress
!------------------------------------------------------------------------------
!!
!! - last modified: fabien margairaz ( [email protected] ), 09/06/2014
!!
!! - description:
!! - this module contains all the routines used for computing sgs
!! - cs_opt2 & nu_t are definded on w-nodes
!!
!------------------------------------------------------------------------------
module compute_dns_stress
use decomp_2d
use parameters_IO, only : rprec
!! - global variables
use system_variables, only : dudx,dudy,dudz,dvdx,dvdy,dvdz,dwdx,dwdy,dwdz,&
txx,tyy,tzz,txy,txz,tyz
implicit none
private
public :: dns_stress
contains !=======================================================================
!!--------------------------------------------------------------------------------
!! subroutine: dns_stress
!!--------------------------------------------------------------------------------
!!
!! last checked/modified: marco giometto ( [email protected] ) on 26/03/2013
!!
!!--------------------------------------------------------------------------------
subroutine dns_stress (dcp,Re,lbc_special)
implicit none
TYPE(DECOMP_INFO),intent(in) :: dcp
real(rprec),intent(in) :: Re
character(*),intent(in) :: lbc_special
integer :: i,j,k,k_min
! txx,tyy,tzz,txy on uvp-nodes & txz,tyz on w-nodes
if((dcp%xst(3)==1).and.(lbc_special.eq.'wall_law'))then
do j=1,dcp%xsz(2)
do i=1,dcp%xsz(1)
txx(i,j,1)=2.0_rprec*dudx(i,j,1)/Re
tyy(i,j,1)=2.0_rprec*dvdy(i,j,1)/Re
tzz(i,j,1)=2.0_rprec*dwdz(i,j,1)/Re
            txy(i,j,1)=(dudy(i,j,1)+dvdx(i,j,1))/Re
enddo
enddo
k_min=2
else
k_min=1
end if
!$omp parallel do
do k=k_min,dcp%xsz(3)
do j=1,dcp%xsz(2)
do i=1,dcp%xsz(1)
txx(i,j,k)=2.0_rprec*dudx(i,j,k)/Re
tyy(i,j,k)=2.0_rprec*dvdy(i,j,k)/Re
tzz(i,j,k)=2.0_rprec*dwdz(i,j,k)/Re
txy(i,j,k)=(dudy(i,j,k)+dvdx(i,j,k))/Re
txz(i,j,k)=(dudz(i,j,k)+dwdx(i,j,k))/Re
tyz(i,j,k)=(dvdz(i,j,k)+dwdy(i,j,k))/Re
enddo
enddo
enddo
!$omp end parallel do
return
end subroutine dns_stress
end module compute_dns_stress
|
------------------------------------------------------------------------
-- A variant of Nat.Wrapper.Cubical, defined using --erased-cubical
------------------------------------------------------------------------
{-# OPTIONS --erased-cubical --safe #-}
open import Equality.Path as P
open import Prelude hiding (zero; suc; _+_)
open import Bijection equality-with-J using (_↔_)
module Nat.Wrapper.Cubical.Erased
-- The underlying representation of natural numbers.
(Nat′ : Type)
-- A bijection between this representation and the unary natural
-- numbers.
(Nat′↔ℕ : Nat′ ↔ ℕ)
where
open import Equality.Path.Univalence
open import Logical-equivalence using (_⇔_)
import Equivalence equality-with-J as Eq
open import Equivalence.Erased.Cubical equality-with-paths as EEq
using (_≃ᴱ_)
import Equivalence.Erased.Contractible-preimages.Cubical
equality-with-paths
as ECP
open import Erased.Cubical equality-with-paths
open import Function-universe equality-with-J as F hiding (_∘_)
open import H-level equality-with-J
open import H-level.Closure equality-with-J
open import H-level.Truncation.Propositional.Erased equality-with-paths
as Trunc
import Nat equality-with-J as Nat
import Univalence-axiom equality-with-J as U
open import Nat.Wrapper equality-with-J Nat′ Nat′↔ℕ as NW
open NW.[]-cong instance-of-[]-cong-axiomatisation
private
variable
A : Type
m n : A
------------------------------------------------------------------------
-- Could Nat have been defined using the propositional truncation
-- operator (with an erased higher constructor) instead of Erased?
-- Could Nat have been defined using ∥_∥ᴱ instead of Erased? Let us
-- try.
-- Given a truncated natural number we can kind of apply Nat-[_] to
-- it, because Nat-[_] is a family of contractible types. (The code
-- uses erased univalence.)
Nat-[]′ : ∥ ℕ ∥ᴱ → ∃ λ (A : Type) → Contractible A
Nat-[]′ = Trunc.rec λ where
.truncation-is-propositionʳ →
U.∃-H-level-H-level-1+ ext univ 0
.∣∣ʳ n →
Nat-[ n ]
, propositional⇒inhabited⇒contractible
Nat-[]-propositional
( _↔_.from Nat′↔ℕ n
, [ _↔_.right-inverse-of Nat′↔ℕ n ]
)
Nat-[_]′ : ∥ ℕ ∥ᴱ → Type
Nat-[ n ]′ = proj₁ (Nat-[]′ n)
-- Thus we can form a variant of Nat.
Nat-with-∥∥ᴱ : Type
Nat-with-∥∥ᴱ = Σ ∥ ℕ ∥ᴱ Nat-[_]′
-- However, this variant is equivalent (with erased proofs) to the
-- unit type.
Nat-with-∥∥ᴱ≃ᴱ⊤ : Nat-with-∥∥ᴱ ≃ᴱ ⊤
Nat-with-∥∥ᴱ≃ᴱ⊤ =
_⇔_.to EEq.Contractibleᴱ⇔≃ᴱ⊤ $
ECP.Contractibleᴱ-Σ
(ECP.inhabited→Is-proposition→Contractibleᴱ
∣ 0 ∣ truncation-is-proposition)
(ECP.Contractible→Contractibleᴱ ∘ proj₂ ∘ Nat-[]′)
-- And thus it is not isomorphic to the natural numbers.
¬-Nat-with-∥∥ᴱ↔ℕ : ¬ (Nat-with-∥∥ᴱ ↔ ℕ)
¬-Nat-with-∥∥ᴱ↔ℕ =
Stable-¬
[ Nat-with-∥∥ᴱ ↔ ℕ ↝⟨ F._∘ inverse (from-equivalence (EEq.≃ᴱ→≃ Nat-with-∥∥ᴱ≃ᴱ⊤)) ⟩
⊤ ↔ ℕ ↝⟨ (λ hyp → _↔_.injective (inverse hyp) refl) ⟩
0 ≡ 1 ↝⟨ Nat.0≢+ ⟩□
⊥ □
]
------------------------------------------------------------------------
-- Addition of "wrapped" numbers is commutative and associative
module _ (o : Operations) where
open Operations-for-Nat o
open Operations-for-Nat-correct o
private
-- A lemma used several times below.
from[to+to]≡+ :
∀ m →
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n) ≡ m + n
from[to+to]≡+ {n = n} m =
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n) ≡⟨ cong (_↔_.from Nat↔ℕ) $ sym $ to-ℕ-+ m n ⟩
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ (m + n)) ≡⟨ _↔_.left-inverse-of Nat↔ℕ _ ⟩∎
m + n ∎
-- First two "traditional" proofs.
-- Addition is commutative.
+-comm-traditional : ∀ m {n} → m + n ≡ n + m
+-comm-traditional m {n = n} =
m + n ≡⟨ sym $ from[to+to]≡+ m ⟩
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n) ≡⟨ cong (_↔_.from Nat↔ℕ) $ Nat.+-comm (_↔_.to Nat↔ℕ m) ⟩
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ n Prelude.+ _↔_.to Nat↔ℕ m) ≡⟨ from[to+to]≡+ n ⟩∎
n + m ∎
-- Addition is associative.
+-assoc-traditional : ∀ m {n o} → m + (n + o) ≡ (m + n) + o
+-assoc-traditional m {n = n} {o = o} =
m + (n + o) ≡⟨ cong (m +_) $ sym $ from[to+to]≡+ n ⟩
m + (_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ n Prelude.+ _↔_.to Nat↔ℕ o)) ≡⟨ sym $ from[to+to]≡+ m ⟩
_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m
Prelude.+
_↔_.to Nat↔ℕ (_↔_.from Nat↔ℕ
(_↔_.to Nat↔ℕ n Prelude.+ _↔_.to Nat↔ℕ o))) ≡⟨ cong (λ n → _↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ n)) $
_↔_.right-inverse-of Nat↔ℕ _ ⟩
_↔_.from Nat↔ℕ
(_↔_.to Nat↔ℕ m
Prelude.+
(_↔_.to Nat↔ℕ n Prelude.+ _↔_.to Nat↔ℕ o)) ≡⟨ cong (_↔_.from Nat↔ℕ) $ Nat.+-assoc (_↔_.to Nat↔ℕ m) ⟩
_↔_.from Nat↔ℕ
((_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n)
Prelude.+
_↔_.to Nat↔ℕ o) ≡⟨ cong (λ n → _↔_.from Nat↔ℕ (n Prelude.+ _↔_.to Nat↔ℕ o)) $ sym $
_↔_.right-inverse-of Nat↔ℕ _ ⟩
_↔_.from Nat↔ℕ
(_↔_.to Nat↔ℕ (_↔_.from Nat↔ℕ
(_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n))
Prelude.+
_↔_.to Nat↔ℕ o) ≡⟨ from[to+to]≡+ (_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n)) ⟩
(_↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n)) + o ≡⟨ cong (_+ o) $ from[to+to]≡+ {n = n} m ⟩∎
(m + n) + o ∎
-- The following proofs are instead based on a technique used by
-- Vezzosi, Mörtberg and Abel in "Cubical Agda: A Dependently Typed
-- Programming Language with Univalence and Higher Inductive Types".
-- The type of unary natural numbers is equal to the type of wrapped
-- natural numbers (in erased contexts).
@0 ℕ≡Nat : ℕ ≡ Nat
ℕ≡Nat = sym (≃⇒≡ (Eq.↔⇒≃ Nat↔ℕ))
-- Addition of unary natural numbers is, in a certain sense, equal
-- to addition of wrapped natural numbers (in erased contexts).
@0 +≡+ : P.[ (λ i → ℕ≡Nat i → ℕ≡Nat i → ℕ≡Nat i) ] Prelude._+_ ≡ _+_
+≡+ =
Prelude._+_ ≡⟨ (λ i → transport
(λ j → ℕ≡Nat (min i j) → ℕ≡Nat (min i j) → ℕ≡Nat (min i j))
(- i) Prelude._+_) ⟩h
transport (λ i → ℕ≡Nat i → ℕ≡Nat i → ℕ≡Nat i) 0̲ Prelude._+_ ≡⟨⟩
(λ m n → _↔_.from Nat↔ℕ (_↔_.to Nat↔ℕ m Prelude.+ _↔_.to Nat↔ℕ n)) ≡⟨ (⟨ext⟩ λ m → ⟨ext⟩ λ _ → from[to+to]≡+ m) ⟩∎
_+_ ∎
-- Addition is commutative (in erased contexts).
@0 +-comm-cubical : ∀ m {n} → m + n ≡ n + m
+-comm-cubical =
transport
(λ i → (m {n} : ℕ≡Nat i) → +≡+ i m n ≡ +≡+ i n m)
0̲
Nat.+-comm
-- Addition is associative (in erased contexts).
@0 +-assoc-cubical : ∀ m {n o} → m + (n + o) ≡ (m + n) + o
+-assoc-cubical =
transport
(λ i → (m {n o} : ℕ≡Nat i) →
+≡+ i m (+≡+ i n o) ≡ +≡+ i (+≡+ i m n) o)
0̲
Nat.+-assoc
-- This proof technique seems to scale better than the one used
-- above, at least for examples of the kind used here. However, when
-- --erased-cubical is used it only works in erased contexts.
|
function [W] = MW2W(MW)
% Convert power from megawatts to watts.
% Chad A. Greene 2012
W = MW*1000000 ; |
subroutine wplot
c
c ******************************************************************
c * *
c * dumps files for plot3d graphic program *
c * it makes a conversion to single precision, *
c * and strips the convex binary *
c * for compatibility with sgi iris *
c * *
c ******************************************************************
c
c w(i,j,1) = density
c w(i,j,2) = momentum in x direction
c w(i,j,3) = momentum in y direction
c w(i,j,4) = total energy
c
c ******************************************************************
c
use dims
c
c ******************************************************************
c
use flo_var
use mesh_var
c
c ******************************************************************
c
use flo_param
c
c ******************************************************************
c
implicit none
c
c ******************************************************************
c
c local variables
c
c ******************************************************************
c
integer :: ix,iw
integer :: i,j,n
c
c ******************************************************************
c
real*4 xi(idm,jdm,2),wi(idn,jdn,4),conv,srm,sal,sre,stime
c
c ******************************************************************
c
data stime/0.0/,conv/4.0/
c
c ******************************************************************
c
ix = 21
iw = 22
open(unit=ix,form='unformatted')
rewind ix
      do n=1,2
         do j=1,jl
            do i=1,il
               xi(i,j,n) = sngl(x(i,j,n))/conv
            end do
         end do
      end do
write (ix) il,jl,
. ((xi(i,j,1),i=1,il),j=1,jl),
. ((xi(i,j,2),i=1,il),j=1,jl)
istat = system('tail +5c < fort.21 > xyz.bin')
close (ix,status='delete')
open (unit=iw,form='unformatted')
rewind iw
fac = 1./sqrt(gamma)
do j=1,je
do i=1,ie
wi(i,j,1) = sngl(w(i,j,1))/conv
wi(i,j,2) = sngl(w(i,j,2)*fac)/conv
wi(i,j,3) = sngl(w(i,j,3)*fac)/conv
wi(i,j,4) = sngl(w(i,j,4)*fac*fac)/conv
end do
end do
c
c shift w to the cell corners
c
do n=1,4
do j=1,jl
do i=1,il
wi(i,j,n) = .5*(wi(i,j,n)+wi(i+1,j,n))
end do
end do
do j=1,jl
do i=1,il
wi(i,j,n) = .5*(wi(i,j,n)+wi(i,j+1,n))
end do
end do
end do
srm = sngl(rm) /conv
sal = sngl(al) /conv
sre = sngl(re) /conv
stime = sngl(time) /conv
write (iw) il,jl,
. srm,sal,sre,stime,
. (((wi(i,j,n),i=1,il,1),j=1,jl,1),n=1,4,1)
istat = system('tail +5c < fort.22 > flo.bin')
close (iw,status='delete')
return
end
|
module AI.Learning.CrossValidation where
import Control.Monad.Random
import Foreign.Storable (Storable)
import Numeric.LinearAlgebra
import qualified Data.List as L
import AI.Util.Matrix
import AI.Util.Util
----------------------
-- Cross Validation --
----------------------
class Indexable c where
index :: c -> Index -> c
nobs :: c -> Int
instance Storable a => Indexable (Vector a) where
index = subRefVec
nobs = dim
instance Element a => Indexable (Matrix a) where
index = subRefRows
nobs = rows
instance Indexable [a] where
index = map . (!!)
nobs = length
-- |Indexes are lists of 'Int'. Should refactor this to use something more
-- efficient.
type Index = [Int]
-- |Type for cross-validation partition.
data CVPartition = CVPartition [(Index, Index)]
-- |Specify what type of cross-validation you want to do.
data CVType = LeaveOneOut
| KFold Int
-- |Prediction function. A prediction function should take a training and a test
-- set, and use the training set to build a model whose performance is
-- evaluated on the test set, returning a final score as a 'Double'.
type PredFun a b = a -- Training set predictors
-> b -- Training set target
-> a -- Test set predictors
-> b -- Test set target
-> Double -- Performance score
-- |Create a partition into test and training sets.
cvPartition :: RandomGen g => Int -> CVType -> Rand g CVPartition
cvPartition sz cvtype = case cvtype of
KFold i -> cvp sz i
LeaveOneOut -> cvp sz sz
-- |Helper function for 'cvPartition'.
cvp :: RandomGen g => Int -> Int -> Rand g CVPartition
cvp n k = do
is <- go i (k - i) idx
return . CVPartition $ map (\i -> (idx L.\\ i, i)) is
where
go 0 0 idx = return []
go 0 j idx = do
(is, idx') <- selectMany' s idx
iss <- go 0 (j-1) idx'
return (is:iss)
go i j idx = do
(is, idx') <- selectMany' (s+1) idx
iss <- go (i-1) j idx'
return (is:iss)
s = n `div` k
i = n `mod` k
idx = [0 .. n-1]
-- |Perform k-fold cross-validation. Given a 'CVPartition' containing a list
-- of training and test sets, we repeatedly fit a model on the training set
-- and test its performance on the test set.
kFoldCV_ :: (Indexable a, Indexable b) =>
CVPartition
-> PredFun a b
-> a
-> b
-> [Double]
kFoldCV_ (CVPartition partition) predfun x y = map go partition
where
go (trainIdx,testIdx) = predfun xTrain yTrain xTest yTest
where
xTrain = x `index` trainIdx
yTrain = y `index` trainIdx
xTest = x `index` testIdx
yTest = y `index` testIdx
-- |Perform k-fold cross-validation, randomly generating the training and
-- test sets first.
kFoldCV :: (RandomGen g, Indexable a, Indexable b) =>
CVType -- What type of cross-validation?
-> PredFun a b -- Prediction function
-> a -- Predictors
-> b -- Targets
-> Rand g [Double] -- List of scores
kFoldCV cvtype predfun x y = if nobs x /= nobs y
then error "Inconsistent dimensions -- KFOLDCV"
else do
cp <- cvPartition (nobs x) cvtype
return (kFoldCV_ cp predfun x y)
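-- |Example (sketch, not part of the original module): a 'PredFun' that fits a
-- model on the training set and scores it by mean squared error on the test
-- set. The model-fitting helpers ('fitModel', 'predictModel') are hypothetical
-- placeholders for whatever model builder is actually used.
--
-- > mseScore :: PredFun (Matrix Double) (Vector Double)
-- > mseScore xTrain yTrain xTest yTest =
-- >     let model = fitModel xTrain yTrain
-- >         yHat  = predictModel model xTest
-- >         err   = yTest - yHat
-- >     in  sumElements (err * err) / fromIntegral (dim yTest)
-- >
-- > -- 10-fold cross-validation scores for data (x, y), in a random monad:
-- > scores <- kFoldCV (KFold 10) mseScore x y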
---------------
-- Old Stuff --
---------------
-- |Model builder. A model builder takes a training set of regressors and
-- targets, and constructs a function that makes predictions from an out-
-- of-sample set of regressors.
type ModelBuilder = Matrix Double -- Training set regressors
-> Vector Double -- Training set target
-> Matrix Double -- Out-of-sample regressors
-> Vector Double -- Predictions
-- |Evaluation function. An evaluation function takes a vector of targets and
-- a vector of predictions, and returns a score corresponding to how closely
-- the predictions match the target.
type EvalFun = Vector Double -- Target
-> Vector Double -- Predictions
-> Double -- Score (e.g. MSE, MCR, likelihood) |
Formal statement is: lemma islimpt_UNIV_iff: "x islimpt UNIV \<longleftrightarrow> \<not> open {x}" Informal statement is: A point $x$ is a limit point of the whole space if and only if the singleton $\{x\}$ is not open. |
@testset "Quadrature" begin
tests = ["gauss_quadrature",
]
for t in tests
include("$(t).jl")
end
end
|
/*
Copyright (C) 2003-2013 by David White <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <boost/bind.hpp>
#include <boost/function.hpp>
#include <boost/regex.hpp>
#include <boost/scoped_ptr.hpp>
#include <algorithm>
#include "asserts.hpp"
#include "graphics.hpp"
#include "clipboard.hpp"
#include "font.hpp"
#include "foreach.hpp"
#include "input.hpp"
#include "raster.hpp"
#include "string_utils.hpp"
#include "text_editor_widget.hpp"
#include "unit_test.hpp"
namespace gui {
namespace {
const int BorderSize = 3;
const int TabWidth = 4;
const int TabAdjust = TabWidth - 1;
typedef boost::shared_ptr<graphics::texture> char_texture_ptr;
std::vector<char_texture_ptr> char_textures;
struct CharArea {
GLfloat x1, y1, x2, y2;
};
std::map<int, std::map<char, CharArea> > all_char_to_area;
std::string monofont()
{
return font::get_default_monospace_font();
}
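// Returns the texture coordinates of character 'c' in the glyph atlas for
// the given font size. If 'c' is not yet in the map, the atlas is rebuilt
// from every character currently registered for that font size;
// init_char_area() below pre-registers all printable ASCII characters so
// the rebuild normally happens only once per font size.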
const CharArea& get_char_area(int font_size, char c)
{
std::map<char, CharArea>& char_to_area = all_char_to_area[font_size];
std::map<char, CharArea>::const_iterator i = char_to_area.find(c);
if(i != char_to_area.end()) {
return i->second;
}
const CharArea& result = char_to_area[c];
const int char_width = font::char_width(font_size, monofont());
const int char_height = font::char_height(font_size, monofont());
std::string str;
int row = 0, col = 0;
int nchars = 0;
for(std::map<char, CharArea>::iterator i = char_to_area.begin();
i != char_to_area.end(); ++i) {
str.push_back(i->first);
CharArea area = {(GLfloat)col*char_width, (GLfloat)row*char_height, (GLfloat)(col+1)*char_width, (GLfloat)(row+1)*char_height};
char_to_area[i->first] = area;
++col;
if(col == 128) {
str += "\n";
col = 0;
++row;
}
}
char_texture_ptr& char_texture = char_textures[font_size];
char_texture.reset(new graphics::texture(font::render_text(str, graphics::color_white(), font_size, monofont())));
for(std::map<char, CharArea>::iterator i = char_to_area.begin();
i != char_to_area.end(); ++i) {
CharArea& area = i->second;
area.x1 = char_texture->translate_coord_x(area.x1/GLfloat(char_texture->width()));
area.x2 = char_texture->translate_coord_x(area.x2/GLfloat(char_texture->width()));
area.y1 = char_texture->translate_coord_y(area.y1/GLfloat(char_texture->height()));
area.y2 = char_texture->translate_coord_y(area.y2/GLfloat(char_texture->height()));
}
return result;
}
void init_char_area(size_t font_size)
{
if(char_textures.size() <= font_size) {
char_textures.resize(font_size+1);
}
if(char_textures[font_size].get()) {
return;
}
std::map<char, CharArea>& char_to_area = all_char_to_area[font_size];
for(char c = 1; c < 127; ++c) {
if(util::c_isprint(c) && c != 'a') {
char_to_area[c] = CharArea();
}
}
get_char_area(font_size, 'a');
ASSERT_LOG(char_textures[font_size].get(), "DID NOT INIT CHAR TEXTURE\n");
}
}
text_editor_widget::text_editor_widget(int width, int height)
: last_op_type_(NULL),
font_size_(14),
char_width_(font::char_width(font_size_, monofont())),
char_height_(font::char_height(font_size_, monofont())),
select_(0,0), cursor_(0,0),
nrows_((height - BorderSize*2)/char_height_),
ncols_((width - 20 - BorderSize*2)/char_width_),
scroll_pos_(0), xscroll_pos_(0),
begin_highlight_line_(-1), end_highlight_line_(-1),
has_focus_(false),
editable_(true),
is_dragging_(false),
begin_enter_return_(true),
last_click_at_(-1),
consecutive_clicks_(0),
text_color_(255, 255, 255, 255),
in_event_(0),
password_entry_(false),
no_border_(false),
clear_on_focus_(false)
{
set_environment();
if(height == 0) {
height = char_height_ + BorderSize*2;
nrows_ = 1;
ncols_ = (width - BorderSize*2)/char_width_;
widget::set_dim(width, height);
} else {
widget::set_dim(width - 20, height);
}
text_.push_back("");
init_clipboard();
}
text_editor_widget::text_editor_widget(const variant& v, game_logic::formula_callable* e)
: scrollable_widget(v,e), last_op_type_(NULL), font_size_(14),
select_(0,0), cursor_(0,0), scroll_pos_(0), xscroll_pos_(0),
begin_highlight_line_(-1), end_highlight_line_(-1),
has_focus_(v["focus"].as_bool(false)),
editable_(v["editable"].as_bool(true)),
is_dragging_(false),
begin_enter_return_(true),
last_click_at_(-1),
consecutive_clicks_(0),
text_color_(255, 255, 255, 255),
in_event_(0),
password_entry_(v["password"].as_bool(false)),
no_border_(v["no_border"].as_bool(false)),
clear_on_focus_(v["clear_on_focus"].as_bool(false))
{
ASSERT_LOG(get_environment() != 0, "You must specify a callable environment");
if(v.has_key("bg_color")) {
bg_color_.reset(new graphics::color(v["bg_color"]));
}
int width = v.has_key("width") ? v["width"].as_int() : 0;
int height = v.has_key("height") ? v["height"].as_int() : 0;
if(v.has_key("font_size")) {
font_size_ = v["font_size"].as_int();
}
if(v.has_key("color")) {
text_color_ = graphics::color(v["color"]);
} else if(v.has_key("colour")) {
text_color_ = graphics::color(v["colour"]);
}
if(v.has_key("on_change")) {
on_change_ = boost::bind(&text_editor_widget::change_delegate, this);
ffl_on_change_ = get_environment()->create_formula(v["on_change"]);
}
if(v.has_key("on_move_cursor")) {
on_move_cursor_ = boost::bind(&text_editor_widget::move_cursor_delegate, this);
ffl_on_move_cursor_ = get_environment()->create_formula(v["on_move_cursor"]);
}
if(v.has_key("on_enter")) {
on_enter_ = boost::bind(&text_editor_widget::enter_delegate, this);
ffl_on_enter_ = get_environment()->create_formula(v["on_enter"]);
}
if(v.has_key("on_tab")) {
on_tab_ = boost::bind(&text_editor_widget::tab_delegate, this);
ffl_on_tab_ = get_environment()->create_formula(v["on_tab"]);
}
if(v.has_key("on_escape")) {
on_escape_ = boost::bind(&text_editor_widget::escape_delegate, this);
ffl_on_escape_ = get_environment()->create_formula(v["on_escape"]);
}
if(v.has_key("on_begin_enter")) {
on_begin_enter_ = boost::bind(&text_editor_widget::begin_enter_delegate, this);
ffl_on_begin_enter_ = get_environment()->create_formula(v["on_begin_enter"]);
}
if(v.has_key("on_change_focus")) {
on_change_focus_ = boost::bind(&text_editor_widget::change_focus_delgate, this, _1);
ffl_on_change_focus_ = get_environment()->create_formula(v["on_change_focus"]);
}
char_width_= font::char_width(font_size_, monofont());
char_height_ = font::char_height(font_size_, monofont());
nrows_ = (height - BorderSize*2)/char_height_;
ncols_ = (width - 20 - BorderSize*2)/char_width_;
if(height == 0) {
height = char_height_ + BorderSize*2;
nrows_ = 1;
widget::set_dim(width - 20, height);
} else {
widget::set_dim(width - 20, height);
}
if(v.has_key("text") && v["text"].is_string()) {
set_text(v["text"].as_string());
} else {
text_.push_back("");
}
if(v["select_all"].as_bool(false)) {
cursor_ = Loc(text_.size()-1, text_.back().size());
}
init_clipboard();
}
text_editor_widget::~text_editor_widget()
{
}
std::string text_editor_widget::text() const
{
std::string result;
foreach(const std::string& line, text_) {
result += line;
result += "\n";
}
result.resize(result.size()-1);
return result;
}
void text_editor_widget::set_row_contents(int row, const std::string& value)
{
ASSERT_LOG(row >= 0 && size_t(row) < text_.size(), "ILLEGAL ROW SET: " << row << " / " << text_.size());
text_[row] = value;
refresh_scrollbar();
on_change();
}
void text_editor_widget::highlight(Loc begin, Loc end)
{
search_matches_.clear();
for(int n = begin.row; n <= end.row && n < text_.size(); ++n) {
int begin_col = 0;
if(n == begin.row) {
begin_col = begin.col;
}
int end_col = text_[n].size();
if(n == end.row) {
end_col = end.col;
}
Loc a(n, begin_col);
Loc b(n, end_col);
search_matches_.push_back(std::pair<Loc,Loc>(a, b));
}
}
void text_editor_widget::set_text(const std::string& value, bool reset_cursor)
{
const int current_in_event = in_event_;
util::scope_manager event_recorder(
[this]() { this->in_event_ = 0; },
[this, current_in_event]() { this->in_event_ = current_in_event; }
);
std::string txt = value;
txt.erase(std::remove(txt.begin(), txt.end(), '\r'), txt.end());
text_ = util::split(txt, '\n', 0 /*don't remove empties or strip spaces*/);
if(text_.empty()) {
text_.push_back("");
}
if(reset_cursor) {
select_ = cursor_ = Loc(0,0);
xscroll_pos_ = scroll_pos_ = 0;
} else {
if(select_.row >= text_.size()) {
select_.row = text_.size() - 1;
}
if(cursor_.row >= text_.size()) {
cursor_.row = text_.size() - 1;
}
}
refresh_scrollbar();
on_change();
}
void text_editor_widget::set_font_size(int font_size)
{
if(font_size < 6) {
font_size = 6;
} else if(font_size > 28) {
font_size = 28;
}
font_size_ = font_size;
char_width_ = font::char_width(font_size_, monofont());
char_height_ = font::char_height(font_size_, monofont());
nrows_ = (height() - BorderSize*2)/char_height_;
ncols_ = (width() - BorderSize*2)/char_width_;
refresh_scrollbar();
}
void text_editor_widget::change_font_size(int amount)
{
set_font_size(font_size_ + amount);
}
void text_editor_widget::set_dim(int w, int h)
{
widget::set_dim(w - 20, h);
nrows_ = (height() - BorderSize*2)/char_height_;
ncols_ = (width() - BorderSize*2)/char_width_;
refresh_scrollbar();
}
namespace {
struct RectDraw {
rect area;
graphics::color col;
bool merge(RectDraw& o) {
if(o.col.value() != col.value()) {
return false;
}
if(o.area.y() != area.y() || o.area.x() > area.x() + area.w()) {
return false;
}
area = rect(area.x(), area.y(), area.w() + o.area.w(), area.h());
return true;
}
};
}
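// RectDraw batches the solid rectangles (selection, search highlights,
// cursor, highlighted lines) painted behind the text; merge() coalesces
// horizontally adjacent rectangles of the same colour on the same row so
// handle_draw() issues fewer draw calls.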
void text_editor_widget::handle_draw() const
{
init_char_area(font_size_);
std::vector<RectDraw> rects;
std::map<uint32_t, graphics::blit_queue> chars;
int begin_build = SDL_GetTicks();
const int xpos = x() + BorderSize;
const int ypos = y() + BorderSize;
int r = 0;
for(int n = scroll_pos_; n < text_.size() && r < nrows_; ++n, ++r) {
if(n >= begin_highlight_line_ && n <= end_highlight_line_) {
RectDraw rect_draw = { rect(xpos, ypos + r*char_height_, width(), char_height_), graphics::color(255, 255, 255, 32) };
rects.push_back(rect_draw);
}
int c = 0;
std::vector<std::pair<Loc, Loc> >::const_iterator search_itor = std::lower_bound(search_matches_.begin(), search_matches_.end(), std::pair<Loc,Loc>(Loc(n,0),Loc(n,0)));
for(int m = xscroll_pos_; m < text_[n].size(); ++m, ++c) {
if(c >= ncols_) {
++r;
c -= ncols_;
if(r == nrows_) {
break;
}
}
const char ch = password_entry_ && !clear_on_focus_ ? '*' : text_[n][m];
const int char_size = ch == '\t' ? TabWidth : 1;
Loc pos(n, m);
Loc begin_select = select_;
Loc end_select = cursor_;
if(end_select < begin_select) {
std::swap(begin_select, end_select);
}
graphics::color col = get_character_color(n, m);
if(pos >= begin_select && pos < end_select) {
RectDraw rect_draw = { rect(xpos + c*char_width_, ypos + r*char_height_, char_width_*char_size, char_height_), col };
if(rects.empty() || !rects.back().merge(rect_draw)) {
rects.push_back(rect_draw);
}
col = graphics::color(0,0,0,255);
} else {
for(std::vector<std::pair<Loc,Loc> >::const_iterator i = search_itor; i != search_matches_.end() && i->first <= pos; ++i) {
if(pos >= i->first && pos < i->second) {
RectDraw rect_draw = { rect(xpos + c*char_width_, ypos + r*char_height_, char_width_*char_size, char_height_), graphics::color(255,255,0,128) };
if(rects.empty() || !rects.back().merge(rect_draw)) {
rects.push_back(rect_draw);
}
col = graphics::color(0,0,0,255);
}
}
}
if(!util::c_isspace(ch) && util::c_isprint(ch)) {
const CharArea& area = get_char_area(font_size_, ch);
const int x1 = xpos + c*char_width_;
const int y1 = ypos + r*char_height_;
const int x2 = x1 + char_width_;
const int y2 = y1 + char_height_;
graphics::blit_queue& q = chars[col.rgba()];
q.repeat_last();
q.add(x1, y1, area.x1, area.y1);
q.repeat_last();
q.add(x2, y1, area.x2, area.y1);
q.add(x1, y2, area.x1, area.y2);
q.add(x2, y2, area.x2, area.y2);
}
if(cursor_.row == n && cursor_.col == m &&
(SDL_GetTicks()%500 < 350 || !has_focus_) &&
!clear_on_focus_) {
RectDraw rect_draw = { rect(xpos + c*char_width_+1, ypos + r*char_height_, 1, char_height_), graphics::color(255,255,255,255) };
rects.push_back(rect_draw);
}
if(ch == '\t') {
c += TabAdjust;
}
}
if(has_focus_ && cursor_.row == n && cursor_.col >= text_[n].size() && SDL_GetTicks()%500 < 350) {
RectDraw rect_draw = { rect(xpos + c*char_width_+1, ypos + r*char_height_, 1, char_height_), graphics::color(255,255,255,255) };
rects.push_back(rect_draw);
}
}
const int begin_draw = SDL_GetTicks();
if(bg_color_.get() != NULL) {
SDL_Rect area = {x(), y(), width(), height()};
graphics::draw_rect(area, bg_color_->as_sdl_color());
}
foreach(const RectDraw& r, rects) {
graphics::draw_rect(r.area, r.col);
}
for(std::map<uint32_t, graphics::blit_queue>::iterator i = chars.begin(); i != chars.end(); ++i) {
graphics::color(i->first).set_as_current_color();
i->second.set_texture(char_textures[font_size_]->get_id());
i->second.do_blit();
}
SDL_Color border_color = graphics::color_white();
if(!has_focus_) {
border_color.r = 128;
border_color.g = 128;
border_color.b = 128;
}
SDL_Rect border = {x()+1, y()+1, width()-2, height()-2};
if(no_border_ == false) {
graphics::draw_hollow_rect(border, border_color);
}
scrollable_widget::handle_draw();
}
bool text_editor_widget::handle_event(const SDL_Event& event, bool claimed)
{
util::scope_manager event_recorder(
[this]() { this->in_event_++; },
[this]() { this->in_event_--; }
);
if(!claimed) {
claimed = clipboard_handle_event(event);
}
claimed = scrollable_widget::handle_event(event, claimed) || claimed;
switch(event.type) {
case SDL_KEYDOWN:
return handle_key_press(event.key) || claimed;
case SDL_MOUSEBUTTONDOWN:
return handle_mouse_button_down(event.button) || claimed;
case SDL_MOUSEBUTTONUP:
return handle_mouse_button_up(event.button) || claimed;
case SDL_MOUSEMOTION:
return handle_mouse_motion(event.motion) || claimed;
case SDL_MOUSEWHEEL:
return handle_mouse_wheel(event.wheel) || claimed;
case SDL_TEXTINPUT:
return handle_text_input(event.text) || claimed;
case SDL_TEXTEDITING:
return handle_text_editing(event.edit) || claimed;
}
return false;
}
bool text_editor_widget::handle_mouse_wheel(const SDL_MouseWheelEvent& event)
{
int mx, my;
input::sdl_get_mouse_state(&mx, &my);
if(mx >= x() && mx < x() + width() && my >= y() && my < y() + height()) {
if(event.y > 0) {
if(cursor_.row > 2) {
cursor_.row -= 3;
scroll_pos_ -= 3;
if( scroll_pos_ < 0 ){
scroll_pos_ = 0;
}
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row+3, cursor_.row);
on_move_cursor();
}
return true;
} else {
if(text_.size() > 2 && cursor_.row < text_.size()-3) {
cursor_.row += 3;
scroll_pos_ += 3;
if( scroll_pos_ > text_.size() ){
scroll_pos_ = text_.size();
}
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row-3, cursor_.row);
on_move_cursor();
}
return true;
}
}
return false;
}
void text_editor_widget::set_focus(bool value)
{
if(has_focus_ != value && on_change_focus_) {
on_change_focus_(value);
}
has_focus_ = value;
if(clear_on_focus_) {
set_text("");
clear_on_focus_ = false;
}
if(nrows_ == 1 && value) {
cursor_ = Loc(0, text_.front().size());
select_ = Loc(0, 0);
on_move_cursor();
}
}
void text_editor_widget::set_cursor(int row, int col, bool move_selection)
{
if(row < 0) {
row = 0;
}
if(col < 0) {
col = 0;
}
if(row >= text_.size()) {
row = text_.size() - 1;
}
if(col > text_[row].size()) {
col = text_[row].size();
}
cursor_ = Loc(row, col);
if(move_selection) {
select_ = cursor_;
}
on_move_cursor();
}
int text_editor_widget::row_col_to_text_pos(int row, int col) const
{
if(col > text_[row].size()) {
col = text_[row].size();
}
int result = 0;
for(int n = 0; n != row; ++n) {
result += text_[n].size() + 1;
}
return result + col;
}
std::pair<int,int> text_editor_widget::text_pos_to_row_col(int pos) const
{
int nrow = 0;
while(pos > text_[nrow].size()+1) {
pos -= text_[nrow].size()+1;
++nrow;
}
return std::pair<int,int>(nrow, pos);
}
void text_editor_widget::set_highlight_lines(int begin, int end)
{
begin_highlight_line_ = begin;
end_highlight_line_ = end;
}
void text_editor_widget::clear_highlight_lines()
{
set_highlight_lines(-1, -1);
}
bool text_editor_widget::handle_mouse_button_down(const SDL_MouseButtonEvent& event)
{
record_op();
if(event.x >= x() && event.x < x() + width() && event.y >= y() && event.y < y() + height()) {
set_focus(true);
std::pair<int, int> pos = mouse_position_to_row_col(event.x, event.y);
if(pos.first != -1) {
cursor_.row = pos.first;
cursor_.col = pos.second;
on_move_cursor();
}
if(last_click_at_ != -1 && SDL_GetTicks() - last_click_at_ < 500) {
++consecutive_clicks_;
const int nclicks = consecutive_clicks_%3;
if(nclicks == 1) {
select_ = cursor_;
select_token(text_[cursor_.row], select_.row, cursor_.row, select_.col, cursor_.col);
} else if(nclicks == 2) {
select_ = Loc(cursor_.row, 0);
cursor_.col = text_[cursor_.row].size();
}
if(select_ != cursor_) {
//a mouse-based copy for X-style copy/paste
handle_copy(true);
}
} else {
consecutive_clicks_ = 0;
if(event.button == SDL_BUTTON_MIDDLE && clipboard_has_mouse_area()) {
std::string txt = copy_from_clipboard(true);
handle_paste(txt);
}
}
last_click_at_ = SDL_GetTicks();
is_dragging_ = true;
return claim_mouse_events();
}
if(has_focus_ != false && on_change_focus_) {
on_change_focus_(false);
}
is_dragging_ = false;
has_focus_ = false;
return false;
}
bool text_editor_widget::handle_mouse_button_up(const SDL_MouseButtonEvent& event)
{
record_op();
is_dragging_ = false;
return false;
}
bool text_editor_widget::handle_mouse_motion(const SDL_MouseMotionEvent& event)
{
int mousex, mousey;
if(is_dragging_ && has_focus_ && input::sdl_get_mouse_state(&mousex, &mousey)) {
std::pair<int, int> pos = mouse_position_to_row_col(event.x, event.y);
if(pos.first != -1) {
cursor_.row = pos.first;
cursor_.col = pos.second;
on_move_cursor(true /*don't check for shift, assume it is*/);
}
if(mousey >= y() + height() && scroll_pos_ < int(text_.size())-2) {
++scroll_pos_;
int end = scroll_pos_ + nrows_ - 1;
if(end >= text_.size()) {
end = text_.size() - 1;
}
cursor_ = Loc(end, text_[end].size());
on_move_cursor(true /*don't check for shift, assume it is*/);
refresh_scrollbar();
} else if(mousey <= y() && scroll_pos_ > 0) {
--scroll_pos_;
cursor_ = Loc(scroll_pos_, 0);
on_move_cursor(true /*don't check for shift, assume it is*/);
refresh_scrollbar();
}
}
return false;
}
bool text_editor_widget::handle_key_press(const SDL_KeyboardEvent& event)
{
if(!has_focus_) {
return false;
}
if(event.keysym.sym == SDLK_a && (event.keysym.mod&KMOD_CTRL)) {
record_op();
cursor_.row = text_.size()-1;
cursor_.col = text_[cursor_.row].size();
on_move_cursor();
select_ = Loc(0, 0);
if(select_ != cursor_) {
//a mouse-based copy for X-style copy/paste
handle_copy(true);
}
return true;
}
if(editable_ && event.keysym.sym == SDLK_z && (event.keysym.mod&KMOD_CTRL)) {
record_op();
undo();
return true;
}
if(editable_ && event.keysym.sym == SDLK_y && (event.keysym.mod&KMOD_CTRL)) {
record_op();
redo();
return true;
}
if((event.keysym.sym == SDLK_c || event.keysym.sym == SDLK_x) && (event.keysym.mod&KMOD_CTRL)) {
record_op();
handle_copy();
if(editable_ && event.keysym.sym == SDLK_x) {
save_undo_state();
delete_selection();
on_change();
}
return true;
} else if(editable_ && event.keysym.sym == SDLK_v && (event.keysym.mod&KMOD_CTRL)) {
handle_paste(copy_from_clipboard(false));
return true;
}
if(editable_ && (event.keysym.mod&KMOD_CTRL)) {
if(event.keysym.sym == SDLK_BACKSPACE) {
if(select_ == cursor_) {
//We delete the current word behind us.
truncate_col_position();
if(cursor_.col > 0) {
save_undo_state();
}
const std::string& line = text_[select_.row];
int col = select_.col;
while(col > 0 && !(util::c_isalnum(line[col-1]) || line[col-1] == '_')) {
--col;
}
while(col > 0 && (util::c_isalnum(line[col-1]) || line[col-1] == '_')) {
--col;
}
select_.col = col;
delete_selection();
record_op();
return true;
}
} else if(event.keysym.sym == SDLK_DELETE) {
if(select_ == cursor_) {
//We delete until end of line.
truncate_col_position();
if(cursor_.col < text_[select_.row].size()) {
save_undo_state();
}
select_ = Loc(select_.row, text_[select_.row].size());
delete_selection();
record_op();
return true;
}
} else {
record_op();
return false;
}
}
if(event.keysym.sym == SDLK_ESCAPE && on_escape_) {
on_escape_();
return true;
}
switch(event.keysym.sym) {
case SDLK_LEFT:
record_op();
if(cursor_ != select_ && !(SDL_GetModState()&KMOD_SHIFT)) {
//pressing left without shift while we have a selection moves us to the beginning of the selection
if(cursor_ < select_) {
select_ = cursor_;
} else {
cursor_ = select_;
}
} else {
if(cursor_.col > text_[cursor_.row].size()) {
cursor_.col = text_[cursor_.row].size();
}
--cursor_.col;
if(cursor_.col < 0) {
if(cursor_.row == 0) {
cursor_.col = 0;
} else {
--cursor_.row;
cursor_.col = text_[cursor_.row].size();
}
}
}
on_move_cursor();
break;
case SDLK_RIGHT:
record_op();
if(cursor_ != select_ && !(SDL_GetModState()&KMOD_SHIFT)) {
//pressing right without shift while we have a selection moves us to the end of the selection
if(cursor_ < select_) {
cursor_ = select_;
} else {
select_ = cursor_;
}
} else {
++cursor_.col;
if(cursor_.col > text_[cursor_.row].size()) {
if(cursor_.row == text_.size()-1) {
--cursor_.col;
} else if(cursor_.row < text_.size()-1) {
++cursor_.row;
cursor_.col = 0;
} else {
--cursor_.col;
}
}
}
on_move_cursor();
break;
case SDLK_UP:
record_op();
if(cursor_.row > 0) {
--cursor_.row;
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row+1, cursor_.row);
}
on_move_cursor();
break;
case SDLK_DOWN:
record_op();
if(cursor_.row < text_.size()-1) {
++cursor_.row;
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row-1, cursor_.row);
}
on_move_cursor();
break;
case SDLK_PAGEUP: {
record_op();
on_page_up();
bool move_cursor = false;
while(cursor_.row > scroll_pos_ && char_position_on_screen(cursor_.row, cursor_.col).first == -1) {
--cursor_.row;
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row+1, cursor_.row);
move_cursor = true;
}
if(move_cursor) {
on_move_cursor();
}
if(!(SDL_GetModState()&KMOD_SHIFT)) {
select_ = cursor_;
}
break;
}
case SDLK_PAGEDOWN: {
record_op();
on_page_down();
bool move_cursor = false;
while(cursor_.row < scroll_pos_ && char_position_on_screen(cursor_.row, cursor_.col).first == -1) {
++cursor_.row;
cursor_.col = find_equivalent_col(cursor_.col, cursor_.row-1, cursor_.row);
move_cursor = true;
}
if(move_cursor) {
on_move_cursor();
}
if(!(SDL_GetModState()&KMOD_SHIFT)) {
select_ = cursor_;
}
break;
}
case SDLK_HOME:
record_op();
#ifdef __APPLE__
cursor_.row = 0;
#endif
if((SDL_GetModState()&KMOD_CTRL)) {
cursor_.row = 0;
}
cursor_.col = 0;
on_move_cursor();
break;
case SDLK_END:
record_op();
#ifdef __APPLE__
cursor_.row = text_.size()-1;
#endif
if((SDL_GetModState()&KMOD_CTRL)) {
cursor_.row = text_.size()-1;
}
cursor_.col = text_[cursor_.row].size();
on_move_cursor();
break;
case SDLK_DELETE:
case SDLK_BACKSPACE:
if(!editable_) {
break;
}
if(record_op("delete")) {
save_undo_state();
}
if(cursor_ == select_) {
if(event.keysym.sym == SDLK_BACKSPACE) {
//backspace is like delete but we move to the left first.
if(cursor_.col > text_[cursor_.row].size()) {
cursor_.col = text_[cursor_.row].size();
}
if(cursor_.row == 0 && cursor_.col == 0) {
break;
}
--cursor_.col;
if(cursor_.col < 0) {
--cursor_.row;
cursor_.col = text_[cursor_.row].size();
}
on_move_cursor();
}
if(cursor_.col >= text_[cursor_.row].size()) {
if(text_.size() > cursor_.row+1) {
cursor_.col = text_[cursor_.row].size();
text_[cursor_.row] += text_[cursor_.row+1];
text_.erase(text_.begin() + cursor_.row + 1);
}
} else {
text_[cursor_.row].erase(text_[cursor_.row].begin() + cursor_.col);
}
} else {
delete_selection();
}
refresh_scrollbar();
on_change();
break;
case SDLK_RETURN: {
if(!editable_) {
break;
}
if(record_op("enter")) {
save_undo_state();
}
if(nrows_ == 1) {
if(on_enter_) {
on_enter_();
}
break;
}
if(on_begin_enter_) {
if(!on_begin_enter_()) {
break;
}
}
delete_selection();
truncate_col_position();
std::string new_line(text_[cursor_.row].begin() + cursor_.col, text_[cursor_.row].end());
text_[cursor_.row].erase(text_[cursor_.row].begin() + cursor_.col, text_[cursor_.row].end());
std::string::iterator indent = text_[cursor_.row].begin();
while(indent != text_[cursor_.row].end() && strchr(" \t", *indent)) {
++indent;
}
new_line.insert(new_line.begin(), text_[cursor_.row].begin(), indent);
cursor_.col = indent - text_[cursor_.row].begin();
text_.insert(text_.begin() + cursor_.row + 1, new_line);
++cursor_.row;
select_ = cursor_;
refresh_scrollbar();
on_change();
on_move_cursor();
if(on_enter_) {
on_enter_();
}
break;
}
case SDLK_TAB: {
if(on_tab_) {
on_tab_();
} else if(nrows_ == 1) {
return false;
} else if(editable_) {
handle_text_input_internal("\t");
}
}
default: return true;
}
return true;
}
bool text_editor_widget::handle_text_input(const SDL_TextInputEvent& event)
{
return handle_text_input_internal(event.text);
}
bool text_editor_widget::handle_text_input_internal(const char* text)
{
if(!has_focus_ || !editable_) {
return false;
}
if(record_op("chars")) {
save_undo_state();
}
delete_selection();
if(cursor_.col > text_[cursor_.row].size()) {
cursor_.col = text_[cursor_.row].size();
}
for(const char* c = text; *c != 0; ++c) {
text_[cursor_.row].insert(text_[cursor_.row].begin() + cursor_.col, *c);
++cursor_.col;
}
select_ = cursor_;
if(nrows_ == 1) {
on_move_cursor();
}
refresh_scrollbar();
on_change();
return true;
}
bool text_editor_widget::handle_text_editing(const SDL_TextEditingEvent& event)
{
if(!has_focus_) {
return false;
}
return false;
}
void text_editor_widget::handle_paste(std::string txt)
{
if(!editable_) {
return;
}
record_op();
save_undo_state();
delete_selection();
txt.erase(std::remove(txt.begin(), txt.end(), '\r'), txt.end());
std::vector<std::string> lines = util::split(txt, '\n', 0 /*don't remove empties or strip spaces*/);
truncate_col_position();
if(lines.size() == 1) {
text_[cursor_.row].insert(text_[cursor_.row].begin() + cursor_.col, lines.front().begin(), lines.front().end());
cursor_.col += lines.front().size();
refresh_scrollbar();
select_ = cursor_;
} else if(lines.size() >= 2) {
text_.insert(text_.begin() + cursor_.row + 1, lines.back() + std::string(text_[cursor_.row].begin() + cursor_.col, text_[cursor_.row].end()));
text_[cursor_.row] = std::string(text_[cursor_.row].begin(), text_[cursor_.row].begin() + cursor_.col) + lines.front();
text_.insert(text_.begin() + cursor_.row + 1, lines.begin()+1, lines.end()-1);
cursor_ = select_ = Loc(cursor_.row + lines.size() - 1, lines.back().size());
}
on_change();
}
void text_editor_widget::handle_copy(bool mouse_based)
{
std::cerr << "HANDLE COPY...\n";
if(mouse_based && !clipboard_has_mouse_area()) {
return;
}
Loc begin = cursor_;
Loc end = select_;
if(begin.col > text_[begin.row].size()) {
begin.col = text_[begin.row].size();
}
if(end.col > text_[end.row].size()) {
end.col = text_[end.row].size();
}
if(end < begin) {
std::swap(begin, end);
}
std::string str;
if(begin.row == end.row) {
str = std::string(text_[begin.row].begin() + begin.col, text_[begin.row].begin() + end.col);
} else {
str = std::string(text_[begin.row].begin() + begin.col, text_[begin.row].end());
while(++begin.row < end.row) {
str += "\n" + text_[begin.row];
}
str += "\n" + std::string(text_[end.row].begin(), text_[end.row].begin() + end.col);
}
std::cerr << "COPY TO CLIPBOARD: " << str << " " << mouse_based << "\n";
copy_to_clipboard(str, mouse_based);
}
void text_editor_widget::delete_selection()
{
if(cursor_.col == select_.col && cursor_.row == select_.row) {
return;
}
if(cursor_.col > text_[cursor_.row].size()) {
cursor_.col = text_[cursor_.row].size();
}
if(select_.col > text_[select_.row].size()) {
select_.col = text_[select_.row].size();
}
if(select_ < cursor_) {
std::swap(cursor_, select_);
}
std::string& cursor_line = text_[cursor_.row];
std::string& select_line = text_[select_.row];
if(cursor_.row == select_.row) {
cursor_line.erase(cursor_line.begin() + cursor_.col, cursor_line.begin() + select_.col);
} else {
cursor_line = std::string(cursor_line.begin(), cursor_line.begin() + cursor_.col) + std::string(select_line.begin() + select_.col, select_line.end());
text_.erase(text_.begin() + cursor_.row + 1, text_.begin() + select_.row + 1);
}
select_ = cursor_;
}
graphics::color text_editor_widget::get_character_color(int row, int col) const
{
return text_color_;
}
std::pair<int, int> text_editor_widget::mouse_position_to_row_col(int xpos, int ypos) const
{
const int xloc = x() + BorderSize;
const int yloc = y() + BorderSize;
int r = 0;
for(int n = scroll_pos_; n < text_.size() && r < nrows_; ++n, ++r) {
int c = 0;
bool matches_row = ypos >= yloc + r*char_height_ && ypos < yloc + (r+1)*char_height_;
for(size_t m = xscroll_pos_; m < text_[n].size(); ++m, ++c) {
if(c >= ncols_) {
if(matches_row) {
break;
}
++r;
c -= ncols_;
matches_row = ypos >= yloc + r*char_height_ && ypos < yloc + (r+1)*char_height_;
if(r == nrows_) {
break;
}
}
const int char_size = text_[n][m] == '\t' ? TabWidth : 1;
if(matches_row && xpos >= xloc + c*char_width_ && xpos < xloc + (c+char_size)*char_width_) {
return std::pair<int, int>(n, m);
}
if(text_[n][m] == '\t') {
c += TabAdjust;
continue;
}
}
if(matches_row) {
return std::pair<int, int>(n, text_[n].size());
}
}
return std::pair<int, int>(-1,-1);
}
std::pair<int, int> text_editor_widget::char_position_on_screen(int row, int col) const
{
if(row < scroll_pos_) {
return std::pair<int, int>(-1, -1);
}
int r = 0;
for(size_t n = scroll_pos_; n < text_.size() && r < nrows_; ++n, ++r) {
int c = 0;
size_t m;
for(m = 0; m < text_[n].size(); ++m, ++c) {
if(c >= ncols_) {
++r;
c -= ncols_;
if(r == nrows_) {
break;
}
}
if(row == n && col == m) {
return std::pair<int, int>(BorderSize + r*char_height_, BorderSize + c*char_width_);
}
if(text_[n][m] == '\t') {
c += TabAdjust;
continue;
}
}
if(row == n && m == text_[n].size()) {
return std::pair<int, int>(BorderSize + r*char_height_, BorderSize + c*char_width_);
}
}
return std::pair<int, int>(-1,-1);
}
void text_editor_widget::on_page_up()
{
int leap = nrows_ - 1;
while(scroll_pos_ > 0 && leap > 0) {
--scroll_pos_;
--leap;
for(int n = int(text_[scroll_pos_].size()) - ncols_; n > 0; n -= ncols_) {
--leap;
}
}
refresh_scrollbar();
}
void text_editor_widget::on_page_down()
{
int leap = nrows_ - 1;
while(scroll_pos_ < int(text_.size())-2 && leap > 0) {
++scroll_pos_;
--leap;
for(int n = int(text_[scroll_pos_].size()) - ncols_; n > 0; n -= ncols_) {
--leap;
}
}
refresh_scrollbar();
}
void text_editor_widget::on_move_cursor(bool auto_shift)
{
const int start_pos = scroll_pos_;
if(cursor_.row < scroll_pos_) {
scroll_pos_ = cursor_.row;
} else {
while(scroll_pos_ < cursor_.row && char_position_on_screen(cursor_.row, cursor_.col).first == -1) {
++scroll_pos_;
}
}
if(nrows_ == 1) {
if(cursor_.col < xscroll_pos_) {
xscroll_pos_ = std::max<int>(0, cursor_.col - 4);
} else if(cursor_.col >= xscroll_pos_ + ncols_) {
xscroll_pos_ = cursor_.col + 4 - ncols_;
}
}
if(start_pos != scroll_pos_) {
refresh_scrollbar();
}
if(!auto_shift && !(SDL_GetModState()&KMOD_SHIFT)) {
select_ = cursor_;
}
scrollable_widget::set_yscroll(scroll_pos_*char_height_);
if(select_ != cursor_) {
//a mouse-based copy for X-style copy/paste
handle_copy(true);
}
if(on_move_cursor_) {
on_move_cursor_();
}
}
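// Maps a column in old_row to the column in new_row that sits at roughly
// the same on-screen x position, compensating for the extra width that tab
// characters occupy in either row.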
int text_editor_widget::find_equivalent_col(int old_col, int old_row, int new_row) const
{
int actual_pos = old_col + std::count(text_[old_row].begin(), text_[old_row].end(), '\t')*TabAdjust;
for(int n = 0; n < actual_pos; ++n) {
if(n < text_[new_row].size() && text_[new_row][n] == '\t') {
actual_pos -= TabAdjust;
}
}
return actual_pos;
}
void text_editor_widget::on_set_yscroll(int old_pos, int new_pos)
{
scroll_pos_ = new_pos/char_height_;
}
void text_editor_widget::refresh_scrollbar()
{
int total_rows = 0;
//See if it can all fit without a scrollbar.
for(int n = 0; n != text_.size(); ++n) {
const int rows = 1 + text_[n].size()/ncols_;
total_rows += rows;
if(total_rows > nrows_) {
break;
}
}
if(total_rows <= nrows_ || nrows_ == 1) {
//no scrollbar needed.
set_virtual_height(height());
update_scrollbar();
return;
}
set_virtual_height(text_.size()*char_height_ + height() - char_height_);
set_scroll_step(char_height_);
set_arrow_scroll_step(char_height_);
set_yscroll(scroll_pos_*char_height_);
update_scrollbar();
}
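// Used by the double-click handler: expands [begin_col, end_col) to the
// token under the cursor, which is either a number (digits and '.', with an
// optional leading '-'), an identifier (alphanumerics and '_'), or failing
// that a single character.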
void text_editor_widget::select_token(const std::string& row, int& begin_row, int& end_row, int& begin_col, int& end_col)
{
if(util::c_isdigit(row[begin_col]) || (row[begin_col] == '.' && begin_col+1 < row.size() && util::c_isdigit(row[begin_col+1]))) {
while(begin_col >= 0 && (util::c_isdigit(row[begin_col]) || row[begin_col] == '.')) {
--begin_col;
}
if(begin_col < 0 || row[begin_col] != '-') {
++begin_col;
}
while(end_col < row.size() && (util::c_isdigit(row[end_col]) || row[end_col] == '.')) {
++end_col;
}
} else if(util::c_isalnum(row[begin_col]) || row[begin_col] == '_') {
while(begin_col >= 0 && (util::c_isalnum(row[begin_col]) || row[begin_col] == '_')) {
--begin_col;
}
++begin_col;
while(end_col < row.size() && (util::c_isalnum(row[end_col]) || row[end_col] == '_')) {
++end_col;
}
} else if(end_col < row.size()) {
++end_col;
}
}
text_editor_widget_ptr text_editor_widget::clone() const
{
text_editor_widget_ptr result = new text_editor_widget(*this);
result->last_op_type_ = NULL;
return result;
}
void text_editor_widget::restore(const text_editor_widget* state)
{
*this = *state;
}
void text_editor_widget::save_undo_state()
{
redo_.clear();
undo_.push_back(text_editor_widget_ptr(clone()));
}
bool text_editor_widget::record_op(const char* type)
{
if(type == NULL || type != last_op_type_) {
last_op_type_ = type;
return true;
} else {
return false;
}
}
void text_editor_widget::undo()
{
if(undo_.empty()) {
return;
}
std::vector<text_editor_widget_ptr> redo_state = redo_;
save_undo_state();
redo_state.push_back(undo_.back());
undo_.pop_back();
//Save the state before restoring it so it doesn't get cleaned up
//while we're in the middle of the restore call.
text_editor_widget_ptr state = undo_.back();
restore(state.get());
redo_ = redo_state;
on_change();
}
void text_editor_widget::redo()
{
if(redo_.empty()) {
return;
}
std::vector<text_editor_widget_ptr> redo_state = redo_;
redo_state.pop_back();
//Save the state before restoring it so it doesn't get cleaned up
//while we're in the middle of the restore call.
text_editor_widget_ptr state = redo_.back();
restore(state.get());
redo_ = redo_state;
on_change();
}
void text_editor_widget::truncate_col_position()
{
if(cursor_.col > text_[cursor_.row].size()) {
cursor_.col = text_[cursor_.row].size();
}
if(select_.col > text_[select_.row].size()) {
select_.col = text_[select_.row].size();
}
}
void text_editor_widget::set_search(const std::string& term)
{
search_ = term;
calculate_search_matches();
if(search_matches_.empty()) {
return;
}
std::vector<std::pair<Loc, Loc> >::const_iterator search_itor =
std::lower_bound(search_matches_.begin(), search_matches_.end(),
std::pair<Loc,Loc>(cursor_, cursor_));
if(search_itor == search_matches_.end()) {
search_itor = search_matches_.begin();
}
select_ = cursor_ = search_itor->first;
on_move_cursor();
}
void text_editor_widget::next_search_match()
{
if(search_matches_.empty()) {
return;
}
cursor_.col++;
select_ = cursor_;
set_search(search_);
}
void text_editor_widget::calculate_search_matches()
{
search_matches_.clear();
if(search_.empty()) {
return;
}
try {
boost::regex re(search_, boost::regex::perl|boost::regex::icase);
for(int n = 0; n != text_.size(); ++n) {
boost::cmatch match;
const char* ptr = text_[n].c_str();
while(boost::regex_search(ptr, match, re)) {
const int base = ptr - text_[n].c_str();
const Loc begin(n, base + match.position());
const Loc end(n, base + match.position() + match.length());
search_matches_.push_back(std::pair<Loc,Loc>(begin,end));
const int advance = match.position() + match.length();
if(advance == 0) {
break;
}
ptr += advance;
}
}
} catch(boost::regex_error&) {
}
}
void text_editor_widget::replace(const std::string& replace_with)
{
record_op();
save_undo_state();
//we have to get the end itor here because some compilers don't
//support comparing a const and non-const reverse iterator
const std::vector<std::pair<Loc, Loc> >::const_reverse_iterator end_itor = search_matches_.rend();
for(std::vector<std::pair<Loc, Loc> >::const_reverse_iterator i = search_matches_.rbegin(); i != end_itor; ++i) {
const Loc& begin = i->first;
const Loc& end = i->second;
if(begin.row != end.row) {
continue;
}
text_[begin.row].erase(text_[begin.row].begin() + begin.col, text_[begin.row].begin() + end.col);
text_[begin.row].insert(text_[begin.row].begin() + begin.col, replace_with.begin(), replace_with.end());
}
on_change();
}
void text_editor_widget::on_change()
{
if(on_change_) {
on_change_();
}
if(on_user_change_ && in_event_) {
on_user_change_();
}
calculate_search_matches();
}
BEGIN_DEFINE_CALLABLE(text_editor_widget, widget)
DEFINE_FIELD(text, "string")
return variant(obj.text());
DEFINE_SET_FIELD
obj.set_text(value.as_string());
DEFINE_FIELD(begin_enter, "bool")
return variant::from_bool(obj.begin_enter_return_);
DEFINE_SET_FIELD
obj.begin_enter_return_ = value.as_bool();
DEFINE_FIELD(color, "string")
return variant("");
DEFINE_SET_FIELD
obj.text_color_ = graphics::color(value);
DEFINE_FIELD(has_focus, "bool")
return variant::from_bool(obj.has_focus_);
DEFINE_SET_FIELD
obj.has_focus_ = value.as_bool();
if(obj.clear_on_focus_ && obj.has_focus_) {
obj.set_text("");
obj.clear_on_focus_ = false;
}
END_DEFINE_CALLABLE(text_editor_widget)
void text_editor_widget::change_delegate()
{
using namespace game_logic;
if(get_environment()) {
map_formula_callable_ptr callable = map_formula_callable_ptr(new map_formula_callable(get_environment()));
callable->add("text", variant(text()));
variant value = ffl_on_change_->execute(*callable);
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::change_delegate() called without environment!" << std::endl;
}
}
void text_editor_widget::move_cursor_delegate()
{
if(get_environment()) {
variant value = ffl_on_move_cursor_->execute(*get_environment());
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::move_cursor_delegate() called without environment!" << std::endl;
}
}
void text_editor_widget::enter_delegate()
{
using namespace game_logic;
if(get_environment()) {
map_formula_callable_ptr callable = map_formula_callable_ptr(new map_formula_callable(get_environment()));
callable->add("text", variant(text()));
variant value = ffl_on_enter_->execute(*callable);
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::enter_delegate() called without environment!" << std::endl;
}
}
void text_editor_widget::escape_delegate()
{
using namespace game_logic;
if(get_environment()) {
map_formula_callable_ptr callable = map_formula_callable_ptr(new map_formula_callable(get_environment()));
callable->add("text", variant(text()));
variant value = ffl_on_escape_->execute(*callable);
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::escape_delegate() called without environment!" << std::endl;
}
}
void text_editor_widget::tab_delegate()
{
using namespace game_logic;
if(get_environment()) {
map_formula_callable_ptr callable = map_formula_callable_ptr(new map_formula_callable(get_environment()));
callable->add("text", variant(text()));
variant value = ffl_on_tab_->execute(*callable);
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::tab_delegate() called without environment!" << std::endl;
}
}
bool text_editor_widget::begin_enter_delegate()
{
if(get_environment()) {
variant value = ffl_on_begin_enter_->execute(*get_environment());
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::begin_enter_delegate() called without environment!" << std::endl;
}
// XXX Need some way of doing the return value here.
return begin_enter_return_;
}
void text_editor_widget::change_focus_delgate(bool new_focus_value)
{
using namespace game_logic;
if(get_environment()) {
map_formula_callable_ptr callable = map_formula_callable_ptr(new map_formula_callable(get_environment()));
callable->add("focus", variant::from_bool(new_focus_value));
callable->add("text", variant(text()));
variant value = ffl_on_change_focus_->execute(*callable);
get_environment()->execute_command(value);
} else {
std::cerr << "text_editor_widget::tab_delegate() called without environment!" << std::endl;
}
}
}
#include "code_editor_widget.hpp"
#include "dialog.hpp"
#include "filesystem.hpp"
namespace {
void on_change_search(const gui::text_editor_widget_ptr search_entry, gui::text_editor_widget_ptr editor)
{
editor->set_search(search_entry->text());
}
}
UTILITY(textedit)
{
using namespace gui;
if(args.size() != 1) {
std::cerr << "textedit usage: <filename>\n";
return;
}
std::string file = args[0];
std::string contents = sys::read_file(file);
if(contents.empty()) {
std::cerr << "Could not read file (" << file << ")\n";
return;
}
text_editor_widget_ptr entry = new text_editor_widget(120);
text_editor_widget_ptr editor = new code_editor_widget(600, 400);
editor->set_text(contents);
entry->set_on_change_handler(boost::bind(on_change_search, entry, editor));
entry->set_on_enter_handler(boost::bind(&text_editor_widget::next_search_match, editor));
dialog d(0, 0, graphics::screen_width(), graphics::screen_height());
d.add_widget(widget_ptr(entry), 10, 10);
d.add_widget(widget_ptr(editor), 10, 30);
d.show_modal();
}
UNIT_TEST(test_regex)
{
std::string searching = "abcdefg";
boost::regex re("cde");
boost::cmatch matches;
const char* ptr = searching.c_str();
if(boost::regex_search(ptr, matches, re)) {
CHECK_EQ(matches.size(), 1);
CHECK_EQ(matches.position(), 2);
CHECK_EQ(matches.length(), 3);
}
}
|
import pso
import numpy as np
def golinski(x):
return 0.7854*x[0]*x[1]**2*(3.3333*x[2]**2 + 14.9334*x[2] - 43.0934) - 1.508*x[0]*(x[5]**2 + x[6]**2) + 7.4777*(x[5]**3 + x[6]**3) + 0.7854*(x[3]*x[5]**2 + x[4]*x[6]**2)
bounds = [[2.6, 3.6], [0.7, 0.8], [17, 28], [7.3, 8.3], [7.3, 8.3], [2.9, 3.9], [5, 5.5]]
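# These bounds appear to correspond to the seven design variables of the
# classic Golinski speed-reducer benchmark (gear face width, tooth module,
# number of pinion teeth, the two shaft lengths and the two shaft
# diameters); the constraint set below follows the usual formulation of
# that problem.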
optimizer = pso.Optimizer(golinski, bounds)
optimizer.addBounds(bounds)
optimizer.addConstraint(lambda x: 27.0/(x[0] * x[1]**2 * x[2]) <= 1)
optimizer.addConstraint(lambda x: 397.5/(x[0] * x[1]**2 * x[2]**2) <= 1)
optimizer.addConstraint(lambda x: 1.93*x[3]**3/(x[1] * x[2] * x[5]**4) <= 1)
optimizer.addConstraint(lambda x: 1.93*x[4]**3/(x[1] * x[2] * x[6]**4) <= 1)
optimizer.addConstraint(lambda x: np.sqrt((745*x[3]/x[1]/x[2])**2 + 16.9*1e6)/(110*x[5]**3) <= 1)
optimizer.addConstraint(lambda x: np.sqrt((745*x[4]/x[1]/x[2])**2 + 157.5*1e6)/(85*x[6]**3) <= 1)
optimizer.addConstraint(lambda x: x[1]*x[2]/40 <= 1)
optimizer.addConstraint(lambda x: 5*x[1]/x[0] <= 1)
optimizer.addConstraint(lambda x: x[0]/12/x[1] <= 1)
optimizer.addConstraint(lambda x: (1.5*x[5] + 1.9)/x[3] <= 1)
optimizer.addConstraint(lambda x: (1.1*x[6] + 1.9)/x[4] <= 1)
optimizer.stepUntil(1e-6, logging=True)
print(optimizer.populationSize)
print(optimizer.globalBestFitness)
print(optimizer.globalBestValue)
print(len(optimizer.globalBestHistory)) |
# First setup of the model
remove(list=ls()) # Remove all variables from memory
on=1;off=0;
require(fields) # image.plot originates here
setwd('/Simulations/OpenCL/clGray-Scott/clGray-Scott')
# Parameter settings
Movie = off # Whether a movie is made
Wait = off # If on, the program waits after every frame
# Figure window size, should match movie dimensions
WinWidth = 960 # Width of the figure window
WinHeight = 720 # Height of the figure window
DPI = 144 # Figure scale
# Linking to file that contains the data
FileName = "Output.dat"
cat(paste("Data file date :",file.info(FileName )$mtime ),"\n")
FID = file(FileName, "rb")
# Reading the settings from the file
NX = readBin(FID, integer(), n = 1, endian = "little");
NY = readBin(FID, integer(), n = 1, endian = "little");
Length=readBin(FID, numeric(), size=4, n = 1, endian = "little")
NumFrames = readBin(FID, integer(), n = 1, endian = "little");
EndTime=readBin(FID, integer(), n = 1, endian = "little")
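# Layout of Output.dat implied by the reads above: a header of two 32-bit
# integers (NX, NY), one 32-bit float (Length) and two 32-bit integers
# (NumFrames, EndTime), followed by NumFrames frames, each containing two
# NX*NY blocks of 32-bit floats (read below as Data_W and Data_N).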
# Color palette for the images, obtained from a real-world picture
#Bush.palette= colorRampPalette(c("#cd9557", "#f8e29f", "#82A045", "#628239", "#506736","#385233"))
# --- Opening a window --------------------------------------------------------
if (Movie==off)
quartz(width=WinWidth/DPI, height=WinHeight/DPI, dpi=DPI)
# --- The loop the plots each frame
for (jj in 0:(NumFrames-1)){ # Here the time loop starts
if (Movie==on) # plotting into a jpeg image
tiff(filename = sprintf("Images/Rplot%03d.tiff",jj),
width = WinWidth, height = WinHeight,
units = "px", pointsize = 24,
compression="none",
bg = "white", res = NA,
type = "quartz")
# Loading the data for a single frame
Data_W = matrix(nrow=NY, ncol=NX, readBin(FID, numeric(), size=4, n = NX*NY, endian = "little"));
Data_N = matrix(nrow=NY, ncol=NX, readBin(FID, numeric(), size=4, n = NX*NY, endian = "little"));
par(mar=c(2, 3, 2, 5) + 0.1) # margin settings
image.plot(pmin(Data_N[,2:(NX-1)],0.4), zlim=c(0,0.4), xaxt="n", yaxt="n",
asp=1, bty="n", useRaster=TRUE,
legend.shrink = 0.99, legend.width = 2,
legend.args=list(text=expression(Concentration),
cex=0.8, line=0.5))
title("Substance V")
mtext(text=sprintf("Time : %1.0f of %1.0f timesteps", (jj+1)/NumFrames*EndTime, EndTime),
side=1, adj=0.5, line=0.5, cex=1)
# Adding the scale bar and text
#axis(side=1, at=c(0.8,1), line=0.5, labels = c(0,trunc(Length/5+0.5)),
# cex.axis=0.8, tck = -0.015, mgp=c(3, .25, 0))
#mtext(text="Scale (m)", side=1, adj=1.2, line=0.5, cex=0.8)
if (Movie==on) dev.off() else {
dev.flush() # this prevents flickering
dev.hold() # screen is frozen again
}
if (Wait==on){
cat ("Press [enter] to continue, [q] to quit")
line <- readline()
if (line=='q'){ stop() }
}
}
close(FID)
# --- Here, a movie is made by parameterizing ffmpeg --------------------------
if (Movie==on) {
InFiles=paste(getwd(),"/Images/Rplot%03d.tiff", sep="")
OutFile="Gray-Scott.mp4"
print(paste(" building :", OutFile))
CmdLine=sprintf("ffmpeg -y -r 30 -i %s -c:v libx264 -pix_fmt yuv420p -b:v 2000k %s", InFiles, OutFile)
cmd = system(CmdLine)
# if (cmd==0) try(system(paste("open ", paste(getwd(),"Mussels_PDE.mp4"))))
}
system('say All ready')
|
(*
Demonstration of type elaboration so far.
Joshua Chen, Alexander Krauss
Presented at CICM '19, Prague.
*)
theory Cicm2019_Demo
imports Soft_Types.Soft_Types_HOL
begin
typedecl i
type_synonym U = "i type"
abbreviation "U \<equiv> Any :: U type" \<comment>\<open>reflection of the rigid type into the soft type system\<close>
text \<open>Polymorphic lists:\<close>
axiomatization list :: "U \<Rightarrow> U"
and cons
and nil
and append
\<comment>\<open>Declare the (soft!) types of constructors:\<close>
where 1 [type implicit: 1]: "nil : (A : U) \<Rightarrow> list A"
and 2 [type implicit: 1]: "cons : (A : U) \<Rightarrow> A \<Rightarrow> list A \<Rightarrow> list A"
and 3 [type implicit: 1]: "append : (A : U) \<Rightarrow> list A \<Rightarrow> list A \<Rightarrow> list A"
declare [[auto_elaborate]]
term "cons x xs"
term "append xs ys"
term "nil"
lemma "append xs nil = xs"
oops
declare [[auto_elaborate=false]]
text \<open>An example from dependent type theory that we can't yet handle!\<close>
axiomatization Id
and refl
and J
where Id_type [type implicit: 1]: "Id : (A : U) \<Rightarrow> A \<Rightarrow> A \<Rightarrow> U"
and refl_type [type implicit: 1]: "refl: (A : U) \<Rightarrow> (x: A) \<Rightarrow> Id A x x"
and J_type [type implicit: 2]: "J: (A : U) \<Rightarrow> (C: (x: A) \<Rightarrow> (y: A) \<Rightarrow>
(p: Id A x y) \<Rightarrow> U) \<Rightarrow> ((x: A) \<Rightarrow> C x x (refl A x)) \<Rightarrow> (a: A) \<Rightarrow> (b: A)
\<Rightarrow> (p: Id A a b) \<Rightarrow> C a b p"
text \<open>The proof term for reflexivity of equality:\<close>
declare [[auto_elaborate]]
term J
(* term "J (\<lambda>x. refl x) a b p" *)
end
|
(*
* Copyright 2014, General Dynamics C4 Systems
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(GD_GPL)
*)
(*
Results about CNode Invocations, particularly the
recursive revoke and delete operations.
*)
theory CNodeInv_AI
imports "./$L4V_ARCH/ArchIpc_AI"
begin
context begin interpretation Arch .
requalify_facts set_cap_arch
end
declare set_cap_arch[wp]
primrec
valid_cnode_inv :: "cnode_invocation \<Rightarrow> 'z::state_ext state \<Rightarrow> bool"
where
"valid_cnode_inv (InsertCall cap ptr ptr') =
(valid_cap cap and real_cte_at ptr and real_cte_at ptr' and
(\<lambda>s. cte_wp_at (is_derived (cdt s) ptr cap) ptr s) and
cte_wp_at (\<lambda>c. c = NullCap) ptr' and
ex_cte_cap_wp_to is_cnode_cap ptr' and K (ptr \<noteq> ptr') and
(\<lambda>s. \<forall>r\<in>obj_refs cap. \<forall>p'.
ptr' \<noteq> p' \<and> cte_wp_at (\<lambda>cap'. r \<in> obj_refs cap') p' s \<longrightarrow>
cte_wp_at (Not \<circ> is_zombie) p' s \<and> \<not> is_zombie cap))"
| "valid_cnode_inv (MoveCall cap ptr ptr') =
(valid_cap cap and cte_wp_at (op = cap.NullCap) ptr' and
cte_wp_at (op \<noteq> NullCap) ptr and cte_wp_at (weak_derived cap) ptr and
cte_wp_at (\<lambda>c. is_untyped_cap c \<longrightarrow> c = cap) ptr and
ex_cte_cap_wp_to is_cnode_cap ptr' and
real_cte_at ptr and real_cte_at ptr')"
| "valid_cnode_inv (RevokeCall ptr) = cte_at ptr"
| "valid_cnode_inv (DeleteCall ptr) = real_cte_at ptr"
| "valid_cnode_inv (RotateCall s_cap p_cap src pivot dest) =
(valid_cap s_cap and valid_cap p_cap and
real_cte_at src and real_cte_at dest and real_cte_at pivot and
cte_wp_at (weak_derived s_cap) src and
cte_wp_at (\<lambda>c. is_untyped_cap c \<longrightarrow> c = s_cap) src and
cte_wp_at (op \<noteq> NullCap) src and
cte_wp_at (weak_derived p_cap) pivot and
cte_wp_at (\<lambda>c. is_untyped_cap c \<longrightarrow> c = p_cap) pivot and
cte_wp_at (op \<noteq> NullCap) pivot and K (src \<noteq> pivot \<and> pivot \<noteq> dest) and
(\<lambda>s. src \<noteq> dest \<longrightarrow> cte_wp_at (\<lambda>c. c = NullCap) dest s) and
ex_cte_cap_wp_to is_cnode_cap pivot and ex_cte_cap_wp_to is_cnode_cap dest)"
| "valid_cnode_inv (SaveCall ptr) =
(ex_cte_cap_wp_to is_cnode_cap ptr and
cte_wp_at (\<lambda>c. c = NullCap) ptr and real_cte_at ptr)"
| "valid_cnode_inv (CancelBadgedSendsCall cap) =
(valid_cap cap and K (has_cancel_send_rights cap))"
primrec
valid_rec_del_call :: "rec_del_call \<Rightarrow> 'z::state_ext state \<Rightarrow> bool"
where
"valid_rec_del_call (CTEDeleteCall slot _) = \<top>"
| "valid_rec_del_call (FinaliseSlotCall slot _) = \<top>"
| "valid_rec_del_call (ReduceZombieCall cap slot _) =
(cte_wp_at (op = cap) slot and is_final_cap' cap
and K (is_zombie cap))"
locale CNodeInv_AI =
fixes state_ext_t :: "'state_ext::state_ext itself"
assumes derive_cap_objrefs:
"\<And>P cap slot.
\<lbrace>\<lambda>s::'state_ext state. P (obj_refs cap)\<rbrace>
derive_cap slot cap
\<lbrace>\<lambda>rv s. rv \<noteq> NullCap \<longrightarrow> P (obj_refs rv)\<rbrace>,-"
assumes derive_cap_zobjrefs:
"\<And>P cap slot.
\<lbrace>\<lambda>s::'state_ext state. P (zobj_refs cap)\<rbrace>
derive_cap slot cap
\<lbrace>\<lambda>rv s. rv \<noteq> NullCap \<longrightarrow> P (zobj_refs rv)\<rbrace>,-"
assumes update_cap_objrefs:
"\<And>P dt cap. \<lbrakk> update_cap_data P dt cap \<noteq> NullCap \<rbrakk> \<Longrightarrow>
obj_refs (update_cap_data P dt cap) = obj_refs cap"
assumes update_cap_zobjrefs:
"\<And>P dt cap. \<lbrakk> update_cap_data P dt cap \<noteq> cap.NullCap \<rbrakk> \<Longrightarrow>
zobj_refs (update_cap_data P dt cap) = zobj_refs cap"
assumes copy_mask [simp]:
"\<And>R c. copy_of (mask_cap R c) = copy_of c"
assumes update_cap_data_mask_Null [simp]:
"\<And>P x m c. (update_cap_data P x (mask_cap m c) = NullCap) = (update_cap_data P x c = NullCap)"
assumes cap_master_update_cap_data:
"\<And>P x c. \<lbrakk> update_cap_data P x c \<noteq> NullCap \<rbrakk> \<Longrightarrow>
cap_master_cap (update_cap_data P x c) = cap_master_cap c"
assumes same_object_as_cap_master:
"\<And>cap cap'. same_object_as cap cap' \<Longrightarrow> cap_master_cap cap = cap_master_cap cap'"
assumes cap_asid_update_cap_data:
"\<And>P x c. update_cap_data P x c \<noteq> NullCap \<Longrightarrow> cap_asid (update_cap_data P x c) = cap_asid c"
assumes cap_vptr_update_cap_data:
"\<And>P x c. update_cap_data P x c \<noteq> NullCap \<Longrightarrow> cap_vptr (update_cap_data P x c) = cap_vptr c"
assumes cap_asid_base_update_cap_data:
"\<And>P x c. update_cap_data P x c \<noteq> NullCap \<Longrightarrow>
cap_asid_base (update_cap_data P x c) = cap_asid_base c"
assumes same_object_as_update_cap_data:
"\<And>P x c c'. \<lbrakk> update_cap_data P x c \<noteq> NullCap; same_object_as c' c \<rbrakk> \<Longrightarrow>
same_object_as c' (update_cap_data P x c)"
assumes weak_derived_update_cap_data:
"\<And>P x c c'. \<lbrakk>update_cap_data P x c \<noteq> NullCap; weak_derived c c'\<rbrakk> \<Longrightarrow>
weak_derived (update_cap_data P x c) c'"
assumes cap_badge_update_cap_data:
"\<And>x c bdg. update_cap_data False x c \<noteq> NullCap \<and> (bdg, cap_badge c) \<in> capBadge_ordering False
\<longrightarrow> (bdg, cap_badge (update_cap_data False x c)) \<in> capBadge_ordering False"
assumes cap_vptr_rights_update[simp]:
"\<And>f c. cap_vptr (cap_rights_update f c) = cap_vptr c"
assumes cap_vptr_mask[simp]:
"\<And>m c. cap_vptr (mask_cap m c) = cap_vptr c"
assumes cap_asid_base_rights [simp]:
"\<And>R c. cap_asid_base (cap_rights_update R c) = cap_asid_base c"
assumes cap_asid_base_mask[simp]:
"\<And>m c. cap_asid_base (mask_cap m c) = cap_asid_base c"
assumes weak_derived_mask:
"\<And>c c' m. \<lbrakk> weak_derived c c'; cap_aligned c \<rbrakk> \<Longrightarrow> weak_derived (mask_cap m c) c'"
assumes vs_cap_ref_update_cap_data[simp]:
"\<And>P d cap. vs_cap_ref (update_cap_data P d cap) = vs_cap_ref cap"
assumes weak_derived_cap_is_device:
"\<And>c c'. \<lbrakk>weak_derived c' c\<rbrakk> \<Longrightarrow> cap_is_device c = cap_is_device c'"
assumes in_preempt[simp,intro]:
"\<And>rv s' (s::'state_ext state).
(Inr rv, s') \<in> fst (preemption_point s) \<Longrightarrow>
(\<exists>f es. s' = s \<lparr> machine_state := machine_state s
\<lparr> irq_state := f (irq_state (machine_state s)) \<rparr>, exst := es\<rparr>)"
assumes invs_irq_state_independent[intro!, simp]:
"\<And>(s::'state_ext state) f.
invs (s\<lparr>machine_state := machine_state s\<lparr>irq_state := f (irq_state (machine_state s))\<rparr>\<rparr>)
= invs s"
assumes cte_at_nat_to_cref_zbits:
"\<And>(s::'state_ext state) oref zb n m.
\<lbrakk> s \<turnstile> Zombie oref zb n; m < n \<rbrakk> \<Longrightarrow> cte_at (oref, nat_to_cref (zombie_cte_bits zb) m) s"
assumes copy_of_cap_range:
"\<And>cap cap'. copy_of cap cap' \<Longrightarrow> cap_range cap = cap_range cap'"
assumes copy_of_zobj_refs:
"\<And>cap cap'. copy_of cap cap' \<Longrightarrow> zobj_refs cap = zobj_refs cap'"
assumes vs_cap_ref_master:
"\<And> cap cap'.
\<lbrakk> cap_master_cap cap = cap_master_cap cap';
cap_asid cap = cap_asid cap';
cap_asid_base cap = cap_asid_base cap';
cap_vptr cap = cap_vptr cap' \<rbrakk>
\<Longrightarrow> vs_cap_ref cap = vs_cap_ref cap'"
assumes weak_derived_vs_cap_ref:
"\<And>c c'. weak_derived c c' \<Longrightarrow> vs_cap_ref c = vs_cap_ref c'"
assumes weak_derived_table_cap_ref:
"\<And>c c'. weak_derived c c' \<Longrightarrow> table_cap_ref c = table_cap_ref c'"
assumes swap_of_caps_valid_arch_caps:
"\<And>c a c' b.
\<lbrace>valid_arch_caps and cte_wp_at (weak_derived c) a and cte_wp_at (weak_derived c') b\<rbrace>
do
y \<leftarrow> set_cap c b;
set_cap c' a
od
\<lbrace>\<lambda>rv. valid_arch_caps :: 'state_ext state \<Rightarrow> bool\<rbrace>"
assumes cap_swap_asid_map[wp]:
"\<And>c a c' b.
\<lbrace>valid_asid_map and cte_wp_at (weak_derived c) a and cte_wp_at (weak_derived c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. valid_asid_map :: 'state_ext state \<Rightarrow> bool\<rbrace>"
assumes cap_swap_cap_refs_in_kernel_window[wp]:
"\<And>c a c' b.
\<lbrace>cap_refs_in_kernel_window and cte_wp_at (weak_derived c) a and cte_wp_at (weak_derived c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. cap_refs_in_kernel_window :: 'state_ext state \<Rightarrow> bool\<rbrace>"
assumes cap_swap_vms[wp]:
"\<And>c a c' b.
\<lbrace>valid_machine_state :: 'state_ext state \<Rightarrow> bool\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. valid_machine_state\<rbrace>"
assumes unat_of_bl_nat_to_cref:
"\<And>n ln. \<lbrakk> n < 2 ^ ln; ln < word_bits \<rbrakk>
\<Longrightarrow> unat (of_bl (nat_to_cref ln n) :: word32) = n"
assumes zombie_is_cap_toE_pre:
"\<And>(s::'state_ext state) ptr zbits n m irqn.
\<lbrakk> s \<turnstile> Zombie ptr zbits n; invs s; m < n \<rbrakk>
\<Longrightarrow> (ptr, nat_to_cref (zombie_cte_bits zbits) m) \<in> cte_refs (Zombie ptr zbits n) irqn"
assumes finalise_cap_makes_halted:
"\<And>cap ex slot.
\<lbrace>invs and valid_cap cap and (\<lambda>s. ex = is_final_cap' cap s)
and cte_wp_at (op = cap) slot\<rbrace>
finalise_cap cap ex
\<lbrace>\<lambda>rv (s::'state_ext state). \<forall>t \<in> obj_refs (fst rv). halted_if_tcb t s\<rbrace>"
assumes finalise_cap_emptyable[wp]:
"\<And>sl c f.
\<lbrace>emptyable sl and (invs and valid_mdb)\<rbrace>
finalise_cap c f
\<lbrace>\<lambda>_. emptyable sl :: 'state_ext state \<Rightarrow> bool\<rbrace>"
assumes deleting_irq_handler_emptyable[wp]:
"\<And>sl irq.
\<lbrace>emptyable sl and invs :: 'state_ext state \<Rightarrow> bool\<rbrace>
deleting_irq_handler irq
\<lbrace>\<lambda>_. emptyable sl\<rbrace>"
assumes arch_finalise_cap_emptyable[wp]:
"\<And>sl c f.
\<lbrace>emptyable sl :: 'state_ext state \<Rightarrow> bool\<rbrace>
arch_finalise_cap c f
\<lbrace>\<lambda>_. emptyable sl\<rbrace>"
assumes finalise_cap_not_reply_master_unlifted:
"\<And>rv s' cap sl (s::'state_ext state).
(rv, s') \<in> fst (finalise_cap cap sl s) \<Longrightarrow>
\<not> is_master_reply_cap (fst rv)"
assumes nat_to_cref_0_replicate:
"\<And>n. n < word_bits \<Longrightarrow> nat_to_cref n 0 = replicate n False"
assumes prepare_thread_delete_thread_cap:
"\<And>x p t. \<lbrace>\<lambda>(s::'state_ext state). caps_of_state s x = Some (cap.ThreadCap p)\<rbrace>
prepare_thread_delete t
\<lbrace>\<lambda>rv s. caps_of_state s x = Some (cap.ThreadCap p)\<rbrace>"
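(* Second interface locale: CNodeInv_AI_2 adds the invariant-preservation statement for
   rec_del (rec_del_invs') as an assumption on top of CNodeInv_AI. *)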
locale CNodeInv_AI_2 = CNodeInv_AI state_ext_t
for state_ext_t :: "'state_ext::state_ext itself" +
assumes rec_del_invs':
"\<And>(s::'state_ext state) call.
s \<turnstile> \<lbrace>\<lambda>x. invs x \<and> valid_rec_del_call call x \<and>
(\<not> exposed_rdcall call \<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) (slot_rdcall call) x) \<and>
emptyable (slot_rdcall call) x \<and>
(case call of ReduceZombieCall cap sl ex \<Rightarrow> \<not> cap_removeable cap sl \<and>
(\<forall>t\<in>obj_refs cap. halted_if_tcb t x)
| _ \<Rightarrow> True)\<rbrace>
rec_del call
\<lbrace>\<lambda>rv s. invs s \<and>
(case call of CTEDeleteCall _ bool \<Rightarrow> True
| FinaliseSlotCall sl x \<Rightarrow> (fst rv \<or> x \<longrightarrow> cte_wp_at (replaceable s sl NullCap) sl s) \<and>
(\<forall>irq. snd rv = Some irq \<longrightarrow> IRQHandlerCap irq \<notin> ran (caps_of_state s(sl \<mapsto> NullCap)))
| ReduceZombieCall cap sl x \<Rightarrow> \<not> x \<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) sl s) \<and>
emptyable (slot_rdcall call) s\<rbrace>,
\<lbrace>\<lambda>rv. invs\<rbrace>"
lemma mask_cap_all:
"mask_cap (all_rights \<inter> r) c = mask_cap r c"
unfolding all_rights_def by simp
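(* Exhaustive case analysis for decode_cnode_invocation: the copy/mint/move/mutate, revoke,
   delete, save-caller, cancel-badged-sends and rotate cases are distinguished by invocation
   label and by the shape of the argument and extra-cap lists, with an error case for the rest. *)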
lemma decode_cnode_cases2:
assumes mvins: "\<And>index bits src_index src_depth args' src_root_cap exs'.
\<lbrakk> args = index # bits # src_index # src_depth # args';
exs = src_root_cap # exs';
invocation_type label \<in> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label \<notin> {CNodeRevoke, CNodeDelete,
CNodeCancelBadgedSends, CNodeRotate, CNodeSaveCaller} \<rbrakk> \<Longrightarrow> P"
assumes rvk: "\<And>index bits args'. \<lbrakk> args = index # bits # args';
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label = CNodeRevoke \<rbrakk> \<Longrightarrow> P"
assumes dlt: "\<And>index bits args'. \<lbrakk> args = index # bits # args';
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label = CNodeDelete \<rbrakk> \<Longrightarrow> P"
assumes svc: "\<And>index bits args'. \<lbrakk> args = index # bits # args';
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label = CNodeSaveCaller \<rbrakk> \<Longrightarrow> P"
assumes rcy: "\<And>index bits args'. \<lbrakk> args = index # bits # args';
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label = CNodeCancelBadgedSends \<rbrakk> \<Longrightarrow> P"
assumes rot: "\<And>index bits pivot_new_data pivot_index pivot_depth src_new_data
src_index src_depth args' pivot_root_cap src_root_cap exs'.
\<lbrakk> args = index # bits # pivot_new_data # pivot_index # pivot_depth
# src_new_data # src_index # src_depth # args';
exs = pivot_root_cap # src_root_cap # exs';
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate];
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller];
invocation_type label = CNodeRotate \<rbrakk> \<Longrightarrow> P"
assumes errs:
"\<lbrakk> invocation_type label \<notin> set [CNodeRevoke .e. CNodeSaveCaller] \<or>
args = [] \<or> (\<exists>x. args = [x]) \<or> (\<exists>index bits args'. args = index # bits # args' \<and>
invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller] \<and>
(invocation_type label \<in> set [CNodeCopy .e. CNodeMutate]
\<and> invocation_type label \<notin> {CNodeRevoke, CNodeDelete,
CNodeCancelBadgedSends, CNodeRotate, CNodeSaveCaller}
\<and> (case (args', exs) of (src_index # src_depth # args'',
src_root_cap # exs') \<Rightarrow> False | _ \<Rightarrow> True) \<or>
invocation_type label \<notin> set [CNodeCopy .e. CNodeMutate] \<and>
invocation_type label = CNodeRotate \<and> (case (args', exs) of
(pivot_new_data # pivot_index # pivot_depth
# src_new_data # src_index # src_depth # args'',
pivot_root_cap # src_root_cap # exs') \<Rightarrow> False
| _ \<Rightarrow> True))) \<rbrakk> \<Longrightarrow> P"
shows "P"
proof -
have simps: "[CNodeRevoke .e. CNodeSaveCaller]
= [CNodeRevoke, CNodeDelete, CNodeCancelBadgedSends, CNodeCopy, CNodeMint,
CNodeMove, CNodeMutate, CNodeRotate, CNodeSaveCaller]"
"[CNodeCopy .e. CNodeMutate] = [CNodeCopy, CNodeMint,
CNodeMove, CNodeMutate]"
by (simp_all add: upto_enum_def fromEnum_def toEnum_def enum_invocation_label)
show ?thesis
apply (cases args)
apply (simp add: errs)
apply (case_tac list)
apply (simp add: errs)
apply (case_tac "invocation_type label \<in> set [CNodeCopy .e. CNodeMutate]")
apply (case_tac "case (lista, exs) of (src_index # src_depth # args'',
src_root_cap # exs'') \<Rightarrow> False | _ \<Rightarrow> True")
apply (rule errs)
apply (simp add: simps)
apply (rule disjI2)
apply auto[1]
apply (simp split: prod.split_asm list.split_asm)
apply (erule(2) mvins, auto simp: simps)[1]
apply (case_tac "invocation_type label \<in> set [CNodeRevoke .e. CNodeSaveCaller]")
apply (simp_all add: errs)
apply (insert rvk dlt svc rcy rot)
apply (simp add: simps)
apply atomize
apply (elim disjE, simp_all)
apply (case_tac "case (lista, exs) of
(pivot_new_data # pivot_index # pivot_depth
# src_new_data # src_index # src_depth # args'',
pivot_root_cap # src_root_cap # exs') \<Rightarrow> False
| _ \<Rightarrow> True")
apply (rule errs)
apply (simp add: simps)
apply (simp split: prod.split_asm list.split_asm)
done
qed
lemma valid_cnode_capI:
"\<lbrakk>cap_table_at n w s; valid_objs s; pspace_aligned s; n > 0; length g \<le> 32\<rbrakk>
\<Longrightarrow> s \<turnstile> cap.CNodeCap w n g"
apply (simp add: valid_cap_def cap_aligned_def)
apply (rule conjI)
apply (clarsimp simp add: pspace_aligned_def obj_at_def)
apply (drule bspec, fastforce)
apply (clarsimp simp: is_obj_defs wf_obj_bits cte_level_bits_def)
apply (clarsimp simp add: obj_at_def is_obj_defs valid_objs_def dom_def)
apply (erule allE, erule impE, blast)
apply (simp add: valid_obj_def valid_cs_def valid_cs_size_def)
apply (simp add: cte_level_bits_def word_bits_def)
done
lemma Suc_length_not_empty:
"length xs = length xs' \<Longrightarrow> Suc 0 \<le> length xs' = (xs \<noteq> [])"
by (fastforce simp: le_simps)
lemma update_cap_hoare_helper:
"\<lbrace>P\<rbrace> f \<lbrace>\<lambda>rv s. valid_cap (C rv s) s\<rbrace> \<Longrightarrow>
\<lbrace>P\<rbrace> f \<lbrace>\<lambda>rv s. valid_cap (update_cap_data prs n (C rv s)) s\<rbrace>"
apply (erule hoare_strengthen_post)
apply (erule update_cap_data_validI)
done
lemma mask_cap_hoare_helper:
"\<lbrace>P\<rbrace> f \<lbrace>\<lambda>rv s. valid_cap (C rv s) s\<rbrace> \<Longrightarrow>
\<lbrace>P\<rbrace> f \<lbrace>\<lambda>rv s. valid_cap (mask_cap (M rv s) (C rv s)) s\<rbrace>"
by (fastforce simp add: valid_def)
lemma derive_cap_untyped:
"\<lbrace>\<lambda>s. P (untyped_range cap)\<rbrace> derive_cap slot cap \<lbrace>\<lambda>rv s. rv \<noteq> cap.NullCap \<longrightarrow> P (untyped_range rv)\<rbrace>,-"
unfolding derive_cap_def is_zombie_def
by (cases cap; (wp ensure_no_children_inv | simp add: o_def)+)
lemma zombies_final_helper:
"\<lbrakk> cte_wp_at (\<lambda>c. c = cap) p s; \<not> is_zombie cap; zombies_final s \<rbrakk>
\<Longrightarrow> (\<forall>r\<in>obj_refs cap. \<forall>a b.
cte_wp_at (\<lambda>cap'. r \<in> obj_refs cap') (a, b) s \<longrightarrow> cte_wp_at (Not \<circ> is_zombie) (a, b) s)"
apply (clarsimp simp: cte_wp_at_def)
apply (case_tac "p = (a, b)")
apply simp
apply (drule(2) zombies_finalD2)
apply clarsimp
apply blast
apply simp
done
lemma cap_asid_mask[simp]:
"cap_asid (mask_cap m c) = cap_asid c"
by (simp add: mask_cap_def)
lemma cap_master_mask[simp]:
"cap_master_cap (mask_cap rs cap) = cap_master_cap cap"
by (simp add: mask_cap_def)
lemma cap_badge_mask[simp]:
"cap_badge (mask_cap rs cap) = cap_badge cap"
by (simp add: mask_cap_def)
lemma ensure_empty_cte_wp_at:
"\<lbrace>\<top>\<rbrace> ensure_empty c \<lbrace>\<lambda>rv s. cte_wp_at (op = cap.NullCap) c s\<rbrace>, -"
unfolding ensure_empty_def
apply (wp whenE_throwError_wp get_cap_wp)
apply simp
done
lemmas get_cap_cte_caps_to_no_wp[wp]
= get_cap_cte_caps_to[where P="\<top>", simplified]
lemma lookup_cap_ex[wp]:
"\<lbrace>\<top>\<rbrace> lookup_cap t c \<lbrace>\<lambda>rv s. \<forall>r\<in>cte_refs rv (interrupt_irq_node s). ex_cte_cap_to r s\<rbrace>, -"
by (simp add: split_def lookup_cap_def) wp
lemmas cap_aligned_valid[elim!] = valid_cap_aligned
lemma cap_derive_not_null_helper2:
"\<lbrace>P\<rbrace> derive_cap slot cap \<lbrace>\<lambda>rv s. rv \<noteq> cap.NullCap \<longrightarrow> Q rv s\<rbrace>, -
\<Longrightarrow>
\<lbrace>\<lambda>s. cap \<noteq> cap.NullCap \<and> \<not> is_zombie cap \<and> cap \<noteq> cap.IRQControlCap \<longrightarrow> P s\<rbrace>
derive_cap slot cap
\<lbrace>\<lambda>rv s. rv \<noteq> cap.NullCap \<longrightarrow> Q rv s\<rbrace>, -"
apply (drule cap_derive_not_null_helper)
apply (erule hoare_post_imp_R)
apply simp
done
lemma has_cancel_send_rights_ep_cap:
"has_cancel_send_rights cap \<Longrightarrow> is_ep_cap cap"
by (clarsimp simp: has_cancel_send_rights_def split: cap.splits)
lemma is_untyped_update_cap_data[intro]:
"is_untyped_cap r \<Longrightarrow> update_cap_data c x r = r"
by (cases r; clarsimp simp: update_cap_data_def is_arch_cap_def)
context CNodeInv_AI begin
lemma decode_cnode_inv_wf[wp]:
"\<And>cap.
\<lbrace>invs and valid_cap cap
and (\<lambda>s. \<forall>r\<in>zobj_refs cap. ex_nonz_cap_to r s)
and (\<lambda>s. is_cnode_cap cap \<longrightarrow> (\<forall>r\<in>cte_refs cap (interrupt_irq_node s).
ex_cte_cap_wp_to is_cnode_cap r s))
and (\<lambda>s. \<forall>cap \<in> set cs. s \<turnstile> cap)
and (\<lambda>s. \<forall>cap \<in> set cs. is_cnode_cap cap \<longrightarrow>
(\<forall>r\<in>cte_refs cap (interrupt_irq_node s). ex_cte_cap_wp_to is_cnode_cap r s)) \<rbrace>
decode_cnode_invocation mi args cap cs
\<lbrace>valid_cnode_inv\<rbrace>,-"
including no_pre
apply (rule decode_cnode_cases2[where args=args and exs=cs and label=mi])
-- "Move/Insert"
apply (simp add: decode_cnode_invocation_def unlessE_whenE
split del: if_split)
apply (wp lsfco_cte_at ensure_no_children_wp whenE_throwError_wp
| simp add: split_beta split del: if_split
| (fold validE_R_def)[1])+
apply (rule cap_derive_not_null_helper2)
apply (simp only: imp_conjR)
apply ((wp derive_cap_is_derived
derive_cap_valid_cap
derive_cap_zobjrefs derive_cap_objrefs_iszombie
| wp_once hoare_drop_imps)+ )[1]
apply (wp whenE_throwError_wp | wpcw)+
apply simp
apply (rule_tac Q="\<lambda>src_cap. valid_cap src_cap and ex_cte_cap_wp_to is_cnode_cap x
and zombies_final and valid_objs
and real_cte_at src_slot and real_cte_at x
and cte_wp_at (\<lambda>c. c = src_cap) src_slot
and cte_wp_at (op = cap.NullCap) x"
in hoare_post_imp)
apply (clarsimp simp: cte_wp_at_caps_of_state all_rights_def)
apply (simp add: cap_master_update_cap_data weak_derived_update_cap_data
cap_asid_update_cap_data
update_cap_data_validI update_cap_objrefs)
apply (strengthen cap_badge_update_cap_data)
apply simp
apply (frule (1) caps_of_state_valid_cap)
apply (case_tac "is_zombie r")
apply (clarsimp simp add: valid_cap_def2 update_cap_data_def
is_cap_simps
split: if_split_asm)
apply (frule(2) zombies_final_helper [OF caps_of_state_cteD[simplified cte_wp_at_eq_simp]])
apply (clarsimp simp: valid_cap_def2 cte_wp_at_caps_of_state)
apply (rule conjI, clarsimp+)+
apply (fastforce simp: is_untyped_update_cap_data
weak_derived_update_cap_data[OF _ weak_derived_refl])
apply (wp get_cap_cte_wp_at ensure_empty_cte_wp_at)+
apply simp
apply (fold validE_R_def)
apply (rule hoare_pre, wp lookup_slot_for_cnode_op_cap_to)
apply (clarsimp simp: invs_def valid_state_def valid_pspace_def)
-- "Revoke"
apply (simp add: decode_cnode_invocation_def unlessE_whenE cong: if_cong)
apply (wp lsfco_cte_at hoare_drop_imps whenE_throwError_wp
| simp add: split_beta validE_R_def[symmetric])+
apply clarsimp
-- "Delete"
apply (simp add: decode_cnode_invocation_def unlessE_whenE cong: if_cong)
apply (wp lsfco_cte_at hoare_drop_imps whenE_throwError_wp
| simp add: split_beta validE_R_def[symmetric])+
apply clarsimp
-- "Save"
apply (simp add: decode_cnode_invocation_def unlessE_whenE cong: if_cong)
apply (rule hoare_pre)
apply (wp ensure_empty_stronger whenE_throwError_wp
lsfco_cte_at lookup_slot_for_cnode_op_cap_to
hoare_vcg_const_imp_lift
| simp add: split_beta
| wp_once hoare_drop_imps)+
apply clarsimp
-- "CancelBadgedSends"
apply (simp add: decode_cnode_invocation_def
unlessE_def whenE_def
split del: if_split)
apply (wp get_cap_wp hoare_vcg_all_lift | simp add: )+
apply (fold validE_R_def)
apply (rule_tac Q'="\<lambda>rv. invs and cte_wp_at (\<lambda>_. True) rv" in hoare_post_imp_R)
apply (rule hoare_pre, wp lsfco_cte_at)
apply (clarsimp simp: cte_wp_valid_cap invs_valid_objs has_cancel_send_rights_ep_cap)+
-- "Rotate"
apply (simp add: decode_cnode_invocation_def split_def
whenE_def unlessE_def)
apply (rule hoare_pre)
apply (wp get_cap_wp ensure_empty_stronger | simp)+
apply (rule_tac Q'="\<lambda>rv s. real_cte_at rv s \<and> real_cte_at x s
\<and> real_cte_at src_slot s
\<and> ex_cte_cap_wp_to is_cnode_cap rv s
\<and> ex_cte_cap_wp_to is_cnode_cap x s
\<and> invs s" in hoare_post_imp_R)
apply wp+
apply (clarsimp simp: cte_wp_at_caps_of_state
dest!: real_cte_at_cte del: impI)
apply (frule invs_valid_objs)
apply (simp add: update_cap_data_validI weak_derived_update_cap_data
caps_of_state_valid_cap)
subgoal by (auto,(clarsimp simp:is_cap_simps update_cap_data_def)+)[1](* Bad practice *)
apply wp+
apply clarsimp
apply (elim disjE exE conjE,
simp_all add: decode_cnode_invocation_def validE_R_def
split_def unlessE_whenE
split: list.split_asm
split del: if_split)
apply (wp | simp)+
done
end
lemma decode_cnode_inv_inv[wp]:
"\<lbrace>P\<rbrace> decode_cnode_invocation mi args cap cs \<lbrace>\<lambda>rv. P\<rbrace>"
unfolding decode_cnode_invocation_def
apply (simp add: split_def unlessE_def whenE_def
cong: if_cong split del: if_split)
apply (rule hoare_pre)
apply (wp hoare_drop_imps | simp | wpcw)+
done
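(* Support for the rec_del termination argument: not_recursive_cspaces is the (finite) set of
   slots whose contained cap does not point back at the slot itself via fst_cte_ptrs;
   state_cte_ptrs is the set of all cte slots. *)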
definition
not_recursive_cspaces :: "'z::state_ext state \<Rightarrow> cslot_ptr set"
where
"not_recursive_cspaces s \<equiv> {ptr. cte_wp_at (\<lambda>cap. ptr \<notin> fst_cte_ptrs cap) ptr s}"
definition
state_cte_ptrs :: "'z::state_ext state \<Rightarrow> cslot_ptr set"
where
"state_cte_ptrs s \<equiv> {ptr. cte_at ptr s}"
lemma fixed_length_finite:
"finite (UNIV :: 'a set) \<Longrightarrow> finite {x :: 'a list. length x = n}"
apply (induct n)
apply simp
apply (subgoal_tac "{x :: 'a list. length x = Suc n} = image (split Cons) (UNIV \<times> {x. length x = n})")
apply clarsimp
apply safe
apply (case_tac x, simp_all add: image_def)
done
lemma state_cte_ptrs_finite:
"finite (state_cte_ptrs s)"
apply (clarsimp simp add: state_cte_ptrs_def cte_at_cases Collect_disj_eq
Collect_conj_eq set_pair_UN tcb_cap_cases_def)
apply (clarsimp simp: well_formed_cnode_n_def fixed_length_finite)
done
lemma cte_wp_at_set_finite:
"finite {p. cte_wp_at (P p) p s}"
apply (rule finite_subset [OF _ state_cte_ptrs_finite[where s=s]])
apply (clarsimp simp: state_cte_ptrs_def elim!: cte_wp_at_weakenE)
done
lemma not_recursive_cspaces_finite:
"finite (not_recursive_cspaces s)"
unfolding not_recursive_cspaces_def
by (rule cte_wp_at_set_finite)
lemma set_cdt_not_recursive[wp]:
"\<lbrace>\<lambda>s. P (not_recursive_cspaces s)\<rbrace> set_cdt f \<lbrace>\<lambda>rv s. P (not_recursive_cspaces s)\<rbrace>"
apply (simp add: set_cdt_def, wp)
apply (simp add: not_recursive_cspaces_def)
done
lemma not_recursive_mdb[simp]:
"not_recursive_cspaces (is_original_cap_update f s) =
not_recursive_cspaces s"
"not_recursive_cspaces (cdt_update f' s) =
not_recursive_cspaces s"
by (simp add: not_recursive_cspaces_def)+
lemma set_cap_no_new_recursive:
"\<lbrace>\<lambda>s. x \<notin> not_recursive_cspaces s
\<and> cte_wp_at (\<lambda>cap. ptr \<notin> fst_cte_ptrs cap) ptr s\<rbrace>
set_cap cap ptr
\<lbrace>\<lambda>rv s. x \<notin> not_recursive_cspaces s\<rbrace>"
apply (simp add: not_recursive_cspaces_def)
apply (wp set_cap_cte_wp_at_neg)
apply (clarsimp simp: cte_wp_at_neg split: if_split)
done
lemma not_recursive_set_cap_shrinks:
"\<lbrace>\<lambda>s. card (not_recursive_cspaces s) \<le> n
\<and> cte_wp_at (\<lambda>cap. ptr \<notin> fst_cte_ptrs cap) ptr s
\<and> ptr \<in> fst_cte_ptrs cap\<rbrace>
set_cap cap ptr
\<lbrace>\<lambda>rv s. card (not_recursive_cspaces s) < n\<rbrace>"
apply (rule shrinks_proof[where x=ptr])
apply (rule not_recursive_cspaces_finite)
apply (wp set_cap_no_new_recursive)
apply simp
apply (simp add: not_recursive_cspaces_def)
apply (wp set_cap_cte_wp_at_neg)
apply (clarsimp elim!: cte_wp_at_weakenE)
apply (simp add: not_recursive_cspaces_def)
done
lemma not_recursive_set_cap_doesn't_grow:
"\<lbrace>\<lambda>s. card (not_recursive_cspaces s) < n
\<and> cte_wp_at (\<lambda>cap. ptr \<notin> fst_cte_ptrs cap) ptr s\<rbrace>
set_cap cap ptr
\<lbrace>\<lambda>rv s. card (not_recursive_cspaces s) < n\<rbrace>"
apply (rule doesn't_grow_proof)
apply (rule not_recursive_cspaces_finite)
apply (rule set_cap_no_new_recursive)
done
lemma final_cap_duplicate_obj_ref:
"\<lbrakk> fst (get_cap p1 s) = {(cap1, s)}; fst (get_cap p2 s) = {(cap2, s)}; is_final_cap' cap1 s;
x \<in> obj_refs cap1; p1 \<noteq> p2 \<rbrakk> \<Longrightarrow> x \<notin> obj_refs cap2"
apply (clarsimp simp: is_final_cap'_def)
apply (subgoal_tac "{p1, p2} \<subseteq> {(a, b)}")
apply simp
apply (drule sym[where s="Collect p" for p], simp)
apply blast
done
lemma final_cap_duplicate_irq:
"\<lbrakk> fst (get_cap p1 s) = {(cap1, s)}; fst (get_cap p2 s) = {(cap2, s)}; is_final_cap' cap1 s;
x \<in> cap_irqs cap1; p1 \<noteq> p2 \<rbrakk> \<Longrightarrow> x \<notin> cap_irqs cap2"
apply (clarsimp simp: is_final_cap'_def)
apply (subgoal_tac "{p1, p2} \<subseteq> {(a, b)}")
apply simp
apply (drule sym[where s="Collect p" for p], simp)
apply blast
done
lemma fst_cte_ptrs_link_obj_refs:
"x \<in> fst_cte_ptrs cap \<Longrightarrow> fst x \<in> obj_refs cap"
by (case_tac cap, simp_all add: fst_cte_ptrs_def)
lemma final_cap_duplicate_cte_ptr:
"\<lbrakk> fst (get_cap p s) = {(cap, s)}; fst (get_cap p' s) = {(cap', s)}; is_final_cap' cap s;
x \<in> fst_cte_ptrs cap; p \<noteq> p' \<rbrakk> \<Longrightarrow> x \<notin> fst_cte_ptrs cap'"
apply (drule(2) final_cap_duplicate_obj_ref)
apply (erule fst_cte_ptrs_link_obj_refs)
apply assumption
apply (clarsimp simp: fst_cte_ptrs_link_obj_refs)
done
lemma not_recursive_cspaces_more_update[iff]:
"not_recursive_cspaces (trans_state f s) = not_recursive_cspaces s"
by (simp add: not_recursive_cspaces_def)
lemma cap_swap_not_recursive:
"\<lbrace>\<lambda>s. card (not_recursive_cspaces s) \<le> n
\<and> cte_wp_at (\<lambda>cap. is_final_cap' cap s
\<and> p1 \<in> fst_cte_ptrs cap) p2 s
\<and> cte_wp_at (op = c1) p1 s
\<and> cte_wp_at (op = c2) p2 s
\<and> p1 \<noteq> p2\<rbrace>
cap_swap c1 p1 c2 p2
\<lbrace>\<lambda>rv s. card (not_recursive_cspaces s) < n\<rbrace>"
apply (cases "p1 = p2", simp_all)
apply (simp add: cap_swap_def set_cdt_def when_def)
apply (rule hoare_vcg_precond_imp)
apply (wp | simp)+
apply (rule not_recursive_set_cap_doesn't_grow)
apply (wp not_recursive_set_cap_shrinks set_cap_cte_wp_at' get_cap_wp hoare_vcg_disj_lift)
apply (clarsimp simp: cte_wp_at_def)
apply (frule(3) final_cap_duplicate_cte_ptr)
apply simp
apply (case_tac c2, simp_all add: fst_cte_ptrs_def)
done
lemma cap_swap_fd_not_recursive:
"\<lbrace>\<lambda>s. card (not_recursive_cspaces s) \<le> n
\<and> cte_wp_at (\<lambda>cap. is_final_cap' cap s
\<and> p1 \<in> fst_cte_ptrs cap) p2 s
\<and> p1 \<noteq> p2\<rbrace>
cap_swap_for_delete p1 p2
\<lbrace>\<lambda>rv s. card (not_recursive_cspaces s) < n\<rbrace>"
unfolding cap_swap_for_delete_def
by (wpsimp wp: cap_swap_not_recursive get_cap_wp)
lemma set_mrs_typ_at [wp]:
"\<lbrace>\<lambda>s. P (typ_at T p s)\<rbrace> set_mrs p' b m \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
apply (simp add: set_mrs_def bind_assoc set_object_def)
apply (cases b)
apply simp
apply wp
apply clarsimp
apply (drule get_tcb_SomeD)
apply (clarsimp simp: obj_at_def a_type_def split: if_split)
apply (clarsimp simp: zipWithM_x_mapM split_def
split del: if_split)
apply (wp mapM_wp')
apply clarsimp
apply (drule get_tcb_SomeD)
apply (clarsimp simp: obj_at_def a_type_def split: if_split)
done
lemma cte_wp_and:
"cte_wp_at (P and Q) c s = (cte_wp_at P c s \<and> cte_wp_at Q c s)"
by (auto simp: cte_wp_at_def)
lemma set_ep_cte_wp_at [wp]:
"\<lbrace>cte_wp_at P c\<rbrace> set_endpoint e p \<lbrace>\<lambda>_. cte_wp_at P c\<rbrace>"
apply (simp add: set_endpoint_def set_object_def get_object_def)
apply wp
apply (auto simp: cte_wp_at_cases split: if_split)
done
lemma set_ntfn_cte_wp_at [wp]:
"\<lbrace>cte_wp_at P c\<rbrace> set_notification e p \<lbrace>\<lambda>_. cte_wp_at P c\<rbrace>"
apply (simp add: set_notification_def set_object_def get_object_def)
apply wp
apply (auto simp: cte_wp_at_cases)
done
crunch cte_wp_at[wp]: get_mrs "cte_wp_at P c"
(wp: crunch_wps simp: crunch_simps)
lemmas cte_wp_and' = cte_wp_and [unfolded pred_conj_def]
lemma in_pspace_typ_at:
"r \<notin> dom (kheap s) = (\<forall>T. \<not> typ_at T r s)"
apply (simp add: dom_def)
apply (subst simp_thms(2)[symmetric])
apply (fastforce simp: obj_at_def)
done
lemma prepare_thread_delete_not_recursive:
"\<lbrace>\<lambda>s. P (not_recursive_cspaces s)\<rbrace>
prepare_thread_delete t
\<lbrace>\<lambda>rv s. P (not_recursive_cspaces s)\<rbrace>"
apply (simp add: not_recursive_cspaces_def cte_wp_at_caps_of_state)
apply (wp prepare_thread_delete_caps_of_state)
done
lemma suspend_not_recursive:
"\<lbrace>\<lambda>s. P (not_recursive_cspaces s)\<rbrace>
IpcCancel_A.suspend t
\<lbrace>\<lambda>rv s. P (not_recursive_cspaces s)\<rbrace>"
apply (simp add: not_recursive_cspaces_def cte_wp_at_caps_of_state)
apply (wp suspend_caps_of_state)
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (erule rsubst[where P=P])
apply (intro set_eqI iffI)
apply (clarsimp simp: fst_cte_ptrs_def)
apply clarsimp
apply (clarsimp simp: fst_cte_ptrs_def can_fast_finalise_def
split: cap.split_asm)
done
lemma unbind_notification_not_recursive:
"\<lbrace>\<lambda>s. P (not_recursive_cspaces s)\<rbrace>
unbind_notification tcb
\<lbrace>\<lambda>rv s. P (not_recursive_cspaces s)\<rbrace>"
apply (simp add: not_recursive_cspaces_def cte_wp_at_caps_of_state)
apply (wp unbind_notification_caps_of_state)
done
lemma get_cap_det2:
"(r, s') \<in> fst (get_cap p s) \<Longrightarrow> get_cap p s = ({(r, s)}, False) \<and> s' = s"
apply (rule conjI)
apply (erule get_cap_det)
apply (erule use_valid [OF _ get_cap_inv])
apply simp
done
lemma set_zombie_not_recursive:
"\<lbrace>\<lambda>s. cte_wp_at (\<lambda>c. fst_cte_ptrs c = fst_cte_ptrs (cap.Zombie p zb n)) slot s
\<and> P (not_recursive_cspaces s)\<rbrace>
set_cap (cap.Zombie p zb n) slot
\<lbrace>\<lambda>rv s. P (not_recursive_cspaces s)\<rbrace>"
apply (simp add: not_recursive_cspaces_def)
apply (rule set_preserved_proof[where P=P])
apply simp_all
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift set_cap_cte_wp_at)
apply (fastforce simp: cte_wp_at_def fst_cte_ptrs_def)
apply (simp only: cte_wp_at_neg imp_conv_disj de_Morgan_conj simp_thms)
apply (wp hoare_vcg_ex_lift valid_cte_at_neg_typ[OF set_cap_typ_at]
hoare_vcg_disj_lift set_cap_cte_wp_at)
apply (fastforce simp: fst_cte_ptrs_def cte_wp_at_def)
done
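(* The well-founded relation used to prove rec_del terminates: rdcall_finalise_ord_lift lifts a
   relation on slots to rec_del calls via mlex, and rec_del_recset combines, via wf_sum on
   exposed_rdcall, an order on the cap at the slot with the size of not_recursive_cspaces. *)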
definition
rdcall_finalise_ord_lift :: "((cslot_ptr \<times> 'z state) \<times> (cslot_ptr \<times> 'z state)) set
\<Rightarrow> ((rec_del_call \<times> 'z state) \<times> (rec_del_call \<times> 'z state)) set"
where
"rdcall_finalise_ord_lift S \<equiv>
(\<lambda>(x, s). case x of CTEDeleteCall a b \<Rightarrow> 3 | FinaliseSlotCall a b \<Rightarrow> 2
| ReduceZombieCall cap a b \<Rightarrow> 1)
<*mlex*>
((map_prod (\<lambda>(x, s). (FinaliseSlotCall x True, s)) (\<lambda>(x, s). (FinaliseSlotCall x True, s)) ` S)
\<union> (map_prod (\<lambda>(x, s). (FinaliseSlotCall x False, s)) (\<lambda>(x, s). (FinaliseSlotCall x False, s)) ` S))"
lemma wf_rdcall_finalise_ord_lift:
"wf S \<Longrightarrow> wf (rdcall_finalise_ord_lift S)"
unfolding rdcall_finalise_ord_lift_def
by (auto intro!: wf_mlex wf_Un wf_map_prod_image inj_onI)
definition
rec_del_recset :: "((rec_del_call \<times> 'z::state_ext state) \<times> (rec_del_call \<times> 'z::state_ext state)) set"
where
"rec_del_recset \<equiv>
wf_sum (exposed_rdcall \<circ> fst)
(rdcall_finalise_ord_lift (inv_image
(less_than <*lex*> less_than)
(\<lambda>(x, s). case caps_of_state s x of
Some cap.NullCap \<Rightarrow> (0, 0)
| Some (cap.Zombie p zb n) \<Rightarrow>
(if fst_cte_ptrs (cap.Zombie p zb n) = {x} then 1 else 2, n)
| _ \<Rightarrow> (3, 0))))
(rdcall_finalise_ord_lift (measure (\<lambda>(x, s). card (not_recursive_cspaces s))))"
lemma rec_del_recset_wf: "wf rec_del_recset"
unfolding rec_del_recset_def
by (intro wf_sum_wf wf_rdcall_finalise_ord_lift wf_measure
wf_inv_image wf_lex_prod wf_less_than)
lemma in_get_cap_cte_wp_at:
"(rv, s') \<in> fst (get_cap p s) = (s = s' \<and> cte_wp_at (op = rv) p s)"
apply (rule iffI)
apply (clarsimp dest!: get_cap_det2 simp: cte_wp_at_def)
apply (clarsimp simp: cte_wp_at_def)
done
lemma fst_cte_ptrs_first_cte_of:
"fst_cte_ptrs (cap.Zombie ptr zb n) = {first_cslot_of (cap.Zombie ptr zb n)}"
by (simp add: fst_cte_ptrs_def tcb_cnode_index_def)
lemma final_cap_still_at:
"\<lbrace>\<lambda>s. cte_wp_at (\<lambda>c. obj_refs cap = obj_refs c \<and> cap_irqs cap = cap_irqs c
\<and> P cap (is_final_cap' c s)) ptr s\<rbrace>
set_cap cap ptr
\<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>c. P c (is_final_cap' c s)) ptr s\<rbrace>"
apply (simp add: is_final_cap'_def2 cte_wp_at_caps_of_state)
apply wp
apply (clarsimp elim!: rsubst[where P="P cap"])
apply (intro ext arg_cong[where f=Ex] arg_cong[where f=All])
apply (case_tac "(aa, ba) = ptr", simp_all add: obj_irq_refs_def)
done
lemma suspend_thread_cap:
"\<lbrace>\<lambda>s. caps_of_state s x = Some (cap.ThreadCap p)\<rbrace>
IpcCancel_A.suspend t
\<lbrace>\<lambda>rv s. caps_of_state s x = Some (cap.ThreadCap p)\<rbrace>"
apply (rule hoare_chain)
apply (rule suspend_cte_wp_at_preserved
[where p=x and P="op = (cap.ThreadCap p)"])
apply (clarsimp simp add: can_fast_finalise_def)
apply (simp add: cte_wp_at_caps_of_state)+
done
lemma emptyable_irq_state_independent[intro!, simp]:
"emptyable x (s\<lparr>machine_state := machine_state s\<lparr>irq_state := f (irq_state (machine_state s))\<rparr>\<rparr>)
= emptyable x s"
by (auto simp: emptyable_def)
lemma not_recursive_cspaces_irq_state_independent[intro!, simp]:
"not_recursive_cspaces (s \<lparr> machine_state := machine_state s \<lparr> irq_state := f (irq_state (machine_state s)) \<rparr> \<rparr>)
= not_recursive_cspaces s"
by (simp add: not_recursive_cspaces_def)
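(* rec_del terminates: the function package's termination goal is discharged with
   rec_del_recset, yielding unconditional simp and induction rules (rec_del_simps,
   rec_del_induct). *)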
context CNodeInv_AI begin
lemma rec_del_termination:
"All (rec_del_dom :: rec_del_call \<times> 'state_ext state \<Rightarrow> bool)"
apply (rule rec_del.termination,
rule rec_del_recset_wf,
simp_all add: rec_del_recset_def wf_sum_def
in_monad is_final_cap_def
is_zombie_def rdcall_finalise_ord_lift_def
mlex_prod_def,
drule in_preempt)
apply (case_tac exposed, simp_all)
apply (rule disjI1, rule map_prod_split_imageI)
apply (simp only: trans_state_update'[symmetric])
apply (clarsimp)
apply (case_tac aa, simp_all add: fail_def rec_del.psimps)[1]
apply (rename_tac word option nat)
apply (case_tac nat, simp_all)[1]
apply (clarsimp simp: in_monad rec_del.psimps)
apply (clarsimp simp: in_monad in_get_cap_cte_wp_at
cte_wp_at_caps_of_state rec_del.psimps
split: if_split_asm)
apply (erule use_valid [OF _ set_cap_caps_of_state])+
apply (simp add: fst_cte_ptrs_first_cte_of cong: if_cong)
apply (case_tac rv, simp_all)[1]
apply (clarsimp simp: in_monad fst_cte_ptrs_first_cte_of)
apply (case_tac new_cap, simp_all add: is_cap_simps)[1]
apply (case_tac rv, simp_all)[1]
apply (clarsimp simp: fst_cte_ptrs_first_cte_of)
apply (case_tac rv, simp_all)[1]
apply (clarsimp simp: fst_cte_ptrs_first_cte_of in_monad)
apply (rule disjI2, rule map_prod_split_imageI)
apply clarsimp
apply (case_tac aa, simp_all add: fail_def rec_del.psimps)[1]
apply (rename_tac word option nat)
apply (case_tac nat, simp_all)
apply (simp only: trans_state_update'[symmetric] not_recursive_cspaces_more_update)
apply (clarsimp simp: in_monad prod_eqI rec_del.psimps)
apply (erule use_valid [OF _ cap_swap_fd_not_recursive])
apply (frule use_valid [OF _ get_cap_cte_wp_at], simp)
apply (drule in_inv_by_hoareD [OF get_cap_inv])
apply clarsimp
apply (erule use_valid [OF _ hoare_vcg_conj_lift [OF set_zombie_not_recursive
final_cap_still_at]])
apply (frule use_valid [OF _ finalise_cap_cases])
apply (fastforce simp add: cte_wp_at_eq_simp)
apply clarsimp
apply (case_tac rv, simp_all add: fst_cte_ptrs_def)
apply (clarsimp simp: in_monad cte_wp_at_caps_of_state
fst_cte_ptrs_def
split: if_split_asm)
apply (clarsimp simp: in_monad cte_wp_at_caps_of_state
fst_cte_ptrs_def
split: if_split_asm)
apply (frule(1) use_valid [OF _ unbind_notification_caps_of_state],
frule(1) use_valid [OF _ suspend_thread_cap],
frule(1) use_valid [OF _ prepare_thread_delete_thread_cap])
apply clarsimp
apply (erule use_valid [OF _ prepare_thread_delete_not_recursive])
apply (erule use_valid [OF _ suspend_not_recursive])
apply (erule use_valid [OF _ unbind_notification_not_recursive])
apply simp
apply (clarsimp simp: in_monad cte_wp_at_caps_of_state
fst_cte_ptrs_def zombie_cte_bits_def
tcb_cnode_index_def
split: option.split_asm)
done
lemma rec_del_dom: "\<And> (p :: rec_del_call \<times> 'state_ext state). rec_del_dom p"
using rec_del_termination by blast
lemmas rec_del_simps = rec_del.psimps[OF rec_del_dom]
lemmas rec_del_simps_ext =
rec_del_simps [THEN ext[where f="rec_del args" for args]]
lemmas rec_del_fails = spec_validE_fail rec_del_simps_ext(5-)
declare assertE_wp[wp]
declare unlessE_wp[wp_split]
lemma without_preemption_wp [wp_split]:
"\<lbrace>P\<rbrace> f \<lbrace>Q\<rbrace> \<Longrightarrow> \<lbrace>P\<rbrace> without_preemption f \<lbrace>Q\<rbrace>,\<lbrace>E\<rbrace>"
by simp
lemmas rec_del_induct = rec_del.pinduct[OF rec_del_dom]
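(* Generic preservation rule: any predicate preserved by cap_swap_for_delete, set_cap,
   empty_slot, finalise_cap and preemption_point is preserved by rec_del. *)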
lemma rec_del_preservation':
fixes s :: "'state_ext state"
fixes P :: "'state_ext state \<Rightarrow> bool"
assumes wp:
"\<And>sl1 sl2. \<lbrace>P\<rbrace> cap_swap_for_delete sl1 sl2 \<lbrace>\<lambda>rv. P\<rbrace>"
"\<And>sl cap. \<lbrace>P\<rbrace> set_cap sl cap \<lbrace>\<lambda>rv. P\<rbrace>"
"\<And>sl opt. \<lbrace>P\<rbrace> empty_slot sl opt \<lbrace>\<lambda>rv. P\<rbrace>"
"\<And>cap fin. \<lbrace>P\<rbrace> finalise_cap cap fin \<lbrace>\<lambda>rv. P\<rbrace>"
"\<And>cap fin. \<lbrace>P\<rbrace> preemption_point \<lbrace>\<lambda>rv. P\<rbrace>"
shows
"s \<turnstile> \<lbrace>P\<rbrace> rec_del call \<lbrace>\<lambda>_. P\<rbrace>, \<lbrace>\<lambda>_. P\<rbrace>"
proof (induct rule: rec_del_induct)
case (1 slot exposed s)
show ?case
apply (subst rec_del_simps)
apply (simp only: split_def)
apply wp
apply (wp wp)[1]
apply (rule spec_strengthen_postE)
apply (rule "1.hyps")
apply simp
done
next
case (2 slot exposed s)
show ?case
apply (subst rec_del_simps)
apply (simp only: split_def)
apply (wp wp "2.hyps")
apply (wp wp)[1]
apply (simp only: simp_thms)
apply (rule "2.hyps", assumption+)
apply (wp wp hoare_drop_imps | simp add: is_final_cap_def)+
done
next
case 3
show ?case
apply (simp add: rec_del_simps | wp wp)+
done
next
case (4 ptr bits n slot s)
show ?case
apply (subst rec_del_simps)
apply (wp wp)
apply (wp hoare_drop_imps)[1]
apply (simp only: simp_thms)
apply (rule "4.hyps", assumption+)
apply wp
done
qed (auto simp: rec_del_dom rec_del_fails)
lemmas rec_del_preservation[crunch_rules] =
validE_valid [OF use_spec(2) [OF rec_del_preservation']]
end
crunch typ_at: cap_swap_for_delete "\<lambda>s. P (typ_at T p s)"
lemma cap_swap_valid_cap:
"\<lbrace>valid_cap c\<rbrace> cap_swap_for_delete x y \<lbrace>\<lambda>_. valid_cap c\<rbrace>"
apply(simp add: cap_swap_for_delete_def)
apply(wp cap_swap_valid_cap)
apply(simp)
done
lemma cap_swap_cte_at:
"\<lbrace>cte_at p\<rbrace> cap_swap_for_delete x y \<lbrace>\<lambda>_. cte_at p\<rbrace>"
apply(simp add: cap_swap_for_delete_def)
apply(wp cap_swap_cte_at)
apply(simp)
done
context CNodeInv_AI begin
crunch typ_at: rec_del "\<lambda>s::'state_ext state. P (typ_at T p s)"
(ignore: preemption_point wp: preemption_point_inv)
lemma rec_del_cte_at:
"\<And>c call. \<lbrace>cte_at c :: 'state_ext state \<Rightarrow> bool\<rbrace> rec_del call \<lbrace>\<lambda>_. cte_at c\<rbrace>"
by (wp valid_cte_at_typ rec_del_typ_at)
end
lemma dom_valid_cap[wp]:
"\<lbrace>valid_cap c\<rbrace> do_machine_op f \<lbrace>\<lambda>_. valid_cap c\<rbrace>"
apply (simp add: do_machine_op_def split_def)
apply (wp select_wp)
apply simp
done
lemma dom_cte_at:
"\<lbrace>cte_at c\<rbrace> do_machine_op f \<lbrace>\<lambda>_. cte_at c\<rbrace>"
apply (simp add: do_machine_op_def split_def)
apply (wp select_wp)
apply (simp add: cte_at_cases)
done
lemma cnode_to_zombie_valid:
"\<lbrakk> s \<turnstile> cap.CNodeCap oref bits guard \<rbrakk>
\<Longrightarrow> s \<turnstile> cap.Zombie oref (Some bits) (2 ^ bits)"
by (clarsimp simp: valid_cap_def cap_table_at_cte_at
word_unat_power cap_aligned_def)
lemma tcb_to_zombie_valid:
"\<lbrakk> s \<turnstile> cap.ThreadCap t \<rbrakk>
\<Longrightarrow> s \<turnstile> cap.Zombie t None 5"
apply (simp add: valid_cap_def)
apply (simp add: cap_aligned_def)
done
lemmas do_machine_op_cte_at [wp] = dom_cte_at
declare set_cap_cte_at[wp]
set_cap_valid_cap [wp]
lemma set_original_valid_pspace:
"\<lbrace>valid_pspace\<rbrace> set_original p v \<lbrace>\<lambda>rv. valid_pspace\<rbrace>"
apply wp
apply (erule valid_pspace_eqI)
apply simp
done
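(* mdb_swap_abs_invs extends mdb_swap_abs with the caps held at the two swapped slots and their
   weak_derived replacements; its lemmas transfer the components of valid_mdb across the swap. *)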
locale mdb_swap_abs_invs = mdb_swap_abs +
fixes cs cs' cap cap' scap dcap
defines "cs \<equiv> caps_of_state s"
defines "cs' \<equiv> cs (src \<mapsto> dcap, dest \<mapsto> scap)"
assumes cap: "cs src = Some cap"
assumes cap': "cs dest = Some cap'"
assumes sder: "weak_derived scap cap"
assumes dder: "weak_derived dcap cap'"
lemma obj_ref_untyped_empty [simp]:
"obj_refs c \<inter> untyped_range c = {}"
by (cases c, auto)
lemma weak_derived_Reply_eq:
"\<lbrakk> weak_derived c c'; c = ReplyCap t m \<rbrakk> \<Longrightarrow> c' = ReplyCap t m"
"\<lbrakk> weak_derived c c'; c' = ReplyCap t m \<rbrakk> \<Longrightarrow> c = ReplyCap t m"
by (auto simp: weak_derived_def copy_of_def
same_object_as_def is_cap_simps
split: if_split_asm cap.split_asm)
context mdb_swap_abs_invs begin
lemmas src_ranges [simp] = weak_derived_ranges [OF sder]
lemmas dest_ranges [simp] = weak_derived_ranges [OF dder]
lemma no_mloop_n:
"no_mloop n"
by (simp add: no_mloop_def parency)
lemma mdb_cte_n:
"mdb_cte_at (\<lambda>p. \<exists>c. cs' p = Some c \<and> cap.NullCap \<noteq> c) n"
proof -
from valid_mdb
have "mdb_cte_at (\<lambda>p. \<exists>c. cs p = Some c \<and> cap.NullCap \<noteq> c) m"
by (simp add: cs_def m valid_mdb_def2)
thus ?thesis using cap cap' sder dder
apply (clarsimp simp add: mdb_cte_at_def)
apply (cases src, cases dest)
apply (simp add: n_def n'_def cs'_def split: if_split_asm)
apply fastforce
apply fastforce
apply fastforce
apply fastforce
apply fastforce
apply fastforce
apply fastforce
done
qed
lemma descendants_no_loop [simp]:
"x \<notin> descendants_of x m"
by (simp add: descendants_of_def)
lemma untyped_mdb_n:
"untyped_mdb n cs'"
proof -
from valid_mdb
have "untyped_mdb m cs"
by (simp add: cs_def m valid_mdb_def2)
thus ?thesis using cap cap'
by (simp add: untyped_mdb_def cs'_def descendants_of_def parency
s_d_swap_def
del: split_paired_All)
qed
lemma descendants_inc_n:
shows "descendants_inc n cs'"
proof -
from valid_mdb
have "descendants_inc m cs"
by (simp add:cs_def m valid_mdb_def2)
thus ?thesis using cap cap' sder dder
apply (simp add:descendants_inc_def descendants_of_def del: split_paired_All)
apply (intro impI allI)
apply (simp add:parency cs'_def del:split_paired_All)
apply (drule spec)+
apply (erule(1) impE)
apply (simp add: weak_derived_cap_range)
apply (intro conjI impI)
apply (simp add:s_d_swap_other)+
done
qed
lemma untyped_inc_n:
assumes untyped_eq:"(is_untyped_cap cap \<Longrightarrow> scap = cap)" "(is_untyped_cap cap' \<Longrightarrow> dcap = cap')"
shows "untyped_inc n cs'"
proof -
from valid_mdb
have "untyped_inc m cs"
by (simp add: cs_def m valid_mdb_def2)
thus ?thesis using cap cap'
apply (simp add: untyped_inc_def cs'_def descendants_of_def parency s_d_swap_def
del: split_paired_All)
apply (intro allI)
apply (intro conjI)
apply (intro impI allI)
apply (intro conjI)
apply (drule_tac x = p in spec)
apply (drule_tac x = p' in spec)
apply (clarsimp simp:untyped_eq)
apply (intro impI allI)
apply (drule_tac x = p' in spec)
apply (drule_tac x = dest in spec)
apply (clarsimp simp:untyped_eq)
apply (intro impI)
apply (intro conjI)
apply (intro impI allI)
apply (drule_tac x = src in spec)
apply (intro conjI)
apply (drule_tac x = dest in spec)
apply (clarsimp simp:untyped_eq)
apply (drule_tac x = p' in spec)
apply (clarsimp simp:untyped_eq)
apply (intro impI allI)
apply (intro conjI)
apply (drule_tac x = dest in spec)
apply (drule_tac x = p in spec)
apply (clarsimp simp:untyped_eq)
apply (drule_tac x = src in spec)
apply (drule_tac x = p in spec)
apply (clarsimp simp:untyped_eq)
done
qed
lemmas src_replies[simp] = weak_derived_replies [OF sder]
lemmas dest_replies[simp] = weak_derived_replies [OF dder]
lemma reply_caps_mdb_n:
"reply_caps_mdb n cs'"
proof -
from valid_mdb
have "reply_caps_mdb m cs"
by (simp add: cs_def m valid_mdb_def2 reply_mdb_def)
thus ?thesis using cap cap' unfolding reply_caps_mdb_def cs'_def n_def n'_def
apply (intro allI impI)
apply (simp split: if_split_asm del: split_paired_All split_paired_Ex)
apply (elim allE)
apply (drule weak_derived_Reply_eq(1) [OF sder], simp del: split_paired_Ex)
apply (erule(1) impE)
apply (intro conjI impI)
apply (clarsimp elim!: weak_derived_Reply_eq(2) [OF dder])
apply (erule exEI, clarsimp)
apply (elim allE)
apply (drule weak_derived_Reply_eq(1) [OF dder], simp del: split_paired_Ex)
apply (erule(1) impE)
apply (intro conjI impI)
apply (clarsimp elim!: weak_derived_Reply_eq(2) [OF sder])
apply (erule exEI, clarsimp)
apply (erule_tac x=ptr in allE, erule_tac x=t in allE)
apply (erule(1) impE)
apply (intro conjI impI)
apply (clarsimp elim!: weak_derived_Reply_eq(2) [OF dder])
apply (clarsimp elim!: weak_derived_Reply_eq(2) [OF sder])
apply fastforce
done
qed
lemma reply_masters_mdb_n:
"reply_masters_mdb n cs'"
proof -
from valid_mdb
have r: "reply_masters_mdb m cs"
by (simp add: cs_def m valid_mdb_def2 reply_mdb_def)
have n_None:
"\<And>t. scap = cap.ReplyCap t True \<Longrightarrow> n dest = None"
"\<And>t. dcap = cap.ReplyCap t True \<Longrightarrow> n src = None"
using r cap cap' unfolding reply_masters_mdb_def n_def
by (drule_tac weak_derived_Reply_eq(1) [OF sder]
weak_derived_Reply_eq(1) [OF dder],
fastforce simp: n'_def simp del: split_paired_All)+
show ?thesis unfolding reply_masters_mdb_def cs'_def using cap cap' r
apply (intro allI impI)
apply (simp add: n_None descendants s_d_swap_def
split: if_split_asm del: split_paired_All)
apply (unfold reply_masters_mdb_def)[1]
apply (drule weak_derived_Reply_eq(1) [OF sder], simp del: split_paired_All)
apply (elim allE, erule(1) impE, elim conjE)
apply (intro impI conjI)
apply (drule(1) bspec, rule weak_derived_Reply_eq(2) [OF dder], simp)
apply fastforce
apply (unfold reply_masters_mdb_def)[1]
apply (drule weak_derived_Reply_eq(1) [OF dder], simp del: split_paired_All)
apply (elim allE, erule(1) impE, elim conjE)
apply (intro impI conjI)
apply (drule(1) bspec, rule weak_derived_Reply_eq(2) [OF sder], simp)
apply fastforce
apply (unfold reply_masters_mdb_def)[1]
apply (erule_tac x=ptr in allE, erule_tac x=t in allE)
apply (erule(1) impE, erule conjE, simp add: n_def n'_def)
apply (intro impI conjI)
apply (rule weak_derived_Reply_eq(2) [OF dder]
weak_derived_Reply_eq(2) [OF sder],
simp)+
apply fastforce
done
qed
lemma reply_mdb_n:
"reply_mdb n cs'"
by (simp add: reply_mdb_def reply_masters_mdb_n reply_caps_mdb_n)
end
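(* The cdt update performed by cap_swap, packaged as a function so the valid_mdb proof below can
   fold it and appeal to the mdb_swap_abs_invs lemmas. *)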
definition
"swap_mdb m src dest \<equiv>
let n' = (\<lambda>n. if m n = Some src then Some dest
else if m n = Some dest then Some src
else m n) in
n' (src := n' dest, dest := n' src)"
lemma cap_swap_mdb [wp]:
"\<lbrace>valid_mdb and
cte_wp_at (weak_derived c) a and
cte_wp_at (\<lambda>cc. is_untyped_cap cc \<longrightarrow> cc = c) a and
cte_wp_at (weak_derived c') b and K (a \<noteq> b) and cte_wp_at (\<lambda>cc. is_untyped_cap cc \<longrightarrow> cc = c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>_. valid_mdb\<rbrace>"
apply (simp add: valid_mdb_def2 cap_swap_def set_cdt_def bind_assoc set_original_def)
apply (wp | simp del: fun_upd_apply split del: if_split)+
apply (fold swap_mdb_def [simplified Let_def])
apply (wp set_cap_caps_of_state2 get_cap_wp)+
apply (clarsimp simp: cte_wp_at_caps_of_state simp del: fun_upd_apply)
apply (subgoal_tac "mdb_swap_abs_invs (cdt s) a b s cap capb c c'")
prefer 2
apply (rule mdb_swap_abs_invs.intro)
apply (rule mdb_swap_abs.intro)
apply (simp add: valid_mdb_def2)
apply (fastforce simp: cte_wp_at_caps_of_state)
apply (fastforce simp: cte_wp_at_caps_of_state)
apply (rule refl)
apply assumption
apply (erule (3) mdb_swap_abs_invs_axioms.intro)
apply (unfold swap_mdb_def Let_def)
apply (simp add: mdb_swap_abs_invs.no_mloop_n
mdb_swap_abs_invs.untyped_mdb_n
mdb_swap_abs_invs.mdb_cte_n
mdb_swap_abs_invs.reply_mdb_n
del: fun_upd_apply
split del: if_split)
apply (rule conjI)
apply (erule mdb_swap_abs_invs.descendants_inc_n)
apply (rule conjI)
apply (erule mdb_swap_abs_invs.untyped_inc_n)
apply (clarsimp simp:cte_wp_at_caps_of_state)+
apply (rule conjI)
apply (simp add: ut_revocable_def weak_derived_ranges del: split_paired_All)
apply (rule conjI)
apply (simp add: irq_revocable_def del: split_paired_All)
apply (intro conjI impI allI)
apply (simp del: split_paired_All)
apply (simp del: split_paired_All)
apply (simp add: reply_master_revocable_def weak_derived_replies
del: split_paired_All)
done
lemma set_cdt_valid_objs[wp]:
"\<lbrace>valid_objs\<rbrace> set_cdt m \<lbrace>\<lambda>rv. valid_objs\<rbrace>"
by (simp add: set_cdt_def | wp)+
lemma cap_swap_valid_objs[wp]:
"\<lbrace>valid_objs and valid_cap c and valid_cap c'
and tcb_cap_valid c b and tcb_cap_valid c' a\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. valid_objs\<rbrace>"
apply (simp add: cap_swap_def)
apply (wp set_cap_valid_objs
| simp split del: if_split)+
done
crunch aligned[wp]: cap_swap "pspace_aligned"
crunch disctinct[wp]: cap_swap "pspace_distinct"
lemma cap_swap_iflive[wp]:
"\<lbrace>if_live_then_nonz_cap and cte_wp_at (\<lambda>x. zobj_refs x = zobj_refs c) a
and cte_wp_at (\<lambda>x. zobj_refs x = zobj_refs c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. if_live_then_nonz_cap\<rbrace>"
apply (simp add: cap_swap_def)
apply (wp | simp split del: if_split)+
apply (rule hoare_post_imp)
apply (simp only: if_live_then_nonz_cap_def ex_nonz_cap_to_def
cte_wp_at_caps_of_state imp_conv_disj)
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift hoare_vcg_ex_lift
get_cap_wp)+
apply (clarsimp simp add: cte_wp_at_caps_of_state)
apply (frule(1) if_live_then_nonz_capD)
apply assumption
apply (clarsimp simp: ex_nonz_cap_to_def cte_wp_at_caps_of_state)
apply (subst split_paired_Ex[symmetric])
apply (rule_tac x="if (aa, ba) = a then b else if (aa, ba) = b then a else (aa, ba)"
in exI)
apply (clarsimp | rule conjI)+
done
lemma cap_swap_fd_iflive[wp]:
"\<lbrace>if_live_then_nonz_cap\<rbrace>
cap_swap_for_delete a b
\<lbrace>\<lambda>rv. if_live_then_nonz_cap\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (clarsimp simp: cte_wp_at_caps_of_state)
done
lemma set_cdt_caps_of[wp]:
"\<lbrace>\<lambda>s. P (caps_of_state s)\<rbrace> set_cdt m \<lbrace>\<lambda>rv s. P (caps_of_state s)\<rbrace>"
by wp
lemma cap_swap_ex_cte_cap[wp]:
"\<lbrace>ex_cte_cap_wp_to P p
and cte_wp_at (\<lambda>x. cte_refs x = cte_refs c
\<and> ((\<exists>y. cte_refs x y \<noteq> {}) \<longrightarrow> P x = P c)) a
and cte_wp_at (\<lambda>x. cte_refs x = cte_refs c'
\<and> ((\<exists>y. cte_refs x y \<noteq> {}) \<longrightarrow> P x = P c')) b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. ex_cte_cap_wp_to P p\<rbrace>"
apply (simp add: cap_swap_def ex_cte_cap_wp_to_def
cte_wp_at_caps_of_state
del: split_paired_Ex)
apply (wp get_cap_wp | simp split del: if_split del: split_paired_Ex)+
apply (simp del: split_paired_Ex | intro allI impI | erule conjE)+
apply (erule exfEI [where f="id ( a := b, b := a )"])
apply (clarsimp simp: cte_wp_at_caps_of_state | rule conjI)+
done
lemma cap_swap_fd_ex_cte_cap[wp]:
"\<lbrace>ex_cte_cap_wp_to P p\<rbrace> cap_swap_for_delete a b \<lbrace>\<lambda>rv. ex_cte_cap_wp_to P p\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (clarsimp simp: cte_wp_at_caps_of_state)
done
lemma cap_swap_caps_of_state[wp]:
"\<lbrace>\<lambda>s. P ((caps_of_state s) ( a := Some c', b := Some c ))\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv s. P (caps_of_state s)\<rbrace>"
apply (simp add: cap_swap_def)
apply (wp get_cap_wp | simp del: fun_upd_apply split del: if_split)+
done
lemma cap_swap_fd_caps_of_state[wp]:
"\<lbrace>\<lambda>s. P ((caps_of_state s) \<circ> (id ( a := b, b := a )))\<rbrace>
cap_swap_for_delete a b
\<lbrace>\<lambda>rv s. P (caps_of_state s)\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (cases "a = b")
apply (simp add: fun_upd_def id_def[symmetric] cong: if_cong)
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (erule rsubst[where P=P])
apply (clarsimp intro!: ext)
done
lemma cap_irqs_appropriateness:
"cap_irqs cap = cap_irqs cap'
\<Longrightarrow> \<forall>cp. appropriate_cte_cap cp cap = appropriate_cte_cap cp cap'"
by (simp add: appropriate_cte_cap_irqs)
lemma cap_swap_ifunsafe[wp]:
"\<lbrace>if_unsafe_then_cap
and ex_cte_cap_wp_to (appropriate_cte_cap c') a
and ex_cte_cap_wp_to (appropriate_cte_cap c) b
and cte_wp_at (\<lambda>x. cte_refs x = cte_refs c
\<and> ((\<exists>y. cte_refs x y \<noteq> {}) \<longrightarrow> cap_irqs x = cap_irqs c)) a
and cte_wp_at (\<lambda>x. cte_refs x = cte_refs c'
\<and> ((\<exists>y. cte_refs x y \<noteq> {}) \<longrightarrow> cap_irqs x = cap_irqs c')) b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv s. if_unsafe_then_cap s\<rbrace>"
apply (simp only: if_unsafe_then_cap_def cte_wp_at_caps_of_state
imp_conv_disj not_ex)
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift)
apply (clarsimp split del: if_split del: disjCI intro!: disjCI2)
apply (intro conjI)
apply (clarsimp split: if_split_asm)
apply (drule(1) if_unsafe_then_capD[OF caps_of_state_cteD])
apply clarsimp
apply (erule ex_cte_cap_wp_to_weakenE)
apply clarsimp
apply (auto dest!: cap_irqs_appropriateness elim!: cte_wp_at_weakenE)
done
lemma cap_irqs_appropriate_strengthen:
"ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) x s
\<longrightarrow> ex_cte_cap_wp_to (appropriate_cte_cap cap) x s"
by (auto simp: appropriate_cte_cap_def
elim!: ex_cte_cap_wp_to_weakenE
split: cap.split)
lemma cap_swap_fd_ifunsafe[wp]:
"\<lbrace>if_unsafe_then_cap
and ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) a
and ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) b\<rbrace>
cap_swap_for_delete a b
\<lbrace>\<lambda>rv s. if_unsafe_then_cap s\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (clarsimp simp: cte_wp_at_caps_of_state
| strengthen cap_irqs_appropriate_strengthen)+
done
lemma cap_swap_zombies[wp]:
"\<lbrace>zombies_final and cte_wp_at (\<lambda>x. is_zombie x = is_zombie c
\<and> obj_refs x = obj_refs c
\<and> cap_irqs x = cap_irqs c) a
and cte_wp_at (\<lambda>x. is_zombie x = is_zombie c' \<and> obj_refs x = obj_refs c'
\<and> cap_irqs x = cap_irqs c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. zombies_final\<rbrace>"
apply (simp only: zombies_final_def final_cap_at_eq
cte_wp_at_caps_of_state simp_thms pred_conj_def)
apply wp
apply (elim conjE)
apply (erule allfEI[where f="id ( a := b, b := a )"])
apply (intro impI)
apply (drule mp)
apply (clarsimp split: if_split_asm)
apply (elim exE conjE, simp only: simp_thms option.simps)
apply (rule conjI)
apply (clarsimp simp: is_cap_simps obj_irq_refs_def)
apply (erule allfEI[where f="id ( a := b, b := a )"])
apply (intro impI, elim exE conjE, simp only: simp_thms option.simps)
apply (clarsimp simp: obj_irq_refs_Int split: if_split_asm)
done
lemma cap_swap_fd_zombies[wp]:
"\<lbrace>zombies_final\<rbrace>
cap_swap_for_delete p p'
\<lbrace>\<lambda>rv. zombies_final\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (clarsimp simp: cte_wp_at_caps_of_state)
done
lemma cap_swap_pred_tcb_at[wp]:
"\<lbrace>pred_tcb_at proj P t\<rbrace> cap_swap c sl c' sl' \<lbrace>\<lambda>rv. pred_tcb_at proj P t\<rbrace>"
unfolding cap_swap_def by (wp | simp)+
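(* Reply-cap invariants across cap_swap: swapping weakly derived caps preserves uniqueness of
   reply caps and cannot introduce a reply cap that was not present before. *)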
lemma unique_reply_caps_cap_swap:
assumes u: "unique_reply_caps cs"
and c: "cs p = Some cap"
and c': "cs p' = Some cap'"
and wd: "weak_derived c cap"
and wd': "weak_derived c' cap'"
and pneq: "p \<noteq> p'"
shows "unique_reply_caps (cs (p \<mapsto> c', p' \<mapsto> c))"
proof -
have new_cap_is_unique[elim]:
"\<And>p'' c''.
\<lbrakk> is_reply_cap c''; p'' \<noteq> p; p'' \<noteq> p'; cs p'' = Some c''; c'' = c \<or> c'' = c' \<rbrakk>
\<Longrightarrow> False"
using u unfolding unique_reply_caps_def
apply (erule_tac disjE)
apply (elim allE)
apply (erule (1) impE, erule (1) impE)
apply (erule impE, rule c)
apply (simp add: weak_derived_reply_eq[OF wd])
apply (elim allE)
apply (erule (1) impE, erule (1) impE)
apply (erule impE, rule c')
apply (simp add: weak_derived_reply_eq[OF wd'])
done
have old_caps_differ:
"\<And>cap''.
\<lbrakk> is_reply_cap cap; is_reply_cap cap'; cap = cap''; cap' = cap'' \<rbrakk>
\<Longrightarrow> False"
using u unfolding unique_reply_caps_def
apply (elim allE)
apply (erule impE, rule c)
apply (erule impE, simp)
apply (erule impE, rule c')
apply (simp add: pneq)
done
have new_caps_differ:
"\<And>c''. \<lbrakk> is_reply_cap c''; c = c''; c' = c'' \<rbrakk> \<Longrightarrow> False"
apply (subgoal_tac "is_reply_cap c", subgoal_tac "is_reply_cap c'")
apply (subst(asm) weak_derived_replies [OF wd])
apply (subst(asm) weak_derived_replies [OF wd'])
apply (frule(1) old_caps_differ)
apply (simp add: weak_derived_reply_eq [OF wd])
apply (simp add: weak_derived_reply_eq [OF wd'])
apply simp+
done
show ?thesis
using u unfolding unique_reply_caps_def
apply (intro allI impI)
apply (simp split: if_split_asm del: split_paired_All)
apply (erule(2) new_caps_differ | fastforce)+
done
qed
lemma cap_swap_no_reply_caps:
assumes cap: "cs p = Some cap"
and cap': "cs p' = Some cap'"
and wd: "weak_derived c cap"
and wd': "weak_derived c' cap'"
and nr: "\<forall>sl. cs sl \<noteq> Some (cap.ReplyCap t False)"
shows "\<forall>sl. (cs(p \<mapsto> c', p' \<mapsto> c)) sl \<noteq> Some (cap.ReplyCap t False)"
proof -
have
"cap \<noteq> cap.ReplyCap t False"
"cap' \<noteq> cap.ReplyCap t False"
using cap cap' nr by clarsimp+
hence
"c \<noteq> cap.ReplyCap t False"
"c' \<noteq> cap.ReplyCap t False"
by (rule_tac ccontr, simp,
drule_tac weak_derived_Reply_eq [OF wd]
weak_derived_Reply_eq [OF wd'],
simp)+
thus ?thesis
using nr unfolding fun_upd_def
by (clarsimp split: if_split_asm)
qed
lemma cap_swap_has_reply_cap_neg:
"\<lbrace>\<lambda>s. \<not> has_reply_cap t s \<and>
cte_wp_at (weak_derived c) p s \<and>
cte_wp_at (weak_derived c') p' s \<and>
p \<noteq> p'\<rbrace>
cap_swap c p c' p' \<lbrace>\<lambda>rv s. \<not> has_reply_cap t s\<rbrace>"
apply (simp add: has_reply_cap_def cte_wp_at_caps_of_state
del: split_paired_All split_paired_Ex)
apply (wp cap_swap_caps_of_state)
apply (elim conjE exE)
apply (erule(4) cap_swap_no_reply_caps)
done
lemma cap_swap_replies:
"\<lbrace>\<lambda>s. valid_reply_caps s
\<and> cte_wp_at (weak_derived c) p s
\<and> cte_wp_at (weak_derived c') p' s
\<and> p \<noteq> p'\<rbrace>
cap_swap c p c' p'
\<lbrace>\<lambda>rv s. valid_reply_caps s\<rbrace>"
apply (simp add: valid_reply_caps_def)
apply (rule hoare_pre)
apply (simp only: imp_conv_disj)
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift cap_swap_has_reply_cap_neg)
apply (clarsimp simp: fun_upd_def cte_wp_at_caps_of_state
unique_reply_caps_cap_swap [simplified fun_upd_def])
done
lemma cap_swap_fd_replies[wp]:
"\<lbrace>\<lambda>s. valid_reply_caps s\<rbrace>
cap_swap_for_delete p p'
\<lbrace>\<lambda>rv s. valid_reply_caps s\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp cap_swap_replies get_cap_wp)
apply (fastforce elim: cte_wp_at_weakenE)
done
lemma cap_swap_reply_masters:
"\<lbrace>valid_reply_masters and K(\<not> is_master_reply_cap c \<and> \<not> is_master_reply_cap c')\<rbrace>
cap_swap c p c' p' \<lbrace>\<lambda>_. valid_reply_masters\<rbrace>"
apply (simp add: valid_reply_masters_def cte_wp_at_caps_of_state)
apply (rule hoare_pre)
apply (simp only: imp_conv_disj)
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift cap_swap_caps_of_state
cap_swap_typ_at tcb_at_typ_at)
apply (auto simp: is_cap_simps)
done
lemma cap_swap_fd_reply_masters[wp]:
"\<lbrace>valid_reply_masters and
cte_wp_at (\<lambda>c. \<not> is_master_reply_cap c) p and
cte_wp_at (\<lambda>c. \<not> is_master_reply_cap c) p'\<rbrace>
cap_swap_for_delete p p'
\<lbrace>\<lambda>rv. valid_reply_masters\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp cap_swap_reply_masters get_cap_wp)
apply (clarsimp simp: cte_wp_at_def)
done
crunch refs_of[wp]: cap_swap "\<lambda>s. P (state_refs_of s)"
(ignore: set_cap simp: state_refs_of_pspaceI)
crunch cur_tcb[wp]: cap_swap "cur_tcb"
lemma copy_of_cte_refs:
"copy_of cap cap' \<Longrightarrow> cte_refs cap = cte_refs cap'"
apply (rule ext, clarsimp simp: copy_of_def split: if_split_asm)
apply (cases cap', simp_all add: same_object_as_def)
apply (clarsimp simp: is_cap_simps bits_of_def
split: cap.split_asm)+
done
lemma copy_of_is_zombie:
"copy_of cap cap' \<Longrightarrow> is_zombie cap = is_zombie cap'"
apply (clarsimp simp: copy_of_def split: if_split_asm)
apply (cases cap', simp_all add: same_object_as_def)
apply (clarsimp simp: is_cap_simps bits_of_def
split: cap.split_asm)+
done
lemma copy_of_reply_cap:
"copy_of (ReplyCap t False) cap \<Longrightarrow> cap = ReplyCap t False"
apply (clarsimp simp: copy_of_def is_cap_simps)
by (cases cap, simp_all add: same_object_as_def)
lemma copy_of_cap_irqs:
"copy_of cap cap' \<Longrightarrow> cap_irqs cap = cap_irqs cap'"
apply (clarsimp simp: copy_of_def cap_irqs_def split: if_split_asm)
apply (cases cap', simp_all add: same_object_as_def)
by (clarsimp simp: is_cap_simps bits_of_def cap_range_def
split: cap.split_asm)+
lemma cap_swap_valid_idle[wp]:
"\<lbrace>valid_idle\<rbrace>
cap_swap c a c' b \<lbrace>\<lambda>_. valid_idle\<rbrace>"
apply (simp add: cap_swap_def set_cdt_def)
apply (wp set_cap_idle set_cap_it|simp)+
done
lemma cap_swap_global_refs[wp]:
"\<lbrace>valid_global_refs and
(\<lambda>s. global_refs s \<inter> cap_range c = {}) and
(\<lambda>s. global_refs s \<inter> cap_range c' = {})\<rbrace>
cap_swap c a c' b \<lbrace>\<lambda>_. valid_global_refs\<rbrace>"
apply (simp add: cap_swap_def set_cdt_def)
apply (wp set_cap_globals | simp)+
done
crunch arch[wp]: cap_swap "\<lambda>s. P (arch_state s)"
crunch irq_node[wp]: cap_swap "\<lambda>s. P (interrupt_irq_node s)"
lemma valid_reply_caps_of_stateD:
"\<And>p t s. \<lbrakk> valid_reply_caps s; caps_of_state s p = Some (cap.ReplyCap t False) \<rbrakk>
\<Longrightarrow> st_tcb_at awaiting_reply t s"
by (auto simp: valid_reply_caps_def has_reply_cap_def cte_wp_at_caps_of_state)
crunch interrupt_states[wp]: cap_swap "\<lambda>s. P (interrupt_states s)"
lemma weak_derived_cap_irqs:
"weak_derived c c' \<Longrightarrow> cap_irqs c = cap_irqs c'"
by (auto simp add: weak_derived_def copy_of_cap_irqs)
lemma cap_swap_irq_handlers[wp]:
"\<lbrace>valid_irq_handlers and
cte_wp_at (weak_derived c) a and
cte_wp_at (weak_derived c') b\<rbrace>
cap_swap c a c' b \<lbrace>\<lambda>rv. valid_irq_handlers\<rbrace>"
apply (simp add: valid_irq_handlers_def irq_issued_def)
apply (rule hoare_pre)
apply (wp hoare_use_eq [where f=interrupt_states,
OF cap_swap_interrupt_states cap_swap_caps_of_state])
apply (clarsimp simp: cte_wp_at_caps_of_state
elim!: ranE split: if_split_asm
dest!: weak_derived_cap_irqs)
apply auto
done
crunch arch_objs [wp]: cap_swap "valid_arch_objs"
crunch arch_objs [wp]: cap_move "valid_arch_objs"
crunch arch_objs [wp]: empty_slot "valid_arch_objs"
crunch valid_global_objs [wp]: cap_swap "valid_global_objs"
context CNodeInv_AI begin
lemma cap_swap_valid_arch_caps[wp]:
"\<And>c a c' b.
\<lbrace>valid_arch_caps and cte_wp_at (weak_derived c) a and cte_wp_at (weak_derived c') b\<rbrace>
cap_swap c a c' b
\<lbrace>\<lambda>rv. valid_arch_caps :: 'state_ext state \<Rightarrow> bool\<rbrace>"
apply (simp add: cap_swap_def)
apply (rule hoare_pre)
apply (subst bind_assoc[symmetric],
rule hoare_seq_ext [rotated],
rule swap_of_caps_valid_arch_caps)
apply (wp | simp split del: if_split)+
done
end
crunch v_ker_map[wp]: cap_swap "valid_kernel_mappings"
crunch eq_ker_map[wp]: cap_swap "equal_kernel_mappings"
crunch only_idle [wp]: cap_swap only_idle
crunch global_pd_mappings[wp]: cap_swap "valid_global_vspace_mappings"
crunch pspace_in_kernel_window[wp]: cap_swap "pspace_in_kernel_window"
lemma cap_swap_valid_ioc[wp]:
"\<lbrace>\<lambda>s. valid_ioc s \<and>
cte_wp_at (weak_derived c) p s \<and>
cte_wp_at (weak_derived c') p' s\<rbrace>
cap_swap c p c' p'
\<lbrace>\<lambda>_ s. valid_ioc s\<rbrace>"
apply (simp add: cap_swap_def valid_ioc_def cte_wp_at_caps_of_state)
apply (wp set_cdt_cos_ioc set_cap_caps_of_state2 | simp split del: if_split)+
apply (cases p, cases p')
apply fastforce
done
crunch machine_state[wp]: cap_swap "\<lambda>s. P(machine_state s)"
crunch valid_irq_states[wp]: cap_swap "valid_irq_states"
crunch pspace_respects_device_region[wp]: cap_swap pspace_respects_device_region
lemma cap_refs_respects_device_region_original_cap[wp]:
"cap_refs_respects_device_region
(s\<lparr>is_original_cap := ocp\<rparr>) = cap_refs_respects_device_region s"
by (simp add:cap_refs_respects_device_region_def)
context CNodeInv_AI begin
lemma cap_swap_cap_refs_respects_device_region[wp]:
"\<lbrace>cap_refs_respects_device_region and cte_wp_at (weak_derived c) a and cte_wp_at (weak_derived c') b\<rbrace>
cap_swap c a c' b \<lbrace>\<lambda>rv. cap_refs_respects_device_region\<rbrace>"
apply (simp add:cap_swap_def)
apply wp
apply (simp add: cap_refs_respects_device_region_def)
apply (rule hoare_strengthen_post[OF CSpace_AI.set_cdt_cap_refs_respects_device_region])
apply simp
apply wp+
apply (clarsimp simp add: cap_refs_respects_device_region_def cte_wp_at_caps_of_state
cap_range_respects_device_region_def
simp del: split_paired_All split_paired_Ex
| (wp hoare_vcg_all_lift hoare_vcg_imp_lift)+)+
apply (frule_tac x = a in spec)
apply (frule_tac x = b in spec)
apply (clarsimp simp: weak_derived_cap_range)
apply (intro conjI impI allI)
apply (simp add: weak_derived_cap_range weak_derived_cap_is_device)+
apply (rule ccontr)
apply simp
apply (rule disjI2)
apply (intro conjI impI)
apply (simp add: weak_derived_cap_range weak_derived_cap_is_device)+
apply (rule ccontr)
apply simp
apply (simp add: weak_derived_cap_range weak_derived_cap_is_device)+
apply (rule ccontr)
apply simp
apply (rule disjI2)
apply (rule ccontr)
apply (clarsimp simp add: weak_derived_cap_range weak_derived_cap_is_device)+
apply fastforce
done
lemma cap_swap_aobj_at:
"arch_obj_pred P' \<Longrightarrow>
\<lbrace>\<lambda>s. P (obj_at P' pd s)\<rbrace> cap_swap c (a, b) c' (aa, ba) \<lbrace>\<lambda>r s. P (obj_at P' pd s)\<rbrace>"
unfolding cap_swap_def set_cdt_def by (wpsimp wp: set_cap.aobj_at)
lemma cap_swap_invs[wp]:
"\<And>c' a c b.
\<lbrace>invs and ex_cte_cap_wp_to (appropriate_cte_cap c') a
and ex_cte_cap_wp_to (appropriate_cte_cap c) b and
valid_cap c and valid_cap c' and
tcb_cap_valid c b and tcb_cap_valid c' a and
cte_wp_at (weak_derived c) a and
cte_wp_at (\<lambda>cc. is_untyped_cap cc \<longrightarrow> cc = c) a and
cte_wp_at (weak_derived c') b and
cte_wp_at (\<lambda>cc. is_untyped_cap cc \<longrightarrow> cc = c') b and
K (a \<noteq> b \<and> \<not> is_master_reply_cap c \<and> \<not> is_master_reply_cap c')\<rbrace>
cap_swap c a c' b \<lbrace>\<lambda>rv. invs :: 'state_ext state \<Rightarrow> bool\<rbrace>"
unfolding invs_def valid_state_def valid_pspace_def
apply (wp cap_swap_replies cap_swap_reply_masters valid_arch_state_lift_aobj_at
cap_swap_typ_at valid_irq_node_typ cap_swap_aobj_at
| simp
| erule disjE
| clarsimp simp: cte_wp_at_caps_of_state copy_of_cte_refs weak_derived_def
copy_obj_refs copy_of_zobj_refs copy_of_is_zombie
copy_of_cap_irqs
| clarsimp simp: valid_global_refs_def valid_refs_def copy_of_cap_range
cte_wp_at_caps_of_state
simp del: split_paired_Ex split_paired_All
| rule conjI
| fastforce dest!: valid_reply_caps_of_stateD)+
done
lemma cap_swap_fd_invs[wp]:
"\<And>a b.
\<lbrace>invs and ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) a
and ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) b
and (\<lambda>s. \<forall>c. tcb_cap_valid c a s)
and (\<lambda>s. \<forall>c. tcb_cap_valid c b s)
and cte_wp_at (\<lambda>c. \<not> is_master_reply_cap c) a
and cte_wp_at (\<lambda>c. \<not> is_master_reply_cap c) b\<rbrace>
cap_swap_for_delete a b \<lbrace>\<lambda>rv. invs :: 'state_ext state \<Rightarrow> bool\<rbrace>"
apply (simp add: cap_swap_for_delete_def)
apply (wp get_cap_wp)
apply (clarsimp)
apply (strengthen cap_irqs_appropriate_strengthen, simp)
apply (rule conjI, fastforce dest: cte_wp_at_valid_objs_valid_cap)
apply (rule conjI, fastforce dest: cte_wp_at_valid_objs_valid_cap)
apply (clarsimp simp: cte_wp_at_caps_of_state weak_derived_def)
done
end
lemma final_cap_unchanged:
assumes x: "\<And>P p. \<lbrace>cte_wp_at P p\<rbrace> f \<lbrace>\<lambda>rv. cte_wp_at P p\<rbrace>"
assumes y: "\<And>P T p. \<lbrace>\<lambda>s. P (typ_at T p s)\<rbrace> f \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
shows "\<lbrace>is_final_cap' cap\<rbrace> f \<lbrace>\<lambda>rv. is_final_cap' cap\<rbrace>"
apply (simp only: is_final_cap'_def3 imp_conv_disj de_Morgan_conj)
apply (wp hoare_vcg_ex_lift hoare_vcg_all_lift x hoare_vcg_disj_lift
valid_cte_at_neg_typ [OF y])
done
lemmas set_cap_cte_wp_at_cases = set_cap_cte_wp_at[simplified if_bool_eq_conj pred_conj_def conj_comms]
lemma cyclic_zombieD[dest!]:
"cap_cyclic_zombie cap sl
\<Longrightarrow> \<exists>p zb n. cap = cap.Zombie p zb n
\<and> sl = (p, replicate (zombie_cte_bits zb) False)"
by (cases cap, simp_all add: cap_cyclic_zombie_def)
context CNodeInv_AI begin
lemma rec_del_abort_cases:
"\<And>args (s::'state_ext state).
case args of FinaliseSlotCall sl ex \<Rightarrow> s \<turnstile> \<lbrace>\<top>\<rbrace>
rec_del (FinaliseSlotCall sl ex)
\<lbrace>\<lambda>rv s. (fst rv) \<or> (\<not> ex \<and> cte_wp_at (\<lambda>c. is_zombie c \<and> sl \<in> fst_cte_ptrs c) sl s)\<rbrace>,\<lbrace>\<top>\<top>\<rbrace>
| _ \<Rightarrow> True"
subgoal for args s
proof (induct rule: rec_del_induct)
case (2 slot exposed)
note wp = "2.hyps"[simplified rec_del_call.simps]
show ?case
apply (subst rec_del_simps_ext)
apply (simp only: rec_del_call.simps split_def)
apply wp
apply (simp add: cte_wp_at_caps_of_state)
apply (wp wp)+
apply (wp irq_state_independent_AI | simp)+
apply (rule hoare_strengthen_post)
apply (rule finalise_cap_cases[where slot=slot])
apply clarsimp
apply (fastforce simp: fst_cte_ptrs_def)
apply (simp add: is_final_cap_def | wp get_cap_wp)+
done
qed (simp_all add: rec_del_fails)
done
lemma rec_del_delete_cases:
"\<And>sl ex.
\<lbrace>\<top> :: 'state_ext state \<Rightarrow> bool\<rbrace>
rec_del (CTEDeleteCall sl ex)
\<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>c. c = cap.NullCap \<or> \<not> ex \<and> is_zombie c \<and> sl \<in> fst_cte_ptrs c) sl s\<rbrace>,-"
subgoal for sl ex
using rec_del_abort_cases [where args="FinaliseSlotCall sl ex"]
apply (subst rec_del_simps_ext, simp add: split_def)
apply wp
apply (rule hoare_strengthen_post [OF empty_slot_deletes])
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (rule use_spec, rule spec_strengthen_postE, assumption)
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply assumption
done
done
lemma cap_delete_deletes:
notes hoare_pre [wp_pre del]
shows
"\<And>p.
\<lbrace>\<top> :: 'state_ext state \<Rightarrow> bool\<rbrace>
cap_delete p
\<lbrace>\<lambda>rv. cte_wp_at (\<lambda>c. c = cap.NullCap) p\<rbrace>,-"
subgoal for p
unfolding cap_delete_def
using rec_del_delete_cases[where sl=p and ex=True]
apply (simp add: validE_R_def)
apply wp
apply simp
done
done
end
lemma final_cap_same_objrefs:
"\<lbrace>is_final_cap' cap and cte_wp_at (\<lambda>c. obj_refs cap \<inter> obj_refs c \<noteq> {}
\<or> cap_irqs cap \<inter> cap_irqs c \<noteq> {}) ptr\<rbrace>
set_cap cap ptr \<lbrace>\<lambda>rv. is_final_cap' cap\<rbrace>"
apply (simp only: is_final_cap'_def3 pred_conj_def
cte_wp_at_caps_of_state)
apply wp
apply (clarsimp simp del: split_paired_Ex split_paired_All)
apply (rule_tac x=ptr in exI)
apply (subgoal_tac "(a, b) = ptr")
apply clarsimp
apply (erule_tac x="ptr" in allE)
apply (fastforce simp: obj_irq_refs_Int)
done
lemma cte_wp_at_weakenE_customised:
"\<lbrakk>cte_wp_at P t s; \<And>c. \<lbrakk> P c; cte_wp_at (op = c) t s \<rbrakk> \<Longrightarrow> P' c\<rbrakk> \<Longrightarrow> cte_wp_at P' t s"
by (clarsimp simp: cte_wp_at_def)
lemma final_cap_at_same_objrefs:
"\<lbrace>\<lambda>s. cte_wp_at (\<lambda>c. obj_refs c \<noteq> {} \<and> is_final_cap' c s) p s
\<and> cte_wp_at (\<lambda>c. obj_refs cap = obj_refs c
\<and> cap_irqs cap = cap_irqs c) ptr s \<and> p \<noteq> ptr\<rbrace>
set_cap cap ptr \<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s\<rbrace>"
apply (simp only: final_cap_at_eq cte_wp_at_conj)
apply (simp add: cte_wp_at_caps_of_state)
apply wp
apply (clarsimp simp del: split_paired_All split_paired_Ex
simp: obj_irq_refs_Int obj_irq_refs_empty)
apply fastforce
done
lemma cap_swap_fd_final_cap_at_one_case:
"\<lbrace>\<lambda>s. p \<noteq> p'' \<and> ((p = p') \<longrightarrow> cte_wp_at (\<lambda>c. is_final_cap' c s) p'' s)
\<and> ((p \<noteq> p') \<longrightarrow> cte_wp_at (\<lambda>c. is_final_cap' c s) p s)\<rbrace>
cap_swap_for_delete p' p''
\<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s\<rbrace>"
apply (simp only: final_cap_at_eq cte_wp_at_conj)
apply (simp add: cte_wp_at_caps_of_state)
apply wp
apply (cases "p = p'")
apply (cases p', clarsimp)
apply clarsimp
apply (cases p', cases p'', clarsimp)
done
lemma cap_swap_fd_cte_wp_at_one_case:
"\<lbrace>\<lambda>s. p \<noteq> p'' \<and> ((p = p') \<longrightarrow> cte_wp_at P p'' s) \<and> ((p \<noteq> p') \<longrightarrow> cte_wp_at P p s)\<rbrace>
cap_swap_for_delete p' p''
\<lbrace>\<lambda>rv s. cte_wp_at P p s\<rbrace>"
apply (simp add: cte_wp_at_caps_of_state)
apply wp
apply clarsimp
done
lemma valid_cte_wp_at_prop:
assumes x: "\<And>P p. \<lbrace>cte_wp_at P p\<rbrace> f \<lbrace>\<lambda>rv. cte_wp_at P p\<rbrace>"
assumes y: "\<And>P T p. \<lbrace>\<lambda>s. P (typ_at T p s)\<rbrace> f \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
shows "\<lbrace>\<lambda>s. P' (cte_wp_at P p s)\<rbrace> f \<lbrace>\<lambda>rv s. P' (cte_wp_at P p s)\<rbrace>"
proof -
have cte_wp_at_neg2:
"\<And>P p s. (\<not> cte_wp_at P p s) = (\<not> cte_at p s \<or> cte_wp_at (\<lambda>c. \<not> P c) p s)"
by (fastforce simp: cte_wp_at_def)
have rev_iffI:
"\<And>P Q. \<lbrakk> P \<Longrightarrow> Q; \<not> P \<Longrightarrow> \<not> Q \<rbrakk> \<Longrightarrow> P = Q"
by fastforce
show ?thesis
apply (clarsimp simp: valid_def elim!: rsubst[where P=P'])
apply (rule rev_iffI)
apply (erule(1) use_valid [OF _ x])
apply (subst cte_wp_at_neg2)
apply (erule use_valid)
apply (wp hoare_vcg_disj_lift x y valid_cte_at_neg_typ)
apply (simp only: cte_wp_at_neg2[symmetric] simp_thms)
done
qed
lemma final_cap_at_unchanged:
assumes x: "\<And>P p. \<lbrace>cte_wp_at (\<lambda>c. P (obj_refs c) (cap_irqs c)) p\<rbrace> f
\<lbrace>\<lambda>rv. cte_wp_at (\<lambda>c. P (obj_refs c) (cap_irqs c)) p\<rbrace>"
assumes y: "\<And>P T p. \<lbrace>\<lambda>s. P (typ_at T p s)\<rbrace> f \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
shows "\<lbrace>\<lambda>s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s\<rbrace> f
\<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s\<rbrace>"
proof -
have final_cap_at_eq':
"\<And>p s. cte_wp_at (\<lambda>c. is_final_cap' c s) p s =
(\<exists>cp. cte_wp_at (\<lambda>c. obj_refs c = obj_refs cp \<and> cap_irqs c = cap_irqs cp) p s
\<and> (obj_refs cp \<noteq> {} \<or> cap_irqs cp \<noteq> {})
\<and> (\<forall>p'. (cte_at p' s \<and> p' \<noteq> p) \<longrightarrow>
cte_wp_at (\<lambda>c. obj_refs cp \<inter> obj_refs c = {}
\<and> cap_irqs cp \<inter> cap_irqs c = {}) p' s))"
apply (simp add: final_cap_at_eq cte_wp_at_def)
apply (rule iffI)
apply (clarsimp simp: obj_irq_refs_Int obj_irq_refs_empty)
apply (rule exI, rule conjI, rule refl)
apply clarsimp
apply (clarsimp simp: obj_irq_refs_Int obj_irq_refs_empty)
done
show ?thesis
apply (simp only: final_cap_at_eq' imp_conv_disj de_Morgan_conj)
apply (wp hoare_vcg_ex_lift hoare_vcg_all_lift x hoare_vcg_disj_lift
valid_cte_at_neg_typ y)
done
qed
lemma zombie_has_objrefs:
"is_zombie c \<Longrightarrow> obj_refs c \<noteq> {}"
by (case_tac c, simp_all add: is_zombie_def)
lemma word_same_bl_memo_unify_word_type:
"\<lbrakk> of_bl xs = (of_bl ys :: ('a :: len) word); length xs = length ys;
length xs \<le> len_of TYPE('a) \<rbrakk> \<Longrightarrow> xs = ys"
apply (subst same_append_eq[symmetric])
apply (rule word_bl.Abs_eqD)
apply (subst of_bl_rep_False)+
apply simp
apply simp
apply (erule le_add_diff_inverse2)
apply simp
done
lemma word_and_bl_proof:
"\<lbrakk> invs s; kheap s x = Some (CNode sz cs);
unat (of_bl y :: word32) = 0; unat (of_bl z :: word32) = 0;
y \<in> dom cs; z \<in> dom cs \<rbrakk> \<Longrightarrow> y = z"
apply (simp add: unat_eq_0)
apply (frule invs_valid_objs, erule(1) valid_objsE)
apply (clarsimp simp: valid_obj_def valid_cs_def
valid_cs_size_def well_formed_cnode_n_def)
apply (rule word_same_bl_memo_unify_word_type[where 'a=32])
apply simp
apply simp
apply (simp add: word_bits_def)
done
lemma final_zombie_not_live:
"\<lbrakk> is_final_cap' (cap.Zombie ptr b n) s; cte_wp_at (op = (cap.Zombie ptr b n)) p s;
if_live_then_nonz_cap s \<rbrakk>
\<Longrightarrow> \<not> obj_at live ptr s"
apply clarsimp
apply (drule(1) if_live_then_nonz_capD, simp)
apply (clarsimp simp: ex_nonz_cap_to_def zobj_refs_to_obj_refs)
apply (subgoal_tac "(a, ba) \<noteq> p")
apply (clarsimp simp: is_final_cap'_def)
apply (erule(1) obvious)
apply (clarsimp simp: cte_wp_at_def is_zombie_def)+
done
lemma suspend_ex_cte_cap[wp]:
"\<lbrace>ex_cte_cap_wp_to P p\<rbrace> IpcCancel_A.suspend t \<lbrace>\<lambda>rv. ex_cte_cap_wp_to P p\<rbrace>"
apply (simp add: ex_cte_cap_wp_to_def cte_wp_at_caps_of_state
del: split_paired_Ex)
apply (wp hoare_use_eq_irq_node [OF suspend_irq_node suspend_caps_of_state])
apply (simp del: split_paired_Ex split_paired_All)
apply (intro allI impI, erule exEI)
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (clarsimp simp: can_fast_finalise_def
split: cap.split_asm)
done
lemma of_bl_eq_0:
"\<lbrakk> of_bl xs = (0 :: ('a :: len) word); length xs \<le> len_of TYPE('a) \<rbrakk>
\<Longrightarrow> \<exists>n. xs = replicate n False"
apply (rule exI)
apply (rule word_same_bl_memo_unify_word_type[where 'a='a]; simp)
done
(* FIXME: eliminate *)
lemmas cte_at_length = cte_at_length_limit
context CNodeInv_AI begin
lemma zombie_is_cap_toE:
"\<And>ptr zbits n p (s::'state_ext state) m P.
\<lbrakk> cte_wp_at (op = (Zombie ptr zbits n)) p s; invs s; m < n; P (Zombie ptr zbits n) \<rbrakk>
\<Longrightarrow> ex_cte_cap_wp_to P (ptr, nat_to_cref (zombie_cte_bits zbits) m) s"
unfolding ex_cte_cap_wp_to_def
apply (frule cte_wp_at_valid_objs_valid_cap, clarsimp)
apply (intro exI, erule cte_wp_at_weakenE)
apply clarsimp
apply (drule(2) zombie_is_cap_toE_pre, simp)
done
end
lemma zombie_is_cap_toE2:
"\<lbrakk> cte_wp_at (op = (cap.Zombie ptr zbits n)) p s; 0 < n;
P (cap.Zombie ptr zbits n) \<rbrakk>
\<Longrightarrow> ex_cte_cap_wp_to P (ptr, replicate (zombie_cte_bits zbits) False) s"
unfolding ex_cte_cap_wp_to_def
apply (rule exI, erule cte_wp_at_weakenE)
apply clarsimp
done
lemma set_cap_emptyable[wp]:
"\<not> is_master_reply_cap cap \<Longrightarrow>
\<lbrace>emptyable sl and cte_at p\<rbrace> set_cap cap p \<lbrace>\<lambda>rv. emptyable sl\<rbrace>"
apply (simp add: emptyable_def)
apply (subst imp_conv_disj)+
apply (wp hoare_vcg_disj_lift set_cap_typ_at set_cap_cte_wp_at
| simp add: tcb_at_typ)+
done
lemma set_cap_halted_if_tcb[wp]:
"\<lbrace>halted_if_tcb t\<rbrace> set_cap cap p \<lbrace>\<lambda>rv. halted_if_tcb t\<rbrace>"
apply (simp add: halted_if_tcb_def)
apply (subst imp_conv_disj)+
apply (wp hoare_vcg_disj_lift set_cap_typ_at | simp add: tcb_at_typ)+
done
lemma valid_Zombie_n_less_cte_bits:
"s \<turnstile> cap.Zombie p zb n \<Longrightarrow> n \<le> 2 ^ zombie_cte_bits zb"
by (clarsimp simp: valid_cap_def split: option.split_asm)
lemma zombie_cte_bits_less:
"s \<turnstile> cap.Zombie p zb m \<Longrightarrow> zombie_cte_bits zb < word_bits"
by (clarsimp simp: valid_cap_def cap_aligned_def
split: option.split_asm)
context CNodeInv_AI begin
lemma nat_to_cref_replicate_Zombie:
"\<And>zb n (s::'state_ext state) p m.
\<lbrakk> nat_to_cref (zombie_cte_bits zb) n = replicate (zombie_cte_bits zb) False;
s \<turnstile> cap.Zombie p zb m; n < m \<rbrakk>
\<Longrightarrow> n = 0"
apply (subgoal_tac "unat (of_bl (nat_to_cref (zombie_cte_bits zb) n)) = 0")
apply (subst(asm) unat_of_bl_nat_to_cref)
apply (drule valid_Zombie_n_less_cte_bits, simp)
apply (erule zombie_cte_bits_less)
apply simp
apply simp
done
end
lemma replicate_False_tcb_valid[simp]:
"tcb_cap_valid cap (p, replicate n False) s"
apply (clarsimp simp: tcb_cap_valid_def st_tcb_def2 tcb_at_def)
apply (rule conjI)
apply (clarsimp split: option.split)
apply (frule tcb_cap_cases_length[OF domI])
apply (clarsimp simp add: tcb_cap_cases_def tcb_cnode_index_def to_bl_1)
apply (cases n, simp_all add: tcb_cnode_index_def)
done
lemma tcb_valid_nonspecial_cap:
"\<lbrakk> caps_of_state s p = Some cap; valid_objs s;
\<forall>ptr st. \<forall>(getF, setF, restr) \<in> ran tcb_cap_cases.
\<not> restr ptr st cap \<or> (\<forall>cap. restr ptr st cap);
\<forall>ptr. (is_nondevice_page_cap cap \<or> cap = cap.NullCap) \<and>
valid_ipc_buffer_cap cap ptr
\<longrightarrow> valid_ipc_buffer_cap cap' ptr \<rbrakk>
\<Longrightarrow> tcb_cap_valid cap' p s"
apply (drule cte_wp_tcb_cap_valid[rotated])
apply (erule caps_of_state_cteD)
apply (clarsimp simp: tcb_cap_valid_def st_tcb_def2)
apply (clarsimp split: option.split_asm)
apply (rule conjI)
apply (drule spec, drule spec, drule bspec, erule ranI)
apply fastforce
apply (clarsimp simp: eq_commute)
done
lemma suspend_makes_halted[wp]:
"\<lbrace>valid_objs\<rbrace> IpcCancel_A.suspend thread \<lbrace>\<lambda>_. st_tcb_at halted thread\<rbrace>"
unfolding IpcCancel_A.suspend_def
by (wp hoare_strengthen_post [OF sts_st_tcb_at]
| clarsimp elim!: pred_tcb_weakenE)+
lemma empty_slot_emptyable[wp]:
"\<lbrace>emptyable sl and cte_at slot'\<rbrace> empty_slot slot' opt \<lbrace>\<lambda>rv. emptyable sl\<rbrace>"
apply (rule hoare_assume_pre)
apply (rule hoare_weaken_pre)
apply (simp add: emptyable_def)
apply (subst imp_conv_disj)+
apply (wp hoare_vcg_disj_lift | simp add: tcb_at_typ)+
apply (simp add: is_cap_simps emptyable_def tcb_at_typ)
done
crunch emptyable[wp]: blocked_cancel_ipc "emptyable sl"
(ignore: set_thread_state wp: emptyable_lift sts_st_tcb_at_cases static_imp_wp)
crunch emptyable[wp]: cancel_signal "emptyable sl"
(ignore: set_thread_state wp: emptyable_lift sts_st_tcb_at_cases static_imp_wp)
lemma cap_delete_one_emptyable[wp]:
"\<lbrace>invs and emptyable sl and cte_at sl'\<rbrace> cap_delete_one sl' \<lbrace>\<lambda>_. emptyable sl\<rbrace>"
apply (simp add: cap_delete_one_def unless_def is_final_cap_def)
apply (wpsimp wp: get_cap_wp)
done
lemmas tcb_at_cte_at_2 = tcb_at_cte_at [where ref="tcb_cnode_index 2",
simplified dom_tcb_cap_cases]
declare thread_set_Pmdb [wp]
lemma reply_cancel_ipc_emptyable[wp]:
"\<lbrace>invs and emptyable sl and valid_mdb\<rbrace> reply_cancel_ipc ptr \<lbrace>\<lambda>_. emptyable sl\<rbrace>"
apply (simp add: reply_cancel_ipc_def)
apply (wp select_wp select_inv hoare_drop_imps | simp add: Ball_def)+
apply (wp hoare_vcg_all_lift hoare_convert_imp thread_set_Pmdb
thread_set_invs_trivial thread_set_emptyable thread_set_cte_at
| simp add: tcb_cap_cases_def descendants_of_cte_at)+
done
crunch emptyable[wp]: cancel_ipc "emptyable sl"
lemma suspend_emptyable[wp]:
"\<lbrace>invs and emptyable sl and valid_mdb\<rbrace> IpcCancel_A.suspend l \<lbrace>\<lambda>_. emptyable sl\<rbrace>"
apply (simp add: IpcCancel_A.suspend_def)
apply (wp|simp)+
apply (wp emptyable_lift sts_st_tcb_at_cases)
apply simp
apply (wp set_thread_state_cte_wp_at | simp)+
done
crunch emptyable[wp]: do_machine_op "emptyable sl"
(rule: emptyable_lift)
crunch emptyable[wp]: set_irq_state "emptyable sl"
(rule: emptyable_lift)
declare get_irq_slot_real_cte [wp]
lemma cap_swap_for_delete_emptyable[wp]:
"\<lbrace>emptyable sl and emptyable sl'\<rbrace> cap_swap_for_delete sl' sl \<lbrace>\<lambda>rv. emptyable sl\<rbrace>"
apply (simp add: emptyable_def cap_swap_for_delete_def cap_swap_def tcb_at_typ)
apply (rule hoare_pre)
apply (subst imp_conv_disj)+
apply (wp hoare_vcg_disj_lift set_cdt_typ_at set_cap_typ_at | simp split del: if_split)+
done
context CNodeInv_AI begin
lemma finalise_cap_not_reply_master:
"\<And>rv s' cap sl (s::'state_ext state).
(Inr rv, s') \<in> fst (liftE (finalise_cap cap sl) s) \<Longrightarrow> \<not> is_master_reply_cap (fst rv)"
by (simp add: Inr_in_liftE_simp finalise_cap_not_reply_master_unlifted)
end
crunch cte_at_pres[wp]: empty_slot "cte_at sl"
lemma cte_wp_at_emptyableD:
"\<And>P. \<lbrakk> cte_wp_at (\<lambda>c. c = cap) p s; valid_objs s; \<And>cap. P cap \<Longrightarrow> \<not> is_master_reply_cap cap \<rbrakk> \<Longrightarrow>
P cap \<longrightarrow> emptyable p s"
apply (simp add: emptyable_def)
apply (clarsimp simp add: obj_at_def is_tcb)
apply (erule(1) valid_objsE)
apply (clarsimp simp: cte_wp_at_cases valid_obj_def valid_tcb_def
tcb_cap_cases_def pred_tcb_at_def obj_at_def
split: Structures_A.thread_state.splits)
done
lemma cte_wp_at_not_reply_master:
"\<And>a b s. \<lbrakk> tcb_at a s \<longrightarrow> b \<noteq> tcb_cnode_index 2; cte_at (a, b) s;
valid_objs s; valid_reply_masters s \<rbrakk>
\<Longrightarrow> cte_wp_at (\<lambda>c. \<not> is_master_reply_cap c) (a, b) s"
by (fastforce simp: valid_reply_masters_def cte_wp_at_caps_of_state
is_cap_simps valid_cap_def
dest: caps_of_state_valid_cap)
declare finalise_cap_cte_cap_to [wp]
lemma appropriate_Zombie:
"\<And>ptr zbits n. appropriate_cte_cap (cap.Zombie ptr zbits n)
= (\<lambda>cap. cap_irqs cap = {})"
by (rule ext, simp add: appropriate_cte_cap_def)
lemma no_cap_to_obj_with_diff_ref_eqE:
"\<lbrakk> no_cap_to_obj_with_diff_ref cap S s;
obj_refs cap' = obj_refs cap; table_cap_ref cap' = table_cap_ref cap;
S \<subseteq> S' \<rbrakk>
\<Longrightarrow> no_cap_to_obj_with_diff_ref cap' S' s"
by (auto simp add: no_cap_to_obj_with_diff_ref_def Ball_def)
lemma context_conjI': "\<lbrakk>P; P \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q \<and> P"
apply simp
done
lemma real_cte_at_not_tcb:
"real_cte_at sl s \<Longrightarrow> \<not> tcb_at (fst sl) s"
apply (simp add: tcb_at_typ obj_at_def)
apply (clarsimp simp: is_cap_table_def a_type_def split: if_split_asm
Structures_A.kernel_object.split)[1]
done
context CNodeInv_AI_2 begin
lemma rec_del_invs:
"\<And>args.
\<lbrace>invs and valid_rec_del_call args
and (\<lambda>s. \<not> exposed_rdcall args
\<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) (slot_rdcall args) s)
and emptyable (slot_rdcall args)
and (\<lambda>s. case args of ReduceZombieCall cap sl ex \<Rightarrow>
\<not> cap_removeable cap sl
\<and> (\<forall>t\<in>obj_refs cap. halted_if_tcb t s)
| _ \<Rightarrow> True)\<rbrace>
rec_del args
\<lbrace>\<lambda>rv. invs :: 'state_ext state \<Rightarrow> bool\<rbrace>"
apply (rule validE_valid)
apply (rule hoare_post_impErr)
apply (rule hoare_pre)
apply (rule use_spec)
apply (rule rec_del_invs')
apply simp+
done
lemma cap_delete_invs[wp]:
"\<And>ptr.
\<lbrace>invs and emptyable ptr :: 'state_ext state \<Rightarrow> bool\<rbrace>
cap_delete ptr
\<lbrace>\<lambda>rv. invs\<rbrace>"
unfolding cap_delete_def
apply (rule hoare_pre, wp rec_del_invs)
apply simp
done
lemma cap_delete_tcb[wp]:
"\<And>t ptr. \<lbrace>tcb_at t :: 'state_ext state \<Rightarrow> bool\<rbrace> cap_delete ptr \<lbrace>\<lambda>rv. tcb_at t\<rbrace>"
unfolding cap_delete_def
by (simp add: tcb_at_typ | wp rec_del_typ_at)+
lemma cap_delete_valid_cap:
"\<And>c p. \<lbrace>valid_cap c :: 'state_ext state \<Rightarrow> bool\<rbrace> cap_delete p \<lbrace>\<lambda>_. valid_cap c\<rbrace>"
unfolding cap_delete_def
by (wp valid_cap_typ rec_del_typ_at | simp)+
lemma cap_delete_cte_at:
"\<And>c p. \<lbrace>cte_at c :: 'state_ext state \<Rightarrow> bool\<rbrace> cap_delete p \<lbrace>\<lambda>_. cte_at c\<rbrace>"
unfolding cap_delete_def by (wp rec_del_cte_at | simp)+
lemma cap_delete_typ_at:
"\<And>P T p cref. \<lbrace>\<lambda>s::'state_ext state. P (typ_at T p s)\<rbrace> cap_delete cref \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
unfolding cap_delete_def by (wp rec_del_typ_at | simp)+
end
lemma cap_swap_fd_st_tcb_at[wp]:
"\<lbrace>pred_tcb_at proj P t\<rbrace> cap_swap_for_delete sl sl' \<lbrace>\<lambda>rv. pred_tcb_at proj P t\<rbrace>"
unfolding cap_swap_for_delete_def
by (wp, simp)
declare if_cong[cong]
lemma cases2 [case_names pos_pos neg_pos pos_neg neg_neg]:
"\<lbrakk> \<lbrakk>p; q\<rbrakk> \<Longrightarrow> R; \<lbrakk>\<not> p; q\<rbrakk> \<Longrightarrow> R; \<lbrakk>p; \<not> q\<rbrakk> \<Longrightarrow> R; \<lbrakk>\<not> p; \<not> q\<rbrakk> \<Longrightarrow> R \<rbrakk> \<Longrightarrow> R"
by auto
definition
rpo_measure :: "'a \<Rightarrow> ('a option \<times> nat) option \<Rightarrow> nat"
where
"rpo_measure x v \<equiv> case v of Some (y, n) \<Rightarrow> (if y = Some x then n - 1 else n)"
lemma rpo_measure_simps[simp]:
"rpo_measure x (Some (y, n)) = (if y = Some x then n - 1 else n)"
by (simp add: rpo_measure_def)
definition
revoke_progress_ord :: "('a \<rightharpoonup> 'a option \<times> nat) \<Rightarrow> ('a \<rightharpoonup> 'a option \<times> nat) \<Rightarrow> bool"
where
"revoke_progress_ord mapa mapb \<equiv> (mapa = mapb)
\<or> (mapb, mapa) \<in> measure (\<lambda>mp. \<Sum>x\<in>dom mp. rpo_measure x (mp x))"
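text {* A slot is charged by @{text "rpo_measure"} according to the numeric
component of its annotation, discounted by one when the annotated slot is
the slot being measured.  The progress order @{text "revoke_progress_ord"}
then holds when the map is unchanged or the summed charge strictly
decreases.  The following unnamed sanity check is purely illustrative and
is expected to follow from the simp rule above. *}

lemma "rpo_measure sl (Some (Some sl, 2)) = 1 \<and> rpo_measure sl (Some (None, 3)) = 3"
by simp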
lemma rpo_trans:
"\<lbrakk> revoke_progress_ord mapa mapb; revoke_progress_ord mapb mapc \<rbrakk>
\<Longrightarrow> revoke_progress_ord mapa mapc"
apply (simp add: revoke_progress_ord_def)
apply (elim disjE, simp_all)
done
interpretation mult_is_add: comm_monoid_mult "op +" "0::'a::comm_monoid_add"
by (unfold_locales) (auto simp: field_simps)
lemma fold_Int_sub:
assumes "finite S" "finite T"
shows "(\<Sum>x \<in> (S \<inter> T). (f x :: nat)) = (\<Sum>x \<in> T. f x) - (\<Sum>x \<in> (T - S). f x)"
proof -
from assms sum.union_disjoint[where A="S \<inter> T" and B="T - S" and g=f]
show ?thesis
apply simp
apply (drule meta_mp)
apply blast
apply (subgoal_tac "S \<inter> T \<union> (T - S) = T")
apply simp
apply blast
done
qed
lemma rpo_delta:
assumes x: "\<And>x. x \<notin> S \<Longrightarrow> mapa x = mapb x"
assumes F: "finite S" "finite (dom mapa)" "finite (dom mapb)"
assumes y:
"(mapb, mapa) \<in> measure (\<lambda>mp. \<Sum>x \<in> S \<inter> dom mp. rpo_measure x (mp x))"
shows "revoke_progress_ord mapa mapb"
proof -
have P: "(dom mapa - S) = (dom mapb - S)"
by (fastforce simp: x)
have Q: "(\<Sum>x \<in> dom mapa - S. rpo_measure x (mapa x))
= (\<Sum>x \<in> dom mapb - S. rpo_measure x (mapb x))"
apply (rule sum.cong)
apply (simp add: P)
apply (simp add: x)
done
show ?thesis using y
apply (simp add: revoke_progress_ord_def)
apply (rule disjI2)
apply (fastforce simp: fold_Int_sub F Q)
done
qed
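text {* The lemma above, @{text "rpo_delta"}, localises the progress order:
to establish @{text "revoke_progress_ord"} between two cap maps it is
enough to exhibit a finite set of slots outside of which the maps agree and
on which the summed @{text "rpo_measure"} charge strictly decreases. *}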
definition
cap_to_rpo :: "cap \<Rightarrow> cslot_ptr option \<times> nat"
where
"cap_to_rpo cap \<equiv> case cap of
cap.NullCap \<Rightarrow> (None, 0)
| cap.Zombie p zb n \<Rightarrow> (Some (p, replicate (zombie_cte_bits zb) False), 2)
| _ \<Rightarrow> (None, 3)"
lemmas caps_of_state_set_finite'
= cte_wp_at_set_finite[simplified cte_wp_at_caps_of_state]
lemmas caps_of_state_set_finite
= caps_of_state_set_finite'
caps_of_state_set_finite'[where P="\<top>\<top>", simplified]
lemma empty_slot_rvk_prog:
"\<lbrace>\<lambda>s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>
empty_slot sl opt
\<lbrace>\<lambda>rv s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>"
apply (simp add: empty_slot_def)
apply (rule hoare_pre)
apply (wp opt_return_pres_lift | simp split del: if_split)+
apply (wp get_cap_wp)
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (erule rpo_trans)
apply (rule rpo_delta[where S="{sl}"],
simp_all add: dom_def caps_of_state_set_finite exception_set_finite)
apply (case_tac cap, simp_all add: cap_to_rpo_def)
done
lemma rvk_prog_update_strg:
"revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)
\<and> cte_wp_at (\<lambda>cp. cap_to_rpo cp = cap_to_rpo cap
\<or> rpo_measure p (Some (cap_to_rpo cp))
> rpo_measure p (Some (cap_to_rpo cap))) p s
\<longrightarrow> revoke_progress_ord m (option_map cap_to_rpo \<circ> ((caps_of_state s) (p \<mapsto> cap)))"
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (erule disjE)
apply (erule rsubst[where P="\<lambda>mp. revoke_progress_ord m mp"])
apply (rule ext, simp)
apply (erule rpo_trans)
apply (rule rpo_delta[where S="{p}"],
simp_all add: dom_def caps_of_state_set_finite)
apply (rule exception_set_finite)
apply (rule finite_subset [OF _ caps_of_state_set_finite(2)[where s=s]])
apply clarsimp
done
lemma cap_swap_fd_rvk_prog:
"\<lbrace>\<lambda>s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)
\<and> cte_wp_at (\<lambda>cp. cap_to_rpo cp = (Some p1, 2) \<and> is_final_cap' cp s) p2 s\<rbrace>
cap_swap_for_delete p1 p2
\<lbrace>\<lambda>rv s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>"
apply (simp add: cap_swap_for_delete_def cap_swap_def)
apply (wp get_cap_wp | simp split del: if_split)+
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (erule rpo_trans)
apply (rule rpo_delta[where S="{p1, p2}"],
simp_all add: caps_of_state_set_finite exception_set_finite
dom_def)
apply (clarsimp simp: is_final_cap'_def2)
apply (frule spec[where x="fst p1"], drule spec[where x="snd p1"])
apply (drule spec[where x="fst p2"], drule spec[where x="snd p2"])
apply (clarsimp simp: cap_to_rpo_def split: cap.split_asm)
apply (simp split: cap.split)
apply (clarsimp simp: cte_wp_at_caps_of_state obj_irq_refs_empty)
apply (drule iffD1)
apply (simp add: obj_irq_refs_Int)
apply (simp only:)
apply simp
done
lemmas empty_slot_rvk_prog' = empty_slot_rvk_prog[unfolded o_def]
crunch rvk_prog: cancel_ipc "\<lambda>s. revoke_progress_ord m (\<lambda>x. option_map cap_to_rpo (caps_of_state s x))"
(simp: crunch_simps o_def unless_def is_final_cap_def tcb_cap_cases_def
wp: hoare_drop_imps empty_slot_rvk_prog' select_wp
thread_set_caps_of_state_trivial)
crunch rvk_prog: cancel_all_ipc "\<lambda>s. revoke_progress_ord m (\<lambda>x. option_map cap_to_rpo (caps_of_state s x))"
(simp: crunch_simps o_def unless_def is_final_cap_def
wp: crunch_wps empty_slot_rvk_prog' select_wp)
crunch rvk_prog: cancel_all_signals "\<lambda>s. revoke_progress_ord m (\<lambda>x. option_map cap_to_rpo (caps_of_state s x))"
(simp: crunch_simps o_def unless_def is_final_cap_def
wp: crunch_wps empty_slot_rvk_prog' select_wp)
crunch rvk_prog: suspend "\<lambda>s. revoke_progress_ord m (\<lambda>x. option_map cap_to_rpo (caps_of_state s x))"
(simp: crunch_simps o_def unless_def is_final_cap_def
wp: crunch_wps empty_slot_rvk_prog' select_wp)
crunch rvk_prog: deleting_irq_handler "\<lambda>s. revoke_progress_ord m (\<lambda>x. option_map cap_to_rpo (caps_of_state s x))"
(simp: crunch_simps o_def unless_def is_final_cap_def
wp: crunch_wps empty_slot_rvk_prog' select_wp)
locale CNodeInv_AI_3 = CNodeInv_AI_2 state_ext_t
for state_ext_t :: "'state_ext::state_ext itself" +
assumes finalise_cap_rvk_prog:
"\<And>a b.
\<lbrace>\<lambda>s::'state_ext state. revoke_progress_ord m (\<lambda>x. map_option cap_to_rpo (caps_of_state s x))\<rbrace>
finalise_cap a b
\<lbrace>\<lambda>_ s. revoke_progress_ord m (\<lambda>x. map_option cap_to_rpo (caps_of_state s x))\<rbrace>"
assumes rec_del_rvk_prog:
"\<And>(st::'state_ext state) args.
st \<turnstile> \<lbrace>\<lambda>s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)
\<and> (case args of ReduceZombieCall cap sl ex \<Rightarrow>
cte_wp_at (\<lambda>c. c = cap) sl s \<and> is_final_cap' cap s
| _ \<Rightarrow> True)\<rbrace>
rec_del args
\<lbrace>\<lambda>rv s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>,\<lbrace>\<top>\<top>\<rbrace>"
lemmas rdcall_simps = rec_del_call.simps exposed_rdcall.simps slot_rdcall.simps
context CNodeInv_AI_3 begin
lemma cap_delete_rvk_prog:
"\<And>m ptr.
\<lbrace>\<lambda>s::'state_ext state. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>
cap_delete ptr
\<lbrace>\<lambda>rv s. revoke_progress_ord m (option_map cap_to_rpo \<circ> caps_of_state s)\<rbrace>,-"
unfolding cap_delete_def validE_R_def
apply wpsimp
apply (unfold validE_R_def)
apply (rule use_spec)
apply (rule rec_del_rvk_prog)
apply (simp add: o_def)
done
end
lemma set_cap_id:
"cte_wp_at (op = c) p s \<Longrightarrow> set_cap c p s = ({((),s)}, False)"
apply (clarsimp simp: cte_wp_at_cases)
apply (cases p)
apply (erule disjE)
apply clarsimp
apply (simp add: set_cap_def get_object_def bind_assoc exec_gets)
apply (rule conjI)
apply (clarsimp simp: set_object_def exec_get put_def)
apply (cases s)
apply simp
apply (rule ext)
apply auto[1]
apply clarsimp
apply clarsimp
apply (simp add: set_cap_def get_object_def bind_assoc
exec_gets set_object_def exec_get put_def)
apply (clarsimp simp: tcb_cap_cases_def
split: if_split_asm,
simp_all add: map_upd_triv)
done
declare Inr_in_liftE_simp[simp]
lemma get_cap_fail_or_not:
"fst (get_cap slot s) \<noteq> {} \<Longrightarrow> snd (get_cap slot s) = False"
by (clarsimp elim!: nonemptyE dest!: get_cap_det)
function(sequential) red_zombie_will_fail :: "cap \<Rightarrow> bool"
where
"red_zombie_will_fail (cap.Zombie ptr zb 0) = True"
| "red_zombie_will_fail (cap.Zombie ptr zb (Suc n)) = False"
| "red_zombie_will_fail cap = True"
apply simp_all
apply (case_tac x)
prefer 11
apply (rename_tac nat)
apply (case_tac nat, simp_all)[1]
apply fastforce+
done
termination red_zombie_will_fail
by (rule red_zombie_will_fail.termination [OF Wellfounded.wf_empty])
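text {* By construction, @{text "red_zombie_will_fail"} holds exactly for
zombies with no remaining slots and for non-zombie capabilities, to which
the zombie-reduction step does not apply.  Illustrative instances, expected
to follow from the generated equations: *}

lemma "red_zombie_will_fail (cap.Zombie ptr zb 0)"
by (simp add: red_zombie_will_fail.simps)

lemma "\<not> red_zombie_will_fail (cap.Zombie ptr zb (Suc n))"
by (simp add: red_zombie_will_fail.simps)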
context CNodeInv_AI_3 begin
lemma rec_del_emptyable:
"\<And>args.
\<lbrace>invs and valid_rec_del_call args
and (\<lambda>s. \<not> exposed_rdcall args
\<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) (slot_rdcall args) s)
and emptyable (slot_rdcall args)
and (\<lambda>s. case args of ReduceZombieCall cap sl ex \<Rightarrow>
\<not> cap_removeable cap sl
\<and> (\<forall>t\<in>obj_refs cap. halted_if_tcb t s)
| _ \<Rightarrow> True)\<rbrace>
rec_del args
\<lbrace>\<lambda>rv. emptyable (slot_rdcall args) :: 'state_ext state \<Rightarrow> bool\<rbrace>, -"
apply (rule validE_validE_R)
apply (rule hoare_post_impErr)
apply (rule hoare_pre)
apply (rule use_spec)
apply (rule rec_del_invs')
apply simp+
done
lemma reduce_zombie_cap_to:
"\<And>cap slot exp.
\<lbrace>invs and valid_rec_del_call (ReduceZombieCall cap slot exp) and
emptyable slot and
(\<lambda>s. \<not> exp \<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) slot s) and
K (\<not> cap_removeable cap slot) and
(\<lambda>s. \<forall>t\<in>obj_refs cap. halted_if_tcb t s)\<rbrace>
rec_del (ReduceZombieCall cap slot exp)
\<lbrace>\<lambda>rv (s::'state_ext state). \<not> exp \<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) slot s\<rbrace>, -"
apply (rule validE_validE_R)
apply (rule hoare_post_impErr)
apply (rule hoare_pre)
apply (rule use_spec)
apply (rule rec_del_invs')
apply simp+
done
lemma cte_at_replicate_zbits:
"\<And>(s::'state_ext state) oref zb n.
\<lbrakk> s \<turnstile> cap.Zombie oref zb n \<rbrakk> \<Longrightarrow> cte_at (oref, replicate (zombie_cte_bits zb) False) s"
apply (clarsimp simp: valid_cap_def obj_at_def is_tcb is_cap_table
split: option.split_asm)
apply (rule cte_wp_at_tcbI, simp)
apply (fastforce simp add: tcb_cap_cases_def tcb_cnode_index_def to_bl_1)
apply simp
apply (subgoal_tac "replicate x2 False \<in> dom cs")
apply safe[1]
apply (rule cte_wp_at_cteI, fastforce)
apply (simp add: well_formed_cnode_n_def length_set_helper)
apply simp
apply simp
apply (clarsimp simp: well_formed_cnode_n_def)
done
lemma reduce_zombie_cap_somewhere:
"\<And>exp cap slot.
\<lbrace>\<lambda>s::'state_ext state. \<not> exp \<longrightarrow> (\<exists>oref cref. cte_wp_at P (oref, cref) s)\<rbrace>
rec_del (ReduceZombieCall cap slot exp)
\<lbrace>\<lambda>rv s. \<not> exp \<longrightarrow> (\<exists>oref cref. cte_wp_at P (oref, cref) s)\<rbrace>"
subgoal for exp cap slot
apply (cases exp, simp_all, wp)
apply (cases cap, simp_all add: rec_del_fails)
apply (rename_tac word option nat)
apply (case_tac nat, simp_all add: rec_del_simps_ext)
apply (simp add: cte_wp_at_caps_of_state)
apply wp
apply safe
apply (rule_tac x="fst ((id ((word, replicate (zombie_cte_bits option) False) := slot,
slot := (word, replicate (zombie_cte_bits option) False))) (oref, cref))"
in exI)
apply (rule_tac x="snd ((id ((word, replicate (zombie_cte_bits option) False) := slot,
slot := (word, replicate (zombie_cte_bits option) False))) (oref, cref))"
in exI)
apply fastforce
done
done
end
lemma set_cap_cap_somewhere:
"\<lbrace>\<lambda>s. cte_wp_at (\<lambda>cp. P (fst slot) (snd slot) cp \<longrightarrow> P (fst slot) (snd slot) cap) slot s
\<and> (\<exists>oref cref. cte_wp_at (P oref cref) (oref, cref) s)\<rbrace>
set_cap cap slot
\<lbrace>\<lambda>rv s. \<exists>oref cref. cte_wp_at (P oref cref) (oref, cref) s\<rbrace>"
apply (simp add: cte_wp_at_caps_of_state)
apply wp
apply clarsimp
apply (rule_tac x=oref in exI)
apply (rule_tac x=cref in exI)
apply fastforce
done
context CNodeInv_AI_3 begin
lemma rec_del_ReduceZombie_emptyable:
"\<And>cap slot ex.
\<lbrace>invs and (cte_wp_at (op = cap) slot and is_final_cap' cap
and (\<lambda>y. is_zombie cap))
and (\<lambda>s. \<not> ex \<longrightarrow> ex_cte_cap_wp_to (\<lambda>cp. cap_irqs cp = {}) slot s)
and emptyable slot
and (\<lambda>s. \<not> cap_removeable cap slot \<and> (\<forall>t\<in>obj_refs cap. halted_if_tcb t s))\<rbrace>
rec_del (ReduceZombieCall cap slot ex)
\<lbrace>\<lambda>rv. emptyable slot :: 'state_ext state \<Rightarrow> bool\<rbrace>, -"
subgoal for cap slot ex
by (rule rec_del_emptyable [where args="ReduceZombieCall cap slot ex", simplified])
done
end
text {* The revoke function and its properties are
slightly easier to deal with than the delete
function. However, its termination argument
is complex, requiring that the delete function
reduces the number of non-null capabilities. *}
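text {* In outline: each slot's contents are abstracted by
@{text "cap_to_rpo"} and charged by @{text "rpo_measure"}, and the
recursion measure below sums these charges over all slots.  A revocation
step deletes a descendant slot that currently holds a non-null capability;
by @{text "cap_delete_rvk_prog"} the deletion never increases the sum, and
by @{text "cap_delete_deletes"} it leaves that slot null, so the sum
strictly decreases.  This is made precise in
@{text "cap_revoke_termination"} further below. *}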
definition
cap_revoke_recset :: "((cslot_ptr \<times> 'z::state_ext state) \<times> (cslot_ptr \<times> 'z::state_ext state)) set"
where
"cap_revoke_recset \<equiv> measure (\<lambda>(sl, s). (\<lambda>mp. \<Sum>x \<in> dom mp. rpo_measure x (mp x))
(option_map cap_to_rpo \<circ> caps_of_state s))"
lemma wf_cap_revoke_recset:
"wf cap_revoke_recset"
by (simp add: cap_revoke_recset_def)
lemma rpo_sym:
"revoke_progress_ord m m"
by (simp add: revoke_progress_ord_def)
lemma in_select_ext_weak: "(a,b) \<in> fst (select_ext f S s) \<Longrightarrow>
(a,b) \<in> fst (select S s)"
apply (drule_tac Q="\<lambda>r s'. r \<in> S \<and> s' = s" in use_valid[OF _ select_ext_weak_wp])
apply (simp add: select_def)+
done
context CNodeInv_AI_3 begin
lemma cap_revoke_termination:
"All (cap_revoke_dom :: (machine_word \<times> bool list) \<times> 'state_ext state \<Rightarrow> bool)"
apply (rule cap_revoke.termination)
apply (rule wf_cap_revoke_recset)
apply (clarsimp simp add: cap_revoke_recset_def in_monad select_def
dest!: iffD1[OF in_get_cap_cte_wp_at] in_select_ext_weak)
apply (frule use_validE_R [OF _ cap_delete_rvk_prog])
apply (rule rpo_sym)
apply (frule use_validE_R [OF _ cap_delete_deletes])
apply simp
apply (simp add: revoke_progress_ord_def)
apply (erule disjE)
apply (drule_tac f="\<lambda>f. f (aa, ba)" in arg_cong)
apply (clarsimp simp: cte_wp_at_caps_of_state cap_to_rpo_def)
apply (simp split: cap.split_asm)
apply (drule in_preempt, clarsimp simp: trans_state_update'[symmetric])
done
lemma cap_revoke_dom: "\<And> (p :: (machine_word \<times> bool list) \<times> 'state_ext state). cap_revoke_dom p"
using cap_revoke_termination by blast
lemmas cap_revoke_simps = cap_revoke.psimps[OF cap_revoke_dom]
lemmas cap_revoke_induct = cap_revoke.pinduct[OF cap_revoke_dom]
lemma cap_revoke_preservation':
fixes P and s :: "'state_ext state" and ptr
assumes x: "\<And>p. \<lbrace>P\<rbrace> cap_delete p \<lbrace>\<lambda>rv. P\<rbrace>"
assumes p: "\<lbrace>P\<rbrace> preemption_point \<lbrace>\<lambda>rv. P\<rbrace>"
shows "s \<turnstile> \<lbrace>P\<rbrace> cap_revoke ptr \<lbrace>\<lambda>rv. P\<rbrace>, \<lbrace>\<lambda>rv. P\<rbrace>"
proof (induct rule: cap_revoke_induct)
case (1 slot)
show ?case
apply (subst cap_revoke_simps)
apply (wp "1.hyps")
apply (wp x p hoare_drop_imps select_wp)+
apply simp_all
done
qed
lemmas cap_revoke_preservation = use_spec(2) [OF cap_revoke_preservation']
lemmas cap_revoke_preservation2 = cap_revoke_preservation[THEN validE_valid]
lemma ball_subset: "\<forall>x\<in>A. Q x \<Longrightarrow> B \<subseteq> A \<Longrightarrow> \<forall>x\<in>B. Q x"
apply blast
done
lemma cap_revoke_preservation_desc_of':
fixes P Q and s :: "'state_ext state"
assumes x: "\<And>p. \<lbrace>P and Q p\<rbrace> cap_delete p \<lbrace>\<lambda>rv. P\<rbrace>"
and y: "\<And>sl s. P s \<Longrightarrow> \<forall>sl' \<in> descendants_of sl (cdt s). Q sl' s"
assumes p: "\<lbrace>P\<rbrace> preemption_point \<lbrace>\<lambda>rv. P\<rbrace>"
shows "s \<turnstile> \<lbrace>P\<rbrace> cap_revoke ptr \<lbrace>\<lambda>rv. P\<rbrace>, \<lbrace>\<lambda>rv. P\<rbrace>"
proof (induct rule: cap_revoke_induct)
case (1 slot)
show ?case
apply (subst cap_revoke_simps)
apply (wp "1.hyps")
apply (wp x p hoare_drop_imps select_wp)+
apply (simp_all add: y)
done
qed
lemmas cap_revoke_preservation_desc_of =
use_spec(2) [OF cap_revoke_preservation_desc_of']
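text {* Hence any property that is preserved by @{text "cap_delete"}
(possibly only on descendants of the revoked slot) and by the preemption
point is preserved across @{text "cap_revoke"}; the @{text "typ_at"} and
@{text "invs"} lemmas below are instances. *}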
lemma cap_revoke_typ_at:
"\<And>P T p. \<lbrace>\<lambda>s::'state_ext state. P (typ_at T p s)\<rbrace> cap_revoke ptr \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
by (wp cap_delete_typ_at cap_revoke_preservation irq_state_independent_AI preemption_point_inv, simp+)
lemma cap_revoke_invs:
"\<And>ptr. \<lbrace>\<lambda>s::'state_ext state. invs s\<rbrace> cap_revoke ptr \<lbrace>\<lambda>rv. invs\<rbrace>"
apply (wp cap_revoke_preservation_desc_of)
apply (fastforce simp: emptyable_def dest: reply_slot_not_descendant)
apply (wp preemption_point_inv)
apply simp+
done
end
lemma descendants_of_cdt_parent:
"\<lbrakk> p' \<in> descendants_of p (cdt s) \<rbrakk> \<Longrightarrow> \<exists>p''. cdt s \<Turnstile> p'' \<leadsto> p'"
apply (simp add: descendants_of_def del: split_paired_Ex)
apply (erule tranclE)
apply (erule exI)
apply (erule exI)
done
lemma cap_revoke_mdb_stuff3:
"\<lbrakk> p' \<in> descendants_of p (cdt s); valid_mdb s \<rbrakk>
\<Longrightarrow> cte_wp_at (op \<noteq> cap.NullCap) p' s"
apply (clarsimp simp add: valid_mdb_def
dest!: descendants_of_cdt_parent)
apply (simp add: cdt_parent_of_def)
apply (drule(1) mdb_cte_atD)
apply simp
done
crunch typ_at[wp]: cancel_badged_sends "\<lambda>s. P (typ_at T p s)"
(wp: crunch_wps simp: crunch_simps filterM_mapM unless_def
ignore: without_preemption filterM set_object clearMemory)
locale CNodeInv_AI_4 = CNodeInv_AI_3 state_ext_t
for state_ext_t :: "'state_ext::state_ext itself" +
assumes finalise_slot_typ_at [wp]:
"\<And>P T p. \<lbrace>\<lambda>s::'state_ext state. P (typ_at T p s)\<rbrace> finalise_slot a b \<lbrace>\<lambda>_ s. P (typ_at T p s)\<rbrace>"
assumes weak_derived_appropriate:
"\<And>cap cap'. weak_derived cap cap' \<Longrightarrow> appropriate_cte_cap cap = appropriate_cte_cap cap'"
context CNodeInv_AI_4 begin
lemma inv_cnode_typ_at:
"\<And>P T p ci. \<lbrace>\<lambda>s::'state_ext state. P (typ_at T p s)\<rbrace> invoke_cnode ci \<lbrace>\<lambda>rv s. P (typ_at T p s)\<rbrace>"
apply (case_tac ci, simp_all add: invoke_cnode_def split del: if_split)
apply (wp cap_insert_typ_at cap_move_typ_at cap_swap_typ_at hoare_drop_imps
cap_delete_typ_at cap_revoke_typ_at hoare_vcg_all_lift | wpc |
simp | rule conjI impI | rule hoare_pre)+
done
lemma invoke_cnode_tcb[wp]:
"\<And>tptr ci. \<lbrace>tcb_at tptr::'state_ext state \<Rightarrow> bool\<rbrace> invoke_cnode ci \<lbrace>\<lambda>rv. tcb_at tptr\<rbrace>"
by (simp add: tcb_at_typ, wp inv_cnode_typ_at)
end
lemma duplicate_creation:
"\<lbrace>cte_wp_at (\<lambda>c. obj_refs c = obj_refs cap
\<and> cap_irqs c = cap_irqs cap) p
and cte_at p' and K (p \<noteq> p')\<rbrace>
set_cap cap p'
\<lbrace>\<lambda>rv s. cte_wp_at (\<lambda>cap. \<not> is_final_cap' cap s) p s\<rbrace>"
apply (rule hoare_gen_asm)
apply (rule hoare_post_imp [where Q="\<lambda>rv. cte_wp_at (\<lambda>c. obj_refs c = obj_refs cap
\<and>cap_irqs c = cap_irqs cap) p
and cte_wp_at (op = cap) p'"])
apply (clarsimp simp: cte_wp_at_def)
apply (case_tac "\<exists>x. x \<in> obj_refs cap \<and> x \<in> obj_refs capa")
apply (elim exE conjE)
apply (frule (4) final_cap_duplicate_obj_ref)
apply simp
apply (case_tac "\<exists>x. x \<in> cap_irqs cap \<and> x \<in> cap_irqs capa")
apply (elim exE conjE)
apply (frule (4) final_cap_duplicate_irq, simp)
apply (simp add: is_final_cap'_def)
apply (wp set_cap_cte_wp_at)
apply simp_all
done
definition
zombies_final_caps :: "(cslot_ptr \<rightharpoonup> cap) \<Rightarrow> bool"
where
"zombies_final_caps \<equiv> \<lambda>cps. \<forall>p p' cap cap'.
cps p = Some cap \<and> cps p' = Some cap'
\<and> obj_refs cap \<inter> obj_refs cap' \<noteq> {} \<and> p \<noteq> p'
\<longrightarrow> \<not> is_zombie cap \<and> \<not> is_zombie cap'"
lemma zombies_final_caps_of_state:
"zombies_final = zombies_final_caps \<circ> caps_of_state"
by (rule ext,
simp add: zombies_final_def2 zombies_final_caps_def
cte_wp_at_caps_of_state)
lemma zombies_final_injective:
"\<lbrakk> zombies_final_caps (caps_of_state s); inj f \<rbrakk>
\<Longrightarrow> zombies_final_caps (caps_of_state s \<circ> f)"
apply (simp only: zombies_final_caps_def o_def)
apply (intro allI impI)
apply (elim conjE allE, erule mp)
apply (erule conjI)+
apply (simp add: inj_eq)
done
lemma set_cdt_caps_of_state[wp]:
"\<lbrace>\<lambda>s. P (caps_of_state s)\<rbrace> set_cdt p \<lbrace>\<lambda>rv s. P (caps_of_state s)\<rbrace>"
apply (simp add: set_cdt_def)
apply wp
apply (simp add: caps_of_state_cte_wp_at)
done
lemma cap_move_caps_of_state:
notes fun_upd_apply [simp del]
shows "\<lbrace>\<lambda>s. P ((caps_of_state s) (ptr' \<mapsto> cap, ptr \<mapsto> cap.NullCap ))\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv s. P (caps_of_state s)\<rbrace>"
by (wpsimp simp: cap_move_def)
lemma zombies_duplicate_creation:
"\<lbrace>\<lambda>s. zombies_final s \<and> \<not> is_zombie cap
\<and> (\<exists>p'. cte_wp_at (\<lambda>c. obj_refs c = obj_refs cap \<and> \<not> is_zombie c) p' s)
\<and> cte_wp_at (op = cap.NullCap) p s\<rbrace>
set_cap cap p
\<lbrace>\<lambda>rv. zombies_final\<rbrace>"
apply (wp set_cap_zombies)
apply (clarsimp simp: cte_wp_at_def)
apply (thin_tac "x \<noteq> y" for x y)
apply (case_tac "(a, b) = (aa, ba)")
apply clarsimp
apply (drule(3) zombies_finalD2)
apply blast
apply simp
done
lemma state_refs_of_rvk[simp]:
"state_refs_of (is_original_cap_update f s) = state_refs_of s"
by (simp add: state_refs_of_def)
lemma weak_derived_is_zombie:
"weak_derived cap cap' \<Longrightarrow> is_zombie cap = is_zombie cap'"
by (auto simp: weak_derived_def copy_of_def is_cap_simps same_object_as_def
split: if_split_asm cap.splits)
lemma cap_move_zombies_final[wp]:
"\<lbrace>zombies_final and cte_wp_at (op = cap.NullCap) ptr'
and cte_wp_at (weak_derived cap) ptr
and K (ptr \<noteq> ptr')\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. zombies_final\<rbrace>"
unfolding cap_move_def zombies_final_caps_of_state o_def set_cdt_def
apply (rule hoare_pre)
apply (wp|simp)+
apply (simp add: cte_wp_at_caps_of_state zombies_final_caps_def del: split_paired_All)
apply (elim conjE exE)
apply (intro impI allI)
apply (simp add: weak_derived_obj_refs weak_derived_is_zombie del: split_paired_All)
apply blast
done
lemma cap_move_if_live[wp]:
"\<lbrace>cte_wp_at (op = cap.NullCap) ptr'
and cte_wp_at (weak_derived cap) ptr
and K (ptr \<noteq> ptr')
and if_live_then_nonz_cap\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv s. if_live_then_nonz_cap s\<rbrace>"
unfolding cap_move_def
apply (rule hoare_pre)
apply (wp|simp)+
apply (rule hoare_post_imp, simp only: if_live_then_nonz_cap_def)
apply (simp only: ex_nonz_cap_to_def cte_wp_at_caps_of_state
imp_conv_disj)
apply (wp hoare_vcg_disj_lift hoare_vcg_all_lift)+
apply (clarsimp simp: if_live_then_nonz_cap_def
ex_nonz_cap_to_def cte_wp_at_caps_of_state
del: allI
simp del: split_paired_Ex)
apply (erule allEI, rule impI, drule(1) mp)
apply (erule exfEI[where f="id (ptr := ptr', ptr' := ptr)"])
apply (clarsimp simp: weak_derived_obj_refs zobj_refs_to_obj_refs)
apply (rule conjI)
apply (clarsimp simp: weak_derived_is_zombie)
apply clarsimp
done
lemma weak_derived_cte_refs':
"weak_derived cap cap' \<Longrightarrow> cte_refs cap = cte_refs cap'"
by (fastforce simp: copy_of_cte_refs weak_derived_def)
lemma appropriate_cte_master:
"appropriate_cte_cap (cap_master_cap cap) = appropriate_cte_cap cap"
apply (rule ext)
apply (simp add: cap_master_cap_def appropriate_cte_cap_def
split: cap.split)
done
context CNodeInv_AI_4 begin
lemma cap_move_if_unsafe [wp]:
"\<And>ptr' cap ptr.
\<lbrace>cte_wp_at (op = cap.NullCap) ptr'
and cte_wp_at (weak_derived cap) ptr
and K (ptr \<noteq> ptr')
and if_unsafe_then_cap
and ex_cte_cap_wp_to (appropriate_cte_cap cap) ptr'\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. if_unsafe_then_cap :: 'state_ext state \<Rightarrow> bool\<rbrace>"
subgoal for ptr' cap ptr
apply (simp add: cap_move_def)
apply (wp | simp)+
apply (rule hoare_post_imp, simp only: if_unsafe_then_cap_def)
apply (simp only: ex_cte_cap_wp_to_def cte_wp_at_caps_of_state)
apply wp+
apply (clarsimp simp: if_unsafe_then_cap_def
ex_cte_cap_wp_to_def cte_wp_at_caps_of_state
simp del: split_paired_All split_paired_Ex
del: allI
split del: if_split)
apply (frule weak_derived_Null)
apply (frule weak_derived_cte_refs')
apply (frule cap_irqs_appropriateness [OF weak_derived_cap_irqs])
apply (frule weak_derived_appropriate)
apply (erule allfEI[where f="id (ptr := ptr', ptr' := ptr)"])
apply (case_tac "cref = ptr'")
apply (intro allI impI,
rule_tac x="(id (ptr := ptr', ptr' := ptr)) (a, b)" in exI)
apply fastforce
apply (clarsimp split: if_split_asm split del: if_split del: exE
simp del: split_paired_All split_paired_Ex)
apply (erule exfEI[where f="id (ptr := ptr', ptr' := ptr)"])
apply (clarsimp split: if_split_asm)
apply fastforce
done
done
end
crunch arch[wp]: cap_move "\<lambda>s. P (arch_state s)"
crunch irq_node[wp]: cap_move "\<lambda>s. P (interrupt_irq_node s)"
lemma cap_range_NullCap:
"cap_range cap.NullCap = {}"
by (simp add: cap_range_def)
crunch interrupt_states[wp]: cap_move "\<lambda>s. P (interrupt_states s)"
lemma cap_move_irq_handlers[wp]:
"\<lbrace>valid_irq_handlers and cte_wp_at (op = cap.NullCap) ptr'
and cte_wp_at (weak_derived cap) ptr\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. valid_irq_handlers\<rbrace>"
apply (simp add: valid_irq_handlers_def irq_issued_def)
apply (rule hoare_pre)
apply (rule hoare_use_eq [where f=interrupt_states, OF cap_move_interrupt_states])
apply (simp add: cap_move_def set_cdt_def)
apply (wp | simp)+
apply (clarsimp simp: cte_wp_at_caps_of_state
elim!: ranE split: if_split_asm
dest!: weak_derived_cap_irqs)
apply auto
done
lemma cap_move_has_reply_cap_neg:
"\<lbrace>\<lambda>s. \<not> has_reply_cap t s \<and>
cte_wp_at (weak_derived c) p s \<and>
cte_wp_at (op = cap.NullCap) p' s \<and>
p \<noteq> p'\<rbrace>
cap_move c p p' \<lbrace>\<lambda>rv s. \<not> has_reply_cap t s\<rbrace>"
apply (simp add: has_reply_cap_def cte_wp_at_caps_of_state
del: split_paired_All split_paired_Ex)
apply (wp cap_move_caps_of_state)
apply (elim conjE exE)
apply (erule(1) cap_swap_no_reply_caps, clarsimp+)
done
lemma cap_move_replies:
"\<lbrace>\<lambda>s. valid_reply_caps s
\<and> cte_wp_at (weak_derived c) p s
\<and> cte_wp_at (op = cap.NullCap) p' s
\<and> p \<noteq> p'\<rbrace>
cap_move c p p'
\<lbrace>\<lambda>rv s. valid_reply_caps s\<rbrace>"
apply (simp add: valid_reply_caps_def)
apply (rule hoare_pre)
apply (simp only: imp_conv_disj)
apply (wp hoare_vcg_all_lift hoare_vcg_disj_lift cap_move_has_reply_cap_neg)
apply (simp add: cap_move_def, (wp|simp)+)
apply (rule cap_move_caps_of_state)
apply (clarsimp simp: fun_upd_def cte_wp_at_caps_of_state
unique_reply_caps_cap_swap [simplified fun_upd_def])
done
lemma copy_of_reply_master:
"copy_of cap cap' \<Longrightarrow> is_master_reply_cap cap = is_master_reply_cap cap'"
apply (clarsimp simp: copy_of_def is_cap_simps)
apply (clarsimp simp: same_object_as_def split: cap.splits)
done
context CNodeInv_AI_4 begin
lemma cap_move_valid_arch_caps[wp]:
"\<And>cap ptr.
\<lbrace>valid_arch_caps
and cte_wp_at (weak_derived cap) ptr
and cte_wp_at (op = cap.NullCap) ptr'\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. valid_arch_caps :: 'state_ext state \<Rightarrow> bool\<rbrace>"
apply (simp add: cap_move_def)
apply (rule hoare_pre)
apply (subst bind_assoc[symmetric],
rule hoare_seq_ext [rotated],
rule swap_of_caps_valid_arch_caps)
apply (wp | simp)+
apply (clarsimp elim!: cte_wp_at_weakenE)
done
end
crunch valid_global_objs[wp]: cap_move "valid_global_objs"
lemma cap_move_valid_ioc[wp]:
"\<lbrace>valid_ioc and
cte_wp_at (weak_derived cap) ptr and cte_wp_at (op = cap.NullCap) ptr'\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. valid_ioc\<rbrace>"
apply (simp add: cap_move_def valid_ioc_def[abs_def] cte_wp_at_caps_of_state
pred_conj_def)
apply (wp set_cdt_cos_ioc set_cap_caps_of_state2 | simp)+
apply (cases ptr, clarsimp simp add: cte_wp_at_caps_of_state valid_ioc_def)
apply (drule spec, drule spec, erule impE, assumption)
apply clarsimp
done
declare cdt_update.state_refs_update [simp]
locale CNodeInv_AI_5 = CNodeInv_AI_4 state_ext_t
for state_ext_t :: "'state_ext::state_ext itself" +
assumes cap_move_invs[wp]:
"\<And>cap ptr' ptr.
\<lbrace>invs and valid_cap cap and cte_wp_at (op = cap.NullCap) ptr'
and tcb_cap_valid cap ptr'
and cte_wp_at (weak_derived cap) ptr
and cte_wp_at (\<lambda>c. c \<noteq> cap.NullCap) ptr
and ex_cte_cap_wp_to (appropriate_cte_cap cap) ptr' and K (ptr \<noteq> ptr')
and K (\<not> is_master_reply_cap cap)\<rbrace>
cap_move cap ptr ptr'
\<lbrace>\<lambda>rv. invs::'state_ext state \<Rightarrow> bool\<rbrace>"
lemma cte_wp_at_use2:
"\<lbrakk>cte_wp_at P p s; cte_wp_at P' p s; \<And>c. \<lbrakk>cte_wp_at (op = c) p s; P c; P' c\<rbrakk> \<Longrightarrow> Q \<rbrakk> \<Longrightarrow> Q"
by (auto simp: cte_wp_at_caps_of_state)
lemma cte_wp_at_use3:
"\<lbrakk>cte_wp_at P p s; cte_wp_at P' p s; cte_wp_at P'' p s; \<And>c. \<lbrakk>cte_wp_at (op = c) p s; P c; P' c; P'' c\<rbrakk> \<Longrightarrow> Q \<rbrakk> \<Longrightarrow> Q"
by (auto simp: cte_wp_at_caps_of_state)
lemma cap_move_valid_cap[wp]:
"\<lbrace>\<lambda>s. s \<turnstile> cap'\<rbrace> cap_move cap p p' \<lbrace>\<lambda>_ s. s \<turnstile> cap'\<rbrace>"
unfolding cap_move_def
by (wp set_cdt_valid_cap | simp)+
lemma weak_derived_cte_refs_abs:
"weak_derived c c' \<Longrightarrow> cte_refs c' = cte_refs c"
apply (clarsimp simp: weak_derived_def copy_of_def)
apply (auto simp: same_object_as_def is_cap_simps bits_of_def
split: if_split_asm cap.splits)
done
lemma cap_move_ex_cap_cte:
"\<lbrace>ex_cte_cap_wp_to P ptr and
cte_wp_at (weak_derived cap) p and
cte_wp_at (op = cap.NullCap) p' and
K (p \<noteq> p') and K (\<forall>cap'. weak_derived cap cap' \<longrightarrow> P cap = P cap')\<rbrace>
cap_move cap p p'
\<lbrace>\<lambda>_. ex_cte_cap_wp_to P ptr\<rbrace>"
unfolding cap_move_def ex_cte_cap_wp_to_def cte_wp_at_caps_of_state set_cdt_def
apply (rule hoare_pre)
apply wp
apply (simp del: split_paired_Ex)
apply (wp set_cap_caps_of_state | simp del: split_paired_Ex add: cte_wp_at_caps_of_state)+
apply (elim conjE exE)
apply (case_tac "cref = p")
apply (rule_tac x=p' in exI)
apply clarsimp
apply (drule weak_derived_cte_refs_abs)
apply simp
apply (rule_tac x=cref in exI)
apply clarsimp
done
lemma cap_move_src_slot_Null:
"\<lbrace>cte_at src and K(src \<noteq> dest)\<rbrace> cap_move cap src dest \<lbrace>\<lambda>_ s. cte_wp_at (op = cap.NullCap) src s\<rbrace>"
unfolding cap_move_def
by (wp set_cdt_cte_wp_at set_cap_cte_wp_at' | simp)+
crunch pred_tcb_at[wp]: cap_move "pred_tcb_at proj P t"
lemmas (in CNodeInv_AI_5) cap_revoke_cap_table[wp]
= cap_table_at_lift_valid [OF cap_revoke_typ_at]
lemmas appropriate_cte_cap_simps = appropriate_cte_cap_def [split_simps cap.split]
context CNodeInv_AI_5 begin
crunch inv [wp]: is_final_cap "P"
lemma is_final_cap_is_final[wp]:
"\<lbrace>\<top>\<rbrace> is_final_cap cap \<lbrace>\<lambda>rv s. rv = is_final_cap' cap s\<rbrace>"
unfolding is_final_cap_def
by wp simp
end
lemma real_cte_not_reply_masterD:
"\<And>P ptr.
\<lbrakk> real_cte_at ptr s; valid_reply_masters s; valid_objs s \<rbrakk> \<Longrightarrow>
cte_wp_at (\<lambda>cap. \<not> is_master_reply_cap cap) ptr s"
apply clarsimp
apply (subgoal_tac "\<not> tcb_at a s")
apply (clarsimp simp: cap_table_at_cte_at cte_wp_at_not_reply_master)
apply (clarsimp simp: obj_at_def is_tcb is_cap_table)
done
lemma real_cte_weak_derived_not_reply_masterD:
"\<And>cap ptr.
\<lbrakk> cte_wp_at (weak_derived cap) ptr s; real_cte_at ptr s;
valid_reply_masters s; valid_objs s \<rbrakk> \<Longrightarrow>
\<not> is_master_reply_cap cap"
by (fastforce simp: cte_wp_at_caps_of_state weak_derived_replies
dest!: real_cte_not_reply_masterD)
lemma real_cte_is_derived_not_replyD:
"\<And>m p cap ptr.
\<lbrakk> cte_wp_at (is_derived m p cap) ptr s; real_cte_at ptr s;
valid_reply_masters s; valid_objs s \<rbrakk> \<Longrightarrow>
\<not> is_reply_cap cap"
by (fastforce simp: cte_wp_at_caps_of_state is_derived_def
dest!: real_cte_not_reply_masterD)
lemma cap_irqs_is_derived:
"is_derived m ptr cap cap' \<Longrightarrow> cap_irqs cap = cap_irqs cap'"
by (clarsimp simp: is_derived_def cap_master_cap_irqs split: if_split_asm)
lemma tcb_cap_valid_mdb[simp]:
"tcb_cap_valid cap p (cdt_update mfn s) = tcb_cap_valid cap p s"
by (simp add: tcb_cap_valid_def)
lemma tcb_cap_valid_is_original_cap[simp]:
"tcb_cap_valid cap p (is_original_cap_update mfn s) = tcb_cap_valid cap p s"
by (simp add: tcb_cap_valid_def)
crunch tcb_cap_valid[wp]: cap_move "tcb_cap_valid cap p"
context CNodeInv_AI_5 begin
lemma invoke_cnode_invs[wp]:
fixes i shows
"\<lbrace>invs and valid_cnode_inv i\<rbrace> invoke_cnode i \<lbrace>\<lambda>rv. invs::'state_ext state \<Rightarrow> bool\<rbrace>"
unfolding invoke_cnode_def
apply (cases i)
apply simp
apply wp
apply (simp add: ex_cte_cap_to_cnode_always_appropriate_strg
real_cte_tcb_valid)
apply (rule conjI)
apply (clarsimp simp: cte_wp_at_caps_of_state dest!: cap_irqs_is_derived)
apply (rule conjI)
apply (elim conjE)
apply (drule real_cte_is_derived_not_replyD)
apply (simp add:invs_valid_objs invs_valid_reply_masters)+
apply (clarsimp simp:is_cap_simps)
apply (elim conjE)
apply (drule real_cte_not_reply_masterD)
apply (simp add:invs_valid_objs invs_valid_reply_masters)+
apply (clarsimp simp: cte_wp_at_caps_of_state is_derived_def)
apply simp
apply wp
apply (fastforce simp: real_cte_tcb_valid cte_wp_at_caps_of_state
ex_cte_cap_to_cnode_always_appropriate_strg
dest: real_cte_weak_derived_not_reply_masterD)
apply simp
apply (wp cap_revoke_invs)
apply simp
apply simp
apply wp
apply (clarsimp simp: emptyable_def obj_at_def is_tcb is_cap_table)
apply simp
apply (rule conjI)
apply (rule impI)
apply wp
apply (fastforce simp: real_cte_tcb_valid
ex_cte_cap_to_cnode_always_appropriate_strg
dest: real_cte_weak_derived_not_reply_masterD)
apply (rule impI)
apply (rule hoare_pre)
apply wp
apply (simp add: cte_wp_at_caps_of_state)
apply (wp cap_move_caps_of_state cap_move_ex_cap_cte)
apply (simp add: pred_conj_def)
apply (elim conjE exE)
apply (simp add: real_cte_tcb_valid ex_cte_cap_to_cnode_always_appropriate_strg
cap_irqs_appropriateness [OF weak_derived_cap_irqs])
apply (intro conjI,
(fastforce simp: cte_wp_at_caps_of_state
dest: real_cte_weak_derived_not_reply_masterD)+)[1]
apply (wpsimp wp: hoare_drop_imps get_cap_wp)+
apply (rule conjI)
apply (clarsimp elim!: cte_wp_valid_cap)
apply (clarsimp simp: real_cte_tcb_valid cte_wp_at_caps_of_state
is_cap_simps ex_cte_cap_to_cnode_always_appropriate_strg)
apply (wpsimp)
done
end
crunch pred_tcb_at[wp]: cap_move "pred_tcb_at proj P t"
(* FIXME: rename, move *)
lemma omgwtfbbq[simp]:
"(\<forall>x. y \<noteq> x) = False"
by clarsimp
lemma corres_underlying_lift_ex1:
assumes c: "\<And>v. corres_underlying sr nf nf' r (P v and Q) P' a c"
shows "corres_underlying sr nf nf' r ((\<lambda>s. \<exists>v. P v s) and Q) P' a c"
unfolding corres_underlying_def
apply clarsimp
apply (cut_tac v = v in c)
apply (auto simp: corres_underlying_def)
done
lemmas corres_underlying_lift_ex1' = corres_underlying_lift_ex1 [where Q = \<top>, simplified]
lemma corres_underlying_lift_ex2:
assumes c: "\<And>v. corres_underlying sr nf nf' r P (P' v and Q) a c"
shows "corres_underlying sr nf nf' r P ((\<lambda>s. \<exists>v. P' v s) and Q) a c"
unfolding corres_underlying_def
apply clarsimp
apply (cut_tac v = v in c)
apply (auto simp: corres_underlying_def)
done
lemmas corres_underlying_lift_ex2' = corres_underlying_lift_ex2 [where Q = \<top>, simplified]
lemma real_cte_halted_if_tcb[simp]:
"real_cte_at (a, b) s \<Longrightarrow> halted_if_tcb a s"
by (clarsimp simp: halted_if_tcb_def obj_at_def is_cap_table is_tcb)
end
|
20070515 19:25:18 Howdy. You might want to consider discussing your issues rather than simply going back and forth about it. Users/JabberWokky
20070515 19:27:52 You might want to consider not deleting The California Aggie to make an argument. Users/BrentLaabs
20070516 00:41:14 You might want to consider not wikiediting your way out of a huge problem that needs to be immediately resolved through reallife interaction. Users/ElisaHough
20070516 22:05:09 This account is a PeterHamilton sock puppet. Users/EdWins
20070518 14:53:52 The IP address is interesting... no? In any case, this behavior could be viewed as disappointing and unprofessional. smile. Users/BoHeeKim
20070518 16:04:17 Maybe you should spend your time saving the little respect you have instead of using it on this pathetic wiki war. Users/LizethCazares
|
#' Flattens an array along an axis
#'
#' @param x Array
#' @param along Which axis to flatten along (default: last)
#' @param name_sep Character used to join slice names with the existing dimension names (default: NA, do not touch names)
#' @return An array with n-1 dimensions
#' @export
flatten = function(x, along=-1, name_sep=NA) {
if (along == -1)
along = length(dim(x))
re = split(x, along=along, drop=TRUE)
if (!is.na(name_sep))
re = mapply(function(x, n) {
dimnames(x)[[along]] = paste(n, dimnames(x)[[along]], sep=name_sep)
x
}, re, names(re), SIMPLIFY=FALSE)
stack(re, along=along)
}
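# Illustrative usage (not from the original package docs). `flatten` relies on
# the array-aware `split`/`stack` helpers defined elsewhere in this package,
# not on base R, so the calls below are indicative rather than standalone-runnable:
#
#   x <- array(1:8, dim = c(2, 2, 2),
#              dimnames = list(c("a","b"), c("c","d"), c("e","f")))
#   flatten(x)                             # collapse the last axis, giving a 2-dimensional result
#   flatten(x, along = 1, name_sep = ".")  # flatten the first axis, joining names with "."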
|
module BadTermination where
data N : Set where
zero : N
suc : N -> N
postulate inf : N
data D : N -> Set where
d₁ : D (suc inf)
d₂ : D inf
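-- Note: in the clause `f .(suc inf) d₁ = f inf d₂` below, the recursive call is
-- not on a structurally smaller constructor argument (`inf` is a postulate, so
-- nothing is actually being peeled off).  Whether the termination checker should
-- accept this via the dot pattern `.(suc inf)` is what this test case exercises.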
f : (n : N) -> D n -> N
f .inf d₂ = zero
f .(suc inf) d₁ = f inf d₂
|
(*
Author: Norbert Schirmer
Maintainer: Norbert Schirmer, norbert.schirmer at web de
License: LGPL
*)
(* Title: StateSpace.thy
Author: Norbert Schirmer, TU Muenchen
Copyright (C) 2004-2008 Norbert Schirmer
Some rights reserved, TU Muenchen
This library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation; either version 2.1 of the
License, or (at your option) any later version.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
USA
*)
section {* State Space Template *}
theory StateSpace imports Hoare
begin
record 'g state = "globals"::'g
definition
upd_globals:: "('g \<Rightarrow> 'g) \<Rightarrow> ('g,'z) state_scheme \<Rightarrow> ('g,'z) state_scheme"
where
"upd_globals upd s = s\<lparr>globals := upd (globals s)\<rparr>"
record ('g, 'n, 'val) stateSP = "'g state" +
locals :: "'n \<Rightarrow> 'val"
lemma upd_globals_conv: "upd_globals f = (\<lambda>s. s\<lparr>globals := f (globals s)\<rparr>)"
by (rule ext) (simp add: upd_globals_def)
end |
function b = r8mat_l1_inverse ( n, a )
%*****************************************************************************80
%
%% R8MAT_L1_INVERSE inverts a unit lower triangular R8MAT.
%
% Discussion:
%
% A unit lower triangular matrix is a matrix with only 1's on the main
% diagonal, and only 0's above the main diagonal.
%
% The inverse of a unit lower triangular matrix is also
% a unit lower triangular matrix.
%
% Licensing:
%
% This code is distributed under the GNU LGPL license.
%
% Modified:
%
% 28 October 2005
%
% Author:
%
% Original FORTRAN77 version by Albert Nijenhuis, Herbert Wilf.
% MATLAB version by John Burkardt.
%
% Reference:
%
% A Nijenhuis and H Wilf,
% Combinatorial Algorithms,
% Academic Press, 1978, second edition,
% ISBN 0-12-519260-6.
%
% Parameters:
%
% Input, integer N, number of rows and columns in the matrix.
%
% Input, real A(N,N), the unit lower triangular matrix.
%
% Output, real B(N,N), the inverse matrix.
%
  b = zeros ( n, n );
  for i = 1 : n
for j = 1 : n
if ( i < j )
b(i,j) = 0.0;
elseif ( j == i )
b(i,j) = 1.0;
else
b(i,j) = -( a(i,1:i-1) * b(1:i-1,j) );
end
end
end
return
end
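%
%  Example (illustrative, not part of the original library):
%
%    A = [ 1, 0, 0;
%          2, 1, 0;
%          3, 4, 1 ];
%    B = r8mat_l1_inverse ( 3, A );
%
%  gives
%
%    B = [ 1,  0, 0;
%         -2,  1, 0;
%          5, -4, 1 ];
%
%  and A * B is the 3x3 identity matrix.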
|
\chapter{Requirements}\label{sec:requirements}
|
!--------------------------------------------------------------------
! Created by Mahdi Esmaily Moghadam
! contact [email protected] for reporting the bugs.
!--------------------------------------------------------------------
!
! UC Copyright Notice
! This software is Copyright ©2012 The Regents of the University of
! California. All Rights Reserved.
!
! Permission to copy and modify this software and its documentation
! for educational, research and non-profit purposes, without fee,
! and without a written agreement is hereby granted, provided that
! the above copyright notice, this paragraph and the following three
! paragraphs appear in all copies.
!
! Permission to make commercial use of this software may be obtained
! by contacting:
! Technology Transfer Office
! 9500 Gilman Drive, Mail Code 0910
! University of California
! La Jolla, CA 92093-0910
! (858) 534-5815
! [email protected]
!
! This software program and documentation are copyrighted by The
! Regents of the University of California. The software program and
! documentation are supplied "as is", without any accompanying
! services from The Regents. The Regents does not warrant that the
! operation of the program will be uninterrupted or error-free. The
! end-user understands that the program was developed for research
! purposes and is advised not to rely exclusively on the program for
! any reason.
!
! IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY
! PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL
! DAMAGES, INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS
! SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF
! CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
! THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY
! WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
! OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
! SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE
! UNIVERSITY OF CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE
! MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
!
!--------------------------------------------------------------------
! Communication structure is created here.
!--------------------------------------------------------------------
SUBROUTINE FSILS_COMMU_CREATE(commu, commi)
INCLUDE "FSILS_STD.h"
TYPE(FSILS_commuType), INTENT(INOUT) :: commu
INTEGER, INTENT(IN) :: commi
INTEGER ierr
IF (commu%foC) THEN
PRINT *, "FSILS: COMMU is not free, you may use", &
& " FSILS_COMMU_FREE to free it"
STOP "FSILS: FATAL ERROR"
END IF
! Some of these parameters are set for sequential version
commu%foC = .TRUE.
commu%comm = commi
commu%nTasks = 1
commu%task = 0
commu%master = 0
CALL MPI_COMM_RANK(commi, commu%task, ierr)
CALL MPI_COMM_SIZE(commi, commu%nTasks, ierr)
CALL MPI_ALLREDUCE(commu%task, commu%master, 1, mpint, MPI_MIN, &
& commi, ierr)
IF (commu%master .NE. 0) THEN
STOP "Master ID is not zero - might cause problems"
END IF
commu%masF = .FALSE.
commu%tF = commu%task + 1
IF (commu%task .EQ. commu%master) THEN
commu%masF = .TRUE.
END IF
RETURN
END SUBROUTINE FSILS_COMMU_CREATE
!====================================================================
SUBROUTINE FSILS_COMMU_FREE(commu)
INCLUDE "FSILS_STD.h"
TYPE(FSILS_commuType), INTENT(INOUT) :: commu
IF (.NOT.commu%foC) STOP 'COMMU is not created yet to be freed'
commu%foC = .FALSE.
RETURN
END SUBROUTINE FSILS_COMMU_FREE
|
lemma frontier_cball [simp]: fixes a :: "'a::{real_normed_vector, perfect_space}" shows "frontier (cball a e) = sphere a e" |
import algebraic_topology.fundamental_groupoid.basic
import topology.category.Top.basic
import topology.path_connected
noncomputable theory
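/-
This file defines `top_subspace X`, the type of open subspaces of a topological
space `X`.  It equips such a subspace with the induced topology via the inclusion
map `incl`, proves a characterisation of its open sets (`subspace_topology_equiv`),
puts a partial order on `top_subspace X` by inclusion of carriers, constructs the
continuous inclusion `top_subspace_incl` between nested subspaces, and provides
binary intersections and unions.
-/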
structure top_subspace (X : Type*) [topological_space X] :=
(carrier : set X)
(univ_open : is_open carrier)
variables {X : Type*} [topological_space X]
instance : has_mem X (top_subspace X) :=
begin
split, intro s, intro S, exact s ∈ S.carrier,
end
instance : has_coe (top_subspace X) (set X) :=
begin
split, intro S, exact S.carrier,
end
-- instance to_topspace (H : top_subspace X) : topological_space H :=
-- begin
-- exact subtype.topological_space,
-- end
variable {H : top_subspace X}
@[simp] lemma mem_carrier {x : X} : x ∈ H.carrier ↔ x ∈ H :=
begin
refl
end
@[simp] lemma mem_coe {x : X} : x ∈ (↑H : set X) ↔ x ∈ H :=
begin
refl
end
@[simp] lemma mem_coe_self : (H : set X) = H.carrier :=
begin
refl,
end
@[reducible] def incl : H → X :=
begin
intro h, cases h, exact h_val,
end
instance has_coe_to_topspace : has_coe (H : top_subspace X) X :=
begin
split, exact incl,
end
instance derived_topspace : topological_space H :=
begin
exact @topological_space.induced _ _ incl _inst_1,
end
instance has_coe_to_parent_set : has_coe (set H) (set X) :=
begin
fconstructor, intro h, exact set.image incl h,
end
theorem subspace_topology_equiv {X : Type*} [top_X : topological_space X] {H : top_subspace X} (x : set H):
is_open x ↔ (∃(y : set X), is_open y ∧ y ∩ H = ↑x) :=
begin
split,
{
intro h, cases h, use h_w, cases h_h, fconstructor, assumption,
norm_num, rw incl at *, induction h_h_right, ring_nf, ext1,
split, intro h2, fconstructor, fconstructor, exact x_1, finish,
finish, intro h3, cases h3, fconstructor, finish, cases h3_h, cases h3_w, finish,
},
{
intro h, cases h, cases h_h, fconstructor, exact h_w, fconstructor, assumption,
rw set.preimage, ext1, rw incl, simp only [set.mem_set_of_eq], split,
{
intro hhh,
cases x_1, simp at *, unfold_coes at *, rw incl at *, simp at *, dsimp at *,
have s: x_1_val ∈ h_w ∩ H.carrier, fconstructor, exact hhh, exact x_1_property,
rw h_h_right at s, finish,
},
intro h, have p: ↑x ⊆ h_w, rw ←h_h_right, intro s, intro hs, cases hs, assumption,
apply p, fconstructor,exact x_1, split,exact h, rw incl,
},
end
@[continuity] lemma incl_continuous : @continuous H X _ _ incl :=
begin
exact continuous_induced_dom,
end
@[reducible] def i : C(↥H, X) := ⟨incl, incl_continuous⟩
def to_set (H : top_subspace X): set H := λh : H, true
@[simp] lemma to_set_id : set.image incl (to_set H) = H :=
begin
unfold_coes, rw set.image, ext1, split, intro x,cases x,
rw to_set at *, cases x_h with xh1 xh2, rw incl at xh2,simp at xh2, induction xh2, cases x_w,tauto,
intro a, split, split,rotate 2,fconstructor,exact x,tauto, solve_by_elim, tauto,
end
-- lemma incl_intersection (x : set H) (y : set H): incl '' x ∩ incl '' y = incl '' (x ∩ y) :=
-- begin
-- sorry{rw incl, ext1, simp, split,
-- {intro s,cases s with sx sy, cases sx, cases sx_h with sxh1 sxh2, cases sy,
-- cases sy_h with syh1 syh2, cases sx_w, cases sy_w, use x_1, simp at *, finish, fconstructor, finish,
-- finish,},
-- {
-- intro s, cases s, cases s_w, cases s_h with sh1 sh2,cases sh1, simp at *, split, use x_1, rw ←sh2, assumption,
-- split, finish, finish, use x_1, finish, split, finish,finish,
-- },}
-- end
variables {a b : (H : top_subspace X)}
-- lift a path from subspace H to X --
-- theorem subspace_path_incl (p : path a b) : path (a:X) (b:X) :=
-- begin
-- fconstructor,
-- {
-- exact { to_fun := i ∘ p.to_continuous_map,
-- continuous_to_fun := continuous_induced_dom.comp (map_continuous p.to_continuous_map)}
-- },
-- {
-- simp only [path.coe_to_continuous_map, function.comp_app, path.source], unfold i, unfold incl, refl,
-- },
-- {
-- simp only [path.target, path.coe_to_continuous_map, function.comp_app], unfold i, unfold incl, refl,
-- }
-- end
-- notation `πₓ` := fundamental_groupoid.fundamental_groupoid_functor.obj
-- notation `π` := fundamental_groupoid.fundamental_groupoid_functor
-- notation `πₘ` := fundamental_groupoid.fundamental_groupoid_functor.map
-- def induced_top_morphism : Top.of H ⟶ Top.of X := i
-- def induced_groupoid_morphism : πₓ (Top.of H) ⟶ πₓ (Top.of X) := πₘ i
def to_subspace (X : Type*) [top : topological_space X]: top_subspace X :=
{ carrier := @set.univ X,
univ_open := top.is_open_univ}
instance has_coe_to_subspace {Y : Type*} [topological_space Y] : has_coe Y (top_subspace Y) :=
begin
fconstructor, intro h, apply to_subspace,
end
-- Partial order --
theorem ext' {H K : top_subspace X} (h : H.carrier = K.carrier) : H = K :=
begin
cases H,
cases K,
simp * at *,
end
theorem ext'_iff {H K : top_subspace X} :
H.carrier = K.carrier ↔ H = K :=
begin
split,
{ exact ext' },
{ intro h,
rw h, }
end
@[ext] theorem ext {H K : top_subspace X} (h : ∀ x, x ∈ H ↔ x ∈ K) : H = K :=
begin
apply ext',
ext,
apply h,
end
instance : has_le (top_subspace X) := {le := λ X Y, X.carrier ⊆ Y.carrier}
variable K : (top_subspace X)
lemma le_def : H ≤ K ↔ H.carrier ⊆ K.carrier :=
begin
refl
end
lemma le_iff : H ≤ K ↔ ∀ g, g ∈ H → g ∈ K :=
begin
refl,
end
lemma le_space : H ≤ to_subspace X :=
begin
rw le_def, rw to_subspace, simp only [set.subset_univ],
end
@[refl] lemma subspace_le_refl {H : top_subspace X}: H ≤ H :=
begin
rw le_def,
end
lemma subspace_le_antisymm : H ≤ K → K ≤ H → H = K :=
begin
rw [le_def, le_def, ← ext'_iff],
exact set.subset.antisymm,
end
@[trans] lemma subspace_le_trans {H J K : top_subspace X}: H ≤ J → J ≤ K → H ≤ K :=
begin
rw [le_def, le_def, le_def],
exact set.subset.trans,
end
instance {X : Type*} [topological_space X]: partial_order (top_subspace X) :=
{ le := (≤),
le_refl :=
begin
intro a, apply subspace_le_refl,
end,
le_antisymm :=
begin
intros a b, apply subspace_le_antisymm,
end,
le_trans :=
begin
intros a b c, apply subspace_le_trans,
end}
-- open subspace
variables {A B : top_subspace X}
@[reducible] def open_incl (h: A ≤ B) : A → B :=
begin
intro a, cases a, fconstructor, exact a_val, rw le_def at h, apply h, assumption,
end
lemma incl_open_map {X : Type*} [top_X : topological_space X] {A : top_subspace X} (x : set A) :
is_open x → is_open (set.image incl x):=
begin
intro h, rw subspace_topology_equiv at *, cases h, cases h_h, rw is_open, rw incl, simp,
have p : is_open (h_w ∩ A.carrier), apply is_open.inter, assumption, exact A.univ_open, unfold_coes at *,
rw incl at *,
simp at h_h_right,
rw h_h_right at p, finish,
end
@[simp] lemma incl_composition (h: A ≤ B) : (incl ∘ (open_incl h)) = incl :=
begin
rw open_incl,rw incl, rw incl, simp, ext1, simp, cases x, simp,
end
@[simp] lemma incl_set_composition (h: A ≤ B) (x : set A): incl '' (open_incl h '' x) = incl '' x :=
begin
rw ←set.image_comp, rw incl_composition,
-- ext1, split, intro s, cases s, cases s_h, fconstructor,
end
@[simp] lemma incl_point_composition (h: A ≤ B) (x : A) : incl ((open_incl h) x) = incl x :=
begin
rw open_incl, rw incl, rw incl, cases x, simp,
end
@[simp] lemma incl_self : incl '' (to_set A) = A.carrier :=
begin
apply to_set_id,
end
lemma incl_inj {H: top_subspace X}: function.injective (@incl X _ H) :=
begin
rw function.injective, intros a b,
rw incl, cases a, cases b, finish,
end
-- @[simp] lemma incl_open_intersection (x : set A) (y : set A) : incl '' x ∩ incl '' y = incl '' (x ∩ y) :=
-- begin
-- apply incl_intersection,
-- end
-- @[simp] lemma incl_composition_asso (h: A ≤ B) (x : A): incl (open_incl h) = incl :=
-- begin
-- rw open_incl,rw incl, rw incl, cases x, simp,
-- end
-- @[simp] lemma set_incl_composition (h: A ≤ B) (x : set A): (incl ∘ (open_incl h))'' x = incl '' x :=
-- begin
-- ext1, split, intro hs,
-- end
theorem top_subspace_set_incl {X : Type*} [top_X : topological_space X] {A : top_subspace X} {B : top_subspace X} (x : set A)
(h: A ≤ B): is_open x → is_open (set.image (open_incl h) x):=
begin
intro hs, have hrs : is_open (set.image incl x), apply incl_open_map, exact hs,
rw subspace_topology_equiv, use (incl '' x), split, assumption, unfold_coes, rw incl_set_composition,
simp, intro,intro hx, rw set.mem_preimage,rw incl, simp, cases x_1, solve_by_elim,
end
lemma range_incl {A : top_subspace X} : set.range (@incl X _ A) = A.carrier :=
begin
rw set.range, ext1, split, intro hx, cases hx, cases hx_w, simp_rw incl at *, finish,
intro hx, split, rotate, split, rotate, exact x, simp_rw incl, exact hx,
end
lemma incl_sub (x : A) (s : set A) : x ∈ s ↔ incl x ∈ incl '' s :=
begin
split, intro hx, rw incl, finish,
rw incl, intro hx, cases x, cases hx, simp at *, cases hx_h, cases hx_w, finish,
end
lemma incl_point_self (x : A) : incl x ∈ A.carrier :=
begin
rw incl, cases x, simp, finish,
end
def top_subspace_incl {X : Type*} [top_X : topological_space X] {A : top_subspace X} {B : top_subspace X} (h : A ≤ B) : C(A,B) := {
to_fun :=
begin
exact open_incl h,
end,
continuous_to_fun :=
begin
fconstructor, intro bns, intro hs, rw subspace_topology_equiv at *, cases hs with sX hsX, cases hsX with hsOpen ps,
let ans : set X := sX ∩ A, use ans, fconstructor, apply is_open.inter, assumption, exact A.univ_open,
rw set.subset.antisymm_iff, unfold_coes, have hp : open_incl h ⁻¹' bns = incl ⁻¹' ans,
{
rw set.preimage, rw set.preimage, ext1,split, intro hx, simp only [set.mem_set_of_eq] at hx,
rw incl_sub at hx, rw incl_point_composition at hx, unfold_coes at *,rw ←ps at hx, cases hx, fconstructor,
assumption, apply incl_point_self,
intro hx, simp only [set.mem_set_of_eq] at hx, cases hx with hx1 hx2, simp only [set.mem_set_of_eq], rw incl_sub, rw incl_point_composition, rw le_iff at h,
have hx3: incl x ∈ B, apply h, assumption,have hx4 : incl x ∈ sX∩↑B, split, assumption, assumption,
have hx5: sX ∩ ↑B ⊆ incl '' bns, rotate,apply hx5, assumption, rw ps, unfold_coes,
},
{
rw hp, simp only [set.inter_subset_left,
set.image_subset_iff,
and_true,
set.subset_inter_iff,
mem_coe_self,
set.inter_subset_right,
set.preimage_inter,
and_self], rw ←set.preimage_inter, rw set.image_preimage_eq_of_subset,
intro x, intro hx, cases hx, fconstructor, exact hx_left.1,
assumption, rw range_incl, refine set.inter_subset_right sX A.carrier,
},
end}
instance point_lift (h : A ≤ B): has_coe A B :=
begin
fconstructor, exact open_incl h,
end
--- intersection and union of subspace ---
def intersection (A : top_subspace X) (B : top_subspace X) : top_subspace X :=
begin
fconstructor, exact A.carrier ∩ B.carrier, apply is_open.inter, exact A.univ_open, exact B.univ_open,
end
instance : has_inter (top_subspace X) := {inter := intersection}
@[simp] theorem inter_sub : A ∩ B ≤ A :=
begin
rw le_def, intro, intro hx, cases hx, assumption,
end
@[simp] theorem inter_sub2 : A ∩ B ≤ B :=
begin
rw le_def, intro, intro hx, cases hx, assumption,
end
def union (A : top_subspace X) (B : top_subspace X) : top_subspace X :=
begin
fconstructor, exact A.carrier ∪ B.carrier, apply is_open.union, exact A.univ_open, exact B.univ_open,
end
instance : has_union (top_subspace X) := {union := union}
@[simp] theorem union_sub : A ≤ A ∪ B :=
begin
rw le_def, intro, intro hx, fconstructor, exact hx,
end
@[simp] theorem union_sub2 : B ≤ A ∪ B :=
begin
rw le_def, intro, intro hx, simp at *, right, exact hx,
end
-- (backward) lift of element
def elem_backward_inclusion (x : X) (hx : x ∈ H) : H :=
begin
fconstructor, exact x, exact hx,
end
instance hascoe_top : has_coe (top_subspace X) Top :=
begin
fconstructor, intro x, exact Top.of x,
end
-- (backward) lift of function
@[simp] lemma incl_inj_eq {x y : H}: incl x = incl y ↔ x = y :=
begin
rw incl, cases x, cases y, simp,
end
@[simp] lemma incl_carrier (a : H) : incl a ∈ H.carrier :=
begin
cases a, simp_rw incl, assumption,
end
lemma openincl_carrier {G : top_subspace X} (h : H ≤ G) (a : H) : incl a ∈ G.carrier :=
begin
rw le_def at h, apply h, simp,
end
-- variables {A B : top_subspace X} {hAB : A≤ B}
-- instance subspace_has_coe (hAB : A ≤ B): has_coe (A : top_subspace X) (top_subspace B):=
-- begin
-- split, intro a, split, cases A, cases B, rw le_def at hAB, simp only [] at hAB,
-- cases a, tauto,
-- end
-- def subspace_lift {A B : top_subspace X} (hAB : A ≤ B): top_subspace B :=
-- begin
-- split, rw le_def at hAB, cases A, cases B, simp only [] at hAB, simp only [coe_sort_coe_base],
-- unfold_coes, simp only [], intro h, cases h, exact h_val ∈ A,
-- end
-- #check (@derived_topspace X _ A)
-- #check (@subspace_lift X _ A B hAB)
-- #check @derived_topspace B _ ((@subspace_lift X _ A B hAB) : top_subspace B)
-- theorem topology_equivalence (hAB : A ≤ B): (@derived_topspace X _ A).is_open = (@derived_topspace B _ (@subspace_lift X _ A B hAB)).is_open
-- def continuous_subspace_incl (A B : top_subspace X) {hAB : A ≤ B}: C(A,B) :=
-- { to_fun :=
-- begin
-- intro a, exact {val := a.val,
-- property := hAB a.property}
-- end,
-- continuous_to_fun :=
-- begin
-- fconstructor,
-- end, }
|
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f g : CentroidHom α
h : (fun f => f.toFun) f = (fun f => f.toFun) g
⊢ f = g
[PROOFSTEP]
cases f
[GOAL]
case mk
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
g : CentroidHom α
toAddMonoidHom✝ : α →+ α
map_mul_left'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = a * ZeroHom.toFun (↑toAddMonoidHom✝) b
map_mul_right'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = ZeroHom.toFun (↑toAddMonoidHom✝) a * b
h :
(fun f => f.toFun)
{ toAddMonoidHom := toAddMonoidHom✝, map_mul_left' := map_mul_left'✝, map_mul_right' := map_mul_right'✝ } =
(fun f => f.toFun) g
⊢ { toAddMonoidHom := toAddMonoidHom✝, map_mul_left' := map_mul_left'✝, map_mul_right' := map_mul_right'✝ } = g
[PROOFSTEP]
cases g
[GOAL]
case mk.mk
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
toAddMonoidHom✝¹ : α →+ α
map_mul_left'✝¹ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝¹) (a * b) = a * ZeroHom.toFun (↑toAddMonoidHom✝¹) b
map_mul_right'✝¹ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝¹) (a * b) = ZeroHom.toFun (↑toAddMonoidHom✝¹) a * b
toAddMonoidHom✝ : α →+ α
map_mul_left'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = a * ZeroHom.toFun (↑toAddMonoidHom✝) b
map_mul_right'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = ZeroHom.toFun (↑toAddMonoidHom✝) a * b
h :
(fun f => f.toFun)
{ toAddMonoidHom := toAddMonoidHom✝¹, map_mul_left' := map_mul_left'✝¹, map_mul_right' := map_mul_right'✝¹ } =
(fun f => f.toFun)
{ toAddMonoidHom := toAddMonoidHom✝, map_mul_left' := map_mul_left'✝, map_mul_right' := map_mul_right'✝ }
⊢ { toAddMonoidHom := toAddMonoidHom✝¹, map_mul_left' := map_mul_left'✝¹, map_mul_right' := map_mul_right'✝¹ } =
{ toAddMonoidHom := toAddMonoidHom✝, map_mul_left' := map_mul_left'✝, map_mul_right' := map_mul_right'✝ }
[PROOFSTEP]
congr with x
[GOAL]
case mk.mk.e_toAddMonoidHom.h
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
toAddMonoidHom✝¹ : α →+ α
map_mul_left'✝¹ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝¹) (a * b) = a * ZeroHom.toFun (↑toAddMonoidHom✝¹) b
map_mul_right'✝¹ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝¹) (a * b) = ZeroHom.toFun (↑toAddMonoidHom✝¹) a * b
toAddMonoidHom✝ : α →+ α
map_mul_left'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = a * ZeroHom.toFun (↑toAddMonoidHom✝) b
map_mul_right'✝ : ∀ (a b : α), ZeroHom.toFun (↑toAddMonoidHom✝) (a * b) = ZeroHom.toFun (↑toAddMonoidHom✝) a * b
h :
(fun f => f.toFun)
{ toAddMonoidHom := toAddMonoidHom✝¹, map_mul_left' := map_mul_left'✝¹, map_mul_right' := map_mul_right'✝¹ } =
(fun f => f.toFun)
{ toAddMonoidHom := toAddMonoidHom✝, map_mul_left' := map_mul_left'✝, map_mul_right' := map_mul_right'✝ }
x : α
⊢ ↑toAddMonoidHom✝¹ x = ↑toAddMonoidHom✝ x
[PROOFSTEP]
exact congrFun h x
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
f' : α → α
h : f' = ↑f
src✝ : α →+ α := AddMonoidHom.copy f.toAddMonoidHom f' h
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := { toFun := f', map_zero' := (_ : ZeroHom.toFun (↑src✝) 0 = 0) },
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := { toFun := f', map_zero' := (_ : ZeroHom.toFun (↑src✝) 0 = 0) },
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
simp_rw [h, map_mul_left]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
f' : α → α
h : f' = ↑f
src✝ : α →+ α := AddMonoidHom.copy f.toAddMonoidHom f' h
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := { toFun := f', map_zero' := (_ : ZeroHom.toFun (↑src✝) 0 = 0) },
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := { toFun := f', map_zero' := (_ : ZeroHom.toFun (↑src✝) 0 = 0) },
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
simp_rw [h, map_mul_right]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
g f₁ f₂ : CentroidHom α
hg : Injective ↑g
h : comp g f₁ = comp g f₂
a : α
⊢ ↑g (↑f₁ a) = ↑g (↑f₂ a)
[PROOFSTEP]
rw [← comp_apply, h, comp_apply]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f g : CentroidHom α
src✝ : α →+ α := ↑f + ↑g
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
show f (a * b) + g (a * b) = a * (f b + g b)
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f g : CentroidHom α
src✝ : α →+ α := ↑f + ↑g
a b : α
⊢ ↑f (a * b) + ↑g (a * b) = a * (↑f b + ↑g b)
[PROOFSTEP]
simp [map_mul_left, mul_add]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f g : CentroidHom α
src✝ : α →+ α := ↑f + ↑g
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
show f (a * b) + g (a * b) = (f a + g a) * b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f g : CentroidHom α
src✝ : α →+ α := ↑f + ↑g
a b : α
⊢ ↑f (a * b) + ↑g (a * b) = (↑f a + ↑g a) * b
[PROOFSTEP]
simp [map_mul_right, add_mul]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
n : ℕ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
change n • f (a * b) = a * n • f b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
n : ℕ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ n • ↑f (a * b) = a * n • ↑f b
[PROOFSTEP]
rw [map_mul_left f, ← mul_smul_comm]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
n : ℕ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
change n • f (a * b) = n • f a * b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
n : ℕ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ n • ↑f (a * b) = n • ↑f a * b
[PROOFSTEP]
rw [map_mul_right f, ← smul_mul_assoc]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n : ℕ
src✝ : AddMonoid.End α := toEnd f ^ n
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
induction' n with n ih
[GOAL]
case zero
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n
a b : α
src✝ : AddMonoid.End α := toEnd f ^ Nat.zero
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
exact rfl
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
simp
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ↑(toEnd f ^ Nat.succ n) (a * b) = a * ↑(toEnd f ^ Nat.succ n) b
[PROOFSTEP]
rw [pow_succ]
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ↑(toEnd f * toEnd f ^ n) (a * b) = a * ↑(toEnd f * toEnd f ^ n) b
[PROOFSTEP]
exact (congr_arg f.toEnd ih).trans (f.map_mul_left' _ _)
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n : ℕ
src✝ : AddMonoid.End α := toEnd f ^ n
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
induction' n with n ih
[GOAL]
case zero
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n
a b : α
src✝ : AddMonoid.End α := toEnd f ^ Nat.zero
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
exact rfl
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
a *
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
simp
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
a *
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ↑(toEnd f ^ Nat.succ n) (a * b) = ↑(toEnd f ^ Nat.succ n) a * b
[PROOFSTEP]
rw [pow_succ]
[GOAL]
case succ
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
f : CentroidHom α
n✝ : ℕ
src✝¹ : AddMonoid.End α := toEnd f ^ n✝
a b : α
n : ℕ
ih :
let src := toEnd f ^ n;
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src) (x + y) = ZeroHom.toFun (↑src) x + ZeroHom.toFun (↑src) y) })
a *
b
src✝ : AddMonoid.End α := toEnd f ^ Nat.succ n
⊢ ↑(toEnd f * toEnd f ^ n) (a * b) = ↑(toEnd f * toEnd f ^ n) a * b
[PROOFSTEP]
exact (congr_arg f.toEnd ih).trans (f.map_mul_right' _ _)
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
T S : CentroidHom α
a b : α
⊢ (↑T ∘ ↑S) (a * b) = (↑S ∘ ↑T) (a * b)
[PROOFSTEP]
simp
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocSemiring α
T S : CentroidHom α
a b : α
⊢ ↑T (↑S (a * b)) = ↑S (↑T (a * b))
[PROOFSTEP]
rw [map_mul_right, map_mul_left, ← map_mul_right, ← map_mul_left]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f : CentroidHom α
src✝ : α →+ α := -↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
change -f (a * b) = a * (-f b)
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f : CentroidHom α
src✝ : α →+ α := -↑f
a b : α
⊢ -↑f (a * b) = a * -↑f b
[PROOFSTEP]
simp [map_mul_left]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f : CentroidHom α
src✝ : α →+ α := -↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
change -f (a * b) = (-f a) * b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f : CentroidHom α
src✝ : α →+ α := -↑f
a b : α
⊢ -↑f (a * b) = -↑f a * b
[PROOFSTEP]
simp [map_mul_right]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f g : CentroidHom α
src✝ : α →+ α := ↑f - ↑g
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
change (FunLike.coe f - FunLike.coe g) (a * b) = a * (FunLike.coe f - FunLike.coe g) b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f g : CentroidHom α
src✝ : α →+ α := ↑f - ↑g
a b : α
⊢ (↑f - ↑g) (a * b) = a * (↑f - ↑g) b
[PROOFSTEP]
simp [map_mul_left, mul_sub]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f g : CentroidHom α
src✝ : α →+ α := ↑f - ↑g
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
change (FunLike.coe f - FunLike.coe g) (a * b) = ((FunLike.coe f - FunLike.coe g) a) * b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
f g : CentroidHom α
src✝ : α →+ α := ↑f - ↑g
a b : α
⊢ (↑f - ↑g) (a * b) = (↑f - ↑g) a * b
[PROOFSTEP]
simp [map_mul_right, sub_mul]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
n : ℤ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
a *
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
b
[PROOFSTEP]
change n • f (a * b) = a * n • f b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
n : ℤ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ n • ↑f (a * b) = a * n • ↑f b
[PROOFSTEP]
rw [map_mul_left f, ← mul_smul_comm]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
n : ℤ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
(a * b) =
ZeroHom.toFun
(↑{ toZeroHom := ↑src✝,
map_add' :=
(_ : ∀ (x y : α), ZeroHom.toFun (↑src✝) (x + y) = ZeroHom.toFun (↑src✝) x + ZeroHom.toFun (↑src✝) y) })
a *
b
[PROOFSTEP]
change n • f (a * b) = n • f a * b
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalNonAssocRing α
n : ℤ
f : CentroidHom α
src✝ : α →+ α := SMul.smul n ↑f
a b : α
⊢ n • ↑f (a * b) = n • ↑f a * b
[PROOFSTEP]
rw [map_mul_right f, ← smul_mul_assoc]
[GOAL]
F : Type u_1
α : Type u_2
inst✝ : NonUnitalRing α
h : ∀ (a b : α), (∀ (r : α), a * r * b = 0) → a = 0 ∨ b = 0
src✝ : Ring (CentroidHom α) := instRing
f g : CentroidHom α
⊢ f * g = g * f
[PROOFSTEP]
ext
[GOAL]
case h
F : Type u_1
α : Type u_2
inst✝ : NonUnitalRing α
h : ∀ (a b : α), (∀ (r : α), a * r * b = 0) → a = 0 ∨ b = 0
src✝ : Ring (CentroidHom α) := instRing
f g : CentroidHom α
a✝ : α
⊢ ↑(f * g) a✝ = ↑(g * f) a✝
[PROOFSTEP]
refine' sub_eq_zero.1 ((or_self_iff _).1 <| (h _ _) fun r ↦ _)
[GOAL]
case h
F : Type u_1
α : Type u_2
inst✝ : NonUnitalRing α
h : ∀ (a b : α), (∀ (r : α), a * r * b = 0) → a = 0 ∨ b = 0
src✝ : Ring (CentroidHom α) := instRing
f g : CentroidHom α
a✝ r : α
⊢ (↑(f * g) a✝ - ↑(g * f) a✝) * r * (↑(f * g) a✝ - ↑(g * f) a✝) = 0
[PROOFSTEP]
rw [mul_assoc, sub_mul, sub_eq_zero, ← map_mul_right, ← map_mul_right, coe_mul, coe_mul, comp_mul_comm]
|
{-
This file contains a diagonalization procedure simpler than Smith normalization.
For any matrix M, it provides two invertible matrices P, Q, one diagonal matrix D and an equality M = P·D·Q.
The only difference from Smith normal form is that the entries of D are allowed to be arbitrary, instead of being consecutively divisible.
This is still enough to establish the important properties of finitely presented abelian groups,
and it can be computed much more efficiently than the Smith procedure.
-}
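{-
  Illustration (example values chosen here, not taken from the formalization):
  for M = (2 2 / 2 5) one may take
    P = (1 0 / 1 1),   D = (2 0 / 0 3),   Q = (1 1 / 0 1),
  with P, Q invertible over ℤ and P·D·Q = M.  The diagonal entries 2 and 3 are
  not consecutively divisible, which Smith normal form (here diag(1,6)) would
  require.
-}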
{-# OPTIONS --safe #-}
module Cubical.Algebra.IntegerMatrix.Diagonalization where
open import Cubical.Foundations.Prelude
open import Cubical.Foundations.Function
open import Cubical.Foundations.HLevels
open import Cubical.Data.Nat
hiding (_·_)
renaming (_+_ to _+ℕ_ ; +-assoc to +Assocℕ)
open import Cubical.Data.Nat.Order
open import Cubical.Data.Nat.Divisibility
using (m∣n→m≤n)
renaming (∣-trans to ∣ℕ-trans ; ∣-refl to ∣-reflℕ)
open import Cubical.Data.Int
hiding (_+_ ; _·_ ; _-_ ; -_ ; addEq)
open import Cubical.Data.Int.Divisibility
open import Cubical.Data.FinData
open import Cubical.Data.Empty as Empty
open import Cubical.Data.Unit as Unit
open import Cubical.Data.Sum
open import Cubical.Data.Sigma
open import Cubical.Data.List
open import Cubical.Algebra.Matrix
open import Cubical.Algebra.Matrix.CommRingCoefficient
open import Cubical.Algebra.Matrix.Elementaries
open import Cubical.Algebra.IntegerMatrix.Base
open import Cubical.Algebra.IntegerMatrix.Elementaries
open import Cubical.Algebra.CommRing
open import Cubical.Algebra.CommRing.Instances.Int
renaming (ℤ to ℤRing)
open import Cubical.Relation.Nullary
open import Cubical.Induction.WellFounded
private
variable
m n k : ℕ
open CommRingStr (ℤRing .snd)
open Coefficient ℤRing
open Sim
open ElemTransformation ℤRing
open ElemTransformationℤ
open SwapPivot
open RowsImproved
open ColsImproved
-- Sequence of non-zero integers
isNonZero : List ℤ → Type
isNonZero [] = Unit
isNonZero (x ∷ xs) = (¬ x ≡ 0) × isNonZero xs
isPropIsNonZero : (xs : List ℤ) → isProp (isNonZero xs)
isPropIsNonZero [] = isPropUnit
isPropIsNonZero (x ∷ xs) = isProp× (isPropΠ (λ _ → isProp⊥)) (isPropIsNonZero xs)
NonZeroList : Type
NonZeroList = Σ[ xs ∈ List ℤ ] isNonZero xs
cons : (n : ℤ)(xs : NonZeroList) → ¬ n ≡ 0 → NonZeroList
cons n (xs , _) _ .fst = n ∷ xs
cons n ([] , _) p .snd = p , tt
cons n (x ∷ xs , q) p .snd = p , q
-- Smith normal matrix
_+length_ : NonZeroList → ℕ → ℕ
xs +length n = length (xs .fst) +ℕ n
diagMat : (xs : List ℤ)(m n : ℕ) → Mat (length xs +ℕ m) (length xs +ℕ n)
diagMat [] _ _ = 𝟘
diagMat (x ∷ xs) _ _ = x ⊕ diagMat xs _ _
diagMat⊕ :
(a : ℤ)(xs : NonZeroList){m n : ℕ}
→ (p : ¬ a ≡ 0)
→ a ⊕ diagMat (xs .fst) m n ≡ diagMat (cons a xs p .fst) m n
diagMat⊕ _ _ _ = refl
-- Diagonal matrix with non-zero diagonal elements
-- Notice that we allow non-square matrices.
record isDiagonal (M : Mat m n) : Type where
field
divs : NonZeroList
rowNull : ℕ
colNull : ℕ
rowEq : divs +length rowNull ≡ m
colEq : divs +length colNull ≡ n
matEq : PathP (λ t → Mat (rowEq t) (colEq t)) (diagMat (divs .fst) rowNull colNull) M
open isDiagonal
row col : {M : Mat m n} → isDiagonal M → ℕ
row isNorm = isNorm .divs +length isNorm .rowNull
col isNorm = isNorm .divs +length isNorm .colNull
isDiagonal𝟘 : isDiagonal (𝟘 {m = m} {n = n})
isDiagonal𝟘 .divs = [] , tt
isDiagonal𝟘 {m = m} .rowNull = m
isDiagonal𝟘 {n = n} .colNull = n
isDiagonal𝟘 .rowEq = refl
isDiagonal𝟘 .colEq = refl
isDiagonal𝟘 .matEq = refl
isDiagonalEmpty : (M : Mat 0 n) → isDiagonal M
isDiagonalEmpty _ .divs = [] , tt
isDiagonalEmpty _ .rowNull = 0
isDiagonalEmpty {n = n} _ .colNull = n
isDiagonalEmpty _ .rowEq = refl
isDiagonalEmpty _ .colEq = refl
isDiagonalEmpty _ .matEq = isContr→isProp isContrEmpty _ _
isDiagonalEmptyᵗ : (M : Mat m 0) → isDiagonal M
isDiagonalEmptyᵗ _ .divs = [] , tt
isDiagonalEmptyᵗ {m = m} _ .rowNull = m
isDiagonalEmptyᵗ _ .colNull = 0
isDiagonalEmptyᵗ _ .rowEq = refl
isDiagonalEmptyᵗ _ .colEq = refl
isDiagonalEmptyᵗ _ .matEq = isContr→isProp isContrEmptyᵗ _ _
-- Induction step towards diagonalization
data DivStatus (a : ℤ)(M : Mat (suc m) (suc n)) : Type where
badCol : (i : Fin m)(p : ¬ a ∣ M (suc i) zero) → DivStatus a M
badRow : (j : Fin n)(p : ¬ a ∣ M zero (suc j)) → DivStatus a M
allDone : ((i : Fin m) → a ∣ M (suc i) zero)
→ ((j : Fin n) → a ∣ M zero (suc j)) → DivStatus a M
divStatus : (a : ℤ)(M : Mat (suc m) (suc n)) → DivStatus a M
divStatus a M =
let col? = ∀Dec (λ i → a ∣ M (suc i) zero) (λ _ → dec∣ _ _)
row? = ∀Dec (λ j → a ∣ M zero (suc j)) (λ _ → dec∣ _ _) in
case col?
return (λ _ → DivStatus a M) of λ
{ (inr p) → badCol (p .fst) (p .snd)
; (inl p) →
case row?
return (λ _ → DivStatus a M) of λ
{ (inr q) → badRow (q .fst) (q .snd)
; (inl q) → allDone p q }}
record DiagStep (M : Mat (suc m) (suc n)) : Type where
field
sim : Sim M
firstColClean : (i : Fin m) → sim .result (suc i) zero ≡ 0
firstRowClean : (j : Fin n) → sim .result zero (suc j) ≡ 0
nonZero : ¬ sim .result zero zero ≡ 0
open DiagStep
simDiagStep : {M : Mat (suc m) (suc n)}(sim : Sim M) → DiagStep (sim .result) → DiagStep M
simDiagStep simM diag .sim = compSim simM (diag .sim)
simDiagStep _ diag .firstColClean = diag .firstColClean
simDiagStep _ diag .firstRowClean = diag .firstRowClean
simDiagStep _ diag .nonZero = diag .nonZero
private
diagStep-helper :
(M : Mat (suc m) (suc n))
→ (p : ¬ M zero zero ≡ 0)(h : Norm (M zero zero))
→ (div? : DivStatus (M zero zero) M)
→ DiagStep M
diagStep-helper M p (acc ind) (badCol i q) =
let improved = improveRows M p
normIneq =
ind _ (stDivIneq p q (improved .div zero) (improved .div (suc i)))
in simDiagStep (improved .sim)
(diagStep-helper _ (improved .nonZero) normIneq (divStatus _ _))
diagStep-helper M p (acc ind) (badRow j q) =
let improved = improveCols M p
normIneq =
ind _ (stDivIneq p q (improved .div zero) (improved .div (suc j)))
in simDiagStep (improved .sim)
(diagStep-helper _ (improved .nonZero) normIneq (divStatus _ _))
diagStep-helper M p (acc ind) (allDone div₁ div₂) =
let improveColM = improveCols M p
invCol = bézoutRows-inv _ p div₂
divCol = (λ i → transport (λ t → invCol t zero ∣ invCol t (suc i)) (div₁ i))
improveRowM = improveRows (improveColM .sim .result) (improveColM .nonZero)
invCol = bézoutRows-inv _ (improveColM .nonZero) divCol
in record
{ sim = compSim (improveColM .sim) (improveRowM .sim)
; firstColClean = improveRowM .vanish
; firstRowClean = (λ j → (λ t → invCol (~ t) (suc j)) ∙ improveColM .vanish j)
; nonZero = improveRowM .nonZero }
diagStep-getStart : (M : Mat (suc m) (suc n)) → NonZeroOrNot M → DiagStep M ⊎ (M ≡ 𝟘)
diagStep-getStart _ (allZero p) = inr p
diagStep-getStart M (hereIs i j p) =
let swapM = swapPivot i j M
swapNonZero = (λ r → p (swapM .swapEq ∙ r))
diagM = diagStep-helper _ swapNonZero (<-wellfounded _) (divStatus _ _)
in inl (simDiagStep (swapM .sim) diagM)
diagStep : (M : Mat (suc m) (suc n)) → DiagStep M ⊎ (M ≡ 𝟘)
diagStep _ = diagStep-getStart _ (findNonZero _)
-- The diagonalization
record Diag (M : Mat m n) : Type where
field
sim : Sim M
isdiag : isDiagonal (sim .result)
open Diag
simDiag : {M : Mat m n}(sim : Sim M) → Diag (sim .result) → Diag M
simDiag simM diag .sim = compSim simM (diag .sim)
simDiag _ diag .isdiag = diag .isdiag
diag𝟘 : Diag (𝟘 {m = m} {n = n})
diag𝟘 .sim = idSim _
diag𝟘 .isdiag = isDiagonal𝟘
diagEmpty : (M : Mat 0 n) → Diag M
diagEmpty _ .sim = idSim _
diagEmpty M .isdiag = isDiagonalEmpty M
diagEmptyᵗ : (M : Mat m 0) → Diag M
diagEmptyᵗ _ .sim = idSim _
diagEmptyᵗ M .isdiag = isDiagonalEmptyᵗ M
decompDiagStep :
(M : Mat (suc m) (suc n))(step : DiagStep M)
→ step .sim .result ≡ step .sim .result zero zero ⊕ sucMat (step .sim .result)
decompDiagStep M step t zero zero = step .sim .result zero zero
decompDiagStep M step t zero (suc j) = step .firstRowClean j t
decompDiagStep M step t (suc i) zero = step .firstColClean i t
decompDiagStep M step t (suc i) (suc j) = step .sim .result (suc i) (suc j)
consIsDiagonal :
(a : ℤ)(M : Mat m n)
→ (p : ¬ a ≡ 0)
→ isDiagonal M → isDiagonal (a ⊕ M)
consIsDiagonal a _ p diag .divs = cons a (diag .divs) p
consIsDiagonal _ _ _ diag .rowNull = diag .rowNull
consIsDiagonal _ _ _ diag .colNull = diag .colNull
consIsDiagonal _ _ _ diag .rowEq = (λ t → suc (diag .rowEq t))
consIsDiagonal _ _ _ diag .colEq = (λ t → suc (diag .colEq t))
consIsDiagonal a _ _ diag .matEq = (λ t → a ⊕ diag .matEq t)
diagReduction :
(a : ℤ)(M : Mat m n)
→ (p : ¬ a ≡ 0)
→ Diag M → Diag (a ⊕ M)
diagReduction a _ _ diag .sim = ⊕Sim a (diag .sim)
diagReduction a _ p diag .isdiag = consIsDiagonal a _ p (diag .isdiag)
-- The Existence of Diagonalization
diagonalize : (M : Mat m n) → Diag M
diagonalize {m = 0} = diagEmpty
diagonalize {m = suc m} {n = 0} = diagEmptyᵗ
diagonalize {m = suc m} {n = suc n} M = helper (diagStep _)
where
helper : DiagStep M ⊎ (M ≡ 𝟘) → Diag M
helper (inr p) = subst Diag (sym p) diag𝟘
helper (inl stepM) =
let sucM = sucMat (stepM .sim .result)
diagM = diagReduction _ _ (stepM .nonZero) (diagonalize sucM)
in simDiag (compSim (stepM .sim) (≡Sim (decompDiagStep _ stepM))) diagM
|
[STATEMENT]
lemma (in hlde) subdprodl_impl [simp]: "subdprodl_impl a b x y = subdprodl x y"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. subdprodl_impl a b x y = subdprodl x y
[PROOF STEP]
by (simp add: subdprodl_impl_def subdprodl_def) |
State Before: α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
⊢ a < b → b < c → a < c State After: case intro.intro.intro.intro.intro.intro
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
⊢ a < c Tactic: rintro ⟨k₁, k₁z, notinA, inB⟩ ⟨k₂, k₂z, notinB, inC⟩ State Before: case intro.intro.intro.intro.intro.intro
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
⊢ a < c State After: case intro.intro.intro.intro.intro.intro.inl
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ a < c
case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
⊢ a < c Tactic: cases' lt_or_gt_of_ne (ne_of_mem_of_not_mem inB notinB) with h h State Before: case intro.intro.intro.intro.intro.intro.inl
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ a < c State After: case intro.intro.intro.intro.intro.intro.inl.refine'_1
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
x : α
hx : k₂ < x
⊢ x ∈ a ↔ x ∈ c
case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a Tactic: refine' ⟨k₂, @fun x hx => _, _, inC⟩ State Before: case intro.intro.intro.intro.intro.intro.inl.refine'_1
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
x : α
hx : k₂ < x
⊢ x ∈ a ↔ x ∈ c
case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a State After: case intro.intro.intro.intro.intro.intro.inl.refine'_1
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
x : α
hx : k₂ < x
⊢ x ∈ a ↔ x ∈ b
case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a Tactic: rw [← k₂z hx] State Before: case intro.intro.intro.intro.intro.intro.inl.refine'_1
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
x : α
hx : k₂ < x
⊢ x ∈ a ↔ x ∈ b
case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a State After: case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a Tactic: apply k₁z (Trans.trans h hx) State Before: case intro.intro.intro.intro.intro.intro.inl.refine'_2
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ < k₂
⊢ ¬k₂ ∈ a State After: no goals Tactic: rwa [k₁z h] State Before: case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
⊢ a < c State After: case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
x : α
hx : k₁ < x
⊢ x ∈ a ↔ x ∈ c Tactic: refine' ⟨k₁, @fun x hx => _, notinA, by rwa [← k₂z h]⟩ State Before: case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
x : α
hx : k₁ < x
⊢ x ∈ a ↔ x ∈ c State After: case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
x : α
hx : k₁ < x
⊢ x ∈ b ↔ x ∈ c Tactic: rw [k₁z hx] State Before: case intro.intro.intro.intro.intro.intro.inr
α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
x : α
hx : k₁ < x
⊢ x ∈ b ↔ x ∈ c State After: no goals Tactic: apply k₂z (Trans.trans h hx) State Before: α : Type u_1
inst✝ : LinearOrder α
a b c : Colex α
k₁ : α
k₁z : ∀ {x : α}, k₁ < x → (x ∈ a ↔ x ∈ b)
notinA : ¬k₁ ∈ a
inB : k₁ ∈ b
k₂ : α
k₂z : ∀ {x : α}, k₂ < x → (x ∈ b ↔ x ∈ c)
notinB : ¬k₂ ∈ b
inC : k₂ ∈ c
h : k₁ > k₂
⊢ k₁ ∈ c State After: no goals Tactic: rwa [← k₂z h] |
Formal statement is: lemma nested_sequence_unique: assumes "\<forall>n. f n \<le> f (Suc n)" "\<forall>n. g (Suc n) \<le> g n" "\<forall>n. f n \<le> g n" "(\<lambda>n. f n - g n) \<longlonglongrightarrow> 0" shows "\<exists>l::real. ((\<forall>n. f n \<le> l) \<and> f \<longlonglongrightarrow> l) \<and> ((\<forall>n. l \<le> g n) \<and> g \<longlonglongrightarrow> l)" Informal statement is: If $f$ and $g$ are two nested sequences of real numbers such that $f_n \leq g_n$ for all $n$ and $\lim_{n \to \infty} (f_n - g_n) = 0$, then there exists a real number $l$ such that $\lim_{n \to \infty} f_n = l = \lim_{n \to \infty} g_n$. |
function [t, storedb] = getLinesearch(problem, x, d, storedb)
% Returns a hint for line-search algorithms.
%
% function [t, storedb] = getLinesearch(problem, x, d, storedb)
%
% For a line-search problem at x along the tangent direction d, computes
% and returns t such that retracting t*d at x yields a good point around
% where to look for a line-search solution. That is: t is a hint as to "how
% far to look" along the line.
%
% The cache database storedb is passed along, possibly modified and
% returned in the process.
%
% See also: canGetLinesearch
% This file is part of Manopt: www.manopt.org.
% Original author: Nicolas Boumal, July 17, 2014.
% Contributors:
% Change log:
if isfield(problem, 'linesearch')
%% Compute the line-search hint function using linesearch.
is_octave = exist('OCTAVE_VERSION', 'builtin');
if ~is_octave
narg = nargin(problem.linesearch);
else
narg = 3;
end
% Check whether the linesearch function wants to deal with the
% store structure or not.
switch narg
case 2
t = problem.linesearch(x, d);
case 3
% Obtain, pass along, and save the store structure
% associated to this point.
store = getStore(problem, x, storedb);
[t, store] = problem.linesearch(x, d, store);
storedb = setStore(problem, x, storedb, store);
otherwise
up = MException('manopt:getLinesearch:badfun', ...
'linesearch should accept 2 or 3 inputs.');
throw(up);
end
else
%% Abandon computing the line-search function.
up = MException('manopt:getLinesearch:fail', ...
['The problem description is not explicit enough to ' ...
'compute a line-search hint.']);
throw(up);
end
end
|
lemma sets_scale_measure [simp, measurable_cong]: "sets (scale_measure r M) = sets M" |
#' bdist - Extra Distributions for Bayesian Analysis
#'
#' This package includes statistical functions I have found useful.
#' You are welcome to use it, but you do so at your own risk.
#'
#' @name bdist
#' @docType package
#'
#' @importFrom graphics abline lines mtext par plot
#' @importFrom stats density dnorm dt pgamma pnorm pt
#' rchisq rexp rgamma rnorm rt runif
NULL
|
Formal statement is: lemma totally_bounded_subset: "totally_bounded S \<Longrightarrow> T \<subseteq> S \<Longrightarrow> totally_bounded T" Informal statement is: If $S$ is totally bounded, then any subset $T$ of $S$ is also totally bounded. |
\documentclass{article}
\usepackage[utf8]{inputenc}
\usepackage{graphicx}
\graphicspath{ {images/} }
\begin{document}
\title{Application of Graph Colouring}
\author{Shashvat Kedia (1610110347), Raghav Kirpekar (1610110271), Raman Dutt (1610110357)}
\maketitle
\begin{abstract}
A graph G is a mathematical structure consisting of two sets V(G) (the vertices of G) and E(G) (the edges of G). A proper colouring of a graph is an assignment of colours to the vertices such that adjacent vertices receive different colours.
This paper discusses colouring of graphs with \textit{Mathematica} and \textit{WebMathematica}. \hfill \break
We draw arbitrary graphs and also show whether they have Eulerian and Hamiltonian cycles.
\end{abstract}
\section{Introduction}
Graph theory would not be what it is today if there had been no colouring problems. A graph G is a mathematical structure consisting of two sets V(G) (the vertices of G) and E(G) (the edges of G).\hfill \break
Vertex colouring is a hard combinatorial optimisation problem. We apply several operations on graphs to obtain different graphs and, in addition, we colour the vertices of the resulting graphs properly. Many of these graphs are truly beautiful and they provide a wide range of structures to manipulate and study.\hfill \break
A complete graph is a simple graph such that every pair of vertices is joined by an edge. A set of vertices connected in a closed chain is called a cycle. A graph obtained by joining a new vertex to every vertex of a cycle is called a wheel. A connected acyclic graph is called a tree \cite{1}. \hfill \break
\textbf{Graph Colouring Algorithms} : There are many sequential techniques for colouring a graph. One of them is greedy graph colouring, which concentrates on carefully picking the next vertex to colour and the colour for that vertex. There are two basic ordering techniques: First Fit and Degree Based Ordering.\hfill \break
\textbf{First Fit} : The First Fit colouring algorithm is given the set of vertices in an arbitrary order, and its job is to assign the lowest legal colour to each vertex. First Fit is an O(n) time algorithm. \hfill \break
\textbf{Degree Based Ordering} : Degree Based Ordering is considered a better strategy than First Fit because it uses a criterion for choosing the next vertex to be coloured from the set of uncoloured vertices rather than picking it in an arbitrary order. Well-known degree based ordering techniques are Largest Degree Based Ordering and Saturation Degree Based Ordering.\hfill \break
\textbf{Largest Degree Based Ordering} : Ordering the vertices by decreasing degree was proposed by C. Avanthay, A. Hertz and N. Zufferey \cite{5}; it is one of the oldest ordering strategies. Suppose the vertices $v_1, v_2, \ldots, v_{i-1}$ have been chosen and coloured. Vertex $v_i$ is chosen to be the vertex with the maximum degree among the uncoloured vertices. Largest Degree Ordering provides a better colouring than First Fit since in each iteration it chooses a vertex with the highest number of neighbours, which potentially produces the highest colour. The Largest Degree Based Ordering algorithm has a complexity of O($n^{2}$).\hfill \break
\textbf{Saturation Degree Based Ordering} : Saturation degree ordering was proposed by E. Falkenauer \cite{6}. The saturation degree of a vertex is defined as the number of differently coloured vertices the vertex is adjacent to. Saturation Degree Based Ordering works as follows: suppose that vertices $v_1, v_2, \ldots, v_{i-1}$ have been chosen and coloured. Then at step i, a vertex $v_i$ with the maximum saturation degree is selected; ties are broken in favour of the vertex with the largest degree. \hfill \break
Saturation Degree Based Ordering provides a better colouring than Largest Degree Based Ordering since it first colours the vertices most constrained by previous colour choices. The Saturation Degree Based Ordering algorithm also has a complexity of O($n^{2}$).
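
The greedy schemes just described fit in a few lines of code. The following Python sketch is ours and is only meant to illustrate the idea (it is not the Mathematica code used by the web site); the example graph and vertex names are arbitrary.
\begin{verbatim}
# Sketch: greedy vertex colouring with two orderings (illustrative only).

def greedy_colouring(adj, order):
    """Give each vertex the lowest colour unused by its coloured neighbours."""
    colour = {}
    for v in order:
        used = {colour[u] for u in adj[v] if u in colour}
        c = 0
        while c in used:
            c += 1
        colour[v] = c
    return colour

def first_fit(adj):
    # vertices taken in an arbitrary (insertion) order
    return greedy_colouring(adj, list(adj))

def largest_degree_first(adj):
    # vertices taken in order of decreasing degree
    return greedy_colouring(adj,
        sorted(adj, key=lambda v: len(adj[v]), reverse=True))

# The odd cycle C5 needs three colours; both orderings give proper colourings.
c5 = {0: [1, 4], 1: [0, 2], 2: [1, 3], 3: [2, 4], 4: [3, 0]}
print(first_fit(c5))             # e.g. {0: 0, 1: 1, 2: 0, 3: 1, 4: 2}
print(largest_degree_first(c5))
\end{verbatim}
Saturation Degree Based Ordering differs only in that the order is recomputed after every colouring step, using the number of distinct neighbouring colours instead of the degree.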
\section{Graph Colouring with WebMathematica}
\textit{WebMathematica} is a technology developed by Wolfram Research which enables instructors to create websites that allow users to compute and visualize results directly from a web browser. It is based on a standard Java technology called servlets. It allows a site to deliver HTML (Hyper Text Markup Language) pages that are enhanced by the addition of Mathematica commands \cite{2}. When a request is made for one of these pages, the commands are evaluated and the computed result is placed in the page. \hfill \break In this section we give applications that colour the vertices and edges of graphs with \textit{WebMathematica}.
\subsection{Vertex Colouring}
Most applications involving vertex colouring are concerned with finding the minimum number of colours required so that two adjacent vertices do not receive the same colour. A proper vertex colouring of a graph is an assignment from its vertex set to a set of colours such that any two adjacent vertices are assigned different colours. \hfill \break
The chromatic number of a graph G, denoted by \(\chi\)(G), is the minimum number of different colours required for a proper vertex colouring of G. \hfill \break Applications of vertex colouring include timetable scheduling, assignment of radio frequencies and register allocation in compilers. We use commands in the Combinatorica package with Mathematica to colour the vertices of the graph given below \cite{3}; a small brute-force sketch of \(\chi\)(G) is also given after the figure. \hfill \break
\hfill \break
\includegraphics{vertexcolouring}.
\hfill \break
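
For small graphs the chromatic number itself can be computed by brute force, trying k = 1, 2, \ldots colours until a proper colouring exists. The Python sketch below is ours and purely illustrative (it is exponential in the number of vertices); it is not the Combinatorica code referred to above.
\begin{verbatim}
# Sketch: brute-force chromatic number for small graphs (illustrative only).
from itertools import product

def is_proper(adj, colour):
    # every edge must join two differently coloured vertices
    return all(colour[u] != colour[v] for u in adj for v in adj[u])

def chromatic_number(adj):
    vertices = list(adj)
    for k in range(1, len(vertices) + 1):
        for assignment in product(range(k), repeat=len(vertices)):
            if is_proper(adj, dict(zip(vertices, assignment))):
                return k
    return len(vertices)

# The 5-cycle has chromatic number 3; the complete graph K4 has chromatic number 4.
\end{verbatim}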
\subsection{Edge Colouring}
Edge colouring is an optimization problem: an edge colouring of a graph G is an assignment of colours to the edges of G such that edges with a common endpoint have different colours. Colouring the edges with the minimum number of colours is NP-hard, which means that no polynomial-time algorithm for solving the problem is known.\hfill \break
Commands in Mathematica can be used to colour edges of graphs and give web-based examples with \textit{WebMathematica}\cite{3}.\hfill \break
\hfill \break
\includegraphics{edgescolouring}. \hfill \break
\hfill \break
If the user presses the Wheel button and enters the number of vertices, he/she can get the edge-coloured graph; an illustrative sketch of a simple greedy edge colouring follows.
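
One simple, non-optimal way to obtain a proper edge colouring is to colour the edges greedily, treating two edges as neighbours whenever they share an endpoint (that is, greedily colouring the line graph of G). The Python sketch below is ours and only illustrates the definition; it is not the algorithm used by WebMathematica.
\begin{verbatim}
# Sketch: greedy edge colouring via shared endpoints (illustrative only).
def edge_colouring(edges):
    edges = [frozenset(e) for e in edges]
    colour = {}
    for e in edges:
        used = {colour[f] for f in colour if e & f}  # f shares an endpoint with e
        c = 0
        while c in used:
            c += 1
        colour[e] = c
    return colour

# Edges of the wheel W4: hub 0 joined to the cycle 1-2-3-4-1.
w4 = [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (2, 3), (3, 4), (4, 1)]
print(edge_colouring(w4))
\end{verbatim}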
\section{Generating Graphs with WebMathematica}
Some of the most important operations on graphs are the sum, union, join and product of two graphs. These operations can be performed on common graphs, complete graphs, cycles, wheels and random trees using \emph{\textit{WebMathematica}}. \hfill \break
\textbf{Join of two graphs} : The union of two graphs together with the addition of edges between all pairs of vertices from different graphs is known as the join of the two graphs. \hfill \break
\textbf{Union of two graphs} : The union of two graphs is formed by taking the union of the vertices and edges of the graphs. Thus the disjoint union of two graphs is always disconnected. \hfill \break
\textbf{Sum of two graphs} : The sum operation of two graphs takes the edges of the second graph and adds them to the first graph. \hfill \break
\textbf{Product of two graphs} : The product of two graphs G\( \times \)H has as vertex set the Cartesian product of the vertex sets of G and H.
There is an edge between (u,v) and (s,t) if u = s and v is adjacent to t in H, or
v = t and u is adjacent to s in G. \hfill \break
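
These operations are easy to state directly on vertex and edge sets. The Python sketch below is ours and purely illustrative; graphs are taken to be (vertex list, edge list) pairs, and the vertex tags are assumptions made for the example.
\begin{verbatim}
# Sketch: disjoint union, join and Cartesian product of two graphs (illustrative).
def disjoint_union(g, h):
    (vg, eg), (vh, eh) = g, h
    v = [("g", x) for x in vg] + [("h", y) for y in vh]      # tag the vertices
    e = [frozenset({("g", a), ("g", b)}) for a, b in eg] + \
        [frozenset({("h", a), ("h", b)}) for a, b in eh]
    return v, e

def join(g, h):
    v, e = disjoint_union(g, h)
    extra = [frozenset({("g", a), ("h", b)}) for a in g[0] for b in h[0]]
    return v, e + extra

def cartesian_product(g, h):
    (vg, eg), (vh, eh) = g, h
    v = [(a, b) for a in vg for b in vh]
    e = [frozenset({(a, b), (a, d)}) for a in vg for b, d in eh] + \
        [frozenset({(a, b), (c, b)}) for b in vh for a, c in eg]
    return v, e

k2 = ([0, 1], [(0, 1)])                      # a single edge
c3 = ([0, 1, 2], [(0, 1), (1, 2), (2, 0)])   # a triangle
print(len(cartesian_product(k2, c3)[1]))     # the prism C3 x K2 has 9 edges
\end{verbatim}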
\section{Cycle Structure in Graphs with \hfill \break WebMathematica}
A cycle in a graph is a simple closed path. We will represent a cycle in a graph G as a list of vertices \(C = v_1, v_2, \ldots, v_1\) such that there is an edge of G from each vertex to the next.
\subsection{Eulerian Cycle}
An Eulerian cycle, also called an Eulerian circuit, is a path which starts and ends at the same vertex of a graph G. In other words, it is a closed walk which uses each edge of the graph exactly once. \hfill \break
Euler initiated the study of graph theory in 1736 with the famous seven bridges of Konigsberg problem: the town of Konigsberg straddled the Pregel river with a total of seven bridges connecting the two shores and two islands.
The townsfolk were interested in crossing every bridge exactly once and returning to the starting point. \hfill \break
An Eulerian cycle is a complete tour of all the edges of a graph. The term circuit is often used instead of cycle since each vertex can be visited more than once. \hfill \break
We use \textit{WebMathematica} to find the Eulerian cycle and to give web-based examples. If the number of vertices is entered, it is possible to see the Eulerian cycle in that graph if one exists \cite{4}; a small sketch of the existence test is given after the figure. \hfill \break
\includegraphics{euleriancyclegraphs}
\hfill \break
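
Euler's criterion makes the existence test straightforward: a graph has an Eulerian cycle exactly when every vertex has even degree and all edges lie in a single connected component. The Python sketch below is ours and only checks existence; it does not construct the cycle and is not the WebMathematica code.
\begin{verbatim}
# Sketch: test for the existence of an Eulerian cycle (illustrative only).
def has_eulerian_cycle(adj):
    # every vertex must have even degree ...
    if any(len(nbrs) % 2 for nbrs in adj.values()):
        return False
    # ... and all vertices of positive degree must be mutually reachable
    start = next((v for v in adj if adj[v]), None)
    if start is None:
        return True
    seen, stack = {start}, [start]
    while stack:
        for u in adj[stack.pop()]:
            if u not in seen:
                seen.add(u)
                stack.append(u)
    return all(v in seen for v in adj if adj[v])

k5 = {v: [u for u in range(5) if u != v] for v in range(5)}
print(has_eulerian_cycle(k5))   # True: every vertex of K5 has degree 4
\end{verbatim}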
\subsection{Hamiltonian Cycle}
A Hamiltonian cycle of a graph G is a cycle which visits each vertex of G exactly once, as opposed to an Eulerian cycle, which visits each edge exactly once. A Hamiltonian path is a path between two vertices of a graph that visits each vertex exactly once. The problem of computing a Hamiltonian cycle is fundamentally different from the problem of computing an Eulerian cycle, because testing whether a graph is Hamiltonian is NP-complete, which means that no polynomial-time algorithm is known for deciding whether a graph contains a Hamiltonian circuit. \hfill \break
We use \textit{WebMathematica} to find the Hamiltonian cycle and to give web-based examples. If the number of vertices is entered, it is possible to see the Hamiltonian cycle in that graph if one exists \cite{4}; a small backtracking sketch is given after the figure. \hfill \break
\hfill \break
\includegraphics{hamiltoniancyclegraphs}
\hfill \break
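
Since the problem is NP-complete, small instances are typically handled by backtracking. The Python sketch below is ours and purely illustrative: it extends a partial path one vertex at a time and closes it into a cycle once all vertices have been visited.
\begin{verbatim}
# Sketch: backtracking search for a Hamiltonian cycle (illustrative only).
def hamiltonian_cycle(adj):
    vertices = list(adj)
    n = len(vertices)
    start = vertices[0]

    def extend(path, visited):
        if len(path) == n:
            # close the cycle if the last vertex is adjacent to the start
            return path + [start] if start in adj[path[-1]] else None
        for u in adj[path[-1]]:
            if u not in visited:
                found = extend(path + [u], visited | {u})
                if found:
                    return found
        return None

    return extend([start], {start})

# The wheel W4: hub 0 joined to the cycle 1-2-3-4-1.
w4 = {0: [1, 2, 3, 4], 1: [0, 2, 4], 2: [0, 1, 3], 3: [0, 2, 4], 4: [0, 1, 3]}
print(hamiltonian_cycle(w4))    # e.g. [0, 1, 2, 3, 4, 0]
\end{verbatim}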
\subsection{An Application}
Some scheduling problems can be cast as graph colouring, i.e.\ an assignment of colours to the vertices of a graph. We discuss a simple example of colouring the vertices of a graph with a small number k of colours and present computational results for calculating the chromatic number, i.e.\ the minimum possible value of such a k. \hfill \break
\textbf{Example} : Set of students: S1, S2, S3, S4, S5, S6, S7, S8, S9. Examination subjects
for each group: {algebra, real analysis, and topology}, {algebra, operations
research, and complex analysis}, {real analysis, functional analysis, and
topology}, {algebra, graph theory, and combinatorics}, {combinatorics, topology,
and functional analysis}, {operations research, graph theory, and coding
theory}, {operations research, graph theory, and number theory}, {algebra,
number theory, and coding theory}, {algebra, operations research, and real
analysis}. \hfill \break
Let S be the set of students and P = \(\{1, 2, 3, 4, 5, 6, 7, 8, 9, 10\}\) be the set of
examinations, respectively algebra, real analysis, topology, operations research,
complex analysis, functional analysis, graph theory, combinatorics, coding
theory, and number theory. Let S(p) be the set of students who will take the
examination p \(\in\) P. Form a graph G = G(P, E), where a, b \(\in\) P are adjacent if and only if S(a) \(\cap\) S(b) \(\neq\) \(\emptyset\). Then each proper vertex colouring of G yields an examination schedule, with the vertices in any colour class representing the
examinations scheduled on a particular day. Thus \(\chi\)(G) gives the minimum number of days required for the examination schedule. \hfill \break
Five days are required; the subjects grouped in the same braces below are scheduled on the same day:
\(\{\{1, 6\}, \{2, 8, 9\}, \{3, 4\}, \{5, 7\}, \{10\}\}\)
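
This construction is easy to check mechanically. The Python sketch below is ours and purely illustrative: it builds the conflict graph on the ten subjects from the nine student groups listed above and verifies that the five-day schedule is a proper colouring.
\begin{verbatim}
# Sketch: build the exam conflict graph and check the 5-day schedule above.
groups = [        # subjects taken by each student group, numbered as in the text
    {1, 2, 3}, {1, 4, 5}, {2, 6, 3}, {1, 7, 8}, {8, 3, 6},
    {4, 7, 9}, {4, 7, 10}, {1, 10, 9}, {1, 4, 2},
]
edges = {frozenset({a, b}) for g in groups for a in g for b in g if a != b}

schedule = [{1, 6}, {2, 8, 9}, {3, 4}, {5, 7}, {10}]
day_of = {p: d for d, day in enumerate(schedule) for p in day}
assert set(day_of) == set(range(1, 11))        # every subject gets a day

clash = [sorted(e) for e in edges if len({day_of[p] for p in e}) == 1]
print("proper colouring" if not clash else "conflicts: %s" % clash)
# prints "proper colouring": no two subjects sharing a student fall on one day
\end{verbatim}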
\begin{thebibliography} {1}
\bibitem {1} Jonathan,G and Jay,Y : Graph theory and its application, CRC Press, (1999).
\bibitem {2} Pemmaraju,S. and Skiena,S. : Computational Discrete Mathematics, Cambridge University Press,(2003).
\bibitem {3} Ufuktepe,U. Bacak,G. and Beseri,T. : Graph colouring with \textit{WebMathematica}, Springer-Verlag,(2003).
\bibitem{4} Google Images
\bibitem{5} C. Avanthay, A. Hertz, N. Zufferey, A variable neighborhood search for graph coloring, European Journal of Operational Research 151 (2) (2003) 379–388.
\bibitem {6} E. Falkenauer, A hybrid grouping genetic algorithm for bin packing, Journal of Heuristics 2 (1) (1996) 5–30.
\end{thebibliography}
\end{document} |
/-
Copyright (c) 2020 Scott Morrison. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Scott Morrison
-/
import category_theory.monoidal.category
/-!
# Monoidal opposites
We write `Cᵐᵒᵖ` for the monoidal opposite of a monoidal category `C`.
-/
universes v₁ v₂ u₁ u₂
variables {C : Type u₁}
namespace category_theory
open category_theory.monoidal_category
/-- A type synonym for the monoidal opposite. Use the notation `Cᴹᵒᵖ`. -/
@[nolint has_inhabited_instance]
def monoidal_opposite (C : Type u₁) := C
namespace monoidal_opposite
notation C `ᴹᵒᵖ`:std.prec.max_plus := monoidal_opposite C
/-- Think of an object of `C` as an object of `Cᴹᵒᵖ`. -/
@[pp_nodot]
def mop (X : C) : Cᴹᵒᵖ := X
/-- Think of an object of `Cᴹᵒᵖ` as an object of `C`. -/
@[pp_nodot]
def unmop (X : Cᴹᵒᵖ) : C := X
lemma op_injective : function.injective (mop : C → Cᴹᵒᵖ) := λ _ _, id
lemma unop_injective : function.injective (unmop : Cᴹᵒᵖ → C) := λ _ _, id
@[simp] lemma op_inj_iff (x y : C) : mop x = mop y ↔ x = y := iff.rfl
@[simp] lemma unop_inj_iff (x y : Cᴹᵒᵖ) : unmop x = unmop y ↔ x = y := iff.rfl
attribute [irreducible] monoidal_opposite
@[simp] lemma mop_unmop (X : Cᴹᵒᵖ) : mop (unmop X) = X := rfl
@[simp] lemma unmop_mop (X : C) : unmop (mop X) = X := rfl
instance monoidal_opposite_category [I : category.{v₁} C] : category Cᴹᵒᵖ :=
{ hom := λ X Y, unmop X ⟶ unmop Y,
id := λ X, 𝟙 (unmop X),
comp := λ X Y Z f g, f ≫ g, }
end monoidal_opposite
end category_theory
open category_theory
open category_theory.monoidal_opposite
variables [category.{v₁} C]
/-- The monoidal opposite of a morphism `f : X ⟶ Y` is just `f`, thought of as `mop X ⟶ mop Y`. -/
def quiver.hom.mop {X Y : C} (f : X ⟶ Y) : @quiver.hom Cᴹᵒᵖ _ (mop X) (mop Y) := f
/-- We can think of a morphism `f : mop X ⟶ mop Y` as a morphism `X ⟶ Y`. -/
def quiver.hom.unmop {X Y : Cᴹᵒᵖ} (f : X ⟶ Y) : unmop X ⟶ unmop Y := f
namespace category_theory
lemma mop_inj {X Y : C} :
function.injective (quiver.hom.mop : (X ⟶ Y) → (mop X ⟶ mop Y)) :=
λ _ _ H, congr_arg quiver.hom.unmop H
lemma unmop_inj {X Y : Cᴹᵒᵖ} :
function.injective (quiver.hom.unmop : (X ⟶ Y) → (unmop X ⟶ unmop Y)) :=
λ _ _ H, congr_arg quiver.hom.mop H
@[simp] lemma unmop_mop {X Y : C} {f : X ⟶ Y} : f.mop.unmop = f := rfl
@[simp] lemma mop_unmop {X Y : Cᴹᵒᵖ} {f : X ⟶ Y} : f.unmop.mop = f := rfl
@[simp] lemma mop_comp {X Y Z : C} {f : X ⟶ Y} {g : Y ⟶ Z} :
(f ≫ g).mop = f.mop ≫ g.mop := rfl
@[simp] lemma mop_id {X : C} : (𝟙 X).mop = 𝟙 (mop X) := rfl
@[simp] lemma unmop_comp {X Y Z : Cᴹᵒᵖ} {f : X ⟶ Y} {g : Y ⟶ Z} :
(f ≫ g).unmop = f.unmop ≫ g.unmop := rfl
@[simp] lemma unmop_id {X : Cᴹᵒᵖ} : (𝟙 X).unmop = 𝟙 (unmop X) := rfl
@[simp] lemma unmop_id_mop {X : C} : (𝟙 (mop X)).unmop = 𝟙 X := rfl
@[simp] lemma mop_id_unmop {X : Cᴹᵒᵖ} : (𝟙 (unmop X)).mop = 𝟙 X := rfl
namespace iso
variables {X Y : C}
/-- An isomorphism in `C` gives an isomorphism in `Cᴹᵒᵖ`. -/
@[simps]
def mop (f : X ≅ Y) : mop X ≅ mop Y :=
{ hom := f.hom.mop,
inv := f.inv.mop,
hom_inv_id' := unmop_inj f.hom_inv_id,
inv_hom_id' := unmop_inj f.inv_hom_id }
end iso
variables [monoidal_category.{v₁} C]
open opposite monoidal_category
instance monoidal_category_op : monoidal_category Cᵒᵖ :=
{ tensor_obj := λ X Y, op (unop X ⊗ unop Y),
tensor_hom := λ X₁ Y₁ X₂ Y₂ f g, (f.unop ⊗ g.unop).op,
tensor_unit := op (𝟙_ C),
associator := λ X Y Z, (α_ (unop X) (unop Y) (unop Z)).symm.op,
left_unitor := λ X, (λ_ (unop X)).symm.op,
right_unitor := λ X, (ρ_ (unop X)).symm.op,
associator_naturality' :=
begin
intros,
apply quiver.hom.unop_inj,
simp [associator_inv_naturality],
end,
left_unitor_naturality' :=
begin
intros,
apply quiver.hom.unop_inj,
simp [left_unitor_inv_naturality],
end,
right_unitor_naturality' :=
begin
intros,
apply quiver.hom.unop_inj,
simp [right_unitor_inv_naturality],
end,
triangle' :=
begin
intros,
apply quiver.hom.unop_inj,
dsimp,
simp,
end,
pentagon' :=
begin
intros,
apply quiver.hom.unop_inj,
dsimp,
simp [pentagon_inv],
end }
lemma op_tensor_obj (X Y : Cᵒᵖ) : X ⊗ Y = op (unop X ⊗ unop Y) := rfl
lemma op_tensor_unit : (𝟙_ Cᵒᵖ) = op (𝟙_ C) := rfl
instance monoidal_category_mop : monoidal_category Cᴹᵒᵖ :=
{ tensor_obj := λ X Y, mop (unmop Y ⊗ unmop X),
tensor_hom := λ X₁ Y₁ X₂ Y₂ f g, (g.unmop ⊗ f.unmop).mop,
tensor_unit := mop (𝟙_ C),
associator := λ X Y Z, (α_ (unmop Z) (unmop Y) (unmop X)).symm.mop,
left_unitor := λ X, (ρ_ (unmop X)).mop,
right_unitor := λ X, (λ_ (unmop X)).mop,
associator_naturality' :=
begin
intros,
apply unmop_inj,
simp [associator_inv_naturality],
end,
left_unitor_naturality' :=
begin
intros,
apply unmop_inj,
simp [right_unitor_naturality],
end,
right_unitor_naturality' :=
begin
intros,
apply unmop_inj,
simp [left_unitor_naturality],
end,
triangle' :=
begin
intros,
apply unmop_inj,
dsimp,
simp,
end,
pentagon' :=
begin
intros,
apply unmop_inj,
dsimp,
simp [pentagon_inv],
end }
lemma mop_tensor_obj (X Y : Cᴹᵒᵖ) : X ⊗ Y = mop (unmop Y ⊗ unmop X) := rfl
lemma mop_tensor_unit : (𝟙_ Cᴹᵒᵖ) = mop (𝟙_ C) := rfl
end category_theory
|
/-
Copyright (c) 2021 Eric Wieser. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Eric Wieser
! This file was ported from Lean 3 source module group_theory.submonoid.center
! leanprover-community/mathlib commit 6cb77a8eaff0ddd100e87b1591c6d3ad319514ff
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathlib.GroupTheory.Submonoid.Operations
import Mathlib.GroupTheory.Subsemigroup.Center
/-!
# Centers of monoids
## Main definitions
* `Submonoid.center`: the center of a monoid
* `AddSubmonoid.center`: the center of an additive monoid
We provide `Subgroup.center`, `AddSubgroup.center`, `Subsemiring.center`, and `Subring.center` in
other files.
-/
namespace Submonoid
section
variable (M : Type _) [Monoid M]
/-- The center of a monoid `M` is the set of elements that commute with everything in `M` -/
@[to_additive
"The center of a monoid `M` is the set of elements that commute with everything in `M`"]
def center : Submonoid M where
carrier := Set.center M
one_mem' := Set.one_mem_center M
mul_mem' := Set.mul_mem_center
#align submonoid.center Submonoid.center
#align add_submonoid.center AddSubmonoid.center
@[to_additive]
theorem coe_center : ↑(center M) = Set.center M :=
rfl
#align submonoid.coe_center Submonoid.coe_center
#align add_submonoid.coe_center AddSubmonoid.coe_center
@[to_additive (attr := simp) AddSubmonoid.center_toAddSubsemigroup]
theorem center_toSubsemigroup : (center M).toSubsemigroup = Subsemigroup.center M :=
  rfl
variable {M}
@[to_additive]
theorem mem_center_iff {z : M} : z ∈ center M ↔ ∀ g, g * z = z * g :=
Iff.rfl
#align submonoid.mem_center_iff Submonoid.mem_center_iff
#align add_submonoid.mem_center_iff AddSubmonoid.mem_center_iff
@[to_additive]
instance decidableMemCenter (a) [Decidable <| ∀ b : M, b * a = a * b] : Decidable (a ∈ center M) :=
decidable_of_iff' _ mem_center_iff
#align submonoid.decidable_mem_center Submonoid.decidableMemCenter
#align add_submonoid.decidable_mem_center AddSubmonoid.decidableMemCenter
/-- The center of a monoid is commutative. -/
instance center.commMonoid : CommMonoid (center M) :=
{ (center M).toMonoid with
mul_comm := fun _ b => Subtype.ext <| b.prop _ }
/-- The center of a monoid acts commutatively on that monoid. -/
instance center.smulCommClass_left : SMulCommClass (center M) M M
where smul_comm m x y := (Commute.left_comm (m.prop x) y).symm
#align submonoid.center.smul_comm_class_left Submonoid.center.smulCommClass_left
/-- The center of a monoid acts commutatively on that monoid. -/
instance center.smulCommClass_right : SMulCommClass M (center M) M :=
SMulCommClass.symm _ _ _
#align submonoid.center.smul_comm_class_right Submonoid.center.smulCommClass_right
/-! Note that `smulCommClass (center M) (center M) M` is already implied by
`Submonoid.smulCommClass_right` -/
example : SMulCommClass (center M) (center M) M := by infer_instance
end
section
variable (M : Type _) [CommMonoid M]
@[simp]
theorem center_eq_top : center M = ⊤ :=
SetLike.coe_injective (Set.center_eq_univ M)
#align submonoid.center_eq_top Submonoid.center_eq_top
end
end Submonoid
-- Porting note: `assert_not_exists` is not ported yet
-- Guard against import creep
--assert_not_exists finset
|
!! Local Data
INTEGER :: grid_error
INTEGER :: sum_outer_indices
CALL MPI_Comm_size(communicator,helper%comm_size,grid_error)
!! Build Storage
CALL DestructMatrix(gathered_matrix)
sum_outer_indices = (matrix%columns+1)*helper%comm_size
ALLOCATE(gathered_matrix%outer_index(sum_outer_indices+1))
!! Gather Outer Indices
CALL MPI_IAllGather(matrix%outer_index, matrix%columns+1,&
& MPINTINTEGER, gathered_matrix%outer_index, matrix%columns+1, &
& MPINTINTEGER, communicator, helper%outer_request, grid_error)
|
import tactic
import ring_theory.power_series.basic
noncomputable theory
open_locale classical
variables {α β γ R : Type*} [comm_ring R]
example : α → α ⊕ β := sum.inl
example : β → α ⊕ β := sum.inr
example (f : α → γ) (g : β → γ) : α ⊕ β → γ :=
λ t, sum.rec_on t f g
example (f : α → γ) (g : β → γ) : α ⊕ β → γ := λ t,
match t with
| sum.inl a := f a
| sum.inr b := g b
end
#check @dif_neg
#check @dif_pos
#check tactic.split_ifs
variables (α β R)
def incl_fun : mv_power_series α R → mv_power_series (α ⊕ β) R :=
show ((α →₀ ℕ) → R) → ((α ⊕ β →₀ ℕ) → R), from λ f m,
let n := finsupp.sum_finsupp_equiv_prod_finsupp m in
if n.2 = 0 then f n.1 else 0
def incl : mv_power_series α R →+* mv_power_series (α ⊕ β) R :=
{ to_fun := incl_fun α β R,
map_one' := sorry,
map_mul' := sorry,
map_zero' := sorry,
map_add' := sorry }
def incr_fun : mv_power_series β R → mv_power_series (α ⊕ β) R :=
show ((β →₀ ℕ) → R) → ((α ⊕ β →₀ ℕ) → R), from λ f m,
let n := finsupp.sum_finsupp_equiv_prod_finsupp m in
if n.1 = 0 then f n.2 else 0
def incr : mv_power_series β R →+* mv_power_series (α ⊕ β) R :=
{ to_fun := incr_fun α β R,
map_one' := sorry,
map_mul' := sorry,
map_zero' := sorry,
map_add' := sorry } |
using Rcl
using Test
# run tests in the whole directory; the most recently modified files
# are run first, which makes the waiting time shorter when writing
# or modifying unit tests
function run_tests(dir)
files = readdir(dir)
filter!(files) do x
occursin(r".*\.jl$", x)
end
sort!(files; by = fn -> stat(joinpath(dir, fn)).mtime, rev = true)
map(files) do file
include(joinpath(dir, file))
end
end
@testset "Rcl Test" begin
run_tests(joinpath(dirname(@__FILE__), "unit"))
end
|
[STATEMENT]
lemma length_nzrows: "length (nzrows A) \<le> dim_row A"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. length (nzrows A) \<le> dim_row A
[PROOF STEP]
by (simp add: nzrows_def length_rows[symmetric] del: length_rows) |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS -fno-warn-orphans #-}
module Data.Array.Accelerate.Data.Complex (
  -- * Rectangular form
Complex(..),
real,
imag,
-- * Polar form
mkPolar,
cis,
polar,
magnitude,
phase,
-- * Conjugate
conjugate,
) where
import Prelude
import Data.Complex ( Complex(..) )
import Data.Array.Accelerate as A
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Product
import Data.Array.Accelerate.Array.Sugar
type instance EltRepr (Complex a) = EltRepr (a, a)
instance Elt a => Elt (Complex a) where
eltType _ = eltType (undefined :: (a,a))
toElt p = let (a, b) = toElt p in a :+ b
fromElt (a :+ b) = fromElt (a, b)
instance cst a => IsProduct cst (Complex a) where
type ProdRepr (Complex a) = ProdRepr (a, a)
fromProd cst (x :+ y) = fromProd cst (x, y)
toProd cst p = let (x, y) = toProd cst p in (x :+ y)
prod cst _ = prod cst (undefined :: (a, a))
instance (Lift Exp a, Elt (Plain a)) => Lift Exp (Complex a) where
type Plain (Complex a) = Complex (Plain a)
lift (x1 :+ x2) = Exp $ Tuple (NilTup `SnocTup` lift x1 `SnocTup` lift x2)
instance Elt a => Unlift Exp (Complex (Exp a)) where
unlift e
= let x = Exp $ SuccTupIdx ZeroTupIdx `Prj` e
y = Exp $ ZeroTupIdx `Prj` e
in
x :+ y
instance (Elt a, IsFloating a) => Num (Exp (Complex a)) where
(+) = lift2 ((+) :: Complex (Exp a) -> Complex (Exp a) -> Complex (Exp a))
(-) = lift2 ((-) :: Complex (Exp a) -> Complex (Exp a) -> Complex (Exp a))
(*) = lift2 ((*) :: Complex (Exp a) -> Complex (Exp a) -> Complex (Exp a))
negate = lift1 (negate :: Complex (Exp a) -> Complex (Exp a))
signum = lift1 (signum :: Complex (Exp a) -> Complex (Exp a))
abs = lift1 (abs :: Complex (Exp a) -> Complex (Exp a))
fromInteger n = lift (constant (fromInteger n) :+ 0)
instance (Elt a, IsFloating a) => Fractional (Exp (Complex a)) where
c / c'
= let x :+ y = unlift c
x' :+ y' = unlift c' :: Complex (Exp a)
den = x'^(2 :: Int) + y'^(2 :: Int)
re = (x * x' + y * y') / den
im = (y * x' - x * y') / den
in
lift (re :+ im)
fromRational x
= lift (constant (fromRational x) :+ constant 0)
instance (Elt a, IsFloating a, RealFloat a) => Floating (Exp (Complex a)) where
sqrt z
= let
x :+ y = unlift z
v' = abs y / (u'*2)
u' = sqrt ((magnitude z + abs x) / 2)
(u, v) = unlift ( x A.<* 0 ? ( lift (v',u'), lift (u',v') ) )
in
x ==* 0 &&* y ==* 0 ?
{- then -} ( 0
{- else -} , lift (u :+ (y A.<* 0 ? (-v,v))) )
pi = lift (pi :+ constant 0)
log z = lift (log (magnitude z) :+ phase z)
exp = lift1 (exp :: Complex (Exp a) -> Complex (Exp a))
sin = lift1 (sin :: Complex (Exp a) -> Complex (Exp a))
cos = lift1 (cos :: Complex (Exp a) -> Complex (Exp a))
tan = lift1 (tan :: Complex (Exp a) -> Complex (Exp a))
sinh = lift1 (sinh :: Complex (Exp a) -> Complex (Exp a))
cosh = lift1 (cosh :: Complex (Exp a) -> Complex (Exp a))
tanh = lift1 (tanh :: Complex (Exp a) -> Complex (Exp a))
asin = lift1 (asin :: Complex (Exp a) -> Complex (Exp a))
acos = lift1 (acos :: Complex (Exp a) -> Complex (Exp a))
atan = lift1 (atan :: Complex (Exp a) -> Complex (Exp a))
asinh = lift1 (asinh :: Complex (Exp a) -> Complex (Exp a))
acosh = lift1 (acosh :: Complex (Exp a) -> Complex (Exp a))
atanh = lift1 (atanh :: Complex (Exp a) -> Complex (Exp a))
-- | The non-negative magnitude of a complex number
--
magnitude :: (Elt a, IsFloating a) => Exp (Complex a) -> Exp a
magnitude c =
let r :+ i = unlift c
in sqrt (r*r + i*i)
-- | The phase of a complex number, in the range @(-'pi', 'pi']@. If the
-- magnitude is zero, then so is the phase.
--
phase :: (Elt a, IsFloating a) => Exp (Complex a) -> Exp a
phase c =
let x :+ y = unlift c
in atan2 y x
-- | The function 'polar' takes a complex number and returns a (magnitude,
-- phase) pair in canonical form: the magnitude is non-negative, and the phase
-- in the range @(-'pi', 'pi']@; if the magnitude is zero, then so is the phase.
--
polar :: (Elt a, IsFloating a) => Exp (Complex a) -> Exp (a,a)
polar z = lift (magnitude z, phase z)
-- | Form a complex number from polar components of magnitude and phase.
--
mkPolar :: (Elt a, IsFloating a) => Exp a -> Exp a -> Exp (Complex a)
mkPolar r theta = lift $ r * cos theta :+ r * sin theta
-- | @'cis' t@ is a complex value with magnitude @1@ and phase @t@ (modulo
-- @2*'pi'@).
--
cis :: (Elt a, IsFloating a) => Exp a -> Exp (Complex a)
cis theta = lift $ cos theta :+ sin theta
-- | Return the real part of a complex number
--
real :: Elt a => Exp (Complex a) -> Exp a
real c =
let r :+ _ = unlift c
in r
-- | Return the imaginary part of a complex number
--
imag :: Elt a => Exp (Complex a) -> Exp a
imag c =
let _ :+ i = unlift c
in i
-- | Return the complex conjugate of a complex number, defined as
--
-- > conjugate(Z) = X - iY
--
conjugate :: (Elt a, IsNum a) => Exp (Complex a) -> Exp (Complex a)
conjugate z = lift $ real z :+ (- imag z)
|
The argument principle states that, for a meromorphic function $f$ with no zeros or poles on a closed curve $C$, the number of zeros of $f$ inside $C$ is equal to the number of poles of $f$ inside $C$ plus the winding number of the image curve $f \circ C$ around the origin, with zeros and poles counted with multiplicity. |
% Script demonstrating usage of the ccmod function.
%
% Author: Brendt Wohlberg <[email protected]> Modified: 2015-04-09
%
% This file is part of the SPORCO library. Details of the copyright
% and user license can be found in the 'Copyright' and 'License' files
% distributed with the library.
% Training images
S0 = zeros(512, 512, 5);
S0(:,:,1) = single(stdimage('lena.grey')) / 255;
S0(:,:,2) = single(stdimage('barbara.grey')) / 255;
S0(:,:,3) = single(stdimage('kiel.grey')) / 255;
S0(:,:,4) = single(rgb2gray(stdimage('mandrill'))) / 255;
tmp = single(stdimage('man.grey')) / 255;
S0(:,:,5) = tmp(101:612, 101:612);
% Reduce image size to speed up the demo script
tmp = zeros(256, 256, 5);
for k = 1:size(S0,3),
tmp(:,:,k) = imresize(S0(:,:,k), 0.5);
end
S0 = tmp;
% Filter input images and compute highpass images
npd = 16;
fltlmbd = 5;
[Sl, Sh] = lowpass(S0, fltlmbd, npd);
% Load dictionary
load([sporco_path '/Data/ConvDict.mat']);
dmap = containers.Map(ConvDict.Label, ConvDict.Dict);
D0 = dmap('12x12x36');
% Set up cbpdn parameters
lambda = 0.1;
opt = [];
opt.Verbose = 1;
opt.MaxMainIter = 200;
opt.AutoRho = 1;
opt.AutoRhoPeriod = 1;
opt.RelaxParam = 1.8;
% Compute sparse representation on current dictionary
[X, optinf] = cbpdn(D0, Sh, lambda, opt);
% Set up ccmod parameters
opt = [];
opt.Verbose = 1;
opt.MaxMainIter = 500;
opt.sigma = size(Sh,3);
opt.AutoSigma = 1;
opt.AutoSigmaPeriod = 1;
opt.RelaxParam = 1.8;
opt.AuxVarObj = 1;
% Update dictionary for training set S
[D1, optinf1] = ccmod(X, Sh, size(D0), opt);
% Plot functional value and residuals
figure;
subplot(1,3,1);
plot(optinf1.itstat(:,2));
xlabel('Iterations');
ylabel('Functional value');
subplot(1,3,2);
semilogy(optinf1.itstat(:,4));
xlabel('Iterations');
ylabel('Primal residual');
subplot(1,3,3);
semilogy(optinf1.itstat(:,5));
xlabel('Iterations');
ylabel('Dual residual');
% Update dictionary with new filter sizes for training set S
dsz = [repmat([12 12]', [1 24]) repmat([8 8]', [1 12])];
[D2, optinf2] = ccmod(X, Sh, dsz, opt);
% Display dictionaries
figure;
subplot(1,3,1);
imdisp(tiledict(D0));
title('D0');
subplot(1,3,2);
imdisp(tiledict(D1));
title('D1');
subplot(1,3,3);
imdisp(tiledict(D2, dsz));
title('D2');
% Plot functional value evolution
figure;
plot(optinf1.itstat(:,2), 'r');
hold on;
plot(optinf2.itstat(:,2), 'b');
hold off;
xlabel('Iterations');
ylabel('Functional value');
legend('D1', 'D2');
|
Load LFindLoad.
From lfind Require Import LFind.
From QuickChick Require Import QuickChick.
From adtind Require Import goal33.
Derive Show for natural.
Derive Arbitrary for natural.
Instance Dec_Eq_natural : Dec_Eq natural.
Proof. dec_eq. Qed.
Lemma conj27synthconj1 : forall (lv0 : natural) (lv1 : natural) (lv2 : natural) (lv3 : natural), (@eq natural (Succ (plus (plus Zero (mult lv0 lv1)) (plus lv0 lv1))) (plus lv2 lv3)).
Admitted.
QuickChick conj27synthconj1.
|
module Augmentation.DGA
(DGA_Map (DGA_Map)
,Augmentation (Aug)
,applyDGAMap
,compose_maps
,Algebra
,fromDGAMap
) where
import Algebra
import Braid
import Data.Maybe
import Numeric.LinearAlgebra as N
import Data.List
import Debug.Trace
default (Int,Double)
data DGA_Map = DGA_Map [(Char,Algebra)] deriving Eq
instance Show DGA_Map where
show (DGA_Map l) = "[" ++ (foldr (\(c,e) xs -> [c] ++ "→" ++ show e ++ (if xs /= "]" then "," else "") ++ xs) "]" l)
data Augmentation = Aug StdBraid [(Char,[Vector Z])]
instance Show Augmentation where
show (Aug b m) = (show b) ++ "\n" ++ (concat $ map (\(c,vs) -> [c] ++ "→" ++ (foldr (\x xs -> show x ++ if xs == "" then "" else "+"++xs) "" vs) ++ " ") m)
instance Eq Augmentation where
(Aug b1 m1) == (Aug b2 m2) = b1 == b2 && inZ2 m1 == inZ2 m2 && (m1 == m2 || eqh m1 m2)
inZ2 :: [(Char,[Vector Z])] -> [(Char,Z2)]
inZ2 = map (\(c,vs) -> (c,sum $ map (\_ -> fromInteger 1) vs))
isUpperTri :: Matrix Z -> Bool
isUpperTri mat = uptrih mat 0
uptrih :: Matrix Z -> Int -> Bool
uptrih mat i = let (l,w) = size mat
cond = and $ map (\j -> (==0) $ mat `atIndex` (j,i)) [i+1..l-1]
in if i == w then True else cond && (uptrih mat $ i+1)
eqh :: [(Char,[Vector Z])] -> [(Char,[Vector Z])] -> Bool
eqh l1 l2 = maybe False id $ do
{ l1' <- mapM (\c -> lookup c l1) $ map fst l2
; l2' <- mapM (\(l,l') -> if length l == length l' then Just l' else Nothing) $ zip l1' $ map snd l2
; return True
; let dims1 = nub $ map size $ concat l1'
; let dims2 = nub $ map size $ concat l2'
; dim <- if length dims1 == 1 && length dims2 == 1 && dims1 == dims2 then Just $ head dims1 else Nothing
; let m2 = fromRows $ map (fromZ :: Vector Z -> Vector R) $ concat l2'
; let n0s = map (\l -> (0,(length $ permutations l)-1)) l1'
; let suc mns = if mns == [] then [] else if (fst $ head mns) == (snd $ head mns) then (0,snd $ head mns):(suc $ tail mns) else (1 + (fst $ head mns), snd $ head mns):(tail mns)
; let bound (acc,mns) = if mns == [] then acc else bound (((snd $ head mns)+1)*acc,tail mns)
; let ubound = bound (1,n0s)
; let mat mns = fromRows $ map (fromZ :: Vector Z -> Vector R) $ concat $ zipWith (\l (m,_) -> (permutations l) !! m) l1' mns
; let check m = maybe False id $ do
{ let (l,u,p,s) = lu m
; let (lR,lC) = size l
; let sq = abs $ lR - lC
; let l' = if lC == lR then l else if lC < lR then l ||| (konst 0 (lC,sq) === ident sq) else l === (konst 0 (sq,lR) ||| ident sq)
; linv <- if det l' == 0 then Nothing else Just $ inv l'
; pinv <- if s == 0 then Nothing else Just $ inv p
; let mat' = linv N.<> pinv N.<> m2
; let matz = fromColumns $ map (toZ . roundVector) $ toColumns mat'
; let cond = isUpperTri matz
; let cond' = and $ map (\x -> (x - (fromIntegral $ floor x) < cutoff) || (((fromIntegral $ ceiling x) - x) < cutoff)) $ toList $ flatten mat'
; return $ cond && cond'
}
; let checkAll k mns = if size (mat mns) /= size m2 then trace "Dim mismatch" False else if check $ mat mns then trace "Found one!" True else if k > ubound then trace ("Tried: "++show k) False else checkAll (k+1) (suc mns)
; return $ checkAll 0 n0s
}
fromDGAMap :: StdBraid -> DGA_Map -> [Char] -> Maybe Augmentation
fromDGAMap b (DGA_Map l) chars = do
{ l' <- mapM (\(c,a) -> (represent chars a) >>= (\vs -> return (c,vs))) l
; return $ Aug b l'
}
compose_maps :: DGA_Map -> DGA_Map -> DGA_Map
compose_maps (DGA_Map map1) (DGA_Map map2) = DGA_Map $ (map (\(c,exp) -> (c,applyDGAMap (DGA_Map map2) exp)) map1) ++ (filter (\(c,_) -> not $ elem c $ map fst map1) map2)
applyDGAMap :: DGA_Map -> Algebra -> Algebra
applyDGAMap (DGA_Map alist) a = appmaph alist a
appmaph::[(Char,Algebra)] -> Algebra -> Algebra
appmaph [] = id
appmaph cs = plugIn (\c -> case (lookup c cs) of Just e -> e
Nothing -> G $ E c)
|
section \<open>Framework Graph Lifting for Noninterference\<close>
theory LiftingInter
imports NonInterferenceInter
begin
text \<open>In this section, we show how a valid CFG from the slicing framework in
\<^cite>\<open>"Wasserrab:08"\<close> can be lifted to fulfil all properties of the
\<open>NonInterferenceIntraGraph\<close> locale. Basically, we redefine the
hitherto existing \<open>Entry\<close> and \<open>Exit\<close> nodes as new
\<open>High\<close> and \<open>Low\<close> nodes, and introduce two new nodes
\<open>NewEntry\<close> and \<open>NewExit\<close>. Then, we have to lift all functions
to operate on this new graph.\<close>
subsection \<open>Liftings\<close>
subsubsection \<open>The datatypes\<close>
datatype 'node LDCFG_node = Node 'node
| NewEntry
| NewExit
type_synonym ('edge,'node,'var,'val,'ret,'pname) LDCFG_edge =
"'node LDCFG_node \<times> (('var,'val,'ret,'pname) edge_kind) \<times> 'node LDCFG_node"
subsubsection \<open>Lifting basic definitions using @{typ 'edge} and @{typ 'node}\<close>
inductive lift_valid_edge :: "('edge \<Rightarrow> bool) \<Rightarrow> ('edge \<Rightarrow> 'node) \<Rightarrow> ('edge \<Rightarrow> 'node) \<Rightarrow>
('edge \<Rightarrow> ('var,'val,'ret,'pname) edge_kind) \<Rightarrow> 'node \<Rightarrow> 'node \<Rightarrow>
('edge,'node,'var,'val,'ret,'pname) LDCFG_edge \<Rightarrow>
bool"
for valid_edge::"'edge \<Rightarrow> bool" and src::"'edge \<Rightarrow> 'node" and trg::"'edge \<Rightarrow> 'node"
and knd::"'edge \<Rightarrow> ('var,'val,'ret,'pname) edge_kind" and E::'node and X::'node
where lve_edge:
"\<lbrakk>valid_edge a; src a \<noteq> E \<or> trg a \<noteq> X;
e = (Node (src a),knd a,Node (trg a))\<rbrakk>
\<Longrightarrow> lift_valid_edge valid_edge src trg knd E X e"
| lve_Entry_edge:
"e = (NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node E)
\<Longrightarrow> lift_valid_edge valid_edge src trg knd E X e"
| lve_Exit_edge:
"e = (Node X,(\<lambda>s. True)\<^sub>\<surd>,NewExit)
\<Longrightarrow> lift_valid_edge valid_edge src trg knd E X e"
| lve_Entry_Exit_edge:
"e = (NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)
\<Longrightarrow> lift_valid_edge valid_edge src trg knd E X e"
fun lift_get_proc :: "('node \<Rightarrow> 'pname) \<Rightarrow> 'pname \<Rightarrow> 'node LDCFG_node \<Rightarrow> 'pname"
where "lift_get_proc get_proc Main (Node n) = get_proc n"
| "lift_get_proc get_proc Main NewEntry = Main"
| "lift_get_proc get_proc Main NewExit = Main"
inductive_set lift_get_return_edges :: "('edge \<Rightarrow> 'edge set) \<Rightarrow> ('edge \<Rightarrow> bool) \<Rightarrow>
('edge \<Rightarrow> 'node) \<Rightarrow> ('edge \<Rightarrow> 'node) \<Rightarrow> ('edge \<Rightarrow> ('var,'val,'ret,'pname) edge_kind)
\<Rightarrow> ('edge,'node,'var,'val,'ret,'pname) LDCFG_edge
\<Rightarrow> ('edge,'node,'var,'val,'ret,'pname) LDCFG_edge set"
for get_return_edges :: "'edge \<Rightarrow> 'edge set" and valid_edge :: "'edge \<Rightarrow> bool"
and src::"'edge \<Rightarrow> 'node" and trg::"'edge \<Rightarrow> 'node"
and knd::"'edge \<Rightarrow> ('var,'val,'ret,'pname) edge_kind"
and e::"('edge,'node,'var,'val,'ret,'pname) LDCFG_edge"
where lift_get_return_edgesI:
"\<lbrakk>e = (Node (src a),knd a,Node (trg a)); valid_edge a; a' \<in> get_return_edges a;
e' = (Node (src a'),knd a',Node (trg a'))\<rbrakk>
\<Longrightarrow> e' \<in> lift_get_return_edges get_return_edges valid_edge src trg knd e"
subsubsection \<open>Lifting the Def and Use sets\<close>
inductive_set lift_Def_set :: "('node \<Rightarrow> 'var set) \<Rightarrow> 'node \<Rightarrow> 'node \<Rightarrow>
'var set \<Rightarrow> 'var set \<Rightarrow> ('node LDCFG_node \<times> 'var) set"
for Def::"('node \<Rightarrow> 'var set)" and E::'node and X::'node
and H::"'var set" and L::"'var set"
where lift_Def_node:
"V \<in> Def n \<Longrightarrow> (Node n,V) \<in> lift_Def_set Def E X H L"
| lift_Def_High:
"V \<in> H \<Longrightarrow> (Node E,V) \<in> lift_Def_set Def E X H L"
abbreviation lift_Def :: "('node \<Rightarrow> 'var set) \<Rightarrow> 'node \<Rightarrow> 'node \<Rightarrow>
'var set \<Rightarrow> 'var set \<Rightarrow> 'node LDCFG_node \<Rightarrow> 'var set"
where "lift_Def Def E X H L n \<equiv> {V. (n,V) \<in> lift_Def_set Def E X H L}"
inductive_set lift_Use_set :: "('node \<Rightarrow> 'var set) \<Rightarrow> 'node \<Rightarrow> 'node \<Rightarrow>
'var set \<Rightarrow> 'var set \<Rightarrow> ('node LDCFG_node \<times> 'var) set"
for Use::"'node \<Rightarrow> 'var set" and E::'node and X::'node
and H::"'var set" and L::"'var set"
where
lift_Use_node:
"V \<in> Use n \<Longrightarrow> (Node n,V) \<in> lift_Use_set Use E X H L"
| lift_Use_High:
"V \<in> H \<Longrightarrow> (Node E,V) \<in> lift_Use_set Use E X H L"
| lift_Use_Low:
"V \<in> L \<Longrightarrow> (Node X,V) \<in> lift_Use_set Use E X H L"
abbreviation lift_Use :: "('node \<Rightarrow> 'var set) \<Rightarrow> 'node \<Rightarrow> 'node \<Rightarrow>
'var set \<Rightarrow> 'var set \<Rightarrow> 'node LDCFG_node \<Rightarrow> 'var set"
where "lift_Use Use E X H L n \<equiv> {V. (n,V) \<in> lift_Use_set Use E X H L}"
fun lift_ParamUses :: "('node \<Rightarrow> 'var set list) \<Rightarrow> 'node LDCFG_node \<Rightarrow> 'var set list"
where "lift_ParamUses ParamUses (Node n) = ParamUses n"
| "lift_ParamUses ParamUses NewEntry = []"
| "lift_ParamUses ParamUses NewExit = []"
fun lift_ParamDefs :: "('node \<Rightarrow> 'var list) \<Rightarrow> 'node LDCFG_node \<Rightarrow> 'var list"
where "lift_ParamDefs ParamDefs (Node n) = ParamDefs n"
| "lift_ParamDefs ParamDefs NewEntry = []"
| "lift_ParamDefs ParamDefs NewExit = []"
subsection \<open>The lifting lemmas\<close>
subsubsection \<open>Lifting the CFG locales\<close>
abbreviation src :: "('edge,'node,'var,'val,'ret,'pname) LDCFG_edge \<Rightarrow> 'node LDCFG_node"
where "src a \<equiv> fst a"
abbreviation trg :: "('edge,'node,'var,'val,'ret,'pname) LDCFG_edge \<Rightarrow> 'node LDCFG_node"
where "trg a \<equiv> snd(snd a)"
abbreviation knd :: "('edge,'node,'var,'val,'ret,'pname) LDCFG_edge \<Rightarrow>
('var,'val,'ret,'pname) edge_kind"
where "knd a \<equiv> fst(snd a)"
lemma lift_CFG:
assumes wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
shows "CFG src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main"
proof -
interpret CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule wf)
interpret Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit
by(rule pd)
show ?thesis
proof
fix a assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "trg a = NewEntry"
thus False by(fastforce elim:lift_valid_edge.cases)
next
show "lift_get_proc get_proc Main NewEntry = Main" by simp
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "src a = NewEntry"
thus False by(fastforce elim:lift_valid_edge.cases)
next
fix a a'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "src a = src a'" and "trg a = trg a'"
thus "a = a'"
proof(induct rule:lift_valid_edge.induct)
case lve_edge thus ?case by -(erule lift_valid_edge.cases,auto dest:edge_det)
qed(auto elim:lift_valid_edge.cases)
next
fix a Q r f
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>Main\<^esub>f"
thus False by(fastforce elim:lift_valid_edge.cases dest:Main_no_call_target)
next
fix a Q' f'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>Main\<^esub>f'"
thus False by(fastforce elim:lift_valid_edge.cases dest:Main_no_return_source)
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
thus "\<exists>ins outs. (p, ins, outs) \<in> set procs"
by(fastforce elim:lift_valid_edge.cases intro:callee_in_procs)
next
fix a assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "intra_kind (knd a)"
thus "lift_get_proc get_proc Main (src a) = lift_get_proc get_proc Main (trg a)"
by(fastforce elim:lift_valid_edge.cases intro:get_proc_intra
simp:get_proc_Entry get_proc_Exit)
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
thus "lift_get_proc get_proc Main (trg a) = p"
by(fastforce elim:lift_valid_edge.cases intro:get_proc_call)
next
fix a Q' p f'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
thus "lift_get_proc get_proc Main (src a) = p"
by(fastforce elim:lift_valid_edge.cases intro:get_proc_return)
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
then obtain ax where "valid_edge ax" and "kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
and "sourcenode ax \<noteq> Entry \<or> targetnode ax \<noteq> Exit"
and "src a = Node (sourcenode ax)" and "trg a = Node (targetnode ax)"
by(fastforce elim:lift_valid_edge.cases)
from \<open>valid_edge ax\<close> \<open>kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have all:"\<forall>a'. valid_edge a' \<and> targetnode a' = targetnode ax \<longrightarrow>
(\<exists>Qx rx fsx. kind a' = Qx:rx\<hookrightarrow>\<^bsub>p\<^esub>fsx)"
by(auto dest:call_edges_only)
{ fix a'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "trg a' = trg a"
hence "\<exists>Qx rx fsx. knd a' = Qx:rx\<hookrightarrow>\<^bsub>p\<^esub>fsx"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge ax' e)
note [simp] = \<open>e = (Node (sourcenode ax'), kind ax', Node (targetnode ax'))\<close>
from \<open>trg e = trg a\<close> \<open>trg a = Node (targetnode ax)\<close>
have "targetnode ax' = targetnode ax" by simp
with \<open>valid_edge ax'\<close> all have "\<exists>Qx rx fsx. kind ax' = Qx:rx\<hookrightarrow>\<^bsub>p\<^esub>fsx" by blast
thus ?case by simp
next
case (lve_Entry_edge e)
from \<open>e = (NewEntry, (\<lambda>s. True)\<^sub>\<surd>, Node Entry)\<close> \<open>trg e = trg a\<close>
\<open>trg a = Node (targetnode ax)\<close>
have "targetnode ax = Entry" by simp
with \<open>valid_edge ax\<close> have False by(rule Entry_target)
thus ?case by simp
next
case (lve_Exit_edge e)
from \<open>e = (Node Exit, (\<lambda>s. True)\<^sub>\<surd>, NewExit)\<close> \<open>trg e = trg a\<close>
\<open>trg a = Node (targetnode ax)\<close> have False by simp
thus ?case by simp
next
case (lve_Entry_Exit_edge e)
from \<open>e = (NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)\<close> \<open>trg e = trg a\<close>
\<open>trg a = Node (targetnode ax)\<close> have False by simp
thus ?case by simp
qed }
thus "\<forall>a'. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a' \<and>
trg a' = trg a \<longrightarrow> (\<exists>Qx rx fsx. knd a' = Qx:rx\<hookrightarrow>\<^bsub>p\<^esub>fsx)" by simp
next
fix a Q' p f'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
then obtain ax where "valid_edge ax" and "kind ax = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
and "sourcenode ax \<noteq> Entry \<or> targetnode ax \<noteq> Exit"
and "src a = Node (sourcenode ax)" and "trg a = Node (targetnode ax)"
by(fastforce elim:lift_valid_edge.cases)
from \<open>valid_edge ax\<close> \<open>kind ax = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close>
have all:"\<forall>a'. valid_edge a' \<and> sourcenode a' = sourcenode ax \<longrightarrow>
(\<exists>Qx fx. kind a' = Qx\<hookleftarrow>\<^bsub>p\<^esub>fx)"
by(auto dest:return_edges_only)
{ fix a'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "src a' = src a"
hence "\<exists>Qx fx. knd a' = Qx\<hookleftarrow>\<^bsub>p\<^esub>fx"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge ax' e)
note [simp] = \<open>e = (Node (sourcenode ax'), kind ax', Node (targetnode ax'))\<close>
from \<open>src e = src a\<close> \<open>src a = Node (sourcenode ax)\<close>
have "sourcenode ax' = sourcenode ax" by simp
with \<open>valid_edge ax'\<close> all have "\<exists>Qx fx. kind ax' = Qx\<hookleftarrow>\<^bsub>p\<^esub>fx" by blast
thus ?case by simp
next
case (lve_Entry_edge e)
from \<open>e = (NewEntry, (\<lambda>s. True)\<^sub>\<surd>, Node Entry)\<close> \<open>src e = src a\<close>
\<open>src a = Node (sourcenode ax)\<close> have False by simp
thus ?case by simp
next
case (lve_Exit_edge e)
from \<open>e = (Node Exit, (\<lambda>s. True)\<^sub>\<surd>, NewExit)\<close> \<open>src e = src a\<close>
\<open>src a = Node (sourcenode ax)\<close> have "sourcenode ax = Exit" by simp
with \<open>valid_edge ax\<close> have False by(rule Exit_source)
thus ?case by simp
next
case (lve_Entry_Exit_edge e)
from \<open>e = (NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)\<close> \<open>src e = src a\<close>
\<open>src a = Node (sourcenode ax)\<close> have False by simp
thus ?case by simp
qed }
thus "\<forall>a'. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a' \<and>
src a' = src a \<longrightarrow> (\<exists>Qx fx. knd a' = Qx\<hookleftarrow>\<^bsub>p\<^esub>fx)" by simp
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
thus "lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind a \<noteq> {}"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge ax e)
from \<open>e = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
with \<open>valid_edge ax\<close> have "get_return_edges ax \<noteq> {}"
by(rule get_return_edge_call)
then obtain ax' where "ax' \<in> get_return_edges ax" by blast
with \<open>e = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close> \<open>valid_edge ax\<close>
have "(Node (sourcenode ax'),kind ax',Node (targetnode ax')) \<in>
lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind e"
by(fastforce intro:lift_get_return_edgesI)
thus ?case by fastforce
qed simp_all
next
fix a a'
assume "a' \<in> lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
proof (induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax a' e')
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close> have "valid_edge a'"
by(rule get_return_edges_valid)
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close> obtain Q r p fs
where "kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by(fastforce dest!:only_call_get_return_edges)
with \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close> obtain Q' f'
where "kind a' = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" by(fastforce dest!:call_return_edges)
from \<open>valid_edge a'\<close> \<open>kind a' = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close> have "get_proc(sourcenode a') = p"
by(rule get_proc_return)
have "sourcenode a' \<noteq> Entry"
proof
assume "sourcenode a' = Entry"
with get_proc_Entry \<open>get_proc(sourcenode a') = p\<close> have "p = Main" by simp
with \<open>kind a' = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close> have "kind a' = Q'\<hookleftarrow>\<^bsub>Main\<^esub>f'" by simp
with \<open>valid_edge a'\<close> show False by(rule Main_no_return_source)
qed
with \<open>e' = (Node (sourcenode a'), kind a', Node (targetnode a'))\<close>
\<open>valid_edge a'\<close>
show ?case by(fastforce intro:lve_edge)
qed
next
fix a a'
assume "a' \<in> lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "\<exists>Q r p fs. knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
proof (induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax a' e')
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close>
have "\<exists>Q r p fs. kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
by(rule only_call_get_return_edges)
with \<open>a = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
show ?case by simp
qed
next
fix a Q r p fs a'
assume "a' \<in> lift_get_return_edges get_return_edges
valid_edge sourcenode targetnode kind a" and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "\<exists>Q' f'. knd a' = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
proof (induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax a' e')
from \<open>a = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
with \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close> have "\<exists>Q' f'. kind a' = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
by -(rule call_return_edges)
with \<open>e' = (Node (sourcenode a'), kind a', Node (targetnode a'))\<close>
show ?case by simp
qed
next
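(* Each return edge of the lifted graph belongs to exactly one call edge of the same procedure. *)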
fix a Q' p f'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
thus "\<exists>!a'. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a' \<and>
(\<exists>Q r fs. knd a' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs) \<and> a \<in> lift_get_return_edges get_return_edges
valid_edge sourcenode targetnode kind a'"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>knd e = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close> have "kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" by simp
with \<open>valid_edge a\<close>
have "\<exists>!a'. valid_edge a' \<and> (\<exists>Q r fs. kind a' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs) \<and>
a \<in> get_return_edges a'"
by(rule return_needs_call)
then obtain a' Q r fs where "valid_edge a'" and "kind a' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
and "a \<in> get_return_edges a'"
and imp:"\<forall>x. valid_edge x \<and> (\<exists>Q r fs. kind x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs) \<and>
a \<in> get_return_edges x \<longrightarrow> x = a'"
by(fastforce elim:ex1E)
let ?e' = "(Node (sourcenode a'),kind a',Node (targetnode a'))"
have "sourcenode a' \<noteq> Entry"
proof
assume "sourcenode a' = Entry"
with \<open>valid_edge a'\<close> \<open>kind a' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
show False by(rule Entry_no_call_source)
qed
with \<open>valid_edge a'\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e'"
by(fastforce intro:lift_valid_edge.lve_edge)
moreover
from \<open>kind a' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> have "knd ?e' = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
moreover
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>valid_edge a'\<close> \<open>a \<in> get_return_edges a'\<close>
have "e \<in> lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind ?e'" by(fastforce intro:lift_get_return_edgesI)
moreover
{ fix x
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x"
and "\<exists>Q r fs. knd x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
and "e \<in> lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind x"
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x\<close>
\<open>\<exists>Q r fs. knd x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> obtain y where "valid_edge y"
and "x = (Node (sourcenode y), kind y, Node (targetnode y))"
by(fastforce elim:lift_valid_edge.cases)
with \<open>e \<in> lift_get_return_edges get_return_edges valid_edge
sourcenode targetnode kind x\<close> \<open>valid_edge a\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "x = ?e'"
proof(induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax ax' e)
from \<open>valid_edge ax\<close> \<open>ax' \<in> get_return_edges ax\<close> have "valid_edge ax'"
by(rule get_return_edges_valid)
from \<open>e = (Node (sourcenode ax'), kind ax', Node (targetnode ax'))\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "sourcenode a = sourcenode ax'" and "targetnode a = targetnode ax'"
by simp_all
with \<open>valid_edge a\<close> \<open>valid_edge ax'\<close> have [simp]:"a = ax'" by(rule edge_det)
from \<open>x = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>\<exists>Q r fs. knd x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> have "\<exists>Q r fs. kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
with \<open>valid_edge ax\<close> \<open>ax' \<in> get_return_edges ax\<close> imp
have "ax = a'" by fastforce
with \<open>x = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
show ?thesis by simp
qed }
ultimately show ?case by(blast intro:ex1I)
qed simp_all
next
fix a a'
assume "a' \<in> lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "\<exists>a''. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'' \<and>
src a'' = trg a \<and> trg a'' = src a' \<and> knd a'' = (\<lambda>cf. False)\<^sub>\<surd>"
proof(induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax a' e')
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close>
obtain ax' where "valid_edge ax'" and "sourcenode ax' = targetnode ax"
and "targetnode ax' = sourcenode a'" and "kind ax' = (\<lambda>cf. False)\<^sub>\<surd>"
by(fastforce dest:intra_proc_additional_edge)
let ?ex = "(Node (sourcenode ax'), kind ax', Node (targetnode ax'))"
have "targetnode ax \<noteq> Entry"
proof
assume "targetnode ax = Entry"
with \<open>valid_edge ax\<close> show False by(rule Entry_target)
qed
with \<open>sourcenode ax' = targetnode ax\<close> have "sourcenode ax' \<noteq> Entry" by simp
with \<open>valid_edge ax'\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?ex"
by(fastforce intro:lve_edge)
with \<open>e' = (Node (sourcenode a'), kind a', Node (targetnode a'))\<close>
\<open>a = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>sourcenode ax' = targetnode ax\<close> \<open>targetnode ax' = sourcenode a'\<close>
\<open>kind ax' = (\<lambda>cf. False)\<^sub>\<surd>\<close>
show ?case by simp
qed
next
fix a a'
assume "a' \<in> lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "\<exists>a''. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'' \<and>
src a'' = src a \<and> trg a'' = trg a' \<and> knd a'' = (\<lambda>cf. False)\<^sub>\<surd>"
proof(induct rule:lift_get_return_edges.induct)
case (lift_get_return_edgesI ax a' e')
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close>
obtain ax' where "valid_edge ax'" and "sourcenode ax' = sourcenode ax"
and "targetnode ax' = targetnode a'" and "kind ax' = (\<lambda>cf. False)\<^sub>\<surd>"
by(fastforce dest:call_return_node_edge)
let ?ex = "(Node (sourcenode ax'), kind ax', Node (targetnode ax'))"
from \<open>valid_edge ax\<close> \<open>a' \<in> get_return_edges ax\<close>
obtain Q r p fs where "kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
by(fastforce dest!:only_call_get_return_edges)
have "sourcenode ax \<noteq> Entry"
proof
assume "sourcenode ax = Entry"
with \<open>valid_edge ax\<close> \<open>kind ax = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> show False
by(rule Entry_no_call_source)
qed
with \<open>sourcenode ax' = sourcenode ax\<close> have "sourcenode ax' \<noteq> Entry" by simp
with \<open>valid_edge ax'\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?ex"
by(fastforce intro:lve_edge)
with \<open>e' = (Node (sourcenode a'), kind a', Node (targetnode a'))\<close>
\<open>a = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>sourcenode ax' = sourcenode ax\<close> \<open>targetnode ax' = targetnode a'\<close>
\<open>kind ax' = (\<lambda>cf. False)\<^sub>\<surd>\<close>
show ?case by simp
qed
next
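(* The source node of a call edge has exactly one outgoing intra-kind edge. *)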
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
thus "\<exists>!a'. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a' \<and>
src a' = src a \<and> intra_kind (knd a')"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
with \<open>valid_edge a\<close> have "\<exists>!a'. valid_edge a' \<and> sourcenode a' = sourcenode a \<and>
intra_kind(kind a')" by(rule call_only_one_intra_edge)
then obtain a' where "valid_edge a'" and "sourcenode a' = sourcenode a"
and "intra_kind(kind a')"
and imp:"\<forall>x. valid_edge x \<and> sourcenode x = sourcenode a \<and> intra_kind(kind x)
\<longrightarrow> x = a'" by(fastforce elim:ex1E)
let ?e' = "(Node (sourcenode a'), kind a', Node (targetnode a'))"
have "sourcenode a \<noteq> Entry"
proof
assume "sourcenode a = Entry"
with \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> show False
by(rule Entry_no_call_source)
qed
with \<open>sourcenode a' = sourcenode a\<close> have "sourcenode a' \<noteq> Entry" by simp
with \<open>valid_edge a'\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e'"
by(fastforce intro:lift_valid_edge.lve_edge)
moreover
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>sourcenode a' = sourcenode a\<close>
have "src ?e' = src e" by simp
moreover
from \<open>intra_kind(kind a')\<close> have "intra_kind (knd ?e')" by simp
moreover
{ fix x
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x"
and "src x = src e" and "intra_kind (knd x)"
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x\<close>
have "x = ?e'"
proof(induct rule:lift_valid_edge.cases)
case (lve_edge ax ex)
from \<open>intra_kind (knd x)\<close> \<open>x = ex\<close> \<open>src x = src e\<close>
\<open>ex = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "intra_kind (kind ax)" and "sourcenode ax = sourcenode a" by simp_all
with \<open>valid_edge ax\<close> imp have "ax = a'" by fastforce
with \<open>x = ex\<close> \<open>ex = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
show ?case by simp
next
case (lve_Entry_edge ex)
with \<open>src x = src e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have False by simp
thus ?case by simp
next
case (lve_Exit_edge ex)
with \<open>src x = src e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "sourcenode a = Exit" by simp
with \<open>valid_edge a\<close> have False by(rule Exit_source)
thus ?case by simp
next
case (lve_Entry_Exit_edge ex)
with \<open>src x = src e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have False by simp
thus ?case by simp
qed }
ultimately show ?case by(blast intro:ex1I)
qed simp_all
next
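(* Dually, the target node of a return edge has exactly one incoming intra-kind edge. *)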
fix a Q' p f'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'"
thus "\<exists>!a'. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a' \<and>
trg a' = trg a \<and> intra_kind (knd a')"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close>
have "kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" by simp
with \<open>valid_edge a\<close> have "\<exists>!a'. valid_edge a' \<and> targetnode a' = targetnode a \<and>
intra_kind(kind a')" by(rule return_only_one_intra_edge)
then obtain a' where "valid_edge a'" and "targetnode a' = targetnode a"
and "intra_kind(kind a')"
and imp:"\<forall>x. valid_edge x \<and> targetnode x = targetnode a \<and> intra_kind(kind x)
\<longrightarrow> x = a'" by(fastforce elim:ex1E)
let ?e' = "(Node (sourcenode a'), kind a', Node (targetnode a'))"
have "targetnode a \<noteq> Exit"
proof
assume "targetnode a = Exit"
with \<open>valid_edge a\<close> \<open>kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close> show False
by(rule Exit_no_return_target)
qed
with \<open>targetnode a' = targetnode a\<close> have "targetnode a' \<noteq> Exit" by simp
with \<open>valid_edge a'\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e'"
by(fastforce intro:lift_valid_edge.lve_edge)
moreover
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>targetnode a' = targetnode a\<close>
have "trg ?e' = trg e" by simp
moreover
from \<open>intra_kind(kind a')\<close> have "intra_kind (knd ?e')" by simp
moreover
{ fix x
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x"
and "trg x = trg e" and "intra_kind (knd x)"
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit x\<close>
have "x = ?e'"
proof(induct rule:lift_valid_edge.cases)
case (lve_edge ax ex)
from \<open>intra_kind (knd x)\<close> \<open>x = ex\<close> \<open>trg x = trg e\<close>
\<open>ex = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "intra_kind (kind ax)" and "targetnode ax = targetnode a" by simp_all
with \<open>valid_edge ax\<close> imp have "ax = a'" by fastforce
with \<open>x = ex\<close> \<open>ex = (Node (sourcenode ax), kind ax, Node (targetnode ax))\<close>
show ?case by simp
next
case (lve_Entry_edge ex)
with \<open>trg x = trg e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "targetnode a = Entry" by simp
with \<open>valid_edge a\<close> have False by(rule Entry_target)
thus ?case by simp
next
case (lve_Exit_edge ex)
with \<open>trg x = trg e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have False by simp
thus ?case by simp
next
case (lve_Entry_Exit_edge ex)
with \<open>trg x = trg e\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have False by simp
thus ?case by simp
qed }
ultimately show ?case by(blast intro:ex1I)
qed simp_all
next
fix a a' Q\<^sub>1 r\<^sub>1 p fs\<^sub>1 Q\<^sub>2 r\<^sub>2 fs\<^sub>2
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "knd a = Q\<^sub>1:r\<^sub>1\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>1" and "knd a' = Q\<^sub>2:r\<^sub>2\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>2"
then obtain x x' where "valid_edge x"
and a:"a = (Node (sourcenode x),kind x,Node (targetnode x))" and "valid_edge x'"
and a':"a' = (Node (sourcenode x'),kind x',Node (targetnode x'))"
by(auto elim!:lift_valid_edge.cases)
with \<open>knd a = Q\<^sub>1:r\<^sub>1\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>1\<close> \<open>knd a' = Q\<^sub>2:r\<^sub>2\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>2\<close>
have "kind x = Q\<^sub>1:r\<^sub>1\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>1" and "kind x' = Q\<^sub>2:r\<^sub>2\<hookrightarrow>\<^bsub>p\<^esub>fs\<^sub>2" by simp_all
with \<open>valid_edge x\<close> \<open>valid_edge x'\<close> have "targetnode x = targetnode x'"
by(rule same_proc_call_unique_target)
with a a' show "trg a = trg a'" by simp
next
from unique_callers show "distinct_fst procs" .
next
fix p ins outs
assume "(p, ins, outs) \<in> set procs"
from distinct_formal_ins[OF this] show "distinct ins" .
next
fix p ins outs
assume "(p, ins, outs) \<in> set procs"
from distinct_formal_outs[OF this] show "distinct outs" .
qed
qed
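text \<open>Next we lift well-formedness: if the original graph is a well-formed CFG with exit node
  and satisfies postdomination, then the lifted graph together with the lifted \<open>Def\<close>, \<open>Use\<close>,
  \<open>ParamDefs\<close> and \<open>ParamUses\<close> sets satisfies the \<open>CFG_wf\<close> locale.\<close>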
lemma lift_CFG_wf:
assumes wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
shows "CFG_wf src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main (lift_Def Def Entry Exit H L) (lift_Use Use Entry Exit H L)
(lift_ParamDefs ParamDefs) (lift_ParamUses ParamUses)"
proof -
interpret CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule wf)
interpret Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit
by(rule pd)
interpret CFG:CFG src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main
by(fastforce intro:lift_CFG wf pd)
show ?thesis
proof
show "lift_Def Def Entry Exit H L NewEntry = {} \<and>
lift_Use Use Entry Exit H L NewEntry = {}"
by(fastforce elim:lift_Use_set.cases lift_Def_set.cases)
next
fix a Q r p fs ins outs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "(p, ins, outs) \<in> set procs"
thus "length (lift_ParamUses ParamUses (src a)) = length ins"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "src e = Node (sourcenode a)" by simp_all
with \<open>valid_edge a\<close> \<open>(p,ins,outs) \<in> set procs\<close>
have "length(ParamUses (sourcenode a)) = length ins"
by -(rule ParamUses_call_source_length)
with \<open>src e = Node (sourcenode a)\<close> show ?case by simp
qed simp_all
next
fix a assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
thus "distinct (lift_ParamDefs ParamDefs (trg a))"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>valid_edge a\<close> have "distinct (ParamDefs (targetnode a))"
by(rule distinct_ParamDefs)
with \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
show ?case by simp
next
case (lve_Entry_edge e)
have "ParamDefs Entry = []"
proof(rule ccontr)
assume "ParamDefs Entry \<noteq> []"
then obtain V Vs where "ParamDefs Entry = V#Vs"
by(cases "ParamDefs Entry") auto
hence "V \<in> set (ParamDefs Entry)" by fastforce
hence "V \<in> Def Entry" by(fastforce intro:ParamDefs_in_Def)
with Entry_empty show False by simp
qed
with \<open>e = (NewEntry, (\<lambda>s. True)\<^sub>\<surd>, Node Entry)\<close> show ?case by simp
qed simp_all
next
fix a Q' p f' ins outs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" and "(p, ins, outs) \<in> set procs"
thus "length (lift_ParamDefs ParamDefs (trg a)) = length outs"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>knd e = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close>
have "kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" and "trg e = Node (targetnode a)" by simp_all
with \<open>valid_edge a\<close> \<open>(p,ins,outs) \<in> set procs\<close>
have "length(ParamDefs (targetnode a)) = length outs"
by -(rule ParamDefs_return_target_length)
with \<open>trg e = Node (targetnode a)\<close> show ?case by simp
qed simp_all
next
fix n V
assume "CFG.CFG.valid_node src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) n"
and "V \<in> set (lift_ParamDefs ParamDefs n)"
hence "((n = NewEntry) \<or> n = NewExit) \<or> (\<exists>m. n = Node m \<and> valid_node m)"
by(auto elim:lift_valid_edge.cases simp:CFG.valid_node_def)
thus "V \<in> lift_Def Def Entry Exit H L n" apply -
proof(erule disjE)+
assume "n = NewEntry"
with \<open>V \<in> set (lift_ParamDefs ParamDefs n)\<close> show ?thesis by simp
next
assume "n = NewExit"
with \<open>V \<in> set (lift_ParamDefs ParamDefs n)\<close> show ?thesis by simp
next
assume "\<exists>m. n = Node m \<and> valid_node m"
then obtain m where "n = Node m" and "valid_node m" by blast
from \<open>n = Node m\<close> \<open>V \<in> set (lift_ParamDefs ParamDefs n)\<close>
have "V \<in> set (ParamDefs m)" by simp
with \<open>valid_node m\<close> have "V \<in> Def m" by(rule ParamDefs_in_Def)
with \<open>n = Node m\<close> show ?thesis by(fastforce intro:lift_Def_node)
qed
next
fix a Q r p fs ins outs V
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "(p, ins, outs) \<in> set procs" and "V \<in> set ins"
thus "V \<in> lift_Def Def Entry Exit H L (trg a)"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
from \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> \<open>(p, ins, outs) \<in> set procs\<close> \<open>V \<in> set ins\<close>
have "V \<in> Def (targetnode a)" by(rule ins_in_Def)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "trg e = Node (targetnode a)" by simp
with \<open>V \<in> Def (targetnode a)\<close> show ?case by(fastforce intro:lift_Def_node)
qed simp_all
next
fix a Q r p fs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs"
thus "lift_Def Def Entry Exit H L (src a) = {}"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
show ?case
proof(rule ccontr)
assume "lift_Def Def Entry Exit H L (src e) \<noteq> {}"
then obtain x where "x \<in> lift_Def Def Entry Exit H L (src e)" by blast
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
with \<open>valid_edge a\<close> have "Def (sourcenode a) = {}"
by(rule call_source_Def_empty)
have "sourcenode a \<noteq> Entry"
proof
assume "sourcenode a = Entry"
with \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
show False by(rule Entry_no_call_source)
qed
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "src e = Node (sourcenode a)" by simp
with \<open>Def (sourcenode a) = {}\<close> \<open>x \<in> lift_Def Def Entry Exit H L (src e)\<close>
\<open>sourcenode a \<noteq> Entry\<close>
show False by(fastforce elim:lift_Def_set.cases)
qed
qed simp_all
next
fix n V
assume "CFG.CFG.valid_node src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) n"
and "V \<in> \<Union>(set (lift_ParamUses ParamUses n))"
hence "((n = NewEntry) \<or> n = NewExit) \<or> (\<exists>m. n = Node m \<and> valid_node m)"
by(auto elim:lift_valid_edge.cases simp:CFG.valid_node_def)
thus "V \<in> lift_Use Use Entry Exit H L n" apply -
proof(erule disjE)+
assume "n = NewEntry"
with \<open>V \<in> \<Union>(set (lift_ParamUses ParamUses n))\<close> show ?thesis by simp
next
assume "n = NewExit"
with \<open>V \<in> \<Union>(set (lift_ParamUses ParamUses n))\<close> show ?thesis by simp
next
assume "\<exists>m. n = Node m \<and> valid_node m"
then obtain m where "n = Node m" and "valid_node m" by blast
from \<open>V \<in> \<Union>(set (lift_ParamUses ParamUses n))\<close> \<open>n = Node m\<close>
have "V \<in> \<Union>(set (ParamUses m))" by simp
with \<open>valid_node m\<close> have "V \<in> Use m" by(rule ParamUses_in_Use)
with \<open>n = Node m\<close> show ?thesis by(fastforce intro:lift_Use_node)
qed
next
fix a Q p f ins outs V
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f" and "(p, ins, outs) \<in> set procs" and "V \<in> set outs"
thus "V \<in> lift_Use Use Entry Exit H L (src a)"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close>
have "kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f" by simp
from \<open>valid_edge a\<close> \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> \<open>(p, ins, outs) \<in> set procs\<close> \<open>V \<in> set outs\<close>
have "V \<in> Use (sourcenode a)" by(rule outs_in_Use)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "src e = Node (sourcenode a)" by simp
with \<open>V \<in> Use (sourcenode a)\<close> show ?case by(fastforce intro:lift_Use_node)
qed simp_all
next
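(* An intra-kind edge leaves the value of every variable it does not define unchanged. *)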
fix a V s
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "V \<notin> lift_Def Def Entry Exit H L (src a)" and "intra_kind (knd a)"
and "pred (knd a) s"
thus "state_val (transfer (knd a) s) V = state_val s V"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>intra_kind (knd e)\<close> \<open>pred (knd e) s\<close>
have "intra_kind (kind a)" and "pred (kind a) s"
and "knd e = kind a" and "src e = Node (sourcenode a)" by simp_all
from \<open>V \<notin> lift_Def Def Entry Exit H L (src e)\<close> \<open>src e = Node (sourcenode a)\<close>
have "V \<notin> Def (sourcenode a)" by (auto dest: lift_Def_node)
from \<open>valid_edge a\<close> \<open>V \<notin> Def (sourcenode a)\<close> \<open>intra_kind (kind a)\<close>
\<open>pred (kind a) s\<close>
have "state_val (transfer (kind a) s) V = state_val s V"
by(rule CFG_intra_edge_no_Def_equal)
with \<open>knd e = kind a\<close> show ?case by simp
next
case (lve_Entry_edge e)
from \<open>e = (NewEntry, (\<lambda>s. True)\<^sub>\<surd>, Node Entry)\<close> \<open>pred (knd e) s\<close>
show ?case by(cases s) auto
next
case (lve_Exit_edge e)
from \<open>e = (Node Exit, (\<lambda>s. True)\<^sub>\<surd>, NewExit)\<close> \<open>pred (knd e) s\<close>
show ?case by(cases s) auto
next
case (lve_Entry_Exit_edge e)
from \<open>e = (NewEntry, (\<lambda>s. False)\<^sub>\<surd>, NewExit)\<close> \<open>pred (knd e) s\<close>
have False by(cases s) auto
thus ?case by simp
qed
next
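(* For intra-kind edges, states agreeing on the lifted Use set agree on the lifted Def set after transfer. *)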
fix a s s'
assume assms:"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
"\<forall>V\<in>lift_Use Use Entry Exit H L (src a). state_val s V = state_val s' V"
"intra_kind (knd a)" "pred (knd a) s" "pred (knd a) s'"
show "\<forall>V\<in>lift_Def Def Entry Exit H L (src a).
state_val (transfer (knd a) s) V = state_val (transfer (knd a) s') V"
proof
fix V assume "V \<in> lift_Def Def Entry Exit H L (src a)"
with assms
show "state_val (transfer (knd a) s) V = state_val (transfer (knd a) s') V"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>intra_kind (knd e)\<close> have "intra_kind (kind a)" by simp
show ?case
proof (cases "Node (sourcenode a) = Node Entry")
case True
hence "sourcenode a = Entry" by simp
from Entry_Exit_edge obtain a' where "valid_edge a'"
and "sourcenode a' = Entry" and "targetnode a' = Exit"
and "kind a' = (\<lambda>s. False)\<^sub>\<surd>" by blast
have "\<exists>Q. kind a = (Q)\<^sub>\<surd>"
proof(cases "targetnode a = Exit")
case True
with \<open>valid_edge a\<close> \<open>valid_edge a'\<close> \<open>sourcenode a = Entry\<close>
\<open>sourcenode a' = Entry\<close> \<open>targetnode a' = Exit\<close>
have "a = a'" by(fastforce dest:edge_det)
with \<open>kind a' = (\<lambda>s. False)\<^sub>\<surd>\<close> show ?thesis by simp
next
case False
with \<open>valid_edge a\<close> \<open>valid_edge a'\<close> \<open>sourcenode a = Entry\<close>
\<open>sourcenode a' = Entry\<close> \<open>targetnode a' = Exit\<close>
\<open>intra_kind (kind a)\<close> \<open>kind a' = (\<lambda>s. False)\<^sub>\<surd>\<close>
show ?thesis by(auto dest:deterministic simp:intra_kind_def)
qed
from True \<open>V \<in> lift_Def Def Entry Exit H L (src e)\<close> Entry_empty
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "V \<in> H" by(fastforce elim:lift_Def_set.cases)
from True \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>sourcenode a \<noteq> Entry \<or> targetnode a \<noteq> Exit\<close>
have "\<forall>V\<in>H. V \<in> lift_Use Use Entry Exit H L (src e)"
by(fastforce intro:lift_Use_High)
with \<open>\<forall>V\<in>lift_Use Use Entry Exit H L (src e).
state_val s V = state_val s' V\<close> \<open>V \<in> H\<close>
have "state_val s V = state_val s' V" by simp
with \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>\<exists>Q. kind a = (Q)\<^sub>\<surd>\<close> \<open>pred (knd e) s\<close> \<open>pred (knd e) s'\<close>
show ?thesis by(cases s,auto,cases s',auto)
next
case False
{ fix V' assume "V' \<in> Use (sourcenode a)"
with \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "V' \<in> lift_Use Use Entry Exit H L (src e)"
by(fastforce intro:lift_Use_node)
}
with \<open>\<forall>V\<in>lift_Use Use Entry Exit H L (src e).
state_val s V = state_val s' V\<close>
have "\<forall>V\<in>Use (sourcenode a). state_val s V = state_val s' V"
by fastforce
from \<open>valid_edge a\<close> this \<open>pred (knd e) s\<close> \<open>pred (knd e) s'\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>intra_kind (knd e)\<close>
have "\<forall>V \<in> Def (sourcenode a). state_val (transfer (kind a) s) V =
state_val (transfer (kind a) s') V"
by -(erule CFG_intra_edge_transfer_uses_only_Use,auto)
from \<open>V \<in> lift_Def Def Entry Exit H L (src e)\<close> False
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
have "V \<in> Def (sourcenode a)" by(fastforce elim:lift_Def_set.cases)
with \<open>\<forall>V \<in> Def (sourcenode a). state_val (transfer (kind a) s) V =
state_val (transfer (kind a) s') V\<close>
\<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
show ?thesis by simp
qed
next
case (lve_Entry_edge e)
from \<open>V \<in> lift_Def Def Entry Exit H L (src e)\<close>
\<open>e = (NewEntry, (\<lambda>s. True)\<^sub>\<surd>, Node Entry)\<close>
have False by(fastforce elim:lift_Def_set.cases)
thus ?case by simp
next
case (lve_Exit_edge e)
from \<open>V \<in> lift_Def Def Entry Exit H L (src e)\<close>
\<open>e = (Node Exit, (\<lambda>s. True)\<^sub>\<surd>, NewExit)\<close>
have False
by(fastforce elim:lift_Def_set.cases intro!:Entry_noteq_Exit simp:Exit_empty)
thus ?case by simp
next
case (lve_Entry_Exit_edge e)
thus ?case by(cases s) auto
qed
qed
next
fix a s s'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "pred (knd a) s" and "snd (hd s) = snd (hd s')"
and "\<forall>V\<in>lift_Use Use Entry Exit H L (src a). state_val s V = state_val s' V"
and "length s = length s'"
thus "pred (knd a) s'"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>pred (knd e) s\<close>
have "pred (kind a) s" and "src e = Node (sourcenode a)" by simp_all
from \<open>src e = Node (sourcenode a)\<close>
\<open>\<forall>V\<in>lift_Use Use Entry Exit H L (src e). state_val s V = state_val s' V\<close>
have "\<forall>V \<in> Use (sourcenode a). state_val s V = state_val s' V"
by(auto dest:lift_Use_node)
from \<open>valid_edge a\<close> \<open>pred (kind a) s\<close> \<open>snd (hd s) = snd (hd s')\<close>
this \<open>length s = length s'\<close>
have "pred (kind a) s'" by(rule CFG_edge_Uses_pred_equal)
with \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
show ?case by simp
next
case (lve_Entry_edge e)
thus ?case by(cases s') auto
next
case (lve_Exit_edge e)
thus ?case by(cases s') auto
next
case (lve_Entry_Exit_edge e)
thus ?case by(cases s) auto
qed
next
fix a Q r p fs ins outs
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "(p, ins, outs) \<in> set procs"
thus "length fs = length ins"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" by simp
from \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> \<open>(p, ins, outs) \<in> set procs\<close>
show ?case by(rule CFG_call_edge_length)
qed simp_all
next
fix a Q r p fs a' Q' r' p' fs' s s'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "knd a' = Q':r'\<hookrightarrow>\<^bsub>p'\<^esub>fs'"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "src a = src a'" and "pred (knd a) s" and "pred (knd a') s"
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a\<close>
\<open>knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> \<open>pred (knd a) s\<close>
obtain x where a:"a = (Node (sourcenode x),kind x,Node (targetnode x))"
and "valid_edge x" and "src a = Node (sourcenode x)"
and "kind x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "pred (kind x) s"
by(fastforce elim:lift_valid_edge.cases)
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'\<close>
\<open>knd a' = Q':r'\<hookrightarrow>\<^bsub>p'\<^esub>fs'\<close> \<open>pred (knd a') s\<close>
obtain x' where a':"a' = (Node (sourcenode x'),kind x',Node (targetnode x'))"
and "valid_edge x'" and "src a' = Node (sourcenode x')"
and "kind x' = Q':r'\<hookrightarrow>\<^bsub>p'\<^esub>fs'" and "pred (kind x') s"
by(fastforce elim:lift_valid_edge.cases)
from \<open>src a = Node (sourcenode x)\<close> \<open>src a' = Node (sourcenode x')\<close>
\<open>src a = src a'\<close>
have "sourcenode x = sourcenode x'" by simp
from \<open>valid_edge x\<close> \<open>kind x = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> \<open>valid_edge x'\<close> \<open>kind x' = Q':r'\<hookrightarrow>\<^bsub>p'\<^esub>fs'\<close>
\<open>sourcenode x = sourcenode x'\<close> \<open>pred (kind x) s\<close> \<open>pred (kind x') s\<close>
have "x = x'" by(rule CFG_call_determ)
with a a' show "a = a'" by simp
next
fix a Q r p fs i ins outs s s'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "i < length ins" and "(p, ins, outs) \<in> set procs"
and "pred (knd a) s" and "pred (knd a) s'"
and "\<forall>V\<in>lift_ParamUses ParamUses (src a) ! i. state_val s V = state_val s' V"
thus "params fs (state_val s) ! i = CFG.params fs (state_val s') ! i"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
\<open>pred (knd e) s\<close> \<open>pred (knd e) s'\<close>
have "kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs" and "pred (kind a) s" and "pred (kind a) s'"
and "src e = Node (sourcenode a)"
by simp_all
from \<open>\<forall>V\<in>lift_ParamUses ParamUses (src e) ! i. state_val s V = state_val s' V\<close>
\<open>src e = Node (sourcenode a)\<close>
have "\<forall>V \<in> (ParamUses (sourcenode a))!i. state_val s V = state_val s' V" by simp
with \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close> \<open>i < length ins\<close>
\<open>(p, ins, outs) \<in> set procs\<close> \<open>pred (kind a) s\<close> \<open>pred (kind a) s'\<close>
show ?case by(rule CFG_call_edge_params)
qed simp_all
next
fix a Q' p f' ins outs cf cf'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" and "(p, ins, outs) \<in> set procs"
thus "f' cf cf' = cf'(lift_ParamDefs ParamDefs (trg a) [:=] map cf outs)"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close> \<open>knd e = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close>
have "kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'" and "trg e = Node (targetnode a)" by simp_all
from \<open>valid_edge a\<close> \<open>kind a = Q'\<hookleftarrow>\<^bsub>p\<^esub>f'\<close> \<open>(p, ins, outs) \<in> set procs\<close>
have "f' cf cf' = cf'(ParamDefs (targetnode a) [:=] map cf outs)"
by(rule CFG_return_edge_fun)
with \<open>trg e = Node (targetnode a)\<close> show ?case by simp
qed simp_all
next
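(* Distinct intra-kind edges leaving the same node carry mutually exclusive predicates. *)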
fix a a'
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "src a = src a'" and "trg a \<noteq> trg a'"
and "intra_kind (knd a)" and "intra_kind (knd a')"
thus "\<exists>Q Q'. knd a = (Q)\<^sub>\<surd> \<and> knd a' = (Q')\<^sub>\<surd> \<and>
(\<forall>s. (Q s \<longrightarrow> \<not> Q' s) \<and> (Q' s \<longrightarrow> \<not> Q s))"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'\<close>
\<open>valid_edge a\<close> \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>src e = src a'\<close> \<open>trg e \<noteq> trg a'\<close> \<open>intra_kind (knd e)\<close> \<open>intra_kind (knd a')\<close>
show ?case
proof(induct rule:lift_valid_edge.induct)
case lve_edge thus ?case by(auto dest:deterministic)
next
case (lve_Exit_edge e')
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>e' = (Node Exit, (\<lambda>s. True)\<^sub>\<surd>, NewExit)\<close> \<open>src e = src e'\<close>
have "sourcenode a = Exit" by simp
with \<open>valid_edge a\<close> have False by(rule Exit_source)
thus ?case by simp
qed auto
qed (fastforce elim:lift_valid_edge.cases)+
qed
qed
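text \<open>With \<open>NewExit\<close> as exit node the lifted graph also satisfies the \<open>CFGExit\<close> locale.\<close>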
lemma lift_CFGExit:
assumes wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
shows "CFGExit src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main NewExit"
proof -
interpret CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule wf)
interpret Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit
by(rule pd)
interpret CFG:CFG src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main
by(fastforce intro:lift_CFG wf pd)
show ?thesis
proof
fix a assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "src a = NewExit"
thus False by(fastforce elim:lift_valid_edge.cases)
next
show "lift_get_proc get_proc Main NewExit = Main" by simp
next
fix a Q p f
assume "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f" and "trg a = NewExit"
thus False by(fastforce elim:lift_valid_edge.cases)
next
show "\<exists>a. lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a \<and>
src a = NewEntry \<and> trg a = NewExit \<and> knd a = (\<lambda>s. False)\<^sub>\<surd>"
by(fastforce intro:lve_Entry_Exit_edge)
qed
qed
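text \<open>Combining the previous two results, the lifted graph with \<open>NewEntry\<close> and \<open>NewExit\<close>
  satisfies the \<open>CFGExit_wf\<close> locale.\<close>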
lemma lift_CFGExit_wf:
assumes wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
shows "CFGExit_wf src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main NewExit (lift_Def Def Entry Exit H L) (lift_Use Use Entry Exit H L)
(lift_ParamDefs ParamDefs) (lift_ParamUses ParamUses)"
proof -
interpret CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule wf)
interpret Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit
by(rule pd)
interpret CFG_wf:CFG_wf src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main "lift_Def Def Entry Exit H L" "lift_Use Use Entry Exit H L"
"lift_ParamDefs ParamDefs" "lift_ParamUses ParamUses"
by(fastforce intro:lift_CFG_wf wf pd)
interpret CFGExit:CFGExit src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main NewExit
by(fastforce intro:lift_CFGExit wf pd)
show ?thesis
proof
show "lift_Def Def Entry Exit H L NewExit = {} \<and>
lift_Use Use Entry Exit H L NewExit = {}"
by(fastforce elim:lift_Def_set.cases lift_Use_set.cases)
qed
qed
subsubsection \<open>Lifting the SDG\<close>
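text \<open>Postdomination is preserved by the lifting; here the original graph is additionally
  required to contain an inner node \<open>nx\<close>.\<close>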
lemma lift_Postdomination:
assumes wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
and inner:"CFGExit.inner_node sourcenode targetnode valid_edge Entry Exit nx"
shows "Postdomination src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main NewExit"
proof -
interpret CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule wf)
interpret Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit
by(rule pd)
interpret CFGExit:CFGExit src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main NewExit
by(fastforce intro:lift_CFGExit wf pd)
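(* Every valid node of the original graph induces a valid node of the lifted graph. *)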
{ fix m assume "valid_node m"
then obtain a where "valid_edge a" and "m = sourcenode a \<or> m = targetnode a"
by(auto simp:valid_node_def)
from \<open>m = sourcenode a \<or> m = targetnode a\<close>
have "CFG.CFG.valid_node src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) (Node m)"
proof
assume "m = sourcenode a"
show ?thesis
proof(cases "m = Entry")
case True
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)" by(fastforce intro:lve_Entry_edge)
with \<open>m = Entry\<close> show ?thesis by(fastforce simp:CFGExit.valid_node_def)
next
case False
with \<open>m = sourcenode a\<close> \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node (sourcenode a),kind a,Node(targetnode a))"
by(fastforce intro:lve_edge)
with \<open>m = sourcenode a\<close> show ?thesis by(fastforce simp:CFGExit.valid_node_def)
qed
next
assume "m = targetnode a"
show ?thesis
proof(cases "m = Exit")
case True
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Exit_edge)
with \<open>m = Exit\<close> show ?thesis by(fastforce simp:CFGExit.valid_node_def)
next
case False
with \<open>m = targetnode a\<close> \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node (sourcenode a),kind a,Node(targetnode a))"
by(fastforce intro:lve_edge)
with \<open>m = targetnode a\<close> show ?thesis by(fastforce simp:CFGExit.valid_node_def)
qed
qed }
note lift_valid_node = this
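(* A valid path in the original graph that does not lead from Entry to Exit lifts to a
   valid path between the corresponding Node-wrapped nodes of the lifted graph. *)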
{ fix n as n' cs m m'
assume "valid_path_aux cs as" and "m -as\<rightarrow>* m'" and "\<forall>c \<in> set cs. valid_edge c"
and "m \<noteq> Entry \<or> m' \<noteq> Exit"
hence "\<exists>cs' es. CFG.CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
cs' es \<and>
list_all2 (\<lambda>c c'. c' = (Node (sourcenode c),kind c,Node (targetnode c))) cs cs'
\<and> CFG.CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) es (Node m')"
proof(induct arbitrary:m rule:vpa_induct)
case (vpa_empty cs)
from \<open>m -[]\<rightarrow>* m'\<close> have [simp]:"m = m'" by fastforce
from \<open>m -[]\<rightarrow>* m'\<close> have "valid_node m" by(rule path_valid_node)
obtain cs' where "cs' =
map (\<lambda>c. (Node (sourcenode c),kind c,Node (targetnode c))) cs" by simp
hence "list_all2
(\<lambda>c c'. c' = (Node (sourcenode c),kind c,Node (targetnode c))) cs cs'"
by(simp add:list_all2_conv_all_nth)
with \<open>valid_node m\<close> show ?case
apply(rule_tac x="cs'" in exI)
apply(rule_tac x="[]" in exI)
by(fastforce intro:CFGExit.empty_path lift_valid_node)
next
case (vpa_intra cs a as)
note IH = \<open>\<And>m. \<lbrakk>m -as\<rightarrow>* m'; \<forall>c\<in>set cs. valid_edge c; m \<noteq> Entry \<or> m' \<noteq> Exit\<rbrakk> \<Longrightarrow>
\<exists>cs' es. CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' es \<and>
list_all2 (\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) cs
cs' \<and> CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) es (Node m')\<close>
from \<open>m -a # as\<rightarrow>* m'\<close> have "m = sourcenode a" and "valid_edge a"
and "targetnode a -as\<rightarrow>* m'" by(auto elim:path_split_Cons)
show ?case
proof(cases "sourcenode a = Entry \<and> targetnode a = Exit")
case True
with \<open>m = sourcenode a\<close> \<open>m \<noteq> Entry \<or> m' \<noteq> Exit\<close>
have "m' \<noteq> Exit" by simp
from True have "targetnode a = Exit" by simp
with \<open>targetnode a -as\<rightarrow>* m'\<close> have "m' = Exit"
by -(drule path_Exit_source,auto)
with \<open>m' \<noteq> Exit\<close> have False by simp
thus ?thesis by simp
next
case False
let ?e = "(Node (sourcenode a),kind a,Node (targetnode a))"
from False \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e"
by(fastforce intro:lve_edge)
have "targetnode a \<noteq> Entry"
proof
assume "targetnode a = Entry"
with \<open>valid_edge a\<close> show False by(rule Entry_target)
qed
hence "targetnode a \<noteq> Entry \<or> m' \<noteq> Exit" by simp
from IH[OF \<open>targetnode a -as\<rightarrow>* m'\<close> \<open>\<forall>c\<in>set cs. valid_edge c\<close> this]
obtain cs' es
where valid_path:"CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' es"
and list:"list_all2
(\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) cs cs'"
and path:"CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node (targetnode a)) es (Node m')" by blast
from \<open>intra_kind (kind a)\<close> valid_path have "CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' (?e#es)" by(fastforce simp:intra_kind_def)
moreover
from path \<open>m = sourcenode a\<close>
\<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e\<close>
have "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) (?e#es) (Node m')" by(fastforce intro:CFGExit.Cons_path)
ultimately show ?thesis using list by blast
qed
next
case (vpa_Call cs a as Q r p fs)
note IH = \<open>\<And>m. \<lbrakk>m -as\<rightarrow>* m'; \<forall>c\<in>set (a # cs). valid_edge c;
m \<noteq> Entry \<or> m' \<noteq> Exit\<rbrakk> \<Longrightarrow>
\<exists>cs' es. CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' es \<and>
list_all2 (\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c)))
(a#cs) cs' \<and> CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) es (Node m')\<close>
from \<open>m -a # as\<rightarrow>* m'\<close> have "m = sourcenode a" and "valid_edge a"
and "targetnode a -as\<rightarrow>* m'" by(auto elim:path_split_Cons)
from \<open>\<forall>c\<in>set cs. valid_edge c\<close> \<open>valid_edge a\<close>
have "\<forall>c\<in>set (a # cs). valid_edge c" by simp
let ?e = "(Node (sourcenode a),kind a,Node (targetnode a))"
have "sourcenode a \<noteq> Entry"
proof
assume "sourcenode a = Entry"
with \<open>valid_edge a\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
show False by(rule Entry_no_call_source)
qed
with \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e"
by(fastforce intro:lve_edge)
have "targetnode a \<noteq> Entry"
proof
assume "targetnode a = Entry"
with \<open>valid_edge a\<close> show False by(rule Entry_target)
qed
hence "targetnode a \<noteq> Entry \<or> m' \<noteq> Exit" by simp
from IH[OF \<open>targetnode a -as\<rightarrow>* m'\<close> \<open>\<forall>c\<in>set (a # cs). valid_edge c\<close> this]
obtain cs' es
where valid_path:"CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' es"
and list:"list_all2
(\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) (a#cs) cs'"
and path:"CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node (targetnode a)) es (Node m')" by blast
from list obtain cx csx where "cs' = cx#csx"
and cx:"cx = (Node (sourcenode a), kind a, Node (targetnode a))"
and list':"list_all2
(\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) cs csx"
by(fastforce simp:list_all2_Cons1)
from valid_path cx \<open>cs' = cx#csx\<close> \<open>kind a = Q:r\<hookrightarrow>\<^bsub>p\<^esub>fs\<close>
have "CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) csx (?e#es)" by simp
moreover
from path \<open>m = sourcenode a\<close>
\<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e\<close>
have "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) (?e#es) (Node m')" by(fastforce intro:CFGExit.Cons_path)
ultimately show ?case using list' by blast
next
case (vpa_ReturnEmpty cs a as Q p f)
note IH = \<open>\<And>m. \<lbrakk>m -as\<rightarrow>* m'; \<forall>c\<in>set []. valid_edge c; m \<noteq> Entry \<or> m' \<noteq> Exit\<rbrakk> \<Longrightarrow>
\<exists>cs' es. CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) cs' es \<and>
list_all2 (\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c)))
[] cs' \<and> CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) es (Node m')\<close>
from \<open>m -a # as\<rightarrow>* m'\<close> have "m = sourcenode a" and "valid_edge a"
and "targetnode a -as\<rightarrow>* m'" by(auto elim:path_split_Cons)
let ?e = "(Node (sourcenode a),kind a,Node (targetnode a))"
have "targetnode a \<noteq> Exit"
proof
assume "targetnode a = Exit"
with \<open>valid_edge a\<close> \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> show False by(rule Exit_no_return_target)
qed
with \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e"
by(fastforce intro:lve_edge)
have "targetnode a \<noteq> Entry"
proof
assume "targetnode a = Entry"
with \<open>valid_edge a\<close> show False by(rule Entry_target)
qed
hence "targetnode a \<noteq> Entry \<or> m' \<noteq> Exit" by simp
from IH[OF \<open>targetnode a -as\<rightarrow>* m'\<close> _ this] obtain es
where valid_path:"CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) [] es"
and path:"CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node (targetnode a)) es (Node m')" by auto
from valid_path \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close>
have "CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) [] (?e#es)" by simp
moreover
from path \<open>m = sourcenode a\<close>
\<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e\<close>
have "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) (?e#es) (Node m')" by(fastforce intro:CFGExit.Cons_path)
ultimately show ?case using \<open>cs = []\<close> by blast
next
case (vpa_ReturnCons cs a as Q p f c' cs')
note IH = \<open>\<And>m. \<lbrakk>m -as\<rightarrow>* m'; \<forall>c\<in>set cs'. valid_edge c; m \<noteq> Entry \<or> m' \<noteq> Exit\<rbrakk> \<Longrightarrow>
\<exists>csx es. CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) csx es \<and>
list_all2 (\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c)))
cs' csx \<and> CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) es (Node m')\<close>
from \<open>m -a # as\<rightarrow>* m'\<close> have "m = sourcenode a" and "valid_edge a"
and "targetnode a -as\<rightarrow>* m'" by(auto elim:path_split_Cons)
from \<open>\<forall>c\<in>set cs. valid_edge c\<close> \<open>cs = c' # cs'\<close>
have "valid_edge c'" and "\<forall>c\<in>set cs'. valid_edge c" by simp_all
let ?e = "(Node (sourcenode a),kind a,Node (targetnode a))"
have "targetnode a \<noteq> Exit"
proof
assume "targetnode a = Exit"
with \<open>valid_edge a\<close> \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> show False by(rule Exit_no_return_target)
qed
with \<open>valid_edge a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e"
by(fastforce intro:lve_edge)
have "targetnode a \<noteq> Entry"
proof
assume "targetnode a = Entry"
with \<open>valid_edge a\<close> show False by(rule Entry_target)
qed
hence "targetnode a \<noteq> Entry \<or> m' \<noteq> Exit" by simp
from IH[OF \<open>targetnode a -as\<rightarrow>* m'\<close> \<open>\<forall>c\<in>set cs'. valid_edge c\<close> this]
obtain csx es
where valid_path:"CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) csx es"
and list:"list_all2
(\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) cs' csx"
and path:"CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node (targetnode a)) es (Node m')" by blast
from \<open>valid_edge c'\<close> \<open>a \<in> get_return_edges c'\<close>
have "?e \<in> lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind (Node (sourcenode c'),kind c',Node (targetnode c'))"
by(fastforce intro:lift_get_return_edgesI)
with valid_path \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close>
have "CFG.valid_path_aux knd
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
((Node (sourcenode c'),kind c',Node (targetnode c'))#csx) (?e#es)"
by simp
moreover
from list \<open>cs = c' # cs'\<close>
have "list_all2
(\<lambda>c c'. c' = (Node (sourcenode c), kind c, Node (targetnode c))) cs
((Node (sourcenode c'),kind c',Node (targetnode c'))#csx)"
by simp
moreover
from path \<open>m = sourcenode a\<close>
\<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit ?e\<close>
have "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node m) (?e#es) (Node m')" by(fastforce intro:CFGExit.Cons_path)
ultimately show ?case using \<open>kind a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> by blast
qed }
hence lift_valid_path:"\<And>m as m'. \<lbrakk>m -as\<rightarrow>\<^sub>\<surd>* m'; m \<noteq> Entry \<or> m' \<noteq> Exit\<rbrakk>
\<Longrightarrow> \<exists>es. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
(Node m) es (Node m')"
by(fastforce simp:vp_def valid_path_def CFGExit.vp_def CFGExit.valid_path_def)
show ?thesis
proof
fix n assume "CFG.CFG.valid_node src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) n"
hence "((n = NewEntry) \<or> n = NewExit) \<or> (\<exists>m. n = Node m \<and> valid_node m)"
by(auto elim:lift_valid_edge.cases simp:CFGExit.valid_node_def)
thus "\<exists>as. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
NewEntry as n" apply -
proof(erule disjE)+
assume "n = NewEntry"
hence "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
NewEntry [] n"
by(fastforce intro:CFGExit.empty_path
simp:CFGExit.vp_def CFGExit.valid_path_def)
thus ?thesis by blast
next
assume "n = NewExit"
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Entry_Exit_edge)
hence "CFG.CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
NewEntry [(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce dest:CFGExit.path_edge)
with \<open>n = NewExit\<close> have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
NewEntry [(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)] n"
by(fastforce simp:CFGExit.vp_def CFGExit.valid_path_def)
thus ?thesis by blast
next
assume "\<exists>m. n = Node m \<and> valid_node m"
then obtain m where "n = Node m" and "valid_node m" by blast
from \<open>valid_node m\<close>
show ?thesis
proof(cases m rule:valid_node_cases)
case Entry
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)" by(fastforce intro:lve_Entry_edge)
with \<open>m = Entry\<close> \<open>n = Node m\<close> have "CFG.CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
NewEntry [(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)] n"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
thus ?thesis by(fastforce simp:CFGExit.vp_def CFGExit.valid_path_def)
next
case Exit
from inner obtain ax where "valid_edge ax" and "intra_kind (kind ax)"
and "inner_node (sourcenode ax)"
and "targetnode ax = Exit" by(erule inner_node_Exit_edge)
hence "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node (sourcenode ax),kind ax,Node Exit)"
by(auto intro:lift_valid_edge.lve_edge simp:inner_node_def)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node (sourcenode ax)) [(Node (sourcenode ax),kind ax,Node Exit)]
(Node Exit)"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
with \<open>intra_kind (kind ax)\<close>
have slp_edge:"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node (sourcenode ax)) [(Node (sourcenode ax),kind ax,Node Exit)]
(Node Exit)"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def
intra_kind_def)
have "sourcenode ax \<noteq> Exit"
proof
assume "sourcenode ax = Exit"
with \<open>valid_edge ax\<close> show False by(rule Exit_source)
qed
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)" by(fastforce intro:lve_Entry_edge)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(NewEntry) [(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)] (Node Entry)"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
hence slp_edge':"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(NewEntry) [(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)] (Node Entry)"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def)
from \<open>inner_node (sourcenode ax)\<close> have "valid_node (sourcenode ax)"
by(rule inner_is_valid)
then obtain asx where "Entry -asx\<rightarrow>\<^sub>\<surd>* sourcenode ax"
by(fastforce dest:Entry_path)
with \<open>sourcenode ax \<noteq> Exit\<close>
have "\<exists>es. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node Entry) es (Node (sourcenode ax))"
by(fastforce intro:lift_valid_path)
then obtain es where "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node Entry) es (Node (sourcenode ax))" by blast
with slp_edge have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node Entry) (es@[(Node (sourcenode ax),kind ax,Node Exit)]) (Node Exit)"
by -(rule CFGExit.vp_slp_Append)
with slp_edge' have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) NewEntry
([(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)]@
(es@[(Node (sourcenode ax),kind ax,Node Exit)])) (Node Exit)"
by(rule CFGExit.slp_vp_Append)
with \<open>m = Exit\<close> \<open>n = Node m\<close> show ?thesis by simp blast
next
case inner
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)" by(fastforce intro:lve_Entry_edge)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(NewEntry) [(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)] (Node Entry)"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
hence slp_edge:"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(NewEntry) [(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)] (Node Entry)"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def)
from \<open>valid_node m\<close> obtain as where "Entry -as\<rightarrow>\<^sub>\<surd>* m"
by(fastforce dest:Entry_path)
with \<open>inner_node m\<close>
have "\<exists>es. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node Entry) es (Node m)"
by(fastforce intro:lift_valid_path simp:inner_node_def)
then obtain es where "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node Entry) es (Node m)" by blast
with slp_edge have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) NewEntry ([(NewEntry,(\<lambda>s. True)\<^sub>\<surd>,Node Entry)]@es) (Node m)"
by(rule CFGExit.slp_vp_Append)
with \<open>n = Node m\<close> show ?thesis by simp blast
qed
qed
next
fix n assume "CFG.CFG.valid_node src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) n"
hence "((n = NewEntry) \<or> n = NewExit) \<or> (\<exists>m. n = Node m \<and> valid_node m)"
by(auto elim:lift_valid_edge.cases simp:CFGExit.valid_node_def)
thus "\<exists>as. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
n as NewExit" apply -
proof(erule disjE)+
assume "n = NewEntry"
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Entry_Exit_edge)
hence "CFG.CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
NewEntry [(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce dest:CFGExit.path_edge)
with \<open>n = NewEntry\<close> have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
n [(NewEntry,(\<lambda>s. False)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce simp:CFGExit.vp_def CFGExit.valid_path_def)
thus ?thesis by blast
next
assume "n = NewExit"
hence "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
n [] NewExit"
by(fastforce intro:CFGExit.empty_path
simp:CFGExit.vp_def CFGExit.valid_path_def)
thus ?thesis by blast
next
assume "\<exists>m. n = Node m \<and> valid_node m"
then obtain m where "n = Node m" and "valid_node m" by blast
from \<open>valid_node m\<close>
show ?thesis
proof(cases m rule:valid_node_cases)
case Entry
from inner obtain ax where "valid_edge ax" and "intra_kind (kind ax)"
and "inner_node (targetnode ax)" and "sourcenode ax = Entry"
by(erule inner_node_Entry_edge)
hence "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node Entry,kind ax,Node (targetnode ax))"
by(auto intro:lift_valid_edge.lve_edge simp:inner_node_def)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node Entry) [(Node Entry,kind ax,Node (targetnode ax))]
(Node (targetnode ax))"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
with \<open>intra_kind (kind ax)\<close>
have slp_edge:"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node Entry) [(Node Entry,kind ax,Node (targetnode ax))]
(Node (targetnode ax))"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def
intra_kind_def)
have "targetnode ax \<noteq> Entry"
proof
assume "targetnode ax = Entry"
with \<open>valid_edge ax\<close> show False by(rule Entry_target)
qed
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Exit_edge)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node Exit) [(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
hence slp_edge':"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node Exit) [(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def)
from \<open>inner_node (targetnode ax)\<close> have "valid_node (targetnode ax)"
by(rule inner_is_valid)
then obtain asx where "targetnode ax -asx\<rightarrow>\<^sub>\<surd>* Exit"
by(fastforce dest:Exit_path)
with \<open>targetnode ax \<noteq> Entry\<close>
have "\<exists>es. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node (targetnode ax)) es (Node Exit)"
by(fastforce intro:lift_valid_path)
then obtain es where "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node (targetnode ax)) es (Node Exit)" by blast
with slp_edge have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node Entry) ([(Node Entry,kind ax,Node (targetnode ax))]@es) (Node Exit)"
by(rule CFGExit.slp_vp_Append)
with slp_edge' have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node Entry)
(([(Node Entry,kind ax,Node (targetnode ax))]@es)@
[(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)]) NewExit"
by -(rule CFGExit.vp_slp_Append)
with \<open>m = Entry\<close> \<open>n = Node m\<close> show ?thesis by simp blast
next
case Exit
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Exit_edge)
with \<open>m = Exit\<close> \<open>n = Node m\<close> have "CFG.CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
n [(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
thus ?thesis by(fastforce simp:CFGExit.vp_def CFGExit.valid_path_def)
next
case inner
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit
(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)" by(fastforce intro:lve_Exit_edge)
hence "CFG.path src trg
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(Node Exit) [(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce intro:CFGExit.Cons_path CFGExit.empty_path
simp:CFGExit.valid_node_def)
hence slp_edge:"CFG.CFG.same_level_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind)
(Node Exit) [(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)] NewExit"
by(fastforce simp:CFGExit.slp_def CFGExit.same_level_path_def)
from \<open>valid_node m\<close> obtain as where "m -as\<rightarrow>\<^sub>\<surd>* Exit"
by(fastforce dest:Exit_path)
with \<open>inner_node m\<close>
have "\<exists>es. CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node m) es (Node Exit)"
by(fastforce intro:lift_valid_path simp:inner_node_def)
then obtain es where "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node m) es (Node Exit)" by blast
with slp_edge have "CFG.CFG.valid_path' src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit)
(lift_get_return_edges get_return_edges valid_edge sourcenode
targetnode kind) (Node m) (es@[(Node Exit,(\<lambda>s. True)\<^sub>\<surd>,NewExit)]) NewExit"
by -(rule CFGExit.vp_slp_Append)
with \<open>n = Node m\<close> show ?thesis by simp blast
qed
qed
next
fix n n'
assume method_exit1:"CFGExit.CFGExit.method_exit src knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewExit n"
and method_exit2:"CFGExit.CFGExit.method_exit src knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewExit n'"
and lift_eq:"lift_get_proc get_proc Main n = lift_get_proc get_proc Main n'"
from method_exit1 show "n = n'"
proof(rule CFGExit.method_exit_cases)
assume "n = NewExit"
from method_exit2 show ?thesis
proof(rule CFGExit.method_exit_cases)
assume "n' = NewExit"
with \<open>n = NewExit\<close> show ?thesis by simp
next
fix a Q f p
assume "n' = src a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f"
hence "lift_get_proc get_proc Main (src a) = p"
by -(rule CFGExit.get_proc_return)
with CFGExit.get_proc_Exit lift_eq \<open>n' = src a\<close> \<open>n = NewExit\<close>
have "p = Main" by simp
with \<open>knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> have "knd a = Q\<hookleftarrow>\<^bsub>Main\<^esub>f" by simp
with \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a\<close>
have False by(rule CFGExit.Main_no_return_source)
thus ?thesis by simp
qed
next
fix a Q f p
assume "n = src a"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
and "knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f"
then obtain x where "valid_edge x" and "src a = Node (sourcenode x)"
and "kind x = Q\<hookleftarrow>\<^bsub>p\<^esub>f"
by(fastforce elim:lift_valid_edge.cases)
hence "method_exit (sourcenode x)" by(fastforce simp:method_exit_def)
from method_exit2 show ?thesis
proof(rule CFGExit.method_exit_cases)
assume "n' = NewExit"
from \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a\<close>
\<open>knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close>
have "lift_get_proc get_proc Main (src a) = p"
by -(rule CFGExit.get_proc_return)
with CFGExit.get_proc_Exit lift_eq \<open>n = src a\<close> \<open>n' = NewExit\<close>
have "p = Main" by simp
with \<open>knd a = Q\<hookleftarrow>\<^bsub>p\<^esub>f\<close> have "knd a = Q\<hookleftarrow>\<^bsub>Main\<^esub>f" by simp
with \<open>lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a\<close>
have False by(rule CFGExit.Main_no_return_source)
thus ?thesis by simp
next
fix a' Q' f' p'
assume "n' = src a'"
and "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a'"
and "knd a' = Q'\<hookleftarrow>\<^bsub>p'\<^esub>f'"
then obtain x' where "valid_edge x'" and "src a' = Node (sourcenode x')"
and "kind x' = Q'\<hookleftarrow>\<^bsub>p'\<^esub>f'"
by(fastforce elim:lift_valid_edge.cases)
hence "method_exit (sourcenode x')" by(fastforce simp:method_exit_def)
with \<open>method_exit (sourcenode x)\<close> lift_eq \<open>n = src a\<close> \<open>n' = src a'\<close>
\<open>src a = Node (sourcenode x)\<close> \<open>src a' = Node (sourcenode x')\<close>
have "sourcenode x = sourcenode x'" by(fastforce intro:method_exit_unique)
with \<open>src a = Node (sourcenode x)\<close> \<open>src a' = Node (sourcenode x')\<close>
\<open>n = src a\<close> \<open>n' = src a'\<close>
show ?thesis by simp
qed
qed
qed
qed
lemma lift_SDG:
assumes SDG:"SDG sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and inner:"CFGExit.inner_node sourcenode targetnode valid_edge Entry Exit nx"
shows "SDG src trg knd
(lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit) NewEntry
(lift_get_proc get_proc Main)
(lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind)
procs Main NewExit (lift_Def Def Entry Exit H L) (lift_Use Use Entry Exit H L)
(lift_ParamDefs ParamDefs) (lift_ParamUses ParamUses)"
proof -
interpret SDG sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule SDG)
have wf:"CFGExit_wf sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
by(unfold_locales)
have pd:"Postdomination sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit"
by(unfold_locales)
interpret wf':CFGExit_wf src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main NewExit "lift_Def Def Entry Exit H L" "lift_Use Use Entry Exit H L"
"lift_ParamDefs ParamDefs" "lift_ParamUses ParamUses"
by(fastforce intro:lift_CFGExit_wf wf pd)
interpret pd':Postdomination src trg knd
"lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit" NewEntry
"lift_get_proc get_proc Main"
"lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
procs Main NewExit
by(fastforce intro:lift_Postdomination wf pd inner)
show ?thesis by(unfold_locales)
qed
subsubsection \<open>Low-deterministic security via the lifted graph\<close>
lemma Lift_NonInterferenceGraph:
fixes valid_edge and sourcenode and targetnode and kind and Entry and Exit
and get_proc and get_return_edges and procs and Main
and Def and Use and ParamDefs and ParamUses and H and L
defines lve:"lve \<equiv> lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit"
and lget_proc:"lget_proc \<equiv> lift_get_proc get_proc Main"
and lget_return_edges:"lget_return_edges \<equiv>
lift_get_return_edges get_return_edges valid_edge sourcenode targetnode kind"
and lDef:"lDef \<equiv> lift_Def Def Entry Exit H L"
and lUse:"lUse \<equiv> lift_Use Use Entry Exit H L"
and lParamDefs:"lParamDefs \<equiv> lift_ParamDefs ParamDefs"
and lParamUses:"lParamUses \<equiv> lift_ParamUses ParamUses"
assumes SDG:"SDG sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses"
and inner:"CFGExit.inner_node sourcenode targetnode valid_edge Entry Exit nx"
and "H \<inter> L = {}" and "H \<union> L = UNIV"
shows "NonInterferenceInterGraph src trg knd lve NewEntry lget_proc
lget_return_edges procs Main NewExit lDef lUse lParamDefs lParamUses H L
(Node Entry) (Node Exit)"
proof -
interpret SDG sourcenode targetnode kind valid_edge Entry get_proc
get_return_edges procs Main Exit Def Use ParamDefs ParamUses
by(rule SDG)
interpret SDG':SDG src trg knd lve NewEntry lget_proc lget_return_edges
procs Main NewExit lDef lUse lParamDefs lParamUses
by(fastforce intro:lift_SDG SDG inner simp:lve lget_proc lget_return_edges lDef
lUse lParamDefs lParamUses)
show ?thesis
proof
fix a assume "lve a" and "src a = NewEntry"
thus "trg a = NewExit \<or> trg a = Node Entry"
by(fastforce elim:lift_valid_edge.cases simp:lve)
next
show "\<exists>a. lve a \<and> src a = NewEntry \<and> trg a = Node Entry \<and> knd a = (\<lambda>s. True)\<^sub>\<surd>"
by(fastforce intro:lve_Entry_edge simp:lve)
next
fix a assume "lve a" and "trg a = Node Entry"
from \<open>lve a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
by(simp add:lve)
from this \<open>trg a = Node Entry\<close>
show "src a = NewEntry"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>trg e = Node Entry\<close>
have "targetnode a = Entry" by simp
with \<open>valid_edge a\<close> have False by(rule Entry_target)
thus ?case by simp
qed simp_all
next
fix a assume "lve a" and "trg a = NewExit"
thus "src a = NewEntry \<or> src a = Node Exit"
by(fastforce elim:lift_valid_edge.cases simp:lve)
next
show "\<exists>a. lve a \<and> src a = Node Exit \<and> trg a = NewExit \<and> knd a = (\<lambda>s. True)\<^sub>\<surd>"
by(fastforce intro:lve_Exit_edge simp:lve)
next
fix a assume "lve a" and "src a = Node Exit"
from \<open>lve a\<close>
have "lift_valid_edge valid_edge sourcenode targetnode kind Entry Exit a"
by(simp add:lve)
from this \<open>src a = Node Exit\<close>
show "trg a = NewExit"
proof(induct rule:lift_valid_edge.induct)
case (lve_edge a e)
from \<open>e = (Node (sourcenode a), kind a, Node (targetnode a))\<close>
\<open>src e = Node Exit\<close>
have "sourcenode a = Exit" by simp
with \<open>valid_edge a\<close> have False by(rule Exit_source)
thus ?case by simp
qed simp_all
next
from lDef show "lDef (Node Entry) = H"
by(fastforce elim:lift_Def_set.cases intro:lift_Def_High)
next
from Entry_noteq_Exit lUse show "lUse (Node Entry) = H"
by(fastforce elim:lift_Use_set.cases intro:lift_Use_High)
next
from Entry_noteq_Exit lUse show "lUse (Node Exit) = L"
by(fastforce elim:lift_Use_set.cases intro:lift_Use_Low)
next
from \<open>H \<inter> L = {}\<close> show "H \<inter> L = {}" .
next
from \<open>H \<union> L = UNIV\<close> show "H \<union> L = UNIV" .
qed
qed
end
|
# ----------------------------------------------------------------------------
# Copyright (c) 2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import numpy as np
import pandas as pd
import skbio
import skbio.io
from q2_types.feature_data._transformer import AlignedProteinIterator
from q2_types.feature_data import ProteinFASTAFormat, AlignedProteinFASTAFormat
from q2_alignment._mafft import run_command
def _mafft(sequences_fp, alignment_fp, n_threads, parttree):
# Save original sequence IDs since long ids (~250 chars) can be truncated
# by mafft. We'll replace the IDs in the aligned sequences file output by
# mafft with the originals.
#
# https://github.com/qiime2/q2-alignment/issues/37
aligned_seq_ids = {}
unaligned_seq_ids = {}
# if alignment_fp is not None:
# for seq in skbio.io.read(alignment_fp, format='fasta',
# constructor=skbio.Protein):
# id_ = seq.metadata['id']
# if id_ in aligned_seq_ids:
# raise ValueError(
# "A sequence ID is duplicated in the aligned sequences: "
# "%r" % id_)
# else:
# aligned_seq_ids[id_] = True
for seq in skbio.io.read(sequences_fp, format='fasta',
constructor=skbio.Protein):
id_ = seq.metadata['id']
if id_ in unaligned_seq_ids:
raise ValueError(
"A sequence ID is duplicated in the unaligned sequences: "
"%r" % id_)
elif id_ in aligned_seq_ids:
raise ValueError(
"A sequence ID is present in both the aligned and unaligned "
"sequences: %r" % id_)
else:
unaligned_seq_ids[id_] = True
result = AlignedProteinFASTAFormat()
result_fp = str(result)
ids = {**aligned_seq_ids, **unaligned_seq_ids}
# mafft will fail if the number of sequences is larger than 1 million.
# mafft requires using parttree which is an algorithm to build an
# approximate tree from a large number of unaligned sequences.
    # By catching the error below if a user has not used the parttree flag, we
    # eliminate the need for the mafft error to be shown to the user, which
    # can be confusing and intimidating.
if not parttree and len(ids) > 1000000:
raise ValueError(
"The number of sequences in your feature table is larger than "
"1 million, please use the parttree parameter")
    # mafft's signal for utilizing all cores is -1. We want our users
    # to enter 'auto' to use all cores. This is to prevent any confusion and
    # to keep the UX consistent.
if n_threads == 'auto':
n_threads = -1
# `--inputorder` must be turned on because we need the input and output in
# the same sequence order to replace the IDs below. This is mafft's default
# behavior but we pass the flag in case that changes in the future.
cmd = ["mafft", "--preservecase", "--inputorder",
"--thread", str(n_threads)]
if parttree:
cmd += ['--parttree']
if alignment_fp is not None:
cmd += ['--add', sequences_fp, alignment_fp]
else:
cmd += [sequences_fp]
run_command(cmd, result_fp)
# Read output alignment into memory, reassign original sequence IDs, and
# write alignment back to disk.
msa = skbio.TabularMSA.read(result_fp, format='fasta',
constructor=skbio.Protein)
# Using `assert` because mafft would have had to add or drop sequences
# while aligning, which would be a bug on mafft's end. This is just a
# sanity check and is not expected to trigger in practice.
assert len(ids) == len(msa)
for id, seq in zip(ids, msa):
seq.metadata['id'] = id
# Turning off roundtripping options to speed up writing. We can safely turn
    # these options off because we know the sequence IDs are roundtrip-safe
# since we read them from a FASTA file above.
#
# http://scikit-bio.org/docs/latest/generated/
# skbio.io.format.fasta.html#writer-specific-parameters
msa.write(result_fp, id_whitespace_replacement=None,
description_newline_replacement=None)
return result
def _map_positions(aligned_sequences: AlignedProteinIterator) -> pd.DataFrame:
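    # Build a DataFrame indexed by alignment position: each column maps one
    # aligned record back to its original (degapped) position, with <NA>
    # wherever the aligned sequence has a gap.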
mapping_df = pd.DataFrame()
for aln_seq_record in aligned_sequences:
id_aln = aln_seq_record.metadata['id']
seq_aln = pd.Series(aln_seq_record.values.astype('str'), name=id_aln)
seq_aln_degapped = seq_aln[seq_aln != "-"]
original_positions = pd.Series([int(x) for x in range(
len(seq_aln_degapped.index))], index=seq_aln_degapped.index)
seq_aln[seq_aln != "-"] = original_positions
seq_aln.replace(to_replace="-", value=np.nan, inplace=True)
seq_aln = seq_aln.astype('Int64')
mapping_df = pd.concat([mapping_df, seq_aln], axis=1)
mapping_df.index.name = 'Alignment position'
mapping_df.index = mapping_df.index.astype('int64')
return mapping_df
def mafft(sequences: ProteinFASTAFormat,
n_threads: int = 1,
parttree: bool = False) -> AlignedProteinFASTAFormat:
sequences_fp = str(sequences)
return _mafft(sequences_fp, None, n_threads, parttree)
def map_positions(
aligned_sequences: AlignedProteinIterator) -> pd.DataFrame:
return _map_positions(aligned_sequences)
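# A minimal usage sketch, not part of the plugin API: the input path below is
# hypothetical and a working mafft installation is assumed. `map_positions`
# accepts any iterator of skbio.Protein records, e.g. as produced by
# skbio.io.read.
if __name__ == '__main__':
    seqs = ProteinFASTAFormat('proteins.fasta', mode='r')  # hypothetical input
    aligned = mafft(seqs, n_threads='auto')  # 'auto' maps to all cores (-1)
    records = skbio.io.read(str(aligned), format='fasta',
                            constructor=skbio.Protein)
    print(map_positions(records).head())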
|
Formal statement is: lemma monom_1_dvd_iff: "p \<noteq> 0 \<Longrightarrow> monom 1 n dvd p \<longleftrightarrow> n \<le> order 0 p" Informal statement is: If $p$ is a nonzero polynomial, then $x^n$ divides $p$ if and only if $n \leq \text{order}(0, p)$. |
module Main
double : Int -> Int
double x = 2 * x
triple : Int -> Int
triple x = 10 * x
multiply : Int -> Int -> Int
multiply x y = x * y
IntFun : Type
IntFun = (Int -> Int)
data TheAlt : Type where
MyCon : IntFun -> Lazy IntFun -> TheAlt
runCon : TheAlt -> Int -> Int
runCon (MyCon a b) i = let x = a i in if x > 5 then x else (b i)
main : IO ()
main = do
let obj = MyCon double (multiply 100)
--let obj = MyCon double triple
printLn $ runCon obj 1
printLn $ runCon obj 2
printLn $ runCon obj 3
|
from clpy.testing import array # NOQA
from clpy.testing import attr # NOQA
from clpy.testing import helper # NOQA
from clpy.testing import parameterized # NOQA
from clpy.testing import random # NOQA
from clpy.testing.array import assert_allclose # NOQA
from clpy.testing.array import assert_array_almost_equal # NOQA
from clpy.testing.array import assert_array_almost_equal_nulp # NOQA
from clpy.testing.array import assert_array_equal # NOQA
from clpy.testing.array import assert_array_less # NOQA
from clpy.testing.array import assert_array_list_equal # NOQA
from clpy.testing.array import assert_array_max_ulp # NOQA
from clpy.testing.attr import gpu # NOQA
from clpy.testing.attr import multi_gpu # NOQA
from clpy.testing.attr import slow # NOQA
from clpy.testing.bufio import readbuf # NOQA
from clpy.testing.bufio import writebuf # NOQA
from clpy.testing.helper import assert_warns # NOQA
from clpy.testing.helper import for_all_dtypes # NOQA
from clpy.testing.helper import for_all_dtypes_combination # NOQA
from clpy.testing.helper import for_CF_orders # NOQA
from clpy.testing.helper import for_dtypes # NOQA
from clpy.testing.helper import for_dtypes_combination # NOQA
from clpy.testing.helper import for_float_dtypes # NOQA
from clpy.testing.helper import for_int_dtypes # NOQA
from clpy.testing.helper import for_int_dtypes_combination # NOQA
from clpy.testing.helper import for_orders # NOQA
from clpy.testing.helper import for_signed_dtypes # NOQA
from clpy.testing.helper import for_signed_dtypes_combination # NOQA
from clpy.testing.helper import for_unsigned_dtypes # NOQA
from clpy.testing.helper import for_unsigned_dtypes_combination # NOQA
from clpy.testing.helper import numpy_clpy_allclose # NOQA
from clpy.testing.helper import numpy_clpy_array_almost_equal # NOQA
from clpy.testing.helper import numpy_clpy_array_almost_equal_nulp # NOQA
from clpy.testing.helper import numpy_clpy_array_equal # NOQA
from clpy.testing.helper import numpy_clpy_array_less # NOQA
from clpy.testing.helper import numpy_clpy_array_list_equal # NOQA
from clpy.testing.helper import numpy_clpy_array_max_ulp # NOQA
from clpy.testing.helper import numpy_clpy_equal # NOQA
from clpy.testing.helper import numpy_clpy_raises # NOQA
from clpy.testing.helper import NumpyError # NOQA
from clpy.testing.helper import shaped_arange # NOQA
from clpy.testing.helper import shaped_random # NOQA
from clpy.testing.helper import shaped_reverse_arange # NOQA
from clpy.testing.helper import with_requires # NOQA
from clpy.testing.parameterized import parameterize # NOQA
from clpy.testing.parameterized import product # NOQA
from clpy.testing.random import fix_random # NOQA
from clpy.testing.random import generate_seed # NOQA
|
 In 2011, the UT Board of Regents approved the construction of a new 13-story hospital that will be located next to John Sealy Hospital. Construction will begin in the fall of 2011, with the demolition of the old Jennie Sealy and Shriners hospitals, and continue until completion in 2016. The facility will have 250 rooms, 20 operating suites and 54 intensive care beds. When the new hospital is complete, along with the renovations at John Sealy, both complexes will have around 600 beds.
|
abstract type AbstractCR3BPIndirectParams end
# CR3BP Indirect parameters
mutable struct CR3BPIndirectParams <: AbstractCR3BPIndirectParams
# Spacecraft parameters
sp::SimpleSpacecraft
# CR3BP parameters
crp::CR3BPParams
# Continuation variable
ϵ::Float64
# Thrust type
utype::Int64
function CR3BPIndirectParams(sp::Spacecraft, crp::CR3BPParams, ϵ)
new(sp, crp, ϵ, 0)
end
end
# CR3BP CoState Dynamics
function cr3bpCostateEom(u::AbstractArray, p::AbstractCR3BPIndirectParams, γ, homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
MU = p.crp.MU
μ = p.crp.μ
# Scale requirements
tMaxSc = p.sp.tMax * TU * TU / (MU*LU*1000.0)
# Compute Requirements
xpmu = u[1] + μ
xpmum1 = u[1] + μ - 1
r1 = sqrt(xpmu*xpmu + u[2]*u[2] + u[3]*u[3])
r2 = sqrt(xpmum1*xpmum1 + u[2]*u[2] + u[3]*u[3])
invr13 = r1^(-3)
invr15 = r1^(-5)
invr23 = r2^(-3)
invr25 = r2^(-5)
λv = sqrt(u[11]^2 + u[12]^2 + u[13]^2)
# Compute G
G11 = 1 - (1 - μ)*invr13 + 3*(1 - μ)*(u[1] + μ)^2*invr15 -
μ*invr23 + 3*μ*(u[1] + μ - 1)^2*invr25;
G22 = 1 - (1 - μ)*invr13 + 3*(1 - μ)*u[2]^2*invr15 -
μ*invr23 + 3*μ*u[2]^2*invr25;
G33 = -(1 - μ)*invr13 + 3*(1 - μ)*u[3]^2*invr15 -
μ*invr23 + 3*μ*u[3]^2*invr25;
G12 = 3*(1 - μ)*(u[1] + μ)*u[2]*invr15 +
3*μ*(u[1] + μ - 1)*u[2]*invr25;
G13 = 3*(1 - μ)*(u[1] + μ)*u[3]*invr15 +
3*μ*(u[1] + μ - 1)*u[3]*invr25;
G23 = 3*(1 - μ)*u[2]*u[3]*invr15 +
3*μ*u[2]*u[3]*invr25;
# Compute and return Dynamics
dλ = @SVector [ -G11*u[11] - G12*u[12] - G13*u[13],
-G12*u[11] - G22*u[12] - G23*u[13],
-G13*u[11] - G23*u[12] - G33*u[13],
-u[8] + 2.0*u[12],
-u[9] - 2.0*u[11],
-u[10],
-λv*γ*tMaxSc / (u[7]*u[7])]
end
return dλ
end
function cr3bpCostateEom!(dλ::AbstractArray, u::AbstractArray,
p::AbstractCR3BPIndirectParams, γ,
homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
MU = p.crp.MU
μ = p.crp.μ
# Scale requirements
tMaxSc = p.sp.tMax * TU^2 / (MU*LU*1000.0)
# Compute Requirements
r1 = sqrt((u[1] + μ)^2 + u[2]^2 + u[3]^2)
r2 = sqrt((u[1] + μ - 1)^2 + u[2]^2 + u[3]^2)
invr13 = r1^(-3)
invr15 = r1^(-5)
invr23 = r2^(-3)
invr25 = r2^(-5)
λv = sqrt(u[11]^2 + u[12]^2 + u[13]^2)
# Compute G
G11 = 1 - (1 - μ)*invr13 + 3*(1 - μ)*(u[1] + μ)^2*invr15 -
μ*invr23 + 3*μ*(u[1] + μ - 1)^2*invr25;
G22 = 1 - (1 - μ)*invr13 + 3*(1 - μ)*u[2]^2*invr15 -
μ*invr23 + 3*μ*u[2]^2*invr25;
G33 = -(1 - μ)*invr13 + 3*(1 - μ)*u[3]^2*invr15 -
μ*invr23 + 3*μ*u[3]^2*invr25;
G12 = 3*(1 - μ)*(u[1] + μ)*u[2]*invr15 +
3*μ*(u[1] + μ - 1)*u[2]*invr25;
G13 = 3*(1 - μ)*(u[1] + μ)*u[3]*invr15 +
3*μ*(u[1] + μ - 1)*u[3]*invr25;
G23 = 3*(1 - μ)*u[2]*u[3]*invr15 +
3*μ*u[2]*u[3]*invr25;
# Compute and return Dynamics
dλ[1] = -G11*u[11] - G12*u[12] - G13*u[13]
dλ[2] = -G12*u[11] - G22*u[12] - G23*u[13]
dλ[3] = -G13*u[11] - G23*u[12] - G33*u[13]
dλ[4] = -u[8] + 2.0*u[12]
dλ[5] = -u[9] - 2.0*u[11]
dλ[6] = -u[10]
dλ[7] = -λv*γ*tMaxSc / (u[7]^2)
end
return @SVector [G11, G22, G33, G12, G13, G23]
end
# CR3BP Indirect EOMs
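# State layout assumed by the EOMs below: u[1:3] position, u[4:6] velocity,
# u[7] mass, u[8:10] position costates, u[11:13] velocity costates,
# u[14] mass costate.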
function cr3bpEomIndirect(u::AbstractVector, p::AbstractCR3BPIndirectParams, t, homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
VU = p.crp.VU
MU = p.crp.MU
isp = p.sp.isp
c = p.sp.c
# Scale requirements
tMaxSc = p.sp.tMax * TU * TU / (MU*LU*1000.0)
cSc = c*TU / (LU*1000.0)
# Compute thrust direction
λv = sqrt(u[11]*u[11] + u[12]*u[12] + u[13]*u[13])
invλv = 1.0 / λv
α = @SVector [-u[11]*invλv, -u[12]*invλv, -u[13]*invλv]
        # Compute throttle
S = computeS(u, λv, cSc)
γ = computeU(S, p.utype, p.ϵ)
# Compute Thrust Acceleration
atMag = γ*tMaxSc / u[7]
at = @SVector [α[1]*atMag,
α[2]*atMag,
α[3]*atMag]
# Derivatives
dx = cr3bpEomControl(u,p.crp,t,at)
dλ = cr3bpCostateEom(u,p, γ, homotopyFlag)
du = @SVector [dx[1], dx[2], dx[3], dx[4], dx[5], dx[6], -γ*tMaxSc / cSc,
dλ[1], dλ[2], dλ[3], dλ[4], dλ[5], dλ[6], dλ[7]]
return du
end
end
function cr3bpEomIndirect!(du::AbstractVector, u::AbstractVector,
p::AbstractCR3BPIndirectParams, t, homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
VU = p.crp.VU
MU = p.crp.MU
isp = p.sp.isp
c = p.sp.c
# Scale requirements
tMaxSc = p.sp.tMax * TU^2 / (MU*LU*1000.0)
cSc = c*TU / (LU*1000.0)
# Compute thrust direction
λv = sqrt(u[11]^2 + u[12]^2 + u[13]^2)
invλv = 1.0 / λv
α = @SVector [-u[11]*invλv, -u[12]*invλv, -u[13]*invλv]
        # Compute throttle
S = computeS(u, λv, cSc)
γ = computeU(S, p.utype, p.ϵ)
# Compute Thrust Acceleration
atMag = γ*tMaxSc / u[7]
at = @SVector [α[1]*atMag,
α[2]*atMag,
α[3]*atMag]
# Compute dynamics
cr3bpEomControl!(view(du,1:6), u, p.crp, t, at)
du[7] = -γ*tMaxSc / cSc
GVec = cr3bpCostateEom!(view(du,8:14), u, p, γ, homotopyFlag)
end
return GVec
end
function cr3bpEomIndirectIntegralCost(u::AbstractVector, p::AbstractCR3BPIndirectParams, t, homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
VU = p.crp.VU
MU = p.crp.MU
isp = p.sp.isp
c = p.sp.c
# Scale requirements
tMaxSc = p.sp.tMax * TU * TU / (MU*LU*1000.0)
cSc = c*TU / (LU*1000.0)
# Compute thrust direction
λv = sqrt(u[11]*u[11] + u[12]*u[12] + u[13]*u[13])
invλv = 1.0 / λv
α = @SVector [-u[11]*invλv, -u[12]*invλv, -u[13]*invλv]
        # Compute throttle
S = computeS(u, λv, cSc)
γ = computeU(S, p.utype, p.ϵ)
# Compute Thrust Acceleration
atMag = γ*tMaxSc / u[7]
at = @SVector [α[1]*atMag,
α[2]*atMag,
α[3]*atMag]
# Derivatives
dx = cr3bpEomControl(u,p.crp,t,at)
dλ = cr3bpCostateEom(u,p, γ, homotopyFlag)
usqr = γ - p.ϵ*γ*(1.0 - γ)
du = @SVector [dx[1], dx[2], dx[3], dx[4], dx[5], dx[6], -γ*tMaxSc / cSc,
dλ[1], dλ[2], dλ[3], dλ[4], dλ[5], dλ[6], dλ[7], usqr]
return du
end
end
function cr3bpEomIndirectIntegralCost!(du::AbstractArray, u::AbstractArray, p::AbstractCR3BPIndirectParams, t, homotopyFlag::MEMF)
@inbounds begin
# Get requirements
TU = p.crp.TU
LU = p.crp.LU
VU = p.crp.VU
MU = p.crp.MU
isp = p.sp.isp
c = p.sp.c
# Scale requirements
tMaxSc = p.sp.tMax * TU^2 / (MU*LU*1000.0)
cSc = c*TU / (LU*1000.0)
# Compute thrust direction
λv = sqrt(u[11]^2 + u[12]^2 + u[13]^2)
invλv = 1.0 / λv
α = @SVector [-u[11]*invλv, -u[12]*invλv, -u[13]*invλv]
        # Compute throttle
S = computeS(u, λv, cSc)
γ = computeU(S, p.utype, p.ϵ)
# Compute Thrust Acceleration
atMag = γ*tMaxSc / u[7]
at = @SVector [α[1]*atMag,
α[2]*atMag,
α[3]*atMag]
# Compute dynamics
cr3bpEomControl!(view(du,1:6), u, p.crp, t, at)
du[7] = -γ*tMaxSc / cSc
GVec = cr3bpCostateEom!(view(du,8:14), u, p, γ, homotopyFlag)
du[15] = γ - p.ϵ*γ*(1.0 - γ)
end
return GVec
end
# Utility Functions
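# computeS evaluates the thrust switching function S = 1 - λ_m - c*λ_v/m,
# where x[7] is the spacecraft mass and x[14] the mass costate.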
function computeS(x::AbstractVector, λv, cSc)
@inbounds begin
return -λv*cSc / x[7] - x[14] + 1.0
end
end
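# Throttle law for the homotopy: utype == 2 forces full thrust, utype == 0
# forces coasting, and otherwise the smoothed control (ϵ - S)/(2ϵ) is used.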
function computeU(S, utype, ϵ)
if utype == 2
return 1.0
elseif utype == 0
return 0.0
else
return (ϵ - S) / (2.0*ϵ)
end
end
|
module IcyAntics
export ContinuumIceModel, ViscoElasticRheology
include("ContinuumIceModels.jl")
using .ContinuumIceModels
end # module
|
library(ggplot2)
library(tidyverse)
# Load data
setwd("./data/event_scores")
file_names <- dir()
data <- do.call(rbind, lapply(file_names, read.csv, header = FALSE))
setwd("../event_matchdata")
file_names <- dir()
kda_data <- do.call(rbind, lapply(file_names, read.csv, header = FALSE))
setwd("../event_scores_tagged")
file_names <- dir()
kda_stomps <- do.call(rbind, lapply(file_names, read.csv, header = FALSE))
kda_stomps$stomps <- ifelse(kda_stomps$V1 >= 13, # if bo1
ifelse(kda_stomps$V1 - kda_stomps$V2 <= 3, FALSE, TRUE), #process a bo1
ifelse(kda_stomps$V2 == 0, TRUE, FALSE)) # otherwise, process a bo3
kda_data$stomps <- kda_stomps$stomps
kda_data$col <- ifelse(kda_data$stomps == TRUE, "steelblue", "brown2")
# Sanity check just in case there are differences in the data you need to find
#kda_stomps$in_kda_data <- ifelse(kda_data$V1 == kda_stomps$V3, TRUE,
# FALSE)
# Competitive graph in general case
bo3 <- subset(data, V1 < 3)
bo3_stomps <- subset(bo3, V2 == 0)
bo3_close <- subset(bo3, V2 == 1)
stomps_vs_close <- data.frame(type = c("close", "stomps"),
number = c(nrow(bo3_close),
c(nrow(bo3_stomps))))
p <- ggplot(data = stomps_vs_close, aes(x = type, y = number)) +
geom_bar(stat = "identity", fill = "steelblue") + theme_minimal()
p + ggtitle("Competitive vs non-competitive matches") +
theme(plot.title = element_text(hjust = 0.5))
# Histogram of bo1 win differentials
bo1 <- subset(data, V1 > 3)
bo1$diff <- bo1$V1 - bo1$V2
bo1_dist <- as.data.frame(table(bo1$diff))
bo1_dist <- bo1_dist %>% rename(Difference = Var1)
p <- ggplot(data = bo1_dist, aes(x = Difference, y = Freq)) +
geom_bar(stat = "identity", fill = "steelblue") + theme_minimal()
p + ggtitle("Distribution of difference in score on match termination") +
theme(plot.title = element_text(hjust = 0.5))
# Competitive graph in finals case
finals_data <- do.call(rbind, lapply(c("205.csv", "251.csv", "198.csv",
"66.csv", "40.csv", "274.csv", "40.csv", "256.csv", "280.csv"),
read.csv, header = FALSE))
finals_stomps <- subset(finals_data, V2 == 0)
finals_close <- subset(finals_data, V2 == 1)
stomps_vs_close <- data.frame(type = c("close", "stomps"),
number = c(nrow(finals_close),
c(nrow(finals_stomps))))
p <- ggplot(data = stomps_vs_close, aes(x = type, y = number)) +
geom_bar(stat = "identity", fill = "steelblue") + theme_minimal()
p + ggtitle("Competitive vs non-competitive matches in finals matches") +
theme(plot.title = element_text(hjust = 0.5))
# kda analysis
kd_rat_win <- (kda_data$V2 / kda_data$V3)
a_rat_win <- (kda_data$V4 / kda_data$V5)
kda_rat_win <- ((kda_data$V2 + kda_data$V4) / (kda_data$V3 + kda_data$V5))
kd_rat_lose <- (kda_data$V3 / kda_data$V2)
a_rat_lose <- (kda_data$V5 / kda_data$V4)
kda_rat_lose <- ((kda_data$V3 + kda_data$V5) / (kda_data$V2 + kda_data$V4))
boxplot(kd_rat_win, a_rat_win, kda_rat_win, kd_rat_lose,
a_rat_lose, kda_rat_lose, main = "Distribution of Teams' KDAs",
xlab = "KDAs", ylab = "Ratios", notch = TRUE,
col = (c("dodgerblue4", "deepskyblue2", "steelblue",
"brown4", "brown2", "brown3")),
names = c("Winning KD", "Winning Assist Ratio", "Winning KDA",
"Losing KD", "Losing Assist Ratio", "Losing KDA"))
# A/D analysis
ad_rat_win <- (kda_data$V4 / kda_data$V3)
ad_rat_lose <- (kda_data$V5 / kda_data$V2)
boxplot(ad_rat_win, ad_rat_lose, main = "Distribution of Team Coordination",
xlab = "A/D", ylab = "Ratio", notch = TRUE,
col = (c("deepskyblue2", "brown2")),
names = c("Winning Coordination", "Losing Coordination"))
# Does coordination track more with stomps than raw kills?
plot(kda_data$V2, kda_data$V3, main = "Winner KD Plot, Colored with Stomp(blue)/No stomp(red)",
xlab = "Deaths", ylab = "Kills", pch = 19, col=kda_data$col)
plot(kda_data$V4, kda_data$V3, main = "Winner Coordination Plot, Colored with Stomp(blue)/No stomp(red)",
xlab = "Deaths", ylab = "Assists", pch = 19, col=kda_data$col)
abline(h = 175, col="purple")
plot(kda_data$V5, kda_data$V2, main = "Loser Coordination Plot, Colored with Stomp(blue)/No stomp(red)",
xlab = "Deaths", ylab = "Assists", pch = 19, col=kda_data$col)
segments(10, 83, 50, 83, col="purple")
segments(45, 180, 200, 285, col="purple") |
(********************************************************************)
(* *)
(* The Why3 Verification Platform / The Why3 Development Team *)
(* Copyright 2010-2018 -- Inria - CNRS - Paris-Sud University *)
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
(* General Public License version 2.1, with the special exception *)
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
(* This file is generated by Why3's Coq-realize driver *)
(* Beware! Only edit allowed sections below *)
Require Import BuiltIn.
Require BuiltIn.
Require int.Int.
(* Why3 assumption *)
Definition even (n:Z): Prop := exists k:Z, (n = (2%Z * k)%Z).
(* Why3 assumption *)
Definition odd (n:Z): Prop := exists k:Z, (n = ((2%Z * k)%Z + 1%Z)%Z).
Lemma even_is_Zeven :
forall n, even n <-> Zeven n.
Proof.
intros n.
refine (conj _ (Zeven_ex n)).
intros (k,H).
rewrite H.
apply Zeven_2p.
Qed.
Lemma odd_is_Zodd :
forall n, odd n <-> Zodd n.
Proof.
intros n.
refine (conj _ (Zodd_ex n)).
intros (k,H).
rewrite H.
apply Zodd_2p_plus_1.
Qed.
(* Why3 goal *)
Lemma even_or_odd :
forall (n:Z), (even n) \/ (odd n).
Proof.
intros n.
destruct (Zeven_odd_dec n).
left.
now apply <- even_is_Zeven.
right.
now apply <- odd_is_Zodd.
Qed.
(* Why3 goal *)
Lemma even_not_odd :
forall (n:Z), (even n) -> ~ (odd n).
Proof.
intros n H1 H2.
apply (Zeven_not_Zodd n).
now apply -> even_is_Zeven.
now apply -> odd_is_Zodd.
Qed.
(* Why3 goal *)
Lemma odd_not_even :
forall (n:Z), (odd n) -> ~ (even n).
Proof.
intros n H1.
contradict H1.
now apply even_not_odd.
Qed.
(* Why3 goal *)
Lemma even_odd :
forall (n:Z), (even n) -> (odd (n + 1%Z)%Z).
Proof.
intros n H.
apply <- odd_is_Zodd.
apply Zeven_plus_Zodd.
now apply -> even_is_Zeven.
easy.
Qed.
(* Why3 goal *)
Lemma odd_even :
forall (n:Z), (odd n) -> (even (n + 1%Z)%Z).
Proof.
intros n H.
apply <- even_is_Zeven.
apply Zodd_plus_Zodd.
now apply -> odd_is_Zodd.
easy.
Qed.
(* Why3 goal *)
Lemma even_even :
forall (n:Z), (even n) -> (even (n + 2%Z)%Z).
Proof.
intros n H.
apply <- even_is_Zeven.
apply Zeven_plus_Zeven.
now apply -> even_is_Zeven.
easy.
Qed.
(* Why3 goal *)
Lemma odd_odd :
forall (n:Z), (odd n) -> (odd (n + 2%Z)%Z).
Proof.
intros n H.
apply <- odd_is_Zodd.
apply Zodd_plus_Zeven.
now apply -> odd_is_Zodd.
easy.
Qed.
(* Why3 goal *)
Lemma even_2k :
forall (k:Z), (even (2%Z * k)%Z).
Proof.
intros k.
now exists k.
Qed.
(* Why3 goal *)
Lemma odd_2k1 :
forall (k:Z), (odd ((2%Z * k)%Z + 1%Z)%Z).
Proof.
intros k.
now exists k.
Qed.
|
C Expected result: nothing to scalarize
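C The temporary t is written at index i+1 but read at index i, so each
C iteration reads a value stored by the previous one; this loop-carried
C dependence is why t cannot be replaced by a scalar.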
subroutine scalarization(x,y,n)
real x(n,n), y(n,n)
real t(100)
do i=1,n
t(i+1) = x(i,i)
x(i,i) = y(i,i)
y(i,i) = t(i)
enddo
end
program scalarization04
parameter(n=100)
real x(n,n), y(n,n)
read *,x,y
call scalarization(x,y,n)
print *,x,y
end
|
The filter map $\lambda x \mapsto x - d$ applied to the filter at right $a$ is the filter at right $a - d$. |
open import Common.Equality
data Bool : Set where
true false : Bool
data IsTrue : Bool → Set where
itis : IsTrue true
module M (u : Bool) where
foo bar : (p : IsTrue u) → Bool
foo = \ { itis → true }
bar = \ { itis → false }
test : ∀ u → M.foo u ≡ M.bar u
test u = refl
-- Trigger printing of the extended lambdas.
-- Ideally, this would not show the internal names, like in
--
-- .Issue2047a.M..extendedlambda0 u p !=
-- .Issue2047a.M..extendedlambda1 u p of type Bool
-- when checking that the expression refl has type M.foo u ≡ M.bar u
--
-- but rather show
--
-- (\ { itis → true }) .p != (\ { itis → false }) .p
|
import numpy as np
from rtdpy.rtd import RTD, RTDInputError
class Zusatz(RTD):
"""
Create Zusatz Residence Time Distribution (RTD) model. [1]_
Parameter a is chosen such that the integral is 1.
.. math::
E(t) = a t^{-c-1} b^{c+1}
\\text{exp}\\left[\\left(b^c t^{-c} -1\\right)\\frac{-c-1}{c}\\right]
\\\\a = \\frac{1+c}{b\\, \\text{exp}\\left[1+1/c\\right]}
Parameters
----------
b : scalar
b Zusatz parameter. ``b>0``
c : scalar
c Zusatz parameter. ``c>0``
Mean residence time only defined for ``c>1``.
Variance only defined for ``c>2``.
dt : scalar
Time step for RTD. ``dt>0``
time_end : scalar
End time for RTD. ``time_end>0``
References
----------
.. [1] Poulesquen A., et al. (2003) A study of residence time distribution
in co-rotating twin-screw extruders. Part II: Experimental
validation. "Polymer Engineering and Science", 43(12), 1849-1862.
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import rtdpy
>>> for c in [3, 7]:
>>> a = rtdpy.Zusatz(b=25, c=c, dt=.01, time_end=100)
>>> plt.plot(a.time, a.exitage, label=f"c={c}")
>>> plt.xlabel('Time')
>>> plt.ylabel('Exit Age Function')
>>> plt.legend()
>>> plt.show()
"""
def __init__(self, b, c, dt, time_end):
super().__init__(dt, time_end)
if b <= 0:
            raise RTDInputError("b must be greater than zero")
self._b = b
if c <= 0:
            raise RTDInputError("c must be greater than zero")
self._c = c
self._a = self._calc_a()
self._exitage = self._calc_exitage()
def _calc_exitage(self):
"""equation for exit age function"""
time_safe = np.clip(self.time, np.finfo(float).eps, None)
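        # time_safe above clips t away from zero: t**(-c-1) is undefined at
        # t = 0 and would otherwise produce NaNs in the exit age curve.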
output = self.a * time_safe**(-self.c-1) \
* self.b**(self.c + 1) \
* np.exp((self.b**self.c * time_safe**(-1 * self.c) - 1)
* (-self.c - 1) / self.c)
return output
def _calc_a(self):
"""Calculate a to normalize rtd"""
return (1 + self.c) / (self.b * np.exp(1 + 1 / self.c))
@property
def a(self):
"""a parameter that normalizes RTD to 1"""
return self._a
@property
def b(self):
"""b parameter"""
return self._b
@property
def c(self):
"""c parameter"""
return self._c
def __repr__(self):
"""Returns representation of object"""
return ("Zusatz(b={}, c={}, dt={}, time_end={})".format(
self.b, self.c, self.dt, self.time_end))
|
{-# OPTIONS --allow-unsolved-metas #-}
module fin where
open import Data.Fin hiding (_<_ ; _≤_ )
open import Data.Fin.Properties hiding ( <-trans )
open import Data.Nat
open import logic
open import nat
open import Relation.Binary.PropositionalEquality
-- toℕ<n
fin<n : {n : ℕ} {f : Fin n} → toℕ f < n
fin<n {_} {zero} = s≤s z≤n
fin<n {suc n} {suc f} = s≤s (fin<n {n} {f})
-- toℕ≤n
fin≤n : {n : ℕ} (f : Fin (suc n)) → toℕ f ≤ n
fin≤n {_} zero = z≤n
fin≤n {suc n} (suc f) = s≤s (fin≤n {n} f)
pred<n : {n : ℕ} {f : Fin (suc n)} → n > 0 → Data.Nat.pred (toℕ f) < n
pred<n {suc n} {zero} (s≤s z≤n) = s≤s z≤n
pred<n {suc n} {suc f} (s≤s z≤n) = fin<n
fin<asa : {n : ℕ} → toℕ (fromℕ< {n} a<sa) ≡ n
fin<asa = toℕ-fromℕ< nat.a<sa
-- fromℕ<-toℕ
toℕ→from : {n : ℕ} {x : Fin (suc n)} → toℕ x ≡ n → fromℕ n ≡ x
toℕ→from {0} {zero} refl = refl
toℕ→from {suc n} {suc x} eq = cong (λ k → suc k ) ( toℕ→from {n} {x} (cong (λ k → Data.Nat.pred k ) eq ))
0≤fmax : {n : ℕ } → (# 0) Data.Fin.≤ fromℕ< {n} a<sa
0≤fmax = subst (λ k → 0 ≤ k ) (sym (toℕ-fromℕ< a<sa)) z≤n
0<fmax : {n : ℕ } → (# 0) Data.Fin.< fromℕ< {suc n} a<sa
0<fmax = subst (λ k → 0 < k ) (sym (toℕ-fromℕ< a<sa)) (s≤s z≤n)
-- toℕ-injective
i=j : {n : ℕ} (i j : Fin n) → toℕ i ≡ toℕ j → i ≡ j
i=j {suc n} zero zero refl = refl
i=j {suc n} (suc i) (suc j) eq = cong ( λ k → suc k ) ( i=j i j (cong ( λ k → Data.Nat.pred k ) eq) )
-- raise 1
fin+1 : { n : ℕ } → Fin n → Fin (suc n)
fin+1 zero = zero
fin+1 (suc x) = suc (fin+1 x)
open import Data.Nat.Properties as NatP hiding ( _≟_ )
fin+1≤ : { i n : ℕ } → (a : i < n) → fin+1 (fromℕ< a) ≡ fromℕ< (<-trans a a<sa)
fin+1≤ {0} {suc i} (s≤s z≤n) = refl
fin+1≤ {suc n} {suc (suc i)} (s≤s (s≤s a)) = cong (λ k → suc k ) ( fin+1≤ {n} {suc i} (s≤s a) )
fin+1-toℕ : { n : ℕ } → { x : Fin n} → toℕ (fin+1 x) ≡ toℕ x
fin+1-toℕ {suc n} {zero} = refl
fin+1-toℕ {suc n} {suc x} = cong (λ k → suc k ) (fin+1-toℕ {n} {x})
open import Relation.Nullary
open import Data.Empty
fin-1 : { n : ℕ } → (x : Fin (suc n)) → ¬ (x ≡ zero ) → Fin n
fin-1 zero ne = ⊥-elim (ne refl )
fin-1 {n} (suc x) ne = x
fin-1-sx : { n : ℕ } → (x : Fin n) → fin-1 (suc x) (λ ()) ≡ x
fin-1-sx zero = refl
fin-1-sx (suc x) = refl
fin-1-xs : { n : ℕ } → (x : Fin (suc n)) → (ne : ¬ (x ≡ zero )) → suc (fin-1 x ne ) ≡ x
fin-1-xs zero ne = ⊥-elim ( ne refl )
fin-1-xs (suc x) ne = refl
-- suc-injective
-- suc-eq : {n : ℕ } {x y : Fin n} → Fin.suc x ≡ Fin.suc y → x ≡ y
-- suc-eq {n} {x} {y} eq = subst₂ (λ j k → j ≡ k ) {!!} {!!} (cong (λ k → Data.Fin.pred k ) eq )
-- this is refl
lemma3 : {a b : ℕ } → (lt : a < b ) → fromℕ< (s≤s lt) ≡ suc (fromℕ< lt)
lemma3 (s≤s lt) = refl
-- fromℕ<-toℕ
lemma12 : {n m : ℕ } → (n<m : n < m ) → (f : Fin m ) → toℕ f ≡ n → f ≡ fromℕ< n<m
lemma12 {zero} {suc m} (s≤s z≤n) zero refl = refl
lemma12 {suc n} {suc m} (s≤s n<m) (suc f) refl = cong suc ( lemma12 {n} {m} n<m f refl )
open import Relation.Binary.HeterogeneousEquality as HE using (_≅_ )
open import Data.Fin.Properties
-- <-irrelevant
<-nat=irr : {i j n : ℕ } → ( i ≡ j ) → {i<n : i < n } → {j<n : j < n } → i<n ≅ j<n
<-nat=irr {zero} {zero} {suc n} refl {s≤s z≤n} {s≤s z≤n} = HE.refl
<-nat=irr {suc i} {suc i} {suc n} refl {s≤s i<n} {s≤s j<n} = HE.cong (λ k → s≤s k ) ( <-nat=irr {i} {i} {n} refl )
lemma8 : {i j n : ℕ } → ( i ≡ j ) → {i<n : i < n } → {j<n : j < n } → i<n ≅ j<n
lemma8 {zero} {zero} {suc n} refl {s≤s z≤n} {s≤s z≤n} = HE.refl
lemma8 {suc i} {suc i} {suc n} refl {s≤s i<n} {s≤s j<n} = HE.cong (λ k → s≤s k ) ( lemma8 {i} {i} {n} refl )
-- fromℕ<-irrelevant
lemma10 : {n i j : ℕ } → ( i ≡ j ) → {i<n : i < n } → {j<n : j < n } → fromℕ< i<n ≡ fromℕ< j<n
lemma10 {n} refl = HE.≅-to-≡ (HE.cong (λ k → fromℕ< k ) (lemma8 refl ))
lemma31 : {a b c : ℕ } → { a<b : a < b } { b<c : b < c } { a<c : a < c } → NatP.<-trans a<b b<c ≡ a<c
lemma31 {a} {b} {c} {a<b} {b<c} {a<c} = HE.≅-to-≡ (lemma8 refl)
-- toℕ-fromℕ<
lemma11 : {n m : ℕ } {x : Fin n } → (n<m : n < m ) → toℕ (fromℕ< (NatP.<-trans (toℕ<n x) n<m)) ≡ toℕ x
lemma11 {n} {m} {x} n<m = begin
toℕ (fromℕ< (NatP.<-trans (toℕ<n x) n<m))
≡⟨ toℕ-fromℕ< _ ⟩
toℕ x
∎ where
open ≡-Reasoning
|
State Before: α : Type u_1
l✝ : List α
inst✝ : DecidableEq α
a b : α
l : List α
⊢ count a (b :: l) = count a l + if a = b then 1 else 0 State After: no goals Tactic: conv =>
simp [count, countp_cons]
lhs
simp only [eq_comm] |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tensorio import compare_tensor
import numpy as np
from akg.utils import kernel_exec as utils
from akg.ops.nn import avgpool
from akg.utils.dsl_create import cal_pad_shapes_by_strategy
from gen_random import random_gaussian
def benchmark(input, kernel, stride, pad):
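    # NumPy reference implementation: zero-pad the NC1HWC0 input according to
    # the pad strategy, then average over each kernel window; used below to
    # validate the akg avgpool kernel.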
sh, sw = stride
N, C1, H, W, C0 = input.shape
KH, KW = kernel
[ph_h, ph_t, pw_h, pw_t], [out_size_h, out_size_w] = cal_pad_shapes_by_strategy(input.shape, kernel, stride, pad)
out_shape = (N, C1, out_size_h, out_size_w, C0)
out = np.zeros(out_shape)
inputpad = np.zeros((N, C1, H + ph_h + ph_t, W + pw_h + pw_t, C0))
inputpad[:, :, ph_h:ph_h + H, pw_h:pw_h + W, :] = input
for i in range(out_size_h):
for j in range(out_size_w):
out[:, :, i, j, :] = np.mean(inputpad[:, :, i * sh:i * sh + KH, j * sw:j * sw + KW, :], axis=(2, 3))
return out
def avgpool_run(shape, kernel, stride, strategy, dtype, attrs):
if 'tuning' in attrs.keys():
t = attrs.get("tuning", False)
kernel_name = attrs.get("kernel_name", False)
mod = utils.op_build_test(avgpool.avgpool, [shape], [dtype], op_attrs=[kernel, stride, strategy],
kernel_name=kernel_name, attrs=attrs, tuning=t)
if t:
expect, input, output = gen_data(dtype, kernel, shape, strategy, stride)
return mod, expect, (input, output)
else:
return mod
else:
mod = utils.op_build_test(avgpool.avgpool, [shape], [dtype], op_attrs=[kernel, stride, strategy],
kernel_name='avgpool', attrs=attrs)
expect, input, output = gen_data(dtype, kernel, shape, strategy, stride)
output = utils.mod_launch(mod, [input, output], expect=expect)
return input, output, expect, compare_tensor(output, expect, rtol=5e-03, atol=5e-03, equal_nan=True)
def gen_data(dtype, kernel, shape, strategy, stride):
support_list = {"float16": np.float16, "float32": np.float32}
input = random_gaussian(shape, miu=1, sigma=0.1).astype(support_list[dtype])
expect = benchmark(input, kernel, stride, strategy)
out_shape = expect.shape
output = np.full(out_shape, 0, dtype)
return expect, input, output
|