#pragma once
#include <type_traits>
#include <cuda/runtime_api.hpp>
#include <gsl-lite/gsl-lite.hpp>
namespace thrustshift {
namespace kernel {
template <typename SrcT, typename MapT, typename DstT>
__global__ void scatter(gsl_lite::span<const SrcT> src,
gsl_lite::span<const MapT> map,
gsl_lite::span<DstT> dst) {
const auto gtid = threadIdx.x + blockIdx.x * blockDim.x;
if (gtid < src.size()) {
dst[map[gtid]] = src[gtid];
}
}
} // namespace kernel
namespace async {
template <class SrcRange, class MapRange, class DstRange>
void scatter(cuda::stream_t& stream,
SrcRange&& src,
MapRange&& map,
DstRange&& dst) {
gsl_Expects(src.size() == dst.size());
gsl_Expects(src.size() == map.size());
gsl_Expects(src.data() != dst.data());
if (src.empty()) {
return;
}
using src_value_type =
typename std::remove_reference<SrcRange>::type::value_type;
using map_index_type =
typename std::remove_reference<MapRange>::type::value_type;
using dst_value_type =
typename std::remove_reference<DstRange>::type::value_type;
constexpr cuda::grid::block_dimension_t block_dim = 128;
const cuda::grid::dimension_t grid_dim =
(src.size() + block_dim - 1) / block_dim;
auto c = cuda::make_launch_config(grid_dim, block_dim);
auto k = kernel::scatter<src_value_type, map_index_type, dst_value_type>;
cuda::enqueue_launch(k, stream, c, src, map, dst);
}
} // namespace async
} // namespace thrustshift
|
Formal statement is: corollary\<^marker>\<open>tag unimportant\<close> open_mapping_thm3: assumes holf: "f holomorphic_on S" and "open S" and injf: "inj_on f S" shows "open (f ` S)" Informal statement is: If $f$ is a holomorphic function on an open set $S$ and $f$ is injective on $S$, then $f(S)$ is open. |
Formal statement is: lemma convex_epigraph: "convex (epigraph S f) \<longleftrightarrow> convex_on S f \<and> convex S" Informal statement is: The epigraph of a convex function is convex if and only if the function is convex and the domain is convex. |
# ------------------------------------------------------------
# Scaling functions
@doc """
HaarScaling( xi[, J, k] ) -> Float
The Haar scaling function evaluated in `xi` at level `J` and translation `k`.
By default, `J=0` and `k=0`.
"""->
function HaarScaling(xi::Real)
zero(xi) <= xi < one(xi) ? 1.0 : 0.0
end
@doc """
DaubScaling(p, R) -> x, y
A Daubechies `p` scaling function evaluated in the dyadic rationals at resolution `R`.
"""->
function DaubScaling(p::Integer, R::Integer, symmlet::Bool=true)
IF = ifilter(p, symmlet)
supp = support(IF)
x = dyadic_rationals(supp, R)
phi = DaubScaling( IF, R )
return x, phi
end
@doc """
dyadic_dil_matrix(C::Vector) -> Matrix
The "dyadic dilation matrix" `D` of the filter `C`:
`D[i,j] = sqrt(2)*C[2i-j]`, with the filter coefficients indexed from zero (the code below uses the 1-based `C[2i-j+1]`).
"""->
function dyadic_dil_matrix(C::Vector{Float64})
NC = length(C)
sz = NC - 2
dydil_mat = zeros(Float64, sz, sz)
for nj in 1:sz, ni in 1:sz
Cidx = 2*ni - nj + 1
if 1 <= Cidx <= NC
dydil_mat[ni, nj] = sqrt2*C[Cidx]
end
end
return dydil_mat
end
#=
DaubScaling(IF::InteriorFilter) -> Vector
Compute function values of the scaling function defined by the filter `IF` at the integers in the support.
=#
function DaubScaling(IF::InteriorFilter)
if van_moment(IF) == 1
return [1.0; 0.0]
end
C = coef(IF)
L = dyadic_dil_matrix( C )
# Eigenvector of eigenvalue 1:
E = zeros(Float64, length(C))
E[2:end-1] = eigval1(L)
# Normalize so the integer samples of the scaling function sum to one
scale!(E, 1/sum(E))
return E
end
@doc """
DaubScaling(C::InteriorFilter, R::Int) -> Vector
Compute function values of the scaling function defined by the filter
`C` at the dyadic rationals of resolution `R` in the support.
"""->
function DaubScaling(IF::InteriorFilter, R::Integer)
supp = support(IF)
# There are 2^R points in each unit interval, plus the right endpoint
Nx = length(supp)*2^R + 1
phi = zeros(Float64, Nx)
# Base level
cur_idx = dyadic_rationals(supp, R, 0)
phi[cur_idx] = DaubScaling(IF)
# Recursion: Fill remaining levels
coeff = coef(IF)
Lsupp = left(supp)
Rsupp = right(supp)
for L in 1:R
# Indices of x values on scale L
cur_idx = dyadic_rationals(supp, R, L)
for xindex in cur_idx
twox = 2*index2x( xindex, supp, R )
for k in Lsupp:Rsupp
if isinside(twox-k, supp)
twox_index = x2index( twox-k, supp, R )
phi[xindex] += sqrt2 * coeff[k-Lsupp+1] * phi[twox_index]
end
end
end
end
return phi
end
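# A minimal usage sketch (assumes the helpers `ifilter`, `support` and
# `dyadic_rationals` used above are provided elsewhere in this package).
x, phi = DaubScaling(2, 8)   # Daubechies-2 scaling function, grid spacing 2^-8
HaarScaling(0.3)             # returns 1.0, since 0 <= 0.3 < 1
# Sanity check: the Riemann sum approximates the integral of the scaling
# function, which is 1, so sum(phi) * 2.0^-8 should be close to 1.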
|
function [h, compUp] = lfmComputeH4(gamma1_p, gamma1_m, sigma2, t1, preFactor, preExp,...
mode, term )
% LFMCOMPUTEH4 Helper function for computing part of the LFM kernel.
% FORMAT
% DESC computes a portion of the LFM kernel.
% ARG gamma1_p : first Gamma value for the system.
% ARG gamma1_m : second Gamma value for the system.
% ARG sigma2 : length scale of latent process.
% ARG t1 : first time input (number of time points x 1).
% ARG preFactor : vector of precomputed constant factors (up to 4 entries).
% ARG preExp : precomputed exponential terms (number of time points x 1 or x 2).
% ARG mode : logical flag selecting which of the two computation branches is used.
% ARG term : optional flag further selecting the form of the result when mode is false.
% RETURN h : result of this subcomponent of the kernel for the given values.
% RETURN compUp : upsilon vector(s) computed internally, returned for reuse.
%
% COPYRIGHT : David Luengo, 2007
%
% COPYRIGHT : Mauricio Alvarez, 2008
%
% MODIFICATIONS : Neil D. Lawrence, 2007
%
%
%
% SEEALSO : lfmKernParamInit, lfmXlfmKernCompute
% KERN
% Evaluation of h
if nargin<8
term =[];
end
if ~mode
if ~term
if nargout>1
compUp = lfmComputeUpsilonVector(gamma1_p,sigma2, t1);
h = compUp*( preExp/preFactor(1) - conj(preExp)/preFactor(2)).';
else
h = lfmComputeUpsilonVector(gamma1_p,sigma2, t1)*( preExp/preFactor(1) - conj(preExp)/preFactor(2)).';
end
else
if nargout>1
compUp = lfmComputeUpsilonVector(gamma1_p,sigma2, t1);
h = compUp*(preExp/preFactor(1)).' - conj(compUp)*(preExp/preFactor(2)).';
else
upsilon = lfmComputeUpsilonVector(gamma1_p,sigma2, t1);
h = upsilon*(preExp/preFactor(1)).' - conj(upsilon)*(preExp/preFactor(2)).';
end
end
else
if nargout > 1
compUp{1} = lfmComputeUpsilonVector(gamma1_p,sigma2, t1);
compUp{2} = lfmComputeUpsilonVector(gamma1_m,sigma2, t1);
h = compUp{1}*( preExp(:,1)/preFactor(1) - preExp(:,2)/preFactor(2)).' ...
+ compUp{2}*( preExp(:,2)/preFactor(3) - preExp(:,1)/preFactor(4)).';
else
h = lfmComputeUpsilonVector(gamma1_p,sigma2, t1)*( preExp(:,1)/preFactor(1) - preExp(:,2)/preFactor(2)).' ...
+ lfmComputeUpsilonVector(gamma1_m,sigma2, t1)*( preExp(:,2)/preFactor(3) - preExp(:,1)/preFactor(4)).';
end
end
|
-- --------------------------------------------------------------- [ Day06.idr ]
-- Module : Data.Advent.Day06
-- Description : My solution to the Day 6 puzzle of the 2016 Advent of Code.
-- Copyright : Copyright (c) 2016, Eric Bailey
-- License : MIT
-- Link : http://adventofcode.com/2016/day/6
-- --------------------------------------------------------------------- [ EOH ]
||| Day 6: Signals and Noise
module Data.Advent.Day06
import public Data.SortedMap
%access export
-- ----------------------------------------------------------- [ Generic Logic ]
main' : (String -> String) -> IO ()
main' f = either printLn (putStrLn . f) !(readFile "input/day06.txt")
decode' : (f : Nat -> Nat -> Bool) -> String -> String
decode' f = pack . map (fst . foldr1 go . frequencies') . transpose .
map unpack . lines
where
go : (Char, Nat) -> (Char, Nat) -> (Char, Nat)
go elem@(_,m) acc@(_,n) = if m `f` n then elem else acc
frequencies' : Ord a => List a -> List (a, Nat)
frequencies' = toList . foldr inc empty
where
inc : k -> SortedMap k Nat -> SortedMap k Nat
inc k m = case lookup k m of
Nothing => insert k 1 m
Just v => insert k (S v) $ delete k m
-- ---------------------------------------------------------------- [ Part One ]
namespace PartOne
decode : String -> String
decode = decode' (>)
main : IO ()
main = main' decode
-- ---------------------------------------------------------------- [ Part Two ]
namespace PartTwo
decode : String -> String
decode = decode' (<)
main : IO ()
main = main' PartTwo.decode
-- -------------------------------------------------------------------- [ Main ]
namespace Main
main : IO ()
main = putStr "Part One: " *> PartOne.main *>
putStr "Part Two: " *> PartTwo.main
-- --------------------------------------------------------------------- [ EOF ]
|
module Builtin.Coinduction where
open import Agda.Builtin.Coinduction public
|
function ar=v_lpcls2ar(ls)
%V_LPCLS2AR convert line spectrum pair frequencies to ar polynomial AR=(LS)
% input vector elements should be in the range 0 to 0.5
% Copyright (C) Mike Brookes 1997
% Version: $Id: v_lpcls2ar.m 10865 2018-09-21 17:22:45Z dmb $
%
% VOICEBOX is a MATLAB toolbox for speech processing.
% Home page: http://www.ee.ic.ac.uk/hp/staff/dmb/voicebox/voicebox.html
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% This program is free software; you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation; either version 2 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You can obtain a copy of the GNU General Public License from
% http://www.gnu.org/copyleft/gpl.html or by writing to
% Free Software Foundation, Inc.,675 Mass Ave, Cambridge, MA 02139, USA.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
[nf,p]=size(ls);
p1=p+1;
p2 = p1*2;
ar=zeros(nf,p1);
for k=1:nf
le=exp(ls(k,:)*pi*2i);
lf=[1 le -1 conj(fliplr(le))];
y=real(poly(lf(1:2:p2)));
x=real(poly(lf(2:2:p2)));
ar(k,:)=(x(1:p1)+y(1:p1))/2;
end
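% A minimal usage sketch: one frame of line spectrum frequencies (strictly
% increasing, in the required range 0 to 0.5) converted to an AR polynomial.
% The example values below are illustrative only.
ls=[0.1 0.2 0.35 0.45];     % p=4 line spectrum frequencies
ar=v_lpcls2ar(ls);          % 1 x (p+1) AR polynomial with ar(1)=1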
|
lemma bilinear_bounded: fixes h :: "'m::euclidean_space \<Rightarrow> 'n::euclidean_space \<Rightarrow> 'k::real_normed_vector" assumes bh: "bilinear h" shows "\<exists>B. \<forall>x y. norm (h x y) \<le> B * norm x * norm y" |
#######################################################################
# Copyright (C) 2017 Shangtong Zhang([email protected]) #
# Permission given to modify the code as long as you keep this #
# declaration at the top #
#######################################################################
import torch
import numpy as np
import torch.multiprocessing as mp
from collections import deque
from skimage.io import imsave
# from ..utils import *
from ..utils.logger import Logger, GlobalLogger
from ..utils.normalizer import build_normalizer
from ..utils.misc import mkdir, close_obj, has_flag
from ..utils.torch_utils import random_seed
import logging
from ..component.envs import Task
from common.utils.checkpoint import CheckpointerV2_RL
from ..network.network_builder import build_network
import pickle
def update_dict_with_key_map(d1, d2, key_map):
for k1, k2 in key_map.items():
if k1 not in d1:
raise Exception('key %s not in d1' % str(k1))
if k2 not in d2:
raise Exception('key %s not in d2' % str(k2))
d1[k1] = d2[k2]
class BaseAgent:
def __init__(self, config):
self.config = config
self.logger = GlobalLogger(logging.getLogger('RL'), config.final_output_dir, 0)
self.task_ind = 0
self.state_normalizer = build_normalizer(config.RL.state_normalizer)
self.reward_normalizer = build_normalizer(config.RL.reward_normalizer)
self.checkpointer = None
self.first_eval = True
def close(self):
close_obj(self.task)
close_obj(self.evaluator)
def lazy_init_checkpointer(self):
if self.checkpointer is None:
self.checkpointer = CheckpointerV2_RL(self.network,
state_normalizer=self.state_normalizer,
optimizer=self.optimizer,
save_dir=self.config.final_output_dir,
logger=self.logger,
max_to_keep=self.config.train.n_checkpoints_to_keep)
def save(self, tag=None):
self.lazy_init_checkpointer()
filename = '{:d}'.format(self.total_steps)
if tag: filename += ('_' + tag)
self.checkpointer.save(filename)
def try_to_load_network(self):
config = self.config
if config.load_ckpt:
self.load(config.load_ckpt)
def load(self, ckpt_path):
self.lazy_init_checkpointer()
self.checkpointer.load(ckpt_path, resume=False, resume_states=False)
def eval_episodes(self):
if self.config.eval.is_async and not self.first_eval:
self.evaluator.query_eval_done() # let the training wait for evaluation
self.first_eval = False
self.evaluator.eval_episodes(self.total_steps)
def record_online_return(self, info, offset=0):
# pass
if isinstance(info, dict):
if 'episodic_return' in info: # wrapped by OriginalReturnWrapper
ret = info['episodic_return']
elif 'episode' in info: # procgen env
ret = info['episode']['r']
else:
return
if ret is not None:
self.logger.add_scalar('episodic_return_train', ret, self.total_steps + offset)
if not has_flag(self.config.train, 'hide_episodic_return'):
self.logger.info('steps %d, episodic_return_train %s' % (self.total_steps + offset, ret))
elif isinstance(info, (tuple, list)):
for i, info_ in enumerate(info):
self.record_online_return(info_, i)
else:
raise NotImplementedError
def switch_task(self):
config = self.config
# if not config.tasks:
if not hasattr(config, 'tasks'):
return
segs = np.linspace(0, config.max_steps, len(config.tasks) + 1)
if self.total_steps > segs[self.task_ind + 1]:
self.task_ind += 1
self.task = config.tasks[self.task_ind]
self.states = self.task.reset()
self.states = config.state_normalizer(self.states)
def record_episode(self, dir, env):
mkdir(dir)
steps = 0
state = env.reset()
while True:
self.record_obs(env, dir, steps)
action = self.record_step(state)
state, reward, done, info = env.step(action)
ret = info[0]['episodic_return']
steps += 1
if ret is not None:
break
def record_step(self, state):
raise NotImplementedError
# For DMControl
def record_obs(self, env, dir, steps):
env = env.env.envs[0]
obs = env.render(mode='rgb_array')
imsave('%s/%04d.png' % (dir, steps), obs)
# from ..component.envs import LazyFrames
class BaseActor(mp.Process):
STEP = 0
RESET = 1
EXIT = 2
SPECS = 3
NETWORK = 4
CACHE = 5
def __init__(self, config, lock):
mp.Process.__init__(self)
self.config = config
self.lock = lock
self.state_normalizer = build_normalizer(config.RL.state_normalizer)
self.__pipe, self.__worker_pipe = mp.Pipe()
self._state = None
self._task = None
self._network = None
self._total_steps = 0
self.__cache_len = 2
if not config.DQN.async_actor:
self.start = lambda: None
self.step = self._sample
self._set_up()
# self._task = self.task_fn()
self._task = self.build_task()
def build_task(self):
config = self.config
return Task(config.task.full_name, **dict(config.other))
def _sample(self):
transitions = []
for _ in range(self.config.DQN.sgd_update_frequency):
transitions.append(self._transition())
return transitions
def run(self):
self._set_up()
config = self.config
# self._task = self.task_fn()
self._task = self.build_task()
if hasattr(self.config.other, 'save_all_experience'):
import h5py
self.h5_data = h5py.File(self.config.other.save_all_experience, mode='w')
cache = deque([], maxlen=2)
while True:
op, data = self.__worker_pipe.recv()
if op == self.STEP:
if not len(cache):
cache.append(self._sample())
cache.append(self._sample())
self.__worker_pipe.send(cache.popleft())
cache.append(self._sample())
elif op == self.EXIT:
self.__worker_pipe.close()
if hasattr(self.config.other, 'save_all_experience'):
self.h5_data.close()
print('@@@@@@@@@@@@@@@@ close h5')
return
elif op == self.NETWORK:
self._network = data
else:
raise NotImplementedError
def _transition(self):
raise NotImplementedError
def _set_up(self):
pass
def step(self):
self.__pipe.send([self.STEP, None])
return self.__pipe.recv()
def close(self):
if self.config.DQN.async_actor:
self.__pipe.send([self.EXIT, None])
self.__pipe.close()
def set_network(self, net):
if not self.config.DQN.async_actor:
self._network = net
else:
self.__pipe.send([self.NETWORK, net])
from ..utils.logger import EvalResultsWriter
class BaseEvaluator(mp.Process):
EVAL = 0
EXIT = 1
NETWORK = 2
# LOG = 3
def __init__(self, config, lock, logger):
mp.Process.__init__(self)
self.config = config
self.lock = lock
self.logger = logger
self.state_normalizer = build_normalizer(config.RL.state_normalizer)
if config.eval.is_async:
self.__pipe, self.__worker_pipe = mp.Pipe()
self.task = None
self.network_outside = None # this is just a handle
self.network = None
else:
self.start = lambda: None
self.close = lambda: None
self.eval_episodes = self._eval_episodes
self._set_up()
# self.task = self.task_fn()
self.task = self.build_task()
self.network = build_network(config)
# self.results_writer = self.results_writer_fn()
self.results_writer = self.build_writer()
def build_task(self):
config = self.config
return Task(config.task.full_name,
num_envs=config.eval.n_episodes if config.eval.parallel else 1,
single_process=not config.eval.env_subprocess,
**dict(config.other))
def build_writer(self):
config = self.config
return EvalResultsWriter('{:s}/eval'.format(config.final_output_dir) if config.final_output_dir else None,
header={'env_id' : config.task.full_name})
def run(self):
self._set_up()
random_seed()
config = self.config
# self.task = self.task_fn()
self.task = self.build_task()
self.network = build_network(config)
# self.results_writer = self.results_writer_fn()
self.results_writer = self.build_writer()
while True:
op, data = self.__worker_pipe.recv()
if op == self.EVAL:
eval_done = self._eval_episodes(data)
self.__worker_pipe.send(eval_done)
# steps, mean, std = self._eval_episodes(data)
# self.__worker_pipe.send((steps, mean, std))
elif op == self.EXIT:
self.__worker_pipe.close()
return
elif op == self.NETWORK:
self.network_outside = data
else:
raise NotImplementedError
def _set_up(self):
pass
def close(self):
self.__pipe.send([self.EXIT, None])
self.__pipe.close()
def set_network(self, net):
if not self.config.eval.is_async:
self.network_outside = net
else:
self.__pipe.send([self.NETWORK, net])
def query_eval_done(self):
eval_done = self.__pipe.recv()
return eval_done
def eval_episodes(self, current_steps):
self.__pipe.send([self.EVAL, current_steps])
def eval_single_episode(self):
env = self.task
state = env.reset()
while True:
action = self.eval_step(state)
state, reward, done, info = env.step(action)
ret = info[0]['episodic_return']
if ret is not None:
break
return ret
def _eval_episodes(self, steps):
with self.lock: # copy the network weight
self.network.load_state_dict(self.network_outside.state_dict())
self.network.eval()
if self.config.eval.parallel:
episodic_returns = self.eval_episode_parallel()
else:
episodic_returns = self.eval_episode_sequential()
# print('@@@@@@@@@@@@@@@@@@@@ eval done')
self.logger.info('steps %d, *** episodic_return_test %.3f (std = %.2f)' % (
steps, np.mean(episodic_returns), np.std(episodic_returns)
))
self.logger.add_scalar('episodic_return_test', np.mean(episodic_returns), steps)
self.results_writer.write_row(steps, episodic_returns)
return True
# return steps, np.mean(episodic_returns), np.std(episodic_returns)
def eval_episode_parallel(self):
episodic_returns = [ None for _ in range(self.config.eval.n_episodes) ]
done_cnt = 0
env = self.task
state = env.reset()
step_cnt = 0
while True:
step_cnt += 1
action = self.eval_step(state)
state, reward, done, info = env.step(action)
for i_env, _info in enumerate(info):
ret = _info['episodic_return']
if episodic_returns[i_env] is None and ret is not None:
episodic_returns[i_env] = ret
done_cnt += 1
if done_cnt >= self.config.eval.n_episodes:
# print('@@@@@@@@ eval step cnt:', step_cnt)
return episodic_returns
def eval_episode_sequential(self):
episodic_returns = []
for ep in range(self.config.eval.n_episodes):
total_rewards = self.eval_single_episode()
episodic_returns.append(np.sum(total_rewards))
return episodic_returns
def eval_step(self, state):
raise NotImplementedError
|
[STATEMENT]
lemma pgwt_public: "\<lbrakk>public_ground_wf_term t; Fun f T \<sqsubseteq> t\<rbrakk> \<Longrightarrow> public f"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>public_ground_wf_term t; Fun f T \<sqsubseteq> t\<rbrakk> \<Longrightarrow> public f
[PROOF STEP]
by (induct t rule: public_ground_wf_term.induct) auto |
import group_theory.group_action.option
import mathlib.group_action
import mathlib.pointwise
import phase1.allowable
noncomputable theory
open function set with_bot
open_locale cardinal pointwise
universe u
namespace con_nf
variables [params.{u}] [position_data.{}]
open code Iio_index
variables (α : Λ) [core_tangle_cumul α] {β : Iio_index α} {γ : Iio α}
abbreviation extensions := Π β : Iio α, set (tangle β)
namespace semitangle
variables [positioned_tangle_cumul α] [almost_tangle_cumul α]
/-- Keeps track of the preferred extension of a semitangle, along with coherence conditions
relating each extension of the semitangle. -/
@[nolint has_nonempty_instance] inductive preference (members : extensions α)
| base (atoms : set (tangle (⊥ : Iio_index α))) :
(∀ γ, A_map bot_ne_coe atoms = members γ) →
preference
| proper (β : Iio α) :
(mk β (members β) : code α).is_even →
(∀ (γ : Iio α) (hβγ : Iio_coe β ≠ γ), A_map hβγ (members β) = members γ) →
preference
variables {α} {members : extensions α}
/-- The `-1`-extension associated with a given semitangle extension. -/
def preference.atoms : preference α members → set atom
| (preference.base atoms _) := (atoms : set (tangle ⊥))
| (preference.proper _ _ _) := ∅
lemma preference.base_heq_base {m₁ m₂ : extensions α} {s₁ s₂ h₁ h₂}
(hm : m₁ = m₂) (hs : s₁ = s₂) :
(preference.base s₁ h₁ : preference α m₁) == (preference.base s₂ h₂ : preference α m₂) :=
by cases hm; cases hs; refl
lemma preference.proper_heq_proper {m₁ m₂ : extensions α} {β₁ β₂ h₁ h₂ h₃ h₄}
(hm : m₁ = m₂) (hs : β₁ = β₂) :
(preference.proper β₁ h₁ h₂ : preference α m₁) ==
(preference.proper β₂ h₃ h₄ : preference α m₂) :=
by cases hm; cases hs; refl
end semitangle
open semitangle
variables [positioned_tangle_cumul α] [almost_tangle_cumul α]
/-- A *semitangle* may become an element of our model of tangled type theory.
We keep track of its members, written as tangles of all lower levels `β < α`. -/
@[nolint has_nonempty_instance]
structure semitangle :=
(members : extensions α)
(pref : preference α members)
variables {α}
namespace semitangle
/-- The membership relation for nonempty semitangles. -/
def mem (t : tangle γ) (s : semitangle α) : Prop := t ∈ s.members γ
notation t ` ∈ₛₜ `:50 s:50 := mem t s
/-- The even code associated to a nonempty semitangle. -/
def repr_code : semitangle α → code α
| ⟨exts, preference.base atoms hA⟩ := ⟨⊥, atoms⟩
| ⟨exts, preference.proper β rep hA⟩ := ⟨β, exts β⟩
@[simp] lemma repr_code_base (exts : extensions α) (atoms hA) :
repr_code ⟨exts, preference.base atoms hA⟩ = ⟨⊥, atoms⟩ := rfl
@[simp] lemma repr_code_proper (exts : extensions α) (β rep hA) :
repr_code ⟨exts, preference.proper β rep hA⟩ = ⟨β, exts β⟩ := rfl
lemma repr_code_spec : Π (s : semitangle α), (repr_code s : code α).is_even
| ⟨exts, preference.proper β rep hA⟩ := rep
| ⟨exts, preference.base atoms hA⟩ := is_even_bot _
lemma repr_code_members_ne :
Π (s : semitangle α) (γ : Iio α) (hcγ : (repr_code s : code α).1 ≠ γ),
(A_map_code γ (repr_code s)).2 = s.members γ
| ⟨exts, preference.proper β rep hA⟩ γ hcγ := by rw snd_A_map_code; exact hA _ hcγ
| ⟨exts, preference.base atoms hA⟩ γ hcγ := hA _
-- Remark: This formulation of extensionality holds only for types larger than type zero, since
-- it doesn't take into account any `-1`-extension.
lemma ext_core (x y : semitangle α) : (∃ γ, γ < α) → x.members = y.members → x = y :=
begin
obtain ⟨xs, hxs⟩ := x,
obtain ⟨ys, hys⟩ := y,
dsimp,
rintro ⟨γ, hγ⟩ rfl,
have γ : Iio α := ⟨γ, hγ⟩,
refine congr_arg (λ h, ⟨xs, h⟩) _,
obtain ⟨atoms₁, hA₁⟩ | ⟨β, even₁, hA₁⟩ := hxs;
obtain ⟨atoms₂, hA₂⟩ | ⟨γ, even₂, hA₂⟩ := hys,
{ simp_rw A_map_injective ((hA₁ γ).trans (hA₂ _).symm) },
{ cases (is_even_bot _).A_map_code_ne even₂ bot_ne_mk_coe (sigma.ext_iff.2 ⟨rfl, (hA₁ γ).heq⟩) },
{ cases (is_even_bot _).A_map_code_ne even₁ bot_ne_mk_coe (sigma.ext_iff.2 ⟨rfl, (hA₂ β).heq⟩) },
{ simp only,
refine not_ne_iff.1 (λ hβγ, even₂.A_map_code_ne even₁ (Iio.coe_injective.ne hβγ.symm) $
sigma.ext_iff.2 ⟨rfl, heq_of_eq _⟩),
rw snd_A_map_code,
exact hA₂ β (λ h, hβγ.symm (Iio.coe_injective h)) }
end
/-- One useful form of extensionality in tangled type theory. Two nonempty semitangles are equal if
their even codes are equivalent (and hence equal, by uniqueness). -/
lemma ext_code : ∀ {x y : semitangle α}, (repr_code x : code α) ≡ repr_code y → x = y
| ⟨x, preference.base atoms₁ hA₁⟩ ⟨y, preference.base atoms₂ hA₂⟩ h := begin
obtain rfl := code.equiv.bot_bot_iff.1 h,
obtain rfl : x = y := funext (λ γ, (hA₁ _).symm.trans $ hA₂ _),
refl,
end
| ⟨x, preference.base s hA₁⟩ ⟨y, preference.proper γ even₂ hA₂⟩ h := begin
change code.mk _ _ ≡ code.mk _ _ at h,
obtain ⟨δ, hδ⟩ := (code.equiv.bot_left_iff.1 h).resolve_left
(ne_of_apply_ne sigma.fst bot_ne_mk_coe),
rw hδ at even₂,
cases even₂.not_is_odd ((is_even_bot _).A_map_code bot_ne_mk_coe),
end
| ⟨x, preference.proper γ even₁ hA₁⟩ ⟨y, preference.base s hA₂⟩ h := begin
change code.mk _ _ ≡ code.mk _ _ at h,
obtain ⟨δ, hδ⟩ := (code.equiv.bot_right_iff.1 h).resolve_left
(ne_of_apply_ne sigma.fst mk_coe_ne_bot),
rw hδ at even₁,
cases even₁.not_is_odd ((is_even_bot _).A_map_code bot_ne_mk_coe),
end
| ⟨x, preference.proper γ even₁ hA₁⟩ ⟨y, preference.proper δ even₂ hA₂⟩ h := begin
dsimp at h,
simp only [code.equiv_iff, sigma.ext_iff, mem_Iio, Iio.coe_inj, ne.def, fst_A_map_code,
snd_A_map_code, Iio.coe_mk] at h,
obtain ⟨rfl, h⟩ | ⟨-, γ, hδγ, rfl, h⟩ | ⟨-, δ, hγδ, rfl, h⟩ |
⟨c, hc, γ, hcγ, δ, hcδ, ⟨⟨rfl, hx'⟩, hx⟩, _⟩ := h,
{ suffices : x = y,
{ subst this },
refine funext (λ ε, _),
obtain rfl | hδε := eq_or_ne δ ε,
{ exact h.eq.symm },
refine (hA₁ _ (λ h, hδε (Iio.coe_injective h))).symm.trans
(eq.trans _ $ hA₂ _ (λ h, hδε (Iio.coe_injective h))),
dsimp,
rw h.eq },
{ rw h.eq at even₁,
cases (even₂.A_map_code $ Iio.coe_injective.ne hδγ).not_is_even even₁ },
{ rw h.eq at even₂,
cases (even₁.A_map_code $ Iio.coe_injective.ne hγδ).not_is_even even₂ },
{ rw hx.eq at even₁,
cases (hc.A_map_code hcγ).not_is_even even₁ }
end
/-- Extensionality in tangled type theory. Two nonempty semitangles are equal if their
`β`-extensions are equal for *any* choice of `γ < α`.
TODO: This proof can be golfed quite a bit just by cleaning up the `simp` calls. -/
lemma ext (x y : semitangle α) (h : x.members γ = y.members γ) : x = y :=
begin
obtain ⟨xs, hxs⟩ := x,
obtain ⟨ys, hys⟩ := y,
dsimp only at h,
refine ext_code _,
obtain ⟨atoms₁, hA₁⟩ | ⟨β, even₁, hA₁⟩ := hxs;
obtain ⟨atoms₂, hA₂⟩ | ⟨δ, even₂, hA₂⟩ := hys,
{ refine (code.equiv.A_map_right _ (code.is_even_bot _) γ bot_ne_mk_coe).trans _,
simp only [ne.def, Iio_index.bot_ne_coe, not_false_iff, A_map_code_mk_ne,
repr_code_base, subtype.coe_mk],
rw [hA₁ γ, h, ← hA₂ γ],
exact code.equiv.A_map_left _ (code.is_even_bot _) γ bot_ne_mk_coe },
{ simp only [repr_code_base, subtype.coe_mk, repr_code_proper],
obtain rfl | hδγ := eq_or_ne δ γ,
{ simp only [is_even_bot, mem_Iio, subtype.val_eq_coe, set_coe.forall,
ne.def, Iio.coe_inj] at *,
have := hA₁ δ δ.prop,
rw subtype.coe_eta at this,
rw [← h, ← this],
exact code.equiv.A_map_right _ (code.is_even_bot _) _ bot_ne_mk_coe },
{ refine (code.equiv.A_map_right _ (code.is_even_bot _) γ bot_ne_mk_coe).trans _,
simp only [ne.def, Iio_index.bot_ne_coe, not_false_iff, A_map_code_mk_ne],
rw [hA₁ γ, h, ←hA₂ γ (Iio.coe_injective.ne hδγ), ← A_map_code_mk_ne],
exact code.equiv.A_map_left _ even₂ γ (Iio.coe_injective.ne hδγ) } },
{ simp only [repr_code_proper, subtype.coe_mk, repr_code_base],
obtain rfl | hβγ := eq_or_ne β γ,
{ dsimp only [mem_Iio, ne.def, subtype.val_eq_coe, set_coe.forall] at *,
rw [h, ←hA₂ β],
exact code.equiv.A_map_left _ (code.is_even_bot _) _ bot_ne_mk_coe },
{ refine (code.equiv.A_map_right _ even₁ γ $ Iio.coe_injective.ne hβγ).trans _,
dsimp only [mem_Iio, ne.def, subtype.val_eq_coe, set_coe.forall] at *,
rw A_map_code_mk_ne _ _ (Iio.coe_injective.ne hβγ),
rw [hA₁ γ (Iio.coe_injective.ne hβγ), h, ←hA₂ γ],
exact code.equiv.A_map_left _ (code.is_even_bot _) γ bot_ne_mk_coe } },
{ simp only [repr_code_proper, subtype.coe_mk],
obtain rfl | hβγ := eq_or_ne β γ,
{ obtain rfl | hδβ := eq_or_ne δ β,
{ rw h },
{ have := A_map_code_ne β (code.mk δ (ys δ)) (Iio.coe_injective.ne hδβ),
dsimp only [mem_Iio, ne.def, subtype.val_eq_coe, set_coe.forall, code.snd_mk] at *,
rw [h, ←hA₂ _ (Iio.coe_injective.ne hδβ), ← code.mk_def, ← this],
exact code.equiv.A_map_left _ even₂ _ (Iio.coe_injective.ne hδβ) } },
obtain rfl | hδγ := eq_or_ne δ γ,
{ have := A_map_code_ne δ (code.mk β (xs β)) (Iio.coe_injective.ne hβγ),
dsimp only [mem_Iio, ne.def, subtype.val_eq_coe, set_coe.forall, code.snd_mk] at *,
simp_rw [←h, ←hA₁ _ (Iio.coe_injective.ne hβγ), ← code.mk_def, ← this],
exact code.equiv.A_map_right _ even₁ _ (Iio.coe_injective.ne hβγ) },
refine (code.equiv.A_map_right _ even₁ γ $ Iio.coe_injective.ne hβγ).trans _,
have := A_map_code_ne γ (code.mk ↑δ (ys δ)) (Iio.coe_injective.ne hδγ),
dsimp only [mem_Iio, ne.def, subtype.val_eq_coe, set_coe.forall, code.snd_mk] at *,
rw A_map_code_ne,
rw [code.snd_mk, hA₁ γ (Iio.coe_injective.ne hβγ), h, ←hA₂ γ (Iio.coe_injective.ne hδγ)],
rw ← this,
exact code.equiv.A_map_left _ even₂ γ (Iio.coe_injective.ne hδγ) }
end
/-- Extensionality in tangled type theory. Two nonempty semitangles are equal if their
`β`-extensions are equal for *any* choice of `β < α`. -/
lemma ext' (x y : semitangle α) (h : ∀ t : tangle γ, t ∈ₛₜ x ↔ t ∈ₛₜ y) : x = y :=
ext x y $ set.ext h
/-- Extensionality at the lowest level of tangled type theory.
At type 0, all nonempty semitangles have a `-1`-extension.
Therefore, the extensionality principle in this case applies to the `-1`-extensions. -/
lemma ext_zero (x y : semitangle α) (α_zero : is_min α) (h : x.pref.atoms = y.pref.atoms) :
x = y :=
begin
obtain ⟨xs, ⟨atoms₁, hA₁⟩ | ⟨γ, _, _⟩⟩ := x, swap,
{ cases α_zero.not_lt γ.2 },
obtain ⟨ys, ⟨atoms₂, hA₂⟩ | ⟨γ, _, _⟩⟩ := y, swap,
{ cases α_zero.not_lt γ.2 },
have : atoms₁ = atoms₂ := h,
subst this,
suffices : xs = ys, by subst this,
ext β -,
cases α_zero.not_lt β.2,
end
/-- Construct a semitangle from an even nonempty code. -/
def intro (s : set (tangle β)) (heven : (code.mk β s : code α).is_even) : semitangle α :=
⟨extension s, match β, s, heven with
| ⟨⊥, _⟩, s, _ := preference.base s $ λ β, rfl
| ⟨(γ : Λ), hγ⟩, s, heven := preference.proper ⟨γ, coe_lt_coe.1 hγ⟩
(by { convert heven, exact extension_self (show set (tangle $ Iio_coe ⟨γ, _⟩), from s) }) $
λ δ hδ, by { rw extension_ne s δ hδ, congr,
exact extension_self (show set (tangle $ Iio_coe ⟨γ, _⟩), from s) }
end⟩
@[simp] lemma exts_intro (s : set (tangle β)) (heven) :
(intro s heven).members = extension s := rfl
end semitangle
open semitangle
variables [core_tangle_data α]
namespace allowable_perm
variables {f : allowable_perm α} {e : extensions α}
@[simp] lemma smul_extension_apply (f : allowable_perm α) (s : set (tangle β)) :
f • extension s γ = extension (f • s) γ :=
begin
by_cases β = γ,
{ subst h,
simp only [extension_eq, cast_eq], },
{ simp only [extension_ne _ _ h, smul_A_map], },
end
@[simp] lemma smul_extension (f : allowable_perm α) (s : set (tangle β)) :
f • extension s = extension (f • s) :=
begin
ext γ : 1,
rw ← smul_extension_apply,
refl,
end
lemma smul_aux₁ {s : set (tangle (⊥ : Iio_index α))}
(h : ∀ (γ : Iio α), A_map bot_ne_coe s = (e γ : set (tangle (Iio_coe γ)))) (γ : Iio α) :
A_map bot_ne_coe (f • s) = (f • e) γ :=
by simpa only [smul_A_map] using congr_arg (λ c, f • c) (h γ)
lemma smul_aux₂ (h : ∀ (δ : Iio α) (hγδ : Iio_coe γ ≠ δ),
A_map hγδ (e γ) = (e δ : set (tangle (Iio_coe δ)))) (δ : Iio α) (hγδ : Iio_coe γ ≠ δ) :
A_map hγδ ((f • e) γ) = (f • e) δ :=
by simpa only [smul_A_map] using congr_arg (λ c, f • c) (h δ hγδ)
/-- Allowable permutations act on nonempty semitangles. -/
noncomputable! instance : has_smul (allowable_perm α) (semitangle α) :=
{ smul := λ f t, ⟨f • t.members, begin
obtain ⟨members, ⟨s, h⟩ | ⟨γ, ht, h⟩⟩ := t,
{ exact preference.base (f • s) (smul_aux₁ h) },
{ exact preference.proper _ ht.smul (smul_aux₂ h) }
end⟩ }
@[simp] lemma members_smul (f : allowable_perm α) (s : semitangle α) :
(f • s).members = f • s.members := rfl
@[simp] lemma smul_base (f : allowable_perm α) (e : extensions α) (s h) :
f • (⟨e, preference.base s h⟩ : semitangle α) =
⟨f • e, preference.base (f • s) (smul_aux₁ h)⟩ := rfl
@[simp] lemma smul_proper (f : allowable_perm α) (e : extensions α) (γ ht h) :
f • (⟨e, preference.proper γ ht h⟩ : semitangle α) =
⟨f • e, preference.proper _ ht.smul (smul_aux₂ h)⟩ := rfl
instance mul_action_semitangle : mul_action (allowable_perm α) (semitangle α) := {
one_smul := begin
rintro ⟨exts, ⟨s, h⟩ | ⟨γ, ht, h⟩⟩,
{ rw smul_base,
simp only [one_smul, eq_self_iff_true, true_and],
refine preference.base_heq_base _ _,
rw one_smul,
refl, },
{ rw smul_proper,
simp only [one_smul, eq_self_iff_true, true_and],
refine semitangle.preference.proper_heq_proper _ rfl,
rw one_smul, },
end,
mul_smul := begin
rintro f g ⟨exts, ⟨s, h⟩ | ⟨γ, ht, h⟩⟩,
{ simp only [smul_base, mul_smul, eq_self_iff_true, true_and],
refine preference.base_heq_base _ _,
rw mul_smul,
refl, },
{ simp only [smul_proper, mul_smul, eq_self_iff_true, true_and],
refine semitangle.preference.proper_heq_proper _ rfl,
rw mul_smul, },
end
}
end allowable_perm
variables (α)
/-- A tangle at the new level `α` is a semitangle supported by a small support.
This is `τ_α` in the blueprint.
Unlike the type `tangle`, this is not an opaque definition, and we can inspect and unfold it. -/
@[nolint has_nonempty_instance]
def new_tangle := {s : semitangle α // supported α (allowable_perm α) s}
variables {α} {c d : code α} {S : set (support_condition α)}
open mul_action
/-- If a set of support conditions supports a code, it supports all equivalent codes. -/
protected lemma code.equiv.supports (hcd : c ≡ d) (hS : supports (allowable_perm α) S c) :
supports (allowable_perm α) S d :=
λ f h, (hcd.symm.smul.trans $ (code.equiv.of_eq $ hS f h).trans hcd).unique rfl
lemma code.equiv.supports_iff (hcd : c ≡ d) :
supports (allowable_perm α) S c ↔ supports (allowable_perm α) S d :=
⟨hcd.supports, hcd.symm.supports⟩
/-- If two codes are equivalent, one is supported if and only if the other is. -/
lemma code.equiv.small_supported_iff (hcd : c ≡ d) :
supported α (allowable_perm α) c ↔ supported α (allowable_perm α) d :=
⟨λ ⟨⟨s, hs, h⟩⟩, ⟨⟨s, hs, hcd.supports h⟩⟩, λ ⟨⟨s, hs, h⟩⟩, ⟨⟨s, hs, hcd.symm.supports h⟩⟩⟩
@[simp] lemma smul_intro (f : allowable_perm α) (s : set (tangle β)) (hs) :
f • intro s hs = intro (f • s) hs.smul :=
begin
cases β,
induction β_val using with_bot.rec_bot_coe,
{ simp only [intro, allowable_perm.smul_base, allowable_perm.smul_extension,
eq_self_iff_true, true_and],
refine preference.base_heq_base _ rfl,
rw allowable_perm.smul_extension },
{ simp only [intro, allowable_perm.smul_proper, allowable_perm.smul_extension,
eq_self_iff_true, true_and],
refine preference.proper_heq_proper _ rfl,
rw allowable_perm.smul_extension }
end
-- TODO: Move next two lemmas elsewhere.
lemma allowable_to_struct_perm_bot (π : allowable (⊥ : Iio_index α)) :
core_tangle_data.allowable_to_struct_perm π = struct_perm.to_bot_iso.to_monoid_hom π := rfl
lemma _root_.con_nf.semiallowable_perm.to_allowable_bot (π : semiallowable_perm α) :
semiallowable_perm.to_allowable ⊥ π = struct_perm.to_near_litter_perm
(semiallowable_perm.to_struct_perm π) :=
begin
unfold semiallowable_perm.to_allowable semiallowable_perm.to_struct_perm
struct_perm.to_near_litter_perm struct_perm.lower allowable.to_struct_perm,
rw dif_neg with_bot.bot_ne_coe,
simp only [monoid_hom.coe_mk, monoid_hom.coe_comp, mul_equiv.coe_to_monoid_hom,
comp_app, struct_perm.of_coe_to_coe, allowable_to_struct_perm_bot, mul_equiv.symm_apply_apply],
refl,
end
/-- For any near-litter `N`, the code `(α, -1, N)` is a tangle at level `α`.
This is called a *typed near litter*. -/
def new_typed_near_litter (N : near_litter) : new_tangle α :=
⟨intro (show set (tangle (⊥ : Iio_index α)), from N.2.1) $
code.is_even_bot _, ⟨⟨{(sum.inr N, default)}, small_singleton _, λ π h, begin
simp only [subtype.val_eq_coe, option.smul_some, smul_intro, option.some_inj],
have := show (struct_perm.lower (bot_lt_coe α).le (semiallowable_perm.to_struct_perm ↑π)) •
sum.inr N = sum.inr N, from congr_arg prod.fst (h rfl),
simp only [sum.smul_inr] at this,
have : π • N = N := this,
conv_rhs { rw ← this },
congr' 1,
ext : 1,
simp only [coe_smul_nonempty, subtype.coe_mk, allowable_perm.snd_smul_near_litter],
unfold has_smul.smul has_smul.comp.smul,
simp only [semiallowable_perm.to_allowable_bot (allowable_perm.coe_hom π)],
end⟩⟩⟩
/-- For any supported tangle `x`, the code `(α, β, {x})` is a tangle at level `α`. -/
def supported_singleton (x : tangle β) (supp : supported α (allowable_perm α) x) :
new_tangle α :=
⟨intro {x} (code.is_even_singleton _), begin
unfreezingI { obtain ⟨s, hs₁, hs₂⟩ := supp },
refine ⟨⟨s, hs₁, λ π h, _⟩⟩,
conv_rhs { rw ← hs₂ π h },
simp only [smul_set_singleton, smul_nonempty_mk, option.smul_some, smul_intro],
end⟩
/-- For any small set `B` of supported `β`-tangles, the code `(α, β, B)` is a tangle at level `α` if
it is even. -/
def supported_set (s : set (tangle β)) (hs : small s) (hc : (mk β s).is_even)
(symm : ∀ b ∈ s, supported α (allowable_perm α) b) :
new_tangle α :=
⟨intro s hc, begin
have symm : Π b ∈ s, support α (allowable_perm α) b,
{ intros b hb, exact (symm b hb).some },
refine ⟨⟨⋃ b ∈ s, symm b ‹_›, hs.bUnion (λ i hi, (symm _ _).small), λ π h, _⟩⟩,
suffices : π • s = s,
{ simp only [option.smul_some, smul_intro, option.some_inj, this] },
have : ∀ x ∈ s, π • x = x,
{ intros x hx,
refine (symm x hx).supports π _,
intros a ha,
refine h _,
simp only [mem_Union, set_like.mem_coe],
refine ⟨x, hx, ha⟩ },
ext : 2,
refine ⟨λ hx, _, λ hx, _⟩,
{ have := this (π⁻¹ • x) _,
{ rw smul_inv_smul at this,
rw this,
rwa ←mem_smul_set_iff_inv_smul_mem },
{ rwa ←mem_smul_set_iff_inv_smul_mem } },
{ rw ← this x hx,
exact smul_mem_smul_set hx }
end⟩
variables {α}
namespace new_tangle
instance : has_coe (new_tangle α) (semitangle α) := coe_subtype
lemma coe_injective : injective (coe : new_tangle α → semitangle α) := subtype.coe_injective
end new_tangle
namespace allowable_perm
/-- Allowable permutations act on `α`-tangles. -/
--Yaël: I suspect we can generalize `supports.smul` so that it applies here
instance has_smul_new_tangle : has_smul (allowable_perm α) (new_tangle α) :=
⟨λ π t, ⟨π • t, t.2.map $ λ s, { carrier := π • s, small := s.2.image, supports := begin
intros σ h,
have := s.supports (π⁻¹ * σ * π) _,
{ conv_rhs { rw [← subtype.val_eq_coe, ← this, ← mul_smul, ← mul_assoc, ← mul_assoc,
mul_inv_self, one_mul, mul_smul] },
refl },
{ intros a ha,
rw [mul_smul, mul_smul, inv_smul_eq_iff],
exact h (smul_mem_smul_set ha) },
end }⟩⟩
@[simp, norm_cast] lemma coe_smul_new_tangle (f : allowable_perm α) (t : new_tangle α) :
(↑(f • t) : semitangle α) = f • t := rfl
instance mul_action_new_tangle : mul_action (allowable_perm α) (new_tangle α) :=
new_tangle.coe_injective.mul_action _ coe_smul_new_tangle
end allowable_perm
end con_nf
|
"""
ArithmeticBlocks
"""
export MathFunctionBlock
mutable struct MathFunctionBlock <: AbstractArithmeticBlock
inport::Vector{InPort}
outport::Vector{OutPort}
op::Symbol
function MathFunctionBlock(op::Symbol)
@createblock new(Vector{InPort}(), Vector{OutPort}(), op) 1 1
end
end
"""
Pretty-print a `MathFunctionBlock`.
"""
function Base.show(io::IO, blk::MathFunctionBlock)
println(io, "MathFunctionBlock($(blk.op))")
end
"""
Convert the block to a Julia `Expr`.
"""
function _toexpr(blk::MathFunctionBlock)
if blk.op == :reciprocal
Expr(:call, :/, 1, _toexpr(blk.inport[1]))
elseif blk.op == :square
Expr(:call, :^, _toexpr(blk.inport[1]), 2)
else
error("Unsupported MathFunctionBlock op: $(blk.op)")
end
end
|
Formal statement is: lemma measure_eqI: fixes M N :: "'a measure" assumes "sets M = sets N" and eq: "\<And>A. A \<in> sets M \<Longrightarrow> emeasure M A = emeasure N A" shows "M = N" Informal statement is: If two measures agree on all measurable sets, then they are equal. |
///////////////////////////////////////////////////////////////////////////////
// Copyright Christopher Kormanyos 2015.
// Copyright Nikhar Agrawal 2015.
// Copyright Paul Bristow 2015.
// Distributed under the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)
//! \file
//!\brief Tests for the transcendental logarithm function of (fixed_point) for a tiny digit range.
#define BOOST_TEST_MODULE test_negatable_func_log_tiny
#define BOOST_LIB_DIAGNOSTIC
#include <cmath>
#include <boost/fixed_point/fixed_point.hpp>
#include <boost/test/included/unit_test.hpp>
namespace local
{
template<typename FixedPointType>
const FixedPointType& tolerance_maker(const int fuzzy_bits)
{
static const FixedPointType the_tolerance = ldexp(FixedPointType(1), FixedPointType::resolution + fuzzy_bits);
return the_tolerance;
}
template<typename FixedPointType,
typename FloatPointType = typename FixedPointType::float_type>
void test_log(const int fuzzy_bits)
{
// Use at least 10 resolution bits.
// Use at least 4 range bits.
BOOST_STATIC_ASSERT(-FixedPointType::resolution >= 10);
BOOST_STATIC_ASSERT( FixedPointType::range >= 4);
const FixedPointType a1(+2L ); const FloatPointType b1(+2L );
const FixedPointType a2(+3L ); const FloatPointType b2(+3L );
const FixedPointType a3(+4.375L); const FloatPointType b3(+4.375L);
const FixedPointType a4(+1.125L); const FloatPointType b4(+1.125L);
const FixedPointType a5(+0.125L); const FloatPointType b5(+0.125L);
const FixedPointType a6(+0.875L); const FloatPointType b6(+0.875L);
const FixedPointType a7(FixedPointType( 1) / 3); const FloatPointType b7(FloatPointType( 1) / 3);
const FixedPointType a8(FixedPointType(13) / 10); const FloatPointType b8(FloatPointType(13) / 10);
const FixedPointType a9(boost::math::constants::pi<FixedPointType>()); const FloatPointType b9(boost::math::constants::pi<FloatPointType>());
using std::log;
BOOST_CHECK_CLOSE_FRACTION(log(a1), FixedPointType(log(b1)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a2), FixedPointType(log(b2)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a3), FixedPointType(log(b3)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a4), FixedPointType(log(b4)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a5), FixedPointType(log(b5)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a6), FixedPointType(log(b6)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a7), FixedPointType(log(b7)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a8), FixedPointType(log(b8)), tolerance_maker<FixedPointType>(fuzzy_bits));
BOOST_CHECK_CLOSE_FRACTION(log(a9), FixedPointType(log(b9)), tolerance_maker<FixedPointType>(fuzzy_bits));
}
}
BOOST_AUTO_TEST_CASE(test_negatable_func_log_tiny)
{
typedef boost::fixed_point::negatable<4, -11> fixed_point_type;
local::test_log<fixed_point_type>(3);
}
|
Barker was a devout Christian, and produced religious-themed works throughout her life. She published eight postcards and five guardian angel birthday cards for the Society for Promoting Christian Knowledge in 1916 and in 1923 respectively. Christmas cards were designed for The Girls' Friendly Society over a 20-year period, and the first three designs sold out a combined printing of 46,500 in 1923. An original design for the society called The Darling of the World Has Come was purchased by Queen Mary for £5.5.0 in 1926. The Croydon Art Society hung Barker's booklet cover design for the Society for the Propagation of the Gospel in its November 1919 exhibition.
|
{-# OPTIONS --cubical --no-import-sorts --safe #-}
module Cubical.HITs.Cost where
open import Cubical.HITs.Cost.Base
|
(*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(NICTA_GPL)
*)
theory Arch_AC
imports Retype_AC
begin
context begin interpretation Arch . (*FIXME: arch_split*)
text{*
Arch-specific access control.
*}
lemma store_pde_respects:
"\<lbrace>integrity aag X st and K (is_subject aag (p && ~~ mask pd_bits)) \<rbrace>
store_pde p pde
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (simp add: store_pde_def set_pd_def)
apply (wp get_object_wp set_object_integrity_autarch)
apply simp
done
lemma store_pte_respects:
"\<lbrace>integrity aag X st and K (is_subject aag (p && ~~ mask pt_bits)) \<rbrace>
store_pte p pte
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (simp add: store_pte_def set_pt_def)
apply (wp get_object_wp set_object_integrity_autarch)
apply simp
done
lemma integrity_arch_state [iff]:
"\<lbrakk>arm_asid_table v = arm_asid_table (arch_state s)\<rbrakk> \<Longrightarrow>
integrity aag X st (s\<lparr>arch_state := v\<rparr>) = integrity aag X st s"
unfolding integrity_def
by (simp )
lemma integrity_arm_asid_map [iff]:
"integrity aag X st (s\<lparr>arch_state := ((arch_state s)\<lparr>arm_asid_map := v\<rparr>)\<rparr>) = integrity aag X st s"
unfolding integrity_def
by (simp )
lemma integrity_arm_hwasid_table [iff]:
"integrity aag X st (s\<lparr>arch_state := ((arch_state s)\<lparr>arm_hwasid_table := v\<rparr>)\<rparr>) = integrity aag X st s"
unfolding integrity_def
by (simp )
lemma integrity_arm_next_asid [iff]:
"integrity aag X st (s\<lparr>arch_state := ((arch_state s)\<lparr>arm_next_asid := v\<rparr>)\<rparr>) = integrity aag X st s"
unfolding integrity_def
by (simp )
declare dmo_mol_respects [wp]
crunch respects[wp]: arm_context_switch "integrity X aag st"
(simp: dmo_bind_valid dsb_def isb_def writeTTBR0_def invalidateTLB_ASID_def
setHardwareASID_def setCurrentPD_def
ignore: do_machine_op)
crunch respects[wp]: find_pd_for_asid "integrity X aag st"
crunch respects[wp]: set_vm_root "integrity X aag st"
(wp: crunch_wps
simp: setCurrentPD_def isb_def dsb_def writeTTBR0_def dmo_bind_valid crunch_simps
ignore: do_machine_op)
crunch respects[wp]: set_vm_root_for_flush "integrity X aag st"
(wp: crunch_wps simp: setCurrentPD_def crunch_simps ignore: do_machine_op)
crunch respects[wp]: flush_table "integrity X aag st"
(wp: crunch_wps simp: invalidateTLB_ASID_def crunch_simps ignore: do_machine_op)
crunch respects[wp]: page_table_mapped "integrity X aag st"
lemma kheap_eq_state_vrefsD:
"kheap s p = Some ko \<Longrightarrow> state_vrefs s p = vs_refs_no_global_pts ko"
by (simp add: state_vrefs_def)
lemma kheap_eq_state_vrefs_pas_refinedD:
"\<lbrakk> kheap s p = Some ko;
(p', r, a) \<in> vs_refs_no_global_pts ko; pas_refined aag s \<rbrakk>
\<Longrightarrow> (pasObjectAbs aag p, a, pasObjectAbs aag p') \<in> pasPolicy aag"
apply (drule kheap_eq_state_vrefsD)
apply (erule pas_refined_mem[OF sta_vref, rotated])
apply simp
done
lemma find_pd_for_asid_authority1:
"\<lbrace>pas_refined aag\<rbrace>
find_pd_for_asid asid
\<lbrace>\<lambda>pd s. (pasASIDAbs aag asid, Control, pasObjectAbs aag pd) \<in> pasPolicy aag\<rbrace>,-"
apply (rule hoare_pre)
apply (simp add: find_pd_for_asid_def)
apply (wp | wpc)+
apply (clarsimp simp: obj_at_def pas_refined_def)
apply (erule subsetD, erule sata_asid_lookup)
apply (simp add: state_vrefs_def vs_refs_no_global_pts_def image_def)
apply (rule rev_bexI, erule graph_ofI)
apply (simp add: mask_asid_low_bits_ucast_ucast)
done
lemma find_pd_for_asid_authority2:
"\<lbrace>\<lambda>s. \<forall>pd. (\<forall>aag. pas_refined aag s \<longrightarrow> (pasASIDAbs aag asid, Control, pasObjectAbs aag pd) \<in> pasPolicy aag)
\<and> (pspace_aligned s \<and> valid_arch_objs s \<longrightarrow> is_aligned pd pd_bits)
\<and> (\<exists>\<rhd> pd) s
\<longrightarrow> Q pd s\<rbrace> find_pd_for_asid asid \<lbrace>Q\<rbrace>, -"
(is "\<lbrace>?P\<rbrace> ?f \<lbrace>Q\<rbrace>,-")
apply (clarsimp simp: validE_R_def validE_def valid_def imp_conjL[symmetric])
apply (frule in_inv_by_hoareD[OF find_pd_for_asid_inv], clarsimp)
apply (drule spec, erule mp)
apply (simp add: use_validE_R[OF _ find_pd_for_asid_authority1]
use_validE_R[OF _ find_pd_for_asid_aligned_pd_bits]
use_validE_R[OF _ find_pd_for_asid_lookup])
done
lemma find_pd_for_asid_pd_slot_authorised [wp]:
"\<lbrace>pas_refined aag and K (is_subject_asid aag asid) and pspace_aligned and valid_arch_objs\<rbrace>
find_pd_for_asid asid
\<lbrace>\<lambda>rv s. is_subject aag (lookup_pd_slot rv vptr && ~~ mask pd_bits) \<rbrace>, -"
apply (wp find_pd_for_asid_authority2)
apply (clarsimp simp: lookup_pd_slot_pd)
apply (fastforce dest: pas_refined_Control)
done
lemma unmap_page_table_respects:
"\<lbrace>integrity aag X st and pas_refined aag and invs
and K (is_subject_asid aag asid \<and> vaddr < kernel_base)\<rbrace>
unmap_page_table asid vaddr pt
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: unmap_page_table_def page_table_mapped_def )
apply (rule hoare_pre)
apply (wp store_pde_respects page_table_mapped_wp_weak get_pde_wp
hoare_vcg_all_lift_R dmo_mol_respects
| wpc
| simp add: cleanByVA_PoU_def
| wp_once hoare_drop_imps)+
apply auto
done
definition
authorised_page_table_inv :: "'a PAS \<Rightarrow> page_table_invocation \<Rightarrow> bool"
where
"authorised_page_table_inv aag pti \<equiv> case pti of
page_table_invocation.PageTableMap cap cslot_ptr pde obj_ref \<Rightarrow>
is_subject aag (fst cslot_ptr) \<and> is_subject aag (obj_ref && ~~ mask pd_bits)
\<and> (case_option True (is_subject aag o fst) (pde_ref2 pde))
\<and> pas_cap_cur_auth aag cap
| page_table_invocation.PageTableUnmap cap cslot_ptr \<Rightarrow>
is_subject aag (fst cslot_ptr) \<and> aag_cap_auth aag (pasSubject aag) cap
\<and> (\<forall>p asid vspace_ref. cap = cap.ArchObjectCap (arch_cap.PageTableCap p (Some (asid, vspace_ref)))
\<longrightarrow> is_subject_asid aag asid
\<and> (\<forall>x \<in> set [p , p + 4 .e. p + 2 ^ pt_bits - 1]. is_subject aag (x && ~~ mask pt_bits)))"
lemma perform_page_table_invocation_respects:
"\<lbrace>integrity aag X st and pas_refined aag and invs
and valid_pti page_table_invocation
and K (authorised_page_table_inv aag page_table_invocation)\<rbrace>
perform_page_table_invocation page_table_invocation
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: perform_page_table_invocation_def
cong: page_table_invocation.case_cong option.case_cong prod.case_cong
cap.case_cong arch_cap.case_cong)
apply (rule hoare_pre)
apply (wp store_pde_respects get_cap_integrity_autarch set_cap_integrity_autarch store_pte_respects unmap_page_table_respects mapM_wp'
| wpc
| simp add: mapM_x_mapM authorised_page_table_inv_def cleanByVA_PoU_def)+
apply (auto simp: cap_auth_conferred_def is_page_cap_def
pas_refined_all_auth_is_owns
valid_pti_def valid_cap_simps)
done
crunch arch_state [wp]: do_machine_op "\<lambda>s. P (arch_state s)"
crunch arm_asid_table_inv [wp]: unmap_page_table "\<lambda>s. P (arm_asid_table (arch_state s))"
(wp: crunch_wps simp: crunch_simps)
lemma clas_update_map_data_strg:
"(is_pg_cap cap \<or> is_pt_cap cap) \<longrightarrow> cap_links_asid_slot aag p (cap.ArchObjectCap (update_map_data (the_arch_cap cap) None))"
unfolding cap_links_asid_slot_def
by (fastforce simp: is_cap_simps update_map_data_def)
lemmas pte_ref_simps = pte_ref_def[split_simps pte.split]
lemmas store_pde_pas_refined_simple
= store_pde_pas_refined[where pde=InvalidPDE, simplified pde_ref_simps, simplified]
crunch pas_refined[wp]: unmap_page_table "pas_refined aag"
(wp: crunch_wps store_pde_pas_refined_simple
simp: crunch_simps pde_ref_simps)
lemma pde_ref_pde_ref2:
"\<lbrakk> pde_ref x = Some v; pde_ref2 x = Some v' \<rbrakk> \<Longrightarrow> v' = (v, 0, {Control})"
unfolding pde_ref_def pde_ref2_def
by (cases x, simp_all)
lemma ptr_range_0 [simp]: "ptr_range (p :: word32) 0 = {p}"
unfolding ptr_range_def by simp
lemma mask_PTCap_eq:
"(ArchObjectCap (PageTableCap a b) = mask_cap R cap) = (cap = ArchObjectCap (PageTableCap a b))"
by (auto simp: mask_cap_def cap_rights_update_def acap_rights_update_def
split: arch_cap.splits cap.splits)
lemma perform_page_table_invocation_pas_refined [wp]:
"\<lbrace> pas_refined aag and valid_pti page_table_invocation
and K (authorised_page_table_inv aag page_table_invocation)\<rbrace>
perform_page_table_invocation page_table_invocation
\<lbrace>\<lambda>s. pas_refined aag\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: perform_page_table_invocation_def
cong: page_table_invocation.case_cong option.case_cong prod.case_cong
cap.case_cong arch_cap.case_cong)
apply (rule hoare_pre)
apply (wp get_cap_wp mapM_wp' store_pte_cte_wp_at do_machine_op_cte_wp_at hoare_vcg_all_lift
| (wp hoare_vcg_imp_lift, unfold disj_not1)
| wpc
| simp add: mapM_x_mapM authorised_page_table_inv_def imp_conjR disj_not1
pte_ref_simps)+
apply (cases page_table_invocation)
apply (fastforce simp: valid_pti_def is_cap_simps pas_refined_refl auth_graph_map_def2 dest: pde_ref_pde_ref2)
apply clarsimp
apply (subgoal_tac "cte_wp_at (op = xc) (aa, ba) s \<longrightarrow>
pas_cap_cur_auth aag (cap.ArchObjectCap (update_map_data (the_arch_cap xc) None))")
apply simp
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (frule (1) cap_cur_auth_caps_of_state)
apply simp
apply (clarsimp simp: valid_pti_def cte_wp_at_caps_of_state is_cap_simps is_arch_diminished_def)
apply (clarsimp simp: diminished_def mask_PTCap_eq)
apply (clarsimp simp: cap_auth_conferred_def update_map_data_def is_page_cap_def cap_links_asid_slot_def cap_links_irq_def aag_cap_auth_def)
done
definition
authorised_slots :: "'a PAS \<Rightarrow> pte \<times> (obj_ref list) + pde \<times> (obj_ref list) \<Rightarrow> bool"
where
"authorised_slots aag m \<equiv> case m of
Inl (pte, slots) \<Rightarrow> (\<forall>x. pte_ref pte = Some x \<longrightarrow> (\<forall>a \<in> (snd (snd x)). \<forall>p \<in> ptr_range (fst x) (fst (snd x)).(aag_has_auth_to aag a p)))
\<and> (\<forall>x \<in> set slots. is_subject aag (x && ~~ mask pt_bits))
| Inr (pde, slots) \<Rightarrow> (\<forall>x. pde_ref2 pde = Some x \<longrightarrow> (\<forall>a \<in> (snd (snd x)). \<forall>p \<in> ptr_range (fst x) (fst (snd x)). (aag_has_auth_to aag a p)))
\<and> (\<forall>x \<in> set slots. is_subject aag (x && ~~ mask pd_bits))"
definition
authorised_page_inv :: "'a PAS \<Rightarrow> page_invocation \<Rightarrow> bool"
where
"authorised_page_inv aag pgi \<equiv> case pgi of
PageMap asid cap ptr slots \<Rightarrow>
pas_cap_cur_auth aag cap \<and> is_subject aag (fst ptr) \<and> authorised_slots aag slots
| PageRemap asid slots \<Rightarrow> authorised_slots aag slots
| PageUnmap cap ptr \<Rightarrow> pas_cap_cur_auth aag (Structures_A.ArchObjectCap cap) \<and> is_subject aag (fst ptr)
| PageFlush typ start end pstart pd asid \<Rightarrow> True
| PageGetAddr ptr \<Rightarrow> True"
crunch respects[wp]: check_mapping_pptr "integrity X aag st"
crunch respects[wp]: lookup_pt_slot "integrity X aag st"
lemma ptrFromPAddr_inj: "inj ptrFromPAddr"
by (auto intro: injI simp: ptrFromPAddr_def)
lemma vs_refs_no_global_pts_pdI:
"\<lbrakk>pd (ucast r) = PageTablePDE x a b;
((ucast r)::12 word) < ucast (kernel_base >> 20) \<rbrakk> \<Longrightarrow>
(ptrFromPAddr x, VSRef (r && mask 12)
(Some APageDirectory), Control)
\<in> vs_refs_no_global_pts (ArchObj (PageDirectory pd))"
apply(clarsimp simp: vs_refs_no_global_pts_def)
apply (drule_tac f=pde_ref2 in arg_cong, simp add: pde_ref_simps o_def)
apply (rule rev_bexI, rule DiffI, erule graph_ofI)
apply simp
apply (clarsimp simp: ucast_ucast_mask )
done
lemma aag_has_auth_to_Control_eq_owns:
"pas_refined aag s \<Longrightarrow> aag_has_auth_to aag Control p = is_subject aag p"
by (auto simp: pas_refined_refl elim: aag_Control_into_owns)
lemma lookup_pt_slot_authorised:
"\<lbrace>\<exists>\<rhd> pd and valid_arch_objs and pspace_aligned and pas_refined aag
and K (is_subject aag pd) and K (is_aligned pd 14 \<and> vptr < kernel_base)\<rbrace>
lookup_pt_slot pd vptr
\<lbrace>\<lambda>rv s. is_subject aag (rv && ~~ mask pt_bits)\<rbrace>, -"
apply (simp add: lookup_pt_slot_def)
apply (wp get_pde_wp | wpc)+
apply (subgoal_tac "is_aligned pd pd_bits")
apply (clarsimp simp: lookup_pd_slot_pd)
apply (drule(2) valid_arch_objsD)
apply (clarsimp simp: obj_at_def)
apply (drule kheap_eq_state_vrefs_pas_refinedD)
apply (erule vs_refs_no_global_pts_pdI)
apply (drule(1) less_kernel_base_mapping_slots)
apply (simp add: kernel_mapping_slots_def)
apply assumption
apply (simp add: aag_has_auth_to_Control_eq_owns)
apply (drule_tac f="\<lambda>pde. valid_pde pde s" in arg_cong, simp)
apply (clarsimp simp: obj_at_def a_type_def less_kernel_base_mapping_slots)
apply (clarsimp split: Structures_A.kernel_object.split_asm if_split_asm
arch_kernel_obj.split_asm)
apply (erule pspace_alignedE, erule domI)
apply (simp add: pt_bits_def pageBits_def)
apply (subst is_aligned_add_helper, assumption)
apply (rule shiftl_less_t2n)
apply (rule order_le_less_trans, rule word_and_le1, simp)
apply simp
apply simp
apply (simp add: pd_bits_def pageBits_def)
done
lemma is_aligned_6_masks:
fixes p :: word32
shows
"\<lbrakk> is_aligned p 6; bits = pt_bits \<or> bits = pd_bits \<rbrakk>
\<Longrightarrow> \<forall>x \<in> set [0, 4 .e. 0x3C]. x + p && ~~ mask bits = p && ~~ mask bits"
apply clarsimp
apply (drule subsetD[OF upto_enum_step_subset])
apply (subst mask_lower_twice[symmetric, where n=6])
apply (auto simp add: pt_bits_def pageBits_def pd_bits_def)[1]
apply (subst add.commute, subst is_aligned_add_helper, assumption)
apply (simp add: order_le_less_trans)
apply simp
done
lemma lookup_pt_slot_authorised2:
"\<lbrace>\<exists>\<rhd> pd and K (is_subject aag pd) and
K (is_aligned pd 14 \<and> is_aligned vptr 16 \<and> vptr < kernel_base) and
valid_arch_objs and valid_arch_state and equal_kernel_mappings and
valid_global_objs and pspace_aligned and pas_refined aag\<rbrace>
lookup_pt_slot pd vptr
\<lbrace>\<lambda>rv s. \<forall>x\<in>set [0 , 4 .e. 0x3C]. is_subject aag (x + rv && ~~ mask pt_bits)\<rbrace>, -"
apply (clarsimp simp: validE_R_def valid_def validE_def
split: sum.split)
apply (frule use_validE_R[OF _ lookup_pt_slot_authorised])
apply fastforce
apply (frule use_validE_R[OF _ lookup_pt_slot_is_aligned_6])
apply (fastforce simp add: vmsz_aligned_def pd_bits_def pageBits_def)
apply (simp add: is_aligned_6_masks)
done
lemma lookup_pt_slot_authorised3:
"\<lbrace>\<exists>\<rhd> pd and K (is_subject aag pd) and
K (is_aligned pd 14 \<and> is_aligned vptr 16 \<and> vptr < kernel_base) and
valid_arch_objs and valid_arch_state and equal_kernel_mappings and
valid_global_objs and pspace_aligned and pas_refined aag\<rbrace>
lookup_pt_slot pd vptr
\<lbrace>\<lambda>rv s. \<forall>x\<in>set [rv , rv + 4 .e. rv + 0x3C]. is_subject aag (x && ~~ mask pt_bits)\<rbrace>, -"
apply (rule hoare_post_imp_R [where Q' = "\<lambda>rv s. is_aligned rv 6 \<and> (\<forall>x\<in>set [0 , 4 .e. 0x3C]. is_subject aag (x + rv && ~~ mask pt_bits))"])
apply (rule hoare_pre)
apply (wp lookup_pt_slot_is_aligned_6 lookup_pt_slot_authorised2)
apply (fastforce simp: vmsz_aligned_def pd_bits_def pageBits_def)
apply simp
apply (simp add: p_0x3C_shift)
done
lemma mapM_set':
assumes ip: "\<And>x y. \<lbrakk> x \<in> set xs; y \<in> set xs \<rbrakk> \<Longrightarrow> \<lbrace>I x and I y\<rbrace> f x \<lbrace>\<lambda>_. I y\<rbrace>"
and rl: "\<And>s. (\<forall>x \<in> set xs. I x s) \<Longrightarrow> P s"
shows "\<lbrace>\<lambda>s. (\<forall>x \<in> set xs. I x s)\<rbrace> mapM f xs \<lbrace>\<lambda>_. P\<rbrace>"
apply (rule hoare_post_imp [OF rl])
apply assumption
apply (rule mapM_set)
apply (rule hoare_pre)
apply (rule hoare_vcg_ball_lift)
apply (erule (1) ip)
apply clarsimp
apply (rule hoare_pre)
apply (rule ip)
apply assumption
apply assumption
apply clarsimp
apply (rule hoare_pre)
apply (rule ip)
apply simp+
done
lemma mapM_set'':
assumes ip: "\<And>x y. \<lbrakk> x \<in> set xs; y \<in> set xs \<rbrakk> \<Longrightarrow> \<lbrace>I x and I y and Q\<rbrace> f x \<lbrace>\<lambda>_. I y and Q\<rbrace>"
and rl: "\<And>s. (\<forall>x \<in> set xs. I x s) \<and> Q s \<Longrightarrow> P s"
shows "\<lbrace>\<lambda>s. (\<forall>x \<in> set xs. I x s) \<and> Q s\<rbrace> mapM f xs \<lbrace>\<lambda>_. P\<rbrace>"
apply (rule hoare_post_imp [OF rl])
apply assumption
apply (cases "xs = []")
apply (simp add: mapM_Nil)
apply (rule hoare_pre)
apply (rule mapM_set' [where I = "\<lambda>x s. I x s \<and> Q s"])
apply (rule hoare_pre)
apply (rule ip [simplified pred_conj_def])
apply simp_all
apply (clarsimp simp add: neq_Nil_conv)
done
crunch respects [wp]: flush_page "integrity aag X st"
(simp: invalidateTLB_VAASID_def ignore: do_machine_op)
lemma find_pd_for_asid_pd_owned[wp]:
"\<lbrace>pas_refined aag and K (is_subject_asid aag asid)\<rbrace>
find_pd_for_asid asid
\<lbrace>\<lambda>rv s. is_subject aag rv\<rbrace>,-"
apply (wp find_pd_for_asid_authority2)
apply (auto simp: aag_has_auth_to_Control_eq_owns)
done
lemmas store_pte_pas_refined_simple
= store_pte_pas_refined[where pte=InvalidPTE, simplified pte_ref_simps, simplified]
crunch pas_refined[wp]: unmap_page "pas_refined aag"
(wp: crunch_wps store_pde_pas_refined_simple store_pte_pas_refined_simple
simp: crunch_simps)
crunch pspace_aligned[wp]: flush_page "pspace_aligned"
lemma unmap_page_respects:
"\<lbrace>integrity aag X st and K (is_subject_asid aag asid) and pas_refined aag and pspace_aligned and valid_arch_objs
and K (vmsz_aligned vptr sz \<and> vptr < kernel_base)\<rbrace>
unmap_page sz asid vptr pptr
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: unmap_page_def swp_def cong: vmpage_size.case_cong)
apply (rule hoare_pre)
apply (wp store_pte_respects store_pde_respects
hoare_drop_imps[where Q="\<lambda>rv. integrity aag X st"]
lookup_pt_slot_authorised
| wpc
| simp add: is_aligned_6_masks is_aligned_mask[symmetric] cleanByVA_PoU_def
| wp_once hoare_drop_imps
mapM_set'' [where f = "(\<lambda>a. store_pte a InvalidPTE)" and I = "\<lambda>x s. is_subject aag (x && ~~ mask pt_bits)" and Q = "integrity aag X st"]
mapM_set'' [where f = "(\<lambda>a. store_pde a InvalidPDE)" and I = "\<lambda>x s. is_subject aag (x && ~~ mask pd_bits)" and Q = "integrity aag X st"]
| wp_once hoare_drop_imps[where R="\<lambda>rv s. rv"])+
done
(* FIXME: MOVE *)
lemma less_shiftr:
shows "\<lbrakk> x < y; is_aligned y n \<rbrakk> \<Longrightarrow> x >> n < y >> n"
apply (simp add: word_less_nat_alt shiftr_div_2n')
apply (subst td_gal_lt[symmetric])
apply simp
apply (subst dvd_div_mult_self)
apply (simp add: is_aligned_def)
apply simp
done
lemma kernel_base_aligned_20:
"is_aligned kernel_base 20"
apply(simp add: kernel_base_def is_aligned_def)
done
lemma diminished_PageCapD:
"diminished (ArchObjectCap (PageCap dev p R sz m)) cap
\<Longrightarrow> \<exists>R'. cap = ArchObjectCap (PageCap dev p R' sz m)"
apply (cases cap, auto simp add: diminished_def mask_cap_def cap_rights_update_def)
apply (auto simp: acap_rights_update_def split: arch_cap.splits)
done
(* FIXME: CLAG *)
lemmas do_machine_op_bind =
submonad_bind [OF submonad_do_machine_op submonad_do_machine_op
submonad_do_machine_op]
(* FIXME: CLAG *)
lemma ef_machine_op_lift [simp]:
"empty_fail (machine_op_lift f)"
by (simp add: machine_op_lift_def)
lemma mol_mem[wp]:
"\<lbrace>\<lambda>ms. P (underlying_memory ms)\<rbrace> machine_op_lift mop \<lbrace>\<lambda>rv ms. P (underlying_memory ms)\<rbrace>"
by (simp add: machine_op_lift_def machine_rest_lift_def split_def | wp)+
lemma mol_dvs[wp]:
"\<lbrace>\<lambda>ms. P (device_state ms)\<rbrace> machine_op_lift mop \<lbrace>\<lambda>rv ms. P (device_state ms)\<rbrace>"
by (simp add: machine_op_lift_def machine_rest_lift_def split_def | wp)+
lemmas do_flush_defs = cleanCacheRange_RAM_def cleanCacheRange_PoC_def cleanCacheRange_PoU_def invalidateCacheRange_RAM_def cleanInvalidateCacheRange_RAM_def branchFlushRange_def invalidateCacheRange_I_def
lemma do_flush_respects[wp]:
"\<lbrace>integrity aag X st\<rbrace> do_machine_op (do_flush typ start end pstart)
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (cases "typ")
apply (wp dmo_no_mem_respects | simp add: do_flush_def cache_machine_op_defs do_flush_defs)+
done
lemma invalidate_tlb_by_asid_respects[wp]:
"\<lbrace>integrity aag X st\<rbrace> invalidate_tlb_by_asid asid
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (simp add: invalidate_tlb_by_asid_def)
apply (wp dmo_no_mem_respects | wpc | simp add: invalidateTLB_ASID_def )+
done
lemma invalidate_tlb_by_asid_pas_refined[wp]:
"\<lbrace>pas_refined aag\<rbrace> invalidate_tlb_by_asid asid \<lbrace>\<lambda>rv. pas_refined aag\<rbrace>"
by (wp dmo_no_mem_respects | wpc | simp add: invalidate_tlb_by_asid_def invalidateTLB_ASID_def)+
crunch pas_refined[wp]: set_message_info "pas_refined aag"
(* FIXME: move *)
lemma set_message_info_mdb[wp]:
"\<lbrace>\<lambda>s. P (cdt s)\<rbrace> set_message_info thread info \<lbrace>\<lambda>rv s. P (cdt s)\<rbrace>"
unfolding set_message_info_def by wp
crunch state_vrefs[wp]: do_machine_op "\<lambda>s::'z::state_ext state. P (state_vrefs s)"
crunch thread_states[wp]: do_machine_op "\<lambda>s. P (thread_states s)"
(* FIXME: move *)
lemma set_mrs_state_vrefs[wp]:
"\<lbrace>\<lambda>s. P (state_vrefs s)\<rbrace> set_mrs thread buf msgs \<lbrace>\<lambda>rv s. P (state_vrefs s)\<rbrace>"
apply (simp add: set_mrs_def split_def set_object_def)
apply (wp gets_the_wp get_wp put_wp mapM_x_wp'
| wpc
| simp split del: if_split add: zipWithM_x_mapM_x split_def store_word_offs_def)+
apply (auto simp: obj_at_def state_vrefs_def get_tcb_ko_at
elim!: rsubst[where P=P, OF _ ext]
split: if_split_asm simp: vs_refs_no_global_pts_def)
done
(* FIXME: move *)
lemma set_mrs_thread_states[wp]:
"\<lbrace>\<lambda>s. P (thread_states s)\<rbrace> set_mrs thread buf msgs \<lbrace>\<lambda>rv s. P (thread_states s)\<rbrace>"
apply (simp add: set_mrs_def split_def set_object_def)
apply (wp gets_the_wp get_wp put_wp mapM_x_wp'
| wpc
| simp split del: if_split add: zipWithM_x_mapM_x split_def store_word_offs_def)+
apply (clarsimp simp: fun_upd_def[symmetric] thread_states_preserved)
done
lemma set_mrs_thread_bound_ntfns[wp]:
"\<lbrace>\<lambda>s. P (thread_bound_ntfns s)\<rbrace> set_mrs thread buf msgs \<lbrace>\<lambda>rv s. P (thread_bound_ntfns s)\<rbrace>"
apply (simp add: set_mrs_def split_def set_object_def)
apply (wp gets_the_wp get_wp put_wp mapM_x_wp' dmo_wp
| wpc
| simp split del: if_split add: zipWithM_x_mapM_x split_def store_word_offs_def no_irq_storeWord)+
apply (clarsimp simp: fun_upd_def[symmetric] thread_bound_ntfns_preserved )
done
lemma set_mrs_pas_refined[wp]:
"\<lbrace>pas_refined aag\<rbrace> set_mrs thread buf msgs \<lbrace>\<lambda>rv. pas_refined aag\<rbrace>"
apply (simp add: pas_refined_def state_objs_to_policy_def)
apply (rule hoare_pre)
apply (wp | wps)+
apply (clarsimp dest!: auth_graph_map_memD)
done
crunch integrity_autarch: set_message_info "integrity aag X st"
lemma dmo_storeWord_respects_Write:
"\<lbrace>integrity aag X st and K (\<forall>p' \<in> ptr_range p 2. aag_has_auth_to aag Write p')\<rbrace>
do_machine_op (storeWord p v)
\<lbrace>\<lambda>a. integrity aag X st\<rbrace>"
apply (rule hoare_pre)
apply (wp dmo_wp storeWord_respects)
apply simp
done
(* cf. auth_ipc_buffers *)
definition
ipc_buffer_has_auth :: "'a PAS \<Rightarrow> word32 \<Rightarrow> word32 option \<Rightarrow> bool"
where
"ipc_buffer_has_auth aag thread \<equiv>
case_option True (\<lambda>buf'. is_aligned buf' msg_align_bits \<and> (\<forall>x \<in> ptr_range buf' msg_align_bits. (pasObjectAbs aag thread, Write, pasObjectAbs aag x) \<in> pasPolicy aag))"
lemma ipc_buffer_has_auth_wordE:
"\<lbrakk> ipc_buffer_has_auth aag thread (Some buf); p \<in> ptr_range (buf + off) sz; is_aligned off sz; sz \<le> msg_align_bits; off < 2 ^ msg_align_bits \<rbrakk>
\<Longrightarrow> (pasObjectAbs aag thread, Write, pasObjectAbs aag p) \<in> pasPolicy aag"
unfolding ipc_buffer_has_auth_def
apply clarsimp
apply (erule bspec)
apply (erule (4) set_mp [OF ptr_range_subset])
done
lemma is_aligned_word_size_2 [simp]:
"is_aligned (p * of_nat word_size) 2"
unfolding word_size_def
by (simp add: is_aligned_mult_triv2 [where n = 2, simplified] word_bits_conv)
lemma mul_word_size_lt_msg_align_bits_ofnat:
"p < 2 ^ (msg_align_bits - 2) \<Longrightarrow> of_nat p * of_nat word_size < (2 :: word32) ^ msg_align_bits"
unfolding word_size_def
apply simp
apply (rule word_less_power_trans_ofnat [where k = 2, simplified])
apply (simp_all add: msg_align_bits word_bits_conv)
done
lemma store_word_offs_integrity_autarch:
"\<lbrace>integrity aag X st and K (is_subject aag thread \<and> ipc_buffer_has_auth aag thread (Some buf) \<and> r < 2 ^ (msg_align_bits - 2))\<rbrace>
store_word_offs buf r v
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (simp add: store_word_offs_def)
apply (rule hoare_pre)
apply (wp dmo_storeWord_respects_Write)
apply clarsimp
apply (drule (1) ipc_buffer_has_auth_wordE)
apply simp
apply (simp add: msg_align_bits)
apply (erule mul_word_size_lt_msg_align_bits_ofnat)
apply simp
done
lemma set_mrs_integrity_autarch:
"\<lbrace>integrity aag X st and K (is_subject aag thread \<and> ipc_buffer_has_auth aag thread buf)\<rbrace>
set_mrs thread buf msgs
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (rule hoare_gen_asm)
apply (simp add: set_mrs_def)
apply (wp gets_the_wp get_wp put_wp mapM_x_wp' store_word_offs_integrity_autarch [where aag = aag and thread = thread]
| wpc
| simp split del: if_split add: split_def zipWithM_x_mapM_x )+
apply (clarsimp elim!: in_set_zipE split: if_split_asm)
apply (rule order_le_less_trans [where y = msg_max_length])
apply (fastforce simp add: le_eq_less_or_eq)
apply (simp add: msg_max_length_def msg_align_bits)
apply simp
apply (wp set_object_integrity_autarch hoare_drop_imps hoare_vcg_all_lift)+
apply simp
done
lemma perform_page_invocation_respects:
"\<lbrace>integrity aag X st and pas_refined aag and K (authorised_page_inv aag pgi) and valid_page_inv pgi and valid_arch_objs and pspace_aligned and is_subject aag \<circ> cur_thread\<rbrace>
perform_page_invocation pgi
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
proof -
(* does not work as an elim rule with clarsimp, which hammers the Ball in the conclusion. *)
have set_tl_subset_mp: "\<And>xs a. a \<in> set (tl xs) \<Longrightarrow> a \<in> set xs" by (case_tac xs,clarsimp+)
have hd_valid_slots:
"\<And>a xs s. valid_slots (Inl (a, xs)) s \<Longrightarrow> hd xs \<in> set xs"
"\<And>a xs s. valid_slots (Inr (a, xs)) s \<Longrightarrow> hd xs \<in> set xs"
by (simp_all add: valid_slots_def)
show ?thesis
apply (simp add: perform_page_invocation_def mapM_discarded swp_def
valid_page_inv_def valid_unmap_def
authorised_page_inv_def authorised_slots_def
split: page_invocation.split sum.split
arch_cap.split option.split,
safe)
apply (wp set_cap_integrity_autarch unmap_page_respects
mapM_x_and_const_wp[OF store_pte_respects] store_pte_respects
mapM_x_and_const_wp[OF store_pde_respects] store_pde_respects
| elim conjE hd_valid_slots[THEN bspec[rotated]]
| clarsimp dest!:set_tl_subset_mp
| wpc )+
apply (clarsimp simp: cte_wp_at_caps_of_state is_arch_diminished_def
cap_auth_conferred_def cap_rights_update_def
acap_rights_update_def update_map_data_def is_pg_cap_def
valid_page_inv_def valid_cap_simps
dest!: diminished_PageCapD cap_master_cap_eqDs)
apply (drule (1) clas_caps_of_state)
apply (simp add: cap_links_asid_slot_def label_owns_asid_slot_def)
apply (auto dest: pas_refined_Control)[1]
apply (wp set_mrs_integrity_autarch set_message_info_integrity_autarch | simp add: ipc_buffer_has_auth_def)+
done
qed
lemma perform_page_invocation_pas_refined [wp]:
"\<lbrace>pas_refined aag and K (authorised_page_inv aag pgi) and valid_page_inv pgi\<rbrace>
perform_page_invocation pgi
\<lbrace>\<lambda>_. pas_refined aag\<rbrace>"
apply (simp add: perform_page_invocation_def mapM_discarded
valid_page_inv_def valid_unmap_def swp_def
authorised_page_inv_def authorised_slots_def
cong: page_invocation.case_cong sum.case_cong)
apply (rule hoare_pre)
apply wpc
apply (wp set_cap_pas_refined unmap_page_pas_refined case_sum_wp case_prod_wp
mapM_x_and_const_wp [OF store_pte_pas_refined] mapM_x_and_const_wp [OF store_pde_pas_refined]
hoare_vcg_all_lift hoare_vcg_const_imp_lift get_cap_wp
| (wp hoare_vcg_imp_lift, unfold disj_not1)
| strengthen clas_update_map_data_strg
| wpc
| simp)+
apply (case_tac pgi)
apply (clarsimp simp: valid_slots_def pte_ref_def
pde_ref2_def auth_graph_map_mem pas_refined_refl split:sum.splits)
apply (clarsimp simp: pte_ref_def pde_ref2_def pte_ref_def
valid_slots_def auth_graph_map_mem pas_refined_refl split:sum.splits)
apply (clarsimp simp: cte_wp_at_caps_of_state
is_arch_diminished_def pte_ref_def pde_ref2_def
is_cap_simps is_pg_cap_def cap_auth_conferred_def
dest!: diminished_PageCapD)
apply (frule(1) cap_cur_auth_caps_of_state,simp)
apply (((rule conjI, rule impI)+)?, clarsimp,
clarsimp simp: update_map_data_def clas_no_asid aag_cap_auth_def
cap_auth_conferred_def vspace_cap_rights_to_auth_def
cli_no_irqs is_pg_cap_def pte_ref_def
[simplified aag_cap_auth_def])+
done
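(* MakePool is authorised when the subject owns both capability slots (slot and parent),
   may manage every ASID, and owns every address of the page-aligned frame. *)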
definition
authorised_asid_control_inv :: "'a PAS \<Rightarrow> asid_control_invocation \<Rightarrow> bool"
where
"authorised_asid_control_inv aag aci \<equiv> case aci of
asid_control_invocation.MakePool frame slot parent base \<Rightarrow>
is_subject aag (fst slot) \<and> is_aligned frame pageBits \<and> (\<forall>asid. is_subject_asid aag asid) \<and> is_subject aag (fst parent) \<and>
(\<forall>x \<in> {frame..frame + 2 ^ pageBits - 1}. is_subject aag x)"
lemma integrity_arm_asid_table_entry_update':
"\<lbrakk>integrity aag X st s; asid_table = arm_asid_table (arch_state s);
(\<forall>asid'.
asid' \<noteq> 0 \<and>
asid_high_bits_of asid' = asid_high_bits_of asid \<longrightarrow>
is_subject_asid aag asid')\<rbrakk> \<Longrightarrow>
integrity aag X st
(s\<lparr>arch_state := arch_state s
\<lparr>arm_asid_table :=
\<lambda>a. if a = asid_high_bits_of asid then v
else asid_table a\<rparr>\<rparr>)"
apply(clarsimp simp: integrity_def integrity_asids_def)
done
lemma arm_asid_table_entry_update_integrity[wp]:
"\<lbrace>integrity aag X st and (\<lambda> s. asid_table = arm_asid_table (arch_state s)) and K (\<forall>asid'.
asid' \<noteq> 0 \<and>
asid_high_bits_of asid' = asid_high_bits_of asid \<longrightarrow>
is_subject_asid aag asid')\<rbrace>
modify
(\<lambda>s. s\<lparr>arch_state := arch_state s
\<lparr>arm_asid_table := asid_table
(asid_high_bits_of asid := v)\<rparr>\<rparr>)
\<lbrace>\<lambda>_. integrity aag X st\<rbrace>"
apply (wp| simp)+
apply (blast intro: integrity_arm_asid_table_entry_update')
done
lemma perform_asid_control_invocation_respects:
"\<lbrace>integrity aag X st and K (authorised_asid_control_inv aag aci)\<rbrace>
perform_asid_control_invocation aci
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
apply (simp add: perform_asid_control_invocation_def)
apply (rule hoare_pre)
apply (wpc, simp)
apply (wp set_cap_integrity_autarch cap_insert_integrity_autarch retype_region_integrity[where sz=12] static_imp_wp | simp)+
apply (clarsimp simp: authorised_asid_control_inv_def
ptr_range_def pageBits_def word_bits_def page_bits_def
is_aligned_neg_mask_eq add.commute
range_cover_def obj_bits_api_def default_arch_object_def
pageBits_def word_bits_def)
apply(subst is_aligned_neg_mask_eq[THEN sym], assumption)
apply(simp add: mask_neg_mask_is_zero)
done
lemma pas_refined_cdt:
"\<lbrakk> cdt s slot' = Some slot; pas_refined aag s \<rbrakk> \<Longrightarrow> (pasObjectAbs aag (fst slot), Control, pasObjectAbs aag (fst slot')) \<in> pasPolicy aag"
unfolding pas_refined_def
apply (drule sta_cdt)
apply (fastforce simp add: auth_graph_map_def)
done
lemma pas_refined_set_asid_strg:
"pas_refined aag s \<and> is_subject aag pool \<and> (\<forall>asid. asid_high_bits_of asid = base \<longrightarrow> is_subject_asid aag asid)
\<longrightarrow>
pas_refined aag (s\<lparr>arch_state := arch_state s \<lparr>arm_asid_table := (arm_asid_table (arch_state s))(base \<mapsto> pool)\<rparr>\<rparr>)"
apply (clarsimp simp: pas_refined_def state_objs_to_policy_def)
apply (erule state_asids_to_policy_aux.cases, simp_all split: if_split_asm)
apply (auto intro: state_asids_to_policy_aux.intros auth_graph_map_memI[OF sbta_vref] pas_refined_refl[simplified pas_refined_def state_objs_to_policy_def])
done
(* FIXME: copied from Machine_R. *)
lemma ef_storeWord: "empty_fail (storeWord x y)"
by (simp add: storeWord_def)
lemma empty_fail_clearMemory [intro!]:
"\<And>a b. empty_fail (clearMemory a b)"
by (simp add: clearMemory_def mapM_x_mapM ef_storeWord)
(* FIXME: copied from Detype_R. *)
lemma gets_modify_comm2:
"\<forall>s. g (f s) = g s \<Longrightarrow>
(do x \<leftarrow> modify f; y \<leftarrow> gets g; m x y od) =
(do y \<leftarrow> gets g; x \<leftarrow> modify f; m x y od)"
apply (rule ext)
apply (drule spec)
by (rule gets_modify_comm)
lemma dmo_detype_comm:
assumes "empty_fail f"
shows "do_machine_op f >>= (\<lambda>s. modify (detype S)) =
modify (detype S) >>= (\<lambda>s. do_machine_op f)"
proof -
have machine_state_detype: "\<forall>s. machine_state (detype S s) = machine_state s"
by (simp add: detype_def)
have detype_msu_independent:
"\<And>f. detype S \<circ> machine_state_update f = machine_state_update f \<circ> detype S"
by (simp add: detype_def ext)
from assms
show ?thesis
apply (simp add: do_machine_op_def split_def bind_assoc)
apply (simp add: gets_modify_comm2[OF machine_state_detype])
apply (rule arg_cong2[where f=bind, OF refl], rule ext)
apply (simp add: empty_fail_def select_f_walk[OF empty_fail_modify]
modify_modify detype_msu_independent)
done
qed
(* FIXME: copied from Detype_R. *)
lemma empty_fail_freeMemory: "empty_fail (freeMemory ptr bits)"
by (simp add: freeMemory_def mapM_x_mapM ef_storeWord)
(* FIXME: copied from Detype_R. *)
lemma delete_objects_def2:
"delete_objects ptr bits \<equiv>
do modify (detype {ptr..ptr + 2 ^ bits - 1});
do_machine_op (freeMemory ptr bits)
od"
by (rule eq_reflection)
(simp add: delete_objects_def dmo_detype_comm[OF empty_fail_freeMemory])
lemma delete_objects_pspace_no_overlap:
"\<lbrace> pspace_aligned and valid_objs and
(\<lambda> s. \<exists> idx. cte_wp_at (op = (UntypedCap dev ptr sz idx)) slot s)\<rbrace>
delete_objects ptr sz
\<lbrace>\<lambda>rv. pspace_no_overlap_range_cover ptr sz\<rbrace>"
unfolding delete_objects_def do_machine_op_def
apply(wp | simp add: split_def detype_msu_comm)+
apply(clarsimp)
apply(rule pspace_no_overlap_detype)
apply(auto dest: cte_wp_at_valid_objs_valid_cap)
done
lemma delete_objects_invs_ex:
"\<lbrace>(\<lambda>s. \<exists>slot dev f.
cte_wp_at (op = (UntypedCap dev ptr bits f)) slot s \<and>
descendants_range (UntypedCap dev ptr bits f) slot s) and
invs and
ct_active\<rbrace>
delete_objects ptr bits \<lbrace>\<lambda>_. invs\<rbrace>"
apply(clarsimp simp: valid_def)
apply(erule use_valid)
apply wp
apply auto
done
lemma perform_asid_control_invocation_pas_refined [wp]:
notes delete_objects_invs[wp del]
shows
"\<lbrace>pas_refined aag and pas_cur_domain aag and invs and valid_aci aci and ct_active and
K (authorised_asid_control_inv aag aci)\<rbrace>
perform_asid_control_invocation aci
\<lbrace>\<lambda>_. pas_refined aag\<rbrace>"
apply (simp add: perform_asid_control_invocation_def)
apply (rule hoare_pre)
apply (wp cap_insert_pas_refined static_imp_wp
| strengthen pas_refined_set_asid_strg
| wpc
| simp add: delete_objects_def2 fun_upd_def[symmetric])+
apply (wp retype_region_pas_refined'[where sz=pageBits]
max_index_upd_invs_simple max_index_upd_caps_overlap_reserved
hoare_vcg_ex_lift set_cap_cte_wp_at hoare_vcg_disj_lift
set_cap_descendants_range_in set_cap_no_overlap get_cap_wp
hoare_vcg_all_lift static_imp_wp
| simp add: do_machine_op_def split_def)+
apply(rename_tac word1 prod1 prod2 word2 cap )
apply(rule_tac Q="\<lambda> rv s. (\<exists> idx. cte_wp_at (op = (UntypedCap False word1 pageBits idx)) prod2 s) \<and>
(\<forall> x\<in>ptr_range word1 pageBits. is_subject aag x) \<and>
pas_refined aag s \<and>
pas_cur_domain aag s \<and>
pspace_no_overlap_range_cover word1 pageBits s \<and>
invs s \<and>
descendants_range_in
{word1..(word1 && ~~ mask pageBits) + 2 ^ pageBits - 1} prod2
s \<and>
range_cover word1 pageBits
(obj_bits_api (ArchObject ASIDPoolObj) 0) (Suc 0) \<and>
is_subject aag (fst prod1) \<and>
is_subject aag (fst prod2) \<and>
pas_cap_cur_auth aag (ArchObjectCap (ASIDPoolCap word1 word2)) \<and>
is_subject aag word1 \<and>
(\<forall>x. asid_high_bits_of x = asid_high_bits_of word2 \<longrightarrow>
is_subject_asid aag x)" in hoare_strengthen_post)
apply (simp add: page_bits_def)
apply (wp add: delete_objects_pspace_no_overlap hoare_vcg_ex_lift
delete_objects_descendants_range_in delete_objects_invs_ex
delete_objects_pas_refined
del: Untyped_AI.delete_objects_pspace_no_overlap
| simp add: page_bits_def)+
apply clarsimp
apply (frule untyped_cap_aligned, simp add: invs_valid_objs)
apply (fastforce simp: cte_wp_at_def aag_cap_auth_def ptr_range_def pas_refined_refl cap_links_asid_slot_def cap_links_irq_def is_aligned_neg_mask_eq obj_bits_api_def default_arch_object_def retype_addrs_def)
apply (clarsimp simp: valid_aci_def authorised_asid_control_inv_def)
apply (subgoal_tac "is_aligned x pageBits")
apply (clarsimp simp: cte_wp_at_caps_of_state)
apply (rule conjI)
apply(drule untyped_slots_not_in_untyped_range)
apply(erule empty_descendants_range_in)
apply(simp add: cte_wp_at_caps_of_state)
apply simp
apply(rule refl)
apply(rule subset_refl)
apply(simp add: page_bits_def)
apply(clarsimp simp: ptr_range_def invs_psp_aligned invs_valid_objs
descendants_range_def2 empty_descendants_range_in
is_aligned_neg_mask_eq page_bits_def)
apply ((strengthen refl | simp)+)?
apply (rule conjI, fastforce)
apply (rule conjI, fastforce intro: empty_descendants_range_in)
apply(rule conjI)
apply(clarsimp simp: range_cover_def obj_bits_api_def default_arch_object_def)
apply(subst is_aligned_neg_mask_eq[THEN sym], assumption)
apply(simp add: mask_neg_mask_is_zero pageBits_def)
apply(clarsimp simp: aag_cap_auth_def pas_refined_refl)
apply(drule_tac x=x in bspec)
apply(simp add: is_aligned_no_overflow)
apply(clarsimp simp: pas_refined_refl cap_links_asid_slot_def label_owns_asid_slot_def cap_links_irq_def)
apply(fastforce dest: cte_wp_at_valid_objs_valid_cap simp: valid_cap_def cap_aligned_def)
done
lemma set_asid_pool_respects:
"\<lbrace>integrity aag X st and K (is_subject aag ptr)\<rbrace>
set_asid_pool ptr pool
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
apply (simp add: set_asid_pool_def)
apply (wp get_object_wp set_object_integrity_autarch)
apply simp
done
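(* Assign is authorised when the subject owns the ASID pool and the cap slot, the ASID is
   non-zero, and the subject may manage every non-zero ASID with the same high bits. *)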
definition
authorised_asid_pool_inv :: "'a PAS \<Rightarrow> asid_pool_invocation \<Rightarrow> bool"
where
"authorised_asid_pool_inv aag api \<equiv> case api of
asid_pool_invocation.Assign asid pool_ptr ct_slot \<Rightarrow>
is_subject aag pool_ptr \<and> is_subject aag (fst ct_slot)
\<and> asid \<noteq> 0
\<and> (\<forall>asid'. asid_high_bits_of asid' = asid_high_bits_of asid \<and> asid' \<noteq> 0
\<longrightarrow> is_subject_asid aag asid')"
lemma perform_asid_pool_invocation_respects:
"\<lbrace>integrity aag X st and K (authorised_asid_pool_inv aag api)\<rbrace>
perform_asid_pool_invocation api
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
apply (simp add: perform_asid_pool_invocation_def)
apply (rule hoare_pre)
apply (wp set_asid_pool_respects get_cap_wp get_cap_integrity_autarch set_cap_integrity_autarch
| wpc
| simp)+
apply (auto iff: authorised_asid_pool_inv_def)
done
lemma is_subject_asid_into_loas:
"\<lbrakk> is_subject_asid aag asid; pas_refined aag s \<rbrakk> \<Longrightarrow> label_owns_asid_slot aag (pasSubject aag) asid"
unfolding label_owns_asid_slot_def
by (clarsimp simp: pas_refined_refl)
lemma asid_pool_into_aag:
"\<lbrakk>kheap s p = Some (ArchObj (arch_kernel_obj.ASIDPool pool)); pool r = Some p'; pas_refined aag s \<rbrakk>
\<Longrightarrow> (pasObjectAbs aag p, Control, pasObjectAbs aag p') \<in> pasPolicy aag"
apply (rule pas_refined_mem [rotated], assumption)
apply (rule sta_vref)
apply (fastforce simp add: state_vrefs_def vs_refs_no_global_pts_def intro!: graph_ofI)
done
lemma asid_pool_uniqueness:
"\<lbrakk> ([VSRef (ucast (asid_high_bits_of asid)) None] \<rhd> p) s;
arm_asid_table (arch_state s) (asid_high_bits_of asid') = Some p;
invs s; \<forall>pt. \<not> ko_at (ArchObj (PageTable pt)) p s \<rbrakk>
\<Longrightarrow> asid_high_bits_of asid' = asid_high_bits_of asid"
apply (drule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp)
apply (drule vs_lookup_atI, drule valid_vs_lookupD[OF vs_lookup_pages_vs_lookupI], clarsimp)
apply (clarsimp dest!: obj_ref_elemD)
apply (drule(1) unique_table_refsD[where cps="caps_of_state s", rotated])
apply simp
apply (clarsimp simp: vs_cap_ref_def table_cap_ref_def up_ucast_inj_eq
split: vmpage_size.splits option.splits cap.splits arch_cap.splits)+
done
lemma perform_asid_pool_invocation_pas_refined [wp]:
"\<lbrace>pas_refined aag and invs and valid_apinv api and K (authorised_asid_pool_inv aag api)\<rbrace>
perform_asid_pool_invocation api
\<lbrace>\<lambda>_. pas_refined aag\<rbrace>"
apply (simp add: perform_asid_pool_invocation_def)
apply (rule hoare_pre)
apply (wp get_cap_auth_wp [where aag = aag] | wpc)+
apply (clarsimp simp: authorised_asid_pool_inv_def cap_links_asid_slot_def is_subject_asid_into_loas aag_cap_auth_def)
apply (clarsimp simp: cap_auth_conferred_def is_cap_simps is_page_cap_def auth_graph_map_mem
pas_refined_all_auth_is_owns pas_refined_refl cli_no_irqs
dest!: graph_ofD)
apply (clarsimp split: if_split_asm)
apply (clarsimp simp add: pas_refined_refl auth_graph_map_def2
mask_asid_low_bits_ucast_ucast[symmetric]
valid_apinv_def obj_at_def)
apply (drule(2) asid_pool_uniqueness)
apply (simp add: obj_at_def)
apply (case_tac "asid = 0", simp_all add: pas_refined_refl)[1]
apply (simp add: asid_low_high_bits[rotated, OF arg_cong[where f=ucast]])
apply (clarsimp simp: obj_at_def)
apply (frule (2) asid_pool_into_aag)
apply (drule kheap_eq_state_vrefsD)
apply (clarsimp simp: auth_graph_map_def2 pas_refined_refl)
apply (clarsimp simp: pas_refined_def vs_refs_no_global_pts_def)
apply (erule subsetD, erule state_asids_to_policy_aux.intros,
simp add: split_def, rule image_eqI[rotated], erule graph_ofI)
apply simp
done
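(* Page directory invocations are always authorised. *)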
definition
authorised_page_directory_inv :: "'a PAS \<Rightarrow> page_directory_invocation \<Rightarrow> bool"
where
"authorised_page_directory_inv aag pdi \<equiv> True"
definition
authorised_arch_inv :: "'a PAS \<Rightarrow> arch_invocation \<Rightarrow> bool"
where
"authorised_arch_inv aag ai \<equiv> case ai of
InvokePageTable pti \<Rightarrow> authorised_page_table_inv aag pti
| InvokePageDirectory pdi \<Rightarrow> authorised_page_directory_inv aag pdi
| InvokePage pgi \<Rightarrow> authorised_page_inv aag pgi
| InvokeASIDControl aci \<Rightarrow> authorised_asid_control_inv aag aci
| InvokeASIDPool api \<Rightarrow> authorised_asid_pool_inv aag api"
crunch respects [wp]: perform_page_directory_invocation "integrity aag X st"
(ignore: do_machine_op)
lemma invoke_arch_respects:
"\<lbrace>integrity aag X st and K (authorised_arch_inv aag ai) and pas_refined aag and invs and valid_arch_inv ai and is_subject aag \<circ> cur_thread\<rbrace>
arch_perform_invocation ai
\<lbrace>\<lambda>s. integrity aag X st\<rbrace>"
apply (simp add: arch_perform_invocation_def)
apply (rule hoare_pre)
apply (wp perform_page_table_invocation_respects perform_page_invocation_respects
perform_asid_control_invocation_respects perform_asid_pool_invocation_respects
| wpc)+
apply (auto simp: authorised_arch_inv_def valid_arch_inv_def)
done
crunch pas_refined [wp]: perform_page_directory_invocation "pas_refined aag"
lemma invoke_arch_pas_refined:
"\<lbrace>pas_refined aag and pas_cur_domain aag and invs and ct_active and valid_arch_inv ai and K (authorised_arch_inv aag ai)\<rbrace>
arch_perform_invocation ai
\<lbrace>\<lambda>s. pas_refined aag\<rbrace>"
apply (simp add: arch_perform_invocation_def valid_arch_inv_def)
apply (rule hoare_pre)
apply (wp | wpc)+
apply (fastforce simp add: authorised_arch_inv_def)
done
(* FIXME: move *)
lemma invs_equal_kernel_mappings[elim!]: "invs s \<Longrightarrow> equal_kernel_mappings s"
by (simp add: invs_def valid_state_def)
lemma create_mapping_entries_authorised_slots [wp]:
"\<lbrace>\<exists>\<rhd> pd and invs and pas_refined aag
and K (is_subject aag pd \<and> is_aligned pd pd_bits
\<and> vmsz_aligned vptr vmpage_size \<and> vptr < kernel_base
\<and> (\<forall>a\<in>vspace_cap_rights_to_auth rights. \<forall>p\<in>ptr_range (ptrFromPAddr base) (pageBitsForSize vmpage_size). aag_has_auth_to aag a p))\<rbrace>
create_mapping_entries base vptr vmpage_size rights attrib pd
\<lbrace>\<lambda>rv s. authorised_slots aag rv\<rbrace>, -"
unfolding authorised_slots_def
apply (rule hoare_gen_asmE)
apply (cases vmpage_size)
apply (wp lookup_pt_slot_authorised
| simp add: pte_ref_simps | fold validE_R_def)+
apply (auto simp: pd_bits_def pageBits_def)[1]
apply (wp lookup_pt_slot_authorised3 | simp add: pte_ref_simps | fold validE_R_def)+
apply (auto simp: pd_bits_def pageBits_def vmsz_aligned_def intro: invs_equal_kernel_mappings)[1]
apply (wp | simp)+
apply (auto simp: pde_ref2_def lookup_pd_slot_pd)[1]
apply (wp | simp)+
apply (subst p_0x3C_shift, rule lookup_pd_slot_aligned_6, simp)
apply (simp add: pd_bits_def pageBits_def)
apply (auto simp: pde_ref2_def vmsz_aligned_def lookup_pd_slot_add_eq)
done
lemma x_t2n_sub_1_neg_mask:
"\<lbrakk> is_aligned x n; n \<le> m \<rbrakk>
\<Longrightarrow> x + 2 ^ n - 1 && ~~ mask m = x && ~~ mask m"
apply (erule is_aligned_get_word_bits)
apply (rule trans, rule mask_lower_twice[symmetric], assumption)
apply (subst add_diff_eq[symmetric], subst is_aligned_add_helper, assumption)
apply simp+
apply (simp add: mask_def power_overflow)
done
lemma pageBitsForSize_le_t28:
"pageBitsForSize sz \<le> 28"
by (cases sz, simp_all)
lemma pageBitsForSize_le_t29:
"pageBitsForSize sz \<le> 29"
by (cases sz, simp_all)
lemmas vmsz_aligned_t2n_neg_mask
= x_t2n_sub_1_neg_mask[OF _ pageBitsForSize_le_t29, folded vmsz_aligned_def]
lemma decode_arch_invocation_authorised:
"\<lbrace>invs and pas_refined aag
and cte_wp_at (diminished (cap.ArchObjectCap cap)) slot
and (\<lambda>s. \<forall>(cap, slot) \<in> set excaps. cte_wp_at (diminished cap) slot s)
and K (\<forall>(cap, slot) \<in> {(cap.ArchObjectCap cap, slot)} \<union> set excaps.
aag_cap_auth aag (pasObjectAbs aag (fst slot)) cap \<and> is_subject aag (fst slot)
\<and> (\<forall>v \<in> cap_asid' cap. is_subject_asid aag v))\<rbrace>
arch_decode_invocation label msg x_slot slot cap excaps
\<lbrace>\<lambda>rv s. authorised_arch_inv aag rv\<rbrace>,-"
unfolding arch_decode_invocation_def authorised_arch_inv_def aag_cap_auth_def
apply (rule hoare_pre)
apply (simp add: split_def Let_def
cong: cap.case_cong arch_cap.case_cong if_cong option.case_cong split del: if_split)
apply (wp select_wp whenE_throwError_wp check_vp_wpR
find_pd_for_asid_authority2
| wpc
| simp add: authorised_asid_control_inv_def authorised_page_inv_def
authorised_page_directory_inv_def
del: hoare_post_taut hoare_True_E_R
split del: if_split)+
apply (clarsimp simp: authorised_asid_pool_inv_def authorised_page_table_inv_def
neq_Nil_conv invs_psp_aligned invs_arch_objs cli_no_irqs)
apply (drule diminished_cte_wp_at_valid_cap, clarsimp+)
apply (cases cap, simp_all)
-- "asid pool"
apply ((clarsimp simp: split_def cap_auth_conferred_def is_page_cap_def
pas_refined_all_auth_is_owns asid_high_bits_of_add_ucast
valid_cap_simps cap_links_asid_slot_def
label_owns_asid_slot_def pas_refined_refl )+)[1]
-- "ControlCap"
apply (clarsimp simp: neq_Nil_conv split_def valid_cap_simps)
apply (cases excaps, simp_all)[1]
apply (clarsimp simp: neq_Nil_conv aag_has_auth_to_Control_eq_owns)
apply (drule cte_wp_at_valid_objs_valid_cap, clarsimp)
apply (clarsimp simp: valid_cap_def cap_aligned_def)
apply (clarsimp simp: is_cap_simps cap_auth_conferred_def pas_refined_all_auth_is_owns aag_cap_auth_def)
-- "PageCap"
apply (clarsimp simp: valid_cap_simps cli_no_irqs)
apply (cases "invocation_type label", simp_all)
apply (rename_tac archlabel)
apply (case_tac archlabel, simp_all)
-- "Map"
apply (clarsimp simp: cap_auth_conferred_def is_cap_simps is_page_cap_def pas_refined_all_auth_is_owns)
apply (rule conjI)
apply (clarsimp simp: cap_auth_conferred_def is_page_cap_def pas_refined_all_auth_is_owns aag_cap_auth_def cli_no_irqs cap_links_asid_slot_def)
apply (rule conjI, fastforce)
apply (simp only: linorder_not_le kernel_base_less_observation
vmsz_aligned_t2n_neg_mask simp_thms)
apply (clarsimp simp: cap_auth_conferred_def is_page_cap_def
pas_refined_all_auth_is_owns cli_no_irqs aag_cap_auth_def
exI vspace_cap_rights_to_auth_def mask_vm_rights_def
validate_vm_rights_def vm_read_write_def vm_read_only_def
vm_kernel_only_def)
-- "Remap"
apply (clarsimp simp: cap_auth_conferred_def
is_page_cap_def pas_refined_all_auth_is_owns)
apply (rule conjI, fastforce)
apply clarsimp
apply (drule (1) bspec)
apply (erule bspec)
apply (clarsimp simp: vspace_cap_rights_to_auth_def mask_vm_rights_def
validate_vm_rights_def vm_read_write_def vm_read_only_def
vm_kernel_only_def
split: if_split_asm)
-- "Unmap"
apply (simp add: aag_cap_auth_def cli_no_irqs)
-- "PageTableCap"
apply (cases "invocation_type label", simp_all)
apply (rename_tac archlabel)
apply (case_tac archlabel, simp_all)
-- "PTMap"
apply (clarsimp simp: aag_cap_auth_def cli_no_irqs cap_links_asid_slot_def cap_auth_conferred_def is_page_cap_def
pde_ref2_def pas_refined_all_auth_is_owns pas_refined_refl pd_shifting [folded pd_bits_14] )
-- "Unmap"
apply (rename_tac word option archlabel)
apply (clarsimp simp: aag_cap_auth_def cli_no_irqs cap_links_asid_slot_def cap_auth_conferred_def is_page_cap_def
pde_ref2_def pas_refined_all_auth_is_owns pas_refined_refl )
apply (subgoal_tac "x && ~~ mask pt_bits = word")
apply simp
apply (clarsimp simp: valid_cap_simps cap_aligned_def split: if_split_asm)
apply (subst (asm) upto_enum_step_subtract)
apply (subgoal_tac "is_aligned word pt_bits")
apply (simp add: is_aligned_no_overflow)
apply (simp add: pt_bits_def pageBits_def)
apply (simp add: word_minus_1 minus_one_norm)
apply (subst (asm) upto_enum_step_red [where us = 2, simplified])
apply (simp add: pt_bits_def pageBits_def word_bits_conv)
apply (simp add: pt_bits_def pageBits_def word_bits_conv)
apply clarsimp
apply (subst is_aligned_add_helper)
apply (simp add: pt_bits_def pageBits_def)
apply (erule word_less_power_trans_ofnat [where k = 2, simplified])
apply (simp add: pt_bits_def pageBits_def)
apply (simp add: pt_bits_def pageBits_def word_bits_conv)
apply simp
done
crunch pas_refined[wp]: invalidate_asid_entry "pas_refined aag"
(wp: crunch_wps simp: crunch_simps)
crunch pas_refined[wp]: flush_space "pas_refined aag"
(wp: crunch_wps simp: crunch_simps)
lemma delete_asid_pas_refined[wp]:
"\<lbrace>pas_refined aag\<rbrace> delete_asid asid pd \<lbrace>\<lambda>rv. pas_refined aag\<rbrace>"
apply (simp add: delete_asid_def)
apply (wp | wpc)+
apply (clarsimp simp add: split_def Ball_def obj_at_def)
apply (rule conjI)
apply (clarsimp dest!: auth_graph_map_memD graph_ofD)
apply (erule pas_refined_mem[OF sta_vref, rotated])
apply (fastforce simp: state_vrefs_def vs_refs_no_global_pts_def
image_def graph_of_def split: if_split_asm)
apply (clarsimp simp: pas_refined_def dest!: graph_ofD)
apply (erule subsetD, erule state_asids_to_policy_aux.intros)
apply (fastforce simp: state_vrefs_def vs_refs_no_global_pts_def
graph_of_def image_def split: if_split_asm)
done
lemma delete_asid_pool_pas_refined [wp]:
"\<lbrace>pas_refined aag\<rbrace> delete_asid_pool param_a param_b \<lbrace>\<lambda>_. pas_refined aag\<rbrace>"
unfolding delete_asid_pool_def
apply (wp | wpc | simp)+
apply (rule_tac Q = "\<lambda>_ s. pas_refined aag s \<and> asid_table = arm_asid_table (arch_state s)" in hoare_post_imp)
apply clarsimp
apply (erule pas_refined_clear_asid)
apply (wp mapM_wp' | simp)+
done
crunch respects[wp]: invalidate_asid_entry "integrity aag X st"
crunch respects[wp]: flush_space "integrity aag X st"
(ignore: do_machine_op simp: invalidateTLB_ASID_def cleanCaches_PoU_def dsb_def clean_D_PoU_def invalidate_I_PoU_def do_machine_op_bind)
lemma delete_asid_pool_respects[wp]:
"\<lbrace> integrity aag X st and K (\<forall>asid'.
asid' \<noteq> 0 \<and> asid_high_bits_of asid' = asid_high_bits_of x \<longrightarrow>
is_subject_asid aag asid')\<rbrace>
delete_asid_pool x y
\<lbrace> \<lambda>_. integrity aag X st \<rbrace>"
unfolding delete_asid_pool_def
apply simp
apply (wp mapM_wp[OF _ subset_refl] | simp)+
done
lemma pas_refined_asid_mem:
"\<lbrakk> v \<in> state_asids_to_policy aag s; pas_refined aag s \<rbrakk>
\<Longrightarrow> v \<in> pasPolicy aag"
by (auto simp add: pas_refined_def)
lemma set_asid_pool_respects_clear:
"\<lbrace>(\<lambda>s. \<forall>pool'. ko_at (ArchObj (arch_kernel_obj.ASIDPool pool')) ptr s
\<longrightarrow> (\<forall>x. pool x \<noteq> pool' x \<longrightarrow> pool x = None \<and> aag_has_auth_to aag Control (the (pool' x))))
and integrity aag X st\<rbrace>
set_asid_pool ptr pool \<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
apply (simp add: set_asid_pool_def set_object_def)
apply (wp get_object_wp)
apply (clarsimp simp: obj_at_def
split: Structures_A.kernel_object.split_asm arch_kernel_obj.split_asm)
apply (erule integrity_trans)
apply (clarsimp simp: integrity_def)
apply (rule tro_asidpool_clear, simp+)
done
lemma delete_asid_respects:
"\<lbrace>integrity aag X st and pas_refined aag and invs and K (is_subject aag pd)\<rbrace>
delete_asid asid pd
\<lbrace>\<lambda>rv. integrity aag X st\<rbrace>"
by (wpsimp wp: set_asid_pool_respects_clear hoare_vcg_all_lift
simp: obj_at_def pas_refined_refl delete_asid_def)
end
end
|
# Checks whether an expression has domain bounds or a shape; ModuleApply checks for either one.
export Has := module ()
export ModuleApply := proc(e, $)::truefalse;
Bound(e) or Shape(e);
end proc;
export Bound := proc(e, $)::truefalse;
assigned(Domain:-ExtBound[op(0,e)]) and
evalb(e :: Domain:-ExtBound[op(0,e)]:-MapleType);
end proc;
export Shape := proc(e, $)::truefalse;
assigned(Domain:-ExtShape[op(0,e)]) and
evalb(e :: Domain:-ExtShape[op(0,e)]:-MapleType);
end proc;
end module;
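# Hypothetical usage sketch (not part of the module): assuming the enclosing Domain module
# registers an operator such as `Int` in ExtBound with a matching MapleType, one would expect
#   Has(Int(f(x), x = 0 .. 1));   # true  -- op(0,...) is a registered bound operator
#   Has:-Shape(g(x));             # false -- unless `g` is registered in Domain:-ExtShape
# Expressions whose head is registered in neither table yield false.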
|
[STATEMENT]
lemma all_edges_finite: "finite V \<Longrightarrow> finite (all_edges V)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. finite V \<Longrightarrow> finite (all_edges V)
[PROOF STEP]
unfolding all_edges_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. finite V \<Longrightarrow> finite (mk_uedge ` {uv \<in> V \<times> V. fst uv \<noteq> snd uv})
[PROOF STEP]
by simp |
[STATEMENT]
lemma algebraic_int_cnj [intro]:
assumes "algebraic_int x"
shows "algebraic_int (cnj x)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. algebraic_int (cnj x)
[PROOF STEP]
proof -
[PROOF STATE]
proof (state)
goal (1 subgoal):
1. algebraic_int (cnj x)
[PROOF STEP]
from assms
[PROOF STATE]
proof (chain)
picking this:
algebraic_int x
[PROOF STEP]
obtain p where p: "lead_coeff p = 1" "\<forall>i. coeff p i \<in> \<int>" "poly p x = 0"
[PROOF STATE]
proof (prove)
using this:
algebraic_int x
goal (1 subgoal):
1. (\<And>p. \<lbrakk>lead_coeff p = 1; \<forall>i. coeff p i \<in> \<int>; poly p x = 0\<rbrakk> \<Longrightarrow> thesis) \<Longrightarrow> thesis
[PROOF STEP]
by (auto simp: algebraic_int.simps)
[PROOF STATE]
proof (state)
this:
lead_coeff p = 1
\<forall>i. coeff p i \<in> \<int>
poly p x = 0
goal (1 subgoal):
1. algebraic_int (cnj x)
[PROOF STEP]
show ?thesis
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. algebraic_int (cnj x)
[PROOF STEP]
proof
[PROOF STATE]
proof (state)
goal (3 subgoals):
1. lead_coeff ?p = 1
2. \<forall>i. coeff ?p i \<in> \<int>
3. poly ?p (cnj x) = 0
[PROOF STEP]
show "poly (map_poly cnj p) (cnj x) = 0"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. poly (map_poly cnj p) (cnj x) = 0
[PROOF STEP]
using p
[PROOF STATE]
proof (prove)
using this:
lead_coeff p = 1
\<forall>i. coeff p i \<in> \<int>
poly p x = 0
goal (1 subgoal):
1. poly (map_poly cnj p) (cnj x) = 0
[PROOF STEP]
by simp
[PROOF STATE]
proof (state)
this:
poly (map_poly cnj p) (cnj x) = 0
goal (2 subgoals):
1. lead_coeff (map_poly cnj p) = 1
2. \<forall>i. coeff (map_poly cnj p) i \<in> \<int>
[PROOF STEP]
show "lead_coeff (map_poly cnj p) = 1"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. lead_coeff (map_poly cnj p) = 1
[PROOF STEP]
using p
[PROOF STATE]
proof (prove)
using this:
lead_coeff p = 1
\<forall>i. coeff p i \<in> \<int>
poly p x = 0
goal (1 subgoal):
1. lead_coeff (map_poly cnj p) = 1
[PROOF STEP]
by (simp add: coeff_map_poly degree_map_poly)
[PROOF STATE]
proof (state)
this:
lead_coeff (map_poly cnj p) = 1
goal (1 subgoal):
1. \<forall>i. coeff (map_poly cnj p) i \<in> \<int>
[PROOF STEP]
show "\<forall>i. coeff (map_poly cnj p) i \<in> \<int>"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<forall>i. coeff (map_poly cnj p) i \<in> \<int>
[PROOF STEP]
using p
[PROOF STATE]
proof (prove)
using this:
lead_coeff p = 1
\<forall>i. coeff p i \<in> \<int>
poly p x = 0
goal (1 subgoal):
1. \<forall>i. coeff (map_poly cnj p) i \<in> \<int>
[PROOF STEP]
by (auto simp: coeff_map_poly)
[PROOF STATE]
proof (state)
this:
\<forall>i. coeff (map_poly cnj p) i \<in> \<int>
goal:
No subgoals!
[PROOF STEP]
qed
[PROOF STATE]
proof (state)
this:
algebraic_int (cnj x)
goal:
No subgoals!
[PROOF STEP]
qed |
(*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the BSD 2-Clause license. Note that NO WARRANTY is provided.
* See "LICENSE_BSD2.txt" for details.
*
* @TAG(NICTA_BSD)
*)
(* License: BSD, terms see file ./LICENSE *)
theory CSepFrame
imports SepTactic
begin
class heap_state_type'
instance heap_state_type' \<subseteq> type ..
consts
hst_mem :: "'a::heap_state_type' \<Rightarrow> heap_mem"
hst_mem_update :: "(heap_mem \<Rightarrow> heap_mem) \<Rightarrow> 'a::heap_state_type' \<Rightarrow> 'a"
hst_htd :: "'a::heap_state_type' \<Rightarrow> heap_typ_desc"
hst_htd_update :: "(heap_typ_desc \<Rightarrow> heap_typ_desc) \<Rightarrow> 'a::heap_state_type' \<Rightarrow> 'a"
class heap_state_type = heap_state_type' +
assumes hst_htd_htd_update [simp]: "hst_htd (hst_htd_update d s) = d (hst_htd s)"
assumes hst_mem_mem_update [simp]: "hst_mem (hst_mem_update h s) = h (hst_mem s)"
assumes hst_htd_mem_update [simp]: "hst_htd (hst_mem_update h s) = hst_htd s"
assumes hst_mem_htd_update [simp]: "hst_mem (hst_htd_update d s) = hst_mem s"
translations
"s\<lparr> hst_mem := x\<rparr>" <= "CONST hst_mem_update (K_record x) s"
"s\<lparr> hst_htd := x\<rparr>" <= "CONST hst_htd_update (K_record x) s"
definition lift_hst :: "'a::heap_state_type' \<Rightarrow> heap_state" where
"lift_hst s \<equiv> lift_state (hst_mem s,hst_htd s)"
definition
point_eq_mod :: "('a \<Rightarrow> 'b) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'a set \<Rightarrow> bool"
where
"point_eq_mod f g X \<equiv> \<forall>x. x \<notin> X \<longrightarrow> f x = g x"
definition
exec_fatal :: "('s,'b,'c) com \<Rightarrow> ('s,'b,'c) body \<Rightarrow> 's \<Rightarrow> bool"
where
"exec_fatal C \<Gamma> s \<equiv> (\<exists>f. \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Fault f) \<or>
(\<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Stuck)"
definition
exec_fatal_seq :: "('s,'b,'c) Language.com \<Rightarrow> ('s,'b,'c) Semantic.body \<Rightarrow> 's \<Rightarrow> bool"
where
"exec_fatal_seq C \<Gamma> s \<equiv> (\<exists>f. \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Fault f) \<or>
(\<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Stuck)"
definition
restrict_htd :: "'s::heap_state_type' \<Rightarrow> s_addr set \<Rightarrow> 's"
where
"restrict_htd s X \<equiv> s\<lparr> hst_htd := restrict_s (hst_htd s)sorry X \<rparr>"
definition
restrict_safe_OK :: "'s \<Rightarrow> 's \<Rightarrow> ('s \<Rightarrow> ('s,'c) xstate) \<Rightarrow>
s_addr set \<Rightarrow> ('s::heap_state_type','b,'c) com \<Rightarrow> ('s,'b,'c) body \<Rightarrow> bool"
where
"restrict_safe_OK s t f X C \<Gamma> \<equiv>
\<Gamma> \<turnstile>\<^sub>p \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> f (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X"
definition
restrict_safe_OK_seq :: "'s \<Rightarrow> 's \<Rightarrow> ('s \<Rightarrow> ('s,'c) xstate) \<Rightarrow>
s_addr set \<Rightarrow> ('s::heap_state_type','b,'c) Language.com \<Rightarrow> ('s,'b,'c) Semantic.body \<Rightarrow> bool"
where
"restrict_safe_OK_seq s t f X C \<Gamma> \<equiv>
\<Gamma> \<turnstile> \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> f (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X"
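(* restrict_safe: for every restriction X of the heap type description, either the execution
   can be replayed on the restricted state with all heap-state changes confined to X, or the
   restricted execution already fails. *)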
definition
restrict_safe :: "'s \<Rightarrow> ('s,'c) xstate \<Rightarrow>
('s::heap_state_type','b,'c) com \<Rightarrow> ('s,'b,'c) body \<Rightarrow> bool"
where
"restrict_safe s t C \<Gamma> \<equiv> \<forall>X. (case t of
Normal t' \<Rightarrow> restrict_safe_OK s t' Normal X C \<Gamma> |
Abrupt t' \<Rightarrow> restrict_safe_OK s t' Abrupt X C \<Gamma> |
_ \<Rightarrow> False) \<or>
exec_fatal C \<Gamma> (restrict_htd s X)"
definition
restrict_safe_seq :: "'s \<Rightarrow> ('s,'c) xstate \<Rightarrow>
('s::heap_state_type','b,'c) Language.com \<Rightarrow> ('s,'b,'c) Semantic.body \<Rightarrow> bool"
where
"restrict_safe_seq s t C \<Gamma> \<equiv> \<forall>X. (case t of
Normal t' \<Rightarrow> restrict_safe_OK_seq s t' Normal X C \<Gamma> |
Abrupt t' \<Rightarrow> restrict_safe_OK_seq s t' Abrupt X C \<Gamma> |
_ \<Rightarrow> False) \<or>
exec_fatal_seq C \<Gamma> (restrict_htd s X)"
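(* mem_safe: every execution of C from a Normal state is restrict_safe. *)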
definition
mem_safe :: "('s::{heap_state_type',type},'b,'c) com \<Rightarrow>
('s,'b,'c) body \<Rightarrow> bool"
where
"mem_safe C \<Gamma> \<equiv> \<forall>s t. \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> t \<longrightarrow>
restrict_safe s t C \<Gamma>"
definition
mem_safe_seq :: "('s::{heap_state_type',type},'b,'c) Language.com \<Rightarrow>
('s,'b,'c) Semantic.body \<Rightarrow> bool"
where
"mem_safe_seq C \<Gamma> \<equiv> \<forall>s t. \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> t \<longrightarrow>
restrict_safe_seq s t C \<Gamma>"
definition
point_eq_mod_safe :: "'s::{heap_state_type',type} set \<Rightarrow>
('s \<Rightarrow> 's) \<Rightarrow> ('s \<Rightarrow> s_addr \<Rightarrow> 'c) \<Rightarrow> bool"
where
"point_eq_mod_safe P f g \<equiv> \<forall>s X. restrict_htd s X \<in> P \<longrightarrow>
point_eq_mod (g (f s)) (g s) X"
definition
comm_restrict :: "('s::{heap_state_type',type} \<Rightarrow> 's) \<Rightarrow> 's \<Rightarrow> s_addr set \<Rightarrow> bool"
where
"comm_restrict f s X \<equiv> f (restrict_htd s X) = restrict_htd (f s) X"
definition
comm_restrict_safe :: "'s set \<Rightarrow> ('s::{heap_state_type',type} \<Rightarrow> 's) \<Rightarrow> bool"
where
"comm_restrict_safe P f \<equiv> \<forall>s X. restrict_htd s X \<in> P \<longrightarrow>
comm_restrict f s X"
definition htd_ind :: "('a::{heap_state_type',type} \<Rightarrow> 'b) \<Rightarrow> bool" where
"htd_ind f \<equiv> \<forall>x s. f s = f (hst_htd_update x s)"
definition mono_guard :: "'s::{heap_state_type',type} set \<Rightarrow> bool" where
"mono_guard P \<equiv> \<forall>s X. restrict_htd s X \<in> P \<longrightarrow> s \<in> P"
definition expr_htd_ind :: "'s::{heap_state_type',type} set \<Rightarrow> bool" where
"expr_htd_ind P \<equiv> \<forall>d s. s\<lparr> hst_htd := d \<rparr> \<in> P = (s \<in> P)"
primrec intra_safe_seq :: "('s::heap_state_type','b,'c) Language.com \<Rightarrow> bool"
where
"intra_safe_seq Language.Skip = True"
| "intra_safe_seq (Language.Basic f) = (comm_restrict_safe UNIV f \<and>
point_eq_mod_safe UNIV f (\<lambda>s. lift_state (hst_mem s,hst_htd s)))"
| "intra_safe_seq (Language.Spec r) = (\<forall>\<Gamma>. mem_safe_seq (Language.Spec r) (\<Gamma> :: ('s,'b,'c) Semantic.body))"
| "intra_safe_seq (Language.Seq C D) = (intra_safe_seq C \<and> intra_safe_seq D)"
| "intra_safe_seq (Language.Cond P C D) = (expr_htd_ind P \<and> intra_safe_seq C \<and> intra_safe_seq D)"
| "intra_safe_seq (Language.While P C) = (expr_htd_ind P \<and> intra_safe_seq C)"
| "intra_safe_seq (Language.Call p) = True"
| "intra_safe_seq (Language.DynCom f) = (htd_ind f \<and> (\<forall>s. intra_safe_seq (f s)))"
| "intra_safe_seq (Language.Guard f P C) = (mono_guard P \<and> (case C of
Language.Basic g \<Rightarrow> comm_restrict_safe P g \<and> (*point_eq_mod_safe P g hst_mem \<and> *)
point_eq_mod_safe P g (\<lambda>s. lift_state (hst_mem s,hst_htd s))
| _ \<Rightarrow> intra_safe_seq C))"
| "intra_safe_seq Language.Throw = True"
| "intra_safe_seq (Language.Catch C D) = (intra_safe_seq C \<and> intra_safe_seq D)"
primrec intra_safe :: "('s::heap_state_type','b,'c) com \<Rightarrow> bool"
where
"intra_safe Skip = True"
| "intra_safe (Basic f) = (comm_restrict_safe UNIV f \<and>
point_eq_mod_safe UNIV f (\<lambda>s. lift_state (hst_mem s,hst_htd s)))"
| "intra_safe (Spec r) = (\<forall>\<Gamma>. mem_safe (Spec r) (\<Gamma> :: ('s,'b,'c) body))"
| "intra_safe (Seq C D) = (intra_safe C \<and> intra_safe D)"
| "intra_safe (Cond P C D) = (expr_htd_ind P \<and> intra_safe C \<and> intra_safe D)"
| "intra_safe (While P C) = (expr_htd_ind P \<and> intra_safe C)"
| "intra_safe (Call p) = True"
| "intra_safe (DynCom f) = (htd_ind f \<and> (\<forall>s. intra_safe (f s)))"
| "intra_safe (Guard f P C) = (mono_guard P \<and> (case C of
Basic g \<Rightarrow> comm_restrict_safe P g \<and> (*point_eq_mod_safe P g hst_mem \<and> *)
point_eq_mod_safe P g (\<lambda>s. lift_state (hst_mem s,hst_htd s))
| _ \<Rightarrow> intra_safe C))"
| "intra_safe Throw = True"
| "intra_safe (Catch C D) = (intra_safe C \<and> intra_safe D)"
| "intra_safe (Await b C) = (expr_htd_ind b \<and> intra_safe_seq C)"
lemma assumes
a0:"\<Gamma>\<^sub>\<not>\<^sub>a \<turnstile> \<langle>c', s\<rangle> \<Rightarrow> t" and a1:"c' = sequential C\<^sub>p" and
a0':"noawaits C\<^sub>p"
shows "\<Gamma> \<turnstile>\<^sub>p \<langle>C\<^sub>p, s\<rangle> \<Rightarrow> t"
using a0 a0' a1
proof(induct arbitrary: C\<^sub>p)
case (Skip s)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Guard s g c t f)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (GuardFault s g f c)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (FaultProp c f)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Basic f s)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Spec s t r)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (SpecStuck s r)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Seq c\<^sub>1 s s' c\<^sub>2 t)
then obtain c1p c2p where "C\<^sub>p = Seq c1p c2p \<and>
c\<^sub>1 = sequential c1p \<and>
c\<^sub>2 = sequential c2p \<and> noawaits c1p \<and> noawaits c2p"
by (cases C\<^sub>p, auto)
then show ?case using Seq SemanticCon.exec.intros(8) by metis
next
case (CondTrue s b c\<^sub>1 t c\<^sub>2)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (CondFalse s b c\<^sub>2 t c\<^sub>1)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (WhileTrue s b c s' t)
then show ?case apply (cases C\<^sub>p) apply (auto simp: SemanticCon.exec.intros)
by (metis SemanticCon.exec.WhileTrue WhileTrue.prems(1) WhileTrue.prems(2))
next
case (WhileFalse s b c)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Call p bdy s t)
then have "C\<^sub>p = LanguageCon.com.Call p"
by (cases C\<^sub>p,auto)
moreover obtain bdyp where " \<Gamma> p = Some (bdyp) \<and> bdy = sequential bdyp \<and> noawaits bdyp"
using Call lam1_seq no_await_some_some_p
by (metis None_not_eq no_await_some_no_await)
ultimately show ?case using Call(3)
by (metis SemanticCon.exec.Call)
next
case (CallUndefined p s)
then have "C\<^sub>p = LanguageCon.com.Call p"
by (cases C\<^sub>p,auto)
moreover have " \<Gamma> p = None"
using CallUndefined lam1_seq no_await_some_some_p
sorry
ultimately show ?case using CallUndefined
sorry
next
case (StuckProp c)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (DynCom c s t)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (Throw s)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (AbruptProp c s)
then show ?case by (cases C\<^sub>p, auto simp:SemanticCon.exec.intros)
next
case (CatchMatch c\<^sub>1 s s' c\<^sub>2 t)
then obtain c1p c2p where "C\<^sub>p = Catch c1p c2p \<and>
c\<^sub>1 = sequential c1p \<and>
c\<^sub>2 = sequential c2p \<and> noawaits c1p \<and> noawaits c2p"
by (cases C\<^sub>p, auto)
then show ?case using CatchMatch SemanticCon.exec.intros by metis
next
case (CatchMiss c\<^sub>1 s t c\<^sub>2)
then obtain c1p c2p where "C\<^sub>p = Catch c1p c2p \<and>
c\<^sub>1 = sequential c1p \<and>
c\<^sub>2 = sequential c2p \<and> noawaits c1p \<and> noawaits c2p"
by (cases C\<^sub>p, auto)
then show ?case using CatchMiss SemanticCon.exec.intros
by (metis Semantic.isAbr_def SemanticCon.isAbrE)
qed
lemma l1:"noawaits C\<^sub>p \<Longrightarrow> intra_safe C\<^sub>p \<Longrightarrow> intra_safe_seq (sequential C\<^sub>p)"
proof(induct C\<^sub>p)
case Skip
then show ?case by auto
next
case (Basic x)
then show ?case by auto
next
case (Spec x)
then show ?case apply auto unfolding mem_safe_seq_def mem_safe_def exec_fatal_def restrict_safe_OK_def
exec_fatal_seq_def restrict_safe_OK_seq_def restrict_safe_def restrict_safe_seq_def
apply auto
by (smt Semantic.exec.simps SemanticCon.exec_Normal_elim_cases(7) exec_seq_parallel parallel.simps(3)
xstate.simps(16) xstate.simps(19))
next
case (Seq C\<^sub>p1 C\<^sub>p2)
then show ?case
by simp
next
case (Cond x1 C\<^sub>p1 C\<^sub>p2)
then show ?case by simp
next
case (While x1 C\<^sub>p)
then show ?case by simp
next
case (Call x)
then show ?case by simp
next
case (DynCom x)
then show ?case
by (simp add: htd_ind_def)
next
case (Guard x1 x2a C\<^sub>p)
then show ?case
proof (cases "\<exists>g. C\<^sub>p = Basic g")
case False with Guard show ?thesis
      by - (clarsimp, split com.splits, auto)
next
case True with Guard show ?thesis
by (force simp: restrict_safe_def restrict_safe_OK_def
point_eq_mod_safe_def
intro: exec.intros
elim: exec_Normal_elim_cases)
qed
next
case Throw
then show ?case by auto
next
case (Catch C\<^sub>p1 C\<^sub>p2)
then show ?case by auto
next
case (Await x1 x2a)
then show ?case by auto
qed
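(* Lift the heap-state operations of the globals component to the full
   state_ext record, so that such record states form a heap_state_type. *)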
instance state_ext :: (heap_state_type',type) heap_state_type' ..
overloading
hs_mem_state \<equiv> hst_mem
hs_mem_update_state \<equiv> hst_mem_update
hs_htd_state \<equiv> hst_htd
hs_htd_update_state \<equiv> hst_htd_update
begin
definition hs_mem_state [simp]: "hs_mem_state \<equiv> hst_mem \<circ> globals"
definition hs_mem_update_state [simp]: "hs_mem_update_state \<equiv> globals_update \<circ> hst_mem_update"
definition hs_htd_state[simp]: "hs_htd_state \<equiv> hst_htd \<circ> globals"
definition hs_htd_update_state [simp]: "hs_htd_update_state \<equiv> globals_update \<circ> hst_htd_update"
end
instance state_ext :: (heap_state_type,type) heap_state_type
apply intro_classes
apply auto
done
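(* intra_deps_seq collects the procedure names occurring syntactically in a
   sequential (Language) command; for DynCom the union is taken over all
   states. *)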
primrec
intra_deps_seq :: "('s','b,'c) Language.com \<Rightarrow> 'b set"
where
"intra_deps_seq Language.Skip = {}"
| "intra_deps_seq (Language.Basic f) = {}"
| "intra_deps_seq (Language.Spec r) = {}"
| "intra_deps_seq (Language.Seq C D) = (intra_deps_seq C \<union> intra_deps_seq D)"
| "intra_deps_seq (Language.Cond P C D) = (intra_deps_seq C \<union> intra_deps_seq D)"
| "intra_deps_seq (Language.While P C) = intra_deps_seq C"
| "intra_deps_seq (Language.Call p) = {p}"
| "intra_deps_seq (Language.DynCom f) = \<Union>{intra_deps_seq (f s) | s. True}"
| "intra_deps_seq (Language.Guard f P C) = intra_deps_seq C"
| "intra_deps_seq Language.Throw = {}"
| "intra_deps_seq (Language.Catch C D) = (intra_deps_seq C \<union> intra_deps_seq D)"
inductive_set
proc_deps_seq :: "('s','b,'c) Language.com \<Rightarrow> ('s,'b,'c) Semantic.body \<Rightarrow> 'b set"
for "C" :: "('s','b,'c) Language.com"
and "\<Gamma>" :: "('s,'b,'c) Semantic.body"
where
"x \<in> intra_deps_seq C \<Longrightarrow> x \<in> proc_deps_seq C \<Gamma>"
| "\<lbrakk> x \<in> proc_deps_seq C \<Gamma>; \<Gamma> x = Some D; y \<in> intra_deps_seq D \<rbrakk> \<Longrightarrow> y \<in> proc_deps_seq C \<Gamma>"
primrec
intra_deps :: "('s','b,'c) com \<Rightarrow> 'b set"
where
"intra_deps Skip = {}"
| "intra_deps (Basic f) = {}"
| "intra_deps (Spec r) = {}"
| "intra_deps (Seq C D) = (intra_deps C \<union> intra_deps D)"
| "intra_deps (Cond P C D) = (intra_deps C \<union> intra_deps D)"
| "intra_deps (While P C) = intra_deps C"
| "intra_deps (Call p) = {p}"
| "intra_deps (DynCom f) = \<Union>{intra_deps (f s) | s. True}"
| "intra_deps (Guard f P C) = intra_deps C"
| "intra_deps Throw = {}"
| "intra_deps (Catch C D) = (intra_deps C \<union> intra_deps D)"
| "intra_deps (Await b C) = intra_deps_seq C"
inductive_set
proc_deps :: "('s','b,'c) com \<Rightarrow> ('s,'b,'c) body \<Rightarrow> 'b set"
for "C" :: "('s','b,'c) com"
and "\<Gamma>" :: "('s,'b,'c) body"
where
"x \<in> intra_deps C \<Longrightarrow> x \<in> proc_deps C \<Gamma>"
| "\<lbrakk> x \<in> proc_deps C \<Gamma>; \<Gamma> x = Some D; y \<in> intra_deps D \<rbrakk> \<Longrightarrow> y \<in> proc_deps C \<Gamma>"
text {* ---- *}
lemma point_eq_mod_refl [simp]:
"point_eq_mod f f X"
by (simp add: point_eq_mod_def)
lemma point_eq_mod_subs:
"\<lbrakk> point_eq_mod f g Y; Y \<subseteq> X \<rbrakk> \<Longrightarrow> point_eq_mod f g X"
by (force simp: point_eq_mod_def)
lemma point_eq_mod_trans:
"\<lbrakk> point_eq_mod x y X; point_eq_mod y z X \<rbrakk> \<Longrightarrow> point_eq_mod x z X"
by (force simp: point_eq_mod_def)
lemma mem_safe_NormalD:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; mem_safe C \<Gamma>;
\<not> exec_fatal C \<Gamma> (restrict_htd s X) \<rbrakk> \<Longrightarrow>
(\<Gamma> \<turnstile>\<^sub>p \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> Normal (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X)"
by (force simp: mem_safe_def restrict_safe_def restrict_safe_OK_def)
lemma mem_safe_seq_NormalD:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; mem_safe_seq C \<Gamma>;
\<not> exec_fatal_seq C \<Gamma> (restrict_htd s X) \<rbrakk> \<Longrightarrow>
(\<Gamma> \<turnstile> \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> Normal (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X)"
by (force simp: mem_safe_seq_def restrict_safe_seq_def restrict_safe_OK_seq_def)
lemma mem_safe_AbruptD:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Abrupt t; mem_safe C \<Gamma>;
\<not> exec_fatal C \<Gamma> (restrict_htd s X) \<rbrakk> \<Longrightarrow>
(\<Gamma> \<turnstile>\<^sub>p \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> Abrupt (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X)"
by (force simp: mem_safe_def restrict_safe_def restrict_safe_OK_def)
lemma mem_safe_seq_AbruptD:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Abrupt t; mem_safe_seq C \<Gamma>;
\<not> exec_fatal_seq C \<Gamma> (restrict_htd s X) \<rbrakk> \<Longrightarrow>
(\<Gamma> \<turnstile> \<langle>C,(Normal (restrict_htd s X))\<rangle> \<Rightarrow> Abrupt (restrict_htd t X) \<and>
point_eq_mod (lift_state (hst_mem t,hst_htd t))
(lift_state (hst_mem s,hst_htd s)) X)"
by (force simp: mem_safe_seq_def restrict_safe_seq_def restrict_safe_OK_seq_def)
lemma mem_safe_FaultD:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Fault f; mem_safe C \<Gamma> \<rbrakk> \<Longrightarrow>
exec_fatal C \<Gamma> (restrict_htd s X)"
by (force simp: mem_safe_def restrict_safe_def)
lemma mem_safe_StuckD:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Stuck; mem_safe C \<Gamma> \<rbrakk> \<Longrightarrow>
exec_fatal C \<Gamma> (restrict_htd s X)"
by (force simp: mem_safe_def restrict_safe_def)
lemma mem_safe_seq_FaultD:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Fault f; mem_safe_seq C \<Gamma> \<rbrakk> \<Longrightarrow>
exec_fatal_seq C \<Gamma> (restrict_htd s X)"
by (force simp: mem_safe_seq_def restrict_safe_seq_def)
lemma mem_safe_seq_StuckD:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Stuck; mem_safe_seq C \<Gamma> \<rbrakk> \<Longrightarrow>
exec_fatal_seq C \<Gamma> (restrict_htd s X)"
by (force simp: mem_safe_seq_def restrict_safe_seq_def)
lemma lift_state_d_restrict [simp]:
"lift_state (h,(restrict_s d X)) = lift_state (h,d) |` X"
by (auto simp: lift_state_def restrict_map_def restrict_s_def intro!: ext split: s_heap_index.splits)
lemma dom_merge_restrict [simp]:
"(x ++ y) |` dom y = y"
by (force simp: restrict_map_def None_com intro: ext)
lemma dom_compl_restrict [simp]:
"x |` (UNIV - dom x) = empty"
by (force simp: restrict_map_def intro: ext)
lemma lift_state_point_eq_mod:
"\<lbrakk> point_eq_mod (lift_state (h,d)) (lift_state (h',d')) X \<rbrakk> \<Longrightarrow>
lift_state (h,d) |` (UNIV - X) =
lift_state (h',d') |` (UNIV - X)"
by (auto simp: point_eq_mod_def restrict_map_def intro: ext)
lemma htd_'_update_ind [simp]:
"htd_ind f \<Longrightarrow> f (hst_htd_update x s) = f s"
by (simp add: htd_ind_def)
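(* sep_frame' and sep_frame: the separation-logic frame rule.  A Hoare triple
   whose pre- and postcondition constrain only the footprint described by P
   and Q can be extended with an arbitrary frame R, provided the command is
   mem_safe_seq and the auxiliary functions are independent of the heap type
   description (htd_ind). *)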
lemma sep_frame':
assumes orig_spec: "\<forall>s. \<Gamma> \<turnstile> \<lbrace>s. P (f \<acute>(\<lambda>x. x)) (lift_hst \<acute>(\<lambda>x. x))\<rbrace>
C
\<lbrace>Q (g s \<acute>(\<lambda>x. x)) (lift_hst \<acute>(\<lambda>x. x))\<rbrace>"
and hi_f: "htd_ind f" and hi_g: "htd_ind g"
and hi_g': "\<forall>s. htd_ind (g s)"
and safe: "mem_safe_seq (C::('s::heap_state_type,'b,'c) Language.com) \<Gamma>"
shows "\<forall>s. \<Gamma> \<turnstile> \<lbrace>s. (P (f \<acute>(\<lambda>x. x)) \<and>\<^sup>* R (h \<acute>(\<lambda>x. x))) (lift_hst \<acute>(\<lambda>x. x))\<rbrace>
C
\<lbrace>(Q (g s \<acute>(\<lambda>x. x)) \<and>\<^sup>* R (h s)) (lift_hst \<acute>(\<lambda>x. x))\<rbrace>"
proof (rule, rule hoare_complete, simp only: valid_def, clarify)
fix ta x
assume ev: "\<Gamma>\<turnstile> \<langle>C,Normal x\<rangle> \<Rightarrow> ta" and
pre: "(P (f x) \<and>\<^sup>* R (h x)) (lift_hst x)"
then obtain s\<^sub>0 and s\<^sub>1 where pre_P: "P (f x) s\<^sub>0" and pre_R: "R (h x) s\<^sub>1" and
disj: "s\<^sub>0 \<bottom> s\<^sub>1" and m: "lift_hst x = s\<^sub>1 ++ s\<^sub>0"
by (clarsimp simp: sep_conj_def map_ac_simps)
with orig_spec hi_f have nofault: "\<not> exec_fatal_seq C \<Gamma>
(restrict_htd x (dom s\<^sub>0))"
by (force simp: exec_fatal_seq_def image_def lift_hst_def cvalid_def valid_def
restrict_htd_def
dest: hoare_sound)
show "ta \<in> Normal ` {t. (Q (g x t) \<and>\<^sup>* R (h x)) (lift_hst t)}"
proof (cases ta)
case (Normal s)
moreover with ev safe nofault have ev': "\<Gamma> \<turnstile>
\<langle>C,Normal (x\<lparr> hst_htd := (restrict_s (hst_htd x) (dom s\<^sub>0)) \<rparr>)\<rangle> \<Rightarrow>
Normal (s\<lparr> hst_htd := (restrict_s (hst_htd s) (dom s\<^sub>0)) \<rparr>)" and
"point_eq_mod (lift_state (hst_mem s,hst_htd s))
(lift_state (hst_mem x,hst_htd x)) (dom s\<^sub>0)"
by (auto simp: restrict_htd_def dest: mem_safe_seq_NormalD)
moreover with m disj have "s\<^sub>1 = lift_hst s |` (UNIV - dom s\<^sub>0)"
apply -
apply(clarsimp simp: lift_hst_def)
apply(subst lift_state_point_eq_mod)
apply(drule sym)
apply clarsimp
apply fast
apply(simp add: lift_hst_def lift_state_point_eq_mod map_add_restrict)
apply(subst restrict_map_subdom, auto dest: map_disjD)
done
ultimately show ?thesis using orig_spec hi_f hi_g hi_g' pre_P pre_R m
by (force simp: cvalid_def valid_def image_def lift_hst_def
map_disj_def
intro: sep_conjI dest: hoare_sound)
next
case (Abrupt s) with ev safe nofault orig_spec pre_P hi_f m show ?thesis
by - (simp, drule spec, drule hoare_sound, drule_tac X="dom s\<^sub>0" in
mem_safe_seq_AbruptD, assumption+,
force simp: valid_def cvalid_def lift_hst_def restrict_htd_def)
next
case (Fault f) with ev safe nofault show ?thesis
by (force dest: mem_safe_seq_FaultD)
next
case Stuck with ev safe nofault show ?thesis
by (force dest: mem_safe_seq_StuckD)
qed
qed
lemma sep_frame:
"\<lbrakk> k = (\<lambda>s. (hst_mem s,hst_htd s));
\<forall>s. \<Gamma> \<turnstile> \<lbrace>s. P (f \<acute>(\<lambda>x. x)) (lift_state (k \<acute>(\<lambda>x. x)))\<rbrace>
C
\<lbrace>Q (g s \<acute>(\<lambda>x. x)) (lift_state (k \<acute>(\<lambda>x. x)))\<rbrace>;
htd_ind f; htd_ind g; \<forall>s. htd_ind (g s);
mem_safe_seq (C::('s::heap_state_type,'b,'c) Language.com) \<Gamma> \<rbrakk> \<Longrightarrow>
\<forall>s. \<Gamma> \<turnstile> \<lbrace>s. (P (f \<acute>(\<lambda>x. x)) \<and>\<^sup>* R (h \<acute>(\<lambda>x. x))) (lift_state (k \<acute>(\<lambda>x. x)))\<rbrace>
C
\<lbrace>(Q (g s \<acute>(\<lambda>x. x)) \<and>\<^sup>* R (h s)) (lift_state (k \<acute>(\<lambda>x. x)))\<rbrace>"
apply(simp only:)
apply(fold lift_hst_def)
apply(erule (4) sep_frame')
done
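(* Typical use (sketch only; the assertions P, Q, R are placeholders): from a
   verified local specification
     \<forall>s. \<Gamma> \<turnstile> \<lbrace>s. P (lift_hst \<acute>(\<lambda>x. x))\<rbrace> C \<lbrace>Q (lift_hst \<acute>(\<lambda>x. x))\<rbrace>
   together with mem_safe_seq C \<Gamma> and the htd_ind side conditions, sep_frame
   yields the same triple with an arbitrary frame R conjoined via \<and>\<^sup>* to both
   pre- and postcondition, which is what lets local specifications be reused
   in larger heap contexts. *)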
lemma point_eq_mod_safe [simp]:
"\<lbrakk> point_eq_mod_safe P f g; restrict_htd s X \<in> P; x \<notin> X \<rbrakk> \<Longrightarrow>
g (f s) x = (g s) x"
apply (simp add: point_eq_mod_safe_def point_eq_mod_def)
apply(case_tac x, force)
done
lemma comm_restrict_safe [simp]:
"\<lbrakk> comm_restrict_safe P f; restrict_htd s X \<in> P \<rbrakk> \<Longrightarrow>
restrict_htd (f s ) X = f (restrict_htd s X)"
by (simp add: comm_restrict_safe_def comm_restrict_def)
lemma mono_guardD:
"\<lbrakk> mono_guard P; restrict_htd s X \<in> P \<rbrakk> \<Longrightarrow> s \<in> P"
by (unfold mono_guard_def, fast)
lemma expr_htd_ind:
"expr_htd_ind P \<Longrightarrow> restrict_htd s X \<in> P = (s \<in> P)"
by (simp add: expr_htd_ind_def restrict_htd_def)
lemma exec_fatal_Seq:
"exec_fatal C \<Gamma> s \<Longrightarrow> exec_fatal (C;;D) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_Seq2:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; exec_fatal D \<Gamma> t \<rbrakk> \<Longrightarrow> exec_fatal (C;;D) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_seq_Seq:
"exec_fatal_seq C \<Gamma> s \<Longrightarrow> exec_fatal_seq (Language.Seq C D) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_seq_Seq2:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; exec_fatal_seq D \<Gamma> t \<rbrakk> \<Longrightarrow> exec_fatal_seq (Language.Seq C D) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_Cond:
"exec_fatal (Cond P C D) \<Gamma> s = (if s \<in> P then exec_fatal C \<Gamma> s else
exec_fatal D \<Gamma> s)"
by (force simp: exec_fatal_def intro: exec.intros
elim: exec_Normal_elim_cases)
lemma exec_fatal_seq_Cond:
"exec_fatal_seq (Language.Cond P C D) \<Gamma> s = (if s \<in> P then exec_fatal_seq C \<Gamma> s else
exec_fatal_seq D \<Gamma> s)"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros
elim: Semantic.exec_Normal_elim_cases)
lemma exec_fatal_While:
"\<lbrakk> exec_fatal C \<Gamma> s; s \<in> P \<rbrakk> \<Longrightarrow> exec_fatal (While P C) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_While2:
"\<lbrakk> exec_fatal (While P C) \<Gamma> t; \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; s \<in> P \<rbrakk> \<Longrightarrow>
exec_fatal (While P C) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros
elim: exec_Normal_elim_cases)
lemma exec_fatal_seq_While:
"\<lbrakk> exec_fatal_seq C \<Gamma> s; s \<in> P \<rbrakk> \<Longrightarrow> exec_fatal_seq (Language.While P C) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_seq_While2:
"\<lbrakk> exec_fatal_seq (Language.While P C) \<Gamma> t; \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Normal t; s \<in> P \<rbrakk> \<Longrightarrow>
exec_fatal_seq (Language.While P C) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_Call:
"\<lbrakk> \<Gamma> p = Some C; exec_fatal C \<Gamma> s \<rbrakk> \<Longrightarrow> exec_fatal (Call p) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_seq_Call:
"\<lbrakk> \<Gamma> p = Some C; exec_fatal_seq C \<Gamma> s \<rbrakk> \<Longrightarrow> exec_fatal_seq (Language.Call p) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_DynCom:
"exec_fatal (f s) \<Gamma> s \<Longrightarrow> exec_fatal (DynCom f) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_seq_DynCom:
"exec_fatal_seq (f s) \<Gamma> s \<Longrightarrow> exec_fatal_seq (Language.DynCom f) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_Guard:
"exec_fatal (Guard f P C) \<Gamma> s = (s \<in> P \<longrightarrow> exec_fatal C \<Gamma> s)"
proof (cases "s \<in> P")
case True thus ?thesis
by (force simp: exec_fatal_def intro: exec.intros
elim: exec_Normal_elim_cases)
next
case False thus ?thesis
by (force simp: exec_fatal_def intro: exec.intros)
qed
lemma exec_fatal_seq_Guard:
"exec_fatal_seq (Language.Guard f P C) \<Gamma> s = (s \<in> P \<longrightarrow> exec_fatal_seq C \<Gamma> s)"
proof (cases "s \<in> P")
case True thus ?thesis
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros
elim: Semantic.exec_Normal_elim_cases)
next
case False thus ?thesis
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
qed
(* lemma exec_fatal_Await:
"exec_fatal (Await b C) \<Gamma> s = (s \<in> P \<longrightarrow> exec_fatal_seq C \<Gamma>\<^sub>\<not>\<^sub>a s)"
sorry
by (force simp: exec_fatal_def exec_fatal_seq_def intro: exec.intros Semantic.exec.intros
elim: exec_Normal_elim_cases Semantic.exec_Normal_elim_cases)
*)
lemma restrict_safe_seq_Guard:
assumes restrict: "restrict_safe_seq s t C \<Gamma>"
shows "restrict_safe_seq s t (Language.Guard f P C) \<Gamma>"
using restrict
by (cases t; force simp: restrict_safe_seq_def restrict_safe_OK_seq_def exec_fatal_seq_Guard
intro: Semantic.exec.intros)
lemma restrict_safe_seq_Guard2:
"\<lbrakk> s \<notin> P; mono_guard P \<rbrakk> \<Longrightarrow> restrict_safe_seq s (Fault f) (Language.Guard f P C) \<Gamma>"
by (force simp: restrict_safe_seq_def exec_fatal_seq_def intro: Semantic.exec.intros
dest: mono_guardD)
lemma restrict_safe_Guard:
assumes restrict: "restrict_safe s t C \<Gamma>"
shows "restrict_safe s t (Guard f P C) \<Gamma>"
proof (cases t)
case (Normal s) with restrict show ?thesis
by (force simp: restrict_safe_def restrict_safe_OK_def exec_fatal_Guard
intro: exec.intros)
next
case (Abrupt s) with restrict show ?thesis
by (force simp: restrict_safe_def restrict_safe_OK_def exec_fatal_Guard
intro: exec.intros)
next
case (Fault f) with restrict show ?thesis
by (force simp: restrict_safe_def exec_fatal_Guard)
next
case Stuck with restrict show ?thesis
by (force simp: restrict_safe_def exec_fatal_Guard)
qed
lemma restrict_safe_Guard2:
"\<lbrakk> s \<notin> P; mono_guard P \<rbrakk> \<Longrightarrow> restrict_safe s (Fault f) (Guard f P C) \<Gamma>"
by (force simp: restrict_safe_def exec_fatal_def intro: exec.intros
dest: mono_guardD)
lemma exec_fatal_Catch:
"exec_fatal C \<Gamma> s \<Longrightarrow> exec_fatal (TRY C CATCH D END) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_Catch2:
"\<lbrakk> \<Gamma> \<turnstile>\<^sub>p \<langle>C,Normal s\<rangle> \<Rightarrow> Abrupt t; exec_fatal D \<Gamma> t \<rbrakk> \<Longrightarrow>
exec_fatal (TRY C CATCH D END) \<Gamma> s"
by (force simp: exec_fatal_def intro: exec.intros)
lemma exec_fatal_seq_Catch:
"exec_fatal_seq C \<Gamma> s \<Longrightarrow> exec_fatal_seq (Language.Catch C D ) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
lemma exec_fatal_seq_Catch2:
"\<lbrakk> \<Gamma> \<turnstile> \<langle>C,Normal s\<rangle> \<Rightarrow> Abrupt t; exec_fatal_seq D \<Gamma> t \<rbrakk> \<Longrightarrow>
exec_fatal_seq (Language.Catch C D ) \<Gamma> s"
by (force simp: exec_fatal_seq_def intro: Semantic.exec.intros)
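(* Executions of intra_safe_seq commands are restrictable: for every footprint
   X, running the command from the state restricted to X either may fail
   (exec_fatal_seq) or reaches the correspondingly restricted final state while
   leaving the heap outside X unchanged. *)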
lemma intra_safe_restrict_seq [rule_format]:
assumes safe_env: "\<And>n C. \<Gamma> n = Some C \<Longrightarrow> intra_safe_seq C" and
exec: "\<Gamma> \<turnstile> \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<forall>s'. s = Normal s' \<longrightarrow> intra_safe_seq C \<longrightarrow> restrict_safe_seq s' t C \<Gamma>"
using exec
proof induct
case (Skip s) thus ?case
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def intro: Semantic.exec.intros)
next
case (Guard s' P C t f) show ?case
proof (cases "\<exists>g. C = Language.Basic g")
case False with Guard show ?thesis
by - (clarsimp, split Language.com.splits, auto dest: restrict_safe_seq_Guard)
next
case True with Guard show ?thesis
by (cases t) (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def
point_eq_mod_safe_def exec_fatal_seq_Guard
intro: Semantic.exec.intros
elim: Semantic.exec_Normal_elim_cases,
(fast elim: Semantic.exec_Normal_elim_cases)+)
qed
next
case (GuardFault C f P s) thus ?case
by (force dest: restrict_safe_seq_Guard2)
next
case (FaultProp C f) thus ?case by simp
next
case (Basic f s) thus ?case
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def point_eq_mod_safe_def
intro: Semantic.exec.intros)
next
case (Spec r s t) thus ?case
apply (clarsimp simp: mem_safe_seq_def)
apply (fastforce intro: Semantic.exec.Spec)
done
next
case (SpecStuck r s) thus ?case
apply clarsimp
apply (erule_tac x=\<Gamma> in allE)
apply (simp add: mem_safe_seq_def)
apply (erule allE, erule allE, erule impE, erule Semantic.exec.SpecStuck)
apply assumption
done
next
case (Seq C s sa D ta) show ?case
proof (cases sa)
case (Normal s') with Seq show ?thesis
by (cases ta)
(clarsimp simp: restrict_safe_seq_def restrict_safe_OK_seq_def,
(drule_tac x=X in spec)+, auto intro: Semantic.exec.intros point_eq_mod_trans
exec_fatal_seq_Seq exec_fatal_seq_Seq2)+
next
case (Abrupt s') with Seq show ?thesis
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def
intro: Semantic.exec.intros dest: exec_fatal_seq_Seq
elim: Semantic.exec_Normal_elim_cases)
next
case (Fault f) with Seq show ?thesis
by (force simp: restrict_safe_seq_def dest: exec_fatal_seq_Seq)
next
case Stuck with Seq show ?thesis
by (force simp: restrict_safe_seq_def dest: exec_fatal_seq_Seq)
qed
next
case (CondTrue s P C t D) thus ?case
by (cases t)
(auto simp: restrict_safe_seq_def restrict_safe_OK_seq_def exec_fatal_seq_Cond
intro: Semantic.exec.intros dest: expr_htd_ind split: if_split_asm)
next
case (CondFalse s P C t D) thus ?case
by (cases t)
(auto simp: restrict_safe_seq_def restrict_safe_OK_seq_def exec_fatal_seq_Cond
intro: Semantic.exec.intros dest: expr_htd_ind split: if_split_asm)
next
case (WhileTrue P C s s' t) show ?case
proof (cases s')
case (Normal sa) with WhileTrue show ?thesis
by (cases t)
(clarsimp simp: restrict_safe_seq_def restrict_safe_OK_seq_def,
(drule_tac x=X in spec)+, auto simp: expr_htd_ind intro: Semantic.exec.intros
point_eq_mod_trans exec_fatal_seq_While exec_fatal_seq_While2)+
next
case (Abrupt sa) with WhileTrue show ?thesis
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def expr_htd_ind
intro: Semantic.exec.intros exec_fatal_seq_While
elim: Semantic.exec_Normal_elim_cases)
next
case (Fault f) with WhileTrue show ?thesis
by (force simp: restrict_safe_seq_def expr_htd_ind intro: exec_fatal_seq_While)
next
case Stuck with WhileTrue show ?thesis
by (force simp: restrict_safe_seq_def expr_htd_ind intro: exec_fatal_seq_While)
qed
next
case (WhileFalse P C s) thus ?case
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def expr_htd_ind
intro: Semantic.exec.intros)
next
case (Call C p s t) with safe_env show ?case
by (cases t)
(auto simp: restrict_safe_seq_def restrict_safe_OK_seq_def
intro: exec_fatal_seq_Call Semantic.exec.intros)
next
case (CallUndefined p s) thus ?case
by (force simp: restrict_safe_seq_def exec_fatal_seq_def intro: Semantic.exec.intros)
next
case (StuckProp C) thus ?case by simp
next
case (DynCom f s t) thus ?case
by (cases t)
(auto simp: restrict_safe_seq_def restrict_safe_OK_seq_def
restrict_htd_def
intro!: Semantic.exec.intros exec_fatal_seq_DynCom)
next
case (Throw s) thus ?case
by (force simp: restrict_safe_seq_def restrict_safe_OK_seq_def intro: Semantic.exec.intros)
next
case (AbruptProp C s) thus ?case by simp
next
case (CatchMatch C D s s' t) thus ?case
by (cases t)
(clarsimp simp: restrict_safe_seq_def, drule_tac x=X in spec,
auto simp: restrict_safe_OK_seq_def intro: Semantic.exec.intros point_eq_mod_trans
dest: exec_fatal_seq_Catch exec_fatal_seq_Catch2)+
next
case (CatchMiss C s t D) thus ?case
by (cases t)
(clarsimp simp: restrict_safe_seq_def, drule_tac x=X in spec,
auto simp: restrict_safe_OK_seq_def intro: Semantic.exec.intros
dest: exec_fatal_seq_Catch)+
qed
lemma intra_safe_to_intra_safe_seq: assumes a0:"\<And>n C. \<Gamma> n = Some C \<Longrightarrow> intra_safe C"
shows "\<And>n C. \<Gamma>\<^sub>\<not>\<^sub>a n = Some C \<Longrightarrow> intra_safe_seq C"
proof-
fix n C
assume "\<Gamma>\<^sub>\<not>\<^sub>a n = Some C"
then obtain C\<^sub>p where "C = sequential C\<^sub>p \<and> \<Gamma> n = Some C\<^sub>p \<and> noawaits C\<^sub>p"
by (meson lam1_seq no_await_some_no_await no_await_some_some_p not_Some_eq)
moreover have "intra_safe C\<^sub>p" using a0 calculation by auto
ultimately show "intra_safe_seq C" using l1 by fastforce
qed
lemma intra_safe_restrict [rule_format]:
assumes safe_env: "\<And>n C. \<Gamma> n = Some C \<Longrightarrow> intra_safe C" and
exec: "\<Gamma> \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<forall>s'. s = Normal s' \<longrightarrow> intra_safe C \<longrightarrow> restrict_safe s' t C \<Gamma>"
using exec
proof induct
case (Skip s) thus ?case
by (force simp: restrict_safe_def restrict_safe_OK_def intro: exec.intros)
next
case (Guard s' P C t f) show ?case
proof (cases "\<exists>g. C = Basic g")
case False with Guard show ?thesis
by - (clarsimp, split com.splits, auto dest: restrict_safe_Guard)
next
case True with Guard show ?thesis
by (cases t) (force simp: restrict_safe_def restrict_safe_OK_def
point_eq_mod_safe_def exec_fatal_Guard
intro: exec.intros
elim: exec_Normal_elim_cases,
(fast elim: exec_Normal_elim_cases)+)
qed
next
case (GuardFault C f P s) thus ?case
by (force dest: restrict_safe_Guard2)
next
case (FaultProp C f) thus ?case by simp
next
case (Basic f s) thus ?case
by (force simp: restrict_safe_def restrict_safe_OK_def point_eq_mod_safe_def
intro: exec.intros)
next
case (Spec r s t) thus ?case
apply (clarsimp simp: mem_safe_def)
apply (fastforce intro: exec.Spec)
done
next
case (SpecStuck r s) thus ?case
apply clarsimp
apply (erule_tac x=\<Gamma> in allE)
apply (simp add: mem_safe_def)
apply (erule allE, erule allE, erule impE, erule exec.SpecStuck)
apply assumption
done
next
case (Seq C s sa D ta) show ?case
proof (cases sa)
case (Normal s') with Seq show ?thesis
by (cases ta)
(clarsimp simp: restrict_safe_def restrict_safe_OK_def,
(drule_tac x=X in spec)+, auto intro: exec.intros point_eq_mod_trans
exec_fatal_Seq exec_fatal_Seq2)+
next
case (Abrupt s') with Seq show ?thesis
by (force simp: restrict_safe_def restrict_safe_OK_def
intro: exec.intros dest: exec_fatal_Seq
elim: exec_Normal_elim_cases)
next
case (Fault f) with Seq show ?thesis
by (force simp: restrict_safe_def dest: exec_fatal_Seq
elim: exec_Normal_elim_cases)
next
case Stuck with Seq show ?thesis
by (force simp: restrict_safe_def dest: exec_fatal_Seq
elim: exec_Normal_elim_cases)
qed
next
case (CondTrue s P C t D) thus ?case
by (cases t)
(auto simp: restrict_safe_def restrict_safe_OK_def exec_fatal_Cond
intro: exec.intros dest: expr_htd_ind split: if_split_asm)
next
case (CondFalse s P C t D) thus ?case
by (cases t)
(auto simp: restrict_safe_def restrict_safe_OK_def exec_fatal_Cond
intro: exec.intros dest: expr_htd_ind split: if_split_asm)
next
case (WhileTrue P C s s' t) show ?case
proof (cases s')
case (Normal sa) with WhileTrue show ?thesis
by (cases t)
(clarsimp simp: restrict_safe_def restrict_safe_OK_def,
(drule_tac x=X in spec)+, auto simp: expr_htd_ind intro: exec.intros
point_eq_mod_trans exec_fatal_While exec_fatal_While2)+
next
case (Abrupt sa) with WhileTrue show ?thesis
by (force simp: restrict_safe_def restrict_safe_OK_def expr_htd_ind
intro: exec.intros exec_fatal_While
elim: exec_Normal_elim_cases)
next
case (Fault f) with WhileTrue show ?thesis
by (force simp: restrict_safe_def expr_htd_ind intro: exec_fatal_While)
next
case Stuck with WhileTrue show ?thesis
by (force simp: restrict_safe_def expr_htd_ind intro: exec_fatal_While)
qed
next
case (AwaitTrue s b \<Gamma>\<^sub>p ca t)
then have "\<And>n C. \<Gamma>\<^sub>p n = Some C \<Longrightarrow> intra_safe_seq C" using intra_safe_to_intra_safe_seq
by (metis safe_env)
then have restrict_safe_seq:"\<forall>s'. Normal s = Normal s' \<longrightarrow> intra_safe_seq ca \<longrightarrow> restrict_safe_seq s' t ca \<Gamma>\<^sub>p"
using intra_safe_restrict_seq AwaitTrue by blast
then show ?case
apply (cases t)
unfolding restrict_safe_def restrict_safe_OK_def exec_fatal_def exec_fatal_seq_def restrict_safe_OK_seq_def restrict_safe_seq_def
apply auto
by (metis (no_types, lifting) exec.intros(12) AwaitTrue.hyps(1) AwaitTrue.hyps(2) expr_htd_ind)+
next
case (AwaitFalse s b ca) thus ?case
by (simp add: exec.AwaitFalse expr_htd_ind restrict_safe_OK_def restrict_safe_def)
next
case (WhileFalse P C s) thus ?case
by (force simp: restrict_safe_def restrict_safe_OK_def expr_htd_ind
intro: exec.intros)
next
case (Call C p s t) with safe_env show ?case
by (cases t)
(auto simp: restrict_safe_def restrict_safe_OK_def
intro: exec_fatal_Call exec.intros)
next
case (CallUndefined p s) thus ?case
by (force simp: restrict_safe_def exec_fatal_def intro: exec.intros)
next
case (StuckProp C) thus ?case by simp
next
case (DynCom f s t) thus ?case
by (cases t)
(auto simp: restrict_safe_def restrict_safe_OK_def
restrict_htd_def
intro!: exec.intros exec_fatal_DynCom)
next
case (Throw s) thus ?case
by (force simp: restrict_safe_def restrict_safe_OK_def intro: exec.intros)
next
case (AbruptProp C s) thus ?case by simp
next
case (CatchMatch C D s s' t) thus ?case
by (cases t)
(clarsimp simp: restrict_safe_def, drule_tac x=X in spec,
auto simp: restrict_safe_OK_def intro: exec.intros point_eq_mod_trans
dest: exec_fatal_Catch exec_fatal_Catch2)+
next
case (CatchMiss C s t D) thus ?case
by (cases t)
(clarsimp simp: restrict_safe_def, drule_tac x=X in spec,
auto simp: restrict_safe_OK_def intro: exec.intros
dest: exec_fatal_Catch)+
qed
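(* If every procedure body in \<Gamma> and the command itself are intra_safe, the
   command is mem_safe with respect to \<Gamma>. *)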
lemma intra_mem_safe:
"\<lbrakk> \<And>n C. \<Gamma> n = Some C \<Longrightarrow> intra_safe C; intra_safe C \<rbrakk> \<Longrightarrow> mem_safe C \<Gamma>"
by (force simp: mem_safe_def intro: intra_safe_restrict)
lemma point_eq_mod_safe_triv:
"(\<And>s. g (f s) = g s) \<Longrightarrow> point_eq_mod_safe P f g"
by (simp add: point_eq_mod_safe_def point_eq_mod_def)
lemma comm_restrict_safe_triv:
"(\<And>s X. f (s\<lparr> hst_htd := restrict_s (hst_htd s) X \<rparr>) =
(f s)\<lparr> hst_htd := restrict_s (hst_htd (f s)) X \<rparr>) \<Longrightarrow> comm_restrict_safe P f"
by (force simp: comm_restrict_safe_def comm_restrict_def restrict_htd_def)
lemma mono_guard_UNIV [simp]:
"mono_guard UNIV"
by (force simp: mono_guard_def)
lemma mono_guard_triv:
"(\<And>s X. s\<lparr> hst_htd := X \<rparr> \<in> g \<Longrightarrow> s \<in> g) \<Longrightarrow> mono_guard g"
by (unfold mono_guard_def, unfold restrict_htd_def, fast)
lemma mono_guard_triv2:
"(\<And>s X. s\<lparr> hst_htd := X \<rparr> \<in> g = ((s::'a::heap_state_type') \<in> g)) \<Longrightarrow>
mono_guard g"
by (unfold mono_guard_def, unfold restrict_htd_def, fast)
lemma dom_restrict_s:
"x \<in> dom_s (restrict_s d X) \<Longrightarrow> x \<in> dom_s d \<and> x \<in> X"
apply(auto simp: restrict_s_def dom_s_def split: if_split_asm)
done
lemma mono_guard_ptr_safe:
"\<lbrakk> \<And>s. d s = hst_htd (s::'a::heap_state_type); htd_ind p \<rbrakk> \<Longrightarrow>
mono_guard {s. ptr_safe (p s) (d s)}"
apply (auto simp: mono_guard_def ptr_safe_def restrict_htd_def )
apply(drule (1) subsetD)
apply(drule dom_restrict_s)
apply simp
done
lemma point_eq_mod_safe_ptr_safe_update:
"\<lbrakk> d = (hst_htd::'a::heap_state_type \<Rightarrow> heap_typ_desc);
m = (\<lambda>s. hst_mem_update (heap_update (p s) ((v s)::'b::mem_type)) s);
h = hst_mem; k = (\<lambda>s. lift_state (h s,d s)); htd_ind p \<rbrakk> \<Longrightarrow>
point_eq_mod_safe {s. ptr_safe (p s) (d s)} m k"
apply (auto simp: point_eq_mod_safe_def point_eq_mod_def ptr_safe_def heap_update_def
restrict_htd_def lift_state_def
intro!: heap_update_nmem_same split: s_heap_index.splits)
apply(subgoal_tac "(a,SIndexVal) \<in> s_footprint (p s)")
apply(drule (1) subsetD)
apply(drule dom_restrict_s, clarsimp)
apply(drule intvlD, clarsimp)
apply(erule s_footprintI2)
done
lemma field_ti_s_sub_typ:
"field_lookup (export_uinfo (typ_info_t TYPE('b::mem_type))) f 0 = Some (typ_uinfo_t TYPE('a),b) \<Longrightarrow>
s_footprint ((Ptr &(p\<rightarrow>f))::'a::mem_type ptr) \<subseteq> s_footprint (p::'b ptr)"
apply(drule field_ti_s_sub)
apply(simp add: s_footprint_def)
done
lemma ptr_safe_mono:
"\<lbrakk> ptr_safe (p::'a::mem_type ptr) d; field_lookup (typ_info_t TYPE('a)) f 0
= Some (t,n); export_uinfo t = typ_uinfo_t TYPE('b) \<rbrakk> \<Longrightarrow>
ptr_safe ((Ptr &(p\<rightarrow>f))::'b::mem_type ptr) d"
apply(simp add: ptr_safe_def)
apply(drule field_lookup_export_uinfo_Some)
apply simp
apply(drule field_ti_s_sub_typ)
apply(erule (1) subset_trans)
done
lemma point_eq_mod_safe_ptr_safe_update_fl:
"\<lbrakk> d = (hst_htd::'a::heap_state_type \<Rightarrow> heap_typ_desc);
m = (\<lambda>s. hst_mem_update (heap_update (Ptr &((p s)\<rightarrow>f)) ((v s)::'b::mem_type)) s);
h = hst_mem; k = (\<lambda>s. lift_state (h s,d s)); htd_ind p;
field_lookup (typ_info_t TYPE('c)) f 0 = Some (t,n);
export_uinfo t = typ_uinfo_t TYPE('b) \<rbrakk> \<Longrightarrow>
point_eq_mod_safe {s. ptr_safe ((p::'a \<Rightarrow> 'c::mem_type ptr) s) (d s)} m k"
apply(drule (3) point_eq_mod_safe_ptr_safe_update)
apply(simp only: htd_ind_def)
apply clarify
apply(clarsimp simp: point_eq_mod_safe_def)
apply(drule_tac x=s in spec)
apply(drule_tac x=X in spec)
apply(erule impE)
apply(erule (2) ptr_safe_mono)
apply simp
done
lemma point_eq_mod_safe_ptr_safe_tag:
"\<lbrakk> d = (hst_htd::'a::heap_state_type \<Rightarrow> heap_typ_desc); h = hst_mem;
m = (\<lambda>s. hst_htd_update (ptr_retyp (p s)) s);
k = (\<lambda>s. lift_state (h s,d s));
htd_ind p \<rbrakk> \<Longrightarrow>
point_eq_mod_safe {s. ptr_safe ((p s)::'b::mem_type ptr) (d s)} m k"
apply(auto simp: point_eq_mod_safe_def point_eq_mod_def ptr_safe_def)
apply(subgoal_tac "(a,b) \<notin> s_footprint (p (restrict_htd s X))")
prefer 2
apply clarsimp
apply(drule (1) subsetD)
apply(clarsimp simp: restrict_htd_def)
apply(drule dom_restrict_s, clarsimp)
apply(thin_tac "P \<notin> Q" for P Q)
apply(auto simp: restrict_htd_def lift_state_def split_def split: s_heap_index.splits split: option.splits)
apply(subst (asm) ptr_retyp_d_eq_fst)
apply(clarsimp split: if_split_asm)
apply(erule notE)
apply(drule intvlD, clarsimp)
apply(erule s_footprintI2)
apply(subst (asm) ptr_retyp_d_eq_fst)
apply(clarsimp split: if_split_asm)
apply(subst (asm) ptr_retyp_d_eq_snd)
apply(clarsimp split: if_split_asm)
apply(subst (asm) ptr_retyp_d_eq_snd)
apply(clarsimp split: if_split_asm)
apply(erule notE)
apply(frule intvlD, clarsimp)
apply(rule s_footprintI)
apply(subst (asm) ptr_retyp_footprint)
apply simp
apply clarsimp
apply(clarsimp simp: list_map_eq split: if_split_asm)
apply(subst (asm) unat_of_nat)
apply(subst (asm) mod_less)
apply(subst len_of_addr_card)
apply(erule less_trans)
apply simp
apply fast
apply assumption
apply(simp add: ptr_retyp_d_eq_snd)
apply(clarsimp split: if_split_asm)
apply(simp add: ptr_retyp_footprint)
apply(clarsimp simp: list_map_eq split: if_split_asm)
apply(erule notE)
apply(drule intvlD, clarsimp)
apply(rule s_footprintI)
apply(subst (asm) unat_of_nat)
apply(subst (asm) mod_less)
apply(subst len_of_addr_card)
apply(erule less_trans)
apply simp
apply assumption+
apply(simp add: ptr_retyp_d_eq_snd)
apply(clarsimp split: if_split_asm)
apply(simp add: ptr_retyp_footprint)
apply(clarsimp simp: list_map_eq split: if_split_asm)
apply(erule notE)
apply(drule intvlD, clarsimp)
apply(rule s_footprintI)
apply(subst (asm) unat_of_nat)
apply(subst (asm) mod_less)
apply(subst len_of_addr_card)
apply(erule less_trans)
apply simp
apply assumption+
apply(simp add: ptr_retyp_d_eq_snd)
apply(clarsimp split: if_split_asm)
apply(simp add: ptr_retyp_footprint)
apply(clarsimp simp: list_map_eq split: if_split_asm)
apply(erule notE)
apply(drule intvlD, clarsimp)
apply(rule s_footprintI)
apply(subst (asm) unat_of_nat)
apply(subst (asm) mod_less)
apply(subst len_of_addr_card)
apply(erule less_trans)
apply simp
apply assumption+
done
lemma comm_restrict_safe_ptr_safe_tag:
fixes d::"'a::heap_state_type \<Rightarrow> heap_typ_desc"
assumes fun_d: "d = hst_htd" and fun_upd:
"m = (\<lambda>s. hst_htd_update (ptr_retyp (p s)) s)" and ind: "htd_ind p" and
upd: "\<And>d d' (s::'a). hst_htd_update (d s) (hst_htd_update (d' s) s) =
hst_htd_update ((d s) \<circ> (d' s)) s"
shows "comm_restrict_safe {s. ptr_safe ((p s)::'b::mem_type ptr) (d s)}
m"
proof (simp only: comm_restrict_safe_def comm_restrict_def, auto)
fix s X
assume "ptr_safe (p (restrict_htd s X)) (d (restrict_htd s X))"
moreover from ind have p: "p (restrict_htd s X) = p s"
by (simp add: restrict_htd_def)
ultimately have "ptr_retyp (p s) (restrict_s (hst_htd s) X) =
restrict_s (ptr_retyp (p s) (hst_htd s)) X" using fun_d
apply -
apply(rule ext)
apply(auto simp: point_eq_mod_safe_def point_eq_mod_def ptr_safe_def)
apply(auto simp: restrict_htd_def )
apply(case_tac "x \<notin> {ptr_val (p s)..+size_of TYPE('b)}")
apply(subst ptr_retyp_d)
apply clarsimp
apply(clarsimp simp: restrict_map_def restrict_s_def)
apply(subst ptr_retyp_d)
apply clarsimp
apply simp
apply(subst ptr_retyp_d)
apply clarsimp
apply simp
apply clarsimp
apply(subst ptr_retyp_footprint)
apply fast
apply(clarsimp simp: restrict_map_def restrict_s_def)
apply(subst ptr_retyp_footprint)
apply fast
apply simp
apply(subst ptr_retyp_footprint)
apply fast
apply(rule)
apply(subgoal_tac "(x,SIndexVal) \<in> s_footprint (p s)")
apply(drule (1) subsetD)
apply(clarsimp simp: dom_s_def)
apply(drule intvlD, clarsimp)
apply(erule s_footprintI2)
apply(rule ext)
apply(clarsimp simp: map_add_def list_map_eq)
apply(subgoal_tac "(x,SIndexTyp y) \<in> s_footprint (p s)")
apply(drule (1) subsetD)
apply(clarsimp simp: dom_s_def split: if_split_asm)
apply(drule intvlD, clarsimp)
apply(rule s_footprintI)
apply(subst (asm) unat_simps)
apply(subst (asm) mod_less)
apply(subst len_of_addr_card)
apply(erule less_trans)
apply simp
apply assumption+
done
hence "((ptr_retyp (p s) \<circ> (%x _. x) (restrict_s (hst_htd s) X)::heap_typ_desc \<Rightarrow> heap_typ_desc) =
(%x _. x) (restrict_s (ptr_retyp (p s) (hst_htd s)) X))"
by - (rule ext, simp)
moreover from upd have "hst_htd_update (ptr_retyp (p s))
(hst_htd_update ((%x _. x) (restrict_s (hst_htd s) X)) s) =
hst_htd_update (((ptr_retyp (p s)) \<circ> ((%x _. x) (restrict_s (hst_htd s) X)))) s" .
moreover from upd have "hst_htd_update ((%x _. x) (restrict_s (ptr_retyp (p s) (hst_htd s)) X))
(hst_htd_update (ptr_retyp (p s)) s) =
hst_htd_update (((%x _. x) (restrict_s ((ptr_retyp (p s) (hst_htd s))) X)) \<circ> (ptr_retyp (p s)))
s" .
ultimately show "m (restrict_htd s X) =
restrict_htd (m s) X" using fun_d fun_upd upd p
by (simp add: restrict_htd_def o_def)
qed
lemmas intra_sc = hrs_comm comp_def hrs_htd_update_htd_update
point_eq_mod_safe_triv comm_restrict_safe_triv mono_guard_triv2
mono_guard_ptr_safe point_eq_mod_safe_ptr_safe_update
point_eq_mod_safe_ptr_safe_tag comm_restrict_safe_ptr_safe_tag
point_eq_mod_safe_ptr_safe_update_fl
declare expr_htd_ind_def [iff]
declare htd_ind_def [iff]
lemma noawaits_parallel:"noawaits (LanguageCon.parallel ca)"
by(induct ca, auto)
(*
lemma "proc_deps_seq A (\<Gamma>\<^sub>\<not>\<^sub>a) \<subseteq> proc_deps (LanguageCon.parallel A) \<Gamma>"
sorry
lemma proc_deps_par:"proc_deps (LanguageCon.parallel ca) (parallel_env \<Gamma>) = proc_deps_seq ca \<Gamma>"
sorry *)
(* lemma "x \<in> intra_deps (Await b ca) \<Longrightarrow>
x \<in> proc_deps (LanguageCon.parallel ca) \<Gamma>"
proof(induct ca)
case Skip
then show ?case by simp
next
case (Basic x)
then show ?case by simp
next
case (Spec x)
then show ?case by simp
next
case (Seq ca1 ca2)
then show ?case by simp
next
case (Cond x1 ca1 ca2)
then show ?case by simp
next
case (While x1 ca)
then show ?case by simp
next
case (Call x)
then show ?case by simp
next
case (DynCom x)
then show ?case by simp
next
case (Guard x1 x2a ca)
then show ?case by simp
next
case Throw
then show ?case by simp
next
case (Catch ca1 ca2)
then show ?case by simp
qed *)
(* lemma procs_deps_Await [simp]:
"proc_deps (Await b ca) \<Gamma> = proc_deps (LanguageCon.parallel ca) \<Gamma>"
proof
show "proc_deps (Await b ca) \<Gamma> \<subseteq> proc_deps (LanguageCon.parallel ca) \<Gamma>"
proof
fix x
assume a0:"x \<in> proc_deps (Await b ca) \<Gamma>"
then show "x \<in> proc_deps (LanguageCon.parallel ca) \<Gamma>"
proof (induct)
case (1 x)
then show ?case sorry
next
case (2 x D y)
then show ?case sorry
qed
qed
next
show "proc_deps (LanguageCon.parallel ca) \<Gamma> \<subseteq> proc_deps (Await b ca) \<Gamma>"
sorry
qed *)
lemma proc_deps_Skip [simp]:
"proc_deps Skip \<Gamma> = {}"
by (force elim: proc_deps.induct)
lemma proc_deps_Basic [simp]:
"proc_deps (Basic f) \<Gamma> = {}"
by (force elim: proc_deps.induct)
lemma proc_deps_Spec [simp]:
"proc_deps (Spec r) \<Gamma> = {}"
by (force elim: proc_deps.induct)
lemma proc_deps_Seq [simp]:
"proc_deps (Seq C D) \<Gamma> = proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
proof
show "proc_deps (C;; D) \<Gamma> \<subseteq> proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
by - (rule, erule proc_deps.induct, auto intro: proc_deps.intros)
next
show "proc_deps C \<Gamma> \<union> proc_deps D \<Gamma> \<subseteq> proc_deps (C;; D) \<Gamma>"
by auto (erule proc_deps.induct, auto intro: proc_deps.intros)+
qed
lemma proc_deps_Cond [simp]:
"proc_deps (Cond P C D) \<Gamma> = proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
proof
show "proc_deps (Cond P C D) \<Gamma> \<subseteq> proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
by - (rule, erule proc_deps.induct, auto intro: proc_deps.intros)
next
show "proc_deps C \<Gamma> \<union> proc_deps D \<Gamma> \<subseteq> proc_deps (Cond P C D) \<Gamma>"
by auto (erule proc_deps.induct, auto intro: proc_deps.intros)+
qed
lemma proc_deps_While [simp]:
"proc_deps (While P C) \<Gamma> = proc_deps C \<Gamma>"
by auto (erule proc_deps.induct, auto intro: proc_deps.intros)+
lemma proc_deps_Guard [simp]:
"proc_deps (Guard f P C) \<Gamma> = proc_deps C \<Gamma>"
by auto (erule proc_deps.induct, auto intro: proc_deps.intros)+
lemma proc_deps_Throw [simp]:
"proc_deps Throw \<Gamma> = {}"
by (force elim: proc_deps.induct)
lemma proc_deps_Catch [simp]:
"proc_deps (Catch C D) \<Gamma> = proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
proof
show "proc_deps (Catch C D) \<Gamma> \<subseteq> proc_deps C \<Gamma> \<union> proc_deps D \<Gamma>"
by - (rule, erule proc_deps.induct, auto intro: proc_deps.intros)
next
show "proc_deps C \<Gamma> \<union> proc_deps D \<Gamma> \<subseteq> proc_deps (Catch C D) \<Gamma>"
by auto (erule proc_deps.induct, auto intro: proc_deps.intros)+
qed
lemma proc_deps_Call [simp]:
"proc_deps (Call p) \<Gamma> = {p} \<union> (case \<Gamma> p of Some C \<Rightarrow>
proc_deps C (\<Gamma>(p := None)) | _ \<Rightarrow> {})" (is "?X = ?Y \<union> ?Z")
proof
show "?X \<subseteq> ?Y \<union> ?Z"
by - (rule, erule proc_deps.induct,
auto intro: proc_deps.intros,
case_tac "xa = p", auto intro: proc_deps.intros split: option.splits)
next
show "?Y \<union> ?Z \<subseteq> ?X"
proof (clarsimp, rule)
show "p \<in> ?X" by (force intro: proc_deps.intros)
next
show "?Z \<subseteq> ?X"
by (split option.splits, rule, force intro: proc_deps.intros)
(clarify, erule proc_deps.induct, (force intro: proc_deps.intros
split: if_split_asm)+)
qed
qed
lemma proc_deps_DynCom [simp]:
"proc_deps (DynCom f) \<Gamma> = \<Union>{proc_deps (f s) \<Gamma> | s. True}"
by auto (erule proc_deps.induct, force intro: proc_deps.intros,
force intro: proc_deps.intros)+
lemma proc_deps_Skip_seq [simp]:
"proc_deps_seq Language.Skip \<Gamma> = {}"
by (force elim: proc_deps_seq.induct)
lemma proc_deps_Basic_seq [simp]:
"proc_deps_seq (Language.Basic f) \<Gamma> = {}"
by (force elim: proc_deps_seq.induct)
lemma proc_deps_Spec_seq [simp]:
"proc_deps_seq (Language.Spec r) \<Gamma> = {}"
by (force elim: proc_deps_seq.induct)
lemma proc_deps_Seq_seq [simp]:
"proc_deps_seq (Language.Seq C D) \<Gamma> = proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
proof
show "proc_deps_seq (Language.com.Seq C D) \<Gamma> \<subseteq> proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
by - (rule, erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)
next
show "proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma> \<subseteq> proc_deps_seq (Language.com.Seq C D) \<Gamma>"
by auto (erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)+
qed
lemma proc_deps_Cond_seq [simp]:
"proc_deps_seq (Language.Cond P C D) \<Gamma> = proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
proof
show "proc_deps_seq (Language.Cond P C D) \<Gamma> \<subseteq> proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
by - (rule, erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)
next
show "proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma> \<subseteq> proc_deps_seq (Language.Cond P C D) \<Gamma>"
by auto (erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)+
qed
lemma proc_deps_While_seq [simp]:
"proc_deps_seq (Language.While P C) \<Gamma> = proc_deps_seq C \<Gamma>"
by auto (erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)+
lemma proc_deps_Guard_seq [simp]:
"proc_deps_seq (Language.Guard f P C) \<Gamma> = proc_deps_seq C \<Gamma>"
by auto (erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)+
lemma proc_deps_Throw_seq [simp]:
"proc_deps_seq Language.Throw \<Gamma> = {}"
by (force elim: proc_deps_seq.induct)
lemma proc_deps_Catch_seq [simp]:
"proc_deps_seq (Language.Catch C D) \<Gamma> = proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
proof
show "proc_deps_seq (Language.Catch C D) \<Gamma> \<subseteq> proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma>"
by - (rule, erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)
next
show "proc_deps_seq C \<Gamma> \<union> proc_deps_seq D \<Gamma> \<subseteq> proc_deps_seq (Language.Catch C D) \<Gamma>"
by auto (erule proc_deps_seq.induct, auto intro: proc_deps_seq.intros)+
qed
lemma proc_deps_Call_seq [simp]:
"proc_deps_seq (Language.Call p) \<Gamma> = {p} \<union> (case \<Gamma> p of Some C \<Rightarrow>
proc_deps_seq C (\<Gamma>(p := None)) | _ \<Rightarrow> {})" (is "?X = ?Y \<union> ?Z")
proof
show "?X \<subseteq> ?Y \<union> ?Z"
by - (rule, erule proc_deps_seq.induct,
auto intro: proc_deps_seq.intros,
case_tac "xa = p", auto intro: proc_deps_seq.intros split: option.splits)
next
show "?Y \<union> ?Z \<subseteq> ?X"
proof (clarsimp, rule)
show "p \<in> ?X" by (force intro: proc_deps_seq.intros)
next
show "?Z \<subseteq> ?X"
by (split option.splits, rule, force intro: proc_deps_seq.intros)
(clarify, erule proc_deps_seq.induct, (force intro: proc_deps_seq.intros
split: if_split_asm)+)
qed
qed
lemma proc_deps_DynCom_seq [simp]:
"proc_deps_seq (Language.DynCom f) \<Gamma> = \<Union>{proc_deps_seq (f s) \<Gamma> | s. True}"
by auto (erule proc_deps_seq.induct, force intro: proc_deps_seq.intros,
force intro: proc_deps_seq.intros)+
lemma proc_deps_restrict:
"proc_deps C \<Gamma> \<subseteq> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>"
proof rule
fix xa
assume mem: "xa \<in> proc_deps C \<Gamma>"
hence "\<forall>p. xa \<in> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>" (is "?X")
using mem
proof induct
fix x
assume "x \<in> intra_deps C"
thus "\<forall>p. x \<in> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>"
by (force intro: proc_deps.intros)
next
fix D x y
assume x:
"x \<in> proc_deps C \<Gamma>"
"x \<in> proc_deps C \<Gamma> \<Longrightarrow> \<forall>p. x \<in> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>"
"\<Gamma> x = Some D"
"y \<in> intra_deps D"
"y \<in> proc_deps C \<Gamma>"
show "\<forall>p. y \<in> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>"
proof clarify
fix p
assume y: "y \<notin> proc_deps (Call p) \<Gamma>"
show "y \<in> proc_deps C (\<Gamma>(p := None))"
proof (cases "x=p")
case True with x y show ?thesis
by (force intro: proc_deps.intros)
next
case False with x y show ?thesis
by (clarsimp, drule_tac x=p in spec)
(auto intro: proc_deps.intros split: option.splits)
qed
qed
qed
thus "xa \<in> proc_deps C (\<Gamma>(p := None)) \<union> proc_deps (Call p) \<Gamma>" by simp
qed
lemma proc_deps_seq_restrict:
"proc_deps_seq C \<Gamma> \<subseteq> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>"
proof rule
fix xa
assume mem: "xa \<in> proc_deps_seq C \<Gamma>"
hence "\<forall>p. xa \<in> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>" (is "?X")
using mem
proof induct
fix x
assume "x \<in> intra_deps_seq C"
thus "\<forall>p. x \<in> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>"
by (force intro: proc_deps_seq.intros)
next
fix D x y
assume x:
"x \<in> proc_deps_seq C \<Gamma>"
"x \<in> proc_deps_seq C \<Gamma> \<Longrightarrow> \<forall>p. x \<in> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>"
"\<Gamma> x = Some D"
"y \<in> intra_deps_seq D"
"y \<in> proc_deps_seq C \<Gamma>"
show "\<forall>p. y \<in> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>"
proof clarify
fix p
assume y: "y \<notin> proc_deps_seq (Language.Call p) \<Gamma>"
show "y \<in> proc_deps_seq C (\<Gamma>(p := None))"
proof (cases "x=p")
case True with x y show ?thesis
by (force intro: proc_deps_seq.intros)
next
case False with x y show ?thesis
by (clarsimp, drule_tac x=p in spec)
(auto intro: proc_deps_seq.intros split: option.splits)
qed
qed
qed
thus "xa \<in> proc_deps_seq C (\<Gamma>(p := None)) \<union> proc_deps_seq (Language.Call p) \<Gamma>" by simp
qed
(* lemma equiv_proc_deps_seq1:
"x \<in> proc_deps_seq C \<Gamma> \<Longrightarrow> x \<in> proc_deps (LanguageCon.parallel C) (parallel_env \<Gamma>)"
proof(induct C)
case Skip
then show ?case sorry
next
case (Basic x)
then show ?case sorry
next
case (Spec x)
then show ?case sorry
next
case (Seq C1 C2)
then show ?case sorry
next
case (Cond x1 C1 C2)
then show ?case sorry
next
case (While x1 C)
then show ?case sorry
next
case (Call x)
then show ?case sorry
next
case (DynCom x)
then show ?case sorry
next
case (Guard x1 x2a C)
then show ?case sorry
next
case Throw
then show ?case sorry
next
case (Catch C1 C2)
then show ?case sorry
qed
case (1 x)
then show ?case sorry
next
case (2 x D y)
then show ?case sorry
qed
*)
(* lemma equiv_proc_deps_seq2:
"x \<in> proc_deps (LanguageCon.parallel C) (parallel_env \<Gamma>) \<Longrightarrow> x \<in> proc_deps_seq C \<Gamma>"
sorry
lemma "proc_deps_seq C \<Gamma> = proc_deps (LanguageCon.parallel C) (parallel_env \<Gamma>)"
using equiv_proc_deps_seq1 equiv_proc_deps_seq2
by (metis Collect_cong proc_deps_def proc_deps_seq_def
proc_deps_seqp_proc_deps_seq_eq proc_depsp_proc_deps_eq) *)
lemma exec_restrict2_seq:
assumes exec: "\<Gamma> \<turnstile> \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<And>X. proc_deps_seq C \<Gamma> \<subseteq> X \<Longrightarrow> \<Gamma> |` X \<turnstile> \<langle>C,s\<rangle> \<Rightarrow> t"
using exec
proof induct
case (Call p C s t) thus ?case
by (insert proc_deps_seq_restrict [of C \<Gamma> p],
auto intro!: Semantic.exec.intros split: option.splits)
next
case (DynCom f s t) thus ?case
by - (rule Semantic.exec.intros, simp, blast)
qed(auto intro: Semantic.exec.intros)+
lemma intra_seq_in_intra:
assumes a0:"D = sequential d" and
a1:"y \<in> intra_deps_seq D"
shows "y \<in> intra_deps d"
using a0 a1
proof(induct d arbitrary: D y)
case (DynCom x)
then show ?case by fastforce
qed(auto)
lemma proc_deps_seq_subset1:"x \<in> proc_deps_seq ca (\<Gamma>\<^sub>\<not>\<^sub>a) \<Longrightarrow> x \<in> proc_deps (Await b ca) \<Gamma>"
proof (induct rule:proc_deps_seq.induct)
case (1 x)
then show ?case
by (fastforce intro: proc_deps.intros(1))
next
case (2 x D y)
then obtain d where "\<Gamma> x = Some d \<and> D = sequential d"
using in_gamma_in_noawait_gamma lam1_seq[of "\<Gamma>\<^sub>\<not>\<^sub>a" "\<Gamma>" x D] by fast
moreover have "y\<in> intra_deps d" using calculation 2 intra_seq_in_intra no_await_some_no_await
by fastforce
ultimately show ?case using 2 proc_deps.intros by auto
qed
lemma proc_deps_seq_subset:"proc_deps_seq ca (\<Gamma>\<^sub>\<not>\<^sub>a) \<subseteq> proc_deps (Await b ca) \<Gamma>"
using proc_deps_seq_subset1 by auto
lemma exec_restrict2_Await:
assumes a0:"\<Gamma>\<^sub>\<not>\<^sub>a \<turnstile> \<langle>ca,Normal s\<rangle> \<Rightarrow> t" and a1:"proc_deps (Await b ca) \<Gamma> \<subseteq> X"
shows "\<Gamma>\<^sub>\<not>\<^sub>a |\<^bsub>X\<^esub> \<turnstile> \<langle>ca,Normal s\<rangle> \<Rightarrow> t"
apply (rule exec_restrict2_seq[OF a0])
by (meson a1 dual_order.trans proc_deps_seq_subset)
lemma exec_restrict_seq:
assumes exec: "\<Gamma>' \<turnstile> \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<And>\<Gamma> X. \<lbrakk> \<Gamma>' = \<Gamma> |` X; proc_deps_seq C \<Gamma> \<subseteq> X \<rbrakk> \<Longrightarrow> \<Gamma> \<turnstile> \<langle>C,s\<rangle> \<Rightarrow> t"
using exec
proof induct
case (Seq C D s sa ta) thus ?case by (force intro: Semantic.exec.intros)
next
case (WhileTrue P C s s' t) thus ?case by (force intro: Semantic.exec.intros)
next
case (Call p C s t) thus ?case
by - (insert proc_deps_seq_restrict [of C \<Gamma> p], force intro: Semantic.exec.intros)
next
case (DynCom f s t) thus ?case by (force intro: Semantic.exec.intros)
next
case (CatchMatch C D s s' t) thus ?case by (force intro: Semantic.exec.intros)
qed (auto simp: intro: Semantic.exec.intros)
lemma exec_restrict1_seq:
assumes
a1:"(\<Gamma>|\<^bsub>X\<^esub>)\<^sub>\<not>\<^sub>a\<turnstile> \<langle>ca,Normal s\<rangle> \<Rightarrow> t" and
a3:"proc_deps (Await b ca) \<Gamma> \<subseteq> X"
shows "\<Gamma>\<^sub>\<not>\<^sub>a\<turnstile> \<langle>ca,Normal s\<rangle> \<Rightarrow> t"
using exec_restrict_seq
using a1 a3 proc_deps_seq_subset restrict_eq by fastforce
lemma exec_restrict_Await:
assumes a0:"s \<in> b" and
a1:"(\<Gamma>|\<^bsub>X\<^esub>)\<^sub>\<not>\<^sub>a\<turnstile> \<langle>ca,Normal s\<rangle> \<Rightarrow> t" and
a2:"proc_deps (Await b ca) \<Gamma> \<subseteq> X"
shows "\<Gamma>\<turnstile>\<^sub>p \<langle>Await b ca,Normal s\<rangle> \<Rightarrow> t"
using a0 exec.intros exec_restrict1_seq[OF a1 a2] by fast
lemma exec_restrict:
assumes exec: "\<Gamma>' \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<And>\<Gamma> X. \<lbrakk> \<Gamma>' = \<Gamma> |` X; proc_deps C \<Gamma> \<subseteq> X \<rbrakk> \<Longrightarrow> \<Gamma> \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
using exec
proof induct
case (Seq C D s sa ta) thus ?case by (force intro: exec.intros)
next
case (WhileTrue P C s s' t) thus ?case by (force intro: exec.intros)
next
case (AwaitTrue s b \<Gamma>\<^sub>p ca t \<Gamma> X) thus ?case by (auto simp: exec_restrict_Await intro: exec.intros )
next
case (Call p C s t) thus ?case
by - (insert proc_deps_restrict [of C \<Gamma> p], force intro: exec.intros)
next
case (DynCom f s t) thus ?case by (force intro: exec.intros)
next
case (CatchMatch C D s s' t) thus ?case by (force intro: exec.intros)
qed (auto simp: exec_restrict1_seq intro: exec.intros)
lemma exec_restrict2:
assumes exec: "\<Gamma> \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
shows "\<And>X. proc_deps C \<Gamma> \<subseteq> X \<Longrightarrow> \<Gamma> |` X \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
using exec
proof induct
case (Call p C s t) thus ?case
by (insert proc_deps_restrict [of C \<Gamma> p],
auto intro!: exec.intros split: option.splits)
next
case (DynCom f s t) thus ?case
by - (rule exec.intros, simp, blast)
qed (auto simp: restrict_eq exec_restrict2_Await intro: exec.intros)
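(* Executions, and hence mem_safe, are insensitive to restricting the
   environment to the procedures reachable from C. *)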
lemma exec_restrict_eq:
"\<Gamma> |` proc_deps C \<Gamma> \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t = \<Gamma> \<turnstile>\<^sub>p \<langle>C,s\<rangle> \<Rightarrow> t"
by (fast intro: exec_restrict exec_restrict2)
lemma mem_safe_restrict:
"mem_safe C \<Gamma> = mem_safe C (\<Gamma> |` proc_deps C \<Gamma>)"
by (auto simp: mem_safe_def restrict_safe_def restrict_safe_OK_def
exec_restrict_eq exec_fatal_def
split: xstate.splits)
end
|
/**
* Copyright (c) 2019 Melown Technologies SE
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <set>
#include <fstream>
#include <boost/filesystem.hpp>
#include <boost/lexical_cast.hpp>
#include "utility/buildsys.hpp"
#include "utility/gccversion.hpp"
#include "utility/limits.hpp"
#include "utility/path.hpp"
#include "utility/openmp.hpp"
#include "utility/format.hpp"
#include "utility/streams.hpp"
#include "service/cmdline.hpp"
#include "geometry/meshop.hpp"
#include "imgproc/imagesize.cpp"
#include "3dtiles/b3dm.hpp"
#include "gltf/meshloader.hpp"
namespace po = boost::program_options;
namespace bio = boost::iostreams;
namespace fs = boost::filesystem;
namespace tdt = threedtiles;
namespace {
class B3dm2Obj : public service::Cmdline
{
public:
B3dm2Obj()
: service::Cmdline("b3dm2obj", BUILD_TARGET_VERSION)
{}
private:
virtual void configuration(po::options_description &cmdline
, po::options_description &config
, po::positional_options_description &pd)
override;
virtual void configure(const po::variables_map &vars)
override;
virtual bool help(std::ostream &out, const std::string &what) const
override;
virtual int run() override;
fs::path output_;
fs::path input_;
};
void B3dm2Obj::configuration(po::options_description &cmdline
, po::options_description &config
, po::positional_options_description &pd)
{
cmdline.add_options()
("output", po::value(&output_)->required()
, "Path to output converted input.")
("input", po::value(&input_)->required()
, "Path to input SLPK archive.")
;
pd
.add("input", 1)
.add("output", 1);
(void) config;
}
void B3dm2Obj::configure(const po::variables_map&) {}
bool B3dm2Obj::help(std::ostream &out, const std::string &what) const
{
if (what.empty()) {
out << R"RAW(b3dm2obj
Converts batched 3D model into textured meshes in OBJ format.
usage
b3dm2obj INPUT OUTPUT [OPTIONS]
)RAW";
}
return false;
}
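// Illustrative invocation (paths are examples only, not from the original
// sources):
//
//   b3dm2obj input.b3dm output-dir
//
// The tool creates the output directory if needed and writes mesh.obj,
// mesh.mtl and the extracted texture images into it.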
struct MeshLoader : gltf::MeshLoader
{
MeshLoader(const fs::path &output)
: output(output), smIndex(-1), smStart()
{}
/** New mesh has been encountered.
*/
void mesh() override {
smStart = m.vertices.size();
++smIndex;
}
/** Mesh vertices.
*/
void vertices(math::Points3d &&v) override {
m.vertices.insert(m.vertices.end()
, std::make_move_iterator(v.begin())
, std::make_move_iterator(v.end()));
}
/** Mesh texture coordinates.
*/
void tc(math::Points2d &&tc) override {
m.tCoords.insert(m.tCoords.end()
, std::make_move_iterator(tc.begin())
, std::make_move_iterator(tc.end()));
}
    /** Mesh faces. Indices are valid for both 3D and 2D vertices (i.e. vertices
     * and texture coordinates).
*/
void faces(Faces &&faces) override {
for (const auto &face : faces) {
m.faces.emplace_back
(face(0) + smStart, face(1) + smStart, face(2) + smStart
, face(0) + smStart, face(1) + smStart, face(2) + smStart
, smIndex);
}
}
/** Image data.
*/
void image(const DataView &imageData) override {
const auto &type(imgproc::imageType
(imageData.first, gltf::size(imageData)));
const auto name(utility::format("%d%s", smIndex, type));
utility::write(output / name, imageData.first, gltf::size(imageData));
imageNames.push_back(name);
}
/** Image reference.
*/
void image(const fs::path &path) override {
imageNames.push_back(path.string());
}
void finish() {
{
LOG(info1) << "Writing material file.";
std::ofstream f((output / "mesh.mtl").string());
int index(0);
for (const auto &name : imageNames) {
f << "newmtl " << index << "\n"
<< "map_Kd " << name
<< "\n";
++index;
}
f.close();
}
LOG(info3) << "Saving mesh to " << output << ".";
saveAsObj(m, output / "mesh.obj", geometry::ObjMaterial("mesh.mtl"));
}
const fs::path output;
geometry::Mesh m;
int smIndex;
geometry::Face::index_type smStart;
std::vector<std::string> imageNames;
};
int B3dm2Obj::run()
{
auto model(tdt::b3dm(input_));
fs::create_directories(output_);
MeshLoader loader(output_);
MeshLoader::DecodeOptions options;
options.flipTc = true;
tdt::loadMesh(loader, model, options);
loader.finish();
return EXIT_SUCCESS;
}
} // namespace
int main(int argc, char *argv[])
{
utility::unlimitedCoredump();
return B3dm2Obj()(argc, argv);
}
|
(*
* Copyright 2022, Proofcraft Pty Ltd
* Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
*
* SPDX-License-Identifier: GPL-2.0-only
*)
theory ArchInterrupt_AI
imports Interrupt_AI
begin
context Arch begin global_naming AARCH64
primrec arch_irq_control_inv_valid_real ::
"arch_irq_control_invocation \<Rightarrow> 'a::state_ext state \<Rightarrow> bool"
where
"arch_irq_control_inv_valid_real (ARMIRQControlInvocation irq dest_slot src_slot trigger) =
(cte_wp_at ((=) cap.NullCap) dest_slot and
cte_wp_at ((=) cap.IRQControlCap) src_slot and
ex_cte_cap_wp_to is_cnode_cap dest_slot and
real_cte_at dest_slot and
K (irq \<le> maxIRQ))"
defs arch_irq_control_inv_valid_def:
"arch_irq_control_inv_valid \<equiv> arch_irq_control_inv_valid_real"
named_theorems Interrupt_AI_asms
lemma (* decode_irq_control_invocation_inv *)[Interrupt_AI_asms]:
"\<lbrace>P\<rbrace> decode_irq_control_invocation label args slot caps \<lbrace>\<lambda>rv. P\<rbrace>"
apply (simp add: decode_irq_control_invocation_def Let_def arch_check_irq_def
arch_decode_irq_control_invocation_def whenE_def, safe)
apply (wp | simp)+
done
lemma decode_irq_control_valid [Interrupt_AI_asms]:
"\<lbrace>\<lambda>s. invs s \<and> (\<forall>cap \<in> set caps. s \<turnstile> cap)
\<and> (\<forall>cap \<in> set caps. is_cnode_cap cap \<longrightarrow>
(\<forall>r \<in> cte_refs cap (interrupt_irq_node s). ex_cte_cap_wp_to is_cnode_cap r s))
\<and> cte_wp_at ((=) cap.IRQControlCap) slot s\<rbrace>
decode_irq_control_invocation label args slot caps
\<lbrace>irq_control_inv_valid\<rbrace>,-"
apply (simp add: decode_irq_control_invocation_def Let_def split_def
whenE_def arch_check_irq_def
arch_decode_irq_control_invocation_def
split del: if_split cong: if_cong)
apply (wpsimp wp: ensure_empty_stronger simp: cte_wp_at_eq_simp arch_irq_control_inv_valid_def
| wp (once) hoare_drop_imps)+
apply (clarsimp simp: linorder_not_less word_le_nat_alt unat_ucast maxIRQ_def)
apply (cases caps; clarsimp simp: cte_wp_at_eq_simp)
apply (intro conjI impI; clarsimp)
done
lemma get_irq_slot_different_ARCH[Interrupt_AI_asms]:
"\<lbrace>\<lambda>s. valid_global_refs s \<and> ex_cte_cap_wp_to is_cnode_cap ptr s\<rbrace>
get_irq_slot irq
\<lbrace>\<lambda>rv s. rv \<noteq> ptr\<rbrace>"
apply (simp add: get_irq_slot_def)
apply wp
apply (clarsimp simp: valid_global_refs_def valid_refs_def
ex_cte_cap_wp_to_def)
apply (elim allE, erule notE, erule cte_wp_at_weakenE)
apply (clarsimp simp: global_refs_def is_cap_simps cap_range_def)
done
lemma is_derived_use_interrupt_ARCH[Interrupt_AI_asms]:
"(is_ntfn_cap cap \<and> interrupt_derived cap cap') \<longrightarrow> (is_derived m p cap cap')"
apply (clarsimp simp: is_cap_simps)
apply (clarsimp simp: interrupt_derived_def is_derived_def)
apply (clarsimp simp: cap_master_cap_def split: cap.split_asm)
apply (simp add: is_cap_simps is_pt_cap_def vs_cap_ref_def)
done
lemma maskInterrupt_invs_ARCH[Interrupt_AI_asms]:
"\<lbrace>invs and (\<lambda>s. \<not>b \<longrightarrow> interrupt_states s irq \<noteq> IRQInactive)\<rbrace>
do_machine_op (maskInterrupt b irq)
\<lbrace>\<lambda>rv. invs\<rbrace>"
apply (simp add: do_machine_op_def split_def maskInterrupt_def)
apply wp
apply (clarsimp simp: in_monad invs_def valid_state_def all_invs_but_valid_irq_states_for_def
valid_irq_states_but_def valid_irq_masks_but_def valid_machine_state_def
cur_tcb_def valid_irq_states_def valid_irq_masks_def)
done
crunch device_state_inv[wp]: plic_complete_claim "\<lambda>ms. P (device_state ms)"
lemma dmo_plic_complete_claim[wp]:
"do_machine_op (plic_complete_claim irq) \<lbrace>invs\<rbrace>"
apply (wp dmo_invs)
apply (auto simp: plic_complete_claim_def machine_op_lift_def machine_rest_lift_def in_monad select_f_def)
done
lemma no_cap_to_obj_with_diff_IRQHandler_ARCH[Interrupt_AI_asms]:
"no_cap_to_obj_with_diff_ref (IRQHandlerCap irq) S = \<top>"
by (rule ext, simp add: no_cap_to_obj_with_diff_ref_def
cte_wp_at_caps_of_state
obj_ref_none_no_asid)
lemma (* set_irq_state_valid_cap *)[Interrupt_AI_asms]:
"\<lbrace>valid_cap cap\<rbrace> set_irq_state IRQSignal irq \<lbrace>\<lambda>rv. valid_cap cap\<rbrace>"
apply (clarsimp simp: set_irq_state_def)
apply (wp do_machine_op_valid_cap)
apply (auto simp: valid_cap_def valid_untyped_def
split: cap.splits option.splits arch_cap.splits
split del: if_split)
done
crunch valid_global_refs[Interrupt_AI_asms]: set_irq_state "valid_global_refs"
lemma invoke_irq_handler_invs'[Interrupt_AI_asms]:
assumes dmo_ex_inv[wp]: "\<And>f. \<lbrace>invs and ex_inv\<rbrace> do_machine_op f \<lbrace>\<lambda>rv::unit. ex_inv\<rbrace>"
assumes cap_insert_ex_inv[wp]: "\<And>cap src dest.
\<lbrace>ex_inv and invs and K (src \<noteq> dest)\<rbrace>
cap_insert cap src dest
\<lbrace>\<lambda>_.ex_inv\<rbrace>"
assumes cap_delete_one_ex_inv[wp]: "\<And>cap.
\<lbrace>ex_inv and invs\<rbrace> cap_delete_one cap \<lbrace>\<lambda>_.ex_inv\<rbrace>"
shows
"\<lbrace>invs and ex_inv and irq_handler_inv_valid i\<rbrace> invoke_irq_handler i \<lbrace>\<lambda>rv s. invs s \<and> ex_inv s\<rbrace>"
proof -
have
cap_insert_invs_ex_invs[wp]: "\<And>cap src dest. \<lbrace>ex_inv and (invs and cte_wp_at (\<lambda>c. c = NullCap) dest and valid_cap cap and
tcb_cap_valid cap dest and
ex_cte_cap_wp_to (appropriate_cte_cap cap) dest and
(\<lambda>s. \<forall>r\<in>obj_refs cap.
\<forall>p'. dest \<noteq> p' \<and> cte_wp_at (\<lambda>cap'. r \<in> obj_refs cap') p' s \<longrightarrow>
cte_wp_at (Not \<circ> is_zombie) p' s \<and> \<not> is_zombie cap) and
(\<lambda>s. cte_wp_at (is_derived (cdt s) src cap) src s) and
(\<lambda>s. cte_wp_at (\<lambda>cap'. \<forall>irq\<in>cap_irqs cap - cap_irqs cap'. irq_issued irq s)
src s) and
(\<lambda>s. \<forall>t R. cap = ReplyCap t False R \<longrightarrow>
st_tcb_at awaiting_reply t s \<and> \<not> has_reply_cap t s) and
K (\<not> is_master_reply_cap cap))\<rbrace>
cap_insert cap src dest \<lbrace>\<lambda>rv s. invs s \<and> ex_inv s\<rbrace>"
apply wp
apply (auto simp: cte_wp_at_caps_of_state)
done
show ?thesis
apply (cases i, simp_all)
apply (wp dmo_plic_complete_claim maskInterrupt_invs)
apply simp+
apply (rename_tac irq cap prod)
apply (rule hoare_pre)
apply (wp valid_cap_typ [OF cap_delete_one_typ_at])
apply (strengthen real_cte_tcb_valid)
apply (wp real_cte_at_typ_valid [OF cap_delete_one_typ_at])
apply (rule_tac Q="\<lambda>rv s. is_ntfn_cap cap \<and> invs s
\<and> cte_wp_at (is_derived (cdt s) prod cap) prod s"
in hoare_post_imp)
apply (clarsimp simp: is_cap_simps is_derived_def cte_wp_at_caps_of_state)
apply (simp split: if_split_asm)
apply (simp add: cap_master_cap_def split: cap.split_asm)
apply (drule cte_wp_valid_cap [OF caps_of_state_cteD] | clarsimp)+
apply (clarsimp simp: cap_master_cap_simps valid_cap_def obj_at_def is_ntfn is_tcb is_cap_table
split: option.split_asm dest!:cap_master_cap_eqDs)
apply (wp cap_delete_one_still_derived)
apply simp
apply (wp get_irq_slot_ex_cte get_irq_slot_different_ARCH hoare_drop_imps)
apply (clarsimp simp: valid_state_def invs_def appropriate_cte_cap_def
is_cap_simps)
apply (erule cte_wp_at_weakenE, simp add: is_derived_use_interrupt_ARCH)
apply (wp| simp add: )+
done
qed
lemma (* invoke_irq_control_invs *) [Interrupt_AI_asms]:
"\<lbrace>invs and irq_control_inv_valid i\<rbrace> invoke_irq_control i \<lbrace>\<lambda>rv. invs\<rbrace>"
apply (cases i, simp_all)
apply (wp cap_insert_simple_invs
| simp add: IRQHandler_valid is_cap_simps no_cap_to_obj_with_diff_IRQHandler_ARCH
| strengthen real_cte_tcb_valid)+
apply (clarsimp simp: cte_wp_at_caps_of_state
is_simple_cap_def is_cap_simps is_pt_cap_def
safe_parent_for_def is_simple_cap_arch_def
ex_cte_cap_to_cnode_always_appropriate_strg)
apply (rename_tac irq_control, case_tac irq_control)
apply (simp add: arch_irq_control_inv_valid_def)
apply (wp cap_insert_simple_invs
| simp add: IRQHandler_valid is_cap_simps no_cap_to_obj_with_diff_IRQHandler_ARCH
| strengthen real_cte_tcb_valid)+
apply (clarsimp simp: cte_wp_at_caps_of_state is_simple_cap_def is_simple_cap_arch_def
is_cap_simps is_pt_cap_def safe_parent_for_def
ex_cte_cap_to_cnode_always_appropriate_strg)
done
crunch device_state_inv[wp]: resetTimer "\<lambda>ms. P (device_state ms)"
lemma resetTimer_invs_ARCH[Interrupt_AI_asms]:
"\<lbrace>invs\<rbrace> do_machine_op resetTimer \<lbrace>\<lambda>_. invs\<rbrace>"
apply (wp dmo_invs)
apply safe
apply (drule_tac Q="%_ b. underlying_memory b p = underlying_memory m p"
in use_valid)
apply (simp add: resetTimer_def
machine_op_lift_def machine_rest_lift_def split_def)
apply wp
apply (clarsimp+)[2]
apply(erule use_valid, wp no_irq_resetTimer no_irq, assumption)
done
lemma empty_fail_ackInterrupt_ARCH[Interrupt_AI_asms]:
"empty_fail (ackInterrupt irq)"
by (wp | simp add: ackInterrupt_def)+
lemma empty_fail_maskInterrupt_ARCH[Interrupt_AI_asms]:
"empty_fail (maskInterrupt f irq)"
by (wp | simp add: maskInterrupt_def)+
lemma dmo_st_tcb_cur[wp]:
"\<lbrace>\<lambda>s. st_tcb_at P (cur_thread s) s\<rbrace> do_machine_op f \<lbrace>\<lambda>rv s. st_tcb_at P (cur_thread s) s\<rbrace>"
by (rule hoare_lift_Pf[where f=cur_thread]; wp)
lemma dmo_ex_nonz_cap_to[wp]:
"\<lbrace>\<lambda>s. ex_nonz_cap_to (cur_thread s) s\<rbrace> do_machine_op f \<lbrace>\<lambda>rv s. ex_nonz_cap_to (cur_thread s) s\<rbrace>"
by (rule hoare_lift_Pf[where f=cur_thread]; wp)
lemma conj_imp_strg:
"P \<Longrightarrow> (A \<longrightarrow> P) \<and> (B \<longrightarrow> P)" by simp
lemma runnable_eq:
"runnable st = (st = Running \<or> st = Restart)"
by (cases st; simp)
lemma halted_eq:
"halted st = (st = Inactive \<or> st = IdleThreadState)"
by (cases st; simp)
crunches vgic_update, vgic_update_lr, vcpu_update for ex_nonz_cap_to[wp]: "ex_nonz_cap_to p"
(wp: ex_nonz_cap_to_pres)
lemma vgic_maintenance_invs[wp]:
"\<lbrace>invs\<rbrace> vgic_maintenance \<lbrace>\<lambda>_. invs\<rbrace>"
unfolding vgic_maintenance_def
supply if_split[split del] valid_fault_def[simp]
apply (wpsimp simp: get_gic_vcpu_ctrl_misr_def get_gic_vcpu_ctrl_eisr1_def
get_gic_vcpu_ctrl_eisr0_def if_apply_def2
wp: thread_get_wp' hoare_vcg_imp_lift' gts_wp hoare_vcg_all_lift
| wps
| wp (once) hoare_drop_imp[where f="do_machine_op m" for m]
hoare_drop_imp[where f="return $ m" for m]
| strengthen not_pred_tcb_at_strengthen
| wp (once) hoare_vcg_imp_lift' gts_wp)+
apply (frule tcb_at_invs)
apply (clarsimp simp: runnable_eq halted_eq not_pred_tcb)
apply (fastforce intro!: st_tcb_ex_cap[where P=active]
simp: not_pred_tcb st_tcb_at_def obj_at_def halted_eq)
done
lemma vppi_event_invs[wp]:
"\<lbrace>invs\<rbrace> vppi_event irq \<lbrace>\<lambda>_. invs\<rbrace>"
unfolding vppi_event_def
supply if_split[split del] valid_fault_def[simp]
apply (wpsimp simp: if_apply_def2
wp: hoare_vcg_imp_lift' gts_wp hoare_vcg_all_lift maskInterrupt_invs
cong: vcpu.fold_congs
| wps
| strengthen not_pred_tcb_at_strengthen)+
apply (frule tcb_at_invs)
apply (clarsimp simp: runnable_eq halted_eq not_pred_tcb)
apply (fastforce intro!: st_tcb_ex_cap[where P=active]
simp: not_pred_tcb st_tcb_at_def obj_at_def halted_eq)
done
lemma handle_reserved_irq_invs[wp]:
"\<lbrace>invs\<rbrace> handle_reserved_irq irq \<lbrace>\<lambda>_. invs\<rbrace>"
unfolding handle_reserved_irq_def by (wpsimp simp: non_kernel_IRQs_def)
lemma (* handle_interrupt_invs *) [Interrupt_AI_asms]:
"\<lbrace>invs\<rbrace> handle_interrupt irq \<lbrace>\<lambda>_. invs\<rbrace>"
apply (simp add: handle_interrupt_def)
apply (rule conjI; rule impI)
apply (simp add: do_machine_op_bind empty_fail_ackInterrupt_ARCH empty_fail_maskInterrupt_ARCH)
apply (wpsimp wp: dmo_maskInterrupt_invs maskInterrupt_invs_ARCH dmo_ackInterrupt
send_signal_interrupt_states simp: arch_mask_irq_signal_def)+
apply (wp get_cap_wp send_signal_interrupt_states )
apply (rule_tac Q="\<lambda>rv. invs and (\<lambda>s. st = interrupt_states s irq)" in hoare_post_imp)
apply (clarsimp simp: ex_nonz_cap_to_def invs_valid_objs)
apply (intro allI exI, erule cte_wp_at_weakenE)
apply (clarsimp simp: is_cap_simps)
apply (wpsimp wp: hoare_drop_imps resetTimer_invs_ARCH
simp: get_irq_state_def
| rule conjI)+
done
lemma sts_arch_irq_control_inv_valid[wp, Interrupt_AI_asms]:
"\<lbrace>arch_irq_control_inv_valid i\<rbrace>
set_thread_state t st
\<lbrace>\<lambda>rv. arch_irq_control_inv_valid i\<rbrace>"
apply (simp add: arch_irq_control_inv_valid_def)
apply (cases i, simp)
apply (wpsimp wp: ex_cte_cap_to_pres simp: cap_table_at_typ)
done
crunch typ_at[wp]: arch_invoke_irq_handler "\<lambda>s. P (typ_at T p s)"
end
interpretation Interrupt_AI?: Interrupt_AI
proof goal_cases
interpret Arch .
case 1 show ?case by (intro_locales; (unfold_locales, simp_all add: Interrupt_AI_asms)?)
qed
end
|
{-# OPTIONS --safe --cubical #-}
module Data.List.Kleene where
open import Prelude
open import Data.Fin
mutual
infixr 5 _&_ ∹_
infixl 5 _⁺ _⋆
record _⁺ {a} (A : Set a) : Set a where
inductive
constructor _&_
field
head : A
tail : A ⋆
data _⋆ {a} (A : Set a) : Set a where
[] : A ⋆
∹_ : A ⁺ → A ⋆
open _⁺ public
mutual
foldr⁺ : (A → B → B) → B → A ⁺ → B
foldr⁺ f b (x & xs) = f x (foldr⋆ f b xs)
foldr⋆ : (A → B → B) → B → A ⋆ → B
foldr⋆ f b [] = b
foldr⋆ f b (∹ xs) = foldr⁺ f b xs
length⋆ : A ⋆ → ℕ
length⋆ = foldr⋆ (const suc) zero
length⁺ : A ⁺ → ℕ
length⁺ = foldr⁺ (const suc) zero
mutual
_!⁺_ : (xs : A ⁺) → Fin (length⁺ xs) → A
xs !⁺ f0 = xs .head
xs !⁺ fs i = xs .tail !⋆ i
_!⋆_ : (xs : A ⋆) → Fin (length⋆ xs) → A
(∹ xs) !⋆ i = xs !⁺ i
map⋆ : (A → B) → A ⋆ → B ⋆
map⋆ f = foldr⋆ (λ x xs → ∹ f x & xs) []
map⁺ : (A → B) → A ⁺ → B ⁺
map⁺ f (x & xs) = f x & map⋆ f xs
mutual
_⋆++⋆_ : A ⋆ → A ⋆ → A ⋆
[] ⋆++⋆ ys = ys
(∹ xs) ⋆++⋆ ys = ∹ (xs ⁺++⋆ ys)
_⁺++⋆_ : A ⁺ → A ⋆ → A ⁺
head (xs ⁺++⋆ ys) = head xs
tail (xs ⁺++⋆ ys) = tail xs ⋆++⋆ ys
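-- Illustrative example (not part of the original module): the non-empty
-- list ⟨1, 2, 3⟩ is written
--
--   ex : ℕ ⁺
--   ex = 1 & ∹ (2 & ∹ (3 & []))
--
-- and length⁺ ex computes to 3, while foldr⁺ _+_ 0 ex computes to 6.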
|
%==============================================================================
% This code is part of the Matlab-based toolbox
% FAIR - Flexible Algorithms for Image Registration.
% For details see
% - https://github.com/C4IR and
% - http://www.siam.org/books/fa06/
% ##2
%==============================================================================
%
% - data PETCT, Omega=(0,140)x(0,151), level=4:7, m=[128,128]
% - viewer viewImage2D
% - interpolation splineInter
% - distance MI
% - pre-registration rigid2D
%   - regularizer           mfCurvature
% - optimization lBFGS
% ===============================================================================
close all, help(mfilename);
setup2DPETCTData
imgModel('reset','imgModel','splineInter','regularizer','none','theta',1e-3);
distance('reset','distance','MI','nT',32,'nR',32);
trafo('reset','trafo','rigid2D');
regularizer('reset','regularizer','mfCurvature','alpha',1e-1);
PIRpara = optPara('lBFGS','solver','backslash');
NPIRpara = optPara('lBFGS','solver',regularizer('get','solver'),'maxIter',40);
[yc,wc,his] = MLIR(ML,'PIRobj',@PIRBFGSobjFctn,'PIRpara',PIRpara,...
'NPIRobj',@NPIRBFGSobjFctn,'NPIRpara',NPIRpara,...
'minLevel',4,'maxLevel',7,'parametric',1,'plotMLiter',0);
%==============================================================================
|
[STATEMENT]
lemma bfun_spec[intro]: "f \<in> bfun \<Longrightarrow> (\<lambda>x. f (g x)) \<in> bfun"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. f \<in> bfun \<Longrightarrow> (\<lambda>x. f (g x)) \<in> bfun
[PROOF STEP]
unfolding bfun_def bounded_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. f \<in> {f. \<exists>x e. \<forall>y\<in>range f. dist x y \<le> e} \<Longrightarrow> (\<lambda>x. f (g x)) \<in> {f. \<exists>x e. \<forall>y\<in>range f. dist x y \<le> e}
[PROOF STEP]
by auto |
module DeBruijnGraph
# package code goes here
using Graphs
export Node, DebruijnGraph,
isSemiBalanced, isBalanced, name,
nnodes,nedges,hasEulerianWalk,hasEulerianCycle,isEulerian,eulerianWalkOrCyle
include("graph/graph.jl")
include("visual/visual.jl")
include("assembly/assembly.jl")
end # module
|
This unique pop-up beach mat with sun shade cover is made from DuPont Tyvek, a patented high-tech fabric created by DuPont from high-density polyethylene (HDPE). Tyvek is an extremely versatile material: it offers superior protection against the sun's UV rays, blocking out 99.8% of the harmful UV radiation that is the main cause of sunburn. It is ultra thin and lightweight and feels like paper, yet it is very strong and extremely durable. Tyvek also resists water penetration while allowing vapor to pass through, making it a perfect material for beach sun shelters and tents.
module Main where
import Numeric.LinearAlgebra
import Common
import Forward
import BackProp
import AutoEncoder
import ActivFunc
import Other
main :: IO ()
main = do
regression
classification
regression :: IO ()
regression = do
ws <- genWeights [2, 4, 8, 1]
let x = matrix 4 [0, 0, 1, 1,
0, 1, 0, 1]
let y = matrix 4 [0, 1, 1, 0]
let i = matrix 4 [0, 0, 1, 1,
0, 1, 0, 1] -- example
-- let nws = last . take 500 $ iterate (backPropRegression sigmoids (x, y)) ws
nws <- last . take 500 $ iterateM (sgdMethod 2 (x, y) $ backPropRegression 0.1 sigmoids) ws
let pws = preTrains 0.1 500 sigmoids x ws
npws <- last . take 500 $ iterateM (sgdMethod 2 (x, y) $ backPropRegression 0.1 sigmoids) pws
putStrLn "training inputs"
print x
putStrLn "training outputs"
print y
putStrLn "inputs"
print i
putStrLn "not trained outputs"
print $ forwardRegression sigmoidC ws i
putStrLn "trainined outputs"
print $ forwardRegression sigmoidC nws i
putStrLn "pretrainined outputs"
print $ forwardRegression sigmoidC npws i
classification :: IO ()
classification = do
ws <- genWeights [2, 4, 8, 3]
let x = matrix 4 [0, 0, 1, 1,
0, 1, 0, 1]
let y = matrix 4 [1, 0, 0, 0,
0, 1, 1, 0,
0, 0, 0, 1]
let i = matrix 4 [0, 0, 1, 1,
0, 1, 0, 1] -- example
-- let nws = last . take 500 $ iterate (backPropClassification sigmoids (x, y)) ws
nws <- last . take 500 $ iterateM (sgdMethod 2 (x, y) $ backPropClassification 0.1 sigmoids) ws
let pws = preTrains 0.1 500 sigmoids x ws
npws <- last . take 500 $ iterateM (sgdMethod 2 (x, y) $ backPropClassification 0.1 sigmoids) pws
putStrLn "training inputs"
print x
putStrLn "training outputs"
print y
putStrLn "inputs"
print i
putStrLn "not trained outputs"
print $ forwardClassification sigmoidC ws i
putStrLn "trainined outputs"
print $ forwardClassification sigmoidC nws i
putStrLn "pretrainined outputs"
print $ forwardClassification sigmoidC npws i
|
function model = mogUpdateCovariance(model)
% MOGUPDATECOVARIANCE Update the covariances of an MOG model.
% FORMAT
% DESC updates the covariance matrices of a mixtures of
% Gaussians model. The implementation currently uses an
% eigenvalue based update.
% ARG model : the model which is to be updated.
% RETURN model : the model with updated covariances.
%
% SEEALSO : mogCreate, mogUpdateMean, mogEstep
%
% COPYRIGHT : Neil D. Lawrence, 2006
% MLTOOLS
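% Example usage (illustrative sketch; assumes a mixture model created with
% mogCreate whose responsibilities were computed by mogEstep):
%
%   model = mogEstep(model);
%   model = mogUpdateMean(model);
%   model = mogUpdateCovariance(model);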
for i = 1:model.m
centredY = model.Y - repmat(model.mean(i,:), model.N, 1);
centredY = centredY.*repmat(sqrt(model.posterior(:,i)), 1, model.d);
switch model.covtype
case 'ppca'
C = (centredY'*centredY+0.001*eye(model.d))/sum(model.posterior(:, i)+.001);
[vec, val] = eig(C);
val = diag(val);
[val, ind] = sort(val);
ind = ind(end:-1:1);
val = val(end:-1:1);
vec = vec(:, ind(1:model.q));
sigma2 = mean(val(model.q+1:end));
if sigma2<eps
sigma2 = eps;
end
lambda = val(1:model.q) - sigma2;
%[sigma2, eigVec, lambda] = ppca(C, model.q);
if length(lambda) ~= model.q
% Something's wrong here ...
sigma2 = 1e-6;
warning('Not enough eigenvalues extracted.')
lambdaTemp = lambda;
lambda = zeros(model.q, 1);
lambda(1:length(lambdaTemp)) = lambdaTemp;
end
model.sigma2(i) = sigma2;
model.W{i} = vec*diag(sqrt(lambda));
model.U{i} = sqrt(sigma2)*eye(model.d);
for j = 1:model.q
model.U{i} = cholupdate(model.U{i}, model.W{i}(:, j));
end
case 'spherical'
model.sigma2(i) = sum(sum(centredY.*centredY))/(model.d*sum(model.posterior(:, i)));
end
end
|
Require Export SpecializedCategory Group.
Require Import Notations ComputableCategory SetCategory.
Set Implicit Arguments.
Generalizable All Variables.
Set Asymmetric Patterns.
Set Universe Polymorphism.
Ltac destruct_first_if_not_second a b :=
(constr_eq a b; fail 1) || (let H := fresh in set (H := a : unit) in *; destruct H).
Ltac destruct_singleton_constructor c :=
let t := type of c in
repeat match goal with
| [ H : t |- _ ] => destruct H
| [ H : context[?e] |- _ ] => destruct_first_if_not_second e c
| [ |- context[?e] ] => destruct_first_if_not_second e c
end.
Ltac destruct_units := destruct_singleton_constructor tt.
Ltac destruct_Trues := destruct_singleton_constructor I.
Section as_category.
Definition CategoryOfGroup (G : Group) : SpecializedCategory unit.
refine (@Build_SpecializedCategory unit
(fun _ _ => G)
(fun _ => @GroupIdentity G)
(fun _ _ _ => @GroupOperation G)
_
_
_);
abstract (destruct G; intuition).
Defined.
End as_category.
Coercion CategoryOfGroup : Group >-> SpecializedCategory.
Section category_of_groups.
Definition GroupCat : SpecializedCategory Group
:= Eval unfold ComputableCategory in ComputableCategory _ CategoryOfGroup.
End category_of_groups.
Section forgetful_functor.
Definition GroupForgetfulFunctor : SpecializedFunctor GroupCat TypeCat.
refine (Build_SpecializedFunctor GroupCat TypeCat
GroupObjects
(fun s d m => MorphismOf m (s := tt) (d := tt))
_
_);
simpl; abstract (intros; destruct_units; reflexivity).
Defined.
End forgetful_functor.
|
lemma lmeasurable_interior: "bounded S \<Longrightarrow> interior S \<in> lmeasurable" |
/-
Copyright (c) 2016 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Jeremy Avigad
-/
import tactic.finish
/-!
# Examples for `finish`
Those come from the tutorial.
-/
open auto
section
variables p q r s : Prop
-- commutativity of ∧ and ∨
example : p ∧ q ↔ q ∧ p := by finish
example : p ∨ q ↔ q ∨ p := by finish
-- associativity of ∧ and ∨
example : (p ∧ q) ∧ r ↔ p ∧ (q ∧ r) := by finish
example : (p ∨ q) ∨ r ↔ p ∨ (q ∨ r) := by finish
-- distributivity
example : p ∧ (q ∨ r) ↔ (p ∧ q) ∨ (p ∧ r) := by finish [iff_def]
example : p ∨ (q ∧ r) ↔ (p ∨ q) ∧ (p ∨ r) := by finish [iff_def]
-- other properties
example : (p → (q → r)) ↔ (p ∧ q → r) := by finish [iff_def]
example : ((p ∨ q) → r) ↔ (p → r) ∧ (q → r) := by finish [iff_def]
example : ¬(p ∨ q) ↔ ¬p ∧ ¬q := by finish
example : ¬p ∨ ¬q → ¬(p ∧ q) := by finish
example : ¬(p ∧ ¬ p) := by finish
example : p ∧ ¬q → ¬(p → q) := by finish
example : ¬p → (p → q) := by finish
example : (¬p ∨ q) → (p → q) := by finish
example : p ∨ false ↔ p := by finish
example : p ∧ false ↔ false := by finish
example : ¬(p ↔ ¬p) := by finish
example : (p → q) → (¬q → ¬p) := by finish
-- these require classical reasoning
example : (p → r ∨ s) → ((p → r) ∨ (p → s)) := by finish
example : ¬(p ∧ q) → ¬p ∨ ¬q := by finish
example : ¬(p → q) → p ∧ ¬q := by finish
example : (p → q) → (¬p ∨ q) := by finish
example : (¬q → ¬p) → (p → q) := by finish
example : p ∨ ¬p := by finish
example : (((p → q) → p) → p) := by finish
end
section
variables (A : Type) (p q : A → Prop)
variable a : A
variable r : Prop
example : (∃ x : A, r) → r := by finish
-- TODO(Jeremy): can we get these automatically?
example (a : A) : r → (∃ x : A, r) := begin safe; apply_assumption; assumption end
example : (∃ x, p x ∧ r) ↔ (∃ x, p x) ∧ r := by finish
theorem foo': (∃ x, p x ∨ q x) ↔ (∃ x, p x) ∨ (∃ x, q x) :=
by finish [iff_def]
example (h : ∀ x, ¬ ¬ p x) : p a := by finish
example (h : ∀ x, ¬ ¬ p x) : ∀ x, p x := by finish
example : (∀ x, p x) ↔ ¬ (∃ x, ¬ p x) := by finish
example : (∃ x, p x) ↔ ¬ (∀ x, ¬ p x) := by finish
example : (¬ ∃ x, p x) ↔ (∀ x, ¬ p x) := by finish
example : (¬ ∀ x, p x) ↔ (∃ x, ¬ p x) := by finish
example : (∃ x, ¬ p x) → (¬ ∀ x, p x) := by finish
example : (∀ x, p x → r) ↔ (∃ x, p x) → r := by finish [iff_def]
-- TODO(Jeremy): can we get these automatically?
example (a : A) : (∃ x, p x → r) ↔ (∀ x, p x) → r := begin safe [iff_def]; exact h a end
example (a : A) : (∃ x, r → p x) ↔ (r → ∃ x, p x) := begin safe [iff_def]; exact h a end
example : (∃ x, p x → r) → (∀ x, p x) → r := by finish
example : (∃ x, r → p x) → (r → ∃ x, p x) := by finish
end
|
(**
A much simplified version of the ideas in: "Java bytecode verification:
algorithms and formalizations", Xavier Leroy (Journal of Automated Reasoning,
30(3-4):235-269, 2003).
Here is its abstract:
"Bytecode verification is a crucial security component for Java applets, on
the Web and on embedded devices such as smart cards. This paper reviews the
various bytecode verification algorithms that have been proposed, recasts
them in a common framework of dataflow analysis, and surveys the use of proof
assistants to specify bytecode verification and prove its correctness."
*)
Require Import OrderedType OrderedTypeEx OrderedTypeAlt DecidableType
DecidableTypeEx RelationClasses Omega.
From bcv Require Import LibHypsNaming heritage vmtype vmdefinition.
(** * Values of the offensive machine
The definition of the types, classes and instructions is fixed in [vmtype]. *)
Module OffVal <: VMVal.
Definition Val := nat.
  (** Computing the type of an offensive value. *)
Definition v2t (v:Val): VMType := Top.
Lemma val_eq_dec : forall v1 v2:Val, {v1=v2}+{v1<>v2}.
Proof.
intros v1 v2.
decide equality.
Qed.
End OffVal.
Module O (Import H:Herit).
  (** Offensive states. *)
Module Off := VMDefinition(OffVal)(H).
Include Off.
Ltac rename_ovm h th := fail.
(* Hypothesis renaming stuff from other files + current file.
DO NOT REDEFINE IN THIS FILE. Redefine rename_ovm instead. *)
Ltac rename_hyp h th ::=
match th with
| _ => (rename_ovm h th)
| _ => (Off.rename_vmdef h th)
| _ => (LibHypsNaming.rename_hyp_neg h th)
end.
(** test *)
(*
Definition obj1:Obj := {| objclass:=1; objfields:=Dico.empty _|}.
Definition heap1:Heap := Dico.empty _ .
Definition heap2:Heap := Dico.add 1 obj1 heap1.
Definition heap3:Heap := Dico.add 2 obj1 heap2.
Eval vm_compute in (maxkey heap3).
Eval vm_compute in (maxkey heap2).
*)
(* end of tests *)
(** * Construction of fresh objects for the [New] bytecode. *)
(** Builds a Dico of default values from a Dico of types, plus a proof
    that [build_flds] is a morphism for [Dico.Equal]. *)
Definition build_flds: ClasseDef -> (Dico.t Val) :=
Dico.map
(fun t:VMType =>
match t with
| Tint => 0
| Tref id => 0 (** null = 0 *)
| Object => 0 (** null = 0 *)
| Top => 0 (** Should never happen *)
| Trefnull => 0 (** Should never happen *)
end).
Add Parametric Morphism: O.build_flds with signature (Dico.Equal ==> Dico.Equal) as build_flds_morphism.
intros x y H.
unfold O.build_flds.
setoid_rewrite H.
apply Dicomore.F.Equal_refl.
Qed.
(*
Lemma build_flds_no_vref:forall t i x, FIND i (build_flds t) <> Some x.
Proof.
intros t.
pattern t.
apply Dicomore.map_induction_bis;simpl;intros.
setoid_rewrite <- H.
apply H0.
vm_compute.
intro abs; discriminate abs.
unfold O.build_flds.
rewrite Dicomore.add_map.
destruct (Dico.E.eq_dec i x);simpl in *; subst.
rewrite Dicofacts.add_eq_o;auto.
destruct e;simpl;intro;discriminate.
rewrite Dicofacts.add_neq_o;auto.
Qed.
*)
(** [new cldefs clid heap] checks that [clid] actually exists, then returns a
reference [r] and a new [Heap] [h] such that [heap(r) = None] and [h(r) =
Some(o)] where [o] is a "fresh" object and all the addresses of [heap] are
unchanged in [h]. *)
Function new (clid:class_id) (heap:Heap) : option (heap_idx * Heap) :=
match Dico.find clid allcl with
  | None => None (** Unknown class *)
| Some cldef =>
let flds:Obj := {| objclass := clid; objfields := build_flds cldef |} in
let newhpidx: nat := maxkey heap in
Some((S newhpidx), Dico.add (S newhpidx) flds heap)
end.
Lemma new_one_change : forall clid hp hpidx newhp,
new clid hp = Some (hpidx, newhp) ->
forall idx,
FIND idx newhp = FIND idx hp \/ idx=hpidx.
Proof.
intros clid hp.
!functional induction new clid hp;intros;simpl;try solve [discriminate].
inversion heq_Some;clear heq_Some;subst.
destruct (Dico.E.eq_dec idx (S (maxkey hp)));simpl.
- right;auto.
- rewrite Dicomore.add_neq_o;auto.
Qed.
Lemma build_flds_empty : Dico.Equal (build_flds Dico.empty) Dico.empty.
Proof.
unfold build_flds.
apply Dicomore.empty_map.
Qed.
(** * Offensive execution function of *one* bytecode *)
(** No overflow check, no negative numbers. *)
Definition exec_step (s:State): option State :=
let frm:Frame := s.(frame) in
let pc: pc_idx := frm.(pc) in
let instr_opt := Dico.find pc (frm.(mdef).(instrs)) in
match instr_opt with
| None => None
| Some instr =>
match instr with
| ret => Some s
| Iconst i =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
pc:= pc + 1;
stack:= i :: s.(frame).(stack)
|}
|}
| Iadd =>
match s.(frame).(stack) with
| i1 :: i2 :: stack' =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef); regs:= s.(frame).(regs);
pc:= pc + 1;
stack:= (i1+i2) :: stack'
|}
|}
| nil | _ :: nil => None (** Stack underflow *)
end
| Iload ridx =>
match Dico.find ridx (s.(frame).(regs)) with
| Some i =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef); regs:= s.(frame).(regs);
pc:= pc + 1;
stack:= i :: s.(frame).(stack)
|}
|}
| None => None (** Bad register number *)
end
| Rload clid ridx =>
match Dico.find ridx (s.(frame).(regs)) with
| Some r =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
pc:= pc + 1;
stack:= r :: s.(frame).(stack)
|}
|}
| None => None (** Bad register number *)
end
| Istore ridx =>
match s.(frame).(stack) with
| i :: stack' =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ;
regs:= Dico.add ridx i (s.(frame).(regs));
pc:= pc + 1;
stack:= stack'
|}
|}
| nil => None (** Stack underflow *)
end
| Rstore clid ridx =>
match s.(frame).(stack) with
| i :: stack' =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ;
regs:= Dico.add ridx i (s.(frame).(regs));
pc:= pc + 1;
stack:= stack'
|}
|}
| nil => None (** Stack underflow *)
end
| Iifle jmp => (** ifeqe *)
match s.(frame).(stack) with
| 0 :: stack' => (** = 0 --> jump *)
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
pc:= jmp;
stack:= stack'
|}
|}
| _ :: stack' => (** <> 0 --> pc+1 *)
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
pc:= pc+1;
stack:= stack'
|}
|}
| nil => None (** Stack underflow *)
end
| Goto jmp =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
stack:= s.(frame).(stack);
pc:= jmp
|}
|}
| Getfield cl namefld typ =>
match s.(frame).(stack) with
| hpidx :: stack' =>
match Dico.find hpidx s.(heap) with
        | None => None (** unknown address *)
        | Some {|objclass:= objcl; objfields:= flds |} => (* TODO: check the returned type? *)
match Dico.find namefld flds with
            | None => None (** Unknown or uninitialized class field *)
| Some v =>
Some {| framestack := s.(framestack); heap := s.(heap);
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
pc:= pc+1;
stack:= v :: stack'
|}
|}
end
end
| nil => None (** Stack underflow *)
end
| Putfield cl namefld typ =>
match s.(frame).(stack) with
| hpidx :: v :: stack' =>
match Dico.find hpidx s.(heap) with
          | None => None (** unknown address, object not allocated *)
| Some {| objclass:= objcl; objfields:= flds |}=>
let newflds := {| objclass:= objcl;
objfields:=Dico.add namefld v flds |} in
let newheap := Dico.add hpidx newflds s.(heap) in
Some {| framestack := s.(framestack);
heap := newheap;
frame := {| mdef:=s.(frame).(mdef) ;
regs:= s.(frame).(regs);
pc:= pc+1;
stack:= stack'
|}
|}
end
| nil | _ :: nil => None (** Stack underflow *)
end
| New clid =>
match new clid s.(heap) with
      | None => None (** Unknown class *)
| Some (newobj,newhp) =>
Some {| framestack := s.(framestack); heap := newhp;
frame := {| mdef:=s.(frame).(mdef) ; regs:= s.(frame).(regs);
stack:= newobj :: s.(frame).(stack);
pc:= pc+1
|}
|}
end
end
end.
Functional Scheme exec_step_ind := Induction for exec_step Sort Prop.
(** * Tests *)
Notation "k --> i , d" := (Dico.add k i d) (at level 55, right associativity).
Definition prog:MethodDef :=
{| instrs := (0 --> Iload 1 ,
1 --> Istore 2 ,
2 --> ret ,
Dico.empty) ;
argstype :=(Tint :: Tint:: nil);
restype := Tint |}.
Definition startstate:State :=
{|
framestack := nil;
heap := Dico.empty ;
frame := {|
mdef:= prog ;
regs:= (0 --> 32 , 1--> 11, Dico.empty);
pc:= 0;
stack:= nil
|}
|}.
Fixpoint exec_n (s : State) (n:nat) {struct n}: option State :=
match n with
| 0 => Some s
| S n' =>
match exec_step s with
| None => None
| Some s' => exec_n s' n'
end
end.
(*
Eval simpl in exec_n startstate 1.
Eval simpl in exec_n startstate 2.
Eval simpl in exec_n startstate 5.
*)
(* Eval simpl in exec_n (fun x y => false) (Dico.empty _) startstate 2. *)
(** An example of a very simple proof about the execution function *)
Lemma essai : forall s x,
exec_step s = Some x ->
x.(framestack) = s.(framestack).
Proof.
intros s.
functional induction exec_step s;intros ;simpl;
try solve [discriminate | inversion H; subst;simpl;reflexivity] .
Qed.
End O.
|
Formal statement is: lemma (in topological_space) tendsto_eq_rhs: "(f \<longlongrightarrow> x) F \<Longrightarrow> x = y \<Longrightarrow> (f \<longlongrightarrow> y) F" Informal statement is: If $f$ converges to $x$ in $F$, and $x = y$, then $f$ converges to $y$ in $F$. |
module InputOutput
main : IO ()
main = do
putStr "Enter your name: "
x <- getLine
putStrLn ("Halo " ++ x ++ "!")
readNumber : IO (Maybe Nat)
readNumber = do
input <- getLine
if all isDigit (unpack input) then
pure (Just (cast input))
else
pure Nothing
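-- Illustrative use of readNumber with an alternative pattern in a do block
-- (a sketch, not part of the original file):
--
--   printDouble : IO ()
--   printDouble = do Just n <- readNumber
--                      | Nothing => putStrLn "Not a number"
--                    printLn (n * 2)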
|
(* Property from Case-Analysis for Rippling and Inductive Proof,
Moa Johansson, Lucas Dixon and Alan Bundy, ITP 2010.
This Isabelle theory is produced using the TIP tool offered at the following website:
https://github.com/tip-org/tools
This file was originally provided as part of TIP benchmark at the following website:
https://github.com/tip-org/benchmarks
Yutaka Nagashima at CIIRC, CTU changed the TIP output theory file slightly
to make it compatible with Isabelle2017.
Some proofs were added by Yutaka Nagashima.*)
theory TIP_prop_26
imports "../../Test_Base"
begin
datatype 'a list = nil2 | cons2 "'a" "'a list"
datatype Nat = Z | S "Nat"
fun y :: "'a list => 'a list => 'a list" where
"y (nil2) y2 = y2"
| "y (cons2 z2 xs) y2 = cons2 z2 (y xs y2)"
fun x :: "Nat => Nat => bool" where
"x (Z) (Z) = True"
| "x (Z) (S z2) = False"
| "x (S x2) (Z) = False"
| "x (S x2) (S y22) = x x2 y22"
fun elem :: "Nat => Nat list => bool" where
"elem z (nil2) = False"
| "elem z (cons2 z2 xs) = (if x z z2 then True else elem z xs)"
theorem property0 :
"((elem z xs) ==> (elem z (y xs ys)))"
find_proof DInd
apply (induct arbitrary: ys rule: elem.induct)
apply auto
done
theorem property0' :
"((elem z xs) ==> (elem z (y xs ys)))"
(*Why induction on "xs"?
Because "elem" is the only recursive function in the premise, which pattern-matches on the
second parameter, which is "xs" in this case.
Furthermore, the innermost recursive function "y" is pattern-matches on the first parameter,
which is "xs" in this case.*)
apply(induct xs)
apply auto
done
theorem property0 :
"((elem z xs) ==> (elem z (y xs ys)))"
apply (induct arbitrary: ys rule: y.induct)
apply (induct z)
apply(induct_tac y2)
nitpick
oops
end |
# Copyright (c) 2018-2020, Carnegie Mellon University
# See LICENSE for details
#
# Top-level qCirc object and associated helpers
#
##
#F CircTerm( <ops>, <n>, <arch> )
##
#F given a list of operations to perform on target qubits, the number of qubits, and the hardware architecture,
#F expand out the qCircuit object as a matrix product of qEmbed objects
##
CircTerm := function(ops, n, arch)
local mat, h, oper;
mat := I(2^(n));
for oper in ops do
# oper is [[1,2,3], qHT], for example
h := qEmbed(oper[1], n, arch, oper[2]).terminate();
mat := mat * h;
od;
return mat;
end;
##
#F ArchUpdate( <oplist>, <arch> )
##
#F updates all operations in oplist to use the new architecture
##
ArchUpdate := function(oplist, arch)
local newlist, ops, qubits, T;
newlist := [];
for ops in oplist do
# op is [[qubits], Transform]
qubits := ops[1];
T := ops[2].recursive_def(arch);
Add(newlist, [qubits, T]);
od;
return newlist;
end;
##
#F qCirc( <arch>, <n>, <ops>) Object
##
#F Given architecture arch and number of qubits n, embed each operation in ops
#F an operation is a [[qubit list], <Non-Terminal>] list
#F ops in an operation list
Class(qCirc, TaggedNonTerminal, rec(
abbrevs := [ (arch, n, ops) -> Checked(IsPosInt(n), [arch, n, ops]) ],
dims := self >> let(size := 2^self.params[2], [size, size]),
terminate := self >> CircTerm(self.params[3], self.params[2], self.params[1]),
isReal := self >> false,
recursive_def := (self, arch) >> self.__bases__[1](arch, self.params[2], ArchUpdate(self.params[3], arch) ),
rChildren := self >> self.params,
from_rChildren := (self, rch) >> self.__bases__[1](rch[1], rch[2], rch[3]),
SmallRandom := () -> Random([2..5]),
LargeRandom := () -> Random([6..15]),
normalizedArithCost := self >> Error("ArithCost not implemented"),
TType := T_Complex(64)
));
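##
## Illustrative example (a sketch, not from the original sources; assumes
## <arch> is an architecture object and qHT is a transform non-terminal, as
## in the CircTerm comment above):
##
##    c := qCirc(arch, 2, [ [[1], qHT] ]);
##    m := c.terminate();
##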
##
#F GenCircChildren( <arch>, <n>, <ops> )
##
#F given a list of operations to perform on target qubits, the number of qubits, and the hardware architecture,
#F expand out the qCircuit object as a matrix product of qEmbed objects
##
GenCircChildren := function(arch, n, ops)
    local current_op, h1, h, rest;
current_op := ops[1];
h1 := qEmbed(current_op[1], n, arch, current_op[2]);
h := h1;
if Length(ops) > 1 then
rest := [2..Length(ops)];
rest := Map(rest, i -> ops[i]);
h := h * qCirc(arch, n, rest);
fi;
return [h];
end;
##
#F qCirc Breakdown Rules
##
NewRulesFor(qCirc, rec(
# qCirc_Expand rule
# qCirc_Expand qCirc(arch, n, ops) -> qEmbed(qubits, n, arch, <Non-Terminal>) * ...
qCirc_Expand := rec (
forTransposition := false,
minSize := 2,
applicable := (self, nt) >> (nt.params[3] <> []),
children := nt -> List([GenCircChildren(nt.params[1], nt.params[2], nt.params[3])]),
apply := (nt, c, cnt) -> c[1],
switch := true,
),
# qCirc_Base rule
# QCirc_Base qCirc(arch, n, []) -> I(2^n)
qCirc_Base := rec(
info := "qCirc -> Identity",
forTransposition := false,
applicable := (self, nt) >> (nt.params[3] = []),
apply := (nt, c, cnt) -> I(2^(nt.params[2])),
)
)); |
module Named where
open import Data.String
open import Data.Nat hiding (_≟_)
open import Data.Bool using (T; not)
open import Data.Product
open import Data.Sum
-- open import Data.Nat.Properties using (strictTotalOrder)
-- open import Relation.Binary using (StrictTotalOrder)
-- open import Relation.Binary.Core
open import Function using (id)
open import Function.Equivalence using (_⇔_; equivalence)
open import Relation.Nullary
open import Relation.Unary
open import Relation.Nullary.Negation
open import Data.Unit using (⊤)
open import Function using (_∘_)
-- open import Level renaming (zero to Lzero)
open import Relation.Binary.PropositionalEquality
-- open ≡-Reasoning
-- open ≡-Reasoning
-- renaming (begin_ to beginEq_; _≡⟨_⟩_ to _≡Eq⟨_⟩_; _∎ to _∎Eq)
open import Data.Collection
open import Data.Collection.Properties
open import Data.Collection.Equivalence
open import Data.Collection.Inclusion
open import Relation.Binary.PartialOrderReasoning ⊆-Poset
Variable = String
data PreTerm : Set where
Var : (w : Variable) → PreTerm
App : (P : PreTerm) → (Q : PreTerm) → PreTerm
Abs : (w : Variable) → (Q : PreTerm) → PreTerm
showPreTerm : PreTerm → String
showPreTerm (Var x) = x
showPreTerm (App P Q) = "(" ++ showPreTerm P ++ " " ++ showPreTerm Q ++ ")"
showPreTerm (Abs x M) = "(λ" ++ x ++ "." ++ showPreTerm M ++ ")"
I : PreTerm
I = Abs "x" (Var "x")
S : PreTerm
S = Abs "x" (App (Var "y") (Var "x"))
FV : PreTerm → Collection
FV (Var x ) = singleton x
FV (App f x) = union (FV f) (FV x)
FV (Abs x m) = delete x (FV m)
-- a = singleton "x" ∋ (elem "x" ∪ elem "y")
-- b = C[ singleton "x" ] ∩ C[ singleton "x" ]
-- M = FV S
-- neither∈ : ∀ {x A B} → x ∉ C[ A union B ] →
_[_≔_] : PreTerm → Variable → PreTerm → PreTerm
Var x [ v ≔ N ] with x ≟ v
Var x [ v ≔ N ] | yes p = N
Var x [ v ≔ N ] | no ¬p = Var x
App P Q [ v ≔ N ] = App (P [ v ≔ N ]) (Q [ v ≔ N ])
Abs x P [ v ≔ N ] with x ≟ v
Abs x P [ v ≔ N ] | yes p = Abs v P
Abs x P [ v ≔ N ] | no ¬p = Abs x (P [ v ≔ N ])
-- If v ∉ FV(M) then M[v≔N] is defined and M[v≔N] ≡ M
lem-1-2-5-a : ∀ M N v → v ∉ c[ FV M ] → M [ v ≔ N ] ≡ M
lem-1-2-5-a (Var x) N v v∉M with x ≟ v
lem-1-2-5-a (Var x) N .x v∉M | yes refl = contradiction here v∉M
lem-1-2-5-a (Var x) N v v∉M | no ¬p = refl
lem-1-2-5-a (App P Q) N v v∉M = cong₂
App
(lem-1-2-5-a P N v (not-in-left-union (FV P) (FV Q) v∉M))
(lem-1-2-5-a Q N v (not-in-right-union (FV P) (FV Q) v∉M))
lem-1-2-5-a (Abs x M) N v v∉M with x ≟ v
lem-1-2-5-a (Abs x M) N v v∉M | yes p = cong (λ z → Abs z M) (sym p)
lem-1-2-5-a (Abs x M) N v v∉M | no ¬p = cong (Abs x) (lem-1-2-5-a M N v (still-∉-after-recovered x (FV M) ¬p v∉M))
-- begin
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ∎
-- begin
-- {! !}
-- ≤⟨ {! !} ⟩
-- {! !}
-- ≤⟨ {! !} ⟩
-- {! !}
-- ∎
-- If M[v≔N] is defined, v ≠ x and x ∈ FV(M) iff x ∈ FV(M[v≔N])
lem-1-2-5-b-i : ∀ {v N} M → c[ FV M ] ≋[ _≢_ v ] c[ FV (M [ v ≔ N ]) ]
lem-1-2-5-b-i {v} {N} M v≢x = equivalence (to M v≢x) (from M v≢x)
where
to : ∀ {v N} M → c[ FV M ] ⊆[ _≢_ v ] c[ FV (M [ v ≔ N ]) ]
to {v} (Var w) v≢x ∈FV-M with w ≟ v
to (Var w) v≢x ∈FV-M | yes p = contradiction (sym (trans (nach singleton-≡ ∈FV-M) p)) v≢x
to (Var w) v≢x ∈FV-M | no ¬p = ∈FV-M
to {v} {N} (App P Q) v≢x = begin
c[ union (FV P) (FV Q) ]
≤⟨ union-monotone {! !} {! !} {! !} ⟩
{! !}
≤⟨ {! !} ⟩
{! !}
≤⟨ {! !} ⟩
{! !}
≤⟨ {! !} ⟩
{! !}
≤⟨ {! !} ⟩
c[ union (FV (P [ v ≔ N ])) (FV (Q [ v ≔ N ])) ]
∎
to (Abs w M) v≢x ∈FV-M = {! !}
-- to (Var w) ∈FV-M with w ≟ v
-- to (Var w) ∈FV-M | yes p = contradiction (sym (trans (nach singleton-≡ ∈FV-M) p)) v≢x
-- to (Var w) ∈FV-M | no ¬p = ∈FV-M
-- to (App P Q) = begin
-- c[ union (FV P) (FV Q) ]
-- ≤⟨ union-monotone {! !} {! !} {! !} ⟩
-- {! !}
-- ≤⟨ {! !} ⟩
-- {! !}
-- ≤⟨ {! !} ⟩
-- c[ union (FV (P [ v ≔ N ])) (FV (Q [ v ≔ N ])) ]
-- ∎
-- to (Abs w M) ∈FV-M = {! !}
from : ∀ {v N} M → c[ FV (M [ v ≔ N ]) ] ⊆[ _≢_ v ] c[ FV M ]
from M = {! !}
-- lem-1-2-5-b-i : ∀ {x v N} M → v ≢ x → x ∈ c[ FV M ] ⇔ x ∈ c[ FV (M [ v ≔ N ]) ]
-- lem-1-2-5-b-i {x} {v} {N} (Var w) v≢x with w ≟ v -- x ≡ w
-- lem-1-2-5-b-i {x} {v} {N} (Var w) v≢x | yes p =
-- equivalence
-- (λ ∈[w] → contradiction (sym (trans (nach singleton-≡ ∈[w]) p)) v≢x)
-- from
-- where to : x ∈ c[ w ∷ [] ] → x ∈ c[ FV N ]
-- to ∈[w] = {! !}
-- from : x ∈ c[ FV N ] → x ∈ c[ w ∷ [] ] -- x ∈ c[ FV (N [ v ≔ N ]) ]
-- from ∈FV-N = {! !}
-- lem-1-2-5-b-i {x} {v} {N} (Var w) v≢x | no ¬p = equivalence id id
-- lem-1-2-5-b-i {x} {v} {N} (App P Q) v≢x = equivalence to {! !}
-- where to : c[ union (FV P) (FV Q) ] ⊆ c[ union (FV (P [ v ≔ N ])) (FV (Q [ v ≔ N ])) ]
-- to = map-⊆-union {FV P} {FV Q} {FV (P [ v ≔ N ])} {FV (Q [ v ≔ N ])} (_≢_ v) {! !} {! !} {! !}
--
--
-- lem-1-2-5-b-i (Abs w M) v≢x = {! !}
-- lem-1-2-5-b-i : ∀ {x v N} M → v ≢ x → (x ∈ FV M) ⇔ (x ∈ FV (M [ v ≔ N ]))
-- lem-1-2-5-b-i : ∀ {x v N} M → v ≢ x → (x ∈ FV M) ≡ (x ∈ FV (M [ v ≔ N ]))
-- lem-1-2-5-b-i : ∀ {x v N} M → v ≢ x → (x ∈ c[ FV M ]) ≡ (x ∈ c[ FV (M [ v ≔ N ]) ])
-- lem-1-2-5-b-i {v = v} (Var w) v≢x with w ≟ v
-- lem-1-2-5-b-i (Var v) v≢x | yes refl = {! !}
-- lem-1-2-5-b-i {x} (Var w) v≢x | no ¬p = refl -- cong (_∈_ x) refl
-- lem-1-2-5-b-i {x} {v} {N} (App P Q) v≢x =
-- begin
-- x ∈ c[ union (FV P) (FV Q) ]
-- ≡⟨ sym {! ∪-union !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- x ∈ c[ union (FV (P [ v ≔ N ])) (FV (Q [ v ≔ N ])) ]
-- ∎
-- lem-1-2-5-b-i (Abs w P) v≢x = {! !}
-- lem-1-2-5-b-i {v = v} (Var w) v≢x x∈FV-M with w ≟ v
-- lem-1-2-5-b-i (Var w) v≢x x∈FV-M | yes p = ? -- contradiction (trans (singleton-≡ x∈FV-M) p) (v≢x ∘ sym)
-- lem-1-2-5-b-i (Var w) v≢x x∈FV-M | no ¬p = ? -- x∈FV-M
-- lem-1-2-5-b-i (App P Q) v≢x x∈FV-M = ? -- ∈-respects-≡ {! !} x∈FV-M
-- lem-1-2-5-b-i (App P Q) v≢x x∈FV-M = ∈-respects-≡ (cong₂ union (cong FV {! !}) (cong FV {! !})) x∈FV-M
-- lem-1-2-5-b-i (App P Q) v≢x x∈FV-M = ∈-respects-≡ (cong₂ union ({! !}) {! !}) x∈FV-M
-- lem-1-2-5-b-i (Abs w P) v≢x x∈FV-M = {! !}
-- If M[v≔N] is defined then y ∈ FV(M[v≔N]) iff either y ∈ FV(M) and v ≠ y
-- or y ∈ FV(N) and x ∈ FV(M)
-- lem-1-2-5-b-i : ∀ {x y N} M v → y ∈ FV (M [ v ≔ N ]) → y ∈ FV M × x ≢ y ⊎ y ∈ FV N × x ∈ FV M
-- lem-1-2-5-b⇒ (Var w) v y∈Applied with w ≟ v
-- lem-1-2-5-b⇒ (Var w) v y∈Applied | yes p = {! !}
-- lem-1-2-5-b⇒ (Var w) v y∈Applied | no ¬p = inj₁ (y∈Applied , {! singleton-≡ ∈ !})
-- lem-1-2-5-b⇒ (App P Q) v y∈Applied = {! !}
-- lem-1-2-5-b⇒ (Abs w P) v y∈Applied = {! !}
--
-- lem-1-2-5-b⇐ : ∀ {x y v M N} → y ∈ FV M × x ≢ y ⊎ y ∈ FV N × x ∈ FV M → y ∈ FV (M [ v ≔ N ])
-- lem-1-2-5-b⇐ = {! !}
lem-1-2-5-c : (M : PreTerm) → (x : Variable) → M [ x ≔ Var x ] ≡ M
lem-1-2-5-c (Var x ) y with x ≟ y
lem-1-2-5-c (Var x ) y | yes p = sym (cong Var p)
lem-1-2-5-c (Var x ) y | no ¬p = refl
lem-1-2-5-c (App P Q) y = cong₂ App (lem-1-2-5-c P y) (lem-1-2-5-c Q y)
lem-1-2-5-c (Abs x M) y with x ≟ y
lem-1-2-5-c (Abs x M) y | yes p = cong (λ w → Abs w M) (sym p)
lem-1-2-5-c (Abs x M) y | no ¬p = cong (Abs x) (lem-1-2-5-c M y)
length : PreTerm → ℕ
length (Var x) = 1
length (App P Q) = length P + length Q
length (Abs x M) = 1 + length M
-- lem-1-2-5-c : (M : PreTerm) → (x : Variable) → (N : PreTerm) → T (not (x ∈? FV M)) → M [ x ≔ N ] ≡ M
-- lem-1-2-5-c (Var x') x N x∉M with x' ≟ x
-- lem-1-2-5-c (Var x') x N x∉M | yes p =
-- begin
-- N
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- Var x'
-- ∎
-- lem-1-2-5-c (Var x') x N x∉M | no ¬p = {! !}
-- lem-1-2-5-c (App P Q) x N x∉M =
-- begin
-- App (P [ x ≔ N ]) (Q [ x ≔ N ])
-- ≡⟨ refl ⟩
-- App P Q [ x ≔ N ]
-- ≡⟨ {! !} ⟩
-- App P Q
-- ∎
-- lem-1-2-5-c (Abs x' M) x N x∉M = {! !}
-- begin
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ≡⟨ {! !} ⟩
-- {! !}
-- ∎
|
\chapter{Kilika}
\restartlist{enumerate}
\liteversiondetermination{Exclude}{%
\begin{enumerate}
\item \sd\ on exiting the boat, go up and left, \sd. \skippablefmv[2:00], (press Start immediately after skip) \sd
\item Exit inn, go right to \wakka, \sd. Go left and up to Kilika Woods, \sd
\end{enumerate}
}
\begin{battle}{Lancet Tutorial}
\begin{itemize}
\item \sd
\kimahrif Lancet
\kimahrif Attack
\tidusf Defend
\luluf Fire
\end{itemize}
\end{battle}
\liteversiondetermination{Exclude}{\includegraphics{graphics/kilikamapNSG_small}}
\begin{enumerate}[resume]
\item Go left and up the hidden path, \pickup{Scout}. Do not equip, it is sold later. (\textbf{\textcolor{blue}{Blue Path}})
\item Teach Tidus Flee via the Sphere Grid
\item Immediately after crossing the log bridge turn right (\textbf{\textcolor{blue}{Blue Path}}). Repeatedly run into \textbf{Lord Ochu} and Flee until your total encounter count is 159 (141 Random Encounters). After the first Flee a crusader will give you 3x Phoenix Down. It is important to keep track of your encounters as you will need 240 total / 206 random encounters before Seymour and 270 total / 222 random encounters before Isaaru.
\end{enumerate}
\liteversiondetermination{Exclude}{%
\begin{enumerate}[resume]
\item Continue up the hidden path, following the map, to the temple steps (\textbf{\textcolor{red}{Red Path}})
\item \sd
\item \formation{\yuna}{\kimahri}{\lulu}
\item \save
\end{enumerate}
}
\liteversiondetermination{Include}{%
\begin{enumerate}[resume]
\item Before Geneaux: \formation{\yuna}{\kimahri}{\lulu}
\item \save
\end{enumerate}
}
\begin{battle}[3000]{Sinspawn Geneaux}
\begin{itemize}
\summon{\valefor}
\valeforf Fire Tentacle
\valeforf Fire Tentacle
\valeforf Fire Main Body x3
\valeforf Sonic Wings Main Body
\valeforf Fire Main Body x1-2
\end{itemize}
\end{battle}
\liteversiondetermination{Exclude}{%
\begin{enumerate}[resume]
\item \sd\ on stone steps and temple. go into temple. Walk up to \wakka\ and Pray. \sd\ inside temple and go up steps. Wait for lift and \sd.
\end{enumerate}
\begin{trial}
\begin{itemize}
\item Take the sphere from the pedestal
\item Place into the door, take it off of the door.
\item Place sphere into the next door, take the sphere back.
\item Place the sphere into the right holder
\item Touch glyph
\item Take the sphere from the next room
\item Place it into the left holder
\item Take the glyph sphere from the pedestal
\item Place it in the Fire Room
\item Take the sphere that you put into the right holder
\item Use it to open the door in the Fire Room
\item Take the sphere off the door
\item Enter the Fayth room
\end{itemize}
\end{trial}
\begin{enumerate}[resume]
\item In Fayth room, \sd, speak to \wakka\ first. Try to leave room, \sd, name \ifrit
\item Hold down to exit temple, \cs[0:40], \sd
\item Exit Kilika Woods same way that you entered
\item Go down and right to S.S. Winno. \sd
\end{enumerate}
}
\liteversiondetermination{Include}{%
\begin{enumerate}[resume]
\item After the temple exit Kilika, via the same route you entered, Fleeing all encounters
\end{enumerate}
} |
Require Import Platform.AutoSep Platform.Malloc Platform.tests.Echo Platform.Bootstrap.
Module Type S.
Variable heapSize : nat.
End S.
Module Make(M : S).
Import M.
Section boot.
Hypothesis heapSizeLowerBound : (3 <= heapSize)%nat.
Definition size := heapSize + 50 + 0.
Hypothesis mem_size : goodSize (size * 4)%nat.
Let heapSizeUpperBound : goodSize (heapSize * 4).
goodSize.
Qed.
Definition bootS := bootS heapSize 0.
Definition boot := bimport [[ "malloc"!"init" @ [Malloc.initS], "test"!"main" @ [Echo.mainS] ]]
bmodule "main" {{
bfunctionNoRet "main"() [bootS]
Sp <- (heapSize * 4)%nat;;
Assert [PREonly[_] 0 =?> heapSize];;
Call "malloc"!"init"(0, heapSize)
[PREonly[_] mallocHeap 0];;
Call "test"!"main"()
[PREonly[_] [| False |] ]
end
}}.
Theorem ok : moduleOk boot.
vcgen; abstract genesis.
Qed.
Definition m0 := link Malloc.m boot.
Definition m1 := link Echo.m m0.
Lemma ok0 : moduleOk m0.
link Malloc.ok ok.
Qed.
Lemma ok1 : moduleOk m1.
link Echo.ok ok0.
Qed.
Variable stn : settings.
Variable prog : program.
Hypothesis inj : forall l1 l2 w, Labels stn l1 = Some w
-> Labels stn l2 = Some w
-> l1 = l2.
Hypothesis agree : forall l pre bl,
LabelMap.MapsTo l (pre, bl) (XCAP.Blocks m1)
-> exists w, Labels stn l = Some w
/\ prog w = Some bl.
Hypothesis agreeImp : forall l pre, LabelMap.MapsTo l pre (XCAP.Imports m1)
-> exists w, Labels stn l = Some w
/\ prog w = None.
Hypothesis omitImp : forall l w,
Labels stn ("sys", l) = Some w
-> prog w = None.
Variable w : W.
Hypothesis at_start : Labels stn ("main", Global "main") = Some w.
Variable st : state.
Hypothesis mem_low : forall n, (n < size * 4)%nat -> st.(Mem) n <> None.
Hypothesis mem_high : forall w, $ (size * 4) <= w -> st.(Mem) w = None.
Theorem safe : sys_safe stn prog (w, st).
safety ok1.
Qed.
End boot.
End Make.
|
[STATEMENT]
lemma EphK_Tag: "EphK X \<noteq> Tag X'"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. EphK X \<noteq> Tag X'
[PROOF STEP]
by transfer auto |
import Nemo.setcoeff!, Nemo.exp, Base.start, Base.next, Base.done, Nemo.lift, Hecke.lift
export start, next, done, SetPrimes, psi_lower, psi_upper
#function setcoeff!(g::fmpz_mod_rel_series, i::Int64, a::Nemo.GenRes{Nemo.fmpz})
# setcoeff!(g, i, lift(a))
#end
function bernstein(h::Int, it::Any, Q = FlintQQ, cl = ceil, a::Int = 776)
#use 771 and cl = floor to get decent upper bounds
# more on the choice of 776 and 771 in Dan's paper
#println("in bernstein with a=$a and cl(0.5) = $(cl(0.5))")
R,t = PowerSeriesRing(Q, a*h+1, "t")
#implements https://cr.yp.to/papers/psi.pdf
# for smoothness for ideals, replace next_prime by the list of the norms of the prime
# ideals
# the sums of the coefficients of exp(g) are bounds for psi
st = start(it)
p, st = next(it, st)
g = R(0)
tp = R(0)
lpp = Int(cl(Float64(log(p))/log(2)*a))
function do_single!(g::SeriesElem, pp::Int, np::Int)
i = pp
nu = Q(np)*Q(pp)
while i <= a*h
A = coeff(g, i)
A += divexact(nu, Q(i))
setcoeff!(g, i, A)
i += pp
end
end
while true
pp = lpp
np = 0
while pp == lpp && !done(it, st)
np += 1
p, st = next(it, st)
lpp = Int(cl(Float64(log(p))/log(2)*a))
if done(it, st)
break
end
end
if done(it, st) && pp == lpp
np += 1
end
do_single!(g, pp, np)
if done(it, st)
if pp != lpp
do_single!(g, lpp, 1)
end
return g
end
end
end
function _exp(a::fmpz_mod_rel_series)
R = base_ring(parent(a))
Rx,x = PolynomialRing(R)
A = Rx()
for i=0:length(a)-1
setcoeff!(A, i, coeff(a, i))
end
E = Rx()
ccall((:nmod_poly_exp_series, :libflint), Void, (Ptr{nmod_poly}, Ptr{nmod_poly}, Int64), &E, &A, length(a))
r = parent(a)()
for i=0:length(E)-1
setcoeff!(r, i, coeff(E, i))
end
return r
end
immutable SetPrimes{T}
from::T
to::T
mod::T # if set (i.e. >1), only primes p % mod == a are returned
a::T
function SetPrimes(f::T, t::T)
r = new(f, t, T(1), T(0))
return r
end
function SetPrimes(f::T, t::T, mod::T, val::T)
r = new(f, t, mod, val)
if gcd(mod, val) != 1
error("modulus and value need to be coprime")
end
return r
end
end
function SetPrimes{T}(f::T, t::T)
return SetPrimes{T}(f, t)
end
function SetPrimes{T}(f::T, t::T, mod::T, val::T)
return SetPrimes{T}(f, t, mod, val)
end
function start{T<: Union{Integer, fmpz}}(A::SetPrimes{T})
curr = A.from
if A.mod >1 && curr % A.mod != A.a
curr += A.mod - (curr % A.mod) + A.a
end
while !isprime(curr)
curr += A.mod
end
return curr
end
function next{T<: Union{Integer, fmpz}}(A::SetPrimes{T}, st::T)
p = st
if A.mod >1
m = A.mod
else
if p==2
st = T(3)
return p, st
end
m = T(2)
end
st = p+m
while !isprime(st)
st += m
end
return p, st
end
function done{T <: Union{Integer, fmpz}}(A::SetPrimes{T}, st::T)
return A.to != -1 && st > A.to
end
eltype{T <: Union{Integer, fmpz}}(::SetPrimes{T}) = T
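# Hypothetical usage sketch (not part of the original file) of the Julia 0.6-style
# iteration protocol implemented above (start/next/done); it would print the primes
# p with 10 <= p <= 50 and p % 4 == 1:
#
#   P = SetPrimes(10, 50, 4, 1)
#   st = start(P)
#   while !done(P, st)
#     p, st = next(P, st)
#     println(p)          # 13, 17, 29, 37, 41
#   end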
function lift(R::FmpzRelSeriesRing, f::fmpz_mod_rel_series)
r = R()
for i=0:length(f)-1
setcoeff!(r, i, lift(coeff(f, i)))
end
return r
end
function psi_lower(N::fmpz, pr, a::Int=776, cl = ceil)
p = fmpz(next_prime(2^60))
n = Int(ceil(log(N)/log(2)))
# println("precision of $n")
f = _exp(bernstein(n, pr, ResidueRing(FlintZZ, p), cl, a))
Rt, t = PowerSeriesRing(FlintZZ, n*a+1, "t")
f = lift(Rt, f)
pp = p
while pp < N
p = next_prime(p)
# println("p: $p, pp: $pp N:$N")
g = _exp(bernstein(n, pr, ResidueRing(FlintZZ, p), cl, a))
@assert length(g) == length(f)
for i=0:length(f)
setcoeff!(f, i, crt(coeff(f, i), pp, lift(coeff(g, i)), p))
end
pp *= p
end
res = []
s = 0
j = 0
for i=0:n
while j <= i*a && j < length(f)
s += coeff(f, j)
j += 1
end
push!(res, s)
if j >= length(f) break; end
end
return res, f # res[i] <= psi(2^(i-1), B)
end
function psi_lower(N::fmpz, B::Int, a::Int = 776, cl = ceil)
return psi_lower(fmpz(N), SetPrimes{Int}(2, B), a, cl)
end
function psi_lower(N::Integer, B::Int, a::Int = 776, cl = ceil)
return psi_lower(fmpz(N), SetPrimes{Int}(2, B), a, cl)
end
function psi_upper(N::fmpz, B::Int, a::Int=771, fl = floor)
return psi_lower(N, SetPrimes{Int}(2, B), a, fl)
end
function psi_upper(N::Integer, B::Int, a::Int=771, fl = floor)
return psi_lower(fmpz(N), SetPrimes{Int}(2, B), a, fl)
end
function psi_lower(N::Integer, B::Hecke.NfFactorBase, a::Int=776, cl = ceil)
lp = sort(fmpz[norm(x) for x=B.ideals])
return psi_lower(fmpz(N), lp, a, cl)
end
function psi_lower(N::fmpz, B::Hecke.NfFactorBase, a::Int=776, cl = ceil)
lp = sort(fmpz[norm(x) for x=B.ideals])
return psi_lower(N, lp, a, cl)
end
function psi_upper(N::Integer, B::Hecke.NfFactorBase, a::Int=771, cl = floor)
lp = sort(fmpz[norm(x) for x=B.ideals])
return psi_lower(fmpz(N), lp, a, cl)
end
function psi_upper(N::fmpz, B::Hecke.NfFactorBase, a::Int=771, cl = floor)
lp = sort(fmpz[norm(x) for x=B.ideals])
return psi_lower(N, lp, a, cl)
end
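# Hypothetical usage sketch (assuming Nemo/Hecke are loaded as above):
#
#   lo, _ = psi_lower(2^30, 1000)   # lo[i] is a lower bound for psi(2^(i-1), 1000)
#   hi, _ = psi_upper(2^30, 1000)   # analogous upper bounds (a = 771, cl = floor)
#   lo[end], hi[end]                # bracket the count of 1000-smooth integers up to 2^30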
|
The comic book's initial cancellation and the introduction of the character to DC drew much negative feedback. I, Vampire writer Joshua Hale Fialkov expressed sadness that he would never get to write "the 'real' John Constantine"; noted crime author and former Hellblazer writer Ian Rankin stated that Constantine was the only comic book character he ever wanted to write for; and Alan Moore's daughter, Leah Moore, expressed doubt that Constantine could replace Hellblazer, among others. As a result, DC co-publisher Dan <unk> issued a statement defending this decision, stating that, "Hellblazer's had a long and incredibly successful run and that's a tip of the hat to all the great creators that have worked on the book over the years. The new Constantine series will return him back to his roots in the DCU and hopefully be the start of another incredible run." Comic Alliance described Hellblazer's cancellation as marking "the end of an era for Vertigo" while calling it "one of a handful of comics from the late eighties that helped comic books and their readers grow up."
|
/-
Copyright (c) 2022 Devon Tuma. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Devon Tuma
-/
import computational_monads.oracle_comp
/-!
# Running Computations With No Oracles
This file defines a function `run_comp` for "running" an `oracle_comp` that doesn't have access to
any oracles, where `empty_spec` is used to represent the lack of available oracles.
In this case the `oracle_comp.query` constructor can't be called, and so we can eliminate this
case and perform the natural reduction on other computations.
TODO: Should be possible to generalize this to use `random_gen`, allowing greater flexibility.
-/
namespace oracle_comp
open oracle_spec
/-- Run a computation with `[]ₒ` as the oracles, using `empty.elim` in the `query` case. -/
def run_comp : Π {α : Type}, oracle_comp []ₒ α → α
| _ (pure' α a) := a
| _ (bind' α β oa ob) := let a : α := run_comp oa in run_comp (ob a)
| _ (query i t) := empty.elim i
variables {α β : Type} (oa : oracle_comp []ₒ α) (ob : α → oracle_comp []ₒ β) (a : α) (b : β)
@[simp] lemma run_comp_return : run_comp (return a) = a := rfl
@[simp] lemma run_comp_bind : run_comp (oa >>= ob) = run_comp (ob $ run_comp oa) := rfl
lemma run_comp_query (i : []ₒ.ι) (t : []ₒ.domain i) (u : []ₒ.range i) : run_comp (query i t) = u :=
empty.elim i
example : run_comp
(do { x ← return 1,
y ← return (x + 1),
z ← return (x * y + y * x),
return (y + y = z) }) = true := -- Check that `2 + 2 = 4`
by simp only [run_comp_bind, run_comp_return, one_mul, mul_one, eq_self_iff_true]
end oracle_comp |
%% triangleRayIntersection
% Below is a demonstration of the features of the |triangleRayIntersection| function
%%
clear; close all; clc;
%% Syntax
% |[V_intersect,L_intersect,T] = triangleRayIntersection (V_ori,R,V,F,optStruct);|
%% Description
% UNDOCUMENTED
%% Examples
%
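%%
% The function is undocumented in this demo, so below is only a minimal usage
% sketch based on the syntax listed above. The input sizes and the contents of
% |optStruct| are assumptions (an empty struct, i.e. default settings), not
% documented behaviour.

%Create a single triangle in the XY-plane
V=[0 0 0; 1 0 0; 0 1 0]; %Vertex coordinates
F=[1 2 3]; %Face description

%Define a ray starting above the triangle and pointing down
V_ori=[0.25 0.25 1]; %Ray origin
R=[0 0 -1]; %Ray direction vector

optStruct=struct(); %Assumed: empty option structure -> defaults

%Evaluate intersection using the syntax shown in the Syntax section
[V_intersect,L_intersect,T]=triangleRayIntersection(V_ori,R,V,F,optStruct);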
%%
%
% <<gibbVerySmall.gif>>
%
% _*GIBBON*_
% <www.gibboncode.org>
%
% _Kevin Mattheus Moerman_, <[email protected]>
%%
% _*GIBBON footer text*_
%
% License: <https://github.com/gibbonCode/GIBBON/blob/master/LICENSE>
%
% GIBBON: The Geometry and Image-based Bioengineering add-On. A toolbox for
% image segmentation, image-based modeling, meshing, and finite element
% analysis.
%
% Copyright (C) 2006-2022 Kevin Mattheus Moerman and the GIBBON contributors
%
% This program is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with this program. If not, see <http://www.gnu.org/licenses/>.
|
Formal statement is: lemma AE_iff_null: assumes "{x\<in>space M. \<not> P x} \<in> sets M" (is "?P \<in> sets M") shows "(AE x in M. P x) \<longleftrightarrow> {x\<in>space M. \<not> P x} \<in> null_sets M" Informal statement is: A property holds almost everywhere if and only if the set of points where it fails is a null set. |
module Data.Nat.Equational
import Data.Nat
%default total
||| Subtract a number from both sides of an equation.
||| Due to partial nature of subtraction in natural numbers, an equation of
||| special form is required in order for subtraction to be total.
export
subtractEqLeft : (a : Nat) -> {b, c : Nat} -> a + b = a + c -> b = c
subtractEqLeft 0 prf = prf
subtractEqLeft (S k) prf = subtractEqLeft k $ succInjective (k + b) (k + c) prf
||| Subtract a number from both sides of an equation.
||| Due to partial nature of subtraction in natural numbers, an equation of
||| special form is required in order for subtraction to be total.
export
subtractEqRight : {a, b : Nat} -> (c : Nat) -> a + c = b + c -> a = b
subtractEqRight c prf =
subtractEqLeft c $
rewrite plusCommutative c a in
rewrite plusCommutative c b in
prf
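-- A hypothetical usage sketch (not part of the original module): cancel a known
-- common summand on the left of an equation between naturals.
cancelThree : {n, m : Nat} -> 3 + n = 3 + m -> n = m
cancelThree = subtractEqLeft 3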
||| Add a number to both sides of an inequality
export
plusLteLeft : (a : Nat) -> {b, c : Nat} -> LTE b c -> LTE (a + b) (a + c)
plusLteLeft 0 bLTEc = bLTEc
plusLteLeft (S k) bLTEc = LTESucc $ plusLteLeft k bLTEc
||| Add a number to both sides of an inequality
export
plusLteRight : {a, b : Nat} -> (c : Nat) -> LTE a b -> LTE (a + c) (b + c)
plusLteRight c aLTEb =
rewrite plusCommutative a c in
rewrite plusCommutative b c in
plusLteLeft c aLTEb
||| Subtract a number from both sides of an inequality.
||| Due to partial nature of subtraction, we require an inequality of special form.
export
subtractLteLeft : (a : Nat) -> {b, c : Nat} -> LTE (a + b) (a + c) -> LTE b c
subtractLteLeft 0 abLTEac = abLTEac
subtractLteLeft (S k) abLTEac = subtractLteLeft k $ fromLteSucc abLTEac
||| Subtract a number from both sides of an inequality.
||| Due to partial nature of subtraction, we require an inequality of special form.
export
subtractLteRight : {a, b : Nat} -> (c : Nat) -> LTE (a + c) (b + c) -> LTE a b
subtractLteRight c acLTEbc =
subtractLteLeft c $
rewrite plusCommutative c a in
rewrite plusCommutative c b in
acLTEbc
||| If one of the factors of a product is greater than 0, then the other factor
||| is less than or equal to the product.
export
rightFactorLteProduct : (a, b : Nat) -> LTE b (S a * b)
rightFactorLteProduct a b = lteAddRight b
||| If one of the factors of a product is greater than 0, then the other factor
||| is less than or equal to the product.
export
leftFactorLteProduct : (a, b : Nat) -> LTE a (a * S b)
leftFactorLteProduct a b =
rewrite multRightSuccPlus a b in
lteAddRight a
|
/-
Copyright (c) 2020 Yury Kudryashov. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yury Kudryashov
-/
import Mathlib.PrePort
import Mathlib.Lean3Lib.init.default
import Mathlib.tactic.ring_exp
import Mathlib.algebra.algebra.basic
import Mathlib.algebra.opposites
import Mathlib.data.equiv.ring
import Mathlib.PostPort
universes u_1 l
namespace Mathlib
/-!
# Quaternions
In this file we define quaternions `ℍ[R]` over a commutative ring `R`, and define some
algebraic structures on `ℍ[R]`.
## Main definitions
* `quaternion_algebra R a b`, `ℍ[R, a, b]` :
[quaternion algebra](https://en.wikipedia.org/wiki/Quaternion_algebra) with coefficients `a`, `b`
* `quaternion R`, `ℍ[R]` : the space of quaternions, a.k.a. `quaternion_algebra R (-1) (-1)`;
* `quaternion.norm_sq` : square of the norm of a quaternion;
* `quaternion.conj` : conjugate of a quaternion;
We also define the following algebraic structures on `ℍ[R]`:
* `ring ℍ[R, a, b]` and `algebra R ℍ[R, a, b]` : for any commutative ring `R`;
* `ring ℍ[R]` and `algebra R ℍ[R]` : for any commutative ring `R`;
* `domain ℍ[R]` : for a linear ordered commutative ring `R`;
* `division_algebra ℍ[R]` : for a linear ordered field `R`.
## Notation
The following notation is available with `open_locale quaternion`.
* `ℍ[R, c₁, c₂]` : `quaternion_algebra R c₁ c₂`
* `ℍ[R]` : quaternions over `R`.
## Implementation notes
We define quaternions over any ring `R`, not just `ℝ` to be able to deal with, e.g., integer
or rational quaternions without using real numbers. In particular, all definitions in this file
are computable.
## Tags
quaternion
-/
/-- Quaternion algebra over a type with fixed coefficients $a=i^2$ and $b=j^2$.
Implemented as a structure with four fields: `re`, `im_i`, `im_j`, and `im_k`. -/
structure quaternion_algebra (R : Type u_1) (a : R) (b : R) where
re : R
im_i : R
im_j : R
im_k : R
namespace quaternion_algebra
@[simp] theorem mk.eta {R : Type u_1} {c₁ : R} {c₂ : R} (a : quaternion_algebra R c₁ c₂) :
mk (re a) (im_i a) (im_j a) (im_k a) = a :=
sorry
protected instance has_coe_t {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
has_coe_t R (quaternion_algebra R c₁ c₂) :=
has_coe_t.mk fun (x : R) => mk x 0 0 0
@[simp] theorem coe_re {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) : re ↑x = x := rfl
@[simp] theorem coe_im_i {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) : im_i ↑x = 0 := rfl
@[simp] theorem coe_im_j {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) : im_j ↑x = 0 := rfl
@[simp] theorem coe_im_k {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) : im_k ↑x = 0 := rfl
theorem coe_injective {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : function.injective coe :=
fun (x y : R) (h : ↑x = ↑y) => congr_arg re h
@[simp] theorem coe_inj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} {x : R} {y : R} :
↑x = ↑y ↔ x = y :=
function.injective.eq_iff coe_injective
@[simp] theorem has_zero_zero_im_j {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : im_j 0 = 0 :=
Eq.refl (im_j 0)
@[simp] theorem coe_zero {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : ↑0 = 0 := rfl
protected instance inhabited {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
Inhabited (quaternion_algebra R c₁ c₂) :=
{ default := 0 }
@[simp] theorem has_one_one_re {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : re 1 = 1 :=
Eq.refl (re 1)
@[simp] theorem coe_one {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : ↑1 = 1 := rfl
@[simp] theorem has_add_add_im_i {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
im_i (a + b) = im_i a + im_i b :=
Eq.refl (im_i (a + b))
@[simp] theorem mk_add_mk {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a₁ : R) (a₂ : R) (a₃ : R)
(a₄ : R) (b₁ : R) (b₂ : R) (b₃ : R) (b₄ : R) :
mk a₁ a₂ a₃ a₄ + mk b₁ b₂ b₃ b₄ = mk (a₁ + b₁) (a₂ + b₂) (a₃ + b₃) (a₄ + b₄) :=
rfl
protected instance has_neg {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
Neg (quaternion_algebra R c₁ c₂) :=
{ neg := fun (a : quaternion_algebra R c₁ c₂) => mk (-re a) (-im_i a) (-im_j a) (-im_k a) }
@[simp] theorem neg_mk {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a₁ : R) (a₂ : R) (a₃ : R)
(a₄ : R) : -mk a₁ a₂ a₃ a₄ = mk (-a₁) (-a₂) (-a₃) (-a₄) :=
rfl
@[simp] theorem has_sub_sub_im_k {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
im_k (a - b) = im_k a - im_k b :=
Eq.refl (im_k (a - b))
@[simp] theorem mk_sub_mk {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a₁ : R) (a₂ : R) (a₃ : R)
(a₄ : R) (b₁ : R) (b₂ : R) (b₃ : R) (b₄ : R) :
mk a₁ a₂ a₃ a₄ - mk b₁ b₂ b₃ b₄ = mk (a₁ - b₁) (a₂ - b₂) (a₃ - b₃) (a₄ - b₄) :=
rfl
/-- Multiplication is given by
* `1 * x = x * 1 = x`;
* `i * i = c₁`;
* `j * j = c₂`;
* `i * j = k`, `j * i = -k`;
* `k * k = -c₁ * c₂`;
* `i * k = c₁ * j`, `k * i = -c₁ * j`;
* `j * k = -c₂ * i`, `k * j = c₂ * i`. -/
@[simp] theorem has_mul_mul_im_i {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
im_i (a * b) = re a * im_i b + im_i a * re b - c₂ * im_j a * im_k b + c₂ * im_k a * im_j b :=
Eq.refl (im_i (a * b))
@[simp] theorem mk_mul_mk {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a₁ : R) (a₂ : R) (a₃ : R)
(a₄ : R) (b₁ : R) (b₂ : R) (b₃ : R) (b₄ : R) :
mk a₁ a₂ a₃ a₄ * mk b₁ b₂ b₃ b₄ =
mk (a₁ * b₁ + c₁ * a₂ * b₂ + c₂ * a₃ * b₃ - c₁ * c₂ * a₄ * b₄)
(a₁ * b₂ + a₂ * b₁ - c₂ * a₃ * b₄ + c₂ * a₄ * b₃)
(a₁ * b₃ + c₁ * a₂ * b₄ + a₃ * b₁ - c₁ * a₄ * b₂)
(a₁ * b₄ + a₂ * b₃ - a₃ * b₂ + a₄ * b₁) :=
rfl
protected instance ring {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
ring (quaternion_algebra R c₁ c₂) :=
ring.mk Add.add sorry 0 sorry sorry Neg.neg Sub.sub sorry sorry Mul.mul sorry 1 sorry sorry sorry
sorry
protected instance algebra {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
algebra R (quaternion_algebra R c₁ c₂) :=
algebra.mk (ring_hom.mk coe sorry sorry sorry sorry) sorry sorry
@[simp] theorem smul_re {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : re (r • a) = r • re a :=
rfl
@[simp] theorem smul_im_i {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : im_i (r • a) = r • im_i a :=
rfl
@[simp] theorem smul_im_j {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : im_j (r • a) = r • im_j a :=
rfl
@[simp] theorem smul_im_k {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : im_k (r • a) = r • im_k a :=
rfl
@[simp] theorem coe_add {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) (y : R) :
↑(x + y) = ↑x + ↑y :=
ring_hom.map_add (algebra_map R (quaternion_algebra R c₁ c₂)) x y
@[simp] theorem coe_sub {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) (y : R) :
↑(x - y) = ↑x - ↑y :=
ring_hom.map_sub (algebra_map R (quaternion_algebra R c₁ c₂)) x y
@[simp] theorem coe_neg {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) : ↑(-x) = -↑x :=
ring_hom.map_neg (algebra_map R (quaternion_algebra R c₁ c₂)) x
@[simp] theorem coe_mul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) (y : R) :
↑(x * y) = ↑x * ↑y :=
ring_hom.map_mul (algebra_map R (quaternion_algebra R c₁ c₂)) x y
theorem coe_commutes {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : ↑r * a = a * ↑r :=
algebra.commutes r a
theorem coe_commute {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : commute (↑r) a :=
coe_commutes r a
theorem coe_mul_eq_smul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : ↑r * a = r • a :=
Eq.symm (algebra.smul_def r a)
theorem mul_coe_eq_smul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : a * ↑r = r • a :=
eq.mpr (id (Eq._oldrec (Eq.refl (a * ↑r = r • a)) (Eq.symm (coe_commutes r a))))
(eq.mpr (id (Eq._oldrec (Eq.refl (↑r * a = r • a)) (coe_mul_eq_smul r a))) (Eq.refl (r • a)))
@[simp] theorem coe_algebra_map {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
⇑(algebra_map R (quaternion_algebra R c₁ c₂)) = coe :=
rfl
theorem smul_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) (y : R) :
x • ↑y = ↑(x * y) :=
eq.mpr (id (Eq._oldrec (Eq.refl (x • ↑y = ↑(x * y))) (coe_mul x y)))
(eq.mpr (id (Eq._oldrec (Eq.refl (x • ↑y = ↑x * ↑y)) (coe_mul_eq_smul x ↑y)))
(Eq.refl (x • ↑y)))
/-- Quaternion conjugate. -/
def conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
linear_equiv R (quaternion_algebra R c₁ c₂) (quaternion_algebra R c₁ c₂) :=
linear_equiv.of_involutive
(linear_map.mk (fun (a : quaternion_algebra R c₁ c₂) => mk (re a) (-im_i a) (-im_j a) (-im_k a))
sorry sorry)
sorry
@[simp] theorem re_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : re (coe_fn conj a) = re a :=
rfl
@[simp] theorem im_i_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : im_i (coe_fn conj a) = -im_i a :=
rfl
@[simp] theorem im_j_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : im_j (coe_fn conj a) = -im_j a :=
rfl
@[simp] theorem im_k_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : im_k (coe_fn conj a) = -im_k a :=
rfl
@[simp] theorem conj_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : coe_fn conj (coe_fn conj a) = a :=
ext (coe_fn conj (coe_fn conj a)) a rfl (neg_neg (im_i a)) (neg_neg (im_j a)) (neg_neg (im_k a))
theorem conj_add {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a : quaternion_algebra R c₁ c₂)
(b : quaternion_algebra R c₁ c₂) : coe_fn conj (a + b) = coe_fn conj a + coe_fn conj b :=
linear_equiv.map_add conj a b
@[simp] theorem conj_mul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
coe_fn conj (a * b) = coe_fn conj b * coe_fn conj a :=
sorry
theorem conj_conj_mul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
coe_fn conj (coe_fn conj a * b) = coe_fn conj b * a :=
eq.mpr
(id
(Eq._oldrec (Eq.refl (coe_fn conj (coe_fn conj a * b) = coe_fn conj b * a))
(conj_mul (coe_fn conj a) b)))
(eq.mpr
(id
(Eq._oldrec (Eq.refl (coe_fn conj b * coe_fn conj (coe_fn conj a) = coe_fn conj b * a))
(conj_conj a)))
(Eq.refl (coe_fn conj b * a)))
theorem conj_mul_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) (b : quaternion_algebra R c₁ c₂) :
coe_fn conj (a * coe_fn conj b) = b * coe_fn conj a :=
eq.mpr
(id
(Eq._oldrec (Eq.refl (coe_fn conj (a * coe_fn conj b) = b * coe_fn conj a))
(conj_mul a (coe_fn conj b))))
(eq.mpr
(id
(Eq._oldrec (Eq.refl (coe_fn conj (coe_fn conj b) * coe_fn conj a = b * coe_fn conj a))
(conj_conj b)))
(Eq.refl (b * coe_fn conj a)))
theorem self_add_conj' {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : a + coe_fn conj a = ↑(bit0 1 * re a) :=
sorry
theorem self_add_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : a + coe_fn conj a = bit0 1 * ↑(re a) :=
sorry
theorem conj_add_self' {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : coe_fn conj a + a = ↑(bit0 1 * re a) :=
eq.mpr
(id (Eq._oldrec (Eq.refl (coe_fn conj a + a = ↑(bit0 1 * re a))) (add_comm (coe_fn conj a) a)))
(eq.mpr (id (Eq._oldrec (Eq.refl (a + coe_fn conj a = ↑(bit0 1 * re a))) (self_add_conj' a)))
(Eq.refl ↑(bit0 1 * re a)))
theorem conj_add_self {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : coe_fn conj a + a = bit0 1 * ↑(re a) :=
eq.mpr
(id (Eq._oldrec (Eq.refl (coe_fn conj a + a = bit0 1 * ↑(re a))) (add_comm (coe_fn conj a) a)))
(eq.mpr (id (Eq._oldrec (Eq.refl (a + coe_fn conj a = bit0 1 * ↑(re a))) (self_add_conj a)))
(Eq.refl (bit0 1 * ↑(re a))))
theorem conj_eq_two_re_sub {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : coe_fn conj a = ↑(bit0 1 * re a) - a :=
iff.mpr eq_sub_iff_add_eq (conj_add_self' a)
theorem commute_conj_self {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : commute (coe_fn conj a) a :=
eq.mpr (id (Eq._oldrec (Eq.refl (commute (coe_fn conj a) a)) (conj_eq_two_re_sub a)))
(commute.sub_left (coe_commute (bit0 1 * re a) a) (commute.refl a))
theorem commute_self_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : commute a (coe_fn conj a) :=
commute.symm (commute_conj_self a)
theorem commute_conj_conj {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
{a : quaternion_algebra R c₁ c₂} {b : quaternion_algebra R c₁ c₂} (h : commute a b) :
commute (coe_fn conj a) (coe_fn conj b) :=
sorry
@[simp] theorem conj_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (x : R) :
coe_fn conj ↑x = ↑x :=
sorry
theorem conj_smul {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (r : R)
(a : quaternion_algebra R c₁ c₂) : coe_fn conj (r • a) = r • coe_fn conj a :=
linear_equiv.map_smul conj r a
@[simp] theorem conj_one {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : coe_fn conj 1 = 1 :=
conj_coe 1
theorem eq_re_of_eq_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
{a : quaternion_algebra R c₁ c₂} {x : R} (h : a = ↑x) : a = ↑(re a) :=
eq.mpr (id (Eq._oldrec (Eq.refl (a = ↑(re a))) h))
(eq.mpr (id (Eq._oldrec (Eq.refl (↑x = ↑(re ↑x))) (coe_re x))) (Eq.refl ↑x))
theorem eq_re_iff_mem_range_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
{a : quaternion_algebra R c₁ c₂} : a = ↑(re a) ↔ a ∈ set.range coe :=
sorry
@[simp] theorem conj_fixed {R : Type u_1} [comm_ring R] [no_zero_divisors R] [char_zero R] {c₁ : R}
{c₂ : R} {a : quaternion_algebra R c₁ c₂} : coe_fn conj a = a ↔ a = ↑(re a) :=
sorry
-- Can't use `rw ← conj_fixed` in the proof without additional assumptions
theorem conj_mul_eq_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : coe_fn conj a * a = ↑(re (coe_fn conj a * a)) :=
sorry
theorem mul_conj_eq_coe {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : a * coe_fn conj a = ↑(re (a * coe_fn conj a)) :=
eq.mpr
(id
(Eq._oldrec (Eq.refl (a * coe_fn conj a = ↑(re (a * coe_fn conj a))))
(commute.eq (commute_self_conj a))))
(conj_mul_eq_coe a)
theorem conj_zero {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} : coe_fn conj 0 = 0 :=
linear_equiv.map_zero conj
theorem conj_neg {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a : quaternion_algebra R c₁ c₂) :
coe_fn conj (-a) = -coe_fn conj a :=
linear_equiv.map_neg conj a
theorem conj_sub {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} (a : quaternion_algebra R c₁ c₂)
(b : quaternion_algebra R c₁ c₂) : coe_fn conj (a - b) = coe_fn conj a - coe_fn conj b :=
linear_equiv.map_sub conj a b
protected instance star_ring {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
star_ring (quaternion_algebra R c₁ c₂) :=
star_ring.mk conj_add
@[simp] theorem star_def {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R}
(a : quaternion_algebra R c₁ c₂) : star a = coe_fn conj a :=
rfl
/-- Quaternion conjugate as an `alg_equiv` to the opposite ring. -/
def conj_alg_equiv {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
alg_equiv R (quaternion_algebra R c₁ c₂) (quaternion_algebra R c₁ c₂ᵒᵖ) :=
alg_equiv.mk (opposite.op ∘ ⇑conj) (⇑conj ∘ opposite.unop) sorry sorry sorry sorry sorry
@[simp] theorem coe_conj_alg_equiv {R : Type u_1} [comm_ring R] {c₁ : R} {c₂ : R} :
⇑conj_alg_equiv = opposite.op ∘ ⇑conj :=
rfl
end quaternion_algebra
/-- Space of quaternions over a type. Implemented as a structure with four fields:
`re`, `im_i`, `im_j`, and `im_k`. -/
def quaternion (R : Type u_1) [HasOne R] [Neg R] := quaternion_algebra R (-1) (-1)
namespace quaternion
protected instance has_coe_t {R : Type u_1} [comm_ring R] : has_coe_t R (quaternion R) :=
quaternion_algebra.has_coe_t
protected instance ring {R : Type u_1} [comm_ring R] : ring (quaternion R) :=
quaternion_algebra.ring
protected instance inhabited {R : Type u_1} [comm_ring R] : Inhabited (quaternion R) :=
quaternion_algebra.inhabited
protected instance algebra {R : Type u_1} [comm_ring R] : algebra R (quaternion R) :=
quaternion_algebra.algebra
protected instance star_ring {R : Type u_1} [comm_ring R] : star_ring (quaternion R) :=
quaternion_algebra.star_ring
theorem ext {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
re a = re b → im_i a = im_i b → im_j a = im_j b → im_k a = im_k b → a = b :=
quaternion_algebra.ext a b
theorem ext_iff {R : Type u_1} [comm_ring R] {a : quaternion R} {b : quaternion R} :
a = b ↔ re a = re b ∧ im_i a = im_i b ∧ im_j a = im_j b ∧ im_k a = im_k b :=
quaternion_algebra.ext_iff a b
@[simp] theorem coe_re {R : Type u_1} [comm_ring R] (x : R) : re ↑x = x := rfl
@[simp] theorem coe_im_i {R : Type u_1} [comm_ring R] (x : R) : im_i ↑x = 0 := rfl
@[simp] theorem coe_im_j {R : Type u_1} [comm_ring R] (x : R) : im_j ↑x = 0 := rfl
@[simp] theorem coe_im_k {R : Type u_1} [comm_ring R] (x : R) : im_k ↑x = 0 := rfl
@[simp] theorem zero_re {R : Type u_1} [comm_ring R] : re 0 = 0 := rfl
@[simp] theorem zero_im_i {R : Type u_1} [comm_ring R] : im_i 0 = 0 := rfl
@[simp] theorem zero_im_j {R : Type u_1} [comm_ring R] : im_j 0 = 0 := rfl
@[simp] theorem zero_im_k {R : Type u_1} [comm_ring R] : im_k 0 = 0 := rfl
@[simp] theorem coe_zero {R : Type u_1} [comm_ring R] : ↑0 = 0 := rfl
@[simp] theorem one_re {R : Type u_1} [comm_ring R] : re 1 = 1 := rfl
@[simp] theorem one_im_i {R : Type u_1} [comm_ring R] : im_i 1 = 0 := rfl
@[simp] theorem one_im_j {R : Type u_1} [comm_ring R] : im_j 1 = 0 := rfl
@[simp] theorem one_im_k {R : Type u_1} [comm_ring R] : im_k 1 = 0 := rfl
@[simp] theorem coe_one {R : Type u_1} [comm_ring R] : ↑1 = 1 := rfl
@[simp] theorem add_re {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
re (a + b) = re a + re b :=
rfl
@[simp] theorem add_im_i {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_i (a + b) = im_i a + im_i b :=
rfl
@[simp] theorem add_im_j {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_j (a + b) = im_j a + im_j b :=
rfl
@[simp] theorem add_im_k {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_k (a + b) = im_k a + im_k b :=
rfl
@[simp] theorem coe_add {R : Type u_1} [comm_ring R] (x : R) (y : R) : ↑(x + y) = ↑x + ↑y :=
quaternion_algebra.coe_add x y
@[simp] theorem neg_re {R : Type u_1} [comm_ring R] (a : quaternion R) : re (-a) = -re a := rfl
@[simp] theorem neg_im_i {R : Type u_1} [comm_ring R] (a : quaternion R) : im_i (-a) = -im_i a :=
rfl
@[simp] theorem neg_im_j {R : Type u_1} [comm_ring R] (a : quaternion R) : im_j (-a) = -im_j a :=
rfl
@[simp] theorem neg_im_k {R : Type u_1} [comm_ring R] (a : quaternion R) : im_k (-a) = -im_k a :=
rfl
@[simp] theorem coe_neg {R : Type u_1} [comm_ring R] (x : R) : ↑(-x) = -↑x :=
quaternion_algebra.coe_neg x
@[simp] theorem sub_re {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
re (a - b) = re a - re b :=
rfl
@[simp] theorem sub_im_i {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_i (a - b) = im_i a - im_i b :=
rfl
@[simp] theorem sub_im_j {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_j (a - b) = im_j a - im_j b :=
rfl
@[simp] theorem sub_im_k {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_k (a - b) = im_k a - im_k b :=
rfl
@[simp] theorem coe_sub {R : Type u_1} [comm_ring R] (x : R) (y : R) : ↑(x - y) = ↑x - ↑y :=
quaternion_algebra.coe_sub x y
@[simp] theorem mul_re {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
re (a * b) = re a * re b - im_i a * im_i b - im_j a * im_j b - im_k a * im_k b :=
sorry
@[simp] theorem mul_im_i {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_i (a * b) = re a * im_i b + im_i a * re b + im_j a * im_k b - im_k a * im_j b :=
sorry
@[simp] theorem mul_im_j {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_j (a * b) = re a * im_j b - im_i a * im_k b + im_j a * re b + im_k a * im_i b :=
sorry
@[simp] theorem mul_im_k {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
im_k (a * b) = re a * im_k b + im_i a * im_j b - im_j a * im_i b + im_k a * re b :=
sorry
@[simp] theorem coe_mul {R : Type u_1} [comm_ring R] (x : R) (y : R) : ↑(x * y) = ↑x * ↑y :=
quaternion_algebra.coe_mul x y
theorem coe_injective {R : Type u_1} [comm_ring R] : function.injective coe :=
quaternion_algebra.coe_injective
@[simp] theorem coe_inj {R : Type u_1} [comm_ring R] {x : R} {y : R} : ↑x = ↑y ↔ x = y :=
function.injective.eq_iff coe_injective
@[simp] theorem smul_re {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) :
re (r • a) = r • re a :=
rfl
@[simp] theorem smul_im_i {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) :
im_i (r • a) = r • im_i a :=
rfl
@[simp] theorem smul_im_j {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) :
im_j (r • a) = r • im_j a :=
rfl
@[simp] theorem smul_im_k {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) :
im_k (r • a) = r • im_k a :=
rfl
theorem coe_commutes {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) : ↑r * a = a * ↑r :=
quaternion_algebra.coe_commutes r a
theorem coe_commute {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) : commute (↑r) a :=
quaternion_algebra.coe_commute r a
theorem coe_mul_eq_smul {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) : ↑r * a = r • a :=
quaternion_algebra.coe_mul_eq_smul r a
theorem mul_coe_eq_smul {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) : a * ↑r = r • a :=
quaternion_algebra.mul_coe_eq_smul r a
@[simp] theorem algebra_map_def {R : Type u_1} [comm_ring R] :
⇑(algebra_map R (quaternion R)) = coe :=
rfl
theorem smul_coe {R : Type u_1} [comm_ring R] (x : R) (y : R) : x • ↑y = ↑(x * y) :=
quaternion_algebra.smul_coe x y
/-- Quaternion conjugate. -/
def conj {R : Type u_1} [comm_ring R] : linear_equiv R (quaternion R) (quaternion R) :=
quaternion_algebra.conj
@[simp] theorem conj_re {R : Type u_1} [comm_ring R] (a : quaternion R) :
re (coe_fn conj a) = re a :=
rfl
@[simp] theorem conj_im_i {R : Type u_1} [comm_ring R] (a : quaternion R) :
im_i (coe_fn conj a) = -im_i a :=
rfl
@[simp] theorem conj_im_j {R : Type u_1} [comm_ring R] (a : quaternion R) :
im_j (coe_fn conj a) = -im_j a :=
rfl
@[simp] theorem conj_im_k {R : Type u_1} [comm_ring R] (a : quaternion R) :
im_k (coe_fn conj a) = -im_k a :=
rfl
@[simp] theorem conj_conj {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj (coe_fn conj a) = a :=
quaternion_algebra.conj_conj a
@[simp] theorem conj_add {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
coe_fn conj (a + b) = coe_fn conj a + coe_fn conj b :=
quaternion_algebra.conj_add a b
@[simp] theorem conj_mul {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
coe_fn conj (a * b) = coe_fn conj b * coe_fn conj a :=
quaternion_algebra.conj_mul a b
theorem conj_conj_mul {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
coe_fn conj (coe_fn conj a * b) = coe_fn conj b * a :=
quaternion_algebra.conj_conj_mul a b
theorem conj_mul_conj {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
coe_fn conj (a * coe_fn conj b) = b * coe_fn conj a :=
quaternion_algebra.conj_mul_conj a b
theorem self_add_conj' {R : Type u_1} [comm_ring R] (a : quaternion R) :
a + coe_fn conj a = ↑(bit0 1 * re a) :=
quaternion_algebra.self_add_conj' a
theorem self_add_conj {R : Type u_1} [comm_ring R] (a : quaternion R) :
a + coe_fn conj a = bit0 1 * ↑(re a) :=
quaternion_algebra.self_add_conj a
theorem conj_add_self' {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj a + a = ↑(bit0 1 * re a) :=
quaternion_algebra.conj_add_self' a
theorem conj_add_self {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj a + a = bit0 1 * ↑(re a) :=
quaternion_algebra.conj_add_self a
theorem conj_eq_two_re_sub {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj a = ↑(bit0 1 * re a) - a :=
quaternion_algebra.conj_eq_two_re_sub a
theorem commute_conj_self {R : Type u_1} [comm_ring R] (a : quaternion R) :
commute (coe_fn conj a) a :=
quaternion_algebra.commute_conj_self a
theorem commute_self_conj {R : Type u_1} [comm_ring R] (a : quaternion R) :
commute a (coe_fn conj a) :=
quaternion_algebra.commute_self_conj a
theorem commute_conj_conj {R : Type u_1} [comm_ring R] {a : quaternion R} {b : quaternion R}
(h : commute a b) : commute (coe_fn conj a) (coe_fn conj b) :=
quaternion_algebra.commute_conj_conj h
theorem Mathlib.commute.quaternion_conj {R : Type u_1} [comm_ring R] {a : quaternion R}
{b : quaternion R} (h : commute a b) : commute (coe_fn conj a) (coe_fn conj b) :=
commute_conj_conj
@[simp] theorem conj_coe {R : Type u_1} [comm_ring R] (x : R) : coe_fn conj ↑x = ↑x :=
quaternion_algebra.conj_coe x
@[simp] theorem conj_smul {R : Type u_1} [comm_ring R] (r : R) (a : quaternion R) :
coe_fn conj (r • a) = r • coe_fn conj a :=
quaternion_algebra.conj_smul r a
@[simp] theorem conj_one {R : Type u_1} [comm_ring R] : coe_fn conj 1 = 1 := conj_coe 1
theorem eq_re_of_eq_coe {R : Type u_1} [comm_ring R] {a : quaternion R} {x : R} (h : a = ↑x) :
a = ↑(re a) :=
quaternion_algebra.eq_re_of_eq_coe h
theorem eq_re_iff_mem_range_coe {R : Type u_1} [comm_ring R] {a : quaternion R} :
a = ↑(re a) ↔ a ∈ set.range coe :=
quaternion_algebra.eq_re_iff_mem_range_coe
@[simp] theorem conj_fixed {R : Type u_1} [comm_ring R] [no_zero_divisors R] [char_zero R]
{a : quaternion R} : coe_fn conj a = a ↔ a = ↑(re a) :=
quaternion_algebra.conj_fixed
theorem conj_mul_eq_coe {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj a * a = ↑(re (coe_fn conj a * a)) :=
quaternion_algebra.conj_mul_eq_coe a
theorem mul_conj_eq_coe {R : Type u_1} [comm_ring R] (a : quaternion R) :
a * coe_fn conj a = ↑(re (a * coe_fn conj a)) :=
quaternion_algebra.mul_conj_eq_coe a
@[simp] theorem conj_zero {R : Type u_1} [comm_ring R] : coe_fn conj 0 = 0 :=
quaternion_algebra.conj_zero
@[simp] theorem conj_neg {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj (-a) = -coe_fn conj a :=
quaternion_algebra.conj_neg a
@[simp] theorem conj_sub {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
coe_fn conj (a - b) = coe_fn conj a - coe_fn conj b :=
quaternion_algebra.conj_sub a b
/-- Quaternion conjugate as an `alg_equiv` to the opposite ring. -/
def conj_alg_equiv {R : Type u_1} [comm_ring R] : alg_equiv R (quaternion R) (quaternion Rᵒᵖ) :=
quaternion_algebra.conj_alg_equiv
@[simp] theorem coe_conj_alg_equiv {R : Type u_1} [comm_ring R] :
⇑conj_alg_equiv = opposite.op ∘ ⇑conj :=
rfl
/-- Square of the norm. -/
def norm_sq {R : Type u_1} [comm_ring R] : monoid_with_zero_hom (quaternion R) R :=
monoid_with_zero_hom.mk (fun (a : quaternion R) => re (a * coe_fn conj a)) sorry sorry sorry
theorem norm_sq_def {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn norm_sq a = re (a * coe_fn conj a) :=
rfl
theorem norm_sq_def' {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn norm_sq a = re a ^ bit0 1 + im_i a ^ bit0 1 + im_j a ^ bit0 1 + im_k a ^ bit0 1 :=
sorry
theorem norm_sq_coe {R : Type u_1} [comm_ring R] (x : R) : coe_fn norm_sq ↑x = x ^ bit0 1 := sorry
@[simp] theorem norm_sq_neg {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn norm_sq (-a) = coe_fn norm_sq a :=
sorry
theorem self_mul_conj {R : Type u_1} [comm_ring R] (a : quaternion R) :
a * coe_fn conj a = ↑(coe_fn norm_sq a) :=
eq.mpr (id (Eq._oldrec (Eq.refl (a * coe_fn conj a = ↑(coe_fn norm_sq a))) (mul_conj_eq_coe a)))
(eq.mpr
(id (Eq._oldrec (Eq.refl (↑(re (a * coe_fn conj a)) = ↑(coe_fn norm_sq a))) (norm_sq_def a)))
(Eq.refl ↑(re (a * coe_fn conj a))))
theorem conj_mul_self {R : Type u_1} [comm_ring R] (a : quaternion R) :
coe_fn conj a * a = ↑(coe_fn norm_sq a) :=
sorry
theorem coe_norm_sq_add {R : Type u_1} [comm_ring R] (a : quaternion R) (b : quaternion R) :
↑(coe_fn norm_sq (a + b)) =
↑(coe_fn norm_sq a) + a * coe_fn conj b + b * coe_fn conj a + ↑(coe_fn norm_sq b) :=
sorry
end quaternion
namespace quaternion
@[simp] theorem norm_sq_eq_zero {R : Type u_1} [linear_ordered_comm_ring R] {a : quaternion R} :
coe_fn norm_sq a = 0 ↔ a = 0 :=
sorry
theorem norm_sq_ne_zero {R : Type u_1} [linear_ordered_comm_ring R] {a : quaternion R} :
coe_fn norm_sq a ≠ 0 ↔ a ≠ 0 :=
not_congr norm_sq_eq_zero
@[simp] theorem norm_sq_nonneg {R : Type u_1} [linear_ordered_comm_ring R] {a : quaternion R} :
0 ≤ coe_fn norm_sq a :=
eq.mpr (id (Eq._oldrec (Eq.refl (0 ≤ coe_fn norm_sq a)) (norm_sq_def' a)))
(add_nonneg
(add_nonneg (add_nonneg (pow_two_nonneg (re a)) (pow_two_nonneg (im_i a)))
(pow_two_nonneg (im_j a)))
(pow_two_nonneg (im_k a)))
@[simp] theorem norm_sq_le_zero {R : Type u_1} [linear_ordered_comm_ring R] {a : quaternion R} :
coe_fn norm_sq a ≤ 0 ↔ a = 0 :=
sorry
protected instance domain {R : Type u_1} [linear_ordered_comm_ring R] : domain (quaternion R) :=
domain.mk ring.add sorry ring.zero sorry sorry ring.neg ring.sub sorry sorry ring.mul sorry
ring.one sorry sorry sorry sorry sorry sorry
theorem has_inv_inv {R : Type u_1} [linear_ordered_field R] (a : quaternion R) :
a⁻¹ = coe_fn norm_sq a⁻¹ • coe_fn conj a :=
Eq.refl (a⁻¹)
protected instance division_ring {R : Type u_1} [linear_ordered_field R] :
division_ring (quaternion R) :=
division_ring.mk domain.add sorry domain.zero sorry sorry domain.neg domain.sub sorry sorry
domain.mul sorry domain.one sorry sorry sorry sorry has_inv.inv
(div_inv_monoid.div._default domain.mul sorry domain.one sorry sorry has_inv.inv) sorry sorry
sorry
@[simp] theorem norm_sq_inv {R : Type u_1} [linear_ordered_field R] (a : quaternion R) :
coe_fn norm_sq (a⁻¹) = (coe_fn norm_sq a⁻¹) :=
monoid_with_zero_hom.map_inv' norm_sq a
@[simp] theorem norm_sq_div {R : Type u_1} [linear_ordered_field R] (a : quaternion R)
(b : quaternion R) : coe_fn norm_sq (a / b) = coe_fn norm_sq a / coe_fn norm_sq b :=
monoid_with_zero_hom.map_div norm_sq a b
end Mathlib |
/-
Copyright (c) 2021 Yakov Pechersky. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yakov Pechersky
-/
import algebra.group_power.order
import algebra.order.monoid.with_top
import algebra.smul_with_zero
import algebra.order.monoid.min_max
/-!
# Tropical algebraic structures
> THIS FILE IS SYNCHRONIZED WITH MATHLIB4.
> Any changes to this file require a corresponding PR to mathlib4.
This file defines algebraic structures of the (min-)tropical numbers, up to the tropical semiring.
Some basic lemmas about conversion from the base type `R` to `tropical R` are provided, as
well as the expected implementations of tropical addition and tropical multiplication.
## Main declarations
* `tropical R`: The type synonym of the tropical interpretation of `R`.
If `[linear_order R]`, then addition on `R` is via `min`.
* `semiring (tropical R)`: A `linear_ordered_add_comm_monoid_with_top R`
induces a `semiring (tropical R)`. If one solely has `[linear_ordered_add_comm_monoid R]`,
then the "tropicalization of `R`" would be `tropical (with_top R)`.
## Implementation notes
The tropical structure relies on `has_top` and `min`. For the max-tropical numbers, use
`order_dual R`.
Inspiration was drawn from the implementation of `additive`/`multiplicative`/`opposite`,
where a type synonym is created with some barebones API, and quickly made irreducible.
Algebraic structures are provided with as few typeclass assumptions as possible, even though
most references rely on `semiring (tropical R)` for building up the whole theory.
## References followed
* https://arxiv.org/pdf/math/0408099.pdf
* https://www.mathenjeans.fr/sites/default/files/sujets/tropical_geometry_-_casagrande.pdf
-/
universes u v
variables (R : Type u)
/-- The tropicalization of a type `R`. -/
def tropical : Type u := R
variables {R}
namespace tropical
/-- Reinterpret `x : R` as an element of `tropical R`.
See `tropical.trop_equiv` for the equivalence.
-/
@[pp_nodot]
def trop : R → tropical R := id
/-- Reinterpret `x : tropical R` as an element of `R`.
See `tropical.trop_equiv` for the equivalence. -/
@[pp_nodot]
def untrop : tropical R → R := id
lemma trop_injective : function.injective (trop : R → tropical R) := λ _ _, id
lemma untrop_injective : function.injective (untrop : tropical R → R) := λ _ _, id
@[simp] lemma trop_inj_iff (x y : R) : trop x = trop y ↔ x = y := iff.rfl
@[simp] lemma untrop_inj_iff (x y : tropical R) : untrop x = untrop y ↔ x = y := iff.rfl
@[simp] lemma trop_untrop (x : tropical R) : trop (untrop x) = x := rfl
@[simp] lemma untrop_trop (x : R) : untrop (trop x) = x := rfl
lemma left_inverse_trop : function.left_inverse (trop : R → tropical R) untrop := trop_untrop
lemma right_inverse_trop : function.right_inverse (trop : R → tropical R) untrop := trop_untrop
attribute [irreducible] tropical
/-- Reinterpret `x : R` as an element of `tropical R`.
See `tropical.trop_order_iso` for the order-preserving equivalence. -/
def trop_equiv : R ≃ tropical R :=
{ to_fun := trop,
inv_fun := untrop,
left_inv := untrop_trop,
right_inv := trop_untrop }
@[simp]
lemma trop_equiv_coe_fn : (trop_equiv : R → tropical R) = trop := rfl
@[simp]
lemma trop_equiv_symm_coe_fn : (trop_equiv.symm : tropical R → R) = untrop := rfl
lemma trop_eq_iff_eq_untrop {x : R} {y} : trop x = y ↔ x = untrop y :=
trop_equiv.apply_eq_iff_eq_symm_apply
lemma untrop_eq_iff_eq_trop {x} {y : R} : untrop x = y ↔ x = trop y :=
trop_equiv.symm.apply_eq_iff_eq_symm_apply
lemma injective_trop : function.injective (trop : R → tropical R) := trop_equiv.injective
lemma injective_untrop : function.injective (untrop : tropical R → R) := trop_equiv.symm.injective
instance [inhabited R] : inhabited (tropical R) := ⟨trop default⟩
/-- Recursing on a `x' : tropical R` is the same as recursing on an `x : R` reinterpreted
as a term of `tropical R` via `trop x`. -/
@[simp]
def trop_rec {F : Π (X : tropical R), Sort v} (h : Π X, F (trop X)) : Π X, F X :=
λ X, h (untrop X)
instance [decidable_eq R] : decidable_eq (tropical R) :=
λ x y, decidable_of_iff _ injective_untrop.eq_iff
section order
instance [has_le R] : has_le (tropical R) :=
{ le := λ x y, untrop x ≤ untrop y }
@[simp] lemma untrop_le_iff [has_le R] {x y : tropical R} :
untrop x ≤ untrop y ↔ x ≤ y := iff.rfl
instance decidable_le [has_le R] [decidable_rel ((≤) : R → R → Prop)] :
decidable_rel ((≤) : tropical R → tropical R → Prop) :=
λ x y, ‹decidable_rel (≤)› (untrop x) (untrop y)
instance [has_lt R] : has_lt (tropical R) :=
{ lt := λ x y, untrop x < untrop y }
@[simp] lemma untrop_lt_iff [has_lt R] {x y : tropical R} :
untrop x < untrop y ↔ x < y := iff.rfl
instance decidable_lt [has_lt R] [decidable_rel ((<) : R → R → Prop)] :
decidable_rel ((<) : tropical R → tropical R → Prop) :=
λ x y, ‹decidable_rel (<)› (untrop x) (untrop y)
instance [preorder R] : preorder (tropical R) :=
{ le_refl := λ _, le_rfl,
le_trans := λ _ _ _ h h', le_trans h h',
lt_iff_le_not_le := λ _ _, lt_iff_le_not_le,
..tropical.has_le,
..tropical.has_lt }
/-- Reinterpret `x : R` as an element of `tropical R`, preserving the order. -/
def trop_order_iso [preorder R] : R ≃o tropical R :=
{ map_rel_iff' := λ _ _, untrop_le_iff,
..trop_equiv }
@[simp]
lemma trop_order_iso_coe_fn [preorder R] : (trop_order_iso : R → tropical R) = trop := rfl
@[simp]
lemma trop_order_iso_symm_coe_fn [preorder R] : (trop_order_iso.symm : tropical R → R) = untrop :=
rfl
lemma trop_monotone [preorder R] : monotone (trop : R → tropical R) := λ _ _, id
lemma untrop_monotone [preorder R] : monotone (untrop : tropical R → R) := λ _ _, id
instance [partial_order R] : partial_order (tropical R) :=
{ le_antisymm := λ _ _ h h', untrop_injective (le_antisymm h h'),
..tropical.preorder }
instance [has_top R] : has_zero (tropical R) := ⟨trop ⊤⟩
instance [has_top R] : has_top (tropical R) := ⟨0⟩
@[simp] lemma untrop_zero [has_top R] : untrop (0 : tropical R) = ⊤ := rfl
@[simp] lemma trop_top [has_top R] : trop (⊤ : R) = 0 := rfl
@[simp] lemma trop_coe_ne_zero (x : R) : trop (x : with_top R) ≠ 0 .
@[simp] lemma zero_ne_trop_coe (x : R) : (0 : tropical (with_top R)) ≠ trop x .
@[simp] lemma le_zero [has_le R] [order_top R] (x : tropical R) : x ≤ 0 := le_top
instance [has_le R] [order_top R] : order_top (tropical R) :=
{ le_top := λ _, le_top,
..tropical.has_top }
variable [linear_order R]
/-- Tropical addition is the minimum of two underlying elements of `R`. -/
instance : has_add (tropical R) :=
⟨λ x y, trop (min (untrop x) (untrop y))⟩
instance : add_comm_semigroup (tropical R) :=
{ add := (+),
add_assoc := λ _ _ _, untrop_injective (min_assoc _ _ _),
add_comm := λ _ _, untrop_injective (min_comm _ _) }
@[simp] lemma untrop_add (x y : tropical R) : untrop (x + y) = min (untrop x) (untrop y) := rfl
@[simp] lemma trop_min (x y : R) : trop (min x y) = trop x + trop y := rfl
@[simp] lemma trop_inf (x y : R) : trop (x ⊓ y) = trop x + trop y := rfl
lemma trop_add_def (x y : tropical R) : x + y = trop (min (untrop x) (untrop y)) := rfl
instance : linear_order (tropical R) :=
{ le_total := λ a b, le_total (untrop a) (untrop b),
decidable_le := tropical.decidable_le,
decidable_lt := tropical.decidable_lt,
decidable_eq := tropical.decidable_eq,
max := λ a b, trop (max (untrop a) (untrop b)),
max_def := begin
ext x y,
rw [max_default, max_def, apply_ite trop, trop_untrop, trop_untrop,
if_congr untrop_le_iff rfl rfl],
end,
min := (+),
min_def := begin
ext x y,
rw [trop_add_def, min_default, min_def, apply_ite trop, trop_untrop, trop_untrop,
if_congr untrop_le_iff rfl rfl],
end,
..tropical.partial_order }
@[simp] lemma untrop_sup (x y : tropical R) : untrop (x ⊔ y) = untrop x ⊔ untrop y := rfl
@[simp] lemma untrop_max (x y : tropical R) : untrop (max x y) = max (untrop x) (untrop y) := rfl
@[simp] lemma min_eq_add : (min : tropical R → tropical R → tropical R) = (+) := rfl
@[simp] lemma inf_eq_add : ((⊓) : tropical R → tropical R → tropical R) = (+) := rfl
lemma trop_max_def (x y : tropical R) : max x y = trop (max (untrop x) (untrop y)) := rfl
lemma trop_sup_def (x y : tropical R) : x ⊔ y = trop (untrop x ⊔ untrop y) := rfl
@[simp] lemma add_eq_left ⦃x y : tropical R⦄ (h : x ≤ y) :
x + y = x := untrop_injective (by simpa using h)
@[simp] lemma add_eq_right ⦃x y : tropical R⦄ (h : y ≤ x) :
x + y = y := untrop_injective (by simpa using h)
lemma add_eq_left_iff {x y : tropical R} : x + y = x ↔ x ≤ y :=
by rw [trop_add_def, trop_eq_iff_eq_untrop, ←untrop_le_iff, min_eq_left_iff]
lemma add_eq_right_iff {x y : tropical R} : x + y = y ↔ y ≤ x :=
by rw [trop_add_def, trop_eq_iff_eq_untrop, ←untrop_le_iff, min_eq_right_iff]
@[simp] lemma add_self (x : tropical R) : x + x = x := untrop_injective (min_eq_right le_rfl)
@[simp] lemma bit0 (x : tropical R) : bit0 x = x := add_self x
lemma add_eq_iff {x y z : tropical R} :
x + y = z ↔ x = z ∧ x ≤ y ∨ y = z ∧ y ≤ x :=
by { rw [trop_add_def, trop_eq_iff_eq_untrop], simp [min_eq_iff] }
@[simp] lemma add_eq_zero_iff {a b : tropical (with_top R)} :
a + b = 0 ↔ a = 0 ∧ b = 0 :=
begin
rw add_eq_iff,
split,
{ rintro (⟨rfl, h⟩|⟨rfl, h⟩),
{ exact ⟨rfl, le_antisymm (le_zero _) h⟩ },
{ exact ⟨le_antisymm (le_zero _) h, rfl⟩ } },
{ rintro ⟨rfl, rfl⟩,
simp }
end
instance [order_top R] : add_comm_monoid (tropical R) :=
{ zero_add := λ _, untrop_injective (min_top_left _),
add_zero := λ _, untrop_injective (min_top_right _),
..tropical.has_zero,
..tropical.add_comm_semigroup }
end order
section monoid
/-- Tropical multiplication is the addition in the underlying `R`. -/
instance [has_add R] : has_mul (tropical R) :=
⟨λ x y, trop (untrop x + untrop y)⟩
@[simp] lemma trop_add [has_add R] (x y : R) :
trop (x + y) = trop x * trop y := rfl
@[simp] lemma untrop_mul [has_add R] (x y : tropical R) :
untrop (x * y) = untrop x + untrop y := rfl
lemma trop_mul_def [has_add R] (x y : tropical R) :
x * y = trop (untrop x + untrop y) := rfl
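-- Illustrative sketch (not part of the original file): over `ℕ`, tropical
-- addition takes the minimum of the underlying values and tropical
-- multiplication adds them.
example : untrop (trop (3 : ℕ) + trop 5) = min 3 5 := rfl
example : untrop (trop (3 : ℕ) * trop 5) = 3 + 5 := rfl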
instance [has_zero R] : has_one (tropical R) := ⟨trop 0⟩
@[simp] lemma trop_zero [has_zero R] : trop (0 : R) = 1 := rfl
@[simp] lemma untrop_one [has_zero R] : untrop (1 : tropical R) = 0 := rfl
instance [linear_order R] [order_top R] [has_zero R] : add_monoid_with_one (tropical R) :=
{ nat_cast := λ n, if n = 0 then 0 else 1,
nat_cast_zero := rfl,
nat_cast_succ := λ n, (untrop_inj_iff _ _).1 (by cases n; simp [nat.cast]),
.. tropical.has_one, .. tropical.add_comm_monoid }
instance [has_zero R] : nontrivial (tropical (with_top R)) :=
⟨⟨0, 1, trop_injective.ne with_top.top_ne_coe⟩⟩
instance [has_neg R] : has_inv (tropical R) := ⟨λ x, trop (- untrop x)⟩
@[simp] lemma untrop_inv [has_neg R] (x : tropical R) : untrop x⁻¹ = - untrop x := rfl
instance [has_sub R] : has_div (tropical R) := ⟨λ x y, trop (untrop x - untrop y)⟩
@[simp] lemma untrop_div [has_sub R] (x y : tropical R) :
untrop (x / y) = untrop x - untrop y := rfl
instance [add_semigroup R] : semigroup (tropical R) :=
{ mul := (*),
mul_assoc := λ _ _ _, untrop_injective (add_assoc _ _ _) }
instance [add_comm_semigroup R] : comm_semigroup (tropical R) :=
{ mul_comm := λ _ _, untrop_injective (add_comm _ _),
..tropical.semigroup }
instance {α : Type*} [has_smul α R] : has_pow (tropical R) α :=
{ pow := λ x n, trop $ n • untrop x }
@[simp] lemma untrop_pow {α : Type*} [has_smul α R] (x : tropical R) (n : α) :
untrop (x ^ n) = n • untrop x := rfl
@[simp] lemma trop_smul {α : Type*} [has_smul α R] (x : R) (n : α) :
trop (n • x) = trop x ^ n := rfl
instance [add_zero_class R] : mul_one_class (tropical R) :=
{ one := 1,
mul := (*),
one_mul := λ _, untrop_injective $ zero_add _,
mul_one := λ _, untrop_injective $ add_zero _ }
instance [add_monoid R] : monoid (tropical R) :=
{ npow := λ n x, x ^ n,
npow_zero' := λ _, untrop_injective $ zero_smul _ _,
npow_succ' := λ _ _, untrop_injective $ succ_nsmul _ _,
..tropical.mul_one_class,
..tropical.semigroup }
@[simp] lemma trop_nsmul [add_monoid R] (x : R) (n : ℕ) :
trop (n • x) = trop x ^ n := rfl
instance [add_comm_monoid R] : comm_monoid (tropical R) :=
{ ..tropical.monoid, ..tropical.comm_semigroup }
instance [add_group R] : group (tropical R) :=
{ inv := has_inv.inv,
mul_left_inv := λ _, untrop_injective $ add_left_neg _,
zpow := λ n x, trop $ n • untrop x,
zpow_zero' := λ _, untrop_injective $ zero_zsmul _,
zpow_succ' := λ _ _, untrop_injective $ add_group.zsmul_succ' _ _,
zpow_neg' := λ _ _, untrop_injective $ add_group.zsmul_neg' _ _,
..tropical.monoid }
instance [add_comm_group R] : comm_group (tropical R) :=
{ mul_comm := λ _ _, untrop_injective (add_comm _ _),
..tropical.group }
@[simp] lemma untrop_zpow [add_group R] (x : tropical R) (n : ℤ) :
untrop (x ^ n) = n • untrop x := rfl
@[simp] lemma trop_zsmul [add_group R] (x : R) (n : ℤ) :
trop (n • x) = trop x ^ n := rfl
end monoid
section distrib
instance covariant_mul [has_le R] [has_add R] [covariant_class R R (+) (≤)] :
covariant_class (tropical R) (tropical R) (*) (≤) :=
⟨λ x y z h, add_le_add_left h _⟩
instance covariant_swap_mul [has_le R] [has_add R] [covariant_class R R (function.swap (+)) (≤)] :
covariant_class (tropical R) (tropical R) (function.swap (*)) (≤) :=
⟨λ x y z h, add_le_add_right h _⟩
instance covariant_add [linear_order R] : covariant_class (tropical R) (tropical R) (+) (≤) :=
⟨λ x y z h, begin
cases le_total x y with hx hy,
{ rw [add_eq_left hx, add_eq_left (hx.trans h)] },
{ rw [add_eq_right hy],
cases le_total x z with hx hx,
{ rwa [add_eq_left hx] },
{ rwa [add_eq_right hx] } }
end⟩
instance covariant_mul_lt [has_lt R] [has_add R] [covariant_class R R (+) (<)] :
covariant_class (tropical R) (tropical R) (*) (<) :=
⟨λ x y z h, add_lt_add_left h _⟩
instance covariant_swap_mul_lt [preorder R] [has_add R]
[covariant_class R R (function.swap (+)) (<)] :
covariant_class (tropical R) (tropical R) (function.swap (*)) (<) :=
⟨λ x y z h, add_lt_add_right h _⟩
instance [linear_order R] [has_add R]
[covariant_class R R (+) (≤)] [covariant_class R R (function.swap (+)) (≤)] :
distrib (tropical R) :=
{ mul := (*),
add := (+),
left_distrib := λ _ _ _, untrop_injective (min_add_add_left _ _ _).symm,
right_distrib := λ _ _ _, untrop_injective (min_add_add_right _ _ _).symm }
@[simp] lemma add_pow [linear_order R] [add_monoid R]
[covariant_class R R (+) (≤)] [covariant_class R R (function.swap (+)) (≤)]
(x y : tropical R) (n : ℕ) :
(x + y) ^ n = x ^ n + y ^ n :=
begin
cases le_total x y with h h,
{ rw [add_eq_left h, add_eq_left (pow_le_pow_of_le_left' h _)] },
{ rw [add_eq_right h, add_eq_right (pow_le_pow_of_le_left' h _)] }
end
end distrib
section semiring
variable [linear_ordered_add_comm_monoid_with_top R]
instance : comm_semiring (tropical R) :=
{ zero_mul := λ _, untrop_injective (top_add _),
mul_zero := λ _, untrop_injective (add_top _),
..tropical.add_monoid_with_one,
..tropical.distrib,
..tropical.add_comm_monoid,
..tropical.comm_monoid }
@[simp] lemma succ_nsmul {R} [linear_order R] [order_top R] (x : tropical R) (n : ℕ) :
(n + 1) • x = x :=
begin
induction n with n IH,
{ simp },
{ rw [add_nsmul, IH, one_nsmul, add_self] }
end
-- TODO: find/create the right classes to make this hold (for enat, ennreal, etc)
-- Requires `zero_eq_bot` to be true
-- lemma add_eq_zero_iff {a b : tropical R} :
-- a + b = 1 ↔ a = 1 ∨ b = 1 := sorry
@[simp] lemma mul_eq_zero_iff {R : Type*} [linear_ordered_add_comm_monoid R]
{a b : tropical (with_top R)} :
a * b = 0 ↔ a = 0 ∨ b = 0 :=
by simp [←untrop_inj_iff, with_top.add_eq_top]
instance {R : Type*} [linear_ordered_add_comm_monoid R] :
no_zero_divisors (tropical (with_top R)) :=
⟨λ _ _, mul_eq_zero_iff.mp⟩
end semiring
end tropical
|
State Before: α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ s ≠ ⊤
⊢ s =ᶠ[ae μ] ∅ ∨ s =ᶠ[ae μ] univ State After: case h_fin
α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ s ≠ ⊤
⊢ ↑↑μ (f ⁻¹' s) ≠ ⊤
α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ (f ⁻¹' s) ≠ ⊤
⊢ s =ᶠ[ae μ] ∅ ∨ s =ᶠ[ae μ] univ Tactic: replace h_fin : μ (f ⁻¹' s) ≠ ∞ State Before: α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ (f ⁻¹' s) ≠ ⊤
⊢ s =ᶠ[ae μ] ∅ ∨ s =ᶠ[ae μ] univ State After: α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ (f ⁻¹' s) ≠ ⊤
⊢ f ⁻¹' s =ᶠ[ae μ] s Tactic: refine' hf.quasiErgodic.ae_empty_or_univ' hs _ State Before: α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ (f ⁻¹' s) ≠ ⊤
⊢ f ⁻¹' s =ᶠ[ae μ] s State After: no goals Tactic: exact (ae_eq_of_ae_subset_of_measure_ge hs' (hf.measure_preimage hs).le hs h_fin).symm State Before: case h_fin
α : Type u_1
m : MeasurableSpace α
f : α → α
s : Set α
μ : MeasureTheory.Measure α
hf : Ergodic f
hs : MeasurableSet s
hs' : s ≤ᶠ[ae μ] f ⁻¹' s
h_fin : ↑↑μ s ≠ ⊤
⊢ ↑↑μ (f ⁻¹' s) ≠ ⊤ State After: no goals Tactic: rwa [hf.measure_preimage hs] |
[STATEMENT]
lemma exec_lub_conv:
"\<lbrakk> acyclic r; \<forall>x y. (x,y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u \<rbrakk> \<Longrightarrow>
exec_lub r f x y = u"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> exec_lub r f x y = u
[PROOF STEP]
(*<*)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> exec_lub r f x y = u
[PROOF STEP]
apply(unfold exec_lub_def)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> while (\<lambda>z. (x, z) \<notin> r\<^sup>*) f y = u
[PROOF STEP]
apply(rule_tac P = "\<lambda>z. (y,z) \<in> r\<^sup>* \<and> (z,u) \<in> r\<^sup>*" and
r = "(r \<inter> {(a,b). (y,a) \<in> r\<^sup>* \<and> (b,u) \<in> r\<^sup>*})^-1" in while_rule)
[PROOF STATE]
proof (prove)
goal (5 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> (y, y) \<in> r\<^sup>* \<and> (y, u) \<in> r\<^sup>*
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
4. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
5. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast dest: is_lubD is_ubD)
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(erule conjE)
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(erule_tac z = u in converse_rtranclE)
[PROOF STATE]
proof (prove)
goal (5 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; s = u\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
2. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, ya) \<in> r; (ya, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
4. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
5. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast dest: is_lubD is_ubD)
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, ya) \<in> r; (ya, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (y, f s) \<in> r\<^sup>* \<and> (f s, u) \<in> r\<^sup>*
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast dest:rtrancl_into_rtrancl)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(rename_tac s)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(subgoal_tac "is_ub (r\<^sup>*) x y s")
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s\<rbrakk> \<Longrightarrow> s = u
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> is_ub (r\<^sup>*) x y s
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
prefer 2
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> is_ub (r\<^sup>*) x y s
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(simp add:is_ub_def)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(subgoal_tac "(u, s) \<in> r\<^sup>*")
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s\<rbrakk> \<Longrightarrow> (u, s) \<in> r\<^sup>*
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
prefer 2
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s\<rbrakk> \<Longrightarrow> (u, s) \<in> r\<^sup>*
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast dest:is_lubD)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(erule converse_rtranclE)
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; u = s\<rbrakk> \<Longrightarrow> s = u
2. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, ya) \<in> r; (ya, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply blast
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, ya) \<in> r; (ya, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(simp only:acyclic_def)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<And>s ya. \<lbrakk>\<forall>x. (x, x) \<notin> r\<^sup>+; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; \<not> (x, s) \<notin> r\<^sup>*; is_ub (r\<^sup>*) x y s; (u, ya) \<in> r; (ya, s) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> s = u
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast intro:rtrancl_into_trancl2 rtrancl_trancl_trancl)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> wf ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(rule finite_acyclic_wf)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> finite ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply simp
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> finite (r \<inter> {a. (y, a) \<in> r\<^sup>*} \<times> {b. (b, u) \<in> r\<^sup>*})
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(erule acyclic_single_valued_finite)
[PROOF STATE]
proof (prove)
goal (4 subgoals):
1. \<lbrakk>\<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> single_valued r
2. \<lbrakk>\<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> (y, u) \<in> r\<^sup>*
3. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
4. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(blast intro:single_valuedI)
[PROOF STATE]
proof (prove)
goal (3 subgoals):
1. \<lbrakk>\<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> (y, u) \<in> r\<^sup>*
2. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
3. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(simp add:is_lub_def is_ub_def)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic ((r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>)
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply simp
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> acyclic (r \<inter> {a. (y, a) \<in> r\<^sup>*} \<times> {b. (b, u) \<in> r\<^sup>*})
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply(erule acyclic_subset)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<lbrakk>\<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u\<rbrakk> \<Longrightarrow> r \<inter> {a. (y, a) \<in> r\<^sup>*} \<times> {b. (b, u) \<in> r\<^sup>*} \<subseteq> r
2. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply blast
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (f s, s) \<in> (r \<inter> {(a, b). (y, a) \<in> r\<^sup>* \<and> (b, u) \<in> r\<^sup>*})\<inverse>
[PROOF STEP]
apply simp
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (y, s) \<in> r\<^sup>* \<and> (s, u) \<in> r\<^sup>*; (x, s) \<notin> r\<^sup>*\<rbrakk> \<Longrightarrow> (s, f s) \<in> r \<and> (f s, u) \<in> r\<^sup>*
[PROOF STEP]
apply(erule conjE)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (s, f s) \<in> r \<and> (f s, u) \<in> r\<^sup>*
[PROOF STEP]
apply(erule_tac z = u in converse_rtranclE)
[PROOF STATE]
proof (prove)
goal (2 subgoals):
1. \<And>s. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; s = u\<rbrakk> \<Longrightarrow> (s, f s) \<in> r \<and> (f s, u) \<in> r\<^sup>*
2. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, ya) \<in> r; (ya, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (s, f s) \<in> r \<and> (f s, u) \<in> r\<^sup>*
[PROOF STEP]
apply(blast dest: is_lubD is_ubD)
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. \<And>s ya. \<lbrakk>acyclic r; \<forall>x y. (x, y) \<in> r \<longrightarrow> f x = y; is_lub (r\<^sup>*) x y u; (x, s) \<notin> r\<^sup>*; (y, s) \<in> r\<^sup>*; (s, ya) \<in> r; (ya, u) \<in> r\<^sup>*\<rbrakk> \<Longrightarrow> (s, f s) \<in> r \<and> (f s, u) \<in> r\<^sup>*
[PROOF STEP]
apply(blast dest:rtrancl_into_rtrancl)
[PROOF STATE]
proof (prove)
goal:
No subgoals!
[PROOF STEP]
done |
box::use(./b)
.on_unload = function (ns) {
    message('a unloaded')
}
box::export()
|
[STATEMENT]
lemma ab_g_in_Mx[simp]:
"map_prod \<alpha> \<beta> \<circ> real_real.g \<in> pair_qbs_Mx X Y"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. map_prod \<alpha> \<beta> \<circ> real_real.g \<in> pair_qbs_Mx X Y
[PROOF STEP]
using qbs_closed1_dest[OF qp1.in_Mx] qbs_closed1_dest[OF qp2.in_Mx]
[PROOF STATE]
proof (prove)
using this:
?f \<in> borel_measurable real_borel \<Longrightarrow> \<alpha> \<circ> ?f \<in> qbs_Mx X
?f \<in> borel_measurable real_borel \<Longrightarrow> \<beta> \<circ> ?f \<in> qbs_Mx Y
goal (1 subgoal):
1. map_prod \<alpha> \<beta> \<circ> real_real.g \<in> pair_qbs_Mx X Y
[PROOF STEP]
by(auto simp add: pair_qbs_Mx_def comp_def) |
State Before: F : Type u_3
α : Type u_2
β : Type u_1
γ : Type ?u.37721
δ : Type ?u.37724
inst✝² : CoheytingAlgebra α
inst✝¹ : CoheytingAlgebra β
inst✝ : CoheytingHomClass F α β
f : F
a : α
⊢ ↑f (¬a) = ¬↑f a State After: no goals Tactic: rw [← top_sdiff', ← top_sdiff', map_sdiff, map_top] |
# Neural Nets t3
```python
%matplotlib widget
#%matplotlib inline
%load_ext autoreload
%autoreload 2
```
The autoreload extension is already loaded. To reload it, use:
%reload_ext autoreload
```python
# import Importing_Notebooks
import numpy as np
from scipy import ndimage
import matplotlib.pyplot as plt
import dill
```
A network built of components which:
1. accept an ordered set of reals (we'll use `numpy.array`, and call them vectors) at the input port and produce another at the output port - this is forward propagation. ${\displaystyle f\colon \mathbf {R} ^{n}\to \mathbf {R} ^{m}}$
1. accept an ordered set of reals at the output port, representing the gradient of the loss function at the output, and produce the gradient of the loss function at the input port - this is back propagation, aka backprop. ${\displaystyle b\colon \mathbf {R} ^{m}\to \mathbf {R} ^{n}}$
1. from the gradient of the loss function at the output, calculate the partial derivative of the loss function w.r.t. the internal parameters ${\displaystyle \frac{\partial E}{\partial w} }$
1. accept a scalar $\eta$ that controls the size of the adjustment of the internal parameters. (In the `Network` class below this is effected by scaling the loss gradient by $\eta$ before passing it to `backprop`, rather than by handing $\eta$ to each layer.)
1. update internal parameters ${\displaystyle w \leftarrow w - \eta \frac{\partial E}{\partial w} }$
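As a concrete numeric illustration of the last two steps (a minimal sketch; the values of $w$, $\eta$ and $\frac{\partial E}{\partial w}$ are made up for illustration, not taken from any layer below):

```python
# hypothetical scalar update: w <- w - eta * dE/dw
w, eta, dE_dw = 0.8, 0.1, 0.25
w = w - eta * dE_dw   # one gradient step
print(w)              # 0.775
```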
```python
class Layer:
def __init__(self):
pass
def __call__(self, x):
"""Computes response to input"""
raise NotImplementedError
def backprop(self, output_delE):
"""Uses output error gradient to adjust internal parameters, and returns gradient of error at input"""
raise NotImplementedError
```
A network built of a cascade of layers:
```python
class Network:
def __init__(self):
self.layers = []
self.eta = 0.1 #FIXME
def extend(self, net):
self.layers.append(net)
def __call__(self, input):
v = input
for net in self.layers:
v = net(v)
return v
def learn(self, facts):
for (x, expected) in facts:
y = self(x)
e = y - expected
loss = e.dot(e)/2.0
egrad = e * self.eta
for net in reversed(self.layers):
egrad = net.backprop(egrad)
return loss
```
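As a usage sketch (not part of the original notebook), here is the protocol in action with a throwaway, parameter-free layer that just doubles its input; it also shows the `(input, expected)` fact format that `learn` expects:

```python
# Illustrative only: a layer with no internal parameters that scales by 2.
class DoubleLayer(Layer):
    def __call__(self, x):
        return 2.0 * x
    def backprop(self, output_delE):
        return 2.0 * output_delE   # chain rule through y = 2x; nothing to update

demo_net = Network()
demo_net.extend(DoubleLayer())
fact = (np.array([1.0, 2.0]), np.array([2.0, 4.0]))   # (input, expected output)
print(demo_net(fact[0]))        # [2. 4.]
print(demo_net.learn([fact]))   # 0.0 -- the doubling layer already matches this target
```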
## A Neural Net lab bench
```python
# from nnbench import NNBench
from matplotlib.widgets import Slider, Button, RadioButtons
```
```python
class NNBench:
def __init__(self, net, ideal=lambda x:x):
self.net = net
self.ideal = ideal
self.gc_protect = []
self.seed = 3
def checkpoint_net(self):
self.net_checkpoint = dill.dumps(self.net)
def rollback_net(self):
self.net = dill.loads(self.net_checkpoint)
def training_data_gen(self, n):
"""Generate n instances of labelled training data"""
np.random.seed(self.seed)
for i in range(n):
v = np.random.randn(2)
yield (v, self.ideal(v))
def learn(self, n=100):
return [self.net.learn([fact]) for fact in self.training_data_gen(n)]
def learning_potential(self, n=100, eta=None):
stash = dill.dumps(self.net)
if eta is not None: # only change the net's eta if a value was passed to us
self.net.eta = eta
loss = self.net.learn(fact for fact in self.training_data_gen(n))
self.net = dill.loads(stash)
return -np.log(loss)
def plot_learning(self, n):
from matplotlib import pyplot as plt
# self.losses = losses = [self.net.learn(fact for fact in self.training_data_gen(n))]
losses = self.learn(n)
plt.yscale('log')
plt.plot(range(len(losses)),losses)
plt.show(block=0)
def knobs_plot_learning(self, n):
pickled_net = dill.dumps(self.net)
# from matplotlib import pyplot as plt
fig, ax = plt.subplots()
plt.subplots_adjust(left=0.25, bottom=0.25)
a0 = 5
f0 = 3
###
losses = [self.net.learn([fact]) for fact in self.training_data_gen(n)]
l, = plt.plot(range(len(losses)), losses, lw=2)
ax.margins(x=0)
plt.yscale('log')
axcolor = 'lightgoldenrodyellow'
axfreq = plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor)
axamp = plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor)
sfreq = Slider(axfreq, '⍺', 0, 1, valinit=self.net.eta)
samp = Slider(axamp, 'Num', 1, 1000, valinit=100, valstep=1)
filtfunc = [lambda x:x]
big = max(losses)
ax.set_title(f"maxloss:{big}")
iax = plt.axes([0.025, 0.7, 0.15, 0.15])
def make_iax_image():
return np.concatenate([np.concatenate((l.M,np.array([l.b])),axis=0)
for l in self.net.layers
if hasattr(l, 'M')],axis=1)
def update_iax(img=[iax.imshow(make_iax_image())]):
img[0].remove()
img[0] = iax.imshow(make_iax_image())
def update(val,ax=ax,loc=[l]):
n = int(samp.val)
self.net = dill.loads(pickled_net)
sfunc = lambda x: 2**(-1.005/(x+.005))
self.net.eta = sfunc(sfreq.val)
#sfreq.set_label("2.4e"%(self.net.eta,))
losses = filtfunc[0]([self.net.learn([fact]) for fact in self.training_data_gen(n)])
big = max(losses)
ax.set_title(f"⍺={self.net.eta:1.3e},max loss:{big}")
loc[0].remove()
loc[0], = ax.plot(range(len(losses)), losses, lw=2,color='xkcd:blue')
ax.set_xlim((0,len(losses)))
ax.set_ylim((min(losses),big))
update_iax()
fig.canvas.draw_idle()
sfreq.on_changed(update)
samp.on_changed(update)
resetax = plt.axes([0.8, 0.025, 0.1, 0.04])
button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')
def reset(event):
self.seed += 1
update()
button.on_clicked(reset)
rax = plt.axes([0.025, 0.5, 0.15, 0.15], facecolor=axcolor)
radio = RadioButtons(rax, ('raw', 'low pass', 'green'), active=0)
def colorfunc(label):
if label == "raw":
filtfunc[0] = lambda x:x
elif label == "low pass":
filtfunc[0] = lambda x:ndimage.gaussian_filter(np.array(x),3)
#l.set_color(label)
#fig.canvas.draw_idle()
update()
radio.on_clicked(colorfunc)
plt.show()
#return 'gc protect:', update, reset, colorfunc,sfreq,samp, radio, button
self.gc_protect.append((update, reset, colorfunc,sfreq,samp, radio, button))
```
## Useful Layers
### Identify
```python
class IdentityLayer(Layer):
def __call__(self, x):
return x
def backprop(self, output_delE):
return output_delE
```
### Affine
A layer that does an [affine transformation](https://mathworld.wolfram.com/AffineTransformation.html) aka affinity, which is the classic fully-connected layer with output offsets.
$$ \mathbf{M} \mathbf{x} + \mathbf{b} = \mathbf{y} $$
where
$$
\mathbf{x} = \sum_{j=1}^{n} x_j \mathbf{\hat{x}}_j \\
\mathbf{b} = \sum_{i=1}^{m} b_i \mathbf{\hat{y}}_i \\
\mathbf{y} = \sum_{i=1}^{m} y_i \mathbf{\hat{y}}_i
$$
and $\mathbf{M}$ can be written
$$
\begin{bmatrix}
m_{1,1} & \dots & m_{1,n} \\
\vdots & \ddots & \vdots \\
m_{m,1} & \dots & m_{m,n}
\end{bmatrix} \\
$$
#### Error gradient back-propagation
$$
\begin{align}
\frac{\partial loss}{\partial\mathbf{x}}
 = \left(\frac{\partial\mathbf{y}}{\partial\mathbf{x}}\right)^{\!\top}\frac{\partial loss}{\partial\mathbf{y}}
 = \mathbf{M}^{\top}\frac{\partial loss}{\partial\mathbf{y}}
\end{align}
$$
Component-wise, $\partial loss/\partial x_j = \sum_{i} m_{i,j}\,\partial loss/\partial y_i$, so the input gradient is the *transpose* of $\mathbf{M}$ applied to the output gradient (left-multiply by $\mathbf{M}^{\top}$).
#### Parameter adjustment
$$
\frac{\partial loss}{\partial\mathbf{M}}
= \frac{\partial loss}{\partial\mathbf{y}}\,\mathbf{x}^{\top}
\qquad\text{i.e.}\qquad
\frac{\partial loss}{\partial m_{i,j}} = \frac{\partial loss}{\partial y_i}\,x_j
\\
\frac{\partial loss}{\partial\mathbf{b}}
= \frac{\partial loss}{\partial\mathbf{y}} \frac{\partial\mathbf{y}}{\partial\mathbf{b}}
= \frac{\partial loss}{\partial\mathbf{y}}
$$
```python
class AffinityLayer(Layer):
"""An affine transformation, which is the classic fully-connected layer with offsets"""
def __init__(self, n, m):
self.M = np.empty((m, n))
self.b = np.empty(m)
self.randomize()
def randomize(self):
self.M[:] = np.random.randn(*self.M.shape)
self.b[:] = np.random.randn(*self.b.shape)
def __call__(self, x):
self.input = x
self.output = self.M @ x + self.b
return self.output
def backprop(self, output_delE):
        input_delE = self.M.T @ output_delE  # transpose: d(loss)/dx = M.T @ d(loss)/dy
        self.M -= np.einsum('i,j', output_delE, self.input)  # outer product; np.outer(output_delE, self.input) is equivalent
self.b -= output_delE
return input_delE
```
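The derivation above is easy to get transposed, so a quick finite-difference check of the input gradient is worth keeping around. This is a sketch, not part of the original notebook; it assumes the `AffinityLayer` just defined, uses `loss = sum(y)` so the output gradient is a vector of ones, and deliberately picks a non-square layer (3 inputs, 2 outputs) so a transposition mistake shows up immediately:

```python
# Finite-difference check of AffinityLayer.backprop's input gradient (sketch).
np.random.seed(0)
check_layer = AffinityLayer(3, 2)
M0, b0 = check_layer.M.copy(), check_layer.b.copy()   # snapshot before backprop nudges them
x = np.random.randn(3)
check_layer(x)                                        # forward pass stores the input
analytic = check_layer.backprop(np.ones(2))           # d(loss)/dx for loss = sum(y)
eps = 1e-6
numeric = np.array([
    (np.sum(M0 @ (x + eps * np.eye(3)[j]) + b0)
     - np.sum(M0 @ (x - eps * np.eye(3)[j]) + b0)) / (2 * eps)
    for j in range(3)
])
print(np.allclose(analytic, numeric))                 # expect True with the M.T formula
```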
### Map
Maps a scalar function element-wise over the input vector; useful e.g. for activation layers.
```python
class MapLayer(Layer):
"""Map a scalar function on the input taken element-wise"""
def __init__(self, fun, dfundx):
self.vfun = np.vectorize(fun)
self.vdfundx = np.vectorize(dfundx)
def __call__(self, x):
self.input = x
return self.vfun(x)
def backprop(self, output_delE):
input_delE = self.vdfundx(self.input) * output_delE
return input_delE
```
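As a usage sketch (not from the original notebook), an un-leaky ReLU and its derivative can be wrapped in a `MapLayer` like this; later cells build the same kind of activation layer inline:

```python
# A MapLayer acting as a ReLU activation (illustrative sketch).
relu = MapLayer(lambda x: x if x > 0 else 0.0,    # element-wise forward map
                lambda x: 1.0 if x > 0 else 0.0)  # its derivative, used by backprop
print(relu(np.array([-1.0, 2.0, 0.5])))           # [0.  2.  0.5]
print(relu.backprop(np.array([1.0, 1.0, 1.0])))   # [0. 1. 1.] -- gated by the sign of the stored input
```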
___
## Tests
### One identity layer
See if the wheels turn:
```python
net = Network()
net.extend(IdentityLayer())
all(net(np.arange(3)) == np.arange(3))
```
True
It does not learn, as expected:
```python
facts = [(np.arange(2*n, 2*n+2), np.arange(2*n+1, 2*n-1, -1)) for n in range(3)]
net.learn(facts)
```
1.0
```python
net(np.arange(2,4))
```
array([2, 3])
### One map layer
```python
net = Network()
net.extend(MapLayer(lambda x: x+1, lambda d: 1))
all(net(np.arange(3)) == np.arange(3)+1)
```
True
It does not learn, as expected:
```python
net.learn(facts), all(net(np.arange(5)) == np.arange(5)+1), net(np.arange(2,4))
```
(2.0, True, array([3, 4]))
### One affine layer
```python
net = Network()
net.extend(AffinityLayer(2,2))
```
```python
t = net.layers[0]
t.M, t.b
```
(array([[-0.07555974, 0.50894321],
[-0.79693707, 0.52115632]]),
array([-0.30574839, 0.99669659]))
#### Can it learn the identity transformation?
```python
bench = NNBench(net)
bench.checkpoint_net()
bench.learning_potential()
```
20.852776766237184
```python
bench.plot_learning(100)
```
Canvas(toolbar=Toolbar(toolitems=[('Home', 'Reset original view', 'home', 'home'), ('Back', 'Back to previous …
```python
bench.ideal = lambda v: np.array([v[1], v[0]])
bench.knobs_plot_learning(100)
```
Canvas(toolbar=Toolbar(toolitems=[('Home', 'Reset original view', 'home', 'home'), ('Back', 'Back to previous …
### Learn thru a map layer
This layer squares its input and divides by two:
```python
net = Network()
net.extend(AffinityLayer(2,2))
def dtanh(x):
v = np.tanh(x)
return (1+v)*(1-v)
net.extend(MapLayer(lambda x:x*x/2.0, lambda d:d))
#net.extend(MapLayer(np.tanh, dtanh))
bench = NNBench(net)
bench.checkpoint_net()
```
```python
net.layers[0].M, net.layers[0].b
```
(array([[-0.32158469, 0.15113037],
[-0.01862772, 0.48352879]]),
array([0.76896516, 1.36624284]))
#### Can it learn difference squared?
```python
bench.ideal = lambda v: [(v[0]-v[1])**2,0]
#bench.ideal = lambda v: [(v[0]>0)*2-1,(v[0]>v[1])*2-1]
bench.learning_potential()
#bench.knobs_plot_learning(100)
```
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:13: RuntimeWarning: overflow encountered in multiply
del sys.path[0]
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:19: RuntimeWarning: invalid value encountered in subtract
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:20: RuntimeWarning: invalid value encountered in subtract
nan
```python
bench.knobs_plot_learning(100)
```
Canvas(toolbar=Toolbar(toolitems=[('Home', 'Reset original view', 'home', 'home'), ('Back', 'Back to previous …
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:13: RuntimeWarning: overflow encountered in multiply
del sys.path[0]
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:19: RuntimeWarning: invalid value encountered in subtract
/opt/conda/lib/python3.7/site-packages/ipykernel_launcher.py:20: RuntimeWarning: invalid value encountered in subtract
### Add a ReLU
```python
bench.net.layers = []
bench.net.extend(AffinityLayer(2,2))
leak = 0
bench.net.extend(MapLayer(lambda x: (x*(1+leak/2)+abs(x)*(1-leak/2))/2, lambda d: [leak,1][1 if d>0 else 0]))
bench.net.layers
```
[<__main__.AffinityLayer at 0x7fbec396afd0>,
<__main__.MapLayer at 0x7fbec3971110>]
### XOR
```python
net = Network()
net.extend(AffinityLayer(2,2))
```
```python
t = net.layers[0]
t.M, t.b
```
(array([[ 1.14726479, -0.11022916],
[ 0.38825041, -0.38712718]]),
array([-0.58722031, 1.91082685]))
```python
```
|
header {* Deciding Regular Expression Equivalence *}
theory Equivalence_Checking
imports
NDerivative
"~~/src/HOL/Library/While_Combinator"
begin
subsection {* Bisimulation between languages and regular expressions *}
coinductive bisimilar :: "'a lang \<Rightarrow> 'a lang \<Rightarrow> bool" where
"([] \<in> K \<longleftrightarrow> [] \<in> L)
\<Longrightarrow> (\<And>x. bisimilar (Deriv x K) (Deriv x L))
\<Longrightarrow> bisimilar K L"
lemma equal_if_bisimilar:
assumes "bisimilar K L" shows "K = L"
proof (rule set_eqI)
fix w
from `bisimilar K L` show "w \<in> K \<longleftrightarrow> w \<in> L"
proof (induct w arbitrary: K L)
case Nil thus ?case by (auto elim: bisimilar.cases)
next
case (Cons a w K L)
from `bisimilar K L` have "bisimilar (Deriv a K) (Deriv a L)"
by (auto elim: bisimilar.cases)
then have "w \<in> Deriv a K \<longleftrightarrow> w \<in> Deriv a L" by (rule Cons(1))
thus ?case by (auto simp: Deriv_def)
qed
qed
lemma language_coinduct:
fixes R (infixl "\<sim>" 50)
assumes "K \<sim> L"
assumes "\<And>K L. K \<sim> L \<Longrightarrow> ([] \<in> K \<longleftrightarrow> [] \<in> L)"
assumes "\<And>K L x. K \<sim> L \<Longrightarrow> Deriv x K \<sim> Deriv x L"
shows "K = L"
apply (rule equal_if_bisimilar)
apply (rule bisimilar.coinduct[of R, OF `K \<sim> L`])
apply (auto simp: assms)
done
type_synonym 'a rexp_pair = "'a rexp * 'a rexp"
type_synonym 'a rexp_pairs = "'a rexp_pair list"
definition is_bisimulation :: "'a::order list \<Rightarrow> 'a rexp_pair set \<Rightarrow> bool"
where
"is_bisimulation as R =
(\<forall>(r,s)\<in> R. (atoms r \<union> atoms s \<subseteq> set as) \<and> (nullable r \<longleftrightarrow> nullable s) \<and>
(\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> R))"
lemma bisim_lang_eq:
assumes bisim: "is_bisimulation as ps"
assumes "(r, s) \<in> ps"
shows "lang r = lang s"
proof -
def ps' \<equiv> "insert (Zero, Zero) ps"
from bisim have bisim': "is_bisimulation as ps'"
by (auto simp: ps'_def is_bisimulation_def)
let ?R = "\<lambda>K L. (\<exists>(r,s)\<in>ps'. K = lang r \<and> L = lang s)"
show ?thesis
proof (rule language_coinduct[where R="?R"])
from `(r, s) \<in> ps`
have "(r, s) \<in> ps'" by (auto simp: ps'_def)
thus "?R (lang r) (lang s)" by auto
next
fix K L assume "?R K L"
then obtain r s where rs: "(r, s) \<in> ps'"
and KL: "K = lang r" "L = lang s" by auto
with bisim' have "nullable r \<longleftrightarrow> nullable s"
by (auto simp: is_bisimulation_def)
thus "[] \<in> K \<longleftrightarrow> [] \<in> L" by (auto simp: nullable_iff KL)
fix a
show "?R (Deriv a K) (Deriv a L)"
proof cases
assume "a \<in> set as"
with rs bisim'
have "(nderiv a r, nderiv a s) \<in> ps'"
by (auto simp: is_bisimulation_def)
thus ?thesis by (force simp: KL lang_nderiv)
next
assume "a \<notin> set as"
with bisim' rs
have "a \<notin> atoms r" "a \<notin> atoms s" by (auto simp: is_bisimulation_def)
then have "nderiv a r = Zero" "nderiv a s = Zero"
by (auto intro: deriv_no_occurrence)
then have "Deriv a K = lang Zero"
"Deriv a L = lang Zero"
unfolding KL lang_nderiv[symmetric] by auto
thus ?thesis by (auto simp: ps'_def)
qed
qed
qed
subsection {* Closure computation *}
definition closure ::
"'a::order list \<Rightarrow> 'a rexp_pair \<Rightarrow> ('a rexp_pairs * 'a rexp_pair set) option"
where
"closure as = rtrancl_while (%(r,s). nullable r = nullable s)
(%(r,s). map (\<lambda>a. (nderiv a r, nderiv a s)) as)"
definition pre_bisim :: "'a::order list \<Rightarrow> 'a rexp \<Rightarrow> 'a rexp \<Rightarrow>
'a rexp_pairs * 'a rexp_pair set \<Rightarrow> bool"
where
"pre_bisim as r s = (\<lambda>(ws,R).
(r,s) \<in> R \<and> set ws \<subseteq> R \<and>
(\<forall>(r,s)\<in> R. atoms r \<union> atoms s \<subseteq> set as) \<and>
(\<forall>(r,s)\<in> R - set ws. (nullable r \<longleftrightarrow> nullable s) \<and>
(\<forall>a\<in>set as. (nderiv a r, nderiv a s) \<in> R)))"
subsection {* Bisimulation-free proof of closure computation *}
text{* The equivalence check can be viewed as the product construction
of two automata. The state space is the reflexive transitive closure of
the pair of next-state functions, i.e. derivatives. *}
lemma rtrancl_nderiv_nderivs: defines "nderivs == foldl (%r a. nderiv a r)"
shows "{((r,s),(nderiv a r,nderiv a s))| r s a. a : A}^* =
{((r,s),(nderivs r w,nderivs s w))| r s w. w : lists A}" (is "?L = ?R")
proof-
note [simp] = nderivs_def
{ fix r s r' s'
have "((r,s),(r',s')) : ?L \<Longrightarrow> ((r,s),(r',s')) : ?R"
proof(induction rule: converse_rtrancl_induct2)
case refl show ?case by (force intro!: foldl.simps(1)[symmetric])
next
case step thus ?case by(force intro!: foldl.simps(2)[symmetric])
qed
} moreover
{ fix r s r' s'
{ fix w have "\<forall>x\<in>set w. x \<in> A \<Longrightarrow> ((r, s), nderivs r w, nderivs s w) :?L"
proof(induction w rule: rev_induct)
case Nil show ?case by simp
next
case snoc thus ?case by (auto elim!: rtrancl_into_rtrancl)
qed
}
hence "((r,s),(r',s')) : ?R \<Longrightarrow> ((r,s),(r',s')) : ?L" by auto
} ultimately show ?thesis by (auto simp: in_lists_conv_set) blast
qed
lemma nullable_nderivs:
"nullable (foldl (%r a. nderiv a r) r w) = (w : lang r)"
by (induct w arbitrary: r) (simp_all add: nullable_iff lang_nderiv Deriv_def)
theorem closure_sound_complete:
assumes result: "closure as (r,s) = Some(ws,R)"
and atoms: "set as = atoms r \<union> atoms s"
shows "ws = [] \<longleftrightarrow> lang r = lang s"
proof -
have leq: "(lang r = lang s) =
(\<forall>(r',s') \<in> {((r0,s0),(nderiv a r0,nderiv a s0))| r0 s0 a. a : set as}^* `` {(r,s)}.
nullable r' = nullable s')"
by(simp add: atoms rtrancl_nderiv_nderivs Ball_def lang_eq_ext imp_ex nullable_nderivs
del:Un_iff)
have "{(x,y). y \<in> set ((\<lambda>(p,q). map (\<lambda>a. (nderiv a p, nderiv a q)) as) x)} =
{((r,s), nderiv a r, nderiv a s) |r s a. a \<in> set as}"
by auto
with atoms rtrancl_while_Some[OF result[unfolded closure_def]]
show ?thesis by (auto simp add: leq Ball_def split: if_splits)
qed
subsection {* The overall procedure *}
primrec add_atoms :: "'a rexp \<Rightarrow> 'a list \<Rightarrow> 'a list"
where
"add_atoms Zero = id"
| "add_atoms One = id"
| "add_atoms (Atom a) = List.insert a"
| "add_atoms (Plus r s) = add_atoms s o add_atoms r"
| "add_atoms (Times r s) = add_atoms s o add_atoms r"
| "add_atoms (Star r) = add_atoms r"
lemma set_add_atoms: "set (add_atoms r as) = atoms r \<union> set as"
by (induct r arbitrary: as) auto
definition check_eqv :: "nat rexp \<Rightarrow> nat rexp \<Rightarrow> bool" where
"check_eqv r s =
(let nr = norm r; ns = norm s; as = add_atoms nr (add_atoms ns [])
in case closure as (nr, ns) of
Some([],_) \<Rightarrow> True | _ \<Rightarrow> False)"
text{* Test: *}
lemma "check_eqv (Plus One (Times (Atom 0) (Star(Atom 0)))) (Star(Atom 0))"
by eval
end
|
If $f$ is a bijection from $A$ to $B$, then the distribution of $B$ is the same as the distribution of $A$ under $f$. |
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b a : α
l : List α
p : Chain R a l
⊢ Chain (fun x y => x ∈ a :: l ∧ y ∈ l ∧ R x y) a l
[PROOFSTEP]
induction' p with _ a b l r _ IH <;> constructor <;> [exact ⟨mem_cons_self _ _, mem_cons_self _ _, r⟩;
exact IH.imp fun a b ⟨am, bm, h⟩ => ⟨mem_cons_of_mem _ am, mem_cons_of_mem _ bm, h⟩]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b a : α
l : List α
p : Chain R a l
⊢ Chain (fun x y => x ∈ a :: l ∧ y ∈ l ∧ R x y) a l
[PROOFSTEP]
induction' p with _ a b l r _ IH
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝¹ b a : α
l : List α
a✝ : α
⊢ Chain (fun x y => x ∈ [a✝] ∧ y ∈ [] ∧ R x y) a✝ []
[PROOFSTEP]
constructor
[GOAL]
case cons
α : Type u
β : Type v
R r✝ : α → α → Prop
l✝¹ l₁ l₂ : List α
a✝² b✝ a✝¹ : α
l✝ : List α
a b : α
l : List α
r : R a b
a✝ : Chain R b l
IH : Chain (fun x y => x ∈ b :: l ∧ y ∈ l ∧ R x y) b l
⊢ Chain (fun x y => x ∈ a :: b :: l ∧ y ∈ b :: l ∧ R x y) a (b :: l)
[PROOFSTEP]
constructor
[GOAL]
case cons.a
α : Type u
β : Type v
R r✝ : α → α → Prop
l✝¹ l₁ l₂ : List α
a✝² b✝ a✝¹ : α
l✝ : List α
a b : α
l : List α
r : R a b
a✝ : Chain R b l
IH : Chain (fun x y => x ∈ b :: l ∧ y ∈ l ∧ R x y) b l
⊢ a ∈ a :: b :: l ∧ b ∈ b :: l ∧ R a b
[PROOFSTEP]
exact ⟨mem_cons_self _ _, mem_cons_self _ _, r⟩
[GOAL]
case cons.a
α : Type u
β : Type v
R r✝ : α → α → Prop
l✝¹ l₁ l₂ : List α
a✝² b✝ a✝¹ : α
l✝ : List α
a b : α
l : List α
r : R a b
a✝ : Chain R b l
IH : Chain (fun x y => x ∈ b :: l ∧ y ∈ l ∧ R x y) b l
⊢ Chain (fun x y => x ∈ a :: b :: l ∧ y ∈ b :: l ∧ R x y) b l
[PROOFSTEP]
exact IH.imp fun a b ⟨am, bm, h⟩ => ⟨mem_cons_of_mem _ am, mem_cons_of_mem _ bm, h⟩
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ a b : α
⊢ Chain R a [b] ↔ R a b
[PROOFSTEP]
simp only [chain_cons, Chain.nil, and_true_iff]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a✝ b✝ a b : α
l₁ l₂ : List α
⊢ Chain R a (l₁ ++ b :: l₂) ↔ Chain R a (l₁ ++ [b]) ∧ Chain R b l₂
[PROOFSTEP]
induction' l₁ with x l₁ IH generalizing a
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂✝ : List α
a✝¹ b✝ a✝ b : α
l₂ : List α
a : α
⊢ Chain R a ([] ++ b :: l₂) ↔ Chain R a ([] ++ [b]) ∧ Chain R b l₂
[PROOFSTEP]
simp only [*, nil_append, cons_append, Chain.nil, chain_cons, and_true_iff, and_assoc]
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a✝¹ b✝ a✝ b : α
l₂ : List α
x : α
l₁ : List α
IH : ∀ {a : α}, Chain R a (l₁ ++ b :: l₂) ↔ Chain R a (l₁ ++ [b]) ∧ Chain R b l₂
a : α
⊢ Chain R a (x :: l₁ ++ b :: l₂) ↔ Chain R a (x :: l₁ ++ [b]) ∧ Chain R b l₂
[PROOFSTEP]
simp only [*, nil_append, cons_append, Chain.nil, chain_cons, and_true_iff, and_assoc]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a✝ b✝ a b c : α
l₁ l₂ : List α
⊢ Chain R a (l₁ ++ b :: c :: l₂) ↔ Chain R a (l₁ ++ [b]) ∧ R b c ∧ Chain R c l₂
[PROOFSTEP]
rw [chain_split, chain_cons]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a✝ b a : α
⊢ Chain R a [] ↔ [] = [] ∨ Forall₂ R (a :: dropLast []) []
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
⊢ Chain R a (b :: l) ↔ b :: l = [] ∨ Forall₂ R (a :: dropLast (b :: l)) (b :: l)
[PROOFSTEP]
by_cases h : l = []
[GOAL]
case pos
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
h : l = []
⊢ Chain R a (b :: l) ↔ b :: l = [] ∨ Forall₂ R (a :: dropLast (b :: l)) (b :: l)
[PROOFSTEP]
simp [@chain_iff_forall₂ b l, *]
[GOAL]
case neg
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
h : ¬l = []
⊢ Chain R a (b :: l) ↔ b :: l = [] ∨ Forall₂ R (a :: dropLast (b :: l)) (b :: l)
[PROOFSTEP]
simp [@chain_iff_forall₂ b l, *]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
⊢ Chain R a (l ++ [b]) ↔ Forall₂ R (a :: l) (l ++ [b])
[PROOFSTEP]
simp [chain_iff_forall₂]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b✝ : α
f : β → α
b : β
l : List β
⊢ Chain R (f b) (map f l) ↔ Chain (fun a b => R (f a) (f b)) b l
[PROOFSTEP]
induction l generalizing b
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b✝ : α
f : β → α
b : β
⊢ Chain R (f b) (map f []) ↔ Chain (fun a b => R (f a) (f b)) b []
[PROOFSTEP]
simp only [map, Chain.nil, chain_cons, *]
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b✝ : α
f : β → α
head✝ : β
tail✝ : List β
tail_ih✝ : ∀ {b : β}, Chain R (f b) (map f tail✝) ↔ Chain (fun a b => R (f a) (f b)) b tail✝
b : β
⊢ Chain R (f b) (map f (head✝ :: tail✝)) ↔ Chain (fun a b => R (f a) (f b)) b (head✝ :: tail✝)
[PROOFSTEP]
simp only [map, Chain.nil, chain_cons, *]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
H : ∀ (a b : α) (ha : p a) (hb : p b), R a b → S (f a ha) (f b hb)
a : α
l : List α
hl₁ : Chain R a l
ha : p a
hl₂ : ∀ (a : α), a ∈ l → p a
⊢ Chain S (f a ha) (pmap f l hl₂)
[PROOFSTEP]
induction' l with lh lt l_ih generalizing a
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝¹ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
H : ∀ (a b : α) (ha : p a) (hb : p b), R a b → S (f a ha) (f b hb)
a✝ : α
l : List α
hl₁✝ : Chain R a✝ l
ha✝ : p a✝
hl₂✝ : ∀ (a : α), a ∈ l → p a
a : α
hl₁ : Chain R a []
ha : p a
hl₂ : ∀ (a : α), a ∈ [] → p a
⊢ Chain S (f a ha) (pmap f [] hl₂)
[PROOFSTEP]
simp
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝¹ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
H : ∀ (a b : α) (ha : p a) (hb : p b), R a b → S (f a ha) (f b hb)
a✝ : α
l : List α
hl₁✝ : Chain R a✝ l
ha✝ : p a✝
hl₂✝ : ∀ (a : α), a ∈ l → p a
lh : α
lt : List α
l_ih : ∀ {a : α}, Chain R a lt → ∀ (ha : p a) (hl₂ : ∀ (a : α), a ∈ lt → p a), Chain S (f a ha) (pmap f lt hl₂)
a : α
hl₁ : Chain R a (lh :: lt)
ha : p a
hl₂ : ∀ (a : α), a ∈ lh :: lt → p a
⊢ Chain S (f a ha) (pmap f (lh :: lt) hl₂)
[PROOFSTEP]
simp [H _ _ _ _ (rel_of_chain_cons hl₁), l_ih (chain_of_chain_cons hl₁)]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
l : List α
hl₁ : ∀ (a : α), a ∈ l → p a
a : α
ha : p a
hl₂ : Chain S (f a ha) (pmap f l hl₁)
H : ∀ (a b : α) (ha : p a) (hb : p b), S (f a ha) (f b hb) → R a b
⊢ Chain R a l
[PROOFSTEP]
induction' l with lh lt l_ih generalizing a
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝¹ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
l : List α
hl₁✝ : ∀ (a : α), a ∈ l → p a
a✝ : α
ha✝ : p a✝
hl₂✝ : Chain S (f a✝ ha✝) (pmap f l hl₁✝)
H : ∀ (a b : α) (ha : p a) (hb : p b), S (f a ha) (f b hb) → R a b
hl₁ : ∀ (a : α), a ∈ [] → p a
a : α
ha : p a
hl₂ : Chain S (f a ha) (pmap f [] hl₁)
⊢ Chain R a []
[PROOFSTEP]
simp
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝¹ b : α
S : β → β → Prop
p : α → Prop
f : (a : α) → p a → β
l : List α
hl₁✝ : ∀ (a : α), a ∈ l → p a
a✝ : α
ha✝ : p a✝
hl₂✝ : Chain S (f a✝ ha✝) (pmap f l hl₁✝)
H : ∀ (a b : α) (ha : p a) (hb : p b), S (f a ha) (f b hb) → R a b
lh : α
lt : List α
l_ih : ∀ (hl₁ : ∀ (a : α), a ∈ lt → p a) {a : α} (ha : p a), Chain S (f a ha) (pmap f lt hl₁) → Chain R a lt
hl₁ : ∀ (a : α), a ∈ lh :: lt → p a
a : α
ha : p a
hl₂ : Chain S (f a ha) (pmap f (lh :: lt) hl₁)
⊢ Chain R a (lh :: lt)
[PROOFSTEP]
simp [H _ _ _ _ (rel_of_chain_cons hl₂), l_ih _ _ (chain_of_chain_cons hl₂)]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ : α
inst✝ : IsTrans α R
a b : α
l : List α
h : R a b
hb : Chain R b l
⊢ ∀ (a' : α), a' ∈ b :: l → R a a'
[PROOFSTEP]
simp only [mem_cons, forall_eq_or_imp, h, true_and_iff]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ : α
inst✝ : IsTrans α R
a b : α
l : List α
h : R a b
hb : Chain R b l
⊢ ∀ (a_1 : α), a_1 ∈ l → R a a_1
[PROOFSTEP]
exact fun c hc => _root_.trans h (rel_of_pairwise_cons hb.pairwise hc)
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Chain R a l₂
h : l₁ <+ l₂
⊢ Chain R a l₁
[PROOFSTEP]
rw [chain_iff_pairwise] at hl ⊢
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Pairwise R (a :: l₂)
h : l₁ <+ l₂
⊢ Pairwise R (a :: l₁)
[PROOFSTEP]
exact hl.sublist (h.cons_cons a)
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Chain R a l
hb : b ∈ l
⊢ R a b
[PROOFSTEP]
rw [chain_iff_pairwise] at hl
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Pairwise R (a :: l)
hb : b ∈ l
⊢ R a b
[PROOFSTEP]
exact rel_of_pairwise_cons hl hb
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
⊢ Chain R a []
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
h : 0 < length []
⊢ R a (get [] { val := 0, isLt := h })
[PROOFSTEP]
simp at h
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
x✝ : ℕ
h : x✝ < length [] - 1
⊢ R (get [] { val := x✝, isLt := (_ : x✝ < length []) }) (get [] { val := x✝ + 1, isLt := (_ : succ x✝ < length []) })
[PROOFSTEP]
simp at h
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ Chain R a (b :: t) ↔
(∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
rw [chain_cons, @chain_iff_get _ _ t]
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ (R a b ∧
(∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) })
(get t { val := i + 1, isLt := (_ : succ i < length t) })) ↔
(∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
constructor
[GOAL]
case mp
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ (R a b ∧
(∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) })
(get t { val := i + 1, isLt := (_ : succ i < length t) })) →
(∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
rintro ⟨R, ⟨h0, h⟩⟩
[GOAL]
case mp.intro.intro
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
⊢ (∀ (h : 0 < length (b :: t)), R✝ a (get (b :: t) { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R✝ (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
constructor
[GOAL]
case mp.intro.intro.left
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
⊢ ∀ (h : 0 < length (b :: t)), R✝ a (get (b :: t) { val := 0, isLt := h })
[PROOFSTEP]
intro _
[GOAL]
case mp.intro.intro.left
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
h✝ : 0 < length (b :: t)
⊢ R✝ a (get (b :: t) { val := 0, isLt := h✝ })
[PROOFSTEP]
exact R
[GOAL]
case mp.intro.intro.right
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
⊢ ∀ (i : ℕ) (h : i < length (b :: t) - 1),
R✝ (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
intro i w
[GOAL]
case mp.intro.intro.right
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
i : ℕ
w : i < length (b :: t) - 1
⊢ R✝ (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
[PROOFSTEP]
cases' i with i
[GOAL]
case mp.intro.intro.right.zero
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
w : zero < length (b :: t) - 1
⊢ R✝ (get (b :: t) { val := zero, isLt := (_ : zero < length (b :: t)) })
(get (b :: t) { val := zero + 1, isLt := (_ : succ zero < length (b :: t)) })
[PROOFSTEP]
apply h0
[GOAL]
case mp.intro.intro.right.succ
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
i : ℕ
w : succ i < length (b :: t) - 1
⊢ R✝ (get (b :: t) { val := succ i, isLt := (_ : succ i < length (b :: t)) })
(get (b :: t) { val := succ i + 1, isLt := (_ : succ (succ i) < length (b :: t)) })
[PROOFSTEP]
exact h i (lt_pred_iff.2 <| by simpa using w)
[GOAL]
α : Type u
β : Type v
R✝¹ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R✝ : α → α → Prop
a b : α
t : List α
R : R✝ a b
h0 : ∀ (h : 0 < length t), R✝ b (get t { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length t - 1),
R✝ (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
i : ℕ
w : succ i < length (b :: t) - 1
⊢ succ i < Nat.sub (length t) 0
[PROOFSTEP]
simpa using w
[GOAL]
case mpr
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ ((∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })) →
R a b ∧
(∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
[PROOFSTEP]
rintro ⟨h0, h⟩
[GOAL]
case mpr.intro
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ R a b ∧
(∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
[PROOFSTEP]
constructor
[GOAL]
case mpr.intro.left
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ R a b
[PROOFSTEP]
apply h0
[GOAL]
case mpr.intro.left
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ 0 < length (b :: t)
[PROOFSTEP]
simp
[GOAL]
case mpr.intro.right
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ (∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
[PROOFSTEP]
constructor
[GOAL]
case mpr.intro.right.left
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ ∀ (h : 0 < length t), R b (get t { val := 0, isLt := h })
[PROOFSTEP]
apply h 0
[GOAL]
case mpr.intro.right.right
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
⊢ ∀ (i : ℕ) (h : i < length t - 1),
R (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
[PROOFSTEP]
intro i w
[GOAL]
case mpr.intro.right.right
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
h0 : ∀ (h : 0 < length (b :: t)), R a (get (b :: t) { val := 0, isLt := h })
h :
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })
i : ℕ
w : i < length t - 1
⊢ R (get t { val := i, isLt := (_ : i < length t) }) (get t { val := i + 1, isLt := (_ : succ i < length t) })
[PROOFSTEP]
exact h (i + 1) (lt_pred_iff.mp w)
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
l : List α
⊢ Chain R a l ↔
(∀ (h : 0 < length l), R a (nthLe l 0 h)) ∧
∀ (i : ℕ) (h : i < length l - 1), R (nthLe l i (_ : i < length l)) (nthLe l (i + 1) (_ : succ i < length l))
[PROOFSTEP]
rw [chain_iff_get]
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
l : List α
⊢ ((∀ (h : 0 < length l), R a (get l { val := 0, isLt := h })) ∧
∀ (i : ℕ) (h : i < length l - 1),
R (get l { val := i, isLt := (_ : i < length l) }) (get l { val := i + 1, isLt := (_ : succ i < length l) })) ↔
(∀ (h : 0 < length l), R a (nthLe l 0 h)) ∧
∀ (i : ℕ) (h : i < length l - 1), R (nthLe l i (_ : i < length l)) (nthLe l (i + 1) (_ : succ i < length l))
[PROOFSTEP]
simp [nthLe]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
S : α → α → Prop
H : ∀ (a b : α), R a b → S a b
l : List α
p : Chain' R l
⊢ Chain' S l
[PROOFSTEP]
cases l <;> [trivial; exact Chain.imp H p]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
S : α → α → Prop
H : ∀ (a b : α), R a b → S a b
l : List α
p : Chain' R l
⊢ Chain' S l
[PROOFSTEP]
cases l
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
S : α → α → Prop
H : ∀ (a b : α), R a b → S a b
p : Chain' R []
⊢ Chain' S []
[PROOFSTEP]
trivial
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
S : α → α → Prop
H : ∀ (a b : α), R a b → S a b
head✝ : α
tail✝ : List α
p : Chain' R (head✝ :: tail✝)
⊢ Chain' S (head✝ :: tail✝)
[PROOFSTEP]
exact Chain.imp H p
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
⊢ [] ++ [a, b] ++ l = a :: b :: l
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
x y : α
h : [x, y] <:+: b :: l
⊢ [a] ++ b :: l ++ [] = a :: b :: l
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b✝ b c : α
l₁ l₂ : List α
⊢ Chain' R (l₁ ++ b :: c :: l₂) ↔ Chain' R (l₁ ++ [b]) ∧ R b c ∧ Chain' R (c :: l₂)
[PROOFSTEP]
rw [chain'_split, chain'_cons]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
f : β → α
l : List β
⊢ Chain' R (map f l) ↔ Chain' (fun a b => R (f a) (f b)) l
[PROOFSTEP]
cases l <;> [rfl; exact chain_map _]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
f : β → α
l : List β
⊢ Chain' R (map f l) ↔ Chain' (fun a b => R (f a) (f b)) l
[PROOFSTEP]
cases l
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
f : β → α
⊢ Chain' R (map f []) ↔ Chain' (fun a b => R (f a) (f b)) []
[PROOFSTEP]
rfl
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
f : β → α
head✝ : β
tail✝ : List β
⊢ Chain' R (map f (head✝ :: tail✝)) ↔ Chain' (fun a b => R (f a) (f b)) (head✝ :: tail✝)
[PROOFSTEP]
exact chain_map _
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Chain' R l₂
h : l₁ <+ l₂
⊢ Chain' R l₁
[PROOFSTEP]
rw [chain'_iff_pairwise] at hl ⊢
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
inst✝ : IsTrans α R
hl : Pairwise R l₂
h : l₁ <+ l₂
⊢ Pairwise R l₁
[PROOFSTEP]
exact hl.sublist h
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b x : α
l : List α
h : Chain' R (x :: l)
y : α
hy : y ∈ head? l
⊢ R x y
[PROOFSTEP]
rw [← cons_head?_tail hy] at h
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b x : α
l : List α
y : α
h : Chain' R (x :: y :: List.tail l)
hy : y ∈ head? l
⊢ R x y
[PROOFSTEP]
exact h.rel_head
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
l : List α
⊢ Chain' R ([] ++ l) ↔ Chain' R [] ∧ Chain' R l ∧ ∀ (x : α), x ∈ getLast? [] → ∀ (y : α), y ∈ head? l → R x y
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b a : α
l : List α
⊢ Chain' R ([a] ++ l) ↔ Chain' R [a] ∧ Chain' R l ∧ ∀ (x : α), x ∈ getLast? [a] → ∀ (y : α), y ∈ head? l → R x y
[PROOFSTEP]
simp [chain'_cons', and_comm]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a✝ b✝ a b : α
l₁ l₂ : List α
⊢ Chain' R (a :: b :: l₁ ++ l₂) ↔
Chain' R (a :: b :: l₁) ∧ Chain' R l₂ ∧ ∀ (x : α), x ∈ getLast? (a :: b :: l₁) → ∀ (y : α), y ∈ head? l₂ → R x y
[PROOFSTEP]
rw [cons_append, cons_append, chain'_cons, chain'_cons, ← cons_append, chain'_append, and_assoc]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a✝ b✝ a b : α
l₁ l₂ : List α
⊢ (R a b ∧ Chain' R (b :: l₁) ∧ Chain' R l₂ ∧ ∀ (x : α), x ∈ getLast? (b :: l₁) → ∀ (y : α), y ∈ head? l₂ → R x y) ↔
R a b ∧ Chain' R (b :: l₁) ∧ Chain' R l₂ ∧ ∀ (x : α), x ∈ getLast? (a :: b :: l₁) → ∀ (y : α), y ∈ head? l₂ → R x y
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
h : Chain' R l
h' : l₁ <:+: l
⊢ Chain' R l₁
[PROOFSTEP]
rcases h' with ⟨l₂, l₃, rfl⟩
[GOAL]
case intro.intro
α : Type u
β : Type v
R r : α → α → Prop
l₁ l₂✝ : List α
a b : α
l₂ l₃ : List α
h : Chain' R (l₂ ++ l₁ ++ l₃)
⊢ Chain' R l₁
[PROOFSTEP]
exact h.left_of_append.right_of_append
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b x y : α
⊢ Chain' R [x, y] ↔ R x y
[PROOFSTEP]
simp only [chain'_singleton, chain'_cons, and_true_iff]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a✝ b a : α
⊢ Chain' R (reverse [a]) ↔ Chain' (flip R) [a]
[PROOFSTEP]
simp only [chain'_singleton, reverse_singleton]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b✝ a b : α
l : List α
⊢ Chain' R (reverse (a :: b :: l)) ↔ Chain' (flip R) (a :: b :: l)
[PROOFSTEP]
rw [chain'_cons, reverse_cons, reverse_cons, append_assoc, cons_append, nil_append, chain'_split, ← reverse_cons,
@chain'_reverse (b :: l), and_comm, chain'_pair, flip]
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a b : α
R : α → α → Prop
⊢ Chain' R []
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a b : α
R : α → α → Prop
x✝ : ℕ
h : x✝ < length [] - 1
⊢ R (get [] { val := x✝, isLt := (_ : x✝ < length []) }) (get [] { val := x✝ + 1, isLt := (_ : succ x✝ < length []) })
[PROOFSTEP]
simp at h
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
⊢ Chain' R [a]
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b : α
R : α → α → Prop
a : α
x✝ : ℕ
h : x✝ < length [a] - 1
⊢ R (get [a] { val := x✝, isLt := (_ : x✝ < length [a]) })
(get [a] { val := x✝ + 1, isLt := (_ : succ x✝ < length [a]) })
[PROOFSTEP]
simp at h
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ Chain' R (a :: b :: t) ↔
∀ (i : ℕ) (h : i < length (a :: b :: t) - 1),
R (get (a :: b :: t) { val := i, isLt := (_ : i < length (a :: b :: t)) })
(get (a :: b :: t) { val := i + 1, isLt := (_ : succ i < length (a :: b :: t)) })
[PROOFSTEP]
rw [← and_forall_succ, chain'_cons, chain'_iff_get]
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ (R a b ∧
∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get (b :: t) { val := i + 1, isLt := (_ : succ i < length (b :: t)) })) ↔
(∀ (h : 0 < length (a :: b :: t) - 1),
R (get (a :: b :: t) { val := 0, isLt := (_ : 0 < length (a :: b :: t)) })
(get (a :: b :: t) { val := 0 + 1, isLt := (_ : succ 0 < length (a :: b :: t)) })) ∧
∀ (n : ℕ) (h : n + 1 < length (a :: b :: t) - 1),
R (get (a :: b :: t) { val := n + 1, isLt := (_ : n + 1 < length (a :: b :: t)) })
(get (a :: b :: t) { val := n + 1 + 1, isLt := (_ : succ (n + 1) < length (a :: b :: t)) })
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ R a b →
((∀ (i : ℕ) (h : i < length (b :: t) - 1),
R (get (b :: t) { val := i, isLt := (_ : i < length (b :: t)) })
(get t { val := i, isLt := (_ : i < length t) })) ↔
∀ (n : ℕ) (h : n + 1 < length (a :: b :: t) - 1),
R (get (b :: t) { val := n, isLt := (_ : n < length (b :: t)) })
(get t { val := n, isLt := (_ : n < length t) }))
[PROOFSTEP]
dsimp [succ_sub_one]
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l l₁ l₂ : List α
a✝ b✝ : α
R : α → α → Prop
a b : α
t : List α
⊢ R a b →
((∀ (i : ℕ) (h : i < length t),
R (get (b :: t) { val := i, isLt := (_ : i < succ (length t)) })
(get t { val := i, isLt := (_ : succ i ≤ length t) })) ↔
∀ (n : ℕ) (h : n + 1 < length t + 1),
R (get (b :: t) { val := n, isLt := (_ : succ n ≤ length (b :: t)) })
(get t { val := n, isLt := (_ : succ n ≤ length t) }))
[PROOFSTEP]
exact fun _ => ⟨fun h i hi => h i (Nat.lt_of_succ_lt_succ hi), fun h i hi => h i (Nat.succ_lt_succ hi)⟩
[GOAL]
α : Type u
β : Type v
R✝ r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
R : α → α → Prop
l : List α
⊢ (∀ (i : ℕ) (h : i < length l - 1),
R (get l { val := i, isLt := (_ : i < length l) }) (get l { val := i + 1, isLt := (_ : succ i < length l) })) ↔
∀ (i : ℕ) (h : i < length l - 1), R (nthLe l i (_ : i < length l)) (nthLe l (i + 1) (_ : succ i < length l))
[PROOFSTEP]
simp [nthLe]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ l₃ : List α
h₁ : Chain' R (l₁ ++ l₂)
h₂ : Chain' R (l₂ ++ l₃)
hn : l₂ ≠ []
⊢ ∀ (x : α), x ∈ getLast? (l₁ ++ l₂) → ∀ (y : α), y ∈ head? l₃ → R x y
[PROOFSTEP]
simpa only [getLast?_append_of_ne_nil _ hn] using (chain'_append.1 h₂).2.2
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
x✝ : ¬[] ∈ []
⊢ Chain' R (join []) ↔
(∀ (l : List α), l ∈ [] → Chain' R l) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) []
[PROOFSTEP]
simp
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
l : List α
x✝ : ¬[] ∈ [l]
⊢ Chain' R (join [l]) ↔
(∀ (l_1 : List α), l_1 ∈ [l] → Chain' R l_1) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) [l]
[PROOFSTEP]
simp [join]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ : List α
L : List (List α)
hL : ¬[] ∈ l₁ :: l₂ :: L
⊢ Chain' R (join (l₁ :: l₂ :: L)) ↔
(∀ (l : List α), l ∈ l₁ :: l₂ :: L → Chain' R l) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₁ :: l₂ :: L)
[PROOFSTEP]
rw [mem_cons, not_or, ← Ne.def] at hL
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ : List α
L : List (List α)
hL : [] ≠ l₁ ∧ ¬[] ∈ l₂ :: L
⊢ Chain' R (join (l₁ :: l₂ :: L)) ↔
(∀ (l : List α), l ∈ l₁ :: l₂ :: L → Chain' R l) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₁ :: l₂ :: L)
[PROOFSTEP]
rw [join, chain'_append, chain'_join hL.2, forall_mem_cons, chain'_cons]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ : List α
L : List (List α)
hL : [] ≠ l₁ ∧ ¬[] ∈ l₂ :: L
⊢ (Chain' R l₁ ∧
((Chain' R l₂ ∧ ∀ (x : List α), x ∈ L → Chain' R x) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L)) ∧
∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? (join (l₂ :: L)) → R x y) ↔
(∀ (l : List α), l ∈ l₁ :: l₂ :: L → Chain' R l) ∧
(∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L)
[PROOFSTEP]
rw [mem_cons, not_or, ← Ne.def] at hL
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ : List α
L : List (List α)
hL : [] ≠ l₁ ∧ [] ≠ l₂ ∧ ¬[] ∈ L
⊢ (Chain' R l₁ ∧
((Chain' R l₂ ∧ ∀ (x : List α), x ∈ L → Chain' R x) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L)) ∧
∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? (join (l₂ :: L)) → R x y) ↔
(∀ (l : List α), l ∈ l₁ :: l₂ :: L → Chain' R l) ∧
(∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L)
[PROOFSTEP]
simp only [forall_mem_cons, and_assoc, join, head?_append_of_ne_nil _ hL.2.1.symm]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁✝ l₂✝ : List α
a b : α
l₁ l₂ : List α
L : List (List α)
hL : [] ≠ l₁ ∧ [] ≠ l₂ ∧ ¬[] ∈ L
⊢ (Chain' R l₁ ∧
Chain' R l₂ ∧
(∀ (x : List α), x ∈ L → Chain' R x) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L) ∧
∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) ↔
Chain' R l₁ ∧
Chain' R l₂ ∧
(∀ (x : List α), x ∈ L → Chain' R x) ∧
(∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) ∧
Chain' (fun l₁ l₂ => ∀ (x : α), x ∈ getLast? l₁ → ∀ (y : α), y ∈ head? l₂ → R x y) (l₂ :: L)
[PROOFSTEP]
exact Iff.rfl.and (Iff.rfl.and $ Iff.rfl.and and_comm)
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
⊢ ∃ l, Chain r a l ∧ getLast (a :: l) (_ : a :: l ≠ []) = b
[PROOFSTEP]
refine' Relation.ReflTransGen.head_induction_on h _ _
[GOAL]
case refine'_1
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
⊢ ∃ l, Chain r b l ∧ getLast (b :: l) (_ : b :: l ≠ []) = b
[PROOFSTEP]
exact ⟨[], Chain.nil, rfl⟩
[GOAL]
case refine'_2
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
⊢ ∀ {a c : α},
r a c →
Relation.ReflTransGen r c b →
(∃ l, Chain r c l ∧ getLast (c :: l) (_ : c :: l ≠ []) = b) →
∃ l, Chain r a l ∧ getLast (a :: l) (_ : a :: l ≠ []) = b
[PROOFSTEP]
intro c d e _ ih
[GOAL]
case refine'_2
α : Type u
β : Type v
R r : α → α → Prop
l l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
c d : α
e : r c d
h✝ : Relation.ReflTransGen r d b
ih : ∃ l, Chain r d l ∧ getLast (d :: l) (_ : d :: l ≠ []) = b
⊢ ∃ l, Chain r c l ∧ getLast (c :: l) (_ : c :: l ≠ []) = b
[PROOFSTEP]
obtain ⟨l, hl₁, hl₂⟩ := ih
[GOAL]
case refine'_2.intro.intro
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
c d : α
e : r c d
h✝ : Relation.ReflTransGen r d b
l : List α
hl₁ : Chain r d l
hl₂ : getLast (d :: l) (_ : d :: l ≠ []) = b
⊢ ∃ l, Chain r c l ∧ getLast (c :: l) (_ : c :: l ≠ []) = b
[PROOFSTEP]
refine' ⟨d :: l, Chain.cons e hl₁, _⟩
[GOAL]
case refine'_2.intro.intro
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
h : Relation.ReflTransGen r a b
c d : α
e : r c d
h✝ : Relation.ReflTransGen r d b
l : List α
hl₁ : Chain r d l
hl₂ : getLast (d :: l) (_ : d :: l ≠ []) = b
⊢ getLast (c :: d :: l) (_ : c :: d :: l ≠ []) = b
[PROOFSTEP]
rwa [getLast_cons_cons]
[GOAL]
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
p : α → Prop
l : List α
h : Chain r a l
hb : getLast (a :: l) (_ : a :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
⊢ ∀ (i : α), i ∈ a :: l → p i
[PROOFSTEP]
induction' l with _ _ l_ih generalizing a
[GOAL]
case nil
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
a : α
h : Chain r a []
hb : getLast [a] (_ : [a] ≠ []) = b
⊢ ∀ (i : α), i ∈ [a] → p i
[PROOFSTEP]
cases hb
[GOAL]
case nil.refl
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
a : α
h : Chain r a []
hb : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = getLast [a] (_ : [a] ≠ [])
final : p (getLast [a] (_ : [a] ≠ []))
⊢ ∀ (i : α), i ∈ [a] → p i
[PROOFSTEP]
simpa using final
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
a : α
h : Chain r a (head✝ :: tail✝)
hb : getLast (a :: head✝ :: tail✝) (_ : a :: head✝ :: tail✝ ≠ []) = b
⊢ ∀ (i : α), i ∈ a :: head✝ :: tail✝ → p i
[PROOFSTEP]
rw [chain_cons] at h
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
a : α
h : r a head✝ ∧ Chain r head✝ tail✝
hb : getLast (a :: head✝ :: tail✝) (_ : a :: head✝ :: tail✝ ≠ []) = b
⊢ ∀ (i : α), i ∈ a :: head✝ :: tail✝ → p i
[PROOFSTEP]
simp only [mem_cons]
[GOAL]
case cons
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
a : α
h : r a head✝ ∧ Chain r head✝ tail✝
hb : getLast (a :: head✝ :: tail✝) (_ : a :: head✝ :: tail✝ ≠ []) = b
⊢ ∀ (i : α), i = a ∨ i = head✝ ∨ i ∈ tail✝ → p i
[PROOFSTEP]
rintro _ (rfl | H)
[GOAL]
case cons.inl
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a b : α
p : α → Prop
l : List α
h✝ : Chain r a l
hb✝ : getLast (a :: l) (_ : a :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
i✝ : α
h : r i✝ head✝ ∧ Chain r head✝ tail✝
hb : getLast (i✝ :: head✝ :: tail✝) (_ : i✝ :: head✝ :: tail✝ ≠ []) = b
⊢ p i✝
case cons.inr
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
a : α
h : r a head✝ ∧ Chain r head✝ tail✝
hb : getLast (a :: head✝ :: tail✝) (_ : a :: head✝ :: tail✝ ≠ []) = b
i✝ : α
H : i✝ = head✝ ∨ i✝ ∈ tail✝
⊢ p i✝
[PROOFSTEP]
apply carries h.1 (l_ih h.2 hb _ (mem_cons.2 (Or.inl rfl)))
[GOAL]
case cons.inr
α : Type u
β : Type v
R r : α → α → Prop
l✝ l₁ l₂ : List α
a✝ b : α
p : α → Prop
l : List α
h✝ : Chain r a✝ l
hb✝ : getLast (a✝ :: l) (_ : a✝ :: l ≠ []) = b
carries : ∀ ⦃x y : α⦄, r x y → p y → p x
final : p b
head✝ : α
tail✝ : List α
l_ih : ∀ {a : α}, Chain r a tail✝ → getLast (a :: tail✝) (_ : a :: tail✝ ≠ []) = b → ∀ (i : α), i ∈ a :: tail✝ → p i
a : α
h : r a head✝ ∧ Chain r head✝ tail✝
hb : getLast (a :: head✝ :: tail✝) (_ : a :: head✝ :: tail✝ ≠ []) = b
i✝ : α
H : i✝ = head✝ ∨ i✝ ∈ tail✝
⊢ p i✝
[PROOFSTEP]
apply l_ih h.2 hb _ (mem_cons.2 H)
[GOAL]
α : Type u_1
r : α → α → Prop
l : List.chains r
acc : ∀ (a : α), a ∈ List.head? ↑l → Acc r a
⊢ Acc (List.lex_chains r) l
[PROOFSTEP]
obtain ⟨_ | ⟨a, l⟩, hl⟩ := l
[GOAL]
case mk.nil
α : Type u_1
r : α → α → Prop
hl : List.Chain' (flip r) []
acc : ∀ (a : α), a ∈ List.head? ↑{ val := [], property := hl } → Acc r a
⊢ Acc (List.lex_chains r) { val := [], property := hl }
[PROOFSTEP]
apply Acc.intro
[GOAL]
case mk.nil.h
α : Type u_1
r : α → α → Prop
hl : List.Chain' (flip r) []
acc : ∀ (a : α), a ∈ List.head? ↑{ val := [], property := hl } → Acc r a
⊢ ∀ (y : List.chains r), List.lex_chains r y { val := [], property := hl } → Acc (List.lex_chains r) y
[PROOFSTEP]
rintro ⟨_⟩ ⟨_⟩
[GOAL]
case mk.cons
α : Type u_1
r : α → α → Prop
a : α
l : List α
hl : List.Chain' (flip r) (a :: l)
acc : ∀ (a_1 : α), a_1 ∈ List.head? ↑{ val := a :: l, property := hl } → Acc r a_1
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
specialize acc a _
[GOAL]
case mk.cons
α : Type u_1
r : α → α → Prop
a : α
l : List α
hl : List.Chain' (flip r) (a :: l)
acc : ∀ (a_1 : α), a_1 ∈ List.head? ↑{ val := a :: l, property := hl } → Acc r a_1
⊢ a ∈ List.head? ↑{ val := a :: l, property := hl }
[PROOFSTEP]
rw [List.head?_cons, Option.mem_some_iff]
/- For an r-decreasing chain of the form a :: l, apply induction on a -/
[GOAL]
case mk.cons
α : Type u_1
r : α → α → Prop
a : α
l : List α
hl : List.Chain' (flip r) (a :: l)
acc : Acc r a
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
induction acc generalizing l with
| intro a _ ih =>
/- Bundle l with a proof that it is r-decreasing to form l' -/
have hl' := (List.chain'_cons'.1 hl).2
let l' : List.chains r := ⟨l, hl'⟩
have : Acc (List.lex_chains r) l'
· cases' l with b l
· apply Acc.intro;
rintro ⟨_⟩
⟨_⟩
/- l' is accessible by induction hypothesis -/
·
apply
ih b
(List.chain'_cons.1 hl).1
/- make l' a free variable and induct on l' -/
revert hl
rw [(by rfl : l = l'.1)]
clear_value l'
induction this with
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons
α : Type u_1
r : α → α → Prop
a : α
l : List α
hl : List.Chain' (flip r) (a :: l)
acc : Acc r a
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
induction acc generalizing l with
| intro a _ ih =>
/- Bundle l with a proof that it is r-decreasing to form l' -/
have hl' := (List.chain'_cons'.1 hl).2
let l' : List.chains r := ⟨l, hl'⟩
have : Acc (List.lex_chains r) l'
· cases' l with b l
· apply Acc.intro;
rintro ⟨_⟩
⟨_⟩
/- l' is accessible by induction hypothesis -/
·
apply
ih b
(List.chain'_cons.1 hl).1
/- make l' a free variable and induct on l' -/
revert hl
rw [(by rfl : l = l'.1)]
clear_value l'
induction this with
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
| intro a _ ih =>
/- Bundle l with a proof that it is r-decreasing to form l' -/
have hl' := (List.chain'_cons'.1 hl).2
let l' : List.chains r := ⟨l, hl'⟩
have : Acc (List.lex_chains r) l'
· cases' l with b l
· apply Acc.intro;
rintro ⟨_⟩
⟨_⟩
/- l' is accessible by induction hypothesis -/
·
apply
ih b
(List.chain'_cons.1 hl).1
/- make l' a free variable and induct on l' -/
revert hl
rw [(by rfl : l = l'.1)]
clear_value l'
induction this with
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
have hl' := (List.chain'_cons'.1 hl).2
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
hl' : List.Chain' (flip r) l
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
let l' : List.chains r := ⟨l, hl'⟩
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
have : Acc (List.lex_chains r) l'
[GOAL]
case this
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
⊢ Acc (List.lex_chains r) l'
[PROOFSTEP]
cases' l with b l
[GOAL]
case this.nil
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
hl : List.Chain' (flip r) [a]
hl' : List.Chain' (flip r) []
l' : List.chains r := { val := [], property := hl' }
⊢ Acc (List.lex_chains r) l'
[PROOFSTEP]
apply Acc.intro
[GOAL]
case this.nil.h
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
hl : List.Chain' (flip r) [a]
hl' : List.Chain' (flip r) []
l' : List.chains r := { val := [], property := hl' }
⊢ ∀ (y : List.chains r), List.lex_chains r y l' → Acc (List.lex_chains r) y
[PROOFSTEP]
rintro ⟨_⟩
⟨_⟩
/- l' is accessible by induction hypothesis -/
[GOAL]
case this.cons
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
b : α
l : List α
hl : List.Chain' (flip r) (a :: b :: l)
hl' : List.Chain' (flip r) (b :: l)
l' : List.chains r := { val := b :: l, property := hl' }
⊢ Acc (List.lex_chains r) l'
[PROOFSTEP]
apply
ih b
(List.chain'_cons.1 hl).1
/- make l' a free variable and induct on l' -/
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl : List.Chain' (flip r) (a :: l)
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
this : Acc (List.lex_chains r) l'
⊢ Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
revert hl
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
this : Acc (List.lex_chains r) l'
⊢ ∀ (hl : List.Chain' (flip r) (a :: l)), Acc (List.lex_chains r) { val := a :: l, property := hl }
[PROOFSTEP]
rw [(by rfl : l = l'.1)]
[GOAL]
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
this : Acc (List.lex_chains r) l'
⊢ l = ↑l'
[PROOFSTEP]
rfl
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl' : List.Chain' (flip r) l
l' : List.chains r := { val := l, property := hl' }
this : Acc (List.lex_chains r) l'
⊢ ∀ (hl : List.Chain' (flip r) (a :: ↑l')), Acc (List.lex_chains r) { val := a :: ↑l', property := hl }
[PROOFSTEP]
clear_value l'
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl' : List.Chain' (flip r) l
l' : List.chains r
this : Acc (List.lex_chains r) l'
⊢ ∀ (hl : List.Chain' (flip r) (a :: ↑l')), Acc (List.lex_chains r) { val := a :: ↑l', property := hl }
[PROOFSTEP]
induction this with
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l : List α
hl' : List.Chain' (flip r) l
l' : List.chains r
this : Acc (List.lex_chains r) l'
⊢ ∀ (hl : List.Chain' (flip r) (a :: ↑l')), Acc (List.lex_chains r) { val := a :: ↑l', property := hl }
[PROOFSTEP]
induction this with
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons.intro.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
⊢ ∀ (hl : List.Chain' (flip r) (a :: ↑l)), Acc (List.lex_chains r) { val := a :: ↑l, property := hl }
[PROOFSTEP]
| intro l _ ihl =>
intro hl
apply Acc.intro
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
· apply Acc.intro; rintro ⟨_⟩ ⟨_⟩
· apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
· apply ih b hr
[GOAL]
case mk.cons.intro.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
⊢ ∀ (hl : List.Chain' (flip r) (a :: ↑l)), Acc (List.lex_chains r) { val := a :: ↑l, property := hl }
[PROOFSTEP]
intro hl
[GOAL]
case mk.cons.intro.intro
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
⊢ Acc (List.lex_chains r) { val := a :: ↑l, property := hl }
[PROOFSTEP]
apply Acc.intro
[GOAL]
case mk.cons.intro.intro.h
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
⊢ ∀ (y : List.chains r), List.lex_chains r y { val := a :: ↑l, property := hl } → Acc (List.lex_chains r) y
[PROOFSTEP]
rintro ⟨_ | ⟨b, m⟩, hm⟩ (_ | hr | hr)
[GOAL]
case mk.cons.intro.intro.h.mk.nil.nil
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
hm : List.Chain' (flip r) []
⊢ Acc (List.lex_chains r) { val := [], property := hm }
[PROOFSTEP]
apply Acc.intro
[GOAL]
case mk.cons.intro.intro.h.mk.nil.nil.h
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
hm : List.Chain' (flip r) []
⊢ ∀ (y : List.chains r), List.lex_chains r y { val := [], property := hm } → Acc (List.lex_chains r) y
[PROOFSTEP]
rintro ⟨_⟩ ⟨_⟩
[GOAL]
case mk.cons.intro.intro.h.mk.cons.cons
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
m : List α
hm : List.Chain' (flip r) (a :: m)
hr : List.Lex r m ↑l
⊢ Acc (List.lex_chains r) { val := a :: m, property := hm }
[PROOFSTEP]
apply ihl ⟨m, (List.chain'_cons'.1 hm).2⟩ hr
[GOAL]
case mk.cons.intro.intro.h.mk.cons.rel
α : Type u_1
r : α → α → Prop
a✝ a : α
h✝¹ : ∀ (y : α), r y a → Acc r y
ih :
∀ (y : α),
r y a →
∀ (l : List α) (hl : List.Chain' (flip r) (y :: l)), Acc (List.lex_chains r) { val := y :: l, property := hl }
l✝ : List α
hl' : List.Chain' (flip r) l✝
l' l : List.chains r
h✝ : ∀ (y : List.chains r), List.lex_chains r y l → Acc (List.lex_chains r) y
ihl :
∀ (y : List.chains r),
List.lex_chains r y l →
∀ (hl : List.Chain' (flip r) (a :: ↑y)), Acc (List.lex_chains r) { val := a :: ↑y, property := hl }
hl : List.Chain' (flip r) (a :: ↑l)
b : α
m : List α
hm : List.Chain' (flip r) (b :: m)
hr : r b a
⊢ Acc (List.lex_chains r) { val := b :: m, property := hm }
[PROOFSTEP]
apply ih b hr
|
module Loss where
import Numeric.LinearAlgebra
getLoss :: String -> Matrix R -> Matrix R -> Matrix R
getLoss lossType yMat tMat =
  case lossType of
    "mse" -> mse yMat tMat
    _     -> yMat  -- fallback branch; never reached for supported loss types, but keeps the case total
mse :: Matrix R -> Matrix R -> Matrix R
mse yMat tMat =
  let dyMat   = yMat - tMat
      r       = rows yMat
      c       = cols yMat
      halfMat = (r >< c) (repeat 0.5) :: Matrix R  -- constant 0.5 matrix, same shape as yMat
  in  halfMat * dyMat * dyMat                      -- elementwise 0.5 * (y - t)^2
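-- Usage sketch (hypothetical data): for matrices y, t :: Matrix R of equal shape,
-- getLoss "mse" y t yields the elementwise loss 0.5 * (y - t)^2, while any other
-- lossType string simply returns y unchanged.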
|
using ReducedBasisMethods
using Test
@testset "ReducedBasisMethods.jl" begin
include("parameter_tests.jl")
include("parameterspace_tests.jl")
include("trainingset_tests.jl")
end
|
program uno
open (10, FILE='Ejer5A.dat', STATUS='UNKNOWN')
write(10,*) i,x1,x2,xm, abs(G)
close(10)
end program
subroutine eulerexp(func,xo,yo,a,b,n,y)
  ! Given the initial condition (xo, yo) on the interval (a, b), take n explicit
  ! Euler steps and return the approximate solution at those n grid points in y.
  implicit none
  integer*4 :: n, i
  real*8 :: xo, yo, a, b, h
  real*8 :: func
  external func
  real*8, allocatable, dimension(:) :: y, x
  h = (b-a)/n
  allocate(x(0:n), y(n))      ! x(0) = xo, ..., x(n) = xo + n*h; y(i) approximates the solution at x(i)
  do i = 0, n
     x(i) = xo + h*i
  end do
  y(1) = yo + func(xo,yo)*h
  do i = 1, n-1
     y(i+1) = y(i) + func(x(i),y(i))*h
  end do
  return
end subroutine
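! The update above is the standard forward (explicit) Euler scheme:
!   y(i+1) = y(i) + h*func(x(i), y(i)),  with step size h = (b-a)/n.
! Because y is an allocatable dummy argument, callers need an explicit interface
! (for example, by placing eulerexp inside a module or an interface block).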
|
[STATEMENT]
lemma (in domain) div_sum:
assumes "a \<in> carrier R" "b \<in> carrier R" "c \<in> carrier R"
assumes "a divides b"
assumes "a divides c"
shows "a divides (b \<oplus> c)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
proof -
[PROOF STATE]
proof (state)
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
obtain r1 where r1_def: "r1 \<in> carrier R" "a \<otimes> r1 = b"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (\<And>r1. \<lbrakk>r1 \<in> carrier R; a \<otimes> r1 = b\<rbrakk> \<Longrightarrow> thesis) \<Longrightarrow> thesis
[PROOF STEP]
using assms
[PROOF STATE]
proof (prove)
using this:
a \<in> carrier R
b \<in> carrier R
c \<in> carrier R
a divides b
a divides c
goal (1 subgoal):
1. (\<And>r1. \<lbrakk>r1 \<in> carrier R; a \<otimes> r1 = b\<rbrakk> \<Longrightarrow> thesis) \<Longrightarrow> thesis
[PROOF STEP]
by (auto simp:factor_def)
[PROOF STATE]
proof (state)
this:
r1 \<in> carrier R
a \<otimes> r1 = b
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
obtain r2 where r2_def: "r2 \<in> carrier R" "a \<otimes> r2 = c"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. (\<And>r2. \<lbrakk>r2 \<in> carrier R; a \<otimes> r2 = c\<rbrakk> \<Longrightarrow> thesis) \<Longrightarrow> thesis
[PROOF STEP]
using assms
[PROOF STATE]
proof (prove)
using this:
a \<in> carrier R
b \<in> carrier R
c \<in> carrier R
a divides b
a divides c
goal (1 subgoal):
1. (\<And>r2. \<lbrakk>r2 \<in> carrier R; a \<otimes> r2 = c\<rbrakk> \<Longrightarrow> thesis) \<Longrightarrow> thesis
[PROOF STEP]
by (auto simp:factor_def)
[PROOF STATE]
proof (state)
this:
r2 \<in> carrier R
a \<otimes> r2 = c
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
have "a \<otimes> (r1 \<oplus> r2) = (a \<otimes> r1) \<oplus> (a \<otimes> r2)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. a \<otimes> (r1 \<oplus> r2) = a \<otimes> r1 \<oplus> a \<otimes> r2
[PROOF STEP]
using assms(1) r1_def(1) r2_def(1)
[PROOF STATE]
proof (prove)
using this:
a \<in> carrier R
r1 \<in> carrier R
r2 \<in> carrier R
goal (1 subgoal):
1. a \<otimes> (r1 \<oplus> r2) = a \<otimes> r1 \<oplus> a \<otimes> r2
[PROOF STEP]
by algebra
[PROOF STATE]
proof (state)
this:
a \<otimes> (r1 \<oplus> r2) = a \<otimes> r1 \<oplus> a \<otimes> r2
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
also
[PROOF STATE]
proof (state)
this:
a \<otimes> (r1 \<oplus> r2) = a \<otimes> r1 \<oplus> a \<otimes> r2
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
have "... = b \<oplus> c"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. a \<otimes> r1 \<oplus> a \<otimes> r2 = b \<oplus> c
[PROOF STEP]
using r1_def(2) r2_def(2)
[PROOF STATE]
proof (prove)
using this:
a \<otimes> r1 = b
a \<otimes> r2 = c
goal (1 subgoal):
1. a \<otimes> r1 \<oplus> a \<otimes> r2 = b \<oplus> c
[PROOF STEP]
by simp
[PROOF STATE]
proof (state)
this:
a \<otimes> r1 \<oplus> a \<otimes> r2 = b \<oplus> c
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
finally
[PROOF STATE]
proof (chain)
picking this:
a \<otimes> (r1 \<oplus> r2) = b \<oplus> c
[PROOF STEP]
have "b \<oplus> c = a \<otimes> (r1 \<oplus> r2)"
[PROOF STATE]
proof (prove)
using this:
a \<otimes> (r1 \<oplus> r2) = b \<oplus> c
goal (1 subgoal):
1. b \<oplus> c = a \<otimes> (r1 \<oplus> r2)
[PROOF STEP]
by simp
[PROOF STATE]
proof (state)
this:
b \<oplus> c = a \<otimes> (r1 \<oplus> r2)
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
moreover
[PROOF STATE]
proof (state)
this:
b \<oplus> c = a \<otimes> (r1 \<oplus> r2)
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
have "r1 \<oplus> r2 \<in> carrier R"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. r1 \<oplus> r2 \<in> carrier R
[PROOF STEP]
using r1_def(1) r2_def(1)
[PROOF STATE]
proof (prove)
using this:
r1 \<in> carrier R
r2 \<in> carrier R
goal (1 subgoal):
1. r1 \<oplus> r2 \<in> carrier R
[PROOF STEP]
by simp
[PROOF STATE]
proof (state)
this:
r1 \<oplus> r2 \<in> carrier R
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
ultimately
[PROOF STATE]
proof (chain)
picking this:
b \<oplus> c = a \<otimes> (r1 \<oplus> r2)
r1 \<oplus> r2 \<in> carrier R
[PROOF STEP]
show ?thesis
[PROOF STATE]
proof (prove)
using this:
b \<oplus> c = a \<otimes> (r1 \<oplus> r2)
r1 \<oplus> r2 \<in> carrier R
goal (1 subgoal):
1. a divides (b \<oplus> c)
[PROOF STEP]
by (auto simp:factor_def)
[PROOF STATE]
proof (state)
this:
a divides (b \<oplus> c)
goal:
No subgoals!
[PROOF STEP]
qed |
/-
Copyright (c) 2022 Eric Rodriguez. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Eric Rodriguez
! This file was ported from Lean 3 source module ring_theory.localization.cardinality
! leanprover-community/mathlib commit 3b09a2601bb7690643936643e99bba0fedfbf6ed
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathbin.SetTheory.Cardinal.Ordinal
import Mathbin.RingTheory.Artinian
/-!
# Cardinality of localizations
In this file, we establish the cardinality of localizations. In most cases, a localization has
cardinality equal to the base ring. If there are zero-divisors, however, this is no longer true -
for example, `zmod 6` localized at `{2, 4}` is equal to `zmod 3`, and if you have zero in your
submonoid, then your localization is trivial (see `is_localization.unique_of_zero_mem`).
## Main statements
* `is_localization.card_le`: A localization has cardinality no larger than the base ring.
* `is_localization.card`: If you don't localize at zero-divisors, the localization of a ring has
cardinality equal to its base ring,
-/
open Cardinal nonZeroDivisors
universe u v
namespace IsLocalization
variable {R : Type u} [CommRing R] (S : Submonoid R) {L : Type u} [CommRing L] [Algebra R L]
[IsLocalization S L]
include S
/-- A localization always has cardinality less than or equal to the base ring. -/
theorem card_le : (#L) ≤ (#R) := by
classical
cases fintypeOrInfinite R
· exact Cardinal.mk_le_of_surjective (IsArtinianRing.localization_surjective S _)
erw [← Cardinal.mul_eq_self <| Cardinal.aleph0_le_mk R]
set f : R × R → L := fun aa => IsLocalization.mk' _ aa.1 (if h : aa.2 ∈ S then ⟨aa.2, h⟩ else 1)
refine' @Cardinal.mk_le_of_surjective _ _ f fun a => _
obtain ⟨x, y, h⟩ := IsLocalization.mk'_surjective S a
use (x, y)
dsimp [f]
rwa [dif_pos <| show ↑y ∈ S from y.2, SetLike.eta]
#align is_localization.card_le IsLocalization.card_le
variable (L)
/-- If you do not localize at any zero-divisors, localization preserves cardinality. -/
theorem card (hS : S ≤ R⁰) : (#R) = (#L) :=
(Cardinal.mk_le_of_injective (IsLocalization.injective L hS)).antisymm (card_le S)
#align is_localization.card IsLocalization.card
end IsLocalization
|
module Core.SchemeEval.ToScheme
import Core.TT
import Libraries.Utils.Scheme
export
Scheme Namespace where
toScheme x = toScheme (unsafeUnfoldNamespace x)
fromScheme x = Just $ unsafeFoldNamespace !(fromScheme x)
export
Scheme UserName where
toScheme (Basic str) = toScheme str
toScheme (Field str) = Vector 5 [toScheme str]
toScheme Underscore = Vector 9 []
fromScheme (Vector 5 [x]) = pure $ Field !(fromScheme x)
fromScheme (Vector 9 []) = pure Underscore
fromScheme (StringVal x) = pure (Basic x)
fromScheme _ = Nothing
export
Scheme Name where
toScheme (NS x y) = Vector 0 [toScheme x, toScheme y]
toScheme (UN x) = toScheme x
toScheme (MN x y) = Vector 2 [toScheme x, toScheme y]
toScheme (PV x y) = Vector 3 [toScheme x, toScheme y]
toScheme (DN x y) = Vector 4 [toScheme x, toScheme y]
toScheme (Nested x y) = Vector 6 [toScheme x, toScheme y]
toScheme (CaseBlock x y) = Vector 7 [toScheme x, toScheme y]
toScheme (WithBlock x y) = Vector 8 [toScheme x, toScheme y]
toScheme (Resolved x) = toScheme x -- we'll see this most often
fromScheme (Vector 0 [x, y])
= pure $ NS !(fromScheme x) !(fromScheme y)
fromScheme (Vector 2 [x, y])
= pure $ MN !(fromScheme x) !(fromScheme y)
fromScheme (Vector 3 [x, y])
= pure $ PV !(fromScheme x) !(fromScheme y)
fromScheme (Vector 4 [x, y])
= pure $ DN !(fromScheme x) !(fromScheme y)
fromScheme (Vector 5 [x, y])
= pure $ UN (Field !(fromScheme x))
fromScheme (Vector 6 [x, y])
= pure $ Nested !(fromScheme x) !(fromScheme y)
fromScheme (Vector 7 [x, y])
= pure $ CaseBlock !(fromScheme x) !(fromScheme y)
fromScheme (Vector 8 [x, y])
= pure $ WithBlock !(fromScheme x) !(fromScheme y)
fromScheme (Vector 9 [])
= pure $ UN Underscore
fromScheme (IntegerVal x)
= pure $ Resolved (cast x)
fromScheme (StringVal x)
= pure $ UN (Basic x)
fromScheme _ = Nothing
export
Scheme ModuleIdent where
toScheme ns = toScheme (miAsNamespace ns)
fromScheme s = Just $ nsAsModuleIdent !(fromScheme s)
export
Scheme OriginDesc where
toScheme (PhysicalIdrSrc ident) = Vector 0 [toScheme ident]
toScheme (PhysicalPkgSrc fname) = Vector 1 [toScheme fname]
toScheme (Virtual ident) = Null
fromScheme (Vector 0 [i]) = Just (PhysicalIdrSrc !(fromScheme i))
fromScheme (Vector 1 [i]) = Just (PhysicalPkgSrc !(fromScheme i))
fromScheme (Vector _ _) = Nothing
fromScheme _ = Just (Virtual Interactive)
export
Scheme FC where
toScheme (MkFC d s e) = Vector 0 [toScheme d, toScheme s, toScheme e]
toScheme (MkVirtualFC d s e) = Vector 1 [toScheme d, toScheme s, toScheme e]
toScheme EmptyFC = Null
fromScheme _ = Just EmptyFC
export
Scheme LazyReason where
toScheme LInf = IntegerVal 0
toScheme LLazy = IntegerVal 1
toScheme LUnknown = IntegerVal 2
fromScheme (IntegerVal 0) = Just LInf
fromScheme (IntegerVal 1) = Just LLazy
fromScheme _ = Just LUnknown
export
Scheme RigCount where
toScheme x
= if isErased x then IntegerVal 0
else if isLinear x then IntegerVal 1
else IntegerVal 2
fromScheme (IntegerVal 0) = Just erased
fromScheme (IntegerVal 1) = Just linear
fromScheme _ = Just top
export
toSchemePi : PiInfo (SchemeObj Write) -> SchemeObj Write
toSchemePi Implicit = IntegerVal 0
toSchemePi Explicit = IntegerVal 1
toSchemePi AutoImplicit = IntegerVal 2
toSchemePi (DefImplicit s) = Box s
|
lemma uniformly_continuous_imp_Cauchy_continuous: fixes f :: "'a::metric_space \<Rightarrow> 'b::metric_space" shows "\<lbrakk>uniformly_continuous_on S f; Cauchy \<sigma>; \<And>n. (\<sigma> n) \<in> S\<rbrakk> \<Longrightarrow> Cauchy(f \<circ> \<sigma>)" |
```
c=4
dx=0.2
sigma=1
```
```
dt = (sigma*dx)/c
print(dt)
```
0.05
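The printed value matches the usual CFL-style time-step choice (assuming `sigma` here is the Courant number):

$$\Delta t = \frac{\sigma\,\Delta x}{c} = \frac{1 \times 0.2}{4} = 0.05$$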
```
import sympy
import numpy
%matplotlib inline
from matplotlib import rcParams
rcParams['font.family'] = 'serif'
rcParams['font.size'] = 16
from sympy import init_printing
from sympy.utilities.lambdify import lambdify
init_printing()
```
```
x = sympy.symbols('x')
```
```
f=((sympy.cos(x)**2)*(sympy.sin(x)**3))/(4*(x**5)*sympy.exp(x))
f
```
```
fprime=f.diff(x)
fprime
```
```
fprime.subs(x,2.2).evalf()
```
|
State Before: R : Type u_1
inst✝ : Semiring R
f✝ f : R[X]
⊢ eraseLead f + ↑C (leadingCoeff f) * X ^ natDegree f = f State After: no goals Tactic: rw [C_mul_X_pow_eq_monomial, eraseLead_add_monomial_natDegree_leadingCoeff] |
lemma cball_subset_ball_iff: "cball a r \<subseteq> ball a' r' \<longleftrightarrow> dist a a' + r < r' \<or> r < 0" (is "?lhs \<longleftrightarrow> ?rhs") for a :: "'a::euclidean_space" |
/-
Copyright (c) 2022 Yaël Dillies. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Yaël Dillies
-/
import category_theory.category.Bipointed
import order.category.PartOrd
import order.hom.bounded
/-!
# The category of bounded orders
This defines `BddOrd`, the category of bounded orders.
-/
universes u v
open category_theory
/-- The category of bounded orders with monotone functions. -/
structure BddOrd :=
(to_PartOrd : PartOrd)
[is_bounded_order : bounded_order to_PartOrd]
namespace BddOrd
instance : has_coe_to_sort BddOrd Type* := induced_category.has_coe_to_sort to_PartOrd
instance (X : BddOrd) : partial_order X := X.to_PartOrd.str
attribute [instance] BddOrd.is_bounded_order
/-- Construct a bundled `BddOrd` from the underlying `partial_order` and `bounded_order`. -/
def of (α : Type*) [partial_order α] [bounded_order α] : BddOrd := ⟨⟨α⟩⟩
@[simp] lemma coe_of (α : Type*) [partial_order α] [bounded_order α] : ↥(of α) = α := rfl
instance : inhabited BddOrd := ⟨of punit⟩
instance large_category : large_category.{u} BddOrd :=
{ hom := λ X Y, bounded_order_hom X Y,
id := λ X, bounded_order_hom.id X,
comp := λ X Y Z f g, g.comp f,
id_comp' := λ X Y, bounded_order_hom.comp_id,
comp_id' := λ X Y, bounded_order_hom.id_comp,
assoc' := λ W X Y Z _ _ _, bounded_order_hom.comp_assoc _ _ _ }
instance concrete_category : concrete_category BddOrd :=
{ forget := ⟨coe_sort, λ X Y, coe_fn, λ X, rfl, λ X Y Z f g, rfl⟩,
forget_faithful := ⟨λ X Y, by convert fun_like.coe_injective⟩ }
instance has_forget_to_PartOrd : has_forget₂ BddOrd PartOrd :=
{ forget₂ := { obj := λ X, X.to_PartOrd, map := λ X Y, bounded_order_hom.to_order_hom } }
instance has_forget_to_Bipointed : has_forget₂ BddOrd Bipointed :=
{ forget₂ := { obj := λ X, ⟨X, ⊥, ⊤⟩, map := λ X Y f, ⟨f, map_bot f, map_top f⟩ },
forget_comp := rfl }
/-- `order_dual` as a functor. -/
@[simps] def dual : BddOrd ⥤ BddOrd :=
{ obj := λ X, of Xᵒᵈ, map := λ X Y, bounded_order_hom.dual }
/-- Constructs an equivalence between bounded orders from an order isomorphism between them. -/
@[simps] def iso.mk {α β : BddOrd.{u}} (e : α ≃o β) : α ≅ β :=
{ hom := e,
inv := e.symm,
hom_inv_id' := by { ext, exact e.symm_apply_apply _ },
inv_hom_id' := by { ext, exact e.apply_symm_apply _ } }
/-- The equivalence between `BddOrd` and itself induced by `order_dual` both ways. -/
@[simps functor inverse] def dual_equiv : BddOrd ≌ BddOrd :=
equivalence.mk dual dual
(nat_iso.of_components (λ X, iso.mk $ order_iso.dual_dual X) $ λ X Y f, rfl)
(nat_iso.of_components (λ X, iso.mk $ order_iso.dual_dual X) $ λ X Y f, rfl)
end BddOrd
lemma BddOrd_dual_comp_forget_to_PartOrd :
BddOrd.dual ⋙ forget₂ BddOrd PartOrd =
forget₂ BddOrd PartOrd ⋙ PartOrd.dual := rfl
lemma BddOrd_dual_comp_forget_to_Bipointed :
BddOrd.dual ⋙ forget₂ BddOrd Bipointed =
forget₂ BddOrd Bipointed ⋙ Bipointed.swap := rfl
|
suppressMessages(library(ez))
suppressMessages(library(ggplot2))
suppressMessages(library(multcomp))
suppressMessages(library(nlme))
suppressMessages(library(pastecs))
suppressMessages(library(pgirmess))
suppressMessages(library(reshape))
suppressMessages(library(WRS))
suppressMessages(library(e1071))
suppressMessages(library(afex))
suppressMessages(library(emmeans))
suppressMessages(library(tidyverse))
suppressMessages(library(sjPlot))
suppressMessages(library(sjmisc))
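# For each personality variable, fit a one-way ANOVA of the outcome on that variable;
# if the effect is significant (p <= 0.05), follow up with pairwise Wilcoxon tests.
# Results are written to one file per outcome / personality variable.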
processMainEffectCategory <- function(filename, n){
data <- read.csv(file=filename, header=TRUE, sep=",")
data$preferredVersion <- unclass(data$preferredVersion)
personalityVariables <- names(data)[2:39]
invisible(lapply(personalityVariables, function(x) {
result <- aov(eval(substitute(grandMeanTakes ~ personality, list(personality = as.name(x)))), data = data)
filename = sprintf("./results/mainEffects/personality/categories/%s/grandMeanTakes/%s.messageAcrossData", n, x)
#print(summary(result))
write.table(data.frame(unlist(summary(result))), file=filename, row.names=TRUE, col.names=TRUE)
if(summary(result)[[1]][["Pr(>F)"]] <= 0.05){
posthoc<-suppressMessages(pairwise.wilcox.test(data$grandMeanTakes, eval(substitute(data$personality, list(personality = as.name(x)))), p.adj = "none", exact=FALSE))
write.table(data.frame(unlist(posthoc)), file=sprintf("./results/mainEffects/personality/categories/%s/grandMeanTakes/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
#print(posthoc)
}
}))
invisible(lapply(personalityVariables, function(x) {
result <- aov(eval(substitute(meanWhoFocus ~ personality, list(personality = as.name(x)))), data = data)
filename = sprintf("./results/mainEffects/personality/categories/%s/meanWhoFocus/%s.messageAcrossData", n, x)
write.table(data.frame(unlist(summary(result))), file=filename, row.names=TRUE, col.names=TRUE)
if(summary(result)[[1]][["Pr(>F)"]] <= 0.05){
posthoc<-suppressMessages(pairwise.wilcox.test(data$meanWhoFocus, eval(substitute(data$personality, list(personality = as.name(x)))), p.adj = "none", exact=FALSE))
write.table(data.frame(unlist(posthoc)), file=sprintf("./results/mainEffects/personality/categories/%s/meanWhoFocus/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
}
}))
invisible(lapply(personalityVariables, function(x) {
result <- aov(eval(substitute(meanWhatFocus ~ personality, list(personality = as.name(x)))), data = data)
filename = sprintf("./results/mainEffects/personality/categories/%s/meanWhatFocus/%s.messageAcrossData", n, x)
write.table(data.frame(unlist(summary(result))), file=filename, row.names=TRUE, col.names=TRUE)
if(summary(result)[[1]][["Pr(>F)"]] <= 0.05){
posthoc<-suppressMessages(pairwise.wilcox.test(data$meanWhatFocus, eval(substitute(data$personality, list(personality = as.name(x)))), p.adj = "none", exact=FALSE))
write.table(data.frame(unlist(posthoc)), file=sprintf("./results/mainEffects/personality/categories/%s/meanWhatFocus/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
}
}))
}
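# Mixed ANOVA (afex::aov_car) with the personality variable as a between-subjects factor
# and ScoreSystem as a within-subjects factor; significant interactions are followed up
# with estimated marginal means (emmeans) per ScoreSystem level.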
processInteractionCategory <- function(filename, n){
meltedData <- read.csv(file=filename, header=TRUE, sep=",")
personalityVariables <- names(meltedData)[2:39]
#print(personalityVariables)
#Compute the analysis of variance
invisible(lapply(personalityVariables, function(x) {
Mixed.aov.1 <- suppressMessages(aov_car(eval(substitute(takes ~ personality * ScoreSystem + Error(playerId/ScoreSystem), list(personality = as.name(x)))), data = meltedData))
filename = sprintf("./results/interactionEffects/categories/%s/takes/%s.messageAcrossData", n, x)
write(knitr::kable(nice(Mixed.aov.1)),file=filename,append=TRUE)
if(grepl("*", nice(Mixed.aov.1)[[4]][[3]], fixed=TRUE)){
Mixed_Fitted_Interaction<-suppressMessages(emmeans(Mixed.aov.1, eval(substitute(~ScoreSystem|personality, list(personality = as.name(x))))))
write.table(Mixed_Fitted_Interaction, file=sprintf("./results/interactionEffects/categories/%s/takes/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
}
}
))
invisible(lapply(personalityVariables, function(x) {
Mixed.aov.1 <- suppressMessages(aov_car(eval(substitute(what ~ personality * ScoreSystem + Error(playerId/ScoreSystem), list(personality = as.name(x)))), data = meltedData))
filename = sprintf("./results/interactionEffects/categories/%s/what/%s.messageAcrossData", n, x)
write(knitr::kable(nice(Mixed.aov.1)),file=filename,append=TRUE)
if(grepl("*", nice(Mixed.aov.1)[[4]][[3]], fixed=TRUE)){
Mixed_Fitted_Interaction<-suppressMessages(emmeans(Mixed.aov.1, eval(substitute(~ScoreSystem|personality, list(personality = as.name(x))))))
write.table(Mixed_Fitted_Interaction, file=sprintf("./results/interactionEffects/categories/%s/what/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
}
}
))
invisible(lapply(personalityVariables, function(x) {
Mixed.aov.1 <- suppressMessages(aov_car(eval(substitute(who ~ personality * ScoreSystem + Error(playerId/ScoreSystem), list(personality = as.name(x)))), data = meltedData))
filename = sprintf("./results/interactionEffects/categories/%s/who/%s.messageAcrossData", n, x)
write(knitr::kable(nice(Mixed.aov.1)),file=filename,append=TRUE)
if(grepl("*", nice(Mixed.aov.1)[[4]][[3]], fixed=TRUE)){
Mixed_Fitted_Interaction<-suppressMessages(emmeans(Mixed.aov.1, eval(substitute(~ScoreSystem|personality, list(personality = as.name(x))))))
write.table(Mixed_Fitted_Interaction, file=sprintf("./results/interactionEffects/categories/%s/who/Post_%s.messageAcrossData", n, x), row.names=TRUE, col.names=TRUE)
}
}
))
}
print("Computing main effects...")
# print("Computing for two groups [High, Low]...")
# processMainEffectCategory("input/dataTwoCategories.csv", "two")
print("Computing for three groups [High, Medium, Low]...")
processMainEffectCategory("input/dataThreeCategories.csv", "three")
# print("Computing for four groups [High, Medium_High, Medium_Low, Low]...")
# processMainEffectCategory("input/dataFourCategories.csv", "four")
print("Computing interaction effects...")
# print("Computing for two groups [High, Low]...")
# processInteractionCategory("output/meltedDataTwoCategories.csv", "two")
print("Computing for three groups [High, Medium, Low]...")
processInteractionCategory("output/meltedDataThreeCategories.csv", "three")
# print("Computing for four groups [High, Medium_High, Medium_Low, Low]...")
# processInteractionCategory("output/meltedDataFourCategories.csv", "four")
|
State Before: α : Type u
β : Type v
γ : Type w
ι : Type x
inst✝ : UniformSpace β
F : ι → α → β
f : α → β
s s' : Set α
x : α
p : Filter ι
p' : Filter α
g : ι → α
F' : ι → α → β
hf : TendstoUniformlyOnFilter F f p p'
hff' : ∀ᶠ (n : ι × α) in p ×ˢ p', F n.fst n.snd = F' n.fst n.snd
⊢ TendstoUniformlyOnFilter F' f p p' State After: α : Type u
β : Type v
γ : Type w
ι : Type x
inst✝ : UniformSpace β
F : ι → α → β
f : α → β
s s' : Set α
x : α
p : Filter ι
p' : Filter α
g : ι → α
F' : ι → α → β
hf : TendstoUniformlyOnFilter F f p p'
hff' : ∀ᶠ (n : ι × α) in p ×ˢ p', F n.fst n.snd = F' n.fst n.snd
u : Set (β × β)
hu : u ∈ 𝓤 β
n : ι × α
h : (f n.snd, F n.fst n.snd) ∈ u ∧ F n.fst n.snd = F' n.fst n.snd
⊢ (f n.snd, F' n.fst n.snd) ∈ u Tactic: refine' fun u hu => ((hf u hu).and hff').mono fun n h => _ State Before: α : Type u
β : Type v
γ : Type w
ι : Type x
inst✝ : UniformSpace β
F : ι → α → β
f : α → β
s s' : Set α
x : α
p : Filter ι
p' : Filter α
g : ι → α
F' : ι → α → β
hf : TendstoUniformlyOnFilter F f p p'
hff' : ∀ᶠ (n : ι × α) in p ×ˢ p', F n.fst n.snd = F' n.fst n.snd
u : Set (β × β)
hu : u ∈ 𝓤 β
n : ι × α
h : (f n.snd, F n.fst n.snd) ∈ u ∧ F n.fst n.snd = F' n.fst n.snd
⊢ (f n.snd, F' n.fst n.snd) ∈ u State After: α : Type u
β : Type v
γ : Type w
ι : Type x
inst✝ : UniformSpace β
F : ι → α → β
f : α → β
s s' : Set α
x : α
p : Filter ι
p' : Filter α
g : ι → α
F' : ι → α → β
hf : TendstoUniformlyOnFilter F f p p'
hff' : ∀ᶠ (n : ι × α) in p ×ˢ p', F n.fst n.snd = F' n.fst n.snd
u : Set (β × β)
hu : u ∈ 𝓤 β
n : ι × α
h : (f n.snd, F n.fst n.snd) ∈ u ∧ F n.fst n.snd = F' n.fst n.snd
⊢ (f n.snd, F n.fst n.snd) ∈ u Tactic: rw [← h.right] State Before: α : Type u
β : Type v
γ : Type w
ι : Type x
inst✝ : UniformSpace β
F : ι → α → β
f : α → β
s s' : Set α
x : α
p : Filter ι
p' : Filter α
g : ι → α
F' : ι → α → β
hf : TendstoUniformlyOnFilter F f p p'
hff' : ∀ᶠ (n : ι × α) in p ×ˢ p', F n.fst n.snd = F' n.fst n.snd
u : Set (β × β)
hu : u ∈ 𝓤 β
n : ι × α
h : (f n.snd, F n.fst n.snd) ∈ u ∧ F n.fst n.snd = F' n.fst n.snd
⊢ (f n.snd, F n.fst n.snd) ∈ u State After: no goals Tactic: exact h.left |
```python
from mpl_toolkits.mplot3d import axes3d
import sympy
import pandas as pd
from sympy import *
import matplotlib
from sympy.plotting import plot as symplot
from sympy.plotting import plot3d
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
import matplotlib.pyplot as plt
import numpy as np
from pylab import meshgrid,cm,imshow,contour,clabel,colorbar,axis,title,show
from sympy.vector import CoordSys3D, divergence, Del, dot
```
```python
x = symbols('x')
y = symbols('y')
z = symbols('z')
r = symbols('r')
R = symbols('R')
```
```python
w = (3*(1-r/R)**2)
print('Velocity equation in k direction')
display(w)
print('density equation in terms of r')
rho = 100*(1-0.5*r/R)
display(rho)
print('velocity times density')
expand(w*rho)
```
Velocity equation in k direction
$\displaystyle 3 \left(1 - \frac{r}{R}\right)^{2}$
density equation in terms of r
$\displaystyle 100 - \frac{50.0 r}{R}$
velocity times density
$\displaystyle 300 - \frac{750.0 r}{R} + \frac{600.0 r^{2}}{R^{2}} - \frac{150.0 r^{3}}{R^{3}}$
```python
mean = (w*r*pi/2)/(1/4*pi*R**2)
print('equation to integrate to get mean velocity')
display(mean)
mean_vel = integrate(mean,(r,0,R))
print('mean velocity:')
display(mean_vel)
```
equation to integrate to get mean velocity
$\displaystyle \frac{6.0 r \left(1 - \frac{r}{R}\right)^{2}}{R^{2}}$
mean velocity:
$\displaystyle 0.5$
```python
mass = (rho*w*r*pi/2)
print('function to integrate: pi/2 * rho * w * r')
display(mass)
mass_flow = integrate(mass,(r,0,R))
print('mass flow:')
display(mass_flow)
```
    function to integrate: pi/2 * rho * w * r
$\displaystyle \frac{3 \pi r \left(1 - \frac{r}{R}\right)^{2} \left(100 - \frac{50.0 r}{R}\right)}{2}$
mass flow:
$\displaystyle 10.0 \pi R^{2}$
## Finite volume part to estimate the mass flow
$ \dot{m} = \frac{\pi}{2} \int \rho\, w\, r\, dr \;\therefore\; \dot{m} \approx \frac{\pi}{2} \sum \left(\rho\, w\, r\right) \Delta r $
## To estimate the mean velocity
$ \bar{w} \approx \sum \frac{2\, w\, r}{R^2}\, \Delta r $
```python
mean = mean.subs(R,1)
```
```python
mass = mass.subs(R,1)
```
```python
dr = 0.01
R = 1
nodos = int(R/dr)
```
```python
rad = np.linspace(start = 0, stop = 1, num=nodos+1, endpoint=True)
```
```python
integral_mean = np.zeros(len(rad))
integral_mass = np.zeros(len(rad))
for i in range(len(rad)-1):
integral_mean[i] = (mean.subs(r,rad[i]) + mean.subs(r,rad[i+1]))/2*dr
```
```python
print('mean velocity')
sum(integral_mean)
```
    mean velocity
0.4999500000000001
```python
for i in range(len(rad)-1):
integral_mass[i] = (mass.subs(r,rad[i]) + mass.subs(r,rad[i+1]))/2*dr
```
```python
print('mass flow')
sum(integral_mass)
```
    mass flow
31.411999623620755
# Problem 2
```python
f = Function('f')(x,y)
fxx = f.diff(x,x)
fy = f.diff(y)
eq = Eq(fxx + fy + f, 0)  # the PDE f_xx + f_y + f = 0
display(eq)
c = CoordSys3D('c', transformation='cylindrical', variable_names=("r", "theta", "z"))
```
$\displaystyle f{\left(x,y \right)} + \frac{\partial^{2}}{\partial x^{2}} f{\left(x,y \right)} + \frac{\partial}{\partial y} f{\left(x,y \right)} = 0$
## Boundary Conditions:
$Zp - Zw = 0$
$Zp - Zs = 0$
$Z(x=0) = 1$
$Z(y=0) = 1$
### Backward Difference Scheme on dZ/dy
$\frac{1}{\Delta x^2} (Zw+Ze) + Zp \left(1-\frac{2}{\Delta x^2}+\frac{1}{\Delta y}\right) - \frac{Zs}{\Delta y} = 0$
```python
L = 1
delta = 0.1
dx = delta
dy = delta
w = 1/(dx**2)
e = 1/(dx**2)
s = -1/(dy)
alpha = 1-2/(dx**2) + 1/dy
nodos = int(L/delta)
x = np.linspace(start = 0, stop = L, num=nodos+1, endpoint=True)
Nx = int(len(x))
Ny = int(len(x))
```
```python
res = np.zeros((len(x),len(x)))
p = np.zeros(len(x)**2)
m = np.zeros((len(x)**2,len(x)**2))
# dZ/dy = 0
for i in range(0,Nx):
m[i,i] = 1
m[i,i+Nx] = -1
p[i]=0
#print(i,1,i+Nx)
step = i
# Z = 1 at x = 0
for i in range(step+1,step+Ny-1):
m[i,(i-step)*(Nx)] = 1
#print(i,2, (i-step)*(Nx))
p[i] = 1
step = i
# dZ/dx = 0
for i in range(step+1, step+Ny-1):
m[i,(i-step-1)*Nx+(Nx*2-1)] = 1
m[i,(i-step-1)*Nx+(Nx*2-2)] = -1
p[i] = 0
#print(i,3,(i-step-1)*Nx+(Nx*2-2))
step = i
# Z = 1 at y= 0
for i in range(step+1, step+1+ Nx):
m[i,(i-step-1)+Nx*Ny-Ny] = 1
p[i] = 1
#print(i,4,(i-step-1)+Nx*Ny-Ny)
step = i
# Inside
for i in range(0, (Nx-2)):
#m[i,j*]
for j in range(0,Ny-2):
step = step+1
# Zp
m[step,i*(Nx)+(j+Nx+1)] = alpha
#ZW
m[step,i*(Nx)+(j+Nx+1)-1] = w
#ZE
m[step,i*(Nx)+(j+Nx+1)+1] = e
#ZS
m[step,i*(Nx)+(j+Nx+1)+Nx] = s
p[step] = 0
#print(step,5,i*(Nx)+(j+Nx+1)+Nx)
```
```python
m_inv = np.linalg.inv(m)
Z = p
resultado = np.dot(m_inv,Z)
resultado = resultado.reshape((Nx,Nx))
plt.imshow(resultado, cmap = 'jet', extent=[0,1, 0,1])
plt.title('2D Finite Volume ')
plt.colorbar()
plt.show()
```
```python
# At small deltas BDS shows problems... so the method is modified (central difference below)
```
```python
L = 1
delta = 0.01
dx = delta
dy = delta
w = 1/(dx**2)
e = 1/(dx**2)
n = 1/(2*dy)
s = -1/(2*dy)
alpha = 1-2/(dx**2)
nodos = int(L/delta)
x = np.linspace(start = 0, stop = L, num=nodos+1, endpoint=True)
Nx = int(len(x))
Ny = int(len(x))
```
### Central Difference Scheme on dZ/dy
$\frac{1}{\Delta x^2} (Zw+Ze) + Zp \left(1-\frac{2}{\Delta x^2}\right) + \frac{Zn}{2\Delta y} - \frac{Zs}{2\Delta y} = 0$
```python
res = np.zeros((len(x),len(x)))
p = np.zeros(len(x)**2)
m = np.zeros((len(x)**2,len(x)**2))
# dZ/dy = 0
for i in range(0,Nx):
m[i,i] = 1
m[i,i+Nx] = -1
p[i]=0
#print(i,1,i+Nx)
step = i
# Z = 1 at x = 0
for i in range(step+1,step+Ny-1):
m[i,(i-step)*(Nx)] = 1
#print(i,2, (i-step)*(Nx))
p[i] = 1
step = i
# dZ/dx = 0
for i in range(step+1, step+Ny-1):
m[i,(i-step-1)*Nx+(Nx*2-1)] = 1
m[i,(i-step-1)*Nx+(Nx*2-2)] = -1
p[i] = 0
#print(i,3,(i-step-1)*Nx+(Nx*2-2))
step = i
# Z = 1 at y= 0
for i in range(step+1, step+1+ Nx):
m[i,(i-step-1)+Nx*Ny-Ny] = 1
p[i] = 1
#print(i,4,(i-step-1)+Nx*Ny-Ny)
step = i
# Inside
for i in range(0, (Nx-2)):
#m[i,j*]
for j in range(0,Ny-2):
step = step+1
# Zp
m[step,i*(Nx)+(j+Nx+1)] = alpha
#ZW
m[step,i*(Nx)+(j+Nx+1)-1] = w
#ZE
m[step,i*(Nx)+(j+Nx+1)+1] = e
#ZN
m[step,i*(Nx)+(j+Nx+1)-Nx] = n
#ZS
m[step,i*(Nx)+(j+Nx+1)+Nx] = s
p[step] = 0
#print(step,5,i*(Nx)+(j+Nx+1)-Nx)
```
```python
m_inv = np.linalg.inv(m)
Z = p
resultado = np.dot(m_inv,Z)
resultado = resultado.reshape((Nx,Nx))
plt.imshow(resultado, cmap = 'jet', extent=[0,1, 0,1])
plt.title('2D Finite Volume ')
plt.colorbar()
plt.show()
```
|
[STATEMENT]
lemma NFOR_disjuncts: "nf_ACI \<psi> \<Longrightarrow> NFOR (disjuncts \<psi>) = \<psi>"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. nf_ACI \<psi> \<Longrightarrow> NFOR (disjuncts \<psi>) = \<psi>
[PROOF STEP]
using nFOR_disjuncts[of \<psi>]
[PROOF STATE]
proof (prove)
using this:
nf_ACI \<psi> \<Longrightarrow> nFOR (disjuncts_list \<psi>) = \<psi>
goal (1 subgoal):
1. nf_ACI \<psi> \<Longrightarrow> NFOR (disjuncts \<psi>) = \<psi>
[PROOF STEP]
unfolding NFOR_def o_apply juncts_eq_set_juncts_list
[PROOF STATE]
proof (prove)
using this:
nf_ACI \<psi> \<Longrightarrow> nFOR (disjuncts_list \<psi>) = \<psi>
goal (1 subgoal):
1. nf_ACI \<psi> \<Longrightarrow> nFOR (sorted_list_of_set (set (disjuncts_list \<psi>))) = \<psi>
[PROOF STEP]
by (metis finite_set finite_sorted_distinct_unique nf_ACI_D(1,3) sorted_list_of_set) |
State Before: 𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
⊢ tangentConeAt 𝕜 s x ⊆ tangentConeAt 𝕜 t x State After: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ y ∈ tangentConeAt 𝕜 t x Tactic: rintro y ⟨c, d, ds, ctop, clim⟩ State Before: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ y ∈ tangentConeAt 𝕜 t x State After: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ ∀ᶠ (n : ℕ) in atTop, x + d n ∈ t Tactic: refine' ⟨c, d, _, ctop, clim⟩ State Before: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ ∀ᶠ (n : ℕ) in atTop, x + d n ∈ t State After: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
this : Tendsto (fun n => x + d n) atTop (𝓝[t] x)
⊢ ∀ᶠ (n : ℕ) in atTop, x + d n ∈ t
case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun n => x + d n) atTop (𝓝[t] x) Tactic: suffices : Tendsto (fun n => x + d n) atTop (𝓝[t] x) State Before: case intro.intro.intro.intro
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
this : Tendsto (fun n => x + d n) atTop (𝓝[t] x)
⊢ ∀ᶠ (n : ℕ) in atTop, x + d n ∈ t
case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun n => x + d n) atTop (𝓝[t] x) State After: case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun n => x + d n) atTop (𝓝[t] x) Tactic: exact tendsto_principal.1 (tendsto_inf.1 this).2 State Before: case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun n => x + d n) atTop (𝓝[t] x) State After: case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun a => x + d a) atTop (𝓝 x) Tactic: refine' (tendsto_inf.2 ⟨_, tendsto_principal.2 ds⟩).mono_right h State Before: case this
𝕜 : Type u_2
inst✝⁶ : NontriviallyNormedField 𝕜
E : Type u_1
inst✝⁵ : NormedAddCommGroup E
inst✝⁴ : NormedSpace 𝕜 E
F : Type ?u.28939
inst✝³ : NormedAddCommGroup F
inst✝² : NormedSpace 𝕜 F
G : Type ?u.29029
inst✝¹ : NormedAddCommGroup G
inst✝ : NormedSpace ℝ G
x y✝ : E
s t : Set E
h : 𝓝[s] x ≤ 𝓝[t] x
y : E
c : ℕ → 𝕜
d : ℕ → E
ds : ∀ᶠ (n : ℕ) in atTop, x + d n ∈ s
ctop : Tendsto (fun n => ‖c n‖) atTop atTop
clim : Tendsto (fun n => c n • d n) atTop (𝓝 y)
⊢ Tendsto (fun a => x + d a) atTop (𝓝 x) State After: no goals Tactic: simpa only [add_zero] using tendsto_const_nhds.add (tangentConeAt.lim_zero atTop ctop clim) |
The product of two monomials is a monomial. |
-- Local Variables:
-- idris-packages: ("lightyear" "contrib")
-- End:
-- Quick and dirty bot
import Lightyear.Strings
import Network.Socket
import Prelude.Strings
import System
import Control.Monad.State
import Control.Monad.Trans
import MessyBot.Types
import MessyBot.IRCParsers
import MessyBot.NetworkUtils
Bot : Type -> Type
Bot = StateT BotState IO
action : String -> Bot String
action s = pure $ "\001ACTION " ++ s ++ "\001"
actionl : List String -> Bot (List String)
actionl s = traverse action s
-- Bot behaviour (i.e. commands) is defined below
||| A command (prefixed channel message) processor.
||| Returns a message to send back to that channel.
||| @u User nick
||| @m Message words
commandMessage : (u: String) -> (m: List String) -> Bot String
commandMessage u ["inc"] = do
bs <- get
put $ record { counter = counter bs + 1 } bs
pure $ u ++ ": " ++ show (counter bs)
commandMessage _ ["slap", nick] = action $ "slaps " ++ nick
commandMessage _ _ = pure ""
||| IRC actions processor.
||| Returns a list of messages to react on an action.
||| @u User nick
||| @a Action words
actionMessages : (u: String) -> (a: List String) -> Bot (List String)
actionMessages u ["slaps", u2] = actionl ["slaps " ++ u]
actionMessages u ["hugs", u2] = actionl ["hugs " ++ u]
actionMessages _ _ = pure []
-- The rest is mostly bootstrap
||| Channel messages processor, returns a list of messages to send
||| @u User nick
||| @c Channel name
||| @m A received message
chanMessages : (u: String) -> (c: String) -> (m: String) -> Bot (List Message)
chanMessages u c m = case (strHead m) of
',' => do
r <- commandMessage u (words $ strTail m)
case r of
"" => pure []
msg => pure $ cmsgl c msg
_ => case (isPrefixOf "\001ACTION " m && isSuffixOf "\001" m) of
False => pure []
True => case (init' $ drop 8 $ unpack m) of
Nothing => pure []
Just l => do
msgs <- actionMessages u $ words $ pack l
case msgs of
[] => pure []
l => pure $ map (cmsg c) msgs
||| Parsed IRC messages processor, returns a list of messages to send.
botMessages : Message -> Bot (List Message)
botMessages (Msg _ (Right 376) _) = do
bs <- get
pure $ map (msg "JOIN" . pure) $ channels bs
botMessages (Msg _ (Left "PING") p) = pure $ msgl "PONG" p
botMessages m@(Msg (Just (User u)) (Left "PRIVMSG") [c,msg]) = do
bs <- get
if c `elem` (channels bs)
then chanMessages u c msg
else return []
botMessages m = pure []
||| Raw IRC messages processor: simply parses them and passes to
||| botMessages. Returns a list of messages to send.
||| @s Raw message.
botLines : (s: String) -> Bot (List String)
botLines s = case (parse message s) of
Left err => do
lift $ putStrLn err
return []
Right msg => map (map show) (botMessages msg)
||| Main loop
||| @s A connected socket
botLoop : (s: Socket) -> Bot ()
botLoop s = do
line <- lift $ recvTill s "\r\n"
case line of
Left err => lift $ putStrLn $ "Socket error on recv: " ++ show err
Right str => do
lift $ putStr $ "< " ++ str
lines <- botLines str
r <- lift $ traverse (sendLine s) lines
-- might be worthwhile to check results here
botLoop s
||| Entry point: processes args, connects, runs botLoop
main : IO ()
main = do
args <- getArgs
case args of
(prog::host::port::n::u::ui::c) => do
sock <- tcpConnect host (cast port)
case sock of
Nothing => putStrLn "Failed to connect"
Just s => do
lines <- pure (the (List String) [show $ msg "NICK" [n], show $ msg "USER" [u, "*", "*", ui]])
traverse (sendLine s) lines
runStateT (botLoop s) $ record { channels = c } defaultState
return ()
_ => putStrLn "Arguments: <server> <port> <nick> <user> <user info> [#channel1 #channel2...]"
|
proposition Hurwitz_no_zeros: assumes S: "open S" "connected S" and holf: "\<And>n::nat. \<F> n holomorphic_on S" and holg: "g holomorphic_on S" and ul_g: "\<And>K. \<lbrakk>compact K; K \<subseteq> S\<rbrakk> \<Longrightarrow> uniform_limit K \<F> g sequentially" and nonconst: "\<not> g constant_on S" and nz: "\<And>n z. z \<in> S \<Longrightarrow> \<F> n z \<noteq> 0" and "z0 \<in> S" shows "g z0 \<noteq> 0" |
(*
Copyright (C) 2017 M.A.L. Marques
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*)
(* type: work_gga_x *)
a1 := 1.647127:
a2 := 0.980118:
a3 := 0.017399:
b1 := 1.523671:
b2 := 0.367229:
b3 := 0.011282:
f0 := s -> (1 + a1*s^2 + a2*s^4 + a3*s^6)/(1 + b1*s^2 + b2*s^4 + b3*s^6):
f := x -> f0(X2S*x): |
library(ISLR)
Hitters=na.omit(Hitters)
library(leaps)
regfit.full=regsubsets (Salary~.,Hitters)
regfit.full=regsubsets (Salary~.,data=Hitters ,nvmax=19)
reg.summary=summary(regfit.full)
par(mfrow=c(2,2))
plot(reg.summary$rss ,xlab="Number of Variables ",ylab="RSS", type="l")
plot(reg.summary$adjr2 ,xlab="Number of Variables ", ylab="Adjusted RSq",type="l")
points(11,reg.summary$adjr2[11], col="red",cex=2,pch=20)
#foward and backward stepwise selection
regfit.fwd=regsubsets (Salary~.,data=Hitters ,nvmax=19, method="forward")
summary(regfit.fwd)
regfit.bwd=regsubsets (Salary~.,data=Hitters ,nvmax=19, method="backward")
summary(regfit.bwd)
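# regsubsets objects have no predict() method, so define one: rebuild the model matrix
# from the formula stored in the call and multiply by the coefficients of the id-th model.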
predict.regsubsets =function (object ,newdata ,id,...){
form=as.formula(object$call [[2]])
mat=model.matrix(form,newdata)
coefi=coef(object ,id=id)
xvars=names(coefi)
mat[,xvars]%*%coefi
}
k=10
set.seed(1)
folds=sample(1:k,nrow(Hitters),replace=TRUE)
cv.errors=matrix(NA,k,19, dimnames =list(NULL , paste(1:19)))
for(j in 1:k){
best.fit=regsubsets (Salary~.,data=Hitters[folds!=j,],nvmax=19)
for(i in 1:19){
pred=predict(best.fit ,Hitters[folds==j,],id=i)
cv.errors[j,i]=mean( (Hitters$Salary[folds==j]-pred)^2)
}
}
mean.cv.errors=apply(cv.errors ,2,mean)
mean.cv.errors
par(mfrow=c(1,1))
plot(mean.cv.errors ,type="b")
#Why use the full dataset after the CV?
#https://stats.stackexchange.com/questions/11602/training-with-the-full-dataset-after-cross-validation
reg.best=regsubsets (Salary~.,data=Hitters , nvmax=19)
coef(reg.best ,11)
#RIDGE AND LASSO REGRESSION
x=model.matrix(Salary~.,Hitters)[,-1]
y=Hitters$Salary
library(glmnet)
#ridge
grid=10^seq(10,-2, length =100)
ridge.mod=glmnet(x,y,alpha=0,lambda=grid)
set.seed(1)
train=sample(1:nrow(x), nrow(x)/2)
test=(-train)
y.test=y[test]
set.seed(1)
cv.out=cv.glmnet(x[train ,],y[train],alpha=0)
plot(cv.out)
bestlam=cv.out$lambda.min
bestlam
#after findingnthe best RIDGE lanbda, we refit the model using
#the whole dataset
out=glmnet(x,y,alpha=0)
predict(out,type="coefficients",s=bestlam)[1:20,]
#LASSO
lasso.mod=glmnet(x[train ,],y[train],alpha=1,lambda=grid)
plot(lasso.mod)
set.seed(1)
cv.out=cv.glmnet(x[train ,],y[train],alpha=1)
plot(cv.out)
bestlam=cv.out$lambda.min
lasso.pred=predict(lasso.mod,s=bestlam ,newx=x[test ,])
mean((lasso.pred-y.test)^2)
out=glmnet(x,y,alpha=1,lambda=grid)
lasso.coef=predict(out,type="coefficients",s=bestlam)[1:20,]
lasso.coef
library(pls)
set.seed(2)
pcr.fit=pcr(Salary~., data=Hitters ,scale=TRUE, validation ="CV")
summary(pcr.fit)
validationplot(pcr.fit,val.type="MSEP")
set.seed(1)
pcr.fit=pcr(Salary~., data=Hitters ,subset=train ,scale=TRUE,validation ="CV")
validationplot(pcr.fit,val.type="MSEP")
pcr.pred=predict(pcr.fit ,x[test ,],ncomp=7)
mean((pcr.pred-y.test)^2)
#Finally, we fit PCR on the full data set,
#using M = 7, the number of
#components identified by cross-validation.
pcr.fit=pcr(y~x,scale=TRUE,ncomp=7)
summary(pcr.fit)
#PLS
set.seed(1)
pls.fit=plsr(Salary~., data=Hitters ,subset=train ,scale=TRUE ,validation ="CV")
summary(pls.fit)
pls.pred=predict(pls.fit ,x[test ,],ncomp=2)
mean((pls.pred-y.test)^2)
#Finally, we perform PLS using the full data set, using M = 2, the number
#of components identified by cross-validation.
pls.fit=plsr(Salary~., data=Hitters ,scale=TRUE,ncomp=2)
summary(pls.fit)
|
\documentclass[10pt,twocolumn,letterpaper]{article}
\usepackage{cvpr}
\usepackage{times}
\usepackage{epsfig}
\usepackage{graphicx}
\usepackage{color}
\usepackage{transparent}
\usepackage{caption}
\usepackage{subcaption}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{mathtools}
\newcommand{\norm}[1]{\left\lVert #1 \right\rVert}
\DeclarePairedDelimiter\abs{\lvert}{\rvert}
% Include other packages here, before hyperref.
% If you comment hyperref and then uncomment it, you should delete
% egpaper.aux before re-running latex. (Or just hit 'q' on the first latex
% run, let it finish, and you should be clear).
%\usepackage[pagebackref=true,breaklinks=true,letterpaper=true,colorlinks,bookmarks=false]{hyperref}
\cvprfinalcopy % *** Uncomment this line for the final submission
\def\cvprPaperID{} % *** Enter the 3DV Paper ID here
\def\httilde{\mbox{\tt\raisebox{-.5ex}{\symbol{126}}}}
% Pages are numbered in submission mode, and unnumbered in camera-ready
%\ifcvprfinal\pagestyle{empty}\fi
\setcounter{page}{1}
\begin{document}
%%%%%%%%% TITLE
\title{Structure from Motion with RGB-D}
\author{Seonwook Park \\
ETH Zurich \\
{\tt\small [email protected]}
% For a paper whose authors are all at the same institution,
% omit the following lines up until the closing ``}''.
% Additional authors and addresses can be added with ``\and'',
% just like the second author.
% To save space, use either the email address or home page, not both
\and
Yifan Wang \\
ETH Zurich \\
{\tt\small [email protected]}
}
\maketitle
%\thispagestyle{empty}
%%%%%%%%% ABSTRACT
\begin{abstract}
A structure from motion pipeline is constructed using C++, OpenCV, Ceres
solver, and PCL. This includes the acquisition of data, extraction and
matching of features, estimation of pairwise camera pose using depth data,
transformation of estimates into a global coordinate frame, and performing
bundle adjustment on all data. This pipeline incorporates depth data taken
from a Microsoft Kinect into pairwise camera pose estimation to improve the
final 3D reconstruction, which simplifies the conventional pipeline and improves the robustness of the result. Furthermore, the inclusion of depth data in the bundle adjustment step was attempted and compared with the result obtained using a standard cost function.
\end{abstract}
%%%%%%%%% BODY TEXT
\section{Introduction}
Structure from Motion (SfM) concerns the recreation of a real environment
through the inferring of motion from images \cite{varga2008practical}. (1)
First, a 3D object or environment is imaged from multiple different
perspectives. (2) In each image, it is possible to find keypoints via feature
detection algorithms such as SIFT and SURF, and calculate descriptors with which
correspondences can be found in other images. (3) After finding corresponding
pairs, the relative pose of each camera can be inferred in various ways.
With early implementations of SfM, keypoints are projected into 3D space and
optimisation algorithms such as RANSAC and Levenberg-Marquardt are used to find
the homography or perspective transformation between two image planes. With the
advent of low-cost commercial depth sensing cameras, it is now possible to
incorporate depth information in hopes to improve or speed this procedure up. An
example is the Microsoft Kinect, which casts and reads an infra-red pattern
which is used to calculate depth values per pixel imaged \cite{zhang2012microsoft}.
For problems of corresponding 2D points (image plane) and 3D points (using depth
values), one can use PnP or Perspective-n-Point algorithms \cite{d2013p3p}. A P3P
algorithm uses a minimum of 3 data points to infer the relative pose between a
pair of cameras. There are optimised PnP algorithms available, which when
coupled with RANSAC can discard outliers and estimate relative camera pose
accurately \cite{lepetit2009epnp}.
Once an initial estimate for relative camera pose is made, this can be optimised
via bundle adjustment, an optimisation step operated on sparse keypoints from
the perspective of all cameras. To do so, (4) pairwise camera pose estimates
need to be transformed into a single coordinate system. This is achieved through
the construction of a minimum spanning tree with cameras as nodes and the
existence of corresponding pairs as edges. When camera pose estimates are
relative to a single coordinate system, it is possible to project keypoints into
the coordinate system from the perspective of each camera. Minimising this
reprojection is the (5) bundle adjustment step.
A final model can be constructed by projecting all known data into 3D space
using final camera pose estimates. In such a way a dense 3D reconstruction is
possible. The alignment of RGB-D data in the final reconstruction is
representative of the SfM pipeline's performance.
This study concerns the use of depth data in hopes to improve the accuracy of
the final 3D reconstruction output. This is done both in the pairwise camera
pose estimation step as well as the bundle adjustment step where depth data
is introduced into the minimisation problems.
%-------------------------------------------------------------------------
\section{Methodology}
\begin{figure*}[ht]
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/pipeline.pdf}
\end{center}
\caption{Overview of Structure from Motion pipeline}
\label{fig:pipeline}
\end{figure*}
As outlined in the previous sections, our structure from motion pipeline is
composed of 5 main steps as shown in figure \ref{fig:pipeline}.
\begin{enumerate}
\item Data acquisition
\item Feature detection and matching
\item Pairwise camera pose estimation
\item Transform to global coordinate system
\item Bundle adjustment
\end{enumerate}
It is important to note that the images acquired in this case are RGB-D and are
captured using a Microsoft Kinect (first generation).
The pipeline is implemented in C++. Third party libraries used include OpenCV
3.0, Ceres Solver and PCL 1.8.
%-------------------------------------------------------------------------
\subsection{Data acquisition}
\begin{figure*}
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/BeersNMore_panorama.jpg}
\end{center}
\caption{Panorama of BeersNMore interior, displaying a high number of features}
\label{fig:panorama}
\end{figure*}
OpenNI and OpenCV are used to acquire RGB images and depth maps from a Kinect.
The two output images are stored with the same timestamps. The code used for
this step is provided by our supervisor, Bernhard Zeisl.
It is worth noting that the Kinect returns depth in mm in range $[0, 10000]$
which is stored as a 16-bit unsigned integer. Camera parameters are taken from
\cite{smisek20133d} and used in methods in the following steps.
While acquiring data, we attempt to find areas with sufficient potential
features and try to maximise the overlap between each shot to retain enough
correspondences.
This resulted in the BeersNMore dataset, taken at a craft beer shop on
Universit\"atstrasse. The interior of the shop (as seen in figure \ref{fig:panorama}) exhibits
numerous unique and repeating features in the form of labelled beer bottles,
boxes, and crates. The dataset consists of 229 RGB and depth images.
The final reconstruction uses pixels with depth data in the range
$\left[0.4,8\right]$m. This is the range in which depth information
is reliable, as studied by Newcombe \etal \cite{newcombe2011kinectfusion}.
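As an illustration of this preprocessing step (a Python sketch, not the
acquisition code itself), the raw 16-bit depth values can be converted to
metres and masked to the reliable range as follows:
\begin{verbatim}
# Sketch only: raw Kinect depth is 16-bit, in mm.
import numpy as np

def reliable_depth_m(depth_raw_mm):
    depth_m = depth_raw_mm.astype(np.float32) / 1000.0
    mask = (depth_m >= 0.4) & (depth_m <= 8.0)
    return np.where(mask, depth_m, 0.0)
\end{verbatim}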
%-------------------------------------------------------------------------
\subsection{Feature detection and matching}\label{sec:extractfeature}
For each given RGB image, SIFT features are found, and 128-dimensional
descriptors are calculated. Standard OpenCV parameters are used for this step.
In each image, around 1000 to 2000 features are found.
A matching algorithm is then run between all potential image pairs. This is
an $\mathcal{O}(n^2)$ operation. The matcher computes the Euclidean distances of the $i$-th descriptor in one image to all descriptors in the other image and returns the descriptor with minimum distance as the match to descriptor $i$. This produces numerous
incorrect matches, often visible by the violation of epipolar geometry.
The matching is therefore performed in a bi-directional manner, both from image
$i$ to $j$, and $j$ to $i$, so that only consistent matches are kept. This results in a better sample of matches. RANSAC
can be used in the camera registration step to further eliminate outliers.
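For illustration, the bi-directional (cross-checked) matching described above
can be sketched with OpenCV's Python bindings as follows; this is not our exact
C++ implementation, and the SIFT constructor name depends on the OpenCV build:
\begin{verbatim}
# Sketch only; assumes a build with SIFT available.
import cv2

sift = cv2.SIFT_create()  # cv2.xfeatures2d.SIFT_create() on 3.x
kp1, des1 = sift.detectAndCompute(img1, None)
kp2, des2 = sift.detectAndCompute(img2, None)

# crossCheck=True keeps only mutually consistent matches
bf = cv2.BFMatcher(cv2.NORM_L2, crossCheck=True)
matches = bf.match(des1, des2)
\end{verbatim}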
%-------------------------------------------------------------------------
\subsection{Pairwise camera pose estimation}
The previous step yields a list of image pairs which have matching features.
As in equation \ref{eqn:backprojection}, given the camera matrix $K$, the discovered features $m_{i} = (u,v)^{T}$ from image $i$ can be projected into 3D space
using its depth map values $\rho$. These 3D points are then reprojected into image $j$ (see figure \ref{fig:reg}). The 3D points $P_{i}$ and their corresponding 2D matches $m^i_{j}$ in image $j$ are fed into OpenCV's EPnP solver, which computes the pose of camera $i$ w.r.t.\ camera $j$, $T_{ji} = \left(R_{ji}, t_{ji}\right)$, by minimising the reprojection error $e$, as defined in equation \ref{eqn:reprojerror}, in a RANSAC manner.
\begin{align}
P &= \rho K^{-1} \begin{pmatrix}
u\\v\\1
\end{pmatrix} \label{eqn:backprojection}\\
e &= \sqrt{\norm{\pi\left(P_{i};R_{ji},t_{ji}\right)-m^i_{j}}^2},\label{eqn:reprojerror}
\end{align}
where $\pi$ denotes the projection function \[
\rho
\begin{pmatrix}
\pi\left(P;R,t\right)\\1
\end{pmatrix} = KR\left(P+t\right).
\]
\begin{figure}[t]
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/registration.pdf}
\end{center}
\caption{Pairwise camera pose registration. Features of camera $i$ are unprojected into 3D space and the reprojection errors (red) are minimised.}
\label{fig:reg}
\end{figure}
The mentioned solver also identifies outlier matches via RANSAC. Only inliers
are retained to improve any further optimisations. To ensure that only good
image pairs are retained, we also filter these image pairs based on an absolute
minimum number of inliers of 30. If two images have fewer than 30 feature matches
which are inliers in the registration process, it is assumed that the image pair
is not good enough for subsequent steps.
Similar to the feature matching step, the PnP solver is run in both directions,
projecting 3D points from image $i$ into image $j$, and projecting 3D points
from image $j$ into image $i$. This allows for two things: (1) validation of the registration result and (2) the averaging of
pose estimates to improve accuracy. In particular, the registration of a camera pair $(i,j)$ is considered successful only when the rotation vectors $r_{ij}$ and $r_{ji}$ from the bi-directional registration are anti-parallel (equation \ref{eqn:check1}) and have similar magnitude (equation \ref{eqn:check2}).
\begin{align}
\norm{\frac{r_{ij}}{\norm{r_{ij}}} + \frac{r_{ji}}{\norm{r_{ji}}}} < 0.2
\label{eqn:check1}\\
\abs{\norm{r_{ij}} - \norm{r_{ji}}} < 0.2\label{eqn:check2}
\end{align}
The final output from this step is a list of camera pairs which are deemed to
have good matching features, and associated pairwise camera pose estimates.
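As an illustration (a Python sketch, not our C++ code), the registration of one
image pair essentially amounts to a call of the following form, where
\texttt{object\_points} are the back-projected 3D keypoints of image $i$ and
\texttt{image\_points} their 2D matches in image $j$; the threshold values are
illustrative:
\begin{verbatim}
# Sketch only: EPnP inside a RANSAC loop via OpenCV.
import cv2

ok, rvec, tvec, inliers = cv2.solvePnPRansac(
    object_points, image_points, K, None,
    flags=cv2.SOLVEPNP_EPNP,
    reprojectionError=8.0,
    iterationsCount=100)
\end{verbatim}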
%-------------------------------------------------------------------------
\subsection{Transform to global coordinate system}
The final goal of this SfM pipeline is to combine the data from all images
acquired. To do so, previously acquired pairwise camera pose estimates must
be transformed into a single coordinate frame.
\begin{figure}[t]
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/spanning_tree.eps}
\end{center}
\caption{An example of a minimum spanning tree in terms of this pipeline}
\label{fig:spanning}
\end{figure}
The 0th camera is selected to be the reference coordinate frame. A breadth-first
algorithm is used to construct a minimum spanning tree with cameras as nodes and
the existence of camera pose estimate as edges (whether an image pair exists).
An example of such a spanning tree structure can be seen in figure
\ref{fig:spanning}.
The spanning tree can be walked to calculate camera pose estimates relative to
the 0th camera as in equation \ref{eqn:globalpose}. Having the global poses, one can obtain the 3D keypoints in the global frame using equation \ref{eqn:globalpoint}, where $P^{i}_{k}$ denotes the $i$-th keypoint in the $k$-th camera frame and $P^{i}_{g,k}$ denotes this point transformed into the global frame.
\begin{align}\label{eqn:globalpose}
R_{k} &= R_{kj}R_{ji}\ldots R_{0}\nonumber\\
t_{k} &= t_{kj}+t_{ji}+\ldots+t_{0}\\
P^{i}_{g,k} &= R_{k}^T P^{i}_{k} - t_{k} \label{eqn:globalpoint}
\end{align}
\begin{figure}[t]
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/clusters.eps}
\end{center}
\caption{An example of a cluster of keypoint pose estimations}
\label{fig:clusters}
\end{figure}
The outcome of this step needs to be a cloud of keypoints and camera pose
estimates in global coordinate frame. However, each keypoint is observed by a
minimum of two cameras, resulting in a cluster of keypoint coordinate estimates.
This is illustrated in figure \ref{fig:clusters}.
The centre of mass (CoM) of this cluster is calculated by averaging the
coordinate estimations as in equation \ref{eqn:CoM} \cite{reckerdepth}. This results in a single keypoint coordinate estimate,
and consequently an initial point cloud of sparse features.
\begin{equation}\label{eqn:CoM}
P^{i}_{g} = \frac{1}{N_i}\sum_{k=1}^{N_i}{P^{i}_{g,k}},
\end{equation}
where $N_i$ is the number of cameras in which keypoint $i$ is observed.
%-------------------------------------------------------------------------
\subsection{Bundle adjustment}
With global keypoint and camera pose estimates, it is now possible to perform a global optimisation known as bundle adjustment. This step takes all estimated 3D points and camera poses into consideration and minimises a predefined cost function that reflects the reconstruction quality. Conventionally, the total reprojection error (as in equation \ref{eqn:baNoD}) is used as the cost function.
\begin{equation}\label{eqn:baNoD}
\sum_{R_k,t_k}\sum_{P_i}{\norm{\pi\left(P_i;R_k,t_k\right)-m^i_{k}}}^2,
\end{equation}
To this end, having additional depth values for all 3D points, we altered the cost function to exploit this information in the hope of improving the reconstruction quality. As can be seen in figure \ref{fig:baCost} and equation \ref{eqn:baD}, the relative error in depth is incorporated as an extra term, where $d^i_k$ and $\rho^i_k$ denote the measured and estimated depth value respectively.
\begin{equation}\label{eqn:baD}
\sum_{R_k,t_k}\sum_{P_i}{\norm{\pi\left(P_i;R_k,t_k\right)-m^i_{k}}^2+\left\lvert\dfrac{d^i_k-\rho^i_k}{d^i_k}\right\rvert ^2}
\end{equation}
To improve the robustness of this optimisation, the Cauchy loss function $\rho(s) = \log(1+s)$ is utilised, which decreases the weight of outliers in the cost function so that they do not overly influence the final solution.
The results and a comparison of both cost functions are shown in section \ref{sec:result}.
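For reference, a minimal sketch of the per-observation residual of equation
\ref{eqn:baD} is given below, written with SciPy's least-squares interface in
mind as a stand-in for Ceres; the \texttt{project} helper and the parameter
packing are assumed:
\begin{verbatim}
# Sketch only; project() and parameter layout are assumed.
import numpy as np

def residual(P, R, t, m_obs, d_obs, K):
    uv, rho = project(P, R, t, K)  # pixel and depth estimate
    r_px = uv - m_obs              # reprojection error (2 terms)
    r_d = (d_obs - rho) / d_obs    # relative depth error (1 term)
    return np.concatenate([r_px, [r_d]])
\end{verbatim}
SciPy's \texttt{least\_squares} accepts \texttt{loss='cauchy'}, mirroring the
robust loss used in our Ceres setup.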
\begin{figure}[t]
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/ba.pdf}
\end{center}
\caption{Visualization of error terms in cost function of bundle adjustment}
\label{fig:baCost}
\end{figure}
%-------------------------------------------------------------------------
\section{Results and discussion}\label{sec:result}
\begin{figure*}
\centering
\begin{subfigure}[b]{1.0\columnwidth}
\includegraphics[width=\textwidth]{figures/result_small_noBA.png}
\caption{Before BA}
\label{fig:beforeBA}
\end{subfigure}
\hfill
\begin{subfigure}[b]{1.0\columnwidth}
\includegraphics[width=\textwidth]{figures/result_small_BA_noD.png}
\caption{After BA}
\label{fig:afterBA}
\end{subfigure}
\caption{3D reconstruction using a 37-image subset of data. An improvement in
reconstruction quality can be seen after the bundle adjustment step.}
\label{fig:3Dsmall}
\end{figure*}
The pipeline works very well on small datasets which cover an area well with
numerous overlapping shots. The resulting dense reconstruction is of high
quality and alignment is noticeably improved with bundle adjustment. This can be
seen in figures \ref{fig:beforeBA} and \ref{fig:afterBA} where a rough initial
estimate is refined through bundle adjustment.
\begin{figure*}
\centering
\begin{subfigure}[b]{1.0\columnwidth}
\includegraphics[width=\textwidth]{figures/result_large_BA_noD.png}
\caption{BA without depth}
\label{fig:BA_noD}
\end{subfigure}
\hfill
\begin{subfigure}[b]{1.0\columnwidth}
\includegraphics[width=\textwidth]{figures/result_large_BA_D.png}
\caption{BA with depth}
\label{fig:BA_withD}
\end{subfigure}
\caption{3D reconstruction using the full dataset (226 images). With the depth
term added to the bundle adjustment step, the reconstruction is no longer
successful. It can also be seen that the reconstruction quality drops for the
larger dataset.}
\label{fig:3Dlarge}
\end{figure*}
However, it performs less well when some correspondences are weak, which
prevents the final spanning tree from covering all cameras. This results in a
partial reconstruction of the scene. This is also evident when there are
sufficient correspondences for the spanning tree construction to propagate,
but not enough for a good estimate of camera pose to be made. This results in a
lower quality reconstruction (figure \ref{fig:3Dlarge}).
The lack of correspondences is not only due to issues in data acquisition, but
also due to varying lighting conditions. For example, the light bleeding from
the beer fridges and windows caused feature matches to succeed less well.
We attempt to use depth data in our bundle adjustment step. This was in hopes of
improving the final reconstruction accuracy. It can be seen however in figures
\ref{fig:BA_noD} and \ref{fig:BA_withD} that using depth data can sometimes
prevent the bundle adjustment step from executing successfully. This could be
due to the limited resolution of depth data compared to reprojection errors.
\begin{figure}
\begin{center}
\includegraphics[width=0.9\linewidth]{figures/result_tiny_noBA.png}
\end{center}
\caption{3D reconstruction result using just 6 images}
\label{fig:3D_6images}
\end{figure}
Nonetheless, the pipeline works well in general, and does not require specific
parameters and is thus general purpose. Even with as few as 6 images, an
accurate dense reconstruction is possible (figure \ref{fig:3D_6images}) and this
can be extended to larger areas with more images provided sufficient
correspondences.
%-------------------------------------------------------------------------
\section{Conclusion}
A Structure from Motion pipeline taking advantage of both RGB and depth
information was successfully implemented. This involved all steps starting
from the acquisition of data to the visualisation of the final 3D model. A
good understanding of all parts of the pipeline as well as associated technology
was required.
One of the goals of this project was to investigate the improvements which can
be made by incorporating the depth information provided by the Kinect. We can
show that the camera registration step works quite well with depth data, as can
be seen in the final reconstructions. It cannot be said however that depth data
can improve the bundle adjustment step. Our initial assessment shows that the
low resolution of depth data (11 bits, i.e.\ $2^{11}$ discrete levels) can result in a degradation in
quality of the final model, but this may require more analysis.
It can be claimed that incorporating depth data into the pipeline makes it more
robust. This is because depth data is used in multiple steps such as camera
registration and the finding of centre-of-mass of keypoint pose clusters instead
of periodic bundle adjustment. A single bundle adjustment step is enough to
produce a high quality final model.
One assessment which may be helpful is the comparison of our 2D-3D registration
method against a more conventional 2D-2D registration as well as 3D-3D
registration using depth data from both images in a pair. This would further
inform us whether depth data is helpful in structure from motion and in which
steps. We propose this as future work in this area.
%-------------------------------------------------------------------------
\section*{Work distribution}
The authors worked together weekly and thus shared workload fairly.
Park focused more on infrastructure, the spanning tree, and visualisation, while Wang
focused more on camera registration, cluster finding, and bundle adjustment.
Data acquisition was done with cooperation from Jonathan, the owner of the
craft beer shop BeersNMore.
We thank Pavol Vyhlidal, An-phi Nguyen, and Federico Danieli for their support
and enthusiasm.
%-------------------------------------------------------------------------
{\small
\bibliographystyle{ieeetr}
\bibliography{egbib}
}
\end{document}
|
module functor where
open import level
record Functor {ℓ : Level} (F : Set ℓ → Set ℓ) : Set (lsuc ℓ) where
constructor mkFunc
field
fmap : ∀{A B : Set ℓ} → (A → B) → F A → F B
open Functor public
|
module Data.Any where
import Lvl
open import Type
-- A type that can hold a value of any type.
record Any {ℓ} : Type{Lvl.𝐒(ℓ)} where
constructor intro
field
{type} : Type{ℓ}
value : type
map : ∀{T : Type{ℓ}} → (type → T) → Any{ℓ}
map f = record{value = f(value)}
-- A type that can hold a value of any type in any universe.
record UniversalAny : Typeω where
constructor intro
field
{level} : Lvl.Level
{type} : Type{level}
value : type
map : ∀{ℓ}{T : Type{ℓ}} → (type → T) → UniversalAny
map f = record{value = f(value)}
|
The Romanian military is undergoing a three-stage restructuring. As of 2007, the first short-term stage was completed (reorganisation of the command system, implementation of the voluntary military service). The year 2015 marks the end of the second stage (operational integration in NATO and EU), while 2025 is the date when the long-term stage is to be completed (full technical integration in NATO and EU). The stages aim at modernising the structure of the armed forces, reducing the personnel as well as acquiring newer and improved technology that is compatible with NATO standards.
|
[STATEMENT]
lemma rcis_cmod_Arg: "rcis (cmod z) (Arg z) = z"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. rcis (cmod z) (Arg z) = z
[PROOF STEP]
by (cases "z = 0") (simp_all add: rcis_def cis_Arg sgn_div_norm of_real_def) |
# Matrix Multiplication
Author: Yoseph K. Soenggoro
```python
from random import random
from itertools import product
import numpy as np
```
```python
# Check your NumPy version (I used 1.16.4).
# If the program is incompatible with your NumPy version, use pip or conda to set the appropriate version
np.__version__
```
'1.16.4'
```python
# Choose the value of n, the dimension for Matrix X and Y
n = 3
# Choose d as the range of random value for Matrix X and Y.
# By choosing value d, the element of Matrix X and Y will be any real number between 0 and d, but never d.
d = 10
```
Before starting to multiply any two matrices, first define two different matrices $X$ and $Y$ using the `random` library.
```python
# Define Matrix X and Matrix Y
X = []
Y = []
for i in range(0, n):
x_row = []
for j in range(0, n):
x_val = random() * d
x_row.append(x_val)
X.append(x_row)
for i in range(0, n):
y_row = []
for j in range(0, n):
y_val = random() * d
y_row.append(y_val)
Y.append(y_row)
```
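For reference, the same construction can be written more compactly with list comprehensions; this is just an equivalent sketch and is not used in the rest of the notebook:
```python
# Equivalent, more compact way to build the two n x n random matrices.
X_alt = [[random() * d for _ in range(n)] for _ in range(n)]
Y_alt = [[random() * d for _ in range(n)] for _ in range(n)]
```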
```python
# Function to print the matrices
def print_matrix(X):
matrix_string = ''
for i, j in product(range(0, n), range(0, n)):
matrix_string += f'{X[i][j]}' + ('\t' if j != n - 1 else '\n')
print(matrix_string)
```
```python
# Print X to Check
print_matrix(X)
```
1.411243489014371 4.823307914767269 2.9619965649396427
8.632221076600207 6.909318166288182 4.297076325685831
6.741265901114342 9.161915445688102 6.166996253979611
```python
# Print Y to Check
print_matrix(Y)
```
7.627644148989169 4.412768013974525 4.55820479886072
7.864788107651906 2.491004876229417 1.2387049762153457
1.6601811042693615 6.587244192044375 9.966139995991005
### Matrix Multiplication Formula (Linear Algebra)
Given a $n \times n$ matrices $X$ and $Y$, as follows:
\begin{align}
X =
\begin{bmatrix}
x_{1, 1} & x_{1, 2} & \dots & x_{1, n} \\
x_{2, 1} & x_{2, 2} & \dots & x_{2, n} \\
\vdots & \vdots & \ddots & \vdots \\
x_{n, 1} & x_{n, 2} & \dots & x_{n, n}
\end{bmatrix}
, \quad
Y =
\begin{bmatrix}
y_{1, 1} & y_{1, 2} & \dots & y_{1, n} \\
y_{2, 1} & y_{2, 2} & \dots & y_{2, n} \\
\vdots & \vdots & \ddots & \vdots \\
y_{n, 1} & y_{n, 2} & \dots & y_{n, n}
\end{bmatrix}
\end{align}
then the multiplication is defined by the following formula:
\begin{align}
X \cdot Y = \left[\sum_{k = 1}^n x_{i, k} \cdot y_{k, j}\right]_{i, j = 1}^n
\end{align}
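For instance, with $n = 2$ the formula gives:
\begin{align}
\begin{bmatrix}
1 & 2 \\
3 & 4
\end{bmatrix}
\cdot
\begin{bmatrix}
5 & 6 \\
7 & 8
\end{bmatrix}
=
\begin{bmatrix}
1 \cdot 5 + 2 \cdot 7 & 1 \cdot 6 + 2 \cdot 8 \\
3 \cdot 5 + 4 \cdot 7 & 3 \cdot 6 + 4 \cdot 8
\end{bmatrix}
=
\begin{bmatrix}
19 & 22 \\
43 & 50
\end{bmatrix}
\end{align}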
### Implementation \#1: Functional Paradigm
The simplest way to implement matrix multiplication is with a modular function that can be used and reused multiple times within a program. Given the formula above, the Python implementation is as follows.
```python
# Function to implement Matrix Multiplication of Matrix X and Y
def matrix_mul(X, Y):
Z = []
for i in range(0, n):
z_row = []
for j in range(0, n):
z_val = 0
for k in range(0, n):
z_val += X[i][k] * Y[k][j]
z_row.append(z_val)
Z.append(z_row)
return Z
```
For the multiplication between $X$ and $Y$, the result will be kept in variable $Z$.
```python
Z = matrix_mul(X, Y)
print_matrix(Z)
```
53.61620859740801 37.75376833274768 41.92706479366027
127.31775885342397 83.60902536916785 90.7313025662656
133.714831698932 93.1935288493318 103.53812885700651
### Check Validity on Matrix Multiplication Function
Despite having a working matrix multiplication implementation in functional form, we still have no idea whether its results are right or wrong. Therefore, one way to validate the result is to compare it against `NumPy`'s `matmul` API.
```python
# Function to compare the Matrix Multiplication Function to NumPy's matmul
def check_matrix_mul(X, Y):
print('Starting Validation Process...\n\n\n')
x = np.array(X)
y = np.array(Y)
z = np.matmul(x, y)
Z = matrix_mul(X, Y)
for i, j in product(range(0, n), range(0, n)):
print(f'Checking index {(i, j)}... \t\t\t {round(z[i][j], 2) == round(Z[i][j], 2)}')
print('\n')
print('Validation Process Completed')
```
```python
a = check_matrix_mul(X, Y)
```
Starting Validation Process...
Checking index (0, 0)... True
Checking index (0, 1)... True
Checking index (0, 2)... True
Checking index (1, 0)... True
Checking index (1, 1)... True
Checking index (1, 2)... True
Checking index (2, 0)... True
Checking index (2, 1)... True
Checking index (2, 2)... True
Validation Process Completed
Since all the checks return True, it can be confirmed that the implementation works successfully.
### Implementation \#2: Object-Oriented Paradigm
Another paradigm that can be used is OOP, or Object-Oriented Programming, which represents a program as a set of objects with various fields and methods for interacting with them. In this case, we first define a generalized form of matrices, known as tensors. The implementation of `Tensor` is as follows; note that it relies on two helper functions, `check_child` and `get_dimension`, that are not defined elsewhere in the notebook (a possible sketch of them is given after the class).
```python
class Tensor:
def __init__(self, X):
validation = self.__checking_validity(X)
self.__dim = 2
self.tensor = X if validation else []
self.__dimension = self.__get_dimension_private(X) if validation else -1
def __get_dimension_private(self, X):
if not check_child(X):
return 1
else:
# Check whether the size of each child are the same
for i in range(0, len(X)):
if not check_child(X[i]):
return self.__dim
else:
get_dimension(X[i])
self.__dim += 1
return self.__dim
def __checking_validity(self, X):
self.__dim = 2
valid = True
if not check_child(X):
return valid
else:
dim_0 = get_dimension(X[0])
# Check whether the size of each child are the same
for i in range(1, len(X)):
self.__dim = 2
if get_dimension(X[i]) != dim_0:
valid &= False
break
return valid
# Getting the Value of Tensor Rank/Dimension (Not to be confused with Matrix Dimension)
def get_dimension(self):
return self.__dimension
```
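The `Tensor` class above calls the free functions `check_child` and `get_dimension`, which never appear in the notebook, so the cell does not run as written. Below is a minimal sketch of what they might look like; this is only an assumption about the intended behaviour (`check_child` testing for one more level of nesting, `get_dimension` returning the nesting depth), not the author's original helpers.
```python
# Hypothetical helpers assumed by the Tensor class above (not part of the
# original notebook). check_child(X) reports whether X is nested one level
# deeper, i.e. is a non-empty list whose first element is itself a list.
# get_dimension(X) returns the nesting depth of X: 0 for a scalar,
# 1 for a flat list, 2 for a list of lists, and so on.
def check_child(X):
    return isinstance(X, list) and len(X) > 0 and isinstance(X[0], list)

def get_dimension(X):
    depth = 0
    while isinstance(X, list) and len(X) > 0:
        depth += 1
        X = X[0]
    return depth
```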
Since tensors are a generalized form of matrices, we can define the `Matrix` class as a child class of `Tensor` with additional methods (some of which override `Tensor`'s original methods). For operators, only the multiplication operator is overridden, for the sake of implementing matrix multiplication; other operators such as `+`, `-`, and `/` are not available in the current implementation.
```python
class Matrix(Tensor):
def __init__(self, X):
super().__init__(X)
self.__matrix_string = ''
def __str__(self):
return self.__matrix_string if self.__check_matrix_validation() else ''
# Check whether the given input X is a valid Matrix
def __check_matrix_validation(self):
valid = True
try:
for i, j in product(range(0, n), range(0, n)):
self.__matrix_string += f'{self.tensor[i][j]}' + ('\t' if j != n - 1 else '\n')
except:
valid = False
print('Matrix is Invalid. Create New Instance with appropriate inputs.')
return valid
# Get Matrix Dimension: Number of Columns and Rows
def get_dimension(self):
print(f'Matrix Dimension: ({len(self.tensor)}, {len(self.tensor[0])})' if self.__check_matrix_validation() else -1)
return [len(self.tensor), len(self.tensor[0])]
# Overriding Multiplication Operator for Matrix Multiplication
# and Integer-Matrix Multiplication
def __mul__(self, other):
if isinstance(other, Matrix):
Z = []
for i in range(0, n):
z_row = []
for j in range(0, n):
z_val = 0
for k in range(0, n):
z_val += self.tensor[i][k] * other.tensor[k][j]
z_row.append(z_val)
Z.append(z_row)
return Matrix(Z)
elif isinstance(other, int):
Z = []
for i in range(0, n):
z_row = []
for j in range(0, n):
z_row.append(self.tensor[i][j] * other)
Z.append(z_row)
return Matrix(Z)
else:
return NotImplemented
# Overriding Reverse Multiplication to support Matrix-Integer Multiplication
def __rmul__(self, other):
if isinstance(other, int):
Z = []
for i in range(0, n):
z_row = []
for j in range(0, n):
z_row.append(self.tensor[i][j] * other)
Z.append(z_row)
return Matrix(Z)
else:
return NotImplemented
```
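The `__mul__`/`__rmul__` overrides above also cover integer scaling, which the notebook does not otherwise exercise; a minimal usage sketch, reusing the `Matrix` class and the matrix `X` defined earlier:
```python
# Scalar (integer) multiplication works from either side thanks to
# __mul__ and __rmul__; both lines print the same matrix with every
# entry doubled.
print(Matrix(X) * 2)
print(2 * Matrix(X))
```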
```python
# Transform X and Y to Matrix Object
x_obj = Matrix(X)
y_obj = Matrix(Y)
# Implement Matrix Multiplication as follows
z_obj = x_obj * y_obj
print(z_obj)
```
53.61620859740801 37.75376833274768 41.92706479366027
127.31775885342397 83.60902536916785 90.7313025662656
133.714831698932 93.1935288493318 103.53812885700651
### Check Validity on Matrix Multiplication using OOP
As in the previous section, we do not yet know whether the results of this implementation are right or wrong, so validation is important. Again, one way to validate the result is to compare it against `NumPy`'s `matmul` API.
```python
# Function to compare the Matrix class multiplication to NumPy's matmul
def check_matrix_mul_oop(X, Y):
print('Starting Validation Process...\n\n\n')
x = np.array(X)
y = np.array(Y)
z = np.matmul(x, y)
Z = Matrix(X) * Matrix(Y)
for i, j in product(range(0, n), range(0, n)):
print(f'Checking index {(i, j)}... \t\t\t {round(z[i][j], 2) == round(Z.tensor[i][j], 2)}')
print('\n')
print('Validation Process Completed')
```
```python
a = check_matrix_mul_oop(X, Y)
```
Starting Validation Process...
Checking index (0, 0)... True
Checking index (0, 1)... True
Checking index (0, 2)... True
Checking index (1, 0)... True
Checking index (1, 1)... True
Checking index (1, 2)... True
Checking index (2, 0)... True
Checking index (2, 1)... True
Checking index (2, 2)... True
Validation Process Completed
Since all the checks return True, it can be confirmed that the implementation works successfully.
# Python Libraries
- [NumPy](https://numpy.org/)
|
Formal statement is: proposition homotopic_with_compose_continuous_left: "\<lbrakk>homotopic_with_canon (\<lambda>f. p (h \<circ> f)) X Y f g; continuous_on Y h; h ` Y \<subseteq> Z\<rbrakk> \<Longrightarrow> homotopic_with_canon p X Z (h \<circ> f) (h \<circ> g)" Informal statement is: If $f$ and $g$ are homotopic maps from $X$ to $Y$ and $h$ is a continuous map from $Y$ to $Z$ such that $h(Y) \subseteq Z$, then $h \circ f$ and $h \circ g$ are homotopic maps from $X$ to $Z$. |
library(quaternions)
q <- Q(1, 2, 3, 4)
q1 <- Q(2, 3, 4, 5)
q2 <- Q(3, 4, 5, 6)
r <- 7.0
display <- function(x){
e <- deparse(substitute(x))
res <- if(class(x) == "Q") paste(x$r, "+", x$i, "i+", x$j, "j+", x$k, "k", sep = "") else x
cat(noquote(paste(c(e, " = ", res, "\n"), collapse="")))
invisible(res)
}
display(norm(q))
display(-q)
display(Conj(q))
display(r + q)
display(q1 + q2)
display(r*q)
display(q*r)
if(display(q1*q2) == display(q2*q1)) cat("q1*q2 == q2*q1\n") else cat("q1*q2 != q2*q1\n")
## norm(q) = 5.47722557505166
## -q = -1+-2i+-3j+-4k
## Conj(q) = 1+-2i+-3j+-4k
## r + q = 8+2i+3j+4k
## q1 + q2 = 5+7i+9j+11k
## r * q = 7+14i+21j+28k
## q * r = 7+14i+21j+28k
## q1 * q2 = -56+16i+24j+26k
## q2 * q1 = -56+18i+20j+28k
## q1*q2 != q2*q1
|
/-
Copyright (c) 2021 Johan Commelin. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Johan Commelin
! This file was ported from Lean 3 source module category_theory.preadditive.functor_category
! leanprover-community/mathlib commit 69c6a5a12d8a2b159f20933e60115a4f2de62b58
! Please do not edit these lines, except to modify the commit id
! if you have ported upstream changes.
-/
import Mathbin.CategoryTheory.Preadditive.Basic
/-!
# Preadditive structure on functor categories
> THIS FILE IS SYNCHRONIZED WITH MATHLIB4.
> Any changes to this file require a corresponding PR to mathlib4.
If `C` and `D` are categories and `D` is preadditive,
then `C ⥤ D` is also preadditive.
-/
open BigOperators
namespace CategoryTheory
open CategoryTheory.Limits Preadditive
variable {C D : Type _} [Category C] [Category D] [Preadditive D]
#print CategoryTheory.functorCategoryPreadditive /-
instance functorCategoryPreadditive : Preadditive (C ⥤ D)
where
homGroup F G :=
{ add := fun α β =>
{ app := fun X => α.app X + β.app X
naturality' := by
intros
rw [comp_add, add_comp, α.naturality, β.naturality] }
zero :=
{ app := fun X => 0
naturality' := by
intros
rw [zero_comp, comp_zero] }
neg := fun α =>
{ app := fun X => -α.app X
naturality' := by
intros
rw [comp_neg, neg_comp, α.naturality] }
sub := fun α β =>
{ app := fun X => α.app X - β.app X
naturality' := by
intros
rw [comp_sub, sub_comp, α.naturality, β.naturality] }
add_assoc := by
intros
ext
apply add_assoc
zero_add := by
intros
ext
apply zero_add
add_zero := by
intros
ext
apply add_zero
sub_eq_add_neg := by
intros
ext
apply sub_eq_add_neg
add_left_neg := by
intros
ext
apply add_left_neg
add_comm := by
intros
ext
apply add_comm }
add_comp := by
intros
ext
apply add_comp
comp_add := by
intros
ext
apply comp_add
#align category_theory.functor_category_preadditive CategoryTheory.functorCategoryPreadditive
-/
namespace NatTrans
variable {F G : C ⥤ D}
/- warning: category_theory.nat_trans.app_hom -> CategoryTheory.NatTrans.appHom is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C), AddMonoidHom.{max u1 u4, u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddMonoid.toAddZeroClass.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G))))) (AddMonoid.toAddZeroClass.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) 
(CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.toAddMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X))))))
but is expected to have type
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C), AddMonoidHom.{max u1 u4, u4} (Quiver.Hom.{succ (max u1 u4), max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X)) (AddMonoid.toAddZeroClass.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, 
u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max (max (max u1 u2) u3) u4} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G))))) (AddMonoid.toAddZeroClass.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X)) (SubNegMonoid.toAddMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D 
(CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u3, succ u4, u1, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u3, u1} C (CategoryTheory.Category.toCategoryStruct.{u3, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u3, u4, u1, u2} C _inst_1 D _inst_2 G) X))))))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_hom CategoryTheory.NatTrans.appHomₓ'. -/
/-- Application of a natural transformation at a fixed object,
as group homomorphism -/
@[simps]
def appHom (X : C) : (F ⟶ G) →+ (F.obj X ⟶ G.obj X)
where
toFun α := α.app X
map_zero' := rfl
map_add' _ _ := rfl
#align category_theory.nat_trans.app_hom CategoryTheory.NatTrans.appHom
/- warning: category_theory.nat_trans.app_zero -> CategoryTheory.NatTrans.app_zero is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (OfNat.ofNat.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) 0 (OfNat.mk.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) 0 (Zero.zero.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Limits.HasZeroMorphisms.hasZero.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Limits.CategoryTheory.Functor.hasZeroMorphisms.{u3, u1, u4, u2} C _inst_1 D _inst_2 (CategoryTheory.Preadditive.preadditiveHasZeroMorphisms.{u4, u2} D _inst_2 _inst_3)) F G)))) X) (OfNat.ofNat.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) 0 (OfNat.mk.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) 0 (Zero.zero.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Limits.HasZeroMorphisms.hasZero.{u4, u2} D _inst_2 
(CategoryTheory.Preadditive.preadditiveHasZeroMorphisms.{u4, u2} D _inst_2 _inst_3) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)))))
but is expected to have type
forall {C : Type.{u1}} {D : Type.{u3}} [_inst_1 : CategoryTheory.Category.{u2, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u3} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u3} D _inst_2] {F : CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2} (X : C), Eq.{succ u4} (Quiver.Hom.{succ u4, u3} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u2, u4, u1, u3} C _inst_1 D _inst_2 F G (OfNat.ofNat.{max u1 u4} (Quiver.Hom.{max (succ u1) (succ u4), max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u2, u4, u1, u3} C _inst_1 D _inst_2))) F G) 0 (Zero.toOfNat0.{max u1 u4} (Quiver.Hom.{max (succ u1) (succ u4), max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u2, u4, u1, u3} C _inst_1 D _inst_2))) F G) (CategoryTheory.Limits.HasZeroMorphisms.Zero.{max u1 u4, max (max (max u1 u3) u2) u4} (CategoryTheory.Functor.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u2, u4, u1, u3} C _inst_1 D _inst_2) (CategoryTheory.Limits.instHasZeroMorphismsFunctorCategory.{u2, u1, u4, u3} C _inst_1 D _inst_2 (CategoryTheory.Preadditive.preadditiveHasZeroMorphisms.{u4, u3} D _inst_2 _inst_3)) F G))) X) (OfNat.ofNat.{u4} (Quiver.Hom.{succ u4, u3} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, 
u4, u1, u3} C _inst_1 D _inst_2 G) X)) 0 (Zero.toOfNat0.{u4} (Quiver.Hom.{succ u4, u3} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Limits.HasZeroMorphisms.Zero.{u4, u3} D _inst_2 (CategoryTheory.Preadditive.preadditiveHasZeroMorphisms.{u4, u3} D _inst_2 _inst_3) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u2, succ u4, u1, u3} C (CategoryTheory.CategoryStruct.toQuiver.{u2, u1} C (CategoryTheory.Category.toCategoryStruct.{u2, u1} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u4, u3} D (CategoryTheory.Category.toCategoryStruct.{u4, u3} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u2, u4, u1, u3} C _inst_1 D _inst_2 G) X))))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_zero CategoryTheory.NatTrans.app_zeroₓ'. -/
@[simp]
theorem app_zero (X : C) : (0 : F ⟶ G).app X = 0 :=
rfl
#align category_theory.nat_trans.app_zero CategoryTheory.NatTrans.app_zero
/- warning: category_theory.nat_trans.app_add -> CategoryTheory.NatTrans.app_add is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (β : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (HAdd.hAdd.{max u1 u4, max u1 u4, max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (instHAdd.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddZeroClass.toHasAdd.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max 
u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddMonoid.toAddZeroClass.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G))))))) α β) X) (HAdd.hAdd.{u4, u4, u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) 
(instHAdd.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddZeroClass.toHasAdd.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddMonoid.toAddZeroClass.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.toAddMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)))))))) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G α X) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G β X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (β : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (HAdd.hAdd.{max u4 u3, max u4 u3, max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C 
_inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (instHAdd.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddZeroClass.toAdd.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddMonoid.toAddZeroClass.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D 
_inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G))))))) α β) X) (HAdd.hAdd.{u3, u3, u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (instHAdd.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D 
_inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddZeroClass.toAdd.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddMonoid.toAddZeroClass.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubNegMonoid.toAddMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddGroup.toSubNegMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) 
(CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toAddGroup.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)))))))) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G α X) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G β X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_add CategoryTheory.NatTrans.app_addₓ'. -/
@[simp]
theorem app_add (X : C) (α β : F ⟶ G) : (α + β).app X = α.app X + β.app X :=
rfl
#align category_theory.nat_trans.app_add CategoryTheory.NatTrans.app_add
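
-- Illustrative usage sketch, not part of the generated port: since `app_add` is a
-- `simp`/`rw` lemma, componentwise goals about sums of natural transformations reduce
-- to ordinary addition of morphisms in `D`. The binders `X`, `α`, `β` are purely
-- illustrative; `add_comm` is the general commutativity lemma for the hom group.
example (X : C) (α β : F ⟶ G) : (α + β).app X = β.app X + α.app X := by
  rw [app_add, add_comm (α.app X) (β.app X)]
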
/- warning: category_theory.nat_trans.app_sub -> CategoryTheory.NatTrans.app_sub is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (β : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (HSub.hSub.{max u1 u4, max u1 u4, max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (instHSub.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toHasSub.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max 
u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G))))) α β) X) (HSub.hSub.{u4, u4, u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (instHSub.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.toHasSub.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D 
(CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)))))) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G α X) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G β X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (β : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (HSub.hSub.{max u4 u3, max u4 u3, max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C 
_inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (instHSub.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toSub.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G))))) α β) X) (HSub.hSub.{u3, u3, u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) 
(Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (instHSub.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubNegMonoid.toSub.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddGroup.toSubNegMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, 
u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toAddGroup.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)))))) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G α X) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G β X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_sub CategoryTheory.NatTrans.app_subₓ'. -/
@[simp]
theorem app_sub (X : C) (α β : F ⟶ G) : (α - β).app X = α.app X - β.app X :=
rfl
#align category_theory.nat_trans.app_sub CategoryTheory.NatTrans.app_sub
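
-- Hedged sketch (illustrative, not from the original file): `app_sub` combines with
-- `app_add` under `simp`, so cancellation in the functor category can be checked on
-- components; this assumes the standard cancellation lemma (`add_sub_cancel`, or its
-- later renaming) is in the default simp set, as it usually is.
example (X : C) (α β : F ⟶ G) : (α + β - β).app X = α.app X := by simp
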
/- warning: category_theory.nat_trans.app_neg -> CategoryTheory.NatTrans.app_neg is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (Neg.neg.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toHasNeg.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G)))) α) X) (Neg.neg.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) 
(CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.toHasNeg.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X))))) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G α X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (Neg.neg.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (NegZeroClass.toNeg.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubNegZeroMonoid.toNegZeroClass.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubtractionMonoid.toSubNegZeroMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) 
(CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubtractionCommMonoid.toSubtractionMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toDivisionAddCommMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G)))))) α) X) (Neg.neg.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (NegZeroClass.toNeg.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) 
(SubNegZeroMonoid.toNegZeroClass.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubtractionMonoid.toSubNegZeroMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubtractionCommMonoid.toSubtractionMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toDivisionAddCommMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, 
u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X))))))) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G α X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_neg CategoryTheory.NatTrans.app_negₓ'. -/
@[simp]
theorem app_neg (X : C) (α : F ⟶ G) : (-α).app X = -α.app X :=
rfl
#align category_theory.nat_trans.app_neg CategoryTheory.NatTrans.app_neg
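
-- Hedged sketch (illustrative, not from the original file): `app_neg` and `app_sub`
-- let one unfold subtraction of natural transformations into addition of a negated
-- component, together with the general group lemma `sub_eq_add_neg`.
example (X : C) (α β : F ⟶ G) : (α - β).app X = α.app X + (-β).app X := by
  rw [app_sub, app_neg, sub_eq_add_neg]
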
/- warning: category_theory.nat_trans.app_nsmul -> CategoryTheory.NatTrans.app_nsmul is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (n : Nat), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (SMul.smul.{0, max u1 u4} Nat (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddMonoid.SMul.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, 
u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G))))) n α) X) (SMul.smul.{0, u4} Nat (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddMonoid.SMul.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.toAddMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)))))) n (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G α X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (n : Nat), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (HSMul.hSMul.{0, max u4 u3, max u4 u3} Nat (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (instHSMul.{0, max u4 u3} Nat (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddMonoid.SMul.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max 
(max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.toAddMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G)))))) n α) X) (HSMul.hSMul.{0, u3, u3} Nat (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) 
(CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (instHSMul.{0, u3} Nat (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddMonoid.SMul.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubNegMonoid.toAddMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddGroup.toSubNegMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) 
(CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toAddGroup.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X))))))) n (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G α X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_nsmul CategoryTheory.NatTrans.app_nsmulₓ'. -/
@[simp]
theorem app_nsmul (X : C) (α : F ⟶ G) (n : ℕ) : (n • α).app X = n • α.app X :=
(appHom X).map_nsmul α n
#align category_theory.nat_trans.app_nsmul CategoryTheory.NatTrans.app_nsmul
/- warning: category_theory.nat_trans.app_zsmul -> CategoryTheory.NatTrans.app_zsmul is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (n : Int), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (SMul.smul.{0, max u1 u4} Int (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.SMulInt.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G)))) n α) X) (SMul.smul.{0, u4} Int (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D 
(CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (SubNegMonoid.SMulInt.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddGroup.toSubNegMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (AddCommGroup.toAddGroup.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X))))) n (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G α X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} (X : C) (α : Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (n : Int), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (HSMul.hSMul.{0, max u4 u3, max u4 u3} Int (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (instHSMul.{0, max u4 u3} Int (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (SubNegMonoid.SMulInt.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max 
(max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddGroup.toSubNegMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (AddCommGroup.toAddGroup.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G))))) n α) X) (HSMul.hSMul.{0, u3, u3} Int (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (instHSMul.{0, u3} Int (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D 
(CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (SubNegMonoid.SMulInt.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddGroup.toSubNegMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (AddCommGroup.toAddGroup.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C 
(CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)))))) n (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G α X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_zsmul CategoryTheory.NatTrans.app_zsmulₓ'. -/
@[simp]
theorem app_zsmul (X : C) (α : F ⟶ G) (n : ℤ) : (n • α).app X = n • α.app X :=
(appHom X : (F ⟶ G) →+ (F.obj X ⟶ G.obj X)).map_zsmul α n
#align category_theory.nat_trans.app_zsmul CategoryTheory.NatTrans.app_zsmul
/- warning: category_theory.nat_trans.app_sum -> CategoryTheory.NatTrans.app_sum is a dubious translation:
lean 3 declaration is
forall {C : Type.{u1}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u3, u1} C] [_inst_2 : CategoryTheory.Category.{u4, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u4, u2} D _inst_2] {F : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2} {ι : Type.{u5}} (s : Finset.{u5} ι) (X : C) (α : ι -> (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G)), Eq.{succ u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (Finset.sum.{max u1 u4, u5} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) ι (AddCommGroup.toAddCommMonoid.{max u1 u4} (Quiver.Hom.{succ (max u1 u4), max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u1 u4, max u3 u4 u1 u2} (CategoryTheory.Functor.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u3, u4, u1, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u1, u2, u3, u4} C D _inst_1 _inst_2 _inst_3) F G)) s (fun (i : ι) => α i)) X) (Finset.sum.{u4, u5} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) ι (AddCommGroup.toAddCommMonoid.{u4} (Quiver.Hom.{succ u4, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u4, u2} D (CategoryTheory.Category.toCategoryStruct.{u4, u2} D _inst_2)) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X)) (CategoryTheory.Preadditive.homGroup.{u4, u2} D _inst_2 _inst_3 (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 F X) (CategoryTheory.Functor.obj.{u3, u4, u1, u2} C _inst_1 D _inst_2 G X))) s (fun (i : ι) => CategoryTheory.NatTrans.app.{u3, u4, u1, u2} C _inst_1 D _inst_2 F G (α i) X))
but is expected to have type
forall {C : Type.{u4}} {D : Type.{u2}} [_inst_1 : CategoryTheory.Category.{u1, u4} C] [_inst_2 : CategoryTheory.Category.{u3, u2} D] [_inst_3 : CategoryTheory.Preadditive.{u3, u2} D _inst_2] {F : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {G : CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2} {ι : Type.{u5}} (s : Finset.{u5} ι) (X : C) (α : ι -> (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G)), Eq.{succ u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (Finset.sum.{max u4 u3, u5} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) ι (AddCommGroup.toAddCommMonoid.{max u4 u3} (Quiver.Hom.{max (succ u4) (succ u3), max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.CategoryStruct.toQuiver.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Category.toCategoryStruct.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2))) F G) (CategoryTheory.Preadditive.homGroup.{max u4 u3, max (max (max u4 u2) u1) u3} (CategoryTheory.Functor.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.Functor.category.{u1, u3, u4, u2} C _inst_1 D _inst_2) (CategoryTheory.functorCategoryPreadditive.{u4, u2, u1, u3} C D _inst_1 _inst_2 _inst_3) F G)) s (fun (i : ι) => α i)) X) (Finset.sum.{u3, u5} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D 
(CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) ι (AddCommGroup.toAddCommMonoid.{u3} (Quiver.Hom.{succ u3, u2} D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X)) (CategoryTheory.Preadditive.homGroup.{u3, u2} D _inst_2 _inst_3 (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 F) X) (Prefunctor.obj.{succ u1, succ u3, u4, u2} C (CategoryTheory.CategoryStruct.toQuiver.{u1, u4} C (CategoryTheory.Category.toCategoryStruct.{u1, u4} C _inst_1)) D (CategoryTheory.CategoryStruct.toQuiver.{u3, u2} D (CategoryTheory.Category.toCategoryStruct.{u3, u2} D _inst_2)) (CategoryTheory.Functor.toPrefunctor.{u1, u3, u4, u2} C _inst_1 D _inst_2 G) X))) s (fun (i : ι) => CategoryTheory.NatTrans.app.{u1, u3, u4, u2} C _inst_1 D _inst_2 F G (α i) X))
Case conversion may be inaccurate. Consider using '#align category_theory.nat_trans.app_sum CategoryTheory.NatTrans.app_sumₓ'. -/
@[simp]
theorem app_sum {ι : Type _} (s : Finset ι) (X : C) (α : ι → (F ⟶ G)) :
(∑ i in s, α i).app X = ∑ i in s, (α i).app X :=
by
rw [← app_hom_apply, AddMonoidHom.map_sum]
rfl
#align category_theory.nat_trans.app_sum CategoryTheory.NatTrans.app_sum
end NatTrans
end CategoryTheory
|
Require Import
Coq.Lists.List.
Generalizable All Variables.
Import ListNotations.
(****************************************************************************
* hlist: Type heterogeneous list indexed by a list.
****************************************************************************)
Inductive hlist : list Type -> Type :=
| HNil : hlist []
| HCons : forall t ts, t -> hlist ts -> hlist (t :: ts).
Arguments HNil : default implicits.
Arguments HCons : default implicits.
Lemma cons_head_eq : forall A (x0 : A) x1 y0 y1,
x0 :: y0 = x1 :: y1 -> x0 = x1.
Proof.
intros.
inversion H.
intuition.
Defined.
Lemma cons_tail_eq : forall A (x0 : A) x1 y0 y1,
x0 :: y0 = x1 :: y1 -> y0 = y1.
Proof.
intros.
inversion H.
intuition.
Defined.
Import EqNotations.
Program Definition hlist_head `(l : hlist (t :: ts)) : t :=
match l in hlist d return d = t :: ts -> t with
| HNil => fun _ => False_rect _ _
| HCons x _ => fun H => rew (cons_head_eq _ _ _ _ _ H) in x
end eq_refl.
Program Definition hlist_tail `(l : hlist (t :: ts)) : hlist ts :=
match l in hlist d return d = t :: ts -> hlist ts with
| HNil => fun _ => False_rect _ _
| HCons _ xs => fun H => rew (cons_tail_eq _ _ _ _ _ H) in xs
end eq_refl.
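(* A minimal usage sketch (names below are ours, not part of the original
   development): it only illustrates how the implicit arguments declared
   above are inferred when building and projecting a heterogeneous list. *)
Example hlist_example : hlist [nat; bool] := HCons 3 (HCons true HNil).
Definition hlist_example_head : nat := hlist_head hlist_example.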
|
[STATEMENT]
lemma product_language[simp]: "b.language (product AA) = \<Inter> (a.language ` set AA)"
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. b.language (product AA) = \<Inter> (a.language ` set AA)
[PROOF STEP]
unfolding a.language_def b.language_def
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. {w. b.run' (product AA) w (initial\<^sub>2 (product AA)) \<and> test\<^sub>2 (condition\<^sub>2 (product AA)) w (b.trace (product AA) w (initial\<^sub>2 (product AA))) (initial\<^sub>2 (product AA))} = (\<Inter>A\<in>set AA. {w. a.run' A w (initial\<^sub>1 A) \<and> test\<^sub>1 (condition\<^sub>1 A) w (a.trace A w (initial\<^sub>1 A)) (initial\<^sub>1 A)})
[PROOF STEP]
unfolding a.run_alt_def b.run_alt_def streams_iff_sset
[PROOF STATE]
proof (prove)
goal (1 subgoal):
1. {w. sset w \<subseteq> alphabet\<^sub>2 (product AA) \<and> test\<^sub>2 (condition\<^sub>2 (product AA)) w (b.trace (product AA) w (initial\<^sub>2 (product AA))) (initial\<^sub>2 (product AA))} = (\<Inter>A\<in>set AA. {w. sset w \<subseteq> alphabet\<^sub>1 A \<and> test\<^sub>1 (condition\<^sub>1 A) w (a.trace A w (initial\<^sub>1 A)) (initial\<^sub>1 A)})
[PROOF STEP]
by (fastforce simp: set_conv_nth product_trace_smap) |
module Personal
public export
record Personal where
constructor CreatePersonal
political : Integer
langs : List String
religion : String
inspiredBy : String
peopleMain : Integer
lifeMain : Integer
smoking : Integer
alcohol : Integer
|
State Before: α : Type u
β : Type v
δ : Type w
n : ℕ
s₁ s₂ : Stream' α
⊢ nth (s₁ ⋈ s₂) (2 * (n + 1) + 1) = nth s₂ (n + 1) State After: α : Type u
β : Type v
δ : Type w
n : ℕ
s₁ s₂ : Stream' α
⊢ nth (s₁ ⋈ s₂) (succ (succ (2 * n + 1))) = nth s₂ (succ n) Tactic: change nth (s₁ ⋈ s₂) (succ (succ (2 * n + 1))) = nth s₂ (succ n) State Before: α : Type u
β : Type v
δ : Type w
n : ℕ
s₁ s₂ : Stream' α
⊢ nth (s₁ ⋈ s₂) (succ (succ (2 * n + 1))) = nth s₂ (succ n) State After: α : Type u
β : Type v
δ : Type w
n : ℕ
s₁ s₂ : Stream' α
⊢ nth (tail s₂) n = nth s₂ (succ n) Tactic: rw [nth_succ, nth_succ, interleave_eq, tail_cons, tail_cons,
nth_interleave_right n (tail s₁) (tail s₂)] State Before: α : Type u
β : Type v
δ : Type w
n : ℕ
s₁ s₂ : Stream' α
⊢ nth (tail s₂) n = nth s₂ (succ n) State After: no goals Tactic: rfl |
-- Non-indexed (plain) monads in form of Kleisli triple, presented in point-free style.
module Control.Monad.KleisliTriple where
open import Function using (id) renaming (_∘′_ to _∘_)
open import Relation.Binary.PropositionalEquality
open ≡-Reasoning
open import Axiom.FunctionExtensionality
open import Control.Functor
record IsKleisliTriple (M : Set → Set) : Set₁ where
-- Methods.
field
return : ∀ {A } → A → M A
bind′ : ∀ {A B} → (A → M B) → M A → M B
-- Laws.
field
bind′-β : ∀ {A B} {k : A → M B} →
bind′ k ∘ return ≡ k
bind′-η : ∀ {A} →
bind′ {A = A} return ≡ id
bind′-∘ : ∀ {A B C} {k : A → M B} {l : B → M C} →
bind′ (bind′ l ∘ k) ≡ bind′ l ∘ bind′ k
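-- Read as monad laws: bind′-β is left identity (return is neutral on the
-- left of bind), bind′-η is right identity, and bind′-∘ is associativity
-- of Kleisli composition.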
-- Notations for bind′.
-- Postfix category-theoretical notation.
_✶ = bind′
-- Infix Haskell notation.
infixl 6 _=<<_
_=<<_ = bind′
-- Kleisli composition.
infixl 6 _<=<_
_<=<_ : ∀ {A B C : Set} (l : B → M C) (k : A → M B) → (A → M C)
l <=< k = bind′ l ∘ k
-- Functoriality.
isFunctor : IsFunctor M
isFunctor = record
{ ops = record { map = map }
; laws = record { map-id = bind′-η ; map-∘ = sym (map-∘-sym _ _) }
}
where
map : ∀ {A B} → (A → B) → M A → M B
map f = bind′ (return ∘ f)
map-∘-sym : ∀ {A B C} (f : A → B) (g : B → C) → map g ∘ map f ≡ map (g ∘ f)
map-∘-sym f g = begin
map g ∘ map f ≡⟨⟩
bind′ (return ∘ g) ∘ bind′ (return ∘ f) ≡⟨ sym bind′-∘ ⟩
bind′ (bind′ (return ∘ g) ∘ return ∘ f) ≡⟨ cong (λ z → bind′ (z ∘ f)) bind′-β ⟩
bind′ (return ∘ g ∘ f) ≡⟨⟩
map (g ∘ f) ∎
open IsFunctor isFunctor public
-- Monads in Kleisli Triple presentation.
record KleisliTriple : Set₁ where
field
M : Set → Set
M! : IsKleisliTriple M
open IsKleisliTriple M! public
|
module m_npy
use iso_fortran_env
implicit none
private
! Suffix for temporary .npy files
character(len=*), parameter :: npy_suffix = '.npy'
interface save_npy
module procedure write_int64_vec, write_int64_mtx, &
write_int32_vec, write_int32_mtx, write_int32_3d, &
write_int16_vec, write_int16_mtx, &
write_int8_vec, write_int8_mtx, write_int8_3d, &
write_dbl_vec, write_dbl_mtx, &
write_sng_vec, write_sng_mtx, &
write_cmplx_sgn_vec, write_cmplx_sgn_mtx, &
write_cmplx_dbl_vec, write_cmplx_dbl_mtx, &
write_sng_3dT, write_dbl_3dT, &
write_sng_4dT, write_dbl_4dT, &
write_dbl_5dT, &
write_cmplx_dbl_3dT, &
write_cmplx_dbl_4dT, &
write_cmplx_dbl_5dT, &
write_cmplx_dbl_6dT
end interface save_npy
public :: save_npy
public :: remove_file
public :: add_to_zip
contains
subroutine run_sys(cmd, stat)
character(len=*), intent(in) :: cmd
integer(int32), intent(out) :: stat
call execute_command_line(cmd, wait=.true., exitstat=stat)
end subroutine run_sys
! Add an .npy file to a zip archive, optionally renaming the stored entry and removing the original file afterwards
subroutine add_to_zip(zipfile, filename, keep_file, custom_name)
character(len=*), intent(in) :: zipfile ! Name of zip file
character(len=*), intent(in) :: filename ! Name of file to add
logical, intent(in) :: keep_file ! Whether to keep 'filename'
character(len=*), intent(in), optional :: custom_name ! Custom name
integer(int32) :: stat
! -q: be quiet while zipping; -0: store entries without compression
character(len=*), parameter :: zip_command = "zip -q0"
call run_sys(zip_command//" "//trim(zipfile)//" "//&
trim(filename), stat)
if (stat /= 0) then
print *, zip_command//" "//trim(zipfile)//" "// trim(filename)
error stop "add_to_zip: Can't execute zip command"
endif
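! If a custom entry name was requested, rename the file just added to the
! archive: zipnote -w rewrites archive metadata, and the piped lines
! "@ <stored name>" / "@=<new name>" are its syntax for renaming one entry.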
if (present(custom_name)) then
call run_sys('printf "@ '//trim(filename)//'\n@='//&
trim(custom_name)//'\n" | zipnote -w '//trim(zipfile), stat)
if (stat /= 0) then
error stop "add_to_zip: Failed to rename to custom_name"
endif
end if
if (.not. keep_file) then
call remove_file(filename)
end if
end subroutine add_to_zip
subroutine remove_file(filename)
character(len=*), intent(in) :: filename
integer :: p_un, stat
open(newunit=p_un, iostat=stat, file=filename, status='old')
if (stat == 0) close(p_un, status='delete')
end subroutine remove_file
function dict_str(var_type, var_shape) result(str)
character(len=*), intent(in) :: var_type
integer(int32), intent(in) :: var_shape(:)
character(len=:), allocatable :: str
character(len=1024) :: buffer
integer(int32) :: total_size, my_size
! https://numpy.org/devdocs/reference/generated/numpy.lib.format.html
! The first 6 bytes are a magic string: exactly \x93NUMPY.
! The next 1 byte is an unsigned byte: the major version number of the file
! format, e.g. \x01.
! The next 1 byte is an unsigned byte: the minor version number of the file
! format, e.g. \x00. Note: the version of the file format is not tied to the
! version of the numpy package.
! The next 2 bytes form a little-endian unsigned short int: the length of
! the header data HEADER_LEN.
! The next HEADER_LEN bytes form the header data describing the array’s
! format. It is an ASCII string which contains a Python literal expression
! of a dictionary. It is terminated by a newline (\n) and padded with spaces
! (\x20) to make the total of len(magic string) + 2 + len(length) +
! HEADER_LEN be evenly divisible by 64 for alignment purposes.
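! Note on this implementation: write_header below declares format version 2.0,
! whose HEADER_LEN field is 4 bytes wide (the 2-byte field quoted above is the
! 1.0 layout).  header_len is written as a native-endian int32, so -- like the
! '<' (little-endian) type codes -- this assumes a little-endian machine.
! The total size is padded to a multiple of 16 rather than 64; numpy's reader
! does not appear to enforce the alignment, but that is an assumption rather
! than something the format description guarantees.
! Illustrative header dictionary for a 3x4 real64 array:
!   {'descr': '<f8', 'fortran_order': True, 'shape': (3,4,), }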
buffer = "{'descr': '"//var_type// &
"', 'fortran_order': True, 'shape': ("// &
shape_str(var_shape)//"), }"
! len(magic string) + 2 + len(length) + ending newline =
! 6 + 2 + 4 + 1 = 13 bytes
total_size = len_trim(buffer) + 13
! ensure total_size is divisible by 16 bytes
total_size = ((total_size + 15)/16) * 16
! Size of dict_str includes the ending newline (so -12 instead of -13)
my_size = total_size - 12
! End with newline
buffer(my_size:my_size) = achar(10)
str = buffer(1:my_size)
end function dict_str
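! shape_str renders the shape as comma-terminated integers, e.g. "3,4," for a
! 3x4 array; the trailing comma keeps one-element shapes valid Python tuples
! once dict_str adds the parentheses, e.g. "(3,)".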
function shape_str(var_shape) result(fin_str)
integer(int32), intent(in) :: var_shape(:)
character(len=:), allocatable :: str, small_str, fin_str
integer(int32) :: i, length, start, halt
length = 14*size(var_shape)
allocate (character(length) :: str)
allocate (character(14) :: small_str)
str = " "
do i = 1, size(var_shape)
start = (i - 1)*length + 1
halt = i*length + 1
write (small_str, "(I13,A)") var_shape(i), ","
str = trim(str)//adjustl(small_str)
enddo
fin_str = trim(str)
end function shape_str
subroutine write_header(p_un, var_type, var_shape)
integer(int32), intent(in) :: p_un
character(len=*), intent(in) :: var_type
integer(int32), intent(in) :: var_shape(:)
integer(int32) :: header_len
! Magic number hex x93 is 147 (unsigned), signed this is -109
integer(int8), parameter :: magic_num = int(-109, int8)
character(len=*), parameter :: magic_str = "NUMPY"
integer(int8), parameter :: major = 2_int8 ! major *.npy version
integer(int8), parameter :: minor = 0_int8 ! minor *.npy version
header_len = len(dict_str(var_type, var_shape))
write (p_un) magic_num, magic_str, major, minor
write (p_un) header_len
write (p_un) dict_str(var_type, var_shape)
end subroutine write_header
subroutine write_cmplx_sgn_mtx(filename, mtx)
character(len=*), intent(in) :: filename
complex(4), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<c8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_cmplx_sgn_mtx
subroutine write_cmplx_sgn_vec(filename, vec)
character(len=*), intent(in) :: filename
complex(4), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<c8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_cmplx_sgn_vec
subroutine write_cmplx_dbl_6dT(filename, tensor)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: tensor(:, :, :, :, :, :)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_cmplx_dbl_6dT
subroutine write_cmplx_dbl_5dT(filename, tensor)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: tensor(:, :, :, :, :)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_cmplx_dbl_5dT
subroutine write_cmplx_dbl_4dT(filename, tensor)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: tensor(:, :, :, :)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_cmplx_dbl_4dT
subroutine write_cmplx_dbl_3dT(filename, tensor)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: tensor(:, :, :)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_cmplx_dbl_3dT
subroutine write_cmplx_dbl_mtx(filename, mtx)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_cmplx_dbl_mtx
subroutine write_cmplx_dbl_vec(filename, vec)
character(len=*), intent(in) :: filename
complex(8), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<c16"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_cmplx_dbl_vec
subroutine write_sng_3dT(filename, tensor)
character(len=*), intent(in) :: filename
real(real32), intent(in) :: tensor(:, :, :)
character(len=*), parameter :: var_type = "<f4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_sng_3dT
subroutine write_sng_4dT(filename, tensor)
character(len=*), intent(in) :: filename
real(real32), intent(in) :: tensor(:, :, :, :)
character(len=*), parameter :: var_type = "<f4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_sng_4dT
subroutine write_sng_mtx(filename, mtx)
character(len=*), intent(in) :: filename
real(real32), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<f4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_sng_mtx
subroutine write_sng_vec(filename, vec)
character(len=*), intent(in) :: filename
real(real32), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<f4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_sng_vec
subroutine write_dbl_3dT(filename, tensor)
character(len=*), intent(in) :: filename
real(real64), intent(in) :: tensor(:, :, :)
character(len=*), parameter :: var_type = "<f8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor))
write (p_un) tensor
close (unit=p_un)
end subroutine write_dbl_3dT
subroutine write_dbl_4dT(filename, tensor4)
character(len=*), intent(in) :: filename
real(real64), intent(in) :: tensor4(:, :, :, :)
character(len=*), parameter :: var_type = "<f8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor4))
write (p_un) tensor4
close (unit=p_un)
end subroutine write_dbl_4dT
subroutine write_dbl_5dT(filename, tensor5)
character(len=*), intent(in) :: filename
real(real64), intent(in) :: tensor5(:, :, :, :, :)
character(len=*), parameter :: var_type = "<f8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(tensor5))
write (p_un) tensor5
close (unit=p_un)
end subroutine write_dbl_5dT
subroutine write_dbl_mtx(filename, mtx)
character(len=*), intent(in) :: filename
real(real64), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<f8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_dbl_mtx
subroutine write_dbl_vec(filename, vec)
character(len=*), intent(in) :: filename
real(real64), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<f8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_dbl_vec
subroutine write_int64_mtx(filename, mtx)
character(len=*), intent(in) :: filename
integer(int64), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<i8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int64_mtx
subroutine write_int64_vec(filename, vec)
character(len=*), intent(in) :: filename
integer(int64), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<i8"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_int64_vec
subroutine write_int32_mtx(filename, mtx)
character(len=*), intent(in) :: filename
integer(int32), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<i4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int32_mtx
subroutine write_int32_3d(filename, mtx)
character(len=*), intent(in) :: filename
integer(int32), intent(in) :: mtx(:,:,:)
character(len=*), parameter :: var_type = "<i4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int32_3d
subroutine write_int32_vec(filename, vec)
character(len=*), intent(in) :: filename
integer(int32), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<i4"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_int32_vec
subroutine write_int16_mtx(filename, mtx)
character(len=*), intent(in) :: filename
integer(int16), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<i2"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int16_mtx
subroutine write_int16_vec(filename, vec)
character(len=*), intent(in) :: filename
integer(int16), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<i2"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_int16_vec
subroutine write_int8_mtx(filename, mtx)
character(len=*), intent(in) :: filename
integer(int8), intent(in) :: mtx(:, :)
character(len=*), parameter :: var_type = "<i1"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int8_mtx
subroutine write_int8_3d(filename, mtx)
character(len=*), intent(in) :: filename
integer(int8), intent(in) :: mtx(:,:,:)
character(len=*), parameter :: var_type = "<i1"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(mtx))
write (p_un) mtx
close (unit=p_un)
end subroutine write_int8_3d
subroutine write_int8_vec(filename, vec)
character(len=*), intent(in) :: filename
integer(int8), intent(in) :: vec(:)
character(len=*), parameter :: var_type = "<i1"
integer(int32) :: p_un
open (newunit=p_un, file=filename, form="unformatted", access="stream")
call write_header(p_un, var_type, shape(vec))
write (p_un) vec
close (unit=p_un)
end subroutine write_int8_vec
end module m_npy
|
theory Hotel_Example
imports Main "HOL-Library.Predicate_Compile_Quickcheck"
begin
datatype guest = Guest0 | Guest1
datatype key = Key0 | Key1 | Key2 | Key3
datatype room = Room0
type_synonym card = "key * key"
datatype event =
Check_in guest room card
| Enter guest room card
| Exit guest room
definition initk :: "room \<Rightarrow> key"
where "initk = (%r. Key0)"
declare initk_def[code_pred_def, code]
primrec owns :: "event list \<Rightarrow> room \<Rightarrow> guest option"
where
"owns [] r = None"
| "owns (e#s) r = (case e of
Check_in g r' c \<Rightarrow> if r' = r then Some g else owns s r |
Enter g r' c \<Rightarrow> owns s r |
Exit g r' \<Rightarrow> owns s r)"
primrec currk :: "event list \<Rightarrow> room \<Rightarrow> key"
where
"currk [] r = initk r"
| "currk (e#s) r = (let k = currk s r in
case e of Check_in g r' (k1, k2) \<Rightarrow> if r' = r then k2 else k
| Enter g r' c \<Rightarrow> k
| Exit g r \<Rightarrow> k)"
primrec issued :: "event list \<Rightarrow> key set"
where
"issued [] = range initk"
| "issued (e#s) = issued s \<union>
(case e of Check_in g r (k1, k2) \<Rightarrow> {k2} | Enter g r c \<Rightarrow> {} | Exit g r \<Rightarrow> {})"
primrec cards :: "event list \<Rightarrow> guest \<Rightarrow> card set"
where
"cards [] g = {}"
| "cards (e#s) g = (let C = cards s g in
case e of Check_in g' r c \<Rightarrow> if g' = g then insert c C
else C
| Enter g r c \<Rightarrow> C
| Exit g r \<Rightarrow> C)"
primrec roomk :: "event list \<Rightarrow> room \<Rightarrow> key"
where
"roomk [] r = initk r"
| "roomk (e#s) r = (let k = roomk s r in
case e of Check_in g r' c \<Rightarrow> k
| Enter g r' (x,y) \<Rightarrow> if r' = r \<^cancel>\<open>\<and> x = k\<close> then y else k
| Exit g r \<Rightarrow> k)"
primrec isin :: "event list \<Rightarrow> room \<Rightarrow> guest set"
where
"isin [] r = {}"
| "isin (e#s) r = (let G = isin s r in
case e of Check_in g r c \<Rightarrow> G
| Enter g r' c \<Rightarrow> if r' = r then {g} \<union> G else G
| Exit g r' \<Rightarrow> if r'=r then G - {g} else G)"
primrec hotel :: "event list \<Rightarrow> bool"
where
"hotel [] = True"
| "hotel (e # s) = (hotel s & (case e of
Check_in g r (k,k') \<Rightarrow> k = currk s r \<and> k' \<notin> issued s |
Enter g r (k,k') \<Rightarrow> (k,k') \<in> cards s g & (roomk s r \<in> {k, k'}) |
Exit g r \<Rightarrow> g \<in> isin s r))"
definition no_Check_in :: "event list \<Rightarrow> room \<Rightarrow> bool" where(*>*)
[code del]: "no_Check_in s r \<equiv> \<not>(\<exists>g c. Check_in g r c \<in> set s)"
definition feels_safe :: "event list \<Rightarrow> room \<Rightarrow> bool"
where
"feels_safe s r = (\<exists>s\<^sub>1 s\<^sub>2 s\<^sub>3 g c c'.
s = s\<^sub>3 @ [Enter g r c] @ s\<^sub>2 @ [Check_in g r c'] @ s\<^sub>1 \<and>
no_Check_in (s\<^sub>3 @ s\<^sub>2) r \<and> isin (s\<^sub>2 @ [Check_in g r c] @ s\<^sub>1) r = {})"
section \<open>Some setup\<close>
lemma issued_nil: "issued [] = {Key0}"
by (auto simp add: initk_def)
lemmas issued_simps[code] = issued_nil issued.simps(2)
setup \<open>Predicate_Compile_Data.ignore_consts [\<^const_name>\<open>Set.member\<close>,
\<^const_name>\<open>issued\<close>, \<^const_name>\<open>cards\<close>, \<^const_name>\<open>isin\<close>,
\<^const_name>\<open>Collect\<close>, \<^const_name>\<open>insert\<close>]\<close>
ML_val \<open>Core_Data.force_modes_and_compilations\<close>
fun find_first :: "('a => 'b option) => 'a list => 'b option"
where
"find_first f [] = None"
| "find_first f (x # xs) = (case f x of Some y => Some y | None => find_first f xs)"
axiomatization cps_of_set :: "'a set => ('a => term list option) => term list option"
where cps_of_set_code [code]: "cps_of_set (set xs) f = find_first f xs"
axiomatization pos_cps_of_set :: "'a set => ('a => (bool * term list) option) => natural => (bool * term list) option"
where pos_cps_of_set_code [code]: "pos_cps_of_set (set xs) f i = find_first f xs"
axiomatization find_first' :: "('b Quickcheck_Exhaustive.unknown => 'a Quickcheck_Exhaustive.three_valued)
=> 'b list => 'a Quickcheck_Exhaustive.three_valued"
where find_first'_Nil: "find_first' f [] = Quickcheck_Exhaustive.No_value"
and find_first'_Cons: "find_first' f (x # xs) =
(case f (Quickcheck_Exhaustive.Known x) of
Quickcheck_Exhaustive.No_value => find_first' f xs
| Quickcheck_Exhaustive.Value x => Quickcheck_Exhaustive.Value x
| Quickcheck_Exhaustive.Unknown_value =>
(case find_first' f xs of Quickcheck_Exhaustive.Value x =>
Quickcheck_Exhaustive.Value x
| _ => Quickcheck_Exhaustive.Unknown_value))"
lemmas find_first'_code [code] = find_first'_Nil find_first'_Cons
axiomatization neg_cps_of_set :: "'a set => ('a Quickcheck_Exhaustive.unknown => term list Quickcheck_Exhaustive.three_valued) => natural => term list Quickcheck_Exhaustive.three_valued"
where neg_cps_of_set_code [code]: "neg_cps_of_set (set xs) f i = find_first' f xs"
setup \<open>
let
val Fun = Predicate_Compile_Aux.Fun
val Input = Predicate_Compile_Aux.Input
val Output = Predicate_Compile_Aux.Output
val Bool = Predicate_Compile_Aux.Bool
val oi = Fun (Output, Fun (Input, Bool))
val ii = Fun (Input, Fun (Input, Bool))
fun of_set compfuns \<^Type>\<open>fun T _\<close> =
case body_type (Predicate_Compile_Aux.mk_monadT compfuns T) of
\<^Type>\<open>Quickcheck_Exhaustive.three_valued _\<close> =>
Const(\<^const_name>\<open>neg_cps_of_set\<close>, \<^Type>\<open>set T\<close> --> Predicate_Compile_Aux.mk_monadT compfuns T)
| _ => Const(\<^const_name>\<open>pos_cps_of_set\<close>, \<^Type>\<open>set T\<close> --> Predicate_Compile_Aux.mk_monadT compfuns T)
fun member compfuns (U as \<^Type>\<open>fun T _\<close>) =
(absdummy T (absdummy \<^Type>\<open>set T\<close> (Predicate_Compile_Aux.mk_if compfuns
(\<^Const>\<open>Set.member T for \<open>Bound 1\<close> \<open>Bound 0\<close>\<close>))))
in
Core_Data.force_modes_and_compilations \<^const_name>\<open>Set.member\<close>
[(oi, (of_set, false)), (ii, (member, false))]
end
\<close>
section \<open>Property\<close>
lemma "\<lbrakk> hotel s; g \<in> isin s r \<rbrakk> \<Longrightarrow> owns s r = Some g"
quickcheck[tester = exhaustive, size = 6, expect = counterexample]
quickcheck[tester = smart_exhaustive, depth = 6, expect = counterexample]
oops
lemma
"hotel s ==> feels_safe s r ==> g \<in> isin s r ==> owns s r = Some g"
quickcheck[smart_exhaustive, depth = 10, allow_function_inversion, expect = counterexample]
oops
section \<open>Refinement\<close>
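(* The existential quantifiers in feels_safe range over ways of splitting the
   trace, so the definition is not directly executable.  The code equation
   below enumerates all splits with split_list and replaces the existential
   by list_ex, which is what lets the narrowing quickcheck at the end of the
   theory evaluate the property. *)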
fun split_list
where
"split_list [] = [([], [])]"
| "split_list (z # zs) = (([], z # zs) # [(z # xs', ys'). (xs', ys') <- split_list zs])"
lemma split_list: "((xs, ys) \<in> set (split_list zs)) = (zs = xs @ ys)"
apply (induct zs arbitrary: xs ys)
apply fastforce
apply (case_tac xs)
apply auto
done
lemma [code]: "feels_safe s r = list_ex (%(s3, s2, s1, g, c, c'). no_Check_in (s3 @ s2) r &
isin (s2 @ [Check_in g r c] @ s1) r = {}) ([(s3, s2, s1, g, c, c'). (s3, Enter g' r' c # r3) <- split_list s, r' = r, (s2, Check_in g r'' c' # s1) <- split_list r3, r'' = r, g = g'])"
unfolding feels_safe_def list_ex_iff
by auto (metis split_list)+
lemma
"hotel s ==> feels_safe s r ==> g \<in> isin s r ==> owns s r = Some g"
(* quickcheck[exhaustive, size = 9, timeout = 2000] -- maybe possible with a lot of time *)
quickcheck[narrowing, size = 7, expect = counterexample]
oops
end
|
function xyl = Xl(s,row,b0,b1)
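% Boundary-curve parametrisation (presumably the left edge, given the name):
% maps s in [0,1] to the point [x; y] with x = 0 and y = b0 + (b1 - b0)*s.
% When only s is supplied, the defaults row = 2, b0 = 1, b1 = 2 are used,
% e.g. Xl(0.5) returns [0; 1.5].  Note that row is not used in the
% computation below.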
if nargin == 1
row = 2. ;
b0 = 1. ;
b1 = 2. ;
end
x = 0 ;
y = b0+(b1-b0)*s ;
xyl = [x ; y] ; |