Session Key_Agreement_Strong_Adversaries

Theory Infra

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Infra.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Infra.thy 132861 2016-12-21 10:34:41Z csprenge $
  Author:  Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Infrastructure for refinement proofs (attributes, prover tuning etc)

  Copyright (c) 2009-2016 Christoph Sprenger 
  Licence: LGPL

*******************************************************************************)

section ‹Proving infrastructure›

theory Infra imports Main  
begin

(******************************************************************************)
subsection ‹Prover configuration›
(******************************************************************************)

― ‹Automatically split ‹if› expressions occurring in assumptions.›
declare if_split_asm [split]


(******************************************************************************)
subsection ‹Forward reasoning ("attributes")›
(******************************************************************************)

text ‹The following lemmas are used to produce intro/elim rules from
set definitions and relation definitions.›

― ‹From a set definition (a meta-equality between sets) derive an
    introduction rule, a destruction rule, and an elimination-format rule.›
lemmas set_def_to_intro = meta_eq_to_obj_eq [THEN eqset_imp_iff, THEN iffD2]
lemmas set_def_to_dest = meta_eq_to_obj_eq [THEN eqset_imp_iff, THEN iffD1]
lemmas set_def_to_elim = set_def_to_dest [elim_format]

― ‹Variants for definitions by set comprehension ‹{x. P x}›,
    converted to predicate form with the ‹to_pred› attribute.›
lemmas setc_def_to_intro = 
  set_def_to_intro [where B="{x. P x}" for P, to_pred]

lemmas setc_def_to_dest = 
  set_def_to_dest [where B="{x. P x}" for P, to_pred]

lemmas setc_def_to_elim = setc_def_to_dest [elim_format]

― ‹Variants for binary relations: instantiate the element to a pair ‹(s, t)›.›
lemmas rel_def_to_intro = setc_def_to_intro [where x="(s, t)" for s t]
lemmas rel_def_to_dest = setc_def_to_dest [where x="(s, t)" for s t]
lemmas rel_def_to_elim = rel_def_to_dest [elim_format]


(******************************************************************************)
subsection ‹General results›
(******************************************************************************)

subsubsection ‹Maps›
(******************************************************************************)

text ‹We usually remove @{term"domIff"} from the simpset and clasets due
to annoying behavior. Sometimes the lemmas below are better behaved than 
@{term "domIff"}. They are usually used as "dest: dom\_lemmas". However, adding 
them as permanent dest rules slows down proofs too much, so we refrain from 
doing this.›

― ‹A point where the map yields ‹Some› value lies in its domain.›
lemma map_definedness: 
  "f x = Some y  x  dom f"
by (simp add: domIff)

― ‹Contrapositive flavour: a point where ‹f› is defined differs from
    any point outside ‹dom f›.›
lemma map_definedness_contra:
  " f x = Some y; z  dom f   x  z"
by (auto simp add: domIff)

― ‹Bundle for occasional use as ‹dest: dom_lemmas›; deliberately not
    declared as permanent dest rules (see the note above).›
lemmas dom_lemmas = map_definedness map_definedness_contra


subsubsection ‹Set›
(******************************************************************************)


― ‹A set is contained in the inverse image of its image.›
lemma vimage_image_subset: "A  f-`(f`A)"
by (auto simp add: image_def vimage_def)


subsubsection ‹Relations›
(******************************************************************************)

― ‹Relational image distributes over relation composition.›
lemma Image_compose [simp]:
  "(R1 O R2)``A  = R2``(R1``A)"
by (auto)


subsubsection ‹Lists›
(******************************************************************************)

― ‹Do NOT add the following equation to the simpset! (looping)›
lemma map_comp: "map (g o f) = map g o map f"  
by (simp)

― ‹Remove the library's ‹map_comp_map› from the default simpset.›
declare map_comp_map [simp del]

― ‹Every ‹take›-prefix of a list is a prefix: the list decomposes as the
    prefix followed by some remainder.›
lemma take_prefix: " take n l = xs   xs'. l = xs @ xs'"
by (induct l arbitrary: n xs, auto)
   (rename_tac n, case_tac n, auto)


subsubsection ‹Finite sets›
(******************************************************************************)

text ‹Cardinality.›

― ‹Congruence of ‹card› under set equality, usable as an intro rule.›
declare arg_cong [where f=card, intro] 

― ‹A finite set is non-empty iff its cardinality is positive (two directions) ...›
lemma finite_positive_cardI [intro!]: 
  " A  {}; finite A   0 < card A"
by (auto)

lemma finite_positive_cardD [dest!]: 
  " 0 < card A; finite A   A  {}"
by (auto)

― ‹... and empty iff its cardinality is zero (two directions).›
lemma finite_zero_cardI [intro!]: 
  " A = {}; finite A   card A = 0"
by (auto)

lemma finite_zero_cardD [dest!]: 
  " card A = 0; finite A   A = {}"
by (auto)


end





Theory Refinement

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Refinement.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Refinement.thy 132859 2016-12-21 09:09:39Z eth.jlallemand $
  Author:  Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Infrastructure for specifications, invariants, and refinements.

  Copyright (c) 2009-2016 Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Models, Invariants and Refinements›

theory Refinement imports Infra 
begin

(******************************************************************************)
subsection ‹Specifications, reachability, and behaviours.›
(******************************************************************************)

text ‹Transition systems are multi-pointed graphs.›

― ‹A transition system: a set of initial states and a transition relation.
    Extensible record, so specifications below can add further fields.›
record 's TS = 
  init :: "'s set"
  trans :: "('s × 's) set"


text ‹The inductive set of reachable states.›

― ‹A state is reachable if it is initial or a transition successor of a
    reachable state.›
inductive_set 
  reach ::  "('s, 'a) TS_scheme  's set" 
  for T :: "('s, 'a) TS_scheme"
where
  r_init [intro]:  "s  init T  s  reach T"
| r_trans [intro]: " (s, t)  trans T; s  reach T   t  reach T"



subsubsection ‹Finite behaviours›
(******************************************************************************)

text ‹Note that behaviours grow at the head of the list, i.e., the initial 
state is at the end.›

― ‹Finite behaviours of ‹T›: state lists grown at the head, so the most
    recent state comes first and the initial state is last.›
inductive_set 
  beh :: "('s, 'a) TS_scheme  ('s list) set"
  for T :: "('s, 'a) TS_scheme" 
where
  b_empty [iff]: "[]  beh T"
| b_init [intro]: "s  init T  [s]  beh T"
| b_trans [intro]: " s # b  beh T; (s, t)  trans T   t # s # b  beh T"

― ‹Case analysis rule for non-empty behaviours.›
inductive_cases beh_non_empty: "s # b  beh T"


text ‹Behaviours are prefix closed.›

― ‹Dropping the head (most recent) state of a behaviour yields a behaviour.›
lemma beh_immediate_prefix_closed:
  "s # b  beh T  b  beh T"
by (erule beh_non_empty, auto) 

― ‹Dropping any newer segment ‹c› of a behaviour yields a behaviour.›
lemma beh_prefix_closed:
  "c @ b  beh T  b  beh T"
by (induct c, auto dest!: beh_immediate_prefix_closed)


text ‹States in behaviours are exactly reachable.›

― ‹Soundness: every state occurring in a behaviour is reachable.›
lemma beh_in_reach [rule_format]:
  "b  beh T  (s  set b. s  reach T)"
by (erule beh.induct) (auto)
  
― ‹Completeness: every reachable state occurs in some behaviour.›
lemma reach_in_beh:
  assumes "s  reach T" shows "b  beh T. s  set b" 
using assms
proof (induction s rule: reach.induct)
  case (r_init s) 
  hence "s  set [s]" and "[s]  beh T" by auto
  thus ?case by fastforce
next 
  case (r_trans s t)
  ― ‹pick a behaviour containing ‹s›, truncate it at ‹s›, then extend by ‹t››
  then obtain b where "b  beh T" and "s  set b" by blast
  from s  set b obtain b1 b2 where "b = b2 @ s # b1" by (blast dest: split_list)
  with b  beh T have "s # b1  beh T" by (blast intro: beh_prefix_closed)
  with (s, t)  trans T have "t # s # b1  beh T" by blast
  thus ?case by force
qed

― ‹The reachable states are exactly the states occurring in behaviours.›
lemma reach_equiv_beh_states: "reach T =  (set`(beh T))"
by (auto intro!: reach_in_beh beh_in_reach)


subsubsection ‹Specifications, observability, and implementation› 
(******************************************************************************)

text ‹Specifications add an observer function to transition systems.›

― ‹A specification extends a transition system with an observation function
    mapping states to observations.›
record ('s, 'o) spec = "'s TS" +
  obs :: "'s  'o" 

― ‹Behaviours and reachability do not depend on the observation function.›
lemma beh_obs_upd [simp]: "beh (S(| obs := x |)) = beh S"
by (safe) (erule beh.induct, auto)+

lemma reach_obs_upd [simp]: "reach (S(| obs := x |)) = reach S"
by (safe) (erule reach.induct, auto)+

text ‹Observable behaviour and reachability.›

― ‹Observable behaviours: behaviours mapped pointwise through ‹obs S›.›
definition 
  obeh :: "('s, 'o) spec  ('o list) set" where
  "obeh S  (map (obs S))`(beh S)"

― ‹Observable reachability: image of the reachable states under ‹obs S›.›
definition 
  oreach :: "('s, 'o) spec  'o set" where 
  "oreach S  (obs S)`(reach S)"

― ‹Observable analogue of reach_equiv_beh_states.›
lemma oreach_equiv_obeh_states:
  "oreach S =  (set`(obeh S))"
by (auto simp add: reach_equiv_beh_states oreach_def obeh_def)


lemma obeh_pi_translation:
  "(map pi)`(obeh S) = obeh (S(| obs := pi o (obs S) |))"
by (auto simp add: obeh_def image_comp)

lemma oreach_pi_translation:
  "pi`(oreach S) = oreach (S(| obs := pi o (obs S) |))"
by (auto simp add: oreach_def)


text ‹A predicate $P$ on the states of a specification is \emph{observable} 
if it cannot distinguish between states yielding the same observation. 
Equivalently, $P$ is observable if it is the inverse image under the 
observation function of a predicate on observations.›

― ‹‹observable ob P›: ‹P› cannot distinguish states with equal observations.›
definition 
  observable :: "['s  'o, 's set]  bool"
where
  "observable ob P  s s'. ob s = ob s'  s'  P  s  P"

― ‹Alternative: ‹P› is the inverse image of some predicate on observations.›
definition 
  observable2 :: "['s  'o, 's set]  bool"
where
  "observable2 ob P  Q. P = ob-`Q"

― ‹Alternative: ‹P› absorbs the inverse image of its own image.›
definition 
  observable3 :: "['s  'o, 's set]  bool"
where
  "observable3 ob P  ob-`ob`P  P"    ― ‹other direction holds trivially›

lemma observableE [elim]:
  "observable ob P; ob s = ob s'; s'  P  s  P"
by (unfold observable_def) (fast)

― ‹All three formulations are equivalent.›
lemma observable2_equiv_observable: "observable2 ob P = observable ob P"
by (unfold observable_def observable2_def) (auto)

lemma observable3_equiv_observable2: "observable3 ob P = observable2 ob P"
by (unfold observable3_def observable2_def) (auto)

― ‹Under the identity observation every predicate is observable.›
lemma observable_id [simp]: "observable id P" 
by (simp add: observable_def)

text ‹The set extension of a function @{term "ob"} is the left adjoint of 
a Galois connection on the powerset lattices over domain and range of @{term "ob"} 
where the right adjoint is the inverse image function.›

― ‹Galois connection: image is left adjoint to inverse image.›
lemma image_vimage_adjoints: "(ob`P  Q) = (P  ob-`Q)"
by auto

declare image_vimage_subset [simp, intro]
declare vimage_image_subset [simp, intro]

text ‹Similar but "reversed" (wrt to adjointness) relationships only hold under
additional conditions.›

― ‹Reversed direction, given observability of ‹P› ...›
lemma image_r_vimage_l: " Q  ob`P; observable ob P   ob-`Q  P"
by (auto)

― ‹... and given that ‹Q› contains only actual observations.›
lemma vimage_l_image_r: " ob-`Q  P; Q  range ob   Q  ob`P"
by (drule image_mono [where f=ob], auto)


text ‹Internal and external invariants›

― ‹An internal invariant whose observation image is bounded by ‹Q›
    yields the external invariant ‹Q›.›
lemma external_from_internal_invariant: 
  " reach S  P; (obs S)`P  Q   
   oreach S  Q"
by (auto simp add: oreach_def)

― ‹Same, with the side condition phrased via inverse image.›
lemma external_from_internal_invariant_vimage: 
  " reach S  P; P  (obs S)-`Q 
   oreach S  Q"
by (erule external_from_internal_invariant) (auto)


― ‹Conversely, external invariants transfer back to internal ones.›
lemma external_to_internal_invariant_vimage: 
  " oreach S  Q; (obs S)-`Q  P 
   reach S  P"
by (auto simp add: oreach_def)

― ‹Image-based version; needs observability of ‹P›.›
lemma external_to_internal_invariant:
  " oreach S  Q; Q  (obs S)`P; observable (obs S) P  
   reach S  P"
by (erule external_to_internal_invariant_vimage) (auto)


― ‹Equivalence of external and internal invariance (vimage form) ...›
lemma external_equiv_internal_invariant_vimage: 
  " P = (obs S)-`Q  
   (oreach S  Q) = (reach S  P)"
by (fast intro: external_from_internal_invariant_vimage
                external_to_internal_invariant_vimage 
         del: subsetI)

― ‹... and image form, under observability.›
lemma external_equiv_internal_invariant: 
  " (obs S)`P = Q; observable (obs S) P  
   (oreach S  Q) = (reach S  P)"
by (rule external_equiv_internal_invariant_vimage) (auto)


text ‹Our notion of implementation is inclusion of observable behaviours.›

― ‹‹implements pi Sa Sc›: every concrete observable behaviour, translated
    by the mediator function ‹pi›, is an abstract observable behaviour.›
definition 
  implements :: "['p  'o, ('s, 'o) spec, ('t, 'p) spec]  bool" where
  "implements pi Sa Sc  (map pi)`(obeh Sc)  obeh Sa"


text ‹Reflexivity and transitivity›

lemma implements_refl: "implements id S S"
by (auto simp add: implements_def)

― ‹Implementations compose; the mediator functions compose accordingly.›
lemma implements_trans:
  " implements pi1 S1 S2; implements pi2 S2 S3  
   implements (pi1 o pi2) S1 S3"
by (fastforce simp add: implements_def image_subset_iff)


text ‹Preservation of external invariants›

― ‹Implementation bounds the translated concrete observable reachable states.›
lemma implements_oreach:
  "implements pi Sa Sc  pi`(oreach Sc)  oreach Sa"
by (auto simp add: implements_def oreach_equiv_obeh_states dest!: subsetD)

― ‹External invariants of the abstract system are preserved (modulo ‹pi›) ...›
lemma external_invariant_preservation:
  " oreach Sa  Q; implements pi Sa Sc 
   pi`(oreach Sc)  Q"
by (rule subset_trans [OF implements_oreach]) (auto)

― ‹... and can be translated to external invariants of the concrete system.›
lemma external_invariant_translation:
  " oreach Sa  Q; pi-`Q  P; implements pi Sa Sc 
   oreach Sc  P"
apply (rule subset_trans [OF vimage_image_subset, of pi])
apply (rule subset_trans [where B="pi-`Q"])
apply (intro vimage_mono external_invariant_preservation, auto)
done


text ‹Preservation of internal invariants›

― ‹Transport an internal invariant of the abstract system to the concrete
    one: externalize, translate via ‹pi›, then internalize again.›
lemma internal_invariant_translation:
  " reach Sa  Pa; Pa  obs Sa -` Qa; pi -` Qa  Q; obs S -` Q  P;
     implements pi Sa S 
   reach S  P"
by (rule external_from_internal_invariant_vimage [
      THEN external_invariant_translation, 
      THEN external_to_internal_invariant_vimage])


(******************************************************************************)
subsection ‹Invariants›
(******************************************************************************)

text ‹First we define Hoare triples over transition relations and then
we derive proof rules to establish invariants.›


subsubsection ‹Hoare triples›
(******************************************************************************)

― ‹Hoare triple over a transition relation: the relational image of the
    precondition is contained in the postcondition.›
definition 
  PO_hoare :: "['s set, ('s × 's) set, 's set]  bool"
     ("(3{_} _ {> _})" [0, 0, 0] 90)
where
  "{pre} R {> post}  R``pre  post"

lemmas PO_hoare_defs = PO_hoare_def Image_def

― ‹Sanity check: pointwise characterization of the Hoare triple.›
lemma "{P} R {> Q} = (s t. s  P  (s, t)  R  t  Q)"
by (auto simp add: PO_hoare_defs)


text ‹Some essential facts about Hoare triples.›

― ‹Precondition strengthening (left consequence).›
lemma hoare_conseq_left [intro]:
  " {P'} R {> Q}; P  P' 
   {P} R {> Q}"
by (auto simp add: PO_hoare_defs)

― ‹Postcondition weakening (right consequence); not a default intro rule.›
lemma hoare_conseq_right:
  " {P} R {> Q'}; Q'  Q 
   {P} R {> Q}"
by (auto simp add: PO_hoare_defs)

― ‹Trivial triples: empty precondition and universal postcondition.›
lemma hoare_false_left [simp]:
  "{{}} R {> Q}"
by (auto simp add: PO_hoare_defs)

lemma hoare_true_right [simp]:
  "{P} R {> UNIV}"
by (auto simp add: PO_hoare_defs)

― ‹Postconditions may be split along intersection.›
lemma hoare_conj_right [intro!]:
  " {P} R {> Q1}; {P} R {> Q2} 
   {P} R {> Q1  Q2}"
by (auto simp add: PO_hoare_defs)


text ‹Special transition relations.›

― ‹The empty relation satisfies any triple.›
lemma hoare_stop [simp, intro!]:
  "{P} {} {> Q}"
by (auto simp add: PO_hoare_defs)

― ‹The identity relation requires the precondition to entail the postcondition.›
lemma hoare_skip [simp, intro!]: 
  "P  Q  {P} Id {> Q}"
by (auto simp add: PO_hoare_defs)

― ‹Triples distribute over (indexed) unions of transition relations.›
lemma hoare_trans_Un [iff]:
  "{P} R1  R2 {> Q} = ({P} R1 {> Q}  {P} R2 {> Q})"
by (auto simp add: PO_hoare_defs)

lemma hoare_trans_UN [iff]:
  "{P}  x. R x {> Q} = (x. {P} R x {> Q})"
by (auto simp add: PO_hoare_defs)

― ‹Pointwise application of a Hoare triple to a single transition.›
lemma hoare_apply:
  "{P} R {>Q}  x  P  (x, y)  R  y  Q"
by (auto simp add: PO_hoare_defs)



subsubsection ‹Characterization of reachability›
(******************************************************************************)

― ‹Any invariant contains the initial states ...›
lemma reach_init: "reach T  I  init T  I"
by (auto dest: subsetD)

― ‹... and is preserved by the transition relation.›
lemma reach_trans: "reach T  I  {reach T} trans T {> I}"
by (auto simp add: PO_hoare_defs)


text ‹Useful consequences.›

corollary init_reach [iff]: "init T  reach T" 
by (rule reach_init, simp)

corollary trans_reach [iff]: "{reach T} trans T {> reach T}" 
by (rule reach_trans, simp)



subsubsection ‹Invariant proof rules›
(******************************************************************************)

text ‹Basic proof rule for invariants.›

― ‹Basic invariant rule: initiation plus preservation.›
lemma inv_rule_basic:
  " init T  P; {P} (trans T) {> P} 
   reach T  P"
by (safe, erule reach.induct, auto simp add: PO_hoare_def)


text ‹General invariant proof rule. This rule is complete (set 
@{term "I = reach T"}).›

― ‹Prove ‹P› via a stronger, inductive invariant ‹I›.›
lemma inv_rule:
  " init T  I; I  P; {I} (trans T) {> I} 
   reach T  P"
apply (rule subset_trans, auto)              ― ‹strengthen goal›
apply (erule reach.induct, auto simp add: PO_hoare_def)
done

text ‹The following rule is equivalent to the previous one.›

― ‹Variant where the precondition may additionally assume reachability.›
lemma INV_rule:
  " init T  I; {I  reach T} (trans T) {> I} 
   reach T  I"
by (safe, erule reach.induct, auto simp add: PO_hoare_defs)


text ‹Proof of equivalence.›

lemma inv_rule_from_INV_rule:
  " init T  I; I  P; {I} (trans T) {> I} 
   reach T  P"
apply (rule subset_trans, auto del: subsetI)
apply (rule INV_rule, auto)
done

lemma INV_rule_from_inv_rule:
  " init T  I; {I  reach T} (trans T) {> I} 
   reach T  I"
by (rule_tac I="I  reach T" in inv_rule, auto)


text ‹Incremental proof rule for invariants using auxiliary invariant(s). 
This rule might have become obsolete by addition of $INV\_rule$.›

lemma inv_rule_incr:
  " init T  I; {I  J} (trans T) {> I}; reach T  J     
   reach T  I"
by (rule INV_rule, auto)


(******************************************************************************)
subsection ‹Refinement›
(******************************************************************************)

text ‹Our notion of refinement is simulation. We first define a general
notion of relational Hoare tuple, which we then use to define the refinement
proof obligation.  Finally, we show that observation-consistent refinement 
of specifications implies the implementation relation between them.›


subsubsection ‹Relational Hoare tuples›
(******************************************************************************)

text ‹Relational Hoare tuples formalize the following generalized simulation 
diagram:

\begin{verbatim}
                           o -- Ra --> o
                           |           |
                          pre         post
                           |           |
                           v           V
                           o -- Rc --> o
\end{verbatim}

Here, $Ra$ and $Rc$ are the abstract and concrete transition relations, 
and $pre$ and $post$ are the pre- and post-relations.
(In the definiton below, the operator @{term "(O)"} stands for relational 
composition, which is defined as follows: @{thm relcomp_def [no_vars]}.)›

― ‹Relational Hoare tuple: the pre-relation composed with the concrete step
    is simulated by an abstract step composed with the post-relation.›
― ‹NOTE(review): the mixfix template has four argument slots but only three
    priorities are listed; presumably the missing priority defaults to 0 —
    confirm against the mixfix rules in the Isar reference manual.›
definition
  PO_rhoare :: 
    "[('s × 't) set, ('s × 's) set, ('t × 't) set, ('s × 't) set]  bool"
     ("(4{_} _, _ {> _})" [0, 0, 0] 90)
where
  "{pre} Ra, Rc {> post}  pre O Rc  Ra O post"

lemmas PO_rhoare_defs = PO_rhoare_def relcomp_unfold


text ‹Facts about relational Hoare tuples.›

― ‹Pre-relation strengthening (left consequence).›
lemma relhoare_conseq_left [intro]:
  " {pre'} Ra, Rc {> post}; pre  pre'  
   {pre} Ra, Rc {> post}"
by (auto simp add: PO_rhoare_defs dest!: subsetD)

― ‹Post-relation weakening (right consequence).›
lemma relhoare_conseq_right:                    ― ‹do NOT declare [intro]›
  " {pre} Ra, Rc {> post'}; post'  post  
   {pre} Ra, Rc {> post}"
by (auto simp add: PO_rhoare_defs)

lemma relhoare_false_left [simp]:               ― ‹do NOT declare [intro]›
  "{ {} } Ra, Rc {> post}"
by (auto simp add: PO_rhoare_defs)

― ‹A universal post-relation still requires abstract steps to exist.›
lemma relhoare_true_right [simp]:                ― ‹not true in general›
  "{pre} Ra, Rc {> UNIV} = (Domain (pre O Rc)  Domain Ra)"
by (auto simp add: PO_rhoare_defs)

lemma Domain_rel_comp [intro]:
  "Domain pre  R  Domain (pre O Rc)  R"
by (auto simp add: Domain_def)

― ‹Stuttering: identity steps on both sides preserve any relation.›
lemma rel_hoare_skip [iff]: "{R} Id, Id {> R}"
by (auto simp add: PO_rhoare_def)


text ‹Reflexivity and transitivity.›

― ‹Reflexivity of simulation with identity pre/post relations.›
lemma relhoare_refl [simp]: "{Id} R, R {> Id}"
by (auto simp add: PO_rhoare_defs)

― ‹Transitivity: compose the simulation relations of two stacked tuples.›
lemma rhoare_trans:
  " {R1} T1, T2 {> R1}; {R2} T2, T3 {> R2} 
   {R1 O R2} T1, T3 {> R1 O R2}"
apply (auto simp add: PO_rhoare_def del: subsetI)
apply (drule subset_refl [THEN relcomp_mono, where r=R1])
apply (drule subset_refl [THEN [2] relcomp_mono, where s=R2])
apply (auto simp add: O_assoc del: subsetI)
done


text ‹Conjunction in the post-relation cannot be split in general.  However, 
here are two useful special cases.  In the first case the abstract transition 
relation is deterministic and in the second case one conjunct is a cartesian 
product of two state predicates.›

― ‹Post-relations may be split if the abstract relation is deterministic.›
lemma relhoare_conj_right_det:                 
  " {pre} Ra, Rc {> post1}; {pre} Ra, Rc {> post2};
     single_valued Ra                            ― ‹only for deterministic Ra›!›
    {pre} Ra, Rc {> post1  post2}"
by (auto simp add: PO_rhoare_defs dest: single_valuedD dest!: subsetD)

― ‹A cartesian-product conjunct can be discharged by two ordinary
    Hoare triples on the domain and range of the pre-relation.›
lemma relhoare_conj_right_cartesian [intro]:
  " {Domain pre} Ra {> I}; {Range pre} Rc {> J};
     {pre} Ra, Rc {> post}  
    {pre} Ra, Rc {> post  I × J}"
by (force simp add: PO_rhoare_defs PO_hoare_defs Domain_def Range_def)


text ‹Separate rule for cartesian products.›

corollary relhoare_cartesian:
  " {Domain pre} Ra {> I}; {Range pre} Rc {> J};
     {pre} Ra, Rc {> post}                       ― ‹any post›, including UNIV›!›
    {pre} Ra, Rc {> I × J}"
by (auto intro: relhoare_conseq_right)


text ‹Unions of transition relations.›

― ‹Tuples distribute over (indexed) unions of concrete transition relations.›
lemma relhoare_concrete_Un [simp]:
  "{pre} Ra, Rc1  Rc2 {> post} 
   = ({pre} Ra, Rc1 {> post}  {pre} Ra, Rc2 {> post})"
apply (auto simp add: PO_rhoare_defs)
apply (auto dest!: subsetD)
done

lemma relhoare_concrete_UN [simp]:
  "{pre} Ra, x. Rc x {> post} = (x. {pre} Ra, Rc x {> post})"
apply (auto simp add: PO_rhoare_defs)
apply (auto dest!: subsetD)
done

― ‹For abstract unions it suffices to simulate one branch.›
lemma relhoare_abstract_Un_left [intro]:
  " {pre} Ra1, Rc {> post} 
   {pre} Ra1  Ra2, Rc {> post}"
by (auto simp add: PO_rhoare_defs)

lemma relhoare_abstract_Un_right [intro]:
  " {pre} Ra2, Rc {> post} 
   {pre} Ra1  Ra2, Rc {> post}"
by (auto simp add: PO_rhoare_defs)

lemma relhoare_abstract_UN [intro]:   ― ‹! might be too aggressive? INDEED.›
  " {pre} Ra x, Rc {> post} 
   {pre} x. Ra x, Rc {> post}"
apply (auto simp add: PO_rhoare_defs)
apply (auto dest!: subsetD)
done

text ‹Inclusion of abstract transition relations.›

― ‹Monotonicity in the abstract transition relation.›
lemma relhoare_abstract_trans_weak [intro]:
  " {pre} Ra', Rc {> post}; Ra'  Ra  
  {pre} Ra, Rc {> post}"
by (auto simp add:PO_rhoare_defs)


subsubsection ‹Refinement proof obligations›
(******************************************************************************)

text ‹A transition system refines another one if the initial states and the
transitions are refined. 
Initial state refinement means that for each concrete initial state there is 
a related abstract one. Transition refinement means that the simulation 
relation is preserved (as expressed by a relational Hoare tuple). 
›

― ‹Refinement proof obligation via simulation relation ‹R›:
    initial-state refinement plus transition refinement.›
definition 
  PO_refines :: 
    "[('s × 't) set, ('s, 'a) TS_scheme, ('t, 'b) TS_scheme]  bool" 
where
  "PO_refines R Ta Tc  (
       init Tc  R``(init Ta)
      {R} (trans Ta), (trans Tc) {> R} 
  )"

― ‹Introduction and elimination rules for the definition.›
lemma PO_refinesI:
  " init Tc  R``(init Ta); {R} (trans Ta), (trans Tc) {> R}   PO_refines R Ta Tc"
by (simp add: PO_refines_def)

lemma PO_refinesE [elim]:
  " PO_refines R Ta Tc;  init Tc  R``(init Ta); {R} (trans Ta), (trans Tc) {> R}   P  
  P"
by (simp add: PO_refines_def)


text ‹Basic refinement rule. This is just an introduction rule for the
definition.›

lemma refine_basic:
  " init Tc  R``(init Ta); {R} (trans Ta), (trans Tc) {> R} 
   PO_refines R Ta Tc"
by (simp add: PO_refines_def)


text ‹The following proof rule uses individual invariants @{term "I"} 
and @{term "J"} of the concrete and abstract systems to strengthen the 
simulation relation $R$. 

The hypotheses state that these state predicates are indeed invariants.
Note that the pre-condition of the invariant preservation hypotheses for 
@{term "I"} and @{term "J"} are strengthened by adding the predicates 
@{term "Domain (R  UNIV × J)"} and @{term "Range (R  I × UNIV)"}, 
respectively.  In particular, the latter predicate may be essential, if a 
concrete invariant depends on the simulation relation and an abstract invariant, 
i.e. to "transport" abstract invariants to the concrete system.›

― ‹Initial-state refinement can be strengthened with invariants ‹I› and ‹J›.›
lemma refine_init_using_invariants:
  " init Tc  R``(init Ta); init Ta  I; init Tc  J 
   init Tc  (R  I × J)``(init Ta)"
by (auto simp add: Image_def dest!: bspec subsetD)

― ‹Transition refinement strengthened with invariants; the invariant
    preservation premises may assume the restricted simulation relation
    via its domain resp. range.›
lemma refine_trans_using_invariants:
  " {R  I × J} (trans Ta), (trans Tc) {> R};
     {I  Domain (R  UNIV × J)} (trans Ta) {> I}; 
     {J  Range (R  I × UNIV)} (trans Tc) {> J} 
   {R  I × J} (trans Ta), (trans Tc) {> R   I × J}"
by (rule relhoare_conj_right_cartesian) (auto)


text ‹This is our main rule for refinements.›

lemma refine_using_invariants:
  " {R  I × J} (trans Ta), (trans Tc) {> R};
     {I  Domain (R  UNIV × J)} (trans Ta) {> I}; 
     {J  Range (R  I × UNIV)} (trans Tc) {> J}; 
     init Tc  R``(init Ta); 
     init Ta  I; init Tc  J 
   PO_refines (R  I × J) Ta Tc"
by (unfold PO_refines_def)
   (intro refine_init_using_invariants refine_trans_using_invariants conjI)


subsubsection ‹Deriving invariants from refinements›
(******************************************************************************)

text ‹Some invariants can only be proved after the simulation has been 
established, because they depend on the simulation relation and some abstract
invariants.  Here is a rule to derive invariant theorems from the refinement.›

― ‹The range of the simulation relation holds initially ...›
lemma PO_refines_implies_Range_init:
  "PO_refines R Ta Tc  init Tc  Range R"
by (auto simp add: PO_refines_def)

― ‹... is preserved by concrete transitions ...›
lemma PO_refines_implies_Range_trans:
  "PO_refines R Ta Tc  {Range R} trans Tc {> Range R}"
by (auto simp add: PO_refines_def PO_rhoare_def PO_hoare_def)

― ‹... and hence is a concrete invariant.›
lemma PO_refines_implies_Range_invariant:
  "PO_refines R Ta Tc  reach Tc  Range R"
by (rule INV_rule)
   (auto intro!: PO_refines_implies_Range_init 
                 PO_refines_implies_Range_trans)

text ‹The following rules are more useful in proofs.›

corollary INV_init_from_refinement: 
  " PO_refines R Ta Tc; Range R  I 
   init Tc  I"
by (drule PO_refines_implies_Range_init, auto)

corollary INV_trans_from_refinement: 
  " PO_refines R Ta Tc; K  Range R; Range R  I 
   {K} trans Tc {> I}"
apply (drule PO_refines_implies_Range_trans)
apply (auto intro: hoare_conseq_right)
done

corollary INV_from_refinement: 
  " PO_refines R Ta Tc; Range R  I 
   reach Tc  I"
by (drule PO_refines_implies_Range_invariant, fast)


subsubsection ‹Refinement of specifications›
(******************************************************************************)

text ‹Lift relation membership to finite sequences›

― ‹Pointwise lifting of a relation to equal-length lists.›
inductive_set 
  seq_lift :: "('s × 't) set  ('s list × 't list) set" 
  for R :: "('s × 't) set"
where
  sl_nil [iff]: "([], [])  seq_lift R"
| sl_cons [intro]: 
    " (xs, ys)  seq_lift R; (x, y)  R   (x#xs, y#ys)  seq_lift R"

― ‹Inversion when the right-hand list is non-empty.›
inductive_cases sl_cons_right_invert: "(ba', t # bc)  seq_lift R" 


text ‹For each concrete behaviour there is a related abstract one.›

lemma behaviour_refinement:
  " PO_refines R Ta Tc; bc  beh Tc 
   ba  beh Ta. (ba, bc)  seq_lift R" 
apply (erule beh.induct, auto)
  ― ‹case: singleton›
  apply (clarsimp simp add: PO_refines_def Image_def)
  apply (drule subsetD, auto)

  ― ‹case: cons; first construct related abstract state›
  apply (erule sl_cons_right_invert, clarsimp)
  apply (rename_tac s bc s' ba t)
  ― ‹now construct abstract transition›
  apply (auto simp add: PO_refines_def PO_rhoare_def)
  apply (thin_tac "X  Y" for X Y)
  apply (drule subsetD, auto)
done


text ‹Observation consistency of a relation is defined using a mediator 
function @{term "pi"} to abstract the concrete observation.  This allows us 
to also refine the observables as we move down a refinement branch.
›

― ‹Observation consistency of ‹R› wrt the mediator function ‹pi›:
    related states yield the same observation after translation by ‹pi›.›
definition 
  obs_consistent :: 
    "[('s × 't) set, 'p  'o, ('s, 'o) spec, ('t, 'p) spec]  bool"
where
  "obs_consistent R pi Sa Sc  (s t. (s, t)  R  pi (obs Sc t) = obs Sa s)"

lemma obs_consistent_refl [iff]: "obs_consistent Id id S S"
by (simp add: obs_consistent_def)

― ‹Composition along a refinement branch composes the mediator functions.›
lemma obs_consistent_trans [intro]: 
  " obs_consistent R1 pi1 S1 S2; obs_consistent R2 pi2 S2 S3 
   obs_consistent (R1 O R2) (pi1 o pi2) S1 S3"
by (auto simp add: obs_consistent_def)

lemma obs_consistent_empty: "obs_consistent {} pi Sa Sc"
by (auto simp add: obs_consistent_def)

― ‹Observation consistency is anti-monotone in the relation.›
lemma obs_consistent_conj1 [intro]: 
  "obs_consistent R pi Sa Sc  obs_consistent (R  R') pi Sa Sc"
by (auto simp add: obs_consistent_def)

lemma obs_consistent_conj2 [intro]: 
  "obs_consistent R pi Sa Sc  obs_consistent (R'  R) pi Sa Sc"
by (auto simp add: obs_consistent_def)

― ‹Lifted to behaviours: related behaviours have equal (translated) observations.›
lemma obs_consistent_behaviours:
  " obs_consistent R pi Sa Sc; bc  beh Sc; ba  beh Sa; (ba, bc)  seq_lift R
   map pi (map (obs Sc) bc) = map (obs Sa) ba"
by (erule seq_lift.induct) (auto simp add: obs_consistent_def) 


text ‹Definition of refinement proof obligations.›

― ‹Full refinement: observation-consistent simulation.›
definition 
  refines :: 
    "[('s × 't) set, 'p  'o, ('s, 'o) spec, ('t, 'p) spec]  bool" 
where
  "refines R pi Sa Sc  obs_consistent R pi Sa Sc  PO_refines R Sa Sc"

lemmas refines_defs = 
  refines_def PO_refines_def

― ‹Introduction and elimination rules for the definition.›
lemma refinesI: 
  " PO_refines R Sa Sc; obs_consistent R pi Sa Sc 
   refines R pi Sa Sc"
by (simp add: refines_def)

lemma refinesE [elim]: 
  " refines R pi Sa Sc;  PO_refines R Sa Sc; obs_consistent R pi Sa Sc   P 
   P"
by (simp add: refines_def)


text ‹Reflexivity and transitivity of refinement.›

lemma refinement_reflexive: "refines Id id S S"
by (auto simp add: refines_defs)

lemma refinement_transitive: 
  " refines R1 pi1 S1 S2; refines R2 pi2 S2 S3  
   refines (R1 O R2) (pi1 o pi2) S1 S3"
apply (auto simp add: refines_defs del: subsetI
            intro: rhoare_trans)
apply (fastforce dest: Image_mono)
done


text ‹Soundness of refinement for proving implementation›

― ‹Each translated concrete observable behaviour is an abstract one.›
lemma observable_behaviour_refinement:
  " refines R pi Sa Sc; bc  obeh Sc   map pi bc  obeh Sa"
by (auto simp add: refines_def obeh_def image_def
         dest!: behaviour_refinement obs_consistent_behaviours)

― ‹Soundness: refinement establishes the implementation relation.›
theorem refinement_soundness: 
  "refines R pi Sa Sc  implements pi Sa Sc"
by (auto simp add: implements_def 
         elim!: observable_behaviour_refinement)


text ‹Extended versions of refinement proof rules including observations›

lemmas Refinement_basic = refine_basic [THEN refinesI]
lemmas Refinement_using_invariants = refine_using_invariants [THEN refinesI]

― ‹The simulation relation may always be restricted to reachable states.›
lemma refines_reachable_strengthening:
  "refines R pi Sa Sc  refines (R  reach Sa × reach Sc) pi Sa Sc"
by (auto intro!: Refinement_using_invariants)


text ‹Inheritance of internal invariants through refinements›

― ‹Variants of the invariant-from-refinement rules for full refinements.›
lemma INV_init_from_Refinement:
  "refines R pi Sa Sc; Range R  I  init Sc  I" 
by (blast intro: INV_init_from_refinement) 

lemma INV_trans_from_Refinement:
  " refines R pi Sa Sc; K  Range R; Range R  I  {K} TS.trans Sc {> I}"
by (blast intro: INV_trans_from_refinement) 


lemma INV_from_Refinement_basic: 
  " refines R pi Sa Sc; Range R  I   reach Sc  I"
by (rule INV_from_refinement) blast

― ‹Derive a concrete invariant using reachability of both systems to
    discharge the side conditions of the restricted simulation relation.›
lemma INV_from_Refinement_using_invariants: 
  assumes "refines R pi Sa Sc" "Range (R  I × J)  K" 
          "reach Sa  I" "reach Sc  J" 
  shows "reach Sc  K" 
proof (rule INV_from_Refinement_basic)
  show "refines (R  reach Sa × reach Sc) pi Sa Sc" using assms(1) 
    by (rule refines_reachable_strengthening)
next 
  show "Range (R  reach Sa × reach Sc)  K" using assms(2-4) by blast
qed


end

Theory Messages

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Messages.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Messages.thy 133008 2017-01-17 13:53:14Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Message datatype and quotient construction based on Diffie-Hellman 
  equational theory.

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Message definitions› 

theory Messages
imports Main
begin

(****************************************************************************************)
subsection ‹Messages›
(****************************************************************************************)

text ‹Agents›
― ‹Agents are indexed by natural numbers.›
datatype
  agent = Agent nat

text ‹Nonces›
― ‹Abstract type of freshness identifiers (no structure needed).›
typedecl fid_t

― ‹A fresh value pairs a freshness identifier with a natural number.›
datatype fresh_t = 
  mk_fresh "fid_t" "nat"      (infixr "$" 65) 

― ‹Projections of fresh values.›
fun fid :: "fresh_t  fid_t" where
  "fid (f $ n) = f"

fun num :: "fresh_t  nat" where
  "num (f $ n) = n"

― ‹Nonces are fresh values or attacker nonces (indexed by naturals).›
datatype
  nonce_t =
    nonce_fresh "fresh_t"
  | nonce_atk "nat"


text ‹Keys›
― ‹Long-term keys: pairwise shared keys and public/private keys of agents.›
datatype ltkey =
  sharK "agent" "agent"
| publK "agent"
| privK "agent"

― ‹Ephemeral public/private keys, indexed by nonces.›
datatype ephkey =
  epublK nonce_t
| eprivK nonce_t

― ‹Channel tags: insecure, authentic, confidential, secure.›
datatype tag = insec | auth | confid | secure

text ‹Messages›

― ‹Concrete (free) message terms; ‹cExp› is exponentiation, the only
    constructor subject to the equational theory ‹eq› below.›
datatype cmsg =
  cAgent "agent"
| cNumber "nat"
| cNonce "nonce_t"
| cLtK "ltkey"
| cEphK "ephkey"
| cPair cmsg cmsg
| cEnc cmsg cmsg
| cAenc cmsg cmsg
| cSign cmsg cmsg
| cHash cmsg
| cTag tag
| cExp cmsg cmsg

― ‹Atomic messages: agents, numbers, nonces, keys, and tags; all composed
    messages (pairs, encryptions, signatures, hashes, exponentiations) are
    non-atomic.›
fun catomic :: "cmsg  bool"
where
  "catomic (cAgent _) = True"
| "catomic (cNumber _) = True"
| "catomic (cNonce _) = True"
| "catomic (cLtK _) = True"
| "catomic (cEphK _) = True"
| "catomic (cTag _) = True"
|" catomic _ = False"


― ‹Diffie-Hellman equational theory on concrete messages: exponent
    permutation ‹(a^b)^c = (a^c)^b›, closed under all message constructors
    and under transitivity.›
inductive eq :: "cmsg  cmsg  bool"
where
― ‹equations›
  Permute [intro]:"eq (cExp (cExp a b) c) (cExp (cExp a c) b)"
― ‹closure by context›
 | Tag[intro]: "eq (cTag t) (cTag t)"
 | Agent[intro]: "eq (cAgent A) (cAgent A)"
 | Nonce[intro]:"eq (cNonce x) (cNonce x)"
 | Number[intro]:"eq (cNumber x) (cNumber x)"
 | LtK[intro]:"eq (cLtK x) (cLtK x)"
 | EphK[intro]:"eq (cEphK x) (cEphK x)"
 | Pair[intro]:"eq a b  eq c d  eq (cPair a c) (cPair b d)"
 | Enc[intro]:"eq a b  eq c d  eq (cEnc a c) (cEnc b d)"
 | Aenc[intro]:"eq a b  eq c d  eq (cAenc a c) (cAenc b d)"
 | Sign[intro]:"eq a b  eq c d  eq (cSign a c) (cSign b d)"
 | Hash[intro]:"eq a b  eq (cHash a) (cHash b)"
 | Exp[intro]:"eq a b  eq c d  eq (cExp a c) (cExp b d)"
― ‹reflexive closure is not needed here because the context closure implies it›
― ‹symmetric closure is not needed as it is easier to include equations in both directions›
― ‹transitive closure›
 | Tr[intro]: "eq a b  eq b c  eq a c"

(* Symmetry of eq.  NOTE(review): eq_sym and eq_Sym are identical up to the
   [intro] attribute; eq_sym looks redundant but is kept in case it is
   referenced elsewhere in the development. *)
lemma eq_sym: "eq a b  eq b a"
by (auto elim: eq.induct)

lemma eq_Sym [intro]: "eq a b  eq b a"
by (auto elim: eq.induct)

(* Reflexivity follows from the per-constructor closure rules. *)
lemma eq_refl [simp, intro]: "eq a a"
by (induction a, auto)

text ‹Inversion ("inductive cases") lemmas: because of the transitivity rule,
we prove the right inversion lemmas by hand rather than relying on the
automatically generated cases.›
lemma eq_Number: "eq (cNumber N) y   y = cNumber N"
  by (induction "cNumber N" y rule: eq.induct, auto)
lemma eq_Agent: "eq (cAgent A) y   y = cAgent A"
  by (induction "cAgent A" y rule: eq.induct, auto)
lemma eq_Nonce: "eq (cNonce N) y   y = cNonce N"
  by (induction "cNonce N" y rule: eq.induct, auto)
lemma eq_LtK: "eq (cLtK N) y   y = cLtK N"
  by (induction "cLtK N" y rule: eq.induct, auto)
lemma eq_EphK: "eq (cEphK N) y   y = cEphK N"
  by (induction "cEphK N" y rule: eq.induct, auto)
lemma eq_Tag: "eq (cTag N) y   y = cTag N"
  by (induction "cTag N" y rule: eq.induct, auto)
(* For composed messages, equality is preserved down to the components
   (except for cExp, whose arguments may be permuted; see eq_Exp). *)
lemma eq_Hash: "eq (cHash X) y  Y. y = cHash Y  eq X Y"
  by (drule eq.induct [where P="λx. λy.  X. x = cHash X  (Y. y = cHash Y  eq X Y)"],
      auto elim!:Tr)
lemma eq_Pair: "eq (cPair X Y) y    X' Y'. y = cPair X' Y'  eq X X'  eq Y Y'"
  apply (drule eq.induct [where 
    P="λx. λy.  X Y. x = cPair X Y  ( X' Y'. y = cPair X' Y'  eq X X'  eq Y Y')"])
  apply (auto elim!: Tr)
done
lemma eq_Enc: "eq (cEnc X Y) y    X' Y'. y = cEnc X' Y'  eq X X'  eq Y Y'"
  apply (drule eq.induct [where 
    P="λx. λy.  X Y. x = cEnc X Y  ( X' Y'. y = cEnc X' Y'  eq X X'  eq Y Y')"])
  apply (auto elim!: Tr)
done
lemma eq_Aenc: "eq (cAenc X Y) y    X' Y'. y = cAenc X' Y'  eq X X'  eq Y Y'"
  apply (drule eq.induct [where 
    P="λx. λy.  X Y. x = cAenc X Y  ( X' Y'. y = cAenc X' Y'  eq X X'  eq Y Y')"])
  apply (auto elim!: Tr)
done
lemma eq_Sign: "eq (cSign X Y) y    X' Y'. y = cSign X' Y'  eq X X'  eq Y Y'"
  apply (drule eq.induct [where 
    P="λx. λy.  X Y. x = cSign X Y  ( X' Y'. y = cSign X' Y'  eq X X'  eq Y Y')"])
  apply (auto elim!: Tr)
done
lemma eq_Exp: "eq (cExp X Y) y    X' Y'. y = cExp X' Y'"
  apply (drule eq.induct [where 
    P="λx. λy.  X Y. x = cExp X Y  ( X' Y'. y = cExp X' Y')"])
  apply (auto elim!: Tr)
done

(* Bundle the inversion lemmas (and their symmetric variants) as
   destruction rules for the classical reasoner. *)
lemmas eqD_aux = eq_Number eq_Agent eq_Nonce eq_LtK eq_EphK eq_Tag
                    eq_Hash eq_Pair eq_Enc eq_Aenc eq_Sign eq_Exp
lemmas eqD [dest] = eqD_aux eqD_aux [OF eq_Sym]


text ‹Quotient construction›

(* msg is the quotient of concrete messages by the DH equational theory eq;
   Re/Ab are the representation/abstraction morphisms. *)
quotient_type msg = cmsg / eq
morphisms Re Ab
by (auto simp add:equivp_def)


(* Lift each concrete constructor to the quotient.  The side condition is
   respectfulness w.r.t. eq: trivial for the atomic constructors, and
   discharged by auto (using the congruence rules of eq) for the others. *)
lift_definition Number :: "nat  msg" is cNumber by -
lift_definition Nonce :: "nonce_t  msg" is cNonce by -
lift_definition Agent :: "agent  msg" is cAgent by -
lift_definition LtK :: "ltkey  msg" is cLtK by -
lift_definition EphK :: "ephkey  msg" is cEphK by -
lift_definition Pair :: "msg  msg  msg" is cPair by auto
lift_definition Enc :: "msg  msg  msg" is cEnc by auto
lift_definition Aenc :: "msg  msg  msg" is cAenc by auto
lift_definition Exp :: "msg  msg  msg" is cExp by auto
lift_definition Tag :: "tag  msg" is cTag by -
lift_definition Hash :: "msg  msg" is cHash by auto
lift_definition Sign :: "msg  msg  msg" is cSign by auto

lemmas msg_defs = 
  Agent_def Number_def Nonce_def LtK_def EphK_def Pair_def 
  Enc_def Aenc_def Exp_def Hash_def Tag_def Sign_def


text ‹Commutativity of exponents›

(* On the quotient, the Permute equation becomes a genuine equality. *)
lemma permute_exp [simp]: "Exp (Exp X Y) Z = Exp (Exp X Z) Y"
by (transfer, auto)

(* catomic respects eq (eq-related messages have the same constructor class),
   so it lifts to the quotient. *)
lift_definition atomic :: "msg  bool" is catomic by (erule eq.induct, auto)

abbreviation 
  composed :: "msg  bool" where
  "composed X  ¬atomic X"

(* Atomicity of each lifted constructor, proved by transfer to catomic. *)
lemma atomic_Agent [simp, intro]: "atomic (Agent X)" by (transfer, auto)
lemma atomic_Tag [simp, intro]: "atomic (Tag X)" by (transfer, auto)
lemma atomic_Nonce [simp, intro]: "atomic (Nonce X)" by (transfer, auto)
lemma atomic_Number [simp, intro]: "atomic (Number X)" by (transfer, auto)
lemma atomic_LtK [simp, intro]: "atomic (LtK X)" by (transfer, auto)
lemma atomic_EphK [simp, intro]: "atomic (EphK X)" by (transfer, auto)

lemma non_atomic_Pair [simp]: "¬atomic (Pair x y)" by (transfer, auto)
lemma non_atomic_Enc [simp]: "¬atomic (Enc x y)" by (transfer, auto)
lemma non_atomic_Aenc [simp]: "¬atomic (Aenc x y)" by (transfer, auto)
lemma non_atomic_Sign [simp]: "¬atomic (Sign x y)" by (transfer, auto)
lemma non_atomic_Exp [simp]: "¬atomic (Exp x y)" by (transfer, auto)
lemma non_atomic_Hash [simp]: "¬atomic (Hash x)" by (transfer, auto)

(* Freeness of the quotient constructors: injectivity (X_X lemmas) and
   pairwise distinctness (X_Y lemmas), each proved by transfer using the
   hand-proved eq inversion lemmas.  Note: there is no Exp_Exp injectivity
   lemma, since Exp is only injective up to permutation of exponents
   (cf. permute_exp and Exp_Exp_Gen_inj below). *)
lemma Nonce_Nonce: "(Nonce X = Nonce X') = (X = X')" by transfer auto
lemma Nonce_Agent: "Nonce X  Agent X'" by transfer auto
lemma Nonce_Number: "Nonce X  Number X'" by transfer auto
lemma Nonce_Hash: "Nonce X  Hash  X'" by transfer auto
lemma Nonce_Tag: "Nonce X  Tag  X'" by transfer auto
lemma Nonce_EphK: "Nonce X  EphK  X'" by transfer auto
lemma Nonce_LtK: "Nonce X  LtK  X'" by transfer auto
lemma Nonce_Pair: "Nonce X  Pair  X' Y'" by transfer auto
lemma Nonce_Enc: "Nonce X  Enc  X' Y'" by transfer auto
lemma Nonce_Aenc: "Nonce X  Aenc  X' Y'" by transfer auto
lemma Nonce_Sign: "Nonce X  Sign  X' Y'" by transfer auto
lemma Nonce_Exp: "Nonce X  Exp  X' Y'" by transfer auto

lemma Agent_Nonce: "Agent X  Nonce  X'" by transfer auto
lemma Agent_Agent: "(Agent X = Agent X') = (X = X')" by transfer auto
lemma Agent_Number: "Agent X  Number  X'" by transfer auto
lemma Agent_Hash: "Agent X  Hash  X'" by transfer auto
lemma Agent_Tag: "Agent X  Tag  X'" by transfer auto
lemma Agent_EphK: "Agent X  EphK  X'" by transfer auto
lemma Agent_LtK: "Agent X  LtK  X'" by transfer auto
lemma Agent_Pair: "Agent X  Pair  X' Y'" by transfer auto
lemma Agent_Enc: "Agent X  Enc  X' Y'" by transfer auto
lemma Agent_Aenc: "Agent X  Aenc  X' Y'" by transfer auto
lemma Agent_Sign: "Agent X  Sign  X' Y'" by transfer auto
lemma Agent_Exp: "Agent X  Exp  X' Y'" by transfer auto

lemma Number_Nonce: "Number X  Nonce  X'" by transfer auto
lemma Number_Agent: "Number X  Agent  X'" by transfer auto
lemma Number_Number: "(Number X = Number X') = (X = X')" by transfer auto
lemma Number_Hash: "Number X  Hash  X'" by transfer auto
lemma Number_Tag: "Number X  Tag  X'" by transfer auto
lemma Number_EphK: "Number X  EphK  X'" by transfer auto
lemma Number_LtK: "Number X  LtK  X'" by transfer auto
lemma Number_Pair: "Number X  Pair  X' Y'" by transfer auto
lemma Number_Enc: "Number X  Enc  X' Y'" by transfer auto
lemma Number_Aenc: "Number X  Aenc  X' Y'" by transfer auto
lemma Number_Sign: "Number X  Sign  X' Y'" by transfer auto
lemma Number_Exp: "Number X  Exp  X' Y'" by transfer auto

lemma Hash_Nonce: "Hash X  Nonce  X'" by transfer auto
lemma Hash_Agent: "Hash X  Agent  X'" by transfer auto
lemma Hash_Number: "Hash X  Number  X'" by transfer auto
lemma Hash_Hash: "(Hash X = Hash X') = (X = X')" by transfer auto
lemma Hash_Tag: "Hash X  Tag  X'" by transfer auto
lemma Hash_EphK: "Hash X  EphK  X'" by transfer auto
lemma Hash_LtK: "Hash X  LtK  X'" by transfer auto
lemma Hash_Pair: "Hash X  Pair  X' Y'" by transfer auto
lemma Hash_Enc: "Hash X  Enc  X' Y'" by transfer auto
lemma Hash_Aenc: "Hash X  Aenc  X' Y'" by transfer auto
lemma Hash_Sign: "Hash X  Sign  X' Y'" by transfer auto
lemma Hash_Exp: "Hash X  Exp  X' Y'" by transfer auto

lemma Tag_Nonce: "Tag X  Nonce  X'" by transfer auto
lemma Tag_Agent: "Tag X  Agent  X'" by transfer auto
lemma Tag_Number: "Tag X  Number  X'" by transfer auto
lemma Tag_Hash: "Tag X  Hash  X'" by transfer auto
lemma Tag_Tag: "(Tag X = Tag X') = (X = X')" by transfer auto
lemma Tag_EphK: "Tag X  EphK  X'" by transfer auto
lemma Tag_LtK: "Tag X  LtK  X'" by transfer auto
lemma Tag_Pair: "Tag X  Pair  X' Y'" by transfer auto
lemma Tag_Enc: "Tag X  Enc  X' Y'" by transfer auto
lemma Tag_Aenc: "Tag X  Aenc  X' Y'" by transfer auto
lemma Tag_Sign: "Tag X  Sign  X' Y'" by transfer auto
lemma Tag_Exp: "Tag X  Exp  X' Y'" by transfer auto

lemma EphK_Nonce: "EphK X  Nonce  X'" by transfer auto
lemma EphK_Agent: "EphK X  Agent  X'" by transfer auto
lemma EphK_Number: "EphK X  Number  X'" by transfer auto
lemma EphK_Hash: "EphK X  Hash  X'" by transfer auto
lemma EphK_Tag: "EphK X  Tag  X'" by transfer auto
lemma EphK_EphK: "(EphK X = EphK X') = (X = X')" by transfer auto
lemma EphK_LtK: "EphK X  LtK  X'" by transfer auto
lemma EphK_Pair: "EphK X  Pair  X' Y'" by transfer auto
lemma EphK_Enc: "EphK X  Enc  X' Y'" by transfer auto
lemma EphK_Aenc: "EphK X  Aenc  X' Y'" by transfer auto
lemma EphK_Sign: "EphK X  Sign  X' Y'" by transfer auto
lemma EphK_Exp: "EphK X  Exp  X' Y'" by transfer auto

lemma LtK_Nonce: "LtK X  Nonce  X'" by transfer auto
lemma LtK_Agent: "LtK X  Agent  X'" by transfer auto
lemma LtK_Number: "LtK X  Number  X'" by transfer auto
lemma LtK_Hash: "LtK X  Hash  X'" by transfer auto
lemma LtK_Tag: "LtK X  Tag  X'" by transfer auto
lemma LtK_EphK: "LtK X  EphK  X'" by transfer auto
lemma LtK_LtK: "(LtK X = LtK X') = (X = X')" by transfer auto
lemma LtK_Pair: "LtK X  Pair  X' Y'" by transfer auto
lemma LtK_Enc: "LtK X  Enc  X' Y'" by transfer auto
lemma LtK_Aenc: "LtK X  Aenc  X' Y'" by transfer auto
lemma LtK_Sign: "LtK X  Sign  X' Y'" by transfer auto
lemma LtK_Exp: "LtK X  Exp  X' Y'" by transfer auto

lemma Pair_Nonce: "Pair X Y  Nonce  X'" by transfer auto
lemma Pair_Agent: "Pair X Y  Agent  X'" by transfer auto
lemma Pair_Number: "Pair X Y  Number  X'" by transfer auto
lemma Pair_Hash: "Pair X Y  Hash  X'" by transfer auto
lemma Pair_Tag: "Pair X Y  Tag  X'" by transfer auto
lemma Pair_EphK: "Pair X Y  EphK  X'" by transfer auto
lemma Pair_LtK: "Pair X Y  LtK  X'" by transfer auto
lemma Pair_Pair: "(Pair X Y = Pair X' Y') = (X = X'  Y = Y')" by transfer auto
lemma Pair_Enc: "Pair X Y  Enc  X' Y'" by transfer auto
lemma Pair_Aenc: "Pair X Y  Aenc  X' Y'" by transfer auto
lemma Pair_Sign: "Pair X Y  Sign  X' Y'" by transfer auto
lemma Pair_Exp: "Pair X Y  Exp  X' Y'" by transfer auto

lemma Enc_Nonce: "Enc X Y  Nonce  X'" by transfer auto
lemma Enc_Agent: "Enc X Y  Agent  X'" by transfer auto
lemma Enc_Number: "Enc X Y  Number  X'" by transfer auto
lemma Enc_Hash: "Enc X Y  Hash  X'" by transfer auto
lemma Enc_Tag: "Enc X Y  Tag  X'" by transfer auto
lemma Enc_EphK: "Enc X Y  EphK  X'" by transfer auto
lemma Enc_LtK: "Enc X Y  LtK  X'" by transfer auto
lemma Enc_Pair: "Enc X Y  Pair  X' Y'" by transfer auto
lemma Enc_Enc: "(Enc X Y = Enc X' Y') = (X = X'  Y = Y')" by transfer auto
lemma Enc_Aenc: "Enc X Y  Aenc  X' Y'" by transfer auto
lemma Enc_Sign: "Enc X Y  Sign  X' Y'" by transfer auto
lemma Enc_Exp: "Enc X Y  Exp  X' Y'" by transfer auto

lemma Aenc_Nonce: "Aenc X Y  Nonce  X'" by transfer auto
lemma Aenc_Agent: "Aenc X Y  Agent  X'" by transfer auto
lemma Aenc_Number: "Aenc X Y  Number  X'" by transfer auto
lemma Aenc_Hash: "Aenc X Y  Hash  X'" by transfer auto
lemma Aenc_Tag: "Aenc X Y  Tag  X'" by transfer auto
lemma Aenc_EphK: "Aenc X Y  EphK  X'" by transfer auto
lemma Aenc_LtK: "Aenc X Y  LtK  X'" by transfer auto
lemma Aenc_Pair: "Aenc X Y  Pair  X' Y'" by transfer auto
lemma Aenc_Enc: "Aenc X Y  Enc  X' Y'" by transfer auto
lemma Aenc_Aenc: "(Aenc X Y = Aenc X' Y') = (X = X'  Y = Y')" by transfer auto
lemma Aenc_Sign: "Aenc X Y  Sign  X' Y'" by transfer auto
lemma Aenc_Exp: "Aenc X Y  Exp  X' Y'" by transfer auto

lemma Sign_Nonce: "Sign X Y  Nonce  X'" by transfer auto
lemma Sign_Agent: "Sign X Y  Agent  X'" by transfer auto
lemma Sign_Number: "Sign X Y  Number  X'" by transfer auto
lemma Sign_Hash: "Sign X Y  Hash  X'" by transfer auto
lemma Sign_Tag: "Sign X Y  Tag  X'" by transfer auto
lemma Sign_EphK: "Sign X Y  EphK  X'" by transfer auto
lemma Sign_LtK: "Sign X Y  LtK  X'" by transfer auto
lemma Sign_Pair: "Sign X Y  Pair  X' Y'" by transfer auto
lemma Sign_Enc: "Sign X Y  Enc  X' Y'" by transfer auto
lemma Sign_Aenc: "Sign X Y  Aenc  X' Y'" by transfer auto
lemma Sign_Sign: "(Sign X Y = Sign X' Y') = (X = X'  Y = Y')" by transfer auto
lemma Sign_Exp: "Sign X Y  Exp  X' Y'" by transfer auto

lemma Exp_Nonce: "Exp X Y  Nonce  X'" by transfer auto
lemma Exp_Agent: "Exp X Y  Agent  X'" by transfer auto
lemma Exp_Number: "Exp X Y  Number  X'" by transfer auto
lemma Exp_Hash: "Exp X Y  Hash  X'" by transfer auto
lemma Exp_Tag: "Exp X Y  Tag  X'" by transfer auto
lemma Exp_EphK: "Exp X Y  EphK  X'" by transfer auto
lemma Exp_LtK: "Exp X Y  LtK  X'" by transfer auto
lemma Exp_Pair: "Exp X Y  Pair  X' Y'" by transfer auto
lemma Exp_Enc: "Exp X Y  Enc  X' Y'" by transfer auto
lemma Exp_Aenc: "Exp X Y  Aenc  X' Y'" by transfer auto
lemma Exp_Sign: "Exp X Y  Sign  X' Y'" by transfer auto

(* Bundle the freeness lemmas for the simplifier and induction; Exp has no
   injectivity rule (it is only injective up to permutation of exponents). *)
lemmas msg_inject [iff, induct_simp] =
  Nonce_Nonce Agent_Agent Number_Number Hash_Hash Tag_Tag EphK_EphK LtK_LtK 
  Pair_Pair Enc_Enc Aenc_Aenc Sign_Sign

lemmas msg_distinct [simp, induct_simp] =
  Nonce_Agent Nonce_Number Nonce_Hash Nonce_Tag Nonce_EphK Nonce_LtK Nonce_Pair 
  Nonce_Enc Nonce_Aenc Nonce_Sign Nonce_Exp 
  Agent_Nonce Agent_Number Agent_Hash Agent_Tag Agent_EphK Agent_LtK Agent_Pair 
  Agent_Enc Agent_Aenc Agent_Sign Agent_Exp 
  Number_Nonce Number_Agent Number_Hash Number_Tag Number_EphK Number_LtK 
  Number_Pair Number_Enc Number_Aenc Number_Sign Number_Exp 
  Hash_Nonce Hash_Agent Hash_Number Hash_Tag Hash_EphK Hash_LtK Hash_Pair 
  Hash_Enc Hash_Aenc Hash_Sign Hash_Exp 
  Tag_Nonce Tag_Agent Tag_Number Tag_Hash Tag_EphK Tag_LtK Tag_Pair 
  Tag_Enc Tag_Aenc Tag_Sign Tag_Exp 
  EphK_Nonce EphK_Agent EphK_Number EphK_Hash EphK_Tag EphK_LtK EphK_Pair 
  EphK_Enc EphK_Aenc EphK_Sign EphK_Exp 
  LtK_Nonce LtK_Agent LtK_Number LtK_Hash LtK_Tag LtK_EphK LtK_Pair 
  LtK_Enc LtK_Aenc LtK_Sign LtK_Exp 
  Pair_Nonce Pair_Agent Pair_Number Pair_Hash Pair_Tag Pair_EphK Pair_LtK 
  Pair_Enc Pair_Aenc Pair_Sign Pair_Exp 
  Enc_Nonce Enc_Agent Enc_Number Enc_Hash Enc_Tag Enc_EphK Enc_LtK Enc_Pair 
  Enc_Aenc Enc_Sign Enc_Exp 
  Aenc_Nonce Aenc_Agent Aenc_Number Aenc_Hash Aenc_Tag Aenc_EphK Aenc_LtK 
  Aenc_Pair Aenc_Enc Aenc_Sign Aenc_Exp 
  Sign_Nonce Sign_Agent Sign_Number Sign_Hash Sign_Tag Sign_EphK Sign_LtK 
  Sign_Pair Sign_Enc Sign_Aenc Sign_Exp 
  Exp_Nonce Exp_Agent Exp_Number Exp_Hash Exp_Tag Exp_EphK Exp_LtK Exp_Pair 
  Exp_Enc Exp_Aenc Exp_Sign 


(* Gen is the (underspecified) Diffie-Hellman group generator, encoded as a
   fixed but arbitrary Number; cGen is its concrete counterpart. *)
consts Ngen :: nat
abbreviation "Gen  Number Ngen"
abbreviation "cGen  cNumber Ngen"

(* Tagged-channel message labels. *)
abbreviation 
  "InsecTag  Tag insec"

abbreviation 
  "AuthTag  Tag auth"

abbreviation 
  "ConfidTag  Tag confid"

abbreviation 
  "SecureTag  Tag secure"

abbreviation 
  "Tags  range Tag"

(* Fresh (protocol-generated) vs. attacker-generated nonces. *)
abbreviation
  NonceF :: "fresh_t  msg" where
  "NonceF N  Nonce (nonce_fresh N)"

abbreviation
  NonceA :: "nat  msg" where
  "NonceA N  Nonce (nonce_atk N)"

(* Long-term keys: shared, public, private. *)
abbreviation
  shrK :: "agent  agent  msg" where
  "shrK A B  LtK (sharK A B)"

abbreviation
  pubK :: "agent  msg" where
  "pubK A  LtK (publK A)"

abbreviation
  priK :: "agent  msg" where
  "priK A  LtK (privK A)"

(* Ephemeral keys, plus fresh (F) and attacker (A) specializations. *)
abbreviation
  epubK :: "nonce_t  msg" where
  "epubK N  EphK (epublK N)"

abbreviation
  epriK :: "nonce_t  msg" where
  "epriK N  EphK (eprivK N)"

abbreviation
  epubKF :: "fresh_t  msg" where
  "epubKF N  EphK (epublK (nonce_fresh N))"

abbreviation
  epriKF :: "fresh_t  msg" where
  "epriKF N  EphK (eprivK (nonce_fresh N))"

abbreviation
  epubKA :: "nat  msg" where
  "epubKA N  EphK (epublK (nonce_atk N))"

abbreviation
  epriKA :: "nat  msg" where
  "epriKA N  EphK (eprivK (nonce_atk N))"


text‹Concrete syntax: messages appear as <A,B,NA>, etc...›

(* Right-nested tuple notation, desugared into iterated Pair. *)
syntax
  "_MTuple"      :: "['a, args]  'a * 'b"       ("(2_,/ _)")
translations
  "x, y, z"  "x, y, z"
  "x, y"     "CONST Pair x y"

text ‹hash macs›
(* A MAC is modeled as the hash of the message paired with the key. *)
abbreviation
  hmac :: "msg  msg  msg" where
  "hmac M K  Hash M, K"


text ‹recover some kind of injectivity for Exp›
(* Characterize eq on messages of the shape g^X: an eq-step preserves the
   shape and relates the exponents. *)
lemma eq_expgen: 
  "eq X Y  ( X'. X = cExp cGen X'  ( Z. Y = (cExp cGen Z)  eq X' Z)) 
              ( Y'. Y = cExp cGen Y'  ( Z. X = (cExp cGen Z)  eq Y' Z))"
by (erule eq.induct, auto elim!: Tr)

lemma Exp_Gen_inj: "Exp Gen X = Exp Gen Y  X = Y"
by (transfer, auto dest: eq_expgen)


(* Same for the shape (g^X')^X'': the two exponents are preserved, possibly
   swapped (by the Permute equation). *)
lemma eq_expexpgen: 
  "eq X Y  ( X' X''. X = cExp (cExp cGen X') X''  
                ( Y' Y''. Y = cExp (cExp cGen Y') Y''  
                   ((eq X' Y'  eq X'' Y'')  (eq X' Y''  eq X'' Y'))))" 
apply (erule eq.induct, simp_all) 
apply ((drule eq_expgen)+, force)
apply (auto, blast+)
done

(* Injectivity of (g^X)^X' up to permutation of the exponents. *)
lemma Exp_Exp_Gen_inj:
  "Exp (Exp Gen X) X' = Z 
   ( Y Y'. Z = Exp (Exp Gen Y) Y'  ((X = Y  X' = Y')  (X = Y'  X' = Y)))"
by (transfer, auto dest: eq_expexpgen)

lemma Exp_Exp_Gen_inj2:
  "Exp (Exp Gen X) X' = Exp Z Y' 
  (Y' = X  Z = Exp Gen X')  (Y' = X'  Z = Exp Gen X)"
apply (transfer, auto)
apply (drule eq_expexpgen, auto)+
done



end

Theory Message_derivation

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Message_derivation.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Message_derivation.thy 132885 2016-12-23 18:41:32Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  A theory of message derivations (analz, synth, parts).
  Based on Larry Paulson's theory HOL/Auth/Message.thy, but extended with
  - composed keys
  - including ephemeral asymmetric keys
  - Diffie-Hellman exponentiation and associated equational theory

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Message theory› 

theory Message_derivation
imports Messages
begin

text ‹This theory is adapted from Larry Paulson's original Message theory.›


(****************************************************************************************)
subsection ‹Message composition›
(****************************************************************************************)

text ‹Dolev-Yao message synthesis.›

(* synth H: messages the attacker can construct from H.  He can produce any
   agent name, number, and his own nonces and ephemeral keys (NonceA,
   epubKA, epriKA), and close under all composition operators; fresh nonces
   and long-term keys are only available if already in H. *)
inductive_set
  synth :: "msg set  msg set"   
  for H :: "msg set"
  where
    Ax [intro]: "X  H  X  synth H"
  | Agent [simp, intro]: "Agent A  synth H"
  | Number [simp, intro]: "Number n  synth H"
  | NonceA [simp, intro]: "NonceA n  synth H"
  | EpubKA [simp, intro]: "epubKA n  synth H"
  | EpriKA [simp, intro]: "epriKA n  synth H"
  | Hash [intro]: "X  synth H  Hash X  synth H"
  | Pair [intro]: "X  synth H  Y  synth H  (Pair X Y)  synth H"
  | Enc [intro]: "X  synth H  Y  synth H  (Enc X Y)  synth H"
  | Aenc [intro]: "X  synth H  Y  synth H  (Aenc X Y)  synth H"
  | Sign [intro]: "X  synth H  Y  synth H  Sign X Y  synth H"
  | Exp [intro]: "X  synth H  Y  synth H  (Exp X Y)  synth H"


text ‹Lemmas about Dolev-Yao message synthesis.›

lemma synth_mono [mono_set]: "G  H  synth G  synth H"
  by (auto, erule synth.induct, auto) 

lemmas synth_monotone = synth_mono [THEN [2] rev_subsetD]

― ‹[elim!]› slows down certain proofs, e.g., ⟦ synth H ∩ B ⊆ {} ⟧ ⟹ P›
inductive_cases NonceF_synth: "NonceF n  synth H"
inductive_cases LtK_synth: "LtK K  synth H"
inductive_cases EpubKF_synth: "epubKF K  synth H"
inductive_cases EpriKF_synth: "epriKF K  synth H"
inductive_cases Hash_synth: "Hash X  synth H"
inductive_cases Pair_synth: "Pair X Y  synth H"
inductive_cases Enc_synth: "Enc X K  synth H"
inductive_cases Aenc_synth: "Aenc X K  synth H"
inductive_cases Sign_synth: "Sign X K  synth H"
inductive_cases Tag_synth: "Tag t  synth H"

(* A synthesized ephemeral key either was known or is attacker-generated. *)
lemma EpriK_synth [elim]: "epriK K  synth H 
       epriK K  H  ( N. epriK K = epriKA N)"
by (cases K, auto elim: EpriKF_synth)

lemma EpubK_synth [elim]: "epubK K  synth H 
       epubK K  H  ( N. epubK K = epubKA N)"
by (cases K, auto elim: EpubKF_synth)

lemmas synth_inversion [elim] = 
  NonceF_synth LtK_synth EpubKF_synth EpriKF_synth Hash_synth Pair_synth 
  Enc_synth Aenc_synth Sign_synth Tag_synth


(* Closure properties of synth: extensive, idempotent, and well-behaved
   w.r.t. intersection, union, and insertion. *)
lemma synth_increasing: "H  synth H"
by blast

lemma synth_Int1: "x  synth (A  B)  x  synth A "
  by (erule synth.induct) (auto)

lemma synth_Int2: "x  synth (A  B)  x  synth B"
  by (erule synth.induct) (auto)

lemma synth_Int: "x  synth (A  B)  x  synth A  synth B"
  by (blast intro: synth_Int1 synth_Int2)

lemma synth_Un: "synth G  synth H  synth (G  H)"
by (intro Un_least synth_mono Un_upper1 Un_upper2)

lemma synth_insert: "insert X (synth H)  synth (insert X H)"
by (blast intro: synth_mono [THEN [2] rev_subsetD])

lemma synth_synthD [dest!]: "X  synth (synth H)  X  synth H"
by (erule synth.induct, blast+)

lemma synth_idem [simp]: "synth (synth H) = synth H"
by blast

lemma synth_subset_iff: "synth G  synth H  G  synth H"
by (blast dest: synth_mono)

lemma synth_trans: " X  synth G; G  synth H   X  synth H"
by (drule synth_mono, blast)

(* Cut rule: a synthesizable hypothesis X may be discharged. *)
lemma synth_cut: " Y  synth (insert X H); X  synth H   Y  synth H"
by (erule synth_trans, blast)


(* Simp rules: for non-synthesizable atoms (fresh nonces, long-term and
   fresh ephemeral keys) membership in synth H collapses to membership in H;
   likewise for encryptions/signatures when one component is unsynthesizable. *)
lemma Nonce_synth_eq [simp]: "(NonceF N  synth H) = (NonceF N  H)"
by blast

lemma LtK_synth_eq [simp]: "(LtK K  synth H) = (LtK K  H)"
by blast

lemma EpubKF_synth_eq [simp]: "(epubKF K  synth H) = (epubKF K  H)"
by blast

lemma EpriKF_synth_eq [simp]: "(epriKF K  synth H) = (epriKF K  H)"
by blast

lemma Enc_synth_eq1 [simp]:
     "K  synth H  (Enc X K  synth H) = (Enc X K  H)"
by blast

lemma Enc_synth_eq2 [simp]:
     "X  synth H  (Enc X K  synth H) = (Enc X K  H)"
by blast

lemma Aenc_synth_eq1 [simp]:
     "K  synth H  (Aenc X K  synth H) = (Aenc X K  H)"
by blast

lemma Aenc_synth_eq2 [simp]:
     "X  synth H  (Aenc X K  synth H) = (Aenc X K  H)"
by blast

lemma Sign_synth_eq1 [simp]:
     "K  synth H  (Sign X K  synth H) = (Sign X K  H)"
by blast

lemma Sign_synth_eq2 [simp]:
     "X  synth H  (Sign X K  synth H) = (Sign X K  H)"
by blast

(****************************************************************************************)
subsection ‹Message decomposition›
(****************************************************************************************)

text ‹Dolev-Yao message decomposition using known keys.›

(* analz H: messages extractable from H.  Note that decryption keys may be
   composed: Dec and Adec_eph require the key in synth (analz H), not just
   analz H, which makes analz and synth mutually dependent. *)
inductive_set
  analz :: "msg set  msg set"
  for H :: "msg set"
  where
    Ax [intro]: "X  H  X  analz H"
  | Fst: "Pair X Y  analz H  X  analz H"
  | Snd: "Pair X Y  analz H  Y  analz H"
  | Dec [dest]: 
      " Enc X Y  analz H; Y  synth (analz H)   X  analz H"
  | Adec_lt [dest]: 
      " Aenc X (LtK (publK Y))  analz H; priK Y  analz H   X  analz H"
  | Adec_eph [dest]: 
      " Aenc X (EphK (epublK Y))  analz H; epriK Y  synth (analz H)   X  analz H"
  | Sign_getmsg [dest]: 
      "Sign X (priK Y)  analz H  pubK Y  analz H  X  analz H"


text ‹Lemmas about Dolev-Yao message decomposition.›

(* Monotonicity needs synth_Int2 because of the synth side conditions in
   the Dec/Adec_eph rules. *)
lemma analz_mono: "G  H  analz(G)  analz(H)"
by (safe, erule analz.induct) (auto dest: Fst Snd synth_Int2)

lemmas analz_monotone = analz_mono [THEN [2] rev_subsetD]


lemma Pair_analz [elim!]:
  " Pair X Y  analz H;  X  analz H; Y  analz H   P   P"
by (blast dest: analz.Fst analz.Snd)


(* analz is extensive and idempotent. *)
lemma analz_empty [simp]: "analz {} = {}"
by (safe, erule analz.induct) (blast+)

lemma analz_increasing: "H  analz(H)"
by auto

lemma analz_analzD [dest!]: "X  analz (analz H)  X  analz H"
by (induction X rule: analz.induct) (auto dest: synth_monotone)

lemma analz_idem [simp]: "analz (analz H) = analz H"
by auto


lemma analz_Un: "analz G  analz H  analz (G  H)"
by (intro Un_least analz_mono Un_upper1 Un_upper2)

lemma analz_insertI: "X  analz H  X  analz (insert Y H)"
by (blast intro: analz_monotone)

lemma analz_insert: "insert X (analz H)  analz (insert X H)"
by (blast intro: analz_monotone)

lemmas analz_insert_eq_I = equalityI [OF subsetI analz_insert]


lemma analz_subset_iff [simp]: "analz G  analz H  G  analz H"
by (blast dest: analz_mono)

lemma analz_trans: "X  analz G   G  analz H  X  analz H"
by (drule analz_mono) blast

(* Cut rule: an analyzable hypothesis X may be discharged. *)
lemma analz_cut: "Y  analz (insert X H)   X  analz H  Y  analz H"
by (erule analz_trans) blast

lemma analz_insert_eq: "X  analz H  analz (insert X H) = analz H"
by (blast intro: analz_cut analz_insertI)


(* Congruence of analz w.r.t. union and insertion. *)
lemma analz_subset_cong:
  "analz G  analz G' 
   analz H  analz H'  
   analz (G  H)  analz (G'  H')"
apply simp
apply (iprover intro: conjI subset_trans analz_mono Un_upper1 Un_upper2) 
done

lemma analz_cong:
  "analz G = analz G' 
   analz H = analz H' 
   analz (G  H) = analz (G'  H')"
by (intro equalityI analz_subset_cong, simp_all) 

lemma analz_insert_cong:
  "analz H = analz H' 
   analz (insert X H) = analz (insert X H')"
by (force simp only: insert_def intro!: analz_cong)

(* If H contains no decomposable messages, analz adds nothing. *)
lemma analz_trivial:
  "X Y. Pair X Y  H 
   X Y. Enc X Y  H 
   X Y. Aenc X Y  H 
   X Y. Sign X Y  H 
   analz H = H"
apply safe
apply (erule analz.induct, blast+)
done


(* analz may be collapsed on either side of a union. *)
lemma analz_analz_Un [simp]: "analz (analz G  H) = analz (G  H)"
apply (intro equalityI analz_subset_cong)+
apply simp_all
done

lemma analz_Un_analz [simp]: "analz (G  analz H) = analz (G  H)"
by (subst Un_commute, auto)+

text ‹Lemmas about analz and insert.› 

(* Inserting an agent name does not help the attacker decompose anything:
   analz commutes with insertion of an Agent.  The script discharges the
   easy analz cases by fastforce and then handles the two cases with synth
   side conditions (Dec and Adec_eph) by weakening the synth hypothesis
   from the inserted set back to H via synth induction. *)
lemma analz_insert_Agent [simp]:
  "analz (insert (Agent A) H) = insert (Agent A) (analz H)"
apply (rule analz_insert_eq_I) 
apply (erule analz.induct)
apply fastforce
apply fastforce
apply fastforce
defer 1
apply fastforce
defer 1
apply fastforce

(* Dec case: the decryption key Y synthesized from the enlarged set is
   already synthesizable from analz H *)
apply (rename_tac x X Y)
apply (subgoal_tac "Y  synth (analz H)", auto)
apply (thin_tac "Enc X Y  Z" for Z)+
apply (drule synth_Int2, auto)
apply (erule synth.induct, auto)

(* Adec_eph case: same argument for the ephemeral private key *)
apply (rename_tac X Y)
apply (subgoal_tac "epriK Y  synth (analz H)", auto)
apply (thin_tac "Aenc X (epubK Y)  Z" for Z)+
apply (erule synth.induct, auto)
done



(****************************************************************************************)
subsection ‹Lemmas about combined composition/decomposition›
(****************************************************************************************)

lemma synth_analz_incr: "H  synth (analz H)"
by auto

lemmas synth_analz_increasing = synth_analz_incr [THEN [2] rev_subsetD] 

lemma synth_analz_mono: "G  H  synth (analz G)  synth (analz H)"
by (blast intro!: analz_mono synth_mono)

lemmas synth_analz_monotone = synth_analz_mono [THEN [2] rev_subsetD]

(* lem1 and lem2 are auxiliary facts for the interchange law
   analz_synth_Un below; they are not intended for use elsewhere. *)
lemma lem1: 
  "Y  synth (analz (synth G  H)  (analz (G  H)  synth G)) 
 Y  synth (analz (G  H))"
apply (rule subsetD, auto simp add: synth_subset_iff intro: analz_increasing synth_monotone)
done

lemma lem2: "{a. a  analz (G  H)  a  synth G} = analz (G  H)  synth G" by auto

(* Interchange of analz and synth: analyzing a synthesized set adds exactly
   the synthesizable messages. *)
lemma analz_synth_Un: "analz (synth G  H) = analz (G  H)  synth G"
proof (intro equalityI subsetI)
  fix x
  assume "x  analz (synth G  H)" 
  thus "x  analz (G  H)  synth G"
  by (induction x rule: analz.induct)
     (auto simp add: lem2 intro: analz_monotone Fst Snd Dec Adec_eph Adec_lt Sign_getmsg 
           dest: lem1)
next 
  fix x
  assume "x  analz (G  H)  synth G" 
  thus "x  analz (synth G  H)" 
    by (blast intro: analz_monotone) 
qed


lemma analz_synth: "analz (synth H) = analz H  synth H"
by (rule analz_synth_Un [where H="{}", simplified])


lemma analz_synth_Un2 [simp]: "analz (G  synth H) = analz (G  H)  synth H"
by (subst Un_commute, auto simp add: analz_synth_Un)+


(* Idempotence-like laws for the combined closure synth (analz _). *)
lemma synth_analz_synth [simp]: "synth (analz (synth H)) = synth (analz H)"
by (auto del:subsetI) (auto simp add: synth_subset_iff analz_synth)

lemma analz_synth_analz [simp]: "analz (synth (analz H)) = synth (analz H)"
by (auto simp add: analz_synth)

lemma synth_analz_idem [simp]: "synth (analz (synth (analz H))) = synth (analz H)"
by (simp only: analz_synth_analz) simp


lemma insert_subset_synth_analz [simp]: 
  "X  synth (analz H)  insert X H  synth (analz H)"
by auto


(* Adding a derivable message does not enlarge the attacker's knowledge. *)
lemma synth_analz_insert [simp]: 
  assumes "X  synth (analz H)"
  shows "synth (analz (insert X H)) = synth (analz H)"
using assms
proof (intro equalityI subsetI)
  fix Z
  assume "Z  synth (analz (insert X H))"
  hence "Z  synth (analz (synth (analz H)))" using assms 
    by - (erule synth_analz_monotone, rule insert_subset_synth_analz)
  thus "Z  synth (analz H)" using assms by simp
qed (auto intro: synth_analz_monotone)


(****************************************************************************************)
subsection ‹Accessible message parts›
(****************************************************************************************)

text ‹Accessible message parts: all subterms that are in principle extractable
by the Dolev-Yao attacker, i.e., provided he knows all keys. Note that keys in
key positions and messages under hashes are not message parts in this sense.›

inductive_set
  parts :: "msg set => msg set"
  for H :: "msg set"
where
    ― ‹every message is a part of itself›
    Inj [intro]: "X ∈ H ⟹ X ∈ parts H"
    ― ‹both components of a pair are parts›
  | Fst [intro]: "Pair X Y ∈ parts H ⟹ X ∈ parts H"
  | Snd [intro]: "Pair X Y ∈ parts H ⟹ Y ∈ parts H"
    ― ‹payloads of symmetric/asymmetric encryptions and signatures are parts;
       note: key positions and hashed messages are deliberately NOT parts›
  | Dec [intro]: "Enc X Y ∈ parts H ⟹ X ∈ parts H"
  | Adec [intro]: "Aenc X Y ∈ parts H ⟹ X ∈ parts H"
  | Sign_getmsg [intro]: "Sign X Y ∈ parts H ⟹ X ∈ parts H"


text ‹Lemmas about accessible message parts.›

(* parts is monotone wrt. the message set; registered for mono_set so it can
   be used by the inductive package. *)
lemma parts_mono [mono_set]: "G ⊆ H ⟹ parts G ⊆ parts H"
by (auto, erule parts.induct, auto)

lemmas parts_monotone = parts_mono [THEN [2] rev_subsetD]


lemma Pair_parts [elim]:
  " Pair X Y  parts H;  X  parts H; Y  parts H   P   P"
by blast 


lemma parts_increasing: "H  parts H"
by blast

lemmas parts_insertI = subset_insertI [THEN parts_mono, THEN subsetD]

lemma parts_empty [simp]: "parts {} = {}"
by (safe, erule parts.induct, auto)

lemma parts_atomic [simp]: "atomic x  parts {x} = {x}"
by (auto, erule parts.induct, auto)
 
lemma parts_InsecTag [simp]: "parts {Tag t} = {Tag t}" 
by (safe, erule parts.induct) (auto)

lemma parts_emptyE [elim!]: "X  parts {}  P"
by simp

lemma parts_Tags [simp]:
  "parts Tags = Tags"
by (rule, rule, erule parts.induct, auto) 

lemma parts_singleton: "X  parts H   YH. X  parts {Y}"
by (erule parts.induct, blast+)

lemma parts_Agents [simp]:
  "parts (Agent` G) = Agent` G"
by (auto elim: parts.induct)

lemma parts_Un [simp]: "parts (G  H) = parts G  parts H"
proof 
  show "parts (G  H)  parts G  parts H"
    by (rule, erule parts.induct) (auto)
next 
  show "parts G  parts H  parts (G  H)" 
    by (intro Un_least parts_mono Un_upper1 Un_upper2)
qed 

lemma parts_insert_subset_Un: 
  assumes "X  G" 
  shows "parts (insert X H)  parts G  parts H"
proof -
  from assms have "parts (insert X H)  parts (G  H)" by (blast intro!: parts_mono) 
  thus ?thesis by simp
qed 

lemma parts_insert: "parts (insert X H) = parts {X}  parts H"
by (blast intro!: parts_insert_subset_Un intro: parts_monotone)

lemma parts_insert2:
  "parts (insert X (insert Y H)) = parts {X}  parts {Y}  parts H"
apply (simp add: Un_assoc)
apply (simp add: parts_insert [symmetric])
done


lemma parts_UN [simp]: "parts (xA. H x) = (xA. parts(H x))" (is "?X = ?Y")
proof 
  show "?X  ?Y" by (rule subsetI, erule parts.induct) (blast+)
next 
  show "?Y  ?X" by (intro UN_least parts_mono UN_upper)
qed


lemmas in_parts_UnE [elim!] = parts_Un [THEN equalityD1, THEN subsetD, THEN UnE] 


lemma parts_insert_subset: "insert X (parts H)  parts (insert X H)"
by (blast intro: parts_mono [THEN [2] rev_subsetD])


lemma parts_partsD [dest!]: "X  parts (parts H)  X  parts H"
by (erule parts.induct, blast+)

lemma parts_idem [simp]: "parts (parts H) = parts H"
by blast

lemma parts_subset_iff [simp]: "(parts G  parts H)  (G  parts H)"
by (blast dest: parts_mono)

lemma parts_trans: "X  parts G   G  parts H  X  parts H"
by (drule parts_mono, blast)


lemma parts_cut:
  "Y  parts (insert X G)   X  parts H  Y  parts (G  H)" 
by (blast intro: parts_trans)


lemma parts_cut_eq [simp]: "X  parts H  parts (insert X H) = parts H"
by (force dest!: parts_cut intro: parts_insertI)


lemmas parts_insert_eq_I = equalityI [OF subsetI parts_insert_subset]


lemma parts_insert_Agent [simp]:
  "parts (insert (Agent agt) H) = insert (Agent agt) (parts H)"
by (rule parts_insert_eq_I, erule parts.induct, auto)

lemma parts_insert_Nonce [simp]:
  "parts (insert (Nonce N) H) = insert (Nonce N) (parts H)"
by (rule parts_insert_eq_I, erule parts.induct, auto)

lemma parts_insert_Number [simp]:
  "parts (insert (Number N) H) = insert (Number N) (parts H)"
by (rule parts_insert_eq_I, erule parts.induct, auto)

lemma parts_insert_LtK [simp]:
  "parts (insert (LtK K) H) = insert (LtK K) (parts H)"
by (rule parts_insert_eq_I, erule parts.induct, auto)

lemma parts_insert_Hash [simp]:
  "parts (insert (Hash X) H) = insert (Hash X) (parts H)"
by (rule parts_insert_eq_I, erule parts.induct, auto)


lemma parts_insert_Enc [simp]:
  "parts (insert (Enc X Y) H) = insert (Enc X Y) (parts {X}  parts H)"
apply (rule equalityI)
apply (rule subsetI)
apply (erule parts.induct, auto)
done

lemma parts_insert_Aenc [simp]:
  "parts (insert (Aenc X Y) H) = insert (Aenc X Y) (parts {X}  parts H)"
apply (rule equalityI)
apply (rule subsetI)
apply (erule parts.induct, auto)
done

lemma parts_insert_Sign [simp]:
  "parts (insert (Sign X Y) H) = insert (Sign X Y) (parts {X}  parts H)"
apply (rule equalityI)
apply (rule subsetI)
apply (erule parts.induct, auto)
done

lemma parts_insert_Pair [simp]:
  "parts (insert (Pair X Y) H) = insert (Pair X Y) (parts {X}  parts {Y}  parts H)"
apply (rule equalityI)
apply (rule subsetI)
apply (erule parts.induct, auto)
done


subsubsection ‹Lemmas about combinations with composition and decomposition›
(****************************************************************************************)

lemma analz_subset_parts: "analz H  parts H"
apply (rule subsetI)
apply (erule analz.induct, blast+)
done

lemmas analz_into_parts [simp] = analz_subset_parts [THEN subsetD]

lemmas not_parts_not_analz = analz_subset_parts [THEN contra_subsetD]


lemma parts_analz [simp]: "parts (analz H) = parts H"
apply (rule equalityI)
apply (rule analz_subset_parts [THEN parts_mono, THEN subset_trans], simp)
apply (blast intro: analz_increasing [THEN parts_mono, THEN subsetD])
done

lemma analz_parts [simp]: "analz (parts H) = parts H"
apply auto
apply (erule analz.induct, auto)
done


lemma parts_synth [simp]: "parts (synth H) = parts H  synth H"
apply (rule equalityI)
apply (rule subsetI)
apply (erule parts.induct)
apply (blast intro: synth_increasing [THEN parts_mono, THEN subsetD] )+
done

lemma Fake_parts_insert:
  "X  synth (analz H)  parts (insert X H)  synth (analz H)  parts H"
apply (drule parts_insert_subset_Un)
apply (simp (no_asm_use))
apply blast
done

lemma Fake_parts_insert_in_Un:
  "Z  parts (insert X H) 
   X  synth (analz H) 
   Z   synth (analz H)  parts H"
by (blast dest: Fake_parts_insert [THEN subsetD, dest])


lemma analz_conj_parts [simp]:
  "X  analz H  X  parts H  X  analz H"
by (blast intro: analz_subset_parts [THEN subsetD])

lemma analz_disj_parts [simp]:
  "X  analz H  X  parts H  X  parts H"
by (blast intro: analz_subset_parts [THEN subsetD])


(****************************************************************************************)
subsection ‹More lemmas about combinations of closures›
(****************************************************************************************)

text ‹Combinations of @{term synth} and @{term analz}.›

lemma Pair_synth_analz [simp]:
  "Pair X Y  synth (analz H)  X  synth (analz H)  Y  synth (analz H)"
by blast

lemma Enc_synth_analz:
  "Y  synth (analz H) 
   (Enc X Y  synth (analz H))  (X  synth (analz H))"
by blast

lemma Hash_synth_analz [simp]:
  "X  synth (analz H) 
   (Hash (Pair X Y)  synth (analz H))  (Hash (Pair X Y)  analz H)"
by blast


lemma gen_analz_insert_eq:
  " X  analz G; G  H   analz (insert X H) = analz H"
by (blast intro: analz_cut analz_insertI analz_monotone)

lemma synth_analz_insert_eq:
  " X  synth (analz G); G  H   synth (analz (insert X H)) = synth (analz H)"
by (blast intro!: synth_analz_insert synth_analz_monotone)

lemma Fake_parts_sing:
  "X  synth (analz H)  parts {X}  synth (analz H)  parts H"
apply (rule subset_trans)
apply (erule_tac [2] Fake_parts_insert)
apply (rule parts_mono, blast)
done

lemmas Fake_parts_sing_imp_Un = Fake_parts_sing [THEN [2] rev_subsetD]


lemma analz_hash_nonce [simp]: 
  "analz {M. N. M = Hash (Nonce N)} = {M. N. M = Hash (Nonce N)}"
by (auto, rule analz.induct, auto)

lemma synth_analz_hash_nonce [simp]: 
  "NonceF N  synth (analz {M. N. M = Hash (Nonce N)})"
by auto

lemma synth_analz_idem_mono:
  "S  synth (analz S')  synth (analz S)  synth (analz S')"
by (frule synth_analz_mono, auto)

lemmas synth_analz_idem_monoI =
  synth_analz_idem_mono [THEN [2] rev_subsetD]



lemma analz_synth_subset:
  "analz X  synth (analz X') 
   analz Y  synth (analz Y') 
   analz (X  Y)  synth (analz (X'  Y'))"
proof -
  assume "analz X  synth (analz X')"
  then have HX:"analz X  analz (synth (analz X'))" by blast
  assume "analz Y  synth (analz Y')"
  then have HY:"analz Y  analz (synth (analz Y'))" by blast
  from analz_subset_cong [OF HX HY] 
  have "analz (X  Y)  analz (synth (analz X')  synth (analz Y'))"
    by blast
  also have "...  analz (synth (analz X'  analz Y'))"
    by (intro analz_mono synth_Un)
  also have "...  analz (synth (analz (X'  Y')))"
    by (intro synth_mono analz_mono analz_Un)
  also have "...  synth (analz (X'  Y'))"
    by auto
  finally show "analz (X  Y)  synth (analz (X'  Y'))"
    by auto
qed


lemma analz_synth_subset_Un1 :
  "analz X  synth (analz X')  analz (X  Y)  synth (analz (X'  Y))"
using analz_synth_subset by blast

lemma analz_synth_subset_Un2 :
  "analz X  synth (analz X')  analz (Y  X)  synth (analz (Y  X'))"
using analz_synth_subset by blast

lemma analz_synth_insert:
  "analz X  synth (analz X')  analz (insert Y X)  synth (analz (insert Y X'))"
by (metis analz_synth_subset_Un2 insert_def)

lemma Fake_analz_insert_Un:
  assumes "Y  analz (insert X H)" and "X  synth (analz G)" 
  shows "Y  synth (analz G)  analz (G  H)"
proof -
  from assms have "Y  analz (synth (analz G)  H)"
    by (blast intro: analz_mono [THEN [2] rev_subsetD] 
                     analz_mono [THEN synth_mono, THEN [2] rev_subsetD])
  thus ?thesis by (simp add: sup.commute) 
qed



end

Theory IK

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  IK.thy (Isabelle/HOL 2016-1)
  ID:      $Id: IK.thy 132882 2016-12-23 09:03:55Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  State with intruder knowledge and corresponding Dolev-Yao derivation event.
  - This is used a building block for Secrecy model. 
  - The attacker event is also used by level-3 models.

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Environment: Dolev-Yao Intruder›

theory IK
imports Message_derivation 
begin

text ‹Basic state contains intruder knowledge. The secrecy model and concrete Level 1 
states will be record extensions of this state.›

record ik_state =
  ik :: "msg set"


text ‹Dolev-Yao intruder event adds a derived message.›

(* Dolev-Yao event: the intruder adds any message derivable (synth/analz) from
   the current knowledge.  Symbols restored after encoding loss (∈, ∧, ∪, ⋃,
   record-update brackets). *)
definition
  ik_dy :: "msg ⇒ ('a ik_state_scheme * 'a ik_state_scheme) set"
where
  "ik_dy m ≡ {(s, s').
    ― ‹guard: m is DY-derivable from the intruder knowledge›
    m ∈ synth (analz (ik s)) ∧

    ― ‹action: add m to the intruder knowledge›
    s' = s⦇ik := ik s ∪ {m}⦈
  }"

definition
  ik_trans :: "('a ik_state_scheme * 'a ik_state_scheme) set"
where
  "ik_trans ≡ (⋃m. ik_dy m)"

lemmas ik_trans_defs = ik_trans_def ik_dy_def


(* The intruder knowledge, and hence its DY closure, only grows. *)
lemma ik_trans_ik_increasing: "(s, s') ∈ ik_trans ⟹ ik s ⊆ ik s'"
by (auto simp add: ik_trans_defs)

lemma ik_trans_synth_analz_ik_increasing:
  "(s, s') ∈ ik_trans ⟹ synth (analz (ik s)) ⊆ synth (analz (ik s'))"
by (simp only: synth_analz_mono ik_trans_ik_increasing)


end

Theory Secrecy

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Secrecy.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Secrecy.thy 133008 2017-01-17 13:53:14Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  A simple model of secrecy with a set of secrets and the intruder knowledge.
  Secrecy = disjointness of secret messages and intruder knowledge
  Two events: (1) secret declaration (later by protocols)
              (2) intruder learns a message

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Secrecy Model (L0)›

theory Secrecy 
imports Refinement IK
begin

declare domIff [simp, iff del] 

(**************************************************************************************************)
subsection ‹State and events›
(**************************************************************************************************)

text ‹Level 0 secrecy state: extend intruder knowledge with set of secrets.›

record s0_state = ik_state + 
  secret :: "msg set"


text ‹Definition of the secrecy invariant: DY closure of intruder knowledge and set of
secrets are disjoint.›

(* Secrecy invariant: the DY closure of the intruder knowledge is disjoint
   from the declared secrets (∩ restored after encoding loss). *)
definition
  s0_secrecy :: "'a s0_state_scheme set"
where
  "s0_secrecy ≡ {s. synth (analz (ik s)) ∩ secret s = {}}"

lemmas s0_secrecyI = s0_secrecy_def [THEN setc_def_to_intro, rule_format]
lemmas s0_secrecyE [elim] = s0_secrecy_def [THEN setc_def_to_elim, rule_format]


text ‹Two events: add/declare a message as a secret and learn a (non-secret) message.›

definition
  s0_add_secret :: "msg  ('a s0_state_scheme * 'a s0_state_scheme) set"
where
  "s0_add_secret m  {(s,s').
    ― ‹guard›
    m  synth (analz (ik s)) 
    
    ― ‹action›
    s' = ssecret := insert m (secret s)
  }"

definition
  s0_learn :: "msg  ('a s0_state_scheme * 'a s0_state_scheme) set"
where
  "s0_learn m  {(s,s').
    ― ‹guard›
    sik := insert m (ik s)  s0_secrecy  

    ― ‹action›
    s' = sik := insert m (ik s)
  }"

definition
  s0_learn' :: "msg  ('a s0_state_scheme * 'a s0_state_scheme) set"
where
  "s0_learn' m  {(s,s').
    ― ‹guard›
    synth (analz (insert m (ik s)))  secret s = {}  

    ― ‹action›
    s' = sik := insert m (ik s)
  }"


definition
  s0_trans :: "('a s0_state_scheme * 'a s0_state_scheme) set"
where
  "s0_trans  (m. s0_add_secret m)  (m. s0_learn m)  Id"


text ‹Initial state is any state satisfying the invariant. The whole state is
observable. Put all together to define the L0 specification.›

definition
  s0_init :: "'a s0_state_scheme set"
where
  "s0_init  s0_secrecy"

type_synonym
  s0_obs = "s0_state"

definition 
  s0 :: "(s0_state, s0_obs) spec" where
  "s0  
    init = s0_init,
    trans = s0_trans,
    obs = id
  "

lemmas s0_defs = s0_def s0_init_def s0_trans_def s0_add_secret_def s0_learn_def 
lemmas s0_all_defs = s0_defs ik_trans_defs

lemma s0_obs_id [simp]: "obs s0 = id"
by (simp add: s0_def)


(**************************************************************************************************)
subsection ‹Proof of secrecy invariant›
(**************************************************************************************************)

lemma s0_secrecy_init [iff]: "init s0  s0_secrecy"
by (simp add: s0_defs)

lemma s0_secrecy_trans [simp, intro]: "{s0_secrecy} trans s0 {> s0_secrecy}"
apply (auto simp add: s0_all_defs PO_hoare_defs intro!: s0_secrecyI)
apply (auto)
done

lemma s0_secrecy [iff]:"reach s0  s0_secrecy"
by (rule inv_rule_basic, auto)


lemma s0_obs_secrecy [iff]:"oreach s0  s0_secrecy"
by (rule external_from_internal_invariant) (auto del: subsetI)


lemma s0_anyP_observable [iff]: "observable (obs s0) P"
by (auto)


end


Theory AuthenticationN

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  AuthenticationN.thy (Isabelle/HOL 2016-1)
  ID:      $Id: AuthenticationN.thy 133008 2017-01-17 13:53:14Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Simple abstract model of non-injective agreement based on a multiset 
  of signals. Two events: running and commit.

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Non-injective Agreement (L0)›

theory AuthenticationN imports Refinement Messages
begin

declare domIff [simp, iff del]

(**************************************************************************************************)
subsection ‹Signals›
(**************************************************************************************************)

text ‹signals›
datatype signal =
  Running agent agent msg
| Commit agent agent msg

fun
  addSignal :: "(signal  nat)  signal  signal  nat"
where
  "addSignal sigs s = sigs (s := sigs s + 1)"


(**************************************************************************************************)
subsection ‹State and events›
(**************************************************************************************************)

text ‹level 0 non-injective agreement›
record a0n_state = 
  signals :: "signal  nat"    ― ‹multi-set of signals›

type_synonym 
  a0n_obs = a0n_state


text ‹Events›

definition 
  a0n_running :: "agent  agent  msg  (a0n_state × a0n_state) set"
where 
  "a0n_running A B M  {(s,s').
     ― ‹action›
     s' = ssignals := addSignal (signals s) (Running A B M)
  }"

definition 
  a0n_commit :: "agent  agent  msg  (a0n_state × a0n_state) set"
where 
  "a0n_commit A B M  {(s, s').
  ― ‹guard›
    signals s (Running A B M) > 0 
  ― ‹action›
    s' = ssignals := addSignal (signals s) (Commit A B M)
  }"


definition 
  a0n_trans :: "(a0n_state × a0n_state) set" where
  "a0n_trans  (A B M. a0n_running A B M)  (A B M. a0n_commit A B M)  Id"


text ‹Level 0 state›

definition
  a0n_init :: "a0n_state set"
where
  "a0n_init  {signals = λs. 0}"


definition 
  a0n :: "(a0n_state, a0n_obs) spec" where
  "a0n  
    init = a0n_init,
    trans = a0n_trans, 
    obs = id
  "

lemmas a0n_defs = 
  a0n_def a0n_init_def a0n_trans_def 
  a0n_running_def a0n_commit_def

lemma a0n_obs_id [simp]: "obs a0n = id"
by (simp add: a0n_def)  

lemma a0n_anyP_observable [iff]: "observable (obs a0n) P"
by (auto)  


(**************************************************************************************************)
subsection ‹Non injective agreement invariant›
(**************************************************************************************************)

text ‹Invariant: non injective agreement›

definition 
  a0n_agreement :: "a0n_state set" 
where
  "a0n_agreement  {s. A B M.
     signals s (Commit A B M) > 0  signals s (Running A B M) > 0
  }"

lemmas a0n_agreementI = a0n_agreement_def [THEN setc_def_to_intro, rule_format]
lemmas a0n_agreementE [elim] = a0n_agreement_def [THEN setc_def_to_elim, rule_format]
lemmas a0n_agreementD = a0n_agreement_def [THEN setc_def_to_dest, rule_format, rotated 2]


lemma a0n_agreement_init [iff]:
  "init a0n  a0n_agreement"
by (auto simp add: a0n_defs intro!: a0n_agreementI)

lemma a0n_agreement_trans [iff]:
  "{a0n_agreement} trans a0n {> a0n_agreement}"
by (auto simp add: PO_hoare_defs a0n_defs intro!: a0n_agreementI)

lemma a0n_agreement [iff]: "reach a0n  a0n_agreement"
by (rule inv_rule_basic) (auto)


lemma a0n_obs_agreement [iff]:
  "oreach a0n  a0n_agreement"
apply (rule external_from_internal_invariant, fast) 
apply (subst a0n_def, auto)
done


end

Theory AuthenticationI

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  AuthenticationI.thy (Isabelle/HOL 2016-1)
  ID:      $Id: AuthenticationI.thy 132844 2016-12-19 15:21:54Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Simple abstract model of injective agreement based on a multiset of signals. 
  Two events: running and commit. Refines model for non-injective agreement.

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Injective Agreement (L0)›

theory AuthenticationI
imports AuthenticationN
begin

(**************************************************************************************************)
subsection ‹State and events›
(**************************************************************************************************)

type_synonym
  a0i_state = a0n_state

type_synonym
  a0i_obs = a0n_obs

abbreviation
  a0i_init :: "a0n_state set"
where
  "a0i_init  a0n_init"

abbreviation
  a0i_running :: "agent  agent  msg  (a0i_state × a0i_state) set"
where
  "a0i_running  a0n_running"

lemmas a0i_running_def = a0n_running_def

definition 
  a0i_commit :: "agent  agent  msg  (a0i_state × a0i_state) set"
where 
  "a0i_commit A B M  {(s, s').
   ― ‹guard›
     signals s (Commit A B M) < signals s (Running A B M) 
   ― ‹actions:›
     s' = ssignals := addSignal (signals s) (Commit A B M)
  }"


definition 
  a0i_trans :: "(a0i_state × a0i_state) set" where
  "a0i_trans  ( A B M. a0i_running A B M)  ( A B M. a0i_commit A B M)  Id"


definition 
  a0i :: "(a0i_state,a0i_obs) spec" where
  "a0i  
    init = a0i_init,
    trans = a0i_trans, 
    obs = id
  "

lemmas a0i_defs = a0n_defs a0i_def a0i_trans_def a0i_commit_def

lemma a0i_obs [simp]: "obs a0i = id"
by (simp add: a0i_def)

lemma a0i_anyP_observable [iff]: "observable (obs a0i) P"
by (auto)  


(**************************************************************************************************)
subsection ‹Injective agreement invariant›
(**************************************************************************************************)

definition 
  a0i_agreement :: "a0i_state set" 
where
  "a0i_agreement  {s. A B M.
    signals s (Commit A B M)  signals s (Running A B M)
  }"

lemmas a0i_agreementI = 
  a0i_agreement_def [THEN setc_def_to_intro, rule_format]
lemmas a0i_agreementE [elim] = 
  a0i_agreement_def [THEN setc_def_to_elim, rule_format]
lemmas a0i_agreementD = 
  a0i_agreement_def [THEN setc_def_to_dest, rule_format, rotated 1]


lemma PO_a0i_agreement_init [iff]:
  "init a0i  a0i_agreement"
by (auto simp add: a0i_defs intro!: a0i_agreementI)

lemma PO_a0i_agreement_trans [iff]:
  "{a0i_agreement} trans a0i {> a0i_agreement}"
apply (auto simp add: PO_hoare_defs a0i_defs intro!: a0i_agreementI)
apply (auto dest: a0i_agreementD intro: le_SucI)
done

lemma PO_a0i_agreement [iff]: "reach a0i  a0i_agreement"
by (rule inv_rule_basic) (auto)


lemma PO_a0i_obs_agreement [iff]: "oreach a0i  a0i_agreement"
apply (rule external_from_internal_invariant, fast) 
apply (subst a0i_def, auto)
done


(**************************************************************************************************)
subsection ‹Refinement›
(**************************************************************************************************)

definition
  med0n0i :: "a0i_obs  a0i_obs"
where
  "med0n0i  id"

definition
  R0n0i :: "(a0n_state × a0i_state) set"
where
  "R0n0i  Id"

lemma PO_a0i_running_refines_a0n_running:
  "{R0n0i} a0n_running A B M, a0i_running A B M {> R0n0i}"
by (unfold R0n0i_def) (rule relhoare_refl)

lemma PO_a0i_commit_refines_a0n_commit:
  "{R0n0i} a0n_commit A B M, a0i_commit A B M {> R0n0i}"
by (auto simp add: PO_rhoare_defs R0n0i_def a0i_defs)


lemmas PO_a0i_trans_refines_a0n_trans = 
  PO_a0i_running_refines_a0n_running
  PO_a0i_commit_refines_a0n_commit


lemma PO_a0i_refines_init_a0n [iff]:
  "init a0i  R0n0i``(init a0n)"
by (auto simp add: R0n0i_def a0i_defs)

lemma PO_a0i_refines_trans_a0n [iff]:
  "{R0n0i} trans a0n, trans a0i {> R0n0i}"
by (auto simp add: a0n_def a0n_trans_def a0i_def a0i_trans_def
         intro!: PO_a0i_trans_refines_a0n_trans relhoare_abstract_UN)

lemma PO_obs_consistent [iff]:
  "obs_consistent R0n0i med0n0i a0n a0i"
by (auto simp add: obs_consistent_def R0n0i_def med0n0i_def a0i_def a0n_def)

lemma PO_a0i_refines_a0n:
  "refines R0n0i med0n0i a0n a0i"
by (rule Refinement_basic) (auto)


(**************************************************************************************************)
subsection ‹Derived invariant›
(**************************************************************************************************)

lemma iagreement_implies_niagreement [iff]: "a0i_agreement  a0n_agreement"
apply (auto intro!: a0n_agreementI)
apply (drule a0i_agreementD, drule order.strict_trans2, auto)
done


lemma PO_a0i_a0n_agreement [iff]: "reach a0i  a0n_agreement"
by (rule subset_trans, rule, rule)

lemma PO_a0i_obs_a0n_agreement [iff]: "oreach a0i  a0n_agreement"
by (rule subset_trans, rule, rule)


end

Theory Runs

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Runs.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Runs.thy 132861 2016-12-21 10:34:41Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Defines types protocol runs: a run is described by a protocol role, the owner 
  and partner agents, and frame, which maps variables to messages. 

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Runs›

theory Runs imports Messages
begin

subsection ‹Type definitions›


datatype role_t = Init | Resp 

datatype var = Var nat

type_synonym 
  rid_t = "fid_t"

type_synonym 
  frame = "var  msg" 

record run_t = 
  role :: role_t
  owner :: agent
  partner :: agent

type_synonym
  progress_t = "rid_t  var set" 


(* Progress predicates on optional variable sets: a run that has not started
   (None) has made no progress; otherwise membership/inclusion in the recorded
   set decides.  Symbols restored after encoding loss. *)
fun
  in_progress :: "var set option ⇒ var ⇒ bool"
where
  "in_progress (Some S) x = (x ∈ S)"
 |"in_progress None x = False"

fun
  in_progressS :: "var set option ⇒ var set ⇒ bool"
where
  "in_progressS (Some S) S' = (S' ⊆ S)"
 |"in_progressS None S' = False"

(* A run with progress is in the domain of the progress map. *)
lemma in_progress_dom [elim]: "in_progress (r R) x ⟹ R ∈ dom r"
by (cases "r R", auto)

lemma in_progress_Some [elim]: "in_progress r x ⟹ ∃x. r = Some x"
by (cases "r", auto)

lemma in_progressS_elt [elim]: "in_progressS r S ⟹ x ∈ S ⟹ in_progress r x"
by (cases r, auto)


end

Theory Channels

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Channels.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Channels.thy 132885 2016-12-23 18:41:32Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Channel messages and related message derivations (extract and fake).

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Channel Messages›

theory Channels
imports Message_derivation
begin

(**************************************************************************************************)
subsection ‹Channel messages›
(**************************************************************************************************)

datatype chan = 
  Chan "tag" "agent" "agent" "msg"


abbreviation 
  Insec :: "[agent, agent, msg]  chan" where
  "Insec  Chan insec"

abbreviation 
  Confid :: "[agent, agent, msg]  chan" where
  "Confid  Chan confid"

abbreviation 
  Auth :: "[agent, agent, msg]  chan" where
  "Auth  Chan auth"

abbreviation 
  Secure :: "[agent, agent, msg]  chan" where
  "Secure  Chan secure"


(**************************************************************************************************)
subsection ‹Extract›
(**************************************************************************************************)

text ‹The set of payload messages that can be extracted from a set of (crypto) messages 
and a set of channel messages, given a set of bad agents. The second rule states that 
the payload can be extracted from insecure and authentic channels as well as from channels
with a compromised endpoint.›

inductive_set
  extr :: "agent set ⇒ msg set ⇒ chan set ⇒ msg set"
  for bad :: "agent set"
  and IK :: "msg set"
  and H :: "chan set"
where
  ― ‹plain intruder knowledge is extractable›
  extr_Inj: "M ∈ IK ⟹ M ∈ extr bad IK H"
  ― ‹payloads of insecure/authentic channels, or of any channel with a
     compromised endpoint, are extractable›
| extr_Chan:
    "⟦ Chan c A B M ∈ H; c = insec ∨ c = auth ∨ A ∈ bad ∨ B ∈ bad ⟧ ⟹ M ∈ extr bad IK H"

declare extr.intros [intro]
declare extr.cases [elim]

lemma extr_empty_chan [simp]: "extr bad IK {} = IK"
by (auto)

lemma IK_subset_extr: "IK  extr bad IK chan"
by (auto)

lemma extr_mono_chan [dest]: "G  H  extr bad IK G  extr bad IK H"
by (safe, erule extr.induct, auto)

lemma extr_mono_IK [dest]: "IK1  IK2  extr bad IK1 H  extr bad IK2 H"
by (safe) (erule extr.induct, auto)

lemma extr_mono_bad [dest]: "bad  bad'  extr bad IK H  extr bad' IK H"
by (safe, erule extr.induct, auto)

lemmas extr_monotone_chan [elim] = extr_mono_chan [THEN [2] rev_subsetD]
lemmas extr_monotone_IK [elim] = extr_mono_IK [THEN [2] rev_subsetD]
lemmas extr_monotone_bad [elim] = extr_mono_bad [THEN [2] rev_subsetD]

lemma extr_mono [intro]: " b  b'; I  I'; C  C'   extr b I C  extr b' I' C'"
by (force)

lemmas extr_monotone [elim] = extr_mono [THEN [2] rev_subsetD]

lemma extr_insert [intro]: "M  extr bad IK H  M  extr bad IK (insert C H)"
by (auto)

lemma extr_insert_Chan [simp]: 
  "extr bad IK (insert (Chan c A B M) H) 
   = (if c = insec  c = auth  A  bad  B  bad 
      then insert M (extr bad IK H) else extr bad IK H)"
by auto

(* do not declare [simp]! *)
lemma extr_insert_chan_eq: "extr bad IK (insert X CH) = extr bad IK {X}  extr bad IK CH"
by (auto)

lemma extr_insert_IK_eq [simp]: "extr bad (insert X IK) CH = insert X (extr bad IK CH)"
by (auto)

lemma extr_insert_bad:
  "extr (insert A bad) IK CH 
   extr bad IK CH  {M.  B. Confid A B M  CH  Confid B A M  CH 
                             Secure A B M  CH  Secure B A M  CH}"
by (rule, erule extr.induct, auto intro: tag.exhaust)

lemma extr_insert_Confid [simp]:
  "A  bad 
   B  bad  
   extr bad IK (insert (Confid A B X) CH) = extr bad IK CH"
by auto


(**************************************************************************************************)
subsection ‹Fake›
(**************************************************************************************************)

text ‹The set of channel messages that an attacker can fake given a set of compromised
agents, a set of crypto messages and a set of channel messages. The second rule states
that an attacker can fake an insecure or confidential messages or a channel message
with a compromised endpoint using a payload that he knows.›

inductive_set
  fake :: "agent set ⇒ msg set ⇒ chan set ⇒ chan set"
  for bad :: "agent set"
  and IK :: "msg set"
  and chan :: "chan set"
where
  ― ‹existing channel messages can be replayed›
  fake_Inj: "M ∈ chan ⟹ M ∈ fake bad IK chan"
  ― ‹the attacker can inject a known payload on insecure/confidential
     channels, or on any channel with a compromised endpoint›
| fake_New:
    "⟦ M ∈ IK; c = insec ∨ c = confid ∨ A ∈ bad ∨ B ∈ bad ⟧ ⟹
    Chan c A B M ∈ fake bad IK chan"

declare fake.cases [elim]
declare fake.intros [intro]

lemmas fake_intros = fake_Inj fake_New

lemma fake_mono_bad [intro]: 
  "bad  bad'  fake bad IK chan  fake bad' IK chan" 
by (auto)

lemma fake_mono_ik [intro]: 
  "IK  IK'  fake bad IK chan  fake bad IK' chan" 
by (auto)

lemma fake_mono_chan [intro]: 
  "chan  chan'  fake bad IK chan  fake bad IK chan'" 
by (auto)

lemma fake_mono [intro]: 
  " bad  bad'; IK  IK'; chan  chan'  fake bad IK chan  fake bad' IK' chan'" 
by (auto, erule fake.cases, auto)

lemmas fake_monotone_bad [elim] = fake_mono_bad [THEN [2] rev_subsetD]
lemmas fake_monotone_ik [elim] = fake_mono_ik [THEN [2] rev_subsetD]
lemmas fake_monotone_chan [elim] = fake_mono_chan [THEN [2] rev_subsetD]
lemmas fake_monotone [elim] = fake_mono [THEN [2] rev_subsetD]


lemma chan_subset_fake: "chan  fake bad IK chan"
by auto

lemma extr_fake:
  "X  fake bad IK chan  extr bad IK' {X}  IK  extr bad IK' chan"
by auto

lemmas extr_fake_2 [elim] = extr_fake [THEN [2] rev_subsetD]

lemma fake_parts_extr_singleton:  
  "X  fake bad IK chan  parts (extr bad IK' {X})  parts IK  parts (extr bad IK' chan)"
by (rule extr_fake [THEN parts_mono, simplified])

lemmas fake_parts_extr_singleton_2 [elim] = fake_parts_extr_singleton [THEN [2] rev_subsetD]


lemma fake_parts_extr_insert: 
assumes "X  fake bad IK CH"
shows "parts (extr bad IK' (insert X CH))  parts (extr bad IK' CH)  parts IK"
proof -
  have "parts (extr bad IK' (insert X CH))  parts (extr bad IK' {X})  parts (extr bad IK' CH)" 
    by (auto simp: extr_insert_chan_eq [where CH=CH])
  also have "...  parts (extr bad IK' CH)  parts IK" using assms
    by (auto dest!: fake_parts_extr_singleton)
  finally show ?thesis .
qed


(* Adding a message faked from the DY closure of the extractable knowledge
   does not change that closure (absorption via synth/analz idempotence). *)
lemma fake_synth_analz_extr: 
assumes  "X ∈ fake bad (synth (analz (extr bad IK CH))) CH"
shows "synth (analz (extr bad IK (insert X CH))) = synth (analz (extr bad IK CH))"
using assms
proof (intro equalityI) 
  have "synth (analz (extr bad IK (insert X CH))) 
      ⊆ synth (analz (extr bad IK {X} ∪ extr bad IK CH))"
    by - (rule synth_analz_mono, auto)
  also have "... ⊆ synth (analz (synth (analz (extr bad IK CH)) ∪ extr bad IK CH))" using assms
    by - (rule synth_analz_mono, auto)
  also have "... ⊆ synth (analz (synth (analz (extr bad IK CH))))"
    by - (rule synth_analz_mono, auto)
  also have "... ⊆ synth (analz (extr bad IK CH))" by simp
  finally show "synth (analz (extr bad IK (insert X CH))) ⊆ synth (analz (extr bad IK CH))" .
next
  have "extr bad IK CH ⊆ extr bad IK (insert X CH)"
    by auto
  then show "synth (analz (extr bad IK CH)) ⊆ synth (analz (extr bad IK (insert X CH)))"
    by - (rule synth_analz_mono, auto)
qed


(**************************************************************************************************)
subsection ‹Closure of Dolev-Yao, extract and fake›
(**************************************************************************************************)

subsubsection ‹‹dy_fake_msg›: returns messages, closure of DY and extr is sufficient›
(**************************************************************************************************)

text ‹Close @{term extr} under Dolev-Yao closure using @{term synth} and @{term analz}. 
This will be used in Level 2 attacker events to fake crypto messages.›

(* All payload messages the attacker can derive: DY closure of the
   extractable knowledge of bad agents b, intruder knowledge i, channels c. *)
definition 
  dy_fake_msg :: "agent set ⇒ msg set ⇒ chan set ⇒ msg set"
where
  "dy_fake_msg b i c = synth (analz (extr b i c))"


(* Basic properties of dy_fake_msg: monotonicity in each argument and
   behaviour under insertion. *)
lemma dy_fake_msg_empty [simp]: "dy_fake_msg bad {} {} = synth {}"
by (auto simp add: dy_fake_msg_def)

lemma dy_fake_msg_mono_bad [dest]: "bad ⊆ bad' ⟹ dy_fake_msg bad I C ⊆ dy_fake_msg bad' I C"
by (auto simp add: dy_fake_msg_def intro!: synth_analz_mono)

lemma dy_fake_msg_mono_ik [dest]: "G ⊆ H ⟹ dy_fake_msg bad G C ⊆ dy_fake_msg bad H C"
by (auto simp add: dy_fake_msg_def intro!: synth_analz_mono)

lemma dy_fake_msg_mono_chan [dest]: "G ⊆ H ⟹ dy_fake_msg bad I G ⊆ dy_fake_msg bad I H"
by (auto simp add: dy_fake_msg_def intro!: synth_analz_mono)
 
lemmas dy_fake_msg_monotone_bad [elim] = dy_fake_msg_mono_bad [THEN [2] rev_subsetD]
lemmas dy_fake_msg_monotone_ik [elim] = dy_fake_msg_mono_ik [THEN [2] rev_subsetD]
lemmas dy_fake_msg_monotone_chan [elim] = dy_fake_msg_mono_chan [THEN [2] rev_subsetD]

lemma dy_fake_msg_insert [intro]: 
  "M ∈ dy_fake_msg bad I C ⟹ M ∈ dy_fake_msg bad I (insert X C)"
by (auto)

(* Joint monotonicity in all three arguments. *)
lemma dy_fake_msg_mono [intro]: 
  "⟦ b ⊆ b'; I ⊆ I'; C ⊆ C' ⟧ ⟹ dy_fake_msg b I C ⊆ dy_fake_msg b' I' C'"
by (force simp add: dy_fake_msg_def intro!: synth_analz_mono)

lemmas dy_fake_msg_monotone [elim] = dy_fake_msg_mono [THEN [2] rev_subsetD]

(* Payloads of insecure or authentic channel messages are derivable. *)
lemma dy_fake_msg_insert_chan:
  "x = insec ∨ x = auth ⟹
   M ∈ dy_fake_msg bad IK (insert (Chan x A B M) CH)"
by (auto simp add: dy_fake_msg_def)


subsubsection ‹‹dy_fake_chan›: returns channel messages›
(**************************************************************************************************)

text ‹The set of all channel messages that an attacker can fake is obtained using
@{term fake} with the sets of possible payload messages derived with @{term dy_fake_msg}
defined above. This will be used in Level 2 attacker events to fake channel messages.›

(* Channel messages fakeable from derivable payloads (dy_fake_msg b i c). *)
definition
  dy_fake_chan :: "agent set ⇒ msg set ⇒ chan set ⇒ chan set"
where
  "dy_fake_chan b i c = fake b (dy_fake_msg b i c) c"


(* Monotonicity of dy_fake_chan in each argument, inherited from fake
   and dy_fake_msg. *)
lemma dy_fake_chan_mono_bad [intro]: 
  "bad ⊆ bad' ⟹ dy_fake_chan bad I C ⊆ dy_fake_chan bad' I C" 
by (auto simp add: dy_fake_chan_def)

lemma dy_fake_chan_mono_ik [intro]: 
  "T ⊆ T' ⟹ dy_fake_chan bad T C ⊆ dy_fake_chan bad T' C" 
by (auto simp add: dy_fake_chan_def)

lemma dy_fake_chan_mono_chan [intro]: 
  "C ⊆ C' ⟹ dy_fake_chan bad T C ⊆ dy_fake_chan bad T C'" 
by (auto simp add: dy_fake_chan_def)

lemmas dy_fake_chan_monotone_bad [elim] = dy_fake_chan_mono_bad [THEN [2] rev_subsetD]
lemmas dy_fake_chan_monotone_ik [elim] = dy_fake_chan_mono_ik [THEN [2] rev_subsetD]
lemmas dy_fake_chan_monotone_chan [elim] = dy_fake_chan_mono_chan [THEN [2] rev_subsetD]


(* Joint monotonicity, proved by chaining the single-argument lemmas. *)
lemma dy_fake_chan_mono [intro]: 
  assumes "b ⊆ b'" and "I ⊆ I'" and "C ⊆ C'"
  shows "dy_fake_chan b I C ⊆ dy_fake_chan b' I' C'"
proof -
  have "dy_fake_chan b I C ⊆ dy_fake_chan b' I C" using ‹b ⊆ b'› by auto
  also have "... ⊆ dy_fake_chan b' I' C" using ‹I ⊆ I'› by auto
  also have "... ⊆ dy_fake_chan b' I' C'" using ‹C ⊆ C'› by auto
  finally show ?thesis .
qed

lemmas dy_fake_chan_monotone [elim] = dy_fake_chan_mono [THEN [2] rev_subsetD]

(* Bridging lemmas between dy_fake_msg/dy_fake_chan and synth/analz closures. *)
lemma dy_fake_msg_subset_synth_analz: 
  "extr bad IK chan ⊆ T ⟹ dy_fake_msg bad IK chan ⊆ synth (analz T)"
by (auto simp add: dy_fake_msg_def synth_analz_mono)

lemma dy_fake_chan_mono2:
  "⟦ extr bad IK chan ⊆ synth (analz y); chan ⊆ fake bad (synth (analz y)) z ⟧ ⟹
  dy_fake_chan bad IK chan ⊆ fake bad (synth (analz y)) z"
apply (auto simp add: dy_fake_chan_def, erule fake.cases, auto)
apply (auto intro!: fake_New dest!: dy_fake_msg_subset_synth_analz)
done

lemma extr_subset_dy_fake_msg: "extr bad IK chan ⊆ dy_fake_msg bad IK chan"
by (auto simp add: dy_fake_msg_def)


(* Inserting a fakeable channel message adds nothing beyond dy_fake_msg. *)
lemma dy_fake_chan_extr_insert: 
  "M ∈ dy_fake_chan bad IK CH ⟹ extr bad IK (insert M CH) ⊆ dy_fake_msg bad IK CH"
by (auto simp add: dy_fake_chan_def dy_fake_msg_def dest: fake_synth_analz_extr)

lemma dy_fake_chan_extr_insert_parts:
  "M ∈ dy_fake_chan bad IK CH ⟹
   parts (extr bad IK (insert M CH)) ⊆ parts (extr bad IK CH) ∪ dy_fake_msg bad IK CH"
by (drule dy_fake_chan_extr_insert [THEN parts_mono], auto simp add: dy_fake_msg_def)

(* Absorption lemmas: deriving from knowledge extended with a derivable
   message yields nothing new. *)
lemma dy_fake_msg_extr: 
  "extr bad ik chan ⊆ synth (analz X) ⟹ dy_fake_msg bad ik chan ⊆ synth (analz X)"
by (drule synth_analz_mono) (auto simp add: dy_fake_msg_def)

lemma extr_insert_dy_fake_msg:
  "M ∈ dy_fake_msg bad IK CH ⟹ extr bad (insert M IK) CH ⊆ dy_fake_msg bad IK CH"
by (auto simp add: dy_fake_msg_def)

lemma dy_fake_msg_insert_dy_fake_msg:
  "M ∈ dy_fake_msg bad IK CH ⟹ dy_fake_msg bad (insert M IK) CH ⊆ dy_fake_msg bad IK CH"
by (drule synth_analz_mono [OF extr_insert_dy_fake_msg], auto simp add: dy_fake_msg_def)

lemma synth_analz_insert_dy_fake_msg:
  "M ∈ dy_fake_msg bad IK CH ⟹ synth (analz (insert M IK)) ⊆ dy_fake_msg bad IK CH"
by (auto dest!: dy_fake_msg_insert_dy_fake_msg, erule subsetD, 
    auto simp add: dy_fake_msg_def elim: synth_analz_monotone)

lemma Fake_insert_dy_fake_msg:
  "M ∈ dy_fake_msg bad IK CH ⟹
   extr bad IK CH ⊆ synth (analz X) ⟹
   synth (analz (insert M IK)) ⊆ synth (analz X)"
by (auto dest!: synth_analz_insert_dy_fake_msg dy_fake_msg_extr)

(* Insecure/authentic channel messages can be re-faked once present. *)
lemma dy_fake_chan_insert_chan:
  "x = insec ∨ x = auth ⟹
   Chan x A B M ∈ dy_fake_chan bad IK (insert (Chan x A B M) CH)"
by (auto simp add: dy_fake_chan_def)

lemma dy_fake_chan_subset:
  "CH ⊆ fake bad (dy_fake_msg bad IK CH) CH' ⟹
   dy_fake_chan bad IK CH ⊆ fake bad (dy_fake_msg bad IK CH) CH'"
by (auto simp add: dy_fake_chan_def)


end

Theory Payloads

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Payloads.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Payloads.thy 132885 2016-12-23 18:41:32Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Payload messages are messages that contain no implementation material,
  i.e. neither long-term keys nor channel tags; also:
  - auxiliary definitions: Keys_bad, broken, Enc_keys_clean 
  - lemmas for moving message sets out of 'analz'

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Payloads and Support for Channel Message Implementations› 

text ‹Definitions and lemmas that do not require the implementations.›

theory Payloads
imports Message_derivation
begin

subsection ‹Payload messages›
(**************************************************************************************************)

text ‹Payload messages contain no implementation material ie no long term keys or tags.›

text ‹Define set of payloads for basic messages.›
(* Payloads on the raw (pre-quotient) message type cmsg: all constructors
   except cLtK and cTag, closed under the message constructors. *)
inductive_set cpayload :: "cmsg set" where
  "cAgent A ∈ cpayload"
| "cNumber T ∈ cpayload"
| "cNonce N ∈ cpayload"
| "cEphK K ∈ cpayload"
| "X ∈ cpayload ⟹ cHash X ∈ cpayload"
| "⟦ X ∈ cpayload; Y ∈ cpayload ⟧ ⟹ cPair X Y ∈ cpayload"
| "⟦ X ∈ cpayload; Y ∈ cpayload ⟧ ⟹ cEnc X Y ∈ cpayload"
| "⟦ X ∈ cpayload; Y ∈ cpayload ⟧ ⟹ cAenc X Y ∈ cpayload"
| "⟦ X ∈ cpayload; Y ∈ cpayload ⟧ ⟹ cSign X Y ∈ cpayload"
| "⟦ X ∈ cpayload; Y ∈ cpayload ⟧ ⟹ cExp X Y ∈ cpayload"

text ‹Lift @{term cpayload} to the quotiented message type.›
lift_definition payload :: "msg set" is cpayload by -

text ‹Lemmas used to prove the intro and inversion rules for @{term payload}.›
(* NOTE(review): Re/Ab appear to be the Rep/Abs morphisms of the msg quotient
   (cf. Quotient3_msg) — confirm names against the Messages theory. *)
lemma eq_rep_abs: "eq x (Re (Ab x))"
by (simp add: Quotient3_msg rep_abs_rsp)


(* cpayload is invariant under the message equivalence eq. *)
lemma eq_cpayload:
  assumes "eq x y" and "x ∈ cpayload"
  shows "y ∈ cpayload"
using assms by (induction x y rule: eq.induct, auto intro: cpayload.intros elim: cpayload.cases)

lemma abs_payload: "Ab x ∈ payload ⟷ x ∈ cpayload"
by (auto simp add: payload_def msg.abs_eq_iff eq_cpayload eq_sym cpayload.intros 
         elim: cpayload.cases)

lemma abs_cpayload_rep: "x ∈ Ab` cpayload ⟹ Re x ∈ cpayload"
apply (auto elim: eq_cpayload [OF eq_rep_abs])
apply (subgoal_tac "x = Ab (Re x)", auto)
using Quotient3_abs_rep Quotient3_msg by fastforce

lemma payload_rep_cpayload: "Re x ∈ cpayload ⟷ x ∈ payload"
by (auto simp add: payload_def abs_cpayload_rep)

text ‹Manual proof of payload introduction rules. Transfer does not work for these›

declare cpayload.intros [intro]
(* One introduction rule per payload constructor, proved by unfolding to
   the raw level via abs_payload / payload_rep_cpayload. *)
lemma payload_AgentI: "Agent A ∈ payload"
  by (auto simp add: msg_defs abs_payload)
lemma payload_NonceI: "Nonce N ∈ payload"
  by (auto simp add: msg_defs abs_payload)
lemma payload_NumberI: "Number N ∈ payload"
  by (auto simp add: msg_defs abs_payload)
lemma payload_EphKI: "EphK X ∈ payload"
  by (auto simp add: msg_defs abs_payload)
lemma payload_HashI: "x ∈ payload ⟹ Hash x ∈ payload"
  by (auto simp add: msg_defs payload_rep_cpayload abs_payload)
lemma payload_PairI: "x ∈ payload ⟹ y ∈ payload ⟹ Pair x y ∈ payload"
  by (auto simp add: msg_defs payload_rep_cpayload abs_payload)
lemma payload_EncI: "x ∈ payload ⟹ y ∈ payload ⟹ Enc x y ∈ payload"
  by (auto simp add: msg_defs payload_rep_cpayload abs_payload)
lemma payload_AencI: "x ∈ payload ⟹ y ∈ payload ⟹ Aenc x y ∈ payload"
  by (auto simp add: msg_defs payload_rep_cpayload abs_payload)
lemma payload_SignI: "x ∈ payload ⟹ y ∈ payload ⟹ Sign x y ∈ payload"
  by (auto simp add: msg_defs payload_rep_cpayload abs_payload)
lemma payload_ExpI: "x ∈ payload ⟹ y ∈ payload ⟹ Exp x y ∈ payload"
by (auto simp add: msg_defs payload_rep_cpayload abs_payload)

lemmas payload_intros [simp, intro] =
  payload_AgentI payload_NonceI payload_NumberI payload_EphKI payload_HashI
  payload_PairI payload_EncI payload_AencI payload_SignI payload_ExpI

text ‹Manual proof of payload inversion rules, transfer does not work for these.›

declare cpayload.cases[elim]
(* Tag and LtK are never payloads (elimination rules). *)
lemma payload_Tag: "Tag X ∈ payload ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done

lemma payload_LtK: "LtK X ∈ payload ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
(* Inversion rules: a composite payload has payload components. *)
lemma payload_Hash: "Hash X ∈ payload ⟹ (X ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
lemma payload_Pair: "Pair X Y ∈ payload ⟹ (X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
lemma payload_Enc: "Enc X Y ∈ payload ⟹ (X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
lemma payload_Aenc: "Aenc X Y ∈ payload ⟹ (X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
lemma payload_Sign: "Sign X Y ∈ payload ⟹ (X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def msg_defs msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done
lemma payload_Exp: "Exp X Y ∈ payload ⟹ (X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹ P"
apply (auto simp add: payload_def Exp_def msg.abs_eq_iff eq_sym)
apply (auto dest!: eq_cpayload simp add: abs_cpayload_rep)
done

declare cpayload.intros[rule del]
declare cpayload.cases[rule del]

lemmas payload_inductive_cases =
  payload_Tag payload_LtK payload_Hash
  payload_Pair payload_Enc payload_Aenc payload_Sign payload_Exp

(* Exhaustion up to the equivalence eq on raw messages. *)
lemma eq_exhaust:
"(⋀x. eq y (cAgent x) ⟹ P) ⟹
 (⋀x. eq y (cNumber x) ⟹ P) ⟹
 (⋀x. eq y (cNonce x) ⟹ P) ⟹
 (⋀x. eq y (cLtK x) ⟹ P) ⟹
 (⋀x. eq y (cEphK x) ⟹ P) ⟹
 (⋀x x'. eq y (cPair x x') ⟹ P) ⟹
 (⋀x x'. eq y (cEnc x x') ⟹ P) ⟹
 (⋀x x'. eq y (cAenc x x') ⟹ P) ⟹
 (⋀x x'. eq y (cSign x x') ⟹ P) ⟹
 (⋀x. eq y (cHash x) ⟹ P) ⟹
 (⋀x. eq y (cTag x) ⟹ P) ⟹
 (⋀x x'. eq y (cExp x x') ⟹ P) ⟹
 P"
apply (cases y)
apply (meson Messages.eq_refl)+
done

(* Exhaustion rule on the quotient type, obtained by transfer. *)
lemma msg_exhaust:
"(⋀x. y = Agent x ⟹ P) ⟹
 (⋀x. y = Number x ⟹ P) ⟹
 (⋀x. y = Nonce x ⟹ P) ⟹
 (⋀x. y = LtK x ⟹ P) ⟹
 (⋀x. y = EphK x ⟹ P) ⟹
 (⋀x x'. y = Pair x x' ⟹ P) ⟹
 (⋀x x'. y = Enc x x' ⟹ P) ⟹
 (⋀x x'. y = Aenc x x' ⟹ P) ⟹
 (⋀x x'. y = Sign x x' ⟹ P) ⟹
 (⋀x. y = Hash x ⟹ P) ⟹
 (⋀x. y = Tag x ⟹ P) ⟹
 (⋀x x'. y = Exp x x' ⟹ P) ⟹
 P"
apply transfer
apply (erule eq_exhaust, auto)
done

(* Case analysis on payload membership: combines msg_exhaust with the
   payload inversion rules (Tag/LtK cases are impossible). *)
lemma payload_cases: 
"a ∈ payload ⟹
(⋀A. a = Agent A ⟹ P) ⟹
(⋀T. a = Number T ⟹ P) ⟹
(⋀N. a = Nonce N ⟹ P) ⟹
(⋀K. a = EphK K ⟹ P) ⟹
(⋀X. a = Hash X ⟹ X ∈ payload ⟹ P) ⟹
(⋀X Y. a = Pair X Y ⟹ X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹
(⋀X Y. a = Enc X Y ⟹ X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹
(⋀X Y. a = Aenc X Y ⟹ X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹
(⋀X Y. a = Sign X Y ⟹ X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹
(⋀X Y. a = Exp X Y ⟹ X ∈ payload ⟹ Y ∈ payload ⟹ P) ⟹
 P"
by (erule msg_exhaust [of a], auto elim: payload_inductive_cases)

declare payload_cases [elim]
declare payload_inductive_cases [elim]

text ‹Properties of payload; messages constructed from payload messages are also payloads.›

lemma payload_parts [simp, dest]:
  "⟦ X ∈ parts S; S ⊆ payload ⟧ ⟹ X ∈ payload" 
by (erule parts.induct) (auto)

lemma payload_parts_singleton [simp, dest]:
  "⟦ X ∈ parts {Y}; Y ∈ payload ⟧ ⟹ X ∈ payload" 
by (erule parts.induct) (auto)

lemma payload_analz [simp, dest]:
  "⟦ X ∈ analz S; S ⊆ payload ⟧ ⟹ X ∈ payload" 
by (auto dest: analz_into_parts)

lemma payload_synth_analz: 
  "⟦ X ∈ synth (analz S); S ⊆ payload ⟧ ⟹ X ∈ payload" 
by (erule synth.induct) (auto intro: payload_analz)

text ‹Important lemma: using messages with implementation material one can only 
synthesise more such messages.›

lemma synth_payload: 
  "Y ∩ payload = {} ⟹ synth (X ∪ Y) ⊆ synth X ∪ -payload"
by (rule, erule synth.induct) (auto) 

lemma synth_payload2:
  "Y ∩ payload = {} ⟹ synth (Y ∪ X) ⊆ synth X ∪ -payload"
by (rule, erule synth.induct) (auto) 

text ‹Lemma: in the case of the previous lemma, @{term synth} can be applied on the 
left with no consequence.›

lemma synth_idem_payload:
  "X ⊆ synth Y ∪ -payload ⟹ synth X ⊆ synth Y ∪ -payload"
by (auto dest: synth_mono subset_trans [OF _ synth_payload])


subsection ‹‹isLtKey›: is a long term key›
(**************************************************************************************************)

(* Payload sets are disjoint from long-term keys, even under parts. *)
lemma LtKeys_payload [dest]: "NI ⊆ payload ⟹ NI ∩ range LtK = {}"
by (auto)

lemma LtKeys_parts_payload [dest]: "NI ⊆ payload ⟹ parts NI ∩ range LtK = {}"
by (auto)

lemma LtKeys_parts_payload_singleton [elim]: "X ∈ payload ⟹ LtK Y ∈ parts {X} ⟹ False"
by (auto)

(* Long-term keys are atomic for parts. *)
lemma parts_of_LtKeys [simp]: "K ⊆ range LtK ⟹ parts K = K"
by (rule, rule, erule parts.induct, auto) 


subsectionkeys_of›: the long term keys of an agent›
(**************************************************************************************************)

definition
  keys_of :: "agent  msg set"
where
  "keys_of A  insert (priK A) {shrK B C | B C. B = A  C = A}"

lemma keys_of_Ltk [intro!]: "keys_of A  range LtK"
by (auto simp add: keys_of_def)

lemma priK_keys_of [intro!]:
  "priK A  keys_of A"
by (simp add: keys_of_def)

lemma shrK_keys_of_1 [intro!]:
  "shrK A B  keys_of A"
by (simp add: keys_of_def)

lemma shrK_keys_of_2 [intro!]:
  "shrK B A  keys_of A"
by (simp add: keys_of_def)
lemma priK_keys_of_eq [dest]:
  "priK B  keys_of A  A = B"
by (simp add: keys_of_def)

lemma shrK_keys_of_eq [dest]:
  "shrK A B  keys_of C  A = C  B = C"
by (simp add: keys_of_def)

lemma def_keys_of [dest]:
  "K  keys_of A  K = priK A  ( B. K = shrK A B  K = shrK B A)"
by (auto simp add: keys_of_def)

lemma parts_keys_of [simp]: "parts (keys_of A) = keys_of A"
by (auto intro!: parts_of_LtKeys)


lemma analz_keys_of [simp]: "analz (keys_of A) = keys_of A"
by (rule, rule, erule analz.induct, auto)

subsection ‹‹Keys_bad›: bounds on the attacker's knowledge of long-term keys.›
(**************************************************************************************************)

text ‹A set of keys contains all public long term keys, and only the private/shared keys 
of bad agents.›

definition
  Keys_bad :: "msg set ⇒ agent set ⇒ bool"
where
  "Keys_bad IK Bad ⟷    
    IK ∩ range LtK ⊆ range pubK ∪ ⋃ (keys_of ` Bad)
    ∧ range pubK ⊆ IK"

― ‹basic lemmas›

lemma Keys_badI:
  "⟦ IK ∩ range LtK ⊆ range pubK ∪ priK`Bad ∪ {shrK A B | A B. A ∈ Bad ∨ B ∈ Bad}; 
     range pubK ⊆ IK ⟧ ⟹
  Keys_bad IK Bad"
by (auto simp add: Keys_bad_def)

lemma Keys_badE [elim]: 
  "⟦ Keys_bad IK Bad;
     ⟦ range pubK ⊆ IK; 
       IK ∩ range LtK ⊆ range pubK ∪ ⋃ (keys_of ` Bad) ⟧
     ⟹ P ⟧ ⟹
  P"
by (auto simp add: Keys_bad_def)

(* Only the long-term-key portion of IK matters. *)
lemma Keys_bad_Ltk [simp]:
  "Keys_bad (IK ∩ range LtK) Bad ⟷ Keys_bad IK Bad"
by (auto simp add: Keys_bad_def)


lemma Keys_bad_priK_D: "⟦ priK A ∈ IK; Keys_bad IK Bad ⟧ ⟹ A ∈ Bad"
by (auto simp add: Keys_bad_def)

lemma Keys_bad_shrK_D: "⟦ shrK A B ∈ IK; Keys_bad IK Bad ⟧ ⟹ A ∈ Bad ∨ B ∈ Bad"
by (auto simp add: Keys_bad_def)

lemmas Keys_bad_dests [dest] = Keys_bad_priK_D Keys_bad_shrK_D


text ‹interaction with @{term insert}.›

(* Keys_bad is preserved by inserting non-keys, public keys, or keys of
   bad agents. *)
lemma Keys_bad_insert_non_LtK: 
  "X ∉ range LtK ⟹ Keys_bad (insert X IK) Bad ⟷ Keys_bad IK Bad"
by (auto simp add: Keys_bad_def)

lemma Keys_bad_insert_pubK: 
  "⟦ Keys_bad IK Bad ⟧ ⟹ Keys_bad (insert (pubK A) IK) Bad"
by (auto simp add: Keys_bad_def)

lemma Keys_bad_insert_priK_bad: 
  "⟦ Keys_bad IK Bad; A ∈ Bad ⟧ ⟹ Keys_bad (insert (priK A) IK) Bad"
by (auto simp add: Keys_bad_def)

lemma Keys_bad_insert_shrK_bad: 
  "⟦ Keys_bad IK Bad; A ∈ Bad ∨ B ∈ Bad ⟧ ⟹ Keys_bad (insert (shrK A B) IK) Bad"
by (auto simp add: Keys_bad_def)

lemmas Keys_bad_insert_lemmas [simp] = 
  Keys_bad_insert_non_LtK Keys_bad_insert_pubK 
  Keys_bad_insert_priK_bad Keys_bad_insert_shrK_bad


(* A DY-derivable message preserves Keys_bad: a derivable long-term key
   was already in IK (by the parts closure assumption), so insertion is
   absorbed; a non-key is covered by Keys_bad_insert_non_LtK. *)
lemma Keys_bad_insert_Fake:
assumes "Keys_bad IK Bad" 
    and "parts IK ∩ range LtK ⊆ IK"
    and "X ∈ synth (analz IK)"
shows "Keys_bad (insert X IK) Bad"
proof cases
  assume "X ∈ range LtK"
  then obtain ltk where "X = LtK ltk" by blast
  thus ?thesis using assms
    by (auto simp add: insert_absorb dest: analz_into_parts)
next 
  assume "X ∉ range LtK"
  thus ?thesis using assms(1) by simp
qed


lemma Keys_bad_insert_keys_of:
  "Keys_bad Ik Bad ⟹
   Keys_bad (keys_of A ∪ Ik) (insert A Bad)"
by (auto simp add: Keys_bad_def)

lemma Keys_bad_insert_payload:
  "Keys_bad Ik Bad ⟹
   x ∈ payload ⟹
   Keys_bad (insert x Ik) Bad"
by (auto simp add: Keys_bad_def)


subsection ‹‹broken K›: pairs of agents where at least one is compromised.›
(**************************************************************************************************)

text ‹Set of pairs (A,B) such that the priK of A or B, or their shared key, is in K›

definition
  broken :: "msg set ⇒ (agent * agent) set"
where
  "broken K ≡ {(A,B) | A B. priK A ∈ K ∨ priK B ∈ K ∨ shrK A B ∈ K ∨ shrK B A ∈ K}"

lemma brokenD [dest!]:
  "(A, B) ∈ broken K ⟹ priK A ∈ K ∨ priK B ∈ K ∨ shrK A B ∈ K ∨ shrK B A ∈ K"
by (simp add: broken_def)

lemma brokenI [intro!]:
  "priK A ∈ K ∨ priK B ∈ K ∨ shrK A B ∈ K ∨ shrK B A ∈ K ⟹ (A, B) ∈ broken K"
by (auto simp add: broken_def)


subsection ‹‹Enc_keys_clean S›: messages with ``clean'' symmetric encryptions.›
(**************************************************************************************************)

text ‹All terms used as symmetric keys in S are either long term keys or messages without 
implementation material.›

definition
  Enc_keys_clean :: "msg set ⇒ bool"
where
  "Enc_keys_clean S ≡ ∀X Y. Enc X Y ∈ parts S ⟶ Y ∈ range LtK ∪ payload"

lemma Enc_keys_cleanI:
  "∀X Y. Enc X Y ∈ parts S ⟶ Y ∈ range LtK ∪ payload ⟹ Enc_keys_clean S"
by (simp add: Enc_keys_clean_def)

― ‹general lemmas about Enc_keys_clean›
lemma Enc_keys_clean_mono: 
  "Enc_keys_clean H ⟹ G ⊆ H ⟹ Enc_keys_clean G"  ― ‹anti-tone›
by (auto simp add: Enc_keys_clean_def dest!: parts_monotone [where G=G])

lemma Enc_keys_clean_Un [simp]: 
  "Enc_keys_clean (G ∪ H) ⟷ Enc_keys_clean G ∧ Enc_keys_clean H" 
by (auto simp add: Enc_keys_clean_def)


― ‹from ‹Enc_keys_clean S›, the property on ‹parts S› also holds for ‹analz S››
lemma Enc_keys_clean_analz:
  "Enc X K ∈ analz S ⟹ Enc_keys_clean S ⟹ K ∈ range LtK ∪ payload"
by (auto simp add: Enc_keys_clean_def dest: analz_into_parts)


― ‹‹Enc_keys_clean› and different types of messages›
lemma Enc_keys_clean_Tags [simp,intro]: "Enc_keys_clean Tags"
by (auto simp add: Enc_keys_clean_def)

lemma Enc_keys_clean_LtKeys [simp,intro]: "K ⊆ range LtK ⟹ Enc_keys_clean K"
by (auto simp add: Enc_keys_clean_def)

lemma Enc_keys_clean_payload [simp,intro]: "NI ⊆ payload ⟹ Enc_keys_clean NI"
by (auto simp add: Enc_keys_clean_def)


subsection ‹Sets of messages with particular constructors›
(**************************************************************************************************)

text ‹Sets of all pairs, ciphertexts, and signatures constructed from a set of messages.›
(*
 FIX: These should probably be turned into definitions, since they may create automation problems 
*)
abbreviation AgentSet :: "msg set"
where "AgentSet ≡ range Agent"

abbreviation PairSet :: "msg set ⇒ msg set ⇒ msg set"
where "PairSet G H ≡ {Pair X Y | X Y. X ∈ G ∧ Y ∈ H}"

abbreviation EncSet :: "msg set ⇒ msg set ⇒ msg set"
where "EncSet G K ≡ {Enc X Y | X Y. X ∈ G ∧ Y ∈ K}"

abbreviation AencSet :: "msg set ⇒ msg set ⇒ msg set"
where "AencSet G K ≡ {Aenc X Y | X Y. X ∈ G ∧ Y ∈ K}"

abbreviation SignSet :: "msg set ⇒ msg set ⇒ msg set"
where "SignSet G K ≡ {Sign X Y | X Y. X ∈ G ∧ Y ∈ K}"

abbreviation HashSet :: "msg set ⇒ msg set"
where "HashSet G ≡ {Hash X | X. X ∈ G}"


text ‹Move @{term Enc}, @{term Aenc}, @{term Sign}, and @{term Pair} sets out of @{term parts}. 
›

lemma parts_PairSet:
  "parts (PairSet G H) ⊆ PairSet G H ∪ parts G ∪ parts H"
by (rule, erule parts.induct, auto)

lemma parts_EncSet:
  "parts (EncSet G K) ⊆ EncSet G K ∪ PairSet (range Agent) G ∪ range Agent ∪ parts G"
by (rule, erule parts.induct, auto)

lemma parts_AencSet:
  "parts (AencSet G K) ⊆ AencSet G K ∪ PairSet (range Agent) G ∪ range Agent ∪ parts G"
by (rule, erule parts.induct, auto)

lemma parts_SignSet:
  "parts (SignSet G K) ⊆ SignSet G K ∪ PairSet (range Agent) G ∪ range Agent ∪ parts G"
by (rule, erule parts.induct, auto)

lemma parts_HashSet:
  "parts (HashSet G) ⊆ HashSet G"
by (rule, erule parts.induct, auto)

lemmas parts_msgSet = parts_PairSet parts_EncSet parts_AencSet parts_SignSet parts_HashSet
lemmas parts_msgSetD = parts_msgSet [THEN [2] rev_subsetD]

text ‹
Remove the message sets from under the @{term "Enc_keys_clean"} predicate.
Only when the first part is a set of agents or tags for @{term Pair}, this is sufficient.›

lemma Enc_keys_clean_PairSet_Agent_Un: 
  "Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (PairSet (Agent`X) G ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

lemma Enc_keys_clean_PairSet_Tag_Un: 
  "Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (PairSet Tags G ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

lemma Enc_keys_clean_AencSet_Un: 
  "Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (AencSet G K ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

(* For EncSet the added symmetric keys must themselves be long-term keys. *)
lemma Enc_keys_clean_EncSet_Un: 
  "K ⊆ range LtK ⟹ Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (EncSet G K ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

lemma Enc_keys_clean_SignSet_Un: 
  "Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (SignSet G K ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

lemma Enc_keys_clean_HashSet_Un: 
  "Enc_keys_clean (G ∪ H) ⟹ Enc_keys_clean (HashSet G ∪ H)"
by (auto simp add: Enc_keys_clean_def dest!: parts_msgSetD)

lemmas Enc_keys_clean_msgSet_Un =
  Enc_keys_clean_PairSet_Tag_Un Enc_keys_clean_PairSet_Agent_Un
  Enc_keys_clean_EncSet_Un Enc_keys_clean_AencSet_Un
  Enc_keys_clean_SignSet_Un Enc_keys_clean_HashSet_Un


subsubsection ‹Lemmas for moving message sets out of @{term "analz"}›
(**************************************************************************************************)

text ‹Pull @{term EncSet} out of @{term analz}.›

(* Only the Dec case is non-trivial: the decryption key K' of an
   encryption found in the closure must be synthesisable without the
   added ciphertexts, since those are keyed by long-term keys. *)
lemma analz_Un_EncSet:
assumes "K ⊆ range LtK" and "Enc_keys_clean (G ∪ H)" 
shows "analz (EncSet G K ∪ H) ⊆ EncSet G K ∪ analz (G ∪ H)"
proof 
  fix X
  assume "X ∈ analz (EncSet G K ∪ H)"
  thus "X ∈ EncSet G K ∪ analz (G ∪ H)"
  proof (induction X rule: analz.induct)
    case (Dec Y K')     
    from Dec.IH(1) show ?case
    proof
      assume "Enc Y K' ∈ analz (G ∪ H)"
      have "K' ∈ synth (analz (G ∪ H))"
      proof -
        have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz (G ∪ H)› assms(2)
          by (blast dest: Enc_keys_clean_analz)
        moreover
        have "K' ∈ synth (EncSet G K ∪ analz (G ∪ H))" using Dec.IH(2)
          by (auto simp add: Collect_disj_eq dest: synth_Int2)
        moreover
        hence "K' ∈ synth (analz (G ∪ H)) ∪ -payload" using assms(1) 
          by (blast dest!: synth_payload2 [THEN [2] rev_subsetD])
        ultimately show ?thesis by auto
      qed  
      thus ?case using ‹Enc Y K' ∈ analz (G ∪ H)› by auto
    qed auto
  next
    case (Adec_eph Y K')  
    thus ?case by (auto dest!: EpriK_synth)
  qed (auto)
qed 

text ‹Pull @{term EncSet} out of @{term analz}, 2nd case: the keys are unknown.›

(* Variant of analz_Un_EncSet where the encryption keys K are not
   derivable from H, so the added ciphertexts can never be opened. *)
lemma analz_Un_EncSet2:
assumes "Enc_keys_clean H" and "K ⊆ range LtK" and "K ∩ synth (analz H) = {}"
shows "analz (EncSet G K ∪ H) ⊆ EncSet G K ∪ analz H"
proof 
  fix X
  assume "X ∈ analz (EncSet G K ∪ H)"
  thus "X ∈ EncSet G K ∪ analz H"
  proof (induction X rule: analz.induct)
    case (Dec Y K')
    from Dec.IH(1) show ?case 
    proof
      assume "Enc Y K' ∈ analz H"
      moreover have "K' ∈ synth (analz H)"
      proof -
          have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz H› assms(1)
            by (auto dest: Enc_keys_clean_analz)
          moreover
          from Dec.IH(2) have H: "K' ∈ synth (EncSet G K ∪ analz H)" 
            by (auto simp add: Collect_disj_eq dest: synth_Int2)
          moreover
          hence "K' ∈ synth (analz H) ∪ -payload" 
          proof (rule synth_payload2 [THEN [2] rev_subsetD], auto elim!: payload_Enc)
            fix X Y
            assume "Y ∈ K" "Y ∈ payload"
            with ‹K ⊆ range LtK› obtain KK where "Y = LtK KK" by auto
            with ‹Y ∈ payload› show False by auto
          qed
        ultimately 
        show ?thesis by auto
      qed
      ultimately show ?case by auto
    next
      (* decrypting one of the added ciphertexts contradicts K being unknown *)
      assume "Enc Y K' ∈ EncSet G K"
      moreover hence "K' ∈ K" by auto
      moreover with ‹K ⊆ range LtK› obtain KK where "K' = LtK KK" by auto
      moreover with Dec.IH(2) have "K' ∈ analz H" 
        by (auto simp add: Collect_disj_eq dest: synth_Int2)
      ultimately show ?case using ‹K ∩ synth (analz H) = {}› by auto
    qed
  next 
    case (Adec_eph Y K') 
    thus ?case by (auto dest!: EpriK_synth)
  qed (insert assms(2), auto)
qed


text ‹Pull @{term AencSet} out of the @{term analz}.›

(* Same argument shape as analz_Un_EncSet, for asymmetric encryptions. *)
lemma analz_Un_AencSet:
assumes "K ⊆ range LtK" and "Enc_keys_clean (G ∪ H)" 
shows "analz (AencSet G K ∪ H) ⊆ AencSet G K ∪ analz (G ∪ H)"
proof 
  fix X
  assume "X ∈ analz (AencSet G K ∪ H)"
  thus "X ∈ AencSet G K ∪ analz (G ∪ H)"
  proof (induction X rule: analz.induct)
    case (Dec Y K') 
    from Dec.IH(1) have "Enc Y K' ∈ analz (G ∪ H)" by auto
    moreover have "K' ∈ synth (analz (G ∪ H))" 
    proof -
      have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz (G ∪ H)› assms(2) 
        by (blast dest: Enc_keys_clean_analz)
      moreover
      have "K' ∈ synth (AencSet G K ∪ analz (G ∪ H))" using Dec.IH(2)
        by (auto simp add: Collect_disj_eq dest: synth_Int2)
      moreover
      hence "K' ∈ synth (analz (G ∪ H)) ∪ -payload" using assms(1) 
        by (blast dest!: synth_payload2 [THEN [2] rev_subsetD])
      ultimately 
      show ?thesis by auto 
    qed
    ultimately show ?case by auto
  next 
    case (Adec_eph Y K') 
    thus ?case by (auto dest!: EpriK_synth)
  qed auto
qed

text ‹Pull @{term AencSet} out of @{term analz}, 2nd case: the keys are unknown.›

(* Here the ciphertexts are under public keys of agents Ag whose private
   keys are not derivable from H, so they cannot be opened. *)
lemma analz_Un_AencSet2:
assumes "Enc_keys_clean H" and "priK`Ag ∩ synth (analz H) = {}"
shows "analz (AencSet G (pubK`Ag) ∪ H) ⊆ AencSet G (pubK`Ag) ∪ analz H"
proof 
  fix X
  assume "X ∈ analz (AencSet G (pubK` Ag) ∪ H)"
  thus "X ∈ AencSet G (pubK` Ag) ∪ analz H"
  proof (induction X rule: analz.induct)
    case (Dec Y K') 
    from Dec.IH(1) have "Enc Y K' ∈ analz H" by auto
    moreover have "K' ∈ synth (analz H)"
    proof -
      have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz H› assms(1)
      by (auto dest: Enc_keys_clean_analz)
      moreover
      from Dec.IH(2) have H: "K' ∈ synth (AencSet G (pubK`Ag) ∪ analz H)" 
        by (auto simp add: Collect_disj_eq dest: synth_Int2)
      moreover
      hence "K' ∈ synth (analz H) ∪ -payload" 
        by (auto dest: synth_payload2 [THEN [2] rev_subsetD])
      ultimately 
      show ?thesis by auto
    qed
    ultimately show ?case by auto
  next 
    case (Adec_eph Y K') 
    thus ?case by (auto dest!: EpriK_synth)
  qed (insert assms(2), auto)
qed

text ‹Pull @{term PairSet} out of @{term analz}.›
(* Pairs are decomposed by analz, so their components G and G' must be
   added to the analyzed set on the right-hand side. *)
lemma analz_Un_PairSet:
  "analz (PairSet G G' ∪ H) ⊆ PairSet G G' ∪ analz (G ∪ G' ∪ H)"
proof 
  fix X
  assume "X ∈ analz (PairSet G G' ∪ H)"
  thus "X ∈ PairSet G G' ∪ analz (G ∪ G' ∪ H)"
  proof (induct X rule: analz.induct)
    case (Dec Y K) 
    from Dec.hyps(2) have "Enc Y K ∈ analz (G ∪ G' ∪ H)" by auto
    moreover
    from Dec.hyps(3) have "K ∈ synth (PairSet G G' ∪ analz (G ∪ G' ∪ H))"
      by (auto simp add: Collect_disj_eq dest: synth_Int2)
    then have "K ∈ synth (analz (G ∪ G' ∪ H))"
      by (elim synth_trans) auto
    ultimately
    show ?case by auto
  next 
    case (Adec_eph Y K) 
    thus ?case by (auto dest!: EpriK_synth)
  qed auto
qed

― ‹move the ‹SignSet› out of the analz›
(* Same argument shape as analz_Un_EncSet, for signatures. *)
lemma analz_Un_SignSet:
assumes "K ⊆ range LtK" and "Enc_keys_clean (G ∪ H)"
shows "analz (SignSet G K ∪ H) ⊆ SignSet G K ∪ analz (G ∪ H)"
proof 
  fix X
  assume "X ∈ analz (SignSet G K ∪ H)"
  thus "X ∈ SignSet G K ∪ analz (G ∪ H)"
  proof (induct X rule: analz.induct)
    case (Dec Y K') 
    from Dec.hyps(2) have "Enc Y K' ∈ analz (G ∪ H)" by auto
    moreover have "K' ∈ synth (analz (G ∪ H))"
    proof -
      have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz (G ∪ H)› assms(2) 
        by (blast dest: Enc_keys_clean_analz)
      moreover
      from Dec.hyps(3) have "K' ∈ synth (SignSet G K ∪ analz (G ∪ H)) "
        by (auto simp add: Collect_disj_eq dest: synth_Int2)
      moreover
      hence "K' ∈ synth (analz (G ∪ H)) ∪ -payload" using assms(1) 
        by (blast dest!: synth_payload2 [THEN [2] rev_subsetD])
      ultimately 
      show ?thesis by auto
    qed
    ultimately show ?case by auto
  next 
    case (Adec_eph Y K) 
    thus ?case by (auto dest!: EpriK_synth)
  qed auto
qed

text ‹Pull @{term Tags} out of @{term analz}.›

(* Tags are atomic, so only the Dec case needs the clean-keys argument. *)
lemma analz_Un_Tag:
assumes "Enc_keys_clean H"
shows "analz (Tags ∪ H) ⊆ Tags ∪ analz H"
proof 
  fix X
  assume "X ∈ analz (Tags ∪ H)"
  thus "X ∈ Tags ∪ analz H"
  proof (induction X rule: analz.induct)
    case (Dec Y K') 
    have "Enc Y K' ∈ analz H" using Dec.IH(1) by auto
    moreover have "K' ∈ synth (analz H)" 
    proof -
      have "K' ∈ range LtK ∪ payload" using ‹Enc Y K' ∈ analz H› assms 
        by (auto dest: Enc_keys_clean_analz)
      moreover
      from Dec.IH(2) have "K' ∈ synth (Tags ∪ analz H)" 
        by (auto simp add: Collect_disj_eq dest: synth_Int2)
      moreover
      hence "K' ∈ synth (analz H) ∪ -payload" 
        by (auto dest: synth_payload2 [THEN [2] rev_subsetD]) 
      ultimately show ?thesis by auto
    qed
    ultimately show ?case by (auto)
  next
    case (Adec_eph Y K') 
    thus ?case by (auto dest!: EpriK_synth)
  qed auto
qed 

text ‹Pull the @{term AgentSet} out of the @{term analz}.›

lemma analz_Un_AgentSet:
shows "analz (AgentSet  H)  AgentSet  analz H"
proof 
  fix X
  assume "X  analz (AgentSet  H)"
  thus "X  AgentSet  analz H"
  (* No cleanness assumption is needed here: agent names contribute no
     keys, so synth over the union collapses to synth (analz H). *)
  proof (induction X rule: analz.induct)
    case (Dec Y K') 
    from Dec.IH(1) have "Enc Y K'  analz H" by auto
    moreover have "K'  synth (analz H)" 
      proof -
        from Dec.IH(2) have "K'  synth (AgentSet  analz H)" 
          by (auto simp add: Collect_disj_eq dest: synth_Int2)
        (* Agents are themselves synthesizable, so they can be absorbed. *)
        moreover have "synth (AgentSet  analz H)  synth (analz H)" 
          by (auto simp add: synth_subset_iff)
        ultimately show ?thesis by auto
    qed
    ultimately show ?case by (auto)
  next
    case (Adec_eph Y K') 
    (* Ephemeral private keys are never synthesizable. *)
    thus ?case by (auto dest!: EpriK_synth)
  qed auto
qed 

text ‹Pull @{term HashSet} out of @{term analz}.›
lemma analz_Un_HashSet:
assumes "Enc_keys_clean H" and "G  - payload" 
shows "analz (HashSet G   H)  HashSet G  analz H"
proof 
  fix X
  assume "X  analz (HashSet G  H)"
  thus "X  HashSet G  analz H"
  proof (induction X rule: analz.induct)
    case (Dec Y K')
    from Dec.IH(1) have "Enc Y K'  analz H" by auto
    moreover have "K'  synth (analz H)"
    proof -
      (* By cleanness the decryption key is a long-term key or payload;
         the two possibilities are handled by the case split below. *)
      have "K'  range LtK  payload" using ‹Enc Y K'  analz H assms(1)
      by (auto dest: Enc_keys_clean_analz)
      thus ?thesis
      proof
        assume "K'  range LtK"
        then obtain KK where "K' = LtK KK" by auto
        moreover
        with Dec.IH(2) show ?thesis
          by (auto simp add: Collect_disj_eq dest: synth_Int2)
      next
        assume "K'  payload"  
        moreover
        (* By assms(2) hashes over G are non-payload, so a payload key
           cannot be synthesized from the HashSet part. *)
        from assms have "HashSet G  payload = {}" by auto
        moreover from Dec.IH(2) have "K'  synth (HashSet G  analz H)" 
          by (auto simp add: Collect_disj_eq dest: synth_Int2)
        ultimately
        have "K'  synth (analz H)  -payload" 
          by (auto dest: synth_payload2 [THEN [2] rev_subsetD])
        with K'  payload› show ?thesis by auto 
      qed
    qed
    ultimately show ?case by auto
  next 
    case (Adec_eph Y K') 
    (* Ephemeral private keys are never synthesizable. *)
    thus ?case by (auto dest!: EpriK_synth)
  qed (insert assms(2), auto)
qed

end

Theory Implem

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Implem.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Implem.thy 132885 2016-12-23 18:41:32Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Parametric channel message implementation
  - definition of 'valid' implementations
  - assumptions for channel implementations (formulated as locales)

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Assumptions for Channel Message Implementation›

text ‹We define a series of locales capturing our assumptions on channel message 
implementations.›

theory Implem
imports Channels Payloads
begin

subsection ‹First step: basic implementation locale›
(**************************************************************************************************)

text ‹This locale has no assumptions, it only fixes an implementation function and 
defines some useful abbreviations (impl*, impl*Set) and ‹valid›.
›

locale basic_implem =
  fixes implem :: "chan  msg"
begin

(* Shorthand for the concrete implementation of each channel type. *)
abbreviation "implInsec A B M  implem (Insec A B M)"
abbreviation "implConfid A B M  implem (Confid A B M)"
abbreviation "implAuth A B M  implem (Auth A B M)"
abbreviation "implSecure A B M  implem (Secure A B M)"

(* Images of the implementation function over a set G of payloads; the
   confidential and secure variants are additionally restricted to a
   set Ag of agent pairs. *)
abbreviation implInsecSet :: "msg set  msg set"
where "implInsecSet G  {implInsec A B M | A B M. M  G}"

abbreviation implConfidSet :: "(agent * agent) set  msg set  msg set"
where "implConfidSet Ag G  {implConfid A B M | A B M.  (A, B)  Ag  M  G}"

abbreviation implAuthSet :: "msg set  msg set"
where "implAuthSet G  {implAuth A B M | A B M. M  G}"

abbreviation implSecureSet :: "(agent * agent) set  msg set  msg set"
where "implSecureSet Ag G  {implSecure A B M | A B M. (A, B)  Ag  M  G}"

(* A message is valid iff it implements some channel message whose
   payload field is an actual payload message. *)
definition
  valid :: "msg set"
where
  "valid  {implem (Chan x A B M) | x A B M. M  payload}"

(* Introduction rule for valid. *)
lemma validI:
  "M  payload  implem (Chan x A B M)  valid"
by (auto simp add: valid_def)

(* Elimination rule for valid. *)
lemma validE:
  "X  valid  ( x A B M. X = implem (Chan x A B M)  M  payload  P)  P"
by (auto simp add: valid_def)

(* Case analysis of a valid message by the four channel types. *)
lemma valid_cases:
fixes X P
assumes "X  valid"
        "(A B M. X = implInsec A B M  M  payload  P)"
        "(A B M. X = implConfid A B M  M  payload  P)"
        "(A B M. X = implAuth A B M  M  payload  P)"
        "(A B M. X = implSecure A B M  M  payload  P)"
shows "P"
proof -
  from assms have "( x A B M. X = implem (Chan x A B M)  M  payload  P)  P"
  by (auto elim: validE)
  moreover from assms have " x A B M. X = implem (Chan x A B M)  M  payload  P"
    proof -
      fix x A B M
      assume "X = implem (Chan x A B M)" "M  payload"
      (* Case distinction on the channel type x selects the matching
         assumption among the four supplied continuations. *)
      with assms show "P" by (cases x, auto)
    qed
  ultimately show ?thesis .
qed

end

subsection ‹Second step: basic and analyze assumptions›
(**************************************************************************************************)

text ‹This locale contains most of the assumptions on implem, i.e.:
\begin{itemize}
\item impl_inj›: injectivity
\item parts_impl_inj›: injectivity through parts
\item Enc_parts_valid_impl›: if Enc X Y appears in parts of an implem, then it is 
  in parts of the payload, or the key is either long term or payload
\item impl_composed›: the implementations are composed (not nonces, agents, tags etc.)
\item analz_Un_implXXXSet›: move the impl*Set out of the analz (only keep the payloads)
\item impl_Impl›: implementations contain implementation material
\item LtK_parts_impl›: no exposed long term keys in the implementations 
  (i.e., they are only used as keys, or under hashes)
\end{itemize}
›

locale semivalid_implem = basic_implem +
― ‹injectivity›
assumes impl_inj:
  "implem (Chan x A B M) = implem (Chan x' A' B' M') 
    x = x'  A = A'  B = B'  M = M'"
― ‹implementations and parts›
and parts_impl_inj:
  "M'  payload 
   implem (Chan x A B M)  parts {implem (Chan x' A' B' M')}  
   x = x'  A = A'  B = B'  M = M'"
and Enc_keys_clean_valid: "I  valid  Enc_keys_clean I"
and impl_composed: "composed (implem Z)"
and impl_Impl: "implem (Chan x A B M)  payload"
― ‹no ltk in the parts of an implementation›
and LtK_parts_impl: "X  valid  LtK K  parts {X}"

― ‹analyze assumptions:›
and analz_Un_implInsecSet:
  " G  payload; Enc_keys_clean H  
  analz (implInsecSet G  H)  synth (analz (G  H))  -payload"
and analz_Un_implConfidSet:
  " G  payload; Enc_keys_clean H  
  analz (implConfidSet Ag G  H)  synth (analz (G  H))  -payload"
and analz_Un_implConfidSet_2:
  " G  payload; Enc_keys_clean H; Ag  broken (parts H  range LtK) = {} 
  analz (implConfidSet Ag G  H)  synth (analz H)  -payload"
and analz_Un_implAuthSet:
  " G  payload; Enc_keys_clean H 
  analz (implAuthSet G  H)  synth (analz (G  H))  -payload"
and analz_Un_implSecureSet:
  " G  payload; Enc_keys_clean H 
  analz (implSecureSet Ag G  H)  synth (analz (G  H))  -payload"
and analz_Un_implSecureSet_2:
  " G  payload; Enc_keys_clean H; Ag  broken (parts H  range LtK) = {} 
  analz (implSecureSet Ag G  H)  synth (analz H)  -payload"

begin
― ‹declare some attributes and abbreviations for the hypotheses›
― ‹and prove some simple consequences of the hypotheses›
declare impl_inj [simp]

lemmas parts_implE [elim] = parts_impl_inj [rotated 1]

declare impl_composed [simp, intro]

(* Congruence helper used to refute equalities between implementations
   and non-composed messages (tags, long-term keys) below. *)
lemma composed_arg_cong: "X = Y  composed X  composed Y"
by (rule arg_cong)

(* Implementations are composed, hence never tags or long-term keys. *)
lemma implem_Tags_aux: "implem (Chan x A B M)  Tags" by (cases x, auto dest: composed_arg_cong)
lemma implem_Tags [simp]: "implem x  Tags" by (cases x, auto simp add: implem_Tags_aux)
lemma implem_LtK_aux: "implem (Chan x A B M)  LtK K" by (cases x, auto dest: composed_arg_cong)
lemma implem_LtK [simp]: "implem x  LtK K" by (cases x, auto simp add: implem_LtK_aux)
lemma implem_LtK2 [simp]: "implem x  range LtK" by (cases x, auto simp add: implem_LtK_aux)

declare impl_Impl [simp]

(* Insert form of LtK_parts_impl, convenient as a destruction rule. *)
lemma LtK_parts_impl_insert:
  "LtK K  parts (insert (implem (Chan x A B M)) S)  M  payload  LtK K  parts S"
apply (simp add: parts_insert [of _ S], clarify)
apply (auto dest: validI LtK_parts_impl)
done


declare LtK_parts_impl_insert [dest]
declare Enc_keys_clean_valid [simp, intro]

lemma valid_composed [simp,dest]: "M  valid  composed M"
by (auto elim: validE)

― ‹lemmas: valid/payload are mutually exclusive›
lemma valid_payload [dest]: " X  valid; X  payload   P"
by (auto elim!: validE)
    
― ‹valid/LtK are mutually exclusive›
lemma valid_isLtKey [dest]: " X  valid; X  range LtK   P"
by (auto)

(* Over knowledge consisting only of payloads, valid implementations,
   long-term keys, and tags, analz yields no new implementations: any
   derivable implementation was already present. *)
lemma analz_valid:
  "H  payload  valid  range LtK  Tags 
   implem (Chan x A B M)  analz H 
   implem (Chan x A B M)  H"
apply (drule analz_into_parts, 
       drule parts_monotone [of _ H "payload  H  valid  range LtK  Tags"], auto)
apply (drule parts_singleton, auto elim!:validE dest: parts_impl_inj)
done

(* Sets of valid implementations expose no long-term keys in their parts. *)
lemma parts_valid_LtKeys_disjoint:
  "I  valid  parts I  range LtK = {}"
apply (safe, drule parts_singleton, clarsimp)
apply (auto dest: subsetD LtK_parts_impl)
done

(* Likewise, analz reveals no long-term keys beyond those already in H. *)
lemma analz_LtKeys:
  "H  payload  valid  range LtK  Tags 
   analz H  range LtK  H"
apply auto
apply (drule analz_into_parts, drule parts_monotone [of _ H "payload  valid  H  range LtK  Tags"], auto)
apply (drule parts_singleton, auto elim!:validE dest: parts_impl_inj)
done

end


subsection ‹Third step: valid_implem›
(**************************************************************************************************)

text ‹This extends @{locale "semivalid_implem"} with four new assumptions, which under certain 
  conditions give information on $A$, $B$, $M$ when @{term "implXXX A B M  synth (analz Z)"}.
  These assumptions are separated because interpretations are more easily proved, if the 
  conclusions that follow from the @{locale "semivalid_implem"} assumptions are already 
  available.
›

locale valid_implem = semivalid_implem +

― ‹Synthesize assumptions: conditions on payloads $M$ implied by derivable›
― ‹channel messages with payload $M$.›
(* Each assumption inverts synthesizability of a channel implementation:
   a derivable implXXX A B M is either already present in H or its
   payload M is itself derivable -- for authentic/secure channels the
   latter alternatively because the agent pair (A, B) is broken. *)
assumes implInsec_synth_analz:
  "H  payload  valid  range LtK  Tags 
   implInsec A B M  synth (analz H) 
   implInsec A B M  H  M  synth (analz H)"
and implConfid_synth_analz:
  "H  payload  valid  range LtK  Tags 
   implConfid A B M  synth (analz H) 
   implConfid A B M  H  M  synth (analz H)"
and implAuth_synth_analz:
  "H  payload  valid  range LtK  Tags 
   implAuth A B M  synth (analz H) 
   implAuth A B M  H  (M  synth (analz H)  (A, B)  broken H)"
and implSecure_synth_analz:
  "H  payload  valid  range LtK  Tags 
   implSecure A B M  synth (analz H) 
   implSecure A B M  H  (M  synth (analz H)  (A, B)  broken H)"

end

Theory Implem_lemmas

(*******************************************************************************

  Project: Refining Authenticated Key Agreement with Strong Adversaries

  Module:  Implem_lemmas.thy (Isabelle/HOL 2016-1)
  ID:      $Id: Implem_lemmas.thy 132885 2016-12-23 18:41:32Z csprenge $
  Author:  Joseph Lallemand, INRIA Nancy <joseph.lallemand@loria.fr>
           Christoph Sprenger, ETH Zurich <sprenger@inf.ethz.ch>
  
  Auxiliary notions and lemmas for channel implementations.
  - message abstraction = inverse of (valid) impl, lifted to sets
  - extractable messages (those of non-confidential and "broken" channels)
  - lemmas for proving L2-L3 intruder refinement

  Copyright (c) 2015-2016 Joseph Lallemand and Christoph Sprenger
  Licence: LGPL

*******************************************************************************)

section ‹Lemmas Following from Channel Message Implementation Assumptions›

theory Implem_lemmas
imports Implem
begin

text ‹These lemmas require the assumptions added in the @{locale "valid_implem"} locale.›

context semivalid_implem
begin
(**************************************************************************************************)
subsection ‹Message implementations and abstractions›
(**************************************************************************************************)

text ‹Abstracting a set of messages into channel messages.›

(* Abstraction of concrete (implementation) messages back to channel
   messages: the inverse image of implem on S, restricted to payloads. *)
definition
  abs :: "msg set  chan set"
where
  "abs S  {Chan x A B M | x A B M. M  payload  implem (Chan x A B M)  S}"

(* Elimination rule: an abstract channel message stems from an
   implementation in H with a payload message field. *)
lemma absE [elim]: 
  " X  abs H;
      x A B M. X = Chan x A B M  M  payload  implem X  H  P 
   P"
by (auto simp add: abs_def)

(* Introduction rule for abs. *)
lemma absI [intro]: "M  payload  implem (Chan x A B M)  H  Chan x A B M  abs H"
by (auto simp add: abs_def)

(* abs is monotone and distributes over union; it maps {} to {}. *)
lemma abs_mono: "G  H  abs G  abs H"
by (auto simp add: abs_def)

lemmas abs_monotone [simp] = abs_mono [THEN [2] rev_subsetD]

lemma abs_empty [simp]: "abs {} = {}"
by (auto simp add: abs_def)

lemma abs_Un_eq: "abs (G  H) = abs G  abs H"
by (auto simp add: abs_def)

text ‹General lemmas about implementations and @{term abs}.›
(* Inserting a payload message does not change the abstraction; inserting
   an implementation adds exactly the corresponding channel message. *)
lemma abs_insert_payload [simp]: "M  payload  abs (insert M S) = abs S"
by (auto simp add: abs_def)

lemma abs_insert_impl [simp]:
  "M  payload  abs (insert (implem (Chan x A B M)) S) = insert (Chan x A B M) (abs S)"
by (auto simp add: abs_def)

(* Messages extracted from abstracted channels are payloads, provided the
   initial knowledge NI consists of payloads. *)
lemma extr_payload [simp, intro]:
  " X  extr Bad NI (abs I); NI  payload   X  payload"
by (erule extr.induct, blast, auto)

(* Long-term keys are invisible to abs. *)
lemma abs_Un_LtK:
  "K  range LtK  abs (K  S) = abs S"
by (auto simp add: abs_Un_eq)

lemma abs_Un_keys_of [simp]:
  "abs (keys_of A  S) = abs S"
by (auto intro!: abs_Un_LtK)


text ‹Lemmas about @{term valid} and @{term abs}

(* Restricting to valid messages does not change the abstraction, and
   every valid message abstracts to some channel message. *)
lemma abs_validSet: "abs (S  valid) = abs S"
by (auto elim: absE intro: validI)

lemma valid_abs: "M  valid   M'. M'  abs {M}"
by (auto elim: validE)


(**************************************************************************************************)
subsection ‹Extractable messages›
(**************************************************************************************************)

text ‹‹extractable K I›: subset of messages in $I$ which are implementations 
(not necessarily valid since we do not require that they are payload) and can be extracted 
using the keys in K. It corresponds to L2 @{term extr}.›

(* The four disjuncts cover: insecure and authentic channels (always
   readable) and confidential/secure channels between broken agent pairs. *)
definition
  extractable :: "msg set  msg set  msg set"
where
  "extractable K I 
    {implInsec A B M | A B M. implInsec A B M  I} 
    {implAuth A B M | A B M. implAuth A B M  I} 
    {implConfid A B M | A B M. implConfid A B M  I  (A, B)  broken K} 
    {implSecure A B M | A B M. implSecure A B M  I  (A, B)  broken K}"

(* Extractable messages form a subset of I. *)
lemma extractable_red: "extractable K I  I"
by (auto simp add: extractable_def)

(* Introduction rule matching the four disjuncts of the definition. *)
lemma extractableI:
  "implem (Chan x A B M)  I 
   x = insec  x = auth  ((x = confid  x = secure)  (A, B)  broken K)    
   implem (Chan x A B M)  extractable K I"
by (auto simp add: extractable_def)

(* Case analysis on the channel type of an extractable message. *)
lemma extractableE:
  "X  extractable K I 
   (A B M. X = implInsec A B M  X  I  P) 
   (A B M. X = implAuth A B M  X  I  P) 
   (A B M. X = implConfid A B M  X  I  (A, B)  broken K  P) 
   (A B M. X = implSecure A B M  X  I  (A, B)  broken K  P) 
  P"
by (auto simp add: extractable_def brokenI)

text ‹General lemmas about implementations and extractable.›
(* The payload of an extractable implementation is obtainable by the L2
   extraction operator extr on the abstraction of I. *)
lemma implem_extractable [simp]:
  " Keys_bad K Bad; implem (Chan x A B M)  extractable K I; M  payload  
  M  extr Bad NI (abs I)"
by (erule extractableE, auto)


text ‹Auxiliary lemmas about extractable messages: they are implementations.›
lemma valid_extractable [simp]: "I  valid  extractable K I  valid"
by (auto intro: subset_trans extractable_red del: subsetI)

(* Hence extractable sets expose no long-term keys in their parts. *)
lemma LtKeys_parts_extractable:
  "I  valid  parts (extractable K I)  range LtK = {}"
by (auto dest: valid_extractable intro!: parts_valid_LtKeys_disjoint)

lemma LtKeys_parts_extractable_elt [simp]:  
  "I  valid  LtK ltk  parts (extractable K I)"
by (blast dest: LtKeys_parts_extractable)


lemma LtKeys_parts_implSecureSet:   (* FIX: possibly problematic: not in normal form *)
  "parts (implSecureSet Ag payload)  range LtK = {}"
by (auto intro!: parts_valid_LtKeys_disjoint intro: validI)

lemma LtKeys_parts_implSecureSet_elt: 
  "LtK K  parts (implSecureSet Ag payload)"
using LtKeys_parts_implSecureSet
by auto

(* Bundle of the long-term-key/parts disjointness facts for use below. *)
lemmas LtKeys_parts = LtKeys_parts_payload parts_valid_LtKeys_disjoint
                      LtKeys_parts_extractable LtKeys_parts_implSecureSet 
                      LtKeys_parts_implSecureSet_elt


subsubsection‹Partition $I$ to keep only the extractable messages›
(**************************************************************************************************)

text ‹Partition the implementation set.›

(* Every valid implementation in I is either extractable or a
   confidential/secure implementation between an unbroken agent pair. *)
lemma impl_partition:
  " NI  payload; I  valid  
  I  extractable K I 
      implConfidSet (UNIV - broken K) payload 
      implSecureSet (UNIV - broken K) payload"
by (auto dest!: subsetD [where A=I] elim!: valid_cases intro:  extractableI)


subsubsection ‹Partition of @{term "extractable"}
(**************************************************************************************************)

text ‹We partition the @{term "extractable"} set into insecure, confidential, authentic 
implementations.›

(* Every extractable implementation carries a payload that is already
   obtainable by L2 extraction from the abstraction of I. *)
lemma extractable_partition:
  "Keys_bad K Bad; NI  payload; I  valid 
  extractable K I  
  implInsecSet (extr Bad NI (abs I)) 
  implConfidSet UNIV (extr Bad NI (abs I)) 
  implAuthSet (extr Bad NI (abs I)) 
  implSecureSet UNIV (extr Bad NI (abs I))"
apply (rule, frule valid_extractable, drule subsetD [where A="extractable K I"], fast)
apply (erule valid_cases, auto)
done


(**************************************************************************************************)
subsection ‹Lemmas for proving intruder refinement (L2-L3)›
(**************************************************************************************************)

text ‹Chain of lemmas used to prove the refinement for l3_dy›. 
The ultimate goal is to show @{prop [display] 
  "synth (analz (NI  I  K  Tags))  synth (analz (extr Bad NI (abs I)))  -payload"
}

subsubsection ‹First: we only keep the extractable messages›

― ‹the synth› is probably not needed›
lemma analz_NI_I_K_analz_NI_EI:
assumes HNI: "NI  payload"
    and HK: "K  range LtK"
    and HI: "I  valid"
shows "analz (NI  I  K  Tags) 
       synth (analz (NI  extractable K I  K  Tags))  -payload"
proof -
  (* Step 1: partition I into extractable messages and confidential/
     secure implementations between unbroken agent pairs (impl_partition). *)
  from HNI HI
  have "analz (NI  I  K  Tags)  
        analz (NI  (extractable K I 
                     implConfidSet (UNIV - broken K) payload 
                     implSecureSet (UNIV - broken K) payload)
                 K  Tags)"
    by (intro analz_mono Un_mono impl_partition, simp_all)
  (* Step 2: reassociate the unions to expose implConfidSet first. *)
  also have "...  analz (implConfidSet (UNIV - broken K) payload 
                    (implSecureSet (UNIV - broken K) payload 
                    (extractable K I  NI  K  Tags)))"
    by (auto)
  (* Step 3: discard the unbroken confidential implementations via the
     locale assumption analz_Un_implConfidSet_2; its two side conditions
     (key cleanness, no broken pairs) are discharged in the subproofs. *)
  also have "...  synth (analz (implSecureSet (UNIV - broken K) payload 
                  (extractable K I  NI  K  Tags)))  -payload"
    proof (rule analz_Un_implConfidSet_2)
      show "Enc_keys_clean (implSecureSet (UNIV - broken K) payload 
                               (extractable K I  NI  K  Tags))"
        by (auto simp add: HNI HI HK intro: validI)
    next
      from HK HI HNI 
      show "(UNIV - broken K)  
            broken (parts (
              implSecureSet (UNIV - broken K) payload 
              (extractable K I  NI  K  Tags))  range LtK)  = {}"
        by (auto simp add: LtKeys_parts 
               LtKeys_parts_implSecureSet_elt [where Ag="- broken K", simplified])
    qed (auto)
  (* Step 4: likewise discard the unbroken secure implementations
     (analz_Un_implSecureSet_2) and collapse the nested synth via
     synth_idem_payload. *)
  also have "...  synth (analz (extractable K I  NI  K  Tags))  -payload"
    proof (rule Un_least, rule synth_idem_payload)
      show "analz (implSecureSet (UNIV - broken K) payload  
                   (extractable K I  NI  K  Tags))
             synth (analz (extractable K I  NI  K  Tags))  -payload"
        proof (rule analz_Un_implSecureSet_2)
          show "Enc_keys_clean (extractable K I  NI  K  Tags)"
            using HNI HK HI by auto
        next  
          from HI HK HNI 
          show "(UNIV - broken K)  
                broken (parts (extractable K I  NI  K  Tags)  range LtK) = {}"
          by (auto simp add: LtKeys_parts)
        qed (auto)
    next
      show "-payload  synth (analz (extractable K I  NI  K  Tags))  -payload"
        by auto
    qed
  (* Step 5: commute the union back into the order of the statement. *)
  also have "...  synth (analz (NI  extractable K I  K  Tags))  -payload"
    by (simp add: sup.left_commute sup_commute)
  finally show ?thesis .
qed


subsubsection ‹Only keep the extracted messages (instead of extractable)›
(**************************************************************************************************)

lemma analz_NI_EI_K_synth_analz_NI_E_K:
assumes HNI: "NI  payload"
    and HK: "K  range LtK"
    and HI: "I  valid"
    and Hbad: "Keys_bad K Bad"
shows "analz (NI  extractable K I  K  Tags)
     synth (analz (extr Bad NI (abs I)  K  Tags))  -payload"
proof -
  from HNI HI Hbad
  have "analz (NI  extractable K I  K  Tags)  
        analz (NI  (implInsecSet (extr Bad NI (abs I)) 
                     implConfidSet UNIV (extr Bad NI (abs I)) 
                     implAuthSet (extr Bad NI (abs I)) 
                     implSecureSet UNIV (extr Bad NI (abs I))) 
                    K  Tags)"
    by (intro analz_mono Un_mono extractable_partition) (auto)

  also have "...  analz (implInsecSet (extr Bad NI (abs I)) 
                     (implConfidSet UNIV (extr Bad NI (abs I)) 
                     (implAuthSet (extr Bad NI (abs I)) 
                     (implSecureSet UNIV (extr Bad NI (abs I)) 
                     (NI  K  Tags)))))"
    by (auto)
  also have "...  synth (analz (extr Bad NI (abs I) 
                   (implConfidSet UNIV (extr Bad NI (abs I)) 
                   (implAuthSet (extr Bad NI (abs I)) 
                   (implSecureSet UNIV (extr Bad NI (abs I))  (NI  K  Tags)))))) 
                  -payload"
    by (rule analz_Un_implInsecSet)
       (auto simp only: Un_commute [of "extr _ _ _" _] Un_assoc Un_absorb, 
        auto simp add: HNI HK HI intro!: validI)
  also have "...  synth (analz (extr Bad NI (abs I) 
                   (implAuthSet (extr Bad NI (abs I)) 
                   (implSecureSet UNIV (extr Bad NI (abs I))  (NI  K  Tags))))) 
                  -payload"
    proof (rule Un_least, rule synth_idem_payload)
      have "analz (implConfidSet UNIV (extr Bad NI (abs I)) 
                   (implAuthSet (extr Bad NI (abs I)) 
                   (implSecureSet UNIV (extr Bad NI (abs I))  
                   (NI  (K  extr Bad NI (abs I)  Tags)))))
             synth (analz (extr Bad NI (abs I) 
                           (implAuthSet (extr Bad NI (abs I)) 
                           (implSecureSet UNIV (extr Bad NI (abs I))  
                           (NI  (K  extr Bad NI (abs I)  Tags)))))) 
               -payload"
        by (rule analz_Un_implConfidSet)
           (auto simp only: Un_commute [of "extr _ _ _" _] Un_assoc Un_absorb,
            auto simp add: HK HI HNI  intro!: validI)
      then show "analz (extr Bad NI (abs I) 
                        (implConfidSet UNIV (extr Bad NI (abs I)) 
                        (implAuthSet (extr Bad NI (abs I)) 
                        (implSecureSet UNIV (extr Bad NI (abs I))  (NI  K  Tags)))))
                  synth (analz (extr Bad NI (abs I) 
                         (implAuthSet (extr Bad NI (abs I)) 
                         (implSecureSet UNIV (extr Bad NI (abs I))  
                         (NI  K  Tags))))) 
                    -payload"
        by (simp add: inf_sup_aci(6) inf_sup_aci(7))
    next
      show "-payload
             synth (analz (extr Bad NI (abs I) 
                           (implAuthSet (extr Bad NI (abs I)) 
                           (implSecureSet UNIV (extr Bad NI (abs I))  (NI  K  Tags))))) 
               -payload"
        by blast
    qed
  also have "...  synth (analz (extr Bad NI (abs I) 
                   (implSecureSet UNIV (extr Bad NI (abs I))  (NI  K  Tags)))) 
                  -payload"
    proof (rule Un_least, rule synth_idem_payload)
      have "analz (implAuthSet (extr Bad NI (abs I)) 
                   (implSecureSet UNIV (extr Bad NI (abs I))  
                   (NI  (K  (extr Bad NI (abs I)  Tags)))))
             synth (analz (extr Bad NI (abs I) 
                           (implSecureSet UNIV (extr Bad NI (abs I))  
                           (NI  (K