:: Theorem (proof fragment): for every subgraph H of a finite graph G1 there is
:: a spanning subgraph G2 of G1 with the same .size() (edge count) and a finite
:: sequence p of graphs, of length (G1.order() - H.order()) + 1, starting at H
:: and ending at G2, in which each step adds a single new vertex (addVertex).
:: NOTE(review): several inner "proof ... end" skeletons appear to have been
:: flattened out of this extract (e.g. the justifications of A4 and of the
:: statement before the reconsider at line "for x being Element of dom p ...").
let G1 be _finite _Graph; for H being Subgraph of G1 ex G2 being spanning Subgraph of G1 ex p being non empty Graph-yielding _finite FinSequence st
( H .size() = G2 .size() & p . 1 == H & p . (len p) = G2 & len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
let H be Subgraph of G1; ex G2 being spanning Subgraph of G1 ex p being non empty Graph-yielding _finite FinSequence st
( H .size() = G2 .size() & p . 1 == H & p . (len p) = G2 & len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
:: V is the set of vertices of G1 missing from H; G2 is H with all of V added.
set V = (the_Vertices_of G1) \ (the_Vertices_of H);
set G2 = the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H);
:: Th59 supplies the step-by-step vertex-adding sequence p from H to G2,
:: together with its length in terms of the cardinality of the added set.
consider p being non empty Graph-yielding FinSequence such that
A1:
( p . 1 == H & p . (len p) = the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H) & len p = (card (((the_Vertices_of G1) \ (the_Vertices_of H)) \ (the_Vertices_of H))) + 1 )
and
A2:
for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H) st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
by Th59;
:: Induction over positions of p: every member graph p.n is _finite.
defpred S1[ Nat] means for n being Element of dom p st $1 = n holds
p . n is _finite ;
:: Base case: p.1 == H and H is a subgraph of the finite G1.
A3:
S1[1]
by A1, GLIB_000:89;
:: Inductive step (justification not visible in this extract).
A4:
for k being non zero Nat st S1[k] holds
S1[k + 1]
:: Scheme NAT_1:sch 10 closes the induction from A3 and A4.
A10:
for k being non zero Nat holds S1[k]
from NAT_1:sch 10(A3, A4);
for x being Element of dom p holds p . x is _finite
:: All members finite, so p itself is a _finite Graph-yielding FinSequence.
then reconsider p = p as non empty Graph-yielding _finite FinSequence by GLIB_000:def 66;
:: A11: the vertex set of G2 is all of G1's vertices, i.e. G2 is spanning:
:: Vertices(H) \/ (Vertices(G1) \ Vertices(H)) = Vertices(G1) by XBOOLE_1:45.
A11: the_Vertices_of the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H) =
(the_Vertices_of H) \/ ((the_Vertices_of G1) \ (the_Vertices_of H))
by GLIB_006:def 10
.=
the_Vertices_of G1
by XBOOLE_1:45
;
the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H) is Subgraph of G1
then reconsider G2 = the addVertices of H,(the_Vertices_of G1) \ (the_Vertices_of H) as spanning Subgraph of G1 by A11, GLIB_000:def 33;
:: Exhibit the two existential witnesses: G2, then p.
take
G2
; ex p being non empty Graph-yielding _finite FinSequence st
( H .size() = G2 .size() & p . 1 == H & p . (len p) = G2 & len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
take
p
; ( H .size() = G2 .size() & p . 1 == H & p . (len p) = G2 & len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
:: Adding vertices does not change the edge count (GLIB_006:95).
thus
H .size() = G2 .size()
by GLIB_006:95; ( p . 1 == H & p . (len p) = G2 & len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
:: Endpoints of p come directly from A1.
thus
( p . 1 == H & p . (len p) = G2 )
by A1; ( len p = ((G1 .order()) - (H .order())) + 1 & ( for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) ) )
:: Simplify the double difference so A1's length formula becomes a plain
:: cardinality difference: (A \ B) \ B = A \ (B \/ B) = A \ B (XBOOLE_1:41).
((the_Vertices_of G1) \ (the_Vertices_of H)) \ (the_Vertices_of H) =
(the_Vertices_of G1) \ ((the_Vertices_of H) \/ (the_Vertices_of H))
by XBOOLE_1:41
.=
(the_Vertices_of G1) \ (the_Vertices_of H)
;
:: len p = card(V(G1)) - card(V(H)) + 1 (CARD_2:44, valid since V(H) c= V(G1)),
:: then rewrite both cardinalities as .order() via GLIB_000:def 24.
hence len p =
((card (the_Vertices_of G1)) - (card (the_Vertices_of H))) + 1
by A1, CARD_2:44
.=
((G1 .order()) - (card (the_Vertices_of H))) + 1
by GLIB_000:def 24
.=
((G1 .order()) - (H .order())) + 1
by GLIB_000:def 24
;
:: Final conjunct: each intermediate step adds one vertex, now stated with the
:: witness v retyped as a Vertex of G1 rather than of G2.
for n being Element of dom p st n <= (len p) - 1 holds
ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
let n be Element of dom p; ( n <= (len p) - 1 implies ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) ) )
assume
n <= (len p) - 1
; ex v being Vertex of G1 st
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
:: A2 gives the vertex as a Vertex of G2 ...
then consider v being Vertex of G2 such that
A12:
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
by A2;
the_Vertices_of G2 c= the_Vertices_of G1
;
:: ... which is also a Vertex of G1 since G2's vertices are included in G1's.
then reconsider v = v as Vertex of G1 by TARSKI:def 3;
take
v
; ( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
thus
( p . (n + 1) is addVertex of p . n,v & not v in the_Vertices_of (p . n) )
by A12; verum