let M1 be Matrix of n,K; :: thesis: ( M1 = M @ implies M1 is Anti-subsymmetric )
assume A1: M1 = M @ ; :: thesis: M1 is Anti-subsymmetric
A2: Indices M = [:(Seg n),(Seg n):] by MATRIX_0:24;
let i, j, k, l be Nat; :: according to MATRIX17:def 2 :: thesis: ( [i,j] in Indices M1 & k = (n + 1) - j & l = (n + 1) - i implies M1 * (i,j) = - (M1 * (k,l)) )
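:: Informal overview (comment only, not part of the verified text): by the
:: condition unfolded from MATRIX17:def 2 above, Anti-subsymmetric means that
:: every entry equals the negative of the entry mirrored across the
:: anti-diagonal, i.e. M1 * (i,j) = - (M1 * ((n + 1) - j,(n + 1) - i))
:: whenever [i,j] in Indices M1.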
assume that
A3: [i,j] in Indices M1 and
A4: ( k = (n + 1) - j & l = (n + 1) - i ) ; :: thesis: M1 * (i,j) = - (M1 * (k,l))
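:: k and l are the anti-diagonal reflections of j and i respectively, so the
:: remaining goal is the single entry identity M1 * (i,j) = - (M1 * (k,l)).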
[i,j] in [:(Seg n),(Seg n):] by A3, MATRIX_0:24;
then A5: ( i in Seg n & j in Seg n ) by ZFMISC_1:87;
then A6: [j,i] in [:(Seg n),(Seg n):] by ZFMISC_1:87;
( (n + 1) - j in Seg n & (n + 1) - i in Seg n ) by A5, Lm1;
then A7: ( [((n + 1) - i),((n + 1) - j)] in [:(Seg n),(Seg n):] & [((n + 1) - j),((n + 1) - i)] in [:(Seg n),(Seg n):] ) by ZFMISC_1:87;
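:: A6 and A7 certify that the swapped and mirrored index pairs lie inside
:: Indices M, so MATRIX_0:def 6 (used here, via A1, for the entries of M @)
:: can be applied entrywise in the computation below.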
thus M1 * (i,j) = M * (j,i) by A1, A2, A6, MATRIX_0:def 6
  .= - (M * (l,k)) by A4, A6, A2, Def2
  .= - (M1 * (k,l)) by A1, A7, A2, A4, MATRIX_0:def 6 ; :: thesis: verum
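:: Informal summary of the equality chain (assuming, as the use of Def2
:: suggests, that M itself satisfies the Anti-subsymmetric condition in the
:: surrounding context):
::   M1 * (i,j) = M * (j,i)                          [ M1 = M @ ]
::              = - (M * ((n + 1) - i,(n + 1) - j))  [ Def2 applied to M at (j,i) ]
::              = - (M * (l,k))                      [ A4 ]
::              = - (M1 * (k,l))                     [ M1 = M @ again ]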