let M be Matrix of n,K; :: thesis: ( M = M1 + M2 implies M is Anti-subsymmetric )
assume A1: M = M1 + M2 ; :: thesis: M is Anti-subsymmetric
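:: Note: by MATRIX17:def 2 (invoked below), anti-subsymmetry is checked
:: entrywise, so it suffices to verify M * (i,j) = - (M * (k,l)) for each
:: index pair (i,j) and its mirror (k,l) across the secondary diagonal.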
let i, j, k, l be Nat; :: according to MATRIX17:def 2 :: thesis: ( [i,j] in Indices M & k = (n + 1) - j & l = (n + 1) - i implies M * (i,j) = - (M * (k,l)) )
assume that
A2: [i,j] in Indices M and
A3: ( k = (n + 1) - j & l = (n + 1) - i ) ; :: thesis: M * (i,j) = - (M * (k,l))
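:: A3 says (k,l) = ((n + 1) - j,(n + 1) - i), i.e. the reflection of
:: (i,j) across the secondary (anti-)diagonal of an n x n matrix.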
A4: Indices M = [:(Seg n),(Seg n):] by MATRIX_0:24;
( (n + 1) - j in Seg n & (n + 1) - i in Seg n ) by A2, A4, Lm2;
then A5: [k,l] in [:(Seg n),(Seg n):] by A3, ZFMISC_1:87;
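:: Lm2 (a local lemma, not shown here) guarantees that the mirrored
:: coordinates (n + 1) - j and (n + 1) - i stay inside Seg n, so the
:: mirrored position [k,l] is again a valid index of M.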
A6: ( Indices M1 = [:(Seg n),(Seg n):] & Indices M2 = [:(Seg n),(Seg n):] ) by MATRIX_0:24;
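:: M, M1 and M2 are all n x n, so they share the index set
:: [:(Seg n),(Seg n):]; this licenses the entrywise reading of matrix
:: addition (MATRIX_3:def 3) at both positions (i,j) and (k,l).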
(M1 + M2) * (i,j) = (M1 * (i,j)) + (M2 * (i,j)) by A2, A4, A6, MATRIX_3:def 3
.= (- (M1 * (k,l))) + (M2 * (i,j)) by A2, A4, A6, A3, Def2
.= (- (M1 * (k,l))) + (- (M2 * (k,l))) by A2, A4, A6, A3, Def2
.= - ((M1 * (k,l)) + (M2 * (k,l))) by RLVECT_1:31
.= - ((M1 + M2) * (k,l)) by A6, A5, MATRIX_3:def 3 ;
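:: The chain above: expand the sum entrywise, flip the sign of each
:: summand at the mirrored position via Def2 (anti-subsymmetry of M1
:: and M2), then collect the two negatives into a single negation
:: (RLVECT_1:31).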
hence M * (i,j) = - (M * (k,l)) by A1; :: thesis: verum
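:: In conventional notation the computation reads, with k = (n + 1) - j
:: and l = (n + 1) - i:
::   (M1 + M2)_(i,j) = M1_(i,j) + M2_(i,j)
::                   = - M1_(k,l) - M2_(k,l) = - ((M1 + M2)_(k,l)),
:: and A1 transfers this identity from M1 + M2 to M, closing the thesis.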