:: Theorem (flattened Mizar proof; the `theorem`/`proof`/`end` structure
:: appears to have been stripped by extraction — TODO confirm against the
:: original article): for the n-by-n identity matrix 1.(K,n) over a Ring K
:: and any valid index pair [i,j], the j-th column Col(1.(K,n),j) has the
:: ring unity 1.K at row i when i = j, and the ring zero 0.K when i <> j.
::
:: Generalization: fix n, then K, then i and j; the text after each `let`
:: restates the remaining thesis (inlined by the flattening).
let n be Nat; for K being Ring
for i, j being Nat st [i,j] in Indices (1. (K,n)) holds
( ( i = j implies (Col ((1. (K,n)),j)) . i = 1. K ) & ( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K ) )
let K be Ring; for i, j being Nat st [i,j] in Indices (1. (K,n)) holds
( ( i = j implies (Col ((1. (K,n)),j)) . i = 1. K ) & ( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K ) )
let i, j be Nat; ( [i,j] in Indices (1. (K,n)) implies ( ( i = j implies (Col ((1. (K,n)),j)) . i = 1. K ) & ( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K ) ) )
:: Abbreviation for the identity matrix. NOTE(review): B is not referenced
:: in the visible steps — possibly used only in parts lost to flattening.
set B = 1. (K,n);
:: A1: the hypothesis that [i,j] is a valid index pair of the matrix.
assume A1:
[i,j] in Indices (1. (K,n))
; ( ( i = j implies (Col ((1. (K,n)),j)) . i = 1. K ) & ( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K ) )
:: From the pair membership, project out the first component: i lies in the
:: row domain of the matrix (ZFMISC_1:87 handles membership of an ordered
:: pair in a Cartesian product — Indices is such a product).
then
i in dom (1. (K,n))
by ZFMISC_1:87;
:: A2: since i is in the domain, the i-th entry of the j-th column is by
:: definition the matrix entry at position (i,j) (MATRIX_0:def 8 = Col).
then A2:
(Col ((1. (K,n)),j)) . i = (1. (K,n)) * (i,j)
by MATRIX_0:def 8;
:: First conjunct: on the diagonal the identity matrix holds 1.K, by the
:: defining property of 1.(K,n) (MATRIX_1:def 3) applied under A1.
:: NOTE(review): the justification cites A1 but not A2 here — presumably
:: A2 is carried implicitly by `hence`; verify in the unflattened proof.
hence
( i = j implies (Col ((1. (K,n)),j)) . i = 1. K )
by A1, MATRIX_1:def 3; ( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K )
:: Second conjunct: off the diagonal the identity matrix holds 0.K, again
:: by MATRIX_1:def 3, combined with A2 to rewrite the column entry.
thus
( i <> j implies (Col ((1. (K,n)),j)) . i = 0. K )
by A1, A2, MATRIX_1:def 3; verum