:: Proof fragment: exhibits the n-by-n identity matrix 1.(K,n) over the
:: field K as a witness, then shows it is diagonal.
:: NOTE(review): the enclosing existential statement begins before this
:: excerpt -- presumably "ex M being Matrix of n,K st M is diagonal ..."
:: -- confirm against the surrounding article text.
take
1. (K,n)
:: Claim to discharge for the chosen witness: 1.(K,n) is diagonal.
:: NOTE(review): the leading ";" and the layout of the next lines look
:: like extraction garbling of "take 1. (K,n); 1. (K,n) is diagonal" --
:: verify against the original Mizar source before re-running the checker.
; 1. (K,n) is diagonal
:: By MATRIX_1:def 6 ("diagonal"), it suffices to show that every entry
:: of 1.(K,n) that is nonzero lies on the main diagonal, i.e. for all
:: valid indices [i,j], a nonzero (i,j)-entry forces i = j.
:: NOTE(review): "MATRIX_1:def 6" appears fused into this line by text
:: extraction; it is the definitional expansion being proved, not part
:: of the "let" statement itself -- confirm original token order.
let i be Nat; MATRIX_1:def 6 for j being Nat st [i,j] in Indices (1. (K,n)) & (1. (K,n)) * (i,j) <> 0. K holds
i = j
:: Fix an arbitrary column index j; the goal specializes to the
:: implication for this particular pair (i,j).
let j be Nat; ( [i,j] in Indices (1. (K,n)) & (1. (K,n)) * (i,j) <> 0. K implies i = j )
:: Discharge the implication by Def3 (a local definition of this
:: article; presumably it gives the entries of 1.(K,n), with
:: off-diagonal entries equal to 0.K -- confirm Def3 upstream), so a
:: nonzero entry can only occur when i = j. "verum" closes the trivial
:: remaining goal.
thus
( [i,j] in Indices (1. (K,n)) & (1. (K,n)) * (i,j) <> 0. K implies i = j )
by Def3; verum