\chapter{Mathematical Preliminaries}
\label{cha:math-prel}
\section{Graph Laplacians}
\label{sec:graph-laplacians}
We now introduce the concept of the Laplacian matrix of a graph. Our
exposition is necessarily brief; for a comprehensive account
of graph Laplacians, see
\citep{chung05:_laplac_cheeg,cvetkovic80:_spect_graph_theor_applic}. \\
\\
%
\noindent Let $G = (V,E,\omega)$ be a simple, undirected graph with
vertex set $V$, edge set $E$ and similarity measure $\omega \colon E \mapsto
\mathbb{R}^{\geq 0}$. If $u$ and $v$ are vertices of $G$, we write $u
\sim v$ whenever $\{u,v\} \in E$. The degree of a vertex $v$ is
defined as $\deg(v) = \sum_{u \sim v}{\omega(\{u,v\})}$ and the volume
of $G$ is $\mathrm{Vol}(G) = \sum_{v \in V}{\deg(v)}$. We denote by
$N$ the number of vertices of $G$. We define $D$ as the $N
\times N$ diagonal degree matrix with entries $d_{vv} = \deg(v)$.
\begin{definition}
\label{def:1}
Let $G = (V,E,\omega)$ be a simple, undirected graph with similarity
measure $\omega$. The {\em combinatorial} Laplacian of $G$ is the
matrix $L = L(G)$ with entries
\begin{equation}
\label{eq:1}
L_{uv} = \begin{cases}
- \omega(\{u,v\}) & \text{if $u \not = v$ and $u \sim v$} \\
\deg(u) & \text{if $u = v$} \\
0 & \text{otherwise}
\end{cases}
\end{equation}
The {\em normalized} Laplacian of $G$ is the matrix $\mathcal{L} =
\mathcal{L}(G)$ with entries
\begin{equation}
\label{eq:2}
\mathcal{L}_{uv} = \begin{cases}
- \tfrac{\omega(\{u,v\})}{\sqrt{\deg(u)}\sqrt{\deg(v)}} & \text{if $u \not = v$ and $u \sim v$} \\
1 & \text{if $u = v$} \\
0 & \text{otherwise}
\end{cases}
\end{equation}
\end{definition}
The following proposition lists some simple properties of the
combinatorial and normalized Laplacians.
\begin{proposition}
\label{prop:1}
Let $G = (V,E,\omega)$ be a simple, undirected graph and $L$ and
$\mathcal{L}$ be its combinatorial and normalized Laplacians,
respectively. We have
\begin{itemize}
\item $L$ and $\mathcal{L}$ are symmetric, positive
semi-definite matrices.
\item $\mathcal{L} = D^{-1/2} L D^{-1/2}$.
\item The number of connected components of $G$ is equal to the
number of zero eigenvalues of either $L$ or $\mathcal{L}$.
\item The eigenvalues of $\mathcal{L}$ are at most $2$.
\end{itemize}
\end{proposition}
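To illustrate Proposition \ref{prop:1} with a small worked example,
consider the path graph on $V = \{1,2,3\}$ with unit edge weights, so
that $\deg(1) = \deg(3) = 1$ and $\deg(2) = 2$. Then
\begin{equation*}
L = \begin{pmatrix} 1 & -1 & 0 \\ -1 & 2 & -1 \\ 0 & -1 & 1
\end{pmatrix}, \qquad
\mathcal{L} = \begin{pmatrix} 1 & -\tfrac{1}{\sqrt{2}} & 0 \\
-\tfrac{1}{\sqrt{2}} & 1 & -\tfrac{1}{\sqrt{2}} \\
0 & -\tfrac{1}{\sqrt{2}} & 1 \end{pmatrix}.
\end{equation*}
The eigenvalues of $L$ are $0, 1, 3$ and those of $\mathcal{L}$ are
$0, 1, 2$. Both matrices have exactly one zero eigenvalue, reflecting
that the graph is connected, and the eigenvalue $2$ of $\mathcal{L}$
attains the bound in Proposition \ref{prop:1}, as it does for any
connected bipartite graph.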
\section{Finite Markov chains}
\label{sec:finite-markov-chain}
\begin{definition}
\label{def:6}
Let $\Omega$ be a finite or countably infinite set and
$\mathbb{Z}^{*}$ be the set of non-negative integers. A sequence
$\mathbf{X} = (X_n)_{n \in \mathbb{Z}^{*}}$ of random variables with values in
$\Omega$ is a (time-homogeneous) {\em Markov chain} if
\begin{equation}
\label{eq:8}
\mathbb{P}[X_{n+1} = j \, | \, X_n = i, X_{n-1} = i_{n-1},
\dots, X_0 = i_0] = \mathbb{P}[X_{n+1} = j \, | \, X_n = i] =
p_{ij}
\end{equation}
for all $n \geq 0$ and all states $i_0, i_1, \dots, i_{n-1}, i,
j$. The matrix $\mathbf{P}$, possibly infinite, with entries
$\mathbf{P}(i,j) = p_{ij}$ is then termed the transition matrix of
$(X_n)_{n \in \mathbb{Z}^*}$.
\end{definition}
Let $\mathbf{X} = (X_n)_{n \in \mathbb{Z}^*}$ be a Markov
chain. Denote by $p_{ij}^{(n)}$ the probability of going from state
$i$ to state $j$ in $n$ steps, i.e.,
\begin{equation}
\label{eq:11}
p_{ij}^{(n)} = \mathbb{P}[ X_{n + m } = j \, | \, X_m = i]
\end{equation}
for all $i, j \in \Omega$ and $m,n \in \mathbb{Z}^{*}$. The
$p_{ij}^{(n)}$ then satisfy the {\em Chapman-Kolmogorov equation}
\begin{equation}
\label{eq:12}
p_{ij}^{(m+n)} = \sum_{k \in \Omega}{p_{ik}^{(m)}p_{kj}^{(n)}}
\end{equation}
for all $m,n \in \mathbb{Z}^{*}$. Thus if $\mathbf{P}^{(n)}$ is the
matrix with entries $p_{ij}^{(n)}$, then $\mathbf{P}^{(m+n)} =
\mathbf{P}^{(m)}\mathbf{P}^{(n)}$. Because $\mathbf{P}^{(1)} =
\mathbf{P}$, we have
\begin{equation}
\label{eq:13}
\mathbf{P}^{(n)} = \mathbf{P}^{n}
\end{equation}
The behavior of a Markov chain $\mathbf{X} = (X_n)_{n \in
\mathbb{Z}^{*}}$ is thus completely specified by its transition
matrix $\mathbf{P}$, and we can view a Markov chain as a sequence of
random variables generated by a transition matrix $\mathbf{P}$. This
view will be most helpful in the context of this work. However,
because $\mathbf{P}$ describes only conditional probabilities,
computing the marginal probabilities $\mathbb{P}[X_n = j]$ requires
specifying an initial distribution for $X_0$.
\begin{definition}
\label{def:5}
Let $\mathbf{X}$ be a Markov chain with state space
$\Omega$. The initial distribution $\mu$ of $\mathbf{X}$ is a probability
distribution on $\Omega$ such that
\begin{equation}
\label{eq:14}
\mu(i) = \mathbb{P}[X_0 = i]
\end{equation}
for all $i \in \Omega$.
\end{definition}
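As a small illustration, suppose $\Omega = \{1,2\}$ and
\begin{equation*}
\mathbf{P} = \begin{pmatrix} 0.9 & 0.1 \\ 0.5 & 0.5 \end{pmatrix},
\end{equation*}
with initial distribution $\mu = (1, 0)$, i.e., the chain starts in
state $1$. Viewing $\mu$ as a row vector, the marginal distribution of
$X_n$ is $\mu \mathbf{P}^{n}$; for instance $\mu \mathbf{P}^{2} =
(0.86, 0.14)$, so that $\mathbb{P}[X_2 = 2] = 0.14$.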
\begin{definition}
\label{def:7}
Let $\mathbf{X}$ be a Markov chain with state space $\Omega$, and
let $i$ and $j$ be elements of $\Omega$. We say that $j$ is
{\em accessible} from $i$, denoted by $i \rightarrow j$, if there
exists an $n \in \mathbb{Z}^{*}$ such that $p_{ij}^{(n)} > 0$. If $i
\rightarrow j$ and $j \rightarrow i$, then we say that $i$ and $j$
{\em communicate}, and we write $i \leftrightarrow j$. A Markov chain is
{\em irreducible} if $i \leftrightarrow j$ for all $i,j \in \Omega$.
\end{definition}
\begin{definition}
\label{def:2}
The stationary distribution $\pi$ of
$\mathbf{X}$, if it exists, is a probability distribution on
$\Omega$ such that
\begin{equation}
\label{eq:15}
\pi(j) = \sum_{i \in \Omega}{\pi(i) p_{ij}}
\end{equation}
for any $j \in \Omega$.
\end{definition}
\begin{proposition}
\label{prop:3}
If $\mathbf{X}$ is an irreducible Markov chain with finite state space
$\Omega$, then there exists a unique stationary distribution $\pi$
of $\mathbf{X}$, and $\pi(i) > 0$ for all $i \in \Omega$.
\end{proposition}
\begin{definition}
\label{def:3}
Let $\mathbf{X}$ be a Markov chain
with transition matrix $\mathbf{P}$. Define
\begin{equation}
\label{eq:5}
\tau_i = \min\{ t \geq 0 \colon X_t = i \}, \qquad \tau_i^{+} = \min
\{ t \geq 1 \colon X_t = i \}.
\end{equation}
The expected first passage time from $i$ to $j$, denoted by
$\mathbb{E}_{i}[\tau_j]$, is defined as
\begin{equation}
\label{eq:6}
\mathbb{E}_{i}[\tau_j] = \sum_{t = 0}^{\infty}{t \, \mathbb{P}(\tau_j =
t \,|\, X_0 = i)}.
\end{equation}
The expected first return time from $i$ to $i$, denoted by
$\mathbb{E}_{i}[\tau_i^{+}]$, is defined as
\begin{equation}
\label{eq:7}
\mathbb{E}_{i}[\tau_i^{+}] = \sum_{t = 1}^{\infty}{t \,
\mathbb{P}(\tau_i^{+} = t \,|\, X_0 = i)}.
\end{equation}
$\tau_i$ and $\tau_{i}^{+}$ as defined above are examples of {\em
stopping times}.
\end{definition}
\begin{proposition}
\label{prop:2}
Let $\mathbf{X}$ be an irreducible
Markov chain with transition matrix $\mathbf{P}$ and stationary
distribution $\pi$. We then have that
\begin{equation}
\label{eq:9}
\mathbb{E}_{i}[\tau_i^{+}] = \frac{1}{\pi(i)}.
\end{equation}
\end{proposition}
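As an illustration of Propositions \ref{prop:3} and \ref{prop:2},
consider the general two-state chain on $\Omega = \{1,2\}$ with
\begin{equation*}
\mathbf{P} = \begin{pmatrix} 1 - a & a \\ b & 1 - b \end{pmatrix},
\qquad 0 < a, b < 1.
\end{equation*}
This chain is irreducible, and solving $\pi(j) = \sum_i \pi(i)p_{ij}$
gives the unique, strictly positive stationary distribution $\pi =
\bigl(\tfrac{b}{a+b}, \tfrac{a}{a+b}\bigr)$. Consequently
$\mathbb{E}_{1}[\tau_1^{+}] = \tfrac{a+b}{b}$ and
$\mathbb{E}_{2}[\tau_2^{+}] = \tfrac{a+b}{a}$; for the numerical
example above ($a = 0.1$, $b = 0.5$) we obtain $\pi =
\bigl(\tfrac{5}{6}, \tfrac{1}{6}\bigr)$.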
\begin{definition}
\label{def:9}
Let $\mathbf{X}$ be an irreducible Markov chain with transition
matrix $\mathbf{P}$ and stationary distribution
$\pi$. $\hat{\mathbf{P}} = (\hat{p}_{ij})$ is said to be the {\em
time reversal} of $\mathbf{P}$ if, for all pairs $i,j \in \Omega$,
one has
\begin{gather}
\label{eq:16}
\pi(i) p_{ij} = \pi(j) \hat{p}_{ji}, \\
\intertext{or, in matrix form,}
\label{eq:78}
\hat{\mathbf{P}} = \bm{\Pi}^{-1} \mathbf{P}^{T} \bm{\Pi},
\end{gather}
where $\bm{\Pi}$ is the diagonal matrix with diagonal entries
$\bm{\Pi}_{ii} = \pi(i)$. $\mathbf{P}$ is said to be {\em time-reversible}
if $\hat{\mathbf{P}} = \mathbf{P}$.
\end{definition}
Now $\hat{\mathbf{P}}$ also defines a Markov chain
$\hat{\mathbf{X}}$, which we term the time-reversed Markov chain of
$\mathbf{X}$. The distribution $\pi$ is also the stationary
distribution of $\hat{\mathbf{P}}$, and we have
\begin{equation}
\label{eq:17}
\mathbb{P}[X_0 = i, \dots, X_n = j] = \mathbb{P}[\hat{X}_0 = j,
\dots, \hat{X}_n = i]
\end{equation}
where the initial distributions of $X_0$ and $\hat{X}_0$ are both
identical to the stationary distribution $\pi$.
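A simple example of a chain that is not time-reversible is the
deterministic cycle on $\Omega = \{1,2,3\}$ with $p_{12} = p_{23} =
p_{31} = 1$. Its stationary distribution is uniform, so $\bm{\Pi} =
\tfrac{1}{3}\mathbf{I}$ and Eq.~\eqref{eq:78} gives $\hat{\mathbf{P}}
= \mathbf{P}^{T}$, which is the cycle traversed in the opposite
direction; since $\mathbf{P}^{T} \not = \mathbf{P}$, the chain is not
time-reversible.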
\section{Random walks on graphs}
\label{sec:random-walks-graphs}
Let $G = (V,E,\omega)$ be a simple, undirected graph. We define the transition
matrix $\mathbf{P}_G = (p_{uv})$ of a Markov chain with state space $V$ as follows
\begin{equation}
\label{eq:20}
p_{uv} = \begin{cases}
\tfrac{\omega(\{u,v\})}{\deg(u)} & \text{if $u \sim v$} \\
0 & \text{otherwise}
\end{cases}
\end{equation}
We now note some properties of the Markov chain $\mathbf{X}$ generated
by $\mathbf{P}_G$.
\begin{proposition}
\label{prop:15}
Let $G$ be an undirected graph and $\mathbf{P}$ be the transition
matrix on $G$. Let $\mathbf{X}$ be the Markov chain generated by
$\mathbf{P}$. Then
\begin{itemize}
\item $\mathbf{X}$ is irreducible if and only if $G$ is connected.
\item If $\mathbf{X}$ is irreducible, $\pi(v) =
\tfrac{\deg(v)}{\mathrm{Vol}(G)}$ for all $v \in V$.
\item $\mathbf{P}$ is time-reversible. Therefore, $\bm{\Pi}\mathbf{P} = \mathbf{P}^{T}\bm{\Pi}$.
\end{itemize}
\end{proposition}
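For instance, for the path graph on three vertices with unit weights,
\begin{equation*}
\mathbf{P}_G = \begin{pmatrix} 0 & 1 & 0 \\ \tfrac{1}{2} & 0 &
\tfrac{1}{2} \\ 0 & 1 & 0 \end{pmatrix},
\end{equation*}
and since $\mathrm{Vol}(G) = 4$ we obtain $\pi = \bigl(\tfrac{1}{4},
\tfrac{1}{2}, \tfrac{1}{4}\bigr)$. One checks directly that $\pi$ is
stationary and that the detailed balance condition $\pi(u) p_{uv} =
\pi(v) p_{vu}$ holds for all pairs $u, v \in V$.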
%
We can also define the transition matrix $\mathbf{P}_G$ when $G$ is
directed. In this case $\mathbf{P}_G$ has entries
\begin{equation}
\label{eq:18}
p_{uv} = \begin{cases}
\tfrac{\omega(e)}{\deg(u)} & \text{if $e = (u,v) \in E$} \\
0 & \text{otherwise}
\end{cases}
\end{equation}
where $\deg(u)$ now denotes the out-degree $\sum_{(u,v) \in E}
\omega((u,v))$ of $u$. If $G$ is directed, then $\mathbf{X}$ is
irreducible if and only if $G$ is strongly connected. However,
$\mathbf{P}$ is in general not time-reversible, and there is no
closed-form expression for the stationary distribution $\pi$ of
$\mathbf{P}$. \\ \\
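To see that the formula $\pi(v) = \deg(v)/\mathrm{Vol}(G)$ can fail
in the directed case, take $V = \{1,2,3\}$ with unit-weight edges
$(1,2)$, $(1,3)$, $(2,3)$, and $(3,1)$. The out-degrees are $\deg(1)
= 2$ and $\deg(2) = \deg(3) = 1$, so $\deg(v)/\mathrm{Vol}(G)$ would
give $\bigl(\tfrac{1}{2}, \tfrac{1}{4}, \tfrac{1}{4}\bigr)$, whereas
solving $\pi = \pi \mathbf{P}_G$ yields $\pi = \bigl(\tfrac{2}{5},
\tfrac{1}{5}, \tfrac{2}{5}\bigr)$. \\ \\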
%
%
\noindent Let $G = (V,E)$ be a graph, directed or undirected, and $\mathbf{P}$
be its transition matrix. A function $f \colon V \mapsto \mathbb{R}$
is {\em harmonic} at $v \in V$ if
\begin{equation}
\label{eq:10}
f(v) = \sum_{w \in V}{\mathbf{P}(v,w) f(w)}
\end{equation}
$f$ is harmonic on $V$ if it is harmonic at every $v \in V$. If $\mathbf{P}$
is irreducible, we have a simple characterization of the harmonic
functions on $V$. Specifically,
\begin{lemma}
\label{lem:1}
Suppose that $\mathbf{P}$ is irreducible. A function $f \colon V \mapsto
\mathbb{R}$ is harmonic on $V$ if and only if $f$ is constant on
$V$.
\end{lemma}
\begin{proof}
It is easy to see that if $f$ is constant on $V$ then it is also
harmonic on $V$. Conversely, assume that $f$ is harmonic on $V$, and
let $v_*$ be a vertex such that $f(v_*) \geq f(w)$ for all $w \in V$;
such a vertex exists because $V$ is finite. Because $f$ is harmonic,
Eq.~\eqref{eq:10} implies that $f(w) = f(v_*)$ for all $w$ such that
$\mathbf{P}(v_*,w) > 0$. It follows that every vertex $w$ that is
accessible from $v_*$ satisfies $f(w) = f(v_*)$. Because $\mathbf{P}$ is
irreducible, $f(v_*) = f(w)$ for all $w \in V$; $f$ is thus constant on $V$.
\end{proof}
\begin{proposition}
\label{prop:6}
Let $G = (V,E)$ be a graph and $\mathbf{P}$ be its transition
matrix. Suppose that the Markov chain defined by $\mathbf{P}$ is
{\em regular}, i.e., that $\mathbf{P}^{k}$ has all positive entries
for some $k \geq 1$. Then there exists a unique stationary
distribution $\pi$ of $\mathbf{P}$. Furthermore, if $\mathbf{Q} =
\bm{1} \bm{\pi}^{T}$ is the matrix with each row equal to
$\bm{\pi}^{T}$, then
\begin{equation}
\label{eq:22}
\lim_{k \rightarrow \infty}(\mathbf{P} - \mathbf{Q})^{k} = 0
\end{equation}
Eq.~\eqref{eq:22} is equivalent to $\rho(\mathbf{P}-\mathbf{Q}) < 1$
where $\rho(\mathbf{P}-\mathbf{Q})$ is the spectral radius of
$\mathbf{P} - \mathbf{Q}$.
\end{proposition}
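For the two-state chain considered earlier, $\mathbf{Q} =
\bm{1}\bm{\pi}^{T}$ has both rows equal to $\bigl(\tfrac{b}{a+b},
\tfrac{a}{a+b}\bigr)$, and a direct computation shows that
$\mathbf{P} - \mathbf{Q}$ has eigenvalues $0$ and $1 - a - b$. Hence
$\rho(\mathbf{P} - \mathbf{Q}) = |1 - a - b| < 1$, in accordance with
Proposition \ref{prop:6}.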
\begin{proposition}
\label{prop:7}
Let $G = (V,E)$ be a graph and $\mathbf{P}$ be its transition
matrix. Suppose that $\mathbf{P}$ is regular. Then the matrix $\mathbf{Z} =
(\mathbf{I} - \mathbf{P} + \mathbf{Q})^{-1}$ exists and is given by
\begin{equation}
\label{eq:28}
\mathbf{Z} = \sum_{k=0}^{\infty}(\mathbf{P} - \mathbf{Q})^{k} = \mathbf{I} +
\sum_{k=1}^{\infty}(\mathbf{P}^{k} - \mathbf{Q})
\end{equation}
\end{proposition}
\begin{proof}
Because $\mathbf{P}$ is regular, Proposition \ref{prop:6} gives
$\rho(\mathbf{P} - \mathbf{Q}) < 1$. Thus $\mathbf{Z} = (\mathbf{I} -
(\mathbf{P} - \mathbf{Q}))^{-1}$ exists and has the Neumann series expansion
\begin{equation}
\label{eq:29}
\mathbf{Z} = \sum_{k=0}^{\infty}(\mathbf{P} - \mathbf{Q})^{k}
\end{equation}
Because $\mathbf{P}\bm{1} = \bm{1}$ and $\bm{\pi}^{T}\mathbf{P} =
\bm{\pi}^{T}$, we have $\mathbf{P}\mathbf{Q} =
\mathbf{P}\bm{1}\bm{\pi}^{T} = \bm{1}\bm{\pi}^{T} = \mathbf{Q}$ and
$\mathbf{Q}\mathbf{P} = \mathbf{Q}$; moreover $\mathbf{Q}^{2} =
\bm{1}(\bm{\pi}^{T}\bm{1})\bm{\pi}^{T} = \mathbf{Q}$. By induction,
$(\mathbf{P} - \mathbf{Q})^{k} = \mathbf{P}^{k} - \mathbf{Q}$ for $k
\geq 1$, and Eq.~\eqref{eq:28} thus follows.
\end{proof}
The matrix $\mathbf{Z}$ is termed the {\em fundamental matrix}
\citep{kemeny83:_finit_markov_chain}. Some properties of
$\mathbf{Z}$ are given in the following proposition.
\begin{proposition}
\label{prop:8}
Let $\mathbf{P}$ be the transition matrix of a regular Markov chain and
$\mathbf{Z}$ be its fundamental matrix. We have
\begin{enumerate}[(i)]
\item $\mathbf{P}\mathbf{Z} = \mathbf{Z} - \mathbf{I} + \mathbf{Q}$.
\item $(\mathbf{I} - \mathbf{P})\mathbf{Z} = \mathbf{I} - \mathbf{Q}$.
\item $\mathbf{Z} \mathbf{J} = \mathbf{J}$, where $\mathbf{J}$ denotes the matrix of all ones.
\end{enumerate}
\end{proposition}
\begin{proof}
From Eq.~\eqref{eq:28}, $\mathbf{P}\mathbf{Z} = \mathbf{P} +
\sum_{k=1}^{\infty}(\mathbf{P}^{k+1} - \mathbf{Q})
= \mathbf{Z} - \mathbf{I} + \mathbf{Q}$. (i) and (ii) thus follow. For (iii),
note that $\mathbf{P}^{k}\mathbf{J} = \mathbf{Q}\mathbf{J} = \mathbf{J}$
since $\mathbf{P}^{k}$ and $\mathbf{Q}$ are row-stochastic.
\end{proof}
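Continuing the two-state example, one has $(\mathbf{P} -
\mathbf{Q})^{k} = (1-a-b)^{k-1}(\mathbf{P} - \mathbf{Q})$ for $k \geq
1$, so the Neumann series in Eq.~\eqref{eq:28} sums in closed form to
\begin{equation*}
\mathbf{Z} = \mathbf{I} + \sum_{k=1}^{\infty}(1-a-b)^{k-1}(\mathbf{P}
- \mathbf{Q}) = \mathbf{I} + \frac{\mathbf{P} - \mathbf{Q}}{a + b},
\end{equation*}
and one can verify directly that this $\mathbf{Z}$ satisfies
properties (i)--(iii) of Proposition \ref{prop:8}.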
\section{Distance geometry}
\label{sec:distance-geometry}
We discuss in this section some notation and results regarding
distance matrices. The notion of a Euclidean distance matrix (EDM) is
of particular importance to our discussion and is given in
Definition \ref{def:10}. We then introduce two linear transformations
between matrices, the $\kappa$ transform and the $\tau$ transform.
Schoenberg's characterization
\citep{schoenberg35:_remar_mauric_frech_artic_sur} of Euclidean
distance matrices in terms of positive semidefinite matrices is
stated in Theorem \ref{thm:5}. We also state some simple results
regarding the $\kappa$ transform that are useful in the context of
this work.
\begin{definition}
\label{def:10}
Let $\Delta = (\delta_{ij}) \in M_n(\mathbb{R})$. $\Delta$ is a Type 1
Euclidean distance matrix (EDM-$1$) if and only if there exists a
positive integer $p$ and $x_1, x_2, \dots, x_n \in \mathbb{R}^{p}$
such that $\delta_{ij} = \| x_i - x_j \|$. $\Delta$ is a Type 2
Euclidean distance matrix (EDM-$2$) if and only if there exists a
positive integer $p$ and $x_1, x_2, \dots, x_n \in \mathbb{R}^{p}$ such that
$\delta_{ij} = \|x_i - x_j\|^{2}$. The {\em embedding dimension} of
$\Delta$ is the minimum $p$ such that a configuration of points
$x_1, x_2, \dots, x_n$ exists with the desired property.
\end{definition}
\begin{definition}
\label{def:11}
Let $\mathbf{A} \in M_n(\mathbb{R})$. Define a linear mapping $\tau \colon M_n(\mathbb{R})
\mapsto M_n(\mathbb{R})$ by
\begin{equation}
\label{eq:83}
\tau(\mathbf{A}) = - \frac{1}{2} \Bigl(\mathbf{I} -
\frac{\mathbf{J}}{n}\Bigr)\mathbf{A} \Bigl(\mathbf{I} - \frac{\mathbf{J}}{n}\Bigr)
\end{equation}
If $a_{ij}$ are the entries of $\mathbf{A}$ then
\begin{equation}
\label{eq:56}
b_{ij} = -\frac{1}{2}\Bigl(a_{ij} - \frac{1}{n}\sum_{k=1}^{n}a_{ik} -
\frac{1}{n}\sum_{k=1}^{n}{a_{kj}} +
\frac{1}{n^2}\sum_{k=1}^{n}\sum_{l=1}^{n}a_{kl}\Bigr)
\end{equation}
are the entries of $\mathbf{B} = \tau(\mathbf{A})$. $\tau$
is a continuous mapping from $M_n(\mathbb{R})$ to
$M_n(\mathbb{R})$.
\end{definition}
%
The following result provides a necessary and sufficient condition for
$\Delta$ to be an EDM-2 matrix.
\begin{theorem}[\citet{schoenberg38:_metric,young38:_discus}]
\label{thm:5}
Let $\Delta \in S_n(\mathbb{R})$ be a symmetric hollow matrix. Then
$\Delta$ is an EDM-2 with embedding dimension $p$ if and only
if $\mathbf{B} = \tau(\Delta)$ is positive semidefinite with rank
$p$.
\end{theorem}
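As a small worked example, take the collinear points $x_1 = 0$, $x_2
= 1$, $x_3 = 3$ in $\mathbb{R}^{1}$, so that
\begin{equation*}
\Delta = \begin{pmatrix} 0 & 1 & 9 \\ 1 & 0 & 4 \\ 9 & 4 & 0
\end{pmatrix}, \qquad
\mathbf{B} = \tau(\Delta) = \frac{1}{9}\begin{pmatrix} 16 & 4 & -20
\\ 4 & 1 & -5 \\ -20 & -5 & 25 \end{pmatrix}.
\end{equation*}
Indeed $b_{ij} = (x_i - \bar{x})(x_j - \bar{x})$ with $\bar{x} =
\tfrac{4}{3}$, so $\mathbf{B}$ is positive semidefinite of rank $1$,
in agreement with the embedding dimension of $\Delta$.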
\begin{definition}
\label{def:12}
Let $\mathbf{A} \in M_n(\mathbb{R})$. Define a linear mapping
$\kappa \colon M_n(\mathbb{R}) \mapsto M_n(\mathbb{R})$ by
\begin{equation}
\label{eq:61}
\kappa(\mathbf{A}) = \mathbf{J}\mathbf{A}_{\mathrm{dg}} -
\mathbf{A} - \mathbf{A}^{T} + \mathbf{A}_{\mathrm{dg}}\mathbf{J}
\end{equation}
where $\mathbf{A}_{\mathrm{dg}}$ is the diagonal matrix obtained by
setting the off-diagonal entries of $\mathbf{A}$ to $0$. If $a_{ij}$
are the entries of $\mathbf{A}$ then
\begin{equation}
\label{eq:70}
b_{ij} = a_{ii} - a_{ij} - a_{ji} + a_{jj}
\end{equation}
are the entries of $\mathbf{B} = \kappa(\mathbf{A})$. $\kappa$ is
also a continuous mapping from $M_n(\mathbb{R})$ to
$M_n(\mathbb{R})$.
\end{definition}
\begin{proposition}
\label{prop:16}
The $\kappa$ transform has the following properties.
\begin{enumerate}[(i)]
\item Let $\mathcal{C} = \{ \mathbf{A} \in S_n(\mathbb{R}) \colon
\mathbf{A}\bm{1}_{n} = \bm{0} \}$ be the set of symmetric
matrices with zero row sums and let $\mathcal{D} = \{ \Delta \in
S_n(\mathbb{R}) \colon \Delta_{\mathrm{dg}} = 0 \}$ be the set of
symmetric hollow matrices. Then $\kappa$ and $\tau$ are inverse
mappings between $\mathcal{C}$ and $\mathcal{D}$, i.e.,
\begin{gather}
\label{eq:55}
\mathbf{A} \in \mathcal{C}
\Longrightarrow \Delta = \kappa(\mathbf{A}) \in \mathcal{D}, \,\,
\mathbf{A} = \tau(\Delta) \\
\Delta \in \mathcal{D} \Longrightarrow \mathbf{A} = \tau(\Delta)
\in \mathcal{C}, \,\, \Delta = \kappa(\mathbf{A})
\end{gather}
\item $\kappa(\mathbf{J}) = 0$. More generally,
$\kappa(\bm{a}\bm{1}^{T}) = \kappa(\bm{1}\bm{b}^{T}) = 0$
for any vectors $\bm{a}$ and $\bm{b}$.
\item Let $\tilde{\mathbf{X}}$ be the double centering of
$\mathbf{X}$, i.e.,
\begin{equation}
\label{eq:71}
\tilde{\mathbf{X}} = \Bigl(\mathbf{I} - \frac{\mathbf{J}}{n}\Bigr)\mathbf{X} \Bigl(\mathbf{I} - \frac{\mathbf{J}}{n}\Bigr)
\end{equation}
Then $\kappa(\tilde{\mathbf{X}}) = \kappa(\mathbf{X})$.
\end{enumerate}
\end{proposition}
Part (i) of Proposition \ref{prop:16} is from
\citep{critchley88:_certain_linear_mappin}. Parts (ii) and (iii)
follow directly from the definition of the $\kappa$ transform.
\begin{proposition}
\label{prop:18}
Let $\mathbf{A} \in S_n(\mathbb{R})$ be a positive semidefinite
matrix. Then $\Delta = \kappa(\mathbf{A})$ is EDM-2.
\end{proposition}
\begin{proof}
The double centering $\tilde{\mathbf{A}}$ of $\mathbf{A}$ is a
matrix in $\mathcal{C}$. By Proposition
\ref{prop:16}, $\Delta = \kappa(\mathbf{A}) =
\kappa(\tilde{\mathbf{A}})$ and $\tilde{\mathbf{A}} =
\tau(\Delta)$. Now $\mathbf{A} \succeq 0$ implies
$\tilde{\mathbf{A}} \succeq 0$. By Schoenberg's criterion
(Theorem \ref{thm:5}), $\Delta = \kappa(\tilde{\mathbf{A}})$ is EDM-2.
\end{proof}
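In fact, if $\mathbf{A} = \mathbf{X}\mathbf{X}^{T}$ is the Gram
matrix of points $x_1, \dots, x_n \in \mathbb{R}^{p}$ (the rows of
$\mathbf{X}$), then Eq.~\eqref{eq:70} gives
\begin{equation*}
b_{ij} = \|x_i\|^{2} - 2\langle x_i, x_j \rangle + \|x_j\|^{2} =
\|x_i - x_j\|^{2},
\end{equation*}
so $\kappa$ maps a Gram matrix directly to the EDM-2 of the
corresponding configuration of points.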
%
\section{Matrix analysis}
\label{sec:matrix-analysis}
We list here some results in matrix analysis that are useful within
the scope of this work.
%
Let $\mathbf{A} = (a_{ij})$ be an $n \times n$ matrix with real
entries. Denote by $R_i = \sum_{j \not = i}{|a_{ij}|}$ the sum of the
absolute values of the off-diagonal elements in row $i$.
%
\begin{theorem}[\cite{gersgorin31:_uber_abgren_eigen_matrix}]
\label{thm:1}
Let $\mathbf{A}$ be an $n \times n$ matrix with off-diagonal row
sums $R_i$. Then the eigenvalues of $\mathbf{A}$ lie in the set
\begin{equation}
\label{eq:23}
\bigcup_{i=1}^{n} \{z \in \mathbb{C} \colon |z - a_{ii}| \leq R_i \}
\end{equation}
\end{theorem}
\begin{definition}
\label{def:4}
The matrix $\mathbf{A}$ is said to be diagonally dominant if
$|a_{ii}| \geq R_i$ for all $i$ and strictly diagonally dominant if
$|a_{ii}| > R_i$ for all $i$.
\end{definition}
If $\mathbf{A}$ is diagonally dominant with nonnegative diagonal
entries, then by Ger\u{s}gorin's circle theorem the eigenvalues of
$\mathbf{A}$ have nonnegative real parts. If $\mathbf{A}$ is strictly
diagonally dominant with positive diagonal entries, then the
eigenvalues of $\mathbf{A}$ have positive real parts.
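The combinatorial Laplacian of Section~\ref{sec:graph-laplacians}
illustrates this: $L$ has diagonal entries $L_{vv} = \deg(v) \geq 0$
and off-diagonal row sums $R_v = \sum_{u \sim v} \omega(\{u,v\}) =
\deg(v)$, so $L$ is diagonally dominant with nonnegative diagonal
entries. Since $L$ is also symmetric, its eigenvalues are real and
nonnegative, recovering the positive semidefiniteness asserted in
Proposition \ref{prop:1}.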
%
\begin{definition}
\label{def:8}
Let $Z_n \subset M_{n}(\mathbb{R})$ be the set of matrices with
non-positive off-diagonal entries, i.e.,
\begin{equation}
\label{eq:24}
Z_n = \{ \mathbf{A} = (a_{ij}) \in M_{n}(\mathbb{R}) \colon a_{ij}
\leq 0 \,\, \text{if $i \not = j$} \}
\end{equation}
A matrix $\mathbf{A} \in Z_n$ is called an $M$-matrix if $\mathbf{A}$ is
positive stable, i.e., if the eigenvalues of $\mathbf{A}$ have
positive real parts.
\end{definition}
A relationship between $M$-matrices and non-negative matrices is given
by the following result \citep[\S 2.5]{horn94:_topic_in_matrix_analy}.
\begin{theorem}
\label{thm:2}
$\mathbf{A} \in Z_n$ is an $M$-matrix if and only if $\mathbf{A}$ is
nonsingular and $\mathbf{A}^{-1} \geq 0$ entrywise.
\end{theorem}
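For example, the matrix
\begin{equation*}
\mathbf{A} = \begin{pmatrix} 2 & -1 \\ -1 & 2 \end{pmatrix} \in Z_2
\end{equation*}
has eigenvalues $1$ and $3$, so $\mathbf{A}$ is an $M$-matrix, and
indeed $\mathbf{A}^{-1} = \tfrac{1}{3}\begin{pmatrix} 2 & 1 \\ 1 & 2
\end{pmatrix} \geq 0$.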
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "dissertation"
%%% End: