Fix scipy deprecation warnings
jarrodmillman committed Jan 3, 2020
1 parent 9b02800 commit ea19baa
Showing 25 changed files with 1,028 additions and 659 deletions.
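Besides quote and whitespace normalization, the substantive change in each file below is the same: calls that reach NumPy functions through SciPy's top-level namespace (sp.sign, scipy.ones, scipy.absolute, scipy.diag) are rewritten to call NumPy directly, because recent SciPy releases deprecate those re-exported aliases. A minimal before/after sketch of the substitution; this is not a line from the commit, the values in `largest` are invented, and `sp` stands for `import scipy as sp` as in eigenvector.py:

import numpy as np
import scipy.linalg

largest = np.array([0.3, -0.7, 0.65])  # invented values, for illustration only

# Before this commit (deprecated top-level alias; warns on recent SciPy):
#     norm = sp.sign(largest.sum()) * sp.linalg.norm(largest)
# After this commit: take the elementwise helper from NumPy directly.
# scipy.linalg.norm is a genuine SciPy routine and is left untouched.
norm = np.sign(largest.sum()) * scipy.linalg.norm(largest)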
33 changes: 18 additions & 15 deletions networkx/algorithms/centrality/eigenvector.py
@@ -5,12 +5,11 @@
import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ['eigenvector_centrality', 'eigenvector_centrality_numpy']
__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]


@not_implemented_for('multigraph')
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None,
weight=None):
@not_implemented_for("multigraph")
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
r"""Compute the eigenvector centrality for the graph `G`.
Eigenvector centrality computes the centrality for a node based on the
@@ -104,13 +103,14 @@ def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None,
"""
if len(G) == 0:
raise nx.NetworkXPointlessConcept('cannot compute centrality for the'
' null graph')
raise nx.NetworkXPointlessConcept(
"cannot compute centrality for the null graph"
)
# If no initial vector is provided, start with the all-ones vector.
if nstart is None:
nstart = {v: 1 for v in G}
if all(v == 0 for v in nstart.values()):
raise nx.NetworkXError('initial vector cannot have all zero values')
raise nx.NetworkXError("initial vector cannot have all zero values")
# Normalize the initial vector so that each entry is in [0, 1]. This is
# guaranteed to never have a divide-by-zero error by the previous line.
nstart_sum = sum(nstart.values())
@@ -177,7 +177,7 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
--------
>>> G = nx.path_graph(4)
>>> centrality = nx.eigenvector_centrality_numpy(G)
>>> print([f"{node} {centrality[node]:0.2f}" for node in centrality])
['0 0.37', '1 0.60', '2 0.60', '3 0.37']
See Also
@@ -212,15 +212,18 @@ def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
Networks: An Introduction.
Oxford University Press, USA, 2010, pp. 169.
"""
import numpy as np
import scipy as sp
from scipy.sparse import linalg

if len(G) == 0:
raise nx.NetworkXPointlessConcept('cannot compute centrality for the'
' null graph')
M = nx.to_scipy_sparse_matrix(G, nodelist=list(G), weight=weight,
dtype=float)
eigenvalue, eigenvector = linalg.eigs(M.T, k=1, which='LR',
maxiter=max_iter, tol=tol)
raise nx.NetworkXPointlessConcept(
"cannot compute centrality for the null graph"
)
M = nx.to_scipy_sparse_matrix(G, nodelist=list(G), weight=weight, dtype=float)
eigenvalue, eigenvector = linalg.eigs(
M.T, k=1, which="LR", maxiter=max_iter, tol=tol
)
largest = eigenvector.flatten().real
norm = sp.sign(largest.sum()) * sp.linalg.norm(largest)
norm = np.sign(largest.sum()) * sp.linalg.norm(largest)
return dict(zip(G, largest / norm))
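Only the spelling of the normalization step in eigenvector_centrality_numpy changes here (np.sign in place of the deprecated sp.sign), so its output should continue to match the pure-Python power iteration. An illustrative doctest-style check, not part of the commit; the 1e-4 tolerance is an arbitrary choice:

>>> import networkx as nx
>>> G = nx.path_graph(4)
>>> by_power_iteration = nx.eigenvector_centrality(G)
>>> by_numpy = nx.eigenvector_centrality_numpy(G)
>>> all(abs(by_power_iteration[n] - by_numpy[n]) < 1e-4 for n in G)
True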
111 changes: 86 additions & 25 deletions networkx/algorithms/centrality/subgraph_alg.py
@@ -4,15 +4,16 @@
import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ['subgraph_centrality_exp',
'subgraph_centrality',
'communicability_betweenness_centrality',
'estrada_index'
]
__all__ = [
"subgraph_centrality_exp",
"subgraph_centrality",
"communicability_betweenness_centrality",
"estrada_index",
]


@not_implemented_for('directed')
@not_implemented_for('multigraph')
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def subgraph_centrality_exp(G):
r"""Returns the subgraph centrality for each node of G.
@@ -61,13 +62,29 @@ def subgraph_centrality_exp(G):
Examples
--------
(Example from [1]_)
>>> G = nx.Graph([(1,2),(1,5),(1,8),(2,3),(2,8),(3,4),(3,6),(4,5),(4,7),(5,6),(6,7),(7,8)])
>>> G = nx.Graph(
... [
... (1, 2),
... (1, 5),
... (1, 8),
... (2, 3),
... (2, 8),
... (3, 4),
... (3, 6),
... (4, 5),
... (4, 7),
... (5, 6),
... (6, 7),
... (7, 8),
... ]
... )
>>> sc = nx.subgraph_centrality_exp(G)
>>> print(['%s %0.2f'%(node,sc[node]) for node in sorted(sc)])
>>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
"""
# alternative implementation that calculates the matrix exponential
import scipy.linalg

nodelist = list(G) # ordering of nodes in matrix
A = nx.to_numpy_array(G, nodelist)
# convert to 0-1 matrix
@@ -78,8 +95,8 @@ def subgraph_centrality_exp(G):
return sc


@not_implemented_for('directed')
@not_implemented_for('multigraph')
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def subgraph_centrality(G):
r"""Returns subgraph centrality for each node in G.
@@ -125,9 +142,24 @@ def subgraph_centrality(G):
Examples
--------
(Example from [1]_)
>>> G = nx.Graph([(1,2),(1,5),(1,8),(2,3),(2,8),(3,4),(3,6),(4,5),(4,7),(5,6),(6,7),(7,8)])
>>> G = nx.Graph(
... [
... (1, 2),
... (1, 5),
... (1, 8),
... (2, 3),
... (2, 8),
... (3, 4),
... (3, 6),
... (4, 5),
... (4, 7),
... (5, 6),
... (6, 7),
... (7, 8),
... ]
... )
>>> sc = nx.subgraph_centrality(G)
>>> print(['%s %0.2f'%(node,sc[node]) for node in sorted(sc)])
>>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
References
@@ -138,23 +170,24 @@ def subgraph_centrality(G):
https://arxiv.org/abs/cond-mat/0504730
"""
import numpy
import numpy as np
import numpy.linalg

nodelist = list(G) # ordering of nodes in matrix
A = nx.to_numpy_matrix(G, nodelist)
# convert to 0-1 matrix
A[A != 0.0] = 1
w, v = numpy.linalg.eigh(A.A)
vsquare = numpy.array(v)**2
expw = numpy.exp(w)
xg = numpy.dot(vsquare, expw)
vsquare = np.array(v) ** 2
expw = np.exp(w)
xg = np.dot(vsquare, expw)
# convert vector dictionary keyed by node
sc = dict(zip(nodelist, map(float, xg)))
return sc


@not_implemented_for('directed')
@not_implemented_for('multigraph')
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def communicability_betweenness_centrality(G, normalized=True):
r"""Returns subgraph communicability for all pairs of nodes in G.
@@ -215,11 +248,25 @@ def communicability_betweenness_centrality(G, normalized=True):
Examples
--------
>>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)])
>>> G = nx.Graph(
... [
... (0, 1),
... (1, 2),
... (1, 5),
... (5, 4),
... (2, 4),
... (2, 3),
... (4, 3),
... (3, 6),
... ]
... )
>>> cbc = nx.communicability_betweenness_centrality(G)
>>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
"""
import scipy
import numpy as np
import scipy.linalg

nodelist = list(G) # ordering of nodes in matrix
n = len(nodelist)
A = nx.to_numpy_matrix(G, nodelist)
@@ -239,7 +286,7 @@ def communicability_betweenness_centrality(G, normalized=True):
# sum with row/col of node v and diag set to zero
B[i, :] = 0
B[:, i] = 0
B -= scipy.diag(scipy.diag(B))
B -= np.diag(np.diag(B))
cbc[v] = float(B.sum())
# put row and col back
A[i, :] = row
@@ -256,7 +303,7 @@ def _rescale(cbc, normalized):
if order <= 2:
scale = None
else:
scale = 1.0 / ((order - 1.0)**2 - (order - 1.0))
scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
if scale is not None:
for v in cbc:
cbc[v] *= scale
@@ -303,7 +350,21 @@ def estrada_index(G):
Examples
--------
>>> G=nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)])
>>> ei=nx.estrada_index(G)
>>> G = nx.Graph(
... [
... (0, 1),
... (1, 2),
... (1, 5),
... (5, 4),
... (2, 4),
... (2, 3),
... (4, 3),
... (3, 6),
... ]
... )
>>> ei = nx.estrada_index(G)
>>> print(f"{ei:0.5}")
20.55
"""
return sum(subgraph_centrality(G).values())
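subgraph_centrality_exp and subgraph_centrality compute the same quantity by different routes (scipy.linalg.expm versus an eigendecomposition with numpy.linalg.eigh), so replacing the deprecated scipy aliases should leave the two in agreement. An illustrative check, not part of the commit; the graph and the 1e-7 tolerance are arbitrary:

>>> import networkx as nx
>>> G = nx.cycle_graph(6)
>>> via_expm = nx.subgraph_centrality_exp(G)
>>> via_eigh = nx.subgraph_centrality(G)
>>> all(abs(via_expm[n] - via_eigh[n]) < 1e-7 for n in G)
True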
28 changes: 13 additions & 15 deletions networkx/algorithms/link_analysis/hits_alg.py
@@ -2,7 +2,7 @@
"""
import networkx as nx

__all__ = ['hits', 'hits_numpy', 'hits_scipy', 'authority_matrix', 'hub_matrix']
__all__ = ["hits", "hits_numpy", "hits_scipy", "authority_matrix", "hub_matrix"]


def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
@@ -44,8 +44,8 @@ def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
Examples
--------
>>> G=nx.path_graph(4)
>>> h,a=nx.hits(G)
>>> G = nx.path_graph(4)
>>> h, a = nx.hits(G)
Notes
-----
@@ -90,11 +90,11 @@ def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
# doing a left multiply a^T=hlast^T*G
for n in h:
for nbr in G[n]:
a[nbr] += hlast[n] * G[n][nbr].get('weight', 1)
a[nbr] += hlast[n] * G[n][nbr].get("weight", 1)
# now multiply h=Ga
for n in h:
for nbr in G[n]:
h[n] += a[nbr] * G[n][nbr].get('weight', 1)
h[n] += a[nbr] * G[n][nbr].get("weight", 1)
# normalize vector
s = 1.0 / max(h.values())
for n in h:
@@ -154,8 +154,8 @@ def hits_numpy(G, normalized=True):
Examples
--------
>>> G=nx.path_graph(4)
>>> h,a=nx.hits(G)
>>> G = nx.path_graph(4)
>>> h, a = nx.hits(G)
Notes
-----
@@ -179,8 +179,7 @@ def hits_numpy(G, normalized=True):
try:
import numpy as np
except ImportError:
raise ImportError(
"hits_numpy() requires NumPy: http://scipy.org/")
raise ImportError("hits_numpy() requires NumPy: http://scipy.org/")
if len(G) == 0:
return {}, {}
H = nx.hub_matrix(G, list(G))
@@ -234,8 +233,8 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True):
Examples
--------
>>> G=nx.path_graph(4)
>>> h,a=nx.hits(G)
>>> G = nx.path_graph(4)
>>> h, a = nx.hits(G)
Notes
-----
@@ -272,22 +271,21 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, normalized=True):
import scipy.sparse
import numpy as np
except ImportError:
raise ImportError(
"hits_scipy() requires SciPy: http://scipy.org/")
raise ImportError("hits_scipy() requires SciPy: http://scipy.org/")
if len(G) == 0:
return {}, {}
M = nx.to_scipy_sparse_matrix(G, nodelist=list(G))
(n, m) = M.shape # should be square
A = M.T * M # authority matrix
x = scipy.ones((n, 1)) / n # initial guess
x = np.ones((n, 1)) / n # initial guess
# power iteration on authority matrix
i = 0
while True:
xlast = x
x = A * x
x = x / x.max()
# check convergence, l1 norm
err = scipy.absolute(x - xlast).sum()
err = np.absolute(x - xlast).sum()
if err < tol:
break
if i > max_iter:
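In hits_scipy the fix is again a pure rename (scipy.ones → np.ones, scipy.absolute → np.absolute), so the SciPy-backed variant should keep matching the plain power iteration in hits. An illustrative check, not part of the commit; it imports from the module path shown above and uses an arbitrary 1e-4 tolerance:

>>> import networkx as nx
>>> from networkx.algorithms.link_analysis.hits_alg import hits, hits_scipy
>>> G = nx.path_graph(4)
>>> h_plain, a_plain = hits(G)
>>> h_sparse, a_sparse = hits_scipy(G)
>>> all(abs(h_plain[n] - h_sparse[n]) < 1e-4 for n in G)
True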
(Diffs for the remaining 22 changed files are not shown.)