author     Harri Nieminen <moiman@posteo.net>  2023-04-04 16:45:17 +0000
committer  GitHub <noreply@github.com>  2023-04-04 09:45:17 -0700
commit     7297ae8a37dd3356b64d383cb0c55735a6364bcc (patch)
tree       60cf106958ceaf2f111413965f476cffd922efdf
parent     c55f87045edb25b033854f40eefab092ab9df6eb (diff)
download   networkx-7297ae8a37dd3356b64d383cb0c55735a6364bcc.tar.gz
Fix typos (#6620)
-rw-r--r--  doc/developer/nxeps/nxep-0002.rst | 10
-rw-r--r--  doc/developer/nxeps/nxep-0004.rst | 4
-rw-r--r--  doc/reference/glossary.rst | 2
-rw-r--r--  examples/algorithms/plot_davis_club.py | 2
-rw-r--r--  networkx/algorithms/centrality/group.py | 2
-rw-r--r--  networkx/algorithms/coloring/greedy_coloring.py | 2
-rw-r--r--  networkx/algorithms/community/lukes.py | 4
-rw-r--r--  networkx/algorithms/connectivity/edge_augmentation.py | 2
-rw-r--r--  networkx/algorithms/connectivity/edge_kcomponents.py | 4
-rw-r--r--  networkx/algorithms/connectivity/kcomponents.py | 2
-rw-r--r--  networkx/algorithms/cycles.py | 4
-rw-r--r--  networkx/algorithms/distance_measures.py | 2
-rw-r--r--  networkx/algorithms/isomorphism/tree_isomorphism.py | 2
-rw-r--r--  networkx/algorithms/link_analysis/pagerank_alg.py | 2
-rw-r--r--  networkx/algorithms/matching.py | 2
-rw-r--r--  networkx/algorithms/operators/product.py | 2
-rw-r--r--  networkx/algorithms/tests/test_simple_paths.py | 4
-rw-r--r--  networkx/algorithms/threshold.py | 4
-rw-r--r--  networkx/algorithms/tree/mst.py | 4
-rw-r--r--  networkx/drawing/layout.py | 2
-rw-r--r--  networkx/drawing/nx_latex.py | 10
-rw-r--r--  networkx/drawing/tests/test_pylab.py | 2
-rw-r--r--  networkx/generators/directed.py | 2
-rw-r--r--  networkx/generators/tests/test_degree_seq.py | 6
-rw-r--r--  networkx/linalg/attrmatrix.py | 2
-rw-r--r--  networkx/readwrite/tests/test_gexf.py | 2
-rw-r--r--  networkx/readwrite/tests/test_gml.py | 2
-rw-r--r--  networkx/readwrite/tests/test_text.py | 8
-rw-r--r--  networkx/readwrite/text.py | 6
29 files changed, 51 insertions(+), 51 deletions(-)
diff --git a/doc/developer/nxeps/nxep-0002.rst b/doc/developer/nxeps/nxep-0002.rst
index 1904cadc..55c0617d 100644
--- a/doc/developer/nxeps/nxep-0002.rst
+++ b/doc/developer/nxeps/nxep-0002.rst
@@ -78,7 +78,7 @@ The main impact and the decision that needs to be taken in this NXEP is with
respect to the user facing API. By implementing this NXEP via subscripting NodeViews,
we may end up adding some ambiguity for users. As for example `G.nodes[x]`
will return an attribute dict but `G.nodes[0:5]` will return a list of first five nodes.
-This will be more ambigious with EdgeView as ``G.edges[0, 1]`` will return an
+This will be more ambiguous with EdgeView as ``G.edges[0, 1]`` will return an
attribute dictionary of the edge between 0 and 1 and ``G.edges[0:1]`` will return the first edge.
We need to find a way to counter this potential confusion.
The alternative proposal of a new slicing method is one possible solution.
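
For readers skimming this NXEP hunk, a short sketch of how these lookups read against the current API (the slice forms above are the proposal, not existing behaviour; outputs shown for a plain path graph):

>>> import networkx as nx
>>> G = nx.path_graph(10)
>>> G.nodes[3]                # today: attribute dict of node 3
{}
>>> list(G.nodes)[0:5]        # today's spelling of "the first five nodes"
[0, 1, 2, 3, 4]
>>> G.edges[0, 1]             # attribute dict of the edge (0, 1)
{}
>>> list(G.edges)[0:1]        # today's spelling of "the first edge"
[(0, 1)]
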
@@ -117,7 +117,7 @@ add order and edge order based on adjacency storage.
On the computational front, if we create lists to allow slices, we use memory to store the lists.
This is something user would have anyway done with something like ``list(G.nodes(data=True))[0:10]``.
But we can do better with our slicing mechanisms.
-We should be able to avoid constucting the entire list simply to get the slices by internally
+We should be able to avoid constructing the entire list simply to get the slices by internally
using code like: ``indx=[n for i, n in enumerate(G.nodes(data=True)) if i in range(x.start, x.stop, s.step)]``
where x is the desired slice object.
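
A sketch of the lazy approach described above using itertools.islice, which stops iterating as soon as the slice is exhausted (illustrative only, not the proposed implementation):

>>> from itertools import islice
>>> import networkx as nx
>>> G = nx.path_graph(100)
>>> x = slice(10, 30, 2)
>>> sliced = list(islice(G.nodes(data=True), x.start, x.stop, x.step))
>>> len(sliced), sliced[0]
(10, (10, {}))
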
@@ -138,11 +138,11 @@ The following code will be valid::
>>> G.edges[1:10]
>>> G.edges(data=True)[4:6]
-Prelimanary impelementation work is available at https://github.com/networkx/networkx/pull/4086
+Preliminary implementation work is available at https://github.com/networkx/networkx/pull/4086
Alternatively, to get rid of the ambiguity in slicing API with respect to
the dict views we can implement a new
-``slice`` method which leads to a less ambigious API.::
+``slice`` method which leads to a less ambiguous API.::
>>> G.nodes(data=True).slice[:10]
>>> G.nodes.slice[10:30]
@@ -206,7 +206,7 @@ The listed alternatives are not mutually exclusive.
...
NetworkXError: NodeView does not support slicing. Try list(G.nodes)[0:10].
-- Instead of changing the behavior of ``__getitem__`` we can impelment a new
+- Instead of changing the behavior of ``__getitem__`` we can implement a new
method, something like ``G.nodes.head(x)`` (insipired by pandas) which
returns the first x nodes.
This approach could be expanded to using a ``slice`` object directly but
diff --git a/doc/developer/nxeps/nxep-0004.rst b/doc/developer/nxeps/nxep-0004.rst
index 9a905e70..41bde67c 100644
--- a/doc/developer/nxeps/nxep-0004.rst
+++ b/doc/developer/nxeps/nxep-0004.rst
@@ -166,7 +166,7 @@ by `numpy.random.Generator.integers`.
Thus any code that uses `create_random_state` or `create_py_random_state` and
relies on the ``randint`` method of the returned rng would result in an
`AttributeError`.
-This can be addressed with a compatiblity class similar to the
+This can be addressed with a compatibility class similar to the
`networkx.utils.misc.PythonRandomInterface` class, which provides a compatibility
layer between `random` and `numpy.random.RandomState`.
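
To make the incompatibility concrete, here is a minimal hypothetical shim (not NetworkX's PythonRandomInterface) that exposes a RandomState-style ``randint`` on top of ``numpy.random.Generator.integers``:

import numpy as np

class GeneratorCompat:  # hypothetical shim, not part of NetworkX
    """Expose a RandomState-style ``randint`` on top of ``numpy.random.Generator``."""

    def __init__(self, rng):
        self._rng = rng

    def randint(self, low, high=None, size=None):
        # Generator.integers excludes ``high`` by default, which matches
        # the semantics of RandomState.randint.
        return self._rng.integers(low, high=high, size=size)

    def __getattr__(self, name):
        # Fall back to the wrapped Generator for everything else.
        return getattr(self._rng, name)

rng = GeneratorCompat(np.random.default_rng(0))
print(rng.randint(0, 10))    # RandomState-style call
print(rng.integers(0, 10))   # native Generator call still available
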
@@ -210,7 +210,7 @@ potential approaches to supporting the new NumPy random interface:
- `scikit-learn/scikit-learn#14042 <sklearn14042>`_ is a higher-level discussion
that includes additional information about the design considerations and constraints
related to scikit-learn's ``random_state``.
-- There is also a releated `SLEP <slep011>`_.
+- There is also a related `SLEP <slep011>`_.
.. _sklearn16988: https://github.com/scikit-learn/scikit-learn/issues/16988
.. _sklearn14042: https://github.com/scikit-learn/scikit-learn/issues/14042
diff --git a/doc/reference/glossary.rst b/doc/reference/glossary.rst
index 097a23bc..7dd86c84 100644
--- a/doc/reference/glossary.rst
+++ b/doc/reference/glossary.rst
@@ -15,7 +15,7 @@ Glossary
with an edge attribute dictionary `(u, v, dict)`.
ebunch
- An iteratable container of edge tuples like a list, iterator,
+ An iterable container of edge tuples like a list, iterator,
or file.
edge attribute
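
A quick example of an ebunch in use; any iterable of edge tuples is accepted, including generators:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_edges_from([(1, 2), (2, 3, {"weight": 1.5})])   # a list ebunch
>>> G.add_edges_from((n, n + 1) for n in range(3, 6))     # a generator ebunch
>>> G.number_of_edges()
5
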
diff --git a/examples/algorithms/plot_davis_club.py b/examples/algorithms/plot_davis_club.py
index 0029d55c..3bd37b6e 100644
--- a/examples/algorithms/plot_davis_club.py
+++ b/examples/algorithms/plot_davis_club.py
@@ -30,7 +30,7 @@ print("#Friends, Member")
for w in women:
print(f"{W.degree(w)} {w}")
-# project bipartite graph onto women nodes keeping number of co-occurence
+# project bipartite graph onto women nodes keeping number of co-occurrence
# the degree computed is weighted and counts the total number of shared contacts
W = bipartite.weighted_projected_graph(G, women)
print()
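
A self-contained sketch of a weighted bipartite projection on a toy graph (node names here are invented; the Davis example itself is unchanged):

>>> import networkx as nx
>>> from networkx.algorithms import bipartite
>>> B = nx.Graph()
>>> B.add_nodes_from(["w1", "w2", "w3"], bipartite=0)   # the "women" side
>>> B.add_nodes_from(["e1", "e2"], bipartite=1)         # the "events" side
>>> B.add_edges_from([("w1", "e1"), ("w2", "e1"), ("w2", "e2"), ("w3", "e2")])
>>> W = bipartite.weighted_projected_graph(B, ["w1", "w2", "w3"])
>>> W["w1"]["w2"]["weight"]   # w1 and w2 co-attended one event
1
>>> W.has_edge("w1", "w3")    # no shared events, no edge
False
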
diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py
index 9ca8f59b..0c82b4ce 100644
--- a/networkx/algorithms/centrality/group.py
+++ b/networkx/algorithms/centrality/group.py
@@ -414,7 +414,7 @@ def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
# stopping condition - if the size of group members equal to k or there are less than
- # k - |GM| in the candidate list or the heuristic function plus the GBC is bellow the
+ # k - |GM| in the candidate list or the heuristic function plus the GBC is below the
# maximal GBC found then prune
if (
len(DF_tree.nodes[root]["GM"]) == k
diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py
index 93bbab66..b078cb25 100644
--- a/networkx/algorithms/coloring/greedy_coloring.py
+++ b/networkx/algorithms/coloring/greedy_coloring.py
@@ -438,7 +438,7 @@ class _AdjEntry:
def _greedy_coloring_with_interchange(G, nodes):
- """Return a coloring for `orginal_graph` using interchange approach
+ """Return a coloring for `original_graph` using interchange approach
This procedure is an adaption of the algorithm described by [1]_,
and is an implementation of coloring with interchange. Please be
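
For context, interchange coloring is reached through the public nx.greedy_color entry point; a minimal sketch on an odd cycle, which needs three colors:

>>> import networkx as nx
>>> G = nx.cycle_graph(5)
>>> colors = nx.greedy_color(G, strategy="largest_first", interchange=True)
>>> max(colors.values()) + 1          # number of colors used
3
>>> all(colors[u] != colors[v] for u, v in G.edges)
True
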
diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py
index 90a26901..632dbd9b 100644
--- a/networkx/algorithms/community/lukes.py
+++ b/networkx/algorithms/community/lukes.py
@@ -145,14 +145,14 @@ def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
assert len(ccx) == 1
return ccx[0]
- def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weigth):
+ def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
ccx = _pivot(partition_1, x)
cci = _pivot(partition_2, i)
merged_xi = ccx.union(cci)
# We first check if we can do the merge.
# If so, we do the actual calculations, otherwise we concatenate
- if _weight_of_cluster(frozenset(merged_xi)) <= ref_weigth:
+ if _weight_of_cluster(frozenset(merged_xi)) <= ref_weight:
cp1 = list(filter(lambda x: x != ccx, partition_1))
cp2 = list(filter(lambda x: x != cci, partition_2))
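
A minimal usage sketch of the public lukes_partitioning function this helper belongs to, assuming default unit node and edge weights:

>>> import networkx as nx
>>> from networkx.algorithms.community import lukes_partitioning
>>> T = nx.path_graph(6)                    # a path is a tree
>>> parts = lukes_partitioning(T, max_size=3)
>>> sum(len(p) for p in parts) == len(T)    # every node covered exactly once
True
>>> all(len(p) <= 3 for p in parts)         # no cluster exceeds max_size
True
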
diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py
index 3195df89..3c5d6b95 100644
--- a/networkx/algorithms/connectivity/edge_augmentation.py
+++ b/networkx/algorithms/connectivity/edge_augmentation.py
@@ -10,7 +10,7 @@ k-edge-augmentation exists.
See Also
--------
:mod:`edge_kcomponents` : algorithms for finding k-edge-connected components
-:mod:`connectivity` : algorithms for determening edge connectivity.
+:mod:`connectivity` : algorithms for determining edge connectivity.
"""
import itertools as it
import math
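
A short usage sketch of the augmentation entry point documented in this module:

>>> import networkx as nx
>>> G = nx.path_graph(5)                            # a path is full of bridges
>>> new_edges = list(nx.k_edge_augmentation(G, k=2))
>>> G.add_edges_from(new_edges)
>>> nx.edge_connectivity(G) >= 2
True
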
diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py
index a5d6977f..fc85e264 100644
--- a/networkx/algorithms/connectivity/edge_kcomponents.py
+++ b/networkx/algorithms/connectivity/edge_kcomponents.py
@@ -239,7 +239,7 @@ def bridge_components(G):
class EdgeComponentAuxGraph:
r"""A simple algorithm to find all k-edge-connected components in a graph.
- Constructing the AuxillaryGraph (which may take some time) allows for the
+ Constructing the auxiliary graph (which may take some time) allows for the
k-edge-ccs to be found in linear time for arbitrary k.
Notes
@@ -288,7 +288,7 @@ class EdgeComponentAuxGraph:
>>> sorted(map(sorted, aux_graph.k_edge_components(k=4)))
[[0], [1], [2], [3], [4], [5], [6], [7]]
- The auxiliary graph is primarilly used for k-edge-ccs but it
+ The auxiliary graph is primarily used for k-edge-ccs but it
can also speed up the queries of k-edge-subgraphs by refining the
search space.
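
A compact usage sketch of the class described above, assuming it is importable from networkx.algorithms.connectivity:

>>> import networkx as nx
>>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
>>> G = nx.barbell_graph(4, 0)          # two K4 cliques joined by a bridge
>>> aux = EdgeComponentAuxGraph.construct(G)
>>> sorted(map(sorted, aux.k_edge_components(k=2)))
[[0, 1, 2, 3], [4, 5, 6, 7]]
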
diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py
index 39adca58..5c7f441b 100644
--- a/networkx/algorithms/connectivity/kcomponents.py
+++ b/networkx/algorithms/connectivity/kcomponents.py
@@ -104,7 +104,7 @@ def k_components(G, flow_func=None):
"""
# Dictionary with connectivity level (k) as keys and a list of
# sets of nodes that form a k-component as values. Note that
- # k-compoents can overlap (but only k - 1 nodes).
+ # k-components can overlap (but only k - 1 nodes).
k_components = defaultdict(list)
# Define default flow function
if flow_func is None:
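
A small usage sketch of k_components on a barbell graph, where each clique is a 4-component sitting inside the single connected component:

>>> import networkx as nx
>>> G = nx.barbell_graph(5, 0)           # two K5 cliques joined by a bridge
>>> result = nx.k_components(G)
>>> [len(c) for c in result[1]]          # one connected component
[10]
>>> sorted(len(c) for c in result[4])    # each clique is a 4-component
[5, 5]
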
diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py
index 21138d8c..6c5f7d48 100644
--- a/networkx/algorithms/cycles.py
+++ b/networkx/algorithms/cycles.py
@@ -577,7 +577,7 @@ def chordless_cycles(G, length_bound=None):
return
# Nodes with loops cannot belong to longer cycles. Let's delete them here.
- # also, we implicitly reduce the multiplicty of edges down to 1 in the case
+ # also, we implicitly reduce the multiplicity of edges down to 1 in the case
# of multiedges.
if directed:
F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
@@ -599,7 +599,7 @@ def chordless_cycles(G, length_bound=None):
# present, then we remove both from F.
#
# In directed graphs, we need to consider both directions that edges can
- # take, so iterate over all edges (u, v) and posibly (v, u). In undirected
+ # take, so iterate over all edges (u, v) and possibly (v, u). In undirected
# graphs, we need to be a little careful to only consider every edge once,
# so we use a "visited" set to emulate node-order comparisons.
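
A short usage sketch of chordless_cycles (available in NetworkX 3.1+, the version this hunk touches):

>>> import networkx as nx
>>> G = nx.cycle_graph(5)
>>> G.add_edge(0, 2)                      # a chord splits the 5-cycle
>>> sorted(sorted(c) for c in nx.chordless_cycles(G))
[[0, 1, 2], [0, 2, 3, 4]]
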
diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py
index db576eb2..cfb0fdc3 100644
--- a/networkx/algorithms/distance_measures.py
+++ b/networkx/algorithms/distance_measures.py
@@ -729,7 +729,7 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True):
# Replace with collapsing topology or approximated zero?
# Using determinants to compute the effective resistance is more memory
- # efficient than directly calculating the psuedo-inverse
+ # efficient than directly calculating the pseudo-inverse
L = nx.laplacian_matrix(G, node_list, weight=weight).asformat("csc")
indices = list(range(L.shape[0]))
# w/ nodeA removed
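
A quick usage sketch of resistance_distance (requires SciPy); opposite corners of a square of unit resistors give 1 ohm:

>>> import networkx as nx
>>> G = nx.cycle_graph(4)                 # a square of unit resistors
>>> round(nx.resistance_distance(G, 0, 2), 6)
1.0
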
diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py
index 176e8bd3..372224ca 100644
--- a/networkx/algorithms/isomorphism/tree_isomorphism.py
+++ b/networkx/algorithms/isomorphism/tree_isomorphism.py
@@ -168,7 +168,7 @@ def rooted_tree_isomorphism(t1, root1, t2, root2):
# nothing to do on last level so start on h-1
# also nothing to do for our fake level 0, so skip that
for i in range(h - 1, 0, -1):
- # update the ordered_labels and ordered_childen
+ # update the ordered_labels and ordered_children
# for any children
for v in L[i]:
# nothing to do if no children
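
A minimal usage sketch of rooted_tree_isomorphism, imported directly from the module touched here:

>>> import networkx as nx
>>> from networkx.algorithms.isomorphism.tree_isomorphism import rooted_tree_isomorphism
>>> t1 = nx.balanced_tree(2, 2)
>>> t2 = nx.relabel_nodes(t1, {n: n + 100 for n in t1})
>>> mapping = rooted_tree_isomorphism(t1, 0, t2, 100)
>>> len(mapping)          # one (node of t1, node of t2) pair per node
7
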
diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py
index 1914305f..6aab0849 100644
--- a/networkx/algorithms/link_analysis/pagerank_alg.py
+++ b/networkx/algorithms/link_analysis/pagerank_alg.py
@@ -191,7 +191,7 @@ def google_matrix(
The "personalization vector" consisting of a dictionary with a
key some subset of graph nodes and personalization value each of those.
At least one personalization value must be non-zero.
- If not specifed, a nodes personalization value will be zero.
+ If not specified, a nodes personalization value will be zero.
By default, a uniform distribution is used.
nodelist : list, optional
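
A small sketch of google_matrix with a personalization vector (requires NumPy); unspecified nodes get a personalization value of zero, as the corrected docstring says:

>>> import networkx as nx
>>> import numpy as np
>>> G = nx.path_graph(4)
>>> M = nx.google_matrix(G, alpha=0.85, personalization={0: 1})
>>> M.shape
(4, 4)
>>> bool(np.allclose(M.sum(axis=1), 1.0))   # each row is a probability distribution
True
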
diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py
index 03497ad8..1a94f206 100644
--- a/networkx/algorithms/matching.py
+++ b/networkx/algorithms/matching.py
@@ -1037,7 +1037,7 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
if deltatype == -1:
# No further improvement possible; max-cardinality optimum
# reached. Do a final delta update to make the optimum
- # verifyable.
+ # verifiable.
assert maxcardinality
deltatype = 1
delta = max(0, min(dualvar.values()))
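
For context, a usage sketch of the public max_weight_matching function, contrasting the default with maxcardinality=True:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_weighted_edges_from([(1, 2, 2), (2, 3, 10), (3, 4, 2)])
>>> m = nx.max_weight_matching(G)
>>> len(m), sum(G.edges[e]["weight"] for e in m)    # one heavy edge wins
(1, 10)
>>> len(nx.max_weight_matching(G, maxcardinality=True))
2
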
diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py
index a62b1df1..e89300f2 100644
--- a/networkx/algorithms/operators/product.py
+++ b/networkx/algorithms/operators/product.py
@@ -21,7 +21,7 @@ def _dict_product(d1, d2):
return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)}
-# Generators for producting graph products
+# Generators for producing graph products
def _node_product(G, H):
for u, v in product(G, H):
yield ((u, v), _dict_product(G.nodes[u], H.nodes[v]))
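
A short sketch showing how node attributes are paired up in a graph product, which is what _dict_product implements:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_node("a", color="red")
>>> H = nx.Graph()
>>> H.add_node(0, color="blue", size=3)
>>> P = nx.cartesian_product(G, H)
>>> P.nodes[("a", 0)]["color"]   # attribute values are paired per factor graph
('red', 'blue')
>>> P.nodes[("a", 0)]["size"]    # missing on one side becomes None
(None, 3)
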
diff --git a/networkx/algorithms/tests/test_simple_paths.py b/networkx/algorithms/tests/test_simple_paths.py
index 300dbc81..af15861a 100644
--- a/networkx/algorithms/tests/test_simple_paths.py
+++ b/networkx/algorithms/tests/test_simple_paths.py
@@ -464,7 +464,7 @@ def test_shortest_simple_paths_directed():
assert list(paths) == [[0, 1, 2, 3]]
-def test_shortest_simple_paths_directed_with_weight_fucntion():
+def test_shortest_simple_paths_directed_with_weight_function():
def cost(u, v, x):
return 1
@@ -477,7 +477,7 @@ def test_shortest_simple_paths_directed_with_weight_fucntion():
] == sorted(len(path) for path in nx.all_simple_paths(G, 1, 12))
-def test_shortest_simple_paths_with_weight_fucntion():
+def test_shortest_simple_paths_with_weight_function():
def cost(u, v, x):
return 1
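
A minimal sketch of shortest_simple_paths with a callable weight, mirroring the renamed tests:

>>> import networkx as nx
>>> G = nx.cycle_graph(5)
>>> def cost(u, v, data):
...     return 1
...
>>> paths = list(nx.shortest_simple_paths(G, 0, 2, weight=cost))
>>> [len(p) - 1 for p in paths]   # hop counts, shortest first
[2, 3]
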
diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py
index 5370d987..0d37c906 100644
--- a/networkx/algorithms/threshold.py
+++ b/networkx/algorithms/threshold.py
@@ -674,7 +674,7 @@ def betweenness_sequence(creation_sequence, normalized=True):
dlast = 0.0 # betweenness of last d
for i, c in enumerate(cs):
if c == "d": # cs[i]=="d":
- # betweennees = amt shared with eariler d's and i's
+ # betweennees = amt shared with earlier d's and i's
# + new isolated nodes covered
# + new paths to all previous nodes
b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr
@@ -825,7 +825,7 @@ def random_threshold_sequence(n, p, seed=None):
"""
Create a random threshold sequence of size n.
A creation sequence is built by randomly choosing d's with
- probabiliy p and i's with probability 1-p.
+ probability p and i's with probability 1-p.
s=nx.random_threshold_sequence(10,0.5)
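
A usage sketch of the threshold-sequence helpers touched here, imported from the networkx.algorithms.threshold module:

>>> from networkx.algorithms import threshold
>>> cs = threshold.random_threshold_sequence(8, 0.5, seed=42)
>>> len(cs), set(cs) <= {"d", "i"}
(8, True)
>>> G = threshold.threshold_graph(cs)
>>> threshold.is_threshold_graph(G)
True
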
diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py
index 8ec693d6..d36f411b 100644
--- a/networkx/algorithms/tree/mst.py
+++ b/networkx/algorithms/tree/mst.py
@@ -613,7 +613,7 @@ def partition_spanning_tree(
"""
Find a spanning tree while respecting a partition of edges.
- Edges can be flagged as either `INLCUDED` which are required to be in the
+ Edges can be flagged as either `INCLUDED` which are required to be in the
returned tree, `EXCLUDED`, which cannot be in the returned tree and `OPEN`.
This is used in the SpanningTreeIterator to create new partitions following
@@ -742,7 +742,7 @@ def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None):
is based on the product of edge weights, and if ``multiplicative=False``
it is based on the sum of the edge weight. However, since it is
easier to determine the total weight of all spanning trees for the
- multiplicative verison, that is significantly faster and should be used if
+ multiplicative version, that is significantly faster and should be used if
possible. Additionally, setting `weight` to `None` will cause a spanning tree
to be selected with uniform probability.
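
A quick usage sketch of random_spanning_tree with weight=None, i.e. uniform sampling over spanning trees:

>>> import networkx as nx
>>> G = nx.complete_graph(5)
>>> T = nx.random_spanning_tree(G, weight=None, seed=7)
>>> nx.is_tree(T), sorted(T) == sorted(G)   # a tree spanning every node
(True, True)
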
diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py
index 5608c73b..6200b3e3 100644
--- a/networkx/drawing/layout.py
+++ b/networkx/drawing/layout.py
@@ -1151,7 +1151,7 @@ def arf_layout(
References
.. [1] "Self-Organization Applied to Dynamic Network Layout", M. Geipel,
- International Jounral of Modern Physics C, 2007, Vol 18, No 10, pp. 1537-1549.
+ International Journal of Modern Physics C, 2007, Vol 18, No 10, pp. 1537-1549.
https://doi.org/10.1142/S0129183107011558 https://arxiv.org/abs/0704.1748
Returns
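
A small usage sketch of arf_layout (requires NumPy):

>>> import networkx as nx
>>> G = nx.wheel_graph(6)
>>> pos = nx.arf_layout(G, max_iter=100)
>>> len(pos) == len(G)   # one 2-D position per node
True
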
diff --git a/networkx/drawing/nx_latex.py b/networkx/drawing/nx_latex.py
index 359d0ed1..6312f715 100644
--- a/networkx/drawing/nx_latex.py
+++ b/networkx/drawing/nx_latex.py
@@ -8,7 +8,7 @@ Usually, you will want the drawing to appear in a figure environment so
you use ``to_latex(G, caption="A caption")``. If you want the raw
drawing commands without a figure environment use :func:`to_latex_raw`.
And if you want to write to a file instead of just returning the latex
-code as a string, use ``write_latex(G, "filname.tex", caption="A caption")``.
+code as a string, use ``write_latex(G, "filename.tex", caption="A caption")``.
To construct a figure with subfigures for each graph to be shown, provide
``to_latex`` or ``write_latex`` a list of graphs, a list of subcaptions,
@@ -334,7 +334,7 @@ def to_latex(
The TikZ drawing utility in LaTeX is used to draw the graph(s).
If `Gbunch` is a graph, it is drawn in a figure environment.
- If `Gbunch` is an iterable of graphs, each is drawn in a subfigure envionment
+ If `Gbunch` is an iterable of graphs, each is drawn in a subfigure environment
within a single figure environment.
If `as_document` is True, the figure is wrapped inside a document environment
@@ -397,7 +397,7 @@ def to_latex(
n_rows : int
The number of rows of subfigures to arrange for multiple graphs
as_document : bool
- Whether to wrap the latex code in a document envionment for compiling
+ Whether to wrap the latex code in a document environment for compiling
document_wrapper : formatted text string with variable ``content``.
This text is called to evaluate the content embedded in a document
environment with a preamble setting up TikZ.
@@ -494,7 +494,7 @@ def write_latex(Gbunch, path, **options):
Gbunch : NetworkX graph or iterable of NetworkX graphs
If Gbunch is a graph, it is drawn in a figure environment.
If Gbunch is an iterable of graphs, each is drawn in a subfigure
- envionment within a single figure environment.
+ environment within a single figure environment.
path : filename
Filename or file handle to write to
options : dict
@@ -550,7 +550,7 @@ def write_latex(Gbunch, path, **options):
n_rows : int
The number of rows of subfigures to arrange for multiple graphs
as_document : bool
- Whether to wrap the latex code in a document envionment for compiling
+ Whether to wrap the latex code in a document environment for compiling
document_wrapper : formatted text string with variable ``content``.
This text is called to evaluate the content embedded in a document
environment with a preamble setting up the TikZ syntax.
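
A minimal sketch of to_latex with hand-made positions (available in NetworkX 3.0+); write_latex takes the same options but writes to the named file instead of returning a string:

>>> import networkx as nx
>>> G = nx.path_graph(3)
>>> pos = {n: (i, 0) for i, n in enumerate(G)}   # hand-made positions
>>> latex = nx.to_latex(G, pos=pos, caption="A tiny path graph")
>>> "\\begin{tikzpicture}" in latex and "\\begin{document}" in latex
True
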
diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py
index cef2702d..668a6270 100644
--- a/networkx/drawing/tests/test_pylab.py
+++ b/networkx/drawing/tests/test_pylab.py
@@ -197,7 +197,7 @@ def test_more_edge_colors_than_num_edges_directed():
assert mpl.colors.same_color(fap.get_edgecolor(), expected)
-def test_edge_color_string_with_gloabl_alpha_undirected():
+def test_edge_color_string_with_global_alpha_undirected():
edge_collection = nx.draw_networkx_edges(
barbell,
pos=nx.random_layout(barbell),
diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py
index 21afab40..af3514d6 100644
--- a/networkx/generators/directed.py
+++ b/networkx/generators/directed.py
@@ -94,7 +94,7 @@ def gnr_graph(n, p, create_using=None, seed=None):
The GNR graph is built by adding nodes one at a time with a link to one
previously added node. The previous target node is chosen uniformly at
- random. With probabiliy `p` the link is instead "redirected" to the
+ random. With probability `p` the link is instead "redirected" to the
successor node of the target.
The graph is always a (directed) tree.
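
A short usage sketch of gnr_graph; as the docstring says, the result is always a tree:

>>> import networkx as nx
>>> G = nx.gnr_graph(10, 0.3, seed=1)
>>> G.number_of_nodes(), G.number_of_edges()
(10, 9)
>>> nx.is_tree(G.to_undirected())
True
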
diff --git a/networkx/generators/tests/test_degree_seq.py b/networkx/generators/tests/test_degree_seq.py
index 93f82f76..39ed59a5 100644
--- a/networkx/generators/tests/test_degree_seq.py
+++ b/networkx/generators/tests/test_degree_seq.py
@@ -87,19 +87,19 @@ class TestConfigurationModel:
nx.configuration_model([1, 2])
-def test_directed_configuation_raise_unequal():
+def test_directed_configuration_raise_unequal():
with pytest.raises(nx.NetworkXError):
zin = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1]
zout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 2]
nx.directed_configuration_model(zin, zout)
-def test_directed_configuation_model():
+def test_directed_configuration_model():
G = nx.directed_configuration_model([], [], seed=0)
assert len(G) == 0
-def test_simple_directed_configuation_model():
+def test_simple_directed_configuration_model():
G = nx.directed_configuration_model([1, 1], [1, 1], seed=0)
assert len(G) == 2
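
A minimal sketch of directed_configuration_model, mirroring the renamed tests; the in- and out-degree sums must be equal:

>>> import networkx as nx
>>> zin = [1, 1, 1, 0]
>>> zout = [0, 1, 1, 1]
>>> D = nx.directed_configuration_model(zin, zout, seed=0)
>>> sorted(d for _, d in D.in_degree()) == sorted(zin)   # degrees are preserved
True
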
diff --git a/networkx/linalg/attrmatrix.py b/networkx/linalg/attrmatrix.py
index ace7144e..d2029996 100644
--- a/networkx/linalg/attrmatrix.py
+++ b/networkx/linalg/attrmatrix.py
@@ -326,7 +326,7 @@ def attr_sparse_matrix(
edge_attr : str, optional
Each element of the matrix represents a running total of the
specified edge attribute for edges whose node attributes correspond
- to the rows/cols of the matirx. The attribute must be present for
+ to the rows/cols of the matrix. The attribute must be present for
all edges in the graph. If no attribute is specified, then we
just count the number of edges whose node attributes correspond
to the matrix element.
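
A small usage sketch of attr_sparse_matrix (requires SciPy); exact return types may vary slightly across SciPy versions:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_edge(0, 1, thickness=1)
>>> G.add_edge(0, 2, thickness=2)
>>> G.add_edge(1, 2, thickness=3)
>>> M, order = nx.attr_sparse_matrix(G, edge_attr="thickness")
>>> M.shape
(3, 3)
>>> float(M.todense()[order.index(0), order.index(1)])   # running total for edge (0, 1)
1.0
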
diff --git a/networkx/readwrite/tests/test_gexf.py b/networkx/readwrite/tests/test_gexf.py
index 95c29f90..6ff14c99 100644
--- a/networkx/readwrite/tests/test_gexf.py
+++ b/networkx/readwrite/tests/test_gexf.py
@@ -462,7 +462,7 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2">
G = nx.MultiGraph()
G.add_node(0, label="1", color="green")
G.add_node(1, label="2", color="green")
- G.add_edge(0, 1, id="0", wight=3, type="undirected", start=0, end=1)
+ G.add_edge(0, 1, id="0", weight=3, type="undirected", start=0, end=1)
G.add_edge(0, 1, id="1", label="foo", start=0, end=1)
G.add_edge(0, 1)
fh = io.BytesIO()
diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py
index 57823694..0d0bebf6 100644
--- a/networkx/readwrite/tests/test_gml.py
+++ b/networkx/readwrite/tests/test_gml.py
@@ -544,7 +544,7 @@ graph
"directed 1 multigraph 1 ]"
)
- # Tests for string convertable alphanumeric id and label values
+ # Tests for string convertible alphanumeric id and label values
nx.parse_gml("graph [edge [ source a target a ] node [ id a label b ] ]")
nx.parse_gml(
"graph [ node [ id n42 label 0 ] node [ id x43 label 1 ]"
diff --git a/networkx/readwrite/tests/test_text.py b/networkx/readwrite/tests/test_text.py
index 62fac025..539927e4 100644
--- a/networkx/readwrite/tests/test_text.py
+++ b/networkx/readwrite/tests/test_text.py
@@ -294,7 +294,7 @@ def test_forest_str_errors():
def test_forest_str_overspecified_sources():
"""
- When sources are directly specified, we wont be able to determine when we
+ When sources are directly specified, we won't be able to determine when we
are in the last component, so there will always be a trailing, leftmost
pipe.
"""
@@ -353,7 +353,7 @@ def test_forest_str_overspecified_sources():
def test_write_network_text_iterative_add_directed_edges():
"""
- Walk through the cases going from a diconnected to fully connected graph
+ Walk through the cases going from a disconnected to fully connected graph
"""
graph = nx.DiGraph()
graph.add_nodes_from([1, 2, 3, 4])
@@ -501,7 +501,7 @@ def test_write_network_text_iterative_add_directed_edges():
def test_write_network_text_iterative_add_undirected_edges():
"""
- Walk through the cases going from a diconnected to fully connected graph
+ Walk through the cases going from a disconnected to fully connected graph
"""
graph = nx.Graph()
graph.add_nodes_from([1, 2, 3, 4])
@@ -606,7 +606,7 @@ def test_write_network_text_iterative_add_undirected_edges():
def test_write_network_text_iterative_add_random_directed_edges():
"""
- Walk through the cases going from a diconnected to fully connected graph
+ Walk through the cases going from a disconnected to fully connected graph
"""
rng = random.Random(724466096)
diff --git a/networkx/readwrite/text.py b/networkx/readwrite/text.py
index 1d721b11..09f23e1b 100644
--- a/networkx/readwrite/text.py
+++ b/networkx/readwrite/text.py
@@ -108,7 +108,7 @@ def generate_network_text(
with_labels : bool | str
If True will use the "label" attribute of a node to display if it
exists otherwise it will use the node value itself. If given as a
- string, then that attribte name will be used instead of "label".
+ string, then that attribute name will be used instead of "label".
Defaults to True.
sources : List
@@ -317,7 +317,7 @@ def write_network_text(
with_labels : bool | str
If True will use the "label" attribute of a node to display if it
exists otherwise it will use the node value itself. If given as a
- string, then that attribte name will be used instead of "label".
+ string, then that attribute name will be used instead of "label".
Defaults to True.
sources : List
@@ -332,7 +332,7 @@ def write_network_text(
If True only ASCII characters are used to construct the visualization
end : string
- The line ending characater
+ The line ending character
Examples
--------
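
A short usage sketch of the with_labels behaviour documented here, using generate_network_text (NetworkX 3.1+):

>>> import networkx as nx
>>> G = nx.path_graph(4, create_using=nx.DiGraph)
>>> nx.set_node_attributes(G, {n: f"node-{n}" for n in G}, name="label")
>>> lines = list(nx.generate_network_text(G, with_labels=True))
>>> any("node-2" in line for line in lines)   # nodes are shown by their "label"
True
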