28 changes: 28 additions & 0 deletions .github/workflows/ci.yaml
@@ -0,0 +1,28 @@
name: CI
on:
push:
pull_request:

jobs:
test:
name: test ${{ matrix.python_version }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python_version:
- "3.10"
- "3.9"
- "3.8"
- "3.7"
steps:
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python_version }}
- uses: actions/checkout@v2
- name: Install tox-gh
run: python -m pip install tox-gh
- name: Setup test suite
run: tox -vv --notest
- name: Run test suite
run: tox --skip-pkg-install
16 changes: 0 additions & 16 deletions .travis.yml

This file was deleted.

2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@

Author: Roan LaPlante <rlaplant@nmr.mgh.harvard.edu>

Tested against python 2.7 and 3.9.
Tested against python 3.7+.

## Copyright information

11 changes: 5 additions & 6 deletions bct/algorithms/clustering.py
@@ -250,7 +250,7 @@ def clustering_coef_wu_sign(W, coef_type='default'):
'''
Returns the weighted clustering coefficient generalized or separated
for positive and negative weights.

Three algorithms are supported, hereafter referred to as default, zhang,
and costantini.

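For orientation, a minimal usage sketch of the function this hunk documents; the import path follows the file shown in this diff, and the matrix values are purely illustrative:

```python
import numpy as np
from bct.algorithms.clustering import clustering_coef_wu_sign

# Small symmetric matrix with positive and negative weights (illustrative values).
W = np.array([[ 0.0,  0.5, -0.3],
              [ 0.5,  0.0,  0.8],
              [-0.3,  0.8,  0.0]])

# coef_type may be 'default', 'zhang', or 'costantini' per the docstring above.
coefs = clustering_coef_wu_sign(W, coef_type='default')
print(coefs)
```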
@@ -384,9 +384,8 @@ def consensus_und(D, tau, reps=1000, seed=None):
reps : int
number of times the clustering algorithm is reapplied. default value
is 1000.
seed : hashable, optional
If None (default), use the np.random's global random state to generate random numbers.
Otherwise, use a new np.random.RandomState instance seeded with the given value.
seed : None, int, or numpy.random.Generator
Seed (or RNG itself) used to generate random numbers.

Returns
-------
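The reworded `seed` entry reflects the switch from `np.random.RandomState` to the newer `numpy.random.Generator` API. As a sketch of what the `get_rng` helper used later in this diff might do under that convention (illustrative only, not necessarily how the project's own helper is implemented):

```python
import numpy as np

def get_rng(seed=None):
    """Normalize seed=None, an int, or an existing Generator to a numpy.random.Generator."""
    if isinstance(seed, np.random.Generator):
        return seed  # caller already supplied an RNG; use it as-is
    # None -> fresh OS entropy; int -> reproducible stream
    return np.random.default_rng(seed)

rng = get_rng(42)
print(rng.integers(2, size=5))  # same Generator API exercised in core.py below
```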
@@ -485,7 +484,7 @@ def get_components(A, no_depend=False):
if not np.all(A == A.T): # ensure matrix is undirected
raise BCTParamError('get_components can only be computed for undirected'
' matrices. If your matrix is noisy, correct it with np.around')

A = binarize(A, copy=True)
n = len(A)
np.fill_diagonal(A, 1)
@@ -503,7 +502,7 @@ def get_components(A, no_depend=False):
temp.append(item)
union_sets = temp

comps = np.array([i+1 for v in range(n) for i in
range(len(union_sets)) if v in union_sets[i]])
comp_sizes = np.array([len(s) for s in union_sets])

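For context, a small usage sketch of `get_components`; the printed labels are an assumption about ordering, but the shape of the output (a 1-indexed component label per node plus one size per component) follows the comprehensions above:

```python
import numpy as np
from bct.algorithms.clustering import get_components

# Two connected pairs plus an isolated node, as a binary undirected matrix.
A = np.array([[0, 1, 0, 0, 0],
              [1, 0, 0, 0, 0],
              [0, 0, 0, 1, 0],
              [0, 0, 1, 0, 0],
              [0, 0, 0, 0, 0]])

comps, comp_sizes = get_components(A)
print(comps)       # e.g. [1 1 2 2 3] -- component label for each node
print(comp_sizes)  # e.g. [2 2 1]     -- number of nodes per component
```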
29 changes: 14 additions & 15 deletions bct/algorithms/core.py
@@ -141,8 +141,8 @@ def assortativity_wei(CIJ, flag=0):


def core_periphery_dir(W, gamma=1, C0=None, seed=None):
'''
The optimal core/periphery subdivision is a partition of the network
into two nonoverlapping groups of nodes, a core group and a periphery
group. The number of core-group edges is maximized, and the number of
within periphery edges is minimized.
@@ -166,16 +166,15 @@ def core_periphery_dir(W, gamma=1, C0=None, seed=None):
0 < gamma < 1 detects large core, small periphery
C0 : NxN np.ndarray
Initial core structure
seed : hashable, optional
If None (default), use the np.random's global random state to generate random numbers.
Otherwise, use a new np.random.RandomState instance seeded with the given value.
seed : None, int, or numpy.random.Generator
Seed (or RNG itself) used to generate random numbers.
'''
rng = get_rng(seed)
n = len(W)
np.fill_diagonal(W, 0)

if C0 == None:
C = rng.randint(2, size=(n,))
C = rng.integers(2, size=(n,))
else:
C = C0.copy()

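The `rng.randint(...)` → `rng.integers(...)` replacements here and below follow from the same Generator switch: `Generator.integers(2, size=(n,))` draws 0/1 values just as `RandomState.randint(2, size=(n,))` did. A hedged usage sketch, importing from the path shown in this diff; the return value is printed without unpacking since its exact form is not shown here:

```python
import numpy as np
from bct.algorithms.core import core_periphery_dir

rng = np.random.default_rng(0)
# Symmetric, non-negative weight matrix with an empty diagonal (illustrative input).
W = rng.random((10, 10))
W = (W + W.T) / 2
np.fill_diagonal(W, 0)

# gamma=1 is the default resolution; an int seed gives a reproducible partition.
result = core_periphery_dir(W, gamma=1, seed=0)
print(result)  # per BCT convention: a core/periphery assignment and a quality score
```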
@@ -195,39 +194,39 @@
flag = True
it = 0
while flag:
it += 1
if it > 100:
raise BCTParamError('Infinite Loop aborted')

flag = False
#initial node indices
ixes = np.arange(n)

Ct = C.copy()
while len(ixes) > 0:
Qt = np.zeros((n,))
ctix, = np.where(Ct)
nctix, = np.where(np.logical_not(Ct))
q0 = (np.sum(B[np.ix_(ctix, ctix)]) -
np.sum(B[np.ix_(nctix, nctix)]))
Qt[ctix] = q0 - 2 * np.sum(B[ctix, :], axis=1)
Qt[nctix] = q0 + 2 * np.sum(B[nctix, :], axis=1)

max_Qt = np.max(Qt[ixes])
u, = np.where(np.abs(Qt[ixes]-max_Qt) < 1e-10)
#tunourn
u = u[rng.randint(len(u))]
u = u[rng.integers(len(u))]
Ct[ixes[u]] = np.logical_not(Ct[ixes[u]])
#casga

ixes = np.delete(ixes, u)

if max_Qt - q > 1e-10:
flag = True
C = Ct.copy()
cix, = np.where(C)
ncix, = np.where(np.logical_not(C))
q = (np.sum(B[np.ix_(cix, cix)]) -
np.sum(B[np.ix_(ncix, ncix)]))

cix, = np.where(C)
@@ -384,7 +383,7 @@ def local_assortativity_wu_sign(W):
----------
W : NxN np.ndarray
undirected connection matrix with positive and negative weights

Returns
-------
loc_assort_pos : Nx1 np.ndarray
Expand All @@ -405,13 +404,13 @@ def local_assortativity_wu_sign(W):

for curr_node in range(n):
jp = np.where(W[curr_node, :] > 0)
loc_assort_pos[curr_node] = np.sum(np.abs(str_pos[jp] -
str_pos[curr_node])) / str_pos[curr_node]
jn = np.where(W[curr_node, :] < 0)
loc_assort_neg[curr_node] = np.sum(np.abs(str_neg[jn] -
str_neg[curr_node])) / str_neg[curr_node]

loc_assort_pos = ((r_pos + 1) / n -
loc_assort_pos / np.sum(loc_assort_pos))
loc_assort_neg = ((r_neg + 1) / n -
loc_assort_neg / np.sum(loc_assort_neg))
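Finally, a hedged usage sketch for `local_assortativity_wu_sign`; the signed input matrix is illustrative, and the two-vector return (positive and negative local assortativity per node) is assumed from the Returns section of the docstring above:

```python
import numpy as np
from bct.algorithms.core import local_assortativity_wu_sign

rng = np.random.default_rng(1)
# Signed, symmetric weight matrix with an empty diagonal (illustrative input).
W = rng.uniform(-1, 1, size=(8, 8))
W = (W + W.T) / 2
np.fill_diagonal(W, 0)

loc_assort_pos, loc_assort_neg = local_assortativity_wu_sign(W)
print(loc_assort_pos)  # per-node local assortativity over positive weights
print(loc_assort_neg)  # per-node local assortativity over negative weights
```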