Skip to content

Commit

Permalink
Merge branch 'main' into New_docs
Browse files Browse the repository at this point in the history
  • Loading branch information
filimarc authored Jul 31, 2024
2 parents f7a425c + f234265 commit 846934e
Show file tree
Hide file tree
Showing 19 changed files with 660 additions and 68 deletions.
128 changes: 109 additions & 19 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -1,26 +1,116 @@
name: Upload BSB Package
name: Bump version, create release and deploy

on:
release:
types: [created]
push:
branches:
- main

jobs:
bump:
runs-on: ubuntu-latest
outputs:
tag: ${{ steps.semver.outputs.next }}
old_tag: ${{ steps.semver.outputs.current }}

steps:
- name: Checkout Code
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Get Next Version
id: semver
uses: ietf-tools/semver-action@v1
with:
token: ${{ github.token }}
branch: main

- name: Set up Python 3.11
uses: actions/setup-python@v1
with:
python-version: 3.11

- name: Bump version in Python project
run: |
pip install --upgrade pip bump-my-version
oldv="${{ steps.semver.outputs.current }}"
newv="${{steps.semver.outputs.next}}"
# Bump the version, dropping the leading `v` with `${x:1}`
bump-my-version replace --current-version=${oldv:1} --new-version=${newv:1} pyproject.toml
- name: Commit version change
uses: stefanzweifel/git-auto-commit-action@v4
with:
branch: main
commit_message: 'docs: bump version: ${{ steps.semver.outputs.current }} → ${{ steps.semver.outputs.next }}'

- uses: rickstaa/action-create-tag@v1
id: "tag_create"
with:
tag: ${{ steps.semver.outputs.next }}
github_token: ${{ github.token }}

release:
runs-on: ubuntu-latest
needs: bump

steps:
- name: Checkout Code
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Pull commit of version change
run: |
git pull origin main
- name: Update CHANGELOG
id: changelog
uses: requarks/changelog-action@v1
with:
token: ${{ github.token }}
fromTag: ${{ needs.bump.outputs.tag }}
toTag: ${{ needs.bump.outputs.old_tag }}

- name: Create Release
uses: ncipollo/[email protected]
with:
allowUpdates: true
draft: false
makeLatest: true
tag: ${{ needs.bump.outputs.tag }}
name: ${{ needs.bump.outputs.tag }}
body: ${{ steps.changelog.outputs.changes }}
token: ${{ github.token }}

- name: Commit CHANGELOG.md
uses: stefanzweifel/git-auto-commit-action@v4
with:
branch: main
commit_message: 'docs: update CHANGELOG.md for ${{ github.ref_name }}'
file_pattern: CHANGELOG.md

deploy:
runs-on: ubuntu-latest
needs: release

steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python setup.py sdist bdist_wheel
twine upload dist/*
- uses: actions/checkout@v4

- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: 3.11

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build twine
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python -m build
twine upload --verbose --repository pypi dist/*
14 changes: 14 additions & 0 deletions .github/workflows/validate-pr.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Validates that PR titles follow the Conventional Commits spec, so that
# squash-merged commits on main yield history parseable by the semver bump job.
name: PR Conventional Commit Validation

on:
  pull_request:
    # Re-validate whenever the title may have changed, not just on open.
    types: [opened, synchronize, reopened, edited]

jobs:
  validate-pr-title:
    runs-on: ubuntu-latest
    steps:
      - name: PR Conventional Commit Validation
        uses: ytanikin/[email protected]
        with:
          # Allowed commit types; the PR title must start with one of these.
          task_types: '["feat","fix","docs","test","ci","refactor","perf","revert"]'
10 changes: 10 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
default_install_hook_types:
- pre-commit
- commit-msg
repos:
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.1.1
Expand All @@ -14,3 +17,10 @@ repos:
name: api-test
entry: python3 .github/devops/generate_public_api.py
language: system
- repo: https://github.com/compilerla/conventional-pre-commit
rev: v3.3.0
hooks:
- id: conventional-pre-commit
name: conventional-commit
stages: [ commit-msg ]
args: [ ]
5 changes: 5 additions & 0 deletions CHANGELOG → CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
# 4.3.0
* Introduction of a pool caching system
* Fix run iteration values in core
* Add FixedOutdegree

# 4.2.0
* Created geometric shape connection strategies
* Added support for multiple shapes for each cell type
Expand Down
4 changes: 3 additions & 1 deletion bsb/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
install the `bsb` package instead.
"""

__version__ = "4.2.0"
__version__ = "4.3.0"

import functools
import importlib
Expand Down Expand Up @@ -221,6 +221,7 @@ def __dir__():
FileScheme: typing.Type["bsb.storage._files.FileScheme"]
FileStore: typing.Type["bsb.storage.interfaces.FileStore"]
FixedIndegree: typing.Type["bsb.connectivity.general.FixedIndegree"]
FixedOutdegree: typing.Type["bsb.connectivity.general.FixedOutdegree"]
FixedPositions: typing.Type["bsb.placement.strategy.FixedPositions"]
FractionFilter: typing.Type["bsb.simulation.targetting.FractionFilter"]
GatewayError: typing.Type["bsb.exceptions.GatewayError"]
Expand Down Expand Up @@ -416,6 +417,7 @@ def __dir__():
parse_configuration_file: "bsb.config.parse_configuration_file"
parse_morphology_content: "bsb.morphologies.parsers.parse_morphology_content"
parse_morphology_file: "bsb.morphologies.parsers.parse_morphology_file"
pool_cache: "bsb.services.pool_cache"
read_option: "bsb.options.read_option"
refs: "bsb.config.refs"
register_option: "bsb.options.register_option"
Expand Down
72 changes: 49 additions & 23 deletions bsb/connectivity/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,38 @@ def connect(self, pre, post):
self.connect_cells(from_ps, to_ps, src_locs, dest_locs)


def _connect_fixed_degree(self, pre, post, degree, is_in):
# Generalized connect function for Fixed in- and out-degree
rng = np.random.default_rng()
ps_counted = pre.placement if is_in else post.placement
ps_fixed = post.placement if is_in else pre.placement
high = sum(len(ps) for ps in ps_counted)
for ps in ps_fixed:
l = len(ps)
counted_targets = np.full((l * degree, 3), -1)
fixed_targets = np.full((l * degree, 3), -1)
ptr = 0
for i in range(l):
fixed_targets[ptr : ptr + degree, 0] = i
counted_targets[ptr : ptr + degree, 0] = rng.choice(
high, degree, replace=False
)
ptr += degree
lowmux = 0
for ps_o in ps_counted:
highmux = lowmux + len(ps_o)
demux_idx = (counted_targets[:, 0] >= lowmux) & (
counted_targets[:, 0] < highmux
)
demuxed = counted_targets[demux_idx]
demuxed[:, 0] -= lowmux
if is_in:
self.connect_cells(ps_o, ps, demuxed, fixed_targets[demux_idx])
else:
self.connect_cells(ps, ps_o, fixed_targets[demux_idx], demuxed)
lowmux = highmux


@config.node
class FixedIndegree(InvertedRoI, ConnectionStrategy):
"""
Expand All @@ -52,26 +84,20 @@ class FixedIndegree(InvertedRoI, ConnectionStrategy):
indegree: int = config.attr(type=int, required=True)

def connect(self, pre, post):
in_ = self.indegree
rng = np.random.default_rng()
high = sum(len(ps) for ps in pre.placement)
for ps in post.placement:
l = len(ps)
pre_targets = np.full((l * in_, 3), -1)
post_targets = np.full((l * in_, 3), -1)
ptr = 0
for i in range(l):
post_targets[ptr : ptr + in_, 0] = i
pre_targets[ptr : ptr + in_, 0] = rng.choice(high, in_, replace=False)
ptr += in_
lowmux = 0
for pre_ps in pre.placement:
highmux = lowmux + len(pre_ps)
demux_idx = (pre_targets[:, 0] >= lowmux) & (pre_targets[:, 0] < highmux)
demuxed = pre_targets[demux_idx]
demuxed[:, 0] -= lowmux
self.connect_cells(pre_ps, ps, demuxed, post_targets[demux_idx])
lowmux = highmux


__all__ = ["AllToAll", "Convergence", "FixedIndegree"]
_connect_fixed_degree(self, pre, post, self.indegree, True)


@config.node
class FixedOutdegree(ConnectionStrategy):
    """
    Connect a group of presynaptic cell types to ``outdegree`` uniformly random
    postsynaptic cells from all the postsynaptic cell types.
    """

    # Number of postsynaptic targets drawn for each presynaptic cell.
    outdegree: int = config.attr(type=int, required=True)

    def connect(self, pre, post):
        # Delegate to the shared fixed-degree helper; is_in=False selects the
        # outdegree orientation (targets sampled from the postsynaptic side).
        _connect_fixed_degree(self, pre, post, self.outdegree, False)


__all__ = ["AllToAll", "Convergence", "FixedIndegree", "FixedOutdegree"]
20 changes: 17 additions & 3 deletions bsb/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ def __init__(self, config=None, storage=None, clear=False, comm=None):
:returns: A network object
:rtype: :class:`~.core.Scaffold`
"""
self._pool_cache: dict[int, typing.Callable[[], None]] = {}
self._pool_listeners: list[tuple[typing.Callable[[list["Job"]], None], float]] = (
[]
)
Expand Down Expand Up @@ -270,7 +271,7 @@ def run_placement(self, strategies=None, fail_fast=True, pipelines=True):
if pipelines:
self.run_pipelines()
if strategies is None:
strategies = [*self.placement.values()]
strategies = set(self.placement.values())
strategies = PlacementStrategy.sort_deps(strategies)
with self.create_job_pool(fail_fast=fail_fast) as pool:
if pool.is_main():
Expand Down Expand Up @@ -309,7 +310,7 @@ def run_after_placement(self, hooks=None, fail_fast=None, pipelines=True):
Run after placement hooks.
"""
if hooks is None:
hooks = self.after_placement
hooks = set(self.after_placement.values())
with self.create_job_pool(fail_fast) as pool:
if pool.is_main():
pool.schedule(hooks)
Expand All @@ -321,7 +322,7 @@ def run_after_connectivity(self, hooks=None, fail_fast=None, pipelines=True):
Run after placement hooks.
"""
if hooks is None:
hooks = self.after_placement
hooks = set(self.after_connectivity.values())
with self.create_job_pool(fail_fast) as pool:
if pool.is_main():
pool.schedule(hooks)
Expand Down Expand Up @@ -785,6 +786,19 @@ def remove_listener(self, listener):
self._pool_listeners.pop(i)
break

    def register_pool_cached_item(self, id, cleanup):
        """
        Registers a cleanup function for items cached during a parallel workflow.
        Internal use only.

        :param id: Id of the cached item. Should be unique but identical across
          MPI nodes.
        :param cleanup: A callable that cleans up the cached item.
        :raises RuntimeError: If an item with the same id is already registered.
        """
        if id in self._pool_cache:
            raise RuntimeError(f"Pool cache item '{id}' already exists.")
        self._pool_cache[id] = cleanup


class ReportListener:
def __init__(self, scaffold, file):
Expand Down
4 changes: 2 additions & 2 deletions bsb/services/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"""

from .pool import JobPool as _JobPool # noqa
from .pool import WorkflowError
from .pool import WorkflowError, pool_cache

JobPool = _JobPool
"""
Expand All @@ -33,4 +33,4 @@ def register_service(attr, provider):
globals()[attr] = provider


__all__ = ["MPI", "MPILock", "JobPool", "register_service", "WorkflowError"]
__all__ = ["MPI", "MPILock", "JobPool", "register_service", "WorkflowError", "pool_cache"]
19 changes: 19 additions & 0 deletions bsb/services/mpi.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,25 @@ def allgather(self, obj):
return self._comm.allgather(obj)
return [obj]

    def window(self, buffer):
        """
        Create an MPI RMA window over ``buffer``, or a no-op mock when running
        serially (no communicator, or a single rank).

        :param buffer: Buffer to expose for one-sided access. Under MPI it is
          handed to ``mpi4py``'s ``Win.Create``; the mock's ``Get`` assumes a
          ``(buffer, ...)`` bufspec and returns the buffer itself.
        """
        if self._comm and self.get_size() > 1:
            # Deferred import: mpi4py is only importable when MPI is available.
            from mpi4py.MPI import INFO_NULL, Win

            # NOTE(review): disp_unit is passed as True (== 1, i.e. byte-sized
            # displacements); presumably intentional — confirm and consider an
            # explicit 1 for clarity.
            return Win.Create(buffer, True, INFO_NULL, self._comm)
        else:

            class WindowMock:
                # Serial stand-in mirroring the subset of the Win API used by
                # callers: Get returns the local buffer directly, and the lock
                # operations are no-ops (no remote ranks to synchronize with).
                def Get(self, bufspec, rank):
                    return bufspec[0]

                def Lock(self, rank):
                    pass

                def Unlock(self, rank):
                    pass

            return WindowMock()


class MPIModule(MockModule):
"""
Expand Down
Loading

0 comments on commit 846934e

Please sign in to comment.