small clean-ups, and fixed requirements
future.annotations requires Cython>=3

Signed-off-by: Nick Papior <[email protected]>
zerothi committed Feb 14, 2024
1 parent 18e1d35 commit bb16b7e
Showing 5 changed files with 50 additions and 62 deletions.
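
As the commit message notes, future.annotations requires Cython>=3: under "from __future__ import annotations" (PEP 563) every annotation is stored as a string at run time, and only Cython 3's pure Python mode resolves such stringified annotations into C declarations, hence the bump to cython>=3 in both the docs environment and the build requirements. A minimal sketch of the pattern, with a hypothetical module and function name that are not part of this commit:

# sketch_future_annotations.py -- hypothetical module, minimal sketch only.
from __future__ import annotations  # PEP 563: annotations become strings

import cython
import numpy as np


def count_positive(values: cython.double[:]) -> cython.Py_ssize_t:
    # Typed purely through annotations, so the file imports as plain Python
    # and compiles with Cython >= 3, which understands the stringified types.
    n: cython.Py_ssize_t = 0
    i: cython.Py_ssize_t
    for i in range(values.shape[0]):
        if values[i] > 0.0:
            n += 1
    return n


print(count_positive(np.array([-1.0, 2.0, 3.0])))  # prints 2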
2 changes: 1 addition & 1 deletion ci/doc.yml
@@ -12,7 +12,7 @@ dependencies:
- pyproject-metadata
- pyparsing>=1.5.7
- numpy>=1.13
- cython>=0.29.28
- cython>=3.0.0
- scipy>=1.5.0
- netcdf4
- xarray>=0.10.0
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -12,7 +12,7 @@
requires = [
"setuptools_scm[toml]>=6.2",
"scikit-build-core[pyproject]",
"Cython>=0.29.28",
"Cython>=3",
# see https://github.com/scipy/oldest-supported-numpy/
# should fix #310
"oldest-supported-numpy; sys_platform != 'win32'",
@@ -49,7 +49,8 @@ dependencies = [
]

authors = [
{name = "Nick Papior", email = "[email protected]"}
{name = "Nick Papior", email = "[email protected]"},
{name = "Pol Febrer"}
]
maintainers = [{name="sisl developers"}]

8 changes: 1 addition & 7 deletions src/sisl/geom/neighs/CMakeLists.txt
@@ -1,11 +1,5 @@
# In this directory we have a set of libraries
# We will need to link to the Numpy includes
set_property(DIRECTORY
APPEND
PROPERTY INCLUDE_DIRECTORIES
${CMAKE_CURRENT_SOURCE_DIR}/..
)

foreach(source _neigh_operations)
add_cython_library(
SOURCE ${source}.py
@@ -14,4 +8,4 @@ foreach(source _neigh_operations)
)
install(TARGETS ${source} LIBRARY
DESTINATION ${SKBUILD_PROJECT_NAME}/geom/neighs)
endforeach()
endforeach()
62 changes: 32 additions & 30 deletions src/sisl/geom/neighs/_neigh_operations.py
@@ -38,9 +38,12 @@ def build_table(nbins: cnp.int64_t, bin_indices: cnp.int64_t[:]):
"""
n_atoms = bin_indices.shape[0]

list_array: cnp.int64_t[:] = np.zeros(n_atoms, dtype=np.int64)
counts: cnp.int64_t[:] = np.zeros(nbins, dtype=np.int64)
heads: cnp.int64_t[:] = np.full(nbins, -1, dtype=np.int64)
list_array_obj = np.zeros(n_atoms, dtype=np.int64)
list_array: cnp.int64_t[:] = list_array_obj
counts_obj = np.zeros(nbins, dtype=np.int64)
counts: cnp.int64_t[:] = counts_obj
heads_obj = np.full(nbins, -1, dtype=np.int64)
heads: cnp.int64_t[:] = heads_obj

# Loop through all atoms
for at in range(n_atoms):
@@ -54,10 +57,10 @@ def build_table(nbins: cnp.int64_t, bin_indices: cnp.int64_t[:]):
heads[bin_index] = at

# Update the count of this bin (increment it by 1).
counts[bin_index] = counts[bin_index] + 1
counts[bin_index] += 1

# Return the memory views as numpy arrays
return np.asarray(list_array), np.asarray(heads), np.asarray(counts)
return list_array_obj, heads_obj, counts_obj


@cython.boundscheck(False)
@@ -135,20 +138,22 @@ def get_pairs(
"""
N_ind = at_indices.shape[0]

i_pair: cython.int = 0
i_pair: cython.size_t = 0

ref_xyz: cnp.float64_t[:] = np.zeros(3, dtype=np.float64)
neigh_isc: cnp.int64_t[:] = np.zeros(3, dtype=np.int64)

neighs: cnp.int64_t[:, :] = np.zeros((max_npairs, 5), dtype=np.int64)
split_indices: cnp.int64_t[:] = np.zeros(N_ind, dtype=np.int64)
neighs_obj: cnp.ndarray = np.zeros([max_npairs, 5], dtype=np.int64)
neighs: cnp.int64_t[:, :] = neighs_obj
split_indices_obj: cnp.ndarray = np.zeros(N_ind, dtype=np.int64)
split_indices: cnp.int64_t[:] = split_indices_obj

for search_index in range(N_ind):
at = at_indices[search_index]

for j in range(8):
# Find the bin index.
bin_index = indices[search_index, j]
bin_index: cnp.int64_t = indices[search_index, j]

# Get the first atom index in this bin
neigh_at: cnp.int64_t = heads[bin_index]
@@ -181,11 +186,10 @@ def get_pairs(
# instead of moving potential neighbors to the unit cell
# because in this way we reduce the number of operations.
for i in range(3):
ref_xyz[i] = (
ref_xyz[i]
- cell[0, i] * neigh_isc[0]
- cell[1, i] * neigh_isc[1]
- cell[2, i] * neigh_isc[2]
ref_xyz[i] -= (
cell[0, i] * neigh_isc[0]
+ cell[1, i] * neigh_isc[1]
+ cell[2, i] * neigh_isc[2]
)

# Loop through all atoms that are in this bin.
@@ -228,7 +232,7 @@ def get_pairs(
# We have finished this search, store the breakpoint.
split_indices[search_index] = i_pair

return np.asarray(neighs), np.asarray(split_indices)
return neighs_obj, split_indices_obj


@cython.boundscheck(False)
@@ -306,12 +310,12 @@ def get_all_unique_pairs(

N_ats = list_array.shape[0]

i_pair: cython.int = 0
i_pair: cython.size_t = 0

ref_xyz: cnp.float64_t[:] = np.zeros(3, dtype=np.float64)
neigh_isc: cnp.int64_t[:] = np.zeros(3, dtype=np.int64)

neighs: cnp.int64_t[:, :] = np.zeros((max_npairs, 5), dtype=np.int64)
neighs: cnp.int64_t[:, :] = np.zeros([max_npairs, 5], dtype=np.int64)

for at in range(N_ats):
if self_interaction:
@@ -325,7 +329,7 @@ def get_all_unique_pairs(

for j in range(8):
# Find the bin index.
bin_index = indices[at, j]
bin_index: cnp.int64_t = indices[at, j]

# Get the first atom index in this bin
neigh_at: cnp.int64_t = heads[bin_index]
@@ -358,11 +362,10 @@ def get_all_unique_pairs(
# instead of moving potential neighbors to the unit cell
# because in this way we reduce the number of operations.
for i in range(3):
ref_xyz[i] = (
ref_xyz[i]
- cell[0, i] * neigh_isc[0]
- cell[1, i] * neigh_isc[1]
- cell[2, i] * neigh_isc[2]
ref_xyz[i] -= (
cell[0, i] * neigh_isc[0]
+ cell[1, i] * neigh_isc[1]
+ cell[2, i] * neigh_isc[2]
)

# Loop through all atoms that are in this bin.
@@ -483,7 +486,7 @@ def get_close(

N_ind = search_xyz.shape[0]

i_pair: cython.int = 0
i_pair: cython.size_t = 0

ref_xyz: cnp.float64_t[:] = np.zeros(3, dtype=np.float64)
neigh_isc: cnp.int64_t[:] = np.zeros(3, dtype=np.int64)
@@ -494,7 +497,7 @@ def get_close(
for search_index in range(N_ind):
for j in range(8):
# Find the bin index.
bin_index = indices[search_index, j]
bin_index: cnp.int64_t = indices[search_index, j]

# Get the first atom index in this bin
neigh_at: cnp.int64_t = heads[bin_index]
@@ -527,11 +530,10 @@ def get_close(
# instead of moving potential neighbors to the unit cell
# because in this way we reduce the number of operations.
for i in range(3):
ref_xyz[i] = (
ref_xyz[i]
- cell[0, i] * neigh_isc[0]
- cell[1, i] * neigh_isc[1]
- cell[2, i] * neigh_isc[2]
ref_xyz[i] -= (
cell[0, i] * neigh_isc[0]
+ cell[1, i] * neigh_isc[1]
+ cell[2, i] * neigh_isc[2]
)

# Loop through all atoms that are in this bin.
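The recurring change in _neigh_operations.py keeps the owning ndarray (the new *_obj variables) in its own variable next to the typed memoryview over the same buffer: the hot loops still index through the view, while the function returns the ndarray directly instead of wrapping the view with np.asarray at the end. A condensed sketch of that pattern, with illustrative names and cython.longlong[:] standing in for the cnp.int64_t[:] views used in the real file:

# Sketch only; not code from this commit.
from __future__ import annotations

import cython
import numpy as np


def fill_squares(n: cython.Py_ssize_t):
    # Keep the ndarray (buffer owner) and the typed memoryview (fast access)
    # as two separate variables over the same memory.
    out_obj = np.zeros(n, dtype=np.int64)
    out: cython.longlong[:] = out_obj

    i: cython.Py_ssize_t
    for i in range(n):
        out[i] = i * i  # element writes go through the typed view

    # Return the ndarray itself; no np.asarray() round-trip on the view.
    return out_obj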
35 changes: 13 additions & 22 deletions src/sisl/geom/neighs/tests/test_neighfinder.py
@@ -1,38 +1,31 @@
from functools import partial

import numpy as np
import pytest

from sisl import Geometry
from sisl.geom import NeighFinder

pytestmark = [pytest.mark.neighbour]

@pytest.fixture(scope="module", params=[True, False])
def sphere_overlap(request):
return request.param

tr_fixture = partial(pytest.fixture, scope="module", params=[True, False])

@pytest.fixture(scope="module", params=[True, False])
def multiR(request):
return request.param


@pytest.fixture(scope="module", params=[True, False])
def self_interaction(request):
def request_param(request):
return request.param


@pytest.fixture(scope="module", params=[False, True])
def post_setup(request):
return request.param


@pytest.fixture(scope="module", params=[False, True])
def pbc(request):
return request.param
sphere_overlap = tr_fixture()(request_param)
multiR = tr_fixture()(request_param)
self_interaction = tr_fixture()(request_param)
post_setup = tr_fixture()(request_param)
pbc = tr_fixture()(request_param)


@pytest.fixture(scope="module")
def neighfinder(sphere_overlap, multiR):
geom = Geometry([[0, 0, 0], [1.2, 0, 0], [9, 0, 0]], lattice=np.diag([10, 10, 7]))
geom = Geometry([[0, 0, 0], [1.2, 0, 0], [9, 0, 0]], lattice=[10, 10, 7])

R = np.array([1.1, 1.5, 1.2]) if multiR else 1.5

@@ -75,7 +68,7 @@ def expected_neighs(sphere_overlap, multiR, self_interaction, pbc):


def test_neighfinder_setup(sphere_overlap, multiR, post_setup):
geom = Geometry([[0, 0, 0], [1, 0, 0]], lattice=np.diag([10, 10, 7]))
geom = Geometry([[0, 0, 0], [1, 0, 0]], lattice=[10, 10, 7])

R = np.array([0.9, 1.5]) if multiR else 1.5

@@ -243,7 +236,7 @@ def test_R_too_big(pbc):
"""Test the case when R is so big that it needs a bigger bin
than the unit cell."""

geom = Geometry([[0, 0, 0], [1, 0, 0]], lattice=np.diag([2, 10, 10]))
geom = Geometry([[0, 0, 0], [1, 0, 0]], lattice=[2, 10, 10])

neighfinder = NeighFinder(geom, R=1.5)

@@ -265,7 +258,5 @@ def test_R_too_big(pbc):
if pbc:
expected_neighs.insert(0, [0, 1, -1, 0, 0])

print(neighs, expected_neighs)

assert neighs.shape == (len(expected_neighs), 5)
assert np.all(neighs == expected_neighs)
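
The rewritten test module replaces five copy-pasted boolean fixtures with a small factory: functools.partial pre-binds the decorator arguments, each assignment applies the resulting decorator to the shared request_param function, and pytest registers every fixture under the module-level name it is assigned to. A standalone sketch of the same pattern, with made-up fixture and test names:

# Sketch of the fixture-factory pattern; names are made up.
from functools import partial

import pytest

# tr_fixture() is simply pytest.fixture(scope="module", params=[True, False]).
tr_fixture = partial(pytest.fixture, scope="module", params=[True, False])


def request_param(request):
    return request.param


# Each call wraps the same plain function into an independent fixture.
flag_a = tr_fixture()(request_param)
flag_b = tr_fixture()(request_param)


def test_flags(flag_a, flag_b):
    # Parametrization runs this for all four (flag_a, flag_b) combinations.
    assert isinstance(flag_a, bool)
    assert isinstance(flag_b, bool)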
