Merge pull request #2319 from mikedh/test/iteration
Release: Iteration Tests
mikedh authored Nov 28, 2024
2 parents 9dbb10c + 21cbdb6 commit 2fcb2b2
Showing 20 changed files with 191 additions and 71 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -5,7 +5,7 @@ requires = ["setuptools >= 61.0", "wheel"]
[project]
name = "trimesh"
requires-python = ">=3.8"
version = "4.5.2"
version = "4.5.3"
authors = [{name = "Michael Dawson-Haggerty", email = "[email protected]"}]
license = {file = "LICENSE.md"}
description = "Import, export, process, analyze and view triangular meshes."
65 changes: 65 additions & 0 deletions tests/test_iteration.py
@@ -0,0 +1,65 @@
from functools import reduce

import numpy as np

from trimesh.iteration import chain, reduce_cascade


def test_reduce_cascade():
# the multiply will explode quickly past the integer maximum
def both(operation, items):
"""
Run our cascaded reduce and regular reduce.
"""

b = reduce_cascade(operation, items)

if len(items) > 0:
assert b == reduce(operation, items)

return b

for i in range(20):
data = np.arange(i)
c = both(items=data, operation=lambda a, b: a + b)

if i == 0:
assert c is None
else:
assert c == np.arange(i).sum()

# try a multiply
data = np.arange(i)
c = both(items=data, operation=lambda a, b: a * b)

if i == 0:
assert c is None
else:
assert c == np.prod(data)

# try a multiply
data = np.arange(i)[1:]
c = both(items=data, operation=lambda a, b: a * b)
if i <= 1:
assert c is None
else:
assert c == np.prod(data)

data = ["a", "b", "c", "d", "e", "f", "g"]
print("# reduce_pairwise\n-----------")
r = both(operation=lambda a, b: a + b, items=data)

assert r == "abcdefg"


def test_chain():
# should work on iterables the same as `itertools.chain`
assert np.allclose(chain([1, 3], [4]), [1, 3, 4])
# should work with non-iterable single values
assert np.allclose(chain([1, 3], 4), [1, 3, 4])
# should filter out `None` arguments
assert np.allclose(chain([1, 3], None, 4, None), [1, 3, 4])


if __name__ == "__main__":
test_reduce_cascade()
6 changes: 6 additions & 0 deletions tests/test_remesh.py
@@ -86,13 +86,19 @@ def test_sub(self):
meshes = [g.trimesh.creation.box(), g.trimesh.creation.icosphere()]

for m in meshes:
# set vertex positions as attributes for trivial check after subdivision
# make sure we're copying the array to avoid in-place check
m.vertex_attributes = {"pos": g.np.array(m.vertices) + 1.0}

s = m.subdivide(face_index=[0, len(m.faces) - 1])
# shouldn't have subdivided in-place
assert len(s.faces) > len(m.faces)
# area should be the same
assert g.np.isclose(m.area, s.area)
# volume should be the same
assert g.np.isclose(m.volume, s.volume)
# position attributes and actual vertices should be the same
assert g.np.allclose(s.vertex_attributes["pos"], s.vertices + 1.0)

max_edge = m.scale / 50
s = m.subdivide_to_size(max_edge=max_edge)
10 changes: 0 additions & 10 deletions tests/test_util.py
@@ -75,16 +75,6 @@ def test_stack(self):
# this is what should happen
pass

def test_chain(self):
from trimesh.util import chain

# should work on iterables the same as `itertools.chain`
assert g.np.allclose(chain([1, 3], [4]), [1, 3, 4])
# should work with non-iterable single values
assert g.np.allclose(chain([1, 3], 4), [1, 3, 4])
# should filter out `None` arguments
assert g.np.allclose(chain([1, 3], None, 4, None), [1, 3, 4])

def test_has_module(self):
# built-in
assert g.trimesh.util.has_module("collections")
21 changes: 17 additions & 4 deletions tests/test_voxel.py
@@ -67,16 +67,29 @@ def test_marching(self):
g.log.warning("no skimage, skipping marching cubes test")
return

march = g.trimesh.voxel.ops.matrix_to_marching_cubes

# make sure offset is correct
matrix = g.np.ones((3, 3, 3), dtype=bool)
mesh = g.trimesh.voxel.ops.matrix_to_marching_cubes(matrix=matrix)
mesh = march(matrix=matrix)
assert mesh.is_watertight

mesh = g.trimesh.voxel.ops.matrix_to_marching_cubes(matrix=matrix).apply_scale(
3.0
)
mesh = march(matrix=matrix).apply_scale(3.0)
assert mesh.is_watertight

# try an array full of a small number
matrix = g.np.full((3, 3, 3), 0.01, dtype=g.np.float64)
# set some to zero
matrix[:2, :2, :2] = 0.0

a = march(matrix)
assert a.is_watertight

# but above the threshold it should be not empty
b = march(matrix, threshold=-0.001)
assert b.is_watertight
assert b.volume > a.volume

def test_marching_points(self):
"""
Try marching cubes on points
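
For reference beyond the diff shown above, here is a minimal usage sketch of the `threshold` keyword the new test exercises on `trimesh.voxel.ops.matrix_to_marching_cubes`. Treating values above the threshold as filled is an assumption inferred from the test, so thresholds are passed explicitly rather than relying on the default; marching cubes also needs scikit-image installed, as the test notes.

import numpy as np
import trimesh

# float occupancy grid where only one corner block is zero
matrix = np.full((3, 3, 3), 0.01, dtype=np.float64)
matrix[:2, :2, :2] = 0.0

# with a threshold between the two values only the 0.01 cells are meshed
partial = trimesh.voxel.ops.matrix_to_marching_cubes(matrix, threshold=0.005)

# with a threshold below every value the whole grid is meshed
full = trimesh.voxel.ops.matrix_to_marching_cubes(matrix, threshold=-0.001)

assert partial.is_watertight and full.is_watertight
assert full.volume > partial.volume
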
5 changes: 2 additions & 3 deletions trimesh/__init__.py
@@ -82,9 +82,8 @@


__all__ = [
"PointCloud",
"Geometry",
"Trimesh",
"PointCloud",
"Scene",
"Trimesh",
"__version__",
@@ -103,8 +102,8 @@
"graph",
"grouping",
"inertia",
"iteration",
"intersections",
"iteration",
"load",
"load_mesh",
"load_path",
8 changes: 5 additions & 3 deletions trimesh/exchange/binvox.py
@@ -201,7 +201,9 @@ def voxel_from_binvox(rle_data, shape, translate=None, scale=1.0, axis_order="xz
elif axis_order is None or axis_order == "xyz":
encoding = encoding.reshape(shape)
else:
raise ValueError("Invalid axis_order '%s': must be None, 'xyz' or 'xzy'")
raise ValueError(
"Invalid axis_order '%s': must be None, 'xyz' or 'xzy'", axis_order
)

assert encoding.shape == shape

@@ -423,7 +425,7 @@ def __init__(
)

if dimension > 1024 and not exact:
raise ValueError("Maximum dimension using exact is 1024, got %d" % dimension)
raise ValueError("Maximum dimension using exact is 1024, got %d", dimension)
if file_type not in Binvoxer.SUPPORTED_OUTPUT_TYPES:
raise ValueError(
f"file_type {file_type} not in set of supported output types {Binvoxer.SUPPORTED_OUTPUT_TYPES!s}"
@@ -471,7 +473,7 @@ def __init__(
times = np.log2(downsample_factor)
if int(times) != times:
raise ValueError(
"downsample_factor must be a power of 2, got %d" % downsample_factor
"downsample_factor must be a power of 2, got %d", downsample_factor
)
args.extend(("-down",) * int(times))
if downsample_threshold is not None:
4 changes: 2 additions & 2 deletions trimesh/exchange/gltf.py
@@ -1029,14 +1029,14 @@ def _build_accessor(array):
if vec_length > 4:
raise ValueError("The GLTF spec does not support vectors larger than 4")
if vec_length > 1:
data_type = "VEC%d" % vec_length
data_type = f"VEC{int(vec_length)}"
else:
data_type = "SCALAR"

if len(shape) == 3:
if shape[2] not in [2, 3, 4]:
raise ValueError("Matrix types must have 4, 9 or 16 components")
data_type = "MAT%d" % shape[2]
data_type = f"MAT{int(shape[2])}"

# get the array data type as a str stripping off endian
lookup = array.dtype.str.lstrip("<>")
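
For context, a standalone sketch of the accessor type-name logic this hunk touches, with the `%d` formatting replaced by f-strings. Deriving `vec_length` from the second dimension of the array shape is an assumption here, since that line sits outside the shown hunk.

def accessor_data_type(shape) -> str:
    # sketch: map a numpy array shape to a GLTF accessor type string
    vec_length = shape[1] if len(shape) > 1 else 1  # assumed definition
    if vec_length > 4:
        raise ValueError("The GLTF spec does not support vectors larger than 4")
    data_type = f"VEC{int(vec_length)}" if vec_length > 1 else "SCALAR"
    if len(shape) == 3:
        if shape[2] not in (2, 3, 4):
            raise ValueError("Matrix types must have 4, 9 or 16 components")
        data_type = f"MAT{int(shape[2])}"
    return data_type

# e.g. (100, 3) -> "VEC3", (100,) -> "SCALAR", (4, 4, 4) -> "MAT4"
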
2 changes: 1 addition & 1 deletion trimesh/interfaces/__init__.py
@@ -1,4 +1,4 @@
from . import blender, gmsh

# add to __all__ as per pep8
__all__ = ["gmsh", "blender"]
__all__ = ["blender", "gmsh"]
15 changes: 10 additions & 5 deletions trimesh/iteration.py
@@ -1,4 +1,4 @@
import numpy as np
from math import log2

from .typed import Any, Callable, Iterable, List, NDArray, Sequence, Union

@@ -54,12 +54,17 @@ def reduce_cascade(operation: Callable, items: Union[Sequence, NDArray]):
# skip the loop overhead for a single pair
return operation(items[0], items[1])

for _ in range(int(1 + np.log2(len(items)))):
for _ in range(int(1 + log2(len(items)))):
results = []
for i in np.arange(len(items) // 2) * 2:

# loop over pairs of items.
items_mod = len(items) % 2
for i in range(0, len(items) - items_mod, 2):
results.append(operation(items[i], items[i + 1]))

if len(items) % 2:
# if we had a non-even number of items it will have been
# skipped by the loop so append it to our list
if items_mod != 0:
results.append(items[-1])

items = results
@@ -117,7 +122,7 @@ def chain(*args: Union[Iterable[Any], Any, None]) -> List[Any]:
# extend if it's a sequence, otherwise append
[
chained.extend(a)
if (hasattr(a, "__iter__") and not isinstance(a, str))
if (hasattr(a, "__iter__") and not isinstance(a, (str, bytes)))
else chained.append(a)
for a in args
if a is not None
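
The rewritten loop pairs adjacent items on each pass and carries a trailing odd item forward, so an associative operation sees a balanced tree of calls rather than a left fold. As a standalone sketch of that ordering (not part of the diff), using the same string data as the new test:

from functools import reduce
from operator import add

from trimesh.iteration import reduce_cascade

items = ["a", "b", "c", "d", "e", "f", "g"]
# pass 1: "ab", "cd", "ef", "g"   (odd trailing item carried forward)
# pass 2: "abcd", "efg"
# pass 3: "abcdefg"
assert reduce_cascade(add, items) == reduce(add, items) == "abcdefg"
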
29 changes: 17 additions & 12 deletions trimesh/path/exchange/load.py
@@ -1,58 +1,63 @@
import os

from ... import util
from ...exchange.ply import load_ply
from ...typed import Optional
from ..path import Path
from . import misc
from .dxf import _dxf_loaders
from .svg_io import svg_to_path


def load_path(file_obj, file_type=None, **kwargs):
def load_path(file_obj, file_type: Optional[str] = None, **kwargs):
"""
Load a file to a Path file_object.
Parameters
-----------
file_obj : One of the following:
file_obj
Accepts many types:
- Path, Path2D, or Path3D file_objects
- open file file_object (dxf or svg)
- file name (dxf or svg)
- shapely.geometry.Polygon
- shapely.geometry.MultiLineString
- dict with kwargs for Path constructor
- (n,2,(2|3)) float, line segments
file_type : str
- `(n, 2, (2|3)) float` line segments
file_type
Type of file is required if file
file_object passed.
object is passed.
Returns
---------
path : Path, Path2D, Path3D file_object
Data as a native trimesh Path file_object
Data as a native trimesh Path file_object
"""
# avoid a circular import
from ...exchange.load import load_kwargs

if isinstance(file_type, str):
# we accept full file names here so make sure we
file_type = util.split_extension(file_type).lower()

# record how long we took
tic = util.now()

if isinstance(file_obj, Path):
# we have been passed a Path file_object so
# do nothing and return the passed file_object
# we have been passed a file object that is already a loaded
# trimesh.path.Path object so do nothing and return
return file_obj
elif util.is_file(file_obj):
# for open file file_objects use loaders
if file_type == "ply":
# we cannot register this exporter to path_loaders since this is already reserved by TriMesh in ply format in trimesh.load()
# we cannot register this exporter to path_loaders
# since this is already reserved for 3D values in `trimesh.load`
kwargs.update(load_ply(file_obj, file_type=file_type))
else:
kwargs.update(path_loaders[file_type](file_obj, file_type=file_type))
elif isinstance(file_obj, str):
# strings passed are evaluated as file file_objects
with open(file_obj, "rb") as f:
# get the file type from the extension
file_type = os.path.splitext(file_obj)[-1][1:].lower()
file_type = util.split_extension(file_obj).lower()
if file_type == "ply":
# we cannot register this exporter to path_loaders since this is already reserved by TriMesh in ply format in trimesh.load()
kwargs.update(load_ply(f, file_type=file_type))
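
As a usage note outside the diff, a minimal sketch of the documented `(n, 2, 2)` line-segment input to `load_path`; returning a `Path2D` for planar segments is the assumption here, and the soft dependencies for `trimesh.path` (e.g. shapely) are assumed to be installed.

import numpy as np
import trimesh

# (n, 2, 2) float line segments, one of the inputs the docstring lists
segments = np.array([[[0.0, 0.0], [1.0, 0.0]],
                     [[1.0, 0.0], [1.0, 1.0]]])

path = trimesh.load_path(segments)
assert isinstance(path, trimesh.path.Path2D)
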
4 changes: 2 additions & 2 deletions trimesh/path/packing.py
@@ -8,7 +8,7 @@
import numpy as np

from ..constants import log, tol
from ..typed import Integer, Number, Optional
from ..typed import ArrayLike, Integer, NDArray, Number, Optional, float64
from ..util import allclose, bounds_tree

# floating point zero
@@ -692,7 +692,7 @@ def visualize(extents, bounds):
return Scene(meshes)


def roll_transform(bounds, extents):
def roll_transform(bounds: ArrayLike, extents: ArrayLike) -> NDArray[float64]:
"""
Packing returns rotations with integer "roll" which
needs to be converted into a homogeneous rotation matrix.
6 changes: 4 additions & 2 deletions trimesh/path/util.py
@@ -3,14 +3,16 @@
from ..util import is_ccw # NOQA


def concatenate(paths):
def concatenate(paths, **kwargs):
"""
Concatenate multiple paths into a single path.
Parameters
-------------
paths : (n,) Path
Path objects to concatenate
kwargs
Passed through to the path constructor
Returns
-------------
@@ -52,6 +54,6 @@ def concatenate(paths):
# generate the single new concatenated path
# use input types so we don't have circular imports
concat = type(path)(
metadata=metadata, entities=entities, vertices=np.vstack(vertices)
metadata=metadata, entities=entities, vertices=np.vstack(vertices), **kwargs
)
return concat
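
To illustrate the new pass-through outside the diff, a sketch that forwards a keyword to the constructor of the concatenated path; `process=False` is a hypothetical choice of keyword here, standing in for whatever the path constructor actually accepts.

import numpy as np
import trimesh
from trimesh.path.util import concatenate

a = trimesh.load_path(np.array([[[0.0, 0.0], [1.0, 0.0]]]))
b = trimesh.load_path(np.array([[[1.0, 0.0], [1.0, 1.0]]]))

# kwargs are now forwarded to the constructor of the concatenated path
combined = concatenate([a, b], process=False)
assert len(combined.entities) == len(a.entities) + len(b.entities)
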
6 changes: 1 addition & 5 deletions trimesh/remesh.py
@@ -90,11 +90,7 @@ def subdivide(
if vertex_attributes is not None:
new_attributes = {}
for key, values in vertex_attributes.items():
attr_tris = values[faces_subset]
attr_mid = np.vstack(
[attr_tris[:, g, :].mean(axis=1) for g in [[0, 1], [1, 2], [2, 0]]]
)
attr_mid = attr_mid[unique]
attr_mid = values[edges[unique]].mean(axis=1)
new_attributes[key] = np.vstack((values, attr_mid))
return new_vertices, new_faces, new_attributes

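As a standalone sketch of the replacement line: the attribute for each new midpoint vertex is now just the mean of its edge's two endpoint attributes, which is why the new test in tests/test_remesh.py can check `pos` against `vertices + 1.0`. The `edges` and `unique` arrays below are stand-ins for the ones the real function already has in scope.

import numpy as np

# one attribute row per vertex, e.g. the shifted positions used in the test
values = np.array([[1.0], [2.0], [4.0]])

# unique edges that receive a midpoint vertex during subdivision
edges = np.array([[0, 1], [1, 2], [2, 0]])
unique = np.arange(len(edges))

# attribute of each new midpoint: mean of its edge's endpoint attributes
attr_mid = values[edges[unique]].mean(axis=1)
new_values = np.vstack((values, attr_mid))

assert np.allclose(attr_mid[:, 0], [1.5, 3.0, 2.5])
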