vedo.core

Base classes providing functionality to different vedo objects.

   1#!/usr/bin/env python3
   2# -*- coding: utf-8 -*-
   3import numpy as np
   4from typing import List, Union, Any
   5from typing_extensions import Self
   6
   7import vedo.vtkclasses as vtki
   8
   9import vedo
  10from vedo import colors
  11from vedo import utils
  12from vedo.transformations import LinearTransform, NonLinearTransform
  13
  14
  15__docformat__ = "google"
  16
  17__doc__ = """
  18Base classes providing functionality to different vedo objects.
  19
  20![](https://vedo.embl.es/images/feats/algorithms_illustration.png)
  21"""
  22
  23__all__ = [
  24    "DataArrayHelper",
  25    "CommonAlgorithms",
  26    "PointAlgorithms",
  27    "VolumeAlgorithms",
  28]
  29
  30
  31###############################################################################
  32# warnings = dict(
  33#     func_name=(
  34#         "WARNING: some message"
  35#         " (silence this with vedo.core.warnings['func_name']=False)"
  36#     ),
  37# )
  38# ### USAGE
  39# def func_name(self):
  40#     """Obsolete, use ... instead."""
  41#         if warnings["func_name"]:
  42#             colors.printc(warnings["func_name"], c="y")
  43#             warnings["func_name"] = ""
  44#         return
  45
  46
  47###############################################################################
  48class DataArrayHelper:
  49    # Internal use only.
  50    # Helper class to manage data associated with either points (or vertices) or cells (or faces).
  51    def __init__(self, obj, association):
  52
  53        self.obj = obj
  54        self.association = association
  55
  56    def __getitem__(self, key):
  57
  58        if self.association == 0:
  59            data = self.obj.dataset.GetPointData()
  60
  61        elif self.association == 1:
  62            data = self.obj.dataset.GetCellData()
  63
  64        elif self.association == 2:
  65            data = self.obj.dataset.GetFieldData()
  66
  67            varr = data.GetAbstractArray(key)
  68            if isinstance(varr, vtki.vtkStringArray):
  69                if isinstance(key, int):
  70                    key = data.GetArrayName(key)
  71                n = varr.GetNumberOfValues()
  72                narr = [varr.GetValue(i) for i in range(n)]
  73                return narr
  74                ###########
  75
  76        else:
  77            raise RuntimeError()
  78
  79        if isinstance(key, int):
  80            key = data.GetArrayName(key)
  81
  82        arr = data.GetArray(key)
  83        if not arr:
  84            return None
  85        return utils.vtk2numpy(arr)
  86
  87    def __setitem__(self, key, input_array):
  88
  89        if self.association == 0:
  90            data = self.obj.dataset.GetPointData()
  91            n = self.obj.dataset.GetNumberOfPoints()
  92            self.obj.mapper.SetScalarModeToUsePointData()
  93
  94        elif self.association == 1:
  95            data = self.obj.dataset.GetCellData()
  96            n = self.obj.dataset.GetNumberOfCells()
  97            self.obj.mapper.SetScalarModeToUseCellData()
  98
  99        elif self.association == 2:
 100            data = self.obj.dataset.GetFieldData()
 101            if not utils.is_sequence(input_array):
 102                input_array = [input_array]
 103
 104            if isinstance(input_array[0], str):
 105                varr = vtki.vtkStringArray()
 106                varr.SetName(key)
 107                varr.SetNumberOfComponents(1)
 108                varr.SetNumberOfTuples(len(input_array))
 109                for i, iarr in enumerate(input_array):
 110                    if isinstance(iarr, np.ndarray):
 111                        iarr = iarr.tolist()  # better format
 112                        # Note: a string k can be converted to numpy with
 113                        # import json; k = np.array(json.loads(k))
 114                    varr.InsertValue(i, str(iarr))
 115            else:
 116                try:
 117                    varr = utils.numpy2vtk(input_array, name=key)
 118                except TypeError as e:
 119                    vedo.logger.error(
 120                        f"cannot create metadata with input object:\n"
 121                        f"{input_array}"
 122                        f"\n\nAllowed content examples are:\n"
 123                        f"- flat list of strings ['a','b', 1, [1,2,3], ...]"
 124                        f" (first item must be a string in this case)\n"
 125                        f"  hint: use k = np.array(json.loads(k)) to convert strings\n"
 126                        f"- numpy arrays of any shape"
 127                    )
 128                    raise e
 129
 130            data.AddArray(varr)
 131            return  ############
 132
 133        else:
 134            raise RuntimeError()
 135
 136        if len(input_array) != n:
 137            vedo.logger.error(
 138                f"Error in point/cell data: length of input {len(input_array)}"
 139                f" !=  {n} nr. of elements"
 140            )
 141            raise RuntimeError()
 142
 143        input_array = np.asarray(input_array)
 144        varr = utils.numpy2vtk(input_array, name=key)
 145        data.AddArray(varr)
 146
 147        if len(input_array.shape) == 1:  # scalars
 148            data.SetActiveScalars(key)
 149            try:  # could be a volume mapper
 150                self.obj.mapper.SetScalarRange(data.GetScalars().GetRange())
 151            except AttributeError:
 152                pass
 153        elif len(input_array.shape) == 2 and input_array.shape[1] == 3:  # vectors
 154            if key.lower() == "normals":
 155                data.SetActiveNormals(key)
 156            else:
 157                data.SetActiveVectors(key)
 158
 159    def keys(self) -> List[str]:
 160        """Return the list of available data array names"""
 161        if self.association == 0:
 162            data = self.obj.dataset.GetPointData()
 163        elif self.association == 1:
 164            data = self.obj.dataset.GetCellData()
 165        elif self.association == 2:
 166            data = self.obj.dataset.GetFieldData()
 167        arrnames = []
 168        for i in range(data.GetNumberOfArrays()):
 169            name = ""
 170            if self.association == 2:
 171                name = data.GetAbstractArray(i).GetName()
 172            else:
 173                iarr = data.GetArray(i)
 174                if iarr:
 175                    name = iarr.GetName()
 176            if name:
 177                arrnames.append(name)
 178        return arrnames
 179
 180    def items(self) -> List:
 181        """Return the list of available data array `(names, values)`."""
 182        if self.association == 0:
 183            data = self.obj.dataset.GetPointData()
 184        elif self.association == 1:
 185            data = self.obj.dataset.GetCellData()
 186        elif self.association == 2:
 187            data = self.obj.dataset.GetFieldData()
 188        arrnames = []
 189        for i in range(data.GetNumberOfArrays()):
 190            if self.association == 2:
 191                name = data.GetAbstractArray(i).GetName()
 192            else:
 193                name = data.GetArray(i).GetName()
 194            if name:
 195                arrnames.append((name, self[name]))
 196        return arrnames
 197
 198    def todict(self) -> dict:
 199        """Return a dictionary of the available data arrays."""
 200        return dict(self.items())
 201
 202    def rename(self, oldname: str, newname: str) -> None:
 203        """Rename an array"""
 204        if self.association == 0:
 205            varr = self.obj.dataset.GetPointData().GetArray(oldname)
 206        elif self.association == 1:
 207            varr = self.obj.dataset.GetCellData().GetArray(oldname)
 208        elif self.association == 2:
 209            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
 210        if varr:
 211            varr.SetName(newname)
 212        else:
 213            vedo.logger.warning(
 214                f"Cannot rename non existing array {oldname} to {newname}"
 215            )
 216
 217    def remove(self, key: Union[int, str]) -> None:
 218        """Remove a data array by name or number"""
 219        if self.association == 0:
 220            self.obj.dataset.GetPointData().RemoveArray(key)
 221        elif self.association == 1:
 222            self.obj.dataset.GetCellData().RemoveArray(key)
 223        elif self.association == 2:
 224            self.obj.dataset.GetFieldData().RemoveArray(key)
 225
 226    def clear(self) -> None:
 227        """Remove all data associated to this object"""
 228        if self.association == 0:
 229            data = self.obj.dataset.GetPointData()
 230        elif self.association == 1:
 231            data = self.obj.dataset.GetCellData()
 232        elif self.association == 2:
 233            data = self.obj.dataset.GetFieldData()
 234        for i in reversed(range(data.GetNumberOfArrays())):  # reversed so removals do not shift indices
 235            if self.association == 2:
 236                name = data.GetAbstractArray(i).GetName()
 237            else:
 238                name = data.GetArray(i).GetName()
 239            data.RemoveArray(name)
 240
 241    def select(self, key: Union[int, str]) -> Any:
 242        """Select one specific array by its name to make it the `active` one."""
 243        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
 244        # and NOT mapped through the lookup table, while everything else is.
 245        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
 246        # types are treated as colors with values in the range 0-255
 247        # and floating types are treated as colors with values in the range 0.0-1.0.
 248        # Setting ColorModeToMapScalars means that all scalar data will be mapped
 249        # through the lookup table.
 250        # (Note that for multi-component scalars, the particular component
 251        # to use for mapping can be specified using the SelectColorArray() method.)
 252        if self.association == 0:
 253            data = self.obj.dataset.GetPointData()
 254            self.obj.mapper.SetScalarModeToUsePointData()
 255        else:
 256            data = self.obj.dataset.GetCellData()
 257            self.obj.mapper.SetScalarModeToUseCellData()
 258
 259        if isinstance(key, int):
 260            key = data.GetArrayName(key)
 261
 262        arr = data.GetArray(key)
 263        if not arr:
 264            return self.obj
 265
 266        nc = arr.GetNumberOfComponents()
 267        # print("GetNumberOfComponents", nc)
 268        if nc == 1:
 269            data.SetActiveScalars(key)
 270        elif nc == 2:
 271            data.SetTCoords(arr)
 272        elif nc in (3, 4):
 273            if "rgb" in key.lower(): # type: ignore
 274                data.SetActiveScalars(key)
 275                try:
 276                    # could be a volume mapper
 277                    self.obj.mapper.SetColorModeToDirectScalars()
 278                    data.SetActiveVectors(None) # need this to fix bug in #1066
 279                    # print("SetColorModeToDirectScalars for", key)
 280                except AttributeError:
 281                    pass
 282            else:
 283                data.SetActiveVectors(key)
 284        elif nc == 9:
 285            data.SetActiveTensors(key)
 286        else:
 287            vedo.logger.error(f"Cannot select array {key} with {nc} components")
 288            return self.obj
 289
 290        try:
 291            # could be a volume mapper
 292            self.obj.mapper.SetArrayName(key)
 293            self.obj.mapper.ScalarVisibilityOn()
 294        except AttributeError:
 295            pass
 296
 297        return self.obj
 298
 299    def select_texture_coords(self, key: Union[int,str]) -> Any:
 300        """Select one specific array to be used as texture coordinates."""
 301        if self.association == 0:
 302            data = self.obj.dataset.GetPointData()
 303        else:
 304            vedo.logger.warning("texture coordinates are only available for point data")
 305            return self.obj
 306
 307        if isinstance(key, int):
 308            key = data.GetArrayName(key)
 309        data.SetTCoords(data.GetArray(key))
 310        return self.obj
 311
 312    def select_normals(self, key: Union[int,str]) -> Any:
 313        """Select one specific normal array by its name to make it the "active" one."""
 314        if self.association == 0:
 315            data = self.obj.dataset.GetPointData()
 316            self.obj.mapper.SetScalarModeToUsePointData()
 317        else:
 318            data = self.obj.dataset.GetCellData()
 319            self.obj.mapper.SetScalarModeToUseCellData()
 320
 321        if isinstance(key, int):
 322            key = data.GetArrayName(key)
 323        data.SetActiveNormals(key)
 324        return self.obj
 325
 326    def print(self, **kwargs) -> None:
 327        """Print the array names available to terminal"""
 328        colors.printc(self.keys(), **kwargs)
 329
 330    def __repr__(self) -> str:
 331        """Representation"""
 332
 333        def _get_str(pd, header):
 334            out = f"\x1b[2m\x1b[1m\x1b[7m{header}"
 335            if pd.GetNumberOfArrays():
 336                if self.obj.name:
 337                    out += f" in {self.obj.name}"
 338                out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m"
 339                for i in range(pd.GetNumberOfArrays()):
 340                    varr = pd.GetArray(i)
 341                    out += f"\n\x1b[1m\x1b[4mArray name    : {varr.GetName()}\x1b[0m"
 342                    out += "\nindex".ljust(15) + f": {i}"
 343                    t = varr.GetDataType()
 344                    if t in vtki.array_types:
 345                        out += "\ntype".ljust(15)
 346                        out += f": {vtki.array_types[t]}"
 347                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
 348                    out += "\nshape".ljust(15) + f": {shape}"
 349                    out += "\nrange".ljust(15) + f": {np.array(varr.GetRange())}"
 350                    out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}"
 351                    out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}"
 352                    out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB"
 353            else:
 354                out += " is empty.\x1b[0m"
 355            return out
 356
 357        if self.association == 0:
 358            out = _get_str(self.obj.dataset.GetPointData(), "Point Data")
 359        elif self.association == 1:
 360            out = _get_str(self.obj.dataset.GetCellData(), "Cell Data")
 361        elif self.association == 2:
 362            pd = self.obj.dataset.GetFieldData()
            out = "\x1b[2m\x1b[1m\x1b[7mMeta Data is empty.\x1b[0m"  # default when there are no entries
 363            if pd.GetNumberOfArrays():
 364                out = "\x1b[2m\x1b[1m\x1b[7mMeta Data"
 365                if self.obj.name:
 366                    out += f" in {self.obj.name}"
 367                out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m"
 368                for i in range(pd.GetNumberOfArrays()):
 369                    varr = pd.GetAbstractArray(i)
 370                    out += f"\n\x1b[1m\x1b[4mEntry name    : {varr.GetName()}\x1b[0m"
 371                    out += "\nindex".ljust(15) + f": {i}"
 372                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
 373                    out += "\nshape".ljust(15) + f": {shape}"
 374
 375        return out
 376
 377
 378###############################################################################
 379class CommonAlgorithms:
 380    """Common algorithms."""
 381
 382    @property
 383    def pointdata(self):
 384        """
 385        Create and/or return a `numpy.array` associated to points (vertices).
 386        A data array can be indexed either as a string or by an integer number.
 387        E.g.:  `myobj.pointdata["arrayname"]`
 388
 389        Usage:
 390
 391            `myobj.pointdata.keys()` to return the available data array names
 392
 393            `myobj.pointdata.select(name)` to make this array the active one
 394
 395            `myobj.pointdata.remove(name)` to remove this array
 396        """
 397        return DataArrayHelper(self, 0)
 398
 399    @property
 400    def celldata(self):
 401        """
 402        Create and/or return a `numpy.array` associated to cells (faces).
 403        A data array can be indexed either as a string or by an integer number.
 404        E.g.:  `myobj.celldata["arrayname"]`
 405
 406        Usage:
 407
 408            `myobj.celldata.keys()` to return the available data array names
 409
 410            `myobj.celldata.select(name)` to make this array the active one
 411
 412            `myobj.celldata.remove(name)` to remove this array
 413        """
 414        return DataArrayHelper(self, 1)
 415
 416    @property
 417    def metadata(self):
 418        """
 419        Create and/or return a `numpy.array` of field (meta) data, associated with neither points nor cells.
 420        A data array can be indexed either as a string or by an integer number.
 421        E.g.:  `myobj.metadata["arrayname"]`
 422
 423        Usage:
 424
 425            `myobj.metadata.keys()` to return the available data array names
 426
 427            `myobj.metadata.select(name)` to make this array the active one
 428
 429            `myobj.metadata.remove(name)` to remove this array
 430        """
 431        return DataArrayHelper(self, 2)
 432
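For example, a minimal sketch of how the three accessors are typically used together (assuming a simple `Sphere` mesh; the array names are arbitrary):

```python
import numpy as np
import vedo

msh = vedo.Sphere(res=24)
msh.pointdata["height"] = msh.vertices[:, 2]      # one value per vertex
msh.celldata["cellid"] = np.arange(msh.ncells)    # one value per cell
msh.metadata["note"] = ["arbitrary field data"]   # not tied to points or cells
print(msh.pointdata.keys(), msh.celldata.keys(), msh.metadata.keys())
msh.pointdata.select("height")                    # make it the active scalar array
```
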
 433    def memory_address(self) -> int:
 434        """
 435        Return a unique memory address integer which may serve as the ID of the
 436        object, or be passed to C++ code.
 437        """
 438        # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
 439        # https://github.com/tfmoraes/polydata_connectivity
 440        return int(self.dataset.GetAddressAsString("")[5:], 16)
 441
 442    def memory_size(self) -> int:
 443        """Return the size in bytes of the object in memory."""
 444        return self.dataset.GetActualMemorySize()
 445
 446    def modified(self) -> Self:
 447        """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
 448        self.dataset.GetPointData().Modified()
 449        scals = self.dataset.GetPointData().GetScalars()
 450        if scals:
 451            scals.Modified()
 452        return self
 453
 454    def box(self, scale=1, padding=0) -> "vedo.Mesh":
 455        """
 456        Return the bounding box as a new `Mesh` object.
 457
 458        Arguments:
 459            scale : (float)
 460                box size can be scaled by a factor
 461            padding : (float, list)
 462                a constant padding can be added (can be a list `[padx,pady,padz]`)
 463        """
 464        b = self.bounds()
 465        if not utils.is_sequence(padding):
 466            padding = [padding, padding, padding]
 467        length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
 468        tol = (length + width + height) / 30000  # useful for boxing text
 469        pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
 470        bx = vedo.shapes.Box(
 471            pos,
 472            length * scale + padding[0],
 473            width  * scale + padding[1],
 474            height * scale + padding[2],
 475            c="gray",
 476        )
 477        try:
 478            pr = vtki.vtkProperty()
 479            pr.DeepCopy(self.properties)
 480            bx.actor.SetProperty(pr)
 481            bx.properties = pr
 482        except (AttributeError, TypeError):
 483            pass
 484        bx.flat().lighting("off").wireframe(True)
 485        return bx
 486    
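A short usage sketch (assuming the bundled `bunny.obj` example mesh):

```python
import vedo

msh = vedo.Mesh(vedo.dataurl + "bunny.obj")
bbox = msh.box(scale=1.1, padding=0.01)   # slightly enlarged wireframe bounding box
vedo.show(msh, bbox, axes=1).close()
```
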
 487    def update_dataset(self, dataset, **kwargs) -> Self:
 488        """Update the dataset of the object with the provided VTK dataset."""
 489        self._update(dataset, **kwargs)
 490        return self
 491
 492    def bounds(self) -> np.ndarray:
 493        """
 494        Get the object bounds.
 495        Returns a numpy array in the format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
 496        """
 497        try:  # this is very slow for large meshes
 498            pts = self.vertices
 499            xmin, ymin, zmin = np.min(pts, axis=0)
 500            xmax, ymax, zmax = np.max(pts, axis=0)
 501            return np.array([xmin, xmax, ymin, ymax, zmin, zmax])
 502        except (AttributeError, ValueError):
 503            return np.array(self.dataset.GetBounds())
 504
 505    def xbounds(self, i=None) -> np.ndarray:
 506        """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
 507        b = self.bounds()
 508        if i is not None:
 509            return b[i]
 510        return np.array([b[0], b[1]])
 511
 512    def ybounds(self, i=None) -> np.ndarray:
 513        """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
 514        b = self.bounds()
 515        if i == 0:
 516            return b[2]
 517        if i == 1:
 518            return b[3]
 519        return np.array([b[2], b[3]])
 520
 521    def zbounds(self, i=None) -> np.ndarray:
 522        """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
 523        b = self.bounds()
 524        if i == 0:
 525            return b[4]
 526        if i == 1:
 527            return b[5]
 528        return np.array([b[4], b[5]])
 529
 530    def diagonal_size(self) -> float:
 531        """Get the length of the diagonal of the bounding box."""
 532        b = self.bounds()
 533        return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)
 534
 535    def average_size(self) -> float:
 536        """
 537        Calculate and return the average size of the object.
 538        This is the mean of the vertex distances from the center of mass.
 539        """
 540        coords = self.vertices
 541        if coords.shape[0] == 0:
 542            return 0.0
 543        cm = np.mean(coords, axis=0)
 544        cc = coords - cm
 545        return np.mean(np.linalg.norm(cc, axis=1))
 546
 547    def center_of_mass(self) -> np.ndarray:
 548        """Get the center of mass of the object."""
 549        if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)):
 550            return np.array(self.dataset.GetCenter())
 551        cmf = vtki.new("CenterOfMass")
 552        cmf.SetInputData(self.dataset)
 553        cmf.Update()
 554        c = cmf.GetCenter()
 555        return np.array(c)
 556
 557    def copy_data_from(self, obj: Any) -> Self:
 558        """Copy all data (point and cell data) from this input object"""
 559        self.dataset.GetPointData().PassData(obj.dataset.GetPointData())
 560        self.dataset.GetCellData().PassData(obj.dataset.GetCellData())
 561        self.pipeline = utils.OperationNode(
 562            "copy_data_from",
 563            parents=[self, obj],
 564            comment=f"{obj.__class__.__name__}",
 565            shape="note",
 566            c="#ccc5b9",
 567        )
 568        return self
 569
 570    def inputdata(self):
 571        """Obsolete, use `.dataset` instead."""
 572        colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y")
 573        return self.dataset
 574
 575    @property
 576    def npoints(self):
 577        """Retrieve the number of points (or vertices)."""
 578        return self.dataset.GetNumberOfPoints()
 579
 580    @property
 581    def nvertices(self):
 582        """Retrieve the number of vertices (or points)."""
 583        return self.dataset.GetNumberOfPoints()
 584
 585    @property
 586    def ncells(self):
 587        """Retrieve the number of cells."""
 588        return self.dataset.GetNumberOfCells()
 589
 590    def cell_centers(self, copy_arrays=False) -> "vedo.Points":
 591        """
 592        Get the coordinates of the cell centers as a `Points` object.
 593
 594        Examples:
 595            - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
 596        """
 597        vcen = vtki.new("CellCenters")
 598        vcen.SetCopyArrays(copy_arrays)
 599        vcen.SetVertexCells(copy_arrays)
 600        vcen.SetInputData(self.dataset)
 601        vcen.Update()
 602        vpts = vedo.Points(vcen.GetOutput())
 603        if copy_arrays:
 604            vpts.copy_properties_from(self)
 605        return vpts
 606
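For instance, a minimal sketch (assuming a `Sphere` mesh):

```python
import vedo

msh = vedo.Sphere(res=12)
centers = msh.cell_centers()          # a vedo.Points object, one point per cell
print(centers.coordinates.shape)      # expected to be (msh.ncells, 3)
```
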
 607    @property
 608    def lines(self):
 609        """
 610        Get lines connectivity ids as a python list
 611        formatted as `[[id0,id1], [id3,id4], ...]`
 612
 613        See also: `lines_as_flat_array()`.
 614        """
 615        # Get cell connectivity ids as a 1D array. The vtk format is:
 616        #    [nids1, id0 ... idn, nids2, id0 ... idm,  etc].
 617        try:
 618            arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData())
 619        except AttributeError:
 620            return np.array([])
 621        i = 0
 622        conn = []
 623        n = len(arr1d)
 624        for _ in range(n):
 625            cell = [arr1d[i + k + 1] for k in range(arr1d[i])]
 626            conn.append(cell)
 627            i += arr1d[i] + 1
 628            if i >= n:
 629                break
 630
 631        return conn  # cannot always make a numpy array of it!
 632
 633    @property
 634    def lines_as_flat_array(self):
 635        """
 636        Get lines connectivity ids as a 1D numpy array.
 637        Format is e.g. [2,  10,20,  3, 10,11,12,  2, 70,80, ...]
 638
 639        See also: `lines()`.
 640        """
 641        try:
 642            return utils.vtk2numpy(self.dataset.GetLines().GetData())
 643        except AttributeError:
 644            return np.array([])
 645
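A small sketch illustrating the two formats (assuming `Line` accepts a list of points and builds a single polyline; printed ids are illustrative):

```python
import vedo

pl = vedo.Line([(0, 0, 0), (1, 0, 0), (2, 1, 0)])  # one polyline through 3 points
print(pl.lines)                  # e.g. [[0, 1, 2]]
print(pl.lines_as_flat_array)    # e.g. [3 0 1 2]
```
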
 646    def mark_boundaries(self) -> Self:
 647        """
 648        Mark cells and vertices if they lie on a boundary.
 649        A new array called `BoundaryCells` is added to the object.
 650        """
 651        mb = vtki.new("MarkBoundaryFilter")
 652        mb.SetInputData(self.dataset)
 653        mb.Update()
 654        self.dataset.DeepCopy(mb.GetOutput())
 655        self.pipeline = utils.OperationNode("mark_boundaries", parents=[self])
 656        return self
 657
 658    def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
 659        """
 660        Find cells that are within the specified bounds.
 661        """
 662        try:
 663            xbounds = list(xbounds.bounds())
 664        except AttributeError:
 665            pass
 666
 667        if len(xbounds) == 6:
 668            bnds = xbounds
 669        else:
 670            bnds = list(self.bounds())
 671            if len(xbounds) == 2:
 672                bnds[0] = xbounds[0]
 673                bnds[1] = xbounds[1]
 674            if len(ybounds) == 2:
 675                bnds[2] = ybounds[0]
 676                bnds[3] = ybounds[1]
 677            if len(zbounds) == 2:
 678                bnds[4] = zbounds[0]
 679                bnds[5] = zbounds[1]
 680
 681        cell_ids = vtki.vtkIdList()
 682        if not self.cell_locator:
 683            self.cell_locator = vtki.new("CellTreeLocator")
 684            self.cell_locator.SetDataSet(self.dataset)
 685            self.cell_locator.BuildLocator()
 686        self.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
 687        cids = []
 688        for i in range(cell_ids.GetNumberOfIds()):
 689            cid = cell_ids.GetId(i)
 690            cids.append(cid)
 691        return np.array(cids)
 692
 693    def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray:
 694        """
 695        Find cells that are intersected by a line segment.
 696        """
 697        cell_ids = vtki.vtkIdList()
 698        if not self.cell_locator:
 699            self.cell_locator = vtki.new("CellTreeLocator")
 700            self.cell_locator.SetDataSet(self.dataset)
 701            self.cell_locator.BuildLocator()
 702        self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
 703        cids = []
 704        for i in range(cell_ids.GetNumberOfIds()):
 705            cid = cell_ids.GetId(i)
 706            cids.append(cid)
 707        return np.array(cids)
 708
 709    def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray:
 710        """
 711        Find cells that are intersected by a plane.
 712        """
 713        cell_ids = vtki.vtkIdList()
 714        if not self.cell_locator:
 715            self.cell_locator = vtki.new("CellTreeLocator")
 716            self.cell_locator.SetDataSet(self.dataset)
 717            self.cell_locator.BuildLocator()
 718        self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
 719        cids = []
 720        for i in range(cell_ids.GetNumberOfIds()):
 721            cid = cell_ids.GetId(i)
 722            cids.append(cid)
 723        return np.array(cids)
 724
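A usage sketch for the cell-finding helpers (assuming a unit `Sphere`; the chosen bounds and line are arbitrary):

```python
import vedo

msh = vedo.Sphere(res=24)
ids_box  = msh.find_cells_in_bounds(xbounds=(0.0, 1.0))      # cells with x in [0, 1]
ids_line = msh.find_cells_along_line((0, 0, -2), (0, 0, 2))  # cells crossed by the z-axis
print(len(ids_box), len(ids_line))
```
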
 725    def keep_cell_types(self, types=()):
 726        """
 727        Extract cells of the specified types.
 728
 729        Check the VTK cell types here:
 730        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
 731        """
 732        fe = vtki.new("ExtractCellsByType")
 733        fe.SetInputData(self.dataset)
 734        for t in types:
 735            try:
 736                if utils.is_integer(t):
 737                    it = t
 738                else:
 739                    it = vtki.cell_types[t.upper()]
 740            except KeyError:
 741                vedo.logger.error(f"Cell type '{t}' not recognized")
 742                continue
 743            fe.AddCellType(it)
 744        fe.Update()
 745        self._update(fe.GetOutput())
 746        return self
 747
 748    def map_cells_to_points(self, arrays=(), move=False) -> Self:
 749        """
 750        Interpolate cell data (i.e., data specified per cell or face)
 751        into point data (i.e., data specified at each vertex).
 752        The method of transformation is based on averaging the data values
 753        of all cells using a particular point.
 754
 755        A custom list of arrays to be mapped can be passed in input.
 756
 757        Set `move=True` to delete the original `celldata` array.
 758        """
 759        c2p = vtki.new("CellDataToPointData")
 760        c2p.SetInputData(self.dataset)
 761        if not move:
 762            c2p.PassCellDataOn()
 763        if arrays:
 764            c2p.ClearCellDataArrays()
 765            c2p.ProcessAllArraysOff()
 766            for arr in arrays:
 767                c2p.AddCellDataArray(arr)
 768        else:
 769            c2p.ProcessAllArraysOn()
 770        c2p.Update()
 771        self._update(c2p.GetOutput(), reset_locators=False)
 772        self.mapper.SetScalarModeToUsePointData()
 773        self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self])
 774        return self
 775
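For example (assuming a `Sphere` mesh and an arbitrary per-cell array named `quality`):

```python
import numpy as np
import vedo

msh = vedo.Sphere(res=12)
msh.celldata["quality"] = np.random.rand(msh.ncells)
msh.map_cells_to_points(arrays=["quality"])   # cell values averaged onto the vertices
print(msh.pointdata["quality"].shape)         # expected to be (msh.npoints,)
```
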
 776    @property
 777    def vertices(self):
 778        """Return the vertices (points) coordinates."""
 779        try:
 780            # for polydata and unstructured grid
 781            varr = self.dataset.GetPoints().GetData()
 782        except (AttributeError, TypeError):
 783            try:
 784                # for RectilinearGrid, StructuredGrid
 785                vpts = vtki.vtkPoints()
 786                self.dataset.GetPoints(vpts)
 787                varr = vpts.GetData()
 788            except (AttributeError, TypeError):
 789                try:
 790                    # for ImageData
 791                    v2p = vtki.new("ImageToPoints")
 792                    v2p.SetInputData(self.dataset)
 793                    v2p.Update()
 794                    varr = v2p.GetOutput().GetPoints().GetData()
 795                except AttributeError:
 796                    return np.array([])
 797
 798        return utils.vtk2numpy(varr)
 799
 800    # setter
 801    @vertices.setter
 802    def vertices(self, pts):
 803        """Set vertices (points) coordinates."""
 804        pts = utils.make3d(pts)
 805        arr = utils.numpy2vtk(pts, dtype=np.float32)
 806        try:
 807            vpts = self.dataset.GetPoints()
 808            vpts.SetData(arr)
 809            vpts.Modified()
 810        except (AttributeError, TypeError):
 811            vedo.logger.error(f"Cannot set vertices for {type(self)}")
 812            return self
 813        # reset mesh to identity matrix position/rotation:
 814        self.point_locator = None
 815        self.cell_locator = None
 816        self.line_locator = None
 817        self.transform = LinearTransform()
 818
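A minimal read-modify-write sketch (assuming a `Sphere` mesh):

```python
import vedo

msh = vedo.Sphere(res=12)
pts = msh.vertices        # (npoints, 3) numpy array
pts[:, 2] *= 2.0          # stretch the object along z
msh.vertices = pts        # write the modified coordinates back
```
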
 819    @property
 820    def points(self):
 821        """Return the vertices (points) coordinates. Same as `vertices`."""
 822        return self.vertices
 823
 824    @points.setter
 825    def points(self, pts):
 826        """Set vertices (points) coordinates. Same as `vertices`."""
 827        self.vertices = pts
 828
 829    @property
 830    def coordinates(self):
 831        """Return the vertices (points) coordinates. Same as `vertices`."""
 832        return self.vertices
 833
 834    @coordinates.setter
 835    def coordinates(self, pts):
 836        """Set vertices (points) coordinates. Same as `vertices`."""
 837        self.vertices = pts
 838
 839    @property
 840    def cells_as_flat_array(self):
 841        """
 842        Get cell connectivity ids as a 1D numpy array.
 843        Format is e.g. [3,  10,20,30  4, 10,11,12,13  ...]
 844        """
 845        try:
 846            # valid for unstructured grid
 847            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
 848        except AttributeError:
 849            # valid for polydata
 850            arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
 851        return arr1d
 852
 853    @property
 854    def cells(self):
 855        """
 856        Get the cells connectivity ids as a numpy array.
 857
 858        The output format is: `[[id0 ... idn], [id0 ... idm],  etc]`.
 859        """
 860        try:
 861            # valid for unstructured grid
 862            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
 863        except AttributeError:
 864            try:
 865                # valid for polydata
 866                arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
 867            except AttributeError:
 868                vedo.logger.warning(f"Cannot get cells for {type(self)}")
 869                return np.array([])
 870
 871        # Get cell connectivity ids as a 1D array. vtk format is:
 872        # [nids1, id0 ... idn, nids2, id0 ... idm,  etc].
 873        i = 0
 874        conn = []
 875        n = len(arr1d)
 876        if n:
 877            while True:
 878                cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)]
 879                conn.append(cell)
 880                i += arr1d[i] + 1
 881                if i >= n:
 882                    break
 883        return conn
 884    
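For example (assuming a triangulated `Sphere`; the printed ids are illustrative):

```python
import vedo

msh = vedo.Sphere(res=8)
print(msh.cells[:2])                 # e.g. [[0, 1, 2], [1, 3, 2]]
print(msh.cells_as_flat_array[:8])   # e.g. [3 0 1 2 3 1 3 2]
```
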
 885    def cell_edge_neighbors(self):
 886        """
 887        Get the cell neighbor indices of each cell.
 888
 889        Returns a python list of lists.
 890        """
 891
 892        def face_to_edges(face):
 893            edges = []
 894            size = len(face)
 895            for i in range(1, size + 1):
 896                if i == size:
 897                    edges.append([face[i - 1], face[0]])
 898                else:
 899                    edges.append([face[i - 1], face[i]])
 900            return edges
 901
 902        pd = self.dataset
 903        pd.BuildLinks()
 904
 905        neicells = []
 906        for i, cell in enumerate(self.cells):
 907            nn = []
 908            for edge in face_to_edges(cell):
 909                neighbors = vtki.vtkIdList()
 910                pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors)
 911                if neighbors.GetNumberOfIds() > 0:
 912                    neighbor = neighbors.GetId(0)
 913                    nn.append(neighbor)
 914            neicells.append(nn)
 915
 916        return neicells
 917
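A short sketch (assuming a polygonal `Sphere` mesh, for which `BuildLinks()` is available):

```python
import vedo

msh = vedo.Sphere(res=8)
nei = msh.cell_edge_neighbors()
print(nei[0])   # indices of the cells sharing an edge with cell 0
```
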
 918
 919    def map_points_to_cells(self, arrays=(), move=False) -> Self:
 920        """
 921        Interpolate point data (i.e., data specified per point or vertex)
 922        into cell data (i.e., data specified per cell).
 923        The method of transformation is based on averaging the data values
 924        of all points defining a particular cell.
 925
 926        A custom list of arrays to be mapped can be passed in input.
 927
 928        Set `move=True` to delete the original `pointdata` array.
 929
 930        Examples:
 931            - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
 932        """
 933        p2c = vtki.new("PointDataToCellData")
 934        p2c.SetInputData(self.dataset)
 935        if not move:
 936            p2c.PassPointDataOn()
 937        if arrays:
 938            p2c.ClearPointDataArrays()
 939            p2c.ProcessAllArraysOff()
 940            for arr in arrays:
 941                p2c.AddPointDataArray(arr)
 942        else:
 943            p2c.ProcessAllArraysOn()
 944        p2c.Update()
 945        self._update(p2c.GetOutput(), reset_locators=False)
 946        self.mapper.SetScalarModeToUseCellData()
 947        self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self])
 948        return self
 949
 950    def resample_data_from(self, source, tol=None, categorical=False) -> Self:
 951        """
 952        Resample point and cell data from another dataset.
 953        The output has the same structure but its point data have
 954        the resampled values from the source.
 955
 956        Use `tol` to set the tolerance used to compute whether
 957        a point in the source is in a cell of the current object.
 958        Points without resampled values, and their cells, are marked as blank.
 959        If the data is categorical, then the resulting data will be determined
 960        by a nearest neighbor interpolation scheme.
 961
 962        Example:
 963        ```python
 964        from vedo import *
 965        m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
 966        pts = m1.coordinates
 967        ces = m1.cell_centers().coordinates
 968        m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
 969        m1.celldata["yvalues"]  = np.power(ces[:,1], 3)
 970        m2 = Mesh(dataurl+'bunny.obj')
 971        m2.resample_data_from(m1)
 972        # print(m2.pointdata["xvalues"])
 973        show(m1, m2 , N=2, axes=1)
 974        ```
 975        """
 976        rs = vtki.new("ResampleWithDataSet")
 977        rs.SetInputData(self.dataset)
 978        rs.SetSourceData(source.dataset)
 979
 980        rs.SetPassPointArrays(True)
 981        rs.SetPassCellArrays(True)
 982        rs.SetPassFieldArrays(True)
 983        rs.SetCategoricalData(categorical)
 984
 985        rs.SetComputeTolerance(True)
 986        if tol:
 987            rs.SetComputeTolerance(False)
 988            rs.SetTolerance(tol)
 989        rs.Update()
 990        self._update(rs.GetOutput(), reset_locators=False)
 991        self.pipeline = utils.OperationNode(
 992            "resample_data_from",
 993            comment=f"{source.__class__.__name__}",
 994            parents=[self, source],
 995        )
 996        return self
 997
 998    def interpolate_data_from(
 999        self,
1000        source,
1001        radius=None,
1002        n=None,
1003        kernel="shepard",
1004        exclude=("Normals",),
1005        on="points",
1006        null_strategy=1,
1007        null_value=0,
1008    ) -> Self:
1009        """
1010        Interpolate over source to port its data onto the current object using various kernels.
1011
1012        If n (number of closest points to use) is set then radius value is ignored.
1013
1014        Check out also:
1015            `probe()` which in many cases can be faster.
1016
1017        Arguments:
1018            kernel : (str)
1019                available kernels are [shepard, gaussian, linear]
1020            null_strategy : (int)
1021                specify a strategy to use when encountering a "null" point
1022                during the interpolation process. Null points occur when the local neighborhood
1023                (of nearby points to interpolate from) is empty.
1024
1025                - Case 0: an output array is created that marks points
1026                  as being valid (=1) or null (invalid =0), and the null_value is set as well
1027                - Case 1: the output data value(s) are set to the provided null_value
1028                - Case 2: simply use the closest point to perform the interpolation.
1029            null_value : (float)
1030                see above.
1031
1032        Examples:
1033            - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
1034            - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
1035            - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
1036            - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
1037
1038                ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png)
1039        """
1040        if radius is None and not n:
1041            vedo.logger.error("in interpolate_data_from(): please set either radius or n")
1042            raise RuntimeError
1043
1044        if on == "points":
1045            points = source.dataset
1046        elif on == "cells":
1047            c2p = vtki.new("CellDataToPointData")
1048            c2p.SetInputData(source.dataset)
1049            c2p.Update()
1050            points = c2p.GetOutput()
1051        else:
1052            vedo.logger.error("in interpolate_data_from(), on must be on points or cells")
1053            raise RuntimeError()
1054
1055        locator = vtki.new("PointLocator")
1056        locator.SetDataSet(points)
1057        locator.BuildLocator()
1058
1059        if kernel.lower() == "shepard":
1060            kern = vtki.new("ShepardKernel")
1061            kern.SetPowerParameter(2)
1062        elif kernel.lower() == "gaussian":
1063            kern = vtki.new("GaussianKernel")
1064            kern.SetSharpness(2)
1065        elif kernel.lower() == "linear":
1066            kern = vtki.new("LinearKernel")
1067        else:
1068            vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
1069            raise RuntimeError()
1070
1071        if n:
1072            kern.SetNumberOfPoints(n)
1073            kern.SetKernelFootprintToNClosest()
1074        else:
1075            kern.SetRadius(radius)
1076            kern.SetKernelFootprintToRadius()
1077
1078        interpolator = vtki.new("PointInterpolator")
1079        interpolator.SetInputData(self.dataset)
1080        interpolator.SetSourceData(points)
1081        interpolator.SetKernel(kern)
1082        interpolator.SetLocator(locator)
1083        interpolator.PassFieldArraysOn()
1084        interpolator.SetNullPointsStrategy(null_strategy)
1085        interpolator.SetNullValue(null_value)
1086        interpolator.SetValidPointsMaskArrayName("ValidPointMask")
1087        for ex in exclude:
1088            interpolator.AddExcludedArray(ex)
1089
1090        # remove arrays that are already present in the source
1091        # this is because the interpolator will ignore them otherwise
1092        for i in range(self.dataset.GetPointData().GetNumberOfArrays()):
1093            name = self.dataset.GetPointData().GetArrayName(i)
1094            if name not in exclude:
1095                self.dataset.GetPointData().RemoveArray(name)
1096
1097        interpolator.Update()
1098
1099        if on == "cells":
1100            p2c = vtki.new("PointDataToCellData")
1101            p2c.SetInputData(interpolator.GetOutput())
1102            p2c.Update()
1103            cpoly = p2c.GetOutput()
1104        else:
1105            cpoly = interpolator.GetOutput()
1106
1107        self._update(cpoly, reset_locators=False)
1108
1109        self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source])
1110        return self
1111
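A minimal sketch porting a point array from a scattered point cloud onto a grid (the array name `temperature` and the parameter values are arbitrary):

```python
import numpy as np
import vedo

src = vedo.Points(np.random.rand(100, 3))
src.pointdata["temperature"] = src.vertices[:, 0]
tgt = vedo.Grid(res=(25, 25))
tgt.interpolate_data_from(src, n=5, kernel="shepard")
print(tgt.pointdata["temperature"].shape)
```
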
1112    def add_ids(self) -> Self:
1113        """
1114        Generate point and cell ids arrays.
1115
1116        Two new arrays are added to the mesh: `PointID` and `CellID`.
1117        """
1118        ids = vtki.new("IdFilter")
1119        ids.SetInputData(self.dataset)
1120        ids.PointIdsOn()
1121        ids.CellIdsOn()
1122        ids.FieldDataOff()
1123        ids.SetPointIdsArrayName("PointID")
1124        ids.SetCellIdsArrayName("CellID")
1125        ids.Update()
1126        self._update(ids.GetOutput(), reset_locators=False)
1127        self.pipeline = utils.OperationNode("add_ids", parents=[self])
1128        return self
1129
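For example (assuming a `Sphere` mesh):

```python
import vedo

msh = vedo.Sphere(res=12).add_ids()
print(msh.pointdata["PointID"][:5])   # e.g. [0 1 2 3 4]
print(msh.celldata["CellID"][:5])
```
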
1130    def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray:
1131        """
1132        Compute and return the gradient of the active scalar field as a numpy array.
1133
1134        Arguments:
1135            input_array : (str)
1136                array of the scalars to compute the gradient,
1137                if None the current active array is selected
1138            on : (str)
1139                compute either on 'points' or 'cells' data
1140            fast : (bool)
1141                if True, will use a less accurate algorithm
1142                that performs fewer derivative calculations (and is therefore faster).
1143
1144        Examples:
1145            - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
1146
1147            ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png)
1148        """
1149        gra = vtki.new("GradientFilter")
1150        if on.startswith("p"):
1151            varr = self.dataset.GetPointData()
1152            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1153        elif on.startswith("c"):
1154            varr = self.dataset.GetCellData()
1155            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1156        else:
1157            vedo.logger.error(f"in gradient: unknown option {on}")
1158            raise RuntimeError
1159
1160        if input_array is None:
1161            if varr.GetScalars():
1162                input_array = varr.GetScalars().GetName()
1163            else:
1164                vedo.logger.error(f"in gradient: no scalars found for {on}")
1165                raise RuntimeError
1166
1167        gra.SetInputData(self.dataset)
1168        gra.SetInputScalars(tp, input_array)
1169        gra.SetResultArrayName("Gradient")
1170        gra.SetFasterApproximation(fast)
1171        gra.ComputeDivergenceOff()
1172        gra.ComputeVorticityOff()
1173        gra.ComputeGradientOn()
1174        gra.Update()
1175        # self._update(gra.GetOutput(), reset_locators=False)
1176        if on.startswith("p"):
1177            gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
1178        else:
1179            gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
1180        return gvecs
1181
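A small sketch computing the gradient of a simple height field (the array name `height` is arbitrary):

```python
import vedo

msh = vedo.Sphere(res=24)
msh.pointdata["height"] = msh.vertices[:, 2]
grads = msh.gradient(input_array="height", on="points")
print(grads.shape)   # expected to be (npoints, 3)
```
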
1182    def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray:
1183        """
1184        Compute and return the divergence of a vector field as a numpy array.
1185
1186        Arguments:
1187            array_name : (str)
1188                name of the array of vectors to compute the divergence,
1189                if None the current active array is selected
1190            on : (str)
1191                compute either on 'points' or 'cells' data
1192            fast : (bool)
1193                if True, will use a less accurate algorithm
1194                that performs fewer derivative calculations (and is therefore faster).
1195        """
1196        div = vtki.new("GradientFilter")
1197        if on.startswith("p"):
1198            varr = self.dataset.GetPointData()
1199            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1200        elif on.startswith("c"):
1201            varr = self.dataset.GetCellData()
1202            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1203        else:
1204            vedo.logger.error(f"in divergence(): unknown option {on}")
1205            raise RuntimeError
1206
1207        if array_name is None:
1208            if varr.GetVectors():
1209                array_name = varr.GetVectors().GetName()
1210            else:
1211                vedo.logger.error(f"in divergence(): no vectors found for {on}")
1212                raise RuntimeError
1213
1214        div.SetInputData(self.dataset)
1215        div.SetInputScalars(tp, array_name)
1216        div.ComputeDivergenceOn()
1217        div.ComputeGradientOff()
1218        div.ComputeVorticityOff()
1219        div.SetDivergenceArrayName("Divergence")
1220        div.SetFasterApproximation(fast)
1221        div.Update()
1222        # self._update(div.GetOutput(), reset_locators=False)
1223        if on.startswith("p"):
1224            dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
1225        else:
1226            dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
1227        return dvecs
1228
1229    def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray:
1230        """
1231        Compute and return the vorticity of a vector field as a numpy array.
1232
1233        Arguments:
1234            array_name : (str)
1235                name of the array to compute the vorticity,
1236                if None the current active array is selected
1237            on : (str)
1238                compute either on 'points' or 'cells' data
1239            fast : (bool)
1240                if True, will use a less accurate algorithm
1241                that performs fewer derivative calculations (and is therefore faster).
1242        """
1243        vort = vtki.new("GradientFilter")
1244        if on.startswith("p"):
1245            varr = self.dataset.GetPointData()
1246            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1247        elif on.startswith("c"):
1248            varr = self.dataset.GetCellData()
1249            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1250        else:
1251            vedo.logger.error(f"in vorticity(): unknown option {on}")
1252            raise RuntimeError
1253
1254        if array_name is None:
1255            if varr.GetVectors():
1256                array_name = varr.GetVectors().GetName()
1257            else:
1258                vedo.logger.error(f"in vorticity(): no vectors found for {on}")
1259                raise RuntimeError
1260
1261        vort.SetInputData(self.dataset)
1262        vort.SetInputScalars(tp, array_name)
1263        vort.ComputeDivergenceOff()
1264        vort.ComputeGradientOff()
1265        vort.ComputeVorticityOn()
1266        vort.SetVorticityArrayName("Vorticity")
1267        vort.SetFasterApproximation(fast)
1268        vort.Update()
1269        if on.startswith("p"):
1270            vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
1271        else:
1272            vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
1273        return vvecs
1274
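As a sanity check, the rigid-rotation field `v = (y, -x, 0)` has zero divergence and constant vorticity `(0, 0, -2)`; a sketch assuming a planar `Grid` mesh:

```python
import numpy as np
import vedo

msh = vedo.Grid(res=(30, 30))
pts = msh.vertices
msh.pointdata["velocity"] = np.c_[pts[:, 1], -pts[:, 0], np.zeros(len(pts))]
div  = msh.divergence("velocity", on="points")   # expected to be ~0 everywhere
vort = msh.vorticity("velocity", on="points")    # expected to be ~(0, 0, -2) everywhere
print(div.mean(), vort.mean(axis=0))
```
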
1275    def probe(
1276            self,
1277            source,
1278            categorical=False,
1279            snap=False,
1280            tol=0,
1281        ) -> Self:
1282        """
1283        Probe the scalars of the `source` dataset at the point locations of the current object.
1284
1285        Note that a mask is also output with valid/invalid points which can be accessed
1286        with `mesh.pointdata['ValidPointMask']`.
1287
1288        Arguments:
1289            source : any dataset
1290                the data set to probe.
1291            categorical : bool
1292                control whether the source pointdata is to be treated as categorical.
1293            snap : bool
1294                snap to the cell with the closest point if no cell was found
1295            tol : float
1296                the tolerance to use when performing the probe.
1297
1298        Check out also:
1299            `interpolate_data_from()` and `tovolume()`
1300
1301        Examples:
1302            - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)
1303
1304                ![](https://vedo.embl.es/images/volumetric/probePoints.png)
1305        """
1306        probe_filter = vtki.new("ProbeFilter")
1307        probe_filter.SetSourceData(source.dataset)
1308        probe_filter.SetInputData(self.dataset)
1309        probe_filter.PassCellArraysOn()
1310        probe_filter.PassFieldArraysOn()
1311        probe_filter.PassPointArraysOn()
1312        probe_filter.SetCategoricalData(categorical)
1313        probe_filter.ComputeToleranceOff()
1314        if tol:
1315            probe_filter.ComputeToleranceOn()
1316            probe_filter.SetTolerance(tol)
1317        probe_filter.SetSnapToCellWithClosestPoint(snap)
1318        probe_filter.Update()
1319        self._update(probe_filter.GetOutput(), reset_locators=False)
1320        self.pipeline = utils.OperationNode("probe", parents=[self, source])
1321        self.pointdata.rename("vtkValidPointMask", "ValidPointMask")
1322        return self
1323
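For example (assuming the bundled `embryo.slc` example volume; the probe coordinates are arbitrary):

```python
import vedo

vol = vedo.Volume(vedo.dataurl + "embryo.slc")
pts = vedo.Points([[100, 100, 100], [120, 130, 140]])  # probe locations
pts.probe(vol)
print(pts.pointdata.keys())   # includes the probed scalars and 'ValidPointMask'
```
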
1324    def compute_cell_size(self) -> Self:
1325        """
1326        Add to this object a cell data array
1327        containing the area, volume and edge length
1328        of the cells (when applicable to the object type).
1329
1330        Array names are: `Area`, `Volume`, `Length`.
1331        """
1332        csf = vtki.new("CellSizeFilter")
1333        csf.SetInputData(self.dataset)
1334        csf.SetComputeArea(1)
1335        csf.SetComputeVolume(1)
1336        csf.SetComputeLength(1)
1337        csf.SetComputeVertexCount(0)
1338        csf.SetAreaArrayName("Area")
1339        csf.SetVolumeArrayName("Volume")
1340        csf.SetLengthArrayName("Length")
1341        csf.Update()
1342        self._update(csf.GetOutput(), reset_locators=False)
1343        return self
1344
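For a unit sphere the per-cell areas should sum to approximately `4*pi`; a sketch:

```python
import numpy as np
import vedo

msh = vedo.Sphere(res=24).compute_cell_size()
print(msh.celldata["Area"].sum(), "~", 4 * np.pi)
```
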
1345    def generate_random_data(self) -> Self:
1346        """Fill a dataset with random attributes"""
1347        gen = vtki.new("RandomAttributeGenerator")
1348        gen.SetInputData(self.dataset)
1349        gen.GenerateAllDataOn()
1350        gen.SetDataTypeToFloat()
1351        gen.GeneratePointNormalsOff()
1352        gen.GeneratePointTensorsOn()
1353        gen.GenerateCellScalarsOn()
1354        gen.Update()
1355        self._update(gen.GetOutput(), reset_locators=False)
1356        self.pipeline = utils.OperationNode("generate_random_data", parents=[self])
1357        return self
1358
1359    def integrate_data(self) -> dict:
1360        """
1361        Integrate point and cell data arrays while computing length,
1362        area or volume of the domain. It works for 1D, 2D or 3D cells.
1363    
1364        For volumetric datasets, this filter ignores all but 3D cells.
1365        It will not compute the volume contained in a closed surface.
1366
1367        Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
1368        which contain the integration result for the corresponding attributes.
1369
1370        Examples:
1371            ```python
1372            from vedo import *
1373            surf = Sphere(res=100)
1374            surf.pointdata['scalars'] = np.ones(surf.npoints)
1375            data = surf.integrate_data()
1376            print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
1377            ```
1378
1379            ```python
1380            from vedo import *
1381
1382            xcoords1 = np.arange(0, 2.2, 0.2)
1383            xcoords2 = sqrt(np.arange(0, 4.2, 0.2))
1384
1385            ycoords = np.arange(0, 1.2, 0.2)
1386
1387            surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
1388            surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)
1389
1390            surf1.pointdata['scalars'] = surf1.vertices[:,2]
1391            surf2.pointdata['scalars'] = surf2.vertices[:,2]
1392
1393            data1 = surf1.integrate_data()
1394            data2 = surf2.integrate_data()
1395
1396            print(data1['pointdata']['scalars'],
1397                "is equal to",
1398                data2['pointdata']['scalars'],
1399                "even if the grids are different!",
1400                "(= the volume under the surface)"
1401            )
1402            show(surf1, surf2, N=2, axes=1).close()
1403            ```
1404        """
1405        vinteg = vtki.new("IntegrateAttributes")
1406        vinteg.SetInputData(self.dataset)
1407        vinteg.Update()
1408        ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
1409        data = dict(
1410            pointdata=ugrid.pointdata.todict(),
1411            celldata=ugrid.celldata.todict(),
1412            metadata=ugrid.metadata.todict(),
1413        )
1414        return data
1415
1416    def write(self, filename, binary=True) -> None:
1417        """Write object to file."""
1418        out = vedo.file_io.write(self, filename, binary)
1419        out.pipeline = utils.OperationNode(
1420            "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c"
1421        )
1422
1423    def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh":
1424        """
1425        Extract the boundary geometry from the dataset (or convert the data to polygonal type).
1426
1427        Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
1428        to keep track of the original mesh elements.
1429
1430        Arguments:
1431            bounds : (list)
1432                specify a sub-region to extract
1433            shrink : (float)
1434                shrink the cells to a fraction of their original size
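
            Example (an untested sketch; the file name `limb_ugrid.vtk` is only a placeholder
            for any unstructured dataset that vedo can read):
            ```python
            from vedo import UnstructuredGrid, dataurl, show
            ugrid = UnstructuredGrid(dataurl + "limb_ugrid.vtk")  # placeholder dataset
            msh = ugrid.tomesh(shrink=0.9)
            print(msh.celldata["OriginalCellIds"])  # ids of the original cells
            show(msh, axes=1).close()
            ```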
1435        """
1436        geo = vtki.new("GeometryFilter")
1437
1438        if shrink:
1439            sf = vtki.new("ShrinkFilter")
1440            sf.SetInputData(self.dataset)
1441            sf.SetShrinkFactor(shrink)
1442            sf.Update()
1443            geo.SetInputData(sf.GetOutput())
1444        else:
1445            geo.SetInputData(self.dataset)
1446
1447        geo.SetPassThroughCellIds(1)
1448        geo.SetPassThroughPointIds(1)
1449        geo.SetOriginalCellIdsName("OriginalCellIds")
1450        geo.SetOriginalPointIdsName("OriginalPointIds")
1451        geo.SetNonlinearSubdivisionLevel(1)
1452        # geo.MergingOff() # crashes on StructuredGrids
1453        if bounds:
1454            geo.SetExtent(bounds)
1455            geo.ExtentClippingOn()
1456        geo.Update()
1457        msh = vedo.mesh.Mesh(geo.GetOutput())
1458        msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b")
1459        return msh
1460
1461    def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
1462        """
1463        Compute the `Volume` object whose voxels contain the signed distance from
1464        the object. The calling object must have "Normals" defined.
1465
1466        Arguments:
1467            bounds : (list, actor)
1468                bounding box sizes
1469            dims : (list)
1470                dimensions (nr. of voxels) of the output volume.
1471            invert : (bool)
1472                flip the sign
1473            max_radius : (float)
1474                specify how far out to propagate distance calculation
1475
1476        Examples:
1477            - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)
1478
1479                ![](https://vedo.embl.es/images/basic/distance2mesh.png)
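
                A minimal inline sketch (assumes the mesh already has point normals):
                ```python
                from vedo import Sphere, show
                mesh = Sphere().compute_normals()
                vol = mesh.signed_distance(dims=(40, 40, 40))
                iso = vol.isosurface(0)  # reconstruct the zero-distance surface
                show(mesh.wireframe(), iso, N=2, axes=1).close()
                ```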
1480        """
1481        if bounds is None:
1482            bounds = self.bounds()
1483        if max_radius is None:
1484            max_radius = self.diagonal_size() / 2
1485        dist = vtki.new("SignedDistance")
1486        dist.SetInputData(self.dataset)
1487        dist.SetRadius(max_radius)
1488        dist.SetBounds(bounds)
1489        dist.SetDimensions(dims)
1490        dist.Update()
1491        img = dist.GetOutput()
1492        if invert:
1493            mat = vtki.new("ImageMathematics")
1494            mat.SetInput1Data(img)
1495            mat.SetOperationToMultiplyByK()
1496            mat.SetConstantK(-1)
1497            mat.Update()
1498            img = mat.GetOutput()
1499
1500        vol = vedo.Volume(img)
1501        vol.name = "SignedDistanceVolume"
1502        vol.pipeline = utils.OperationNode(
1503            "signed_distance",
1504            parents=[self],
1505            comment=f"dims={tuple(vol.dimensions())}",
1506            c="#e9c46a:#0096c7",
1507        )
1508        return vol
1509    
1510    def unsigned_distance(
1511            self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
1512        """
1513        Compute the `Volume` object whose voxels contain the unsigned distance from the object.
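
            Example (a small, untested sketch; the chosen radius and cap value are arbitrary):
            ```python
            from vedo import Sphere
            mesh = Sphere(res=24)
            vol = mesh.unsigned_distance(dims=(40, 40, 40), max_radius=0.25, cap_value=0.25)
            print("scalar range:", vol.dataset.GetScalarRange())
            ```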
1514        """
1515        dist = vtki.new("UnsignedDistance")
1516        dist.SetInputData(self.dataset)
1517        dist.SetDimensions(dims)
1518
1519        if len(bounds) == 6:
1520            dist.SetBounds(bounds)
1521        # elif bounds == "auto":
1522        #     dist.AdjustBoundsOn()
1523        else:
1524            dist.SetBounds(self.bounds())
1525        if not max_radius:
1526            max_radius = self.diagonal_size() / 10
1527        dist.SetRadius(max_radius)
1528
1529        if self.point_locator:
1530            dist.SetLocator(self.point_locator)
1531        
1532        if cap_value is not None:
1533            dist.CappingOn()
1534            dist.SetCapValue(cap_value)
1535        dist.SetOutputScalarTypeToFloat()
1536        dist.Update()
1537        vol = vedo.Volume(dist.GetOutput())
1538        vol.name = "UnsignedDistanceVolume"
1539        vol.pipeline = utils.OperationNode(
1540            "unsigned_distance", parents=[self], c="#e9c46a:#0096c7")
1541        return vol
1542
1543    def smooth_data(self, 
1544            niter=10, relaxation_factor=0.1, strategy=0, mask=None,
1545            mode="distance2",
1546            exclude=("Normals", "TextureCoordinates"),
1547        ) -> Self:
1548        """
1549        Smooth point attribute data using a distance-weighted Laplacian kernel.
1550        The effect is to blur regions of high variation and emphasize low variation regions.
1551
1552        A central concept of this method is the point smoothing stencil.
1553        A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
1554        To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
1555        the distance-weighted average of the attributes a(j) (the weights w(j) sum to 1).
1556        This averaging process is repeated until the maximum number of iterations is reached.
1557
1558        The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
1559        The a(i+1) attributes are determined from the a(i) attributes as follows:
1560            a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))
1561    
1562        Convergence occurs faster for larger relaxation factors.
1563        Typically a small number of iterations is required for large relaxation factors,
1564        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
1565        adequate (i.e., just a distance weighted average is computed).
1566
1567        Warning:
1568            Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1;
1569            after attribute smoothing this constraint is likely to be violated.
1570            Other vectors and tensors may suffer from similar issues.
1571            In such a situation, use `exclude=...` to list the arrays that should not be smoothed (they are simply passed through to the output).
1572            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
1573            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
1574            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n,
1575            where n is the number of connected points in the stencil.
1576            The smoothing process reduces high frequency information in the data attributes.
1577            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost,
1578            and the attributes will move towards an "average" value.
1579            While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use
1580            an image-based algorithm to perform data smoothing.
1581            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
1582            (and hence the associated points defining the edge). 
1583
1584        Arguments:
1585            niter : (int)
1586                number of iterations
1587            relaxation_factor : (float)
1588                relaxation factor controlling the amount of Laplacian smoothing applied
1589            strategy : (int)
1590                strategy to use for Laplacian smoothing
1591                    - 0: use all points, all point data attributes are smoothed
1592                    - 1: smooth all point attribute data except those on the boundary
1593                    - 2: only point data connected to a boundary point are smoothed
1594            mask : (str, np.ndarray)
1595                array to be used as a mask (the `strategy` keyword is then ignored)
1596            mode : (str)
1597                smoothing mode, either "distance2", "distance" or "average"
1598                    - distance**2 weighted (i.e., 1/r**2 interpolation weights)
1599                    - distance weighted (i.e., 1/r) approach;
1600                    - simple average of all connected points in the stencil
1601            exclude : (list)
1602                list of arrays to be excluded from smoothing
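
        Example (a minimal sketch; requires VTK>=9.3, and the array name "scalars" is arbitrary):
        ```python
        import numpy as np
        from vedo import Sphere, show
        mesh = Sphere(res=50)
        mesh.pointdata["scalars"] = np.random.randn(mesh.npoints)  # noisy test field
        mesh.smooth_data(niter=30, relaxation_factor=0.1, mode="distance2")
        show(mesh.cmap("coolwarm", "scalars"), axes=1).close()
        ```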
1603        """
1604        try:
1605            saf = vtki.new("AttributeSmoothingFilter")
1606        except:
1607            vedo.logger.error("smooth_data() is only available with VTK>=9.3.0")
1608            return self
1609        saf.SetInputData(self.dataset)
1610        saf.SetRelaxationFactor(relaxation_factor)
1611        saf.SetNumberOfIterations(niter)
1612
1613        for ex in exclude:
1614            saf.AddExcludedArray(ex)
1615
1616        if mode == "distance":
1617            saf.SetWeightsTypeToDistance()
1618        elif mode == "distance2":
1619            saf.SetWeightsTypeToDistance2()
1620        elif mode == "average":
1621            saf.SetWeightsTypeToAverage()
1622        else:
1623            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
1624            raise TypeError
1625
1626        saf.SetSmoothingStrategy(strategy)
1627        if mask is not None:
1628            saf.SetSmoothingStrategyToSmoothingMask()
1629            if isinstance(mask, str):
1630                mask_ = self.dataset.GetPointData().GetArray(mask)
1631                if not mask_:
1632                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
1633                    return self
1634                mask_array = vtki.vtkUnsignedCharArray()
1635                mask_array.ShallowCopy(mask_)
1636                mask_array.SetName(mask_.GetName())
1637            else:
1638                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
1639            saf.SetSmoothingMask(mask_array)
1640
1641        saf.Update()
1642
1643        self._update(saf.GetOutput())
1644        self.pipeline = utils.OperationNode(
1645            "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b"
1646        )
1647        return self
1648        
1649    def compute_streamlines(
1650            self, 
1651            seeds: Any, 
1652            integrator="rk4",
1653            direction="forward",
1654            initial_step_size=None,
1655            max_propagation=None,
1656            max_steps=10000,
1657            step_length=0,
1658            surface_constrained=False,
1659            compute_vorticity=False,
1660        ) -> Union["vedo.Lines", None]:
1661        """
1662        Integrate a vector field to generate streamlines.
1663
1664        Arguments:
1665            seeds : (Mesh, Points, list)
1666                starting points of the streamlines
1667            integrator : (str)
1668                type of integration method to be used:
1669                    - "rk2" (Runge-Kutta 2)
1670                    - "rk4" (Runge-Kutta 4)
1671                    - "rk45" (Runge-Kutta 45)
1672            direction : (str)
1673                direction of integration, either "forward", "backward" or "both"
1674            initial_step_size : (float)
1675                initial step size used for line integration
1676            max_propagation : (float)
1677                maximum length of a streamline expressed in absolute units
1678            max_steps : (int)
1679                maximum number of steps for a streamline
1680            step_length : (float)
1681                maximum length of a step expressed in absolute units
1682            surface_constrained : (bool)
1683                whether to stop integrating when the streamline leaves the surface
1684            compute_vorticity : (bool)
1685                whether to compute the vorticity at each streamline point
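
            Example (an untested sketch of a planar rotational field on a grid;
            the array name "vectors" is arbitrary and becomes the active vector field):
            ```python
            import numpy as np
            from vedo import Grid, Points, show
            domain = Grid(res=(25, 25)).scale(2)
            pts = domain.vertices
            vecs = np.c_[-pts[:, 1], pts[:, 0], np.zeros(len(pts))]
            domain.pointdata["vectors"] = vecs
            seeds = Points(np.random.rand(20, 3) * [1, 1, 0] - [0.5, 0.5, 0])
            lines = domain.compute_streamlines(seeds, direction="both", surface_constrained=True)
            show(domain, lines, axes=1).close()
            ```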
1686        """
1687        b = self.dataset.GetBounds()
1688        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
1689        if initial_step_size is None:
1690            initial_step_size = size / 1000.0
1691
1692        if max_propagation is None:
1693            max_propagation = size * 2
1694
1695        if utils.is_sequence(seeds):
1696            seeds = vedo.Points(seeds)
1697
1698        sti = vtki.new("StreamTracer")
1699        sti.SetSourceData(seeds.dataset)
1700        if isinstance(self, vedo.RectilinearGrid):
1701            sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset)
1702        else:
1703            sti.SetInputDataObject(self.dataset)
1704
1705        sti.SetInitialIntegrationStep(initial_step_size)
1706        sti.SetComputeVorticity(compute_vorticity)
1707        sti.SetMaximumNumberOfSteps(max_steps)
1708        sti.SetMaximumPropagation(max_propagation)
1709        sti.SetSurfaceStreamlines(surface_constrained)
1710        if step_length:
1711            sti.SetMaximumIntegrationStep(step_length)
1712
1713        if "for" in direction:
1714            sti.SetIntegrationDirectionToForward()
1715        elif "back" in direction:
1716            sti.SetIntegrationDirectionToBackward()
1717        elif "both" in direction:
1718            sti.SetIntegrationDirectionToBoth()
1719        else:
1720            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
1721            return None
1722
1723        if integrator == "rk2":
1724            sti.SetIntegratorTypeToRungeKutta2()
1725        elif integrator == "rk4":
1726            sti.SetIntegratorTypeToRungeKutta4()
1727        elif integrator == "rk45":
1728            sti.SetIntegratorTypeToRungeKutta45()
1729        else:
1730            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
1731            return None
1732
1733        sti.Update()
1734
1735        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
1736        stlines.name = "StreamLines"
1737        self.pipeline = utils.OperationNode(
1738            "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b"
1739        )
1740        return stlines
1741
1742###############################################################################
1743class PointAlgorithms(CommonAlgorithms):
1744    """Methods for point clouds."""
1745
1746    def apply_transform(self, LT: Any, deep_copy=True) -> Self:
1747        """
1748        Apply a linear or non-linear transformation to the mesh polygonal data.
1749
1750        Example:
1751        ```python
1752        from vedo import Cube, show, settings
1753        settings.use_parallel_projection = True
1754        c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
1755        T = c1.transform  # rotate by 25 degrees, place at (2,1)
1756        c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
1757        c2.apply_transform(T)
1758        show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
1759        ```
1760
1761        ![](https://vedo.embl.es/images/feats/apply_transform.png)
1762        """
1763        if self.dataset.GetNumberOfPoints() == 0:
1764            return self
1765
1766        if isinstance(LT, LinearTransform):
1767            LT_is_linear = True
1768            tr = LT.T
1769            if LT.is_identity():
1770                return self
1771        
1772        elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
1773            LT_is_linear = True
1774            LT = LinearTransform(LT)
1775            tr = LT.T
1776            if LT.is_identity():
1777                return self
1778
1779        elif isinstance(LT, NonLinearTransform):
1780            LT_is_linear = False
1781            tr = LT.T
1782            self.transform = LT  # reset
1783
1784        elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
1785            LT_is_linear = False
1786            tr = LT
1787            self.transform = NonLinearTransform(LT)  # reset
1788
1789        else:
1790            vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
1791            return self
1792
1793        ################
1794        if LT_is_linear:
1795            try:
1796                # self.transform might still not be linear
1797                self.transform.concatenate(LT)
1798            except AttributeError:
1799                # in that case reset it
1800                self.transform = LinearTransform()
1801
1802        ################
1803        if isinstance(self.dataset, vtki.vtkPolyData):
1804            tp = vtki.new("TransformPolyDataFilter")
1805        elif isinstance(self.dataset, vtki.vtkUnstructuredGrid):
1806            tp = vtki.new("TransformFilter")
1807            tp.TransformAllInputVectorsOn()
1808        # elif isinstance(self.dataset, vtki.vtkImageData):
1809        #     tp = vtki.new("ImageReslice")
1810        #     tp.SetInterpolationModeToCubic()
1811        #     tp.SetResliceTransform(tr)
1812        else:
1813            vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}")
1814            return self
1815
1816        tp.SetTransform(tr)
1817        tp.SetInputData(self.dataset)
1818        tp.Update()
1819        out = tp.GetOutput()
1820
1821        if deep_copy:
1822            self.dataset.DeepCopy(out)
1823        else:
1824            self.dataset.ShallowCopy(out)
1825
1826        # reset the locators
1827        self.point_locator = None
1828        self.cell_locator = None
1829        self.line_locator = None
1830        return self
1831
1832    def apply_transform_from_actor(self) -> LinearTransform:
1833        """
1834        Apply the current transformation of the actor to the data.
1835        Useful when manually moving an actor (e.g. when pressing "a").
1836        Returns the `LinearTransform` object.
1837
1838        Note that this method is automatically called when the window is closed,
1839        or the interactor style is changed.
1840        """
1841        M = self.actor.GetMatrix()
1842        self.apply_transform(M)
1843        iden = vtki.vtkMatrix4x4()
1844        self.actor.PokeMatrix(iden)
1845        return LinearTransform(M)
1846
1847    def pos(self, x=None, y=None, z=None) -> Self:
1848        """Set/Get object position."""
1849        if x is None:  # get functionality
1850            return self.transform.position
1851
1852        if z is None and y is None:  # assume x is of the form (x,y,z)
1853            if len(x) == 3:
1854                x, y, z = x
1855            else:
1856                x, y = x
1857                z = 0
1858        elif z is None:  # assume x,y is of the form x, y
1859            z = 0
1860
1861        q = self.transform.position
1862        delta = [x, y, z] - q
1863        if delta[0] == delta[1] == delta[2] == 0:
1864            return self
1865        LT = LinearTransform().translate(delta)
1866        return self.apply_transform(LT)
1867
1868    def shift(self, dx=0, dy=0, dz=0) -> Self:
1869        """Add a vector to the current object position."""
1870        if utils.is_sequence(dx):
1871            dx, dy, dz = utils.make3d(dx)
1872        if dx == dy == dz == 0:
1873            return self
1874        LT = LinearTransform().translate([dx, dy, dz])
1875        return self.apply_transform(LT)
1876
1877    def x(self, val=None) -> Self:
1878        """Set/Get object position along x axis."""
1879        p = self.transform.position
1880        if val is None:
1881            return p[0]
1882        self.pos(val, p[1], p[2])
1883        return self
1884
1885    def y(self, val=None)-> Self:
1886        """Set/Get object position along y axis."""
1887        p = self.transform.position
1888        if val is None:
1889            return p[1]
1890        self.pos(p[0], val, p[2])
1891        return self
1892
1893    def z(self, val=None) -> Self:
1894        """Set/Get object position along z axis."""
1895        p = self.transform.position
1896        if val is None:
1897            return p[2]
1898        self.pos(p[0], p[1], val)
1899        return self
1900
1901    def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
1902        """
1903        Rotate around an arbitrary `axis` passing through `point`.
1904
1905        Example:
1906        ```python
1907        from vedo import *
1908        c1 = Cube()
1909        c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
1910        v = vector(0.2,1,0)
1911        p = vector(1,0,0)  # axis passes through this point
1912        c2.rotate(90, axis=v, point=p)
1913        l = Line(-v+p, v+p).lw(3).c('red')
1914        show(c1, l, c2, axes=1).close()
1915        ```
1916
1917        ![](https://vedo.embl.es/images/feats/rotate_axis.png)
1918        """
1919        LT = LinearTransform()
1920        LT.rotate(angle, axis, point, rad)
1921        return self.apply_transform(LT)
1922
1923    def rotate_x(self, angle: float, rad=False, around=None) -> Self:
1924        """
1925        Rotate around x-axis. If angle is in radians set `rad=True`.
1926
1927        Use `around` to define a pivoting point.
1928        """
1929        if angle == 0:
1930            return self
1931        LT = LinearTransform().rotate_x(angle, rad, around)
1932        return self.apply_transform(LT)
1933
1934    def rotate_y(self, angle: float, rad=False, around=None) -> Self:
1935        """
1936        Rotate around y-axis. If angle is in radians set `rad=True`.
1937
1938        Use `around` to define a pivoting point.
1939        """
1940        if angle == 0:
1941            return self
1942        LT = LinearTransform().rotate_y(angle, rad, around)
1943        return self.apply_transform(LT)
1944
1945    def rotate_z(self, angle: float, rad=False, around=None) -> Self:
1946        """
1947        Rotate around z-axis. If angle is in radians set `rad=True`.
1948
1949        Use `around` to define a pivoting point.
1950        """
1951        if angle == 0:
1952            return self
1953        LT = LinearTransform().rotate_z(angle, rad, around)
1954        return self.apply_transform(LT)
1955
1956    def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
1957        """
1958        Reorient the object to point to a new direction from an initial one.
1959        If `initaxis` is None, the object will be assumed in its "default" orientation.
1960        If `xyplane` is True, the object will be rotated to lie on the xy plane.
1961
1962        Use `rotation` to first rotate the object around its `initaxis`.
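
        Example (a minimal sketch using a `Cylinder`, which is z-aligned by default):
        ```python
        from vedo import Cylinder, show
        c = Cylinder()
        c.reorient((0, 0, 1), (1, 1, 0))  # from the z-axis to the (1,1,0) direction
        show(c, axes=1).close()
        ```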
1963        """
1964        q = self.transform.position
1965        LT = LinearTransform()
1966        LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane)
1967        return self.apply_transform(LT)
1968
1969    def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]:
1970        """
1971        Set/get object's scaling factor.
1972
1973        Arguments:
1974            s : (list, float)
1975                scaling factor(s).
1976            reset : (bool)
1977                if True previous scaling factors are ignored.
1978            origin : (bool)
1979                if True, scaling is applied with respect to the object's position,
1980                otherwise it is applied with respect to (0,0,0).
1981
1982        Note:
1983            use `s=(sx,sy,sz)` to scale differently in the three coordinates.
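
        Example (a minimal sketch):
        ```python
        from vedo import Cube
        c = Cube()
        c.scale(2)            # uniform scaling
        c.scale((1, 2, 0.5))  # anisotropic scaling on top of the previous one
        print(c.scale())      # current total scaling factors, e.g. [2. 4. 1.]
        ```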
1984        """
1985        if s is None:
1986            return np.array(self.transform.T.GetScale())
1987
1988        if not utils.is_sequence(s):
1989            s = [s, s, s]
1990
1991        LT = LinearTransform()
1992        if reset:
1993            old_s = np.array(self.transform.T.GetScale())
1994            LT.scale(s / old_s)
1995        else:
1996            if origin is True:
1997                LT.scale(s, origin=self.transform.position)
1998            elif origin is False:
1999                LT.scale(s, origin=False)
2000            else:
2001                LT.scale(s, origin=origin)
2002
2003        return self.apply_transform(LT)
2004
2005
2006###############################################################################
2007class VolumeAlgorithms(CommonAlgorithms):
2008    """Methods for Volume objects."""
2009
2010    def bounds(self) -> np.ndarray:
2011        """
2012        Get the object bounds.
2013        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
2014        """
2015        # OVERRIDE CommonAlgorithms.bounds() which is too slow
2016        return np.array(self.dataset.GetBounds())
2017
2018    def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh":
2019        """
2020        Return a `Mesh` isosurface extracted from the `Volume` object.
2021
2022        Set `value` as a single float or a list of values to draw the isosurface(s).
2023        Use `flying_edges` for faster results (but it can sometimes interfere with `smooth()`).
2024
2025        The isosurface values can be accessed with `mesh.metadata["isovalue"]`.
2026
2027        Examples:
2028            - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)
2029
2030                ![](https://vedo.embl.es/images/volumetric/isosurfaces.png)
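
                A minimal inline sketch (assumes the `embryo.slc` example dataset is reachable at `vedo.dataurl`):
                ```python
                from vedo import Volume, dataurl, show
                vol = Volume(dataurl + "embryo.slc")
                iso = vol.isosurface(flying_edges=True)  # isovalue picked automatically
                show(iso, axes=1).close()
                ```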
2031        """
2032        scrange = self.dataset.GetScalarRange()
2033
2034        if flying_edges:
2035            cf = vtki.new("FlyingEdges3D")
2036            cf.InterpolateAttributesOn()
2037        else:
2038            cf = vtki.new("ContourFilter")
2039            cf.UseScalarTreeOn()
2040
2041        cf.SetInputData(self.dataset)
2042        cf.ComputeNormalsOn()
2043
2044        if utils.is_sequence(value):
2045            cf.SetNumberOfContours(len(value))
2046            for i, t in enumerate(value):
2047                cf.SetValue(i, t)
2048        else:
2049            if value is None:
2050                value = (2 * scrange[0] + scrange[1]) / 3.0
2051                # print("automatic isosurface value =", value)
2052            cf.SetValue(0, value)
2053
2054        cf.Update()
2055        poly = cf.GetOutput()
2056
2057        out = vedo.mesh.Mesh(poly, c=None).phong()
2058        out.mapper.SetScalarRange(scrange[0], scrange[1])
2059        out.metadata["isovalue"] = value
2060
2061        out.pipeline = utils.OperationNode(
2062            "isosurface",
2063            parents=[self],
2064            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2065            c="#4cc9f0:#e9c46a",
2066        )
2067        return out
2068    
2069    def isosurface_discrete(
2070            self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0,
2071        ) -> "vedo.mesh.Mesh":
2072        """
2073        Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded,
2074        3D version of the multiple objects/labels Surface Nets algorithm.
2075        The input is a 3D image (i.e., volume) where each voxel is labeled
2076        (integer labels are preferred to real values), and the output data is a polygonal mesh separating
2077        labeled regions / objects.
2078        (Note that on output each region [corresponding to a different segmented object] will share
2079        points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).
2080        
2081        Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating
2082        the labels on either side of the polygons composing the output Mesh. 
2083        (This can be used for advanced operations like extracting shared/contacting boundaries between two objects.
2084        The name of this celldata array is "BoundaryLabels").
2085
2086        The values can be accessed with `mesh.metadata["isovalue"]`.
2087
2088        Arguments:
2089            values : (list)
2090                list of label values for which boundary surfaces should be generated.
2091            background_label : (float)
2092                this value specifies the label value to use when referencing the background
2093                region outside of any of the specified regions.
2094            internal_boundaries : (bool, list)
2095                if False, the output will only contain the outer boundary surface; internal surfaces between labeled regions are removed.
2096                If a list of integers is provided, only the boundaries between the specified labels will be extracted.
2097            use_quads : (bool)
2098                if True, the output polygons will be quads. If False, the output polygons will be triangles.
2099            nsmooth : (int)
2100                number of iterations of smoothing (0 means no smoothing).
2101
2102        Examples:
2103            - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
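
                A small synthetic sketch (untested; requires VTK>=9.3):
                ```python
                import numpy as np
                from vedo import Volume, show
                arr = np.zeros((60, 60, 60), dtype=np.uint8)  # label map with background 0
                arr[10:50, 10:50, 10:50] = 1
                arr[25:35, 25:35, 25:35] = 2
                surf = Volume(arr).isosurface_discrete([1, 2], background_label=0, nsmooth=5)
                show(surf, axes=1).close()
                ```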
2104        """
2105        logger = vtki.get_class("Logger")
2106        logger.SetStderrVerbosity(logger.VERBOSITY_ERROR)
2107
2108        snets = vtki.new("SurfaceNets3D")
2109        snets.SetInputData(self.dataset)
2110
2111        if nsmooth:
2112            snets.SmoothingOn()
2113            snets.AutomaticSmoothingConstraintsOn()
2114            snets.GetSmoother().SetNumberOfIterations(nsmooth)
2115            # snets.GetSmoother().SetRelaxationFactor(relaxation_factor)
2116            # snets.GetSmoother().SetConstraintDistance(constraint_distance)
2117        else:
2118            snets.SmoothingOff()
2119
2120        if internal_boundaries is False:
2121            snets.SetOutputStyleToBoundary()
2122        elif internal_boundaries is True:
2123            snets.SetOutputStyleToDefault()
2124        elif utils.is_sequence(internal_boundaries):
2125            snets.SetOutputStyleToSelected()
2126            snets.InitializeSelectedLabelsList()
2127            for val in internal_boundaries:
2128                snets.AddSelectedLabel(val)
2129        else:
2130            vedo.logger.error("isosurface_discrete(): unknown boundaries option")
2131
2132        n = len(values)
2133        snets.SetNumberOfContours(n)
2134        snets.SetNumberOfLabels(n)
2135
2136        if background_label is not None:
2137            snets.SetBackgroundLabel(background_label)
2138
2139        for i, val in enumerate(values):
2140            snets.SetValue(i, val)
2141        
2142        if use_quads:
2143            snets.SetOutputMeshTypeToQuads()
2144        else:
2145            snets.SetOutputMeshTypeToTriangles()
2146        snets.Update()
2147
2148        out = vedo.mesh.Mesh(snets.GetOutput())
2149        out.metadata["isovalue"] = values
2150        out.pipeline = utils.OperationNode(
2151            "isosurface_discrete",
2152            parents=[self],
2153            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2154            c="#4cc9f0:#e9c46a",
2155        )
2156
2157        logger.SetStderrVerbosity(logger.VERBOSITY_INFO)
2158        return out
2159
2160
2161    def legosurface(
2162        self,
2163        vmin=None,
2164        vmax=None,
2165        invert=False,
2166        boundary=True,
2167        array_name="input_scalars",
2168    ) -> "vedo.mesh.Mesh":
2169        """
2170        Represent an object - typically a `Volume` - as lego blocks (voxels).
2171        By default colors correspond to the volume's scalar values.
2172        Returns a `Mesh` object.
2173
2174        Arguments:
2175            vmin : (float)
2176                the lower threshold, voxels below this value are not shown.
2177            vmax : (float)
2178                the upper threshold, voxels above this value are not shown.
2179            boundary : (bool)
2180                controls whether to include cells that are partially inside
2181            array_name : (int, str)
2182                name or index of the scalar array to be considered
2183
2184        Examples:
2185            - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)
2186
2187                ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
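
                A minimal inline sketch (the threshold value is arbitrary; assumes the `embryo.slc` example dataset):
                ```python
                from vedo import Volume, dataurl, show
                vol = Volume(dataurl + "embryo.slc")
                lego = vol.legosurface(vmin=60)
                show(lego, axes=1).close()
                ```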
2188        """
2189        imp_dataset = vtki.new("ImplicitDataSet")
2190        imp_dataset.SetDataSet(self.dataset)
2191        window = vtki.new("ImplicitWindowFunction")
2192        window.SetImplicitFunction(imp_dataset)
2193
2194        srng = list(self.dataset.GetScalarRange())
2195        if vmin is not None:
2196            srng[0] = vmin
2197        if vmax is not None:
2198            srng[1] = vmax
2199        if not boundary:
2200            tol = 0.00001 * (srng[1] - srng[0])
2201            srng[0] -= tol
2202            srng[1] += tol
2203        window.SetWindowRange(srng)
2204        # print("legosurface window range:", srng)
2205
2206        extract = vtki.new("ExtractGeometry")
2207        extract.SetInputData(self.dataset)
2208        extract.SetImplicitFunction(window)
2209        extract.SetExtractInside(invert)
2210        extract.SetExtractBoundaryCells(boundary)
2211        extract.Update()
2212
2213        gf = vtki.new("GeometryFilter")
2214        gf.SetInputData(extract.GetOutput())
2215        gf.Update()
2216
2217        m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
2218        m.map_points_to_cells()
2219        m.celldata.select(array_name)
2220
2221        m.pipeline = utils.OperationNode(
2222            "legosurface",
2223            parents=[self],
2224            comment=f"array: {array_name}",
2225            c="#4cc9f0:#e9c46a",
2226        )
2227        return m
2228
2229    def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
2230        """
2231        Build a polygonal Mesh from the current object.
2232
2233        If `fill=True`, the interior faces of all the cells are created.
2234        (setting a `shrink` value slightly smaller than the default 1.0
2235        can avoid flickering due to internal adjacent faces).
2236
2237        If `fill=False`, only the boundary faces will be generated.
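
        Example (a small synthetic sketch):
        ```python
        import numpy as np
        from vedo import Volume, show
        vol = Volume(np.random.rand(20, 20, 20))
        msh = vol.tomesh(fill=True, shrink=0.9)  # voxels shown as shrunken cells
        show(msh, axes=1).close()
        ```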
2238        """
2239        gf = vtki.new("GeometryFilter")
2240        if fill:
2241            sf = vtki.new("ShrinkFilter")
2242            sf.SetInputData(self.dataset)
2243            sf.SetShrinkFactor(shrink)
2244            sf.Update()
2245            gf.SetInputData(sf.GetOutput())
2246            gf.Update()
2247            poly = gf.GetOutput()
2248            if shrink == 1.0:
2249                clean_poly = vtki.new("CleanPolyData")
2250                clean_poly.PointMergingOn()
2251                clean_poly.ConvertLinesToPointsOn()
2252                clean_poly.ConvertPolysToLinesOn()
2253                clean_poly.ConvertStripsToPolysOn()
2254                clean_poly.SetInputData(poly)
2255                clean_poly.Update()
2256                poly = clean_poly.GetOutput()
2257        else:
2258            gf.SetInputData(self.dataset)
2259            gf.Update()
2260            poly = gf.GetOutput()
2261
2262        msh = vedo.mesh.Mesh(poly).flat()
2263        msh.scalarbar = self.scalarbar
2264        lut = utils.ctf2lut(self)
2265        if lut:
2266            msh.mapper.SetLookupTable(lut)
2267
2268        msh.pipeline = utils.OperationNode(
2269            "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
2270        )
2271        return msh
class DataArrayHelper:
 49class DataArrayHelper:
 50    # Internal use only.
 51    # Helper class to manage data associated to either points (or vertices) and cells (or faces).
 52    def __init__(self, obj, association):
 53
 54        self.obj = obj
 55        self.association = association
 56
 57    def __getitem__(self, key):
 58
 59        if self.association == 0:
 60            data = self.obj.dataset.GetPointData()
 61
 62        elif self.association == 1:
 63            data = self.obj.dataset.GetCellData()
 64
 65        elif self.association == 2:
 66            data = self.obj.dataset.GetFieldData()
 67
 68            varr = data.GetAbstractArray(key)
 69            if isinstance(varr, vtki.vtkStringArray):
 70                if isinstance(key, int):
 71                    key = data.GetArrayName(key)
 72                n = varr.GetNumberOfValues()
 73                narr = [varr.GetValue(i) for i in range(n)]
 74                return narr
 75                ###########
 76
 77        else:
 78            raise RuntimeError()
 79
 80        if isinstance(key, int):
 81            key = data.GetArrayName(key)
 82
 83        arr = data.GetArray(key)
 84        if not arr:
 85            return None
 86        return utils.vtk2numpy(arr)
 87
 88    def __setitem__(self, key, input_array):
 89
 90        if self.association == 0:
 91            data = self.obj.dataset.GetPointData()
 92            n = self.obj.dataset.GetNumberOfPoints()
 93            self.obj.mapper.SetScalarModeToUsePointData()
 94
 95        elif self.association == 1:
 96            data = self.obj.dataset.GetCellData()
 97            n = self.obj.dataset.GetNumberOfCells()
 98            self.obj.mapper.SetScalarModeToUseCellData()
 99
100        elif self.association == 2:
101            data = self.obj.dataset.GetFieldData()
102            if not utils.is_sequence(input_array):
103                input_array = [input_array]
104
105            if isinstance(input_array[0], str):
106                varr = vtki.vtkStringArray()
107                varr.SetName(key)
108                varr.SetNumberOfComponents(1)
109                varr.SetNumberOfTuples(len(input_array))
110                for i, iarr in enumerate(input_array):
111                    if isinstance(iarr, np.ndarray):
112                        iarr = iarr.tolist()  # better format
113                        # Note: a string k can be converted to numpy with
114                        # import json; k = np.array(json.loads(k))
115                    varr.InsertValue(i, str(iarr))
116            else:
117                try:
118                    varr = utils.numpy2vtk(input_array, name=key)
119                except TypeError as e:
120                    vedo.logger.error(
121                        f"cannot create metadata with input object:\n"
122                        f"{input_array}"
123                        f"\n\nAllowed content examples are:\n"
124                        f"- flat list of strings ['a','b', 1, [1,2,3], ...]"
125                        f" (first item must be a string in this case)\n"
126                        f"  hint: use k = np.array(json.loads(k)) to convert strings\n"
127                        f"- numpy arrays of any shape"
128                    )
129                    raise e
130
131            data.AddArray(varr)
132            return  ############
133
134        else:
135            raise RuntimeError()
136
137        if len(input_array) != n:
138            vedo.logger.error(
139                f"Error in point/cell data: length of input {len(input_array)}"
140                f" !=  {n} nr. of elements"
141            )
142            raise RuntimeError()
143
144        input_array = np.asarray(input_array)
145        varr = utils.numpy2vtk(input_array, name=key)
146        data.AddArray(varr)
147
148        if len(input_array.shape) == 1:  # scalars
149            data.SetActiveScalars(key)
150            try:  # could be a volume mapper
151                self.obj.mapper.SetScalarRange(data.GetScalars().GetRange())
152            except AttributeError:
153                pass
154        elif len(input_array.shape) == 2 and input_array.shape[1] == 3:  # vectors
155            if key.lower() == "normals":
156                data.SetActiveNormals(key)
157            else:
158                data.SetActiveVectors(key)
159
160    def keys(self) -> List[str]:
161        """Return the list of available data array names"""
162        if self.association == 0:
163            data = self.obj.dataset.GetPointData()
164        elif self.association == 1:
165            data = self.obj.dataset.GetCellData()
166        elif self.association == 2:
167            data = self.obj.dataset.GetFieldData()
168        arrnames = []
169        for i in range(data.GetNumberOfArrays()):
170            name = ""
171            if self.association == 2:
172                name = data.GetAbstractArray(i).GetName()
173            else:
174                iarr = data.GetArray(i)
175                if iarr:
176                    name = iarr.GetName()
177            if name:
178                arrnames.append(name)
179        return arrnames
180
181    def items(self) -> List:
182        """Return the list of available data array `(names, values)`."""
183        if self.association == 0:
184            data = self.obj.dataset.GetPointData()
185        elif self.association == 1:
186            data = self.obj.dataset.GetCellData()
187        elif self.association == 2:
188            data = self.obj.dataset.GetFieldData()
189        arrnames = []
190        for i in range(data.GetNumberOfArrays()):
191            if self.association == 2:
192                name = data.GetAbstractArray(i).GetName()
193            else:
194                name = data.GetArray(i).GetName()
195            if name:
196                arrnames.append((name, self[name]))
197        return arrnames
198
199    def todict(self) -> dict:
200        """Return a dictionary of the available data arrays."""
201        return dict(self.items())
202
203    def rename(self, oldname: str, newname: str) -> None:
204        """Rename an array"""
205        if self.association == 0:
206            varr = self.obj.dataset.GetPointData().GetArray(oldname)
207        elif self.association == 1:
208            varr = self.obj.dataset.GetCellData().GetArray(oldname)
209        elif self.association == 2:
210            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
211        if varr:
212            varr.SetName(newname)
213        else:
214            vedo.logger.warning(
215                f"Cannot rename non existing array {oldname} to {newname}"
216            )
217
218    def remove(self, key: Union[int, str]) -> None:
219        """Remove a data array by name or number"""
220        if self.association == 0:
221            self.obj.dataset.GetPointData().RemoveArray(key)
222        elif self.association == 1:
223            self.obj.dataset.GetCellData().RemoveArray(key)
224        elif self.association == 2:
225            self.obj.dataset.GetFieldData().RemoveArray(key)
226
227    def clear(self) -> None:
228        """Remove all data associated to this object"""
229        if self.association == 0:
230            data = self.obj.dataset.GetPointData()
231        elif self.association == 1:
232            data = self.obj.dataset.GetCellData()
233        elif self.association == 2:
234            data = self.obj.dataset.GetFieldData()
235        for i in range(data.GetNumberOfArrays()):
236            if self.association == 2:
237                name = data.GetAbstractArray(i).GetName()
238            else:
239                name = data.GetArray(i).GetName()
240            data.RemoveArray(name)
241
242    def select(self, key: Union[int, str]) -> Any:
243        """Select one specific array by its name to make it the `active` one."""
244        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
245        # and NOT mapped through the lookup table, while everything else is.
246        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
247        # types are treated as colors with values in the range 0-255
248        # and floating types are treated as colors with values in the range 0.0-1.0.
249        # Setting ColorModeToMapScalars means that all scalar data will be mapped
250        # through the lookup table.
251        # (Note that for multi-component scalars, the particular component
252        # to use for mapping can be specified using the SelectColorArray() method.)
253        if self.association == 0:
254            data = self.obj.dataset.GetPointData()
255            self.obj.mapper.SetScalarModeToUsePointData()
256        else:
257            data = self.obj.dataset.GetCellData()
258            self.obj.mapper.SetScalarModeToUseCellData()
259
260        if isinstance(key, int):
261            key = data.GetArrayName(key)
262
263        arr = data.GetArray(key)
264        if not arr:
265            return self.obj
266
267        nc = arr.GetNumberOfComponents()
268        # print("GetNumberOfComponents", nc)
269        if nc == 1:
270            data.SetActiveScalars(key)
271        elif nc == 2:
272            data.SetTCoords(arr)
273        elif nc in (3, 4):
274            if "rgb" in key.lower(): # type: ignore
275                data.SetActiveScalars(key)
276                try:
277                    # could be a volume mapper
278                    self.obj.mapper.SetColorModeToDirectScalars()
279                    data.SetActiveVectors(None) # need this to fix bug in #1066
280                    # print("SetColorModeToDirectScalars for", key)
281                except AttributeError:
282                    pass
283            else:
284                data.SetActiveVectors(key)
285        elif nc == 9:
286            data.SetActiveTensors(key)
287        else:
288            vedo.logger.error(f"Cannot select array {key} with {nc} components")
289            return self.obj
290
291        try:
292            # could be a volume mapper
293            self.obj.mapper.SetArrayName(key)
294            self.obj.mapper.ScalarVisibilityOn()
295        except AttributeError:
296            pass
297
298        return self.obj
299
300    def select_texture_coords(self, key: Union[int,str]) -> Any:
301        """Select one specific array to be used as texture coordinates."""
302        if self.association == 0:
303            data = self.obj.dataset.GetPointData()
304        else:
305            vedo.logger.warning("texture coordinates are only available for point data")
306            return
307
308        if isinstance(key, int):
309            key = data.GetArrayName(key)
310        data.SetTCoords(data.GetArray(key))
311        return self.obj
312
313    def select_normals(self, key: Union[int,str]) -> Any:
314        """Select one specific normal array by its name to make it the "active" one."""
315        if self.association == 0:
316            data = self.obj.dataset.GetPointData()
317            self.obj.mapper.SetScalarModeToUsePointData()
318        else:
319            data = self.obj.dataset.GetCellData()
320            self.obj.mapper.SetScalarModeToUseCellData()
321
322        if isinstance(key, int):
323            key = data.GetArrayName(key)
324        data.SetActiveNormals(key)
325        return self.obj
326
327    def print(self, **kwargs) -> None:
328        """Print the array names available to terminal"""
329        colors.printc(self.keys(), **kwargs)
330
331    def __repr__(self) -> str:
332        """Representation"""
333
334        def _get_str(pd, header):
335            out = f"\x1b[2m\x1b[1m\x1b[7m{header}"
336            if pd.GetNumberOfArrays():
337                if self.obj.name:
338                    out += f" in {self.obj.name}"
339                out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m"
340                for i in range(pd.GetNumberOfArrays()):
341                    varr = pd.GetArray(i)
342                    out += f"\n\x1b[1m\x1b[4mArray name    : {varr.GetName()}\x1b[0m"
343                    out += "\nindex".ljust(15) + f": {i}"
344                    t = varr.GetDataType()
345                    if t in vtki.array_types:
346                        out += "\ntype".ljust(15)
347                        out += f": {vtki.array_types[t]}"
348                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
349                    out += "\nshape".ljust(15) + f": {shape}"
350                    out += "\nrange".ljust(15) + f": {np.array(varr.GetRange())}"
351                    out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}"
352                    out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}"
353                    out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB"
354            else:
355                out += " is empty.\x1b[0m"
356            return out
357
358        if self.association == 0:
359            out = _get_str(self.obj.dataset.GetPointData(), "Point Data")
360        elif self.association == 1:
361            out = _get_str(self.obj.dataset.GetCellData(), "Cell Data")
362        elif self.association == 2:
363            pd = self.obj.dataset.GetFieldData()
364            if pd.GetNumberOfArrays():
365                out = "\x1b[2m\x1b[1m\x1b[7mMeta Data"
366                if self.obj.name:
367                    out += f" in {self.obj.name}"
368                out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m"
369                for i in range(pd.GetNumberOfArrays()):
370                    varr = pd.GetAbstractArray(i)
371                    out += f"\n\x1b[1m\x1b[4mEntry name    : {varr.GetName()}\x1b[0m"
372                    out += "\nindex".ljust(15) + f": {i}"
373                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
374                    out += "\nshape".ljust(15) + f": {shape}"
375
376        return out
DataArrayHelper(obj, association)
52    def __init__(self, obj, association):
53
54        self.obj = obj
55        self.association = association
def keys(self) -> List[str]:
160    def keys(self) -> List[str]:
161        """Return the list of available data array names"""
162        if self.association == 0:
163            data = self.obj.dataset.GetPointData()
164        elif self.association == 1:
165            data = self.obj.dataset.GetCellData()
166        elif self.association == 2:
167            data = self.obj.dataset.GetFieldData()
168        arrnames = []
169        for i in range(data.GetNumberOfArrays()):
170            name = ""
171            if self.association == 2:
172                name = data.GetAbstractArray(i).GetName()
173            else:
174                iarr = data.GetArray(i)
175                if iarr:
176                    name = iarr.GetName()
177            if name:
178                arrnames.append(name)
179        return arrnames

Return the list of available data array names

def items(self) -> List:
181    def items(self) -> List:
182        """Return the list of available data array `(names, values)`."""
183        if self.association == 0:
184            data = self.obj.dataset.GetPointData()
185        elif self.association == 1:
186            data = self.obj.dataset.GetCellData()
187        elif self.association == 2:
188            data = self.obj.dataset.GetFieldData()
189        arrnames = []
190        for i in range(data.GetNumberOfArrays()):
191            if self.association == 2:
192                name = data.GetAbstractArray(i).GetName()
193            else:
194                name = data.GetArray(i).GetName()
195            if name:
196                arrnames.append((name, self[name]))
197        return arrnames

Return the list of available data array (names, values).

def todict(self) -> dict:
199    def todict(self) -> dict:
200        """Return a dictionary of the available data arrays."""
201        return dict(self.items())

Return a dictionary of the available data arrays.

def rename(self, oldname: str, newname: str) -> None:
203    def rename(self, oldname: str, newname: str) -> None:
204        """Rename an array"""
205        if self.association == 0:
206            varr = self.obj.dataset.GetPointData().GetArray(oldname)
207        elif self.association == 1:
208            varr = self.obj.dataset.GetCellData().GetArray(oldname)
209        elif self.association == 2:
210            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
211        if varr:
212            varr.SetName(newname)
213        else:
214            vedo.logger.warning(
215                f"Cannot rename non existing array {oldname} to {newname}"
216            )

Rename an array

def remove(self, key: Union[int, str]) -> None:
218    def remove(self, key: Union[int, str]) -> None:
219        """Remove a data array by name or number"""
220        if self.association == 0:
221            self.obj.dataset.GetPointData().RemoveArray(key)
222        elif self.association == 1:
223            self.obj.dataset.GetCellData().RemoveArray(key)
224        elif self.association == 2:
225            self.obj.dataset.GetFieldData().RemoveArray(key)

Remove a data array by name or number

def clear(self) -> None:
227    def clear(self) -> None:
228        """Remove all data associated to this object"""
229        if self.association == 0:
230            data = self.obj.dataset.GetPointData()
231        elif self.association == 1:
232            data = self.obj.dataset.GetCellData()
233        elif self.association == 2:
234            data = self.obj.dataset.GetFieldData()
235        for i in range(data.GetNumberOfArrays()):
236            if self.association == 2:
237                name = data.GetAbstractArray(i).GetName()
238            else:
239                name = data.GetArray(i).GetName()
240            data.RemoveArray(name)

Remove all data associated to this object

def select(self, key: Union[int, str]) -> Any:
242    def select(self, key: Union[int, str]) -> Any:
243        """Select one specific array by its name to make it the `active` one."""
244        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
245        # and NOT mapped through the lookup table, while everything else is.
246        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
247        # types are treated as colors with values in the range 0-255
248        # and floating types are treated as colors with values in the range 0.0-1.0.
249        # Setting ColorModeToMapScalars means that all scalar data will be mapped
250        # through the lookup table.
251        # (Note that for multi-component scalars, the particular component
252        # to use for mapping can be specified using the SelectColorArray() method.)
253        if self.association == 0:
254            data = self.obj.dataset.GetPointData()
255            self.obj.mapper.SetScalarModeToUsePointData()
256        else:
257            data = self.obj.dataset.GetCellData()
258            self.obj.mapper.SetScalarModeToUseCellData()
259
260        if isinstance(key, int):
261            key = data.GetArrayName(key)
262
263        arr = data.GetArray(key)
264        if not arr:
265            return self.obj
266
267        nc = arr.GetNumberOfComponents()
268        # print("GetNumberOfComponents", nc)
269        if nc == 1:
270            data.SetActiveScalars(key)
271        elif nc == 2:
272            data.SetTCoords(arr)
273        elif nc in (3, 4):
274            if "rgb" in key.lower(): # type: ignore
275                data.SetActiveScalars(key)
276                try:
277                    # could be a volume mapper
278                    self.obj.mapper.SetColorModeToDirectScalars()
279                    data.SetActiveVectors(None) # need this to fix bug in #1066
280                    # print("SetColorModeToDirectScalars for", key)
281                except AttributeError:
282                    pass
283            else:
284                data.SetActiveVectors(key)
285        elif nc == 9:
286            data.SetActiveTensors(key)
287        else:
288            vedo.logger.error(f"Cannot select array {key} with {nc} components")
289            return self.obj
290
291        try:
292            # could be a volume mapper
293            self.obj.mapper.SetArrayName(key)
294            self.obj.mapper.ScalarVisibilityOn()
295        except AttributeError:
296            pass
297
298        return self.obj

Select one specific array by its name to make it the active one.
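
A minimal sketch showing how the number of components decides what becomes "active"
(the array names are only placeholders):

```python
import numpy as np
from vedo import Sphere

s = Sphere()
s.pointdata["elevation"] = s.vertices[:, 2]             # 1 component -> scalar array
s.pointdata["displacement"] = np.zeros((s.npoints, 3))  # 3 components -> vector array
s.pointdata.select("elevation")     # becomes the active scalars
s.pointdata.select("displacement")  # becomes the active vectors
```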

def select_texture_coords(self, key: Union[int, str]) -> Any:
300    def select_texture_coords(self, key: Union[int,str]) -> Any:
301        """Select one specific array to be used as texture coordinates."""
302        if self.association == 0:
303            data = self.obj.dataset.GetPointData()
304        else:
305            vedo.logger.warning("texture coordinates are only available for point data")
306            return
307
308        if isinstance(key, int):
309            key = data.GetArrayName(key)
310        data.SetTCoords(data.GetArray(key))
311        return self.obj

Select one specific array to be used as texture coordinates.
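
A minimal sketch, assuming a `(npoints, 2)` array of u,v values in [0,1]
(the array and file names are only placeholders):

```python
import numpy as np
from vedo import Sphere

s = Sphere()
s.pointdata["myUV"] = np.random.rand(s.npoints, 2)  # hypothetical texture coordinates
s.pointdata.select_texture_coords("myUV")
# s.texture("some_image.jpg")  # a texture would then be mapped through "myUV"
```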

def select_normals(self, key: Union[int, str]) -> Any:
313    def select_normals(self, key: Union[int,str]) -> Any:
314        """Select one specific normal array by its name to make it the "active" one."""
315        if self.association == 0:
316            data = self.obj.dataset.GetPointData()
317            self.obj.mapper.SetScalarModeToUsePointData()
318        else:
319            data = self.obj.dataset.GetCellData()
320            self.obj.mapper.SetScalarModeToUseCellData()
321
322        if isinstance(key, int):
323            key = data.GetArrayName(key)
324        data.SetActiveNormals(key)
325        return self.obj

Select one specific normal array by its name to make it the "active" one.
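
A minimal sketch (the point normals array is typically named "Normals"):

```python
from vedo import Sphere

s = Sphere().compute_normals()
print(s.pointdata.keys())              # "Normals" is among the point data arrays
s.pointdata.select_normals("Normals")  # make it the active normals array
```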

def print(self, **kwargs) -> None:
327    def print(self, **kwargs) -> None:
328        """Print the available array names to the terminal."""
329        colors.printc(self.keys(), **kwargs)

Print the available array names to the terminal.
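
Keyword arguments are forwarded to `printc`, so for example:

```python
import numpy as np
from vedo import Sphere

s = Sphere()
s.pointdata["scalars"] = np.ones(s.npoints)  # placeholder array
s.pointdata.print(c="green")                 # print the key list in green
```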

class CommonAlgorithms:
 380class CommonAlgorithms:
 381    """Common algorithms."""
 382
 383    @property
 384    def pointdata(self):
 385        """
 386        Create and/or return a `numpy.array` associated to points (vertices).
 387        A data array can be indexed either as a string or by an integer number.
 388        E.g.:  `myobj.pointdata["arrayname"]`
 389
 390        Usage:
 391
 392            `myobj.pointdata.keys()` to return the available data array names
 393
 394            `myobj.pointdata.select(name)` to make this array the active one
 395
 396            `myobj.pointdata.remove(name)` to remove this array
 397        """
 398        return DataArrayHelper(self, 0)
 399
 400    @property
 401    def celldata(self):
 402        """
 403        Create and/or return a `numpy.array` associated to cells (faces).
 404        A data array can be indexed either as a string or by an integer number.
 405        E.g.:  `myobj.celldata["arrayname"]`
 406
 407        Usage:
 408
 409            `myobj.celldata.keys()` to return the available data array names
 410
 411            `myobj.celldata.select(name)` to make this array the active one
 412
 413            `myobj.celldata.remove(name)` to remove this array
 414        """
 415        return DataArrayHelper(self, 1)
 416
 417    @property
 418    def metadata(self):
 419        """
 420        Create and/or return a `numpy.array` associated to neither points nor cells (field data).
 421        A data array can be indexed either as a string or by an integer number.
 422        E.g.:  `myobj.metadata["arrayname"]`
 423
 424        Usage:
 425
 426            `myobj.metadata.keys()` to return the available data array names
 427
 428            `myobj.metadata.select(name)` to make this array the active one
 429
 430            `myobj.metadata.remove(name)` to remove this array
 431        """
 432        return DataArrayHelper(self, 2)
 433
 434    def memory_address(self) -> int:
 435        """
 436        Return a unique memory address integer which may serve as the ID of the
 437        object, or be passed to C++ code.
 438        """
 439        # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
 440        # https://github.com/tfmoraes/polydata_connectivity
 441        return int(self.dataset.GetAddressAsString("")[5:], 16)
 442
 443    def memory_size(self) -> int:
 444        """Return the size in bytes of the object in memory."""
 445        return self.dataset.GetActualMemorySize()
 446
 447    def modified(self) -> Self:
 448        """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
 449        self.dataset.GetPointData().Modified()
 450        scals = self.dataset.GetPointData().GetScalars()
 451        if scals:
 452            scals.Modified()
 453        return self
 454
 455    def box(self, scale=1, padding=0) -> "vedo.Mesh":
 456        """
 457        Return the bounding box as a new `Mesh` object.
 458
 459        Arguments:
 460            scale : (float)
 461                box size can be scaled by a factor
 462            padding : (float, list)
 463                a constant padding can be added (can be a list `[padx,pady,padz]`)
 464        """
 465        b = self.bounds()
 466        if not utils.is_sequence(padding):
 467            padding = [padding, padding, padding]
 468        length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
 469        tol = (length + width + height) / 30000  # useful for boxing text
 470        pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
 471        bx = vedo.shapes.Box(
 472            pos,
 473            length * scale + padding[0],
 474            width  * scale + padding[1],
 475            height * scale + padding[2],
 476            c="gray",
 477        )
 478        try:
 479            pr = vtki.vtkProperty()
 480            pr.DeepCopy(self.properties)
 481            bx.actor.SetProperty(pr)
 482            bx.properties = pr
 483        except (AttributeError, TypeError):
 484            pass
 485        bx.flat().lighting("off").wireframe(True)
 486        return bx
 487    
 488    def update_dataset(self, dataset, **kwargs) -> Self:
 489        """Update the dataset of the object with the provided VTK dataset."""
 490        self._update(dataset, **kwargs)
 491        return self
 492
 493    def bounds(self) -> np.ndarray:
 494        """
 495        Get the object bounds.
 496        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
 497        """
 498        try:  # this is very slow for large meshes
 499            pts = self.vertices
 500            xmin, ymin, zmin = np.min(pts, axis=0)
 501            xmax, ymax, zmax = np.max(pts, axis=0)
 502            return np.array([xmin, xmax, ymin, ymax, zmin, zmax])
 503        except (AttributeError, ValueError):
 504            return np.array(self.dataset.GetBounds())
 505
 506    def xbounds(self, i=None) -> np.ndarray:
 507        """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
 508        b = self.bounds()
 509        if i is not None:
 510            return b[i]
 511        return np.array([b[0], b[1]])
 512
 513    def ybounds(self, i=None) -> np.ndarray:
 514        """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
 515        b = self.bounds()
 516        if i == 0:
 517            return b[2]
 518        if i == 1:
 519            return b[3]
 520        return np.array([b[2], b[3]])
 521
 522    def zbounds(self, i=None) -> np.ndarray:
 523        """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
 524        b = self.bounds()
 525        if i == 0:
 526            return b[4]
 527        if i == 1:
 528            return b[5]
 529        return np.array([b[4], b[5]])
 530
 531    def diagonal_size(self) -> float:
 532        """Get the length of the diagonal of the bounding box."""
 533        b = self.bounds()
 534        return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)
 535
 536    def average_size(self) -> float:
 537        """
 538        Calculate and return the average size of the object.
 539        This is the mean of the vertex distances from the center of mass.
 540        """
 541        coords = self.vertices
 542        if coords.shape[0] == 0:
 543            return 0.0
 544        cm = np.mean(coords, axis=0)
 545        cc = coords - cm
 546        return np.mean(np.linalg.norm(cc, axis=1))
 547
 548    def center_of_mass(self) -> np.ndarray:
 549        """Get the center of mass of the object."""
 550        if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)):
 551            return np.array(self.dataset.GetCenter())
 552        cmf = vtki.new("CenterOfMass")
 553        cmf.SetInputData(self.dataset)
 554        cmf.Update()
 555        c = cmf.GetCenter()
 556        return np.array(c)
 557
 558    def copy_data_from(self, obj: Any) -> Self:
 559        """Copy all data (point and cell data) from this input object"""
 560        self.dataset.GetPointData().PassData(obj.dataset.GetPointData())
 561        self.dataset.GetCellData().PassData(obj.dataset.GetCellData())
 562        self.pipeline = utils.OperationNode(
 563            "copy_data_from",
 564            parents=[self, obj],
 565            comment=f"{obj.__class__.__name__}",
 566            shape="note",
 567            c="#ccc5b9",
 568        )
 569        return self
 570
 571    def inputdata(self):
 572        """Obsolete, use `.dataset` instead."""
 573        colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y")
 574        return self.dataset
 575
 576    @property
 577    def npoints(self):
 578        """Retrieve the number of points (or vertices)."""
 579        return self.dataset.GetNumberOfPoints()
 580
 581    @property
 582    def nvertices(self):
 583        """Retrieve the number of vertices (or points)."""
 584        return self.dataset.GetNumberOfPoints()
 585
 586    @property
 587    def ncells(self):
 588        """Retrieve the number of cells."""
 589        return self.dataset.GetNumberOfCells()
 590
 591    def cell_centers(self, copy_arrays=False) -> "vedo.Points":
 592        """
 593        Get the coordinates of the cell centers as a `Points` object.
 594
 595        Examples:
 596            - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
 597        """
 598        vcen = vtki.new("CellCenters")
 599        vcen.SetCopyArrays(copy_arrays)
 600        vcen.SetVertexCells(copy_arrays)
 601        vcen.SetInputData(self.dataset)
 602        vcen.Update()
 603        vpts = vedo.Points(vcen.GetOutput())
 604        if copy_arrays:
 605            vpts.copy_properties_from(self)
 606        return vpts
 607
 608    @property
 609    def lines(self):
 610        """
 611        Get lines connectivity ids as a python array
 612        formatted as `[[id0,id1], [id3,id4], ...]`
 613
 614        See also: `lines_as_flat_array()`.
 615        """
 616        # Get cell connectivity ids as a 1D array. The vtk format is:
 617        #    [nids1, id0 ... idn, nids2, id0 ... idm,  etc].
 618        try:
 619            arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData())
 620        except AttributeError:
 621            return np.array([])
 622        i = 0
 623        conn = []
 624        n = len(arr1d)
 625        for _ in range(n):
 626            cell = [arr1d[i + k + 1] for k in range(arr1d[i])]
 627            conn.append(cell)
 628            i += arr1d[i] + 1
 629            if i >= n:
 630                break
 631
 632        return conn  # cannot always make a numpy array of it!
 633
 634    @property
 635    def lines_as_flat_array(self):
 636        """
 637        Get lines connectivity ids as a 1D numpy array.
 638        Format is e.g. [2,  10,20,  3, 10,11,12,  2, 70,80, ...]
 639
 640        See also: `lines()`.
 641        """
 642        try:
 643            return utils.vtk2numpy(self.dataset.GetLines().GetData())
 644        except AttributeError:
 645            return np.array([])
 646
 647    def mark_boundaries(self) -> Self:
 648        """
 649        Mark cells and vertices if they lie on a boundary.
 650        A new array called `BoundaryCells` is added to the object.
 651        """
 652        mb = vtki.new("MarkBoundaryFilter")
 653        mb.SetInputData(self.dataset)
 654        mb.Update()
 655        self.dataset.DeepCopy(mb.GetOutput())
 656        self.pipeline = utils.OperationNode("mark_boundaries", parents=[self])
 657        return self
 658
 659    def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
 660        """
 661        Find cells that are within the specified bounds.
 662        """
 663        try:
 664            xbounds = list(xbounds.bounds())
 665        except AttributeError:
 666            pass
 667
 668        if len(xbounds) == 6:
 669            bnds = xbounds
 670        else:
 671            bnds = list(self.bounds())
 672            if len(xbounds) == 2:
 673                bnds[0] = xbounds[0]
 674                bnds[1] = xbounds[1]
 675            if len(ybounds) == 2:
 676                bnds[2] = ybounds[0]
 677                bnds[3] = ybounds[1]
 678            if len(zbounds) == 2:
 679                bnds[4] = zbounds[0]
 680                bnds[5] = zbounds[1]
 681
 682        cell_ids = vtki.vtkIdList()
 683        if not self.cell_locator:
 684            self.cell_locator = vtki.new("CellTreeLocator")
 685            self.cell_locator.SetDataSet(self.dataset)
 686            self.cell_locator.BuildLocator()
 687        self.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
 688        cids = []
 689        for i in range(cell_ids.GetNumberOfIds()):
 690            cid = cell_ids.GetId(i)
 691            cids.append(cid)
 692        return np.array(cids)
 693
 694    def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray:
 695        """
 696        Find cells that are intersected by a line segment.
 697        """
 698        cell_ids = vtki.vtkIdList()
 699        if not self.cell_locator:
 700            self.cell_locator = vtki.new("CellTreeLocator")
 701            self.cell_locator.SetDataSet(self.dataset)
 702            self.cell_locator.BuildLocator()
 703        self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
 704        cids = []
 705        for i in range(cell_ids.GetNumberOfIds()):
 706            cid = cell_ids.GetId(i)
 707            cids.append(cid)
 708        return np.array(cids)
 709
 710    def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray:
 711        """
 712        Find cells that are intersected by a plane.
 713        """
 714        cell_ids = vtki.vtkIdList()
 715        if not self.cell_locator:
 716            self.cell_locator = vtki.new("CellTreeLocator")
 717            self.cell_locator.SetDataSet(self.dataset)
 718            self.cell_locator.BuildLocator()
 719        self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
 720        cids = []
 721        for i in range(cell_ids.GetNumberOfIds()):
 722            cid = cell_ids.GetId(i)
 723            cids.append(cid)
 724        return np.array(cids)
 725
 726    def keep_cell_types(self, types=()):
 727        """
 728        Extract cells of a specific type.
 729
 730        Check the VTK cell types here:
 731        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
 732        """
 733        fe = vtki.new("ExtractCellsByType")
 734        fe.SetInputData(self.dataset)
 735        for t in types:
 736            try:
 737                if utils.is_integer(t):
 738                    it = t
 739                else:
 740                    it = vtki.cell_types[t.upper()]
 741            except KeyError:
 742                vedo.logger.error(f"Cell type '{t}' not recognized")
 743                continue
 744            fe.AddCellType(it)
 745        fe.Update()
 746        self._update(fe.GetOutput())
 747        return self
 748
 749    def map_cells_to_points(self, arrays=(), move=False) -> Self:
 750        """
 751        Interpolate cell data (i.e., data specified per cell or face)
 752        into point data (i.e., data specified at each vertex).
 753        The method of transformation is based on averaging the data values
 754        of all cells using a particular point.
 755
 756        A custom list of arrays to be mapped can be passed in input.
 757
 758        Set `move=True` to delete the original `celldata` array.
 759        """
 760        c2p = vtki.new("CellDataToPointData")
 761        c2p.SetInputData(self.dataset)
 762        if not move:
 763            c2p.PassCellDataOn()
 764        if arrays:
 765            c2p.ClearCellDataArrays()
 766            c2p.ProcessAllArraysOff()
 767            for arr in arrays:
 768                c2p.AddCellDataArray(arr)
 769        else:
 770            c2p.ProcessAllArraysOn()
 771        c2p.Update()
 772        self._update(c2p.GetOutput(), reset_locators=False)
 773        self.mapper.SetScalarModeToUsePointData()
 774        self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self])
 775        return self
 776
 777    @property
 778    def vertices(self):
 779        """Return the vertices (points) coordinates."""
 780        try:
 781            # for polydata and unstructured grid
 782            varr = self.dataset.GetPoints().GetData()
 783        except (AttributeError, TypeError):
 784            try:
 785                # for RectilinearGrid, StructuredGrid
 786                vpts = vtki.vtkPoints()
 787                self.dataset.GetPoints(vpts)
 788                varr = vpts.GetData()
 789            except (AttributeError, TypeError):
 790                try:
 791                    # for ImageData
 792                    v2p = vtki.new("ImageToPoints")
 793                    v2p.SetInputData(self.dataset)
 794                    v2p.Update()
 795                    varr = v2p.GetOutput().GetPoints().GetData()
 796                except AttributeError:
 797                    return np.array([])
 798
 799        return utils.vtk2numpy(varr)
 800
 801    # setter
 802    @vertices.setter
 803    def vertices(self, pts):
 804        """Set vertices (points) coordinates."""
 805        pts = utils.make3d(pts)
 806        arr = utils.numpy2vtk(pts, dtype=np.float32)
 807        try:
 808            vpts = self.dataset.GetPoints()
 809            vpts.SetData(arr)
 810            vpts.Modified()
 811        except (AttributeError, TypeError):
 812            vedo.logger.error(f"Cannot set vertices for {type(self)}")
 813            return self
 814        # reset mesh to identity matrix position/rotation:
 815        self.point_locator = None
 816        self.cell_locator = None
 817        self.line_locator = None
 818        self.transform = LinearTransform()
 819
 820    @property
 821    def points(self):
 822        """Return the vertices (points) coordinates. Same as `vertices`."""
 823        return self.vertices
 824
 825    @points.setter
 826    def points(self, pts):
 827        """Set vertices (points) coordinates. Same as `vertices`."""
 828        self.vertices = pts
 829
 830    @property
 831    def coordinates(self):
 832        """Return the vertices (points) coordinates. Same as `vertices`."""
 833        return self.vertices
 834
 835    @coordinates.setter
 836    def coordinates(self, pts):
 837        """Set vertices (points) coordinates. Same as `vertices`."""
 838        self.vertices = pts
 839
 840    @property
 841    def cells_as_flat_array(self):
 842        """
 843        Get cell connectivity ids as a 1D numpy array.
 844        Format is e.g. [3,  10,20,30  4, 10,11,12,13  ...]
 845        """
 846        try:
 847            # valid for unstructured grid
 848            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
 849        except AttributeError:
 850            # valid for polydata
 851            arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
 852        return arr1d
 853
 854    @property
 855    def cells(self):
 856        """
 857        Get the cells connectivity ids as a numpy array.
 858
 859        The output format is: `[[id0 ... idn], [id0 ... idm],  etc]`.
 860        """
 861        try:
 862            # valid for unstructured grid
 863            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
 864        except AttributeError:
 865            try:
 866                # valid for polydata
 867                arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
 868            except AttributeError:
 869                vedo.logger.warning(f"Cannot get cells for {type(self)}")
 870                return np.array([])
 871
 872        # Get cell connectivity ids as a 1D array. vtk format is:
 873        # [nids1, id0 ... idn, nids2, id0 ... idm,  etc].
 874        i = 0
 875        conn = []
 876        n = len(arr1d)
 877        if n:
 878            while True:
 879                cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)]
 880                conn.append(cell)
 881                i += arr1d[i] + 1
 882                if i >= n:
 883                    break
 884        return conn
 885    
 886    def cell_edge_neighbors(self):
 887        """
 888        Get the cell neighbor indices of each cell.
 889
 890        Returns a python list of lists.
 891        """
 892
 893        def face_to_edges(face):
 894            edges = []
 895            size = len(face)
 896            for i in range(1, size + 1):
 897                if i == size:
 898                    edges.append([face[i - 1], face[0]])
 899                else:
 900                    edges.append([face[i - 1], face[i]])
 901            return edges
 902
 903        pd = self.dataset
 904        pd.BuildLinks()
 905
 906        neicells = []
 907        for i, cell in enumerate(self.cells):
 908            nn = []
 909            for edge in face_to_edges(cell):
 910                neighbors = vtki.vtkIdList()
 911                pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors)
 912                if neighbors.GetNumberOfIds() > 0:
 913                    neighbor = neighbors.GetId(0)
 914                    nn.append(neighbor)
 915            neicells.append(nn)
 916
 917        return neicells
 918
 919
 920    def map_points_to_cells(self, arrays=(), move=False) -> Self:
 921        """
 922        Interpolate point data (i.e., data specified per point or vertex)
 923        into cell data (i.e., data specified per cell).
 924        The method of transformation is based on averaging the data values
 925        of all points defining a particular cell.
 926
 927        A custom list of arrays to be mapped can be passed in input.
 928
 929        Set `move=True` to delete the original `pointdata` array.
 930
 931        Examples:
 932            - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
 933        """
 934        p2c = vtki.new("PointDataToCellData")
 935        p2c.SetInputData(self.dataset)
 936        if not move:
 937            p2c.PassPointDataOn()
 938        if arrays:
 939            p2c.ClearPointDataArrays()
 940            p2c.ProcessAllArraysOff()
 941            for arr in arrays:
 942                p2c.AddPointDataArray(arr)
 943        else:
 944            p2c.ProcessAllArraysOn()
 945        p2c.Update()
 946        self._update(p2c.GetOutput(), reset_locators=False)
 947        self.mapper.SetScalarModeToUseCellData()
 948        self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self])
 949        return self
 950
 951    def resample_data_from(self, source, tol=None, categorical=False) -> Self:
 952        """
 953        Resample point and cell data from another dataset.
 954        The output has the same structure but its point data have
 955        the resampled values from target.
 956
 957        Use `tol` to set the tolerance used to compute whether
 958        a point in the source is in a cell of the current object.
 959        Points without resampled values, and their cells, are marked as blank.
 960        If the data is categorical, then the resulting data will be determined
 961        by a nearest neighbor interpolation scheme.
 962
 963        Example:
 964        ```python
 965        from vedo import *
 966        m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
 967        pts = m1.coordinates
 968        ces = m1.cell_centers().coordinates
 969        m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
 970        m1.celldata["yvalues"]  = np.power(ces[:,1], 3)
 971        m2 = Mesh(dataurl+'bunny.obj')
 972        m2.resample_data_from(m1)
 973        # print(m2.pointdata["xvalues"])
 974        show(m1, m2 , N=2, axes=1)
 975        ```
 976        """
 977        rs = vtki.new("ResampleWithDataSet")
 978        rs.SetInputData(self.dataset)
 979        rs.SetSourceData(source.dataset)
 980
 981        rs.SetPassPointArrays(True)
 982        rs.SetPassCellArrays(True)
 983        rs.SetPassFieldArrays(True)
 984        rs.SetCategoricalData(categorical)
 985
 986        rs.SetComputeTolerance(True)
 987        if tol:
 988            rs.SetComputeTolerance(False)
 989            rs.SetTolerance(tol)
 990        rs.Update()
 991        self._update(rs.GetOutput(), reset_locators=False)
 992        self.pipeline = utils.OperationNode(
 993            "resample_data_from",
 994            comment=f"{source.__class__.__name__}",
 995            parents=[self, source],
 996        )
 997        return self
 998
 999    def interpolate_data_from(
1000        self,
1001        source,
1002        radius=None,
1003        n=None,
1004        kernel="shepard",
1005        exclude=("Normals",),
1006        on="points",
1007        null_strategy=1,
1008        null_value=0,
1009    ) -> Self:
1010        """
1011        Interpolate over source to port its data onto the current object using various kernels.
1012
1013        If n (the number of closest points to use) is set, then the radius value is ignored.
1014
1015        Check out also:
1016            `probe()` which in many cases can be faster.
1017
1018        Arguments:
1019            kernel : (str)
1020                available kernels are [shepard, gaussian, linear]
1021            null_strategy : (int)
1022                specify a strategy to use when encountering a "null" point
1023                during the interpolation process. Null points occur when the local neighborhood
1024                (of nearby points to interpolate from) is empty.
1025
1026                - Case 0: an output array is created that marks points
1027                  as being valid (=1) or null (invalid =0), and the null_value is set as well
1028                - Case 1: the output data value(s) are set to the provided null_value
1029                - Case 2: simply use the closest point to perform the interpolation.
1030            null_value : (float)
1031                see above.
1032
1033        Examples:
1034            - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
1035            - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
1036            - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
1037            - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
1038
1039                ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png)
1040        """
1041        if radius is None and not n:
1042            vedo.logger.error("in interpolate_data_from(): please set either radius or n")
1043            raise RuntimeError
1044
1045        if on == "points":
1046            points = source.dataset
1047        elif on == "cells":
1048            c2p = vtki.new("CellDataToPointData")
1049            c2p.SetInputData(source.dataset)
1050            c2p.Update()
1051            points = c2p.GetOutput()
1052        else:
1053            vedo.logger.error("in interpolate_data_from(), on must be on points or cells")
1054            raise RuntimeError()
1055
1056        locator = vtki.new("PointLocator")
1057        locator.SetDataSet(points)
1058        locator.BuildLocator()
1059
1060        if kernel.lower() == "shepard":
1061            kern = vtki.new("ShepardKernel")
1062            kern.SetPowerParameter(2)
1063        elif kernel.lower() == "gaussian":
1064            kern = vtki.new("GaussianKernel")
1065            kern.SetSharpness(2)
1066        elif kernel.lower() == "linear":
1067            kern = vtki.new("LinearKernel")
1068        else:
1069            vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
1070            raise RuntimeError()
1071
1072        if n:
1073            kern.SetNumberOfPoints(n)
1074            kern.SetKernelFootprintToNClosest()
1075        else:
1076            kern.SetRadius(radius)
1077            kern.SetKernelFootprintToRadius()
1078
1079        interpolator = vtki.new("PointInterpolator")
1080        interpolator.SetInputData(self.dataset)
1081        interpolator.SetSourceData(points)
1082        interpolator.SetKernel(kern)
1083        interpolator.SetLocator(locator)
1084        interpolator.PassFieldArraysOn()
1085        interpolator.SetNullPointsStrategy(null_strategy)
1086        interpolator.SetNullValue(null_value)
1087        interpolator.SetValidPointsMaskArrayName("ValidPointMask")
1088        for ex in exclude:
1089            interpolator.AddExcludedArray(ex)
1090
1091        # remove arrays that are already present in the source
1092        # this is because the interpolator will ignore them otherwise
1093        for i in range(self.dataset.GetPointData().GetNumberOfArrays()):
1094            name = self.dataset.GetPointData().GetArrayName(i)
1095            if name not in exclude:
1096                self.dataset.GetPointData().RemoveArray(name)
1097
1098        interpolator.Update()
1099
1100        if on == "cells":
1101            p2c = vtki.new("PointDataToCellData")
1102            p2c.SetInputData(interpolator.GetOutput())
1103            p2c.Update()
1104            cpoly = p2c.GetOutput()
1105        else:
1106            cpoly = interpolator.GetOutput()
1107
1108        self._update(cpoly, reset_locators=False)
1109
1110        self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source])
1111        return self
1112
1113    def add_ids(self) -> Self:
1114        """
1115        Generate point and cell ids arrays.
1116
1117        Two new arrays are added to the mesh: `PointID` and `CellID`.
1118        """
1119        ids = vtki.new("IdFilter")
1120        ids.SetInputData(self.dataset)
1121        ids.PointIdsOn()
1122        ids.CellIdsOn()
1123        ids.FieldDataOff()
1124        ids.SetPointIdsArrayName("PointID")
1125        ids.SetCellIdsArrayName("CellID")
1126        ids.Update()
1127        self._update(ids.GetOutput(), reset_locators=False)
1128        self.pipeline = utils.OperationNode("add_ids", parents=[self])
1129        return self
1130
1131    def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray:
1132        """
1133        Compute and return the gradient of the active scalar field as a numpy array.
1134
1135        Arguments:
1136            input_array : (str)
1137                array of the scalars to compute the gradient,
1138                if None the current active array is selected
1139            on : (str)
1140                compute either on 'points' or 'cells' data
1141            fast : (bool)
1142                if True, will use a less accurate algorithm
1143                that performs fewer derivative calculations (and is therefore faster).
1144
1145        Examples:
1146            - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
1147
1148            ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png)
1149        """
1150        gra = vtki.new("GradientFilter")
1151        if on.startswith("p"):
1152            varr = self.dataset.GetPointData()
1153            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1154        elif on.startswith("c"):
1155            varr = self.dataset.GetCellData()
1156            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1157        else:
1158            vedo.logger.error(f"in gradient: unknown option {on}")
1159            raise RuntimeError
1160
1161        if input_array is None:
1162            if varr.GetScalars():
1163                input_array = varr.GetScalars().GetName()
1164            else:
1165                vedo.logger.error(f"in gradient: no scalars found for {on}")
1166                raise RuntimeError
1167
1168        gra.SetInputData(self.dataset)
1169        gra.SetInputScalars(tp, input_array)
1170        gra.SetResultArrayName("Gradient")
1171        gra.SetFasterApproximation(fast)
1172        gra.ComputeDivergenceOff()
1173        gra.ComputeVorticityOff()
1174        gra.ComputeGradientOn()
1175        gra.Update()
1176        # self._update(gra.GetOutput(), reset_locators=False)
1177        if on.startswith("p"):
1178            gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
1179        else:
1180            gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
1181        return gvecs
1182
1183    def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray:
1184        """
1185        Compute and return the divergence of a vector field as a numpy array.
1186
1187        Arguments:
1188            array_name : (str)
1189                name of the array of vectors to compute the divergence,
1190                if None the current active array is selected
1191            on : (str)
1192                compute either on 'points' or 'cells' data
1193            fast : (bool)
1194                if True, will use a less accurate algorithm
1195                that performs fewer derivative calculations (and is therefore faster).
1196        """
1197        div = vtki.new("GradientFilter")
1198        if on.startswith("p"):
1199            varr = self.dataset.GetPointData()
1200            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1201        elif on.startswith("c"):
1202            varr = self.dataset.GetCellData()
1203            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1204        else:
1205            vedo.logger.error(f"in divergence(): unknown option {on}")
1206            raise RuntimeError
1207
1208        if array_name is None:
1209            if varr.GetVectors():
1210                array_name = varr.GetVectors().GetName()
1211            else:
1212                vedo.logger.error(f"in divergence(): no vectors found for {on}")
1213                raise RuntimeError
1214
1215        div.SetInputData(self.dataset)
1216        div.SetInputScalars(tp, array_name)
1217        div.ComputeDivergenceOn()
1218        div.ComputeGradientOff()
1219        div.ComputeVorticityOff()
1220        div.SetDivergenceArrayName("Divergence")
1221        div.SetFasterApproximation(fast)
1222        div.Update()
1223        # self._update(div.GetOutput(), reset_locators=False)
1224        if on.startswith("p"):
1225            dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
1226        else:
1227            dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
1228        return dvecs
1229
1230    def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray:
1231        """
1232        Compute and return the vorticity of a vector field as a numpy array.
1233
1234        Arguments:
1235            array_name : (str)
1236                name of the array to compute the vorticity,
1237                if None the current active array is selected
1238            on : (str)
1239                compute either on 'points' or 'cells' data
1240            fast : (bool)
1241                if True, will use a less accurate algorithm
1242                that performs fewer derivative calculations (and is therefore faster).
1243        """
1244        vort = vtki.new("GradientFilter")
1245        if on.startswith("p"):
1246            varr = self.dataset.GetPointData()
1247            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1248        elif on.startswith("c"):
1249            varr = self.dataset.GetCellData()
1250            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1251        else:
1252            vedo.logger.error(f"in vorticity(): unknown option {on}")
1253            raise RuntimeError
1254
1255        if array_name is None:
1256            if varr.GetVectors():
1257                array_name = varr.GetVectors().GetName()
1258            else:
1259                vedo.logger.error(f"in vorticity(): no vectors found for {on}")
1260                raise RuntimeError
1261
1262        vort.SetInputData(self.dataset)
1263        vort.SetInputScalars(tp, array_name)
1264        vort.ComputeDivergenceOff()
1265        vort.ComputeGradientOff()
1266        vort.ComputeVorticityOn()
1267        vort.SetVorticityArrayName("Vorticity")
1268        vort.SetFasterApproximation(fast)
1269        vort.Update()
1270        if on.startswith("p"):
1271            vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
1272        else:
1273            vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
1274        return vvecs
1275
1276    def probe(
1277            self,
1278            source,
1279            categorical=False,
1280            snap=False,
1281            tol=0,
1282        ) -> Self:
1283        """
1284        Takes a data set and probes its scalars at the specified points in space.
1285
1286        Note that a mask is also output with valid/invalid points which can be accessed
1287        with `mesh.pointdata['ValidPointMask']`.
1288
1289        Arguments:
1290            source : any dataset
1291                the data set to probe.
1292            categorical : bool
1293                control whether the source pointdata is to be treated as categorical.
1294            snap : bool
1295                snap to the cell with the closest point if no cell was found
1296            tol : float
1297                the tolerance to use when performing the probe.
1298
1299        Check out also:
1300            `interpolate_data_from()` and `tovolume()`
1301
1302        Examples:
1303            - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)
1304
1305                ![](https://vedo.embl.es/images/volumetric/probePoints.png)
1306        """
1307        probe_filter = vtki.new("ProbeFilter")
1308        probe_filter.SetSourceData(source.dataset)
1309        probe_filter.SetInputData(self.dataset)
1310        probe_filter.PassCellArraysOn()
1311        probe_filter.PassFieldArraysOn()
1312        probe_filter.PassPointArraysOn()
1313        probe_filter.SetCategoricalData(categorical)
1314        probe_filter.ComputeToleranceOff()
1315        if tol:
1316            probe_filter.ComputeToleranceOn()
1317            probe_filter.SetTolerance(tol)
1318        probe_filter.SetSnapToCellWithClosestPoint(snap)
1319        probe_filter.Update()
1320        self._update(probe_filter.GetOutput(), reset_locators=False)
1321        self.pipeline = utils.OperationNode("probe", parents=[self, source])
1322        self.pointdata.rename("vtkValidPointMask", "ValidPointMask")
1323        return self
1324
1325    def compute_cell_size(self) -> Self:
1326        """
1327        Add to this object a cell data array
1328        containing the area, volume and edge length
1329        of the cells (when applicable to the object type).
1330
1331        Array names are: `Area`, `Volume`, `Length`.
1332        """
1333        csf = vtki.new("CellSizeFilter")
1334        csf.SetInputData(self.dataset)
1335        csf.SetComputeArea(1)
1336        csf.SetComputeVolume(1)
1337        csf.SetComputeLength(1)
1338        csf.SetComputeVertexCount(0)
1339        csf.SetAreaArrayName("Area")
1340        csf.SetVolumeArrayName("Volume")
1341        csf.SetLengthArrayName("Length")
1342        csf.Update()
1343        self._update(csf.GetOutput(), reset_locators=False)
1344        return self
1345
1346    def generate_random_data(self) -> Self:
1347        """Fill a dataset with random attributes"""
1348        gen = vtki.new("RandomAttributeGenerator")
1349        gen.SetInputData(self.dataset)
1350        gen.GenerateAllDataOn()
1351        gen.SetDataTypeToFloat()
1352        gen.GeneratePointNormalsOff()
1353        gen.GeneratePointTensorsOn()
1354        gen.GenerateCellScalarsOn()
1355        gen.Update()
1356        self._update(gen.GetOutput(), reset_locators=False)
1357        self.pipeline = utils.OperationNode("generate_random_data", parents=[self])
1358        return self
1359
1360    def integrate_data(self) -> dict:
1361        """
1362        Integrate point and cell data arrays while computing length,
1363        area or volume of the domain. It works for 1D, 2D or 3D cells.
1364    
1365        For volumetric datasets, this filter ignores all but 3D cells.
1366        It will not compute the volume contained in a closed surface.
1367
1368        Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
1369        which contain the integration result for the corresponding attributes.
1370
1371        Examples:
1372            ```python
1373            from vedo import *
1374            surf = Sphere(res=100)
1375            surf.pointdata['scalars'] = np.ones(surf.npoints)
1376            data = surf.integrate_data()
1377            print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
1378            ```
1379
1380            ```python
1381            from vedo import *
1382
1383            xcoords1 = np.arange(0, 2.2, 0.2)
1384            xcoords2 = sqrt(np.arange(0, 4.2, 0.2))
1385
1386            ycoords = np.arange(0, 1.2, 0.2)
1387
1388            surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
1389            surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)
1390
1391            surf1.pointdata['scalars'] = surf1.vertices[:,2]
1392            surf2.pointdata['scalars'] = surf2.vertices[:,2]
1393
1394            data1 = surf1.integrate_data()
1395            data2 = surf2.integrate_data()
1396
1397            print(data1['pointdata']['scalars'],
1398                "is equal to",
1399                data2['pointdata']['scalars'],
1400                "even if the grids are different!",
1401                "(= the volume under the surface)"
1402            )
1403            show(surf1, surf2, N=2, axes=1).close()
1404            ```
1405        """
1406        vinteg = vtki.new("IntegrateAttributes")
1407        vinteg.SetInputData(self.dataset)
1408        vinteg.Update()
1409        ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
1410        data = dict(
1411            pointdata=ugrid.pointdata.todict(),
1412            celldata=ugrid.celldata.todict(),
1413            metadata=ugrid.metadata.todict(),
1414        )
1415        return data
1416
1417    def write(self, filename, binary=True) -> None:
1418        """Write object to file."""
1419        out = vedo.file_io.write(self, filename, binary)
1420        out.pipeline = utils.OperationNode(
1421            "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c"
1422        )
1423
1424    def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh":
1425        """
1426        Extract boundary geometry from dataset (or convert data to polygonal type).
1427
1428        Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
1429        to keep track of the original mesh elements.
1430
1431        Arguments:
1432            bounds : (list)
1433                specify a sub-region to extract
1434            shrink : (float)
1435                shrink the cells to a fraction of their original size
1436        """
1437        geo = vtki.new("GeometryFilter")
1438
1439        if shrink:
1440            sf = vtki.new("ShrinkFilter")
1441            sf.SetInputData(self.dataset)
1442            sf.SetShrinkFactor(shrink)
1443            sf.Update()
1444            geo.SetInputData(sf.GetOutput())
1445        else:
1446            geo.SetInputData(self.dataset)
1447
1448        geo.SetPassThroughCellIds(1)
1449        geo.SetPassThroughPointIds(1)
1450        geo.SetOriginalCellIdsName("OriginalCellIds")
1451        geo.SetOriginalPointIdsName("OriginalPointIds")
1452        geo.SetNonlinearSubdivisionLevel(1)
1453        # geo.MergingOff() # crashes on StructuredGrids
1454        if bounds:
1455            geo.SetExtent(bounds)
1456            geo.ExtentClippingOn()
1457        geo.Update()
1458        msh = vedo.mesh.Mesh(geo.GetOutput())
1459        msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b")
1460        return msh
1461
1462    def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
1463        """
1464        Compute the `Volume` object whose voxels contain the signed distance from
1465        the object. The calling object must have "Normals" defined.
1466
1467        Arguments:
1468            bounds : (list, actor)
1469                bounding box sizes
1470            dims : (list)
1471                dimensions (nr. of voxels) of the output volume.
1472            invert : (bool)
1473                flip the sign
1474            max_radius : (float)
1475                specify how far out to propagate distance calculation
1476
1477        Examples:
1478            - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)
1479
1480                ![](https://vedo.embl.es/images/basic/distance2mesh.png)
1481        """
1482        if bounds is None:
1483            bounds = self.bounds()
1484        if max_radius is None:
1485            max_radius = self.diagonal_size() / 2
1486        dist = vtki.new("SignedDistance")
1487        dist.SetInputData(self.dataset)
1488        dist.SetRadius(max_radius)
1489        dist.SetBounds(bounds)
1490        dist.SetDimensions(dims)
1491        dist.Update()
1492        img = dist.GetOutput()
1493        if invert:
1494            mat = vtki.new("ImageMathematics")
1495            mat.SetInput1Data(img)
1496            mat.SetOperationToMultiplyByK()
1497            mat.SetConstantK(-1)
1498            mat.Update()
1499            img = mat.GetOutput()
1500
1501        vol = vedo.Volume(img)
1502        vol.name = "SignedDistanceVolume"
1503        vol.pipeline = utils.OperationNode(
1504            "signed_distance",
1505            parents=[self],
1506            comment=f"dims={tuple(vol.dimensions())}",
1507            c="#e9c46a:#0096c7",
1508        )
1509        return vol
1510    
1511    def unsigned_distance(
1512            self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
1513        """
1514        Compute the `Volume` object whose voxels contain the unsigned distance.
1515        """
1516        dist = vtki.new("UnsignedDistance")
1517        dist.SetInputData(self.dataset)
1518        dist.SetDimensions(dims)
1519
1520        if len(bounds) == 6:
1521            dist.SetBounds(bounds)
1522        # elif bounds == "auto":
1523        #     dist.AdjustBoundsOn()
1524        else:
1525            dist.SetBounds(self.bounds())
1526        if not max_radius:
1527            max_radius = self.diagonal_size() / 10
1528        dist.SetRadius(max_radius)
1529
1530        if self.point_locator:
1531            dist.SetLocator(self.point_locator)
1532        
1533        if cap_value is not None:
1534            dist.CappingOn()
1535            dist.SetCapValue(cap_value)
1536        dist.SetOutputScalarTypeToFloat()
1537        dist.Update()
1538        vol = vedo.Volume(dist.GetOutput())
1539        vol.name = "UnsignedDistanceVolume"
1540        vol.pipeline = utils.OperationNode(
1541            "unsigned_distance", parents=[self], c="#e9c46a:#0096c7")
1542        return vol
1543
1544    def smooth_data(self, 
1545            niter=10, relaxation_factor=0.1, strategy=0, mask=None,
1546            mode="distance2",
1547            exclude=("Normals", "TextureCoordinates"),
1548        ) -> Self:
1549        """
1550        Smooth point attribute data using distance weighted Laplacian kernel.
1551        The effect is to blur regions of high variation and emphasize low variation regions.
1552
1553        A central concept of this method is the point smoothing stencil.
1554        A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
1555        To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
1556        the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1).
1557        This averaging process is repeated until the maximum number of iterations is reached.
1558
1559        The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
1560        The a(i+1) attributes are determined from the a(i) attributes as follows:
1561            a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))
1562    
1563        Convergence occurs faster for larger relaxation factors.
1564        Typically a small number of iterations is required for large relaxation factors,
1565        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
1566        adequate (i.e., just a distance weighted average is computed).
1567
1568        Warning:
1569            Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1;
1570            after attribute smoothing this constraint is likely to be violated.
1571            Other vectors and tensors may suffer from similar issues.
1572            In such a situation, list such arrays in `exclude=...` so they are not smoothed (and are simply passed through to the output).
1573            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
1574            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
1575            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n,
1576            where n is the number of connected points in the stencil.
1577            The smoothing process reduces high frequency information in the data attributes.
1578            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost,
1579            and the attributes will move towards an "average" value.
1580            While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use
1581            an image-based algorithm to perform data smoothing.
1582            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
1583            (and hence the associated points defining the edge). 
1584
1585        Arguments:
1586            niter : (int)
1587                number of iterations
1588            relaxation_factor : (float)
1589                relaxation factor controlling the amount of Laplacian smoothing applied
1590            strategy : (int)
1591                strategy to use for Laplacian smoothing
1592                    - 0: use all points, all point data attributes are smoothed
1593                    - 1: smooth all point attribute data except those on the boundary
1594                    - 2: only point data connected to a boundary point are smoothed
1595            mask : (str, np.ndarray)
1596                array to be used as a mask (the strategy keyword is then ignored)
1597            mode : (str)
1598                smoothing mode, either "distance2", "distance" or "average"
1599                    - distance**2 weighted (i.e., 1/r**2 interpolation weights)
1600                    - distance weighted (i.e., 1/r) approach;
1601                    - simple average of all connected points in the stencil
1602            exclude : (list)
1603                list of arrays to be excluded from smoothing
1604        """
1605        try:
1606            saf = vtki.new("AttributeSmoothingFilter")
1607        except:
1608            vedo.logger.error("smooth_data() only available in VTK>=9.3.0")
1609            return self
1610        saf.SetInputData(self.dataset)
1611        saf.SetRelaxationFactor(relaxation_factor)
1612        saf.SetNumberOfIterations(niter)
1613
1614        for ex in exclude:
1615            saf.AddExcludedArray(ex)
1616
1617        if mode == "distance":
1618            saf.SetWeightsTypeToDistance()
1619        elif mode == "distance2":
1620            saf.SetWeightsTypeToDistance2()
1621        elif mode == "average":
1622            saf.SetWeightsTypeToAverage()
1623        else:
1624            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
1625            raise TypeError
1626
1627        saf.SetSmoothingStrategy(strategy)
1628        if mask is not None:
1629            saf.SetSmoothingStrategyToSmoothingMask()
1630            if isinstance(mask, str):
1631                mask_ = self.dataset.GetPointData().GetArray(mask)
1632                if not mask_:
1633                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
1634                    return self
1635                mask_array = vtki.vtkUnsignedCharArray()
1636                mask_array.ShallowCopy(mask_)
1637                mask_array.SetName(mask_.GetName())
1638            else:
1639                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
1640            saf.SetSmoothingMask(mask_array)
1641
1642        saf.Update()
1643
1644        self._update(saf.GetOutput())
1645        self.pipeline = utils.OperationNode(
1646            "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b"
1647        )
1648        return self
1649        
1650    def compute_streamlines(
1651            self, 
1652            seeds: Any, 
1653            integrator="rk4",
1654            direction="forward",
1655            initial_step_size=None,
1656            max_propagation=None,
1657            max_steps=10000,
1658            step_length=0,
1659            surface_constrained=False,
1660            compute_vorticity=False,
1661        ) -> Union["vedo.Lines", None]:
1662        """
1663        Integrate a vector field to generate streamlines.
1664
1665        Arguments:
1666            seeds : (Mesh, Points, list)
1667                starting points of the streamlines
1668            integrator : (str)
1669                type of integration method to be used:
1670                    - "rk2" (Runge-Kutta 2)
1671                    - "rk4" (Runge-Kutta 4)
1672                    - "rk45" (Runge-Kutta 45)
1673            direction : (str)
1674                direction of integration, either "forward", "backward" or "both"
1675            initial_step_size : (float)
1676                initial step size used for line integration
1677            max_propagation : (float)
1678                maximum length of a streamline expressed in absolute units
1679            max_steps : (int)
1680                maximum number of steps for a streamline
1681            step_length : (float)
1682                maximum length of a step expressed in absolute units
1683            surface_constrained : (bool)
1684                whether to stop integrating when the streamline leaves the surface
1685            compute_vorticity : (bool)
1686                whether to compute the vorticity at each streamline point
1687        """
1688        b = self.dataset.GetBounds()
1689        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
1690        if initial_step_size is None:
1691            initial_step_size = size / 1000.0
1692
1693        if max_propagation is None:
1694            max_propagation = size * 2
1695
1696        if utils.is_sequence(seeds):
1697            seeds = vedo.Points(seeds)
1698
1699        sti = vtki.new("StreamTracer")
1700        sti.SetSourceData(seeds.dataset)
1701        if isinstance(self, vedo.RectilinearGrid):
1702            sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset)
1703        else:
1704            sti.SetInputDataObject(self.dataset)
1705
1706        sti.SetInitialIntegrationStep(initial_step_size)
1707        sti.SetComputeVorticity(compute_vorticity)
1708        sti.SetMaximumNumberOfSteps(max_steps)
1709        sti.SetMaximumPropagation(max_propagation)
1710        sti.SetSurfaceStreamlines(surface_constrained)
1711        if step_length:
1712            sti.SetMaximumIntegrationStep(step_length)
1713
1714        if "for" in direction:
1715            sti.SetIntegrationDirectionToForward()
1716        elif "back" in direction:
1717            sti.SetIntegrationDirectionToBackward()
1718        elif "both" in direction:
1719            sti.SetIntegrationDirectionToBoth()
1720        else:
1721            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
1722            return None
1723
1724        if integrator == "rk2":
1725            sti.SetIntegratorTypeToRungeKutta2()
1726        elif integrator == "rk4":
1727            sti.SetIntegratorTypeToRungeKutta4()
1728        elif integrator == "rk45":
1729            sti.SetIntegratorTypeToRungeKutta45()
1730        else:
1731            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
1732            return None
1733
1734        sti.Update()
1735
1736        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
1737        stlines.name = "StreamLines"
1738        self.pipeline = utils.OperationNode(
1739            "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b"
1740        )
1741        return stlines
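
A minimal usage sketch for compute_streamlines(): an illustrative rotational field on a flat Grid. It assumes that assigning a 3-component "velocity" array makes it the active vector field; the array name and the seed points are arbitrary choices.

import numpy as np
from vedo import Grid, show

# build a flat grid and attach a rotational velocity field to its points
domain = Grid(res=(25, 25))
pts = domain.coordinates
domain.pointdata["velocity"] = np.c_[-pts[:, 1], pts[:, 0], np.zeros(len(pts))]

# seed a few starting points along the x axis and integrate the field
seeds = [(x, 0.0, 0.0) for x in np.linspace(0.05, 0.45, 5)]
streamlines = domain.compute_streamlines(seeds, direction="both")
show(domain, streamlines, axes=1).close()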

Common algorithms.

CommonAlgorithms()
pointdata
383    @property
384    def pointdata(self):
385        """
386        Create and/or return a `numpy.array` associated to points (vertices).
387        A data array can be indexed either as a string or by an integer number.
388        E.g.:  `myobj.pointdata["arrayname"]`
389
390        Usage:
391
392            `myobj.pointdata.keys()` to return the available data array names
393
394            `myobj.pointdata.select(name)` to make this array the active one
395
396            `myobj.pointdata.remove(name)` to remove this array
397        """
398        return DataArrayHelper(self, 0)

Create and/or return a numpy.array associated to points (vertices). A data array can be indexed either as a string or by an integer number. E.g.: myobj.pointdata["arrayname"]

Usage:

myobj.pointdata.keys() to return the available data array names

myobj.pointdata.select(name) to make this array the active one

myobj.pointdata.remove(name) to remove this array

celldata
400    @property
401    def celldata(self):
402        """
403        Create and/or return a `numpy.array` associated to cells (faces).
404        A data array can be indexed either as a string or by an integer number.
405        E.g.:  `myobj.celldata["arrayname"]`
406
407        Usage:
408
409            `myobj.celldata.keys()` to return the available data array names
410
411            `myobj.celldata.select(name)` to make this array the active one
412
413            `myobj.celldata.remove(name)` to remove this array
414        """
415        return DataArrayHelper(self, 1)

Create and/or return a numpy.array associated to cells (faces). A data array can be indexed either as a string or by an integer number. E.g.: myobj.celldata["arrayname"]

Usage:

myobj.celldata.keys() to return the available data array names

myobj.celldata.select(name) to make this array the active one

myobj.celldata.remove(name) to remove this array

metadata
417    @property
418    def metadata(self):
419        """
420        Create and/or return a `numpy.array` associated to neither cells nor faces.
421        A data array can be indexed either as a string or by an integer number.
422        E.g.:  `myobj.metadata["arrayname"]`
423
424        Usage:
425
426            `myobj.metadata.keys()` to return the available data array names
427
428            `myobj.metadata.select(name)` to make this array the active one
429
430            `myobj.metadata.remove(name)` to remove this array
431        """
432        return DataArrayHelper(self, 2)

Create and/or return a numpy.array associated to neither points nor cells (field data). A data array can be indexed either as a string or by an integer number. E.g.: myobj.metadata["arrayname"]

Usage:

myobj.metadata.keys() to return the available data array names

myobj.metadata.select(name) to make this array the active one

myobj.metadata.remove(name) to remove this array
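
A small sketch showing the three accessors side by side (the array names are arbitrary):

import numpy as np
from vedo import Sphere

obj = Sphere(res=24)
obj.pointdata["height"] = obj.vertices[:, 2]     # one value per vertex
obj.celldata["cellid"] = np.arange(obj.ncells)   # one value per cell
obj.metadata["note"] = ["demo object"]           # free-form field data
print(obj.pointdata.keys(), obj.celldata.keys(), obj.metadata.keys())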

def memory_address(self) -> int:
434    def memory_address(self) -> int:
435        """
436        Return a unique memory address integer which may serve as the ID of the
437        object, or passed to c++ code.
438        """
439        # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
440        # https://github.com/tfmoraes/polydata_connectivity
441        return int(self.dataset.GetAddressAsString("")[5:], 16)

Return a unique memory address integer which may serve as the ID of the object, or be passed to C++ code.

def memory_size(self) -> int:
443    def memory_size(self) -> int:
444        """Return the size in bytes of the object in memory."""
445        return self.dataset.GetActualMemorySize()

Return the size in bytes of the object in memory.

def modified(self) -> Self:
447    def modified(self) -> Self:
448        """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
449        self.dataset.GetPointData().Modified()
450        scals = self.dataset.GetPointData().GetScalars()
451        if scals:
452            scals.Modified()
453        return self

Use in conjunction with tonumpy() to update any modifications to the image array.

def box(self, scale=1, padding=0) -> vedo.mesh.Mesh:
455    def box(self, scale=1, padding=0) -> "vedo.Mesh":
456        """
457        Return the bounding box as a new `Mesh` object.
458
459        Arguments:
460            scale : (float)
461                box size can be scaled by a factor
462            padding : (float, list)
463                a constant padding can be added (can be a list `[padx,pady,padz]`)
464        """
465        b = self.bounds()
466        if not utils.is_sequence(padding):
467            padding = [padding, padding, padding]
468        length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
469        tol = (length + width + height) / 30000  # useful for boxing text
470        pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
471        bx = vedo.shapes.Box(
472            pos,
473            length * scale + padding[0],
474            width  * scale + padding[1],
475            height * scale + padding[2],
476            c="gray",
477        )
478        try:
479            pr = vtki.vtkProperty()
480            pr.DeepCopy(self.properties)
481            bx.actor.SetProperty(pr)
482            bx.properties = pr
483        except (AttributeError, TypeError):
484            pass
485        bx.flat().lighting("off").wireframe(True)
486        return bx

Return the bounding box as a new Mesh object.

Arguments:
  • scale : (float) box size can be scaled by a factor
  • padding : (float, list) a constant padding can be added (can be a list [padx,pady,padz])
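
For instance, a rough sketch drawing a mesh together with its slightly enlarged bounding box (the bunny file is one of the datasets served from vedo.dataurl):

from vedo import Mesh, dataurl, show

msh = Mesh(dataurl + "bunny.obj")
bb = msh.box(scale=1.1)        # wireframe bounding box, 10% larger than the mesh
show(msh, bb, axes=1).close()
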
def update_dataset(self, dataset, **kwargs) -> Self:
488    def update_dataset(self, dataset, **kwargs) -> Self:
489        """Update the dataset of the object with the provided VTK dataset."""
490        self._update(dataset, **kwargs)
491        return self

Update the dataset of the object with the provided VTK dataset.

def bounds(self) -> numpy.ndarray:
493    def bounds(self) -> np.ndarray:
494        """
495        Get the object bounds.
496        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
497        """
498        try:  # this is very slow for large meshes
499            pts = self.vertices
500            xmin, ymin, zmin = np.min(pts, axis=0)
501            xmax, ymax, zmax = np.max(pts, axis=0)
502            return np.array([xmin, xmax, ymin, ymax, zmin, zmax])
503        except (AttributeError, ValueError):
504            return np.array(self.dataset.GetBounds())

Get the object bounds. Returns an array in the format [xmin,xmax, ymin,ymax, zmin,zmax].

def xbounds(self, i=None) -> numpy.ndarray:
506    def xbounds(self, i=None) -> np.ndarray:
507        """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
508        b = self.bounds()
509        if i is not None:
510            return b[i]
511        return np.array([b[0], b[1]])

Get the bounds [xmin,xmax]. Can specify upper or lower with i (0,1).

def ybounds(self, i=None) -> numpy.ndarray:
513    def ybounds(self, i=None) -> np.ndarray:
514        """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
515        b = self.bounds()
516        if i == 0:
517            return b[2]
518        if i == 1:
519            return b[3]
520        return np.array([b[2], b[3]])

Get the bounds [ymin,ymax]. Can specify upper or lower with i (0,1).

def zbounds(self, i=None) -> numpy.ndarray:
522    def zbounds(self, i=None) -> np.ndarray:
523        """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
524        b = self.bounds()
525        if i == 0:
526            return b[4]
527        if i == 1:
528            return b[5]
529        return np.array([b[4], b[5]])

Get the bounds [zmin,zmax]. Can specify upper or lower with i (0,1).

def diagonal_size(self) -> float:
531    def diagonal_size(self) -> float:
532        """Get the length of the diagonal of the bounding box."""
533        b = self.bounds()
534        return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)

Get the length of the diagonal of the bounding box.

def average_size(self) -> float:
536    def average_size(self) -> float:
537        """
538        Calculate and return the average size of the object.
539        This is the mean of the vertex distances from the center of mass.
540        """
541        coords = self.vertices
542        cm = np.mean(coords, axis=0)
543        if coords.shape[0] == 0:
544            return 0.0
545        cc = coords - cm
546        return np.mean(np.linalg.norm(cc, axis=1))

Calculate and return the average size of the object. This is the mean of the vertex distances from the center of mass.
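
A quick sketch of the bounds- and size-related queries:

from vedo import Sphere

s = Sphere(r=2.0)
print(s.bounds())         # [xmin,xmax, ymin,ymax, zmin,zmax]
print(s.xbounds(1))       # upper x bound only
print(s.diagonal_size())  # length of the bounding-box diagonal
print(s.average_size())   # mean vertex distance from the center of mass, ~2 here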

def center_of_mass(self) -> numpy.ndarray:
548    def center_of_mass(self) -> np.ndarray:
549        """Get the center of mass of the object."""
550        if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)):
551            return np.array(self.dataset.GetCenter())
552        cmf = vtki.new("CenterOfMass")
553        cmf.SetInputData(self.dataset)
554        cmf.Update()
555        c = cmf.GetCenter()
556        return np.array(c)

Get the center of mass of the object.

def copy_data_from(self, obj: Any) -> Self:
558    def copy_data_from(self, obj: Any) -> Self:
559        """Copy all data (point and cell data) from this input object"""
560        self.dataset.GetPointData().PassData(obj.dataset.GetPointData())
561        self.dataset.GetCellData().PassData(obj.dataset.GetCellData())
562        self.pipeline = utils.OperationNode(
563            "copy_data_from",
564            parents=[self, obj],
565            comment=f"{obj.__class__.__name__}",
566            shape="note",
567            c="#ccc5b9",
568        )
569        return self

Copy all data (point and cell data) from the given input object.

def inputdata(self):
571    def inputdata(self):
572        """Obsolete, use `.dataset` instead."""
573        colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y")
574        return self.dataset

Obsolete, use .dataset instead.

npoints
576    @property
577    def npoints(self):
578        """Retrieve the number of points (or vertices)."""
579        return self.dataset.GetNumberOfPoints()

Retrieve the number of points (or vertices).

nvertices
581    @property
582    def nvertices(self):
583        """Retrieve the number of vertices (or points)."""
584        return self.dataset.GetNumberOfPoints()

Retrieve the number of vertices (or points).

ncells
586    @property
587    def ncells(self):
588        """Retrieve the number of cells."""
589        return self.dataset.GetNumberOfCells()

Retrieve the number of cells.

def cell_centers(self, copy_arrays=False) -> vedo.pointcloud.Points:
591    def cell_centers(self, copy_arrays=False) -> "vedo.Points":
592        """
593        Get the coordinates of the cell centers as a `Points` object.
594
595        Examples:
596            - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
597        """
598        vcen = vtki.new("CellCenters")
599        vcen.SetCopyArrays(copy_arrays)
600        vcen.SetVertexCells(copy_arrays)
601        vcen.SetInputData(self.dataset)
602        vcen.Update()
603        vpts = vedo.Points(vcen.GetOutput())
604        if copy_arrays:
605            vpts.copy_properties_from(self)
606        return vpts

Get the coordinates of the cell centers as a Points object.

Examples:
  • delaunay2d.py (https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
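
A minimal sketch:

from vedo import Sphere

msh = Sphere(res=12)
centers = msh.cell_centers()       # a Points object, one point per cell
print(centers.npoints, "==", msh.ncells)
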
lines
608    @property
609    def lines(self):
610        """
611        Get lines connectivity ids as a python array
612        formatted as `[[id0,id1], [id3,id4], ...]`
613
614        See also: `lines_as_flat_array()`.
615        """
616        # Get cell connettivity ids as a 1D array. The vtk format is:
617        #    [nids1, id0 ... idn, niids2, id0 ... idm,  etc].
618        try:
619            arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData())
620        except AttributeError:
621            return np.array([])
622        i = 0
623        conn = []
624        n = len(arr1d)
625        for _ in range(n):
626            cell = [arr1d[i + k + 1] for k in range(arr1d[i])]
627            conn.append(cell)
628            i += arr1d[i] + 1
629            if i >= n:
630                break
631
632        return conn  # cannot always make a numpy array of it!

Get lines connectivity ids as a python array formatted as [[id0,id1], [id3,id4], ...]

See also: lines_as_flat_array().

lines_as_flat_array
634    @property
635    def lines_as_flat_array(self):
636        """
637        Get lines connectivity ids as a 1D numpy array.
638        Format is e.g. [2,  10,20,  3, 10,11,12,  2, 70,80, ...]
639
640        See also: `lines()`.
641        """
642        try:
643            return utils.vtk2numpy(self.dataset.GetLines().GetData())
644        except AttributeError:
645            return np.array([])

Get lines connectivity ids as a 1D numpy array. Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...]

See also: lines().

def mark_boundaries(self) -> Self:
647    def mark_boundaries(self) -> Self:
648        """
649        Mark cells and vertices if they lie on a boundary.
650        A new array called `BoundaryCells` is added to the object.
651        """
652        mb = vtki.new("MarkBoundaryFilter")
653        mb.SetInputData(self.dataset)
654        mb.Update()
655        self.dataset.DeepCopy(mb.GetOutput())
656        self.pipeline = utils.OperationNode("mark_boundaries", parents=[self])
657        return self

Mark cells and vertices if they lie on a boundary. A new array called BoundaryCells is added to the object.

def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> numpy.ndarray:
659    def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
660        """
661        Find cells that are within the specified bounds.
662        """
663        try:
664            xbounds = list(xbounds.bounds())
665        except AttributeError:
666            pass
667
668        if len(xbounds) == 6:
669            bnds = xbounds
670        else:
671            bnds = list(self.bounds())
672            if len(xbounds) == 2:
673                bnds[0] = xbounds[0]
674                bnds[1] = xbounds[1]
675            if len(ybounds) == 2:
676                bnds[2] = ybounds[0]
677                bnds[3] = ybounds[1]
678            if len(zbounds) == 2:
679                bnds[4] = zbounds[0]
680                bnds[5] = zbounds[1]
681
682        cell_ids = vtki.vtkIdList()
683        if not self.cell_locator:
684            self.cell_locator = vtki.new("CellTreeLocator")
685            self.cell_locator.SetDataSet(self.dataset)
686            self.cell_locator.BuildLocator()
687        self.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
688        cids = []
689        for i in range(cell_ids.GetNumberOfIds()):
690            cid = cell_ids.GetId(i)
691            cids.append(cid)
692        return np.array(cids)

Find cells that are within the specified bounds.

def find_cells_along_line(self, p0, p1, tol=0.001) -> numpy.ndarray:
694    def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray:
695        """
696        Find cells that are intersected by a line segment.
697        """
698        cell_ids = vtki.vtkIdList()
699        if not self.cell_locator:
700            self.cell_locator = vtki.new("CellTreeLocator")
701            self.cell_locator.SetDataSet(self.dataset)
702            self.cell_locator.BuildLocator()
703        self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
704        cids = []
705        for i in range(cell_ids.GetNumberOfIds()):
706            cid = cell_ids.GetId(i)
707            cids.append(cid)
708        return np.array(cids)

Find cells that are intersected by a line segment.

def find_cells_along_plane(self, origin, normal, tol=0.001) -> numpy.ndarray:
710    def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray:
711        """
712        Find cells that are intersected by a plane.
713        """
714        cell_ids = vtki.vtkIdList()
715        if not self.cell_locator:
716            self.cell_locator = vtki.new("CellTreeLocator")
717            self.cell_locator.SetDataSet(self.dataset)
718            self.cell_locator.BuildLocator()
719        self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
720        cids = []
721        for i in range(cell_ids.GetNumberOfIds()):
722            cid = cell_ids.GetId(i)
723            cids.append(cid)
724        return np.array(cids)

Find cells that are intersected by a plane.
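
A small sketch of these locator-based queries (tolerances left at their defaults):

from vedo import Sphere

s = Sphere(res=24)
ids_plane = s.find_cells_along_plane(origin=(0, 0, 0), normal=(0, 0, 1))
ids_line = s.find_cells_along_line((-2, 0, 0), (2, 0, 0))
print(len(ids_plane), "cells cut by the z=0 plane,", len(ids_line), "cells along the x axis")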

def keep_cell_types(self, types=()):
726    def keep_cell_types(self, types=()):
727        """
728        Extract cells of a specific type.
729
730        Check the VTK cell types here:
731        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
732        """
733        fe = vtki.new("ExtractCellsByType")
734        fe.SetInputData(self.dataset)
735        for t in types:
736            try:
737                if utils.is_integer(t):
738                    it = t
739                else:
740                    it = vtki.cell_types[t.upper()]
741            except KeyError:
742                vedo.logger.error(f"Cell type '{t}' not recognized")
743                continue
744            fe.AddCellType(it)
745        fe.Update()
746        self._update(fe.GetOutput())
747        return self

Keep only the cells of the specified types.

Check the VTK cell types here: https://vtk.org/doc/nightly/html/vtkCellType_8h.html

def map_cells_to_points(self, arrays=(), move=False) -> Self:
749    def map_cells_to_points(self, arrays=(), move=False) -> Self:
750        """
751        Interpolate cell data (i.e., data specified per cell or face)
752        into point data (i.e., data specified at each vertex).
753        The method of transformation is based on averaging the data values
754        of all cells using a particular point.
755
756        A custom list of arrays to be mapped can be passed in input.
757
758        Set `move=True` to delete the original `celldata` array.
759        """
760        c2p = vtki.new("CellDataToPointData")
761        c2p.SetInputData(self.dataset)
762        if not move:
763            c2p.PassCellDataOn()
764        if arrays:
765            c2p.ClearCellDataArrays()
766            c2p.ProcessAllArraysOff()
767            for arr in arrays:
768                c2p.AddCellDataArray(arr)
769        else:
770            c2p.ProcessAllArraysOn()
771        c2p.Update()
772        self._update(c2p.GetOutput(), reset_locators=False)
773        self.mapper.SetScalarModeToUsePointData()
774        self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self])
775        return self

Interpolate cell data (i.e., data specified per cell or face) into point data (i.e., data specified at each vertex). The method of transformation is based on averaging the data values of all cells using a particular point.

A custom list of arrays to be mapped can be passed in input.

Set move=True to delete the original celldata array.
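
For example (the array name is arbitrary):

import numpy as np
from vedo import Sphere

s = Sphere()
s.celldata["quality"] = np.random.rand(s.ncells)
s.map_cells_to_points()              # each vertex gets the average of its cells
print(s.pointdata["quality"].shape[0] == s.npoints)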

vertices
777    @property
778    def vertices(self):
779        """Return the vertices (points) coordinates."""
780        try:
781            # for polydata and unstructured grid
782            varr = self.dataset.GetPoints().GetData()
783        except (AttributeError, TypeError):
784            try:
785                # for RectilinearGrid, StructuredGrid
786                vpts = vtki.vtkPoints()
787                self.dataset.GetPoints(vpts)
788                varr = vpts.GetData()
789            except (AttributeError, TypeError):
790                try:
791                    # for ImageData
792                    v2p = vtki.new("ImageToPoints")
793                    v2p.SetInputData(self.dataset)
794                    v2p.Update()
795                    varr = v2p.GetOutput().GetPoints().GetData()
796                except AttributeError:
797                    return np.array([])
798
799        return utils.vtk2numpy(varr)

Return the vertices (points) coordinates.

points
820    @property
821    def points(self):
822        """Return the vertices (points) coordinates. Same as `vertices`."""
823        return self.vertices

Return the vertices (points) coordinates. Same as vertices.

coordinates
830    @property
831    def coordinates(self):
832        """Return the vertices (points) coordinates. Same as `vertices`."""
833        return self.vertices

Return the vertices (points) coordinates. Same as vertices.

cells_as_flat_array
840    @property
841    def cells_as_flat_array(self):
842        """
843        Get cell connectivity ids as a 1D numpy array.
844        Format is e.g. [3,  10,20,30  4, 10,11,12,13  ...]
845        """
846        try:
847            # valid for unstructured grid
848            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
849        except AttributeError:
850            # valid for polydata
851            arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
852        return arr1d

Get cell connectivity ids as a 1D numpy array. Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...]

cells
854    @property
855    def cells(self):
856        """
857        Get the cells connectivity ids as a numpy array.
858
859        The output format is: `[[id0 ... idn], [id0 ... idm],  etc]`.
860        """
861        try:
862            # valid for unstructured grid
863            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
864        except AttributeError:
865            try:
866                # valid for polydata
867                arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
868            except AttributeError:
869                vedo.logger.warning(f"Cannot get cells for {type(self)}")
870                return np.array([])
871
872        # Get cell connettivity ids as a 1D array. vtk format is:
873        # [nids1, id0 ... idn, niids2, id0 ... idm,  etc].
874        i = 0
875        conn = []
876        n = len(arr1d)
877        if n:
878            while True:
879                cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)]
880                conn.append(cell)
881                i += arr1d[i] + 1
882                if i >= n:
883                    break
884        return conn

Get the cells connectivity ids as a numpy array.

The output format is: [[id0 ... idn], [id0 ... idm], etc].
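
A quick look at the connectivity accessors:

from vedo import Sphere

s = Sphere(res=8)
print(s.cells[0])                  # vertex ids of the first cell
print(s.cells_as_flat_array[:8])   # same information in the flat VTK layout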

def cell_edge_neighbors(self):
886    def cell_edge_neighbors(self):
887        """
888        Get the cell neighbor indices of each cell.
889
890        Returns a python list of lists.
891        """
892
893        def face_to_edges(face):
894            edges = []
895            size = len(face)
896            for i in range(1, size + 1):
897                if i == size:
898                    edges.append([face[i - 1], face[0]])
899                else:
900                    edges.append([face[i - 1], face[i]])
901            return edges
902
903        pd = self.dataset
904        pd.BuildLinks()
905
906        neicells = []
907        for i, cell in enumerate(self.cells):
908            nn = []
909            for edge in face_to_edges(cell):
910                neighbors = vtki.vtkIdList()
911                pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors)
912                if neighbors.GetNumberOfIds() > 0:
913                    neighbor = neighbors.GetId(0)
914                    nn.append(neighbor)
915            neicells.append(nn)
916
917        return neicells

Get the cell neighbor indices of each cell.

Returns a python list of lists.

def map_points_to_cells(self, arrays=(), move=False) -> Self:
920    def map_points_to_cells(self, arrays=(), move=False) -> Self:
921        """
922        Interpolate point data (i.e., data specified per point or vertex)
923        into cell data (i.e., data specified per cell).
924        The method of transformation is based on averaging the data values
925        of all points defining a particular cell.
926
927        A custom list of arrays to be mapped can be passed in input.
928
929        Set `move=True` to delete the original `pointdata` array.
930
931        Examples:
932            - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
933        """
934        p2c = vtki.new("PointDataToCellData")
935        p2c.SetInputData(self.dataset)
936        if not move:
937            p2c.PassPointDataOn()
938        if arrays:
939            p2c.ClearPointDataArrays()
940            p2c.ProcessAllArraysOff()
941            for arr in arrays:
942                p2c.AddPointDataArray(arr)
943        else:
944            p2c.ProcessAllArraysOn()
945        p2c.Update()
946        self._update(p2c.GetOutput(), reset_locators=False)
947        self.mapper.SetScalarModeToUseCellData()
948        self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self])
949        return self

Interpolate point data (i.e., data specified per point or vertex) into cell data (i.e., data specified per cell). The method of transformation is based on averaging the data values of all points defining a particular cell.

A custom list of arrays to be mapped can be passed in input.

Set move=True to delete the original pointdata array.

Examples:
  • mesh_map2cell.py (https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)

def resample_data_from(self, source, tol=None, categorical=False) -> Self:
951    def resample_data_from(self, source, tol=None, categorical=False) -> Self:
952        """
953        Resample point and cell data from another dataset.
954        The output has the same structure but its point data have
955        the resampled values from target.
956
957        Use `tol` to set the tolerance used to compute whether
958        a point in the source is in a cell of the current object.
959        Points without resampled values, and their cells, are marked as blank.
960        If the data is categorical, then the resulting data will be determined
961        by a nearest neighbor interpolation scheme.
962
963        Example:
964        ```python
965        from vedo import *
966        m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
967        pts = m1.coordinates
968        ces = m1.cell_centers().coordinates
969        m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
970        m1.celldata["yvalues"]  = np.power(ces[:,1], 3)
971        m2 = Mesh(dataurl+'bunny.obj')
972        m2.resample_data_from(m1)
973        # print(m2.pointdata["xvalues"])
974        show(m1, m2 , N=2, axes=1)
975        ```
976        """
977        rs = vtki.new("ResampleWithDataSet")
978        rs.SetInputData(self.dataset)
979        rs.SetSourceData(source.dataset)
980
981        rs.SetPassPointArrays(True)
982        rs.SetPassCellArrays(True)
983        rs.SetPassFieldArrays(True)
984        rs.SetCategoricalData(categorical)
985
986        rs.SetComputeTolerance(True)
987        if tol:
988            rs.SetComputeTolerance(False)
989            rs.SetTolerance(tol)
990        rs.Update()
991        self._update(rs.GetOutput(), reset_locators=False)
992        self.pipeline = utils.OperationNode(
993            "resample_data_from",
994            comment=f"{source.__class__.__name__}",
995            parents=[self, source],
996        )
997        return self

Resample point and cell data from another dataset. The output has the same structure but its point data hold the values resampled from the source dataset.

Use tol to set the tolerance used to compute whether a point in the source is in a cell of the current object. Points without resampled values, and their cells, are marked as blank. If the data is categorical, then the resulting data will be determined by a nearest neighbor interpolation scheme.

Example:

from vedo import *
m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
pts = m1.coordinates
ces = m1.cell_centers().coordinates
m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
m1.celldata["yvalues"]  = np.power(ces[:,1], 3)
m2 = Mesh(dataurl+'bunny.obj')
m2.resample_data_from(m1)
# print(m2.pointdata["xvalues"])
show(m1, m2 , N=2, axes=1)
def interpolate_data_from( self, source, radius=None, n=None, kernel='shepard', exclude=('Normals',), on='points', null_strategy=1, null_value=0) -> Self:
 999    def interpolate_data_from(
1000        self,
1001        source,
1002        radius=None,
1003        n=None,
1004        kernel="shepard",
1005        exclude=("Normals",),
1006        on="points",
1007        null_strategy=1,
1008        null_value=0,
1009    ) -> Self:
1010        """
1011        Interpolate over source to port its data onto the current object using various kernels.
1012
1013        If n (number of closest points to use) is set then radius value is ignored.
1014
1015        Check out also:
1016            `probe()` which in many cases can be faster.
1017
1018        Arguments:
1019            kernel : (str)
1020                available kernels are [shepard, gaussian, linear]
1021            null_strategy : (int)
1022                specify a strategy to use when encountering a "null" point
1023                during the interpolation process. Null points occur when the local neighborhood
1024                (of nearby points to interpolate from) is empty.
1025
1026                - Case 0: an output array is created that marks points
1027                  as being valid (=1) or null (invalid =0), and the null_value is set as well
1028                - Case 1: the output data value(s) are set to the provided null_value
1029                - Case 2: simply use the closest point to perform the interpolation.
1030            null_value : (float)
1031                see above.
1032
1033        Examples:
1034            - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
1035            - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
1036            - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
1037            - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
1038
1039                ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png)
1040        """
1041        if radius is None and not n:
1042            vedo.logger.error("in interpolate_data_from(): please set either radius or n")
1043            raise RuntimeError
1044
1045        if on == "points":
1046            points = source.dataset
1047        elif on == "cells":
1048            c2p = vtki.new("CellDataToPointData")
1049            c2p.SetInputData(source.dataset)
1050            c2p.Update()
1051            points = c2p.GetOutput()
1052        else:
1053            vedo.logger.error("in interpolate_data_from(), on must be on points or cells")
1054            raise RuntimeError()
1055
1056        locator = vtki.new("PointLocator")
1057        locator.SetDataSet(points)
1058        locator.BuildLocator()
1059
1060        if kernel.lower() == "shepard":
1061            kern = vtki.new("ShepardKernel")
1062            kern.SetPowerParameter(2)
1063        elif kernel.lower() == "gaussian":
1064            kern = vtki.new("GaussianKernel")
1065            kern.SetSharpness(2)
1066        elif kernel.lower() == "linear":
1067            kern = vtki.new("LinearKernel")
1068        else:
1069            vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
1070            raise RuntimeError()
1071
1072        if n:
1073            kern.SetNumberOfPoints(n)
1074            kern.SetKernelFootprintToNClosest()
1075        else:
1076            kern.SetRadius(radius)
1077            kern.SetKernelFootprintToRadius()
1078
1079        interpolator = vtki.new("PointInterpolator")
1080        interpolator.SetInputData(self.dataset)
1081        interpolator.SetSourceData(points)
1082        interpolator.SetKernel(kern)
1083        interpolator.SetLocator(locator)
1084        interpolator.PassFieldArraysOn()
1085        interpolator.SetNullPointsStrategy(null_strategy)
1086        interpolator.SetNullValue(null_value)
1087        interpolator.SetValidPointsMaskArrayName("ValidPointMask")
1088        for ex in exclude:
1089            interpolator.AddExcludedArray(ex)
1090
1091        # remove arrays that are already present in the source
1092        # this is because the interpolator will ignore them otherwise
1093        for i in range(self.dataset.GetPointData().GetNumberOfArrays()):
1094            name = self.dataset.GetPointData().GetArrayName(i)
1095            if name not in exclude:
1096                self.dataset.GetPointData().RemoveArray(name)
1097
1098        interpolator.Update()
1099
1100        if on == "cells":
1101            p2c = vtki.new("PointDataToCellData")
1102            p2c.SetInputData(interpolator.GetOutput())
1103            p2c.Update()
1104            cpoly = p2c.GetOutput()
1105        else:
1106            cpoly = interpolator.GetOutput()
1107
1108        self._update(cpoly, reset_locators=False)
1109
1110        self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source])
1111        return self

Interpolate over source to port its data onto the current object using various kernels.

If n (the number of closest points to use) is set, the radius value is ignored.

Check out also:

probe() which in many cases can be faster.

Arguments:
  • kernel : (str) available kernels are [shepard, gaussian, linear]
  • null_strategy : (int) specify a strategy to use when encountering a "null" point during the interpolation process. Null points occur when the local neighborhood (of nearby points to interpolate from) is empty.

    • Case 0: an output array is created that marks points as being valid (=1) or null (invalid =0), and the null_value is set as well
    • Case 1: the output data value(s) are set to the provided null_value
    • Case 2: simply use the closest point to perform the interpolation.
  • null_value : (float) see above.
Examples:
  • interpolate_scalar1.py (https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
  • interpolate_scalar3.py (https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
  • interpolate_scalar4.py (https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
  • image_probe.py (https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
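
A minimal sketch, interpolating a scalar field from a scattered point cloud onto a mesh (the array name "field" is arbitrary):

import numpy as np
from vedo import Points, Sphere

pts = Points(np.random.rand(500, 3) - 0.5)      # scattered sample points
pts.pointdata["field"] = pts.coordinates[:, 0]  # a scalar carried by each point

msh = Sphere(r=0.4)
msh.interpolate_data_from(pts, n=5, kernel="shepard")  # 5 closest points per vertex
print(msh.pointdata["field"].shape)
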
def add_ids(self) -> Self:
1113    def add_ids(self) -> Self:
1114        """
1115        Generate point and cell ids arrays.
1116
1117        Two new arrays are added to the mesh: `PointID` and `CellID`.
1118        """
1119        ids = vtki.new("IdFilter")
1120        ids.SetInputData(self.dataset)
1121        ids.PointIdsOn()
1122        ids.CellIdsOn()
1123        ids.FieldDataOff()
1124        ids.SetPointIdsArrayName("PointID")
1125        ids.SetCellIdsArrayName("CellID")
1126        ids.Update()
1127        self._update(ids.GetOutput(), reset_locators=False)
1128        self.pipeline = utils.OperationNode("add_ids", parents=[self])
1129        return self

Generate point and cell ids arrays.

Two new arrays are added to the mesh: PointID and CellID.
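
For instance:

from vedo import Sphere

s = Sphere().add_ids()
print(s.pointdata["PointID"][:5], s.celldata["CellID"][:5])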

def gradient(self, input_array=None, on='points', fast=False) -> numpy.ndarray:
1131    def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray:
1132        """
1133        Compute and return the gradiend of the active scalar field as a numpy array.
1134
1135        Arguments:
1136            input_array : (str)
1137                array of the scalars to compute the gradient,
1138                if None the current active array is selected
1139            on : (str)
1140                compute either on 'points' or 'cells' data
1141            fast : (bool)
1142                if True, will use a less accurate algorithm
1143                that performs fewer derivative calculations (and is therefore faster).
1144
1145        Examples:
1146            - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
1147
1148            ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png)
1149        """
1150        gra = vtki.new("GradientFilter")
1151        if on.startswith("p"):
1152            varr = self.dataset.GetPointData()
1153            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1154        elif on.startswith("c"):
1155            varr = self.dataset.GetCellData()
1156            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1157        else:
1158            vedo.logger.error(f"in gradient: unknown option {on}")
1159            raise RuntimeError
1160
1161        if input_array is None:
1162            if varr.GetScalars():
1163                input_array = varr.GetScalars().GetName()
1164            else:
1165                vedo.logger.error(f"in gradient: no scalars found for {on}")
1166                raise RuntimeError
1167
1168        gra.SetInputData(self.dataset)
1169        gra.SetInputScalars(tp, input_array)
1170        gra.SetResultArrayName("Gradient")
1171        gra.SetFasterApproximation(fast)
1172        gra.ComputeDivergenceOff()
1173        gra.ComputeVorticityOff()
1174        gra.ComputeGradientOn()
1175        gra.Update()
1176        # self._update(gra.GetOutput(), reset_locators=False)
1177        if on.startswith("p"):
1178            gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
1179        else:
1180            gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
1181        return gvecs

Compute and return the gradient of the active scalar field as a numpy array.

Arguments:
  • input_array : (str) array of the scalars to compute the gradient, if None the current active array is selected
  • on : (str) compute either on 'points' or 'cells' data
  • fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
Examples:
  • isolines.py (https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
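
A short sketch (the array name "scalars" is arbitrary):

from vedo import Sphere

s = Sphere(res=48)
s.pointdata["scalars"] = s.vertices[:, 2]   # a simple height field
g = s.gradient("scalars", on="points")      # one 3-vector per vertex
print(g.shape)                              # (npoints, 3)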

def divergence(self, array_name=None, on='points', fast=False) -> numpy.ndarray:
1183    def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray:
1184        """
1185        Compute and return the divergence of a vector field as a numpy array.
1186
1187        Arguments:
1188            array_name : (str)
1189                name of the array of vectors to compute the divergence,
1190                if None the current active array is selected
1191            on : (str)
1192                compute either on 'points' or 'cells' data
1193            fast : (bool)
1194                if True, will use a less accurate algorithm
1195                that performs fewer derivative calculations (and is therefore faster).
1196        """
1197        div = vtki.new("GradientFilter")
1198        if on.startswith("p"):
1199            varr = self.dataset.GetPointData()
1200            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1201        elif on.startswith("c"):
1202            varr = self.dataset.GetCellData()
1203            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1204        else:
1205            vedo.logger.error(f"in divergence(): unknown option {on}")
1206            raise RuntimeError
1207
1208        if array_name is None:
1209            if varr.GetVectors():
1210                array_name = varr.GetVectors().GetName()
1211            else:
1212                vedo.logger.error(f"in divergence(): no vectors found for {on}")
1213                raise RuntimeError
1214
1215        div.SetInputData(self.dataset)
1216        div.SetInputScalars(tp, array_name)
1217        div.ComputeDivergenceOn()
1218        div.ComputeGradientOff()
1219        div.ComputeVorticityOff()
1220        div.SetDivergenceArrayName("Divergence")
1221        div.SetFasterApproximation(fast)
1222        div.Update()
1223        # self._update(div.GetOutput(), reset_locators=False)
1224        if on.startswith("p"):
1225            dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
1226        else:
1227            dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
1228        return dvecs

Compute and return the divergence of a vector field as a numpy array.

Arguments:
  • array_name : (str) name of the array of vectors to compute the divergence, if None the current active array is selected
  • on : (str) compute either on 'points' or 'cells' data
  • fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
def vorticity(self, array_name=None, on='points', fast=False) -> numpy.ndarray:
1230    def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray:
1231        """
1232        Compute and return the vorticity of a vector field as a numpy array.
1233
1234        Arguments:
1235            array_name : (str)
1236                name of the array to compute the vorticity,
1237                if None the current active array is selected
1238            on : (str)
1239                compute either on 'points' or 'cells' data
1240            fast : (bool)
1241                if True, will use a less accurate algorithm
1242                that performs fewer derivative calculations (and is therefore faster).
1243        """
1244        vort = vtki.new("GradientFilter")
1245        if on.startswith("p"):
1246            varr = self.dataset.GetPointData()
1247            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
1248        elif on.startswith("c"):
1249            varr = self.dataset.GetCellData()
1250            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
1251        else:
1252            vedo.logger.error(f"in vorticity(): unknown option {on}")
1253            raise RuntimeError
1254
1255        if array_name is None:
1256            if varr.GetVectors():
1257                array_name = varr.GetVectors().GetName()
1258            else:
1259                vedo.logger.error(f"in vorticity(): no vectors found for {on}")
1260                raise RuntimeError
1261
1262        vort.SetInputData(self.dataset)
1263        vort.SetInputScalars(tp, array_name)
1264        vort.ComputeDivergenceOff()
1265        vort.ComputeGradientOff()
1266        vort.ComputeVorticityOn()
1267        vort.SetVorticityArrayName("Vorticity")
1268        vort.SetFasterApproximation(fast)
1269        vort.Update()
1270        if on.startswith("p"):
1271            vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
1272        else:
1273            vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
1274        return vvecs

Compute and return the vorticity of a vector field as a numpy array.

Arguments:
  • array_name : (str) name of the array to compute the vorticity, if None the current active array is selected
  • on : (str) compute either on 'points' or 'cells' data
  • fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
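
A small sketch for divergence() and vorticity(), using an in-plane rigid-rotation field whose divergence is ~0 and whose vorticity is ~(0,0,2); the array name "v" is arbitrary and the printed values are approximate:

import numpy as np
from vedo import Grid

g = Grid(res=(30, 30))
p = g.coordinates
g.pointdata["v"] = np.c_[-p[:, 1], p[:, 0], np.zeros(len(p))]  # rigid rotation about z
print(g.divergence("v").mean())       # ~0
print(g.vorticity("v")[:, 2].mean())  # ~2
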
def probe(self, source, categorical=False, snap=False, tol=0) -> Self:
1276    def probe(
1277            self,
1278            source,
1279            categorical=False,
1280            snap=False,
1281            tol=0,
1282        ) -> Self:
1283        """
1284        Takes a data set and probes its scalars at the specified points in space.
1285
1286        Note that a mask is also output with valid/invalid points which can be accessed
1287        with `mesh.pointdata['ValidPointMask']`.
1288
1289        Arguments:
1290            source : any dataset
1291                the data set to probe.
1292            categorical : bool
1293                control whether the source pointdata is to be treated as categorical.
1294            snap : bool
1295                snap to the cell with the closest point if no cell was found
1296            tol : float
1297                the tolerance to use when performing the probe.
1298
1299        Check out also:
1300            `interpolate_data_from()` and `tovolume()`
1301
1302        Examples:
1303            - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)
1304
1305                ![](https://vedo.embl.es/images/volumetric/probePoints.png)
1306        """
1307        probe_filter = vtki.new("ProbeFilter")
1308        probe_filter.SetSourceData(source.dataset)
1309        probe_filter.SetInputData(self.dataset)
1310        probe_filter.PassCellArraysOn()
1311        probe_filter.PassFieldArraysOn()
1312        probe_filter.PassPointArraysOn()
1313        probe_filter.SetCategoricalData(categorical)
1314        probe_filter.ComputeToleranceOff()
1315        if tol:
1316            probe_filter.ComputeToleranceOn()
1317            probe_filter.SetTolerance(tol)
1318        probe_filter.SetSnapToCellWithClosestPoint(snap)
1319        probe_filter.Update()
1320        self._update(probe_filter.GetOutput(), reset_locators=False)
1321        self.pipeline = utils.OperationNode("probe", parents=[self, source])
1322        self.pointdata.rename("vtkValidPointMask", "ValidPointMask")
1323        return self

Takes a data set and probes its scalars at the specified points in space.

Note that a mask is also output with valid/invalid points which can be accessed with mesh.pointdata['ValidPointMask'].

Arguments:
  • source : (any dataset) the data set to probe.
  • categorical : (bool) control whether the source pointdata is to be treated as categorical.
  • snap : (bool) snap to the cell with the closest point if no cell was found
  • tol : (float) the tolerance to use when performing the probe.
Check out also:

interpolate_data_from() and tovolume()

Examples:
  • probe_points.py (https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)
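
For instance, a rough sketch probing a synthetic volume built from a random numpy array (voxel spacing defaults to 1):

import numpy as np
from vedo import Points, Volume

vol = Volume(np.random.rand(20, 20, 20))   # scalar volume spanning [0,19] per axis
pts = Points(np.random.rand(100, 3) * 19)  # probe locations inside the volume
pts.probe(vol)                             # volume scalars are copied onto the points
print(pts.pointdata["ValidPointMask"].sum(), "valid points")
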
def compute_cell_size(self) -> Self:
1325    def compute_cell_size(self) -> Self:
1326        """
1327        Add to this object a cell data array
1328        containing the area, volume and edge length
1329        of the cells (when applicable to the object type).
1330
1331        Array names are: `Area`, `Volume`, `Length`.
1332        """
1333        csf = vtki.new("CellSizeFilter")
1334        csf.SetInputData(self.dataset)
1335        csf.SetComputeArea(1)
1336        csf.SetComputeVolume(1)
1337        csf.SetComputeLength(1)
1338        csf.SetComputeVertexCount(0)
1339        csf.SetAreaArrayName("Area")
1340        csf.SetVolumeArrayName("Volume")
1341        csf.SetLengthArrayName("Length")
1342        csf.Update()
1343        self._update(csf.GetOutput(), reset_locators=False)
1344        return self

Add to this object a cell data array containing the area, volume and edge length of the cells (when applicable to the object type).

Array names are: Area, Volume, Length.
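
For example:

from vedo import Sphere

s = Sphere().compute_cell_size()
print(s.celldata["Area"].sum())  # total mesh area, close to 4*pi for the unit sphere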

def generate_random_data(self) -> Self:
1346    def generate_random_data(self) -> Self:
1347        """Fill a dataset with random attributes"""
1348        gen = vtki.new("RandomAttributeGenerator")
1349        gen.SetInputData(self.dataset)
1350        gen.GenerateAllDataOn()
1351        gen.SetDataTypeToFloat()
1352        gen.GeneratePointNormalsOff()
1353        gen.GeneratePointTensorsOn()
1354        gen.GenerateCellScalarsOn()
1355        gen.Update()
1356        self._update(gen.GetOutput(), reset_locators=False)
1357        self.pipeline = utils.OperationNode("generate_random_data", parents=[self])
1358        return self

Fill a dataset with random attributes

def integrate_data(self) -> dict:
1360    def integrate_data(self) -> dict:
1361        """
1362        Integrate point and cell data arrays while computing length,
1363        area or volume of the domain. It works for 1D, 2D or 3D cells.
1364    
1365        For volumetric datasets, this filter ignores all but 3D cells.
1366        It will not compute the volume contained in a closed surface.
1367
1368        Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
1369        which contain the integration result for the corresponding attributes.
1370
1371        Examples:
1372            ```python
1373            from vedo import *
1374            surf = Sphere(res=100)
1375            surf.pointdata['scalars'] = np.ones(surf.npoints)
1376            data = surf.integrate_data()
1377            print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
1378            ```
1379
1380            ```python
1381            from vedo import *
1382
1383            xcoords1 = np.arange(0, 2.2, 0.2)
1384            xcoords2 = sqrt(np.arange(0, 4.2, 0.2))
1385
1386            ycoords = np.arange(0, 1.2, 0.2)
1387
1388            surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
1389            surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)
1390
1391            surf1.pointdata['scalars'] = surf1.vertices[:,2]
1392            surf2.pointdata['scalars'] = surf2.vertices[:,2]
1393
1394            data1 = surf1.integrate_data()
1395            data2 = surf2.integrate_data()
1396
1397            print(data1['pointdata']['scalars'],
1398                "is equal to",
1399                data2['pointdata']['scalars'],
1400                "even if the grids are different!",
1401                "(= the volume under the surface)"
1402            )
1403            show(surf1, surf2, N=2, axes=1).close()
1404            ```
1405        """
1406        vinteg = vtki.new("IntegrateAttributes")
1407        vinteg.SetInputData(self.dataset)
1408        vinteg.Update()
1409        ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
1410        data = dict(
1411            pointdata=ugrid.pointdata.todict(),
1412            celldata=ugrid.celldata.todict(),
1413            metadata=ugrid.metadata.todict(),
1414        )
1415        return data

Integrate point and cell data arrays while computing length, area or volume of the domain. It works for 1D, 2D or 3D cells.

For volumetric datasets, this filter ignores all but 3D cells. It will not compute the volume contained in a closed surface.

Returns a dictionary with keys: pointdata, celldata, metadata, which contain the integration result for the corresponding attributes.

Examples:
from vedo import *
surf = Sphere(res=100)
surf.pointdata['scalars'] = np.ones(surf.npoints)
data = surf.integrate_data()
print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
from vedo import *

xcoords1 = np.arange(0, 2.2, 0.2)
xcoords2 = sqrt(np.arange(0, 4.2, 0.2))

ycoords = np.arange(0, 1.2, 0.2)

surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)

surf1.pointdata['scalars'] = surf1.vertices[:,2]
surf2.pointdata['scalars'] = surf2.vertices[:,2]

data1 = surf1.integrate_data()
data2 = surf2.integrate_data()

print(data1['pointdata']['scalars'],
    "is equal to",
    data2['pointdata']['scalars'],
    "even if the grids are different!",
    "(= the volume under the surface)"
)
show(surf1, surf2, N=2, axes=1).close()
def write(self, filename, binary=True) -> None:
1417    def write(self, filename, binary=True) -> None:
1418        """Write object to file."""
1419        out = vedo.file_io.write(self, filename, binary)
1420        out.pipeline = utils.OperationNode(
1421            "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c"
1422        )

Write object to file.

def tomesh(self, bounds=(), shrink=0) -> vedo.mesh.Mesh:
1424    def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh":
1425        """
1426        Extract boundary geometry from dataset (or convert data to polygonal type).
1427
1428        Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
1429        to keep track of the original mesh elements.
1430
1431        Arguments:
1432            bounds : (list)
1433                specify a sub-region to extract
1434            shrink : (float)
1435                shrink the cells to a fraction of their original size
1436        """
1437        geo = vtki.new("GeometryFilter")
1438
1439        if shrink:
1440            sf = vtki.new("ShrinkFilter")
1441            sf.SetInputData(self.dataset)
1442            sf.SetShrinkFactor(shrink)
1443            sf.Update()
1444            geo.SetInputData(sf.GetOutput())
1445        else:
1446            geo.SetInputData(self.dataset)
1447
1448        geo.SetPassThroughCellIds(1)
1449        geo.SetPassThroughPointIds(1)
1450        geo.SetOriginalCellIdsName("OriginalCellIds")
1451        geo.SetOriginalPointIdsName("OriginalPointIds")
1452        geo.SetNonlinearSubdivisionLevel(1)
1453        # geo.MergingOff() # crashes on StructuredGrids
1454        if bounds:
1455            geo.SetExtent(bounds)
1456            geo.ExtentClippingOn()
1457        geo.Update()
1458        msh = vedo.mesh.Mesh(geo.GetOutput())
1459        msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b")
1460        return msh

Extract boundary geometry from dataset (or convert data to polygonal type).

Two new arrays are added to the mesh: OriginalCellIds and OriginalPointIds to keep track of the original mesh elements.

Arguments:
  • bounds : (list) specify a sub-region to extract
  • shrink : (float) shrink the cells to a fraction of their original size
def signed_distance( self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> vedo.volume.Volume:
1462    def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
1463        """
1464        Compute the `Volume` object whose voxels contains the signed distance from
1465        the object. The calling object must have "Normals" defined.
1466
1467        Arguments:
1468            bounds : (list, actor)
1469                bounding box sizes
1470            dims : (list)
1471                dimensions (nr. of voxels) of the output volume.
1472            invert : (bool)
1473                flip the sign
1474            max_radius : (float)
1475                specify how far out to propagate distance calculation
1476
1477        Examples:
1478            - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)
1479
1480                ![](https://vedo.embl.es/images/basic/distance2mesh.png)
1481        """
1482        if bounds is None:
1483            bounds = self.bounds()
1484        if max_radius is None:
1485            max_radius = self.diagonal_size() / 2
1486        dist = vtki.new("SignedDistance")
1487        dist.SetInputData(self.dataset)
1488        dist.SetRadius(max_radius)
1489        dist.SetBounds(bounds)
1490        dist.SetDimensions(dims)
1491        dist.Update()
1492        img = dist.GetOutput()
1493        if invert:
1494            mat = vtki.new("ImageMathematics")
1495            mat.SetInput1Data(img)
1496            mat.SetOperationToMultiplyByK()
1497            mat.SetConstantK(-1)
1498            mat.Update()
1499            img = mat.GetOutput()
1500
1501        vol = vedo.Volume(img)
1502        vol.name = "SignedDistanceVolume"
1503        vol.pipeline = utils.OperationNode(
1504            "signed_distance",
1505            parents=[self],
1506            comment=f"dims={tuple(vol.dimensions())}",
1507            c="#e9c46a:#0096c7",
1508        )
1509        return vol

Compute the Volume object whose voxels contain the signed distance from the object. The calling object must have "Normals" defined.

Arguments:
  • bounds : (list, actor) bounding box sizes
  • dims : (list) dimensions (nr. of voxels) of the output volume.
  • invert : (bool) flip the sign
  • max_radius : (float) specify how far out to propagate distance calculation
Examples:
  • [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)
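
A small self-contained sketch of the typical workflow (the isovalue 0.1 is arbitrary):

```python
import vedo

mesh = vedo.Sphere().compute_normals()          # "Normals" must be defined on the input
vol = mesh.signed_distance(dims=(40, 40, 40))   # Volume of signed distances to the surface
shell = vol.isosurface(0.1)                     # offset surface at distance 0.1
vedo.show(mesh.alpha(0.2), shell, axes=1).close()
```
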
def unsigned_distance( self, dims=(25, 25, 25), bounds=(), max_radius=0, cap_value=0) -> vedo.volume.Volume:
1511    def unsigned_distance(
1512            self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
1513        """
1514        Compute the `Volume` object whose voxels contain the unsigned distance.
1515        """
1516        dist = vtki.new("UnsignedDistance")
1517        dist.SetInputData(self.dataset)
1518        dist.SetDimensions(dims)
1519
1520        if len(bounds) == 6:
1521            dist.SetBounds(bounds)
1522        # elif bounds == "auto":
1523        #     dist.AdjustBoundsOn()
1524        else:
1525            dist.SetBounds(self.bounds())
1526        if not max_radius:
1527            max_radius = self.diagonal_size() / 10
1528        dist.SetRadius(max_radius)
1529
1530        if self.point_locator:
1531            dist.SetLocator(self.point_locator)
1532        
1533        if cap_value is not None:
1534            dist.CappingOn()
1535            dist.SetCapValue(cap_value)
1536        dist.SetOutputScalarTypeToFloat()
1537        dist.Update()
1538        vol = vedo.Volume(dist.GetOutput())
1539        vol.name = "UnsignedDistanceVolume"
1540        vol.pipeline = utils.OperationNode(
1541            "unsigned_distance", parents=[self], c="#e9c46a:#0096c7")
1542        return vol

Compute the Volume object whose voxels contain the unsigned distance.
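
A minimal sketch along the same lines (the 0.2 isovalue is arbitrary):

```python
import vedo

mesh = vedo.Cube()
vol = mesh.unsigned_distance(dims=(30, 30, 30), max_radius=1.5)
shell = vol.isosurface(0.2)                     # shell at distance ~0.2 from the cube
vedo.show(mesh.wireframe(), shell, axes=1).close()
```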

def smooth_data( self, niter=10, relaxation_factor=0.1, strategy=0, mask=None, mode='distance2', exclude=('Normals', 'TextureCoordinates')) -> Self:
1544    def smooth_data(self, 
1545            niter=10, relaxation_factor=0.1, strategy=0, mask=None,
1546            mode="distance2",
1547            exclude=("Normals", "TextureCoordinates"),
1548        ) -> Self:
1549        """
1550        Smooth point attribute data using distance weighted Laplacian kernel.
1551        The effect is to blur regions of high variation and emphasize low variation regions.
1552
1553        A central concept of this method is the point smoothing stencil.
1554        A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
1555        To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
1556        the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1).
1557        This averaging process is repeated until the maximum number of iterations is reached.
1558
1559        The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
1560        The a(i+1) attributes are determined from the a(i) attributes as follows:
1561            a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))
1562    
1563        Convergence occurs faster for larger relaxation factors.
1564        Typically a small number of iterations is required for large relaxation factors,
1565        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
1566        adequate (i.e., just a distance weighted average is computed).
1567
1568        Warning:
1569            Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1;
1570            after attribute smoothing this constraint is likely to be violated.
1571            Other vectors and tensors may suffer from similar issues.
1572            In such a situation, specify `exclude=...` which will not be smoothed (and simply passed through to the output).
1573            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
1574            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
1575            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n,
1576            where n is the number of connected points in the stencil.
1577            The smoothing process reduces high frequency information in the data attributes.
1578            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost,
1579            and the attributes will move towards an "average" value.
1580            While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use
1581            an image-based algorithm to perform data smoothing.
1582            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
1583            (and hence the associated points defining the edge). 
1584
1585        Arguments:
1586            niter : (int)
1587                number of iterations
1588            relaxation_factor : (float)
1589                relaxation factor controlling the amount of Laplacian smoothing applied
1590            strategy : (int)
1591                strategy to use for Laplacian smoothing
1592                    - 0: use all points, all point data attributes are smoothed
1593                    - 1: smooth all point attribute data except those on the boundary
1594                    - 2: only point data connected to a boundary point are smoothed
1595            mask : (str, np.ndarray)
1596                array to be used as a mask (the strategy keyword is then ignored)
1597            mode : (str)
1598                smoothing mode, either "distance2", "distance" or "average"
1599                    - distance**2 weighted (i.e., 1/r**2 interpolation weights)
1600                    - distance weighted (i.e., 1/r) approach;
1601                    - simple average of all connected points in the stencil
1602            exclude : (list)
1603                list of arrays to be excluded from smoothing
1604        """
1605        try:
1606            saf = vtki.new("AttributeSmoothingFilter")
1607        except:
1608            vedo.logger.error("smooth_data() only available in VTK>=9.3.0")
1609            return self
1610        saf.SetInputData(self.dataset)
1611        saf.SetRelaxationFactor(relaxation_factor)
1612        saf.SetNumberOfIterations(niter)
1613
1614        for ex in exclude:
1615            saf.AddExcludedArray(ex)
1616
1617        if mode == "distance":
1618            saf.SetWeightsTypeToDistance()
1619        elif mode == "distance2":
1620            saf.SetWeightsTypeToDistance2()
1621        elif mode == "average":
1622            saf.SetWeightsTypeToAverage()
1623        else:
1624            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
1625            raise TypeError
1626
1627        saf.SetSmoothingStrategy(strategy)
1628        if mask is not None:
1629            saf.SetSmoothingStrategyToSmoothingMask()
1630            if isinstance(mask, str):
1631                mask_ = self.dataset.GetPointData().GetArray(mask)
1632                if not mask_:
1633                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
1634                    return self
1635                mask_array = vtki.vtkUnsignedCharArray()
1636                mask_array.ShallowCopy(mask_)
1637                mask_array.SetName(mask_.GetName())
1638            else:
1639                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
1640            saf.SetSmoothingMask(mask_array)
1641
1642        saf.Update()
1643
1644        self._update(saf.GetOutput())
1645        self.pipeline = utils.OperationNode(
1646            "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b"
1647        )
1648        return self

Smooth point attribute data using distance weighted Laplacian kernel. The effect is to blur regions of high variation and emphasize low variation regions.

A central concept of this method is the point smoothing stencil. A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). This averaging process is repeated until the maximum number of iterations is reached.

The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. The a(i+1) attributes are determined from the a(i) attributes as follows: a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))

Convergence occurs faster for larger relaxation factors. Typically a small number of iterations is required for large relaxation factors, and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be adequate (i.e., just a distance weighted average is computed).

Warning:

Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to satisfy |n|=1; after attribute smoothing this constraint is likely to be violated. Other vectors and tensors may suffer from similar issues. In such a situation, list those arrays in exclude=...: they will not be smoothed and are simply passed through to the output.

The distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance between the point to be smoothed and an edge-connected neighbor (defined by the smoothing stencil). The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n, where n is the number of connected points in the stencil.

The smoothing process reduces high-frequency information in the data attributes. With excessive smoothing (large numbers of iterations and/or a large relaxation factor) important details may be lost, and the attributes will move towards an "average" value.

While this filter will process any dataset type, if the input data is a 3D image volume it is likely much faster to use an image-based algorithm to perform data smoothing. To determine boundary points in polygonal data, edges used by only one cell are considered boundary (and hence the associated points defining the edge).

Arguments:
  • niter : (int) number of iterations
  • relaxation_factor : (float) relaxation factor controlling the amount of Laplacian smoothing applied
  • strategy : (int) strategy to use for Laplacian smoothing: 0 = use all points (all point data attributes are smoothed); 1 = smooth all point attribute data except those on the boundary; 2 = only point data connected to a boundary point are smoothed
  • mask : (str, np.ndarray) array to be used as a mask (the strategy keyword is then ignored)
  • mode : (str) smoothing mode, either "distance2", "distance" or "average": distance**2 weighted (i.e., 1/r**2 interpolation weights), distance weighted (i.e., 1/r), or a simple average of all connected points in the stencil
  • exclude : (list) list of arrays to be excluded from smoothing
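
A minimal sketch (requires VTK >= 9.3): attach a noisy scalar array to a sphere and smooth it in place.

```python
import numpy as np
import vedo

mesh = vedo.Sphere(res=50)
pts = mesh.vertices                                   # (N, 3) vertex coordinates
mesh.pointdata["noisy_z"] = pts[:, 2] + np.random.randn(len(pts)) * 0.1
mesh.smooth_data(niter=20, relaxation_factor=0.2, mode="distance2")
mesh.cmap("coolwarm", "noisy_z").show(axes=1).close()
```
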
def compute_streamlines( self, seeds: Any, integrator='rk4', direction='forward', initial_step_size=None, max_propagation=None, max_steps=10000, step_length=0, surface_constrained=False, compute_vorticity=False) -> Optional[vedo.shapes.Lines]:
1650    def compute_streamlines(
1651            self, 
1652            seeds: Any, 
1653            integrator="rk4",
1654            direction="forward",
1655            initial_step_size=None,
1656            max_propagation=None,
1657            max_steps=10000,
1658            step_length=0,
1659            surface_constrained=False,
1660            compute_vorticity=False,
1661        ) -> Union["vedo.Lines", None]:
1662        """
1663        Integrate a vector field to generate streamlines.
1664
1665        Arguments:
1666            seeds : (Mesh, Points, list)
1667                starting points of the streamlines
1668            integrator : (str)
1669                type of integration method to be used:
1670                    - "rk2" (Runge-Kutta 2)
1671                    - "rk4" (Runge-Kutta 4)
1672                    - "rk45" (Runge-Kutta 45)
1673            direction : (str)
1674                direction of integration, either "forward", "backward" or "both"
1675            initial_step_size : (float)
1676                initial step size used for line integration
1677            max_propagation : (float)
1678                maximum length of a streamline expressed in absolute units
1679            max_steps : (int)
1680                maximum number of steps for a streamline
1681            step_length : (float)
1682                maximum length of a step expressed in absolute units
1683            surface_constrained : (bool)
1684                whether to stop integrating when the streamline leaves the surface
1685            compute_vorticity : (bool)
1686                whether to compute the vorticity at each streamline point
1687        """
1688        b = self.dataset.GetBounds()
1689        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
1690        if initial_step_size is None:
1691            initial_step_size = size / 1000.0
1692
1693        if max_propagation is None:
1694            max_propagation = size * 2
1695
1696        if utils.is_sequence(seeds):
1697            seeds = vedo.Points(seeds)
1698
1699        sti = vtki.new("StreamTracer")
1700        sti.SetSourceData(seeds.dataset)
1701        if isinstance(self, vedo.RectilinearGrid):
1702            sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset)
1703        else:
1704            sti.SetInputDataObject(self.dataset)
1705
1706        sti.SetInitialIntegrationStep(initial_step_size)
1707        sti.SetComputeVorticity(compute_vorticity)
1708        sti.SetMaximumNumberOfSteps(max_steps)
1709        sti.SetMaximumPropagation(max_propagation)
1710        sti.SetSurfaceStreamlines(surface_constrained)
1711        if step_length:
1712            sti.SetMaximumIntegrationStep(step_length)
1713
1714        if "for" in direction:
1715            sti.SetIntegrationDirectionToForward()
1716        elif "back" in direction:
1717            sti.SetIntegrationDirectionToBackward()
1718        elif "both" in direction:
1719            sti.SetIntegrationDirectionToBoth()
1720        else:
1721            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
1722            return None
1723
1724        if integrator == "rk2":
1725            sti.SetIntegratorTypeToRungeKutta2()
1726        elif integrator == "rk4":
1727            sti.SetIntegratorTypeToRungeKutta4()
1728        elif integrator == "rk45":
1729            sti.SetIntegratorTypeToRungeKutta45()
1730        else:
1731            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
1732            return None
1733
1734        sti.Update()
1735
1736        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
1737        stlines.name = "StreamLines"
1738        self.pipeline = utils.OperationNode(
1739            "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b"
1740        )
1741        return stlines

Integrate a vector field to generate streamlines.

Arguments:
  • seeds : (Mesh, Points, list) starting points of the streamlines
  • integrator : (str) type of integration method to be used: "rk2" (Runge-Kutta 2), "rk4" (Runge-Kutta 4) or "rk45" (Runge-Kutta 45)
  • direction : (str) direction of integration, either "forward", "backward" or "both"
  • initial_step_size : (float) initial step size used for line integration
  • max_propagation : (float) maximum length of a streamline expressed in absolute units
  • max_steps : (int) maximum number of steps for a streamline
  • step_length : (float) maximum length of a step expressed in absolute units
  • surface_constrained : (bool) whether to stop integrating when the streamline leaves the surface
  • compute_vorticity : (bool) whether to compute the vorticity at each streamline point
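
A hedged sketch: "velocity.vtu" is a placeholder for any dataset whose active point vectors define the field to integrate.

```python
import numpy as np
import vedo

domain = vedo.load("velocity.vtu")        # placeholder: dataset carrying active point vectors
seeds = np.random.rand(50, 3)             # plain arrays/lists of seed points are accepted;
                                          # adapt them so they lie inside the domain
lines = domain.compute_streamlines(seeds, integrator="rk4", direction="both")
if lines is not None:
    vedo.show(domain, lines, axes=1).close()
```
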
class PointAlgorithms(CommonAlgorithms):
1744class PointAlgorithms(CommonAlgorithms):
1745    """Methods for point clouds."""
1746
1747    def apply_transform(self, LT: Any, deep_copy=True) -> Self:
1748        """
1749        Apply a linear or non-linear transformation to the mesh polygonal data.
1750
1751        Example:
1752        ```python
1753        from vedo import Cube, show, settings
1754        settings.use_parallel_projection = True
1755        c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
1756        T = c1.transform  # rotate by 25 degrees, place at (2,1)
1757        c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
1758        c2.apply_transform(T)
1759        show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
1760        ```
1761
1762        ![](https://vedo.embl.es/images/feats/apply_transform.png)
1763        """
1764        if self.dataset.GetNumberOfPoints() == 0:
1765            return self
1766
1767        if isinstance(LT, LinearTransform):
1768            LT_is_linear = True
1769            tr = LT.T
1770            if LT.is_identity():
1771                return self
1772        
1773        elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
1774            LT_is_linear = True
1775            LT = LinearTransform(LT)
1776            tr = LT.T
1777            if LT.is_identity():
1778                return self
1779
1780        elif isinstance(LT, NonLinearTransform):
1781            LT_is_linear = False
1782            tr = LT.T
1783            self.transform = LT  # reset
1784
1785        elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
1786            LT_is_linear = False
1787            tr = LT
1788            self.transform = NonLinearTransform(LT)  # reset
1789
1790        else:
1791            vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
1792            return self
1793
1794        ################
1795        if LT_is_linear:
1796            try:
1797                # self.transform might still not be linear
1798                self.transform.concatenate(LT)
1799            except AttributeError:
1800                # in that case reset it
1801                self.transform = LinearTransform()
1802
1803        ################
1804        if isinstance(self.dataset, vtki.vtkPolyData):
1805            tp = vtki.new("TransformPolyDataFilter")
1806        elif isinstance(self.dataset, vtki.vtkUnstructuredGrid):
1807            tp = vtki.new("TransformFilter")
1808            tp.TransformAllInputVectorsOn()
1809        # elif isinstance(self.dataset, vtki.vtkImageData):
1810        #     tp = vtki.new("ImageReslice")
1811        #     tp.SetInterpolationModeToCubic()
1812        #     tp.SetResliceTransform(tr)
1813        else:
1814            vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}")
1815            return self
1816
1817        tp.SetTransform(tr)
1818        tp.SetInputData(self.dataset)
1819        tp.Update()
1820        out = tp.GetOutput()
1821
1822        if deep_copy:
1823            self.dataset.DeepCopy(out)
1824        else:
1825            self.dataset.ShallowCopy(out)
1826
1827        # reset the locators
1828        self.point_locator = None
1829        self.cell_locator = None
1830        self.line_locator = None
1831        return self
1832
1833    def apply_transform_from_actor(self) -> LinearTransform:
1834        """
1835        Apply the current transformation of the actor to the data.
1836        Useful when manually moving an actor (eg. when pressing "a").
1837        Returns the `LinearTransform` object.
1838
1839        Note that this method is automatically called when the window is closed,
1840        or the interactor style is changed.
1841        """
1842        M = self.actor.GetMatrix()
1843        self.apply_transform(M)
1844        iden = vtki.vtkMatrix4x4()
1845        self.actor.PokeMatrix(iden)
1846        return LinearTransform(M)
1847
1848    def pos(self, x=None, y=None, z=None) -> Self:
1849        """Set/Get object position."""
1850        if x is None:  # get functionality
1851            return self.transform.position
1852
1853        if z is None and y is None:  # assume x is of the form (x,y,z)
1854            if len(x) == 3:
1855                x, y, z = x
1856            else:
1857                x, y = x
1858                z = 0
1859        elif z is None:  # assume x,y is of the form x, y
1860            z = 0
1861
1862        q = self.transform.position
1863        delta = [x, y, z] - q
1864        if delta[0] == delta[1] == delta[2] == 0:
1865            return self
1866        LT = LinearTransform().translate(delta)
1867        return self.apply_transform(LT)
1868
1869    def shift(self, dx=0, dy=0, dz=0) -> Self:
1870        """Add a vector to the current object position."""
1871        if utils.is_sequence(dx):
1872            dx, dy, dz = utils.make3d(dx)
1873        if dx == dy == dz == 0:
1874            return self
1875        LT = LinearTransform().translate([dx, dy, dz])
1876        return self.apply_transform(LT)
1877
1878    def x(self, val=None) -> Self:
1879        """Set/Get object position along x axis."""
1880        p = self.transform.position
1881        if val is None:
1882            return p[0]
1883        self.pos(val, p[1], p[2])
1884        return self
1885
1886    def y(self, val=None)-> Self:
1887        """Set/Get object position along y axis."""
1888        p = self.transform.position
1889        if val is None:
1890            return p[1]
1891        self.pos(p[0], val, p[2])
1892        return self
1893
1894    def z(self, val=None) -> Self:
1895        """Set/Get object position along z axis."""
1896        p = self.transform.position
1897        if val is None:
1898            return p[2]
1899        self.pos(p[0], p[1], val)
1900        return self
1901
1902    def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
1903        """
1904        Rotate around an arbitrary `axis` passing through `point`.
1905
1906        Example:
1907        ```python
1908        from vedo import *
1909        c1 = Cube()
1910        c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
1911        v = vector(0.2,1,0)
1912        p = vector(1,0,0)  # axis passes through this point
1913        c2.rotate(90, axis=v, point=p)
1914        l = Line(-v+p, v+p).lw(3).c('red')
1915        show(c1, l, c2, axes=1).close()
1916        ```
1917
1918        ![](https://vedo.embl.es/images/feats/rotate_axis.png)
1919        """
1920        LT = LinearTransform()
1921        LT.rotate(angle, axis, point, rad)
1922        return self.apply_transform(LT)
1923
1924    def rotate_x(self, angle: float, rad=False, around=None) -> Self:
1925        """
1926        Rotate around x-axis. If angle is in radians set `rad=True`.
1927
1928        Use `around` to define a pivoting point.
1929        """
1930        if angle == 0:
1931            return self
1932        LT = LinearTransform().rotate_x(angle, rad, around)
1933        return self.apply_transform(LT)
1934
1935    def rotate_y(self, angle: float, rad=False, around=None) -> Self:
1936        """
1937        Rotate around y-axis. If angle is in radians set `rad=True`.
1938
1939        Use `around` to define a pivoting point.
1940        """
1941        if angle == 0:
1942            return self
1943        LT = LinearTransform().rotate_y(angle, rad, around)
1944        return self.apply_transform(LT)
1945
1946    def rotate_z(self, angle: float, rad=False, around=None) -> Self:
1947        """
1948        Rotate around z-axis. If angle is in radians set `rad=True`.
1949
1950        Use `around` to define a pivoting point.
1951        """
1952        if angle == 0:
1953            return self
1954        LT = LinearTransform().rotate_z(angle, rad, around)
1955        return self.apply_transform(LT)
1956
1957    def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
1958        """
1959        Reorient the object to point to a new direction from an initial one.
1960        If `initaxis` is None, the object will be assumed in its "default" orientation.
1961        If `xyplane` is True, the object will be rotated to lie on the xy plane.
1962
1963        Use `rotation` to first rotate the object around its `initaxis`.
1964        """
1965        q = self.transform.position
1966        LT = LinearTransform()
1967        LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane)
1968        return self.apply_transform(LT)
1969
1970    def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]:
1971        """
1972        Set/get object's scaling factor.
1973
1974        Arguments:
1975            s : (list, float)
1976                scaling factor(s).
1977            reset : (bool)
1978                if True previous scaling factors are ignored.
1979            origin : (bool)
1980                if True scaling is applied with respect to the object's position,
1981                otherwise it is applied with respect to (0,0,0).
1982
1983        Note:
1984            use `s=(sx,sy,sz)` to scale differently in the three coordinates.
1985        """
1986        if s is None:
1987            return np.array(self.transform.T.GetScale())
1988
1989        if not utils.is_sequence(s):
1990            s = [s, s, s]
1991
1992        LT = LinearTransform()
1993        if reset:
1994            old_s = np.array(self.transform.T.GetScale())
1995            LT.scale(s / old_s)
1996        else:
1997            if origin is True:
1998                LT.scale(s, origin=self.transform.position)
1999            elif origin is False:
2000                LT.scale(s, origin=False)
2001            else:
2002                LT.scale(s, origin=origin)
2003
2004        return self.apply_transform(LT)

Methods for point clouds.

PointAlgorithms()
def apply_transform(self, LT: Any, deep_copy=True) -> Self:
1747    def apply_transform(self, LT: Any, deep_copy=True) -> Self:
1748        """
1749        Apply a linear or non-linear transformation to the mesh polygonal data.
1750
1751        Example:
1752        ```python
1753        from vedo import Cube, show, settings
1754        settings.use_parallel_projection = True
1755        c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
1756        T = c1.transform  # rotate by 25 degrees, place at (2,1)
1757        c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
1758        c2.apply_transform(T)
1759        show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
1760        ```
1761
1762        ![](https://vedo.embl.es/images/feats/apply_transform.png)
1763        """
1764        if self.dataset.GetNumberOfPoints() == 0:
1765            return self
1766
1767        if isinstance(LT, LinearTransform):
1768            LT_is_linear = True
1769            tr = LT.T
1770            if LT.is_identity():
1771                return self
1772        
1773        elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
1774            LT_is_linear = True
1775            LT = LinearTransform(LT)
1776            tr = LT.T
1777            if LT.is_identity():
1778                return self
1779
1780        elif isinstance(LT, NonLinearTransform):
1781            LT_is_linear = False
1782            tr = LT.T
1783            self.transform = LT  # reset
1784
1785        elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
1786            LT_is_linear = False
1787            tr = LT
1788            self.transform = NonLinearTransform(LT)  # reset
1789
1790        else:
1791            vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
1792            return self
1793
1794        ################
1795        if LT_is_linear:
1796            try:
1797                # self.transform might still not be linear
1798                self.transform.concatenate(LT)
1799            except AttributeError:
1800                # in that case reset it
1801                self.transform = LinearTransform()
1802
1803        ################
1804        if isinstance(self.dataset, vtki.vtkPolyData):
1805            tp = vtki.new("TransformPolyDataFilter")
1806        elif isinstance(self.dataset, vtki.vtkUnstructuredGrid):
1807            tp = vtki.new("TransformFilter")
1808            tp.TransformAllInputVectorsOn()
1809        # elif isinstance(self.dataset, vtki.vtkImageData):
1810        #     tp = vtki.new("ImageReslice")
1811        #     tp.SetInterpolationModeToCubic()
1812        #     tp.SetResliceTransform(tr)
1813        else:
1814            vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}")
1815            return self
1816
1817        tp.SetTransform(tr)
1818        tp.SetInputData(self.dataset)
1819        tp.Update()
1820        out = tp.GetOutput()
1821
1822        if deep_copy:
1823            self.dataset.DeepCopy(out)
1824        else:
1825            self.dataset.ShallowCopy(out)
1826
1827        # reset the locators
1828        self.point_locator = None
1829        self.cell_locator = None
1830        self.line_locator = None
1831        return self

Apply a linear or non-linear transformation to the mesh polygonal data.

Example:

from vedo import Cube, show, settings
settings.use_parallel_projection = True
c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
T = c1.transform  # rotate by 25 degrees, place at (2,1)
c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
c2.apply_transform(T)
show(c1, c2, "The 2 cubes should overlap!", axes=1).close()

def apply_transform_from_actor(self) -> vedo.transformations.LinearTransform:
1833    def apply_transform_from_actor(self) -> LinearTransform:
1834        """
1835        Apply the current transformation of the actor to the data.
1836        Useful when manually moving an actor (eg. when pressing "a").
1837        Returns the `LinearTransform` object.
1838
1839        Note that this method is automatically called when the window is closed,
1840        or the interactor style is changed.
1841        """
1842        M = self.actor.GetMatrix()
1843        self.apply_transform(M)
1844        iden = vtki.vtkMatrix4x4()
1845        self.actor.PokeMatrix(iden)
1846        return LinearTransform(M)

Apply the current transformation of the actor to the data. Useful when manually moving an actor (eg. when pressing "a"). Returns the LinearTransform object.

Note that this method is automatically called when the window is closed, or the interactor style is changed.

def pos(self, x=None, y=None, z=None) -> Self:
1848    def pos(self, x=None, y=None, z=None) -> Self:
1849        """Set/Get object position."""
1850        if x is None:  # get functionality
1851            return self.transform.position
1852
1853        if z is None and y is None:  # assume x is of the form (x,y,z)
1854            if len(x) == 3:
1855                x, y, z = x
1856            else:
1857                x, y = x
1858                z = 0
1859        elif z is None:  # assume x,y is of the form x, y
1860            z = 0
1861
1862        q = self.transform.position
1863        delta = [x, y, z] - q
1864        if delta[0] == delta[1] == delta[2] == 0:
1865            return self
1866        LT = LinearTransform().translate(delta)
1867        return self.apply_transform(LT)

Set/Get object position.

def shift(self, dx=0, dy=0, dz=0) -> Self:
1869    def shift(self, dx=0, dy=0, dz=0) -> Self:
1870        """Add a vector to the current object position."""
1871        if utils.is_sequence(dx):
1872            dx, dy, dz = utils.make3d(dx)
1873        if dx == dy == dz == 0:
1874            return self
1875        LT = LinearTransform().translate([dx, dy, dz])
1876        return self.apply_transform(LT)

Add a vector to the current object position.

def x(self, val=None) -> Self:
1878    def x(self, val=None) -> Self:
1879        """Set/Get object position along x axis."""
1880        p = self.transform.position
1881        if val is None:
1882            return p[0]
1883        self.pos(val, p[1], p[2])
1884        return self

Set/Get object position along x axis.

def y(self, val=None) -> Self:
1886    def y(self, val=None)-> Self:
1887        """Set/Get object position along y axis."""
1888        p = self.transform.position
1889        if val is None:
1890            return p[1]
1891        self.pos(p[0], val, p[2])
1892        return self

Set/Get object position along y axis.

def z(self, val=None) -> Self:
1894    def z(self, val=None) -> Self:
1895        """Set/Get object position along z axis."""
1896        p = self.transform.position
1897        if val is None:
1898            return p[2]
1899        self.pos(p[0], p[1], val)
1900        return self

Set/Get object position along z axis.

def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
1902    def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
1903        """
1904        Rotate around an arbitrary `axis` passing through `point`.
1905
1906        Example:
1907        ```python
1908        from vedo import *
1909        c1 = Cube()
1910        c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
1911        v = vector(0.2,1,0)
1912        p = vector(1,0,0)  # axis passes through this point
1913        c2.rotate(90, axis=v, point=p)
1914        l = Line(-v+p, v+p).lw(3).c('red')
1915        show(c1, l, c2, axes=1).close()
1916        ```
1917
1918        ![](https://vedo.embl.es/images/feats/rotate_axis.png)
1919        """
1920        LT = LinearTransform()
1921        LT.rotate(angle, axis, point, rad)
1922        return self.apply_transform(LT)

Rotate around an arbitrary axis passing through point.

Example:

from vedo import *
c1 = Cube()
c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
v = vector(0.2,1,0)
p = vector(1,0,0)  # axis passes through this point
c2.rotate(90, axis=v, point=p)
l = Line(-v+p, v+p).lw(3).c('red')
show(c1, l, c2, axes=1).close()

def rotate_x(self, angle: float, rad=False, around=None) -> Self:
1924    def rotate_x(self, angle: float, rad=False, around=None) -> Self:
1925        """
1926        Rotate around x-axis. If angle is in radians set `rad=True`.
1927
1928        Use `around` to define a pivoting point.
1929        """
1930        if angle == 0:
1931            return self
1932        LT = LinearTransform().rotate_x(angle, rad, around)
1933        return self.apply_transform(LT)

Rotate around x-axis. If angle is in radians set rad=True.

Use around to define a pivoting point.

def rotate_y(self, angle: float, rad=False, around=None) -> Self:
1935    def rotate_y(self, angle: float, rad=False, around=None) -> Self:
1936        """
1937        Rotate around y-axis. If angle is in radians set `rad=True`.
1938
1939        Use `around` to define a pivoting point.
1940        """
1941        if angle == 0:
1942            return self
1943        LT = LinearTransform().rotate_y(angle, rad, around)
1944        return self.apply_transform(LT)

Rotate around y-axis. If angle is in radians set rad=True.

Use around to define a pivoting point.

def rotate_z(self, angle: float, rad=False, around=None) -> Self:
1946    def rotate_z(self, angle: float, rad=False, around=None) -> Self:
1947        """
1948        Rotate around z-axis. If angle is in radians set `rad=True`.
1949
1950        Use `around` to define a pivoting point.
1951        """
1952        if angle == 0:
1953            return self
1954        LT = LinearTransform().rotate_z(angle, rad, around)
1955        return self.apply_transform(LT)

Rotate around z-axis. If angle is in radians set rad=True.

Use around to define a pivoting point.

def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
1957    def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
1958        """
1959        Reorient the object to point to a new direction from an initial one.
1960        If `initaxis` is None, the object will be assumed in its "default" orientation.
1961        If `xyplane` is True, the object will be rotated to lie on the xy plane.
1962
1963        Use `rotation` to first rotate the object around its `initaxis`.
1964        """
1965        q = self.transform.position
1966        LT = LinearTransform()
1967        LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane)
1968        return self.apply_transform(LT)

Reorient the object to point to a new direction from an initial one. If initaxis is None, the object will be assumed in its "default" orientation. If xyplane is True, the object will be rotated to lie on the xy plane.

Use rotation to first rotate the object around its initaxis.
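
A short sketch of typical use:

```python
import vedo

arrow = vedo.Arrow((0, 0, 0), (0, 0, 1))   # built pointing along +z
arrow.reorient((0, 0, 1), (1, 1, 0))        # re-point it along (1, 1, 0)
vedo.show(arrow, axes=1).close()
```
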

def scale( self, s=None, reset=False, origin=True) -> Union[Self, numpy.ndarray]:
1970    def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]:
1971        """
1972        Set/get object's scaling factor.
1973
1974        Arguments:
1975            s : (list, float)
1976                scaling factor(s).
1977            reset : (bool)
1978                if True previous scaling factors are ignored.
1979            origin : (bool)
1980                if True scaling is applied with respect to the object's position,
1981                otherwise it is applied with respect to (0,0,0).
1982
1983        Note:
1984            use `s=(sx,sy,sz)` to scale differently in the three coordinates.
1985        """
1986        if s is None:
1987            return np.array(self.transform.T.GetScale())
1988
1989        if not utils.is_sequence(s):
1990            s = [s, s, s]
1991
1992        LT = LinearTransform()
1993        if reset:
1994            old_s = np.array(self.transform.T.GetScale())
1995            LT.scale(s / old_s)
1996        else:
1997            if origin is True:
1998                LT.scale(s, origin=self.transform.position)
1999            elif origin is False:
2000                LT.scale(s, origin=False)
2001            else:
2002                LT.scale(s, origin=origin)
2003
2004        return self.apply_transform(LT)

Set/get object's scaling factor.

Arguments:
  • s : (list, float) scaling factor(s).
  • reset : (bool) if True previous scaling factors are ignored.
  • origin : (bool) if True scaling is applied with respect to the object's position, otherwise it is applied with respect to (0,0,0).
Note:

use s=(sx,sy,sz) to scale differently in the three coordinates.
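
A short example of the set/get behaviour:

```python
import vedo

c = vedo.Cube()
c.scale(2)                # uniform scaling by 2, about the object's position
c.scale((1, 1, 0.5))      # then squash along z
print(c.scale())          # get the cumulative scaling factors: [2. 2. 1.]
```
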

class VolumeAlgorithms(CommonAlgorithms):
2008class VolumeAlgorithms(CommonAlgorithms):
2009    """Methods for Volume objects."""
2010
2011    def bounds(self) -> np.ndarray:
2012        """
2013        Get the object bounds.
2014        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
2015        """
2016        # OVERRIDE CommonAlgorithms.bounds() which is too slow
2017        return np.array(self.dataset.GetBounds())
2018
2019    def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh":
2020        """
2021        Return a `Mesh` isosurface extracted from the `Volume` object.
2022
2023        Set `value` as single float or list of values to draw the isosurface(s).
2024        Use flying_edges for faster results (but sometimes can interfere with `smooth()`).
2025
2026        The isosurface values can be accessed with `mesh.metadata["isovalue"]`.
2027
2028        Examples:
2029            - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)
2030
2031                ![](https://vedo.embl.es/images/volumetric/isosurfaces.png)
2032        """
2033        scrange = self.dataset.GetScalarRange()
2034
2035        if flying_edges:
2036            cf = vtki.new("FlyingEdges3D")
2037            cf.InterpolateAttributesOn()
2038        else:
2039            cf = vtki.new("ContourFilter")
2040            cf.UseScalarTreeOn()
2041
2042        cf.SetInputData(self.dataset)
2043        cf.ComputeNormalsOn()
2044
2045        if utils.is_sequence(value):
2046            cf.SetNumberOfContours(len(value))
2047            for i, t in enumerate(value):
2048                cf.SetValue(i, t)
2049        else:
2050            if value is None:
2051                value = (2 * scrange[0] + scrange[1]) / 3.0
2052                # print("automatic isosurface value =", value)
2053            cf.SetValue(0, value)
2054
2055        cf.Update()
2056        poly = cf.GetOutput()
2057
2058        out = vedo.mesh.Mesh(poly, c=None).phong()
2059        out.mapper.SetScalarRange(scrange[0], scrange[1])
2060        out.metadata["isovalue"] = value
2061
2062        out.pipeline = utils.OperationNode(
2063            "isosurface",
2064            parents=[self],
2065            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2066            c="#4cc9f0:#e9c46a",
2067        )
2068        return out
2069    
2070    def isosurface_discrete(
2071            self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0,
2072        ) -> "vedo.mesh.Mesh":
2073        """
2074        Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded,
2075        3D version of the multiple objects/labels Surface Nets algorithm.
2076        The input is a 3D image (i.e., volume) where each voxel is labeled
2077        (integer labels are preferred to real values), and the output data is a polygonal mesh separating
2078        labeled regions / objects.
2079        (Note that on output each region [corresponding to a different segmented object] will share
2080        points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).
2081        
2082        Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating
2083        the labels on either side of the polygons composing the output Mesh. 
2084        (This can be used for advanced operations like extracting shared/contacting boundaries between two objects.
2085        The name of this celldata array is "BoundaryLabels").
2086
2087        The values can be accessed with `mesh.metadata["isovalue"]`.
2088
2089        Arguments:
2090            values : (float, list)
2091                single value or list of values to draw the isosurface(s).
2092            background_label : (float)
2093                this value specifies the label value to use when referencing the background
2094                region outside of any of the specified regions.
2095            internal_boundaries : (bool, list)
2096                if False, only the outer boundary surface is produced and internal surfaces are removed.
2097                If a list of integers is provided, only the boundaries between the specified labels will be extracted.
2098            use_quads : (bool)
2099                if True, the output polygons will be quads. If False, the output polygons will be triangles.
2100            nsmooth : (int)
2101                number of iterations of smoothing (0 means no smoothing).
2102
2103        Examples:
2104            - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
2105        """
2106        logger = vtki.get_class("Logger")
2107        logger.SetStderrVerbosity(logger.VERBOSITY_ERROR)
2108
2109        snets = vtki.new("SurfaceNets3D")
2110        snets.SetInputData(self.dataset)
2111
2112        if nsmooth:
2113            snets.SmoothingOn()
2114            snets.AutomaticSmoothingConstraintsOn()
2115            snets.GetSmoother().SetNumberOfIterations(nsmooth)
2116            # snets.GetSmoother().SetRelaxationFactor(relaxation_factor)
2117            # snets.GetSmoother().SetConstraintDistance(constraint_distance)
2118        else:
2119            snets.SmoothingOff()
2120
2121        if internal_boundaries is False:
2122            snets.SetOutputStyleToBoundary()
2123        elif internal_boundaries is True:
2124            snets.SetOutputStyleToDefault()
2125        elif utils.is_sequence(internal_boundaries):
2126            snets.SetOutputStyleToSelected()
2127            snets.InitializeSelectedLabelsList()
2128            for val in internal_boundaries:
2129                snets.AddSelectedLabel(val)
2130        else:
2131            vedo.logger.error("isosurface_discrete(): unknown boundaries option")
2132
2133        n = len(values)
2134        snets.SetNumberOfContours(n)
2135        snets.SetNumberOfLabels(n)
2136
2137        if background_label is not None:
2138            snets.SetBackgroundLabel(background_label)
2139
2140        for i, val in enumerate(values):
2141            snets.SetValue(i, val)
2142        
2143        if use_quads:
2144            snets.SetOutputMeshTypeToQuads()
2145        else:
2146            snets.SetOutputMeshTypeToTriangles()
2147        snets.Update()
2148
2149        out = vedo.mesh.Mesh(snets.GetOutput())
2150        out.metadata["isovalue"] = values
2151        out.pipeline = utils.OperationNode(
2152            "isosurface_discrete",
2153            parents=[self],
2154            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2155            c="#4cc9f0:#e9c46a",
2156        )
2157
2158        logger.SetStderrVerbosity(logger.VERBOSITY_INFO)
2159        return out
2160
2161
2162    def legosurface(
2163        self,
2164        vmin=None,
2165        vmax=None,
2166        invert=False,
2167        boundary=True,
2168        array_name="input_scalars",
2169    ) -> "vedo.mesh.Mesh":
2170        """
2171        Represent an object - typically a `Volume` - as lego blocks (voxels).
2172        By default colors correspond to the volume's scalar.
2173        Returns a `Mesh` object.
2174
2175        Arguments:
2176            vmin : (float)
2177                the lower threshold, voxels below this value are not shown.
2178            vmax : (float)
2179                the upper threshold, voxels above this value are not shown.
2180            boundary : (bool)
2181                controls whether to include cells that are partially inside
2182            array_name : (int, str)
2183                name or index of the scalar array to be considered
2184
2185        Examples:
2186            - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)
2187
2188                ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
2189        """
2190        imp_dataset = vtki.new("ImplicitDataSet")
2191        imp_dataset.SetDataSet(self.dataset)
2192        window = vtki.new("ImplicitWindowFunction")
2193        window.SetImplicitFunction(imp_dataset)
2194
2195        srng = list(self.dataset.GetScalarRange())
2196        if vmin is not None:
2197            srng[0] = vmin
2198        if vmax is not None:
2199            srng[1] = vmax
2200        if not boundary:
2201            tol = 0.00001 * (srng[1] - srng[0])
2202            srng[0] -= tol
2203            srng[1] += tol
2204        window.SetWindowRange(srng)
2205        # print("legosurface window range:", srng)
2206
2207        extract = vtki.new("ExtractGeometry")
2208        extract.SetInputData(self.dataset)
2209        extract.SetImplicitFunction(window)
2210        extract.SetExtractInside(invert)
2211        extract.SetExtractBoundaryCells(boundary)
2212        extract.Update()
2213
2214        gf = vtki.new("GeometryFilter")
2215        gf.SetInputData(extract.GetOutput())
2216        gf.Update()
2217
2218        m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
2219        m.map_points_to_cells()
2220        m.celldata.select(array_name)
2221
2222        m.pipeline = utils.OperationNode(
2223            "legosurface",
2224            parents=[self],
2225            comment=f"array: {array_name}",
2226            c="#4cc9f0:#e9c46a",
2227        )
2228        return m
2229
2230    def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
2231        """
2232        Build a polygonal Mesh from the current object.
2233
2234        If `fill=True`, the interior faces of all the cells are created.
2235        (setting a `shrink` value slightly smaller than the default 1.0
2236        can avoid flickering due to internal adjacent faces).
2237
2238        If `fill=False`, only the boundary faces will be generated.
2239        """
2240        gf = vtki.new("GeometryFilter")
2241        if fill:
2242            sf = vtki.new("ShrinkFilter")
2243            sf.SetInputData(self.dataset)
2244            sf.SetShrinkFactor(shrink)
2245            sf.Update()
2246            gf.SetInputData(sf.GetOutput())
2247            gf.Update()
2248            poly = gf.GetOutput()
2249            if shrink == 1.0:
2250                clean_poly = vtki.new("CleanPolyData")
2251                clean_poly.PointMergingOn()
2252                clean_poly.ConvertLinesToPointsOn()
2253                clean_poly.ConvertPolysToLinesOn()
2254                clean_poly.ConvertStripsToPolysOn()
2255                clean_poly.SetInputData(poly)
2256                clean_poly.Update()
2257                poly = clean_poly.GetOutput()
2258        else:
2259            gf.SetInputData(self.dataset)
2260            gf.Update()
2261            poly = gf.GetOutput()
2262
2263        msh = vedo.mesh.Mesh(poly).flat()
2264        msh.scalarbar = self.scalarbar
2265        lut = utils.ctf2lut(self)
2266        if lut:
2267            msh.mapper.SetLookupTable(lut)
2268
2269        msh.pipeline = utils.OperationNode(
2270            "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
2271        )
2272        return msh

Methods for Volume objects.

VolumeAlgorithms()
def bounds(self) -> numpy.ndarray:
2011    def bounds(self) -> np.ndarray:
2012        """
2013        Get the object bounds.
2014        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
2015        """
2016        # OVERRIDE CommonAlgorithms.bounds() which is too slow
2017        return np.array(self.dataset.GetBounds())

Get the object bounds. Returns a list in format [xmin,xmax, ymin,ymax, zmin,zmax].

def isosurface(self, value=None, flying_edges=False) -> vedo.mesh.Mesh:
2019    def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh":
2020        """
2021        Return a `Mesh` isosurface extracted from the `Volume` object.
2022
2023        Set `value` as single float or list of values to draw the isosurface(s).
2024        Use flying_edges for faster results (but sometimes can interfere with `smooth()`).
2025
2026        The isosurface values can be accessed with `mesh.metadata["isovalue"]`.
2027
2028        Examples:
2029            - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)
2030
2031                ![](https://vedo.embl.es/images/volumetric/isosurfaces.png)
2032        """
2033        scrange = self.dataset.GetScalarRange()
2034
2035        if flying_edges:
2036            cf = vtki.new("FlyingEdges3D")
2037            cf.InterpolateAttributesOn()
2038        else:
2039            cf = vtki.new("ContourFilter")
2040            cf.UseScalarTreeOn()
2041
2042        cf.SetInputData(self.dataset)
2043        cf.ComputeNormalsOn()
2044
2045        if utils.is_sequence(value):
2046            cf.SetNumberOfContours(len(value))
2047            for i, t in enumerate(value):
2048                cf.SetValue(i, t)
2049        else:
2050            if value is None:
2051                value = (2 * scrange[0] + scrange[1]) / 3.0
2052                # print("automatic isosurface value =", value)
2053            cf.SetValue(0, value)
2054
2055        cf.Update()
2056        poly = cf.GetOutput()
2057
2058        out = vedo.mesh.Mesh(poly, c=None).phong()
2059        out.mapper.SetScalarRange(scrange[0], scrange[1])
2060        out.metadata["isovalue"] = value
2061
2062        out.pipeline = utils.OperationNode(
2063            "isosurface",
2064            parents=[self],
2065            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2066            c="#4cc9f0:#e9c46a",
2067        )
2068        return out

Return a Mesh isosurface extracted from the Volume object.

Set value as single float or list of values to draw the isosurface(s). Use flying_edges for faster results (but sometimes can interfere with smooth()).

The isosurface values can be accessed with mesh.metadata["isovalue"].

Examples:
  • [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)
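
A self-contained sketch building a Volume from a NumPy array and extracting two isosurfaces:

```python
import numpy as np
import vedo

X, Y, Z = np.mgrid[:30, :30, :30]
field = ((X - 15)**2 + (Y - 15)**2 + (Z - 15)**2) / 225.0    # radial scalar field
vol = vedo.Volume(field)
surf = vol.isosurface([0.5, 1.0], flying_edges=True)
print(surf.metadata["isovalue"])                              # the requested values
vedo.show(surf.alpha(0.5), axes=1).close()
```
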
def isosurface_discrete( self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0) -> vedo.mesh.Mesh:
2070    def isosurface_discrete(
2071            self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0,
2072        ) -> "vedo.mesh.Mesh":
2073        """
2074        Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded,
2075        3D version of the multiple objects/labels Surface Nets algorithm.
2076        The input is a 3D image (i.e., volume) where each voxel is labeled
2077        (integer labels are preferred to real values), and the output data is a polygonal mesh separating
2078        labeled regions / objects.
2079        (Note that on output each region [corresponding to a different segmented object] will share
2080        points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).
2081        
2082        Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating
2083        the labels on either side of the polygons composing the output Mesh. 
2084        (This can be used for advanced operations like extracting shared/contacting boundaries between two objects.
2085        The name of this celldata array is "BoundaryLabels").
2086
2087        The values can be accessed with `mesh.metadata["isovalue"]`.
2088
2089        Arguments:
2090            values : (float, list)
2091                single value or list of values to draw the isosurface(s).
2092            background_label : (float)
2093                this value specifies the label value to use when referencing the background
2094                region outside of any of the specified regions.
2095            internal_boundaries : (bool, list)
2096                if False, only the outer boundary surface is produced and internal surfaces are removed.
2097                If a list of integers is provided, only the boundaries between the specified labels will be extracted.
2098            use_quads : (bool)
2099                if True, the output polygons will be quads. If False, the output polygons will be triangles.
2100            nsmooth : (int)
2101                number of iterations of smoothing (0 means no smoothing).
2102
2103        Examples:
2104            - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
2105        """
2106        logger = vtki.get_class("Logger")
2107        logger.SetStderrVerbosity(logger.VERBOSITY_ERROR)
2108
2109        snets = vtki.new("SurfaceNets3D")
2110        snets.SetInputData(self.dataset)
2111
2112        if nsmooth:
2113            snets.SmoothingOn()
2114            snets.AutomaticSmoothingConstraintsOn()
2115            snets.GetSmoother().SetNumberOfIterations(nsmooth)
2116            # snets.GetSmoother().SetRelaxationFactor(relaxation_factor)
2117            # snets.GetSmoother().SetConstraintDistance(constraint_distance)
2118        else:
2119            snets.SmoothingOff()
2120
2121        if internal_boundaries is False:
2122            snets.SetOutputStyleToBoundary()
2123        elif internal_boundaries is True:
2124            snets.SetOutputStyleToDefault()
2125        elif utils.is_sequence(internal_boundaries):
2126            snets.SetOutputStyleToSelected()
2127            snets.InitializeSelectedLabelsList()
2128            for val in internal_boundaries:
2129                snets.AddSelectedLabel(val)
2130        else:
2131            vedo.logger.error("isosurface_discrete(): unknown boundaries option")
2132
2133        n = len(values)
2134        snets.SetNumberOfContours(n)
2135        snets.SetNumberOfLabels(n)
2136
2137        if background_label is not None:
2138            snets.SetBackgroundLabel(background_label)
2139
2140        for i, val in enumerate(values):
2141            snets.SetValue(i, val)
2142        
2143        if use_quads:
2144            snets.SetOutputMeshTypeToQuads()
2145        else:
2146            snets.SetOutputMeshTypeToTriangles()
2147        snets.Update()
2148
2149        out = vedo.mesh.Mesh(snets.GetOutput())
2150        out.metadata["isovalue"] = values
2151        out.pipeline = utils.OperationNode(
2152            "isosurface_discrete",
2153            parents=[self],
2154            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
2155            c="#4cc9f0:#e9c46a",
2156        )
2157
2158        logger.SetStderrVerbosity(logger.VERBOSITY_INFO)
2159        return out
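
A minimal usage sketch (the synthetic label map below is made up for illustration; the filter
relies on `vtkSurfaceNets3D`, which requires a sufficiently recent VTK build):

    import numpy as np
    import vedo

    # hypothetical label map: two touching blocks labeled 1 and 2 on a background of 0
    labels = np.zeros((60, 60, 60), dtype=np.uint8)
    labels[10:30, 10:50, 10:50] = 1
    labels[30:50, 10:50, 10:50] = 2
    vol = vedo.Volume(labels)

    surf = vol.isosurface_discrete([1, 2], nsmooth=10)
    # each output polygon carries the pair of labels it separates
    print(surf.celldata["BoundaryLabels"])
    surf.show(axes=1)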

def legosurface(self, vmin=None, vmax=None, invert=False, boundary=True, array_name='input_scalars') -> vedo.mesh.Mesh:
2162    def legosurface(
2163        self,
2164        vmin=None,
2165        vmax=None,
2166        invert=False,
2167        boundary=True,
2168        array_name="input_scalars",
2169    ) -> "vedo.mesh.Mesh":
2170        """
2171        Represent an object - typically a `Volume` - as lego blocks (voxels).
2172        By default, colors correspond to the volume's scalar values.
2173        Returns a `Mesh` object.
2174
2175        Arguments:
2176            vmin : (float)
2177                the lower threshold, voxels below this value are not shown.
2178            vmax : (float)
2179                the upper threshold, voxels above this value are not shown.
2180            boundary : (bool)
2181                controls whether to include cells that are only partially inside the [vmin, vmax] range.
2182            array_name : (int, str)
2183                name or index of the scalar array to be considered
2184
2185        Examples:
2186            - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)
2187
2188                ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
2189        """
2190        imp_dataset = vtki.new("ImplicitDataSet")
2191        imp_dataset.SetDataSet(self.dataset)
2192        window = vtki.new("ImplicitWindowFunction")
2193        window.SetImplicitFunction(imp_dataset)
2194
2195        srng = list(self.dataset.GetScalarRange())
2196        if vmin is not None:
2197            srng[0] = vmin
2198        if vmax is not None:
2199            srng[1] = vmax
2200        if not boundary:
2201            tol = 0.00001 * (srng[1] - srng[0])
2202            srng[0] -= tol
2203            srng[1] += tol
2204        window.SetWindowRange(srng)
2205        # print("legosurface window range:", srng)
2206
2207        extract = vtki.new("ExtractGeometry")
2208        extract.SetInputData(self.dataset)
2209        extract.SetImplicitFunction(window)
2210        extract.SetExtractInside(invert)
2211        extract.SetExtractBoundaryCells(boundary)
2212        extract.Update()
2213
2214        gf = vtki.new("GeometryFilter")
2215        gf.SetInputData(extract.GetOutput())
2216        gf.Update()
2217
2218        m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
2219        m.map_points_to_cells()
2220        m.celldata.select(array_name)
2221
2222        m.pipeline = utils.OperationNode(
2223            "legosurface",
2224            parents=[self],
2225            comment=f"array: {array_name}",
2226            c="#4cc9f0:#e9c46a",
2227        )
2228        return m
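
A minimal usage sketch (the random scalar volume and the threshold range below are made up
for illustration):

    import numpy as np
    import vedo

    # hypothetical scalar volume, just to have something to threshold
    scalars = np.random.rand(30, 30, 30) * 100
    vol = vedo.Volume(scalars)

    # keep only the voxels whose scalar value falls in [40, 80]
    lego = vol.legosurface(vmin=40, vmax=80)
    lego.show(axes=1)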

def tomesh(self, fill=True, shrink=1.0) -> vedo.mesh.Mesh:
2230    def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
2231        """
2232        Build a polygonal Mesh from the current object.
2233
2234        If `fill=True`, the interior faces of all the cells are created.
2235        (setting a `shrink` value slightly smaller than the default 1.0
2236        can avoid flickering due to internal adjacent faces).
2237
2238        If `fill=False`, only the boundary faces will be generated.
2239        """
2240        gf = vtki.new("GeometryFilter")
2241        if fill:
2242            sf = vtki.new("ShrinkFilter")
2243            sf.SetInputData(self.dataset)
2244            sf.SetShrinkFactor(shrink)
2245            sf.Update()
2246            gf.SetInputData(sf.GetOutput())
2247            gf.Update()
2248            poly = gf.GetOutput()
2249            if shrink == 1.0:
2250                clean_poly = vtki.new("CleanPolyData")
2251                clean_poly.PointMergingOn()
2252                clean_poly.ConvertLinesToPointsOn()
2253                clean_poly.ConvertPolysToLinesOn()
2254                clean_poly.ConvertStripsToPolysOn()
2255                clean_poly.SetInputData(poly)
2256                clean_poly.Update()
2257                poly = clean_poly.GetOutput()
2258        else:
2259            gf.SetInputData(self.dataset)
2260            gf.Update()
2261            poly = gf.GetOutput()
2262
2263        msh = vedo.mesh.Mesh(poly).flat()
2264        msh.scalarbar = self.scalarbar
2265        lut = utils.ctf2lut(self)
2266        if lut:
2267            msh.mapper.SetLookupTable(lut)
2268
2269        msh.pipeline = utils.OperationNode(
2270            "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
2271        )
2272        return msh
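
A minimal usage sketch (the random volume below is made up for illustration):

    import numpy as np
    import vedo

    # hypothetical small volume; with fill=True the internal faces are built too,
    # so a shrink factor slightly below 1.0 keeps adjacent internal faces from flickering
    vol = vedo.Volume(np.random.rand(15, 15, 15))
    msh = vol.tomesh(fill=True, shrink=0.9)
    msh.show(axes=1)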
