vedo.core
Base classes providing functionality to different vedo objects.
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from typing import List, Union, Any
from typing_extensions import Self

import numpy as np

import vedo.vtkclasses as vtki

import vedo
from vedo import colors
from vedo import utils
from vedo.transformations import LinearTransform, NonLinearTransform


__docformat__ = "google"

__doc__ = """
Base classes providing functionality to different vedo objects.

"""

__all__ = [
    "DataArrayHelper",
    "CommonAlgorithms",
    "PointAlgorithms",
    "VolumeAlgorithms",
]


###############################################################################
# warnings = dict(
#     func_name=(
#         "WARNING: some message"
#         " (silence this with vedo.core.warnings['func_name']=False)"
#     ),
# )
# ### USAGE
# def func_name(self):
#     """Obsolete, use ... instead."""
#     if warnings["func_name"]:
#         colors.printc(warnings["func_name"], c="y")
#     warnings["func_name"] = ""
#     return


###############################################################################
class DataArrayHelper:
    """
    Helper class to manage data associated to either points (or vertices) and cells (or faces).

    Internal use only.
    """

    def __init__(self, obj, association):
        """
        Arguments:
            obj: the vedo object; must expose `.dataset` (a vtkDataSet)
                and `.mapper` attributes.
            association: 0 for point data, 1 for cell data, 2 for field data.
        """
        self.obj = obj
        self.association = association

    def __getitem__(self, key):
        """
        Return the array identified by `key` (name or index) as a numpy array.

        String arrays in field data are returned as a plain list of strings.
        Returns `None` if no array matches `key`.
        """
        if self.association == 0:
            data = self.obj.dataset.GetPointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()

            # Field data may hold string arrays, which numpy cannot wrap:
            # hand them back as a python list instead.
            varr = data.GetAbstractArray(key)
            if isinstance(varr, vtki.vtkStringArray):
                if isinstance(key, int):
                    key = data.GetArrayName(key)
                n = varr.GetNumberOfValues()
                narr = [varr.GetValue(i) for i in range(n)]
                return narr
            ###########

        else:
            raise RuntimeError(f"Unknown association {self.association} in DataArrayHelper")

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return None
        return utils.vtk2numpy(arr)

    def __setitem__(self, key, input_array):
        """
        Assign `input_array` to the data array named `key`.

        For point/cell data the array length must match the number of
        points/cells. 1D arrays become the active scalars, (n,3) arrays the
        active vectors (or normals if named "normals"). Field data accepts
        numpy arrays of any shape or flat lists of strings.
        """
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            n = self.obj.dataset.GetNumberOfPoints()
            self.obj.mapper.SetScalarModeToUsePointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
            n = self.obj.dataset.GetNumberOfCells()
            self.obj.mapper.SetScalarModeToUseCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
            if not utils.is_sequence(input_array):
                input_array = [input_array]

            if isinstance(input_array[0], str):
                varr = vtki.vtkStringArray()
                varr.SetName(key)
                varr.SetNumberOfComponents(1)
                varr.SetNumberOfTuples(len(input_array))
                for i, iarr in enumerate(input_array):
                    if isinstance(iarr, np.ndarray):
                        iarr = iarr.tolist()  # better format
                    # Note: a string k can be converted to numpy with
                    # import json; k = np.array(json.loads(k))
                    varr.InsertValue(i, str(iarr))
            else:
                try:
                    varr = utils.numpy2vtk(input_array, name=key)
                except TypeError as e:
                    vedo.logger.error(
                        f"cannot create metadata with input object:\n"
                        f"{input_array}"
                        f"\n\nAllowed content examples are:\n"
                        f"- flat list of strings ['a','b', 1, [1,2,3], ...]"
                        f" (first item must be a string in this case)\n"
                        f"  hint: use k = np.array(json.loads(k)) to convert strings\n"
                        f"- numpy arrays of any shape"
                    )
                    raise e

            data.AddArray(varr)
            return  ############

        else:
            raise RuntimeError(f"Unknown association {self.association} in DataArrayHelper")

        if len(input_array) != n:
            vedo.logger.error(
                f"Error in point/cell data: length of input {len(input_array)}"
                f" != {n} nr. of elements"
            )
            raise RuntimeError(
                f"length of input {len(input_array)} != {n} nr. of elements"
            )

        input_array = np.asarray(input_array)
        varr = utils.numpy2vtk(input_array, name=key)
        data.AddArray(varr)

        if len(input_array.shape) == 1:  # scalars
            data.SetActiveScalars(key)
            try:  # could be a volume mapper
                self.obj.mapper.SetScalarRange(data.GetScalars().GetRange())
            except AttributeError:
                pass
        elif len(input_array.shape) == 2 and input_array.shape[1] == 3:  # vectors
            if key.lower() == "normals":
                data.SetActiveNormals(key)
            else:
                data.SetActiveVectors(key)

    def keys(self) -> List[str]:
        """Return the list of available data array names"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        else:
            raise RuntimeError(f"Unknown association {self.association} in DataArrayHelper")
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            name = ""
            if self.association == 2:
                # field data can hold abstract (e.g. string) arrays
                name = data.GetAbstractArray(i).GetName()
            else:
                iarr = data.GetArray(i)
                if iarr:
                    name = iarr.GetName()
            if name:
                arrnames.append(name)
        return arrnames

    def items(self) -> List:
        """Return the list of available data array `(names, values)`."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        else:
            raise RuntimeError(f"Unknown association {self.association} in DataArrayHelper")
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                name = data.GetArray(i).GetName()
            if name:
                # values are fetched through __getitem__ so string
                # arrays and numpy conversion are handled uniformly
                arrnames.append((name, self[name]))
        return arrnames

    def todict(self) -> dict:
        """Return a dictionary of the available data arrays."""
        return dict(self.items())

    def rename(self, oldname: str, newname: str) -> None:
        """Rename an array"""
        varr = None  # stays None if association is invalid or array is missing
        if self.association == 0:
            varr = self.obj.dataset.GetPointData().GetArray(oldname)
        elif self.association == 1:
            varr = self.obj.dataset.GetCellData().GetArray(oldname)
        elif self.association == 2:
            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
        if varr:
            varr.SetName(newname)
        else:
            vedo.logger.warning(
                f"Cannot rename non existing array {oldname} to {newname}"
            )

    def remove(self, key: Union[int, str]) -> None:
        """Remove a data array by name or number"""
        if self.association == 0:
            self.obj.dataset.GetPointData().RemoveArray(key)
        elif self.association == 1:
            self.obj.dataset.GetCellData().RemoveArray(key)
        elif self.association == 2:
            self.obj.dataset.GetFieldData().RemoveArray(key)

    def clear(self) -> None:
        """Remove all data associated to this object"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        else:
            raise RuntimeError(f"Unknown association {self.association} in DataArrayHelper")
        # Collect all names first: removing while iterating by index would
        # shift the remaining indices and skip every other array.
        if self.association == 2:
            names = [
                data.GetAbstractArray(i).GetName()
                for i in range(data.GetNumberOfArrays())
                if data.GetAbstractArray(i)
            ]
        else:
            names = [
                data.GetArray(i).GetName()
                for i in range(data.GetNumberOfArrays())
                if data.GetArray(i)
            ]
        for name in names:
            data.RemoveArray(name)

    def select(self, key: Union[int, str]) -> Any:
        """Select one specific array by its name to make it the `active` one."""
        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
        # and NOT mapped through the lookup table, while everything else is.
        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
        # types are treated as colors with values in the range 0-255
        # and floating types are treated as colors with values in the range 0.0-1.0.
        # Setting ColorModeToMapScalars means that all scalar data will be mapped
        # through the lookup table.
        # (Note that for multi-component scalars, the particular component
        # to use for mapping can be specified using the SelectColorArray() method.)
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            self.obj.mapper.SetScalarModeToUsePointData()
        else:
            data = self.obj.dataset.GetCellData()
            self.obj.mapper.SetScalarModeToUseCellData()

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return self.obj

        nc = arr.GetNumberOfComponents()
        # print("GetNumberOfComponents", nc)
        if nc == 1:
            data.SetActiveScalars(key)
        elif nc == 2:
            data.SetTCoords(arr)
        elif nc in (3, 4):
            if "rgb" in key.lower():  # type: ignore
                data.SetActiveScalars(key)
                try:
                    # could be a volume mapper
                    self.obj.mapper.SetColorModeToDirectScalars()
                    data.SetActiveVectors(None)  # need this to fix bug in #1066
                    # print("SetColorModeToDirectScalars for", key)
                except AttributeError:
                    pass
            else:
                data.SetActiveVectors(key)
        elif nc == 9:
            data.SetActiveTensors(key)
        else:
            vedo.logger.error(f"Cannot select array {key} with {nc} components")
            return self.obj

        try:
            # could be a volume mapper
            self.obj.mapper.SetArrayName(key)
            self.obj.mapper.ScalarVisibilityOn()
        except AttributeError:
            pass

        return self.obj
self.association == 0: 309 data = self.obj.dataset.GetPointData() 310 if isinstance(key, int): 311 key = data.GetArrayName(key) 312 data.SetTCoords(data.GetArray(key)) 313 else: 314 vedo.logger.warning("texture coordinates are only available for point data") 315 return self.obj 316 317 def select_normals(self, key: Union[int,str]) -> Any: 318 """Select one specific normal array by its name to make it the "active" one.""" 319 if self.association == 0: 320 data = self.obj.dataset.GetPointData() 321 self.obj.mapper.SetScalarModeToUsePointData() 322 else: 323 data = self.obj.dataset.GetCellData() 324 self.obj.mapper.SetScalarModeToUseCellData() 325 326 if isinstance(key, int): 327 key = data.GetArrayName(key) 328 data.SetActiveNormals(key) 329 return self.obj 330 331 def print(self, **kwargs) -> None: 332 """Print the array names available to terminal""" 333 colors.printc(self.keys(), **kwargs) 334 335 def __repr__(self) -> str: 336 """Representation""" 337 338 def _get_str(pd, header): 339 out = f"\x1b[2m\x1b[1m\x1b[7m{header}" 340 if pd.GetNumberOfArrays(): 341 if self.obj.name: 342 out += f" in {self.obj.name}" 343 out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m" 344 for i in range(pd.GetNumberOfArrays()): 345 varr = pd.GetArray(i) 346 out += f"\n\x1b[1m\x1b[4mArray name : {varr.GetName()}\x1b[0m" 347 out += "\nindex".ljust(15) + f": {i}" 348 t = varr.GetDataType() 349 if t in vtki.array_types: 350 out += "\ntype".ljust(15) 351 out += f": {vtki.array_types[t]}" 352 shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents()) 353 out += "\nshape".ljust(15) + f": {shape}" 354 out += "\nrange".ljust(15) + f": {np.array(varr.GetRange())}" 355 out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}" 356 out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}" 357 out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB" 358 else: 359 out += " is empty.\x1b[0m" 360 return out 361 362 if self.association == 0: 363 out = 
_get_str(self.obj.dataset.GetPointData(), "Point Data") 364 elif self.association == 1: 365 out = _get_str(self.obj.dataset.GetCellData(), "Cell Data") 366 elif self.association == 2: 367 pd = self.obj.dataset.GetFieldData() 368 if pd.GetNumberOfArrays(): 369 out = "\x1b[2m\x1b[1m\x1b[7mMeta Data" 370 if self.obj.name: 371 out += f" in {self.obj.name}" 372 out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m" 373 for i in range(pd.GetNumberOfArrays()): 374 varr = pd.GetAbstractArray(i) 375 out += f"\n\x1b[1m\x1b[4mEntry name : {varr.GetName()}\x1b[0m" 376 out += "\nindex".ljust(15) + f": {i}" 377 shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents()) 378 out += "\nshape".ljust(15) + f": {shape}" 379 380 return out 381 382 383############################################################################### 384class CommonAlgorithms: 385 """Common algorithms.""" 386 387 @property 388 def pointdata(cls): 389 """ 390 Create and/or return a `numpy.array` associated to points (vertices). 391 A data array can be indexed either as a string or by an integer number. 392 E.g.: `myobj.pointdata["arrayname"]` 393 394 Usage: 395 396 `myobj.pointdata.keys()` to return the available data array names 397 398 `myobj.pointdata.select(name)` to make this array the active one 399 400 `myobj.pointdata.remove(name)` to remove this array 401 """ 402 return DataArrayHelper(cls, 0) 403 404 @property 405 def celldata(cls): 406 """ 407 Create and/or return a `numpy.array` associated to cells (faces). 408 A data array can be indexed either as a string or by an integer number. 
409 E.g.: `myobj.celldata["arrayname"]` 410 411 Usage: 412 413 `myobj.celldata.keys()` to return the available data array names 414 415 `myobj.celldata.select(name)` to make this array the active one 416 417 `myobj.celldata.remove(name)` to remove this array 418 """ 419 return DataArrayHelper(cls, 1) 420 421 @property 422 def metadata(cls): 423 """ 424 Create and/or return a `numpy.array` associated to neither cells nor faces. 425 A data array can be indexed either as a string or by an integer number. 426 E.g.: `myobj.metadata["arrayname"]` 427 428 Usage: 429 430 `myobj.metadata.keys()` to return the available data array names 431 432 `myobj.metadata.select(name)` to make this array the active one 433 434 `myobj.metadata.remove(name)` to remove this array 435 """ 436 return DataArrayHelper(cls, 2) 437 438 def rename(cls, newname: str) -> Self: 439 """Rename the object""" 440 try: 441 cls.name = newname 442 except AttributeError: 443 vedo.logger.error(f"Cannot rename object {cls}") 444 return cls 445 446 def memory_address(cls) -> int: 447 """ 448 Return a unique memory address integer which may serve as the ID of the 449 object, or passed to c++ code. 450 """ 451 # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/ 452 # https://github.com/tfmoraes/polydata_connectivity 453 return int(cls.dataset.GetAddressAsString("")[5:], 16) 454 455 def memory_size(cls) -> int: 456 """Return the size in bytes of the object in memory.""" 457 return cls.dataset.GetActualMemorySize() 458 459 def modified(cls) -> Self: 460 """Use in conjunction with `tonumpy()` to update any modifications to the image array.""" 461 cls.dataset.GetPointData().Modified() 462 scals = cls.dataset.GetPointData().GetScalars() 463 if scals: 464 scals.Modified() 465 return cls 466 467 def box(cls, scale=1, padding=0) -> "vedo.Mesh": 468 """ 469 Return the bounding box as a new `Mesh` object. 
470 471 Arguments: 472 scale : (float) 473 box size can be scaled by a factor 474 padding : (float, list) 475 a constant padding can be added (can be a list `[padx,pady,padz]`) 476 """ 477 b = cls.bounds() 478 if not utils.is_sequence(padding): 479 padding = [padding, padding, padding] 480 length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4] 481 tol = (length + width + height) / 30000 # useful for boxing text 482 pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol] 483 bx = vedo.shapes.Box( 484 pos, 485 length * scale + padding[0], 486 width * scale + padding[1], 487 height * scale + padding[2], 488 c="gray", 489 ) 490 try: 491 pr = vtki.vtkProperty() 492 pr.DeepCopy(cls.properties) 493 bx.actor.SetProperty(pr) 494 bx.properties = pr 495 except (AttributeError, TypeError): 496 pass 497 bx.flat().lighting("off").wireframe(True) 498 return bx 499 500 def update_dataset(cls, dataset, **kwargs) -> Self: 501 """Update the dataset of the object with the provided VTK dataset.""" 502 cls._update(dataset, **kwargs) 503 return cls 504 505 def bounds(cls) -> np.ndarray: 506 """ 507 Get the object bounds. 508 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 509 """ 510 try: # this is very slow for large meshes 511 pts = cls.vertices 512 xmin, ymin, zmin = np.nanmin(pts, axis=0) 513 xmax, ymax, zmax = np.nanmax(pts, axis=0) 514 return np.array([xmin, xmax, ymin, ymax, zmin, zmax]) 515 except (AttributeError, ValueError): 516 return np.array(cls.dataset.GetBounds()) 517 518 def xbounds(cls, i=None) -> np.ndarray: 519 """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1).""" 520 b = cls.bounds() 521 if i is not None: 522 return b[i] 523 return np.array([b[0], b[1]]) 524 525 def ybounds(cls, i=None) -> np.ndarray: 526 """Get the bounds `[ymin,ymax]`. 
Can specify upper or lower with i (0,1).""" 527 b = cls.bounds() 528 if i == 0: 529 return b[2] 530 if i == 1: 531 return b[3] 532 return np.array([b[2], b[3]]) 533 534 def zbounds(cls, i=None) -> np.ndarray: 535 """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1).""" 536 b = cls.bounds() 537 if i == 0: 538 return b[4] 539 if i == 1: 540 return b[5] 541 return np.array([b[4], b[5]]) 542 543 def diagonal_size(cls) -> float: 544 """Get the length of the diagonal of the bounding box.""" 545 b = cls.bounds() 546 return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2) 547 548 def average_size(cls) -> float: 549 """ 550 Calculate and return the average size of the object. 551 This is the mean of the vertex distances from the center of mass. 552 """ 553 coords = cls.vertices 554 cm = np.mean(coords, axis=0) 555 if coords.shape[0] == 0: 556 return 0.0 557 cc = coords - cm 558 return np.mean(np.linalg.norm(cc, axis=1)) 559 560 def center_of_mass(cls) -> np.ndarray: 561 """Get the center of mass of the object.""" 562 if isinstance(cls, (vedo.RectilinearGrid, vedo.Volume)): 563 return np.array(cls.dataset.GetCenter()) 564 cmf = vtki.new("CenterOfMass") 565 cmf.SetInputData(cls.dataset) 566 cmf.Update() 567 c = cmf.GetCenter() 568 return np.array(c) 569 570 def copy_data_from(cls, obj: Any) -> Self: 571 """Copy all data (point and cell data) from this input object""" 572 cls.dataset.GetPointData().PassData(obj.dataset.GetPointData()) 573 cls.dataset.GetCellData().PassData(obj.dataset.GetCellData()) 574 cls.pipeline = utils.OperationNode( 575 "copy_data_from", 576 parents=[cls, obj], 577 comment=f"{obj.__class__.__name__}", 578 shape="note", 579 c="#ccc5b9", 580 ) 581 return cls 582 583 def inputdata(cls): 584 """Obsolete, use `.dataset` instead.""" 585 colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y") 586 return cls.dataset 587 588 @property 589 def npoints(cls): 590 """Retrieve the number of points (or 
vertices).""" 591 return cls.dataset.GetNumberOfPoints() 592 593 @property 594 def nvertices(cls): 595 """Retrieve the number of vertices (or points).""" 596 return cls.dataset.GetNumberOfPoints() 597 598 @property 599 def ncells(cls): 600 """Retrieve the number of cells.""" 601 return cls.dataset.GetNumberOfCells() 602 603 def cell_centers(cls, copy_arrays=False) -> "vedo.Points": 604 """ 605 Get the coordinates of the cell centers as a `Points` object. 606 607 Examples: 608 - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py) 609 """ 610 vcen = vtki.new("CellCenters") 611 vcen.SetCopyArrays(copy_arrays) 612 vcen.SetVertexCells(copy_arrays) 613 vcen.SetInputData(cls.dataset) 614 vcen.Update() 615 vpts = vedo.Points(vcen.GetOutput()) 616 if copy_arrays: 617 vpts.copy_properties_from(cls) 618 return vpts 619 620 @property 621 def lines(cls): 622 """ 623 Get lines connectivity ids as a python array 624 formatted as `[[id0,id1], [id3,id4], ...]` 625 626 See also: `lines_as_flat_array()`. 627 """ 628 # Get cell connettivity ids as a 1D array. The vtk format is: 629 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 630 try: 631 arr1d = utils.vtk2numpy(cls.dataset.GetLines().GetData()) 632 except AttributeError: 633 return np.array([]) 634 i = 0 635 conn = [] 636 n = len(arr1d) 637 for _ in range(n): 638 cell = [arr1d[i + k + 1] for k in range(arr1d[i])] 639 conn.append(cell) 640 i += arr1d[i] + 1 641 if i >= n: 642 break 643 644 return conn # cannot always make a numpy array of it! 645 646 @property 647 def lines_as_flat_array(cls): 648 """ 649 Get lines connectivity ids as a 1D numpy array. 650 Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...] 651 652 See also: `lines()`. 653 """ 654 try: 655 return utils.vtk2numpy(cls.dataset.GetLines().GetData()) 656 except AttributeError: 657 return np.array([]) 658 659 def mark_boundaries(cls) -> Self: 660 """ 661 Mark cells and vertices if they lie on a boundary. 
        A new array called `BoundaryCells` is added to the object.
        """
        mb = vtki.new("MarkBoundaryFilter")
        mb.SetInputData(cls.dataset)
        mb.Update()
        # overwrite our dataset in place with the filter output
        cls.dataset.DeepCopy(mb.GetOutput())
        cls.pipeline = utils.OperationNode("mark_boundaries", parents=[cls])
        return cls

    def find_cells_in_bounds(cls, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
        """
        Find cells that are within the specified bounds.

        `xbounds` can be a full 6-tuple `[xmin,xmax, ymin,ymax, zmin,zmax]`,
        a 2-tuple `[xmin,xmax]`, or any object exposing a `bounds()` method.
        Returns the cell ids as a numpy array of integers.
        """
        # xbounds may be a vedo object: use its bounding box
        try:
            xbounds = list(xbounds.bounds())
        except AttributeError:
            pass

        if len(xbounds) == 6:
            bnds = xbounds
        else:
            # start from the object's own bounds and override per-axis pairs
            bnds = list(cls.bounds())
            if len(xbounds) == 2:
                bnds[0] = xbounds[0]
                bnds[1] = xbounds[1]
            if len(ybounds) == 2:
                bnds[2] = ybounds[0]
                bnds[3] = ybounds[1]
            if len(zbounds) == 2:
                bnds[4] = zbounds[0]
                bnds[5] = zbounds[1]

        cell_ids = vtki.vtkIdList()
        # lazily build (and cache) the cell locator
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_line(cls, p0, p1, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a line segment.

        Returns the cell ids as a numpy array of integers.
        """
        cell_ids = vtki.vtkIdList()
        # lazily build (and cache) the cell locator
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_plane(cls, origin, normal, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a plane.

        Returns the cell ids as a numpy array of integers.
        """
        cell_ids = vtki.vtkIdList()
        # lazily build (and cache) the cell locator
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def keep_cell_types(cls, types=()) -> Self:
        """
        Extract cells of a specific type.

        Check the VTK cell types here:
        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
        """
        fe = vtki.new("ExtractCellsByType")
        fe.SetInputData(cls.dataset)
        for t in types:
            try:
                # accept either the integer VTK cell type or its name
                if utils.is_integer(t):
                    it = t
                else:
                    it = vtki.cell_types[t.upper()]
            except KeyError:
                vedo.logger.error(f"Cell type '{t}' not recognized")
                continue
            fe.AddCellType(it)
        fe.Update()
        cls._update(fe.GetOutput())
        return cls

    def map_cells_to_points(cls, arrays=(), move=False) -> Self:
        """
        Interpolate cell data (i.e., data specified per cell or face)
        into point data (i.e., data specified at each vertex).
        The method of transformation is based on averaging the data values
        of all cells using a particular point.

        A custom list of arrays to be mapped can be passed in input.

        Set `move=True` to delete the original `celldata` array.
771 """ 772 c2p = vtki.new("CellDataToPointData") 773 c2p.SetInputData(cls.dataset) 774 if not move: 775 c2p.PassCellDataOn() 776 if arrays: 777 c2p.ClearCellDataArrays() 778 c2p.ProcessAllArraysOff() 779 for arr in arrays: 780 c2p.AddCellDataArray(arr) 781 else: 782 c2p.ProcessAllArraysOn() 783 c2p.Update() 784 cls._update(c2p.GetOutput(), reset_locators=False) 785 cls.mapper.SetScalarModeToUsePointData() 786 cls.pipeline = utils.OperationNode("map_cells_to_points", parents=[cls]) 787 return cls 788 789 @property 790 def vertices(cls): 791 """Return the vertices (points) coordinates.""" 792 try: 793 # for polydata and unstructured grid 794 varr = cls.dataset.GetPoints().GetData() 795 except (AttributeError, TypeError): 796 try: 797 # for RectilinearGrid, StructuredGrid 798 vpts = vtki.vtkPoints() 799 cls.dataset.GetPoints(vpts) 800 varr = vpts.GetData() 801 except (AttributeError, TypeError): 802 try: 803 # for ImageData 804 v2p = vtki.new("ImageToPoints") 805 v2p.SetInputData(cls.dataset) 806 v2p.Update() 807 varr = v2p.GetOutput().GetPoints().GetData() 808 except AttributeError: 809 return np.array([]) 810 811 return utils.vtk2numpy(varr) 812 813 # setter 814 @vertices.setter 815 def vertices(cls, pts): 816 """Set vertices (points) coordinates.""" 817 pts = utils.make3d(pts) 818 arr = utils.numpy2vtk(pts, dtype=np.float32) 819 try: 820 vpts = cls.dataset.GetPoints() 821 vpts.SetData(arr) 822 vpts.Modified() 823 except (AttributeError, TypeError): 824 vedo.logger.error(f"Cannot set vertices for {type(cls)}") 825 return 826 # reset mesh to identity matrix position/rotation: 827 cls.point_locator = None 828 cls.cell_locator = None 829 cls.line_locator = None 830 cls.transform = LinearTransform() 831 832 @property 833 def points(cls): 834 """Return the vertices (points) coordinates. Same as `vertices`.""" 835 return cls.vertices 836 837 @points.setter 838 def points(cls, pts): 839 """Set vertices (points) coordinates. 
Same as `vertices`.""" 840 cls.vertices = pts 841 842 @property 843 def coordinates(cls): 844 """Return the vertices (points) coordinates. Same as `vertices`.""" 845 return cls.vertices 846 847 @coordinates.setter 848 def coordinates(cls, pts): 849 """Set vertices (points) coordinates. Same as `vertices`.""" 850 cls.vertices = pts 851 852 @property 853 def cells_as_flat_array(cls): 854 """ 855 Get cell connectivity ids as a 1D numpy array. 856 Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...] 857 """ 858 try: 859 # valid for unstructured grid 860 arr1d = utils.vtk2numpy(cls.dataset.GetCells().GetData()) 861 except AttributeError: 862 # valid for polydata 863 arr1d = utils.vtk2numpy(cls.dataset.GetPolys().GetData()) 864 return arr1d 865 866 @property 867 def cells(cls): 868 """ 869 Get the cells connectivity ids as a numpy array. 870 871 The output format is: `[[id0 ... idn], [id0 ... idm], etc]`. 872 """ 873 try: 874 # valid for unstructured grid 875 arr1d = utils.vtk2numpy(cls.dataset.GetCells().GetData()) 876 except AttributeError: 877 try: 878 # valid for polydata 879 arr1d = utils.vtk2numpy(cls.dataset.GetPolys().GetData()) 880 except AttributeError: 881 vedo.logger.warning(f"Cannot get cells for {type(cls)}") 882 return np.array([]) 883 884 # Get cell connettivity ids as a 1D array. vtk format is: 885 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 886 i = 0 887 conn = [] 888 n = len(arr1d) 889 if n: 890 while True: 891 cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)] 892 conn.append(cell) 893 i += arr1d[i] + 1 894 if i >= n: 895 break 896 return conn 897 898 def cell_edge_neighbors(cls): 899 """ 900 Get the cell neighbor indices of each cell. 901 902 Returns a python list of lists. 
903 """ 904 905 def face_to_edges(face): 906 edges = [] 907 size = len(face) 908 for i in range(1, size + 1): 909 if i == size: 910 edges.append([face[i - 1], face[0]]) 911 else: 912 edges.append([face[i - 1], face[i]]) 913 return edges 914 915 pd = cls.dataset 916 pd.BuildLinks() 917 918 neicells = [] 919 for i, cell in enumerate(cls.cells): 920 nn = [] 921 for edge in face_to_edges(cell): 922 neighbors = vtki.vtkIdList() 923 pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors) 924 if neighbors.GetNumberOfIds() > 0: 925 neighbor = neighbors.GetId(0) 926 nn.append(neighbor) 927 neicells.append(nn) 928 929 return neicells 930 931 932 def map_points_to_cells(cls, arrays=(), move=False) -> Self: 933 """ 934 Interpolate point data (i.e., data specified per point or vertex) 935 into cell data (i.e., data specified per cell). 936 The method of transformation is based on averaging the data values 937 of all points defining a particular cell. 938 939 A custom list of arrays to be mapped can be passed in input. 940 941 Set `move=True` to delete the original `pointdata` array. 942 943 Examples: 944 - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py) 945 """ 946 p2c = vtki.new("PointDataToCellData") 947 p2c.SetInputData(cls.dataset) 948 if not move: 949 p2c.PassPointDataOn() 950 if arrays: 951 p2c.ClearPointDataArrays() 952 p2c.ProcessAllArraysOff() 953 for arr in arrays: 954 p2c.AddPointDataArray(arr) 955 else: 956 p2c.ProcessAllArraysOn() 957 p2c.Update() 958 cls._update(p2c.GetOutput(), reset_locators=False) 959 cls.mapper.SetScalarModeToUseCellData() 960 cls.pipeline = utils.OperationNode("map_points_to_cells", parents=[cls]) 961 return cls 962 963 def resample_data_from(cls, source, tol=None, categorical=False) -> Self: 964 """ 965 Resample point and cell data from another dataset. 966 The output has the same structure but its point data have 967 the resampled values from target. 

        Use `tol` to set the tolerance used to compute whether
        a point in the source is in a cell of the current object.
        Points without resampled values, and their cells, are marked as blank.
        If the data is categorical, then the resulting data will be determined
        by a nearest neighbor interpolation scheme.

        Example:
        ```python
        from vedo import *
        m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
        pts = m1.coordinates
        ces = m1.cell_centers().coordinates
        m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
        m1.celldata["yvalues"] = np.power(ces[:,1], 3)
        m2 = Mesh(dataurl+'bunny.obj')
        m2.resample_data_from(m1)
        # print(m2.pointdata["xvalues"])
        show(m1, m2 , N=2, axes=1)
        ```
        """
        rs = vtki.new("ResampleWithDataSet")
        rs.SetInputData(cls.dataset)
        rs.SetSourceData(source.dataset)

        rs.SetPassPointArrays(True)
        rs.SetPassCellArrays(True)
        rs.SetPassFieldArrays(True)
        rs.SetCategoricalData(categorical)

        # automatic tolerance unless the user provides one explicitly
        rs.SetComputeTolerance(True)
        if tol:
            rs.SetComputeTolerance(False)
            rs.SetTolerance(tol)
        rs.Update()
        cls._update(rs.GetOutput(), reset_locators=False)
        cls.pipeline = utils.OperationNode(
            "resample_data_from",
            comment=f"{source.__class__.__name__}",
            parents=[cls, source],
        )
        return cls

    def interpolate_data_from(
        cls,
        source,
        radius=None,
        n=None,
        kernel="shepard",
        exclude=("Normals",),
        on="points",
        null_strategy=1,
        null_value=0,
    ) -> Self:
        """
        Interpolate over source to port its data onto the current object using various kernels.

        If n (number of closest points to use) is set then radius value is ignored.

        Check out also:
            `probe()` which in many cases can be faster.

        Arguments:
            kernel : (str)
                available kernels are [shepard, gaussian, linear]
            null_strategy : (int)
                specify a strategy to use when encountering a "null" point
                during the interpolation process. Null points occur when the local neighborhood
                (of nearby points to interpolate from) is empty.

                - Case 0: an output array is created that marks points
                  as being valid (=1) or null (invalid =0), and the null_value is set as well
                - Case 1: the output data value(s) are set to the provided null_value
                - Case 2: simply use the closest point to perform the interpolation.
            null_value : (float)
                see above.

        Examples:
            - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
            - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
            - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
            - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)

            ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png)
        """
        if radius is None and not n:
            vedo.logger.error("in interpolate_data_from(): please set either radius or n")
            raise RuntimeError

        if on == "points":
            points = source.dataset
        elif on == "cells":
            # promote source cell data to point data before interpolating
            c2p = vtki.new("CellDataToPointData")
            c2p.SetInputData(source.dataset)
            c2p.Update()
            points = c2p.GetOutput()
        else:
            vedo.logger.error("in interpolate_data_from(), on must be on points or cells")
            raise RuntimeError()

        locator = vtki.new("PointLocator")
        locator.SetDataSet(points)
        locator.BuildLocator()

        if kernel.lower() == "shepard":
            kern = vtki.new("ShepardKernel")
            kern.SetPowerParameter(2)
        elif kernel.lower() == "gaussian":
            kern = vtki.new("GaussianKernel")
            kern.SetSharpness(2)
        elif kernel.lower() == "linear":
            kern = vtki.new("LinearKernel")
        else:
            vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
            raise RuntimeError()

        if n:
            kern.SetNumberOfPoints(n)
            kern.SetKernelFootprintToNClosest()
        else:
            kern.SetRadius(radius)
            kern.SetKernelFootprintToRadius()

        # remove arrays that are already present in cls dataset
        # this is because the interpolator will ignore them otherwise
        clsnames = []
        for i in range(cls.dataset.GetPointData().GetNumberOfArrays()):
            name = cls.dataset.GetPointData().GetArrayName(i)
            clsnames.append(name)

        pointsnames = []
        for i in range(points.GetPointData().GetNumberOfArrays()):
            name = points.GetPointData().GetArrayName(i)
            pointsnames.append(name)

        for cname in clsnames:
            if cname in set(pointsnames) - set(exclude):
                cls.dataset.GetPointData().RemoveArray(cname)

        interpolator = vtki.new("PointInterpolator")
        interpolator.SetInputData(cls.dataset)
        interpolator.SetSourceData(points)
        interpolator.SetKernel(kern)
        interpolator.SetLocator(locator)
        interpolator.PassFieldArraysOn()
        interpolator.SetNullPointsStrategy(null_strategy)
        interpolator.SetNullValue(null_value)
        interpolator.SetValidPointsMaskArrayName("ValidPointMask")
        for ex in exclude:
            interpolator.AddExcludedArray(ex)

        # remove arrays that are already present in the source
        # this is because the interpolator will ignore them otherwise
        # NOTE(review): this second pass removes ALL non-excluded point arrays
        # from cls, not only those present in the source — presumably so the
        # interpolator repopulates them, but confirm this is intentional.
        for i in range(cls.dataset.GetPointData().GetNumberOfArrays()):
            name = cls.dataset.GetPointData().GetArrayName(i)
            if name not in exclude:
                cls.dataset.GetPointData().RemoveArray(name)

        interpolator.Update()

        if on == "cells":
            # convert interpolated point data back to cell data
            p2c = vtki.new("PointDataToCellData")
            p2c.SetInputData(interpolator.GetOutput())
            p2c.Update()
            cpoly = p2c.GetOutput()
        else:
            cpoly = interpolator.GetOutput()

        cls._update(cpoly, reset_locators=False)

        cls.pipeline = utils.OperationNode("interpolate_data_from", parents=[cls, source])
        return cls

    def add_ids(cls) -> Self:
        """
        Generate point and cell ids arrays.

        Two new arrays are added to the mesh: `PointID` and `CellID`.
        """
        ids = vtki.new("IdFilter")
        ids.SetInputData(cls.dataset)
        ids.PointIdsOn()
        ids.CellIdsOn()
        ids.FieldDataOff()
        ids.SetPointIdsArrayName("PointID")
        ids.SetCellIdsArrayName("CellID")
        ids.Update()
        cls._update(ids.GetOutput(), reset_locators=False)
        cls.pipeline = utils.OperationNode("add_ids", parents=[cls])
        return cls

    def gradient(cls, input_array=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the gradient of the active scalar field as a numpy array.

        Arguments:
            input_array : (str)
                array of the scalars to compute the gradient,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).
        Examples:
            - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)

            ![](https://vedo.embl.es/images/advanced/isolines.png)
        """
        gra = vtki.new("GradientFilter")
        # select the point- or cell-data association requested via `on`
        if on.startswith("p"):
            varr = cls.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = cls.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in gradient: unknown option {on}")
            raise RuntimeError

        # default to the currently active scalar array
        if input_array is None:
            if varr.GetScalars():
                input_array = varr.GetScalars().GetName()
            else:
                vedo.logger.error(f"in gradient: no scalars found for {on}")
                raise RuntimeError

        gra.SetInputData(cls.dataset)
        gra.SetInputScalars(tp, input_array)
        gra.SetResultArrayName("Gradient")
        gra.SetFasterApproximation(fast)
        gra.ComputeDivergenceOff()
        gra.ComputeVorticityOff()
        gra.ComputeGradientOn()
        gra.Update()
        # cls._update(gra.GetOutput(), reset_locators=False)
        if on.startswith("p"):
            gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
        else:
            gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
        return gvecs

    def divergence(cls, array_name=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the divergence of a vector field as a numpy array.

        Arguments:
            array_name : (str)
                name of the array of vectors to compute the divergence,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).
        """
        div = vtki.new("GradientFilter")
        # select the point- or cell-data association requested via `on`
        if on.startswith("p"):
            varr = cls.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = cls.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in divergence(): unknown option {on}")
            raise RuntimeError

        # default to the currently active vector array
        if array_name is None:
            if varr.GetVectors():
                array_name = varr.GetVectors().GetName()
            else:
                vedo.logger.error(f"in divergence(): no vectors found for {on}")
                raise RuntimeError

        div.SetInputData(cls.dataset)
        div.SetInputScalars(tp, array_name)
        div.ComputeDivergenceOn()
        div.ComputeGradientOff()
        div.ComputeVorticityOff()
        div.SetDivergenceArrayName("Divergence")
        div.SetFasterApproximation(fast)
        div.Update()
        # cls._update(div.GetOutput(), reset_locators=False)
        if on.startswith("p"):
            dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
        else:
            dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
        return dvecs

    def vorticity(cls, array_name=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the vorticity of a vector field as a numpy array.

        Arguments:
            array_name : (str)
                name of the array to compute the vorticity,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).
        """
        vort = vtki.new("GradientFilter")
        # select the point- or cell-data association requested via `on`
        if on.startswith("p"):
            varr = cls.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = cls.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in vorticity(): unknown option {on}")
            raise RuntimeError

        # default to the currently active vector array
        if array_name is None:
            if varr.GetVectors():
                array_name = varr.GetVectors().GetName()
            else:
                vedo.logger.error(f"in vorticity(): no vectors found for {on}")
                raise RuntimeError

        vort.SetInputData(cls.dataset)
        vort.SetInputScalars(tp, array_name)
        vort.ComputeDivergenceOff()
        vort.ComputeGradientOff()
        vort.ComputeVorticityOn()
        vort.SetVorticityArrayName("Vorticity")
        vort.SetFasterApproximation(fast)
        vort.Update()
        if on.startswith("p"):
            vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
        else:
            vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
        return vvecs

    def probe(
        cls,
        source,
        categorical=False,
        snap=False,
        tol=0,
    ) -> Self:
        """
        Takes a data set and probes its scalars at the specified points in space.

        Note that a mask is also output with valid/invalid points which can be accessed
        with `mesh.pointdata['ValidPointMask']`.

        Arguments:
            source : any dataset
                the data set to probe.
            categorical : bool
                control whether the source pointdata is to be treated as categorical.
            snap : bool
                snap to the cell with the closest point if no cell was found
            tol : float
                the tolerance to use when performing the probe.

        Check out also:
            `interpolate_data_from()` and `tovolume()`

        Examples:
            - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)

            ![](https://vedo.embl.es/images/volumetric/probePoints.png)
        """
        probe_filter = vtki.new("ProbeFilter")
        probe_filter.SetSourceData(source.dataset)
        probe_filter.SetInputData(cls.dataset)
        probe_filter.PassCellArraysOn()
        probe_filter.PassFieldArraysOn()
        probe_filter.PassPointArraysOn()
        probe_filter.SetCategoricalData(categorical)
        # tolerance is only honored when explicitly requested (tol != 0)
        probe_filter.ComputeToleranceOff()
        if tol:
            probe_filter.ComputeToleranceOn()
            probe_filter.SetTolerance(tol)
        probe_filter.SetSnapToCellWithClosestPoint(snap)
        probe_filter.Update()
        cls._update(probe_filter.GetOutput(), reset_locators=False)
        cls.pipeline = utils.OperationNode("probe", parents=[cls, source])
        # expose the VTK validity mask under a friendlier name
        cls.pointdata.rename("vtkValidPointMask", "ValidPointMask")
        return cls

    def compute_cell_size(cls) -> Self:
        """
        Add to this object a cell data array
        containing the area, volume and edge length
        of the cells (when applicable to the object type).

        Array names are: `Area`, `Volume`, `Length`.
        """
        csf = vtki.new("CellSizeFilter")
        csf.SetInputData(cls.dataset)
        csf.SetComputeArea(1)
        csf.SetComputeVolume(1)
        csf.SetComputeLength(1)
        csf.SetComputeVertexCount(0)
        csf.SetAreaArrayName("Area")
        csf.SetVolumeArrayName("Volume")
        csf.SetLengthArrayName("Length")
        csf.Update()
        cls._update(csf.GetOutput(), reset_locators=False)
        return cls

    def generate_random_data(cls) -> Self:
        """Fill a dataset with random attributes"""
        gen = vtki.new("RandomAttributeGenerator")
        gen.SetInputData(cls.dataset)
        gen.GenerateAllDataOn()
        gen.SetDataTypeToFloat()
        gen.GeneratePointNormalsOff()
        gen.GeneratePointTensorsOn()
        gen.GenerateCellScalarsOn()
        gen.Update()
        cls._update(gen.GetOutput(), reset_locators=False)
        cls.pipeline = utils.OperationNode("generate_random_data", parents=[cls])
        return cls

    def integrate_data(cls) -> dict:
        """
        Integrate point and cell data arrays while computing length,
        area or volume of the domain. It works for 1D, 2D or 3D cells.

        For volumetric datasets, this filter ignores all but 3D cells.
        It will not compute the volume contained in a closed surface.

        Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
        which contain the integration result for the corresponding attributes.

        Examples:
            ```python
            from vedo import *
            surf = Sphere(res=100)
            surf.pointdata['scalars'] = np.ones(surf.npoints)
            data = surf.integrate_data()
            print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
            ```

            ```python
            from vedo import *

            xcoords1 = np.arange(0, 2.2, 0.2)
            xcoords2 = sqrt(np.arange(0, 4.2, 0.2))

            ycoords = np.arange(0, 1.2, 0.2)

            surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
            surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)

            surf1.pointdata['scalars'] = surf1.vertices[:,2]
            surf2.pointdata['scalars'] = surf2.vertices[:,2]

            data1 = surf1.integrate_data()
            data2 = surf2.integrate_data()

            print(data1['pointdata']['scalars'],
                "is equal to",
                data2['pointdata']['scalars'],
                "even if the grids are different!",
                "(= the volume under the surface)"
            )
            show(surf1, surf2, N=2, axes=1).close()
            ```
        """
        vinteg = vtki.new("IntegrateAttributes")
        vinteg.SetInputData(cls.dataset)
        vinteg.Update()
        ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
        data = dict(
            pointdata=ugrid.pointdata.todict(),
            celldata=ugrid.celldata.todict(),
            metadata=ugrid.metadata.todict(),
        )
        return data

    def write(cls, filename, binary=True) -> None:
        """Write object to file."""
        out = vedo.file_io.write(cls, filename, binary)
        out.pipeline = utils.OperationNode(
            "write", parents=[cls], comment=filename[:15], shape="folder", c="#8a817c"
        )

    def tomesh(cls, bounds=(), shrink=0) -> "vedo.Mesh":
        """
        Extract boundary geometry from dataset (or convert data to polygonal type).

        Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
        to keep track of the original mesh elements.

        Arguments:
            bounds : (list)
                specify a sub-region to extract
            shrink : (float)
                shrink the cells to a fraction of their original size
        """
        geo = vtki.new("GeometryFilter")

        if shrink:
            sf = vtki.new("ShrinkFilter")
            sf.SetInputData(cls.dataset)
            sf.SetShrinkFactor(shrink)
            sf.Update()
            geo.SetInputData(sf.GetOutput())
        else:
            geo.SetInputData(cls.dataset)

        geo.SetPassThroughCellIds(1)
        geo.SetPassThroughPointIds(1)
        geo.SetOriginalCellIdsName("OriginalCellIds")
        geo.SetOriginalPointIdsName("OriginalPointIds")
        geo.SetNonlinearSubdivisionLevel(1)
        # geo.MergingOff() # crashes on StructuredGrids
        if bounds:
            geo.SetExtent(bounds)
            geo.ExtentClippingOn()
        geo.Update()
        msh = vedo.mesh.Mesh(geo.GetOutput())
        msh.pipeline = utils.OperationNode("tomesh", parents=[cls], c="#9e2a2b")
        return msh

    def signed_distance(cls, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
        """
        Compute the `Volume` object whose voxels contains the signed distance from
        the object. The calling object must have "Normals" defined.

        Arguments:
            bounds : (list, actor)
                bounding box sizes
            dims : (list)
                dimensions (nr. of voxels) of the output volume.
            invert : (bool)
                flip the sign
            max_radius : (float)
                specify how far out to propagate distance calculation

        Examples:
            - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)

            ![](https://vedo.embl.es/images/basic/distance2mesh.png)
        """
        if bounds is None:
            bounds = cls.bounds()
        if max_radius is None:
            # default propagation radius: half the bounding-box diagonal
            max_radius = cls.diagonal_size() / 2
        dist = vtki.new("SignedDistance")
        dist.SetInputData(cls.dataset)
        dist.SetRadius(max_radius)
        dist.SetBounds(bounds)
        dist.SetDimensions(dims)
        dist.Update()
        img = dist.GetOutput()
        if invert:
            # flip the sign by multiplying every voxel by -1
            mat = vtki.new("ImageMathematics")
            mat.SetInput1Data(img)
            mat.SetOperationToMultiplyByK()
            mat.SetConstantK(-1)
            mat.Update()
            img = mat.GetOutput()

        vol = vedo.Volume(img)
        vol.name = "SignedDistanceVolume"
        vol.pipeline = utils.OperationNode(
            "signed_distance",
            parents=[cls],
            comment=f"dims={tuple(vol.dimensions())}",
            c="#e9c46a:#0096c7",
        )
        return vol

    def unsigned_distance(
        cls, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
        """
        Compute the `Volume` object whose voxels contains the unsigned distance
        from the input object.
        """
        dist = vtki.new("UnsignedDistance")
        dist.SetInputData(cls.dataset)
        dist.SetDimensions(dims)

        if len(bounds) == 6:
            dist.SetBounds(bounds)
        else:
            dist.SetBounds(cls.bounds())
        if not max_radius:
            # default propagation radius: a tenth of the bounding-box diagonal
            max_radius = cls.diagonal_size() / 10
        dist.SetRadius(max_radius)

        # reuse an existing point locator if one was already built
        if cls.point_locator:
            dist.SetLocator(cls.point_locator)

        if cap_value is not None:
            dist.CappingOn()
            dist.SetCapValue(cap_value)
        dist.SetOutputScalarTypeToFloat()
        dist.Update()
        vol = vedo.Volume(dist.GetOutput())
        vol.name = "UnsignedDistanceVolume"
        vol.pipeline = utils.OperationNode(
            "unsigned_distance", parents=[cls], c="#e9c46a:#0096c7")
        return vol

    def smooth_data(cls,
            niter=10, relaxation_factor=0.1, strategy=0, mask=None,
            mode="distance2",
            exclude=("Normals", "TextureCoordinates"),
        ) -> Self:
        """
        Smooth point attribute data using distance weighted Laplacian kernel.
        The effect is to blur regions of high variation and emphasize low variation regions.

        A central concept of this method is the point smoothing stencil.
        A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
        To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
        the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1).
        This averaging process is repeated until the maximum number of iterations is reached.

        The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
        The a(i+1) attributes are determined from the a(i) attributes as follows:
            a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))

        Convergence occurs faster for larger relaxation factors.
        Typically a small number of iterations is required for large relaxation factors,
        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
        adequate (i.e., just a distance weighted average is computed).

        Warning:
            Certain data attributes cannot be correctly interpolated.
            For example, surface normals are expected to be |n|=1;
            after attribute smoothing this constraint is likely to be violated.
            Other vectors and tensors may suffer from similar issues.
            In such a situation, specify `exclude=...` which will not be smoothed
            (and simply passed through to the output).
            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging,
            the weights are simply 1/n, where n is the number of connected points in the stencil.
            The smoothing process reduces high frequency information in the data attributes.
            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor)
            important details may be lost, and the attributes will move towards an "average" value.
            While this filter will process any dataset type, if the input data is a 3D image volume,
            it's likely much faster to use an image-based algorithm to perform data smoothing.
            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
            (and hence the associated points defining the edge).

        Arguments:
            niter : (int)
                number of iterations
            relaxation_factor : (float)
                relaxation factor controlling the amount of Laplacian smoothing applied
            strategy : (int)
                strategy to use for Laplacian smoothing

                - 0: use all points, all point data attributes are smoothed
                - 1: smooth all point attribute data except those on the boundary
                - 2: only point data connected to a boundary point are smoothed

            mask : (str, np.ndarray)
                array to be used as a mask (ignore then the strategy keyword)
            mode : (str)
                smoothing mode, either "distance2", "distance" or "average"

                - distance**2 weighted (i.e., 1/r**2 interpolation weights)
                - distance weighted (i.e., 1/r) approach;
                - simple average of all connected points in the stencil

            exclude : (list)
                list of arrays to be excluded from smoothing
        """
        saf = vtki.new("AttributeSmoothingFilter")
        saf.SetInputData(cls.dataset)
        saf.SetRelaxationFactor(relaxation_factor)
        saf.SetNumberOfIterations(niter)

        # arrays listed in `exclude` are passed through untouched
        for ex in exclude:
            saf.AddExcludedArray(ex)

        if mode == "distance":
            saf.SetWeightsTypeToDistance()
        elif mode == "distance2":
            saf.SetWeightsTypeToDistance2()
        elif mode == "average":
            saf.SetWeightsTypeToAverage()
        else:
            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
            raise TypeError

        saf.SetSmoothingStrategy(strategy)
        if mask is not None:
            # a mask overrides the strategy keyword
            saf.SetSmoothingStrategyToSmoothingMask()
            if isinstance(mask, str):
                mask_ = cls.dataset.GetPointData().GetArray(mask)
                if not mask_:
                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
                    return cls
                mask_array = vtki.vtkUnsignedCharArray()
                mask_array.ShallowCopy(mask_)
                mask_array.SetName(mask_.GetName())
            else:
                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
            saf.SetSmoothingMask(mask_array)

        saf.Update()

        cls._update(saf.GetOutput())
        cls.pipeline = utils.OperationNode(
            "smooth_data", comment=f"strategy {strategy}", parents=[cls], c="#9e2a2b"
        )
        return cls

    def compute_streamlines(
        cls,
        seeds: Any,
        integrator="rk4",
        direction="forward",
        initial_step_size=None,
        max_propagation=None,
        max_steps=10000,
        step_length=0,
        surface_constrained=False,
        compute_vorticity=False,
    ) -> Union["vedo.Lines", None]:
        """
        Integrate a vector field to generate streamlines.

        Arguments:
            seeds : (Mesh, Points, list)
                starting points of the streamlines
            integrator : (str)
                type of integration method to be used:

                - "rk2" (Runge-Kutta 2)
                - "rk4" (Runge-Kutta 4)
                - "rk45" (Runge-Kutta 45)

            direction : (str)
                direction of integration, either "forward", "backward" or "both"
            initial_step_size : (float)
                initial step size used for line integration
            max_propagation : (float)
                maximum length of a streamline expressed in absolute units
            max_steps : (int)
                maximum number of steps for a streamline
            step_length : (float)
                maximum length of a step expressed in absolute units
            surface_constrained : (bool)
                whether to stop integrating when the streamline leaves the surface
            compute_vorticity : (bool)
                whether to compute the vorticity at each streamline point
        """
        # characteristic size: mean extent of the bounding box
        b = cls.dataset.GetBounds()
        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
        if initial_step_size is None:
            initial_step_size = size / 1000.0

        if max_propagation is None:
            max_propagation = size * 2

        # a plain list of points is accepted as seeds
        if utils.is_sequence(seeds):
            seeds = vedo.Points(seeds)

        sti = vtki.new("StreamTracer")
        sti.SetSourceData(seeds.dataset)
        if isinstance(cls, vedo.RectilinearGrid):
            # StreamTracer does not accept rectilinear grids directly
            sti.SetInputData(vedo.UnstructuredGrid(cls.dataset).dataset)
        else:
            sti.SetInputDataObject(cls.dataset)

        sti.SetInitialIntegrationStep(initial_step_size)
        sti.SetComputeVorticity(compute_vorticity)
        sti.SetMaximumNumberOfSteps(max_steps)
        sti.SetMaximumPropagation(max_propagation)
        sti.SetSurfaceStreamlines(surface_constrained)
        if step_length:
            sti.SetMaximumIntegrationStep(step_length)

        if "for" in direction:
            sti.SetIntegrationDirectionToForward()
        elif "back" in direction:
            sti.SetIntegrationDirectionToBackward()
        elif "both" in direction:
            sti.SetIntegrationDirectionToBoth()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
            return None

        if integrator == "rk2":
            sti.SetIntegratorTypeToRungeKutta2()
        elif integrator == "rk4":
            sti.SetIntegratorTypeToRungeKutta4()
        elif integrator == "rk45":
            sti.SetIntegratorTypeToRungeKutta45()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
            return None

        sti.Update()

        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
        stlines.name = "StreamLines"
        cls.pipeline = utils.OperationNode(
            "compute_streamlines", comment=f"{integrator}", parents=[cls, seeds], c="#9e2a2b"
        )
        return stlines

###############################################################################
class PointAlgorithms(CommonAlgorithms):
    """Methods for point clouds."""

    def apply_transform(cls, LT: Any, deep_copy=True) -> Self:
        """
        Apply a linear or non-linear transformation to the mesh polygonal data.

        Example:
        ```python
        from vedo import Cube, show, settings
        settings.use_parallel_projection = True
        c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
        T = c1.transform # rotate by 5 degrees, place at (2,1)
        c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
        c2.apply_transform(T)
        show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
        ```

        ![](https://vedo.embl.es/images/feats/apply_transform.png)
        """
        # nothing to transform
        if cls.dataset.GetNumberOfPoints() == 0:
            return cls

        # normalize the various accepted input types to a VTK transform `tr`
        if isinstance(LT, LinearTransform):
            LT_is_linear = True
            tr = LT.T
            if LT.is_identity():
                return cls

        elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
            LT_is_linear = True
            LT = LinearTransform(LT)
            tr = LT.T
            if LT.is_identity():
                return cls

        elif isinstance(LT, NonLinearTransform):
            LT_is_linear = False
            tr = LT.T
            cls.transform = LT  # reset

        elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
            LT_is_linear = False
            tr = LT
            cls.transform = NonLinearTransform(LT)  # reset

        else:
            vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
            return cls

        ################
        if LT_is_linear:
            try:
                # cls.transform might still not be linear
                cls.transform.concatenate(LT)
            except AttributeError:
                # in that case reset it
                cls.transform = LinearTransform()

        ################
        if isinstance(cls.dataset, vtki.vtkPolyData):
            tp = vtki.new("TransformPolyDataFilter")
        elif isinstance(cls.dataset, vtki.vtkUnstructuredGrid):
            tp = vtki.new("TransformFilter")
            tp.TransformAllInputVectorsOn()
        # elif isinstance(cls.dataset, vtki.vtkImageData):
        #     tp = vtki.new("ImageReslice")
        #     tp.SetInterpolationModeToCubic()
        #     tp.SetResliceTransform(tr)
        else:
            vedo.logger.error(f"apply_transform(), unknown input type: {[cls.dataset]}")
            return cls

        tp.SetTransform(tr)
        tp.SetInputData(cls.dataset)
        tp.Update()
        out = tp.GetOutput()

        if deep_copy:
            cls.dataset.DeepCopy(out)
        else:
            cls.dataset.ShallowCopy(out)

        # reset the locators
        cls.point_locator = None
        cls.cell_locator = None
        cls.line_locator = None
        return cls

    def apply_transform_from_actor(cls) -> LinearTransform:
        """
        Apply the current transformation of the actor to the data.
        Useful when manually moving an actor (eg. when pressing "a").
        Returns the `LinearTransform` object.

        Note that this method is automatically called when the window is closed,
        or the interactor style is changed.
        """
        M = cls.actor.GetMatrix()
        cls.apply_transform(M)
        # clear the actor-level matrix, its effect is now baked into the data
        iden = vtki.vtkMatrix4x4()
        cls.actor.PokeMatrix(iden)
        return LinearTransform(M)

    def pos(cls, x=None, y=None, z=None) -> Self:
        """Set/Get object position."""
        if x is None:  # get functionality
            return cls.transform.position

        if z is None and y is None:  # assume x is of the form (x,y,z)
            if len(x) == 3:
                x, y, z = x
            else:
                x, y = x
                z = 0
        elif z is None:  # assume x,y is of the form x, y
            z = 0

        q = cls.transform.position
        delta = [x, y, z] - q
        if delta[0] == delta[1] == delta[2] == 0:
            return cls
        LT = LinearTransform().translate(delta)
        return cls.apply_transform(LT)

    def shift(cls, dx=0, dy=0, dz=0) -> Self:
        """Add a vector to the current object position."""
        if utils.is_sequence(dx):
            dx, dy, dz = utils.make3d(dx)
        if dx == dy == dz == 0:
            return cls
        LT = LinearTransform().translate([dx, dy, dz])
        return cls.apply_transform(LT)

    def x(cls, val=None) -> Self:
        """Set/Get object position along x axis."""
        p = cls.transform.position
        if val is None:
            return p[0]
        cls.pos(val, p[1], p[2])
        return cls

    def y(cls, val=None) -> Self:
        """Set/Get object position along y axis."""
        p = cls.transform.position
        if val is None:
            return p[1]
        cls.pos(p[0], val, p[2])
        return cls

    def z(cls, val=None) -> Self:
        """Set/Get object position along z axis."""
        p = cls.transform.position
        if val is None:
            return p[2]
        cls.pos(p[0], p[1], val)
        return cls

    def rotate(cls, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
        """
        Rotate around an arbitrary `axis` passing through `point`.

        Example:
        ```python
        from vedo import *
        c1 = Cube()
        c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
        v = vector(0.2,1,0)
        p = vector(1,0,0)  # axis passes through this point
        c2.rotate(90, axis=v, point=p)
        l = Line(-v+p, v+p).lw(3).c('red')
        show(c1, l, c2, axes=1).close()
        ```

        ![](https://vedo.embl.es/images/feats/rotate_axis.png)
        """
        LT = LinearTransform()
        LT.rotate(angle, axis, point, rad)
        return cls.apply_transform(LT)

    def rotate_x(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around x-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_x(angle, rad, around)
        return cls.apply_transform(LT)

    def rotate_y(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around y-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_y(angle, rad, around)
        return cls.apply_transform(LT)

    def rotate_z(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around z-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_z(angle, rad, around)
        return cls.apply_transform(LT)

    def reorient(cls, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
        """
        Reorient the object to point to a new direction from an initial one.
        If `initaxis` is None, the object will be assumed in its "default" orientation.
        If `xyplane` is True, the object will be rotated to lie on the xy plane.

        Use `rotation` to first rotate the object around its `initaxis`.
        """
        q = cls.transform.position
        LT = LinearTransform()
        LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane)
        return cls.apply_transform(LT)

    def scale(cls, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]:
        """
        Set/get object's scaling factor.

        Arguments:
            s : (list, float)
                scaling factor(s).
            reset : (bool)
                if True previous scaling factors are ignored.
            origin : (bool)
                if True scaling is applied with respect to object's position,
                otherwise is applied respect to (0,0,0).

        Note:
            use `s=(sx,sy,sz)` to scale differently in the three coordinates.
        """
        if s is None:  # get functionality
            return np.array(cls.transform.T.GetScale())

        if not utils.is_sequence(s):
            s = [s, s, s]

        LT = LinearTransform()
        if reset:
            # divide out the current scaling so the new one replaces it
            old_s = np.array(cls.transform.T.GetScale())
            LT.scale(s / old_s)
        else:
            if origin is True:
                LT.scale(s, origin=cls.transform.position)
            elif origin is False:
                LT.scale(s, origin=False)
            else:
                LT.scale(s, origin=origin)

        return cls.apply_transform(LT)


###############################################################################
class VolumeAlgorithms(CommonAlgorithms):
    """Methods for Volume objects."""

    def bounds(cls) -> np.ndarray:
        """
        Get the object bounds.
        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
        """
        # OVERRIDE CommonAlgorithms.bounds() which is too slow
        return np.array(cls.dataset.GetBounds())

    def isosurface(cls, value=None, flying_edges=False) -> "vedo.mesh.Mesh":
        """
        Return an `Mesh` isosurface extracted from the `Volume` object.

        Set `value` as single float or list of values to draw the isosurface(s).
        Use flying_edges for faster results (but sometimes can interfere with `smooth()`).

        The isosurface values can be accessed with `mesh.metadata["isovalue"]`.

        Examples:
            - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)

            ![](https://vedo.embl.es/images/volumetric/isosurfaces.png)
        """
        scrange = cls.dataset.GetScalarRange()

        if flying_edges:
            cf = vtki.new("FlyingEdges3D")
            cf.InterpolateAttributesOn()
        else:
            cf = vtki.new("ContourFilter")
            cf.UseScalarTreeOn()

        cf.SetInputData(cls.dataset)
        cf.ComputeNormalsOn()

        if utils.is_sequence(value):
            cf.SetNumberOfContours(len(value))
            for i, t in enumerate(value):
                cf.SetValue(i, t)
        else:
            if value is None:
                # pick a default value one third into the scalar range
                value = (2 * scrange[0] + scrange[1]) / 3.0
                # print("automatic isosurface value =", value)
            cf.SetValue(0, value)

        cf.Update()
        poly = cf.GetOutput()

        out = vedo.mesh.Mesh(poly, c=None).phong()
        out.mapper.SetScalarRange(scrange[0], scrange[1])
        out.metadata["isovalue"] = value

        out.pipeline = utils.OperationNode(
            "isosurface",
            parents=[cls],
            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
            c="#4cc9f0:#e9c46a",
        )
        return out

    def isosurface_discrete(
        cls, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0,
    ) -> "vedo.mesh.Mesh":
        """
        Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded,
        3D version of the multiple objects/labels Surface Nets algorithm.
        The input is a 3D image (i.e., volume) where each voxel is labeled
        (integer labels are preferred to real values), and the output data is a polygonal mesh separating
        labeled regions / objects.
        (Note that on output each region [corresponding to a different segmented object] will share
        points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).

        Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating
        the labels on either side of the polygons composing the output Mesh.
        (This can be used for advanced operations like extracting shared/contacting boundaries between two objects.
        The name of this celldata array is "BoundaryLabels").

        The values can be accessed with `mesh.metadata["isovalue"]`.

        Arguments:
            values : (float, list)
                single value or list of values to draw the isosurface(s).
            background_label : (float)
                this value specifies the label value to use when referencing the background
                region outside of any of the specified regions.
            internal_boundaries : (bool, list)
                if False, the output will only contain the boundary surface and
                internal surfaces will be removed.
                If a list of integers is provided, only the boundaries between the specified labels will be extracted.
            use_quads : (bool)
                if True, the output polygons will be quads. If False, the output polygons will be triangles.
            nsmooth : (int)
                number of iterations of smoothing (0 means no smoothing).

        Examples:
            - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
        """
        # silence the filter's verbose output, restore verbosity on exit
        logger = vtki.get_class("Logger")
        logger.SetStderrVerbosity(logger.VERBOSITY_ERROR)

        snets = vtki.new("SurfaceNets3D")
        snets.SetInputData(cls.dataset)

        if nsmooth:
            snets.SmoothingOn()
            snets.AutomaticSmoothingConstraintsOn()
            snets.GetSmoother().SetNumberOfIterations(nsmooth)
            # snets.GetSmoother().SetRelaxationFactor(relaxation_factor)
            # snets.GetSmoother().SetConstraintDistance(constraint_distance)
        else:
            snets.SmoothingOff()

        if internal_boundaries is False:
            snets.SetOutputStyleToBoundary()
        elif internal_boundaries is True:
            snets.SetOutputStyleToDefault()
        elif utils.is_sequence(internal_boundaries):
            # extract only the boundaries between the selected labels
            snets.SetOutputStyleToSelected()
            snets.InitializeSelectedLabelsList()
            for val in internal_boundaries:
                snets.AddSelectedLabel(val)
        else:
            vedo.logger.error("isosurface_discrete(): unknown boundaries option")

        n = len(values)
        snets.SetNumberOfContours(n)
        snets.SetNumberOfLabels(n)

        if background_label is not None:
            snets.SetBackgroundLabel(background_label)

        for i, val in enumerate(values):
            snets.SetValue(i, val)

        if use_quads:
            snets.SetOutputMeshTypeToQuads()
        else:
            snets.SetOutputMeshTypeToTriangles()
        snets.Update()

        out = vedo.mesh.Mesh(snets.GetOutput())
        out.metadata["isovalue"] = values
        out.pipeline = utils.OperationNode(
            "isosurface_discrete",
            parents=[cls],
            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
            c="#4cc9f0:#e9c46a",
        )

        logger.SetStderrVerbosity(logger.VERBOSITY_INFO)
        return out


    def legosurface(
        cls,
        vmin=None,
        vmax=None,
        invert=False,
        boundary=True,
        array_name="input_scalars",
    ) -> "vedo.mesh.Mesh":
        """
        Represent an object - typically a `Volume` - as lego blocks (voxels).
        By default colors correspond to the volume's scalar.
        Returns an `Mesh` object.

        Arguments:
            vmin : (float)
                the lower threshold, voxels below this value are not shown.
            vmax : (float)
                the upper threshold, voxels above this value are not shown.
            boundary : (bool)
                controls whether to include cells that are partially inside
            array_name : (int, str)
                name or index of the scalar array to be considered

        Examples:
            - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)

            ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
        """
        imp_dataset = vtki.new("ImplicitDataSet")
        imp_dataset.SetDataSet(cls.dataset)
        window = vtki.new("ImplicitWindowFunction")
        window.SetImplicitFunction(imp_dataset)

        # clamp the scalar window to the requested [vmin, vmax] sub-range
        srng = list(cls.dataset.GetScalarRange())
        if vmin is not None:
            srng[0] = vmin
        if vmax is not None:
            srng[1] = vmax
        if not boundary:
            # widen the window by a tiny tolerance to drop partial cells
            tol = 0.00001 * (srng[1] - srng[0])
            srng[0] -= tol
            srng[1] += tol
        window.SetWindowRange(srng)
        # print("legosurface window range:", srng)

        extract = vtki.new("ExtractGeometry")
        extract.SetInputData(cls.dataset)
        extract.SetImplicitFunction(window)
        extract.SetExtractInside(invert)
        extract.SetExtractBoundaryCells(boundary)
        extract.Update()

        gf = vtki.new("GeometryFilter")
        gf.SetInputData(extract.GetOutput())
        gf.Update()

        m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
        m.map_points_to_cells()
        m.celldata.select(array_name)

        m.pipeline = utils.OperationNode(
            "legosurface",
            parents=[cls],
            comment=f"array: {array_name}",
            c="#4cc9f0:#e9c46a",
        )
        return m

    def tomesh(cls, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
        """
        Build a polygonal Mesh from the current object.

        If `fill=True`, the interior faces of all the cells are created.
        (setting a `shrink` value slightly smaller than the default 1.0
        can avoid flickering due to internal adjacent faces).

        If `fill=False`, only the boundary faces will be generated.
        """
        gf = vtki.new("GeometryFilter")
        if fill:
            sf = vtki.new("ShrinkFilter")
            sf.SetInputData(cls.dataset)
            sf.SetShrinkFactor(shrink)
            sf.Update()
            gf.SetInputData(sf.GetOutput())
            gf.Update()
            poly = gf.GetOutput()
            if shrink == 1.0:
                # merge the duplicate points left by the (no-op) shrink pass
                clean_poly = vtki.new("CleanPolyData")
                clean_poly.PointMergingOn()
                clean_poly.ConvertLinesToPointsOn()
                clean_poly.ConvertPolysToLinesOn()
                clean_poly.ConvertStripsToPolysOn()
                clean_poly.SetInputData(poly)
                clean_poly.Update()
                poly = clean_poly.GetOutput()
        else:
            gf.SetInputData(cls.dataset)
            gf.Update()
            poly = gf.GetOutput()

        msh = vedo.mesh.Mesh(poly).flat()
        msh.scalarbar = cls.scalarbar
        # carry over the volume's color transfer function as a lookup table
        lut = utils.ctf2lut(cls)
        if lut:
            msh.mapper.SetLookupTable(lut)

        msh.pipeline = utils.OperationNode(
            "tomesh", parents=[cls], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
        )
        return msh
class DataArrayHelper:
    """
    Helper class to manage data associated to either points (or vertices) and cells (or faces).

    Internal use only.

    The `association` flag selects which VTK attribute set is addressed:
    0 -> point data, 1 -> cell data, 2 -> field (meta) data.
    """

    def __init__(self, obj, association):
        # obj must expose `.dataset` (a VTK dataset) and, for point/cell
        # associations, a `.mapper`.
        self.obj = obj
        self.association = association

    def __getitem__(self, key):
        """
        Return the data array named (or indexed) by `key` as a numpy array.

        String arrays stored in field data are returned as a list of strings.
        Returns `None` if the array does not exist.
        """
        if self.association == 0:
            data = self.obj.dataset.GetPointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()

            # field data may hold string arrays which numpy cannot wrap
            varr = data.GetAbstractArray(key)
            if isinstance(varr, vtki.vtkStringArray):
                if isinstance(key, int):
                    key = data.GetArrayName(key)
                n = varr.GetNumberOfValues()
                narr = [varr.GetValue(i) for i in range(n)]
                return narr
            ###########

        else:
            raise RuntimeError(f"Cannot get data: unknown association {self.association}")

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return None
        return utils.vtk2numpy(arr)

    def __setitem__(self, key, input_array):
        """
        Create or overwrite the data array named `key` with `input_array`.

        For point/cell data the input length must match the number of
        points/cells. 1D arrays become the active scalars; (n,3) arrays
        become the active vectors (or normals, if `key` is "normals").
        """
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            n = self.obj.dataset.GetNumberOfPoints()
            self.obj.mapper.SetScalarModeToUsePointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
            n = self.obj.dataset.GetNumberOfCells()
            self.obj.mapper.SetScalarModeToUseCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
            if not utils.is_sequence(input_array):
                input_array = [input_array]

            if isinstance(input_array[0], str):
                # store as a VTK string array
                varr = vtki.vtkStringArray()
                varr.SetName(key)
                varr.SetNumberOfComponents(1)
                varr.SetNumberOfTuples(len(input_array))
                for i, iarr in enumerate(input_array):
                    if isinstance(iarr, np.ndarray):
                        iarr = iarr.tolist()  # better format
                    # Note: a string k can be converted to numpy with
                    # import json; k = np.array(json.loads(k))
                    varr.InsertValue(i, str(iarr))
            else:
                try:
                    varr = utils.numpy2vtk(input_array, name=key)
                except TypeError as e:
                    vedo.logger.error(
                        f"cannot create metadata with input object:\n"
                        f"{input_array}"
                        f"\n\nAllowed content examples are:\n"
                        f"- flat list of strings ['a','b', 1, [1,2,3], ...]"
                        f" (first item must be a string in this case)\n"
                        f" hint: use k = np.array(json.loads(k)) to convert strings\n"
                        f"- numpy arrays of any shape"
                    )
                    raise e

            data.AddArray(varr)
            return  ############

        else:
            raise RuntimeError(f"Cannot set data: unknown association {self.association}")

        if len(input_array) != n:
            vedo.logger.error(
                f"Error in point/cell data: length of input {len(input_array)}"
                f" != {n} nr. of elements"
            )
            raise RuntimeError()

        input_array = np.asarray(input_array)
        varr = utils.numpy2vtk(input_array, name=key)
        data.AddArray(varr)

        if len(input_array.shape) == 1:  # scalars
            data.SetActiveScalars(key)
            try:  # could be a volume mapper
                self.obj.mapper.SetScalarRange(data.GetScalars().GetRange())
            except AttributeError:
                pass
        elif len(input_array.shape) == 2 and input_array.shape[1] == 3:  # vectors
            if key.lower() == "normals":
                data.SetActiveNormals(key)
            else:
                data.SetActiveVectors(key)

    def keys(self) -> List[str]:
        """Return the list of available data array names"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            name = ""
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                # GetArray() can return None for non-numeric arrays
                iarr = data.GetArray(i)
                if iarr:
                    name = iarr.GetName()
            if name:
                arrnames.append(name)
        return arrnames

    def items(self) -> List:
        """Return the list of available data array `(names, values)`."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                # guard against None like in keys(): GetArray() returns None
                # for non-numeric arrays, which would raise AttributeError
                iarr = data.GetArray(i)
                name = iarr.GetName() if iarr else ""
            if name:
                arrnames.append((name, self[name]))
        return arrnames

    def todict(self) -> dict:
        """Return a dictionary of the available data arrays."""
        return dict(self.items())

    def rename(self, oldname: str, newname: str) -> None:
        """Rename an array"""
        if self.association == 0:
            varr = self.obj.dataset.GetPointData().GetArray(oldname)
        elif self.association == 1:
            varr = self.obj.dataset.GetCellData().GetArray(oldname)
        elif self.association == 2:
            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
        if varr:
            varr.SetName(newname)
        else:
            vedo.logger.warning(
                f"Cannot rename non existing array {oldname} to {newname}"
            )

    def remove(self, key: Union[int, str]) -> None:
        """Remove a data array by name or number"""
        if self.association == 0:
            self.obj.dataset.GetPointData().RemoveArray(key)
        elif self.association == 1:
            self.obj.dataset.GetCellData().RemoveArray(key)
        elif self.association == 2:
            self.obj.dataset.GetFieldData().RemoveArray(key)

    def clear(self) -> None:
        """Remove all data associated to this object"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        # Collect all the names first: removing while indexing forward shifts
        # the remaining arrays down and would skip every other array.
        names = []
        for i in range(data.GetNumberOfArrays()):
            if self.association == 2:
                varr = data.GetAbstractArray(i)
            else:
                varr = data.GetArray(i)
            if varr:
                names.append(varr.GetName())
        for name in names:
            data.RemoveArray(name)

    def select(self, key: Union[int, str]) -> Any:
        """Select one specific array by its name to make it the `active` one."""
        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
        # and NOT mapped through the lookup table, while everything else is.
        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
        # types are treated as colors with values in the range 0-255
        # and floating types are treated as colors with values in the range 0.0-1.0.
        # Setting ColorModeToMapScalars means that all scalar data will be mapped
        # through the lookup table.
        # (Note that for multi-component scalars, the particular component
        # to use for mapping can be specified using the SelectColorArray() method.)
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            self.obj.mapper.SetScalarModeToUsePointData()
        else:
            data = self.obj.dataset.GetCellData()
            self.obj.mapper.SetScalarModeToUseCellData()

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return self.obj

        nc = arr.GetNumberOfComponents()
        # print("GetNumberOfComponents", nc)
        if nc == 1:
            data.SetActiveScalars(key)
        elif nc == 2:
            data.SetTCoords(arr)
        elif nc in (3, 4):
            if "rgb" in key.lower():  # type: ignore
                data.SetActiveScalars(key)
                try:
                    # could be a volume mapper
                    self.obj.mapper.SetColorModeToDirectScalars()
                    data.SetActiveVectors(None)  # need this to fix bug in #1066
                    # print("SetColorModeToDirectScalars for", key)
                except AttributeError:
                    pass
            else:
                data.SetActiveVectors(key)
        elif nc == 9:
            data.SetActiveTensors(key)
        else:
            vedo.logger.error(f"Cannot select array {key} with {nc} components")
            return self.obj

        try:
            # could be a volume mapper
            self.obj.mapper.SetArrayName(key)
            self.obj.mapper.ScalarVisibilityOn()
        except AttributeError:
            pass

        return self.obj

    def select_texture_coords(self, key: Union[int, str]) -> Any:
        """Select one specific array to be used as texture coordinates."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            if isinstance(key, int):
                key = data.GetArrayName(key)
            data.SetTCoords(data.GetArray(key))
        else:
            vedo.logger.warning("texture coordinates are only available for point data")
        return self.obj

    def select_normals(self, key: Union[int, str]) -> Any:
        """Select one specific normal array by its name to make it the "active" one."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            self.obj.mapper.SetScalarModeToUsePointData()
        else:
            data = self.obj.dataset.GetCellData()
            self.obj.mapper.SetScalarModeToUseCellData()

        if isinstance(key, int):
            key = data.GetArrayName(key)
        data.SetActiveNormals(key)
        return self.obj

    def print(self, **kwargs) -> None:
        """Print the array names available to terminal"""
        colors.printc(self.keys(), **kwargs)

    def __repr__(self) -> str:
        """Representation"""

        def _get_str(pd, header):
            out = f"\x1b[2m\x1b[1m\x1b[7m{header}"
            if pd.GetNumberOfArrays():
                if self.obj.name:
                    out += f" in {self.obj.name}"
                out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m"
                for i in range(pd.GetNumberOfArrays()):
                    varr = pd.GetArray(i)
                    out += f"\n\x1b[1m\x1b[4mArray name : {varr.GetName()}\x1b[0m"
                    out += "\nindex".ljust(15) + f": {i}"
                    t = varr.GetDataType()
                    if t in vtki.array_types:
                        out += "\ntype".ljust(15)
                        out += f": {vtki.array_types[t]}"
                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
                    out += "\nshape".ljust(15) + f": {shape}"
                    out += "\nrange".ljust(15) + f": {np.array(varr.GetRange())}"
                    out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}"
                    out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}"
                    out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB"
            else:
                out += " is empty.\x1b[0m"
            return out

        if self.association == 0:
            out = _get_str(self.obj.dataset.GetPointData(), "Point Data")
        elif self.association == 1:
            out = _get_str(self.obj.dataset.GetCellData(), "Cell Data")
        elif self.association == 2:
            # initialize: previously `out` was left unbound (UnboundLocalError)
            # when the field data contained no arrays
            out = ""
            pd = self.obj.dataset.GetFieldData()
            if pd.GetNumberOfArrays():
                out = "\x1b[2m\x1b[1m\x1b[7mMeta Data"
                if self.obj.name:
                    out += f" in {self.obj.name}"
                out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m"
                for i in range(pd.GetNumberOfArrays()):
                    varr = pd.GetAbstractArray(i)
                    out += f"\n\x1b[1m\x1b[4mEntry name : {varr.GetName()}\x1b[0m"
                    out += "\nindex".ljust(15) + f": {i}"
                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
                    out += "\nshape".ljust(15) + f": {shape}"

        return out
Helper class to manage data associated to either points (or vertices) and cells (or faces).
Internal use only.
164 def keys(self) -> List[str]: 165 """Return the list of available data array names""" 166 if self.association == 0: 167 data = self.obj.dataset.GetPointData() 168 elif self.association == 1: 169 data = self.obj.dataset.GetCellData() 170 elif self.association == 2: 171 data = self.obj.dataset.GetFieldData() 172 arrnames = [] 173 for i in range(data.GetNumberOfArrays()): 174 name = "" 175 if self.association == 2: 176 name = data.GetAbstractArray(i).GetName() 177 else: 178 iarr = data.GetArray(i) 179 if iarr: 180 name = iarr.GetName() 181 if name: 182 arrnames.append(name) 183 return arrnames
Return the list of available data array names
185 def items(self) -> List: 186 """Return the list of available data array `(names, values)`.""" 187 if self.association == 0: 188 data = self.obj.dataset.GetPointData() 189 elif self.association == 1: 190 data = self.obj.dataset.GetCellData() 191 elif self.association == 2: 192 data = self.obj.dataset.GetFieldData() 193 arrnames = [] 194 for i in range(data.GetNumberOfArrays()): 195 if self.association == 2: 196 name = data.GetAbstractArray(i).GetName() 197 else: 198 name = data.GetArray(i).GetName() 199 if name: 200 arrnames.append((name, self[name])) 201 return arrnames
Return the list of available data array (names, values)
.
203 def todict(self) -> dict: 204 """Return a dictionary of the available data arrays.""" 205 return dict(self.items())
Return a dictionary of the available data arrays.
207 def rename(self, oldname: str, newname: str) -> None: 208 """Rename an array""" 209 if self.association == 0: 210 varr = self.obj.dataset.GetPointData().GetArray(oldname) 211 elif self.association == 1: 212 varr = self.obj.dataset.GetCellData().GetArray(oldname) 213 elif self.association == 2: 214 varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname) 215 if varr: 216 varr.SetName(newname) 217 else: 218 vedo.logger.warning( 219 f"Cannot rename non existing array {oldname} to {newname}" 220 )
Rename an array
222 def remove(self, key: Union[int, str]) -> None: 223 """Remove a data array by name or number""" 224 if self.association == 0: 225 self.obj.dataset.GetPointData().RemoveArray(key) 226 elif self.association == 1: 227 self.obj.dataset.GetCellData().RemoveArray(key) 228 elif self.association == 2: 229 self.obj.dataset.GetFieldData().RemoveArray(key)
Remove a data array by name or number
231 def clear(self) -> None: 232 """Remove all data associated to this object""" 233 if self.association == 0: 234 data = self.obj.dataset.GetPointData() 235 elif self.association == 1: 236 data = self.obj.dataset.GetCellData() 237 elif self.association == 2: 238 data = self.obj.dataset.GetFieldData() 239 for i in range(data.GetNumberOfArrays()): 240 if self.association == 2: 241 if data.GetAbstractArray(i): 242 name = data.GetAbstractArray(i).GetName() 243 data.RemoveArray(name) 244 else: 245 if data.GetArray(i): 246 name = data.GetArray(i).GetName() 247 data.RemoveArray(name)
Remove all data associated to this object
249 def select(self, key: Union[int, str]) -> Any: 250 """Select one specific array by its name to make it the `active` one.""" 251 # Default (ColorModeToDefault): unsigned char scalars are treated as colors, 252 # and NOT mapped through the lookup table, while everything else is. 253 # ColorModeToDirectScalar extends ColorModeToDefault such that all integer 254 # types are treated as colors with values in the range 0-255 255 # and floating types are treated as colors with values in the range 0.0-1.0. 256 # Setting ColorModeToMapScalars means that all scalar data will be mapped 257 # through the lookup table. 258 # (Note that for multi-component scalars, the particular component 259 # to use for mapping can be specified using the SelectColorArray() method.) 260 if self.association == 0: 261 data = self.obj.dataset.GetPointData() 262 self.obj.mapper.SetScalarModeToUsePointData() 263 else: 264 data = self.obj.dataset.GetCellData() 265 self.obj.mapper.SetScalarModeToUseCellData() 266 267 if isinstance(key, int): 268 key = data.GetArrayName(key) 269 270 arr = data.GetArray(key) 271 if not arr: 272 return self.obj 273 274 nc = arr.GetNumberOfComponents() 275 # print("GetNumberOfComponents", nc) 276 if nc == 1: 277 data.SetActiveScalars(key) 278 elif nc == 2: 279 data.SetTCoords(arr) 280 elif nc in (3, 4): 281 if "rgb" in key.lower(): # type: ignore 282 data.SetActiveScalars(key) 283 try: 284 # could be a volume mapper 285 self.obj.mapper.SetColorModeToDirectScalars() 286 data.SetActiveVectors(None) # need this to fix bug in #1066 287 # print("SetColorModeToDirectScalars for", key) 288 except AttributeError: 289 pass 290 else: 291 data.SetActiveVectors(key) 292 elif nc == 9: 293 data.SetActiveTensors(key) 294 else: 295 vedo.logger.error(f"Cannot select array {key} with {nc} components") 296 return self.obj 297 298 try: 299 # could be a volume mapper 300 self.obj.mapper.SetArrayName(key) 301 self.obj.mapper.ScalarVisibilityOn() 302 except AttributeError: 303 pass 304 305 
return self.obj
Select one specific array by its name to make it the active
one.
307 def select_texture_coords(self, key: Union[int,str]) -> Any: 308 """Select one specific array to be used as texture coordinates.""" 309 if self.association == 0: 310 data = self.obj.dataset.GetPointData() 311 if isinstance(key, int): 312 key = data.GetArrayName(key) 313 data.SetTCoords(data.GetArray(key)) 314 else: 315 vedo.logger.warning("texture coordinates are only available for point data") 316 return self.obj
Select one specific array to be used as texture coordinates.
318 def select_normals(self, key: Union[int,str]) -> Any: 319 """Select one specific normal array by its name to make it the "active" one.""" 320 if self.association == 0: 321 data = self.obj.dataset.GetPointData() 322 self.obj.mapper.SetScalarModeToUsePointData() 323 else: 324 data = self.obj.dataset.GetCellData() 325 self.obj.mapper.SetScalarModeToUseCellData() 326 327 if isinstance(key, int): 328 key = data.GetArrayName(key) 329 data.SetActiveNormals(key) 330 return self.obj
Select one specific normal array by its name to make it the "active" one.
class CommonAlgorithms:
    """Common algorithms."""
    # NOTE: throughout this class the first argument is named `cls` but it
    # refers to the instance (the methods access per-instance state such as
    # `cls.dataset` and `cls.mapper`).

    @property
    def pointdata(cls):
        """
        Create and/or return a `numpy.array` associated to points (vertices).
        A data array can be indexed either as a string or by an integer number.
        E.g.:  `myobj.pointdata["arrayname"]`

        Usage:

            `myobj.pointdata.keys()` to return the available data array names

            `myobj.pointdata.select(name)` to make this array the active one

            `myobj.pointdata.remove(name)` to remove this array
        """
        return DataArrayHelper(cls, 0)

    @property
    def celldata(cls):
        """
        Create and/or return a `numpy.array` associated to cells (faces).
        A data array can be indexed either as a string or by an integer number.
        E.g.:  `myobj.celldata["arrayname"]`

        Usage:

            `myobj.celldata.keys()` to return the available data array names

            `myobj.celldata.select(name)` to make this array the active one

            `myobj.celldata.remove(name)` to remove this array
        """
        return DataArrayHelper(cls, 1)

    @property
    def metadata(cls):
        """
        Create and/or return a `numpy.array` associated to neither cells nor faces.
        A data array can be indexed either as a string or by an integer number.
        E.g.:  `myobj.metadata["arrayname"]`

        Usage:

            `myobj.metadata.keys()` to return the available data array names

            `myobj.metadata.select(name)` to make this array the active one

            `myobj.metadata.remove(name)` to remove this array
        """
        return DataArrayHelper(cls, 2)

    def rename(cls, newname: str) -> Self:
        """Rename the object"""
        try:
            cls.name = newname
        except AttributeError:
            vedo.logger.error(f"Cannot rename object {cls}")
        return cls

    def memory_address(cls) -> int:
        """
        Return a unique memory address integer which may serve as the ID of the
        object, or passed to c++ code.
        """
        # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
        # https://github.com/tfmoraes/polydata_connectivity
        # GetAddressAsString() output starts with a 5-char prefix before the
        # hex address (presumably "Addr="); strip it and parse as base-16.
        return int(cls.dataset.GetAddressAsString("")[5:], 16)

    def memory_size(cls) -> int:
        """Return the size in bytes of the object in memory."""
        return cls.dataset.GetActualMemorySize()

    def modified(cls) -> Self:
        """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
        cls.dataset.GetPointData().Modified()
        scals = cls.dataset.GetPointData().GetScalars()
        if scals:
            scals.Modified()
        return cls

    def box(cls, scale=1, padding=0) -> "vedo.Mesh":
        """
        Return the bounding box as a new `Mesh` object.

        Arguments:
            scale : (float)
                box size can be scaled by a factor
            padding : (float, list)
                a constant padding can be added (can be a list `[padx,pady,padz]`)
        """
        b = cls.bounds()
        if not utils.is_sequence(padding):
            padding = [padding, padding, padding]
        length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
        tol = (length + width + height) / 30000  # useful for boxing text
        pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
        bx = vedo.shapes.Box(
            pos,
            length * scale + padding[0],
            width * scale + padding[1],
            height * scale + padding[2],
            c="gray",
        )
        # inherit rendering properties from the source object when it has
        # an actor/properties (silently skipped otherwise)
        try:
            pr = vtki.vtkProperty()
            pr.DeepCopy(cls.properties)
            bx.actor.SetProperty(pr)
            bx.properties = pr
        except (AttributeError, TypeError):
            pass
        bx.flat().lighting("off").wireframe(True)
        return bx

    def update_dataset(cls, dataset, **kwargs) -> Self:
        """Update the dataset of the object with the provided VTK dataset."""
        cls._update(dataset, **kwargs)
        return cls

    def bounds(cls) -> np.ndarray:
        """
        Get the object bounds.
        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
        """
        try:  # this is very slow for large meshes
            pts = cls.vertices
            # nanmin/nanmax: ignore NaN coordinates when computing the extent
            xmin, ymin, zmin = np.nanmin(pts, axis=0)
            xmax, ymax, zmax = np.nanmax(pts, axis=0)
            return np.array([xmin, xmax, ymin, ymax, zmin, zmax])
        except (AttributeError, ValueError):
            return np.array(cls.dataset.GetBounds())

    def xbounds(cls, i=None) -> np.ndarray:
        """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
        b = cls.bounds()
        if i is not None:
            # i=0 -> lower bound, i=1 -> upper bound
            return b[i]
        return np.array([b[0], b[1]])

    def ybounds(cls, i=None) -> np.ndarray:
        """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
        b = cls.bounds()
        if i == 0:
            return b[2]
        if i == 1:
            return b[3]
        return np.array([b[2], b[3]])

    def zbounds(cls, i=None) -> np.ndarray:
        """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
        b = cls.bounds()
        if i == 0:
            return b[4]
        if i == 1:
            return b[5]
        return np.array([b[4], b[5]])

    def diagonal_size(cls) -> float:
        """Get the length of the diagonal of the bounding box."""
        b = cls.bounds()
        return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)

    def average_size(cls) -> float:
        """
        Calculate and return the average size of the object.
        This is the mean of the vertex distances from the center of mass.
553 """ 554 coords = cls.vertices 555 cm = np.mean(coords, axis=0) 556 if coords.shape[0] == 0: 557 return 0.0 558 cc = coords - cm 559 return np.mean(np.linalg.norm(cc, axis=1)) 560 561 def center_of_mass(cls) -> np.ndarray: 562 """Get the center of mass of the object.""" 563 if isinstance(cls, (vedo.RectilinearGrid, vedo.Volume)): 564 return np.array(cls.dataset.GetCenter()) 565 cmf = vtki.new("CenterOfMass") 566 cmf.SetInputData(cls.dataset) 567 cmf.Update() 568 c = cmf.GetCenter() 569 return np.array(c) 570 571 def copy_data_from(cls, obj: Any) -> Self: 572 """Copy all data (point and cell data) from this input object""" 573 cls.dataset.GetPointData().PassData(obj.dataset.GetPointData()) 574 cls.dataset.GetCellData().PassData(obj.dataset.GetCellData()) 575 cls.pipeline = utils.OperationNode( 576 "copy_data_from", 577 parents=[cls, obj], 578 comment=f"{obj.__class__.__name__}", 579 shape="note", 580 c="#ccc5b9", 581 ) 582 return cls 583 584 def inputdata(cls): 585 """Obsolete, use `.dataset` instead.""" 586 colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y") 587 return cls.dataset 588 589 @property 590 def npoints(cls): 591 """Retrieve the number of points (or vertices).""" 592 return cls.dataset.GetNumberOfPoints() 593 594 @property 595 def nvertices(cls): 596 """Retrieve the number of vertices (or points).""" 597 return cls.dataset.GetNumberOfPoints() 598 599 @property 600 def ncells(cls): 601 """Retrieve the number of cells.""" 602 return cls.dataset.GetNumberOfCells() 603 604 def cell_centers(cls, copy_arrays=False) -> "vedo.Points": 605 """ 606 Get the coordinates of the cell centers as a `Points` object. 

        Examples:
            - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
        """
        vcen = vtki.new("CellCenters")
        vcen.SetCopyArrays(copy_arrays)
        vcen.SetVertexCells(copy_arrays)
        vcen.SetInputData(cls.dataset)
        vcen.Update()
        vpts = vedo.Points(vcen.GetOutput())
        if copy_arrays:
            vpts.copy_properties_from(cls)
        return vpts

    @property
    def lines(cls):
        """
        Get lines connectivity ids as a python array
        formatted as `[[id0,id1], [id3,id4], ...]`

        See also: `lines_as_flat_array()`.
        """
        # Get cell connettivity ids as a 1D array. The vtk format is:
        # [nids1, id0 ... idn, niids2, id0 ... idm, etc].
        try:
            arr1d = utils.vtk2numpy(cls.dataset.GetLines().GetData())
        except AttributeError:
            # dataset has no GetLines() (not a polydata)
            return np.array([])
        i = 0
        conn = []
        n = len(arr1d)
        for _ in range(n):
            # arr1d[i] is the number of ids of the current cell,
            # followed by the ids themselves
            cell = [arr1d[i + k + 1] for k in range(arr1d[i])]
            conn.append(cell)
            i += arr1d[i] + 1
            if i >= n:
                break

        return conn  # cannot always make a numpy array of it!

    @property
    def lines_as_flat_array(cls):
        """
        Get lines connectivity ids as a 1D numpy array.
        Format is e.g. [2, 10,20,  3, 10,11,12,  2, 70,80, ...]

        See also: `lines()`.
        """
        try:
            return utils.vtk2numpy(cls.dataset.GetLines().GetData())
        except AttributeError:
            return np.array([])

    def mark_boundaries(cls) -> Self:
        """
        Mark cells and vertices if they lie on a boundary.
        A new array called `BoundaryCells` is added to the object.
        """
        mb = vtki.new("MarkBoundaryFilter")
        mb.SetInputData(cls.dataset)
        mb.Update()
        # replace the dataset in place with the filter output
        cls.dataset.DeepCopy(mb.GetOutput())
        cls.pipeline = utils.OperationNode("mark_boundaries", parents=[cls])
        return cls

    def find_cells_in_bounds(cls, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
        """
        Find cells that are within the specified bounds.
        """
        # xbounds may also be an object exposing .bounds()
        try:
            xbounds = list(xbounds.bounds())
        except AttributeError:
            pass

        if len(xbounds) == 6:
            bnds = xbounds
        else:
            bnds = list(cls.bounds())
            if len(xbounds) == 2:
                bnds[0] = xbounds[0]
                bnds[1] = xbounds[1]
            if len(ybounds) == 2:
                bnds[2] = ybounds[0]
                bnds[3] = ybounds[1]
            if len(zbounds) == 2:
                bnds[4] = zbounds[0]
                bnds[5] = zbounds[1]

        cell_ids = vtki.vtkIdList()
        # build (and cache) the cell locator on first use
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_line(cls, p0, p1, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a line segment.
        """
        cell_ids = vtki.vtkIdList()
        # build (and cache) the cell locator on first use
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_plane(cls, origin, normal, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a plane.
        """
        cell_ids = vtki.vtkIdList()
        # build (and cache) the cell locator on first use
        if not cls.cell_locator:
            cls.cell_locator = vtki.new("CellTreeLocator")
            cls.cell_locator.SetDataSet(cls.dataset)
            cls.cell_locator.BuildLocator()
        cls.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def keep_cell_types(cls, types=()):
        """
        Extract cells of a specific type.

        Check the VTK cell types here:
        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
        """
        fe = vtki.new("ExtractCellsByType")
        fe.SetInputData(cls.dataset)
        for t in types:
            # accept either an integer VTK cell type id or its name string
            try:
                if utils.is_integer(t):
                    it = t
                else:
                    it = vtki.cell_types[t.upper()]
            except KeyError:
                vedo.logger.error(f"Cell type '{t}' not recognized")
                continue
            fe.AddCellType(it)
        fe.Update()
        cls._update(fe.GetOutput())
        return cls

    def map_cells_to_points(cls, arrays=(), move=False) -> Self:
        """
        Interpolate cell data (i.e., data specified per cell or face)
        into point data (i.e., data specified at each vertex).
        The method of transformation is based on averaging the data values
        of all cells using a particular point.

        A custom list of arrays to be mapped can be passed in input.

        Set `move=True` to delete the original `celldata` array.
        """
        c2p = vtki.new("CellDataToPointData")
        c2p.SetInputData(cls.dataset)
        if not move:
            # keep the original cell data alongside the new point data
            c2p.PassCellDataOn()
        if arrays:
            c2p.ClearCellDataArrays()
            c2p.ProcessAllArraysOff()
            for arr in arrays:
                c2p.AddCellDataArray(arr)
        else:
            c2p.ProcessAllArraysOn()
        c2p.Update()
        cls._update(c2p.GetOutput(), reset_locators=False)
        cls.mapper.SetScalarModeToUsePointData()
        cls.pipeline = utils.OperationNode("map_cells_to_points", parents=[cls])
        return cls

    @property
    def vertices(cls):
        """Return the vertices (points) coordinates."""
        try:
            # for polydata and unstructured grid
            varr = cls.dataset.GetPoints().GetData()
        except (AttributeError, TypeError):
            try:
                # for RectilinearGrid, StructuredGrid
                vpts = vtki.vtkPoints()
                cls.dataset.GetPoints(vpts)
                varr = vpts.GetData()
            except (AttributeError, TypeError):
                try:
                    # for ImageData
                    v2p = vtki.new("ImageToPoints")
                    v2p.SetInputData(cls.dataset)
                    v2p.Update()
                    varr = v2p.GetOutput().GetPoints().GetData()
                except AttributeError:
                    # no known way to extract points from this dataset type
                    return np.array([])

        return utils.vtk2numpy(varr)

    # setter
    @vertices.setter
    def vertices(cls, pts):
        """Set vertices (points) coordinates."""
        pts = utils.make3d(pts)
        arr = utils.numpy2vtk(pts, dtype=np.float32)
        try:
            vpts = cls.dataset.GetPoints()
            vpts.SetData(arr)
            vpts.Modified()
        except (AttributeError, TypeError):
            vedo.logger.error(f"Cannot set vertices for {type(cls)}")
            return
        # reset mesh to identity matrix position/rotation:
        cls.point_locator = None
        cls.cell_locator = None
        cls.line_locator = None
        cls.transform = LinearTransform()

    @property
    def points(cls):
        """Return the vertices (points) coordinates. Same as `vertices`."""
        return cls.vertices

    @points.setter
    def points(cls, pts):
        """Set vertices (points) coordinates. Same as `vertices`."""
        cls.vertices = pts

    @property
    def coordinates(cls):
        """Return the vertices (points) coordinates. Same as `vertices`."""
        return cls.vertices

    @coordinates.setter
    def coordinates(cls, pts):
        """Set vertices (points) coordinates. Same as `vertices`."""
        cls.vertices = pts

    @property
    def cells_as_flat_array(cls):
        """
        Get cell connectivity ids as a 1D numpy array.
        Format is e.g. [3, 10,20,30  4, 10,11,12,13  ...]
        """
        try:
            # valid for unstructured grid
            arr1d = utils.vtk2numpy(cls.dataset.GetCells().GetData())
        except AttributeError:
            # valid for polydata
            arr1d = utils.vtk2numpy(cls.dataset.GetPolys().GetData())
        return arr1d

    @property
    def cells(cls):
        """
        Get the cells connectivity ids as a numpy array.

        The output format is: `[[id0 ... idn], [id0 ... idm],  etc]`.
873 """ 874 try: 875 # valid for unstructured grid 876 arr1d = utils.vtk2numpy(cls.dataset.GetCells().GetData()) 877 except AttributeError: 878 try: 879 # valid for polydata 880 arr1d = utils.vtk2numpy(cls.dataset.GetPolys().GetData()) 881 except AttributeError: 882 vedo.logger.warning(f"Cannot get cells for {type(cls)}") 883 return np.array([]) 884 885 # Get cell connettivity ids as a 1D array. vtk format is: 886 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 887 i = 0 888 conn = [] 889 n = len(arr1d) 890 if n: 891 while True: 892 cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)] 893 conn.append(cell) 894 i += arr1d[i] + 1 895 if i >= n: 896 break 897 return conn 898 899 def cell_edge_neighbors(cls): 900 """ 901 Get the cell neighbor indices of each cell. 902 903 Returns a python list of lists. 904 """ 905 906 def face_to_edges(face): 907 edges = [] 908 size = len(face) 909 for i in range(1, size + 1): 910 if i == size: 911 edges.append([face[i - 1], face[0]]) 912 else: 913 edges.append([face[i - 1], face[i]]) 914 return edges 915 916 pd = cls.dataset 917 pd.BuildLinks() 918 919 neicells = [] 920 for i, cell in enumerate(cls.cells): 921 nn = [] 922 for edge in face_to_edges(cell): 923 neighbors = vtki.vtkIdList() 924 pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors) 925 if neighbors.GetNumberOfIds() > 0: 926 neighbor = neighbors.GetId(0) 927 nn.append(neighbor) 928 neicells.append(nn) 929 930 return neicells 931 932 933 def map_points_to_cells(cls, arrays=(), move=False) -> Self: 934 """ 935 Interpolate point data (i.e., data specified per point or vertex) 936 into cell data (i.e., data specified per cell). 937 The method of transformation is based on averaging the data values 938 of all points defining a particular cell. 939 940 A custom list of arrays to be mapped can be passed in input. 941 942 Set `move=True` to delete the original `pointdata` array. 
943 944 Examples: 945 - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py) 946 """ 947 p2c = vtki.new("PointDataToCellData") 948 p2c.SetInputData(cls.dataset) 949 if not move: 950 p2c.PassPointDataOn() 951 if arrays: 952 p2c.ClearPointDataArrays() 953 p2c.ProcessAllArraysOff() 954 for arr in arrays: 955 p2c.AddPointDataArray(arr) 956 else: 957 p2c.ProcessAllArraysOn() 958 p2c.Update() 959 cls._update(p2c.GetOutput(), reset_locators=False) 960 cls.mapper.SetScalarModeToUseCellData() 961 cls.pipeline = utils.OperationNode("map_points_to_cells", parents=[cls]) 962 return cls 963 964 def resample_data_from(cls, source, tol=None, categorical=False) -> Self: 965 """ 966 Resample point and cell data from another dataset. 967 The output has the same structure but its point data have 968 the resampled values from target. 969 970 Use `tol` to set the tolerance used to compute whether 971 a point in the source is in a cell of the current object. 972 Points without resampled values, and their cells, are marked as blank. 973 If the data is categorical, then the resulting data will be determined 974 by a nearest neighbor interpolation scheme. 
975 976 Example: 977 ```python 978 from vedo import * 979 m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1) 980 pts = m1.coordinates 981 ces = m1.cell_centers().coordinates 982 m1.pointdata["xvalues"] = np.power(pts[:,0], 3) 983 m1.celldata["yvalues"] = np.power(ces[:,1], 3) 984 m2 = Mesh(dataurl+'bunny.obj') 985 m2.resample_data_from(m1) 986 # print(m2.pointdata["xvalues"]) 987 show(m1, m2 , N=2, axes=1) 988 ``` 989 """ 990 rs = vtki.new("ResampleWithDataSet") 991 rs.SetInputData(cls.dataset) 992 rs.SetSourceData(source.dataset) 993 994 rs.SetPassPointArrays(True) 995 rs.SetPassCellArrays(True) 996 rs.SetPassFieldArrays(True) 997 rs.SetCategoricalData(categorical) 998 999 rs.SetComputeTolerance(True) 1000 if tol: 1001 rs.SetComputeTolerance(False) 1002 rs.SetTolerance(tol) 1003 rs.Update() 1004 cls._update(rs.GetOutput(), reset_locators=False) 1005 cls.pipeline = utils.OperationNode( 1006 "resample_data_from", 1007 comment=f"{source.__class__.__name__}", 1008 parents=[cls, source], 1009 ) 1010 return cls 1011 1012 def interpolate_data_from( 1013 cls, 1014 source, 1015 radius=None, 1016 n=None, 1017 kernel="shepard", 1018 exclude=("Normals",), 1019 on="points", 1020 null_strategy=1, 1021 null_value=0, 1022 ) -> Self: 1023 """ 1024 Interpolate over source to port its data onto the current object using various kernels. 1025 1026 If n (number of closest points to use) is set then radius value is ignored. 1027 1028 Check out also: 1029 `probe()` which in many cases can be faster. 1030 1031 Arguments: 1032 kernel : (str) 1033 available kernels are [shepard, gaussian, linear] 1034 null_strategy : (int) 1035 specify a strategy to use when encountering a "null" point 1036 during the interpolation process. Null points occur when the local neighborhood 1037 (of nearby points to interpolate from) is empty. 
1038 1039 - Case 0: an output array is created that marks points 1040 as being valid (=1) or null (invalid =0), and the null_value is set as well 1041 - Case 1: the output data value(s) are set to the provided null_value 1042 - Case 2: simply use the closest point to perform the interpolation. 1043 null_value : (float) 1044 see above. 1045 1046 Examples: 1047 - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py) 1048 - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py) 1049 - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py) 1050 - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py) 1051 1052  1053 """ 1054 if radius is None and not n: 1055 vedo.logger.error("in interpolate_data_from(): please set either radius or n") 1056 raise RuntimeError 1057 1058 if on == "points": 1059 points = source.dataset 1060 elif on == "cells": 1061 c2p = vtki.new("CellDataToPointData") 1062 c2p.SetInputData(source.dataset) 1063 c2p.Update() 1064 points = c2p.GetOutput() 1065 else: 1066 vedo.logger.error("in interpolate_data_from(), on must be on points or cells") 1067 raise RuntimeError() 1068 1069 locator = vtki.new("PointLocator") 1070 locator.SetDataSet(points) 1071 locator.BuildLocator() 1072 1073 if kernel.lower() == "shepard": 1074 kern = vtki.new("ShepardKernel") 1075 kern.SetPowerParameter(2) 1076 elif kernel.lower() == "gaussian": 1077 kern = vtki.new("GaussianKernel") 1078 kern.SetSharpness(2) 1079 elif kernel.lower() == "linear": 1080 kern = vtki.new("LinearKernel") 1081 else: 1082 vedo.logger.error("available kernels are: [shepard, gaussian, linear]") 1083 raise RuntimeError() 1084 1085 if n: 1086 kern.SetNumberOfPoints(n) 1087 kern.SetKernelFootprintToNClosest() 1088 else: 1089 kern.SetRadius(radius) 1090 kern.SetKernelFootprintToRadius() 1091 
1092 # remove arrays that are already present in cls dataset 1093 # this is because the interpolator will ignore them otherwise 1094 clsnames = [] 1095 for i in range(cls.dataset.GetPointData().GetNumberOfArrays()): 1096 name = cls.dataset.GetPointData().GetArrayName(i) 1097 clsnames.append(name) 1098 1099 pointsnames = [] 1100 for i in range(points.GetPointData().GetNumberOfArrays()): 1101 name = points.GetPointData().GetArrayName(i) 1102 pointsnames.append(name) 1103 1104 for cname in clsnames: 1105 if cname in set(pointsnames) - set(exclude): 1106 cls.dataset.GetPointData().RemoveArray(cname) 1107 # print(f"Removed {cname} from cls dataset") 1108 1109 interpolator = vtki.new("PointInterpolator") 1110 interpolator.SetInputData(cls.dataset) 1111 interpolator.SetSourceData(points) 1112 interpolator.SetKernel(kern) 1113 interpolator.SetLocator(locator) 1114 interpolator.PassFieldArraysOn() 1115 interpolator.SetNullPointsStrategy(null_strategy) 1116 interpolator.SetNullValue(null_value) 1117 interpolator.SetValidPointsMaskArrayName("ValidPointMask") 1118 for ex in exclude: 1119 interpolator.AddExcludedArray(ex) 1120 1121 # remove arrays that are already present in the source 1122 # this is because the interpolator will ignore them otherwise 1123 for i in range(cls.dataset.GetPointData().GetNumberOfArrays()): 1124 name = cls.dataset.GetPointData().GetArrayName(i) 1125 if name not in exclude: 1126 cls.dataset.GetPointData().RemoveArray(name) 1127 1128 interpolator.Update() 1129 1130 if on == "cells": 1131 p2c = vtki.new("PointDataToCellData") 1132 p2c.SetInputData(interpolator.GetOutput()) 1133 p2c.Update() 1134 cpoly = p2c.GetOutput() 1135 else: 1136 cpoly = interpolator.GetOutput() 1137 1138 cls._update(cpoly, reset_locators=False) 1139 1140 cls.pipeline = utils.OperationNode("interpolate_data_from", parents=[cls, source]) 1141 return cls 1142 1143 def add_ids(cls) -> Self: 1144 """ 1145 Generate point and cell ids arrays. 
1146 1147 Two new arrays are added to the mesh: `PointID` and `CellID`. 1148 """ 1149 ids = vtki.new("IdFilter") 1150 ids.SetInputData(cls.dataset) 1151 ids.PointIdsOn() 1152 ids.CellIdsOn() 1153 ids.FieldDataOff() 1154 ids.SetPointIdsArrayName("PointID") 1155 ids.SetCellIdsArrayName("CellID") 1156 ids.Update() 1157 cls._update(ids.GetOutput(), reset_locators=False) 1158 cls.pipeline = utils.OperationNode("add_ids", parents=[cls]) 1159 return cls 1160 1161 def gradient(cls, input_array=None, on="points", fast=False) -> np.ndarray: 1162 """ 1163 Compute and return the gradiend of the active scalar field as a numpy array. 1164 1165 Arguments: 1166 input_array : (str) 1167 array of the scalars to compute the gradient, 1168 if None the current active array is selected 1169 on : (str) 1170 compute either on 'points' or 'cells' data 1171 fast : (bool) 1172 if True, will use a less accurate algorithm 1173 that performs fewer derivative calculations (and is therefore faster). 1174 1175 Examples: 1176 - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py) 1177 1178  1179 """ 1180 gra = vtki.new("GradientFilter") 1181 if on.startswith("p"): 1182 varr = cls.dataset.GetPointData() 1183 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1184 elif on.startswith("c"): 1185 varr = cls.dataset.GetCellData() 1186 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1187 else: 1188 vedo.logger.error(f"in gradient: unknown option {on}") 1189 raise RuntimeError 1190 1191 if input_array is None: 1192 if varr.GetScalars(): 1193 input_array = varr.GetScalars().GetName() 1194 else: 1195 vedo.logger.error(f"in gradient: no scalars found for {on}") 1196 raise RuntimeError 1197 1198 gra.SetInputData(cls.dataset) 1199 gra.SetInputScalars(tp, input_array) 1200 gra.SetResultArrayName("Gradient") 1201 gra.SetFasterApproximation(fast) 1202 gra.ComputeDivergenceOff() 1203 gra.ComputeVorticityOff() 1204 gra.ComputeGradientOn() 1205 gra.Update() 1206 # 
cls._update(gra.GetOutput(), reset_locators=False) 1207 if on.startswith("p"): 1208 gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient")) 1209 else: 1210 gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient")) 1211 return gvecs 1212 1213 def divergence(cls, array_name=None, on="points", fast=False) -> np.ndarray: 1214 """ 1215 Compute and return the divergence of a vector field as a numpy array. 1216 1217 Arguments: 1218 array_name : (str) 1219 name of the array of vectors to compute the divergence, 1220 if None the current active array is selected 1221 on : (str) 1222 compute either on 'points' or 'cells' data 1223 fast : (bool) 1224 if True, will use a less accurate algorithm 1225 that performs fewer derivative calculations (and is therefore faster). 1226 """ 1227 div = vtki.new("GradientFilter") 1228 if on.startswith("p"): 1229 varr = cls.dataset.GetPointData() 1230 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1231 elif on.startswith("c"): 1232 varr = cls.dataset.GetCellData() 1233 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1234 else: 1235 vedo.logger.error(f"in divergence(): unknown option {on}") 1236 raise RuntimeError 1237 1238 if array_name is None: 1239 if varr.GetVectors(): 1240 array_name = varr.GetVectors().GetName() 1241 else: 1242 vedo.logger.error(f"in divergence(): no vectors found for {on}") 1243 raise RuntimeError 1244 1245 div.SetInputData(cls.dataset) 1246 div.SetInputScalars(tp, array_name) 1247 div.ComputeDivergenceOn() 1248 div.ComputeGradientOff() 1249 div.ComputeVorticityOff() 1250 div.SetDivergenceArrayName("Divergence") 1251 div.SetFasterApproximation(fast) 1252 div.Update() 1253 # cls._update(div.GetOutput(), reset_locators=False) 1254 if on.startswith("p"): 1255 dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence")) 1256 else: 1257 dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence")) 1258 return dvecs 1259 1260 def vorticity(cls, 
array_name=None, on="points", fast=False) -> np.ndarray: 1261 """ 1262 Compute and return the vorticity of a vector field as a numpy array. 1263 1264 Arguments: 1265 array_name : (str) 1266 name of the array to compute the vorticity, 1267 if None the current active array is selected 1268 on : (str) 1269 compute either on 'points' or 'cells' data 1270 fast : (bool) 1271 if True, will use a less accurate algorithm 1272 that performs fewer derivative calculations (and is therefore faster). 1273 """ 1274 vort = vtki.new("GradientFilter") 1275 if on.startswith("p"): 1276 varr = cls.dataset.GetPointData() 1277 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1278 elif on.startswith("c"): 1279 varr = cls.dataset.GetCellData() 1280 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1281 else: 1282 vedo.logger.error(f"in vorticity(): unknown option {on}") 1283 raise RuntimeError 1284 1285 if array_name is None: 1286 if varr.GetVectors(): 1287 array_name = varr.GetVectors().GetName() 1288 else: 1289 vedo.logger.error(f"in vorticity(): no vectors found for {on}") 1290 raise RuntimeError 1291 1292 vort.SetInputData(cls.dataset) 1293 vort.SetInputScalars(tp, array_name) 1294 vort.ComputeDivergenceOff() 1295 vort.ComputeGradientOff() 1296 vort.ComputeVorticityOn() 1297 vort.SetVorticityArrayName("Vorticity") 1298 vort.SetFasterApproximation(fast) 1299 vort.Update() 1300 if on.startswith("p"): 1301 vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity")) 1302 else: 1303 vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity")) 1304 return vvecs 1305 1306 def probe( 1307 cls, 1308 source, 1309 categorical=False, 1310 snap=False, 1311 tol=0, 1312 ) -> Self: 1313 """ 1314 Takes a data set and probes its scalars at the specified points in space. 1315 1316 Note that a mask is also output with valid/invalid points which can be accessed 1317 with `mesh.pointdata['ValidPointMask']`. 
1318 1319 Arguments: 1320 source : any dataset 1321 the data set to probe. 1322 categorical : bool 1323 control whether the source pointdata is to be treated as categorical. 1324 snap : bool 1325 snap to the cell with the closest point if no cell was found 1326 tol : float 1327 the tolerance to use when performing the probe. 1328 1329 Check out also: 1330 `interpolate_data_from()` and `tovolume()` 1331 1332 Examples: 1333 - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py) 1334 1335  1336 """ 1337 probe_filter = vtki.new("ProbeFilter") 1338 probe_filter.SetSourceData(source.dataset) 1339 probe_filter.SetInputData(cls.dataset) 1340 probe_filter.PassCellArraysOn() 1341 probe_filter.PassFieldArraysOn() 1342 probe_filter.PassPointArraysOn() 1343 probe_filter.SetCategoricalData(categorical) 1344 probe_filter.ComputeToleranceOff() 1345 if tol: 1346 probe_filter.ComputeToleranceOn() 1347 probe_filter.SetTolerance(tol) 1348 probe_filter.SetSnapToCellWithClosestPoint(snap) 1349 probe_filter.Update() 1350 cls._update(probe_filter.GetOutput(), reset_locators=False) 1351 cls.pipeline = utils.OperationNode("probe", parents=[cls, source]) 1352 cls.pointdata.rename("vtkValidPointMask", "ValidPointMask") 1353 return cls 1354 1355 def compute_cell_size(cls) -> Self: 1356 """ 1357 Add to this object a cell data array 1358 containing the area, volume and edge length 1359 of the cells (when applicable to the object type). 1360 1361 Array names are: `Area`, `Volume`, `Length`. 
1362 """ 1363 csf = vtki.new("CellSizeFilter") 1364 csf.SetInputData(cls.dataset) 1365 csf.SetComputeArea(1) 1366 csf.SetComputeVolume(1) 1367 csf.SetComputeLength(1) 1368 csf.SetComputeVertexCount(0) 1369 csf.SetAreaArrayName("Area") 1370 csf.SetVolumeArrayName("Volume") 1371 csf.SetLengthArrayName("Length") 1372 csf.Update() 1373 cls._update(csf.GetOutput(), reset_locators=False) 1374 return cls 1375 1376 def generate_random_data(cls) -> Self: 1377 """Fill a dataset with random attributes""" 1378 gen = vtki.new("RandomAttributeGenerator") 1379 gen.SetInputData(cls.dataset) 1380 gen.GenerateAllDataOn() 1381 gen.SetDataTypeToFloat() 1382 gen.GeneratePointNormalsOff() 1383 gen.GeneratePointTensorsOn() 1384 gen.GenerateCellScalarsOn() 1385 gen.Update() 1386 cls._update(gen.GetOutput(), reset_locators=False) 1387 cls.pipeline = utils.OperationNode("generate_random_data", parents=[cls]) 1388 return cls 1389 1390 def integrate_data(cls) -> dict: 1391 """ 1392 Integrate point and cell data arrays while computing length, 1393 area or volume of the domain. It works for 1D, 2D or 3D cells. 1394 1395 For volumetric datasets, this filter ignores all but 3D cells. 1396 It will not compute the volume contained in a closed surface. 1397 1398 Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`, 1399 which contain the integration result for the corresponding attributes. 
1400 1401 Examples: 1402 ```python 1403 from vedo import * 1404 surf = Sphere(res=100) 1405 surf.pointdata['scalars'] = np.ones(surf.npoints) 1406 data = surf.integrate_data() 1407 print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi) 1408 ``` 1409 1410 ```python 1411 from vedo import * 1412 1413 xcoords1 = np.arange(0, 2.2, 0.2) 1414 xcoords2 = sqrt(np.arange(0, 4.2, 0.2)) 1415 1416 ycoords = np.arange(0, 1.2, 0.2) 1417 1418 surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2) 1419 surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2) 1420 1421 surf1.pointdata['scalars'] = surf1.vertices[:,2] 1422 surf2.pointdata['scalars'] = surf2.vertices[:,2] 1423 1424 data1 = surf1.integrate_data() 1425 data2 = surf2.integrate_data() 1426 1427 print(data1['pointdata']['scalars'], 1428 "is equal to", 1429 data2['pointdata']['scalars'], 1430 "even if the grids are different!", 1431 "(= the volume under the surface)" 1432 ) 1433 show(surf1, surf2, N=2, axes=1).close() 1434 ``` 1435 """ 1436 vinteg = vtki.new("IntegrateAttributes") 1437 vinteg.SetInputData(cls.dataset) 1438 vinteg.Update() 1439 ugrid = vedo.UnstructuredGrid(vinteg.GetOutput()) 1440 data = dict( 1441 pointdata=ugrid.pointdata.todict(), 1442 celldata=ugrid.celldata.todict(), 1443 metadata=ugrid.metadata.todict(), 1444 ) 1445 return data 1446 1447 def write(cls, filename, binary=True) -> None: 1448 """Write object to file.""" 1449 out = vedo.file_io.write(cls, filename, binary) 1450 out.pipeline = utils.OperationNode( 1451 "write", parents=[cls], comment=filename[:15], shape="folder", c="#8a817c" 1452 ) 1453 1454 def tomesh(cls, bounds=(), shrink=0) -> "vedo.Mesh": 1455 """ 1456 Extract boundary geometry from dataset (or convert data to polygonal type). 1457 1458 Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds` 1459 to keep track of the original mesh elements. 
1460 1461 Arguments: 1462 bounds : (list) 1463 specify a sub-region to extract 1464 shrink : (float) 1465 shrink the cells to a fraction of their original size 1466 """ 1467 geo = vtki.new("GeometryFilter") 1468 1469 if shrink: 1470 sf = vtki.new("ShrinkFilter") 1471 sf.SetInputData(cls.dataset) 1472 sf.SetShrinkFactor(shrink) 1473 sf.Update() 1474 geo.SetInputData(sf.GetOutput()) 1475 else: 1476 geo.SetInputData(cls.dataset) 1477 1478 geo.SetPassThroughCellIds(1) 1479 geo.SetPassThroughPointIds(1) 1480 geo.SetOriginalCellIdsName("OriginalCellIds") 1481 geo.SetOriginalPointIdsName("OriginalPointIds") 1482 geo.SetNonlinearSubdivisionLevel(1) 1483 # geo.MergingOff() # crashes on StructuredGrids 1484 if bounds: 1485 geo.SetExtent(bounds) 1486 geo.ExtentClippingOn() 1487 geo.Update() 1488 msh = vedo.mesh.Mesh(geo.GetOutput()) 1489 msh.pipeline = utils.OperationNode("tomesh", parents=[cls], c="#9e2a2b") 1490 return msh 1491 1492 def signed_distance(cls, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume": 1493 """ 1494 Compute the `Volume` object whose voxels contains the signed distance from 1495 the object. The calling object must have "Normals" defined. 1496 1497 Arguments: 1498 bounds : (list, actor) 1499 bounding box sizes 1500 dims : (list) 1501 dimensions (nr. of voxels) of the output volume. 
1502 invert : (bool) 1503 flip the sign 1504 max_radius : (float) 1505 specify how far out to propagate distance calculation 1506 1507 Examples: 1508 - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py) 1509 1510  1511 """ 1512 if bounds is None: 1513 bounds = cls.bounds() 1514 if max_radius is None: 1515 max_radius = cls.diagonal_size() / 2 1516 dist = vtki.new("SignedDistance") 1517 dist.SetInputData(cls.dataset) 1518 dist.SetRadius(max_radius) 1519 dist.SetBounds(bounds) 1520 dist.SetDimensions(dims) 1521 dist.Update() 1522 img = dist.GetOutput() 1523 if invert: 1524 mat = vtki.new("ImageMathematics") 1525 mat.SetInput1Data(img) 1526 mat.SetOperationToMultiplyByK() 1527 mat.SetConstantK(-1) 1528 mat.Update() 1529 img = mat.GetOutput() 1530 1531 vol = vedo.Volume(img) 1532 vol.name = "SignedDistanceVolume" 1533 vol.pipeline = utils.OperationNode( 1534 "signed_distance", 1535 parents=[cls], 1536 comment=f"dims={tuple(vol.dimensions())}", 1537 c="#e9c46a:#0096c7", 1538 ) 1539 return vol 1540 1541 def unsigned_distance( 1542 cls, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume": 1543 """ 1544 Compute the `Volume` object whose voxels contains the unsigned distance 1545 from the input object. 
1546 """ 1547 dist = vtki.new("UnsignedDistance") 1548 dist.SetInputData(cls.dataset) 1549 dist.SetDimensions(dims) 1550 1551 if len(bounds) == 6: 1552 dist.SetBounds(bounds) 1553 else: 1554 dist.SetBounds(cls.bounds()) 1555 if not max_radius: 1556 max_radius = cls.diagonal_size() / 10 1557 dist.SetRadius(max_radius) 1558 1559 if cls.point_locator: 1560 dist.SetLocator(cls.point_locator) 1561 1562 if cap_value is not None: 1563 dist.CappingOn() 1564 dist.SetCapValue(cap_value) 1565 dist.SetOutputScalarTypeToFloat() 1566 dist.Update() 1567 vol = vedo.Volume(dist.GetOutput()) 1568 vol.name = "UnsignedDistanceVolume" 1569 vol.pipeline = utils.OperationNode( 1570 "unsigned_distance", parents=[cls], c="#e9c46a:#0096c7") 1571 return vol 1572 1573 def smooth_data(cls, 1574 niter=10, relaxation_factor=0.1, strategy=0, mask=None, 1575 mode="distance2", 1576 exclude=("Normals", "TextureCoordinates"), 1577 ) -> Self: 1578 """ 1579 Smooth point attribute data using distance weighted Laplacian kernel. 1580 The effect is to blur regions of high variation and emphasize low variation regions. 1581 1582 A central concept of this method is the point smoothing stencil. 1583 A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. 1584 To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using 1585 the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). 1586 This averaging process is repeated until the maximum number of iterations is reached. 1587 1588 The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. 1589 The a(i+1) attributes are determined from the a(i) attributes as follows: 1590 a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j)) 1591 1592 Convergence occurs faster for larger relaxation factors. 
        Typically a small number of iterations is required for large relaxation factors,
        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
        adequate (i.e., just a distance weighted average is computed).

        Warning:
            Certain data attributes cannot be correctly interpolated.
            For example, surface normals are expected to be |n|=1;
            after attribute smoothing this constraint is likely to be violated.
            Other vectors and tensors may suffer from similar issues.
            In such a situation, specify `exclude=...` which will not be smoothed
            (and simply passed through to the output).
            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging,
            the weights are simply 1/n, where n is the number of connected points in the stencil.
            The smoothing process reduces high frequency information in the data attributes.
            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor)
            important details may be lost, and the attributes will move towards an "average" value.
            While this filter will process any dataset type, if the input data is a 3D image volume,
            it's likely much faster to use an image-based algorithm to perform data smoothing.
            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
            (and hence the associated points defining the edge).

        Arguments:
            niter : (int)
                number of iterations
            relaxation_factor : (float)
                relaxation factor controlling the amount of Laplacian smoothing applied
            strategy : (int)
                strategy to use for Laplacian smoothing

                - 0: use all points, all point data attributes are smoothed
                - 1: smooth all point attribute data except those on the boundary
                - 2: only point data connected to a boundary point are smoothed

            mask : (str, np.ndarray)
                array to be used as a mask (ignore then the strategy keyword)
            mode : (str)
                smoothing mode, either "distance2", "distance" or "average"

                - distance**2 weighted (i.e., 1/r**2 interpolation weights)
                - distance weighted (i.e., 1/r) approach;
                - simple average of all connected points in the stencil

            exclude : (list)
                list of arrays to be excluded from smoothing
        """
        saf = vtki.new("AttributeSmoothingFilter")
        saf.SetInputData(cls.dataset)
        saf.SetRelaxationFactor(relaxation_factor)
        saf.SetNumberOfIterations(niter)

        for ex in exclude:
            saf.AddExcludedArray(ex)

        if mode == "distance":
            saf.SetWeightsTypeToDistance()
        elif mode == "distance2":
            saf.SetWeightsTypeToDistance2()
        elif mode == "average":
            saf.SetWeightsTypeToAverage()
        else:
            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
            raise TypeError

        saf.SetSmoothingStrategy(strategy)
        if mask is not None:
            # an explicit mask overrides the strategy setting
            saf.SetSmoothingStrategyToSmoothingMask()
            if isinstance(mask, str):
                # mask given by name: look it up among the point data arrays
                mask_ = cls.dataset.GetPointData().GetArray(mask)
                if not mask_:
                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
                    return cls
                mask_array = vtki.vtkUnsignedCharArray()
                mask_array.ShallowCopy(mask_)
                mask_array.SetName(mask_.GetName())
            else:
                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
            saf.SetSmoothingMask(mask_array)

        saf.Update()

        cls._update(saf.GetOutput())
        cls.pipeline = utils.OperationNode(
            "smooth_data", comment=f"strategy {strategy}", parents=[cls], c="#9e2a2b"
        )
        return cls

    def compute_streamlines(
        cls,
        seeds: Any,
        integrator="rk4",
        direction="forward",
        initial_step_size=None,
        max_propagation=None,
        max_steps=10000,
        step_length=0,
        surface_constrained=False,
        compute_vorticity=False,
    ) -> Union["vedo.Lines", None]:
        """
        Integrate a vector field to generate streamlines.

        Arguments:
            seeds : (Mesh, Points, list)
                starting points of the streamlines
            integrator : (str)
                type of integration method to be used:

                - "rk2" (Runge-Kutta 2)
                - "rk4" (Runge-Kutta 4)
                - "rk45" (Runge-Kutta 45)

            direction : (str)
                direction of integration, either "forward", "backward" or "both"
            initial_step_size : (float)
                initial step size used for line integration
            max_propagation : (float)
                maximum length of a streamline expressed in absolute units
            max_steps : (int)
                maximum number of steps for a streamline
            step_length : (float)
                maximum length of a step expressed in absolute units
            surface_constrained : (bool)
                whether to stop integrating when the streamline leaves the surface
            compute_vorticity : (bool)
                whether to compute the vorticity at each streamline point
        """
        b = cls.dataset.GetBounds()
        # mean bounding-box extent, used to derive sensible defaults
        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
        if initial_step_size is None:
            initial_step_size = size / 1000.0

        if max_propagation is None:
            max_propagation = size * 2

        if utils.is_sequence(seeds):
            seeds = vedo.Points(seeds)

        sti = vtki.new("StreamTracer")
        sti.SetSourceData(seeds.dataset)
        if isinstance(cls, vedo.RectilinearGrid):
            # vtkStreamTracer needs an unstructured grid in this case
            sti.SetInputData(vedo.UnstructuredGrid(cls.dataset).dataset)
        else:
            sti.SetInputDataObject(cls.dataset)

        sti.SetInitialIntegrationStep(initial_step_size)
        sti.SetComputeVorticity(compute_vorticity)
        sti.SetMaximumNumberOfSteps(max_steps)
        sti.SetMaximumPropagation(max_propagation)
        sti.SetSurfaceStreamlines(surface_constrained)
        if step_length:
            sti.SetMaximumIntegrationStep(step_length)

        if "for" in direction:
            sti.SetIntegrationDirectionToForward()
        elif "back" in direction:
            sti.SetIntegrationDirectionToBackward()
        elif "both" in direction:
            sti.SetIntegrationDirectionToBoth()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
            return None

        if integrator == "rk2":
            sti.SetIntegratorTypeToRungeKutta2()
        elif integrator == "rk4":
            sti.SetIntegratorTypeToRungeKutta4()
        elif integrator == "rk45":
            sti.SetIntegratorTypeToRungeKutta45()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
            return None

        sti.Update()

        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
        stlines.name = "StreamLines"
        cls.pipeline = utils.OperationNode(
            "compute_streamlines", comment=f"{integrator}", parents=[cls, seeds], c="#9e2a2b"
        )
        return stlines
Common algorithms.
@property
def pointdata(cls):
    """
    Access the data arrays associated to the points (vertices) of the object.

    The returned helper can be indexed either with a string name or with
    an integer position, e.g. `myobj.pointdata["arrayname"]`.

    Usage:

        `myobj.pointdata.keys()` to return the available data array names

        `myobj.pointdata.select(name)` to make this array the active one

        `myobj.pointdata.remove(name)` to remove this array
    """
    # association 0 = point data
    helper = DataArrayHelper(cls, 0)
    return helper
Create and/or return a numpy.array
associated to points (vertices).
A data array can be indexed either as a string or by an integer number.
E.g.: myobj.pointdata["arrayname"]
Usage:
myobj.pointdata.keys()
to return the available data array names
myobj.pointdata.select(name)
to make this array the active one
myobj.pointdata.remove(name)
to remove this array
@property
def celldata(cls):
    """
    Access the data arrays associated to the cells (faces) of the object.

    The returned helper can be indexed either with a string name or with
    an integer position, e.g. `myobj.celldata["arrayname"]`.

    Usage:

        `myobj.celldata.keys()` to return the available data array names

        `myobj.celldata.select(name)` to make this array the active one

        `myobj.celldata.remove(name)` to remove this array
    """
    # association 1 = cell data
    helper = DataArrayHelper(cls, 1)
    return helper
Create and/or return a numpy.array
associated to cells (faces).
A data array can be indexed either as a string or by an integer number.
E.g.: myobj.celldata["arrayname"]
Usage:
myobj.celldata.keys()
to return the available data array names
myobj.celldata.select(name)
to make this array the active one
myobj.celldata.remove(name)
to remove this array
@property
def metadata(cls):
    """
    Create and/or return a `numpy.array` associated to neither cells nor faces.
    A data array can be indexed either as a string or by an integer number.
    E.g.: `myobj.metadata["arrayname"]`

    Usage:

        `myobj.metadata.keys()` to return the available data array names

        `myobj.metadata.select(name)` to make this array the active one

        `myobj.metadata.remove(name)` to remove this array
    """
    # association 2 selects the field-data (metadata) branch of the helper
    helper = DataArrayHelper(cls, 2)
    return helper
Create and/or return a numpy.array
associated to neither cells nor faces.
A data array can be indexed either as a string or by an integer number.
E.g.: myobj.metadata["arrayname"]
Usage:
myobj.metadata.keys()
to return the available data array names
myobj.metadata.select(name)
to make this array the active one
myobj.metadata.remove(name)
to remove this array
439 def rename(cls, newname: str) -> Self: 440 """Rename the object""" 441 try: 442 cls.name = newname 443 except AttributeError: 444 vedo.logger.error(f"Cannot rename object {cls}") 445 return cls
Rename the object
def memory_address(cls) -> int:
    """
    Return a unique memory address integer which may serve as the ID of the
    object, or passed to c++ code.
    """
    # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
    # https://github.com/tfmoraes/polydata_connectivity
    # GetAddressAsString("") yields something like "Addr=0x7f...":
    # skip the 5-char "Addr=" prefix and parse the rest as hexadecimal.
    return int(cls.dataset.GetAddressAsString("")[5:], 16)
Return a unique memory address integer which may serve as the ID of the object, or passed to c++ code.
def memory_size(cls) -> int:
    """Return the size in bytes of the object in memory."""
    # Delegates to VTK's own accounting.
    # NOTE(review): VTK documents GetActualMemorySize() as returning kibibytes,
    # not bytes — confirm the intended unit against the VTK version in use.
    return cls.dataset.GetActualMemorySize()
Return the size in bytes of the object in memory.
def modified(cls) -> Self:
    """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
    point_data = cls.dataset.GetPointData()
    point_data.Modified()
    # also flag the active scalars, if any are present
    scalars = point_data.GetScalars()
    if scalars:
        scalars.Modified()
    return cls
Use in conjunction with tonumpy()
to update any modifications to the image array.
def box(cls, scale=1, padding=0) -> "vedo.Mesh":
    """
    Return the bounding box as a new `Mesh` object.

    Arguments:
        scale : (float)
            box size can be scaled by a factor
        padding : (float, list)
            a constant padding can be added (can be a list `[padx,pady,padz]`)
    """
    b = cls.bounds()
    # allow a scalar padding: broadcast it to the three axes
    if not utils.is_sequence(padding):
        padding = [padding, padding, padding]
    length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
    # tiny z-offset so the box does not z-fight with flat content
    tol = (length + width + height) / 30000  # useful for boxing text
    pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
    bx = vedo.shapes.Box(
        pos,
        length * scale + padding[0],
        width * scale + padding[1],
        height * scale + padding[2],
        c="gray",
    )
    try:
        # carry over the visual properties of the source object, when it has any
        pr = vtki.vtkProperty()
        pr.DeepCopy(cls.properties)
        bx.actor.SetProperty(pr)
        bx.properties = pr
    except (AttributeError, TypeError):
        pass
    bx.flat().lighting("off").wireframe(True)
    return bx
Return the bounding box as a new Mesh
object.
Arguments:
- scale : (float) box size can be scaled by a factor
- padding : (float, list)
a constant padding can be added (can be a list
[padx,pady,padz]
)
def update_dataset(cls, dataset, **kwargs) -> Self:
    """Update the dataset of the object with the provided VTK dataset."""
    # delegate to the object's internal _update; extra kwargs are forwarded as-is
    cls._update(dataset, **kwargs)
    return cls
Update the dataset of the object with the provided VTK dataset.
def bounds(cls) -> np.ndarray:
    """
    Get the object bounds.
    Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
    """
    try:
        # computing from the vertex array is very slow for large meshes
        pts = cls.vertices
        lo = np.nanmin(pts, axis=0)
        hi = np.nanmax(pts, axis=0)
        return np.array([lo[0], hi[0], lo[1], hi[1], lo[2], hi[2]])
    except (AttributeError, ValueError):
        # fall back to the dataset's own (cached) bounds
        return np.array(cls.dataset.GetBounds())
Get the object bounds.
Returns a list in format [xmin,xmax, ymin,ymax, zmin,zmax]
.
def xbounds(cls, i=None) -> np.ndarray:
    """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
    bnds = cls.bounds()
    # with an index, return the single scalar bound; otherwise the pair
    return bnds[i] if i is not None else np.array([bnds[0], bnds[1]])
Get the bounds [xmin,xmax]
. Can specify upper or lower with i (0,1).
def ybounds(cls, i=None) -> np.ndarray:
    """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
    bnds = cls.bounds()
    # y-bounds live at slots 2 and 3 of the 6-tuple
    if i in (0, 1):
        return bnds[2 + i]
    return np.array([bnds[2], bnds[3]])
Get the bounds [ymin,ymax]
. Can specify upper or lower with i (0,1).
def zbounds(cls, i=None) -> np.ndarray:
    """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
    bnds = cls.bounds()
    # z-bounds live at slots 4 and 5 of the 6-tuple
    if i in (0, 1):
        return bnds[4 + i]
    return np.array([bnds[4], bnds[5]])
Get the bounds [zmin,zmax]
. Can specify upper or lower with i (0,1).
def diagonal_size(cls) -> float:
    """Get the length of the diagonal of the bounding box."""
    b = cls.bounds()
    extents = np.array([b[1] - b[0], b[3] - b[2], b[5] - b[4]])
    return np.linalg.norm(extents)
Get the length of the diagonal of the bounding box.
def average_size(cls) -> float:
    """
    Calculate and return the average size of the object.
    This is the mean of the vertex distances from the center of mass.
    Returns 0.0 for an object with no vertices.
    """
    coords = cls.vertices
    # guard BEFORE computing the mean: np.mean on an empty array emits a
    # RuntimeWarning and produces nan (the original code computed cm first)
    if coords.shape[0] == 0:
        return 0.0
    cm = np.mean(coords, axis=0)
    cc = coords - cm
    return np.mean(np.linalg.norm(cc, axis=1))
Calculate and return the average size of the object. This is the mean of the vertex distances from the center of mass.
def center_of_mass(cls) -> np.ndarray:
    """Get the center of mass of the object."""
    # grids/volumes: use the geometric center of the dataset directly
    if isinstance(cls, (vedo.RectilinearGrid, vedo.Volume)):
        return np.array(cls.dataset.GetCenter())
    # meshes/points: let VTK's CenterOfMass filter do the averaging
    cmf = vtki.new("CenterOfMass")
    cmf.SetInputData(cls.dataset)
    cmf.Update()
    c = cmf.GetCenter()
    return np.array(c)
Get the center of mass of the object.
def copy_data_from(cls, obj: Any) -> Self:
    """Copy all data (point and cell data) from this input object"""
    # PassData shares the arrays of `obj` with this object's dataset
    cls.dataset.GetPointData().PassData(obj.dataset.GetPointData())
    cls.dataset.GetCellData().PassData(obj.dataset.GetCellData())
    # record the operation in the provenance pipeline graph
    cls.pipeline = utils.OperationNode(
        "copy_data_from",
        parents=[cls, obj],
        comment=f"{obj.__class__.__name__}",
        shape="note",
        c="#ccc5b9",
    )
    return cls
Copy all data (point and cell data) from this input object
def inputdata(cls):
    """Obsolete, use `.dataset` instead."""
    # deprecation shim kept for backward compatibility: warn, then delegate
    colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y")
    return cls.dataset
Obsolete, use .dataset
instead.
@property
def npoints(cls):
    """Retrieve the number of points (or vertices)."""
    return cls.dataset.GetNumberOfPoints()
Retrieve the number of points (or vertices).
@property
def nvertices(cls):
    """Retrieve the number of vertices (or points). Alias of `npoints`."""
    return cls.dataset.GetNumberOfPoints()
Retrieve the number of vertices (or points).
@property
def ncells(cls):
    """Retrieve the number of cells."""
    return cls.dataset.GetNumberOfCells()
Retrieve the number of cells.
def cell_centers(cls, copy_arrays=False) -> "vedo.Points":
    """
    Get the coordinates of the cell centers as a `Points` object.

    If `copy_arrays` is True, the data arrays of the input object are copied
    to the output points and its visual properties are carried over.

    Examples:
        - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)
    """
    vcen = vtki.new("CellCenters")
    # copy_arrays controls both data-array copying and vertex-cell generation
    vcen.SetCopyArrays(copy_arrays)
    vcen.SetVertexCells(copy_arrays)
    vcen.SetInputData(cls.dataset)
    vcen.Update()
    vpts = vedo.Points(vcen.GetOutput())
    if copy_arrays:
        vpts.copy_properties_from(cls)
    return vpts
@property
def lines(cls):
    """
    Get lines connectivity ids as a python array
    formatted as `[[id0,id1], [id3,id4], ...]`

    See also: `lines_as_flat_array()`.
    """
    # VTK stores line connectivity as one flat array:
    # [nids1, id0 ... idn, nids2, id0 ... idm, ...]
    try:
        flat = utils.vtk2numpy(cls.dataset.GetLines().GetData())
    except AttributeError:
        return np.array([])
    conn = []
    i, n = 0, len(flat)
    while i < n:
        npts = flat[i]
        conn.append([flat[i + 1 + k] for k in range(npts)])
        i += npts + 1
    return conn  # cannot always make a numpy array of it!
Get lines connectivity ids as a python array
formatted as [[id0,id1], [id3,id4], ...]
See also: lines_as_flat_array()
.
@property
def lines_as_flat_array(cls):
    """
    Get lines connectivity ids as a 1D numpy array.
    Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...]

    See also: `lines()`.
    """
    # datasets without line cells raise AttributeError -> empty array
    try:
        vtk_lines = cls.dataset.GetLines()
    except AttributeError:
        return np.array([])
    return utils.vtk2numpy(vtk_lines.GetData())
Get lines connectivity ids as a 1D numpy array. Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...]
See also: lines()
.
def mark_boundaries(cls) -> Self:
    """
    Mark cells and vertices if they lie on a boundary.
    A new array called `BoundaryCells` is added to the object.
    """
    bfilter = vtki.new("MarkBoundaryFilter")
    bfilter.SetInputData(cls.dataset)
    bfilter.Update()
    # overwrite the dataset in place with the marked copy
    cls.dataset.DeepCopy(bfilter.GetOutput())
    cls.pipeline = utils.OperationNode("mark_boundaries", parents=[cls])
    return cls
Mark cells and vertices if they lie on a boundary.
A new array called BoundaryCells
is added to the object.
def find_cells_in_bounds(cls, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
    """
    Find cells that are within the specified bounds.
    """
    # a vedo object may be passed directly as xbounds: use its bounding box
    try:
        xbounds = list(xbounds.bounds())
    except AttributeError:
        pass

    if len(xbounds) == 6:
        bnds = xbounds
    else:
        # start from the object's own bounds and override per-axis pairs
        bnds = list(cls.bounds())
        if len(xbounds) == 2:
            bnds[0], bnds[1] = xbounds
        if len(ybounds) == 2:
            bnds[2], bnds[3] = ybounds
        if len(zbounds) == 2:
            bnds[4], bnds[5] = zbounds

    cell_ids = vtki.vtkIdList()
    # build the locator lazily and cache it on the object
    if not cls.cell_locator:
        cls.cell_locator = vtki.new("CellTreeLocator")
        cls.cell_locator.SetDataSet(cls.dataset)
        cls.cell_locator.BuildLocator()
    cls.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
    return np.array([cell_ids.GetId(k) for k in range(cell_ids.GetNumberOfIds())])
Find cells that are within the specified bounds.
def find_cells_along_line(cls, p0, p1, tol=0.001) -> np.ndarray:
    """
    Find cells that are intersected by a line segment.
    """
    cell_ids = vtki.vtkIdList()
    # build the locator lazily and cache it on the object
    if not cls.cell_locator:
        locator = vtki.new("CellTreeLocator")
        locator.SetDataSet(cls.dataset)
        locator.BuildLocator()
        cls.cell_locator = locator
    cls.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
    return np.array([cell_ids.GetId(k) for k in range(cell_ids.GetNumberOfIds())])
Find cells that are intersected by a line segment.
def find_cells_along_plane(cls, origin, normal, tol=0.001) -> np.ndarray:
    """
    Find cells that are intersected by a plane.
    """
    cell_ids = vtki.vtkIdList()
    # build the locator lazily and cache it on the object
    if not cls.cell_locator:
        locator = vtki.new("CellTreeLocator")
        locator.SetDataSet(cls.dataset)
        locator.BuildLocator()
        cls.cell_locator = locator
    cls.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
    return np.array([cell_ids.GetId(k) for k in range(cell_ids.GetNumberOfIds())])
Find cells that are intersected by a plane.
def keep_cell_types(cls, types=()):
    """
    Extract cells of a specific type.

    Check the VTK cell types here:
    https://vtk.org/doc/nightly/html/vtkCellType_8h.html
    """
    fe = vtki.new("ExtractCellsByType")
    fe.SetInputData(cls.dataset)
    for t in types:
        try:
            if utils.is_integer(t):
                # numeric VTK cell-type id, use as-is
                it = t
            else:
                # assume a cell-type name, e.g. "triangle"
                it = vtki.cell_types[t.upper()]
            # NOTE(review): a non-int, non-str entry raises AttributeError on
            # t.upper(), which is NOT caught here (only KeyError is) — confirm intended
        except KeyError:
            vedo.logger.error(f"Cell type '{t}' not recognized")
            continue
        fe.AddCellType(it)
    fe.Update()
    cls._update(fe.GetOutput())
    return cls
Extract cells of a specific type.
Check the VTK cell types here: https://vtk.org/doc/nightly/html/vtkCellType_8h.html
def map_cells_to_points(cls, arrays=(), move=False) -> Self:
    """
    Interpolate cell data (i.e., data specified per cell or face)
    into point data (i.e., data specified at each vertex).
    The method of transformation is based on averaging the data values
    of all cells using a particular point.

    A custom list of arrays to be mapped can be passed in input.

    Set `move=True` to delete the original `celldata` array.
    """
    c2p = vtki.new("CellDataToPointData")
    c2p.SetInputData(cls.dataset)
    if not move:
        # keep the original cell arrays in addition to the new point arrays
        c2p.PassCellDataOn()
    if arrays:
        # restrict the conversion to the requested arrays only
        c2p.ClearCellDataArrays()
        c2p.ProcessAllArraysOff()
        for arr in arrays:
            c2p.AddCellDataArray(arr)
    else:
        c2p.ProcessAllArraysOn()
    c2p.Update()
    cls._update(c2p.GetOutput(), reset_locators=False)
    # make the freshly created point data the one used for rendering
    cls.mapper.SetScalarModeToUsePointData()
    cls.pipeline = utils.OperationNode("map_cells_to_points", parents=[cls])
    return cls
Interpolate cell data (i.e., data specified per cell or face) into point data (i.e., data specified at each vertex). The method of transformation is based on averaging the data values of all cells using a particular point.
A custom list of arrays to be mapped can be passed in input.
Set move=True
to delete the original celldata
array.
@property
def vertices(cls):
    """Return the vertices (points) coordinates."""
    # the way to reach the coordinates depends on the concrete dataset type,
    # so try the cheapest access first and fall through on failure
    try:
        # for polydata and unstructured grid
        varr = cls.dataset.GetPoints().GetData()
    except (AttributeError, TypeError):
        try:
            # for RectilinearGrid, StructuredGrid
            vpts = vtki.vtkPoints()
            cls.dataset.GetPoints(vpts)
            varr = vpts.GetData()
        except (AttributeError, TypeError):
            try:
                # for ImageData
                v2p = vtki.new("ImageToPoints")
                v2p.SetInputData(cls.dataset)
                v2p.Update()
                varr = v2p.GetOutput().GetPoints().GetData()
            except AttributeError:
                # no recognizable point container: return an empty array
                return np.array([])

    return utils.vtk2numpy(varr)
Return the vertices (points) coordinates.
@property
def points(cls):
    """Return the vertices (points) coordinates. Same as `vertices`."""
    return cls.vertices
Return the vertices (points) coordinates. Same as vertices
.
@property
def coordinates(cls):
    """Return the vertices (points) coordinates. Same as `vertices`."""
    return cls.vertices
Return the vertices (points) coordinates. Same as vertices
.
@property
def cells_as_flat_array(cls):
    """
    Get cell connectivity ids as a 1D numpy array.
    Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...]
    """
    try:
        # unstructured grids expose GetCells()
        cell_data = cls.dataset.GetCells().GetData()
    except AttributeError:
        # polydata stores its faces in GetPolys()
        cell_data = cls.dataset.GetPolys().GetData()
    return utils.vtk2numpy(cell_data)
Get cell connectivity ids as a 1D numpy array. Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...]
@property
def cells(cls):
    """
    Get the cells connectivity ids as a numpy array.

    The output format is: `[[id0 ... idn], [id0 ... idm], etc]`.
    """
    try:
        # valid for unstructured grid
        arr1d = utils.vtk2numpy(cls.dataset.GetCells().GetData())
    except AttributeError:
        try:
            # valid for polydata
            arr1d = utils.vtk2numpy(cls.dataset.GetPolys().GetData())
        except AttributeError:
            vedo.logger.warning(f"Cannot get cells for {type(cls)}")
            return np.array([])

    # the flat vtk connectivity format is:
    # [nids1, id0 ... idn, nids2, id0 ... idm, ...]
    conn = []
    i, n = 0, len(arr1d)
    while i < n:
        npts = arr1d[i]
        conn.append([arr1d[i + k] for k in range(1, npts + 1)])
        i += npts + 1
    return conn
Get the cells connectivity ids as a numpy array.
The output format is: [[id0 ... idn], [id0 ... idm], etc]
.
def cell_edge_neighbors(cls):
    """
    Get the cell neighbor indices of each cell.

    Returns a python list of lists.
    """

    def _edges_of(face):
        # consecutive vertex pairs, wrapping around to close the polygon
        size = len(face)
        return [[face[k], face[(k + 1) % size]] for k in range(size)]

    pd = cls.dataset
    # BuildLinks is required before querying edge neighbors
    pd.BuildLinks()

    neicells = []
    for idx, cell in enumerate(cls.cells):
        nn = []
        for e0, e1 in _edges_of(cell):
            neighbors = vtki.vtkIdList()
            pd.GetCellEdgeNeighbors(idx, e0, e1, neighbors)
            # keep only the first neighbor sharing this edge, as the original does
            if neighbors.GetNumberOfIds() > 0:
                nn.append(neighbors.GetId(0))
        neicells.append(nn)

    return neicells
Get the cell neighbor indices of each cell.
Returns a python list of lists.
def map_points_to_cells(cls, arrays=(), move=False) -> Self:
    """
    Interpolate point data (i.e., data specified per point or vertex)
    into cell data (i.e., data specified per cell).
    The method of transformation is based on averaging the data values
    of all points defining a particular cell.

    A custom list of arrays to be mapped can be passed in input.

    Set `move=True` to delete the original `pointdata` array.

    Examples:
        - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
    """
    p2c = vtki.new("PointDataToCellData")
    p2c.SetInputData(cls.dataset)
    if not move:
        # keep the original point arrays in addition to the new cell arrays
        p2c.PassPointDataOn()
    if arrays:
        # restrict the conversion to the requested arrays only
        p2c.ClearPointDataArrays()
        p2c.ProcessAllArraysOff()
        for arr in arrays:
            p2c.AddPointDataArray(arr)
    else:
        p2c.ProcessAllArraysOn()
    p2c.Update()
    cls._update(p2c.GetOutput(), reset_locators=False)
    # make the freshly created cell data the one used for rendering
    cls.mapper.SetScalarModeToUseCellData()
    cls.pipeline = utils.OperationNode("map_points_to_cells", parents=[cls])
    return cls
Interpolate point data (i.e., data specified per point or vertex) into cell data (i.e., data specified per cell). The method of transformation is based on averaging the data values of all points defining a particular cell.
A custom list of arrays to be mapped can be passed in input.
Set move=True
to delete the original pointdata
array.
Examples:
def resample_data_from(cls, source, tol=None, categorical=False) -> Self:
    """
    Resample point and cell data from another dataset.
    The output has the same structure but its point data have
    the resampled values from target.

    Use `tol` to set the tolerance used to compute whether
    a point in the source is in a cell of the current object.
    Points without resampled values, and their cells, are marked as blank.
    If the data is categorical, then the resulting data will be determined
    by a nearest neighbor interpolation scheme.

    Example:
    ```python
    from vedo import *
    m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
    pts = m1.coordinates
    ces = m1.cell_centers().coordinates
    m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
    m1.celldata["yvalues"] = np.power(ces[:,1], 3)
    m2 = Mesh(dataurl+'bunny.obj')
    m2.resample_data_from(m1)
    # print(m2.pointdata["xvalues"])
    show(m1, m2 , N=2, axes=1)
    ```
    """
    rs = vtki.new("ResampleWithDataSet")
    rs.SetInputData(cls.dataset)
    rs.SetSourceData(source.dataset)

    # carry every kind of array across: point, cell and field data
    rs.SetPassPointArrays(True)
    rs.SetPassCellArrays(True)
    rs.SetPassFieldArrays(True)
    rs.SetCategoricalData(categorical)

    # let VTK estimate the tolerance unless an explicit one is given
    rs.SetComputeTolerance(True)
    if tol:
        rs.SetComputeTolerance(False)
        rs.SetTolerance(tol)
    rs.Update()
    cls._update(rs.GetOutput(), reset_locators=False)
    cls.pipeline = utils.OperationNode(
        "resample_data_from",
        comment=f"{source.__class__.__name__}",
        parents=[cls, source],
    )
    return cls
Resample point and cell data from another dataset. The output has the same structure but its point data have the resampled values from target.
Use tol
to set the tolerance used to compute whether
a point in the source is in a cell of the current object.
Points without resampled values, and their cells, are marked as blank.
If the data is categorical, then the resulting data will be determined
by a nearest neighbor interpolation scheme.
Example:
from vedo import *
m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
pts = m1.coordinates
ces = m1.cell_centers().coordinates
m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
m1.celldata["yvalues"] = np.power(ces[:,1], 3)
m2 = Mesh(dataurl+'bunny.obj')
m2.resample_data_from(m1)
# print(m2.pointdata["xvalues"])
show(m1, m2 , N=2, axes=1)
def interpolate_data_from(
    cls,
    source,
    radius=None,
    n=None,
    kernel="shepard",
    exclude=("Normals",),
    on="points",
    null_strategy=1,
    null_value=0,
) -> Self:
    """
    Interpolate over source to port its data onto the current object using various kernels.

    If n (number of closest points to use) is set then radius value is ignored.

    Check out also:
        `probe()` which in many cases can be faster.

    Arguments:
        kernel : (str)
            available kernels are [shepard, gaussian, linear]
        null_strategy : (int)
            specify a strategy to use when encountering a "null" point
            during the interpolation process. Null points occur when the local neighborhood
            (of nearby points to interpolate from) is empty.

            - Case 0: an output array is created that marks points
              as being valid (=1) or null (invalid =0), and the null_value is set as well
            - Case 1: the output data value(s) are set to the provided null_value
            - Case 2: simply use the closest point to perform the interpolation.
        null_value : (float)
            see above.

    Examples:
        - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
        - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
        - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
        - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
    """
    if radius is None and not n:
        vedo.logger.error("in interpolate_data_from(): please set either radius or n")
        raise RuntimeError

    if on == "points":
        points = source.dataset
    elif on == "cells":
        # promote the source's cell data to point data before interpolating
        c2p = vtki.new("CellDataToPointData")
        c2p.SetInputData(source.dataset)
        c2p.Update()
        points = c2p.GetOutput()
    else:
        vedo.logger.error("in interpolate_data_from(), on must be on points or cells")
        raise RuntimeError()

    locator = vtki.new("PointLocator")
    locator.SetDataSet(points)
    locator.BuildLocator()

    if kernel.lower() == "shepard":
        kern = vtki.new("ShepardKernel")
        kern.SetPowerParameter(2)
    elif kernel.lower() == "gaussian":
        kern = vtki.new("GaussianKernel")
        kern.SetSharpness(2)
    elif kernel.lower() == "linear":
        kern = vtki.new("LinearKernel")
    else:
        vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
        raise RuntimeError()

    # footprint selection: n closest points wins over radius when both are set
    if n:
        kern.SetNumberOfPoints(n)
        kern.SetKernelFootprintToNClosest()
    else:
        kern.SetRadius(radius)
        kern.SetKernelFootprintToRadius()

    # remove arrays that are already present in cls dataset
    # this is because the interpolator will ignore them otherwise
    clsnames = []
    for i in range(cls.dataset.GetPointData().GetNumberOfArrays()):
        name = cls.dataset.GetPointData().GetArrayName(i)
        clsnames.append(name)

    pointsnames = []
    for i in range(points.GetPointData().GetNumberOfArrays()):
        name = points.GetPointData().GetArrayName(i)
        pointsnames.append(name)

    for cname in clsnames:
        if cname in set(pointsnames) - set(exclude):
            cls.dataset.GetPointData().RemoveArray(cname)
            # print(f"Removed {cname} from cls dataset")

    interpolator = vtki.new("PointInterpolator")
    interpolator.SetInputData(cls.dataset)
    interpolator.SetSourceData(points)
    interpolator.SetKernel(kern)
    interpolator.SetLocator(locator)
    interpolator.PassFieldArraysOn()
    interpolator.SetNullPointsStrategy(null_strategy)
    interpolator.SetNullValue(null_value)
    interpolator.SetValidPointsMaskArrayName("ValidPointMask")
    for ex in exclude:
        interpolator.AddExcludedArray(ex)

    # remove arrays that are already present in the source
    # this is because the interpolator will ignore them otherwise
    # NOTE(review): this loop removes EVERY non-excluded point array from cls,
    # not only those present in the source — confirm this is the intended behavior
    for i in range(cls.dataset.GetPointData().GetNumberOfArrays()):
        name = cls.dataset.GetPointData().GetArrayName(i)
        if name not in exclude:
            cls.dataset.GetPointData().RemoveArray(name)

    interpolator.Update()

    if on == "cells":
        # bring the interpolated point data back onto the cells
        p2c = vtki.new("PointDataToCellData")
        p2c.SetInputData(interpolator.GetOutput())
        p2c.Update()
        cpoly = p2c.GetOutput()
    else:
        cpoly = interpolator.GetOutput()

    cls._update(cpoly, reset_locators=False)

    cls.pipeline = utils.OperationNode("interpolate_data_from", parents=[cls, source])
    return cls
Interpolate over source to port its data onto the current object using various kernels.
If n (number of closest points to use) is set then radius value is ignored.
Check out also:
probe()
which in many cases can be faster.
Arguments:
- kernel : (str) available kernels are [shepard, gaussian, linear]
null_strategy : (int) specify a strategy to use when encountering a "null" point during the interpolation process. Null points occur when the local neighborhood (of nearby points to interpolate from) is empty.
- Case 0: an output array is created that marks points as being valid (=1) or null (invalid =0), and the null_value is set as well
- Case 1: the output data value(s) are set to the provided null_value
- Case 2: simply use the closest point to perform the interpolation.
- null_value : (float) see above.
Examples:
def add_ids(cls) -> Self:
    """
    Generate point and cell ids arrays.

    Two new arrays are added to the mesh: `PointID` and `CellID`.
    """
    idfilter = vtki.new("IdFilter")
    idfilter.SetInputData(cls.dataset)
    # generate ids for points and cells, but not for field data
    idfilter.PointIdsOn()
    idfilter.CellIdsOn()
    idfilter.FieldDataOff()
    idfilter.SetPointIdsArrayName("PointID")
    idfilter.SetCellIdsArrayName("CellID")
    idfilter.Update()
    cls._update(idfilter.GetOutput(), reset_locators=False)
    cls.pipeline = utils.OperationNode("add_ids", parents=[cls])
    return cls
Generate point and cell ids arrays.
Two new arrays are added to the mesh: PointID
and CellID
.
def gradient(cls, input_array=None, on="points", fast=False) -> np.ndarray:
    """
    Compute and return the gradient of the active scalar field as a numpy array.

    Arguments:
        input_array : (str)
            array of the scalars to compute the gradient,
            if None the current active array is selected
        on : (str)
            compute either on 'points' or 'cells' data
        fast : (bool)
            if True, will use a less accurate algorithm
            that performs fewer derivative calculations (and is therefore faster).

    Examples:
        - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
    """
    gra = vtki.new("GradientFilter")
    # select the data association ('points' or 'cells') the filter works on
    if on.startswith("p"):
        varr = cls.dataset.GetPointData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
    elif on.startswith("c"):
        varr = cls.dataset.GetCellData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
    else:
        vedo.logger.error(f"in gradient: unknown option {on}")
        raise RuntimeError

    # default to the currently active scalar array
    if input_array is None:
        if varr.GetScalars():
            input_array = varr.GetScalars().GetName()
        else:
            vedo.logger.error(f"in gradient: no scalars found for {on}")
            raise RuntimeError

    gra.SetInputData(cls.dataset)
    gra.SetInputScalars(tp, input_array)
    gra.SetResultArrayName("Gradient")
    gra.SetFasterApproximation(fast)
    # only the gradient is requested; divergence/vorticity have dedicated methods
    gra.ComputeDivergenceOff()
    gra.ComputeVorticityOff()
    gra.ComputeGradientOn()
    gra.Update()
    # cls._update(gra.GetOutput(), reset_locators=False)
    if on.startswith("p"):
        gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
    else:
        gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
    return gvecs
Compute and return the gradient of the active scalar field as a numpy array.
Arguments:
- input_array : (str) array of the scalars to compute the gradient, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
Examples:
def divergence(cls, array_name=None, on="points", fast=False) -> np.ndarray:
    """
    Compute and return the divergence of a vector field as a numpy array.

    Arguments:
        array_name : (str)
            name of the array of vectors to compute the divergence,
            if None the current active array is selected
        on : (str)
            compute either on 'points' or 'cells' data
        fast : (bool)
            if True, will use a less accurate algorithm
            that performs fewer derivative calculations (and is therefore faster).
    """
    div = vtki.new("GradientFilter")
    # select the data association ('points' or 'cells') the filter works on
    if on.startswith("p"):
        varr = cls.dataset.GetPointData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
    elif on.startswith("c"):
        varr = cls.dataset.GetCellData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
    else:
        vedo.logger.error(f"in divergence(): unknown option {on}")
        raise RuntimeError

    # default to the currently active vector array
    if array_name is None:
        if varr.GetVectors():
            array_name = varr.GetVectors().GetName()
        else:
            vedo.logger.error(f"in divergence(): no vectors found for {on}")
            raise RuntimeError

    div.SetInputData(cls.dataset)
    div.SetInputScalars(tp, array_name)
    # only divergence is requested here
    div.ComputeDivergenceOn()
    div.ComputeGradientOff()
    div.ComputeVorticityOff()
    div.SetDivergenceArrayName("Divergence")
    div.SetFasterApproximation(fast)
    div.Update()
    # cls._update(div.GetOutput(), reset_locators=False)
    if on.startswith("p"):
        dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
    else:
        dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
    return dvecs
Compute and return the divergence of a vector field as a numpy array.
Arguments:
- array_name : (str) name of the array of vectors to compute the divergence, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
def vorticity(cls, array_name=None, on="points", fast=False) -> np.ndarray:
    """
    Compute and return the vorticity of a vector field as a numpy array.

    Arguments:
        array_name : (str)
            name of the array to compute the vorticity,
            if None the current active array is selected
        on : (str)
            compute either on 'points' or 'cells' data
        fast : (bool)
            if True, will use a less accurate algorithm
            that performs fewer derivative calculations (and is therefore faster).
    """
    vort = vtki.new("GradientFilter")
    # select the data association ('points' or 'cells') the filter works on
    if on.startswith("p"):
        varr = cls.dataset.GetPointData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
    elif on.startswith("c"):
        varr = cls.dataset.GetCellData()
        tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
    else:
        vedo.logger.error(f"in vorticity(): unknown option {on}")
        raise RuntimeError

    # default to the currently active vector array
    if array_name is None:
        if varr.GetVectors():
            array_name = varr.GetVectors().GetName()
        else:
            vedo.logger.error(f"in vorticity(): no vectors found for {on}")
            raise RuntimeError

    vort.SetInputData(cls.dataset)
    vort.SetInputScalars(tp, array_name)
    # only vorticity is requested here
    vort.ComputeDivergenceOff()
    vort.ComputeGradientOff()
    vort.ComputeVorticityOn()
    vort.SetVorticityArrayName("Vorticity")
    vort.SetFasterApproximation(fast)
    vort.Update()
    if on.startswith("p"):
        vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
    else:
        vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
    return vvecs
Compute and return the vorticity of a vector field as a numpy array.
Arguments:
- array_name : (str) name of the array to compute the vorticity, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
def probe(
    cls,
    source,
    categorical=False,
    snap=False,
    tol=0,
) -> Self:
    """
    Takes a data set and probes its scalars at the specified points in space.

    Note that a mask is also output with valid/invalid points which can be accessed
    with `mesh.pointdata['ValidPointMask']`.

    Arguments:
        source : any dataset
            the data set to probe.
        categorical : bool
            control whether the source pointdata is to be treated as categorical.
        snap : bool
            snap to the cell with the closest point if no cell was found
        tol : float
            the tolerance to use when performing the probe.

    Check out also:
        `interpolate_data_from()` and `tovolume()`

    Examples:
        - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)
    """
    probe_filter = vtki.new("ProbeFilter")
    probe_filter.SetSourceData(source.dataset)
    probe_filter.SetInputData(cls.dataset)
    probe_filter.PassCellArraysOn()
    probe_filter.PassFieldArraysOn()
    probe_filter.PassPointArraysOn()
    probe_filter.SetCategoricalData(categorical)
    probe_filter.ComputeToleranceOff()
    if tol:
        # NOTE(review): in vtkProbeFilter the user-set Tolerance is honored
        # only when ComputeTolerance is OFF; switching it ON here may make
        # SetTolerance a no-op — confirm intended behavior against VTK docs.
        probe_filter.ComputeToleranceOn()
        probe_filter.SetTolerance(tol)
    probe_filter.SetSnapToCellWithClosestPoint(snap)
    probe_filter.Update()
    cls._update(probe_filter.GetOutput(), reset_locators=False)
    cls.pipeline = utils.OperationNode("probe", parents=[cls, source])
    # rename the VTK-generated mask array to a friendlier public name
    cls.pointdata.rename("vtkValidPointMask", "ValidPointMask")
    return cls
Takes a data set and probes its scalars at the specified points in space.
Note that a mask is also output with valid/invalid points which can be accessed
with `mesh.pointdata['ValidPointMask']`.
Arguments:
- source : any dataset the data set to probe.
- categorical : bool control whether the source pointdata is to be treated as categorical.
- snap : bool snap to the cell with the closest point if no cell was found
- tol : float the tolerance to use when performing the probe.
Check out also:
interpolate_data_from()
and `tovolume()`
Examples:
def compute_cell_size(cls) -> Self:
    """
    Attach to this object a cell data array holding the area, volume
    and edge length of each cell (whichever applies to the object type).

    The output arrays are named `Area`, `Volume`, `Length`.
    """
    sizer = vtki.new("CellSizeFilter")
    sizer.SetInputData(cls.dataset)
    # enable the three size measures, skip the per-cell vertex count
    sizer.SetComputeVertexCount(0)
    sizer.SetComputeLength(1)
    sizer.SetComputeArea(1)
    sizer.SetComputeVolume(1)
    # assign the output array names
    sizer.SetLengthArrayName("Length")
    sizer.SetAreaArrayName("Area")
    sizer.SetVolumeArrayName("Volume")
    sizer.Update()
    cls._update(sizer.GetOutput(), reset_locators=False)
    return cls
Add to this object a cell data array containing the area, volume and edge length of the cells (when applicable to the object type).
Array names are: `Area`, `Volume`, `Length`.
def generate_random_data(cls) -> Self:
    """Fill a dataset with random attributes"""
    rag = vtki.new("RandomAttributeGenerator")
    rag.SetInputData(cls.dataset)
    rag.GenerateAllDataOn()
    rag.SetDataTypeToFloat()
    # leave normals untouched, but do generate tensors and cell scalars
    rag.GeneratePointNormalsOff()
    rag.GeneratePointTensorsOn()
    rag.GenerateCellScalarsOn()
    rag.Update()
    cls._update(rag.GetOutput(), reset_locators=False)
    cls.pipeline = utils.OperationNode("generate_random_data", parents=[cls])
    return cls
Fill a dataset with random attributes
def integrate_data(cls) -> dict:
    """
    Integrate point and cell data arrays while computing length,
    area or volume of the domain. It works for 1D, 2D or 3D cells.

    For volumetric datasets, this filter ignores all but 3D cells.
    It will not compute the volume contained in a closed surface.

    Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
    which contain the integration result for the corresponding attributes.

    Examples:
        ```python
        from vedo import *
        surf = Sphere(res=100)
        surf.pointdata['scalars'] = np.ones(surf.npoints)
        data = surf.integrate_data()
        print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
        ```

        ```python
        from vedo import *

        xcoords1 = np.arange(0, 2.2, 0.2)
        xcoords2 = sqrt(np.arange(0, 4.2, 0.2))

        ycoords = np.arange(0, 1.2, 0.2)

        surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
        surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)

        surf1.pointdata['scalars'] = surf1.vertices[:,2]
        surf2.pointdata['scalars'] = surf2.vertices[:,2]

        data1 = surf1.integrate_data()
        data2 = surf2.integrate_data()

        print(data1['pointdata']['scalars'],
            "is equal to",
            data2['pointdata']['scalars'],
            "even if the grids are different!",
            "(= the volume under the surface)"
        )
        show(surf1, surf2, N=2, axes=1).close()
        ```
    """
    vinteg = vtki.new("IntegrateAttributes")
    vinteg.SetInputData(cls.dataset)
    vinteg.Update()
    # wrap the output so the pointdata/celldata/metadata accessors are available
    ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
    data = dict(
        pointdata=ugrid.pointdata.todict(),
        celldata=ugrid.celldata.todict(),
        metadata=ugrid.metadata.todict(),
    )
    return data
Integrate point and cell data arrays while computing length, area or volume of the domain. It works for 1D, 2D or 3D cells.
For volumetric datasets, this filter ignores all but 3D cells. It will not compute the volume contained in a closed surface.
Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
which contain the integration result for the corresponding attributes.
Examples:
from vedo import * surf = Sphere(res=100) surf.pointdata['scalars'] = np.ones(surf.npoints) data = surf.integrate_data() print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
from vedo import * xcoords1 = np.arange(0, 2.2, 0.2) xcoords2 = sqrt(np.arange(0, 4.2, 0.2)) ycoords = np.arange(0, 1.2, 0.2) surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2) surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2) surf1.pointdata['scalars'] = surf1.vertices[:,2] surf2.pointdata['scalars'] = surf2.vertices[:,2] data1 = surf1.integrate_data() data2 = surf2.integrate_data() print(data1['pointdata']['scalars'], "is equal to", data2['pointdata']['scalars'], "even if the grids are different!", "(= the volume under the surface)" ) show(surf1, surf2, N=2, axes=1).close()
def write(cls, filename, binary=True) -> None:
    """Write object to file."""
    written = vedo.file_io.write(cls, filename, binary)
    # record the operation in the object's pipeline graph
    written.pipeline = utils.OperationNode(
        "write",
        parents=[cls],
        comment=filename[:15],
        shape="folder",
        c="#8a817c",
    )
Write object to file.
def tomesh(cls, bounds=(), shrink=0) -> "vedo.Mesh":
    """
    Extract boundary geometry from dataset (or convert data to polygonal type).

    Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
    to keep track of the original mesh elements.

    Arguments:
        bounds : (list)
            specify a sub-region to extract
        shrink : (float)
            shrink the cells to a fraction of their original size
    """
    # optionally shrink the cells first, then feed the result to the
    # geometry filter
    input_data = cls.dataset
    if shrink:
        shrinker = vtki.new("ShrinkFilter")
        shrinker.SetInputData(input_data)
        shrinker.SetShrinkFactor(shrink)
        shrinker.Update()
        input_data = shrinker.GetOutput()

    gf = vtki.new("GeometryFilter")
    gf.SetInputData(input_data)
    # keep provenance arrays so original elements can be traced back
    gf.SetPassThroughCellIds(1)
    gf.SetPassThroughPointIds(1)
    gf.SetOriginalCellIdsName("OriginalCellIds")
    gf.SetOriginalPointIdsName("OriginalPointIds")
    gf.SetNonlinearSubdivisionLevel(1)
    # gf.MergingOff() # crashes on StructuredGrids
    if bounds:
        gf.SetExtent(bounds)
        gf.ExtentClippingOn()
    gf.Update()

    msh = vedo.mesh.Mesh(gf.GetOutput())
    msh.pipeline = utils.OperationNode("tomesh", parents=[cls], c="#9e2a2b")
    return msh
Extract boundary geometry from dataset (or convert data to polygonal type).
Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds` to keep track of the original mesh elements.
Arguments:
- bounds : (list) specify a sub-region to extract
- shrink : (float) shrink the cells to a fraction of their original size
def signed_distance(cls, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
    """
    Compute the `Volume` object whose voxels contains the signed distance from
    the object. The calling object must have "Normals" defined.

    Arguments:
        bounds : (list, actor)
            bounding box sizes
        dims : (list)
            dimensions (nr. of voxels) of the output volume.
        invert : (bool)
            flip the sign
        max_radius : (float)
            specify how far out to propagate distance calculation

    Examples:
        - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)
    """
    # default to the object's own bounding box and half its diagonal
    if bounds is None:
        bounds = cls.bounds()
    if max_radius is None:
        max_radius = cls.diagonal_size() / 2
    dist = vtki.new("SignedDistance")
    dist.SetInputData(cls.dataset)
    dist.SetRadius(max_radius)
    dist.SetBounds(bounds)
    dist.SetDimensions(dims)
    dist.Update()
    img = dist.GetOutput()
    if invert:
        # flip the sign of every voxel by multiplying the image by -1
        mat = vtki.new("ImageMathematics")
        mat.SetInput1Data(img)
        mat.SetOperationToMultiplyByK()
        mat.SetConstantK(-1)
        mat.Update()
        img = mat.GetOutput()

    vol = vedo.Volume(img)
    vol.name = "SignedDistanceVolume"
    vol.pipeline = utils.OperationNode(
        "signed_distance",
        parents=[cls],
        comment=f"dims={tuple(vol.dimensions())}",
        c="#e9c46a:#0096c7",
    )
    return vol
Compute the Volume
object whose voxels contains the signed distance from
the object. The calling object must have "Normals" defined.
Arguments:
- bounds : (list, actor) bounding box sizes
- dims : (list) dimensions (nr. of voxels) of the output volume.
- invert : (bool) flip the sign
- max_radius : (float) specify how far out to propagate distance calculation
Examples:
def unsigned_distance(
    cls, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
    """
    Compute the `Volume` object whose voxels contains the unsigned distance
    from the input object.

    Arguments:
        dims : (list)
            dimensions (nr. of voxels) of the output volume
        bounds : (list)
            bounding box of the output volume; if not 6 values are given
            the object's own bounds are used
        max_radius : (float)
            how far out to propagate the distance calculation;
            defaults to one tenth of the object's diagonal size
        cap_value : (float)
            value assigned to voxels beyond `max_radius`
            (set to None to disable capping)
    """
    dist = vtki.new("UnsignedDistance")
    dist.SetInputData(cls.dataset)
    dist.SetDimensions(dims)

    if len(bounds) == 6:
        dist.SetBounds(bounds)
    else:
        dist.SetBounds(cls.bounds())
    if not max_radius:
        max_radius = cls.diagonal_size() / 10
    dist.SetRadius(max_radius)

    # reuse an already-built locator when available
    if cls.point_locator:
        dist.SetLocator(cls.point_locator)

    if cap_value is not None:
        dist.CappingOn()
        dist.SetCapValue(cap_value)
    dist.SetOutputScalarTypeToFloat()
    dist.Update()
    vol = vedo.Volume(dist.GetOutput())
    vol.name = "UnsignedDistanceVolume"
    vol.pipeline = utils.OperationNode(
        "unsigned_distance", parents=[cls], c="#e9c46a:#0096c7")
    return vol
Compute the Volume
object whose voxels contains the unsigned distance
from the input object.
def smooth_data(cls,
        niter=10, relaxation_factor=0.1, strategy=0, mask=None,
        mode="distance2",
        exclude=("Normals", "TextureCoordinates"),
    ) -> Self:
    """
    Smooth point attribute data using distance weighted Laplacian kernel.
    The effect is to blur regions of high variation and emphasize low variation regions.

    A central concept of this method is the point smoothing stencil.
    A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
    To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
    the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1).
    This averaging process is repeated until the maximum number of iterations is reached.

    The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
    The a(i+1) attributes are determined from the a(i) attributes as follows:
        a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))

    Convergence occurs faster for larger relaxation factors.
    Typically a small number of iterations is required for large relaxation factors,
    and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
    adequate (i.e., just a distance weighted average is computed).

    Warning:
        Certain data attributes cannot be correctly interpolated.
        For example, surface normals are expected to be |n|=1;
        after attribute smoothing this constraint is likely to be violated.
        Other vectors and tensors may suffer from similar issues.
        In such a situation, specify `exclude=...` which will not be smoothed
        (and simply passed through to the output).
        Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
        between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
        The weights are normalized so that sum(w(i))==1. When smoothing based on averaging,
        the weights are simply 1/n, where n is the number of connected points in the stencil.
        The smoothing process reduces high frequency information in the data attributes.
        With excessive smoothing (large numbers of iterations, and/or a large relaxation factor)
        important details may be lost, and the attributes will move towards an "average" value.
        While this filter will process any dataset type, if the input data is a 3D image volume,
        it's likely much faster to use an image-based algorithm to perform data smoothing.
        To determine boundary points in polygonal data, edges used by only one cell are considered boundary
        (and hence the associated points defining the edge).

    Arguments:
        niter : (int)
            number of iterations
        relaxation_factor : (float)
            relaxation factor controlling the amount of Laplacian smoothing applied
        strategy : (int)
            strategy to use for Laplacian smoothing

            - 0: use all points, all point data attributes are smoothed
            - 1: smooth all point attribute data except those on the boundary
            - 2: only point data connected to a boundary point are smoothed

        mask : (str, np.ndarray)
            array to be used as a mask (ignore then the strategy keyword)
        mode : (str)
            smoothing mode, either "distance2", "distance" or "average"

            - distance**2 weighted (i.e., 1/r**2 interpolation weights)
            - distance weighted (i.e., 1/r) approach;
            - simple average of all connected points in the stencil

        exclude : (list)
            list of arrays to be excluded from smoothing
    """
    saf = vtki.new("AttributeSmoothingFilter")
    saf.SetInputData(cls.dataset)
    saf.SetRelaxationFactor(relaxation_factor)
    saf.SetNumberOfIterations(niter)

    # arrays listed in `exclude` are passed through to the output unmodified
    for ex in exclude:
        saf.AddExcludedArray(ex)

    # select the distance weighting function
    if mode == "distance":
        saf.SetWeightsTypeToDistance()
    elif mode == "distance2":
        saf.SetWeightsTypeToDistance2()
    elif mode == "average":
        saf.SetWeightsTypeToAverage()
    else:
        vedo.logger.error(f"smooth_data(): unknown mode {mode}")
        raise TypeError

    saf.SetSmoothingStrategy(strategy)
    if mask is not None:
        # an explicit mask overrides the strategy keyword
        saf.SetSmoothingStrategyToSmoothingMask()
        if isinstance(mask, str):
            # mask given by name: look it up in the point data
            mask_ = cls.dataset.GetPointData().GetArray(mask)
            if not mask_:
                vedo.logger.error(f"smooth_data(): mask array {mask} not found")
                return cls
            mask_array = vtki.vtkUnsignedCharArray()
            mask_array.ShallowCopy(mask_)
            mask_array.SetName(mask_.GetName())
        else:
            # mask given as a numpy array: convert to unsigned chars
            mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
        saf.SetSmoothingMask(mask_array)

    saf.Update()

    cls._update(saf.GetOutput())
    cls.pipeline = utils.OperationNode(
        "smooth_data", comment=f"strategy {strategy}", parents=[cls], c="#9e2a2b"
    )
    return cls
Smooth point attribute data using distance weighted Laplacian kernel. The effect is to blur regions of high variation and emphasize low variation regions.
A central concept of this method is the point smoothing stencil. A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). This averaging process is repeated until the maximum number of iterations is reached.
The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. The a(i+1) attributes are determined from the a(i) attributes as follows: a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))
Convergence occurs faster for larger relaxation factors. Typically a small number of iterations is required for large relaxation factors, and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be adequate (i.e., just a distance weighted average is computed).
Warning:
Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1; after attribute smoothing this constraint is likely to be violated. Other vectors and tensors may suffer from similar issues. In such a situation, specify
exclude=...
which will not be smoothed (and simply passed through to the output). Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil). The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n, where n is the number of connected points in the stencil. The smoothing process reduces high frequency information in the data attributes. With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost, and the attributes will move towards an "average" value. While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use an image-based algorithm to perform data smoothing. To determine boundary points in polygonal data, edges used by only one cell are considered boundary (and hence the associated points defining the edge).
Arguments:
- niter : (int) number of iterations
- relaxation_factor : (float) relaxation factor controlling the amount of Laplacian smoothing applied
strategy : (int) strategy to use for Laplacian smoothing
- 0: use all points, all point data attributes are smoothed - 1: smooth all point attribute data except those on the boundary - 2: only point data connected to a boundary point are smoothed
- mask : (str, np.ndarray) array to be used as a mask (ignore then the strategy keyword)
mode : (str) smoothing mode, either "distance2", "distance" or "average"
- distance**2 weighted (i.e., 1/r**2 interpolation weights) - distance weighted (i.e., 1/r) approach; - simple average of all connected points in the stencil
- exclude : (list) list of arrays to be excluded from smoothing
def compute_streamlines(
    cls,
    seeds: Any,
    integrator="rk4",
    direction="forward",
    initial_step_size=None,
    max_propagation=None,
    max_steps=10000,
    step_length=0,
    surface_constrained=False,
    compute_vorticity=False,
) -> Union["vedo.Lines", None]:
    """
    Integrate a vector field to generate streamlines.

    Arguments:
        seeds : (Mesh, Points, list)
            starting points of the streamlines
        integrator : (str)
            type of integration method to be used:

            - "rk2" (Runge-Kutta 2)
            - "rk4" (Runge-Kutta 4)
            - "rk45" (Runge-Kutta 45)

        direction : (str)
            direction of integration, either "forward", "backward" or "both"
        initial_step_size : (float)
            initial step size used for line integration
        max_propagation : (float)
            maximum length of a streamline expressed in absolute units
        max_steps : (int)
            maximum number of steps for a streamline
        step_length : (float)
            maximum length of a step expressed in absolute units
        surface_constrained : (bool)
            whether to stop integrating when the streamline leaves the surface
        compute_vorticity : (bool)
            whether to compute the vorticity at each streamline point

    Returns:
        a `vedo.Lines` object named "StreamLines",
        or None if `direction` or `integrator` is not recognized.
    """
    # derive sensible defaults from the average extent of the bounding box
    b = cls.dataset.GetBounds()
    size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
    if initial_step_size is None:
        initial_step_size = size / 1000.0

    if max_propagation is None:
        max_propagation = size * 2

    # a raw list of coordinates is accepted as seed points
    if utils.is_sequence(seeds):
        seeds = vedo.Points(seeds)

    sti = vtki.new("StreamTracer")
    sti.SetSourceData(seeds.dataset)
    if isinstance(cls, vedo.RectilinearGrid):
        # the tracer does not handle rectilinear grids directly: convert first
        sti.SetInputData(vedo.UnstructuredGrid(cls.dataset).dataset)
    else:
        sti.SetInputDataObject(cls.dataset)

    sti.SetInitialIntegrationStep(initial_step_size)
    sti.SetComputeVorticity(compute_vorticity)
    sti.SetMaximumNumberOfSteps(max_steps)
    sti.SetMaximumPropagation(max_propagation)
    sti.SetSurfaceStreamlines(surface_constrained)
    if step_length:
        sti.SetMaximumIntegrationStep(step_length)

    if "for" in direction:
        sti.SetIntegrationDirectionToForward()
    elif "back" in direction:
        sti.SetIntegrationDirectionToBackward()
    elif "both" in direction:
        sti.SetIntegrationDirectionToBoth()
    else:
        vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
        return None

    if integrator == "rk2":
        sti.SetIntegratorTypeToRungeKutta2()
    elif integrator == "rk4":
        sti.SetIntegratorTypeToRungeKutta4()
    elif integrator == "rk45":
        sti.SetIntegratorTypeToRungeKutta45()
    else:
        vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
        return None

    sti.Update()

    stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
    stlines.name = "StreamLines"
    cls.pipeline = utils.OperationNode(
        "compute_streamlines", comment=f"{integrator}", parents=[cls, seeds], c="#9e2a2b"
    )
    return stlines
Integrate a vector field to generate streamlines.
Arguments:
- seeds : (Mesh, Points, list) starting points of the streamlines
integrator : (str) type of integration method to be used:
- "rk2" (Runge-Kutta 2) - "rk4" (Runge-Kutta 4) - "rk45" (Runge-Kutta 45)
- direction : (str) direction of integration, either "forward", "backward" or "both"
- initial_step_size : (float) initial step size used for line integration
- max_propagation : (float) maximum length of a streamline expressed in absolute units
- max_steps : (int) maximum number of steps for a streamline
- step_length : (float) maximum length of a step expressed in absolute units
- surface_constrained : (bool) whether to stop integrating when the streamline leaves the surface
- compute_vorticity : (bool) whether to compute the vorticity at each streamline point
class PointAlgorithms(CommonAlgorithms):
    """Methods for point clouds."""

    def apply_transform(cls, LT: Any, deep_copy=True) -> Self:
        """
        Apply a linear or non-linear transformation to the mesh polygonal data.

        Arguments:
            LT : (Any)
                the transformation to apply: a `LinearTransform`,
                `NonLinearTransform`, a vtk matrix/transform object,
                or a matrix-like sequence.
            deep_copy : (bool)
                if False only a shallow copy of the transformed data
                is stored back into the object.
        """
        if cls.dataset.GetNumberOfPoints() == 0:
            return cls

        # normalize the many accepted input types into a vtk transform `tr`
        if isinstance(LT, LinearTransform):
            LT_is_linear = True
            tr = LT.T
            if LT.is_identity():
                return cls

        elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
            LT_is_linear = True
            LT = LinearTransform(LT)
            tr = LT.T
            if LT.is_identity():
                return cls

        elif isinstance(LT, NonLinearTransform):
            LT_is_linear = False
            tr = LT.T
            cls.transform = LT  # reset

        elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
            LT_is_linear = False
            tr = LT
            cls.transform = NonLinearTransform(LT)  # reset

        else:
            vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
            return cls

        ################
        if LT_is_linear:
            try:
                # cls.transform might still not be linear
                cls.transform.concatenate(LT)
            except AttributeError:
                # in that case reset it
                cls.transform = LinearTransform()

        ################
        # pick the vtk filter matching the dataset type
        if isinstance(cls.dataset, vtki.vtkPolyData):
            tp = vtki.new("TransformPolyDataFilter")
        elif isinstance(cls.dataset, vtki.vtkUnstructuredGrid):
            tp = vtki.new("TransformFilter")
            tp.TransformAllInputVectorsOn()
        # elif isinstance(cls.dataset, vtki.vtkImageData):
        #     tp = vtki.new("ImageReslice")
        #     tp.SetInterpolationModeToCubic()
        #     tp.SetResliceTransform(tr)
        else:
            vedo.logger.error(f"apply_transform(), unknown input type: {[cls.dataset]}")
            return cls

        tp.SetTransform(tr)
        tp.SetInputData(cls.dataset)
        tp.Update()
        out = tp.GetOutput()

        if deep_copy:
            cls.dataset.DeepCopy(out)
        else:
            cls.dataset.ShallowCopy(out)

        # reset the locators
        cls.point_locator = None
        cls.cell_locator = None
        cls.line_locator = None
        return cls

    def apply_transform_from_actor(cls) -> LinearTransform:
        """
        Apply the current transformation of the actor to the data.
        Useful when manually moving an actor (eg. when pressing "a").
        Returns the `LinearTransform` object.

        Note that this method is automatically called when the window is closed,
        or the interactor style is changed.
        """
        M = cls.actor.GetMatrix()
        cls.apply_transform(M)
        # reset the actor's pose: the data now carries the transformation
        iden = vtki.vtkMatrix4x4()
        cls.actor.PokeMatrix(iden)
        return LinearTransform(M)

    def pos(cls, x=None, y=None, z=None) -> Self:
        """Set/Get object position."""
        if x is None:  # get functionality
            return cls.transform.position

        if z is None and y is None:  # assume x is of the form (x,y,z)
            if len(x) == 3:
                x, y, z = x
            else:
                x, y = x
                z = 0
        elif z is None:  # assume x,y is of the form x, y
            z = 0

        q = cls.transform.position
        delta = [x, y, z] - q
        # no-op if the requested position equals the current one
        if delta[0] == delta[1] == delta[2] == 0:
            return cls
        LT = LinearTransform().translate(delta)
        return cls.apply_transform(LT)

    def shift(cls, dx=0, dy=0, dz=0) -> Self:
        """Add a vector to the current object position."""
        if utils.is_sequence(dx):
            # a single sequence argument carries all three components
            dx, dy, dz = utils.make3d(dx)
        if dx == dy == dz == 0:
            return cls
        LT = LinearTransform().translate([dx, dy, dz])
        return cls.apply_transform(LT)

    def x(cls, val=None) -> Self:
        """Set/Get object position along x axis."""
        p = cls.transform.position
        if val is None:
            return p[0]
        cls.pos(val, p[1], p[2])
        return cls

    def y(cls, val=None)-> Self:
        """Set/Get object position along y axis."""
        p = cls.transform.position
        if val is None:
            return p[1]
        cls.pos(p[0], val, p[2])
        return cls

    def z(cls, val=None) -> Self:
        """Set/Get object position along z axis."""
        p = cls.transform.position
        if val is None:
            return p[2]
        cls.pos(p[0], p[1], val)
        return cls

    def rotate(cls, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self:
        """
        Rotate around an arbitrary `axis` passing through `point`.

        Example:
            ```python
            from vedo import *
            c1 = Cube()
            c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
            v = vector(0.2,1,0)
            p = vector(1,0,0)  # axis passes through this point
            c2.rotate(90, axis=v, point=p)
            l = Line(-v+p, v+p).lw(3).c('red')
            show(c1, l, c2, axes=1).close()
            ```
        """
        LT = LinearTransform()
        LT.rotate(angle, axis, point, rad)
        return cls.apply_transform(LT)

    def rotate_x(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around x-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_x(angle, rad, around)
        return cls.apply_transform(LT)

    def rotate_y(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around y-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_y(angle, rad, around)
        return cls.apply_transform(LT)

    def rotate_z(cls, angle: float, rad=False, around=None) -> Self:
        """
        Rotate around z-axis. If angle is in radians set `rad=True`.

        Use `around` to define a pivoting point.
        """
        if angle == 0:
            return cls
        LT = LinearTransform().rotate_z(angle, rad, around)
        return cls.apply_transform(LT)

    def reorient(cls, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self:
        """
        Reorient the object to point to a new direction from an initial one.
        If `initaxis` is None, the object will be assumed in its "default" orientation.
        If `xyplane` is True, the object will be rotated to lie on the xy plane.

        Use `rotation` to first rotate the object around its `initaxis`.
        """
        q = cls.transform.position
        LT = LinearTransform()
        LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane)
        return cls.apply_transform(LT)

    def scale(cls, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]:
        """
        Set/get object's scaling factor.

        Arguments:
            s : (list, float)
                scaling factor(s).
            reset : (bool)
                if True previous scaling factors are ignored.
            origin : (bool)
                if True scaling is applied with respect to object's position,
                otherwise is applied respect to (0,0,0).

        Note:
            use `s=(sx,sy,sz)` to scale differently in the three coordinates.
        """
        if s is None:  # get functionality
            return np.array(cls.transform.T.GetScale())

        if not utils.is_sequence(s):
            s = [s, s, s]

        LT = LinearTransform()
        if reset:
            # replace any previous scaling with the requested one
            old_s = np.array(cls.transform.T.GetScale())
            LT.scale(s / old_s)
        else:
            if origin is True:
                LT.scale(s, origin=cls.transform.position)
            elif origin is False:
                LT.scale(s, origin=False)
            else:
                LT.scale(s, origin=origin)

        return cls.apply_transform(LT)
Methods for point clouds.
def apply_transform(cls, LT: Any, deep_copy=True) -> Self:
    """
    Apply a linear or non-linear transformation to the mesh polygonal data.

    Arguments:
        LT : (Any)
            the transformation: a `LinearTransform`, `NonLinearTransform`,
            a vtk matrix/transform object, or a matrix-like sequence.
        deep_copy : (bool)
            if False only a shallow copy of the transformed data
            is stored back into the object.
    """
    if cls.dataset.GetNumberOfPoints() == 0:
        return cls

    # normalize the accepted input types into a vtk transform `vtrans`
    is_linear = True
    if isinstance(LT, LinearTransform):
        if LT.is_identity():
            return cls
        vtrans = LT.T
    elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT):
        LT = LinearTransform(LT)
        if LT.is_identity():
            return cls
        vtrans = LT.T
    elif isinstance(LT, NonLinearTransform):
        is_linear = False
        vtrans = LT.T
        cls.transform = LT  # reset
    elif isinstance(LT, vtki.vtkThinPlateSplineTransform):
        is_linear = False
        vtrans = LT
        cls.transform = NonLinearTransform(LT)  # reset
    else:
        vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}")
        return cls

    if is_linear:
        # cls.transform might still not be linear; reset it in that case
        try:
            cls.transform.concatenate(LT)
        except AttributeError:
            cls.transform = LinearTransform()

    # pick the vtk filter matching the dataset type
    if isinstance(cls.dataset, vtki.vtkPolyData):
        tfilter = vtki.new("TransformPolyDataFilter")
    elif isinstance(cls.dataset, vtki.vtkUnstructuredGrid):
        tfilter = vtki.new("TransformFilter")
        tfilter.TransformAllInputVectorsOn()
    else:
        vedo.logger.error(f"apply_transform(), unknown input type: {[cls.dataset]}")
        return cls

    tfilter.SetTransform(vtrans)
    tfilter.SetInputData(cls.dataset)
    tfilter.Update()
    result = tfilter.GetOutput()

    if deep_copy:
        cls.dataset.DeepCopy(result)
    else:
        cls.dataset.ShallowCopy(result)

    # any cached locators are now stale
    cls.point_locator = None
    cls.cell_locator = None
    cls.line_locator = None
    return cls
Apply a linear or non-linear transformation to the mesh polygonal data.
Example:
from vedo import Cube, show, settings
settings.use_parallel_projection = True
c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
T = c1.transform # rotate by 5 degrees, place at (2,1)
c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
c2.apply_transform(T)
show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
1866 def apply_transform_from_actor(cls) -> LinearTransform: 1867 """ 1868 Apply the current transformation of the actor to the data. 1869 Useful when manually moving an actor (eg. when pressing "a"). 1870 Returns the `LinearTransform` object. 1871 1872 Note that this method is automatically called when the window is closed, 1873 or the interactor style is changed. 1874 """ 1875 M = cls.actor.GetMatrix() 1876 cls.apply_transform(M) 1877 iden = vtki.vtkMatrix4x4() 1878 cls.actor.PokeMatrix(iden) 1879 return LinearTransform(M)
Apply the current transformation of the actor to the data.
Useful when manually moving an actor (eg. when pressing "a").
Returns the `LinearTransform` object.
Note that this method is automatically called when the window is closed, or the interactor style is changed.
1881 def pos(cls, x=None, y=None, z=None) -> Self: 1882 """Set/Get object position.""" 1883 if x is None: # get functionality 1884 return cls.transform.position 1885 1886 if z is None and y is None: # assume x is of the form (x,y,z) 1887 if len(x) == 3: 1888 x, y, z = x 1889 else: 1890 x, y = x 1891 z = 0 1892 elif z is None: # assume x,y is of the form x, y 1893 z = 0 1894 1895 q = cls.transform.position 1896 delta = [x, y, z] - q 1897 if delta[0] == delta[1] == delta[2] == 0: 1898 return cls 1899 LT = LinearTransform().translate(delta) 1900 return cls.apply_transform(LT)
Set/Get object position.
1902 def shift(cls, dx=0, dy=0, dz=0) -> Self: 1903 """Add a vector to the current object position.""" 1904 if utils.is_sequence(dx): 1905 dx, dy, dz = utils.make3d(dx) 1906 if dx == dy == dz == 0: 1907 return cls 1908 LT = LinearTransform().translate([dx, dy, dz]) 1909 return cls.apply_transform(LT)
Add a vector to the current object position.
1911 def x(cls, val=None) -> Self: 1912 """Set/Get object position along x axis.""" 1913 p = cls.transform.position 1914 if val is None: 1915 return p[0] 1916 cls.pos(val, p[1], p[2]) 1917 return cls
Set/Get object position along x axis.
1919 def y(cls, val=None)-> Self: 1920 """Set/Get object position along y axis.""" 1921 p = cls.transform.position 1922 if val is None: 1923 return p[1] 1924 cls.pos(p[0], val, p[2]) 1925 return cls
Set/Get object position along y axis.
1927 def z(cls, val=None) -> Self: 1928 """Set/Get object position along z axis.""" 1929 p = cls.transform.position 1930 if val is None: 1931 return p[2] 1932 cls.pos(p[0], p[1], val) 1933 return cls
Set/Get object position along z axis.
1935 def rotate(cls, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self: 1936 """ 1937 Rotate around an arbitrary `axis` passing through `point`. 1938 1939 Example: 1940 ```python 1941 from vedo import * 1942 c1 = Cube() 1943 c2 = c1.clone().c('violet').alpha(0.5) # copy of c1 1944 v = vector(0.2,1,0) 1945 p = vector(1,0,0) # axis passes through this point 1946 c2.rotate(90, axis=v, point=p) 1947 l = Line(-v+p, v+p).lw(3).c('red') 1948 show(c1, l, c2, axes=1).close() 1949 ``` 1950 1951  1952 """ 1953 LT = LinearTransform() 1954 LT.rotate(angle, axis, point, rad) 1955 return cls.apply_transform(LT)
Rotate around an arbitrary `axis` passing through `point`.
Example:
from vedo import *
c1 = Cube()
c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
v = vector(0.2,1,0)
p = vector(1,0,0) # axis passes through this point
c2.rotate(90, axis=v, point=p)
l = Line(-v+p, v+p).lw(3).c('red')
show(c1, l, c2, axes=1).close()
1957 def rotate_x(cls, angle: float, rad=False, around=None) -> Self: 1958 """ 1959 Rotate around x-axis. If angle is in radians set `rad=True`. 1960 1961 Use `around` to define a pivoting point. 1962 """ 1963 if angle == 0: 1964 return cls 1965 LT = LinearTransform().rotate_x(angle, rad, around) 1966 return cls.apply_transform(LT)
Rotate around x-axis. If angle is in radians set `rad=True`.
Use `around` to define a pivoting point.
1968 def rotate_y(cls, angle: float, rad=False, around=None) -> Self: 1969 """ 1970 Rotate around y-axis. If angle is in radians set `rad=True`. 1971 1972 Use `around` to define a pivoting point. 1973 """ 1974 if angle == 0: 1975 return cls 1976 LT = LinearTransform().rotate_y(angle, rad, around) 1977 return cls.apply_transform(LT)
Rotate around y-axis. If angle is in radians set `rad=True`.
Use `around` to define a pivoting point.
1979 def rotate_z(cls, angle: float, rad=False, around=None) -> Self: 1980 """ 1981 Rotate around z-axis. If angle is in radians set `rad=True`. 1982 1983 Use `around` to define a pivoting point. 1984 """ 1985 if angle == 0: 1986 return cls 1987 LT = LinearTransform().rotate_z(angle, rad, around) 1988 return cls.apply_transform(LT)
Rotate around z-axis. If angle is in radians set `rad=True`.
Use `around` to define a pivoting point.
1990 def reorient(cls, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self: 1991 """ 1992 Reorient the object to point to a new direction from an initial one. 1993 If `initaxis` is None, the object will be assumed in its "default" orientation. 1994 If `xyplane` is True, the object will be rotated to lie on the xy plane. 1995 1996 Use `rotation` to first rotate the object around its `initaxis`. 1997 """ 1998 q = cls.transform.position 1999 LT = LinearTransform() 2000 LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane) 2001 return cls.apply_transform(LT)
Reorient the object to point to a new direction from an initial one.
If `initaxis` is None, the object will be assumed in its "default" orientation.
If `xyplane` is True, the object will be rotated to lie on the xy plane.
Use `rotation` to first rotate the object around its `initaxis`.
2003 def scale(cls, s=None, reset=False, origin=True) -> Union[Self, np.ndarray]: 2004 """ 2005 Set/get object's scaling factor. 2006 2007 Arguments: 2008 s : (list, float) 2009 scaling factor(s). 2010 reset : (bool) 2011 if True previous scaling factors are ignored. 2012 origin : (bool) 2013 if True scaling is applied with respect to object's position, 2014 otherwise is applied respect to (0,0,0). 2015 2016 Note: 2017 use `s=(sx,sy,sz)` to scale differently in the three coordinates. 2018 """ 2019 if s is None: 2020 return np.array(cls.transform.T.GetScale()) 2021 2022 if not utils.is_sequence(s): 2023 s = [s, s, s] 2024 2025 LT = LinearTransform() 2026 if reset: 2027 old_s = np.array(cls.transform.T.GetScale()) 2028 LT.scale(s / old_s) 2029 else: 2030 if origin is True: 2031 LT.scale(s, origin=cls.transform.position) 2032 elif origin is False: 2033 LT.scale(s, origin=False) 2034 else: 2035 LT.scale(s, origin=origin) 2036 2037 return cls.apply_transform(LT)
Set/get object's scaling factor.
Arguments:
- s : (list, float) scaling factor(s).
- reset : (bool) if True previous scaling factors are ignored.
- origin : (bool) if True scaling is applied with respect to object's position, otherwise is applied respect to (0,0,0).
Note:
use
s=(sx,sy,sz)
to scale differently in the three coordinates.
Inherited Members
- CommonAlgorithms
- pointdata
- celldata
- metadata
- rename
- memory_address
- memory_size
- modified
- box
- update_dataset
- bounds
- xbounds
- ybounds
- zbounds
- diagonal_size
- average_size
- center_of_mass
- copy_data_from
- inputdata
- npoints
- nvertices
- ncells
- cell_centers
- lines
- lines_as_flat_array
- mark_boundaries
- find_cells_in_bounds
- find_cells_along_line
- find_cells_along_plane
- keep_cell_types
- map_cells_to_points
- vertices
- points
- coordinates
- cells_as_flat_array
- cells
- cell_edge_neighbors
- map_points_to_cells
- resample_data_from
- interpolate_data_from
- add_ids
- gradient
- divergence
- vorticity
- probe
- compute_cell_size
- generate_random_data
- integrate_data
- write
- tomesh
- signed_distance
- unsigned_distance
- smooth_data
- compute_streamlines
class VolumeAlgorithms(CommonAlgorithms):
    """Methods for Volume objects."""

    def bounds(cls) -> np.ndarray:
        """
        Get the object bounds.
        Returns a numpy array in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
        """
        # OVERRIDE CommonAlgorithms.bounds() which is too slow
        return np.array(cls.dataset.GetBounds())

    def isosurface(cls, value=None, flying_edges=False) -> "vedo.mesh.Mesh":
        """
        Return a `Mesh` isosurface extracted from the `Volume` object.

        Set `value` as single float or list of values to draw the isosurface(s).
        Use flying_edges for faster results (but sometimes can interfere with `smooth()`).

        The isosurface values can be accessed with `mesh.metadata["isovalue"]`.

        Examples:
            - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py)

            ![](https://vedo.embl.es/images/volumetric/isosurfaces1.png)
        """
        scrange = cls.dataset.GetScalarRange()

        # flying edges is a faster, multithreaded contouring algorithm
        if flying_edges:
            cf = vtki.new("FlyingEdges3D")
            cf.InterpolateAttributesOn()
        else:
            cf = vtki.new("ContourFilter")
            cf.UseScalarTreeOn()

        cf.SetInputData(cls.dataset)
        cf.ComputeNormalsOn()

        if utils.is_sequence(value):
            cf.SetNumberOfContours(len(value))
            for i, t in enumerate(value):
                cf.SetValue(i, t)
        else:
            if value is None:
                # pick a default value one third into the scalar range
                value = (2 * scrange[0] + scrange[1]) / 3.0
                # print("automatic isosurface value =", value)
            cf.SetValue(0, value)

        cf.Update()
        poly = cf.GetOutput()

        out = vedo.mesh.Mesh(poly, c=None).phong()
        out.mapper.SetScalarRange(scrange[0], scrange[1])
        out.metadata["isovalue"] = value

        out.pipeline = utils.OperationNode(
            "isosurface",
            parents=[cls],
            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
            c="#4cc9f0:#e9c46a",
        )
        return out

    def isosurface_discrete(
        cls, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0,
    ) -> "vedo.mesh.Mesh":
        """
        Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded,
        3D version of the multiple objects/labels Surface Nets algorithm.
        The input is a 3D image (i.e., volume) where each voxel is labeled
        (integer labels are preferred to real values), and the output data is a polygonal mesh separating
        labeled regions / objects.
        (Note that on output each region [corresponding to a different segmented object] will share
        points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).

        Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating
        the labels on either side of the polygons composing the output Mesh.
        (This can be used for advanced operations like extracting shared/contacting boundaries between two objects.
        The name of this celldata array is "BoundaryLabels").

        The values can be accessed with `mesh.metadata["isovalue"]`.

        Arguments:
            values : (list)
                list of label values to draw the isosurface(s) for.
            background_label : (float)
                this value specifies the label value to use when referencing the background
                region outside of any of the specified regions.
            internal_boundaries : (bool, list)
                if False, the output will only contain the boundary surface. Internal surfaces will be removed.
                If a list of integers is provided, only the boundaries between the specified labels will be extracted.
            use_quads : (bool)
                if True, the output polygons will be quads. If False, the output polygons will be triangles.
            nsmooth : (int)
                number of iterations of smoothing (0 means no smoothing).

        Examples:
            - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
        """
        # silence the (noisy) VTK logger while the filter runs
        logger = vtki.get_class("Logger")
        logger.SetStderrVerbosity(logger.VERBOSITY_ERROR)

        snets = vtki.new("SurfaceNets3D")
        snets.SetInputData(cls.dataset)

        if nsmooth:
            snets.SmoothingOn()
            snets.AutomaticSmoothingConstraintsOn()
            snets.GetSmoother().SetNumberOfIterations(nsmooth)
            # snets.GetSmoother().SetRelaxationFactor(relaxation_factor)
            # snets.GetSmoother().SetConstraintDistance(constraint_distance)
        else:
            snets.SmoothingOff()

        # select which inter-region boundaries appear in the output
        if internal_boundaries is False:
            snets.SetOutputStyleToBoundary()
        elif internal_boundaries is True:
            snets.SetOutputStyleToDefault()
        elif utils.is_sequence(internal_boundaries):
            snets.SetOutputStyleToSelected()
            snets.InitializeSelectedLabelsList()
            for val in internal_boundaries:
                snets.AddSelectedLabel(val)
        else:
            vedo.logger.error("isosurface_discrete(): unknown boundaries option")

        n = len(values)
        snets.SetNumberOfContours(n)
        snets.SetNumberOfLabels(n)

        if background_label is not None:
            snets.SetBackgroundLabel(background_label)

        for i, val in enumerate(values):
            snets.SetValue(i, val)

        if use_quads:
            snets.SetOutputMeshTypeToQuads()
        else:
            snets.SetOutputMeshTypeToTriangles()
        snets.Update()

        out = vedo.mesh.Mesh(snets.GetOutput())
        out.metadata["isovalue"] = values
        out.pipeline = utils.OperationNode(
            "isosurface_discrete",
            parents=[cls],
            comment=f"#pts {out.dataset.GetNumberOfPoints()}",
            c="#4cc9f0:#e9c46a",
        )

        # restore the default VTK logger verbosity
        logger.SetStderrVerbosity(logger.VERBOSITY_INFO)
        return out


    def legosurface(
        cls,
        vmin=None,
        vmax=None,
        invert=False,
        boundary=True,
        array_name="input_scalars",
    ) -> "vedo.mesh.Mesh":
        """
        Represent an object - typically a `Volume` - as lego blocks (voxels).
        By default colors correspond to the volume's scalar.
        Returns a `Mesh` object.

        Arguments:
            vmin : (float)
                the lower threshold, voxels below this value are not shown.
            vmax : (float)
                the upper threshold, voxels above this value are not shown.
            invert : (bool)
                if True keep the voxels outside of the [vmin, vmax] window.
            boundary : (bool)
                controls whether to include cells that are partially inside
            array_name : (int, str)
                name or index of the scalar array to be considered

        Examples:
            - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)

            ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
        """
        # build an implicit window function selecting scalars in [vmin, vmax]
        imp_dataset = vtki.new("ImplicitDataSet")
        imp_dataset.SetDataSet(cls.dataset)
        window = vtki.new("ImplicitWindowFunction")
        window.SetImplicitFunction(imp_dataset)

        srng = list(cls.dataset.GetScalarRange())
        if vmin is not None:
            srng[0] = vmin
        if vmax is not None:
            srng[1] = vmax
        if not boundary:
            # slightly enlarge the window to avoid numerical edge effects
            tol = 0.00001 * (srng[1] - srng[0])
            srng[0] -= tol
            srng[1] += tol
        window.SetWindowRange(srng)
        # print("legosurface window range:", srng)

        extract = vtki.new("ExtractGeometry")
        extract.SetInputData(cls.dataset)
        extract.SetImplicitFunction(window)
        extract.SetExtractInside(invert)
        extract.SetExtractBoundaryCells(boundary)
        extract.Update()

        # convert the extracted cells to renderable polygonal geometry
        gf = vtki.new("GeometryFilter")
        gf.SetInputData(extract.GetOutput())
        gf.Update()

        m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
        m.map_points_to_cells()
        m.celldata.select(array_name)

        m.pipeline = utils.OperationNode(
            "legosurface",
            parents=[cls],
            comment=f"array: {array_name}",
            c="#4cc9f0:#e9c46a",
        )
        return m

    def tomesh(cls, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
        """
        Build a polygonal Mesh from the current object.

        If `fill=True`, the interior faces of all the cells are created.
        (setting a `shrink` value slightly smaller than the default 1.0
        can avoid flickering due to internal adjacent faces).

        If `fill=False`, only the boundary faces will be generated.
        """
        gf = vtki.new("GeometryFilter")
        if fill:
            # shrink cells so internal faces become visible
            sf = vtki.new("ShrinkFilter")
            sf.SetInputData(cls.dataset)
            sf.SetShrinkFactor(shrink)
            sf.Update()
            gf.SetInputData(sf.GetOutput())
            gf.Update()
            poly = gf.GetOutput()
            if shrink == 1.0:
                # no shrink applied: merge coincident points left by the filter
                clean_poly = vtki.new("CleanPolyData")
                clean_poly.PointMergingOn()
                clean_poly.ConvertLinesToPointsOn()
                clean_poly.ConvertPolysToLinesOn()
                clean_poly.ConvertStripsToPolysOn()
                clean_poly.SetInputData(poly)
                clean_poly.Update()
                poly = clean_poly.GetOutput()
        else:
            gf.SetInputData(cls.dataset)
            gf.Update()
            poly = gf.GetOutput()

        msh = vedo.mesh.Mesh(poly).flat()
        msh.scalarbar = cls.scalarbar
        # carry over the volume's color transfer function as a lookup table
        lut = utils.ctf2lut(cls)
        if lut:
            msh.mapper.SetLookupTable(lut)

        msh.pipeline = utils.OperationNode(
            "tomesh", parents=[cls], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
        )
        return msh
Methods for Volume objects.
2044 def bounds(cls) -> np.ndarray: 2045 """ 2046 Get the object bounds. 2047 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 2048 """ 2049 # OVERRIDE CommonAlgorithms.bounds() which is too slow 2050 return np.array(cls.dataset.GetBounds())
Get the object bounds.
Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
2052 def isosurface(cls, value=None, flying_edges=False) -> "vedo.mesh.Mesh": 2053 """ 2054 Return an `Mesh` isosurface extracted from the `Volume` object. 2055 2056 Set `value` as single float or list of values to draw the isosurface(s). 2057 Use flying_edges for faster results (but sometimes can interfere with `smooth()`). 2058 2059 The isosurface values can be accessed with `mesh.metadata["isovalue"]`. 2060 2061 Examples: 2062 - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py) 2063 2064  2065 """ 2066 scrange = cls.dataset.GetScalarRange() 2067 2068 if flying_edges: 2069 cf = vtki.new("FlyingEdges3D") 2070 cf.InterpolateAttributesOn() 2071 else: 2072 cf = vtki.new("ContourFilter") 2073 cf.UseScalarTreeOn() 2074 2075 cf.SetInputData(cls.dataset) 2076 cf.ComputeNormalsOn() 2077 2078 if utils.is_sequence(value): 2079 cf.SetNumberOfContours(len(value)) 2080 for i, t in enumerate(value): 2081 cf.SetValue(i, t) 2082 else: 2083 if value is None: 2084 value = (2 * scrange[0] + scrange[1]) / 3.0 2085 # print("automatic isosurface value =", value) 2086 cf.SetValue(0, value) 2087 2088 cf.Update() 2089 poly = cf.GetOutput() 2090 2091 out = vedo.mesh.Mesh(poly, c=None).phong() 2092 out.mapper.SetScalarRange(scrange[0], scrange[1]) 2093 out.metadata["isovalue"] = value 2094 2095 out.pipeline = utils.OperationNode( 2096 "isosurface", 2097 parents=[cls], 2098 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2099 c="#4cc9f0:#e9c46a", 2100 ) 2101 return out
Return a `Mesh` isosurface extracted from the `Volume` object.
Set `value` as single float or list of values to draw the isosurface(s).
Use flying_edges for faster results (but sometimes can interfere with `smooth()`).
The isosurface values can be accessed with `mesh.metadata["isovalue"]`.
Examples:
2103 def isosurface_discrete( 2104 cls, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0, 2105 ) -> "vedo.mesh.Mesh": 2106 """ 2107 Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 2108 3D version of the multiple objects/labels Surface Nets algorithm. 2109 The input is a 3D image (i.e., volume) where each voxel is labeled 2110 (integer labels are preferred to real values), and the output data is a polygonal mesh separating 2111 labeled regions / objects. 2112 (Note that on output each region [corresponding to a different segmented object] will share 2113 points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them). 2114 2115 Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating 2116 the labels on either side of the polygons composing the output Mesh. 2117 (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. 2118 The name of this celldata array is "BoundaryLabels"). 2119 2120 The values can be accessed with `mesh.metadata["isovalue"]`. 2121 2122 Arguments: 2123 value : (float, list) 2124 single value or list of values to draw the isosurface(s). 2125 background_label : (float) 2126 this value specifies the label value to use when referencing the background 2127 region outside of any of the specified regions. 2128 boundaries : (bool, list) 2129 if True, the output will only contain the boundary surface. Internal surfaces will be removed. 2130 If a list of integers is provided, only the boundaries between the specified labels will be extracted. 2131 use_quads : (bool) 2132 if True, the output polygons will be quads. If False, the output polygons will be triangles. 2133 nsmooth : (int) 2134 number of iterations of smoothing (0 means no smoothing). 
2135 2136 Examples: 2137 - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py) 2138 """ 2139 logger = vtki.get_class("Logger") 2140 logger.SetStderrVerbosity(logger.VERBOSITY_ERROR) 2141 2142 snets = vtki.new("SurfaceNets3D") 2143 snets.SetInputData(cls.dataset) 2144 2145 if nsmooth: 2146 snets.SmoothingOn() 2147 snets.AutomaticSmoothingConstraintsOn() 2148 snets.GetSmoother().SetNumberOfIterations(nsmooth) 2149 # snets.GetSmoother().SetRelaxationFactor(relaxation_factor) 2150 # snets.GetSmoother().SetConstraintDistance(constraint_distance) 2151 else: 2152 snets.SmoothingOff() 2153 2154 if internal_boundaries is False: 2155 snets.SetOutputStyleToBoundary() 2156 elif internal_boundaries is True: 2157 snets.SetOutputStyleToDefault() 2158 elif utils.is_sequence(internal_boundaries): 2159 snets.SetOutputStyleToSelected() 2160 snets.InitializeSelectedLabelsList() 2161 for val in internal_boundaries: 2162 snets.AddSelectedLabel(val) 2163 else: 2164 vedo.logger.error("isosurface_discrete(): unknown boundaries option") 2165 2166 n = len(values) 2167 snets.SetNumberOfContours(n) 2168 snets.SetNumberOfLabels(n) 2169 2170 if background_label is not None: 2171 snets.SetBackgroundLabel(background_label) 2172 2173 for i, val in enumerate(values): 2174 snets.SetValue(i, val) 2175 2176 if use_quads: 2177 snets.SetOutputMeshTypeToQuads() 2178 else: 2179 snets.SetOutputMeshTypeToTriangles() 2180 snets.Update() 2181 2182 out = vedo.mesh.Mesh(snets.GetOutput()) 2183 out.metadata["isovalue"] = values 2184 out.pipeline = utils.OperationNode( 2185 "isosurface_discrete", 2186 parents=[cls], 2187 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2188 c="#4cc9f0:#e9c46a", 2189 ) 2190 2191 logger.SetStderrVerbosity(logger.VERBOSITY_INFO) 2192 return out
Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 3D version of the multiple objects/labels Surface Nets algorithm. The input is a 3D image (i.e., volume) where each voxel is labeled (integer labels are preferred to real values), and the output data is a polygonal mesh separating labeled regions / objects. (Note that on output each region [corresponding to a different segmented object] will share points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).
Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating the labels on either side of the polygons composing the output Mesh. (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. The name of this celldata array is "BoundaryLabels").
The values can be accessed with `mesh.metadata["isovalue"]`.
Arguments:
- value : (float, list) single value or list of values to draw the isosurface(s).
- background_label : (float) this value specifies the label value to use when referencing the background region outside of any of the specified regions.
- boundaries : (bool, list) if True, the output will only contain the boundary surface. Internal surfaces will be removed. If a list of integers is provided, only the boundaries between the specified labels will be extracted.
- use_quads : (bool) if True, the output polygons will be quads. If False, the output polygons will be triangles.
- nsmooth : (int) number of iterations of smoothing (0 means no smoothing).
Examples:
2195 def legosurface( 2196 cls, 2197 vmin=None, 2198 vmax=None, 2199 invert=False, 2200 boundary=True, 2201 array_name="input_scalars", 2202 ) -> "vedo.mesh.Mesh": 2203 """ 2204 Represent an object - typically a `Volume` - as lego blocks (voxels). 2205 By default colors correspond to the volume's scalar. 2206 Returns an `Mesh` object. 2207 2208 Arguments: 2209 vmin : (float) 2210 the lower threshold, voxels below this value are not shown. 2211 vmax : (float) 2212 the upper threshold, voxels above this value are not shown. 2213 boundary : (bool) 2214 controls whether to include cells that are partially inside 2215 array_name : (int, str) 2216 name or index of the scalar array to be considered 2217 2218 Examples: 2219 - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py) 2220 2221  2222 """ 2223 imp_dataset = vtki.new("ImplicitDataSet") 2224 imp_dataset.SetDataSet(cls.dataset) 2225 window = vtki.new("ImplicitWindowFunction") 2226 window.SetImplicitFunction(imp_dataset) 2227 2228 srng = list(cls.dataset.GetScalarRange()) 2229 if vmin is not None: 2230 srng[0] = vmin 2231 if vmax is not None: 2232 srng[1] = vmax 2233 if not boundary: 2234 tol = 0.00001 * (srng[1] - srng[0]) 2235 srng[0] -= tol 2236 srng[1] += tol 2237 window.SetWindowRange(srng) 2238 # print("legosurface window range:", srng) 2239 2240 extract = vtki.new("ExtractGeometry") 2241 extract.SetInputData(cls.dataset) 2242 extract.SetImplicitFunction(window) 2243 extract.SetExtractInside(invert) 2244 extract.SetExtractBoundaryCells(boundary) 2245 extract.Update() 2246 2247 gf = vtki.new("GeometryFilter") 2248 gf.SetInputData(extract.GetOutput()) 2249 gf.Update() 2250 2251 m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat() 2252 m.map_points_to_cells() 2253 m.celldata.select(array_name) 2254 2255 m.pipeline = utils.OperationNode( 2256 "legosurface", 2257 parents=[cls], 2258 comment=f"array: {array_name}", 2259 c="#4cc9f0:#e9c46a", 2260 ) 2261 return m
Represent an object - typically a `Volume` - as lego blocks (voxels).
By default colors correspond to the volume's scalar.
Returns a `Mesh` object.
Arguments:
- vmin : (float) the lower threshold, voxels below this value are not shown.
- vmax : (float) the upper threshold, voxels above this value are not shown.
- boundary : (bool) controls whether to include cells that are partially inside
- array_name : (int, str) name or index of the scalar array to be considered
Examples:
2263 def tomesh(cls, fill=True, shrink=1.0) -> "vedo.mesh.Mesh": 2264 """ 2265 Build a polygonal Mesh from the current object. 2266 2267 If `fill=True`, the interior faces of all the cells are created. 2268 (setting a `shrink` value slightly smaller than the default 1.0 2269 can avoid flickering due to internal adjacent faces). 2270 2271 If `fill=False`, only the boundary faces will be generated. 2272 """ 2273 gf = vtki.new("GeometryFilter") 2274 if fill: 2275 sf = vtki.new("ShrinkFilter") 2276 sf.SetInputData(cls.dataset) 2277 sf.SetShrinkFactor(shrink) 2278 sf.Update() 2279 gf.SetInputData(sf.GetOutput()) 2280 gf.Update() 2281 poly = gf.GetOutput() 2282 if shrink == 1.0: 2283 clean_poly = vtki.new("CleanPolyData") 2284 clean_poly.PointMergingOn() 2285 clean_poly.ConvertLinesToPointsOn() 2286 clean_poly.ConvertPolysToLinesOn() 2287 clean_poly.ConvertStripsToPolysOn() 2288 clean_poly.SetInputData(poly) 2289 clean_poly.Update() 2290 poly = clean_poly.GetOutput() 2291 else: 2292 gf.SetInputData(cls.dataset) 2293 gf.Update() 2294 poly = gf.GetOutput() 2295 2296 msh = vedo.mesh.Mesh(poly).flat() 2297 msh.scalarbar = cls.scalarbar 2298 lut = utils.ctf2lut(cls) 2299 if lut: 2300 msh.mapper.SetLookupTable(lut) 2301 2302 msh.pipeline = utils.OperationNode( 2303 "tomesh", parents=[cls], comment=f"fill={fill}", c="#9e2a2b:#e9c46a" 2304 ) 2305 return msh
Build a polygonal Mesh from the current object.
If `fill=True`, the interior faces of all the cells are created.
(setting a `shrink` value slightly smaller than the default 1.0
can avoid flickering due to internal adjacent faces).
If `fill=False`, only the boundary faces will be generated.
Inherited Members
- CommonAlgorithms
- pointdata
- celldata
- metadata
- rename
- memory_address
- memory_size
- modified
- box
- update_dataset
- xbounds
- ybounds
- zbounds
- diagonal_size
- average_size
- center_of_mass
- copy_data_from
- inputdata
- npoints
- nvertices
- ncells
- cell_centers
- lines
- lines_as_flat_array
- mark_boundaries
- find_cells_in_bounds
- find_cells_along_line
- find_cells_along_plane
- keep_cell_types
- map_cells_to_points
- vertices
- points
- coordinates
- cells_as_flat_array
- cells
- cell_edge_neighbors
- map_points_to_cells
- resample_data_from
- interpolate_data_from
- add_ids
- gradient
- divergence
- vorticity
- probe
- compute_cell_size
- generate_random_data
- integrate_data
- write
- signed_distance
- unsigned_distance
- smooth_data
- compute_streamlines