vedo.core
Base classes providing functionality to different vedo objects.
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
from typing import List, Union, Any
from typing_extensions import Self

import vedo.vtkclasses as vtki

import vedo
from vedo import colors
from vedo import utils
from vedo.transformations import LinearTransform, NonLinearTransform


__docformat__ = "google"

__doc__ = """
Base classes providing functionality to different vedo objects.

![](https://vedo.embl.es/images/feats/algorithms_illustration.png)
"""

__all__ = [
    "DataArrayHelper",
    "CommonAlgorithms",
    "PointAlgorithms",
    "VolumeAlgorithms",
]

warnings = dict(
    points_getter=(
        "WARNING: points() is deprecated, use vertices instead. Change:\n"
        " mesh.points() -> mesh.vertices\n"
        " (silence this with vedo.core.warnings['points_getter']=False)"
    ),
    points_setter=(
        "WARNING: points() is deprecated, use vertices instead. Change:\n"
        " mesh.points([[x,y,z], ...]) -> mesh.vertices = [[x,y,z], ...]\n"
        " (silence this with vedo.core.warnings['points_getter']=False)"
    ),
)

###############################################################################
class DataArrayHelper:
    # Internal use only.
    # Helper class to manage data associated to either
    # points (or vertices) or cells (or faces).
    def __init__(self, obj, association):

        self.obj = obj
        self.association = association

    def __getitem__(self, key):

        if self.association == 0:
            data = self.obj.dataset.GetPointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()

            varr = data.GetAbstractArray(key)
            if isinstance(varr, vtki.vtkStringArray):
                if isinstance(key, int):
                    key = data.GetArrayName(key)
                n = varr.GetNumberOfValues()
                narr = [varr.GetValue(i) for i in range(n)]
                return narr
                ###########

        else:
            raise RuntimeError()

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return None
        return utils.vtk2numpy(arr)

    def __setitem__(self, key, input_array):

        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            n = self.obj.dataset.GetNumberOfPoints()
            self.obj.mapper.SetScalarModeToUsePointData()

        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
            n = self.obj.dataset.GetNumberOfCells()
            self.obj.mapper.SetScalarModeToUseCellData()

        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
            if not utils.is_sequence(input_array):
                input_array = [input_array]

            if isinstance(input_array[0], str):
                varr = vtki.vtkStringArray()
                varr.SetName(key)
                varr.SetNumberOfComponents(1)
                varr.SetNumberOfTuples(len(input_array))
                for i, iarr in enumerate(input_array):
                    if isinstance(iarr, np.ndarray):
                        iarr = iarr.tolist()  # better format
                        # Note: a string k can be converted to numpy with
                        # import json; k = np.array(json.loads(k))
                    varr.InsertValue(i, str(iarr))
            else:
                try:
                    varr = utils.numpy2vtk(input_array, name=key)
                except TypeError as e:
                    vedo.logger.error(
                        f"cannot create metadata with input object:\n"
                        f"{input_array}"
                        f"\n\nAllowed content examples are:\n"
                        f"- flat list of strings ['a','b', 1, [1,2,3], ...]"
                        f" (first item must be a string in this case)\n"
                        f" hint: use k = np.array(json.loads(k)) to convert strings\n"
                        f"- numpy arrays of any shape"
                    )
                    raise e

            data.AddArray(varr)
            return  ############

        else:
            raise RuntimeError()

        if len(input_array) != n:
            vedo.logger.error(
                f"Error in point/cell data: length of input {len(input_array)}"
                f" != {n} nr. of elements"
            )
            raise RuntimeError()

        input_array = np.asarray(input_array)
        varr = utils.numpy2vtk(input_array, name=key)
        data.AddArray(varr)

        if len(input_array.shape) == 1:  # scalars
            data.SetActiveScalars(key)
            try:  # could be a volume mapper
                self.obj.mapper.SetScalarRange(data.GetScalars().GetRange())
            except AttributeError:
                pass
        elif len(input_array.shape) == 2 and input_array.shape[1] == 3:  # vectors
            if key.lower() == "normals":
                data.SetActiveNormals(key)
            else:
                data.SetActiveVectors(key)

    def keys(self) -> List[str]:
        """Return the list of available data array names"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            name = ""
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                iarr = data.GetArray(i)
                if iarr:
                    name = iarr.GetName()
            if name:
                arrnames.append(name)
        return arrnames

    def items(self) -> List:
        """Return the list of available data array `(names, values)`."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        arrnames = []
        for i in range(data.GetNumberOfArrays()):
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                name = data.GetArray(i).GetName()
            if name:
                arrnames.append((name, self[name]))
        return arrnames

    def todict(self) -> dict:
        """Return a dictionary of the available data arrays."""
        return dict(self.items())

    def rename(self, oldname: str, newname: str) -> None:
        """Rename an array"""
        if self.association == 0:
            varr = self.obj.dataset.GetPointData().GetArray(oldname)
        elif self.association == 1:
            varr = self.obj.dataset.GetCellData().GetArray(oldname)
        elif self.association == 2:
            varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname)
        if varr:
            varr.SetName(newname)
        else:
            vedo.logger.warning(
                f"Cannot rename non existing array {oldname} to {newname}"
            )

    def remove(self, key: Union[int, str]) -> None:
        """Remove a data array by name or number"""
        if self.association == 0:
            self.obj.dataset.GetPointData().RemoveArray(key)
        elif self.association == 1:
            self.obj.dataset.GetCellData().RemoveArray(key)
        elif self.association == 2:
            self.obj.dataset.GetFieldData().RemoveArray(key)

    def clear(self) -> None:
        """Remove all data associated to this object"""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        elif self.association == 1:
            data = self.obj.dataset.GetCellData()
        elif self.association == 2:
            data = self.obj.dataset.GetFieldData()
        for i in range(data.GetNumberOfArrays()):
            if self.association == 2:
                name = data.GetAbstractArray(i).GetName()
            else:
                name = data.GetArray(i).GetName()
            data.RemoveArray(name)

    def select(self, key: Union[int, str]) -> Any:
        """Select one specific array by its name to make it the `active` one."""
        # Default (ColorModeToDefault): unsigned char scalars are treated as colors,
        # and NOT mapped through the lookup table, while everything else is.
        # ColorModeToDirectScalar extends ColorModeToDefault such that all integer
        # types are treated as colors with values in the range 0-255
        # and floating types are treated as colors with values in the range 0.0-1.0.
        # Setting ColorModeToMapScalars means that all scalar data will be mapped
        # through the lookup table.
        # (Note that for multi-component scalars, the particular component
        # to use for mapping can be specified using the SelectColorArray() method.)
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            self.obj.mapper.SetScalarModeToUsePointData()
        else:
            data = self.obj.dataset.GetCellData()
            self.obj.mapper.SetScalarModeToUseCellData()

        if isinstance(key, int):
            key = data.GetArrayName(key)

        arr = data.GetArray(key)
        if not arr:
            return self.obj

        nc = arr.GetNumberOfComponents()
        # print("GetNumberOfComponents", nc)
        if nc == 1:
            data.SetActiveScalars(key)
        elif nc == 2:
            data.SetTCoords(arr)
        elif nc in (3, 4):
            if "rgb" in key.lower():  # type: ignore
                data.SetActiveScalars(key)
                try:
                    # could be a volume mapper
                    self.obj.mapper.SetColorModeToDirectScalars()
                    data.SetActiveVectors(None)  # need this to fix bug in #1066
                    # print("SetColorModeToDirectScalars for", key)
                except AttributeError:
                    pass
            else:
                data.SetActiveVectors(key)
        elif nc == 9:
            data.SetActiveTensors(key)
        else:
            vedo.logger.error(f"Cannot select array {key} with {nc} components")
            return self.obj

        try:
            # could be a volume mapper
            self.obj.mapper.SetArrayName(key)
            self.obj.mapper.ScalarVisibilityOn()
        except AttributeError:
            pass

        return self.obj

    def select_texture_coords(self, key: Union[int, str]) -> Any:
        """Select one specific array to be used as texture coordinates."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
        else:
            vedo.logger.warning("texture coordinates are only available for point data")
            return

        if isinstance(key, int):
            key = data.GetArrayName(key)
        data.SetTCoords(data.GetArray(key))
        return self.obj

    def select_normals(self, key: Union[int, str]) -> Any:
        """Select one specific normal array by its name to make it the "active" one."""
        if self.association == 0:
            data = self.obj.dataset.GetPointData()
            self.obj.mapper.SetScalarModeToUsePointData()
        else:
            data = self.obj.dataset.GetCellData()
            self.obj.mapper.SetScalarModeToUseCellData()

        if isinstance(key, int):
            key = data.GetArrayName(key)
        data.SetActiveNormals(key)
        return self.obj

    def print(self, **kwargs) -> None:
        """Print the array names available to terminal"""
        colors.printc(self.keys(), **kwargs)

    def __repr__(self) -> str:
        """Representation"""

        def _get_str(pd, header):
            out = f"\x1b[2m\x1b[1m\x1b[7m{header}"
            if pd.GetNumberOfArrays():
                if self.obj.name:
                    out += f" in {self.obj.name}"
                out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m"
                for i in range(pd.GetNumberOfArrays()):
                    varr = pd.GetArray(i)
                    out += f"\n\x1b[1m\x1b[4mArray name : {varr.GetName()}\x1b[0m"
                    out += "\nindex".ljust(15) + f": {i}"
                    t = varr.GetDataType()
                    if t in vtki.array_types:
                        out += "\ntype".ljust(15)
                        out += f": {vtki.array_types[t]}"
                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
                    out += "\nshape".ljust(15) + f": {shape}"
                    out += "\nrange".ljust(15) + f": {np.array(varr.GetRange())}"
                    out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}"
                    out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}"
                    out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB"
            else:
                out += " is empty.\x1b[0m"
            return out

        if self.association == 0:
            out = _get_str(self.obj.dataset.GetPointData(), "Point Data")
        elif self.association == 1:
            out = _get_str(self.obj.dataset.GetCellData(), "Cell Data")
        elif self.association == 2:
            pd = self.obj.dataset.GetFieldData()
            if pd.GetNumberOfArrays():
                out = "\x1b[2m\x1b[1m\x1b[7mMeta Data"
                if self.obj.name:
                    out += f" in {self.obj.name}"
                out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m"
                for i in range(pd.GetNumberOfArrays()):
                    varr = pd.GetAbstractArray(i)
                    out += f"\n\x1b[1m\x1b[4mEntry name : {varr.GetName()}\x1b[0m"
                    out += "\nindex".ljust(15) + f": {i}"
                    shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents())
                    out += "\nshape".ljust(15) + f": {shape}"

        return out


###############################################################################
class CommonAlgorithms:
    """Common algorithms."""

    @property
    def pointdata(self):
        """
        Create and/or return a `numpy.array` associated to points (vertices).
        A data array can be indexed either as a string or by an integer number.
        E.g.: `myobj.pointdata["arrayname"]`

        Usage:

            `myobj.pointdata.keys()` to return the available data array names

            `myobj.pointdata.select(name)` to make this array the active one

            `myobj.pointdata.remove(name)` to remove this array
        """
        return DataArrayHelper(self, 0)

    @property
    def celldata(self):
        """
        Create and/or return a `numpy.array` associated to cells (faces).
        A data array can be indexed either as a string or by an integer number.
        E.g.: `myobj.celldata["arrayname"]`

        Usage:

            `myobj.celldata.keys()` to return the available data array names

            `myobj.celldata.select(name)` to make this array the active one

            `myobj.celldata.remove(name)` to remove this array
        """
        return DataArrayHelper(self, 1)

    @property
    def metadata(self):
        """
        Create and/or return a `numpy.array` associated to neither points nor cells (field data).
        A data array can be indexed either as a string or by an integer number.
        E.g.: `myobj.metadata["arrayname"]`

        Usage:

            `myobj.metadata.keys()` to return the available data array names

            `myobj.metadata.select(name)` to make this array the active one

            `myobj.metadata.remove(name)` to remove this array
        """
        return DataArrayHelper(self, 2)

    def memory_address(self) -> int:
        """
        Return a unique memory address integer which may serve as the ID of the
        object, or be passed to c++ code.
        """
        # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/
        # https://github.com/tfmoraes/polydata_connectivity
        return int(self.dataset.GetAddressAsString("")[5:], 16)

    def memory_size(self) -> int:
        """Return the size in kilobytes of the object in memory."""
        return self.dataset.GetActualMemorySize()

    def modified(self) -> Self:
        """Use in conjunction with `tonumpy()` to update any modifications to the image array."""
        self.dataset.GetPointData().Modified()
        scals = self.dataset.GetPointData().GetScalars()
        if scals:
            scals.Modified()
        return self

    def box(self, scale=1, padding=0) -> "vedo.Mesh":
        """
        Return the bounding box as a new `Mesh` object.

        Arguments:
            scale : (float)
                box size can be scaled by a factor
            padding : (float, list)
                a constant padding can be added (can be a list `[padx,pady,padz]`)
        """
        b = self.bounds()
        if not utils.is_sequence(padding):
            padding = [padding, padding, padding]
        length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4]
        tol = (length + width + height) / 30000  # useful for boxing text
        pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol]
        bx = vedo.shapes.Box(
            pos,
            length * scale + padding[0],
            width * scale + padding[1],
            height * scale + padding[2],
            c="gray",
        )
        try:
            pr = vtki.vtkProperty()
            pr.DeepCopy(self.properties)
            bx.actor.SetProperty(pr)
            bx.properties = pr
        except (AttributeError, TypeError):
            pass
        bx.flat().lighting("off").wireframe(True)
        return bx

    def update_dataset(self, dataset, **kwargs) -> Self:
        """Update the dataset of the object with the provided VTK dataset."""
        self._update(dataset, **kwargs)
        return self

    def bounds(self) -> np.ndarray:
        """
        Get the object bounds.
        Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
        """
        try:  # this is very slow for large meshes
            pts = self.vertices
            xmin, ymin, zmin = np.min(pts, axis=0)
            xmax, ymax, zmax = np.max(pts, axis=0)
            return np.array([xmin, xmax, ymin, ymax, zmin, zmax])
        except (AttributeError, ValueError):
            return np.array(self.dataset.GetBounds())

    def xbounds(self, i=None) -> np.ndarray:
        """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1)."""
        b = self.bounds()
        if i is not None:
            return b[i]
        return np.array([b[0], b[1]])

    def ybounds(self, i=None) -> np.ndarray:
        """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1)."""
        b = self.bounds()
        if i == 0:
            return b[2]
        if i == 1:
            return b[3]
        return np.array([b[2], b[3]])

    def zbounds(self, i=None) -> np.ndarray:
        """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1)."""
        b = self.bounds()
        if i == 0:
            return b[4]
        if i == 1:
            return b[5]
        return np.array([b[4], b[5]])

    def diagonal_size(self) -> float:
        """Get the length of the diagonal of the bounding box."""
        b = self.bounds()
        return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)

    def average_size(self) -> float:
        """
        Calculate and return the average size of the object.
        This is the mean of the vertex distances from the center of mass.
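
        A minimal usage sketch (values are approximate):

        ```python
        from vedo import Sphere
        s = Sphere(r=2.0)
        # the vertices of a sphere lie at distance ~r from its center of mass
        print(s.average_size())  # ~2.0
        ```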
        """
        coords = self.vertices
        if coords.shape[0] == 0:
            return 0.0
        cm = np.mean(coords, axis=0)
        cc = coords - cm
        return np.mean(np.linalg.norm(cc, axis=1))

    def center_of_mass(self) -> np.ndarray:
        """Get the center of mass of the object."""
        if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)):
            return np.array(self.dataset.GetCenter())
        cmf = vtki.new("CenterOfMass")
        cmf.SetInputData(self.dataset)
        cmf.Update()
        c = cmf.GetCenter()
        return np.array(c)

    def copy_data_from(self, obj: Any) -> Self:
        """Copy all data (point and cell data) from this input object"""
        self.dataset.GetPointData().PassData(obj.dataset.GetPointData())
        self.dataset.GetCellData().PassData(obj.dataset.GetCellData())
        self.pipeline = utils.OperationNode(
            "copy_data_from",
            parents=[self, obj],
            comment=f"{obj.__class__.__name__}",
            shape="note",
            c="#ccc5b9",
        )
        return self

    def inputdata(self):
        """Obsolete, use `.dataset` instead."""
        colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y")
        return self.dataset

    @property
    def npoints(self):
        """Retrieve the number of points (or vertices)."""
        return self.dataset.GetNumberOfPoints()

    @property
    def nvertices(self):
        """Retrieve the number of vertices (or points)."""
        return self.dataset.GetNumberOfPoints()

    @property
    def ncells(self):
        """Retrieve the number of cells."""
        return self.dataset.GetNumberOfCells()

    def points(self, pts=None):
        """Obsolete, use `self.vertices` or `self.coordinates` instead."""
        if pts is None:  ### getter

            if warnings["points_getter"]:
                colors.printc(warnings["points_getter"], c="y")
                warnings["points_getter"] = ""
            return self.vertices

        else:  ### setter

            if warnings["points_setter"]:
                colors.printc(warnings["points_setter"], c="y")
                warnings["points_setter"] = ""

            pts = np.asarray(pts, dtype=np.float32)

            if pts.ndim == 1:
                ### getter by point index ###################
                indices = pts.astype(int)
                vpts = self.dataset.GetPoints()
                arr = utils.vtk2numpy(vpts.GetData())
                return arr[indices]  ###########

            ### setter ####################################
            if pts.shape[1] == 2:
                pts = np.c_[pts, np.zeros(pts.shape[0], dtype=np.float32)]
            arr = utils.numpy2vtk(pts, dtype=np.float32)

            vpts = self.dataset.GetPoints()
            vpts.SetData(arr)
            vpts.Modified()
            # reset mesh to identity matrix position/rotation:
            self.point_locator = None
            self.cell_locator = None
            self.line_locator = None
            self.transform = LinearTransform()
            return self

    @property
    def cell_centers(self):
        """
        Get the coordinates of the cell centers.

        Examples:
            - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py)

        See also: `CellCenters()`.
        """
        vcen = vtki.new("CellCenters")
        vcen.CopyArraysOff()
        vcen.SetInputData(self.dataset)
        vcen.Update()
        return utils.vtk2numpy(vcen.GetOutput().GetPoints().GetData())

    @property
    def lines(self):
        """
        Get lines connectivity ids as a python array
        formatted as `[[id0,id1], [id3,id4], ...]`

        See also: `lines_as_flat_array()`.
        """
        # Get cell connectivity ids as a 1D array. The vtk format is:
        # [nids1, id0 ... idn, niids2, id0 ... idm, etc].
        try:
            arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData())
        except AttributeError:
            return np.array([])
        i = 0
        conn = []
        n = len(arr1d)
        for _ in range(n):
            cell = [arr1d[i + k + 1] for k in range(arr1d[i])]
            conn.append(cell)
            i += arr1d[i] + 1
            if i >= n:
                break

        return conn  # cannot always make a numpy array of it!

    @property
    def lines_as_flat_array(self):
        """
        Get lines connectivity ids as a 1D numpy array.
        Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...]

        See also: `lines()`.
        """
        try:
            return utils.vtk2numpy(self.dataset.GetLines().GetData())
        except AttributeError:
            return np.array([])

    def mark_boundaries(self) -> Self:
        """
        Mark cells and vertices if they lie on a boundary.
        A new array called `BoundaryCells` is added to the object.
        """
        mb = vtki.new("MarkBoundaryFilter")
        mb.SetInputData(self.dataset)
        mb.Update()
        self.dataset.DeepCopy(mb.GetOutput())
        self.pipeline = utils.OperationNode("mark_boundaries", parents=[self])
        return self

    def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray:
        """
        Find cells that are within the specified bounds.
        """
        try:
            xbounds = list(xbounds.bounds())
        except AttributeError:
            pass

        if len(xbounds) == 6:
            bnds = xbounds
        else:
            bnds = list(self.bounds())
            if len(xbounds) == 2:
                bnds[0] = xbounds[0]
                bnds[1] = xbounds[1]
            if len(ybounds) == 2:
                bnds[2] = ybounds[0]
                bnds[3] = ybounds[1]
            if len(zbounds) == 2:
                bnds[4] = zbounds[0]
                bnds[5] = zbounds[1]

        cell_ids = vtki.vtkIdList()
        if not self.cell_locator:
            self.cell_locator = vtki.new("CellTreeLocator")
            self.cell_locator.SetDataSet(self.dataset)
            self.cell_locator.BuildLocator()
        self.cell_locator.FindCellsWithinBounds(bnds, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a line segment.
        """
        cell_ids = vtki.vtkIdList()
        if not self.cell_locator:
            self.cell_locator = vtki.new("CellTreeLocator")
            self.cell_locator.SetDataSet(self.dataset)
            self.cell_locator.BuildLocator()
        self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray:
        """
        Find cells that are intersected by a plane.
        """
        cell_ids = vtki.vtkIdList()
        if not self.cell_locator:
            self.cell_locator = vtki.new("CellTreeLocator")
            self.cell_locator.SetDataSet(self.dataset)
            self.cell_locator.BuildLocator()
        self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids)
        cids = []
        for i in range(cell_ids.GetNumberOfIds()):
            cid = cell_ids.GetId(i)
            cids.append(cid)
        return np.array(cids)

    def keep_cell_types(self, types=()):
        """
        Extract cells of a specific type.
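
        A minimal sketch (assuming `ug` is an `UnstructuredGrid` mixing several cell types;
        types can be given as integer VTK codes):

        ```python
        # 10 = VTK_TETRA, 12 = VTK_HEXAHEDRON (see vtkCellType.h)
        ug.keep_cell_types((10, 12))
        ```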

        Check the VTK cell types here:
        https://vtk.org/doc/nightly/html/vtkCellType_8h.html
        """
        fe = vtki.new("ExtractCellsByType")
        fe.SetInputData(self.dataset)
        for t in types:
            try:
                if utils.is_integer(t):
                    it = t
                else:
                    it = vtki.cell_types[t.upper()]
            except KeyError:
                vedo.logger.error(f"Cell type '{t}' not recognized")
                continue
            fe.AddCellType(it)
        fe.Update()
        self._update(fe.GetOutput())
        return self

    def map_cells_to_points(self, arrays=(), move=False) -> Self:
        """
        Interpolate cell data (i.e., data specified per cell or face)
        into point data (i.e., data specified at each vertex).
        The method of transformation is based on averaging the data values
        of all cells using a particular point.

        A custom list of arrays to be mapped can be passed in input.

        Set `move=True` to delete the original `celldata` array.
        """
        c2p = vtki.new("CellDataToPointData")
        c2p.SetInputData(self.dataset)
        if not move:
            c2p.PassCellDataOn()
        if arrays:
            c2p.ClearCellDataArrays()
            c2p.ProcessAllArraysOff()
            for arr in arrays:
                c2p.AddCellDataArray(arr)
        else:
            c2p.ProcessAllArraysOn()
        c2p.Update()
        self._update(c2p.GetOutput(), reset_locators=False)
        self.mapper.SetScalarModeToUsePointData()
        self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self])
        return self

    @property
    def vertices(self):
        """Return the vertices (points) coordinates."""
        try:
            # for polydata and unstructured grid
            varr = self.dataset.GetPoints().GetData()
        except (AttributeError, TypeError):
            try:
                # for RectilinearGrid, StructuredGrid
                vpts = vtki.vtkPoints()
                self.dataset.GetPoints(vpts)
                varr = vpts.GetData()
            except (AttributeError, TypeError):
                try:
                    # for ImageData
                    v2p = vtki.new("ImageToPoints")
                    v2p.SetInputData(self.dataset)
                    v2p.Update()
                    varr = v2p.GetOutput().GetPoints().GetData()
                except AttributeError:
                    return np.array([])

        return utils.vtk2numpy(varr)

    # setter
    @vertices.setter
    def vertices(self, pts):
        """Set vertices (points) coordinates."""
        pts = utils.make3d(pts)
        arr = utils.numpy2vtk(pts, dtype=np.float32)
        try:
            vpts = self.dataset.GetPoints()
            vpts.SetData(arr)
            vpts.Modified()
        except (AttributeError, TypeError):
            vedo.logger.error(f"Cannot set vertices for {type(self)}")
            return self
        # reset mesh to identity matrix position/rotation:
        self.point_locator = None
        self.cell_locator = None
        self.line_locator = None
        self.transform = LinearTransform()

    @property
    def coordinates(self):
        """Return the vertices (points) coordinates. Same as `vertices`."""
        return self.vertices

    @coordinates.setter
    def coordinates(self, pts):
        """Set vertices (points) coordinates. Same as `vertices`."""
        self.vertices = pts

    @property
    def cells_as_flat_array(self):
        """
        Get cell connectivity ids as a 1D numpy array.
        Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...]
        """
        try:
            # valid for unstructured grid
            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
        except AttributeError:
            # valid for polydata
            arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
        return arr1d

    @property
    def cells(self):
        """
        Get the cells connectivity ids as a numpy array.

        The output format is: `[[id0 ... idn], [id0 ... idm], etc]`.
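
        A minimal usage sketch:

        ```python
        from vedo import Mesh
        m = Mesh([[(0,0,0), (1,0,0), (1,1,0), (0,1,0)], [[0,1,2], [0,2,3]]])
        print(m.cells)  # [[0, 1, 2], [0, 2, 3]]
        ```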
        """
        try:
            # valid for unstructured grid
            arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData())
        except AttributeError:
            try:
                # valid for polydata
                arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData())
            except AttributeError:
                vedo.logger.warning(f"Cannot get cells for {type(self)}")
                return np.array([])

        # Get cell connectivity ids as a 1D array. vtk format is:
        # [nids1, id0 ... idn, niids2, id0 ... idm, etc].
        i = 0
        conn = []
        n = len(arr1d)
        if n:
            while True:
                cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)]
                conn.append(cell)
                i += arr1d[i] + 1
                if i >= n:
                    break
        return conn

    def cell_edge_neighbors(self):
        """
        Get the cell neighbor indices of each cell.

        Returns a python list of lists.
        """

        def face_to_edges(face):
            edges = []
            size = len(face)
            for i in range(1, size + 1):
                if i == size:
                    edges.append([face[i - 1], face[0]])
                else:
                    edges.append([face[i - 1], face[i]])
            return edges

        pd = self.dataset
        pd.BuildLinks()

        neicells = []
        for i, cell in enumerate(self.cells):
            nn = []
            for edge in face_to_edges(cell):
                neighbors = vtki.vtkIdList()
                pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors)
                if neighbors.GetNumberOfIds() > 0:
                    neighbor = neighbors.GetId(0)
                    nn.append(neighbor)
            neicells.append(nn)

        return neicells


    def map_points_to_cells(self, arrays=(), move=False) -> Self:
        """
        Interpolate point data (i.e., data specified per point or vertex)
        into cell data (i.e., data specified per cell).
        The method of transformation is based on averaging the data values
        of all points defining a particular cell.

        A custom list of arrays to be mapped can be passed in input.

        Set `move=True` to delete the original `pointdata` array.

        Examples:
            - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
        """
        p2c = vtki.new("PointDataToCellData")
        p2c.SetInputData(self.dataset)
        if not move:
            p2c.PassPointDataOn()
        if arrays:
            p2c.ClearPointDataArrays()
            p2c.ProcessAllArraysOff()
            for arr in arrays:
                p2c.AddPointDataArray(arr)
        else:
            p2c.ProcessAllArraysOn()
        p2c.Update()
        self._update(p2c.GetOutput(), reset_locators=False)
        self.mapper.SetScalarModeToUseCellData()
        self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self])
        return self

    def resample_data_from(self, source, tol=None, categorical=False) -> Self:
        """
        Resample point and cell data from another dataset.
        The output has the same structure but its point data have
        the resampled values from the source.

        Use `tol` to set the tolerance used to compute whether
        a point in the source is in a cell of the current object.
        Points without resampled values, and their cells, are marked as blank.
        If the data is categorical, then the resulting data will be determined
        by a nearest neighbor interpolation scheme.

        Example:
        ```python
        from vedo import *
        m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
        pts = m1.vertices
        ces = m1.cell_centers
        m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
        m1.celldata["yvalues"] = np.power(ces[:,1], 3)
        m2 = Mesh(dataurl+'bunny.obj')
        m2.resample_data_from(m1)
        # print(m2.pointdata["xvalues"])
        show(m1, m2, N=2, axes=1)
        ```
        """
        rs = vtki.new("ResampleWithDataSet")
        rs.SetInputData(self.dataset)
        rs.SetSourceData(source.dataset)

        rs.SetPassPointArrays(True)
        rs.SetPassCellArrays(True)
        rs.SetPassFieldArrays(True)
        rs.SetCategoricalData(categorical)

        rs.SetComputeTolerance(True)
        if tol:
            rs.SetComputeTolerance(False)
            rs.SetTolerance(tol)
        rs.Update()
        self._update(rs.GetOutput(), reset_locators=False)
        self.pipeline = utils.OperationNode(
            "resample_data_from",
            comment=f"{source.__class__.__name__}",
            parents=[self, source],
        )
        return self

    def interpolate_data_from(
        self,
        source,
        radius=None,
        n=None,
        kernel="shepard",
        exclude=("Normals",),
        on="points",
        null_strategy=1,
        null_value=0,
    ) -> Self:
        """
        Interpolate over source to port its data onto the current object using various kernels.

        If n (number of closest points to use) is set then radius value is ignored.

        Check out also:
            `probe()` which in many cases can be faster.

        Arguments:
            kernel : (str)
                available kernels are [shepard, gaussian, linear]
            null_strategy : (int)
                specify a strategy to use when encountering a "null" point
                during the interpolation process. Null points occur when the local neighborhood
                (of nearby points to interpolate from) is empty.

                - Case 0: an output array is created that marks points
                  as being valid (=1) or null (invalid =0), and the null_value is set as well
                - Case 1: the output data value(s) are set to the provided null_value
                - Case 2: simply use the closest point to perform the interpolation.
            null_value : (float)
                see above.
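
        A minimal sketch, porting point scalars from a fine mesh onto a coarser one
        (the array name "myscalars" is just illustrative):

        ```python
        from vedo import Sphere
        s1 = Sphere(res=50)
        s1.pointdata["myscalars"] = s1.vertices[:, 2]
        s2 = Sphere(res=12)
        s2.interpolate_data_from(s1, n=5, kernel="shepard")
        # s2.pointdata["myscalars"] now holds the interpolated values
        ```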

        Examples:
            - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
            - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
            - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
            - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)

        ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png)
        """
        if radius is None and not n:
            vedo.logger.error("in interpolate_data_from(): please set either radius or n")
            raise RuntimeError

        if on == "points":
            points = source.dataset
        elif on == "cells":
            c2p = vtki.new("CellDataToPointData")
            c2p.SetInputData(source.dataset)
            c2p.Update()
            points = c2p.GetOutput()
        else:
            vedo.logger.error("in interpolate_data_from(), 'on' must be 'points' or 'cells'")
            raise RuntimeError()

        locator = vtki.new("PointLocator")
        locator.SetDataSet(points)
        locator.BuildLocator()

        if kernel.lower() == "shepard":
            kern = vtki.new("ShepardKernel")
            kern.SetPowerParameter(2)
        elif kernel.lower() == "gaussian":
            kern = vtki.new("GaussianKernel")
            kern.SetSharpness(2)
        elif kernel.lower() == "linear":
            kern = vtki.new("LinearKernel")
        else:
            vedo.logger.error("available kernels are: [shepard, gaussian, linear]")
            raise RuntimeError()

        if n:
            kern.SetNumberOfPoints(n)
            kern.SetKernelFootprintToNClosest()
        else:
            kern.SetRadius(radius)
            kern.SetKernelFootprintToRadius()

        interpolator = vtki.new("PointInterpolator")
        interpolator.SetInputData(self.dataset)
        interpolator.SetSourceData(points)
        interpolator.SetKernel(kern)
        interpolator.SetLocator(locator)
        interpolator.PassFieldArraysOn()
        interpolator.SetNullPointsStrategy(null_strategy)
        interpolator.SetNullValue(null_value)
        interpolator.SetValidPointsMaskArrayName("ValidPointMask")
        for ex in exclude:
            interpolator.AddExcludedArray(ex)

        # remove arrays that are already present in the source
        # this is because the interpolator will ignore them otherwise
        for i in range(self.dataset.GetPointData().GetNumberOfArrays()):
            name = self.dataset.GetPointData().GetArrayName(i)
            if name not in exclude:
                self.dataset.GetPointData().RemoveArray(name)

        interpolator.Update()

        if on == "cells":
            p2c = vtki.new("PointDataToCellData")
            p2c.SetInputData(interpolator.GetOutput())
            p2c.Update()
            cpoly = p2c.GetOutput()
        else:
            cpoly = interpolator.GetOutput()

        self._update(cpoly, reset_locators=False)

        self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source])
        return self

    def add_ids(self) -> Self:
        """
        Generate point and cell ids arrays.

        Two new arrays are added to the mesh: `PointID` and `CellID`.
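
        A minimal usage sketch:

        ```python
        from vedo import Sphere
        s = Sphere().add_ids()
        print(s.pointdata["PointID"])  # [0, 1, 2, ...]
        print(s.celldata["CellID"])    # [0, 1, 2, ...]
        ```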
        """
        ids = vtki.new("IdFilter")
        ids.SetInputData(self.dataset)
        ids.PointIdsOn()
        ids.CellIdsOn()
        ids.FieldDataOff()
        ids.SetPointIdsArrayName("PointID")
        ids.SetCellIdsArrayName("CellID")
        ids.Update()
        self._update(ids.GetOutput(), reset_locators=False)
        self.pipeline = utils.OperationNode("add_ids", parents=[self])
        return self

    def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the gradient of the active scalar field as a numpy array.

        Arguments:
            input_array : (str)
                array of the scalars to compute the gradient,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).

        Examples:
            - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)

            ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png)
        """
        gra = vtki.new("GradientFilter")
        if on.startswith("p"):
            varr = self.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = self.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in gradient: unknown option {on}")
            raise RuntimeError

        if input_array is None:
            if varr.GetScalars():
                input_array = varr.GetScalars().GetName()
            else:
                vedo.logger.error(f"in gradient: no scalars found for {on}")
                raise RuntimeError

        gra.SetInputData(self.dataset)
        gra.SetInputScalars(tp, input_array)
        gra.SetResultArrayName("Gradient")
        gra.SetFasterApproximation(fast)
        gra.ComputeDivergenceOff()
        gra.ComputeVorticityOff()
        gra.ComputeGradientOn()
        gra.Update()
        # self._update(gra.GetOutput(), reset_locators=False)
        if on.startswith("p"):
            gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient"))
        else:
            gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient"))
        return gvecs

    def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the divergence of a vector field as a numpy array.

        Arguments:
            array_name : (str)
                name of the array of vectors to compute the divergence,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).
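
        A minimal sketch, using the radial vector field v=(x,y,z) defined on a sphere
        (the array name "Velocity" is just illustrative):

        ```python
        from vedo import Sphere
        s = Sphere()
        s.pointdata["Velocity"] = s.vertices  # one 3-vector per point
        div = s.divergence(array_name="Velocity", on="points")
        print(div.shape)  # (s.npoints,)
        ```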
        """
        div = vtki.new("GradientFilter")
        if on.startswith("p"):
            varr = self.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = self.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in divergence(): unknown option {on}")
            raise RuntimeError

        if array_name is None:
            if varr.GetVectors():
                array_name = varr.GetVectors().GetName()
            else:
                vedo.logger.error(f"in divergence(): no vectors found for {on}")
                raise RuntimeError

        div.SetInputData(self.dataset)
        div.SetInputScalars(tp, array_name)
        div.ComputeDivergenceOn()
        div.ComputeGradientOff()
        div.ComputeVorticityOff()
        div.SetDivergenceArrayName("Divergence")
        div.SetFasterApproximation(fast)
        div.Update()
        # self._update(div.GetOutput(), reset_locators=False)
        if on.startswith("p"):
            dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence"))
        else:
            dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence"))
        return dvecs

    def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray:
        """
        Compute and return the vorticity of a vector field as a numpy array.

        Arguments:
            array_name : (str)
                name of the array to compute the vorticity,
                if None the current active array is selected
            on : (str)
                compute either on 'points' or 'cells' data
            fast : (bool)
                if True, will use a less accurate algorithm
                that performs fewer derivative calculations (and is therefore faster).
        """
        vort = vtki.new("GradientFilter")
        if on.startswith("p"):
            varr = self.dataset.GetPointData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS
        elif on.startswith("c"):
            varr = self.dataset.GetCellData()
            tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS
        else:
            vedo.logger.error(f"in vorticity(): unknown option {on}")
            raise RuntimeError

        if array_name is None:
            if varr.GetVectors():
                array_name = varr.GetVectors().GetName()
            else:
                vedo.logger.error(f"in vorticity(): no vectors found for {on}")
                raise RuntimeError

        vort.SetInputData(self.dataset)
        vort.SetInputScalars(tp, array_name)
        vort.ComputeDivergenceOff()
        vort.ComputeGradientOff()
        vort.ComputeVorticityOn()
        vort.SetVorticityArrayName("Vorticity")
        vort.SetFasterApproximation(fast)
        vort.Update()
        if on.startswith("p"):
            vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity"))
        else:
            vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity"))
        return vvecs

    def probe(
        self,
        source,
        categorical=False,
        snap=False,
        tol=0,
    ) -> Self:
        """
        Takes a data set and probes its scalars at the specified points in space.

        Note that a mask is also output with valid/invalid points which can be accessed
        with `mesh.pointdata['ValidPointMask']`.

        Arguments:
            source : any dataset
                the data set to probe.
            categorical : bool
                control whether the source pointdata is to be treated as categorical.
            snap : bool
                snap to the cell with the closest point if no cell was found
            tol : float
                the tolerance to use when performing the probe.
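
        A minimal sketch, sampling a `Volume` at a few points
        (the file name is just an illustrative sample dataset):

        ```python
        from vedo import Volume, Points, dataurl
        vol = Volume(dataurl + "embryo.slc")
        pts = Points([(100, 100, 100), (120, 90, 80)])
        pts.probe(vol)
        print(pts.pointdata.keys())  # includes the probed scalars and 'ValidPointMask'
        ```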

        Check out also:
            `interpolate_data_from()` and `tovolume()`

        Examples:
            - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py)

            ![](https://vedo.embl.es/images/volumetric/probePoints.png)
        """
        probe_filter = vtki.new("ProbeFilter")
        probe_filter.SetSourceData(source.dataset)
        probe_filter.SetInputData(self.dataset)
        probe_filter.PassCellArraysOn()
        probe_filter.PassFieldArraysOn()
        probe_filter.PassPointArraysOn()
        probe_filter.SetCategoricalData(categorical)
        probe_filter.ComputeToleranceOff()
        if tol:
            probe_filter.ComputeToleranceOn()
            probe_filter.SetTolerance(tol)
        probe_filter.SetSnapToCellWithClosestPoint(snap)
        probe_filter.Update()
        self._update(probe_filter.GetOutput(), reset_locators=False)
        self.pipeline = utils.OperationNode("probe", parents=[self, source])
        self.pointdata.rename("vtkValidPointMask", "ValidPointMask")
        return self

    def compute_cell_size(self) -> Self:
        """
        Add to this object a cell data array
        containing the area, volume and edge length
        of the cells (when applicable to the object type).

        Array names are: `Area`, `Volume`, `Length`.
        """
        csf = vtki.new("CellSizeFilter")
        csf.SetInputData(self.dataset)
        csf.SetComputeArea(1)
        csf.SetComputeVolume(1)
        csf.SetComputeLength(1)
        csf.SetComputeVertexCount(0)
        csf.SetAreaArrayName("Area")
        csf.SetVolumeArrayName("Volume")
        csf.SetLengthArrayName("Length")
        csf.Update()
        self._update(csf.GetOutput(), reset_locators=False)
        return self

    def generate_random_data(self) -> Self:
        """Fill a dataset with random attributes"""
        gen = vtki.new("RandomAttributeGenerator")
        gen.SetInputData(self.dataset)
        gen.GenerateAllDataOn()
        gen.SetDataTypeToFloat()
        gen.GeneratePointNormalsOff()
        gen.GeneratePointTensorsOn()
        gen.GenerateCellScalarsOn()
        gen.Update()
        self._update(gen.GetOutput(), reset_locators=False)
        self.pipeline = utils.OperationNode("generate_random_data", parents=[self])
        return self

    def integrate_data(self) -> dict:
        """
        Integrate point and cell data arrays while computing length,
        area or volume of the domain. It works for 1D, 2D or 3D cells.

        For volumetric datasets, this filter ignores all but 3D cells.
        It will not compute the volume contained in a closed surface.

        Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`,
        which contain the integration result for the corresponding attributes.

        Examples:
            ```python
            from vedo import *
            surf = Sphere(res=100)
            surf.pointdata['scalars'] = np.ones(surf.npoints)
            data = surf.integrate_data()
            print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
            ```

            ```python
            from vedo import *

            xcoords1 = np.arange(0, 2.2, 0.2)
            xcoords2 = sqrt(np.arange(0, 4.2, 0.2))

            ycoords = np.arange(0, 1.2, 0.2)

            surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
            surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)

            surf1.pointdata['scalars'] = surf1.vertices[:,2]
            surf2.pointdata['scalars'] = surf2.vertices[:,2]

            data1 = surf1.integrate_data()
            data2 = surf2.integrate_data()

            print(data1['pointdata']['scalars'],
                "is equal to",
                data2['pointdata']['scalars'],
                "even if the grids are different!",
                "(= the volume under the surface)"
            )
            show(surf1, surf2, N=2, axes=1).close()
            ```
        """
        vinteg = vtki.new("IntegrateAttributes")
        vinteg.SetInputData(self.dataset)
        vinteg.Update()
        ugrid = vedo.UnstructuredGrid(vinteg.GetOutput())
        data = dict(
            pointdata=ugrid.pointdata.todict(),
            celldata=ugrid.celldata.todict(),
            metadata=ugrid.metadata.todict(),
        )
        return data

    def write(self, filename, binary=True) -> None:
        """Write object to file."""
        out = vedo.file_io.write(self, filename, binary)
        out.pipeline = utils.OperationNode(
            "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c"
        )

    def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh":
        """
        Extract boundary geometry from dataset (or convert data to polygonal type).

        Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds`
        to keep track of the original mesh elements.

        Arguments:
            bounds : (list)
                specify a sub-region to extract
            shrink : (float)
                shrink the cells to a fraction of their original size
        """
        geo = vtki.new("GeometryFilter")

        if shrink:
            sf = vtki.new("ShrinkFilter")
            sf.SetInputData(self.dataset)
            sf.SetShrinkFactor(shrink)
            sf.Update()
            geo.SetInputData(sf.GetOutput())
        else:
            geo.SetInputData(self.dataset)

        geo.SetPassThroughCellIds(1)
        geo.SetPassThroughPointIds(1)
        geo.SetOriginalCellIdsName("OriginalCellIds")
        geo.SetOriginalPointIdsName("OriginalPointIds")
        geo.SetNonlinearSubdivisionLevel(1)
        # geo.MergingOff() # crashes on StructuredGrids
        if bounds:
            geo.SetExtent(bounds)
            geo.ExtentClippingOn()
        geo.Update()
        msh = vedo.mesh.Mesh(geo.GetOutput())
        msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b")
        return msh

    def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume":
        """
        Compute the `Volume` object whose voxels contain the signed distance from
        the object. The calling object must have "Normals" defined.

        Arguments:
            bounds : (list, actor)
                bounding box sizes
            dims : (list)
                dimensions (nr. of voxels) of the output volume.
            invert : (bool)
                flip the sign
            max_radius : (float)
                specify how far out to propagate distance calculation

        Examples:
            - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py)

            ![](https://vedo.embl.es/images/basic/distance2mesh.png)
        """
        if bounds is None:
            bounds = self.bounds()
        if max_radius is None:
            max_radius = self.diagonal_size() / 2
        dist = vtki.new("SignedDistance")
        dist.SetInputData(self.dataset)
        dist.SetRadius(max_radius)
        dist.SetBounds(bounds)
        dist.SetDimensions(dims)
        dist.Update()
        img = dist.GetOutput()
        if invert:
            mat = vtki.new("ImageMathematics")
            mat.SetInput1Data(img)
            mat.SetOperationToMultiplyByK()
            mat.SetConstantK(-1)
            mat.Update()
            img = mat.GetOutput()

        vol = vedo.Volume(img)
        vol.name = "SignedDistanceVolume"
        vol.pipeline = utils.OperationNode(
            "signed_distance",
            parents=[self],
            comment=f"dims={tuple(vol.dimensions())}",
            c="#e9c46a:#0096c7",
        )
        return vol

    def unsigned_distance(
            self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume":
        """
        Compute the `Volume` object whose voxels contain the unsigned distance.
        """
        dist = vtki.new("UnsignedDistance")
        dist.SetInputData(self.dataset)
        dist.SetDimensions(dims)

        if len(bounds) == 6:
            dist.SetBounds(bounds)
        # elif bounds == "auto":
        #     dist.AdjustBoundsOn()
        else:
            dist.SetBounds(self.bounds())
        if not max_radius:
            max_radius = self.diagonal_size() / 10
        dist.SetRadius(max_radius)

        if self.point_locator:
            dist.SetLocator(self.point_locator)

        if cap_value is not None:
            dist.CappingOn()
            dist.SetCapValue(cap_value)
        dist.SetOutputScalarTypeToFloat()
        dist.Update()
        vol = vedo.Volume(dist.GetOutput())
        vol.name = "UnsignedDistanceVolume"
        vol.pipeline = utils.OperationNode(
            "unsigned_distance", parents=[self], c="#e9c46a:#0096c7")
        return vol

    def smooth_data(self,
            niter=10, relaxation_factor=0.1, strategy=0, mask=None,
            mode="distance2",
            exclude=("Normals", "TextureCoordinates"),
        ) -> Self:
        """
        Smooth point attribute data using distance weighted Laplacian kernel.
        The effect is to blur regions of high variation and emphasize low variation regions.

        A central concept of this method is the point smoothing stencil.
        A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge.
        To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using
        the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1).
        This averaging process is repeated until the maximum number of iterations is reached.

        The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion.
        The a(i+1) attributes are determined from the a(i) attributes as follows:
            a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))

        Convergence occurs faster for larger relaxation factors.
        Typically a small number of iterations is required for large relaxation factors,
        and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be
        adequate (i.e., just a distance weighted average is computed).

        Warning:
            Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1;
            after attribute smoothing this constraint is likely to be violated.
            Other vectors and tensors may suffer from similar issues.
            In such a situation, specify `exclude=...` which will not be smoothed (and simply passed through to the output).
            Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance
            between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil).
            The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n,
            where n is the number of connected points in the stencil.
            The smoothing process reduces high frequency information in the data attributes.
            With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost,
            and the attributes will move towards an "average" value.
            While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use
            an image-based algorithm to perform data smoothing.
            To determine boundary points in polygonal data, edges used by only one cell are considered boundary
            (and hence the associated points defining the edge).

        Arguments:
            niter : (int)
                number of iterations
            relaxation_factor : (float)
                relaxation factor controlling the amount of Laplacian smoothing applied
            strategy : (int)
                strategy to use for Laplacian smoothing
                - 0: use all points, all point data attributes are smoothed
                - 1: smooth all point attribute data except those on the boundary
                - 2: only point data connected to a boundary point are smoothed
            mask : (str, np.ndarray)
                array to be used as a mask (the strategy keyword is then ignored)
            mode : (str)
                smoothing mode, either "distance2", "distance" or "average"
                - distance**2 weighted (i.e., 1/r**2 interpolation weights)
                - distance weighted (i.e., 1/r) approach;
                - simple average of all connected points in the stencil
            exclude : (list)
                list of arrays to be excluded from smoothing
        """
        try:
            saf = vtki.new("AttributeSmoothingFilter")
        except:
            vedo.logger.error("smooth_data() only available in VTK>=9.3.0")
            return self
        saf.SetInputData(self.dataset)
        saf.SetRelaxationFactor(relaxation_factor)
        saf.SetNumberOfIterations(niter)

        for ex in exclude:
            saf.AddExcludedArray(ex)

        if mode == "distance":
            saf.SetWeightsTypeToDistance()
        elif mode == "distance2":
            saf.SetWeightsTypeToDistance2()
        elif mode == "average":
            saf.SetWeightsTypeToAverage()
        else:
            vedo.logger.error(f"smooth_data(): unknown mode {mode}")
            raise TypeError

        saf.SetSmoothingStrategy(strategy)
        if mask is not None:
            saf.SetSmoothingStrategyToSmoothingMask()
            if isinstance(mask, str):
                mask_ = self.dataset.GetPointData().GetArray(mask)
                if not mask_:
                    vedo.logger.error(f"smooth_data(): mask array {mask} not found")
                    return self
                mask_array = vtki.vtkUnsignedCharArray()
                mask_array.ShallowCopy(mask_)
                mask_array.SetName(mask_.GetName())
            else:
                mask_array = utils.numpy2vtk(mask, dtype=np.uint8)
            saf.SetSmoothingMask(mask_array)

        saf.Update()

        self._update(saf.GetOutput())
        self.pipeline = utils.OperationNode(
            "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b"
        )
        return self

    def compute_streamlines(
        self,
        seeds: Any,
        integrator="rk4",
        direction="forward",
        initial_step_size=None,
        max_propagation=None,
        max_steps=10000,
        step_length=0,
        surface_constrained=False,
        compute_vorticity=False,
    ) -> Union["vedo.Lines", None]:
        """
        Integrate a vector field to generate streamlines.

        Arguments:
            seeds : (Mesh, Points, list)
                starting points of the streamlines
            integrator : (str)
                type of integration method to be used:
                - "rk2" (Runge-Kutta 2)
                - "rk4" (Runge-Kutta 4)
                - "rk45" (Runge-Kutta 45)
            direction : (str)
                direction of integration, either "forward", "backward" or "both"
            initial_step_size : (float)
                initial step size used for line integration
            max_propagation : (float)
                maximum length of a streamline expressed in absolute units
            max_steps : (int)
                maximum number of steps for a streamline
            step_length : (float)
                maximum length of a step expressed in absolute units
            surface_constrained : (bool)
                whether to stop integrating when the streamline leaves the surface
            compute_vorticity : (bool)
                whether to compute the vorticity at each streamline point
        """
        b = self.dataset.GetBounds()
        size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3
        if initial_step_size is None:
            initial_step_size = size / 1000.0

        if max_propagation is None:
            max_propagation = size * 2

        if utils.is_sequence(seeds):
            seeds = vedo.Points(seeds)

        sti = vtki.new("StreamTracer")
        sti.SetSourceData(seeds.dataset)
        if isinstance(self, vedo.RectilinearGrid):
            sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset)
        else:
            sti.SetInputDataObject(self.dataset)

        sti.SetInitialIntegrationStep(initial_step_size)
        sti.SetComputeVorticity(compute_vorticity)
        sti.SetMaximumNumberOfSteps(max_steps)
        sti.SetMaximumPropagation(max_propagation)
        sti.SetSurfaceStreamlines(surface_constrained)
        if step_length:
            sti.SetMaximumIntegrationStep(step_length)

        if "for" in direction:
            sti.SetIntegrationDirectionToForward()
        elif "back" in direction:
            sti.SetIntegrationDirectionToBackward()
        elif "both" in direction:
            sti.SetIntegrationDirectionToBoth()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}")
            return None

        if integrator == "rk2":
            sti.SetIntegratorTypeToRungeKutta2()
        elif integrator == "rk4":
            sti.SetIntegratorTypeToRungeKutta4()
        elif integrator == "rk45":
            sti.SetIntegratorTypeToRungeKutta45()
        else:
            vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}")
            return None

        sti.Update()

        stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4)
        stlines.name = "StreamLines"
        self.pipeline = utils.OperationNode(
            "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b"
        )
        return stlines

###############################################################################
class PointAlgorithms(CommonAlgorithms):
    """Methods for point clouds."""

    def apply_transform(self, LT: Any, deep_copy=True) -> Self:
        """
        Apply a linear or non-linear transformation to the mesh polygonal data.
1774 1775 Example: 1776 ```python 1777 from vedo import Cube, show, settings 1778 settings.use_parallel_projection = True 1779 c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5) 1780 T = c1.transform # rotate by 25 degrees, place at (2,1) 1781 c2 = Cube().c('red4').wireframe().lw(10).lighting('off') 1782 c2.apply_transform(T) 1783 show(c1, c2, "The 2 cubes should overlap!", axes=1).close() 1784 ``` 1785 1786 ![](https://vedo.embl.es/images/feats/apply_transform.png) 1787 """ 1788 if self.dataset.GetNumberOfPoints() == 0: 1789 return self 1790 1791 if isinstance(LT, LinearTransform): 1792 LT_is_linear = True 1793 tr = LT.T 1794 if LT.is_identity(): 1795 return self 1796 1797 elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT): 1798 LT_is_linear = True 1799 LT = LinearTransform(LT) 1800 tr = LT.T 1801 if LT.is_identity(): 1802 return self 1803 1804 elif isinstance(LT, NonLinearTransform): 1805 LT_is_linear = False 1806 tr = LT.T 1807 self.transform = LT # reset 1808 1809 elif isinstance(LT, vtki.vtkThinPlateSplineTransform): 1810 LT_is_linear = False 1811 tr = LT 1812 self.transform = NonLinearTransform(LT) # reset 1813 1814 else: 1815 vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}") 1816 return self 1817 1818 ################ 1819 if LT_is_linear: 1820 try: 1821 # self.transform might still not be linear 1822 self.transform.concatenate(LT) 1823 except AttributeError: 1824 # in that case reset it 1825 self.transform = LinearTransform() 1826 1827 ################ 1828 if isinstance(self.dataset, vtki.vtkPolyData): 1829 tp = vtki.new("TransformPolyDataFilter") 1830 elif isinstance(self.dataset, vtki.vtkUnstructuredGrid): 1831 tp = vtki.new("TransformFilter") 1832 tp.TransformAllInputVectorsOn() 1833 # elif isinstance(self.dataset, vtki.vtkImageData): 1834 # tp = vtki.new("ImageReslice") 1835 # tp.SetInterpolationModeToCubic() 1836 # tp.SetResliceTransform(tr) 1837 else: 1838 vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}") 1839 return self 1840 1841 tp.SetTransform(tr) 1842 tp.SetInputData(self.dataset) 1843 tp.Update() 1844 out = tp.GetOutput() 1845 1846 if deep_copy: 1847 self.dataset.DeepCopy(out) 1848 else: 1849 self.dataset.ShallowCopy(out) 1850 1851 # reset the locators 1852 self.point_locator = None 1853 self.cell_locator = None 1854 self.line_locator = None 1855 return self 1856 1857 def apply_transform_from_actor(self) -> LinearTransform: 1858 """ 1859 Apply the current transformation of the actor to the data. 1860 Useful when manually moving an actor (e.g. when pressing "a"). 1861 Returns the `LinearTransform` object. 1862 1863 Note that this method is automatically called when the window is closed, 1864 or the interactor style is changed.
1865 """ 1866 M = self.actor.GetMatrix() 1867 self.apply_transform(M) 1868 iden = vtki.vtkMatrix4x4() 1869 self.actor.PokeMatrix(iden) 1870 return LinearTransform(M) 1871 1872 def pos(self, x=None, y=None, z=None) -> Self: 1873 """Set/Get object position.""" 1874 if x is None: # get functionality 1875 return self.transform.position 1876 1877 if z is None and y is None: # assume x is of the form (x,y,z) 1878 if len(x) == 3: 1879 x, y, z = x 1880 else: 1881 x, y = x 1882 z = 0 1883 elif z is None: # assume x,y is of the form x, y 1884 z = 0 1885 1886 q = self.transform.position 1887 delta = [x, y, z] - q 1888 if delta[0] == delta[1] == delta[2] == 0: 1889 return self 1890 LT = LinearTransform().translate(delta) 1891 return self.apply_transform(LT) 1892 1893 def shift(self, dx=0, dy=0, dz=0) -> Self: 1894 """Add a vector to the current object position.""" 1895 if utils.is_sequence(dx): 1896 dx, dy, dz = utils.make3d(dx) 1897 if dx == dy == dz == 0: 1898 return self 1899 LT = LinearTransform().translate([dx, dy, dz]) 1900 return self.apply_transform(LT) 1901 1902 def x(self, val=None) -> Self: 1903 """Set/Get object position along x axis.""" 1904 p = self.transform.position 1905 if val is None: 1906 return p[0] 1907 self.pos(val, p[1], p[2]) 1908 return self 1909 1910 def y(self, val=None)-> Self: 1911 """Set/Get object position along y axis.""" 1912 p = self.transform.position 1913 if val is None: 1914 return p[1] 1915 self.pos(p[0], val, p[2]) 1916 return self 1917 1918 def z(self, val=None) -> Self: 1919 """Set/Get object position along z axis.""" 1920 p = self.transform.position 1921 if val is None: 1922 return p[2] 1923 self.pos(p[0], p[1], val) 1924 return self 1925 1926 def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self: 1927 """ 1928 Rotate around an arbitrary `axis` passing through `point`. 1929 1930 Example: 1931 ```python 1932 from vedo import * 1933 c1 = Cube() 1934 c2 = c1.clone().c('violet').alpha(0.5) # copy of c1 1935 v = vector(0.2,1,0) 1936 p = vector(1,0,0) # axis passes through this point 1937 c2.rotate(90, axis=v, point=p) 1938 l = Line(-v+p, v+p).lw(3).c('red') 1939 show(c1, l, c2, axes=1).close() 1940 ``` 1941 1942 ![](https://vedo.embl.es/images/feats/rotate_axis.png) 1943 """ 1944 LT = LinearTransform() 1945 LT.rotate(angle, axis, point, rad) 1946 return self.apply_transform(LT) 1947 1948 def rotate_x(self, angle: float, rad=False, around=None) -> Self: 1949 """ 1950 Rotate around x-axis. If angle is in radians set `rad=True`. 1951 1952 Use `around` to define a pivoting point. 1953 """ 1954 if angle == 0: 1955 return self 1956 LT = LinearTransform().rotate_x(angle, rad, around) 1957 return self.apply_transform(LT) 1958 1959 def rotate_y(self, angle: float, rad=False, around=None) -> Self: 1960 """ 1961 Rotate around y-axis. If angle is in radians set `rad=True`. 1962 1963 Use `around` to define a pivoting point. 1964 """ 1965 if angle == 0: 1966 return self 1967 LT = LinearTransform().rotate_y(angle, rad, around) 1968 return self.apply_transform(LT) 1969 1970 def rotate_z(self, angle: float, rad=False, around=None) -> Self: 1971 """ 1972 Rotate around z-axis. If angle is in radians set `rad=True`. 1973 1974 Use `around` to define a pivoting point. 
1975 """ 1976 if angle == 0: 1977 return self 1978 LT = LinearTransform().rotate_z(angle, rad, around) 1979 return self.apply_transform(LT) 1980 1981 def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self: 1982 """ 1983 Reorient the object to point to a new direction from an initial one. 1984 If `initaxis` is None, the object will be assumed in its "default" orientation. 1985 If `xyplane` is True, the object will be rotated to lie on the xy plane. 1986 1987 Use `rotation` to first rotate the object around its `initaxis`. 1988 """ 1989 q = self.transform.position 1990 LT = LinearTransform() 1991 LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane) 1992 return self.apply_transform(LT) 1993 1994 def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.array]: 1995 """ 1996 Set/get object's scaling factor. 1997 1998 Arguments: 1999 s : (list, float) 2000 scaling factor(s). 2001 reset : (bool) 2002 if True previous scaling factors are ignored. 2003 origin : (bool) 2004 if True scaling is applied with respect to object's position, 2005 otherwise is applied respect to (0,0,0). 2006 2007 Note: 2008 use `s=(sx,sy,sz)` to scale differently in the three coordinates. 2009 """ 2010 if s is None: 2011 return np.array(self.transform.T.GetScale()) 2012 2013 if not utils.is_sequence(s): 2014 s = [s, s, s] 2015 2016 LT = LinearTransform() 2017 if reset: 2018 old_s = np.array(self.transform.T.GetScale()) 2019 LT.scale(s / old_s) 2020 else: 2021 if origin is True: 2022 LT.scale(s, origin=self.transform.position) 2023 elif origin is False: 2024 LT.scale(s, origin=False) 2025 else: 2026 LT.scale(s, origin=origin) 2027 2028 return self.apply_transform(LT) 2029 2030 2031############################################################################### 2032class VolumeAlgorithms(CommonAlgorithms): 2033 """Methods for Volume objects.""" 2034 2035 def bounds(self) -> np.ndarray: 2036 """ 2037 Get the object bounds. 2038 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 2039 """ 2040 # OVERRIDE CommonAlgorithms.bounds() which is too slow 2041 return np.array(self.dataset.GetBounds()) 2042 2043 def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh": 2044 """ 2045 Return an `Mesh` isosurface extracted from the `Volume` object. 2046 2047 Set `value` as single float or list of values to draw the isosurface(s). 2048 Use flying_edges for faster results (but sometimes can interfere with `smooth()`). 
2049 2050 Examples: 2051 - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py) 2052 2053 ![](https://vedo.embl.es/images/volumetric/isosurfaces.png) 2054 """ 2055 scrange = self.dataset.GetScalarRange() 2056 2057 if flying_edges: 2058 cf = vtki.new("FlyingEdges3D") 2059 cf.InterpolateAttributesOn() 2060 else: 2061 cf = vtki.new("ContourFilter") 2062 cf.UseScalarTreeOn() 2063 2064 cf.SetInputData(self.dataset) 2065 cf.ComputeNormalsOn() 2066 2067 if utils.is_sequence(value): 2068 cf.SetNumberOfContours(len(value)) 2069 for i, t in enumerate(value): 2070 cf.SetValue(i, t) 2071 else: 2072 if value is None: 2073 value = (2 * scrange[0] + scrange[1]) / 3.0 2074 # print("automatic isosurface value =", value) 2075 cf.SetValue(0, value) 2076 2077 cf.Update() 2078 poly = cf.GetOutput() 2079 2080 out = vedo.mesh.Mesh(poly, c=None).phong() 2081 out.mapper.SetScalarRange(scrange[0], scrange[1]) 2082 2083 out.pipeline = utils.OperationNode( 2084 "isosurface", 2085 parents=[self], 2086 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2087 c="#4cc9f0:#e9c46a", 2088 ) 2089 return out 2090 2091 def isosurface_discrete( 2092 self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0, 2093 ) -> "vedo.mesh.Mesh": 2094 """ 2095 Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 2096 3D version of the multiple objects/labels Surface Nets algorithm. 2097 The input is a 3D image (i.e., volume) where each voxel is labeled 2098 (integer labels are preferred to real values), and the output data is a polygonal mesh separating 2099 labeled regions / objects. 2100 (Note that on output each region [corresponding to a different segmented object] will share 2101 points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them). 2102 2103 Besides the output geometry defining the surface net, the filter outputs a two-component celldata array indicating 2104 the labels on either side of the polygons composing the output Mesh. 2105 (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. 2106 The name of this celldata array is "BoundaryLabels"). 2107 2108 Arguments: 2109 values : (float, list) 2110 single value or list of values to draw the isosurface(s). 2111 background_label : (float) 2112 this value specifies the label value to use when referencing the background 2113 region outside of any of the specified regions. 2114 internal_boundaries : (bool, list) 2115 if False, the output will only contain the outer boundary surface; internal surfaces between labels are removed. 2116 If a list of integers is provided, only the boundaries between the specified labels will be extracted. 2117 use_quads : (bool) 2118 if True, the output polygons will be quads. If False, the output polygons will be triangles. 2119 nsmooth : (int) 2120 number of iterations of smoothing (0 means no smoothing).
2121 2122 Examples: 2123 - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py) 2124 """ 2125 logger = vtki.get_class("Logger") 2126 logger.SetStderrVerbosity(logger.VERBOSITY_ERROR) 2127 2128 snets = vtki.new("SurfaceNets3D") 2129 snets.SetInputData(self.dataset) 2130 2131 if nsmooth: 2132 snets.SmoothingOn() 2133 snets.AutomaticSmoothingConstraintsOn() 2134 snets.GetSmoother().SetNumberOfIterations(nsmooth) 2135 # snets.GetSmoother().SetRelaxationFactor(relaxation_factor) 2136 # snets.GetSmoother().SetConstraintDistance(constraint_distance) 2137 else: 2138 snets.SmoothingOff() 2139 2140 if internal_boundaries is False: 2141 snets.SetOutputStyleToBoundary() 2142 elif internal_boundaries is True: 2143 snets.SetOutputStyleToDefault() 2144 elif utils.is_sequence(internal_boundaries): 2145 snets.SetOutputStyleToSelected() 2146 snets.InitializeSelectedLabelsList() 2147 for val in internal_boundaries: 2148 snets.AddSelectedLabel(val) 2149 else: 2150 vedo.logger.error("isosurface_discrete(): unknown boundaries option") 2151 2152 n = len(values) 2153 snets.SetNumberOfContours(n) 2154 snets.SetNumberOfLabels(n) 2155 2156 if background_label is not None: 2157 snets.SetBackgroundLabel(background_label) 2158 2159 for i, val in enumerate(values): 2160 snets.SetValue(i, val) 2161 2162 if use_quads: 2163 snets.SetOutputMeshTypeToQuads() 2164 else: 2165 snets.SetOutputMeshTypeToTriangles() 2166 snets.Update() 2167 2168 out = vedo.mesh.Mesh(snets.GetOutput()) 2169 out.pipeline = utils.OperationNode( 2170 "isosurface_discrete", 2171 parents=[self], 2172 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2173 c="#4cc9f0:#e9c46a", 2174 ) 2175 2176 logger.SetStderrVerbosity(logger.VERBOSITY_INFO) 2177 return out 2178 2179 2180 def legosurface( 2181 self, 2182 vmin=None, 2183 vmax=None, 2184 invert=False, 2185 boundary=True, 2186 array_name="input_scalars", 2187 ) -> "vedo.mesh.Mesh": 2188 """ 2189 Represent an object - typically a `Volume` - as lego blocks (voxels). 2190 By default, colors correspond to the volume's scalars. 2191 Returns a `Mesh` object. 2192 2193 Arguments: 2194 vmin : (float) 2195 the lower threshold, voxels below this value are not shown. 2196 vmax : (float) 2197 the upper threshold, voxels above this value are not shown.
2198 boundary : (bool) 2199 controls whether to include cells that are partially inside 2200 array_name : (int, str) 2201 name or index of the scalar array to be considered 2202 2203 Examples: 2204 - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py) 2205 2206 ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png) 2207 """ 2208 imp_dataset = vtki.new("ImplicitDataSet") 2209 imp_dataset.SetDataSet(self.dataset) 2210 window = vtki.new("ImplicitWindowFunction") 2211 window.SetImplicitFunction(imp_dataset) 2212 2213 srng = list(self.dataset.GetScalarRange()) 2214 if vmin is not None: 2215 srng[0] = vmin 2216 if vmax is not None: 2217 srng[1] = vmax 2218 if not boundary: 2219 tol = 0.00001 * (srng[1] - srng[0]) 2220 srng[0] -= tol 2221 srng[1] += tol 2222 window.SetWindowRange(srng) 2223 # print("legosurface window range:", srng) 2224 2225 extract = vtki.new("ExtractGeometry") 2226 extract.SetInputData(self.dataset) 2227 extract.SetImplicitFunction(window) 2228 extract.SetExtractInside(invert) 2229 extract.SetExtractBoundaryCells(boundary) 2230 extract.Update() 2231 2232 gf = vtki.new("GeometryFilter") 2233 gf.SetInputData(extract.GetOutput()) 2234 gf.Update() 2235 2236 m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat() 2237 m.map_points_to_cells() 2238 m.celldata.select(array_name) 2239 2240 m.pipeline = utils.OperationNode( 2241 "legosurface", 2242 parents=[self], 2243 comment=f"array: {array_name}", 2244 c="#4cc9f0:#e9c46a", 2245 ) 2246 return m 2247 2248 def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh": 2249 """ 2250 Build a polygonal Mesh from the current object. 2251 2252 If `fill=True`, the interior faces of all the cells are created. 2253 (setting a `shrink` value slightly smaller than the default 1.0 2254 can avoid flickering due to internal adjacent faces). 2255 2256 If `fill=False`, only the boundary faces will be generated. 2257 """ 2258 gf = vtki.new("GeometryFilter") 2259 if fill: 2260 sf = vtki.new("ShrinkFilter") 2261 sf.SetInputData(self.dataset) 2262 sf.SetShrinkFactor(shrink) 2263 sf.Update() 2264 gf.SetInputData(sf.GetOutput()) 2265 gf.Update() 2266 poly = gf.GetOutput() 2267 if shrink == 1.0: 2268 clean_poly = vtki.new("CleanPolyData") 2269 clean_poly.PointMergingOn() 2270 clean_poly.ConvertLinesToPointsOn() 2271 clean_poly.ConvertPolysToLinesOn() 2272 clean_poly.ConvertStripsToPolysOn() 2273 clean_poly.SetInputData(poly) 2274 clean_poly.Update() 2275 poly = clean_poly.GetOutput() 2276 else: 2277 gf.SetInputData(self.dataset) 2278 gf.Update() 2279 poly = gf.GetOutput() 2280 2281 msh = vedo.mesh.Mesh(poly).flat() 2282 msh.scalarbar = self.scalarbar 2283 lut = utils.ctf2lut(self) 2284 if lut: 2285 msh.mapper.SetLookupTable(lut) 2286 2287 msh.pipeline = utils.OperationNode( 2288 "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a" 2289 ) 2290 return msh
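The three extraction methods above (`isosurface`, `legosurface` and `tomesh`) all turn a volumetric dataset, typically a `Volume`, into a `vedo.Mesh`. A minimal usage sketch, not taken from the original examples (the synthetic radial field and the chosen threshold values are illustrative):

```python
import numpy as np
from vedo import Volume, show

# synthetic volume: radial distance field sampled on a 40^3 grid
x, y, z = np.mgrid[-1:1:40j, -1:1:40j, -1:1:40j]
vol = Volume(np.sqrt(x**2 + y**2 + z**2))

iso  = vol.isosurface(0.7)                  # smooth isosurface at value 0.7
lego = vol.legosurface(vmin=0.3, vmax=0.6)  # voxels with values in [0.3, 0.6]
msh  = vol.tomesh(fill=True, shrink=0.95)   # full polygonal representation

show(iso, lego, msh, N=3, axes=1).close()
```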
45class DataArrayHelper: 46 # Internal use only. 47 # Helper class to manage data associated to either 48 # points (or vertices) and cells (or faces). 49 def __init__(self, obj, association): 50 51 self.obj = obj 52 self.association = association 53 54 def __getitem__(self, key): 55 56 if self.association == 0: 57 data = self.obj.dataset.GetPointData() 58 59 elif self.association == 1: 60 data = self.obj.dataset.GetCellData() 61 62 elif self.association == 2: 63 data = self.obj.dataset.GetFieldData() 64 65 varr = data.GetAbstractArray(key) 66 if isinstance(varr, vtki.vtkStringArray): 67 if isinstance(key, int): 68 key = data.GetArrayName(key) 69 n = varr.GetNumberOfValues() 70 narr = [varr.GetValue(i) for i in range(n)] 71 return narr 72 ########### 73 74 else: 75 raise RuntimeError() 76 77 if isinstance(key, int): 78 key = data.GetArrayName(key) 79 80 arr = data.GetArray(key) 81 if not arr: 82 return None 83 return utils.vtk2numpy(arr) 84 85 def __setitem__(self, key, input_array): 86 87 if self.association == 0: 88 data = self.obj.dataset.GetPointData() 89 n = self.obj.dataset.GetNumberOfPoints() 90 self.obj.mapper.SetScalarModeToUsePointData() 91 92 elif self.association == 1: 93 data = self.obj.dataset.GetCellData() 94 n = self.obj.dataset.GetNumberOfCells() 95 self.obj.mapper.SetScalarModeToUseCellData() 96 97 elif self.association == 2: 98 data = self.obj.dataset.GetFieldData() 99 if not utils.is_sequence(input_array): 100 input_array = [input_array] 101 102 if isinstance(input_array[0], str): 103 varr = vtki.vtkStringArray() 104 varr.SetName(key) 105 varr.SetNumberOfComponents(1) 106 varr.SetNumberOfTuples(len(input_array)) 107 for i, iarr in enumerate(input_array): 108 if isinstance(iarr, np.ndarray): 109 iarr = iarr.tolist() # better format 110 # Note: a string k can be converted to numpy with 111 # import json; k = np.array(json.loads(k)) 112 varr.InsertValue(i, str(iarr)) 113 else: 114 try: 115 varr = utils.numpy2vtk(input_array, name=key) 116 except TypeError as e: 117 vedo.logger.error( 118 f"cannot create metadata with input object:\n" 119 f"{input_array}" 120 f"\n\nAllowed content examples are:\n" 121 f"- flat list of strings ['a','b', 1, [1,2,3], ...]" 122 f" (first item must be a string in this case)\n" 123 f" hint: use k = np.array(json.loads(k)) to convert strings\n" 124 f"- numpy arrays of any shape" 125 ) 126 raise e 127 128 data.AddArray(varr) 129 return ############ 130 131 else: 132 raise RuntimeError() 133 134 if len(input_array) != n: 135 vedo.logger.error( 136 f"Error in point/cell data: length of input {len(input_array)}" 137 f" != {n} nr. 
of elements" 138 ) 139 raise RuntimeError() 140 141 input_array = np.asarray(input_array) 142 varr = utils.numpy2vtk(input_array, name=key) 143 data.AddArray(varr) 144 145 if len(input_array.shape) == 1: # scalars 146 data.SetActiveScalars(key) 147 try: # could be a volume mapper 148 self.obj.mapper.SetScalarRange(data.GetScalars().GetRange()) 149 except AttributeError: 150 pass 151 elif len(input_array.shape) == 2 and input_array.shape[1] == 3: # vectors 152 if key.lower() == "normals": 153 data.SetActiveNormals(key) 154 else: 155 data.SetActiveVectors(key) 156 157 def keys(self) -> List[str]: 158 """Return the list of available data array names""" 159 if self.association == 0: 160 data = self.obj.dataset.GetPointData() 161 elif self.association == 1: 162 data = self.obj.dataset.GetCellData() 163 elif self.association == 2: 164 data = self.obj.dataset.GetFieldData() 165 arrnames = [] 166 for i in range(data.GetNumberOfArrays()): 167 name = "" 168 if self.association == 2: 169 name = data.GetAbstractArray(i).GetName() 170 else: 171 iarr = data.GetArray(i) 172 if iarr: 173 name = iarr.GetName() 174 if name: 175 arrnames.append(name) 176 return arrnames 177 178 def items(self) -> List: 179 """Return the list of available data array `(names, values)`.""" 180 if self.association == 0: 181 data = self.obj.dataset.GetPointData() 182 elif self.association == 1: 183 data = self.obj.dataset.GetCellData() 184 elif self.association == 2: 185 data = self.obj.dataset.GetFieldData() 186 arrnames = [] 187 for i in range(data.GetNumberOfArrays()): 188 if self.association == 2: 189 name = data.GetAbstractArray(i).GetName() 190 else: 191 name = data.GetArray(i).GetName() 192 if name: 193 arrnames.append((name, self[name])) 194 return arrnames 195 196 def todict(self) -> dict: 197 """Return a dictionary of the available data arrays.""" 198 return dict(self.items()) 199 200 def rename(self, oldname: str, newname: str) -> None: 201 """Rename an array""" 202 if self.association == 0: 203 varr = self.obj.dataset.GetPointData().GetArray(oldname) 204 elif self.association == 1: 205 varr = self.obj.dataset.GetCellData().GetArray(oldname) 206 elif self.association == 2: 207 varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname) 208 if varr: 209 varr.SetName(newname) 210 else: 211 vedo.logger.warning( 212 f"Cannot rename non existing array {oldname} to {newname}" 213 ) 214 215 def remove(self, key: Union[int, str]) -> None: 216 """Remove a data array by name or number""" 217 if self.association == 0: 218 self.obj.dataset.GetPointData().RemoveArray(key) 219 elif self.association == 1: 220 self.obj.dataset.GetCellData().RemoveArray(key) 221 elif self.association == 2: 222 self.obj.dataset.GetFieldData().RemoveArray(key) 223 224 def clear(self) -> None: 225 """Remove all data associated to this object""" 226 if self.association == 0: 227 data = self.obj.dataset.GetPointData() 228 elif self.association == 1: 229 data = self.obj.dataset.GetCellData() 230 elif self.association == 2: 231 data = self.obj.dataset.GetFieldData() 232 for i in range(data.GetNumberOfArrays()): 233 if self.association == 2: 234 name = data.GetAbstractArray(i).GetName() 235 else: 236 name = data.GetArray(i).GetName() 237 data.RemoveArray(name) 238 239 def select(self, key: Union[int, str]) -> Any: 240 """Select one specific array by its name to make it the `active` one.""" 241 # Default (ColorModeToDefault): unsigned char scalars are treated as colors, 242 # and NOT mapped through the lookup table, while everything else is. 
243 # ColorModeToDirectScalar extends ColorModeToDefault such that all integer 244 # types are treated as colors with values in the range 0-255 245 # and floating types are treated as colors with values in the range 0.0-1.0. 246 # Setting ColorModeToMapScalars means that all scalar data will be mapped 247 # through the lookup table. 248 # (Note that for multi-component scalars, the particular component 249 # to use for mapping can be specified using the SelectColorArray() method.) 250 if self.association == 0: 251 data = self.obj.dataset.GetPointData() 252 self.obj.mapper.SetScalarModeToUsePointData() 253 else: 254 data = self.obj.dataset.GetCellData() 255 self.obj.mapper.SetScalarModeToUseCellData() 256 257 if isinstance(key, int): 258 key = data.GetArrayName(key) 259 260 arr = data.GetArray(key) 261 if not arr: 262 return self.obj 263 264 nc = arr.GetNumberOfComponents() 265 # print("GetNumberOfComponents", nc) 266 if nc == 1: 267 data.SetActiveScalars(key) 268 elif nc == 2: 269 data.SetTCoords(arr) 270 elif nc in (3, 4): 271 if "rgb" in key.lower(): # type: ignore 272 data.SetActiveScalars(key) 273 try: 274 # could be a volume mapper 275 self.obj.mapper.SetColorModeToDirectScalars() 276 data.SetActiveVectors(None) # need this to fix bug in #1066 277 # print("SetColorModeToDirectScalars for", key) 278 except AttributeError: 279 pass 280 else: 281 data.SetActiveVectors(key) 282 elif nc == 9: 283 data.SetActiveTensors(key) 284 else: 285 vedo.logger.error(f"Cannot select array {key} with {nc} components") 286 return self.obj 287 288 try: 289 # could be a volume mapper 290 self.obj.mapper.SetArrayName(key) 291 self.obj.mapper.ScalarVisibilityOn() 292 except AttributeError: 293 pass 294 295 return self.obj 296 297 def select_texture_coords(self, key: Union[int,str]) -> Any: 298 """Select one specific array to be used as texture coordinates.""" 299 if self.association == 0: 300 data = self.obj.dataset.GetPointData() 301 else: 302 vedo.logger.warning("texture coordinates are only available for point data") 303 return 304 305 if isinstance(key, int): 306 key = data.GetArrayName(key) 307 data.SetTCoords(data.GetArray(key)) 308 return self.obj 309 310 def select_normals(self, key: Union[int,str]) -> Any: 311 """Select one specific normal array by its name to make it the "active" one.""" 312 if self.association == 0: 313 data = self.obj.dataset.GetPointData() 314 self.obj.mapper.SetScalarModeToUsePointData() 315 else: 316 data = self.obj.dataset.GetCellData() 317 self.obj.mapper.SetScalarModeToUseCellData() 318 319 if isinstance(key, int): 320 key = data.GetArrayName(key) 321 data.SetActiveNormals(key) 322 return self.obj 323 324 def print(self, **kwargs) -> None: 325 """Print the array names available to terminal""" 326 colors.printc(self.keys(), **kwargs) 327 328 def __repr__(self) -> str: 329 """Representation""" 330 331 def _get_str(pd, header): 332 out = f"\x1b[2m\x1b[1m\x1b[7m{header}" 333 if pd.GetNumberOfArrays(): 334 if self.obj.name: 335 out += f" in {self.obj.name}" 336 out += f" contains {pd.GetNumberOfArrays()} array(s)\x1b[0m" 337 for i in range(pd.GetNumberOfArrays()): 338 varr = pd.GetArray(i) 339 out += f"\n\x1b[1m\x1b[4mArray name : {varr.GetName()}\x1b[0m" 340 out += "\nindex".ljust(15) + f": {i}" 341 t = varr.GetDataType() 342 if t in vtki.array_types: 343 out += "\ntype".ljust(15) 344 out += f": {vtki.array_types[t]}" 345 shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents()) 346 out += "\nshape".ljust(15) + f": {shape}" 347 out += "\nrange".ljust(15) + f": 
{np.array(varr.GetRange())}" 348 out += "\nmax id".ljust(15) + f": {varr.GetMaxId()}" 349 out += "\nlook up table".ljust(15) + f": {bool(varr.GetLookupTable())}" 350 out += "\nin-memory size".ljust(15) + f": {varr.GetActualMemorySize()} KB" 351 else: 352 out += " is empty.\x1b[0m" 353 return out 354 355 if self.association == 0: 356 out = _get_str(self.obj.dataset.GetPointData(), "Point Data") 357 elif self.association == 1: 358 out = _get_str(self.obj.dataset.GetCellData(), "Cell Data") 359 elif self.association == 2: 360 pd = self.obj.dataset.GetFieldData() 361 if pd.GetNumberOfArrays(): 362 out = "\x1b[2m\x1b[1m\x1b[7mMeta Data" 363 if self.obj.name: 364 out += f" in {self.obj.name}" 365 out += f" contains {pd.GetNumberOfArrays()} entries\x1b[0m" 366 for i in range(pd.GetNumberOfArrays()): 367 varr = pd.GetAbstractArray(i) 368 out += f"\n\x1b[1m\x1b[4mEntry name : {varr.GetName()}\x1b[0m" 369 out += "\nindex".ljust(15) + f": {i}" 370 shape = (varr.GetNumberOfTuples(), varr.GetNumberOfComponents()) 371 out += "\nshape".ljust(15) + f": {shape}" 372 373 return out
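In practice `DataArrayHelper` is not instantiated directly: it is returned by the `pointdata`, `celldata` and `metadata` properties of `CommonAlgorithms` (documented below) and behaves like a dictionary of numpy arrays. A minimal sketch of the intended usage (the array names are illustrative):

```python
import numpy as np
from vedo import Sphere

obj = Sphere()
obj.pointdata["elevation"] = obj.vertices[:, 2]  # add a per-point scalar array
obj.celldata["cell_id"] = np.arange(obj.ncells)  # add a per-cell array
obj.metadata["note"] = ["created for testing"]   # field data can hold strings

print(obj.pointdata["elevation"].shape)  # numpy array, one value per vertex
print(obj.metadata["note"])              # ['created for testing']
```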
157 def keys(self) -> List[str]: 158 """Return the list of available data array names""" 159 if self.association == 0: 160 data = self.obj.dataset.GetPointData() 161 elif self.association == 1: 162 data = self.obj.dataset.GetCellData() 163 elif self.association == 2: 164 data = self.obj.dataset.GetFieldData() 165 arrnames = [] 166 for i in range(data.GetNumberOfArrays()): 167 name = "" 168 if self.association == 2: 169 name = data.GetAbstractArray(i).GetName() 170 else: 171 iarr = data.GetArray(i) 172 if iarr: 173 name = iarr.GetName() 174 if name: 175 arrnames.append(name) 176 return arrnames
Return the list of available data array names
178 def items(self) -> List: 179 """Return the list of available data array `(names, values)`.""" 180 if self.association == 0: 181 data = self.obj.dataset.GetPointData() 182 elif self.association == 1: 183 data = self.obj.dataset.GetCellData() 184 elif self.association == 2: 185 data = self.obj.dataset.GetFieldData() 186 arrnames = [] 187 for i in range(data.GetNumberOfArrays()): 188 if self.association == 2: 189 name = data.GetAbstractArray(i).GetName() 190 else: 191 name = data.GetArray(i).GetName() 192 if name: 193 arrnames.append((name, self[name])) 194 return arrnames
Return the list of available data array `(names, values)`.
196 def todict(self) -> dict: 197 """Return a dictionary of the available data arrays.""" 198 return dict(self.items())
Return a dictionary of the available data arrays.
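`keys()`, `items()` and `todict()` mirror the usual dictionary interface. A short sketch (the `"elevation"` array is illustrative):

```python
from vedo import Sphere

s = Sphere()
s.pointdata["elevation"] = s.vertices[:, 2]

print(s.pointdata.keys())             # names of the available arrays
for name, arr in s.pointdata.items(): # (name, numpy array) pairs
    print(name, arr.shape)
d = s.pointdata.todict()              # same content as a plain python dict
```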
200 def rename(self, oldname: str, newname: str) -> None: 201 """Rename an array""" 202 if self.association == 0: 203 varr = self.obj.dataset.GetPointData().GetArray(oldname) 204 elif self.association == 1: 205 varr = self.obj.dataset.GetCellData().GetArray(oldname) 206 elif self.association == 2: 207 varr = self.obj.dataset.GetFieldData().GetAbstractArray(oldname) 208 if varr: 209 varr.SetName(newname) 210 else: 211 vedo.logger.warning( 212 f"Cannot rename non-existing array {oldname} to {newname}" 213 )
Rename an array
215 def remove(self, key: Union[int, str]) -> None: 216 """Remove a data array by name or number""" 217 if self.association == 0: 218 self.obj.dataset.GetPointData().RemoveArray(key) 219 elif self.association == 1: 220 self.obj.dataset.GetCellData().RemoveArray(key) 221 elif self.association == 2: 222 self.obj.dataset.GetFieldData().RemoveArray(key)
Remove a data array by name or number
224 def clear(self) -> None: 225 """Remove all data associated to this object""" 226 if self.association == 0: 227 data = self.obj.dataset.GetPointData() 228 elif self.association == 1: 229 data = self.obj.dataset.GetCellData() 230 elif self.association == 2: 231 data = self.obj.dataset.GetFieldData() 232 for i in range(data.GetNumberOfArrays()): 233 if self.association == 2: 234 name = data.GetAbstractArray(i).GetName() 235 else: 236 name = data.GetArray(i).GetName() 237 data.RemoveArray(name)
Remove all data associated to this object
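`rename()`, `remove()` and `clear()` cover the bookkeeping side of the helper. A brief sketch (the array names are illustrative):

```python
import numpy as np
from vedo import Cube

c = Cube()
c.celldata["quality"] = np.arange(c.ncells)

c.celldata.rename("quality", "cell_quality")  # rename in place
c.celldata.remove("cell_quality")             # drop a single array
c.celldata.clear()                            # or drop all arrays at once
```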
239 def select(self, key: Union[int, str]) -> Any: 240 """Select one specific array by its name to make it the `active` one.""" 241 # Default (ColorModeToDefault): unsigned char scalars are treated as colors, 242 # and NOT mapped through the lookup table, while everything else is. 243 # ColorModeToDirectScalar extends ColorModeToDefault such that all integer 244 # types are treated as colors with values in the range 0-255 245 # and floating types are treated as colors with values in the range 0.0-1.0. 246 # Setting ColorModeToMapScalars means that all scalar data will be mapped 247 # through the lookup table. 248 # (Note that for multi-component scalars, the particular component 249 # to use for mapping can be specified using the SelectColorArray() method.) 250 if self.association == 0: 251 data = self.obj.dataset.GetPointData() 252 self.obj.mapper.SetScalarModeToUsePointData() 253 else: 254 data = self.obj.dataset.GetCellData() 255 self.obj.mapper.SetScalarModeToUseCellData() 256 257 if isinstance(key, int): 258 key = data.GetArrayName(key) 259 260 arr = data.GetArray(key) 261 if not arr: 262 return self.obj 263 264 nc = arr.GetNumberOfComponents() 265 # print("GetNumberOfComponents", nc) 266 if nc == 1: 267 data.SetActiveScalars(key) 268 elif nc == 2: 269 data.SetTCoords(arr) 270 elif nc in (3, 4): 271 if "rgb" in key.lower(): # type: ignore 272 data.SetActiveScalars(key) 273 try: 274 # could be a volume mapper 275 self.obj.mapper.SetColorModeToDirectScalars() 276 data.SetActiveVectors(None) # need this to fix bug in #1066 277 # print("SetColorModeToDirectScalars for", key) 278 except AttributeError: 279 pass 280 else: 281 data.SetActiveVectors(key) 282 elif nc == 9: 283 data.SetActiveTensors(key) 284 else: 285 vedo.logger.error(f"Cannot select array {key} with {nc} components") 286 return self.obj 287 288 try: 289 # could be a volume mapper 290 self.obj.mapper.SetArrayName(key) 291 self.obj.mapper.ScalarVisibilityOn() 292 except AttributeError: 293 pass 294 295 return self.obj
Select one specific array by its name to make it the `active` one.
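What `select()` activates depends on the number of components of the chosen array: 1 component becomes the active scalars, 2 components become texture coordinates, 3 or 4 components become the active vectors (or direct colors if the name contains "rgb"), and 9 components become the active tensors. A short sketch (array names are illustrative):

```python
import numpy as np
from vedo import Sphere

s = Sphere()
s.pointdata["myscalars"] = s.vertices[:, 0]         # 1 component
s.pointdata["myvectors"] = np.ones((s.npoints, 3))  # 3 components

s.pointdata.select("myvectors")  # becomes the active vector field
s.pointdata.select("myscalars")  # becomes the active scalars, used for coloring
s.pointdata.select(0)            # arrays can also be picked by index
```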
297 def select_texture_coords(self, key: Union[int,str]) -> Any: 298 """Select one specific array to be used as texture coordinates.""" 299 if self.association == 0: 300 data = self.obj.dataset.GetPointData() 301 else: 302 vedo.logger.warning("texture coordinates are only available for point data") 303 return 304 305 if isinstance(key, int): 306 key = data.GetArrayName(key) 307 data.SetTCoords(data.GetArray(key)) 308 return self.obj
Select one specific array to be used as texture coordinates.
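Any 2-component per-point array can be promoted to texture coordinates in the same way. A minimal sketch (the `"myUV"` array is illustrative):

```python
import numpy as np
from vedo import Sphere

s = Sphere()
s.pointdata["myUV"] = np.random.rand(s.npoints, 2)  # any (npoints, 2) array
s.pointdata.select_texture_coords("myUV")           # use it as texture coordinates
```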
310 def select_normals(self, key: Union[int,str]) -> Any: 311 """Select one specific normal array by its name to make it the "active" one.""" 312 if self.association == 0: 313 data = self.obj.dataset.GetPointData() 314 self.obj.mapper.SetScalarModeToUsePointData() 315 else: 316 data = self.obj.dataset.GetCellData() 317 self.obj.mapper.SetScalarModeToUseCellData() 318 319 if isinstance(key, int): 320 key = data.GetArrayName(key) 321 data.SetActiveNormals(key) 322 return self.obj
Select one specific normal array by its name to make it the "active" one.
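Similarly, a 3-component point array can be declared as the active normals. A brief sketch, assuming `compute_normals()` has created the standard `"Normals"` point array:

```python
from vedo import Sphere

s = Sphere().compute_normals()         # creates/updates the "Normals" point array
print(s.pointdata.keys())              # "Normals" should be listed
s.pointdata.select_normals("Normals")  # make it the active normals array
```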
377class CommonAlgorithms: 378 """Common algorithms.""" 379 380 @property 381 def pointdata(self): 382 """ 383 Create and/or return a `numpy.array` associated to points (vertices). 384 A data array can be indexed either by its string name or by an integer number. 385 E.g.: `myobj.pointdata["arrayname"]` 386 387 Usage: 388 389 `myobj.pointdata.keys()` to return the available data array names 390 391 `myobj.pointdata.select(name)` to make this array the active one 392 393 `myobj.pointdata.remove(name)` to remove this array 394 """ 395 return DataArrayHelper(self, 0) 396 397 @property 398 def celldata(self): 399 """ 400 Create and/or return a `numpy.array` associated to cells (faces). 401 A data array can be indexed either by its string name or by an integer number. 402 E.g.: `myobj.celldata["arrayname"]` 403 404 Usage: 405 406 `myobj.celldata.keys()` to return the available data array names 407 408 `myobj.celldata.select(name)` to make this array the active one 409 410 `myobj.celldata.remove(name)` to remove this array 411 """ 412 return DataArrayHelper(self, 1) 413 414 @property 415 def metadata(self): 416 """ 417 Create and/or return a `numpy.array` associated to neither points nor cells (field data). 418 A data array can be indexed either by its string name or by an integer number. 419 E.g.: `myobj.metadata["arrayname"]` 420 421 Usage: 422 423 `myobj.metadata.keys()` to return the available data array names 424 425 `myobj.metadata.select(name)` to make this array the active one 426 427 `myobj.metadata.remove(name)` to remove this array 428 """ 429 return DataArrayHelper(self, 2) 430 431 def memory_address(self) -> int: 432 """ 433 Return a unique memory address integer which may serve as the ID of the 434 object, or be passed to C++ code. 435 """ 436 # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/ 437 # https://github.com/tfmoraes/polydata_connectivity 438 return int(self.dataset.GetAddressAsString("")[5:], 16) 439 440 def memory_size(self) -> int: 441 """Return the size in kilobytes of the object in memory.""" 442 return self.dataset.GetActualMemorySize() 443 444 def modified(self) -> Self: 445 """Use in conjunction with `tonumpy()` to update any modifications to the image array.""" 446 self.dataset.GetPointData().Modified() 447 scals = self.dataset.GetPointData().GetScalars() 448 if scals: 449 scals.Modified() 450 return self 451 452 def box(self, scale=1, padding=0) -> "vedo.Mesh": 453 """ 454 Return the bounding box as a new `Mesh` object.
455 456 Arguments: 457 scale : (float) 458 box size can be scaled by a factor 459 padding : (float, list) 460 a constant padding can be added (can be a list `[padx,pady,padz]`) 461 """ 462 b = self.bounds() 463 if not utils.is_sequence(padding): 464 padding = [padding, padding, padding] 465 length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4] 466 tol = (length + width + height) / 30000 # useful for boxing text 467 pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol] 468 bx = vedo.shapes.Box( 469 pos, 470 length * scale + padding[0], 471 width * scale + padding[1], 472 height * scale + padding[2], 473 c="gray", 474 ) 475 try: 476 pr = vtki.vtkProperty() 477 pr.DeepCopy(self.properties) 478 bx.actor.SetProperty(pr) 479 bx.properties = pr 480 except (AttributeError, TypeError): 481 pass 482 bx.flat().lighting("off").wireframe(True) 483 return bx 484 485 def update_dataset(self, dataset, **kwargs) -> Self: 486 """Update the dataset of the object with the provided VTK dataset.""" 487 self._update(dataset, **kwargs) 488 return self 489 490 def bounds(self) -> np.ndarray: 491 """ 492 Get the object bounds. 493 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 494 """ 495 try: # this is very slow for large meshes 496 pts = self.vertices 497 xmin, ymin, zmin = np.min(pts, axis=0) 498 xmax, ymax, zmax = np.max(pts, axis=0) 499 return np.array([xmin, xmax, ymin, ymax, zmin, zmax]) 500 except (AttributeError, ValueError): 501 return np.array(self.dataset.GetBounds()) 502 503 def xbounds(self, i=None) -> np.ndarray: 504 """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1).""" 505 b = self.bounds() 506 if i is not None: 507 return b[i] 508 return np.array([b[0], b[1]]) 509 510 def ybounds(self, i=None) -> np.ndarray: 511 """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1).""" 512 b = self.bounds() 513 if i == 0: 514 return b[2] 515 if i == 1: 516 return b[3] 517 return np.array([b[2], b[3]]) 518 519 def zbounds(self, i=None) -> np.ndarray: 520 """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1).""" 521 b = self.bounds() 522 if i == 0: 523 return b[4] 524 if i == 1: 525 return b[5] 526 return np.array([b[4], b[5]]) 527 528 def diagonal_size(self) -> float: 529 """Get the length of the diagonal of the bounding box.""" 530 b = self.bounds() 531 return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2) 532 533 def average_size(self) -> float: 534 """ 535 Calculate and return the average size of the object. 536 This is the mean of the vertex distances from the center of mass. 
537 """ 538 coords = self.vertices 539 cm = np.mean(coords, axis=0) 540 if coords.shape[0] == 0: 541 return 0.0 542 cc = coords - cm 543 return np.mean(np.linalg.norm(cc, axis=1)) 544 545 def center_of_mass(self) -> np.ndarray: 546 """Get the center of mass of the object.""" 547 if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)): 548 return np.array(self.dataset.GetCenter()) 549 cmf = vtki.new("CenterOfMass") 550 cmf.SetInputData(self.dataset) 551 cmf.Update() 552 c = cmf.GetCenter() 553 return np.array(c) 554 555 def copy_data_from(self, obj: Any) -> Self: 556 """Copy all data (point and cell data) from this input object""" 557 self.dataset.GetPointData().PassData(obj.dataset.GetPointData()) 558 self.dataset.GetCellData().PassData(obj.dataset.GetCellData()) 559 self.pipeline = utils.OperationNode( 560 "copy_data_from", 561 parents=[self, obj], 562 comment=f"{obj.__class__.__name__}", 563 shape="note", 564 c="#ccc5b9", 565 ) 566 return self 567 568 def inputdata(self): 569 """Obsolete, use `.dataset` instead.""" 570 colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y") 571 return self.dataset 572 573 @property 574 def npoints(self): 575 """Retrieve the number of points (or vertices).""" 576 return self.dataset.GetNumberOfPoints() 577 578 @property 579 def nvertices(self): 580 """Retrieve the number of vertices (or points).""" 581 return self.dataset.GetNumberOfPoints() 582 583 @property 584 def ncells(self): 585 """Retrieve the number of cells.""" 586 return self.dataset.GetNumberOfCells() 587 588 def points(self, pts=None): 589 """Obsolete, use `self.vertices` or `self.coordinates` instead.""" 590 if pts is None: ### getter 591 592 if warnings["points_getter"]: 593 colors.printc(warnings["points_getter"], c="y") 594 warnings["points_getter"] = "" 595 return self.vertices 596 597 else: ### setter 598 599 if warnings["points_setter"]: 600 colors.printc(warnings["points_setter"], c="y") 601 warnings["points_setter"] = "" 602 603 pts = np.asarray(pts, dtype=np.float32) 604 605 if pts.ndim == 1: 606 ### getter by point index ################### 607 indices = pts.astype(int) 608 vpts = self.dataset.GetPoints() 609 arr = utils.vtk2numpy(vpts.GetData()) 610 return arr[indices] ########### 611 612 ### setter #################################### 613 if pts.shape[1] == 2: 614 pts = np.c_[pts, np.zeros(pts.shape[0], dtype=np.float32)] 615 arr = utils.numpy2vtk(pts, dtype=np.float32) 616 617 vpts = self.dataset.GetPoints() 618 vpts.SetData(arr) 619 vpts.Modified() 620 # reset mesh to identity matrix position/rotation: 621 self.point_locator = None 622 self.cell_locator = None 623 self.line_locator = None 624 self.transform = LinearTransform() 625 return self 626 627 @property 628 def cell_centers(self): 629 """ 630 Get the coordinates of the cell centers. 631 632 Examples: 633 - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py) 634 635 See also: `CellCenters()`. 636 """ 637 vcen = vtki.new("CellCenters") 638 vcen.CopyArraysOff() 639 vcen.SetInputData(self.dataset) 640 vcen.Update() 641 return utils.vtk2numpy(vcen.GetOutput().GetPoints().GetData()) 642 643 @property 644 def lines(self): 645 """ 646 Get lines connectivity ids as a python array 647 formatted as `[[id0,id1], [id3,id4], ...]` 648 649 See also: `lines_as_flat_array()`. 650 """ 651 # Get cell connettivity ids as a 1D array. The vtk format is: 652 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 
653 try: 654 arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData()) 655 except AttributeError: 656 return np.array([]) 657 i = 0 658 conn = [] 659 n = len(arr1d) 660 for _ in range(n): 661 cell = [arr1d[i + k + 1] for k in range(arr1d[i])] 662 conn.append(cell) 663 i += arr1d[i] + 1 664 if i >= n: 665 break 666 667 return conn # cannot always make a numpy array of it! 668 669 @property 670 def lines_as_flat_array(self): 671 """ 672 Get lines connectivity ids as a 1D numpy array. 673 Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...] 674 675 See also: `lines()`. 676 """ 677 try: 678 return utils.vtk2numpy(self.dataset.GetLines().GetData()) 679 except AttributeError: 680 return np.array([]) 681 682 def mark_boundaries(self) -> Self: 683 """ 684 Mark cells and vertices if they lie on a boundary. 685 A new array called `BoundaryCells` is added to the object. 686 """ 687 mb = vtki.new("MarkBoundaryFilter") 688 mb.SetInputData(self.dataset) 689 mb.Update() 690 self.dataset.DeepCopy(mb.GetOutput()) 691 self.pipeline = utils.OperationNode("mark_boundaries", parents=[self]) 692 return self 693 694 def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray: 695 """ 696 Find cells that are within the specified bounds. 697 """ 698 try: 699 xbounds = list(xbounds.bounds()) 700 except AttributeError: 701 pass 702 703 if len(xbounds) == 6: 704 bnds = xbounds 705 else: 706 bnds = list(self.bounds()) 707 if len(xbounds) == 2: 708 bnds[0] = xbounds[0] 709 bnds[1] = xbounds[1] 710 if len(ybounds) == 2: 711 bnds[2] = ybounds[0] 712 bnds[3] = ybounds[1] 713 if len(zbounds) == 2: 714 bnds[4] = zbounds[0] 715 bnds[5] = zbounds[1] 716 717 cell_ids = vtki.vtkIdList() 718 if not self.cell_locator: 719 self.cell_locator = vtki.new("CellTreeLocator") 720 self.cell_locator.SetDataSet(self.dataset) 721 self.cell_locator.BuildLocator() 722 self.cell_locator.FindCellsWithinBounds(bnds, cell_ids) 723 cids = [] 724 for i in range(cell_ids.GetNumberOfIds()): 725 cid = cell_ids.GetId(i) 726 cids.append(cid) 727 return np.array(cids) 728 729 def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray: 730 """ 731 Find cells that are intersected by a line segment. 732 """ 733 cell_ids = vtki.vtkIdList() 734 if not self.cell_locator: 735 self.cell_locator = vtki.new("CellTreeLocator") 736 self.cell_locator.SetDataSet(self.dataset) 737 self.cell_locator.BuildLocator() 738 self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids) 739 cids = [] 740 for i in range(cell_ids.GetNumberOfIds()): 741 cid = cell_ids.GetId(i) 742 cids.append(cid) 743 return np.array(cids) 744 745 def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray: 746 """ 747 Find cells that are intersected by a plane. 748 """ 749 cell_ids = vtki.vtkIdList() 750 if not self.cell_locator: 751 self.cell_locator = vtki.new("CellTreeLocator") 752 self.cell_locator.SetDataSet(self.dataset) 753 self.cell_locator.BuildLocator() 754 self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids) 755 cids = [] 756 for i in range(cell_ids.GetNumberOfIds()): 757 cid = cell_ids.GetId(i) 758 cids.append(cid) 759 return np.array(cids) 760 761 def keep_cell_types(self, types=()): 762 """ 763 Extract cells of a specific type. 
764 765 Check the VTK cell types here: 766 https://vtk.org/doc/nightly/html/vtkCellType_8h.html 767 """ 768 fe = vtki.new("ExtractCellsByType") 769 fe.SetInputData(self.dataset) 770 for t in types: 771 try: 772 if utils.is_integer(t): 773 it = t 774 else: 775 it = vtki.cell_types[t.upper()] 776 except KeyError: 777 vedo.logger.error(f"Cell type '{t}' not recognized") 778 continue 779 fe.AddCellType(it) 780 fe.Update() 781 self._update(fe.GetOutput()) 782 return self 783 784 def map_cells_to_points(self, arrays=(), move=False) -> Self: 785 """ 786 Interpolate cell data (i.e., data specified per cell or face) 787 into point data (i.e., data specified at each vertex). 788 The method of transformation is based on averaging the data values 789 of all cells using a particular point. 790 791 A custom list of arrays to be mapped can be passed in input. 792 793 Set `move=True` to delete the original `celldata` array. 794 """ 795 c2p = vtki.new("CellDataToPointData") 796 c2p.SetInputData(self.dataset) 797 if not move: 798 c2p.PassCellDataOn() 799 if arrays: 800 c2p.ClearCellDataArrays() 801 c2p.ProcessAllArraysOff() 802 for arr in arrays: 803 c2p.AddCellDataArray(arr) 804 else: 805 c2p.ProcessAllArraysOn() 806 c2p.Update() 807 self._update(c2p.GetOutput(), reset_locators=False) 808 self.mapper.SetScalarModeToUsePointData() 809 self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self]) 810 return self 811 812 @property 813 def vertices(self): 814 """Return the vertices (points) coordinates.""" 815 try: 816 # for polydata and unstructured grid 817 varr = self.dataset.GetPoints().GetData() 818 except (AttributeError, TypeError): 819 try: 820 # for RectilinearGrid, StructuredGrid 821 vpts = vtki.vtkPoints() 822 self.dataset.GetPoints(vpts) 823 varr = vpts.GetData() 824 except (AttributeError, TypeError): 825 try: 826 # for ImageData 827 v2p = vtki.new("ImageToPoints") 828 v2p.SetInputData(self.dataset) 829 v2p.Update() 830 varr = v2p.GetOutput().GetPoints().GetData() 831 except AttributeError: 832 return np.array([]) 833 834 return utils.vtk2numpy(varr) 835 836 # setter 837 @vertices.setter 838 def vertices(self, pts): 839 """Set vertices (points) coordinates.""" 840 pts = utils.make3d(pts) 841 arr = utils.numpy2vtk(pts, dtype=np.float32) 842 try: 843 vpts = self.dataset.GetPoints() 844 vpts.SetData(arr) 845 vpts.Modified() 846 except (AttributeError, TypeError): 847 vedo.logger.error(f"Cannot set vertices for {type(self)}") 848 return self 849 # reset mesh to identity matrix position/rotation: 850 self.point_locator = None 851 self.cell_locator = None 852 self.line_locator = None 853 self.transform = LinearTransform() 854 855 @property 856 def coordinates(self): 857 """Return the vertices (points) coordinates. Same as `vertices`.""" 858 return self.vertices 859 860 @coordinates.setter 861 def coordinates(self, pts): 862 """Set vertices (points) coordinates. Same as `vertices`.""" 863 self.vertices = pts 864 865 @property 866 def cells_as_flat_array(self): 867 """ 868 Get cell connectivity ids as a 1D numpy array. 869 Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...] 870 """ 871 try: 872 # valid for unstructured grid 873 arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData()) 874 except AttributeError: 875 # valid for polydata 876 arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData()) 877 return arr1d 878 879 @property 880 def cells(self): 881 """ 882 Get the cells connectivity ids as a numpy array. 883 884 The output format is: `[[id0 ... idn], [id0 ... idm], etc]`. 
885 """ 886 try: 887 # valid for unstructured grid 888 arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData()) 889 except AttributeError: 890 try: 891 # valid for polydata 892 arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData()) 893 except AttributeError: 894 vedo.logger.warning(f"Cannot get cells for {type(self)}") 895 return np.array([]) 896 897 # Get cell connettivity ids as a 1D array. vtk format is: 898 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 899 i = 0 900 conn = [] 901 n = len(arr1d) 902 if n: 903 while True: 904 cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)] 905 conn.append(cell) 906 i += arr1d[i] + 1 907 if i >= n: 908 break 909 return conn 910 911 def cell_edge_neighbors(self): 912 """ 913 Get the cell neighbor indices of each cell. 914 915 Returns a python list of lists. 916 """ 917 918 def face_to_edges(face): 919 edges = [] 920 size = len(face) 921 for i in range(1, size + 1): 922 if i == size: 923 edges.append([face[i - 1], face[0]]) 924 else: 925 edges.append([face[i - 1], face[i]]) 926 return edges 927 928 pd = self.dataset 929 pd.BuildLinks() 930 931 neicells = [] 932 for i, cell in enumerate(self.cells): 933 nn = [] 934 for edge in face_to_edges(cell): 935 neighbors = vtki.vtkIdList() 936 pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors) 937 if neighbors.GetNumberOfIds() > 0: 938 neighbor = neighbors.GetId(0) 939 nn.append(neighbor) 940 neicells.append(nn) 941 942 return neicells 943 944 945 def map_points_to_cells(self, arrays=(), move=False) -> Self: 946 """ 947 Interpolate point data (i.e., data specified per point or vertex) 948 into cell data (i.e., data specified per cell). 949 The method of transformation is based on averaging the data values 950 of all points defining a particular cell. 951 952 A custom list of arrays to be mapped can be passed in input. 953 954 Set `move=True` to delete the original `pointdata` array. 955 956 Examples: 957 - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py) 958 """ 959 p2c = vtki.new("PointDataToCellData") 960 p2c.SetInputData(self.dataset) 961 if not move: 962 p2c.PassPointDataOn() 963 if arrays: 964 p2c.ClearPointDataArrays() 965 p2c.ProcessAllArraysOff() 966 for arr in arrays: 967 p2c.AddPointDataArray(arr) 968 else: 969 p2c.ProcessAllArraysOn() 970 p2c.Update() 971 self._update(p2c.GetOutput(), reset_locators=False) 972 self.mapper.SetScalarModeToUseCellData() 973 self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self]) 974 return self 975 976 def resample_data_from(self, source, tol=None, categorical=False) -> Self: 977 """ 978 Resample point and cell data from another dataset. 979 The output has the same structure but its point data have 980 the resampled values from target. 981 982 Use `tol` to set the tolerance used to compute whether 983 a point in the source is in a cell of the current object. 984 Points without resampled values, and their cells, are marked as blank. 985 If the data is categorical, then the resulting data will be determined 986 by a nearest neighbor interpolation scheme. 
987 988 Example: 989 ```python 990 from vedo import * 991 m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1) 992 pts = m1.vertices 993 ces = m1.cell_centers 994 m1.pointdata["xvalues"] = np.power(pts[:,0], 3) 995 m1.celldata["yvalues"] = np.power(ces[:,1], 3) 996 m2 = Mesh(dataurl+'bunny.obj') 997 m2.resample_data_from(m1) 998 # print(m2.pointdata["xvalues"]) 999 show(m1, m2 , N=2, axes=1) 1000 ``` 1001 """ 1002 rs = vtki.new("ResampleWithDataSet") 1003 rs.SetInputData(self.dataset) 1004 rs.SetSourceData(source.dataset) 1005 1006 rs.SetPassPointArrays(True) 1007 rs.SetPassCellArrays(True) 1008 rs.SetPassFieldArrays(True) 1009 rs.SetCategoricalData(categorical) 1010 1011 rs.SetComputeTolerance(True) 1012 if tol: 1013 rs.SetComputeTolerance(False) 1014 rs.SetTolerance(tol) 1015 rs.Update() 1016 self._update(rs.GetOutput(), reset_locators=False) 1017 self.pipeline = utils.OperationNode( 1018 "resample_data_from", 1019 comment=f"{source.__class__.__name__}", 1020 parents=[self, source], 1021 ) 1022 return self 1023 1024 def interpolate_data_from( 1025 self, 1026 source, 1027 radius=None, 1028 n=None, 1029 kernel="shepard", 1030 exclude=("Normals",), 1031 on="points", 1032 null_strategy=1, 1033 null_value=0, 1034 ) -> Self: 1035 """ 1036 Interpolate over source to port its data onto the current object using various kernels. 1037 1038 If n (number of closest points to use) is set then radius value is ignored. 1039 1040 Check out also: 1041 `probe()` which in many cases can be faster. 1042 1043 Arguments: 1044 kernel : (str) 1045 available kernels are [shepard, gaussian, linear] 1046 null_strategy : (int) 1047 specify a strategy to use when encountering a "null" point 1048 during the interpolation process. Null points occur when the local neighborhood 1049 (of nearby points to interpolate from) is empty. 1050 1051 - Case 0: an output array is created that marks points 1052 as being valid (=1) or null (invalid =0), and the null_value is set as well 1053 - Case 1: the output data value(s) are set to the provided null_value 1054 - Case 2: simply use the closest point to perform the interpolation. 1055 null_value : (float) 1056 see above. 
1057 1058 Examples: 1059 - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py) 1060 - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py) 1061 - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py) 1062 - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py) 1063 1064 ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png) 1065 """ 1066 if radius is None and not n: 1067 vedo.logger.error("in interpolate_data_from(): please set either radius or n") 1068 raise RuntimeError 1069 1070 if on == "points": 1071 points = source.dataset 1072 elif on == "cells": 1073 c2p = vtki.new("CellDataToPointData") 1074 c2p.SetInputData(source.dataset) 1075 c2p.Update() 1076 points = c2p.GetOutput() 1077 else: 1078 vedo.logger.error("in interpolate_data_from(), on must be on points or cells") 1079 raise RuntimeError() 1080 1081 locator = vtki.new("PointLocator") 1082 locator.SetDataSet(points) 1083 locator.BuildLocator() 1084 1085 if kernel.lower() == "shepard": 1086 kern = vtki.new("ShepardKernel") 1087 kern.SetPowerParameter(2) 1088 elif kernel.lower() == "gaussian": 1089 kern = vtki.new("GaussianKernel") 1090 kern.SetSharpness(2) 1091 elif kernel.lower() == "linear": 1092 kern = vtki.new("LinearKernel") 1093 else: 1094 vedo.logger.error("available kernels are: [shepard, gaussian, linear]") 1095 raise RuntimeError() 1096 1097 if n: 1098 kern.SetNumberOfPoints(n) 1099 kern.SetKernelFootprintToNClosest() 1100 else: 1101 kern.SetRadius(radius) 1102 kern.SetKernelFootprintToRadius() 1103 1104 interpolator = vtki.new("PointInterpolator") 1105 interpolator.SetInputData(self.dataset) 1106 interpolator.SetSourceData(points) 1107 interpolator.SetKernel(kern) 1108 interpolator.SetLocator(locator) 1109 interpolator.PassFieldArraysOn() 1110 interpolator.SetNullPointsStrategy(null_strategy) 1111 interpolator.SetNullValue(null_value) 1112 interpolator.SetValidPointsMaskArrayName("ValidPointMask") 1113 for ex in exclude: 1114 interpolator.AddExcludedArray(ex) 1115 1116 # remove arrays that are already present in the source 1117 # this is because the interpolator will ignore them otherwise 1118 for i in range(self.dataset.GetPointData().GetNumberOfArrays()): 1119 name = self.dataset.GetPointData().GetArrayName(i) 1120 if name not in exclude: 1121 self.dataset.GetPointData().RemoveArray(name) 1122 1123 interpolator.Update() 1124 1125 if on == "cells": 1126 p2c = vtki.new("PointDataToCellData") 1127 p2c.SetInputData(interpolator.GetOutput()) 1128 p2c.Update() 1129 cpoly = p2c.GetOutput() 1130 else: 1131 cpoly = interpolator.GetOutput() 1132 1133 self._update(cpoly, reset_locators=False) 1134 1135 self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source]) 1136 return self 1137 1138 def add_ids(self) -> Self: 1139 """ 1140 Generate point and cell ids arrays. 1141 1142 Two new arrays are added to the mesh: `PointID` and `CellID`. 
1143 """ 1144 ids = vtki.new("IdFilter") 1145 ids.SetInputData(self.dataset) 1146 ids.PointIdsOn() 1147 ids.CellIdsOn() 1148 ids.FieldDataOff() 1149 ids.SetPointIdsArrayName("PointID") 1150 ids.SetCellIdsArrayName("CellID") 1151 ids.Update() 1152 self._update(ids.GetOutput(), reset_locators=False) 1153 self.pipeline = utils.OperationNode("add_ids", parents=[self]) 1154 return self 1155 1156 def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray: 1157 """ 1158 Compute and return the gradiend of the active scalar field as a numpy array. 1159 1160 Arguments: 1161 input_array : (str) 1162 array of the scalars to compute the gradient, 1163 if None the current active array is selected 1164 on : (str) 1165 compute either on 'points' or 'cells' data 1166 fast : (bool) 1167 if True, will use a less accurate algorithm 1168 that performs fewer derivative calculations (and is therefore faster). 1169 1170 Examples: 1171 - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py) 1172 1173 ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png) 1174 """ 1175 gra = vtki.new("GradientFilter") 1176 if on.startswith("p"): 1177 varr = self.dataset.GetPointData() 1178 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1179 elif on.startswith("c"): 1180 varr = self.dataset.GetCellData() 1181 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1182 else: 1183 vedo.logger.error(f"in gradient: unknown option {on}") 1184 raise RuntimeError 1185 1186 if input_array is None: 1187 if varr.GetScalars(): 1188 input_array = varr.GetScalars().GetName() 1189 else: 1190 vedo.logger.error(f"in gradient: no scalars found for {on}") 1191 raise RuntimeError 1192 1193 gra.SetInputData(self.dataset) 1194 gra.SetInputScalars(tp, input_array) 1195 gra.SetResultArrayName("Gradient") 1196 gra.SetFasterApproximation(fast) 1197 gra.ComputeDivergenceOff() 1198 gra.ComputeVorticityOff() 1199 gra.ComputeGradientOn() 1200 gra.Update() 1201 # self._update(gra.GetOutput(), reset_locators=False) 1202 if on.startswith("p"): 1203 gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient")) 1204 else: 1205 gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient")) 1206 return gvecs 1207 1208 def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray: 1209 """ 1210 Compute and return the divergence of a vector field as a numpy array. 1211 1212 Arguments: 1213 array_name : (str) 1214 name of the array of vectors to compute the divergence, 1215 if None the current active array is selected 1216 on : (str) 1217 compute either on 'points' or 'cells' data 1218 fast : (bool) 1219 if True, will use a less accurate algorithm 1220 that performs fewer derivative calculations (and is therefore faster). 
1221 """ 1222 div = vtki.new("GradientFilter") 1223 if on.startswith("p"): 1224 varr = self.dataset.GetPointData() 1225 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1226 elif on.startswith("c"): 1227 varr = self.dataset.GetCellData() 1228 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1229 else: 1230 vedo.logger.error(f"in divergence(): unknown option {on}") 1231 raise RuntimeError 1232 1233 if array_name is None: 1234 if varr.GetVectors(): 1235 array_name = varr.GetVectors().GetName() 1236 else: 1237 vedo.logger.error(f"in divergence(): no vectors found for {on}") 1238 raise RuntimeError 1239 1240 div.SetInputData(self.dataset) 1241 div.SetInputScalars(tp, array_name) 1242 div.ComputeDivergenceOn() 1243 div.ComputeGradientOff() 1244 div.ComputeVorticityOff() 1245 div.SetDivergenceArrayName("Divergence") 1246 div.SetFasterApproximation(fast) 1247 div.Update() 1248 # self._update(div.GetOutput(), reset_locators=False) 1249 if on.startswith("p"): 1250 dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence")) 1251 else: 1252 dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence")) 1253 return dvecs 1254 1255 def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray: 1256 """ 1257 Compute and return the vorticity of a vector field as a numpy array. 1258 1259 Arguments: 1260 array_name : (str) 1261 name of the array to compute the vorticity, 1262 if None the current active array is selected 1263 on : (str) 1264 compute either on 'points' or 'cells' data 1265 fast : (bool) 1266 if True, will use a less accurate algorithm 1267 that performs fewer derivative calculations (and is therefore faster). 1268 """ 1269 vort = vtki.new("GradientFilter") 1270 if on.startswith("p"): 1271 varr = self.dataset.GetPointData() 1272 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1273 elif on.startswith("c"): 1274 varr = self.dataset.GetCellData() 1275 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1276 else: 1277 vedo.logger.error(f"in vorticity(): unknown option {on}") 1278 raise RuntimeError 1279 1280 if array_name is None: 1281 if varr.GetVectors(): 1282 array_name = varr.GetVectors().GetName() 1283 else: 1284 vedo.logger.error(f"in vorticity(): no vectors found for {on}") 1285 raise RuntimeError 1286 1287 vort.SetInputData(self.dataset) 1288 vort.SetInputScalars(tp, array_name) 1289 vort.ComputeDivergenceOff() 1290 vort.ComputeGradientOff() 1291 vort.ComputeVorticityOn() 1292 vort.SetVorticityArrayName("Vorticity") 1293 vort.SetFasterApproximation(fast) 1294 vort.Update() 1295 if on.startswith("p"): 1296 vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity")) 1297 else: 1298 vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity")) 1299 return vvecs 1300 1301 def probe( 1302 self, 1303 source, 1304 categorical=False, 1305 snap=False, 1306 tol=0, 1307 ) -> Self: 1308 """ 1309 Takes a data set and probes its scalars at the specified points in space. 1310 1311 Note that a mask is also output with valid/invalid points which can be accessed 1312 with `mesh.pointdata['ValidPointMask']`. 1313 1314 Arguments: 1315 source : any dataset 1316 the data set to probe. 1317 categorical : bool 1318 control whether the source pointdata is to be treated as categorical. 1319 snap : bool 1320 snap to the cell with the closest point if no cell was found 1321 tol : float 1322 the tolerance to use when performing the probe. 
1323 1324 Check out also: 1325 `interpolate_data_from()` and `tovolume()` 1326 1327 Examples: 1328 - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py) 1329 1330 ![](https://vedo.embl.es/images/volumetric/probePoints.png) 1331 """ 1332 probe_filter = vtki.new("ProbeFilter") 1333 probe_filter.SetSourceData(source.dataset) 1334 probe_filter.SetInputData(self.dataset) 1335 probe_filter.PassCellArraysOn() 1336 probe_filter.PassFieldArraysOn() 1337 probe_filter.PassPointArraysOn() 1338 probe_filter.SetCategoricalData(categorical) 1339 probe_filter.ComputeToleranceOff() 1340 if tol: 1341 probe_filter.ComputeToleranceOn() 1342 probe_filter.SetTolerance(tol) 1343 probe_filter.SetSnapToCellWithClosestPoint(snap) 1344 probe_filter.Update() 1345 self._update(probe_filter.GetOutput(), reset_locators=False) 1346 self.pipeline = utils.OperationNode("probe", parents=[self, source]) 1347 self.pointdata.rename("vtkValidPointMask", "ValidPointMask") 1348 return self 1349 1350 def compute_cell_size(self) -> Self: 1351 """ 1352 Add to this object a cell data array 1353 containing the area, volume and edge length 1354 of the cells (when applicable to the object type). 1355 1356 Array names are: `Area`, `Volume`, `Length`. 1357 """ 1358 csf = vtki.new("CellSizeFilter") 1359 csf.SetInputData(self.dataset) 1360 csf.SetComputeArea(1) 1361 csf.SetComputeVolume(1) 1362 csf.SetComputeLength(1) 1363 csf.SetComputeVertexCount(0) 1364 csf.SetAreaArrayName("Area") 1365 csf.SetVolumeArrayName("Volume") 1366 csf.SetLengthArrayName("Length") 1367 csf.Update() 1368 self._update(csf.GetOutput(), reset_locators=False) 1369 return self 1370 1371 def generate_random_data(self) -> Self: 1372 """Fill a dataset with random attributes""" 1373 gen = vtki.new("RandomAttributeGenerator") 1374 gen.SetInputData(self.dataset) 1375 gen.GenerateAllDataOn() 1376 gen.SetDataTypeToFloat() 1377 gen.GeneratePointNormalsOff() 1378 gen.GeneratePointTensorsOn() 1379 gen.GenerateCellScalarsOn() 1380 gen.Update() 1381 self._update(gen.GetOutput(), reset_locators=False) 1382 self.pipeline = utils.OperationNode("generate_random_data", parents=[self]) 1383 return self 1384 1385 def integrate_data(self) -> dict: 1386 """ 1387 Integrate point and cell data arrays while computing length, 1388 area or volume of the domain. It works for 1D, 2D or 3D cells. 1389 1390 For volumetric datasets, this filter ignores all but 3D cells. 1391 It will not compute the volume contained in a closed surface. 1392 1393 Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`, 1394 which contain the integration result for the corresponding attributes. 
1395 1396 Examples: 1397 ```python 1398 from vedo import * 1399 surf = Sphere(res=100) 1400 surf.pointdata['scalars'] = np.ones(surf.npoints) 1401 data = surf.integrate_data() 1402 print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi) 1403 ``` 1404 1405 ```python 1406 from vedo import * 1407 1408 xcoords1 = np.arange(0, 2.2, 0.2) 1409 xcoords2 = sqrt(np.arange(0, 4.2, 0.2)) 1410 1411 ycoords = np.arange(0, 1.2, 0.2) 1412 1413 surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2) 1414 surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2) 1415 1416 surf1.pointdata['scalars'] = surf1.vertices[:,2] 1417 surf2.pointdata['scalars'] = surf2.vertices[:,2] 1418 1419 data1 = surf1.integrate_data() 1420 data2 = surf2.integrate_data() 1421 1422 print(data1['pointdata']['scalars'], 1423 "is equal to", 1424 data2['pointdata']['scalars'], 1425 "even if the grids are different!", 1426 "(= the volume under the surface)" 1427 ) 1428 show(surf1, surf2, N=2, axes=1).close() 1429 ``` 1430 """ 1431 vinteg = vtki.new("IntegrateAttributes") 1432 vinteg.SetInputData(self.dataset) 1433 vinteg.Update() 1434 ugrid = vedo.UnstructuredGrid(vinteg.GetOutput()) 1435 data = dict( 1436 pointdata=ugrid.pointdata.todict(), 1437 celldata=ugrid.celldata.todict(), 1438 metadata=ugrid.metadata.todict(), 1439 ) 1440 return data 1441 1442 def write(self, filename, binary=True) -> None: 1443 """Write object to file.""" 1444 out = vedo.file_io.write(self, filename, binary) 1445 out.pipeline = utils.OperationNode( 1446 "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c" 1447 ) 1448 1449 def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh": 1450 """ 1451 Extract boundary geometry from dataset (or convert data to polygonal type). 1452 1453 Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds` 1454 to keep track of the original mesh elements. 1455 1456 Arguments: 1457 bounds : (list) 1458 specify a sub-region to extract 1459 shrink : (float) 1460 shrink the cells to a fraction of their original size 1461 """ 1462 geo = vtki.new("GeometryFilter") 1463 1464 if shrink: 1465 sf = vtki.new("ShrinkFilter") 1466 sf.SetInputData(self.dataset) 1467 sf.SetShrinkFactor(shrink) 1468 sf.Update() 1469 geo.SetInputData(sf.GetOutput()) 1470 else: 1471 geo.SetInputData(self.dataset) 1472 1473 geo.SetPassThroughCellIds(1) 1474 geo.SetPassThroughPointIds(1) 1475 geo.SetOriginalCellIdsName("OriginalCellIds") 1476 geo.SetOriginalPointIdsName("OriginalPointIds") 1477 geo.SetNonlinearSubdivisionLevel(1) 1478 # geo.MergingOff() # crashes on StructuredGrids 1479 if bounds: 1480 geo.SetExtent(bounds) 1481 geo.ExtentClippingOn() 1482 geo.Update() 1483 msh = vedo.mesh.Mesh(geo.GetOutput()) 1484 msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b") 1485 return msh 1486 1487 def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume": 1488 """ 1489 Compute the `Volume` object whose voxels contains the signed distance from 1490 the object. The calling object must have "Normals" defined. 1491 1492 Arguments: 1493 bounds : (list, actor) 1494 bounding box sizes 1495 dims : (list) 1496 dimensions (nr. of voxels) of the output volume. 
1497 invert : (bool) 1498 flip the sign 1499 max_radius : (float) 1500 specify how far out to propagate distance calculation 1501 1502 Examples: 1503 - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py) 1504 1505 ![](https://vedo.embl.es/images/basic/distance2mesh.png) 1506 """ 1507 if bounds is None: 1508 bounds = self.bounds() 1509 if max_radius is None: 1510 max_radius = self.diagonal_size() / 2 1511 dist = vtki.new("SignedDistance") 1512 dist.SetInputData(self.dataset) 1513 dist.SetRadius(max_radius) 1514 dist.SetBounds(bounds) 1515 dist.SetDimensions(dims) 1516 dist.Update() 1517 img = dist.GetOutput() 1518 if invert: 1519 mat = vtki.new("ImageMathematics") 1520 mat.SetInput1Data(img) 1521 mat.SetOperationToMultiplyByK() 1522 mat.SetConstantK(-1) 1523 mat.Update() 1524 img = mat.GetOutput() 1525 1526 vol = vedo.Volume(img) 1527 vol.name = "SignedDistanceVolume" 1528 vol.pipeline = utils.OperationNode( 1529 "signed_distance", 1530 parents=[self], 1531 comment=f"dims={tuple(vol.dimensions())}", 1532 c="#e9c46a:#0096c7", 1533 ) 1534 return vol 1535 1536 def unsigned_distance( 1537 self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume": 1538 """ 1539 Compute the `Volume` object whose voxels contains the unsigned distance. 1540 """ 1541 dist = vtki.new("UnsignedDistance") 1542 dist.SetInputData(self.dataset) 1543 dist.SetDimensions(dims) 1544 1545 if len(bounds) == 6: 1546 dist.SetBounds(bounds) 1547 # elif bounds == "auto": 1548 # dist.AdjustBoundsOn() 1549 else: 1550 dist.SetBounds(self.bounds()) 1551 if not max_radius: 1552 max_radius = self.diagonal_size() / 10 1553 dist.SetRadius(max_radius) 1554 1555 if self.point_locator: 1556 dist.SetLocator(self.point_locator) 1557 1558 if cap_value is not None: 1559 dist.CappingOn() 1560 dist.SetCapValue(cap_value) 1561 dist.SetOutputScalarTypeToFloat() 1562 dist.Update() 1563 vol = vedo.Volume(dist.GetOutput()) 1564 vol.name = "UnsignedDistanceVolume" 1565 vol.pipeline = utils.OperationNode( 1566 "unsigned_distance", parents=[self], c="#e9c46a:#0096c7") 1567 return vol 1568 1569 def smooth_data(self, 1570 niter=10, relaxation_factor=0.1, strategy=0, mask=None, 1571 mode="distance2", 1572 exclude=("Normals", "TextureCoordinates"), 1573 ) -> Self: 1574 """ 1575 Smooth point attribute data using distance weighted Laplacian kernel. 1576 The effect is to blur regions of high variation and emphasize low variation regions. 1577 1578 A central concept of this method is the point smoothing stencil. 1579 A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. 1580 To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using 1581 the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). 1582 This averaging process is repeated until the maximum number of iterations is reached. 1583 1584 The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. 1585 The a(i+1) attributes are determined from the a(i) attributes as follows: 1586 a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j)) 1587 1588 Convergence occurs faster for larger relaxation factors. 1589 Typically a small number of iterations is required for large relaxation factors, 1590 and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be 1591 adequate (i.e., just a distance weighted average is computed). 
1592 1593 Warning: 1594 Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1; 1595 after attribute smoothing this constraint is likely to be violated. 1596 Other vectors and tensors may suffer from similar issues. 1597 In such a situation, specify `exclude=...` which will not be smoothed (and simply passed through to the output). 1598 Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance 1599 between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil). 1600 The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n, 1601 where n is the number of connected points in the stencil. 1602 The smoothing process reduces high frequency information in the data attributes. 1603 With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost, 1604 and the attributes will move towards an "average" value. 1605 While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use 1606 an image-based algorithm to perform data smoothing. 1607 To determine boundary points in polygonal data, edges used by only one cell are considered boundary 1608 (and hence the associated points defining the edge). 1609 1610 Arguments: 1611 niter : (int) 1612 number of iterations 1613 relaxation_factor : (float) 1614 relaxation factor controlling the amount of Laplacian smoothing applied 1615 strategy : (int) 1616 strategy to use for Laplacian smoothing 1617 - 0: use all points, all point data attributes are smoothed 1618 - 1: smooth all point attribute data except those on the boundary 1619 - 2: only point data connected to a boundary point are smoothed 1620 mask : (str, np.ndarray) 1621 array to be used as a mask (ignore then the strategy keyword) 1622 mode : (str) 1623 smoothing mode, either "distance2", "distance" or "average" 1624 - distance**2 weighted (i.e., 1/r**2 interpolation weights) 1625 - distance weighted (i.e., 1/r) approach; 1626 - simple average of all connected points in the stencil 1627 exclude : (list) 1628 list of arrays to be excluded from smoothing 1629 """ 1630 try: 1631 saf = vtki.new("AttributeSmoothingFilter") 1632 except: 1633 vedo.logger.error("smooth_data() only avaialble in VTK>=9.3.0") 1634 return self 1635 saf.SetInputData(self.dataset) 1636 saf.SetRelaxationFactor(relaxation_factor) 1637 saf.SetNumberOfIterations(niter) 1638 1639 for ex in exclude: 1640 saf.AddExcludedArray(ex) 1641 1642 if mode == "distance": 1643 saf.SetWeightsTypeToDistance() 1644 elif mode == "distance2": 1645 saf.SetWeightsTypeToDistance2() 1646 elif mode == "average": 1647 saf.SetWeightsTypeToAverage() 1648 else: 1649 vedo.logger.error(f"smooth_data(): unknown mode {mode}") 1650 raise TypeError 1651 1652 saf.SetSmoothingStrategy(strategy) 1653 if mask is not None: 1654 saf.SetSmoothingStrategyToSmoothingMask() 1655 if isinstance(mask, str): 1656 mask_ = self.dataset.GetPointData().GetArray(mask) 1657 if not mask_: 1658 vedo.logger.error(f"smooth_data(): mask array {mask} not found") 1659 return self 1660 mask_array = vtki.vtkUnsignedCharArray() 1661 mask_array.ShallowCopy(mask_) 1662 mask_array.SetName(mask_.GetName()) 1663 else: 1664 mask_array = utils.numpy2vtk(mask, dtype=np.uint8) 1665 saf.SetSmoothingMask(mask_array) 1666 1667 saf.Update() 1668 1669 self._update(saf.GetOutput()) 1670 self.pipeline = 
utils.OperationNode( 1671 "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b" 1672 ) 1673 return self 1674 1675 def compute_streamlines( 1676 self, 1677 seeds: Any, 1678 integrator="rk4", 1679 direction="forward", 1680 initial_step_size=None, 1681 max_propagation=None, 1682 max_steps=10000, 1683 step_length=0, 1684 surface_constrained=False, 1685 compute_vorticity=False, 1686 ) -> Union["vedo.Lines", None]: 1687 """ 1688 Integrate a vector field to generate streamlines. 1689 1690 Arguments: 1691 seeds : (Mesh, Points, list) 1692 starting points of the streamlines 1693 integrator : (str) 1694 type of integration method to be used: 1695 - "rk2" (Runge-Kutta 2) 1696 - "rk4" (Runge-Kutta 4) 1697 - "rk45" (Runge-Kutta 45) 1698 direction : (str) 1699 direction of integration, either "forward", "backward" or "both" 1700 initial_step_size : (float) 1701 initial step size used for line integration 1702 max_propagation : (float) 1703 maximum length of a streamline expressed in absolute units 1704 max_steps : (int) 1705 maximum number of steps for a streamline 1706 step_length : (float) 1707 maximum length of a step expressed in absolute units 1708 surface_constrained : (bool) 1709 whether to stop integrating when the streamline leaves the surface 1710 compute_vorticity : (bool) 1711 whether to compute the vorticity at each streamline point 1712 """ 1713 b = self.dataset.GetBounds() 1714 size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3 1715 if initial_step_size is None: 1716 initial_step_size = size / 1000.0 1717 1718 if max_propagation is None: 1719 max_propagation = size * 2 1720 1721 if utils.is_sequence(seeds): 1722 seeds = vedo.Points(seeds) 1723 1724 sti = vtki.new("StreamTracer") 1725 sti.SetSourceData(seeds.dataset) 1726 if isinstance(self, vedo.RectilinearGrid): 1727 sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset) 1728 else: 1729 sti.SetInputDataObject(self.dataset) 1730 1731 sti.SetInitialIntegrationStep(initial_step_size) 1732 sti.SetComputeVorticity(compute_vorticity) 1733 sti.SetMaximumNumberOfSteps(max_steps) 1734 sti.SetMaximumPropagation(max_propagation) 1735 sti.SetSurfaceStreamlines(surface_constrained) 1736 if step_length: 1737 sti.SetMaximumIntegrationStep(step_length) 1738 1739 if "for" in direction: 1740 sti.SetIntegrationDirectionToForward() 1741 elif "back" in direction: 1742 sti.SetIntegrationDirectionToBackward() 1743 elif "both" in direction: 1744 sti.SetIntegrationDirectionToBoth() 1745 else: 1746 vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}") 1747 return None 1748 1749 if integrator == "rk2": 1750 sti.SetIntegratorTypeToRungeKutta2() 1751 elif integrator == "rk4": 1752 sti.SetIntegratorTypeToRungeKutta4() 1753 elif integrator == "rk45": 1754 sti.SetIntegratorTypeToRungeKutta45() 1755 else: 1756 vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}") 1757 return None 1758 1759 sti.Update() 1760 1761 stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4) 1762 stlines.name = "StreamLines" 1763 self.pipeline = utils.OperationNode( 1764 "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b" 1765 ) 1766 return stlines
Common algorithms.
380 @property 381 def pointdata(self): 382 """ 383 Create and/or return a `numpy.array` associated to points (vertices). 384 A data array can be indexed either as a string or by an integer number. 385 E.g.: `myobj.pointdata["arrayname"]` 386 387 Usage: 388 389 `myobj.pointdata.keys()` to return the available data array names 390 391 `myobj.pointdata.select(name)` to make this array the active one 392 393 `myobj.pointdata.remove(name)` to remove this array 394 """ 395 return DataArrayHelper(self, 0)
Create and/or return a `numpy.array` associated to points (vertices).
A data array can be indexed either as a string or by an integer number.
E.g.: `myobj.pointdata["arrayname"]`

Usage:
`myobj.pointdata.keys()` to return the available data array names
`myobj.pointdata.select(name)` to make this array the active one
`myobj.pointdata.remove(name)` to remove this array
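A minimal usage sketch (not part of the original docstring); the `Sphere` test object and the array name "heights" are illustrative only:

```python
from vedo import Sphere

s = Sphere(res=12)
# one scalar per vertex: the array length must equal s.npoints
s.pointdata["heights"] = s.vertices[:, 2]
print(s.pointdata.keys())      # available point data arrays
s.pointdata.select("heights")  # make "heights" the active array
print(s.pointdata["heights"][:5])
```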
397 @property 398 def celldata(self): 399 """ 400 Create and/or return a `numpy.array` associated to cells (faces). 401 A data array can be indexed either as a string or by an integer number. 402 E.g.: `myobj.celldata["arrayname"]` 403 404 Usage: 405 406 `myobj.celldata.keys()` to return the available data array names 407 408 `myobj.celldata.select(name)` to make this array the active one 409 410 `myobj.celldata.remove(name)` to remove this array 411 """ 412 return DataArrayHelper(self, 1)
Create and/or return a `numpy.array` associated to cells (faces).
A data array can be indexed either as a string or by an integer number.
E.g.: `myobj.celldata["arrayname"]`

Usage:
`myobj.celldata.keys()` to return the available data array names
`myobj.celldata.select(name)` to make this array the active one
`myobj.celldata.remove(name)` to remove this array
414 @property 415 def metadata(self): 416 """ 417 Create and/or return a `numpy.array` associated to neither cells nor faces. 418 A data array can be indexed either as a string or by an integer number. 419 E.g.: `myobj.metadata["arrayname"]` 420 421 Usage: 422 423 `myobj.metadata.keys()` to return the available data array names 424 425 `myobj.metadata.select(name)` to make this array the active one 426 427 `myobj.metadata.remove(name)` to remove this array 428 """ 429 return DataArrayHelper(self, 2)
Create and/or return a `numpy.array` associated to neither cells nor faces.
A data array can be indexed either as a string or by an integer number.
E.g.: `myobj.metadata["arrayname"]`

Usage:
`myobj.metadata.keys()` to return the available data array names
`myobj.metadata.select(name)` to make this array the active one
`myobj.metadata.remove(name)` to remove this array
431 def memory_address(self) -> int: 432 """ 433 Return a unique memory address integer which may serve as the ID of the 434 object, or passed to c++ code. 435 """ 436 # https://www.linkedin.com/pulse/speedup-your-code-accessing-python-vtk-objects-from-c-pletzer/ 437 # https://github.com/tfmoraes/polydata_connectivity 438 return int(self.dataset.GetAddressAsString("")[5:], 16)
Return a unique memory address integer which may serve as the ID of the object, or be passed to C++ code.
440 def memory_size(self) -> int: 441 """Return the size in bytes of the object in memory.""" 442 return self.dataset.GetActualMemorySize()
Return the size in bytes of the object in memory.
444 def modified(self) -> Self: 445 """Use in conjunction with `tonumpy()` to update any modifications to the image array.""" 446 self.dataset.GetPointData().Modified() 447 scals = self.dataset.GetPointData().GetScalars() 448 if scals: 449 scals.Modified() 450 return self
Use in conjunction with `tonumpy()` to update any modifications to the image array.
452 def box(self, scale=1, padding=0) -> "vedo.Mesh": 453 """ 454 Return the bounding box as a new `Mesh` object. 455 456 Arguments: 457 scale : (float) 458 box size can be scaled by a factor 459 padding : (float, list) 460 a constant padding can be added (can be a list `[padx,pady,padz]`) 461 """ 462 b = self.bounds() 463 if not utils.is_sequence(padding): 464 padding = [padding, padding, padding] 465 length, width, height = b[1] - b[0], b[3] - b[2], b[5] - b[4] 466 tol = (length + width + height) / 30000 # useful for boxing text 467 pos = [(b[0] + b[1]) / 2, (b[3] + b[2]) / 2, (b[5] + b[4]) / 2 - tol] 468 bx = vedo.shapes.Box( 469 pos, 470 length * scale + padding[0], 471 width * scale + padding[1], 472 height * scale + padding[2], 473 c="gray", 474 ) 475 try: 476 pr = vtki.vtkProperty() 477 pr.DeepCopy(self.properties) 478 bx.actor.SetProperty(pr) 479 bx.properties = pr 480 except (AttributeError, TypeError): 481 pass 482 bx.flat().lighting("off").wireframe(True) 483 return bx
Return the bounding box as a new `Mesh` object.

Arguments:
- scale : (float) box size can be scaled by a factor
- padding : (float, list) a constant padding can be added (can be a list `[padx,pady,padz]`)
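For illustration, a small sketch using the bunny mesh from the vedo data repository; the scale and padding values are arbitrary:

```python
from vedo import Mesh, dataurl, show

m = Mesh(dataurl + "bunny.obj")
bb = m.box(scale=1.1, padding=0.001)  # slightly enlarged wireframe bounding box
show(m, bb, axes=1).close()
```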
485 def update_dataset(self, dataset, **kwargs) -> Self: 486 """Update the dataset of the object with the provided VTK dataset.""" 487 self._update(dataset, **kwargs) 488 return self
Update the dataset of the object with the provided VTK dataset.
490 def bounds(self) -> np.ndarray: 491 """ 492 Get the object bounds. 493 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 494 """ 495 try: # this is very slow for large meshes 496 pts = self.vertices 497 xmin, ymin, zmin = np.min(pts, axis=0) 498 xmax, ymax, zmax = np.max(pts, axis=0) 499 return np.array([xmin, xmax, ymin, ymax, zmin, zmax]) 500 except (AttributeError, ValueError): 501 return np.array(self.dataset.GetBounds())
Get the object bounds. Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`.
503 def xbounds(self, i=None) -> np.ndarray: 504 """Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1).""" 505 b = self.bounds() 506 if i is not None: 507 return b[i] 508 return np.array([b[0], b[1]])
Get the bounds `[xmin,xmax]`. Can specify upper or lower with i (0,1).
510 def ybounds(self, i=None) -> np.ndarray: 511 """Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1).""" 512 b = self.bounds() 513 if i == 0: 514 return b[2] 515 if i == 1: 516 return b[3] 517 return np.array([b[2], b[3]])
Get the bounds `[ymin,ymax]`. Can specify upper or lower with i (0,1).
519 def zbounds(self, i=None) -> np.ndarray: 520 """Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1).""" 521 b = self.bounds() 522 if i == 0: 523 return b[4] 524 if i == 1: 525 return b[5] 526 return np.array([b[4], b[5]])
Get the bounds `[zmin,zmax]`. Can specify upper or lower with i (0,1).
528 def diagonal_size(self) -> float: 529 """Get the length of the diagonal of the bounding box.""" 530 b = self.bounds() 531 return np.sqrt((b[1] - b[0])**2 + (b[3] - b[2])**2 + (b[5] - b[4])**2)
Get the length of the diagonal of the bounding box.
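A quick sketch of the bounds-related queries; the sphere and the shift are only there to produce non-trivial numbers:

```python
from vedo import Sphere

s = Sphere(r=2).shift(1, 0, 0)
print(s.bounds())          # [xmin, xmax, ymin, ymax, zmin, zmax]
print(s.xbounds())         # [xmin, xmax]
print(s.xbounds(i=1))      # xmax only
print(s.diagonal_size())   # length of the bounding-box diagonal
```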
533 def average_size(self) -> float: 534 """ 535 Calculate and return the average size of the object. 536 This is the mean of the vertex distances from the center of mass. 537 """ 538 coords = self.vertices 539 cm = np.mean(coords, axis=0) 540 if coords.shape[0] == 0: 541 return 0.0 542 cc = coords - cm 543 return np.mean(np.linalg.norm(cc, axis=1))
Calculate and return the average size of the object. This is the mean of the vertex distances from the center of mass.
545 def center_of_mass(self) -> np.ndarray: 546 """Get the center of mass of the object.""" 547 if isinstance(self, (vedo.RectilinearGrid, vedo.Volume)): 548 return np.array(self.dataset.GetCenter()) 549 cmf = vtki.new("CenterOfMass") 550 cmf.SetInputData(self.dataset) 551 cmf.Update() 552 c = cmf.GetCenter() 553 return np.array(c)
Get the center of mass of the object.
555 def copy_data_from(self, obj: Any) -> Self: 556 """Copy all data (point and cell data) from this input object""" 557 self.dataset.GetPointData().PassData(obj.dataset.GetPointData()) 558 self.dataset.GetCellData().PassData(obj.dataset.GetCellData()) 559 self.pipeline = utils.OperationNode( 560 "copy_data_from", 561 parents=[self, obj], 562 comment=f"{obj.__class__.__name__}", 563 shape="note", 564 c="#ccc5b9", 565 ) 566 return self
Copy all data (point and cell data) from the given input object.
568 def inputdata(self): 569 """Obsolete, use `.dataset` instead.""" 570 colors.printc("WARNING: 'inputdata()' is obsolete, use '.dataset' instead.", c="y") 571 return self.dataset
Obsolete, use `.dataset` instead.
573 @property 574 def npoints(self): 575 """Retrieve the number of points (or vertices).""" 576 return self.dataset.GetNumberOfPoints()
Retrieve the number of points (or vertices).
578 @property 579 def nvertices(self): 580 """Retrieve the number of vertices (or points).""" 581 return self.dataset.GetNumberOfPoints()
Retrieve the number of vertices (or points).
583 @property 584 def ncells(self): 585 """Retrieve the number of cells.""" 586 return self.dataset.GetNumberOfCells()
Retrieve the number of cells.
588 def points(self, pts=None): 589 """Obsolete, use `self.vertices` or `self.coordinates` instead.""" 590 if pts is None: ### getter 591 592 if warnings["points_getter"]: 593 colors.printc(warnings["points_getter"], c="y") 594 warnings["points_getter"] = "" 595 return self.vertices 596 597 else: ### setter 598 599 if warnings["points_setter"]: 600 colors.printc(warnings["points_setter"], c="y") 601 warnings["points_setter"] = "" 602 603 pts = np.asarray(pts, dtype=np.float32) 604 605 if pts.ndim == 1: 606 ### getter by point index ################### 607 indices = pts.astype(int) 608 vpts = self.dataset.GetPoints() 609 arr = utils.vtk2numpy(vpts.GetData()) 610 return arr[indices] ########### 611 612 ### setter #################################### 613 if pts.shape[1] == 2: 614 pts = np.c_[pts, np.zeros(pts.shape[0], dtype=np.float32)] 615 arr = utils.numpy2vtk(pts, dtype=np.float32) 616 617 vpts = self.dataset.GetPoints() 618 vpts.SetData(arr) 619 vpts.Modified() 620 # reset mesh to identity matrix position/rotation: 621 self.point_locator = None 622 self.cell_locator = None 623 self.line_locator = None 624 self.transform = LinearTransform() 625 return self
Obsolete, use `self.vertices` or `self.coordinates` instead.
627 @property 628 def cell_centers(self): 629 """ 630 Get the coordinates of the cell centers. 631 632 Examples: 633 - [delaunay2d.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/delaunay2d.py) 634 635 See also: `CellCenters()`. 636 """ 637 vcen = vtki.new("CellCenters") 638 vcen.CopyArraysOff() 639 vcen.SetInputData(self.dataset) 640 vcen.Update() 641 return utils.vtk2numpy(vcen.GetOutput().GetPoints().GetData())
643 @property 644 def lines(self): 645 """ 646 Get lines connectivity ids as a python array 647 formatted as `[[id0,id1], [id3,id4], ...]` 648 649 See also: `lines_as_flat_array()`. 650 """ 651 # Get cell connettivity ids as a 1D array. The vtk format is: 652 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 653 try: 654 arr1d = utils.vtk2numpy(self.dataset.GetLines().GetData()) 655 except AttributeError: 656 return np.array([]) 657 i = 0 658 conn = [] 659 n = len(arr1d) 660 for _ in range(n): 661 cell = [arr1d[i + k + 1] for k in range(arr1d[i])] 662 conn.append(cell) 663 i += arr1d[i] + 1 664 if i >= n: 665 break 666 667 return conn # cannot always make a numpy array of it!
Get lines connectivity ids as a python array formatted as `[[id0,id1], [id3,id4], ...]`. See also: `lines_as_flat_array()`.
669 @property 670 def lines_as_flat_array(self): 671 """ 672 Get lines connectivity ids as a 1D numpy array. 673 Format is e.g. [2, 10,20, 3, 10,11,12, 2, 70,80, ...] 674 675 See also: `lines()`. 676 """ 677 try: 678 return utils.vtk2numpy(self.dataset.GetLines().GetData()) 679 except AttributeError: 680 return np.array([])
Get lines connectivity ids as a 1D numpy array. Format is e.g. `[2, 10,20, 3, 10,11,12, 2, 70,80, ...]`. See also: `lines()`.
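A short sketch, assuming a simple `Line` object, showing the two connectivity layouts side by side:

```python
from vedo import Line

ln = Line((0, 0, 0), (1, 1, 1), res=10)
print(ln.lines)                # point ids of each line cell
print(ln.lines_as_flat_array)  # same information in the flat VTK layout
```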
682 def mark_boundaries(self) -> Self: 683 """ 684 Mark cells and vertices if they lie on a boundary. 685 A new array called `BoundaryCells` is added to the object. 686 """ 687 mb = vtki.new("MarkBoundaryFilter") 688 mb.SetInputData(self.dataset) 689 mb.Update() 690 self.dataset.DeepCopy(mb.GetOutput()) 691 self.pipeline = utils.OperationNode("mark_boundaries", parents=[self]) 692 return self
Mark cells and vertices if they lie on a boundary. A new array called `BoundaryCells` is added to the object.
694 def find_cells_in_bounds(self, xbounds=(), ybounds=(), zbounds=()) -> np.ndarray: 695 """ 696 Find cells that are within the specified bounds. 697 """ 698 try: 699 xbounds = list(xbounds.bounds()) 700 except AttributeError: 701 pass 702 703 if len(xbounds) == 6: 704 bnds = xbounds 705 else: 706 bnds = list(self.bounds()) 707 if len(xbounds) == 2: 708 bnds[0] = xbounds[0] 709 bnds[1] = xbounds[1] 710 if len(ybounds) == 2: 711 bnds[2] = ybounds[0] 712 bnds[3] = ybounds[1] 713 if len(zbounds) == 2: 714 bnds[4] = zbounds[0] 715 bnds[5] = zbounds[1] 716 717 cell_ids = vtki.vtkIdList() 718 if not self.cell_locator: 719 self.cell_locator = vtki.new("CellTreeLocator") 720 self.cell_locator.SetDataSet(self.dataset) 721 self.cell_locator.BuildLocator() 722 self.cell_locator.FindCellsWithinBounds(bnds, cell_ids) 723 cids = [] 724 for i in range(cell_ids.GetNumberOfIds()): 725 cid = cell_ids.GetId(i) 726 cids.append(cid) 727 return np.array(cids)
Find cells that are within the specified bounds.
729 def find_cells_along_line(self, p0, p1, tol=0.001) -> np.ndarray: 730 """ 731 Find cells that are intersected by a line segment. 732 """ 733 cell_ids = vtki.vtkIdList() 734 if not self.cell_locator: 735 self.cell_locator = vtki.new("CellTreeLocator") 736 self.cell_locator.SetDataSet(self.dataset) 737 self.cell_locator.BuildLocator() 738 self.cell_locator.FindCellsAlongLine(p0, p1, tol, cell_ids) 739 cids = [] 740 for i in range(cell_ids.GetNumberOfIds()): 741 cid = cell_ids.GetId(i) 742 cids.append(cid) 743 return np.array(cids)
Find cells that are intersected by a line segment.
745 def find_cells_along_plane(self, origin, normal, tol=0.001) -> np.ndarray: 746 """ 747 Find cells that are intersected by a plane. 748 """ 749 cell_ids = vtki.vtkIdList() 750 if not self.cell_locator: 751 self.cell_locator = vtki.new("CellTreeLocator") 752 self.cell_locator.SetDataSet(self.dataset) 753 self.cell_locator.BuildLocator() 754 self.cell_locator.FindCellsAlongPlane(origin, normal, tol, cell_ids) 755 cids = [] 756 for i in range(cell_ids.GetNumberOfIds()): 757 cid = cell_ids.GetId(i) 758 cids.append(cid) 759 return np.array(cids)
Find cells that are intersected by a plane.
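A minimal sketch of the cell-finder methods on a sphere; the segment and the bounds are chosen arbitrarily:

```python
from vedo import Sphere

s = Sphere(res=24)
# cells crossed by a vertical segment through the center
along = s.find_cells_along_line((0, 0, -2), (0, 0, 2), tol=0.001)
# cells falling in the region 0 <= x <= 2
inside = s.find_cells_in_bounds(xbounds=(0, 2))
print(len(along), len(inside))
```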
761 def keep_cell_types(self, types=()): 762 """ 763 Extract cells of a specific type. 764 765 Check the VTK cell types here: 766 https://vtk.org/doc/nightly/html/vtkCellType_8h.html 767 """ 768 fe = vtki.new("ExtractCellsByType") 769 fe.SetInputData(self.dataset) 770 for t in types: 771 try: 772 if utils.is_integer(t): 773 it = t 774 else: 775 it = vtki.cell_types[t.upper()] 776 except KeyError: 777 vedo.logger.error(f"Cell type '{t}' not recognized") 778 continue 779 fe.AddCellType(it) 780 fe.Update() 781 self._update(fe.GetOutput()) 782 return self
Extract cells of a specific type.
Check the VTK cell types here: https://vtk.org/doc/nightly/html/vtkCellType_8h.html
784 def map_cells_to_points(self, arrays=(), move=False) -> Self: 785 """ 786 Interpolate cell data (i.e., data specified per cell or face) 787 into point data (i.e., data specified at each vertex). 788 The method of transformation is based on averaging the data values 789 of all cells using a particular point. 790 791 A custom list of arrays to be mapped can be passed in input. 792 793 Set `move=True` to delete the original `celldata` array. 794 """ 795 c2p = vtki.new("CellDataToPointData") 796 c2p.SetInputData(self.dataset) 797 if not move: 798 c2p.PassCellDataOn() 799 if arrays: 800 c2p.ClearCellDataArrays() 801 c2p.ProcessAllArraysOff() 802 for arr in arrays: 803 c2p.AddCellDataArray(arr) 804 else: 805 c2p.ProcessAllArraysOn() 806 c2p.Update() 807 self._update(c2p.GetOutput(), reset_locators=False) 808 self.mapper.SetScalarModeToUsePointData() 809 self.pipeline = utils.OperationNode("map_cells_to_points", parents=[self]) 810 return self
Interpolate cell data (i.e., data specified per cell or face) into point data (i.e., data specified at each vertex). The method of transformation is based on averaging the data values of all cells using a particular point.
A custom list of arrays to be mapped can be passed in input.
Set `move=True` to delete the original `celldata` array.
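A minimal sketch, with an arbitrary per-cell array, showing how cell data is averaged onto the vertices:

```python
from vedo import Sphere
import numpy as np

s = Sphere(res=24)
s.celldata["cval"] = np.arange(s.ncells)   # one value per cell
s.map_cells_to_points(arrays=["cval"])     # averaged onto the vertices
print(s.pointdata["cval"].shape, s.npoints)
```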
812 @property 813 def vertices(self): 814 """Return the vertices (points) coordinates.""" 815 try: 816 # for polydata and unstructured grid 817 varr = self.dataset.GetPoints().GetData() 818 except (AttributeError, TypeError): 819 try: 820 # for RectilinearGrid, StructuredGrid 821 vpts = vtki.vtkPoints() 822 self.dataset.GetPoints(vpts) 823 varr = vpts.GetData() 824 except (AttributeError, TypeError): 825 try: 826 # for ImageData 827 v2p = vtki.new("ImageToPoints") 828 v2p.SetInputData(self.dataset) 829 v2p.Update() 830 varr = v2p.GetOutput().GetPoints().GetData() 831 except AttributeError: 832 return np.array([]) 833 834 return utils.vtk2numpy(varr)
Return the vertices (points) coordinates.
855 @property 856 def coordinates(self): 857 """Return the vertices (points) coordinates. Same as `vertices`.""" 858 return self.vertices
Return the vertices (points) coordinates. Same as `vertices`.
865 @property 866 def cells_as_flat_array(self): 867 """ 868 Get cell connectivity ids as a 1D numpy array. 869 Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...] 870 """ 871 try: 872 # valid for unstructured grid 873 arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData()) 874 except AttributeError: 875 # valid for polydata 876 arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData()) 877 return arr1d
Get cell connectivity ids as a 1D numpy array. Format is e.g. [3, 10,20,30 4, 10,11,12,13 ...]
879 @property 880 def cells(self): 881 """ 882 Get the cells connectivity ids as a numpy array. 883 884 The output format is: `[[id0 ... idn], [id0 ... idm], etc]`. 885 """ 886 try: 887 # valid for unstructured grid 888 arr1d = utils.vtk2numpy(self.dataset.GetCells().GetData()) 889 except AttributeError: 890 try: 891 # valid for polydata 892 arr1d = utils.vtk2numpy(self.dataset.GetPolys().GetData()) 893 except AttributeError: 894 vedo.logger.warning(f"Cannot get cells for {type(self)}") 895 return np.array([]) 896 897 # Get cell connettivity ids as a 1D array. vtk format is: 898 # [nids1, id0 ... idn, niids2, id0 ... idm, etc]. 899 i = 0 900 conn = [] 901 n = len(arr1d) 902 if n: 903 while True: 904 cell = [arr1d[i + k] for k in range(1, arr1d[i] + 1)] 905 conn.append(cell) 906 i += arr1d[i] + 1 907 if i >= n: 908 break 909 return conn
Get the cells connectivity ids as a numpy array.
The output format is: `[[id0 ... idn], [id0 ... idm], etc]`.
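For illustration, the two ways of reading the cell connectivity of a small sphere:

```python
from vedo import Sphere

s = Sphere(res=8)
print(s.cells[:3])                # list of per-cell point id lists
print(s.cells_as_flat_array[:8])  # flat VTK layout: [n, id0..idn, n, ...]
```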
911 def cell_edge_neighbors(self): 912 """ 913 Get the cell neighbor indices of each cell. 914 915 Returns a python list of lists. 916 """ 917 918 def face_to_edges(face): 919 edges = [] 920 size = len(face) 921 for i in range(1, size + 1): 922 if i == size: 923 edges.append([face[i - 1], face[0]]) 924 else: 925 edges.append([face[i - 1], face[i]]) 926 return edges 927 928 pd = self.dataset 929 pd.BuildLinks() 930 931 neicells = [] 932 for i, cell in enumerate(self.cells): 933 nn = [] 934 for edge in face_to_edges(cell): 935 neighbors = vtki.vtkIdList() 936 pd.GetCellEdgeNeighbors(i, edge[0], edge[1], neighbors) 937 if neighbors.GetNumberOfIds() > 0: 938 neighbor = neighbors.GetId(0) 939 nn.append(neighbor) 940 neicells.append(nn) 941 942 return neicells
Get the cell neighbor indices of each cell.
Returns a python list of lists.
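A minimal sketch; for a triangle mesh each cell typically has three edge neighbors:

```python
from vedo import Sphere

s = Sphere(res=8)
nbrs = s.cell_edge_neighbors()
print(nbrs[0])   # indices of the cells sharing an edge with cell 0
```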
945 def map_points_to_cells(self, arrays=(), move=False) -> Self: 946 """ 947 Interpolate point data (i.e., data specified per point or vertex) 948 into cell data (i.e., data specified per cell). 949 The method of transformation is based on averaging the data values 950 of all points defining a particular cell. 951 952 A custom list of arrays to be mapped can be passed in input. 953 954 Set `move=True` to delete the original `pointdata` array. 955 956 Examples: 957 - [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py) 958 """ 959 p2c = vtki.new("PointDataToCellData") 960 p2c.SetInputData(self.dataset) 961 if not move: 962 p2c.PassPointDataOn() 963 if arrays: 964 p2c.ClearPointDataArrays() 965 p2c.ProcessAllArraysOff() 966 for arr in arrays: 967 p2c.AddPointDataArray(arr) 968 else: 969 p2c.ProcessAllArraysOn() 970 p2c.Update() 971 self._update(p2c.GetOutput(), reset_locators=False) 972 self.mapper.SetScalarModeToUseCellData() 973 self.pipeline = utils.OperationNode("map_points_to_cells", parents=[self]) 974 return self
Interpolate point data (i.e., data specified per point or vertex) into cell data (i.e., data specified per cell). The method of transformation is based on averaging the data values of all points defining a particular cell.
A custom list of arrays to be mapped can be passed in input.
Set `move=True` to delete the original `pointdata` array.

Examples:
- [mesh_map2cell.py](https://github.com/marcomusy/vedo/tree/master/examples/basic/mesh_map2cell.py)
976 def resample_data_from(self, source, tol=None, categorical=False) -> Self: 977 """ 978 Resample point and cell data from another dataset. 979 The output has the same structure but its point data have 980 the resampled values from target. 981 982 Use `tol` to set the tolerance used to compute whether 983 a point in the source is in a cell of the current object. 984 Points without resampled values, and their cells, are marked as blank. 985 If the data is categorical, then the resulting data will be determined 986 by a nearest neighbor interpolation scheme. 987 988 Example: 989 ```python 990 from vedo import * 991 m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1) 992 pts = m1.vertices 993 ces = m1.cell_centers 994 m1.pointdata["xvalues"] = np.power(pts[:,0], 3) 995 m1.celldata["yvalues"] = np.power(ces[:,1], 3) 996 m2 = Mesh(dataurl+'bunny.obj') 997 m2.resample_data_from(m1) 998 # print(m2.pointdata["xvalues"]) 999 show(m1, m2 , N=2, axes=1) 1000 ``` 1001 """ 1002 rs = vtki.new("ResampleWithDataSet") 1003 rs.SetInputData(self.dataset) 1004 rs.SetSourceData(source.dataset) 1005 1006 rs.SetPassPointArrays(True) 1007 rs.SetPassCellArrays(True) 1008 rs.SetPassFieldArrays(True) 1009 rs.SetCategoricalData(categorical) 1010 1011 rs.SetComputeTolerance(True) 1012 if tol: 1013 rs.SetComputeTolerance(False) 1014 rs.SetTolerance(tol) 1015 rs.Update() 1016 self._update(rs.GetOutput(), reset_locators=False) 1017 self.pipeline = utils.OperationNode( 1018 "resample_data_from", 1019 comment=f"{source.__class__.__name__}", 1020 parents=[self, source], 1021 ) 1022 return self
Resample point and cell data from another dataset. The output has the same structure, but its point data carry the values resampled from the source.

Use `tol` to set the tolerance used to compute whether a point in the source is in a cell of the current object.
Points without resampled values, and their cells, are marked as blank.
If the data is categorical, then the resulting data will be determined by a nearest neighbor interpolation scheme.

Example:
```python
from vedo import *
m1 = Mesh(dataurl+'bunny.obj')#.add_gaussian_noise(0.1)
pts = m1.vertices
ces = m1.cell_centers
m1.pointdata["xvalues"] = np.power(pts[:,0], 3)
m1.celldata["yvalues"] = np.power(ces[:,1], 3)
m2 = Mesh(dataurl+'bunny.obj')
m2.resample_data_from(m1)
# print(m2.pointdata["xvalues"])
show(m1, m2, N=2, axes=1)
```
1024 def interpolate_data_from( 1025 self, 1026 source, 1027 radius=None, 1028 n=None, 1029 kernel="shepard", 1030 exclude=("Normals",), 1031 on="points", 1032 null_strategy=1, 1033 null_value=0, 1034 ) -> Self: 1035 """ 1036 Interpolate over source to port its data onto the current object using various kernels. 1037 1038 If n (number of closest points to use) is set then radius value is ignored. 1039 1040 Check out also: 1041 `probe()` which in many cases can be faster. 1042 1043 Arguments: 1044 kernel : (str) 1045 available kernels are [shepard, gaussian, linear] 1046 null_strategy : (int) 1047 specify a strategy to use when encountering a "null" point 1048 during the interpolation process. Null points occur when the local neighborhood 1049 (of nearby points to interpolate from) is empty. 1050 1051 - Case 0: an output array is created that marks points 1052 as being valid (=1) or null (invalid =0), and the null_value is set as well 1053 - Case 1: the output data value(s) are set to the provided null_value 1054 - Case 2: simply use the closest point to perform the interpolation. 1055 null_value : (float) 1056 see above. 1057 1058 Examples: 1059 - [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py) 1060 - [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py) 1061 - [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py) 1062 - [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py) 1063 1064 ![](https://vedo.embl.es/images/advanced/interpolateMeshArray.png) 1065 """ 1066 if radius is None and not n: 1067 vedo.logger.error("in interpolate_data_from(): please set either radius or n") 1068 raise RuntimeError 1069 1070 if on == "points": 1071 points = source.dataset 1072 elif on == "cells": 1073 c2p = vtki.new("CellDataToPointData") 1074 c2p.SetInputData(source.dataset) 1075 c2p.Update() 1076 points = c2p.GetOutput() 1077 else: 1078 vedo.logger.error("in interpolate_data_from(), on must be on points or cells") 1079 raise RuntimeError() 1080 1081 locator = vtki.new("PointLocator") 1082 locator.SetDataSet(points) 1083 locator.BuildLocator() 1084 1085 if kernel.lower() == "shepard": 1086 kern = vtki.new("ShepardKernel") 1087 kern.SetPowerParameter(2) 1088 elif kernel.lower() == "gaussian": 1089 kern = vtki.new("GaussianKernel") 1090 kern.SetSharpness(2) 1091 elif kernel.lower() == "linear": 1092 kern = vtki.new("LinearKernel") 1093 else: 1094 vedo.logger.error("available kernels are: [shepard, gaussian, linear]") 1095 raise RuntimeError() 1096 1097 if n: 1098 kern.SetNumberOfPoints(n) 1099 kern.SetKernelFootprintToNClosest() 1100 else: 1101 kern.SetRadius(radius) 1102 kern.SetKernelFootprintToRadius() 1103 1104 interpolator = vtki.new("PointInterpolator") 1105 interpolator.SetInputData(self.dataset) 1106 interpolator.SetSourceData(points) 1107 interpolator.SetKernel(kern) 1108 interpolator.SetLocator(locator) 1109 interpolator.PassFieldArraysOn() 1110 interpolator.SetNullPointsStrategy(null_strategy) 1111 interpolator.SetNullValue(null_value) 1112 interpolator.SetValidPointsMaskArrayName("ValidPointMask") 1113 for ex in exclude: 1114 interpolator.AddExcludedArray(ex) 1115 1116 # remove arrays that are already present in the source 1117 # this is because the interpolator will ignore them otherwise 1118 for i in range(self.dataset.GetPointData().GetNumberOfArrays()): 1119 name = 
self.dataset.GetPointData().GetArrayName(i) 1120 if name not in exclude: 1121 self.dataset.GetPointData().RemoveArray(name) 1122 1123 interpolator.Update() 1124 1125 if on == "cells": 1126 p2c = vtki.new("PointDataToCellData") 1127 p2c.SetInputData(interpolator.GetOutput()) 1128 p2c.Update() 1129 cpoly = p2c.GetOutput() 1130 else: 1131 cpoly = interpolator.GetOutput() 1132 1133 self._update(cpoly, reset_locators=False) 1134 1135 self.pipeline = utils.OperationNode("interpolate_data_from", parents=[self, source]) 1136 return self
Interpolate over source to port its data onto the current object using various kernels.
If n (number of closest points to use) is set, then the radius value is ignored.

Check out also: `probe()` which in many cases can be faster.

Arguments:
- kernel : (str) available kernels are [shepard, gaussian, linear]
- null_strategy : (int) specify a strategy to use when encountering a "null" point during the interpolation process. Null points occur when the local neighborhood (of nearby points to interpolate from) is empty.
  - Case 0: an output array is created that marks points as being valid (=1) or null (invalid =0), and the null_value is set as well
  - Case 1: the output data value(s) are set to the provided null_value
  - Case 2: simply use the closest point to perform the interpolation.
- null_value : (float) see above.

Examples:
- [interpolate_scalar1.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar1.py)
- [interpolate_scalar3.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar3.py)
- [interpolate_scalar4.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/interpolate_scalar4.py)
- [image_probe.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/image_probe.py)
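A minimal sketch (assumptions: the bunny mesh from `dataurl` and an arbitrary scalar array) interpolating point data with the n-closest-points footprint:

```python
from vedo import Mesh, dataurl

m1 = Mesh(dataurl + "bunny.obj")
m1.pointdata["scalars"] = m1.vertices[:, 1]

m2 = Mesh(dataurl + "bunny.obj").shift(0.05, 0, 0)
# port m1's point data onto m2 using the 5 closest points (radius is ignored)
m2.interpolate_data_from(m1, n=5, kernel="gaussian")
print(m2.pointdata.keys())
```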
1138 def add_ids(self) -> Self: 1139 """ 1140 Generate point and cell ids arrays. 1141 1142 Two new arrays are added to the mesh: `PointID` and `CellID`. 1143 """ 1144 ids = vtki.new("IdFilter") 1145 ids.SetInputData(self.dataset) 1146 ids.PointIdsOn() 1147 ids.CellIdsOn() 1148 ids.FieldDataOff() 1149 ids.SetPointIdsArrayName("PointID") 1150 ids.SetCellIdsArrayName("CellID") 1151 ids.Update() 1152 self._update(ids.GetOutput(), reset_locators=False) 1153 self.pipeline = utils.OperationNode("add_ids", parents=[self]) 1154 return self
Generate point and cell ids arrays.
Two new arrays are added to the mesh: `PointID` and `CellID`.
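For illustration:

```python
from vedo import Sphere

s = Sphere().add_ids()
print(s.pointdata["PointID"][:5])  # ids 0 .. npoints-1
print(s.celldata["CellID"][:5])    # ids 0 .. ncells-1
```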
1156 def gradient(self, input_array=None, on="points", fast=False) -> np.ndarray: 1157 """ 1158 Compute and return the gradiend of the active scalar field as a numpy array. 1159 1160 Arguments: 1161 input_array : (str) 1162 array of the scalars to compute the gradient, 1163 if None the current active array is selected 1164 on : (str) 1165 compute either on 'points' or 'cells' data 1166 fast : (bool) 1167 if True, will use a less accurate algorithm 1168 that performs fewer derivative calculations (and is therefore faster). 1169 1170 Examples: 1171 - [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py) 1172 1173 ![](https://user-images.githubusercontent.com/32848391/72433087-f00a8780-3798-11ea-9778-991f0abeca70.png) 1174 """ 1175 gra = vtki.new("GradientFilter") 1176 if on.startswith("p"): 1177 varr = self.dataset.GetPointData() 1178 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1179 elif on.startswith("c"): 1180 varr = self.dataset.GetCellData() 1181 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1182 else: 1183 vedo.logger.error(f"in gradient: unknown option {on}") 1184 raise RuntimeError 1185 1186 if input_array is None: 1187 if varr.GetScalars(): 1188 input_array = varr.GetScalars().GetName() 1189 else: 1190 vedo.logger.error(f"in gradient: no scalars found for {on}") 1191 raise RuntimeError 1192 1193 gra.SetInputData(self.dataset) 1194 gra.SetInputScalars(tp, input_array) 1195 gra.SetResultArrayName("Gradient") 1196 gra.SetFasterApproximation(fast) 1197 gra.ComputeDivergenceOff() 1198 gra.ComputeVorticityOff() 1199 gra.ComputeGradientOn() 1200 gra.Update() 1201 # self._update(gra.GetOutput(), reset_locators=False) 1202 if on.startswith("p"): 1203 gvecs = utils.vtk2numpy(gra.GetOutput().GetPointData().GetArray("Gradient")) 1204 else: 1205 gvecs = utils.vtk2numpy(gra.GetOutput().GetCellData().GetArray("Gradient")) 1206 return gvecs
Compute and return the gradient of the active scalar field as a numpy array.
Arguments:
- input_array : (str) array of the scalars to compute the gradient, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
Examples:
- [isolines.py](https://github.com/marcomusy/vedo/tree/master/examples/advanced/isolines.py)
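A minimal sketch, using an arbitrary scalar field defined on a sphere:

```python
from vedo import Sphere

s = Sphere(res=24)
s.pointdata["z"] = s.vertices[:, 2]   # scalar field f = z
g = s.gradient("z", on="points")      # (npoints, 3) gradient vectors
print(g.shape)
```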
1208 def divergence(self, array_name=None, on="points", fast=False) -> np.ndarray: 1209 """ 1210 Compute and return the divergence of a vector field as a numpy array. 1211 1212 Arguments: 1213 array_name : (str) 1214 name of the array of vectors to compute the divergence, 1215 if None the current active array is selected 1216 on : (str) 1217 compute either on 'points' or 'cells' data 1218 fast : (bool) 1219 if True, will use a less accurate algorithm 1220 that performs fewer derivative calculations (and is therefore faster). 1221 """ 1222 div = vtki.new("GradientFilter") 1223 if on.startswith("p"): 1224 varr = self.dataset.GetPointData() 1225 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1226 elif on.startswith("c"): 1227 varr = self.dataset.GetCellData() 1228 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1229 else: 1230 vedo.logger.error(f"in divergence(): unknown option {on}") 1231 raise RuntimeError 1232 1233 if array_name is None: 1234 if varr.GetVectors(): 1235 array_name = varr.GetVectors().GetName() 1236 else: 1237 vedo.logger.error(f"in divergence(): no vectors found for {on}") 1238 raise RuntimeError 1239 1240 div.SetInputData(self.dataset) 1241 div.SetInputScalars(tp, array_name) 1242 div.ComputeDivergenceOn() 1243 div.ComputeGradientOff() 1244 div.ComputeVorticityOff() 1245 div.SetDivergenceArrayName("Divergence") 1246 div.SetFasterApproximation(fast) 1247 div.Update() 1248 # self._update(div.GetOutput(), reset_locators=False) 1249 if on.startswith("p"): 1250 dvecs = utils.vtk2numpy(div.GetOutput().GetPointData().GetArray("Divergence")) 1251 else: 1252 dvecs = utils.vtk2numpy(div.GetOutput().GetCellData().GetArray("Divergence")) 1253 return dvecs
Compute and return the divergence of a vector field as a numpy array.
Arguments:
- array_name : (str) name of the array of vectors to compute the divergence, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
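Analogous to `gradient()`; a short sketch with an arbitrary radial vector field:

```python
from vedo import Sphere

s = Sphere(res=24)
s.pointdata["vecs"] = s.vertices   # radial field v = (x, y, z)
d = s.divergence("vecs")           # one value per point
print(d.shape)
```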
1255 def vorticity(self, array_name=None, on="points", fast=False) -> np.ndarray: 1256 """ 1257 Compute and return the vorticity of a vector field as a numpy array. 1258 1259 Arguments: 1260 array_name : (str) 1261 name of the array to compute the vorticity, 1262 if None the current active array is selected 1263 on : (str) 1264 compute either on 'points' or 'cells' data 1265 fast : (bool) 1266 if True, will use a less accurate algorithm 1267 that performs fewer derivative calculations (and is therefore faster). 1268 """ 1269 vort = vtki.new("GradientFilter") 1270 if on.startswith("p"): 1271 varr = self.dataset.GetPointData() 1272 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_POINTS 1273 elif on.startswith("c"): 1274 varr = self.dataset.GetCellData() 1275 tp = vtki.vtkDataObject.FIELD_ASSOCIATION_CELLS 1276 else: 1277 vedo.logger.error(f"in vorticity(): unknown option {on}") 1278 raise RuntimeError 1279 1280 if array_name is None: 1281 if varr.GetVectors(): 1282 array_name = varr.GetVectors().GetName() 1283 else: 1284 vedo.logger.error(f"in vorticity(): no vectors found for {on}") 1285 raise RuntimeError 1286 1287 vort.SetInputData(self.dataset) 1288 vort.SetInputScalars(tp, array_name) 1289 vort.ComputeDivergenceOff() 1290 vort.ComputeGradientOff() 1291 vort.ComputeVorticityOn() 1292 vort.SetVorticityArrayName("Vorticity") 1293 vort.SetFasterApproximation(fast) 1294 vort.Update() 1295 if on.startswith("p"): 1296 vvecs = utils.vtk2numpy(vort.GetOutput().GetPointData().GetArray("Vorticity")) 1297 else: 1298 vvecs = utils.vtk2numpy(vort.GetOutput().GetCellData().GetArray("Vorticity")) 1299 return vvecs
Compute and return the vorticity of a vector field as a numpy array.
Arguments:
- array_name : (str) name of the array to compute the vorticity, if None the current active array is selected
- on : (str) compute either on 'points' or 'cells' data
- fast : (bool) if True, will use a less accurate algorithm that performs fewer derivative calculations (and is therefore faster).
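A minimal sketch (the rigid-rotation field below is an illustrative assumption; its curl is constant, so the result should be roughly (0, 0, 2) everywhere):

```python
import numpy as np
from vedo import Volume

vol = Volume(np.zeros((20, 20, 20)))      # dummy grid with unit spacing
pts = vol.vertices
vol.pointdata["Velocity"] = np.c_[-pts[:, 1], pts[:, 0], np.zeros(len(pts))]  # rotation about z
w = vol.vorticity(array_name="Velocity", on="points")
print(w.mean(axis=0))                     # expected to be close to (0, 0, 2)
```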
1301 def probe( 1302 self, 1303 source, 1304 categorical=False, 1305 snap=False, 1306 tol=0, 1307 ) -> Self: 1308 """ 1309 Takes a data set and probes its scalars at the specified points in space. 1310 1311 Note that a mask is also output with valid/invalid points which can be accessed 1312 with `mesh.pointdata['ValidPointMask']`. 1313 1314 Arguments: 1315 source : any dataset 1316 the data set to probe. 1317 categorical : bool 1318 control whether the source pointdata is to be treated as categorical. 1319 snap : bool 1320 snap to the cell with the closest point if no cell was found 1321 tol : float 1322 the tolerance to use when performing the probe. 1323 1324 Check out also: 1325 `interpolate_data_from()` and `tovolume()` 1326 1327 Examples: 1328 - [probe_points.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py) 1329 1330 ![](https://vedo.embl.es/images/volumetric/probePoints.png) 1331 """ 1332 probe_filter = vtki.new("ProbeFilter") 1333 probe_filter.SetSourceData(source.dataset) 1334 probe_filter.SetInputData(self.dataset) 1335 probe_filter.PassCellArraysOn() 1336 probe_filter.PassFieldArraysOn() 1337 probe_filter.PassPointArraysOn() 1338 probe_filter.SetCategoricalData(categorical) 1339 probe_filter.ComputeToleranceOff() 1340 if tol: 1341 probe_filter.ComputeToleranceOn() 1342 probe_filter.SetTolerance(tol) 1343 probe_filter.SetSnapToCellWithClosestPoint(snap) 1344 probe_filter.Update() 1345 self._update(probe_filter.GetOutput(), reset_locators=False) 1346 self.pipeline = utils.OperationNode("probe", parents=[self, source]) 1347 self.pointdata.rename("vtkValidPointMask", "ValidPointMask") 1348 return self
Takes a data set and probes its scalars at the specified points in space.
Note that a mask is also output with valid/invalid points, which can be accessed with mesh.pointdata['ValidPointMask'].
Arguments:
- source : (any dataset) the data set to probe.
- categorical : bool control whether the source pointdata is to be treated as categorical.
- snap : bool snap to the cell with the closest point if no cell was found
- tol : float the tolerance to use when performing the probe.
Check out also: interpolate_data_from() and tovolume()
Examples:
- probe_points.py: https://github.com/marcomusy/vedo/tree/master/examples/volumetric/probe_points.py
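A minimal sketch of the probing direction (the caller supplies the points, the source supplies the data; the synthetic volume below is an illustrative assumption):

```python
import numpy as np
from vedo import Sphere, Volume

vol = Volume(np.random.rand(30, 30, 30))   # the data set to be probed
pts = Sphere(r=10).pos(15, 15, 15)         # the points at which the volume is sampled
pts.probe(vol)                             # probed scalars are added to pts.pointdata
print(pts.pointdata["ValidPointMask"])     # 1 where the probe landed inside the volume
```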
1350 def compute_cell_size(self) -> Self: 1351 """ 1352 Add to this object a cell data array 1353 containing the area, volume and edge length 1354 of the cells (when applicable to the object type). 1355 1356 Array names are: `Area`, `Volume`, `Length`. 1357 """ 1358 csf = vtki.new("CellSizeFilter") 1359 csf.SetInputData(self.dataset) 1360 csf.SetComputeArea(1) 1361 csf.SetComputeVolume(1) 1362 csf.SetComputeLength(1) 1363 csf.SetComputeVertexCount(0) 1364 csf.SetAreaArrayName("Area") 1365 csf.SetVolumeArrayName("Volume") 1366 csf.SetLengthArrayName("Length") 1367 csf.Update() 1368 self._update(csf.GetOutput(), reset_locators=False) 1369 return self
Add to this object a cell data array containing the area, volume and edge length of the cells (when applicable to the object type).
Array names are: Area, Volume, Length.
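A minimal sketch (a triangulated unit sphere, so the summed cell areas should approximate 4*pi):

```python
from vedo import Sphere

msh = Sphere(res=24).compute_cell_size()
print(msh.celldata["Area"].sum())   # approximately 4*pi ~ 12.57
```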
1371 def generate_random_data(self) -> Self: 1372 """Fill a dataset with random attributes""" 1373 gen = vtki.new("RandomAttributeGenerator") 1374 gen.SetInputData(self.dataset) 1375 gen.GenerateAllDataOn() 1376 gen.SetDataTypeToFloat() 1377 gen.GeneratePointNormalsOff() 1378 gen.GeneratePointTensorsOn() 1379 gen.GenerateCellScalarsOn() 1380 gen.Update() 1381 self._update(gen.GetOutput(), reset_locators=False) 1382 self.pipeline = utils.OperationNode("generate_random_data", parents=[self]) 1383 return self
Fill a dataset with random attributes
1385 def integrate_data(self) -> dict: 1386 """ 1387 Integrate point and cell data arrays while computing length, 1388 area or volume of the domain. It works for 1D, 2D or 3D cells. 1389 1390 For volumetric datasets, this filter ignores all but 3D cells. 1391 It will not compute the volume contained in a closed surface. 1392 1393 Returns a dictionary with keys: `pointdata`, `celldata`, `metadata`, 1394 which contain the integration result for the corresponding attributes. 1395 1396 Examples: 1397 ```python 1398 from vedo import * 1399 surf = Sphere(res=100) 1400 surf.pointdata['scalars'] = np.ones(surf.npoints) 1401 data = surf.integrate_data() 1402 print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi) 1403 ``` 1404 1405 ```python 1406 from vedo import * 1407 1408 xcoords1 = np.arange(0, 2.2, 0.2) 1409 xcoords2 = sqrt(np.arange(0, 4.2, 0.2)) 1410 1411 ycoords = np.arange(0, 1.2, 0.2) 1412 1413 surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2) 1414 surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2) 1415 1416 surf1.pointdata['scalars'] = surf1.vertices[:,2] 1417 surf2.pointdata['scalars'] = surf2.vertices[:,2] 1418 1419 data1 = surf1.integrate_data() 1420 data2 = surf2.integrate_data() 1421 1422 print(data1['pointdata']['scalars'], 1423 "is equal to", 1424 data2['pointdata']['scalars'], 1425 "even if the grids are different!", 1426 "(= the volume under the surface)" 1427 ) 1428 show(surf1, surf2, N=2, axes=1).close() 1429 ``` 1430 """ 1431 vinteg = vtki.new("IntegrateAttributes") 1432 vinteg.SetInputData(self.dataset) 1433 vinteg.Update() 1434 ugrid = vedo.UnstructuredGrid(vinteg.GetOutput()) 1435 data = dict( 1436 pointdata=ugrid.pointdata.todict(), 1437 celldata=ugrid.celldata.todict(), 1438 metadata=ugrid.metadata.todict(), 1439 ) 1440 return data
Integrate point and cell data arrays while computing length, area or volume of the domain. It works for 1D, 2D or 3D cells.
For volumetric datasets, this filter ignores all but 3D cells. It will not compute the volume contained in a closed surface.
Returns a dictionary with keys: pointdata, celldata, metadata, which contain the integration result for the corresponding attributes.
Examples:
```python
from vedo import *
surf = Sphere(res=100)
surf.pointdata['scalars'] = np.ones(surf.npoints)
data = surf.integrate_data()
print(data['pointdata']['scalars'], "is equal to 4pi", 4*np.pi)
```

```python
from vedo import *

xcoords1 = np.arange(0, 2.2, 0.2)
xcoords2 = sqrt(np.arange(0, 4.2, 0.2))

ycoords = np.arange(0, 1.2, 0.2)

surf1 = Grid(s=(xcoords1, ycoords)).rotate_y(-45).lw(2)
surf2 = Grid(s=(xcoords2, ycoords)).rotate_y(-45).lw(2)

surf1.pointdata['scalars'] = surf1.vertices[:,2]
surf2.pointdata['scalars'] = surf2.vertices[:,2]

data1 = surf1.integrate_data()
data2 = surf2.integrate_data()

print(data1['pointdata']['scalars'],
      "is equal to",
      data2['pointdata']['scalars'],
      "even if the grids are different!",
      "(= the volume under the surface)")
show(surf1, surf2, N=2, axes=1).close()
```
1442 def write(self, filename, binary=True) -> None: 1443 """Write object to file.""" 1444 out = vedo.file_io.write(self, filename, binary) 1445 out.pipeline = utils.OperationNode( 1446 "write", parents=[self], comment=filename[:15], shape="folder", c="#8a817c" 1447 )
Write object to file.
1449 def tomesh(self, bounds=(), shrink=0) -> "vedo.Mesh": 1450 """ 1451 Extract boundary geometry from dataset (or convert data to polygonal type). 1452 1453 Two new arrays are added to the mesh: `OriginalCellIds` and `OriginalPointIds` 1454 to keep track of the original mesh elements. 1455 1456 Arguments: 1457 bounds : (list) 1458 specify a sub-region to extract 1459 shrink : (float) 1460 shrink the cells to a fraction of their original size 1461 """ 1462 geo = vtki.new("GeometryFilter") 1463 1464 if shrink: 1465 sf = vtki.new("ShrinkFilter") 1466 sf.SetInputData(self.dataset) 1467 sf.SetShrinkFactor(shrink) 1468 sf.Update() 1469 geo.SetInputData(sf.GetOutput()) 1470 else: 1471 geo.SetInputData(self.dataset) 1472 1473 geo.SetPassThroughCellIds(1) 1474 geo.SetPassThroughPointIds(1) 1475 geo.SetOriginalCellIdsName("OriginalCellIds") 1476 geo.SetOriginalPointIdsName("OriginalPointIds") 1477 geo.SetNonlinearSubdivisionLevel(1) 1478 # geo.MergingOff() # crashes on StructuredGrids 1479 if bounds: 1480 geo.SetExtent(bounds) 1481 geo.ExtentClippingOn() 1482 geo.Update() 1483 msh = vedo.mesh.Mesh(geo.GetOutput()) 1484 msh.pipeline = utils.OperationNode("tomesh", parents=[self], c="#9e2a2b") 1485 return msh
Extract boundary geometry from dataset (or convert data to polygonal type).
Two new arrays are added to the mesh: OriginalCellIds and OriginalPointIds, to keep track of the original mesh elements.
Arguments:
- bounds : (list) specify a sub-region to extract
- shrink : (float) shrink the cells to a fraction of their original size
1487 def signed_distance(self, dims=(20, 20, 20), bounds=None, invert=False, max_radius=None) -> "vedo.Volume": 1488 """ 1489 Compute the `Volume` object whose voxels contains the signed distance from 1490 the object. The calling object must have "Normals" defined. 1491 1492 Arguments: 1493 bounds : (list, actor) 1494 bounding box sizes 1495 dims : (list) 1496 dimensions (nr. of voxels) of the output volume. 1497 invert : (bool) 1498 flip the sign 1499 max_radius : (float) 1500 specify how far out to propagate distance calculation 1501 1502 Examples: 1503 - [distance2mesh.py](https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py) 1504 1505 ![](https://vedo.embl.es/images/basic/distance2mesh.png) 1506 """ 1507 if bounds is None: 1508 bounds = self.bounds() 1509 if max_radius is None: 1510 max_radius = self.diagonal_size() / 2 1511 dist = vtki.new("SignedDistance") 1512 dist.SetInputData(self.dataset) 1513 dist.SetRadius(max_radius) 1514 dist.SetBounds(bounds) 1515 dist.SetDimensions(dims) 1516 dist.Update() 1517 img = dist.GetOutput() 1518 if invert: 1519 mat = vtki.new("ImageMathematics") 1520 mat.SetInput1Data(img) 1521 mat.SetOperationToMultiplyByK() 1522 mat.SetConstantK(-1) 1523 mat.Update() 1524 img = mat.GetOutput() 1525 1526 vol = vedo.Volume(img) 1527 vol.name = "SignedDistanceVolume" 1528 vol.pipeline = utils.OperationNode( 1529 "signed_distance", 1530 parents=[self], 1531 comment=f"dims={tuple(vol.dimensions())}", 1532 c="#e9c46a:#0096c7", 1533 ) 1534 return vol
Compute the Volume object whose voxels contain the signed distance from the object. The calling object must have "Normals" defined.
Arguments:
- bounds : (list, actor) bounding box sizes
- dims : (list) dimensions (nr. of voxels) of the output volume.
- invert : (bool) flip the sign
- max_radius : (float) specify how far out to propagate distance calculation
Examples:
- distance2mesh.py: https://github.com/marcomusy/vedo/blob/master/examples/basic/distance2mesh.py
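A minimal round-trip sketch (compute_normals() is called here only to make sure "Normals" exist on the input mesh):

```python
from vedo import Sphere

msh = Sphere().compute_normals()               # make sure "Normals" exist
vol = msh.signed_distance(dims=(40, 40, 40))   # Volume of signed distances
isosurf = vol.isosurface(0)                    # the zero level set approximates the input mesh
```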
1536 def unsigned_distance( 1537 self, dims=(25,25,25), bounds=(), max_radius=0, cap_value=0) -> "vedo.Volume": 1538 """ 1539 Compute the `Volume` object whose voxels contains the unsigned distance. 1540 """ 1541 dist = vtki.new("UnsignedDistance") 1542 dist.SetInputData(self.dataset) 1543 dist.SetDimensions(dims) 1544 1545 if len(bounds) == 6: 1546 dist.SetBounds(bounds) 1547 # elif bounds == "auto": 1548 # dist.AdjustBoundsOn() 1549 else: 1550 dist.SetBounds(self.bounds()) 1551 if not max_radius: 1552 max_radius = self.diagonal_size() / 10 1553 dist.SetRadius(max_radius) 1554 1555 if self.point_locator: 1556 dist.SetLocator(self.point_locator) 1557 1558 if cap_value is not None: 1559 dist.CappingOn() 1560 dist.SetCapValue(cap_value) 1561 dist.SetOutputScalarTypeToFloat() 1562 dist.Update() 1563 vol = vedo.Volume(dist.GetOutput()) 1564 vol.name = "UnsignedDistanceVolume" 1565 vol.pipeline = utils.OperationNode( 1566 "unsigned_distance", parents=[self], c="#e9c46a:#0096c7") 1567 return vol
Compute the Volume object whose voxels contain the unsigned distance.
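A minimal sketch (default bounds, with an explicit max_radius; the values are illustrative):

```python
from vedo import Sphere

vol = Sphere().unsigned_distance(dims=(30, 30, 30), max_radius=0.5)
print(vol.name)   # "UnsignedDistanceVolume"
```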
1569 def smooth_data(self, 1570 niter=10, relaxation_factor=0.1, strategy=0, mask=None, 1571 mode="distance2", 1572 exclude=("Normals", "TextureCoordinates"), 1573 ) -> Self: 1574 """ 1575 Smooth point attribute data using distance weighted Laplacian kernel. 1576 The effect is to blur regions of high variation and emphasize low variation regions. 1577 1578 A central concept of this method is the point smoothing stencil. 1579 A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. 1580 To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using 1581 the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). 1582 This averaging process is repeated until the maximum number of iterations is reached. 1583 1584 The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. 1585 The a(i+1) attributes are determined from the a(i) attributes as follows: 1586 a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j)) 1587 1588 Convergence occurs faster for larger relaxation factors. 1589 Typically a small number of iterations is required for large relaxation factors, 1590 and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be 1591 adequate (i.e., just a distance weighted average is computed). 1592 1593 Warning: 1594 Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to be |n|=1; 1595 after attribute smoothing this constraint is likely to be violated. 1596 Other vectors and tensors may suffer from similar issues. 1597 In such a situation, specify `exclude=...` which will not be smoothed (and simply passed through to the output). 1598 Distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance 1599 between the point to be smoothed and an edge connected neighbor (defined by the smoothing stencil). 1600 The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n, 1601 where n is the number of connected points in the stencil. 1602 The smoothing process reduces high frequency information in the data attributes. 1603 With excessive smoothing (large numbers of iterations, and/or a large relaxation factor) important details may be lost, 1604 and the attributes will move towards an "average" value. 1605 While this filter will process any dataset type, if the input data is a 3D image volume, it's likely much faster to use 1606 an image-based algorithm to perform data smoothing. 1607 To determine boundary points in polygonal data, edges used by only one cell are considered boundary 1608 (and hence the associated points defining the edge). 
1609 1610 Arguments: 1611 niter : (int) 1612 number of iterations 1613 relaxation_factor : (float) 1614 relaxation factor controlling the amount of Laplacian smoothing applied 1615 strategy : (int) 1616 strategy to use for Laplacian smoothing 1617 - 0: use all points, all point data attributes are smoothed 1618 - 1: smooth all point attribute data except those on the boundary 1619 - 2: only point data connected to a boundary point are smoothed 1620 mask : (str, np.ndarray) 1621 array to be used as a mask (ignore then the strategy keyword) 1622 mode : (str) 1623 smoothing mode, either "distance2", "distance" or "average" 1624 - distance**2 weighted (i.e., 1/r**2 interpolation weights) 1625 - distance weighted (i.e., 1/r) approach; 1626 - simple average of all connected points in the stencil 1627 exclude : (list) 1628 list of arrays to be excluded from smoothing 1629 """ 1630 try: 1631 saf = vtki.new("AttributeSmoothingFilter") 1632 except: 1633 vedo.logger.error("smooth_data() only avaialble in VTK>=9.3.0") 1634 return self 1635 saf.SetInputData(self.dataset) 1636 saf.SetRelaxationFactor(relaxation_factor) 1637 saf.SetNumberOfIterations(niter) 1638 1639 for ex in exclude: 1640 saf.AddExcludedArray(ex) 1641 1642 if mode == "distance": 1643 saf.SetWeightsTypeToDistance() 1644 elif mode == "distance2": 1645 saf.SetWeightsTypeToDistance2() 1646 elif mode == "average": 1647 saf.SetWeightsTypeToAverage() 1648 else: 1649 vedo.logger.error(f"smooth_data(): unknown mode {mode}") 1650 raise TypeError 1651 1652 saf.SetSmoothingStrategy(strategy) 1653 if mask is not None: 1654 saf.SetSmoothingStrategyToSmoothingMask() 1655 if isinstance(mask, str): 1656 mask_ = self.dataset.GetPointData().GetArray(mask) 1657 if not mask_: 1658 vedo.logger.error(f"smooth_data(): mask array {mask} not found") 1659 return self 1660 mask_array = vtki.vtkUnsignedCharArray() 1661 mask_array.ShallowCopy(mask_) 1662 mask_array.SetName(mask_.GetName()) 1663 else: 1664 mask_array = utils.numpy2vtk(mask, dtype=np.uint8) 1665 saf.SetSmoothingMask(mask_array) 1666 1667 saf.Update() 1668 1669 self._update(saf.GetOutput()) 1670 self.pipeline = utils.OperationNode( 1671 "smooth_data", comment=f"strategy {strategy}", parents=[self], c="#9e2a2b" 1672 ) 1673 return self
Smooth point attribute data using distance weighted Laplacian kernel. The effect is to blur regions of high variation and emphasize low variation regions.
A central concept of this method is the point smoothing stencil. A smoothing stencil for a point p(i) is the list of points p(j) which connect to p(i) via an edge. To smooth the attributes of point p(i), p(i)'s attribute data a(i) are iteratively averaged using the distance weighted average of the attributes of a(j) (the weights w[j] sum to 1). This averaging process is repeated until the maximum number of iterations is reached.
The relaxation factor (R) is also important as the smoothing process proceeds in an iterative fashion. The a(i+1) attributes are determined from the a(i) attributes as follows: a(i+1) = (1-R)*a(i) + R*sum(w(j)*a(j))
Convergence occurs faster for larger relaxation factors. Typically a small number of iterations is required for large relaxation factors, and in cases where only points adjacent to the boundary are being smoothed, a single iteration with R=1 may be adequate (i.e., just a distance weighted average is computed).
Warning:
Certain data attributes cannot be correctly interpolated. For example, surface normals are expected to have |n|=1; after attribute smoothing this constraint is likely to be violated. Other vectors and tensors may suffer from similar issues. In such a situation, specify exclude=... to list the arrays that will not be smoothed (and are simply passed through to the output).
The distance weighting function is based on averaging, 1/r, or 1/r**2 weights, where r is the distance between the point to be smoothed and an edge-connected neighbor (defined by the smoothing stencil). The weights are normalized so that sum(w(i))==1. When smoothing based on averaging, the weights are simply 1/n, where n is the number of connected points in the stencil. The smoothing process reduces high-frequency information in the data attributes. With excessive smoothing (large numbers of iterations and/or a large relaxation factor) important details may be lost, and the attributes will move towards an "average" value. While this filter will process any dataset type, if the input data is a 3D image volume, it is likely much faster to use an image-based algorithm to perform data smoothing. To determine boundary points in polygonal data, edges used by only one cell are considered boundary (and hence the associated points defining the edge).
Arguments:
- niter : (int) number of iterations
- relaxation_factor : (float) relaxation factor controlling the amount of Laplacian smoothing applied
- strategy : (int) strategy to use for Laplacian smoothing:
  - 0: use all points, all point data attributes are smoothed
  - 1: smooth all point attribute data except those on the boundary
  - 2: only point data connected to a boundary point are smoothed
- mask : (str, np.ndarray) array to be used as a mask (the strategy keyword is then ignored)
- mode : (str) smoothing mode, either "distance2", "distance" or "average":
  - distance**2 weighted (i.e., 1/r**2 interpolation weights)
  - distance weighted (i.e., 1/r) approach
  - simple average of all connected points in the stencil
- exclude : (list) list of arrays to be excluded from smoothing
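A minimal sketch (requires VTK >= 9.3; the noisy scalar array below is an illustrative assumption):

```python
import numpy as np
from vedo import Sphere

msh = Sphere(res=48)
msh.pointdata["noisy"] = msh.vertices[:, 2] + np.random.normal(0, 0.1, msh.npoints)
msh.smooth_data(niter=20, relaxation_factor=0.1, mode="distance2")
```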
1675 def compute_streamlines( 1676 self, 1677 seeds: Any, 1678 integrator="rk4", 1679 direction="forward", 1680 initial_step_size=None, 1681 max_propagation=None, 1682 max_steps=10000, 1683 step_length=0, 1684 surface_constrained=False, 1685 compute_vorticity=False, 1686 ) -> Union["vedo.Lines", None]: 1687 """ 1688 Integrate a vector field to generate streamlines. 1689 1690 Arguments: 1691 seeds : (Mesh, Points, list) 1692 starting points of the streamlines 1693 integrator : (str) 1694 type of integration method to be used: 1695 - "rk2" (Runge-Kutta 2) 1696 - "rk4" (Runge-Kutta 4) 1697 - "rk45" (Runge-Kutta 45) 1698 direction : (str) 1699 direction of integration, either "forward", "backward" or "both" 1700 initial_step_size : (float) 1701 initial step size used for line integration 1702 max_propagation : (float) 1703 maximum length of a streamline expressed in absolute units 1704 max_steps : (int) 1705 maximum number of steps for a streamline 1706 step_length : (float) 1707 maximum length of a step expressed in absolute units 1708 surface_constrained : (bool) 1709 whether to stop integrating when the streamline leaves the surface 1710 compute_vorticity : (bool) 1711 whether to compute the vorticity at each streamline point 1712 """ 1713 b = self.dataset.GetBounds() 1714 size = (b[5]-b[4] + b[3]-b[2] + b[1]-b[0]) / 3 1715 if initial_step_size is None: 1716 initial_step_size = size / 1000.0 1717 1718 if max_propagation is None: 1719 max_propagation = size * 2 1720 1721 if utils.is_sequence(seeds): 1722 seeds = vedo.Points(seeds) 1723 1724 sti = vtki.new("StreamTracer") 1725 sti.SetSourceData(seeds.dataset) 1726 if isinstance(self, vedo.RectilinearGrid): 1727 sti.SetInputData(vedo.UnstructuredGrid(self.dataset).dataset) 1728 else: 1729 sti.SetInputDataObject(self.dataset) 1730 1731 sti.SetInitialIntegrationStep(initial_step_size) 1732 sti.SetComputeVorticity(compute_vorticity) 1733 sti.SetMaximumNumberOfSteps(max_steps) 1734 sti.SetMaximumPropagation(max_propagation) 1735 sti.SetSurfaceStreamlines(surface_constrained) 1736 if step_length: 1737 sti.SetMaximumIntegrationStep(step_length) 1738 1739 if "for" in direction: 1740 sti.SetIntegrationDirectionToForward() 1741 elif "back" in direction: 1742 sti.SetIntegrationDirectionToBackward() 1743 elif "both" in direction: 1744 sti.SetIntegrationDirectionToBoth() 1745 else: 1746 vedo.logger.error(f"in compute_streamlines(), unknown direction {direction}") 1747 return None 1748 1749 if integrator == "rk2": 1750 sti.SetIntegratorTypeToRungeKutta2() 1751 elif integrator == "rk4": 1752 sti.SetIntegratorTypeToRungeKutta4() 1753 elif integrator == "rk45": 1754 sti.SetIntegratorTypeToRungeKutta45() 1755 else: 1756 vedo.logger.error(f"in compute_streamlines(), unknown integrator {integrator}") 1757 return None 1758 1759 sti.Update() 1760 1761 stlines = vedo.shapes.Lines(sti.GetOutput(), lw=4) 1762 stlines.name = "StreamLines" 1763 self.pipeline = utils.OperationNode( 1764 "compute_streamlines", comment=f"{integrator}", parents=[self, seeds], c="#9e2a2b" 1765 ) 1766 return stlines
Integrate a vector field to generate streamlines.
Arguments:
- seeds : (Mesh, Points, list) starting points of the streamlines
- integrator : (str) type of integration method to be used:
  - "rk2" (Runge-Kutta 2)
  - "rk4" (Runge-Kutta 4)
  - "rk45" (Runge-Kutta 45)
- direction : (str) direction of integration, either "forward", "backward" or "both"
- initial_step_size : (float) initial step size used for line integration
- max_propagation : (float) maximum length of a streamline expressed in absolute units
- max_steps : (int) maximum number of steps for a streamline
- step_length : (float) maximum length of a step expressed in absolute units
- surface_constrained : (bool) whether to stop integrating when the streamline leaves the surface
- compute_vorticity : (bool) whether to compute the vorticity at each streamline point
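A minimal sketch (the swirling field and the random seeds are illustrative assumptions; a 3-component pointdata array is assumed to become the active vectors used by the tracer):

```python
import numpy as np
from vedo import Points, Volume

vol = Volume(np.zeros((25, 25, 25)))                  # dummy grid with unit spacing
pts = vol.vertices
vel = np.c_[-(pts[:, 1] - 12), pts[:, 0] - 12, np.ones(len(pts))]  # swirl plus upward drift
vol.pointdata["vel"] = vel                            # assumed to become the active vector field

seeds = Points(np.random.rand(25, 3) * 24)            # random seed points inside the grid
streamlines = vol.compute_streamlines(seeds, integrator="rk4", direction="both")
```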
1769class PointAlgorithms(CommonAlgorithms): 1770 """Methods for point clouds.""" 1771 1772 def apply_transform(self, LT: Any, deep_copy=True) -> Self: 1773 """ 1774 Apply a linear or non-linear transformation to the mesh polygonal data. 1775 1776 Example: 1777 ```python 1778 from vedo import Cube, show, settings 1779 settings.use_parallel_projection = True 1780 c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5) 1781 T = c1.transform # rotate by 5 degrees, place at (2,1) 1782 c2 = Cube().c('red4').wireframe().lw(10).lighting('off') 1783 c2.apply_transform(T) 1784 show(c1, c2, "The 2 cubes should overlap!", axes=1).close() 1785 ``` 1786 1787 ![](https://vedo.embl.es/images/feats/apply_transform.png) 1788 """ 1789 if self.dataset.GetNumberOfPoints() == 0: 1790 return self 1791 1792 if isinstance(LT, LinearTransform): 1793 LT_is_linear = True 1794 tr = LT.T 1795 if LT.is_identity(): 1796 return self 1797 1798 elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT): 1799 LT_is_linear = True 1800 LT = LinearTransform(LT) 1801 tr = LT.T 1802 if LT.is_identity(): 1803 return self 1804 1805 elif isinstance(LT, NonLinearTransform): 1806 LT_is_linear = False 1807 tr = LT.T 1808 self.transform = LT # reset 1809 1810 elif isinstance(LT, vtki.vtkThinPlateSplineTransform): 1811 LT_is_linear = False 1812 tr = LT 1813 self.transform = NonLinearTransform(LT) # reset 1814 1815 else: 1816 vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}") 1817 return self 1818 1819 ################ 1820 if LT_is_linear: 1821 try: 1822 # self.transform might still not be linear 1823 self.transform.concatenate(LT) 1824 except AttributeError: 1825 # in that case reset it 1826 self.transform = LinearTransform() 1827 1828 ################ 1829 if isinstance(self.dataset, vtki.vtkPolyData): 1830 tp = vtki.new("TransformPolyDataFilter") 1831 elif isinstance(self.dataset, vtki.vtkUnstructuredGrid): 1832 tp = vtki.new("TransformFilter") 1833 tp.TransformAllInputVectorsOn() 1834 # elif isinstance(self.dataset, vtki.vtkImageData): 1835 # tp = vtki.new("ImageReslice") 1836 # tp.SetInterpolationModeToCubic() 1837 # tp.SetResliceTransform(tr) 1838 else: 1839 vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}") 1840 return self 1841 1842 tp.SetTransform(tr) 1843 tp.SetInputData(self.dataset) 1844 tp.Update() 1845 out = tp.GetOutput() 1846 1847 if deep_copy: 1848 self.dataset.DeepCopy(out) 1849 else: 1850 self.dataset.ShallowCopy(out) 1851 1852 # reset the locators 1853 self.point_locator = None 1854 self.cell_locator = None 1855 self.line_locator = None 1856 return self 1857 1858 def apply_transform_from_actor(self) -> LinearTransform: 1859 """ 1860 Apply the current transformation of the actor to the data. 1861 Useful when manually moving an actor (eg. when pressing "a"). 1862 Returns the `LinearTransform` object. 1863 1864 Note that this method is automatically called when the window is closed, 1865 or the interactor style is changed. 
1866 """ 1867 M = self.actor.GetMatrix() 1868 self.apply_transform(M) 1869 iden = vtki.vtkMatrix4x4() 1870 self.actor.PokeMatrix(iden) 1871 return LinearTransform(M) 1872 1873 def pos(self, x=None, y=None, z=None) -> Self: 1874 """Set/Get object position.""" 1875 if x is None: # get functionality 1876 return self.transform.position 1877 1878 if z is None and y is None: # assume x is of the form (x,y,z) 1879 if len(x) == 3: 1880 x, y, z = x 1881 else: 1882 x, y = x 1883 z = 0 1884 elif z is None: # assume x,y is of the form x, y 1885 z = 0 1886 1887 q = self.transform.position 1888 delta = [x, y, z] - q 1889 if delta[0] == delta[1] == delta[2] == 0: 1890 return self 1891 LT = LinearTransform().translate(delta) 1892 return self.apply_transform(LT) 1893 1894 def shift(self, dx=0, dy=0, dz=0) -> Self: 1895 """Add a vector to the current object position.""" 1896 if utils.is_sequence(dx): 1897 dx, dy, dz = utils.make3d(dx) 1898 if dx == dy == dz == 0: 1899 return self 1900 LT = LinearTransform().translate([dx, dy, dz]) 1901 return self.apply_transform(LT) 1902 1903 def x(self, val=None) -> Self: 1904 """Set/Get object position along x axis.""" 1905 p = self.transform.position 1906 if val is None: 1907 return p[0] 1908 self.pos(val, p[1], p[2]) 1909 return self 1910 1911 def y(self, val=None)-> Self: 1912 """Set/Get object position along y axis.""" 1913 p = self.transform.position 1914 if val is None: 1915 return p[1] 1916 self.pos(p[0], val, p[2]) 1917 return self 1918 1919 def z(self, val=None) -> Self: 1920 """Set/Get object position along z axis.""" 1921 p = self.transform.position 1922 if val is None: 1923 return p[2] 1924 self.pos(p[0], p[1], val) 1925 return self 1926 1927 def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self: 1928 """ 1929 Rotate around an arbitrary `axis` passing through `point`. 1930 1931 Example: 1932 ```python 1933 from vedo import * 1934 c1 = Cube() 1935 c2 = c1.clone().c('violet').alpha(0.5) # copy of c1 1936 v = vector(0.2,1,0) 1937 p = vector(1,0,0) # axis passes through this point 1938 c2.rotate(90, axis=v, point=p) 1939 l = Line(-v+p, v+p).lw(3).c('red') 1940 show(c1, l, c2, axes=1).close() 1941 ``` 1942 1943 ![](https://vedo.embl.es/images/feats/rotate_axis.png) 1944 """ 1945 LT = LinearTransform() 1946 LT.rotate(angle, axis, point, rad) 1947 return self.apply_transform(LT) 1948 1949 def rotate_x(self, angle: float, rad=False, around=None) -> Self: 1950 """ 1951 Rotate around x-axis. If angle is in radians set `rad=True`. 1952 1953 Use `around` to define a pivoting point. 1954 """ 1955 if angle == 0: 1956 return self 1957 LT = LinearTransform().rotate_x(angle, rad, around) 1958 return self.apply_transform(LT) 1959 1960 def rotate_y(self, angle: float, rad=False, around=None) -> Self: 1961 """ 1962 Rotate around y-axis. If angle is in radians set `rad=True`. 1963 1964 Use `around` to define a pivoting point. 1965 """ 1966 if angle == 0: 1967 return self 1968 LT = LinearTransform().rotate_y(angle, rad, around) 1969 return self.apply_transform(LT) 1970 1971 def rotate_z(self, angle: float, rad=False, around=None) -> Self: 1972 """ 1973 Rotate around z-axis. If angle is in radians set `rad=True`. 1974 1975 Use `around` to define a pivoting point. 
1976 """ 1977 if angle == 0: 1978 return self 1979 LT = LinearTransform().rotate_z(angle, rad, around) 1980 return self.apply_transform(LT) 1981 1982 def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self: 1983 """ 1984 Reorient the object to point to a new direction from an initial one. 1985 If `initaxis` is None, the object will be assumed in its "default" orientation. 1986 If `xyplane` is True, the object will be rotated to lie on the xy plane. 1987 1988 Use `rotation` to first rotate the object around its `initaxis`. 1989 """ 1990 q = self.transform.position 1991 LT = LinearTransform() 1992 LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane) 1993 return self.apply_transform(LT) 1994 1995 def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.array]: 1996 """ 1997 Set/get object's scaling factor. 1998 1999 Arguments: 2000 s : (list, float) 2001 scaling factor(s). 2002 reset : (bool) 2003 if True previous scaling factors are ignored. 2004 origin : (bool) 2005 if True scaling is applied with respect to object's position, 2006 otherwise is applied respect to (0,0,0). 2007 2008 Note: 2009 use `s=(sx,sy,sz)` to scale differently in the three coordinates. 2010 """ 2011 if s is None: 2012 return np.array(self.transform.T.GetScale()) 2013 2014 if not utils.is_sequence(s): 2015 s = [s, s, s] 2016 2017 LT = LinearTransform() 2018 if reset: 2019 old_s = np.array(self.transform.T.GetScale()) 2020 LT.scale(s / old_s) 2021 else: 2022 if origin is True: 2023 LT.scale(s, origin=self.transform.position) 2024 elif origin is False: 2025 LT.scale(s, origin=False) 2026 else: 2027 LT.scale(s, origin=origin) 2028 2029 return self.apply_transform(LT)
Methods for point clouds.
1772 def apply_transform(self, LT: Any, deep_copy=True) -> Self: 1773 """ 1774 Apply a linear or non-linear transformation to the mesh polygonal data. 1775 1776 Example: 1777 ```python 1778 from vedo import Cube, show, settings 1779 settings.use_parallel_projection = True 1780 c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5) 1781 T = c1.transform # rotate by 5 degrees, place at (2,1) 1782 c2 = Cube().c('red4').wireframe().lw(10).lighting('off') 1783 c2.apply_transform(T) 1784 show(c1, c2, "The 2 cubes should overlap!", axes=1).close() 1785 ``` 1786 1787 ![](https://vedo.embl.es/images/feats/apply_transform.png) 1788 """ 1789 if self.dataset.GetNumberOfPoints() == 0: 1790 return self 1791 1792 if isinstance(LT, LinearTransform): 1793 LT_is_linear = True 1794 tr = LT.T 1795 if LT.is_identity(): 1796 return self 1797 1798 elif isinstance(LT, (vtki.vtkMatrix4x4, vtki.vtkLinearTransform)) or utils.is_sequence(LT): 1799 LT_is_linear = True 1800 LT = LinearTransform(LT) 1801 tr = LT.T 1802 if LT.is_identity(): 1803 return self 1804 1805 elif isinstance(LT, NonLinearTransform): 1806 LT_is_linear = False 1807 tr = LT.T 1808 self.transform = LT # reset 1809 1810 elif isinstance(LT, vtki.vtkThinPlateSplineTransform): 1811 LT_is_linear = False 1812 tr = LT 1813 self.transform = NonLinearTransform(LT) # reset 1814 1815 else: 1816 vedo.logger.error(f"apply_transform(), unknown input type:\n{LT}") 1817 return self 1818 1819 ################ 1820 if LT_is_linear: 1821 try: 1822 # self.transform might still not be linear 1823 self.transform.concatenate(LT) 1824 except AttributeError: 1825 # in that case reset it 1826 self.transform = LinearTransform() 1827 1828 ################ 1829 if isinstance(self.dataset, vtki.vtkPolyData): 1830 tp = vtki.new("TransformPolyDataFilter") 1831 elif isinstance(self.dataset, vtki.vtkUnstructuredGrid): 1832 tp = vtki.new("TransformFilter") 1833 tp.TransformAllInputVectorsOn() 1834 # elif isinstance(self.dataset, vtki.vtkImageData): 1835 # tp = vtki.new("ImageReslice") 1836 # tp.SetInterpolationModeToCubic() 1837 # tp.SetResliceTransform(tr) 1838 else: 1839 vedo.logger.error(f"apply_transform(), unknown input type: {[self.dataset]}") 1840 return self 1841 1842 tp.SetTransform(tr) 1843 tp.SetInputData(self.dataset) 1844 tp.Update() 1845 out = tp.GetOutput() 1846 1847 if deep_copy: 1848 self.dataset.DeepCopy(out) 1849 else: 1850 self.dataset.ShallowCopy(out) 1851 1852 # reset the locators 1853 self.point_locator = None 1854 self.cell_locator = None 1855 self.line_locator = None 1856 return self
Apply a linear or non-linear transformation to the mesh polygonal data.
Example:
from vedo import Cube, show, settings
settings.use_parallel_projection = True
c1 = Cube().rotate_z(25).pos(2,1).mirror().alpha(0.5)
T = c1.transform # rotate by 25 degrees, place at (2,1)
c2 = Cube().c('red4').wireframe().lw(10).lighting('off')
c2.apply_transform(T)
show(c1, c2, "The 2 cubes should overlap!", axes=1).close()
1858 def apply_transform_from_actor(self) -> LinearTransform: 1859 """ 1860 Apply the current transformation of the actor to the data. 1861 Useful when manually moving an actor (eg. when pressing "a"). 1862 Returns the `LinearTransform` object. 1863 1864 Note that this method is automatically called when the window is closed, 1865 or the interactor style is changed. 1866 """ 1867 M = self.actor.GetMatrix() 1868 self.apply_transform(M) 1869 iden = vtki.vtkMatrix4x4() 1870 self.actor.PokeMatrix(iden) 1871 return LinearTransform(M)
Apply the current transformation of the actor to the data.
Useful when manually moving an actor (eg. when pressing "a").
Returns the LinearTransform object.
Note that this method is automatically called when the window is closed, or the interactor style is changed.
1873 def pos(self, x=None, y=None, z=None) -> Self: 1874 """Set/Get object position.""" 1875 if x is None: # get functionality 1876 return self.transform.position 1877 1878 if z is None and y is None: # assume x is of the form (x,y,z) 1879 if len(x) == 3: 1880 x, y, z = x 1881 else: 1882 x, y = x 1883 z = 0 1884 elif z is None: # assume x,y is of the form x, y 1885 z = 0 1886 1887 q = self.transform.position 1888 delta = [x, y, z] - q 1889 if delta[0] == delta[1] == delta[2] == 0: 1890 return self 1891 LT = LinearTransform().translate(delta) 1892 return self.apply_transform(LT)
Set/Get object position.
1894 def shift(self, dx=0, dy=0, dz=0) -> Self: 1895 """Add a vector to the current object position.""" 1896 if utils.is_sequence(dx): 1897 dx, dy, dz = utils.make3d(dx) 1898 if dx == dy == dz == 0: 1899 return self 1900 LT = LinearTransform().translate([dx, dy, dz]) 1901 return self.apply_transform(LT)
Add a vector to the current object position.
1903 def x(self, val=None) -> Self: 1904 """Set/Get object position along x axis.""" 1905 p = self.transform.position 1906 if val is None: 1907 return p[0] 1908 self.pos(val, p[1], p[2]) 1909 return self
Set/Get object position along x axis.
1911 def y(self, val=None)-> Self: 1912 """Set/Get object position along y axis.""" 1913 p = self.transform.position 1914 if val is None: 1915 return p[1] 1916 self.pos(p[0], val, p[2]) 1917 return self
Set/Get object position along y axis.
1919 def z(self, val=None) -> Self: 1920 """Set/Get object position along z axis.""" 1921 p = self.transform.position 1922 if val is None: 1923 return p[2] 1924 self.pos(p[0], p[1], val) 1925 return self
Set/Get object position along z axis.
1927 def rotate(self, angle: float, axis=(1, 0, 0), point=(0, 0, 0), rad=False) -> Self: 1928 """ 1929 Rotate around an arbitrary `axis` passing through `point`. 1930 1931 Example: 1932 ```python 1933 from vedo import * 1934 c1 = Cube() 1935 c2 = c1.clone().c('violet').alpha(0.5) # copy of c1 1936 v = vector(0.2,1,0) 1937 p = vector(1,0,0) # axis passes through this point 1938 c2.rotate(90, axis=v, point=p) 1939 l = Line(-v+p, v+p).lw(3).c('red') 1940 show(c1, l, c2, axes=1).close() 1941 ``` 1942 1943 ![](https://vedo.embl.es/images/feats/rotate_axis.png) 1944 """ 1945 LT = LinearTransform() 1946 LT.rotate(angle, axis, point, rad) 1947 return self.apply_transform(LT)
Rotate around an arbitrary axis passing through point.
Example:
from vedo import *
c1 = Cube()
c2 = c1.clone().c('violet').alpha(0.5) # copy of c1
v = vector(0.2,1,0)
p = vector(1,0,0) # axis passes through this point
c2.rotate(90, axis=v, point=p)
l = Line(-v+p, v+p).lw(3).c('red')
show(c1, l, c2, axes=1).close()
1949 def rotate_x(self, angle: float, rad=False, around=None) -> Self: 1950 """ 1951 Rotate around x-axis. If angle is in radians set `rad=True`. 1952 1953 Use `around` to define a pivoting point. 1954 """ 1955 if angle == 0: 1956 return self 1957 LT = LinearTransform().rotate_x(angle, rad, around) 1958 return self.apply_transform(LT)
Rotate around x-axis. If angle is in radians set rad=True.
Use around to define a pivoting point.
1960 def rotate_y(self, angle: float, rad=False, around=None) -> Self: 1961 """ 1962 Rotate around y-axis. If angle is in radians set `rad=True`. 1963 1964 Use `around` to define a pivoting point. 1965 """ 1966 if angle == 0: 1967 return self 1968 LT = LinearTransform().rotate_y(angle, rad, around) 1969 return self.apply_transform(LT)
Rotate around y-axis. If angle is in radians set rad=True.
Use around to define a pivoting point.
1971 def rotate_z(self, angle: float, rad=False, around=None) -> Self: 1972 """ 1973 Rotate around z-axis. If angle is in radians set `rad=True`. 1974 1975 Use `around` to define a pivoting point. 1976 """ 1977 if angle == 0: 1978 return self 1979 LT = LinearTransform().rotate_z(angle, rad, around) 1980 return self.apply_transform(LT)
Rotate around z-axis. If angle is in radians set rad=True.
Use around to define a pivoting point.
1982 def reorient(self, initaxis, newaxis, rotation=0, rad=False, xyplane=False) -> Self: 1983 """ 1984 Reorient the object to point to a new direction from an initial one. 1985 If `initaxis` is None, the object will be assumed in its "default" orientation. 1986 If `xyplane` is True, the object will be rotated to lie on the xy plane. 1987 1988 Use `rotation` to first rotate the object around its `initaxis`. 1989 """ 1990 q = self.transform.position 1991 LT = LinearTransform() 1992 LT.reorient(initaxis, newaxis, q, rotation, rad, xyplane) 1993 return self.apply_transform(LT)
Reorient the object to point to a new direction from an initial one.
If initaxis is None, the object will be assumed in its "default" orientation.
If xyplane is True, the object will be rotated to lie on the xy plane.
Use rotation to first rotate the object around its initaxis.
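A minimal sketch (it is assumed that a vedo Cylinder is built with its axis along z, which is used as initaxis):

```python
from vedo import Cylinder

cyl = Cylinder()                      # built with its axis along (0, 0, 1)
cyl.reorient((0, 0, 1), (1, 1, 1))    # now points along the (1, 1, 1) diagonal
```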
1995 def scale(self, s=None, reset=False, origin=True) -> Union[Self, np.array]: 1996 """ 1997 Set/get object's scaling factor. 1998 1999 Arguments: 2000 s : (list, float) 2001 scaling factor(s). 2002 reset : (bool) 2003 if True previous scaling factors are ignored. 2004 origin : (bool) 2005 if True scaling is applied with respect to object's position, 2006 otherwise is applied respect to (0,0,0). 2007 2008 Note: 2009 use `s=(sx,sy,sz)` to scale differently in the three coordinates. 2010 """ 2011 if s is None: 2012 return np.array(self.transform.T.GetScale()) 2013 2014 if not utils.is_sequence(s): 2015 s = [s, s, s] 2016 2017 LT = LinearTransform() 2018 if reset: 2019 old_s = np.array(self.transform.T.GetScale()) 2020 LT.scale(s / old_s) 2021 else: 2022 if origin is True: 2023 LT.scale(s, origin=self.transform.position) 2024 elif origin is False: 2025 LT.scale(s, origin=False) 2026 else: 2027 LT.scale(s, origin=origin) 2028 2029 return self.apply_transform(LT)
Set/get object's scaling factor.
Arguments:
- s : (list, float) scaling factor(s).
- reset : (bool) if True previous scaling factors are ignored.
- origin : (bool) if True scaling is applied with respect to object's position, otherwise is applied respect to (0,0,0).
Note:
use s=(sx,sy,sz) to scale differently in the three coordinates.
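A minimal sketch of the setter/getter behaviour:

```python
from vedo import Cube

c = Cube().pos(1, 2, 3)
c.scale((2, 1, 0.5))    # anisotropic scaling about the object's own position
print(c.scale())        # query the cumulative scaling factors, e.g. [2. 1. 0.5]
```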
Inherited Members
- CommonAlgorithms
- pointdata
- celldata
- metadata
- memory_address
- memory_size
- modified
- box
- update_dataset
- bounds
- xbounds
- ybounds
- zbounds
- diagonal_size
- average_size
- center_of_mass
- copy_data_from
- inputdata
- npoints
- nvertices
- ncells
- points
- cell_centers
- lines
- lines_as_flat_array
- mark_boundaries
- find_cells_in_bounds
- find_cells_along_line
- find_cells_along_plane
- keep_cell_types
- map_cells_to_points
- vertices
- coordinates
- cells_as_flat_array
- cells
- cell_edge_neighbors
- map_points_to_cells
- resample_data_from
- interpolate_data_from
- add_ids
- gradient
- divergence
- vorticity
- probe
- compute_cell_size
- generate_random_data
- integrate_data
- write
- tomesh
- signed_distance
- unsigned_distance
- smooth_data
- compute_streamlines
2033class VolumeAlgorithms(CommonAlgorithms): 2034 """Methods for Volume objects.""" 2035 2036 def bounds(self) -> np.ndarray: 2037 """ 2038 Get the object bounds. 2039 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 2040 """ 2041 # OVERRIDE CommonAlgorithms.bounds() which is too slow 2042 return np.array(self.dataset.GetBounds()) 2043 2044 def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh": 2045 """ 2046 Return an `Mesh` isosurface extracted from the `Volume` object. 2047 2048 Set `value` as single float or list of values to draw the isosurface(s). 2049 Use flying_edges for faster results (but sometimes can interfere with `smooth()`). 2050 2051 Examples: 2052 - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py) 2053 2054 ![](https://vedo.embl.es/images/volumetric/isosurfaces.png) 2055 """ 2056 scrange = self.dataset.GetScalarRange() 2057 2058 if flying_edges: 2059 cf = vtki.new("FlyingEdges3D") 2060 cf.InterpolateAttributesOn() 2061 else: 2062 cf = vtki.new("ContourFilter") 2063 cf.UseScalarTreeOn() 2064 2065 cf.SetInputData(self.dataset) 2066 cf.ComputeNormalsOn() 2067 2068 if utils.is_sequence(value): 2069 cf.SetNumberOfContours(len(value)) 2070 for i, t in enumerate(value): 2071 cf.SetValue(i, t) 2072 else: 2073 if value is None: 2074 value = (2 * scrange[0] + scrange[1]) / 3.0 2075 # print("automatic isosurface value =", value) 2076 cf.SetValue(0, value) 2077 2078 cf.Update() 2079 poly = cf.GetOutput() 2080 2081 out = vedo.mesh.Mesh(poly, c=None).phong() 2082 out.mapper.SetScalarRange(scrange[0], scrange[1]) 2083 2084 out.pipeline = utils.OperationNode( 2085 "isosurface", 2086 parents=[self], 2087 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2088 c="#4cc9f0:#e9c46a", 2089 ) 2090 return out 2091 2092 def isosurface_discrete( 2093 self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0, 2094 ) -> "vedo.mesh.Mesh": 2095 """ 2096 Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 2097 3D version of the multiple objects/labels Surface Nets algorithm. 2098 The input is a 3D image (i.e., volume) where each voxel is labeled 2099 (integer labels are preferred to real values), and the output data is a polygonal mesh separating 2100 labeled regions / objects. 2101 (Note that on output each region [corresponding to a different segmented object] will share 2102 points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them). 2103 2104 Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating 2105 the labels on either side of the polygons composing the output Mesh. 2106 (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. 2107 The name of this celldata array is "BoundaryLabels"). 2108 2109 Arguments: 2110 value : (float, list) 2111 single value or list of values to draw the isosurface(s). 2112 background_label : (float) 2113 this value specifies the label value to use when referencing the background 2114 region outside of any of the specified regions. 2115 boundaries : (bool, list) 2116 if True, the output will only contain the boundary surface. Internal surfaces will be removed. 2117 If a list of integers is provided, only the boundaries between the specified labels will be extracted. 2118 use_quads : (bool) 2119 if True, the output polygons will be quads. 
If False, the output polygons will be triangles. 2120 nsmooth : (int) 2121 number of iterations of smoothing (0 means no smoothing). 2122 2123 Examples: 2124 - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py) 2125 """ 2126 logger = vtki.get_class("Logger") 2127 logger.SetStderrVerbosity(logger.VERBOSITY_ERROR) 2128 2129 snets = vtki.new("SurfaceNets3D") 2130 snets.SetInputData(self.dataset) 2131 2132 if nsmooth: 2133 snets.SmoothingOn() 2134 snets.AutomaticSmoothingConstraintsOn() 2135 snets.GetSmoother().SetNumberOfIterations(nsmooth) 2136 # snets.GetSmoother().SetRelaxationFactor(relaxation_factor) 2137 # snets.GetSmoother().SetConstraintDistance(constraint_distance) 2138 else: 2139 snets.SmoothingOff() 2140 2141 if internal_boundaries is False: 2142 snets.SetOutputStyleToBoundary() 2143 elif internal_boundaries is True: 2144 snets.SetOutputStyleToDefault() 2145 elif utils.is_sequence(internal_boundaries): 2146 snets.SetOutputStyleToSelected() 2147 snets.InitializeSelectedLabelsList() 2148 for val in internal_boundaries: 2149 snets.AddSelectedLabel(val) 2150 else: 2151 vedo.logger.error("isosurface_discrete(): unknown boundaries option") 2152 2153 n = len(values) 2154 snets.SetNumberOfContours(n) 2155 snets.SetNumberOfLabels(n) 2156 2157 if background_label is not None: 2158 snets.SetBackgroundLabel(background_label) 2159 2160 for i, val in enumerate(values): 2161 snets.SetValue(i, val) 2162 2163 if use_quads: 2164 snets.SetOutputMeshTypeToQuads() 2165 else: 2166 snets.SetOutputMeshTypeToTriangles() 2167 snets.Update() 2168 2169 out = vedo.mesh.Mesh(snets.GetOutput()) 2170 out.pipeline = utils.OperationNode( 2171 "isosurface_discrete", 2172 parents=[self], 2173 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2174 c="#4cc9f0:#e9c46a", 2175 ) 2176 2177 logger.SetStderrVerbosity(logger.VERBOSITY_INFO) 2178 return out 2179 2180 2181 def legosurface( 2182 self, 2183 vmin=None, 2184 vmax=None, 2185 invert=False, 2186 boundary=True, 2187 array_name="input_scalars", 2188 ) -> "vedo.mesh.Mesh": 2189 """ 2190 Represent an object - typically a `Volume` - as lego blocks (voxels). 2191 By default colors correspond to the volume's scalar. 2192 Returns an `Mesh` object. 2193 2194 Arguments: 2195 vmin : (float) 2196 the lower threshold, voxels below this value are not shown. 2197 vmax : (float) 2198 the upper threshold, voxels above this value are not shown. 
2199 boundary : (bool) 2200 controls whether to include cells that are partially inside 2201 array_name : (int, str) 2202 name or index of the scalar array to be considered 2203 2204 Examples: 2205 - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py) 2206 2207 ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png) 2208 """ 2209 imp_dataset = vtki.new("ImplicitDataSet") 2210 imp_dataset.SetDataSet(self.dataset) 2211 window = vtki.new("ImplicitWindowFunction") 2212 window.SetImplicitFunction(imp_dataset) 2213 2214 srng = list(self.dataset.GetScalarRange()) 2215 if vmin is not None: 2216 srng[0] = vmin 2217 if vmax is not None: 2218 srng[1] = vmax 2219 if not boundary: 2220 tol = 0.00001 * (srng[1] - srng[0]) 2221 srng[0] -= tol 2222 srng[1] += tol 2223 window.SetWindowRange(srng) 2224 # print("legosurface window range:", srng) 2225 2226 extract = vtki.new("ExtractGeometry") 2227 extract.SetInputData(self.dataset) 2228 extract.SetImplicitFunction(window) 2229 extract.SetExtractInside(invert) 2230 extract.SetExtractBoundaryCells(boundary) 2231 extract.Update() 2232 2233 gf = vtki.new("GeometryFilter") 2234 gf.SetInputData(extract.GetOutput()) 2235 gf.Update() 2236 2237 m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat() 2238 m.map_points_to_cells() 2239 m.celldata.select(array_name) 2240 2241 m.pipeline = utils.OperationNode( 2242 "legosurface", 2243 parents=[self], 2244 comment=f"array: {array_name}", 2245 c="#4cc9f0:#e9c46a", 2246 ) 2247 return m 2248 2249 def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh": 2250 """ 2251 Build a polygonal Mesh from the current object. 2252 2253 If `fill=True`, the interior faces of all the cells are created. 2254 (setting a `shrink` value slightly smaller than the default 1.0 2255 can avoid flickering due to internal adjacent faces). 2256 2257 If `fill=False`, only the boundary faces will be generated. 2258 """ 2259 gf = vtki.new("GeometryFilter") 2260 if fill: 2261 sf = vtki.new("ShrinkFilter") 2262 sf.SetInputData(self.dataset) 2263 sf.SetShrinkFactor(shrink) 2264 sf.Update() 2265 gf.SetInputData(sf.GetOutput()) 2266 gf.Update() 2267 poly = gf.GetOutput() 2268 if shrink == 1.0: 2269 clean_poly = vtki.new("CleanPolyData") 2270 clean_poly.PointMergingOn() 2271 clean_poly.ConvertLinesToPointsOn() 2272 clean_poly.ConvertPolysToLinesOn() 2273 clean_poly.ConvertStripsToPolysOn() 2274 clean_poly.SetInputData(poly) 2275 clean_poly.Update() 2276 poly = clean_poly.GetOutput() 2277 else: 2278 gf.SetInputData(self.dataset) 2279 gf.Update() 2280 poly = gf.GetOutput() 2281 2282 msh = vedo.mesh.Mesh(poly).flat() 2283 msh.scalarbar = self.scalarbar 2284 lut = utils.ctf2lut(self) 2285 if lut: 2286 msh.mapper.SetLookupTable(lut) 2287 2288 msh.pipeline = utils.OperationNode( 2289 "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a" 2290 ) 2291 return msh
Methods for Volume objects.
2036 def bounds(self) -> np.ndarray: 2037 """ 2038 Get the object bounds. 2039 Returns a list in format `[xmin,xmax, ymin,ymax, zmin,zmax]`. 2040 """ 2041 # OVERRIDE CommonAlgorithms.bounds() which is too slow 2042 return np.array(self.dataset.GetBounds())
Get the object bounds.
Returns a list in format [xmin,xmax, ymin,ymax, zmin,zmax].
2044 def isosurface(self, value=None, flying_edges=False) -> "vedo.mesh.Mesh": 2045 """ 2046 Return an `Mesh` isosurface extracted from the `Volume` object. 2047 2048 Set `value` as single float or list of values to draw the isosurface(s). 2049 Use flying_edges for faster results (but sometimes can interfere with `smooth()`). 2050 2051 Examples: 2052 - [isosurfaces1.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py) 2053 2054 ![](https://vedo.embl.es/images/volumetric/isosurfaces.png) 2055 """ 2056 scrange = self.dataset.GetScalarRange() 2057 2058 if flying_edges: 2059 cf = vtki.new("FlyingEdges3D") 2060 cf.InterpolateAttributesOn() 2061 else: 2062 cf = vtki.new("ContourFilter") 2063 cf.UseScalarTreeOn() 2064 2065 cf.SetInputData(self.dataset) 2066 cf.ComputeNormalsOn() 2067 2068 if utils.is_sequence(value): 2069 cf.SetNumberOfContours(len(value)) 2070 for i, t in enumerate(value): 2071 cf.SetValue(i, t) 2072 else: 2073 if value is None: 2074 value = (2 * scrange[0] + scrange[1]) / 3.0 2075 # print("automatic isosurface value =", value) 2076 cf.SetValue(0, value) 2077 2078 cf.Update() 2079 poly = cf.GetOutput() 2080 2081 out = vedo.mesh.Mesh(poly, c=None).phong() 2082 out.mapper.SetScalarRange(scrange[0], scrange[1]) 2083 2084 out.pipeline = utils.OperationNode( 2085 "isosurface", 2086 parents=[self], 2087 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2088 c="#4cc9f0:#e9c46a", 2089 ) 2090 return out
Return a Mesh isosurface extracted from the Volume object.
Set value as a single float or list of values to draw the isosurface(s).
Use flying_edges for faster results (but sometimes it can interfere with smooth()).
Examples:
- isosurfaces1.py: https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces1.py
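A minimal sketch on a synthetic scalar field (the squared-distance field below is an illustrative assumption):

```python
import numpy as np
from vedo import Volume

X, Y, Z = np.mgrid[0:30, 0:30, 0:30]
scal = (X - 15)**2 + (Y - 15)**2 + (Z - 15)**2   # squared distance from the center voxel
vol = Volume(scal.astype(np.float32))
surf = vol.isosurface(100.0)                     # a sphere of radius about 10 voxels
```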
2092 def isosurface_discrete( 2093 self, values, background_label=None, internal_boundaries=True, use_quads=False, nsmooth=0, 2094 ) -> "vedo.mesh.Mesh": 2095 """ 2096 Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 2097 3D version of the multiple objects/labels Surface Nets algorithm. 2098 The input is a 3D image (i.e., volume) where each voxel is labeled 2099 (integer labels are preferred to real values), and the output data is a polygonal mesh separating 2100 labeled regions / objects. 2101 (Note that on output each region [corresponding to a different segmented object] will share 2102 points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them). 2103 2104 Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating 2105 the labels on either side of the polygons composing the output Mesh. 2106 (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. 2107 The name of this celldata array is "BoundaryLabels"). 2108 2109 Arguments: 2110 value : (float, list) 2111 single value or list of values to draw the isosurface(s). 2112 background_label : (float) 2113 this value specifies the label value to use when referencing the background 2114 region outside of any of the specified regions. 2115 boundaries : (bool, list) 2116 if True, the output will only contain the boundary surface. Internal surfaces will be removed. 2117 If a list of integers is provided, only the boundaries between the specified labels will be extracted. 2118 use_quads : (bool) 2119 if True, the output polygons will be quads. If False, the output polygons will be triangles. 2120 nsmooth : (int) 2121 number of iterations of smoothing (0 means no smoothing). 
2122 2123 Examples: 2124 - [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py) 2125 """ 2126 logger = vtki.get_class("Logger") 2127 logger.SetStderrVerbosity(logger.VERBOSITY_ERROR) 2128 2129 snets = vtki.new("SurfaceNets3D") 2130 snets.SetInputData(self.dataset) 2131 2132 if nsmooth: 2133 snets.SmoothingOn() 2134 snets.AutomaticSmoothingConstraintsOn() 2135 snets.GetSmoother().SetNumberOfIterations(nsmooth) 2136 # snets.GetSmoother().SetRelaxationFactor(relaxation_factor) 2137 # snets.GetSmoother().SetConstraintDistance(constraint_distance) 2138 else: 2139 snets.SmoothingOff() 2140 2141 if internal_boundaries is False: 2142 snets.SetOutputStyleToBoundary() 2143 elif internal_boundaries is True: 2144 snets.SetOutputStyleToDefault() 2145 elif utils.is_sequence(internal_boundaries): 2146 snets.SetOutputStyleToSelected() 2147 snets.InitializeSelectedLabelsList() 2148 for val in internal_boundaries: 2149 snets.AddSelectedLabel(val) 2150 else: 2151 vedo.logger.error("isosurface_discrete(): unknown boundaries option") 2152 2153 n = len(values) 2154 snets.SetNumberOfContours(n) 2155 snets.SetNumberOfLabels(n) 2156 2157 if background_label is not None: 2158 snets.SetBackgroundLabel(background_label) 2159 2160 for i, val in enumerate(values): 2161 snets.SetValue(i, val) 2162 2163 if use_quads: 2164 snets.SetOutputMeshTypeToQuads() 2165 else: 2166 snets.SetOutputMeshTypeToTriangles() 2167 snets.Update() 2168 2169 out = vedo.mesh.Mesh(snets.GetOutput()) 2170 out.pipeline = utils.OperationNode( 2171 "isosurface_discrete", 2172 parents=[self], 2173 comment=f"#pts {out.dataset.GetNumberOfPoints()}", 2174 c="#4cc9f0:#e9c46a", 2175 ) 2176 2177 logger.SetStderrVerbosity(logger.VERBOSITY_INFO) 2178 return out
Create boundary/isocontour surfaces from a label map (e.g., a segmented image) using a threaded, 3D version of the multiple objects/labels Surface Nets algorithm. The input is a 3D image (i.e., volume) where each voxel is labeled (integer labels are preferred to real values), and the output data is a polygonal mesh separating labeled regions / objects. (Note that on output each region [corresponding to a different segmented object] will share points/edges on a common boundary, i.e., two neighboring objects will share the boundary that separates them).
Besides output geometry defining the surface net, the filter outputs a two-component celldata array indicating the labels on either side of the polygons composing the output Mesh. (This can be used for advanced operations like extracting shared/contacting boundaries between two objects. The name of this celldata array is "BoundaryLabels").
Arguments:
- values : (float, list) single value or list of values to draw the isosurface(s).
- background_label : (float) this value specifies the label value to use when referencing the background region outside of any of the specified regions.
- internal_boundaries : (bool, list) if False, remove the internal surfaces between labels and keep only the boundaries with the background region. If a list of integers is provided, only the boundaries of the specified labels are extracted.
- use_quads : (bool) if True, the output polygons will be quads. If False, the output polygons will be triangles.
- nsmooth : (int) number of iterations of smoothing (0 means no smoothing).
Examples:
- [isosurfaces2.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/isosurfaces2.py)
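Below is a minimal usage sketch, not part of the vedo documentation: it builds a small synthetic label map with numpy, wraps it in a `Volume`, and extracts one surface per label. The label ids, array sizes and smoothing value are invented for illustration; the method relies on VTK's vtkSurfaceNets3D class, so it requires a sufficiently recent VTK build.

# Assumed usage sketch for isosurface_discrete() on a synthetic label map.
import numpy as np
import vedo

# 0 = background; 1 and 2 are two made-up segmented "objects"
labels = np.zeros((60, 60, 60), dtype=np.uint8)
labels[10:30, 10:30, 10:30] = 1
labels[25:50, 25:50, 25:50] = 2

vol = vedo.Volume(labels)

# one surface per requested label; nsmooth relaxes the voxel staircase
surf = vol.isosurface_discrete(values=[1, 2], background_label=0, nsmooth=10)

# two-component celldata array with the labels on either side of each polygon
print(surf.celldata["BoundaryLabels"][:3])

vedo.show(surf, axes=1).close()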
def legosurface(
    self,
    vmin=None,
    vmax=None,
    invert=False,
    boundary=True,
    array_name="input_scalars",
) -> "vedo.mesh.Mesh":
    """
    Represent an object - typically a `Volume` - as lego blocks (voxels).
    By default colors correspond to the volume's scalar.
    Returns a `Mesh` object.

    Arguments:
        vmin : (float)
            the lower threshold, voxels below this value are not shown.
        vmax : (float)
            the upper threshold, voxels above this value are not shown.
        boundary : (bool)
            controls whether to include cells that are partially inside the [vmin, vmax] range.
        array_name : (int, str)
            name or index of the scalar array to be considered.

    Examples:
        - [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)

            ![](https://vedo.embl.es/images/volumetric/56820682-da40e500-684c-11e9-8ea3-91cbcba24b3a.png)
    """
    imp_dataset = vtki.new("ImplicitDataSet")
    imp_dataset.SetDataSet(self.dataset)
    window = vtki.new("ImplicitWindowFunction")
    window.SetImplicitFunction(imp_dataset)

    srng = list(self.dataset.GetScalarRange())
    if vmin is not None:
        srng[0] = vmin
    if vmax is not None:
        srng[1] = vmax
    if not boundary:
        tol = 0.00001 * (srng[1] - srng[0])
        srng[0] -= tol
        srng[1] += tol
    window.SetWindowRange(srng)
    # print("legosurface window range:", srng)

    extract = vtki.new("ExtractGeometry")
    extract.SetInputData(self.dataset)
    extract.SetImplicitFunction(window)
    extract.SetExtractInside(invert)
    extract.SetExtractBoundaryCells(boundary)
    extract.Update()

    gf = vtki.new("GeometryFilter")
    gf.SetInputData(extract.GetOutput())
    gf.Update()

    m = vedo.mesh.Mesh(gf.GetOutput()).lw(0.1).flat()
    m.map_points_to_cells()
    m.celldata.select(array_name)

    m.pipeline = utils.OperationNode(
        "legosurface",
        parents=[self],
        comment=f"array: {array_name}",
        c="#4cc9f0:#e9c46a",
    )
    return m
Represent an object - typically a `Volume` - as lego blocks (voxels). By default colors correspond to the volume's scalar. Returns a `Mesh` object.
Arguments:
- vmin : (float) the lower threshold, voxels below this value are not shown.
- vmax : (float) the upper threshold, voxels above this value are not shown.
- boundary : (bool) controls whether to include cells that are partially inside the [vmin, vmax] range.
- array_name : (int, str) name or index of the scalar array to be considered
Examples:
- [legosurface.py](https://github.com/marcomusy/vedo/tree/master/examples/volumetric/legosurface.py)
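A short usage sketch follows; it is an illustration rather than the library's own example. It thresholds a synthetic gaussian scalar field and shows the surviving voxels as blocks; the grid size, threshold values and colormap name are arbitrary choices.

# Assumed usage sketch for legosurface() on a synthetic scalar Volume.
import numpy as np
import vedo

# smooth synthetic scalar field with values roughly in (0, 1]
x, y, z = np.mgrid[-1:1:40j, -1:1:40j, -1:1:40j]
scalars = np.exp(-(x**2 + y**2 + z**2))

vol = vedo.Volume(scalars)

# keep only voxels whose scalar value falls inside [vmin, vmax]
lego = vol.legosurface(vmin=0.4, vmax=0.9)
lego.cmap("viridis").add_scalarbar("scalar value")

vedo.show(lego, axes=1).close()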
def tomesh(self, fill=True, shrink=1.0) -> "vedo.mesh.Mesh":
    """
    Build a polygonal Mesh from the current object.

    If `fill=True`, the interior faces of all the cells are created.
    (setting a `shrink` value slightly smaller than the default 1.0
    can avoid flickering due to internal adjacent faces).

    If `fill=False`, only the boundary faces will be generated.
    """
    gf = vtki.new("GeometryFilter")
    if fill:
        sf = vtki.new("ShrinkFilter")
        sf.SetInputData(self.dataset)
        sf.SetShrinkFactor(shrink)
        sf.Update()
        gf.SetInputData(sf.GetOutput())
        gf.Update()
        poly = gf.GetOutput()
        if shrink == 1.0:
            clean_poly = vtki.new("CleanPolyData")
            clean_poly.PointMergingOn()
            clean_poly.ConvertLinesToPointsOn()
            clean_poly.ConvertPolysToLinesOn()
            clean_poly.ConvertStripsToPolysOn()
            clean_poly.SetInputData(poly)
            clean_poly.Update()
            poly = clean_poly.GetOutput()
    else:
        gf.SetInputData(self.dataset)
        gf.Update()
        poly = gf.GetOutput()

    msh = vedo.mesh.Mesh(poly).flat()
    msh.scalarbar = self.scalarbar
    lut = utils.ctf2lut(self)
    if lut:
        msh.mapper.SetLookupTable(lut)

    msh.pipeline = utils.OperationNode(
        "tomesh", parents=[self], comment=f"fill={fill}", c="#9e2a2b:#e9c46a"
    )
    return msh
Build a polygonal Mesh from the current object.
If `fill=True`, the interior faces of all the cells are created. (Setting a `shrink` value slightly smaller than the default 1.0 can avoid flickering due to internal adjacent faces.)
If `fill=False`, only the boundary faces will be generated.
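A brief usage sketch, assumed rather than taken from the library docs: it converts a tiny random Volume into a polygonal Mesh twice, once keeping only the boundary faces and once with the interior faces filled and slightly shrunk.

# Assumed usage sketch for tomesh() on a small synthetic Volume.
import numpy as np
import vedo

vol = vedo.Volume(np.random.rand(12, 12, 12))  # tiny random volume, illustration only

boundary_msh = vol.tomesh(fill=False)  # only the boundary faces

# all cell faces; shrinking slightly avoids flickering of coincident internal faces
filled_msh = vol.tomesh(fill=True, shrink=0.95)

print(boundary_msh.ncells, filled_msh.ncells)
vedo.show(boundary_msh, filled_msh, N=2, axes=1).close()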
Inherited Members
- CommonAlgorithms
- pointdata
- celldata
- metadata
- memory_address
- memory_size
- modified
- box
- update_dataset
- xbounds
- ybounds
- zbounds
- diagonal_size
- average_size
- center_of_mass
- copy_data_from
- inputdata
- npoints
- nvertices
- ncells
- points
- cell_centers
- lines
- lines_as_flat_array
- mark_boundaries
- find_cells_in_bounds
- find_cells_along_line
- find_cells_along_plane
- keep_cell_types
- map_cells_to_points
- vertices
- coordinates
- cells_as_flat_array
- cells
- cell_edge_neighbors
- map_points_to_cells
- resample_data_from
- interpolate_data_from
- add_ids
- gradient
- divergence
- vorticity
- probe
- compute_cell_size
- generate_random_data
- integrate_data
- write
- signed_distance
- unsigned_distance
- smooth_data
- compute_streamlines