# hypervehicle/utilities.py

import os
import sys
import glob
import numpy as np
import pandas as pd
from stl import mesh
from tqdm import tqdm
from art import tprint, art
import xml.etree.ElementTree as ET
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING

if TYPE_CHECKING:
    from .vehicle import Vehicle


def surface_to_stl(
    parametric_surface,
    triangles_per_edge_r: int,
    triangles_per_edge_s: int,
    si: float = 1.0,
    sj: float = 1.0,
    mirror_y: bool = False,
    i_clustering_func: Optional[Callable] = None,
    j_clustering_func: Optional[Callable] = None,
    verbosity: int = 0,
) -> mesh.Mesh:
27 """
28 Function to convert parametric_surface generated using the Eilmer Geometry
29 Package into a stl mesh object.
31 Parameters
32 ----------
33 parametric_surface : Any
34 The parametric surface object.
36 si : float, optional
37 The clustering in the i-direction. The default is 1.0.
39 sj : float, optional
40 The clustering in the j-direction. The default is 1.0.
42 triangles_per_edge_r : int
43 The resolution for the stl object - x/i direction.
45 triangles_per_edge_s : int
46 The resolution for the stl object - y/j direction.
48 mirror_y : bool, optional
49 Create mirror image about x-z plane. The default is False.
51 i_clustering_func : callable, optional
52 A custom clustering function to apply in the i direction.
53 The default is None.
55 j_clustering_func : callable, optional
56 A custom clustering function to apply in the j direction.
57 The default is None.
59 Returns
60 ----------
61 stl_mesh : Mesh
62 The numpy-stl mesh.
63 """
    ni = triangles_per_edge_r
    nj = triangles_per_edge_s

    if verbosity > 1:
        print(f"Triangles per edge: (ni, nj) = ({ni}, {nj})")

    # Create lists of vertex parameters
    if i_clustering_func:
        r_list = [i_clustering_func(i) for i in np.linspace(0, 1, ni + 1)]
    else:
        r_list = default_vertex_func(lb=0.0, ub=1.0, steps=ni + 1, spacing=si)

    if j_clustering_func:
        s_list = [j_clustering_func(j) for j in np.linspace(0, 1, nj + 1)]
    else:
        s_list = default_vertex_func(lb=0.0, ub=1.0, steps=nj + 1, spacing=sj)

    y_mult: int = -1 if mirror_y else 1

    if verbosity > 1:
        print(f"r_list = {r_list}")
        print(f"s_list = {s_list}")

    # For STL generation, split each cell into four triangles with indices [0, 3]
    #   p01-------p11
    #   | \  2  / |
    #   |  \   /  |
    #   | 3  pc 1 |
    #   |  /   \  |
    #   | /  0  \ |
    #   p00-------p10
    N_triangles = 4 * ni * nj
    stl_mesh = mesh.Mesh(np.zeros(N_triangles, dtype=mesh.Mesh.dtype))

    # mesh.vectors contains a list defining three corners of each triangle
    t = 0

    # For vertices along the x direction (i)
    for i in range(ni):
        # For vertices along the y direction (j)
        for j in range(nj):
            # Get corner points for current cell (quad)
            pos00 = parametric_surface(r_list[i], s_list[j])
            pos10 = parametric_surface(r_list[i + 1], s_list[j])
            pos01 = parametric_surface(r_list[i], s_list[j + 1])
            pos11 = parametric_surface(r_list[i + 1], s_list[j + 1])

            # Get centre of current cell (quad)
            pos_x = 0.25 * (pos00.x + pos10.x + pos01.x + pos11.x)
            pos_y = 0.25 * (pos00.y + pos10.y + pos01.y + pos11.y)
            pos_z = 0.25 * (pos00.z + pos10.z + pos01.z + pos11.z)

            # Add triangle 0 [p00, p10, pc]
            stl_mesh.vectors[t][0] = np.array([pos00.x, y_mult * pos00.y, pos00.z])
            stl_mesh.vectors[t][1] = np.array([pos10.x, y_mult * pos10.y, pos10.z])
            stl_mesh.vectors[t][2] = np.array([pos_x, y_mult * pos_y, pos_z])
            t += 1

            # Add triangle 1 [p10, p11, pc]
            stl_mesh.vectors[t][0] = np.array([pos10.x, y_mult * pos10.y, pos10.z])
            stl_mesh.vectors[t][1] = np.array([pos11.x, y_mult * pos11.y, pos11.z])
            stl_mesh.vectors[t][2] = np.array([pos_x, y_mult * pos_y, pos_z])
            t += 1

            # Add triangle 2 [p11, p01, pc]
            stl_mesh.vectors[t][0] = np.array([pos11.x, y_mult * pos11.y, pos11.z])
            stl_mesh.vectors[t][1] = np.array([pos01.x, y_mult * pos01.y, pos01.z])
            stl_mesh.vectors[t][2] = np.array([pos_x, y_mult * pos_y, pos_z])
            t += 1

            # Add triangle 3 [p01, p00, pc]
            stl_mesh.vectors[t][0] = np.array([pos01.x, y_mult * pos01.y, pos01.z])
            stl_mesh.vectors[t][1] = np.array([pos00.x, y_mult * pos00.y, pos00.z])
            stl_mesh.vectors[t][2] = np.array([pos_x, y_mult * pos_y, pos_z])
            t += 1

    return stl_mesh
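

# A minimal usage sketch of surface_to_stl (illustrative only, not part of the
# module API). The `flat_plane` callable below is a hypothetical stand-in for
# an Eilmer parametric surface: any callable mapping (r, s) in [0, 1]^2 to an
# object exposing .x, .y and .z attributes will work.
def _example_surface_to_stl():  # pragma: no cover - illustrative sketch
    from types import SimpleNamespace

    def flat_plane(r, s):
        # A unit square in the x-y plane, parameterised by (r, s)
        return SimpleNamespace(x=r, y=s, z=0.0)

    stl_mesh = surface_to_stl(
        flat_plane, triangles_per_edge_r=10, triangles_per_edge_s=10
    )
    stl_mesh.save("flat_plane.stl")  # writes 400 triangles (4 * 10 * 10)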
def default_vertex_func(lb, ub, steps, spacing=1.0):
    """Returns an array of `steps` vertex parameters spanning [lb, ub],
    clustered by raising the uniform spacing to the power of `spacing`."""
    span = ub - lb
    dx = 1.0 / (steps - 1)
    return np.array([lb + (i * dx) ** spacing * span for i in range(steps)])
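

# Illustration of the `spacing` exponent in default_vertex_func: spacing = 1
# gives uniformly spaced vertices, while spacing > 1 clusters them toward `lb`.
# >>> default_vertex_func(lb=0.0, ub=1.0, steps=5, spacing=1.0)
# array([0.  , 0.25, 0.5 , 0.75, 1.  ])
# >>> default_vertex_func(lb=0.0, ub=1.0, steps=5, spacing=2.0)
# array([0.    , 0.0625, 0.25  , 0.5625, 1.    ])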
def assess_inertial_properties(
    vehicle: "Vehicle", component_densities: Dict[str, float]
):
    """Assesses the inertial properties of a Vehicle instance.

    Parameters
    ----------
    vehicle : Vehicle
        A hypervehicle Vehicle instance.

    component_densities : Dict[str, float]
        A dictionary containing the effective densities for each component.
        Note that the keys of the dict must match the keys of
        vehicle._named_components.

    Returns
    -------
    vehicle_properties : dict
        A dictionary containing the vehicle's mass, volume, location of
        CoG and moment of inertia matrix.

    component_properties : dict
        A dictionary containing the same keys as vehicle_properties, but
        the values are now dictionaries for each component of the vehicle.
    """
    # Generate the vehicle, if it has not been generated yet
    if not vehicle._generated:
        vehicle.generate()

    volumes = {}
    areas = {}
    masses = {}
    cgs = {}
    inertias = {}
    total_mass = 0
    total_volume = 0
    total_area = 0

    for name, component in vehicle._named_components.items():
        # Estimate inertial properties
        volume, vmass, cog, inertia = component.mesh.get_mass_properties_with_density(
            component_densities[name]
        )

        # Also calculate surface area
        area = float(sum(component.mesh.areas))

        # Save component result
        volumes[name] = volume
        areas[name] = area
        masses[name] = vmass
        cgs[name] = cog
        inertias[name] = inertia
        total_mass += vmass
        total_volume += volume
        total_area += area

    # Composite centre of mass
    composite_cog = 0
    for component in vehicle._named_components:
        m = masses[component]
        composite_cog += m * cgs[component]

    composite_cog *= 1 / total_mass

    # Parallel axis theorem (tensor form): I' = I + m * (|r|^2 * E - outer(r, r))
    shifted_inertias = {}
    composite_inertia = 0
    for component in vehicle._named_components:
        m = masses[component]
        r = cgs[component] - composite_cog
        I_adj = inertias[component] + m * (np.dot(r, r) * np.eye(3) - np.outer(r, r))

        shifted_inertias[component] = I_adj
        composite_inertia += I_adj

    # Prepare output
    vehicle_properties = {
        "mass": total_mass,
        "volume": total_volume,
        "area": total_area,
        "cog": composite_cog,
        "moi": composite_inertia,
    }
    component_properties = {
        "mass": masses,
        "volume": volumes,
        "area": areas,
        "cog": cgs,
        "moi": inertias,
    }

    return vehicle_properties, component_properties
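

# A usage sketch for assess_inertial_properties (illustrative only). The
# component names and densities below are hypothetical; the keys of the
# density dictionary must match vehicle._named_components.
def _example_assess_inertial_properties(vehicle):  # pragma: no cover
    # Effective densities in kg/m^3, one entry per named component
    component_densities = {"body": 2700.0, "wing": 1550.0}
    vehicle_properties, component_properties = assess_inertial_properties(
        vehicle, component_densities
    )
    print(f"Total mass: {vehicle_properties['mass']:.2f} kg")
    print(f"Centre of gravity: {vehicle_properties['cog']}")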
class SensitivityStudy:
    """
    Computes the geometric sensitivities using finite differencing.
    """

    def __init__(self, vehicle_constructor, verbosity: Optional[int] = 1):
        """Vehicle geometry sensitivity constructor.

        Parameters
        ----------
        vehicle_constructor : AbstractGenerator
            The Vehicle instance constructor.

        verbosity : int, optional
            The code verbosity. The default is 1.
        """
        self.vehicle_constructor = vehicle_constructor
        self.verbosity = verbosity

        # Parameter sensitivities
        self.parameter_sensitivities = None
        self.component_sensitivities = None
        self.component_scalar_sensitivities = None
        self.scalar_sensitivities = None
        self.property_sensitivities = None

        # Nominal vehicle instance
        self.nominal_vehicle_instance = None

        # Combined data file name
        self.combined_fn = "all_components_sensitivity.csv"

    def __repr__(self):
        return "HyperVehicle sensitivity study"

    def dvdp(
        self,
        parameter_dict: Dict[str, Any],
        overrides: Optional[Dict[str, Any]] = None,
        perturbation: Optional[float] = 5,
        write_nominal_stl: Optional[bool] = True,
        nominal_stl_prefix: Optional[str] = None,
    ):
        """Computes the sensitivity of the geometry with respect to the
        design parameters.

        Parameters
        ----------
        parameter_dict : dict
            A dictionary of the design parameters to perturb, and their
            nominal values.

        overrides : dict, optional
            Optional vehicle generator overrides to provide along with the
            parameter dictionary without variation. The default is None.

        perturbation : float, optional
            The design parameter perturbation amount, specified as a
            percentage. The default is 5.

        write_nominal_stl : bool, optional
            A boolean flag to write the nominal geometry STL(s) to file. The
            default is True.

        nominal_stl_prefix : str, optional
            The prefix to prepend when writing STL files for the nominal
            geometry. If None, no prefix will be used. The default is None.

        Returns
        -------
        sensitivities : dict
            A dictionary containing the sensitivity information for all
            components of the geometry, relative to the nominal geometry.
        """
        # Print banner
        if self.verbosity > 0:
            print_banner()
            print("Running geometric sensitivity study.")

        # TODO - return perturbed instances after generation, to allow
        # quickly writing them to STL
        from hypervehicle.generator import AbstractGenerator

        # Check overrides
        overrides = overrides if overrides else {}

        # Create Vehicle instance with nominal parameters
        if self.verbosity > 0:
            print(" Generating nominal geometry...")

        constructor_instance: AbstractGenerator = self.vehicle_constructor(
            **parameter_dict, **overrides
        )
        nominal_instance = constructor_instance.create_instance()
        nominal_instance.verbosity = 0

        # Generate components
        nominal_instance.generate()
        nominal_meshes = {
            name: component.mesh
            for name, component in nominal_instance._named_components.items()
        }

        if self.verbosity > 0:
            print(" Done.")

        if write_nominal_stl:
            # Write nominal instance to STL files
            nominal_instance.to_stl(prefix=nominal_stl_prefix)

        # Generate meshes for each parameter
        if self.verbosity > 0:
            print(" Generating perturbed geometries...")
            print(" Parameters: ", parameter_dict.keys())

        sensitivities = {}
        analysis_sens = {}
        component_analysis_sens = {}
        property_sens = {}
        for parameter, value in parameter_dict.items():
            if self.verbosity > 0:
                print(f" Generating for {parameter}.")

            sensitivities[parameter] = {}

            # Create copy
            adjusted_parameters = parameter_dict.copy()

            # Adjust current parameter for sensitivity analysis
            adjusted_parameters[parameter] *= 1 + perturbation / 100
            dp = adjusted_parameters[parameter] - value

            # Create Vehicle instance with perturbed parameter
            constructor_instance = self.vehicle_constructor(
                **adjusted_parameters, **overrides
            )
            parameter_instance = constructor_instance.create_instance()
            parameter_instance.verbosity = 0

            # Generate STL meshes
            parameter_instance.generate()
            parameter_meshes = {
                name: component.mesh
                for name, component in parameter_instance._named_components.items()
            }

            # Generate sensitivities for geometric analysis results
            if nominal_instance.analysis_results:
                analysis_sens[parameter] = {}
                for r, v in nominal_instance.analysis_results.items():
                    analysis_sens[parameter][r] = (
                        parameter_instance.analysis_results[r] - v
                    ) / dp

                # Repeat for components
                component_analysis_sens[parameter] = (
                    parameter_instance._volmass - nominal_instance._volmass
                ) / dp

            # Generate sensitivities for vehicle properties
            if nominal_instance.properties:
                property_sens[parameter] = {}
                for property, v in nominal_instance.properties.items():
                    property_sens[parameter][property] = (
                        parameter_instance.properties[property] - v
                    ) / dp

            # Generate geometric sensitivities
            for component, nominal_mesh in nominal_meshes.items():
                parameter_mesh = parameter_meshes[component]
                sensitivity_df = self._compare_meshes(
                    nominal_mesh,
                    parameter_mesh,
                    dp,
                    parameter,
                )

                sensitivities[parameter][component] = sensitivity_df

        if self.verbosity > 0:
            print(" Done.")

        # Save output
        self.parameter_sensitivities = sensitivities
        self.scalar_sensitivities = analysis_sens
        self.component_scalar_sensitivities = component_analysis_sens
        self.property_sensitivities = property_sens
        self.component_sensitivities = self._combine(nominal_instance, sensitivities)
        self.nominal_vehicle_instance = nominal_instance

        if self.verbosity > 0:
            print("Sensitivity study complete.")

        return sensitivities

    def to_csv(self, outdir: Optional[str] = None):
        """Writes the sensitivity information to CSV file.

        Parameters
        ----------
        outdir : str, optional
            The output directory to write the sensitivity files to. If
            None, the current working directory will be used. The default
            is None.

        Returns
        -------
        combined_data_filepath : str
            The filepath to the combined sensitivity data.
        """
        # Check if sensitivities have been generated
        if self.component_sensitivities is None:
            raise Exception("Sensitivities have not yet been generated.")

        # Check output directory
        if outdir is None:
            outdir = os.getcwd()

        if not os.path.exists(outdir):
            # Make the directory
            os.mkdir(outdir)

        # Save sensitivity data for each component
        all_sens_data = pd.DataFrame()
        for component, df in self.component_sensitivities.items():
            df.to_csv(
                os.path.join(outdir, f"{component}_sensitivity.csv"), index=False
            )
            all_sens_data = pd.concat([all_sens_data, df])

        # Also save the combined sensitivity data
        combined_data_path = os.path.join(outdir, self.combined_fn)
        all_sens_data.to_csv(combined_data_path, index=False)

        # Also save analysis sensitivities
        if self.scalar_sensitivities:
            # Make analysis results directory
            properties_dir = os.path.join(outdir, "scalar_sensitivities")
            if not os.path.exists(properties_dir):
                os.mkdir(properties_dir)

            # Save volume and mass
            reformatted_results = {}
            for p, s in self.component_scalar_sensitivities.items():
                labels = []
                values = []
                s: pd.DataFrame
                for component, comp_sens in s.iterrows():
                    comp_sens: pd.Series
                    for i, j in comp_sens.items():
                        labels.append(f"{component}_{i}")
                        values.append(j)

                reformatted_results[p] = values

            # Convert to DataFrame and save
            comp_sens = pd.DataFrame(data=reformatted_results, index=labels)
            comp_sens.to_csv(
                os.path.join(properties_dir, "volmass_sensitivity.csv")
            )

            # Save others
            for param in self.scalar_sensitivities:
                self.scalar_sensitivities[param]["cog"].tofile(
                    os.path.join(properties_dir, f"{param}_cog_sensitivity.txt"),
                    sep=", ",
                )
                self.scalar_sensitivities[param]["moi"].tofile(
                    os.path.join(properties_dir, f"{param}_moi_sensitivity.txt"),
                    sep=", ",
                )

        # Also save user-defined property sensitivities
        if self.property_sensitivities:
            properties_dir = os.path.join(outdir, "scalar_sensitivities")
            if not os.path.exists(properties_dir):
                os.mkdir(properties_dir)

            pd.DataFrame(self.property_sensitivities).to_csv(
                os.path.join(properties_dir, "property_sensitivity.csv")
            )

        return combined_data_path

    @staticmethod
    def _compare_meshes(mesh1, mesh2, dp, parameter_name: str) -> pd.DataFrame:
        """Compares two meshes with each other and applies finite differencing
        to quantify their differences.

        Parameters
        ----------
        mesh1 : Mesh
            The reference mesh.

        mesh2 : Mesh
            The perturbed mesh.

        dp : float
            The parameter perturbation.

        parameter_name : str
            The name of the parameter.

        Returns
        -------
        df : pd.DataFrame
            A DataFrame of the finite difference results.
        """
        # Take the vector difference
        diff = mesh2.vectors - mesh1.vectors

        # Flatten the difference array from (n, 3, 3) to (3n, 3)
        shape = diff.shape
        flat_diff = diff.reshape((shape[0] * shape[2], shape[1]))

        # Also flatten the reference mesh vectors
        vectors = mesh1.vectors.reshape((shape[0] * shape[2], shape[1]))

        # Concatenate all data column-wise
        all_data = np.zeros((shape[0] * shape[2], shape[1] * 2))
        all_data[:, 0:3] = vectors  # Reference locations
        all_data[:, 3:6] = flat_diff  # Location deltas

        # Create DataFrame
        df = pd.DataFrame(data=all_data, columns=["x", "y", "z", "dx", "dy", "dz"])
        df["magnitude"] = np.sqrt(np.square(df[["dx", "dy", "dz"]]).sum(axis=1))

        # Sensitivity calculations
        sensitivities = df[["dx", "dy", "dz"]] / dp
        sensitivities.rename(
            columns={
                "dx": f"dxd{parameter_name}",
                "dy": f"dyd{parameter_name}",
                "dz": f"dzd{parameter_name}",
            },
            inplace=True,
        )

        # Merge DataFrames
        df = df.merge(sensitivities, left_index=True, right_index=True)

        # Delete duplicate vertices
        df = df[~df.duplicated()]

        return df

    @staticmethod
    def _combine(nominal_instance, sensitivities):
        """Combines the sensitivity information for multiple parameters."""
        component_names = nominal_instance._named_components.keys()
        params = list(sensitivities.keys())

        allsens = {}
        for component in component_names:
            df = sensitivities[params[0]][component][["x", "y", "z"]]
            for param in params:
                p_s = sensitivities[param][component][
                    [f"dxd{param}", f"dyd{param}", f"dzd{param}"]
                ]
                df = pd.concat([df, p_s], axis=1)

            allsens[component] = df

        return allsens
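

# A usage sketch for SensitivityStudy (illustrative only). `MyGenerator` is a
# hypothetical hypervehicle.generator.AbstractGenerator subclass that accepts
# the design parameters below as keyword arguments.
def _example_sensitivity_study(MyGenerator):  # pragma: no cover - illustrative sketch
    study = SensitivityStudy(vehicle_constructor=MyGenerator)
    nominal_parameters = {"body_width": 0.5, "wing_span": 1.2}
    study.dvdp(parameter_dict=nominal_parameters, perturbation=5)

    # Writes one CSV per component, plus the combined sensitivity file
    study.to_csv(outdir="sensitivities")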
def append_sensitivities_to_tri(
    dp_filenames: List[str],
    components_filepath: Optional[str] = "Components.i.tri",
    match_tolerance: Optional[float] = 1e-5,
    rounding_tolerance: Optional[float] = 1e-8,
    combined_sens_fn: Optional[str] = "all_components_sensitivity.csv",
    outdir: Optional[str] = None,
    verbosity: Optional[int] = 1,
) -> float:
    """Appends shape sensitivity data to a .i.tri file, and also writes the
    sensitivity data to a CSV file. This step is required for geometries with
    multiple components. The .tri file is used to match individual sensitivity
    files (dp_filenames) to the geometry. The combined sensitivity file is
    required to calculate flow sensitivities with the .plt file, which has
    local flow conditions attached.

    Parameters
    ----------
    dp_filenames : list[str]
        A list of the file names of the sensitivity data.

    components_filepath : str, optional
        The filepath to the .tri file to be appended to. The default is
        'Components.i.tri'.

    match_tolerance : float, optional
        The precision tolerance for matching point coordinates. The
        default is 1e-5.

    rounding_tolerance : float, optional
        The tolerance to round data off to. The default is 1e-8.

    combined_sens_fn : str, optional
        The filename of the combined geometry sensitivity data. The default
        is "all_components_sensitivity.csv".

    outdir : str, optional
        The output directory to write the combined sensitivity file to. If
        None, the current working directory will be used. The default
        is None.

    verbosity : int, optional
        The verbosity of the code. The default is 1.

    Returns
    -------
    match_fraction : float
        The fraction of points which were matched. If this is below 100%,
        try increasing the match tolerance and running again.

    Examples
    --------
    >>> dp_filenames = ['wing_0_body_width_sensitivity.csv',
    ...                 'wing_1_body_width_sensitivity.csv']
    """
    # Check outdir
    if outdir is None:
        outdir = os.getcwd()
    else:
        # Desired outdir provided; check that it exists
        if not os.path.exists(outdir):
            # Make the directory
            os.mkdir(outdir)

    # TODO - rename to 'combine_sensitivity' or 'combine_comp_sens'
    # Parse .tri file
    tree = ET.parse(components_filepath)
    root = tree.getroot()
    grid = root[0]
    piece = grid[0]
    points = piece[0]

    points_data = points[0].text

    points_data_list = [el.split() for el in points_data.splitlines()]
    points_data_list = [[float(j) for j in i] for i in points_data_list]

    points_df = pd.DataFrame(points_data_list, columns=["x", "y", "z"]).dropna()

    # Ensure previous combined sensitivity file is not included
    try:
        dp_filenames.remove(combined_sens_fn)
    except ValueError:
        # It is not in there
        pass

    # Load and concatenate sensitivity data across components
    dp_df = pd.DataFrame()
    for filename in dp_filenames:
        df = pd.read_csv(filename)
        dp_df = pd.concat([dp_df, df])

    # Extract parameter names (each parameter has dxd*, dyd*, dzd* columns)
    parameters = []
    param_cols = dp_df.columns[3:]
    for i in range(int(len(param_cols) / 3)):
        parameters.append(param_cols[int(i * 3)].split("dxd")[-1])

    # Match points_df to the sensitivity DataFrame
    if verbosity > 0:
        print("Running coordinate matching algorithm for sensitivities...")
        pbar = tqdm(
            total=len(points_df), position=0, leave=True, desc="Point matching progress"
        )

    param_data = dict(zip(parameters, ["\n " for _ in parameters]))
    all_data = np.zeros((len(points_df), len(param_cols)), dtype=float)
    matched_points = 0
    for i in range(len(points_df)):
        match_x = (points_df["x"].iloc[i] - dp_df["x"]).abs() < match_tolerance
        match_y = (points_df["y"].iloc[i] - dp_df["y"]).abs() < match_tolerance
        match_z = (points_df["z"].iloc[i] - dp_df["z"]).abs() < match_tolerance

        match = match_x & match_y & match_z

        try:
            # TODO - what if there are multiple matches? (due to intersect perturbations)
            matched_data = dp_df[match].iloc[0][param_cols].values

            # Round off infinitesimally small values
            matched_data[abs(matched_data) < rounding_tolerance] = 0

            # Update data
            all_data[i, :] = matched_data
            p_n = 0
            for parameter, data_str in param_data.items():
                line = ""
                for j in range(3):
                    line += f"\t{matched_data[j + 3 * p_n]:.14e}"
                line += "\n "

                data_str += line
                param_data[parameter] = data_str
                p_n += 1

            # Count matched points
            matched_points += 1

        except IndexError:
            # No match found; append zeros to maintain order
            line = f"\t{0:.14e}\t{0:.14e}\t{0:.14e}\n "

            # Update data string for each parameter
            for parameter, data_str in param_data.items():
                param_data[parameter] = data_str + line

        # Update progress bar
        if verbosity > 0:
            pbar.update(1)

    match_fraction = matched_points / len(points_df)
    if verbosity > 0:
        pbar.close()
        print(f"Done - matched {100 * match_fraction:.2f}% of points.")

    # Write combined sensitivity data to CSV
    combined_sense = pd.merge(
        left=points_df.reset_index(),
        right=pd.DataFrame(all_data, columns=param_cols),
        left_index=True,
        right_index=True,
    ).drop("index", axis=1)
    combined_sense.to_csv(os.path.join(outdir, combined_sens_fn), index=False)

    # Write the matched sensitivity data to the .i.tri file as new XML elements;
    # NumberOfComponents is the number of sensitivity components (3 for x, y, z)
    for parameter in parameters:
        attribs = {
            "Name": f"{parameter}_sens",
            "NumberOfComponents": "3",
            "type": "Float64",
            "format": "ascii",
            "TRIXtype": "SHAPE_LINEARIZATION",
        }
        PointData = ET.SubElement(piece, "PointData")
        PointDataArray = ET.SubElement(PointData, "DataArray", attribs)
        PointDataArray.text = param_data[parameter]

    # Save to file
    tree.write(components_filepath)

    return match_fraction
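

# A usage sketch for append_sensitivities_to_tri (illustrative only). It
# assumes the per-component sensitivity CSVs written by SensitivityStudy.to_csv
# sit alongside a Components.i.tri file in the working directory.
def _example_append_sensitivities():  # pragma: no cover - illustrative sketch
    dp_filenames = glob.glob("*_sensitivity.csv")
    match_fraction = append_sensitivities_to_tri(
        dp_filenames=dp_filenames, components_filepath="Components.i.tri"
    )
    if match_fraction < 1.0:
        # Loosening match_tolerance allows more points to be matched
        print(f"Warning: only {100 * match_fraction:.2f}% of points matched.")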
def csv_to_delaunay(filepath: str):
    """Converts a CSV file of points to a Delaunay3D surface.

    Parameters
    ----------
    filepath : str
        The filepath to the CSV file.
    """
    # TODO - rename
    from paraview.simple import CSVReader, TableToPoints, Delaunay3D, SaveData  # type: ignore

    root_dir = os.path.dirname(filepath)
    prefix = filepath.split(os.sep)[-1].split(".csv")[0]
    savefilename = os.path.join(root_dir, f"{prefix}.vtu")

    # Create a new 'CSV Reader'
    csv_reader = CSVReader(FileName=[filepath])

    # Create a new 'Table To Points' filter
    table_to_points = TableToPoints(Input=csv_reader)

    # Set the coordinate columns on the filter
    table_to_points.XColumn = "x"
    table_to_points.YColumn = "y"
    table_to_points.ZColumn = "z"

    # Create a new 'Delaunay 3D' filter
    delaunay_3d = Delaunay3D(Input=table_to_points)

    # Save data
    SaveData(savefilename, proxy=delaunay_3d)


def convert_all_csv_to_delaunay(directory: str = ""):
    """Converts all CSV files in a directory to Delaunay3D surfaces."""
    # TODO - rename
    # TODO - specify outdir
    files = glob.glob(os.path.join(directory, "*.csv"))

    if len(files) == 0:
        print(f"No CSV files in directory {directory}.")
        sys.exit()

    for file in files:
        csv_to_delaunay(file)
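

# A usage sketch (illustrative only): convert every sensitivity CSV in a
# directory to a Delaunay3D .vtu surface. Requires ParaView's paraview.simple
# module to be importable; the directory name below is an example.
def _example_convert_csvs():  # pragma: no cover - illustrative sketch
    convert_all_csv_to_delaunay(directory="sensitivities")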
def merge_stls(
    stl_files: List[str], name: Optional[str] = None, verbosity: Optional[int] = 1
) -> str:
    """Merge STL files into a single file. Note that this function
    depends on having PyMesh installed.

    Parameters
    ----------
    stl_files : list[str]
        A list of the STL file names to be merged.

    name : str, optional
        The prefix of the combined STL filename output.

    verbosity : int, optional
        The function verbosity. The default is 1.

    Returns
    -------
    outfile : str
        The filename of the merged STL.
    """
    # Import PyMesh
    try:
        import pymesh  # type: ignore
    except ModuleNotFoundError:
        raise Exception(
            "Could not find pymesh. Please follow the "
            "installation instructions at "
            "https://pymesh.readthedocs.io/en/latest/installation.html"
        )

    if verbosity > 0:
        print("")

    # Load STL files
    if verbosity > 1:
        print("Loading STLs...")
    meshes = [pymesh.meshio.load_mesh(f) for f in stl_files]

    # Merge meshes
    if verbosity > 0:
        print("Merging STLs...")
    merged = pymesh.merge_meshes(meshes)

    # Resolve self-intersections
    if verbosity > 1:
        print("Resolving self intersections...")
    merged = pymesh.resolve_self_intersection(merged)

    # Remove degenerate triangles
    if verbosity > 1:
        print("Removing degenerate triangles...")
    merged, info = pymesh.remove_degenerated_triangles(merged)

    # Remove duplicate faces
    if verbosity > 1:
        print("Removing duplicated faces...")
    merged, info = pymesh.remove_duplicated_faces(merged)

    # Remove isolated vertices
    if verbosity > 1:
        print("Removing isolated vertices...")
    merged, info = pymesh.remove_isolated_vertices(merged)

    # Remove obtuse triangles
    if verbosity > 1:
        print("Removing obtuse triangles...")
    merged, info = pymesh.remove_obtuse_triangles(merged)

    # Write to file
    if verbosity > 1:
        print("Saving merged STL mesh...")
    name = "combined_mesh" if name is None else name
    outfile = f"{name}.stl"
    pymesh.meshio.save_mesh(outfile, merged)

    if verbosity > 0:
        print(f"Done. Merged STLs written to '{outfile}'.")

    return outfile
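

# A usage sketch for merge_stls (illustrative only; requires PyMesh). The
# input filenames below are hypothetical component STLs, such as those
# written by Vehicle.to_stl.
def _example_merge_stls():  # pragma: no cover - illustrative sketch
    outfile = merge_stls(
        stl_files=["body.stl", "wing.stl"], name="merged_vehicle"
    )
    print(f"Merged geometry written to '{outfile}'")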
def print_banner():
    """Prints the hypervehicle banner."""
    tprint("Hypervehicle", "tarty4")
    p = art("airplane2")
    print(f" {p} {p} {p} {p}")