Coverage for src / sdynpy / core / sdynpy_shape.py: 25%
646 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-11 16:22 +0000
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-11 16:22 +0000
1# -*- coding: utf-8 -*-
2"""
3Objects and procedures to handle operations on test or model shapes
5Shapes are generally defined as a set of coordinates or degrees of freedom and
6the respective displacements at each of those degrees of freedom.
7"""
8"""
9Copyright 2022 National Technology & Engineering Solutions of Sandia,
10LLC (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
11Government retains certain rights in this software.
13This program is free software: you can redistribute it and/or modify
14it under the terms of the GNU General Public License as published by
15the Free Software Foundation, either version 3 of the License, or
16(at your option) any later version.
18This program is distributed in the hope that it will be useful,
19but WITHOUT ANY WARRANTY; without even the implied warranty of
20MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21GNU General Public License for more details.
23You should have received a copy of the GNU General Public License
24along with this program. If not, see <https://www.gnu.org/licenses/>.
25"""
27import numpy as np
28import matplotlib.pyplot as plt
29import scipy.optimize as opt
30from . import sdynpy_colors
31from . import sdynpy_coordinate
32from . import sdynpy_array
33from . import sdynpy_data
34from . import sdynpy_system
35from ..signal_processing.sdynpy_integration import integrate_MCK
36from ..signal_processing.sdynpy_correlation import mac as mac_corr, matrix_plot
37from ..signal_processing.sdynpy_complex import collapse_complex_to_real
38from ..signal_processing.sdynpy_rotation import unit_magnitude_constraint, quaternion_to_rotation_matrix
39from ..fem.sdynpy_exodus import Exodus
40from ..fem.sdynpy_dof import by_condition_number, by_effective_independence
41from ..core.sdynpy_matrix import matrix
42from copy import deepcopy
43import pandas as pd
44from qtpy.QtWidgets import QDialog, QTableWidget, QDialogButtonBox, QVBoxLayout, QTableWidgetItem, QAbstractItemView
45from qtpy.QtCore import Qt
46import time
49class ShapeArray(sdynpy_array.SdynpyArray):
50 """Shape information specifying displacements at nodes.
52 Use the shape_array helper function to create the array.
53 """
54 @staticmethod
55 def real_data_dtype(ndof):
56 """
57 Data type of the underlying numpy structured array for real shapes
59 Parameters
60 ----------
61 ndof : int
62 Number of degrees of freedom in the shape array
64 Returns
65 -------
66 list
67 Numpy dtype that can be passed into any of the numpy array
68 constructors
70 """
71 return [
72 ('frequency', 'float64'),
73 ('damping', 'float64'),
74 ('coordinate', sdynpy_coordinate.CoordinateArray.data_dtype, (ndof,)),
75 ('shape_matrix', 'float64', (ndof,)),
76 ('modal_mass', 'float64'),
77 ('comment1', '<U80'),
78 ('comment2', '<U80'),
79 ('comment3', '<U80'),
80 ('comment4', '<U80'),
81 ('comment5', '<U80'),
82 ]
84 @staticmethod
85 def complex_data_dtype(ndof):
86 """
87 Data type of the underlying numpy structured array for complex shapes
89 Parameters
90 ----------
91 ndof : int
92 Number of degrees of freedom in the shape array
94 Returns
95 -------
96 list
97 Numpy dtype that can be passed into any of the numpy array
98 constructors
100 """
101 return [
102 ('frequency', 'float64'),
103 ('damping', 'float64'),
104 ('coordinate', sdynpy_coordinate.CoordinateArray.data_dtype, (ndof,)),
105 ('shape_matrix', 'complex128', (ndof,)),
106 ('modal_mass', 'complex128'),
107 ('comment1', '<U80'),
108 ('comment2', '<U80'),
109 ('comment3', '<U80'),
110 ('comment4', '<U80'),
111 ('comment5', '<U80'),
112 ]
114 def __new__(subtype, shape, ndof, shape_type='real', buffer=None, offset=0,
115 strides=None, order=None):
116 # Create the ndarray instance of our type, given the usual
117 # ndarray input arguments. This will call the standard
118 # ndarray constructor, but return an object of our type.
119 # It also triggers a call to InfoArray.__array_finalize__
120 # Data Types
121 # 'coordinates', - This will actually be an attribute
122 # 'shape_matrix',
123 # 'frequency',
124 # 'damping',
125 # 'modal_mass',
126 # 'comment1',
127 # 'comment2',
128 # 'comment3',
129 # 'comment4',
130 # 'comment5',
131 # 'numerator_type',
132 # 'denominator_type',
133 # 'load_case_number',
134 # 'mode_number',
135 # 'reference_coordinate',
136 # 'response_coordinate',
137 if shape_type == 'real':
138 data_dtype = ShapeArray.real_data_dtype(ndof)
139 elif shape_type == 'complex' or shape_type == 'imaginary' or shape_type == 'imag':
140 data_dtype = ShapeArray.complex_data_dtype(ndof)
141 obj = super(ShapeArray, subtype).__new__(subtype, shape, data_dtype,
142 buffer, offset, strides,
143 order)
144 # Finally, we must return the newly created object:
145 return obj
147 def is_complex(self):
148 """
149 Returns true if the shape is a complex shape, False if shape is real
151 Returns
152 -------
153 bool
154 True if the shape is complex
156 """
157 return np.iscomplexobj(self.shape_matrix)
    def __repr__(self):
        """Return a table summarizing each shape's frequency, damping, and DoF count."""
        # Header row of the summary table
        string_out = '{:>8s}, {:>10s}, {:>10s}, {:>10s}\n'.format(
            'Index', 'Frequency', 'Damping', '# DoFs')
        if self.size == 0:
            string_out += '----------- Empty -------------\n'
        # One row per shape; damping is stored as a ratio and shown as a percent.
        # ndenumerate yields (index, element) pairs over the shape array.
        for key, val in self.ndenumerate():
            string_out += '{:>8s}, {:>10.4f}, {:>9.4f}%, {:>10d}\n'.format(
                str(key), val.frequency, val.damping * 100, np.prod(val.dtype['coordinate'].shape))
        return string_out
169 # def __getattribute__(self,attr):
170 # return_obj = np.recarray.__getattribute__(self,attr)
171 # if attr == 'coordinate':
172 # return return_obj.view(sdynpy_coordinate.CoordinateArray)
173 # else:
174 # return return_obj
    def __getitem__(self, key):
        """
        Index the ShapeArray.

        Indexing with a CoordinateArray returns the shape coefficients at the
        requested degrees of freedom (with sign flips applied for negated
        coordinate directions), shaped as ``self.shape + key.shape``. Any
        other key is handled by standard numpy indexing, except that the
        'coordinate' field is returned as a CoordinateArray view.
        """
        if type(key) is sdynpy_coordinate.CoordinateArray:
            # Get the coordinate array
            coordinate_array = self.coordinate
            single_shape_coordinate_array = coordinate_array[(
                0,) * (coordinate_array.ndim - 1) + (slice(None),)]
            # Now check if the coordinates are consistent across the arrays
            if not np.all((coordinate_array[..., :] == single_shape_coordinate_array)):
                # If they aren't, raise a value error
                raise ValueError(
                    'Shape array must have equivalent coordinates for all shapes to index by coordinate')
            # Otherwise we will do an intersection
            # abs() drops the direction sign so +X and -X match the same DoF
            consistent_arrays, shape_indices, request_indices = np.intersect1d(
                abs(single_shape_coordinate_array), abs(key), assume_unique=False, return_indices=True)
            # Make sure that all of the keys are actually in the consistent array matrix
            if consistent_arrays.size != key.size:
                extra_keys = np.setdiff1d(abs(key), abs(single_shape_coordinate_array))
                if extra_keys.size == 0:
                    raise ValueError(
                        'Duplicate coordinate values requested. Please ensure coordinate indices are unique.')
                raise ValueError(
                    'Not all indices in requested coordinate array exist in the shape\n{:}'.format(str(extra_keys)))
            # Handle sign flipping
            # A requested -X DoF against a stored +X DoF negates the coefficient
            multiplications = key.flatten()[request_indices].sign(
            ) * single_shape_coordinate_array[shape_indices].sign()
            return_value = self.shape_matrix[..., shape_indices] * multiplications
            # Invert the indices to return the dofs in the correct order as specified in keys
            inverse_indices = np.zeros(request_indices.shape, dtype=int)
            inverse_indices[request_indices] = np.arange(len(request_indices))
            return_value = return_value[..., inverse_indices]
            # Reshape to the original coordinate array shape
            return_value = return_value.reshape(*return_value.shape[:-1], *key.shape)
            return return_value
        else:
            output = super().__getitem__(key)
            # Present the coordinate field as a CoordinateArray rather than a
            # plain structured array
            if isinstance(key, str) and key == 'coordinate':
                return output.view(sdynpy_coordinate.CoordinateArray)
            else:
                return output
216 def __mul__(self, val):
217 this = deepcopy(self)
218 if isinstance(val, ShapeArray):
219 val = val.shape_matrix
220 this.shape_matrix *= val
221 return this
223 @property
224 def ndof(self):
225 """The number of degrees of freedom in the shape"""
226 return self.dtype['coordinate'].shape[0]
228 # @property
229 # def U(self):
230 # if self.shape_matrix.ndim > 1:
231 # return self.shape_matrix.swapaxes(-2,-1)
232 # else:
233 # return self.shape_matrix[:,np.newaxis]
235 # @U.setter
236 # def U(self,value):
237 # raise AttributeError('Cannot set the U attribute directly. Set the shape_matrix parameter instead')
239 @property
240 def modeshape(self):
241 """The mode shape matrix with degrees of freedom as second to last axis"""
242 if self.shape_matrix.ndim > 1:
243 return self.shape_matrix.swapaxes(-2, -1)
244 else:
245 return self.shape_matrix[:, np.newaxis]
247 @modeshape.setter
248 def modeshape(self, value):
249 value = np.array(value)
250 if value.ndim > 1:
251 self.shape_matrix = value.swapaxes(-2, -1)
252 else:
253 self.shape_matrix = value
    @staticmethod
    def from_unv(unv_data_dict, combine=True):
        """
        Load ShapeArrays from universal file data from read_unv

        Parameters
        ----------
        unv_data_dict : dict
            Dictionary containing data from read_unv
        combine : bool, optional
            If True, return as a single ShapeArray

        Raises
        ------
        ValueError
            Raised if an unknown data characteristic is provided

        Returns
        -------
        output_arrays : ShapeArray
            Shapes read from unv
        """
        # Shape data lives in universal file dataset number 55
        try:
            datasets = unv_data_dict[55]
        except KeyError:
            # No dataset 55 present: return an empty result of the requested form
            if combine:
                return ShapeArray(0, 0)
            else:
                return []
        output_arrays = []
        for dataset in datasets:
            nodes = np.array([val for val in dataset.node_data_dictionary.keys()])
            # data_characteristic 2 -> 3 translational DoF per node,
            # data_characteristic 3 -> 6 DoF (translations + rotations) per node
            if dataset.data_characteristic == 2:
                coordinates = sdynpy_coordinate.coordinate_array(
                    nodes[:, np.newaxis], np.array([1, 2, 3])).flatten()
            elif dataset.data_characteristic == 3:
                coordinates = sdynpy_coordinate.coordinate_array(
                    nodes[:, np.newaxis], np.array([1, 2, 3, 4, 5, 6])).flatten()
            else:
                raise ValueError('Cannot handle shapes besides 3Dof and 6Dof')
            data_matrix = np.array([dataset.node_data_dictionary[key] for key in nodes]).flatten()
            # analysis_type 2 -> normal (real) modes; 3 -> complex modes
            if dataset.analysis_type == 2:
                this_shape = shape_array(
                    coordinate=coordinates,
                    shape_matrix=data_matrix,
                    frequency=dataset.frequency,
                    damping=dataset.modal_viscous_damping,
                    modal_mass=dataset.modal_mass,
                    comment1=dataset.idline1,
                    comment2=dataset.idline2,
                    comment3=dataset.idline3,
                    comment4=dataset.idline4,
                    comment5=dataset.idline5,
                )
            elif dataset.analysis_type == 3:
                this_shape = shape_array(
                    coordinate=coordinates,
                    shape_matrix=data_matrix,
                    # Natural frequency (Hz) and damping ratio recovered from
                    # the complex eigenvalue lambda = -zeta*wn + j*wd
                    frequency=np.abs(dataset.eigenvalue)/(2*np.pi),
                    damping=-np.real(dataset.eigenvalue)/np.abs(dataset.eigenvalue),
                    modal_mass=dataset.modal_a,
                    comment1=dataset.idline1,
                    comment2=dataset.idline2,
                    comment3=dataset.idline3,
                    comment4=dataset.idline4,
                    comment5=dataset.idline5,
                )
            else:
                raise NotImplementedError('Analysis Type {:} for UFF Dataset 55 not implemented yet'.format(dataset.analysis_type))
            output_arrays.append(this_shape)
        if combine:
            # Stack the per-dataset shapes into one ShapeArray
            output_arrays = np.concatenate([val[np.newaxis] for val in output_arrays])
        return output_arrays
330 from_uff = from_unv
332 # @classmethod
333 # def from_exodus(cls,exo,x_disp = 'DispX',y_disp = 'DispY',z_disp = 'DispZ',timesteps = None):
334 # if isinstance(exo,Exodus):
335 # exo = exo.load_into_memory(close=False,variables = [x_disp,y_disp,z_disp],timesteps = None, blocks=[])
336 # node_ids = np.arange(exo.nodes.coordinates.shape[0])+1 if exo.nodes.node_num_map is None else exo.nodes.node_num_map
337 # x_var = [var for var in exo.nodal_vars if var.name.lower() == x_disp.lower()][0].data[slice(timesteps) if timesteps is None else timesteps]
338 # y_var = [var for var in exo.nodal_vars if var.name.lower() == y_disp.lower()][0].data[slice(timesteps) if timesteps is None else timesteps]
339 # z_var = [var for var in exo.nodal_vars if var.name.lower() == z_disp.lower()][0].data[slice(timesteps) if timesteps is None else timesteps]
340 # frequencies = exo.time[slice(timesteps) if timesteps is None else timesteps]
341 # shape_matrix = np.concatenate((x_var,y_var,z_var),axis=-1)
342 # coordinates = sdynpy_coordinate.coordinate_array(node_ids,np.array((1,2,3))[:,np.newaxis]).flatten()
343 # return shape_array(coordinates,shape_matrix,frequencies)
    @classmethod
    def from_exodus(cls, exo, x_disp='DispX', y_disp='DispY', z_disp='DispZ', x_rot=None, y_rot=None, z_rot=None, timesteps=None):
        """
        Reads shape data from displacements in an Exodus file

        Parameters
        ----------
        exo : Exodus or ExodusInMemory
            The exodus data from which shapes will be created.
        x_disp : str, optional
            String denoting the nodal variable in the exodus file from which
            the X-direction displacement should be read. The default is 'DispX'.
            Specify `None` if no x_disp is to be read.
        y_disp : str, optional
            String denoting the nodal variable in the exodus file from which
            the Y-direction displacement should be read. The default is 'DispY'.
            Specify `None` if no y_disp is to be read.
        z_disp : str, optional
            String denoting the nodal variable in the exodus file from which
            the Z-direction displacement should be read. The default is 'DispZ'.
            Specify `None` if no z_disp is to be read.
        x_rot : str, optional
            String denoting the nodal variable in the exodus file from which
            the X-direction rotation should be read. The default is `None` which
            results in the X-direction rotation not being read. Typically this
            would be set to 'RotX' if rotational values are desired.
        y_rot : str, optional
            String denoting the nodal variable in the exodus file from which
            the Y-direction rotation should be read. The default is `None` which
            results in the Y-direction rotation not being read. Typically this
            would be set to 'RotY' if rotational values are desired.
        z_rot : str, optional
            String denoting the nodal variable in the exodus file from which
            the Z-direction rotation should be read. The default is `None` which
            results in the Z-direction rotation not being read. Typically this
            would be set to 'RotZ' if rotational values are desired.
        timesteps : iterable, optional
            A list of timesteps from which data should be read. The default is
            `None`, which reads all timesteps.

        Returns
        -------
        ShapeArray
            Shape data from the exodus file
        """
        if isinstance(exo, Exodus):
            # An on-disk Exodus file: load only the requested variables into memory
            variables = [v for v in [x_disp, y_disp, z_disp, x_rot, y_rot, z_rot] if v is not None]
            exo = exo.load_into_memory(close=False, variables=variables, timesteps=None, blocks=[])
        # Fall back to sequential 1-based node ids when no node number map exists
        node_ids = np.arange(
            exo.nodes.coordinates.shape[0]) + 1 if exo.nodes.node_num_map is None else exo.nodes.node_num_map

        # slice(None) selects all timesteps when none are specified
        shape_matrix = np.empty(
            [exo.time[slice(timesteps) if timesteps is None else timesteps].shape[0], 0])
        coord_nums = np.empty([0], dtype=int)

        # Direction numbers 1-6 correspond to X, Y, Z displacement then rotation
        variables = [x_disp, y_disp, z_disp, x_rot, y_rot, z_rot]
        for counter, variable in enumerate(variables):
            if variable is not None:
                # Case-insensitive match on the nodal variable name
                var = [var for var in exo.nodal_vars if var.name.lower() == variable.lower(
                )][0].data[slice(timesteps) if timesteps is None else timesteps]
                shape_matrix = np.append(shape_matrix, var, axis=-1)
                coord_nums = np.append(coord_nums, [counter + 1])

        # Each exodus timestep becomes one shape; its time value is used as frequency
        frequencies = exo.time[slice(timesteps) if timesteps is None else timesteps]
        coordinates = sdynpy_coordinate.coordinate_array(
            node_ids, coord_nums[:, np.newaxis]).flatten()

        return shape_array(coordinates, shape_matrix, frequencies)
415 @classmethod
416 def from_imat_struct(cls, imat_shp_struct):
417 """
418 Constructs a ShapeArray from an imat_shp class saved to a Matlab structure
420 In IMAT, a structure can be created from an `imat_shp` by using the get()
421 function. This can then be saved to a .mat file and loaded using
422 `scipy.io.loadmat`. The output from loadmat can be passed into this function
424 Parameters
425 ----------
426 imat_fem_struct : np.ndarray
427 structure from loadmat containing data from an imat_shp
429 Returns
430 -------
431 ShapeArray
432 ShapeArray constructed from the data in the imat structure
434 """
435 frequency = imat_shp_struct['Frequency'][0, 0]
436 nodes = imat_shp_struct['Node'][0, 0]
437 nodes = nodes.reshape(nodes.shape[0], 1, *frequency.shape)
438 doftype = np.array(imat_shp_struct['DOFType'][0, 0].tolist())
439 doftype = doftype.reshape(*doftype.shape[:-1])
440 directions = (np.array([1, 2, 3]) if np.all(
441 doftype == '3DOF') else np.array(1, 2, 3, 4, 5, 6))
442 directions = directions.reshape(1, -1, *([1] * (nodes.ndim - 2)))
443 coordinates = np.moveaxis(sdynpy_coordinate.coordinate_array(nodes,
444 directions
445 ).reshape(-1, *frequency.shape), 0, -1)
446 shape_matrix = np.moveaxis(imat_shp_struct['Shape'][0, 0].reshape(
447 coordinates.shape[-1], *frequency.shape), 0, -1)
448 comment_1 = np.array(imat_shp_struct['IDLine1'][0, 0].tolist())
449 if comment_1.size > 0:
450 comment_1 = comment_1.reshape(*comment_1.shape[:-1])
451 else:
452 comment_1 = np.zeros(comment_1.shape[:-1], dtype='<U1')
453 comment_2 = np.array(imat_shp_struct['IDLine2'][0, 0].tolist())
454 if comment_2.size > 0:
455 comment_2 = comment_1.reshape(*comment_2.shape[:-1])
456 else:
457 comment_2 = np.zeros(comment_2.shape[:-1], dtype='<U1')
458 comment_3 = np.array(imat_shp_struct['IDLine3'][0, 0].tolist())
459 if comment_3.size > 0:
460 comment_3 = comment_3.reshape(*comment_3.shape[:-1])
461 else:
462 comment_3 = np.zeros(comment_3.shape[:-1], dtype='<U1')
463 comment_4 = np.array(imat_shp_struct['IDLine4'][0, 0].tolist())
464 if comment_4.size > 0:
465 comment_4 = comment_4.reshape(*comment_4.shape[:-1])
466 else:
467 comment_4 = np.zeros(comment_4.shape[:-1], dtype='<U1')
468 comment_5 = np.array(imat_shp_struct['IDLine5'][0, 0].tolist())
469 if comment_5.size > 0:
470 comment_5 = comment_5.reshape(*comment_5.shape[:-1])
471 else:
472 comment_5 = np.zeros(comment_5.shape[:-1], dtype='<U1')
473 modal_mass = imat_shp_struct['ModalMassReal'][0, 0] + \
474 (0 if np.isrealobj(shape_matrix) else 1j * imat_shp_struct['ModalMassImag'][0, 0])
475 damping = imat_shp_struct['Damping'][0, 0] if np.isrealobj(shape_matrix) else (
476 imat_shp_struct['ModalDampingReal'][0, 0] + 1j * imat_shp_struct['ModalDampingImag'][0, 0])
477 return shape_array(coordinates, shape_matrix, frequency, damping, modal_mass,
478 comment_1, comment_2, comment_3, comment_4, comment_5)
    def compute_frf(self, frequencies, responses=None, references=None, displacement_derivative=2):
        """
        Computes FRFs from shape data

        Parameters
        ----------
        frequencies : iterable
            A list of frequencies to compute the FRF at.
        responses : CoordinateArray, optional
            Degrees of freedom to use as responses. The default is to compute
            FRFs at all degrees of freedom in the shape.
        references : CoordinateArray, optional
            Degrees of freedom to use as references. The default is to compute
            FRFs using the response degrees of freedom also as references.
        displacement_derivative : int, optional
            The derivative to use when computing the FRFs. 0 corresponds to
            displacement FRFs, 1 corresponds to velocity, and 2 corresponds to
            acceleration. The default is 2.

        Returns
        -------
        output_data : TransferFunctionArray
            A transfer function array containing the specified references and
            responses.
        """
        flat_self = self.flatten()
        if responses is None:
            # Default: use the full DoF set of the first shape as responses
            all_coords = flat_self.coordinate
            dim = all_coords.ndim
            index = (0,) * (dim - 1)
            responses = all_coords[index]
        else:
            responses = responses.flatten()
        if references is None:
            references = responses
        else:
            references = references.flatten()
        damping_ratios = flat_self.damping
        angular_natural_frequencies = flat_self.frequency*2*np.pi
        angular_frequencies = frequencies*2*np.pi
        response_shape_matrix = flat_self[responses]  # nm x no
        reference_shape_matrix = flat_self[references]  # nm x ni
        modal_mass = flat_self.modal_mass
        if self.is_complex():
            # Complex modes: sum contributions from each pole and its conjugate
            poles = -damping_ratios*angular_natural_frequencies + 1j*np.sqrt(1-damping_ratios**2)*angular_natural_frequencies
            denominator = 1/(modal_mass*(1j*angular_frequencies[:, np.newaxis] - poles))  # nf x nm
            denominator_conj = 1/(modal_mass*(1j*angular_frequencies[:, np.newaxis] - poles.conjugate()))  # nf x nm
            frf_ordinate = (np.einsum('mo,mi,fm->oif', response_shape_matrix, reference_shape_matrix, denominator) +
                            np.einsum('mo,mi,fm->oif', response_shape_matrix.conjugate(), reference_shape_matrix.conjugate(), denominator_conj))
        else:
            # Real modes: standard single-DoF denominator per mode in the
            # modal superposition (m, o, i, f = mode, output, input, frequency)
            denominator = 1/(modal_mass *
                             (-angular_frequencies[:, np.newaxis]**2
                              + angular_natural_frequencies**2
                              + 2j*damping_ratios*angular_frequencies[:, np.newaxis]*angular_natural_frequencies))

            frf_ordinate = np.einsum('mo,mi,fm->oif',
                                     response_shape_matrix,
                                     reference_shape_matrix,
                                     denominator,
                                     )
        # Modify for data type
        # Differentiation in the frequency domain is multiplication by (j*omega)
        if displacement_derivative > 0:
            frf_ordinate *= (1j*angular_frequencies)**displacement_derivative
        # Now package into a sdynpy array
        output_data = sdynpy_data.data_array(sdynpy_data.FunctionTypes.FREQUENCY_RESPONSE_FUNCTION,
                                             frequencies, frf_ordinate,
                                             sdynpy_coordinate.outer_product(responses, references))

        return output_data
552 def reduce(self, nodelist_or_coordinate_array):
553 """
554 Reduces the shape to the degrees of freedom specified
556 Parameters
557 ----------
558 nodelist_or_coordinate_array : iterable or CoordinateArray
559 Consists of either a list of node ids or a CoordinateArray. The
560 ShapeArray will be reduced to only the degrees of freedom specified
562 Returns
563 -------
564 ShapeArray
565 A reduced ShapeArray
567 """
568 if isinstance(nodelist_or_coordinate_array, sdynpy_coordinate.CoordinateArray):
569 coord_array = nodelist_or_coordinate_array
570 else:
571 coord_array = np.unique(self.coordinate)
572 coord_array = coord_array[np.isin(coord_array.node, nodelist_or_coordinate_array)]
573 shape_matrix = self[coord_array.flatten()]
574 return shape_array(coord_array.flatten(), shape_matrix, self.frequency, self.damping,
575 self.modal_mass, self.comment1, self.comment2, self.comment3,
576 self.comment4, self.comment5)
578 def repack(self, coefficients):
579 """
580 Creates new shapes by linearly combining existing shapes
582 Parameters
583 ----------
584 coefficients : np.ndarray
585 coefficient matrix that will be multiplied by the shapes. This
586 should have number of rows equal to the number of shapes that are
587 going to be combined. The number of columns will specify the
588 number of shapes that will result
590 Returns
591 -------
592 ShapeArray
593 ShapeArray consisting of linear combinations of the original
594 ShapeArray
596 """
597 coordinates = np.unique(self.coordinate)
598 new_shape = self.flatten()[coordinates].T @ coefficients
599 return shape_array(coordinates, new_shape.T)
601 def expand(self, initial_geometry, expansion_geometry, expansion_shapes,
602 node_id_map=None, expansion_coordinates=None, return_coefficients=False):
603 """
604 Perform SEREP expansion on shape data
606 Parameters
607 ----------
608 initial_geometry : Geometry
609 The initial or "Test" Geometry, corresponding to the initial
610 shapes (self)
611 expansion_geometry : Geometry
612 The expanded or "FEM" Geometry, corresponding to the shapes that
613 will be expanded
614 expansion_shapes : ShapeArray
615 Shapes defined on the expanded geometry, which will be used
616 to expand the initial shapes
617 node_id_map : id_map, optional
618 If the initial and expanded geometry or shapes do not have common
619 node ids, an id_map can be specified to map the finite element
620 node ids to test node ids. The default is None, which means no
621 mapping will occur, and the shapes have common id numbers.
622 expansion_coordinates : CoordinateArray, optional
623 Degrees of freedom in the test shapes to use in the expansion. The
624 default is None, which results in all degrees of freedom being used
625 for expansion
626 return_coefficients : bool, optional
627 If True, the coefficients used in the expansion will be returned
628 along with the expanded shapes. The default is False.
630 Returns
631 -------
632 ShapeArray
633 The original shapes expanded to the expansion_geometry
634 np.ndarray
635 The coefficients used to perform the expansion, only returned if
636 return_coefficients is True
638 """
639 expansion_shape_in_original_basis = expansion_shapes.transform_coordinate_system(
640 expansion_geometry, initial_geometry, node_id_map)
641 if expansion_coordinates is None:
642 expansion_coordinates = np.intersect1d(
643 self.coordinate, expansion_shape_in_original_basis.coordinate)
644 expansion_shape_matrix = expansion_shape_in_original_basis[expansion_coordinates].T
645 original_shape_matrix = self[expansion_coordinates].T
646 coefficients = np.linalg.lstsq(expansion_shape_matrix, original_shape_matrix)[0]
647 expanded_shapes = expansion_shapes.repack(coefficients)
648 expanded_shapes.frequency = self.frequency
649 expanded_shapes.damping = self.damping
650 expanded_shapes.modal_mass = self.modal_mass
651 expanded_shapes.comment1 = self.comment1
652 expanded_shapes.comment2 = self.comment2
653 expanded_shapes.comment3 = self.comment3
654 expanded_shapes.comment4 = self.comment4
655 expanded_shapes.comment5 = self.comment5
656 if return_coefficients:
657 return expanded_shapes, coefficients
658 else:
659 return expanded_shapes
    def transform_coordinate_system(self, original_geometry, new_geometry, node_id_map=None, rotations=False,
                                    missing_dofs_are_zero=False):
        """
        Performs coordinate system transformations on the shape

        Parameters
        ----------
        original_geometry : Geometry
            The Geometry in which the shapes are currently defined
        new_geometry : Geometry
            The Geometry in which the shapes are desired to be defined
        node_id_map : id_map, optional
            If the original and new geometries do not have common
            node ids, an id_map can be specified to map the original geometry
            node ids to new geometry node ids. The default is None, which means no
            mapping will occur, and the geometries have common id numbers.
        rotations : bool, optional
            If True, also transform rotational degrees of freedom. The default
            is False.
        missing_dofs_are_zero : bool, optional
            If False, any degree of freedom required for the transformation that
            is not provided will result in a ValueError. If True, these missing
            degrees of freedom will simply be appended to the original shape
            matrix as zeros. Default is False.

        Returns
        -------
        ShapeArray
            A ShapeArray that can now be plotted with the new geometry
        """
        if node_id_map is not None:
            # Reduce to the mapped nodes and renumber them into the new ids
            original_geometry = original_geometry.reduce(node_id_map.from_ids)
            original_geometry.node.id = node_id_map(original_geometry.node.id)
            self = self.reduce(node_id_map.from_ids)
            self.coordinate.node = node_id_map(self.coordinate.node)
        # Only nodes present in both geometries and in the shape can transform
        common_nodes = np.intersect1d(np.intersect1d(original_geometry.node.id, new_geometry.node.id),
                                      np.unique(self.coordinate.node))
        coordinates = sdynpy_coordinate.coordinate_array(
            common_nodes[:, np.newaxis], [1, 2, 3, 4, 5, 6] if rotations else [1, 2, 3])
        # Per-node deflection transformation matrices for each geometry
        transform_from_original = original_geometry.global_deflection(coordinates)
        transform_to_new = new_geometry.global_deflection(coordinates)
        if missing_dofs_are_zero:
            # Find any coordinates that are not in shapes
            shape_coords = np.unique(abs(self.coordinate))
            coords_not_in_shape = coordinates[~np.isin(abs(coordinates), shape_coords)]
            # Create a new shape array and set the coefficients to zero
            append_shape_matrix = np.zeros(self.shape+(coords_not_in_shape.size,),
                                           dtype=self.shape_matrix.dtype)
            # Create a shape
            append_shape = shape_array(coords_not_in_shape, append_shape_matrix)
            # Append it
            self = ShapeArray.concatenate_dofs((self, append_shape))
        shape_matrix = self[coordinates].reshape(*self.shape, *coordinates.shape)
        # TODO: I think there might be a bug in how rotations are handled...
        # For each node n: rotate the deflection out of the original frame
        # (transform_from_original transposed) and into the new frame
        new_shape_matrix = np.einsum('nij,nkj,...nk->...ni', transform_to_new,
                                     transform_from_original, shape_matrix)
        return shape_array(coordinates.flatten(), new_shape_matrix.reshape(*self.shape, -1), self.frequency, self.damping, self.modal_mass,
                           self.comment1, self.comment2, self.comment3, self.comment4, self.comment5)
721 def reduce_for_comparison(self, comparison_shape, node_id_map=None):
722 if node_id_map is not None:
723 self = self.reduce(node_id_map.from_ids)
724 self.coordinate.node = node_id_map(self.coordinate.node)
725 common_dofs = np.intersect1d(abs(self.coordinate), abs(comparison_shape.coordinate))
726 reduced_self = self.reduce(common_dofs)
727 reduced_comparison = comparison_shape.reduce(common_dofs)
728 return reduced_self, reduced_comparison
730 def plot_frequency(self, interp_abscissa, interp_ordinate, ax=None, plot_kwargs={'color': 'k', 'marker': 'x', 'linestyle': "None"}):
731 """
732 Plots the frequencies of the shapes on curves of a 2D plot
734 Parameters
735 ----------
736 interp_abscissa : np.ndarray
737 The abscissa used to interpolate the Y-position of the frequency
738 mark
739 interp_ordinate : np.ndarray
740 The ordinate used to interpolate the Y-postiion of the frequency
741 mark
742 ax : matplotlib axes, optional
743 Axes on which to plot the frequency marks. The default is None,
744 which creates a new window
745 plot_kwargs : dict, optional
746 Additional arguments to pass to the matplotlib plot command. The
747 default is {'color':'k','marker':'x','linestyle':"None"}.
749 Returns
750 -------
751 ax : matplotlib axes, optional
752 Axes on which the frequency marks were plotted
754 """
755 if ax is None:
756 fig, ax = plt.subplots()
757 ys = np.interp(self.frequency, interp_abscissa, interp_ordinate)
758 ax.plot(self.frequency.flatten(), ys.flatten(), **plot_kwargs)
759 return ax
    def to_real(self, force_angle=-np.pi/4, **kwargs):
        """
        Creates real shapes from complex shapes by collapsing the complexity

        Parameters
        ----------
        force_angle : float
            Angle to force the complex collapsing to real, by default it is
            -pi/4 (-45 degrees). To allow other angles, use None for this
            argument.
        **kwargs : various
            Extra arguments to pass to collapse_complex_to_real

        Returns
        -------
        ShapeArray
            Real shapes created from the original complex shapes
        """
        # Already real: nothing to collapse
        if not self.is_complex():
            return self.copy()
        matrix = collapse_complex_to_real(self.shape_matrix,
                                          force_angle=force_angle,
                                          **kwargs)
        # Rescale from complex-mode to real-mode normalization using the
        # damped natural frequency wd = wn*sqrt(1 - zeta^2)
        damped_natural_frequency = (self.frequency*2*np.pi*np.sqrt(1-self.damping**2)).real[..., np.newaxis]
        matrix *= np.sqrt(2*damped_natural_frequency)
        # Damping and modal mass become real quantities for real shapes
        return shape_array(self.coordinate, matrix.real, self.frequency, self.damping.real,
                           self.modal_mass.real, self.comment1, self.comment2, self.comment3,
                           self.comment4, self.comment5)
791 def to_complex(self):
792 """
793 Creates complex shapes from real shapes
795 Returns
796 -------
797 ShapeArray
798 Complex shapes compute from the real shape coefficients
800 """
801 if self.is_complex():
802 return self.copy()
803 matrix = self.shape_matrix-1j*self.shape_matrix
804 damped_natural_frequency = (self.frequency*2*np.pi*np.sqrt(1-self.damping**2)).real[..., np.newaxis]
805 matrix /= 2*np.sqrt(damped_natural_frequency)
806 return shape_array(self.coordinate, matrix, self.frequency, self.damping.real,
807 self.modal_mass.real, self.comment1, self.comment2, self.comment3,
808 self.comment4, self.comment5)
    def normalize(self,system_or_matrix, return_modal_matrix = False):
        """
        Computes A-normalized or mass-normalized shapes

        Parameters
        ----------
        system_or_matrix : System or np.ndarray
            A System object or a mass matrix for real modes or A-matrix for
            complex modes.
        return_modal_matrix : bool, optional
            If true, it will return the modal mass or modal-A matrix computed
            from the normalized mode shapes. The default is False.

        Returns
        -------
        ShapeArray
            A copy of the original shape array with normalized shape coefficients
        """
        if self.is_complex():
            # Complex modes are normalized against the state-space A matrix
            # A = [[0, M], [M, C]] built from the system's mass and damping
            if isinstance(system_or_matrix,sdynpy_system.System):
                Z = np.zeros(system_or_matrix.M.shape)
                A = np.block([[ Z, system_or_matrix.M],
                              [system_or_matrix.M, system_or_matrix.C]])
                shapes = self[system_or_matrix.coordinate].T
            else:
                A = system_or_matrix
                shapes = self.modeshape
            # Eigenvalues lambda_r = -zeta*wn + j*wn*sqrt(1 - zeta^2)
            omega_r = self.frequency*2*np.pi
            zeta_r = self.damping
            lam = -omega_r*zeta_r + 1j*omega_r*np.sqrt(1-zeta_r**2)
            # State-space eigenvector E = [lambda*phi; phi]
            E = np.concatenate((lam*shapes,shapes),axis=-2)
            # Scale so the diagonal of E^T A E becomes unity
            scale_factors = 1/np.sqrt(np.einsum('ji,jk,ki->i',E,A,E))
            if return_modal_matrix:
                modal_matrix = np.einsum('ji,jk,kl->il',E*scale_factors,A,E*scale_factors)
        else:
            # Real modes are normalized against the mass matrix
            if isinstance(system_or_matrix,sdynpy_system.System):
                M = system_or_matrix.M
                shapes = self[system_or_matrix.coordinate].T
            else:
                M = system_or_matrix
                shapes = self.modeshape
            # Scale so the diagonal of phi^T M phi becomes unity
            scale_factors = 1/np.sqrt(np.einsum('ji,jk,ki->i',shapes,M,shapes))
            if return_modal_matrix:
                modal_matrix = np.einsum('ji,jk,kl->il',shapes*scale_factors,M,shapes*scale_factors)
        output_shape = self.copy()
        output_shape *= scale_factors
        if return_modal_matrix:
            return output_shape, modal_matrix
        else:
            return output_shape
    def write_to_unv(self, filename, specific_data_type=12, load_case_number=0
                     ):
        """
        Writes shape data to a universal file

        Parameters
        ----------
        filename : str
            Filename to which the geometry will be written. If None,
            unv data will be returned instead, similar to that
            obtained from the readunv function in sdynpy
        specific_data_type : int, optional
            Integer specifying the type of data in the shape. The default is 12,
            which denotes acceleration shapes.
        load_case_number : int, optional
            The load case number. The default is 0.

        Raises
        ------
        NotImplementedError
            Raised if a complex mode has a significantly complex modal mass,
            as writing such modes is not implemented yet

        Returns
        -------
        shape_unv : List
            A list of Sdynpy_UFF_Dataset_55 objects, only if filename is None

        """
        from ..fileio.sdynpy_uff import dataset_55
        # First check if any rotations are in the shape
        is_six_dof = np.any(np.abs(self.coordinate.direction) > 3)
        is_complex = np.iscomplexobj(self.shape_matrix)
        nodes = np.unique(self.coordinate.node)
        # Dataset 55 stores either 3 or 6 dofs per node
        directions = np.arange(6) + 1 if is_six_dof else np.arange(3) + 1
        full_coord_array = sdynpy_coordinate.coordinate_array(nodes[:, np.newaxis], directions)
        flat_self = self.flatten()
        # Dense (node x direction x shape) coefficient matrix; dofs not present
        # in the shapes remain zero
        shape_matrix = np.zeros(full_coord_array.shape + flat_self.shape,
                                dtype=self.shape_matrix.dtype)
        for index, coord in full_coord_array.ndenumerate():
            try:
                shape_matrix[index] = flat_self[coord].flatten()
            except ValueError:
                pass  # This means it's a uniaxial sensor and no triaxial dofs exist so we'll leave it at zero
        # Go through shapes and create the unv structures
        shape_unv = []
        for index, shape in flat_self.ndenumerate():
            # Create node dictionary mapping node id -> coefficients for this shape
            node_dict = {node_coords[0].node: node_shape_matrix[..., index[0]]
                         for node_coords, node_shape_matrix in zip(full_coord_array, shape_matrix)}
            # Create the integer data
            ints = [load_case_number, index[0] + 1]
            # Create the real data
            if not is_complex:
                reals = [shape.frequency, shape.modal_mass, shape.damping, 0.0]
            else:
                # Only handle modal masses that are essentially real
                if shape.modal_mass.real*1e-8 < shape.modal_mass.imag:
                    raise NotImplementedError("I don't know what modal mass is for a complex mode")
                reals = [(-shape.damping * shape.frequency * 2 * np.pi).real,  # Real part of eigenvalue
                         # Imaginary part of eigenvalue
                         (shape.frequency * 2 * np.pi * np.sqrt(1 - shape.damping**2)).real,
                         shape.modal_mass.real,  # Real modal mass
                         shape.modal_mass.imag,  # Imaginary modal mass
                         0.0,
                         0.0,
                         ]  # I don't actually know what modal a and modal b are...
            shape_unv.append(dataset_55.Sdynpy_UFF_Dataset_55(
                shape.comment1, shape.comment2, shape.comment3, shape.comment4, shape.comment5,
                1, 3 if is_complex else 2, 3 if is_six_dof else 2, specific_data_type, 5 if is_complex else 2,
                ints, reals, node_dict))
        if filename is None:
            return shape_unv
        else:
            # Write each dataset bracketed by the UFF -1 delimiters
            with open(filename, 'w') as f:
                for dataset in shape_unv:
                    f.write(' -1\n')
                    f.write(' 55\n')
                    f.write(dataset.write_string())
                    f.write(' -1\n')
941 @staticmethod
942 def concatenate_dofs(shape_arrays):
943 """
944 Combines the degrees of freedom from multiple shapes into one set of
945 shapes
947 Parameters
948 ----------
949 shape_arrays : list of ShapeArray
950 List of ShapeArray objects to combine in to one set of shapes
952 Returns
953 -------
954 ShapeArray
955 ShapeArray object containing degrees of freedom from all input shapes
957 """
958 dof_lists = [np.unique(shape.coordinate) for shape in shape_arrays]
959 shape_matrices = [shape_arrays[i][dofs] for i, dofs in enumerate(dof_lists)]
961 all_dofs = np.concatenate(dof_lists, axis=-1)
962 all_shape_matrices = np.concatenate(shape_matrices, axis=-1)
964 # Create new shapes
965 return shape_array(all_dofs, all_shape_matrices, shape_arrays[0].frequency,
966 shape_arrays[0].damping, shape_arrays[0].modal_mass,
967 shape_arrays[0].comment1, shape_arrays[0].comment2,
968 shape_arrays[0].comment3, shape_arrays[0].comment4,
969 shape_arrays[0].comment5)
971 @staticmethod
972 def overlay_shapes(geometries, shapes, color_override=None):
973 """
974 Combines several shapes and geometries for comparitive visualization
977 Parameters
978 ----------
979 geometries : Iterable of Geometry
980 A list of Geometry objects that will be combined into a single
981 Geometry
982 shapes : Iterable of ShapeArray
983 A list of ShapeArray objects that will be combined into a single
984 ShapeArray
985 color_override : iterable, optional
986 An iterble of integers specifying colors, which will override the
987 existing geometry colors. This should have the same length as the
988 `geometries` input. The default is None, which keeps the original
989 geometry colors.
991 Returns
992 -------
993 new_geometry : Geometry
994 A geometry consisting of a combination of the specified geometries
995 new_shape : ShapeArray
996 A ShapeArray consisting of a combination of the specified ShapeArrays
998 """
999 from .sdynpy_geometry import Geometry
1000 new_geometry, node_offset = Geometry.overlay_geometries(
1001 geometries, color_override, True)
1003 new_shapes = [shape.copy() for shape in shapes]
1004 for i in range(len(new_shapes)):
1005 new_shapes[i].coordinate.node += node_offset * (i + 1)
1006 new_shape = ShapeArray.concatenate_dofs(new_shapes)
1007 return new_geometry, new_shape
1009 def optimize_degrees_of_freedom(self, sensors_to_keep,
1010 group_by_node=False, method='ei'):
1011 """
1012 Creates a reduced set of shapes using optimal degrees of freedom
1014 Parameters
1015 ----------
1016 sensors_to_keep : int
1017 Number of sensors to keep
1018 group_by_node : bool, optional
1019 If True, group shape degrees of freedom a the same node as one
1020 sensor, like a triaxial accelerometer. The default is False.
1021 method : str, optional
1022 'ei' for effective independence or 'cond' for condition number.
1023 The default is 'ei'.
1025 Returns
1026 -------
1027 ShapeArray
1028 The set of shapes with a reduced set of degrees of freedom that are
1029 optimally chosen.
1031 """
1032 if group_by_node:
1033 nodes = np.unique(self.coordinate.node)
1034 directions = np.unique(abs(self.coordinate).direction)
1035 coordinate_array = sdynpy_coordinate.coordinate_array(nodes[:, np.newaxis], directions)
1036 else:
1037 coordinate_array = np.unique(abs(self.coordinate))
1038 shape_matrix = self[coordinate_array]
1039 shape_matrix = np.moveaxis(shape_matrix, 0, -1)
1040 if method == 'ei':
1041 indices = by_effective_independence(sensors_to_keep, shape_matrix)
1042 elif method == 'cond':
1043 indices = by_condition_number(sensors_to_keep, shape_matrix)
1044 else:
1045 raise ValueError('Invalid `method`, must be one of "ei" or "cond"')
1046 coordinate_array = coordinate_array[indices]
1047 return self.reduce(coordinate_array)
    def mac(self):
        """Computes the modal assurance criterion matrix of the shapes

        Returns
        -------
        mac
            The modal assurance criterion matrix
        """
        # Delegates to the module-level mac() function, which computes the
        # AutoMAC when given a single ShapeArray
        return mac(self)
    def plot_mac(self, *args, **kwargs):
        """Plots the mac matrix of the shapes

        Extra positional and keyword arguments are passed through to
        matrix_plot.
        """
        matrix_plot(self.mac(), *args, **kwargs)
    def system(self):
        """
        Create system matrices from the shapes

        This will create a System object with modal mass, stiffness, and
        damping matrices, with the mode shape matrix as the transformation
        to physical coordinates

        Raises
        ------
        NotImplementedError
            Raised if complex modes are used

        Returns
        -------
        System
            A System object containing modal mass, stiffness, and damping
            matrices, with the mode shape matrix as the transformation
            to physical coordinates

        """
        if self.is_complex():
            raise NotImplementedError('Complex Modes Not Implemented Yet')
        else:
            # Work with a flattened view so the shapes form a 1D list of modes
            self = self.ravel()
            # It can take a long time to sort through coordinates, so we should check if
            # the coordinates are identical across all shapes already
            if np.all(self[0].coordinate == self.coordinate):
                coordinates = self[0].coordinate
                shape_matrix = self.shape_matrix
            else:
                coordinates = np.unique(self.coordinate)
                shape_matrix = self[coordinates]
            # Modal matrices: M_r, K_r = omega^2 * M_r, C_r = 2*zeta*omega * M_r
            return sdynpy_system.System(
                coordinates,
                np.diag(self.modal_mass),
                np.diag((2 * np.pi * self.frequency) ** 2 * self.modal_mass),
                np.diag(2 * (2 * np.pi * self.frequency) * self.damping * self.modal_mass),
                shape_matrix.T,
            )
    @staticmethod
    def shape_alignment(shape_1, shape_2, node_id_map=None):
        """
        Computes if the shapes are aligned, or if one needs to be flipped

        Parameters
        ----------
        shape_1 : ShapeArray
            Shape to compare.
        shape_2 : ShapeArray
            Shape to compare.
        node_id_map : callable, optional
            A function applied to `shape_2`'s node ids to map them onto
            `shape_1`'s node numbering before the comparison. If not
            specified, the node ids are used as-is.

        Returns
        -------
        np.ndarray
            An array denoting if one of the shapes need to be flipped (-1)
            to be equivalent to the other, or if they are already aligned (1)

        """
        if node_id_map is not None:
            # Work on a copy so the caller's shapes are not modified
            shape_2 = shape_2.copy()
            shape_2.coordinate.node = node_id_map(shape_2.coordinate.node)
        # Compare only the dofs common to both shape sets
        common_coordinates = np.intersect1d(
            np.unique(shape_1.coordinate), np.unique(shape_2.coordinate))
        # Sign of the per-shape dot product: positive means aligned
        return np.sign(np.einsum('ij,ij->i', shape_1[common_coordinates], shape_2[common_coordinates]))
1132 def mode_table(self, table_format='csv',
1133 frequency_format='{:0.2f}',
1134 damping_format='{:0.2f}%'):
1135 """
1136 Generates a table of modal information including frequency and damping
1138 Parameters
1139 ----------
1140 table_format : str, optional
1141 The type of table to generate. Can be 'csv', 'rst', 'markdown',
1142 'latex', 'pandas', or 'ascii'. The default is 'csv'.
1143 frequency_format : str, optional
1144 Format specifier for frequency. The default is '{:0.2f}'.
1145 damping_format : str, optional
1146 Format specifier for damping percent. The default is '{:0.2f}%'.
1148 Raises
1149 ------
1150 ValueError
1151 Raised if a invalid `table_format` is specified
1153 Returns
1154 -------
1155 table : str
1156 String representation of the mode table
1158 """
1159 available_formats = ['csv', 'rst', 'markdown', 'latex', 'ascii','pandas']
1160 if not table_format.lower() in available_formats:
1161 raise ValueError('`table_format` must be one of {:}'.format(available_formats))
1162 if table_format.lower() == 'ascii':
1163 return repr(self)
1164 elif table_format.lower() in ['csv', 'latex','pandas']:
1165 # Create a Pandas dataframe
1166 sorted_flat_self = np.sort(self.flatten())
1167 data_dict = {'Mode': np.arange(sorted_flat_self.size) + 1,
1168 'Frequency': [frequency_format.format(v) for v in sorted_flat_self.frequency],
1169 'Damping': [damping_format.format(v * (100 if '%' in damping_format else 1)) for v in sorted_flat_self.damping]}
1170 for field in ['comment1', 'comment2', 'comment3', 'comment4', 'comment5']:
1171 if np.all(sorted_flat_self[field] == ''):
1172 continue
1173 data_dict[field.title()] = sorted_flat_self[field]
1174 df = pd.DataFrame(data_dict)
1175 if table_format.lower() == 'csv':
1176 return df.to_csv(index=False)
1177 elif table_format.lower() == 'latex':
1178 return df.to_latex(index=False)
1179 elif table_format.lower() == 'pandas':
1180 return df
1181 else:
1182 raise ValueError('Unknown Table Format: {:}'.format(table_format))
1184 def edit_comments(self, geometry = None):
1185 """
1186 Opens up a table where the shape comments can be edited
1188 If a geometry is also passed, it will also open up a mode shape plotter
1189 window where you can visualize the modes you are looking at.
1191 Edited comments will be stored back into the ShapeArray object when the
1192 OK button is pressed. Comments will not be stored if the Cancel button
1193 is pressed.
1195 Parameters
1196 ----------
1197 geometry : Geometry, optional
1198 A geometry on which the shapes will be plotted. If not specified,
1199 a table will just open up.
1201 Returns
1202 -------
1203 ShapeCommentTable
1204 A ShapeCommentTable displaying the modal information where comments
1205 can be edited.
1207 Notes
1208 -----
1209 Due to how Python handles garbage collection, the table may be
1210 immediately closed if not assigned to a variable, as Python things it
1211 is no longer in use.
1213 """
1214 if geometry is not None:
1215 plotter = geometry.plot_shape(self)
1216 return ShapeCommentTable(self, plotter)
1218 def transformation_matrix(self, physical_coordinates, inversion=True, normalized = True):
1219 """
1220 Creates a transformation matrix that describes a transformation from a physical coordinate array into modal space using the provided mode shapes.
1222 Parameters
1223 ----------
1224 coordinates : CoordinateArray
1225 The "physical" force coordinates for the transformation.
1227 inversion : bool, optional
1228 If True, the pseudo inverse of the mode shape matrix will be performed. If False, the mode shape matrix will not be inverted.
1230 normalized : bool, optional
1231 If True, the rows of the transformation matrix will be normalized to unit magnitude.
1233 Returns
1234 -------
1235 transformation : Matrix
1236 The transformation matrix as a matrix object. It is organized with
1237 the modal CoordinateArray on the rows and the physical
1238 force CoordinateArray on the columns.
1240 Notes
1241 -----
1242 The transformation automatically handles polarity differences in the geometry
1243 and force_coordinate.
1245 The returned transformation matrix is intended to be used as an output transformation matrix for MIMO vibration control.
1247 """
1248 # Generate Modal Coordinates
1249 modal_coordinates = sdynpy_coordinate.coordinate_array(node=np.arange(len(self)), direction=0)
1251 # Truncate Shapes to Physical Coordinates
1252 self = self.reduce(physical_coordinates)
1254 # Invert Matrix (if desired)
1255 if inversion:
1256 transformation_matrix = np.linalg.pinv(self.shape_matrix.T)
1257 else:
1258 transformation_matrix = self.shape_matrix
1260 # Normalize Matrix (if desired)
1261 if normalized:
1262 transformation_matrix = (transformation_matrix.T / np.abs(transformation_matrix).max(axis=1)).T
1264 return matrix(transformation_matrix, modal_coordinates, physical_coordinates)
class ShapeCommentTable(QDialog):
    """Dialog presenting shape metadata in a table with editable comments."""

    def __init__(self, shapes, plotter = None, parent = None):
        """
        Creates a table window that allows editing of comments on the mode
        shapes.

        Parameters
        ----------
        shapes : ShapeArray
            The shapes for which the comments need to be modified.
        plotter : ShapePlotter, optional
            A shape plotter that is to be linked to the table. It should have
            the same modes used for the table plotted on it. The plotter will
            automatically update the mode being displayed as different rows of
            the table are selected. If not specified, there will be no mode
            shape display linked to the table.
        parent : QWidget, optional
            Parent widget for the window. The default is No parent.

        Returns
        -------
        None.

        """
        super().__init__(parent)
        # Add a table widget
        self.setWindowTitle('Shape Comment Editor')
        self.plotter = plotter
        self.shapes = shapes
        # OK stores edits back into the shapes (see accept); Cancel discards
        self.button_box = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.button_box.accepted.connect(self.accept)
        self.button_box.rejected.connect(self.reject)
        self.layout = QVBoxLayout()
        # Columns will be key, frequency, damping, comment1, comment2, ... comment5
        self.mode_table = QTableWidget(shapes.size, 8)
        # Selecting a different row updates the linked plotter (if any)
        self.mode_table.currentItemChanged.connect(self.update_mode)
        self.mode_table.setHorizontalHeaderLabels(['Index','Frequency','Damping','Comment1','Comment2','Comment3','Comment4','Comment5'])
        self.mode_table.setSelectionMode(QAbstractItemView.SingleSelection)
        for row,(shape_index,shape) in enumerate(shapes.ndenumerate()):
            for column,value in enumerate([shape_index,shape.frequency,
                                           shape.damping,
                                           shape.comment1,
                                           shape.comment2,
                                           shape.comment3,
                                           shape.comment4,
                                           shape.comment5]):
                if column == 1:
                    # Frequency formatted to two decimal places
                    item = QTableWidgetItem('{:0.2f}'.format(value))
                elif column == 2:
                    # Damping displayed as a percentage
                    item = QTableWidgetItem('{:0.2f}%'.format(value*100))
                else:
                    item = QTableWidgetItem(str(value))
                if column <= 2:
                    # Index, frequency, and damping columns are read-only
                    item.setFlags(item.flags() ^ Qt.ItemIsEditable)
                self.mode_table.setItem(row,column,item)
        self.layout.addWidget(self.mode_table)
        self.layout.addWidget(self.button_box)
        self.setLayout(self.layout)
        self.show()

    def update_mode(self):
        """Shows the shape for the currently selected row on the linked plotter."""
        row = self.mode_table.currentRow()
        if self.plotter is not None:
            self.plotter.current_shape = row
            self.plotter.reset_shape()

    def accept(self):
        """Stores the edited comment columns back into the shapes, then closes."""
        for index,(shape_index,shape) in enumerate(self.shapes.ndenumerate()):
            # Columns 3-7 hold comment1-comment5
            shape.comment1 = self.mode_table.item(index,3).text()
            shape.comment2 = self.mode_table.item(index,4).text()
            shape.comment3 = self.mode_table.item(index,5).text()
            shape.comment4 = self.mode_table.item(index,6).text()
            shape.comment5 = self.mode_table.item(index,7).text()
        super().accept()
1346# def string_array(self):
1347# return create_coordinate_string_array(self.node,self.direction)
1348#
1349# def __repr__(self):
1350# return 'coordinate_array(string_array=\n'+repr(self.string_array())+')'
1351#
1352# def __str__(self):
1353# return str(self.string_array())
1354#
1355# def __eq__(self,value):
1356# value = np.array(value)
1357# # A string
1358# if np.issubdtype(value.dtype,np.character):
1359# return self.string_array() == value
1360# else:
1361# if value.dtype.names is None:
1362# node_logical = self.node == value[...,0]
1363# direction_logical = self.direction == value[...,1]
1364# else:
1365# node_logical = self.node == value['node']
1366# direction_logical = self.direction == value['direction']
1367# return np.logical_and(node_logical,direction_logical)
1368#
1369# def __ne__(self,value):
1370# return ~self.__eq__(value)
1371#
1372# def __abs__(self):
1373# abs_coord = self.copy()
1374# abs_coord.direction = abs(abs_coord.direction)
1375# return abs_coord
1376#
1377# def __neg__(self):
1378# neg_coord = self.copy()
1379# neg_coord.direction = -neg_coord.direction
1380# return neg_coord
1381#
1382# def __pos__(self):
1383# pos_coord = self.copy()
1384# pos_coord.direction = +pos_coord.direction
1385# return pos_coord
1386#
1387# def abs(self):
1388# return self.__abs__()
1389#
def shape_array(coordinate=None, shape_matrix=None, frequency=1.0, damping=0.0, modal_mass=1.0,
                comment1='', comment2='', comment3='', comment4='', comment5='',
                structured_array=None):
    """
    Creates a ShapeArray from shape coefficients and modal metadata.

    Creates an array of shapes defined by a set of coordinates (degrees of
    freedom) and the shape coefficients at those coordinates, along with
    frequency, damping, and modal mass information. Shape arrays can be
    created from the individual fields, or alternatively from a numpy
    structured array containing those fields. Multidimensional arrays can
    be used.

    Parameters
    ----------
    coordinate : CoordinateArray
        Array of coordinates corresponding to the last dimension of the
        shape_matrix
    shape_matrix : ndarray
        Matrix of shape coefficients. If complex, then the shape will be made
        into a complex shape. Otherwise it will be real. The last dimension
        should be the "coordinate" dimension, and the first dimension(s) should
        be the shape dimension(s). Note that this is transposed from the
        typical modal approach, but it makes for better itegration with numpy.
    frequency : ndarray
        Natural Frequencies for each shape of the array
    damping : ndarray
        Fraction of Critical Damping (as proportion not percentage) for each
        shape of the array
    modal_mass : ndarray
        Modal mass for each shape of the array
    comment1 - comment5 : ndarray
        Comments for the universal file for each shape. Note that comment5 will
        not be stored to the universal file format.
    structured_array : ndarray (structured)
        Alternatively to the above, a structured array can be passed with
        identical parameters to the above.

    Raises
    ------
    ValueError
        If `structured_array` does not contain the required field names.

    Returns
    -------
    shape_array : ShapeArray

    """
    keys = [
        'coordinate',
        'shape_matrix',
        'frequency',
        'damping',
        'modal_mass',
        'comment1',
        'comment2',
        'comment3',
        'comment4',
        'comment5',
    ]
    data = {}
    if structured_array is not None:
        # Pull each required field out of the structured array
        for key in keys:
            try:
                data[key] = structured_array[key]
            except (ValueError, TypeError):
                raise ValueError(
                    'structured_array must be numpy.ndarray with dtype names {:}'.format(keys))
    else:
        data['coordinate'] = np.array(coordinate).view(sdynpy_coordinate.CoordinateArray)
        data['shape_matrix'] = np.array(shape_matrix)
        data['frequency'] = np.array(frequency)
        data['damping'] = np.array(damping)
        data['modal_mass'] = np.array(modal_mass)
        # Comments are stored as fixed-width 80-character unicode strings
        data['comment1'] = np.array(comment1, dtype='<U80')
        data['comment2'] = np.array(comment2, dtype='<U80')
        data['comment3'] = np.array(comment3, dtype='<U80')
        data['comment4'] = np.array(comment4, dtype='<U80')
        data['comment5'] = np.array(comment5, dtype='<U80')
    # Create the ShapeArray: shape dimensions are all but the last axis of the
    # shape matrix; the last axis is the coordinate (dof) dimension
    shp_array = ShapeArray(data['shape_matrix'].shape[:-1], data['shape_matrix'].shape[-1],
                           'real' if np.isrealobj(data['shape_matrix']) else 'complex')
    shp_array.coordinate = data['coordinate']
    shp_array.shape_matrix = data['shape_matrix']
    shp_array.frequency = data['frequency']
    shp_array.damping = data['damping']
    shp_array.modal_mass = data['modal_mass']
    shp_array.comment1 = data['comment1']
    shp_array.comment2 = data['comment2']
    shp_array.comment3 = data['comment3']
    shp_array.comment4 = data['comment4']
    shp_array.comment5 = data['comment5']

    return shp_array
# Module-level convenience aliases so these ShapeArray static/class methods can
# be called directly as e.g. sdynpy_shape.from_unv(...)
from_imat_struct = ShapeArray.from_imat_struct
from_exodus = ShapeArray.from_exodus
from_unv = ShapeArray.from_unv
load = ShapeArray.load
concatenate_dofs = ShapeArray.concatenate_dofs
overlay_shapes = ShapeArray.overlay_shapes
shape_alignment = ShapeArray.shape_alignment
def rigid_body_error(geometry, rigid_shapes, **rigid_shape_kwargs):
    """
    Computes rigid shape error based on geometries

    Analytic rigid body shapes are computed from the geometry. The supplied
    rigid_shapes are then projected through these analytic shapes and a
    residual is computed by subtracting the projected shapes from the original
    shapes. This residual is a measure of how "non-rigid" the provided shapes
    were.

    Parameters
    ----------
    geometry : Geometry
        Geometry from which analytic rigid body shapes will be created
    rigid_shapes : ShapeArray
        ShapeArray consisting of nominally rigid shapes from which errors are
        to be computed
    **rigid_shape_kwargs : various
        Additional keywords that can be passed to the Geometry.rigid_body_shapes
        method.

    Returns
    -------
    coordinates : CoordinateArray
        The coordinates corresponding to the output residual array
    residual : np.ndarray
        Residuals computed by subtracting the provided shapes from those same
        shapes projected through the analytical rigid body shapes.

    """
    coordinates = np.unique(rigid_shapes.coordinate)
    true_rigid_shapes = geometry.rigid_body_shapes(coordinates, **rigid_shape_kwargs)
    shape_matrix_exp = rigid_shapes[coordinates].T
    shape_matrix_true = true_rigid_shapes[coordinates].T
    # Least-squares projection of the measured shapes onto the span of the
    # analytic rigid-body shapes. rcond=None selects the machine-precision
    # cutoff explicitly, avoiding NumPy's FutureWarning for the legacy default.
    projection = shape_matrix_true @ np.linalg.lstsq(shape_matrix_true, shape_matrix_exp, rcond=None)[0]
    residual = np.abs(shape_matrix_exp - projection)
    return coordinates, residual
def rigid_body_check(geometry, rigid_shapes, distance_number=5, residuals_to_label=5,
                     return_shape_diagnostics=False, plot=True, return_figures = False, **rigid_shape_kwargs):
    """
    Performs rigid body checks, both looking at the complex plane and residuals

    Parameters
    ----------
    geometry : Geometry
        Geometry from which analytic rigid body shapes will be created
    rigid_shapes : ShapeArray
        ShapeArray consisting of nominally rigid shapes from which errors are
        to be computed
    distance_number : int, optional
        Number of points furthest from the complex-plane line fit that are
        labeled as outliers for each shape. The default is 5.
    residuals_to_label : int, optional
        The top `residuals_to_label` residuals will be highlighted in the
        residual plots. The default is 5.
    return_shape_diagnostics : bool, optional
        If True, additional outputs are returned to help diagnose issues. The
        default is False.
    plot : bool, optional
        Whether or not to create plots of the results. The default is True.
    return_figures : bool, optional
        If True, the created figures are appended to the returned tuple. The
        default is False.
    **rigid_shape_kwargs : various
        Additional keywords that can be passed to the Geometry.rigid_body_shapes
        method.

    Returns
    -------
    suspicious_channels : CoordinateArray
        A set of suspicous channels that should be investigated
    analytic_rigid_shapes : ShapeArray
        Rigid body shapes created from the geometry. Only returned if
        return_shape_diagnostics is True
    residual : np.ndarray
        Values of the residuals at coordinates np.unique(rigid_shapes.coordinate).
        Only returned if return_shape_diagnostics is True
    shape_matrix_exp : np.ndarray
        The shape matrix of the supplied rigid shapes. Only returned if
        return_shape_diagnostics is True

    """
    coordinates = np.unique(rigid_shapes.coordinate)
    true_rigid_shapes = geometry.rigid_body_shapes(coordinates, **rigid_shape_kwargs)
    shape_matrix_exp = rigid_shapes[coordinates].T
    shape_matrix_true = true_rigid_shapes[coordinates].T
    # Project the measured shapes onto the analytic rigid-body shape span
    projection = shape_matrix_true @ np.linalg.lstsq(shape_matrix_true, shape_matrix_exp, rcond=None)[0]
    # Normalize to the largest dof in each shape
    residual = np.abs(shape_matrix_exp - projection)/np.max(np.abs(projection), axis=0)
    suspicious_channels = []
    # Plot the complex plane for each shape
    figs = []
    if plot:
        for i, (shape, data) in enumerate(zip(rigid_shapes, shape_matrix_exp.T)):
            fig, ax = plt.subplots(num='Shape {:} Complex Plane'.format(
                i if shape.comment1 == '' else shape.comment1))
            ax.plot(data.real, data.imag, 'bx')
            ax.set_xlabel('Real Part')
            ax.set_ylabel('Imag Part')
            ax.set_title('Shape {:}\nComplex Plane'.format(
                i+1 if shape.comment1 == '' else shape.comment1))
            x = np.real(data)
            y = np.imag(data)
            # Fit line through the origin (y = mx)
            m, residuals, rank, singular_values = np.linalg.lstsq(x[:, np.newaxis], y, rcond=None)
            m = m[0]  # Get the slope
            # Perpendicular distance of each point from the fit line
            point_distances = np.abs(m * x - y) / np.sqrt(m**2 + 1)
            # Plot the line fit on the plot
            ax.axline((0, 0), slope=m, color='k', linestyle='--')
            # Label the `distance_number` points furthest from the line.
            # (This was previously hard-coded to 5, silently ignoring the
            # distance_number argument.)
            outliers = np.argsort(point_distances)[-distance_number:]
            for outlier in outliers:
                ax.text(data[outlier].real, data[outlier].imag, str(coordinates[outlier]))
                suspicious_channels.append(coordinates[outlier])
            figs.append(fig)
    # Now plot the residuals
    residual_order = np.argsort(np.max(residual, axis=-1))
    if plot:
        fig, ax = plt.subplots(num='Projection Residuals')
        ax.plot(residual, 'x')
        figs.append(fig)
    for i in range(residuals_to_label):
        index = residual_order[-1 - i]
        if plot:
            ax.text(index, np.max(residual[index]), str(coordinates[index]))
        suspicious_channels.append(coordinates[index])
    if plot:
        ax.set_ylabel('Residual')
        ax.set_xlabel('Degree of Freedom')
    return_object = (np.array(suspicious_channels).view(sdynpy_coordinate.CoordinateArray),)
    if return_shape_diagnostics:
        return_object = return_object + (true_rigid_shapes, residual, shape_matrix_exp)
    if return_figures:
        return_object = return_object + tuple(figs)
    if len(return_object) == 1:
        return return_object[0]
    else:
        return return_object
def rigid_body_fix_node_orientation(geometry, rigid_shapes, suspect_nodes, new_cs_ids=None, gtol=1e-8, xtol=1e-8):
    """
    Solves for the best sensor orientation in the geometry to minimize the residual

    This function uses a nonlinear optimizer to solve for the orientation of
    the specified nodes that provide the best estimate of a rigid body shape.
    This can be used to try to figure out the true orientation of a sensor in
    a test.

    Parameters
    ----------
    geometry : Geometry
        Geometry from which analytic rigid body shapes will be created
    rigid_shapes : ShapeArray
        ShapeArray consisting of nominally rigid shapes from which errors are
        to be corrected
    suspect_nodes : np.ndarray
        A set of node ids to investigate. The coordinate system of each of
        these will be modified.
    new_cs_ids : np.ndarray, optional
        Id numbers to give the newly added coordinate systems. The default is
        None, which simply increments the maximum coordinate system for each
        suspect node
    gtol : float, optional
        Global tolerance for convergence for the nonlinear optimizer. The
        default is 1e-8.
    xtol : float, optional
        Relative tolerance for convergence for the nonlinear optimizer. The
        default is 1e-8.

    Returns
    -------
    corrected_geometry : Geometry
        A geometry with coordinate systems modified to be consistent with the
        rigid body shapes supplied.

    """
    new_nodes = []
    new_css = []
    # Loop through all suspect nodes
    for i, node in enumerate(suspect_nodes):
        # Optimize one suspect node at a time: drop all other suspect nodes so
        # their errors do not pollute this node's residual
        keep_nodes = [val for val in np.unique(geometry.node.id) if (
            val not in suspect_nodes) or (val == node)]
        error_geometry = geometry.reduce(keep_nodes)
        error_shapes = rigid_shapes.reduce(keep_nodes)

        # Objective: rigid-body residual after rotating this node's
        # displacement coordinate system by the candidate quaternion.
        # Closes over the current `i` and `node`; it is only called within
        # this loop iteration via opt.minimize below.
        def objective_function(quaternion):
            r = quaternion_to_rotation_matrix(quaternion)
            # Create a new geometry with the rotation applied
            updated_geometry = error_geometry.copy()
            new_cs = updated_geometry.coordinate_system(updated_geometry.node(node).disp_cs)
            new_cs.id = updated_geometry.coordinate_system.id.max(
            ) + 1 if new_cs_ids is None else new_cs_ids[i]
            new_cs.matrix[:3, :3] = r @ new_cs.matrix[:3, :3]
            updated_geometry.coordinate_system = np.concatenate(
                [updated_geometry.coordinate_system, new_cs[np.newaxis]])
            updated_geometry.node.disp_cs[updated_geometry.node.id == node] = new_cs.id
            residual = np.linalg.norm(rigid_body_error(updated_geometry, error_shapes)[1])
            return residual
        print('Optimizing Orientation of Node {:}'.format(node))
        # Start at the identity quaternion; keep unit magnitude via constraint
        output = opt.minimize(objective_function, [1, 0, 0, 0], method='trust-constr', constraints=[
            unit_magnitude_constraint], options={'gtol': gtol, 'xtol': xtol})
        # Build the corrected coordinate system from the optimal quaternion
        r = quaternion_to_rotation_matrix(output.x)
        new_cs = error_geometry.coordinate_system(error_geometry.node(node).disp_cs)
        new_cs.id = error_geometry.coordinate_system.id.max(
        ) + 1 + i if new_cs_ids is None else new_cs_ids[i]
        new_cs.matrix[:3, :3] = r @ new_cs.matrix[:3, :3]
        new_node = error_geometry.node[error_geometry.node.id == node].copy()
        new_node.disp_cs = new_cs.id
        new_nodes.append(new_node)
        new_css.append(new_cs)
    # Now add the nodes and coordinate systems to the geometry
    output_geometry = geometry.copy()
    for node in new_nodes:
        output_geometry.node[output_geometry.node.id == node.id] = node
    output_geometry.coordinate_system = np.concatenate([output_geometry.coordinate_system, new_css])
    return output_geometry
def mac(shape_1, shape_2=None, node_id_map=None):
    """
    Computes the modal assurance criteria between two sets of shapes

    Only degrees of freedom common to both shape sets (after any node id
    mapping) are used in the comparison.

    Parameters
    ----------
    shape_1 : ShapeArray
        A set of shapes to compute the MAC
    shape_2 : ShapeArray, optional
        A second set of shapes to compute the MAC.  If not specified, the
        AutoMAC of shape_1 is computed
    node_id_map : id_map, optional
        A map applied to the node ids of `shape_2` so they match the node
        ids of `shape_1` when the two shape sets were defined on different
        node numbering schemes.  The default is None, which assumes the
        shapes already share common node ids.

    Returns
    -------
    mac_array : np.ndarray
        A numpy ndarray consisting of the MAC
    """
    if shape_2 is None:
        shape_2 = shape_1
    if node_id_map is not None:
        # Remap on a copy so the caller's shape array is not modified
        shape_2 = shape_2.copy()
        shape_2.coordinate.node = node_id_map(shape_2.coordinate.node)
    # Restrict both shape sets to the degrees of freedom they share
    common_coordinates = np.intersect1d(
        np.unique(shape_1.coordinate), np.unique(shape_2.coordinate))
    phi_1 = shape_1[common_coordinates].T
    phi_2 = shape_2[common_coordinates].T
    return mac_corr(phi_1, phi_2)
def shape_comparison_table(shape_1, shape_2, frequency_format='{:0.2f}',
                           damping_format='{:.02f}%', mac_format='{:0.0f}',
                           percent_error_format='{:0.1f}%', spacing=2,
                           table_format='text', node_id_map=None):
    """
    Generates a shape comparison table between two sets of shapes

    For each mode pair the table lists the two frequencies, the frequency
    percent error, optionally the two damping values and damping percent
    error, and the MAC value between the shapes.  Percent errors are
    computed relative to `shape_2`, i.e. ``(v1 - v2) * 100 / v2``.

    Parameters
    ----------
    shape_1 : ShapeArray
        The first shape set to compare
    shape_2 : ShapeArray
        The second shape set to compare.  Must contain the same number of
        shapes as `shape_1`.
    frequency_format : str, optional
        Format specifier for frequency. The default is '{:0.2f}'.
    damping_format : str or None, optional
        Format specifier for damping. The default is '{:.02f}%'.  If None,
        the damping columns are omitted from the table entirely.
    mac_format : str, optional
        Format specifier for MAC. The default is '{:0.0f}'.
    percent_error_format : str, optional
        Format specifier for percent error. The default is '{:0.1f}%'.
    spacing : int, optional
        Number of padding characters added between columns. The default is 2.
    table_format : str, optional
        Table format to return. Must be 'text', 'markdown', or 'latex'. The
        default is 'text'.
    node_id_map : id_map, optional
        An ID map to use if shapes do not have identical node ids. The default
        is to assume that the shapes have common node ids.

    Raises
    ------
    ValueError
        Raised if an invalid `table_format` is specified

    Returns
    -------
    output_string : str
        String representation of the output table
    """
    # Validate the format up front so we fail fast before building any of
    # the column strings.
    if table_format.lower() not in ('text', 'markdown', 'latex'):
        raise ValueError("Invalid table_format. Must be one of ['text','markdown','latex'].")
    shape_1 = shape_1.flatten()
    shape_2 = shape_2.flatten()
    # Build each column's contents; the first entry in each list is the
    # column's header label so it participates in the width computation.
    frequency_strings_1 = ['Freq 1 (Hz)'] + [frequency_format.format(shape.frequency)
                                             for shape in shape_1]
    frequency_strings_2 = ['Freq 2 (Hz)'] + [frequency_format.format(shape.frequency)
                                             for shape in shape_2]
    index_strings = ['Mode'] + ['{:}'.format(v) for v in np.arange(shape_1.size) + 1]
    # Diagonal of the cross-MAC gives the MAC between corresponding shapes
    mac_strings = ['MAC'] + [mac_format.format(m * 100)
                             for m in np.einsum('ii->i', mac(shape_1, shape_2, node_id_map))]
    freq_error_strings = ['Freq Error'] + [percent_error_format.format(
        (s1.frequency - s2.frequency) * 100 / s2.frequency) for s1, s2 in zip(shape_1, shape_2)]
    # Column widths are the widest cell in each column plus the requested spacing
    index_size = max(len(s) for s in index_strings)
    freq_size = max(len(s) for s in frequency_strings_1 + frequency_strings_2)
    mac_size = max(len(s) for s in mac_strings)
    freqerr_size = max(len(s) for s in freq_error_strings)
    # Right-justified format specifiers, e.g. '{:>8}'
    index_table_format = '{{:>{:}}}'.format(index_size + spacing)
    freq_table_format = '{{:>{:}}}'.format(freq_size + spacing)
    mac_table_format = '{{:>{:}}}'.format(mac_size + spacing)
    freqerr_table_format = '{{:>{:}}}'.format(freqerr_size + spacing)
    if damping_format is not None:
        damping_strings_1 = ['Damp 1'] + [damping_format.format(shape.damping * 100)
                                          for shape in shape_1]
        damping_strings_2 = ['Damp 2'] + [damping_format.format(shape.damping * 100)
                                          for shape in shape_2]
        damp_error_strings = ['Damp Error'] + [percent_error_format.format(
            (s1.damping - s2.damping) * 100 / s2.damping) for s1, s2 in zip(shape_1, shape_2)]
        damp_size = max(len(s) for s in damping_strings_1 + damping_strings_2)
        damperr_size = max(len(s) for s in damp_error_strings)
        damp_table_format = '{{:>{:}}}'.format(damp_size + spacing)
        damperr_table_format = '{{:>{:}}}'.format(damperr_size + spacing)
    # Per-format delimiters used when assembling each row
    if table_format.lower() == 'text':
        table_begin_string = ''
        table_end_string = ''
        separator_string = ''
        lineend_string = '\n'
        linebegin_string = ''
        header_separator_string = '\n'
    elif table_format.lower() == 'markdown':
        table_begin_string = ''
        table_end_string = ''
        separator_string = '|'
        lineend_string = '|\n'
        linebegin_string = '|'
        # Markdown requires a |---|---| divider row after the header whose
        # cells match the column widths
        if damping_format is not None:
            header_separator_string = lineend_string + linebegin_string + separator_string.join(['-' * (length + spacing) for length in [
                index_size, freq_size, freq_size, freqerr_size,
                damp_size, damp_size, damperr_size, mac_size]]) + lineend_string
        else:
            header_separator_string = lineend_string + linebegin_string + separator_string.join(['-' * (length + spacing) for length in [
                index_size, freq_size, freq_size, freqerr_size, mac_size]]) + lineend_string
    else:  # latex (already validated above)
        # Column count of the tabular environment depends on whether the
        # damping columns are included
        if damping_format is not None:
            table_begin_string = '\\begin{tabular}{rrrrrrrr}\n'
        else:
            table_begin_string = '\\begin{tabular}{rrrrr}\n'
        table_end_string = r'\end{tabular}'
        separator_string = ' & '
        lineend_string = '\\\\\n'
        linebegin_string = '    '
        header_separator_string = '\\\\ \\hline\n'
    output_string = table_begin_string
    if damping_format is not None:
        for i, (ind, f1, f2, d1, d2, fe, de, m) in enumerate(zip(
                index_strings, frequency_strings_1, frequency_strings_2,
                damping_strings_1, damping_strings_2, freq_error_strings,
                damp_error_strings, mac_strings)):
            # The first row is the header, so it is followed by the header
            # separator rather than the normal line ending
            output_string += (linebegin_string + index_table_format + separator_string
                              + freq_table_format + separator_string
                              + freq_table_format + separator_string
                              + freqerr_table_format + separator_string
                              + damp_table_format + separator_string
                              + damp_table_format + separator_string
                              + damperr_table_format + separator_string
                              + mac_table_format
                              + (header_separator_string if i == 0 else lineend_string)).format(
                ind, f1, f2, fe, d1, d2, de, m)
    else:
        for i, (ind, f1, f2, fe, m) in enumerate(zip(
                index_strings, frequency_strings_1, frequency_strings_2,
                freq_error_strings,
                mac_strings)):
            output_string += (linebegin_string + index_table_format + separator_string
                              + freq_table_format + separator_string
                              + freq_table_format + separator_string
                              + freqerr_table_format + separator_string
                              + mac_table_format
                              + (header_separator_string if i == 0 else lineend_string)).format(
                ind, f1, f2, fe, m)
    output_string += table_end_string
    if table_format.lower() == 'latex':
        # Percent signs must be escaped in LaTeX output
        output_string = output_string.replace('%', '\\%')
    return output_string