Coverage for lasso/dyna/d3plot.py: 50%

3907 statements  

« prev     ^ index     » next       coverage.py v7.2.4, created at 2023-04-28 19:49 +0100

1import ctypes 

2from dataclasses import dataclass 

3import logging 

4import mmap 

5import os 

6import pprint 

7import re 

8import struct 

9import tempfile 

10import traceback 

11import typing 

12import webbrowser 

13from typing import Any, BinaryIO, Dict, Iterable, List, Set, Tuple, Union 

14 

15import numpy as np 

16 

17from ..femzip.femzip_api import FemzipAPI, FemzipBufferInfo, FemzipVariableCategory 

18from ..io.binary_buffer import BinaryBuffer 

19from ..io.files import open_file_or_filepath 

20from ..logging import get_logger 

21from ..plotting import plot_shell_mesh 

22from .array_type import ArrayType 

23from .d3plot_header import D3plotFiletype, D3plotHeader 

24from .femzip_mapper import FemzipMapper, filter_femzip_variables 

25from .filter_type import FilterType 

26 

# pylint: disable = too-many-lines

# Offset to convert between Fortran-style 1-based indexing (used inside
# d3plot files) and Python's 0-based indexing.
FORTRAN_OFFSET = 1
# Module-level logger (see ..logging.get_logger).
LOGGER = get_logger(__name__)

31 

32 

33def _check_ndim(d3plot, array_dim_names: Dict[str, List[str]]): 

34 """Checks if the specified array is fine in terms of ndim 

35 

36 Parameters 

37 ---------- 

38 d3plot: D3plot 

39 d3plot holding arrays 

40 array_dim_names: Dict[str, List[str]] 

41 """ 

42 

43 for type_name, dim_names in array_dim_names.items(): 

44 if type_name in d3plot.arrays: 

45 array = d3plot.arrays[type_name] 

46 if array.ndim != len(dim_names): 

47 msg = "Array {0} must have {1} instead of {2} dimensions: ({3})" 

48 dim_names_text = ", ".join(dim_names) 

49 raise ValueError(msg.format(type_name, len(dim_names), array.ndim, dim_names_text)) 

50 

51 

52def _check_array_occurrence( 

53 d3plot, array_names: List[str], required_array_names: List[str] 

54) -> bool: 

55 """Check if an array exists, if all depending on it exist too 

56 

57 Parameters 

58 ---------- 

59 array_names: List[str] 

60 list of base arrays 

61 required_array_names: List[str] 

62 list of array names which would be required 

63 

64 Returns 

65 ------- 

66 exists: bool 

67 if the arrays exist or not 

68 

69 Raises 

70 ------ 

71 ValueError 

72 If a required array is not present 

73 """ 

74 

75 if any(name in d3plot.arrays for name in array_names): 

76 if not all(name in d3plot.arrays for name in required_array_names): 

77 msg = "The arrays '{0}' require setting also the arrays '{1}'" 

78 raise ValueError(msg.format(", ".join(array_names), ", ".join(required_array_names))) 

79 return True 

80 return False 

81 

82 

83def _negative_to_positive_state_indexes(indexes: Set[int], n_entries) -> Set[int]: 

84 """Convert negative indexes of an iterable to positive ones 

85 

86 Parameters 

87 ---------- 

88 indexes: Set[int] 

89 indexes to check and convert 

90 n_entries: int 

91 total number of entries 

92 

93 Returns 

94 ------- 

95 new_entries: Set[int] 

96 the positive indexes 

97 """ 

98 

99 new_entries: Set[int] = set() 

100 for _, index in enumerate(indexes): 

101 new_index = index + n_entries if index < 0 else index 

102 if new_index >= n_entries: 

103 err_msg = "State '{0}' exceeds the maximum number of states of '{1}'" 

104 raise ValueError(err_msg.format(index, n_entries)) 

105 new_entries.add(new_index) 

106 return new_entries 

107 

108 

109# pylint: disable = too-many-instance-attributes 

110class D3plotWriterSettings: 

111 """Settings class for d3plot writing""" 

112 

113 def __init__(self, d3plot: Any, block_size_bytes: int, single_file: bool): 

114 

115 # check the writing types 

116 if d3plot.header.itype == np.int32: 

117 self.itype = "<i" 

118 elif d3plot.header.itype == np.int64: 

119 self.itype = "<q" 

120 else: 

121 msg = "Invalid type for integers: {0}. np.int32 or np.int64 is required." 

122 raise RuntimeError(msg.format(d3plot.itype)) 

123 

124 if d3plot.header.ftype == np.float32: 

125 self.ftype = "<f" 

126 elif d3plot.header.ftype == np.float64: 

127 self.ftype = "<d" 

128 else: 

129 msg = "Invalid type for floats: {0}. np.float32 or np.float64 is required." 

130 raise RuntimeError(msg.format(d3plot.ftype)) 

131 

132 assert isinstance(d3plot, D3plot) 

133 self.d3plot = d3plot 

134 self._header = {} 

135 self.block_size_bytes = block_size_bytes 

136 self.mattyp = 0 

137 self.single_file = single_file 

138 self.mdlopt = 0 

139 self.n_shell_layers = 0 

140 self.n_rigid_shells = 0 

141 self.unique_beam_part_indexes = np.empty(0, dtype=self.itype) 

142 self.unique_shell_part_indexes = np.empty(0, dtype=self.itype) 

143 self.unique_solid_part_indexes = np.empty(0, dtype=self.itype) 

144 self.unique_tshell_part_indexes = np.empty(0, dtype=self.itype) 

145 self._str_codec = "utf-8" 

146 self.has_node_temperature_gradient = False 

147 self.has_node_residual_forces = False 

148 self.has_node_residual_moments = False 

149 self.has_plastic_strain_tensor = False 

150 self.has_thermal_strain_tensor = False 

151 self.n_solid_layers = 1 

152 

153 self._allowed_int_types = (np.int8, np.int16, np.int32, np.int64, int) 

154 self._allowed_float_types = (np.float32, np.float64, float) 

155 

156 @property 

157 def wordsize(self): 

158 """Get the wordsize to use for the d3plot 

159 

160 Returns 

161 ------- 

162 worsize : int 

163 D3plot wordsize 

164 """ 

165 return self.d3plot.header.wordsize 

166 

167 @property 

168 def header(self): 

169 """Dictionary holding all d3plot header information 

170 

171 Notes 

172 ----- 

173 The header is being build from the data stored in the d3plot. 

174 """ 

175 return self._header 

176 

177 @header.setter 

178 def set_header(self, new_header: dict): 

179 assert isinstance(new_header, dict) 

180 self._header = new_header 

181 

182 # pylint: disable = too-many-branches, too-many-statements, too-many-locals 

183 def build_header(self): 

184 """Build the new d3plot header""" 

185 

186 new_header = {} 

187 

188 # TITLE 

189 new_header["title"] = self.d3plot.header.title 

190 # RUNTIME 

191 new_header["runtime"] = self.d3plot.header.runtime 

192 # FILETYPE 

193 new_header["filetype"] = self.d3plot.header.filetype.value 

194 # SOURCE VERSION 

195 new_header["source_version"] = self.d3plot.header.source_version 

196 # RELEASE VERSION 

197 new_header["release_version"] = self.d3plot.header.release_version 

198 # SOURCE VERSION 

199 new_header["version"] = self.d3plot.header.version 

200 

201 # NDIM 

202 

203 # check for rigid body data 

204 has_rigid_body_data = False 

205 has_reduced_rigid_body_data = False 

206 if ( 

207 ArrayType.rigid_body_coordinates in self.d3plot.arrays 

208 or ArrayType.rigid_body_rotation_matrix in self.d3plot.arrays 

209 ): 

210 has_rigid_body_data = True 

211 has_reduced_rigid_body_data = True 

212 if ( 

213 ArrayType.rigid_body_velocity in self.d3plot.arrays 

214 or ArrayType.rigid_body_rot_velocity in self.d3plot.arrays 

215 or ArrayType.rigid_body_acceleration in self.d3plot.arrays 

216 or ArrayType.rigid_body_rot_acceleration in self.d3plot.arrays 

217 ): 

218 has_reduced_rigid_body_data = False 

219 

220 # check for rigid road 

221 required_arrays = [ 

222 ArrayType.rigid_road_node_ids, 

223 ArrayType.rigid_road_node_coordinates, 

224 ArrayType.rigid_road_ids, 

225 ArrayType.rigid_road_segment_node_ids, 

226 ArrayType.rigid_road_segment_road_id, 

227 ] 

228 _check_array_occurrence( 

229 self.d3plot, array_names=required_arrays, required_array_names=required_arrays 

230 ) 

231 has_rigid_road = ArrayType.rigid_road_node_ids in self.d3plot.arrays 

232 

233 # check for mattyp shit 

234 # self.mattyp = 0 

235 # if not is_d3part and ArrayType.part_material_type in self.d3plot.arrays: 

236 # self.mattyp = 1 

237 # elif is_d3part and ArrayType.part_material_type in self.d3plot.arrays: 

238 # # 

239 # self.mattyp = 0 

240 

241 # check for mattyp 

242 is_d3part = self.d3plot.header.filetype == D3plotFiletype.D3PART 

243 

244 self.mattyp = 0 

245 if not is_d3part and ArrayType.part_material_type in self.d3plot.arrays: 

246 self.mattyp = 1 

247 

248 # rigid shells 

249 if ArrayType.element_shell_part_indexes in self.d3plot.arrays: 

250 part_mattyp = self.d3plot.arrays[ArrayType.part_material_type] 

251 shell_part_indexes = self.d3plot.arrays[ArrayType.element_shell_part_indexes] 

252 self.n_rigid_shells = (part_mattyp[shell_part_indexes] == 20).sum() 

253 elif is_d3part: 

254 self.mattyp = 0 

255 

256 # set ndim finally 

257 # 

258 # This also confuses me from the manual ... 

259 # It doesn't specify ndim clearly and only gives ranges. 

260 # 

261 # - has rigid body: rigid body data (movement etc.) 

262 # - rigid road: rigid road data 

263 # - mattyp: array with material types for each part 

264 # 

265 # Table: 

266 # |----------------|--------------------|------------|---------|----------| 

267 # | has_rigid_body | reduced rigid body | rigid road | mattyp | ndim | 

268 # |----------------|--------------------|------------|---------|----------| 

269 # | False | False | False | 0 | 4 | 

270 # | False | False | False | 1 | 5 | 

271 # | False (?) | False | True | 0 | 6 | 

272 # | False | False | True | 1 | 7 | 

273 # | True | False | False | 0 | 8 | 

274 # | True | True | True | 0 | 9 | 

275 # |----------------|--------------------|------------|---------|----------| 

276 # 

277 # uncertainties: mattyp 0 or 1 ?!?!? 

278 if ( 

279 not has_rigid_body_data 

280 and not has_reduced_rigid_body_data 

281 and not has_rigid_road 

282 and self.mattyp == 0 

283 ): 

284 new_header["ndim"] = 4 

285 elif ( 

286 not has_rigid_body_data 

287 and not has_reduced_rigid_body_data 

288 and not has_rigid_road 

289 and self.mattyp == 1 

290 ): 

291 new_header["ndim"] = 5 

292 elif ( 

293 not has_rigid_body_data 

294 and not has_reduced_rigid_body_data 

295 and has_rigid_road 

296 and self.mattyp == 0 

297 ): 

298 new_header["ndim"] = 6 

299 elif ( 

300 not has_rigid_body_data 

301 and not has_reduced_rigid_body_data 

302 and has_rigid_road 

303 and self.mattyp == 1 

304 ): 

305 new_header["ndim"] = 7 

306 elif ( 

307 has_rigid_body_data 

308 and not has_reduced_rigid_body_data 

309 and not has_rigid_road 

310 and self.mattyp == 0 

311 ): 

312 new_header["ndim"] = 8 

313 elif ( 

314 has_rigid_body_data 

315 and has_reduced_rigid_body_data 

316 and has_rigid_road 

317 and self.mattyp == 0 

318 ): 

319 new_header["ndim"] = 9 

320 else: 

321 raise RuntimeError("Cannot determine haeder variable ndim.") 

322 

323 # NUMNP 

324 new_header["numnp"] = ( 

325 self.d3plot.arrays[ArrayType.node_coordinates].shape[0] 

326 if ArrayType.node_coordinates in self.d3plot.arrays 

327 else 0 

328 ) 

329 

330 # ICODE 

331 new_header["icode"] = self.d3plot.header.legacy_code_type 

332 

333 # IT aka temperatures 

334 _check_array_occurrence( 

335 self.d3plot, 

336 array_names=[ArrayType.node_heat_flux], 

337 required_array_names=[ArrayType.node_temperature], 

338 ) 

339 

340 it_temp = 0 

341 if ArrayType.node_mass_scaling in self.d3plot.arrays: 

342 it_temp += 10 

343 

344 if ( 

345 ArrayType.node_temperature in self.d3plot.arrays 

346 and ArrayType.node_heat_flux not in self.d3plot.arrays 

347 ): 

348 it_temp += 1 

349 elif ( 

350 ArrayType.node_temperature in self.d3plot.arrays 

351 and ArrayType.node_heat_flux in self.d3plot.arrays 

352 ): 

353 

354 node_temp_shape = self.d3plot.arrays[ArrayType.node_temperature].shape 

355 if node_temp_shape.ndim == 2: 

356 it_temp += 2 

357 elif node_temp_shape.ndim == 3: 

358 it_temp += 3 

359 else: 

360 msg = "{1} is supposed to have either 2 or 3 dims and not '{0}'" 

361 raise RuntimeError(msg.format(node_temp_shape.ndim, ArrayType.node_temperature)) 

362 else: 

363 # caught by _check_array_occurrence 

364 pass 

365 new_header["it"] = it_temp 

366 

367 # IU - disp field indicator 

368 new_header["iu"] = 1 if ArrayType.node_displacement in self.d3plot.arrays else 0 

369 

370 # IV - velicoty field indicator 

371 new_header["iv"] = 1 if ArrayType.node_velocity in self.d3plot.arrays else 0 

372 

373 # IA - velicoty field indicator 

374 new_header["ia"] = 1 if ArrayType.node_acceleration in self.d3plot.arrays else 0 

375 

376 # NEL8 - solid count 

377 n_solids = ( 

378 self.d3plot.arrays[ArrayType.element_solid_node_indexes].shape[0] 

379 if ArrayType.element_solid_node_indexes in self.d3plot.arrays 

380 else 0 

381 ) 

382 new_header["nel8"] = n_solids 

383 

384 # helper var to track max material index across all element types 

385 # this is required to allocate the part array later 

386 # new_header["nmmat"] = 0 

387 

388 # NUMMAT8 - solid material count 

389 required_arrays = [ 

390 ArrayType.element_solid_node_indexes, 

391 ArrayType.element_solid_part_indexes, 

392 ] 

393 _check_array_occurrence( 

394 self.d3plot, 

395 array_names=required_arrays, 

396 required_array_names=required_arrays, 

397 ) 

398 if ArrayType.element_solid_part_indexes in self.d3plot.arrays: 

399 part_indexes = self.d3plot.arrays[ArrayType.element_solid_part_indexes] 

400 unique_part_indexes = np.unique(part_indexes) 

401 self.unique_solid_part_indexes = unique_part_indexes 

402 new_header["nummat8"] = len(unique_part_indexes) 

403 

404 # max_index = unique_part_indexes.max() + 1 \ 

405 # if len(part_indexes) else 0 

406 # new_header["nmmat"] = max(new_header["nmmat"], 

407 # max_index) 

408 else: 

409 new_header["nummat8"] = 0 

410 

411 # NUMDS 

412 new_header["numds"] = self.d3plot.header.has_shell_four_inplane_gauss_points 

413 

414 # NUMST 

415 new_header["numst"] = self.d3plot.header.unused_numst 

416 

417 # NV3D - number of solid vars 

418 # NEIPH - number of solid history vars 

419 n_solid_layers = self.d3plot.check_array_dims( 

420 { 

421 ArrayType.element_solid_stress: 2, 

422 ArrayType.element_solid_effective_plastic_strain: 2, 

423 ArrayType.element_solid_history_variables: 2, 

424 ArrayType.element_solid_plastic_strain_tensor: 2, 

425 ArrayType.element_solid_thermal_strain_tensor: 2, 

426 }, 

427 "n_solid_layers", 

428 ) 

429 n_solid_layers = 1 if n_solid_layers < 1 else n_solid_layers 

430 self.n_solid_layers = n_solid_layers 

431 if n_solid_layers not in (1, 8): 

432 err_msg = "Solids must have either 1 or 8 integration layers not {0}." 

433 raise ValueError(err_msg.format(self.n_solid_layers)) 

434 

435 n_solid_hist_vars, _ = self.count_array_state_var( 

436 array_type=ArrayType.element_solid_history_variables, 

437 dimension_names=["n_timesteps", "n_solids", "n_solid_layers", "n_history_vars"], 

438 has_layers=True, 

439 n_layers=n_solid_layers, 

440 ) 

441 n_solid_hist_vars = n_solid_hist_vars // n_solid_layers 

442 

443 if ArrayType.element_solid_strain in self.d3plot.arrays: 

444 n_solid_hist_vars += 6 

445 # It is uncertain if this is counted as history var 

446 if ArrayType.element_solid_plastic_strain_tensor in self.d3plot.arrays: 

447 n_solid_hist_vars += 6 

448 # It is uncertain if this is counted as history var 

449 if ArrayType.element_solid_thermal_strain_tensor in self.d3plot.arrays: 

450 n_solid_hist_vars += 6 

451 n_solid_vars = (7 + n_solid_hist_vars) * n_solid_layers 

452 new_header["neiph"] = ( 

453 n_solid_hist_vars if n_solids != 0 else self.d3plot.header.n_solid_history_vars 

454 ) 

455 new_header["nv3d"] = n_solid_vars if n_solids != 0 else self.d3plot.header.n_solid_vars 

456 

457 # NEL2 - beam count 

458 new_header["nel2"] = ( 

459 self.d3plot.arrays[ArrayType.element_beam_node_indexes].shape[0] 

460 if ArrayType.element_beam_node_indexes in self.d3plot.arrays 

461 else 0 

462 ) 

463 

464 # NUMMAT2 - beam material count 

465 required_arrays = [ 

466 ArrayType.element_beam_node_indexes, 

467 ArrayType.element_beam_part_indexes, 

468 ] 

469 _check_array_occurrence( 

470 self.d3plot, 

471 array_names=required_arrays, 

472 required_array_names=required_arrays, 

473 ) 

474 if ArrayType.element_beam_part_indexes in self.d3plot.arrays: 

475 part_indexes = self.d3plot.arrays[ArrayType.element_beam_part_indexes] 

476 unique_part_indexes = np.unique(part_indexes) 

477 new_header["nummat2"] = len(unique_part_indexes) 

478 

479 self.unique_beam_part_indexes = unique_part_indexes 

480 

481 # max_index = unique_part_indexes.max() + 1 \ 

482 # if len(unique_part_indexes) else 0 

483 # new_header["nmmat"] = max(new_header["nmmat"], 

484 # max_index) 

485 else: 

486 new_header["nummat2"] = 0 

487 

488 # NEIPB - beam history vars per integration point 

489 array_dims = { 

490 ArrayType.element_beam_shear_stress: 2, 

491 ArrayType.element_beam_axial_stress: 2, 

492 ArrayType.element_beam_plastic_strain: 2, 

493 ArrayType.element_beam_axial_strain: 2, 

494 ArrayType.element_beam_history_vars: 2, 

495 } 

496 n_beam_layers = self.d3plot.check_array_dims(array_dims, "n_beam_layers") 

497 new_header["beamip"] = n_beam_layers 

498 

499 new_header["neipb"] = 0 

500 if ArrayType.element_beam_history_vars in self.d3plot.arrays: 

501 array = self.d3plot.arrays[ArrayType.element_beam_history_vars] 

502 if array.ndim != 4: 

503 msg = ( 

504 "Array '{0}' was expected to have 4 dimensions " 

505 "(n_timesteps, n_beams, n_modes (3+n_beam_layers), " 

506 "n_beam_history_vars)." 

507 ) 

508 raise ValueError(msg.format(ArrayType.element_beam_history_vars)) 

509 if array.shape[3] < 3: 

510 msg = ( 

511 "Array '{0}' dimension 3 must have have at least three" 

512 " entries (beam layers: average, min, max)" 

513 ) 

514 raise ValueError(msg.format(ArrayType.element_beam_history_vars)) 

515 if array.shape[3] != 3 + n_beam_layers: 

516 msg = "Array '{0}' dimension 3 must have size (3+n_beam_layers). {1} != (3+{2})" 

517 raise ValueError(msg.format(ArrayType.element_beam_history_vars)) 

518 new_header["neipb"] = array.shape[3] 

519 

520 # NV1D - beam variable count 

521 new_header["nv1d"] = ( 

522 6 + 5 * new_header["beamip"] + new_header["neipb"] * (3 + new_header["beamip"]) 

523 ) 

524 

525 # NEL4 - number of shells 

526 n_shells = ( 

527 self.d3plot.arrays[ArrayType.element_shell_node_indexes].shape[0] 

528 if ArrayType.element_shell_node_indexes in self.d3plot.arrays 

529 else 0 

530 ) 

531 new_header["nel4"] = n_shells 

532 

533 # NUMMAT4 - shell material count 

534 required_arrays = [ 

535 ArrayType.element_shell_node_indexes, 

536 ArrayType.element_shell_part_indexes, 

537 ] 

538 _check_array_occurrence( 

539 self.d3plot, 

540 array_names=required_arrays, 

541 required_array_names=required_arrays, 

542 ) 

543 if ArrayType.element_shell_part_indexes in self.d3plot.arrays: 

544 part_indexes = self.d3plot.arrays[ArrayType.element_shell_part_indexes] 

545 unique_part_indexes = np.unique(part_indexes) 

546 new_header["nummat4"] = len(unique_part_indexes) 

547 

548 self.unique_shell_part_indexes = unique_part_indexes 

549 

550 # max_index = unique_part_indexes.max() + 1 \ 

551 # if len(unique_part_indexes) else 0 

552 # new_header["nmmat"] = max(new_header["nmmat"], 

553 # max_index) 

554 else: 

555 new_header["nummat4"] = 0 

556 

557 # NEIPS -shell history variable count 

558 n_shell_layers = 0 

559 if ( 

560 ArrayType.element_shell_history_vars in self.d3plot.arrays 

561 or ArrayType.element_tshell_history_variables in self.d3plot.arrays 

562 ): 

563 

564 n_shell_history_vars, n_shell_layers = self.count_array_state_var( 

565 array_type=ArrayType.element_shell_history_vars, 

566 dimension_names=["n_timesteps", "n_shells", "n_shell_layers", "n_history_vars"], 

567 has_layers=True, 

568 n_layers=n_shell_layers, 

569 ) 

570 n_tshell_history_vars, n_tshell_layers = self.count_array_state_var( 

571 array_type=ArrayType.element_tshell_history_variables, 

572 dimension_names=["n_timesteps", "n_tshells", "n_shell_layers", "n_history_vars"], 

573 has_layers=True, 

574 n_layers=n_shell_layers, 

575 ) 

576 

577 if n_shell_layers != n_tshell_layers: 

578 msg = ( 

579 "Shells and thick shells must have the same amount " 

580 "of integration layers: {0} != {1}" 

581 ) 

582 raise RuntimeError(msg.format(n_shell_layers, n_tshell_layers)) 

583 

584 # we are tolerant here and simply add zero padding for the other 

585 # field later on 

586 new_header["neips"] = max( 

587 n_tshell_history_vars // n_tshell_layers, n_shell_history_vars // n_shell_layers 

588 ) 

589 else: 

590 new_header["neips"] = 0 

591 

592 array_dims = { 

593 ArrayType.element_shell_stress: 2, 

594 ArrayType.element_shell_effective_plastic_strain: 2, 

595 ArrayType.element_shell_history_vars: 2, 

596 ArrayType.element_tshell_stress: 2, 

597 ArrayType.element_tshell_effective_plastic_strain: 2, 

598 ArrayType.element_tshell_history_variables: 2, 

599 } 

600 n_shell_layers = self.d3plot.check_array_dims(array_dims, "n_shell_layers") 

601 self.n_shell_layers = n_shell_layers 

602 

603 # NELTH - number of thick shell elements 

604 n_thick_shells = ( 

605 self.d3plot.arrays[ArrayType.element_tshell_node_indexes].shape[0] 

606 if ArrayType.element_tshell_node_indexes in self.d3plot.arrays 

607 else 0 

608 ) 

609 new_header["nelth"] = n_thick_shells 

610 

611 # IOSHL1 - shell & solid stress flag 

612 if ( 

613 ArrayType.element_shell_stress in self.d3plot.arrays 

614 or ArrayType.element_tshell_stress in self.d3plot.arrays 

615 ): 

616 new_header["ioshl1"] = 1000 

617 else: 

618 # if either stress or pstrain is written for solids 

619 # the whole block of 7 basic variables is always written 

620 # to the file 

621 if ( 

622 ArrayType.element_solid_stress in self.d3plot.arrays 

623 or ArrayType.element_solid_effective_plastic_strain in self.d3plot.arrays 

624 ): 

625 new_header["ioshl1"] = 999 

626 else: 

627 new_header["ioshl1"] = 0 

628 

629 if n_shells == 0 and n_thick_shells == 0 and n_solids == 0: 

630 new_header["ioshl1"] = ( 

631 self.d3plot.header.raw_header["ioshl1"] 

632 if "ioshl1" in self.d3plot.header.raw_header 

633 else 0 

634 ) 

635 

636 if n_shells == 0 and n_thick_shells == 0 and n_solids != 0: 

637 if ( 

638 "ioshl1" in self.d3plot.header.raw_header 

639 and self.d3plot.header.raw_header["ioshl1"] == 1000 

640 ): 

641 new_header["ioshl1"] = 1000 

642 

643 # IOSHL2 - shell & solid pstrain flag 

644 if ( 

645 ArrayType.element_shell_effective_plastic_strain in self.d3plot.arrays 

646 or ArrayType.element_tshell_effective_plastic_strain in self.d3plot.arrays 

647 ): 

648 new_header["ioshl2"] = 1000 

649 else: 

650 if ArrayType.element_solid_effective_plastic_strain in self.d3plot.arrays: 

651 new_header["ioshl2"] = 999 

652 else: 

653 new_header["ioshl2"] = 0 

654 

655 if n_shells == 0 and n_thick_shells == 0 and n_solids == 0: 

656 new_header["ioshl2"] = ( 

657 self.d3plot.header.raw_header["ioshl2"] 

658 if "ioshl2" in self.d3plot.header.raw_header 

659 else 0 

660 ) 

661 

662 if n_shells == 0 and n_thick_shells == 0 and n_solids != 0: 

663 if ( 

664 "ioshl2" in self.d3plot.header.raw_header 

665 and self.d3plot.header.raw_header["ioshl2"] == 1000 

666 ): 

667 new_header["ioshl2"] = 1000 

668 

669 # IOSHL3 - shell forces flag 

670 if ( 

671 ArrayType.element_shell_shear_force in self.d3plot.arrays 

672 or ArrayType.element_shell_bending_moment in self.d3plot.arrays 

673 or ArrayType.element_shell_normal_force in self.d3plot.arrays 

674 ): 

675 new_header["ioshl3"] = 1000 

676 else: 

677 # See https://github.com/open-lasso-python/lasso-python/issues/39 

678 if ( 

679 ArrayType.element_shell_thickness in self.d3plot.arrays 

680 or ArrayType.element_shell_internal_energy in self.d3plot.arrays 

681 ): 

682 new_header["ioshl3"] = 999 

683 else: 

684 new_header["ioshl3"] = 0 

685 

686 if n_shells == 0: 

687 new_header["ioshl3"] = ( 

688 self.d3plot.header.raw_header["ioshl3"] 

689 if "ioshl3" in self.d3plot.header.raw_header 

690 else 0 

691 ) 

692 

693 # IOSHL4 - shell energy+2 unknown+thickness flag 

694 if ( 

695 ArrayType.element_shell_thickness in self.d3plot.arrays 

696 or ArrayType.element_shell_unknown_variables in self.d3plot.arrays 

697 or ArrayType.element_shell_internal_energy in self.d3plot.arrays 

698 ): 

699 new_header["ioshl4"] = 1000 

700 else: 

701 # new_header["ioshl4"] = 999 

702 new_header["ioshl4"] = 0 

703 

704 if n_shells == 0: 

705 new_header["ioshl4"] = ( 

706 self.d3plot.header.raw_header["ioshl4"] 

707 if "ioshl4" in self.d3plot.header.raw_header 

708 else 0 

709 ) 

710 

711 # IDTDT - Flags for various data in the database 

712 new_header["idtdt"] = 0 

713 istrn = 0 

714 if ( 

715 ArrayType.element_shell_strain in self.d3plot.arrays 

716 or ArrayType.element_solid_strain in self.d3plot.arrays 

717 or ArrayType.element_tshell_strain in self.d3plot.arrays 

718 ): 

719 # new_header["idtdt"] = 10000 

720 istrn = 1 

721 new_header["istrn"] = istrn 

722 

723 if ArrayType.node_temperature_gradient in self.d3plot.arrays: 

724 new_header["idtdt"] += 1 

725 self.has_node_temperature_gradient = True 

726 if ( 

727 ArrayType.node_residual_forces in self.d3plot.arrays 

728 or ArrayType.node_residual_moments in self.d3plot.arrays 

729 ): 

730 new_header["idtdt"] += 10 

731 self.has_node_residual_forces = True 

732 self.has_node_residual_moments = True 

733 if ( 

734 ArrayType.element_shell_plastic_strain_tensor in self.d3plot.arrays 

735 or ArrayType.element_solid_plastic_strain_tensor in self.d3plot.arrays 

736 ): 

737 new_header["idtdt"] += 100 

738 self.has_plastic_strain_tensor = True 

739 if ( 

740 ArrayType.element_shell_thermal_strain_tensor in self.d3plot.arrays 

741 or ArrayType.element_solid_thermal_strain_tensor in self.d3plot.arrays 

742 ): 

743 new_header["idtdt"] += 1000 

744 self.has_thermal_strain_tensor = True 

745 if new_header["idtdt"] > 100 and new_header["istrn"]: 

746 new_header["idtdt"] += 10000 

747 

748 # info of element deletion is encoded into maxint ... 

749 element_deletion_arrays = [ 

750 ArrayType.element_beam_is_alive, 

751 ArrayType.element_shell_is_alive, 

752 ArrayType.element_tshell_is_alive, 

753 ArrayType.element_solid_is_alive, 

754 ] 

755 mdlopt = 0 

756 if any(name in self.d3plot.arrays for name in element_deletion_arrays): 

757 mdlopt = 2 

758 elif ArrayType.node_is_alive in self.d3plot.arrays: 

759 mdlopt = 1 

760 self.mdlopt = mdlopt 

761 

762 # MAXINT - shell integration layer count 

763 array_dims = { 

764 ArrayType.element_shell_stress: 2, 

765 ArrayType.element_shell_effective_plastic_strain: 2, 

766 ArrayType.element_shell_history_vars: 2, 

767 ArrayType.element_tshell_stress: 2, 

768 ArrayType.element_tshell_effective_plastic_strain: 2, 

769 } 

770 n_shell_layers = self.d3plot.check_array_dims(array_dims, "n_layers") 

771 

772 # beauty fix: take old shell layers if none exist 

773 if n_shell_layers == 0: 

774 n_shell_layers = self.d3plot.header.n_shell_tshell_layers 

775 

776 if mdlopt == 0: 

777 new_header["maxint"] = n_shell_layers 

778 elif mdlopt == 1: 

779 new_header["maxint"] = -n_shell_layers 

780 elif mdlopt == 2: 

781 new_header["maxint"] = -(n_shell_layers + 10000) 

782 

783 # NV2D - shell variable count 

784 has_shell_stress = new_header["ioshl1"] == 1000 

785 has_shell_pstrain = new_header["ioshl2"] == 1000 

786 has_shell_forces = new_header["ioshl3"] == 1000 

787 has_shell_other = new_header["ioshl4"] == 1000 

788 new_header["nv2d"] = ( 

789 n_shell_layers * (6 * has_shell_stress + has_shell_pstrain + new_header["neips"]) 

790 + 8 * has_shell_forces 

791 + 4 * has_shell_other 

792 + 12 * istrn 

793 + n_shell_layers * self.has_plastic_strain_tensor * 6 

794 + self.has_thermal_strain_tensor * 6 

795 ) 

796 

797 # NMSPH - number of sph nodes 

798 new_header["nmsph"] = ( 

799 len(self.d3plot.arrays[ArrayType.sph_node_indexes]) 

800 if ArrayType.sph_node_indexes in self.d3plot.arrays 

801 else 0 

802 ) 

803 

804 # NGPSPH - number of sph materials 

805 new_header["ngpsph"] = ( 

806 len(np.unique(self.d3plot.arrays[ArrayType.sph_node_material_index])) 

807 if ArrayType.sph_node_material_index in self.d3plot.arrays 

808 else 0 

809 ) 

810 

811 # NUMMATT - thick shell material count 

812 required_arrays = [ 

813 ArrayType.element_tshell_node_indexes, 

814 ArrayType.element_tshell_part_indexes, 

815 ] 

816 _check_array_occurrence( 

817 self.d3plot, 

818 array_names=required_arrays, 

819 required_array_names=required_arrays, 

820 ) 

821 if ArrayType.element_tshell_part_indexes in self.d3plot.arrays: 

822 part_indexes = self.d3plot.arrays[ArrayType.element_tshell_part_indexes] 

823 unique_part_indexes = np.unique(part_indexes) 

824 new_header["nummatt"] = len(unique_part_indexes) 

825 

826 self.unique_tshell_part_indexes = unique_part_indexes 

827 

828 # max_index = unique_part_indexes.max() + 1 \ 

829 # if len(part_indexes) else 0 

830 # new_header["nmmat"] = max(new_header["nmmat"], 

831 # max_index) 

832 else: 

833 new_header["nummatt"] = 0 

834 

835 # NV3DT 

836 new_header["nv3dt"] = ( 

837 n_shell_layers * (6 * has_shell_stress + has_shell_pstrain + new_header["neips"]) 

838 + 12 * istrn 

839 ) 

840 

841 # IALEMAT - number of ALE materials 

842 new_header["ialemat"] = ( 

843 len(self.d3plot.arrays[ArrayType.ale_material_ids]) 

844 if ArrayType.ale_material_ids in self.d3plot.arrays 

845 else 0 

846 ) 

847 # NCFDV1 

848 new_header["ncfdv1"] = 0 

849 

850 # NCFDV2 

851 new_header["ncfdv2"] = 0 

852 

853 # NADAPT - number of adapted element to parent pairs ?!? 

854 new_header["ncfdv2"] = 0 

855 

856 # NUMRBS (written to numbering header) 

857 if ArrayType.rigid_body_coordinates in self.d3plot.arrays: 

858 array = self.d3plot.arrays[ArrayType.rigid_body_coordinates] 

859 if array.ndim != 3: 

860 msg = "Array '{0}' was expected to have {1} dimensions ({2})." 

861 raise ValueError( 

862 msg.format( 

863 ArrayType.rigid_wall_force, 

864 3, 

865 ",".join(["n_timesteps", "n_rigid_bodies", "x_y_z"]), 

866 ) 

867 ) 

868 new_header["numrbs"] = array.shape[1] 

869 else: 

870 new_header["numrbs"] = 0 

871 

872 # NMMAT - material count (very complicated stuff ...) 

873 tmp_nmmat = ( 

874 new_header["nummat2"] 

875 + new_header["nummat4"] 

876 + new_header["nummat8"] 

877 + new_header["nummatt"] 

878 + new_header["numrbs"] 

879 ) 

880 if ( 

881 ArrayType.part_ids in self.d3plot.arrays 

882 or ArrayType.part_internal_energy in self.d3plot.arrays 

883 or ArrayType.part_kinetic_energy in self.d3plot.arrays 

884 or ArrayType.part_mass in self.d3plot.arrays 

885 or ArrayType.part_velocity in self.d3plot.arrays 

886 ): 

887 

888 tmp_nmmat2 = self.d3plot.check_array_dims( 

889 { 

890 ArrayType.part_ids: 0, 

891 ArrayType.part_internal_energy: 1, 

892 ArrayType.part_kinetic_energy: 1, 

893 ArrayType.part_mass: 1, 

894 ArrayType.part_velocity: 1, 

895 }, 

896 "n_parts", 

897 ) 

898 

899 new_header["nmmat"] = tmp_nmmat2 

900 

901 # FIX 

902 # ... 

903 if new_header["nmmat"] > tmp_nmmat: 

904 new_header["numrbs"] = ( 

905 new_header["nmmat"] 

906 - new_header["nummat2"] 

907 - new_header["nummat4"] 

908 - new_header["nummat8"] 

909 - new_header["nummatt"] 

910 ) 

911 else: 

912 new_header["nmmat"] = tmp_nmmat 

913 

914 # NARBS - words for arbitrary numbering of everything 

915 # requires nmmat thus it was placed here 

916 new_header["narbs"] = ( 

917 new_header["numnp"] 

918 + new_header["nel8"] 

919 + new_header["nel2"] 

920 + new_header["nel4"] 

921 + new_header["nelth"] 

922 + 3 * new_header["nmmat"] 

923 ) 

924 # narbs header data 

925 if ArrayType.part_ids in self.d3plot.arrays: 

926 new_header["narbs"] += 16 

927 else: 

928 new_header["narbs"] += 10 

929 

930 # NGLBV - number of global variables 

931 n_rigid_wall_vars = 0 

932 n_rigid_walls = 0 

933 if ArrayType.rigid_wall_force in self.d3plot.arrays: 

934 n_rigid_wall_vars = 1 

935 array = self.d3plot.arrays[ArrayType.rigid_wall_force] 

936 if array.ndim != 2: 

937 msg = "Array '{0}' was expected to have {1} dimensions ({2})." 

938 raise ValueError( 

939 msg.format( 

940 ArrayType.rigid_wall_force, 2, ",".join(["n_timesteps", "n_rigid_walls"]) 

941 ) 

942 ) 

943 n_rigid_walls = array.shape[1] 

944 if ArrayType.rigid_wall_position in self.d3plot.arrays: 

945 n_rigid_wall_vars = 4 

946 array = self.d3plot.arrays[ArrayType.rigid_wall_position] 

947 if array.ndim != 3: 

948 msg = "Array '{0}' was expected to have {1} dimensions ({2})." 

949 raise ValueError( 

950 msg.format( 

951 ArrayType.rigid_wall_position, 

952 3, 

953 ",".join(["n_timesteps", "n_rigid_walls", "x_y_z"]), 

954 ) 

955 ) 

956 n_rigid_walls = array.shape[1] 

957 

958 new_header["n_rigid_walls"] = n_rigid_walls 

959 new_header["n_rigid_wall_vars"] = n_rigid_wall_vars 

960 n_global_variables = 0 

961 if ArrayType.global_kinetic_energy in self.d3plot.arrays: 

962 n_global_variables = 1 

963 if ArrayType.global_internal_energy in self.d3plot.arrays: 

964 n_global_variables = 2 

965 if ArrayType.global_total_energy in self.d3plot.arrays: 

966 n_global_variables = 3 

967 if ArrayType.global_velocity in self.d3plot.arrays: 

968 n_global_variables = 6 

969 if ArrayType.part_internal_energy in self.d3plot.arrays: 

970 n_global_variables = 6 + 1 * new_header["nmmat"] 

971 if ArrayType.part_kinetic_energy in self.d3plot.arrays: 

972 n_global_variables = 6 + 2 * new_header["nmmat"] 

973 if ArrayType.part_velocity in self.d3plot.arrays: 

974 n_global_variables = 6 + 5 * new_header["nmmat"] 

975 if ArrayType.part_mass in self.d3plot.arrays: 

976 n_global_variables = 6 + 6 * new_header["nmmat"] 

977 if ArrayType.part_hourglass_energy in self.d3plot.arrays: 

978 n_global_variables = 6 + 7 * new_header["nmmat"] 

979 if n_rigid_wall_vars * n_rigid_walls != 0: 

980 n_global_variables = 6 + 7 * new_header["nmmat"] + n_rigid_wall_vars * n_rigid_walls 

981 new_header["nglbv"] = n_global_variables 

982 

983 # NUMFLUID - total number of ALE fluid groups 

984 new_header["numfluid"] = 0 

985 

986 # INN - Invariant node numbering fore shell and solid elements 

987 if self.d3plot.header.has_invariant_numbering: 

988 if "inn" in self.d3plot.header.raw_header and self.d3plot.header.raw_header["inn"] != 0: 

989 new_header["inn"] = self.d3plot.header.raw_header["inn"] 

990 else: 

991 new_header["inn"] = int(self.d3plot.header.has_invariant_numbering) 

992 else: 

993 new_header["inn"] = 0 

994 

995 # NPEFG 

996 airbag_arrays = [ 

997 ArrayType.airbags_first_particle_id, 

998 ArrayType.airbags_n_particles, 

999 ArrayType.airbags_ids, 

1000 ArrayType.airbags_n_gas_mixtures, 

1001 ArrayType.airbags_n_chambers, 

1002 ArrayType.airbag_n_active_particles, 

1003 ArrayType.airbag_bag_volume, 

1004 ArrayType.airbag_particle_gas_id, 

1005 ArrayType.airbag_particle_chamber_id, 

1006 ArrayType.airbag_particle_leakage, 

1007 ArrayType.airbag_particle_mass, 

1008 ArrayType.airbag_particle_radius, 

1009 ArrayType.airbag_particle_spin_energy, 

1010 ArrayType.airbag_particle_translation_energy, 

1011 ArrayType.airbag_particle_nearest_segment_distance, 

1012 ArrayType.airbag_particle_position, 

1013 ArrayType.airbag_particle_velocity, 

1014 ] 

1015 subver = 3 if any(name in self.d3plot.arrays for name in airbag_arrays) else 0 

1016 

1017 # subver overwrite 

1018 if self.d3plot.header.n_airbags: 

1019 # pylint: disable = protected-access 

1020 subver = self.d3plot._airbag_info.subver 

1021 

1022 n_partgas = ( 

1023 len(self.d3plot.arrays[ArrayType.airbags_ids]) 

1024 if ArrayType.airbags_ids in self.d3plot.arrays 

1025 else 0 

1026 ) 

1027 

1028 new_header["npefg"] = 1000 * subver + n_partgas 

1029 

1030 # NEL48 - extra nodes for 8 node shell elements 

1031 required_arrays = [ 

1032 ArrayType.element_shell_node8_element_index, 

1033 ArrayType.element_shell_node8_extra_node_indexes, 

1034 ] 

1035 _check_array_occurrence( 

1036 self.d3plot, 

1037 array_names=required_arrays, 

1038 required_array_names=required_arrays, 

1039 ) 

1040 new_header["nel48"] = ( 

1041 len(self.d3plot.arrays[ArrayType.element_shell_node8_element_index]) 

1042 if ArrayType.element_shell_node8_element_index in self.d3plot.arrays 

1043 else 0 

1044 ) 

1045 

1046 # NEL20 - 20 nodes solid elements 

1047 required_arrays = [ 

1048 ArrayType.element_solid_node20_element_index, 

1049 ArrayType.element_solid_node20_extra_node_indexes, 

1050 ] 

1051 _check_array_occurrence( 

1052 self.d3plot, 

1053 array_names=required_arrays, 

1054 required_array_names=required_arrays, 

1055 ) 

1056 if ArrayType.element_solid_node20_element_index in self.d3plot.arrays: 

1057 new_header["nel20"] = len( 

1058 self.d3plot.arrays[ArrayType.element_solid_node20_element_index] 

1059 ) 

1060 else: 

1061 new_header["nel20"] = 0 

1062 

1063 # NT3D - thermal solid data 

1064 if ArrayType.element_solid_thermal_data in self.d3plot.arrays: 

1065 new_header["nt3d"] = len(self.d3plot.arrays[ArrayType.element_solid_thermal_data]) 

1066 else: 

1067 new_header["nt3d"] = 0 

1068 

1069 # NEL27 - 27 node solid elements 

1070 required_arrays = [ 

1071 ArrayType.element_solid_node27_element_index, 

1072 ArrayType.element_solid_node27_extra_node_indexes, 

1073 ] 

1074 _check_array_occurrence( 

1075 self.d3plot, 

1076 array_names=required_arrays, 

1077 required_array_names=required_arrays, 

1078 ) 

1079 if ArrayType.element_solid_node27_element_index in self.d3plot.arrays: 

1080 new_header["nel27"] = len( 

1081 self.d3plot.arrays[ArrayType.element_solid_node27_element_index] 

1082 ) 

1083 else: 

1084 new_header["nel27"] = 0 

1085 

1086 # EXTRA - extra header variables 

1087 # set only if any value is non-zero 

1088 extra_hdr_variables = ["nel20", "nt3d", "nel27", "neipb"] 

1089 if any(new_header[name] for name in extra_hdr_variables): 

1090 new_header["extra"] = 64 

1091 else: 

1092 new_header["extra"] = 0 

1093 

1094 # CHECKS 

1095 

1096 # unique part indexes all ok 

1097 for part_index in self.unique_beam_part_indexes: 

1098 if part_index >= new_header["nmmat"]: 

1099 msg = "{0} part index {1} is larger than number of materials {2}" 

1100 raise ValueError(msg.format("beam", part_index, new_header["nmmat"])) 

1101 for part_index in self.unique_shell_part_indexes: 

1102 if part_index >= new_header["nmmat"]: 

1103 msg = "{0} part index {1} is larger than number of materials {2}" 

1104 raise ValueError(msg.format("shell", part_index, new_header["nmmat"])) 

1105 for part_index in self.unique_solid_part_indexes: 

1106 if part_index >= new_header["nmmat"]: 

1107 msg = "{0} part index {1} is larger than number of materials {2}" 

1108 raise ValueError(msg.format("solid", part_index, new_header["nmmat"])) 

1109 for part_index in self.unique_tshell_part_indexes: 

1110 if part_index >= new_header["nmmat"]: 

1111 msg = "{0} part index {1} is larger than number of materials {2}" 

1112 raise ValueError(msg.format("tshell", part_index, new_header["nmmat"])) 

1113 

1114 # new header 

1115 self._header = new_header 

1116 

1117 # pylint: disable = too-many-return-statements 

def pack(self, value: Any, size=None, dtype_hint=None) -> bytes:
    """Pack a python value into bytes according to this instance's settings

    Parameters
    ----------
    value: Any
        integer, float, str, bytes, bytearray or np.ndarray value
    size: int
        size in bytes (truncates bytes-like values, pads/limits strings)
    dtype_hint: `np.integer` or `np.floating` (default: None)
        dtype hint for numpy arrays (prevents wrong casting)

    Returns
    -------
    bytes: bytes
        value packed in bytes

    Raises
    ------
    RuntimeError
        If the type cannot be serialized for being unknown.
    ValueError
        If an ndarray needs a cast but no dtype_hint was given.
    """

    assert dtype_hint in (None, np.integer, np.floating)

    # INT
    if isinstance(value, self._allowed_int_types):
        return struct.pack(self.itype, value)
    # FLOAT
    if isinstance(value, self._allowed_float_types):
        return struct.pack(self.ftype, value)
    # BYTES
    if isinstance(value, bytes):
        if size and len(value) > size:
            return value[:size]
        return value
    # BYTEARRAY
    if isinstance(value, bytearray):
        if size and len(value) > size:
            return bytes(value[:size])
        return bytes(value)
    # STRING
    if isinstance(value, str):
        if size:
            # left-justified, space-padded to `size` characters
            fmt = "{0:" + str(size) + "}"
            return fmt.format(value).encode(self._str_codec)

        return value.encode(self._str_codec)
    # ARRAY
    if isinstance(value, np.ndarray):

        if (value.dtype != self.ftype and dtype_hint == np.floating) or (
            value.dtype != self.itype and dtype_hint == np.integer
        ):

            # we need a typehint to know the intended target dtype
            if dtype_hint is None:
                msg = "Please specify a dtype_hint (np.floating, np.integer)."
                raise ValueError(msg)

            # determine new dtype
            new_dtype = self.itype if dtype_hint == np.integer else self.ftype

            # log conversion
            LOGGER.info("Converting array from %s to %s", value.dtype, new_dtype)

            # warn if converting between int and float (possible bugs)
            # BUGFIX: previously re-logged the generic conversion message;
            # now states the actual problem.
            if not np.issubdtype(value.dtype, dtype_hint):
                LOGGER.warning(
                    "Array cast crosses int/float boundary: %s to %s", value.dtype, new_dtype
                )

            value = value.astype(new_dtype)

        return value.tobytes()

    # BUGFIX: the message was previously passed unformatted to RuntimeError
    # together with its arguments, printing a raw tuple instead of a message.
    msg = "Cannot serialize type '{0}' of value '{1}' for writing."
    raise RuntimeError(msg.format(type(value), value))

1195 

def count_array_state_var(
    self, array_type: str, dimension_names: List[str], has_layers: bool, n_layers: int = 0
) -> Tuple[int, int]:
    """Check and update the variable count for one kind of state array

    Parameters
    ----------
    array_type: str
        name of the shell layer array
    dimension_names: List[str]
        names of the array dimensions
    has_layers: bool
        if the array has integration layers
    n_layers: int
        number of (previous) shell layers, if unknown set to 0

    Returns
    -------
    n_vars: int
        variable count
    n_layers: int
        number of layers

    Raises
    ------
    ValueError
        If the array has unexpected dimensions or an inconsistent
        number of integration layers was detected.
    """

    # missing array contributes no variables and leaves layers untouched
    if array_type not in self.d3plot.arrays:
        return 0, n_layers

    array = self.d3plot.arrays[array_type]

    expected_ndim = len(dimension_names)
    if array.ndim != expected_ndim:
        msg = "Array '{0}' was expected to have {1} dimensions ({2})."
        raise ValueError(msg.format(array_type, expected_ndim, ", ".join(dimension_names)))

    # no layers: the trailing variable dimension may be collapsed
    if not has_layers:
        return (1, n_layers) if array.ndim == 2 else (array.shape[2], n_layers)

    layer_count = array.shape[2]
    if n_layers == 0:
        n_layers = layer_count
    elif n_layers != layer_count:
        msg = (
            "Array '{0}' has '{1}' integration layers"
            " but another array used '{2}'."
        )
        raise ValueError(msg.format(array_type, layer_count, n_layers))

    # with layers: the trailing variable dimension may be collapsed too
    n_vars = n_layers if array.ndim == 3 else array.shape[3] * n_layers
    return n_vars, n_layers

1264 

1265 

@dataclass
class MemoryInfo:
    """MemoryInfo contains info about memory regions in files"""

    # byte offset of the region within the file
    start: int = 0
    # length of the region in bytes
    length: int = 0
    # path of the file this region belongs to
    filepath: str = ""
    # number of d3plot states contained in this region
    n_states: int = 0
    # total size of the file in bytes
    filesize: int = 0
    # whether the region should be memory-mapped instead of read
    use_mmap: bool = False

1276 

1277 

class FemzipInfo:
    """FemzipInfo contains information and wrappers for the femzip api"""

    api: FemzipAPI
    n_states: int = 0
    buffer_info: FemzipBufferInfo
    use_femzip: bool = False

    def __init__(self, filepath: str = ""):
        """Set up the femzip wrapper and, if a file is given, probe it

        Parameters
        ----------
        filepath: str
            optional path to a d3plot; when given, its header is checked
            for the femzip indicator and the buffer info is fetched

        Raises
        ------
        RuntimeError
            If the femzip buffer info could not be obtained.
        """
        self.api = FemzipAPI()
        self.buffer_info = FemzipBufferInfo()

        if not filepath:
            return

        tmp_header = D3plotHeader().load_file(filepath)
        self.use_femzip = tmp_header.has_femzip_indicator

        if not self.use_femzip:
            return

        # there is a lot to go wrong
        try:
            self.buffer_info = self.api.get_buffer_info(filepath)
        # loading femzip api failed
        except Exception as err:
            raise RuntimeError(f"Failed to use Femzip: {err}") from err

1301 

1302 

class MaterialSectionInfo:
    """MaterialSectionInfo contains vars from the material section"""

    # count of rigid shells; presumably shells whose material is
    # rigid (material type data) — TODO confirm against the reader
    n_rigid_shells: int = 0

1307 

1308 

class SphSectionInfo:
    """SphSectionInfo contains vars from the sph geometry section"""

    # word length of the sph section itself
    n_sph_array_length: int = 11
    # number of state variables per sph particle
    n_sph_vars: int = 0
    # flags for which optional per-particle quantities were written
    has_influence_radius: bool = False
    has_particle_pressure: bool = False
    has_stresses: bool = False
    has_plastic_strain: bool = False
    has_material_density: bool = False
    has_internal_energy: bool = False
    has_n_affecting_neighbors: bool = False
    has_strain_and_strainrate: bool = False
    has_true_strains: bool = False
    has_mass: bool = False
    # number of additional sph history variables
    n_sph_history_vars: int = 0

1325 

1326 

class AirbagInfo:
    """AirbagInfo holds the counters describing the airbag section"""

    # variable counts of the individual airbag data groups
    n_geometric_variables: int = 0
    n_airbag_state_variables: int = 0
    n_particle_state_variables: int = 0
    # entity counts
    n_particles: int = 0
    n_airbags: int = 0
    # ?
    subver: int = 0
    n_chambers: int = 0

    def get_n_variables(self) -> int:
        """Get the number of airbag variables

        Returns
        -------
        n_airbag_vars: int
            total of geometric, particle state and airbag state variables
        """
        return sum(
            (
                self.n_geometric_variables,
                self.n_particle_state_variables,
                self.n_airbag_state_variables,
            )
        )

1352 

1353 

class NumberingInfo:
    """NumberingInfo contains vars from the part numbering section (ids)"""

    # the value(s) of ptr is initialized
    # as 1 since we need to make it
    # negative if part_ids are written
    # to file and 0 cannot do that ...
    # This is ok for self-made D3plots
    # since these fields are unused anyway
    ptr_node_ids: int = 1
    # whether material ids were written to the file
    has_material_ids: bool = False
    # pointers (word offsets) into the numbering section
    ptr_solid_ids: int = 1
    ptr_beam_ids: int = 1
    ptr_shell_ids: int = 1
    ptr_thick_shell_ids: int = 1
    # entity counts as stated in the numbering section
    n_nodes: int = 0
    n_solids: int = 0
    n_beams: int = 0
    n_shells: int = 0
    n_thick_shells: int = 0
    # pointers to the material id tables
    ptr_material_ids: int = 1
    ptr_material_ids_defined_order: int = 1
    ptr_material_ids_crossref: int = 1
    # part counts; difference of n_parts vs n_parts2 not evident
    # from this section alone — verify against the reader
    n_parts: int = 0
    n_parts2: int = 0
    n_rigid_bodies: int = 0

1380 

1381 

@dataclass
class RigidBodyMetadata:
    """RigidBodyMetadata contains vars from the rigid body metadata section.
    This section comes before the individual rigid body data.
    """

    # internal number of the rigid body
    internal_number: int
    # number of nodes belonging to the rigid body
    n_nodes: int
    # indexes of the nodes of this rigid body
    node_indexes: np.ndarray
    # number of active nodes
    n_active_nodes: int
    # indexes of the active nodes
    active_node_indexes: np.ndarray

1393 

1394 

class RigidBodyInfo:
    """RigidBodyInfo contains vars for the individual rigid bodies

    Note: the original docstring named RigidBodyMetadata; this class
    aggregates those metadata records.
    """

    # metadata record for every rigid body
    rigid_body_metadata_list: Iterable[RigidBodyMetadata]
    # number of rigid bodies
    n_rigid_bodies: int = 0

    def __init__(
        self, rigid_body_metadata_list: Iterable[RigidBodyMetadata], n_rigid_bodies: int = 0
    ):
        # store the per-body metadata and the overall count
        self.rigid_body_metadata_list = rigid_body_metadata_list
        self.n_rigid_bodies = n_rigid_bodies

1406 

1407 

class RigidRoadInfo:
    """RigidRoadInfo contains metadata for the description of rigid roads"""

    n_nodes: int = 0
    n_road_segments: int = 0
    n_roads: int = 0
    # ?
    motion: int = 0

    def __init__(
        self, n_nodes: int = 0, n_road_segments: int = 0, n_roads: int = 0, motion: int = 0
    ):
        """Store the rigid road counters"""
        self.n_nodes, self.n_road_segments = n_nodes, n_road_segments
        self.n_roads, self.motion = n_roads, motion

1424 

1425 

class StateInfo:
    """StateInfo holds metadata for states, currently just the timestep count."""

    n_timesteps: int = 0

    def __init__(self, n_timesteps: int = 0):
        """Remember how many timesteps were loaded"""
        self.n_timesteps = n_timesteps

1435 

1436 

1437class D3plot: 

1438 """Class used to read LS-Dyna d3plots""" 

1439 

1440 _header: D3plotHeader 

1441 _femzip_info: FemzipInfo 

1442 _material_section_info: MaterialSectionInfo 

1443 _sph_info: SphSectionInfo 

1444 _airbag_info: AirbagInfo 

1445 _numbering_info: NumberingInfo 

1446 _rigid_body_info: RigidBodyInfo 

1447 _rigid_road_info: RigidRoadInfo 

1448 _buffer: Union[BinaryBuffer, None] = None 

1449 

1450 # we all love secret settings 

1451 use_advanced_femzip_api: bool = False 

1452 

1453 # This amount of args is needed 

1454 # pylint: disable = too-many-arguments, too-many-statements, unused-argument 

    def __init__(
        self,
        filepath: str = None,
        use_femzip: Union[bool, None] = None,
        n_files_to_load_at_once: Union[int, None] = None,
        state_array_filter: Union[List[str], None] = None,
        state_filter: Union[None, Set[int]] = None,
        buffered_reading: bool = False,
    ):
        """Constructor for a D3plot

        Parameters
        ----------
        filepath: str
            path to a d3plot file
        use_femzip: bool
            Not used anymore.
        n_files_to_load_at_once: int
            *DEPRECATED* not used anymore, use `buffered_reading`
        state_array_filter: Union[List[str], None]
            names of arrays which will be the only ones loaded from state data
        state_filter: Union[None, Set[int]]
            which states to load. Negative indexes count backwards.
        buffered_reading: bool
            whether to pull only a single state into memory during reading

        Raises
        ------
        DeprecationWarning
            If the removed `n_files_to_load_at_once` argument is used.

        Examples
        --------
        >>> from lasso.dyna import D3plot, ArrayType
        >>> # open and read everything
        >>> d3plot = D3plot("path/to/d3plot")

        >>> # only read node displacement
        >>> d3plot = D3plot("path/to/d3plot", state_array_filter=["node_displacement"])
        >>> # or with nicer syntax
        >>> d3plot = D3plot("path/to/d3plot", state_array_filter=[ArrayType.node_displacement])

        >>> # only load first and last state
        >>> d3plot = D3plot("path/to/d3plot", state_filter={0, -1})

        >>> # our computer lacks RAM so lets extract a specific array
        >>> # but only keep one state at a time in memory
        >>> d3plot = D3plot("path/to/d3plot",
        >>>                 state_array_filter=[ArrayType.node_displacement],
        >>>                 buffered_reading=True)

        Notes
        -----
        If dyna wrote multiple files for several states,
        only give the path to the first file.
        """
        super().__init__()

        LOGGER.debug("-------- D 3 P L O T --------")

        # empty defaults for all section metadata; the femzip probe
        # already inspects the file header if a filepath is given
        self._arrays = {}
        self._header = D3plotHeader()
        self._femzip_info = FemzipInfo(filepath=filepath if filepath is not None else "")
        self._material_section_info = MaterialSectionInfo()
        self._sph_info = SphSectionInfo()
        self._airbag_info = AirbagInfo()
        self._numbering_info = NumberingInfo()
        self._rigid_body_info = RigidBodyInfo(rigid_body_metadata_list=tuple())
        self._rigid_road_info = RigidRoadInfo()
        self._state_info = StateInfo()

        # which states to load
        self.state_filter = state_filter

        # how many files to load into memory at once
        if n_files_to_load_at_once is not None:
            warn_msg = "D3plot argument '{0}' is deprecated. Please use '{1}=True'."
            raise DeprecationWarning(warn_msg.format("n_files_to_load_at_once", "buffered_reading"))
        # a non-empty state filter forces buffered reading
        self.buffered_reading = buffered_reading or (state_filter is not None and any(state_filter))

        # arrays to filter out
        self.state_array_filter = state_array_filter

        # load memory accordingly
        # no femzip
        if filepath and not self._femzip_info.use_femzip:
            self._buffer = BinaryBuffer(filepath)
            self.bb_states = None
        # femzip
        elif filepath and self._femzip_info.use_femzip:
            self._buffer = self._read_femzip_geometry(filepath)
            # we need to reload the header
            self._header = D3plotHeader().load_file(self._buffer)
            self.bb_states = None
        # no data to load basically
        else:
            self._buffer = None
            self.bb_states = None

        self.geometry_section_size = 0

        # read header
        self._read_header()

        # read geometry
        self._parse_geometry()

        # read state data

        # try advanced femzip api
        if (
            filepath
            and self._femzip_info.use_femzip
            and self.use_advanced_femzip_api
            and self._femzip_info.api.has_femunziplib_license()
        ):

            LOGGER.debug("Advanced FEMZIP-API used")
            try:
                self._read_states_femzip_advanced(
                    filepath,
                )
            except Exception:
                trace = traceback.format_exc()
                warn_msg = (
                    "Error when using advanced Femzip API, "
                    "falling back to normal but slower Femzip API.\n%s"
                )
                LOGGER.warning(warn_msg, trace)

                # since we had a crash, we need to reload the file
                # to be sure we don't crash again
                self._femzip_info.api.close_current_file()
                self._femzip_info.api.read_geometry(filepath, self._femzip_info.buffer_info, False)
                # try normal femzip api
                self._read_states(filepath)
            finally:
                self._femzip_info.api.close_current_file()

        # normal state reading (femzip and non-femzip)
        elif filepath:
            self._read_states(filepath)
            if self._femzip_info.use_femzip:
                self._femzip_info.api.close_current_file()
        else:
            # no filepath = nothing to do
            pass

1597 

1598 def _read_femzip_geometry(self, filepath: str) -> BinaryBuffer: 

1599 """Read the geometry from femzip 

1600 

1601 Parameters 

1602 ---------- 

1603 filepath: str 

1604 path to the femzpi file 

1605 

1606 Returns 

1607 ------- 

1608 bb: BinaryBuffer 

1609 memory of the geometry section 

1610 """ 

1611 

1612 buffer_geo = self._femzip_info.api.read_geometry( 

1613 filepath, buffer_info=self._femzip_info.buffer_info, close_file=False 

1614 ) 

1615 

1616 # save 

1617 buffer = BinaryBuffer() 

1618 buffer.filepath_ = filepath 

1619 buffer.memoryview = buffer_geo.cast("B") 

1620 

1621 return buffer 

1622 

    @property
    def n_timesteps(self) -> int:
        """Number of timesteps loaded

        Returns
        -------
        n_timesteps: int
            number of states read into this instance
        """
        return self._state_info.n_timesteps

1627 

    @property
    def arrays(self) -> dict:
        """Dictionary holding all d3plot arrays

        Returns
        -------
        arrays: dict
            dictionary mapping array names to numpy arrays

        Notes
        -----
        The corresponding keys of the dictionary can
        also be found in `lasso.dyna.ArrayTypes`, which
        helps with IDE integration and code safety.

        Examples
        --------
        >>> d3plot = D3plot("some/path/to/d3plot")
        >>> d3plot.arrays.keys()
        dict_keys(['irbtyp', 'node_coordinates', ...])
        >>> # The following is good coding practice
        >>> from lasso.dyna import ArrayType as atypes
        >>> d3plot.arrays[atypes.node_displacement].shape
        """
        return self._arrays

1648 

1649 @arrays.setter 

1650 def arrays(self, array_dict: dict): 

1651 assert isinstance(array_dict, dict) 

1652 self._arrays = array_dict 

1653 

    @property
    def header(self) -> D3plotHeader:
        """Instance holding all d3plot header information

        Returns
        -------
        header: D3plotHeader
            header of the d3plot

        Notes
        -----
        The header contains a lot of information such as number
        of elements, precision, section sizes, etc.

        Examples
        --------
        >>> d3plot = D3plot("some/path/to/d3plot")
        >>> # number of shells
        >>> d3plot.header.n_shells
        85624
        """
        return self._header

1676 

1677 @staticmethod 

1678 def _is_end_of_file_marker( 

1679 buffer: BinaryBuffer, position: int, ftype: Union[np.float32, np.float64] 

1680 ) -> bool: 

1681 """Check for the dyna eof marker at a certain position 

1682 

1683 Parameters 

1684 ---------- 

1685 bb: BinaryBuffer 

1686 buffer holding memory 

1687 position: int 

1688 position in the buffer 

1689 ftype: Union[np.float32, np.float64] 

1690 floating point type 

1691 

1692 Returns 

1693 ------- 

1694 is_end_marker: bool 

1695 if at the position is an end marker 

1696 

1697 Notes 

1698 ----- 

1699 The end of file marker is represented by a floating point 

1700 number with the value -999999 (single precision hex: F02374C9, 

1701 double precision hex: 000000007E842EC1). 

1702 """ 

1703 

1704 if ftype not in (np.float32, np.float64): 

1705 err_msg = "Floating point type '{0}' is not a floating point type." 

1706 raise ValueError(err_msg.format(ftype)) 

1707 

1708 return buffer.read_number(position, ftype) == ftype(-999999) 

1709 

1710 def _correct_file_offset(self): 

1711 """Correct the position in the bytes 

1712 

1713 Notes 

1714 ----- 

1715 LS-Dyna writes its files zero padded at a size of 

1716 512 words in block size. There might be a lot of 

1717 unused trailing data in the rear we need to skip 

1718 in order to get to the next useful data block. 

1719 """ 

1720 

1721 if not self._buffer: 

1722 return 

1723 

1724 block_count = len(self._buffer) // (512 * self.header.wordsize) 

1725 

1726 # Warning! 

1727 # Resets the block count! 

1728 self.geometry_section_size = (block_count + 1) * 512 * self.header.wordsize 

1729 

1730 @property 

1731 def _n_parts(self) -> int: 

1732 """Get the number of parts contained in the d3plot 

1733 

1734 Returns 

1735 ------- 

1736 n_parts: int 

1737 number of total parts 

1738 """ 

1739 

1740 n_parts = ( 

1741 self.header.n_solid_materials 

1742 + self.header.n_beam_materials 

1743 + self.header.n_shell_materials 

1744 + self.header.n_thick_shell_materials 

1745 + self._numbering_info.n_rigid_bodies 

1746 ) 

1747 

1748 return n_parts 

1749 

1750 @property 

1751 def _n_rigid_walls(self) -> int: 

1752 """Get the number of rigid walls in the d3plot 

1753 

1754 Returns 

1755 ------- 

1756 n_rigid_walls: int 

1757 number of rigid walls 

1758 """ 

1759 

1760 # there have been cases that there are less than in the specs 

1761 # indicated global vars. That breaks this computation, thus we 

1762 # use max at the end. 

1763 previous_global_vars = 6 + 7 * self._n_parts 

1764 n_rigid_wall_vars = self.header.n_rigid_wall_vars 

1765 n_rigid_walls = (self.header.n_global_vars - previous_global_vars) // n_rigid_wall_vars 

1766 

1767 # if n_rigid_walls < 0: 

1768 # err_msg = "The computed number of rigid walls is negative ('{0}')." 

1769 # raise RuntimeError(err_msg.format(n_rigid_walls)) 

1770 

1771 return max(n_rigid_walls, 0) 

1772 

1773 # pylint: disable = unused-argument, too-many-locals 

    def _read_d3plot_file_generator(
        self, buffered_reading: bool, state_filter: Union[None, Set[int]]
    ) -> typing.Any:
        """Generator function for reading bare d3plot files

        Parameters
        ----------
        buffered_reading: bool
            whether to read one state at a time
        state_filter: Union[None, Set[int]]
            which states to filter out

        Yields
        ------
        buffer: BinaryBuffer
            buffer for each file
        n_states: int
            number of states from second yield on

        Notes
        -----
        Two-phase protocol: the FIRST yield returns only the number of
        selected states; every following yield returns (buffer, n_states).
        """

        # (1) STATES
        # This is dangerous. The following routine requires data from
        # several sections in the geometry part calling this too early crashes
        bytes_per_state = self._compute_n_bytes_per_state()
        file_infos = self._collect_file_infos(bytes_per_state)

        # some status
        n_files = len(file_infos)
        n_states = sum(map(lambda file_info: file_info.n_states, file_infos))
        LOGGER.debug("n_files found: %d", n_files)
        LOGGER.debug("n_states estimated: %d", n_states)

        # convert negative state indexes into positive ones
        if state_filter is not None:
            state_filter = _negative_to_positive_state_indexes(state_filter, n_states)

        # if using buffered reading, we load one state at a time
        # into memory: split every file region into one region per state
        if buffered_reading:
            file_infos_tmp: List[MemoryInfo] = []
            n_previous_states = 0
            for minfo in file_infos:
                for i_file_state in range(minfo.n_states):
                    i_global_state = n_previous_states + i_file_state

                    # do we need to skip this one
                    if state_filter and i_global_state not in state_filter:
                        continue

                    file_infos_tmp.append(
                        MemoryInfo(
                            start=minfo.start + i_file_state * bytes_per_state,
                            length=bytes_per_state,
                            filepath=minfo.filepath,
                            n_states=1,
                            filesize=minfo.filesize,
                            # mmap only pays off for multi-state files
                            use_mmap=minfo.n_states != 1,
                        )
                    )

                n_previous_states += minfo.n_states
            file_infos = file_infos_tmp

        LOGGER.debug("buffers: %s", pprint.pformat([info.__dict__ for info in file_infos]))

        # number of states and if buffered reading is used
        n_states_selected = sum(map(lambda file_info: file_info.n_states, file_infos))
        yield n_states_selected

        # unbuffered: one big read; buffered: one read per state region
        sub_file_infos = [file_infos] if not buffered_reading else [[info] for info in file_infos]
        for sub_file_info_list in sub_file_infos:
            buffer, n_states = D3plot._read_file_from_memory_info(sub_file_info_list)
            yield buffer, n_states

1847 

    def _read_femzip_file_generator(
        self, buffered_reading: bool, state_filter: Union[None, Set[int]]
    ) -> typing.Any:
        """Generator function for reading femzipped d3plot files

        Parameters
        ----------
        buffered_reading: bool
            load state by state
        state_filter: Union[None, Set[int]]
            which states to filter out

        Yields
        ------
        buffer: BinaryBuffer
            binary buffer of a file
        n_states: int
            from second yield on, number of states for buffers

        Notes
        -----
        Same two-phase protocol as the bare d3plot generator: first
        yield is the state count, then (buffer, n_states) pairs follow.
        """

        femzip_api = self._femzip_info.api

        # (1) STATES
        # number of states and if buffered reading is used
        buffer_info = self._femzip_info.buffer_info
        n_timesteps: int = buffer_info.n_timesteps

        # convert negative filter indexes
        state_filter_parsed: Set[int] = set()
        if state_filter is not None:
            state_filter_parsed = _negative_to_positive_state_indexes(state_filter, n_timesteps)
            n_states_to_load = len(state_filter)
        else:
            n_states_to_load = n_timesteps
            state_filter_parsed = set(range(n_timesteps))

        yield n_states_to_load

        # buffer either holds all timesteps at once or just one
        n_files_to_load_at_once = n_timesteps if not buffered_reading else 1
        # pylint: disable = invalid-name
        BufferStateType = ctypes.c_float * (buffer_info.size_state * n_files_to_load_at_once)
        buffer_state = BufferStateType()

        buffer = BinaryBuffer()
        buffer.memoryview = memoryview(buffer_state)

        # do the thing
        i_timesteps_read = 0
        max_timestep = max(state_filter_parsed) if state_filter_parsed else 0
        for i_timestep in range(n_timesteps):

            # buffer offset: append behind the states already kept
            buffer_current_state = buffer.memoryview[i_timesteps_read * buffer_info.size_state :]

            # read state
            femzip_api.read_single_state(i_timestep, buffer_info, state_buffer=buffer_current_state)

            # only filtered-in states advance the write position
            if i_timestep in state_filter_parsed:
                i_timesteps_read += 1

            # Note:
            # the buffer is re-used here! This saves memory BUT
            # if memory is not copied we overwrite the same again and again
            # This is ok for buffered reading thus indirectly safe
            # since elsewhere the arrays get copied but keep it in mind!
            if i_timesteps_read != 0 and i_timesteps_read % n_files_to_load_at_once == 0:
                yield buffer, i_timesteps_read
                i_timesteps_read = 0

            # stop in case we have everything we needed
            if i_timestep >= max_timestep:
                if i_timesteps_read != 0:
                    yield buffer, i_timesteps_read
                break

        # do the thing
        femzip_api.close_current_file()

1925 

    def _read_states_femzip_advanced(self, filepath: str) -> None:
        """Read d3plot state variables with the advanced femzip API

        Parameters
        ----------
        filepath: str
            path to the femzipped d3plot

        Notes
        -----
        Fills ``self.arrays`` with the decoded state arrays. Global,
        part and rigid-wall variables need the classic d3plot state
        parsers and are therefore handled separately before the rest is
        passed through :class:`FemzipMapper`. Element deletion info and
        timesteps are read in extra passes at the end.
        """

        # convert the requested d3plot array names into a set (or None = all)
        d3plot_array_filter = set(self.state_array_filter) if self.state_array_filter else None

        # ask femzip which variables the file contains
        api = self._femzip_info.api
        file_metadata = api.get_file_metadata(filepath)

        if file_metadata.number_of_timesteps <= 0:
            return

        # filter femzip vars according to requested d3plot vars
        file_metadata_filtered = filter_femzip_variables(
            file_metadata,
            d3plot_array_filter,
        )

        # read femzip arrays
        result_arrays = api.read_variables(
            file_metadata=file_metadata_filtered,
            n_parts=self.header.n_parts,
            n_rigid_walls=self._n_rigid_walls,
            n_rigid_wall_vars=self.header.n_rigid_wall_vars,
            n_airbag_particles=self._airbag_info.n_particles,
            n_airbags=self._airbag_info.n_airbags,
            state_filter=self.state_filter,
        )

        # special case arrays which need extra parsing through the
        # classic d3plot state readers; they are removed from the
        # femzip result dict afterwards
        keys_to_remove = []
        for (fz_index, fz_name, fz_cat), array in result_arrays.items():

            # global vars
            if fz_cat == FemzipVariableCategory.GLOBAL:
                keys_to_remove.append((fz_index, fz_name, fz_cat))
                self._read_states_globals(
                    state_data=array,
                    var_index=0,
                    array_dict=self.arrays,
                )

            # parts and rigid walls share one femzip category; the part
            # reader returns the offset where rigid wall data begins
            elif fz_cat == FemzipVariableCategory.PART:
                keys_to_remove.append((fz_index, fz_name, fz_cat))

                var_index = self._read_states_parts(
                    state_data=array, var_index=0, array_dict=self.arrays
                )

                self._read_states_rigid_walls(
                    state_data=array, var_index=var_index, array_dict=self.arrays
                )

        for key in keys_to_remove:
            del result_arrays[key]

        # map the remaining femzip arrays onto d3plot array names
        mapper = FemzipMapper()
        mapper.map(result_arrays)

        # save arrays
        for plt_name, arr in mapper.d3plot_arrays.items():

            # femzip sometimes stores strain in solid history vars
            # but also sometimes separately
            if (
                plt_name == ArrayType.element_solid_history_variables
                and self.header.has_element_strain
                and ArrayType.element_solid_strain not in mapper.d3plot_arrays
            ):
                # first 6 entries of the last axis are the strain tensor
                self.arrays[ArrayType.element_solid_strain] = arr[:, :, :, :6]
                tmp_array = arr[:, :, :, 6:]
                # keep the remainder only if it is non-empty in every dim
                if all(tmp_array.shape):
                    self.arrays[plt_name] = tmp_array
            else:
                self.arrays[plt_name] = arr

        # ELEMENT DELETION
        #
        # somehow element deletion info is extra ...
        # buffer_info
        buffer_info = self._femzip_info.buffer_info
        deletion_array = api.read_state_deletion_info(
            buffer_info=buffer_info, state_filter=self.state_filter
        )
        self._read_states_is_alive(state_data=deletion_array, var_index=0, array_dict=self.arrays)

        # TIMESTEPS
        timestep_array = np.array(
            [buffer_info.timesteps[i_timestep] for i_timestep in range(buffer_info.n_timesteps)],
            dtype=self.header.ftype,
        )
        self.arrays[ArrayType.global_timesteps] = timestep_array

2027 

2028 def _read_header(self): 

2029 """Read the d3plot header""" 

2030 

2031 LOGGER.debug("-------- H E A D E R --------") 

2032 

2033 if self._buffer: 

2034 self._header.load_file(self._buffer) 

2035 

2036 self.geometry_section_size = self._header.n_header_bytes 

2037 

2038 def _parse_geometry(self): 

2039 """Read the d3plot geometry""" 

2040 

2041 LOGGER.debug("------ G E O M E T R Y ------") 

2042 

2043 # read material section 

2044 self._read_material_section() 

2045 

2046 # read fluid material data 

2047 self._read_fluid_material_data() 

2048 

2049 # SPH element data flags 

2050 self._read_sph_element_data_flags() 

2051 

2052 # Particle Data 

2053 self._read_particle_data() 

2054 

2055 # Geometry Data 

2056 self._read_geometry_data() 

2057 

2058 # User Material, Node, Blabla IDs 

2059 self._read_user_ids() 

2060 

2061 # Rigid Body Description 

2062 self._read_rigid_body_description() 

2063 

2064 # Adapted Element Parent List 

2065 # manual says not implemented 

2066 

2067 # Smooth Particle Hydrodynamcis Node and Material list 

2068 self._read_sph_node_and_material_list() 

2069 

2070 # Particle Geometry Data 

2071 self._read_particle_geometry_data() 

2072 

2073 # Rigid Road Surface Data 

2074 self._read_rigid_road_surface() 

2075 

2076 # Connectivity for weirdo elements 

2077 # 10 Node Tetra 

2078 # 8 Node Shell 

2079 # 20 Node Solid 

2080 # 27 Node Solid 

2081 self._read_extra_node_connectivity() 

2082 

2083 # Header Part & Contact Interface Titles 

2084 # this is a class method since it is also needed elsewhere 

2085 self.geometry_section_size = self._read_header_part_contact_interface_titles( 

2086 self.header, 

2087 self._buffer, 

2088 self.geometry_section_size, # type: ignore 

2089 self.arrays, 

2090 ) 

2091 

2092 # Extra Data Types (for multi solver output) 

2093 # ... not supported 

2094 

    def _read_material_section(self):
        """Read the material type section

        Fills ``self._material_section_info.n_rigid_shells`` and
        ``self.arrays[ArrayType.part_material_type]``, then advances
        ``self.geometry_section_size`` past the block. On any parse
        failure the position is advanced by the nominal block size so
        subsequent sections can still be read.
        """

        if not self._buffer:
            return

        if not self.header.has_material_type_section:
            return

        LOGGER.debug("_read_material_section start at byte %d", self.geometry_section_size)

        position = self.geometry_section_size

        # failsafe: block is 2 header words + one word per part
        original_position = self.geometry_section_size
        blocksize = (2 + self.header.n_parts) * self.header.wordsize

        try:

            # Material Type Data
            #
            # "This data is required because those shell elements
            # that are in a rigid body have no element data output
            # in the state data section."
            #
            # "The normal length of the shell element state data is:
            # NEL4 * NV2D, when the MATTYP flag is set the length is:
            # (NEL4 – NUMRBE) * NV2D. When reading the shell element data,
            # the material number must be checked against IRBRTYP list to
            # find the element’s material type. If the type = 20, then
            # all the values for the element to zero." (Manual 03.2016)

            self._material_section_info.n_rigid_shells = int(
                self._buffer.read_number(position, self._header.itype)
            )  # type: ignore
            position += self.header.wordsize

            # second word repeats nmmat; used as a consistency check
            test_nummat = self._buffer.read_number(position, self._header.itype)
            position += self.header.wordsize

            if test_nummat != self.header.n_parts:
                raise RuntimeError(
                    "nmmat (header) != nmmat (material type data): "
                    f"{self.header.n_parts} != {test_nummat}",
                )

            # one material type word per part
            self.arrays[ArrayType.part_material_type] = self._buffer.read_ndarray(
                position, self.header.n_parts * self.header.wordsize, 1, self.header.itype
            )
            position += self.header.n_parts * self.header.wordsize

        except Exception:
            # log the traceback but keep going; this section is optional
            trb_msg = traceback.format_exc()
            LOGGER.warning("A failure in %s was caught:\n%s", "_read_material_section", trb_msg)

            # fix position: skip over the whole nominal block
            position = original_position + blocksize

        self.geometry_section_size = position
        LOGGER.debug("_read_material_section end at byte %d", self.geometry_section_size)

2156 

2157 def _read_fluid_material_data(self): 

2158 """Read the fluid material data""" 

2159 

2160 if not self._buffer: 

2161 return 

2162 

2163 if self.header.n_ale_materials == 0: 

2164 return 

2165 

2166 LOGGER.debug("_read_fluid_material_data start at byte %d", self.geometry_section_size) 

2167 

2168 position = self.geometry_section_size 

2169 

2170 # safety 

2171 original_position = position 

2172 blocksize = self.header.n_ale_materials * self.header.wordsize 

2173 

2174 try: 

2175 # Fluid Material Data 

2176 array_length = self.header.n_ale_materials * self.header.wordsize 

2177 self.arrays[ArrayType.ale_material_ids] = self._buffer.read_ndarray( 

2178 position, array_length, 1, self.header.itype 

2179 ) # type: ignore 

2180 position += array_length 

2181 

2182 except Exception: 

2183 

2184 # print info 

2185 trb_msg = traceback.format_exc() 

2186 msg = "A failure in %s was caught:\n%s" 

2187 LOGGER.warning(msg, "_read_fluid_material_data", trb_msg) 

2188 

2189 # fix position 

2190 position = original_position + blocksize 

2191 

2192 # remember position 

2193 self.geometry_section_size = position 

2194 LOGGER.debug("_read_fluid_material_data end at byte %d", self.geometry_section_size) 

2195 

2196 def _read_sph_element_data_flags(self): 

2197 """Read the sph element data flags""" 

2198 

2199 if not self._buffer: 

2200 return 

2201 

2202 if not self.header.n_sph_nodes: 

2203 return 

2204 

2205 LOGGER.debug("_read_sph_element_data_flags start at byte %d", self.geometry_section_size) 

2206 

2207 position = self.geometry_section_size 

2208 

2209 sph_element_data_words = { 

2210 "isphfg1": (position, self._header.itype), 

2211 "isphfg2": (position + 1 * self.header.wordsize, self._header.itype), 

2212 "isphfg3": (position + 2 * self.header.wordsize, self._header.itype), 

2213 "isphfg4": (position + 3 * self.header.wordsize, self._header.itype), 

2214 "isphfg5": (position + 4 * self.header.wordsize, self._header.itype), 

2215 "isphfg6": (position + 5 * self.header.wordsize, self._header.itype), 

2216 "isphfg7": (position + 6 * self.header.wordsize, self._header.itype), 

2217 "isphfg8": (position + 7 * self.header.wordsize, self._header.itype), 

2218 "isphfg9": (position + 8 * self.header.wordsize, self._header.itype), 

2219 "isphfg10": (position + 9 * self.header.wordsize, self._header.itype), 

2220 "isphfg11": (position + 10 * self.header.wordsize, self._header.itype), 

2221 } 

2222 

2223 sph_header_data = self.header.read_words(self._buffer, sph_element_data_words) 

2224 

2225 self._sph_info.n_sph_array_length = sph_header_data["isphfg1"] 

2226 self._sph_info.has_influence_radius = sph_header_data["isphfg2"] != 0 

2227 self._sph_info.has_particle_pressure = sph_header_data["isphfg3"] != 0 

2228 self._sph_info.has_stresses = sph_header_data["isphfg4"] != 0 

2229 self._sph_info.has_plastic_strain = sph_header_data["isphfg5"] != 0 

2230 self._sph_info.has_material_density = sph_header_data["isphfg6"] != 0 

2231 self._sph_info.has_internal_energy = sph_header_data["isphfg7"] != 0 

2232 self._sph_info.has_n_affecting_neighbors = sph_header_data["isphfg8"] != 0 

2233 self._sph_info.has_strain_and_strainrate = sph_header_data["isphfg9"] != 0 

2234 self._sph_info.has_true_strains = sph_header_data["isphfg9"] < 0 

2235 self._sph_info.has_mass = sph_header_data["isphfg10"] != 0 

2236 self._sph_info.n_sph_history_vars = sph_header_data["isphfg11"] 

2237 

2238 if self._sph_info.n_sph_array_length != 11: 

2239 msg = ( 

2240 "Detected inconsistency: " 

2241 f"isphfg = {self._sph_info.n_sph_array_length} but must be 11." 

2242 ) 

2243 raise RuntimeError(msg) 

2244 

2245 self._sph_info.n_sph_vars = ( 

2246 sph_header_data["isphfg2"] 

2247 + sph_header_data["isphfg3"] 

2248 + sph_header_data["isphfg4"] 

2249 + sph_header_data["isphfg5"] 

2250 + sph_header_data["isphfg6"] 

2251 + sph_header_data["isphfg7"] 

2252 + sph_header_data["isphfg8"] 

2253 + abs(sph_header_data["isphfg9"]) 

2254 + sph_header_data["isphfg10"] 

2255 + sph_header_data["isphfg11"] 

2256 + 1 

2257 ) # material number 

2258 

2259 self.geometry_section_size += sph_header_data["isphfg1"] * self.header.wordsize 

2260 LOGGER.debug("_read_sph_element_data_flags end at byte %d", self.geometry_section_size) 

2261 

2262 def _read_particle_data(self): 

2263 """Read the geometry section for particle data (airbags)""" 

2264 

2265 if not self._buffer: 

2266 return 

2267 

2268 if "npefg" not in self.header.raw_header: 

2269 return 

2270 npefg = self.header.raw_header["npefg"] 

2271 

2272 # let's stick to the manual, too lazy to decypther this test 

2273 if npefg <= 0 or npefg > 10000000: 

2274 return 

2275 

2276 LOGGER.debug("_read_particle_data start at byte %d", self.geometry_section_size) 

2277 

2278 position = self.geometry_section_size 

2279 

2280 airbag_header = { 

2281 # number of airbags 

2282 "npartgas": npefg % 1000, 

2283 # ? 

2284 "subver": npefg // 1000, 

2285 } 

2286 

2287 particle_geometry_data_words = { 

2288 # number of geometry variables 

2289 "ngeom": (position, self._header.itype), 

2290 # number of state variables 

2291 "nvar": (position + 1 * self.header.wordsize, self._header.itype), 

2292 # number of particles 

2293 "npart": (position + 2 * self.header.wordsize, self._header.itype), 

2294 # number of state geometry variables 

2295 "nstgeom": (position + 3 * self.header.wordsize, self._header.itype), 

2296 } 

2297 

2298 self.header.read_words(self._buffer, particle_geometry_data_words, airbag_header) 

2299 position += 4 * self.header.wordsize 

2300 

2301 # transfer to info object 

2302 self._airbag_info.n_airbags = npefg % 1000 

2303 self._airbag_info.subver = npefg // 1000 

2304 self._airbag_info.n_geometric_variables = airbag_header["ngeom"] 

2305 self._airbag_info.n_particle_state_variables = airbag_header["nvar"] 

2306 self._airbag_info.n_particles = airbag_header["npart"] 

2307 self._airbag_info.n_airbag_state_variables = airbag_header["nstgeom"] 

2308 

2309 if self._airbag_info.subver == 4: 

2310 # number of chambers 

2311 self._airbag_info.n_chambers = self._buffer.read_number(position, self._header.itype) 

2312 position += self.header.wordsize 

2313 

2314 n_airbag_variables = self._airbag_info.get_n_variables() 

2315 

2316 # safety 

2317 # from here on the code may fail 

2318 original_position = position 

2319 blocksize = 9 * n_airbag_variables * self.header.wordsize 

2320 

2321 try: 

2322 # variable typecodes 

2323 self.arrays[ArrayType.airbag_variable_types] = self._buffer.read_ndarray( 

2324 position, n_airbag_variables * self.header.wordsize, 1, self._header.itype 

2325 ) 

2326 position += n_airbag_variables * self.header.wordsize 

2327 

2328 # airbag variable names 

2329 # every word is an ascii char 

2330 airbag_variable_names = [] 

2331 var_width = 8 

2332 

2333 for i_variable in range(n_airbag_variables): 

2334 name = self._buffer.read_text( 

2335 position + (i_variable * var_width) * self.header.wordsize, 

2336 var_width * self.header.wordsize, 

2337 ) 

2338 airbag_variable_names.append(name[:: self.header.wordsize]) 

2339 

2340 self.arrays[ArrayType.airbag_variable_names] = airbag_variable_names 

2341 position += n_airbag_variables * var_width * self.header.wordsize 

2342 

2343 except Exception: 

2344 

2345 # print info 

2346 trb_msg = traceback.format_exc() 

2347 msg = "A failure in %s was caught:\n%s" 

2348 LOGGER.warning(msg, "_read_particle_data", trb_msg) 

2349 

2350 # fix position 

2351 position = original_position + blocksize 

2352 

2353 # update position marker 

2354 self.geometry_section_size = position 

2355 LOGGER.debug("_read_particle_data start at byte %d", self.geometry_section_size) 

2356 

2357 # pylint: disable = too-many-branches 

2358 def _read_geometry_data(self): 

2359 """Read the data from the geometry section""" 

2360 

2361 if not self._buffer: 

2362 return 

2363 

2364 LOGGER.debug("_read_geometry_data start at byte %d", self.geometry_section_size) 

2365 

2366 # not sure but I think never used by LS-Dyna 

2367 # anyway needs to be detected in the header and not here, 

2368 # though it is mentioned in this section of the database manual 

2369 # 

2370 # is_packed = True if self.header['ndim'] == 3 else False 

2371 # if is_packed: 

2372 # raise RuntimeError("Can not deal with packed "\ 

2373 # "geometry data (ndim == {}).".format(self.header['ndim'])) 

2374 

2375 position = self.geometry_section_size 

2376 

2377 # node coords 

2378 n_nodes = self.header.n_nodes 

2379 n_dimensions = self.header.n_dimensions 

2380 section_word_length = n_dimensions * n_nodes 

2381 try: 

2382 node_coordinates = self._buffer.read_ndarray( 

2383 position, section_word_length * self.header.wordsize, 1, self.header.ftype 

2384 ).reshape((n_nodes, n_dimensions)) 

2385 self.arrays[ArrayType.node_coordinates] = node_coordinates 

2386 except Exception: 

2387 trb_msg = traceback.format_exc() 

2388 msg = "A failure in %d was caught:\n%s" 

2389 LOGGER.warning(msg, "_read_geometry_data, node_coordinates", trb_msg) 

2390 finally: 

2391 position += section_word_length * self.header.wordsize 

2392 

2393 # solid data 

2394 n_solids = self.header.n_solids 

2395 section_word_length = 9 * n_solids 

2396 try: 

2397 elem_solid_data = self._buffer.read_ndarray( 

2398 position, section_word_length * self.header.wordsize, 1, self._header.itype 

2399 ).reshape((n_solids, 9)) 

2400 solid_connectivity = elem_solid_data[:, :8] 

2401 solid_part_indexes = elem_solid_data[:, 8] 

2402 self.arrays[ArrayType.element_solid_node_indexes] = solid_connectivity - FORTRAN_OFFSET 

2403 self.arrays[ArrayType.element_solid_part_indexes] = solid_part_indexes - FORTRAN_OFFSET 

2404 except Exception: 

2405 trb_msg = traceback.format_exc() 

2406 msg = "A failure in %s was caught:\n%s" 

2407 LOGGER.warning(msg, "_read_geometry_data, solids_geometry", trb_msg) 

2408 finally: 

2409 position += section_word_length * self.header.wordsize 

2410 

2411 # ten node solids extra nodes 

2412 if self.header.has_solid_2_extra_nodes: 

2413 section_word_length = 2 * n_solids 

2414 try: 

2415 self.arrays[ 

2416 ArrayType.element_solid_extra_nodes 

2417 ] = elem_solid_data = self._buffer.read_ndarray( 

2418 position, section_word_length * self.header.wordsize, 1, self._header.itype 

2419 ).reshape( 

2420 (n_solids, 2) 

2421 ) 

2422 except Exception: 

2423 trb_msg = traceback.format_exc() 

2424 msg = "A failure in %s was caught:\n%s" 

2425 LOGGER.warning(msg, "_read_geometry_data, ten_node_solids", trb_msg) 

2426 finally: 

2427 position += section_word_length * self.header.wordsize 

2428 

2429 # 8 node thick shells 

2430 n_thick_shells = self.header.n_thick_shells 

2431 section_word_length = 9 * n_thick_shells 

2432 try: 

2433 elem_tshell_data = self._buffer.read_ndarray( 

2434 position, section_word_length * self.header.wordsize, 1, self._header.itype 

2435 ).reshape((self.header.n_thick_shells, 9)) 

2436 self.arrays[ArrayType.element_tshell_node_indexes] = ( 

2437 elem_tshell_data[:, :8] - FORTRAN_OFFSET 

2438 ) 

2439 self.arrays[ArrayType.element_tshell_part_indexes] = ( 

2440 elem_tshell_data[:, 8] - FORTRAN_OFFSET 

2441 ) 

2442 except Exception: 

2443 trb_msg = traceback.format_exc() 

2444 msg = "A failure in %s was caught:\n%s" 

2445 LOGGER.warning(msg, "_read_geometry_data, tshells_geometry", trb_msg) 

2446 finally: 

2447 position += section_word_length * self.header.wordsize 

2448 

2449 # beams 

2450 n_beams = self.header.n_beams 

2451 section_word_length = 6 * n_beams 

2452 try: 

2453 elem_beam_data = self._buffer.read_ndarray( 

2454 position, section_word_length * self.header.wordsize, 1, self._header.itype 

2455 ).reshape((n_beams, 6)) 

2456 self.arrays[ArrayType.element_beam_part_indexes] = elem_beam_data[:, 5] - FORTRAN_OFFSET 

2457 self.arrays[ArrayType.element_beam_node_indexes] = ( 

2458 elem_beam_data[:, :5] - FORTRAN_OFFSET 

2459 ) 

2460 except Exception: 

2461 trb_msg = traceback.format_exc() 

2462 msg = "A failure in %s was caught:\n%s" 

2463 LOGGER.warning(msg, "_read_geometry_data, beams_geometry", trb_msg) 

2464 finally: 

2465 position += section_word_length * self.header.wordsize 

2466 

2467 # shells 

2468 n_shells = self.header.n_shells 

2469 section_word_length = 5 * n_shells 

2470 try: 

2471 elem_shell_data = self._buffer.read_ndarray( 

2472 position, section_word_length * self.header.wordsize, 1, self._header.itype 

2473 ).reshape((self.header.n_shells, 5)) 

2474 self.arrays[ArrayType.element_shell_node_indexes] = ( 

2475 elem_shell_data[:, :4] - FORTRAN_OFFSET 

2476 ) 

2477 self.arrays[ArrayType.element_shell_part_indexes] = ( 

2478 elem_shell_data[:, 4] - FORTRAN_OFFSET 

2479 ) 

2480 except Exception: 

2481 trb_msg = traceback.format_exc() 

2482 msg = "A failure in %s was caught:\n%s" 

2483 LOGGER.warning(msg, "_read_geometry_data, shells_geometry", trb_msg) 

2484 finally: 

2485 position += section_word_length * self.header.wordsize 

2486 

2487 # update word position 

2488 self.geometry_section_size = position 

2489 

2490 LOGGER.debug("_read_geometry_data end at byte %d", self.geometry_section_size) 

2491 

2492 def _read_user_ids(self): 

2493 

2494 if not self._buffer: 

2495 return 

2496 

2497 if not self.header.has_numbering_section: 

2498 self.arrays[ArrayType.node_ids] = np.arange( 

2499 FORTRAN_OFFSET, self.header.n_nodes + FORTRAN_OFFSET, dtype=self.header.itype 

2500 ) 

2501 self.arrays[ArrayType.element_solid_ids] = np.arange( 

2502 FORTRAN_OFFSET, self.header.n_solids + FORTRAN_OFFSET, dtype=self.header.itype 

2503 ) 

2504 self.arrays[ArrayType.element_beam_ids] = np.arange( 

2505 FORTRAN_OFFSET, self.header.n_beams + FORTRAN_OFFSET, dtype=self.header.itype 

2506 ) 

2507 self.arrays[ArrayType.element_shell_ids] = np.arange( 

2508 FORTRAN_OFFSET, self.header.n_shells + FORTRAN_OFFSET, dtype=self.header.itype 

2509 ) 

2510 self.arrays[ArrayType.element_tshell_ids] = np.arange( 

2511 FORTRAN_OFFSET, self.header.n_thick_shells + FORTRAN_OFFSET, dtype=self.header.itype 

2512 ) 

2513 self.arrays[ArrayType.part_ids] = np.arange( 

2514 FORTRAN_OFFSET, self.header.n_parts + FORTRAN_OFFSET, dtype=self.header.itype 

2515 ) 

2516 return 

2517 

2518 LOGGER.debug("_read_user_ids start at byte %d", self.geometry_section_size) 

2519 

2520 position = self.geometry_section_size 

2521 

2522 # safety 

2523 original_position = position 

2524 blocksize = self.header.raw_header["narbs"] * self.header.wordsize 

2525 

2526 try: 

2527 numbering_words = { 

2528 "nsort": (position, self._header.itype), 

2529 "nsrh": (position + 1 * self.header.wordsize, self._header.itype), 

2530 "nsrb": (position + 2 * self.header.wordsize, self._header.itype), 

2531 "nsrs": (position + 3 * self.header.wordsize, self._header.itype), 

2532 "nsrt": (position + 4 * self.header.wordsize, self._header.itype), 

2533 "nsortd": (position + 5 * self.header.wordsize, self._header.itype), 

2534 "nsrhd": (position + 6 * self.header.wordsize, self._header.itype), 

2535 "nsrbd": (position + 7 * self.header.wordsize, self._header.itype), 

2536 "nsrsd": (position + 8 * self.header.wordsize, self._header.itype), 

2537 "nsrtd": (position + 9 * self.header.wordsize, self._header.itype), 

2538 } 

2539 

2540 extra_numbering_words = { 

2541 "nsrma": (position + 10 * self.header.wordsize, self._header.itype), 

2542 "nsrmu": (position + 11 * self.header.wordsize, self._header.itype), 

2543 "nsrmp": (position + 12 * self.header.wordsize, self._header.itype), 

2544 "nsrtm": (position + 13 * self.header.wordsize, self._header.itype), 

2545 "numrbs": (position + 14 * self.header.wordsize, self._header.itype), 

2546 "nmmat": (position + 15 * self.header.wordsize, self._header.itype), 

2547 } 

2548 

2549 numbering_header = self.header.read_words(self._buffer, numbering_words) 

2550 position += len(numbering_words) * self.header.wordsize 

2551 

2552 # let's make life easier 

2553 info = self._numbering_info 

2554 

2555 # transfer first bunch 

2556 info.ptr_node_ids = abs(numbering_header["nsort"]) 

2557 info.has_material_ids = numbering_header["nsort"] < 0 

2558 info.ptr_solid_ids = numbering_header["nsrh"] 

2559 info.ptr_beam_ids = numbering_header["nsrb"] 

2560 info.ptr_shell_ids = numbering_header["nsrs"] 

2561 info.ptr_thick_shell_ids = numbering_header["nsrt"] 

2562 info.n_nodes = numbering_header["nsortd"] 

2563 info.n_solids = numbering_header["nsrhd"] 

2564 info.n_beams = numbering_header["nsrbd"] 

2565 info.n_shells = numbering_header["nsrsd"] 

2566 info.n_thick_shells = numbering_header["nsrtd"] 

2567 

2568 if info.has_material_ids: 

2569 

2570 # read extra header 

2571 self.header.read_words(self._buffer, extra_numbering_words, numbering_header) 

2572 position += len(extra_numbering_words) * self.header.wordsize 

2573 

2574 # transfer more 

2575 info.ptr_material_ids = numbering_header["nsrma"] 

2576 info.ptr_material_ids_defined_order = numbering_header["nsrmu"] 

2577 info.ptr_material_ids_crossref = numbering_header["nsrmp"] 

2578 info.n_parts = numbering_header["nsrtm"] 

2579 info.n_rigid_bodies = numbering_header["numrbs"] 

2580 info.n_parts2 = numbering_header["nmmat"] 

2581 else: 

2582 info.n_parts = self.header.n_parts 

2583 

2584 # let's do a quick check 

2585 n_words_computed = ( 

2586 len(numbering_header) 

2587 + info.n_nodes 

2588 + info.n_shells 

2589 + info.n_beams 

2590 + info.n_solids 

2591 + info.n_thick_shells 

2592 + info.n_parts * 3 

2593 ) 

2594 if n_words_computed != self.header.n_numbering_section_words: 

2595 warn_msg = ( 

2596 "ID section: The computed word count does " 

2597 "not match the header word count: %d != %d." 

2598 " The ID arrays might contain errors." 

2599 ) 

2600 LOGGER.warning(warn_msg, n_words_computed, self.header.n_numbering_section_words) 

2601 # node ids 

2602 array_length = info.n_nodes * self.header.wordsize 

2603 self.arrays[ArrayType.node_ids] = self._buffer.read_ndarray( 

2604 position, array_length, 1, self._header.itype 

2605 ) 

2606 position += array_length 

2607 # solid ids 

2608 array_length = info.n_solids * self.header.wordsize 

2609 self.arrays[ArrayType.element_solid_ids] = self._buffer.read_ndarray( 

2610 position, array_length, 1, self._header.itype 

2611 ) 

2612 position += array_length 

2613 # beam ids 

2614 array_length = info.n_beams * self.header.wordsize 

2615 self.arrays[ArrayType.element_beam_ids] = self._buffer.read_ndarray( 

2616 position, array_length, 1, self._header.itype 

2617 ) 

2618 position += array_length 

2619 # shell ids 

2620 array_length = info.n_shells * self.header.wordsize 

2621 self.arrays[ArrayType.element_shell_ids] = self._buffer.read_ndarray( 

2622 position, array_length, 1, self._header.itype 

2623 ) 

2624 position += array_length 

2625 # tshell ids 

2626 array_length = info.n_thick_shells * self.header.wordsize 

2627 self.arrays[ArrayType.element_tshell_ids] = self._buffer.read_ndarray( 

2628 position, array_length, 1, self._header.itype 

2629 ) 

2630 position += array_length 

2631 

2632 # part ids 

2633 # 

2634 # this makes no sense but materials are output three times at this section 

2635 # but the length of the array (nmmat) is only output if nsort < 0. In 

2636 # the other case the length is unknown ... 

2637 # 

2638 # Bugfix: 

2639 # The material arrays (three times) are always output, even if nsort < 0 

2640 # which means they are not used. Quite confusing, especially since nmmat 

2641 # is output in the main header and numbering header. 

2642 # 

2643 if "nmmat" in numbering_header: 

2644 

2645 if info.n_parts != self.header.n_parts: 

2646 err_msg = ( 

2647 "nmmat in the file header (%d) and in the " 

2648 "numbering header (%d) are inconsistent." 

2649 ) 

2650 raise RuntimeError(err_msg, self.header.n_parts, info.n_parts) 

2651 

2652 array_length = info.n_parts * self.header.wordsize 

2653 

2654 self.arrays[ArrayType.part_ids] = self._buffer.read_ndarray( 

2655 position, info.n_parts * self.header.wordsize, 1, self._header.itype 

2656 ) 

2657 position += info.n_parts * self.header.wordsize 

2658 

2659 self.arrays[ArrayType.part_ids_unordered] = self._buffer.read_ndarray( 

2660 position, info.n_parts * self.header.wordsize, 1, self._header.itype 

2661 ) 

2662 position += info.n_parts * self.header.wordsize 

2663 

2664 self.arrays[ArrayType.part_ids_cross_references] = self._buffer.read_ndarray( 

2665 position, info.n_parts * self.header.wordsize, 1, self._header.itype 

2666 ) 

2667 position += info.n_parts * self.header.wordsize 

2668 

2669 else: 

2670 position += 3 * self.header.n_parts * self.header.wordsize 

2671 

2672 except Exception: 

2673 # print info 

2674 trb_msg = traceback.format_exc() 

2675 msg = "A failure in %s was caught:\n%s" 

2676 LOGGER.warning(msg, "_read_user_ids", trb_msg) 

2677 

2678 # fix position 

2679 position = original_position + blocksize 

2680 

2681 # update position 

2682 self.geometry_section_size = position 

2683 LOGGER.debug("_read_user_ids end at byte %d", self.geometry_section_size) 

2684 

    def _read_rigid_body_description(self):
        """Read the rigid body description section

        For every rigid body this reads the part number, the rigid node
        list and the active (non-rigid) node list, stores them in
        ``self._rigid_body_info`` and ``self.arrays``, and advances
        ``self.geometry_section_size`` past the section.
        """

        if not self._buffer:
            return

        if not self.header.has_rigid_body_data:
            return

        LOGGER.debug("_read_rigid_body_description start at byte %d", self.geometry_section_size)

        position = self.geometry_section_size

        # first word: number of rigid bodies
        rigid_body_description_header = {
            "nrigid": self._buffer.read_number(position, self._header.itype)
        }
        position += self.header.wordsize

        info = self._rigid_body_info
        info.n_rigid_bodies = rigid_body_description_header["nrigid"]

        rigid_bodies: List[RigidBodyMetadata] = []
        for _ in range(info.n_rigid_bodies):
            rigid_body_info = {
                # rigid body part internal number
                "mrigid": self._buffer.read_number(position, self._header.itype),
                # number of nodes in rigid body
                "numnodr": self._buffer.read_number(
                    position + self.header.wordsize, self._header.itype
                ),
            }
            position += 2 * self.header.wordsize

            # internal node number of rigid body
            array_length = rigid_body_info["numnodr"] * self.header.wordsize
            rigid_body_info["noder"] = self._buffer.read_ndarray(
                position, array_length, 1, self._header.itype
            )
            position += array_length

            # number of active (non-rigid) nodes
            rigid_body_info["numnoda"] = self._buffer.read_number(position, self._header.itype)
            position += self.header.wordsize

            # internal node numbers of active nodes
            array_length = rigid_body_info["numnoda"] * self.header.wordsize
            rigid_body_info["nodea"] = self._buffer.read_ndarray(
                position, array_length, 1, self._header.itype
            )
            position += array_length

            # transfer props into a typed metadata object
            body_metadata = RigidBodyMetadata(
                internal_number=rigid_body_info["mrigid"],
                n_nodes=rigid_body_info["numnodr"],
                node_indexes=rigid_body_info["noder"],
                n_active_nodes=rigid_body_info["numnoda"],
                active_node_indexes=rigid_body_info["nodea"],
            )

            # append to list
            rigid_bodies.append(body_metadata)

        # save rigid body info to header
        info.rigid_body_metadata_list = rigid_bodies

        # collect per-body data into flat arrays/lists
        rigid_body_n_nodes = []
        rigid_body_part_indexes = []
        rigid_body_n_active_nodes = []
        rigid_body_node_indexes_list = []
        rigid_body_active_node_indexes_list = []
        for rigid_body_info in rigid_bodies:
            rigid_body_part_indexes.append(rigid_body_info.internal_number)
            rigid_body_n_nodes.append(rigid_body_info.n_nodes)
            # convert 1-based fortran indexes to 0-based
            rigid_body_node_indexes_list.append(rigid_body_info.node_indexes - FORTRAN_OFFSET)
            rigid_body_n_active_nodes.append(rigid_body_info.n_active_nodes)
            rigid_body_active_node_indexes_list.append(
                rigid_body_info.active_node_indexes - FORTRAN_OFFSET
            )

        self.arrays[ArrayType.rigid_body_part_indexes] = (
            np.array(rigid_body_part_indexes, dtype=self._header.itype) - FORTRAN_OFFSET
        )
        self.arrays[ArrayType.rigid_body_n_nodes] = np.array(
            rigid_body_n_nodes, dtype=self._header.itype
        )
        self.arrays[ArrayType.rigid_body_n_active_nodes] = np.array(
            rigid_body_n_active_nodes, dtype=self._header.itype
        )
        self.arrays[ArrayType.rigid_body_node_indexes_list] = rigid_body_node_indexes_list
        self.arrays[
            ArrayType.rigid_body_active_node_indexes_list
        ] = rigid_body_active_node_indexes_list

        # update position
        self.geometry_section_size = position
        LOGGER.debug("_read_rigid_body_description end at byte %d", self.geometry_section_size)

2783 

2784 def _read_sph_node_and_material_list(self): 

2785 """Read SPH node and material list""" 

2786 

2787 if not self._buffer: 

2788 return 

2789 

2790 if self.header.n_sph_nodes <= 0: 

2791 return 

2792 

2793 LOGGER.debug( 

2794 "_read_sph_node_and_material_list start at byte %d", self.geometry_section_size 

2795 ) 

2796 

2797 position = self.geometry_section_size 

2798 

2799 array_length = self.header.n_sph_nodes * self.header.wordsize * 2 

2800 try: 

2801 # read info array 

2802 sph_node_matlist = self._buffer.read_ndarray( 

2803 position, array_length, 1, self._header.itype 

2804 ).reshape((self.header.n_sph_nodes, 2)) 

2805 

2806 # save array 

2807 self.arrays[ArrayType.sph_node_indexes] = sph_node_matlist[:, 0] - FORTRAN_OFFSET 

2808 self.arrays[ArrayType.sph_node_material_index] = sph_node_matlist[:, 1] - FORTRAN_OFFSET 

2809 

2810 except Exception: 

2811 # print info 

2812 trb_msg = traceback.format_exc() 

2813 msg = "A failure in %s was caught:\n%s" 

2814 LOGGER.warning(msg, "_read_sph_node_and_material_list", trb_msg) 

2815 

2816 finally: 

2817 # update position 

2818 self.geometry_section_size += array_length 

2819 

2820 LOGGER.debug("_read_sph_node_and_material_list end at byte %d", self.geometry_section_size) 

2821 

2822 def _read_particle_geometry_data(self): 

2823 """Read the particle geometry data""" 

2824 

2825 if not self._buffer: 

2826 return 

2827 

2828 if "npefg" not in self.header.raw_header: 

2829 return 

2830 

2831 if self.header.raw_header["npefg"] <= 0: 

2832 return 

2833 

2834 LOGGER.debug("_read_particle_geometry_data start at byte %d", self.geometry_section_size) 

2835 

2836 info = self._airbag_info 

2837 

2838 position = self.geometry_section_size 

2839 

2840 # size of geometry section checking 

2841 ngeom = info.n_geometric_variables 

2842 if ngeom not in [4, 5]: 

2843 raise RuntimeError("variable ngeom in the airbag header must be 4 or 5.") 

2844 

2845 original_position = position 

2846 blocksize = info.n_airbags * ngeom * self.header.wordsize 

2847 try: 

2848 

2849 # extract geometry as a single array 

2850 array_length = blocksize 

2851 particle_geom_data = self._buffer.read_ndarray( 

2852 position, array_length, 1, self._header.itype 

2853 ).reshape((info.n_airbags, ngeom)) 

2854 position += array_length 

2855 

2856 # store arrays 

2857 self.arrays[ArrayType.airbags_first_particle_id] = particle_geom_data[:, 0] 

2858 self.arrays[ArrayType.airbags_n_particles] = particle_geom_data[:, 1] 

2859 self.arrays[ArrayType.airbags_ids] = particle_geom_data[:, 2] 

2860 self.arrays[ArrayType.airbags_n_gas_mixtures] = particle_geom_data[:, 3] 

2861 if ngeom == 5: 

2862 self.arrays[ArrayType.airbags_n_chambers] = particle_geom_data[:, 4] 

2863 

2864 except Exception: 

2865 # print info 

2866 trb_msg = traceback.format_exc() 

2867 msg = "A failure in %d was caught:\n%s" 

2868 LOGGER.warning(msg, "_read_particle_geometry_data", trb_msg) 

2869 

2870 # fix position 

2871 position = original_position + blocksize 

2872 

2873 # update position 

2874 self.geometry_section_size = position 

2875 

2876 LOGGER.debug("_read_particle_geometry_data end at byte %d", self.geometry_section_size) 

2877 

    def _read_rigid_road_surface(self):
        """Read rigid road surface data

        Reads the rigid road header (node count, segment count, road
        count, motion flag), then the road node ids and coordinates,
        and finally per-road segment connectivity. Advances
        ``self.geometry_section_size`` past everything that was read.
        """

        if not self._buffer:
            return

        if not self.header.has_rigid_road_surface:
            return

        LOGGER.debug("_read_rigid_road_surface start at byte %d", self.geometry_section_size)

        position = self.geometry_section_size

        # read header
        # word name -> (byte offset, dtype) as expected by header.read_words
        rigid_road_surface_words = {
            "nnode": (position, self._header.itype),
            "nseg": (position + 1 * self.header.wordsize, self._header.itype),
            "nsurf": (position + 2 * self.header.wordsize, self._header.itype),
            "motion": (position + 3 * self.header.wordsize, self._header.itype),
        }

        rigid_road_header = self.header.read_words(self._buffer, rigid_road_surface_words)
        position += 4 * self.header.wordsize

        self._rigid_road_info = RigidRoadInfo(
            n_nodes=rigid_road_header["nnode"],
            n_roads=rigid_road_header["nsurf"],
            n_road_segments=rigid_road_header["nseg"],
            motion=rigid_road_header["motion"],
        )
        info = self._rigid_road_info

        # node ids
        array_length = info.n_nodes * self.header.wordsize
        rigid_road_node_ids = self._buffer.read_ndarray(
            position, array_length, 1, self._header.itype
        )
        self.arrays[ArrayType.rigid_road_node_ids] = rigid_road_node_ids
        position += array_length

        # node xyz (floats, 3 per node)
        array_length = info.n_nodes * 3 * self.header.wordsize
        rigid_road_node_coords = self._buffer.read_ndarray(
            position, array_length, 1, self.header.ftype
        ).reshape((info.n_nodes, 3))
        self.arrays[ArrayType.rigid_road_node_coordinates] = rigid_road_node_coords
        position += array_length

        # read road segments
        # Warning: must be copied
        rigid_road_ids = np.empty(info.n_roads, dtype=self._header.itype)
        rigid_road_nsegments = np.empty(info.n_roads, dtype=self._header.itype)
        rigid_road_segment_node_ids = []

        # this array is created since the array database requires
        # constant sized arrays, and we dump all segments into one
        # array. In order to distinguish which segment
        # belongs to which road, this new array keeps track of it
        rigid_road_segment_road_id = []

        # n_total_segments = 0
        for i_surf in range(info.n_roads):
            # surface id
            surf_id = self._buffer.read_number(position, self._header.itype)  # type: ignore
            position += self.header.wordsize
            rigid_road_ids[i_surf] = surf_id

            # number of segments of surface
            # NOTE(review): this reads one word PAST `position` but then
            # advances position by only one word — either the format has a
            # padding word here or this is an off-by-one; confirm against
            # the d3plot binary database spec before changing.
            surf_nseg = self._buffer.read_number(
                position + 1 * self.header.wordsize, self._header.itype
            )  # type: ignore
            position += self.header.wordsize
            rigid_road_nsegments[i_surf] = surf_nseg

            # count total segments
            # n_total_segments += surf_nseg

            # node ids of surface segments (4 nodes per segment)
            array_length = 4 * surf_nseg * self.header.wordsize
            surf_segm_node_ids = self._buffer.read_ndarray(
                position,  # type: ignore
                array_length,  # type: ignore
                1,
                self._header.itype,
            ).reshape((surf_nseg, 4))
            position += array_length
            rigid_road_segment_node_ids.append(surf_segm_node_ids)

            # remember road id for segments
            rigid_road_segment_road_id += [surf_id] * surf_nseg

        # save arrays
        self.arrays[ArrayType.rigid_road_ids] = rigid_road_ids
        self.arrays[ArrayType.rigid_road_n_segments] = rigid_road_nsegments
        self.arrays[ArrayType.rigid_road_segment_node_ids] = np.concatenate(
            rigid_road_segment_node_ids
        )
        self.arrays[ArrayType.rigid_road_segment_road_id] = np.asarray(rigid_road_segment_road_id)

        # update position
        self.geometry_section_size = position
        LOGGER.debug("_read_rigid_road_surface end at byte %d", self.geometry_section_size)

2980 

2981 # pylint: disable = too-many-branches 

2982 def _read_extra_node_connectivity(self): 

2983 """Read the extra node data for creepy elements""" 

2984 

2985 if not self._buffer: 

2986 return 

2987 

2988 LOGGER.debug("_read_extra_node_connectivity start at byte %d", self.geometry_section_size) 

2989 

2990 position = self.geometry_section_size 

2991 

2992 # extra 2 node connectivity for 10 node tetrahedron elements 

2993 if self.header.has_solid_2_extra_nodes: 

2994 array_length = 2 * self.header.n_solids * self.header.wordsize 

2995 try: 

2996 array = self._buffer.read_ndarray( 

2997 position, array_length, 1, self._header.itype 

2998 ).reshape((self.header.n_solids, 2)) 

2999 self.arrays[ArrayType.element_solid_node10_extra_node_indexes] = ( 

3000 array - FORTRAN_OFFSET 

3001 ) 

3002 except Exception: 

3003 trb_msg = traceback.format_exc() 

3004 msg = "A failure in %s was caught:\n%s" 

3005 LOGGER.warning(msg, "_read_extra_node_connectivity, solid10", trb_msg) 

3006 finally: 

3007 position += array_length 

3008 

3009 # 8 node shell elements 

3010 if self.header.n_shells_8_nodes > 0: 

3011 array_length = 5 * self.header.n_shells_8_nodes * self.header.wordsize 

3012 try: 

3013 array = self._buffer.read_ndarray( 

3014 position, array_length, 1, self._header.itype 

3015 ).reshape((self.header.n_shells_8_nodes, 5)) 

3016 self.arrays[ArrayType.element_shell_node8_element_index] = ( 

3017 array[:, 0] - FORTRAN_OFFSET 

3018 ) 

3019 self.arrays[ArrayType.element_shell_node8_extra_node_indexes] = ( 

3020 array[:, 1:] - FORTRAN_OFFSET 

3021 ) 

3022 except Exception: 

3023 trb_msg = traceback.format_exc() 

3024 msg = "A failure in %s was caught:\n%s" 

3025 LOGGER.warning(msg, "_read_extra_node_connectivity, shell8", trb_msg) 

3026 finally: 

3027 position += array_length 

3028 

3029 # 20 node solid elements 

3030 if self.header.n_solids_20_node_hexas > 0: 

3031 array_length = 13 * self.header.n_solids_20_node_hexas * self.header.wordsize 

3032 try: 

3033 array = self._buffer.read_ndarray( 

3034 position, array_length, 1, self._header.itype 

3035 ).reshape((self.header.n_solids_20_node_hexas, 13)) 

3036 self.arrays[ArrayType.element_solid_node20_element_index] = ( 

3037 array[:, 0] - FORTRAN_OFFSET 

3038 ) 

3039 self.arrays[ArrayType.element_solid_node20_extra_node_indexes] = ( 

3040 array[:, 1:] - FORTRAN_OFFSET 

3041 ) 

3042 except Exception: 

3043 trb_msg = traceback.format_exc() 

3044 msg = "A failure in %s was caught:\n%s" 

3045 LOGGER.warning(msg, "_read_extra_node_connectivity, solid20", trb_msg) 

3046 finally: 

3047 position += array_length 

3048 

3049 # 27 node solid hexas 

3050 if ( 

3051 self.header.n_solids_27_node_hexas > 0 

3052 and self.header.quadratic_elems_has_full_connectivity 

3053 ): 

3054 array_length = 28 * self.header.n_solids_27_node_hexas * self.header.wordsize 

3055 try: 

3056 array = self._buffer.read_ndarray( 

3057 position, array_length, 1, self._header.itype 

3058 ).reshape((self.header.n_solids_27_node_hexas, 28)) 

3059 self.arrays[ArrayType.element_solid_node27_element_index] = ( 

3060 array[:, 0] - FORTRAN_OFFSET 

3061 ) 

3062 self.arrays[ArrayType.element_solid_node27_extra_node_indexes] = ( 

3063 array[:, 1:] - FORTRAN_OFFSET 

3064 ) 

3065 except Exception: 

3066 trb_msg = traceback.format_exc() 

3067 msg = "A failure in %s was caught:\n%s" 

3068 LOGGER.warning(msg, "_read_extra_node_connectivity, solid27", trb_msg) 

3069 finally: 

3070 position += array_length 

3071 

3072 # 21 node solid pentas 

3073 if ( 

3074 self.header.n_solids_21_node_pentas > 0 

3075 and self.header.quadratic_elems_has_full_connectivity 

3076 ): 

3077 array_length = 22 * self.header.n_solids_21_node_pentas * self.header.wordsize 

3078 try: 

3079 array = self._buffer.read_ndarray( 

3080 position, array_length, 1, self._header.itype 

3081 ).reshape((self.header.n_solids_21_node_pentas, 22)) 

3082 self.arrays[ArrayType.element_solid_node21_penta_element_index] = ( 

3083 array[:, 0] - FORTRAN_OFFSET 

3084 ) 

3085 self.arrays[ArrayType.element_solid_node21_penta_extra_node_indexes] = ( 

3086 array[:, 1:] - FORTRAN_OFFSET 

3087 ) 

3088 except Exception: 

3089 trb_msg = traceback.format_exc() 

3090 msg = "A failure in %s was caught:\n%s" 

3091 LOGGER.warning(msg, "_read_extra_node_connectivity, solid21p", trb_msg) 

3092 finally: 

3093 position += array_length 

3094 

3095 # 15 node solid tetras 

3096 if ( 

3097 self.header.n_solids_15_node_tetras > 0 

3098 and self.header.quadratic_elems_has_full_connectivity 

3099 ): 

3100 # manual says 8 but this seems odd 

3101 array_length = 8 * self.header.n_solids_15_node_tetras * self.header.wordsize 

3102 try: 

3103 array = self._buffer.read_ndarray( 

3104 position, array_length, 1, self._header.itype 

3105 ).reshape((self.header.n_solids_15_node_tetras, 8)) 

3106 self.arrays[ArrayType.element_solid_node15_tetras_element_index] = ( 

3107 array[:, 0] - FORTRAN_OFFSET 

3108 ) 

3109 self.arrays[ArrayType.element_solid_node15_tetras_extra_node_indexes] = ( 

3110 array[:, 1:] - FORTRAN_OFFSET 

3111 ) 

3112 except Exception: 

3113 trb_msg = traceback.format_exc() 

3114 msg = "A failure in %s was caught:\n%s" 

3115 LOGGER.warning(msg, "_read_extra_node_connectivity, solid15t", trb_msg) 

3116 finally: 

3117 position += array_length 

3118 

3119 # 20 node solid tetras 

3120 if self.header.n_solids_20_node_tetras > 0 and self.header.has_cubic_solids: 

3121 array_length = 21 * self.header.n_solids_20_node_tetras * self.header.wordsize 

3122 try: 

3123 array = self._buffer.read_ndarray( 

3124 position, array_length, 1, self._header.itype 

3125 ).reshape((self.header.n_solids_20_node_tetras, 21)) 

3126 self.arrays[ArrayType.element_solid_node20_tetras_element_index] = ( 

3127 array[:, 0] - FORTRAN_OFFSET 

3128 ) 

3129 self.arrays[ArrayType.element_solid_node20_tetras_extra_node_indexes] = ( 

3130 array[:, 1:] - FORTRAN_OFFSET 

3131 ) 

3132 except Exception: 

3133 trb_msg = traceback.format_exc() 

3134 msg = "A failure in %s was caught:\n%s" 

3135 LOGGER.warning(msg, "_read_extra_node_connectivity, solid20t", trb_msg) 

3136 finally: 

3137 position += array_length 

3138 

3139 # 40 node solid tetras 

3140 if self.header.n_solids_40_node_pentas > 0 and self.header.has_cubic_solids: 

3141 array_length = 41 * self.header.n_solids_40_node_pentas * self.header.wordsize 

3142 try: 

3143 array = self._buffer.read_ndarray( 

3144 position, array_length, 1, self._header.itype 

3145 ).reshape((self.header.n_solids_40_node_pentas, 41)) 

3146 self.arrays[ArrayType.element_solid_node40_pentas_element_index] = ( 

3147 array[:, 0] - FORTRAN_OFFSET 

3148 ) 

3149 self.arrays[ArrayType.element_solid_node40_pentas_extra_node_indexes] = ( 

3150 array[:, 1:] - FORTRAN_OFFSET 

3151 ) 

3152 except Exception: 

3153 trb_msg = traceback.format_exc() 

3154 msg = "A failure in %s was caught:\n%s" 

3155 LOGGER.warning(msg, "_read_extra_node_connectivity, solid40t", trb_msg) 

3156 finally: 

3157 position += array_length 

3158 

3159 # 64 node solid tetras 

3160 if self.header.n_solids_64_node_hexas > 0 and self.header.has_cubic_solids: 

3161 array_length = 65 * self.header.n_solids_64_node_hexas * self.header.wordsize 

3162 try: 

3163 array = self._buffer.read_ndarray( 

3164 position, array_length, 1, self._header.itype 

3165 ).reshape((self.header.n_solids_64_node_hexas, 65)) 

3166 self.arrays[ArrayType.element_solid_node64_hexas_element_index] = ( 

3167 array[:, 0] - FORTRAN_OFFSET 

3168 ) 

3169 self.arrays[ArrayType.element_solid_node64_hexas_extra_node_indexes] = ( 

3170 array[:, 1:] - FORTRAN_OFFSET 

3171 ) 

3172 except Exception: 

3173 trb_msg = traceback.format_exc() 

3174 msg = "A failure in %s was caught:\n%s" 

3175 LOGGER.warning(msg, "_read_extra_node_connectivity, solid64t", trb_msg) 

3176 finally: 

3177 position += array_length 

3178 

3179 # update position 

3180 self.geometry_section_size = position 

3181 

3182 LOGGER.debug("_read_extra_node_connectivity end at byte %d", self.geometry_section_size) 

3183 

3184 # pylint: disable = too-many-branches 

3185 @classmethod 

3186 def _read_header_part_contact_interface_titles( 

3187 cls, 

3188 header: D3plotHeader, 

3189 buffer: Union[BinaryBuffer, None], 

3190 geometry_section_size: int, 

3191 arrays: dict, 

3192 ) -> int: 

3193 """Read the header for the parts, contacts and interfaces 

3194 

3195 Parameters 

3196 ---------- 

3197 header: D3plotHeader 

3198 d3plot header 

3199 bb: BinaryBuffer 

3200 buffer holding geometry 

3201 geometry_section_size: int 

3202 size of the geometry section until now 

3203 arrays: dict 

3204 dictionary holding arrays and where arrays will be saved into 

3205 

3206 Returns 

3207 ------- 

3208 geometry_section_size: int 

3209 new size of the geometry section 

3210 """ 

3211 

3212 if not buffer: 

3213 return geometry_section_size 

3214 

3215 if header.filetype not in ( 

3216 D3plotFiletype.D3PLOT, 

3217 D3plotFiletype.D3PART, 

3218 D3plotFiletype.INTFOR, 

3219 ): 

3220 return geometry_section_size 

3221 

3222 LOGGER.debug( 

3223 "_read_header_part_contact_interface_titles start at byte %d", geometry_section_size 

3224 ) 

3225 

3226 position = geometry_section_size 

3227 

3228 # Security 

3229 # 

3230 # we try to read the titles ahead. If dyna writes multiple files 

3231 # then the first file is geometry only thus failing here has no 

3232 # impact on further state reading. 

3233 # If though states are compressed into the first file then we are 

3234 # in trouble here even when catching here. 

3235 try: 

3236 # there is only output if there is an eof marker 

3237 # at least I think I fixed such a bug in the past 

3238 if not cls._is_end_of_file_marker(buffer, position, header.ftype): 

3239 return geometry_section_size 

3240 

3241 position += header.wordsize 

3242 

3243 # section have types here according to what is inside 

3244 ntypes = [] 

3245 

3246 # read first ntype 

3247 current_ntype = buffer.read_number(position, header.itype) 

3248 

3249 while current_ntype in [90000, 90001, 90002, 90020]: 

3250 

3251 # title output 

3252 if current_ntype == 90000: 

3253 

3254 ntypes.append(current_ntype) 

3255 position += header.wordsize 

3256 

3257 # Bugfix: 

3258 # the titles are always 18*4 bytes, even if the wordsize 

3259 # is 8 bytes for the entire file. 

3260 titles_wordsize = 4 

3261 

3262 array_length = 18 * titles_wordsize 

3263 header.title2 = buffer.read_text(position, array_length) 

3264 position += array_length 

3265 

3266 # some title output 

3267 elif current_ntype in [90001, 90002, 90020]: 

3268 

3269 ntypes.append(current_ntype) 

3270 position += header.wordsize 

3271 

3272 # number of parts 

3273 entry_count = buffer.read_number(position, header.itype) 

3274 position += header.wordsize 

3275 

3276 # Bugfix: 

3277 # the titles are always 18*4 bytes, even if the wordsize 

3278 # is 8 bytes for the entire file. 

3279 titles_wordsize = 4 

3280 

3281 # part ids and corresponding titles 

3282 array_type = np.dtype( 

3283 [("ids", header.itype), ("titles", "S" + str(18 * titles_wordsize))] 

3284 ) 

3285 array_length = (header.wordsize + 18 * titles_wordsize) * int(entry_count) 

3286 tmp_arrays = buffer.read_ndarray(position, array_length, 1, array_type) 

3287 position += array_length 

3288 

3289 # save stuff 

3290 if current_ntype == 90001: 

3291 arrays[ArrayType.part_titles_ids] = tmp_arrays["ids"] 

3292 arrays[ArrayType.part_titles] = tmp_arrays["titles"] 

3293 elif current_ntype == 90002: 

3294 arrays[ArrayType.contact_title_ids] = tmp_arrays["ids"] 

3295 arrays[ArrayType.contact_titles] = tmp_arrays["titles"] 

3296 elif current_ntype == 90020: 

3297 arrays["icfd_part_title_ids"] = tmp_arrays["ids"] 

3298 arrays["icfd_part_titles"] = tmp_arrays["titles"] 

3299 

3300 # d3prop 

3301 elif current_ntype == 90100: 

3302 

3303 ntypes.append(current_ntype) 

3304 position += header.wordsize 

3305 

3306 # number of keywords 

3307 nline = buffer.read_number(position, header.itype) 

3308 position += header.wordsize 

3309 

3310 # Bugfix: 

3311 # the titles are always 18*4 bytes, even if the wordsize 

3312 # is 8 bytes for the entire file. 

3313 titles_wordsize = 4 

3314 

3315 # keywords 

3316 array_length = 20 * titles_wordsize * int(nline) 

3317 d3prop_keywords = buffer.read_ndarray( 

3318 position, array_length, 1, np.dtype("S" + str(titles_wordsize * 20)) 

3319 ) 

3320 position += array_length 

3321 

3322 # save 

3323 arrays["d3prop_keywords"] = d3prop_keywords 

3324 

3325 # not sure whether there is an eof file here 

3326 # do not have a test file to check ... 

3327 if cls._is_end_of_file_marker(buffer, position, header.ftype): 

3328 position += header.wordsize 

3329 

3330 # next one 

3331 if buffer.size <= position: 

3332 break 

3333 current_ntype = buffer.read_number(position, header.itype) 

3334 

3335 header.n_types = tuple(ntypes) 

3336 

3337 except Exception: 

3338 trb_msg = traceback.format_exc() 

3339 msg = "A failure in %s was caught:\n%s" 

3340 LOGGER.warning(msg, "_read_header_part_contact_interface_titles", trb_msg) 

3341 

3342 # remember position 

3343 geometry_section_size = position 

3344 LOGGER.debug( 

3345 "_read_header_part_contact_interface_titles end at byte %d", geometry_section_size 

3346 ) 

3347 

3348 return geometry_section_size 

3349 

    @staticmethod
    def _read_states_allocate_arrays(
        header: D3plotHeader,
        material_section_info: MaterialSectionInfo,
        airbag_info: AirbagInfo,
        rigid_road_info: RigidRoadInfo,
        rigid_body_info: RigidBodyInfo,
        n_states: int,
        n_rigid_walls: int,
        n_parts: int,
        array_names: Union[Iterable[str], None],
        array_dict: dict,
    ) -> None:
        """Allocate the state arrays

        Parameters
        ----------
        header: D3plotHeader
            header of the d3plot
        material_section_info: MaterialSectionInfo
            info about the material section data
        airbag_info: AirbagInfo
            info for airbags
        rigid_road_info: RigidRoadInfo
            info for rigid roads
        rigid_body_info: RigidBodyInfo
            info for rigid bodies
        n_states: int
            number of states to allocate memory for
        n_rigid_walls: int
            number of rigid walls
        n_parts: int
            number of parts
        array_names: Union[Iterable[str], None]
            names of state arrays to allocate (all if None)
        array_dict: dict
            dictionary to allocate arrays into

        Raises
        ------
        ValueError
            If a requested array name is not a known state array.
        """

        # (1) ARRAY SHAPES
        # derive all dimension sizes from the header and section infos
        # general
        n_dim = header.n_dimensions
        # nodes
        n_nodes = header.n_nodes
        # solids
        n_solids = header.n_solids
        n_solids_thermal_vars = header.n_solid_thermal_vars
        n_solids_strain_vars = 6 * header.has_element_strain * (header.n_solid_history_vars >= 6)
        n_solid_thermal_strain_vars = 6 * header.has_solid_shell_thermal_strain_tensor
        n_solid_plastic_strain_vars = 6 * header.has_solid_shell_plastic_strain_tensor
        n_solid_layers = header.n_solid_layers
        # history vars exclude the words consumed by the strain tensors above
        n_solids_history_vars = (
            header.n_solid_history_vars
            - n_solids_strain_vars
            - n_solid_thermal_strain_vars
            - n_solid_plastic_strain_vars
        )
        # thick shells
        n_tshells = header.n_thick_shells
        n_tshells_history_vars = header.n_shell_tshell_history_vars
        n_tshells_layers = header.n_shell_tshell_layers
        # beams
        n_beams = header.n_beams
        n_beams_history_vars = header.n_beam_history_vars
        n_beam_vars = header.n_beam_vars
        n_beams_layers = max(
            int((-3 * n_beams_history_vars + n_beam_vars - 6) / (n_beams_history_vars + 5)), 0
        )
        # shells
        # shells of rigid materials carry no state data, hence the reduced count
        n_shells = header.n_shells
        n_shells_reduced = header.n_shells - material_section_info.n_rigid_shells
        n_shell_layers = header.n_shell_tshell_layers
        n_shell_history_vars = header.n_shell_tshell_history_vars
        # sph
        allocate_sph = header.n_sph_nodes != 0
        n_sph_particles = header.n_sph_nodes if allocate_sph else 0
        # airbags
        allocate_airbags = header.n_airbags != 0
        n_airbags = header.n_airbags if allocate_airbags else 0
        n_airbag_particles = airbag_info.n_particles if allocate_airbags else 0
        # rigid roads
        allocate_rigid_roads = rigid_road_info.n_roads != 0
        n_roads = rigid_road_info.n_roads if allocate_rigid_roads else 0
        # rigid bodies
        n_rigid_bodies = rigid_body_info.n_rigid_bodies

        # dictionary to lookup array types
        state_array_shapes = {
            # global
            ArrayType.global_timesteps: [n_states],
            ArrayType.global_kinetic_energy: [n_states],
            ArrayType.global_internal_energy: [n_states],
            ArrayType.global_total_energy: [n_states],
            ArrayType.global_velocity: [n_states, 3],
            # parts
            ArrayType.part_internal_energy: [n_states, n_parts],
            ArrayType.part_kinetic_energy: [n_states, n_parts],
            ArrayType.part_velocity: [n_states, n_parts, 3],
            ArrayType.part_mass: [n_states, n_parts],
            ArrayType.part_hourglass_energy: [n_states, n_parts],
            # rigid wall
            ArrayType.rigid_wall_force: [n_states, n_rigid_walls],
            ArrayType.rigid_wall_position: [n_states, n_rigid_walls, 3],
            # nodes
            ArrayType.node_temperature: [n_states, n_nodes, 3]
            if header.has_node_temperature_layers
            else [n_states, n_nodes],
            ArrayType.node_heat_flux: [n_states, n_nodes, 3],
            ArrayType.node_mass_scaling: [n_states, n_nodes],
            ArrayType.node_displacement: [n_states, n_nodes, n_dim],
            ArrayType.node_velocity: [n_states, n_nodes, n_dim],
            ArrayType.node_acceleration: [n_states, n_nodes, n_dim],
            ArrayType.node_temperature_gradient: [n_states, n_nodes],
            ArrayType.node_residual_forces: [n_states, n_nodes, 3],
            ArrayType.node_residual_moments: [n_states, n_nodes, 3],
            # solids
            ArrayType.element_solid_thermal_data: [n_states, n_solids, n_solids_thermal_vars],
            ArrayType.element_solid_stress: [n_states, n_solids, n_solid_layers, 6],
            ArrayType.element_solid_effective_plastic_strain: [n_states, n_solids, n_solid_layers],
            ArrayType.element_solid_history_variables: [
                n_states,
                n_solids,
                n_solid_layers,
                n_solids_history_vars,
            ],
            ArrayType.element_solid_strain: [n_states, n_solids, n_solid_layers, 6],
            ArrayType.element_solid_is_alive: [n_states, n_solids],
            ArrayType.element_solid_plastic_strain_tensor: [n_states, n_solids, n_solid_layers, 6],
            ArrayType.element_solid_thermal_strain_tensor: [n_states, n_solids, n_solid_layers, 6],
            # thick shells
            ArrayType.element_tshell_stress: [n_states, n_tshells, n_tshells_layers, 6],
            ArrayType.element_tshell_effective_plastic_strain: [
                n_states,
                n_tshells,
                n_tshells_layers,
            ],
            ArrayType.element_tshell_history_variables: [
                n_states,
                n_tshells,
                n_tshells_layers,
                n_tshells_history_vars,
            ],
            ArrayType.element_tshell_strain: [n_states, n_tshells, 2, 6],
            ArrayType.element_tshell_is_alive: [n_states, n_tshells],
            # beams
            ArrayType.element_beam_axial_force: [n_states, n_beams],
            ArrayType.element_beam_shear_force: [n_states, n_beams, 2],
            ArrayType.element_beam_bending_moment: [n_states, n_beams, 2],
            ArrayType.element_beam_torsion_moment: [n_states, n_beams],
            ArrayType.element_beam_shear_stress: [n_states, n_beams, n_beams_layers, 2],
            ArrayType.element_beam_axial_stress: [n_states, n_beams, n_beams_layers],
            ArrayType.element_beam_plastic_strain: [n_states, n_beams, n_beams_layers],
            ArrayType.element_beam_axial_strain: [n_states, n_beams, n_beams_layers],
            ArrayType.element_beam_history_vars: [
                n_states,
                n_beams,
                n_beams_layers + 3,
                n_beams_history_vars,
            ],
            ArrayType.element_beam_is_alive: [n_states, n_beams],
            # shells
            ArrayType.element_shell_stress: [n_states, n_shells_reduced, n_shell_layers, 6],
            ArrayType.element_shell_effective_plastic_strain: [
                n_states,
                n_shells_reduced,
                n_shell_layers,
            ],
            ArrayType.element_shell_history_vars: [
                n_states,
                n_shells_reduced,
                n_shell_layers,
                n_shell_history_vars,
            ],
            ArrayType.element_shell_bending_moment: [n_states, n_shells_reduced, 3],
            ArrayType.element_shell_shear_force: [n_states, n_shells_reduced, 2],
            ArrayType.element_shell_normal_force: [n_states, n_shells_reduced, 3],
            ArrayType.element_shell_thickness: [n_states, n_shells_reduced],
            ArrayType.element_shell_unknown_variables: [n_states, n_shells_reduced, 2],
            ArrayType.element_shell_internal_energy: [n_states, n_shells_reduced],
            ArrayType.element_shell_strain: [n_states, n_shells_reduced, 2, 6],
            ArrayType.element_shell_thermal_strain_tensor: [n_states, n_shells_reduced, 6],
            ArrayType.element_shell_plastic_strain_tensor: [
                n_states,
                n_shells_reduced,
                n_shell_layers,
                6,
            ],
            ArrayType.element_shell_is_alive: [n_states, n_shells],
            # sph
            ArrayType.sph_deletion: [n_states, n_sph_particles],
            ArrayType.sph_radius: [n_states, n_sph_particles],
            ArrayType.sph_pressure: [n_states, n_sph_particles],
            ArrayType.sph_stress: [n_states, n_sph_particles, 6],
            ArrayType.sph_effective_plastic_strain: [n_states, n_sph_particles],
            ArrayType.sph_density: [n_states, n_sph_particles],
            ArrayType.sph_internal_energy: [n_states, n_sph_particles],
            ArrayType.sph_n_neighbors: [n_states, n_sph_particles],
            ArrayType.sph_strain: [n_states, n_sph_particles, 6],
            ArrayType.sph_mass: [n_states, n_sph_particles],
            # airbag
            ArrayType.airbag_n_active_particles: [n_states, n_airbags],
            ArrayType.airbag_bag_volume: [n_states, n_airbags],
            ArrayType.airbag_particle_gas_id: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_chamber_id: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_leakage: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_mass: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_radius: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_spin_energy: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_translation_energy: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_nearest_segment_distance: [n_states, n_airbag_particles],
            ArrayType.airbag_particle_position: [n_states, n_airbag_particles, 3],
            ArrayType.airbag_particle_velocity: [n_states, n_airbag_particles, 3],
            # rigid road
            ArrayType.rigid_road_displacement: [n_states, n_roads, 3],
            ArrayType.rigid_road_velocity: [n_states, n_roads, 3],
            # rigid body
            ArrayType.rigid_body_coordinates: [n_states, n_rigid_bodies, 3],
            ArrayType.rigid_body_rotation_matrix: [n_states, n_rigid_bodies, 9],
            ArrayType.rigid_body_velocity: [n_states, n_rigid_bodies, 3],
            ArrayType.rigid_body_rot_velocity: [n_states, n_rigid_bodies, 3],
            ArrayType.rigid_body_acceleration: [n_states, n_rigid_bodies, 3],
            ArrayType.rigid_body_rot_acceleration: [n_states, n_rigid_bodies, 3],
        }

        # only allocate available arrays
        if array_names is None:
            array_names = ArrayType.get_state_array_names()

        # BUGFIX
        # These arrays are actually integer types, all other state arrays
        # are floats
        int_state_arrays = [
            ArrayType.airbag_n_active_particles,
            ArrayType.airbag_particle_gas_id,
            ArrayType.airbag_particle_chamber_id,
            ArrayType.airbag_particle_leakage,
        ]

        # (2) ALLOCATE ARRAYS
        # this looper allocates the arrays specified by the user.
        # np.empty: contents are uninitialized until the state reader fills them
        for array_name in array_names:

            array_dtype = header.ftype if array_name not in int_state_arrays else header.itype

            if array_name in state_array_shapes:
                array_dict[array_name] = np.empty(state_array_shapes[array_name], dtype=array_dtype)
            else:
                raise ValueError(
                    f"Array '{array_name}' is not a state array. "
                    f"Please try one of: {list(state_array_shapes.keys())}",
                )

3601 

@staticmethod
def _read_states_transfer_memory(
    i_state: int, buffer_array_dict: dict, master_array_dict: dict
):
    """Copy buffered partial state arrays into the full-size master arrays

    Parameters
    ----------
    i_state: int
        state index at which the buffered timesteps start
    buffer_array_dict: dict
        dict with arrays holding only a few timesteps
    master_array_dict: dict
        dict with the parent master arrays

    Notes
    -----
    A state array present in the master dict but missing from the
    buffer dict is removed from the master dict entirely. Geometry
    arrays are never touched.
    """

    known_state_arrays = ArrayType.get_state_array_names()

    obsolete_names = []
    for name, master_array in master_array_dict.items():

        if name in buffer_array_dict:
            # paste the buffered timesteps into their slot of the
            # master array
            partial_array = buffer_array_dict[name]
            n_buffered = partial_array.shape[0]
            master_array[i_state : i_state + n_buffered] = partial_array
        elif name in known_state_arrays:
            # a state array never filled by any buffer is unnecessary;
            # dropping it here avoids replicating skip-logic in the
            # allocation routine (geometry arrays are kept on purpose)
            obsolete_names.append(name)

    for name in obsolete_names:
        del master_array_dict[name]

3644 

3645 def _compute_n_bytes_per_state(self) -> int: 

3646 """Computes the number of bytes for every state 

3647 

3648 Returns 

3649 ------- 

3650 n_bytes_per_state: int 

3651 number of bytes of every state 

3652 """ 

3653 

3654 if not self.header: 

3655 return 0 

3656 

3657 # timestep 

3658 timestep_offset = 1 * self.header.wordsize 

3659 # global vars 

3660 global_vars_offset = self.header.n_global_vars * self.header.wordsize 

3661 # node vars 

3662 n_node_vars = ( 

3663 self.header.has_node_displacement 

3664 + self.header.has_node_velocity 

3665 + self.header.has_node_acceleration 

3666 ) * self.header.n_dimensions 

3667 

3668 if self.header.has_node_temperatures: 

3669 n_node_vars += 1 

3670 if self.header.has_node_temperature_layers: 

3671 n_node_vars += 2 

3672 if self.header.has_node_heat_flux: 

3673 n_node_vars += 3 

3674 if self.header.has_node_mass_scaling: 

3675 n_node_vars += 1 

3676 if self.header.has_node_temperature_gradient: 

3677 n_node_vars += 1 

3678 if self.header.has_node_residual_forces: 

3679 n_node_vars += 3 

3680 if self.header.has_node_residual_moments: 

3681 n_node_vars += 3 

3682 

3683 node_data_offset = n_node_vars * self.header.n_nodes * self.header.wordsize 

3684 # thermal shit 

3685 therm_data_offset = ( 

3686 self.header.n_solid_thermal_vars * self.header.n_solids * self.header.wordsize 

3687 ) 

3688 # solids 

3689 solid_offset = self.header.n_solids * self.header.n_solid_vars * self.header.wordsize 

3690 # tshells 

3691 tshell_offset = ( 

3692 self.header.n_thick_shells * self.header.n_thick_shell_vars * self.header.wordsize 

3693 ) 

3694 # beams 

3695 beam_offset = self.header.n_beams * self.header.n_beam_vars * self.header.wordsize 

3696 # shells 

3697 shell_offset = ( 

3698 (self.header.n_shells - self._material_section_info.n_rigid_shells) 

3699 * self.header.n_shell_vars 

3700 * self.header.wordsize 

3701 ) 

3702 # Manual 

3703 # "NOTE: This CFDDATA is no longer output by ls-dyna." 

3704 cfd_data_offset = 0 

3705 # sph 

3706 sph_offset = self.header.n_sph_nodes * self._sph_info.n_sph_vars * self.header.wordsize 

3707 # deleted nodes and elems ... or nothing 

3708 elem_deletion_offset = 0 

3709 if self.header.has_node_deletion_data: 

3710 elem_deletion_offset = self.header.n_nodes * self.header.wordsize 

3711 elif self.header.has_element_deletion_data: 

3712 elem_deletion_offset = ( 

3713 self.header.n_beams 

3714 + self.header.n_shells 

3715 + self.header.n_solids 

3716 + self.header.n_thick_shells 

3717 ) * self.header.wordsize 

3718 # airbag particle offset 

3719 if self._airbag_info.n_airbags: 

3720 particle_state_offset = ( 

3721 self._airbag_info.n_airbags * self._airbag_info.n_airbag_state_variables 

3722 + self._airbag_info.n_particles * self._airbag_info.n_particle_state_variables 

3723 ) * self.header.wordsize 

3724 else: 

3725 particle_state_offset = 0 

3726 # rigid road stuff whoever uses this 

3727 road_surface_offset = self._rigid_road_info.n_roads * 6 * self.header.wordsize 

3728 # rigid body motion data 

3729 if self.header.has_rigid_body_data: 

3730 n_rigids = self._rigid_body_info.n_rigid_bodies 

3731 n_rigid_vars = 12 if self.header.has_reduced_rigid_body_data else 24 

3732 rigid_body_motion_offset = n_rigids * n_rigid_vars * self.header.wordsize 

3733 else: 

3734 rigid_body_motion_offset = 0 

3735 # ... not supported 

3736 extra_data_offset = 0 

3737 

3738 n_bytes_per_state = ( 

3739 timestep_offset 

3740 + global_vars_offset 

3741 + node_data_offset 

3742 + therm_data_offset 

3743 + solid_offset 

3744 + tshell_offset 

3745 + beam_offset 

3746 + shell_offset 

3747 + cfd_data_offset 

3748 + sph_offset 

3749 + elem_deletion_offset 

3750 + particle_state_offset 

3751 + road_surface_offset 

3752 + rigid_body_motion_offset 

3753 + extra_data_offset 

3754 ) 

3755 return n_bytes_per_state 

3756 

def _read_states(self, filepath: str):
    """Read the states from the d3plot

    Parameters
    ----------
    filepath: str
        path to the d3plot

    Notes
    -----
    Dispatches to a femzip or a plain d3plot generator and then walks
    every state block section by section. The sections must be consumed
    in exactly this order since ``var_index`` tracks the position in
    the flat state record.
    """

    # nothing to do without a buffer or a file
    if not self._buffer or not filepath:
        self._state_info.n_timesteps = 0
        return

    LOGGER.debug("-------- S T A T E S --------")
    LOGGER.debug("_read_states with geom offset %d", self.geometry_section_size)

    # (0) OFFSETS
    bytes_per_state = self._compute_n_bytes_per_state()
    LOGGER.debug("bytes_per_state: %d", bytes_per_state)

    # load the memory from the files
    if self._femzip_info.use_femzip:
        # NOTE(review): femzip states appear to carry one extra word per
        # state — confirm against the femzip format documentation
        bytes_per_state += 1 * self.header.wordsize
        self.bb_generator = self._read_femzip_file_generator(
            self.buffered_reading, self.state_filter
        )
    else:
        self.bb_generator = self._read_d3plot_file_generator(
            self.buffered_reading, self.state_filter
        )

    # (1) READ STATE DATA
    # first yield of the generator is the total number of states
    n_states = next(self.bb_generator)

    # determine whether to transfer arrays
    # buffered reading, state filtering and array filtering all produce
    # partial arrays which must be copied into the master arrays
    if not self.buffered_reading:
        transfer_arrays = False
    else:
        transfer_arrays = True
    if self.state_filter is not None and any(self.state_filter):
        transfer_arrays = True
    if self.state_array_filter:
        transfer_arrays = True

    # arrays need to be preallocated if we transfer them
    if transfer_arrays:
        self._read_states_allocate_arrays(
            self.header,
            self._material_section_info,
            self._airbag_info,
            self._rigid_road_info,
            self._rigid_body_info,
            n_states,
            self._n_rigid_walls,
            self._n_parts,
            self.state_array_filter,
            self.arrays,
        )

    i_state = 0
    for bb_states, n_states in self.bb_generator:

        # dictionary to store the temporary, partial arrays
        # if we do not transfer any arrays we store them directly
        # in the classes main dict
        array_dict = {} if transfer_arrays else self.arrays

        # sometimes there is just a geometry in the file
        if n_states == 0:
            continue

        # state data as array
        array_length = int(n_states) * int(bytes_per_state)
        state_data = bb_states.read_ndarray(0, array_length, 1, self.header.ftype)
        state_data = state_data.reshape((n_states, -1))

        # var_index walks through the flat state record; every reader
        # consumes its section and returns the advanced index
        var_index = 0

        # global state header
        var_index = self._read_states_global_section(state_data, var_index, array_dict)

        # node data
        var_index = self._read_states_nodes(state_data, var_index, array_dict)

        # thermal solid data
        var_index = self._read_states_solids_thermal(state_data, var_index, array_dict)

        # cfddata was originally here

        # solids
        var_index = self._read_states_solids(state_data, var_index, array_dict)

        # tshells
        var_index = self._read_states_tshell(state_data, var_index, array_dict)

        # beams
        var_index = self._read_states_beams(state_data, var_index, array_dict)

        # shells
        var_index = self._read_states_shell(state_data, var_index, array_dict)

        # element and node deletion info
        var_index = self._read_states_is_alive(state_data, var_index, array_dict)

        # sph
        var_index = self._read_states_sph(state_data, var_index, array_dict)

        # airbag particle data
        var_index = self._read_states_airbags(state_data, var_index, array_dict)

        # road surface data
        var_index = self._read_states_road_surfaces(state_data, var_index, array_dict)

        # rigid body motion
        var_index = self._read_states_rigid_body_motion(state_data, var_index, array_dict)

        # transfer memory
        if transfer_arrays:
            self._read_states_transfer_memory(i_state, array_dict, self.arrays)

        # increment state counter
        i_state += n_states
        self._state_info.n_timesteps = i_state

    # the raw buffer is no longer needed once everything lives in arrays
    if transfer_arrays:
        self._buffer = None
        self.bb_states = None

3884 

def _read_states_global_section(
    self, state_data: np.ndarray, var_index: int, array_dict: dict
) -> int:
    """Read the global vars for the state

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section
    """

    LOGGER.debug("_read_states_global_section start at var_index %d", var_index)

    # Globals, parts and rigid walls together make up the global section
    # whose total size is fixed by the header. If any of the sub-reads
    # fails we can only recover by skipping the section as a whole, so
    # everything shares one try block and the finally clause always
    # jumps to the end of the section.
    section_start = var_index
    try:
        # timestep
        array_dict[ArrayType.global_timesteps] = state_data[:, var_index]
        var_index += 1

        # global stuff
        var_index = self._read_states_globals(state_data, var_index, array_dict)

        # parts
        var_index = self._read_states_parts(state_data, var_index, array_dict)

        # rigid walls
        var_index = self._read_states_rigid_walls(state_data, var_index, array_dict)

    except Exception:
        trb_msg = traceback.format_exc()
        msg = "A failure in %s was caught:\n%s"
        LOGGER.warning(msg, "_read_states_global_section", trb_msg)
    finally:
        # the timestep word is not counted among the global vars
        timestep_var_size = 1
        var_index = section_start + self.header.n_global_vars + timestep_var_size

    LOGGER.debug("_read_states_global_section end at var_index %d", var_index)

    return var_index

3938 

def _read_states_globals(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int:
    """Read the global variables in the state section

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section
    """

    n_global_vars = self.header.n_global_vars

    # the global section begins with three scalar energies followed by
    # the global velocity vector; a field is only read when enough
    # global variables remain for it
    field_layout = (
        (ArrayType.global_kinetic_energy, 1),
        (ArrayType.global_internal_energy, 1),
        (ArrayType.global_total_energy, 1),
        (ArrayType.global_velocity, 3),
    )

    offset = 0
    for array_type, n_vars in field_layout:
        if offset + n_vars > n_global_vars:
            continue
        begin = var_index + offset
        if n_vars == 1:
            array_dict[array_type] = state_data[:, begin]
        else:
            array_dict[array_type] = state_data[:, begin : begin + n_vars]
        offset += n_vars

    return var_index + offset

3977 

def _read_states_parts(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int:
    """Read the part data in the state section

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section
    """

    n_states = state_data.shape[0]
    # the timestep counts as one extra word on top of the global vars
    timestep_word = 1
    n_global_vars = self.header.n_global_vars + timestep_word

    # part infos
    n_parts = self.header.n_parts

    # file layout of the part data: internal energy, kinetic energy,
    # velocity (3 components per part), mass and hourglass energy;
    # each field is read only if enough global variables remain
    part_fields = (
        (ArrayType.part_internal_energy, 1),
        (ArrayType.part_kinetic_energy, 1),
        (ArrayType.part_velocity, 3),
        (ArrayType.part_mass, 1),
        (ArrayType.part_hourglass_energy, 1),
    )

    for array_type, n_components in part_fields:
        width = n_components * n_parts
        if var_index + width > n_global_vars:
            continue
        section = state_data[:, var_index : var_index + width]
        if n_components == 3:
            section = section.reshape((n_states, n_parts, 3))
        array_dict[array_type] = section
        var_index += width

    return var_index

4038 

def _read_states_rigid_walls(
    self, state_data: np.ndarray, var_index: int, array_dict: dict
) -> int:
    """Read the rigid wall data in the state section

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section

    Notes
    -----
    The rigid wall data terminates the global variable section, so its
    expected end must coincide with ``n_global_vars``; otherwise the
    data is considered corrupt and skipped.
    """

    n_states = state_data.shape[0]

    # global variables consumed before the rigid walls: the six global
    # energy/velocity words plus seven words per part
    # NOTE(review): assumes the part section was fully present — confirm
    # for files where part fields were stripped (e.g. by plotcompress)
    i_global_var = 6 + 7 * self.header.n_parts
    n_global_vars = self.header.n_global_vars

    # rigid walls
    previous_global_vars = i_global_var
    # version >= 971 stores force + 3 position components, older
    # versions only the force
    n_rigid_wall_vars = 4 if self.header.version >= 971 else 1
    # +1 is timestep which is not considered a global var ... seriously
    n_rigid_walls = self._n_rigid_walls
    if n_global_vars >= previous_global_vars + n_rigid_walls * n_rigid_wall_vars:
        if (
            previous_global_vars + n_rigid_walls * n_rigid_wall_vars
            != self.header.n_global_vars
        ):
            # the rigid wall data does not line up with the end of the
            # global section: skip to the end instead of misreading
            LOGGER.warning("Bug while reading global data for rigid walls. Skipping this data.")
            var_index += self.header.n_global_vars - previous_global_vars
        else:

            # rigid wall force
            if n_rigid_walls * n_rigid_wall_vars != 0:
                array_dict[ArrayType.rigid_wall_force] = state_data[
                    :, var_index : var_index + n_rigid_walls
                ]
                var_index += n_rigid_walls

                # rigid wall position
                if n_rigid_wall_vars > 1:
                    array_dict[ArrayType.rigid_wall_position] = state_data[
                        :, var_index : var_index + 3 * n_rigid_walls
                    ].reshape(n_states, n_rigid_walls, 3)
                    var_index += 3 * n_rigid_walls

    return var_index

4093 

def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int:
    """Read the node data in the state section

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section

    Notes
    -----
    Each field advances ``var_index`` in a ``finally`` block so that the
    reader stays aligned with the file layout even if extracting one
    field fails.
    """

    if self.header.n_nodes <= 0:
        return var_index

    LOGGER.debug("_read_states_nodes start at var_index %d", var_index)

    n_dim = self.header.n_dimensions
    n_states = state_data.shape[0]
    n_nodes = self.header.n_nodes

    # displacement
    if self.header.has_node_displacement:
        try:
            tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(
                (n_states, n_nodes, n_dim)
            )
            array_dict[ArrayType.node_displacement] = tmp_array
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_displacement", trb_msg)
        finally:
            var_index += n_dim * n_nodes

    # temperatures
    if self.header.has_node_temperatures:

        # only node temperatures
        if not self.header.has_node_temperature_layers:
            try:
                array_dict[ArrayType.node_temperature] = state_data[
                    :, var_index : var_index + n_nodes
                ]
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(msg, "_read_states_nodes, node_temperatures", trb_msg)
            finally:
                var_index += n_nodes
        # node temperature layers
        # three temperature values per node (inner, middle, outer layer)
        else:
            try:
                tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape(
                    (n_states, n_nodes, 3)
                )
                array_dict[ArrayType.node_temperature] = tmp_array
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(msg, "_read_states_nodes, node_temperatures_layers", trb_msg)
            finally:
                var_index += 3 * n_nodes

    # node heat flux
    if self.header.has_node_heat_flux:
        try:
            tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape(
                (n_states, n_nodes, 3)
            )
            array_dict[ArrayType.node_heat_flux] = tmp_array
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_heat_flux", trb_msg)
        finally:
            var_index += 3 * n_nodes

    # mass scaling
    if self.header.has_node_mass_scaling:
        try:
            array_dict[ArrayType.node_mass_scaling] = state_data[
                :, var_index : var_index + n_nodes
            ]
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_mass_scaling", trb_msg)
        finally:
            var_index += n_nodes

    # node temperature gradient
    # Unclear: verify (could also be between temperature and node heat flux)
    if self.header.has_node_temperature_gradient:
        try:
            array_dict[ArrayType.node_temperature_gradient] = state_data[
                :, var_index : var_index + n_nodes
            ]
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_temperature_gradient", trb_msg)
        finally:
            var_index += n_nodes

    # node residual forces and moments
    # Unclear: verify (see before, according to docs this is after previous)
    if self.header.has_node_residual_forces:
        try:
            array_dict[ArrayType.node_residual_forces] = state_data[
                :, var_index : var_index + 3 * n_nodes
            ].reshape((n_states, n_nodes, 3))
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_residual_forces", trb_msg)
        finally:
            var_index += n_nodes * 3

    if self.header.has_node_residual_moments:
        try:
            array_dict[ArrayType.node_residual_moments] = state_data[
                :, var_index : var_index + 3 * n_nodes
            ].reshape((n_states, n_nodes, 3))
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_residual_moments", trb_msg)
        finally:
            var_index += n_nodes * 3

    # velocity
    if self.header.has_node_velocity:
        try:
            tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(
                (n_states, n_nodes, n_dim)
            )
            array_dict[ArrayType.node_velocity] = tmp_array
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_velocity", trb_msg)
        finally:
            var_index += n_dim * n_nodes

    # acceleration
    if self.header.has_node_acceleration:
        try:
            tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(
                (n_states, n_nodes, n_dim)
            )
            array_dict[ArrayType.node_acceleration] = tmp_array
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_nodes, node_acceleration", trb_msg)
        finally:
            var_index += n_dim * n_nodes

    LOGGER.debug("_read_states_nodes end at var_index %d", var_index)

    return var_index

4262 

def _read_states_solids_thermal(
    self, state_data: np.ndarray, var_index: int, array_dict: dict
) -> int:
    """Read the thermal data for solids

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section
    """

    if self.header.n_solid_thermal_vars <= 0:
        return var_index

    LOGGER.debug("_read_states_solids_thermal start at var_index %d", var_index)

    n_states = state_data.shape[0]
    n_solids = self.header.n_solids
    n_thermal_vars = self.header.n_solid_thermal_vars
    section_width = n_solids * n_thermal_vars

    try:
        raw_section = state_data[:, var_index : var_index + section_width]
        array_dict[ArrayType.element_solid_thermal_data] = raw_section.reshape(
            (n_states, n_solids, n_thermal_vars)
        )
    except Exception:
        trb_msg = traceback.format_exc()
        msg = "A failure in %s was caught:\n%s"
        LOGGER.warning(msg, "_read_states_solids_thermal", trb_msg)
    finally:
        # always advance past the section even if the read failed
        var_index += n_thermal_vars * n_solids

    LOGGER.debug("_read_states_solids_thermal end at var_index %d", var_index)

    return var_index

4307 

def _read_states_solids(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int:
    """Read the state data of the solid elements

    Parameters
    ----------
    state_data: np.ndarray
        array with entire state data
    var_index: int
        variable index in the state data array
    array_dict: dict
        dictionary to store the loaded arrays in

    Returns
    -------
    var_index: int
        updated variable index after reading the section

    Notes
    -----
    The solid record is reshaped to
    ``(n_states, n_solids, n_solid_layers, vars_per_layer)`` and every
    field is sliced along the last (variable) axis while
    ``i_solid_var`` tracks the position inside one layer record.
    """

    if self.header.n_solids <= 0 or self.header.n_solid_vars <= 0:
        return var_index

    LOGGER.debug("_read_states_solids start at var_index %d", var_index)

    n_solid_vars = self.header.n_solid_vars
    n_solids = self.header.n_solids
    n_states = state_data.shape[0]
    n_strain_vars = 6 * self.header.has_element_strain
    n_history_vars = self.header.n_solid_history_vars
    n_solid_layers = self.header.n_solid_layers

    # double safety here, if either the formatting of the solid state data
    # or individual arrays fails then we catch it
    try:
        # NOTE: a sanity check of n_solid_vars against its recomputed
        # value is deliberately disabled: plotcompress can delete
        # variables so that stress or pstrain might be missing despite
        # being always present in the file spec
        solid_state_data = state_data[
            :, var_index : var_index + n_solid_vars * n_solids
        ].reshape((n_states, n_solids, n_solid_layers, n_solid_vars // n_solid_layers))

        i_solid_var = 0

        # stress
        try:
            if self.header.has_solid_stress:
                array_dict[ArrayType.element_solid_stress] = solid_state_data[:, :, :, :6]
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_solids, stress", trb_msg)
        finally:
            i_solid_var += 6 * self.header.has_solid_stress

        # effective plastic strain
        try:
            # in case plotcompress deleted stresses but pstrain exists
            if self.header.has_solid_pstrain:
                array_dict[ArrayType.element_solid_effective_plastic_strain] = solid_state_data[
                    :, :, :, i_solid_var
                ].reshape((n_states, n_solids, n_solid_layers))
        except Exception:
            trb_msg = traceback.format_exc()
            msg = "A failure in %s was caught:\n%s"
            LOGGER.warning(msg, "_read_states_solids, eff_plastic_strain", trb_msg)
        finally:
            i_solid_var += 1 * self.header.has_solid_pstrain

        # history vars
        if n_history_vars:
            try:
                array_dict[ArrayType.element_solid_history_variables] = solid_state_data[
                    :, :, :, i_solid_var : i_solid_var + n_history_vars
                ]
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(msg, "_read_states_solids, history_variables", trb_msg)
            finally:
                i_solid_var += n_history_vars

        # strain
        # they are the last 6 entries of the history vars
        if n_strain_vars:
            try:
                array_dict[ArrayType.element_solid_strain] = array_dict[
                    ArrayType.element_solid_history_variables
                ][:, :, :, -n_strain_vars:]

                array_dict[ArrayType.element_solid_history_variables] = array_dict[
                    ArrayType.element_solid_history_variables
                ][:, :, :, :-n_strain_vars]

                # drop the history array if splitting off the strain
                # left it empty
                if not all(array_dict[ArrayType.element_solid_history_variables].shape):
                    del array_dict[ArrayType.element_solid_history_variables]
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(msg, "_read_states_solids, strain", trb_msg)

        # plastic strain tensor
        if self.header.has_solid_shell_plastic_strain_tensor:
            try:
                array_dict[ArrayType.element_solid_plastic_strain_tensor] = solid_state_data[
                    :, :, :, i_solid_var : i_solid_var + 6
                ]
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(
                    msg, "_read_states_solids, element_solid_plastic_strain_tensor", trb_msg
                )
            finally:
                i_solid_var += 6

        # thermal strain tensor
        if self.header.has_solid_shell_thermal_strain_tensor:
            try:
                # BUGFIX: slice the variable axis (axis 3) like the
                # plastic strain tensor above; previously only three
                # index expressions were given which sliced the layer
                # axis instead of the variables
                array_dict[ArrayType.element_solid_thermal_strain_tensor] = solid_state_data[
                    :, :, :, i_solid_var : i_solid_var + 6
                ]
            except Exception:
                trb_msg = traceback.format_exc()
                msg = "A failure in %s was caught:\n%s"
                LOGGER.warning(
                    msg, "_read_states_solids, element_solid_thermal_strain_tensor", trb_msg
                )
            finally:
                i_solid_var += 6

    # catch formatting issues in solid_state_data
    except Exception:
        trb_msg = traceback.format_exc()
        msg = "A failure in %s was caught:\n%s"
        LOGGER.warning(msg, "_read_states_solids, solid_state_data", trb_msg)
    # always increment variable count
    finally:
        var_index += n_solids * n_solid_vars

    LOGGER.debug("_read_states_solids end at var_index %d", var_index)

    return var_index

4460 

4461 def _read_states_tshell(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int: 

4462 """Read the state data for thick shell elements 

4463 

4464 Parameters 

4465 ---------- 

4466 state_data: np.ndarray 

4467 array with entire state data 

4468 var_index: int 

4469 variable index in the state data array 

4470 array_dict: dict 

4471 dictionary to store the loaded arrays in 

4472 

4473 Returns 

4474 ------- 

4475 var_index: int 

4476 updated variable index after reading the section 

4477 """ 

4478 

4479 if self.header.n_thick_shells <= 0 or self.header.n_thick_shell_vars <= 0: 

4480 return var_index 

4481 

4482 LOGGER.debug("_read_states_tshell start at var_index %d", var_index) 

4483 

4484 n_states = state_data.shape[0] 

4485 n_tshells = self.header.n_thick_shells 

4486 n_history_vars = self.header.n_shell_tshell_history_vars 

4487 n_layers = self.header.n_shell_tshell_layers 

4488 n_layer_vars = n_layers * ( 

4489 6 * self.header.has_shell_tshell_stress 

4490 + self.header.has_shell_tshell_pstrain 

4491 + n_history_vars 

4492 ) 

4493 n_strain_vars = 12 * self.header.has_element_strain 

4494 n_thsell_vars = self.header.n_thick_shell_vars 

4495 has_stress = self.header.has_shell_tshell_stress 

4496 has_pstrain = self.header.has_shell_tshell_pstrain 

4497 

4498 try: 

4499 # this is a sanity check if the manual was understood correctly 

4500 n_tshell_vars2 = n_layer_vars + n_strain_vars 

4501 

4502 if n_tshell_vars2 != n_thsell_vars: 

4503 msg = ( 

4504 "n_tshell_vars != n_tshell_vars_computed: %d != %d." 

4505 " Thick shell variables might be wrong." 

4506 ) 

4507 LOGGER.warning(msg, n_thsell_vars, n_tshell_vars2) 

4508 

4509 # thick shell element data 

4510 tshell_data = state_data[:, var_index : var_index + n_thsell_vars * n_tshells] 

4511 tshell_data = tshell_data.reshape((n_states, n_tshells, n_thsell_vars)) 

4512 

4513 # extract layer data 

4514 tshell_layer_data = tshell_data[:, :, slice(0, n_layer_vars)] 

4515 tshell_layer_data = tshell_layer_data.reshape((n_states, n_tshells, n_layers, -1)) 

4516 tshell_nonlayer_data = tshell_data[:, :, n_layer_vars:] 

4517 

4518 # STRESS 

4519 i_tshell_layer_var = 0 

4520 if has_stress: 

4521 try: 

4522 array_dict[ArrayType.element_tshell_stress] = tshell_layer_data[ 

4523 :, :, :, i_tshell_layer_var : i_tshell_layer_var + 6 

4524 ].reshape((n_states, n_tshells, n_layers, 6)) 

4525 except Exception: 

4526 trb_msg = traceback.format_exc() 

4527 msg = "A failure in %d was caught:\n%s" 

4528 LOGGER.warning(msg, "_read_states_tshell, stress", trb_msg) 

4529 finally: 

4530 i_tshell_layer_var += 6 

4531 

4532 # PSTRAIN 

4533 if has_pstrain: 

4534 try: 

4535 array_dict[ 

4536 ArrayType.element_tshell_effective_plastic_strain 

4537 ] = tshell_layer_data[:, :, :, i_tshell_layer_var].reshape( 

4538 (n_states, n_tshells, n_layers) 

4539 ) 

4540 except Exception: 

4541 trb_msg = traceback.format_exc() 

4542 msg = "A failure in %s was caught:\n%s" 

4543 LOGGER.warning(msg, "_read_states_tshell, eff_plastic_strain", trb_msg) 

4544 finally: 

4545 i_tshell_layer_var += 1 

4546 

4547 # HISTORY VARS 

4548 if n_history_vars: 

4549 try: 

4550 array_dict[ArrayType.element_tshell_history_variables] = tshell_layer_data[ 

4551 :, :, :, i_tshell_layer_var : i_tshell_layer_var + n_history_vars 

4552 ].reshape((n_states, n_tshells, n_layers, n_history_vars)) 

4553 except Exception: 

4554 trb_msg = traceback.format_exc() 

4555 msg = "A failure in %s was caught:\n%s" 

4556 LOGGER.warning(msg, "_read_states_tshell, history_variables", trb_msg) 

4557 

4558 # STRAIN (only non layer data for tshells) 

4559 if n_strain_vars: 

4560 try: 

4561 tshell_nonlayer_data = tshell_nonlayer_data[:, :, :n_strain_vars] 

4562 array_dict[ArrayType.element_tshell_strain] = tshell_nonlayer_data.reshape( 

4563 (n_states, n_tshells, 2, 6) 

4564 ) 

4565 except Exception: 

4566 trb_msg = traceback.format_exc() 

4567 msg = "A failure in %s was caught:\n%s" 

4568 LOGGER.warning(msg, "_read_states_tshell, strain", trb_msg) 

4569 

4570 except Exception: 

4571 trb_msg = traceback.format_exc() 

4572 msg = "A failure in %s was caught:\n%s" 

4573 LOGGER.warning(msg, "_read_states_tshell, tshell_data", trb_msg) 

4574 finally: 

4575 var_index += n_thsell_vars * n_tshells 

4576 

4577 LOGGER.debug("_read_states_tshell end at var_index %d", var_index) 

4578 

4579 return var_index 

4580 

4581 def _read_states_beams(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int: 

4582 """Read the state data for beams 

4583 

4584 Parameters 

4585 ---------- 

4586 state_data: np.ndarray 

4587 array with entire state data 

4588 var_index: int 

4589 variable index in the state data array 

4590 array_dict: dict 

4591 dictionary to store the loaded arrays in 

4592 

4593 Returns 

4594 ------- 

4595 var_index: int 

4596 updated variable index after reading the section 

4597 """ 

4598 

4599 if self.header.n_beams <= 0 or self.header.n_beam_vars <= 0: 

4600 return var_index 

4601 

4602 LOGGER.debug("_read_states_beams start at var_index %d", var_index) 

4603 

4604 # usual beam vars 

4605 # pylint: disable = invalid-name 

4606 N_BEAM_BASIC_VARS = 6 

4607 # beam intergration point vars 

4608 # pylint: disable = invalid-name 

4609 N_BEAM_IP_VARS = 5 

4610 

4611 n_states = state_data.shape[0] 

4612 n_beams = self.header.n_beams 

4613 n_history_vars = self.header.n_beam_history_vars 

4614 n_beam_vars = self.header.n_beam_vars 

4615 n_layers = int( 

4616 (-3 * n_history_vars + n_beam_vars - N_BEAM_BASIC_VARS) 

4617 / (n_history_vars + N_BEAM_IP_VARS) 

4618 ) 

4619 # n_layer_vars = 6 + N_BEAM_IP_VARS * n_layers 

4620 n_layer_vars = N_BEAM_IP_VARS * n_layers 

4621 

4622 try: 

4623 # beam element data 

4624 beam_data = state_data[:, var_index : var_index + n_beam_vars * n_beams] 

4625 beam_data = beam_data.reshape((n_states, n_beams, n_beam_vars)) 

4626 

4627 # extract layer data 

4628 beam_nonlayer_data = beam_data[:, :, :N_BEAM_BASIC_VARS] 

4629 beam_layer_data = beam_data[:, :, N_BEAM_BASIC_VARS : N_BEAM_BASIC_VARS + n_layer_vars] 

4630 beam_layer_data = beam_layer_data.reshape((n_states, n_beams, n_layers, N_BEAM_IP_VARS)) 

4631 

4632 # axial force 

4633 try: 

4634 array_dict[ArrayType.element_beam_axial_force] = beam_nonlayer_data[ 

4635 :, :, 0 

4636 ].reshape((n_states, n_beams)) 

4637 except Exception: 

4638 trb_msg = traceback.format_exc() 

4639 msg = "A failure in %s was caught:\n%s" 

4640 LOGGER.warning(msg, "_read_states_beams, axial_force", trb_msg) 

4641 

4642 # shear force 

4643 try: 

4644 array_dict[ArrayType.element_beam_shear_force] = beam_nonlayer_data[ 

4645 :, :, 1:3 

4646 ].reshape((n_states, n_beams, 2)) 

4647 except Exception: 

4648 trb_msg = traceback.format_exc() 

4649 msg = "A failure in %s was caught:\n%s" 

4650 LOGGER.warning(msg, "_read_states_beams, shear_force", trb_msg) 

4651 

4652 # bending moment 

4653 try: 

4654 array_dict[ArrayType.element_beam_bending_moment] = beam_nonlayer_data[ 

4655 :, :, 3:5 

4656 ].reshape((n_states, n_beams, 2)) 

4657 except Exception: 

4658 trb_msg = traceback.format_exc() 

4659 msg = "A failure in %s was caught:\n%s" 

4660 LOGGER.warning(msg, "_read_states_beams, bending_moment", trb_msg) 

4661 

4662 # torsion moment 

4663 try: 

4664 array_dict[ArrayType.element_beam_torsion_moment] = beam_nonlayer_data[ 

4665 :, :, 5 

4666 ].reshape((n_states, n_beams)) 

4667 except Exception: 

4668 trb_msg = traceback.format_exc() 

4669 msg = "A failure in %s was caught:\n%s" 

4670 LOGGER.warning(msg, "_read_states_beams, torsion_moment", trb_msg) 

4671 

4672 if n_layers: 

4673 

4674 # BUGFIX? 

4675 # According to the database manual the first 

4676 # two layer vars are the shear stress and then 

4677 # axial stress. Tests with FEMZIP and META though 

4678 # suggests that axial stress comes first. 

4679 

4680 # axial stress 

4681 try: 

4682 array_dict[ArrayType.element_beam_axial_stress] = beam_layer_data[:, :, :, 0] 

4683 except Exception: 

4684 trb_msg = traceback.format_exc() 

4685 msg = "A failure in %s was caught:\n%s" 

4686 LOGGER.warning(msg, "_read_states_beams, axial_stress", trb_msg) 

4687 

4688 # shear stress 

4689 try: 

4690 array_dict[ArrayType.element_beam_shear_stress] = beam_layer_data[:, :, :, 1:3] 

4691 except Exception: 

4692 trb_msg = traceback.format_exc() 

4693 msg = "A failure in %s was caught:\n%s" 

4694 LOGGER.warning(msg, "_read_states_beams, shear_stress", trb_msg) 

4695 

4696 # eff. plastic strain 

4697 try: 

4698 array_dict[ArrayType.element_beam_plastic_strain] = beam_layer_data[:, :, :, 3] 

4699 except Exception: 

4700 trb_msg = traceback.format_exc() 

4701 msg = "A failure in %s was caught:\n%s" 

4702 LOGGER.warning(msg, "_read_states_beams, eff_plastic_strain", trb_msg) 

4703 

4704 # axial strain 

4705 try: 

4706 array_dict[ArrayType.element_beam_axial_strain] = beam_layer_data[:, :, :, 4] 

4707 except Exception: 

4708 trb_msg = traceback.format_exc() 

4709 msg = "A failure in %s was caught:\n%s" 

4710 LOGGER.warning(msg, "_read_states_beams, axial_strain", trb_msg) 

4711 

4712 # history vars 

4713 if n_history_vars: 

4714 try: 

4715 array_dict[ArrayType.element_beam_history_vars] = beam_data[ 

4716 :, :, 6 + n_layer_vars : 

4717 ].reshape((n_states, n_beams, 3 + n_layers, n_history_vars)) 

4718 except Exception: 

4719 trb_msg = traceback.format_exc() 

4720 msg = "A failure in %s was caught:\n%s" 

4721 LOGGER.warning(msg, "_read_states_beams, history_variables", trb_msg) 

4722 

4723 # failure of formatting beam state data 

4724 except Exception: 

4725 trb_msg = traceback.format_exc() 

4726 msg = "A failure in %s was caught:\n%s" 

4727 LOGGER.warning(msg, "_read_states_beams, beam_state_data", trb_msg) 

4728 # always increment variable index 

4729 finally: 

4730 var_index += n_beams * n_beam_vars 

4731 

4732 LOGGER.debug("_read_states_beams end at var_index %d", var_index) 

4733 

4734 return var_index 

4735 

4736 def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int: 

4737 """Read the state data for shell elements 

4738 

4739 Parameters 

4740 ---------- 

4741 state_data: np.ndarray 

4742 array with entire state data 

4743 var_index: int 

4744 variable index in the state data array 

4745 array_dict: dict 

4746 dictionary to store the loaded arrays in 

4747 

4748 Returns 

4749 ------- 

4750 var_index: int 

4751 updated variable index after reading the section 

4752 """ 

4753 

4754 # bugfix 

4755 # 

4756 # Interestingly, dyna seems to write result values for rigid shells in 

4757 # the d3part file, but not in the d3plot. Of course this is not 

4758 # documented ... 

4759 n_reduced_shells = ( 

4760 self.header.n_shells 

4761 if self.header.filetype == D3plotFiletype.D3PART 

4762 else self.header.n_shells - self._material_section_info.n_rigid_shells 

4763 ) 

4764 

4765 if self.header.n_shell_vars <= 0 or n_reduced_shells <= 0: 

4766 return var_index 

4767 

4768 LOGGER.debug("_read_states_shell start at var_index %d", var_index) 

4769 

4770 n_states = state_data.shape[0] 

4771 n_shells = n_reduced_shells 

4772 n_shell_vars = self.header.n_shell_vars 

4773 

4774 # what is in the file? 

4775 n_layers = self.header.n_shell_tshell_layers 

4776 n_history_vars = self.header.n_shell_tshell_history_vars 

4777 n_stress_vars = 6 * self.header.has_shell_tshell_stress 

4778 n_pstrain_vars = 1 * self.header.has_shell_tshell_pstrain 

4779 n_force_variables = 8 * self.header.has_shell_forces 

4780 n_extra_variables = 4 * self.header.has_shell_extra_variables 

4781 n_strain_vars = 12 * self.header.has_element_strain 

4782 n_plastic_strain_tensor = 6 * n_layers * self.header.has_solid_shell_plastic_strain_tensor 

4783 n_thermal_strain_tensor = 6 * self.header.has_solid_shell_thermal_strain_tensor 

4784 

4785 try: 

4786 # this is a sanity check if the manual was understood correctly 

4787 n_shell_vars2 = ( 

4788 n_layers * (n_stress_vars + n_pstrain_vars + n_history_vars) 

4789 + n_force_variables 

4790 + n_extra_variables 

4791 + n_strain_vars 

4792 + n_plastic_strain_tensor 

4793 + n_thermal_strain_tensor 

4794 ) 

4795 

4796 if n_shell_vars != n_shell_vars2: 

4797 msg = ( 

4798 "n_shell_vars != n_shell_vars_computed: %d != %d." 

4799 " Shell variables might be wrong." 

4800 ) 

4801 LOGGER.warning(msg, n_shell_vars, n_shell_vars2) 

4802 

4803 n_layer_vars = n_layers * (n_stress_vars + n_pstrain_vars + n_history_vars) 

4804 

4805 # shell element data 

4806 shell_data = state_data[:, var_index : var_index + n_shell_vars * n_shells] 

4807 shell_data = shell_data.reshape((n_states, n_shells, n_shell_vars)) 

4808 

4809 # extract layer data 

4810 shell_layer_data = shell_data[:, :, :n_layer_vars] 

4811 shell_layer_data = shell_layer_data.reshape((n_states, n_shells, n_layers, -1)) 

4812 shell_nonlayer_data = shell_data[:, :, n_layer_vars:] 

4813 

4814 # save layer stuff 

4815 # STRESS 

4816 layer_var_index = 0 

4817 if n_stress_vars: 

4818 try: 

4819 array_dict[ArrayType.element_shell_stress] = shell_layer_data[ 

4820 :, :, :, :n_stress_vars 

4821 ].reshape((n_states, n_shells, n_layers, n_stress_vars)) 

4822 except Exception: 

4823 trb_msg = traceback.format_exc() 

4824 msg = "A failure in %s was caught:\n%s" 

4825 LOGGER.warning(msg, "_read_states_shells, stress", trb_msg) 

4826 finally: 

4827 layer_var_index += n_stress_vars 

4828 

4829 # PSTRAIN 

4830 if n_pstrain_vars: 

4831 try: 

4832 array_dict[ArrayType.element_shell_effective_plastic_strain] = shell_layer_data[ 

4833 :, :, :, layer_var_index 

4834 ].reshape((n_states, n_shells, n_layers)) 

4835 except Exception: 

4836 trb_msg = traceback.format_exc() 

4837 msg = "A failure in %s was caught:\n%s" 

4838 LOGGER.warning(msg, "_read_states_shells, stress", trb_msg) 

4839 finally: 

4840 layer_var_index += 1 

4841 

4842 # HISTORY VARIABLES 

4843 if n_history_vars: 

4844 try: 

4845 array_dict[ArrayType.element_shell_history_vars] = shell_layer_data[ 

4846 :, :, :, layer_var_index : layer_var_index + n_history_vars 

4847 ].reshape((n_states, n_shells, n_layers, n_history_vars)) 

4848 except Exception: 

4849 trb_msg = traceback.format_exc() 

4850 msg = "A failure in %s was caught:\n%s" 

4851 LOGGER.warning(msg, "_read_states_shells, history_variables", trb_msg) 

4852 finally: 

4853 layer_var_index += n_history_vars 

4854 

4855 # save nonlayer stuff 

4856 # forces 

4857 nonlayer_var_index = 0 

4858 if n_force_variables: 

4859 try: 

4860 array_dict[ArrayType.element_shell_bending_moment] = shell_nonlayer_data[ 

4861 :, :, 0:3 

4862 ].reshape((n_states, n_shells, 3)) 

4863 array_dict[ArrayType.element_shell_shear_force] = shell_nonlayer_data[ 

4864 :, :, 3:5 

4865 ].reshape((n_states, n_shells, 2)) 

4866 array_dict[ArrayType.element_shell_normal_force] = shell_nonlayer_data[ 

4867 :, :, 5:8 

4868 ].reshape((n_states, n_shells, 3)) 

4869 except Exception: 

4870 trb_msg = traceback.format_exc() 

4871 msg = "A failure in %s was caught:\n%s" 

4872 LOGGER.warning(msg, "_read_states_shells, forces", trb_msg) 

4873 finally: 

4874 nonlayer_var_index += n_force_variables 

4875 

4876 # weird stuff 

4877 if n_extra_variables: 

4878 try: 

4879 array_dict[ArrayType.element_shell_thickness] = shell_nonlayer_data[ 

4880 :, :, nonlayer_var_index 

4881 ].reshape((n_states, n_shells)) 

4882 array_dict[ArrayType.element_shell_unknown_variables] = shell_nonlayer_data[ 

4883 :, :, nonlayer_var_index + 1 : nonlayer_var_index + 3 

4884 ].reshape((n_states, n_shells, 2)) 

4885 except Exception: 

4886 trb_msg = traceback.format_exc() 

4887 msg = "A failure in %s was caught:\n%s" 

4888 LOGGER.warning(msg, "_read_states_shells, history_variables", trb_msg) 

4889 finally: 

4890 nonlayer_var_index += 3 

4891 

4892 # strain present 

4893 if n_strain_vars: 

4894 try: 

4895 shell_strain = shell_nonlayer_data[ 

4896 :, :, nonlayer_var_index : nonlayer_var_index + n_strain_vars 

4897 ] 

4898 array_dict[ArrayType.element_shell_strain] = shell_strain.reshape( 

4899 (n_states, n_shells, 2, 6) 

4900 ) 

4901 except Exception: 

4902 trb_msg = traceback.format_exc() 

4903 msg = "A failure in %s was caught:\n%s" 

4904 LOGGER.warning(msg, "_read_states_shells, strain", trb_msg) 

4905 finally: 

4906 nonlayer_var_index += n_strain_vars 

4907 

4908 # internal energy is behind strain if strain is written 

4909 if self.header.has_shell_extra_variables: 

4910 try: 

4911 array_dict[ArrayType.element_shell_internal_energy] = shell_nonlayer_data[ 

4912 :, :, nonlayer_var_index 

4913 ].reshape((n_states, n_shells)) 

4914 except Exception: 

4915 trb_msg = traceback.format_exc() 

4916 msg = "A failure in %s was caught:\n%s" 

4917 LOGGER.warning(msg, "_read_states_shells, internal_energy", trb_msg) 

4918 

4919 # PLASTIC STRAIN TENSOR 

4920 if n_plastic_strain_tensor: 

4921 try: 

4922 pstrain_tensor = shell_nonlayer_data[ 

4923 :, :, nonlayer_var_index : nonlayer_var_index + n_plastic_strain_tensor 

4924 ] 

4925 array_dict[ 

4926 ArrayType.element_shell_plastic_strain_tensor 

4927 ] = pstrain_tensor.reshape((n_states, n_shells, n_layers, 6)) 

4928 except Exception: 

4929 trb_msg = traceback.format_exc() 

4930 msg = "A failure in %s was caught:\n%s" 

4931 LOGGER.warning( 

4932 msg, "_read_states_shells, element_shell_plastic_strain_tensor", trb_msg 

4933 ) 

4934 finally: 

4935 nonlayer_var_index += n_plastic_strain_tensor 

4936 

4937 # THERMAL STRAIN TENSOR 

4938 if n_thermal_strain_tensor: 

4939 try: 

4940 thermal_tensor = shell_nonlayer_data[ 

4941 :, :, nonlayer_var_index : nonlayer_var_index + n_thermal_strain_tensor 

4942 ] 

4943 array_dict[ 

4944 ArrayType.element_shell_thermal_strain_tensor 

4945 ] = thermal_tensor.reshape((n_states, n_shells, 6)) 

4946 except Exception: 

4947 trb_msg = traceback.format_exc() 

4948 msg = "A failure in %s was caught:\n%s" 

4949 LOGGER.warning( 

4950 msg, "_read_states_shells, element_shell_thermal_strain_tensor", trb_msg 

4951 ) 

4952 finally: 

4953 nonlayer_var_index += n_thermal_strain_tensor 

4954 

4955 # error in formatting shell state data 

4956 except Exception: 

4957 trb_msg = traceback.format_exc() 

4958 msg = "A failure in %s was caught:\n%s" 

4959 LOGGER.warning(msg, "_read_states_shell, shell_state_data", trb_msg) 

4960 

4961 # always increment variable index 

4962 finally: 

4963 var_index += n_shell_vars * n_shells 

4964 

4965 LOGGER.debug("_read_states_shell end at var_index %d", var_index) 

4966 

4967 return var_index 

4968 

4969 def _read_states_is_alive( 

4970 self, state_data: np.ndarray, var_index: int, array_dict: dict 

4971 ) -> int: 

4972 """Read deletion info for nodes, elements, etc 

4973 

4974 Parameters 

4975 ---------- 

4976 state_data: np.ndarray 

4977 array with entire state data 

4978 var_index: int 

4979 variable index in the state data array 

4980 array_dict: dict 

4981 dictionary to store the loaded arrays in 

4982 

4983 Returns 

4984 ------- 

4985 var_index: int 

4986 updated variable index after reading the section 

4987 """ 

4988 

4989 if not self.header.has_node_deletion_data and not self.header.has_element_deletion_data: 

4990 return var_index 

4991 

4992 LOGGER.debug("_read_states_is_alive start at var_index %s", var_index) 

4993 

4994 n_states = state_data.shape[0] 

4995 

4996 # NODES 

4997 if self.header.has_node_deletion_data: 

4998 n_nodes = self.header.n_nodes 

4999 

5000 if n_nodes > 0: 

5001 try: 

5002 array_dict[ArrayType.node_is_alive] = state_data[ 

5003 :, var_index : var_index + n_nodes 

5004 ] 

5005 except Exception: 

5006 trb_msg = traceback.format_exc() 

5007 msg = "A failure in %s was caught:\n%s" 

5008 LOGGER.warning(msg, "_read_states_is_alive, nodes", trb_msg) 

5009 finally: 

5010 var_index += n_nodes 

5011 

5012 # element deletion info 

5013 elif self.header.has_element_deletion_data: 

5014 n_solids = self.header.n_solids 

5015 n_tshells = self.header.n_thick_shells 

5016 n_shells = self.header.n_shells 

5017 n_beams = self.header.n_beams 

5018 # n_elems = n_solids + n_tshells + n_shells + n_beams 

5019 

5020 # SOLIDS 

5021 if n_solids > 0: 

5022 try: 

5023 array_dict[ArrayType.element_solid_is_alive] = state_data[ 

5024 :, var_index : var_index + n_solids 

5025 ].reshape((n_states, n_solids)) 

5026 except Exception: 

5027 trb_msg = traceback.format_exc() 

5028 msg = "A failure in %s was caught:\n%s" 

5029 LOGGER.warning(msg, "_read_states_is_alive, solids", trb_msg) 

5030 finally: 

5031 var_index += n_solids 

5032 

5033 # TSHELLS 

5034 if n_tshells > 0: 

5035 try: 

5036 array_dict[ArrayType.element_tshell_is_alive] = state_data[ 

5037 :, var_index : var_index + n_tshells 

5038 ].reshape((n_states, n_tshells)) 

5039 except Exception: 

5040 trb_msg = traceback.format_exc() 

5041 msg = "A failure in %s was caught:\n%s" 

5042 LOGGER.warning(msg, "_read_states_is_alive, solids", trb_msg) 

5043 finally: 

5044 var_index += n_tshells 

5045 

5046 # SHELLS 

5047 if n_shells > 0: 

5048 try: 

5049 array_dict[ArrayType.element_shell_is_alive] = state_data[ 

5050 :, var_index : var_index + n_shells 

5051 ].reshape((n_states, n_shells)) 

5052 except Exception: 

5053 trb_msg = traceback.format_exc() 

5054 msg = "A failure in %s was caught:\n%s" 

5055 LOGGER.warning(msg, "_read_states_is_alive, shells", trb_msg) 

5056 finally: 

5057 var_index += n_shells 

5058 

5059 # BEAMS 

5060 if n_beams > 0: 

5061 try: 

5062 array_dict[ArrayType.element_beam_is_alive] = state_data[ 

5063 :, var_index : var_index + n_beams 

5064 ].reshape((n_states, n_beams)) 

5065 except Exception: 

5066 trb_msg = traceback.format_exc() 

5067 msg = "A failure in %s was caught:\n%s" 

5068 LOGGER.warning(msg, "_read_states_is_alive, beams", trb_msg) 

5069 finally: 

5070 var_index += n_beams 

5071 

5072 LOGGER.debug("_read_states_is_alive end at var_index %d", var_index) 

5073 

5074 return var_index 

5075 

5076 def _read_states_sph(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int: 

5077 """Read the sph state data 

5078 

5079 Parameters 

5080 ---------- 

5081 state_data: np.ndarray 

5082 array with entire state data 

5083 var_index: int 

5084 variable index in the state data array 

5085 array_dict: dict 

5086 dictionary to store the loaded arrays in 

5087 

5088 Returns 

5089 ------- 

5090 var_index: int 

5091 updated variable index after reading the section 

5092 """ 

5093 

5094 if self.header.n_sph_nodes <= 0: 

5095 return var_index 

5096 

5097 LOGGER.debug("_read_states_sph start at var_index %d", var_index) 

5098 

5099 info = self._sph_info 

5100 n_states = state_data.shape[0] 

5101 n_particles = self.header.n_sph_nodes 

5102 n_variables = info.n_sph_vars 

5103 

5104 # extract data 

5105 try: 

5106 sph_data = state_data[:, var_index : var_index + n_particles * n_variables] 

5107 

5108 i_var = 1 

5109 

5110 # deletion 

5111 try: 

5112 array_dict[ArrayType.sph_deletion] = sph_data[:, 0] < 0 

5113 except Exception: 

5114 trb_msg = traceback.format_exc() 

5115 msg = "A failure in %s was caught:\n%s" 

5116 LOGGER.warning(msg, "_read_states_sph, deletion", trb_msg) 

5117 

5118 # particle radius 

5119 if info.has_influence_radius: 

5120 try: 

5121 array_dict[ArrayType.sph_radius] = sph_data[:, i_var] 

5122 except Exception: 

5123 trb_msg = traceback.format_exc() 

5124 msg = "A failure in %s was caught:\n%s" 

5125 LOGGER.warning(msg, "_read_states_sph, radius", trb_msg) 

5126 finally: 

5127 i_var += 1 

5128 

5129 # pressure 

5130 if info.has_particle_pressure: 

5131 try: 

5132 array_dict[ArrayType.sph_pressure] = sph_data[:, i_var] 

5133 except Exception: 

5134 trb_msg = traceback.format_exc() 

5135 msg = "A failure in %s was caught:\n%s" 

5136 LOGGER.warning(msg, "_read_states_sph, pressure", trb_msg) 

5137 finally: 

5138 i_var += 1 

5139 

5140 # stress 

5141 if info.has_stresses: 

5142 try: 

5143 array_dict[ArrayType.sph_stress] = sph_data[ 

5144 :, i_var : i_var + n_particles * 6 

5145 ].reshape((n_states, n_particles, 6)) 

5146 except Exception: 

5147 trb_msg = traceback.format_exc() 

5148 msg = "A failure in %s was caught:\n%s" 

5149 LOGGER.warning(msg, "_read_states_sph, pressure", trb_msg) 

5150 finally: 

5151 i_var += 6 * n_particles 

5152 

5153 # eff. plastic strain 

5154 if info.has_plastic_strain: 

5155 try: 

5156 array_dict[ArrayType.sph_effective_plastic_strain] = sph_data[:, i_var] 

5157 except Exception: 

5158 trb_msg = traceback.format_exc() 

5159 msg = "A failure in %s was caught:\n%s" 

5160 LOGGER.warning(msg, "_read_states_sph, eff_plastic_strain", trb_msg) 

5161 finally: 

5162 i_var += 1 

5163 

5164 # density 

5165 if info.has_material_density: 

5166 try: 

5167 array_dict[ArrayType.sph_density] = sph_data[:, i_var] 

5168 except Exception: 

5169 trb_msg = traceback.format_exc() 

5170 msg = "A failure in %s was caught:\n%s" 

5171 LOGGER.warning(msg, "_read_states_sph, density", trb_msg) 

5172 finally: 

5173 i_var += 1 

5174 

5175 # internal energy 

5176 if info.has_internal_energy: 

5177 try: 

5178 array_dict[ArrayType.sph_internal_energy] = sph_data[:, i_var] 

5179 except Exception: 

5180 trb_msg = traceback.format_exc() 

5181 msg = "A failure in %s was caught:\n%s" 

5182 LOGGER.warning(msg, "_read_states_sph, internal_energy", trb_msg) 

5183 finally: 

5184 i_var += 1 

5185 

5186 # number of neighbors 

5187 if info.has_n_affecting_neighbors: 

5188 try: 

5189 array_dict[ArrayType.sph_n_neighbors] = sph_data[:, i_var] 

5190 except Exception: 

5191 trb_msg = traceback.format_exc() 

5192 msg = "A failure in %s was caught:\n%s" 

5193 LOGGER.warning(msg, "_read_states_sph, n_neighbors", trb_msg) 

5194 finally: 

5195 i_var += 1 

5196 

5197 # strain and strainrate 

5198 if info.has_strain_and_strainrate: 

5199 

5200 try: 

5201 array_dict[ArrayType.sph_strain] = sph_data[ 

5202 :, i_var : i_var + n_particles * 6 

5203 ].reshape((n_states, n_particles, 6)) 

5204 except Exception: 

5205 trb_msg = traceback.format_exc() 

5206 msg = "A failure in %s was caught:\n%s" 

5207 LOGGER.warning(msg, "_read_states_sph, strain", trb_msg) 

5208 finally: 

5209 i_var += 6 * n_particles 

5210 

5211 try: 

5212 array_dict[ArrayType.sph_strainrate] = sph_data[ 

5213 :, i_var : i_var + n_particles * 6 

5214 ].reshape((n_states, n_particles, 6)) 

5215 except Exception: 

5216 trb_msg = traceback.format_exc() 

5217 msg = "A failure in %s was caught:\n%s" 

5218 LOGGER.warning(msg, "_read_states_sph, strainrate", trb_msg) 

5219 finally: 

5220 i_var += 6 * n_particles 

5221 

5222 # mass 

5223 if info.has_mass: 

5224 try: 

5225 array_dict[ArrayType.sph_mass] = sph_data[:, i_var] 

5226 except Exception: 

5227 trb_msg = traceback.format_exc() 

5228 msg = "A failure in %s was caught:\n%s" 

5229 LOGGER.warning(msg, "_read_states_sph, pressure", trb_msg) 

5230 finally: 

5231 i_var += 1 

5232 

5233 except Exception: 

5234 trb_msg = traceback.format_exc() 

5235 msg = "A failure in %s was caught:\n%s" 

5236 LOGGER.warning(msg, "_read_states_sph, sph_data", trb_msg) 

5237 finally: 

5238 var_index += n_particles * n_variables 

5239 

5240 LOGGER.debug("_read_states_sph end at var_index %d", var_index) 

5241 

5242 return var_index 

5243 

5244 def _read_states_airbags(self, state_data: np.ndarray, var_index: int, array_dict: dict) -> int: 

5245 """Read the airbag state data 

5246 

5247 Parameters 

5248 ---------- 

5249 state_data: np.ndarray 

5250 array with entire state data 

5251 var_index: int 

5252 variable index in the state data array 

5253 array_dict: dict 

5254 dictionary to store the loaded arrays in 

5255 

5256 Returns 

5257 ------- 

5258 var_index: int 

5259 updated variable index after reading the section 

5260 """ 

5261 

5262 if self.header.n_airbags <= 0: 

5263 return var_index 

5264 

5265 LOGGER.debug("_read_states_airbags start at var_index %d", var_index) 

5266 

5267 n_states = state_data.shape[0] 

5268 info = self._airbag_info 

5269 n_airbag_geom_vars = info.n_geometric_variables 

5270 n_airbags = info.n_airbags 

5271 n_state_airbag_vars = info.n_airbag_state_variables 

5272 n_particles = info.n_particles 

5273 n_particle_vars = info.n_particle_state_variables 

5274 

5275 # Warning 

5276 # I am not sure if this is right ... 

5277 n_total_vars = n_airbags * n_state_airbag_vars + n_particles * n_particle_vars 

5278 

5279 try: 

5280 # types 

5281 # nlist = ngeom + nvar + nstgeom 

5282 airbag_var_types = self.arrays[ArrayType.airbag_variable_types] 

5283 airbag_var_names = self.arrays[ArrayType.airbag_variable_names] 

5284 # geom_var_types = airbag_var_types[:n_airbag_geom_vars] 

5285 particle_var_types = airbag_var_types[ 

5286 n_airbag_geom_vars : n_airbag_geom_vars + n_particle_vars 

5287 ] 

5288 particle_var_names = airbag_var_names[ 

5289 n_airbag_geom_vars : n_airbag_geom_vars + n_particle_vars 

5290 ] 

5291 

5292 airbag_state_var_types = airbag_var_types[n_airbag_geom_vars + n_particle_vars :] 

5293 airbag_state_var_names = airbag_var_names[n_airbag_geom_vars + n_particle_vars :] 

5294 

5295 # required for dynamic reading 

5296 def get_dtype(type_flag): 

5297 return self._header.itype if type_flag == 1 else self.header.ftype 

5298 

5299 # extract airbag data 

5300 airbag_state_data = state_data[:, var_index : var_index + n_total_vars] 

5301 

5302 # airbag data 

5303 airbag_data = airbag_state_data[:, : n_airbags * n_state_airbag_vars].reshape( 

5304 (n_states, n_airbags, n_state_airbag_vars) 

5305 ) 

5306 airbag_state_offset = n_airbags * n_state_airbag_vars 

5307 

5308 # particle data 

5309 particle_data = airbag_state_data[ 

5310 :, airbag_state_offset : airbag_state_offset + n_particles * n_particle_vars 

5311 ].reshape((n_states, n_particles, n_particle_vars)) 

5312 

5313 # save sh... 

5314 

5315 # airbag state vars 

5316 for i_airbag_state_var in range(n_state_airbag_vars): 

5317 var_name = airbag_state_var_names[i_airbag_state_var].strip() 

5318 var_type = airbag_state_var_types[i_airbag_state_var] 

5319 

5320 if var_name.startswith("Act Gas"): 

5321 try: 

5322 array_dict[ArrayType.airbag_n_active_particles] = airbag_data[ 

5323 :, :, i_airbag_state_var 

5324 ].view(get_dtype(var_type)) 

5325 except Exception: 

5326 trb_msg = traceback.format_exc() 

5327 msg = "A failure in %s was caught:\n%s" 

5328 LOGGER.warning( 

5329 msg, "_read_states_airbags, airbag_n_active_particles", trb_msg 

5330 ) 

5331 elif var_name.startswith("Bag Vol"): 

5332 try: 

5333 array_dict[ArrayType.airbag_bag_volume] = airbag_data[ 

5334 :, :, i_airbag_state_var 

5335 ].view(get_dtype(var_type)) 

5336 except Exception: 

5337 trb_msg = traceback.format_exc() 

5338 msg = "A failure in %s was caught:\n%s" 

5339 LOGGER.warning(msg, "_read_states_airbags, airbag_volume", trb_msg) 

5340 else: 

5341 warn_msg = "Unknown airbag state var: '%s'. Skipping it." 

5342 LOGGER.warning(warn_msg, var_name) 

5343 

5344 # particles yay 

5345 for i_particle_var in range(n_particle_vars): 

5346 var_type = particle_var_types[i_particle_var] 

5347 var_name = particle_var_names[i_particle_var].strip() 

5348 

5349 # particle gas id 

5350 if var_name.startswith("GasC ID"): 

5351 try: 

5352 array_dict[ArrayType.airbag_particle_gas_id] = particle_data[ 

5353 :, :, i_particle_var 

5354 ].view(get_dtype(var_type)) 

5355 except Exception: 

5356 trb_msg = traceback.format_exc() 

5357 msg = "A failure in %s %s was caught:\n%s" 

5358 LOGGER.warning( 

5359 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5360 ) 

5361 # particle chamber id 

5362 elif var_name.startswith("Cham ID"): 

5363 try: 

5364 array_dict[ArrayType.airbag_particle_chamber_id] = particle_data[ 

5365 :, :, i_particle_var 

5366 ].view(get_dtype(var_type)) 

5367 except Exception: 

5368 trb_msg = traceback.format_exc() 

5369 msg = "A failure in %s %s was caught:\n%s" 

5370 LOGGER.warning( 

5371 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5372 ) 

5373 # particle leakage 

5374 elif var_name.startswith("Leakage"): 

5375 try: 

5376 array_dict[ArrayType.airbag_particle_leakage] = particle_data[ 

5377 :, :, i_particle_var 

5378 ].view(get_dtype(var_type)) 

5379 except Exception: 

5380 trb_msg = traceback.format_exc() 

5381 msg = "A failure in %s %s was caught:\n%s" 

5382 LOGGER.warning( 

5383 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5384 ) 

5385 # particle mass 

5386 elif var_name.startswith("Mass"): 

5387 try: 

5388 array_dict[ArrayType.airbag_particle_mass] = particle_data[ 

5389 :, :, i_particle_var 

5390 ].view(get_dtype(var_type)) 

5391 except Exception: 

5392 trb_msg = traceback.format_exc() 

5393 msg = "A failure in %s %s was caught:\n%s" 

5394 LOGGER.warning( 

5395 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5396 ) 

5397 # particle radius 

5398 try: 

5399 array_dict[ArrayType.airbag_particle_radius] = particle_data[ 

5400 :, :, i_particle_var 

5401 ].view(get_dtype(var_type)) 

5402 except Exception: 

5403 trb_msg = traceback.format_exc() 

5404 msg = "A failure in %s %s was caught:\n%s" 

5405 LOGGER.warning( 

5406 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5407 ) 

5408 # particle spin energy 

5409 elif var_name.startswith("Spin En"): 

5410 try: 

5411 array_dict[ArrayType.airbag_particle_spin_energy] = particle_data[ 

5412 :, :, i_particle_var 

5413 ].view(get_dtype(var_type)) 

5414 except Exception: 

5415 trb_msg = traceback.format_exc() 

5416 msg = "A failure in %s %s was caught:\n%s" 

5417 LOGGER.warning( 

5418 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5419 ) 

5420 # particle translational energy 

5421 elif var_name.startswith("Tran En"): 

5422 try: 

5423 array_dict[ArrayType.airbag_particle_translation_energy] = particle_data[ 

5424 :, :, i_particle_var 

5425 ].view(get_dtype(var_type)) 

5426 except Exception: 

5427 trb_msg = traceback.format_exc() 

5428 msg = "A failure in %s %s was caught:\n%s" 

5429 LOGGER.warning( 

5430 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5431 ) 

5432 # particle segment distance 

5433 elif var_name.startswith("NS dist"): 

5434 try: 

5435 array_dict[ 

5436 ArrayType.airbag_particle_nearest_segment_distance 

5437 ] = particle_data[:, :, i_particle_var].view(get_dtype(var_type)) 

5438 except Exception: 

5439 trb_msg = traceback.format_exc() 

5440 msg = "A failure in %s %s was caught:\n%s" 

5441 LOGGER.warning( 

5442 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5443 ) 

5444 # particle position 

5445 elif var_name.startswith("Pos x"): 

5446 try: 

5447 particle_var_names_stripped = [ 

5448 entry.strip() for entry in particle_var_names 

5449 ] 

5450 i_particle_var_x = i_particle_var 

5451 i_particle_var_y = particle_var_names_stripped.index("Pos y") 

5452 i_particle_var_z = particle_var_names_stripped.index("Pos z") 

5453 

5454 array_dict[ArrayType.airbag_particle_position] = particle_data[ 

5455 :, :, (i_particle_var_x, i_particle_var_y, i_particle_var_z) 

5456 ].view(get_dtype(var_type)) 

5457 except Exception: 

5458 trb_msg = traceback.format_exc() 

5459 msg = "A failure in %s %s was caught:\n%s" 

5460 LOGGER.warning( 

5461 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5462 ) 

5463 elif var_name.startswith("Pos y"): 

5464 # handled in Pos x 

5465 pass 

5466 elif var_name.startswith("Pos z"): 

5467 # handled in Pos x 

5468 pass 

5469 # particle velocity 

5470 elif var_name.startswith("Vel x"): 

5471 try: 

5472 particle_var_names_stripped = [ 

5473 entry.strip() for entry in particle_var_names 

5474 ] 

5475 i_particle_var_x = i_particle_var 

5476 i_particle_var_y = particle_var_names_stripped.index("Vel y") 

5477 i_particle_var_z = particle_var_names_stripped.index("Vel z") 

5478 

5479 array_dict[ArrayType.airbag_particle_velocity] = particle_data[ 

5480 :, :, (i_particle_var_x, i_particle_var_y, i_particle_var_z) 

5481 ].view(get_dtype(var_type)) 

5482 except Exception: 

5483 trb_msg = traceback.format_exc() 

5484 msg = "A failure in %s %s was caught:\n%s" 

5485 LOGGER.warning( 

5486 msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg 

5487 ) 

5488 

5489 except Exception: 

5490 trb_msg = traceback.format_exc() 

5491 msg = "A failure in %s was caught:\n%s" 

5492 LOGGER.warning(msg, "_read_states_airbags, particle_data", trb_msg) 

5493 finally: 

5494 var_index += n_total_vars 

5495 

5496 LOGGER.debug("_read_states_airbags end at var_index %d", var_index) 

5497 

5498 return var_index 

5499 

5500 def _read_states_road_surfaces( 

5501 self, state_data: np.ndarray, var_index: int, array_dict: dict 

5502 ) -> int: 

5503 """Read the road surfaces state data for whoever wants this ... 

5504 

5505 Parameters 

5506 ---------- 

5507 state_data: np.ndarray 

5508 array with entire state data 

5509 var_index: int 

5510 variable index in the state data array 

5511 array_dict: dict 

5512 dictionary to store the loaded arrays in 

5513 

5514 Returns 

5515 ------- 

5516 var_index: int 

5517 updated variable index after reading the section 

5518 """ 

5519 

5520 if not self.header.has_rigid_road_surface: 

5521 return var_index 

5522 

5523 LOGGER.debug("_read_states_road_surfaces start at var_index %s", var_index) 

5524 

5525 n_states = state_data.shape[0] 

5526 info = self._rigid_road_info 

5527 n_roads = info.n_roads 

5528 

5529 try: 

5530 # read road data 

5531 road_data = state_data[:, var_index : var_index + 6 * n_roads].reshape( 

5532 (n_states, n_roads, 2, 3) 

5533 ) 

5534 

5535 # DISPLACEMENT 

5536 try: 

5537 array_dict[ArrayType.rigid_road_displacement] = road_data[:, :, 0, :] 

5538 except Exception: 

5539 trb_msg = traceback.format_exc() 

5540 msg = "A failure in %s was caught:\n%s" 

5541 LOGGER.warning(msg, "_read_states_road_surfaces, road_displacement", trb_msg) 

5542 

5543 # VELOCITY 

5544 try: 

5545 array_dict[ArrayType.rigid_road_velocity] = road_data[:, :, 1, :] 

5546 except Exception: 

5547 trb_msg = traceback.format_exc() 

5548 msg = "A failure in %s was caught:\n%s" 

5549 LOGGER.warning(msg, "_read_states_road_surfaces, road_velocity", trb_msg) 

5550 

5551 except Exception: 

5552 trb_msg = traceback.format_exc() 

5553 msg = "A failure in %s was caught:\n%s" 

5554 LOGGER.warning(msg, "_read_states_road_surfaces, road_data", trb_msg) 

5555 finally: 

5556 var_index += 6 * n_roads 

5557 

5558 LOGGER.debug("_read_states_road_surfaces end at var_index %d", var_index) 

5559 

5560 return var_index 

5561 

5562 def _read_states_rigid_body_motion( 

5563 self, state_data: np.ndarray, var_index: int, array_dict: dict 

5564 ) -> int: 

5565 """Read the road surfaces state data for whoever want this ... 

5566 

5567 Parameters 

5568 ---------- 

5569 state_data: np.ndarray 

5570 array with entire state data 

5571 var_index: int 

5572 variable index in the state data array 

5573 array_dict: dict 

5574 dictionary to store the loaded arrays in 

5575 

5576 Returns 

5577 ------- 

5578 var_index: int 

5579 updated variable index after reading the section 

5580 """ 

5581 

5582 if not self.header.has_rigid_body_data: 

5583 return var_index 

5584 

5585 LOGGER.debug("_read_states_rigid_body_motion start at var_index %d", var_index) 

5586 

5587 info = self._rigid_body_info 

5588 n_states = state_data.shape[0] 

5589 n_rigids = info.n_rigid_bodies 

5590 n_rigid_vars = 12 if self.header.has_reduced_rigid_body_data else 24 

5591 

5592 try: 

5593 # do the thing 

5594 rigid_body_data = state_data[ 

5595 :, var_index : var_index + n_rigids * n_rigid_vars 

5596 ].reshape((n_states, n_rigids, n_rigid_vars)) 

5597 

5598 # let the party begin 

5599 # rigid coordinates 

5600 try: 

5601 array_dict[ArrayType.rigid_body_coordinates] = rigid_body_data[:, :, :3] 

5602 except Exception: 

5603 trb_msg = traceback.format_exc() 

5604 msg = "A failure in %s was caught:\n%s" 

5605 LOGGER.warning(msg, "_read_states_rigid_body_motion, coordinates", trb_msg) 

5606 finally: 

5607 i_var = 3 

5608 

5609 # rotation matrix 

5610 try: 

5611 array_dict[ArrayType.rigid_body_rotation_matrix] = rigid_body_data[ 

5612 :, :, i_var : i_var + 9 

5613 ] 

5614 except Exception: 

5615 trb_msg = traceback.format_exc() 

5616 msg = "A failure in %s was caught:\n%s" 

5617 LOGGER.warning(msg, "_read_states_rigid_body_motion, rot_matrix", trb_msg) 

5618 finally: 

5619 i_var += 9 

5620 

5621 if self.header.has_reduced_rigid_body_data: 

5622 return var_index 

5623 

5624 # velocity pewpew 

5625 try: 

5626 array_dict[ArrayType.rigid_body_velocity] = rigid_body_data[:, :, i_var : i_var + 3] 

5627 except Exception: 

5628 trb_msg = traceback.format_exc() 

5629 msg = "A failure in %s was caught:\n%s" 

5630 LOGGER.warning(msg, "_read_states_rigid_body_motion, velocity", trb_msg) 

5631 finally: 

5632 i_var += 3 

5633 

5634 # rotational velocity 

5635 try: 

5636 array_dict[ArrayType.rigid_body_rot_velocity] = rigid_body_data[ 

5637 :, :, i_var : i_var + 3 

5638 ] 

5639 except Exception: 

5640 trb_msg = traceback.format_exc() 

5641 msg = "A failure in %s was caught:\n%s" 

5642 LOGGER.warning(msg, "_read_states_rigid_body_motion, rot_velocity", trb_msg) 

5643 finally: 

5644 i_var += 3 

5645 

5646 # acceleration 

5647 try: 

5648 array_dict[ArrayType.rigid_body_acceleration] = rigid_body_data[ 

5649 :, :, i_var : i_var + 3 

5650 ] 

5651 except Exception: 

5652 trb_msg = traceback.format_exc() 

5653 msg = "A failure in %s was caught:\n%s" 

5654 LOGGER.warning(msg, "_read_states_rigid_body_motion, acceleration", trb_msg) 

5655 finally: 

5656 i_var += 3 

5657 

5658 # rotational acceleration 

5659 try: 

5660 array_dict[ArrayType.rigid_body_rot_acceleration] = rigid_body_data[ 

5661 :, :, i_var : i_var + 3 

5662 ] 

5663 except Exception: 

5664 trb_msg = traceback.format_exc() 

5665 msg = "A failure in %s was caught:\n%s" 

5666 LOGGER.warning(msg, "_read_states_rigid_body_motion, rot_acceleration", trb_msg) 

5667 finally: 

5668 i_var += 3 

5669 

5670 except Exception: 

5671 trb_msg = traceback.format_exc() 

5672 msg = "A failure in %s was caught:\n%s" 

5673 LOGGER.warning(msg, "_read_states_rigid_body_motion, rigid_body_data", trb_msg) 

5674 

5675 finally: 

5676 var_index += n_rigids * n_rigid_vars 

5677 

5678 LOGGER.debug("_read_states_rigid_body_motion end at var_index %d", var_index) 

5679 

5680 return var_index 

5681 

    def _collect_file_infos(self, size_per_state: int) -> List[MemoryInfo]:
        """This routine collects the memory and file info for the d3plot files

        Parameters
        ----------
        size_per_state: int
            size of every state to be read

        Returns
        -------
        memory_infos: List[MemoryInfo]
            memory infos about the states

        Notes
        -----
        State data is expected directly behind geometry data
        Unfortunately data is spread across multiple files.
        One file could contain geometry and state data but states
        may also be littered accross several files. This would
        not be an issue, if dyna would not always write in blocks
        of 512 words of memory, leaving zero byte padding blocks
        at the end of files. These need to be removed and/or taken
        care of.
        """

        # no buffer means nothing was loaded, thus no states exist
        if not self._buffer:
            return []

        base_filepath = self.header.filepath

        # bugfix
        # If you encounter these int casts more often here this is why:
        # Some ints around here are numpy.int32 which can overflow
        # (sometimes there is a warning ... sometimes not ...)
        # we cast to python ints in order to prevent overflow.
        size_per_state = int(size_per_state)

        # Info:
        #
        # We need to determine here how many states are in every file
        # without really loading the file itself. For big files this is
        # simply filesize // state_size.
        # For files though with a smaller filesize this may cause issues
        # e.g.
        # filesize 2048 bytes (minimum filesize from dyna)
        # geom_size 200 bytes
        # state_size 200 bytes
        # File contains:
        # -> 1 state * state_size + geom_size = 400 bytes
        # Wrong State Estimation:
        # -> (filesize - geom_size) // state_size = 9 states != 1 state
        #
        # To avoid this wrong number of states when reading small files
        # we need to search the end mark (here nonzero byte) from the rear
        # of the file.
        # This though needs the file to be loaded into memory. To make this
        # very light, we simply memorymap a small fraction of the file starting
        # from the rear until we have our nonzero byte. Since the end mark
        # is usually in the first block loaded, there should not be any performance
        # concerns, even with bigger files.

        # query for state files (base name plus number suffix)
        filepaths = D3plot._find_dyna_result_files(base_filepath)

        # compute state data in first file
        # search therefore the first non-zero byte from the rear
        # (the buffer is reversed, so index 0 corresponds to the file end)
        last_nonzero_byte_index = self._buffer.size
        mview_inv_arr = np.asarray(self._buffer.memoryview[::-1])
        # pylint: disable = invalid-name
        BLOCK_SIZE = 2048
        for start in range(0, self._buffer.size, BLOCK_SIZE):
            (nz_indexes,) = np.nonzero(mview_inv_arr[start : start + BLOCK_SIZE])
            if len(nz_indexes):
                # translate the reversed index back into a forward file offset
                last_nonzero_byte_index = self._buffer.size - (start + nz_indexes[0])
                break
        n_states_beyond_geom = (
            last_nonzero_byte_index - self.geometry_section_size
        ) // size_per_state

        # bugfix: if states are too big we can get a negative estimation
        n_states_beyond_geom = max(0, n_states_beyond_geom)

        # memory required later
        memory_infos = [
            MemoryInfo(
                start=self.geometry_section_size,  # type: ignore
                length=n_states_beyond_geom * size_per_state,  # type: ignore
                filepath=base_filepath,
                n_states=n_states_beyond_geom,  # type: ignore
                filesize=self._buffer.size,
                use_mmap=True,
            )
        ]

        # compute amount of state data in every further file
        for filepath in filepaths:
            filesize = os.path.getsize(filepath)
            # -1 acts as a "not found yet" sentinel
            last_nonzero_byte_index = -1

            n_blocks = filesize // mmap.ALLOCATIONGRANULARITY
            rest_size = filesize % mmap.ALLOCATIONGRANULARITY
            block_length = mmap.ALLOCATIONGRANULARITY
            with open(filepath, "rb") as fp:

                # search last rest block (page-aligned)
                # page-aligned means the start must be
                # a multiple of mmap.ALLOCATIONGRANULARITY
                # otherwise we get an error on linux
                if rest_size:
                    start = n_blocks * block_length
                    # .read() copies the mapped bytes, so no live mapping
                    # is retained after this statement
                    mview = memoryview(
                        mmap.mmap(
                            fp.fileno(), offset=start, length=rest_size, access=mmap.ACCESS_READ
                        ).read()
                    )
                    (nz_indexes,) = np.nonzero(mview[::-1])
                    if len(nz_indexes):
                        last_nonzero_byte_index = start + rest_size - nz_indexes[0]

                # search in blocks from the rear
                if last_nonzero_byte_index == -1:
                    for i_block in range(n_blocks - 1, -1, -1):
                        start = block_length * i_block
                        mview = memoryview(
                            mmap.mmap(
                                fp.fileno(),
                                offset=start,
                                length=block_length,
                                access=mmap.ACCESS_READ,
                            ).read()
                        )
                        (nz_indexes,) = np.nonzero(mview[::-1])
                        if len(nz_indexes):
                            index = block_length - nz_indexes[0]
                            last_nonzero_byte_index = start + index
                            break

            # a file of pure zero padding is considered malformed
            if last_nonzero_byte_index == -1:
                msg = "The file {0} seems to be missing it's endmark."
                raise RuntimeError(msg.format(filepath))

            # BUGFIX
            # In d3eigv it could be observed that there is not necessarily an end mark.
            # As a consequence the last byte can indeed be zero. We control this by
            # checking if the last nonzero byte was smaller than the state size which
            # makes no sense.
            if (
                self.header.filetype == D3plotFiletype.D3EIGV
                and last_nonzero_byte_index < size_per_state <= filesize
            ):
                last_nonzero_byte_index = size_per_state

            n_states_in_file = last_nonzero_byte_index // size_per_state
            memory_infos.append(
                MemoryInfo(
                    start=0,
                    length=size_per_state * (n_states_in_file),
                    filepath=filepath,
                    n_states=n_states_in_file,
                    filesize=filesize,
                    use_mmap=False,
                )
            )

        return memory_infos

5847 

5848 @staticmethod 

5849 def _read_file_from_memory_info( 

5850 memory_infos: Union[MemoryInfo, List[MemoryInfo]] 

5851 ) -> Tuple[BinaryBuffer, int]: 

5852 """Read files from a single or multiple memory infos 

5853 

5854 Parameters 

5855 ---------- 

5856 memory_infos: MemoryInfo or List[MemoryInfo] 

5857 memory infos for loading a file (see `D3plot._collect_file_infos`) 

5858 

5859 Returns 

5860 ------- 

5861 bb_states: BinaryBuffer 

5862 New binary buffer with all states perfectly linear in memory 

5863 n_states: int 

5864 Number of states to be expected 

5865 

5866 Notes 

5867 ----- 

5868 This routine in contrast to `D3plot._read_state_bytebuffer` is used 

5869 to load only a fraction of files into memory. 

5870 """ 

5871 

5872 # single file case 

5873 if isinstance(memory_infos, MemoryInfo): 

5874 memory_infos = [memory_infos] 

5875 

5876 # allocate memory 

5877 # bugfix: casting to int prevents int32 overflow for large files 

5878 memory_required = 0 

5879 for mem in memory_infos: 

5880 memory_required += int(mem.length) 

5881 mview = memoryview(bytearray(memory_required)) 

5882 

5883 # transfer memory for other files 

5884 n_states = 0 

5885 total_offset = 0 

5886 for minfo in memory_infos: 

5887 LOGGER.debug("opening: %s", minfo.filepath) 

5888 

5889 with open(minfo.filepath, "br") as fp: 

5890 # NOTE 

5891 # mmap is too slow but maybe there are faster 

5892 # ways to use mmap correctly 

5893 # if minfo.use_mmap: 

5894 

5895 # # memory mapping can only be done page aligned 

5896 # mmap_start = (minfo.start // mmap.ALLOCATIONGRANULARITY) * \ 

5897 # mmap.ALLOCATIONGRANULARITY 

5898 # mview_start = minfo.start - mmap_start 

5899 

5900 # end = minfo.start + minfo.length 

5901 # n_end_pages = (end // mmap.ALLOCATIONGRANULARITY + 

5902 # (end % mmap.ALLOCATIONGRANULARITY != 0)) 

5903 # mmap_length = n_end_pages * mmap.ALLOCATIONGRANULARITY - mmap_start 

5904 # if mmap_start + mmap_length > minfo.filesize: 

5905 # mmap_length = minfo.filesize - mmap_start 

5906 

5907 # with mmap.mmap(fp.fileno(), 

5908 # length=mmap_length, 

5909 # offset=mmap_start, 

5910 # access=mmap.ACCESS_READ) as mp: 

5911 # # mp.seek(mview_start) 

5912 # # mview[total_offset:total_offset + 

5913 # # minfo.length] = mp.read(minfo.length) 

5914 

5915 # mview[total_offset:total_offset + 

5916 # minfo.length] = mp[mview_start:mview_start + minfo.length] 

5917 

5918 # else: 

5919 fp.seek(minfo.start) 

5920 fp.readinto(mview[total_offset : total_offset + minfo.length]) # type: ignore 

5921 

5922 total_offset += minfo.length 

5923 n_states += minfo.n_states 

5924 

5925 # save 

5926 bb_states = BinaryBuffer() 

5927 bb_states.memoryview = mview 

5928 

5929 return bb_states, n_states 

5930 

5931 def _read_state_bytebuffer(self, size_per_state: int): 

5932 """This routine reads the data for state information 

5933 

5934 Parameters 

5935 ---------- 

5936 size_per_state: int 

5937 size of every state to be read 

5938 

5939 Returns 

5940 ------- 

5941 bb_states: BinaryBuffer 

5942 New binary buffer with all states perfectly linear in memory 

5943 n_states: int 

5944 Number of states to be expected 

5945 

5946 Notes 

5947 ----- 

5948 State data is expected directly behind geometry data 

5949 Unfortunately data is spread across multiple files. 

5950 One file could contain geometry and state data but states 

5951 may also be littered accross several files. This would 

5952 not be an issue, if dyna would not always write in blocks 

5953 of 512 words of memory, leaving zero byte padding blocks 

5954 at the end of files. These need to be removed and/or taken 

5955 care of. 

5956 """ 

5957 

5958 if not self._buffer: 

5959 return BinaryBuffer(), 0 

5960 

5961 memory_infos = self._collect_file_infos(size_per_state) 

5962 

5963 # allocate memory 

5964 # bugfix: casting to int prevents int32 overflow for large files 

5965 memory_required = 0 

5966 for mem in memory_infos: 

5967 memory_required += int(mem.length) 

5968 mview = memoryview(bytearray(memory_required)) 

5969 

5970 # transfer memory from first file 

5971 n_states = memory_infos[0].n_states 

5972 start = memory_infos[0].start 

5973 length = memory_infos[0].length 

5974 end = start + length 

5975 mview[:length] = self._buffer.memoryview[start:end] 

5976 

5977 # transfer memory for other files 

5978 total_offset = length 

5979 for minfo in memory_infos[1:]: 

5980 with open(minfo.filepath, "br") as fp: 

5981 fp.seek(minfo.start) 

5982 fp.readinto(mview[total_offset : total_offset + length]) # type: ignore 

5983 

5984 total_offset += length 

5985 n_states += minfo.n_states 

5986 

5987 # save 

5988 bb_states = BinaryBuffer() 

5989 bb_states.memoryview = mview 

5990 return bb_states, n_states 

5991 

5992 @staticmethod 

5993 def _find_dyna_result_files(filepath: str): 

5994 """Searches all dyna result files 

5995 

5996 Parameters 

5997 ---------- 

5998 filepath: str 

5999 path to the first basic d3plot file 

6000 

6001 Returns 

6002 ------- 

6003 filepaths: list of str 

6004 path to all dyna files 

6005 

6006 Notes 

6007 ----- 

6008 The dyna files usually follow a scheme to 

6009 simply have the base name and numbers appended 

6010 e.g. (d3plot, d3plot0001, d3plot0002, etc.) 

6011 """ 

6012 

6013 file_dir = os.path.dirname(filepath) 

6014 file_dir = file_dir if len(file_dir) != 0 else "." 

6015 file_basename = os.path.basename(filepath) 

6016 

6017 pattern = f"({file_basename})[0-9]+$" 

6018 reg = re.compile(pattern) 

6019 

6020 filepaths = [ 

6021 os.path.join(file_dir, path) 

6022 for path in os.listdir(file_dir) 

6023 if os.path.isfile(os.path.join(file_dir, path)) and reg.match(path) 

6024 ] 

6025 

6026 # alphasort files to handle d3plots with more than 100 files 

6027 # e.g. d3plot01, d3plot02, ..., d3plot100 

6028 def convert(text): 

6029 return int(text) if text.isdigit() else text.lower() 

6030 

6031 number_pattern = "([0-9]+)" 

6032 

6033 def alphanum_key(key): 

6034 return [convert(c) for c in re.split(number_pattern, key)] 

6035 

6036 return sorted(filepaths, key=alphanum_key) 

6037 

6038 def _determine_wordsize(self): 

6039 """Determine the precision of the file 

6040 

6041 Returns 

6042 ------- 

6043 wordsize: int 

6044 size of each word in bytes 

6045 """ 

6046 

6047 if not self._buffer: 

6048 return 4, np.int32, np.float32 

6049 

6050 # test file type flag (1=d3plot, 5=d3part, 11=d3eigv) 

6051 

6052 # single precision 

6053 value = self._buffer.read_number(44, np.int32) 

6054 if value > 1000: 

6055 value -= 1000 

6056 if value in (1, 5, 11): 

6057 return 4, np.int32, np.float32 

6058 

6059 # double precision 

6060 value = self._buffer.read_number(88, np.int64) 

6061 if value > 1000: 

6062 value -= 1000 

6063 if value in (1, 5, 11): 

6064 return 8, np.int64, np.float64 

6065 

6066 raise RuntimeError(f"Unknown file type '{value}'.") 

6067 

    def plot(
        self,
        i_timestep: int = 0,
        field: Union[np.ndarray, None] = None,
        is_element_field: bool = True,
        fringe_limits: Union[Tuple[float, float], None] = None,
        export_filepath: str = "",
    ):
        """Plot the d3plot geometry

        Parameters
        ----------
        i_timestep: int
            timestep index to plot
        field: Union[np.ndarray, None]
            Array containing a field value for every element or node
        is_element_field: bool
            if the specified field is for elements or nodes
        fringe_limits: Union[Tuple[float, float], None]
            limits for the fringe bar. Set by default to min and max.
        export_filepath: str
            filepath to export the html to

        Notes
        -----
        Currently only shell elements can be plotted, since for
        solids the surface needs extraction.

        Examples
        --------
        Plot deformation of last timestep.

        >>> d3plot = D3plot("path/to/d3plot")
        >>> d3plot.plot(-1)
        >>> # get eff. plastic strain
        >>> pstrain = d3plot.arrays[ArrayType.element_shell_effective_plastic_strain]
        >>> pstrain.shape
        (1, 4696, 3)
        >>> # mean across all 3 integration points
        >>> pstrain = pstrain.mean(axis=2)
        >>> pstrain.shape
        (1, 4696)
        >>> # we only have 1 timestep here but let's take last one in general
        >>> last_timestep = -1
        >>> d3plot.plot(0, field=pstrain[last_timestep])
        >>> # we don't like the fringe, let's adjust
        >>> d3plot.plot(0, field=pstrain[last_timestep], fringe_limits=(0, 0.3))
        """

        # NOTE(review): input validation uses `assert`, which is stripped
        # when running python with -O
        assert i_timestep < self._state_info.n_timesteps
        assert ArrayType.node_displacement in self.arrays
        if fringe_limits:
            assert len(fringe_limits) == 2

        # shell nodes
        shell_node_indexes = self.arrays[ArrayType.element_shell_node_indexes]

        # get node displacement (used as deformed coordinates)
        node_xyz = self.arrays[ArrayType.node_displacement][i_timestep, :, :]

        # check for correct field size
        if isinstance(field, np.ndarray):
            assert field.ndim == 1
            if is_element_field and len(shell_node_indexes) != len(field):  # type: ignore
                msg = "Element indexes and field have different len: {} != {}"
                raise ValueError(msg.format(shell_node_indexes.shape, field.shape))
            if not is_element_field and len(node_xyz) != len(field):  # type: ignore
                msg = "Node field and coords have different len: {} != {}"
                raise ValueError(msg.format(node_xyz.shape, field.shape))

        # create plot (html string)
        _html = plot_shell_mesh(
            node_coordinates=node_xyz,
            shell_node_indexes=shell_node_indexes,
            field=field,
            is_element_field=is_element_field,
            fringe_limits=fringe_limits,
        )

        # store in a temporary file
        tempdir = tempfile.gettempdir()
        tempdir = os.path.join(tempdir, "lasso")
        if not os.path.isdir(tempdir):
            os.mkdir(tempdir)

        # NOTE(review): this removes EVERY file in the shared lasso temp
        # dir (plots from previous calls or other instances), and it runs
        # even when exporting to a custom filepath below
        for tmpfile in os.listdir(tempdir):
            tmpfile = os.path.join(tempdir, tmpfile)
            if os.path.isfile(tmpfile):
                os.remove(tmpfile)

        if export_filepath:
            with open(export_filepath, "w", encoding="utf-8") as fp:
                fp.write(_html)
        else:
            # create new temp file and open it in the default browser
            with tempfile.NamedTemporaryFile(
                dir=tempdir, suffix=".html", mode="w", delete=False
            ) as fp:
                fp.write(_html)
            webbrowser.open(fp.name)

6168 

6169 def write_d3plot( 

6170 self, filepath: Union[str, BinaryIO], block_size_bytes: int = 2048, single_file: bool = True 

6171 ): 

6172 """Write a d3plot file again 

6173 

6174 Parameters 

6175 ---------- 

6176 filepath: Union[str, BinaryIO] 

6177 filepath of the new d3plot file or an opened file handle 

6178 block_size_bytes: int 

6179 D3plots are originally written in byte-blocks causing zero-padding at the end of 

6180 files. This can be controlled by this parameter. Set to 0 for no padding. 

6181 single_file: bool 

6182 whether to write all states into a single file 

6183 

6184 Examples 

6185 -------- 

6186 Modify an existing d3plot: 

6187 

6188 >>> d3plot = D3plot("path/to/d3plot") 

6189 >>> hvars = d3plot.array[ArrayType.element_shell_history_vars] 

6190 >>> hvars.shape 

6191 (1, 4696, 3, 19) 

6192 >>> new_history_var = np.random.random((1, 4696, 3, 1)) 

6193 >>> new_hvars = np.concatenate([hvars, new_history_var], axis=3) 

6194 >>> d3plot.array[ArrayType.element_shell_history_vars] = new_hvars 

6195 >>> d3plot.write_d3plot("path/to/new/d3plot") 

6196 

6197 Write a new d3plot from scratch: 

6198 

6199 >>> d3plot = D3plot() 

6200 >>> d3plot.arrays[ArrayType.node_coordinates] = np.array([[0, 0, 0], 

6201 ... [1, 0, 0], 

6202 ... [0, 1, 0]]) 

6203 >>> d3plot.arrays[ArrayType.element_shell_node_indexes] = np.array([[0, 2, 1, 1]]) 

6204 >>> d3plot.arrays[ArrayType.element_shell_part_indexes] = np.array([0]) 

6205 >>> d3plot.arrays[ArrayType.node_displacement] = np.array([[[0, 0, 0], 

6206 ... [1, 0, 0], 

6207 ... [0, 1, 0]]]) 

6208 >>> d3plot.write_d3plot("yay.d3plot") 

6209 """ 

6210 

6211 # if there is a single buffer, write all in 

6212 if not isinstance(filepath, str): 

6213 single_file = True 

6214 

6215 # determine write settings 

6216 write_settings = D3plotWriterSettings(self, block_size_bytes, single_file) 

6217 write_settings.build_header() 

6218 

6219 # remove old files 

6220 if isinstance(filepath, str): 

6221 filepaths = D3plot._find_dyna_result_files(filepath) 

6222 for path in filepaths: 

6223 if os.path.isfile(path): 

6224 os.remove(path) 

6225 

6226 # write geometry file 

6227 with open_file_or_filepath(filepath, "wb") as fp: 

6228 

6229 n_bytes_written = 0 

6230 msg = "wrote {0} after {1}." 

6231 

6232 # header 

6233 n_bytes_written += self._write_header(fp, write_settings) 

6234 LOGGER.debug(msg, n_bytes_written, "_write_header") 

6235 

6236 # material section 

6237 n_bytes_written += self._write_geom_material_section(fp, write_settings) 

6238 LOGGER.debug(msg, n_bytes_written, "_write_geom_material_section") 

6239 

6240 # fluid material data 

6241 n_bytes_written += self._write_geom_fluid_material_header(fp, write_settings) 

6242 LOGGER.debug(msg, n_bytes_written, "_write_geom_fluid_material_header") 

6243 

6244 # SPH element data flags 

6245 n_bytes_written += self._write_geom_sph_element_data_flags(fp, write_settings) 

6246 LOGGER.debug(msg, n_bytes_written, "_write_geom_sph_element_data_flags") 

6247 

6248 # Particle Data 

6249 n_bytes_written += self._write_geom_particle_flags(fp, write_settings) 

6250 LOGGER.debug(msg, n_bytes_written, "_write_geom_particle_flags") 

6251 

6252 # Geometry Data 

6253 n_bytes_written += self._write_geometry(fp, write_settings) 

6254 LOGGER.debug(msg, n_bytes_written, "_write_geometry") 

6255 

6256 # User Material, Node, Blabla IDs 

6257 n_bytes_written += self._write_geom_user_ids(fp, write_settings) 

6258 LOGGER.debug(msg, n_bytes_written, "_write_geom_user_ids") 

6259 

6260 # Rigid Body Description 

6261 n_bytes_written += self._write_geom_rigid_body_description(fp, write_settings) 

6262 LOGGER.debug(msg, n_bytes_written, "_write_geom_rigid_body_description") 

6263 

6264 # Adapted Element Parent List 

6265 # not supported 

6266 

6267 # Smooth Particle Hydrodynamcis Node and Material list 

6268 n_bytes_written += self._write_geom_sph_node_and_materials(fp, write_settings) 

6269 LOGGER.debug(msg, n_bytes_written, "_write_geom_sph_node_and_materials") 

6270 

6271 # Particle Geometry Data 

6272 n_bytes_written += self._write_geom_particle_geometry_data(fp, write_settings) 

6273 LOGGER.debug(msg, n_bytes_written, "_write_geom_particle_geometry_data") 

6274 

6275 # Rigid Road Surface Data 

6276 n_bytes_written += self._write_geom_rigid_road_surface(fp, write_settings) 

6277 LOGGER.debug(msg, n_bytes_written, "_write_geom_rigid_road_surface") 

6278 

6279 # Connectivity for weirdo elements 

6280 # 10 Node Tetra 

6281 # 8 Node Shell 

6282 # 20 Node Solid 

6283 # 27 Node Solid 

6284 n_bytes_written += self._write_geom_extra_node_data(fp, write_settings) 

6285 LOGGER.debug(msg, n_bytes_written, "_write_geom_extra_node_data") 

6286 

6287 # end mark 

6288 n_bytes_written += fp.write(write_settings.pack(-999999.0)) 

6289 LOGGER.debug(msg, n_bytes_written, "_end_mark") 

6290 

6291 # Header Part & Contact Interface Titles 

6292 n_bytes_written_before_titles = n_bytes_written 

6293 n_bytes_written += self._write_header_part_contact_interface_titles(fp, write_settings) 

6294 LOGGER.debug(msg, n_bytes_written, "_write_header_part_contact_interface_titles") 

6295 

6296 if n_bytes_written_before_titles != n_bytes_written: 

6297 

6298 # we seal the file here with an endmark 

6299 n_bytes_written += fp.write(write_settings.pack(-999999.0)) 

6300 LOGGER.debug(msg, n_bytes_written, "_end_mark") 

6301 else: 

6302 pass 

6303 # we already set an end-mark before 

6304 # that is perfectly fine 

6305 

6306 # correct zero padding at the end 

6307 if block_size_bytes > 0: 

6308 zero_bytes = self._get_zero_byte_padding(n_bytes_written, block_size_bytes) 

6309 n_bytes_written += fp.write(zero_bytes) 

6310 LOGGER.debug(msg, n_bytes_written, "_zero_byte_padding") 

6311 

6312 msg = "Wrote {0} bytes to geometry file." 

6313 LOGGER.debug(msg, n_bytes_written) 

6314 

6315 # Extra Data Types (for multi solver output) 

6316 # not supported 

6317 

6318 # write states 

6319 self._write_states(filepath, write_settings) 

6320 

def _write_header(self, fp: typing.IO[Any], settings: D3plotWriterSettings) -> int:
    """Write the 64-word d3plot control block plus optional extra words.

    Parameters
    ----------
    fp: typing.IO[Any]
        file handle to write to
    settings: D3plotWriterSettings
        settings holding the header dict and the word size

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file
    """

    wordsize = settings.wordsize
    new_header = settings.header

    # names of the single-word header fields in file order, starting at
    # word position 10 (words 0-9 hold the title). ``None`` marks word 50
    # ("nadapt"), which is deliberately left zero.
    single_word_names = (
        "runtime", "filetype", "source_version", "release_version", "version",
        "ndim", "numnp", "icode", "nglbv", "it", "iu", "iv", "ia",
        "nel8", "nummat8", "numds", "numst", "nv3d",
        "nel2", "nummat2", "nv1d",
        "nel4", "nummat4", "nv2d",
        "neiph", "neips", "maxint", "nmsph", "ngpsph", "narbs",
        "nelth", "nummatt", "nv3dt",
        "ioshl1", "ioshl2", "ioshl3", "ioshl4",
        "ialemat", "ncfdv1", "ncfdv2",
        None,  # word 50: "nadapt" (not written)
        "nmmat", "numfluid", "inn", "npefg", "nel48", "idtdt", "extra",
    )

    # map field name -> (byte offset, byte size)
    header_words: Dict[str, Tuple[int, int]] = {"title": (0, 10 * wordsize)}
    for i_word, field_name in enumerate(single_word_names, start=10):
        if field_name is not None:
            header_words[field_name] = (i_word * wordsize, wordsize)

    # additional header fields are only present if "extra" is set
    if new_header["extra"] > 0:
        extra_names = ("nel20", "nt3d", "nel27", "neipb")
        for i_word, field_name in enumerate(extra_names, start=64):
            header_words[field_name] = (i_word * wordsize, wordsize)

    # serialize every field into one buffer and emit it with a single write
    barray = bytearray((64 + new_header["extra"]) * wordsize)
    for field_name, (position, size) in header_words.items():
        barray[position : position + size] = settings.pack(new_header[field_name], size)

    n_bytes_written = fp.write(barray)

    # check
    n_bytes_expected = (64 + new_header["extra"]) * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6402 

def _write_geom_material_section(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the material type section (MATTYP) of the geometry file.

    Only written if the header variable ``mattyp`` is positive. Emits the
    number of rigid shells, the material count, and the material type per
    part.

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    if settings.mattyp <= 0:
        return 0

    _check_ndim(self, {ArrayType.part_material_type: ["n_parts"]})

    part_material_type = self.arrays[ArrayType.part_material_type]
    n_materials = len(part_material_type)

    # rigid shell count, material count, then the material type table
    n_bytes_written = 0
    n_bytes_written += fp.write(settings.pack(settings.n_rigid_shells))
    n_bytes_written += fp.write(settings.pack(n_materials))
    n_bytes_written += fp.write(settings.pack(part_material_type))

    # check
    n_bytes_expected = (n_materials + 2) * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6445 

def _write_geom_fluid_material_header(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the ALE fluid material id list of the geometry file.

    Only written if the header variable ``ialemat`` is nonzero.

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    if settings.header["ialemat"] == 0:
        return 0

    _check_ndim(self, {ArrayType.ale_material_ids: ["n_ale_parts"]})

    ale_material_ids = self.arrays[ArrayType.ale_material_ids]
    n_bytes_written = fp.write(settings.pack(ale_material_ids, dtype_hint=np.integer))

    # check: one word per ALE material
    n_bytes_expected = settings.header["ialemat"] * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6463 

def _write_geom_sph_element_data_flags(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the SPH element data flags (which SPH state vars are output).

    Only written if the header variable ``nmsph`` is positive. The section
    holds the total variable count followed by one flag word per variable
    category (11 words total).

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    if settings.header["nmsph"] <= 0:
        return 0

    # (array type, number of state vars when present) in file order
    flag_specs = (
        (ArrayType.sph_radius, 1),
        (ArrayType.sph_pressure, 1),
        (ArrayType.sph_stress, 6),
        (ArrayType.sph_effective_plastic_strain, 1),
        (ArrayType.sph_density, 1),
        (ArrayType.sph_internal_energy, 1),
        (ArrayType.sph_n_neighbors, 1),
        (ArrayType.sph_strain, 6),
        (ArrayType.sph_mass, 1),
    )
    flags = [
        n_vars if array_type in self.arrays else 0 for array_type, n_vars in flag_specs
    ]

    # history variables are dimensioned by the stored array itself
    n_sph_history_vars = 0
    if ArrayType.sph_history_vars in self.arrays:
        n_sph_history_vars, _ = settings.count_array_state_var(
            ArrayType.sph_history_vars,
            ["n_timesteps", "n_sph_particles", "n_sph_history_vars"],
            False,
        )
    flags.append(n_sph_history_vars)

    # write the total variable count first, then each individual flag
    n_bytes_written = fp.write(settings.pack(sum(flags)))
    for flag_value in flags:
        n_bytes_written += fp.write(settings.pack(flag_value))

    # check
    n_bytes_expected = 11 * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6538 

def _write_geom_particle_flags(self, fp: typing.IO[Any], settings: D3plotWriterSettings) -> int:
    """Write the airbag particle (CPM) flag section of the geometry file.

    Emits the airbag geometry/state variable counts, then a typecode and an
    8-character name for every airbag variable.

    Parameters
    ----------
    fp: typing.IO[Any]
        file handle to write to
    settings: D3plotWriterSettings
        settings with header data and word size

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if no particle data)
    """

    npefg = settings.header["npefg"]

    # npefg <= 0 means no airbag particle data; implausibly large values
    # are rejected as invalid too
    if npefg <= 0 or npefg > 10000000:
        return 0

    _check_ndim(
        self,
        {
            ArrayType.airbags_n_particles: ["n_airbags"],
            ArrayType.airbags_n_chambers: ["n_airbags"],
        },
    )

    # npefg packs two fields: subver * 1000 + n_airbags
    # n_airbags = npefg % 1000
    subver = npefg // 1000

    # airbag geometry var count (5 with chamber info, else 4)
    ngeom = 5 if ArrayType.airbags_n_chambers in self.arrays else 4

    # state variable count per particle
    # see nlist_names_typecodes below (14 entries after the geometry vars)
    nvar = 14

    # n particles summed over all airbags
    n_particles = 0
    if ArrayType.airbags_n_particles in self.arrays:
        n_particles = np.sum(self.arrays[ArrayType.airbags_n_particles])

    # airbag state var count
    nstgeom = 2

    # write the counts
    n_bytes_written = 0
    n_bytes_written += fp.write(settings.pack(ngeom))
    n_bytes_written += fp.write(settings.pack(nvar))
    n_bytes_written += fp.write(settings.pack(n_particles))
    n_bytes_written += fp.write(settings.pack(nstgeom))
    if subver == 4:
        # subver 4 additionally stores the total chamber count
        # This was never validated
        n_bytes_written += fp.write(
            settings.pack(self.arrays[ArrayType.airbags_n_chambers].sum())
        )

    # check
    n_bytes_expected = (5 if subver == 4 else 4) * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    # typecode for variables: 1 = integer, 2 = float
    # pylint: disable = invalid-name
    INT_TC = 1
    # pylint: disable = invalid-name
    FLOAT_TC = 2
    # (name, typecode) pairs in file order: ngeom geometry vars, nvar
    # particle state vars, nstgeom airbag state vars. Names are 8 chars.
    nlist_names_typecodes = [
        # airbag geometry data (ngeom)
        ["Start N ", INT_TC],
        ["Npart   ", INT_TC],
        ["Bag ID  ", INT_TC],
        ["NGasC   ", INT_TC],
        ["NCham   ", INT_TC],
        # state particle data (nvar)
        ["GasC ID ", INT_TC],
        ["Cham ID ", INT_TC],
        ["Leakage ", INT_TC],
        ["Pos x   ", FLOAT_TC],
        ["Pos y   ", FLOAT_TC],
        ["Pos z   ", FLOAT_TC],
        ["Vel x   ", FLOAT_TC],
        ["Vel y   ", FLOAT_TC],
        ["Vel z   ", FLOAT_TC],
        ["Mass    ", FLOAT_TC],
        ["Radius  ", FLOAT_TC],
        ["Spin En ", FLOAT_TC],
        ["Tran En ", FLOAT_TC],
        ["NS dist ", FLOAT_TC],
        # airbag state vars (nstgeom)
        ["Act Gas ", INT_TC],
        ["Bag Vol ", FLOAT_TC],
    ]

    # airbag var typecodes
    for _, typecode in nlist_names_typecodes:
        n_bytes_written += fp.write(settings.pack(typecode))

    # airbag var names
    # every word is an ascii char. So, we need to set
    # only the first byte to the ascii char code
    fmt_string = "{0:" + str(settings.wordsize) + "}"
    for name, _ in nlist_names_typecodes:
        name_formatted = fmt_string.format(name).encode("ascii")
        for ch in name_formatted:
            barray = bytearray(settings.wordsize)
            barray[0] = ch

            n_bytes_written += fp.write(settings.pack(barray, settings.wordsize))

    # check: 1 typecode word + 8 name words per variable
    n_bytes_expected += len(nlist_names_typecodes) * 9 * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6641 

def _write_geometry(self, fp: typing.IO[Any], settings: D3plotWriterSettings) -> int:
    """Write the core geometry block: node coordinates followed by the
    connectivity of solids, thick shells, beams and shells.

    Each element row is its node indexes plus the part index as a trailing
    column, all shifted to FORTRAN 1-based indexing.

    Parameters
    ----------
    fp: typing.IO[Any]
        file handle to write to
    settings: D3plotWriterSettings
        settings with header data and word size

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file
    """

    n_bytes_written = 0

    # pre-checks
    _check_ndim(
        self,
        {
            ArrayType.node_coordinates: ["n_nodes", "x_y_z"],
            ArrayType.element_solid_node_indexes: ["n_solids", "n_element_nodes"],
            ArrayType.element_solid_part_indexes: ["n_solids"],
            ArrayType.element_solid_extra_nodes: ["n_solids", "n_extra_nodes"],
            ArrayType.element_tshell_node_indexes: ["n_tshells", "n_element_nodes"],
            ArrayType.element_tshell_part_indexes: ["n_tshells"],
            ArrayType.element_beam_node_indexes: ["n_beams", "n_element_nodes"],
            ArrayType.element_beam_part_indexes: ["n_beams"],
            ArrayType.element_shell_node_indexes: ["n_shells", "n_element_nodes"],
            ArrayType.element_shell_part_indexes: ["n_shells"],
        },
    )
    self.check_array_dims({ArrayType.node_coordinates: 1}, "x_y_z", 3)

    # all solid arrays must agree on n_solids
    array_dims = {
        ArrayType.element_solid_node_indexes: 0,
        ArrayType.element_solid_part_indexes: 0,
        ArrayType.element_solid_extra_nodes: 0,
    }
    n_solids = self.check_array_dims(array_dims, "n_solids")
    self.check_array_dims({ArrayType.element_solid_node_indexes: 1}, "n_element_nodes", 8)
    self.check_array_dims({ArrayType.element_solid_extra_nodes: 1}, "n_extra_nodes", 2)
    array_dims = {
        ArrayType.element_tshell_node_indexes: 0,
        ArrayType.element_tshell_part_indexes: 0,
    }
    self.check_array_dims(array_dims, "n_tshells")
    self.check_array_dims({ArrayType.element_tshell_node_indexes: 1}, "n_element_nodes", 8)
    array_dims = {
        ArrayType.element_beam_node_indexes: 0,
        ArrayType.element_beam_part_indexes: 0,
    }
    self.check_array_dims(array_dims, "n_beams")
    self.check_array_dims({ArrayType.element_beam_node_indexes: 1}, "n_element_nodes", 5)
    array_dims = {
        ArrayType.element_shell_node_indexes: 0,
        ArrayType.element_shell_part_indexes: 0,
    }
    self.check_array_dims(array_dims, "n_shells")
    self.check_array_dims({ArrayType.element_shell_node_indexes: 1}, "n_element_nodes", 4)

    # NODES
    # NOTE(review): the empty fallback uses header["ndim"] columns while a
    # present array is checked to have 3 — harmless since the fallback is
    # empty, but confirm intent
    node_coordinates = (
        self.arrays[ArrayType.node_coordinates]
        if ArrayType.node_coordinates in self.arrays
        else np.zeros((0, settings.header["ndim"]), dtype=self.header.ftype)
    )
    n_bytes_written += fp.write(settings.pack(node_coordinates, dtype_hint=np.floating))

    # SOLIDS: 8 node indexes + part index per element
    solid_node_indexes = (
        self.arrays[ArrayType.element_solid_node_indexes] + FORTRAN_OFFSET
        if ArrayType.element_solid_node_indexes in self.arrays
        else np.zeros((0, 8), dtype=self._header.itype)
    )
    solid_part_indexes = (
        self.arrays[ArrayType.element_solid_part_indexes] + FORTRAN_OFFSET
        if ArrayType.element_solid_part_indexes in self.arrays
        else np.zeros(0, dtype=self._header.itype)
    )
    solid_geom_array = np.concatenate(
        (solid_node_indexes, solid_part_indexes.reshape(n_solids, 1)), axis=1
    )
    n_bytes_written += fp.write(settings.pack(solid_geom_array, dtype_hint=np.integer))

    # SOLID 10
    # the two extra nodes
    if ArrayType.element_solid_extra_nodes in self.arrays:
        array = self.arrays[ArrayType.element_solid_extra_nodes] + FORTRAN_OFFSET
        n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.integer))

    # THICK SHELLS: 8 node indexes + part index per element
    tshell_node_indexes = (
        self.arrays[ArrayType.element_tshell_node_indexes] + FORTRAN_OFFSET
        if ArrayType.element_tshell_node_indexes in self.arrays
        else np.zeros((0, 8), dtype=self._header.itype)
    )
    tshell_part_indexes = (
        self.arrays[ArrayType.element_tshell_part_indexes] + FORTRAN_OFFSET
        if ArrayType.element_tshell_part_indexes in self.arrays
        else np.zeros(0, dtype=self._header.itype)
    )
    tshell_geom_array = np.concatenate(
        (tshell_node_indexes, tshell_part_indexes.reshape(-1, 1)), axis=1
    )
    n_bytes_written += fp.write(settings.pack(tshell_geom_array, dtype_hint=np.integer))

    # BEAMS: 5 node indexes + part index per element
    beam_node_indexes = (
        self.arrays[ArrayType.element_beam_node_indexes] + FORTRAN_OFFSET
        if ArrayType.element_beam_node_indexes in self.arrays
        else np.zeros((0, 5), dtype=self._header.itype)
    )
    beam_part_indexes = (
        self.arrays[ArrayType.element_beam_part_indexes] + FORTRAN_OFFSET
        if ArrayType.element_beam_part_indexes in self.arrays
        else np.zeros(0, dtype=self._header.itype)
    )
    beam_geom_array = np.concatenate(
        (beam_node_indexes, beam_part_indexes.reshape(-1, 1)), axis=1
    )
    n_bytes_written += fp.write(settings.pack(beam_geom_array, dtype_hint=np.integer))

    # SHELLS: 4 node indexes + part index per element
    shell_node_indexes = (
        self.arrays[ArrayType.element_shell_node_indexes] + FORTRAN_OFFSET
        if ArrayType.element_shell_node_indexes in self.arrays
        else np.zeros((0, 4), dtype=self._header.itype)
    )
    shell_part_indexes = (
        self.arrays[ArrayType.element_shell_part_indexes] + FORTRAN_OFFSET
        if ArrayType.element_shell_part_indexes in self.arrays
        else np.zeros(0, dtype=self._header.itype)
    )
    shell_geom_array = np.concatenate(
        (shell_node_indexes, shell_part_indexes.reshape(-1, 1)), axis=1
    )
    n_bytes_written += fp.write(settings.pack(shell_geom_array, dtype_hint=np.integer))

    # check against the element counts declared in the header
    # (nel8 may be negative when extra solid node data is present)
    n_bytes_expected = (
        settings.header["numnp"] * 3
        + abs(settings.header["nel8"]) * 9
        + settings.header["nelth"] * 9
        + settings.header["nel2"] * 6
        + settings.header["nel4"] * 5
    ) * settings.wordsize
    if ArrayType.element_solid_extra_nodes in self.arrays:
        n_bytes_expected += 2 * abs(settings.header["nel8"])
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    # return the chunks
    return n_bytes_written

6783 

def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSettings) -> int:
    """Write the user numbering section (NARBS): node, element and part ids.

    Emits the numbering header (pointers and counts), then the user id
    arrays for nodes, solids, beams, shells, tshells and materials.

    Fix: removed a dead ``np.full`` pre-allocation of ``material_ids`` that
    was unconditionally overwritten by both branches of the following
    if/else.

    Parameters
    ----------
    fp: typing.IO[Any]
        file handle to write to
    settings: D3plotWriterSettings
        settings with header data and word size

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if narbs == 0)
    """

    narbs = settings.header["narbs"]
    if narbs == 0:
        return 0

    info = self._numbering_info

    _check_ndim(
        self,
        {
            ArrayType.node_ids: ["n_nodes"],
            ArrayType.element_solid_ids: ["n_solids"],
            ArrayType.element_beam_ids: ["n_beams"],
            ArrayType.element_shell_ids: ["n_shells"],
            ArrayType.element_tshell_ids: ["n_tshells"],
            ArrayType.part_ids: ["n_parts"],
            ArrayType.part_ids_unordered: ["n_parts"],
            ArrayType.part_ids_cross_references: ["n_parts"],
        },
    )

    n_bytes_written = 0

    # NUMBERING HEADER

    # nsort seems to be solver internal pointer
    # ... hopefully
    # a negative nsort signals that the material id block follows
    nsort = info.ptr_node_ids
    nsort *= -1 if ArrayType.part_ids in self.arrays else 1

    n_bytes_written += fp.write(settings.pack(nsort))

    # pointer chain: node ids, then solids, beams, shells
    nsrh = abs(nsort) + settings.header["numnp"]
    n_bytes_written += fp.write(settings.pack(nsrh))

    nsrb = nsrh + abs(settings.header["nel8"])
    n_bytes_written += fp.write(settings.pack(nsrb))

    nsrs = nsrb + settings.header["nel2"]
    n_bytes_written += fp.write(settings.pack(nsrs))

    nsrt = nsrs + settings.header["nel4"]
    n_bytes_written += fp.write(settings.pack(nsrt))

    # lengths of the id arrays that follow
    nsortd = settings.header["numnp"]
    n_bytes_written += fp.write(settings.pack(nsortd))

    nsrhd = abs(settings.header["nel8"])
    n_bytes_written += fp.write(settings.pack(nsrhd))

    nsrbd = settings.header["nel2"]
    n_bytes_written += fp.write(settings.pack(nsrbd))

    nsrsd = settings.header["nel4"]
    n_bytes_written += fp.write(settings.pack(nsrsd))

    nsrtd = settings.header["nelth"]
    n_bytes_written += fp.write(settings.pack(nsrtd))

    if ArrayType.part_ids in self.arrays:
        # some lsdyna material pointer
        nsrma = info.ptr_material_ids
        n_bytes_written += fp.write(settings.pack(nsrma))

        # some lsdyna material pointer
        nsrmu = info.ptr_material_ids_defined_order
        n_bytes_written += fp.write(settings.pack(nsrmu))

        # some lsdyna material pointer
        nsrmp = info.ptr_material_ids_crossref
        n_bytes_written += fp.write(settings.pack(nsrmp))

        # "Total number of materials (parts)"
        nsrtm = settings.header["nmmat"]
        n_bytes_written += fp.write(settings.pack(nsrtm))

        # Total number of nodal rigid body constraint sets
        numrbs = settings.header["numrbs"]
        n_bytes_written += fp.write(settings.pack(numrbs))

        # Total number of materials
        # ... coz it's fun doing nice things twice
        nmmat = settings.header["nmmat"]
        n_bytes_written += fp.write(settings.pack(nmmat))

    # NODE IDS (fallback: consecutive 1-based ids)
    node_ids = (
        self.arrays[ArrayType.node_ids]
        if ArrayType.node_ids in self.arrays
        else np.arange(
            FORTRAN_OFFSET, settings.header["numnp"] + FORTRAN_OFFSET, dtype=settings.itype
        )
    )
    n_bytes_written += fp.write(settings.pack(node_ids, dtype_hint=np.integer))

    # SOLID IDS
    solid_ids = (
        self.arrays[ArrayType.element_solid_ids]
        if ArrayType.element_solid_ids in self.arrays
        else np.arange(
            FORTRAN_OFFSET, settings.header["nel8"] + FORTRAN_OFFSET, dtype=settings.itype
        )
    )
    n_bytes_written += fp.write(settings.pack(solid_ids, dtype_hint=np.integer))

    # BEAM IDS
    beam_ids = (
        self.arrays[ArrayType.element_beam_ids]
        if ArrayType.element_beam_ids in self.arrays
        else np.arange(
            FORTRAN_OFFSET, settings.header["nel2"] + FORTRAN_OFFSET, dtype=settings.itype
        )
    )
    n_bytes_written += fp.write(settings.pack(beam_ids, dtype_hint=np.integer))

    # SHELL IDS
    shell_ids = (
        self.arrays[ArrayType.element_shell_ids]
        if ArrayType.element_shell_ids in self.arrays
        else np.arange(
            FORTRAN_OFFSET, settings.header["nel4"] + FORTRAN_OFFSET, dtype=settings.itype
        )
    )
    n_bytes_written += fp.write(settings.pack(shell_ids, dtype_hint=np.integer))

    # TSHELL IDS
    tshell_ids = (
        self.arrays[ArrayType.element_tshell_ids]
        if ArrayType.element_tshell_ids in self.arrays
        else np.arange(
            FORTRAN_OFFSET, settings.header["nelth"] + FORTRAN_OFFSET, dtype=settings.itype
        )
    )
    n_bytes_written += fp.write(settings.pack(tshell_ids, dtype_hint=np.integer))

    # MATERIALS .... yay
    #
    # lsdyna generates duplicate materials originally
    # thus nmmat in header is larger than the materials used
    # by the elements. Some are related to rigid bodies
    # but some are also generated internally by material models
    # by the following procedure the material array is larger
    # than the actual amount of materials (there may be unused
    # material ids), but it ensures a relatively consistent writing
    if ArrayType.part_ids in self.arrays:
        material_ids = self.arrays[ArrayType.part_ids]
    else:
        material_ids = np.arange(start=0, stop=settings.header["nmmat"], dtype=settings.itype)

    n_bytes_written += fp.write(settings.pack(material_ids, dtype_hint=np.integer))

    # unordered material ids can be ignored
    data_array = np.zeros(settings.header["nmmat"], dtype=settings.itype)
    if ArrayType.part_ids_unordered in self.arrays:
        array = self.arrays[ArrayType.part_ids_unordered]
        end_index = min(len(array), len(data_array))
        data_array[:end_index] = array[:end_index]
    n_bytes_written += fp.write(settings.pack(data_array, dtype_hint=np.integer))

    # also cross-reference array for ids
    data_array = np.zeros(settings.header["nmmat"], dtype=settings.itype)
    if ArrayType.part_ids_cross_references in self.arrays:
        array = self.arrays[ArrayType.part_ids_cross_references]
        end_index = min(len(array), len(data_array))
        data_array[:end_index] = array[:end_index]
    n_bytes_written += fp.write(settings.pack(data_array, dtype_hint=np.integer))

    # check
    n_bytes_expected = settings.header["narbs"] * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

6960 

def _write_geom_rigid_body_description(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the rigid body description section of the geometry file.

    Only written for header ``ndim`` values 8 or 9. Emits the rigid body
    count, then per rigid body: part index, node index list and active
    node index list (each list prefixed with its length).

    Parameters
    ----------
    fp: typing.IO[Any]
        file handle to write to
    settings: D3plotWriterSettings
        settings with header data and word size

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    # this type of rigid body descriptions are very rare
    # and thus badly tested

    if settings.header["ndim"] not in (8, 9):
        return 0

    _check_ndim(
        self,
        {
            ArrayType.rigid_body_part_indexes: ["n_rigid_bodies"],
        },
    )
    array_dims = {
        ArrayType.rigid_body_part_indexes: 0,
        ArrayType.rigid_body_node_indexes_list: 0,
        ArrayType.rigid_body_active_node_indexes_list: 0,
    }
    if not _check_array_occurrence(self, list(array_dims.keys()), list(array_dims.keys())):
        return 0

    # check length
    # cannot use self._check_array_dims due to some lists
    # (the node index entries are per-body lists, not a rectangular array)
    dim_size = -1
    for typename in array_dims:
        array = self.arrays[typename]
        if dim_size < 0:
            dim_size = len(array)
        else:
            if len(array) != dim_size:
                dimension_size_dict = {
                    typename2: len(self.arrays[typename2]) for typename2 in array_dims
                }
                msg = "Inconsistency in array dim '{0}' detected:\n{1}"
                size_list = [
                    f"  - name: {typename}, dim: {array_dims[typename]}, size: {size}"
                    for typename, size in dimension_size_dict.items()
                ]
                raise ValueError(msg.format("n_rigid_bodies", "\n".join(size_list)))

    # part indexes are shifted to FORTRAN 1-based indexing here
    rigid_body_part_indexes = self.arrays[ArrayType.rigid_body_part_indexes] + FORTRAN_OFFSET
    # rigid_body_n_nodes = self.arrays[ArrayType.rigid_body_n_nodes]
    rigid_body_node_indexes_list = self.arrays[ArrayType.rigid_body_node_indexes_list]
    # rigid_body_n_active_nodes = self.arrays[ArrayType.rigid_body_n_active_nodes]
    rigid_body_active_node_indexes_list = self.arrays[
        ArrayType.rigid_body_active_node_indexes_list
    ]

    n_bytes_written = 0
    n_bytes_expected = settings.wordsize

    # NRIGID
    n_rigid_bodies = len(rigid_body_part_indexes)
    n_bytes_written += fp.write(settings.pack(n_rigid_bodies))

    for i_rigid in range(n_rigid_bodies):
        # part index
        n_bytes_written += fp.write(settings.pack(rigid_body_part_indexes[i_rigid]))
        # node indexes (length-prefixed, shifted to 1-based)
        array = rigid_body_node_indexes_list[i_rigid] + FORTRAN_OFFSET
        n_bytes_written += fp.write(settings.pack(len(array)))
        n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.integer))
        # active node indexes (length-prefixed)
        # NOTE(review): not shifted by FORTRAN_OFFSET unlike the other
        # index arrays — confirm this asymmetry is intended
        array = rigid_body_active_node_indexes_list[i_rigid]
        n_bytes_written += fp.write(settings.pack(len(array)))
        n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.integer))

        # 3 scalar words (part index + two lengths) plus both lists
        n_bytes_expected += settings.wordsize * (
            3
            + len(rigid_body_node_indexes_list[i_rigid])
            + len(rigid_body_active_node_indexes_list[i_rigid])
        )

    # check
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

7041 

def _write_geom_sph_node_and_materials(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the SPH node and material list of the geometry file.

    Only written if the header variable ``nmsph`` is positive. Every SPH
    particle contributes one (node index, material index) pair, both
    shifted to FORTRAN 1-based indexing.

    Fix: both source arrays are one-dimensional (enforced by
    ``_check_ndim`` below), so ``np.concatenate(..., axis=1)`` on them
    raised an AxisError; they are now reshaped into columns first, which
    also matches the expected byte count of ``nmsph * 2`` words.

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    nmsph = settings.header["nmsph"]

    if nmsph <= 0:
        return 0

    _check_ndim(
        self,
        {
            ArrayType.sph_node_indexes: ["n_sph_nodes"],
            ArrayType.sph_node_material_index: ["n_sph_nodes"],
        },
    )
    array_dims = {
        ArrayType.sph_node_indexes: 0,
        ArrayType.sph_node_material_index: 0,
    }
    array_names = list(array_dims.keys())
    _check_array_occurrence(self, array_names, array_names)
    self.check_array_dims(array_dims, "n_sph_nodes", nmsph)

    sph_node_indexes = self.arrays[ArrayType.sph_node_indexes] + FORTRAN_OFFSET
    sph_node_material_index = self.arrays[ArrayType.sph_node_material_index] + FORTRAN_OFFSET
    # stack as columns -> one (node index, material index) row per particle
    sph_data = np.concatenate(
        (sph_node_indexes.reshape(-1, 1), sph_node_material_index.reshape(-1, 1)), axis=1
    )

    # write
    n_bytes_written = fp.write(settings.pack(sph_data, dtype_hint=np.integer))

    # check
    n_bytes_expected = nmsph * settings.wordsize * 2
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

7078 

def _write_geom_particle_geometry_data(
    self, fp: typing.IO[Any], settings: D3plotWriterSettings
) -> int:
    """Write the airbag geometry table (one row per airbag).

    Only written if the header variable ``npefg`` is positive. Each row
    holds first particle id, particle count, airbag id, gas mixture count
    and, when present, the chamber count.

    Returns
    -------
    n_bytes_written: int
        number of bytes written to the file (0 if the section is absent)
    """

    npefg = settings.header["npefg"]
    if npefg <= 0:
        return 0

    _check_ndim(
        self,
        {
            ArrayType.airbags_first_particle_id: ["n_airbags"],
            ArrayType.airbags_n_particles: ["n_airbags"],
            ArrayType.airbags_ids: ["n_airbags"],
            ArrayType.airbags_n_gas_mixtures: ["n_airbags"],
            ArrayType.airbags_n_chambers: ["n_airbags"],
        },
    )
    array_dims = {
        ArrayType.airbags_first_particle_id: 0,
        ArrayType.airbags_n_particles: 0,
        ArrayType.airbags_ids: 0,
        ArrayType.airbags_n_gas_mixtures: 0,
        ArrayType.airbags_n_chambers: 0,
    }
    array_names = list(array_dims.keys())
    _check_array_occurrence(self, array_names, array_names)
    self.check_array_dims(array_dims, "n_airbags")

    # one column per geometry variable, in file order
    geometry_array_types = [
        ArrayType.airbags_first_particle_id,
        ArrayType.airbags_n_particles,
        ArrayType.airbags_ids,
        ArrayType.airbags_n_gas_mixtures,
    ]
    # chamber counts are optional (ngeom 5 instead of 4)
    if ArrayType.airbags_n_chambers in self.arrays:
        geometry_array_types.append(ArrayType.airbags_n_chambers)
    columns = [
        self.arrays[array_type].reshape(-1, 1) for array_type in geometry_array_types
    ]

    # write
    airbag_geometry_data = np.concatenate(columns, axis=1)
    n_bytes_written = fp.write(settings.pack(airbag_geometry_data, dtype_hint=np.integer))

    # check
    n_airbags = npefg % 1000
    ngeom = len(geometry_array_types)
    n_bytes_expected = n_airbags * ngeom * settings.wordsize
    D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected)

    return n_bytes_written

7129 

7130 def _write_geom_rigid_road_surface( 

7131 self, fp: typing.IO[Any], settings: D3plotWriterSettings 

7132 ) -> int: 

7133 

7134 if settings.header["ndim"] <= 5: 

7135 return 0 

7136 

7137 _check_ndim( 

7138 self, 

7139 { 

7140 ArrayType.rigid_road_node_ids: ["rigid_road_n_nodes"], 

7141 ArrayType.rigid_road_node_coordinates: ["rigid_road_n_nodes", "x_y_z"], 

7142 ArrayType.rigid_road_segment_node_ids: ["n_segments", "n_nodes"], 

7143 ArrayType.rigid_road_segment_road_id: ["n_segments"], 

7144 }, 

7145 ) 

7146 array_dims = { 

7147 ArrayType.rigid_road_node_ids: 0, 

7148 ArrayType.rigid_road_node_coordinates: 0, 

7149 } 

7150 n_rigid_road_nodes = self.check_array_dims(array_dims, "rigid_road_n_nodes") 

7151 self.check_array_dims({ArrayType.rigid_road_node_coordinates: 1}, "x_y_z", 3) 

7152 array_dims = { 

7153 ArrayType.rigid_road_n_segments: 0, 

7154 ArrayType.rigid_road_segment_node_ids: 0, 

7155 ArrayType.rigid_road_segment_road_id: 0, 

7156 } 

7157 n_rigid_roads = self.check_array_dims(array_dims, "n_rigid_roads") 

7158 n_bytes_written = 0 

7159 

7160 # NODE COUNT 

7161 n_bytes_written += fp.write(settings.pack(n_rigid_road_nodes)) 

7162 

7163 # SEGMENT COUNT 

7164 # This was never verified 

7165 n_total_segments = np.sum( 

7166 len(segment_ids) for segment_ids in self.arrays[ArrayType.rigid_road_segment_node_ids] 

7167 ) 

7168 n_bytes_written += fp.write(settings.pack(n_total_segments)) 

7169 

7170 # SURFACE COUNT 

7171 n_bytes_written += fp.write(settings.pack(n_rigid_roads)) 

7172 

7173 # MOTION FLAG - if motion data is output 

7174 # by default let's just say ... yeah baby 

7175 # This was never verified 

7176 n_bytes_written += fp.write(settings.pack(1)) 

7177 

7178 # RIGID ROAD NODE IDS 

7179 rigid_road_node_ids = self.arrays[ArrayType.rigid_road_node_ids] 

7180 n_bytes_written += fp.write(settings.pack(rigid_road_node_ids, dtype_hint=np.integer)) 

7181 

7182 # RIGID ROAD NODE COORDS 

7183 rigid_road_node_coordinates = self.arrays[ArrayType.rigid_road_node_coordinates] 

7184 n_bytes_written += fp.write( 

7185 settings.pack(rigid_road_node_coordinates, dtype_hint=np.floating) 

7186 ) 

7187 

7188 # SURFACE ID 

7189 # SURFACE N_SEGMENTS 

7190 # SURFACE SEGMENTS 

7191 rigid_road_segment_road_id = self.arrays[ArrayType.rigid_road_segment_road_id] 

7192 rigid_road_segment_node_ids = self.arrays[ArrayType.rigid_road_segment_node_ids] 

7193 

7194 for segment_id, node_ids in zip(rigid_road_segment_road_id, rigid_road_segment_node_ids): 

7195 n_bytes_written += fp.write(settings.pack(segment_id)) 

7196 n_bytes_written += fp.write(settings.pack(len(node_ids))) 

7197 n_bytes_written += fp.write(settings.pack(node_ids, dtype_hint=np.integer)) 

7198 

7199 # check 

7200 n_bytes_expected = ( 

7201 4 + 4 * n_rigid_road_nodes + n_rigid_roads * (2 + 4 * n_total_segments) 

7202 ) * settings.wordsize 

7203 D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected) 

7204 

7205 return n_bytes_written 

7206 

7207 def _write_geom_extra_node_data( 

7208 self, fp: typing.IO[Any], settings: D3plotWriterSettings 

7209 ) -> int: 

7210 

7211 n_bytes_written = 0 

7212 

7213 # 10 NODE SOLIDS 

7214 if settings.header["nel8"] < 0: 

7215 _check_ndim( 

7216 self, 

7217 { 

7218 ArrayType.element_solid_node10_extra_node_indexes: [ 

7219 "n_solids", 

7220 "2_extra_node_ids", 

7221 ], 

7222 }, 

7223 ) 

7224 array_dims = { 

7225 ArrayType.element_solid_node_indexes: 0, 

7226 ArrayType.element_solid_node10_extra_node_indexes: 0, 

7227 } 

7228 self.check_array_dims(array_dims, "n_solids") 

7229 self.check_array_dims( 

7230 {ArrayType.element_solid_node10_extra_node_indexes: 1}, "extra_node_ids", 2 

7231 ) 

7232 

7233 extra_nodes = ( 

7234 self.arrays[ArrayType.element_solid_node10_extra_node_indexes] + FORTRAN_OFFSET 

7235 ) 

7236 

7237 n_bytes_written += fp.write(settings.pack(extra_nodes, dtype_hint=np.integer)) 

7238 

7239 # 8 NODE SHELLS 

7240 if settings.header["nel48"] > 0: 

7241 _check_ndim( 

7242 self, 

7243 { 

7244 ArrayType.element_shell_node8_element_index: ["n_node8_shells"], 

7245 ArrayType.element_shell_node8_extra_node_indexes: [ 

7246 "n_node8_shells", 

7247 "4_extra_node_ids", 

7248 ], 

7249 }, 

7250 ) 

7251 array_dims = { 

7252 ArrayType.element_shell_node8_element_index: 0, 

7253 ArrayType.element_shell_node8_extra_node_indexes: 0, 

7254 } 

7255 self.check_array_dims(array_dims, "n_node8_shells") 

7256 self.check_array_dims( 

7257 {ArrayType.element_shell_node8_extra_node_indexes: 1}, "extra_node_ids", 4 

7258 ) 

7259 

7260 element_indexes = ( 

7261 self.arrays[ArrayType.element_shell_node8_element_index] + FORTRAN_OFFSET 

7262 ) 

7263 extra_nodes = ( 

7264 self.arrays[ArrayType.element_shell_node8_extra_node_indexes] + FORTRAN_OFFSET 

7265 ) 

7266 

7267 geom_data = np.concatenate((element_indexes, extra_nodes), axis=1) 

7268 

7269 n_bytes_written += fp.write(settings.pack(geom_data, dtype_hint=np.integer)) 

7270 

7271 # 20 NODE SOLIDS 

7272 if settings.header["extra"] > 0 and settings.header["nel20"] > 0: 

7273 _check_ndim( 

7274 self, 

7275 { 

7276 ArrayType.element_solid_node20_element_index: ["n_node20_solids"], 

7277 ArrayType.element_solid_node20_extra_node_indexes: [ 

7278 "n_node20_solids", 

7279 "12_extra_node_ids", 

7280 ], 

7281 }, 

7282 ) 

7283 array_dims = { 

7284 ArrayType.element_solid_node20_element_index: 0, 

7285 ArrayType.element_solid_node20_extra_node_indexes: 0, 

7286 } 

7287 self.check_array_dims(array_dims, "n_node20_solids") 

7288 self.check_array_dims( 

7289 {ArrayType.element_solid_node20_extra_node_indexes: 1}, "extra_node_ids", 12 

7290 ) 

7291 

7292 element_indexes = ( 

7293 self.arrays[ArrayType.element_solid_node20_element_index] + FORTRAN_OFFSET 

7294 ) 

7295 extra_nodes = ( 

7296 self.arrays[ArrayType.element_solid_node20_extra_node_indexes] + FORTRAN_OFFSET 

7297 ) 

7298 

7299 geom_data = np.concatenate((element_indexes, extra_nodes), axis=1) 

7300 

7301 n_bytes_written += fp.write(settings.pack(geom_data, dtype_hint=np.integer)) 

7302 

7303 # 27 NODE SOLIDS 

7304 if settings.header["extra"] > 0 and settings.header["nel27"] > 0: 

7305 _check_ndim( 

7306 self, 

7307 { 

7308 ArrayType.element_solid_node20_element_index: ["n_node27_solids"], 

7309 ArrayType.element_solid_node20_extra_node_indexes: [ 

7310 "n_node27_solids", 

7311 "19_extra_node_ids", 

7312 ], 

7313 }, 

7314 ) 

7315 array_dims = { 

7316 ArrayType.element_solid_node27_element_index: 0, 

7317 ArrayType.element_solid_node27_extra_node_indexes: 0, 

7318 } 

7319 self.check_array_dims(array_dims, "n_node27_solids") 

7320 self.check_array_dims( 

7321 {ArrayType.element_solid_node27_extra_node_indexes: 1}, "extra_node_ids", 19 

7322 ) 

7323 

7324 element_indexes = ( 

7325 self.arrays[ArrayType.element_solid_node27_element_index] + FORTRAN_OFFSET 

7326 ) 

7327 extra_nodes = ( 

7328 self.arrays[ArrayType.element_solid_node27_extra_node_indexes] + FORTRAN_OFFSET 

7329 ) 

7330 

7331 geom_data = np.concatenate((element_indexes, extra_nodes), axis=1) 

7332 

7333 n_bytes_written += fp.write(settings.pack(geom_data, dtype_hint=np.integer)) 

7334 

7335 # check 

7336 has_nel10 = settings.header["nel8"] < 0 

7337 n_bytes_expected = ( 

7338 has_nel10 * abs(settings.header["nel8"]) 

7339 + settings.header["nel48"] * 5 

7340 + settings.header["nel20"] * 13 

7341 + settings.header["nel27"] * 20 

7342 ) * settings.wordsize 

7343 D3plot._compare_n_bytes_checksum(n_bytes_written, n_bytes_expected) 

7344 

7345 return n_bytes_written 

7346 

7347 def _write_header_part_contact_interface_titles( 

7348 self, fp: typing.IO[Any], settings: D3plotWriterSettings 

7349 ) -> int: 

7350 

7351 n_bytes_written = 0 

7352 

7353 # PART TITLES 

7354 _check_ndim( 

7355 self, 

7356 { 

7357 # ArrayType.part_titles: ["n_parts", "n_chars"], 

7358 ArrayType.part_titles_ids: ["n_parts"], 

7359 }, 

7360 ) 

7361 array_dimensions = { 

7362 ArrayType.part_titles: 0, 

7363 ArrayType.part_titles_ids: 0, 

7364 } 

7365 if _check_array_occurrence( 

7366 self, list(array_dimensions.keys()), list(array_dimensions.keys()) 

7367 ): 

7368 self.check_array_dims(array_dimensions, "n_parts") 

7369 

7370 ntype = 90001 

7371 

7372 n_bytes_written += fp.write(settings.pack(ntype)) 

7373 

7374 part_titles_ids = self.arrays[ArrayType.part_titles_ids] 

7375 part_titles = self.arrays[ArrayType.part_titles] 

7376 

7377 n_entries = len(part_titles) 

7378 n_bytes_written += fp.write(settings.pack(n_entries)) 

7379 

7380 # title words always have 4 byte size 

7381 title_wordsize = 4 

7382 max_len = 18 * title_wordsize 

7383 fmt_name = "{0:" + str(max_len) + "}" 

7384 for pid, title in zip(part_titles_ids, part_titles): 

7385 title = title.decode("ascii") 

7386 n_bytes_written += fp.write(settings.pack(pid)) 

7387 

7388 formatted_title = fmt_name.format(title[:max_len]) 

7389 n_bytes_written += fp.write(settings.pack(formatted_title, max_len)) 

7390 

7391 # TITLE2 

7392 # yet another title, coz double is always more fun 

7393 if "title2" in self.header.title2: 

7394 ntype = 90000 

7395 

7396 # title words always have 4 bytes 

7397 title_wordsize = 4 

7398 title_size_words = 18 

7399 

7400 fmt_title2 = "{0:" + str(title_wordsize * title_size_words) + "}" 

7401 title2 = fmt_title2.format(self.header.title2[: settings.wordsize * title_size_words]) 

7402 

7403 n_bytes_written += fp.write(settings.pack(ntype)) 

7404 n_bytes_written += fp.write(settings.pack(title2, settings.wordsize * title_size_words)) 

7405 

7406 # CONTACT TITLES 

7407 array_dimensions = { 

7408 ArrayType.contact_titles: 0, 

7409 ArrayType.contact_title_ids: 0, 

7410 } 

7411 if _check_array_occurrence( 

7412 self, list(array_dimensions.keys()), list(array_dimensions.keys()) 

7413 ): 

7414 self.check_array_dims(array_dimensions, "n_parts") 

7415 

7416 ntype = 90002 

7417 n_bytes_written += fp.write(settings.pack(ntype)) 

7418 

7419 titles_ids = self.arrays[ArrayType.contact_title_ids] 

7420 titles = self.arrays[ArrayType.contact_titles] 

7421 

7422 n_entries = len(titles) 

7423 n_bytes_written += fp.write(settings.pack(n_entries)) 

7424 

7425 max_len = 18 * self.header.wordsize 

7426 fmt_name = "{0:" + str(max_len) + "}" 

7427 for pid, title in zip(titles_ids, titles): 

7428 n_bytes_written += fp.write(settings.pack(pid)) 

7429 

7430 formatted_title = fmt_name.format(title[:max_len]) 

7431 n_bytes_written += fp.write(settings.pack(formatted_title)) 

7432 

7433 return n_bytes_written 

7434 

    def _write_states(
        self, filepath: Union[str, typing.BinaryIO], settings: D3plotWriterSettings
    ) -> int:
        """Write all state (time step) data sections.

        Parameters
        ----------
        filepath: Union[str, typing.BinaryIO]
            base filepath (state files are suffixed with a counter) or an
            already opened binary file object to append to
        settings: D3plotWriterSettings
            settings controlling wordsize, packing and file splitting

        Returns
        -------
        n_timesteps_written: int
            number of time steps written
        """

        # did we store any states?
        n_timesteps_written = 0

        # if timestep array is missing check for any state arrays
        if ArrayType.global_timesteps not in self.arrays:
            # if any state array is present simply make up a timestep array
            if any(array_name in self.arrays for array_name in ArrayType.get_state_array_names()):
                array_dims = {array_name: 0 for array_name in ArrayType.get_state_array_names()}
                # all state arrays must agree on their first (time) dimension
                n_timesteps = self.check_array_dims(
                    array_dimensions=array_dims, dimension_name="n_timesteps"
                )
                self._state_info.n_timesteps = n_timesteps
                # fabricated timesteps are simply 0, 1, 2, ...
                self.arrays[ArrayType.global_timesteps] = np.arange(
                    0, n_timesteps, dtype=settings.ftype
                )
            # no state data so we call it a day
            else:
                return n_timesteps_written

        # formatter for state files
        timesteps = self.arrays[ArrayType.global_timesteps]
        n_timesteps = len(timesteps)
        fmt_state_file_counter = "{0:02d}"

        # single file or multiple file handling
        # state_fp stays None only for the multi-file/str case, where a new
        # file is opened per timestep inside the loop below
        state_fp: Union[None, typing.BinaryIO] = None
        file_to_close: Union[None, typing.BinaryIO] = None
        if isinstance(filepath, str):
            if settings.single_file:
                # pylint: disable = consider-using-with
                # closed in the finally block below
                state_fp = file_to_close = open(filepath + fmt_state_file_counter.format(1), "ab")
            else:
                # create a new file per timestep
                # see time loop
                pass
        else:
            # caller-provided file object; caller is responsible for closing
            state_fp = filepath

        try:
            # time looping ... wheeeeeeeee
            for i_timestep, _ in enumerate(timesteps):

                # open new state file ... or not
                state_filepath_or_file = (
                    filepath + fmt_state_file_counter.format(i_timestep + 1)
                    if isinstance(filepath, str) and state_fp is None
                    else state_fp
                )

                n_bytes_written = 0

                # open_file_or_filepath leaves caller-provided file objects open
                with open_file_or_filepath(state_filepath_or_file, "ab") as fp:

                    # GLOBALS
                    n_bytes_written += self._write_states_globals(fp, i_timestep, settings)

                    # NODE DATA
                    n_bytes_written += self._write_states_nodes(fp, i_timestep, settings)

                    # SOLID THERMAL DATA
                    n_bytes_written += self._write_states_solid_thermal_data(
                        fp, i_timestep, settings
                    )

                    # CFDDATA
                    # not supported

                    # SOLIDS
                    n_bytes_written += self._write_states_solids(fp, i_timestep, settings)

                    # THICK SHELLS
                    n_bytes_written += self._write_states_tshells(fp, i_timestep, settings)

                    # spocky ... BEAM me up
                    n_bytes_written += self._write_states_beams(fp, i_timestep, settings)

                    # SHELLS
                    n_bytes_written += self._write_states_shells(fp, i_timestep, settings)

                    # DELETION INFO
                    n_bytes_written += self._write_states_deletion_info(fp, i_timestep, settings)

                    # SPH
                    n_bytes_written += self._write_states_sph(fp, i_timestep, settings)

                    # AIRBAG
                    n_bytes_written += self._write_states_airbags(fp, i_timestep, settings)

                    # RIGID ROAD
                    n_bytes_written += self._write_states_rigid_road(fp, i_timestep, settings)

                    # RIGID BODY
                    n_bytes_written += self._write_states_rigid_bodies(fp, i_timestep, settings)

                    # EXTRA DATA
                    # not supported

                    # end mark
                    # at the end for single file buffer
                    # or behind each state file
                    if not settings.single_file or i_timestep == n_timesteps - 1:
                        n_bytes_written += fp.write(settings.pack(-999999.0))

                    # pad the state block up to the next block boundary
                    if settings.block_size_bytes > 0:
                        zero_bytes = self._get_zero_byte_padding(
                            n_bytes_written, settings.block_size_bytes
                        )
                        n_bytes_written += fp.write(zero_bytes)

                # log
                msg = "_write_states wrote %d bytes"
                LOGGER.debug(msg, n_bytes_written)
                n_timesteps_written += 1

        finally:
            # close file if required
            if file_to_close is not None:
                file_to_close.close()

        return n_timesteps_written

7559 

7560 def _write_states_globals( 

7561 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

7562 ) -> int: 

7563 

7564 _check_ndim( 

7565 self, 

7566 { 

7567 ArrayType.global_kinetic_energy: ["n_timesteps"], 

7568 ArrayType.global_internal_energy: ["n_timesteps"], 

7569 ArrayType.global_total_energy: ["n_timesteps"], 

7570 ArrayType.global_velocity: ["n_timesteps", "vx_vy_vz"], 

7571 }, 

7572 ) 

7573 array_dims = { 

7574 ArrayType.global_timesteps: 0, 

7575 ArrayType.global_kinetic_energy: 0, 

7576 ArrayType.global_internal_energy: 0, 

7577 ArrayType.global_total_energy: 0, 

7578 ArrayType.global_velocity: 0, 

7579 } 

7580 self.check_array_dims(array_dims, "n_timesteps") 

7581 

7582 byte_checksum = 0 

7583 

7584 n_global_vars = settings.header["nglbv"] 

7585 

7586 # TIME 

7587 timesteps = self.arrays[ArrayType.global_timesteps] 

7588 byte_checksum += fp.write(settings.pack(timesteps[i_timestep])) 

7589 

7590 # GLOBAL KINETIC ENERGY 

7591 if n_global_vars >= 1: 

7592 array_type = ArrayType.global_kinetic_energy 

7593 value = ( 

7594 self.arrays[array_type][i_timestep] 

7595 if array_type in self.arrays 

7596 else self.header.ftype(0.0) 

7597 ) 

7598 byte_checksum += fp.write(settings.pack(value, dtype_hint=np.floating)) 

7599 

7600 # GLOBAL INTERNAL ENERGY 

7601 if n_global_vars >= 2: 

7602 array_type = ArrayType.global_internal_energy 

7603 value = ( 

7604 self.arrays[array_type][i_timestep] 

7605 if array_type in self.arrays 

7606 else self.header.ftype(0.0) 

7607 ) 

7608 byte_checksum += fp.write(settings.pack(value, dtype_hint=np.floating)) 

7609 

7610 # GLOBAL TOTAL ENERGY 

7611 if n_global_vars >= 3: 

7612 array_type = ArrayType.global_total_energy 

7613 value = ( 

7614 self.arrays[array_type][i_timestep] 

7615 if array_type in self.arrays 

7616 else self.header.ftype(0.0) 

7617 ) 

7618 byte_checksum += fp.write(settings.pack(value, dtype_hint=np.floating)) 

7619 

7620 # GLOBAL VELOCITY 

7621 if n_global_vars >= 6: 

7622 self.check_array_dims({ArrayType.global_velocity: 1}, "vx_vy_vz", 3) 

7623 array_type = ArrayType.global_velocity 

7624 array = ( 

7625 self.arrays[array_type][i_timestep] 

7626 if array_type in self.arrays 

7627 else np.zeros(3, self.header.ftype) 

7628 ) 

7629 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7630 

7631 # PARTS 

7632 # 

7633 # Parts always need special love since dyna 

7634 # writes many dummy parts 

7635 _check_ndim( 

7636 self, 

7637 { 

7638 ArrayType.part_internal_energy: ["n_timesteps", "n_parts"], 

7639 ArrayType.part_kinetic_energy: ["n_timesteps", "n_parts"], 

7640 ArrayType.part_velocity: ["n_timesteps", "n_parts", "vx_vy_vz"], 

7641 ArrayType.part_mass: ["n_timesteps", "n_parts"], 

7642 ArrayType.part_hourglass_energy: ["n_timesteps", "n_parts"], 

7643 }, 

7644 ) 

7645 array_dims = { 

7646 ArrayType.global_timesteps: 0, 

7647 ArrayType.part_internal_energy: 0, 

7648 ArrayType.part_kinetic_energy: 0, 

7649 ArrayType.part_velocity: 0, 

7650 ArrayType.part_mass: 0, 

7651 ArrayType.part_hourglass_energy: 0, 

7652 } 

7653 self.check_array_dims(array_dims, "n_timesteps") 

7654 

7655 self.check_array_dims({ArrayType.part_velocity: 2}, "vx_vy_vz", 3) 

7656 

7657 n_parts = settings.header["nmmat"] 

7658 

7659 def _write_part_field(array_type: str, default_shape: Union[int, Tuple], dtype: np.dtype): 

7660 array = ( 

7661 self.arrays[array_type][i_timestep] 

7662 if array_type in self.arrays 

7663 else np.zeros(default_shape, self.header.ftype) 

7664 ) 

7665 

7666 if len(array): 

7667 dummy_array = array 

7668 return fp.write(settings.pack(dummy_array, dtype_hint=np.floating)) 

7669 

7670 return 0 

7671 

7672 # PART INTERNAL ENERGY 

7673 if n_global_vars >= 6 + n_parts: 

7674 byte_checksum += _write_part_field( 

7675 ArrayType.part_internal_energy, n_parts, settings.ftype 

7676 ) 

7677 

7678 # PART KINETIC ENERGY 

7679 if n_global_vars >= 6 + 2 * n_parts: 

7680 byte_checksum += _write_part_field( 

7681 ArrayType.part_kinetic_energy, n_parts, settings.ftype 

7682 ) 

7683 

7684 # PART VELOCITY 

7685 if n_global_vars >= 6 + 5 * n_parts: 

7686 byte_checksum += _write_part_field( 

7687 ArrayType.part_velocity, (n_parts, 3), settings.ftype 

7688 ) 

7689 

7690 # PART MASS 

7691 if n_global_vars >= 6 + 6 * n_parts: 

7692 byte_checksum += _write_part_field(ArrayType.part_mass, n_parts, settings.ftype) 

7693 

7694 # PART HOURGLASS ENERGY 

7695 if n_global_vars >= 6 + 7 * n_parts: 

7696 byte_checksum += _write_part_field( 

7697 ArrayType.part_hourglass_energy, n_parts, settings.ftype 

7698 ) 

7699 

7700 # RIGID WALL 

7701 array_dims = { 

7702 ArrayType.global_timesteps: 0, 

7703 ArrayType.rigid_wall_force: 0, 

7704 ArrayType.rigid_wall_position: 0, 

7705 } 

7706 self.check_array_dims(array_dims, "n_timesteps") 

7707 array_dims = { 

7708 ArrayType.rigid_wall_force: 1, 

7709 ArrayType.rigid_wall_position: 1, 

7710 } 

7711 self.check_array_dims(array_dims, "n_rigid_walls") 

7712 self.check_array_dims({ArrayType.rigid_wall_position: 2}, "x_y_z", 3) 

7713 

7714 n_rigid_wall_vars = settings.header["n_rigid_wall_vars"] 

7715 n_rigid_walls = settings.header["n_rigid_walls"] 

7716 if n_global_vars >= 6 + 7 * n_parts + n_rigid_wall_vars * n_rigid_walls: 

7717 if n_rigid_wall_vars >= 1: 

7718 array = self.arrays[ArrayType.rigid_wall_force][i_timestep] 

7719 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7720 if n_rigid_wall_vars >= 4: 

7721 array = self.arrays[ArrayType.rigid_wall_position][i_timestep] 

7722 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7723 

7724 # check byte checksum 

7725 # pylint: disable = invalid-name 

7726 TIME_WORDSIZE = 1 

7727 byte_checksum_target = (TIME_WORDSIZE + settings.header["nglbv"]) * settings.wordsize 

7728 if byte_checksum != byte_checksum_target: 

7729 msg = ( 

7730 "byte checksum wrong: " 

7731 f"{byte_checksum_target} (header) != {byte_checksum} (checksum)" 

7732 ) 

7733 raise RuntimeError(msg) 

7734 

7735 # log 

7736 msg = "%s wrote %d bytes." 

7737 LOGGER.debug(msg, "_write_states_globals", byte_checksum) 

7738 

7739 return byte_checksum 

7740 

7741 def _write_states_nodes( 

7742 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

7743 ) -> int: 

7744 

7745 n_nodes = settings.header["numnp"] 

7746 if n_nodes <= 0: 

7747 return 0 

7748 

7749 _check_ndim( 

7750 self, 

7751 { 

7752 ArrayType.node_displacement: ["n_timesteps", "n_nodes", "x_y_z"], 

7753 ArrayType.node_velocity: ["n_timesteps", "n_nodes", "vx_vy_vz"], 

7754 ArrayType.node_acceleration: ["n_timesteps", "n_nodes", "ax_ay_az"], 

7755 ArrayType.node_heat_flux: ["n_timesteps", "n_nodes", "hx_hy_hz"], 

7756 # INFO: cannot check since it may have 1 or 3 values per node 

7757 # ArrayType.node_temperature: ["n_timesteps","n_nodes"], 

7758 ArrayType.node_mass_scaling: ["n_timesteps", "n_nodes"], 

7759 ArrayType.node_temperature_gradient: ["n_timesteps", "n_nodes"], 

7760 ArrayType.node_residual_forces: ["n_timesteps", "n_nodes", "fx_fy_fz"], 

7761 ArrayType.node_residual_moments: ["n_timesteps", "n_nodes", "mx_my_mz"], 

7762 }, 

7763 ) 

7764 array_dims = { 

7765 ArrayType.global_timesteps: 0, 

7766 ArrayType.node_displacement: 0, 

7767 ArrayType.node_velocity: 0, 

7768 ArrayType.node_acceleration: 0, 

7769 ArrayType.node_heat_flux: 0, 

7770 ArrayType.node_temperature: 0, 

7771 ArrayType.node_mass_scaling: 0, 

7772 ArrayType.node_temperature_gradient: 0, 

7773 ArrayType.node_residual_forces: 0, 

7774 ArrayType.node_residual_moments: 0, 

7775 } 

7776 self.check_array_dims(array_dims, "n_timesteps") 

7777 array_dims = { 

7778 ArrayType.node_coordinates: 0, 

7779 ArrayType.node_displacement: 1, 

7780 ArrayType.node_velocity: 1, 

7781 ArrayType.node_acceleration: 1, 

7782 ArrayType.node_heat_flux: 1, 

7783 ArrayType.node_temperature: 1, 

7784 ArrayType.node_mass_scaling: 1, 

7785 ArrayType.node_temperature_gradient: 1, 

7786 ArrayType.node_residual_forces: 1, 

7787 ArrayType.node_residual_moments: 1, 

7788 } 

7789 self.check_array_dims(array_dims, "n_nodes") 

7790 self.check_array_dims({ArrayType.node_heat_flux: 2}, "x_y_z", 3) 

7791 self.check_array_dims({ArrayType.node_displacement: 2}, "dx_dy_dz", 3) 

7792 self.check_array_dims({ArrayType.node_velocity: 2}, "vx_vy_vz", 3) 

7793 self.check_array_dims({ArrayType.node_acceleration: 2}, "ax_ay_az", 3) 

7794 self.check_array_dims({ArrayType.node_residual_forces: 2}, "fx_fy_fz", 3) 

7795 self.check_array_dims({ArrayType.node_residual_moments: 2}, "mx_my_mz", 3) 

7796 

7797 byte_checksum = 0 

7798 

7799 it = settings.header["it"] 

7800 has_mass_scaling = False 

7801 if it >= 10: 

7802 it -= 10 

7803 has_mass_scaling = True 

7804 

7805 n_nodes = settings.header["numnp"] 

7806 

7807 # NODE DISPLACEMENT 

7808 if settings.header["iu"]: 

7809 array = self.arrays[ArrayType.node_displacement][i_timestep] 

7810 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7811 

7812 if it != 0: 

7813 

7814 # NODE TEMPERATURES 

7815 array_type = ArrayType.node_temperature 

7816 array = ( 

7817 self.arrays[array_type][i_timestep] 

7818 if array_type in self.arrays 

7819 else np.zeros(n_nodes, dtype=settings.ftype) 

7820 ) 

7821 

7822 # just 1 temperature per node 

7823 if it < 3: 

7824 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7825 # 3 temperatures per node 

7826 else: 

7827 self.check_array_dims({ArrayType.node_temperature: 2}, "node_layer", 3) 

7828 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7829 

7830 # NODE HEAT FLUX 

7831 if it >= 2: 

7832 array = self.arrays[ArrayType.node_heat_flux][i_timestep] 

7833 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7834 

7835 # NODE MASS SCALING 

7836 if has_mass_scaling: 

7837 array = self.arrays[ArrayType.node_mass_scaling][i_timestep] 

7838 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7839 

7840 # NODE TEMPERATURE GRADIENT 

7841 if settings.has_node_temperature_gradient: 

7842 array = self.arrays[ArrayType.node_temperature_gradient][i_timestep] 

7843 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7844 

7845 # NODE RESIDUAL FORCES 

7846 if settings.has_node_residual_forces: 

7847 array = ( 

7848 self.arrays[ArrayType.node_residual_forces][i_timestep] 

7849 if ArrayType.node_residual_forces in self.arrays 

7850 else np.zeros((n_nodes, 3), dtype=settings.ftype) 

7851 ) 

7852 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7853 

7854 # NODE RESIDUAL MOMENTS 

7855 if settings.has_node_residual_moments: 

7856 array = ( 

7857 self.arrays[ArrayType.node_residual_moments][i_timestep] 

7858 if ArrayType.node_residual_forces in self.arrays 

7859 else np.zeros((n_nodes, 3), dtype=settings.ftype) 

7860 ) 

7861 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7862 

7863 # NODE VELOCITY 

7864 if settings.header["iv"]: 

7865 array = self.arrays[ArrayType.node_velocity][i_timestep] 

7866 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7867 

7868 # NODE ACCELERATION 

7869 if settings.header["ia"]: 

7870 array = self.arrays[ArrayType.node_acceleration][i_timestep] 

7871 byte_checksum += fp.write(settings.pack(array, dtype_hint=np.floating)) 

7872 

7873 # check the checksum 

7874 n_thermal_vars = 0 

7875 if settings.header["it"] % 10 == 1: 

7876 n_thermal_vars = 1 

7877 elif settings.header["it"] % 10 == 2: 

7878 n_thermal_vars = 4 

7879 elif settings.header["it"] % 10 == 3: 

7880 n_thermal_vars = 6 

7881 

7882 if settings.header["it"] // 10 == 1: 

7883 n_thermal_vars += 1 

7884 

7885 n_temp_gradient_vars = settings.has_node_temperature_gradient 

7886 n_residual_forces_vars = settings.has_node_residual_forces * 3 

7887 n_residual_moments_vars = settings.has_node_residual_moments * 3 

7888 

7889 # pylint: disable = invalid-name 

7890 NDIM = 3 

7891 byte_checksum_target = ( 

7892 ( 

7893 (settings.header["iu"] + settings.header["iv"] + settings.header["ia"]) * NDIM 

7894 + n_thermal_vars 

7895 + n_temp_gradient_vars 

7896 + n_residual_forces_vars 

7897 + n_residual_moments_vars 

7898 ) 

7899 * settings.wordsize 

7900 * settings.header["numnp"] 

7901 ) 

7902 if byte_checksum != byte_checksum_target: 

7903 msg = ( 

7904 "byte checksum wrong: " 

7905 "{byte_checksum_target} (header) != {byte_checksum} (checksum)" 

7906 ) 

7907 raise RuntimeError(msg) 

7908 

7909 # log 

7910 msg = "%s wrote %d bytes." 

7911 LOGGER.debug(msg, "_write_states_nodes", byte_checksum) 

7912 

7913 return byte_checksum 

7914 

7915 def _write_states_solid_thermal_data( 

7916 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

7917 ) -> int: 

7918 

7919 if settings.header["nt3d"] <= 0: 

7920 return 0 

7921 

7922 _check_ndim( 

7923 self, 

7924 { 

7925 ArrayType.element_solid_thermal_data: [ 

7926 "n_timesteps", 

7927 "n_solids", 

7928 "n_solids_thermal_vars", 

7929 ] 

7930 }, 

7931 ) 

7932 

7933 array_dims = { 

7934 ArrayType.global_timesteps: 0, 

7935 ArrayType.element_solid_thermal_data: 0, 

7936 } 

7937 self.check_array_dims(array_dims, "n_timesteps") 

7938 

7939 array_dims = { 

7940 ArrayType.element_solid_node_indexes: 0, 

7941 ArrayType.element_solid_thermal_data: 1, 

7942 } 

7943 self.check_array_dims(array_dims, "n_solids") 

7944 

7945 array = self.arrays[ArrayType.element_solid_thermal_data][i_timestep] 

7946 n_bytes_written = fp.write(settings.pack(array, dtype_hint=np.floating)) 

7947 

7948 # check bytes 

7949 n_bytes_expected = ( 

7950 settings.header["nt3d"] * abs(settings.header["nel8"]) * settings.wordsize 

7951 ) 

7952 if n_bytes_expected != n_bytes_written: 

7953 msg = ( 

7954 "byte checksum wrong: " 

7955 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

7956 ) 

7957 raise RuntimeError(msg) 

7958 

7959 # log 

7960 msg = "%s wrote %d bytes." 

7961 LOGGER.debug(msg, "_write_states_thermal_data", n_bytes_written) 

7962 

7963 return n_bytes_written 

7964 

7965 def _write_states_solids( 

7966 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

7967 ) -> int: 

7968 

7969 n_solids = abs(settings.header["nel8"]) 

7970 n_solid_vars = settings.header["nv3d"] 

7971 n_solid_layers = settings.n_solid_layers 

7972 

7973 if n_solids == 0 or n_solid_vars <= 0: 

7974 return 0 

7975 

7976 _check_ndim( 

7977 self, 

7978 { 

7979 ArrayType.element_solid_stress: [ 

7980 "n_timesteps", 

7981 "n_solids", 

7982 "n_solid_layers", 

7983 "σx_σy_σz_σxy_σyz_σxz", 

7984 ], 

7985 ArrayType.element_solid_effective_plastic_strain: [ 

7986 "n_timesteps", 

7987 "n_solid_layers", 

7988 "n_solids", 

7989 ], 

7990 ArrayType.element_solid_history_variables: [ 

7991 "n_timesteps", 

7992 "n_solids", 

7993 "n_solid_layers", 

7994 "n_solid_history_vars", 

7995 ], 

7996 ArrayType.element_solid_strain: [ 

7997 "n_timesteps", 

7998 "n_solids", 

7999 "n_solid_layers", 

8000 "εx_εy_εz_εxy_εyz_εxz", 

8001 ], 

8002 ArrayType.element_solid_plastic_strain_tensor: [ 

8003 "n_timesteps", 

8004 "n_solids", 

8005 "n_solid_layers", 

8006 "εx_εy_εz_εxy_εyz_εxz", 

8007 ], 

8008 ArrayType.element_solid_thermal_strain_tensor: [ 

8009 "n_timesteps", 

8010 "n_solids", 

8011 "n_solid_layers", 

8012 "εx_εy_εz_εxy_εyz_εxz", 

8013 ], 

8014 }, 

8015 ) 

8016 

8017 array_dims = { 

8018 ArrayType.global_timesteps: 0, 

8019 ArrayType.element_solid_stress: 0, 

8020 ArrayType.element_solid_effective_plastic_strain: 0, 

8021 ArrayType.element_solid_history_variables: 0, 

8022 ArrayType.element_solid_strain: 0, 

8023 ArrayType.element_solid_plastic_strain_tensor: 0, 

8024 ArrayType.element_solid_thermal_strain_tensor: 0, 

8025 } 

8026 self.check_array_dims(array_dims, "n_timesteps") 

8027 

8028 array_dims = { 

8029 ArrayType.element_solid_node_indexes: 0, 

8030 ArrayType.element_solid_stress: 1, 

8031 ArrayType.element_solid_effective_plastic_strain: 1, 

8032 ArrayType.element_solid_history_variables: 1, 

8033 ArrayType.element_solid_strain: 1, 

8034 ArrayType.element_solid_plastic_strain_tensor: 1, 

8035 ArrayType.element_solid_thermal_strain_tensor: 1, 

8036 } 

8037 self.check_array_dims(array_dims, "n_solids") 

8038 

8039 array_dims = { 

8040 ArrayType.element_solid_stress: 2, 

8041 ArrayType.element_solid_effective_plastic_strain: 2, 

8042 ArrayType.element_solid_history_variables: 2, 

8043 ArrayType.element_solid_strain: 2, 

8044 ArrayType.element_solid_plastic_strain_tensor: 2, 

8045 ArrayType.element_solid_thermal_strain_tensor: 2, 

8046 } 

8047 self.check_array_dims(array_dims, "n_solid_layers") 

8048 

8049 self.check_array_dims({ArrayType.element_solid_stress: 3}, "σx_σy_σz_σxy_σyz_σxz", 6) 

8050 

8051 self.check_array_dims({ArrayType.element_solid_strain: 3}, "εx_εy_εz_εxy_εyz_εxz", 6) 

8052 

8053 self.check_array_dims( 

8054 {ArrayType.element_solid_plastic_strain_tensor: 3}, "εx_εy_εz_εxy_εyz_εxz", 6 

8055 ) 

8056 

8057 self.check_array_dims( 

8058 {ArrayType.element_solid_thermal_strain_tensor: 3}, "εx_εy_εz_εxy_εyz_εxz", 6 

8059 ) 

8060 

8061 # allocate array 

8062 solid_data = np.zeros( 

8063 (n_solids, n_solid_layers, n_solid_vars // n_solid_layers), dtype=settings.ftype 

8064 ) 

8065 

8066 # SOLID STRESS 

8067 if ArrayType.element_solid_stress in self.arrays: 

8068 try: 

8069 array = self.arrays[ArrayType.element_solid_stress][i_timestep] 

8070 solid_data[:, :, 0:6] = array 

8071 except Exception: 

8072 trb_msg = traceback.format_exc() 

8073 msg = "A failure in %s was caught:\n%s" 

8074 LOGGER.warning(msg, "_write_states_solids, element_solid_stress", trb_msg) 

8075 

8076 # SOLID EFFECTIVE PSTRAIN 

8077 if ArrayType.element_solid_effective_plastic_strain in self.arrays: 

8078 try: 

8079 array = self.arrays[ArrayType.element_solid_effective_plastic_strain][i_timestep] 

8080 solid_data[:, :, 6] = array 

8081 except Exception: 

8082 trb_msg = traceback.format_exc() 

8083 msg = "A failure in %s was caught:\n%s" 

8084 LOGGER.warning( 

8085 msg, "_write_states_solids, element_solid_effective_plastic_strain", trb_msg 

8086 ) 

8087 

8088 # SOLID HISTORY VARIABLES 

8089 # (strains, pstrain tensor and thermal tensor are excluded here) 

8090 has_strain = settings.header["istrn"] 

8091 n_solid_history_variables = ( 

8092 settings.header["neiph"] 

8093 - 6 * has_strain 

8094 - 6 * settings.has_plastic_strain_tensor 

8095 - 6 * settings.has_thermal_strain_tensor 

8096 ) 

8097 

8098 if n_solid_history_variables: 

8099 try: 

8100 array = self.arrays[ArrayType.element_solid_history_variables][i_timestep] 

8101 solid_data[:, :, 7 : 7 + n_solid_history_variables] = array 

8102 except Exception: 

8103 trb_msg = traceback.format_exc() 

8104 msg = "A failure in %s was caught:\n%s" 

8105 LOGGER.warning( 

8106 msg, "_write_states_solids, element_solid_history_variables", trb_msg 

8107 ) 

8108 

8109 # SOLID STRAIN 

8110 if has_strain and ArrayType.element_solid_strain in self.arrays: 

8111 try: 

8112 array = self.arrays[ArrayType.element_solid_strain][i_timestep] 

8113 offset = 7 + n_solid_history_variables 

8114 solid_data[:, :, offset : offset + 6] = array 

8115 except Exception: 

8116 trb_msg = traceback.format_exc() 

8117 msg = "A failure in %s was caught:\n%s" 

8118 LOGGER.warning(msg, "_write_states_solids, element_solid_strain", trb_msg) 

8119 

8120 # PLASTIC STRAIN TENSOR 

8121 if ( 

8122 settings.has_plastic_strain_tensor 

8123 and ArrayType.element_solid_plastic_strain_tensor in self.arrays 

8124 ): 

8125 try: 

8126 array = self.arrays[ArrayType.element_solid_plastic_strain_tensor][i_timestep] 

8127 offset = 7 + n_solid_history_variables + 6 * has_strain 

8128 solid_data[:, :, offset : offset + 6] = array 

8129 except Exception: 

8130 trb_msg = traceback.format_exc() 

8131 msg = "A failure in %s was caught:\n%s" 

8132 LOGGER.warning( 

8133 msg, "_write_states_solids, element_solid_plastic_strain_tensor", trb_msg 

8134 ) 

8135 

8136 # THERMAL STRAIN TENSOR 

8137 if ( 

8138 settings.has_thermal_strain_tensor 

8139 and ArrayType.element_solid_thermal_strain_tensor in self.arrays 

8140 ): 

8141 try: 

8142 array = self.arrays[ArrayType.element_solid_thermal_strain_tensor][i_timestep] 

8143 offset = ( 

8144 7 

8145 + n_solid_history_variables 

8146 + 6 * has_strain 

8147 + 6 * settings.has_plastic_strain_tensor 

8148 ) 

8149 solid_data[:, :, offset : offset + 6] = array 

8150 except Exception: 

8151 trb_msg = traceback.format_exc() 

8152 msg = "A failure in %s was caught:\n%s" 

8153 LOGGER.warning( 

8154 msg, "_write_states_solids, element_solid_thermal_strain_tensor", trb_msg 

8155 ) 

8156 

8157 n_bytes_written = fp.write(settings.pack(solid_data, dtype_hint=np.floating)) 

8158 

8159 # check bytes 

8160 n_bytes_expected = ( 

8161 settings.header["nv3d"] * abs(settings.header["nel8"]) * settings.wordsize 

8162 ) 

8163 if n_bytes_expected != n_bytes_written: 

8164 msg = ( 

8165 "byte checksum wrong: " 

8166 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

8167 ) 

8168 raise RuntimeError(msg) 

8169 

8170 # log 

8171 msg = "%s wrote %d bytes." 

8172 LOGGER.debug(msg, "_write_states_solids", n_bytes_written) 

8173 

8174 return n_bytes_written 

8175 

8176 def _write_states_tshells( 

8177 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

8178 ) -> int: 

8179 

8180 n_tshells = settings.header["nelth"] 

8181 n_tshell_vars = settings.header["nv3dt"] 

8182 if n_tshells <= 0 or n_tshell_vars <= 0: 

8183 return 0 

8184 

8185 _check_ndim( 

8186 self, 

8187 { 

8188 ArrayType.element_tshell_stress: [ 

8189 "n_timesteps", 

8190 "n_tshells", 

8191 "n_shell_layers", 

8192 "σx_σy_σz_σxy_σyz_σxz", 

8193 ], 

8194 ArrayType.element_tshell_strain: [ 

8195 "n_timesteps", 

8196 "n_tshells", 

8197 "upper_lower", 

8198 "εx_εy_εz_εxy_εyz_εxz", 

8199 ], 

8200 ArrayType.element_tshell_effective_plastic_strain: [ 

8201 "n_timesteps", 

8202 "n_tshells", 

8203 "n_shell_layers", 

8204 ], 

8205 ArrayType.element_tshell_history_variables: [ 

8206 "n_timesteps", 

8207 "n_tshells", 

8208 "n_shell_layers", 

8209 "n_tshell_history_vars", 

8210 ], 

8211 }, 

8212 ) 

8213 

8214 array_dims = { 

8215 ArrayType.global_timesteps: 0, 

8216 ArrayType.element_tshell_stress: 0, 

8217 ArrayType.element_tshell_strain: 0, 

8218 ArrayType.element_tshell_effective_plastic_strain: 0, 

8219 ArrayType.element_tshell_history_variables: 0, 

8220 } 

8221 self.check_array_dims(array_dims, "n_timesteps") 

8222 

8223 array_dims = { 

8224 ArrayType.element_tshell_node_indexes: 0, 

8225 ArrayType.element_tshell_stress: 1, 

8226 ArrayType.element_tshell_strain: 1, 

8227 ArrayType.element_tshell_effective_plastic_strain: 1, 

8228 ArrayType.element_tshell_history_variables: 1, 

8229 } 

8230 self.check_array_dims(array_dims, "n_tshells") 

8231 

8232 self.check_array_dims({ArrayType.element_tshell_stress: 3}, "σx_σy_σz_σxy_σyz_σxz", 6) 

8233 

8234 self.check_array_dims({ArrayType.element_tshell_strain: 2}, "upper_lower", 2) 

8235 

8236 self.check_array_dims({ArrayType.element_tshell_strain: 3}, "εx_εy_εz_εxy_εyz_εxz", 6) 

8237 

8238 has_stress = settings.header["ioshl1"] == 1000 

8239 has_pstrain = settings.header["ioshl2"] == 1000 

8240 n_history_vars = settings.header["neips"] 

8241 n_layer_vars = settings.n_shell_layers * (6 * has_stress + has_pstrain + n_history_vars) 

8242 

8243 tshell_data = np.zeros((n_tshells, n_tshell_vars), settings.ftype) 

8244 tshell_layer_data = tshell_data[:, :n_layer_vars].reshape( 

8245 (n_tshells, settings.n_shell_layers, -1) 

8246 ) 

8247 tshell_nonlayer_data = tshell_data[:, n_layer_vars:] 

8248 

8249 # TSHELL STRESS 

8250 if has_stress: 

8251 if ArrayType.element_tshell_stress in self.arrays: 

8252 array = self.arrays[ArrayType.element_tshell_stress][i_timestep] 

8253 tshell_layer_data[:, :, 0:6] = array 

8254 

8255 # TSHELL EFF. PLASTIC STRAIN 

8256 if has_pstrain: 

8257 if ArrayType.element_tshell_effective_plastic_strain in self.arrays: 

8258 array = self.arrays[ArrayType.element_tshell_effective_plastic_strain][i_timestep] 

8259 start_index = 6 * has_stress 

8260 tshell_layer_data[:, :, start_index] = array 

8261 

8262 # TSHELL HISTORY VARS 

8263 if n_history_vars != 0: 

8264 if ArrayType.element_tshell_history_variables in self.arrays: 

8265 array = self.arrays[ArrayType.element_tshell_history_variables][i_timestep] 

8266 start_index = 6 * has_stress + has_pstrain 

8267 end_index = start_index + array.shape[2] 

8268 tshell_layer_data[:, :, start_index:end_index] = array 

8269 

8270 # TSHELL STRAIN 

8271 if settings.header["istrn"]: 

8272 if ArrayType.element_tshell_strain in self.arrays: 

8273 array = self.arrays[ArrayType.element_tshell_strain][i_timestep] 

8274 start_index = 6 * has_stress + has_pstrain + n_history_vars 

8275 tshell_nonlayer_data[:, :] = array.reshape(n_tshells, 12) 

8276 

8277 n_bytes_written = fp.write(settings.pack(tshell_data, dtype_hint=np.floating)) 

8278 

8279 # check bytes 

8280 n_bytes_expected = ( 

8281 settings.header["nv3dt"] * abs(settings.header["nelth"]) * settings.wordsize 

8282 ) 

8283 if n_bytes_expected != n_bytes_written: 

8284 msg = ( 

8285 "byte checksum wrong: " 

8286 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

8287 ) 

8288 raise RuntimeError(msg) 

8289 

8290 # log 

8291 msg = "%s wrote %d bytes." 

8292 LOGGER.debug(msg, "_write_states_tshells", n_bytes_written) 

8293 

8294 return n_bytes_written 

8295 

8296 def _write_states_beams( 

8297 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

8298 ) -> int: 

8299 

8300 n_beams = settings.header["nel2"] 

8301 n_beam_vars = settings.header["nv1d"] 

8302 if n_beams <= 0 or n_beam_vars <= 0: 

8303 return 0 

8304 

8305 n_beam_layers = settings.header["beamip"] 

8306 n_beam_history_vars = settings.header["neipb"] 

8307 

8308 _check_ndim( 

8309 self, 

8310 { 

8311 ArrayType.element_beam_axial_force: ["n_timesteps", "n_beams"], 

8312 ArrayType.element_beam_shear_force: ["n_timesteps", "n_beams", "fs_ft"], 

8313 ArrayType.element_beam_bending_moment: ["n_timesteps", "n_beams", "ms_mt"], 

8314 ArrayType.element_beam_torsion_moment: ["n_timesteps", "n_beams"], 

8315 ArrayType.element_beam_shear_stress: [ 

8316 "n_timesteps", 

8317 "n_beams", 

8318 "n_beam_layers", 

8319 "σrs_σtr", 

8320 ], 

8321 ArrayType.element_beam_axial_stress: ["n_timesteps", "n_beams", "n_beam_layers"], 

8322 ArrayType.element_beam_plastic_strain: ["n_timesteps", "n_beams", "n_beam_layers"], 

8323 ArrayType.element_beam_axial_strain: ["n_timesteps", "n_beams", "n_beam_layers"], 

8324 ArrayType.element_beam_history_vars: [ 

8325 "n_timesteps", 

8326 "n_beams", 

8327 "n_beam_layers+3", 

8328 "n_beam_history_vars", 

8329 ], 

8330 }, 

8331 ) 

8332 

8333 array_dims = { 

8334 ArrayType.global_timesteps: 0, 

8335 ArrayType.element_beam_axial_force: 0, 

8336 ArrayType.element_beam_shear_force: 0, 

8337 ArrayType.element_beam_bending_moment: 0, 

8338 ArrayType.element_beam_torsion_moment: 0, 

8339 ArrayType.element_beam_shear_stress: 0, 

8340 ArrayType.element_beam_axial_stress: 0, 

8341 ArrayType.element_beam_plastic_strain: 0, 

8342 ArrayType.element_beam_axial_strain: 0, 

8343 ArrayType.element_beam_history_vars: 0, 

8344 } 

8345 self.check_array_dims(array_dims, "n_timesteps") 

8346 array_dims = { 

8347 ArrayType.element_beam_axial_force: 1, 

8348 ArrayType.element_beam_shear_force: 1, 

8349 ArrayType.element_beam_bending_moment: 1, 

8350 ArrayType.element_beam_torsion_moment: 1, 

8351 ArrayType.element_beam_shear_stress: 1, 

8352 ArrayType.element_beam_axial_stress: 1, 

8353 ArrayType.element_beam_plastic_strain: 1, 

8354 ArrayType.element_beam_axial_strain: 1, 

8355 ArrayType.element_beam_history_vars: 1, 

8356 } 

8357 self.check_array_dims(array_dims, "n_beams") 

8358 self.check_array_dims({ArrayType.element_beam_shear_force: 2}, "fs_ft", 2) 

8359 self.check_array_dims({ArrayType.element_beam_bending_moment: 2}, "ms_mt", 2) 

8360 array_dims = { 

8361 ArrayType.element_beam_shear_stress: 2, 

8362 ArrayType.element_beam_axial_stress: 2, 

8363 ArrayType.element_beam_plastic_strain: 2, 

8364 ArrayType.element_beam_axial_strain: 2, 

8365 ArrayType.element_beam_history_vars: 2, 

8366 } 

8367 n_beam_layers = self.check_array_dims(array_dims, "n_beam_layers") 

8368 self.check_array_dims({ArrayType.element_beam_shear_stress: 3}, "σrs_σtr", 2) 

8369 self.check_array_dims( 

8370 {ArrayType.element_beam_history_vars: 2}, "n_modes", n_beam_layers + 3 

8371 ) 

8372 

8373 # allocate buffer 

8374 beam_data = np.zeros((n_beams, n_beam_vars), dtype=settings.ftype) 

8375 n_layer_vars_total = 5 * n_beam_layers 

8376 beam_layer_data = beam_data[:, 6 : 6 + n_layer_vars_total].reshape( 

8377 (n_beams, n_beam_layers, 5) 

8378 ) 

8379 beam_history_vars = beam_data[:, 6 + n_layer_vars_total :].reshape( 

8380 (n_beams, 3 + n_beam_layers, n_beam_history_vars) 

8381 ) 

8382 

8383 # BEAM AXIAL FORCE 

8384 if ArrayType.element_beam_axial_force in self.arrays: 

8385 array = self.arrays[ArrayType.element_beam_axial_force][i_timestep] 

8386 beam_data[:, 0] = array 

8387 

8388 # BEAM SHEAR FORCE 

8389 if ArrayType.element_beam_shear_force in self.arrays: 

8390 array = self.arrays[ArrayType.element_beam_shear_force][i_timestep] 

8391 beam_data[:, 1:3] = array 

8392 

8393 # BEAM BENDING MOMENTUM 

8394 if ArrayType.element_beam_bending_moment in self.arrays: 

8395 array = self.arrays[ArrayType.element_beam_bending_moment][i_timestep] 

8396 beam_data[:, 3:5] = array 

8397 

8398 # BEAM TORSION MOMENTUM 

8399 if ArrayType.element_beam_torsion_moment in self.arrays: 

8400 array = self.arrays[ArrayType.element_beam_torsion_moment][i_timestep] 

8401 beam_data[:, 5] = array 

8402 

8403 if n_beam_layers: 

8404 array = ( 

8405 self.arrays[ArrayType.element_beam_axial_stress][i_timestep] 

8406 if ArrayType.element_beam_axial_stress in self.arrays 

8407 else np.zeros((n_beams, n_beam_layers), dtype=settings.ftype) 

8408 ) 

8409 beam_layer_data[:, :, 0] = array 

8410 

8411 array = ( 

8412 self.arrays[ArrayType.element_beam_shear_stress][i_timestep] 

8413 if ArrayType.element_beam_shear_stress in self.arrays 

8414 else np.zeros((n_beams, n_beam_layers, 2), dtype=settings.ftype) 

8415 ) 

8416 beam_layer_data[:, :, 1:3] = array 

8417 

8418 array = ( 

8419 self.arrays[ArrayType.element_beam_plastic_strain][i_timestep] 

8420 if ArrayType.element_beam_plastic_strain in self.arrays 

8421 else np.zeros((n_beams, n_beam_layers), dtype=settings.ftype) 

8422 ) 

8423 beam_layer_data[:, :, 3] = array 

8424 

8425 array = ( 

8426 self.arrays[ArrayType.element_beam_axial_strain][i_timestep] 

8427 if ArrayType.element_beam_axial_strain in self.arrays 

8428 else np.zeros((n_beams, n_beam_layers), dtype=settings.ftype) 

8429 ) 

8430 beam_layer_data[:, :, 4] = array 

8431 

8432 # BEAM HISTORY VARIABLES 

8433 if n_beam_history_vars: 

8434 array = ( 

8435 self.arrays[ArrayType.element_beam_history_vars][i_timestep] 

8436 if ArrayType.element_beam_history_vars in self.arrays 

8437 else np.zeros( 

8438 (n_beams, n_beam_layers + 3, n_beam_history_vars), dtype=settings.ftype 

8439 ) 

8440 ) 

8441 beam_history_vars[:, :, :] = array 

8442 

8443 n_bytes_written = fp.write(settings.pack(beam_data, dtype_hint=np.floating)) 

8444 

8445 # check bytes 

8446 n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize 

8447 if n_bytes_expected != n_bytes_written: 

8448 msg = ( 

8449 "byte checksum wrong: " 

8450 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

8451 ) 

8452 raise RuntimeError(msg) 

8453 

8454 # log 

8455 msg = "%s wrote %d bytes." 

8456 LOGGER.debug(msg, "_write_states_tshells", n_bytes_written) 

8457 

8458 return n_bytes_written 

8459 

    def _write_states_shells(
        self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings
    ) -> int:
        """Write the shell element state data of one timestep.

        Parameters
        ----------
        fp: typing.IO[Any]
            file handle the state data is written to
        i_timestep: int
            index of the timestep to write
        settings: D3plotWriterSettings
            writer settings providing the header counters and packing helpers

        Returns
        -------
        n_bytes_written: int
            number of bytes written (0 if there are no shells with results)

        Raises
        ------
        ValueError
            If the state arrays cover a different shell count than
            n_shells - n_rigid_shells (non-d3part files only).
        RuntimeError
            If the number of written bytes does not match the header counters.

        Notes
        -----
        Shell variables are laid out per element as a layer block
        (stress, eff. plastic strain, history vars per layer) followed by
        nonlayer variables (forces, thickness/extra, strain, internal
        energy, plastic and thermal strain tensors) in this exact order.
        """

        n_shells = settings.header["nel4"]
        n_shell_vars = settings.header["nv2d"]
        n_rigid_shells = settings.n_rigid_shells
        is_d3part = self.header.filetype == D3plotFiletype.D3PART
        # d3part writes results also for rigid shells
        n_reduced_shells = n_shells if is_d3part else n_shells - n_rigid_shells

        # nothing to write for this section
        if n_reduced_shells <= 0 or n_shell_vars <= 0:
            return 0

        # ioshl flags are 1000 when the respective variable group is present
        has_stress = settings.header["ioshl1"] == 1000
        has_pstrain = settings.header["ioshl2"] == 1000
        has_forces = settings.header["ioshl3"] == 1000
        has_else = settings.header["ioshl4"] == 1000
        has_strain = settings.header["istrn"] != 0
        n_shell_history_vars = settings.header["neips"]

        _check_ndim(
            self,
            {
                ArrayType.element_shell_stress: [
                    "n_timesteps",
                    "n_shells",
                    "n_shell_layers",
                    "σx_σy_σz_σxy_σyz_σxz",
                ],
                ArrayType.element_shell_effective_plastic_strain: [
                    "n_timesteps",
                    "n_shells",
                    "n_shell_layers",
                ],
                ArrayType.element_shell_history_vars: [
                    "n_timesteps",
                    "n_shells",
                    "n_shell_layers",
                    "n_shell_history_vars",
                ],
                ArrayType.element_shell_bending_moment: ["n_timesteps", "n_shells", "mx_my_mxy"],
                ArrayType.element_shell_shear_force: ["n_timesteps", "n_shells", "qx_qy"],
                ArrayType.element_shell_normal_force: ["n_timesteps", "n_shells", "nx_ny_nxy"],
                ArrayType.element_shell_thickness: ["n_timesteps", "n_shells"],
                ArrayType.element_shell_unknown_variables: [
                    "n_timesteps",
                    "n_shells",
                    "n_extra_vars",
                ],
                ArrayType.element_shell_internal_energy: ["n_timesteps", "n_shells"],
                ArrayType.element_shell_strain: [
                    "n_timesteps",
                    "n_shells",
                    "upper_lower",
                    "εx_εy_εz_εxy_εyz_εxz",
                ],
            },
        )

        array_dims = {
            ArrayType.global_timesteps: 0,
            ArrayType.element_shell_stress: 0,
            ArrayType.element_shell_effective_plastic_strain: 0,
            ArrayType.element_shell_history_vars: 0,
            ArrayType.element_shell_bending_moment: 0,
            ArrayType.element_shell_shear_force: 0,
            ArrayType.element_shell_normal_force: 0,
            ArrayType.element_shell_thickness: 0,
            ArrayType.element_shell_unknown_variables: 0,
            ArrayType.element_shell_internal_energy: 0,
            ArrayType.element_shell_strain: 0,
        }
        self.check_array_dims(array_dims, "n_timesteps")

        array_dims = {
            ArrayType.element_shell_stress: 1,
            ArrayType.element_shell_effective_plastic_strain: 1,
            ArrayType.element_shell_history_vars: 1,
            ArrayType.element_shell_bending_moment: 1,
            ArrayType.element_shell_shear_force: 1,
            ArrayType.element_shell_normal_force: 1,
            ArrayType.element_shell_thickness: 1,
            ArrayType.element_shell_unknown_variables: 1,
            ArrayType.element_shell_internal_energy: 1,
            ArrayType.element_shell_strain: 1,
        }
        n_reduced_shells = self.check_array_dims(array_dims, "n_shells")
        if not is_d3part and n_reduced_shells != n_shells - n_rigid_shells:
            msg = (
                "Parts with mattyp 20 (rigid material) were specified."
                " For these parts no state data is output in dyna."
                " The state arrays are thus expected output data for only"
                f" {n_shells - n_rigid_shells} shells and not {n_reduced_shells}."
            )
            raise ValueError(msg)

        array_dims = {
            ArrayType.element_shell_stress: 2,
            ArrayType.element_shell_effective_plastic_strain: 2,
            ArrayType.element_shell_history_vars: 2,
        }
        n_shell_layers = self.check_array_dims(array_dims, "n_shell_layers")

        self.check_array_dims({ArrayType.element_shell_stress: 3}, "σx_σy_σz_σxy_σyz_σxz", 6)
        self.check_array_dims({ArrayType.element_shell_bending_moment: 2}, "mx_my_mxy", 3)
        self.check_array_dims({ArrayType.element_shell_shear_force: 2}, "qx_qy")
        self.check_array_dims({ArrayType.element_shell_strain: 2}, "upper_lower", 2)
        self.check_array_dims({ArrayType.element_shell_strain: 3}, "εx_εy_εz_εxy_εyz_εxz", 6)

        # allocate buffer
        # layer block first, then nonlayer block
        shell_data = np.zeros((n_reduced_shells, n_shell_vars), dtype=settings.ftype)
        n_layer_vars = has_stress * 6 + has_pstrain + n_shell_history_vars
        n_layer_vars_total = n_layer_vars * n_shell_layers

        shell_layer_data = shell_data[:, :n_layer_vars_total].reshape(
            (n_reduced_shells, n_shell_layers, n_layer_vars)
        )
        shell_nonlayer_data = shell_data[:, n_layer_vars_total:]

        # running offsets inside the per-layer variable block
        start_layer_index = 0
        end_layer_index = 0

        # SHELL STRESS
        if has_stress:
            start_layer_index = 0
            end_layer_index = 6
            if ArrayType.element_shell_stress in self.arrays:
                array = self.arrays[ArrayType.element_shell_stress][i_timestep]
                shell_layer_data[:, :, start_layer_index:end_layer_index] = array

        # EFF PSTRAIN
        if has_pstrain:
            start_layer_index = end_layer_index
            end_layer_index = start_layer_index + has_pstrain
            if ArrayType.element_shell_effective_plastic_strain in self.arrays:
                array = self.arrays[ArrayType.element_shell_effective_plastic_strain][i_timestep]
                shell_layer_data[:, :, start_layer_index:end_layer_index] = array.reshape(
                    (n_reduced_shells, n_shell_layers, 1)
                )

        # SHELL HISTORY VARS
        if n_shell_history_vars:
            start_layer_index = end_layer_index
            end_layer_index = start_layer_index + n_shell_history_vars
            if ArrayType.element_shell_history_vars in self.arrays:
                array = self.arrays[ArrayType.element_shell_history_vars][i_timestep]
                # the stored array may have fewer vars than the header declares;
                # extra slots stay zero
                n_hist_vars_arr = array.shape[2]
                end_layer_index2 = start_layer_index + min(n_hist_vars_arr, n_shell_history_vars)
                shell_layer_data[:, :, start_layer_index:end_layer_index2] = array

        # running offsets inside the nonlayer variable block
        start_index = 0
        end_index = 0

        # SHELL FORCES
        if has_forces:
            start_index = end_index
            end_index = start_index + 8

            # MOMENTUM
            if ArrayType.element_shell_bending_moment in self.arrays:
                start_index2 = start_index
                end_index2 = start_index + 3
                array = self.arrays[ArrayType.element_shell_bending_moment][i_timestep]
                shell_nonlayer_data[:, start_index2:end_index2] = array

            # SHEAR
            if ArrayType.element_shell_shear_force in self.arrays:
                start_index2 = start_index + 3
                end_index2 = start_index + 5
                array = self.arrays[ArrayType.element_shell_shear_force][i_timestep]
                shell_nonlayer_data[:, start_index2:end_index2] = array

            # NORMAL
            if ArrayType.element_shell_normal_force in self.arrays:
                start_index2 = start_index + 5
                end_index2 = start_index + 8
                array = self.arrays[ArrayType.element_shell_normal_force][i_timestep]
                shell_nonlayer_data[:, start_index2:end_index2] = array

        if has_else:
            start_index = end_index
            end_index = start_index + 3

            # THICKNESS
            if ArrayType.element_shell_thickness in self.arrays:
                start_index2 = start_index
                end_index2 = start_index + 1
                array = self.arrays[ArrayType.element_shell_thickness][i_timestep]
                shell_nonlayer_data[:, start_index2:end_index2] = array.reshape(
                    (n_reduced_shells, 1)
                )

            # ELEMENT SPECIFIC VARS
            if ArrayType.element_shell_unknown_variables in self.arrays:
                start_index2 = start_index + 1
                end_index2 = start_index + 3
                array = self.arrays[ArrayType.element_shell_unknown_variables][i_timestep]
                shell_nonlayer_data[:, start_index2:end_index2] = array

        # SHELL STRAIN
        #
        # Strain is squeezed between the 3rd and 4th var of the else block
        if has_strain:
            start_index = end_index
            end_index = start_index + 12

            if ArrayType.element_shell_strain in self.arrays:
                array = self.arrays[ArrayType.element_shell_strain][i_timestep]
                shell_nonlayer_data[:, start_index:end_index] = array.reshape(
                    (n_reduced_shells, 12)
                )

        # INTERNAL ENERGY
        # the 4th var of the else block, written after strain (see above)
        if has_else:
            start_index = end_index
            end_index = start_index + 1

            if ArrayType.element_shell_internal_energy in self.arrays:
                array = self.arrays[ArrayType.element_shell_internal_energy][i_timestep]
                shell_nonlayer_data[:, start_index:end_index] = array.reshape((n_reduced_shells, 1))

        # PLASTIC STRAIN TENSOR
        # (section was previously mislabeled as thermal strain tensor)
        if settings.has_plastic_strain_tensor:
            start_index = end_index
            end_index = start_index + n_shell_layers * 6

            if ArrayType.element_shell_plastic_strain_tensor in self.arrays:
                array = self.arrays[ArrayType.element_shell_plastic_strain_tensor][i_timestep]
                shell_nonlayer_data[:, start_index:end_index] = array.reshape(
                    (n_reduced_shells, n_shell_layers * 6)
                )

        # THERMAL STRAIN TENSOR
        # (section was previously mislabeled as plastic thermal tensor)
        if settings.has_thermal_strain_tensor:
            start_index = end_index
            end_index = start_index + 6

            if ArrayType.element_shell_thermal_strain_tensor in self.arrays:
                array = self.arrays[ArrayType.element_shell_thermal_strain_tensor][i_timestep]
                shell_nonlayer_data[:, start_index:end_index] = array.reshape((n_reduced_shells, 6))

        n_bytes_written = fp.write(settings.pack(shell_data, dtype_hint=np.floating))

        # check bytes
        # *(settings.header["nel4"]-settings.n_rigid_shells)\
        n_bytes_expected = settings.header["nv2d"] * n_reduced_shells * settings.wordsize
        if n_bytes_expected != n_bytes_written:
            msg = (
                "byte checksum wrong: "
                f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)"
            )
            raise RuntimeError(msg)

        # log
        msg = "%s wrote %d bytes."
        LOGGER.debug(msg, "_write_states_shells", n_bytes_written)

        return n_bytes_written

8719 

8720 def _write_states_deletion_info( 

8721 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

8722 ) -> int: 

8723 

8724 if settings.mdlopt <= 0: 

8725 return 0 

8726 

8727 n_bytes_written = 0 

8728 n_bytes_expected = 0 

8729 

8730 # NODE DELETION 

8731 if settings.mdlopt == 1: 

8732 

8733 _check_ndim(self, {ArrayType.node_is_alive: ["n_timesteps", "n_nodes"]}) 

8734 

8735 array_dims = { 

8736 ArrayType.global_timesteps: 0, 

8737 ArrayType.node_is_alive: 0, 

8738 } 

8739 self.check_array_dims(array_dims, "n_timesteps") 

8740 

8741 array_dims = { 

8742 ArrayType.node_coordinates: 0, 

8743 ArrayType.node_is_alive: 1, 

8744 } 

8745 self.check_array_dims(array_dims, "n_nodes") 

8746 

8747 n_nodes = settings.header["numnp"] 

8748 

8749 array = ( 

8750 self.arrays[ArrayType.node_is_alive] 

8751 if ArrayType.node_is_alive in self.arrays 

8752 else np.zeros(n_nodes, dtype=settings.ftype) 

8753 ) 

8754 

8755 n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.floating)) 

8756 

8757 # check 

8758 n_bytes_expected = settings.header["numnp"] * settings.wordsize 

8759 

8760 # ELEMENT DELETION 

8761 elif settings.mdlopt == 2: 

8762 

8763 _check_ndim( 

8764 self, 

8765 { 

8766 ArrayType.element_solid_is_alive: ["n_timesteps", "n_solids"], 

8767 ArrayType.element_shell_is_alive: ["n_timesteps", "n_shells"], 

8768 ArrayType.element_beam_is_alive: ["n_timesteps", "n_beams"], 

8769 ArrayType.element_tshell_is_alive: ["n_timesteps", "n_tshells"], 

8770 }, 

8771 ) 

8772 

8773 array_dims = { 

8774 ArrayType.global_timesteps: 0, 

8775 ArrayType.element_solid_is_alive: 0, 

8776 ArrayType.element_shell_is_alive: 0, 

8777 ArrayType.element_beam_is_alive: 0, 

8778 ArrayType.element_tshell_is_alive: 0, 

8779 } 

8780 self.check_array_dims(array_dims, "n_timesteps") 

8781 

8782 array_dims = { 

8783 ArrayType.element_solid_node_indexes: 0, 

8784 ArrayType.element_solid_is_alive: 1, 

8785 } 

8786 self.check_array_dims(array_dims, "n_solids") 

8787 

8788 array_dims = { 

8789 ArrayType.element_beam_node_indexes: 0, 

8790 ArrayType.element_beam_is_alive: 1, 

8791 } 

8792 self.check_array_dims(array_dims, "n_beams") 

8793 

8794 array_dims = { 

8795 ArrayType.element_shell_node_indexes: 0, 

8796 ArrayType.element_shell_is_alive: 1, 

8797 } 

8798 self.check_array_dims(array_dims, "n_shells") 

8799 

8800 array_dims = { 

8801 ArrayType.element_tshell_node_indexes: 0, 

8802 ArrayType.element_tshell_is_alive: 1, 

8803 } 

8804 self.check_array_dims(array_dims, "n_tshells") 

8805 

8806 n_solids = settings.header["nel8"] 

8807 n_tshells = settings.header["nelth"] 

8808 n_shells = settings.header["nel4"] 

8809 n_beams = settings.header["nel2"] 

8810 

8811 # SOLID DELETION 

8812 array = ( 

8813 self.arrays[ArrayType.element_solid_is_alive][i_timestep] 

8814 if ArrayType.element_solid_is_alive in self.arrays 

8815 else np.ones(n_solids, dtype=settings.ftype) 

8816 ) 

8817 n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.floating)) 

8818 

8819 # THICK SHELL DELETION 

8820 array = ( 

8821 self.arrays[ArrayType.element_tshell_is_alive][i_timestep] 

8822 if ArrayType.element_tshell_is_alive in self.arrays 

8823 else np.ones(n_tshells, dtype=settings.ftype) 

8824 ) 

8825 n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.floating)) 

8826 

8827 # SHELL DELETION 

8828 array = ( 

8829 self.arrays[ArrayType.element_shell_is_alive][i_timestep] 

8830 if ArrayType.element_shell_is_alive in self.arrays 

8831 else np.ones(n_shells, dtype=settings.ftype) 

8832 ) 

8833 n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.floating)) 

8834 

8835 # BEAM DELETION 

8836 array = ( 

8837 self.arrays[ArrayType.element_beam_is_alive][i_timestep] 

8838 if ArrayType.element_beam_is_alive in self.arrays 

8839 else np.ones(n_beams, dtype=settings.ftype) 

8840 ) 

8841 n_bytes_written += fp.write(settings.pack(array, dtype_hint=np.floating)) 

8842 

8843 # check 

8844 n_bytes_expected = ( 

8845 settings.header["nel2"] 

8846 + settings.header["nel4"] 

8847 + abs(settings.header["nel8"]) 

8848 + settings.header["nelth"] 

8849 ) * settings.wordsize 

8850 

8851 else: 

8852 msg = f"Invalid mdlopt flag during write process: {settings.mdlopt}" 

8853 raise RuntimeError(msg) 

8854 

8855 # check bytes 

8856 if n_bytes_expected != n_bytes_written: 

8857 msg = ( 

8858 "byte checksum wrong: " 

8859 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

8860 ) 

8861 raise RuntimeError(msg) 

8862 

8863 # log 

8864 msg = "%s wrote %d bytes." 

8865 LOGGER.debug(msg, "_write_states_deletion_info", n_bytes_written) 

8866 

8867 return n_bytes_written 

8868 

8869 def _write_states_sph( 

8870 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

8871 ) -> int: 

8872 

8873 if settings.header["nmsph"] <= 0: 

8874 return 0 

8875 

8876 _check_ndim( 

8877 self, 

8878 { 

8879 ArrayType.sph_deletion: ["n_timesteps", "n_particles"], 

8880 ArrayType.sph_radius: ["n_timesteps", "n_particles"], 

8881 ArrayType.sph_pressure: ["n_timesteps", "n_particles"], 

8882 ArrayType.sph_stress: ["n_timesteps", "n_particles", "σx_σy_σz_σxy_σyz_σxz"], 

8883 ArrayType.sph_effective_plastic_strain: ["n_timesteps", "n_particles"], 

8884 ArrayType.sph_density: ["n_timesteps", "n_particles"], 

8885 ArrayType.sph_internal_energy: ["n_timesteps", "n_particles"], 

8886 ArrayType.sph_n_neighbors: ["n_timesteps", "n_particles"], 

8887 ArrayType.sph_strain: ["n_timesteps", "n_particles", "εx_εy_εz_εxy_εyz_εxz"], 

8888 ArrayType.sph_mass: ["n_timesteps", "n_particles"], 

8889 }, 

8890 ) 

8891 

8892 array_dims = { 

8893 ArrayType.global_timesteps: 0, 

8894 ArrayType.sph_deletion: 0, 

8895 ArrayType.sph_radius: 0, 

8896 ArrayType.sph_pressure: 0, 

8897 ArrayType.sph_stress: 0, 

8898 ArrayType.sph_effective_plastic_strain: 0, 

8899 ArrayType.sph_density: 0, 

8900 ArrayType.sph_internal_energy: 0, 

8901 ArrayType.sph_n_neighbors: 0, 

8902 ArrayType.sph_strain: 0, 

8903 ArrayType.sph_mass: 0, 

8904 } 

8905 self.check_array_dims(array_dims, "n_timesteps") 

8906 

8907 array_dims = { 

8908 ArrayType.sph_node_indexes: 0, 

8909 ArrayType.sph_deletion: 1, 

8910 ArrayType.sph_radius: 1, 

8911 ArrayType.sph_pressure: 1, 

8912 ArrayType.sph_stress: 1, 

8913 ArrayType.sph_effective_plastic_strain: 1, 

8914 ArrayType.sph_density: 1, 

8915 ArrayType.sph_internal_energy: 1, 

8916 ArrayType.sph_n_neighbors: 1, 

8917 ArrayType.sph_strain: 1, 

8918 ArrayType.sph_mass: 1, 

8919 } 

8920 n_particles = self.check_array_dims(array_dims, "n_particles") 

8921 self.check_array_dims({ArrayType.sph_stress: 2}, "σx_σy_σz_σxy_σyz_σxz", 6) 

8922 self.check_array_dims({ArrayType.sph_strain: 2}, "εx_εy_εz_εxy_εyz_εxz", 6) 

8923 

8924 n_sph_variables = settings.header["numsph"] 

8925 

8926 sph_data = np.zeros((n_particles, n_sph_variables)) 

8927 

8928 start_index = 0 

8929 end_index = 0 

8930 

8931 # SPH MATERIAL AND DELETION 

8932 start_index = 0 

8933 end_index = 1 

8934 array = ( 

8935 self.arrays[ArrayType.sph_deletion][i_timestep] 

8936 if ArrayType.sph_deletion in self.arrays 

8937 else np.ones(n_particles) 

8938 ) 

8939 sph_data[:, start_index:end_index] = array 

8940 

8941 # INFLUENCE RADIUS 

8942 if settings.header["isphfg2"]: 

8943 start_index = end_index 

8944 end_index = start_index + n_particles 

8945 if ArrayType.sph_radius in self.arrays: 

8946 array = self.arrays[ArrayType.sph_radius][i_timestep] 

8947 sph_data[:, start_index:end_index] = array 

8948 

8949 # PRESSURE 

8950 if settings.header["isphfg3"]: 

8951 start_index = end_index 

8952 end_index = start_index + n_particles 

8953 if ArrayType.sph_pressure in self.arrays: 

8954 array = self.arrays[ArrayType.sph_pressure][i_timestep] 

8955 sph_data[:, start_index:end_index] = array 

8956 

8957 # STRESS 

8958 if settings.header["isphfg4"]: 

8959 start_index = end_index 

8960 end_index = start_index + 6 * n_particles 

8961 if ArrayType.sph_stress in self.arrays: 

8962 array = self.arrays[ArrayType.sph_stress][i_timestep] 

8963 sph_data[:, start_index:end_index] = array 

8964 

8965 # PSTRAIN 

8966 if settings.header["isphfg5"]: 

8967 start_index = end_index 

8968 end_index = start_index + n_particles 

8969 if ArrayType.sph_effective_plastic_strain in self.arrays: 

8970 array = self.arrays[ArrayType.sph_effective_plastic_strain][i_timestep] 

8971 sph_data[:, start_index:end_index] = array 

8972 

8973 # DENSITY 

8974 if settings.header["isphfg6"]: 

8975 start_index = end_index 

8976 end_index = start_index + n_particles 

8977 if ArrayType.sph_density in self.arrays: 

8978 array = self.arrays[ArrayType.sph_density][i_timestep] 

8979 sph_data[:, start_index:end_index] = array 

8980 

8981 # INTERNAL ENERGY 

8982 if settings.header["isphfg7"]: 

8983 start_index = end_index 

8984 end_index = start_index + n_particles 

8985 if ArrayType.sph_internal_energy in self.arrays: 

8986 array = self.arrays[ArrayType.sph_internal_energy][i_timestep] 

8987 sph_data[:, start_index:end_index] = array 

8988 

8989 # INTERNAL ENERGY 

8990 if settings.header["isphfg8"]: 

8991 start_index = end_index 

8992 end_index = start_index + n_particles 

8993 if ArrayType.sph_n_neighbors in self.arrays: 

8994 array = self.arrays[ArrayType.sph_n_neighbors][i_timestep] 

8995 sph_data[:, start_index:end_index] = array 

8996 

8997 # STRAIN 

8998 if settings.header["isphfg9"]: 

8999 start_index = end_index 

9000 end_index = start_index + n_particles * 6 

9001 if ArrayType.sph_strain in self.arrays: 

9002 array = self.arrays[ArrayType.sph_strain][i_timestep] 

9003 sph_data[:, start_index:end_index] = array 

9004 

9005 # MASS 

9006 if settings.header["isphfg10"]: 

9007 start_index = end_index 

9008 end_index = start_index + n_particles 

9009 if ArrayType.sph_mass in self.arrays: 

9010 array = self.arrays[ArrayType.sph_mass][i_timestep] 

9011 sph_data[:, start_index:end_index] = array 

9012 

9013 n_bytes_written = fp.write(settings.pack(sph_data, dtype_hint=np.floating)) 

9014 

9015 # check bytes 

9016 n_bytes_expected = ( 

9017 settings.header["nv2d"] 

9018 * (settings.header["nel4"] - settings.header["numrbe"]) 

9019 * settings.wordsize 

9020 ) 

9021 if n_bytes_expected != n_bytes_written: 

9022 msg = ( 

9023 "byte checksum wrong: " 

9024 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

9025 ) 

9026 raise RuntimeError(msg) 

9027 

9028 # log 

9029 msg = "%s wrote %d bytes." 

9030 LOGGER.debug(msg, "_write_states_sph", n_bytes_written) 

9031 

9032 return n_bytes_written 

9033 

9034 def _write_states_airbags( 

9035 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

9036 ) -> int: 

9037 

9038 if settings.header["npefg"] <= 0: 

9039 return 0 

9040 

9041 _check_ndim( 

9042 self, 

9043 { 

9044 ArrayType.airbag_n_active_particles: ["n_timesteps", "n_airbags"], 

9045 ArrayType.airbag_bag_volume: ["n_timesteps", "n_airbags"], 

9046 ArrayType.airbag_particle_gas_id: ["n_timesteps", "n_particles"], 

9047 ArrayType.airbag_particle_chamber_id: ["n_timesteps", "n_particles"], 

9048 ArrayType.airbag_particle_leakage: ["n_timesteps", "n_particles"], 

9049 ArrayType.airbag_particle_mass: ["n_timesteps", "n_particles"], 

9050 ArrayType.airbag_particle_radius: ["n_timesteps", "n_particles"], 

9051 ArrayType.airbag_particle_spin_energy: ["n_timesteps", "n_particles"], 

9052 ArrayType.airbag_particle_translation_energy: ["n_timesteps", "n_particles"], 

9053 ArrayType.airbag_particle_nearest_segment_distance: ["n_timesteps", "n_particles"], 

9054 ArrayType.airbag_particle_position: ["n_timesteps", "n_particles", "x_y_z"], 

9055 ArrayType.airbag_particle_velocity: ["n_timesteps", "n_particles", "vx_vy_vz"], 

9056 }, 

9057 ) 

9058 

9059 array_dims = { 

9060 ArrayType.global_timesteps: 0, 

9061 ArrayType.airbag_n_active_particles: 0, 

9062 ArrayType.airbag_bag_volume: 0, 

9063 } 

9064 self.check_array_dims(array_dims, "n_timesteps") 

9065 

9066 array_dims = { 

9067 ArrayType.airbags_ids: 0, 

9068 ArrayType.airbag_n_active_particles: 1, 

9069 ArrayType.airbag_bag_volume: 1, 

9070 } 

9071 n_airbags = self.check_array_dims(array_dims, "n_airbags") 

9072 assert n_airbags == settings.header["npefg"] % 1000 

9073 

9074 array_dims = { 

9075 ArrayType.global_timesteps: 0, 

9076 ArrayType.airbag_particle_gas_id: 0, 

9077 ArrayType.airbag_particle_chamber_id: 0, 

9078 ArrayType.airbag_particle_leakage: 0, 

9079 ArrayType.airbag_particle_mass: 0, 

9080 ArrayType.airbag_particle_radius: 0, 

9081 ArrayType.airbag_particle_spin_energy: 0, 

9082 ArrayType.airbag_particle_translation_energy: 0, 

9083 ArrayType.airbag_particle_nearest_segment_distance: 0, 

9084 ArrayType.airbag_particle_position: 0, 

9085 ArrayType.airbag_particle_velocity: 0, 

9086 } 

9087 self.check_array_dims(array_dims, "n_timesteps") 

9088 

9089 array_dims = { 

9090 ArrayType.airbag_particle_gas_id: 1, 

9091 ArrayType.airbag_particle_chamber_id: 1, 

9092 ArrayType.airbag_particle_leakage: 1, 

9093 ArrayType.airbag_particle_mass: 1, 

9094 ArrayType.airbag_particle_radius: 1, 

9095 ArrayType.airbag_particle_spin_energy: 1, 

9096 ArrayType.airbag_particle_translation_energy: 1, 

9097 ArrayType.airbag_particle_nearest_segment_distance: 1, 

9098 ArrayType.airbag_particle_position: 1, 

9099 ArrayType.airbag_particle_velocity: 1, 

9100 } 

9101 n_particles = self.check_array_dims(array_dims, "n_particles") 

9102 

9103 self.check_array_dims({ArrayType.airbag_particle_position: 2}, "x_y_z", 3) 

9104 

9105 self.check_array_dims({ArrayType.airbag_particle_velocity: 2}, "vx_vy_vz", 3) 

9106 

9107 # Info: 

9108 # we cast integers to floats here (no conversion, just a cast) 

9109 # to be able to concatenate the arrays while preserving the 

9110 # bytes internally. 

9111 

9112 # AIRBAG STATE DATA 

9113 airbag_n_active_particles = ( 

9114 self.arrays[ArrayType.airbag_n_active_particles][i_timestep] 

9115 if ArrayType.airbag_n_active_particles in self.arrays 

9116 else np.zeros(n_airbags, dtype=settings.itype) 

9117 ) 

9118 airbag_n_active_particles = airbag_n_active_particles.view(settings.ftype) 

9119 

9120 airbag_bag_volume = ( 

9121 self.arrays[ArrayType.airbag_bag_volume][i_timestep] 

9122 if ArrayType.airbag_bag_volume in self.arrays 

9123 else np.zeros(n_airbags, dtype=settings.ftype) 

9124 ) 

9125 

9126 airbag_data = np.concatenate( 

9127 [ 

9128 airbag_n_active_particles.reshape(n_airbags, 1), 

9129 airbag_bag_volume.reshape(n_airbags, 1), 

9130 ], 

9131 axis=1, 

9132 ) 

9133 n_bytes_written = fp.write(settings.pack(airbag_data, dtype_hint=np.floating)) 

9134 

9135 # particle var names 

9136 array_particle_list = [] 

9137 

9138 # PARTICLE GAS ID 

9139 array = ( 

9140 self.arrays[ArrayType.airbag_particle_gas_id][i_timestep] 

9141 if ArrayType.airbag_particle_gas_id in self.arrays 

9142 else np.zeros(n_particles, dtype=settings.itype) 

9143 ) 

9144 array = array.view(settings.ftype) 

9145 array_particle_list.append(array.reshape(-1, 1)) 

9146 

9147 # PARTICLE CHAMBER ID 

9148 array = ( 

9149 self.arrays[ArrayType.airbag_particle_chamber_id][i_timestep] 

9150 if ArrayType.airbag_particle_chamber_id in self.arrays 

9151 else np.zeros(n_particles, dtype=settings.itype) 

9152 ) 

9153 array = array.view(settings.ftype) 

9154 array_particle_list.append(array.reshape(-1, 1)) 

9155 

9156 # PARTICLE LEAKAGE 

9157 array = ( 

9158 self.arrays[ArrayType.airbag_particle_leakage][i_timestep] 

9159 if ArrayType.airbag_particle_leakage in self.arrays 

9160 else np.zeros(n_particles, dtype=settings.itype) 

9161 ) 

9162 array = array.view(settings.ftype) 

9163 array_particle_list.append(array.reshape(-1, 1)) 

9164 

9165 # PARTICLE POSITION 

9166 array = ( 

9167 self.arrays[ArrayType.airbag_particle_position][i_timestep] 

9168 if ArrayType.airbag_particle_position in self.arrays 

9169 else np.zeros((n_particles, 3), dtype=settings.ftype) 

9170 ) 

9171 array_particle_list.append(array) 

9172 

9173 # PARTICLE VELOCITY 

9174 array = ( 

9175 self.arrays[ArrayType.airbag_particle_velocity][i_timestep] 

9176 if ArrayType.airbag_particle_velocity in self.arrays 

9177 else np.zeros((n_particles, 3), dtype=settings.ftype) 

9178 ) 

9179 array_particle_list.append(array) 

9180 

9181 # PARTICLE MASS 

9182 array = ( 

9183 self.arrays[ArrayType.airbag_particle_mass][i_timestep] 

9184 if ArrayType.airbag_particle_mass in self.arrays 

9185 else np.zeros(n_particles, dtype=settings.ftype) 

9186 ) 

9187 array_particle_list.append(array.reshape(-1, 1)) 

9188 

9189 # PARTICLE RADIUS 

9190 array = ( 

9191 self.arrays[ArrayType.airbag_particle_radius][i_timestep] 

9192 if ArrayType.airbag_particle_radius in self.arrays 

9193 else np.zeros(n_particles, dtype=settings.ftype) 

9194 ) 

9195 array_particle_list.append(array.reshape(-1, 1)) 

9196 

9197 # PARTICLE SPIN ENERGY 

9198 array = ( 

9199 self.arrays[ArrayType.airbag_particle_spin_energy][i_timestep] 

9200 if ArrayType.airbag_particle_spin_energy in self.arrays 

9201 else np.zeros(n_particles, dtype=settings.ftype) 

9202 ) 

9203 array_particle_list.append(array.reshape(-1, 1)) 

9204 

9205 # PARTICLE TRANSL ENERGY 

9206 array = ( 

9207 self.arrays[ArrayType.airbag_particle_translation_energy][i_timestep] 

9208 if ArrayType.airbag_particle_translation_energy in self.arrays 

9209 else np.zeros(n_particles, dtype=settings.ftype) 

9210 ) 

9211 array_particle_list.append(array.reshape(-1, 1)) 

9212 

9213 # PARTICLE NEAREST NEIGHBOR DISTANCE 

9214 array = ( 

9215 self.arrays[ArrayType.airbag_particle_nearest_segment_distance][i_timestep] 

9216 if ArrayType.airbag_particle_nearest_segment_distance in self.arrays 

9217 else np.zeros(n_particles, dtype=settings.ftype) 

9218 ) 

9219 array_particle_list.append(array.reshape(-1, 1)) 

9220 

9221 airbag_particle_data = np.concatenate(array_particle_list, axis=1) 

9222 n_bytes_written += fp.write(settings.pack(airbag_particle_data, dtype_hint=np.floating)) 

9223 

9224 # check bytes 

9225 n_bytes_expected = (2 * n_airbags + n_particles * 14) * settings.wordsize 

9226 if n_bytes_expected != n_bytes_written: 

9227 msg = ( 

9228 "byte checksum wrong: " 

9229 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

9230 ) 

9231 raise RuntimeError(msg) 

9232 

9233 # log 

9234 msg = "%s wrote %d bytes." 

9235 LOGGER.debug(msg, "_write_states_airbags", n_bytes_written) 

9236 

9237 return n_bytes_written 

9238 

9239 def _write_states_rigid_road( 

9240 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

9241 ) -> int: 

9242 

9243 if settings.header["ndim"] <= 5: 

9244 return 0 

9245 

9246 _check_ndim( 

9247 self, 

9248 { 

9249 ArrayType.rigid_road_displacement: ["n_timesteps", "n_rigid_roads", "x_y_z"], 

9250 ArrayType.rigid_road_velocity: ["n_timesteps", "n_rigid_roads", "vx_vy_vz"], 

9251 }, 

9252 ) 

9253 

9254 array_dims = { 

9255 ArrayType.global_timesteps: 0, 

9256 ArrayType.rigid_road_displacement: 0, 

9257 ArrayType.rigid_road_velocity: 0, 

9258 } 

9259 self.check_array_dims(array_dims, "n_rigid_roads") 

9260 

9261 array_dims = { 

9262 ArrayType.rigid_road_segment_road_id: 0, 

9263 ArrayType.rigid_road_displacement: 1, 

9264 ArrayType.rigid_road_velocity: 1, 

9265 } 

9266 n_rigid_roads = self.check_array_dims(array_dims, "n_rigid_roads") 

9267 

9268 self.check_array_dims({ArrayType.rigid_road_displacement: 2}, "x_y_z", 3) 

9269 

9270 self.check_array_dims({ArrayType.rigid_road_velocity: 2}, "vx_vy_vz", 3) 

9271 

9272 rigid_road_data = np.zeros((n_rigid_roads, 2, 3), dtype=settings.ftype) 

9273 

9274 # RIGID ROAD DISPLACEMENT 

9275 if ArrayType.rigid_road_displacement in self.arrays: 

9276 array = self.arrays[ArrayType.rigid_road_displacement][i_timestep] 

9277 rigid_road_data[:, 0, :] = array 

9278 

9279 # RIGID ROAD VELOCITY 

9280 if ArrayType.rigid_road_velocity in self.arrays: 

9281 array = self.arrays[ArrayType.rigid_road_velocity][i_timestep] 

9282 rigid_road_data[:, 1, :] = array 

9283 

9284 n_bytes_written = fp.write(settings.pack(rigid_road_data, dtype_hint=np.floating)) 

9285 

9286 # check bytes 

9287 n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize 

9288 if n_bytes_expected != n_bytes_written: 

9289 msg = ( 

9290 "byte checksum wrong: " 

9291 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

9292 ) 

9293 raise RuntimeError(msg) 

9294 

9295 # log 

9296 msg = "%s wrote %d bytes." 

9297 LOGGER.debug(msg, "_write_states_rigid_road", n_bytes_written) 

9298 

9299 return n_bytes_written 

9300 

9301 def _write_states_rigid_bodies( 

9302 self, fp: typing.IO[Any], i_timestep: int, settings: D3plotWriterSettings 

9303 ) -> int: 

9304 

9305 if 8 <= settings.header["ndim"] <= 9: 

9306 pass 

9307 else: 

9308 return 0 

9309 

9310 has_reduced_data = settings.header["ndim"] == 9 

9311 

9312 _check_ndim( 

9313 self, 

9314 { 

9315 ArrayType.rigid_body_coordinates: ["n_timesteps", "n_rigid_bodies", "x_y_z"], 

9316 ArrayType.rigid_body_rotation_matrix: ["n_timesteps", "n_rigid_bodies", "matrix"], 

9317 ArrayType.rigid_body_velocity: ["n_timesteps", "n_rigid_bodies", "vx_vy_vz"], 

9318 ArrayType.rigid_body_rot_velocity: ["n_timesteps", "n_rigid_bodies", "rvx_rvy_rvz"], 

9319 ArrayType.rigid_body_acceleration: ["n_timesteps", "n_rigid_bodies", "ax_ay_az"], 

9320 ArrayType.rigid_body_rot_acceleration: [ 

9321 "n_timesteps", 

9322 "n_rigid_bodies", 

9323 "rax_ray_raz", 

9324 ], 

9325 }, 

9326 ) 

9327 

9328 array_dims = { 

9329 ArrayType.global_timesteps: 0, 

9330 ArrayType.rigid_body_coordinates: 0, 

9331 ArrayType.rigid_body_rotation_matrix: 0, 

9332 ArrayType.rigid_body_velocity: 0, 

9333 ArrayType.rigid_body_rot_velocity: 0, 

9334 ArrayType.rigid_body_acceleration: 0, 

9335 ArrayType.rigid_body_rot_acceleration: 0, 

9336 } 

9337 self.check_array_dims(array_dims, "n_timesteps") 

9338 

9339 array_dims = { 

9340 ArrayType.rigid_body_part_indexes: 1, 

9341 ArrayType.rigid_body_coordinates: 1, 

9342 ArrayType.rigid_body_rotation_matrix: 1, 

9343 ArrayType.rigid_body_velocity: 1, 

9344 ArrayType.rigid_body_rot_velocity: 1, 

9345 ArrayType.rigid_body_acceleration: 1, 

9346 ArrayType.rigid_body_rot_acceleration: 1, 

9347 } 

9348 n_rigid_bodies = self.check_array_dims(array_dims, "n_rigid_bodies") 

9349 

9350 self.check_array_dims({ArrayType.rigid_body_coordinates: 2}, "x_y_z", 3) 

9351 

9352 self.check_array_dims({ArrayType.rigid_body_rotation_matrix: 2}, "matrix", 9) 

9353 

9354 self.check_array_dims({ArrayType.rigid_body_velocity: 2}, "vx_vy_vz", 3) 

9355 

9356 self.check_array_dims({ArrayType.rigid_body_rot_velocity: 2}, "rvx_rvy_rvz", 3) 

9357 

9358 self.check_array_dims({ArrayType.rigid_body_acceleration: 2}, "ax_ay_az", 3) 

9359 

9360 self.check_array_dims({ArrayType.rigid_body_rot_acceleration: 2}, "rax_ray_raz", 3) 

9361 

9362 # allocate block 

9363 rigid_body_data = ( 

9364 np.zeros((n_rigid_bodies, 12), dtype=settings.ftype) 

9365 if has_reduced_data 

9366 else np.zeros((n_rigid_bodies, 24), dtype=settings.ftype) 

9367 ) 

9368 

9369 start_index = 0 

9370 end_index = 0 

9371 

9372 # COORDINATES 

9373 if ArrayType.rigid_body_coordinates in self.arrays: 

9374 start_index = end_index 

9375 end_index = start_index + 3 

9376 array = self.arrays[ArrayType.rigid_body_coordinates][i_timestep] 

9377 rigid_body_data[:, start_index:end_index] = array 

9378 

9379 # ROTATION MATRIX 

9380 if ArrayType.rigid_body_rotation_matrix in self.arrays: 

9381 start_index = end_index 

9382 end_index = start_index + 9 

9383 array = self.arrays[ArrayType.rigid_body_coordinates][i_timestep] 

9384 rigid_body_data[:, start_index:end_index] = array 

9385 

9386 if not has_reduced_data: 

9387 

9388 # VELOCITY 

9389 if ArrayType.rigid_body_velocity in self.arrays: 

9390 start_index = end_index 

9391 end_index = start_index + 3 

9392 array = self.arrays[ArrayType.rigid_body_velocity][i_timestep] 

9393 rigid_body_data[:, start_index:end_index] = array 

9394 

9395 # ROTATION VELOCITY 

9396 if ArrayType.rigid_body_rot_velocity in self.arrays: 

9397 start_index = end_index 

9398 end_index = start_index + 3 

9399 array = self.arrays[ArrayType.rigid_body_rot_velocity][i_timestep] 

9400 rigid_body_data[:, start_index:end_index] = array 

9401 

9402 # ACCELERATION 

9403 if ArrayType.rigid_body_acceleration in self.arrays: 

9404 start_index = end_index 

9405 end_index = start_index + 3 

9406 array = self.arrays[ArrayType.rigid_body_acceleration][i_timestep] 

9407 rigid_body_data[:, start_index:end_index] = array 

9408 

9409 # ROTATION ACCELERATION 

9410 if ArrayType.rigid_body_rot_acceleration in self.arrays: 

9411 start_index = end_index 

9412 end_index = start_index + 3 

9413 array = self.arrays[ArrayType.rigid_body_rot_acceleration][i_timestep] 

9414 rigid_body_data[:, start_index:end_index] = array 

9415 

9416 n_bytes_written = fp.write(settings.pack(rigid_body_data, dtype_hint=np.floating)) 

9417 

9418 # check bytes 

9419 n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize 

9420 if n_bytes_expected != n_bytes_written: 

9421 msg = ( 

9422 "byte checksum wrong: " 

9423 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

9424 ) 

9425 raise RuntimeError(msg) 

9426 

9427 # log 

9428 msg = "%s wrote %d bytes." 

9429 LOGGER.debug(msg, "_write_states_rigid_bodies", n_bytes_written) 

9430 

9431 return n_bytes_written 

9432 

9433 def check_array_dims( 

9434 self, array_dimensions: Dict[str, int], dimension_name: str, dimension_size: int = -1 

9435 ): 

9436 """This function checks if multiple arrays share an array dimensions 

9437 with the same size. 

9438 

9439 Parameters 

9440 ---------- 

9441 array_dimensions: Dict[str, int] 

9442 Array name and expected number of dimensions as dict 

9443 dimension_name: str 

9444 Name of the array dimension for error messages 

9445 dimension_size: int 

9446 Optional expected size. If not set then all entries must equal 

9447 the first value collected. 

9448 

9449 Raises 

9450 ------ 

9451 ValueError 

9452 If dimensions do not match in any kind of way. 

9453 """ 

9454 

9455 dimension_size_dict = {} 

9456 

9457 # collect all dimensions 

9458 for typename, dimension_index in array_dimensions.items(): 

9459 if typename not in self.arrays: 

9460 continue 

9461 

9462 array = self.arrays[typename] 

9463 

9464 if dimension_index >= array.ndim: 

9465 msg = ( 

9466 f"Array '{typename}' requires at least " 

9467 f"{dimension_index} dimensions ({dimension_name})" 

9468 ) 

9469 raise ValueError(msg) 

9470 

9471 dimension_size_dict[typename] = array.shape[dimension_index] 

9472 

9473 # static dimension 

9474 if dimension_size >= 0: 

9475 arrays_with_wrong_dims = { 

9476 typename: size 

9477 for typename, size in dimension_size_dict.items() 

9478 if size != dimension_size 

9479 } 

9480 

9481 if arrays_with_wrong_dims: 

9482 msg = "The dimension %s of the following arrays is expected to have size %d:\n%s" 

9483 msg_arrays = [ 

9484 f" - name: {typename} dim: {array_dimensions[typename]} size: {size}" 

9485 for typename, size in arrays_with_wrong_dims.items() 

9486 ] 

9487 raise ValueError(msg, dimension_name, dimension_size, "\n".join(msg_arrays)) 

9488 

9489 # dynamic dimensions 

9490 else: 

9491 if dimension_size_dict: 

9492 unique_sizes = np.unique(list(dimension_size_dict.values())) 

9493 if len(unique_sizes) > 1: 

9494 msg = "Inconsistency in array dim '%d' detected:\n%s" 

9495 size_list = [ 

9496 f" - name: {typename}, dim: {array_dimensions[typename]}, size: {size}" 

9497 for typename, size in dimension_size_dict.items() 

9498 ] 

9499 raise ValueError(msg, dimension_name, "\n".join(size_list)) 

9500 if len(unique_sizes) == 1: 

9501 dimension_size = unique_sizes[0] 

9502 

9503 if dimension_size < 0: 

9504 return 0 

9505 

9506 return dimension_size 

9507 

9508 @staticmethod 

9509 def _compare_n_bytes_checksum(n_bytes_written: int, n_bytes_expected: int): 

9510 """Throw if the byte checksum was not ok 

9511 

9512 Parameters 

9513 ---------- 

9514 n_bytes_written: int 

9515 bytes written to the file 

9516 n_bytes_expected: int 

9517 bytes expected from the header computation 

9518 

9519 Raises 

9520 ------ 

9521 RuntimeError 

9522 If the byte count doesn't match. 

9523 """ 

9524 if n_bytes_expected != n_bytes_written: 

9525 msg = ( 

9526 "byte checksum wrong: " 

9527 f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" 

9528 ) 

9529 raise RuntimeError(msg) 

9530 

9531 def _get_zero_byte_padding(self, n_bytes_written: int, block_size_bytes: int): 

9532 """Compute the zero byte-padding at the end of files 

9533 

9534 Parameters 

9535 ---------- 

9536 n_bytes_written: int 

9537 number of bytes already written to file 

9538 block_size_bytes: int 

9539 byte block size of the file 

9540 

9541 Returns 

9542 ------- 

9543 zero_bytes: bytes 

9544 zero-byte padding ready to be written to the file 

9545 """ 

9546 

9547 if block_size_bytes > 0: 

9548 remaining_bytes = n_bytes_written % block_size_bytes 

9549 n_bytes_to_fill = block_size_bytes - remaining_bytes if remaining_bytes != 0 else 0 

9550 return b"\x00" * n_bytes_to_fill 

9551 

9552 return b"" 

9553 

9554 def compare(self, d3plot2, array_eps: Union[float, None] = None): 

9555 """Compare two d3plots and print the info 

9556 

9557 Parameters 

9558 ---------- 

9559 d3plot2: D3plot 

9560 second d3plot 

9561 array_eps: float or None 

9562 tolerance for arrays 

9563 

9564 Returns 

9565 ------- 

9566 hdr_differences: dict 

9567 differences in the header 

9568 array_differences: dict 

9569 difference between arrays as message 

9570 

9571 Examples 

9572 -------- 

9573 Comparison of a femzipped file and an uncompressed file. Femzip 

9574 is a lossy compression, thus precision is traded for memory. 

9575 

9576 >>> d3plot1 = D3plot("path/to/d3plot") 

9577 >>> d3plot2 = D3plot("path/to/d3plot.fz") 

9578 >>> hdr_diff, array_diff = d3plot1.compare(d3plot2) 

9579 >>> for arraytype, msg in array_diff.items(): 

9580 >>> print(name, msg) 

9581 node_coordinates Δmax = 0.050048828125 

9582 node_displacement Δmax = 0.050048828125 

9583 node_velocity Δmax = 0.050048828125 

9584 node_acceleration Δmax = 49998984.0 

9585 element_beam_axial_force Δmax = 6.103515625e-05 

9586 element_shell_stress Δmax = 0.0005035400390625 

9587 element_shell_thickness Δmax = 9.999999717180685e-10 

9588 element_shell_unknown_variables Δmax = 0.0005000010132789612 

9589 element_shell_internal_energy Δmax = 188.41957092285156 

9590 

9591 """ 

9592 

9593 # pylint: disable = too-many-nested-blocks 

9594 

9595 assert isinstance(d3plot2, D3plot) 

9596 d3plot1 = self 

9597 

9598 hdr_differences = d3plot1.header.compare(d3plot2.header) 

9599 

9600 # ARRAY COMPARISON 

9601 array_differences = {} 

9602 

9603 array_names = list(d3plot1.arrays.keys()) + list(d3plot2.arrays.keys()) 

9604 

9605 for name in array_names: 

9606 

9607 array1 = ( 

9608 d3plot1.arrays[name] if name in d3plot1.arrays else "Array is missing in original" 

9609 ) 

9610 

9611 array2 = d3plot2.arrays[name] if name in d3plot2.arrays else "Array is missing in other" 

9612 

9613 # d3parts write results for rigid shells. 

9614 # when rewriting as d3plot we simply 

9615 # don't write the part_material_types 

9616 # array which is the same as having no 

9617 # rigid shells. 

9618 d3plot1_is_d3part = d3plot1.header.filetype == D3plotFiletype.D3PART 

9619 d3plot2_is_d3part = d3plot2.header.filetype == D3plotFiletype.D3PART 

9620 if name == "part_material_type" and (d3plot1_is_d3part or d3plot2_is_d3part): 

9621 continue 

9622 

9623 # we have an array to compare 

9624 if isinstance(array1, str): 

9625 array_differences[name] = array1 

9626 elif isinstance(array2, str): 

9627 array_differences[name] = array2 

9628 elif isinstance(array2, np.ndarray): 

9629 comparison = False 

9630 

9631 # compare arrays 

9632 if isinstance(array1, np.ndarray): 

9633 if array1.shape != array2.shape: 

9634 comparison = f"shape mismatch {array1.shape} != {array2.shape}" 

9635 else: 

9636 if np.issubdtype(array1.dtype, np.number) and np.issubdtype( 

9637 array2.dtype, np.number 

9638 ): 

9639 diff = np.abs(array1 - array2) 

9640 if diff.size: 

9641 if array_eps is not None: 

9642 diff2 = diff[diff > array_eps] 

9643 if diff2.size: 

9644 diff2_max = diff2.max() 

9645 if diff2_max: 

9646 comparison = f"Δmax = {diff2_max}" 

9647 else: 

9648 diff_max = diff.max() 

9649 if diff_max: 

9650 comparison = f"Δmax = {diff_max}" 

9651 else: 

9652 n_mismatches = (array1 != array2).sum() 

9653 if n_mismatches: 

9654 comparison = f"Mismatches: {n_mismatches}" 

9655 

9656 else: 

9657 comparison = "Arrays don't match" 

9658 

9659 # print 

9660 if comparison: 

9661 array_differences[name] = comparison 

9662 

9663 return hdr_differences, array_differences 

9664 

9665 def get_part_filter( 

9666 self, filter_type: FilterType, part_ids: Iterable[int], for_state_array: bool = True 

9667 ) -> np.ndarray: 

9668 """Get a part filter for different entities 

9669 

9670 Parameters 

9671 ---------- 

9672 filter_type: lasso.dyna.FilterType 

9673 the array type to filter for (beam, shell, solid, tshell, node) 

9674 part_ids: Iterable[int] 

9675 part ids to filter out 

9676 for_state_array: bool 

9677 if the filter is meant for a state array. Makes a difference 

9678 for shells if rigid bodies are in the model (mattyp == 20) 

9679 

9680 Returns 

9681 ------- 

9682 mask: np.ndarray 

9683 mask usable on arrays to filter results 

9684 

9685 Examples 

9686 -------- 

9687 >>> from lasso.dyna import D3plot, ArrayType, FilterType 

9688 >>> d3plot = D3plot("path/to/d3plot") 

9689 >>> part_ids = [13, 14] 

9690 >>> mask = d3plot.get_part_filter(FilterType.shell) 

9691 >>> shell_stress = d3plot.arrays[ArrayType.element_shell_stress] 

9692 >>> shell_stress.shape 

9693 (34, 7463, 3, 6) 

9694 >>> # select only parts from part_ids 

9695 >>> shell_stress_parts = shell_stress[:, mask] 

9696 """ 

9697 

9698 # nodes are treated separately 

9699 if filter_type == FilterType.NODE: 

9700 node_index_arrays = [] 

9701 

9702 if ArrayType.element_shell_node_indexes in self.arrays: 

9703 shell_filter = self.get_part_filter( 

9704 FilterType.SHELL, part_ids, for_state_array=False 

9705 ) 

9706 shell_node_indexes = self.arrays[ArrayType.element_shell_node_indexes] 

9707 node_index_arrays.append(shell_node_indexes[shell_filter].flatten()) 

9708 

9709 if ArrayType.element_solid_node_indexes in self.arrays: 

9710 solid_filter = self.get_part_filter( 

9711 FilterType.SOLID, part_ids, for_state_array=False 

9712 ) 

9713 solid_node_indexes = self.arrays[ArrayType.element_solid_node_indexes] 

9714 node_index_arrays.append(solid_node_indexes[solid_filter].flatten()) 

9715 

9716 if ArrayType.element_tshell_node_indexes in self.arrays: 

9717 tshell_filter = self.get_part_filter( 

9718 FilterType.TSHELL, part_ids, for_state_array=False 

9719 ) 

9720 tshell_node_indexes = self.arrays[ArrayType.element_tshell_node_indexes] 

9721 node_index_arrays.append(tshell_node_indexes[tshell_filter].flatten()) 

9722 

9723 return np.unique(np.concatenate(node_index_arrays)) 

9724 

9725 # we need part ids first 

9726 if ArrayType.part_ids in self.arrays: 

9727 d3plot_part_ids = self.arrays[ArrayType.part_ids] 

9728 elif ArrayType.part_titles_ids in self.arrays: 

9729 d3plot_part_ids = self.arrays[ArrayType.part_titles_ids] 

9730 else: 

9731 msg = "D3plot does neither contain '%s' nor '%s'" 

9732 raise RuntimeError(msg, ArrayType.part_ids, ArrayType.part_titles_ids) 

9733 

9734 # if we filter parts we can stop here 

9735 if filter_type == FilterType.PART: 

9736 return np.isin(d3plot_part_ids, part_ids) 

9737 

9738 # get part indexes from part ids 

9739 part_indexes = np.argwhere(np.isin(d3plot_part_ids, part_ids)).flatten() 

9740 

9741 # associate part indexes with entities 

9742 if filter_type == FilterType.BEAM: 

9743 entity_part_indexes = self.arrays[ArrayType.element_beam_part_indexes] 

9744 elif filter_type == FilterType.SHELL: 

9745 entity_part_indexes = self.arrays[ArrayType.element_shell_part_indexes] 

9746 

9747 # shells may contain "rigid body shell elements" 

9748 # for these shells no state data is output and thus 

9749 # the state arrays have a reduced element count 

9750 if for_state_array and self._material_section_info.n_rigid_shells != 0: 

9751 mat_types = self.arrays[ArrayType.part_material_type] 

9752 mat_type_filter = mat_types[entity_part_indexes] != 20 

9753 entity_part_indexes = entity_part_indexes[mat_type_filter] 

9754 

9755 elif filter_type == FilterType.TSHELL: 

9756 entity_part_indexes = self.arrays[ArrayType.element_tshell_part_indexes] 

9757 elif filter_type == FilterType.SOLID: 

9758 entity_part_indexes = self.arrays[ArrayType.element_solid_part_indexes] 

9759 else: 

9760 msg = "Invalid filter_type '%s'. Use lasso.dyna.FilterType." 

9761 raise ValueError(msg, filter_type) 

9762 

9763 mask = np.isin(entity_part_indexes, part_indexes) 

9764 return mask 

9765 

9766 @staticmethod 

9767 def enable_logger(enable: bool): 

9768 """Enable the logger for this class 

9769 

9770 Parameters 

9771 ---------- 

9772 enable: bool 

9773 whether to enable logging for this class 

9774 """ 

9775 

9776 if enable: 

9777 LOGGER.setLevel(logging.DEBUG) 

9778 else: 

9779 LOGGER.setLevel(logging.NOTSET)