Merge branch 'devel' into 'master'
Prepare for a new release

See merge request loads-kernel/loads-kernel!54
ArneVoss committed Aug 18, 2023
2 parents 0208570 + 927e004 commit eb89ac7
Showing 44 changed files with 1,580 additions and 1,640 deletions.
8 changes: 1 addition & 7 deletions .gitlab-ci.yml
@@ -20,8 +20,7 @@ build:
- lk
script:
- *virtualenv
- pip install git+https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.dlr.de/loads-kernel/panel-aero.git
- pip install .
- pip install -e .

test:
stage: test
@@ -32,11 +31,6 @@ test:
dependencies:
- build
script:
# Set up the environment
- *virtualenv
- pip install git+https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.dlr.de/loads-kernel/panel-aero.git
# Install with -e (in editable mode) to allow the tracking of the test coverage
- pip install -e .
- which python
- which pytest
- which mpiexec
1 change: 1 addition & 0 deletions AUTHORS.md
@@ -14,6 +14,7 @@ If you contributed to this code, please list your name and affiliation here:
```
Schulze, Matthias, Institute of Aeroelasticity, Deutsches Zentrum für Luft- und Raumfahrt e.V.
Handojo, Vega, Institute of Aeroelasticity, Deutsches Zentrum für Luft- und Raumfahrt e.V.
Baier, Jan, Institute of Aeroelasticity, Deutsches Zentrum für Luft- und Raumfahrt e.V.
```

28 changes: 28 additions & 0 deletions doc/DMAPforSOL103viaHDF5.alter
@@ -0,0 +1,28 @@
NASTRAN OP2NEW=0
$ Direct Text Input for File Management Section
ASSIGN OUTPUT2 = 'uset.op2', UNIT = 12
$ Direct Text Input for Executive Control
MALTER 'AFTER UPSTREAM SUPERELEMENT MATRIX AND LOAD ASSEMBLY'$
CRDB_MTX MGG//'MGG' $
CRDB_MTX KGG//'KGG' $
ENDALTER $
MALTER 'MALTER:AFTER SUPERELEMENT MATRIX AND LOAD REDUCTION TO A-SET' $
CRDB_MTX GM//'GM' $
OUTPUT2 USET//0/12///'USET' $
ENDALTER $
ECHOON $
$
SOL 103
CEND
$
METHOD=100
ECHO=NONE
$
BEGIN BULK
PARAM GRDPNT 0
$------><------><------><------><------><------><------><------><------>
EIGRL 100 100
$------><------><------><------><------><------><------><------><------>
HDF5OUT PRCISION 64 CMPRMTHD NONE MTX YES
$
$ Structural model
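
The alter above uses CRDB_MTX to export the mass (MGG) and stiffness (KGG) matrices plus the GM transformation, and OUTPUT2 to write the USET table to uset.op2; with MTX YES on the HDF5OUT entry, the matrices presumably land in the run's HDF5 result database. A minimal sketch, assuming a hypothetical file name and making no assumption about the exact dataset paths (they vary with the Nastran version), of how the resulting .h5 file could be inspected with h5py:

```python
import h5py

# Hypothetical file name of the HDF5 result database written by the run.
filename = 'sol103_run.h5'

with h5py.File(filename, 'r') as f:
    # Walk the complete tree and print every dataset path, shape and dtype,
    # so the location of the exported matrices (MGG, KGG, GM) can be found
    # without hard-coding a specific Nastran HDF5 schema.
    def print_dataset(name, obj):
        if isinstance(obj, h5py.Dataset):
            print(name, obj.shape, obj.dtype)

    f.visititems(print_dataset)
```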
14 changes: 13 additions & 1 deletion doc/jcl_template.py
@@ -273,6 +273,16 @@ def __init__(self):
'support': [0,1,2,3,4,5],
# Thrust per engine in N or 'balanced'
'thrust':'balanced',
# Euler angle Phi in rad
'phi': 0.0/180.0*np.pi,
# Euler angle Theta in rad
'theta': 0.0/180.0*np.pi,
# Pilot command Xi in rad
'command_xi': 0.0/180.0*np.pi,
# Pilot command Eta in rad
'command_eta': 0.0/180.0*np.pi,
# Pilot command Zeta in rad
'command_zeta': 0.0/180.0*np.pi,
# --- End of experimental section ---
},
]
@@ -307,7 +317,9 @@ def __init__(self):
# True or False, enables a generic controller e.g. to maintain p, q and r
'controller': False,
# True or False, enables a generic landing gear
'landinggear':False,
'landinggear': False,
# True or False, enables calculation of rigid and elastic derivatives
'derivatives': False,
# List of DoF to be constrained
'support': [0,1,2,3,4,5],
# True or False, enables flutter check with k, ke or pk method
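
The new experimental trimcase parameters are all angles in rad, written in the template as degrees divided by 180 times np.pi. A minimal sketch of how such an entry might look in a user's JCL, assuming the dictionary structure of the template above; only a few of the usual keys are shown and the numerical values are invented:

```python
import numpy as np

# Fragment of one trimcase entry using the new experimental parameters.
# Keys follow the template above; the values below are made up.
trimcase = {
    'desc': 'CC.M3.OVCFL000.level',   # hypothetical case description
    'support': [0, 1, 2, 3, 4, 5],    # DoF to be constrained, as in the template
    'thrust': 'balanced',
    # Euler angles Phi and Theta in rad (here 2.0 deg and 1.5 deg)
    'phi': np.deg2rad(2.0),
    'theta': np.deg2rad(1.5),
    # Pilot commands Xi, Eta and Zeta in rad (neutral stick and pedals)
    'command_xi': 0.0 / 180.0 * np.pi,
    'command_eta': 0.0 / 180.0 * np.pi,
    'command_zeta': 0.0 / 180.0 * np.pi,
}
```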
10 changes: 5 additions & 5 deletions loadscompare/compare.py
@@ -16,7 +16,7 @@

from loadscompare import plotting
import loadskernel.io_functions as io_functions
import loadskernel.io_functions.specific_functions
import loadskernel.io_functions.data_handling


class Compare():
@@ -257,9 +257,9 @@ def load_monstation(self):
if filename != '':
if '.pickle' in filename:
with open(filename, 'rb') as f:
dataset = io_functions.specific_functions.load_pickle(f)
dataset = io_functions.data_handling.load_pickle(f)
elif '.hdf5' in filename:
dataset = io_functions.specific_functions.load_hdf5(filename)
dataset = io_functions.data_handling.load_hdf5(filename)

# save into data structure
self.datasets['ID'].append(self.datasets['n'])
@@ -281,9 +281,9 @@ def save_monstation(self):
filename = QFileDialog.getSaveFileName(self.window, self.file_opt['title'], self.file_opt['initialdir'], self.file_opt['filters'])[0]
if filename != '' and '.pickle' in filename:
with open(filename, 'wb') as f:
io_functions.specific_functions.dump_pickle(dataset_sel, f)
io_functions.data_handling.dump_pickle(dataset_sel, f)
if filename != '' and '.hdf5' in filename:
io_functions.specific_functions.dump_hdf5(filename, dataset_sel)
io_functions.data_handling.dump_hdf5(filename, dataset_sel)

def update_fields(self):
self.lb_dataset.clear()
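
The calls above only swap the module name: io_functions.specific_functions becomes io_functions.data_handling, with the same load/dump interface for pickle and HDF5 monstation files. A minimal sketch of using the renamed module outside the GUI, with hypothetical file names and the function signatures as they appear in the diff:

```python
import loadskernel.io_functions.data_handling as data_handling

# Load a monstation data set from a pickle file (hypothetical file name).
with open('monstations.pickle', 'rb') as f:
    dataset = data_handling.load_pickle(f)

# Write the same data set back out in HDF5 format.
data_handling.dump_hdf5('monstations.hdf5', dataset)
```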
87 changes: 48 additions & 39 deletions loadskernel/auxiliary_output.py
@@ -5,38 +5,46 @@

import loadskernel.io_functions as io_functions
import loadskernel.io_functions.write_mona
import loadskernel.io_functions.specific_functions
import loadskernel.io_functions.data_handling
from loadskernel.grid_trafo import *
from loadskernel.io_functions.data_handling import load_hdf5_dict

class AuxiliaryOutput:
#===========================================================================
# This class provides functions to save data of trim calculations.
#===========================================================================
class AuxiliaryOutput(object):
"""
This class provides functions to save data of trim calculations.
"""
def __init__(self, jcl, model, trimcase):
self.jcl = jcl
self.model = model
self.trimcase = trimcase
self.responses = []
self.crit_trimcases = []

self.strcgrid = load_hdf5_dict(self.model['strcgrid'])
self.mongrid = load_hdf5_dict(self.model['mongrid'])
self.macgrid = load_hdf5_dict(self.model['macgrid'])
self.coord = load_hdf5_dict(self.model['coord'])

self.Dkx1 = self.model['Dkx1'][()]

def save_nodaldefo(self, filename):
# deformations are given in 9300 coord
strcgrid_tmp = copy.deepcopy(self.model.strcgrid)
grid_trafo(strcgrid_tmp, self.model.coord, 9300)
strcgrid_tmp = copy.deepcopy(self.strcgrid)
grid_trafo(strcgrid_tmp, self.coord, 9300)
logging.info( 'saving nodal flexible deformations as dat file...')
with open(filename+'_undeformed.dat', 'w') as fid:
np.savetxt(fid, np.hstack((self.model.strcgrid['ID'].reshape(-1,1), strcgrid_tmp['offset'])))
np.savetxt(fid, np.hstack((self.strcgrid['ID'].reshape(-1,1), strcgrid_tmp['offset'])))

for i_trimcase in range(len(self.jcl.trimcase)):
with open(filename+'_subcase_'+str(self.jcl.trimcase[i_trimcase]['subcase'])+'_Ug.dat', 'w') as fid:
defo = np.hstack((self.model.strcgrid['ID'].reshape(-1,1), self.model.strcgrid['offset'] + self.responses[i_trimcase]['Ug_r'][self.model.strcgrid['set'][:,0:3]] + + self.responses[i_trimcase]['Ug_f'][self.model.strcgrid['set'][:,0:3]] * 500.0))
defo = np.hstack((self.strcgrid['ID'].reshape(-1,1), self.strcgrid['offset'] + self.responses[i_trimcase]['Ug_r'][self.strcgrid['set'][:,0:3]] + + self.responses[i_trimcase]['Ug_f'][self.model.strcgrid['set'][:,0:3]] * 500.0))
np.savetxt(fid, defo)

def write_all_nodalloads(self, filename):
logging.info( 'saving all nodal loads as Nastran cards...')
with open(filename+'_Pg', 'w') as fid:
for i_trimcase in range(len(self.jcl.trimcase)):
io_functions.write_mona.write_force_and_moment_cards(fid, self.model.strcgrid, self.responses[i_trimcase]['Pg'][0,:], self.jcl.trimcase[i_trimcase]['subcase'])
io_functions.write_mona.write_force_and_moment_cards(fid, self.strcgrid, self.responses[i_trimcase]['Pg'][0,:], self.jcl.trimcase[i_trimcase]['subcase'])
with open(filename+'_subcases', 'w') as fid:
for i_trimcase in range(len(self.jcl.trimcase)):
io_functions.write_mona.write_subcases(fid, self.jcl.trimcase[i_trimcase]['subcase'], self.jcl.trimcase[i_trimcase]['desc'])
@@ -48,15 +56,16 @@ def write_trimresults(self, filename_csv):
if trimresult != False:
trimresults.append(trimresult)
logging.info('writing trim results to: ' + filename_csv)
io_functions.specific_functions.write_list_of_dictionaries(trimresults, filename_csv)
io_functions.data_handling.write_list_of_dictionaries(trimresults, filename_csv)

def assemble_trimresult(self, i_case):
response = self.responses[i_case]
if response['successful'][()]:
trimresult = OrderedDict({'subcase': self.jcl.trimcase[i_case]['subcase'],
'desc': self.jcl.trimcase[i_case]['desc'],})
i_mass = self.model.mass['key'].index(self.jcl.trimcase[i_case]['mass'])
n_modes = self.model.mass['n_modes'][i_mass]
'desc': self.jcl.trimcase[i_case]['desc'],
})

self.n_modes = self.model['mass'][self.jcl.trimcase[i_case]['mass']]['n_modes'][()]

# get trimmed states
trimresult['x'] = response['X'][0,0]
@@ -80,24 +89,24 @@
trimresult['dp'] = response['Y'][0,9]
trimresult['dq'] = response['Y'][0,10]
trimresult['dr'] = response['Y'][0,11]
trimresult['command_xi [deg]'] = response['X'][0,12+2*n_modes]/np.pi*180.0
trimresult['command_eta [deg]'] = response['X'][0,13+2*n_modes]/np.pi*180.0
trimresult['command_zeta [deg]'] = response['X'][0,14+2*n_modes]/np.pi*180.0
trimresult['thrust per engine [N]'] = response['X'][0,15+2*n_modes]
trimresult['stabilizer [deg]'] = response['X'][0,16+2*n_modes]/np.pi*180.0
trimresult['flap setting [deg]'] = response['X'][0,17+2*n_modes]/np.pi*180.0
trimresult['command_xi [deg]'] = response['X'][0,12+2*self.n_modes]/np.pi*180.0
trimresult['command_eta [deg]'] = response['X'][0,13+2*self.n_modes]/np.pi*180.0
trimresult['command_zeta [deg]'] = response['X'][0,14+2*self.n_modes]/np.pi*180.0
trimresult['thrust per engine [N]'] = response['X'][0,15+2*self.n_modes]
trimresult['stabilizer [deg]'] = response['X'][0,16+2*self.n_modes]/np.pi*180.0
trimresult['flap setting [deg]'] = response['X'][0,17+2*self.n_modes]/np.pi*180.0
trimresult['Nz'] = response['Nxyz'][0,2]
trimresult['Vtas'] = response['Y'][0,-2]
trimresult['q_dyn'] = response['q_dyn'][0,0]
trimresult['alpha [deg]'] = response['alpha'][0,0]/np.pi*180.0
trimresult['beta [deg]'] = response['beta'][0,0]/np.pi*180.0

# calculate additional aero coefficients
Pmac_rbm = np.dot(self.model.Dkx1.T, response['Pk_rbm'][0,:])
Pmac_cam = np.dot(self.model.Dkx1.T, response['Pk_cam'][0,:])
Pmac_cs = np.dot(self.model.Dkx1.T, response['Pk_cs'][0,:])
Pmac_f = np.dot(self.model.Dkx1.T, response['Pk_f'][0,:])
Pmac_idrag = np.dot(self.model.Dkx1.T, response['Pk_idrag'][0,:])
Pmac_rbm = np.dot(self.Dkx1.T, response['Pk_rbm'][0,:])
Pmac_cam = np.dot(self.Dkx1.T, response['Pk_cam'][0,:])
Pmac_cs = np.dot(self.Dkx1.T, response['Pk_cs'][0,:])
Pmac_f = np.dot(self.Dkx1.T, response['Pk_f'][0,:])
Pmac_idrag = np.dot(self.Dkx1.T, response['Pk_idrag'][0,:])
A = self.jcl.general['A_ref'] #sum(self.model.aerogrid['A'][:])
AR = self.jcl.general['b_ref']**2.0 / self.jcl.general['A_ref']
Pmac_c = np.divide(response['Pmac'][0,:],response['q_dyn'][0])/A
@@ -113,14 +122,14 @@
trimresult['Cx'] = Pmac_c[0]
trimresult['Cy'] = Pmac_c[1]
trimresult['Cz'] = Pmac_c[2]
trimresult['Cmx'] = Pmac_c[3]/self.model.macgrid['b_ref']
trimresult['Cmy'] = Pmac_c[4]/self.model.macgrid['c_ref']
trimresult['Cmz'] = Pmac_c[5]/self.model.macgrid['b_ref']
trimresult['Cmx'] = Pmac_c[3]/self.macgrid['b_ref']
trimresult['Cmy'] = Pmac_c[4]/self.macgrid['c_ref']
trimresult['Cmz'] = Pmac_c[5]/self.macgrid['b_ref']
trimresult['Cl'] = Cl
trimresult['Cd'] = Cd
trimresult['E'] = Cl/Cd
trimresult['Cd_ind'] = Pmac_idrag[0]/response['q_dyn'][0,0]/A
trimresult['Cmz_ind'] = Pmac_idrag[5]/response['q_dyn'][0,0]/A/self.model.macgrid['b_ref']
trimresult['Cmz_ind'] = Pmac_idrag[5]/response['q_dyn'][0,0]/A/self.macgrid['b_ref']
trimresult['e'] = Cd_ind_theo/(Pmac_idrag[0]/response['q_dyn'][0,0]/A)
else:
trimresult = False
@@ -134,7 +143,7 @@ def write_successful_trimcases(self, filename_csv):
if self.responses[i_case]['successful'][()]:
sucessfull_trimcases_info.append(trimcase)
logging.info('writing successful trimcases cases to: ' + filename_csv)
io_functions.specific_functions.write_list_of_dictionaries(sucessfull_trimcases_info, filename_csv)
io_functions.data_handling.write_list_of_dictionaries(sucessfull_trimcases_info, filename_csv)

def write_failed_trimcases(self, filename_csv):
failed_trimcases_info = []
@@ -144,7 +153,7 @@ def write_failed_trimcases(self, filename_csv):
if not self.responses[i_case]['successful'][()]:
failed_trimcases_info.append(trimcase)
logging.info('writing failed trimcases cases to: ' + filename_csv)
io_functions.specific_functions.write_list_of_dictionaries(failed_trimcases_info, filename_csv)
io_functions.data_handling.write_list_of_dictionaries(failed_trimcases_info, filename_csv)

def write_critical_trimcases(self, filename_csv):
# this function actually belongs to post-processing rather than to
@@ -159,7 +168,7 @@ def write_critical_trimcases(self, filename_csv):
crit_trimcases_info.append(trimcase)

logging.info('writing critical trimcases cases to: ' + filename_csv)
io_functions.specific_functions.write_list_of_dictionaries(crit_trimcases_info, filename_csv)
io_functions.data_handling.write_list_of_dictionaries(crit_trimcases_info, filename_csv)

def write_critical_nodalloads(self, filename):
logging.info( 'saving critical nodal loads as Nastran cards...')
@@ -178,7 +187,7 @@ def write_critical_nodalloads(self, filename):
with open(filename+'_Pg', 'w') as fid:
for subcase_ID in crit_ids:
idx = subcases_IDs.index(subcase_ID)
io_functions.write_mona.write_force_and_moment_cards(fid, self.model.strcgrid, self.dyn2stat_data['Pg'][idx][:], subcases_IDs[idx])
io_functions.write_mona.write_force_and_moment_cards(fid, self.strcgrid, self.dyn2stat_data['Pg'][idx][:], subcases_IDs[idx])
with open(filename+'_subcases', 'w') as fid:
for subcase_ID in crit_ids:
idx = subcases_IDs.index(subcase_ID)
@@ -205,8 +214,8 @@ def write_critical_sectionloads(self, base_filename):
crit_monstations[key]['t'] += [monstation['t'][pos_to_copy]]
logging.info('saving critical monstation(s).')
with open(base_filename + '.pickle', 'wb') as f:
io_functions.specific_functions.dump_pickle(crit_monstations, f)
io_functions.specific_functions.dump_hdf5(base_filename + '.hdf5', crit_monstations)
io_functions.data_handling.dump_pickle(crit_monstations, f)
io_functions.data_handling.dump_hdf5(base_filename + '.hdf5', crit_monstations)

def save_cpacs_header(self):

@@ -232,24 +241,24 @@ def save_cpacs_flight_load_cases(self):
self.cf.create_path(path_flight_load_cases+'/flightLoadCase['+str(i_trimcase+1)+']', 'nodalLoads/wingNodalLoad')
path_nodalLoads = path_flight_load_cases+'/flightLoadCase['+str(i_trimcase+1)+']'+'/nodalLoads/wingNodalLoad'
self.cf.add_elem(path_nodalLoads, 'parentUID', 'complete aircraft', 'text')
self.cf.write_cpacs_loadsvector(path_nodalLoads, self.model.strcgrid, self.responses[i_trimcase]['Pg'] )
self.cf.write_cpacs_loadsvector(path_nodalLoads, self.strcgrid, self.responses[i_trimcase]['Pg'] )
# cut loads
self.cf.create_path(path_flight_load_cases+'/flightLoadCase['+str(i_trimcase+1)+']', 'cutLoads/wingCutLoad')
path_cutLoads = path_flight_load_cases+'/flightLoadCase['+str(i_trimcase+1)+']'+'/cutLoads/wingCutLoad'
self.cf.add_elem(path_cutLoads, 'parentUID', 'complete aircraft', 'text')
self.cf.write_cpacs_loadsvector(path_cutLoads, self.model.mongrid, self.responses[i_trimcase]['Pmon_local'])
self.cf.write_cpacs_loadsvector(path_cutLoads, self.mongrid, self.responses[i_trimcase]['Pmon_local'])

def save_cpacs_dynamic_aircraft_model_points(self):
# save structural grid points to CPACS
self.cf.create_path('/cpacs/vehicles/aircraft/model/wings/wing/dynamicAircraftModel', 'dynamicAircraftModelPoints')
path_dynamic_aircraft_model_points = '/cpacs/vehicles/aircraft/model/wings/wing/dynamicAircraftModel/dynamicAircraftModelPoints'
self.cf.write_cpacs_grid(path_dynamic_aircraft_model_points, self.model.strcgrid)
self.cf.write_cpacs_grid(path_dynamic_aircraft_model_points, self.strcgrid)

def save_cpacs_cut_load_integration_points(self):
# save monitoring stations to CPACS
self.cf.create_path('/cpacs/vehicles/aircraft/model/wings/wing/dynamicAircraftModel', 'cutLoadIntegrationPoints')
path_cut_load_integration_points = '/cpacs/vehicles/aircraft/model/wings/wing/dynamicAircraftModel/cutLoadIntegrationPoints'
self.cf.write_cpacs_grid(path_cut_load_integration_points, self.model.mongrid)
self.cf.write_cpacs_grid(path_cut_load_integration_points, self.mongrid)
#self.cf.write_cpacs_grid_orientation(path_CutLoadIntegrationPoints, self.model.mongrid, self.model.coord)

def save_cpacs(self, filename):
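
Throughout auxiliary_output.py the refactoring replaces attribute access on a model object (self.model.strcgrid) with dictionary lookups on an HDF5-backed model: the grids are converted once in __init__ via load_hdf5_dict and scalar or array entries are read with the h5py [()] accessor. A rough sketch of the behaviour load_hdf5_dict is assumed to provide; the actual implementation lives in loadskernel.io_functions.data_handling and may differ:

```python
import h5py

def load_hdf5_dict(hdf5_group):
    """Assumed behaviour: copy every dataset of an HDF5 group into a plain
    dict, so that e.g. strcgrid['ID'] or strcgrid['offset'] can be used like
    the former model attributes."""
    new_dict = {}
    for key in hdf5_group.keys():
        item = hdf5_group[key]
        if isinstance(item, h5py.Dataset):
            # [()] reads the full dataset into memory (numpy array or scalar).
            new_dict[key] = item[()]
    return new_dict
```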