Fix filenames in make_big_inp and add fixmes

rasmusvt 2022-07-14 19:47:36 +02:00
parent ace44cb48f
commit 92075fbb66


@@ -9,8 +9,6 @@ import datetime
 import warnings
 import json
-from ase import io
 import nafuma.auxillary as aux
@@ -329,6 +327,8 @@ def write_output(fout, data, options, index=0):
     fout.write(f'out {options["save_dir"]}/{filename}_{label}.dat append\n')
     fout.write(f'\t\tOut_String("XXXX")\n')
+    # FIXME Does not write out weighted_Durbin_Watson, TOPAS complained about this
     fout.write('\t\t{: <40} {: <40} {: <40} {: <40} {: <40} {: <40}'.format(
         f'Out(Get(r_wp), "%11.5f")',
         f'Out(Get(r_exp), "%11.5f")',
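
For reference, a toy illustration (standalone, with made-up sample columns, not code from this commit) of how the fixed-width format string above lays out the Out(...) macros in the appended results file:

# Each Out(...) macro is placed in a left-aligned, 40-character column so the
# rows appended to the results .dat file stay readable.
line = '\t\t{: <40} {: <40}'.format(
    'Out(Get(r_wp), "%11.5f")',
    'Out(Get(r_exp), "%11.5f")',
)
print(line)
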
@@ -477,6 +477,9 @@ def make_big_inp(data: dict, options={}):
     ''' Generates a big .INP-file with all filenames found in data["path"]. Uses a template .INP-file (which has to be generated manually from an initial refinement in TOPAS) and appends this to a large .INP-file
     while changing the filenames. '''
+    # FIXME Strip headers from initial INP file before copying it.
     required_options = ['template', 'output', 'overwrite', 'backup', 'backup_dir', 'include', 'topas_options', 'save_results', 'save_dir', 'log', 'logfile']
     default_options = {
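
A minimal sketch of the behaviour the docstring describes, assuming a helper of this shape (the function name, the temp_xdd argument and the four-digit index padding are illustrative, not the module's actual API):

def build_big_inp(paths, template_path, output_path, temp_xdd):
    """Append one copy of the template per diffractogram, swapping the filename."""
    with open(template_path, 'r') as f:
        template = f.read()

    with open(output_path, 'w') as out:
        for i, xdd in enumerate(paths):
            num_str = str(i).zfill(4)
            # Same replacement idea as make_inp_entry(): swap the template's
            # xdd path for the current file and number the "XXXX" placeholders
            out.write(template.replace(temp_xdd, xdd).replace('XXXX', num_str) + '\n')
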
@@ -635,29 +638,29 @@ def make_inp_entry(template: str, xdd: str, num: int, options: dict) -> str:
     # Replace diffractogram-path
     s = template.replace(temp_xdd, xdd).replace('XXXX', num_str)
-    basename = os.path.basename(xdd).split(".")[0]
-    # Define regular expressions for output lines
-    regs = [r'Out_Riet\([\S]*\)',
-            r'Out_CIF_STR\([\S]*\)',
-            r'Out_CIF_ADPs\([\S]*\)',
-            r'Out_CIF_Bonds_Angles\([\S]*\)',
-            r'Out_FCF\([\S]*\)',
-            r'Create_hklm_d_Th2_Ip_file\([\S]*\)',
-            r'out(.*?)append']
-    # Define substitute strings for output lines
-    subs = [f'Out_Riet({options["save_dir"]}/{basename}_riet.xy)',
-            f'Out_CIF_STR({options["save_dir"]}/{basename}.cif)',
-            f'Out_CIF_ADPs({options["save_dir"]}/{basename}.cif)',
-            f'Out_CIF_Bonds_Angles({options["save_dir"]}/{basename}.cif)',
-            f'Out_FCF({options["save_dir"]}/{basename}.fcf)',
-            f'Create_hklm_d_Th2_Ip_file({options["save_dir"]}/{basename}_hkl.dat)',
-            f'out \t {options["save_dir"]}/{basename}_refined_params.dat \t append']
-    # Substitute strings in output lines
-    for reg, sub in zip(regs, subs):
-        s = re.sub(reg, sub, s)
+    # basename = os.path.basename(xdd).split(".")[0]
+    # # Define regular expressions for output lines
+    # regs = [r'Out_Riet\([\S]*\)',
+    #         r'Out_CIF_STR\([\S]*\)',
+    #         r'Out_CIF_ADPs\([\S]*\)',
+    #         r'Out_CIF_Bonds_Angles\([\S]*\)',
+    #         r'Out_FCF\([\S]*\)',
+    #         r'Create_hklm_d_Th2_Ip_file\([\S]*\)',
+    #         r'out(.*?)append']
+    # # Define substitute strings for output lines
+    # subs = [f'Out_Riet({options["save_dir"]}/{basename}_riet.xy)',
+    #         f'Out_CIF_STR({options["save_dir"]}/{basename}.cif)',
+    #         f'Out_CIF_ADPs({options["save_dir"]}/{basename}.cif)',
+    #         f'Out_CIF_Bonds_Angles({options["save_dir"]}/{basename}.cif)',
+    #         f'Out_FCF({options["save_dir"]}/{basename}.fcf)',
+    #         f'Create_hklm_d_Th2_Ip_file({options["save_dir"]}/{basename}_hkl.dat)',
+    #         f'out \t {options["save_dir"]}/{basename}_refined_params.dat \t append']
+    # # Substitute strings in output lines
+    # for reg, sub in zip(regs, subs):
+    #     s = re.sub(reg, sub, s)
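
The block commented out above redirected the TOPAS output macros to per-basename paths. A small standalone illustration of that substitution, using two of the macros and made-up input values:

import os
import re

xdd = 'data/xrd/sample_0001.xye'
save_dir = 'results'
basename = os.path.basename(xdd).split('.')[0]

s = 'Out_Riet(old/path_riet.xy)\nOut_CIF_STR(old/path.cif)'
regs = [r'Out_Riet\([\S]*\)', r'Out_CIF_STR\([\S]*\)']
subs = [f'Out_Riet({save_dir}/{basename}_riet.xy)',
        f'Out_CIF_STR({save_dir}/{basename}.cif)']

# Rewrite each output line so results land next to the diffractogram's basename
for reg, sub in zip(regs, subs):
    s = re.sub(reg, sub, s)

print(s)
# Out_Riet(results/sample_0001_riet.xy)
# Out_CIF_STR(results/sample_0001.cif)
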
@@ -716,7 +719,12 @@ def refine(data: dict, options={}):
     # Create folders if they don't exist
-    paths, headers = get_paths(data['inp']), get_headers(data['inp'])
+    # FIXME Since the big INP files now have the same filename for all iterations, we need to adjust the code to only get unique values from the get_paths function
+    # FIXME get_headers() is also not working now. Needs to be adjusted to the new way of writing the Out-parameters
+    paths = get_paths(data['inp'])
+    headers = get_headers(data['inp'])
     for path in paths:
         dirname = os.path.dirname(path)
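
One possible approach to the first FIXME above, i.e. collapsing the repeated filenames returned by get_paths() down to unique values (the helper name is made up):

def unique_in_order(items):
    """Drop duplicates while keeping the first-seen order."""
    return list(dict.fromkeys(items))

# e.g. paths = unique_in_order(get_paths(data['inp']))
print(unique_in_order(['out/big.inp', 'out/big.inp', 'out/big.inp']))  # ['out/big.inp']
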
@@ -745,4 +753,15 @@ def refine(data: dict, options={}):
             os.makedirs(os.path.dirname(options['topas_logfile']))
     subprocess.call(command, shell=True)
+
+
+def read_results():
+    # FIXME Write the function
+    return None
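
A rough sketch of what read_results() might eventually do, assuming the results live in the whitespace-separated .dat file that write_output() appends to (the column names below are assumptions):

def read_results_sketch(path, columns=('index', 'r_wp', 'r_exp')):
    """Parse one refinement result per line from a whitespace-separated file."""
    results = []
    with open(path, 'r') as f:
        for line in f:
            fields = line.split()
            if not fields:
                continue
            row = dict(zip(columns, fields))
            # Everything except the index string should be numeric
            for key in list(row)[1:]:
                row[key] = float(row[key])
            results.append(row)
    return results
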