""" Copyright (c) 2016, Christian Schou Oxvig, Thomas Arildsen, and Torben Larsen All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. More information about the 2-Clause BSD license may be found at: http://opensource.org/licenses/BSD-2-Clause Script for running the simulations that creates the: Structure Assisted Compressed Sensing Reconstruction of Undersampled AFM Images Dataset 2 **Combinations that are tested** - 17 images - 1 sampling pattern: Rect Spiral - 20 undersampling ratios: np.linspace(0.10, 0.40, 20) - 1 Dictionary: DCT - 20 Sparsity levels: np.linspace(0.05, 0.20, 20) for IT based algorithms - 4 Reconstruction algorithms: IHT, IST, w-IST, w-IHT, ell_1 - 2 Quality indicators: PSNR, SSIM - 1 value of the "a" parameter: 0.0025 See the bottom of the script for more details about the specific combinations. **Weights model** A Jackknife approach is used to test the weights model. Specifically, all but the image used in the reconstruction are averaged (in the DCT domain) and used in the fit of the model. 

**Requirements**

To run the script, the following must be available:

- The seven AFM cell images from http://dx.doi.org/10.5281/zenodo.17573

  IMAGE        MD5SUM                           / SHA256SUM
  image_0.mi   cbdad52d68b2c3d294fa79f1c1f5b289 / 17be078f0cd8c0af212508295d776325c7a609ff14a442d958447a9f38e78112
  image_1.mi   0bad0a8617677de8316a69c3d4250859 / 371f55ea437c852e0195b8e5025e385ebce80e75752a4937ed795f841318e577
  image_2.mi   04bb426527fe09fb089559b98e5ea69d / 1c578e632bb1fd101fce0a11546f04d9e198ad325238b7eafb518ea05f02ef43
  image_3.mi   3b335afa6fc331d3b6dcc1a3827e72f7 / 6b0a0faa87772240a986f87feec52e88584a225a156aa963703f3c0de4b6809b
  image_4.mi   58dd5ec753463a0b8027553b6aa5a1d1 / e6ec20f9e9c94323ce83e1ee8aab9f2d5b944e28bc23f04f978be49d8909dc84
  image_5.mi   95fd0548021cc51518082a2eeb962b49 / 29bf989526aa1f31cb88fa9637b96d1562605ea14eeab661c1f6965962f2d8d6
  image_6.mi   038574a2050ab73e8fc88ecf770bfe1a / 82f7b2e214579bd7694e7a0c0ef06cbf60418e28c95336247adea71510791400

- The ten AFM images from http://dx.doi.org/10.5281/zenodo.60434

  IMAGE        MD5SUM                           / SHA256SUM
  image_7.mi   d20175f3de3732e217f6c8000e8e105a / d38514378efe521c97361f81ca440900ff56073b0cace6c88f6d23f30b633c4d
  image_8.mi   f2567c28dbd2561a064f3b5b5faf31a1 / 2b5b129deb5e13084c8bb57856204bf4d882d04b8bfd98aac1235916131b4131
  image_9.mi   dc07f9619b5b954e2b0a0baa874216ff / 1b3d9b09057e1b8421c5db7c16fc0d068f6b39aede41cc85f19c0e1a04b3460b
  image_10.mi  28721367516a20ea31e75c00f3b5ce0c / c891c6763158bea335dc57900629c84179cde657af96c0b896ad4e38268a80e2
  image_11.mi  7ebc8f25cf4ba4f6bf722dce27622360 / aa8d20b5719eb1e07f84eb9c963aa9c38cf4cdb88afaffc3f187d38f8a637c5c
  image_12.mi  bd01764ed7ae0169cc543fc399c762cf / 83a890f10bdc5a561a4a63af9a0242ffefa87832b9f77b0fb024d150158d4082
  image_13.mi  0aa8b4488c5db7fd391128b82c5b3f54 / c05b285abd7ba2b0f182025c25567db9aea6a4cc6e794196c49b76e6fcdca8e5
  image_14.mi  2062bf84fcad3b0ee7d0b04b1d7064ab / 2fd8b0cb71c7185aa1a45dcbd9a55e23c30f7e97ff7a0d4b1dd84f50483764c1
  image_15.mi  fdd16e5c54a147c457af6568ce602698 / a2c842021b7942573ac02706e1a52bd5485e9a1a3f7320ffaaab85a354266a43
  image_16.mi  12c71a620bd18638bb590ee782bf15ab / 6720ffea925d039251b6cdb8467350f99b038684b01f1381a650c14bf53c73ae

- The magni Python package from http://dx.doi.org/10.5278/VBN/MISC/Magni

  Magni_1.5.0.zip
  MD5SUM: 0dc719b80d9a976f9d6b3bd64fb7c19b
  SHA256SUM: bb77ee22088260e56250bd7b4889504e9fa82c5c8aab1be5377a8c899cceaa95

**Output**

All output is placed in the "results" folder. The output is:

- An HDF5 database ("weighted_it_reconstructions.hdf5") containing all
  results.
- An overview figure ("summary.png") of each reconstruction.

"""

from __future__ import division

import json
import os
from pprint import pprint
import time
import warnings

import matplotlib as mpl
mpl.use('Agg')  # Select a non-interactive backend before importing pyplot
import matplotlib.pyplot as plt
import numpy as np
import psutil
from scipy import optimize
from skimage.measure import compare_ssim
import tables as tb

import pyunlocbox as ulb

import magni
from magni.utils.validation import decorate_validation as _decorate_validation
from magni.utils.validation import validate_generic as _generic
from magni.utils.validation import validate_levels as _levels
from magni.utils.validation import validate_numeric as _numeric
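

# The input .mi files are expected to match the checksums listed in the
# module docstring. A minimal sketch (standard library hashlib; not part of
# the original pipeline) for verifying one of them:
#
#     import hashlib
#     with open('image_0.mi', 'rb') as f:
#         digest = hashlib.md5(f.read()).hexdigest()
#     assert digest == 'cbdad52d68b2c3d294fa79f1c1f5b289'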


def run_simulation_task(img_folder=None, result_folder=None, h5_name=None,
                        task=None):
    """
    Run a simulation task.

    The following elements are part of this simulation:

    * Load, downsample, and detilt image
    * Fit model of DCT domain
    * Scan / sample image
    * Setup reconstruction
    * Reconstruct image
    * Evaluate reconstruction result
    * Save results

    Parameters
    ----------
    img_folder : str
        The path to the folder containing the mi-files.
    result_folder : str
        The path to the root folder to save result figures to.
    h5_name : str
        The name of the HDF5 file in which the results are saved.
    task : dict
        The simulation task specification.

    """

    # Input validation
    @_decorate_validation
    def validate_input():
        _generic('img_folder', 'string')
        _generic('result_folder', 'string')
        _generic('h5_name', 'string')
        _generic('task', 'mapping')
    validate_input()

    # 1. Divide images into training samples and test image and load them
    if task['reconstruction_algorithm'].startswith('w_'):
        training_images = list(task['images'])
        training_images.remove(task['image'])
        training_mi_imgs = [magni.afm.io.read_mi_file(
            img_folder + image).get_buffer('Topography')[0]
            for image in training_images]
        # 512 x 512 images are downsampled by a factor of two to 256 x 256
        training_imgs = [
            magni.imaging.visualisation.stretch_image(
                magni.imaging.preprocessing.detilt(mi_img.data[::2, ::2]),
                1.0)
            if mi_img.data.shape[0] == 512
            else magni.imaging.visualisation.stretch_image(
                magni.imaging.preprocessing.detilt(mi_img.data), 1.0)
            for mi_img in training_mi_imgs]
    else:
        training_imgs = list()
    test_mi_img = magni.afm.io.read_mi_file(
        img_folder + task['image']).get_buffer('Topography')[0]
    if test_mi_img.data.shape[0] == 512:
        test_img = magni.imaging.visualisation.stretch_image(
            test_mi_img.data[::2, ::2], 1.0)
    else:
        test_img = magni.imaging.visualisation.stretch_image(
            test_mi_img.data, 1.0)
    for img in training_imgs + [test_img]:
        assert img.shape == (256, 256)
        assert np.allclose(img.min(), 0.0)
        assert np.allclose(img.max(), 1.0)
    h, w = test_img.shape

    # 2. Scanning setup
    Psi = magni.imaging.dictionaries.utils.get_function_handle(
        'matrix', 'DCT')((h, w))
    scan_length = task['delta'] * 2 * h * w
    num_points = 10 * int(scan_length)  # Make sure to have enough points
    img_coords = magni.imaging.measurements.spiral_sample_image(
        h, w, scan_length, num_points, rect_area=True)
    Phi = magni.imaging.measurements.construct_measurement_matrix(
        img_coords, h, w)
    unique_pixels = magni.imaging.measurements.unique_pixels(img_coords)

    # 3. De-tilt based on samples
    scan_mask = np.zeros((h, w), dtype=np.bool_)
    scan_mask[unique_pixels[:, 1], unique_pixels[:, 0]] = True
    test_img_detilt, tilt = magni.imaging.preprocessing.detilt(
        test_img, mask=scan_mask, return_tilt=True)

    # 4. Convert images to vectors
    training_img_vecs = [magni.imaging.mat2vec(img) for img in training_imgs]
    test_img_vec = magni.imaging.mat2vec(test_img)
    test_img_detilt_vec = magni.imaging.mat2vec(
        magni.imaging.visualisation.stretch_image(test_img_detilt, 1.0))
    tilt_vec = magni.imaging.mat2vec(tilt)
    assert np.allclose(test_img_detilt_vec.min(), 0.0)
    assert np.allclose(test_img_detilt_vec.max(), 1.0)

    # 5. Fit model to DCT domain representations
    if task['reconstruction_algorithm'].startswith('w_'):
        training_dct_vecs = [Psi.T.dot(vec) for vec in training_img_vecs]
        training_abs_dcts = [magni.imaging.visualisation.stretch_image(
            np.abs(magni.imaging.vec2mat(vec, (h, w))), 1)
            for vec in training_dct_vecs]
        gaus_model = fit_gaussian_model(training_abs_dcts, h, w)
        W = magni.imaging.mat2vec(
            magni.imaging.visualisation.stretch_image(
                gaus_model, 1.0, min_val=1e-3))
    else:
        W = np.ones_like(test_img_vec)

    # 6. Scan image
    measurements = Phi.dot(test_img_detilt_vec)
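
    # At this point the usual compressed sensing model is in place:
    # measurements = Phi.dot(x) with x = Psi.dot(alpha), where Phi is the
    # m x n measurement matrix, Psi the n x n DCT dictionary, and alpha the
    # (approximately sparse) coefficient vector estimated below.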

    # 7. Reconstruction
    reconstructed_img_vec, reconstruction_time, rec_coefs = reconstruct_image(
        task['reconstruction_algorithm'], measurements, Phi, Psi, W,
        task['reconstruction_parameter'])

    # 8. Evaluation
    psnr = magni.imaging.evaluation.calculate_psnr(
        test_img_detilt_vec, reconstructed_img_vec, 1.0)
    with warnings.catch_warnings():
        # Ignore copy-on-read warnings (only affects execution speed)
        warnings.simplefilter('ignore')
        ssim = compare_ssim(
            magni.imaging.vec2mat(test_img_detilt_vec, (h, w)),
            magni.imaging.vec2mat(reconstructed_img_vec, (h, w)),
            dynamic_range=1)

    # 9. Save results in database
    h5_file = result_folder + '/' + h5_name
    with magni.utils.multiprocessing.File(h5_file, mode='a') as h5file:
        # Save metrics
        row = h5file.root.simulation_results.metrics.row
        row['image'] = task['image']
        row['delta'] = task['delta']
        row['reconstruction_parameter'] = task['reconstruction_parameter']
        row['reconstruction_algorithm'] = task['reconstruction_algorithm']
        row['psnr'] = psnr
        row['ssim'] = ssim
        row['time'] = reconstruction_time
        row.append()
    save_path = '/'.join(['/simulation_results', task['image'],
                          'd' + str(task['delta']),
                          'r' + str(task['reconstruction_parameter'])])
    save_path = _fix_str_representation(save_path)
    carray_tb_filters = tb.Filters(complevel=1, complib='bzip2',
                                   fletcher32=True)
    with magni.utils.multiprocessing.File(h5_file, mode='a') as h5file:
        # Save arrays and tasks
        db_group = h5file.create_group(
            save_path, task['reconstruction_algorithm'], createparents=True)
        h5file.create_carray(
            db_group, 'test_img_vec', obj=test_img_vec,
            filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'test_img_detilt_vec', obj=test_img_detilt_vec,
            filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'tilt_vec', obj=tilt_vec, filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'measurements', obj=measurements,
            filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'img_coords', obj=img_coords,
            filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'weights', obj=W, filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'reconstructed_coefficients_vec', obj=rec_coefs,
            filters=carray_tb_filters)
        h5file.create_carray(
            db_group, 'reconstructed_img_vec', obj=reconstructed_img_vec,
            filters=carray_tb_filters)
        h5file.create_array(db_group, 'task', obj=json.dumps(task).encode())
        h5file.create_array(db_group, 'img_shape', obj=(h, w))
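
    # The arrays for this task thus end up in the HDF5 group
    # /simulation_results/<image>/d<delta>/r<reconstruction_parameter>/
    # <algorithm> (after _fix_str_representation has mangled '.' and '-'
    # in the path).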

    # 10. Save summary figures
    measurement_img = magni.imaging.mat2vec(
        magni.imaging.visualisation.mask_img_from_coords(
            test_img, magni.imaging.measurements.unique_pixels(img_coords)))
    rec_coefs_vec = magni.imaging.visualisation.stretch_image(np.log10(
        magni.imaging.visualisation.stretch_image(np.abs(rec_coefs), 1)
        + 1e-5), 1)
    # The detilted image is included twice so that it also appears next to
    # the reconstruction in the 2-row subplot grid
    figs = [magni.imaging.vec2mat(vec, (h, w))
            for vec in [test_img_vec, test_img_detilt_vec, tilt_vec, W,
                        measurement_img, reconstructed_img_vec,
                        test_img_detilt_vec, rec_coefs_vec]]
    titles = ['Original', 'Detilted', 'Tilt', 'Weights', 'Measurements',
              'Reconstruction', 'Detilted',
              'DCT coefficients\nin reconstruction']
    magni.utils.plotting.setup_matplotlib({'figure': {'figsize': (20, 12)}})
    fig = magni.imaging.visualisation.imsubplot(figs, 2, titles=titles)
    out_dir = (result_folder + save_path + '/' +
               task['reconstruction_algorithm'])
    fig.suptitle('{}\n PSNR: {:.2f} dB, SSIM: {:.2f}, time: {:.2f} s'.format(
        out_dir, psnr, ssim, reconstruction_time), fontsize=20)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    plt.savefig(out_dir + '/summary.png')
    plt.close(fig)


def reconstruct_image(algorithm, measurements, Phi, Psi, W,
                      reconstruction_parameter):
    """
    Return a reconstructed image along with the reconstruction time.

    Parameters
    ----------
    algorithm : str
        The reconstruction algorithm to use.
    measurements : ndarray
        The m x 1 vector of measurements.
    Phi : magni.utils.matrices.Matrix
        The measurement matrix operator.
    Psi : magni.utils.matrices.Matrix
        The dictionary matrix operator.
    W : ndarray
        The n x 1 vector of weights to use in weighted reconstruction
        methods.
    reconstruction_parameter : float or int
        The reconstruction algorithm specific parameter that is being swept.

    Returns
    -------
    reconstructed_img_vec : ndarray
        The n x 1 vector representing the reconstructed image.
    reconstruction_time : float
        The time in seconds it took to do the reconstruction.
    reconstructed_coefficients : ndarray
        The sparse coefficients in the reconstruction.
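
    Notes
    -----
    For the IT based algorithms, ``magni.cs.reconstruction.it.config`` is
    temporarily updated as a side effect and reset again before this
    function returns.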
""" @_decorate_validation def validate_input(): _generic('algorithm', 'string') _numeric('Phi', ('integer', 'floating'), shape=(-1, -1)) _numeric('Psi', ('integer', 'floating'), shape=(-1, -1)) _numeric('measurements', ('integer', 'floating'), shape=(Phi.shape[0], 1)) _numeric('W', ('integer', 'floating'), shape=(Psi.shape[1], 1)) _numeric('reconstruction_parameter', ('integer', 'floating')) @_decorate_validation def validate_output(): _numeric('reconstructed_img_vec', ('integer', 'floating'), range_='[0;1.001]', shape=(Psi.shape[1], 1)) _numeric('reconstruction_time', 'floating', range_='[0;inf)') _numeric('reconstructed_coefficients', ('integer', 'floating'), shape=(Psi.shape[1], 1)) validate_input() A = magni.utils.matrices.MatrixCollection((Phi, Psi)) if algorithm == 'iht': threshold_fixed = int(reconstruction_parameter * Phi.shape[1]) magni.cs.reconstruction.it.config.update( {'threshold': 'fixed', 'threshold_fixed': threshold_fixed}) t0 = time.time() alpha = magni.cs.reconstruction.it.run(measurements, A) t1 = time.time() elif algorithm == 'w_iht': threshold_fixed = int(reconstruction_parameter * Phi.shape[1]) magni.cs.reconstruction.it.config.update( {'threshold': 'fixed', 'threshold_fixed': threshold_fixed, 'threshold_operator': 'weighted_hard', 'threshold_weights': W}) t0 = time.time() alpha = magni.cs.reconstruction.it.run(measurements, A) t1 = time.time() elif algorithm == 'ist': threshold_fixed = int(reconstruction_parameter * Phi.shape[1]) magni.cs.reconstruction.it.config.update( {'kappa_fixed': 0.6, 'threshold_operator': 'soft', 'threshold': 'fixed', 'threshold_fixed': threshold_fixed}) t0 = time.time() alpha = magni.cs.reconstruction.it.run(measurements, A) t1 = time.time() elif algorithm == 'w_ist': threshold_fixed = int(reconstruction_parameter * Phi.shape[1]) magni.cs.reconstruction.it.config.update( {'kappa_fixed': 0.6, 'threshold': 'fixed', 'threshold_fixed': threshold_fixed, 'threshold_operator': 'weighted_soft', 'threshold_weights': W}) t0 = time.time() alpha = magni.cs.reconstruction.it.run(measurements, A) t1 = time.time() elif algorithm == 'ell_1': def Afunc(x): return A.dot(x) def Atfunc(x): return A.T.dot(x) f1 = ulb.functions.norm_l1() f2 = ulb.functions.proj_b2( epsilon=1e-3 * np.linalg.norm(measurements), y=measurements, A=Afunc, At=Atfunc, tight=False) solver = ulb.solvers.douglas_rachford() x0 = np.zeros((A.shape[1], 1)) t0 = time.time() solution = ulb.solvers.solve( # rtol=1e-3 relative change in 1-norm [f1, f2], x0, solver, maxit=magni.cs.reconstruction.it.config['iterations']) alpha = solution['sol'].reshape(-1, 1) t1 = time.time() else: raise ValueError('Invalid reconstruction algorithm: {!r}'.format( algorithm)) magni.cs.reconstruction.it.config.reset() reconstructed_img_vec = magni.imaging.visualisation.stretch_image( Psi.dot(alpha), 1.0) reconstruction_time = t1 - t0 reconstructed_coefficients = alpha validate_output() return (reconstructed_img_vec, reconstruction_time, reconstructed_coefficients) def fit_gaussian_model(training_abs_dcts, h, w, a=0.0025, initial_guess=(0.005, 0.01, 0.01)): """ Fit a Gaussian model to a set of absolute DCT domain coefficients. The fit is based on the average of the DCT domain coefficients. A sum of squares cost function is used in the fit. Parameters ---------- training_abs_dcts : list or tuple The list of DCT domain coefficients as ndarrays. h : int The height in pixels of the DCT domain coefficient ndarrays. w : int The width in pixels of the DCT domain coefficient ndarrays. 


def fit_gaussian_model(training_abs_dcts, h, w, a=0.0025,
                       initial_guess=(0.005, 0.01, 0.01)):
    """
    Fit a Gaussian model to a set of absolute DCT domain coefficients.

    The fit is based on the average of the DCT domain coefficients. A sum of
    squares cost function is used in the fit.

    Parameters
    ----------
    training_abs_dcts : list or tuple
        The list of DCT domain coefficients as ndarrays.
    h : int
        The height in pixels of the DCT domain coefficient ndarrays.
    w : int
        The width in pixels of the DCT domain coefficient ndarrays.
    a : float
        The scale factor in the Gaussian model.
    initial_guess : list or tuple
        The initial parameter guess to pass to the optimization solver.

    Returns
    -------
    gaussian_model : ndarray
        The fitted model.

    """

    @_decorate_validation
    def validate_input():
        _numeric('h', 'integer', range_='[1;inf)')
        _numeric('w', 'integer', range_='[1;inf)')
        _levels('training_abs_dcts', (
            _generic(None, 'explicit collection'),
            _numeric(None, ('integer', 'floating'), shape=(h, w))))
        _numeric('a', 'floating', range_='(0;1]')
        _levels('initial_guess', (
            _generic(None, 'explicit collection', len_=3),
            _numeric(None, ('integer', 'floating'))))

    @_decorate_validation
    def validate_output():
        _numeric('gaussian_model', ('integer', 'floating'), shape=(h, w))
    validate_input()

    mean_dct_coefs = np.dstack(training_abs_dcts).mean(axis=2)
    xx, yy = np.meshgrid(*map(lambda n: np.linspace(0, 1, n), (h, w)))
    x = magni.imaging.mat2vec(xx)
    y = magni.imaging.mat2vec(yy)
    X = np.hstack([x, y]).T

    # 45 degree rotation matrix to align principal axes of the ellipse
    R = np.array([[np.sqrt(2) / 2, -np.sqrt(2) / 2],
                  [np.sqrt(2) / 2, np.sqrt(2) / 2]])

    def f_model(theta):
        """Gaussian function with "mean" b and "variances" c1, c2."""
        b, c1, c2 = theta
        b = np.array([[b], [b]])
        C_hat_inv = np.array([[1/c1, 0], [0, 1/c2]])
        C_inv = R.dot(C_hat_inv).dot(R.T)
        gaus_model = a * (np.exp(
            -((X - b) * C_inv.dot((X - b))).sum(axis=0))).reshape(h, w)
        return gaus_model

    def c_fun(theta):
        """Optimization cost function (sum of squared errors)."""
        return np.linalg.norm(mean_dct_coefs - f_model(theta))**2

    res = optimize.minimize(c_fun, initial_guess, method='Powell')
    assert res.success
    gaussian_model = f_model(res.x)
    validate_output()

    return gaussian_model


def get_tasks():
    """
    Construct the task setup, i.e. the list of dicts of tasks for the
    workers.

    """

    # Combinations to test
    images = tuple(['image_{}.mi'.format(k) for k in range(17)])
    undersampling_ratios = np.linspace(0.10, 0.40, 20)
    sparsity_levels = np.linspace(0.05, 0.20, 20)
    reconstruction_algorithms = ('iht', 'w_iht', 'ist', 'w_ist', 'ell_1')

    # Resulting tasks
    it_tasks = [{'images': images,
                 'image': image,
                 'delta': undersampling_ratio,
                 'reconstruction_parameter': sparsity_level,
                 'reconstruction_algorithm': reconstruction_algorithm}
                for image in images
                for undersampling_ratio in undersampling_ratios
                for sparsity_level in sparsity_levels
                for reconstruction_algorithm in reconstruction_algorithms
                if reconstruction_algorithm != 'ell_1']
    ell_1_tasks = [{'images': images,
                    'image': image,
                    'delta': undersampling_ratio,
                    'reconstruction_parameter': 0.0,
                    'reconstruction_algorithm': reconstruction_algorithm}
                   for image in images
                   for undersampling_ratio in undersampling_ratios
                   for reconstruction_algorithm in reconstruction_algorithms
                   if reconstruction_algorithm == 'ell_1']
    tasks = ell_1_tasks + it_tasks

    # Group structure
    group_structure = tuple(['image', 'undersampling_ratio',
                             'reconstruction_parameter',
                             'reconstruction_algorithm'])

    return group_structure, tasks
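

# For reference, the combinations above amount to 17 images * 20
# undersampling ratios * 20 sparsity levels * 4 IT algorithms = 27200 IT
# tasks plus 17 images * 20 undersampling ratios = 340 ell_1 tasks, i.e.
# 27540 tasks in total.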
""" image = tb.StringCol(itemsize=11, pos=0) delta = tb.Float64Col(pos=1) reconstruction_parameter = tb.Float64Col(pos=2) reconstruction_algorithm = tb.StringCol(itemsize=5, pos=3) ssim = tb.Float64Col(pos=4) psnr = tb.Float64Col(pos=5) time = tb.Float64Col(pos=6) magni.reproducibility.io.create_database(h5_path) with magni.utils.multiprocessing.File(h5_path, mode='a') as h5file: sim_group = h5file.create_group('/', 'simulation_results') h5file.create_table(sim_group, 'metrics', description=ReconMetrics, expectedrows=num_rows) def _fix_str_representation(string): return string.replace('.', '_').replace('-', '__') # Run the simulation if __name__ == '__main__': """ This script is designed to run on a compute server featuring several processing units (CPUs/cores). It will automatically divide the tasks among the available processosing units. To run the simulation simply execute .. code:: bash python weighted_it_reconstructions.py """ # Setup result_folder = './results/' if not os.path.isdir(result_folder): os.makedirs(result_folder) img_folder = './' h5_name = 'weighted_it_reconstructions.hdf5' print('SETUP') print('='*78) print('Results folder: {}'.format(result_folder)) print('Database name: {}'.format(h5_name)) print('Magni IT configuration') pprint(dict(magni.cs.reconstruction.it.config.items())) # Allow validate once in Magni magni.utils.validation.enable_validate_once() # Get tasks group_structure, tasks = get_tasks() h5_path = result_folder + h5_name create_database(h5_path, len(tasks)) kwargs = [{'img_folder': img_folder, 'result_folder': result_folder, 'h5_name': h5_name, 'task': task} for task in tasks] print('Total number of simulation tasks: {}'.format(len(tasks))) # Setup magni multiprocessing magni.utils.multiprocessing.config.update({ 'workers': psutil.cpu_count(logical=True), 'prefer_futures': True, 're_raise_exceptions': True, 'max_broken_pool_restarts': 10}) print('Magni multiprocessing config:') pprint(dict(magni.utils.multiprocessing.config.items())) # Store additional metadata pyunlocbox_path = '/'.join( os.path.dirname(os.path.realpath(ulb.__file__)).split('/')[:-1]) with magni.utils.multiprocessing.File(h5_path, mode='a') as h5_file: # Group structure magni.reproducibility.io.write_custom_annotation( h5_file, 'group_structure', group_structure, annotations_sub_group='parameter_values') # PyUNLocBoX magni.reproducibility.io.write_custom_annotation( h5_file, 'pyunlocbox_git_revision', magni.reproducibility.data.get_git_revision( git_root_dir=pyunlocbox_path)) # Image folder files magni.reproducibility.io.write_custom_annotation( h5_file, 'img_folder_path', img_folder, annotations_sub_group='img_folder_file_hashes') for file_ in next(os.walk(img_folder))[2]: if '~' in file_: continue else: file_path = os.path.join(img_folder, file_) magni.reproducibility.io.write_custom_annotation( h5_file, _fix_str_representation(file_), magni.reproducibility.data.get_file_hashes(file_path), annotations_sub_group='img_folder_file_hashes') # Run simulation magni.utils.multiprocessing.process( run_simulation_task, kwargs_list=kwargs, maxtasks=1) # Write end time annotation with magni.utils.multiprocessing.File(h5_path, mode='a') as h5_file: magni.reproducibility.io.write_custom_annotation( h5_file, 'end_time', magni.reproducibility.data.get_datetime()) print('END of simulation') print('='*78) print('\n')