jpdefrutos committed
Commit c7383ff · 1 Parent(s): a3cbfc7

Refactored the package LOGGER


Changed the log messages when printing the metrics
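The new import refers to a shared DeepDeformationMapRegistration.utils.logger module whose contents are not part of this commit. A minimal sketch of what such a module could look like, assuming a plain stdlib logging setup (the handler and format below are illustrative, not taken from the repository):

# DeepDeformationMapRegistration/utils/logger.py -- assumed contents, not shown in this diff
import logging

LOGGER = logging.getLogger('DeepDeformationMapRegistration')
LOGGER.setLevel(logging.INFO)

if not LOGGER.handlers:
    _handler = logging.StreamHandler()
    _handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
    LOGGER.addHandler(_handler)

With a single shared LOGGER, every module that imports it writes to the same logger instead of creating per-module loggers via logging.getLogger(__name__), which is what this commit removes from main.py.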

DeepDeformationMapRegistration/main.py CHANGED
@@ -30,11 +30,10 @@ from DeepDeformationMapRegistration.ms_ssim_tf import MultiScaleStructuralSimila
 from DeepDeformationMapRegistration.utils.operators import min_max_norm
 from DeepDeformationMapRegistration.utils.misc import resize_displacement_map
 from DeepDeformationMapRegistration.utils.model_utils import get_models_path, load_model
+from DeepDeformationMapRegistration.utils.logger import LOGGER
 
 from importlib.util import find_spec
 
-LOGGER = logging.getLogger(__name__)
-
 
 def rigidly_align_images(image_1: str, image_2: str) -> nib.Nifti1Image:
     """
@@ -290,7 +289,7 @@ def main():
     mse_tf = vxm.losses.MSE().loss(fix_img_ph, pred_img_ph)
     ms_ssim_tf = MultiScaleStructuralSimilarity(max_val=1., filter_size=3).metric(fix_img_ph, pred_img_ph)
 
-    LOGGER.info(f'Using model: {"Brain" if args.anatomy == "B" else "Liver"} -> {args.model}')
+    LOGGER.info(f'Getting model: {"Brain" if args.anatomy == "B" else "Liver"} -> {args.model}')
     MODEL_FILE = get_models_path(args.anatomy, args.model, os.getcwd())  # MODELS_FILE[args.anatomy][args.model]
 
     network, registration_model = load_model(MODEL_FILE, False, True)
@@ -345,11 +344,11 @@ def main():
 
     save_nifti(pred_image, os.path.join(args.outputdir, 'pred_image.nii.gz'))
     np.savez_compressed(os.path.join(args.outputdir, 'displacement_map.npz'), disp_map)
-    LOGGER.info('Predicted image (full image) and displacement map saved in: '.format(args.outputdir))
+    LOGGER.info('Predicted image and displacement map saved in: {}'.format(args.outputdir))
     LOGGER.info(f'Displacement map prediction time: {time_disp_map_end - time_disp_map_start} s')
     LOGGER.info(f'Predicted image time: {time_pred_img_end - time_pred_img_start} s')
 
-    LOGGER.info('Similarity metrics (Full image)\n------------------')
+    LOGGER.info('Similarity metrics\n------------------')
     LOGGER.info('SSIM: {:.03f}'.format(ssim))
     LOGGER.info('NCC: {:.03f}'.format(ncc))
     LOGGER.info('MSE: {:.03f}'.format(mse))
DeepDeformationMapRegistration/utils/model_utils.py CHANGED
@@ -4,6 +4,7 @@ from datetime import datetime
 from email.utils import parsedate_to_datetime, formatdate
 from DeepDeformationMapRegistration.utils.constants import ANATOMIES, MODEL_TYPES, ENCODER_FILTERS, DECODER_FILTERS, IMG_SHAPE
 import voxelmorph as vxm
+from DeepDeformationMapRegistration.utils.logger import LOGGER
 
 
 # taken from: https://lenon.dev/blog/downloading-and-caching-large-files-using-python/
@@ -39,8 +40,12 @@ def get_models_path(anatomy: str, model_type: str, output_root_dir: str):
     url = 'https://github.com/jpdefrutos/DDMR/releases/download/trained_models_v0/' + anatomy + '_' + model_type + '.h5'
     file_path = os.path.join(output_root_dir, 'models', anatomy, model_type + '.h5')
     if not os.path.exists(file_path):
+        LOGGER.info(f'Model not found. Downloading from {url}... ')
         os.makedirs(os.path.split(file_path)[0], exist_ok=True)
         download(url, file_path)
+        LOGGER.info(f'... downloaded model. Stored in {file_path}')
+    else:
+        LOGGER.info(f'Found model: {file_path}')
     return file_path
 
 
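For context, a hedged usage sketch of the refactored download path in get_models_path (the anatomy and model-type codes below are hypothetical placeholders; the accepted keys live in ANATOMIES and MODEL_TYPES in utils/constants.py, which this commit does not touch):

import os
from DeepDeformationMapRegistration.utils.model_utils import get_models_path

anatomy, model_type = 'B', 'S'  # illustrative placeholders, not verified keys

# First call: no cached .h5 file exists, so the new log lines report the download
# ("Model not found. Downloading from ..." / "... downloaded model. Stored in ...").
model_file = get_models_path(anatomy, model_type, os.getcwd())

# Second call: the file is now cached, so the else-branch logs "Found model: <file_path>".
model_file = get_models_path(anatomy, model_type, os.getcwd())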