Commit d528e08977568430f9e5da7ded54b5822d8256bc

Authored by Thiago Franco de Moraes
1 parent 1cdd42c8
Exists in master

Remove imgnormalize (FIX #235)

invesalius/data/imagedata_utils.py
... ... @@ -500,34 +500,6 @@ def img2memmap(group):
500 500 return matrix, scalar_range, temp_file
501 501  
502 502  
503   -def imgnormalize(data, srange=(0, 255)):
504   - """
505   - Normalize image pixel intensity for int16 gray scale values.
506   -
507   - :param data: image matrix
508   - :param srange: range for normalization, default is 0 to 255
509   - :return: normalized pixel intensity matrix
510   - """
511   -
512   - dataf = numpy.asarray(data)
513   - rangef = numpy.asarray(srange)
514   - faux = numpy.ravel(dataf).astype(float)
515   - minimum = numpy.min(faux)
516   - maximum = numpy.max(faux)
517   - lower = rangef[0]
518   - upper = rangef[1]
519   -
520   - if minimum == maximum:
521   - datan = numpy.ones(dataf.shape)*(upper + lower) / 2.
522   - else:
523   - datan = (faux-minimum)*(upper-lower) / (maximum-minimum) + lower
524   -
525   - datan = numpy.reshape(datan, dataf.shape)
526   - datan = datan.astype(numpy.int16)
527   -
528   - return datan
529   -
530   -
531 503 def get_LUT_value_255(data, window, level):
532 504 shape = data.shape
533 505 data_ = data.ravel()
... ... @@ -539,6 +511,8 @@ def get_LUT_value_255(data, window, level):
539 511 return data
540 512  
541 513  
542   -def image_normalize(image, min_=0.0, max_=1.0):
  514 +def image_normalize(image, min_=0.0, max_=1.0, output_dtype=np.int16):
  515 + output = np.empty(shape=image.shape, dtype=output_dtype)
543 516 imin, imax = image.min(), image.max()
544   - return (image - imin) * ((max_ - min_) / (imax - imin)) + min_
  517 + output[:] = (image - imin) * ((max_ - min_) / (imax - imin)) + min_
  518 + return output
... ...
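
For reference, a minimal sketch (not part of the commit) of how the reworked `image_normalize` can be exercised. The function body simply restates the post-commit diff above, and the `ct_slice` values are made-up toy data; the standalone script and variable names are illustrative only.

```python
import numpy as np

def image_normalize(image, min_=0.0, max_=1.0, output_dtype=np.int16):
    # Min-max scale `image` into [min_, max_], then cast into `output_dtype`.
    output = np.empty(shape=image.shape, dtype=output_dtype)
    imin, imax = image.min(), image.max()
    output[:] = (image - imin) * ((max_ - min_) / (imax - imin)) + min_
    return output

ct_slice = np.array([[-1000, 0], [400, 1600]], dtype=np.int16)  # toy HU-like values

# The removed imgnormalize(data, srange=(0, 255)) maps onto the new signature:
print(image_normalize(ct_slice, 0, 255, output_dtype=np.int16))

# Float output in [0.0, 1.0], as the brain segmentation caller now requests explicitly:
print(image_normalize(ct_slice, 0.0, 1.0, output_dtype=np.float32))
```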
invesalius/segmentation/brain/segment.py
... ... @@ -64,7 +64,7 @@ def brain_segment(image, probability_array, comm_array):
64 64 model.load_weights(str(folder.joinpath("model.h5")))
65 65 model.compile("Adam", "binary_crossentropy")
66 66  
67   - image = imagedata_utils.image_normalize(image, 0.0, 1.0)
  67 + image = imagedata_utils.image_normalize(image, 0.0, 1.0, output_dtype=np.float32)
68 68 sums = np.zeros_like(image)
69 69 # segmenting by patches
70 70 for completion, sub_image, patch in gen_patches(image, SIZE, OVERLAP):
... ...
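
A hedged sketch of why this call site now passes `output_dtype=np.float32`: with the new `int16` default, a `[0.0, 1.0]` normalization would be truncated to zeros and ones, which is unusable as network input. The snippet assumes an InVesalius checkout on the Python path; the `volume` array is synthetic.

```python
import numpy as np
from invesalius.data.imagedata_utils import image_normalize

volume = np.random.randint(-1024, 3072, size=(8, 8, 8)).astype(np.int16)  # toy CT-like volume

collapsed = image_normalize(volume, 0.0, 1.0)                         # int16 output: almost all zeros
usable = image_normalize(volume, 0.0, 1.0, output_dtype=np.float32)   # float input for the Keras model

print(collapsed.max(), usable.max(), usable.dtype)
```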