Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
GabrielTavernini committed Jun 24, 2019
2 parents d844874 + ee1855e commit 7e98c88
Show file tree
Hide file tree
Showing 2 changed files with 204 additions and 10 deletions.
20 changes: 10 additions & 10 deletions ImagePreprocessing.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,8 @@
},
"outputs": [],
"source": [
"work_dir = './Data'\n",
"dcm_files = [pd.read_file(os.path.join(os.path.join(work_dir,file_path), file_path2)) for file_path in os.listdir(work_dir) for file_path2 in os.listdir(os.path.join(work_dir,file_path))]"
"work_dir = './Data/Raw'\n",
"dcm_files = [pd.read_file(os.path.join(work_dir,file_path)) for file_path in os.listdir(work_dir)]"
]
},
{
Expand Down Expand Up @@ -270,21 +270,21 @@
},
"outputs": [],
"source": [
"folder_path = \"./Processed_Data\"\n",
"folder_path = \"./Data/Reg\"\n",
"for counter, dcm in enumerate(dcm_files):\n",
" metacrop_arr = metacrop(dcm)\n",
" blur_arr = gaussian_blur(metacrop_arr, 2)\n",
" blur_arr = gaussian_blur(metacrop_arr, 15)\n",
" clahe_arr = clahe(blur_arr)\n",
" canny_arr_2d = canny(clahe_arr[:,:,0], 1.5)\n",
" canny_arr = conv_2d_3d(canny_arr_2d)\n",
" #canny_arr_2d = canny(clahe_arr[:,:,0], 1.5)\n",
" #canny_arr = conv_2d_3d(canny_arr_2d)\n",
" #denoise_arr = denoising(clahe_arr)\n",
" flip_edge_arr = flip(canny_arr)\n",
" #flip_edge_arr = flip(canny_arr)\n",
" flip_arr = flip(clahe_arr)\n",
" file_path = os.path.join(folder_path, str(counter))\n",
" np.save(file_path+\".npy\", clahe_arr)\n",
" np.save(file_path+\"rev.npy\", flip_arr)\n",
" np.save(file_path+\"edge.npy\", canny_arr)\n",
" np.save(file_path+\"edgerev.npy\", flip_edge_arr)"
" #np.save(file_path+\"edge.npy\", canny_arr)\n",
" #np.save(file_path+\"edgerev.npy\", flip_edge_arr)"
]
},
{
Expand Down Expand Up @@ -351,7 +351,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
"version": "3.6.5"
}
},
"nbformat": 4,
Expand Down
194 changes: 194 additions & 0 deletions ImagePreprocessing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
import pydicom as pd
import numpy as np
import sklearn
import matplotlib.pyplot as plt
import sklearn.preprocessing as pp
import skimage.filters as filters
import skimage.feature as feature
import skimage.restoration as restoration
import skimage.exposure as exposure
import os

work_dir = './Data/Raw'
# Load every DICOM file in the raw-data directory.
# Fix: pd.read_file is a deprecated alias (removed in pydicom 3.0) — use the
# supported pd.dcmread. Sorting makes the counter-to-file mapping of the
# output .npy files deterministic across runs (os.listdir order is arbitrary).
dcm_files = [pd.dcmread(os.path.join(work_dir, file_path))
             for file_path in sorted(os.listdir(work_dir))]

def metacrop(file):
    """Crop an ultrasound frame to its imaging region using DICOM metadata.

    Args:
        file: pydicom dataset (DCM file) with pixel data and, normally, a
            SequenceOfUltrasoundRegions element describing the scan region.

    Returns:
        The pixel array cropped to the region bounds, or — when the region
        starts at x0 == 0 (bounds unusable) — the result of the color-based
        fallback crop `colorcrop2`.

    Raises:
        ValueError: if the dataset has no SequenceOfUltrasoundRegions.
    """
    # Fix: the original scanned file.dir() and left `value` unbound
    # (NameError) when the sequence was absent; access the element directly
    # and fail with an explicit error instead.
    region_seq = getattr(file, "SequenceOfUltrasoundRegions", None)
    if not isinstance(region_seq, pd.sequence.Sequence) or len(region_seq) == 0:
        raise ValueError("DICOM file has no SequenceOfUltrasoundRegions metadata")
    region = region_seq[0]
    x0 = getattr(region, "RegionLocationMinX0", None)
    x1 = getattr(region, "RegionLocationMaxX1", None)
    y0 = getattr(region, "RegionLocationMinY0", None)
    y1 = getattr(region, "RegionLocationMaxY1", None)
    print(x0, x1, y0, y1)
    if x0 == 0:
        # Region spans the full width: metadata gives no crop, fall back
        # to detecting the black side margins by color.
        return colorcrop2(file.pixel_array)
    return file.pixel_array[y0:y1, x0:x1]

def colorcrop(pixel_arr):
    """Crop black side margins by scanning outward from the middle column.

    Args:
        pixel_arr: Initial pixel array of shape (H, W, C).

    Returns:
        The array cropped between the first all-black column found on each
        side of the center, or the untouched array when either side has none.
    """
    mid_col = pixel_arr.shape[1] // 2
    print(mid_col)

    def _first_black_col(columns):
        # A column whose channel-0 mean is zero is treated as fully black.
        for col in columns:
            if np.mean(pixel_arr[:, col, 0]) == 0:
                return col
        return None

    left = _first_black_col(range(mid_col, 0, -1))
    right = _first_black_col(range(mid_col, pixel_arr.shape[1]))
    if left is None or right is None:
        return pixel_arr
    return pixel_arr[:, left:right]

def colorcrop2(pixel_arr):
    """Faster color-based crop assuming symmetric black side margins.

    Finds the first fully-black column (channel-0 column sum == 0) to the
    right of the center and crops the same distance on both sides.

    Args:
        pixel_arr: Initial pixel array of shape (H, W, C).

    Returns:
        The symmetrically cropped array, or the untouched array when no
        black column exists on the right half.
    """
    mid_col = pixel_arr.shape[1] // 2
    # Column sums of channel 0 over the right half; zero == fully black.
    col_sums = pixel_arr[:, mid_col:].sum(axis=0)[:, 0]
    black_cols = np.where(col_sums == 0)[0]
    if black_cols.size == 0:
        # Fix: the original indexed [0][0] unconditionally and raised
        # IndexError here; mirror colorcrop's fallback of returning the
        # uncropped array instead.
        return pixel_arr
    distance = black_cols[0]
    ind = mid_col - distance
    ind2 = mid_col + distance
    print(ind, ind2)
    return pixel_arr[:, ind:ind2]

def flip(img):
    """Mirror the image horizontally (left-right flip).

    Args:
        img: Initial pixel array (at least 2-D).

    Returns:
        The horizontally flipped pixel array (a view, not a copy).
    """
    # Reversing the column axis is equivalent to np.fliplr for >= 2-D input.
    return img[:, ::-1]

def gaussian_blur(img, sig):
    """Smooth the image with a Gaussian kernel.

    Args:
        img: Initial pixel array.
        sig: Kernel standard deviation (sigma).

    Returns:
        The blurred pixel array.
    """
    return filters.gaussian(img, sigma=sig)

def sobel(img):
    """Run Sobel edge detection over the image.

    Args:
        img: Initial pixel array.

    Returns:
        The edge-filtered pixel array.
    """
    edges = filters.sobel(img)
    return edges

def canny(img, sigma):
    """Run Canny edge detection over a 2-D image.

    Args:
        img: Initial 2-D pixel array.
        sigma: Gaussian smoothing width used by the detector.

    Returns:
        The edge-mask array.
    """
    edge_mask = feature.canny(img, sigma=sigma)
    return edge_mask

def denoising(img):
    """Denoise the image with wavelet shrinkage.

    Args:
        img: Initial pixel array.

    Returns:
        The denoised pixel array.
    """
    return restoration.denoise_wavelet(img)

def clahe(img):
    """Apply Contrast-Limited Adaptive Histogram Equalization (CLAHE).

    Args:
        img: Initial pixel array.

    Returns:
        The contrast-equalized pixel array.
    """
    equalized = exposure.equalize_adapthist(img)
    return equalized

def conv_2d_3d(img):
    """Replicate a 2-D grayscale array into a 3-channel (H, W, 3) array.

    Args:
        img: Initial 2-D pixel array.

    Returns:
        float64 array of shape (H, W, 3) with the input copied into every
        channel (matches the dtype of the original np.ones-based buffer).
    """
    # Fix: replaces the redundant np.array(np.ones(...)) buffer, the three
    # manual per-channel assignments, and a leftover debug print with one
    # vectorized channel-repeat.
    return np.repeat(np.asarray(img, dtype=float)[:, :, np.newaxis], 3, axis=2)

folder_path = "./Data/Reg"
# Robustness fix: create the output directory up front so np.save does not
# fail with FileNotFoundError when ./Data/Reg does not exist yet.
os.makedirs(folder_path, exist_ok=True)
for counter, dcm in enumerate(dcm_files):
    # Pipeline per frame: metadata crop -> heavy Gaussian blur (sigma=15)
    # -> CLAHE contrast enhancement.
    metacrop_arr = metacrop(dcm)
    blur_arr = gaussian_blur(metacrop_arr, 15)
    clahe_arr = clahe(blur_arr)
    # Save the processed frame plus its horizontally mirrored twin as a
    # simple augmentation. (Earlier Canny edge-map outputs were disabled;
    # the dead commented-out code has been removed.)
    flip_arr = flip(clahe_arr)
    file_path = os.path.join(folder_path, str(counter))
    np.save(file_path + ".npy", clahe_arr)
    np.save(file_path + "rev.npy", flip_arr)

0 comments on commit 7e98c88

Please sign in to comment.