Binary file added .DS_Store
Binary file not shown.
Empty file added .Rhistory
Empty file.
Binary file added ._CMakeLists.txt
Binary file not shown.
Binary file added ._README.md
Binary file not shown.
Empty file modified .gitignore
100644 → 100755
Empty file.
3 changes: 2 additions & 1 deletion CMakeLists.txt
100644 → 100755
@@ -2,8 +2,9 @@ project (rh-scripts)

cmake_minimum_required (VERSION 2.8)

+#/opt/bin was changed to /homes/kovacs/toolbox for local installation
if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
-  set (CMAKE_INSTALL_PREFIX "/opt/caai" CACHE PATH "default install path" FORCE )
+  set (CMAKE_INSTALL_PREFIX "/homes/kovacs/toolbox" CACHE PATH "default install path" FORCE )
endif()

# Add python-lib
7 changes: 6 additions & 1 deletion README.md
100644 → 100755
@@ -35,6 +35,8 @@ If your training is interrupted, you can resume from last saved checkpoint by ju
again, it will automatically resume training if you did not modify any of the parameters in the config.

## HOW TO INSTALL
+Ensure CMakeLists.txt is set to copy to a folder that you can write to, e.g. change '/opt/bin' to your own toolbox location, '/homes/*username*/toolbox'.

```
mkdir build
cd build
cmake ..
make install
```
## POST INSTALLATION
Add "source /opt/caai/toolkit-config.sh" to .bashrc / .bash_profile
In the above replace /opt/caai/ with the location where cnn toolbox is intalled, for instance "/toolbox/".

## KNOWN ISSUES

@@ -80,7 +83,8 @@ from CAAI.losses import rmse

cnn = CNN(model_name='v1',
          data_pickle='/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc/data_6fold.pickle',
-          data_folder='/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc'
+          data_folder='/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc',
+          network_architecture='custom'  # selects the custom-network hook in train.py
          )
cnn.data_loader = DataGenerator(cnn.config)

@@ -114,3 +118,4 @@ cnn.compile_network()
cnn.train()
```
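Note: with network_architecture='custom', build_network() in train.py (see the hunk further down) also requires cnn.custom_network_architecture to be set before the network is built. A minimal sketch, assuming networks is importable from the CAAI package like the losses module above:
```
from CAAI import networks

# Hypothetical wiring: any callable with signature f(inputs, config) -> outputs fits the hook.
cnn.custom_network_architecture = networks.unet_8_slice
```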


Empty file modified bin/merge_tensorboard_logs.py
100644 → 100755
Empty file.
Binary file added pythontoolkit/._CMakeLists.txt
Binary file not shown.
Binary file added pythontoolkit/._jaj_plot.py
Binary file not shown.
1 change: 1 addition & 0 deletions pythontoolkit/CMakeLists.txt
100644 → 100755
Original file line number Diff line number Diff line change
@@ -11,6 +11,7 @@ INSTALL(PROGRAMS
networks.py
train.py
predict.py
+jaj_plot.py

DESTINATION CAAI )

33 changes: 33 additions & 0 deletions pythontoolkit/jaj_plot.py
@@ -0,0 +1,33 @@
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
import os

plt.close('all')

def jaj_plot(im, save_plot=1, mapc=0, lim=0, alph=1, axis=1, grid=1, colorbar=1, colorbar_lab=' '):
    if lim == 0:
        lim = im.min(), im.max()
        print(lim)
    elif lim == "soft":
        lim = -135, 215  # soft-tissue window
        print(lim)
    if mapc == 0:
        mapc = "gray"
        print(mapc)
    fov = 500.
    v = (0, round(((fov/512)*im.shape[1])), 0, round(((fov/512)*im.shape[0])))  # rescaling the axes
    ax = plt.imshow(im, cmap=mapc, clim=lim, alpha=alph, extent=v)
    if colorbar == 1:
        cbar = plt.colorbar(ax)
        cbar.set_label(colorbar_lab)  # , rotation = 90)
    # set axes
    plt.grid(color='gray', alpha=0.5, linestyle='-', linewidth=1.5)
    if axis == 0:
        plt.axis('off')
    if grid == 0:
        plt.grid(False)
    if save_plot == 1:
        print('saving plot at ' + os.getcwd())
        plt.savefig(fname='test.png')
    return ax
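A hypothetical usage sketch for the new helper (the synthetic image and the HU label below are illustrative, not part of the diff):
```
import numpy as np
from jaj_plot import jaj_plot

# Synthetic CT-like slice spanning the soft-tissue window used above
im = np.random.uniform(-135, 215, size=(512, 512))
jaj_plot(im, save_plot=0, lim="soft", colorbar_lab="HU")
```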
15 changes: 15 additions & 0 deletions pythontoolkit/losses.py
100644 → 100755
@@ -1,6 +1,21 @@
from keras import backend as K
import numpy as np

def rmse(y_true, y_pred):
    return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))

def dice(y_true, y_pred):
    # Soerensen-Dice overlap with additive smoothing to avoid division by zero
    smooth = 1
    print('Calculating dice coefficient')
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)

def dice_coef_loss(y_true, y_pred):
    # Negated dice: maximising overlap minimises the loss
    print(K.int_shape(y_pred))
    print(K.int_shape(y_true))
    return -dice(y_true, y_pred)
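A quick sanity check of the dice coefficient, assuming a TensorFlow-backed Keras where K.constant and K.eval are available:
```
import numpy as np
from keras import backend as K
from losses import dice

y_true = K.constant(np.array([0., 1., 1., 0.]))
y_pred = K.constant(np.array([0., 1., 0., 0.]))
# (2*1 + 1) / (2 + 1 + 1) = 0.75 with smooth = 1
print(K.eval(dice(y_true, y_pred)))
```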
76 changes: 67 additions & 9 deletions pythontoolkit/networks.py
100644 → 100755
@@ -38,26 +38,84 @@ def convt_block(layer, concat, fsize):
        layer = concatenate([layer, concat], axis=-1)
        return layer

+    dropout = [.1,.1,.2,.2,.3,.3,.2,.2,.1]
+
    # ENCODING
-    block1, dblock1 = conv_block(X,f,.1)
-    block2, dblock2 = conv_block(dblock1,f*2**1,.1)
-    block3, dblock3 = conv_block(dblock2,f*2**2,.2)
-    block4, dblock4 = conv_block(dblock3,f*2**3,.2)
-    block5, _ = conv_block(dblock4,f*2**4,.3,downsample=False)
+    block1, dblock1 = conv_block(X,f,dropout[0])
+    block2, dblock2 = conv_block(dblock1,f*2**1,dropout[1])
+    block3, dblock3 = conv_block(dblock2,f*2**2,dropout[2])
+    block4, dblock4 = conv_block(dblock3,f*2**3,dropout[3])
+    block5, _ = conv_block(dblock4,f*2**4,dropout[4],downsample=False)

    # DECODING
    block7 = convt_block(block5,block4,f*2**3)
-    block8, _ = conv_block(block7,f*2**3,.3,downsample=False)
+    block8, _ = conv_block(block7,f*2**3,dropout[5],downsample=False)

    block9 = convt_block(block8,block3,f*2**2)
-    block10, _ = conv_block(block9,f*2**2,.2,downsample=False)
+    block10, _ = conv_block(block9,f*2**2,dropout[6],downsample=False)

    block11 = convt_block(block10,block2,f*2**1)
-    block12, _ = conv_block(block11,f*2**1,.2,downsample=False)
+    block12, _ = conv_block(block11,f*2**1,dropout[7],downsample=False)

    block13 = convt_block(block12,block1,f)
-    block14, _ = conv_block(block13,f,.1,downsample=False)
+    block14, _ = conv_block(block13,f,dropout[8],downsample=False)

    output = Conv3D(dims_out,kernel_size=3, kernel_regularizer=regularizers.l2(1e-1),
                    kernel_initializer='he_normal', padding='same',strides=1, activation='relu')(block14)
    return output
def unet_8_slice(X, config):
    print('---------------------')
    print('---------------------')
    print('Running unet_8_slice')
    print('---------------------')
    print('---------------------')
    f, dims_out = config['n_base_filters'], config['output_channels']

    def conv_block(layer, fsize, dropout, downsample=True):
        for i in range(1, 3):
            layer = Conv3D(fsize, kernel_size=3, kernel_regularizer=regularizers.l2(1e-1),
                           kernel_initializer='he_normal', padding='same', strides=1)(layer)
            layer = BatchNormalization()(layer)
            layer = Activation('relu')(layer)
            layer = Dropout(dropout)(layer)
        if downsample:
            downsample = Conv3D(fsize*2, kernel_size=3, kernel_regularizer=regularizers.l2(1e-1),
                                kernel_initializer='he_normal', padding='same', strides=2)(layer)
            downsample = BatchNormalization()(downsample)
            downsample = Activation('relu')(downsample)
        return layer, downsample

    def convt_block(layer, concat, fsize):
        layer = Conv3DTranspose(fsize, kernel_size=3, kernel_regularizer=regularizers.l2(1e-1),
                                kernel_initializer='he_normal', padding='same', strides=2)(layer)
        layer = BatchNormalization()(layer)
        layer = Activation('relu')(layer)
        layer = concatenate([layer, concat], axis=-1)
        return layer

    # Dropout values
    dropout = [.1, .1, .2, .3, .2, .2, .1]

    # ENCODING
    block1, dblock1 = conv_block(X, f, dropout[0])
    block2, dblock2 = conv_block(dblock1, f*2**1, dropout[1])
    block3, dblock3 = conv_block(dblock2, f*2**2, dropout[2])
    block4, _ = conv_block(dblock3, f*2**3, dropout[3], downsample=False)

    # DECODING
    block5 = convt_block(block4, block3, f*2**2)
    block6, _ = conv_block(block5, f*2**2, dropout[4], downsample=False)

    block7 = convt_block(block6, block2, f*2**1)
    block8, _ = conv_block(block7, f*2**1, dropout[5], downsample=False)

    block9 = convt_block(block8, block1, f)
    block10, _ = conv_block(block9, f, dropout[6], downsample=False)

    block11 = Conv3D(dims_out, kernel_size=3, kernel_regularizer=regularizers.l2(1e-1),
                     kernel_initializer='he_normal', padding='same', strides=1, activation='relu')(block10)

    output = Activation('sigmoid')(block11)

    return output
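Since unet_8_slice downsamples three times with stride 2, each input dimension should be divisible by 8 for the skip concatenations to line up; with the 8-slice patches used in main.py the bottleneck collapses to a single slice, hence the name. A small arithmetic check (illustrative only):
```
x, y, z = 256, 256, 8   # patch shape passed to CNN in main.py below
for _ in range(3):      # three stride-2 encoder downsamplings
    x, y, z = x // 2, y // 2, z // 2
print(x, y, z)          # 32 32 1 at the bottleneck
```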

Empty file modified pythontoolkit/predict.py
100644 → 100755
Empty file.
5 changes: 3 additions & 2 deletions pythontoolkit/train.py
100644 → 100755
@@ -148,8 +148,9 @@ def build_network(self,inputs=None):
            outputs = networks.unet(inputs,f=self.config['n_base_filters'],dims_out=self.config['output_channels'])

        elif self.config['network_architecture'] == 'custom' and self.custom_network_architecture is not None:
-            outputs = self.custom_network_architecture(inputs,config=self.config)
-
+            #TO_DO: generalize for all custom networks. Fix "config=self.config"
+            outputs = self.custom_network_architecture(inputs,config=self.config)  # config=self.config was exchanged for f=self.config['n_base_filters'],dims_out=self.config['output_channels']
+            #outputs = networks.unet_8_slice(inputs,f=self.config['n_base_filters'],dims_out=self.config['output_channels'])
        else:
            print("You are using a network that I don't know...")
            exit(-1)
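The TO_DO above hints at generalizing the hook; one common pattern is a name-to-builder registry. A sketch only, reusing the names from this hunk (the registry itself is hypothetical, not part of the PR):
```
# Inside build_network; 'networks' is already imported by train.py.
ARCHITECTURES = {
    'unet': lambda inputs, config: networks.unet(
        inputs, f=config['n_base_filters'], dims_out=config['output_channels']),
    'unet_8_slice': networks.unet_8_slice,
}

builder = ARCHITECTURES.get(self.config['network_architecture'],
                            self.custom_network_architecture)
if builder is None:
    raise ValueError('Unknown network architecture')
outputs = builder(inputs, self.config)
```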
Empty file modified pythontoolkit/version.py
100644 → 100755
Empty file.
6 changes: 6 additions & 0 deletions reinstall.sh
@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Reinstall the CNN package by running ./reinstall.sh from the repository root
mkdir -p build
cd build
cmake ..
make install
Binary file added scripts/._main.py
Binary file not shown.
Empty file modified scripts/data_generator.py
100644 → 100755
Empty file.
Empty file modified scripts/generate_data_pickle.py
100644 → 100755
Empty file.
66 changes: 43 additions & 23 deletions scripts/main.py
100644 → 100755
@@ -5,20 +5,36 @@
from data_generator import DataGenerator
import pyminc.volumes.factory as pyminc

+#Project-specific parameters - must be set for each project
+data_base_path = '/Volumes/my_passport/project_data/lymphoma-auto-contouring/patient_data_preprocessed/'
+pickle_file = 'data_2fold.pickle'
+x = 256  # image size in x-direction
+y = 256  # image size in y-direction
+z = 8  # number of input slices fed to the network
+d = 2  # number of input channels
+n_slices = 111  # total number of slices in the series
+binary_output = 1  # set to 1 to threshold the output into a binary mask (for mask prediction)

+#Must be set for each model trained
+n_k_fold = 0  # change for each k-fold training in the same version
+version_num = 1  # change for each new version where new hyperparameters are used

def train_v1():

-    cnn = CNN(model_name='v1',
-              input_patch_shape=(128,128,16),
-              input_channels=2,
+    cnn = CNN(model_name='v'+str(version_num),
+              input_patch_shape=(x,y,z),
+              input_channels=d,
              output_channels=1,
              batch_size=2,
              epochs=2000,
              learning_rate=1e-4,
              checkpoint_save_rate=50,
              loss_functions=[['mean_absolute_error',1]],
-              data_pickle='/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc/data_6fold.pickle',
-              data_folder='/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc',
-              data_pickle_kfold=1
+              data_pickle=data_base_path+pickle_file,
+              data_folder=data_base_path,
+              data_pickle_kfold=n_k_fold,
+              network_architecture='unet_8_slice',  # more archs can be added via networks.py and "def build_network" in train.py
+              n_base_filters=64  # can for instance be 64 or 32
              )

    # Attach generator
@@ -39,36 +55,40 @@ def predict(modelh5name, model_name=None):
    if model_name:
        modelbasename = model_name

-    summary = pickle.load( open('/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc/data_6fold.pickle', 'rb') )
-    for pt in summary['valid_1']:
+    summary = pickle.load( open(data_base_path+pickle_file, 'rb') )
+    for pt in summary['valid_'+str(n_k_fold)]:
        predict_patient(pt,model,modelbasename)

def predict_patient(pt,model,modelbasename):
-    _lowdose_name = "FDG_01_SUV.mnc"
-    data_folder = '/users/claes/projects/LowdosePET/PETrecon/HjerteFDG_mnc'
-    fname_dat = os.path.join(data_folder,pt,'dat_01_suv_ctnorm_double.npy')
+    _predictor_fname = pt+"AVG_TrueX1-6_reshaped.mnc"  # container that the predicted data is inserted into (project specific)
+    fname_dat = os.path.join(data_base_path,pt,'minc/pet-ct.npy')
    dat = np.memmap(fname_dat, dtype='double', mode='r')
-    dat = dat.reshape(128,128,-1,2)
+    dat = dat.reshape(x,y,-1,2)

    print("Predicting volume for %s" % pt)
-    predicted = np.empty((111,128,128))
-    x = 128
-    y = 128
-    z = 16
-    d = 2
-    for z_index in range(int(z/2),111-int(z/2)):
+    predicted = np.empty((n_slices,x,y))
+    for z_index in range(int(z/2),n_slices-int(z/2)):
        predicted_stack = model.predict(dat[:,:,z_index-int(z/2):z_index+int(z/2),:].reshape(1,x,y,z,d))
        if z_index == int(z/2):
            for ind in range(int(z/2)):
-                predicted[ind,:,:] = predicted_stack[0,:,:,ind].reshape(128,128)
-        if z_index == 111-int(z/2)-1:
+                predicted[ind,:,:] = predicted_stack[0,:,:,ind].reshape(x,y)
+        if z_index == n_slices-int(z/2)-1:
            for ind in range(int(z/2)):
-                predicted[z_index+ind,:,:] = predicted_stack[0,:,:,int(z/2)+ind].reshape(128,128)
-        predicted[z_index,:,:] = predicted_stack[0,:,:,int(z/2)].reshape(128,128)
+                predicted[z_index+ind,:,:] = predicted_stack[0,:,:,int(z/2)+ind].reshape(x,y)
+        predicted[z_index,:,:] = predicted_stack[0,:,:,int(z/2)].reshape(x,y)
    predicted_full = predicted
    predicted_full += np.swapaxes(np.swapaxes(dat[:,:,:,0],2,1),1,0)

-    out_vol = pyminc.volumeLikeFile(os.path.join(data_folder,pt,_lowdose_name),os.path.join(data_folder,pt,'predicted_'+modelbasename+'_'+_lowdose_name))
+    # Create a minc file for the predicted data.
+    out_vol = pyminc.volumeLikeFile(os.path.join(data_base_path,pt,'minc',_predictor_fname),os.path.join(data_base_path,pt,'predicted_'+modelbasename+'_'+_predictor_fname))

+    # thresholding (use if you need binary output)
+    if binary_output == 1:
+        thres = 0.5
+        predicted_full[predicted_full[...,0] > thres] = 1
+        predicted_full[predicted_full[...,0] <= thres] = 0
+
+    # save the data
    out_vol.data = predicted_full
    out_vol.writeFile()
    out_vol.closeVolume()
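The z-window arithmetic in predict_patient is easy to mis-read; a standalone toy check with the constants above (it also suggests the top slice, index n_slices-1, is never written and keeps whatever np.empty left there):
```
n_slices, z = 111, 8
centers = range(z // 2, n_slices - z // 2)  # z_index values visited by the loop
print(min(centers), max(centers))           # 4 106
# first window: dat[..., 0:8]; last window: dat[..., 102:110]
# edge fill: slices 0-3 from the first stack, 106-109 from the last
```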
Empty file modified toolkit-config.sh.cmake
100644 → 100755
Empty file.