Commit fa7f4ad3 authored by Luis Fernandez Ruiz's avatar Luis Fernandez Ruiz
Browse files

- Inclusion of a new cluster

- Inclusion of background in image generation
- regression_radius_deep: comments in code
parent 2642a741
function create_img_param(save_img_path, distance, collimation, wav, radius)
function create_img_param(save_img_path, distance, collimation, wav, bg, radius)
mono_tof = 'mono';
delta_wav = 10; %percentage
......@@ -12,7 +12,7 @@ scatter_model = 'Sphere';
params_scatter.poly_fwhm = 0;
params_scatter.contrast = 6e-6;
params_scatter.scale = 0.01;
params_scatter.background = 1;
params_scatter.background = bg;
background_model = 'Empty Cell'; %Empty Cell,Cryostat Ox7T
horiz_beam_centre_params = [0 -randi([15 35])];
horiz_beam_centre = horiz_beam_centre_params(1);
......
......@@ -16,6 +16,7 @@ else
radius_array = df(:,4);
end
prediction_array = num2str(df(:,5));
bg_array = num2str(df(:,7));
%distance = 18; % D22, distance 1-18m
mono_tof = 'mono';
......@@ -45,7 +46,8 @@ for a = 1:dims(1)
collimation = col_array(a);
wav = wav_array(a);
params_scatter.radius = radius_array(a);
fprintf('Former prediction %s\n', former_predict)
params_scatter.background = bg_array(a);
fprintf('Former prediction %i\n', former_predict)
%%%%%%%calcul%%%%%%%%
params = zeros(1,39);
if strcmp(instrument,'d11')
......@@ -150,7 +152,7 @@ for a = 1:dims(1)
end
%filename = ['/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/scatter_image/Sphere' background_model '_col' num2str(collimation) '_dist' num2str(distance) '_radius' num2str(params_scatter.radius) '_poly' num2str(params_scatter.poly_fwhm) '_contrast' num2str(params_scatter.contrast) '_scale' num2str(params_scatter.scale) '_bg' num2str(params_scatter.background) '_wav' num2str(wav) '_thickness' num2str(thickness) '_beamcenter' num2str(horiz_beam_centre) '.txt'];
filename2 = [save_img_path, former_predict, '/Sphere' background_model '_col' num2str(collimation) '_dist' num2str(distance) '_radius' num2str(params_scatter.radius) '_poly' num2str(params_scatter.poly_fwhm) '_contrast' num2str(params_scatter.contrast) '_scale' num2str(params_scatter.scale) '_bg' num2str(params_scatter.background) '_wav' num2str(wav) '_thickness' num2str(thickness) '_beamcenter' num2str(horiz_beam_centre) '.jpg'];
filename2 = [save_img_path, '/', former_predict, '/Sphere' background_model '_col' num2str(collimation) '_dist' num2str(distance) '_radius' num2str(params_scatter.radius) '_poly' num2str(params_scatter.poly_fwhm) '_contrast' num2str(params_scatter.contrast) '_scale' num2str(params_scatter.scale) '_bg' num2str(params_scatter.background) '_wav' num2str(wav) '_thickness' num2str(thickness) '_beamcenter' num2str(horiz_beam_centre) '.jpg'];
%filename3 = ['/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/params/Sphere' background_model '_col' num2str(collimation) '_dist' num2str(distance) '_radius' num2str(params_scatter.radius) '_poly' num2str(params_scatter.poly_fwhm) '_contrast' num2str(params_scatter.contrast) '_scale' num2str(params_scatter.scale) '_bg' num2str(params_scatter.background) '_wav' num2str(wav) '_thickness' num2str(thickness) '_beamcenter' num2str(horiz_beam_centre) '.txt'];
data = abs(output.data1(:,:,1));
......
......@@ -8,7 +8,7 @@
------------------------------------------------------
Author: FERNANDEZ RUIZ Luis
Modify: 20/05/2019
Last modified: 20/05/2019
File: img_process.py
*********************************************************************************************
......
......@@ -8,8 +8,8 @@
------------------------------------------------------
Author: Tensorflow
Modify by: FERNANDEZ RUIZ Luis
Modify: 06/05/2019
Modified by: FERNANDEZ RUIZ Luis
Last modified : 06/05/2019
File: label_classify_image_folder.py
*************************************************************
......@@ -135,7 +135,7 @@ if __name__ == "__main__":
model_file = \
"tensorflow/examples/label_image/data/inception_v3_2016_08_28_frozen.pb"
label_file = "tensorflow/examples/label_image/data/imagenet_slim_labels.txt"
input_height = 299# 224 #299
input_height = 299 #224 #299
input_width = 299 #224 #299
input_mean = 0
input_std = 255
......@@ -190,7 +190,7 @@ if __name__ == "__main__":
# Open file and write header
out_f = open(results_path, "w")
results_file = csv.writer(out_f)
l = ["dist", "col", "wav", "radius", "predicted", "mean_prediction"]
l = ["dist", "col", "wav", "radius", "predicted", "mean_prediction", "bg"]
results_file.writerow(l)
# Create a variable for measuring the progress (percentage of labeled images)
......@@ -236,8 +236,9 @@ if __name__ == "__main__":
dist_value = obtain_parameter_val(file_name, "dist")
wav_value = obtain_parameter_val(file_name, "wav")
radius_value = obtain_parameter_val(file_name, "radius")
bg_value = obtain_parameter_val(file_name, "bg")
# ... we write them in the logs file
l = [dist_value, col_value, wav_value, radius_value, str(predicted), mean_prediction]
l = [dist_value, col_value, wav_value, radius_value, str(predicted), mean_prediction, bg_value]
results_file.writerow(l)
# We move the file to a child folder corresponding to the predicted class
......@@ -248,7 +249,7 @@ if __name__ == "__main__":
if count_f%100 == 0:
print("Labelling of images progress: %i of %i. %i%%" % (count_f, total_f, (100*count_f)//total_f))
# Close the file once we have finished the loop
# # Close the file once we have finished the loop
out_f.close()
# End timer. Print relevant time info.
......
......@@ -13,7 +13,7 @@
------------------------------------------------------
Author: FERNANDEZ RUIZ Luis
Modify: 06/05/2019
Last modified: 06/05/2019
File: main_SANS.py
*************************************************************
......@@ -30,42 +30,46 @@ import datetime
from time import strftime
# Define paths
img_path = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531/"
results_file = "results.csv"
suggest_file = "suggest_useless.csv"
tmp_folder = "tmp"
save_img_sug_folder = "Classif_by_categ_useless/"
scatter_model = "Sphere" # "Core-Shell Sphere" or "Sphere"
img_path = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531_resize/"
results_file = "results_modif_guinier.csv"
suggest_file = "suggest_guinier.csv"
tmp_folder = "tmp_guinier"
save_img_sug_folder = "Classif_by_categ_guinier"
python_script_path = "/users/fernandez-ruiz/scatteringimage/python/"
matlab_path = "/users/fernandez-ruiz/scatteringimage/matlab/"
retrain_model_path = "/home/dpt/sci_share/ScatterImage/fernandez-ruiz/TF_results/" # "output_labels.py",
retrain_model_path = "/home/dpt/fernandez-ruiz/TF_Results/sphere_7_improved/" # "output_labels.py",
# "output_graph.pb" should be in this folder. (files from retrain.py)
bool_create_img_real_rad = "true" # Used in regression_radius_multip.py. Whether create imgs with real or predicted
# radius. Usually we will want the real one (true)
categ_search = "6"
categ_search = "1"
num_clusters = "8"
radius_min = "20"
radius_max = "350"
# Start timer
start = datetime.datetime.now()
# Open matlab instance
eng = matlab.engine.start_matlab()
eng.addpath(matlab_path,'-end')
eng.addpath(matlab_path, scatter_model, '-end')
print("Matlab instance open")
# # 1) GENERATE IMAGES
# eng.save_sim_sphere(img_path, nargout=0)
# print("Images successfully generated")
#
# # 2) CLASSIFY THEM and write a log file with the CNN classification
# subprocess.run(["python", join(python_script_path, "label_classify_image_folder.py"), "--dir", img_path,
# "--graph", join(retrain_model_path, "output_graph.pb"),
# "--labels", join(retrain_model_path, "output_labels.txt"), "--input_layer", "Placeholder",
# "--output_layer", "final_result", "--results_path", join(img_path, results_file)])
# print("Images well classified. Results.csv created")
#
# # 3) PLOT THE RESULTS to see if the classification looks well
# subprocess.run(["python", join(python_script_path, "plot_results.py"), "--file_name", join(img_path, results_file),
# "--jitter", "0.01", "--multiple_graphs", "True"])
#
# # 4) SUGGEST dist, col and wav for transforming previous images into the category searched
# subprocess.run(["python", join(python_script_path, "regression_radius_multip.py"),
# "--results_path", join(img_path, results_file),
......@@ -78,9 +82,10 @@ print("Matlab instance open")
# "--reg_type", "xgb",
# "--categ_search", categ_search,
# "--oneHotEncoder", "True",
# "--duration", "short",
# "--radius_min", "60",
# "--radius_max", "350"])
# "--duration", "long",
# "--radius_min", radius_min,
# "--radius_max", radius_max,
# "--num_cluster", num_clusters])
# print("Suggest.csv created")
# 5) CREATE SUGGESTED IMAGES to check if they are correct
......@@ -88,13 +93,13 @@ print("Matlab instance open")
if os.path.isdir(join(img_path, save_img_sug_folder)):
shutil.rmtree(join(img_path, save_img_sug_folder), ignore_errors=True)
os.mkdir(join(img_path, save_img_sug_folder))
for i in range(7):
for i in range(int(num_clusters)):
os.mkdir(join(img_path, save_img_sug_folder, str(i)))
eng.create_suggested_images(join(img_path, save_img_sug_folder), join(img_path, suggest_file), bool_create_img_real_rad, nargout=0)
print("Suggested images created")
# 6) CLASSIFY SUGGESTED IMAGES we have just created
for i in range(7):
for i in range(int(num_clusters)):
print("Transforming previous %i images" % (i))
subprocess.run(["python", join(python_script_path, "label_classify_image_folder.py"), "--dir", join(img_path, save_img_sug_folder, str(i)),
"--graph", join(retrain_model_path, "output_graph.pb"),
......@@ -108,4 +113,4 @@ print("Matlab instance closed")
# End timer. Print relevant time info.
end = datetime.datetime.now()
print("PROCEDURE TIME:\ninitial date: %s\nfinal date: %s\nelapsed time: %s" % (start.strftime("%Y-%m-%d %H:%M:%S"), end.strftime("%Y-%m-%d %H:%M:%S"), end - start))
\ No newline at end of file
print("PROCEDURE TIME:\ninitial date: %s\nfinal date: %s\nelapsed time: %s" % (start.strftime("%Y-%m-%d %H:%M:%S"), end.strftime("%Y-%m-%d %H:%M:%S"), end - start))
......@@ -119,8 +119,8 @@ def misclassified_images(txt_path, img_path):
decision = input() # Input 'y' if we want to change loc or 'n' if not
plt.close('all')
if decision == 'y': # If yes, we move the file to predicted location
print("File moved from: ", str(true_labels[i]), " to: ", str(predict_label[i]))
os.renames(join(img_path, str(true_labels[i]), imgs_names[i][0]), join(img_path, str(predict_label[i]), imgs_names[i][0]))
print("File moved from: ", str(true_labels[i]), " to: ", str(predict_label[i]))
elif decision == 'n': # If not, we leave the file where it is
print("File remains in folder: ", str(true_labels[i]))
elif decision == 'e': # If escape, we escape the procedure
......@@ -131,7 +131,7 @@ def misclassified_images(txt_path, img_path):
# Calling the function
txt_path = "/home/dpt/fernandez-ruiz/Desktop/Original_Misclassified.txt"
img_path = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log/training_set/"
img_path = "/home/dpt/fernandez-ruiz/sim/sim_data/ScatterImage_v4/Sphere299_v4/"
# Start timer
start = datetime.datetime.now()
......
......@@ -6,7 +6,7 @@
------------------------------------------------------
Author: FERNANDEZ RUIZ Luis
Modify: 06/05/2019
Last modified: 06/05/2019
File: MoveFiles.py
*************************************************************
......@@ -16,6 +16,8 @@ from os.path import join
import time
import csv
import datetime
import cv2
import shutil
from time import strftime
'''
......@@ -164,6 +166,7 @@ def obtain_parameter_val(file_name, parameter):
file_value = float(file_name[begin_value:end_value])
return file_value
'''
FUNCTION: from_child_to_parent
Objective: Move files from child dir to parent's
......@@ -180,12 +183,13 @@ def write_cnn_prediction(parent_dir):
# Open file and write header
out_f = open(out_file_name, "w")
results_file = csv.writer(out_f)
l = ["dist", "col", "wav", "radius", "predicted"]
l = ["dist", "col", "wav", "radius", "predicted", "mean_prediction", "bg"]
results_file.writerow(l)
start = time.time() # start a timer to measure the time
child_dir = os.listdir(parent_dir) # Get the folders in parent_dir
for child in child_dir:
if child != '0' and child != '1' and child != '2' and child != '3' and child != '4' and child != '5' and child != '6' and child != '7':
continue
if os.path.isdir(join(parent_dir, child)): # we only deal with directories
files = [f for f in os.listdir(join(parent_dir, child))] # search files in child directory
total_files = len(files)
......@@ -198,30 +202,106 @@ def write_cnn_prediction(parent_dir):
col = obtain_parameter_val(file, "col")
wav = obtain_parameter_val(file, "wav")
radius= obtain_parameter_val(file, "radius")
bg = obtain_parameter_val(file, "bg")
predicted = child
# write the information of the file in the csv
l = [dist, col, wav, radius, predicted]
l = [dist, col, wav, radius, predicted, 1, bg]
results_file.writerow(l)
count += 1
print(count, "/", total_files) # display the progress
end = time.time() # stop timer
print(end-start) # display time of execution
'''
FUNCTION: count_max_min_value_list
Objective: given a directory that contains subdirectories which inside have imgs with the following format:
"SphereEmpty Cell_col2_dist1.4_radius110_poly0_contrast6e-06_scale0.01_bg1_wav5_thickness0.1_beamcenter0.jpg"
it extracts all the possible values for each parameter, the number of unique values, the max and the min
Arguments:
input:
parent_dir (string): name of the directory which has subfolder with imgs inside
params (list of strings): name of the parameters we want to analyze. e.g. ["col", "dist", "radius", "poly", ...]
output:
void
'''
def count_max_min_value_list(parent_dir, params):
    """For each parameter name in `params`, scan every image filename found in the
    immediate subdirectories of `parent_dir`, extract that parameter's value from
    the filename (via obtain_parameter_val) and print the sorted distinct values
    together with their count, minimum and maximum.

    input:
        parent_dir (string): directory whose subfolders contain the imgs
        params (list of strings): parameter names to analyze, e.g. ["col", "dist", "radius", ...]
    output:
        void (results are printed to stdout)
    """
    # Collect the names of all images one level below parent_dir
    file_list = []
    child_dir = [d for d in os.listdir(parent_dir) if os.path.isdir(join(parent_dir, d))]
    for child in child_dir:
        files = [f for f in os.listdir(join(parent_dir, child)) if os.path.isfile(join(parent_dir, child, f))]
        file_list.extend(files)
    for param in params:
        print("-----", param, "-----")
        # Pull this parameter's value out of every filename, then dedupe and sort
        param_values = [obtain_parameter_val(file, param) for file in file_list]
        value_list = sorted(set(param_values))
        print(value_list)
        print("distinct values = ", len(value_list), "; min = ", min(value_list), "; max = ", max(value_list))
'''
FUNCTION: resize_imgs
Objective: given a directory that contains subdirectories which inside have imgs, this script resizes them into the format
specified in 'size'
Arguments:
input:
parent_dir (string): name of the directory which has subfolders with imgs inside
path_save (string): name of the directory where the resized imgs are saved
size (tuple): target size of the resized imgs, e.g. (299, 299)
output:
void
'''
def resize_imgs(parent_dir, path_save, size):
    """Resize every img found in the class subfolders '0'..'7' of `parent_dir`
    to `size`, writing each result under the matching subfolder of `path_save`.

    input:
        parent_dir (string): directory whose subfolders ('0'..'7') contain the imgs
        path_save (string): destination directory; wiped and recreated on each run
        size (tuple): target size passed to cv2.resize, e.g. (299, 299)
    output:
        void
    """
    # Start from a clean destination tree with one subfolder per class label
    if os.path.isdir(path_save):
        shutil.rmtree(path_save, ignore_errors=True)
    os.mkdir(path_save)
    for i in range(8):
        os.mkdir(join(path_save, str(i)))
    # Only the numeric class-label subfolders are processed; anything else is skipped
    class_labels = {str(i) for i in range(8)}
    child_dir = [d for d in os.listdir(parent_dir) if os.path.isdir(join(parent_dir, d))]
    for child in child_dir:
        if child not in class_labels:
            continue
        print("------------------------", child, "------------------------")
        files = [f for f in os.listdir(join(parent_dir, child)) if os.path.isfile(join(parent_dir, child, f))]
        count = 0
        for file in files:
            if count % 100 == 0:
                print("Progress: ", count, "/", len(files))
            image_ori = cv2.imread(join(parent_dir, child, file))
            image = cv2.resize(image_ori, size)
            cv2.imwrite(join(path_save, child, file), image)
            count += 1
# Call of functions
dirs = ["/home/dpt/fernandez-ruiz/Desktop/Test/mask/cluster_edges/"]
dirs = ["/home/dpt/fernandez-ruiz/Desktop/Test/ori/cluster_edge_resize/"]
params_list = ["col", "dist", "radius", "poly", "shell", "rhocore", "rhoshell", "rhomatrix", "scale", "bg"]
parent = "/home/dpt/fernandez-ruiz/sim/sim_data/CoreShellSphere/"
save = "/home/dpt/fernandez-ruiz/sim/sim_data/CoreShellSphere_resize/"
IMG_SIZE = (299, 299)
# Start timer
start = datetime.datetime.now()
resize_imgs(parent, save, IMG_SIZE)
# Iterate over the directories specified (it can be only one)
for dir in dirs:
print(dir)
from_child_to_parent(dir)
# split_by_parameter(dir, "beamcenter", -1, -50)
# count_max_min_value_list(dir, params_list)
# from_child_to_parent(dir)
# split_by_parameter(dir, "wav", 12, 12)
# write_cnn_prediction(dir)
# End timer. Print relevant time info.
end = datetime.datetime.now()
print("PROCEDURE TIME:\ninitial date: %s\nfinal date: %s\nelapsed time: %s" % (
......
......@@ -9,7 +9,7 @@
------------------------------------------------------
Author: FERNANDEZ RUIZ Luis
Modify: 06/05/2019
Last modified: 06/05/2019
File: plot_results.py
*************************************************************
......@@ -41,7 +41,7 @@ def plot_lin_model(df):
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--file_name", type=str, help="file to be processed")
parser.add_argument("--file_name", type=str, help="file to be processed (results.csv file. See gitlab)")
parser.add_argument('--jitter', type=float, default=0.01, help="noise apply to each point that allow to see points with the same x and y")
parser.add_argument('--multiple_graphs', type=str, default="False")
parser.add_argument('--num_bins', type=int, default=5, help="Number of regression lines to paint")
......@@ -65,7 +65,7 @@ if __name__ == "__main__":
radius_min = dataframe.radius.min()
radius_max = dataframe.radius.max()
target_names = ["0_bad guinier", "1_good guinier", "2_one ring", "3_two rings", "4_three rings good",
"5_three rings bad", "6_useless"]
"5_three rings bad", "6_background", "7_useless"]
if multiple_graphs=="True":
for j in range(len(target_names)):
......@@ -96,7 +96,7 @@ if __name__ == "__main__":
sizes = bins
labels = bins
colors = plt.cm.get_cmap('terrain',6)
colors = plt.cm.get_cmap('terrain', 7)
for k in range(data.predicted.nunique()):
data_interval = data.loc[data['predicted'] == k]
fit = np.polyfit(data_interval.dist, data_interval.wav, 1)
......@@ -111,7 +111,7 @@ if __name__ == "__main__":
formatter = plt.FuncFormatter(lambda val, loc: target_names[val])
# We must be sure to specify the ticks matching our target names
plt.colorbar(ticks=[0, 1, 2, 3, 4, 5, 6], format=formatter)
plt.colorbar(ticks=[0, 1, 2, 3, 4, 5, 6, 7], format=formatter)
plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05), shadow=True, ncol=num_bins)
plt.show()
......
......@@ -31,8 +31,6 @@ Arguments:
output:
file_value (float): value of the parameter
'''
def obtain_parameter_val(file_name, parameter):
# we search in 'file_name' the position where begins the value of the parameter we are looking for
begin_value = file_name.find(parameter) + len(parameter)
......@@ -64,23 +62,33 @@ Arguments:
'''
def order_priority(searched_categ):
    """Return the image categories (0-7) ordered by decreasing priority with
    respect to the category we are searching for.

    NOTE: each branch previously held two assignments — a stale pre-merge array
    immediately shadowed by the 8-category one. The dead first assignments have
    been removed; the returned values are unchanged.

    input:
        searched_categ (int): category we want to reach (0-6)
    output:
        prior_array (list of ints): the 8 categories sorted by priority
    """
    if searched_categ == 1:
        prior_array = [1, 2, 3, 0, 4, 5, 6, 7]
    elif searched_categ == 2:
        prior_array = [2, 3, 1, 4, 5, 0, 6, 7]
    elif searched_categ == 3:
        prior_array = [3, 2, 1, 4, 5, 0, 6, 7]
    elif searched_categ == 4:
        prior_array = [4, 3, 2, 5, 1, 6, 0, 7]
    elif searched_categ == 5:
        prior_array = [5, 4, 6, 7, 3, 0, 2, 1]
    elif searched_categ == 6:
        prior_array = [6, 7, 5, 0, 4, 3, 2, 1]
    elif searched_categ == 0:
        prior_array = [0, 1, 6, 7, 5, 4, 3, 2]
    return prior_array
# The correlation between the features
'''
FUNCTION: corr_heatmap
Objective: draw a heatmap to see the correlation between variables
Arguments:
input:
X: dataframe with variables we want to know the correlation
output:
void
'''
def corr_heatmap(X):
# X['Target'] = Y
# X.drop(['radius'], axis=1, inplace=True)
......@@ -91,8 +99,18 @@ def corr_heatmap(X):
plt.show()
# Load train and test data into pandas DataFrames
# Combine train and test data to process them together
'''
FUNCTION: get_data
Objective: given a dataframe, divide it into test and train sets. After, extract the dependant variable we want to predict
in each one of them. Finally, combine test and train
Arguments:
input:
dataframe: df with the results to be split
output:
combined: df with the train and test rows combined
target_train: dependant variable for the train rows
target_test: dependant variable for the test rows
'''
def get_data(dataframe):
# get train data
dataframe.drop(["col", "mean_prediction"], axis=1, inplace=True)
......@@ -112,10 +130,20 @@ def get_data(dataframe):
return combined, target_train, target_test
# One Hot Encode The Categorical Features :
'''
FUNCTION: oneHotEncode
Objective: given a list of categorical variables (colNames) from a dataframe (dataframe), convert them into dummy
Arguments:
input:
dataframe: df which we want to transform
colNames: name of columns we want to transform into categorical
output:
dataframe: initial df but transformed
'''
def oneHotEncode(dataframe, colNames):
for col in colNames:
# if (dataframe[col].dtype == np.dtype('object')):
dummies = pd.get_dummies(dataframe[col], prefix=col)
dataframe = pd.concat([dataframe, dummies], axis=1)
......@@ -124,7 +152,18 @@ def oneHotEncode(dataframe, colNames):
return dataframe
# Now, split back combined dataFrame to training data and test data
'''
FUNCTION: split_combined
Objective: split combined set (see function get_data) into test and train
Arguments:
input:
length_train (integer): length of the training set. test length is going to be (length_whole_dataset - length_train)
output:
train: dataframe
test: dataframe
'''
def split_combined(length_train):
global combined
train = combined[:length_train]
......@@ -132,12 +171,28 @@ def split_combined(length_train):
return train, test
'''
FUNCTION: make_submission
Objective: make a file with the prediction and the real radius
'''
def make_submission(df_test, prediction, sub_name):
    """Write '<sub_name>.csv' comparing the predicted radius against the real
    one for every row of df_test, including the per-row error."""
    # Signed residual: positive when the model over-predicts the radius
    residual = prediction - df_test.radius
    submission = pd.DataFrame({
        'Id': df_test.Id,
        'predict_radius': prediction,
        'real_rad': df_test.radius,
        'error': residual,
    })
    submission.to_csv('{}.csv'.format(sub_name), index=False)
    print('A submission file has been made')
'''
FUNCTION: plot_error_hist
Objective: it plots the histogram of errors together with their mean and standard deviation
Arguments:
input:
error_array (array): array of errors (real radius - predicted radius)
prediction (int): category of image we are plotting (-1 if not specified)
output:
void
'''
def plot_error_hist(error_array, prediction = -1):
# First we calculate mean and standard deviation of the list
error_mean = np.mean(error_array)
......@@ -161,31 +216,36 @@ def plot_error_hist(error_array, prediction = -1):
# Load train and test data into pandas DataFrames
root = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531/"
suggest_path = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531/suggest3.csv"
tmp_folder = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531/tmp"
root = "/home/dpt/fernandez-ruiz/sim/sim_data/Sphere/log_image/20190531_resize"
result_path = "results_modif.csv"
suggest_path = "suggest_deep.csv"
tmp_folder = "tmp_deep"
matlab_path = "/users/fernandez-ruiz/scatteringimage/matlab/"
python_script_path = "/users/fernandez-ruiz/scatteringimage/matlab/python"
retrain_model_path = "/home/dpt/fernandez-ruiz/TF_Results/" # "output_labels.py", "output_graph.pb" should be in this folder. (files from retrain.py)
retrain_model_path = "/home/dpt/fernandez-ruiz/TF_Results/sphere_7_improved/" # "output_labels.py", "output_graph.pb" should be in this folder. (files from retrain.py)
categ_search = 1
oneHotEncoder = "True"
oneHotEncoder = "False"
plot_result = "True"
duration = "short"
radius_min = 20
radius_max = 800
radius_max = 780
num_cluster = 8
# Start timer
start = datetime.datetime.now()
# Open the file where the results are written (comes from label_image.py)
df = pd.read_csv(join(root, "results.csv"))
df = pd.read_csv(join(root, result_path))
df= df[((df["radius"] > radius_min) & (df["radius"] < radius_max))]
# Open csv in which we are going to write the SUGGESTIONS of the CNN
if os.path.exists(suggest_path):
os.remove(suggest_path)
out_f = open(suggest_path, "w")
if os.path.exists(join(root, suggest_path)):
os.remove(join(root, suggest_path))
out_f = open(join(root, suggest_path), "w")
suggest_file = csv.writer(out_f)
# Write the header
l = ["dist", "col", "wav", "radius", "former CNN prediction", "radius error"]
l = ["dist", "col", "wav", "radius", "former CNN prediction", "radius error", "bg"]
suggest_file.writerow(l)
# Open a MATLAB instance
......@@ -194,11 +254,10 @@ if duration == "long":
eng.addpath(matlab_path, '-end')
# We measure the time