diff --git a/DESCRIPTION b/DESCRIPTION new file mode 100644 index 0000000..6555d70 --- /dev/null +++ b/DESCRIPTION @@ -0,0 +1,18 @@ +Package: SomaMS +Type: Package +Title: 3DSomaMS: A univocal definition of the neuronal soma morphology + using Gaussian mixture models +Version: 1.1 +Date: 2015-03-30 +Author: Sergio Luengo-Sanchez +Maintainer: Sergio Luengo-Sanchez +Description: The definition of the soma is fuzzy, as there is no clear line demarcating the soma of the labeled neurons and the origin of the dendrites and axon. Thus, the morphometric analysis of the neuronal soma is highly subjective. In this paper, we provide a mathematical definition and an automatic segmentation method to delimit the neuronal soma. We applied this method to the characterization of pyramidal cells, which are the most abundant neurons in the cerebral cortex. Since there are no benchmarks with which to compare the proposed procedure, we validated the goodness of this automatic segmentation method against manual segmentation by experts in neuroanatomy to set up a framework for comparison. We concluded that there were no significant differences between automatically and manually segmented somata, i.e., the proposed procedure segments the neurons more or less as an expert does. It also provides univocal, justifiable and objective cutoffs. Thus, this study is a means of characterizing pyramidal neurons in order to objectively compare the morphometry of the somata of these neurons in different cortical areas and species. 
+License: GPL (>= 2) +Depends: R (>= 3.0.0), mclust, Morpho, Rcpp, rgl, Rvcg, tools, + matrixStats, pracma, geometry, misc3d, bnlearn, caret +Suggests: doSNOW, foreach, gplots, parallel +LinkingTo: Rcpp +LazyLoad: yes +Archs: x64 +NeedsCompilation: yes +Packaged: 2015-10-26 17:18:50 UTC; luis diff --git a/NAMESPACE b/NAMESPACE new file mode 100644 index 0000000..0845b3b --- /dev/null +++ b/NAMESPACE @@ -0,0 +1,20 @@ +# Generated by roxygen2 (4.1.1): do not edit by hand + +export(BN_clustering) +export(BN_structure_learning) +export(MAQ_S) +export(RMSE_computation) +export(characterize.soma3d) +export(compute_meshes_volumes) +export(convert_somas_to_PLY) +export(read_VRML) +export(read_binary_PLY) +export(repair.soma3d) +export(repair_soma) +export(segment.soma3d) +export(segment_somas) +export(simulate_new_somas) +export(simulation_3D_mesh) +export(soma_caracterization) +export(wilcoxon_RMSE) +useDynLib(SomaMS) diff --git a/R/0_convert_somas_to_PLY.R b/R/0_convert_somas_to_PLY.R new file mode 100644 index 0000000..39e82bb --- /dev/null +++ b/R/0_convert_somas_to_PLY.R @@ -0,0 +1,84 @@ +#' Read VRMLs of somas +#' +#' Read all the VRMLs of somas in a folder and export them to PLY format in other folder +#' +#' @param read_directory path to the folder where VRML files are placed +#' @param write_directory path to the folder where PLY files will be saved +#' +#' @return None +#' +#' @examples +#' convert_somas_to_PLY(read_directory = system.file("test/VRMLs",package="SomaMS"), write_directory = file.path(tempdir(),"PLYs"), parallel = TRUE) +#' +#' @export +convert_somas_to_PLY <- function(read_directory, write_directory, parallel = TRUE) { + if (!file.exists(file.path(read_directory))){ + stop("The reading directory does not exist") + } + + if (!file.exists(file.path(write_directory))){ + if (write_directory == "") { + stop("The writing directory is not defined") + }else{ + warning("The writing directory does not exist, it was created automatically") + 
dir.create(write_directory, showWarning = TRUE, recursive = TRUE) + } + } + + VRML_files <- list.files(file.path(read_directory))[which(file_ext(list.files(read_directory)) == "vrml")] + + if (length(VRML_files)==0){ + warning('There are not VRMLs in the reading directory') + } + + if(!parallel) + { + ncores <- 1 + cl <- NULL + }else{ + ncores <- detectCores() + if(is.na(ncores)){ + ncores <- 1 + cl <- NULL + }else{ + ncores <- ncores - 1 + cl <- makeCluster(ncores, type = "SOCK") + registerDoSNOW(cl) + } + } + + if(!parallel){ + foreach (file = VRML_files) %do% { + # Number 1 denotes that it must be choosen that mesh with more vertices. It is useful if there are more + # than one mesh save in the same file. p.e: 1 mesh with soma+dendrites and 1 mesh with the soma + # segmented by an expert. + soma <- read_VRML(file.path(read_directory, file)) + + soma_name <- file_path_sans_ext(file) + soma_name <- gsub(" ", "_", soma_name) + + vcgPlyWrite(tmesh3d(rbind(t(soma$vertices), 1), t(soma$faces)), file.path(write_directory, soma_name)) + } + }else{ + foreach (file = VRML_files, .packages=c("Rvcg","Morpho","tools", "rgl"), .export = "read_VRML") %dopar% { + # Number 1 denotes that it must be choosen that mesh with more vertices. It is useful if there are more + # than one mesh save in the same file. p.e: 1 mesh with soma+dendrites and 1 mesh with the soma + # segmented by an expert. + soma <- read_VRML(file.path(read_directory, file)) + + soma_name <- file_path_sans_ext(file) + soma_name <- gsub(" ", "_", soma_name) + + vcgPlyWrite(tmesh3d(rbind(t(soma$vertices), 1), t(soma$faces)), file.path(write_directory, soma_name)) + } + } + + if(parallel) + { + stopCluster(cl) + } + + return(0) +} + + diff --git a/R/1_repair_soma.R b/R/1_repair_soma.R new file mode 100644 index 0000000..71e5642 --- /dev/null +++ b/R/1_repair_soma.R @@ -0,0 +1,176 @@ +adjust_faces <- function(list_vertices, actual_faces) { + # Vertices of the faces. 
+ dendritic_faces <- sort(unique(c(actual_faces))) + # The list of vertices that were removed and still are defined in the faces matrix are obtained. + vertices_deleted <- sort(setdiff(dendritic_faces, list_vertices)) + + # Those faces that have a vertex that were deleted are removed. + new_faces <- actual_faces + new_faces <- new_faces[which(!(new_faces[, 1] %in% vertices_deleted)), ] + new_faces <- new_faces[which(!(new_faces[, 2] %in% vertices_deleted)), ] + new_faces <- c(new_faces[which(!(new_faces[, 3] %in% vertices_deleted)), ]) + + # Isolated vertices, that is, those that do not belong to any face, are removed. + verticesIsolated <- which(!(list_vertices %in% unique(c(new_faces)))) + if (length(verticesIsolated) > 0) { + list_vertices <- list_vertices[-c(verticesIsolated)] + } + + + # Gaps in the numeration are removed reordering the index + new_faces <- as.numeric(factor(new_faces)) + new_faces <- matrix(new_faces, ncol = 3) + + # Return vertices and faces of the mesh. + return(list(vertices = list_vertices, faces = new_faces)) +} + +#' Repair soma morphology +#' +#' Repair the surface of the neuron to remove holes and vertices inside the mesh +#' +#' @param directory path to the folder where PLY files representing damaged soma mesh +#' @param output_ambient_occlusion path to the folder where somas with ambient occlusion are saved +#' @param broken_mesh path to the folder where somas were saved after the GMM threshold was imposed and vertices inside the soma or forming cavities were removed +#' @param output_poisson_reconstruction path to the folder where somas were saved after the mesh closing process +#' +#' @return None +#' +#' @examples +#' repair_soma(directory = file.path(tempdir(),"PLYs"), output_ambient_occlusion = file.path(tempdir(),"ambient_occlusion"), broken_mesh = file.path(tempdir(),"broken_mesh_ao"), output_poisson_reconstruction = file.path(tempdir(),"poisson_reconstruction_ao")) +#' +#' @export +repair_soma <- function(directory, 
output_ambient_occlusion, broken_mesh, output_poisson_reconstruction) { + if (!file.exists(file.path(directory))){ + stop("The reading directory does not exist") + } + + if (!file.exists(file.path(output_ambient_occlusion))){ + if (output_ambient_occlusion == "") { + stop("The ambient occlusion directory is not defined") + }else{ + warning("The ambient occlusion directory does not exist, it was created automatically") + dir.create(output_ambient_occlusion,showWarning = TRUE, recursive = TRUE) + } + } + + if (!file.exists(file.path(broken_mesh))){ + if (broken_mesh == "") { + stop("The broken mesh directory is not defined") + }else{ + warning("The broken mesh directory does not exist, it was created automatically") + dir.create(broken_mesh,showWarning=TRUE, recursive = TRUE) + } + } + + if (!file.exists(file.path(output_poisson_reconstruction))){ + if (output_poisson_reconstruction == "") { + stop("The poisson reconstruction directory is not defined") + }else{ + warning("The poisson reconstruction directory does not exist, it was created automatically") + dir.create(output_poisson_reconstruction, showWarning = TRUE, recursive = TRUE) + } + } + + PLY_files <- list.files(directory)[which(file_ext(list.files(directory)) == "ply")] + + for (file in PLY_files) { + # Extract soma name from the file. + soma_name <- file_path_sans_ext(file) + + # Execute ambient occlusion script. + output <- execute_meshlab_script(input = file.path(directory, file), output = file.path(output_ambient_occlusion,soma_name), meshlab_script="ambient_occlusion") + + # Read the .ply file generated by the script and extract vertices, faces and ambient occlusion values. + soma <- read_binary_PLY(output) + + # Clustering to differentiate surface points from interior points. 
+ quality_classification <- Mclust(soma$quality, G = 2) + surface_vertices <- which(quality_classification$classification == 2) + reparation <- adjust_faces(surface_vertices, as.matrix(soma$faces)) + adjusted_vertex <- soma$vertices[reparation$vertices, ] + + + # Export results to a ply file + vcgPlyWrite(tmesh3d(rbind(t(soma$vertices[reparation$vertices, ]), 1), t(reparation$faces)), file.path(broken_mesh,soma_name), addNormals=T) + + remove(adjusted_vertex) + remove(reparation) + + output <- execute_meshlab_script(input = paste0(file.path(broken_mesh,soma_name),".ply"), output = paste0(file.path(output_poisson_reconstruction,soma_name),".ply"), meshlab_script="poisson_reconstruction") + } + return(0) +} + +#' Repair single soma from data +#' +#' Repair the surface of the soma to remove holes and vertices inside the mesh +#' +#' @param soma A filepath or the structure returned by read_VRML or read_PLY representing damaged soma mesh @seealso \code{read_VRML} +#' @param ao.path folder where intermediate files with ambient occlusion data are saved +#' @param broken.path Folder where intermediate files with broken somas after GMM threshold are saved +#' @param repaired.path Folder where final file is stored +#' @param name Name of the final file +#' @param returnSoma +#' +#' @return Repaired soma (if returnSoma is True) +#' +#' @useDynLib SomaMS +#' +#' @export +repair.soma3d <- function( soma, ao.path = tempdir(), broken.path = tempdir(), repaired.path = tempdir(), name="soma", returnSoma = F ){ + + ### + # Validate inputs + ### + + # Validate repaired + stopifnot( length(name)>0 ) + + # Validate tempdirs + stopifnot( utils.validateDir(ao.path, pre="Ambient Occlusion", createDir = T) ) + stopifnot( utils.validateDir(broken.path, pre="Broken Mesh", createDir = T) ) + stopifnot( utils.validateDir(repaired.path, pre="Repaired Meshes", createDir = T) ) + + ###### + + # If soma is already a ply path keeps it, if its a vrml read and validates and if is a list validates. 
+ soma.path <- utils.somaToPLY(soma, paste("original",name,sep="_"), moveToLocation = F) + stopifnot(!is.null(soma.path)) + + # Call meshlab + soma.ao.path <- file.path(ao.path,paste0("ao_",name,".ply")) + ret <- execute_meshlab_script(input = soma.path, output = soma.ao.path, meshlab_script="ambient_occlusion") + if( ret != 0 ) + stop(sprintf("Error executing poisson reconstruction. Error code: %d - out: %s - in %s",ret,soma.ao.path,soma.path)) + + # Read the .ply file generated by the script and extract vertices, faces and ambient occlusion values. + soma.ao <- read_binary_PLY(soma.ao.path) + + # Clustering to differentiate surface points from interior points. + quality_classification <- Mclust(soma.ao$quality, G = 2) + surface_vertices <- which(quality_classification$classification == 2) + reparation <- adjust_faces(surface_vertices, as.matrix(soma.ao$faces)) + adjusted_vertex <- soma.ao$vertices[reparation$vertices, ] + + # Export results to a ply file + soma.broken.path <-file.path(broken.path,paste0("broken_",name)) + if( vcgPlyWrite(tmesh3d(rbind(t(soma.ao$vertices[reparation$vertices, ]), 1), t(reparation$faces)), soma.broken.path, addNormals=T) != 0) + stop("Error while writing PLY broken soma file") + ## update name name (.ply is added by vcgPlyWrite) + soma.broken.path <- paste0(soma.broken.path,".ply") + + + # Clean up a little + remove(adjusted_vertex) + remove(reparation) + + # repaired file + soma.repaired.path <- file.path(repaired.path,paste0("repaired_",name,".ply")) + ret <- execute_meshlab_script(input = soma.broken.path, output = soma.repaired.path, meshlab_script="poisson_reconstruction") + if( ret != 0) + stop(sprintf("Error executing poisson reconstruction. 
Error code: %d",ret)) + + # If returnSoma read and return it + return( ifelse(returnSoma, read_binary_PLY(soma.repaired.path), soma.repaired.path) ) +} \ No newline at end of file diff --git a/R/2_extract_soma.R b/R/2_extract_soma.R new file mode 100644 index 0000000..d8e7d58 --- /dev/null +++ b/R/2_extract_soma.R @@ -0,0 +1,169 @@ +#' Segment somas +#' +#' Apply Gaussian mixture models to segment soma from dendrites +#' +#' @param directory path to the folder where PLY files of the somas are saved +#' @param output_shape_diameter path to the folder where somas with shape diameter will be saved +#' @param broken_mesh path to the folder where somas will be saved after that the GMM threshold removes the dendrites +#' @param output_poisson_reconstruction path to the folder where somas will be saved after the mesh closing process +#' @param final_result path to the folder where final segmented somas will be saved after the isolated pieces are removed +#' @return None +#' +#' @examples +#' segment_somas(directory = file.path(tempdir(), "poisson_reconstruction_ao"), output_shape_diameter = file.path(tempdir(), "sdf"), broken_mesh = file.path(tempdir(), "broken_mesh_sdf"), output_poisson_reconstruction = file.path(tempdir(), "poisson_reconstruction_sdf"), final_result = file.path(tempdir(), "final_result")) +#' +#' @export +segment_somas <- function(directory, output_shape_diameter, broken_mesh, output_poisson_reconstruction, final_result) { + if (!file.exists(file.path(directory))){ + stop("The reading directory does not exist") + } + + if (!file.exists(file.path(output_shape_diameter))){ + if (output_shape_diameter == "") { + stop("The shape diameter funcion directory is not defined") + }else{ + warning("The shape diameter function directory does not exist, it was created automatically") + dir.create(output_shape_diameter, showWarning = TRUE, recursive = TRUE) + } + } + + if (!file.exists(file.path(broken_mesh))){ + if (broken_mesh == "") { + stop("The broken mesh 
directory is not defined") + }else{ + warning("The broken mesh directory does not exist, it was created automatically") + dir.create(broken_mesh, showWarning = TRUE, recursive = TRUE) + } + } + + if (!file.exists(file.path(output_poisson_reconstruction))){ + if (output_poisson_reconstruction == "") { + stop("The poisson reconstruction directory is not defined") + }else{ + warning("The poisson reconstruction directory does not exist, it was created automatically") + dir.create(output_poisson_reconstruction, showWarning = TRUE, recursive = TRUE) + } + } + + if (!file.exists(file.path(final_result))){ + if (final_result == "") { + stop("The final result directory is not defined") + }else{ + warning("The final result directory does not exist, it was created automatically") + dir.create(final_result, showWarning = TRUE, recursive = TRUE) + } + } + + PLY_files <- list.files(directory)[which(file_ext(list.files(directory)) == "ply")] + + for (file in PLY_files) { + soma_name <- file_path_sans_ext(file) + + output <- execute_meshlab_script(input = file.path(directory, file), output = file.path(output_shape_diameter, soma_name), meshlab_script = "shape_diameter") + + soma <- read_binary_PLY(output) + + #First GMM + quality_classification <- Mclust(soma$quality, G = 2) + soma_class <- which.min(c(mean(c(apply(soma$vertices[quality_classification$classification == 1, ], 2, sd))), mean(c(apply(soma$vertices[quality_classification$classification == 2, ], 2, sd))))) + + quality_sec_gauss <- soma$quality [quality_classification$classification == soma_class] + vertices_sec_gauss <- soma$vertices[quality_classification$classification == soma_class, ] + + #Second GMM + quality_classification <- Mclust(quality_sec_gauss, G = 2) + write_PLY_only_points(vertices_sec_gauss[quality_classification$classification == which.max(table(quality_classification$classification)), ], paste0(file.path(broken_mesh, soma_name), ".ply")) + + 
execute_meshlab_script(paste0(file.path(broken_mesh,soma_name), ".ply"), paste0(file.path(output_poisson_reconstruction, soma_name), ".ply"), "poisson_reconstruction_with_normals") + + poisson_output_mesh <- vcgPlyRead(paste0(file.path(output_poisson_reconstruction, soma_name), ".ply")) + + #Poisson reconstruction sometimes generates isolated mesh pieces that must be removed + cleaned_mesh <- vcgIsolated(poisson_output_mesh, silent=TRUE) + + vcgPlyWrite(cleaned_mesh, file.path(final_result, soma_name)) + } +} + + +#' Extract single soma from data +#' +#' Apply Gaussian mixture models to segment soma from dendrites +#' +#' @param soma A filepath or the structure returned by read_VRML or read_PLY representing damaged soma mesh @seealso \code{read_VRML} +#' @param sdf.path folder where intermediate files with shape diameter function data are saved +#' @param broken_sdf.path Folder where intermediate files with broken somas after GMM threshold are saved +#' @param reconstruction.path Folder where reconstructed file is stored +#' @param name Name of the soma file +#' +#' @return Extracted soma +#' +#' @useDynLib SomaMS +#' +#' @export +segment.soma3d <- function(soma, sdf.path = tempdir(), broken_sdf.path = tempdir(), + reconstruction.path = tempdir(), name="soma"){ + + ### + # INPUT SANITIZATION + ### + # Validate repaired + stopifnot( length(name)>0 ) + + # Validate tempdirs + stopifnot( utils.validateDir(sdf.path, pre="Shape Diameter Function measures", createDir = T) ) + stopifnot( utils.validateDir(broken_sdf.path, pre="Broken SDF Meshes", createDir = T) ) + stopifnot( utils.validateDir(reconstruction.path, pre="Reconstructed Meshes", createDir = T) ) + + # If soma is already a ply path keeps it, if its a vrml read and validates and if is a list validates. 
+ soma.path <- utils.somaToPLY(soma, paste("original",name,sep="_"), moveToLocation = F) + stopifnot(!is.null(soma.path)) + + #### + + ### + # CALL SDF + ### + soma.sdf.path <- file.path(sdf.path,paste0("sdf_",name,".ply")) + if( execute_meshlab_script(input = soma.path, + output = soma.sdf.path, + meshlab_script = "shape_diameter") != 0) + stop("Failure while executing SDF meshlab script") + # Read soma.sdf + soma.sdf <- read_binary_PLY(soma.sdf.path) + + #First GMM + quality_classification <- Mclust(soma.sdf$quality, G = 2) + soma_class <- which.min(c(mean(c(apply(soma.sdf$vertices[quality_classification$classification == 1, ], 2, sd))), + mean(c(apply(soma.sdf$vertices[quality_classification$classification == 2, ], 2, sd))))) + + quality_sec_gauss <- soma.sdf$quality [quality_classification$classification == soma_class] + vertices_sec_gauss <- soma.sdf$vertices[quality_classification$classification == soma_class, ] + + #Second GMM + quality_classification <- Mclust(quality_sec_gauss, G = 2) + soma.broken_sdf.path <- file.path(broken_sdf.path,paste0("brokensdf_",name)) + + write_PLY_only_points( + vertices_sec_gauss[quality_classification$classification == which.max(table(quality_classification$classification)), ], + soma.broken_sdf.path) + soma.broken_sdf.path <- paste0(soma.broken_sdf.path,".ply") + + ### + # CALL POISSON RECONSTRUCTION + ### + soma.reconstruction.path <- file.path(broken_sdf.path,paste0("reconstructed_",name,".ply")) + if( execute_meshlab_script(input = soma.broken_sdf.path, + output = soma.reconstruction.path, + meshlab_script = "poisson_reconstruction_with_normals") != 0 ) + stop("Failure while executing SDF meshlab script") + + #Poisson reconstruction sometimes generates isolated mesh pieces that must be removed + poisson_output_mesh <- vcgPlyRead(soma.reconstruction.path) + poisson_output_mesh <- vcgIsolated(poisson_output_mesh, silent=TRUE) + + # Transform to soma3d + return(list( vertices= t(poisson_output_mesh$vb[1:3,]), faces = 
t(poisson_output_mesh$it), + normals = t(poisson_output_mesh$normals[1:3,]) )) +} + diff --git a/R/3_validation.R b/R/3_validation.R new file mode 100644 index 0000000..50b279e --- /dev/null +++ b/R/3_validation.R @@ -0,0 +1,246 @@ +#' Compute RMSE +#' +#' Compute RMSE between three different techniques to compare processing methods +#' +#' @param technique_1 path to the folder where meshes processed with the first technique are placed. They have to be PLY files +#' @param technique_2 path to the folder where meshes processed with the second technique are placed. They have to be PLY files +#' @param technique_3 path to the folder where meshes processed with the third technique are placed. They have to be PLY files +#' +#' @return RMSE a matrix Nx3 where N is the number of meshes and first column is RMSE between technique 1 and technique 2, second column is RMSE between technique 1 and technique 3 and third column is RMSE between technique 2 and technique 3 +#' +#' @examples +#' ###################################################### +#' ###################### Interexpert ################### +#' ###################################################### +#' #RMSE between the somas of the algorithm and the somas of the experts before repairing the experts' somas +#' path_somas_algorithm<- "./temp/final_result" +#' path_somas_experts<- system.file("test/pre_repaired",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' pre_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], TRUE) +#' +#' #RMSE between the somas of the algorithm and the somas of the experts after repairing the experts' somas +#' path_somas_algorithm<- "./temp/final_result" +#' path_somas_experts<- system.file("test/post_repaired",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' post_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], TRUE) +#' +#' X11(width = 18, 
height = 10.37) +#' par(mfrow=c(1,2)) +#' values_barplot <- t(pre_repaired_RMSE) +#' colors <- c(rainbow(3)) +#' mp <- barplot2(values_barplot, main = "RMSE before repairing experts' somas", ylab = "RMSE", beside = TRUE, +#' col = colors, ylim = c(0, 1.7), cex.names = 1.5, cex.lab = 1.5, cex.axis = 1.5) +#' legend("top", legend = c("Procedure Vs Expert 1", "Procedure Vs Expert 2", "Expert 1 Vs Expert 2"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) + +#' mtext(1, at = mp[2,], text = c("Neuron 1", "Neuron 2", "Neuron 3", "Neuron 4", "Neuron 5", "Neuron 6", "Neuron 7", "Neuron 8", "Neuron 9"), line = 0.5, cex = 1) +#' legend("topleft", legend = "", title = "A", box.col = "transparent", cex = 2) + +#' values_barplot <- t(post_repaired_RMSE) +#' colors <- c(rainbow(3)) +#' mp <- barplot2(values_barplot, main = "RMSE after repairing experts' somas", ylab = "RMSE", beside = TRUE, +#' col = colors, ylim = c(0, 1.7), cex.names = 1.5, cex.lab = 1.5, cex.axis = 1.5) +#' legend("top", legend = c("Procedure Vs Expert 1", "Procedure Vs Expert 2", "Expert 1 Vs Expert 2"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) + +#' mtext(1, at = mp[2,], text = c("Neuron 1", "Neuron 2", "Neuron 3", "Neuron 4", "Neuron 5", "Neuron 6", "Neuron 7", "Neuron 8", "Neuron 9"), line = 0.5, cex = 1) +#' legend("topleft", legend = "", title = "B", box.col = "transparent", cex = 2) +#' +#' print(paste0("Mean interexpert RMSE: ", mean(post_repaired_RMSE[,3]))) +#' ###################################################### +#' ###################### Intraexpert ################### +#' ###################################################### +#' path_somas_experts<- system.file("test/intraexpert",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' +#' expert_1_days<-list.dirs(experts_paths[1],recursive=F) +#' expert_2_days<-list.dirs(experts_paths[2],recursive=F) +#' +#' intraexpert_expert_1 <- RMSE_mesh_distance(expert_1_days[1], 
expert_1_days[2], expert_1_days[3], TRUE) +#' intraexpert_expert_2 <- RMSE_mesh_distance(expert_2_days[1], expert_2_days[2], expert_2_days[3], TRUE) +#' +#' X11(width = 18, height = 10.37) +#' par(mfrow = c(1,2)) +#' +#' valuesBarplot <- t(intraexpert_expert_1) +#' colors <- c(rainbow(3)) +#' mp <- barplot2(valuesBarplot, main="Intra-expert variability of the first expert ", ylab = "RMSE", beside = TRUE, +#' col = colors, ylim = c(0,1.6), cex.names = 1.5,cex.lab = 1.5, cex.axis = 1.5) +#' legend("top",legend = c("Day 1 Vs Day 2","Day 1 Vs Day 3","Day 2 Vs Day 3"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) +#' +#' mtext(1, at = mp[2,], text = c("Neuron 1","Neuron 2","Neuron 3","Neuron 4","Neuron 5","Neuron 6"),line = 0.5, cex = 1.3) +#' legend("topleft",legend="",title="A",box.col = "transparent",cex=2) +#' +#' +#' values_barplot<-t(intraexpert_expert_2) +#' colors<-c(rainbow(3)) +#' mp <- barplot2(values_barplot, main = "Intra-expert variability of the second expert ", ylab = "RMSE", beside = TRUE, +#' col = colors, ylim = c(0, 1.6), cex.names = 1.5, cex.lab = 1.5, cex.axis = 1.5) +#' legend("top",legend = c("Day 1 Vs Day 2","Day 1 Vs Day 3","Day 2 Vs Day 3"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) +#' +#' mtext(1, at = mp[2,], text = c("Neuron 1", "Neuron 2", "Neuron 3", "Neuron 4", "Neuron 5", "Neuron 6"), line = 0.5, cex = 1.3) +#' legend("topleft", legend = "", title = "B", box.col = "transparent", cex = 2) +#' +#' print(paste0("Mean RMSE for the first expert: ", mean(c(intraexpert_expert_1)))) +#' print(paste0("Mean RMSE for the second expert: ", mean(c(intraexpert_expert_2)))) +#' +#' @export +RMSE_computation <- function(path_somas_algorithm, path_somas_expert_1, path_somas_expert_2, parallel = TRUE) +{ + if(!parallel) + { + ncores <- 1 + cl <- NULL + }else{ + ncores <- detectCores() + if(is.na(ncores)){ + ncores <- 1 + cl <- NULL + }else{ + ncores <- ncores - 1 + cl <- makeCluster(ncores, type = "SOCK") 
+ registerDoSNOW(cl) + } + } + + RMSE <- RMSE_between_meshes(path_somas_algorithm, path_somas_expert_1, cl) + RMSE <- cbind(RMSE, RMSE_between_meshes(path_somas_algorithm, path_somas_expert_2, cl)) + RMSE <- cbind(RMSE, RMSE_between_meshes(path_somas_expert_1, path_somas_expert_2, cl)) + + if(ncores > 1) + { + stopCluster(cl) + } + + return(RMSE) +} + + +RMSE_between_meshes <- function(path_somas_algorithm, path_somas_expert, cl = NULL) +{ + list_interexpert_somas <- list.files(path_somas_expert) + + if(is.null(cl)) + { + distance_matrix<-foreach(i = 1:length(list_interexpert_somas), .combine=c, .packages=c('Rvcg','Morpho')) %do% + { + soma_algorithm <- vcgPlyRead(file.path(path_somas_algorithm, list_interexpert_somas[i])) + soma_expert <- vcgPlyRead(file.path(path_somas_expert, list_interexpert_somas[i])) + + max(sqrt(mean((meshDist(soma_algorithm, soma_expert, plot = FALSE)$dists)^2)), sqrt(mean((meshDist(soma_expert, soma_algorithm, plot = FALSE)$dists)^2))) + } + + }else{ + distance_matrix<-foreach(i = 1:length(list_interexpert_somas), .combine=c, .packages=c('Rvcg','Morpho')) %dopar% + { + soma_algorithm <- vcgPlyRead(file.path(path_somas_algorithm, list_interexpert_somas[i])) + soma_expert <- vcgPlyRead(file.path(path_somas_expert, list_interexpert_somas[i])) + + max(sqrt(mean((meshDist(soma_algorithm, soma_expert, plot = FALSE)$dists)^2)), sqrt(mean((meshDist(soma_expert, soma_algorithm, plot = FALSE)$dists)^2))) + } + } + return (distance_matrix) +} + + +#' Wilcoxon test for RMSE +#' +#' Compute Wilcoxon between RMSE distances to find significant differences between mesh processing techniques +#' +#' @param RMSE_matrix a matrix Nx3 which is the output of +#' +#' @return A vector of two p-values. The first value is the p-value resulting from wilcoxon between first column of the RMSE matrix and second column of the RMSE matrix. 
The second one is the p-value resulting from wilcoxon between first column of the RMSE matrix and second column of the RMSE matrix +#' +#' @examples +#' path_somas_algorithm<- "./test/final_result" +#' path_somas_experts<- system.file("test/pre_repaired",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' pre_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], TRUE) +#' +#' #RMSE between the somas of the algorithm and the somas of the experts after repairing the experts' somas +#' path_somas_algorithm<- "./test/final_result" +#' path_somas_experts<- system.file("test/post_repaired",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' post_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], TRUE) +#' +#' pvalue_before_repairing <- wilcoxon_RMSE(pre_repaired_RMSE) +#' print(paste0("p-value between algorithm and first expert: ", pvalue_before_repairing[1])) +#' print(paste0("p-value between algorithm and second expert: ", pvalue_before_repairing[2])) +#' +#' pvalue_after_repairing <- wilcoxon_RMSE(post_repaired_RMSE) +#' print(paste0("p-value between algorithm and first expert: ", pvalue_after_repairing[1])) +#' print(paste0("p-value between algorithm and second expert: ", pvalue_after_repairing[2])) +#' +#' @export +wilcoxon_RMSE<-function(RMSE_matrix) +{ + return (c(wilcox.test(RMSE_matrix[ ,1], RMSE_matrix[ ,3], paired = TRUE, exact = TRUE)$p.value,wilcox.test(RMSE_matrix[ ,2], RMSE_matrix[ ,3], paired = TRUE, exact = TRUE)$p.value)) +} + + +#' Compute the volume of meshes in a folder +#' +#' Read one by one the meshes in a folder and compute the volume of each one of the them +#' +#' @param path_to_somas_folder path to the folder where meshes processed with the first technique are placed. 
They have to be PLY files +#' +#' @return array of N volumes where N is the number of meshes in the folder +#' +#' @examples +#' compute_meshes_volumes("./temp/final_result") +#' +#' @export +compute_meshes_volumes<-function(path_to_mesh_folder) +{ + list_interexpert_somas <- list.files(path_to_mesh_folder) + volumes <- foreach(i=1:length(list_interexpert_somas), .combine=c, .packages=c('Rvcg','Morpho')) %do% + { + soma_mesh <- vcgPlyRead(file.path(path_to_mesh_folder, list_interexpert_somas[i], sep = "")) + poly_volume(t(soma_mesh$vb[1:3, ])[soma_mesh$it, ]) + } + + return (volumes) +} + + +#' Compute MAQ_S +#' +#' Compute MAQ_S between three different techniques to compare processing methods +#' +#' @param path_somas_algorithm path to the folder where meshes processed with the first technique are placed. They have to be PLY files +#' @param path_somas_expert_1 path to the folder where meshes processed with the second technique are placed. They have to be PLY files +#' @param path_somas_expert_2 path to the folder where meshes processed with the third technique are placed. They have to be PLY files +#' +#' @return A vector of three values where each value is the MAQ_S between two techniques. 
Concretely they are MAQ_S_12, MAQ_S_13, MAQ_S_23 +#' +#' @examples +#' path_somas_algorithm<- "./temp/final_result" +#' path_somas_experts<- system.file("test/post_repaired",package="SomaMS") +#' experts_paths<-list.dirs(path_somas_experts,recursive=F) +#' path_somas_expert_1<-experts_paths[1] +#' path_somas_expert_2<-experts_paths[2] + +#' MAQ_S_result <- MAQ_S(path_somas_algorithm,path_somas_expert_1, path_somas_expert_2) + +#' print(paste("MAQ_S_12 is:", MAQ_S_result[1] * 100, "%")) +#' print(paste("MAQ_S_13 is:", MAQ_S_result[2] * 100, "%")) +#' print(paste("MAQ_S_23 is:", MAQ_S_result[3] * 100, "%")) +#' print(paste("Mean MAQ_S between algorithm and experts is:", mean(c(MAQ_S_result[1],MAQ_S_result[2])) * 100, "%")) +#' print(paste("Difference between experts MAQ_S and mean MAQ_S of algorithm is:", abs(MAQ_S_result[3]-mean(c(MAQ_S_result[1],MAQ_S_result[2])))*100, "%")) +#' +#' @export +MAQ_S<-function(path_somas_algorithm, path_somas_expert_1, path_somas_expert_2) +{ + list_interexpert_somas <- list.files(path_somas_expert_1) + + volumes <- matrix(nrow=length(list_interexpert_somas),ncol=3) + volumes[ ,1] <- compute_meshes_volumes(path_somas_algorithm) + volumes[ ,2] <- compute_meshes_volumes(path_somas_expert_1) + volumes[ ,3] <- compute_meshes_volumes(path_somas_expert_2) + + #MEAN ABSOLUTE QUOTIENT VOLUME + MAQ_S_12 <- max(mean(abs((volumes[ ,1] / volumes[ ,2]) - 1)), mean(abs((volumes[ ,2] / volumes[ ,1]) - 1))) + MAQ_S_13 <- max(mean(abs((volumes[ ,1] / volumes[ ,3]) - 1)), mean(abs((volumes[ ,3] / volumes[ ,1]) - 1))) + MAQ_S_23 <- max(mean(abs((volumes[ ,2] / volumes[ ,3]) - 1)), mean(abs((volumes[ ,3] / volumes[ ,2]) - 1))) + + return(c(MAQ_S_12, MAQ_S_13, MAQ_S_23)) +} + diff --git a/R/4_soma_parametrization.R b/R/4_soma_parametrization.R new file mode 100644 index 0000000..1599a64 --- /dev/null +++ b/R/4_soma_parametrization.R @@ -0,0 +1,277 @@ +library(Rvcg) +library(Morpho) +library(rgl) +library(misc3d) +library(geometry) +library(pracma) 
+library(matrixStats) +library(tools) +library(bnlearn) + +source("./R/Geometric_operators.R") + +#' Characterize soma with ray tracing reeb graph +#' +#' Compute the morphological characterization of the soma. First, cast rays from the centroid of the soma to the surface, +#' defining level curves. Next, orientate soma and adjust curves. Finally compute parameters according to the curves. +#' +#' @param path_somas_repaired path to the folder where segmented somas were placed. They have to be PLY files. +#' @param path_csv_file path to a csv file where the values of the characterization will be saved +#' +#' @return None +#' @examples +#' soma_caracterization(path_somas_repaired = file.path(tempdir(),"reconstructed"), path_csv_file = file.path(tempdir(),"somaReebParameters.csv")) +#' +#' @export + +soma_caracterization <- function(path_somas_repaired, path_csv_file) +{ + PLY_files <- list.files(path_somas_repaired)[which(file_ext(list.files(path_somas_repaired)) == "ply")] + soma_names <- unlist(strsplit(basename(PLY_files), "\\.")) + soma_names <- soma_names[which(soma_names != "ply")] + dataset <- data.frame() + + #For each soma file compute parameters and save in the csv file + for(file in PLY_files) + { + #Read soma from PLY file + soma_PLY <- vcgImport(paste(path_somas_repaired, file, sep = "/"), F, F, F) + + #Remove isolate pieces and smooth the mesh + smoothed_mesh <- vcgSmooth(vcgIsolated(soma_PLY, facenum = NULL, diameter = NULL), type = c("laplacian"), iteration = 50) + + #Compute centroid and place the centroid in the origin + center_of_mass <- colSums(vert2points(smoothed_mesh)) / nrow(vert2points(smoothed_mesh)) + smoothed_mesh <- translate3d(smoothed_mesh, -center_of_mass[1], -center_of_mass[2], -center_of_mass[3]) + + vertices_soma <- vert2points(smoothed_mesh) + + #Compute Feret diameter to obtain the + convex_hull <- convhulln(vertices_soma, "FA")$hull + convex_vertices <- vertices_soma[sort(unique(c(convex_hull))), ] + radius <- 
max(apply(convex_vertices, 1, normv)) + + #Generate points in the sphere to cast rays to them. Each curve is composed by 3000 points (theta). + #We generate 6 curves and 2 extreme points (phi). + set.seed(1) + theta <- seq(0, 2*pi, length = 3000) + phi <- seq(pi, 0, length = 8) + + #Normal vectors denote the direction of the rays. + malla <- smoothed_mesh + malla$vb[1:3, ] <- t(vertices_soma) + ellipse <- list() + list_of_ellipses <- list() + ellipses_centroids <- matrix(NA, ncol = 3, nrow = length(phi)) + for(i in 1:length(phi)) + { + #Cartesian points from the spherical coordinates + x <- radius * sin(theta) * sin(phi[i]) + y <- radius * cos(phi[i]) + z <- radius * cos(theta) * sin(phi[i]) + + #Cartesian points denote the normals + normals <- cbind(x,y,z) + + #Define origin for each ray. As always is the same point, we generate a zero matrix + vertices <- matrix(rep(c(0, 0, 0), 3000), ncol = 3000) + + x$vb <- vertices + x$normals <- t(normals) + #Change class to mesh3d to execute vcgRaySearch + class(x) <- "mesh3d" + + #Cast rays + raytracer <- vcgRaySearch(x,malla) + ellipse[[i]] <- raytracer + #Save each vertex of the curve + list_of_ellipses[[i]] <- vert2points(raytracer) + #Centroid of each curve + ellipses_centroids[i, ] <- colSums(list_of_ellipses[[i]]) / nrow(list_of_ellipses[[i]]) + } + + #Place lowest ellipse in the origin + insertion_point<-ellipses_centroids[1, ] + for(i in 1:length(list_of_ellipses)) + { + list_of_ellipses[[i]] <- translate3d(list_of_ellipses[[i]], -insertion_point[1], -insertion_point[2], -insertion_point[3]) + } + + + #Also shift the soma mesh to the same place + smoothed_mesh <- translate3d(smoothed_mesh, -insertion_point[1], -insertion_point[2], -insertion_point[3]) + ellipses_centroids <- translate3d(ellipses_centroids, -insertion_point[1], -insertion_point[2], -insertion_point[3]) + + number_of_curves<-length(list_of_ellipses) - 2 + + perp_vectors_sph <- matrix(0, nrow = number_of_curves, ncol = 2) #Perpendicular vector to curve 
in spherical coords. + ellipse_angles <- matrix(0, nrow = number_of_curves, ncol = 1) #Angle of the elipse in the XY plane. + eccentricity <- matrix(0, nrow = number_of_curves, ncol = 1) #Relation between semiaxes. + curve_coefficients <- matrix(0, number_of_curves, ncol = 3) #Coefficients of z=a_0+a_1*x^2+a_2*y^2. + curve_centroids <- matrix(0, nrow = length(list_of_ellipses), ncol = 3) #Centroids of the curves. + perp_PCA_matrix <- matrix(0, nrow = number_of_curves, ncol = 3) #Perpendicular vector to curve in cartesian coords. + ellipse_angles_vectors <- matrix(0, nrow = number_of_curves, ncol = 3) #Vector representation of the angle of the ellipse. + semiaxis <- matrix(0, nrow = number_of_curves, ncol = 1) #One of the semiaxis of the ellipse. + + #Compute parameters of each curve. + for (i in 2:(length(list_of_ellipses) - 1)) + { + PCA <- prcomp(list_of_ellipses[[i]]) #Compute main axis of the curve + perp_PCA_matrix[i-1, ] <- PCA$rotation[ ,3]#Perpendicular vector to the fittest plane to the curve + + #Rotate curve to place the normal to the plane parallel to the Z axis, that is, the curve will be in the XY plane + u <- vcrossp(perp_PCA_matrix[i-1, ],c(0, 0, 1)) + u <- u / as.numeric(normv(u)) + theta <- acos((perp_PCA_matrix[i-1,] %*% c(0, 0, 1))) + rotationMatrix <- rotation_matrix(u, theta) + + rotated_curve <- t(rotationMatrix %*% t(list_of_ellipses[[i]])) + + #Check that the ellipse is over the XY plane because in the other case (when it is under the plane) + #rotation would be in the opposite direction + if(min(rotated_curve[ ,3]) < 0) + { + perp_PCA_matrix[i-1, ] <- -perp_PCA_matrix[i-1, ] + u <- vcrossp(perp_PCA_matrix[i-1, ],c(0, 0, 1)) + u <- u / as.numeric(normv(u)) + theta <- acos((perp_PCA_matrix[i-1, ] %*% c(0, 0, 1))) + rotationMatrix <- rotation_matrix(u,theta) + + rotated_curve <- t(rotationMatrix %*% t(list_of_ellipses[[i]])) + } + + #Compute ellipse approximation to the curve + fit_ellipse <- fit.ellipse(rotated_curve[ ,1], rotated_curve[ 
,2]) + + #Rotate angle and centroid of the ellipse to place it in the original space (before PCA rotation) + ellipse_angles_vectors[i-1, ] <- as.numeric(t(rotationMatrix) %*% as.numeric(c(cos(fit_ellipse$angle), sin(fit_ellipse$angle), 0))) + ellipses_centroids[i, ] <- as.numeric(t(rotationMatrix) %*% as.numeric(c(fit_ellipse$center, mean(rotated_curve[ ,3])))) + + eccentricity[i-1] <- fit_ellipse$minor / fit_ellipse$major + semiaxis[i-1] <- fit_ellipse$major + ellipse_angles[i-1] <- fit_ellipse$angle + + #Place the angle of the ellipse parallel to X axis. This is needed to compute the approximation of the curve. + #The angle of the ellipse must be always the same. + u <- vcrossp(c(cos(fit_ellipse$angle), sin(fit_ellipse$angle), 0), c(1, 0, 0)) + u <- u / as.numeric(normv(u)) + rotationZ <- rotation_matrix(u,fit_ellipse$angle) + rotated_curve <- t(rotationZ %*% t(rotated_curve)) + + x <- rotated_curve[ ,1] + y <- rotated_curve[ ,2] + z <- rotated_curve[ ,3] + + #Adjust curve with a cuadratic regression + data <- data.frame(x, y, z) + fitModel <- lm(z ~ I(x^2) + I(y^2), data = data) + curve_coefficients[i-1, ] <- fitModel$coefficients + } + + ellipses_centroids[length(list_of_ellipses), ] <- list_of_ellipses[[length(list_of_ellipses)]][1, ] + + #Rotate the soma to align the vector between de lowest point and + #the centroid of the first curve with the Z axis. Also, rotate the + #curves and the centroids. 
+ #u=rotation axis, theta=rotation angle + unit_vector <- ellipses_centroids[2, ] / normv(ellipses_centroids[2, ]); + u <- vcrossp(unit_vector,c(0, 0, 1)); + u <- u / as.numeric(normv(u)); + theta <- acos((unit_vector %*% c(0, 0, 1))); + + rotationMatrix <- rotation_matrix(u, theta); + + #Rotation of all the geometric measures + for (i in 1:length(list_of_ellipses)) + { + list_of_ellipses[[i]] <- t(rotationMatrix %*% t(list_of_ellipses[[i]])) + } + ellipses_centroids <- t(rotationMatrix %*% t(ellipses_centroids)) + perp_PCA_matrix <- t(rotationMatrix %*% t(perp_PCA_matrix)) + ellipse_angles_vectors <- t(rotationMatrix %*% t(ellipse_angles_vectors)) + + #Rotate the soma to place the upper point of the soma parallel to the + #X axis. It is rotated around the Z axis. + unit_vector <- c(ellipses_centroids[8, 1], ellipses_centroids[8, 2], 0) / as.numeric(normv(c(ellipses_centroids[8, 1], ellipses_centroids[8, 2], 0))) + + u <- vcrossp(unit_vector,c(1, 0, 0)) + u <- u / as.numeric(normv(u)) + theta <- acos((unit_vector %*% c(1, 0, 0))) + + rotationZ <- rotation_matrix(u, theta) + + #Rotate all the geometric measures + for (i in 1:length(list_of_ellipses)) + { + + list_of_ellipses[[i]] <- t(rotationZ %*% t(list_of_ellipses[[i]])) + } + + ellipses_centroids <- t(rotationZ %*% t(ellipses_centroids)) + perp_PCA_matrix <- t(rotationZ %*% t(perp_PCA_matrix)) + ellipse_angles_vectors <- t(rotationZ %*% t(ellipse_angles_vectors)) + + #Compute current angle of the ellipses after all the transformations + for (i in 1:nrow(perp_PCA_matrix)) + { + u <- vcrossp(perp_PCA_matrix[i, ], c(0, 0, 1)) + u <- u / as.numeric(normv(u)) + theta <- acos((perp_PCA_matrix[i,] %*% c(0, 0, 1))) + rotationMatrix <- rotation_matrix(u, theta) + rotated_angle_matrix <- t(rotationMatrix %*% matrix(ellipse_angles_vectors[i, ], ncol = 1)) + ellipse_angles[i] <- cart2pol(rotated_angle_matrix[1], rotated_angle_matrix[2])[1] + } + + perp_vectors_sph <- cartesian2Spherical(perp_PCA_matrix[ ,1], 
perp_PCA_matrix[ ,2], perp_PCA_matrix[ ,3])[ ,1:2] + + #Compute the parameters needed to build the skeleton of the spine. + #|h|, phi, theta, alpha + vector_length <- matrix(0, nrow = nrow(ellipses_centroids) - 1, ncol = 1) + phi <- matrix(0, nrow = nrow(ellipses_centroids) - 2, ncol = 1) + theta <- matrix(0, nrow = nrow(ellipses_centroids) - 2, ncol = 1) + + section_vector <- diff(ellipses_centroids) + vector_length <- sqrt(rowSums(section_vector^2)) + angles_section <- cartesian2Spherical(section_vector[ ,1], section_vector[ ,2], section_vector[ ,3]) + phi <- angles_section[ ,1] + theta <- angles_section[ ,2] + + r_h <- matrix(0, nrow = length(list_of_ellipses) - 2, ncol = 1) + ellipse_area <- matrix(0, nrow = length(list_of_ellipses) - 2, ncol = 1) + r_h <- semiaxis / (vector_length[1:(length(vector_length) - 1)] + vector_length[2:(length(vector_length))]) + + instance_row <- data.frame(t(c(vector_length, phi[2:length(phi)], theta[2:length(theta)], r_h,eccentricity, perp_vectors_sph, ellipse_angles, curve_coefficients))) + rownames(instance_row)<-file + dataset<-rbind(dataset,instance_row) + + } + + if(!file.exists(path_csv_file)) + { + #Names of the height variables + height_names <- list() + for (i in 1:length(vector_length)) + { + height_names[[i]] <- paste0("height", i) + } + + #Names of all the other variables + variable_names <- c("phi", "theta", "r_h", "e", "PCA_phi", "PCA_theta", "w", "b_0", "b_1", "b_2") + variable_names_matrix <- matrix(rep(unlist(variable_names), number_of_curves),ncol = number_of_curves) + for(i in 1:length(variable_names)) + { + for(j in 1:number_of_curves) + { + variable_names_matrix[i,j] <- paste0(variable_names_matrix[i,j], j) + } + } + column_names <- c(unlist(height_names), as.character(t(variable_names_matrix))) + colnames(dataset) <- column_names + + #Save dataset + write.table(dataset, file = path_csv_file, sep = ";", col.names = NA) + }else{ + #If the file exist, append the new values at the end of the file + 
write.table(dataset, file = path_csv_file, sep = ";", append = T, col.names = F, row.names = T) + } +} \ No newline at end of file diff --git a/R/5_BN_structure_learning.R b/R/5_BN_structure_learning.R new file mode 100644 index 0000000..fb4764d --- /dev/null +++ b/R/5_BN_structure_learning.R @@ -0,0 +1,120 @@ +library(caret) +library(bnlearn) + +#' Compute the structure of the BN +#' +#' Compute the structure of a Dynamical Spatial Bayesian Network. Also apply bootstrap to choose the most significant arcs. +#' +#' @param somas dataframe with the characterization of the somas +#' @param n_boot number of times that bootstrap learn the bayesian structure +#' @param significant_arcs a value between 0 and 1 that denotes a threshold over percentage of times that an arc appeared in the boostrap structures +#' +#' @return bayesian_model an object of class bn from bnlearn package that representates the structure of the DSBN +#' +#' @export + +BN_structure_learning <- function(somas, nboots, significant_arcs) +{ + #Read csv file + #somas <- read.table(path_to_csv, sep = ";", header = T, row.names = 1) + + #Column index of the variables + height_position <- grep("height", colnames(somas)) + phi_position <- grep("^phi", colnames(somas)) + theta_position <- grep("^theta", colnames(somas)) + r_h_position <- grep("r_h", colnames(somas)) + e_position <- grep("^e",colnames(somas)) + PCA_azimuth_position <- grep("PCA_phi", colnames(somas)) + PCA_theta_position <- grep("PCA_theta", colnames(somas)) + w_position <- grep("^w", colnames(somas)) + b_0_position <- grep("b_0", colnames(somas)) + b_1_position <- grep("b_1", colnames(somas)) + b_2_position <- grep("b_2", colnames(somas)) + + #Generate the blacklist of variables for the bayesian network. Actual state define the variables in a section. 
+ #Transition is an arc from the variables of the actual section to the next section of the soma + #There can be arcs between the variables of the actual state but not between transitions + #Neither can be arcs from transitions to actual state. Transitions only point out the next state + dataset <- data.frame() + state_names <- c("height", "phi", "theta", "r_h", "e", "PCA_phi", "PCA_theta", "w", "b_0", "b_1", "b_2") + transition_names <- paste0(state_names, "+1") + black_list <- expand.grid(transition_names, transition_names) + black_list <- rbind(cbind(as.character(black_list[ ,1]), as.character(black_list[ ,2])), + cbind(as.character(black_list[ ,1]), gsub("\\+1", "", as.character(black_list[ ,2])))) + + + #Create a new dataset combining actual state with the next future state. Each state is a section of the soma. + #Thus, we obtain a dynamic bayesian network applied to spatial variables. + for(i in 1:(length(height_position) - 2)) + { + xi <- c(height_position[i], phi_position[i], theta_position[i], r_h_position[i], e_position[i], + PCA_azimuth_position[i], PCA_theta_position[i], w_position[i], b_0_position[i], b_1_position[i], + b_2_position[i]) + + xi1 <- c(height_position[i+1], phi_position[i+1], theta_position[i+1], r_h_position[i+1], e_position[i+1], + PCA_azimuth_position[i+1], PCA_theta_position[i+1], w_position[i+1], b_0_position[i+1], + b_1_position[i+1], b_2_position[i+1]) + + state <- somas[ ,xi] + colnames(state) <- state_names + transition <- somas[ ,xi1] + colnames(transition) <- transition_names + dataset <- rbind(dataset, cbind(state, transition)) + } + + #Sampling index of the dataset to apply bootstrap + bootstrap <- list() + for(i in 1:nboots) + { + bootstrap[[i]] <- sample(1:nrow(dataset), size = nrow(dataset), replace = T) + } + + #All posible combinations of variables + all_combinations <- expand.grid(colnames(dataset), colnames(dataset)) + all_combinations <- all_combinations[-which(all_combinations[ ,1] == all_combinations[ ,2]), ] + 
distribution <- rep(0, length = nrow(all_combinations)) + + for(i in 1:nboots) + { + model <- bnlearn::hc(dataset[bootstrap[[i]], ], blacklist = black_list) + white_list <- as.data.frame(bnlearn::arcs(model)) + update_positions <- which(paste(all_combinations[ ,1], all_combinations[ ,2]) %in% paste(white_list[ ,1], white_list[ ,2])) + distribution[update_positions] <- distribution[update_positions] + 1 + } + + #Arcs that were in more than the significant_arcs percentage of learned structures are included in the future model + #Arcs that were in less than the significant_arcs percentage of learned structures are discarded + #Thus, only the robust arcs are saved. It is useful when the number of variables is bigger than the number of instances + white_list <- all_combinations[which(distribution > (nboots * significant_arcs)), ] + new_black_list <- all_combinations[which(distribution <= (nboots * significant_arcs)), ] + + bootstrap_model <- bnlearn::hc(dataset, whitelist = white_list, blacklist = new_black_list) + + #Develop the network repeating the learned structure for each variable + model_arcs <- data.frame() + for(i in 1:(length(height_position) - 2)) + { + graph_arcs <- bnlearn::arcs(bootstrap_model) + trans_variables_pos <- grep("\\+1", graph_arcs) + graph_arcs <- gsub("\\+1", i + 1, graph_arcs) + graph_arcs[setdiff(1:length(graph_arcs), trans_variables_pos)] <- paste0(graph_arcs[setdiff(1:length(graph_arcs), trans_variables_pos)], i) + model_arcs <- rbind(model_arcs, graph_arcs) + } + + graph_arcs <- bnlearn::arcs(bootstrap_model) + graph_arcs <- graph_arcs[which(graph_arcs[ ,2] == "height+1"), ] + if(dim(graph_arcs)[1] > 0) + { + trans_variables_pos <- grep("\\+1", graph_arcs) + graph_arcs <- gsub("\\+1", length(height_position), graph_arcs) + graph_arcs[setdiff(1:length(graph_arcs),trans_variables_pos)] <- paste0(graph_arcs[setdiff(1:length(graph_arcs), trans_variables_pos)], length(height_position) - 1) + model_arcs <- rbind(model_arcs, graph_arcs) + } + + 
all_combinations <- expand.grid(colnames(somas), colnames(somas)) + black_list_reloaded <- setdiff(all_combinations, model_arcs) + + bayesian_model <- bnlearn::hc(somas, whitelist = model_arcs, blacklist = black_list_reloaded) + + return(bayesian_model) +} \ No newline at end of file diff --git a/R/6_BN_clustering.R b/R/6_BN_clustering.R new file mode 100644 index 0000000..d7e00ad --- /dev/null +++ b/R/6_BN_clustering.R @@ -0,0 +1,209 @@ +#Compute denominator of the expectation step +logSumExp<-function(x) +{ + y = apply(x, 1, max) + x = sweep(x, 1, y, "-") + s = y + log(rowSums(exp(x))) + return(s) +} + +#Estimate coefficients of the parents for the maximization step +#A Linear Gaussian distribution is represented as N(x|B_0+B_1*x_p1+B_2*x_p2,sigma) +#B_0+B_1*x_p1+B_2*x_p2 is the mean of the gaussian, B_ is a coefficient +#and x_p are the column vector of each parent of the actual node +#To compute the mle, the partial derivative of each coefficient is computed giving as result a linear system +estimateCoefficients<-function(data, responsabilities, node, parents) +{ + #Initialize coefficient and independent term matrix + coefficient_matrix <- matrix(NA, nrow = (length(parents) + 1), ncol = (length(parents) + 1)) + independent_term_matrix <- matrix(rep(NA, length(parents) + 1), ncol = 1) + + #The column of the data that corresponds to the actual node is represented as a row to multiply faster + node_column_values <- matrix(data[ ,node], nrow = 1) + #The same as before but with the parents nodes. We added a column of 1s to multiply the intercept + parent_columns_values <- as.matrix(cbind(1, data[ ,parents])) + + #Responsabilities (weights) multiplying each parent + comb_respons_parents <- matrix(sweep(data.matrix(data[ ,parents]), 1, responsabilities, "*"),ncol = length(parents))#Se computa el vector de responsabilidades por los datos del padre que se esta estudiando. 
+ + #Compute the ecuation of the partial derivative of the intercept + coefficient_matrix[1, ] <- matrix(responsabilities, nrow = 1) %*% parent_columns_values + independent_term_matrix[1] <- node_column_values %*% matrix(responsabilities, ncol = 1) + + + #Compute the ecuation of the partial derivative of each one of the parents + for(parent in 2:(length(parents)+1)) + { + coefficient_matrix[parent, ] <- matrix(comb_respons_parents[ ,parent-1], nrow = 1) %*% parent_columns_values + independent_term_matrix[parent] <- node_column_values %*% comb_respons_parents[ ,parent-1] + } + + #Solve for the coefficients + coefs<-solve(coefficient_matrix, independent_term_matrix) + + return(coefs) +} + +#M-step +maximization<-function(bn, data, responsabilities) +{ + node_order <- bnlearn::node.ordering(bn) + for(node in node_order) + { + parents <- bn[[node]]$parents + children <- bn[[node]]$children + + if (length(parents) == 0) #If it has not got parents + { + mean <- (matrix(responsabilities, nrow = 1) %*% matrix(data[,node], ncol = 1)) / sum(responsabilities) + coefs <- c("(Intercept)" = mean) + resid <- data[ ,node] - mean + sd <- sqrt((matrix(responsabilities, nrow = 1) %*% matrix((resid)^2, ncol = 1)) / sum(responsabilities)) + bn[[node]] <- list(coef = as.numeric(coefs), sd = as.numeric(sd)) + + }else{ #If there is at least one parent + + coefs <- estimateCoefficients(data, responsabilities, node, parents) + resid <- data.matrix(data)[ ,node] - matrix(coefs, nrow = 1) %*% t(as.matrix(cbind(1, data[ ,parents]))) + sd <- sqrt((matrix(responsabilities, nrow = 1) %*% matrix((resid)^2, ncol = 1)) / sum(responsabilities)) + bn[[node]] <- list(coef = as.numeric(coefs), sd = as.numeric(sd)) + }#ELSE + }#FOR + return(bn) +} + +#E-step +#Compute (alpha_k * N(x|theta_k))/sum^K_i(alpha_i * N(x|theta_i)) +expectation <- function(model_parameters, data, priori) +{ + #Compute loglikelihood for each cluster and instance + log_likelihood <- matrix(NA, nrow = nrow(data), ncol = 
length(model_parameters)) + for(k in 1:length(model_parameters)) + { + log_likelihood[ ,k] <- logLik(model_parameters[[k]], data,by.sample = T) + } + + #Compute the weights or responsabilities of each cluster in each instance with logSumExp to avoid underflow + priori_loglikelihood <- sweep(log_likelihood, 2, log(priori), "+") + denominator <- logSumExp(priori_loglikelihood) + weights <- exp(sweep(priori_loglikelihood, 1, denominator, "-")) + + return (list(weights = weights, log_lik = sum(denominator))) +} + +#' Clustering Gaussian Bayesian network +#' +#' Given a Bayesian network structure, apply clustering to the data with the aim to find clusters of somas +#' +#' @param bayesian_structure a bn object from bnlearn package whose nodes are gaussians +#' @param data the same dataset used to compute the bayesian_structure +#' @param clusters array of values where each value indicates the number of clusters for that model +#' @param initialization a natural number to initialize the algorithm. 
Use 0 to use kmeans initialization and any other positive number as a seed for a random initialization +#' @param verbose show BIC scores for each model and the optimal number of clusters +#' +#' @return model_list is the list of parameters that best fit the data according to BIC criteria +#' +#' @examples +#' somas <- read.table(file.path(tempdir(),"somaReebParameters.csv"), sep = ";", header = T, row.names = 1) +#' bayesian_structure <- BN_structure_learning(path_to_csv = file.path(tempdir(),"somaReebParameters.csv"), nboots = 200, significant_arcs = 0.95) +#' fittest_model <- BN_clustering(bayesian_structure, data = somas, clusters = c(2, 3, 4), initialization = 2, T) +#' +#' @export +BN_clustering <- function(bayesian_structure, data, clusters, initialization, verbose) +{ + #Initializataion + model_parameters <- list() + model_list <- list() + priori_list <- list() + BIC_list <- list() + weights_list <- list() + + #For each of the integers introduced by the user compute the mle for the parameters + for(cluster in 1:length(clusters)) + { + #Reboot variables + old_log <- -Inf + K <- clusters[cluster] + data <- data.frame(data) + + #Estimate initial parameters. If it is 0 compute k-means if it is any other value with that seed random initialization + if(initialization == 0 ) + { + initiation <- kmeans(data, K)$cluster + }else{ + set.seed(initialization) + initiation <- round(runif(nrow(data), min = 1, max = K)) + } + + #Initialization of the parameters + priori <- matrix(0, nrow = K, ncol = 1) + for(i in 1:K) + { + model_parameters[[i]] <- bnlearn::bn.fit(bayesian_structure, data[which(initiation == i), ]) + priori[i] <- length(which(initiation == i)) / length(initiation) + } + + e_step <- expectation(model_parameters, data, priori) + new_log <- sum(e_step$log_lik) + + #When a NA value is detected execution stops. 
When there is not enough data to fit the parameters NA values appears + if(is.na(new_log)) + { + stop("Not enough data in one of the clusters to compute the parameters. Please, change initialization values or reduce the number of clusters.") + } + + #Repeat until convergence + while(new_log > old_log) + { + #M-step + #Maximizacion de los parametros para cada uno de los nodos + for(k in 1:K) + { + model_parameters[[k]] <- maximization(model_parameters[[k]], data, e_step$weights[ ,k]) + } + + #Priori maximization + priori <- colSums(e_step$weights) / nrow(e_step$weights) + #end M-step + + #E-step + e_step <- expectation(model_parameters, data, priori) + #end E-step + + old_log <- new_log + new_log <- e_step$log_lik + + if(is.na(new_log)) + { + stop(paste0("Not enough data in one of the clusters to compute the parameters.", + "Please, change initialization values or reduce the number of clusters.")) + } + + individualBIC <- matrix(0, nrow = length(model_parameters), ncol = 1) + for(i in 1:length(model_parameters)) + { + individualBIC[i] <- BIC(model_parameters[[i]], data) + } + } + + individual_BIC <- matrix(0, nrow = length(model_parameters), ncol = 1) + for(i in 1:length(model_parameters)) + { + individual_BIC[i] <- BIC(model_parameters[[i]], data) + } + + model_list[[cluster]] <- model_parameters + priori_list[[cluster]] <- priori + BIC_list[[cluster]] <- sum(individual_BIC) + weights_list[[cluster]] <- e_step$weights + } + + if(verbose) + { + print("BIC values by cluster order:") + print(unlist(BIC_list)) + print(paste0("Optimal number of clusters = ", clusters[which.min(unlist(BIC_list))])) + } + return(list(priori=priori_list[[which.min(unlist(BIC_list))]], weights=weights_list[[which.min(unlist(BIC_list))]], parameters=model_list[[which.min(unlist(BIC_list))]])) + #return(list(priori=priori_list[[which.min(unlist(BIC_list))]], parameters=model_list[[which.min(unlist(BIC_list))]])) +} diff --git a/R/7_simulation.R b/R/7_simulation.R new file mode 100644 index 
0000000..1309778 --- /dev/null +++ b/R/7_simulation.R @@ -0,0 +1,237 @@ +#' Simulation of new somas +#' +#' Simulation of new somas of the clustering of bayesian networks +#' +#' @param model a bn.fit object from bnlearn package whose nodes are gaussians. It must be the result of the BN_clustering function +#' @param data the same dataset used to compute the bayesian_structure +#' @param cluster_number a number between 1 and the number of clusters of model. When it is NULL, new somas are generated from the priori distribution. When is a number between 1 and the number of clusters, simulate new somas from the selected cluster. +#' @param number_of_new_instances a natural number denoting the number of new somas sampled from simulation +#' @param seed a natural number used as seed during the sampling, by default it is 1 +#' +#' @return new_somas a dataset similar to data with the simulated somas +#' +#' +#' @export +simulate_new_somas <- function(model, data, cluster_number = NULL, number_of_new_instances = 100, seed = 1) +{ + new_somas<-data.frame() + if(is.null(cluster_number)) #New somas from priori distribution + { + set.seed(seed) + sampling_cluster <- sample(length(model$priori), number_of_new_instances, replace = T, prob = model$priori)#Sample clusters according to priori distribution + soma_classes <- as.numeric(table(sampling_cluster)) #Count the number of instances for each cluster + + #Simulate new somas from each cluster according to the number of instances computed in the previous sampling + for(i in 1:length(soma_classes)) + { + + new_somas <- rbind(new_somas, cluster_simulation(model, data, i, soma_classes[i], seed)) + + } + + }else{ #Sample from the selected cluster + if(cluster_number %in% c(1:length(model$priori))) + { + + new_somas <- cluster_simulation(model, data, cluster_number, number_of_new_instances, seed) + + }else{ + + stop(paste0("The selected cluster number does not exist. 
The model has ", length(model), " clusters")) + + } + } + + return(new_somas) +} + + +#' Simulation of new somas from a selected cluster +#' +#' Simulation of new somas from the choosen cluster of the clustering of bayesian networks +#' +#' @param model a bn.fit object from bnlearn package whose nodes are gaussians. It must be the result of the BN_clustering function +#' @param data the same dataset used to compute the bayesian_structure +#' @param cluster_number a natural number between 1 and the number of clusters of model. Simulate new somas from the selected cluster. +#' @param number_of_new_instances a natural number denoting the number of new somas sampled from simulation +#' @param seed a natural number used as seed during the sampling, by default it is 1 +#' +#' @return new_somas a dataset similar to data with the simulated somas of the selected cluster +cluster_simulation <- function(model, data, cluster_number, number_of_new_instances, seed = 1) +{ + set.seed(seed) + + parameters <- model$parameters[[cluster_number]] + nodeOrder <- node.ordering(parameters) + + new_somas <- data.frame(matrix(NA, nrow = number_of_new_instances, ncol = length(nodeOrder))) + colnames(new_somas) <- names(parameters) + + classes <- apply(model$weights, 1, which.max) + maximum <- apply(data[which(classes == cluster_number), nodeOrder], 2, max) + minimum <- apply(data[which(classes == cluster_number), nodeOrder], 2, min) + + for(i in 1:length(nodeOrder)) + { + node.parameters <- parameters[[nodeOrder[i]]] + node.parents <- parameters[[nodeOrder[i]]]$parents + + mean <- as.numeric(node.parameters$coefficients %*% rbind(1, t(data.matrix(new_somas[ ,node.parents])))) + sd <- node.parameters$sd + new_somas[ ,nodeOrder[i]] <- rnorm(number_of_new_instances, mean, sd) + } + return(new_somas) +} + + +#' Three-dimensional representation of the simulated somas +#' +#' Three-dimensional renderization from a dataset where the simulated somas were saved +#' +#' @param new_somas a dataset 
#' with the simulated somas obtained in simulate_new_somas function
#' @param index index of the row of the new soma to renderize
#'
#' @return curve_points a n x 3 dataframe where each row represents a point in the cartesian three-dimensional space
#'
#' @export
simulation_3D_mesh <- function(new_somas, index)
{
  # Accumulators: all 3D points of the cross-section curves, the curves
  # themselves (entry 1 is the insertion point at the origin) and the
  # triangle faces of the mesh being built.
  curve_points <- data.frame(x = 0, y = 0, z = 0)
  cartesianEllipses <- list()
  cartesianEllipses[[1]] <- c(0, 0, 0)
  matrixOfFaces <- data.frame()

  # Column indices of each family of model variables in the simulated table.
  height_position <- grep("height", colnames(new_somas))
  phi_position <- grep("^phi", colnames(new_somas))
  theta_position <- grep("^theta", colnames(new_somas))
  r_h_position <- grep("r_h", colnames(new_somas))
  e_position <- grep("^e", colnames(new_somas))
  PCA_azimuth_position <- grep("PCA_phi", colnames(new_somas))
  PCA_theta_position <- grep("PCA_theta", colnames(new_somas))
  w_position <- grep("^w", colnames(new_somas))
  b_position <- grep("b_", colnames(new_somas))

  # Extract the simulated soma parameters for the requested row.
  vector_length <- as.numeric(new_somas[index, height_position])
  phi <- as.numeric(c(0, new_somas[index, phi_position]))
  theta <- as.numeric(c(pi/2, new_somas[index, theta_position]))
  r_h <- as.numeric(new_somas[index, r_h_position])
  eccentricity <- as.numeric(new_somas[index, e_position])
  perp_vectors_sph <- matrix(as.numeric(new_somas[index, c(PCA_azimuth_position, PCA_theta_position)]), ncol = 2)
  ellipse_angles <- as.numeric(new_somas[index, w_position])
  curve_coefficients <- matrix(as.numeric(new_somas[index, c(b_position)]), ncol = 3)

  # Rebuild the skeleton: section vectors (spherical -> cartesian), then
  # cumulative sums give the centroid of each cross-section.
  matrix_skeleton <- matrix(0, nrow = 7, ncol = 3)
  matrix_skeleton <- spherical2Cartesian(phi, theta, vector_length)

  skeleton <- apply(matrix_skeleton, 2, cumsum)

  # Semi-axes of each cross-section ellipse.
  major_axis <- r_h * (vector_length[1:(length(vector_length)-1)] + vector_length[2:length(vector_length)])
  minor_axis <- major_axis * eccentricity

  # Unit normals of the cross-section planes.
  perp_PCA_matrix <- spherical2Cartesian(perp_vectors_sph[ ,1], perp_vectors_sph[ ,2], 1)

  for (i in 1:nrow(perp_PCA_matrix))
  {
    # Rotation that aligns the section normal with the Z axis.
    u <- vcrossp(perp_PCA_matrix[i, ], c(0, 0, 1))
    u <- u / as.numeric(normv(u))
    rotation_angle <- acos((perp_PCA_matrix[i,] %*% c(0, 0, 1)))
    rotationMatrix <- rotation_matrix(u,rotation_angle)

    centers <- rotationMatrix %*% matrix(skeleton[i, ], ncol = 1)

    # Sample the ellipse of this section in the rotated (XY) frame.
    fit <- list()
    fit$angle <- ellipse_angles[i]
    fit$maj <- major_axis[i]
    fit$min <- minor_axis[i]
    fit$center <- centers[1:2]

    points_ellipse <- get.ellipse(fit)
    x <- points_ellipse[ ,1]
    y <- points_ellipse[ ,2]
    z <- centers[3]

    elipse <- cbind(x, y, z)
#    u <- vcrossp(c(cos(ellipse_angles[i]), sin(ellipse_angles[i]), 0), c(1, 0, 0));
#    u <- u / as.numeric(norm(u))
#    rotationZ <- rotation_matrix(u, ellipse_angles[i])
#
#    rotated_ellipse <- cbind(x, y, matrix(0, nrow = length(x), ncol = 1))
#    rotated_ellipse <- t(rotationZ %*% t(rotated_ellipse))
#
#    z <- curve_coefficients[i, 1] + curve_coefficients[i,2] * (rotated_ellipse[ ,1]^2) + curve_coefficients[i,3] * (rotated_ellipse[,2]^2)
#
#    rotated_ellipse[,3] <- z
#
#    elipse <- t(t(rotationZ) %*% t(rotated_ellipse))
#
    # Undo the alignment rotation to place the curve back in soma space.
    recovered_ellipse <- t(t(rotationMatrix) %*% t(elipse))
    curve_points <- rbind(curve_points, data.frame(x = recovered_ellipse[,1], y = recovered_ellipse[,2], z = recovered_ellipse[,3]))
    cartesianEllipses[[i+1]] <- recovered_ellipse
  }

  curve_points <- rbind(curve_points, skeleton[nrow(skeleton),])


  # Cap between the insertion point and the first ellipse.
  points <- rbind(c(0,0,0), cartesianEllipses[[2]])
  faces <- cbind(1, seq(2,(nrow(points)-1),by=1), seq(3,nrow(points),by=1))
  matrixOfFaces <- rbind(matrixOfFaces, faces)

  # Stitch consecutive ellipses (360 points each) with triangle strips.
  for(i in 2:(length(cartesianEllipses)-2))
  {
    points <- matrix(rbind(cartesianEllipses[[i]], cartesianEllipses[[i+1]]), ncol=3, byrow=F)

    initialPosition <- which.min(cartesianEllipses[[i]][,3])

    apex <- seq(initialPosition, length=nrow(cartesianEllipses[[i]])/2, by=2)
    apex[apex>nrow(cartesianEllipses[[i]])] <- (apex[apex>nrow(cartesianEllipses[[i]])]%%(nrow(cartesianEllipses[[i]])+1))+1

    # Closest point of the next ellipse to the chosen start point.
    positionMatch <- which.min(sqrt(rowSums(sweep(cartesianEllipses[[i+1]],2,cartesianEllipses[[i]][initialPosition,],"-")^2)))+nrow(cartesianEllipses[[i]])
    base <- seq(positionMatch, length=nrow(cartesianEllipses[[i]])/2, by=2)

    faces1 <- cbind(apex, base-1, base)
    faces1[faces1>nrow(points)] <- (faces1[faces1>nrow(points)]%%(nrow(points)+1))+361

    faces2 <- cbind(apex, base, base+1)
    faces2[faces2>nrow(points)] <- (faces2[faces2>nrow(points)]%%(nrow(points)+1))+361

    positionMatch <- positionMatch+1
    initialPosition <- initialPosition+1
    apex <- seq(positionMatch, length=nrow(cartesianEllipses[[i]])/2, by=2)
    apex[apex>nrow(points)] <- (apex[apex>nrow(points)]%%(nrow(points)+1))+361

    base <- seq(initialPosition, length=nrow(cartesianEllipses[[i]])/2, by=2)

    prefaces <- cbind(base-1, base)
    prefaces[prefaces>nrow(cartesianEllipses[[i]])] <- (prefaces[prefaces>nrow(cartesianEllipses[[i]])]%%(nrow(cartesianEllipses[[i]])+1))+1
    faces3 <- cbind(apex, prefaces)


    prefaces <- cbind(base+1, base)
    prefaces[prefaces>nrow(cartesianEllipses[[i]])] <- (prefaces[prefaces>nrow(cartesianEllipses[[i]])]%%(nrow(cartesianEllipses[[i]])+1))+1
    faces4 <- cbind(apex, prefaces)


    faces <- rbind(faces1, faces2, faces3, faces4)
    colnames(faces) <- NULL
    matrixOfFaces <- rbind(matrixOfFaces, (faces+1+(360*(i-2))))
  }

  # Cap between the last ellipse and the apex of the skeleton.
  points <- rbind(cartesianEllipses[[7]], skeleton[nrow(skeleton),])
  faces <- cbind(seq(1,(nrow(points)-1),by=1), seq(2,nrow(points),by=1), 361)
  matrixOfFaces <- rbind(matrixOfFaces, (faces+1+(360*(i-2+1))))

  points <- rbind(skeleton[1,], cartesianEllipses[[2]])
  faces <- cbind(seq(1,(nrow(points)-1),by=1), seq(2,nrow(points),by=1), 361)
  matrixOfFaces <- rbind(matrixOfFaces, (faces+1+(360*(i-2+1))))

  # Clean the raw mesh, resample it and run a Poisson surface reconstruction
  # through meshlabserver to obtain a watertight mesh.
  reoriented <- vcgClean(tmesh3d( t(cbind(curve_points,1)),t(matrixOfFaces) ),sel=7)
  new_vertices <- vcgSample(reoriented,type="mc",SampleNum = 1E4)

  write_PLY_only_points( new_vertices, file.path( tempdir(), "vertices_non_poisson" ))
  execute_meshlab_script(paste0(file.path( tempdir(), "vertices_non_poisson" ), ".ply"), paste0(file.path( tempdir(), "vertices_poisson" ), ".ply"), "poisson_reconstruction_with_normals")
  poisson_output_mesh <- vcgPlyRead(paste0(file.path( tempdir(), "vertices_poisson" ), ".ply"))

  return(list(vertices = t(poisson_output_mesh$vb[1:3,]), faces = t(poisson_output_mesh$it) ))
}
\ No newline at end of file
diff --git a/R/Geometric_operators.R b/R/Geometric_operators.R
new file mode 100644
index 0000000..0889833
--- /dev/null
+++ b/R/Geometric_operators.R
@@ -0,0 +1,284 @@

# A set of points approximating an ellipse is received. The points are moved
# to the XY plane, where the major and minor axes are defined. Then one point
# of the major axis and one of the minor axis are converted to spherical
# coordinates: the ellipse is represented through its diagonals.
ellipseSphPoints<-function(polygon)
{
  # Centroid of the polygon
  elipse_center_of_mass <- colSums(polygon) / nrow(polygon)

  # PCA gives the rotation onto the XY plane
  component_analysis <- prcomp(polygon)
  rotated_ellipse <- rotate3d(polygon, matrix = component_analysis$rotation)

  # One point per axis
  point_over_diameter_X <- polygon[which.max(rotated_ellipse[ ,1]), ]
  point_over_diameter_Y <- polygon[which.max(rotated_ellipse[ ,2]), ]

  # Vectors from the centroid to each axis point, so the centroid becomes
  # the origin of the spherical coordinate system.
  v_point_X <- point_over_diameter_X - elipse_center_of_mass
  v_point_Y <- point_over_diameter_Y - elipse_center_of_mass

  # Spherical coordinates of both points with respect to the centroid.
  # NOTE(review): cartesian2Spherical now takes (x, y, z); this single-vector
  # call matches the old commented-out signature and would fail if executed —
  # confirm whether this helper is still used.
  spherical_X <- cartesian2Spherical(c(v_point_X))
  spherical_Y <- cartesian2Spherical(c(v_point_Y))
  return(c(spherical_X, spherical_Y))
}


# Rebuild a cartesian ellipse from the spherical "diagonal" representation
# produced by ellipseSphPoints.
# ellipse_diameters: six spherical values (two points);
# ellipse_center_of_mass: 3D centroid; number_of_points: samples to generate.
ellipseCartPoints<-function(ellipse_diameters, ellipse_center_of_mass, number_of_points = 1000)
{
  # Spherical coordinates holding half of each ellipse diameter
  spherical_coordinates_X <- c(ellipse_diameters[1], ellipse_diameters[2], ellipse_diameters[3])
  spherical_coordinates_Y <- c(ellipse_diameters[4], ellipse_diameters[5], ellipse_diameters[6])

  # Spherical -> cartesian.
  # NOTE(review): spherical2Cartesian now takes (azimuth, elevation, r); this
  # single-vector call matches the legacy signature — confirm usage.
  cartesian_coordinatesX <- spherical2Cartesian(c(spherical_coordinates_X))
  cartesian_coordinatesY <- spherical2Cartesian(c(spherical_coordinates_Y))

  # Two points around the centroid that will lie on the X axis once the
  # ellipse is rotated onto the XY plane.
  point_X1 <- ellipse_center_of_mass + cartesian_coordinatesX
  point_X2 <- ellipse_center_of_mass - cartesian_coordinatesX

  # Two points around the centroid that will lie on the Y axis.
  point_Y1 <- ellipse_center_of_mass + cartesian_coordinatesY
  point_Y2 <- ellipse_center_of_mass - cartesian_coordinatesY

  # The five points used to rebuild the ellipse.
  # Fixed: the original referenced undefined `pointX1`/`pointX2`/`pointY1`/
  # `pointY2` (missing underscores), which made this function fail at run time.
  ellipse4points <- rbind(point_X1, point_X2, point_Y1, point_Y2, ellipse_center_of_mass)

  # Put the five points onto the XY plane to generate the ellipse
  PCA <- prcomp(ellipse4points)
  rotated4_points_ellipse <- rotate3d(ellipse4points, matrix = PCA$rotation)

  # Generate number_of_points (default 1000) points of the ellipse on XY
  random_points <- runif(number_of_points, min = 0, max = 2 * pi)
  a <- sqrt(sum((rotated4_points_ellipse[5, ] - rotated4_points_ellipse[1, ])^2))
  b <- sqrt(sum((rotated4_points_ellipse[5, ] - rotated4_points_ellipse[3, ])^2))
  x <- a * cos(random_points) + rotated4_points_ellipse[5,1]
  y <- b * sin(random_points) + rotated4_points_ellipse[5,2]

  # Rotate the reconstructed ellipse back to its original orientation
  reconstructed_ellipse <- cbind(x, y, rotated4_points_ellipse[1,3])
  final_ellipse<-rotate3d(reconstructed_ellipse, matrix = t(PCA$rotation))
  return(final_ellipse)
}

# A function receiving n points (x and y coordinates) and computing the
# ellipse that best fits them.
# Input:
#  -x: x values of the 2D points.
#  -y: y values of the 2D points.
# Output:
#  -List of parameters needed to represent an ellipse: centroid, axis
#   lengths, axis angle and the regression coefficients.
fit.ellipse <- function (x, y = NULL) {
  # Direct least-squares ellipse fit (Fitzgibbon / Halir-Flusser style):
  # builds the quadratic and linear design matrices, solves the reduced
  # eigenproblem and converts the conic coefficients into geometric
  # parameters.
  #
  # x, y: point coordinates in any form accepted by xy.coords().
  # Returns a list with the conic coefficients (`coef`, named a..f), the
  # ellipse `center` (named x/y), the `major`/`minor` semi-axis lengths and
  # the rotation `angle` of the axes (radians).
  #
  # Fixed: the intermediate matrix was stored in a variable named `T`,
  # shadowing TRUE and then being (accidentally) passed as `byrow = T`
  # below; renamed to `Tmat` and `byrow = TRUE` made explicit. The unused
  # `EPS` constant was removed.
  dat <- xy.coords(x, y)

  # Design matrices: quadratic part [x^2, xy, y^2] and linear part [x, y, 1].
  D1 <- cbind(dat$x * dat$x, dat$x * dat$y, dat$y * dat$y)
  D2 <- cbind(dat$x, dat$y, 1)
  S1 <- t(D1) %*% D1
  S2 <- t(D1) %*% D2
  S3 <- t(D2) %*% D2
  Tmat <- -solve(S3) %*% t(S2)
  M <- S1 + S2 %*% Tmat
  M <- rbind(M[3,] / 2, -M[2,], M[1,] / 2)

  # Pick the eigenvector satisfying the ellipse constraint 4ac - b^2 > 0.
  evec <- eigen(M)$vec
  cond <- 4 * evec[1,] * evec[3,] - evec[2,]^2
  a1 <- evec[, which(cond > 0)]
  f <- c(a1, Tmat %*% a1)
  names(f) <- letters[1:6]

  # Center: solve the gradient system of the conic.
  A <- matrix(c(2*f[1], f[2], f[2], 2*f[3]), nrow=2, ncol=2, byrow=TRUE )
  b <- matrix(c(-f[4], -f[5]), nrow=2, ncol=1, byrow=TRUE)
  soln <- solve(A) %*% b

  b2 <- f[2]^2 / 4

  center <- c(soln[1], soln[2])
  names(center) <- c("x", "y")

  # Semi-axis lengths from the conic invariants.
  num <- 2 * (f[1] * f[5]^2 / 4 + f[3] * f[4]^2 / 4 + f[6] * b2 - f[2]*f[4]*f[5]/4 - f[1]*f[3]*f[6])
  den1 <- (b2 - f[1]*f[3])
  den2 <- sqrt((f[1] - f[3])^2 + 4*b2)
  den3 <- f[1] + f[3]

  semi.axes <- sqrt(c( num / (den1 * (den2 - den3)), num / (den1 * (-den2 - den3)) ))

  # calculate the angle of rotation
  term <- (f[1] - f[3]) / f[2]
  angle <- atan(1 / term) / 2

  list(coef = f, center = center, major = max(semi.axes), minor = min(semi.axes), angle = unname(angle))
}

# Generate points on an ellipse from the parameters fitted by fit.ellipse.
# Input:
#  -fit: list of values returned by fit.ellipse.
#  -n: number of points to generate for the ellipse.
# Output:
#  -x: x values of the generated ellipse points.
#  -y: y values of the generated ellipse points.
get.ellipse <- function(fit, n = 360 )
{
  # Sample n points along the ellipse described by `fit` (a list with
  # $center, $maj, $min and $angle, as built from fit.ellipse results).
  # Returns an n x 2 matrix with columns x and y.

  tt <- seq(0, 2*pi, length=n)
  sa <- sin(fit$angle)
  ca <- cos(fit$angle)
  ct <- cos(tt)
  st <- sin(tt)

  # Parametric equations of a rotated ellipse.
  x <- fit$center[1] + fit$maj * ct * ca - fit$min * st * sa
  y <- fit$center[2] + fit$maj * ct * sa + fit$min * st * ca

  cbind(x = x, y = y)
}


# 4x4 homogeneous translation matrix (row-vector convention: the translation
# lives in the last row). NOTE(review): this shadows rgl::trans3d.
trans3d <- function(vector)
{
  trans_matrix <- rbind(c(1, 0, 0, 0), c(0, 1, 0, 0), c(0, 0, 1, 0), c(vector[1], vector[2], vector[3], 1))
  return(trans_matrix)
}

# 4x4 homogeneous rotation matrix around the X axis.
rotation3dX<-function(radians)
{
  c <- cos(radians)
  s <- sin(radians)
  return( rbind(c(1,0,0,0),c( 0,c,s,0),c( 0,-s,c,0),c( 0,0,0,1)))
}

# 4x4 homogeneous rotation matrix around the Y axis.
rotation3dY<-function(radians)
{
  c <- cos(radians)
  s <- sin(radians)
  return( rbind(c(c,0,-s,0),c( 0,1,0,0),c( s,0,c,0),c( 0,0,0,1)))
}

# Rodrigues rotation matrix: u is a vector perpendicular to the two vectors
# to be aligned and theta is the angle between them.
rotation_matrix<-function(u, theta)
{
  rotationMatrix <- t(matrix(c(cos(theta)+u[1]^2*(1-cos(theta)), u[1]*u[2]*(1-cos(theta))-u[3]*sin(theta), u[1]*u[3]*(1-cos(theta))+u[2]*sin(theta),
                               u[2]*u[1]*(1-cos(theta))+u[3]*sin(theta), cos(theta)+u[2]^2*(1-cos(theta)), u[2]*u[3]*(1-cos(theta))-u[1]*sin(theta),
                               u[3]*u[1]*(1-cos(theta))-u[2]*sin(theta), u[3]*u[2]*(1-cos(theta))+u[1]*sin(theta), cos(theta)+u[3]^2*(1-cos(theta))),nrow=3,ncol=3))

  return(rotationMatrix)
}

# Build the translation and X/Y rotation matrices that align `vector` with
# the Z axis; returns list(translation, rotX, rotY). The inverse matrices
# (itrans/irotX/irotY) are computed but not returned.
align3dZ <-function(vector)
 {
  # Translation to the origin
  trans<-trans3d(-vector)
  itrans <- trans3d(vector);

  u <- vector/sqrt(sum(vector^2));
  uz <- u; uz[1] = 0; uz[2] = 0;
  uzy <- u; uzy[1] = 0;


  # Angle around X that brings the YZ projection onto the Z axis.
  modUz <- uz[3];
  modUzy <- sqrt(sum(uzy^2))
  radsX <- acos(modUz/modUzy);
  if (u[2] < 0) {
    radsX <- -radsX;
  }
  rotX <- rotation3dX(radsX);
  irotX <- rotation3dX(-radsX);

  # Angle around Y that removes the remaining X component.
  modUzx <- 1;
  modUzBis <- modUzy;
  radsY <- acos(modUzBis / modUzx);
  if (u[1] < 0) {
    radsY = -radsY;
  }

  rotY <- rotation3dY(-radsY);
  irotY <- rotation3dY(radsY);
  return(list(trans, rotX, rotY))
}

#Convertir coordenadas 3D de del
# Convert 3D coordinates from the Cartesian to the spherical system, based
# on the definition provided at
# http://es.wikipedia.org/wiki/Coordenadas_esf%C3%A9ricas#Relaci.C3.B3n_con_las_coordenadas_cartesianas
# Input:
#  -x, y, z: cartesian coordinates (scalars or equal-length vectors).
# Output:
#  -Matrix with one row per point and columns azimuth, elevation and radius.
# cartesian2Spherical <- function(cartesian_coord_matrix)
# {
#   #El caso en el
#   if (cartesian_coord_matrix[3] < 1.0e-8 & cartesian_coord_matrix[3] > -1.0e-8)
#   {
#     colatitude <- pi / 2;
#   }else if (cartesian_coord_matrix[3] > 0)
#   {
#     colatitude <- atan(sqrt(cartesian_coord_matrix[1]^2 + cartesian_coord_matrix[2]^2) / cartesian_coord_matrix[3]);
#   }else{
#     colatitude <- atan(sqrt(cartesian_coord_matrix[1]^2 + cartesian_coord_matrix[2]^2) / cartesian_coord_matrix[3]) + pi;
#   }
#
#   if (cartesian_coord_matrix[1] < 1.0e-8 & cartesian_coord_matrix[1] > -1.0e-8){
#     azimuth <- (pi/2) * sign(cartesian_coord_matrix[2])
#   }else if(cartesian_coord_matrix[1] > 0 & cartesian_coord_matrix[2] > 0){
#     azimuth <- atan(cartesian_coord_matrix[2] / cartesian_coord_matrix[1])
#   }else if(cartesian_coord_matrix[1] > 0 & cartesian_coord_matrix[2] < 0){
#     azimuth <- 2 * pi + atan(cartesian_coord_matrix[2] / cartesian_coord_matrix[1])
#   }else{
#     azimuth <- pi + atan(cartesian_coord_matrix[2] / cartesian_coord_matrix[1])
#   }
#
#   r <- sqrt(sum(cartesian_coord_matrix^2))
#   return(c(colatitude, azimuth, r))
# }

cartesian2Spherical <- function(x, y, z)
{
  # atan2-based conversion: azimuth measured in the XY plane, elevation
  # above it, r the Euclidean distance to the origin.
  planar_radius <- sqrt(x^2 + y^2)
  azimuth <- atan2(y, x)
  elevation <- atan2(z, planar_radius)
  r <- sqrt(x^2 + y^2 + z^2)
  return(cbind(azimuth, elevation, r))
}

#Convertir coordenadas 3D de del sistema esferico al cartesiano.
# Based on the definition provided at
# http://es.wikipedia.org/wiki/Coordenadas_esf%C3%A9ricas#Relaci.C3.B3n_con_las_coordenadas_cartesianas
# Input:
#  -azimuth, elevation, r: spherical coordinates (scalars or equal-length vectors).
# Output:
#  -Matrix with one row per point and columns x, y, z.
spherical2Cartesian<-function(azimuth,elevation,r)
{
  x <- r * cos(elevation) * cos(azimuth)
  y <- r * cos(elevation) * sin(azimuth)
  z <- r * sin(elevation)
  return(cbind(x, y, z))
}

# Cartesian (x, y) -> polar; returns c(theta, r).
cart2pol <- function(x, y)
{
  r <- sqrt(x^2 + y^2)
  # Fixed: atan2 keeps the quadrant information; the previous atan(y/x)
  # returned angles off by pi for x < 0 and did not round-trip through
  # pol2cart.
  theta <- atan2(y, x)

  return(c(theta,r))
}

# Polar (theta, r) -> Cartesian; returns c(x, y).
pol2cart <- function(theta,r)
{
  x<-r*cos(theta)
  y<-r*sin(theta)
  return(c(x,y))
}

# Cross product of two 3D vectors, returned as a 1x3 matrix.
vcrossp <- function( a, b ) {
  result <- matrix( NA, 1, 3 )
  result[1] <- a[2] * b[3] - a[3] * b[2]
  result[2] <- a[3] * b[1] - a[1] * b[3]
  result[3] <- a[1] * b[2] - a[2] * b[1]
  return(result)
}

# Euclidean norm of a vector (returned as a 1x1 matrix from %*%).
normv<- function(a)
{
  a<-as.numeric(a)
  return(sqrt(a%*%a))
}

diff --git a/R/IO-mesh.R b/R/IO-mesh.R
new file mode 100644
index 0000000..68cb710
--- /dev/null
+++ b/R/IO-mesh.R
@@ -0,0 +1,126 @@
#' Read binary PLY file
#'
#' Read vertices, faces and color or quality of the vertices of a mesh from a PLY file in little endian 1.0 format
#'
#' @param filePath path to the PLY file
#'
#' @return a list of three components of the mesh, the vertices, the faces and the color or quality of the vertices
#' @export
read_binary_PLY <- function(filePath) {
  # Call to C++ code that reads the binary file and write it to a temp file
  path <- read_PLY(filePath, file.path(tempdir(),"temp.dat") )
  data <- scan(file.path(tempdir(),"temp.dat"))

  # Read vertices, faces and quality
  vertex <- matrix(data[1:(4 * path[[1]])], ncol = 4, byrow = TRUE)
  faces <- matrix(data[((4 * path[[1]]) + 1):length(data)], ncol = 4, byrow = TRUE)

  # First column of each PLY face record is the vertex count; drop it.
  faces <- faces[, 2:4]
  quality <- vertex[ ,4]
  vertices <- vertex[ ,1:3]

  # Add 1 to the faces because index in R starts at 1
  faces <- faces + 1

  # Remove temp file
  file.remove( file.path(tempdir(),"temp.dat") )
  return(list(vertices = vertices, faces = faces, quality = quality))
}

#' Write only the vertices of a mesh in a PLY file
#'
#' Write only the vertices of a mesh in a PLY file so it is saved a point cloud
#'
#' @param vertices matrix Nx3 where each row represents the coordinates of a vertex of the mesh
#' @param file_name path and name of the PLY file where point cloud is going to be saved
#'   (chosen interactively through a Tk dialog when empty)
#'
#' @return None
write_PLY_only_points <- function(vertices, file_name = "")
{
  if(file_name==""){file_name <- tclvalue(tkgetSaveFile())}

  # Strip any .ply extension: vcgPlyWrite appends it itself.
  file_name <- sub("\\.ply$","", file_name, ignore.case = T)

  # Write as matrix
  return ( vcgPlyWrite(vertices, filename = file_name, binary = T) )

}

#' Read neuron from a VRML file
#'
#' Read vertices, faces of a mesh from a VRML file. Adapted from package geomorph
#'
#' @param file_name path to the VRML file
#'
#' @return a list of two components of the mesh, the vertices and the faces
#' @export
read_VRML <- function(file_name) {
  # Read each caracter one by one.
  b <- scan(file_name, what = "character", skip = 0, sep = "", quiet = T)

  cos <- which(b == "Coordinate") # 'Coordinate' denotes that the definition of vertices starts.
  tri <- which(b == "coordIndex") # 'coordIndex' denotes that the definition of faces starts.
  zz <- triang <- NULL # Data frames of vertices and faces.
  co <- c() # Positions of 'b' that identifies the beginning of the vertices
  en <- c() # Positions of 'b' that identifies the end of the vertices

  # Loop to find the start and the beginning of each block of vertices.
  for (i in 1:length(cos)) {
    co[i] <- cos[i] + 4 # Beginning of the block is 4 character after 'Coordinate'
    en[i] <- which(b[co[i]:length(b)] == "]")[1] - 1 #']' denotes that block is finished so that it should be removed the last character
  }

  # Select that with more vertices
  positionCoords <- which(en == sort(en, T)[1])


  # Remove incorrect coordinates
  zerocoord <- NULL
  # Select vertices that only belongs to the neuron.
  newrange <- b[co[positionCoords]:length(b)]
  # Check that there are vertices
  if (en[positionCoords] != 0) {
    # Selected values are disposed in a matrix
    z <- as.matrix(newrange[1:en[positionCoords]])
    # There must be 3 columns if not there is a definition error
    if (nrow(z)%%3 == 0) {
      # Matrix is disposed in 3 columns
      zz.temp <- matrix(as.numeric(unlist(strsplit(z, split = ","))), ncol = 3, byrow = T)
      zerocoord <- which(as.character(zz.temp[, 1]) == as.character(0) & as.character(zz.temp[, 2]) ==
        as.character(0) & as.character(zz.temp[, 3]) == as.character(0))
      # Wrong positions are removed
      if (length(zerocoord) > 0) {
        zz.temp <- zz.temp[-zerocoord, ]
      }

      zz <- rbind(zz, zz.temp)
    }
  }

  # Similar to the vertices definition applied to the faces
  co.tri <- c()
  en.tri <- c()
  for (i in 1:length(tri)) {
    co.tri[i] <- tri[i] + 2
    en.tri[i] <- which(b[co.tri[i]:length(b)] == "]")[1] - 1
  }
  position <- which(en.tri == sort(en.tri, T)[1])
  newrange.tri <- b[co.tri[position]:length(b)]
  if (en.tri[position] != 0) {
    tria <- as.matrix(newrange.tri[1:en.tri[position]])

    if (nrow(tria)%%4 == 0) {
      # Face rows end with a -1 separator (4th column), which is dropped.
      triang.temp <- matrix(as.numeric(unlist(strsplit(tria, split = ","))), ncol = 4, byrow = T)[,
        -4]
      if (i == 1) {
        triang <- triang.temp + 1
      } else {
        triang <- rbind(triang, triang.temp + 1)
      }
    }
  }
  colnames(zz) <- c("x", "y", "z")

  return(list(vertices = zz, faces = triang))
}

diff --git a/R/MeshLab_scripts.R b/R/MeshLab_scripts.R
new file mode 100644
index 0000000..ee3915e
---
/dev/null
+++ b/R/MeshLab_scripts.R
@@ -0,0 +1,70 @@
#options(SomaMS.meshlabserver.path="/home/luis/package_sources/MESHLAB_133/meshlab_working/meshlab/distrib/meshlabserver")

# Get meshlabserver path.
# Returns the binary configured through option "SomaMS.meshlabserver.path"
# when it exists on disk; otherwise falls back to the bare executable name
# (resolved through the system PATH), per platform.
meslabserverpath <- function(){

  # Check for a given path
  path <- getOption("SomaMS.meshlabserver.path")

  # Check that the file exists and
  if(!is.null(path)){
    if( file.exists(path) )
      return(path)
    else
      warning(sprintf("Meshlab server not found in %s. Falling back to default path",path))
  }

  # Default behaviour: meshlabserver in path
  if(.Platform$OS.type == "unix") {
    return("meshlabserver")
  }
  else {
    return ("meshlabserver.exe")
  }
}

# Run one of the bundled MeshLab scripts on a mesh file.
# input/output: mesh file paths (chosen interactively when empty);
# meshlab_script: one of "ambient_occlusion", "shape_diameter",
# "poisson_reconstruction", "poisson_reconstruction_with_normals".
# Returns the exit status of meshlabserver.
execute_meshlab_script <- function(input = "", output = "", meshlab_script) {
  if (input == "") {
    input <- tk_choose.files(default = "", caption = "Select a file")
  }
  if (output == "") {
    output <- tclvalue(tkgetSaveFile())
  }

  # Force the file to be have .ply extension
  if (file_ext(output) != "ply") {
    output <- paste0(output, ".ply")
  }

  # Write the script to run with cmd. NOTE: A path with one or more spaces does not work. Example: 'C:/my
  # soma.off' does not work. 'C:/mysoma.off' is OK.

  # Path to MeshLab script
  if(meshlab_script=="ambient_occlusion"){
    script <- file.path(system.file("meshlab_scripts",package="SomaMS"),"ambientOcclusion.mlx")
  }else if(meshlab_script=="shape_diameter"){
    script <- file.path(system.file("meshlab_scripts",package="SomaMS"),"shapeDiameter.mlx")
  }else if(meshlab_script=="poisson_reconstruction"){
    script <- file.path(system.file("meshlab_scripts",package="SomaMS"),"poisson_reconstruction.mlx")
  }else if(meshlab_script=="poisson_reconstruction_with_normals"){
    # Fixed: removed a redundant double assignment (`script <- script <- ...`).
    script <- file.path(system.file("meshlab_scripts",package="SomaMS"),"poisson_reconstruction_with_normals.mlx")
  }else{
    # Fixed: fail early with a clear message instead of a later
    # "object 'script' not found" error.
    stop("Unknown meshlab_script: ", meshlab_script)
  }

  # shquote
  if(.Platform$OS.type == "unix") {
    type <- "sh"
  }
  else {
    type <- "cmd"
  }

  # Run the script
  #commandOrder <- paste0(meslabserverpath()," -i '", input, "' -o '", output, "' -m vq ", "-s '", script,"'")
  return( system2(meslabserverpath(),c("-i", shQuote(input,type=type),
                                       "-o", shQuote(output,type=type),
                                       "-m vq",
                                       "-s", shQuote(script,type=type) ),
                  stderr = "" , stdout = "") )
}

diff --git a/R/RcppExports.R b/R/RcppExports.R
new file mode 100644
index 0000000..90cf3ce
--- /dev/null
+++ b/R/RcppExports.R
@@ -0,0 +1,11 @@
# This file was generated by Rcpp::compileAttributes
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393

read_PLY <- function(input, output) {
    .Call('SomaMS_read_PLY', PACKAGE = 'SomaMS', input, output)
}

poly_volume <- function(x) {
    .Call('SomaMS_poly_volume', PACKAGE = 'SomaMS', x)
}

diff --git a/R/characterize.soma3d.R b/R/characterize.soma3d.R
new file mode 100644
index 0000000..80df5f9
--- /dev/null
+++ b/R/characterize.soma3d.R
@@ -0,0 +1,223 @@
#' Characterize a single soma
#'
#' @export
characterize.soma3d <- function( soma, name = "soma" ){

  # Validate repaired
  stopifnot( length(name)>0 )

  # If soma is already a ply path keeps it, if its a vrml read and validates and if is a list validates.
  soma.loaded <- utils.loadsoma(soma)
  stopifnot(!is.null(soma.loaded))

  soma_PLY <- tmesh3d(rbind(t(soma.loaded$vertices), 1), t(soma.loaded$faces))

  ### Characterization pipeline starts here (originally Sergio's code)

  #Remove isolate pieces and smooth the mesh
  smoothed_mesh <- vcgSmooth(vcgIsolated(soma_PLY, facenum = NULL, diameter = NULL), type = c("laplacian"), iteration = 50)

  #Compute centroid and place the centroid in the origin
  center_of_mass <- colSums(vert2points(smoothed_mesh)) / nrow(vert2points(smoothed_mesh))
  smoothed_mesh <- translate3d(smoothed_mesh, -center_of_mass[1], -center_of_mass[2], -center_of_mass[3])

  vertices_soma <- vert2points(smoothed_mesh)

  #Compute Feret diameter to obtain the
  convex_hull <- convhulln(vertices_soma, "FA")$hull
  convex_vertices <- vertices_soma[sort(unique(c(convex_hull))), ]
  radius <- max(apply(convex_vertices, 1, normv))

  #Generate points in the sphere to cast rays to them. Each curve is composed by 3000 points (theta).
  #We generate 6 curves and 2 extreme points (phi).
  set.seed(1)
  theta <- seq(0, 2*pi, length = 3000)
  phi <- seq(pi, 0, length = 8)

  #Normal vectors denote the direction of the rays.
  malla <- smoothed_mesh
  malla$vb[1:3, ] <- t(vertices_soma)
  ellipse <- list()
  list_of_ellipses <- list()
  ellipses_centroids <- matrix(NA, ncol = 3, nrow = length(phi))
  for(i in 1:length(phi))
  {
    #Cartesian points from the spherical coordinates
    x <- radius * sin(theta) * sin(phi[i])
    y <- radius * cos(phi[i])
    z <- radius * cos(theta) * sin(phi[i])

    #Cartesian points denote the normals
    normals <- cbind(x,y,z)

    #Define origin for each ray. As always is the same point, we generate a zero matrix
    vertices <- matrix(rep(c(0, 0, 0), 3000), ncol = 3000)

    # NOTE(review): `x` is a numeric vector here, so `x$vb <- ...` relies on
    # R coercing the LHS to a list (with a warning) before the mesh3d fields
    # are set — confirm this still behaves as intended on the target R version.
    x$vb <- vertices
    x$normals <- t(normals)
    #Change class to mesh3d to execute vcgRaySearch
    class(x) <- "mesh3d"

    #Cast rays
    raytracer <- vcgRaySearch(x,malla)
    ellipse[[i]] <- raytracer
    #Save each vertex of the curve
    list_of_ellipses[[i]] <- vert2points(raytracer)
    #Centroid of each curve
    ellipses_centroids[i, ] <- colSums(list_of_ellipses[[i]]) / nrow(list_of_ellipses[[i]])
  }

  #Place lowest ellipse in the origin
  insertion_point<-ellipses_centroids[1, ]
  for(i in 1:length(list_of_ellipses))
  {
    list_of_ellipses[[i]] <- translate3d(list_of_ellipses[[i]], -insertion_point[1], -insertion_point[2], -insertion_point[3])
  }


  #Also shift the soma mesh to the same place
  smoothed_mesh <- translate3d(smoothed_mesh, -insertion_point[1], -insertion_point[2], -insertion_point[3])
  ellipses_centroids <- translate3d(ellipses_centroids, -insertion_point[1], -insertion_point[2], -insertion_point[3])

  number_of_curves<-length(list_of_ellipses) - 2

  perp_vectors_sph <- matrix(0, nrow = number_of_curves, ncol = 2) #Perpendicular vector to curve in spherical coords.
  ellipse_angles <- matrix(0, nrow = number_of_curves, ncol = 1) #Angle of the elipse in the XY plane.
  eccentricity <- matrix(0, nrow = number_of_curves, ncol = 1) #Relation between semiaxes.
  curve_coefficients <- matrix(0, number_of_curves, ncol = 3) #Coefficients of z=a_0+a_1*x^2+a_2*y^2.
  curve_centroids <- matrix(0, nrow = length(list_of_ellipses), ncol = 3) #Centroids of the curves.
  perp_PCA_matrix <- matrix(0, nrow = number_of_curves, ncol = 3) #Perpendicular vector to curve in cartesian coords.
  ellipse_angles_vectors <- matrix(0, nrow = number_of_curves, ncol = 3) #Vector representation of the angle of the ellipse.
  semiaxis <- matrix(0, nrow = number_of_curves, ncol = 1) #One of the semiaxis of the ellipse.

  #Compute parameters of each curve.
  for (i in 2:(length(list_of_ellipses) - 1))
  {
    PCA <- prcomp(list_of_ellipses[[i]]) #Compute main axis of the curve
    perp_PCA_matrix[i-1, ] <- PCA$rotation[ ,3]#Perpendicular vector to the fittest plane to the curve

    #Rotate curve to place the normal to the plane parallel to the Z axis, that is, the curve will be in the XY plane
    u <- vcrossp(perp_PCA_matrix[i-1, ],c(0, 0, 1))
    u <- u / as.numeric(normv(u))
    theta <- acos((perp_PCA_matrix[i-1,] %*% c(0, 0, 1)))
    rotationMatrix <- rotation_matrix(u, theta)

    rotated_curve <- t(rotationMatrix %*% t(list_of_ellipses[[i]]))

    #Check that the ellipse is over the XY plane because in the other case (when it is under the plane)
    #rotation would be in the opposite direction
    if(min(rotated_curve[ ,3]) < 0)
    {
      perp_PCA_matrix[i-1, ] <- -perp_PCA_matrix[i-1, ]
      u <- vcrossp(perp_PCA_matrix[i-1, ],c(0, 0, 1))
      u <- u / as.numeric(normv(u))
      theta <- acos((perp_PCA_matrix[i-1, ] %*% c(0, 0, 1)))
      rotationMatrix <- rotation_matrix(u,theta)

      rotated_curve <- t(rotationMatrix %*% t(list_of_ellipses[[i]]))
    }

    #Compute ellipse approximation to the curve
    fit_ellipse <- fit.ellipse(rotated_curve[ ,1], rotated_curve[ ,2])

    #Rotate angle and centroid of the ellipse to place it in the original space (before PCA rotation)
    ellipse_angles_vectors[i-1, ] <- as.numeric(t(rotationMatrix) %*% as.numeric(c(cos(fit_ellipse$angle), sin(fit_ellipse$angle), 0)))
    ellipses_centroids[i, ] <- as.numeric(t(rotationMatrix) %*% as.numeric(c(fit_ellipse$center, mean(rotated_curve[ ,3]))))

    eccentricity[i-1] <- fit_ellipse$minor / fit_ellipse$major
    semiaxis[i-1] <- fit_ellipse$major
    ellipse_angles[i-1] <- fit_ellipse$angle

    #Place the angle of the ellipse parallel to X axis. This is needed to compute the approximation of the curve.
    #The angle of the ellipse must be always the same.
    u <- vcrossp(c(cos(fit_ellipse$angle), sin(fit_ellipse$angle), 0), c(1, 0, 0))
    u <- u / as.numeric(normv(u))
    rotationZ <- rotation_matrix(u,fit_ellipse$angle)
    rotated_curve <- t(rotationZ %*% t(rotated_curve))

    x <- rotated_curve[ ,1]
    y <- rotated_curve[ ,2]
    z <- rotated_curve[ ,3]

    #Adjust curve with a cuadratic regression
    data <- data.frame(x, y, z)
    fitModel <- lm(z ~ I(x^2) + I(y^2), data = data)
    curve_coefficients[i-1, ] <- fitModel$coefficients
  }

  ellipses_centroids[length(list_of_ellipses), ] <- list_of_ellipses[[length(list_of_ellipses)]][1, ]

  #Rotate the soma to align the vector between de lowest point and
  #the centroid of the first curve with the Z axis. Also, rotate the
  #curves and the centroids.
  #u=rotation axis, theta=rotation angle
  unit_vector <- ellipses_centroids[2, ] / normv(ellipses_centroids[2, ]);
  u <- vcrossp(unit_vector,c(0, 0, 1));
  u <- u / as.numeric(normv(u));
  theta <- acos((unit_vector %*% c(0, 0, 1)));

  rotationMatrix <- rotation_matrix(u, theta);

  #Rotation of all the geometric measures
  for (i in 1:length(list_of_ellipses))
  {
    list_of_ellipses[[i]] <- t(rotationMatrix %*% t(list_of_ellipses[[i]]))
  }
  ellipses_centroids <- t(rotationMatrix %*% t(ellipses_centroids))
  perp_PCA_matrix <- t(rotationMatrix %*% t(perp_PCA_matrix))
  ellipse_angles_vectors <- t(rotationMatrix %*% t(ellipse_angles_vectors))

  #Rotate the soma to place the upper point of the soma parallel to the
  #X axis. It is rotated around the Z axis.
  unit_vector <- c(ellipses_centroids[8, 1], ellipses_centroids[8, 2], 0) / as.numeric(normv(c(ellipses_centroids[8, 1], ellipses_centroids[8, 2], 0)))

  u <- vcrossp(unit_vector,c(1, 0, 0))
  u <- u / as.numeric(normv(u))
  theta <- acos((unit_vector %*% c(1, 0, 0)))

  rotationZ <- rotation_matrix(u, theta)

  #Rotate all the geometric measures
  for (i in 1:length(list_of_ellipses))
  {

    list_of_ellipses[[i]] <- t(rotationZ %*% t(list_of_ellipses[[i]]))
  }

  ellipses_centroids <- t(rotationZ %*% t(ellipses_centroids))
  perp_PCA_matrix <- t(rotationZ %*% t(perp_PCA_matrix))
  ellipse_angles_vectors <- t(rotationZ %*% t(ellipse_angles_vectors))

  #Compute current angle of the ellipses after all the transformations
  for (i in 1:nrow(perp_PCA_matrix))
  {
    u <- vcrossp(perp_PCA_matrix[i, ], c(0, 0, 1))
    u <- u / as.numeric(normv(u))
    theta <- acos((perp_PCA_matrix[i,] %*% c(0, 0, 1)))
    rotationMatrix <- rotation_matrix(u, theta)
    rotated_angle_matrix <- t(rotationMatrix %*% matrix(ellipse_angles_vectors[i, ], ncol = 1))
    ellipse_angles[i] <- cart2pol(rotated_angle_matrix[1], rotated_angle_matrix[2])[1]
  }

  perp_vectors_sph <- cartesian2Spherical(perp_PCA_matrix[ ,1], perp_PCA_matrix[ ,2], perp_PCA_matrix[ ,3])[ ,1:2]

  #Compute the parameters needed to build the skeleton of the spine.
  #|h|, phi, theta, alpha
  vector_length <- matrix(0, nrow = nrow(ellipses_centroids) - 1, ncol = 1)
  phi <- matrix(0, nrow = nrow(ellipses_centroids) - 2, ncol = 1)
  theta <- matrix(0, nrow = nrow(ellipses_centroids) - 2, ncol = 1)

  section_vector <- diff(ellipses_centroids)
  vector_length <- sqrt(rowSums(section_vector^2))
  angles_section <- cartesian2Spherical(section_vector[ ,1], section_vector[ ,2], section_vector[ ,3])
  phi <- angles_section[ ,1]
  theta <- angles_section[ ,2]

  r_h <- matrix(0, nrow = length(list_of_ellipses) - 2, ncol = 1)
  ellipse_area <- matrix(0, nrow = length(list_of_ellipses) - 2, ncol = 1)
  r_h <- semiaxis / (vector_length[1:(length(vector_length) - 1)] + vector_length[2:(length(vector_length))])

  # One data.frame row holding the full feature vector of this soma.
  instance_row <- data.frame(t(c(vector_length, phi[2:length(phi)], theta[2:length(theta)], r_h,eccentricity, perp_vectors_sph, ellipse_angles, curve_coefficients)))

  return(instance_row)
}
\ No newline at end of file
diff --git a/R/utils.R b/R/utils.R
new file mode 100644
index 0000000..aa167cb
--- /dev/null
+++ b/R/utils.R
@@ -0,0 +1,113 @@
utils.validateSoma <- function(soma, check.quality = F){

  # This can be done shorter in one expression, but legibility is a thing.
+ # + + # Check soma null values + if( is.null(soma) || is.null(soma$vertices) || is.null(soma$faces) ) + return (FALSE) + # Check sublist elements + else if( !is.matrix(soma$vertices) || !is.matrix(soma$faces) || ncol(soma$vertices)!=3 || ncol(soma$faces)!= 3 ) + return (FALSE) + # Check that values are numeric + else if( !is.numeric(soma$vertices) || !is.numeric(soma$faces) ) + return (FALSE) + # Check quality parameter + else if( check.quality && ( is.null(soma$quality) || !is.numeric(soma$quality) ) ) + return (FALSE) + else + return (TRUE) +} + +utils.validateDir <- function(path, pre="", createDir = F ){ + + # Check null and empty + if( length(path)==0 ) + return (FALSE) + + # Check that given paths exist (tmpdir always exist and is unique per R session) + if (!dir.exists(path) ) { + if(createDir){ + warning( sprintf("%s Path \"%s\" does not exist. Creating empty folder",pre,path ) ) + # Stop if failure creating ao.path + return(dir.create(path, showWarnings = T, recursive = T )) + } + else{ + return(FALSE) + } + } + else{ + return(TRUE) + } +} + +utils.somaToPLY <- function(soma, filename, dir=tempdir(), moveToLocation = F ){ + + # Remove filename extension (if it is ply) + filename <- sub("\\.ply$","", filename, ignore.case = T) + + # Is already a file -> Check if it is ply + if( is.character(soma) ){ + # Is ply + if( tolower(file_ext(soma)) == "ply"){ + if(moveToLocation){ + + if(file.rename(soma,file.path(dir,paste0(filename,".ply")))) + return (file.path(dir,paste0(filename,".ply"))) + else + stop("Error moving file to destination") + } + else + return ( soma ) + } + # Is vrml + else if( tolower(file_ext(soma)) == "vrml"){ + soma <- read_VRML(soma) + } + else + return (NULL) + } + + # Is a soma (or we have used read_VRML) + if( is.list(soma) ){ + + stopifnot(utils.validateSoma(soma, check.quality = F)) + + # Dump to temp PLY file + if( vcgPlyWrite(tmesh3d(rbind(t(soma$vertices), 1), t(soma$faces)), binary = T, file.path(dir,filename) ) != 0) + 
stop("Error while writing PLY temporal file") + + ## update name name (.ply is added by vcgPlyWrite) + return( file.path(dir,paste0(filename,".ply")) ) + } + else + return(NULL) +} + +utils.loadsoma <- function(soma){ + + # Is already a file -> Check if it is ply + if( is.character(soma) ){ + # Is ply + if( tolower(file_ext(soma)) == "ply"){ + + vcgmesh <- vcgImport(soma) + return(list(vertices=t(vcgmesh$vb[1:3,]), faces=t(vcgmesh$it))) + } + # Is vrml + else if( tolower(file_ext(soma)) == "vrml"){ + return (read_VRML(soma)) + } + else + return (NULL) + } + + # Is a soma (or we have used read_VRML) + if( is.list(soma) ){ + + stopifnot(utils.validateSoma(soma, check.quality = F)) + return(soma) + } + else + return(NULL) +} \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..87a75c8 --- /dev/null +++ b/README.md @@ -0,0 +1,5 @@ +# Description +The 3DSomaMS component is designed to characterise quantitatively a 3D soma, according to morphological features taken from its image reconstruction, to build a mathematical model that also will allow simulation of artificial somas. + +# Functionality +Selection of soma 3D representation generate table with quantitative characterisation reconstruction of the soma. 
diff --git a/inst/meshlab_scripts/ambientOcclusion.mlx b/inst/meshlab_scripts/ambientOcclusion.mlx new file mode 100644 index 0000000..263b51d --- /dev/null +++ b/inst/meshlab_scripts/ambientOcclusion.mlx @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/inst/meshlab_scripts/poisson_reconstruction.mlx b/inst/meshlab_scripts/poisson_reconstruction.mlx new file mode 100644 index 0000000..765d36e --- /dev/null +++ b/inst/meshlab_scripts/poisson_reconstruction.mlx @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/inst/meshlab_scripts/poisson_reconstruction_with_normals.mlx b/inst/meshlab_scripts/poisson_reconstruction_with_normals.mlx new file mode 100644 index 0000000..0ce81f3 --- /dev/null +++ b/inst/meshlab_scripts/poisson_reconstruction_with_normals.mlx @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/inst/meshlab_scripts/shapeDiameter.mlx b/inst/meshlab_scripts/shapeDiameter.mlx new file mode 100644 index 0000000..c9e4499 --- /dev/null +++ b/inst/meshlab_scripts/shapeDiameter.mlx @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/man/BN_clustering.Rd b/man/BN_clustering.Rd new file mode 100644 index 0000000..42c556e --- /dev/null +++ b/man/BN_clustering.Rd @@ -0,0 +1,31 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/6_BN_clustering.R +\name{BN_clustering} +\alias{BN_clustering} +\title{Clustering Gaussian Bayesian network} +\usage{ +BN_clustering(bayesian_structure, data, clusters, initialization, verbose) +} +\arguments{ +\item{bayesian_structure}{a bn object from bnlearn package whose nodes are gaussians} + +\item{data}{the same dataset used to compute the bayesian_structure} + +\item{clusters}{array of values where each value indicates the number of clusters for that model} + +\item{initialization}{a natural number to initialize the algorithm. 
Use 0 to use kmeans initialization and any other positive number as a seed for a random initialization} + +\item{verbose}{show BIC scores for each model and the optimal number of clusters} +} +\value{ +model_list is the list of parameters that best fit the data according to BIC criteria +} +\description{ +Given a Bayesian network structure, apply clustering to the data with the aim to find clusters of somas +} +\examples{ +somas <- read.table(file.path(tempdir(),"somaReebParameters.csv"), sep = ";", header = T, row.names = 1) +bayesian_structure <- BN_structure_learning(path_to_csv = file.path(tempdir(),"somaReebParameters.csv"), nboots = 200, significant_arcs = 0.95) +fittest_model <- BN_clustering(bayesian_structure, data = somas, clusters = c(2, 3, 4), initialization = 2, T) +} + diff --git a/man/BN_structure_learning.Rd b/man/BN_structure_learning.Rd new file mode 100644 index 0000000..8715e23 --- /dev/null +++ b/man/BN_structure_learning.Rd @@ -0,0 +1,22 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/5_BN_structure_learning.R +\name{BN_structure_learning} +\alias{BN_structure_learning} +\title{Compute the structure of the BN} +\usage{ +BN_structure_learning(somas, nboots, significant_arcs) +} +\arguments{ +\item{somas}{dataframe with the characterization of the somas} + +\item{significant_arcs}{a value between 0 and 1 that denotes a threshold over percentage of times that an arc appeared in the boostrap structures} + +\item{n_boot}{number of times that bootstrap learn the bayesian structure} +} +\value{ +bayesian_model an object of class bn from bnlearn package that representates the structure of the DSBN +} +\description{ +Compute the structure of a Dynamical Spatial Bayesian Network. Also apply bootstrap to choose the most significant arcs. 
+} + diff --git a/man/MAQ_S.Rd b/man/MAQ_S.Rd new file mode 100644 index 0000000..f270293 --- /dev/null +++ b/man/MAQ_S.Rd @@ -0,0 +1,35 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/3_validation.R +\name{MAQ_S} +\alias{MAQ_S} +\title{Compute MAQ_S} +\usage{ +MAQ_S(path_somas_algorithm, path_somas_expert_1, path_somas_expert_2) +} +\arguments{ +\item{path_somas_algorithm}{path to the folder where meshes processed with the first technique are placed. They have to be PLY files} + +\item{path_somas_expert_1}{path to the folder where meshes processed with the second technique are placed. They have to be PLY files} + +\item{path_somas_expert_2}{path to the folder where meshes processed with the third technique are placed. They have to be PLY files} +} +\value{ +A vector of three values where each value is the MAQ_S between two techniques. Concretely they are MAQ_S_12, MAQ_S_13, MAQ_S_23 +} +\description{ +Compute MAQ_S between three different techniques to compare processing methods +} +\examples{ +path_somas_algorithm<- "./temp/final_result" +path_somas_experts<- system.file("test/post_repaired",package="SomaMS") +experts_paths<-list.dirs(path_somas_experts,recursive=F) +path_somas_expert_1<-experts_paths[1] +path_somas_expert_2<-experts_paths[2] +MAQ_S_result <- MAQ_S(path_somas_algorithm,path_somas_expert_1, path_somas_expert_2) +print(paste("MAQ_S_12 is:", MAQ_S_result[1] * 100, "\%")) +print(paste("MAQ_S_13 is:", MAQ_S_result[2] * 100, "\%")) +print(paste("MAQ_S_23 is:", MAQ_S_result[3] * 100, "\%")) +print(paste("Mean MAQ_S between algorithm and experts is:", mean(c(MAQ_S_result[1],MAQ_S_result[2])) * 100, "\%")) +print(paste("Difference between experts MAQ_S and mean MAQ_S of algorithm is:", abs(MAQ_S_result[3]-mean(c(MAQ_S_result[1],MAQ_S_result[2])))*100, "\%")) +} + diff --git a/man/RMSE_computation.Rd b/man/RMSE_computation.Rd new file mode 100644 index 0000000..c718337 --- /dev/null +++ b/man/RMSE_computation.Rd 
@@ -0,0 +1,94 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/3_validation.R +\name{RMSE_computation} +\alias{RMSE_computation} +\title{Compute RMSE} +\usage{ +RMSE_computation(path_somas_algorithm, path_somas_expert_1, path_somas_expert_2, + parallel = TRUE) +} +\arguments{ +\item{technique_1}{path to the folder where meshes processed with the first technique are placed. They have to be PLY files} + +\item{technique_2}{path to the folder where meshes processed with the second technique are placed. They have to be PLY files} + +\item{technique_3}{path to the folder where meshes processed with the third technique are placed. They have to be PLY files} +} +\value{ +RMSE a matrix Nx3 where N is the number of meshes and first column is RMSE between technique 1 and technique 2, second column is RMSE between technique 1 and technique 3 and third column is RMSE between technique 2 and technique 3 +} +\description{ +Compute RMSE between three different techniques to compare processing methods +} +\examples{ +###################################################### +###################### Interexpert ################### +###################################################### +#RMSE between the somas of the algorithm and the somas of the experts before repairing the experts' somas +path_somas_algorithm<- "./temp/final_result" +path_somas_experts<- system.file("test/pre_repaired",package="SomaMS") +experts_paths<-list.dirs(path_somas_experts,recursive=F) +pre_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], TRUE) + +#RMSE between the somas of the algorithm and the somas of the experts after repairing the experts' somas +path_somas_algorithm<- "./temp/final_result" +path_somas_experts<- system.file("test/post_repaired",package="SomaMS") +experts_paths<-list.dirs(path_somas_experts,recursive=F) +post_repaired_RMSE<-RMSE_mesh_distance(path_somas_algorithm, experts_paths[1], experts_paths[2], 
TRUE) + +X11(width = 18, height = 10.37) +par(mfrow=c(1,2)) +values_barplot <- t(pre_repaired_RMSE) +colors <- c(rainbow(3)) +mp <- barplot2(values_barplot, main = "RMSE before repairing experts' somas", ylab = "RMSE", beside = TRUE, + col = colors, ylim = c(0, 1.7), cex.names = 1.5, cex.lab = 1.5, cex.axis = 1.5) +legend("top", legend = c("Procedure Vs Expert 1", "Procedure Vs Expert 2", "Expert 1 Vs Expert 2"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) +mtext(1, at = mp[2,], text = c("Neuron 1", "Neuron 2", "Neuron 3", "Neuron 4", "Neuron 5", "Neuron 6", "Neuron 7", "Neuron 8", "Neuron 9"), line = 0.5, cex = 1) +legend("topleft", legend = "", title = "A", box.col = "transparent", cex = 2) +values_barplot <- t(post_repaired_RMSE) +colors <- c(rainbow(3)) +mp <- barplot2(values_barplot, main = "RMSE after repairing experts' somas", ylab = "RMSE", beside = TRUE, + col = colors, ylim = c(0, 1.7), cex.names = 1.5, cex.lab = 1.5, cex.axis = 1.5) +legend("top", legend = c("Procedure Vs Expert 1", "Procedure Vs Expert 2", "Expert 1 Vs Expert 2"), fill = colors, box.col = "transparent", x.intersp = 0.8, cex = 1.5) +mtext(1, at = mp[2,], text = c("Neuron 1", "Neuron 2", "Neuron 3", "Neuron 4", "Neuron 5", "Neuron 6", "Neuron 7", "Neuron 8", "Neuron 9"), line = 0.5, cex = 1) +legend("topleft", legend = "", title = "B", box.col = "transparent", cex = 2) + +print(paste0("Mean interexpert RMSE: ", mean(post_repaired_RMSE[,3]))) +###################################################### +###################### Intraexpert ################### +###################################################### +path_somas_experts<- system.file("test/intraexpert",package="SomaMS") +experts_paths<-list.dirs(path_somas_experts,recursive=F) + +expert_1_days<-list.dirs(experts_paths[1],recursive=F) +expert_2_days<-list.dirs(experts_paths[2],recursive=F) + +intraexpert_expert_1 <- RMSE_mesh_distance(expert_1_days[1], expert_1_days[2], expert_1_days[3], TRUE) 
+intraexpert_expert_2 <- RMSE_computation(expert_2_days[1], expert_2_days[2], expert_2_days[3], TRUE)
+\description{ +Characterize a single soma +} + diff --git a/man/cluster_simulation.Rd b/man/cluster_simulation.Rd new file mode 100644 index 0000000..d4f159e --- /dev/null +++ b/man/cluster_simulation.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/7_simulation.R +\name{cluster_simulation} +\alias{cluster_simulation} +\title{Simulation of new somas from a selected cluster} +\usage{ +cluster_simulation(model, data, cluster_number, number_of_new_instances, + seed = 1) +} +\arguments{ +\item{model}{a bn.fit object from bnlearn package whose nodes are gaussians. It must be the result of the BN_clustering function} + +\item{data}{the same dataset used to compute the bayesian_structure} + +\item{cluster_number}{a natural number between 1 and the number of clusters of model. Simulate new somas from the selected cluster.} + +\item{number_of_new_instances}{a natural number denoting the number of new somas sampled from simulation} + +\item{seed}{a natural number used as seed during the sampling, by default it is 1} +} +\value{ +new_somas a dataset similar to data with the simulated somas of the selected cluster +} +\description{ +Simulation of new somas from the choosen cluster of the clustering of bayesian networks +} + diff --git a/man/compute_meshes_volumes.Rd b/man/compute_meshes_volumes.Rd new file mode 100644 index 0000000..2c996c1 --- /dev/null +++ b/man/compute_meshes_volumes.Rd @@ -0,0 +1,21 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/3_validation.R +\name{compute_meshes_volumes} +\alias{compute_meshes_volumes} +\title{Compute the volume of meshes in a folder} +\usage{ +compute_meshes_volumes(path_to_mesh_folder) +} +\arguments{ +\item{path_to_somas_folder}{path to the folder where meshes processed with the first technique are placed. 
They have to be PLY files} +} +\value{ +array of N volumes where N is the number of meshes in the folder +} +\description{ +Read one by one the meshes in a folder and compute the volume of each one of the them +} +\examples{ +compute_meshes_volumes("./temp/final_result") +} + diff --git a/man/convert_somas_to_PLY.Rd b/man/convert_somas_to_PLY.Rd new file mode 100644 index 0000000..d0aaa0b --- /dev/null +++ b/man/convert_somas_to_PLY.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/0_convert_somas_to_PLY.R +\name{convert_somas_to_PLY} +\alias{convert_somas_to_PLY} +\title{Read VRMLs of somas} +\usage{ +convert_somas_to_PLY(read_directory, write_directory, parallel = TRUE) +} +\arguments{ +\item{read_directory}{path to the folder where VRML files are placed} + +\item{write_directory}{path to the folder where PLY files will be saved} +} +\value{ +None +} +\description{ +Read all the VRMLs of somas in a folder and export them to PLY format in other folder +} +\examples{ +convert_somas_to_PLY(read_directory = system.file("test/VRMLs",package="SomaMS"), write_directory = file.path(tempdir(),"PLYs"), parallel = TRUE) +} + diff --git a/man/read_VRML.Rd b/man/read_VRML.Rd new file mode 100644 index 0000000..31836ac --- /dev/null +++ b/man/read_VRML.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/IO-mesh.R +\name{read_VRML} +\alias{read_VRML} +\title{Read neuron from a VRML file} +\usage{ +read_VRML(file_name) +} +\arguments{ +\item{file_name}{path to the PLY file} +} +\value{ +a list of two components of the mesh, the vertices and the faces +} +\description{ +Read vertices, faces of a mesh from a VRML file. 
Adapted from package geomorph +} + diff --git a/man/read_binary_PLY.Rd b/man/read_binary_PLY.Rd new file mode 100644 index 0000000..0e94436 --- /dev/null +++ b/man/read_binary_PLY.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/IO-mesh.R +\name{read_binary_PLY} +\alias{read_binary_PLY} +\title{Read binary PLY file} +\usage{ +read_binary_PLY(filePath) +} +\arguments{ +\item{file_name}{path to the PLY file} +} +\value{ +a list of three components of the mesh, the vertices, the faces and the color or quality of the vertices +} +\description{ +Read vertices, faces and color or quality of the vertices of a mesh from a PLY file in little endian 1.0 format +} + diff --git a/man/repair.soma3d.Rd b/man/repair.soma3d.Rd new file mode 100644 index 0000000..5d23567 --- /dev/null +++ b/man/repair.soma3d.Rd @@ -0,0 +1,32 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/1_repair_soma.R +\name{repair.soma3d} +\alias{repair.soma3d} +\title{Repair single soma from data} +\usage{ +repair.soma3d(soma, ao.path = tempdir(), broken.path = tempdir(), + repaired.path = tempdir(), name = "soma", returnSoma = F) +} +\arguments{ +\item{soma}{A filepath or the structure returned by read_VRML or read_PLY representing damaged soma mesh} + +\item{ao.path}{folder where intermediate files with ambient occlusion data are saved} + +\item{broken.path}{Folder where intermediate files with broken somas after GMM threshold are saved} + +\item{repaired.path}{Folder where final file is stored} + +\item{name}{Name of the final file} + +\item{returnSoma}{} +} +\value{ +Repaired soma (if returnSoma is True) +} +\description{ +Repair the surface of the soma to remove holes and vertices inside the mesh +} +\seealso{ +\code{read_VRML} +} + diff --git a/man/repair_soma.Rd b/man/repair_soma.Rd new file mode 100644 index 0000000..b5d04a2 --- /dev/null +++ b/man/repair_soma.Rd @@ -0,0 +1,28 @@ +% Generated by 
roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/1_repair_soma.R +\name{repair_soma} +\alias{repair_soma} +\title{Repair soma morphology} +\usage{ +repair_soma(directory, output_ambient_occlusion, broken_mesh, + output_poisson_reconstruction) +} +\arguments{ +\item{directory}{path to the folder where PLY files representing damaged soma mesh} + +\item{output_ambient_occlusion}{path to the folder where somas with ambient occlusion are saved} + +\item{broken_mesh}{path to the folder where somas were saved after the GMM threshold was imposed and vertices inside the soma or forming cavities were removed} + +\item{output_poisson_reconstruction}{path to the folder where somas were saved after the mesh closing process} +} +\value{ +None +} +\description{ +Repair the surface of the neuron to remove holes and vertices inside the mesh +} +\examples{ +repair_soma(directory = file.path(tempdir(),"PLYs"), output_ambient_occlusion = file.path(tempdir(),"ambient_occlusion"), broken_mesh = file.path(tempdir(),"broken_mesh_ao"), output_poisson_reconstruction = file.path(tempdir(),"poisson_reconstruction_ao")) +} + diff --git a/man/segment.soma3d.Rd b/man/segment.soma3d.Rd new file mode 100644 index 0000000..df6e231 --- /dev/null +++ b/man/segment.soma3d.Rd @@ -0,0 +1,30 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/2_extract_soma.R +\name{segment.soma3d} +\alias{segment.soma3d} +\title{Extract single soma from data} +\usage{ +segment.soma3d(soma, sdf.path = tempdir(), broken_sdf.path = tempdir(), + reconstruction.path = tempdir(), name = "soma") +} +\arguments{ +\item{soma}{A filepath or the structure returned by read_VRML or read_PLY representing damaged soma mesh} + +\item{sdf.path}{folder where intermediate files with shape diameter function data are saved} + +\item{broken_sdf.path}{Folder where intermediate files with broken somas after GMM threshold are saved} + +\item{reconstruction.path}{Folder where 
reconstructed file is stored} + +\item{name}{Name of the soma file} +} +\value{ +Extracted soma +} +\description{ +Apply Gaussian mixture models to segment soma from dendrites +} +\seealso{ +\code{read_VRML} +} + diff --git a/man/segment_somas.Rd b/man/segment_somas.Rd new file mode 100644 index 0000000..7bc6519 --- /dev/null +++ b/man/segment_somas.Rd @@ -0,0 +1,30 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/2_extract_soma.R +\name{segment_somas} +\alias{segment_somas} +\title{Segment somas} +\usage{ +segment_somas(directory, output_shape_diameter, broken_mesh, + output_poisson_reconstruction, final_result) +} +\arguments{ +\item{directory}{path to the folder where PLY files of the somas are saved} + +\item{output_shape_diameter}{path to the folder where somas with shape diameter will be saved} + +\item{broken_mesh}{path to the folder where somas will be saved after that the GMM threshold removes the dendrites} + +\item{output_poisson_reconstruction}{path to the folder where somas will be saved after the mesh closing process} + +\item{final_result}{path to the folder where final segmented somas will be saved after the isolated pieces are removed} +} +\value{ +None +} +\description{ +Apply Gaussian mixture models to segment soma from dendrites +} +\examples{ +segment_somas(directory = file.path(tempdir(), "poisson_reconstruction_ao"), output_shape_diameter = file.path(tempdir(), "sdf"), broken_mesh = file.path(tempdir(), "broken_mesh_sdf"), output_poisson_reconstruction = file.path(tempdir(), "poisson_reconstruction_sdf"), final_result = file.path(tempdir(), "final_result")) +} + diff --git a/man/simulate_new_somas.Rd b/man/simulate_new_somas.Rd new file mode 100644 index 0000000..ad05ac2 --- /dev/null +++ b/man/simulate_new_somas.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/7_simulation.R +\name{simulate_new_somas} +\alias{simulate_new_somas} 
+\title{Simulation of new somas} +\usage{ +simulate_new_somas(model, data, cluster_number = NULL, + number_of_new_instances = 100, seed = 1) +} +\arguments{ +\item{model}{a bn.fit object from bnlearn package whose nodes are gaussians. It must be the result of the BN_clustering function} + +\item{data}{the same dataset used to compute the bayesian_structure} + +\item{cluster_number}{a number between 1 and the number of clusters of model. When it is NULL, new somas are generated from the priori distribution. When is a number between 1 and the number of clusters, simulate new somas from the selected cluster.} + +\item{number_of_new_instances}{a natural number denoting the number of new somas sampled from simulation} + +\item{seed}{a natural number used as seed during the sampling, by default it is 1} +} +\value{ +new_somas a dataset similar to data with the simulated somas +} +\description{ +Simulation of new somas of the clustering of bayesian networks +} + diff --git a/man/simulation_3D_mesh.Rd b/man/simulation_3D_mesh.Rd new file mode 100644 index 0000000..720201a --- /dev/null +++ b/man/simulation_3D_mesh.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/7_simulation.R +\name{simulation_3D_mesh} +\alias{simulation_3D_mesh} +\title{Three-dimensional representation of the simulated somas} +\usage{ +simulation_3D_mesh(new_somas, index) +} +\arguments{ +\item{new_somas}{a dataset with the simulated somas obtained in simulate_new_somas function} + +\item{index}{index of the row of the new soma to renderize} +} +\value{ +curve_points a n x 3 dataframe where each row represents a point in the cartesian three-dimensional space +} +\description{ +Three-dimensional renderization from a dataset where the simulated somas were saved +} + diff --git a/man/soma_caracterization.Rd b/man/soma_caracterization.Rd new file mode 100644 index 0000000..2eb325c --- /dev/null +++ b/man/soma_caracterization.Rd @@ -0,0 +1,24 @@ +% 
Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/4_soma_parametrization.R +\name{soma_caracterization} +\alias{soma_caracterization} +\title{Characterize soma with ray tracing reeb graph} +\usage{ +soma_caracterization(path_somas_repaired, path_csv_file) +} +\arguments{ +\item{path_somas_repaired}{path to the folder where segmented somas were placed. They have to be PLY files.} + +\item{path_csv_file}{path to a csv file where the values of the characterization will be saved} +} +\value{ +None +} +\description{ +Compute the morphological characterization of the soma. First, cast rays from the centroid of the soma to the surface, +defining level curves. Next, orientate soma and adjust curves. Finally compute parameters according to the curves. +} +\examples{ +soma_caracterization(path_somas_repaired = file.path(tempdir(),"reconstructed"), path_csv_file = file.path(tempdir(),"somaReebParameters.csv")) +} + diff --git a/man/wilcoxon_RMSE.Rd b/man/wilcoxon_RMSE.Rd new file mode 100644 index 0000000..3a17686 --- /dev/null +++ b/man/wilcoxon_RMSE.Rd @@ -0,0 +1,38 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/3_validation.R +\name{wilcoxon_RMSE} +\alias{wilcoxon_RMSE} +\title{Wilcoxon test for RMSE} +\usage{ +wilcoxon_RMSE(RMSE_matrix) +} +\arguments{ +\item{RMSE_matrix}{a matrix Nx3 which is the output of} +} +\value{ +A vector of two p-values. The first value is the p-value resulting from wilcoxon between first column of the RMSE matrix and second column of the RMSE matrix. 
The second one is the p-value resulting from wilcoxon between the second column of the RMSE matrix and the third column of the RMSE matrix
mesh} + +\item{file_name}{path and name of the PLY file where point cloud is going to be saved} +} +\value{ +None +} +\description{ +Write only the vertices of a mesh in a PLY file so it is saved a point cloud +} + diff --git a/src/RcppExports.cpp b/src/RcppExports.cpp new file mode 100644 index 0000000..6b10524 --- /dev/null +++ b/src/RcppExports.cpp @@ -0,0 +1,30 @@ +// This file was generated by Rcpp::compileAttributes +// Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 + +#include + +using namespace Rcpp; + +// read_PLY +List read_PLY(std::string input, std::string output); +RcppExport SEXP SomaMS_read_PLY(SEXP inputSEXP, SEXP outputSEXP) { +BEGIN_RCPP + Rcpp::RObject __result; + Rcpp::RNGScope __rngScope; + Rcpp::traits::input_parameter< std::string >::type input(inputSEXP); + Rcpp::traits::input_parameter< std::string >::type output(outputSEXP); + __result = Rcpp::wrap(read_PLY(input, output)); + return __result; +END_RCPP +} +// poly_volume +double poly_volume(NumericMatrix x); +RcppExport SEXP SomaMS_poly_volume(SEXP xSEXP) { +BEGIN_RCPP + Rcpp::RObject __result; + Rcpp::RNGScope __rngScope; + Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP); + __result = Rcpp::wrap(poly_volume(x)); + return __result; +END_RCPP +} diff --git a/src/read_PLY.cpp b/src/read_PLY.cpp new file mode 100644 index 0000000..521f1fb --- /dev/null +++ b/src/read_PLY.cpp @@ -0,0 +1,77 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace Rcpp; +using namespace std; + +// [[Rcpp::export]] +List read_PLY(std::string input,std::string output) +{ + ifstream file(input.c_str()); + string line; + vector tokens; + int i; + int j; + FILE *ptr_myfile; + FILE *ptr_mynewfile; + List out(3); + + float my_record; + int intmy_record; + unsigned char character; + int contador=0; + for(i=1;i<=11;i++) + { + getline(file, line); + istringstream iss(line); + copy(istream_iterator(iss), + istream_iterator(), + back_inserter >(tokens)); + } 
+ file.close(); + int vertex=atoi(tokens[9].c_str()); + int faces=atoi(tokens[24].c_str()); + + for(int j=0;j + +using namespace Rcpp; + +// [[Rcpp::export]] +double poly_volume(NumericMatrix x) { + int nrow = x.nrow(); + int ncol= x.ncol(); + + NumericVector p1(3); + NumericVector p2(3); + NumericVector p3(3); + + double total=0; + for(int i = 0; i < nrow; i+=3) { + for(int j = 0; j < ncol; ++j) { + p1[j]= x(i,j); + p2[j]= x((i+1),j); + p3[j]= x((i+2),j); + + } + + total+=(-(p3[0]*p2[1]*p1[2])+(p2[0]*p3[1]*p1[2])+(p3[0]*p1[1]*p2[2])-(p1[0]*p3[1]*p2[2])-(p2[0]*p1[1]*p3[2])+(p1[0]*p2[1]*p3[2]))/6; + } + return abs(total); +} +