Commit 610adaef authored by BenChurcheward

Updated version - with log values for coverage

parent f4d4a2ed
@@ -34,7 +34,7 @@ for (i in 1:nrow(allsamples_copy)){
allsamples_copy[i,j] <-1
}
}
-allsamples_copy[i,3:171]<-log10(allsamples_copy[i,3:171])
+allsamples_copy[i,3:171]<- -log10(allsamples_copy[i,3:171])
test <- t(rbind(allsamples_copy[i,3:171], samples_env))
colnames(test)<-c('coverage','env')
rownames(test)<-NULL
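For reference, a minimal sketch of what the changed line now computes, on a made-up coverage vector (values clamped to 1 in the loop above map to 0 under -log10; the vector here is illustrative, not data from the script):

# Toy example of the -log10 transform applied to coverage values.
# -log10(1) = 0; smaller coverages yield larger transformed values.
coverage <- c(1, 0.5, 0.01, 0.001)
neg_log_cov <- -log10(coverage)
print(neg_log_cov)  # 0.000 0.301 2.000 3.000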
@@ -83,17 +83,20 @@ for (i in 1:nrow(allsamples_copy)){
# /!\ CAUTION /!\ #
####################
-#4050 genomes are not classified as the same type as the metagenome with the maximum number of mapped reads
+#4175 genomes are not classified as the same type as the metagenome with the maximum number of mapped reads
#This is more than half of the number of genomes we have...
#######################
# Table of MinGenTime #
#######################
-write.table(allsamples_copy, file='allsamples_with_type.csv', sep='\t')
+#write.table(allsamples_copy, file='allsamples_with_type.csv', sep='\t')
+write.table(allsamples_copy, file='allsamples_with_type.log.csv', sep='\t')
allsamples_useful <- read.table('all_samples_with_type_with_growthpred.csv', sep='\t', h=TRUE)
+allsamples_useful_log <- read.table('allsamples_with_type_only_growthpred_names.log.csv', sep='\t', h=TRUE)
#mingentime <- read.table('~/PHD/growthpred_results/results_growthpred_MarineDB.csv', sep='\t', header=TRUE)
mingentime_mapping <- read.table('results_growthpred_MarineDB_with_coverage.csv', sep='\t', header=TRUE)
+allsamples_useful_log <- allsamples_useful_log[order(allsamples_useful_log$genome),]
allsamples_useful <- allsamples_useful[order(allsamples_useful$genome),]
mingentime_mapping <- mingentime_mapping[order(mingentime_mapping$MAG),]
mingentime_mapping$Type<- ''
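As a hedged illustration of how a mismatch count like the 4175 noted above could be produced: one row per genome, comparing the assigned type against the environment whose metagenome recruits the most reads. The data frame and column names below are illustrative placeholders, not objects from this script:

# Hypothetical sketch: count genomes whose assigned type disagrees with
# the environment of the metagenome recruiting the most of their reads.
genome_types <- data.frame(
  assigned_type = c('MES', 'SUR_DCM', 'MES'),
  max_reads_env = c('MES', 'MES', 'SUR_DCM')
)
sum(genome_types$assigned_type != genome_types$max_reads_env)  # 2 mismatches here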
@@ -106,6 +109,8 @@ for (i in 1:nrow(allsamples_useful)){
}
}
write.table(mingentime_mapping, file='growthpred_results_only_names_from_coverage.csv', sep='\t')
wlcx_tst <- wilcox.test(mingentime_mapping$OGT[which(mingentime_mapping$Type=='MES')],
mingentime_mapping$OGT[which(mingentime_mapping$Type=='SUR_DCM')]
)
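wilcox.test() returns an object of class 'htest', so the result can be inspected programmatically; a short example (the 5% threshold is an illustrative convention, not something the script asserts):

# The p-value is an element of the returned list
wlcx_tst$p.value
# Illustrative significance check at the conventional 5% level
wlcx_tst$p.value < 0.05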
@@ -130,8 +135,11 @@ wlcx_tst_dcmdcm <-wilcox.test(mingentime_mapping$OGT[which(mingentime_mapping$Ma
mingentime_mapping$OGT[which(mingentime_mapping$Maximum_coverage_env=='DCM - Not expected')]
)
#Spurious is closer to MES than to SUR_DCM, but is significantly different (see Wilcoxon test)
-bxplt_all <- boxplot(mingentime_mapping$OGT ~ mingentime_mapping$Type ,plot=TRUE)
+bxplt_all <- boxplot(mingentime_mapping$OGT ~ mingentime_mapping$Type,
+                     plot=TRUE,
+                     xlab = 'Genome environment',
+                     ylab = 'Optimal Growth Temperature (Celsius)'
+                     )
#just to say we performed it, but this is not going to be useful
wlcx_tst_surdcmspu <-wilcox.test(mingentime_mapping$OGT[which(mingentime_mapping$Type=='SUR_DCM')],
mingentime_mapping$OGT[which(mingentime_mapping$Type=='Spurious')]
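The repeated which(Type == ...) subsets in these tests can also be expressed by splitting OGT by Type once; a sketch of an equivalent formulation for the test above:

# Equivalent two-sample test via split(): group OGT values by Type,
# then compare two groups by name
ogt_by_type <- split(mingentime_mapping$OGT, mingentime_mapping$Type)
wilcox.test(ogt_by_type[['SUR_DCM']], ogt_by_type[['Spurious']])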
@@ -147,6 +155,10 @@ wlcx_tst_no_outliers <- wilcox.test(mingentime_mapping_no_outliers$OGT[which(min
mingentime_mapping_no_outliers$OGT[which(mingentime_mapping_no_outliers$Type=='SUR_DCM')]
)
-bxplt_no_outliers <- boxplot(mingentime_mapping_no_outliers$OGT[which(grepl(paste(c("MES", "SUR_DCM"),collapse="|"),mingentime_mapping_no_outliers$Type))] ~
-                             mingentime_mapping_no_outliers$Type[which(grepl(paste(c("MES", "SUR_DCM"),collapse="|"),mingentime_mapping_no_outliers$Type))]
+bxplt_no_outliers <- boxplot(
+  mingentime_mapping_no_outliers$OGT[which(grepl(paste(c("MES", "SUR_DCM"),collapse="|"),mingentime_mapping_no_outliers$Type))] ~
+    mingentime_mapping_no_outliers$Type[which(grepl(paste(c("MES", "SUR_DCM"),collapse="|"),mingentime_mapping_no_outliers$Type))],
+  plot = TRUE,
+  xlab = 'Genome environment',
+  ylab = 'Optimal Growth Temperature (Celsius)'
+)
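mingentime_mapping_no_outliers is used above but not constructed in the hunks shown; one common way to build such a table is a 1.5*IQR filter on OGT. This is only a guess at the upstream step, sketched under that assumption:

# Hypothetical reconstruction of the outlier filter (assumed, not shown in
# this diff): keep rows whose OGT lies within 1.5*IQR of the quartiles
q <- quantile(mingentime_mapping$OGT, probs = c(0.25, 0.75), na.rm = TRUE)
iqr <- q[2] - q[1]
keep <- mingentime_mapping$OGT >= q[1] - 1.5 * iqr &
        mingentime_mapping$OGT <= q[2] + 1.5 * iqr
mingentime_mapping_no_outliers <- mingentime_mapping[which(keep), ]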