Commit 8efd580b authored by Rohmer Coralie

more attributes in total data

parent a953baca
@@ -2,8 +2,8 @@
import sys,re,os
import subprocess
EXP = "experiments"
-ATTRIBUTES_TO_DISPLAY=["percentage_Identity","percentage_Error","percentage_Match"]
+ATTRIBUTES_TO_DISPLAY=["percentage_Ambiguity","sd_percentage_Ambiguity","percentage_Identity","sd_percentage_Identity","percentage_Match","sd_percentage_Match","percentage_Error","sd_percentage_Error","percentage_Substitution","sd_percentage_Substitution","percentage_Deletion","sd_percentage_Deletion","percentage_Insertion","sd_percentage_Insertion"]
-ATTRIBUTES_TO_DISPLAY_THRESHOLD_INDEPENDANT=["time","memory"]
+ATTRIBUTES_TO_DISPLAY_THRESHOLD_INDEPENDANT=["time","memory","sd_time","sd_memory"]
PREFIX="start_position_"
RESULT_FOLDER="results"
NAME_DATA_FILE="data_align_t"
@@ -66,7 +66,6 @@ for i in range(len(exp_names)):
if re.search(NAME_DATA_FILE,filename):
search_meta_consensus=re.search(NAME_META_CONSENSUS,filename)
if ((search_meta_consensus and META_CONSENSUS) or (not search_meta_consensus and not META_CONSENSUS)):
threshold=filename.split(".")[0].split("_t")[1]
if (threshold not in files):
files[threshold]={}
@@ -145,11 +144,14 @@ for threshold in files :
if attribute in ATTRIBUTES_TO_DISPLAY_THRESHOLD_INDEPENDANT:
pass
output_mean=open("results_mean/data_" + add_name_file_output + "mean_"+ attribute + ".csv","w")
+if(not re.search("^sd_",attribute)):
output_all=open("results_all_start_positions/data_" + add_name_file_output + "all_start_position_"+ attribute + ".csv","w")
else:
output_mean=open("results_mean/data_" + add_name_file_output + "mean_"+ attribute + "_" + threshold + ".csv","w")
+if(not re.search("^sd_",attribute)):
output_all=open("results_all_start_positions/data_" + add_name_file_output + "all_start_position_"+ attribute + "_" + threshold + ".csv","w")
output_mean.write(",,")
+if(not re.search("^sd_",attribute)):
output_all.write(",,")
for exp_name in data["order"]:
for dir in data[exp_name]["order"]:
@@ -158,10 +160,12 @@ for threshold in files :
for msa in MSA:
output_mean.write(",")
else:
+if(not re.search("^sd_",attribute)):
output_all.write("," + os.path.join(exp_name,dir))
for msa in MSA:
output_all.write(",")
output_mean.write("\nlenght,cover,")
+if(not re.search("^sd_",attribute)):
output_all.write("\nlenght,cover,")
for exp_name in data["order"]:
for dir in data[exp_name]["order"]:
@@ -170,10 +174,12 @@ for threshold in files :
output_mean.write("," + msa)
output_mean.write(",")
else:
+if(not re.search("^sd_",attribute)):
for msa in MSA:
output_all.write("," + msa)
output_all.write(",")
output_mean.write("\n")
+if(not re.search("^sd_",attribute)):
output_all.write("\n")
# Writing data
@@ -183,6 +189,7 @@ for threshold in files :
for base in bases :
for read in reads :
output_mean.write(str(base) + "," + str(read) + ",")
+if(not re.search("^sd_",attribute)):
output_all.write(str(base) + "," + str(read) + ",")
for exp_name in data["order"]:
for dir in data[exp_name]["order"]:
@@ -194,6 +201,7 @@ for threshold in files :
output_mean.write(",")
output_mean.write(",")
else:
+if(not re.search("^sd_",attribute)):
for msa in MSA:
if (base in data[exp_name][dir]) and (read in data[exp_name][dir][base]) and (msa in data[exp_name][dir][base][read]):
output_all.write("," + data[exp_name][dir][base][read][msa][attribute])
@@ -201,6 +209,8 @@ for threshold in files :
output_all.write(",")
output_all.write(",")
output_mean.write("\n")
+if(not re.search("^sd_",attribute)):
output_all.write("\n")
output_mean.write("\n")
+if(not re.search("^sd_",attribute)):
output_all.write("\n")
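
Every hunk after the constant change applies the same rule: each write to the per-start-position file (output_all) is now guarded by if(not re.search("^sd_",attribute)):, so the new sd_* standard-deviation attributes only produce the mean CSVs. The sketch below is a minimal illustration of that gating idea, not the repository's actual code: the attribute list is abbreviated, the helper name writes_all_start_positions is hypothetical, and in-memory buffers stand in for the script's real open(...) paths.

import io
import re

# Assumed, abbreviated attribute list mirroring the commit: each metric is
# paired with an "sd_" (standard deviation) companion column.
ATTRIBUTES_TO_DISPLAY = ["percentage_Identity", "sd_percentage_Identity",
                         "time", "sd_time"]

def writes_all_start_positions(attribute):
    # sd_* columns are aggregate-only: they get a mean CSV but no
    # per-start-position CSV.
    return not re.search("^sd_", attribute)

for attribute in ATTRIBUTES_TO_DISPLAY:
    # StringIO stands in for the real open(...) calls of the script.
    output_mean = io.StringIO()
    output_all = io.StringIO() if writes_all_start_positions(attribute) else None

    output_mean.write(attribute + ",mean value\n")
    if output_all is not None:
        output_all.write(attribute + ",one value per start position\n")

    print(attribute, "->",
          "mean only" if output_all is None else "mean + all start positions")

Deciding once whether the output_all handle exists, instead of repeating the re.search("^sd_",attribute) test before every individual write as the diff does inline, is one way to express the same rule with less repetition.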