neuroimagen:facehbi
===== Extract image date =====

==== MRI ====

First approach,
<code bash>
[osotolongo@detritus facehbi]$ for y in /
</code>
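A minimal sketch of the idea behind this step — take one DICOM per subject directory and pull its AcquisitionDate with DCMTK's dcmdump. The directory layout, the file choice and the output name are assumptions, not the original command:
<code bash>
# Sketch only: paths, file selection and output name are assumptions.
for y in /path/to/dicom/facehbi/*; do
    s=$(basename "$y")
    f=$(find "$y" -type f | head -n 1)
    d=$(dcmdump "$f" 2>/dev/null | grep AcquisitionDate | head -n 1 | sed 's/.*\[\(.*\)\].*/\1/')
    echo "${s};${d}"
done > mri_dates_v0.csv
</code>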
Or faster,
DELETEME
<code bash>
[osotolongo@detritus facehbi]$ for y in /
</code>

**Careful: in visit 2 there are some acquisitions from visit 0**.
<code bash>
[osotolongo@detritus
.....
/
/
......
</code>
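A quick way to flag these, assuming the visit 2 dates are saved as subject;YYYYMMDD pairs (the file name is hypothetical): anything dated before 2016 is suspect, since the v0 scans were acquired in 2014-2015 and the v2 scans in 2017, as the merged table further down shows.
<code bash>
# Sketch: list visit-2 entries whose acquisition date predates 2016.
awk -F";" '$2 < 20160101 {print "check:", $0}' v2_mri_dates.csv
</code>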

Let's take another pass at it,
<code bash>
[osotolongo@detritus corachan]$ for y in /
</code>

and I am going to write a Perl script to pick the one with the latest date,
<code perl clean_pre_date.pl>
#!/usr/bin/perl

use strict;
use warnings;

my $ifile = '/
my $ofile = '/
my %imgdates;

open IDF, "<$ifile";
while (<IDF>){
    (my $subject, my $imdate) = /
    if (exists($imgdates{$subject})){
        $imgdates{$subject} = $imdate unless ($imgdates{$subject} > $imdate);
    }else{
        $imgdates{$subject} = $imdate;
    }
}
close IDF;
open ODF, ">$ofile";
foreach my $subject (sort keys %imgdates){
    print ODF "$subject;$imgdates{$subject}\n";
}
close ODF;
</code>

Let's see now,
<code bash>
[osotolongo@detritus facehbi]$ ./clean_pre_date.pl
[osotolongo@detritus facehbi]$ grep "
0171;
</code>

Looks like it works.

=== Finding the wrong directories in V2 ===

First, a list of the directories and the date of the DCM files,
<code bash>
[osotolongo@detritus facehbi_2]$ for y in /
[osotolongo@detritus facehbi_2]$ head /
0001;
0002;
0003;
0005;
0006;
0007;
0008;
0009;
0010;
0011;
</code>
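Same idea as the MRI sketch above, now also recording which directory each date came from, so duplicates per subject become visible (paths, subject extraction and the output name are again assumptions, not the original loop):
<code bash>
# Sketch only: build "subject;date;directory" for each visit-2 DICOM directory.
for y in /path/to/dicom/facehbi_2/*; do
    f=$(find "$y" -type f | head -n 1)
    d=$(dcmdump "$f" 2>/dev/null | grep AcquisitionDate | head -n 1 | sed 's/.*\[\(.*\)\].*/\1/')
    echo "$(basename "$y");${d};${y}"
done > v2_dirs_dates.csv
</code>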

I write a script to pick the ones with the earliest date,
<code perl find_bad_guys.pl>
#!/usr/bin/perl

use strict;
use warnings;

my $ifile = '/
my %imgdates;

open IDF, "<$ifile";
while (<IDF>){
    (my $subject, my $imdate, my $imdir) = /
    if (exists($imgdates{$subject}) && exists($imgdates{$subject}{'
        if ($imgdates{$subject}{'
            print "
        }else{
            print "
            $imgdates{$subject}{'
            $imgdates{$subject}{'
        }
    }else{
        $imgdates{$subject}{'
        $imgdates{$subject}{'
    }
}
close IDF;
</code>
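The same selection can be sketched with sort and awk instead (an equivalent idea, not the original Perl; the input file name is the hypothetical one from the sketch above): sort the subject;date;directory listing and print, for every subject, everything except its newest entry.
<code bash>
# Sketch: keep the newest entry per subject, print the older ones as bad guys.
sort -t";" -k1,1 -k2,2n v2_dirs_dates.csv | awk -F";" '
    $1 == prev { print last }         # an older entry exists for this subject
    { prev = $1; last = $3 ";" $2 }   # remember the newest entry seen so far
'
</code>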

And let's check how many directories are duplicated or belong to another visit,

<code bash>
[osotolongo@detritus facehbi]$ ./find_bad_guys.pl
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
</code>
8-O

I have printed it with the date so it serves as a guide but, anyway,

<code bash>
[osotolongo@detritus facehbi]$ ./
[osotolongo@detritus facehbi]$ su -
Password:
[root@detritus ~]# cd /
[root@detritus facehbi_2]# mkdir badguys
[root@detritus facehbi_2]# for x in `cat /
[root@detritus facehbi_2]# ls badguys/
F122_._._(1D17105595)
F134_._._(1D15001121)
F135_._._(1D15004700)
F164_._(1D15028382)

</code>
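What this step amounts to is roughly the following sketch, assuming the script output was saved as directory;date lines (the list file name is hypothetical):
<code bash>
# Sketch: move every flagged directory into badguys/, ignoring the date column.
while IFS=";" read -r dir date; do
    mv "$dir" badguys/
done < /path/to/badguys.list
</code>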

**And now comes the fun part, because all of visit 2 has to be fixed and/or redone because of the possible errors this has caused.**

m( **Another problem**: there are a few drop-outs that I have not been able to identify and they are duplicated in visit 2. Luckily, someone found them for me ;-).

<code bash>
[osotolongo@detritus f2cehbi]$ cat delete.txt
F172
F176
F191
</code>
The lines that would have to be deleted,
<code bash>
[osotolongo@detritus f2cehbi]$ for x in `cat delete.txt`;
dates_mri.csv:
gdata_mri.csv:
guia_mri.csv:
ids.csv:
info_mri.csv:
info_mri_proper.csv:
internos.csv:
dates_mri.csv:
gdata_mri.csv:
guia_mri.csv:
ids.csv:
info_mri.csv:
info_mri_proper.csv:
internos.csv:
dates_mri.csv:
gdata_mri.csv:
guia_mri.csv:
ids.csv:
info_mri.csv:
info_mri_proper.csv:
internos.csv:
</code>
//Not so hard//,
<code bash>
[osotolongo@detritus f2cehbi]$ for x in `cat delete.txt`;
</code>
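The operation is presumably an in-place deletion along these lines (a sketch assuming sed and the CSV files seen in the grep output above; back the files up before trying it):
<code bash>
# Sketch: drop every line mentioning a drop-out code from the listed CSVs.
for x in $(cat delete.txt); do
    sed -i "/${x}/d" dates_mri.csv gdata_mri.csv guia_mri.csv ids.csv \
        info_mri.csv info_mri_proper.csv internos.csv
done
</code>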
:-P

==== FBB ====

one random file by subject,
<code bash>
[osotolongo@detritus facehbi]$ for y in /
[osotolongo@detritus facehbi]$ for y in /
</code>
It looks odd, but given the rather inconsistent format of the files, the commands have to be changed for each directory.
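One way to sidestep the inconsistent layouts (a sketch, not the original commands) is to let find pick a random file per subject instead of hard-coding the sub-directory structure:
<code bash>
# Sketch only: grab one arbitrary DICOM per subject, whatever the layout,
# and read its acquisition date (paths and output name are assumptions).
for y in /path/to/dicom/fbb/*; do
    f=$(find "$y" -type f | shuf -n 1)
    d=$(dcmdump "$f" 2>/dev/null | grep AcquisitionDate | head -n 1 | sed 's/.*\[\(.*\)\].*/\1/')
    echo "$(basename "$y");${d}"
done > fbb_dates.csv
</code>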

==== Merging ====

The dates now end up in four files,
</code>
I am going to write a parser to join everything,
<code perl date_parser.pl>
#!/usr/bin/perl
#
#use strict;
#use warnings;

my %fdates = ( FBBv0 => "
    FBBv2 => "
    MRIv0 => "
    MRIv2 => "
);

my $fdpath = '/
my %imgdates;

foreach my $fdate (sort keys %fdates){
    $real_file = $fdpath.$fdates{$fdate};
    open IDF, "<$real_file";
    while(<IDF>){
        (my $subject, my $imdate) = /
        $imgdates{$subject}{$fdate} = $imdate;
    }
    close IDF;
}
print "Subject";
foreach my $fdate (sort keys %fdates){
    print ", $fdate";
}
print "\n";
foreach my $subject (sort keys %imgdates){
    print "$subject";
    foreach my $fdate (sort keys %fdates){
        if (exists($imgdates{$subject}{$fdate})){
            print ", $imgdates{$subject}{$fdate}";
        }else{
            print ", -";
        }
    }
    print "\n";
}
</code>
and there it goes,
<code bash>
[osotolongo@detritus facehbi]$ ./date_parser.pl > dicom_dates.csv
[osotolongo@detritus facehbi]$ head dicom_dates.csv
Subject, FBBv0, FBBv2, MRIv0, MRIv2
0001, 20141211, 20170126, 20141205, 20170124
0002, 20141211, 20170420, 20141205, 20170323
0003, 20141218, 20170126, 20141211, 20170123
0004, 20141218, -, 20141212, -
0005, 20150122, 20170202, 20150107, 20170123
0006, 20150115, 20170126, 20141223, 20170124
0007, 20150115, 20170126, 20141219, 20170120
0008, 20150115, 20170202, 20141220, 20170125
0009, 20150129, 20170216, 20150110, 20170207
</code>

==== Integrating dates with data ====
<code perl join_conv.pl>
#!/usr/bin/perl

use strict;
use warnings;
use Data::Dump qw(dump);

my %fdates = ( FBBv0 => "
    FBBv2 => "
    MRIv0 => "
    MRIv2 => "
);

my %ofiles = ( FBBv0 => "
    FBBv2 => "
    MRIv0 => "
    MRIv2 => "
);

my $info_file = "
my %internos;

open IIF, "<$info_file";
while(<IIF>){
    if(/
        (my $fnumber, my $inumber) = /
        $internos{$fnumber} = $inumber;
    }
}
close IIF;

my %dates;
foreach my $fdate (sort keys %fdates){
    open IDF, "<
    while(<IDF>){
        if(/
            (my $fnumber, my $date) = /
            (my $cdate = $date) =~ s/
            $dates{$fnumber}{$fdate} = $cdate;
        }
    }
    close IDF;
    open ODF, ">
    print ODF "
    foreach my $fnumber (sort keys %internos){
        print ODF "
        if (exists $internos{$fnumber}){
            print ODF "
            if (exists($dates{$fnumber}) && exists($dates{$fnumber}{$fdate})){
                print ODF "
            }else{
                print ODF "
            }
        }
    }
    close ODF;
}

</code>
<code bash>
osotolongo@daisy:
facehbi_mri.csv
osotolongo@daisy:
osotolongo@daisy:
</code>

Damned permission problems m(
<code bash>
[root@detritus ~]# chmod g+rwx /
.....
[osotolongo@detritus facehbi]$ ls /
bem label labels
[osotolongo@detritus facehbi]$ cat soloeste.csv
0003;smc
[osotolongo@detritus facehbi]$ precon.pl -cut soloeste.csv facehbi
</code>

===== Reprocessing =====

First, re-convert the ones that are wrong, delete the FS directories, create them again and recompute FS.

<code bash>
[osotolongo@detritus facehbi]$ awk -F"/"
...
[osotolongo@detritus v2MriPet]$ awk -F";"
[osotolongo@detritus v2MriPet]$ for x in `cat mri_repetir.dir`;
[osotolongo@detritus v2MriPet]$ for a in `ls processed/
[osotolongo@detritus v2MriPet]$ for x in `awk -F";"
[osotolongo@detritus v2MriPet]$ fsl2fs.pl -cut repetir.csv v2MriPet
[osotolongo@detritus v2MriPet]$ precon.pl -cut repetir.csv v2MriPet
Submitted batch job 17319
[osotolongo@detritus v2MriPet]$ squeue
JOBID PARTITION
</code>
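A hedged recap of what the rebuild amounts to; the repetir.csv layout and the FreeSurfer directory naming are assumptions, only the fsl2fs.pl and precon.pl calls are taken from the commands above.
<code bash>
# Sketch of the reprocessing loop (subject directory naming is an assumption).
for x in $(awk -F";" '{print $1}' repetir.csv); do
    rm -rf "${SUBJECTS_DIR}/v2MriPet_${x}"   # wipe the old FreeSurfer subject
done
fsl2fs.pl -cut repetir.csv v2MriPet          # re-create the FS subjects (as above)
precon.pl -cut repetir.csv v2MriPet          # resubmit recon-all through SLURM
squeue                                       # watch the queue
</code>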