8b84f22d1b235ac3dcc37d35e2c051d3e6e36292 braney Wed Nov 6 14:08:46 2019 -0800 generate new CDS FASTA files for multiz30way diff --git src/hg/makeDb/doc/hg38/multiz30way.txt src/hg/makeDb/doc/hg38/multiz30way.txt index fce4b35..fba1544 100644 --- src/hg/makeDb/doc/hg38/multiz30way.txt +++ src/hg/makeDb/doc/hg38/multiz30way.txt @@ -1863,32 +1863,132 @@ # situation ln -s `pwd`/*.mod `pwd`/*.bw `pwd`/*.txt \ /usr/local/apache/htdocs-hgdownload/goldenPath/hg38/phyloP30way ############################################################################# # hgPal downloads (DONE - 2017-11-06 - Hiram) # FASTA from 30-way for knownGene, refGene and knownCanonical ssh hgwdev screen -S hg38HgPal mkdir /hive/data/genomes/hg38/bed/multiz30way/pal cd /hive/data/genomes/hg38/bed/multiz30way/pal cat ../species.list | tr '[ ]' '[\n]' > order.list - # this for loop can take hours on a high contig count assembly - # it is just fine on human/hg38, just a few seconds + ### knownCanonical with full CDS + cd /hive/data/genomes/hg38/bed/multiz30way/pal + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + mkdir protAA protNuc knownCanonical + + time cut -f1 ../../../chrom.sizes | while read C + do + echo $C 1>&2 + hgsql hg38 -N -e "select chrom, chromStart, chromEnd, transcript from knownCanonical where chrom='$C'" > knownCanonical/$C.known.bed + done + + ls knownCanonical/*.known.bed | while read F + do + if [ -s $F ]; then + echo $F | sed -e 's#knownCanonical/##; s/.known.bed//' + fi + done | while read C + do + echo "date" + echo "mafGene -geneBeds=knownCanonical/$C.known.bed -noTrans $db $mz knownGene order.list stdout | \ + gzip -c > protNuc/$C.protNuc.fa.gz" + echo "mafGene -geneBeds=knownCanonical/$C.known.bed $db $mz knownGene order.list stdout | \ + gzip -c > protAA/$C.protAA.fa.gz" + done > $gp.$mz.prot.jobs + + time sh -x $gp.$mz.prot.jobs > $gp.$mz.job.log 2>&1 + # 267m58.813s + + rm *.known.bed + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + zcat 
protAA/c*.gz | gzip -c > $gp.$mz.protAA.fa.gz & + zcat protNuc/c*.gz | gzip -c > $gp.$mz.protNuc.fa.gz & + # about 6 minutes + + ### knownCanonical broken up by exon + cd /hive/data/genomes/hg38/bed/multiz30way/pal + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + mkdir exonAA exonNuc knownCanonical + + time cut -f1 ../../../chrom.sizes | while read C + do + echo $C 1>&2 + hgsql hg38 -N -e "select chrom, chromStart, chromEnd, transcript from knownCanonical where chrom='$C'" > knownCanonical/$C.known.bed + done + # real 0m15.897s + + ls knownCanonical/*.known.bed | while read F + do + if [ -s $F ]; then + echo $F | sed -e 's#knownCanonical/##; s/.known.bed//' + fi + done | while read C + do + echo "date" + echo "mafGene -geneBeds=knownCanonical/$C.known.bed -exons -noTrans $db $mz knownGene order.list stdout | \ + gzip -c > exonNuc/$C.exonNuc.fa.gz" + echo "mafGene -geneBeds=knownCanonical/$C.known.bed -exons $db $mz knownGene order.list stdout | \ + gzip -c > exonAA/$C.exonAA.fa.gz" + done > $gp.$mz.jobs + + time sh -x $gp.$mz.jobs > $gp.$mz.job.log 2>&1 + # 267m58.813s + + rm *.known.bed + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + zcat exonAA/c*.gz | gzip -c > $gp.$mz.exonAA.fa.gz & + zcat exonNuc/c*.gz | gzip -c > $gp.$mz.exonNuc.fa.gz & + # about 6 minutes + + rm -rf exonAA exonNuc + + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + export pd=/usr/local/apache/htdocs-hgdownload/goldenPath/$db/$mz/alignments + mkdir -p $pd + ln -s `pwd`/$gp.$mz.exonAA.fa.gz $pd/$gp.exonAA.fa.gz + ln -s `pwd`/$gp.$mz.exonNuc.fa.gz $pd/$gp.exonNuc.fa.gz + ln -s `pwd`/$gp.$mz.protAA.fa.gz $pd/$gp.protAA.fa.gz + ln -s `pwd`/$gp.$mz.protNuc.fa.gz $pd/$gp.protNuc.fa.gz + cd $pd + md5sum *.fa.gz > md5sum.txt + + rm -rf exonAA exonNuc + + export mz=multiz30way + export gp=knownCanonical + export db=hg38 + export pd=/usr/local/apache/htdocs-hgdownload/goldenPath/$db/$mz/alignments + mkdir -p $pd + ln -s 
`pwd`/$gp.$mz.exonAA.fa.gz $pd/$gp.exonAA.fa.gz + ln -s `pwd`/$gp.$mz.exonNuc.fa.gz $pd/$gp.exonNuc.fa.gz + + # knownGene export mz=multiz30way export gp=knownGene export db=hg38 export I=0 export D=0 mkdir exonAA exonNuc for C in `sort -nk2 ../../../chrom.sizes | cut -f1` do I=`echo $I | awk '{print $1+1}'` D=`echo $D | awk '{print $1+1}'` dNum=`echo $D | awk '{printf "%03d", int($1/300)}'` mkdir -p exonNuc/${dNum} > /dev/null mkdir -p exonAA/${dNum} > /dev/null echo "mafGene -chrom=$C -exons -noTrans $db $mz $gp order.list stdout | gzip -c > exonNuc/${dNum}/$C.exonNuc.fa.gz &" echo "mafGene -chrom=$C -exons $db $mz $gp order.list stdout | gzip -c > exonAA/${dNum}/$C.exonAA.fa.gz &" @@ -1910,35 +2010,37 @@ | gzip -c > $gp.$mz.exonAA.fa.gz # real 1m28.841s time find ./exonNuc -type f | grep exonNuc.fa.gz | xargs zcat \ | gzip -c > $gp.$mz.exonNuc.fa.gz # real 3m56.370s # -rw-rw-r-- 1 397928833 Nov 6 18:44 knownGene.multiz30way.exonAA.fa.gz # -rw-rw-r-- 1 580377720 Nov 6 18:49 knownGene.multiz30way.exonNuc.fa.gz export mz=multiz30way export gp=knownGene export db=hg38 export pd=/usr/local/apache/htdocs-hgdownload/goldenPath/$db/$mz/alignments mkdir -p $pd - md5sum *.fa.gz > md5sum.txt ln -s `pwd`/$gp.$mz.exonAA.fa.gz $pd/$gp.exonAA.fa.gz ln -s `pwd`/$gp.$mz.exonNuc.fa.gz $pd/$gp.exonNuc.fa.gz ln -s `pwd`/md5sum.txt $pd/ + cd $pd + md5sum *.fa.gz > md5sum.txt + rm -rf exonAA exonNuc ############################################################################# # wiki page for 30-way (DONE - 2017-11-06 - Hiram) mkdir /hive/users/hiram/bigWays/hg38.30way cd /hive/users/hiram/bigWays echo "hg38" > hg38.30way/ordered.list awk '{print $1}' /hive/data/genomes/hg38/bed/multiz30way/30way.distances.txt \ >> hg38.30way/ordered.list # sizeStats.sh catches up the cached measurements required for data # in the tables. They are usually already mostly done, only new # assemblies will have updates. 
./sizeStats.sh hg38.30way/ordered.list # dbDb.sh constructs hg38.30way/XenTro9_30-way_conservation_alignment.html