4d34cfc382f4e45a7119129b2baa6a8645b80542
angie
  Fri Oct 1 14:21:27 2021 -0700
Max pointed out that 8 threads would be much more reasonable for xz than 50.
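
For context (a hedged sketch, not part of this change): xz -T sets the maximum
number of compression worker threads, each of which buffers its own block, so
memory use grows roughly with the thread count; -T0 would use one thread per
CPU core.  A defensive variant could also cap the count at the host's core
count.  The "threads" variable below is illustrative only and is not used by
getCogUk.sh:

    # Use at most 8 xz threads, or fewer if the host has fewer cores.
    threads=$(nproc)
    if [[ $threads -gt 8 ]]; then
        threads=8
    fi
    curl -S -s $cogUrlBase/cog_all.fasta | xz -T $threads > cog_all.fasta.xz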

diff --git src/hg/utils/otto/sarscov2phylo/getCogUk.sh src/hg/utils/otto/sarscov2phylo/getCogUk.sh
index ef25caa..3481bbd 100755
--- src/hg/utils/otto/sarscov2phylo/getCogUk.sh
+++ src/hg/utils/otto/sarscov2phylo/getCogUk.sh
@@ -11,31 +11,31 @@
 
 ottoDir=/hive/data/outside/otto/sarscov2phylo
 cogUrlBase=https://cog-uk.s3.climb.ac.uk/phylogenetics/latest
 
 mkdir -p $ottoDir/cogUk.$today
 cd $ottoDir/cogUk.$today
 
 # Sometimes the curl fails with a DNS error, regardless of whether my previous cron job with
 # curl -I succeeded.  Do multiple retries for the first URL; once it's working, it should
 # continue to work for the other URLs (she said hopefully).
 attempt=0
 maxAttempts=5
 retryDelay=60
 while [[ $((++attempt)) -le $maxAttempts ]]; do
     echo "curl attempt $attempt"
-    if curl -S -s $cogUrlBase/cog_all.fasta | xz -T 50 > cog_all.fasta.xz; then
+    if curl -S -s $cogUrlBase/cog_all.fasta | xz -T 8 > cog_all.fasta.xz; then
         break
     else
         echo "FAILED; will try again after $retryDelay seconds"
         rm -f cog_all.fasta.xz
         sleep $retryDelay
     fi
 done
 if [[ ! -f cog_all.fasta.xz ]]; then
     echo "curl failed $maxAttempts times; quitting."
     exit 1
 fi
 curl -S -s $cogUrlBase/cog_metadata.csv > cog_metadata.csv
 curl -S -s $cogUrlBase/cog_global_tree.newick > cog_global_tree.newick
 
 tail -n +2 cog_metadata.csv \