9fe4be9ca5880d7cf4b4d8a6a970b3e463b959c1
kuhn
  Tue Dec 11 12:33:50 2012 -0800
removed the -q flag from a grep.  It was =too= quiet, allowing nothing to get through
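
The one functional change is on the "set link=" line below: with -q, egrep
suppresses all output and only sets an exit status, so the backtick
substitution captures an empty string and $link never gets filled in.
Dropping -q lets the matched lines reach stdout, which is what the
substitution needs (the exit status is not checked here anyway). A minimal
csh sketch of the difference, using a made-up page.html and URL in place of
the script's $xfile and $url:

  echo '<A HREF="http://example.com/target">Example link</A>' > page.html

  # with -qi (the old flags), egrep prints nothing, so link ends up empty
  set link=`egrep -qi -A 4 "http://example.com/target" page.html`
  echo "with -q:    '$link'"

  # without -q (the new call), the matching line is printed and captured
  set link=`egrep -A 4 "http://example.com/target" page.html`
  echo "without -q: '$link'"

  rm -f page.html
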
diff --git src/utils/qa/checkStaticLinks.csh src/utils/qa/checkStaticLinks.csh
index c925ed0..3877757 100755
--- src/utils/qa/checkStaticLinks.csh
+++ src/utils/qa/checkStaticLinks.csh
@@ -48,31 +48,31 @@
   endif
   set exclude=`cat $excludeList`
 endif
 
 # get list of active files from beta
 # and strip off the pathname from list leaving only filenames
 
 set origlist=`ssh hgwbeta 'ls /usr/local/apache/htdocs/'${filePath}'/*html' \
       | sed "s/.*\///g"`
 
 # strip out any files in exclude list
 foreach excl ( $exclude )
   set origlist=`echo $origlist | sed "s/ /\n/g" | egrep -wv $excl`
 end
 
-# echo $origlist
+# echo origlist $origlist
 
 # set up outfile for all the files in the dir
 set i=0
 rm -f outfile
 echo "\nfiles checked in htdocs/${filePath}" >> outfile
 echo $origlist | sed "s/ /\n/g"              >> outfile
 echo                                         >> outfile
 
 foreach file ( $origlist )
   rm -f tmp0
   htmlCheck checkLinks $baseUrl/$filePath/$file  >>& tmp0
   if ( -e tmp0 ) then
     # there were errors
     # clean out things we don't care about
     rm -f tmp
@@ -89,31 +89,31 @@
       # there were errors worth looking at
       # get the link names for any broken urls
       @ errs = $errs + 1                    # counts files with errors
       set j=1
       set errors=`wc -l tmp | awk '{print $1}'`  # counts errs in file
       rm -f outfile$file
       echo                                           >> err$file
       while ( $j <= $errors )
         set errLine=`sed -n "${j}p" tmp`
         set url=`sed -n "${j}p" tmp | awk '{print $NF}'`
         set xfile=$baseUrl/$filePath/$file
         # set xfile=http://genome.ucsc.edu/goldenPath/credits.html
         # set url=http://www.genome.washington.edu/UWGC
 
         # grab 3 lines from html page and trim down to </A> tag
-        set link=`htmlCheck getHtml $xfile | egrep -qi -A 4 "$url" \
+        set link=`htmlCheck getHtml $xfile | egrep -A 4 "$url" \
           | sed -n "1,/<\/A>/p"`
         set link=`echo $link \
           | awk -F'</A>' '{print $1}' \
           | awk -F'>' '{print $NF}'`
 
         echo "link  = $link"                         >> err$file
         echo "error = $errLine"                      >> err$file
         echo                                         >> err$file
         @ j = $j + 1
       end
       @ j = $j - 1
       if ( $j > 0 ) then
         echo $file                                >> outfile$file
         echo $baseUrl/$filePath/$file             >> outfile$file
         cat err$file                              >> outfile$file