cf2f877834b6a6783b7192bd26235ba399fe52e8
kuhn
  Wed Dec 12 14:21:28 2012 -0800
cleaned up the error-summary output logic and added another exception (Non-numeric port) to the filtered htmlCheck output
diff --git src/utils/qa/checkStaticLinks.csh src/utils/qa/checkStaticLinks.csh
index 6cb94a6..39544b9 100755
--- src/utils/qa/checkStaticLinks.csh
+++ src/utils/qa/checkStaticLinks.csh
@@ -3,31 +3,31 @@
 
 ###############################################
 #  05-10-2004
 # 
 #  checks the links in all the files in a directory
 #  Robert Kuhn
 # 
 ###############################################
 
 set filePath=""
 set out=""
 set url=""
 set exclude=""
 set excludeList=""
 set baseUrl="http://hgwbeta.cse.ucsc.edu"
-set errs=""
+set errs=0
 
 if ( $#argv < 1 || $#argv > 2 ) then
   # wrong number of command-line args
   echo
   echo "  checks the links in all the static pages in a directory."
   echo "  operates on pages on hgwbeta"
   echo "  writes a file called dir.dir.err"
   echo
   echo "    usage:  pathInHtdocs [excludeList]"
   echo '      where:'
   echo '        pathInHtdocs = path in htdocs (0 for htdocs root)'
   echo "        excludeList = filename for list of files not to check"
   echo
   exit
 endif
@@ -70,30 +70,31 @@
 echo                                         >> outfile
 
 foreach file ( $origlist )
   rm -f tmp0
   htmlCheck checkLinks $baseUrl/$filePath/$file  >>& tmp0
   if ( -e tmp0 ) then
     # there were errors
     # clean out things we don't care about
     rm -f tmp
     cat tmp0 | grep -v "403" \
       | grep -v "doesn't exist" \
       | grep -v "Cancelling" \
       | grep -v "service not known" \
       | grep -v "than directories in" \
       | grep -v "Connection refused" \
+      | grep -v "Non-numeric port" \
       | egrep "."  > tmp
     rm -f tmp0
 
     if ( `wc -l tmp | awk '{print $1}'` > 0 ) then
       # there were errors worth looking at
       # get the link names for any broken urls
       @ errs = $errs + 1                    # counts files with errors
       set j=1
       set errors=`wc -l tmp | awk '{print $1}'`  # counts errs in file
       rm -f outfile$file
       echo                                           >> err$file
       while ( $j <= $errors )
         set errLine=`sed -n "${j}p" tmp`
         set url=`sed -n "${j}p" tmp | awk '{print $NF}'`
         set xfile=$baseUrl/$filePath/$file
@@ -132,32 +133,33 @@
     rm -f outfile$file
   endif
   @ i = $i + 1
 end
 
 echo "\n directory = htdocs/$filePath"         >> outfile
 if (  $i == 1 ) then
   echo " checked $i file"                       >> outfile
 else
   echo " checked $i files"                       >> outfile
 endif
 
 # note:  if you change the line below the wrapper script will break
 if ( $errs == 0 ) then
   echo " found no files with errors\n"          >> outfile
-endif
+else
 if ( $errs == 1 ) then
   echo " found errors in $errs file\n"          >> outfile
 else
   echo " found errors in $errs files\n"          >> outfile
 endif
+endif
 
 echo                                           >> outfile
 
 # cat outfile
 if ( $filePath == "" ) then
   set out=htdocs.err
 else
   set out=`echo $filePath | sed s@/@.@g`.err
 endif
 mv outfile $out
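
For reference, a minimal standalone sketch of the summary logic as it reads after this patch; $errs and the "\n"-style echo match the script, but the harness around them (the hard-coded error count and writing to stdout instead of outfile) is illustrative only:

  #!/bin/csh -f
  # sketch only: exercise the patched summary logic with a fake error count
  set errs=0            # must start numeric so "@ errs = $errs + 1" works in csh
  @ errs = $errs + 1    # pretend one file had errors

  # exactly one of the three messages prints now; before the patch the first
  # test ended with "endif", so errs == 0 also fell through to the
  # "found errors in 0 files" branch of the second test
  if ( $errs == 0 ) then
    echo " found no files with errors\n"
  else
    if ( $errs == 1 ) then
      echo " found errors in $errs file\n"
    else
      echo " found errors in $errs files\n"
    endif
  endif

csh also accepts "else if ( ... ) then" on a single line, which would collapse the nesting to one endif; the patch keeps the nested form shown above.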