d1abfc7a22070300f05d476ae0fa66315094a311
galt
  Fri Apr 21 12:00:16 2017 -0700
Add a double-check to udc: the URL passed in should not have any byterange on it.

diff --git src/lib/udc.c src/lib/udc.c
index 01ea80b4..9224e35 100644
--- src/lib/udc.c
+++ src/lib/udc.c
@@ -467,30 +467,36 @@
 if (rd == -1)
     errnoAbort("udcDataViaHttpOrFtp: error reading socket");
 struct connInfo *ci = &file->connInfo;
 if (ci == NULL)
     mustCloseFd(&sd);
 else
     ci->offset += total;
 return total;
 }
 
 boolean udcInfoViaHttp(char *url, struct udcRemoteFileInfo *retInfo)
 /* Gets size and last modified time of URL
  * and returns status of HEAD or GET byterange 0-0. */
 {
 verbose(4, "checking http remote info on %s\n", url);
+boolean byteRangeUsed = (strstr(url,";byterange=") != NULL);
+if (byteRangeUsed) // URLs passed into here should not have byterange.
+    {
+    warn("Unexpected byterange use in udcInfoViaHttp [%s]", url);
+    dumpStack("Unexpected byterange use in udcInfoViaHttp [%s]", url);
+    }
 int redirectCount = 0;
 struct hash *hash;
 int status;
 char *sizeString = NULL;
 /*
  For caching, sites should support byte-range and last-modified.
  However, several groups including ENCODE have made sites that use CGIs to 
  dynamically generate hub text files such as hub.txt, genome.txt, trackDb.txt.
  Byte-range and last-modified are difficult to support for this case,
  so they do without them, effectively defeat caching. Every 5 minutes (udcTimeout),
  they get re-downloaded, even when the data has not changed.  
 */
 while (TRUE)
     {
     hash = newHash(0);