5d25b7b10a486fb25edff4391db1c02cb70fb416
galt
  Wed May 10 11:23:50 2017 -0700
I never found evidence that the URL was coming in with a byterange clause appended to it.
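
For reference, the check being removed looks for a ";byterange=" marker that udc appends to URLs carrying an explicit byte range (the doc comment below mentions "GET byterange 0-0", so the clause is presumably of the form ";byterange=start-end"). A minimal sketch, assuming that suffix convention, of how such a clause could be detected and stripped; the helper name stripByteRangeClause is hypothetical and not part of this change:

    /* Sketch only: detect and drop a trailing ";byterange=..." clause,
     * assuming udc appends the clause verbatim to the URL string. */
    #include <string.h>

    static void stripByteRangeClause(char *url)
    {
    char *clause = strstr(url, ";byterange=");
    if (clause != NULL)
        *clause = 0;    /* truncate the URL at the clause */
    }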

diff --git src/lib/udc.c src/lib/udc.c
index 90f9c23..c9f4304 100644
--- src/lib/udc.c
+++ src/lib/udc.c
@@ -465,36 +465,31 @@
 if (rd == -1)
     errnoAbort("udcDataViaHttpOrFtp: error reading socket");
 struct connInfo *ci = &file->connInfo;
 if (ci == NULL)
     mustCloseFd(&sd);
 else
     ci->offset += total;
 return total;
 }
 
 boolean udcInfoViaHttp(char *url, struct udcRemoteFileInfo *retInfo)
 /* Gets size and last modified time of URL
  * and returns status of HEAD or GET byterange 0-0. */
 {
 verbose(4, "checking http remote info on %s\n", url);
-boolean byteRangeUsed = (strstr(url,";byterange=") != NULL);
-if (byteRangeUsed) // URLs passed into here should not have byterange.
-    {
-    warn("Unexpected byterange use in udcInfoViaHttp [%s]", url);
-    dumpStack("Unexpected byterange use in udcInfoViaHttp [%s]", url);
-    }
+// URLs passed into here should not have a byterange clause.
 int redirectCount = 0;
 struct hash *hash;
 int status;
 char *sizeString = NULL;
 /*
  For caching, sites should support byte-range and last-modified.
  However, several groups including ENCODE have made sites that use CGIs to 
  dynamically generate hub text files such as hub.txt, genome.txt, trackDb.txt.
  Byte-range and last-modified are difficult to support for this case,
 so they do without them, effectively defeating caching. Every 5 minutes (udcTimeout),
  they get re-downloaded, even when the data has not changed.  
 */
 while (TRUE)
     {
     hash = newHash(0);