e07889d47e6f959eb8a7ea151ecf09ab6d9439d3 max Tue May 17 06:32:30 2022 -0700 adding invisible link so can find web crawlers, no redmine, making now so can analyze the data in a few months

diff --git src/hg/hgTracks/hgTracks.c src/hg/hgTracks/hgTracks.c
index c3eab37..0e0064b 100644
--- src/hg/hgTracks/hgTracks.c
+++ src/hg/hgTracks/hgTracks.c
@@ -9191,30 +9191,34 @@
 if (sameString(database, "wuhCor1"))
     {
     puts("<p class='centeredCol'>\n"
          "For information about this browser and related resources, see "
          "<a target='blank' href='../covid19.html'>COVID-19 Research at UCSC</a>.</p>");
     // GISAID wants this displayed on any page that shows any GISAID data
     puts("<p class='centeredCol'>\n"
          "GISAID data displayed in the Genome Browser are subject to GISAID's\n"
          "<a href='https://www.gisaid.org/registration/terms-of-use/' "
          "target=_blank>Terms and Conditions</a>.\n"
          "SARS-CoV-2 genome sequences and metadata are available for download from\n"
          "<a href='https://gisaid.org' target=_blank>GISAID</a> EpiCoV™.\n"
          "</p>");
     }
+// add hidden link as a trap for web spiders, for log analysis one day to get an idea what the spider IPs
+// are
+hPrintf("<a href='/notExist.html' style='display:none'>Invisible link</a>");
+
 hPrintf("</CENTER>\n");
 
 #ifdef SLOW
 /* We'll rely on the end of program to do the cleanup.
  * It turns out that the 'free' routine on Linux is
  * quite slow. For chromosome level views the browser
  * spends about 1/3 of it's time doing the cleanup
  * below if it's enabled. Since we really don't
  * need to reclaim this memory at this point I'm
  * taking this out. Please don't delete the code though.
  * I'll like to keep it for testing now and then. -jk. */
 // TODO GALT cleanup sibs too? probably can do for window copies but low priority.
 /* Clean up. */
 for (track = trackList; track != NULL; track = track->next)
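
The commit message says the hidden /notExist.html link exists only so that spider IP addresses can be pulled out of the web server logs later. As a minimal sketch of that follow-up analysis (not part of this commit), the standalone C filter below reads an access log on stdin and prints the client IP of every request that hit the trap URL. It assumes the common/combined Apache log format in which the client IP is the first whitespace-separated field; the file name spiderHits.c is hypothetical.

/* spiderHits.c - print the client IP of every access-log request for the
 * hidden trap link /notExist.html added by this commit.
 * Reads the log on stdin; assumes the client IP is the first
 * whitespace-separated field (common/combined log format). */
#include <stdio.h>
#include <string.h>

int main(void)
{
char line[4096];
while (fgets(line, sizeof line, stdin) != NULL)
    {
    if (strstr(line, "/notExist.html") == NULL)
        continue;               /* not a hit on the trap URL */
    char ip[256];
    if (sscanf(line, "%255s", ip) == 1)
        puts(ip);               /* first field = client IP */
    }
return 0;
}

Piping the output through sort and uniq gives a ranked list of candidate spider IPs, for example: zcat access.log*.gz | ./spiderHits | sort | uniq -c | sort -rn (log file names here are illustrative).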