use strict;
use warnings;

use Data::Dumper;

use Fcntl qw(:DEFAULT :flock);

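# Image choices: one of the "cross" GIFs is served when logging
# succeeds; the dinosaur is served instead when anything goes wrong.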
my $OK_IMAGES = ["cross0", "cross1", "cross2"];
my $ERR_IMAGE = "tyrannosaurus";

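# Document root, derived from this script's physical (Windows-style)
# path by stripping everything from "\cgi-bin" onward.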
my $ROOT = $ENV{PATH_TRANSLATED};
$ROOT =~ s/\\cgi-bin.*//;

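# Rotate through the normal images based on the current time; $image
# is swapped for $ERR_IMAGE below if any step of the logging fails.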
my $t = time;
my $image = $OK_IMAGES -> [$t % @$OK_IMAGES];

# URLs to this script should pass in a "uid" parameter even though
# we don't make use of it here.  This is to keep caches (browser,
# proxy, or server) from reusing the output of this script.  Currently
# we use the page's URL (window.location.href) concatenated with the
# current time (new Date().getTime()) as a uid, but this won't quite
# do the trick if two users load the same page within the same
# millisecond (if that happens, one of the attempts may not get
# logged).  It would be nice if we could figure out a way to create
# some sort of client-specific uid.
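#
# For reference, a minimal sketch of the client-side embedding this
# script assumes (the script path and the 1x1 sizing are illustrative,
# not taken from any actual page):
#
#   <script type="text/javascript">
#     var uid = encodeURIComponent(window.location.href +
#                                  new Date().getTime());
#     var ref = encodeURIComponent(document.referrer);
#     document.write('<img src="/cgi-bin/log.cgi?uid=' + uid +
#                    '&referrer=' + ref + '" width="1" height="1">');
#   </script>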

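# Parse QUERY_STRING into a hashref, lowercasing parameter names.
# Values are kept as-is (no URL decoding).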
my $query = {map {
    /(.*?)=(.*)/ ? (lc $1 => $2) : ($_ => undef);
} split (/&/, $ENV{QUERY_STRING} || "")};
$query -> {referrer} = ($query -> {referer})
    if exists $query -> {referer};  # back compat
my $referrer = ! exists $query -> {referrer} ? "[NONE]" : 
    ! $query -> {referrer} ? "[DIRECT]" : $query -> {referrer};

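# Append one entry to the access log under an exclusive lock, then
# compare file sizes to confirm the entry was actually written.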
my $log = "$ROOT/special/access.log";
my $size0 = (stat ($log))[7] || 0;  # 0 if the log doesn't exist yet
if (open (OUT, ">> $log")) {
    if (flock (OUT, LOCK_EX)) {
	seek (OUT, 0, 2); # reposition file pointer at eof
	print OUT ("[" . (localtime $t) . " ($t)]\n" .
		   "  " . ($ENV{REMOTE_HOST} || $ENV{REMOTE_ADDR} || "[UNKNOWN]") . "\n" .
		   "  " . ($ENV{HTTP_USER_AGENT} || "[UNKNOWN]") . "\n" .
		   "  $referrer\n" .
		   "  " . ($ENV{HTTP_REFERER} || "[NONE]") . "\n");
	close (OUT);
	my $size1 = (stat ($log))[7];
	$image = $ERR_IMAGE 
	    unless $size0 < $size1;  # Check that the log has grown.
    } else {
	close (OUT);  # couldn't get the lock; nothing was written
	$image = $ERR_IMAGE;
    }
} else {
    $image = $ERR_IMAGE;
}

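# Send the chosen GIF back to the client.  STDOUT must be in binary
# mode so the image bytes aren't mangled on platforms that translate
# line endings.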
binmode (STDOUT);
print "Content-type: image/gif\n\n";
if (open (IN, "< $ROOT\\images\\$image.gif")) {
    binmode (IN);
    local $/ = \8192;  # read fixed-size chunks rather than "lines"
    while (<IN>) { print; }
    close (IN);
}
