Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Merge branch 'beeminder'

  • Loading branch information...
commit eb23d62df94ba637ce915bcf3845687f445155b5 2 parents 011d268 + 0193601
@pmyteh authored
Showing with 24 additions and 5 deletions.
  1. +21 −5 beeminder.pl
  2. +3 −0  script/ttlogmerge.pl
View
26 beeminder.pl
@@ -35,7 +35,7 @@
# ph (ping hash) maps "y-m-d" to number of pings on that day.
# sh (string hash) maps "y-m-d" to the beeminder comment string for that day.
# bh (beeminder hash) maps "y-m-d" to the bmndr ID of the datapoint on that day.
-# ph1 and sh1 are based on the current tagtime long and
+# ph1 and sh1 are based on the current tagtime log and
# ph0 and sh0 are based on the cached .bee file or beeminder-fetched data.
my $start = time; # start and end are the earliest and latest times we will
@@ -109,21 +109,26 @@
# take one pass to delete any duplicates on bmndr; must be one datapt per day
my $i = 0;
undef %remember;
+ my @todelete;
for my $x (@$data) {
my($y,$m,$d) = dt($x->{"timestamp"});
my $ts = "$y-$m-$d";
my $b = $x->{"id"};
if(defined($remember{$ts})) {
- print "Beeminder has multiple datapoints for the same day. " ,
- "Deleting this one:\n";
+ print "Beeminder has multiple datapoints for the same day. " ,
+ "The other id is $remember{$ts}. Deleting this one:\n";
print Dumper $x;
beemdelete($usr, $slug, $b);
- delete $data->[$i];
+ push(@todelete,$i);
}
- $remember{$ts} = 1;
+ $remember{$ts} = $b;
$i++;
}
+ for my $x (reverse(@todelete)) {
+ splice(@$data,$x,1);
+ }
+
for my $x (@$data) { # parse the bmndr data into %ph0, %sh0, %bh
my($y,$m,$d) = dt($x->{"timestamp"});
my $ts = "$y-$m-$d";
@@ -136,6 +141,8 @@
$ph0{$ts} = 0+$c; # ping count is first thing in the comment
$sh0{$ts} = $c;
$sh0{$ts} =~ s/[^\:]*\:\s+//; # drop the "n pings:" comment prefix
+ # This really shouldn't happen.
+ if(defined($bh{$ts})) { die "Duplicate cached/fetched id datapoints for $y-$m-$d: $bh{$ts}, $b.\n", Dumper $x, "\n"; }
$bh{$ts} = $b;
}
}
@@ -194,6 +201,15 @@
if ($p1 > $p0) { $plus += ($p1-$p0); }
elsif($p1 < $p0) { $minus += ($p0-$p1); }
beemupdate($usr, $slug, $b, $t, ($p1*$ping), splur($p1,"ping").": ".$s1);
+ # If this fails, it may well be because the point being updated was deleted/
+ # replaced on another machine (possibly as the result of a merge) and is no
+ # longer on the server. In which case we should probably fail gracefully
+ # rather than failing with an ERROR (see beemupdate()) and not fixing
+ # the problem, which requires manual cache-deleting intervention.
+ # Restarting the script after deleting the offending cache is one option,
+ # though simply deleting the cache file and waiting for next time is less
+ # intrusive. Deleting the cache files when merging two TT logs would reduce
+ # the scope for this somewhat.
} else {
print "ERROR: can't tell what to do with this datapoint (old/new):\n";
print "$y $m $d ",$p0*$ping," \"$p0 pings: $s0 [bID:$b]\"\n";
View
3  script/ttlogmerge.pl
@@ -35,6 +35,9 @@ sub parse
{
my $s = $_[0];
my @tokens = split(/\s+/, $s);
+ # XXX FIXME: This may fail where huge numbers of tags are added:
+ # It appears TT shortens the human-readable date string to stay
+ # under 80 characters per line.
for my $i (1..3) { pop(@tokens) } # Discard date string
# print "parse: ", $_[0], @tokens;
return @tokens;

0 comments on commit eb23d62

Please sign in to comment.
Something went wrong with that request. Please try again.