&leakCheck(2); # mandatory
&ignoreCheck(1); # mandatory
&seenFlushOld(2);
-# &ircCheck(2); # mandatory
&ircCheck(1); # mandatory
&miscCheck(1); # mandatory
&miscCheck2(2); # mandatory
return if ($_[0] eq "2"); # defer.
}
- my $line = &getRandomLineFromFile($bot_misc_dir. "/blootbot.randtext");
+ my $line = &getRandomLineFromFile($bot_data_dir. "/blootbot.randtext");
if (!defined $line) {
&ERROR("random Quote: weird error?");
return;
}
### check if all the logs exceed size.
- my $logdir = "$bot_base_dir/log/";
- if (opendir(LOGS, $logdir)) {
+ if (opendir(LOGS, $bot_log_dir)) {
my $tsize = 0;
my (%age, %size);
while (defined($_ = readdir LOGS)) {
- my $logfile = "$logdir/$_";
+ my $logfile = "$bot_log_dir/$_";
next unless ( -f $logfile);
my $size = -s $logfile;
### TODO: add how many b,kb,mb removed?
&status("LOG: removed $delete logs.") if ($delete);
} else {
- &WARN("could not open dir $logdir");
+ &WARN("could not open dir $bot_log_dir");
}
}
my $max_time = &getChanConfDefault("seenMaxDays", 30) *60*60*24;
my $delete = 0;
- if ($param{'DBType'} =~ /^pg|postgres|mysql/i) {
- my $query = "SELECT nick,time FROM seen GROUP BY nick HAVING UNIX_TIMESTAMP() - time > $max_time";
+ if ($param{'DBType'} =~ /^pgsql|mysql/i) {
+ my $query;
+
+ if ($param{'DBType'} =~ /^mysql$/i) {
+ $query = "SELECT nick,time FROM seen GROUP BY nick HAVING ".
+ "UNIX_TIMESTAMP() - time > $max_time";
+ } else { # pgsql.
+ $query = "SELECT nick,time FROM seen WHERE ".
+ "extract(epoch from timestamp 'now') - time > $max_time";
+ }
+
my $sth = $dbh->prepare($query);
$sth->execute;
}
my $str = join(' ', &ChanConfList("chanlimitcheck") );
- &DEBUG("chanlimitCheck: str => $str");
foreach $chan ( &ChanConfList("chanlimitcheck") ) {
next unless (&validChan($chan));
&status("chanlimit: netsplit; removing it for $chan.");
&rawout("MODE $chan -l");
$cache{chanlimitChange}{$chan} = time();
+ &status("chanlimit: netsplit; removed.");
}
next;
}
$cache{'netsplitCache'}++;
- &DEBUG("running netsplitCheck... $cache{netsplitCache}");
+# &DEBUG("running netsplitCheck... $cache{netsplitCache}");
if (!scalar %netsplit and scalar %netsplitservers) {
&DEBUG("nsC: !hash netsplit but hash netsplitservers <- removing!");
if ($delete) {
my $j = scalar(keys %netsplit);
- &DEBUG("nsC: removed from netsplit list: (before: $count; after: $j)");
+ &status("nsC: removed from netsplit list: (before: $count; after: $j)");
}
if (!scalar %netsplit and scalar %netsplitservers) {
$stats{'new'} = 0;
$stats{'old'} = 0;
- if ($param{'DBType'} =~ /^mysql|pg|postgres/i) {
+ if ($param{'DBType'} =~ /^(mysql|pgsql)$/i) {
foreach $nick (keys %seencache) {
- my $retval = &dbReplace("seen", (
- "nick" => $seencache{$nick}{'nick'},
+ my $retval = &dbReplace("seen", "nick", (
+ "nick" => lc $seencache{$nick}{'nick'},
"time" => $seencache{$nick}{'time'},
"host" => $seencache{$nick}{'host'},
"channel" => $seencache{$nick}{'chan'},
$count += scalar(keys %{ $flood{$blah1}{$blah2} });
}
}
- &DEBUG("leak: hash flood has $count total keys.",2);
+ &VERB("leak: hash flood has $count total keys.",2);
# floodjoin.
$count = 0;
$count += scalar(keys %{ $floodjoin{$blah1}{$blah2} });
}
}
- &DEBUG("leak: hash floodjoin has $count total keys.",2);
+ &VERB("leak: hash floodjoin has $count total keys.",2);
# floodwarn.
$count = scalar(keys %floodwarn);
- &DEBUG("leak: hash floodwarn has $count total keys.",2);
+ &VERB("leak: hash floodwarn has $count total keys.",2);
my $chan;
foreach $chan (grep /[A-Z]/, keys %channels) {
# chanstats
$count = scalar(keys %chanstats);
- &DEBUG("leak: hash chanstats has $count total keys.",2);
+ &VERB("leak: hash chanstats has $count total keys.",2);
# nuh.
my $delete = 0;
$count++;
}
}
+
+ $cache{ignoreCheckTime} = time();
+
&VERB("ignore: $count items deleted.",2);
}
return if ($_[0] eq "2"); # defer.
}
+ $cache{statusSafe} = 1;
+
my @x = &getJoinChans();
my $iconf = scalar( @x );
my $inow = scalar( keys %channels );
if (grep /^\s*$/, keys %channels) {
&WARN("ircCheck: we have a NULL chan in hash channels? removing!");
- if (exists $channels{''}) {
- &DEBUG("ircCheck: ok it existed!");
- } else {
- &DEBUG("ircCheck: this hsould never happen!");
+ if (!exists $channels{''}) {
+ &DEBUG("ircCheck: this should never happen!");
}
delete $channels{''};
}
+ $cache{statusSafe} = 0;
+
### USER FILE.
if ($utime_userfile > $wtime_userfile and time() - $wtime_userfile > 3600) {
&writeUserFile();
sub miscCheck {
if (@_) {
- &ScheduleThis(240, "miscCheck");
+ &ScheduleThis(120, "miscCheck");
return if ($_[0] eq "2"); # defer.
}
+ # debian check.
+ opendir(DEBIAN, "$bot_state_dir/debian");
+ foreach ( grep /gz$/, readdir(DEBIAN) ) {
+ my $exit = CORE::system("gzip -t $bot_state_dir/debian/$_");
+ next unless ($exit);
+
+ &status("debian: unlinking file => $_");
+ unlink "$bot_state_dir/debian/$_";
+ }
+ closedir DEBIAN;
+
# SHM check.
my @ipcs;
if ( -x "/usr/bin/ipcs") {
}
# make backup of important files.
- &mkBackup( $bot_misc_dir."/blootbot.chan", 60*60*24*3);
- &mkBackup( $bot_misc_dir."/blootbot.users", 60*60*24*3);
+ &mkBackup( $bot_state_dir."/blootbot.chan", 60*60*24*3);
+ &mkBackup( $bot_state_dir."/blootbot.users", 60*60*24*3);
&mkBackup( $bot_base_dir."/blootbot-news.txt", 60*60*24*1);
# flush cache{lobotomy}
return if ($_[0] eq "2"); # defer.
}
- # debian check.
- opendir(DEBIAN, "$bot_base_dir/debian");
- foreach ( grep /gz$/, readdir(DEBIAN) ) {
- my $exit = CORE::system("gzip -t $bot_base_dir/debian/$_");
- next unless ($exit);
-
- &status("debian: unlinking file => $_");
- unlink "$bot_base_dir/debian/$_";
- }
- closedir DEBIAN;
-
# compress logs that should have been compressed.
# todo: use strftime?
my ($day,$month,$year) = (localtime(time()))[3,4,5];
my $date = sprintf("%04d%02d%02d",$year+1900,$month+1,$day);
- opendir(DIR,"$bot_base_dir/log");
+ if (!opendir(DIR,"$bot_log_dir")) {
+	&ERROR("miscCheck2: log dir $bot_log_dir does not exist.");
+ closedir DIR;
+ return -1;
+ }
+
while (my $f = readdir(DIR)) {
- next unless ( -f "$bot_base_dir/log/$f");
+ next unless ( -f "$bot_log_dir/$f");
next if ($f =~ /gz|bz2/);
next unless ($f =~ /(\d{8})/);
next if ($date eq $1);
- &compress("$bot_base_dir/log/$f");
+ &compress("$bot_log_dir/$f");
}
closedir DIR;
}