1 # Timestamp. Used for dinstall stat graphs
# Emits a wall-clock marker for the stage named in $1; the dinstall
# stat graphs are later built from these timestamp lines in the log.
3 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
12 # If we error out this one is called, *FOLLOWED* by cleanup above
# (Error-handler fragment; the enclosing function definition and the
# else/fi lines are outside this view.)  Builds a mail subject from
# the failure time and stage name, then mails that stage's log file.
14 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
16 subject="ATTENTION ATTENTION!"
# NOTE(review): ${error} and ${STAGEFILE} are set elsewhere in the
# file; "false" appears to mean a continued (non-fatal) run — confirm.
17 if [ "${error}" = "false" ]; then
18 subject="${subject} (continued)"
20 subject="${subject} (interrupted)"
22 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Send the failed stage's log to the cron alias so the team sees what broke.
24 cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
27 ########################################################################
28 # the actual dinstall functions follow #
29 ########################################################################
31 # pushing merkels QA user, part one
# Fire-and-forget ssh trigger: logging in (with a no-op "sleep 1"
# payload) tells the QA user on merkel that dinstall has started.
33 log "Telling merkels QA user that we start dinstall"
# NOTE(review): SetupTimeOut is not a stock OpenSSH client option
# (presumably a Debian-patched ssh of the era); stock ssh would
# reject it — confirm before reusing elsewhere.
34 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
37 # Create the postgres dump files
# Dump projectb before the daily run so a broken dinstall can be
# rolled back from $base/backup/dump_pre_<timestamp>.
38 function pgdump_pre() {
39 log "Creating pre-daily-cron-job backup of projectb database..."
40 pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
# Post-run dump: the database plus the cluster globals (roles etc.),
# with "current"/"currentall" symlinks pointing at the newest pair.
43 function pgdump_post() {
44 log "Creating post-daily-cron-job backup of projectb database..."
46 POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
47 pg_dump projectb > $base/backup/dump_$POSTDUMP
48 pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
# NOTE(review): the symlinks land in the current working directory,
# not in $base/backup — presumably cwd is $base/backup here; confirm
# against the lines not shown in this view.
49 ln -sf $base/backup/dump_$POSTDUMP current
50 ln -sf $base/backup/dumpall_$POSTDUMP currentall
53 # Load the dak-dev projectb
# Recreates the development copy of projectb in the second Postgres
# cluster (port 5433) from the "current"/"currentall" dump symlinks.
56 echo "drop database projectb" | psql -p 5433 template1
# Globals (roles etc.) first, then a fresh empty database...
57 cat currentall | psql -p 5433 template1
58 createdb -p 5433 -T template0 projectb
# ...then the data; \connect lines are filtered out because we are
# already connected to the target database.
59 fgrep -v '\connect' current | psql -p 5433 projectb
62 # Updating various files
# Refresh the generated text files (bug docs, mirror list,
# mailing-lists.txt, pseudo-packages) published on the site.
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
72 # Process (oldstable)-proposed-updates "NEW" queue
# Automated acceptance of the (o-)p-u NEW queues; "|| true" keeps the
# cron run going even if process-new exits non-zero.
76 dak process-new -a -C COMMENTS >> REPORT || true
80 log "Doing automated p-u-new processing"
84 log "Doing automated o-p-u-new processing"
88 # The first i18n one, syncing new descriptions
# Pull the newest DDTP translation data, verify that the included
# timestamp file was signed by our key, sanity-check the tree, and
# only then rsync it into the live dists/ i18n directories.
90 log "Synchronizing i18n package descriptions"
91 # First sync their newest data
92 cd ${scriptdir}/i18nsync
93 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
95 # Now check if we still know about the packages for which they created the files
96 # is the timestamp signed by us?
# NOTE(review): "if $(cmd)" runs gpgv in a command substitution and
# then executes its (normally empty) stdout; it happens to propagate
# gpgv's exit status, but "if gpgv ...; then" would be the
# conventional spelling — confirm before changing.
97 if $(gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
98 # now read it. As its signed by us we are sure the content is what we expect, no need
99 # to do more here. And we only test -d a directory on it anyway.
100 TSTAMP=$(cat timestamp)
101 # do we have the dir still?
102 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the consistency-check script; only publish when it is happy.
104 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
105 # Yay, worked, lets copy around
106 for dir in squeeze sid; do
107 if [ -d dists/${dir}/ ]; then
108 cd dists/${dir}/main/i18n
109 rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
111 cd ${scriptdir}/i18nsync
# Failure branches: complain on stdout and mail the l10n list instead
# of publishing anything.
114 echo "ARRRR, bad guys, wrong files, ARRR"
115 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
118 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
119 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
122 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
123 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
128 log "Checking for cruft in overrides"
# Build the per-suite file lists consumed by apt-ftparchive.
133 log "Generating suite file lists for apt-ftparchive"
134 dak make-suite-file-list
# Newer variant: generate the file lists via dak itself.
137 function filelist() {
138 log "Generating file lists for apt-ftparchive"
139 dak generate-filelist
142 function fingerprints() {
# Keyring import is currently disabled.  The commented-out code below
# imported the debian and debian-maintainers keyrings and mailed a
# summary of DM keyring changes to debian-project.
143 log "Not updating fingerprints - scripts needs checking"
145 # log "Updating fingerprints"
146 # dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
149 # dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
151 # if [ -s "${OUTFILE}" ]; then
152 # /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
153 #From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
154 #To: <debian-project@lists.debian.org>
155 #Subject: Debian Maintainers Keyring changes
156 #Content-Type: text/plain; charset=utf-8
159 #The following changes to the debian-maintainers keyring have just been activated:
163 #Debian distribution maintenance software,
164 #on behalf of the Keyring maintainers
171 function overrides() {
172 log "Writing overrides into text files"
# Concatenate the per-component sid overrides into one combined file
# for external consumers.
177 rm -f override.sid.all3
178 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
# Map every published file back to its source package, bzip2'ed into
# the ftp indices directory.
182 log "Generating package / file mapping"
183 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2
186 function packages() {
187 log "Generating Packages and Sources files"
# GZIP=--rsyncable makes gzip produce rsync-friendly output for every
# compressed index apt-ftparchive writes below.
189 GZIP='--rsyncable' ; export GZIP
190 apt-ftparchive generate apt.conf
# pdiffs let mirrors/apt fetch index deltas instead of full files.
194 log "Generating pdiff files"
195 dak generate-index-diffs
199 log "Generating Release files"
200 dak generate-releases
203 function dakcleanup() {
204 log "Cleanup old packages/files"
205 # TODO: Fix up clean-suites
# Cleanup is currently a no-op until clean-suites is fixed.
206 #dak clean-suites -m 10000
# Rebuild the buildd incoming dir so buildds do not face long 403s
# while the archive is being worked on.
210 function buildd_dir() {
211 # Rebuilt the buildd dir to avoid long times of 403
212 log "Regenerating the buildd incoming dir"
213 STAMP=$(date "+%Y%m%d%H%M")
# FTP-tree housekeeping: drop stray core files, report bad
# permissions, then regenerate the recursive ls-lR listing plus an
# incremental patch against the previous one.
222 log "Removing any core files ..."
223 find -type f -name core -print0 | xargs -0r rm -v
# Report (not fix) unreadable or group/world-writable entries.
# NOTE(review): "-perm +002" is the old GNU findutils spelling
# (modern find wants "/002") — fine for the find this targeted.
225 log "Checking permissions on files in the FTP tree ..."
226 find -type f \( \! -perm -444 -o -perm +002 \) -ls
227 find -type d \( \! -perm -555 -o -perm +002 \) -ls
229 log "Checking symlinks ..."
# Build the new listing under a dot-name, then rotate it into place.
232 log "Creating recursive directory listing ... "
233 rm -f .${FILENAME}.new
234 TZ=UTC ls -lR > .${FILENAME}.new
# If a previous listing exists, also publish a unified diff so
# mirrors can patch instead of refetching the whole file.
236 if [ -r ${FILENAME}.gz ] ; then
237 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
238 mv -f .${FILENAME}.new ${FILENAME}
239 rm -f ${FILENAME}.patch.gz
240 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
241 rm -f ${FILENAME}.old.gz
243 mv -f .${FILENAME}.new ${FILENAME}
246 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
250 function mkmaintainers() {
251 log -n 'Creating Maintainers index ... '
# Strip "~revision" suffixes from package names and align the output
# into the classic two-column Maintainers format.
254 dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
255 sed -e "s/~[^ ]*\([ ]\)/\1/" | awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
# Only install when content changed (cmp rc: 0 same, 1 different).
# NOTE(review): $rc is captured on lines not shown in this view.
258 cmp .new-maintainers Maintainers >/dev/null
261 if [ $rc = 1 ] || [ ! -f Maintainers ] ; then
262 log -n "installing Maintainers ... "
263 mv -f .new-maintainers Maintainers
264 gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
265 mv -f .new-maintainers.gz Maintainers.gz
266 elif [ $rc = 0 ] ; then
267 log '(same as before)'
268 rm -f .new-maintainers
275 function copyoverrides() {
276 log 'Copying override files into public view ...'
# $copyoverrides is a space-separated list of override suffixes set
# elsewhere in the configuration — word-splitting here is intended.
278 for f in $copyoverrides ; do
280 chmod g+w override.$f
# Recompress and install only when content changed; $pc captures
# gzip -v's compression report for the log line below.
284 pc="`gzip 2>&1 -9nv <$overridedir/override.$f >.newover-$f.gz`"
287 cmp -s .newover-$f.gz $nf
# NOTE(review): $rc and $nf are set on lines not shown in this view.
292 elif [ $rc = 1 -o ! -f $nf ]; then
293 log " installing new $nf $pc"
294 mv -f .newover-$f.gz $nf
303 function mkfilesindices() {
# Generates the per-arch / per-suite file lists under
# ftp/indices/files/components, driven by projectb queries.
305 cd $base/ftp/indices/files/components
309 log "Querying projectb..."
# One row per archive file ("path/filename|arch"; arch is empty for
# files with no binary association), rewritten relative to the ftp
# root and sorted into $ARCHLIST.
310 echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
# Helper pipeline: echo each path plus every parent directory, once.
313 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Helper pipeline: order ./pool/ paths before everything else.
316 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Sources list, then one list per architecture (amd64 is forced into
# the set; "all" is folded into each per-arch list, not listed alone).
319 log "Generating sources list
321 sed -n 's/|$//p' $ARCHLIST
323 find ./dists -maxdepth 1 \! -type d
324 find ./dists \! -type d | grep "/source/"
325 ) | sort -u | gzip --rsyncable -9 > source.list.gz
327 log "Generating arch lists
329 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
331 (sed -n "s/|$a$//p" $ARCHLIST
332 sed -n 's/|all$//p' $ARCHLIST
335 find ./dists -maxdepth 1 \! -type d
336 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
337 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
340 log "Generating suite lists"
# suite_list helper: all source and binary file paths for suite id $1.
343 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
345 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
# Walk every suite known to projectb that exists on disk, resolving
# codename symlinks so each dists/ tree is listed exactly once.
348 printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
349 while read id suite; do
350 [ -e $base/ftp/dists/$suite ] || continue
353 distname=$(cd dists; readlink $suite || echo $suite)
354 find ./dists/$distname \! -type d
355 for distdir in ./dists/*; do
356 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
359 suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
360 ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
# "sundries" = files on disk not accounted for by any generated list.
363 log "Finding everything on the ftp site to generate sundries"
365 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
368 zcat *.list.gz | cat - *.list | sort -u |
369 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
371 log "Generating files list"
374 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
375 cat - sundries.list dists.list project.list docs.list indices.list |
376 sort -u | poolfirst > ../arch-$a.files
# Per-dist translation lists plus a combined "typical mirror" list.
380 for dist in sid squeeze; do
381 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
385 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
386 sort -u | poolfirst > ../typical.files
392 function mkchecksums() {
# Rebuild the md5sums index and the dsync file list; link-dups
# hardlinks identical files to save space (its failure is tolerated).
393 dsynclist=$dbdir/dsync.list
394 md5list=$indices/md5sums
396 log -n "Creating md5 / dsync index file ... "
399 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
400 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
401 ${bindir}/dsync-flist -q link-dups $dsynclist || true
405 log "Running various scripts from $scriptsdir"
# Rebuild the hardlink farm backing the "public" mirror view: -H
# preserves hardlinks, --link-dest dedups against the live ftp tree.
414 log "Regenerating \"public\" mirror/ hardlink fun"
416 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
# Kick the daily wanna-build run on the buildd admin host; failure is
# reported by mail but does not abort dinstall.
420 log "Trigger daily wanna-build run"
421 ssh -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 wbadm@buildd /org/wanna-build/trigger.daily || echo "W-B trigger.daily failed" | mail -s "W-B Daily trigger failed" ftpmaster@ftp-master.debian.org
# Prune old database dumps matching dump_* in the current directory.
425 log "Expiring old database dumps..."
427 $scriptsdir/expire_dumps -d . -p -f "dump_*"
430 function transitionsclean() {
# Drop transition blocks that are no longer needed.
431 log "Removing out of date transitions..."
433 dak transitions -c -a
437 # Send a report on NEW/BYHAND packages
438 log "Nagging ftpteam about NEW/BYHAND packages"
# mail -e: only send when the body is non-empty, i.e. only nag when
# there is actually something in the queues.
439 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
440 # and one on crufty packages
441 log "Sending information about crufty packages"
442 dak cruft-report > $webdir/cruft-report-daily.txt
443 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
444 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
# Refresh the Debian Maintainers upload monitor page.
448 log "Updating DM html page"
449 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
453 log "Categorizing uncategorized bugs filed against ftp.debian.org"
458 # Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
459 log "Trigger merkel/flotows projectb sync"
# "sleep 1" is only a harmless payload for the login; the remote side
# starts the detached sync (presumably via a forced command bound to
# the key — confirm against the remote authorized_keys).
460 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
461 # Also trigger flotow, the ftpmaster test box
462 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
466 # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
467 log "Trigger merkels dd accessible parts sync"
468 ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
471 function mirrorpush() {
472 log "Starting the mirrorpush"
# Record start time, dak version and host in the web-visible
# mirrorstart file, then kick off runmirrors in the background.
473 date -u > /srv/ftp.debian.org/web/mirrorstart
474 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
475 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
476 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
480 log "Exporting package data foo for i18n project"
# Export the suite package lists into a timestamped directory, sign
# the timestamp file so the i18n side can verify it came from us,
# then point the "i18n" symlink at the fresh export.
481 STAMP=$(date "+%Y%m%d%H%M")
482 mkdir -p ${scriptdir}/i18n/${STAMP}
483 cd ${scriptdir}/i18n/${STAMP}
484 dak control-suite -l stable > lenny
485 dak control-suite -l testing > squeeze
486 dak control-suite -l unstable > sid
487 echo "${STAMP}" > timestamp
488 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
# ln -sfT: replace the symlink itself, never descend into the target.
492 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Drop exports older than two days, but never the one just created.
495 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
499 log "Updating stats data"
# Crunch the dinstall logs into ftpstats.data, render graphs via R,
# and refresh the arch-space / pkg-nums pages on the web dir.
501 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
502 R --slave --vanilla < $base/misc/ftpstats.R
503 dak stats arch-space > $webdir/arch-space
504 dak stats pkg-nums > $webdir/pkg-nums
507 function aptftpcleanup() {
508 log "Clean up apt-ftparchive's databases"
510 apt-ftparchive -q clean apt.conf
# Expire pre-dinstall dumps after two days, and bzip2 any dump older
# than 12 hours (720 minutes) that is not compressed yet.
513 function compress() {
514 log "Compress old psql backups"
516 find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
518 find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
519 while read dumpname; do
520 echo "Compressing $dumpname"
521 bzip2 -9fv "$dumpname"
523 find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
524 while read dumpname; do
525 echo "Compressing $dumpname"
526 bzip2 -9fv "$dumpname"
# Hardlink duplicate backup files to save space.
528 finddup -l -d $base/backup
531 function logstats() {
# Feed one finished dinstall logfile ($1) to the stats plotter.
532 $masterdir/tools/logs.py "$1"
535 # save timestamp when we start
# Record the run's start time so renamelogfile can key on it later.
536 function savetimestamp() {
537 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
538 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the full dinstall log to the cron alias.
541 function maillogfile() {
542 cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
545 function renamelogfile() {
# Rename and compress the live logfile after the run, keyed on the
# start timestamp written by savetimestamp; falls back to "now" (and
# skips the stats pass) when the start marker is missing.
546 if [ -f "${dbdir}/dinstallstart" ]; then
547 NOW=$(cat "${dbdir}/dinstallstart")
549 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
550 logstats "$logdir/dinstall_${NOW}.log"
551 bzip2 -9 "$logdir/dinstall_${NOW}.log"
553 error "Problem, I don't know when dinstall started, unable to do log statistics."
554 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
556 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
557 bzip2 -9 "$logdir/dinstall_${NOW}.log"
561 function testingsourcelist() {
# Publish the list of source packages currently in testing.
562 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
565 # do a last run of process-unchecked before dinstall is on.
566 function process_unchecked() {
567 log "Processing the unchecked queue"
# NOTE(review): judging by the variable name, "-p" presumably lets
# process-unchecked run without taking its own lock — confirm against
# dak.  (This function continues past the end of this view.)
568 UNCHECKED_WITHOUT_LOCK="-p"