# Timestamp. Used for dinstall stat graphs
echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"

# If we error out this one is called, *FOLLOWED* by cleanup above
ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")

subject="ATTENTION ATTENTION!"
if [ "${error}" = "false" ]; then
    subject="${subject} (continued)"
else
    subject="${subject} (interrupted)"
fi
subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"

cat "${STAGEFILE}.log" | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org
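
# Note: ${error} and ${STAGEFILE} are presumably set by the stage driver that
# invokes this handler when one of the dinstall steps fails.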

########################################################################
# the actual dinstall functions follow                                 #
########################################################################

# pushing merkel's QA user, part one
log "Telling merkel's QA user that we start dinstall"
ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@merkel.debian.org sleep 1
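
# The ssh call itself only runs "sleep 1"; the actual notification is
# presumably handled by a forced command bound to the push_merkel_qa key on
# merkel. BatchMode plus the 90 second timeouts keep an unreachable merkel
# from hanging dinstall.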

# Create the postgres dump files
function pgdump_pre() {
    log "Creating pre-daily-cron-job backup of projectb database..."
    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}

function pgdump_post() {
    log "Creating post-daily-cron-job backup of projectb database..."
    POSTDUMP=$(date +%Y.%m.%d-%H:%M:%S)
    pg_dump projectb > $base/backup/dump_$POSTDUMP
    pg_dumpall --globals-only > $base/backup/dumpall_$POSTDUMP
    ln -sf $base/backup/dump_$POSTDUMP $base/backup/current
    ln -sf $base/backup/dumpall_$POSTDUMP $base/backup/currentall
}
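
# current/currentall always point at the most recent dump pair; the dak-dev
# reload below reads them.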

# Load the dak-dev projectb
echo "drop database projectb" | psql -p 5433 template1
cat currentall | psql -p 5433 template1
createdb -p 5433 -T template0 projectb
fgrep -v '\connect' current | psql -p 5433 projectb
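
# Reload sequence for the dak-dev cluster on port 5433, assuming we are in the
# backup directory where current/currentall live: restore the global objects
# into template1, recreate an empty projectb from template0, then replay the
# latest dump. The \connect line is filtered out because psql is already
# pointed at projectb explicitly.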

# Updating various files
log "Updating Bugs documentation, mirror lists, mailing-lists.txt and pseudo-packages"

$scriptsdir/update-bugdoctxt
$scriptsdir/update-mirrorlists
$scriptsdir/update-mailingliststxt
$scriptsdir/update-pseudopackages.sh

# Process (oldstable)-proposed-updates "NEW" queue
dak process-policy $1 | tee -a REPORT | mail -e -s "NEW changes in $1" debian-release@lists.debian.org
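
# tee keeps a copy of the process-policy output in REPORT; the -e flag to
# mail is presumably the local mail(1)'s "do not send empty messages" option,
# so no mail goes out when there was nothing to process.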
79 log "Doing automated p-u-new processing"
80 cd "${queuedir}/p-u-new"
84 log "Doing automated o-p-u-new processing"
85 cd "${queuedir}/o-p-u-new"

# The first i18n one, syncing new descriptions
log "Synchronizing i18n package descriptions"
# First sync their newest data
cd ${scriptdir}/i18nsync
rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true

# Now check if we still know about the packages for which they created the files
# is the timestamp signed by us?
if gpgv --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
    # now read it. As it's signed by us we are sure the content is what we expect, no need
    # to do more here. And we only test -d a directory on it anyway.
    TSTAMP=$(cat timestamp)
    # do we have the dir still?
    if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
        if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
            # Yay, worked, let's copy around
            for dir in squeeze sid; do
                if [ -d dists/${dir}/ ]; then
                    cd dists/${dir}/main/i18n
                    rsync -aq --delete --delete-after . ${ftpdir}/dists/${dir}/main/i18n/.
                fi
                cd ${scriptdir}/i18nsync
            done
        else
            echo "ARRRR, bad guys, wrong files, ARRR"
            echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
        echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org
    fi
else
    echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
    echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org
fi
129 log "Checking for cruft in overrides"

function dominate() {
    log "Removing obsolete source and binary associations"
    dak dominate
}

function filelist() {
    log "Generating file lists for apt-ftparchive"
    dak generate-filelist
}

function fingerprints() {
    log "Not updating fingerprints - script needs checking"

    log "Updating fingerprints"
    dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg

    dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"

    if [ -s "${OUTFILE}" ]; then
        /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
To: <debian-project@lists.debian.org>
Subject: Debian Maintainers Keyring changes
Content-Type: text/plain; charset=utf-8

The following changes to the debian-maintainers keyring have just been activated:

$(cat "${OUTFILE}")

Debian distribution maintenance software,
on behalf of the Keyring maintainers
EOF
    fi
}

function overrides() {
    log "Writing overrides into text files"

    rm -f override.sid.all3
    for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
183 log "Generating package / file mapping"
184 dak make-pkg-file-mapping | bzip2 -9 > $base/ftp/indices/package-file.map.bz2

function packages() {
    log "Generating Packages and Sources files"
    GZIP='--rsyncable' ; export GZIP
    apt-ftparchive generate apt.conf

log "Generating pdiff files"
dak generate-index-diffs

log "Generating Release files"
dak generate-releases

function dakcleanup() {
    log "Cleaning up old packages/files"
    dak clean-suites -m 10000

function buildd_dir() {
    # Rebuild the buildd dir to avoid long periods of 403s
    log "Regenerating the buildd incoming dir"
    STAMP=$(date "+%Y%m%d%H%M")
222 log "Removing any core files ..."
223 find -type f -name core -print0 | xargs -0r rm -v
225 log "Checking permissions on files in the FTP tree ..."
226 find -type f \( \! -perm -444 -o -perm +002 \) -ls
227 find -type d \( \! -perm -555 -o -perm +002 \) -ls
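
# The two find commands above only report (-ls) problems, they do not fix
# them: files that are not world-readable (444) or are world-writable, and
# directories that are not world-traversable (555) or are world-writable.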
229 log "Checking symlinks ..."
232 log "Creating recursive directory listing ... "
233 rm -f .${FILENAME}.new
234 TZ=UTC ls -lR > .${FILENAME}.new
236 if [ -r ${FILENAME}.gz ] ; then
237 mv -f ${FILENAME}.gz ${FILENAME}.old.gz
238 mv -f .${FILENAME}.new ${FILENAME}
239 rm -f ${FILENAME}.patch.gz
240 zcat ${FILENAME}.old.gz | diff -u - ${FILENAME} | gzip --rsyncable -9cfn - >${FILENAME}.patch.gz
241 rm -f ${FILENAME}.old.gz
243 mv -f .${FILENAME}.new ${FILENAME}
246 gzip --rsyncable -9cfN ${FILENAME} >${FILENAME}.gz
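
# The block above maintains the recursive listing in ${FILENAME} (set
# elsewhere): when a previous gzipped listing exists, a unified diff against
# it is published as ${FILENAME}.patch.gz so mirrors can update cheaply;
# either way the fresh listing replaces the old one and is recompressed with
# --rsyncable.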

function mkmaintainers() {
    log 'Creating Maintainers index ... '

    dak make-maintainers ${scriptdir}/masterfiles/pseudo-packages.maintainers | \
        sed -e "s/~[^ ]*\([ ]\)/\1/" | \
        awk '{printf "%-20s ", $1; for (i=2; i<=NF; i++) printf "%s ", $i; printf "\n";}' > .new-maintainers
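
    # dak make-maintainers appears to emit "package~version maintainer" lines;
    # the sed drops the ~version part and the awk pads the package name into a
    # fixed-width column before the result lands in .new-maintainers.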

    if ! cmp -s .new-maintainers Maintainers || [ ! -f Maintainers ]; then
        log "installing Maintainers ... "
        mv -f .new-maintainers Maintainers
        gzip --rsyncable -9v <Maintainers >.new-maintainers.gz
        mv -f .new-maintainers.gz Maintainers.gz
    fi
    rm -f .new-maintainers
}

function copyoverrides() {
    log 'Copying override files into public view ...'

    for ofile in $copyoverrides ; do
        chmod g+w override.$ofile

        newofile=override.$ofile.gz
        rm -f .newover-$ofile.gz
        pc="`gzip 2>&1 -9nv <$overridedir/override.$ofile >.newover-$ofile.gz`"
        if ! cmp -s .newover-$ofile.gz $newofile || [ ! -f $newofile ]; then
            log " installing new $newofile $pc"
            mv -f .newover-$ofile.gz $newofile
        else
            rm -f .newover-$ofile.gz
        fi
    done
}
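
# Each override file is recompressed into a temporary .newover-* file and only
# moved over the published copy when cmp says it actually changed, so mtimes
# stay stable and mirrors do not refetch identical files; $pc carries gzip's
# verbose statistics for the log line.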

function mkfilesindices() {
    cd $base/ftp/indices/files/components

    log "Querying projectb..."
    echo 'SELECT l.path, f.filename, a.arch_string FROM location l JOIN files f ON (f.location = l.id) LEFT OUTER JOIN (binaries b JOIN architecture a ON (b.architecture = a.id)) ON (f.id = b.file)' | psql projectb -At | sed 's/|//;s,^/srv/ftp.debian.org/ftp,.,' | sort >$ARCHLIST
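
    # Each $ARCHLIST line is "path|architecture" ($ARCHLIST itself is assumed
    # to be a scratch file set up earlier): the first sed joins projectb's path
    # and filename columns, the second rewrites the absolute
    # /srv/ftp.debian.org/ftp prefix to ".", and the architecture column stays
    # empty for files without a binary association.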

    perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
    perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
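
    # The first one-liner prints every path it reads plus all of its parent
    # directories (deduplicated); the second buffers non-pool lines and emits
    # ./pool/ entries first - it looks like the body of the poolfirst helper
    # used further down (the wrapping function definitions are not shown here).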
306 log "Generating sources list"
308 sed -n 's/|$//p' $ARCHLIST
310 find ./dists -maxdepth 1 \! -type d
311 find ./dists \! -type d | grep "/source/"
312 ) | sort -u | gzip --rsyncable -9 > source.list.gz
314 log "Generating arch lists"
316 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
318 (sed -n "s/|$a$//p" $ARCHLIST
319 sed -n 's/|all$//p' $ARCHLIST
322 find ./dists -maxdepth 1 \! -type d
323 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
324 ) | sort -u | gzip --rsyncable -9 > arch-$a.list.gz
327 log "Generating suite lists"
330 printf 'SELECT DISTINCT l.path, f.filename FROM (SELECT sa.source AS source FROM src_associations sa WHERE sa.suite = %d UNION SELECT b.source AS source FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) WHERE ba.suite = %d) s JOIN dsc_files df ON (s.source = df.source) JOIN files f ON (df.file = f.id) JOIN location l ON (f.location = l.id)\n' $1 $1 | psql -F' ' -A -t projectb
332 printf 'SELECT l.path, f.filename FROM bin_associations ba JOIN binaries b ON (ba.bin = b.id) JOIN files f ON (b.file = f.id) JOIN location l ON (f.location = l.id) WHERE ba.suite = %d\n' $1 | psql -F' ' -A -t projectb
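
    # For the suite id given as $1, suite_list returns the pool paths of all
    # source files (via dsc_files) and of all binary packages in that suite.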

    printf 'SELECT id, suite_name FROM suite\n' | psql -F' ' -At projectb |
    while read id suite; do
        [ -e $base/ftp/dists/$suite ] || continue
        (
            cd $base/ftp
            distname=$(cd dists; readlink $suite || echo $suite)
            find ./dists/$distname \! -type d
            for distdir in ./dists/*; do
                [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
            done
            suite_list $id | tr -d ' ' | sed 's,^/srv/ftp.debian.org/ftp,.,'
        ) | sort -u | gzip --rsyncable -9 > suite-${suite}.list.gz
    done
350 log "Finding everything on the ftp site to generate sundries"
351 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
354 zcat *.list.gz | cat - *.list | sort -u |
355 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
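
    # sundries.list ends up with everything present in the ftp tree that is
    # not already covered by one of the generated *.list(.gz) files; the
    # sed -n 's/^> //p' keeps only the diff lines unique to $ARCHLIST.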
357 log "Generating files list"
360 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
361 cat - sundries.list dists.list project.list docs.list indices.list |
362 sort -u | poolfirst > ../arch-$a.files

    for dist in sid squeeze; do
        find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip --rsyncable -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
    done

    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-oldstable.list.gz suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-squeeze.list.gz) |
    sort -u | poolfirst > ../typical.files
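
    # typical.files appears to approximate what a typical partial mirror
    # carries: the i386 and amd64 file lists plus a few suite and translation
    # lists, again with pool/ entries sorted to the front by poolfirst.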

function mkchecksums() {
    dsynclist=$dbdir/dsync.list
    md5list=$indices/md5sums

    log -n "Creating md5 / dsync index file ... "

    ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
    ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n --rsyncable > ${md5list}.gz
    ${bindir}/dsync-flist -q link-dups $dsynclist || true
}
391 log "Regenerating \"public\" mirror/ hardlink fun"
393 rsync -aH --link-dest ${ftpdir} --delete --delete-after --ignore-errors ${ftpdir}/. .
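
# --link-dest makes rsync hardlink every unchanged file against the master
# ${ftpdir} tree, so this "public" copy costs almost no additional disk space.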
397 log "Expiring old database dumps..."
399 $scriptsdir/expire_dumps -d . -p -f "dump_*"

function transitionsclean() {
    log "Removing out of date transitions..."
    dak transitions -c -a
}

# Send a report on NEW/BYHAND packages
log "Nagging ftpteam about NEW/BYHAND packages"
dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org

# and one on crufty packages
log "Sending information about crufty packages"
dak cruft-report > $webdir/cruft-report-daily.txt
dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
420 log "Updating DM html page"
421 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
425 log "Categorizing uncategorized bugs filed against ftp.debian.org"

# Push dak@merkel so it syncs the projectb there. Returns immediately; the sync runs detached.
log "Trigger merkel/flotow's projectb sync"
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
# Also trigger flotow, the ftpmaster test box
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1

# Push dak@merkel to tell it to sync the DD-accessible parts. Returns immediately; the sync runs detached.
log "Trigger merkel's DD-accessible parts sync"
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1

function mirrorpush() {
    log "Starting the mirrorpush"
    date -u > /srv/ftp.debian.org/web/mirrorstart
    echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
    echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
    sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
}
452 log "Exporting package data foo for i18n project"
453 STAMP=$(date "+%Y%m%d%H%M")
454 mkdir -p ${scriptdir}/i18n/${STAMP}
455 cd ${scriptdir}/i18n/${STAMP}
456 dak control-suite -l stable > lenny
457 dak control-suite -l testing > squeeze
458 dak control-suite -l unstable > sid
459 echo "${STAMP}" > timestamp
460 gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 55BE302B --detach-sign -o timestamp.gpg timestamp
464 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
467 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
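
# Clean out old exports: anything directly below ./i18n that is a directory,
# is older than two days and is not the export just created (${STAMP}).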
471 log "Updating stats data"
473 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
474 R --slave --vanilla < $base/misc/ftpstats.R
475 dak stats arch-space > $webdir/arch-space
476 dak stats pkg-nums > $webdir/pkg-nums

function aptftpcleanup() {
    log "Clean up apt-ftparchive's databases"
    apt-ftparchive -q clean apt.conf
}

function compress() {
    log "Compressing old psql backups"
    cd $base/backup

    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm

    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
    while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done

    find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
    while read dumpname; do
        echo "Compressing $dumpname"
        bzip2 -9fv "$dumpname"
    done

    finddup -l -d $base/backup
}
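
# Retention policy: uncompressed pre-dinstall dumps older than two days are
# deleted, dump_*/dumpall_* files older than 720 minutes (12 hours) are
# bzip2-compressed, and finddup presumably hardlinks identical backups to
# save space.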

function logstats() {
    $masterdir/tools/logs.py "$1"
}

# save timestamp when we start
function savetimestamp() {
    NOW=`date "+%Y.%m.%d-%H:%M:%S"`
    echo ${NOW} > "${dbdir}/dinstallstart"
}

function maillogfile() {
    cat "$LOGFILE" | mail -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org
}

function renamelogfile() {
    if [ -f "${dbdir}/dinstallstart" ]; then
        NOW=$(cat "${dbdir}/dinstallstart")
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        logstats "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    else
        error "Problem, I don't know when dinstall started, unable to do log statistics."
        NOW=`date "+%Y.%m.%d-%H:%M:%S"`
        mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
        bzip2 -9 "$logdir/dinstall_${NOW}.log"
    fi
}

function testingsourcelist() {
    dak ls -s testing -f heidi -r . | egrep 'source$' > ${webdir}/testing.list
}

# Do a last run of process-unchecked just before dinstall proper starts.
function process_unchecked() {
    log "Processing the unchecked queue"
    UNCHECKED_WITHOUT_LOCK="-p"

# Do a run of newstage only, just before dinstall proper starts.
function newstage() {
    log "Processing the newstage queue"
    UNCHECKED_WITHOUT_LOCK="-p"
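
# UNCHECKED_WITHOUT_LOCK holds the extra flag passed to the queue processing
# helper defined elsewhere; judging by the name, -p makes it run without
# taking the unchecked lock.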

# Function to update a "statefile" telling people what we are doing.
#
# This should be called with the argument(s)
# - Status name we want to show.
RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
cat >"${DINSTALLSTATE}" <<EOF
Dinstall start: ${DINSTALLBEGIN}
Current action: ${1}
Action start: ${RIGHTNOW}
EOF