2 # Timestamp. Used for dinstall stat graphs
# NOTE(review): fragmentary excerpt — the enclosing function definition is not
# visible here; $1 presumably names the dinstall stage being timestamped. TODO confirm.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
8 function remove_daily_lock() {
# (body not visible in this excerpt — presumably removes $LOCK_DAILY; TODO confirm)
12 # Remove changelog lock
13 function remove_changelog_lock() {
# (body not visible in this excerpt — presumably removes $LOCK_CHANGELOG; TODO confirm)
# Remove every dinstall lock file at once (used from cleanup/error paths).
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# NOTE(review): excerpt — the "function ... {" line of this error handler is
# not visible here; it reads ${error} and ${STAGEFILE} set by the caller.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
# (else-branch marker not visible in this excerpt)
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Mail the failing stage's log file — or an apology if it is unreadable —
# to the ftpmaster cron alias; the whole if/fi feeds one mail invocation.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
47 log "Telling QA user that we start dinstall"
# Best-effort notification: a remote forced command on qa.debian.org presumably
# reacts to this login; "sleep 1" is just a placeholder command. TODO confirm.
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
# Run the individual updater scripts shipped in $scriptsdir.
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# NOTE(review): fragmentary excerpt — several fi/done/else lines of this
# function are not visible; code below is kept byte-identical.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Sanity-check the synced data before copying it into the public archive.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in jessie sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure branches below: complain to the l10n list (check failed, missing
# timestamp dir, or bad signature, respectively).
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." debian-l10n-devel@lists.alioth.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
101 log "Checking for cruft in overrides"
# NOTE(review): excerpt — the command performing the cruft check and the
# surrounding function braces are not visible here.
# Drop source/binary suite associations that are superseded.
105 function dominate() {
106 log "Removing obsolete source and binary associations"
# Build the file lists consumed by apt-ftparchive.
110 function filelist() {
111 log "Generating file lists for apt-ftparchive"
112 dak generate-filelist
# Refresh uploader key fingerprints from the official keyrings.
115 function fingerprints() {
116 log "Updating fingerprints"
117 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
# Import the DM keyring; changes are captured in ${OUTFILE}
# (presumably a temp file created on a line not visible here — TODO confirm).
120 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# If anything changed, announce it to debian-project. Everything after the
# <<EOF opener is here-document mail body, so no comments may follow it
# (the EOF terminator itself is outside this excerpt).
122 if [ -s "${OUTFILE}" ]; then
123 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
124 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
125 To: <debian-project@lists.debian.org>
126 Subject: Debian Maintainers Keyring changes
127 Content-Type: text/plain; charset=utf-8
131 The following changes to the debian-maintainers keyring have just been activated:
135 Debian distribution maintenance software,
136 on behalf of the Keyring maintainers
# Dump override data into the plain-text files published under indices/.
143 function overrides() {
144 log "Writing overrides into text files"
# Rebuild the combined sid override file from the per-component ones.
149 rm -f override.sid.all3
150 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
156 log "Generating package / file mapping"
# One package->file map per public archive, bzip2-compressed into indices/.
157 for archive in "${public_archives[@]}"; do
158 archiveroot="$(get_archiveroot "${archive}")"
159 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages/Sources and Contents indices for every public archive.
163 function packages() {
164 log "Generating Packages and Sources files"
165 for archive in "${public_archives[@]}"; do
166 dak generate-packages-sources2 -a "${archive}"
167 dak contents generate -a "${archive}"
# NOTE(review): excerpt — the done/} closings and the function headers of the
# pdiff and Release steps below are not visible here.
172 log "Generating pdiff files"
173 dak generate-index-diffs
177 log "Generating Release files"
178 for archive in "${public_archives[@]}"; do
179 dak generate-releases -a "${archive}"
# Expire old packages/files from the suites and the unchecked queue.
183 function dakcleanup() {
184 log "Cleanup old packages/files"
185 dak clean-suites -m 10000
186 dak clean-queues -i "$unchecked"
# NOTE(review): fragment of the ls-lR generation step; ${FILENAME} is set on a
# line not visible in this excerpt — TODO confirm its value.
193 for archive in "${public_archives[@]}"; do
194 archiveroot="$(get_archiveroot "${archive}")"
197 log "Removing any core files ..."
198 find -type f -name core -print -delete
200 log "Checking symlinks ..."
203 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing reproducible regardless of the host timezone.
205 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Publish the Maintainers / Uploaders indices for every public archive.
209 function mkmaintainers() {
213 log 'Creating Maintainers index ... '
215 for archive in "${public_archives[@]}"; do
216 archiveroot="$(get_archiveroot "${archive}")"
217 indices="${archiveroot}/indices"
# Skip archives that publish no indices directory.
218 if ! [ -d "${indices}" ]; then
223 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
224 gzip -9v --rsyncable <Maintainers >Maintainers.gz
225 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Copy (gzipped) override files into the public indices directory.
229 function copyoverrides() {
230 log 'Copying override files into public view ...'
232 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,sid}.{,extra.}{main,contrib,non-free}*; do
# ${bname} is presumably the basename of ${ofile}, assigned on a line not
# visible in this excerpt — TODO confirm.
234 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
235 chmod g+w ${indices}/${bname}.gz
# Build the per-arch / per-suite file lists published under
# indices/files/components. NOTE(review): heavily fragmented excerpt — SQL
# query assignments, helper-function braces and several loop closings are not
# visible; code is kept byte-identical and comments are placed only at
# unambiguous shell positions.
239 function mkfilesindices() {
242 cd $base/ftp/indices/files/components
# Pull the full pool file list (with architecture) out of postgres.
246 log "Querying postgres"
248 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
250 JOIN files_archive_map af ON f.id = af.file_id
251 JOIN component c ON af.component_id = c.id
252 JOIN archive ON af.archive_id = archive.id
255 JOIN architecture a ON b.architecture = a.id)
257 WHERE archive.name = 'ftp-master'
258 ORDER BY path, arch_string
260 psql -At -c "$query" >$ARCHLIST
263 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
266 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
# Source file list: pool entries plus everything under dists/ .../source/.
269 log "Generating sources list"
271 sed -n 's/|$//p' $ARCHLIST
273 find ./dists -maxdepth 1 \! -type d
274 find ./dists \! -type d | grep "/source/"
275 ) | sort -u | gzip -9 > source.list.gz
# Per-architecture lists: arch-specific pool files, arch:all files, and the
# matching dists/ metadata.
277 log "Generating arch lists"
279 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
281 (sed -n "s/|$a$//p" $ARCHLIST
282 sed -n 's/|all$//p' $ARCHLIST
285 find ./dists -maxdepth 1 \! -type d
286 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
287 ) | sort -u | gzip -9 > arch-$a.list.gz
# Per-suite lists: union of source files (incl. extra source references) and
# binary files associated with each suite id.
290 log "Generating suite lists"
293 local suite_id="$(printf %d $1)"
296 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
298 (SELECT sa.source AS source
299 FROM src_associations sa
300 WHERE sa.suite = $suite_id
303 FROM extra_src_references esr
304 JOIN bin_associations ba ON esr.bin_id = ba.bin
305 WHERE ba.suite = $suite_id
307 SELECT b.source AS source
308 FROM bin_associations ba
309 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
310 JOIN dsc_files df ON s.source = df.source
311 JOIN files f ON df.file = f.id
312 JOIN files_archive_map af ON f.id = af.file_id
313 JOIN component c ON af.component_id = c.id
314 JOIN archive ON af.archive_id = archive.id
315 WHERE archive.name = 'ftp-master'
317 psql -F' ' -A -t -c "$query"
320 SELECT './pool/' || c.name || '/' || f.filename
321 FROM bin_associations ba
322 JOIN binaries b ON ba.bin = b.id
323 JOIN files f ON b.file = f.id
324 JOIN files_archive_map af ON f.id = af.file_id
325 JOIN component c ON af.component_id = c.id
326 JOIN archive ON af.archive_id = archive.id
327 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
329 psql -F' ' -A -t -c "$query"
# Iterate all suites known to the database that exist on disk; resolve
# codename symlinks so aliased suites share one listing.
332 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
333 while read id suite; do
334 [ -e $base/ftp/dists/$suite ] || continue
337 distname=$(cd dists; readlink $suite || echo $suite)
338 find ./dists/$distname \! -type d
339 for distdir in ./dists/*; do
340 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
344 ) | sort -u | gzip -9 > suite-${suite}.list.gz
# "Sundries" = everything on the ftp site not claimed by any generated list.
347 log "Finding everything on the ftp site to generate sundries"
348 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
351 zcat *.list.gz | cat - *.list | sort -u |
352 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
# Combine per-arch lists with the shared lists into the final files
# (poolfirst presumably orders pool/ entries first — TODO confirm).
354 log "Generating files list"
357 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
358 cat - sundries.list dists.list project.list docs.list indices.list |
359 sort -u | poolfirst > ../arch-$a.files
363 for dist in sid jessie; do
364 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
368 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz) |
369 sort -u | poolfirst > ../typical.files
# Build the md5sums index and dsync file list for each public archive.
376 function mkchecksums() {
377 local archiveroot dsynclist md5list
379 for archive in "${public_archives[@]}"; do
380 archiveroot="$(get_archiveroot "${archive}")"
381 dsynclist=$dbdir/dsync.${archive}.list
382 md5list=${archiveroot}/indices/md5sums
384 log -n "Creating md5 / dsync index file for ${archive}... "
387 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
388 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
# Hardlink duplicate files; best-effort, failures are deliberately ignored.
389 ${bindir}/dsync-flist -q link-dups $dsynclist || true
396 log "Regenerating \"public\" mirror/ hardlink fun"
397 DATE_SERIAL=$(date +"%Y%m%d01")
398 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
# Archive serial is whichever is larger: today's date-based serial or the
# previous trace-file serial plus one (else-branch line not visible here).
399 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
400 SERIAL="$DATE_SERIAL"
402 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file consumed by downstream mirrors.
404 date -u > ${TRACEFILE}
405 echo "Using dak v1" >> ${TRACEFILE}
406 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
407 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
409 # Ugly "hack", but hey, it does what we want.
410 cp ${TRACEFILE} ${TRACEFILE_BDO}
# Refresh the hardlinked mirror/ copy of every public archive;
# --link-dest keeps unchanged files as hardlinks into the archive root.
412 for archive in "${public_archives[@]}"; do
413 archiveroot="$(get_archiveroot "${archive}")"
414 mirrordir="${archiveroot}/../mirror"
416 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
421 log "Expiring old database dumps..."
423 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Remove transitions that no longer apply.
426 function transitionsclean() {
427 log "Removing out of date transitions..."
429 dak transitions -c -a
# Regenerate the Debian Maintainer permissions export.
433 log "Updating DM permissions page"
434 dak acl export-per-source dm >$exportdir/dm.txt
438 log "Categorizing uncategorized bugs filed against ftp.debian.org"
# Trigger the sync of the dd-accessible mirror (pool plus ftp dir).
442 function ddaccess() {
443 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
444 log "Trigger dd accessible parts sync including ftp dir"
445 ${scriptsdir}/sync-dd ries-sync ries-sync1 ries-sync2 pool
# Verify the public mirror copy against the SHA1 section of each InRelease
# file, then kick off the mirror push. NOTE(review): excerpt — some fi/done
# lines and the initialisation of ${broken} are not visible here.
448 function mirrorpush() {
449 log "Checking the public archive copy"
450 cd ${mirrordir}/dists
# For every InRelease, walk its SHA1 entries (extracted by the sed pair on
# the done-line below) and verify presence, size and checksum of each file.
453 for release in $(find . -name "InRelease"); do
454 echo "Processing: ${release}"
455 subdir=${release%/InRelease}
456 while read SHASUM SIZE NAME; do
457 if ! [ -f "${subdir}/${NAME}" ]; then
458 bname=$(basename ${NAME})
459 if [[ "${bname}" =~ ^(Packages|Sources|Translation-[a-zA-Z_]+)$ ]]; then
460 # We don't keep unpacked files, don't check for their existence.
461 # We might want to go and check their unpacked shasum, but right now
462 # I don't care. I believe it should be enough if all the packed shasums
466 broken=$(( broken + 1 ))
467 echo "File ${subdir}/${NAME} is missing"
471 # We do have symlinks in the tree (see the contents files currently).
472 # So we use "readlink -f" to check the size of the target, as that's basically
473 # what gen-releases does
474 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
475 if [ ${fsize} -ne ${SIZE} ]; then
476 broken=$(( broken + 1 ))
477 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
481 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
482 fshasum=${fshasum%% *}
483 if [ "${fshasum}" != "${SHASUM}" ]; then
484 broken=$(( broken + 1 ))
485 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
488 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
# Abort the push if any file failed verification.
491 if [ $broken -gt 0 ]; then
492 log_error "Trouble with the public mirror, found ${broken} errors"
496 log "Starting the mirrorpush"
497 date -u > /srv/ftp.debian.org/web/mirrorstart
498 echo "Using dak v1" >> /srv/ftp.debian.org/web/mirrorstart
499 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/mirrorstart
# Fire both mirror runs (main and backports) in the background.
500 sudo -H -u archvsync /home/archvsync/runmirrors > ~dak/runmirrors.log 2>&1 &
501 sudo -H -u archvsync /home/archvsync/runmirrors -a backports > ~dak/runmirrorsbpo.log 2>&1 &
505 log "Exporting package data foo for i18n project"
# Export per-suite control data into a timestamped directory and sign the
# timestamp so the i18n side can verify it (cf. the i18n sync step above).
506 STAMP=$(date "+%Y%m%d%H%M")
507 mkdir -p ${scriptdir}/i18n/${STAMP}
508 cd ${scriptdir}/i18n/${STAMP}
509 for suite in stable testing unstable; do
510 codename=$(dak admin s show ${suite}|grep '^Codename')
511 codename=${codename##* }
512 echo "Codename is ${codename}"
513 dak control-suite -l ${suite} >${codename}
515 echo "${STAMP}" > timestamp
516 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Point the stable "i18n" symlink at the new export, then prune exports
# older than two days (keeping the one just created).
520 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
523 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
527 log "Updating stats data"
# Regenerate ftpstats raw data, feed it to R for graphs, and refresh the
# archive/package statistics pages.
529 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
530 R --slave --vanilla < $base/misc/ftpstats.R
531 dak stats arch-space > $webdir/arch-space
532 dak stats pkg-nums > $webdir/pkg-nums
# Delete transaction-id files older than ~3 months.
535 function cleantransactions() {
536 log "Cleanup transaction ids older than 3 months"
538 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -print0 | xargs -0 --no-run-if-empty rm
# Feed a dinstall logfile to the stats tool ($1 = path to logfile).
541 function logstats() {
542 $masterdir/tools/logs.py "$1"
545 # save timestamp when we start
546 function savetimestamp() {
547 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
548 echo ${NOW} > "${dbdir}/dinstallstart"
# Mail the complete dinstall log to the cron alias.
551 function maillogfile() {
552 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" cron@ftp-master.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
# Archive the current logfile under the run's start timestamp, run log
# statistics when that timestamp is known, and compress the result.
555 function renamelogfile() {
556 if [ -f "${dbdir}/dinstallstart" ]; then
557 NOW=$(cat "${dbdir}/dinstallstart")
559 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
560 logstats "$logdir/dinstall_${NOW}.log"
561 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback: no recorded start time, so stats cannot be computed.
563 error "Problem, I don't know when dinstall started, unable to do log statistics."
564 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
566 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
567 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages currently in testing.
571 function testingsourcelist() {
572 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
575 # do a last run of process-unchecked before dinstall is on.
576 function process_unchecked() {
577 log "Processing the unchecked queue"
# "-p" presumably tells the queue run to skip taking the usual lock
# (dinstall already holds it) — TODO confirm against the queue script.
578 UNCHECKED_WITHOUT_LOCK="-p"
583 # Function to update a "statefile" telling people what we are doing
586 # This should be called with the argument(s)
587 # - Status name we want to show.
# NOTE(review): excerpt — the function header and the here-document terminator
# are not visible; nothing may be inserted after the <<EOF opener below, as
# that text becomes statefile content.
590 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
591 cat >"${DINSTALLSTATE}" <<EOF
592 Dinstall start: ${DINSTALLBEGIN}
594 Action start: ${RIGHTNOW}
598 # extract changelogs and stuff
599 function changelogs() {
# Only run when we can take the changelog lock (up to 3 retries).
600 if lockfile -r3 $LOCK_CHANGELOG; then
601 log "Extracting changelogs"
602 dak make-changelog -e -a ftp-master
603 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
604 mkdir -p ${exportpublic}/changelogs
605 cd ${exportpublic}/changelogs
606 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Push the metadata component out via the static mirror, in the background.
607 sudo -H -u staticsync /usr/local/bin/static-update-component ftp-master.metadata.debian.org >/dev/null 2>&1 &
# Same export/publish dance for backports.
609 dak make-changelog -e -a backports
610 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
611 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
612 cd /srv/backports-master.debian.org/rsync/export/changelogs
613 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
# NOTE(review): the lock is removed and then a trap is (re)installed; with
# lines missing from this excerpt the intended trap ordering cannot be
# verified here — confirm against the full script.
614 remove_changelog_lock
615 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Keep a git history of the pdiff-relevant index files.
619 function gitpdiff() {
620 # Might be that we want to change this to have more than one git repository.
621 # Advantage of one is that we do not need much space in terms of storage in git itself,
622 # git gc is pretty good on our input.
623 # But it might be faster. Well, lets test.
624 log "Adjusting the git tree for pdiffs"
627 # The regex needs the architectures separated with \|
628 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
630 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
632 # Also, we only want contents, packages and sources.
# NOTE(review): the find pattern uses $archs while the variable built above is
# $garchs — with lines missing from this excerpt it is unclear whether $archs
# is set elsewhere; verify against the full script.
633 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
642 # Second, add all there is into git
645 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
647 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): commit message references ${COMD}, whose assignment is not
# visible in this excerpt — TODO confirm.
648 git commit -m "Commit of ${COMD}"