#!/usr/bin/perl -w
#
# make_disc_trees
#
# From the list of packages we have, lay out the CD trees

use strict;
use Digest::MD5;
use Digest::SHA;
use File::stat;
use File::Find;
use File::Path qw(make_path remove_tree);
use File::Basename;
use Compress::Zlib;
use File::Slurp;

my %pkginfo;
my ($basedir, $mirror, $tdir, $codename, $archlist, $mkisofs, $maxcds,
    $maxisos, $maxjigdos, $extranonfree, $nonfree, $contrib, $use_local);
my $mkisofs_base_opts = "";
my $mkisofs_opts = "";
my $mkisofs_dirs = "";
my (@arches, @arches_nosrc, @overflowlist, @pkgs_added, @nonfree_components);
my (@exclude_packages, @unexclude_packages, @excluded_package_list);
my %firmware_package;
my $current_checksum_type = "";
my %descriptions;

undef @pkgs_added;
undef @exclude_packages;
undef @unexclude_packages;
undef @excluded_package_list;

$basedir = shift;
$mirror = shift;
$tdir = shift;
$codename = shift;
$archlist = shift;
$mkisofs = shift;
$mkisofs_base_opts = shift;

require "$basedir/tools/link.pl";

my $iso_blksize = 2048;
my $log_opened = 0;
my $old_split = $/;
my $security = read_env('SECURITY', $mirror);
my $localdebs = read_env('LOCALDEBS', $mirror);
my $symlink_farm = read_env('SYMLINK', 0);
my $link_verbose = read_env('VERBOSE', 0);
my $link_copy = read_env('COPYLINK', 0);
my $backports_list = read_env('BACKPORTS', "");
my $backports = 1;
if ($backports_list =~ /^$/) {
    $backports = 0;
}

my $disktype = read_env('DISKTYPE', "");
if ($disktype =~ /^$/) {
    die "make_disc_trees.pl: DISKTYPE not set, aborting\n";
}

# MAXCDS is the hard limit on the MAXIMUM number of images to
# make. MAXJIGDOS and MAXISOS can only make this number smaller; we
# will use the higher of those 2 numbers as the last image to go to,
# if they're set
$maxcds = read_env('MAXCDS', 9999);

$maxisos = read_env('MAXISOS', 9999);
if ($maxisos =~ 'ALL' || $maxisos =~ 'all') {
    $maxisos = 9999;
}

$maxjigdos = read_env('MAXJIGDOS', 9999);
if ($maxjigdos =~ 'ALL' || $maxjigdos =~ 'all') {
    $maxjigdos = 9999;
}

if ($maxisos > $maxjigdos) {
    $maxjigdos = $maxisos;
}

if ($maxjigdos > $maxisos) {
    $maxisos = $maxjigdos;
}

if ($maxisos < $maxcds) {
    $maxcds = $maxisos;
}

$extranonfree = read_env('EXTRANONFREE', 0);
@nonfree_components = split /\ /, read_env('NONFREE_COMPONENTS', 'non-free');
$nonfree = read_env('NONFREE', 0);
$contrib = read_env('CONTRIB', 0);
$use_local = read_env('LOCAL', 0);

my $dep11 = read_env('DEP11', 1);

my $list = "$tdir/list";
my $bdir = "$tdir/$codename";
my $log = "$bdir/make_disc_tree.log";
open(LOG, ">> $log") or die ("Can't open logfile $log for writing: $!\n");

# Print out the details of genisoimage/xorriso etc.
my $mkisofs_version = `$mkisofs -version`;
print "$mkisofs -version says:\n$mkisofs_version\n";
print LOG "$mkisofs -version says:\n$mkisofs_version\n";

foreach my $arch (split(' ', $archlist)) {
    push(@arches, $arch);
    if (! ($arch eq "source")) {
        push(@arches_nosrc, $arch);
    }
    # Pre-cache all the package information that we need
    load_packages_cache($arch);
}

load_all_descriptions(0);
if ($backports) {
    load_all_descriptions(1);
}

my $disknum = 1;
my $max_done = 0;
my $size_check = "";

# Constants used for space calculations
my $MiB = 1048576;
my $MB = 1000000;
my $GB = 1000000000;
my $blocksize = 2048;
my ($maxdiskblocks, $diskdesc);
my $cddir;

my $size_swap_check;
my $hfs_extra = 0;
my $hfs_mult = 1;

# And count how many packages added since the last size check was done
# - the estimation code is getting very accurate, so let's reduce the
# number of times we fork mkisofs
my $count_since_last_check = 0;
my $size_check_period = 10;

my $pkgs_this_cd = 0;
my $pkgs_done = 0;
my $size = 0;
my $guess_size = 0;
my @overflowpkg;
my $mkisofs_check = "$mkisofs $mkisofs_base_opts -r -print-size -quiet";
my $debootstrap_script = read_env('DEBOOTSTRAP_SCRIPT', "");

chdir $bdir;

# Size calculation is slightly complicated:
#
# 1. At the start, ask mkisofs for a size so far (including all the
#    stuff in the initial tree like docs and boot stuff)
#
# 2. After that, add_packages will tell us the sizes of the files it
#    has added. This will not include directories / metadata so is
#    only a rough guess, but it's a _cheap_ guess
#
# 3. Once we get >90% of the max size we've been configured with,
#    start asking mkisofs after each package addition. This will
#    be slow, but we want to be exact at the end

$cddir = "$bdir/CD$disknum";
get_disc_size();
# Space calculation for extra HFS crap
if ($archlist =~ /m68k/ || $archlist =~ /powerpc/) {
    $hfs_mult = 1.2;
    $hfs_extra = int($maxdiskblocks * 8 / $blocksize);
    print LOG "arches require HFS hybrid, multiplying sizes by $hfs_mult and marking $hfs_extra blocks for HFS use\n";
}

print "Starting to lay out packages into images:\n";

# Read in the list of packages that we're expecting to include
if (-e "$bdir/firmware-packages") {
    open(FWLIST, "$bdir/firmware-packages") or die "Unable to read firmware-packages file!\n";
    while (defined (my $pkg = <FWLIST>)) {
        chomp $pkg;
        $firmware_package{$pkg} = 1;
    }
    close(FWLIST);
}

open(INLIST, "$bdir/packages") or die "No packages file!\n";
while (defined (my $pkg = <INLIST>)) {
    chomp $pkg;
    $cddir = "$bdir/CD$disknum";
    my $opt;
    if (! -d $cddir) {
        if ($disknum > $maxcds) {
            print LOG "Disk $disknum is beyond the configured MAXCDS of $maxcds; exiting now...\n";
            $max_done = 1;
            $disknum--;
            last;
        }
        print LOG "Starting new disc $disknum at " . `date` . "\n";
        start_disc();
        print LOG "  Specified size: $diskdesc, $maxdiskblocks 2K-blocks maximum\n";
        print "  Placing packages into image $disknum\n";
        if ( -e "$bdir/$disknum.mkisofs_opts" ) {
            open(OPTS, "<$bdir/$disknum.mkisofs_opts");
            while (defined($opt = <OPTS>)) {
                chomp $opt;
                $mkisofs_opts = "$mkisofs_opts $opt";
            }
            close(OPTS);
        } else {
            $mkisofs_opts = "";
        }
        if ($disknum <= $maxjigdos) {
            # Set things to /dev/null - we're only doing a
            # sizing run here
            $mkisofs_opts = "$mkisofs_opts -o /dev/null";
        }
        if ( -e "$bdir/$disknum.mkisofs_dirs" ) {
            open(OPTS, "<$bdir/$disknum.mkisofs_dirs");
            while (defined($opt = <OPTS>)) {
                chomp $opt;
                $mkisofs_dirs = "$mkisofs_dirs $opt";
            }
            close(OPTS);
        } else {
            $mkisofs_dirs = "";
        }

        $size_check = "$mkisofs_check $mkisofs_opts $mkisofs_dirs";
        $size = `$size_check $cddir`;
        chomp $size;
        $size += $hfs_extra;
        print LOG "CD $disknum: size is $size before starting to add packages\n";

        $pkgs_this_cd = 0;

        # If we have some unexcludes for this disc and have already
        # previously excluded some packages, check now if the two
        # lists intersect and we should re-include some packages
        if (scalar @unexclude_packages && scalar @excluded_package_list) {
            foreach my $reinclude_pkg (@excluded_package_list) {
                my ($arch, $component, $pkgname, $pkgsize) = split /:/, $reinclude_pkg;
                foreach my $entry (@unexclude_packages) {
                    if (($pkgname =~ /^\Q$entry\E$/m)) {
                        print LOG "Re-including $reinclude_pkg due to match on \"\^$entry\$\"\n";
                        $guess_size = int($hfs_mult * add_packages($cddir, $reinclude_pkg));
                        if ($guess_size > $maxdiskblocks) {
                            print LOG "$reinclude_pkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
                            die "$reinclude_pkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
                        }
                        $size += $guess_size;
                        print LOG "CD $disknum: GUESS_TOTAL is $size after adding $reinclude_pkg\n";
                        $pkgs_this_cd++;
                        $pkgs_done++;
                        push (@pkgs_added, $entry);
                    }
                }
            }
        }
        while (scalar @overflowlist) {
            my $overflowpkg = pop @overflowlist;
            print LOG "Adding a package that failed on the last disc: $overflowpkg\n";
            $guess_size = int($hfs_mult * add_packages($cddir, $overflowpkg));
            if ($guess_size > $maxdiskblocks) {
                print LOG "$overflowpkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
                die "$overflowpkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
            }
            $size += $guess_size;
            print LOG "CD $disknum: GUESS_TOTAL is $size after adding $overflowpkg\n";
            $pkgs_this_cd++;
            $pkgs_done++;
            push (@pkgs_added, $overflowpkg);
        }
    } # end of creating new CD dir

    if (should_exclude_package($pkg)) {
        push(@excluded_package_list, $pkg);
    } elsif (should_start_extra_nonfree($pkg)) {
        print LOG "Starting on extra non-free image(s)\n";
        finish_disc($cddir, "");
        # And reset, to start the next disc
        $size = 0;
        $disknum++;
        undef(@pkgs_added);
        # Put this package first on the next disc
        push (@overflowlist, $pkg);
    } else {
        $guess_size = int($hfs_mult * add_packages($cddir, $pkg));
        $size += $guess_size;
        if ($guess_size > $maxdiskblocks) {
            print LOG "$pkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
            die "$pkg will never fit - it is $guess_size blocks against a disk size of $maxdiskblocks. ABORT\n";
        }
        push (@pkgs_added, $pkg);
        print LOG "CD $disknum: GUESS_TOTAL is $size after adding $pkg\n";
        if (($size > $maxdiskblocks) ||
            (($size > $size_swap_check) &&
             ($count_since_last_check > $size_check_period))) {
            $count_since_last_check = 0;
            # Recompress files as needed before the size check
            find (\&recompress, "$cddir/dists");
            print LOG "Running $size_check $cddir\n";
            $size = `$size_check $cddir`;
            chomp $size;
            print LOG "CD $disknum: Real current size is $size blocks after adding $pkg\n";
        }
        if ($size > $maxdiskblocks) {
            while ($size > $maxdiskblocks) {
                $pkg = pop(@pkgs_added);
                print LOG "CD $disknum over-full ($size > $maxdiskblocks). Rollback!\n";
                $guess_size = int($hfs_mult * add_packages("--rollback", $cddir, $pkg));
                # Recompress files as needed before the size check
                find (\&recompress, "$cddir/dists");
                $size = `$size_check $cddir`;
                chomp $size;
                print LOG "CD $disknum: Real current size is $size blocks after rolling back $pkg\n";
                # Put this package first on the next disc
                push (@overflowlist, $pkg);
            }
            # Special-case for source-only discs where we don't care
            # about the ordering. If we're doing a source-only build
            # and we've overflowed, allow us to carry on down the list
            # for a while to fill more space. Stop when we've skipped
            # 5 packages (arbitrary choice of number!) #613751
            if (!($archlist eq "source") or (scalar @overflowlist >= 5)) {
                finish_disc($cddir, "");
                # And reset, to start the next disc
                $size = 0;
                $disknum++;
                undef(@pkgs_added);
            } else {
                print LOG "SOURCE DISC: continuing on to see if anything else will fit, " . scalar @overflowlist . " packages on the overflow list at this point\n";
            }
        } else {
            $pkgs_this_cd++;
            $pkgs_done++;
            $count_since_last_check++;
        }
    }
}
close(INLIST);

if ($max_done == 0) {
    finish_disc($cddir, " (not)");
}

print LOG "Finished: $pkgs_done packages placed\n";

last_minute_update($disknum);

print "Finished: $pkgs_done packages placed\n";
system("date >> $log");

close(LOG);

#############################################
#
# Local helper functions
#
#############################################

# Only once we know how many CDs we're making can we fill in the
# TOTALNUM number in README.{html,txt} (and therefore also update the
# md5sum.txt entries for those files)
sub last_minute_update {
    my $total_disks = shift;

    for (my $disknum = 1; $disknum <= $total_disks; $disknum++) {
        my $cddir = "$bdir/CD$disknum";

        chdir $cddir;

        for my $upd ("README.html", "README.txt") {
            my $buf = "";
            print LOG "  Last-minute update of $cddir/$upd\n";
            open(README, "< $upd") or
                die "Failed to open $upd for reading: $!\n";
            while (defined (my $line = <README>)) {
                $line =~ s/TOTALNUM/$total_disks/g;
                $buf .= $line;
            }
            close(README);
            open(README, "> $upd") or
                die "Failed to open $upd for writing: $!\n";
            print README $buf;
            close(README);
        }

        # Now update the md5sums.txt for the README files
        open(MD5LIST, ">> md5sum.txt") or
            die "Failed to open md5sum.txt file: $!\n";
        foreach my $file (glob "./README.*") {
            my ($md5, $size) = checksum_file($file, "md5");
            printf MD5LIST "%s  %s\n", $md5, $file;
        }
        close(MD5LIST);
        chdir $bdir;
    }
}

# Load up information about all the packages
sub load_packages_cache {
    my $arch = shift;
    my @pkglist;
    my @tmplist;
    my ($p);
    my $num_pkgs = 0;

    $ENV{'LC_ALL'} = 'C'; # Required since apt is now translated
    $ENV{'ARCH'} = $arch;

    open(INLIST, "$bdir/packages.$arch")
        or die "No packages file $bdir/packages.$arch for $arch!\n";

    while (defined (my $pkg = <INLIST>)) {
        chomp $pkg;
        my ($junk, $component, $pkgname, $pkgsize) = split /:/, $pkg;
        push @tmplist, $pkgname;
    }
    close INLIST;

    print "Reading in package information for $arch:\n";
    print LOG "Reading in package information for $arch:\n";

    $/ = ''; # Browse by paragraph
    @pkglist = (grep (!/\/$codename-backports$/, @tmplist));
    while (@pkglist) {
        my (@pkg) = splice(@pkglist,0,200);
        if ($arch eq "source") {
            open (LIST, "$basedir/tools/apt-selection cache showsrc @pkg |")
                || die "Can't fork : $!\n";
        } else {
            open (LIST, "$basedir/tools/apt-selection cache show @pkg |")
                || die "Can't fork : $!\n";
        }
        while (defined($_ = <LIST>)) {
            m/^Package: (\S+)/m and $p = $1;
            push @{$pkginfo{$arch}{$p}}, $_;
            $num_pkgs++;
        }
        close LIST;
        print LOG "load_packages_cache: Read details of $num_pkgs packages for $arch\n";
    }
    print "  Done: Read details of $num_pkgs packages for $arch\n";
    if ($backports) {
        $num_pkgs = 0;
        @pkglist = (grep (/\/$codename-backports$/, @tmplist));
        while (@pkglist) {
            my (@pkg) = splice(@pkglist,0,200);
            if ($arch eq "source") {
                open (LIST, "USE_BP=1 $basedir/tools/apt-selection cache showsrc @pkg |")
                    || die "Can't fork : $!\n";
            } else {
                open (LIST, "USE_BP=1 $basedir/tools/apt-selection cache show @pkg |")
                    || die "Can't fork : $!\n";
            }
            while (defined($_ = <LIST>)) {
                m/^Package: (\S+)/m and $p = $1;
                push @{$pkginfo{$arch}{"$p/$codename-backports"}}, $_;
                $num_pkgs++;
            }
            close LIST;
            print LOG "load_packages_cache: Read details of $num_pkgs packages for $arch backports\n";
        }
        print "  Done: Read details of $num_pkgs packages for $arch backports\n";
    }
    $/ = $old_split; # Browse by line again
}

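# Load the translated package descriptions for each of the components
# we're using (and, if requested, for their backports equivalents)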
sub load_all_descriptions {
    my $use_backports = shift;
    if (! ($archlist eq "source")) {
        load_descriptions("main", $use_backports);
        if ($contrib) {
            load_descriptions("contrib", $use_backports);
        }
        if ($nonfree || $extranonfree) {
            foreach my $component (@nonfree_components) {
                load_descriptions($component, $use_backports);
            }
        }
        if ($use_local) {
            load_descriptions("local", $use_backports);
        }
    }
}

# Load all the translated descriptions we can find
sub load_descriptions {
    my $suite = shift;
    my $use_backports = shift;
    my $lang;
    my $dh;
    my ($p);
    my $num_total = 0;
    my $num_files = 0;
    my $dir = "$mirror/dists/$codename/$suite/i18n";
    if ($use_backports) {
        $dir = "$mirror/dists/$codename-backports/$suite/i18n";
    }
    if ($suite =~ /local/) {
        $dir = "$localdebs/dists/$codename/$suite/i18n";
        if ($use_backports) {
            $dir = "$localdebs/dists/$codename-backports/$suite/i18n";
        }
    }
    my @files;

    if ($use_backports) {
        $suite = "$suite backports";
    }

    if (-d $dir) {
        print "Reading in translated package descriptions for $suite:\n";
        print LOG "Reading in translated package descriptions for $suite:\n";
        opendir($dh, $dir) || die "can't opendir $dir: $!\n";
        @files = readdir($dh);
        $/ = ''; # Browse by paragraph
        foreach my $file (@files) {
            if ($file =~ /Translation-(.*).bz2/) {
                my $num_descs = 0;
                $lang = $1;
                open(BZ, "bzip2 -cd $dir/$file |") ||
                    die "can't open description file $dir/$file for reading: $!\n";
                $num_files++;
                print LOG "  Parsing $file\n";
                while (defined($_ = <BZ>)) {
                    m/^Package: (\S+)/m and $p = $1;
                    if ($use_backports) {
                        $p = "$p/$codename-backports";
                    }
                    $descriptions{"$lang"}{$p}{"data"} = $_;
                    $descriptions{"$lang"}{$p}{"used"} = 0;
                    $num_descs++;
                    $num_total++;
                }
                close(BZ);
                print LOG "    $num_descs descriptions\n";
            }
        }
        $/ = $old_split; # Browse by line again
        print "  Done: read $num_total entries for $num_files languages\n";
        print LOG "  Done: read $num_total entries for $num_files languages\n";
    } else {
        print "WARNING: no translated descriptions found for $codename/$suite\n";
    }
}

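# Return 1 (once only) when we see the first package from one of the
# non-free components and EXTRANONFREE is set, i.e. when it's time to
# start the separate extra non-free image(s)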
sub should_start_extra_nonfree {
    my $pkg = shift;
    my ($arch, $component, $pkgname, $pkgsize) = split /:/, $pkg;

    if ($extranonfree) {
        foreach my $nf_comp (@nonfree_components) {
            if ($component eq $nf_comp) {
                $extranonfree = 0; # Flag that we don't need to start new next time!
                return 1;
            }
        }
    }
    return 0;
}

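# Check a package against the exclude list; bail out if it is also
# matched by the unexclude list for this disc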
sub should_exclude_package {
    my $pkg = shift;
    my ($arch, $component, $pkgname, $pkgsize) = split /:/, $pkg;
    my $should_exclude = 0;

    foreach my $entry (@exclude_packages) {
        if (($pkgname =~ /^\Q$entry\E$/m)) {
            print LOG "Excluding $pkg due to match on \"\^$entry\$\"\n";
            $should_exclude++;
        }
    }

    if ($should_exclude) {
        # Double-check that we're not being asked to include *and*
        # exclude the package at the same time. If so, complain and
        # bail out
        foreach my $entry (@unexclude_packages) {
            if (($pkgname =~ /^\Q$entry\E$/m)) {
                print LOG "But ALSO asked to unexclude $pkg due to match on \"\^$entry\$\"\n";
                print LOG "Make your mind up! Bailing out...\n";
                die "Incompatible exclude/unexclude entries for $pkg...\n";
            }
        }
        return 1;
    }
    return 0;
}

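# Check that every package debootstrap needs for $arch is present in the
# Packages files on this disc; returns the number of missing packages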
sub check_base_installable {
    my $arch = shift;
    my $cddir = shift;
    my $ok = 0;
    my (%on_disc, %exclude);
    my $packages_file = "$cddir/dists/$codename/main/binary-$arch/Packages";
    my $p;
    my $db_error = 0;
    my $error_string = "";

    open (PLIST, $packages_file)
        || die "Can't open Packages file $packages_file : $!\n";
    while (defined($p = <PLIST>)) {
        chomp $p;
        $p =~ m/^Package: (\S+)/ and $on_disc{$1} = $1;
    }
    close PLIST;

    $packages_file = "$cddir/dists/$codename/local/binary-$arch/Packages";
    if (open (PLIST, $packages_file)) {
        while (defined($p = <PLIST>)) {
            chomp $p;
            $p =~ m/^Package: (\S+)/ and $on_disc{$1} = $1;
        }
        close PLIST;
    }

    if (defined($ENV{'BASE_EXCLUDE'})) {
        open (ELIST, $ENV{'BASE_EXCLUDE'})
            || die "Can't open base_exclude file $ENV{'BASE_EXCLUDE'} : $!\n";
        while (defined($p = <ELIST>)) {
            chomp $p;
            $exclude{$p} = $p;
        }
        close ELIST;
    }

    open (DLIST, "debootstrap --arch $arch --print-debs $codename $tdir/debootstrap_tmp file:$mirror $debootstrap_script 2>/dev/null | grep -v ^I: | tr ' ' '\n' |")
        || die "Can't fork debootstrap : $!\n";
    while (defined($p = <DLIST>)) {
        if ($p =~ m/^E:/) {
            $db_error = 1;
        }
        chomp $p;
        if ($db_error) {
            $error_string = "$error_string $p";
        } else {
            if (length $p > 1) {
                if (!defined($on_disc{$p})) {
                    if (defined($exclude{$p})) {
                        print LOG "Missing debootstrap-required $p but included in $ENV{'BASE_EXCLUDE'}\n";
                    } else {
                        $ok++;
                        print LOG "Missing debootstrap-required $p\n";
                    }
                }
            }
        }
    }
    close DLIST;
    if ($db_error) {
        print LOG "Debootstrap reported error: $error_string\n";
        die "Debootstrap reported error: $error_string\n";
    }
    remove_tree("$tdir/debootstrap_tmp");
    return $ok;
}

# If missing, create an empty local Packages file for an architecture.
# Only create an uncompressed Packages file; the call to recompress will
# create the compressed version.
sub add_missing_Packages {
    my ($filename);

    $filename = $File::Find::name;

    if ((-d "$_") && ($filename =~ m/\/main\/binary-[^\/]*$/)) {
        if (-f "$_/Packages") {
            if (! -d "../local/$_/") {
                mkdir "../local/$_/" || die "Error creating directory local/$_: $!\n";
            }
            if ( ! -f "../local/$_/Packages" ) {
                open(LPFILE, ">../local/$_/Packages") or die "Error creating local/$_/Packages: $!\n";
                close LPFILE;
                print "  Created empty Packages file for local/$_\n";
            }
        }
    }
}

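# Return the requested checksum (md5 or shaNNN) of a file along with its size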
sub checksum_file {
    my $filename = shift;
    my $alg = shift;
    my ($checksum, $st);

    open(CHECKFILE, $filename) or die "Can't open '$filename': $!\n";
    binmode(CHECKFILE);
    if ($alg eq "md5") {
        $checksum = Digest::MD5->new->addfile(*CHECKFILE)->hexdigest;
    } elsif ($alg =~ /^sha\d+$/) {
        $checksum = Digest::SHA->new($alg)->addfile(*CHECKFILE)->hexdigest;
    } else {
        die "checksum_file: unknown algorithm $alg!\n";
    }
    close(CHECKFILE);
    $st = stat($filename) || die "Stat error on '$filename': $!\n";
    return ($checksum, $st->size);
}

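# File::Find helper: remove the uncompressed Packages/Sources/Translation
# files once compressed copies have been generated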
sub remove_uncompressed {
    my ($filename);

    $filename = $File::Find::name;

    if ($filename =~ m/\/.*\/(Packages|Sources)$/o ||
        $filename =~ m/\/.*\/i18n\/(Translation-[_a-zA-Z]+)$/o)
    {
        unlink($_) or die "Failed to remove $_: $!\n";
    }
}

sub recompress {
    # Recompress various files
    my ($filename);

    $filename = $File::Find::name;

    # Packages and Sources files; workaround for bug #402482
    if ($filename =~ m/\/.*\/(Packages|Sources)$/o) {
        system("rm -f $_.gz");
        system("pigz -9nmc < $_ >$_.gz");
    }
    # Translation files need to be compressed in .gz format on CD?
    if ($filename =~ m/\/.*\/i18n\/(Translation.*)$/o &&
        ! ($filename =~ m/\/.*\/i18n\/(Translation.*gz)$/o)) {
        system("rm -f $_.gz");
        system("pigz -9nmc < $_ >$_.gz");
        system("rm -f $_");
    }
}

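# File::Find helper: checksum the index files with $current_checksum_type
# and append the entries to the open RELEASE filehandle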
sub find_and_checksum_files_for_release {
    my ($checksum, $size, $filename);

    $filename = $File::Find::name;

    if ($filename =~ m/\/.*\/(Packages|Sources|Release|Translation)/o) {
        $filename =~ s/^\.\///g;
        ($checksum, $size) = checksum_file($_, $current_checksum_type);
        printf RELEASE " %s %8d %s\n", $checksum, $size, $filename;
    }
}

sub checksum_files_for_release {
    # ICK: no way to pass arguments to the
    # find_and_checksum_files_for_release() function that I can see,
    # so using a global here...
    print RELEASE "MD5Sum:\n";
    $current_checksum_type = "md5";
    find (\&find_and_checksum_files_for_release, ".");
    print RELEASE "SHA1:\n";
    $current_checksum_type = "sha1";
    find (\&find_and_checksum_files_for_release, ".");
    print RELEASE "SHA256:\n";
    $current_checksum_type = "sha256";
    find (\&find_and_checksum_files_for_release, ".");
    print RELEASE "SHA512:\n";
    $current_checksum_type = "sha512";
    find (\&find_and_checksum_files_for_release, ".");
}

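# File::Find helper: append an md5sum.txt entry for every regular file found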
sub md5_files_for_md5sum {
    my ($md5, $size, $filename);

    $filename = $File::Find::name;
    if (-f $_) {
        ($md5, $size) = checksum_file($_, "md5");
        printf MD5LIST "%s  %s\n", $md5, $filename;
    }
}

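# Work out $maxdiskblocks and $diskdesc for the configured disk type,
# honouring any RESERVED_BLOCKS_HOOK output and FORCE_CD_SIZE* over-rides,
# and set the point at which we switch from estimates to real size checks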
sub get_disc_size {
    my $hook;
    my $error = 0;
    my $reserved = 0;
    my $chosen_disk = $disktype;
    my $disk_size_hack = "";

    if (defined($ENV{'RESERVED_BLOCKS_HOOK'})) {
        $hook = $ENV{'RESERVED_BLOCKS_HOOK'};
        print "  Calling reserved_blocks hook: $hook\n";
        $reserved = `$hook $tdir $mirror $disknum $cddir \"$archlist\"`;
        chomp $reserved;
        if ($reserved eq "") {
            $reserved = 0;
        }
        print "  Reserving $reserved blocks on CD $disknum\n";
    }

    # See if we've been asked to switch sizes for the whole set
    $disk_size_hack = read_env('FORCE_CD_SIZE', "");
    if ($disk_size_hack) {
        print LOG "HACK HACK HACK: FORCE_CD_SIZE found:\n";
        print LOG "  forcing use of a $disk_size_hack disk instead of $chosen_disk\n";
        $chosen_disk = $disk_size_hack;
    }

    # If we're asked to do a specific size for *this* disknum, over-ride again
    $disk_size_hack = read_env("FORCE_CD_SIZE$disknum", "");
    if ($disk_size_hack) {
        print LOG "HACK HACK HACK: FORCE_CD_SIZE$disknum found:\n";
        print LOG "  forcing use of a $disk_size_hack disk instead of $chosen_disk\n";
        $chosen_disk = $disk_size_hack;
    }

    # Calculate the maximum number of 2K blocks in the output images
    if ($chosen_disk eq "BC") {
        $maxdiskblocks = int(680 * $MB / $blocksize) - $reserved;
        $diskdesc = "businesscard";
    } elsif ($chosen_disk eq "NETINST") {
        $maxdiskblocks = int(680 * $MB / $blocksize) - $reserved;
        $diskdesc = "netinst";
    } elsif ($chosen_disk =~ /CD$/) {
        $maxdiskblocks = int(680 * $MB / $blocksize) - $reserved;
        $diskdesc = "650MiB CD";
    } elsif ($chosen_disk eq "CD700") {
        $maxdiskblocks = int(737 * $MB / $blocksize) - $reserved;
        $diskdesc = "700MiB CD";
    } elsif ($chosen_disk eq "DVD") {
        $maxdiskblocks = int(4700 * $MB / $blocksize) - $reserved;
        $diskdesc = "4.7GB DVD";
    } elsif ($chosen_disk eq "DLDVD") {
        $maxdiskblocks = int(8500 * $MB / $blocksize) - $reserved;
        $diskdesc = "8.5GB DVD";
    } elsif ($chosen_disk eq "BD") {
        # Leave some space for potential formatting/spares on BD
        $maxdiskblocks = int(24000 * $MB / $blocksize) - $reserved;
        $diskdesc = "25GB BD";
    } elsif ($chosen_disk eq "DLBD") {
        # Leave some space for potential formatting/spares on BD
        $maxdiskblocks = int(48000 * $MB / $blocksize) - $reserved;
        $diskdesc = "50GB DLBD";
    } elsif ($chosen_disk =~ /STICK(\d+)GB/) {
        $maxdiskblocks = int($1 * $GB / $blocksize) - $reserved;
        $diskdesc = "$1GB STICK";
    } elsif ($chosen_disk eq "CUSTOM") {
        $maxdiskblocks = $ENV{'CUSTOMSIZE'} - $reserved ||
            die "Need to specify a custom size for the CUSTOM disktype\n";
        $diskdesc = "User-supplied size";
    } else {
        die "make_disc_trees.pl: Unknown disk type \"$chosen_disk\" specified; ABORT\n";
    }

    $ENV{'MAXDISKBLOCKS'} = $maxdiskblocks;
    $ENV{'DISKDESC'} = $diskdesc;

    # How full should we let the disc get before we stop estimating and
    # start running mkisofs?
    $size_swap_check = $maxdiskblocks - (40 * $MB / $blocksize);
}

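# Set up the tree for a new disc: run start_new_disc, re-read the disc
# size, seed md5sum.txt and load the per-disc exclude/unexclude lists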
sub start_disc {
    my $error = 0;

    $error = system("$basedir/tools/start_new_disc $basedir $mirror $tdir $codename \"$archlist\" $disknum");
    if ($error != 0) {
        die "  Failed to start disc $disknum, error $error\n";
    }

    get_disc_size();

    print "Starting new \"$archlist\" $disktype $disknum at $basedir/$codename/CD$disknum\n";
    print "  Specified size for this image: $diskdesc, $maxdiskblocks 2K-blocks maximum\n";
    # Grab all the early stuff, apart from dirs that will change later
    print "  Starting the md5sum.txt file\n";
    chdir $cddir;
    system("find . -type f | grep -v -e ^\./\.disk -e ^\./dists -e ^\./README | xargs md5sum >> md5sum.txt");
    chdir $bdir;

    $mkisofs_opts = "";
    $mkisofs_dirs = "";

    @exclude_packages = ();
    @unexclude_packages = ();

    # Change of interface here - exclude/unexclude files are now a
    # colon-separated list
    if (defined ($ENV{"EXCLUDE"})) {
        add_excludes_from_files($ENV{"EXCLUDE"}, "exclude", \@exclude_packages);
    }
    if (defined ($ENV{"EXCLUDE$disknum"})) {
        add_excludes_from_files($ENV{"EXCLUDE$disknum"}, "exclude", \@exclude_packages);
    }
    if (defined ($ENV{"UNEXCLUDE$disknum"})) {
        add_excludes_from_files($ENV{"UNEXCLUDE$disknum"}, "unexclude", \@unexclude_packages);
    }
}

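# Take a colon-separated list of filenames (relative to TASKDIR) and
# parse each of them in turn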
sub add_excludes_from_files {
    my $files_list = shift;
    my $verb = shift;
    my $listref = shift;

    foreach my $entry (split(':', $files_list)) {
        my $excl_file = $ENV{"TASKDIR"} . "/" . $entry;
        parse_exclude_file($excl_file, $verb, $listref);
    }
}

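# Read package names, one per line, from an exclude/unexclude file and
# add them to the supplied list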
sub parse_exclude_file {
    my $excl_file = shift;
    my $verb = shift;
    my $listref = shift;

    print "  Adding packages to $verb list from $excl_file\n";
    print LOG "Adding packages to $verb list from $excl_file\n";
    open (EXCLUDE_FILE, "< $excl_file") || die "Can't open exclude file $excl_file: $!\n";
    while (defined (my $excl_pkg = <EXCLUDE_FILE>)) {
        chomp $excl_pkg;
        push(@$listref, $excl_pkg);
        print LOG "  $excl_pkg\n";
    }
    close (EXCLUDE_FILE);
}

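# Wrap things up for the current disc: run the DISC_FINISH/DISC_END hooks,
# check base installability on the first disc, finish the Release and
# md5sum.txt files and log the final size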
sub finish_disc {
    my $cddir = shift;
    my $not = shift;
    my $archok = 0;
    my $ok = 0;
    my $bytes = 0;
    my $ctx;
    my $hook;
    my $error = 0;

    if (defined($ENV{'DISC_FINISH_HOOK'})) {
        $hook = $ENV{'DISC_FINISH_HOOK'};
        print "  Calling disc_finish hook: $hook\n";
        $error = system("$hook $tdir $mirror $disknum $cddir \"$archlist\"");
        $error == 0 || die "DISC_FINISH_HOOK failed with error $error\n";
    }

    if (($disknum == 1) && !($archlist eq "source") && !($disktype eq "BC")) {
        foreach my $arch (@arches_nosrc) {
            print "  Checking base is installable for $arch\n";
            $archok = check_base_installable($arch, $cddir);
            if ($archok > 0) {
                print "    $arch is missing $archok files needed for debootstrap, look in $log for the list\n";
            }
            $ok += $archok;
        }
        if ($ok == 0) {
            open(my $fh, ">>", "$cddir/.disk/base_installable");
            close($fh);
            print "  Found all files needed for debootstrap for all binary arches\n";
        } else {
            print "  $ok files missing for debootstrap, not creating base_installable\n";
            if ($disktype eq "BC") {
                print "  This is expected - building a BC\n";
            }
        }
    }

    chdir $cddir;

    # If we have a local packages directory, ensure we have a Packages file
    # for all included architectures as otherwise the Release file will be
    # invalid. This can happen if we do have local udebs but no local
    # regular packages, or multiple architectures with not all of them
    # having local packages.
    if (-d "./dists/$codename/local") {
        find (\&add_missing_Packages, "./dists/$codename/main/");
    } else {
        # Otherwise ensure not to pass --components=main,local to
        # debootstrap:
        my $base_components = ".disk/base_components";
        if (-f $base_components) {
            my @components = read_file($base_components);
            if (grep { $_ eq "local\n" } @components) {
                print "  Removing local from base_components (no such component under $codename)\n";
                write_file($base_components, grep { $_ ne "local\n" } @components);
            }
        }
    }

    print "  Finishing off the Release file\n";
    my @codenames = ("$codename");
    if ($backports) {
        push @codenames, "$codename-backports";
    }
    foreach my $tmpcode (@codenames) {
        chdir "dists/$tmpcode";
        open(RELEASE, ">>Release") or die "Failed to open Release file: $!\n";
        find (\&recompress, ".");
        checksum_files_for_release();
        close(RELEASE);
        find (\&remove_uncompressed, ".");
        chdir("../..");
    }

    print "  Finishing off md5sum.txt\n";
    # Just md5 the bits we won't have seen already
    open(MD5LIST, ">>md5sum.txt") or die "Failed to open md5sum.txt file: $!\n";
    foreach my $dir ("./.disk", "./dists", "./firmware/dep11") {
        if (-d $dir) {
            find (\&md5_files_for_md5sum, $dir);
        }
    }

    close(MD5LIST);

    # And sort; it should make things faster for people checking
    # the md5sums, as ISO9660 dirs are sorted alphabetically
    system("LANG=C sort -uk2 md5sum.txt | grep -v \./md5sum.txt > md5sum.txt.tmp");
    rename("md5sum.txt.tmp", "md5sum.txt");
    chdir $bdir;

    if (defined($ENV{'DISC_END_HOOK'})) {
        $hook = $ENV{'DISC_END_HOOK'};
        print "  Calling disc_end hook: $hook\n";
        $error = system("$hook $tdir $mirror $disknum $cddir \"$archlist\"");
        $error == 0 || die "DISC_END_HOOK failed with error $error\n";
    }

    $size = `$size_check $cddir`;
    chomp $size;
    $bytes = $size * $blocksize;
    print LOG "CD $disknum$not filled with $pkgs_this_cd packages, $size blocks, $bytes bytes\n";
    print "  CD $disknum$not filled with $pkgs_this_cd packages, $size blocks, $bytes bytes\n";
    system("date >> $log");
}

# start of add_packages

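# Log a message from the add_packages code to add_packages.log, opening
# the log file on first use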
sub msg_ap {
    my $level = shift;
    if (!$log_opened) {
        open(AP_LOG, ">> $tdir/$codename/add_packages.log")
            || die "Can't write in $tdir/add_packages.log!\n";
        $log_opened = 1;
    }
    print AP_LOG @_;
}

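# Estimate how many 2K ISO blocks a file of this size will take, rounding
# up and allowing one extra block for directory/metadata overhead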
sub size_in_blocks {
    my $size_in_bytes = shift;
    return (1 + int(($size_in_bytes + $iso_blksize - 1) / $iso_blksize));
}

# From a package name and section, work out the directory where its
# corresponding Packages file should live
sub Packages_dir {
    my $dir = shift;
    my $file = shift;
    my $section = shift;
    my $in_backports = shift;

    my ($pdir, $component);

    if ($file =~ /\/main\//) {
        $component = "main";
    } elsif ($file =~ /\/contrib\//) {
        $component = "contrib";
    } elsif ($file =~ /\/non-free\//) {
        $component = "non-free";
    } elsif ($file =~ /\/non-free-firmware\//) {
        $component = "non-free-firmware";
    } else {
        $component = "local";
    }

    $pdir = "$dir/dists/$codename/$component";
    if ($in_backports) {
        $pdir = "$dir/dists/$codename-backports/$component";
    }
    if ($section and $section eq "debian-installer") {
        # Don't attempt to put d-i components into backports, as d-i
        # won't look for them there.
        #
        # Also, merge local udebs into main, as d-i uses a single
        # Packages file anyway:
        my $dstcomponent = $component ne 'local' ? $component : 'main';
        $pdir = "$dir/dists/$codename/$dstcomponent/debian-installer";
    }
    return $pdir;
}

# Dump the apt-cached data into a Packages file; make the parent dir
# for the Packages file if necessary
sub add_Packages_entry {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my ($p, $file, $section, $pdir, $pkgfile, $gz, $st1, $st2, $size1, $size2);
    my $blocks_added = 0;
    my $old_blocks = 0;
    my $new_blocks = 0;

    m/^Package: (\S+)/m and $p = $1;
    m/^Section: (\S+)/m and $section = $1;

    if ($arch eq "source") {
        m/^Directory: (\S+)/mi and $file = $1;
        if (!defined($file)) {
            die "Can't parse source file information out of $_\n";
        }
        $pdir = Packages_dir($dir, $file, $section, $in_backports) . "/source";
        $pkgfile = "$pdir/Sources";
    } else {
        m/^Filename: (\S+)/mi and $file = $1;
        if (!defined($file)) {
            die "Can't parse binary file information out of $_\n";
        }
        $pdir = Packages_dir($dir, $file, $section, $in_backports) . "/binary-$arch";
        $pkgfile = "$pdir/Packages";
    }

    msg_ap(0, "  Adding $p to $pkgfile(.gz)\n");

    if (! -d $pdir) {
        make_path($pdir);
        $blocks_added++;
    }

    if (-e $pkgfile) {
        $st1 = stat("$pkgfile");
        $old_blocks = size_in_blocks($st1->size);
    }

    if (-e "$pkgfile.gz") {
        $st1 = stat("$pkgfile.gz");
        $old_blocks += size_in_blocks($st1->size);
    }

    open(PFILE, ">>$pkgfile");
    print PFILE $_;
    close(PFILE);

    $gz = gzopen("$pkgfile.gz", "ab9") or die "Failed to open $pkgfile.gz: $gzerrno\n";
    $gz->gzwrite($_) or die "Failed to write $pkgfile.gz: $gzerrno\n";
    $gz->gzclose();
    $st1 = stat("$pkgfile");
    $st2 = stat("$pkgfile.gz");
    $size1 = $st1->size;
    $size2 = $st2->size;

    $new_blocks += size_in_blocks($st1->size);
    $new_blocks += size_in_blocks($st2->size);
    $blocks_added += ($new_blocks - $old_blocks);
    msg_ap(0, "  now $size1 / $size2 bytes, $blocks_added blocks added\n");
    return $blocks_added;
}

# Write out translated description(s) for a package
sub add_trans_desc_entry {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my ($p, $file, $section, $idir, $pkgfile, $gz, $st);
    my $size = 0;
    my $blocks_added = 0;
    my $old_blocks = 0;
    my $new_blocks = 0;

    m/^Package: (\S+)/m and $p = $1;
    m/^Section: (\S+)/m and $section = $1;
    m/^Filename: (\S+)/mi and $file = $1;

    $idir = Packages_dir($dir, $file, $section, $in_backports) . "/i18n";

    if (! -d $idir) {
        make_path($idir);
        $blocks_added++;
    }

    foreach my $lang (keys %descriptions) {
        # Do we have a translation for this language?
        if (defined $descriptions{$lang}{$p}{"data"}) {
            my $trans_file = "$idir/Translation-$lang";

            msg_ap(0, "  Adding $p to $trans_file(.gz)\n");

            if ($descriptions{$lang}{$p}{"used"}) {
                msg_ap(0, "  - not, already included\n");
            } else {
                # Keeping files in .gz format is far too expensive in
                # terms of de-compressing and re-compressing all the
                # time. Store uncompressed and only compress when we're
                # finished. Analysis of typical text suggests that gzip
                # will give roughly a factor of 2 compression here, so
                # use that estimate. For accuracy, we may end up
                # compressing *anyway* just before doing a size check; if
                # so, we'll need to uncompress again on entry here.

                if (-f "$trans_file.gz") {
                    system("rm -f $trans_file");
                    system("gunzip $trans_file.gz");
                }

                if (-f $trans_file) {
                    $st = stat("$trans_file") || die "unable to stat $trans_file\n";
                    $old_blocks += size_in_blocks($st->size / 2);
                }

                # Add the new description
                open(IFILE, ">> $trans_file");
                print IFILE $descriptions{$lang}{$p}{"data"};
                $descriptions{$lang}{$p}{"used"} = 1;
                close(IFILE);

                $st = stat("$trans_file") || die "unable to stat $trans_file\n";
                $size += int($st->size / 2);
                $new_blocks += size_in_blocks($st->size / 2);
            }
        }
    }
    $blocks_added += ($new_blocks - $old_blocks);
    if ($blocks_added != 0) {
        msg_ap(0, "  now $size bytes, $blocks_added blocks added\n");
    }
    return $blocks_added;
}

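# Append md5sum.txt entries for a package's file(s), using the checksums
# already present in the apt cache data; returns the number of blocks added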
sub add_md5_entry {
|
|
|
|
my $dir = shift;
|
|
|
|
my $arch = shift;
|
2016-09-20 00:46:15 +00:00
|
|
|
my $in_backports = shift;
|
2016-11-22 08:57:28 -01:00
|
|
|
local $_ = shift;
|
2011-11-13 23:14:30 -01:00
|
|
|
my ($pdir, $file, $md5, $st, $size, $p);
|
2008-03-20 08:43:22 -01:00
|
|
|
my $md5file = "$dir/md5sum.txt";
|
|
|
|
my $blocks_added = 0;
|
|
|
|
my $old_blocks = 0;
|
|
|
|
my $new_blocks = 0;
|
|
|
|
|
2016-09-20 00:46:15 +00:00
|
|
|
if ($in_backports) {
|
|
|
|
m/^Package: (\S+)/mi and $p = "$1/$codename-backports";
|
|
|
|
} else {
|
|
|
|
m/^Package: (\S+)/mi and $p = $1;
|
|
|
|
}
|
2008-03-20 08:43:22 -01:00
|
|
|
|
|
|
|
if (-e $md5file) {
|
|
|
|
$st = stat("$md5file");
|
|
|
|
$old_blocks = size_in_blocks($st->size);
|
|
|
|
}
|
|
|
|
|
|
|
|
open(MD5FILE, ">>$md5file");
|
|
|
|
|
|
|
|
if ($arch eq "source") {
|
|
|
|
m/^Directory: (\S+)/mi and $pdir = $1;
|
2011-03-23 16:23:50 -01:00
|
|
|
# Explicitly use the md5 lines in the Sources stanza, hence the xdigit(32) here
|
|
|
|
while (/^ ([[:xdigit:]]{32}) (\d+) (\S+)/msg) { print MD5FILE "$1 ./$pdir/$3\n"; }
|
2008-03-20 08:43:22 -01:00
|
|
|
} else {
|
|
|
|
m/^Filename: (\S+)/m and $file = $1;
|
|
|
|
m/^MD5sum: (\S+)/m and print MD5FILE "$1 ./$file\n";
|
|
|
|
}
|
|
|
|
|
|
|
|
close(MD5FILE);
|
|
|
|
msg_ap(0, " Adding $p to $md5file\n");
|
|
|
|
$st = stat("$md5file");
|
|
|
|
$size = $st->size;
|
|
|
|
$new_blocks = size_in_blocks($st->size);
|
|
|
|
$blocks_added = $new_blocks - $old_blocks;
|
|
|
|
msg_ap(0, " now $size bytes, added $blocks_added blocks\n");
|
|
|
|
|
|
|
|
return $blocks_added;
|
|
|
|
}
|
|
|
|
|
2021-07-25 17:19:05 +00:00
|
|
|
# Add sym-links and pattern files for firmware packages
|
|
|
|
sub add_firmware_stuff {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my ($p, $file, $section, $component, $dep11_dir);
    my $blocks_added = 0;

    m/^Package: (\S+)/m and $p = $1;
    m/^Section: (\S+)/m and $section = $1;
    m/^Filename: (\S+)/mi and $file = $1;

    my $base_file = basename($file);

    if ($file =~ /\/main\//) {
        $component = "main";
    } elsif ($file =~ /\/contrib\//) {
        $component = "contrib";
    } elsif ($file =~ /\/non-free\//) {
        $component = "non-free";
    } elsif ($file =~ /\/non-free-firmware\//) {
        $component = "non-free-firmware";
    } else {
        $component = "local";
    }

    $dep11_dir = "$mirror/dists/$codename/$component/dep11";
    if ($in_backports) {
        $dep11_dir = "$mirror/dists/$codename-backports/$component/dep11";
    }

    if (! -d "$dir/firmware") {
        mkdir "$dir/firmware" or die "mkdir $dir/firmware failed $!\n";
        mkdir "$dir/firmware/dep11" or die "mkdir $dir/firmware/dep11 failed $!\n";
        $blocks_added += 2;

        # In case anyone wonders about those files:
        write_file("$dir/firmware/dep11/README.txt",
                   "These files help Debian Installer detect helpful firmware packages (via hw-detect).\n")
            or die "unable to create $dir/firmware/dep11/README.txt";
        $blocks_added += get_file_blocks("$dir/firmware/dep11/README.txt");
    }

    msg_ap(0, "Link fw package $p into /firmware\n");
    $blocks_added += good_link("$dir/firmware/../$file", "$dir/firmware/$base_file");
    msg_ap(0, "Link ../$file $dir/firmware/.\n");

    # Cope with maybe having the patterns file already
    # (e.g. multi-arch), in which case we'll replace it here
    if (-f "$dir/firmware/dep11/$p.patterns") {
        $blocks_added -= get_file_blocks("$dir/firmware/dep11/$p.patterns");
        $blocks_added -= get_file_blocks("$dir/firmware/dep11/$p.component");
    }
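
    # The dep11 pattern files generated below are what the installer
    # (hw-detect) uses to detect helpful firmware packages for the
    # hardware at hand; see the README.txt written above.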
    # Do only if dep11 is enabled
    if ($dep11) {
        msg_ap(0, "(Maybe) generate fw pattern file $dir/firmware/dep11/$p.patterns\n");
        my @args = ("$basedir/tools/generate_firmware_patterns",
                    "--output-dir", "$dir/firmware/dep11");
        push(@args, "--package", "$p");
        push(@args, "$dep11_dir/Components-$arch.yml.gz");
        system(@args) == 0 or die "generate_firmware_patterns failed: $?";
        if (-f "$dir/firmware/dep11/$p.patterns") {
            $blocks_added += get_file_blocks("$dir/firmware/dep11/$p.patterns");
            # Make sure apt-setup can be configured appropriately:
            write_file("$dir/firmware/dep11/$p.component", $component)
                or die "unable to create $dir/firmware/dep11/$p.component";
            $blocks_added += get_file_blocks("$dir/firmware/dep11/$p.component");
        }
    }
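
    # Contents-firmware (built below) maps each firmware file shipped by
    # the package to the .deb it came from and its component, one line per
    # file, e.g. (illustrative values):
    #   /lib/firmware/example/blob.bin                        firmware-example_1.0-1_all.deb non-free-firmware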

    # Find the current size of the firmware Contents file
    my $contents_blocks_old = 0;
    my $cont_file = "$dir/firmware/Contents-firmware";
    if (-f "$cont_file") {
        $contents_blocks_old = get_file_blocks("$cont_file");
    }
    # Add new contents stuff, and count it
    open(OFILE, ">> $cont_file");
    open(DPKGC, "dpkg --contents $dir/firmware/$base_file |")
        or die "Can't find contents of $dir/firmware/$base_file: $!";
    while (defined(my $line = <DPKGC>)) {
        chomp $line;
        # XXX: Keep in line with make-firmware-image!
        if ($line =~ m,^[-|l]\S+\s+\S+\s+\d+\s+\S+\s+\S+\s+./(\S+/firmware/\S+),) {
            printf OFILE "%-55s %s %s\n", "/$1", $base_file, $component;
        }
    }
    close DPKGC;
    close OFILE;

    my $contents_blocks_new = get_file_blocks($cont_file);
    $blocks_added += $contents_blocks_new - $contents_blocks_old;

    return $blocks_added;
}

# Roll back the results of add_Packages_entry()
sub remove_Packages_entry {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my ($p, $file, $section, $pdir, $pkgfile, $tmp_pkgfile, $match, $gz,
        $st1, $st2, $size1, $size2);
    my $blocks_removed = 0;
    my $old_blocks = 0;
    my $new_blocks = 0;

    m/^Package: (\S+)/m and $p = $1;
    m/^Section: (\S+)/m and $section = $1;

    if ($arch eq "source") {
        m/^Directory: (\S+)/mi and $file = $1;
        $pdir = Packages_dir($dir, $file, $section, $in_backports) . "/source";
        $pkgfile = "$pdir/Sources";
    } else {
        m/^Filename: (\S+)/mi and $file = $1;
        $pdir = Packages_dir($dir, $file, $section, $in_backports) . "/binary-$arch";
        $pkgfile = "$pdir/Packages";
    }

    if (-e $pkgfile) {
        $st1 = stat("$pkgfile");
        $old_blocks += size_in_blocks($st1->size);
    }

    if (-e "$pkgfile.gz") {
        $st2 = stat("$pkgfile.gz");
        $old_blocks += size_in_blocks($st2->size);
    }

    $tmp_pkgfile = "$pkgfile" . ".rollback";

    msg_ap(0, " Removing $p from $pkgfile(.gz)\n");

    open(IFILE, "<$pkgfile");
    open(OFILE, ">>$tmp_pkgfile");

    $gz = gzopen("$pkgfile.gz", "wb9");

    $/ = ''; # Browse by paragraph
    while (defined($match = <IFILE>)) {
        if (! ($match =~ /^Package: \Q$p\E$/m)) {
            print OFILE $match;
            $gz->gzwrite($match) or die "Failed to write $pkgfile.gz: $gzerrno\n";
        }
    }
    $/ = $old_split; # Browse by line again

    $gz->gzclose();
    close(IFILE);
    close(OFILE);

    rename $tmp_pkgfile, $pkgfile;
    $st1 = stat("$pkgfile");
    $st2 = stat("$pkgfile.gz");
    $size1 = $st1->size;
    $size2 = $st2->size;
    $new_blocks += size_in_blocks($st1->size);
    $new_blocks += size_in_blocks($st2->size);
    $blocks_removed += ($old_blocks - $new_blocks);
    msg_ap(0, " now $size1 / $size2 bytes, $blocks_removed blocks removed\n");
    return $blocks_removed;
}

# Roll back the result of add_trans_desc_entry()
sub remove_trans_desc_entry {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my ($p, $file, $section, $idir, $gz, $match, $st);
    my $size = 0;
    my $blocks_added = 0;
    my $old_blocks = 0;
    my $new_blocks = 0;

    m/^Package: (\S+)/m and $p = $1;
    m/^Section: (\S+)/m and $section = $1;

    m/^Filename: (\S+)/mi and $file = $1;
    $idir = Packages_dir($dir, $file, $section, $in_backports) . "/i18n";

    $/ = ''; # Browse by paragraph
    foreach my $lang (keys %descriptions) {
        # Do we have a translation for this language?
        if (defined $descriptions{$lang}{$p}{"data"}) {
            my $trans_file = "$idir/Translation-$lang";
            my $tmp_tfile = "$trans_file" . ".rollback";
            my $entries_remaining = 0;

            msg_ap(0, " Removing $p from $trans_file(.gz)\n");

            # Keeping files in .gz format is expensive - see comment
            # in add_trans_desc_entry() above.
            if (-f "$trans_file.gz") {
                system("rm -f $trans_file");
                system("gunzip $trans_file.gz");
            }
            $st = stat("$trans_file") || die "unable to stat $trans_file\n";
            $old_blocks += size_in_blocks($st->size / 2);

            # Remove the description
            open(IFILE, "< $trans_file") || die "unable to open $trans_file\n";
            open(OFILE, ">> $tmp_tfile");

            while (defined($match = <IFILE>)) {
                if (! ($match =~ /^Package: \Q$p\E$/m)) {
                    print OFILE $match;
                    $entries_remaining++;
                }
            }

            close(IFILE);
            close(OFILE);

            $descriptions{$lang}{$p}{"used"} = 0;

            # If we still have any entries in the new file,
            # keep it. Otherwise, just delete it
            if ($entries_remaining) {
                rename $tmp_tfile, $trans_file;
                $st = stat("$trans_file") || die "unable to stat $trans_file\n";
                $size += int($st->size / 3);
                $new_blocks += size_in_blocks($st->size / 3);
            } else {
                unlink($tmp_tfile);
                unlink($trans_file);
            }
        }
    }
    $/ = $old_split; # Browse by line again
    $blocks_added += ($new_blocks - $old_blocks);
    msg_ap(0, " now $size bytes, $blocks_added blocks added\n");
    return $blocks_added;
}

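# Roll back the result of add_md5_entry(): filter the package's entries
# back out of md5sum.txt and return the number of blocks freed.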
sub remove_md5_entry {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    local $_ = shift;
    my $md5file = "$dir/md5sum.txt";
    my $tmp_md5file = "$dir/md5sum.txt.tmp";
    my @fileslist;
    my ($pdir, $file, $md5, $match, $present, $st, $size, $p);
    my $blocks_removed = 0;
    my $old_blocks = 0;
    my $new_blocks = 0;

    $/ = $old_split; # Browse by line again

    m/^Package: (\S+)/mi and $p = $1;
    if ($arch eq "source") {
        m/^Directory: (\S+)/mi and $pdir = $1;
        # Explicitly use the md5 lines in the Sources stanza, hence the xdigit(32) here
        while (/^ ([[:xdigit:]]{32}) (\d+) (\S+)/msg) { push(@fileslist, "$1 ./$pdir/$3"); }
    } else {
        m/^Filename: (\S+)/m and $file = $1;
        m/^MD5Sum: (\S+)/mi and push(@fileslist, "$1 ./$file");
    }

    if (-e $md5file) {
        $st = stat("$md5file");
        $old_blocks = size_in_blocks($st->size);
    }

    open(IFILE, "<$md5file");
    open(OFILE, ">>$tmp_md5file");
    while (defined($match = <IFILE>)) {
        $present = 0;
        foreach my $entry (@fileslist) {
            if (($match =~ /\Q$entry\E$/m)) {
                $present++;
            }
        }
        if (!$present) {
            print OFILE $match;
        }
    }
    close(IFILE);
    close(OFILE);

    $/ = ''; # Browse by paragraph again
    rename $tmp_md5file, $md5file;
    msg_ap(0, " Removing $p from md5sum.txt\n");
    $st = stat("$dir/md5sum.txt");
    $size = $st->size;
    $new_blocks = size_in_blocks($st->size);
    $blocks_removed = $old_blocks - $new_blocks;
    msg_ap(0, " now $size bytes, $blocks_removed blocks removed\n");
    $/ = $old_split; # Browse by line again
    return $blocks_removed;
}

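# Roll back add_firmware_stuff(): drop the package's link under
# /firmware, its dep11 pattern/component files and its lines in
# Contents-firmware. Returns the number of blocks freed.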
sub remove_firmware_stuff {
    my $dir = shift;
    my $arch = shift;
    my $in_backports = shift;
    my ($p, $file);
    local $_ = shift;
    my $blocks_removed = 0;

    m/^Package: (\S+)/mi and $p = $1;
    m/^Filename: (\S+)/mi and $file = $1;

    my $base_file = basename($file);

    msg_ap(0, "Remove symlink for fw package $p in /firmware\n");
    unlink("$dir/firmware/$base_file");

    if (-f "$dir/firmware/dep11/$p.patterns") {
        $blocks_removed += get_file_blocks("$dir/firmware/dep11/$p.patterns");
        msg_ap(0, "Remove $dir/firmware/dep11/$p.patterns\n");
        unlink("$dir/firmware/dep11/$p.patterns");

        $blocks_removed += get_file_blocks("$dir/firmware/dep11/$p.component");
        msg_ap(0, "Remove $dir/firmware/dep11/$p.component\n");
        unlink("$dir/firmware/dep11/$p.component");
    }

    # Find the current size of the firmware Contents file, and grep
    # out from the current data
    my $contents_blocks_old = 0;
    my $cont_file = "$dir/firmware/Contents-firmware";
    open(OFILE, "> $cont_file.1");
    if (-f $cont_file) {
        $contents_blocks_old = get_file_blocks($cont_file);
        open(IFILE, "< $cont_file");
        while (defined(my $line = <IFILE>)) {
            chomp $line;
            # Lines written by add_firmware_stuff() look like
            # "<path> <deb filename> <component>", so match the .deb name as a
            # whitespace-delimited field, quoting any regex metacharacters in it
            if ($line !~ /\s\Q$base_file\E(\s|$)/) {
                print OFILE "$line\n";
            }
        }
        close IFILE;
    }
    close OFILE;
    rename "$cont_file.1", "$cont_file";
    my $contents_blocks_new = get_file_blocks($cont_file);
    # Count the space freed from Contents-firmware (old size minus new size)
    $blocks_removed += $contents_blocks_old - $contents_blocks_new;

    return $blocks_removed;
}

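# Stat a file and return its size in ISO blocks (via size_in_blocks()).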
sub get_file_blocks {
    my $realfile = shift;
    my $st;
    $st = stat($realfile) or die "unable to stat file $realfile: $!\n";
    return size_in_blocks($st->size);
}

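# Add a package to the given disc tree (or remove it again when the first
# argument is "--rollback"): link its files in/out and update the
# Packages/Sources index, md5sum.txt, translated descriptions and firmware
# metadata to match. The package is passed as "arch:component:name:size";
# the net number of blocks added is returned (negative on rollback).
# Illustrative calls (hypothetical package):
#   add_packages($dir, "amd64:main:openssh-server:123456");
#   add_packages("--rollback", $dir, "amd64:main:openssh-server:123456");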
sub add_packages {
    my ($p, @files, $d, $realfile, $source, $section, $name, $pkgfile, $pdir);
    my $dir;

    my $total_blocks = 0;
    my $rollback = 0;
    my $option = shift;
    if ($option =~ /--rollback/) {
        $rollback = 1;
        $dir = shift;
    } else {
        $dir = $option;
    }

    if (! -d $dir) {
        die "add_packages: $dir is not a directory ...";
    }

    my $pkg = shift;
    my ($arch, $component, $pkgname, $pkgsize) = split /:/, $pkg;

    if ("$arch" eq "" or "$pkgname" eq "") {
        die "inconsistent data passed to add_packages: $pkg\n";
    }

    msg_ap(0, "Looking at $pkg: arch $arch, package $pkgname, rollback $rollback\n");

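    # %pkginfo is expected to map architecture -> package name -> the list
    # of Packages/Sources stanzas describing that package; each stanza is
    # processed in turn below.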
    foreach my $package_info (@{$pkginfo{$arch}{$pkgname}}) {
        my $in_backports = 0;
        if ($pkgname =~ /\/$codename-backports/) {
            $in_backports = 1;
        }
        undef @files;
        $source = $mirror;
        if ($arch eq "source") {
            if ($package_info =~ m/^Directory: (\S+)/m) {
                $pdir = $1;
            }
            if ($pdir =~ m:updates/:) {
                $source = $security;
            }
            # Explicitly use the md5 lines in the Sources stanza, hence the xdigit(32) here
            while ($package_info =~ /^ ([[:xdigit:]]{32}) (\d+) (\S+)/msg) {
                push(@files, "$pdir/$3");
            }
        } else {
            if ($package_info =~ m/^Filename: (\S+)/mi) {
                push(@files, $1);
            }
            if ($1 =~ m:updates/:) {
                $source = $security;
            }
        }

        if ($rollback) {
            # Remove the Packages entry/entries for the specified package
            $total_blocks -= remove_Packages_entry($dir, $arch, $in_backports, $package_info);
            $total_blocks -= remove_md5_entry($dir, $arch, $in_backports, $package_info);
            if (!($arch eq "source")) {
                $total_blocks -= remove_trans_desc_entry($dir, $arch, $in_backports, $package_info);
            }

            if ($firmware_package{$pkgname}) {
                $total_blocks -= remove_firmware_stuff($dir, $arch, $in_backports, $package_info);
            }

            foreach my $file (@files) {
                my $missing = 0;
                # Count how big the file is we're removing, for checking if the disc is full
                if (! -e "$source/$file") {
                    msg_ap(0, "Can't find $file in the main archive, trying local\n");
                    if (-e "$localdebs/$file") {
                        $source = $localdebs;
                    } else {
                        die "$file not found under either $source or $localdebs\n";
                    }
                }
                $realfile = real_file ("$source/$file");
                $total_blocks -= get_file_blocks($realfile);

                # Remove the link
                unlink ("$dir/$file") || msg_ap(0, "Couldn't delete file $dir/$file\n");
                msg_ap(0, " Rollback: removed $dir/$file\n");
            }
            # Try to remove the directory; will silently fail if there
            # are still files there, which is OK.
            rmdir ($dir);
        } else {
            $total_blocks += add_Packages_entry($dir, $arch, $in_backports, $package_info);
            $total_blocks += add_md5_entry($dir, $arch, $in_backports, $package_info);
            if (!($arch eq "source")) {
                $total_blocks += add_trans_desc_entry($dir, $arch, $in_backports, $package_info);
            }

            foreach my $file (@files) {

                # And put the file in the CD tree (with a (hard) link)
                if (! -e "$source/$file") {
                    msg_ap(0, "Can't find $file in the main archive, trying local\n");
                    if (-e "$localdebs/$file") {
                        $source = $localdebs;
                    } else {
                        die "$file not found under either $source or $localdebs\n";
                    }
                }
                $realfile = real_file ("$source/$file");

                if (! -e "$dir/$file") {
                    # Count how big the file is, for checking if the
                    # disc is full. ONLY do this if the file is not
                    # already linked in - consider binary-all packages
                    # on a multi-arch disc
                    $total_blocks += get_file_blocks($realfile);
                    $total_blocks += good_link ($realfile, "$dir/$file");
                    msg_ap(0, " Linked $dir/$file\n");
                    if ($firmware_package{$pkgname}) {
                        $total_blocks += add_firmware_stuff($dir, $arch, $in_backports, $package_info);
                    }
                } else {
                    msg_ap(0, " $dir/$file already linked in\n");
                }
            }
        }
    }
    # close LIST or die "Something went wrong with apt-cache : $@ ($!)\n";
    msg_ap(0, " size $total_blocks\n");
    $/ = $old_split; # Return to line-orientation
    return $total_blocks;
}
|