author     Pascal Rigaux <pixel@mandriva.com>  2006-11-21 17:43:37 +0000
committer  Pascal Rigaux <pixel@mandriva.com>  2006-11-21 17:43:37 +0000
commit     871705259637a1b980f74fe118c5a23a0674b283 (patch)
tree       a7f98f5f3ff8d6036e631b75dfd68a5d032d3d71
parent     30844e6755db13ce18727cc4e7fd113f744e99c0 (diff)
move many functions from urpm.pm to urpm/media.pm
-rwxr-xr-x  gurpmi2                3
-rw-r--r--  urpm.pm             1807
-rw-r--r--  urpm/bug_report.pm     4
-rw-r--r--  urpm/download.pm     105
-rw-r--r--  urpm/ldap.pm          18
-rw-r--r--  urpm/md5sum.pm         4
-rw-r--r--  urpm/media.pm       1800
-rw-r--r--  urpm/parallel.pm       4
-rw-r--r--  urpme                  3
-rwxr-xr-x  urpmf                  3
-rwxr-xr-x  urpmi                  7
-rwxr-xr-x  urpmi.addmedia        21
-rwxr-xr-x  urpmi.removemedia      9
-rwxr-xr-x  urpmi.update          11
-rwxr-xr-x  urpmq                  7
15 files changed, 1960 insertions, 1846 deletions
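
Caller-side impact, in brief: the moved routines are no longer urpm methods but plain functions in urpm::media that take $urpm as their first argument (see the gurpmi2 hunk below), and file_from_local_url() becomes importable from urpm.pm through Exporter. A minimal, hypothetical caller sketch of the new convention; the medium name and path used here are placeholders, not values from this commit:

    use strict;
    use warnings;
    use urpm 'file_from_local_url';  # urpm.pm now inherits from Exporter and exports this on request
    use urpm::media;                 # configure() (and, per the commit message, the other moved subs) now lives here

    my $urpm = urpm->new;

    # before this commit: $urpm->configure(media => 'main');
    # after this commit:
    urpm::media::configure($urpm, media => 'main');    # 'main' is a placeholder medium name

    # the exported helper is now callable without the package prefix:
    my $dir = file_from_local_url('/var/lib/mirror');  # placeholder path; returns the local part of file:/ or removable:/ URLs (plain paths pass through)
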
diff --git a/gurpmi2 b/gurpmi2
index 44cfcd25..0fe865b1 100755
--- a/gurpmi2
+++ b/gurpmi2
@@ -13,6 +13,7 @@ BEGIN { #- set up a safe path and environment
use gurpmi;
use urpm::install;
+use urpm::media;
use Gtk2;
#- GUI globals
@@ -117,7 +118,7 @@ sub configure_urpm() {
};
urpm::sys::lock_rpm_db($urpm, 'exclusive');
urpm::sys::lock_urpmi_db($urpm);
- $urpm->configure(
+ urpm::media::configure($urpm,
root => $gurpmi::options{root},
media => $gurpmi::options{media},
searchmedia => $gurpmi::options{searchmedia},
diff --git a/urpm.pm b/urpm.pm
index 16177d4f..1750d9a1 100644
--- a/urpm.pm
+++ b/urpm.pm
@@ -14,7 +14,8 @@ use urpm::md5sum;
use MDV::Distribconf;
our $VERSION = '4.8.29';
-our @ISA = qw(URPM);
+our @ISA = qw(URPM Exporter);
+our @EXPORT_OK = 'file_from_local_url';
use URPM;
use URPM::Resolve;
@@ -54,251 +55,6 @@ sub new {
$self;
}
-our @PER_MEDIA_OPT = qw(
- downloader
- hdlist
- ignore
- key-ids
- list
- md5sum
- noreconfigure
- priority
- priority-upgrade
- removable
- static
- synthesis
- update
- url
- verify-rpm
- virtual
- with_hdlist
-);
-
-sub read_private_netrc {
- my ($urpm) = @_;
-
- my @words = split(/\s+/, scalar cat_($urpm->{private_netrc}));
- my @l;
- my $e;
- while (@words) {
- my $keyword = shift @words;
- if ($keyword eq 'machine') {
- push @l, $e = { machine => shift(@words) };
- } elsif ($keyword eq 'default') {
- push @l, $e = { default => '' };
- } elsif ($keyword eq 'login' || $keyword eq 'password' || $keyword eq 'account') {
- $e->{$keyword} = shift(@words);
- } else {
- $urpm->{error}("unknown netrc command $keyword");
- }
- }
- @l;
-}
-
-sub parse_url_with_login {
- my ($url) = @_;
- $url =~ m!([^:]*)://([^/:\@]*)(:([^/:\@]*))?\@([^/]*)(.*)! &&
- { proto => $1, login => $2, password => $4, machine => $5, dir => $6 };
-}
-
-sub read_config_add_passwords {
- my ($urpm, $config) = @_;
-
- my @netrc = read_private_netrc($urpm) or return;
- foreach (values %$config) {
- my $u = parse_url_with_login($_->{url}) or next;
- if (my ($e) = grep { ($_->{default} || $_->{machine} eq $u->{machine}) && $_->{login} eq $u->{login} } @netrc) {
- $_->{url} = sprintf('%s://%s:%s@%s%s', $u->{proto}, $u->{login}, $e->{password}, $u->{machine}, $u->{dir});
- } else {
- $urpm->{log}("no password found for $u->{login}@$u->{machine}");
- }
- }
-}
-
-sub remove_passwords_and_write_private_netrc {
- my ($urpm, $config) = @_;
-
- my @l;
- foreach (values %$config) {
- my $u = parse_url_with_login($_->{url}) or next;
- #- check whether a password is visible
- $u->{password} or next;
-
- push @l, $u;
- $_->{url} = sprintf('%s://%s@%s%s', $u->{proto}, $u->{login}, $u->{machine}, $u->{dir});
- }
- {
- my $fh = urpm::sys::open_safe($urpm, '>', $urpm->{private_netrc}) or return;
- foreach my $u (@l) {
- printf $fh "machine %s login %s password %s\n", $u->{machine}, $u->{login}, $u->{password};
- }
- }
- chmod 0600, $urpm->{private_netrc};
-}
-
-#- handle deprecated way of saving passwords
-sub recover_url_from_list {
- my ($urpm, $medium) = @_;
-
- #- /./ is end of url marker in list file (typically generated by a
- #- find . -name "*.rpm" > list
- #- for exportable list file.
- if (my @probe = map { m!^(.*)/\./! || m!^(.*)/[^/]*$! } cat_(statedir_list($urpm, $medium))) {
- ($medium->{url}) = sort { length($a) <=> length($b) } @probe;
- $urpm->{modified} = 1; #- ensure urpmi.cfg is handled using only partially hidden url + netrc, since file list won't be generated anymore
- }
-}
-
-#- Loads /etc/urpmi/urpmi.cfg and performs basic checks.
-#- Does not handle old format: <name> <url> [with <path_hdlist>]
-#- options :
-#- - nocheck_access : don't check presence of hdlist and other files
-sub read_config {
- my ($urpm, $b_nocheck_access) = @_;
- return if $urpm->{media}; #- media already loaded
- $urpm->{media} = [];
- my $config = urpm::cfg::load_config($urpm->{config})
- or $urpm->{fatal}(6, $urpm::cfg::err);
-
- #- global options
- if (my $global = $config->{''}) {
- foreach my $opt (keys %$global) {
- if (defined $global->{$opt} && !exists $urpm->{options}{$opt}) {
- $urpm->{options}{$opt} = $global->{$opt};
- }
- }
- }
-
- #- per-media options
-
- read_config_add_passwords($urpm, $config);
-
- foreach my $m (grep { $_ ne '' } keys %$config) {
- my $medium = { name => $m };
- foreach my $opt (@PER_MEDIA_OPT) {
- defined $config->{$m}{$opt} and $medium->{$opt} = $config->{$m}{$opt};
- }
-
- if (!$medium->{url}) {
- #- recover the url the old deprecated way...
- #- only useful for migration, new urpmi.cfg will use netrc
- recover_url_from_list($urpm, $medium);
- $medium->{url} or $urpm->{error}("unable to find url in list file $medium->{name}, medium ignored");
- }
-
- $urpm->add_existing_medium($medium, $b_nocheck_access);
- }
-
- eval { require urpm::ldap; urpm::ldap::load_ldap_media($urpm) };
-
- #- load default values
- foreach (qw(post-clean verify-rpm)) {
- exists $urpm->{options}{$_} or $urpm->{options}{$_} = 1;
- }
-
- $urpm->{media} = [ sort { $a->{priority} <=> $b->{priority} } @{$urpm->{media}} ];
-
- #- read MD5 sums (usually not in urpmi.cfg but in a separate file)
- foreach (@{$urpm->{media}}) {
- if (my $md5sum = urpm::md5sum::from_MD5SUM("$urpm->{statedir}/MD5SUM", statedir_hdlist_or_synthesis($urpm, $_))) {
- $_->{md5sum} = $md5sum;
- }
- }
-
- #- remember global options for write_config
- $urpm->{global_config} = $config->{''};
-}
-
-#- if invalid, set {ignore}
-sub check_existing_medium {
- my ($urpm, $medium, $b_nocheck_access) = @_;
-
- if ($medium->{virtual}) {
- #- a virtual medium needs to have an url available without using a list file.
- if ($medium->{hdlist} || $medium->{list}) {
- $medium->{ignore} = 1;
- $urpm->{error}(N("virtual medium \"%s\" should not have defined hdlist or list file, medium ignored",
- $medium->{name}));
- }
- unless ($medium->{url}) {
- $medium->{ignore} = 1;
- $urpm->{error}(N("virtual medium \"%s\" should have a clear url, medium ignored",
- $medium->{name}));
- }
- } else {
- if ($medium->{hdlist}) {
- #- is this check really needed? keeping just in case
- $medium->{hdlist} ne 'list' && $medium->{hdlist} ne 'pubkey' or
- $medium->{ignore} = 1,
- $urpm->{error}(N("invalid hdlist name"));
- }
- if (!$medium->{ignore} && !$medium->{hdlist}) {
- $medium->{hdlist} = "hdlist.$medium->{name}.cz";
- -e statedir_hdlist($urpm, $medium) or
- $medium->{ignore} = 1,
- $urpm->{error}(N("unable to find hdlist file for \"%s\", medium ignored", $medium->{name}));
- }
- if (!$medium->{ignore} && !$medium->{list}) {
- unless (defined $medium->{url}) {
- $medium->{list} = "list.$medium->{name}";
- unless (-e statedir_list($urpm, $medium)) {
- $medium->{ignore} = 1,
- $urpm->{error}(N("unable to find list file for \"%s\", medium ignored", $medium->{name}));
- }
- }
- }
- }
-
-
- #- check the presence of hdlist and list files if necessary.
- if (!$b_nocheck_access && !$medium->{ignore}) {
- if ($medium->{virtual} && -r hdlist_or_synthesis_for_virtual_medium($medium)) {}
- elsif (-r statedir_hdlist($urpm, $medium)) {}
- elsif ($medium->{synthesis} && -r statedir_synthesis($urpm, $medium)) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- }
- if ($medium->{list} && -r statedir_list($urpm, $medium)) {}
- elsif ($medium->{url}) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
- }
- }
-
- foreach my $field ('hdlist', 'list') {
- $medium->{$field} or next;
- if (grep { $_->{$field} eq $medium->{$field} } @{$urpm->{media}}) {
- $medium->{ignore} = 1;
- $urpm->{error}(
- $field eq 'hdlist'
- ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
- : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name}));
- }
- }
-}
-
-#- probe medium to be used, take old medium into account too.
-sub add_existing_medium {
- my ($urpm, $medium, $b_nocheck_access) = @_;
-
- if (name2medium($urpm, $medium->{name})) {
- $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
- return;
- }
-
- check_existing_medium($urpm, $medium, $b_nocheck_access);
-
- #- probe removable device.
- $urpm->probe_removable_device($medium);
-
- #- clear URLs for trailing /es.
- $medium->{url} and $medium->{url} =~ s|(.*?)/*$|$1|;
-
- push @{$urpm->{media}}, $medium;
-}
-
#- returns the removable device name if it corresponds to an iso image, '' otherwise
sub is_iso {
my ($removable_dev) = @_;
@@ -313,857 +69,6 @@ sub file_from_local_url {
my ($url) = @_;
$url =~ m!^(?:removable[^:]*:/|file:/)?(/.*)! && $1;
}
-sub file_from_file_url {
- my ($url) = @_;
- $url =~ m!^(?:file:/)?(/.*)! && $1;
-}
-
-sub _hdlist_dir {
- my ($medium) = @_;
- my $base = file_from_file_url($medium->{url}) || $medium->{url};
- $medium->{with_hdlist} && reduce_pathname("$base/$medium->{with_hdlist}/..");
-}
-sub _url_with_hdlist {
- my ($medium) = @_;
-
- my $base = file_from_file_url($medium->{url}) || $medium->{url};
- $medium->{with_hdlist} && reduce_pathname("$base/$medium->{with_hdlist}");
-}
-sub hdlist_or_synthesis_for_virtual_medium {
- my ($medium) = @_;
- file_from_file_url($medium->{url}) && _url_with_hdlist($medium);
-}
-
-sub statedir_hdlist_or_synthesis {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/" . ($medium->{synthesis} ? 'synthesis.' : '') . $medium->{hdlist};
-}
-sub statedir_hdlist {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/$medium->{hdlist}";
-}
-sub statedir_synthesis {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/synthesis.$medium->{hdlist}";
-}
-sub statedir_list {
- my ($urpm, $medium) = @_;
- $medium->{list} && "$urpm->{statedir}/$medium->{list}";
-}
-sub statedir_descriptions {
- my ($urpm, $medium) = @_;
- $medium->{name} && "$urpm->{statedir}/descriptions.$medium->{name}";
-}
-sub statedir_names {
- my ($urpm, $medium) = @_;
- $medium->{name} && "$urpm->{statedir}/names.$medium->{name}";
-}
-sub cachedir_hdlist {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{cachedir}/partial/$medium->{hdlist}";
-}
-sub cachedir_list {
- my ($urpm, $medium) = @_;
- $medium->{list} && "$urpm->{cachedir}/partial/$medium->{list}";
-}
-
-sub name2medium {
- my ($urpm, $name) = @_;
- my ($medium) = grep { $_->{name} eq $name } @{$urpm->{media}};
- $medium;
-}
-
-#- probe device associated with a removable device.
-sub probe_removable_device {
- my ($urpm, $medium) = @_;
-
- if ($medium->{url} && $medium->{url} =~ /^removable/) {
- #- try to find device name in url scheme, this is deprecated, use medium option "removable" instead
- if ($medium->{url} =~ /^removable_?([^_:]*)/) {
- $medium->{removable} ||= $1 && "/dev/$1";
- }
- } else {
- delete $medium->{removable};
- return;
- }
-
- #- try to find device to open/close for removable medium.
- if (my $dir = file_from_local_url($medium->{url})) {
- my %infos;
- my @mntpoints = urpm::sys::find_mntpoints($dir, \%infos);
- if (@mntpoints > 1) { #- return value is suitable for an hash.
- $urpm->{log}(N("too many mount points for removable medium \"%s\"", $medium->{name}));
- $urpm->{log}(N("taking removable device as \"%s\"", join ',', map { $infos{$_}{device} } @mntpoints));
- }
- if (is_iso($medium->{removable})) {
- $urpm->{log}(N("Medium \"%s\" is an ISO image, will be mounted on-the-fly", $medium->{name}));
- } elsif (@mntpoints) {
- if ($medium->{removable} && $medium->{removable} ne $infos{$mntpoints[-1]}{device}) {
- $urpm->{log}(N("using different removable device [%s] for \"%s\"",
- $infos{$mntpoints[-1]}{device}, $medium->{name}));
- }
- $medium->{removable} = $infos{$mntpoints[-1]}{device};
- } else {
- $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
- }
- } else {
- $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
- }
-}
-
-
-sub write_MD5SUM {
- my ($urpm) = @_;
-
- #- write MD5SUM file
- my $fh = urpm::sys::open_safe($urpm, '>', "$urpm->{statedir}/MD5SUM") or return 0;
- foreach my $medium (grep { $_->{md5sum} } @{$urpm->{media}}) {
- my $s = basename(statedir_hdlist_or_synthesis($urpm, $medium));
- print $fh "$medium->{md5sum} $s\n";
- }
-
- $urpm->{log}(N("wrote %s", "$urpm->{statedir}/MD5SUM"));
-
- delete $urpm->{md5sum_modified};
-}
-
-#- Writes the urpmi.cfg file.
-sub write_urpmi_cfg {
- my ($urpm) = @_;
-
- #- avoid trashing exiting configuration if it wasn't loaded
- $urpm->{media} or return;
-
- my $config = {
- #- global config options found in the config file, without the ones
- #- set from the command-line
- '' => $urpm->{global_config},
- };
- foreach my $medium (@{$urpm->{media}}) {
- next if $medium->{external};
- my $medium_name = $medium->{name};
-
- foreach (@PER_MEDIA_OPT) {
- defined $medium->{$_} and $config->{$medium_name}{$_} = $medium->{$_};
- }
- }
- remove_passwords_and_write_private_netrc($urpm, $config);
-
- urpm::cfg::dump_config($urpm->{config}, $config)
- or $urpm->{fatal}(6, N("unable to write config file [%s]", $urpm->{config}));
-
- $urpm->{log}(N("wrote config file [%s]", $urpm->{config}));
-
- #- everything should be synced now.
- delete $urpm->{modified};
-}
-
-sub write_config {
- my ($urpm) = @_;
-
- write_urpmi_cfg($urpm);
- write_MD5SUM($urpm);
-}
-
-#- read urpmi.cfg file as well as necessary synthesis files
-#- options :
-#- root
-#- cmdline_skiplist
-#- nocheck_access (used by read_config)
-#-
-#- callback (urpmf)
-#- need_hdlist (for urpmf: to be able to have info not available in synthesis)
-#- nodepslist (for urpmq: we don't need the hdlist/synthesis)
-#- no_skiplist (urpmf)
-#-
-#- synthesis (use this synthesis file, and only this synthesis file)
-#-
-#- usedistrib (otherwise uses urpmi.cfg)
-#- parallel
-#- media
-#- excludemedia
-#- sortmedia
-#-
-#- update
-#- searchmedia
-sub configure {
- my ($urpm, %options) = @_;
-
- $urpm->clean;
-
- $options{parallel} && $options{usedistrib} and $urpm->{fatal}(1, N("Can't use parallel mode with use-distrib mode"));
-
- if ($options{parallel}) {
- require urpm::parallel;
- urpm::parallel::configure($urpm, $options{parallel});
-
- if (!$options{media} && $urpm->{parallel_handler}{media}) {
- $options{media} = $urpm->{parallel_handler}{media};
- $urpm->{log}->(N("using associated media for parallel mode: %s", $options{media}));
- }
- } else {
- #- nb: can't have both parallel and root
- $urpm->{root} = $options{root};
- }
-
- $urpm->{root} && ! -c "$urpm->{root}/dev/null"
- and $urpm->{error}(N("there doesn't seem to be devices in the chroot in \"%s\"", $urpm->{root}));
-
- if ($options{synthesis}) {
- if ($options{synthesis} ne 'none') {
- #- synthesis take precedence over media, update options.
- $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{usedistrib} || $options{parallel} and
- $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update, --use-distrib or --parallel"));
- $urpm->parse_synthesis($options{synthesis});
- #- synthesis disables the split of transaction (too risky and not useful).
- $urpm->{options}{'split-length'} = 0;
- }
- } else {
- if ($options{usedistrib}) {
- $urpm->{media} = [];
- $urpm->add_distrib_media("Virtual", $options{usedistrib}, %options, 'virtual' => 1);
- } else {
- $urpm->read_config($options{nocheck_access});
- if (!$options{media} && $urpm->{options}{'default-media'}) {
- $options{media} = $urpm->{options}{'default-media'};
- }
- }
- if ($options{media}) {
- delete $_->{modified} foreach @{$urpm->{media} || []};
- $urpm->select_media(split /,/, $options{media});
- foreach (grep { !$_->{modified} } @{$urpm->{media} || []}) {
- #- this is only a local ignore that will not be saved.
- $_->{tempignore} = $_->{ignore} = 1;
- }
- }
- if ($options{searchmedia}) {
- $urpm->select_media($options{searchmedia}); #- Ensure this media has been selected
- if (my $medium = name2medium($urpm, $options{searchmedia})) {
- $medium->{ignore} and $urpm->{fatal}("searchmedia is ignored");
- $medium->{searchmedia} = 1;
- }
- }
- if ($options{excludemedia}) {
- delete $_->{modified} foreach @{$urpm->{media} || []};
- foreach (select_media_by_name($urpm, [ split /,/, $options{excludemedia} ])) {
- $_->{modified} = 1;
- #- this is only a local ignore that will not be saved.
- $_->{tempignore} = $_->{ignore} = 1;
- }
- }
- if ($options{sortmedia}) {
- my @sorted_media = map { select_media_by_name($urpm, [$_]) } split(/,/, $options{sortmedia});
- my @remaining = difference2($urpm->{media}, \@sorted_media);
- $urpm->{media} = [ @sorted_media, @remaining ];
- }
- _parse_media($urpm, 0, \%options) if !$options{nodepslist};
- }
- #- determine package to withdraw (from skip.list file) only if something should be withdrawn.
- if (!$options{nodepslist}) {
- _compute_flags_for_skiplist($urpm, $options{cmdline_skiplist}) if !$options{no_skiplist};
- _compute_flags_for_instlist($urpm);
- }
-}
-
-sub _parse_media {
- my ($urpm, $second_pass, $options) = @_;
-
- foreach (grep { !$_->{ignore} && (!$options->{update} || $_->{update}) } @{$urpm->{media} || []}) {
- our $currentmedia = $_; #- hack for urpmf
- delete @$_{qw(start end)};
- if ($_->{virtual}) {
- if (file_from_file_url($_->{url})) {
- if ($_->{synthesis}) {
- _parse_synthesis($urpm, $_,
- hdlist_or_synthesis_for_virtual_medium($_), $options->{callback});
- } else {
- #- we'll need a second pass
- $second_pass++;
- _parse_hdlist($urpm, $_,
- hdlist_or_synthesis_for_virtual_medium($_),
- $second_pass > 1 ? undef : $options->{callback},
- );
- }
- } else {
- $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $_->{name}));
- $_->{ignore} = 1;
- }
- } else {
- if ($options->{need_hdlist} && file_size(statedir_hdlist($urpm, $_)) > 32) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options->{callback});
- } else {
- if (!_parse_synthesis($urpm, $_,
- statedir_synthesis($urpm, $_),
- $options->{callback})) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options->{callback});
- }
- }
- }
- unless ($_->{ignore}) {
- _check_after_reading_hdlist_or_synthesis($urpm, $_);
- }
- unless ($_->{ignore}) {
- if ($_->{searchmedia}) {
- ($urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}) = ($_->{start}, $_->{end});
- $urpm->{log}(N("Search start: %s end: %s",
- $urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}));
- delete $_->{searchmedia};
- }
- }
- }
-
- if ($second_pass == 1) {
- require URPM::Build;
- $urpm->{log}(N("performing second pass to compute dependencies\n"));
- $urpm->unresolved_provides_clean;
- _parse_media($urpm, 1, $options);
- }
-}
-
-sub _compute_flags_for_skiplist {
- my ($urpm, $cmdline_skiplist) = @_;
- my %uniq;
- $urpm->compute_flags(
- get_packages_list($urpm->{skiplist}, $cmdline_skiplist),
- skip => 1,
- callback => sub {
- my ($urpm, $pkg) = @_;
- $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
- $uniq{$pkg->fullname} = undef;
- $urpm->{log}(N("skipping package %s", scalar($pkg->fullname)));
- },
- );
-}
-
-sub _compute_flags_for_instlist {
- my ($urpm) = @_;
-
- my %uniq;
- $urpm->compute_flags(
- get_packages_list($urpm->{instlist}),
- disable_obsolete => 1,
- callback => sub {
- my ($urpm, $pkg) = @_;
- $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
- $uniq{$pkg->fullname} = undef;
- $urpm->{log}(N("would install instead of upgrade package %s", scalar($pkg->fullname)));
- },
- );
-
-}
-
-#- add a new medium, sync the config file accordingly.
-#- returns the new medium's name. (might be different from the requested
-#- name if index_name was specified)
-#- options: ignore, index_name, nolock, update, virtual
-sub add_medium {
- my ($urpm, $name, $url, $with_hdlist, %options) = @_;
-
- #- make sure configuration has been read.
- $urpm->{media} or die "caller should have used ->read_config or ->configure first";
- urpm::sys::lock_urpmi_db($urpm, 'exclusive') if !$options{nolock};
-
- #- if a medium with that name has already been found, we have to exit now
- my $medium;
- if (defined $options{index_name}) {
- my $i = $options{index_name};
- do {
- ++$i;
- $medium = name2medium($urpm, $name . $i);
- } while $medium;
- $name .= $i;
- } else {
- $medium = name2medium($urpm, $name);
- }
- $medium and $urpm->{fatal}(5, N("medium \"%s\" already exists", $medium->{name}));
-
- $url =~ s,/*$,,; #- clear URLs for trailing /es.
-
- #- creating the medium info.
- $medium = { name => $name, url => $url, update => $options{update}, modified => 1, ignore => $options{ignore} };
- if ($options{virtual}) {
- file_from_file_url($url) or $urpm->{fatal}(1, N("virtual medium needs to be local"));
- $medium->{virtual} = 1;
- } else {
- $medium->{hdlist} = "hdlist.$name.cz";
- $urpm->probe_removable_device($medium);
- }
-
- #- local media have priority, other are added at the end.
- if (file_from_file_url($url)) {
- $medium->{priority} = 0.5;
- } else {
- $medium->{priority} = 1 + @{$urpm->{media}};
- }
-
- $with_hdlist and $medium->{with_hdlist} = $with_hdlist;
-
- #- create an entry in media list.
- push @{$urpm->{media}}, $medium;
-
- $urpm->{log}(N("added medium %s", $name));
- $urpm->{modified} = 1;
-
- $options{nolock} or urpm::sys::unlock_urpmi_db($urpm);
- $name;
-}
-
-#- add distribution media, according to url given.
-#- returns the list of names of added media.
-#- options :
-#- - initial_number : when adding several numbered media, start with this number
-#- - probe_with : if eq 'synthesis', use synthesis instead of hdlists
-#- - ask_media : callback to know whether each media should be added
-#- other options are passed to add_medium(): ignore, nolock, virtual
-sub add_distrib_media {
- my ($urpm, $name, $url, %options) = @_;
-
- #- make sure configuration has been read.
- $urpm->{media} or die "caller should have used ->read_config or ->configure first";
-
- my $distribconf;
-
- if (my $dir = file_from_local_url($url)) {
- $urpm->try_mounting($dir)
- or $urpm->{error}(N("unable to mount the distribution medium")), return ();
- $distribconf = MDV::Distribconf->new($dir, undef);
- $distribconf->load
- or $urpm->{error}(N("this location doesn't seem to contain any distribution")), return ();
- } else {
- unlink "$urpm->{cachedir}/partial/media.cfg";
-
- $distribconf = MDV::Distribconf->new($url, undef);
- $distribconf->settree('mandriva');
-
- $urpm->{log}(N("retrieving media.cfg file..."));
- if (urpm::download::sync($urpm, undef,
- [ reduce_pathname($distribconf->getfullpath(undef, 'infodir') . '/media.cfg') ],
- quiet => 1)) {
- $urpm->{log}(N("...retrieving done"));
- $distribconf->parse_mediacfg("$urpm->{cachedir}/partial/media.cfg")
- or $urpm->{error}(N("unable to parse media.cfg")), return();
- } else {
- $urpm->{error}(N("...retrieving failed: %s", $@));
- $urpm->{error}(N("unable to access the distribution medium (no media.cfg file found)"));
- return ();
- }
- }
-
- #- cosmetic update of name if it contains spaces.
- $name =~ /\s/ and $name .= ' ';
-
- my @newnames;
- #- at this point, we have found a media.cfg file, so parse it
- #- and create all necessary media according to it.
- my $medium = $options{initial_number} || 1;
-
- foreach my $media ($distribconf->listmedia) {
- my $skip = 0;
- # if one of those values is set, by default, we skip adding the media
- foreach (qw(noauto)) {
- $distribconf->getvalue($media, $_) and do {
- $skip = 1;
- last;
- };
- }
- if ($options{ask_media}) {
- if ($options{ask_media}->(
- $distribconf->getvalue($media, 'name'),
- !$skip,
- )) {
- $skip = 0;
- } else {
- $skip = 1;
- }
- }
- $skip and next;
-
- my $media_name = $distribconf->getvalue($media, 'name') || '';
- my $is_update_media = $distribconf->getvalue($media, 'updates_for');
-
- push @newnames, $urpm->add_medium(
- $name ? "$media_name ($name$medium)" : $media_name,
- reduce_pathname($distribconf->getfullpath($media, 'path')),
- offset_pathname(
- $url,
- $distribconf->getpath($media, 'path'),
- ) . '/' . $distribconf->getpath($media, $options{probe_with} eq 'synthesis' ? 'synthesis' : 'hdlist'),
- index_name => $name ? undef : 0,
- %options,
- # the following override %options
- update => $is_update_media ? 1 : undef,
- );
- ++$medium;
- }
- return @newnames;
-}
-
-#- deprecated, use select_media_by_name instead
-sub select_media {
- my $urpm = shift;
- my $options = {};
- if (ref $_[0]) { $options = shift }
- foreach (select_media_by_name($urpm, [ @_ ], $options->{strict_match})) {
- #- select medium by setting the modified flag, do not check ignore.
- $_->{modified} = 1;
- }
-}
-
-sub select_media_by_name {
- my ($urpm, $names, $b_strict_match) = @_;
-
- my %wanted = map { $_ => 1 } @$names;
-
- #- first the exact matches
- my @l = grep { delete $wanted{$_->{name}} } @{$urpm->{media}};
-
- #- check if some arguments don't correspond to the medium name.
- #- in such case, try to find the unique medium (or list candidate
- #- media found).
- foreach (keys %wanted) {
- my $q = quotemeta;
- my (@found, @foundi);
- my $regex = $b_strict_match ? qr/^$q$/ : qr/$q/;
- my $regexi = $b_strict_match ? qr/^$q$/i : qr/$q/i;
- foreach my $medium (@{$urpm->{media}}) {
- $medium->{name} =~ $regex and push @found, $medium;
- $medium->{name} =~ $regexi and push @foundi, $medium;
- }
- @found = @foundi if !@found;
-
- if (@found == 0) {
- $urpm->{error}(N("trying to select nonexistent medium \"%s\"", $_));
- } else {
- if (@found > 1) {
- $urpm->{log}(N("selecting multiple media: %s", join(", ", map { qq("$_->{name}") } @found)));
- }
-            #- changed behaviour to select all occurrences by default.
- push @l, @found;
- }
- }
- @l;
-}
-
-#- deprecated, use remove_media instead
-sub remove_selected_media {
- my ($urpm) = @_;
-
- remove_media($urpm, [ grep { $_->{modified} } @{$urpm->{media}} ]);
-}
-
-sub remove_media {
- my ($urpm, $to_remove) = @_;
-
- foreach my $medium (@$to_remove) {
- $urpm->{log}(N("removing medium \"%s\"", $medium->{name}));
-
- #- mark to re-write configuration.
- $urpm->{modified} = 1;
-
- #- remove files associated with this medium.
- unlink grep { $_ } map { $_->($urpm, $medium) } \&statedir_hdlist, \&statedir_list, \&statedir_synthesis, \&statedir_descriptions, \&statedir_names;
-
- #- remove proxy settings for this media
- urpm::download::remove_proxy_media($medium->{name});
- }
-
- $urpm->{media} = [ difference2($urpm->{media}, $to_remove) ];
-}
-
-#- return list of synthesis or hdlist reference to probe.
-sub _probe_with_try_list {
- my ($probe_with) = @_;
-
- my @probe_synthesis = (
- "media_info/synthesis.hdlist.cz",
- "synthesis.hdlist.cz",
- );
- my @probe_hdlist = (
- "media_info/hdlist.cz",
- "hdlist.cz",
- );
- $probe_with =~ /synthesis/
- ? (@probe_synthesis, @probe_hdlist)
- : (@probe_hdlist, @probe_synthesis);
-}
-
-sub may_reconfig_urpmi {
- my ($urpm, $medium) = @_;
-
- my $f;
- if (my $dir = file_from_file_url($medium->{url})) {
- $f = reduce_pathname("$dir/reconfig.urpmi");
- } else {
- unlink($f = "$urpm->{cachedir}/partial/reconfig.urpmi");
- urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/reconfig.urpmi") ], quiet => 1);
- }
- if (-s $f) {
- reconfig_urpmi($urpm, $f, $medium->{name});
- }
- unlink $f if !file_from_file_url($medium->{url});
-}
-
-#- read a reconfiguration file for urpmi, and reconfigure media accordingly
-#- $rfile is the reconfiguration file (local), $name is the media name
-#-
-#- the format is similar to the RewriteRule of mod_rewrite, so:
-#- PATTERN REPLACEMENT [FLAG]
-#- where FLAG can be L or N
-#-
-#- example of reconfig.urpmi:
-#- # this is an urpmi reconfiguration file
-#- /cooker /cooker/$ARCH
-sub reconfig_urpmi {
- my ($urpm, $rfile, $name) = @_;
- -r $rfile or return;
-
- $urpm->{log}(N("reconfiguring urpmi for media \"%s\"", $name));
-
- my ($magic, @lines) = cat_($rfile);
- #- the first line of reconfig.urpmi must be magic, to be sure it's not an error file
- $magic =~ /^# this is an urpmi reconfiguration file/ or return undef;
-
- my @replacements;
- foreach (@lines) {
- chomp;
- s/^\s*//; s/#.*$//; s/\s*$//;
- $_ or next;
- my ($p, $r, $f) = split /\s+/, $_, 3;
- push @replacements, [ quotemeta $p, $r, $f || 1 ];
- }
-
- my $reconfigured = 0;
- my @reconfigurable = qw(url with_hdlist);
-
- my $medium = name2medium($urpm, $name) or return;
- my %orig = %$medium;
-
- URLS:
- foreach my $k (@reconfigurable) {
- foreach my $r (@replacements) {
- if ($medium->{$k} =~ s/$r->[0]/$r->[1]/) {
- $reconfigured = 1;
- #- Flags stolen from mod_rewrite: L(ast), N(ext)
- if ($r->[2] =~ /L/) {
- last;
- } elsif ($r->[2] =~ /N/) { #- dangerous option
- redo URLS;
- }
- }
- }
- #- check that the new url exists before committing changes (local mirrors)
- my $file = file_from_local_url($medium->{$k});
- if ($file && !-e $file) {
- %$medium = %orig;
- $reconfigured = 0;
- $urpm->{log}(N("...reconfiguration failed"));
- return;
- }
- }
-
- if ($reconfigured) {
- $urpm->{log}(N("reconfiguration done"));
- $urpm->write_config;
- }
- $reconfigured;
-}
-
-sub _guess_hdlist_suffix {
- my ($url) = @_;
- $url =~ m!\bmedia/(\w+)/*\Z! && $1;
-}
-
-sub _hdlist_suffix {
- my ($medium) = @_;
- $medium->{with_hdlist} =~ /hdlist(.*?)(?:\.src)?\.cz$/ ? $1 : '';
-}
-
-sub _parse_hdlist_or_synthesis__when_not_modified {
- my ($urpm, $medium) = @_;
-
- delete @$medium{qw(start end)};
- if ($medium->{virtual}) {
- if (file_from_file_url($medium->{url})) {
- _parse_maybe_hdlist_or_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
- } else {
- $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- }
- } else {
- if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- }
- }
- unless ($medium->{ignore}) {
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- }
-}
-
-sub _parse_hdlist_or_synthesis__virtual {
- my ($urpm, $medium) = @_;
-
- if (my $hdlist_or = hdlist_or_synthesis_for_virtual_medium($medium)) {
- delete $medium->{modified};
- $medium->{really_modified} = 1;
- $urpm->{md5sum_modified} = 1;
- _parse_maybe_hdlist_or_synthesis($urpm, $medium, $hdlist_or);
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- } else {
- $urpm->{error}(N("virtual medium \"%s\" should have valid source hdlist or synthesis, medium ignored",
- $medium->{name}));
- $medium->{ignore} = 1;
- }
-}
-
-#- names.<media_name> is used by external progs (namely for bash-completion)
-sub generate_medium_names {
- my ($urpm, $medium) = @_;
-
- unlink statedir_names($urpm, $medium);
-
- if (my $fh = urpm::sys::open_safe($urpm, ">", statedir_names($urpm, $medium))) {
- foreach ($medium->{start} .. $medium->{end}) {
- if (defined $urpm->{depslist}[$_]) {
- print $fh $urpm->{depslist}[$_]->name . "\n";
- } else {
- $urpm->{error}(N("Error generating names file: dependency %d not found", $_));
- }
- }
- } else {
- $urpm->{error}(N("Error generating names file: Can't write to file (%s)", $!));
- }
-}
-
-
-sub _read_existing_synthesis_and_hdlist_if_same_time_and_msize {
- my ($urpm, $medium, $basename) = @_;
-
- same_size_and_mtime("$urpm->{cachedir}/partial/$basename",
- statedir_hdlist($urpm, $medium)) or return;
-
- unlink "$urpm->{cachedir}/partial/$basename";
-
- _read_existing_synthesis_and_hdlist($urpm, $medium);
-
- 1;
-}
-
-sub _read_existing_synthesis_and_hdlist_if_same_md5sum {
- my ($urpm, $medium, $retrieved_md5sum) = @_;
-
- #- if an existing hdlist or synthesis file has the same md5sum, we assume the
- #- files are the same.
- #- if local md5sum is the same as distant md5sum, this means there is no need to
- #- download hdlist or synthesis file again.
- $retrieved_md5sum && $medium->{md5sum} eq $retrieved_md5sum or return;
-
- unlink "$urpm->{cachedir}/partial/" . basename($medium->{with_hdlist});
-
- _read_existing_synthesis_and_hdlist($urpm, $medium);
-
- 1;
-}
-
-sub _read_existing_synthesis_and_hdlist {
- my ($urpm, $medium) = @_;
-
- $urpm->{log}(N("medium \"%s\" is up-to-date", $medium->{name}));
-
- #- the medium is now considered not modified.
- $medium->{modified} = 0;
- #- XXX we could link the new hdlist to the old one.
- #- (However links need to be managed. see bug #12391.)
- #- as previously done, just read synthesis file here, this is enough.
- if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- }
-
- 1;
-}
-
-sub _parse_hdlist {
- my ($urpm, $medium, $hdlist_file, $o_callback) = @_;
-
- $urpm->{log}(N("examining hdlist file [%s]", $hdlist_file));
- ($medium->{start}, $medium->{end}) =
- $urpm->parse_hdlist($hdlist_file, packing => 1, $o_callback ? (callback => $o_callback) : @{[]});
-}
-
-sub _parse_synthesis {
- my ($urpm, $medium, $synthesis_file, $o_callback) = @_;
-
- $urpm->{log}(N("examining synthesis file [%s]", $synthesis_file));
- ($medium->{start}, $medium->{end}) =
- $urpm->parse_synthesis($synthesis_file, $o_callback ? (callback => $o_callback) : @{[]});
-}
-sub _parse_maybe_hdlist_or_synthesis {
- my ($urpm, $medium, $hdlist_or) = @_;
-
- if ($medium->{synthesis}) {
- if (_parse_synthesis($urpm, $medium, $hdlist_or)) {
- $medium->{synthesis} = 1;
- } elsif (_parse_hdlist($urpm, $medium, $hdlist_or)) {
- delete $medium->{synthesis};
- } else {
- return;
- }
- } else {
- if (_parse_hdlist($urpm, $medium, $hdlist_or)) {
- delete $medium->{synthesis};
- } elsif (_parse_synthesis($urpm, $medium, $hdlist_or)) {
- $medium->{synthesis} = 1;
- } else {
- return;
- }
- }
- 1;
-}
-
-sub _build_hdlist_using_rpm_headers {
- my ($urpm, $medium) = @_;
-
- $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
- #- finish building operation of hdlist.
- $urpm->build_hdlist(start => $medium->{start},
- end => $medium->{end},
- dir => "$urpm->{cachedir}/headers",
- hdlist => statedir_hdlist($urpm, $medium),
- );
-}
-
-sub _build_synthesis {
- my ($urpm, $medium) = @_;
-
- eval { $urpm->build_synthesis(
- start => $medium->{start},
- end => $medium->{end},
- synthesis => statedir_synthesis($urpm, $medium),
- ) };
- if ($@) {
- $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
- $urpm->{error}($@);
- unlink statedir_synthesis($urpm, $medium);
- } else {
- $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
- }
- #- keep in mind we have a modified database, sure at this point.
- $urpm->{md5sum_modified} = 1;
-}
-
-sub is_valid_medium {
- my ($medium) = @_;
- defined $medium->{start} && defined $medium->{end};
-}
-
-sub _check_after_reading_hdlist_or_synthesis {
- my ($urpm, $medium) = @_;
-
- if (!is_valid_medium($medium)) {
- $urpm->{error}(N("problem reading hdlist or synthesis file of medium \"%s\"", $medium->{name}));
- $medium->{ignore} = 1;
- }
-}
sub db_open_or_die {
my ($urpm, $root, $b_force) = @_;
@@ -1174,37 +79,6 @@ sub db_open_or_die {
$db;
}
-sub _get_list_or_pubkey__local {
- my ($urpm, $medium, $name) = @_;
-
- my $path = _hdlist_dir($medium) . "/$name" . _hdlist_suffix($medium);
- -e $path or $path = file_from_local_url($medium->{url}) . "/$name";
- if (-e $path) {
- copy_and_own($path, "$urpm->{cachedir}/partial/$name")
- or $urpm->{error}(N("...copying failed")), return;
- }
- 1;
-}
-
-sub _get_list_or_pubkey__remote {
- my ($urpm, $medium, $name) = @_;
-
- my $found;
- if (_hdlist_suffix($medium)) {
- my $local_name = $name . _hdlist_suffix($medium);
-
- if (urpm::download::sync($urpm, $medium, [_hdlist_dir($medium) . "/$local_name"],
- quiet => 1)) {
- rename("$urpm->{cachedir}/partial/$local_name", "$urpm->{cachedir}/partial/$name");
- $found = 1;
- }
- }
- if (!$found) {
- urpm::download::sync($urpm, $medium, [reduce_pathname("$medium->{url}/$name")], quiet => 1)
- or unlink "$urpm->{cachedir}/partial/$name";
- }
-}
-
sub clean_dir {
my ($dir) = @_;
@@ -1213,542 +87,6 @@ sub clean_dir {
mkdir $dir, 0755;
}
-sub get_descriptions_local {
- my ($urpm, $medium) = @_;
-
- unlink statedir_descriptions($urpm, $medium);
-
- my $dir = file_from_local_url($medium->{url});
- my $description_file = "$dir/media_info/descriptions"; #- new default location
- -e $description_file or $description_file = "$dir/../descriptions";
- -e $description_file or return;
-
- $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name}));
- if (copy_and_own($description_file, statedir_descriptions($urpm, $medium))) {
- $urpm->{log}(N("...copying done"));
- } else {
- $urpm->{error}(N("...copying failed"));
- $medium->{ignore} = 1;
- }
-}
-sub get_descriptions_remote {
- my ($urpm, $medium) = @_;
-
- unlink "$urpm->{cachedir}/partial/descriptions";
-
- if (-e statedir_descriptions($urpm, $medium)) {
- urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions");
- }
- urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ], quiet => 1)
- or #- try older location
- urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ], quiet => 1);
-
- if (-e "$urpm->{cachedir}/partial/descriptions") {
- urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
- }
-}
-sub get_hdlist_or_synthesis__local {
- my ($urpm, $medium, $callback) = @_;
-
- unlink cachedir_hdlist($urpm, $medium);
- $urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $callback and $callback->('copy', $medium->{name});
- if (copy_and_own(_url_with_hdlist($medium), cachedir_hdlist($urpm, $medium))) {
- $callback and $callback->('done', $medium->{name});
- $urpm->{log}(N("...copying done"));
- if (file_size(cachedir_hdlist($urpm, $medium)) < 20) {
- $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)", cachedir_hdlist($urpm, $medium)));
- 0;
- } else {
- 1;
- }
- } else {
- $callback and $callback->('failed', $medium->{name});
- #- force error, reported afterwards
- unlink cachedir_hdlist($urpm, $medium);
- 0;
- }
-}
-
-sub get_hdlist_or_synthesis_and_check_md5sum__local {
- my ($urpm, $medium, $retrieved_md5sum, $callback) = @_;
-
- get_hdlist_or_synthesis__local($urpm, $medium, $callback) or return;
-
- #- keep checking md5sum of file just copied ! (especially on nfs or removable device).
- if ($retrieved_md5sum) {
- $urpm->{log}(N("computing md5sum of copied source hdlist (or synthesis)"));
- urpm::md5sum::compute(cachedir_hdlist($urpm, $medium)) eq $retrieved_md5sum or
- $urpm->{error}(N("copy of [%s] failed (md5sum mismatch)", _url_with_hdlist($medium))), return;
- }
-
- 1;
-}
-
-sub _read_rpms_from_dir {
- my ($urpm, $medium, $second_pass, $clean_cache) = @_;
-
- my $dir = file_from_local_url($medium->{url});
-
- $medium->{rpm_files} = [ glob("$dir/*.rpm") ];
-
- #- check files contains something good!
- if (!@{$medium->{rpm_files}}) {
- $urpm->{error}(N("no rpm files found from [%s]", $dir));
- $medium->{ignore} = 1;
- return;
- }
-
- #- we need to rebuild from rpm files the hdlist.
-
- $urpm->{log}(N("reading rpm files from [%s]", $dir));
- my @unresolved_before = grep {
- ! defined $urpm->{provides}{$_};
- } keys %{$urpm->{provides} || {}};
- $medium->{start} = @{$urpm->{depslist}};
-
- eval {
- $medium->{headers} = [ $urpm->parse_rpms_build_headers(
- dir => "$urpm->{cachedir}/headers",
- rpms => $medium->{rpm_files},
- clean => $$clean_cache,
- packing => 1,
- ) ];
- };
- if ($@) {
- $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
- delete $medium->{headers}; #- do not propagate these.
- return;
- }
-
- $medium->{end} = $#{$urpm->{depslist}};
- if ($medium->{start} > $medium->{end}) {
-        #- an error occurred (provided there are files in input).
- delete $medium->{start};
- delete $medium->{end};
- $urpm->{fatal}(9, N("no rpms read"));
- }
-
- #- make sure the headers will not be removed for another media.
- $$clean_cache = 0;
- my @unresolved = grep {
- ! defined $urpm->{provides}{$_};
- } keys %{$urpm->{provides} || {}};
- @unresolved_before == @unresolved or $$second_pass = 1;
-
- delete $medium->{synthesis}; #- when building hdlist by ourself, drop synthesis property.
- 1;
-}
-
-#- options: callback, force, force_building_hdlist, nomd5sum, nopubkey, probe_with
-sub _update_medium__parse_if_unmodified__local {
- my ($urpm, $medium, $second_pass, $clean_cache, $options) = @_;
-
- my $dir = file_from_local_url($medium->{url});
-
- if (!-d $dir) {
- #- the directory given does not exist and may be accessible
- #- by mounting some other directory. Try to figure it out and mount
- #- everything that might be necessary.
- $urpm->try_mounting(
- !$options->{force_building_hdlist} && $medium->{with_hdlist}
- ? _hdlist_dir($medium) : $dir,
- #- in case of an iso image, pass its name
- is_iso($medium->{removable}) && $medium->{removable},
- ) or $urpm->{error}(N("unable to access medium \"%s\",
-this could happen if you mounted manually the directory when creating the medium.", $medium->{name})), return 'unmodified';
- }
-
- #- try to probe for possible with_hdlist parameter, unless
- #- it is already defined (and valid).
- if ($options->{probe_with} && !$medium->{with_hdlist}) {
- foreach (_probe_with_try_list($options->{probe_with})) {
- -e "$dir/$_" or next;
- if (file_size("$dir/$_") >= 20) {
- $medium->{with_hdlist} = $_;
- last;
- } else {
- $urpm->{error}(N("invalid hdlist file %s for medium \"%s\"", "$dir/$_", $medium->{name}));
- return;
- }
- }
- }
-
- if ($medium->{virtual}) {
- #- syncing a virtual medium is very simple, just try to read the file in order to
- #- determine its type, once a with_hdlist has been found (but is mandatory).
- _parse_hdlist_or_synthesis__virtual($urpm, $medium);
- }
-
- #- examine if a distant MD5SUM file is available.
- #- this will only be done if $with_hdlist is not empty in order to use
- #- an existing hdlist or synthesis file, and to check if download was good.
- #- if no MD5SUM is available, do it as before...
- #- we can assume at this point a basename is existing, but it needs
- #- to be checked for being valid, nothing can be deduced if no MD5SUM
- #- file is present.
-
- unless ($medium->{virtual}) {
- if ($medium->{with_hdlist}) {
- my ($retrieved_md5sum);
-
- if (!$options->{nomd5sum} && file_size(_hdlist_dir($medium) . '/MD5SUM') > 32) {
- $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm, _hdlist_dir($medium) . '/MD5SUM', basename($medium->{with_hdlist}));
- if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force})) {
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
- and return 'unmodified';
- }
- }
-
- #- if the source hdlist is present and we are not forcing using rpm files
- if (!$options->{force_building_hdlist} && -e _url_with_hdlist($medium)) {
- if (get_hdlist_or_synthesis_and_check_md5sum__local($urpm, $medium, $retrieved_md5sum, $options->{callback})) {
-
- $medium->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;
-
- #- check if the files are equal... and no force copy...
- if (!$options->{force}) {
- _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $medium->{hdlist})
- and return 'unmodified';
- }
- } else {
- #- if copying hdlist has failed, try to build it directly.
- if ($urpm->{options}{'build-hdlist-on-error'}) {
- $options->{force_building_hdlist} = 1;
- } else {
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- return;
- }
- }
- }
- } else {
- #- no available hdlist/synthesis, try to build it from rpms
- $options->{force_building_hdlist} = 1;
- }
-
- if ($options->{force_building_hdlist}) {
- _read_rpms_from_dir($urpm, $medium, $second_pass, $clean_cache) or return;
- }
- }
-
- 1;
-}
-
-#- options: callback, force, nomd5sum, nopubkey, probe_with, quiet
-sub _update_medium__parse_if_unmodified__remote {
- my ($urpm, $medium, $options) = @_;
- my ($retrieved_md5sum, $basename);
-
- #- examine if a distant MD5SUM file is available.
- #- this will only be done if $with_hdlist is not empty in order to use
- #- an existing hdlist or synthesis file, and to check if download was good.
- #- if no MD5SUM is available, do it as before...
- if ($medium->{with_hdlist}) {
- #- we can assume at this point a basename is existing, but it needs
- #- to be checked for being valid, nothing can be deduced if no MD5SUM
- #- file is present.
- $basename = basename($medium->{with_hdlist});
-
- unlink "$urpm->{cachedir}/partial/MD5SUM";
- if (!$options->{nomd5sum} &&
- urpm::download::sync($urpm, $medium,
- [ reduce_pathname(_hdlist_dir($medium) . '/MD5SUM') ],
- quiet => 1) && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
- if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force} >= 2)) {
- $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
- and return 'unmodified';
- }
- } else {
-            #- at this point, we don't know if a basename exists and is valid; let's probe it later.
- $basename = undef;
- }
- }
-
- #- try to probe for possible with_hdlist parameter, unless
- #- it is already defined (and valid).
- $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $options->{callback} and $options->{callback}('retrieve', $medium->{name});
- if ($options->{probe_with} && !$medium->{with_hdlist}) {
- foreach my $with_hdlist (_probe_with_try_list($options->{probe_with})) {
- $basename = basename($with_hdlist) or next;
- $options->{force} and unlink "$urpm->{cachedir}/partial/$basename";
- if (urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/$with_hdlist") ],
- quiet => $options->{quiet}, callback => $options->{callback}) && file_size("$urpm->{cachedir}/partial/$basename") >= 20) {
- $urpm->{log}(N("...retrieving done"));
- $medium->{with_hdlist} = $with_hdlist;
- $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $medium->{with_hdlist}));
- last; #- found a suitable with_hdlist in the list above.
- }
- }
- } else {
- $basename = basename($medium->{with_hdlist});
-
- if ($options->{force}) {
- unlink "$urpm->{cachedir}/partial/$basename";
- } else {
-            #- try to sync (copy if needed) the local copy after restoring the previous one.
- #- this is useful for rsync (?)
- if (-e statedir_hdlist_or_synthesis($urpm, $medium)) {
- copy_and_own(
- statedir_hdlist_or_synthesis($urpm, $medium),
- "$urpm->{cachedir}/partial/$basename",
- ) or $urpm->{error}(N("...copying failed")), return;
- }
- }
- if (urpm::download::sync($urpm, $medium, [ _url_with_hdlist($medium) ],
- quiet => $options->{quiet}, callback => $options->{callback})) {
- $urpm->{log}(N("...retrieving done"));
- } else {
- $urpm->{error}(N("...retrieving failed: %s", $@));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- #- check downloaded file has right signature.
- if (file_size("$urpm->{cachedir}/partial/$basename") >= 20 && $retrieved_md5sum) {
- $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
- unless (urpm::md5sum::compute("$urpm->{cachedir}/partial/$basename") eq $retrieved_md5sum) {
- $urpm->{error}(N("...retrieving failed: md5sum mismatch"));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- if (file_size("$urpm->{cachedir}/partial/$basename") >= 20) {
- $options->{callback} and $options->{callback}('done', $medium->{name});
-
- unless ($options->{force}) {
- _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $basename)
- and return 'unmodified';
- }
-
- #- the files are different, update local copy.
- rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
- } else {
- $options->{callback} and $options->{callback}('failed', $medium->{name});
- $urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
- return;
- }
- $urpm->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;
- 1;
-}
-
-sub _get_pubkey_and_descriptions {
- my ($urpm, $medium, $nopubkey) = @_;
-
- my $local = file_from_local_url($medium->{url});
-
- ($local ? \&get_descriptions_local : \&get_descriptions_remote)->($urpm, $medium);
-
- #- examine if a pubkey file is available.
- if (!$nopubkey && !$medium->{'key-ids'}) {
- ($local ? \&_get_list_or_pubkey__local : \&_get_list_or_pubkey__remote)->($urpm, $medium, 'pubkey');
- }
-}
-
-sub _read_cachedir_pubkey {
- my ($urpm, $medium) = @_;
- -s "$urpm->{cachedir}/partial/pubkey" or return;
-
- $urpm->{log}(N("examining pubkey file of \"%s\"...", $medium->{name}));
-
- my %key_ids;
- $urpm->import_needed_pubkeys(
- [ $urpm->parse_armored_file("$urpm->{cachedir}/partial/pubkey") ],
- root => $urpm->{root},
- callback => sub {
- my (undef, undef, $_k, $id, $imported) = @_;
- if ($id) {
- $key_ids{$id} = undef;
- $imported and $urpm->{log}(N("...imported key %s from pubkey file of \"%s\"",
- $id, $medium->{name}));
- } else {
- $urpm->{error}(N("unable to import pubkey file of \"%s\"", $medium->{name}));
- }
- });
- if (keys(%key_ids)) {
- $medium->{'key-ids'} = join(',', keys %key_ids);
- }
-}
-
-sub _write_rpm_list {
- my ($urpm, $medium) = @_;
-
- @{$medium->{rpm_files} || []} or return;
-
- $medium->{list} ||= "list.$medium->{name}";
-
- #- write list file.
- $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
- my $listfh = urpm::sys::open_safe($urpm, '>', cachedir_list($urpm, $medium)) or return;
- print $listfh basename($_), "\n" foreach @{$medium->{rpm_files}};
- 1;
-}
-
-#- options: callback, force, force_building_hdlist, nomd5sum, probe_with, quiet
-#- (from _update_medium__parse_if_unmodified__local and _update_medium__parse_if_unmodified__remote)
-sub _update_medium_first_pass {
- my ($urpm, $medium, $second_pass, $clean_cache, %options) = @_;
-
- #- we should create the associated synthesis file if it does not already exist...
- file_size(statedir_synthesis($urpm, $medium)) >= 20
- or $medium->{must_build_synthesis} = 1;
-
- unless ($medium->{modified}) {
- #- the medium is not modified, but to compute dependencies,
- #- we still need to read it and all synthesis will be written if
- #- an unresolved provides is found.
- #- to speed up the process, we only read the synthesis at the beginning.
- _parse_hdlist_or_synthesis__when_not_modified($urpm, $medium);
- return 1;
- }
-
- #- always delete a remaining list file or pubkey file in cache.
- foreach (qw(list pubkey)) {
- unlink "$urpm->{cachedir}/partial/$_";
- }
-
- #- check for a reconfig.urpmi file (if not already reconfigured)
- if (!$medium->{noreconfigure}) {
- may_reconfig_urpmi($urpm, $medium);
- }
-
- {
- my $rc =
- file_from_local_url($medium->{url})
- ? _update_medium__parse_if_unmodified__local($urpm, $medium, $second_pass, $clean_cache, \%options)
- : _update_medium__parse_if_unmodified__remote($urpm, $medium, \%options);
-
- if (!$rc || $rc eq 'unmodified') {
- return $rc;
- }
- }
-
- #- build list file according to hdlist.
- if (!$medium->{headers} && !$medium->{virtual} && file_size(cachedir_hdlist($urpm, $medium)) < 20) {
- $urpm->{error}(N("no hdlist file found for medium \"%s\"", $medium->{name}));
- return;
- }
-
- if (!$medium->{virtual}) {
- if ($medium->{headers}) {
- _write_rpm_list($urpm, $medium) or return;
- } else {
- #- read first pass hdlist or synthesis, try to open as synthesis, if file
- #- is larger than 1MB, this is probably an hdlist else a synthesis.
-            #- anyway, if one try fails, try the other mode.
- $options{callback} and $options{callback}('parse', $medium->{name});
- my @unresolved_before = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
-
- #- if it looks like a hdlist, try to parse as hdlist first
- delete $medium->{synthesis} if file_size(cachedir_hdlist($urpm, $medium)) > 262144;
- _parse_maybe_hdlist_or_synthesis($urpm, $medium, cachedir_hdlist($urpm, $medium));
-
- if (is_valid_medium($medium)) {
- $options{callback} && $options{callback}('done', $medium->{name});
- } else {
- $urpm->{error}(N("unable to parse hdlist file of \"%s\"", $medium->{name}));
- $options{callback} and $options{callback}('failed', $medium->{name});
- delete $medium->{md5sum};
-
- #- we have to read back the current synthesis file unmodified.
- if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
- $urpm->{error}(N("problem reading synthesis file of medium \"%s\"", $medium->{name}));
- $medium->{ignore} = 1;
- }
- return;
- }
- delete $medium->{list};
-
- {
- my @unresolved_after = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
- @unresolved_before == @unresolved_after or $$second_pass = 1;
- }
- }
- }
-
- unless ($medium->{virtual}) {
- #- make sure to rebuild base files and clear medium modified state.
- $medium->{modified} = 0;
- $medium->{really_modified} = 1;
- $urpm->{md5sum_modified} = 1;
-
- #- but use newly created file.
- unlink statedir_hdlist($urpm, $medium);
- $medium->{synthesis} and unlink statedir_synthesis($urpm, $medium);
- $medium->{list} and unlink statedir_list($urpm, $medium);
- unless ($medium->{headers}) {
- unlink statedir_synthesis($urpm, $medium);
- unlink statedir_hdlist($urpm, $medium);
- urpm::util::move(cachedir_hdlist($urpm, $medium),
- statedir_hdlist_or_synthesis($urpm, $medium));
- }
- if ($medium->{list}) {
- urpm::util::move(cachedir_list($urpm, $medium), statedir_list($urpm, $medium));
- }
-
- #- and create synthesis file associated.
- $medium->{must_build_synthesis} = !$medium->{synthesis};
- }
- 1;
-}
-
-sub _update_medium_first_pass_failed {
- my ($urpm, $medium) = @_;
-
- !$medium->{virtual} or return;
-
-    #- an error has occurred while updating the medium, we have to remove temporary files.
- unlink(glob("$urpm->{cachedir}/partial/*"));
-}
-
-#- take care of modified medium only, or all if all have to be recomputed.
-sub _update_medium_second_pass {
- my ($urpm, $medium, $callback) = @_;
-
- $callback and $callback->('parse', $medium->{name});
-
- #- a modified medium is an invalid medium, we have to read back the previous hdlist
- #- or synthesis which has not been modified by first pass above.
-
- if ($medium->{headers} && !$medium->{modified}) {
- $urpm->{log}(N("reading headers from medium \"%s\"", $medium->{name}));
- ($medium->{start}, $medium->{end}) = $urpm->parse_headers(dir => "$urpm->{cachedir}/headers",
- headers => $medium->{headers},
- );
- } elsif ($medium->{synthesis}) {
- if ($medium->{virtual}) {
- if (file_from_file_url($medium->{url})) {
- _parse_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
- }
- } else {
- _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
- }
- } else {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- $medium->{must_build_synthesis} ||= 1;
- }
-
- $callback && $callback->('done', $medium->{name});
-}
-
-sub _build_hdlist_synthesis {
- my ($urpm, $medium) = @_;
-
- if ($medium->{headers} && !$medium->{modified}) {
- _build_hdlist_using_rpm_headers($urpm, $medium);
- #- synthesis needs to be created, since the medium has been built from rpm files.
- _build_synthesis($urpm, $medium);
- } elsif ($medium->{synthesis}) {
- } else {
- #- check if the synthesis file can be built.
- if ($medium->{must_build_synthesis} && !$medium->{modified} && !$medium->{virtual}) {
- _build_synthesis($urpm, $medium);
- }
- }
-}
-
sub remove_obsolete_headers_in_cache {
my ($urpm) = @_;
my %headers;
@@ -1770,117 +108,6 @@ sub remove_obsolete_headers_in_cache {
}
}
-sub _update_media__handle_some_flags {
- my ($urpm, $forcekey, $all) = @_;
-
- foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
- $forcekey and delete $medium->{'key-ids'};
-
- if ($medium->{static}) {
- #- don't ever update static media
- $medium->{modified} = 0;
- } elsif ($all) {
- #- if we're rebuilding all media, mark them as modified (except removable ones)
- $medium->{modified} ||= $medium->{url} !~ m!^removable!;
- }
- }
-}
-
-#- Update the urpmi database w.r.t. the current configuration.
-#- Takes care of modifications, and tries some tricks to bypass
-#- the recomputation of base files.
-#- Recognized options :
-#-   all : all media are being rebuilt
-#- callback : UI callback
-#- forcekey : force retrieval of pubkey
-#- force : try to force rebuilding base files
-#- force_building_hdlist
-#- noclean : keep old files in the header cache directory
-#- nolock : don't lock the urpmi database
-#- nomd5sum : don't verify MD5SUM of retrieved files
-#- nopubkey : don't use rpm pubkeys
-#- probe_with : probe synthesis or hdlist (or none)
-#- quiet : download hdlists quietly
-sub update_media {
- my ($urpm, %options) = @_;
-
- $urpm->{media} or return; # verify that configuration has been read
-
- $options{nopubkey} ||= $urpm->{options}{nopubkey};
- #- get gpg-pubkey signature.
- if (!$options{nopubkey}) {
- urpm::sys::lock_rpm_db($urpm, 'exclusive');
- $urpm->{keys} or $urpm->parse_pubkeys(root => $urpm->{root});
- }
- #- lock database if allowed.
- urpm::sys::lock_urpmi_db($urpm, 'exclusive') if !$options{nolock};
-
- #- examine each medium to see if one of them needs to be updated.
- #- if this is the case and if not forced, try to use a pre-calculated
- #- hdlist file, else build it from rpm files.
- $urpm->clean;
-
- _update_media__handle_some_flags($urpm, $options{forcekey}, $options{all});
-
- my $clean_cache = !$options{noclean};
- my $second_pass;
- foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
- _update_medium_first_pass($urpm, $medium, \$second_pass, \$clean_cache, %options)
- or _update_medium_first_pass_failed($urpm, $medium);
- }
-
-    #- some unresolved provides may force a rebuild of all synthesis files,
-    #- so a second pass will be necessary.
- if ($second_pass) {
- $urpm->{log}(N("performing second pass to compute dependencies\n"));
- $urpm->unresolved_provides_clean;
- }
-
- foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
- if ($second_pass) {
-            #- the second pass consists of reading the synthesis or hdlist files again.
- _update_medium_second_pass($urpm, $medium, $options{callback});
- }
- _build_hdlist_synthesis($urpm, $medium);
-
- if ($medium->{really_modified}) {
- _get_pubkey_and_descriptions($urpm, $medium, $options{nopubkey});
- _read_cachedir_pubkey($urpm, $medium);
- generate_medium_names($urpm, $medium);
- }
- }
-
- if ($urpm->{modified}) {
- if ($options{noclean}) {
- #- clean headers cache directory to remove everything that is no longer
- #- useful according to the depslist.
- remove_obsolete_headers_in_cache($urpm);
- }
- #- write config files in any case
- $urpm->write_config;
- dump_proxy_config();
- } elsif ($urpm->{md5sum_modified}) {
- #- NB: in case of $urpm->{modified}, write_MD5SUM is called in write_config above
- write_MD5SUM($urpm);
- }
-
- $options{nolock} or urpm::sys::unlock_urpmi_db($urpm);
- $options{nopubkey} or urpm::sys::unlock_rpm_db($urpm);
-}
-
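For orientation while reading this removal, here is a sketch of a possible caller, assuming update_media() moves into urpm::media along with the rest of this block (option names follow the list above; the values and the callback body are purely illustrative):

    use urpm;
    use urpm::media;

    my $urpm = urpm->new;
    urpm::media::read_config($urpm, 0);     #- load /etc/urpmi/urpmi.cfg
    urpm::media::update_media($urpm,
        all      => 1,                      #- rebuild every non-removable medium
        quiet    => 1,                      #- download hdlists quietly
        callback => sub { my ($state, $name) = @_; print "$state $name\n" },
    );
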
-#- clean params and depslist computation zone.
-sub clean {
- my ($urpm) = @_;
-
- $urpm->{depslist} = [];
- $urpm->{provides} = {};
-
- foreach (@{$urpm->{media} || []}) {
- delete $_->{start};
- delete $_->{end};
- }
-}
-
sub try_mounting {
my ($urpm, $dir, $o_removable) = @_;
my %infos;
@@ -2210,20 +437,6 @@ sub resolve_dependencies {
$need_restart;
}
-#- get the list of packages that should not be upgraded or installed,
-#- typically from the inst.list or skip.list files.
-sub get_packages_list {
- my ($file, $o_extra) = @_;
- my $val = [];
- open(my $f, '<', $file) or return [];
- foreach (<$f>, split /,/, $o_extra || '') {
- chomp; s/#.*$//; s/^\s*//; s/\s*$//;
- next if $_ eq '';
- push @$val, $_;
- }
- $val;
-}
-
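A rough illustration of what this helper returns (file name and contents are invented, and the call is shown unqualified, wherever the sub ends up after the move); it merely strips comments and whitespace and appends the optional comma-separated extra list:

    # given a skip.list containing:
    #   # never upgrade these
    #   kernel
    #   openoffice.org
    my $skip = get_packages_list('/etc/urpmi/skip.list', 'foo,bar');
    # -> [ 'kernel', 'openoffice.org', 'foo', 'bar' ]
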
#- select sources for selected packages,
#- according to keys of the packages hash.
#- returns a list of lists containing the source description for each rpm,
@@ -2287,11 +500,11 @@ sub get_source_packages {
foreach my $medium (@{$urpm->{media} || []}) {
my (%sources, %list_examined, $list_warning);
- if (is_valid_medium($medium) && !$medium->{ignore}) {
+ if (urpm::media::is_valid_medium($medium) && !$medium->{ignore}) {
#- always prefer a list file if available.
if ($medium->{list}) {
- if (-r statedir_list($urpm, $medium)) {
- foreach (cat_(statedir_list($urpm, $medium))) {
+ if (-r urpm::media::statedir_list($urpm, $medium)) {
+ foreach (cat_(urpm::media::statedir_list($urpm, $medium))) {
chomp;
if (my ($filename) = m!([^/]*\.rpm)$!) {
if (keys(%{$file2fullnames{$filename} || {}}) > 1) {
@@ -2309,7 +522,7 @@ sub get_source_packages {
} else {
chomp;
$error = 1;
- $urpm->{error}(N("unable to correctly parse [%s] on value \"%s\"", statedir_list($urpm, $medium), $_));
+ $urpm->{error}(N("unable to correctly parse [%s] on value \"%s\"", urpm::media::statedir_list($urpm, $medium), $_));
last;
}
}
@@ -2341,7 +554,7 @@ sub get_source_packages {
}
}
}
- $list_warning && $medium->{list} && -r statedir_list($urpm, $medium) && -f _
+ $list_warning && $medium->{list} && -r urpm::media::statedir_list($urpm, $medium) && -f _
and $urpm->{error}(N("medium \"%s\" uses an invalid list file:
mirror is probably not up-to-date, trying to use alternate method", $medium->{name}));
} elsif (!%list_examined) {
@@ -2844,12 +1057,12 @@ sub _check_sources_signatures {
$verif =~ s/\n//g;
$invalid_sources{$filepath} = N("Invalid signature (%s)", $verif);
} else {
- unless ($medium && is_valid_medium($medium) &&
+ unless ($medium && urpm::media::is_valid_medium($medium) &&
$medium->{start} <= $id && $id <= $medium->{end})
{
$medium = undef;
foreach (@{$urpm->{media}}) {
- is_valid_medium($_) && $_->{start} <= $id && $id <= $_->{end}
+ urpm::media::is_valid_medium($_) && $_->{start} <= $id && $id <= $_->{end}
and $medium = $_, last;
}
}
@@ -2900,7 +1113,7 @@ sub get_updates_description {
@update_medias or @update_medias = grep { !$_->{ignore} && $_->{update} } @{$urpm->{media}};
- foreach (map { cat_(statedir_descriptions($urpm, $_)), '%package dummy' } @update_medias) {
+ foreach (map { cat_(urpm::media::statedir_descriptions($urpm, $_)), '%package dummy' } @update_medias) {
/^%package (.+)/ and do {
if (exists $cur->{importance} && $cur->{importance} ne "security" && $cur->{importance} ne "bugfix") {
$cur->{importance} = 'normal';
diff --git a/urpm/bug_report.pm b/urpm/bug_report.pm
index e9666d13..eb1258a4 100644
--- a/urpm/bug_report.pm
+++ b/urpm/bug_report.pm
@@ -32,14 +32,14 @@ sub write_urpmdb {
#- take care of virtual medium this way.
$_->{hdlist} ||= "hdlist.$_->{name}.cz";
#- now build directly synthesis file, this is by far the simplest method.
- if (urpm::is_valid_medium($_)) {
+ if (urpm::media::is_valid_medium($_)) {
$urpm->build_synthesis(start => $_->{start}, end => $_->{end}, synthesis => "$bug_report_dir/synthesis.$_->{hdlist}");
$urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $_->{name}));
}
}
#- fake configuration written to convert virtual media on the fly.
local $urpm->{config} = "$bug_report_dir/urpmi.cfg";
- $urpm->write_config;
+ urpm::media::write_config($urpm);
}
sub copy_requested {
diff --git a/urpm/download.pm b/urpm/download.pm
index 2a60a4eb..0b6b94e9 100644
--- a/urpm/download.pm
+++ b/urpm/download.pm
@@ -11,7 +11,7 @@ use Exporter;
our @ISA = 'Exporter';
our @EXPORT = qw(get_proxy
propagate_sync_callback
- sync_file sync_prozilla sync_wget sync_curl sync_rsync sync_ssh
+ sync_file sync_rsync sync_ssh
set_proxy_config dump_proxy_config
);
@@ -24,6 +24,20 @@ my $proxy_config;
#- Timeout for curl connection and wget operations
our $CONNECT_TIMEOUT = 60; #- (in seconds)
+
+
+sub ftp_http_downloaders() { qw(curl wget prozilla) }
+
+sub available_ftp_http_downloaders() {
+ my %binaries = (
+ curl => 'curl',
+ wget => 'wget',
+ prozilla => 'proz',
+ );
+ grep { -x "/usr/bin/$binaries{$_}" || -x "/bin/$binaries{$_}" } ftp_http_downloaders();
+}
+
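For example (the result obviously depends on which binaries are installed):

    # on a host that has curl and wget but not prozilla:
    my @downloaders = urpm::download::available_ftp_http_downloaders();
    # -> ('curl', 'wget')    #- ordered as in ftp_http_downloaders()
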
+
#- parses proxy.cfg (private)
sub load_proxy_config () {
return if defined $proxy_config;
@@ -294,7 +308,7 @@ sub sync_curl {
eval { require Date::Manip };
#- prepare to get back size and time stamp of each file.
- open my $curl, join(" ", map { "'$_'" } "/usr/bin/curl",
+ my $cmd = join(" ", map { "'$_'" } "/usr/bin/curl",
"-q", # don't read .curlrc; some toggle options might interfer
($options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
@@ -305,7 +319,8 @@ sub sync_curl {
"-s", "-I",
"--anyauth",
(defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : ()),
- @ftp_files) . " |";
+ @ftp_files);
+ open my $curl, "$cmd |";
while (<$curl>) {
if (/Content-Length:\s*(\d+)/) {
!$cur_ftp_file || exists($ftp_files_info{$cur_ftp_file}{size})
@@ -353,7 +368,7 @@ sub sync_curl {
{
my @l = (@ftp_files, @other_files);
my ($buf, $file); $buf = '';
- my $curl_pid = open my $curl, join(" ", map { "'$_'" } "/usr/bin/curl",
+ my $cmd = join(" ", map { "'$_'" } "/usr/bin/curl",
"-q", # don't read .curlrc; some toggle options might interfer
($options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
($options->{resume} ? ("--continue-at", "-") : ()),
@@ -369,7 +384,8 @@ sub sync_curl {
"--anyauth",
(defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : ()),
"--stderr", "-", # redirect everything to stdout
- @all_files) . " |";
+ @all_files);
+ my $curl_pid = open(my $curl, "$cmd |");
local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
local $_;
while (<$curl>) {
@@ -581,6 +597,85 @@ sub sync_logger {
}
}
+
+sub requested_ftp_http_downloader {
+ my ($urpm, $media_name) = @_;
+
+ $urpm->{options}{downloader} || #- cmd-line switch
+ $media_name && do {
+ #- per-media config
+ require urpm::media; #- help perl_checker
+ my $m = urpm::media::name2medium($urpm, $media_name);
+ $m && $m->{downloader};
+ } || $urpm->{global_config}{downloader};
+}
+
+#- $medium can be undef
+#- known options: quiet, resume, callback
+sub sync {
+ my ($urpm, $medium, $files, %options) = @_;
+
+ my %all_options = (
+ dir => "$urpm->{cachedir}/partial",
+ proxy => get_proxy($medium),
+ $medium ? (media => $medium->{name}) : (),
+ %options,
+ );
+ foreach my $cpt (qw(compress limit_rate retry wget-options curl-options rsync-options prozilla-options)) {
+ $all_options{$cpt} = $urpm->{options}{$cpt} if defined $urpm->{options}{$cpt};
+ }
+
+ eval { _sync_webfetch_raw($urpm, $files, \%all_options); 1 };
+}
+
+#- syncing algorithms.
+sub _sync_webfetch_raw {
+ my ($urpm, $files, $options) = @_;
+
+ my %files;
+ #- currently ftp and http protocols are managed by curl or wget,
+ #- ssh and rsync protocols are managed by rsync *AND* ssh.
+ foreach (@$files) {
+ my $proto = urpm::protocol_from_url($_) or die N("unknown protocol defined for %s", $_);
+ push @{$files{$proto}}, $_;
+ }
+ if ($files{removable} || $files{file}) {
+ my @l = map { urpm::file_from_local_url($_) } @{$files{removable} || []}, @{$files{file} || []};
+ eval { sync_file($options, @l) };
+ $urpm->{fatal}(10, $@) if $@;
+ delete @files{qw(removable file)};
+ }
+ if ($files{ftp} || $files{http} || $files{https}) {
+ my @available = urpm::download::available_ftp_http_downloaders();
+
+ #- use user default downloader if provided and available
+ my $requested_downloader = requested_ftp_http_downloader($urpm, $options->{media});
+ my ($preferred) = grep { $_ eq $requested_downloader } @available;
+ if (!$preferred) {
+ #- else first downloader of @available is the default one
+ $preferred = $available[0];
+ if ($requested_downloader && !our $webfetch_not_available) {
+ $urpm->{log}(N("%s is not available, falling back on %s", $requested_downloader, $preferred));
+ $webfetch_not_available = 1;
+ }
+ }
+ my $sync = $urpm::download::{"sync_$preferred"} or die N("no webfetch found, supported webfetch are: %s\n", join(", ", urpm::download::ftp_http_downloaders()));
+ $sync->($options, @{$files{ftp} || []}, @{$files{http} || []}, @{$files{https} || []});
+
+ delete @files{qw(ftp http https)};
+ }
+ if ($files{rsync}) {
+ sync_rsync($options, @{$files{rsync}});
+ delete $files{rsync};
+ }
+ if ($files{ssh}) {
+ my @ssh_files = map { m!^ssh://([^/]*)(.*)! ? "$1:$2" : () } @{$files{ssh}};
+ sync_ssh($options, @ssh_files);
+ delete $files{ssh};
+ }
+ %files and die N("unable to handle protocol: %s", join ', ', keys %files);
+}
+
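A hedged usage sketch of the new sync() entry point (the URLs are placeholders; a real caller passes an already-configured $urpm and, optionally, the medium being processed):

    # fetch two files into $urpm->{cachedir}/partial, dispatching on protocol
    urpm::download::sync($urpm, $medium,
        [ "$medium->{url}/media_info/MD5SUM",
          "rsync://mirror.example.com/pub/hdlist.cz" ],
        quiet => 1,
    ) or $urpm->{error}("retrieval failed: $@");
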
1;
__END__
diff --git a/urpm/ldap.pm b/urpm/ldap.pm
index 6298f422..5c4d636f 100644
--- a/urpm/ldap.pm
+++ b/urpm/ldap.pm
@@ -38,7 +38,7 @@ therefore, caching is useless if server is up.
Checks if the ldap medium has all required attributes.
-=item read_ldap_cache($urpm, %options)
+=item read_ldap_cache($urpm)
Reads the cache created by the C<write_ldap_cache> function. Should be called
if the ldap server doesn't answer (upgrade, network problem, mobile user, etc.)
@@ -47,7 +47,7 @@ if the ldap server doesn't answer (upgrade, network problem, mobile user, etc.)
Cleans the ldap cache, removes all files in the directory.
-=item load_ldap_media($urpm, %options)
+=item load_ldap_media($urpm)
Loads urpmi media configuration from ldap.
@@ -97,13 +97,13 @@ sub get_vars_from_sh {
%l;
}
-sub read_ldap_cache($%) {
- my ($urpm, %options) = @_;
+sub read_ldap_cache {
+ my ($urpm) = @_;
foreach (glob("$urpm->{cachedir}/ldap/*")) {
! -f $_ and next;
my %medium = get_vars_from_sh($_);
next if !check_ldap_medium(\%medium);
- $urpm->probe_medium(\%medium, %options) and push @{$urpm->{media}}, \%medium;
+ urpm::media::add_existing_medium($urpm, \%medium);
}
}
@@ -146,8 +146,8 @@ my %ldap_changed_attributes = (
'ftp-proxy' => 'ftp_proxy',
);
-sub load_ldap_media($%) {
- my ($urpm, %options) = @_;
+sub load_ldap_media {
+ my ($urpm) = @_;
my $config = get_ldap_config() or return;
@@ -205,13 +205,13 @@ sub load_ldap_media($%) {
$medium->{ldap} = 1;
$medium->{priority} = $priority++;
next if !check_ldap_medium($medium);
- $urpm->probe_medium($medium, %options) and push @{$urpm->{media}}, $medium;
+ urpm::media::add_existing_medium($urpm, $medium);
write_ldap_cache($urpm,$medium);
}
};
if ($@) {
$urpm->{log}($@);
- read_ldap_cache($urpm, %options);
+ read_ldap_cache($urpm);
}
}
diff --git a/urpm/md5sum.pm b/urpm/md5sum.pm
index 829e0dfd..f5295190 100644
--- a/urpm/md5sum.pm
+++ b/urpm/md5sum.pm
@@ -40,8 +40,8 @@ sub on_local_medium {
sub compute_on_local_medium {
my ($urpm, $medium) = @_;
- require urpm; #- help perl_checker
- my $f = urpm::statedir_hdlist_or_synthesis($urpm, $medium);
+ require urpm::media; #- help perl_checker
+ my $f = urpm::media::statedir_hdlist_or_synthesis($urpm, $medium);
$urpm->{log}(N("computing md5sum of existing source hdlist (or synthesis) [%s]", $f));
-e $f && compute($f);
}
diff --git a/urpm/media.pm b/urpm/media.pm
new file mode 100644
index 00000000..e32ae406
--- /dev/null
+++ b/urpm/media.pm
@@ -0,0 +1,1800 @@
+package urpm::media;
+
+# $Id$
+
+use urpm 'file_from_local_url';
+use urpm::msg;
+use urpm::util;
+
+
+our @PER_MEDIA_OPT = qw(
+ downloader
+ hdlist
+ ignore
+ key-ids
+ list
+ md5sum
+ noreconfigure
+ priority
+ priority-upgrade
+ removable
+ static
+ synthesis
+ update
+ url
+ verify-rpm
+ virtual
+ with_hdlist
+);
+
+sub read_private_netrc {
+ my ($urpm) = @_;
+
+ my @words = split(/\s+/, scalar cat_($urpm->{private_netrc}));
+ my @l;
+ my $e;
+ while (@words) {
+ my $keyword = shift @words;
+ if ($keyword eq 'machine') {
+ push @l, $e = { machine => shift(@words) };
+ } elsif ($keyword eq 'default') {
+ push @l, $e = { default => '' };
+ } elsif ($keyword eq 'login' || $keyword eq 'password' || $keyword eq 'account') {
+ $e->{$keyword} = shift(@words);
+ } else {
+ $urpm->{error}("unknown netrc command $keyword");
+ }
+ }
+ @l;
+}
+
+sub parse_url_with_login {
+ my ($url) = @_;
+ $url =~ m!([^:]*)://([^/:\@]*)(:([^/:\@]*))?\@([^/]*)(.*)! &&
+ { proto => $1, login => $2, password => $4, machine => $5, dir => $6 };
+}
+
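For instance (URL invented), parse_url_with_login() splits a password-bearing URL into its parts:

    my $u = parse_url_with_login('ftp://joe:secret@ftp.example.com/pub/media');
    # -> { proto => 'ftp', login => 'joe', password => 'secret',
    #      machine => 'ftp.example.com', dir => '/pub/media' }
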
+sub read_config_add_passwords {
+ my ($urpm, $config) = @_;
+
+ my @netrc = read_private_netrc($urpm) or return;
+ foreach (values %$config) {
+ my $u = parse_url_with_login($_->{url}) or next;
+ if (my ($e) = grep { ($_->{default} || $_->{machine} eq $u->{machine}) && $_->{login} eq $u->{login} } @netrc) {
+ $_->{url} = sprintf('%s://%s:%s@%s%s', $u->{proto}, $u->{login}, $e->{password}, $u->{machine}, $u->{dir});
+ } else {
+ $urpm->{log}("no password found for $u->{login}@$u->{machine}");
+ }
+ }
+}
+
+sub remove_passwords_and_write_private_netrc {
+ my ($urpm, $config) = @_;
+
+ my @l;
+ foreach (values %$config) {
+ my $u = parse_url_with_login($_->{url}) or next;
+ #- check whether a password is visible
+ $u->{password} or next;
+
+ push @l, $u;
+ $_->{url} = sprintf('%s://%s@%s%s', $u->{proto}, $u->{login}, $u->{machine}, $u->{dir});
+ }
+ {
+ my $fh = urpm::sys::open_safe($urpm, '>', $urpm->{private_netrc}) or return;
+ foreach my $u (@l) {
+ printf $fh "machine %s login %s password %s\n", $u->{machine}, $u->{login}, $u->{password};
+ }
+ }
+ chmod 0600, $urpm->{private_netrc};
+}
+
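Together with read_config_add_passwords() above, this keeps urpmi.cfg free of clear-text passwords. A sketch of the round trip (values invented, $urpm->{private_netrc} assumed to be set up as usual):

    my $config = { main => { url => 'ftp://joe:secret@ftp.example.com/pub/media' } };
    remove_passwords_and_write_private_netrc($urpm, $config);
    # now $config->{main}{url} is 'ftp://joe@ftp.example.com/pub/media'
    # and the private netrc file (mode 0600) contains:
    #   machine ftp.example.com login joe password secret
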
+#- handle deprecated way of saving passwords
+sub recover_url_from_list {
+ my ($urpm, $medium) = @_;
+
+    #- /./ is the end-of-url marker in a list file (typically generated by
+    #- find . -name "*.rpm" > list
+    #- for an exportable list file).
+ if (my @probe = map { m!^(.*)/\./! || m!^(.*)/[^/]*$! } cat_(statedir_list($urpm, $medium))) {
+ ($medium->{url}) = sort { length($a) <=> length($b) } @probe;
+ $urpm->{modified} = 1; #- ensure urpmi.cfg is handled using only partially hidden url + netrc, since file list won't be generated anymore
+ }
+}
+
+#- Loads /etc/urpmi/urpmi.cfg and performs basic checks.
+#- Does not handle old format: <name> <url> [with <path_hdlist>]
+#- options :
+#- - nocheck_access : don't check presence of hdlist and other files
+sub read_config {
+ my ($urpm, $b_nocheck_access) = @_;
+ return if $urpm->{media}; #- media already loaded
+ $urpm->{media} = [];
+ my $config = urpm::cfg::load_config($urpm->{config})
+ or $urpm->{fatal}(6, $urpm::cfg::err);
+
+ #- global options
+ if (my $global = $config->{''}) {
+ foreach my $opt (keys %$global) {
+ if (defined $global->{$opt} && !exists $urpm->{options}{$opt}) {
+ $urpm->{options}{$opt} = $global->{$opt};
+ }
+ }
+ }
+
+ #- per-media options
+
+ read_config_add_passwords($urpm, $config);
+
+ foreach my $m (grep { $_ ne '' } keys %$config) {
+ my $medium = { name => $m };
+ foreach my $opt (@PER_MEDIA_OPT) {
+ defined $config->{$m}{$opt} and $medium->{$opt} = $config->{$m}{$opt};
+ }
+
+ if (!$medium->{url}) {
+ #- recover the url the old deprecated way...
+ #- only useful for migration, new urpmi.cfg will use netrc
+ recover_url_from_list($urpm, $medium);
+ $medium->{url} or $urpm->{error}("unable to find url in list file $medium->{name}, medium ignored");
+ }
+
+ add_existing_medium($urpm, $medium, $b_nocheck_access);
+ }
+
+ eval { require urpm::ldap; urpm::ldap::load_ldap_media($urpm) };
+
+ #- load default values
+ foreach (qw(post-clean verify-rpm)) {
+ exists $urpm->{options}{$_} or $urpm->{options}{$_} = 1;
+ }
+
+ $urpm->{media} = [ sort { $a->{priority} <=> $b->{priority} } @{$urpm->{media}} ];
+
+ #- read MD5 sums (usually not in urpmi.cfg but in a separate file)
+ foreach (@{$urpm->{media}}) {
+ if (my $md5sum = urpm::md5sum::from_MD5SUM("$urpm->{statedir}/MD5SUM", statedir_hdlist_or_synthesis($urpm, $_))) {
+ $_->{md5sum} = $md5sum;
+ }
+ }
+
+ #- remember global options for write_config
+ $urpm->{global_config} = $config->{''};
+}
+
+#- if invalid, set {ignore}
+sub check_existing_medium {
+ my ($urpm, $medium, $b_nocheck_access) = @_;
+
+ if ($medium->{virtual}) {
+        #- a virtual medium needs to have a url available without using a list file.
+ if ($medium->{hdlist} || $medium->{list}) {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("virtual medium \"%s\" should not have defined hdlist or list file, medium ignored",
+ $medium->{name}));
+ }
+ unless ($medium->{url}) {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("virtual medium \"%s\" should have a clear url, medium ignored",
+ $medium->{name}));
+ }
+ } else {
+ if ($medium->{hdlist}) {
+ #- is this check really needed? keeping just in case
+ $medium->{hdlist} ne 'list' && $medium->{hdlist} ne 'pubkey' or
+ $medium->{ignore} = 1,
+ $urpm->{error}(N("invalid hdlist name"));
+ }
+ if (!$medium->{ignore} && !$medium->{hdlist}) {
+ $medium->{hdlist} = "hdlist.$medium->{name}.cz";
+ -e statedir_hdlist($urpm, $medium) or
+ $medium->{ignore} = 1,
+ $urpm->{error}(N("unable to find hdlist file for \"%s\", medium ignored", $medium->{name}));
+ }
+ if (!$medium->{ignore} && !$medium->{list}) {
+ unless (defined $medium->{url}) {
+ $medium->{list} = "list.$medium->{name}";
+ unless (-e statedir_list($urpm, $medium)) {
+ $medium->{ignore} = 1,
+ $urpm->{error}(N("unable to find list file for \"%s\", medium ignored", $medium->{name}));
+ }
+ }
+ }
+ }
+
+
+ #- check the presence of hdlist and list files if necessary.
+ if (!$b_nocheck_access && !$medium->{ignore}) {
+ if ($medium->{virtual} && -r hdlist_or_synthesis_for_virtual_medium($medium)) {}
+ elsif (-r statedir_hdlist($urpm, $medium)) {}
+ elsif ($medium->{synthesis} && -r statedir_synthesis($urpm, $medium)) {}
+ else {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
+ }
+ if ($medium->{list} && -r statedir_list($urpm, $medium)) {}
+ elsif ($medium->{url}) {}
+ else {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
+ }
+ }
+
+ foreach my $field ('hdlist', 'list') {
+ $medium->{$field} or next;
+ if (grep { $_->{$field} eq $medium->{$field} } @{$urpm->{media}}) {
+ $medium->{ignore} = 1;
+ $urpm->{error}(
+ $field eq 'hdlist'
+ ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
+ : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name}));
+ }
+ }
+}
+
+#- probe medium to be used, take old medium into account too.
+sub add_existing_medium {
+ my ($urpm, $medium, $b_nocheck_access) = @_;
+
+ if (name2medium($urpm, $medium->{name})) {
+ $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
+ return;
+ }
+
+ check_existing_medium($urpm, $medium, $b_nocheck_access);
+
+ #- probe removable device.
+ probe_removable_device($urpm, $medium);
+
+    #- strip any trailing slashes from the URL.
+ $medium->{url} and $medium->{url} =~ s|(.*?)/*$|$1|;
+
+ push @{$urpm->{media}}, $medium;
+}
+
+sub file_from_file_url {
+ my ($url) = @_;
+ $url =~ m!^(?:file:/)?(/.*)! && $1;
+}
+
+sub _hdlist_dir {
+ my ($medium) = @_;
+ my $base = file_from_file_url($medium->{url}) || $medium->{url};
+ $medium->{with_hdlist} && reduce_pathname("$base/$medium->{with_hdlist}/..");
+}
+sub _url_with_hdlist {
+ my ($medium) = @_;
+
+ my $base = file_from_file_url($medium->{url}) || $medium->{url};
+ $medium->{with_hdlist} && reduce_pathname("$base/$medium->{with_hdlist}");
+}
+sub hdlist_or_synthesis_for_virtual_medium {
+ my ($medium) = @_;
+ file_from_file_url($medium->{url}) && _url_with_hdlist($medium);
+}
+
+sub statedir_hdlist_or_synthesis {
+ my ($urpm, $medium) = @_;
+ $medium->{hdlist} && "$urpm->{statedir}/" . ($medium->{synthesis} ? 'synthesis.' : '') . $medium->{hdlist};
+}
+sub statedir_hdlist {
+ my ($urpm, $medium) = @_;
+ $medium->{hdlist} && "$urpm->{statedir}/$medium->{hdlist}";
+}
+sub statedir_synthesis {
+ my ($urpm, $medium) = @_;
+ $medium->{hdlist} && "$urpm->{statedir}/synthesis.$medium->{hdlist}";
+}
+sub statedir_list {
+ my ($urpm, $medium) = @_;
+ $medium->{list} && "$urpm->{statedir}/$medium->{list}";
+}
+sub statedir_descriptions {
+ my ($urpm, $medium) = @_;
+ $medium->{name} && "$urpm->{statedir}/descriptions.$medium->{name}";
+}
+sub statedir_names {
+ my ($urpm, $medium) = @_;
+ $medium->{name} && "$urpm->{statedir}/names.$medium->{name}";
+}
+sub cachedir_hdlist {
+ my ($urpm, $medium) = @_;
+ $medium->{hdlist} && "$urpm->{cachedir}/partial/$medium->{hdlist}";
+}
+sub cachedir_list {
+ my ($urpm, $medium) = @_;
+ $medium->{list} && "$urpm->{cachedir}/partial/$medium->{list}";
+}
+
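To make the naming scheme concrete, here are the paths these accessors typically resolve to ({statedir} is usually /var/lib/urpmi and {cachedir} /var/cache/urpmi, but both come from the $urpm object):

    # for a medium { name => 'main', hdlist => 'hdlist.main.cz', list => 'list.main' }:
    statedir_hdlist($urpm, $medium);      # /var/lib/urpmi/hdlist.main.cz
    statedir_synthesis($urpm, $medium);   # /var/lib/urpmi/synthesis.hdlist.main.cz
    statedir_list($urpm, $medium);        # /var/lib/urpmi/list.main
    cachedir_hdlist($urpm, $medium);      # /var/cache/urpmi/partial/hdlist.main.cz
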
+sub name2medium {
+ my ($urpm, $name) = @_;
+ my ($medium) = grep { $_->{name} eq $name } @{$urpm->{media}};
+ $medium;
+}
+
+#- probe device associated with a removable device.
+sub probe_removable_device {
+ my ($urpm, $medium) = @_;
+
+ if ($medium->{url} && $medium->{url} =~ /^removable/) {
+ #- try to find device name in url scheme, this is deprecated, use medium option "removable" instead
+ if ($medium->{url} =~ /^removable_?([^_:]*)/) {
+ $medium->{removable} ||= $1 && "/dev/$1";
+ }
+ } else {
+ delete $medium->{removable};
+ return;
+ }
+
+ #- try to find device to open/close for removable medium.
+ if (my $dir = file_from_local_url($medium->{url})) {
+ my %infos;
+ my @mntpoints = urpm::sys::find_mntpoints($dir, \%infos);
+        if (@mntpoints > 1) { #- return value is suitable for a hash.
+ $urpm->{log}(N("too many mount points for removable medium \"%s\"", $medium->{name}));
+ $urpm->{log}(N("taking removable device as \"%s\"", join ',', map { $infos{$_}{device} } @mntpoints));
+ }
+ if (urpm::is_iso($medium->{removable})) {
+ $urpm->{log}(N("Medium \"%s\" is an ISO image, will be mounted on-the-fly", $medium->{name}));
+ } elsif (@mntpoints) {
+ if ($medium->{removable} && $medium->{removable} ne $infos{$mntpoints[-1]}{device}) {
+ $urpm->{log}(N("using different removable device [%s] for \"%s\"",
+ $infos{$mntpoints[-1]}{device}, $medium->{name}));
+ }
+ $medium->{removable} = $infos{$mntpoints[-1]}{device};
+ } else {
+ $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
+ }
+ } else {
+ $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
+ }
+}
+
+
+sub write_MD5SUM {
+ my ($urpm) = @_;
+
+ #- write MD5SUM file
+ my $fh = urpm::sys::open_safe($urpm, '>', "$urpm->{statedir}/MD5SUM") or return 0;
+ foreach my $medium (grep { $_->{md5sum} } @{$urpm->{media}}) {
+ my $s = basename(statedir_hdlist_or_synthesis($urpm, $medium));
+ print $fh "$medium->{md5sum} $s\n";
+ }
+
+ $urpm->{log}(N("wrote %s", "$urpm->{statedir}/MD5SUM"));
+
+ delete $urpm->{md5sum_modified};
+}
+
+#- Writes the urpmi.cfg file.
+sub write_urpmi_cfg {
+ my ($urpm) = @_;
+
+    #- avoid trashing existing configuration if it wasn't loaded
+ $urpm->{media} or return;
+
+ my $config = {
+ #- global config options found in the config file, without the ones
+ #- set from the command-line
+ '' => $urpm->{global_config},
+ };
+ foreach my $medium (@{$urpm->{media}}) {
+ next if $medium->{external};
+ my $medium_name = $medium->{name};
+
+ foreach (@PER_MEDIA_OPT) {
+ defined $medium->{$_} and $config->{$medium_name}{$_} = $medium->{$_};
+ }
+ }
+ remove_passwords_and_write_private_netrc($urpm, $config);
+
+ urpm::cfg::dump_config($urpm->{config}, $config)
+ or $urpm->{fatal}(6, N("unable to write config file [%s]", $urpm->{config}));
+
+ $urpm->{log}(N("wrote config file [%s]", $urpm->{config}));
+
+ #- everything should be synced now.
+ delete $urpm->{modified};
+}
+
+sub write_config {
+ my ($urpm) = @_;
+
+ write_urpmi_cfg($urpm);
+ write_MD5SUM($urpm);
+}
+
+#- read urpmi.cfg file as well as necessary synthesis files
+#- options :
+#- root
+#- cmdline_skiplist
+#- nocheck_access (used by read_config)
+#-
+#- callback (urpmf)
+#- need_hdlist (for urpmf: to be able to have info not available in synthesis)
+#- nodepslist (for urpmq: we don't need the hdlist/synthesis)
+#- no_skiplist (urpmf)
+#-
+#- synthesis (use this synthesis file, and only this synthesis file)
+#-
+#- usedistrib (otherwise uses urpmi.cfg)
+#- parallel
+#- media
+#- excludemedia
+#- sortmedia
+#-
+#- update
+#- searchmedia
+sub configure {
+ my ($urpm, %options) = @_;
+
+ clean($urpm);
+
+ $options{parallel} && $options{usedistrib} and $urpm->{fatal}(1, N("Can't use parallel mode with use-distrib mode"));
+
+ if ($options{parallel}) {
+ require urpm::parallel;
+ urpm::parallel::configure($urpm, $options{parallel});
+
+ if (!$options{media} && $urpm->{parallel_handler}{media}) {
+ $options{media} = $urpm->{parallel_handler}{media};
+ $urpm->{log}->(N("using associated media for parallel mode: %s", $options{media}));
+ }
+ } else {
+ #- nb: can't have both parallel and root
+ $urpm->{root} = $options{root};
+ }
+
+ $urpm->{root} && ! -c "$urpm->{root}/dev/null"
+ and $urpm->{error}(N("there doesn't seem to be devices in the chroot in \"%s\"", $urpm->{root}));
+
+ if ($options{synthesis}) {
+ if ($options{synthesis} ne 'none') {
+            #- synthesis takes precedence over media and update options.
+ $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{usedistrib} || $options{parallel} and
+ $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update, --use-distrib or --parallel"));
+ $urpm->parse_synthesis($options{synthesis});
+ #- synthesis disables the split of transaction (too risky and not useful).
+ $urpm->{options}{'split-length'} = 0;
+ }
+ } else {
+ if ($options{usedistrib}) {
+ $urpm->{media} = [];
+ add_distrib_media($urpm, "Virtual", $options{usedistrib}, %options, 'virtual' => 1);
+ } else {
+ read_config($urpm, $options{nocheck_access});
+ if (!$options{media} && $urpm->{options}{'default-media'}) {
+ $options{media} = $urpm->{options}{'default-media'};
+ }
+ }
+ if ($options{media}) {
+ delete $_->{modified} foreach @{$urpm->{media} || []};
+ select_media($urpm, split /,/, $options{media});
+ foreach (grep { !$_->{modified} } @{$urpm->{media} || []}) {
+ #- this is only a local ignore that will not be saved.
+ $_->{tempignore} = $_->{ignore} = 1;
+ }
+ }
+ if ($options{searchmedia}) {
+ select_media($urpm, $options{searchmedia}); #- Ensure this media has been selected
+ if (my $medium = name2medium($urpm, $options{searchmedia})) {
+ $medium->{ignore} and $urpm->{fatal}("searchmedia is ignored");
+ $medium->{searchmedia} = 1;
+ }
+ }
+ if ($options{excludemedia}) {
+ delete $_->{modified} foreach @{$urpm->{media} || []};
+ foreach (select_media_by_name($urpm, [ split /,/, $options{excludemedia} ])) {
+ $_->{modified} = 1;
+ #- this is only a local ignore that will not be saved.
+ $_->{tempignore} = $_->{ignore} = 1;
+ }
+ }
+ if ($options{sortmedia}) {
+ my @sorted_media = map { select_media_by_name($urpm, [$_]) } split(/,/, $options{sortmedia});
+ my @remaining = difference2($urpm->{media}, \@sorted_media);
+ $urpm->{media} = [ @sorted_media, @remaining ];
+ }
+ _parse_media($urpm, 0, \%options) if !$options{nodepslist};
+ }
+ #- determine package to withdraw (from skip.list file) only if something should be withdrawn.
+ if (!$options{nodepslist}) {
+ _compute_flags_for_skiplist($urpm, $options{cmdline_skiplist}) if !$options{no_skiplist};
+ _compute_flags_for_instlist($urpm);
+ }
+}
+
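A minimal caller sketch for configure() (the option values are illustrative; see the option list above for the full set):

    urpm::media::configure($urpm,
        media        => 'Main,Main Updates',  #- restrict to these media
        excludemedia => 'Testing',
    );
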
+sub _parse_media {
+ my ($urpm, $second_pass, $options) = @_;
+
+ foreach (grep { !$_->{ignore} && (!$options->{update} || $_->{update}) } @{$urpm->{media} || []}) {
+ our $currentmedia = $_; #- hack for urpmf
+ delete @$_{qw(start end)};
+ if ($_->{virtual}) {
+ if (file_from_file_url($_->{url})) {
+ if ($_->{synthesis}) {
+ _parse_synthesis($urpm, $_,
+ hdlist_or_synthesis_for_virtual_medium($_), $options->{callback});
+ } else {
+ #- we'll need a second pass
+ $second_pass++;
+ _parse_hdlist($urpm, $_,
+ hdlist_or_synthesis_for_virtual_medium($_),
+ $second_pass > 1 ? undef : $options->{callback},
+ );
+ }
+ } else {
+ $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $_->{name}));
+ $_->{ignore} = 1;
+ }
+ } else {
+ if ($options->{need_hdlist} && file_size(statedir_hdlist($urpm, $_)) > 32) {
+ _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options->{callback});
+ } else {
+ if (!_parse_synthesis($urpm, $_,
+ statedir_synthesis($urpm, $_),
+ $options->{callback})) {
+ _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options->{callback});
+ }
+ }
+ }
+ unless ($_->{ignore}) {
+ _check_after_reading_hdlist_or_synthesis($urpm, $_);
+ }
+ unless ($_->{ignore}) {
+ if ($_->{searchmedia}) {
+ ($urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}) = ($_->{start}, $_->{end});
+ $urpm->{log}(N("Search start: %s end: %s",
+ $urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}));
+ delete $_->{searchmedia};
+ }
+ }
+ }
+
+ if ($second_pass == 1) {
+ require URPM::Build;
+ $urpm->{log}(N("performing second pass to compute dependencies\n"));
+ $urpm->unresolved_provides_clean;
+ _parse_media($urpm, 1, $options);
+ }
+}
+
+sub _compute_flags_for_skiplist {
+ my ($urpm, $cmdline_skiplist) = @_;
+ my %uniq;
+ $urpm->compute_flags(
+ get_packages_list($urpm->{skiplist}, $cmdline_skiplist),
+ skip => 1,
+ callback => sub {
+ my ($urpm, $pkg) = @_;
+ $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
+ $uniq{$pkg->fullname} = undef;
+ $urpm->{log}(N("skipping package %s", scalar($pkg->fullname)));
+ },
+ );
+}
+
+sub _compute_flags_for_instlist {
+ my ($urpm) = @_;
+
+ my %uniq;
+ $urpm->compute_flags(
+ get_packages_list($urpm->{instlist}),
+ disable_obsolete => 1,
+ callback => sub {
+ my ($urpm, $pkg) = @_;
+ $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
+ $uniq{$pkg->fullname} = undef;
+ $urpm->{log}(N("would install instead of upgrade package %s", scalar($pkg->fullname)));
+ },
+ );
+
+}
+
+#- add a new medium, sync the config file accordingly.
+#- returns the new medium's name. (might be different from the requested
+#- name if index_name was specified)
+#- options: ignore, index_name, nolock, update, virtual
+sub add_medium {
+ my ($urpm, $name, $url, $with_hdlist, %options) = @_;
+
+ #- make sure configuration has been read.
+ $urpm->{media} or die "caller should have used ->read_config or ->configure first";
+ urpm::sys::lock_urpmi_db($urpm, 'exclusive') if !$options{nolock};
+
+ #- if a medium with that name has already been found, we have to exit now
+ my $medium;
+ if (defined $options{index_name}) {
+ my $i = $options{index_name};
+ do {
+ ++$i;
+ $medium = name2medium($urpm, $name . $i);
+ } while $medium;
+ $name .= $i;
+ } else {
+ $medium = name2medium($urpm, $name);
+ }
+ $medium and $urpm->{fatal}(5, N("medium \"%s\" already exists", $medium->{name}));
+
+    $url =~ s,/*$,,; #- strip any trailing slashes from the URL.
+
+ #- creating the medium info.
+ $medium = { name => $name, url => $url, update => $options{update}, modified => 1, ignore => $options{ignore} };
+ if ($options{virtual}) {
+ file_from_file_url($url) or $urpm->{fatal}(1, N("virtual medium needs to be local"));
+ $medium->{virtual} = 1;
+ } else {
+ $medium->{hdlist} = "hdlist.$name.cz";
+ probe_removable_device($urpm, $medium);
+ }
+
+    #- local media have priority, others are added at the end.
+ if (file_from_file_url($url)) {
+ $medium->{priority} = 0.5;
+ } else {
+ $medium->{priority} = 1 + @{$urpm->{media}};
+ }
+
+ $with_hdlist and $medium->{with_hdlist} = $with_hdlist;
+
+ #- create an entry in media list.
+ push @{$urpm->{media}}, $medium;
+
+ $urpm->{log}(N("added medium %s", $name));
+ $urpm->{modified} = 1;
+
+ $options{nolock} or urpm::sys::unlock_urpmi_db($urpm);
+ $name;
+}
+
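Usage sketch (the URL is a placeholder); add_medium() requires the configuration to be loaded first, and the new entry still has to be written back (and its hdlist fetched) afterwards:

    urpm::media::read_config($urpm, 0);
    my $name = urpm::media::add_medium($urpm,
        'my_updates',
        'http://mirror.example.com/updates',
        'media_info/hdlist.cz',              #- with_hdlist, relative to the URL
        update => 1,
    );
    urpm::media::write_config($urpm);        #- persist the new entry to urpmi.cfg
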
+#- add distribution media, according to url given.
+#- returns the list of names of added media.
+#- options :
+#- - initial_number : when adding several numbered media, start with this number
+#- - probe_with : if eq 'synthesis', use synthesis instead of hdlists
+#- - ask_media : callback to know whether each media should be added
+#- other options are passed to add_medium(): ignore, nolock, virtual
+sub add_distrib_media {
+ my ($urpm, $name, $url, %options) = @_;
+
+ #- make sure configuration has been read.
+ $urpm->{media} or die "caller should have used ->read_config or ->configure first";
+
+ my $distribconf;
+
+ if (my $dir = file_from_local_url($url)) {
+ $urpm->try_mounting($dir)
+ or $urpm->{error}(N("unable to mount the distribution medium")), return ();
+ $distribconf = MDV::Distribconf->new($dir, undef);
+ $distribconf->load
+ or $urpm->{error}(N("this location doesn't seem to contain any distribution")), return ();
+ } else {
+ unlink "$urpm->{cachedir}/partial/media.cfg";
+
+ $distribconf = MDV::Distribconf->new($url, undef);
+ $distribconf->settree('mandriva');
+
+ $urpm->{log}(N("retrieving media.cfg file..."));
+ if (urpm::download::sync($urpm, undef,
+ [ reduce_pathname($distribconf->getfullpath(undef, 'infodir') . '/media.cfg') ],
+ quiet => 1)) {
+ $urpm->{log}(N("...retrieving done"));
+ $distribconf->parse_mediacfg("$urpm->{cachedir}/partial/media.cfg")
+ or $urpm->{error}(N("unable to parse media.cfg")), return();
+ } else {
+ $urpm->{error}(N("...retrieving failed: %s", $@));
+ $urpm->{error}(N("unable to access the distribution medium (no media.cfg file found)"));
+ return ();
+ }
+ }
+
+ #- cosmetic update of name if it contains spaces.
+ $name =~ /\s/ and $name .= ' ';
+
+ my @newnames;
+ #- at this point, we have found a media.cfg file, so parse it
+ #- and create all necessary media according to it.
+ my $medium = $options{initial_number} || 1;
+
+ foreach my $media ($distribconf->listmedia) {
+ my $skip = 0;
+ # if one of those values is set, by default, we skip adding the media
+ foreach (qw(noauto)) {
+ $distribconf->getvalue($media, $_) and do {
+ $skip = 1;
+ last;
+ };
+ }
+ if ($options{ask_media}) {
+ if ($options{ask_media}->(
+ $distribconf->getvalue($media, 'name'),
+ !$skip,
+ )) {
+ $skip = 0;
+ } else {
+ $skip = 1;
+ }
+ }
+ $skip and next;
+
+ my $media_name = $distribconf->getvalue($media, 'name') || '';
+ my $is_update_media = $distribconf->getvalue($media, 'updates_for');
+
+ push @newnames, add_medium($urpm,
+ $name ? "$media_name ($name$medium)" : $media_name,
+ reduce_pathname($distribconf->getfullpath($media, 'path')),
+ offset_pathname(
+ $url,
+ $distribconf->getpath($media, 'path'),
+ ) . '/' . $distribconf->getpath($media, $options{probe_with} eq 'synthesis' ? 'synthesis' : 'hdlist'),
+ index_name => $name ? undef : 0,
+ %options,
+ # the following override %options
+ update => $is_update_media ? 1 : undef,
+ );
+ ++$medium;
+ }
+ return @newnames;
+}
+
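An illustrative call (mirror URL invented); the ask_media callback here simply keeps the distributor's defaults:

    my @added = urpm::media::add_distrib_media($urpm,
        'Distro 2007',
        'ftp://mirror.example.com/distro/2007.0/i586',
        probe_with => 'synthesis',
        ask_media  => sub { my ($name, $add_by_default) = @_; $add_by_default },
    );
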
+#- deprecated, use select_media_by_name instead
+sub select_media {
+ my $urpm = shift;
+ my $options = {};
+ if (ref $_[0]) { $options = shift }
+ foreach (select_media_by_name($urpm, [ @_ ], $options->{strict_match})) {
+ #- select medium by setting the modified flag, do not check ignore.
+ $_->{modified} = 1;
+ }
+}
+
+sub select_media_by_name {
+ my ($urpm, $names, $b_strict_match) = @_;
+
+ my %wanted = map { $_ => 1 } @$names;
+
+ #- first the exact matches
+ my @l = grep { delete $wanted{$_->{name}} } @{$urpm->{media}};
+
+ #- check if some arguments don't correspond to the medium name.
+ #- in such case, try to find the unique medium (or list candidate
+ #- media found).
+ foreach (keys %wanted) {
+ my $q = quotemeta;
+ my (@found, @foundi);
+ my $regex = $b_strict_match ? qr/^$q$/ : qr/$q/;
+ my $regexi = $b_strict_match ? qr/^$q$/i : qr/$q/i;
+ foreach my $medium (@{$urpm->{media}}) {
+ $medium->{name} =~ $regex and push @found, $medium;
+ $medium->{name} =~ $regexi and push @foundi, $medium;
+ }
+ @found = @foundi if !@found;
+
+ if (@found == 0) {
+ $urpm->{error}(N("trying to select nonexistent medium \"%s\"", $_));
+ } else {
+ if (@found > 1) {
+ $urpm->{log}(N("selecting multiple media: %s", join(", ", map { qq("$_->{name}") } @found)));
+ }
+            #- changed behaviour to select all occurrences by default.
+ push @l, @found;
+ }
+ }
+ @l;
+}
+
+#- deprecated, use remove_media instead
+sub remove_selected_media {
+ my ($urpm) = @_;
+
+ remove_media($urpm, [ grep { $_->{modified} } @{$urpm->{media}} ]);
+}
+
+sub remove_media {
+ my ($urpm, $to_remove) = @_;
+
+ foreach my $medium (@$to_remove) {
+ $urpm->{log}(N("removing medium \"%s\"", $medium->{name}));
+
+ #- mark to re-write configuration.
+ $urpm->{modified} = 1;
+
+ #- remove files associated with this medium.
+ unlink grep { $_ } map { $_->($urpm, $medium) } \&statedir_hdlist, \&statedir_list, \&statedir_synthesis, \&statedir_descriptions, \&statedir_names;
+
+        #- remove proxy settings for this medium
+ urpm::download::remove_proxy_media($medium->{name});
+ }
+
+ $urpm->{media} = [ difference2($urpm->{media}, $to_remove) ];
+}
+
+#- return the list of synthesis or hdlist paths to probe.
+sub _probe_with_try_list {
+ my ($probe_with) = @_;
+
+ my @probe_synthesis = (
+ "media_info/synthesis.hdlist.cz",
+ "synthesis.hdlist.cz",
+ );
+ my @probe_hdlist = (
+ "media_info/hdlist.cz",
+ "hdlist.cz",
+ );
+ $probe_with =~ /synthesis/
+ ? (@probe_synthesis, @probe_hdlist)
+ : (@probe_hdlist, @probe_synthesis);
+}
+
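So, for example, probing with 'synthesis' yields this try order:

    _probe_with_try_list('synthesis');
    # -> ('media_info/synthesis.hdlist.cz', 'synthesis.hdlist.cz',
    #     'media_info/hdlist.cz', 'hdlist.cz')
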
+sub may_reconfig_urpmi {
+ my ($urpm, $medium) = @_;
+
+ my $f;
+ if (my $dir = file_from_file_url($medium->{url})) {
+ $f = reduce_pathname("$dir/reconfig.urpmi");
+ } else {
+ unlink($f = "$urpm->{cachedir}/partial/reconfig.urpmi");
+ urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/reconfig.urpmi") ], quiet => 1);
+ }
+ if (-s $f) {
+ reconfig_urpmi($urpm, $f, $medium->{name});
+ }
+ unlink $f if !file_from_file_url($medium->{url});
+}
+
+#- read a reconfiguration file for urpmi, and reconfigure media accordingly
+#- $rfile is the reconfiguration file (local), $name is the media name
+#-
+#- the format is similar to the RewriteRule of mod_rewrite, so:
+#- PATTERN REPLACEMENT [FLAG]
+#- where FLAG can be L or N
+#-
+#- example of reconfig.urpmi:
+#- # this is an urpmi reconfiguration file
+#- /cooker /cooker/$ARCH
+sub reconfig_urpmi {
+ my ($urpm, $rfile, $name) = @_;
+ -r $rfile or return;
+
+ $urpm->{log}(N("reconfiguring urpmi for media \"%s\"", $name));
+
+ my ($magic, @lines) = cat_($rfile);
+    #- the first line of reconfig.urpmi must be the magic string, to be sure it's not an error file
+ $magic =~ /^# this is an urpmi reconfiguration file/ or return undef;
+
+ my @replacements;
+ foreach (@lines) {
+ chomp;
+ s/^\s*//; s/#.*$//; s/\s*$//;
+ $_ or next;
+ my ($p, $r, $f) = split /\s+/, $_, 3;
+ push @replacements, [ quotemeta $p, $r, $f || 1 ];
+ }
+
+ my $reconfigured = 0;
+ my @reconfigurable = qw(url with_hdlist);
+
+ my $medium = name2medium($urpm, $name) or return;
+ my %orig = %$medium;
+
+ URLS:
+ foreach my $k (@reconfigurable) {
+ foreach my $r (@replacements) {
+ if ($medium->{$k} =~ s/$r->[0]/$r->[1]/) {
+ $reconfigured = 1;
+ #- Flags stolen from mod_rewrite: L(ast), N(ext)
+ if ($r->[2] =~ /L/) {
+ last;
+ } elsif ($r->[2] =~ /N/) { #- dangerous option
+ redo URLS;
+ }
+ }
+ }
+ #- check that the new url exists before committing changes (local mirrors)
+ my $file = file_from_local_url($medium->{$k});
+ if ($file && !-e $file) {
+ %$medium = %orig;
+ $reconfigured = 0;
+ $urpm->{log}(N("...reconfiguration failed"));
+ return;
+ }
+ }
+
+ if ($reconfigured) {
+ $urpm->{log}(N("reconfiguration done"));
+ write_config($urpm);
+ }
+ $reconfigured;
+}
+
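A hedged example of the rewrite in action (file contents and URL invented):

    # reconfig.urpmi published by a mirror:
    #   # this is an urpmi reconfiguration file
    #   /old/path  /new/path  L
    #
    # applying it to the medium named "main":
    reconfig_urpmi($urpm, "$urpm->{cachedir}/partial/reconfig.urpmi", 'main');
    # url http://mirror.example.com/old/path/media/main
    #  -> http://mirror.example.com/new/path/media/main
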
+sub _guess_hdlist_suffix {
+ my ($url) = @_;
+ $url =~ m!\bmedia/(\w+)/*\Z! && $1;
+}
+
+sub _hdlist_suffix {
+ my ($medium) = @_;
+ $medium->{with_hdlist} =~ /hdlist(.*?)(?:\.src)?\.cz$/ ? $1 : '';
+}
+
+sub _parse_hdlist_or_synthesis__when_not_modified {
+ my ($urpm, $medium) = @_;
+
+ delete @$medium{qw(start end)};
+ if ($medium->{virtual}) {
+ if (file_from_file_url($medium->{url})) {
+ _parse_maybe_hdlist_or_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
+ } else {
+ $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $medium->{name}));
+ $medium->{ignore} = 1;
+ }
+ } else {
+ if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
+ _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
+ }
+ }
+ unless ($medium->{ignore}) {
+ _check_after_reading_hdlist_or_synthesis($urpm, $medium);
+ }
+}
+
+sub _parse_hdlist_or_synthesis__virtual {
+ my ($urpm, $medium) = @_;
+
+ if (my $hdlist_or = hdlist_or_synthesis_for_virtual_medium($medium)) {
+ delete $medium->{modified};
+ $medium->{really_modified} = 1;
+ $urpm->{md5sum_modified} = 1;
+ _parse_maybe_hdlist_or_synthesis($urpm, $medium, $hdlist_or);
+ _check_after_reading_hdlist_or_synthesis($urpm, $medium);
+ } else {
+ $urpm->{error}(N("virtual medium \"%s\" should have valid source hdlist or synthesis, medium ignored",
+ $medium->{name}));
+ $medium->{ignore} = 1;
+ }
+}
+
+#- names.<media_name> is used by external progs (namely for bash-completion)
+sub generate_medium_names {
+ my ($urpm, $medium) = @_;
+
+ unlink statedir_names($urpm, $medium);
+
+ if (my $fh = urpm::sys::open_safe($urpm, ">", statedir_names($urpm, $medium))) {
+ foreach ($medium->{start} .. $medium->{end}) {
+ if (defined $urpm->{depslist}[$_]) {
+ print $fh $urpm->{depslist}[$_]->name . "\n";
+ } else {
+ $urpm->{error}(N("Error generating names file: dependency %d not found", $_));
+ }
+ }
+ } else {
+ $urpm->{error}(N("Error generating names file: Can't write to file (%s)", $!));
+ }
+}
+
+
+sub _read_existing_synthesis_and_hdlist_if_same_time_and_msize {
+ my ($urpm, $medium, $basename) = @_;
+
+ same_size_and_mtime("$urpm->{cachedir}/partial/$basename",
+ statedir_hdlist($urpm, $medium)) or return;
+
+ unlink "$urpm->{cachedir}/partial/$basename";
+
+ _read_existing_synthesis_and_hdlist($urpm, $medium);
+
+ 1;
+}
+
+sub _read_existing_synthesis_and_hdlist_if_same_md5sum {
+ my ($urpm, $medium, $retrieved_md5sum) = @_;
+
+    #- if an existing hdlist or synthesis file has the same md5sum, we assume the
+    #- files are the same.
+    #- if the local md5sum is the same as the remote md5sum, there is no need to
+    #- download the hdlist or synthesis file again.
+ $retrieved_md5sum && $medium->{md5sum} eq $retrieved_md5sum or return;
+
+ unlink "$urpm->{cachedir}/partial/" . basename($medium->{with_hdlist});
+
+ _read_existing_synthesis_and_hdlist($urpm, $medium);
+
+ 1;
+}
+
+sub _read_existing_synthesis_and_hdlist {
+ my ($urpm, $medium) = @_;
+
+ $urpm->{log}(N("medium \"%s\" is up-to-date", $medium->{name}));
+
+ #- the medium is now considered not modified.
+ $medium->{modified} = 0;
+ #- XXX we could link the new hdlist to the old one.
+ #- (However links need to be managed. see bug #12391.)
+ #- as previously done, just read synthesis file here, this is enough.
+ if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
+ _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
+ _check_after_reading_hdlist_or_synthesis($urpm, $medium);
+ }
+
+ 1;
+}
+
+sub _parse_hdlist {
+ my ($urpm, $medium, $hdlist_file, $o_callback) = @_;
+
+ $urpm->{log}(N("examining hdlist file [%s]", $hdlist_file));
+ ($medium->{start}, $medium->{end}) =
+ $urpm->parse_hdlist($hdlist_file, packing => 1, $o_callback ? (callback => $o_callback) : @{[]});
+}
+
+sub _parse_synthesis {
+ my ($urpm, $medium, $synthesis_file, $o_callback) = @_;
+
+ $urpm->{log}(N("examining synthesis file [%s]", $synthesis_file));
+ ($medium->{start}, $medium->{end}) =
+ $urpm->parse_synthesis($synthesis_file, $o_callback ? (callback => $o_callback) : @{[]});
+}
+sub _parse_maybe_hdlist_or_synthesis {
+ my ($urpm, $medium, $hdlist_or) = @_;
+
+ if ($medium->{synthesis}) {
+ if (_parse_synthesis($urpm, $medium, $hdlist_or)) {
+ $medium->{synthesis} = 1;
+ } elsif (_parse_hdlist($urpm, $medium, $hdlist_or)) {
+ delete $medium->{synthesis};
+ } else {
+ return;
+ }
+ } else {
+ if (_parse_hdlist($urpm, $medium, $hdlist_or)) {
+ delete $medium->{synthesis};
+ } elsif (_parse_synthesis($urpm, $medium, $hdlist_or)) {
+ $medium->{synthesis} = 1;
+ } else {
+ return;
+ }
+ }
+ 1;
+}
+
+sub _build_hdlist_using_rpm_headers {
+ my ($urpm, $medium) = @_;
+
+ $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
+ #- finish building operation of hdlist.
+ $urpm->build_hdlist(start => $medium->{start},
+ end => $medium->{end},
+ dir => "$urpm->{cachedir}/headers",
+ hdlist => statedir_hdlist($urpm, $medium),
+ );
+}
+
+sub _build_synthesis {
+ my ($urpm, $medium) = @_;
+
+ eval { $urpm->build_synthesis(
+ start => $medium->{start},
+ end => $medium->{end},
+ synthesis => statedir_synthesis($urpm, $medium),
+ ) };
+ if ($@) {
+ $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
+ $urpm->{error}($@);
+ unlink statedir_synthesis($urpm, $medium);
+ } else {
+ $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
+ }
+ #- keep in mind we have a modified database, sure at this point.
+ $urpm->{md5sum_modified} = 1;
+}
+
+sub is_valid_medium {
+ my ($medium) = @_;
+ defined $medium->{start} && defined $medium->{end};
+}
+
+sub _check_after_reading_hdlist_or_synthesis {
+ my ($urpm, $medium) = @_;
+
+ if (!is_valid_medium($medium)) {
+ $urpm->{error}(N("problem reading hdlist or synthesis file of medium \"%s\"", $medium->{name}));
+ $medium->{ignore} = 1;
+ }
+}
+
+sub _get_list_or_pubkey__local {
+ my ($urpm, $medium, $name) = @_;
+
+ my $path = _hdlist_dir($medium) . "/$name" . _hdlist_suffix($medium);
+ -e $path or $path = file_from_local_url($medium->{url}) . "/$name";
+ if (-e $path) {
+ copy_and_own($path, "$urpm->{cachedir}/partial/$name")
+ or $urpm->{error}(N("...copying failed")), return;
+ }
+ 1;
+}
+
+sub _get_list_or_pubkey__remote {
+ my ($urpm, $medium, $name) = @_;
+
+ my $found;
+ if (_hdlist_suffix($medium)) {
+ my $local_name = $name . _hdlist_suffix($medium);
+
+ if (urpm::download::sync($urpm, $medium, [_hdlist_dir($medium) . "/$local_name"],
+ quiet => 1)) {
+ rename("$urpm->{cachedir}/partial/$local_name", "$urpm->{cachedir}/partial/$name");
+ $found = 1;
+ }
+ }
+ if (!$found) {
+ urpm::download::sync($urpm, $medium, [reduce_pathname("$medium->{url}/$name")], quiet => 1)
+ or unlink "$urpm->{cachedir}/partial/$name";
+ }
+}
+
+sub get_descriptions_local {
+ my ($urpm, $medium) = @_;
+
+ unlink statedir_descriptions($urpm, $medium);
+
+ my $dir = file_from_local_url($medium->{url});
+ my $description_file = "$dir/media_info/descriptions"; #- new default location
+ -e $description_file or $description_file = "$dir/../descriptions";
+ -e $description_file or return;
+
+ $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name}));
+ if (copy_and_own($description_file, statedir_descriptions($urpm, $medium))) {
+ $urpm->{log}(N("...copying done"));
+ } else {
+ $urpm->{error}(N("...copying failed"));
+ $medium->{ignore} = 1;
+ }
+}
+sub get_descriptions_remote {
+ my ($urpm, $medium) = @_;
+
+ unlink "$urpm->{cachedir}/partial/descriptions";
+
+ if (-e statedir_descriptions($urpm, $medium)) {
+ urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions");
+ }
+ urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ], quiet => 1)
+ or #- try older location
+ urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ], quiet => 1);
+
+ if (-e "$urpm->{cachedir}/partial/descriptions") {
+ urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
+ }
+}
+sub get_hdlist_or_synthesis__local {
+ my ($urpm, $medium, $callback) = @_;
+
+ unlink cachedir_hdlist($urpm, $medium);
+ $urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
+ $callback and $callback->('copy', $medium->{name});
+ if (copy_and_own(_url_with_hdlist($medium), cachedir_hdlist($urpm, $medium))) {
+ $callback and $callback->('done', $medium->{name});
+ $urpm->{log}(N("...copying done"));
+ if (file_size(cachedir_hdlist($urpm, $medium)) < 20) {
+ $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)", cachedir_hdlist($urpm, $medium)));
+ 0;
+ } else {
+ 1;
+ }
+ } else {
+ $callback and $callback->('failed', $medium->{name});
+ #- force error, reported afterwards
+ unlink cachedir_hdlist($urpm, $medium);
+ 0;
+ }
+}
+
+sub get_hdlist_or_synthesis_and_check_md5sum__local {
+ my ($urpm, $medium, $retrieved_md5sum, $callback) = @_;
+
+ get_hdlist_or_synthesis__local($urpm, $medium, $callback) or return;
+
+ #- keep checking md5sum of file just copied ! (especially on nfs or removable device).
+ if ($retrieved_md5sum) {
+ $urpm->{log}(N("computing md5sum of copied source hdlist (or synthesis)"));
+ urpm::md5sum::compute(cachedir_hdlist($urpm, $medium)) eq $retrieved_md5sum or
+ $urpm->{error}(N("copy of [%s] failed (md5sum mismatch)", _url_with_hdlist($medium))), return;
+ }
+
+ 1;
+}
+
+sub _read_rpms_from_dir {
+ my ($urpm, $medium, $second_pass, $clean_cache) = @_;
+
+ my $dir = file_from_local_url($medium->{url});
+
+ $medium->{rpm_files} = [ glob("$dir/*.rpm") ];
+
+    #- check that we actually found some rpm files!
+ if (!@{$medium->{rpm_files}}) {
+ $urpm->{error}(N("no rpm files found from [%s]", $dir));
+ $medium->{ignore} = 1;
+ return;
+ }
+
+    #- we need to rebuild the hdlist from the rpm files.
+
+ $urpm->{log}(N("reading rpm files from [%s]", $dir));
+ my @unresolved_before = grep {
+ ! defined $urpm->{provides}{$_};
+ } keys %{$urpm->{provides} || {}};
+ $medium->{start} = @{$urpm->{depslist}};
+
+ eval {
+ $medium->{headers} = [ $urpm->parse_rpms_build_headers(
+ dir => "$urpm->{cachedir}/headers",
+ rpms => $medium->{rpm_files},
+ clean => $$clean_cache,
+ packing => 1,
+ ) ];
+ };
+ if ($@) {
+ $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
+ delete $medium->{headers}; #- do not propagate these.
+ return;
+ }
+
+ $medium->{end} = $#{$urpm->{depslist}};
+ if ($medium->{start} > $medium->{end}) {
+        #- an error occurred (provided there were files in input).
+ delete $medium->{start};
+ delete $medium->{end};
+ $urpm->{fatal}(9, N("no rpms read"));
+ }
+
+    #- make sure the headers will not be removed for another medium.
+ $$clean_cache = 0;
+ my @unresolved = grep {
+ ! defined $urpm->{provides}{$_};
+ } keys %{$urpm->{provides} || {}};
+ @unresolved_before == @unresolved or $$second_pass = 1;
+
+    delete $medium->{synthesis}; #- when building the hdlist ourselves, drop the synthesis property.
+ 1;
+}
+
+#- options: callback, force, force_building_hdlist, nomd5sum, nopubkey, probe_with
+sub _update_medium__parse_if_unmodified__local {
+ my ($urpm, $medium, $second_pass, $clean_cache, $options) = @_;
+
+ my $dir = file_from_local_url($medium->{url});
+
+ if (!-d $dir) {
+ #- the directory given does not exist and may be accessible
+ #- by mounting some other directory. Try to figure it out and mount
+ #- everything that might be necessary.
+ $urpm->try_mounting(
+ !$options->{force_building_hdlist} && $medium->{with_hdlist}
+ ? _hdlist_dir($medium) : $dir,
+ #- in case of an iso image, pass its name
+ urpm::is_iso($medium->{removable}) && $medium->{removable},
+ ) or $urpm->{error}(N("unable to access medium \"%s\",
+this could happen if you mounted manually the directory when creating the medium.", $medium->{name})), return 'unmodified';
+ }
+
+ #- try to probe for possible with_hdlist parameter, unless
+ #- it is already defined (and valid).
+ if ($options->{probe_with} && !$medium->{with_hdlist}) {
+ foreach (_probe_with_try_list($options->{probe_with})) {
+ -e "$dir/$_" or next;
+ if (file_size("$dir/$_") >= 20) {
+ $medium->{with_hdlist} = $_;
+ last;
+ } else {
+ $urpm->{error}(N("invalid hdlist file %s for medium \"%s\"", "$dir/$_", $medium->{name}));
+ return;
+ }
+ }
+ }
+
+ if ($medium->{virtual}) {
+        #- syncing a virtual medium is very simple: just try to read the file in order to
+        #- determine its type, once a with_hdlist has been found (having one is mandatory).
+ _parse_hdlist_or_synthesis__virtual($urpm, $medium);
+ }
+
+    #- check whether a remote MD5SUM file is available.
+    #- this is only done if $with_hdlist is not empty, so that an existing
+    #- hdlist or synthesis file can be reused and the download can be verified.
+    #- if no MD5SUM is available, proceed as before...
+    #- we can assume at this point that a basename exists, but it needs
+    #- to be checked for validity; nothing can be deduced if no MD5SUM
+    #- file is present.
+
+ unless ($medium->{virtual}) {
+ if ($medium->{with_hdlist}) {
+ my ($retrieved_md5sum);
+
+ if (!$options->{nomd5sum} && file_size(_hdlist_dir($medium) . '/MD5SUM') > 32) {
+ $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm, _hdlist_dir($medium) . '/MD5SUM', basename($medium->{with_hdlist}));
+ if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force})) {
+ _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
+ and return 'unmodified';
+ }
+ }
+
+ #- if the source hdlist is present and we are not forcing using rpm files
+ if (!$options->{force_building_hdlist} && -e _url_with_hdlist($medium)) {
+ if (get_hdlist_or_synthesis_and_check_md5sum__local($urpm, $medium, $retrieved_md5sum, $options->{callback})) {
+
+ $medium->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;
+
+ #- check whether the files are identical... and no copy was forced...
+ if (!$options->{force}) {
+ _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $medium->{hdlist})
+ and return 'unmodified';
+ }
+ } else {
+ #- if copying the hdlist failed, try to build it directly.
+ if ($urpm->{options}{'build-hdlist-on-error'}) {
+ $options->{force_building_hdlist} = 1;
+ } else {
+ $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
+ $medium->{ignore} = 1;
+ return;
+ }
+ }
+ }
+ } else {
+ #- no hdlist/synthesis available, try to build one from the rpm files
+ $options->{force_building_hdlist} = 1;
+ }
+
+ if ($options->{force_building_hdlist}) {
+ _read_rpms_from_dir($urpm, $medium, $second_pass, $clean_cache) or return;
+ }
+ }
+
+ 1;
+}
+
+#- options: callback, force, nomd5sum, nopubkey, probe_with, quiet
+sub _update_medium__parse_if_unmodified__remote {
+ my ($urpm, $medium, $options) = @_;
+ my ($retrieved_md5sum, $basename);
+
+ #- check whether a remote MD5SUM file is available.
+ #- this is only done if $with_hdlist is not empty, so that an existing
+ #- hdlist or synthesis file can be reused and the download verified.
+ #- if no MD5SUM is available, proceed as before...
+ if ($medium->{with_hdlist}) {
+ #- we can assume at this point that a basename exists, but it still needs
+ #- to be checked for validity; nothing can be deduced if no MD5SUM
+ #- file is present.
+ $basename = basename($medium->{with_hdlist});
+
+ unlink "$urpm->{cachedir}/partial/MD5SUM";
+ if (!$options->{nomd5sum} &&
+ urpm::download::sync($urpm, $medium,
+ [ reduce_pathname(_hdlist_dir($medium) . '/MD5SUM') ],
+ quiet => 1) && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
+ if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force} >= 2)) {
+ $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
+ _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
+ and return 'unmodified';
+ }
+ } else {
+ #- at this point, we don't know whether a basename exists and is valid; let's probe for it later.
+ $basename = undef;
+ }
+ }
+
+ #- try to probe for a possible with_hdlist parameter, unless
+ #- it is already defined (and valid).
+ $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
+ $options->{callback} and $options->{callback}('retrieve', $medium->{name});
+ if ($options->{probe_with} && !$medium->{with_hdlist}) {
+ foreach my $with_hdlist (_probe_with_try_list($options->{probe_with})) {
+ $basename = basename($with_hdlist) or next;
+ $options->{force} and unlink "$urpm->{cachedir}/partial/$basename";
+ if (urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/$with_hdlist") ],
+ quiet => $options->{quiet}, callback => $options->{callback}) && file_size("$urpm->{cachedir}/partial/$basename") >= 20) {
+ $urpm->{log}(N("...retrieving done"));
+ $medium->{with_hdlist} = $with_hdlist;
+ $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $medium->{with_hdlist}));
+ last; #- found a suitable with_hdlist in the list above.
+ }
+ }
+ } else {
+ $basename = basename($medium->{with_hdlist});
+
+ if ($options->{force}) {
+ unlink "$urpm->{cachedir}/partial/$basename";
+ } else {
+ #- try to sync (copy if needed) the local copy after restoring the previous one.
+ #- this is useful for rsync (?)
+ if (-e statedir_hdlist_or_synthesis($urpm, $medium)) {
+ copy_and_own(
+ statedir_hdlist_or_synthesis($urpm, $medium),
+ "$urpm->{cachedir}/partial/$basename",
+ ) or $urpm->{error}(N("...copying failed")), return;
+ }
+ }
+ if (urpm::download::sync($urpm, $medium, [ _url_with_hdlist($medium) ],
+ quiet => $options->{quiet}, callback => $options->{callback})) {
+ $urpm->{log}(N("...retrieving done"));
+ } else {
+ $urpm->{error}(N("...retrieving failed: %s", $@));
+ unlink "$urpm->{cachedir}/partial/$basename";
+ }
+ }
+
+ #- check that the downloaded file has the expected md5sum.
+ if (file_size("$urpm->{cachedir}/partial/$basename") >= 20 && $retrieved_md5sum) {
+ $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
+ unless (urpm::md5sum::compute("$urpm->{cachedir}/partial/$basename") eq $retrieved_md5sum) {
+ $urpm->{error}(N("...retrieving failed: md5sum mismatch"));
+ unlink "$urpm->{cachedir}/partial/$basename";
+ }
+ }
+
+ if (file_size("$urpm->{cachedir}/partial/$basename") >= 20) {
+ $options->{callback} and $options->{callback}('done', $medium->{name});
+
+ unless ($options->{force}) {
+ _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $basename)
+ and return 'unmodified';
+ }
+
+ #- the files are different, update local copy.
+ rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
+ } else {
+ $options->{callback} and $options->{callback}('failed', $medium->{name});
+ $urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
+ return;
+ }
+ $medium->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;
+ 1;
+}
+
+sub _get_pubkey_and_descriptions {
+ my ($urpm, $medium, $nopubkey) = @_;
+
+ my $local = file_from_local_url($medium->{url});
+
+ ($local ? \&get_descriptions_local : \&get_descriptions_remote)->($urpm, $medium);
+
+ #- examine if a pubkey file is available.
+ if (!$nopubkey && !$medium->{'key-ids'}) {
+ ($local ? \&_get_list_or_pubkey__local : \&_get_list_or_pubkey__remote)->($urpm, $medium, 'pubkey');
+ }
+}
+
+sub _read_cachedir_pubkey {
+ my ($urpm, $medium) = @_;
+ -s "$urpm->{cachedir}/partial/pubkey" or return;
+
+ $urpm->{log}(N("examining pubkey file of \"%s\"...", $medium->{name}));
+
+ my %key_ids;
+ $urpm->import_needed_pubkeys(
+ [ $urpm->parse_armored_file("$urpm->{cachedir}/partial/pubkey") ],
+ root => $urpm->{root},
+ callback => sub {
+ my (undef, undef, $_k, $id, $imported) = @_;
+ if ($id) {
+ $key_ids{$id} = undef;
+ $imported and $urpm->{log}(N("...imported key %s from pubkey file of \"%s\"",
+ $id, $medium->{name}));
+ } else {
+ $urpm->{error}(N("unable to import pubkey file of \"%s\"", $medium->{name}));
+ }
+ });
+ if (keys(%key_ids)) {
+ $medium->{'key-ids'} = join(',', keys %key_ids);
+ }
+}
+
+sub _write_rpm_list {
+ my ($urpm, $medium) = @_;
+
+ @{$medium->{rpm_files} || []} or return;
+
+ $medium->{list} ||= "list.$medium->{name}";
+
+ #- write list file.
+ $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
+ my $listfh = urpm::sys::open_safe($urpm, '>', cachedir_list($urpm, $medium)) or return;
+ print $listfh basename($_), "\n" foreach @{$medium->{rpm_files}};
+ 1;
+}
+
+#- options: callback, force, force_building_hdlist, nomd5sum, probe_with, quiet
+#- (from _update_medium__parse_if_unmodified__local and _update_medium__parse_if_unmodified__remote)
+sub _update_medium_first_pass {
+ my ($urpm, $medium, $second_pass, $clean_cache, %options) = @_;
+
+ #- we should create the associated synthesis file if it does not already exist...
+ file_size(statedir_synthesis($urpm, $medium)) >= 20
+ or $medium->{must_build_synthesis} = 1;
+
+ unless ($medium->{modified}) {
+ #- the medium is not modified, but to compute dependencies
+ #- we still need to read it; all syntheses will be rewritten if
+ #- an unresolved provides is found.
+ #- to speed up the process, we only read the synthesis at the beginning.
+ _parse_hdlist_or_synthesis__when_not_modified($urpm, $medium);
+ return 1;
+ }
+
+ #- always delete any remaining list or pubkey file in the cache.
+ foreach (qw(list pubkey)) {
+ unlink "$urpm->{cachedir}/partial/$_";
+ }
+
+ #- check for a reconfig.urpmi file (if not already reconfigured)
+ if (!$medium->{noreconfigure}) {
+ may_reconfig_urpmi($urpm, $medium);
+ }
+
+ {
+ my $rc =
+ file_from_local_url($medium->{url})
+ ? _update_medium__parse_if_unmodified__local($urpm, $medium, $second_pass, $clean_cache, \%options)
+ : _update_medium__parse_if_unmodified__remote($urpm, $medium, \%options);
+
+ if (!$rc || $rc eq 'unmodified') {
+ return $rc;
+ }
+ }
+
+ #- build list file according to hdlist.
+ if (!$medium->{headers} && !$medium->{virtual} && file_size(cachedir_hdlist($urpm, $medium)) < 20) {
+ $urpm->{error}(N("no hdlist file found for medium \"%s\"", $medium->{name}));
+ return;
+ }
+
+ if (!$medium->{virtual}) {
+ if ($medium->{headers}) {
+ _write_rpm_list($urpm, $medium) or return;
+ } else {
+ #- first-pass read of the hdlist or synthesis: try to open it as a synthesis; if the file
+ #- is larger than 256KB (262144 bytes), it is probably an hdlist, otherwise a synthesis.
+ #- in any case, if one parse fails, try the other mode.
+ $options{callback} and $options{callback}('parse', $medium->{name});
+ my @unresolved_before = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
+
+ #- if it looks like an hdlist, try to parse it as an hdlist first
+ delete $medium->{synthesis} if file_size(cachedir_hdlist($urpm, $medium)) > 262144;
+ _parse_maybe_hdlist_or_synthesis($urpm, $medium, cachedir_hdlist($urpm, $medium));
+
+ if (is_valid_medium($medium)) {
+ $options{callback} && $options{callback}('done', $medium->{name});
+ } else {
+ $urpm->{error}(N("unable to parse hdlist file of \"%s\"", $medium->{name}));
+ $options{callback} and $options{callback}('failed', $medium->{name});
+ delete $medium->{md5sum};
+
+ #- we have to read back the current synthesis file unmodified.
+ if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
+ $urpm->{error}(N("problem reading synthesis file of medium \"%s\"", $medium->{name}));
+ $medium->{ignore} = 1;
+ }
+ return;
+ }
+ delete $medium->{list};
+
+ {
+ my @unresolved_after = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
+ @unresolved_before == @unresolved_after or $$second_pass = 1;
+ }
+ }
+ }
+
+ unless ($medium->{virtual}) {
+ #- make sure to rebuild base files and clear medium modified state.
+ $medium->{modified} = 0;
+ $medium->{really_modified} = 1;
+ $urpm->{md5sum_modified} = 1;
+
+ #- but use newly created file.
+ unlink statedir_hdlist($urpm, $medium);
+ $medium->{synthesis} and unlink statedir_synthesis($urpm, $medium);
+ $medium->{list} and unlink statedir_list($urpm, $medium);
+ unless ($medium->{headers}) {
+ unlink statedir_synthesis($urpm, $medium);
+ unlink statedir_hdlist($urpm, $medium);
+ urpm::util::move(cachedir_hdlist($urpm, $medium),
+ statedir_hdlist_or_synthesis($urpm, $medium));
+ }
+ if ($medium->{list}) {
+ urpm::util::move(cachedir_list($urpm, $medium), statedir_list($urpm, $medium));
+ }
+
+ #- and create the associated synthesis file.
+ $medium->{must_build_synthesis} = !$medium->{synthesis};
+ }
+ 1;
+}
+
+sub _update_medium_first_pass_failed {
+ my ($urpm, $medium) = @_;
+
+ !$medium->{virtual} or return;
+
+ #- an error occurred while updating the medium; we have to remove temporary files.
+ unlink(glob("$urpm->{cachedir}/partial/*"));
+}
+
+#- take care of modified media only, or of all of them if everything has to be recomputed.
+sub _update_medium_second_pass {
+ my ($urpm, $medium, $callback) = @_;
+
+ $callback and $callback->('parse', $medium->{name});
+
+ #- a modified medium is an invalid medium; we have to read back the previous hdlist
+ #- or synthesis, which was not modified by the first pass above.
+
+ if ($medium->{headers} && !$medium->{modified}) {
+ $urpm->{log}(N("reading headers from medium \"%s\"", $medium->{name}));
+ ($medium->{start}, $medium->{end}) = $urpm->parse_headers(dir => "$urpm->{cachedir}/headers",
+ headers => $medium->{headers},
+ );
+ } elsif ($medium->{synthesis}) {
+ if ($medium->{virtual}) {
+ if (file_from_file_url($medium->{url})) {
+ _parse_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
+ }
+ } else {
+ _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
+ }
+ } else {
+ _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
+ $medium->{must_build_synthesis} ||= 1;
+ }
+
+ $callback && $callback->('done', $medium->{name});
+}
+
+sub _build_hdlist_synthesis {
+ my ($urpm, $medium) = @_;
+
+ if ($medium->{headers} && !$medium->{modified}) {
+ _build_hdlist_using_rpm_headers($urpm, $medium);
+ #- synthesis needs to be created, since the medium has been built from rpm files.
+ _build_synthesis($urpm, $medium);
+ } elsif ($medium->{synthesis}) {
+ } else {
+ #- check if the synthesis file can be built.
+ if ($medium->{must_build_synthesis} && !$medium->{modified} && !$medium->{virtual}) {
+ _build_synthesis($urpm, $medium);
+ }
+ }
+}
+
+sub _update_media__handle_some_flags {
+ my ($urpm, $forcekey, $all) = @_;
+
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
+ $forcekey and delete $medium->{'key-ids'};
+
+ if ($medium->{static}) {
+ #- don't ever update static media
+ $medium->{modified} = 0;
+ } elsif ($all) {
+ #- if we're rebuilding all media, mark them as modified (except removable ones)
+ $medium->{modified} ||= $medium->{url} !~ m!^removable!;
+ }
+ }
+}
+
+#- Update the urpmi database w.r.t. the current configuration.
+#- Takes care of modifications, and tries some tricks to bypass
+#- the recomputation of base files.
+#- Recognized options:
+#- all : all media are being rebuilt
+#- callback : UI callback
+#- forcekey : force retrieval of pubkey
+#- force : try to force rebuilding base files
+#- force_building_hdlist
+#- noclean : keep old files in the header cache directory
+#- nolock : don't lock the urpmi database
+#- nomd5sum : don't verify MD5SUM of retrieved files
+#- nopubkey : don't use rpm pubkeys
+#- probe_with : probe synthesis or hdlist (or none)
+#- quiet : download hdlists quietly
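+#-
+#- A minimal usage sketch (illustrative only; it assumes $urpm has already been
+#- configured via urpm::media::configure, as the callers in urpmi and
+#- urpmi.update do):
+#-   urpm::media::update_media($urpm,
+#-       quiet    => 1,
+#-       callback => \&urpm::download::sync_logger,
+#-   );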
+sub update_media {
+ my ($urpm, %options) = @_;
+
+ $urpm->{media} or return; # verify that configuration has been read
+
+ $options{nopubkey} ||= $urpm->{options}{nopubkey};
+ #- get gpg-pubkey signature.
+ if (!$options{nopubkey}) {
+ urpm::sys::lock_rpm_db($urpm, 'exclusive');
+ $urpm->{keys} or $urpm->parse_pubkeys(root => $urpm->{root});
+ }
+ #- lock database if allowed.
+ urpm::sys::lock_urpmi_db($urpm, 'exclusive') if !$options{nolock};
+
+ #- examine each medium to see whether it needs to be updated.
+ #- if so, and unless forced, try to use a pre-calculated
+ #- hdlist file, else build one from the rpm files.
+ clean($urpm);
+
+ _update_media__handle_some_flags($urpm, $options{forcekey}, $options{all});
+
+ my $clean_cache = !$options{noclean};
+ my $second_pass;
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
+ _update_medium_first_pass($urpm, $medium, \$second_pass, \$clean_cache, %options)
+ or _update_medium_first_pass_failed($urpm, $medium);
+ }
+
+ #- some unresolved provides may force rebuilding all synthesis files,
+ #- in which case a second pass is necessary.
+ if ($second_pass) {
+ $urpm->{log}(N("performing second pass to compute dependencies\n"));
+ $urpm->unresolved_provides_clean;
+ }
+
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
+ if ($second_pass) {
+ #- the second pass consists of reading the synthesis or hdlist files again.
+ _update_medium_second_pass($urpm, $medium, $options{callback});
+ }
+ _build_hdlist_synthesis($urpm, $medium);
+
+ if ($medium->{really_modified}) {
+ _get_pubkey_and_descriptions($urpm, $medium, $options{nopubkey});
+ _read_cachedir_pubkey($urpm, $medium);
+ generate_medium_names($urpm, $medium);
+ }
+ }
+
+ if ($urpm->{modified}) {
+ if ($options{noclean}) {
+ #- clean headers cache directory to remove everything that is no longer
+ #- useful according to the depslist.
+ urpm::remove_obsolete_headers_in_cache($urpm);
+ }
+ #- write config files in any case
+ write_config($urpm);
+ urpm::download::dump_proxy_config();
+ } elsif ($urpm->{md5sum_modified}) {
+ #- NB: in case of $urpm->{modified}, write_MD5SUM is called in write_config above
+ write_MD5SUM($urpm);
+ }
+
+ $options{nolock} or urpm::sys::unlock_urpmi_db($urpm);
+ $options{nopubkey} or urpm::sys::unlock_rpm_db($urpm);
+}
+
+#- clean params and depslist computation zone.
+sub clean {
+ my ($urpm) = @_;
+
+ $urpm->{depslist} = [];
+ $urpm->{provides} = {};
+
+ foreach (@{$urpm->{media} || []}) {
+ delete $_->{start};
+ delete $_->{end};
+ }
+}
+
+
+#- get the list of packages that should not be upgraded or installed,
+#- typically from the inst.list or skip.list files.
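+#- A small illustrative example (the path is an assumption; urpmi usually keeps
+#- such lists under /etc/urpmi):
+#-   my $skip = get_packages_list('/etc/urpmi/skip.list', 'foo,bar');
+#-   #- $skip is now an array ref of entries, with comments and blanks stripped.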
+sub get_packages_list {
+ my ($file, $o_extra) = @_;
+ my $val = [];
+ open(my $f, '<', $file) or return [];
+ foreach (<$f>, split /,/, $o_extra || '') {
+ chomp; s/#.*$//; s/^\s*//; s/\s*$//;
+ next if $_ eq '';
+ push @$val, $_;
+ }
+ $val;
+}
+
+1;
diff --git a/urpm/parallel.pm b/urpm/parallel.pm
index 2e0fc70a..fc8f6020 100644
--- a/urpm/parallel.pm
+++ b/urpm/parallel.pm
@@ -46,8 +46,8 @@ sub resolve_dependencies {
my $file = "$urpm->{cachedir}/partial/parallel.cz";
unlink $file;
foreach (@{$urpm->{media}}) {
- urpm::is_valid_medium($_) or next;
- my $f = urpm::statedir_synthesis($urpm, $_);
+ urpm::media::is_valid_medium($_) or next;
+ my $f = urpm::media::statedir_synthesis($urpm, $_);
system "cat '$f' >> '$file'";
}
#- let each node determine what is requested, according to handler given.
diff --git a/urpme b/urpme
index 6cd94f03..9658f2a4 100644
--- a/urpme
+++ b/urpme
@@ -24,6 +24,7 @@ use urpm;
use urpm::args;
use urpm::msg;
use urpm::install;
+use urpm::media;
$ENV{PATH} = "/sbin:/usr/sbin:/bin:/usr/bin:/usr/X11R6/bin";
delete @ENV{qw(ENV BASH_ENV IFS CDPATH)};
@@ -79,7 +80,7 @@ unless ($test) {
}
#- just configure parallel mode if available.
-$urpm->configure(
+urpm::media::configure($urpm,
synthesis => ($parallel ? 'none' : ''),
root => $root,
parallel => $parallel,
diff --git a/urpmf b/urpmf
index 8b42bc4c..2ca63804 100755
--- a/urpmf
+++ b/urpmf
@@ -23,6 +23,7 @@ use strict;
use urpm;
use urpm::args;
use urpm::msg;
+use urpm::media;
sub usage() {
print N("urpmf version %s
@@ -207,7 +208,7 @@ my $need_hdlist = grep { $usedtags{$_} } qw(
url
vendor
);
-$urpm->configure(
+urpm::media::configure($urpm,
nocheck_access => 1,
no_skiplist => 1,
media => $media,
diff --git a/urpmi b/urpmi
index f966401f..01da1045 100755
--- a/urpmi
+++ b/urpmi
@@ -24,6 +24,7 @@ use urpm;
use urpm::args;
use urpm::msg;
use urpm::install;
+use urpm::media;
use urpm::util qw(untaint difference2 member);
#- contains informations to parse installed system.
@@ -359,8 +360,8 @@ if ($auto_update && !$bug && !$env) {
#- FIXME we need to configure it twice; otherwise
#- some settings are lost (like the skiplist) for
#- some reason.
- $urpm->configure(%config_hash);
- $urpm->update_media(
+ urpm::media::configure($urpm, %config_hash);
+ urpm::media::update_media($urpm,
all => 1,
callback => \&urpm::download::sync_logger,
noclean => $noclean,
@@ -373,7 +374,7 @@ if ($auto_update && !$bug && !$env) {
}
}
-$urpm->configure(%config_hash);
+urpm::media::configure($urpm, %config_hash);
if ($bug) {
require urpm::bug_report;
diff --git a/urpmi.addmedia b/urpmi.addmedia
index 3db52574..04b6fe71 100755
--- a/urpmi.addmedia
+++ b/urpmi.addmedia
@@ -25,6 +25,7 @@ use urpm::args;
use urpm::msg;
use urpm::download ();
use urpm::cfg;
+use urpm::media;
sub usage {
my $m = shift;
@@ -83,8 +84,8 @@ sub remove_failed {
if (@media) {
print STDERR join("\n", map { N("unable to update medium \"%s\"\n", $_->{name}) } @media);
local $urpm->{log} = sub {};
- $urpm->remove_selected_media;
- $urpm->update_media(%options, callback => $sync_logger);
+ urpm::media::remove_selected_media($urpm);
+ urpm::media::update_media($urpm, %options, callback => $sync_logger);
exit(1);
}
}
@@ -122,7 +123,7 @@ if (!-e $urpm->{config}) {
$urpm->{error}(N("Will create config file [%s]", $urpm->{config}));
open my $_f, '>', $urpm->{config} or $urpm->{fatal}(6, N("Can't create config file [%s]", $urpm->{config}));
}
-$urpm->read_config;
+urpm::media::read_config($urpm);
if ($options{distrib}) {
$with || $relative_hdlist
@@ -143,7 +144,7 @@ if ($options{distrib}) {
1;
} : undef;
- $urpm->add_distrib_media(
+ urpm::media::add_distrib_media($urpm,
$name,
$url,
virtual => $options{virtual},
@@ -156,7 +157,7 @@ if ($options{distrib}) {
remove_failed($urpm, grep { $_->{modified} } @{$urpm->{media}});
exit(1);
};
- $urpm->update_media(%options, callback => $sync_logger);
+ urpm::media::update_media($urpm, %options, callback => $sync_logger);
remove_failed($urpm, grep { $_->{modified} } @{$urpm->{media}});
} else {
@@ -168,7 +169,7 @@ if ($options{distrib}) {
$options{probe_with} or usage N("`with' missing for network media\n");
}
- $urpm->add_medium(
+ urpm::media::add_medium($urpm,
$name, $url, $relative_hdlist,
virtual => $options{virtual},
update => $options{update},
@@ -177,16 +178,16 @@ if ($options{distrib}) {
);
urpm::download::copy_cmd_line_proxy($name);
if ($options{raw}) {
- $urpm->write_config;
+ urpm::media::write_config($urpm);
} else {
local $SIG{INT} = sub {
- my $medium = urpm::name2medium($urpm, $name);
+ my $medium = urpm::media::name2medium($urpm, $name);
remove_failed($urpm, $medium) if $medium && $medium->{modified};
exit(1);
};
- $urpm->update_media(%options, callback => $sync_logger);
+ urpm::media::update_media($urpm, %options, callback => $sync_logger);
#- check creation of media
- my $medium = urpm::name2medium($urpm, $name) or die N("unable to create medium \"%s\"\n", $name);
+ my $medium = urpm::media::name2medium($urpm, $name) or die N("unable to create medium \"%s\"\n", $name);
remove_failed($urpm, $medium) if $medium->{modified};
}
}
diff --git a/urpmi.removemedia b/urpmi.removemedia
index dfc7ea3f..25e43ab6 100755
--- a/urpmi.removemedia
+++ b/urpmi.removemedia
@@ -25,6 +25,7 @@ use strict;
use urpm;
use urpm::msg;
use urpm::download;
+use urpm::media;
$ENV{PATH} = "/sbin:/usr/sbin:/bin:/usr/bin:/usr/X11R6/bin";
delete @ENV{qw(ENV BASH_ENV IFS CDPATH)};
@@ -72,7 +73,7 @@ if ($< != 0) {
$options{verbose} > 0 or $urpm->{log} = sub {};
-$urpm->read_config;
+urpm::media::read_config($urpm);
urpm::download::set_cmdline_proxy();
my @entries = map { $_->{name} } @{$urpm->{media}};
@@ -82,15 +83,15 @@ if ($options{all}) {
@toremove or die N("the entry to remove is missing\n(one of %s)\n", join(", ", @entries));
}
-my @selected = $urpm->select_media_by_name(\@toremove, $options{strict_match})
+my @selected = urpm::media::select_media_by_name($urpm, \@toremove, $options{strict_match})
or exit 1;
-$urpm->remove_media(\@selected);
+urpm::media::remove_media($urpm, \@selected);
if ($options{noclean}) {
#- FIXME: AFAIK it is broken because function below use {depslist} which we don't clean here
urpm::remove_obsolete_headers_in_cache($urpm);
}
-$urpm->write_urpmi_cfg;
+urpm::media::write_urpmi_cfg($urpm);
exit(0);
diff --git a/urpmi.update b/urpmi.update
index 6abca159..0ed9d6c4 100755
--- a/urpmi.update
+++ b/urpmi.update
@@ -24,6 +24,7 @@ use urpm;
use urpm::args;
use urpm::msg;
use urpm::download ();
+use urpm::media;
sub usage() {
print N("usage: urpmi.update [options] <name> ...
@@ -40,7 +41,6 @@ where <name> is a medium name to update.
") . N(" --update - update only update media.
") . N(" --no-md5sum - disable MD5SUM file checking.
") . N(" --force-key - force update of gpg key.
-") . N(" --norebuild - don't try to rebuild hdlist if not readable.
") . N(" --ignore - don't update, mark the media as ignored.
") . N(" --no-ignore - don't update, mark the media as enabled.
") . N(" -a - select all non-removable media.
@@ -68,8 +68,7 @@ $options{verbose} > 0 or $urpm->{log} = sub {};
if ($< != 0) {
$urpm->{fatal}(1, N("Only superuser is allowed to update media"));
}
-$urpm->read_config;
-exists $options{limit_rate} or $options{limit_rate} = $urpm->{options}{'limit-rate'};
+urpm::media::read_config($urpm);
my @entries = map { $_->{name} } @{$urpm->{media}};
@@ -77,7 +76,7 @@ if ($options{all} && !defined $options{ignore}) {
@entries == 0 and die N("nothing to update (use urpmi.addmedia to add a media)\n");
} else {
if ($options{all}) { @toupdates = '' } #- select all
- $urpm->select_media(@toupdates);
+ urpm::media::select_media($urpm, @toupdates);
my $something_todo = 0;
foreach (@{$urpm->{media}}) {
$options{update} && $_->{update} and $_->{modified} = 1;
@@ -98,9 +97,9 @@ if ($options{all} && !defined $options{ignore}) {
if (defined $options{ignore}) {
my $str = join(", ", map { N("\"%s\"", $_->{name}) } grep { $_->{modified} } @{$urpm->{media}});
$urpm->{log}($options{ignore} ? N("ignoring media %s", $str) : N("enabling media %s", $str));
- $urpm->write_config;
+ urpm::media::write_config($urpm);
} else {
- $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
+ urpm::media::update_media($urpm, %options, callback => \&urpm::download::sync_logger);
#- try to umount removable device which may have been mounted.
$urpm->try_umounting_removables;
}
diff --git a/urpmq b/urpmq
index 1fc76dc9..d63ab0e1 100755
--- a/urpmq
+++ b/urpmq
@@ -27,6 +27,7 @@ use urpm;
use urpm::args;
use urpm::msg;
use urpm::sys;
+use urpm::media;
#- default options.
$urpm::args::options = { use_provides => 1 };
@@ -138,7 +139,7 @@ if ($urpm::args::options{ignorearch}) { urpm::shunt_ignorearch() }
$urpm::args::options{upgrade} && !$urpm::args::options{env} && !$urpm::args::options{nolock}
and urpm::sys::lock_rpm_db($urpm);
urpm::sys::lock_urpmi_db($urpm) if !$urpm::args::options{nolock};
-$urpm->configure(
+urpm::media::configure($urpm,
nocheck_access => 1,
nodepslist => $urpm::args::options{nodepslist},
media => $urpm::args::options{media},
@@ -331,8 +332,8 @@ if ($urpm::args::options{list_aliases}) {
{
my %h = map { $_ => 1 } @headers; @headers = keys %h;
my $hdlist_path = $medium->{virtual}
- ? urpm::hdlist_or_synthesis_for_virtual_medium($medium)
- : urpm::statedir_hdlist($urpm, $medium);
+ ? urpm::media::hdlist_or_synthesis_for_virtual_medium($medium)
+ : urpm::media::statedir_hdlist($urpm, $medium);
if (-s $hdlist_path) {
require MDV::Packdrakeng;
my $packer = MDV::Packdrakeng->open(archive => $hdlist_path, quiet => 1);