path: root/urpm.pm
author     Pascal Rigaux <pixel@mandriva.com>   2007-04-24 19:34:32 +0000
committer  Pascal Rigaux <pixel@mandriva.com>   2007-04-24 19:34:32 +0000
commit     4df7583caa71a35d6d68c373fc0c62d1be6d955b (patch)
tree       bddf36169b314d82ff20036dea7e47fa4fec5bcd /urpm.pm
parent     7a606ec3537b2f465f8e06e99b445c756bfcca8f (diff)
re-sync after the big svn loss
Diffstat (limited to 'urpm.pm')
-rw-r--r--  urpm.pm  3322
1 file changed, 48 insertions(+), 3274 deletions(-)
diff --git a/urpm.pm b/urpm.pm
index 05b1091f..b1d2ed3e 100644
--- a/urpm.pm
+++ b/urpm.pm
@@ -10,17 +10,15 @@ use urpm::download;
use urpm::util;
use urpm::sys;
use urpm::cfg;
-use MDV::Distribconf;
+use urpm::md5sum;
-our $VERSION = '4.8.29';
-our @ISA = qw(URPM);
+our $VERSION = '4.9.21';
+our @ISA = qw(URPM Exporter);
+our @EXPORT_OK = 'file_from_local_url';
use URPM;
use URPM::Resolve;
-my $RPMLOCK_FILE;
-my $LOCK_FILE;
-
#- this violently overrides is_arch_compat() to always return true.
sub shunt_ignorearch {
eval q( sub URPM::Package::is_arch_compat { 1 } );
@@ -35,2107 +33,87 @@ sub new {
depslist => [],
provides => {},
- config => "/etc/urpmi/urpmi.cfg",
- skiplist => "/etc/urpmi/skip.list",
- instlist => "/etc/urpmi/inst.list",
- statedir => "/var/lib/urpmi",
- cachedir => "/var/cache/urpmi",
media => undef,
options => {},
fatal => sub { printf STDERR "%s\n", $_[1]; exit($_[0]) },
error => sub { printf STDERR "%s\n", $_[0] },
+ info => sub { printf "%s\n", $_[0] },
log => sub { printf "%s\n", $_[0] },
ui_msg => sub {
$self->{log}($_[0]);
ref $self->{ui} && ref $self->{ui}{msg} and $self->{ui}{msg}->($_[1]);
},
}, $class;
+
+ set_files($self, '');
$self->set_nofatal(1);
$self;
}
-sub requested_ftp_http_downloader {
- my ($urpm, $media_name) = @_;
-
- $urpm->{options}{downloader} || #- cmd-line switch
- $media_name && do {
- #- per-media config
- my $m = name2medium($urpm, $media_name);
- $m && $m->{downloader};
- } || $urpm->{global_config}{downloader};
-}
-
-#- $medium can be undef
-sub sync_webfetch {
- my ($urpm, $medium, $files, $std_options, %more_options) = @_;
-
- my %options = (
- dir => "$urpm->{cachedir}/partial",
- limit_rate => $std_options->{limit_rate},
- compress => $std_options->{compress},
- proxy => get_proxy($medium),
- quiet => $std_options->{quiet}, #- often overridden in the caller, why??
- $medium ? (media => $medium->{name}) : (),
- %more_options,
+sub set_files {
+ my ($urpm, $urpmi_root) = @_;
+ my %h = (
+ config => "$urpmi_root/etc/urpmi/urpmi.cfg",
+ skiplist => "$urpmi_root/etc/urpmi/skip.list",
+ instlist => "$urpmi_root/etc/urpmi/inst.list",
+ private_netrc => "$urpmi_root/etc/urpmi/netrc",
+ statedir => "$urpmi_root/var/lib/urpmi",
+ cachedir => "$urpmi_root/var/cache/urpmi",
+ root => $urpmi_root,
+ $urpmi_root ? (urpmi_root => $urpmi_root) : (),
);
- foreach my $cpt (qw(retry wget-options curl-options rsync-options prozilla-options)) {
- $options{$cpt} = $urpm->{options}{$cpt} if defined $urpm->{options}{$cpt};
- }
-
- _sync_webfetch_raw($urpm, $files, \%options);
-}
-
-#- syncing algorithms.
-sub _sync_webfetch_raw {
- my ($urpm, $files, $options) = @_;
-
- my %files;
- #- currently ftp and http protocols are managed by curl or wget,
- #- ssh and rsync protocols are managed by rsync *AND* ssh.
- foreach (@$files) {
- my $proto = protocol_from_url($_) or die N("unknown protocol defined for %s", $_);
- push @{$files{$proto}}, $_;
- }
- if ($files{removable} || $files{file}) {
- my @l = map { file_from_local_url($_) } @{$files{removable} || []}, @{$files{file} || []};
- eval { sync_file($options, @l) };
- $urpm->{fatal}(10, $@) if $@;
- delete @files{qw(removable file)};
- }
- if ($files{ftp} || $files{http} || $files{https}) {
- my @available = urpm::download::available_ftp_http_downloaders();
-
- #- use user default downloader if provided and available
- my $requested_downloader = requested_ftp_http_downloader($urpm, $options->{media});
- my ($preferred) = grep { $_ eq $requested_downloader } @available;
- if (!$preferred) {
- #- else first downloader of @available is the default one
- $preferred = $available[0];
- if ($requested_downloader && !our $webfetch_not_available) {
- $urpm->{log}(N("%s is not available, falling back on %s", $requested_downloader, $preferred));
- $webfetch_not_available = 1;
- }
- }
- my $sync = $urpm::download::{"sync_$preferred"} or die N("no webfetch found, supported webfetch are: %s\n", join(", ", urpm::download::ftp_http_downloaders()));
- $sync->($options, @{$files{ftp} || []}, @{$files{http} || []}, @{$files{https} || []});
-
- delete @files{qw(ftp http https)};
- }
- if ($files{rsync}) {
- sync_rsync($options, @{$files{rsync}});
- delete $files{rsync};
- }
- if ($files{ssh}) {
- my @ssh_files = map { m!^ssh://([^/]*)(.*)! ? "$1:$2" : () } @{$files{ssh}};
- sync_ssh($options, @ssh_files);
- delete $files{ssh};
- }
- %files and die N("unable to handle protocol: %s", join ', ', keys %files);
-}
-
-our @PER_MEDIA_OPT = qw(
- downloader
- hdlist
- ignore
- key-ids
- list
- md5sum
- noreconfigure
- priority
- priority-upgrade
- removable
- static
- synthesis
- update
- verify-rpm
- virtual
- with_hdlist
-);
-
-#- Loads /etc/urpmi/urpmi.cfg and performs basic checks.
-#- Does not handle old format: <name> <url> [with <path_hdlist>]
-#- options :
-#- - nocheck_access : don't check presence of hdlist and other files
-sub read_config {
- my ($urpm, %options) = @_;
- return if $urpm->{media}; #- media already loaded
- $urpm->{media} = [];
- my $config = urpm::cfg::load_config($urpm->{config})
- or $urpm->{fatal}(6, $urpm::cfg::err);
-
- #- global options
- if ($config->{''}) {
- foreach my $opt (qw(
- allow-force
- allow-nodeps
- auto
- compress
- downloader
- default-media
- excludedocs
- excludepath
- fuzzy
- ignoresize
- keep
- key-ids
- limit-rate
- nopubkey
- norebuild
- post-clean
- pre-clean
- priority-upgrade
- prohibit-remove
- repackage
- resume
- retry
- split-length
- split-level
- strict-arch
- verify-rpm
- curl-options
- rsync-options
- wget-options
- prozilla-options
- )) {
- if (defined $config->{''}{$opt} && !exists $urpm->{options}{$opt}) {
- $urpm->{options}{$opt} = $config->{''}{$opt};
- }
- }
- }
- #- per-media options
- foreach my $m (grep { $_ ne '' } keys %$config) {
- my $medium = { name => $m, clear_url => $config->{$m}{url} };
- foreach my $opt (@PER_MEDIA_OPT) {
- defined $config->{$m}{$opt} and $medium->{$opt} = $config->{$m}{$opt};
- }
- $urpm->probe_medium($medium, %options) and push @{$urpm->{media}}, $medium;
- }
-
- eval { require urpm::ldap; urpm::ldap::load_ldap_media($urpm, %options) };
-
- #- load default values
- foreach (qw(post-clean verify-rpm)) {
- exists $urpm->{options}{$_} or $urpm->{options}{$_} = 1;
- }
-
- $urpm->{media} = [ sort { $a->{priority} <=> $b->{priority} } @{$urpm->{media}} ];
-
- #- remember if an hdlist or list file is already used
- my %filelists;
- foreach my $medium (@{$urpm->{media}}) {
- foreach my $filetype (qw(hdlist list)) {
- $medium->{$filetype} or next;
-
- if ($filelists{$filetype}{$medium->{$filetype}}) {
- $medium->{ignore} = 1;
- $urpm->{error}(
- $filetype eq 'hdlist'
- ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
- : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name})
- );
- } else {
- $filelists{$filetype}{$medium->{$filetype}} = 1;
- }
- }
- }
-
- #- check the presence of hdlist and list files if necessary.
- if (!$options{nocheck_access}) {
- foreach my $medium (@{$urpm->{media}}) {
- $medium->{ignore} and next;
-
- if (-r statedir_hdlist($urpm, $medium)) {}
- elsif ($medium->{synthesis} && -r statedir_synthesis($urpm, $medium)) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- }
- if ($medium->{list} && -r statedir_list($urpm, $medium)) {}
- elsif ($medium->{url}) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
- }
- }
- }
-
- #- read MD5 sums (usually not in urpmi.cfg but in a separate file)
- foreach (@{$urpm->{media}}) {
- if (my $md5sum = get_md5sum("$urpm->{statedir}/MD5SUM", statedir_hdlist_or_synthesis($urpm, $_))) {
- $_->{md5sum} = $md5sum;
- }
- }
-
- #- remember global options for write_config
- $urpm->{global_config} = $config->{''};
-}
-
-#- probe medium to be used, take old medium into account too.
-sub probe_medium {
- my ($urpm, $medium, %options) = @_;
- local $_;
-
- if (name2medium($urpm, $medium->{name})) {
- $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
- return;
- }
+ $urpm->{$_} = $h{$_} foreach keys %h;
- $medium->{url} ||= $medium->{clear_url};
-
- if ($medium->{virtual}) {
- #- a virtual medium needs to have an url available without using a list file.
- if ($medium->{hdlist} || $medium->{list}) {
- $medium->{ignore} = 1;
- $urpm->{error}(N("virtual medium \"%s\" should not have defined hdlist or list file, medium ignored",
- $medium->{name}));
- }
- unless ($medium->{url}) {
- $medium->{ignore} = 1;
- $urpm->{error}(N("virtual medium \"%s\" should have a clear url, medium ignored",
- $medium->{name}));
- }
- } else {
- unless ($medium->{ignore} || $medium->{hdlist}) {
- $medium->{hdlist} = "hdlist.$medium->{name}.cz";
- -e statedir_hdlist($urpm, $medium) or
- $medium->{ignore} = 1,
- $urpm->{error}(N("unable to find hdlist file for \"%s\", medium ignored", $medium->{name}));
- }
- unless ($medium->{ignore} || $medium->{list}) {
- unless (defined $medium->{url}) {
- $medium->{list} = "list.$medium->{name}";
- unless (-e statedir_list($urpm, $medium)) {
- $medium->{ignore} = 1,
- $urpm->{error}(N("unable to find list file for \"%s\", medium ignored", $medium->{name}));
- }
- }
- }
-
- #- there is a little more to do at this point as the url is not known; inspect the list file directly to find it.
- unless ($medium->{url}) {
- my %probe;
- if (-r statedir_list($urpm, $medium)) {
- my $listfile = $urpm->open_safe("<", statedir_list($urpm, $medium));
- if ($listfile) {
- while (<$listfile>) {
- #- /./ is end of url marker in list file (typically generated by a
- #- find . -name "*.rpm" > list
- #- for an exportable list file).
- m|^(.*)/\./| and $probe{$1} = undef;
- m|^(.*)/[^/]*$| and $probe{$1} = undef;
- }
- close $listfile;
- }
- }
- foreach (sort { length($a) <=> length($b) } keys %probe) {
- if ($medium->{url}) {
- if ($medium->{url} ne substr($_, 0, length($medium->{url}))) {
- $medium->{ignore} or $urpm->{error}(N("inconsistent list file for \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- last;
- }
- } else {
- $medium->{url} = $_;
- }
- }
- unless ($options{nocheck_access}) {
- unless ($medium->{url}) {
- $medium->{ignore} or $urpm->{error}(N("unable to inspect list file for \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- }
- }
- }
- }
-
- #- probe removable device.
- $urpm->probe_removable_device($medium);
-
- #- clear URLs for trailing /es.
- $medium->{url} and $medium->{url} =~ s|(.*?)/*$|$1|;
- $medium->{clear_url} and $medium->{clear_url} =~ s|(.*?)/*$|$1|;
-
- $medium;
-}
-
-#- returns the removable device name if it corresponds to an iso image, '' otherwise
-sub is_iso {
- my ($removable_dev) = @_;
- $removable_dev && $removable_dev =~ /\.iso$/i;
+ require File::Path;
+ File::Path::mkpath([ $h{statedir},
+ (map { "$h{cachedir}/$_" } qw(headers partial rpms)),
+ dirname($h{config}),
+ "$urpmi_root/var/lib/rpm",
+ ]);
}
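The new set_files() above derives every configuration, state and cache path from a single optional prefix. A minimal usage sketch (the prefix value is hypothetical, not part of the commit):

    use urpm;

    my $urpm = urpm->new;                  # new() now calls set_files($self, '')
    # $urpm->{config}   eq "/etc/urpmi/urpmi.cfg"
    # $urpm->{statedir} eq "/var/lib/urpmi"

    urpm::set_files($urpm, '/mnt/chroot'); # hypothetical chroot-style prefix
    # $urpm->{config}     eq "/mnt/chroot/etc/urpmi/urpmi.cfg"
    # $urpm->{cachedir}   eq "/mnt/chroot/var/cache/urpmi"
    # $urpm->{urpmi_root} is only set when the prefix is non-empty;
    # set_files() also mkpath()s the state and cache directories.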
sub protocol_from_url {
my ($url) = @_;
- $url =~ m!^([^:_]*)[^:]*:! && $1;
+ $url =~ m!^(\w+)(_[^:]*)?:! && $1;
}
sub file_from_local_url {
my ($url) = @_;
$url =~ m!^(?:removable[^:]*:/|file:/)?(/.*)! && $1;
}
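For illustration (URLs hypothetical), the two helpers above behave as follows; note that file_from_local_url() is now exportable through @EXPORT_OK:

    use urpm 'file_from_local_url';

    urpm::protocol_from_url('http://mirror.example.com/media/main');  # "http"
    urpm::protocol_from_url('ssh://host.example.com/repo');           # "ssh"

    file_from_local_url('/mnt/cdrom/media/main');             # "/mnt/cdrom/media/main"
    file_from_local_url('file://mnt/cdrom/media/main');       # "/mnt/cdrom/media/main"
    file_from_local_url('removable://mnt/cdrom/media/main');  # "/mnt/cdrom/media/main"
    file_from_local_url('http://mirror.example.com/main');    # false: not a local url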
-sub file_from_file_url {
- my ($url) = @_;
- $url =~ m!^(?:file:/)?(/.*)! && $1;
-}
-
-sub hdlist_or_synthesis_for_virtual_medium {
- my ($medium) = @_;
-
- my $path = file_from_file_url($medium->{url}) or return;
- "$path/$medium->{with_hdlist}";
-}
-
-sub statedir_hdlist_or_synthesis {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/" . ($medium->{synthesis} ? 'synthesis.' : '') . $medium->{hdlist};
-}
-sub statedir_hdlist {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/$medium->{hdlist}";
-}
-sub statedir_synthesis {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{statedir}/synthesis.$medium->{hdlist}";
-}
-sub statedir_list {
- my ($urpm, $medium) = @_;
- $medium->{list} && "$urpm->{statedir}/$medium->{list}";
-}
-sub statedir_descriptions {
- my ($urpm, $medium) = @_;
- $medium->{name} && "$urpm->{statedir}/descriptions.$medium->{name}";
-}
-sub statedir_names {
- my ($urpm, $medium) = @_;
- $medium->{name} && "$urpm->{statedir}/names.$medium->{name}";
-}
-sub cachedir_hdlist {
- my ($urpm, $medium) = @_;
- $medium->{hdlist} && "$urpm->{cachedir}/partial/$medium->{hdlist}";
-}
-sub cachedir_list {
- my ($urpm, $medium) = @_;
- $medium->{list} && "$urpm->{cachedir}/partial/$medium->{list}";
-}
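To make the (pre-commit) path helpers above concrete, a sketch with a hypothetical medium and the default state/cache directories:

    my $urpm   = { statedir => '/var/lib/urpmi', cachedir => '/var/cache/urpmi' };
    my $medium = { name => 'main', hdlist => 'hdlist.main.cz', list => 'list.main' };

    urpm::statedir_hdlist($urpm, $medium);        # "/var/lib/urpmi/hdlist.main.cz"
    urpm::statedir_synthesis($urpm, $medium);     # "/var/lib/urpmi/synthesis.hdlist.main.cz"
    urpm::statedir_list($urpm, $medium);          # "/var/lib/urpmi/list.main"
    urpm::statedir_descriptions($urpm, $medium);  # "/var/lib/urpmi/descriptions.main"
    urpm::cachedir_hdlist($urpm, $medium);        # "/var/cache/urpmi/partial/hdlist.main.cz"
    urpm::cachedir_list($urpm, $medium);          # "/var/cache/urpmi/partial/list.main"
    # statedir_hdlist_or_synthesis() returns the "synthesis."-prefixed path
    # when $medium->{synthesis} is set, the plain hdlist path otherwise.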
-
-sub name2medium {
- my ($urpm, $name) = @_;
- my ($medium) = grep { $_->{name} eq $name } @{$urpm->{media}};
- $medium;
-}
-
-#- probe device associated with a removable device.
-sub probe_removable_device {
- my ($urpm, $medium) = @_;
-
- if ($medium->{url} && $medium->{url} =~ /^removable/) {
- #- try to find device name in url scheme, this is deprecated, use medium option "removable" instead
- if ($medium->{url} =~ /^removable_?([^_:]*)/) {
- $medium->{removable} ||= $1 && "/dev/$1";
- }
- } else {
- delete $medium->{removable};
- return;
- }
-
- #- try to find device to open/close for removable medium.
- if (my $dir = file_from_local_url($medium->{url})) {
- my %infos;
- my @mntpoints = urpm::sys::find_mntpoints($dir, \%infos);
- if (@mntpoints > 1) { #- return value is suitable for a hash.
- $urpm->{log}(N("too many mount points for removable medium \"%s\"", $medium->{name}));
- $urpm->{log}(N("taking removable device as \"%s\"", join ',', map { $infos{$_}{device} } @mntpoints));
- }
- if (is_iso($medium->{removable})) {
- $urpm->{log}(N("Medium \"%s\" is an ISO image, will be mounted on-the-fly", $medium->{name}));
- } elsif (@mntpoints) {
- if ($medium->{removable} && $medium->{removable} ne $infos{$mntpoints[-1]}{device}) {
- $urpm->{log}(N("using different removable device [%s] for \"%s\"",
- $infos{$mntpoints[-1]}{device}, $medium->{name}));
- }
- $medium->{removable} = $infos{$mntpoints[-1]}{device};
- } else {
- $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
- }
- } else {
- $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
- }
-}
-
-
-sub write_MD5SUM {
- my ($urpm) = @_;
-
- #- write MD5SUM file
- my $fh = $urpm->open_safe('>', "$urpm->{statedir}/MD5SUM") or return 0;
- foreach my $medium (grep { $_->{md5sum} } @{$urpm->{media}}) {
- my $s = basename(statedir_hdlist_or_synthesis($urpm, $medium));
- print $fh "$medium->{md5sum} $s\n";
- }
-
- $urpm->{log}(N("wrote %s", "$urpm->{statedir}/MD5SUM"));
-
- delete $urpm->{md5sum_modified};
-}
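The MD5SUM file written above contains one "<md5sum> <basename>" line per medium with a recorded md5sum, for example (values hypothetical):

    # /var/lib/urpmi/MD5SUM
    # 0123456789abcdef0123456789abcdef synthesis.hdlist.main.cz
    # fedcba9876543210fedcba9876543210 hdlist.contrib.cz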
-
-#- Writes the urpmi.cfg file.
-sub write_urpmi_cfg {
- my ($urpm) = @_;
-
- #- avoid trashing existing configuration if it wasn't loaded
- $urpm->{media} or return;
-
- my $config = {
- #- global config options found in the config file, without the ones
- #- set from the command-line
- '' => $urpm->{global_config},
- };
- foreach my $medium (@{$urpm->{media}}) {
- next if $medium->{external};
- my $medium_name = $medium->{name};
- $config->{$medium_name}{url} = $medium->{clear_url};
- foreach (@PER_MEDIA_OPT) {
- defined $medium->{$_} and $config->{$medium_name}{$_} = $medium->{$_};
- }
- }
- urpm::cfg::dump_config($urpm->{config}, $config)
- or $urpm->{fatal}(6, N("unable to write config file [%s]", $urpm->{config}));
-
- $urpm->{log}(N("wrote config file [%s]", $urpm->{config}));
-
- #- everything should be synced now.
- delete $urpm->{modified};
-}
-
-sub write_config {
- my ($urpm) = @_;
-
- write_urpmi_cfg($urpm);
- write_MD5SUM($urpm);
-}
-
-sub _configure_parallel {
- my ($urpm, $alias) = @_;
- my @parallel_options;
- #- read parallel configuration
- foreach (cat_("/etc/urpmi/parallel.cfg")) {
- chomp; s/#.*$//; s/^\s*//; s/\s*$//;
- /\s*([^:]*):(.*)/ or $urpm->{error}(N("unable to parse \"%s\" in file [%s]", $_, "/etc/urpmi/parallel.cfg")), next;
- $1 eq $alias and push @parallel_options, $2;
- }
- #- if a configuration option has been found, use it; else fatal error.
- my $parallel_handler;
- if (@parallel_options) {
- foreach my $dir (grep { -d $_ } map { "$_/urpm" } @INC) {
- foreach my $pm (grep { -f $_ } glob("$dir/parallel*.pm")) {
- #- load parallel modules
- $urpm->{log}->(N("examining parallel handler in file [%s]", $pm));
- # perl_checker: require urpm::parallel_ka_run
- # perl_checker: require urpm::parallel_ssh
- eval { require $pm; $parallel_handler = $urpm->handle_parallel_options(join("\n", @parallel_options)) };
- $parallel_handler and last;
- }
- $parallel_handler and last;
- }
- }
- if ($parallel_handler) {
- if ($parallel_handler->{nodes}) {
- $urpm->{log}->(N("found parallel handler for nodes: %s", join(', ', keys %{$parallel_handler->{nodes}})));
- }
- $urpm->{parallel_handler} = $parallel_handler;
- } else {
- $urpm->{fatal}(1, N("unable to use parallel option \"%s\"", $alias));
- }
-}
-
-#- read urpmi.cfg file as well as necessary synthesis files
-#- options :
-#- callback
-#- call_back_only_once
-#- excludemedia
-#- hdlist
-#- media
-#- nodepslist
-#- noinstalling
-#- noskipping
-#- parallel
-#- root
-#- searchmedia
-#- skip
-#- sortmedia
-#- update
-#- usedistrib
-sub configure {
- my ($urpm, %options) = @_;
-
- $urpm->clean;
-
- $options{parallel} && $options{usedistrib} and $urpm->{fatal}(1, N("Can't use parallel mode with use-distrib mode"));
-
- if ($options{parallel}) {
- _configure_parallel($urpm, $options{parallel});
-
- if (!$options{media} && $urpm->{parallel_handler}{media}) {
- $options{media} = $urpm->{parallel_handler}{media};
- $urpm->{log}->(N("using associated media for parallel mode: %s", $options{media}));
- }
- } else {
- #- parallel is exclusive against root options.
- $urpm->{root} = $options{root};
- }
-
- $urpm->{root} && ! -c "$urpm->{root}/dev/null"
- and $urpm->{error}(N("there doesn't seem to be devices in the chroot in \"%s\"", $urpm->{root}));
-
- if ($options{synthesis}) {
- if ($options{synthesis} ne 'none') {
- #- synthesis take precedence over media, update options.
- $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{parallel} and
- $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update or --parallel"));
- $urpm->parse_synthesis($options{synthesis});
- #- synthesis disables the split of transaction (too risky and not useful).
- $urpm->{options}{'split-length'} = 0;
- }
- } else {
- if ($options{usedistrib}) {
- $urpm->{media} = [];
- $urpm->add_distrib_media("Virtual", $options{usedistrib}, %options, 'virtual' => 1);
- } else {
- $urpm->read_config(%options);
- if (!$options{media} && $urpm->{options}{'default-media'}) {
- $options{media} = $urpm->{options}{'default-media'};
- }
- }
- if ($options{media}) {
- delete $_->{modified} foreach @{$urpm->{media} || []};
- $urpm->select_media(split /,/, $options{media});
- foreach (grep { !$_->{modified} } @{$urpm->{media} || []}) {
- #- this is only a local ignore that will not be saved.
- $_->{tempignore} = $_->{ignore} = 1;
- }
- }
- if ($options{searchmedia}) {
- $urpm->select_media($options{searchmedia}); #- Ensure this media has been selected
- foreach (grep { !$_->{ignore} } @{$urpm->{media} || []}) {
- $_->{name} eq $options{searchmedia} and do {
- $_->{searchmedia} = 1;
- last;
- };
- }
- }
- if ($options{excludemedia}) {
- delete $_->{modified} foreach @{$urpm->{media} || []};
- foreach (select_media_by_name($urpm, [ split /,/, $options{excludemedia} ], {})) {
- $_->{modified} = 1;
- #- this is only a local ignore that will not be saved.
- $_->{tempignore} = $_->{ignore} = 1;
- }
- }
- if ($options{sortmedia}) {
- my @sorted_media = map { select_media_by_name($urpm, [$_], {}) } split(/,/, $options{sortmedia});
- my @remaining = difference2($urpm->{media}, \@sorted_media);
- $urpm->{media} = [ @sorted_media, @remaining ];
- }
- unless ($options{nodepslist}) {
- my $second_pass;
- do {
- foreach (grep { !$_->{ignore} && (!$options{update} || $_->{update}) } @{$urpm->{media} || []}) {
- our $currentmedia = $_; #- hack for urpmf
- delete @$_{qw(start end)};
- if ($_->{virtual}) {
- if (file_from_file_url($_->{url})) {
- if ($_->{synthesis}) {
- _parse_synthesis($urpm, $_,
- hdlist_or_synthesis_for_virtual_medium($_), callback => $options{callback});
- } else {
- #- we'll need a second pass
- defined $second_pass or $second_pass = 1;
- _parse_hdlist($urpm, $_,
- hdlist_or_synthesis_for_virtual_medium($_),
- callback => $options{call_back_only_once} && $second_pass ? undef : $options{callback},
- );
- }
- } else {
- $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $_->{name}));
- $_->{ignore} = 1;
- }
- } else {
- if ($options{hdlist} && file_size(statedir_hdlist($urpm, $_)) > 32) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_),
- callback => $options{callback},
- );
- } else {
- _parse_synthesis($urpm, $_,
- statedir_synthesis($urpm, $_),
- callback => $options{callback},
- );
- if (!is_valid_medium($_)) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_),
- callback => $options{callback},
- );
- }
- }
- }
- unless ($_->{ignore}) {
- _check_after_reading_hdlist_or_synthesis($urpm, $_);
- }
- unless ($_->{ignore}) {
- if ($_->{searchmedia}) {
- ($urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}) = ($_->{start}, $_->{end});
- $urpm->{log}(N("Search start: %s end: %s",
- $urpm->{searchmedia}{start}, $urpm->{searchmedia}{end}));
- delete $_->{searchmedia};
- }
- }
- }
- } while $second_pass && do {
- require URPM::Build;
- $urpm->{log}(N("performing second pass to compute dependencies\n"));
- $urpm->unresolved_provides_clean;
- $second_pass--;
- };
- }
- }
- #- determine package to withdraw (from skip.list file) only if something should be withdrawn.
- unless ($options{noskipping}) {
- my %uniq;
- $urpm->compute_flags(
- get_packages_list($urpm->{skiplist}, $options{skip}),
- skip => 1,
- callback => sub {
- my ($urpm, $pkg) = @_;
- $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
- $uniq{$pkg->fullname} = undef;
- $urpm->{log}(N("skipping package %s", scalar($pkg->fullname)));
- },
- );
- }
- unless ($options{noinstalling}) {
- my %uniq;
- $urpm->compute_flags(
- get_packages_list($urpm->{instlist}),
- disable_obsolete => 1,
- callback => sub {
- my ($urpm, $pkg) = @_;
- $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
- $uniq{$pkg->fullname} = undef;
- $urpm->{log}(N("would install instead of upgrade package %s", scalar($pkg->fullname)));
- },
- );
- }
- if ($options{bug}) {
- #- and a dump of rpmdb itself as synthesis file.
- my $db = db_open_or_die($urpm, $options{root});
- my $sig_handler = sub { undef $db; exit 3 };
- local $SIG{INT} = $sig_handler;
- local $SIG{QUIT} = $sig_handler;
-
- open my $rpmdb, "| " . ($ENV{LD_LOADER} || '') . " gzip -9 >'$options{bug}/rpmdb.cz'"
- or $urpm->syserror("Can't fork", "gzip");
- $db->traverse(sub {
- my ($p) = @_;
- #- this is not right but may be enough.
- my $files = join '@', grep { exists($urpm->{provides}{$_}) } $p->files;
- $p->pack_header;
- $p->build_info(fileno $rpmdb, $files);
- });
- close $rpmdb;
- }
-}
-
-#- add a new medium, sync the config file accordingly.
-#- returns the new medium's name. (might be different from the requested
-#- name if index_name was specified)
-sub add_medium {
- my ($urpm, $name, $url, $with_hdlist, %options) = @_;
-
- #- make sure configuration has been read.
- $urpm->{media} or die "caller should have used ->read_config or ->configure first";
- $urpm->lock_urpmi_db('exclusive') if !$options{nolock};
-
- #- if a medium with that name has already been found, we have to exit now
- my $medium;
- if (defined $options{index_name}) {
- my $i = $options{index_name};
- do {
- ++$i;
- $medium = name2medium($urpm, $name . $i);
- } while $medium;
- $name .= $i;
- } else {
- $medium = name2medium($urpm, $name);
- }
- $medium and $urpm->{fatal}(5, N("medium \"%s\" already exists", $medium->{name}));
-
- $url =~ s,/*$,,; #- clear URLs for trailing /es.
-
- #- creating the medium info.
- $medium = { name => $name, url => $url, update => $options{update}, modified => 1, ignore => $options{ignore} };
- if ($options{virtual}) {
- file_from_file_url($url) or $urpm->{fatal}(1, N("virtual medium needs to be local"));
- $medium->{virtual} = 1;
- } else {
- $medium->{hdlist} = "hdlist.$name.cz";
- $medium->{list} = "list.$name";
- $urpm->probe_removable_device($medium);
- }
-
- #- local media have priority, other are added at the end.
- if (file_from_file_url($url)) {
- $medium->{priority} = 0.5;
- } else {
- $medium->{priority} = 1 + @{$urpm->{media}};
- }
-
- #- check whether a password is visible, if not, set clear_url.
- my $has_password = $url =~ m|([^:]*://[^/:\@]*:)[^/:\@]*(\@.*)|;
- $medium->{clear_url} = $url unless $has_password;
-
- $with_hdlist and $medium->{with_hdlist} = $with_hdlist;
-
- #- create an entry in media list.
- push @{$urpm->{media}}, $medium;
-
- $urpm->{log}(N("added medium %s", $name));
-
- #- we need to reload the config, since some string substitutions may have occurred
- unless ($options{no_reload_config}) {
- $urpm->write_config;
- delete $urpm->{media};
- $urpm->read_config(nocheck_access => 1);
-
- #- get the fresh data structure after read_config
- $medium = name2medium($urpm, $name);
-
- #- Remember that the database has been modified and base files need to be updated.
- $medium->{modified} = 1;
- $urpm->{md5sum_modified} = 1;
- }
- if ($has_password) {
- $medium->{url} = $url;
- }
-
- $options{nolock} or $urpm->unlock_urpmi_db;
- $name;
-}
-
-#- add distribution media, according to url given.
-#- returns the list of names of added media.
-#- options :
-#- - limit_rate, compress : for downloading files
-#- - initial_number : when adding several numbered media, start with this number
-#- - probe_with : if eq 'synthesis', use synthesis instead of hdlists
-#- - ask_media : callback to know whether each media should be added
-#- other options are passed to add_medium()
-sub add_distrib_media {
- my ($urpm, $name, $url, %options) = @_;
-
- #- make sure configuration has been read.
- $urpm->{media} or die "caller should have used ->read_config or ->configure first";
-
- my $distribconf;
-
- if (my $dir = file_from_local_url($url)) {
- $urpm->try_mounting($dir)
- or $urpm->{error}(N("unable to mount the distribution medium")), return ();
- $distribconf = MDV::Distribconf->new($dir, undef);
- $distribconf->load
- or $urpm->{error}(N("this location doesn't seem to contain any distribution")), return ();
- } else {
- unlink "$urpm->{cachedir}/partial/media.cfg";
-
- $distribconf = MDV::Distribconf->new($url, undef);
- $distribconf->settree('mandriva');
-
- eval {
- $urpm->{log}(N("retrieving media.cfg file..."));
- sync_webfetch($urpm, undef,
- [ reduce_pathname($distribconf->getfullpath(undef, 'infodir') . '/media.cfg') ],
- \%options, quiet => 1);
- $urpm->{log}(N("...retrieving done"));
- };
- $@ and $urpm->{error}(N("...retrieving failed: %s", $@));
- if (-e "$urpm->{cachedir}/partial/media.cfg") {
- $distribconf->parse_mediacfg("$urpm->{cachedir}/partial/media.cfg")
- or $urpm->{error}(N("unable to parse media.cfg")), return();
- } else {
- $urpm->{error}(N("unable to access the distribution medium (no media.cfg file found)"));
- return ();
- }
- }
-
- #- cosmetic update of name if it contains spaces.
- $name =~ /\s/ and $name .= ' ';
-
- my @newnames;
- #- at this point, we have found a media.cfg file, so parse it
- #- and create all necessary media according to it.
- my $medium = $options{initial_number} || 1;
-
- foreach my $media ($distribconf->listmedia) {
- my $skip = 0;
- # if one of those values is set, by default, we skip adding the media
- foreach (qw(noauto)) {
- $distribconf->getvalue($media, $_) and do {
- $skip = 1;
- last;
- };
- }
- if ($options{ask_media}) {
- if ($options{ask_media}->(
- $distribconf->getvalue($media, 'name'),
- !$skip,
- )) {
- $skip = 0;
- } else {
- $skip = 1;
- }
- }
- $skip and next;
-
- my $media_name = $distribconf->getvalue($media, 'name') || '';
- my $is_update_media = $distribconf->getvalue($media, 'updates_for');
-
- push @newnames, $urpm->add_medium(
- $name ? "$media_name ($name$medium)" : $media_name,
- reduce_pathname($distribconf->getfullpath($media, 'path')),
- offset_pathname(
- $url,
- $distribconf->getpath($media, 'path'),
- ) . '/' . $distribconf->getpath($media, $options{probe_with} eq 'synthesis' ? 'synthesis' : 'hdlist'),
- index_name => $name ? undef : 0,
- no_reload_config => 1, #- no need to reload config each time, since we don't update the media
- %options,
- # the following override %options
- update => $is_update_media ? 1 : undef,
- );
- ++$medium;
- }
- return @newnames;
-}
-
-#- deprecated, use select_media_by_name instead
-sub select_media {
- my $urpm = shift;
- my $options = {};
- if (ref $_[0]) { $options = shift }
- foreach (select_media_by_name($urpm, [ @_ ], $options)) {
- #- select medium by setting the modified flag, do not check ignore.
- $_->{modified} = 1;
- }
-}
-
-sub select_media_by_name {
- my ($urpm, $names, $options) = @_;
-
- my %wanted = map { $_ => 1 } @$names;
-
- #- first the exact matches
- my @l = grep { delete $wanted{$_->{name}} } @{$urpm->{media}};
-
- #- check if some arguments don't correspond to the medium name.
- #- in such case, try to find the unique medium (or list candidate
- #- media found).
- foreach (keys %wanted) {
- my $q = quotemeta;
- my (@found, @foundi);
- my $regex = $options->{strict_match} ? qr/^$q$/ : qr/$q/;
- my $regexi = $options->{strict_match} ? qr/^$q$/i : qr/$q/i;
- foreach my $medium (@{$urpm->{media}}) {
- $medium->{name} =~ $regex and push @found, $medium;
- $medium->{name} =~ $regexi and push @foundi, $medium;
- }
- @found = @foundi if !@found;
-
- if (@found == 0) {
- $urpm->{error}(N("trying to select nonexistent medium \"%s\"", $_));
- } else {
- if (@found > 1) {
- $urpm->{log}(N("selecting multiple media: %s", join(", ", map { qq("$_->{name}") } @found)));
- }
- #- changed behaviour to select all occurrences by default.
- push @l, @found;
- }
- }
- @l;
-}
-
-#- deprecated, use remove_media instead
-sub remove_selected_media {
- my ($urpm) = @_;
-
- remove_media($urpm, [ grep { $_->{modified} } @{$urpm->{media}} ]);
-}
-
-sub remove_media {
- my ($urpm, $to_remove) = @_;
-
- foreach my $medium (@$to_remove) {
- $urpm->{log}(N("removing medium \"%s\"", $medium->{name}));
-
- #- mark to re-write configuration.
- $urpm->{modified} = 1;
-
- #- remove files associated with this medium.
- unlink grep { $_ } map { $_->($urpm, $medium) } \&statedir_hdlist, \&statedir_list, \&statedir_synthesis, \&statedir_descriptions, \&statedir_names;
-
- #- remove proxy settings for this media
- urpm::download::remove_proxy_media($medium->{name});
- }
-
- $urpm->{media} = [ difference2($urpm->{media}, $to_remove) ];
-}
-
-#- return list of synthesis or hdlist reference to probe.
-sub _probe_with_try_list {
- my ($suffix, $probe_with) = @_;
- my @probe = (
- "media_info/synthesis.hdlist.cz",
- "../media_info/synthesis.hdlist_$suffix.cz",
- "synthesis.hdlist.cz",
- );
- my @probe_hdlist = (
- "media_info/hdlist.cz",
- "../media_info/hdlist_$suffix.cz",
- "hdlist.cz",
- );
- if ($probe_with =~ /synthesis/) {
- push @probe, @probe_hdlist;
- } else {
- unshift @probe, @probe_hdlist;
- }
- @probe;
-}
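For example, with a hypothetical suffix 'main', _probe_with_try_list() orders its candidates as follows:

    urpm::_probe_with_try_list('main', 'synthesis');
    # returns, in order:
    #   media_info/synthesis.hdlist.cz
    #   ../media_info/synthesis.hdlist_main.cz
    #   synthesis.hdlist.cz
    #   media_info/hdlist.cz
    #   ../media_info/hdlist_main.cz
    #   hdlist.cz
    # with any other probe_with value, the three hdlist candidates come first.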
-
-sub may_reconfig_urpmi {
- my ($urpm, $medium, $options) = @_;
-
- my $f;
- if (my $dir = file_from_file_url($medium->{url})) {
- $f = reduce_pathname("$dir/reconfig.urpmi");
- } else {
- unlink($f = "$urpm->{cachedir}/partial/reconfig.urpmi");
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/reconfig.urpmi") ],
- $options, quiet => 1);
- };
- }
- if (-s $f) {
- reconfig_urpmi($urpm, $f, $medium->{name});
- }
- unlink $f if !file_from_file_url($medium->{url});
-}
-
-#- read a reconfiguration file for urpmi, and reconfigure media accordingly
-#- $rfile is the reconfiguration file (local), $name is the media name
-#-
-#- the format is similar to the RewriteRule of mod_rewrite, so:
-#- PATTERN REPLACEMENT [FLAG]
-#- where FLAG can be L or N
-#-
-#- example of reconfig.urpmi:
-#- # this is an urpmi reconfiguration file
-#- /cooker /cooker/$ARCH
-sub reconfig_urpmi {
- my ($urpm, $rfile, $name) = @_;
- -r $rfile or return;
-
- $urpm->{log}(N("reconfiguring urpmi for media \"%s\"", $name));
-
- my ($magic, @lines) = cat_($rfile);
- #- the first line of reconfig.urpmi must be magic, to be sure it's not an error file
- $magic =~ /^# this is an urpmi reconfiguration file/ or return undef;
-
- my @replacements;
- foreach (@lines) {
- chomp;
- s/^\s*//; s/#.*$//; s/\s*$//;
- $_ or next;
- my ($p, $r, $f) = split /\s+/, $_, 3;
- push @replacements, [ quotemeta $p, $r, $f || 1 ];
- }
-
- my $reconfigured = 0;
- my @reconfigurable = qw(url with_hdlist clear_url);
-
- my $medium = name2medium($urpm, $name) or return;
- my %orig = %$medium;
-
- URLS:
- foreach my $k (@reconfigurable) {
- foreach my $r (@replacements) {
- if ($medium->{$k} =~ s/$r->[0]/$r->[1]/) {
- $reconfigured = 1;
- #- Flags stolen from mod_rewrite: L(ast), N(ext)
- if ($r->[2] =~ /L/) {
- last;
- } elsif ($r->[2] =~ /N/) { #- dangerous option
- redo URLS;
- }
- }
- }
- #- check that the new url exists before committing changes (local mirrors)
- my $file = file_from_local_url($medium->{$k});
- if ($file && !-e $file) {
- %$medium = %orig;
- $reconfigured = 0;
- $urpm->{log}(N("...reconfiguration failed"));
- return;
- }
- }
-
- if ($reconfigured) {
- $urpm->{log}(N("reconfiguration done"));
- $urpm->write_config;
- }
- $reconfigured;
-}
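A small sketch (values hypothetical) of how one rule from the sample reconfig.urpmi above rewrites a medium URL; reconfig_urpmi() stores the quotemeta'd pattern and applies it with s/// to url, with_hdlist and clear_url:

    my ($pattern, $replacement) = ('/cooker', '/cooker/x86_64');
    my $url = 'ftp://mirror.example.com/cooker/media/main';
    $url =~ s/\Q$pattern\E/$replacement/;
    # $url is now 'ftp://mirror.example.com/cooker/x86_64/media/main'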
-
-sub _guess_hdlist_suffix {
- my ($dir) = @_;
- my ($suffix) = $dir =~ m!\bmedia/(\w+)/*\Z!;
- $suffix;
-}
-
-sub _hdlist_suffix {
- my ($medium) = @_;
- $medium->{with_hdlist} =~ /hdlist(.*?)(?:\.src)?\.cz$/ ? $1 : '';
-}
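Illustrative return values (paths hypothetical) for the two suffix helpers above:

    urpm::_guess_hdlist_suffix('ftp://mirror.example.com/distrib/media/main');  # 'main'
    urpm::_guess_hdlist_suffix('/mnt/cdrom/media/contrib/');                    # 'contrib'
    urpm::_hdlist_suffix({ with_hdlist => 'media_info/hdlist_main.cz' });       # '_main'
    urpm::_hdlist_suffix({ with_hdlist => 'media_info/hdlist.cz' });            # ''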
-
-sub _update_media__when_not_modified {
- my ($urpm, $medium) = @_;
-
- delete @$medium{qw(start end)};
- if ($medium->{virtual}) {
- if (file_from_file_url($medium->{url})) {
- if ($medium->{synthesis}) {
- _parse_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
- } else {
- _parse_hdlist($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
- }
- } else {
- $urpm->{error}(N("virtual medium \"%s\" is not local, medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- }
- } else {
- _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
- if (!is_valid_medium($medium)) {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- }
- }
- unless ($medium->{ignore}) {
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- }
-}
-
-sub _update_media__virtual {
- my ($urpm, $medium, $with_hdlist_dir) = @_;
-
- if ($medium->{with_hdlist} && -e $with_hdlist_dir) {
- delete @$medium{qw(start end)};
- if ($medium->{synthesis}) {
- _parse_synthesis($urpm, $medium, $with_hdlist_dir);
- delete $medium->{modified};
- $medium->{synthesis} = 1;
- $urpm->{md5sum_modified} = 1;
- if (!is_valid_medium($medium)) {
- _parse_hdlist($urpm, $medium, $with_hdlist_dir);
- delete @$medium{qw(modified synthesis)};
- }
- } else {
- _parse_hdlist($urpm, $medium, $with_hdlist_dir);
- delete @$medium{qw(modified synthesis)};
- $urpm->{md5sum_modified} = 1;
- if (!is_valid_medium($medium)) {
- _parse_synthesis($urpm, $medium, $with_hdlist_dir);
- delete $medium->{modified};
- $medium->{synthesis} = 1;
- }
- }
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- } else {
- $urpm->{error}(N("virtual medium \"%s\" should have valid source hdlist or synthesis, medium ignored",
- $medium->{name}));
- $medium->{ignore} = 1;
- }
-}
-
-sub generate_media_names {
- my ($urpm) = @_;
-
- #- make sure names files are regenerated.
- foreach (@{$urpm->{media}}) {
- unlink statedir_names($urpm, $_);
- if (is_valid_medium($_)) {
- my $fh = $urpm->open_safe(">", statedir_names($urpm, $_));
- if ($fh) {
- foreach ($_->{start} .. $_->{end}) {
- if (defined $urpm->{depslist}[$_]) {
- print $fh $urpm->{depslist}[$_]->name . "\n";
- } else {
- $urpm->{error}(N("Error generating names file: dependency %d not found", $_));
- }
- }
- close $fh;
- } else {
- $urpm->{error}(N("Error generating names file: Can't write to file (%s)", $!));
- }
- }
- }
-}
-
-
-sub _read_existing_synthesis_and_hdlist_if_same_time_and_msize {
- my ($urpm, $medium, $basename) = @_;
-
- same_size_and_mtime("$urpm->{cachedir}/partial/$basename",
- statedir_hdlist($urpm, $medium)) or return;
-
- unlink "$urpm->{cachedir}/partial/$basename";
-
- _read_existing_synthesis_and_hdlist($urpm, $medium);
-
- 1;
-}
-
-sub _read_existing_synthesis_and_hdlist_if_same_md5sum {
- my ($urpm, $medium, $retrieved_md5sum) = @_;
-
- #- if an existing hdlist or synthesis file has the same md5sum, we assume the
- #- files are the same.
- #- if local md5sum is the same as distant md5sum, this means there is no need to
- #- download hdlist or synthesis file again.
- $retrieved_md5sum && $medium->{md5sum} eq $retrieved_md5sum or return;
-
- unlink "$urpm->{cachedir}/partial/" . basename($medium->{with_hdlist});
-
- _read_existing_synthesis_and_hdlist($urpm, $medium);
-
- 1;
-}
-
-sub _read_existing_synthesis_and_hdlist {
- my ($urpm, $medium) = @_;
-
- $urpm->{log}(N("medium \"%s\" is up-to-date", $medium->{name}));
-
- #- the medium is now considered not modified.
- $medium->{modified} = 0;
- #- XXX we could link the new hdlist to the old one.
- #- (However links need to be managed. see bug #12391.)
- #- as previously done, just read synthesis file here, this is enough.
- _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
- if (!is_valid_medium($medium)) {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- _check_after_reading_hdlist_or_synthesis($urpm, $medium);
- }
-
- 1;
-}
-
-sub _parse_hdlist {
- my ($urpm, $medium, $hdlist_file, %args) = @_;
-
- $urpm->{log}(N("examining hdlist file [%s]", $hdlist_file));
- ($medium->{start}, $medium->{end}) = $urpm->parse_hdlist($hdlist_file, packing => 1, %args);
-}
-
-sub _parse_synthesis {
- my ($urpm, $medium, $synthesis_file, %args) = @_;
-
- $urpm->{log}(N("examining synthesis file [%s]", $synthesis_file));
- ($medium->{start}, $medium->{end}) = $urpm->parse_synthesis($synthesis_file, %args);
-}
-
-sub is_valid_medium {
- my ($medium) = @_;
- defined $medium->{start} && defined $medium->{end};
-}
-
-sub _check_after_reading_hdlist_or_synthesis {
- my ($urpm, $medium) = @_;
-
- if (!is_valid_medium($medium)) {
- $urpm->{error}(N("problem reading hdlist or synthesis file of medium \"%s\"", $medium->{name}));
- $medium->{ignore} = 1;
- }
-}
sub db_open_or_die {
- my ($urpm, $root, $b_force) = @_;
+ my ($urpm, $root, $b_write_perm) = @_;
+
+ $urpm->{debug} and $urpm->{debug}("opening rpmdb (root=$root, write=$b_write_perm)");
- my $db = URPM::DB::open($root, $b_force)
+ my $db = URPM::DB::open($root, $b_write_perm || 0)
or $urpm->{fatal}(9, N("unable to open rpmdb"));
$db;
}
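A hedged usage sketch of the changed signature: the third argument now explicitly requests a writable rpmdb and defaults to a read-only open when omitted, as in the calls elsewhere in urpm.pm:

    my $db  = urpm::db_open_or_die($urpm, $urpm->{root});     # read-only
    my $wdb = urpm::db_open_or_die($urpm, $urpm->{root}, 1);  # opened for writing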
-sub _update_media__sync_file {
- my ($urpm, $medium, $name, $options) = @_;
-
- my $local_name = $name . _hdlist_suffix($medium);
-
- foreach (reduce_pathname("$medium->{url}/$medium->{with_hdlist}/../$local_name"),
- reduce_pathname("$medium->{url}/$name")) {
- eval {
- sync_webfetch($urpm, $medium, [$_], $options, quiet => 1);
-
- if ($local_name ne $name && -s "$urpm->{cachedir}/partial/$local_name") {
- rename("$urpm->{cachedir}/partial/$local_name",
- "$urpm->{cachedir}/partial/$name");
- }
- };
- $@ and unlink "$urpm->{cachedir}/partial/$name";
- -s "$urpm->{cachedir}/partial/$name" and last;
- }
-}
-
-sub recursive_find_rpm_files {
- my ($dir) = @_;
-
- my %f;
- local $_; #- help perl_checker not warning "undeclared $_" in wanted callback below
- File::Find::find(
- {
- wanted => sub { -f $_ && /\.rpm$/ and $f{"$File::Find::dir/$_"} = 1 },
- follow_skip => 2,
- follow_fast => 1,
- },
- $dir,
- );
- keys %f;
-}
-
-sub clean_dir {
- my ($dir) = @_;
-
- require File::Path;
- File::Path::rmtree([$dir]);
- mkdir $dir, 0755;
-}
-
-sub _update_medium__get_descriptions_local {
- my ($urpm, $medium) = @_;
-
- unlink statedir_descriptions($urpm, $medium);
-
- my $dir = file_from_local_url($medium->{url});
- my $description_file = "$dir/media_info/descriptions"; #- new default location
- -e $description_file or $description_file = "$dir/../descriptions";
- -e $description_file or return;
-
- $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name}));
- if (copy_and_own($description_file, statedir_descriptions($urpm, $medium))) {
- $urpm->{log}(N("...copying done"));
- } else {
- $urpm->{error}(N("...copying failed"));
- $medium->{ignore} = 1;
- }
-}
-sub _update_medium__get_descriptions_remote {
- my ($urpm, $medium, $options) = @_;
-
- unlink "$urpm->{cachedir}/partial/descriptions";
-
- if (-e statedir_descriptions($urpm, $medium)) {
- urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions");
- }
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ],
- $options, quiet => 1);
- };
- #- It is possible that the original fetch of the descriptions
- #- failed, but the file still remains in partial/ because it was
- #- moved from $urpm->{statedir} earlier. So we need to check if
- #- the previous download failed.
- if ($@ || ! -e "$urpm->{cachedir}/partial/descriptions") {
- eval {
- #- try older location
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ],
- $options, quiet => 1);
- };
- }
- if (-e "$urpm->{cachedir}/partial/descriptions") {
- urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
- }
-}
-
-sub _update_medium_first_pass__local {
- my ($urpm, $medium, $second_pass, $clean_cache, $retrieved_md5sum, $rpm_files, $options) = @_;
-
- my $dir = file_from_local_url($medium->{url});
-
- #- try to figure a possible hdlist_path (or parent directory of searched directory).
- #- this is used to probe for a possible hdlist file.
- my $with_hdlist_dir = reduce_pathname($dir . ($medium->{with_hdlist} ? "/$medium->{with_hdlist}" : "/.."));
-
- #- the directory given does not exist and may be accessible
- #- by mounting some other directory. Try to figure it out and mount
- #- everything that might be necessary.
- -d $dir or $urpm->try_mounting(
- $options->{force} < 2 && ($options->{probe_with} || $medium->{with_hdlist})
- ? $with_hdlist_dir : $dir,
- #- in case of an iso image, pass its name
- is_iso($medium->{removable}) && $medium->{removable},
- ) or $urpm->{error}(N("unable to access medium \"%s\",
-this could happen if you mounted manually the directory when creating the medium.", $medium->{name})), return 'unmodified';
-
- #- try to probe for possible with_hdlist parameter, unless
- #- it is already defined (and valid).
- if ($options->{probe_with} && (!$medium->{with_hdlist} || ! -e "$dir/$medium->{with_hdlist}")) {
- foreach (_probe_with_try_list(_guess_hdlist_suffix($dir), $options->{probe_with})) {
- if (file_size("$dir/$_") > 32) {
- $medium->{with_hdlist} = $_;
- last;
- }
- }
- #- redo...
- $with_hdlist_dir = reduce_pathname($dir . ($medium->{with_hdlist} ? "/$medium->{with_hdlist}" : "/.."));
- }
-
- if ($medium->{virtual}) {
- #- syncing a virtual medium is very simple, just try to read the file in order to
- #- determine its type, once a with_hdlist has been found (but is mandatory).
- _update_media__virtual($urpm, $medium, $with_hdlist_dir);
- }
-
- _update_medium__get_descriptions_local($urpm, $medium);
-
- #- examine if a distant MD5SUM file is available.
- #- this will only be done if $with_hdlist is not empty in order to use
- #- an existing hdlist or synthesis file, and to check if download was good.
- #- if no MD5SUM is available, do it as before...
- #- we can assume at this point a basename is existing, but it needs
- #- to be checked for being valid, nothing can be deduced if no MD5SUM
- #- file is present.
-
- my $error;
-
- unless ($medium->{virtual}) {
- if ($medium->{with_hdlist}) {
- if (!$options->{nomd5sum} && file_size(reduce_pathname("$with_hdlist_dir/../MD5SUM")) > 32) {
- recompute_local_md5sum($urpm, $medium, $options->{force});
- if ($medium->{md5sum}) {
- $$retrieved_md5sum = parse_md5sum($urpm, reduce_pathname("$with_hdlist_dir/../MD5SUM"), basename($with_hdlist_dir));
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $$retrieved_md5sum)
- and return 'unmodified';
- }
- }
-
- #- if the source hdlist is present and we are not forcing using rpm files
- if ($options->{force} < 2 && -e $with_hdlist_dir) {
- unlink cachedir_hdlist($urpm, $medium);
- $urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $options->{callback} and $options->{callback}('copy', $medium->{name});
- if (copy_and_own($with_hdlist_dir, cachedir_hdlist($urpm, $medium))) {
- $options->{callback} and $options->{callback}('done', $medium->{name});
- $urpm->{log}(N("...copying done"));
- } else {
- $options->{callback} and $options->{callback}('failed', $medium->{name});
- #- force error, reported afterwards
- unlink cachedir_hdlist($urpm, $medium);
- }
- }
-
- file_size(cachedir_hdlist($urpm, $medium)) > 32 or
- $error = 1, $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)",
- cachedir_hdlist($urpm, $medium)));
-
- #- keep checking md5sum of file just copied ! (especially on nfs or removable device).
- if (!$error && $$retrieved_md5sum) {
- $urpm->{log}(N("computing md5sum of copied source hdlist (or synthesis)"));
- md5sum(cachedir_hdlist($urpm, $medium)) eq $$retrieved_md5sum or
- $error = 1, $urpm->{error}(N("copy of [%s] failed (md5sum mismatch)", $with_hdlist_dir));
- }
-
- #- check if the files are equal... and no force copy...
- if (!$error && !$options->{force} && -e statedir_synthesis($urpm, $medium)) {
- _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $medium->{hdlist})
- and return 'unmodified';
- }
- } else {
- $error = 1;
- }
-
- #- if copying hdlist has failed, try to build it directly.
- if ($error) {
- if ($urpm->{options}{norebuild}) {
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- } else {
- $options->{force} < 2 and $options->{force} = 2;
- #- clear error state now.
- $error = undef;
- }
- }
-
- if ($options->{force} < 2) {
- #- examine if a local list file is available (always probed according to with_hdlist)
- #- and check hdlist wasn't named very strangely...
- if ($medium->{hdlist} ne 'list') {
- my $local_list = 'list' . _hdlist_suffix($medium);
- my $path_list = reduce_pathname("$with_hdlist_dir/../$local_list");
- -e $path_list or $path_list = "$dir/list";
- if (-e $path_list) {
- copy_and_own($path_list, "$urpm->{cachedir}/partial/list")
- or do { $urpm->{error}(N("...copying failed")); $error = 1 };
- }
- }
- } else {
- push @$rpm_files, recursive_find_rpm_files($dir);
-
- #- check files contains something good!
- if (@$rpm_files > 0) {
- #- we need to rebuild from rpm files the hdlist.
- eval {
- $urpm->{log}(N("reading rpm files from [%s]", $dir));
- my @unresolved_before = grep {
- ! defined $urpm->{provides}{$_};
- } keys %{$urpm->{provides} || {}};
- $medium->{start} = @{$urpm->{depslist}};
- $medium->{headers} = [ $urpm->parse_rpms_build_headers(
- dir => "$urpm->{cachedir}/headers",
- rpms => $rpm_files,
- clean => $$clean_cache,
- ) ];
- $medium->{end} = $#{$urpm->{depslist}};
- if ($medium->{start} > $medium->{end}) {
- #- an error occurred (provided there are files in input).
- delete $medium->{start};
- delete $medium->{end};
- $urpm->{fatal}(9, N("no rpms read"));
- } else {
- #- make sure the headers will not be removed for another media.
- $$clean_cache = 0;
- my @unresolved = grep {
- ! defined $urpm->{provides}{$_};
- } keys %{$urpm->{provides} || {}};
- @unresolved_before == @unresolved or $$second_pass = 1;
- }
- };
- $@ and $error = 1, $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
- $error and delete $medium->{headers}; #- do not propagate these.
- $error or delete $medium->{synthesis}; #- when building hdlist by ourself, drop synthesis property.
- } else {
- $error = 1;
- $urpm->{error}(N("no rpm files found from [%s]", $dir));
- $medium->{ignore} = 1;
- }
- }
- }
-
- #- examine if a local pubkey file is available.
- if (!$options->{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
- my $path_pubkey = reduce_pathname("$with_hdlist_dir/../pubkey" . _hdlist_suffix($medium));
- -e $path_pubkey or $path_pubkey = "$dir/pubkey";
- if ($path_pubkey) {
- copy_and_own($path_pubkey, "$urpm->{cachedir}/partial/pubkey")
- or do { $urpm->{error}(N("...copying failed")) };
- }
- }
-
- $error;
-}
-
-sub _update_medium_first_pass__remote {
- my ($urpm, $medium, $retrieved_md5sum, $options) = @_;
- my ($error, $basename);
-
- _update_medium__get_descriptions_remote($urpm, $medium, $options);
-
- #- examine if a distant MD5SUM file is available.
- #- this will only be done if $with_hdlist is not empty in order to use
- #- an existing hdlist or synthesis file, and to check if download was good.
- #- if no MD5SUM is available, do it as before...
- if ($medium->{with_hdlist}) {
- #- we can assume at this point a basename is existing, but it needs
- #- to be checked for being valid, nothing can be deduced if no MD5SUM
- #- file is present.
- $basename = basename($medium->{with_hdlist});
-
- unlink "$urpm->{cachedir}/partial/MD5SUM";
- eval {
- if (!$options->{nomd5sum}) {
- sync_webfetch($urpm, $medium,
- [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}/../MD5SUM") ],
- $options, quiet => 1);
- }
- };
- if (!$@ && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
- recompute_local_md5sum($urpm, $medium, $options->{force} >= 2);
- if ($medium->{md5sum}) {
- $$retrieved_md5sum = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $$retrieved_md5sum)
- and return 'unmodified';
- }
- } else {
- #- at this point, we don't know if a basename exists and is valid; let's probe it later.
- $basename = undef;
- }
- }
-
- #- try to probe for possible with_hdlist parameter, unless
- #- it is already defined (and valid).
- $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $options->{callback} and $options->{callback}('retrieve', $medium->{name});
- if ($options->{probe_with}) {
- my @probe_list = (
- $medium->{with_hdlist}
- ? $medium->{with_hdlist}
- : _probe_with_try_list(_guess_hdlist_suffix($medium->{url}), $options->{probe_with})
- );
- foreach my $with_hdlist (@probe_list) {
- $basename = basename($with_hdlist) or next;
- $options->{force} and unlink "$urpm->{cachedir}/partial/$basename";
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$with_hdlist") ],
- $options, callback => $options->{callback});
- };
- if (!$@ && file_size("$urpm->{cachedir}/partial/$basename") > 32) {
- $medium->{with_hdlist} = $with_hdlist;
- $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $medium->{with_hdlist}));
- last; #- found a suitable with_hdlist in the list above.
- }
- }
- } else {
- $basename = basename($medium->{with_hdlist});
-
- if ($options->{force}) {
- unlink "$urpm->{cachedir}/partial/$basename";
- } else {
- #- try to sync (copy if needed) the local copy after restoring the previous one.
- #- this is useful for rsync (?)
- if (-e statedir_hdlist_or_synthesis($urpm, $medium)) {
- copy_and_own(
- statedir_hdlist_or_synthesis($urpm, $medium),
- "$urpm->{cachedir}/partial/$basename",
- ) or $urpm->{error}(N("...copying failed")), $error = 1;
- }
- }
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}") ],
- $options, callback => $options->{callback});
- };
- if ($@) {
- $urpm->{error}(N("...retrieving failed: %s", $@));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- #- check downloaded file has right signature.
- if (file_size("$urpm->{cachedir}/partial/$basename") > 32 && $$retrieved_md5sum) {
- $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
- unless (md5sum("$urpm->{cachedir}/partial/$basename") eq $$retrieved_md5sum) {
- $urpm->{error}(N("...retrieving failed: md5sum mismatch"));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- if (file_size("$urpm->{cachedir}/partial/$basename") > 32) {
- $options->{callback} and $options->{callback}('done', $medium->{name});
- $urpm->{log}(N("...retrieving done"));
-
- unless ($options->{force}) {
- _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $basename)
- and return 'unmodified';
- }
-
- #- the files are different, update local copy.
- rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
-
- #- retrieval of hdlist or synthesis has been successful,
- #- check whether a list file is available.
- #- and check hdlist wasn't named very strangely...
- if ($medium->{hdlist} ne 'list') {
- _update_media__sync_file($urpm, $medium, 'list', $options);
- }
-
- #- retrieve pubkey file.
- if (!$options->{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
- _update_media__sync_file($urpm, $medium, 'pubkey', $options);
- }
- } else {
- $error = 1;
- $options->{callback} and $options->{callback}('failed', $medium->{name});
- $urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
- }
- $error;
-}
-
-sub _read_cachedir_pubkey {
- my ($urpm, $medium) = @_;
- -s "$urpm->{cachedir}/partial/pubkey" or return;
-
- $urpm->{log}(N("examining pubkey file of \"%s\"...", $medium->{name}));
-
- my %key_ids;
- $urpm->import_needed_pubkeys(
- [ $urpm->parse_armored_file("$urpm->{cachedir}/partial/pubkey") ],
- root => $urpm->{root},
- callback => sub {
- my (undef, undef, $_k, $id, $imported) = @_;
- if ($id) {
- $key_ids{$id} = undef;
- $imported and $urpm->{log}(N("...imported key %s from pubkey file of \"%s\"",
- $id, $medium->{name}));
- } else {
- $urpm->{error}(N("unable to import pubkey file of \"%s\"", $medium->{name}));
- }
- });
- if (keys(%key_ids)) {
- $medium->{'key-ids'} = join(',', keys %key_ids);
- }
-}
-
-sub _update_medium_first_pass {
- my ($urpm, $medium, $second_pass, $clean_cache, %options) = @_;
-
- $medium->{ignore} and return;
-
- $options{forcekey} and delete $medium->{'key-ids'};
-
- #- we should create the associated synthesis file if it does not already exist...
- file_size(statedir_synthesis($urpm, $medium)) > 32
- or $medium->{modified_synthesis} = 1;
-
- if ($medium->{static}) {
- #- don't ever update static media
- $medium->{modified} = 0;
- } elsif ($options{all}) {
- #- if we're rebuilding all media, mark them as modified (except removable ones)
- $medium->{modified} ||= $medium->{url} !~ m!^removable!;
- }
-
- unless ($medium->{modified}) {
- #- the medium is not modified, but to compute dependencies,
- #- we still need to read it and all synthesis will be written if
- #- an unresolved provides is found.
- #- to speed up the process, we only read the synthesis at the beginning.
- _update_media__when_not_modified($urpm, $medium);
- return;
- }
-
- #- always delete a remaining list file or pubkey file in cache.
- foreach (qw(list pubkey)) {
- unlink "$urpm->{cachedir}/partial/$_";
- }
-
- #- check for a reconfig.urpmi file (if not already reconfigured)
- if (!$medium->{noreconfigure}) {
- may_reconfig_urpmi($urpm, $medium, \%options);
- }
-
- #- list of rpm files for this medium, only available for local medium where
- #- the source hdlist is not used (use force).
- my ($error, $retrieved_md5sum, @files);
-
- #- check if the medium is using a local or a removable medium.
- if (file_from_local_url($medium->{url})) {
- my $rc = _update_medium_first_pass__local($urpm, $medium, $second_pass, $clean_cache, \$retrieved_md5sum, \@files, \%options);
- if ($rc eq 'unmodified') {
- return;
- } else {
- $error = $rc;
- }
- } else {
- my $rc = _update_medium_first_pass__remote($urpm, $medium, \$retrieved_md5sum, \%options);
- if ($rc eq 'unmodified') {
- return;
- } else {
- $error = $rc;
- }
- }
-
- #- build list file according to hdlist.
- unless ($medium->{headers} || file_size(cachedir_hdlist($urpm, $medium)) > 32) {
- $error = 1;
- $urpm->{error}(N("no hdlist file found for medium \"%s\"", $medium->{name}));
- }
-
- unless ($error || $medium->{virtual}) {
- #- sort list file contents according to id.
- my %list;
- if ($medium->{headers}) {
- my $protocol = protocol_from_url($medium->{url});
-
- #- rpm files have already been read (first pass), there is just a need to
- #- build list hash.
- foreach (@files) {
- m|/([^/]*\.rpm)$| or next;
- $list{$1} and $urpm->{error}(N("file [%s] already used in the same medium \"%s\"", $1, $medium->{name})), next;
- $list{$1} = "$protocol:/$_\n";
- }
- } else {
- #- read the first-pass hdlist or synthesis: if the file is larger than
- #- 262144 bytes it is probably an hdlist, else a synthesis.
- #- anyway, if one try fails, try the other mode.
- $options{callback} and $options{callback}('parse', $medium->{name});
- my @unresolved_before = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
- if (!$medium->{synthesis}
- || file_size(cachedir_hdlist($urpm, $medium)) > 262144) {
- _parse_hdlist($urpm, $medium, cachedir_hdlist($urpm, $medium));
- if (is_valid_medium($medium)) {
- delete $medium->{synthesis};
- } else {
- _parse_synthesis($urpm, $medium, cachedir_hdlist($urpm, $medium));
- is_valid_medium($medium) and $medium->{synthesis} = 1;
- }
- } else {
- _parse_synthesis($urpm, $medium, cachedir_hdlist($urpm, $medium));
- if (is_valid_medium($medium)) {
- $medium->{synthesis} = 1;
- } else {
- _parse_hdlist($urpm, $medium, cachedir_hdlist($urpm, $medium));
- is_valid_medium($medium) and delete $medium->{synthesis};
- }
- }
- if (is_valid_medium($medium)) {
- $options{callback} && $options{callback}('done', $medium->{name});
- } else {
- $error = 1;
- $urpm->{error}(N("unable to parse hdlist file of \"%s\"", $medium->{name}));
- $options{callback} && $options{callback}('failed', $medium->{name});
- #- we will have to read back the current synthesis file unmodified.
- }
-
- unless ($error) {
- my @unresolved_after = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
- @unresolved_before == @unresolved_after or $$second_pass = 1;
-
- if ($medium->{hdlist} ne 'list' && -s "$urpm->{cachedir}/partial/list") {
- if (open my $fh, "$urpm->{cachedir}/partial/list") {
- local $_;
- while (<$fh>) {
- m|/([^/]*\.rpm)$| or next;
- $list{$1} and $urpm->{error}(N("file [%s] already used in the same medium \"%s\"", $1, $medium->{name})), next;
- $list{$1} = "$medium->{url}/$_";
- }
- close $fh;
- }
- } else {
- #- if url is clear and no relative list file has been downloaded,
- #- there is no need for a list file.
- if ($medium->{url} ne $medium->{clear_url}) {
- foreach ($medium->{start} .. $medium->{end}) {
- my $filename = $urpm->{depslist}[$_]->filename;
- $list{$filename} = "$medium->{url}/$filename\n";
- }
- }
- }
- }
- }
-
- unless ($error) {
- if (keys %list) {
- #- write list file.
- #- make sure group and other do not have any access to this file, used to hide passwords.
- if ($medium->{list}) {
- my $mask = umask 077;
- open my $listfh, ">", cachedir_list($urpm, $medium)
- or $error = 1, $urpm->{error}(N("unable to write list file of \"%s\"", $medium->{name}));
- umask $mask;
- print $listfh values %list;
- close $listfh;
- }
-
- #- check if at least something has been written into list file.
- if ($medium->{list} && -s cachedir_list($urpm, $medium)) {
- $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
- } else {
- $error = 1, $urpm->{error}(N("nothing written in list file for \"%s\"", $medium->{name}));
- }
- } else {
- #- the flag is no longer necessary.
- if ($medium->{list}) {
- unlink statedir_list($urpm, $medium);
- delete $medium->{list};
- }
- }
- }
- }
-
- unless ($error) {
- #- now... on pubkey
- _read_cachedir_pubkey($urpm, $medium);
- }
-
- unless ($medium->{virtual}) {
- if ($error) {
- #- an error has occurred while updating the medium, we have to remove temporary files.
- unlink cachedir_hdlist($urpm, $medium);
- $medium->{list} and unlink cachedir_list($urpm, $medium);
- #- read default synthesis (we have to make sure nothing gets out of depslist).
- _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
- if (!is_valid_medium($medium)) {
- $urpm->{error}(N("problem reading synthesis file of medium \"%s\"", $medium->{name}));
- $medium->{ignore} = 1;
- }
- } else {
- #- make sure to rebuild base files and clear medium modified state.
- $medium->{modified} = 0;
- $urpm->{md5sum_modified} = 1;
-
- #- but use newly created file.
- unlink statedir_hdlist($urpm, $medium);
- $medium->{synthesis} and unlink statedir_synthesis($urpm, $medium);
- $medium->{list} and unlink statedir_list($urpm, $medium);
- unless ($medium->{headers}) {
- unlink statedir_synthesis($urpm, $medium);
- unlink statedir_hdlist($urpm, $medium);
- urpm::util::move(cachedir_hdlist($urpm, $medium),
- statedir_hdlist_or_synthesis($urpm, $medium));
- }
- if ($medium->{list}) {
- urpm::util::move(cachedir_list($urpm, $medium), statedir_list($urpm, $medium));
- }
- $medium->{md5sum} = $retrieved_md5sum; #- anyway, keep it, the previous one is no longer useful.
-
- #- and create synthesis file associated.
- $medium->{modified_synthesis} = !$medium->{synthesis};
- }
- }
-}
-
-#- take care of modified medium only, or all if all have to be recomputed.
-sub _update_medium_second_pass {
- my ($urpm, $medium, $second_pass, $callback) = @_;
-
- $medium->{ignore} and return;
-
- $callback and $callback->('parse', $medium->{name});
- #- a modified medium is an invalid medium: we have to read back the previous hdlist
- #- or synthesis, which has not been modified by the first pass above.
- if ($medium->{headers} && !$medium->{modified}) {
- if ($second_pass) {
- $urpm->{log}(N("reading headers from medium \"%s\"", $medium->{name}));
- ($medium->{start}, $medium->{end}) = $urpm->parse_headers(dir => "$urpm->{cachedir}/headers",
- headers => $medium->{headers},
- );
- }
- $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
- #- finish building operation of hdlist.
- $urpm->build_hdlist(start => $medium->{start},
- end => $medium->{end},
- dir => "$urpm->{cachedir}/headers",
- hdlist => statedir_hdlist($urpm, $medium),
- );
- #- synthesis needs to be created, since the medium has been built from rpm files.
- eval { $urpm->build_synthesis(
- start => $medium->{start},
- end => $medium->{end},
- synthesis => statedir_synthesis($urpm, $medium),
- ) };
- if ($@) {
- #- XXX this happens when building a synthesis for a local medium from RPMs... why?
- $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
- $urpm->{error}($@);
- unlink statedir_synthesis($urpm, $medium);
- } else {
- $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
- }
- #- keep in mind that the database has been modified; we are sure of that at this point.
- $urpm->{md5sum_modified} = 1;
- } elsif ($medium->{synthesis}) {
- if ($second_pass) {
- if ($medium->{virtual}) {
- if (file_from_file_url($medium->{url})) {
- _parse_synthesis($urpm, $medium, hdlist_or_synthesis_for_virtual_medium($medium));
- }
- } else {
- _parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium));
- }
- }
- } else {
- if ($second_pass) {
- _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
- }
- #- check if the synthesis file can be built.
- if (($second_pass || $medium->{modified_synthesis}) && !$medium->{modified}) {
- unless ($medium->{virtual}) {
- eval { $urpm->build_synthesis(
- start => $medium->{start},
- end => $medium->{end},
- synthesis => statedir_synthesis($urpm, $medium),
- ) };
- if ($@) {
- $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
- $urpm->{error}($@);
- unlink statedir_synthesis($urpm, $medium);
- } else {
- $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
- }
- }
- #- keep in mind that the database has been modified; we are sure of that at this point.
- $urpm->{md5sum_modified} = 1;
- }
- }
- $callback && $callback->('done', $medium->{name});
-}
-
sub remove_obsolete_headers_in_cache {
my ($urpm) = @_;
my %headers;
- my $dh = $urpm->opendir_safe("$urpm->{cachedir}/headers");
- if ($dh) {
+ if (my $dh = urpm::sys::opendir_safe($urpm, "$urpm->{cachedir}/headers")) {
local $_;
while (defined($_ = readdir $dh)) {
m|^([^/]*-[^-]*-[^-]*\.[^\.]*)(?::\S*)?$| and $headers{$1} = $_;
}
- closedir $dh;
}
if (%headers) {
- $urpm->{log}(N("found %d headers in cache", scalar(keys %headers)));
+ my $previous_total = scalar(keys %headers);
foreach (@{$urpm->{depslist}}) {
delete $headers{$_->fullname};
}
- $urpm->{log}(N("removing %d obsolete headers in cache", scalar(keys %headers)));
+ $urpm->{log}(N("found %d rpm headers in cache, removing %d obsolete headers", $previous_total, scalar(keys %headers)));
foreach (values %headers) {
unlink "$urpm->{cachedir}/headers/$_";
}
}
}
-#- Update the urpmi database w.r.t. the current configuration.
-#- Takes care of modifications, and tries some tricks to bypass
-#- the recomputation of base files.
-#- Recognized options :
-#- all : all media are being rebuilt
-#- callback : UI callback
-#- compress : use compressed download (for rsync)
-#- forcekey : force retrieval of pubkey
-#- force : try to force rebuilding base files (1) or hdlist from rpm files (2)
-#- limit_rate : download limit rate
-#- noclean : keep old files in the header cache directory
-#- nolock : don't lock the urpmi database
-#- nomd5sum : don't verify MD5SUM of retrieved files
-#- nopubkey : don't use rpm pubkeys
-#- norebuild : don't try to rebuild hdlists from rpm headers
-#- probe_with : probe synthesis or hdlist (or none)
-#- quiet : download hdlists quietly
-#- ratio : use this compression ratio (with gzip, default is 4)
-sub update_media {
- my ($urpm, %options) = @_;
-
- $urpm->{media} or return; # verify that configuration has been read
-
- $options{nopubkey} ||= $urpm->{options}{nopubkey};
- #- get gpg-pubkey signature.
- if (!$options{nopubkey}) {
- $urpm->lock_rpm_db('exclusive');
- $urpm->{keys} or $urpm->parse_pubkeys(root => $urpm->{root});
- }
- #- lock database if allowed.
- $urpm->lock_urpmi_db('exclusive') if !$options{nolock};
-
- #- examine each medium to see if one of them needs to be updated.
- #- if this is the case and if not forced, try to use a pre-calculated
- #- hdlist file, else build it from rpm files.
- $urpm->clean;
-
- my $clean_cache = !$options{noclean};
- my $second_pass;
- foreach my $medium (@{$urpm->{media}}) {
- _update_medium_first_pass($urpm, $medium, \$second_pass, \$clean_cache, %options);
- }
-
- #- some unresolved provides may force a rebuild of all synthesis files,
- #- in which case a second pass will be necessary.
- if ($second_pass) {
- $urpm->{log}(N("performing second pass to compute dependencies\n"));
- $urpm->unresolved_provides_clean;
- }
-
- #- the second pass consists of reading the synthesis or hdlist files again.
- foreach my $medium (@{$urpm->{media}}) {
- _update_medium_second_pass($urpm, $medium, $second_pass, $options{callback});
- }
-
- if ($urpm->{modified}) {
- if ($options{noclean}) {
- #- clean headers cache directory to remove everything that is no longer
- #- useful according to the depslist.
- remove_obsolete_headers_in_cache($urpm);
- }
- #- write config files in any case
- $urpm->write_config;
- dump_proxy_config();
- } elsif ($urpm->{md5sum_modified}) {
- #- NB: in case of $urpm->{modified}, write_MD5SUM is called in write_config above
- write_MD5SUM($urpm);
- }
-
- generate_media_names($urpm);
-
- $options{nolock} or $urpm->unlock_urpmi_db;
- $options{nopubkey} or $urpm->unlock_rpm_db;
-}
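For illustration, a minimal sketch of how a caller might drive update_media() with a few of the options documented above. It assumes the media configuration has already been read into $urpm->{media} (update_media returns early otherwise); the callback stages shown are the 'parse'/'done'/'failed' names visible in the code.

    my %options = (
        all      => 1,                 #- rebuild every (non-removable) medium
        forcekey => 1,                 #- force retrieval of the pubkeys again
        quiet    => 1,                 #- download hdlists quietly
        callback => sub {
            my ($stage, $name) = @_;   #- e.g. 'parse', 'done' or 'failed'
            print STDERR "[$stage] $name\n";
        },
    );
    $urpm->update_media(%options);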
-
-#- clean params and depslist computation zone.
-sub clean {
- my ($urpm) = @_;
-
- $urpm->{depslist} = [];
- $urpm->{provides} = {};
-
- foreach (@{$urpm->{media} || []}) {
- delete $_->{start};
- delete $_->{end};
- }
-}
-
-sub try_mounting {
- my ($urpm, $dir, $o_removable) = @_;
- my %infos;
-
- my $is_iso = is_iso($o_removable);
- my @mntpoints = $is_iso
- #- note: for isos, we don't parse the fstab because it might not be declared in it.
- #- so we try to remove suffixes from the dir name until the dir exists
- ? ($dir = urpm::sys::trim_until_d($dir))
- : urpm::sys::find_mntpoints($dir = reduce_pathname($dir), \%infos);
- foreach (grep {
- ! $infos{$_}{mounted} && $infos{$_}{fs} ne 'supermount';
- } @mntpoints)
- {
- $urpm->{log}(N("mounting %s", $_));
- if ($is_iso) {
- #- to mount an iso image, grab the first loop device
- my $loopdev = urpm::sys::first_free_loopdev();
- sys_log("mount iso $_ on $o_removable");
- $loopdev and system('mount', $o_removable, $_, '-t', 'iso9660', '-o', "loop=$loopdev");
- } else {
- sys_log("mount $_");
- system("mount '$_' 2>/dev/null");
- }
- $o_removable && $infos{$_}{fs} ne 'supermount' and $urpm->{removable_mounted}{$_} = undef;
- }
- -e $dir;
-}
-
-sub try_umounting {
- my ($urpm, $dir) = @_;
- my %infos;
-
- $dir = reduce_pathname($dir);
- foreach (reverse grep {
- $infos{$_}{mounted} && $infos{$_}{fs} ne 'supermount';
- } urpm::sys::find_mntpoints($dir, \%infos))
- {
- $urpm->{log}(N("unmounting %s", $_));
- sys_log("umount $_");
- system("umount '$_' 2>/dev/null");
- delete $urpm->{removable_mounted}{$_};
- }
- ! -e $dir;
-}
-
-sub try_umounting_removables {
- my ($urpm) = @_;
- foreach (keys %{$urpm->{removable_mounted}}) {
- $urpm->try_umounting($_);
- }
- delete $urpm->{removable_mounted};
-}
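As a hedged usage sketch (paths invented): a plain directory goes through the mount-point lookup, while passing an ISO path as the removable argument triggers the loop-device branch of try_mounting() above.

    #- plain directory: look up mount points and mount whatever is missing
    $urpm->try_mounting("/mnt/cdrom")
        or print STDERR "cannot access /mnt/cdrom\n";

    #- ISO image: the directory is trimmed until it exists, then the image
    #- is loop-mounted on it (iso9660)
    $urpm->try_mounting("/mnt/iso/media/main", "/tmp/distro.iso");

    #- later, unmount the removable mount points recorded above
    $urpm->try_umounting_removables;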
-
#- register local packages for being installed, keep track of source.
sub register_rpms {
my ($urpm, @files) = @_;
@@ -2151,13 +129,11 @@ sub register_rpms {
if (protocol_from_url($_)) {
my $basename = basename($_);
unlink "$urpm->{cachedir}/partial/$basename";
- eval {
- $urpm->{log}(N("retrieving rpm file [%s] ...", $_));
- sync_webfetch($urpm, undef, [$_], { quiet => 1 });
+ $urpm->{log}(N("retrieving rpm file [%s] ...", $_));
+ if (urpm::download::sync($urpm, undef, [$_], quiet => 1)) {
$urpm->{log}(N("...retrieving done"));
$_ = "$urpm->{cachedir}/partial/$basename";
- };
- if ($@) {
+ } else {
$urpm->{error}(N("...retrieving failed: %s", $@));
unlink "$urpm->{cachedir}/partial/$basename";
next;
@@ -2193,420 +169,6 @@ sub register_rpms {
%requested;
}
-sub _findindeps {
- my ($urpm, $found, $qv, $v, %options) = @_;
-
- foreach (keys %{$urpm->{provides}}) {
- #- search through provides to find if a provide matches this one;
- #- but manage choices correctly (as a provides may be virtual or
- #- defined several times).
- /$qv/ || !$options{caseinsensitive} && /$qv/i or next;
-
- my @list = grep { defined $_ } map {
- my $pkg = $urpm->{depslist}[$_];
- $pkg && ($options{src} ? $pkg->arch eq 'src' : $pkg->arch ne 'src')
- ? $pkg->id : undef;
- } keys %{$urpm->{provides}{$_} || {}};
- @list > 0 and push @{$found->{$v}}, join '|', @list;
- }
-}
-
-#- search for packages by name and store their ids into the $packages hash.
-sub search_packages {
- my ($urpm, $packages, $names, %options) = @_;
- my (%exact, %exact_a, %exact_ra, %found, %foundi);
- foreach my $v (@$names) {
- my $qv = quotemeta $v;
- $qv = '(?i)' . $qv if $options{caseinsensitive};
-
- unless ($options{fuzzy}) {
- #- try to search through provides.
- if (my @l = map {
- $_
- && ($options{src} ? $_->arch eq 'src' : $_->is_arch_compat)
- && ($options{use_provides} || $_->name eq $v)
- && defined($_->id)
- && (!defined $urpm->{searchmedia} ||
- $urpm->{searchmedia}{start} <= $_->id
- && $urpm->{searchmedia}{end} >= $_->id)
- ? $_ : @{[]};
- } map {
- $urpm->{depslist}[$_];
- } keys %{$urpm->{provides}{$v} || {}})
- {
- #- we assume that if there is at least one package providing
- #- the resource exactly, this should be the best one; but we
- #- first check if one of the packages has the same name as searched.
- if (my @l2 = grep { $_->name eq $v } @l) {
- @l = @l2;
- }
- $exact{$v} = join('|', map { $_->id } @l);
- next;
- }
- }
-
- if ($options{use_provides} && $options{fuzzy}) {
- _findindeps($urpm, \%found, $qv, $v, %options);
- }
-
- foreach my $id (defined $urpm->{searchmedia} ?
- ($urpm->{searchmedia}{start} .. $urpm->{searchmedia}{end}) :
- (0 .. $#{$urpm->{depslist}})
- ) {
- my $pkg = $urpm->{depslist}[$id];
- ($options{src} ? $pkg->arch eq 'src' : $pkg->is_arch_compat) or next;
- my $pack_name = $pkg->name;
- my $pack_ra = $pack_name . '-' . $pkg->version;
- my $pack_a = "$pack_ra-" . $pkg->release;
- my $pack = "$pack_a." . $pkg->arch;
- unless ($options{fuzzy}) {
- if ($pack eq $v) {
- $exact{$v} = $id;
- next;
- } elsif ($pack_a eq $v) {
- push @{$exact_a{$v}}, $id;
- next;
- } elsif ($pack_ra eq $v || $options{src} && $pack_name eq $v) {
- push @{$exact_ra{$v}}, $id;
- next;
- }
- }
- $pack =~ /$qv/ and push @{$found{$v}}, $id;
- $pack =~ /$qv/i and push @{$foundi{$v}}, $id unless $options{caseinsensitive};
- }
- }
-
- my $result = 1;
- foreach (@$names) {
- if (defined $exact{$_}) {
- $packages->{$exact{$_}} = 1;
- foreach (split /\|/, $exact{$_}) {
- my $pkg = $urpm->{depslist}[$_] or next;
- $pkg->set_flag_skip(0); #- reset skip flag as manually selected.
- }
- } else {
- #- at this level, we need to search for the best package for a given name,
- #- always preferring an already found package.
- my %l;
- foreach (@{$exact_a{$_} || $exact_ra{$_} || $found{$_} || $foundi{$_} || []}) {
- my $pkg = $urpm->{depslist}[$_];
- push @{$l{$pkg->name}}, $pkg;
- }
- if (values(%l) == 0 || values(%l) > 1 && !$options{all}) {
- $urpm->{error}(N("no package named %s", $_));
- values(%l) != 0 and $urpm->{error}(
- N("The following packages contain %s: %s",
- $_, "\n" . join("\n", sort { $a cmp $b } keys %l))
- );
- $result = 0;
- } else {
- if (!@{$exact_a{$_} || $exact_ra{$_} || []}) {
- #- we found a non-exact match
- $result = 'substring';
- }
- foreach (values %l) {
- my $best;
- foreach (@$_) {
- if ($best && $best != $_) {
- $_->compare_pkg($best) > 0 and $best = $_;
- } else {
- $best = $_;
- }
- }
- $packages->{$best->id} = 1;
- $best->set_flag_skip(0); #- reset skip flag as manually selected.
- }
- }
- }
- }
-
- #- return true if no error has been encountered, else false.
- $result;
-}
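A purely illustrative call (package names invented) showing how search_packages() is meant to be used; it fills the supplied hash with the selected package ids.

    my %requested;
    my $ok = $urpm->search_packages(\%requested, [ 'vim-enhanced', 'perl-URPM' ],
        fuzzy        => 0,
        use_provides => 1,
    );
    #- $ok is 1 on exact matches, 'substring' for non-exact ones, 0 on failure;
    #- keys of %requested are package ids (or 'id1|id2' choices), values are 1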
-
-#- Resolves dependencies between requested packages (and auto selection if any).
-#- handles parallel option if any.
-#- The return value is true if the program should be restarted (in order to take
-#- care of important packages being upgraded (priority upgrades)).
-#- %options :
-#- rpmdb
-#- auto_select
-#- callback_choices
-#- install_src
-#- keep
-#- nodeps
-#- priority_upgrade
-sub resolve_dependencies {
- #- $state->{selected} will contain the selection of packages to be
- #- installed or upgraded
- my ($urpm, $state, $requested, %options) = @_;
- my $need_restart;
-
- if ($options{install_src}) {
- #- only src will be installed, so only update $state->{selected} according
- #- to src status of files.
- foreach (keys %$requested) {
- my $pkg = $urpm->{depslist}[$_] or next;
- $pkg->arch eq 'src' or next;
- $state->{selected}{$_} = undef;
- }
- }
- if ($urpm->{parallel_handler}) {
- #- build the global synthesis file first.
- my $file = "$urpm->{cachedir}/partial/parallel.cz";
- unlink $file;
- foreach (@{$urpm->{media}}) {
- is_valid_medium($_) or next;
- my $f = statedir_synthesis($urpm, $_);
- system "cat '$f' >> '$file'";
- }
- #- let each node determine what is requested, according to handler given.
- $urpm->{parallel_handler}->parallel_resolve_dependencies($file, $urpm, $state, $requested, %options);
- } else {
- my $db;
-
- if ($options{rpmdb}) {
- $db = new URPM;
- $db->parse_synthesis($options{rpmdb});
- } else {
- $db = db_open_or_die($urpm, $urpm->{root});
- }
-
- my $sig_handler = sub { undef $db; exit 3 };
- local $SIG{INT} = $sig_handler;
- local $SIG{QUIT} = $sig_handler;
-
- #- auto select package for upgrading the distribution.
- if ($options{auto_select}) {
- $urpm->request_packages_to_upgrade($db, $state, $requested, requested => undef,
- start => $urpm->{searchmedia}{start}, end => $urpm->{searchmedia}{end});
- }
-
- #- resolve dependencies which will be examined for packages that need to
- #- have urpmi restarted when they're updated.
- $urpm->resolve_requested($db, $state, $requested, %options);
-
- if ($options{priority_upgrade} && !$options{rpmdb}) {
- my (%priority_upgrade, %priority_requested);
- @priority_upgrade{split /,/, $options{priority_upgrade}} = ();
-
- #- check if a priority upgrade should be tried
- foreach (keys %{$state->{selected}}) {
- my $pkg = $urpm->{depslist}[$_] or next;
- exists $priority_upgrade{$pkg->name} or next;
- $priority_requested{$pkg->id} = undef;
- }
-
- if (%priority_requested) {
- my %priority_state;
-
- $urpm->resolve_requested($db, \%priority_state, \%priority_requested, %options);
- if (grep { ! exists $priority_state{selected}{$_} } keys %priority_requested) {
- #- some packages which were selected previously have not been selected, strange!
- $need_restart = 0;
- } elsif (grep { ! exists $priority_state{selected}{$_} } keys %{$state->{selected}}) {
- #- there are other packages to install after this priority transaction.
- %$state = %priority_state;
- $need_restart = 1;
- }
- }
- }
- }
- $need_restart;
-}
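A hedged sketch (not taken from the original callers) of a typical resolve_dependencies() call, given a %requested hash of package ids such as the one filled by search_packages(); the priority_upgrade list is an example value.

    my $state = {};
    my $restart = $urpm->resolve_dependencies($state, \%requested,
        auto_select      => 0,
        priority_upgrade => 'rpm,perl-URPM,urpmi',   #- example value
    );
    #- $state->{selected} now maps the ids to be installed/upgraded;
    #- a true $restart means the caller should restart after the priority transaction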
-
-sub create_transaction {
- my ($urpm, $state, %options) = @_;
-
- if ($urpm->{parallel_handler} || !$options{split_length} ||
- keys %{$state->{selected}} < $options{split_level}) {
- #- build simplest transaction (no split).
- $urpm->build_transaction_set(undef, $state, split_length => 0);
- } else {
- my $db;
-
- if ($options{rpmdb}) {
- $db = new URPM;
- $db->parse_synthesis($options{rpmdb});
- } else {
- $db = db_open_or_die($urpm, $urpm->{root});
- }
-
- my $sig_handler = sub { undef $db; exit 3 };
- local $SIG{INT} = $sig_handler;
- local $SIG{QUIT} = $sig_handler;
-
- #- build transaction set...
- $urpm->build_transaction_set($db, $state, split_length => $options{split_length});
- }
-}
-
-#- get a list of package names from a file such as skip.list (packages that
-#- should not be upgraded or installed) or inst.list (packages to install
-#- instead of upgrading).
-sub get_packages_list {
- my ($file, $o_extra) = @_;
- my $val = [];
- open my $f, $file or return [];
- foreach (<$f>, split /,/, $o_extra || '') {
- chomp; s/#.*$//; s/^\s*//; s/\s*$//;
- next if $_ eq '';
- push @$val, $_;
- }
- close $f;
- $val;
-}
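For illustration, the file format accepted above and a sample call (entries invented): one entry per line, '#' starts a comment, surrounding blanks are trimmed, and an optional comma-separated string appends extra entries.

    #- /etc/urpmi/skip.list might contain, for instance:
    #-   # never let urpmi touch these
    #-   kernel-source
    #-   openoffice.org
    my $skip = get_packages_list($urpm->{skiplist}, 'foo,bar');
    #- $skip is now [ 'kernel-source', 'openoffice.org', 'foo', 'bar' ]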
-
-#- select sources for selected packages,
-#- according to keys of the packages hash.
-#- returns a list of lists containing the source description for each rpm,
-#- matching the exact number of registered media; ignored media are
-#- associated with an empty list.
-sub get_source_packages {
- my ($urpm, $packages, %options) = @_;
- my (%protected_files, %local_sources, %fullname2id);
-
- #- build association hash to retrieve id and examine all list files.
- foreach (keys %$packages) {
- foreach (split /\|/, $_) {
- if ($urpm->{source}{$_}) {
- $protected_files{$local_sources{$_} = $urpm->{source}{$_}} = undef;
- } else {
- $fullname2id{$urpm->{depslist}[$_]->fullname} = $_ . '';
- }
- }
- }
-
- #- examine each medium to search for packages.
- #- now get rpm file name in hdlist to match list file.
- my %file2fullnames;
- foreach my $pkg (@{$urpm->{depslist} || []}) {
- $file2fullnames{$pkg->filename}{$pkg->fullname} = undef;
- }
-
- #- examine the local repository, which is trusted (no gpg or pgp signature check but md5 is now done).
- my $dh = $urpm->opendir_safe("$urpm->{cachedir}/rpms");
- if ($dh) {
- while (defined(my $filename = readdir $dh)) {
- my $filepath = "$urpm->{cachedir}/rpms/$filename";
- if (-d $filepath) {
- } elsif ($options{clean_all} || ! -s _) {
- unlink $filepath; #- this file should be removed or is already empty.
- } else {
- if (keys(%{$file2fullnames{$filename} || {}}) > 1) {
- $urpm->{error}(N("there are multiple packages with the same rpm filename \"%s\"", $filename));
- } elsif (keys(%{$file2fullnames{$filename} || {}}) == 1) {
- my ($fullname) = keys(%{$file2fullnames{$filename} || {}});
- if (defined(my $id = delete $fullname2id{$fullname})) {
- $local_sources{$id} = $filepath;
- } else {
- $options{clean_other} && ! exists $protected_files{$filepath} and unlink $filepath;
- }
- } else {
- $options{clean_other} && ! exists $protected_files{$filepath} and unlink $filepath;
- }
- }
- }
- closedir $dh;
- }
-
- if ($options{clean_all}) {
- #- clean download directory, do it here even if this is not the best moment.
- clean_dir("$urpm->{cachedir}/partial");
- }
-
- my ($error, @list_error, @list, %examined);
-
- foreach my $medium (@{$urpm->{media} || []}) {
- my (%sources, %list_examined, $list_warning);
-
- if (is_valid_medium($medium) && !$medium->{ignore}) {
- #- always prefer a list file if available.
- my $listfile = $medium->{list} ? statedir_list($urpm, $medium) : '';
- if (!$listfile && $medium->{virtual}) {
- my $dir = file_from_local_url($medium->{url});
- my $with_hdlist_dir = reduce_pathname($dir . ($medium->{with_hdlist} ? "/$medium->{with_hdlist}" : "/.."));
- my $local_list = 'list' . _hdlist_suffix($medium);
- $listfile = reduce_pathname("$with_hdlist_dir/../$local_list");
- -s $listfile or $listfile = "$dir/list";
- }
- if ($listfile && -r $listfile) {
- my $fh = $urpm->open_safe('<', $listfile);
- if ($fh) {
- local $_;
- while (<$fh>) {
- chomp;
- if (my ($filename) = m|/([^/]*\.rpm)$|) {
- if (keys(%{$file2fullnames{$filename} || {}}) > 1) {
- $urpm->{error}(N("there are multiple packages with the same rpm filename \"%s\"", $filename));
- next;
- } elsif (keys(%{$file2fullnames{$filename} || {}}) == 1) {
- my ($fullname) = keys(%{$file2fullnames{$filename} || {}});
- if (defined(my $id = $fullname2id{$fullname})) {
- if (!/\.delta\.rpm$/ || $urpm->is_delta_installable($urpm->{depslist}[$id], $options{root})) {
- $sources{$id} = $medium->{virtual} ? "$medium->{url}/$_" : $_;
- }
- }
- $list_examined{$fullname} = $examined{$fullname} = undef;
- }
- } else {
- chomp;
- $error = 1;
- $urpm->{error}(N("unable to correctly parse [%s] on value \"%s\"", $listfile, $_));
- last;
- }
- }
- close $fh;
- }
- } elsif ($listfile && -e $listfile) {
- # list file exists but isn't readable
- # report error only if no result found, list files are only readable by root
- push @list_error, N("unable to access list file of \"%s\", medium ignored", $medium->{name});
- $< and push @list_error, " " . N("(retry as root?)");
- next;
- }
- if (defined $medium->{url}) {
- foreach ($medium->{start} .. $medium->{end}) {
- my $pkg = $urpm->{depslist}[$_];
- my $fi = $pkg->filename;
- if (keys(%{$file2fullnames{$fi} || {}}) > 1) {
- $urpm->{error}(N("there are multiple packages with the same rpm filename \"%s\"", $fi));
- next;
- } elsif (keys(%{$file2fullnames{$fi} || {}}) == 1) {
- my ($fullname) = keys(%{$file2fullnames{$fi} || {}});
- unless (exists($list_examined{$fullname})) {
- ++$list_warning;
- if (defined(my $id = $fullname2id{$fullname})) {
- if ($fi !~ /\.delta\.rpm$/ || $urpm->is_delta_installable($urpm->{depslist}[$id], $options{root})) {
- $sources{$id} = "$medium->{url}/" . $fi;
- }
- }
- $examined{$fullname} = undef;
- }
- }
- }
- $list_warning && $medium->{list} && -r statedir_list($urpm, $medium) && -f _
- and $urpm->{error}(N("medium \"%s\" uses an invalid list file:
- mirror is probably not up-to-date, trying to use alternate method", $medium->{name}));
- } elsif (!%list_examined) {
- $error = 1;
- $urpm->{error}(N("medium \"%s\" does not define any location for rpm files", $medium->{name}));
- }
- }
- push @list, \%sources;
- }
-
- #- examine package list to see if a package has not been found.
- foreach (grep { ! exists($examined{$_}) } keys %fullname2id) {
- # print list errors only once if any
- $urpm->{error}($_) foreach @list_error;
- @list_error = ();
- $error = 1;
- $urpm->{error}(N("package %s is not found.", $_));
- }
-
- $error ? @{[]} : (\%local_sources, \@list);
-}
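Purely illustrative (ids and paths invented): the shape of the two values returned by get_source_packages() on success, as described in the comment above the sub.

    my ($local_sources, $list) = $urpm->get_source_packages(\%packages);
    #- $local_sources: id => rpm file already present locally, e.g.
    #-   { 12 => "/var/cache/urpmi/rpms/foo-1.0-1.i586.rpm" }
    #- $list: one hash per configured medium (empty for ignored media), e.g.
    #-   [ {}, { 34 => "http://mirror/media/main/bar-2.0-1.i586.rpm" } ]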
-
#- checks whether the delta RPM represented by $pkg is installable wrt the
#- RPM DB on $root. For this, it extracts the rpm version to which the
#- delta applies from the delta rpm filename itself. So naming conventions
@@ -2632,766 +194,35 @@ sub download_source_packages {
my %sources = %$local_sources;
my %error_sources;
- $urpm->lock_urpmi_db('exclusive') if !$options{nolock};
- $urpm->copy_packages_of_removable_media($list, \%sources, %options) or return;
- $urpm->download_packages_of_distant_media($list, \%sources, \%error_sources, %options);
- $urpm->unlock_urpmi_db unless $options{nolock};
+ require urpm::get_pkgs;
+ urpm::removable::copy_packages_of_removable_media($urpm, $list, \%sources, $options{ask_for_medium}) or return;
+ urpm::get_pkgs::download_packages_of_distant_media($urpm, $list, \%sources, \%error_sources, %options);
%sources, %error_sources;
}
-#- lock policy concerning chroot :
-# - lock rpm db in chroot
-# - lock urpmi db in /
-sub _lock {
- my ($urpm, $fh_ref, $file, $b_exclusive) = @_;
- #- avoid putting a require on Fcntl ':flock' (which is perl and not perl-base).
- my ($LOCK_SH, $LOCK_EX, $LOCK_NB) = (1, 2, 4);
- if ($b_exclusive) {
- #- lock urpmi database, but keep lock to wait for an urpmi.update to finish.
- } else {
- #- create the .LOCK file if needed (and if possible)
- unless (-e $file) {
- open(my $f, ">", $file);
- close $f;
- }
- #- lock urpmi database; if the LOCK file doesn't exist, no shared lock is taken.
- }
- my ($sense, $mode) = $b_exclusive ? ('>', $LOCK_EX) : ('<', $LOCK_SH);
- open $$fh_ref, $sense, $file or return;
- flock $$fh_ref, $mode|$LOCK_NB or $urpm->{fatal}(7, N("urpmi database locked"));
-}
-
-sub lock_rpm_db {
- my ($urpm, $b_exclusive) = @_;
- _lock($urpm, \$RPMLOCK_FILE, "$urpm->{root}/$urpm->{statedir}/.RPMLOCK", $b_exclusive);
-}
-sub lock_urpmi_db {
- my ($urpm, $b_exclusive) = @_;
- _lock($urpm, \$LOCK_FILE, "$urpm->{statedir}/.LOCK", $b_exclusive);
-}
-#- deprecated
-sub exlock_urpmi_db {
- my ($urpm) = @_;
- lock_urpmi_db($urpm, 'exclusive');
-}
-
-sub _unlock {
- my ($fh_ref) = @_;
- #- avoid putting a require on Fcntl ':flock' (which is perl and not perl-base).
- my $LOCK_UN = 8;
- #- now everything is finished.
- #- release lock on database.
- flock $$fh_ref, $LOCK_UN;
- close $$fh_ref;
-}
-sub unlock_rpm_db {
- my ($_urpm) = @_;
- _unlock(\$RPMLOCK_FILE);
-}
-sub unlock_urpmi_db {
- my ($_urpm) = @_;
- _unlock(\$LOCK_FILE);
-}
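A minimal sketch of the intended lock/unlock pairing around a database update (body elided); since the flock calls use LOCK_NB, a second exclusive locker does not wait but aborts through $urpm->{fatal}.

    $urpm->lock_rpm_db('exclusive');
    $urpm->lock_urpmi_db('exclusive');
    #- ... update media / install packages ...
    $urpm->unlock_urpmi_db;
    $urpm->unlock_rpm_db;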
-
-sub copy_packages_of_removable_media {
- my ($urpm, $list, $sources, %options) = @_;
- my %removables;
-
- #- make sure everything is correct on input...
- $urpm->{media} or return;
- @{$urpm->{media}} == @$list or return;
-
- #- check whether the given medium is already reachable on a mounted removable device.
- my $check_notfound = sub {
- my ($id, $dir, $removable) = @_;
- if ($dir) {
- $urpm->try_mounting($dir, $removable);
- -e $dir or return 2;
- }
- foreach (values %{$list->[$id]}) {
- chomp;
- my $dir_ = file_from_local_url($_) or next;
- $dir_ =~ m!/.*/! or next; #- is this really needed??
- unless ($dir) {
- $dir = $dir_;
- $urpm->try_mounting($dir, $removable);
- }
- -r $dir_ or return 1;
- }
- 0;
- };
- #- removable media have to be examined so as to keep mounted the one that has
- #- more packages than the others.
- my $examine_removable_medium = sub {
- my ($id, $device) = @_;
- my $medium = $urpm->{media}[$id];
- if (my $dir = file_from_local_url($medium->{url})) {
- #- the directory given does not exist and may be accessible
- #- by mounting some other directory. Try to figure it out and mount
- #- everything that might be necessary.
- while ($check_notfound->($id, $dir, is_iso($medium->{removable}) ? $medium->{removable} : 'removable')) {
- is_iso($medium->{removable}) || $options{ask_for_medium}
- or $urpm->{fatal}(4, N("medium \"%s\" is not selected", $medium->{name}));
- $urpm->try_umounting($dir);
- system("/usr/bin/eject '$device' 2>/dev/null");
- is_iso($medium->{removable})
- || $options{ask_for_medium}(remove_internal_name($medium->{name}), $medium->{removable})
- or $urpm->{fatal}(4, N("medium \"%s\" is not selected", $medium->{name}));
- }
- if (-e $dir) {
- while (my ($i, $url) = each %{$list->[$id]}) {
- chomp $url;
- my ($filepath, $filename) = do {
- my $f = file_from_local_url($url) or next;
- $f =~ m!/.*/! or next; #- is this really needed??
- dirname($f), basename($f);
- };
- if (-r $filepath) {
- #- we should assume a possibly buggy removable device...
- #- First, copy in partial cache, and if the package is still good,
- #- transfer it to the rpms cache.
- unlink "$urpm->{cachedir}/partial/$filename";
- if (urpm::util::copy($filepath, "$urpm->{cachedir}/partial") &&
- URPM::verify_rpm("$urpm->{cachedir}/partial/$filename", nosignatures => 1))
- {
- #- now we can consider the file to be fine.
- unlink "$urpm->{cachedir}/rpms/$filename";
- urpm::util::move("$urpm->{cachedir}/partial/$filename", "$urpm->{cachedir}/rpms/$filename");
- -r "$urpm->{cachedir}/rpms/$filename" and $sources->{$i} = "$urpm->{cachedir}/rpms/$filename";
- }
- }
- unless ($sources->{$i}) {
- #- fall back to another method for retrieving the file later.
- $urpm->{error}(N("unable to read rpm file [%s] from medium \"%s\"", $filepath, $medium->{name}));
- }
- }
- } else {
- $urpm->{error}(N("medium \"%s\" is not selected", $medium->{name}));
- }
- } else {
- #- we have a removable device that is not removable, well...
- $urpm->{error}(N("inconsistent medium \"%s\" marked removable but not really", $medium->{name}));
- }
- };
-
- foreach (0..$#$list) {
- values %{$list->[$_]} or next;
- my $medium = $urpm->{media}[$_];
- #- examine non-removable devices that may nevertheless need to be mounted.
- if ($medium->{removable}) {
- push @{$removables{$medium->{removable}} ||= []}, $_;
- } elsif (my $dir = file_from_local_url($medium->{url})) {
- -e $dir || $urpm->try_mounting($dir) or
- $urpm->{error}(N("unable to access medium \"%s\"", $medium->{name})), next;
- }
- }
- foreach my $device (keys %removables) {
- next if $device =~ m![^a-zA-Z0-9_./-]!; #- bad path
- #- Here we have only removable devices.
- #- If more than one medium uses this device, we have to sort the media
- #- by the number of needed packages before copying the needed rpm files.
- if (@{$removables{$device}} > 1) {
- my @sorted_media = sort { values(%{$list->[$a]}) <=> values(%{$list->[$b]}) } @{$removables{$device}};
-
- #- check if a removable device is already mounted (and files present).
- if (my ($already_mounted_medium) = grep { !$check_notfound->($_) } @sorted_media) {
- @sorted_media = grep { $_ ne $already_mounted_medium } @sorted_media;
- unshift @sorted_media, $already_mounted_medium;
- }
-
- #- mount all except the biggest one.
- my $biggest = pop @sorted_media;
- foreach (@sorted_media) {
- $examine_removable_medium->($_, $device);
- }
- #- now mount the last one...
- $removables{$device} = [ $biggest ];
- }
-
- $examine_removable_medium->($removables{$device}[0], $device);
- }
-
- 1;
-}
-
-# TODO verify that files are downloaded from the right corresponding media
-sub download_packages_of_distant_media {
- my ($urpm, $list, $sources, $error_sources, %options) = @_;
-
- #- get back all ftp and http accessible rpm files into the local cache
- foreach my $n (0..$#$list) {
- my %distant_sources;
-
- #- ignore media that contain nothing for the current set of files
- values %{$list->[$n]} or next;
-
- #- examine all files to know what can be indexed on multiple media.
- while (my ($i, $url) = each %{$list->[$n]}) {
- #- the given URL is trusted, so the file can safely be ignored.
- defined $sources->{$i} and next;
- my $local_file = file_from_local_url($url);
- if ($local_file && $local_file =~ /\.rpm$/) {
- if (-r $local_file) {
- $sources->{$i} = $local_file;
- } else {
- $error_sources->{$i} = $local_file;
- }
- } elsif ($url =~ m!^([^:]*):/(.*/([^/]*\.rpm))\Z!) {
- $distant_sources{$i} = "$1:/$2"; #- will download now
- } else {
- $urpm->{error}(N("malformed URL: [%s]", $url));
- }
- }
-
- #- download files from the current medium.
- if (%distant_sources) {
- eval {
- $urpm->{log}(N("retrieving rpm files from medium \"%s\"...", $urpm->{media}[$n]{name}));
- sync_webfetch($urpm, $urpm->{media}[$n], [ values %distant_sources ],
- \%options, resume => $options{resume}, callback => $options{callback});
- $urpm->{log}(N("...retrieving done"));
- };
- $@ and $urpm->{error}(N("...retrieving failed: %s", $@));
- #- clean files that have not been downloaded, but keep in mind that
- #- there have been problems downloading them at least once; this
- #- is necessary to keep track of failing downloads in order to
- #- present the error to the user.
- foreach my $i (keys %distant_sources) {
- my ($filename) = $distant_sources{$i} =~ m|/([^/]*\.rpm)$|;
- if ($filename && -s "$urpm->{cachedir}/partial/$filename" &&
- URPM::verify_rpm("$urpm->{cachedir}/partial/$filename", nosignatures => 1))
- {
- #- it seems that the file has been downloaded correctly and has been checked to be valid.
- unlink "$urpm->{cachedir}/rpms/$filename";
- urpm::util::move("$urpm->{cachedir}/partial/$filename", "$urpm->{cachedir}/rpms/$filename");
- -r "$urpm->{cachedir}/rpms/$filename" and $sources->{$i} = "$urpm->{cachedir}/rpms/$filename";
- }
- unless ($sources->{$i}) {
- $error_sources->{$i} = $distant_sources{$i};
- }
- }
- }
- }
-
- #- forget failed downloads that eventually succeeded.
- delete @$error_sources{keys %$sources};
-
- 1;
-}
-
-#- prepare transaction.
-sub prepare_transaction {
- my ($_urpm, $set, $list, $sources, $transaction_list, $transaction_sources) = @_;
-
- foreach my $id (@{$set->{upgrade}}) {
- foreach (0..$#$list) {
- exists $list->[$_]{$id} and $transaction_list->[$_]{$id} = $list->[$_]{$id};
- }
- exists $sources->{$id} and $transaction_sources->{$id} = $sources->{$id};
- }
-}
-
#- extract packages that should be installed instead of upgraded,
+#- installing instead of upgrading is useful
+#- - for inst.list (cf flag disable_obsolete)
#- sources is a hash of id -> source rpm filename.
sub extract_packages_to_install {
my ($urpm, $sources, $state) = @_;
my %inst;
- my $rej = ref $state ? $state->{rejected} || {} : {};
foreach (keys %$sources) {
my $pkg = $urpm->{depslist}[$_] or next;
- $pkg->flag_disable_obsolete || !$pkg->flag_installed
- and !grep { exists $rej->{$_}{closure}{$pkg->fullname} } keys %$rej
- and $inst{$pkg->id} = delete $sources->{$pkg->id};
+ $pkg->flag_disable_obsolete
+ and $inst{$pkg->id} = delete $sources->{$pkg->id};
}
\%inst;
}
-# size of the installation progress bar
-my $progress_size = 45;
-eval {
- require Term::ReadKey;
- ($progress_size) = Term::ReadKey::GetTerminalSize();
- $progress_size -= 35;
- $progress_size < 5 and $progress_size = 5;
-};
-
-# install logger callback
-sub install_logger {
- my ($urpm, $type, $id, $subtype, $amount, $total) = @_;
- my $pkg = defined $id && $urpm->{depslist}[$id];
- my $total_pkg = $urpm->{nb_install};
- local $| = 1;
-
- if ($subtype eq 'start') {
- $urpm->{logger_progress} = 0;
- if ($type eq 'trans') {
- $urpm->{logger_id} ||= 0;
- $urpm->{logger_count} ||= 0;
- my $p = N("Preparing...");
- print $p, " " x (33 - length $p);
- } else {
- ++$urpm->{logger_id};
- my $pname = $pkg ? $pkg->name : '';
- ++$urpm->{logger_count} if $pname;
- my $cnt = $pname ? $urpm->{logger_count} : '-';
- $pname ||= N("[repackaging]");
- printf "%9s: %-22s", $cnt . "/" . $total_pkg, $pname;
- }
- } elsif ($subtype eq 'stop') {
- if ($urpm->{logger_progress} < $progress_size) {
- print '#' x ($progress_size - $urpm->{logger_progress}), "\n";
- $urpm->{logger_progress} = 0;
- }
- } elsif ($subtype eq 'progress') {
- my $new_progress = $total > 0 ? int($progress_size * $amount / $total) : $progress_size;
- if ($new_progress > $urpm->{logger_progress}) {
- print '#' x ($new_progress - $urpm->{logger_progress});
- $urpm->{logger_progress} = $new_progress;
- $urpm->{logger_progress} == $progress_size and print "\n";
- }
- }
-}
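For illustration only (name and counts invented), a per-package line produced by the callback above looks roughly like this: the counter comes from logger_count over nb_install, followed by the package name and a bar of '#' characters scaled to the terminal width.

         3/12: libfoo2               ######################################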
-
-#- install packages according to each hash (remove, install or upgrade).
-sub install {
- my ($urpm, $remove, $install, $upgrade, %options) = @_;
- my %readmes;
-
- #- allow process to be forked now.
- my $pid;
- my ($CHILD_RETURNS, $ERROR_OUTPUT);
- if ($options{fork}) {
- pipe($CHILD_RETURNS, $ERROR_OUTPUT);
- defined($pid = fork()) or die "Can't fork: $!\n";
- if ($pid) {
- # parent process
- close $ERROR_OUTPUT;
-
- $urpm->{log}(N("using process %d for executing transaction", $pid));
- #- now get all errors from the child and return them directly.
- my @l;
- local $_;
- while (<$CHILD_RETURNS>) {
- chomp;
- if (/^::logger_id:(\d*):(\d*)/) {
- $urpm->{logger_id} = $1;
- $urpm->{logger_count} = $2 if $2;
- } else {
- push @l, $_;
- }
- }
-
- close $CHILD_RETURNS;
- waitpid($pid, 0);
- #- take care of return code from transaction, an error should be returned directly.
- $? >> 8 and exit $? >> 8;
-
- return @l;
- } else {
- # child process
- close $CHILD_RETURNS;
- }
- }
- #- beware this can be a child process or the main process now...
-
- my $db = db_open_or_die($urpm, $urpm->{root}, !$options{test}); #- open in read/write mode unless testing installation.
-
- my $trans = $db->create_transaction($urpm->{root});
- if ($trans) {
- sys_log("transaction on %s (remove=%d, install=%d, upgrade=%d)", $urpm->{root} || '/', scalar(@{$remove || []}), scalar(values %$install), scalar(values %$upgrade));
- $urpm->{log}(N("created transaction for installing on %s (remove=%d, install=%d, upgrade=%d)", $urpm->{root} || '/',
- scalar(@{$remove || []}), scalar(values %$install), scalar(values %$upgrade)));
- } else {
- return N("unable to create transaction");
- }
-
- my ($update, @l) = 0;
- my @produced_deltas;
-
- foreach (@$remove) {
- if ($trans->remove($_)) {
- $urpm->{log}(N("removing package %s", $_));
- } else {
- $urpm->{error}(N("unable to remove package %s", $_));
- }
- }
- foreach my $mode ($install, $upgrade) {
- foreach (keys %$mode) {
- my $pkg = $urpm->{depslist}[$_];
- $pkg->update_header($mode->{$_});
- if ($pkg->payload_format eq 'drpm') { #- handle deltarpms
- my $true_rpm = urpm::sys::apply_delta_rpm($mode->{$_}, "$urpm->{cachedir}/rpms", $pkg);
- if ($true_rpm) {
- push @produced_deltas, ($mode->{$_} = $true_rpm); #- fix path
- } else {
- $urpm->{error}(N("unable to extract rpm from delta-rpm package %s", $mode->{$_}));
- }
- }
- if ($trans->add($pkg, update => $update,
- $options{excludepath} ? (excludepath => [ split /,/, $options{excludepath} ]) : ()
- )) {
- $urpm->{log}(N("adding package %s (id=%d, eid=%d, update=%d, file=%s)", scalar($pkg->fullname),
- $_, $pkg->id, $update, $mode->{$_}));
- } else {
- $urpm->{error}(N("unable to install package %s", $mode->{$_}));
- }
- }
- ++$update;
- }
- if (($options{nodeps} || !(@l = $trans->check(%options))) && ($options{noorder} || !(@l = $trans->order))) {
- my $fh;
- #- assume default values for some parameters.
- $options{delta} ||= 1000;
- $options{callback_open} ||= sub {
- my ($_data, $_type, $id) = @_;
- $fh = $urpm->open_safe('<', $install->{$id} || $upgrade->{$id});
- $fh ? fileno $fh : undef;
- };
- $options{callback_close} ||= sub {
- my ($urpm, undef, $pkgid) = @_;
- return unless defined $pkgid;
- my $pkg = $urpm->{depslist}[$pkgid];
- my $fullname = $pkg->fullname;
- my $trtype = (grep { /\Q$fullname\E/ } values %$install) ? 'install' : '(upgrade|update)';
- foreach ($pkg->files) { /\bREADME(\.$trtype)?\.urpmi$/ and $readmes{$_} = $fullname }
- close $fh if defined $fh;
- };
- if ($::verbose >= 0 && (scalar keys %$install || scalar keys %$upgrade)) {
- $options{callback_inst} ||= \&install_logger;
- $options{callback_trans} ||= \&install_logger;
- }
- @l = $trans->run($urpm, %options);
-
- #- don't clear cache if transaction failed. We might want to retry.
- if (@l == 0 && !$options{test} && $options{post_clean_cache}) {
- #- examine the local cache to delete packages which were part of this transaction
- foreach (keys %$install, keys %$upgrade) {
- my $pkg = $urpm->{depslist}[$_];
- unlink "$urpm->{cachedir}/rpms/" . $pkg->filename;
- }
- }
- }
- unlink @produced_deltas;
-
- #- now exit or return according to current status.
- if (defined $pid && !$pid) { #- child process
- print $ERROR_OUTPUT "::logger_id:$urpm->{logger_id}:$urpm->{logger_count}\n"; #- allow main urpmi to know transaction numbering...
- print $ERROR_OUTPUT "$_\n" foreach @l;
- close $ERROR_OUTPUT;
- #- keep safe exit now (with destructor call).
- exit 0;
- } else {
- #- when non-forking
- # !!! BUG: this part of the code is not called when forking !!!
- # !!! BUG: when forking %readmes is empty, since the child is not passing this information to its parent !!!
- if ($::verbose >= 0 && keys %readmes) {
- foreach (keys %readmes) {
- print "-" x 70, "\n", N("More information on package %s", $readmes{$_}), "\n";
- my $fh; open $fh, '<', $_ and do {
- print while <$fh>;
- close $fh;
- };
- print "-" x 70, "\n";
- }
- }
- return @l;
- }
-}
-
-#- install all files on the nodes, as remembered according to the resolving done.
-sub parallel_install {
- my @para = @_;
- my ($urpm, $_remove, $_install, $_upgrade, %_options) = @para;
- $urpm->{parallel_handler}->parallel_install(@para);
-}
-
-#- find packages to remove.
-#- options:
-#- bundle
-#- callback_base
-#- callback_fuzzy
-#- callback_notfound
-#- force
-#- matches
-#- root
-#- test
-sub find_packages_to_remove {
- my ($urpm, $state, $l, %options) = @_;
-
- if ($urpm->{parallel_handler}) {
- #- invoke parallel finder.
- $urpm->{parallel_handler}->parallel_find_remove($urpm, $state, $l, %options, find_packages_to_remove => 1);
- } else {
- my $db = db_open_or_die($urpm, $options{root});
- my (@m, @notfound);
-
- if (!$options{matches}) {
- foreach (@$l) {
- my ($n, $found);
-
- #- check if name-version-release-architecture was given.
- if (($n) = /^(.*)-[^\-]*-[^\-]*\.[^\.\-]*$/) {
- $db->traverse_tag('name', [ $n ], sub {
- my ($p) = @_;
- $p->fullname eq $_ or return;
- $urpm->resolve_rejected($db, $state, $p, removed => 1, bundle => $options{bundle});
- push @m, scalar $p->fullname;
- $found = 1;
- });
- $found and next;
- }
-
- #- check if name-version-release was given.
- if (($n) = /^(.*)-[^\-]*-[^\-]*$/) {
- $db->traverse_tag('name', [ $n ], sub {
- my ($p) = @_;
- my ($name, $version, $release) = $p->fullname;
- "$name-$version-$release" eq $_ or return;
- $urpm->resolve_rejected($db, $state, $p, removed => 1, bundle => $options{bundle});
- push @m, scalar $p->fullname;
- $found = 1;
- });
- $found and next;
- }
-
- #- check if name-version was given.
- if (($n) = /^(.*)-[^\-]*$/) {
- $db->traverse_tag('name', [ $n ], sub {
- my ($p) = @_;
- my ($name, $version) = $p->fullname;
- "$name-$version" eq $_ or return;
- $urpm->resolve_rejected($db, $state, $p, removed => 1, bundle => $options{bundle});
- push @m, scalar $p->fullname;
- $found = 1;
- });
- $found and next;
- }
-
- #- check if only name was given.
- $db->traverse_tag('name', [ $_ ], sub {
- my ($p) = @_;
- $p->name eq $_ or return;
- $urpm->resolve_rejected($db, $state, $p, removed => 1, bundle => $options{bundle});
- push @m, scalar $p->fullname;
- $found = 1;
- });
- $found and next;
-
- push @notfound, $_;
- }
- if (!$options{force} && @notfound && @$l > 1) {
- $options{callback_notfound} && $options{callback_notfound}->($urpm, @notfound)
- or return ();
- }
- }
- if ($options{matches} || @notfound) {
- my $match = join "|", map { quotemeta } @$l;
- my $qmatch = qr/$match/;
-
- #- reset what has been already found.
- %$state = ();
- @m = ();
-
- #- search for packages that match, and perform closure again.
- $db->traverse(sub {
- my ($p) = @_;
- my $f = scalar $p->fullname;
- $f =~ $qmatch or return;
- $urpm->resolve_rejected($db, $state, $p, removed => 1, bundle => $options{bundle});
- push @m, $f;
- });
-
- if (!$options{force} && @notfound) {
- if (@m) {
- $options{callback_fuzzy} && $options{callback_fuzzy}->($urpm, @$l > 1 ? $match : $l->[0], @m)
- or return ();
- } else {
- $options{callback_notfound} && $options{callback_notfound}->($urpm, @notfound)
- or return ();
- }
- }
- }
-
- #- check if something needs to be removed.
- find_removed_from_basesystem($urpm, $db, $state, $options{callback_base})
- or return ();
- }
- grep { $state->{rejected}{$_}{removed} && !$state->{rejected}{$_}{obsoleted} } keys %{$state->{rejected}};
-}
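A hedged usage sketch (names invented) of find_packages_to_remove(): the returned fullnames are those recorded as removed in $state->{rejected}.

    my $state = {};
    my @to_remove = $urpm->find_packages_to_remove($state, [ 'foo', 'bar-1.2-3mdv2007.1' ],
        callback_notfound => sub { my ($urpm, @missing) = @_; print "not found: @missing\n"; 0 },
        callback_base     => sub { my ($urpm, @base) = @_; print "would remove base packages: @base\n"; 0 },
    );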
-
-sub find_removed_from_basesystem {
- my ($urpm, $db, $state, $callback_base) = @_;
- if ($callback_base && %{$state->{rejected} || {}}) {
- my %basepackages;
- my @dont_remove = ('basesystem', split /,\s*/, $urpm->{global_config}{'prohibit-remove'});
- #- check if a package to be removed is a part of basesystem requires.
- $db->traverse_tag('whatprovides', \@dont_remove, sub {
- my ($p) = @_;
- $basepackages{$p->fullname} = 0;
- });
- foreach (grep { $state->{rejected}{$_}{removed} && !$state->{rejected}{$_}{obsoleted} } keys %{$state->{rejected}}) {
- exists $basepackages{$_} or next;
- ++$basepackages{$_};
- }
- if (grep { $_ } values %basepackages) {
- return $callback_base->($urpm, grep { $basepackages{$_} } keys %basepackages);
- }
- }
- return 1;
-}
-
-#- remove packages from the nodes, as remembered according to the resolving done.
-sub parallel_remove {
- my ($urpm, $remove, %options) = @_;
- my $state = {};
- my $callback = sub { $urpm->{fatal}(1, "internal distributed remove fatal error") };
- $urpm->{parallel_handler}->parallel_find_remove($urpm, $state, $remove, %options,
- callback_notfound => undef,
- callback_fuzzy => $callback,
- callback_base => $callback,
- );
-}
-
-#- misc functions to help finding ask_unselect and ask_remove elements with their reasons translated.
-sub unselected_packages {
- my (undef, $state) = @_;
- grep { $state->{rejected}{$_}{backtrack} } keys %{$state->{rejected} || {}};
-}
-
-sub uniq { my %l; $l{$_} = 1 foreach @_; grep { delete $l{$_} } @_ }
-
-sub translate_why_unselected {
- my ($urpm, $state, @fullnames) = @_;
-
- join("\n", map { translate_why_unselected_one($urpm, $state, $_) } sort @fullnames);
-}
-
-sub translate_why_unselected_one {
- my ($urpm, $state, $fullname) = @_;
-
- my $rb = $state->{rejected}{$fullname}{backtrack};
- my @froms = keys %{$rb->{closure} || {}};
- my @unsatisfied = @{$rb->{unsatisfied} || []};
- my $s = join ", ", (
- (map { N("due to missing %s", $_) } @froms),
- (map { N("due to unsatisfied %s", $_) } uniq(map {
- #- XXX in theory we shouldn't need this, dependencies (and not ids) should
- #- already be present in @unsatisfied. But with biarch packages this is
- #- not always the case.
- /\D/ ? $_ : scalar($urpm->{depslist}[$_]->fullname);
- } @unsatisfied)),
- $rb->{promote} && !$rb->{keep} ? N("trying to promote %s", join(", ", @{$rb->{promote}})) : (),
- $rb->{keep} ? N("in order to keep %s", join(", ", @{$rb->{keep}})) : (),
- );
- $fullname . ($s ? " ($s)" : '');
-}
-
-sub removed_packages {
- my (undef, $state) = @_;
- grep {
- $state->{rejected}{$_}{removed} && !$state->{rejected}{$_}{obsoleted};
- } keys %{$state->{rejected} || {}};
-}
-
-sub translate_why_removed {
- my ($urpm, $state, @fullnames) = @_;
- join("\n", map { translate_why_removed_one($urpm, $state, $_) } sort @fullnames);
-}
-sub translate_why_removed_one {
- my ($urpm, $state, $fullname) = @_;
-
- my $closure = $state->{rejected}{$fullname}{closure};
- my ($from) = keys %$closure;
- my ($whyk) = keys %{$closure->{$from}};
- my $whyv = $closure->{$from}{$whyk};
- my $frompkg = $urpm->search($from, strict_fullname => 1);
- my $s = do {
- if ($whyk =~ /old_requested/) {
- N("in order to install %s", $frompkg ? scalar $frompkg->fullname : $from);
- } elsif ($whyk =~ /unsatisfied/) {
- join(",\n ", map {
- if (/([^\[\s]*)(?:\[\*\])?(?:\[|\s+)([^\]]*)\]?$/ && $2 ne '*') {
- N("due to unsatisfied %s", "$1 $2");
- } else {
- N("due to missing %s", $_);
- }
- } @$whyv);
- } elsif ($whyk =~ /conflicts/) {
- N("due to conflicts with %s", $whyv);
- } elsif ($whyk =~ /unrequested/) {
- N("unrequested");
- } else {
- undef;
- }
- };
- #- now insert the reason if available.
- $fullname . ($s ? "\n ($s)" : '');
-}
-
-sub check_sources_signatures {
- my ($urpm, $sources_install, $sources, %options) = @_;
- my ($medium, %invalid_sources);
- my $s = $sources_install;
-
- foreach my $id (keys %$sources_install, -1, keys %$sources) {
- if ($id == -1) { $s = $sources; next }
- my $filepath = $s->{$id};
- my $verif = URPM::verify_signature($filepath);
-
- if ($verif =~ /NOT OK/) {
- $verif =~ s/\n//g;
- $invalid_sources{$filepath} = N("Invalid signature (%s)", $verif);
- } else {
- unless ($medium && is_valid_medium($medium) &&
- $medium->{start} <= $id && $id <= $medium->{end})
- {
- $medium = undef;
- foreach (@{$urpm->{media}}) {
- is_valid_medium($_) && $_->{start} <= $id && $id <= $_->{end}
- and $medium = $_, last;
- }
- }
- #- no medium found for this rpm ?
- next if !$medium;
- #- check whether verify-rpm is specifically disabled for this medium
- next if defined $medium->{'verify-rpm'} && !$medium->{'verify-rpm'};
-
- my $key_ids = $medium->{'key-ids'} || $urpm->{options}{'key-ids'};
- #- check that the key ids of the medium match the key ids of the package.
- if ($key_ids) {
- my $valid_ids = 0;
- my $invalid_ids = 0;
-
- foreach my $key_id ($verif =~ /(?:key id \w{8}|#)(\w+)/gi) {
- if (grep { hex($_) == hex($key_id) } split /[,\s]+/, $key_ids) {
- ++$valid_ids;
- } else {
- ++$invalid_ids;
- }
- }
-
- if ($invalid_ids) {
- $invalid_sources{$filepath} = N("Invalid Key ID (%s)", $verif);
- } elsif (!$valid_ids) {
- $invalid_sources{$filepath} = N("Missing signature (%s)", $verif);
- }
- }
- #- invoke check signature callback.
- $options{callback} and $options{callback}->(
- $urpm, $filepath, %options,
- id => $id,
- verif => $verif,
- why => $invalid_sources{$filepath},
- );
- }
- }
+#- deprecated
+sub install { require urpm::install; &urpm::install::install }
- map { ($options{basename} ? basename($_) : $_) . ($options{translate} ? ": $invalid_sources{$_}" : "") }
- sort keys %invalid_sources;
-}
+#- deprecated
+sub parallel_remove { &urpm::parallel::remove }
#- get the reason of update for packages to be updated
#- use all update media if none given
@@ -3402,7 +233,7 @@ sub get_updates_description {
@update_medias or @update_medias = grep { !$_->{ignore} && $_->{update} } @{$urpm->{media}};
- foreach (map { cat_(statedir_descriptions($urpm, $_)), '%package dummy' } @update_medias) {
+ foreach (map { cat_utf8(urpm::media::statedir_descriptions($urpm, $_)), '%package dummy' } @update_medias) {
/^%package (.+)/ and do {
if (exists $cur->{importance} && $cur->{importance} ne "security" && $cur->{importance} ne "bugfix") {
$cur->{importance} = 'normal';
@@ -3423,63 +254,6 @@ sub get_updates_description {
\%update_descr;
}
-#- parse an MD5SUM file from a mirror
-sub get_md5sum {
- my ($md5sum_file, $f) = @_;
- my $basename = basename($f);
-
- my ($retrieved_md5sum) = map {
- my ($md5sum, $file) = m|(\S+)\s+(?:\./)?(\S+)|;
- $file && $file eq $basename ? $md5sum : @{[]};
- } cat_($md5sum_file);
-
- $retrieved_md5sum;
-}
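For illustration, the MD5SUM file format the regexp above expects: one digest and file name per line, the name optionally prefixed with './' (the digest below is invented).

    #- MD5SUM contents, e.g.:
    #-   9b67048e1706fbbe0af6e22a1a881ee0  ./hdlist_main.cz
    my $md5 = get_md5sum("$urpm->{cachedir}/partial/MD5SUM", "hdlist_main.cz");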
-
-sub parse_md5sum {
- my ($urpm, $md5sum_file, $basename) = @_;
- $urpm->{log}(N("examining MD5SUM file"));
- my $retrieved_md5sum = get_md5sum($md5sum_file, $basename)
- or $urpm->{log}(N("warning: md5sum for %s unavailable in MD5SUM file", $basename));
- return $retrieved_md5sum;
-}
-
-sub recompute_local_md5sum {
- my ($urpm, $medium, $force) = @_;
- if ($force) {
- #- force downloading the file again; otherwise, why would a force option have been defined?
- delete $medium->{md5sum};
- } else {
- compute_local_md5sum($urpm, $medium) if !$medium->{md5sum};
- }
-}
-
-sub compute_local_md5sum {
- my ($urpm, $medium) = @_;
-
- $urpm->{log}(N("computing md5sum of existing source hdlist (or synthesis)"));
- my $f = statedir_hdlist_or_synthesis($urpm, $medium);
- if (-e $f) {
- $medium->{md5sum} = md5sum($f);
- }
-}
-
-sub syserror { my ($urpm, $msg, $info) = @_; $urpm->{error}("$msg [$info] [$!]") }
-
-sub open_safe {
- my ($urpm, $sense, $filename) = @_;
- open my $f, $sense, $filename
- or $urpm->syserror($sense eq '>' ? "Can't write file" : "Can't open file", $filename), return undef;
- return $f;
-}
-
-sub opendir_safe {
- my ($urpm, $dirname) = @_;
- opendir my $d, $dirname
- or $urpm->syserror("Can't open directory", $dirname), return undef;
- return $d;
-}
-
sub error_restricted ($) {
my ($urpm) = @_;
$urpm->{fatal}(2, N("This operation is forbidden while running in restricted mode"));