author      Pascal Rigaux <pixel@mandriva.com>    2006-11-20 08:38:50 +0000
committer   Pascal Rigaux <pixel@mandriva.com>    2006-11-20 08:38:50 +0000
commit      c2fe4131c0d8139bda966785f063e1bbed43e8eb (patch)
tree        d8cfac00e007fc944843d6061338d60f4226541d /urpm.pm
parent      5e8753af0162511e3644aee280d0ae386faa704d (diff)
can't use --use-distrib when --synthesis is used
Diffstat (limited to 'urpm.pm')
-rw-r--r--  urpm.pm  785
1 file changed, 318 insertions, 467 deletions
diff --git a/urpm.pm b/urpm.pm
index b9cad615..dc2e56fc 100644
--- a/urpm.pm
+++ b/urpm.pm
@@ -73,14 +73,12 @@ sub sync_webfetch {
my %options = (
dir => "$urpm->{cachedir}/partial",
- limit_rate => $std_options->{limit_rate},
- compress => $std_options->{compress},
proxy => get_proxy($medium),
quiet => $std_options->{quiet}, #- often overridden in the caller, why??
$medium ? (media => $medium->{name}) : (),
%more_options,
);
- foreach my $cpt (qw(retry wget-options curl-options rsync-options prozilla-options)) {
+ foreach my $cpt (qw(compress limit_rate retry wget-options curl-options rsync-options prozilla-options)) {
$options{$cpt} = $urpm->{options}{$cpt} if defined $urpm->{options}{$cpt};
}
@@ -207,7 +205,6 @@ sub remove_passwords_and_write_private_netrc {
push @l, $u;
$_->{url} = sprintf('%s://%s@%s%s', $u->{proto}, $u->{login}, $u->{machine}, $u->{dir});
- warn "url is now $_->{url}\n";
}
{
my $fh = $urpm->open_safe('>', $urpm->{private_netrc}) or return;
@@ -222,30 +219,12 @@ sub remove_passwords_and_write_private_netrc {
sub recover_url_from_list {
my ($urpm, $medium) = @_;
- my %probe;
- if (-r statedir_list($urpm, $medium)) {
- if (my $listfile = $urpm->open_safe("<", statedir_list($urpm, $medium))) {
- local $_;
- while (<$listfile>) {
- #- /./ is end of url marker in list file (typically generated by a
- #- find . -name "*.rpm" > list
- #- for exportable list file.
- m|^(.*)/\./| and $probe{$1} = undef;
- m|^(.*)/[^/]*$| and $probe{$1} = undef;
- }
- }
- }
- foreach (sort { length($a) <=> length($b) } keys %probe) {
- if ($medium->{url}) {
- if ($medium->{url} ne substr($_, 0, length($medium->{url}))) {
- $medium->{ignore} or $urpm->{error}(N("inconsistent list file for \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- last;
- }
- } else {
- $medium->{url} = $_;
- $urpm->{modified} = 1; #- ensure urpmi.cfg is handled using only partially hidden url + netrc, since file list won't be generated anymore
- }
+ #- /./ is end of url marker in list file (typically generated by a
+ #- find . -name "*.rpm" > list
+ #- for exportable list file.
+ if (my @probe = map { m!^(.*)/\./! || m!^(.*)/[^/]*$! } cat_(statedir_list($urpm, $medium))) {
+ ($medium->{url}) = sort { length($a) <=> length($b) } @probe;
+ $urpm->{modified} = 1; #- ensure urpmi.cfg is handled using only partially hidden url + netrc, since file list won't be generated anymore
}
}
@@ -260,54 +239,33 @@ sub read_config {
my $config = urpm::cfg::load_config($urpm->{config})
or $urpm->{fatal}(6, $urpm::cfg::err);
- read_config_add_passwords($urpm, $config);
-
#- global options
- if ($config->{''}) {
- foreach my $opt (qw(
- allow-force
- allow-nodeps
- auto
- compress
- downloader
- default-media
- excludedocs
- excludepath
- fuzzy
- ignoresize
- keep
- key-ids
- limit-rate
- nopubkey
- norebuild
- post-clean
- pre-clean
- priority-upgrade
- prohibit-remove
- repackage
- resume
- retry
- split-length
- split-level
- strict-arch
- verify-rpm
- curl-options
- rsync-options
- wget-options
- prozilla-options
- )) {
- if (defined $config->{''}{$opt} && !exists $urpm->{options}{$opt}) {
- $urpm->{options}{$opt} = $config->{''}{$opt};
+ if (my $global = $config->{''}) {
+ foreach my $opt (keys %$global) {
+ if (defined $global->{$opt} && !exists $urpm->{options}{$opt}) {
+ $urpm->{options}{$opt} = $global->{$opt};
}
}
}
+
#- per-media options
+
+ read_config_add_passwords($urpm, $config);
+
foreach my $m (grep { $_ ne '' } keys %$config) {
my $medium = { name => $m };
foreach my $opt (@PER_MEDIA_OPT) {
defined $config->{$m}{$opt} and $medium->{$opt} = $config->{$m}{$opt};
}
- $urpm->probe_medium($medium, %options) and push @{$urpm->{media}}, $medium;
+
+ if (!$medium->{url}) {
+ #- recover the url the old deprecated way...
+ #- only useful for migration, new urpmi.cfg will use netrc
+ recover_url_from_list($urpm, $medium);
+ $medium->{url} or $urpm->{error}("unable to find url in list file $medium->{name}, medium ignored");
+ }
+
+ $urpm->add_existing_medium($medium, $options{nocheck_access});
}
eval { require urpm::ldap; urpm::ldap::load_ldap_media($urpm, %options) };
@@ -319,45 +277,6 @@ sub read_config {
$urpm->{media} = [ sort { $a->{priority} <=> $b->{priority} } @{$urpm->{media}} ];
- #- remember if an hdlist or list file is already used
- my %filelists;
- foreach my $medium (@{$urpm->{media}}) {
- foreach my $filetype (qw(hdlist list)) {
- $medium->{$filetype} or next;
-
- if ($filelists{$filetype}{$medium->{$filetype}}) {
- $medium->{ignore} = 1;
- $urpm->{error}(
- $filetype eq 'hdlist'
- ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
- : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name})
- );
- } else {
- $filelists{$filetype}{$medium->{$filetype}} = 1;
- }
- }
- }
-
- #- check the presence of hdlist and list files if necessary.
- if (!$options{nocheck_access}) {
- foreach my $medium (@{$urpm->{media}}) {
- $medium->{ignore} and next;
-
- if (-r statedir_hdlist($urpm, $medium)) {}
- elsif ($medium->{synthesis} && -r statedir_synthesis($urpm, $medium)) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- }
- if ($medium->{list} && -r statedir_list($urpm, $medium)) {}
- elsif ($medium->{url}) {}
- else {
- $medium->{ignore} = 1;
- $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
- }
- }
- }
-
#- read MD5 sums (usually not in urpmi.cfg but in a separate file)
foreach (@{$urpm->{media}}) {
if (my $md5sum = get_md5sum("$urpm->{statedir}/MD5SUM", statedir_hdlist_or_synthesis($urpm, $_))) {
@@ -369,14 +288,9 @@ sub read_config {
$urpm->{global_config} = $config->{''};
}
-#- probe medium to be used, take old medium into account too.
-sub probe_medium {
- my ($urpm, $medium, %options) = @_;
-
- if (name2medium($urpm, $medium->{name})) {
- $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
- return;
- }
+#- if invalid, set {ignore}
+sub check_existing_medium {
+ my ($urpm, $medium, $b_nocheck_access) = @_;
if ($medium->{virtual}) {
#- a virtual medium needs to have an url available without using a list file.
@@ -391,13 +305,19 @@ sub probe_medium {
$medium->{name}));
}
} else {
- unless ($medium->{ignore} || $medium->{hdlist}) {
+ if ($medium->{hdlist}) {
+ #- is this check really needed? keeping just in case
+ $medium->{hdlist} ne 'list' && $medium->{hdlist} ne 'pubkey' or
+ $medium->{ignore} = 1,
+ $urpm->{error}(N("invalid hdlist name"));
+ }
+ if (!$medium->{ignore} && !$medium->{hdlist}) {
$medium->{hdlist} = "hdlist.$medium->{name}.cz";
-e statedir_hdlist($urpm, $medium) or
$medium->{ignore} = 1,
$urpm->{error}(N("unable to find hdlist file for \"%s\", medium ignored", $medium->{name}));
}
- unless ($medium->{ignore} || $medium->{list}) {
+ if (!$medium->{ignore} && !$medium->{list}) {
unless (defined $medium->{url}) {
$medium->{list} = "list.$medium->{name}";
unless (-e statedir_list($urpm, $medium)) {
@@ -406,19 +326,48 @@ sub probe_medium {
}
}
}
+ }
- #- there is a little more to do at this point as url is not known, inspect directly list file for it.
- unless ($medium->{url}) {
- recover_url_from_list($urpm, $medium);
- unless ($options{nocheck_access}) {
- unless ($medium->{url}) {
- $medium->{ignore} or $urpm->{error}(N("unable to inspect list file for \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- }
- }
+ #- check the presence of hdlist and list files if necessary.
+ if (!$b_nocheck_access && !$medium->{ignore}) {
+ if ($medium->{virtual} && -r hdlist_or_synthesis_for_virtual_medium($medium)) {}
+ elsif (-r statedir_hdlist($urpm, $medium)) {}
+ elsif ($medium->{synthesis} && -r statedir_synthesis($urpm, $medium)) {}
+ else {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
+ }
+ if ($medium->{list} && -r statedir_list($urpm, $medium)) {}
+ elsif ($medium->{url}) {}
+ else {
+ $medium->{ignore} = 1;
+ $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
+ }
+ }
+
+ foreach my $field ('hdlist', 'list') {
+ $medium->{$field} or next;
+ if (grep { $_->{$field} eq $medium->{$field} } @{$urpm->{media}}) {
+ $medium->{ignore} = 1;
+ $urpm->{error}(
+ $field eq 'hdlist'
+ ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
+ : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name}));
}
}
+}
+
+#- probe medium to be used, take old medium into account too.
+sub add_existing_medium {
+ my ($urpm, $medium, $b_nocheck_access) = @_;
+
+ if (name2medium($urpm, $medium->{name})) {
+ $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
+ return;
+ }
+
+ check_existing_medium($urpm, $medium, $b_nocheck_access);
#- probe removable device.
$urpm->probe_removable_device($medium);
@@ -426,7 +375,7 @@ sub probe_medium {
#- clear URLs for trailing /es.
$medium->{url} and $medium->{url} =~ s|(.*?)/*$|$1|;
- $medium;
+ push @{$urpm->{media}}, $medium;
}
#- returns the removable device name if it corresponds to an iso image, '' otherwise
@@ -622,14 +571,14 @@ sub _configure_parallel {
#- read urpmi.cfg file as well as necessary synthesis files
#- options :
-#- callback
-#- call_back_only_once
+#- callback (urpmf)
#- excludemedia
#- hdlist
#- media
+#- need_hdlist (for urpmf: to be able to have info not available in synthesis)
#- nodepslist
#- noinstalling
-#- noskipping
+#- no_skiplist (urpmf & urpmq)
#- parallel
#- root
#- searchmedia
@@ -662,8 +611,8 @@ sub configure {
if ($options{synthesis}) {
if ($options{synthesis} ne 'none') {
#- synthesis take precedence over media, update options.
- $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{parallel} and
- $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update or --parallel"));
+ $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{usedistrib} || $options{parallel} and
+ $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update, --use-distrib or --parallel"));
$urpm->parse_synthesis($options{synthesis});
#- synthesis disables the split of transaction (too risky and not useful).
$urpm->{options}{'split-length'} = 0;
@@ -718,13 +667,13 @@ sub configure {
if (file_from_file_url($_->{url})) {
if ($_->{synthesis}) {
_parse_synthesis($urpm, $_,
- hdlist_or_synthesis_for_virtual_medium($_), callback => $options{callback});
+ hdlist_or_synthesis_for_virtual_medium($_), $options{callback});
} else {
#- we'll need a second pass
defined $second_pass or $second_pass = 1;
_parse_hdlist($urpm, $_,
hdlist_or_synthesis_for_virtual_medium($_),
- callback => $options{call_back_only_once} && $second_pass ? undef : $options{callback},
+ $second_pass ? undef : $options{callback},
);
}
} else {
@@ -732,16 +681,13 @@ sub configure {
$_->{ignore} = 1;
}
} else {
- if ($options{hdlist} && file_size(statedir_hdlist($urpm, $_)) > 32) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_),
- callback => $options{callback},
- );
+ if ($options{need_hdlist} && file_size(statedir_hdlist($urpm, $_)) > 32) {
+ _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options{callback});
} else {
if (!_parse_synthesis($urpm, $_,
statedir_synthesis($urpm, $_),
- callback => $options{callback})) {
- _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_),
- callback => $options{callback});
+ $options{callback})) {
+ _parse_hdlist($urpm, $_, statedir_hdlist($urpm, $_), $options{callback});
}
}
}
@@ -766,7 +712,7 @@ sub configure {
}
}
#- determine package to withdraw (from skip.list file) only if something should be withdrawn.
- unless ($options{noskipping}) {
+ unless ($options{no_skiplist}) {
my %uniq;
$urpm->compute_flags(
get_packages_list($urpm->{skiplist}, $options{skip}),
@@ -862,20 +808,7 @@ sub add_medium {
push @{$urpm->{media}}, $medium;
$urpm->{log}(N("added medium %s", $name));
-
- #- we need to reload the config, since some string substitutions may have occured
- unless ($options{no_reload_config}) {
- $urpm->write_config;
- delete $urpm->{media};
- $urpm->read_config(nocheck_access => 1);
-
- #- need getting the fresh datastructure after read_config
- $medium = name2medium($urpm, $name); #- need getting the fresh datastructure after read_config
-
- #- Remember that the database has been modified and base files need to be updated.
- $medium->{modified} = 1;
- $urpm->{md5sum_modified} = 1;
- }
+ $urpm->{modified} = 1;
$options{nolock} or $urpm->unlock_urpmi_db;
$name;
@@ -884,7 +817,6 @@ sub add_medium {
#- add distribution media, according to url given.
#- returns the list of names of added media.
#- options :
-#- - limit_rate, compress : for downloading files
#- - initial_number : when adding several numbered media, start with this number
#- - probe_with : if eq 'synthesis', use synthesis instead of hdlists
#- - ask_media : callback to know whether each media should be added
@@ -963,7 +895,6 @@ sub add_distrib_media {
$distribconf->getpath($media, 'path'),
) . '/' . $distribconf->getpath($media, $options{probe_with} eq 'synthesis' ? 'synthesis' : 'hdlist'),
index_name => $name ? undef : 0,
- no_reload_config => 1, #- no need to reload config each time, since we don't update the media
%options,
# the following override %options
update => $is_update_media ? 1 : undef,
@@ -1048,7 +979,7 @@ sub remove_media {
#- return list of synthesis or hdlist reference to probe.
sub _probe_with_try_list {
my ($suffix, $probe_with) = @_;
- my @probe = (
+ my @probe_synthesis = (
"media_info/synthesis.hdlist.cz",
"../media_info/synthesis.hdlist_$suffix.cz",
"synthesis.hdlist.cz",
@@ -1058,12 +989,9 @@ sub _probe_with_try_list {
"../media_info/hdlist_$suffix.cz",
"hdlist.cz",
);
- if ($probe_with =~ /synthesis/) {
- push @probe, @probe_hdlist;
- } else {
- unshift @probe, @probe_hdlist;
- }
- @probe;
+ $probe_with =~ /synthesis/
+ ? (@probe_synthesis, @probe_hdlist)
+ : (@probe_hdlist, @probe_synthesis);
}
sub may_reconfig_urpmi {
@@ -1184,7 +1112,7 @@ sub _update_media__when_not_modified {
}
}
-sub _update_media__virtual {
+sub _parse_hdlist_or_synthesis__virtual {
my ($urpm, $medium, $with_hdlist_dir) = @_;
if ($medium->{with_hdlist} && -e $with_hdlist_dir) {
@@ -1211,6 +1139,7 @@ sub _update_media__virtual {
}
}
+#- names.<media_name> is used by external progs (namely for bash-completion)
sub generate_media_names {
my ($urpm) = @_;
@@ -1282,17 +1211,50 @@ sub _read_existing_synthesis_and_hdlist {
}
sub _parse_hdlist {
- my ($urpm, $medium, $hdlist_file, %args) = @_;
+ my ($urpm, $medium, $hdlist_file, $o_callback) = @_;
$urpm->{log}(N("examining hdlist file [%s]", $hdlist_file));
- ($medium->{start}, $medium->{end}) = $urpm->parse_hdlist($hdlist_file, packing => 1, %args);
+ ($medium->{start}, $medium->{end}) =
+ $urpm->parse_hdlist($hdlist_file, packing => 1, $o_callback ? (callback => $o_callback) : @{[]});
}
sub _parse_synthesis {
- my ($urpm, $medium, $synthesis_file, %args) = @_;
+ my ($urpm, $medium, $synthesis_file, $o_callback) = @_;
$urpm->{log}(N("examining synthesis file [%s]", $synthesis_file));
- ($medium->{start}, $medium->{end}) = $urpm->parse_synthesis($synthesis_file, %args);
+ ($medium->{start}, $medium->{end}) =
+ $urpm->parse_synthesis($synthesis_file, $o_callback ? (callback => $o_callback) : @{[]});
+}
+
+sub _build_hdlist_using_rpm_headers {
+ my ($urpm, $medium) = @_;
+
+ $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
+ #- finish building operation of hdlist.
+ $urpm->build_hdlist(start => $medium->{start},
+ end => $medium->{end},
+ dir => "$urpm->{cachedir}/headers",
+ hdlist => statedir_hdlist($urpm, $medium),
+ );
+}
+
+sub _build_synthesis {
+ my ($urpm, $medium) = @_;
+
+ eval { $urpm->build_synthesis(
+ start => $medium->{start},
+ end => $medium->{end},
+ synthesis => statedir_synthesis($urpm, $medium),
+ ) };
+ if ($@) {
+ $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
+ $urpm->{error}($@);
+ unlink statedir_synthesis($urpm, $medium);
+ } else {
+ $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
+ }
+ #- keep in mind we have a modified database, sure at this point.
+ $urpm->{md5sum_modified} = 1;
}
sub is_valid_medium {
@@ -1318,7 +1280,19 @@ sub db_open_or_die {
$db;
}
-sub _update_media__sync_file {
+sub _get_list_or_pubkey__local {
+ my ($urpm, $medium, $with_hdlist_dir, $name) = @_;
+
+ my $path = reduce_pathname("$with_hdlist_dir/../$name" . _hdlist_suffix($medium));
+ -e $path or $path = file_from_local_url($medium->{url}) . "/$name";
+ if (-e $path) {
+ copy_and_own($path, "$urpm->{cachedir}/partial/$name")
+ or $urpm->{error}(N("...copying failed")), return;
+ }
+ 1;
+}
+
+sub _get_list_or_pubkey__remote {
my ($urpm, $medium, $name, $options) = @_;
my $found;
@@ -1337,22 +1311,6 @@ sub _update_media__sync_file {
}
}
-sub recursive_find_rpm_files {
- my ($dir) = @_;
-
- my %f;
- local $_; #- help perl_checker not warning "undeclared $_" in wanted callback below
- File::Find::find(
- {
- wanted => sub { -f $_ && /\.rpm$/ and $f{"$File::Find::dir/$_"} = 1 },
- follow_skip => 2,
- follow_fast => 1,
- },
- $dir,
- );
- keys %f;
-}
-
sub clean_dir {
my ($dir) = @_;
@@ -1361,7 +1319,7 @@ sub clean_dir {
mkdir $dir, 0755;
}
-sub _update_medium__get_descriptions_local {
+sub get_descriptions_local {
my ($urpm, $medium) = @_;
unlink statedir_descriptions($urpm, $medium);
@@ -1379,7 +1337,7 @@ sub _update_medium__get_descriptions_local {
$medium->{ignore} = 1;
}
}
-sub _update_medium__get_descriptions_remote {
+sub get_descriptions_remote {
my ($urpm, $medium, $options) = @_;
unlink "$urpm->{cachedir}/partial/descriptions";
@@ -1397,9 +1355,31 @@ sub _update_medium__get_descriptions_remote {
urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
}
}
+sub get_hdlist_or_synthesis__local {
+ my ($urpm, $medium, $with_hdlist_dir, $callback) = @_;
-sub _update_medium_first_pass__local {
- my ($urpm, $medium, $second_pass, $clean_cache, $retrieved_md5sum, $rpm_files, $options) = @_;
+ unlink cachedir_hdlist($urpm, $medium);
+ $urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
+ $callback and $callback->('copy', $medium->{name});
+ if (copy_and_own($with_hdlist_dir, cachedir_hdlist($urpm, $medium))) {
+ $callback and $callback->('done', $medium->{name});
+ $urpm->{log}(N("...copying done"));
+ if (file_size(cachedir_hdlist($urpm, $medium)) <= 32) {
+ $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)", cachedir_hdlist($urpm, $medium)));
+ 0;
+ } else {
+ 1;
+ }
+ } else {
+ $callback and $callback->('failed', $medium->{name});
+ #- force error, reported afterwards
+ unlink cachedir_hdlist($urpm, $medium);
+ 0;
+ }
+}
+
+sub _update_medium__parse_if_unmodified__or_get_files__local {
+ my ($urpm, $medium, $second_pass, $clean_cache, $rpm_files, $options) = @_;
my $dir = file_from_local_url($medium->{url});
@@ -1411,7 +1391,7 @@ sub _update_medium_first_pass__local {
#- by mounting some other directory. Try to figure it out and mount
#- everything that might be necessary.
-d $dir or $urpm->try_mounting(
- $options->{force} < 2 && ($options->{probe_with} || $medium->{with_hdlist})
+ !$options->{force_building_hdlist} && ($options->{probe_with} || $medium->{with_hdlist})
? $with_hdlist_dir : $dir,
#- in case of an iso image, pass its name
is_iso($medium->{removable}) && $medium->{removable},
@@ -1422,9 +1402,12 @@ this could happen if you mounted manually the directory when creating the medium
#- it is already defined (and valid).
if ($options->{probe_with} && (!$medium->{with_hdlist} || ! -e "$dir/$medium->{with_hdlist}")) {
foreach (_probe_with_try_list(_guess_hdlist_suffix($dir), $options->{probe_with})) {
+ -e "$dir/$_" or next;
if (file_size("$dir/$_") > 32) {
$medium->{with_hdlist} = $_;
last;
+ } else {
+ $urpm->{error}(N("invalid hdlist file %s for medium \"%s\"", "$dir/$_", $medium->{name}));
}
}
#- redo...
@@ -1434,10 +1417,10 @@ this could happen if you mounted manually the directory when creating the medium
if ($medium->{virtual}) {
#- syncing a virtual medium is very simple, just try to read the file in order to
#- determine its type, once a with_hdlist has been found (but is mandatory).
- _update_media__virtual($urpm, $medium, $with_hdlist_dir);
+ _parse_hdlist_or_synthesis__virtual($urpm, $medium, $with_hdlist_dir);
}
- _update_medium__get_descriptions_local($urpm, $medium);
+ get_descriptions_local($urpm, $medium);
#- examine if a distant MD5SUM file is available.
#- this will only be done if $with_hdlist is not empty in order to use
@@ -1447,42 +1430,29 @@ this could happen if you mounted manually the directory when creating the medium
#- to be checked for being valid, nothing can be deduced if no MD5SUM
#- file is present.
- my $error;
+ my ($retrieved_md5sum, $error);
unless ($medium->{virtual}) {
if ($medium->{with_hdlist}) {
if (!$options->{nomd5sum} && file_size(reduce_pathname("$with_hdlist_dir/../MD5SUM")) > 32) {
recompute_local_md5sum($urpm, $medium, $options->{force});
if ($medium->{md5sum}) {
- $$retrieved_md5sum = parse_md5sum($urpm, reduce_pathname("$with_hdlist_dir/../MD5SUM"), basename($with_hdlist_dir));
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $$retrieved_md5sum)
+ $retrieved_md5sum = parse_md5sum($urpm, reduce_pathname("$with_hdlist_dir/../MD5SUM"), basename($with_hdlist_dir));
+ _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
and return 'unmodified';
}
}
#- if the source hdlist is present and we are not forcing using rpm files
- if ($options->{force} < 2 && -e $with_hdlist_dir) {
- unlink cachedir_hdlist($urpm, $medium);
- $urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $options->{callback} and $options->{callback}('copy', $medium->{name});
- if (copy_and_own($with_hdlist_dir, cachedir_hdlist($urpm, $medium))) {
- $options->{callback} and $options->{callback}('done', $medium->{name});
- $urpm->{log}(N("...copying done"));
- } else {
- $options->{callback} and $options->{callback}('failed', $medium->{name});
- #- force error, reported afterwards
- unlink cachedir_hdlist($urpm, $medium);
- }
+ if (!$options->{force_building_hdlist} && -e $with_hdlist_dir) {
+ get_hdlist_or_synthesis__local($urpm, $medium, $with_hdlist_dir, $options->{callback})
+ or $error = 1;
}
- file_size(cachedir_hdlist($urpm, $medium)) > 32 or
- $error = 1, $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)",
- cachedir_hdlist($urpm, $medium)));
-
#- keep checking md5sum of file just copied ! (especially on nfs or removable device).
- if (!$error && $$retrieved_md5sum) {
+ if (!$error && $retrieved_md5sum) {
$urpm->{log}(N("computing md5sum of copied source hdlist (or synthesis)"));
- md5sum(cachedir_hdlist($urpm, $medium)) eq $$retrieved_md5sum or
+ md5sum(cachedir_hdlist($urpm, $medium)) eq $retrieved_md5sum or
$error = 1, $urpm->{error}(N("copy of [%s] failed (md5sum mismatch)", $with_hdlist_dir));
}
@@ -1491,51 +1461,49 @@ this could happen if you mounted manually the directory when creating the medium
_read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $medium->{hdlist})
and return 'unmodified';
}
- } else {
- $error = 1;
- }
-
- #- if copying hdlist has failed, try to build it directly.
- if ($error) {
- if ($urpm->{options}{norebuild}) {
- $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
- $medium->{ignore} = 1;
- } else {
- $options->{force} < 2 and $options->{force} = 2;
- #- clear error state now.
- $error = undef;
- }
- }
- if ($options->{force} < 2) {
- #- examine if a local list file is available (always probed according to with_hdlist)
- #- and check hdlist wasn't named very strangely...
- if ($medium->{hdlist} ne 'list') {
- my $local_list = 'list' . _hdlist_suffix($medium);
- my $path_list = reduce_pathname("$with_hdlist_dir/../$local_list");
- -e $path_list or $path_list = "$dir/list";
- if (-e $path_list) {
- copy_and_own($path_list, "$urpm->{cachedir}/partial/list")
- or do { $urpm->{error}(N("...copying failed")); $error = 1 };
+ #- if copying hdlist has failed, try to build it directly.
+ if ($error) {
+ if ($urpm->{options}{'build-hdlist-on-error'}) {
+ $options->{force_building_hdlist} = 1;
+ #- clear error state now.
+ $error = undef;
+ } else {
+ $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
+ $medium->{ignore} = 1;
}
}
} else {
- push @$rpm_files, recursive_find_rpm_files($dir);
+ #- no available hdlist/synthesis, try to build it from rpms
+ $options->{force_building_hdlist} = 1;
+ }
+
+ if (!$error && $options->{force_building_hdlist}) {
+ push @$rpm_files, glob("$dir/*.rpm");
#- check files contains something good!
if (@$rpm_files > 0) {
#- we need to rebuild from rpm files the hdlist.
+
+ $urpm->{log}(N("reading rpm files from [%s]", $dir));
+ my @unresolved_before = grep {
+ ! defined $urpm->{provides}{$_};
+ } keys %{$urpm->{provides} || {}};
+ $medium->{start} = @{$urpm->{depslist}};
+
eval {
- $urpm->{log}(N("reading rpm files from [%s]", $dir));
- my @unresolved_before = grep {
- ! defined $urpm->{provides}{$_};
- } keys %{$urpm->{provides} || {}};
- $medium->{start} = @{$urpm->{depslist}};
$medium->{headers} = [ $urpm->parse_rpms_build_headers(
dir => "$urpm->{cachedir}/headers",
rpms => $rpm_files,
clean => $$clean_cache,
+ packing => 1,
) ];
+ };
+ if ($@) {
+ $error = 1;
+ $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
+ delete $medium->{headers}; #- do not propagate these.
+ } else {
$medium->{end} = $#{$urpm->{depslist}};
if ($medium->{start} > $medium->{end}) {
#- an error occured (provided there are files in input.)
@@ -1550,10 +1518,8 @@ this could happen if you mounted manually the directory when creating the medium
} keys %{$urpm->{provides} || {}};
@unresolved_before == @unresolved or $$second_pass = 1;
}
- };
- $@ and $error = 1, $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
- $error and delete $medium->{headers}; #- do not propagate these.
- $error or delete $medium->{synthesis}; #- when building hdlist by ourself, drop synthesis property.
+ delete $medium->{synthesis}; #- when building hdlist by ourself, drop synthesis property.
+ }
} else {
$error = 1;
$urpm->{error}(N("no rpm files found from [%s]", $dir));
@@ -1563,23 +1529,18 @@ this could happen if you mounted manually the directory when creating the medium
}
#- examine if a local pubkey file is available.
- if (!$options->{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
- my $path_pubkey = reduce_pathname("$with_hdlist_dir/../pubkey" . _hdlist_suffix($medium));
- -e $path_pubkey or $path_pubkey = "$dir/pubkey";
- if ($path_pubkey) {
- copy_and_own($path_pubkey, "$urpm->{cachedir}/partial/pubkey")
- or do { $urpm->{error}(N("...copying failed")) };
- }
+ if (!$options->{nopubkey} && !$medium->{'key-ids'}) {
+ _get_list_or_pubkey__local($urpm, $medium, $with_hdlist_dir, 'pubkey');
}
- $error;
+ ($error, $retrieved_md5sum);
}
-sub _update_medium_first_pass__remote {
- my ($urpm, $medium, $retrieved_md5sum, $options) = @_;
- my ($error, $basename);
+sub _update_medium__parse_if_unmodified__or_get_files__remote {
+ my ($urpm, $medium, $options) = @_;
+ my ($error, $retrieved_md5sum, $basename);
- _update_medium__get_descriptions_remote($urpm, $medium, $options);
+ get_descriptions_remote($urpm, $medium, $options);
#- examine if a distant MD5SUM file is available.
#- this will only be done if $with_hdlist is not empty in order to use
@@ -1598,8 +1559,8 @@ sub _update_medium_first_pass__remote {
$options, quiet => 1) && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
recompute_local_md5sum($urpm, $medium, $options->{force} >= 2);
if ($medium->{md5sum}) {
- $$retrieved_md5sum = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $$retrieved_md5sum)
+ $retrieved_md5sum = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
+ _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
and return 'unmodified';
}
} else {
@@ -1654,9 +1615,9 @@ sub _update_medium_first_pass__remote {
}
#- check downloaded file has right signature.
- if (file_size("$urpm->{cachedir}/partial/$basename") > 32 && $$retrieved_md5sum) {
+ if (file_size("$urpm->{cachedir}/partial/$basename") > 32 && $retrieved_md5sum) {
$urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
- unless (md5sum("$urpm->{cachedir}/partial/$basename") eq $$retrieved_md5sum) {
+ unless (md5sum("$urpm->{cachedir}/partial/$basename") eq $retrieved_md5sum) {
$urpm->{error}(N("...retrieving failed: md5sum mismatch"));
unlink "$urpm->{cachedir}/partial/$basename";
}
@@ -1673,23 +1634,16 @@ sub _update_medium_first_pass__remote {
#- the files are different, update local copy.
rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
- #- retrieval of hdlist or synthesis has been successful,
- #- check whether a list file is available.
- #- and check hdlist wasn't named very strangely...
- if ($medium->{hdlist} ne 'list') {
- _update_media__sync_file($urpm, $medium, 'list', $options);
- }
-
#- retrieve pubkey file.
- if (!$options->{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
- _update_media__sync_file($urpm, $medium, 'pubkey', $options);
+ if (!$options->{nopubkey} && !$medium->{'key-ids'}) {
+ _get_list_or_pubkey__remote($urpm, $medium, 'pubkey', $options);
}
} else {
$error = 1;
$options->{callback} and $options->{callback}('failed', $medium->{name});
$urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
}
- $error;
+ ($error, $retrieved_md5sum);
}
sub _read_cachedir_pubkey {
@@ -1717,24 +1671,34 @@ sub _read_cachedir_pubkey {
}
}
-sub _update_medium_first_pass {
- my ($urpm, $medium, $second_pass, $clean_cache, %options) = @_;
+sub _write_rpm_list_if_needed {
+ my ($urpm, $medium, $rpm_list) = @_;
- $medium->{ignore} and return;
+ if (@$rpm_list) {
+ if (!$medium->{list}) {
+ $urpm->{error}("{list} is not set, please report bug");
+ return;
+ }
+ #- write list file.
+ $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
+ my $listfh = $urpm->open_safe('>', cachedir_list($urpm, $medium)) or return;
+ print $listfh basename($_), "\n" foreach @$rpm_list;
+ } else {
+ #- the flag is no longer necessary.
+ if ($medium->{list}) {
+ unlink statedir_list($urpm, $medium);
+ delete $medium->{list};
+ }
+ }
+ 1;
+}
- $options{forcekey} and delete $medium->{'key-ids'};
+sub _update_medium_first_pass {
+ my ($urpm, $medium, $second_pass, $clean_cache, %options) = @_;
#- we should create the associated synthesis file if it does not already exist...
file_size(statedir_synthesis($urpm, $medium)) > 32
- or $medium->{modified_synthesis} = 1;
-
- if ($medium->{static}) {
- #- don't ever update static media
- $medium->{modified} = 0;
- } elsif ($options{all}) {
- #- if we're rebuilding all media, mark them as modified (except removable ones)
- $medium->{modified} ||= $medium->{url} !~ m!^removable!;
- }
+ or $medium->{must_build_synthesis} = 1;
unless ($medium->{modified}) {
#- the medium is not modified, but to compute dependencies,
@@ -1757,18 +1721,14 @@ sub _update_medium_first_pass {
#- list of rpm files for this medium, only available for local medium where
#- the source hdlist is not used (use force).
- my ($error, $retrieved_md5sum, @files);
+ my ($error, $retrieved_md5sum, @rpm_files);
+
+ {
+ (my $rc, $retrieved_md5sum) =
+ file_from_local_url($medium->{url})
+ ? _update_medium__parse_if_unmodified__or_get_files__local($urpm, $medium, $second_pass, $clean_cache, \@rpm_files, \%options)
+ : _update_medium__parse_if_unmodified__or_get_files__remote($urpm, $medium, \%options);
- #- check if the medium is using a local or a removable medium.
- if (file_from_local_url($medium->{url})) {
- my $rc = _update_medium_first_pass__local($urpm, $medium, $second_pass, $clean_cache, \$retrieved_md5sum, \@files, \%options);
- if ($rc eq 'unmodified') {
- return;
- } else {
- $error = $rc;
- }
- } else {
- my $rc = _update_medium_first_pass__remote($urpm, $medium, \$retrieved_md5sum, \%options);
if ($rc eq 'unmodified') {
return;
} else {
@@ -1777,25 +1737,13 @@ sub _update_medium_first_pass {
}
#- build list file according to hdlist.
- unless ($medium->{headers} || file_size(cachedir_hdlist($urpm, $medium)) > 32) {
+ if (!$medium->{headers} && !$medium->{virtual} && file_size(cachedir_hdlist($urpm, $medium)) <= 32) {
$error = 1;
$urpm->{error}(N("no hdlist file found for medium \"%s\"", $medium->{name}));
}
- unless ($error || $medium->{virtual}) {
- #- sort list file contents according to id.
- my %list;
- if ($medium->{headers}) {
- my $protocol = protocol_from_url($medium->{url});
-
- #- rpm files have already been read (first pass), there is just a need to
- #- build list hash.
- foreach (@files) {
- m|/([^/]*\.rpm)$| or next;
- $list{$1} and $urpm->{error}(N("file [%s] already used in the same medium \"%s\"", $1, $medium->{name})), next;
- $list{$1} = "$protocol:/$_\n";
- }
- } else {
+ if (!$error && !$medium->{virtual}) {
+ if (!$medium->{headers}) {
#- read first pass hdlist or synthesis, try to open as synthesis, if file
#- is larger than 1MB, this is probably an hdlist else a synthesis.
#- anyway, if one tries fails, try another mode.
@@ -1828,48 +1776,17 @@ sub _update_medium_first_pass {
my @unresolved_after = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
@unresolved_before == @unresolved_after or $$second_pass = 1;
- if ($medium->{hdlist} ne 'list' && -s "$urpm->{cachedir}/partial/list") {
- if (open(my $fh, '<', "$urpm->{cachedir}/partial/list")) {
- local $_;
- while (<$fh>) {
- m|/([^/]*\.rpm)$| or next;
- $list{$1} and $urpm->{error}(N("file [%s] already used in the same medium \"%s\"", $1, $medium->{name})), next;
- $list{$1} = "$medium->{url}/$_";
- }
- }
- }
+ @rpm_files = grep { /\.rpm$/ } map { chomp; $_ } cat_("$urpm->{cachedir}/partial/list");
}
}
unless ($error) {
- if (keys %list) {
- #- write list file.
- #- make sure group and other do not have any access to this file, used to hide passwords.
- if ($medium->{list}) {
- my $mask = umask 077;
- my $listfh = $urpm->open_safe('>', cachedir_list($urpm, $medium)) or $error = 1;
- umask $mask;
- print $listfh values %list;
- }
-
- #- check if at least something has been written into list file.
- if ($medium->{list} && -s cachedir_list($urpm, $medium)) {
- $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
- } else {
- $error = 1, $urpm->{error}(N("nothing written in list file for \"%s\"", $medium->{name}));
- }
- } else {
- #- the flag is no longer necessary.
- if ($medium->{list}) {
- unlink statedir_list($urpm, $medium);
- delete $medium->{list};
- }
- }
+ _write_rpm_list_if_needed($urpm, $medium, \@rpm_files) or
+ $error = 1;
}
}
unless ($error) {
- #- now... on pubkey
_read_cachedir_pubkey($urpm, $medium);
}
@@ -1904,7 +1821,7 @@ sub _update_medium_first_pass {
$medium->{md5sum} = $retrieved_md5sum; #- anyway, keep it, the previous one is no longer useful.
#- and create synthesis file associated.
- $medium->{modified_synthesis} = !$medium->{synthesis};
+ $medium->{must_build_synthesis} = !$medium->{synthesis};
}
}
}
@@ -1913,8 +1830,6 @@ sub _update_medium_first_pass {
sub _update_medium_second_pass {
my ($urpm, $medium, $second_pass, $callback) = @_;
- $medium->{ignore} and return;
-
$callback and $callback->('parse', $medium->{name});
#- a modified medium is an invalid medium, we have to read back the previous hdlist
#- or synthesis which has not been modified by first pass above.
@@ -1925,29 +1840,9 @@ sub _update_medium_second_pass {
headers => $medium->{headers},
);
}
- $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
- #- finish building operation of hdlist.
- $urpm->build_hdlist(start => $medium->{start},
- end => $medium->{end},
- dir => "$urpm->{cachedir}/headers",
- hdlist => statedir_hdlist($urpm, $medium),
- );
+ _build_hdlist_using_rpm_headers($urpm, $medium);
#- synthesis needs to be created, since the medium has been built from rpm files.
- eval { $urpm->build_synthesis(
- start => $medium->{start},
- end => $medium->{end},
- synthesis => statedir_synthesis($urpm, $medium),
- ) };
- if ($@) {
- #- XXX this happens when building a synthesis for a local media from RPMs... why ?
- $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
- $urpm->{error}($@);
- unlink statedir_synthesis($urpm, $medium);
- } else {
- $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
- }
- #- keep in mind we have a modified database, sure at this point.
- $urpm->{md5sum_modified} = 1;
+ _build_synthesis($urpm, $medium);
} elsif ($medium->{synthesis}) {
if ($second_pass) {
if ($medium->{virtual}) {
@@ -1963,23 +1858,8 @@ sub _update_medium_second_pass {
_parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
}
#- check if the synthesis file can be built.
- if (($second_pass || $medium->{modified_synthesis}) && !$medium->{modified}) {
- unless ($medium->{virtual}) {
- eval { $urpm->build_synthesis(
- start => $medium->{start},
- end => $medium->{end},
- synthesis => statedir_synthesis($urpm, $medium),
- ) };
- if ($@) {
- $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
- $urpm->{error}($@);
- unlink statedir_synthesis($urpm, $medium);
- } else {
- $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
- }
- }
- #- keep in mind we have modified database, sure at this point.
- $urpm->{md5sum_modified} = 1;
+ if (($second_pass || $medium->{must_build_synthesis}) && !$medium->{modified} && !$medium->{virtual}) {
+ _build_synthesis($urpm, $medium);
}
}
$callback && $callback->('done', $medium->{name});
@@ -1988,44 +1868,54 @@ sub _update_medium_second_pass {
sub remove_obsolete_headers_in_cache {
my ($urpm) = @_;
my %headers;
- my $dh = $urpm->opendir_safe("$urpm->{cachedir}/headers");
- if ($dh) {
+ if (my $dh = $urpm->opendir_safe("$urpm->{cachedir}/headers")) {
local $_;
while (defined($_ = readdir $dh)) {
m|^([^/]*-[^-]*-[^-]*\.[^\.]*)(?::\S*)?$| and $headers{$1} = $_;
}
- closedir $dh;
}
if (%headers) {
- $urpm->{log}(N("found %d headers in cache", scalar(keys %headers)));
+ my $previous_total = scalar(keys %headers);
foreach (@{$urpm->{depslist}}) {
delete $headers{$_->fullname};
}
- $urpm->{log}(N("removing %d obsolete headers in cache", scalar(keys %headers)));
+ $urpm->{log}(N("found %d rpm headers in cache, removing %d obsolete headers", $previous_total, scalar(keys %headers)));
foreach (values %headers) {
unlink "$urpm->{cachedir}/headers/$_";
}
}
}
+sub _update_media__handle_some_flags {
+ my ($urpm, $options) = @_;
+
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
+ $options->{forcekey} and delete $medium->{'key-ids'};
+
+ if ($medium->{static}) {
+ #- don't ever update static media
+ $medium->{modified} = 0;
+ } elsif ($options->{all}) {
+ #- if we're rebuilding all media, mark them as modified (except removable ones)
+ $medium->{modified} ||= $medium->{url} !~ m!^removable!;
+ }
+ }
+}
+
#- Update the urpmi database w.r.t. the current configuration.
#- Takes care of modifications, and tries some tricks to bypass
#- the recomputation of base files.
#- Recognized options :
#- all : all medias are being rebuilt
#- callback : UI callback
-#- compress : use compressed download (for rsync)
#- forcekey : force retrieval of pubkey
#- force : try to force rebuilding base files (1) or hdlist from rpm files (2)
-#- limit_rate : download limit rate
#- noclean : keep old files in the header cache directory
#- nolock : don't lock the urpmi database
#- nomd5sum : don't verify MD5SUM of retrieved files
#- nopubkey : don't use rpm pubkeys
-#- norebuild : don't try to rebuild hdlists from rpm headers
#- probe_with : probe synthesis or hdlist (or none)
#- quiet : download hdlists quietly
-#- ratio : use this compression ratio (with gzip, default is 4)
sub update_media {
my ($urpm, %options) = @_;
@@ -2045,9 +1935,11 @@ sub update_media {
#- hdlist file, else build it from rpm files.
$urpm->clean;
+ _update_media__handle_some_flags($urpm, \%options);
+
my $clean_cache = !$options{noclean};
my $second_pass;
- foreach my $medium (@{$urpm->{media}}) {
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
_update_medium_first_pass($urpm, $medium, \$second_pass, \$clean_cache, %options);
}
@@ -2059,7 +1951,7 @@ sub update_media {
}
#- second pass consists in reading again synthesis or hdlists.
- foreach my $medium (@{$urpm->{media}}) {
+ foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
_update_medium_second_pass($urpm, $medium, $second_pass, $options{callback});
}
@@ -2224,6 +2116,12 @@ sub _findindeps {
}
#- search packages registered by their names by storing their ids into the $packages hash.
+#- Recognized options:
+#- all
+#- caseinsensitive
+#- fuzzy
+#- src
+#- use_provides
sub search_packages {
my ($urpm, $packages, $names, %options) = @_;
my (%exact, %exact_a, %exact_ra, %found, %foundi);
@@ -2305,7 +2203,7 @@ sub search_packages {
push @{$l{$pkg->name}}, $pkg;
}
if (values(%l) == 0 || values(%l) > 1 && !$options{all}) {
- $urpm->{error}(N("no package named %s", $_));
+ $urpm->{error}(N("No package named %s", $_));
values(%l) != 0 and $urpm->{error}(
N("The following packages contain %s: %s",
$_, "\n" . join("\n", sort { $a cmp $b } keys %l))
@@ -2545,7 +2443,7 @@ sub get_source_packages {
local $_;
while (<$fh>) {
chomp;
- if (my ($filename) = m|/([^/]*\.rpm)$|) {
+ if (my ($filename) = m!([^/]*\.rpm)$!) {
if (keys(%{$file2fullnames{$filename} || {}}) > 1) {
$urpm->{error}(N("there are multiple packages with the same rpm filename \"%s\"", $filename));
next;
@@ -2553,7 +2451,7 @@ sub get_source_packages {
my ($fullname) = keys(%{$file2fullnames{$filename} || {}});
if (defined(my $id = $fullname2id{$fullname})) {
if (!/\.delta\.rpm$/ || $urpm->is_delta_installable($urpm->{depslist}[$id], $options{root})) {
- $sources{$id} = $medium->{virtual} ? "$medium->{url}/$_" : $_;
+ $sources{$id} = "$medium->{url}/$filename";
}
}
$list_examined{$fullname} = $examined{$fullname} = undef;
@@ -2701,6 +2599,8 @@ sub unlock_urpmi_db {
_unlock(\$LOCK_FILE);
}
+#- $list is a [ { pkg_id1 => url1, ... }, { ... }, ... ]
+#- where there is one hash for each medium in {media}
sub copy_packages_of_removable_media {
my ($urpm, $list, $sources, %options) = @_;
my %removables;
@@ -2855,7 +2755,7 @@ sub download_packages_of_distant_media {
if (%distant_sources) {
$urpm->{log}(N("retrieving rpm files from medium \"%s\"...", $urpm->{media}[$n]{name}));
if (sync_webfetch($urpm, $urpm->{media}[$n], [ values %distant_sources ],
- \%options, resume => $options{resume}, callback => $options{callback})) {
+ \%options, resume => $urpm->{options}{resume}, callback => $options{callback})) {
$urpm->{log}(N("...retrieving done"));
} else {
$urpm->{error}(N("...retrieving failed: %s", $@));
@@ -2967,43 +2867,6 @@ sub install {
my ($urpm, $remove, $install, $upgrade, %options) = @_;
my %readmes;
- #- allow process to be forked now.
- my $pid;
- my ($CHILD_RETURNS, $ERROR_OUTPUT);
- if ($options{fork}) {
- pipe($CHILD_RETURNS, $ERROR_OUTPUT);
- defined($pid = fork()) or die "Can't fork: $!\n";
- if ($pid) {
- # parent process
- close $ERROR_OUTPUT;
-
- $urpm->{log}(N("using process %d for executing transaction", $pid));
- #- now get all errors from the child and return them directly.
- my @l;
- local $_;
- while (<$CHILD_RETURNS>) {
- chomp;
- if (/^::logger_id:(\d*):(\d*)/) {
- $urpm->{logger_id} = $1;
- $urpm->{logger_count} = $2 if $2;
- } else {
- push @l, $_;
- }
- }
-
- close $CHILD_RETURNS;
- waitpid($pid, 0);
- #- take care of return code from transaction, an error should be returned directly.
- $? >> 8 and exit $? >> 8;
-
- return @l;
- } else {
- # child process
- close $CHILD_RETURNS;
- }
- }
- #- beware this can be a child process or the main process now...
-
my $db = db_open_or_die($urpm, $urpm->{root}, !$options{test}); #- open in read/write mode unless testing installation.
my $trans = $db->create_transaction($urpm->{root});
@@ -3083,28 +2946,14 @@ sub install {
}
unlink @produced_deltas;
- #- now exit or return according to current status.
- if (defined $pid && !$pid) { #- child process
- print $ERROR_OUTPUT "::logger_id:$urpm->{logger_id}:$urpm->{logger_count}\n"; #- allow main urpmi to know transaction numbering...
- print $ERROR_OUTPUT "$_\n" foreach @l;
- close $ERROR_OUTPUT;
- #- keep safe exit now (with destructor call).
- exit 0;
- } else {
- #- when non-forking
- # !!! BUG: this part of the code is not called when forking !!!
- # !!! BUG: when forking %readmes is empty, since the child is not passing this information to its parent !!!
- if ($::verbose >= 0 && keys %readmes) {
- foreach (keys %readmes) {
- print "-" x 70, "\n", N("More information on package %s", $readmes{$_}), "\n";
- if (open(my $fh, '<', $_)) {
- print while <$fh>;
- }
- print "-" x 70, "\n";
- }
+ if ($::verbose >= 0) {
+ foreach (keys %readmes) {
+ print "-" x 70, "\n", N("More information on package %s", $readmes{$_}), "\n";
+ print cat_($_);
+ print "-" x 70, "\n";
}
- return @l;
}
+ @l;
}
#- install all files to node as remembered according to resolving done.
@@ -3339,12 +3188,15 @@ sub translate_why_removed_one {
sub check_sources_signatures {
my ($urpm, $sources_install, $sources, %options) = @_;
+ sort(_check_sources_signatures($urpm, $sources_install, %options),
+ _check_sources_signatures($urpm, $sources, %options));
+}
+sub _check_sources_signatures {
+ my ($urpm, $sources, %options) = @_;
my ($medium, %invalid_sources);
- my $s = $sources_install;
- foreach my $id (keys %$sources_install, -1, keys %$sources) {
- if ($id == -1) { $s = $sources; next }
- my $filepath = $s->{$id};
+ foreach my $id (keys %$sources) {
+ my $filepath = $sources->{$id};
my $verif = URPM::verify_signature($filepath);
if ($verif =~ /NOT OK/) {
@@ -3394,9 +3246,8 @@ sub check_sources_signatures {
);
}
}
-
map { ($options{basename} ? basename($_) : $_) . ($options{translate} ? ": $invalid_sources{$_}" : "") }
- sort keys %invalid_sources;
+ keys %invalid_sources;
}
#- get reason of update for packages to be updated