author     Olivier Blin <oblin@mandriva.com>  2007-03-15 23:11:06 +0000
committer  Olivier Blin <oblin@mandriva.com>  2007-03-15 23:11:06 +0000
commit     7194335ee9ccc6bc1d1a69d013eef55d20927037 (patch)
tree       20006a13613a48398b91dc10abf2785264f80068
parent     876f57a42dbff38925853104fe10ca08fe32e47f (diff)
move RFC/API/protocol doc in a dhcp-doc sub-package
-rw-r--r--  urpm.pm      | 338
-rw-r--r--  urpm/util.pm |  28
2 files changed, 196 insertions(+), 170 deletions(-)
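
The recurring change in urpm.pm below replaces each urpm::util::copy(...) call followed by an explicit chown 0, 0 with a single copy_and_own(...) call (the helper is already exported by urpm/util.pm, as the second diff shows). A hypothetical sketch of what such a helper does, inferred from the copy-then-chown pattern it replaces; the actual body is not part of this diff:

    # sketch only: copy a file, then make root own the destination,
    # matching the urpm::util::copy + chown 0, 0 pattern removed below
    sub copy_and_own {
        my ($file, $dest) = @_;
        copy($file, $dest) or return 0;  # urpm::util::copy, i.e. /bin/cp -p -L -R
        chown 0, 0, $dest;               # state/cache files belong to root
        1;
    }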
diff --git a/urpm.pm b/urpm.pm
index 1e09de18..05b1091f 100644
--- a/urpm.pm
+++ b/urpm.pm
@@ -1313,6 +1313,52 @@ sub clean_dir {
mkdir $dir, 0755;
}
+sub _update_medium__get_descriptions_local {
+ my ($urpm, $medium) = @_;
+
+ unlink statedir_descriptions($urpm, $medium);
+
+ my $dir = file_from_local_url($medium->{url});
+ my $description_file = "$dir/media_info/descriptions"; #- new default location
+ -e $description_file or $description_file = "$dir/../descriptions";
+ -e $description_file or return;
+
+ $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name}));
+ if (copy_and_own($description_file, statedir_descriptions($urpm, $medium))) {
+ $urpm->{log}(N("...copying done"));
+ } else {
+ $urpm->{error}(N("...copying failed"));
+ $medium->{ignore} = 1;
+ }
+}
+
+sub _update_medium__get_descriptions_remote {
+ my ($urpm, $medium, $options) = @_;
+
+ unlink "$urpm->{cachedir}/partial/descriptions";
+
+ if (-e statedir_descriptions($urpm, $medium)) {
+ urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions");
+ }
+ eval {
+ sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ],
+ $options, quiet => 1);
+ };
+ #- It is possible that the original fetch of the descriptions
+ #- failed, but the file still remains in partial/ because it was
+ #- moved from $urpm->{statedir} earlier. So we need to check if
+ #- the previous download failed.
+ if ($@ || ! -e "$urpm->{cachedir}/partial/descriptions") {
+ eval {
+ #- try older location
+ sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ],
+ $options, quiet => 1);
+ };
+ }
+ if (-e "$urpm->{cachedir}/partial/descriptions") {
+ urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
+ }
+}
+
sub _update_medium_first_pass__local {
my ($urpm, $medium, $second_pass, $clean_cache, $retrieved_md5sum, $rpm_files, $options) = @_;
@@ -1351,17 +1397,8 @@ this could happen if you mounted manually the directory when creating the medium
#- determine its type, once a with_hdlist has been found (but is mandatory).
_update_media__virtual($urpm, $medium, $with_hdlist_dir);
}
- #- try to get the description if it has been found.
- unlink statedir_descriptions($urpm, $medium);
- my $description_file = "$dir/media_info/descriptions"; #- new default location
- -e $description_file or $description_file = "$dir/../descriptions";
- if (-e $description_file) {
- $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name}));
- urpm::util::copy($description_file, statedir_descriptions($urpm, $medium))
- ? $urpm->{log}(N("...copying done"))
- : do { $urpm->{error}(N("...copying failed")); $medium->{ignore} = 1 };
- chown 0, 0, statedir_descriptions($urpm, $medium);
- }
+
+ _update_medium__get_descriptions_local($urpm, $medium);
#- examine if a distant MD5SUM file is available.
#- this will only be done if $with_hdlist is not empty in order to use
@@ -1389,10 +1426,9 @@ this could happen if you mounted manually the directory when creating the medium
unlink cachedir_hdlist($urpm, $medium);
$urpm->{log}(N("copying source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
$options->{callback} and $options->{callback}('copy', $medium->{name});
- if (urpm::util::copy($with_hdlist_dir, cachedir_hdlist($urpm, $medium))) {
+ if (copy_and_own($with_hdlist_dir, cachedir_hdlist($urpm, $medium))) {
$options->{callback} and $options->{callback}('done', $medium->{name});
$urpm->{log}(N("...copying done"));
- chown 0, 0, cachedir_hdlist($urpm, $medium);
} else {
$options->{callback} and $options->{callback}('failed', $medium->{name});
#- force error, reported afterwards
@@ -1440,9 +1476,8 @@ this could happen if you mounted manually the directory when creating the medium
my $path_list = reduce_pathname("$with_hdlist_dir/../$local_list");
-e $path_list or $path_list = "$dir/list";
if (-e $path_list) {
- urpm::util::copy($path_list, "$urpm->{cachedir}/partial/list")
+ copy_and_own($path_list, "$urpm->{cachedir}/partial/list")
or do { $urpm->{error}(N("...copying failed")); $error = 1 };
- chown 0, 0, "$urpm->{cachedir}/partial/list";
}
}
} else {
@@ -1493,15 +1528,139 @@ this could happen if you mounted manually the directory when creating the medium
my $path_pubkey = reduce_pathname("$with_hdlist_dir/../pubkey" . _hdlist_suffix($medium));
-e $path_pubkey or $path_pubkey = "$dir/pubkey";
if ($path_pubkey) {
- urpm::util::copy($path_pubkey, "$urpm->{cachedir}/partial/pubkey")
+ copy_and_own($path_pubkey, "$urpm->{cachedir}/partial/pubkey")
or do { $urpm->{error}(N("...copying failed")) };
}
- chown 0, 0, "$urpm->{cachedir}/partial/pubkey";
}
$error;
}
+
+sub _update_medium_first_pass__remote {
+ my ($urpm, $medium, $retrieved_md5sum, $options) = @_;
+ my ($error, $basename);
+
+ _update_medium__get_descriptions_remote($urpm, $medium, $options);
+
+ #- examine if a distant MD5SUM file is available.
+ #- this will only be done if $with_hdlist is not empty in order to use
+ #- an existing hdlist or synthesis file, and to check if download was good.
+ #- if no MD5SUM is available, do it as before...
+ if ($medium->{with_hdlist}) {
+ #- we can assume at this point a basename is existing, but it needs
+ #- to be checked for being valid, nothing can be deduced if no MD5SUM
+ #- file is present.
+ $basename = basename($medium->{with_hdlist});
+
+ unlink "$urpm->{cachedir}/partial/MD5SUM";
+ eval {
+ if (!$options->{nomd5sum}) {
+ sync_webfetch($urpm, $medium,
+ [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}/../MD5SUM") ],
+ $options, quiet => 1);
+ }
+ };
+ if (!$@ && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
+ recompute_local_md5sum($urpm, $medium, $options->{force} >= 2);
+ if ($medium->{md5sum}) {
+ $$retrieved_md5sum = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
+ _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $$retrieved_md5sum)
+ and return 'unmodified';
+ }
+ } else {
+            #- at this point, we don't know if a basename exists and is valid; let's probe it later.
+ $basename = undef;
+ }
+ }
+
+ #- try to probe for possible with_hdlist parameter, unless
+ #- it is already defined (and valid).
+ $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
+ $options->{callback} and $options->{callback}('retrieve', $medium->{name});
+ if ($options->{probe_with}) {
+ my @probe_list = (
+ $medium->{with_hdlist}
+ ? $medium->{with_hdlist}
+ : _probe_with_try_list(_guess_hdlist_suffix($medium->{url}), $options->{probe_with})
+ );
+ foreach my $with_hdlist (@probe_list) {
+ $basename = basename($with_hdlist) or next;
+ $options->{force} and unlink "$urpm->{cachedir}/partial/$basename";
+ eval {
+ sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$with_hdlist") ],
+ $options, callback => $options->{callback});
+ };
+ if (!$@ && file_size("$urpm->{cachedir}/partial/$basename") > 32) {
+ $medium->{with_hdlist} = $with_hdlist;
+ $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $medium->{with_hdlist}));
+ last; #- found a suitable with_hdlist in the list above.
+ }
+ }
+ } else {
+ $basename = basename($medium->{with_hdlist});
+
+ if ($options->{force}) {
+ unlink "$urpm->{cachedir}/partial/$basename";
+ } else {
+            #- try to sync (copy if needed) the local copy after restoring the previous one.
+            #- this is useful for rsync (?)
+ if (-e statedir_hdlist_or_synthesis($urpm, $medium)) {
+ copy_and_own(
+ statedir_hdlist_or_synthesis($urpm, $medium),
+ "$urpm->{cachedir}/partial/$basename",
+ ) or $urpm->{error}(N("...copying failed")), $error = 1;
+ }
+ }
+ eval {
+ sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}") ],
+ $options, callback => $options->{callback});
+ };
+ if ($@) {
+ $urpm->{error}(N("...retrieving failed: %s", $@));
+ unlink "$urpm->{cachedir}/partial/$basename";
+ }
+ }
+
+ #- check downloaded file has right signature.
+ if (file_size("$urpm->{cachedir}/partial/$basename") > 32 && $$retrieved_md5sum) {
+ $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
+ unless (md5sum("$urpm->{cachedir}/partial/$basename") eq $$retrieved_md5sum) {
+ $urpm->{error}(N("...retrieving failed: md5sum mismatch"));
+ unlink "$urpm->{cachedir}/partial/$basename";
+ }
+ }
+
+ if (file_size("$urpm->{cachedir}/partial/$basename") > 32) {
+ $options->{callback} and $options->{callback}('done', $medium->{name});
+ $urpm->{log}(N("...retrieving done"));
+
+ unless ($options->{force}) {
+ _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $basename)
+ and return 'unmodified';
+ }
+
+ #- the files are different, update local copy.
+ rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
+
+ #- retrieval of hdlist or synthesis has been successful,
+ #- check whether a list file is available.
+ #- and check hdlist wasn't named very strangely...
+ if ($medium->{hdlist} ne 'list') {
+ _update_media__sync_file($urpm, $medium, 'list', $options);
+ }
+
+ #- retrieve pubkey file.
+ if (!$options->{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
+ _update_media__sync_file($urpm, $medium, 'pubkey', $options);
+ }
+ } else {
+ $error = 1;
+ $options->{callback} and $options->{callback}('failed', $medium->{name});
+ $urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
+ }
+ $error;
+}
+
sub _read_cachedir_pubkey {
my ($urpm, $medium) = @_;
-s "$urpm->{cachedir}/partial/pubkey" or return;
@@ -1578,148 +1737,11 @@ sub _update_medium_first_pass {
$error = $rc;
}
} else {
- my $basename;
-
- #- try to get the description if it has been found.
- unlink "$urpm->{cachedir}/partial/descriptions";
- if (-e statedir_descriptions($urpm, $medium)) {
- urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions");
- }
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ],
- \%options, quiet => 1);
- };
- #- It is possible that the original fetch of the descriptions
- #- failed, but the file still remains in partial/ because it was
- #- moved from $urpm->{statedir} earlier. So we need to check if
- #- the previous download failed.
- if ($@ || ! -e "$urpm->{cachedir}/partial/descriptions") {
- eval {
- #- try older location
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ],
- \%options, quiet => 1);
- };
- }
- if (-e "$urpm->{cachedir}/partial/descriptions") {
- urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium));
- }
-
- #- examine if a distant MD5SUM file is available.
- #- this will only be done if $with_hdlist is not empty in order to use
- #- an existing hdlist or synthesis file, and to check if download was good.
- #- if no MD5SUM is available, do it as before...
- if ($medium->{with_hdlist}) {
- #- we can assume at this point a basename is existing, but it needs
- #- to be checked for being valid, nothing can be deduced if no MD5SUM
- #- file is present.
- $basename = basename($medium->{with_hdlist});
-
- unlink "$urpm->{cachedir}/partial/MD5SUM";
- eval {
- if (!$options{nomd5sum}) {
- sync_webfetch($urpm, $medium,
- [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}/../MD5SUM") ],
- \%options, quiet => 1);
- }
- };
- if (!$@ && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
- recompute_local_md5sum($urpm, $medium, $options{force} >= 2);
- if ($medium->{md5sum}) {
- $retrieved_md5sum = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
- _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
- and return;
- }
- } else {
-            #- at this point, we don't know if a basename exists and is valid; let's probe it later.
- $basename = undef;
- }
- }
-
- #- try to probe for possible with_hdlist parameter, unless
- #- it is already defined (and valid).
- $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
- $options{callback} and $options{callback}('retrieve', $medium->{name});
- if ($options{probe_with}) {
- my @probe_list = (
- $medium->{with_hdlist}
- ? $medium->{with_hdlist}
- : _probe_with_try_list(_guess_hdlist_suffix($medium->{url}), $options{probe_with})
- );
- foreach my $with_hdlist (@probe_list) {
- $basename = basename($with_hdlist) or next;
- $options{force} and unlink "$urpm->{cachedir}/partial/$basename";
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$with_hdlist") ],
- \%options, callback => $options{callback});
- };
- if (!$@ && file_size("$urpm->{cachedir}/partial/$basename") > 32) {
- $medium->{with_hdlist} = $with_hdlist;
- $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $medium->{with_hdlist}));
- last; #- found a suitable with_hdlist in the list above.
- }
- }
- } else {
- $basename = basename($medium->{with_hdlist});
-
- if ($options{force}) {
- unlink "$urpm->{cachedir}/partial/$basename";
- } else {
-            #- try to sync (copy if needed) the local copy after restoring the previous one.
-            #- this is useful for rsync (?)
- if (-e statedir_hdlist_or_synthesis($urpm, $medium)) {
- urpm::util::copy(
- statedir_hdlist_or_synthesis($urpm, $medium),
- "$urpm->{cachedir}/partial/$basename",
- ) or $urpm->{error}(N("...copying failed")), $error = 1;
- }
- chown 0, 0, "$urpm->{cachedir}/partial/$basename";
- }
- eval {
- sync_webfetch($urpm, $medium, [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}") ],
- \%options, callback => $options{callback});
- };
- if ($@) {
- $urpm->{error}(N("...retrieving failed: %s", $@));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- #- check downloaded file has right signature.
- if (file_size("$urpm->{cachedir}/partial/$basename") > 32 && $retrieved_md5sum) {
- $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
- unless (md5sum("$urpm->{cachedir}/partial/$basename") eq $retrieved_md5sum) {
- $urpm->{error}(N("...retrieving failed: md5sum mismatch"));
- unlink "$urpm->{cachedir}/partial/$basename";
- }
- }
-
- if (file_size("$urpm->{cachedir}/partial/$basename") > 32) {
- $options{callback} and $options{callback}('done', $medium->{name});
- $urpm->{log}(N("...retrieving done"));
-
- unless ($options{force}) {
- _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium, $basename)
- and return;
- }
-
- #- the files are different, update local copy.
- rename("$urpm->{cachedir}/partial/$basename", cachedir_hdlist($urpm, $medium));
-
- #- retrieval of hdlist or synthesis has been successful,
- #- check whether a list file is available.
- #- and check hdlist wasn't named very strangely...
- if ($medium->{hdlist} ne 'list') {
- _update_media__sync_file($urpm, $medium, 'list', \%options);
- }
-
- #- retrieve pubkey file.
- if (!$options{nopubkey} && $medium->{hdlist} ne 'pubkey' && !$medium->{'key-ids'}) {
- _update_media__sync_file($urpm, $medium, 'pubkey', \%options);
- }
+ my $rc = _update_medium_first_pass__remote($urpm, $medium, \$retrieved_md5sum, \%options);
+ if ($rc eq 'unmodified') {
+ return;
} else {
- $error = 1;
- $options{callback} and $options{callback}('failed', $medium->{name});
- $urpm->{error}(N("retrieval of source hdlist (or synthesis) failed"));
+ $error = $rc;
}
}
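
The remote pass above boils down to: fetch the MD5SUM file that sits next to the remote hdlist/synthesis, parse the entry matching the basename, then discard the download on digest mismatch. A condensed sketch of that flow, using the helper names from the patch and eliding the eval-based error handling:

    # sketch: validate a downloaded hdlist/synthesis against the remote MD5SUM
    my $basename = basename($medium->{with_hdlist});
    sync_webfetch($urpm, $medium,
        [ reduce_pathname("$medium->{url}/$medium->{with_hdlist}/../MD5SUM") ],
        $options, quiet => 1);
    my $expected = parse_md5sum($urpm, "$urpm->{cachedir}/partial/MD5SUM", $basename);
    if ($expected && md5sum("$urpm->{cachedir}/partial/$basename") ne $expected) {
        unlink "$urpm->{cachedir}/partial/$basename";  # corrupt or truncated download
    }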
diff --git a/urpm/util.pm b/urpm/util.pm
index 520ce7f7..b12c49a0 100644
--- a/urpm/util.pm
+++ b/urpm/util.pm
@@ -8,10 +8,9 @@ our @ISA = 'Exporter';
our @EXPORT = qw(quotespace unquotespace
remove_internal_name
reduce_pathname offset_pathname
- untaint
+ md5sum untaint
copy_and_own
same_size_and_mtime
- partition uniq
difference2 member file_size cat_ dirname basename
);
@@ -88,6 +87,21 @@ sub untaint {
@r == 1 ? $r[0] : @r;
}
+sub md5sum {
+ my ($file) = @_;
+ eval { require Digest::MD5 };
+ if ($@) {
+        #- Use an external command to avoid depending on Digest::MD5
+ return (split ' ', `/usr/bin/md5sum '$file'`)[0];
+ } else {
+ my $ctx = Digest::MD5->new;
+ open my $fh, $file or return '';
+ $ctx->addfile($fh);
+ close $fh;
+ return $ctx->hexdigest;
+ }
+}
+
sub copy {
my ($file, $dest) = @_;
!system("/bin/cp", "-p", "-L", "-R", $file, $dest);
@@ -116,16 +130,6 @@ sub same_size_and_mtime {
$sstat[7] == $lstat[7] && $sstat[9] == $lstat[9];
}
-sub partition(&@) {
- my $f = shift;
- my (@a, @b);
- foreach (@_) {
- $f->($_) ? push(@a, $_) : push(@b, $_);
- }
- \@a, \@b;
-}
-
-sub uniq { my %l; $l{$_} = 1 foreach @_; grep { delete $l{$_} } @_ }
sub difference2 { my %l; @l{@{$_[1]}} = (); grep { !exists $l{$_} } @{$_[0]} }
sub member { my $e = shift; foreach (@_) { $e eq $_ and return 1 } 0 }
sub cat_ { my @l = map { my $F; open($F, '<', $_) ? <$F> : () } @_; wantarray() ? @l : join '', @l }
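
The new md5sum helper prefers Digest::MD5 and falls back to the external /usr/bin/md5sum binary when the module is unavailable, returning the hex digest either way (or '' if the file cannot be opened). A minimal usage sketch; the path is illustrative:

    use urpm::util qw(md5sum file_size);

    my $file = "/var/cache/urpmi/partial/hdlist.cz";  # hypothetical path
    # mirror the patch's sanity check: a real hdlist is always > 32 bytes
    if (file_size($file) > 32) {
        print md5sum($file), "  $file\n";
    }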