author     Rafael Garcia-Suarez <rgarciasuarez@mandriva.org>  2004-04-19 17:10:12 +0000
committer  Rafael Garcia-Suarez <rgarciasuarez@mandriva.org>  2004-04-19 17:10:12 +0000
commit     22d3107b3224f91c04e8825ba8bc812dba63a7ef (patch)
tree       ee8f28bf683e7634a039909a7503cd3e7e53967f
parent     7f79f78d1a0af648a116cd1e39a073e051502b9c (diff)
Some more cleanup and refactoring.
-rw-r--r--  urpm.pm          | 348
-rw-r--r--  urpm/args.pm     |   6
-rw-r--r--  urpm/cfg.pm      |  41
-rw-r--r--  urpm/download.pm | 403
-rw-r--r--  urpm/msg.pm      |   9
-rw-r--r--  urpme            |  44
-rwxr-xr-x  urpmi            |   6
-rwxr-xr-x  urpmi.addmedia   |  11
-rwxr-xr-x  urpmi.update     |   3
-rwxr-xr-x  urpmq            |   4
10 files changed, 490 insertions(+), 385 deletions(-)
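
The bulk of this change moves the proxy handling and all webfetch backends (wget, curl, rsync, ssh) out of urpm.pm into a new urpm::download module, creates urpm::cfg for config-file paths, and relocates the small toMb/localtime2changelog helpers into urpm::msg. A minimal sketch of how a caller picks up the relocated download helpers after this commit (paths and URL are hypothetical):

    use urpm::download;          # its import() aliases get_proxy, set_proxy
                                 # and the sync_* functions into the caller
    my $proxy = get_proxy();     # parses /etc/urpmi/proxy.cfg, undef if absent
    sync_file({ dir => "/var/cache/urpmi/rpms" }, "file:/tmp/foo-1.0.rpm");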
diff --git a/urpm.pm b/urpm.pm
index 33ac145f..9747cb96 100644
--- a/urpm.pm
+++ b/urpm.pm
@@ -6,6 +6,7 @@ use strict;
use vars qw($VERSION @ISA @EXPORT);
use MDK::Common;
use urpm::msg;
+use urpm::download;
$VERSION = '4.4';
@ISA = qw(URPM);
@@ -14,9 +15,6 @@ use URPM;
use URPM::Resolve;
use POSIX;
-#- tool functions.
-sub localtime2changelog { scalar(localtime($_[0])) =~ /(.*) \S+ (\d{4})$/ && "$1 $2" };
-
#- create a new urpm object.
sub new {
my ($class) = @_;
@@ -46,59 +44,6 @@ sub new {
}, $class;
}
-sub get_proxy {
- my $proxy = {
- http_proxy => undef ,
- ftp_proxy => undef ,
- user => undef,
- pwd => undef
- };
- local (*F, $_);
- open F, "/etc/urpmi/proxy.cfg" or return undef;
- while (<F>) {
- chomp; s/#.*$//; s/^\s*//; s/\s*$//;
- /^http_proxy\s*=\s*(.*)$/ and $proxy->{http_proxy} = $1, next;
- /^ftp_proxy\s*=\s*(.*)$/ and $proxy->{ftp_proxy} = $1, next;
- /^proxy_user\s*=\s*(.*):(.*)$/ and do {
- $proxy->{user} = $1;
- $proxy->{pwd} = $2;
- next;
- };
- next;
- }
- close F;
- $proxy;
-}
-
-sub set_proxy {
- my $proxy = shift @_;
- my @res;
- if (defined $proxy->{proxy}{http_proxy} or defined $proxy->{proxy}{ftp_proxy}) {
- for ($proxy->{type}) {
- /wget/ && do {
- for ($proxy->{proxy}) {
- if (defined $_->{http_proxy}) {
- $ENV{http_proxy} = $_->{http_proxy} =~ /^http:/ ? $_->{http_proxy} : "http://$_->{http_proxy}";
- }
- $ENV{ftp_proxy} = $_->{ftp_proxy} if defined $_->{ftp_proxy};
- @res = ("--proxy-user=$_->{user}", "--proxy-passwd=$_->{pwd}") if defined $_->{user} && defined $_->{pwd};
- }
- last;
- };
- /curl/ && do {
- for ($proxy->{proxy}) {
- push @res, ('-x', $_->{http_proxy}) if defined $_->{http_proxy};
- push @res, ('-x', $_->{ftp_proxy}) if defined $_->{ftp_proxy};
- push @res, ('-U', "$_->{user}:$_->{pwd}") if defined $_->{user} && defined $_->{pwd};
- }
- last;
- };
- die N("Unknown webfetch `%s' !!!\n", $proxy->{type});
- }
- }
- return @res;
-}
-
#- quoting/unquoting a string that may be containing space chars.
sub quotespace { local $_ = $_[0] || ''; s/(\s)/\\$1/g; $_ }
sub unquotespace { local $_ = $_[0] || ''; s/\\(\s)/$1/g; $_ }
@@ -154,297 +99,6 @@ sub sync_webfetch {
}
%files and die N("unable to handle protocol: %s", join ', ', keys %files);
}
-sub propagate_sync_callback {
- my $options = shift @_;
- if (ref($options) && $options->{callback}) {
- my $mode = shift @_;
- if ($mode =~ /^(start|progress|end)$/) {
- my $file = shift @_;
- $file =~ s|([^:]*://[^/:\@]*:)[^/:\@]*(\@.*)|$1xxxx$2|; #- if needed...
- return $options->{callback}($mode, $file, @_);
- } else {
- return $options->{callback}($mode, @_);
- }
- }
-}
-sub sync_file {
- my $options = shift @_;
- foreach (@_) {
- my ($in) = m!^(?:removable[^:]*|file):/(.*)!;
- propagate_sync_callback($options, 'start', $_);
- system("cp", "-p", "-R", $in || $_, ref($options) ? $options->{dir} : $options) and
- die N("copy failed: %s", $@);
- propagate_sync_callback($options, 'end', $_);
- }
-}
-sub sync_wget {
- -x "/usr/bin/wget" or die N("wget is missing\n");
- local *WGET;
- my $options = shift @_;
- #- force download to be done in cachedir to avoid polluting cwd.
- my $cwd = `pwd`; chomp $cwd;
- chdir(ref($options) ? $options->{dir} : $options);
- my ($buf, $total, $file) = ('', undef, undef);
- my $wget_pid = open WGET, join(" ", map { "'$_'" } "/usr/bin/wget",
- (ref($options) && $options->{limit_rate} ? "--limit-rate=$options->{limit_rate}" : ()),
- (ref($options) && $options->{resume} ? "--continue" : ()),
- (ref($options) && $options->{proxy} ? set_proxy({ type => "wget", proxy => $options->{proxy} }) : ()),
- (ref($options) && $options->{callback} ? ("--progress=bar:force", "-o", "-") :
- ref($options) && $options->{quiet} ? "-q" : @{[]}),
- "--retr-symlinks", "-NP",
- (ref($options) ? $options->{dir} : $options), @_) . " |";
- local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
- while (<WGET>) {
- $buf .= $_;
- if ($_ eq "\r" || $_ eq "\n") {
- if (ref($options) && $options->{callback}) {
- if ($buf =~ /^--\d\d:\d\d:\d\d--\s+(\S.*)\n/ms) {
- $file && $file ne $1 and propagate_sync_callback($options, 'end', $file);
- ! defined $file and propagate_sync_callback($options, 'start', $file = $1);
- } elsif (defined $file && ! defined $total && $buf =~ /==>\s+RETR/) {
- $total = '';
- } elsif (defined $total && $total eq '' && $buf =~ /^[^:]*:\s+(\d\S*)/) {
- $total = $1;
- } elsif (my ($percent, $speed, $eta) = $buf =~ /^\s*(\d+)%.*\s+(\S+)\s+ETA\s+(\S+)\s*[\r\n]$/ms) {
- if (propagate_sync_callback($options, 'progress', $file, $percent, $total, $eta, $speed) eq 'canceled') {
- kill 15, $wget_pid;
- close WGET;
- return;
- }
- if ($_ eq "\n") {
- propagate_sync_callback($options, 'end', $file);
- ($total, $file) = (undef, undef);
- }
- }
- } else {
- ref($options) && $options->{quiet} or print STDERR $buf;
- }
- $buf = '';
- }
- }
- $file and propagate_sync_callback($options, 'end', $file);
- chdir $cwd;
- close WGET or die N("wget failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
-}
-sub sync_curl {
- -x "/usr/bin/curl" or die N("curl is missing\n");
- local *CURL;
- my $options = shift @_;
- #- force download to be done in cachedir to avoid polluting cwd,
- #- howerver for curl, this is mandatory.
- my $cwd = `pwd`; chomp $cwd;
- chdir(ref($options) ? $options->{dir} : $options);
- my (@ftp_files, @other_files);
- foreach (@_) {
- m|^ftp://.*/([^/]*)$| && -s $1 > 8192 and do { push @ftp_files, $_; next }; #- manage time stamp for large file only.
- push @other_files, $_;
- }
- if (@ftp_files) {
- my ($cur_ftp_file, %ftp_files_info);
-
- eval { require Date::Manip };
-
- #- prepare to get back size and time stamp of each file.
- open CURL, join(" ", map { "'$_'" } "/usr/bin/curl",
- (ref($options) && $options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
- (ref($options) && $options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
- "--stderr", "-", "-s", "-I", @ftp_files) . " |";
- while (<CURL>) {
- if (/Content-Length:\s*(\d+)/) {
- !$cur_ftp_file || exists($ftp_files_info{$cur_ftp_file}{size}) and $cur_ftp_file = shift @ftp_files;
- $ftp_files_info{$cur_ftp_file}{size} = $1;
- }
- if (/Last-Modified:\s*(.*)/) {
- !$cur_ftp_file || exists($ftp_files_info{$cur_ftp_file}{time}) and $cur_ftp_file = shift @ftp_files;
- eval {
- $ftp_files_info{$cur_ftp_file}{time} = Date::Manip::ParseDate($1);
- $ftp_files_info{$cur_ftp_file}{time} =~ s/(\d{6}).{4}(.*)/$1$2/; #- remove day and hour.
- };
- }
- }
- close CURL;
-
- #- now analyse size and time stamp according to what already exists here.
- if (@ftp_files) {
- #- re-insert back shifted element of ftp_files, because curl output above
- #- have not been parsed correctly, in doubt download them all.
- push @ftp_files, keys %ftp_files_info;
- } else {
- #- for that, it should be clear ftp_files is empty... else a above work is
- #- use less.
- foreach (keys %ftp_files_info) {
- my ($lfile) = m|/([^/]*)$| or next; #- strange if we can't parse it correctly.
- my $ltime = eval { Date::Manip::ParseDate(scalar gmtime((stat $1)[9])) };
- $ltime =~ s/(\d{6}).{4}(.*)/$1$2/; #- remove day and hour.
- -s $lfile == $ftp_files_info{$_}{size} && $ftp_files_info{$_}{time} eq $ltime or
- push @ftp_files, $_;
- }
- }
- }
- #- http files (and other files) are correctly managed by curl to conditionnal download.
- #- options for ftp files, -R (-O <file>)*
- #- options for http files, -R (-z file -O <file>)*
- if (my @all_files = ((map { ("-O", $_) } @ftp_files), (map { m|/([^/]*)$| ? ("-z", $1, "-O", $_) : @{[]} } @other_files))) {
- my @l = (@ftp_files, @other_files);
- my ($buf, $file) = ('', undef);
- my $curl_pid = open CURL, join(" ", map { "'$_'" } "/usr/bin/curl",
- (ref($options) && $options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
- (ref($options) && $options->{resume} ? ("--continue-at", "-") : ()),
- (ref($options) && $options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
- (ref($options) && $options->{quiet} && !$options->{verbose} ? "-s" : @{[]}),
- "-k", `curl -h` =~ /location-trusted/ ? "--location-trusted" : @{[]},
- "-R", "-f", "--stderr", "-",
- @all_files) . " |";
- local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
- while (<CURL>) {
- $buf .= $_;
- if ($_ eq "\r" || $_ eq "\n") {
- if (ref($options) && $options->{callback}) {
- unless (defined $file) {
- $file = shift @l;
- propagate_sync_callback($options, 'start', $file);
- }
- if (my ($percent, $total, $eta, $speed) = $buf =~ /^\s*(\d+)\s+(\S+)[^\r\n]*\s+(\S+)\s+(\S+)[\r\n]$/ms) {
- if (propagate_sync_callback($options, 'progress', $file, $percent, $total, $eta, $speed) eq 'canceled') {
- kill 15, $curl_pid;
- close CURL;
- return;
- }
- if ($_ eq "\n") {
- propagate_sync_callback($options, 'end', $file);
- $file = undef;
- }
- } elsif ($buf =~ /^curl:/) { #- likely to be an error reported by curl
- local $/ = "\n";
- chomp $buf;
- propagate_sync_callback($options, 'error', $file, $buf);
- }
- } else {
- ref($options) && $options->{quiet} or print STDERR $buf;
- }
- $buf = '';
- }
- }
- chdir $cwd;
- close CURL or die N("curl failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
- } else {
- chdir $cwd;
- }
-}
-sub sync_rsync {
- -x "/usr/bin/rsync" or die N("rsync is missing\n");
- my $options = shift @_;
- #- force download to be done in cachedir to avoid polluting cwd.
- my $cwd = `pwd`; chomp $cwd;
- chdir(ref($options) ? $options->{dir} : $options);
- my $limit_rate = ref($options) && $options->{limit_rate};
- for ($limit_rate) {
- /^(\d+)$/ and $limit_rate = int $1/1024;
- /^(\d+)[kK]$/ and $limit_rate = $1;
- /^(\d+)[mM]$/ and $limit_rate = 1024*$1;
- /^(\d+)[gG]$/ and $limit_rate = 1024*1024*$1;
- }
- foreach (@_) {
- my $count = 10; #- retry count on error (if file exists).
- my $basename = basename($_);
- my ($file) = m|^rsync://(.*)| or next; $file =~ /::/ or $file = $_;
- propagate_sync_callback($options, 'start', $file);
- do {
- local (*RSYNC, $_);
- my $buf = '';
- open RSYNC, join(" ", map { "'$_'" } "/usr/bin/rsync",
- ($limit_rate ? "--bwlimit=$limit_rate" : ()),
- (ref($options) && $options->{quiet} ? qw(-q) : qw(--progress -v)),
- if_(ref($options) && $options->{compress}, qw(-z)),
- qw(--partial --no-whole-file), $file, (ref($options) ? $options->{dir} : $options)) . " |";
- local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
- while (<RSYNC>) {
- $buf .= $_;
- if ($_ eq "\r" || $_ eq "\n") {
- if (ref($options) && $options->{callback}) {
- if (my ($percent, $speed) = $buf =~ /^\s*\d+\s+(\d+)%\s+(\S+)\s+/) {
- propagate_sync_callback($options, 'progress', $file, $percent, undef, undef, $speed);
- }
- } else {
- ref($options) && $options->{quiet} or print STDERR $buf;
- }
- $buf = '';
- }
- }
- close RSYNC;
- } while ($? != 0 && --$count > 0 && -e (ref($options) ? $options->{dir} : $options) . "/$basename");
- propagate_sync_callback($options, 'end', $file);
- }
- chdir $cwd;
- $? == 0 or die N("rsync failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
-}
-sub sync_ssh {
- -x "/usr/bin/rsync" or die N("rsync is missing\n");
- -x "/usr/bin/ssh" or die N("ssh is missing\n");
- my $options = shift @_;
- #- force download to be done in cachedir to avoid polluting cwd.
- my $cwd = `pwd`; chomp $cwd;
- chdir(ref($options) ? $options->{dir} : $options);
- my $limit_rate = ref($options) && $options->{limit_rate};
- for ($limit_rate) {
- /^(\d+)$/ and $limit_rate = int $1/1024;
- /^(\d+)[kK]$/ and $limit_rate = $1;
- /^(\d+)[mM]$/ and $limit_rate = 1024*$1;
- /^(\d+)[gG]$/ and $limit_rate = 1024*1024*$1;
- }
- foreach my $file (@_) {
- my $count = 10; #- retry count on error (if file exists).
- my $basename = basename($file);
- propagate_sync_callback($options, 'start', $file);
- do {
- local (*RSYNC, $_);
- my $buf = '';
- open RSYNC, join(" ", map { "'$_'" } "/usr/bin/rsync",
- ($limit_rate ? "--bwlimit=$limit_rate" : ()),
- (ref($options) && $options->{quiet} ? qw(-q) : qw(--progress -v)),
- if_(ref($options) && $options->{compress}, qw(-z)),
- qw(--partial -e ssh), $file, (ref($options) ? $options->{dir} : $options)) . " |";
- local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
- while (<RSYNC>) {
- $buf .= $_;
- if ($_ eq "\r" || $_ eq "\n") {
- if (ref($options) && $options->{callback}) {
- if (my ($percent, $speed) = $buf =~ /^\s*\d+\s+(\d+)%\s+(\S+)\s+/) {
- propagate_sync_callback($options, 'progress', $file, $percent, undef, undef, $speed);
- }
- } else {
- ref($options) && $options->{quiet} or print STDERR $buf;
- }
- $buf = '';
- }
- }
- close RSYNC;
- } while ($? != 0 && --$count > 0 && -e (ref($options) ? $options->{dir} : $options) . "/$basename");
- propagate_sync_callback($options, 'end', $file);
- }
- chdir $cwd;
- $? == 0 or die N("rsync failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
-}
-#- default logger suitable for sync operation on STDERR only.
-sub sync_logger {
- my ($mode, $file, $percent, $total, $eta, $speed) = @_;
- if ($mode eq 'start') {
- print STDERR " $file\n";
- } elsif ($mode eq 'progress') {
- my $text;
- if (defined $total && defined $eta) {
- $text = N(" %s%% of %s completed, ETA = %s, speed = %s", $percent, $total, $eta, $speed);
- } else {
- $text = N(" %s%% completed, speed = %s", $percent, $speed);
- }
- print STDERR $text, " " x (79 - length($text)), "\r";
- } elsif ($mode eq 'end') {
- print STDERR " " x 79, "\r";
- } elsif ($mode eq 'error') {
- #- error is 3rd argument, saved in $percent
- print STDERR N("...retrieving failed: %s", $percent), "\n";
- }
-}
#- read /etc/urpmi/urpmi.cfg as config file, keep compability with older
#- configuration file by examining if one entry is of the form
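
urpm.pm itself shrinks by almost 300 lines: everything from get_proxy down to sync_logger is deleted above and reappears, lightly cleaned up, in urpm/download.pm below. Because urpm::download installs its exports by aliasing globs into the caller's namespace, the single added `use urpm::download;` line keeps the remaining unqualified calls in urpm.pm working. A sketch of the mechanism (package name hypothetical):

    package demo;
    use urpm::download;    # runs urpm::download::import(), which does roughly:
                           #   *{'demo::sync_wget'} = *urpm::download::sync_wget;
    sync_wget({ dir => "/tmp", quiet => 1 }, "http://example.com/a.rpm");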
diff --git a/urpm/args.pm b/urpm/args.pm
index 36e84b79..3d3904f0 100644
--- a/urpm/args.pm
+++ b/urpm/args.pm
@@ -22,8 +22,8 @@ my $urpm;
our %options;
sub import {
- if ($_[1] eq 'options') {
- # export the %options hash
+ if (@_ > 1 && $_[1] eq 'options') {
+ # export the global %options hash
no strict 'refs';
*{caller().'::options'} = \%options;
}
@@ -124,7 +124,7 @@ my %options_spec = (
all => sub {
foreach my $k (qw(filename group size summary description sourcerpm
packager buildhost url provides requires files conflicts obsoletes))
- { $::params{$k} = 1; }
+ { $::params{$k} = 1 }
},
name => \$::params{filename},
group => \$::params{group},
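
The import() guard avoids a latent warning when urpm::args is loaded without an import list: comparing the then-undefined $_[1] against 'options' would trip "use of uninitialized value" under warnings. Both forms, as a sketch:

    use urpm::args;              # plain load; import() sees @_ == 1, aliases nothing
    use urpm::args 'options';    # aliases the global %options hash into the caller
    $options{update} and print "update mode requested\n";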
diff --git a/urpm/cfg.pm b/urpm/cfg.pm
new file mode 100644
index 00000000..aa485b53
--- /dev/null
+++ b/urpm/cfg.pm
@@ -0,0 +1,41 @@
+package urpm::cfg;
+
+use strict;
+use warnings;
+
+=head1 NAME
+
+urpm::cfg - routines to handle the urpmi configuration files
+
+=head1 SYNOPSIS
+
+=head1 DESCRIPTION
+
+=over
+
+=cut
+
+# Standard paths of the config files
+our $PROXY_CFG = "/etc/urpmi/proxy.cfg";
+
+=item set_environment($env_path)
+
+Modifies the paths of the config files, so they will be searched
+in the $env_path directory. This is obviously to be called early.
+
+=cut
+
+sub set_environment {
+ my ($env) = @_;
+ for ($PROXY_CFG) {
+ s,^/etc/urpmi,$env,; #- $_ is aliased to the config path being rewritten
+ }
+}
+
+1;
+
+__END__
+
+=back
+
+=cut
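
A minimal usage sketch for the new urpm::cfg module (test directory hypothetical), assuming set_environment() applies its substitution to the aliased config path as the `for` loop intends:

    use urpm::cfg;
    urpm::cfg::set_environment("/tmp/urpmi-test");
    print $urpm::cfg::PROXY_CFG, "\n";    # /tmp/urpmi-test/proxy.cfg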
diff --git a/urpm/download.pm b/urpm/download.pm
new file mode 100644
index 00000000..05bb4d52
--- /dev/null
+++ b/urpm/download.pm
@@ -0,0 +1,403 @@
+package urpm::download;
+
+use strict;
+use urpm::msg;
+use urpm::cfg;
+use Cwd;
+
+sub import () {
+ my $c = caller;
+ no strict 'refs';
+ foreach my $symbol (qw(get_proxy set_proxy
+ propagate_sync_callback
+ sync_file sync_wget sync_curl sync_rsync sync_ssh
+ )) {
+ *{$c.'::'.$symbol} = *$symbol;
+ }
+}
+
+sub get_proxy () {
+ my $proxy = {
+ http_proxy => undef ,
+ ftp_proxy => undef ,
+ user => undef,
+ pwd => undef
+ };
+ local $_;
+ open my $f, $urpm::cfg::PROXY_CFG or return undef;
+ while (<$f>) {
+ chomp; s/#.*$//; s/^\s*//; s/\s*$//;
+ /^http_proxy\s*=\s*(.*)$/ and $proxy->{http_proxy} = $1, next;
+ /^ftp_proxy\s*=\s*(.*)$/ and $proxy->{ftp_proxy} = $1, next;
+ /^proxy_user\s*=\s*(.*):(.*)$/ and do {
+ $proxy->{user} = $1;
+ $proxy->{pwd} = $2;
+ next;
+ };
+ next;
+ }
+ close $f;
+ bless $proxy;
+}
+
+sub set_proxy {
+ my ($proxy) = @_;
+ my @res;
+ if (defined $proxy->{proxy}{http_proxy} || defined $proxy->{proxy}{ftp_proxy}) {
+ for ($proxy->{type}) {
+ /\bwget\b/ and do {
+ for ($proxy->{proxy}) {
+ if (defined $_->{http_proxy}) {
+ $ENV{http_proxy} = $_->{http_proxy} =~ /^http:/
+ ? $_->{http_proxy}
+ : "http://$_->{http_proxy}";
+ }
+ $ENV{ftp_proxy} = $_->{ftp_proxy} if defined $_->{ftp_proxy};
+ @res = ("--proxy-user=$_->{user}", "--proxy-passwd=$_->{pwd}")
+ if defined $_->{user} && defined $_->{pwd};
+ }
+ last;
+ };
+ /\bcurl\b/ and do {
+ for ($proxy->{proxy}) {
+ push @res, ('-x', $_->{http_proxy}) if defined $_->{http_proxy};
+ push @res, ('-x', $_->{ftp_proxy}) if defined $_->{ftp_proxy};
+ push @res, ('-U', "$_->{user}:$_->{pwd}")
+ if defined $_->{user} && defined $_->{pwd};
+ }
+ last;
+ };
+ die N("Unknown webfetch `%s' !!!\n", $proxy->{type});
+ }
+ }
+ return @res;
+}
+
+sub propagate_sync_callback {
+ my $options = shift @_;
+ if (ref($options) && $options->{callback}) {
+ my $mode = shift @_;
+ if ($mode =~ /^(?:start|progress|end)$/) {
+ my $file = shift @_;
+ $file =~ s|([^:]*://[^/:\@]*:)[^/:\@]*(\@.*)|$1xxxx$2|; #- if needed...
+ return $options->{callback}($mode, $file, @_);
+ } else {
+ return $options->{callback}($mode, @_);
+ }
+ }
+}
+
+sub sync_file {
+ my $options = shift;
+ foreach (@_) {
+ my ($in) = m!^(?:removable[^:]*|file):/(.*)!;
+ propagate_sync_callback($options, 'start', $_);
+ system("cp", "-p", "-R", $in || $_, ref($options) ? $options->{dir} : $options) and
+ die N("copy failed: %s", $@);
+ propagate_sync_callback($options, 'end', $_);
+ }
+}
+
+sub sync_wget {
+ -x "/usr/bin/wget" or die N("wget is missing\n");
+ my $options = shift @_;
+ $options = { dir => $options } if !ref $options;
+ #- force download to be done in cachedir to avoid polluting cwd.
+ my $cwd = getcwd();
+ chdir $options->{dir};
+ my ($buf, $total, $file) = ('', undef, undef);
+ my $wget_pid = open my $wget, join(" ", map { "'$_'" }
+ #- construction of the wget command-line
+ "/usr/bin/wget",
+ ($options->{limit_rate} ? "--limit-rate=$options->{limit_rate}" : ()),
+ ($options->{resume} ? "--continue" : ()),
+ ($options->{proxy} ? set_proxy({ type => "wget", proxy => $options->{proxy} }) : ()),
+ ($options->{callback} ? ("--progress=bar:force", "-o", "-") :
+ $options->{quiet} ? "-q" : @{[]}),
+ "--retr-symlinks",
+ "-NP",
+ $options->{dir},
+ @_
+ ) . " |";
+ local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
+ while (<$wget>) {
+ $buf .= $_;
+ if ($_ eq "\r" || $_ eq "\n") {
+ if ($options->{callback}) {
+ if ($buf =~ /^--\d\d:\d\d:\d\d--\s+(\S.*)\n/ms) {
+ $file && $file ne $1 and propagate_sync_callback($options, 'end', $file);
+ ! defined $file and propagate_sync_callback($options, 'start', $file = $1);
+ } elsif (defined $file && ! defined $total && $buf =~ /==>\s+RETR/) {
+ $total = '';
+ } elsif (defined $total && $total eq '' && $buf =~ /^[^:]*:\s+(\d\S*)/) {
+ $total = $1;
+ } elsif (my ($percent, $speed, $eta) = $buf =~ /^\s*(\d+)%.*\s+(\S+)\s+ETA\s+(\S+)\s*[\r\n]$/ms) {
+ if (propagate_sync_callback($options, 'progress', $file, $percent, $total, $eta, $speed) eq 'canceled') {
+ kill 15, $wget_pid;
+ close $wget;
+ return;
+ }
+ if ($_ eq "\n") {
+ propagate_sync_callback($options, 'end', $file);
+ ($total, $file) = (undef, undef);
+ }
+ }
+ } else {
+ $options->{quiet} or print STDERR $buf;
+ }
+ $buf = '';
+ }
+ }
+ $file and propagate_sync_callback($options, 'end', $file);
+ chdir $cwd;
+ close $wget or die N("wget failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
+}
+
+sub sync_curl {
+ -x "/usr/bin/curl" or die N("curl is missing\n");
+ my $options = shift @_;
+ $options = { dir => $options } if !ref $options;
+ #- force download to be done in cachedir to avoid polluting cwd,
+ #- however for curl, this is mandatory.
+ my $cwd = getcwd();
+ chdir($options->{dir});
+ my (@ftp_files, @other_files);
+ foreach (@_) {
+ m|^ftp://.*/([^/]*)$| && -s $1 > 8192 and do {
+ push @ftp_files, $_; next;
+ }; #- manage time stamp for large file only.
+ push @other_files, $_;
+ }
+ if (@ftp_files) {
+ my ($cur_ftp_file, %ftp_files_info);
+
+ eval { require Date::Manip };
+
+ #- prepare to get back size and time stamp of each file.
+ open my $curl, join(" ", map { "'$_'" } "/usr/bin/curl",
+ ($options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
+ ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
+ "--stderr", "-", "-s", "-I", @ftp_files) . " |";
+ while (<$curl>) {
+ if (/Content-Length:\s*(\d+)/) {
+ !$cur_ftp_file || exists($ftp_files_info{$cur_ftp_file}{size})
+ and $cur_ftp_file = shift @ftp_files;
+ $ftp_files_info{$cur_ftp_file}{size} = $1;
+ }
+ if (/Last-Modified:\s*(.*)/) {
+ !$cur_ftp_file || exists($ftp_files_info{$cur_ftp_file}{time})
+ and $cur_ftp_file = shift @ftp_files;
+ eval {
+ $ftp_files_info{$cur_ftp_file}{time} = Date::Manip::ParseDate($1);
+ #- remove day and hour.
+ $ftp_files_info{$cur_ftp_file}{time} =~ s/(\d{6}).{4}(.*)/$1$2/;
+ };
+ }
+ }
+ close $curl;
+
+ #- now analyse size and time stamp according to what already exists here.
+ if (@ftp_files) {
+ #- re-insert back shifted element of ftp_files, because curl output above
+ #- has not been parsed correctly, so in doubt download them all.
+ push @ftp_files, keys %ftp_files_info;
+ } else {
+ #- we only get here when ftp_files is empty...
+ #- otherwise the above work was useless.
+ foreach (keys %ftp_files_info) {
+ my ($lfile) = m|/([^/]*)$| or next; #- strange if we can't parse it correctly.
+ my $ltime = eval { Date::Manip::ParseDate(scalar gmtime((stat $1)[9])) };
+ $ltime =~ s/(\d{6}).{4}(.*)/$1$2/; #- remove day and hour.
+ -s $lfile == $ftp_files_info{$_}{size} && $ftp_files_info{$_}{time} eq $ltime or
+ push @ftp_files, $_;
+ }
+ }
+ }
+ #- http files (and other files) are correctly handled by curl for conditional download.
+ #- options for ftp files, -R (-O <file>)*
+ #- options for http files, -R (-z file -O <file>)*
+ if (my @all_files = (
+ (map { ("-O", $_) } @ftp_files),
+ (map { m|/([^/]*)$| ? ("-z", $1, "-O", $_) : @{[]} } @other_files)))
+ {
+ my @l = (@ftp_files, @other_files);
+ my ($buf, $file) = ('');
+ my $curl_pid = open my $curl, join(" ", map { "'$_'" } "/usr/bin/curl",
+ ($options->{limit_rate} ? ("--limit-rate", $options->{limit_rate}) : ()),
+ ($options->{resume} ? ("--continue-at", "-") : ()),
+ ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
+ ($options->{quiet} && !$options->{verbose} ? "-s" : @{[]}),
+ "-k",
+ `curl -h` =~ /location-trusted/ ? "--location-trusted" : @{[]},
+ "-R", "-f", "--stderr", "-",
+ @all_files) . " |";
+ local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
+ while (<$curl>) {
+ $buf .= $_;
+ if ($_ eq "\r" || $_ eq "\n") {
+ if ($options->{callback}) {
+ unless (defined $file) {
+ $file = shift @l;
+ propagate_sync_callback($options, 'start', $file);
+ }
+ if (my ($percent, $total, $eta, $speed) = $buf =~ /^\s*(\d+)\s+(\S+)[^\r\n]*\s+(\S+)\s+(\S+)[\r\n]$/ms) {
+ if (propagate_sync_callback($options, 'progress', $file, $percent, $total, $eta, $speed) eq 'canceled') {
+ kill 15, $curl_pid;
+ close $curl;
+ return;
+ }
+ if ($_ eq "\n") {
+ propagate_sync_callback($options, 'end', $file);
+ $file = undef;
+ }
+ } elsif ($buf =~ /^curl:/) { #- likely to be an error reported by curl
+ local $/ = "\n";
+ chomp $buf;
+ propagate_sync_callback($options, 'error', $file, $buf);
+ }
+ } else {
+ $options->{quiet} or print STDERR $buf;
+ }
+ $buf = '';
+ }
+ }
+ chdir $cwd;
+ close $curl or die N("curl failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
+ } else {
+ chdir $cwd;
+ }
+}
+
+sub _calc_limit_rate {
+ my $limit_rate = $_[0];
+ for ($limit_rate) {
+ /^(\d+)$/ and $limit_rate = int $1/1024, last;
+ /^(\d+)[kK]$/ and $limit_rate = $1, last;
+ /^(\d+)[mM]$/ and $limit_rate = 1024*$1, last;
+ /^(\d+)[gG]$/ and $limit_rate = 1024*1024*$1, last;
+ }
+ $limit_rate;
+}
+
+sub sync_rsync {
+ -x "/usr/bin/rsync" or die N("rsync is missing\n");
+ my $options = shift @_;
+ $options = { dir => $options } if !ref $options;
+ #- force download to be done in cachedir to avoid polluting cwd.
+ my $cwd = getcwd();
+ chdir($options->{dir});
+ my $limit_rate = _calc_limit_rate $options->{limit_rate};
+ foreach (@_) {
+ my $count = 10; #- retry count on error (if file exists).
+ my $basename = basename($_);
+ my ($file) = m|^rsync://(.*)| or next;
+ $file =~ /::/ or $file = $_;
+ propagate_sync_callback($options, 'start', $file);
+ do {
+ local $_;
+ my $buf = '';
+ open my $rsync, join(" ", map { "'$_'" } "/usr/bin/rsync",
+ ($limit_rate ? "--bwlimit=$limit_rate" : ()),
+ ($options->{quiet} ? qw(-q) : qw(--progress -v)),
+ if_($options->{compress}, qw(-z)),
+ qw(--partial --no-whole-file),
+ $file, $options->{dir}) . " |";
+ local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
+ while (<$rsync>) {
+ $buf .= $_;
+ if ($_ eq "\r" || $_ eq "\n") {
+ if ($options->{callback}) {
+ if (my ($percent, $speed) = $buf =~ /^\s*\d+\s+(\d+)%\s+(\S+)\s+/) {
+ propagate_sync_callback($options, 'progress', $file, $percent, undef, undef, $speed);
+ }
+ } else {
+ $options->{quiet} or print STDERR $buf;
+ }
+ $buf = '';
+ }
+ }
+ close $rsync;
+ } while ($? != 0 && --$count > 0 && -e $options->{dir} . "/$basename");
+ propagate_sync_callback($options, 'end', $file);
+ }
+ chdir $cwd;
+ $? == 0 or die N("rsync failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
+}
+
+sub sync_ssh {
+ -x "/usr/bin/rsync" or die N("rsync is missing\n");
+ -x "/usr/bin/ssh" or die N("ssh is missing\n");
+ my $options = shift @_;
+ $options = { dir => $options } if !ref $options;
+ #- force download to be done in cachedir to avoid polluting cwd.
+ my $cwd = getcwd();
+ chdir($options->{dir});
+ my $limit_rate = _calc_limit_rate $options->{limit_rate};
+ foreach my $file (@_) {
+ my $count = 10; #- retry count on error (if file exists).
+ my $basename = basename($file);
+ propagate_sync_callback($options, 'start', $file);
+ do {
+ local $_;
+ my $buf = '';
+ open my $rsync, join(" ", map { "'$_'" } "/usr/bin/rsync",
+ ($limit_rate ? "--bwlimit=$limit_rate" : ()),
+ ($options->{quiet} ? qw(-q) : qw(--progress -v)),
+ if_($options->{compress}, qw(-z)),
+ qw(--partial -e ssh), $file, $options->{dir}) . " |";
+ local $/ = \1; #- read input by only one char, this is slow but very nice (and it works!).
+ while (<$rsync>) {
+ $buf .= $_;
+ if ($_ eq "\r" || $_ eq "\n") {
+ if ($options->{callback}) {
+ if (my ($percent, $speed) = $buf =~ /^\s*\d+\s+(\d+)%\s+(\S+)\s+/) {
+ propagate_sync_callback($options, 'progress', $file, $percent, undef, undef, $speed);
+ }
+ } else {
+ $options->{quiet} or print STDERR $buf;
+ }
+ $buf = '';
+ }
+ }
+ close $rsync;
+ } while ($? != 0 && --$count > 0 && -e $options->{dir} . "/$basename");
+ propagate_sync_callback($options, 'end', $file);
+ }
+ chdir $cwd;
+ $? == 0 or die N("rsync failed: exited with %d or signal %d\n", $? >> 8, $? & 127);
+}
+
+#- default logger suitable for sync operation on STDERR only.
+sub sync_logger {
+ my ($mode, $file, $percent, $total, $eta, $speed) = @_;
+ if ($mode eq 'start') {
+ print STDERR " $file\n";
+ } elsif ($mode eq 'progress') {
+ my $text;
+ if (defined $total && defined $eta) {
+ $text = N(" %s%% of %s completed, ETA = %s, speed = %s", $percent, $total, $eta, $speed);
+ } else {
+ $text = N(" %s%% completed, speed = %s", $percent, $speed);
+ }
+ print STDERR $text, " " x (79 - length($text)), "\r";
+ } elsif ($mode eq 'end') {
+ print STDERR " " x 79, "\r";
+ } elsif ($mode eq 'error') {
+ #- error is 3rd argument, saved in $percent
+ print STDERR N("...retrieving failed: %s", $percent), "\n";
+ }
+}
+
+1;
+
+__END__
+
+=head1 NAME
+
+urpm::download - download routines for the urpm* tools
+
+=head1 SYNOPSIS
+
+=head1 DESCRIPTION
+
+=cut
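
For reference, this is the proxy.cfg format that get_proxy() accepts, with hypothetical values; comments and surrounding whitespace are stripped line by line, and the user/password pair is split on the last colon:

    # /etc/urpmi/proxy.cfg
    http_proxy = proxy.example.com:3128
    ftp_proxy  = proxy.example.com:3128
    proxy_user = someuser:somepass

The other new helper, _calc_limit_rate(), normalizes a --limit-rate value to the KiB/s unit rsync expects: a bare byte count such as "65536" becomes 64, "60k" stays 60, and "1m" becomes 1024.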
diff --git a/urpm/msg.pm b/urpm/msg.pm
index bf3272e2..5e367daa 100644
--- a/urpm/msg.pm
+++ b/urpm/msg.pm
@@ -3,7 +3,7 @@ package urpm::msg;
use strict;
use Exporter;
our @ISA = 'Exporter';
-our @EXPORT = qw(N log_it to_utf8 message_input gmessage message);
+our @EXPORT = qw(N log_it to_utf8 message_input gmessage message toMb);
my $noexpr = N("Nn");
my $yesexpr = N("Yy");
@@ -94,6 +94,13 @@ sub message {
}
}
+sub toMb {
+ my $nb = $_[0] / 1024 / 1024;
+ int $nb + 0.5;
+}
+
+sub localtime2changelog { scalar(localtime($_[0])) =~ /(.*) \S+ (\d{4})$/ && "$1 $2" };
+
1;
__END__
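
The two helpers gaining a home in urpm::msg are small enough to illustrate inline; toMb() is now exported, while localtime2changelog() stays fully qualified (epoch value hypothetical):

    use urpm::msg;
    print toMb(1_572_864), "\n";    # 1.5 MB rounds up to 2 via int($nb + 0.5)
    # localtime2changelog() drops the time-of-day field, yielding a
    # "Mon Apr 19 2004"-style stamp as used in RPM changelog headers:
    print urpm::msg::localtime2changelog(1082394612), "\n";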
diff --git a/urpme b/urpme
index 1d5686d1..5c46339b 100644
--- a/urpme
+++ b/urpme
@@ -71,23 +71,28 @@ $urpm->configure(
);
#- examine packages...
-my @toremove = $urpm->find_packages_to_remove($state, \@l,
- test => $test, matches => $matches, auto => $auto, force => $force,
- callback_notfound => sub {
- my $urpm = shift @_;
- $urpm->{fatal}(1, (@_ > 1 ? N("unknown packages") : N("unknown package")) .
- ': ' . join(', ', @_)); 0 },
- callback_fuzzy => sub {
- my $urpm = shift @_;
- my $match = shift @_;
- $urpm->{fatal}(1, N("The following packages contain %s: %s",
- $match, join(' ', @_))); 0 },
- callback_base => sub {
- my $urpm = shift @_;
- foreach (@_) {
- $urpm->{error}(N("removing package %s will break your system", $_));
- } 0 },
- ) or $urpm->{fatal}(0, N("Nothing to remove"));
+my @toremove = $urpm->find_packages_to_remove(
+ $state,
+ \@l,
+ test => $test,
+ matches => $matches,
+ auto => $auto,
+ force => $force,
+ callback_notfound => sub {
+ my $urpm = shift @_;
+ $urpm->{fatal}(1, (@_ > 1 ? N("unknown packages") : N("unknown package")) .
+ ': ' . join(', ', @_)); 0 },
+ callback_fuzzy => sub {
+ my $urpm = shift @_;
+ my $match = shift @_;
+ $urpm->{fatal}(1, N("The following packages contain %s: %s",
+ $match, join(' ', @_))); 0 },
+ callback_base => sub {
+ my $urpm = shift @_;
+ foreach (@_) {
+ $urpm->{error}(N("removing package %s will break your system", $_));
+ } 0 },
+) or $urpm->{fatal}(0, N("Nothing to remove"));
my $list = join "\n", $urpm->translate_why_removed($state, sort @toremove);
if ($test && $auto) {
@@ -108,8 +113,3 @@ print STDOUT "\n".N("removing %s", join(' ', sort @toremove))."\n";
$urpm->parallel_remove(\@toremove, test => $test, force => $force, translate_message => 1) :
$urpm->install(\@toremove, {}, {}, test => $test, force => $force, translate_message => 1);
@l and $urpm->{fatal}(2, N("Removing failed") . ":\n" . join("\n", map { "\t$_" } @l));
-
-sub toMb {
- my $nb = $_[0] / 1024 / 1024;
- int $nb + 0.5;
-}
diff --git a/urpmi b/urpmi
index d6c77de8..66d312da 100755
--- a/urpmi
+++ b/urpmi
@@ -796,13 +796,9 @@ if ($pid_err && $pid_out) {
close STDOUT;
}
+use POSIX ();
POSIX::_exit $exit_code;
-sub toMb {
- my $nb = $_[0] / 1024 / 1024;
- int $nb + 0.5;
-}
-
sub untaint {
my @r;
foreach (@_) {
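
With toMb() gone, the script still ends through POSIX::_exit, so the commit adds an explicit `use POSIX ()` next to the call; the empty list imports nothing and the call stays fully qualified. A sketch of why _exit differs from plain exit:

    use POSIX ();
    POSIX::_exit($exit_code);   # terminates immediately, skipping END blocks
                                # and object destructors that exit() would run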
diff --git a/urpmi.addmedia b/urpmi.addmedia
index 8157db01..1dde2551 100755
--- a/urpmi.addmedia
+++ b/urpmi.addmedia
@@ -24,6 +24,7 @@ use strict;
use urpm;
use urpm::args 'options';
use urpm::msg;
+use urpm::download ();
# Default mirror list
our $mirrors = 'http://www.linux-mandrake.com/mirrorsfull.list';
@@ -82,7 +83,7 @@ sub main {
#- parse /etc/urpmi/mirror.config if present, or use default mandrake mirror.
# the --from option overrides this setting.
if ($options{mirrors_url}) {
- $mirrors = $options{$mirrors_url};
+ $mirrors = $options{mirrors_url};
}
elsif (-e "/etc/urpmi/mirror.config") {
local $_;
@@ -172,7 +173,7 @@ sub main {
$urpm->add_distrib_media($name, $url, virtual => $options{virtual}, update => $options{update});
}
- $urpm->update_media(%options, callback => \&urpm::sync_logger);
+ $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
if (my @unsynced_media = grep { $_->{modified} } @{$urpm->{media}}) {
print STDERR join("\n", map { N("unable to update medium \"%s\"\n", $_->{name}) } @unsynced_media);
@@ -180,7 +181,7 @@ sub main {
#- remove quietly the failing media.
$urpm->{log} = sub {};
$urpm->remove_selected_media;
- $urpm->update_media(%options, callback => \&urpm::sync_logger);
+ $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
}
} else {
$name or usage;
@@ -192,7 +193,7 @@ sub main {
}
$urpm->add_medium($name, $url, $relative_hdlist, virtual => $options{virtual}, update => $options{update});
- $urpm->update_media(%options, callback => \&urpm::sync_logger);
+ $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
#- check creation of media (during update has been successfull)
my ($medium) = grep { $_->{name} eq $name } @{$urpm->{media}};
@@ -202,7 +203,7 @@ sub main {
#- remove quietly the failing media.
$urpm->{log} = sub {};
$urpm->remove_selected_media;
- $urpm->update_media(%options, callback => \&urpm::sync_logger);
+ $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
}
}
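
Every urpmi.addmedia call site now names the logger by its new home, urpm::download::sync_logger, which import() does not export. Any coderef with the same six-argument signature works in its place; a hypothetical quieter callback:

    my $cb = sub {
        my ($mode, $file, $percent, $total, $eta, $speed) = @_;
        print STDERR "$file: $percent%\n" if $mode eq 'progress';
    };
    $urpm->update_media(%options, callback => $cb);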
diff --git a/urpmi.update b/urpmi.update
index 5a5abc7f..9e7d2515 100755
--- a/urpmi.update
+++ b/urpmi.update
@@ -22,6 +22,7 @@ use strict;
use urpm;
use urpm::args 'options';
use urpm::msg;
+use urpm::download ();
sub usage {
warn N("usage: urpmi.update [options] <name> ...
@@ -74,7 +75,7 @@ sub main {
$something_todo or die N("the entry to update is missing\n(one of %s)\n", join(", ", @entries));
}
- $urpm->update_media(%options, callback => \&urpm::sync_logger);
+ $urpm->update_media(%options, callback => \&urpm::download::sync_logger);
#- try to umount removable device which may have been mounted.
$urpm->try_umounting_removables;
diff --git a/urpmq b/urpmq
index b22bf447..aaa8f178 100755
--- a/urpmq
+++ b/urpmq
@@ -367,7 +367,9 @@ if ($query->{list_aliases}) {
$pkg->files and print join("\n", $pkg->files)."\n";
}
if ($query->{changelog} && $pkg->changelog_time && $pkg->changelog_name && $pkg->changelog_text) {
- print join("\n", mapn { "* ".urpm::localtime2changelog($_[0])." $_[1]\n\n$_[2]\n" } [ $pkg->changelog_time ], [ $pkg->changelog_name ] , [ $pkg->changelog_text ]);
+ print join("\n", mapn {
+ "* ".urpm::msg::localtime2changelog($_[0])." $_[1]\n\n$_[2]\n"
+ } [ $pkg->changelog_time ], [ $pkg->changelog_name ] , [ $pkg->changelog_text ]);
}
}
}
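
mapn here comes from MDK::Common and walks several array references in parallel, one element from each per call, which is how urpmq zips the changelog time/name/text triples into entries; a small sketch:

    use MDK::Common;
    # $_[0] and $_[1] are the n-th elements of the two listed array refs
    my @pairs = mapn { "$_[0]=$_[1]" } [ 'a', 'b' ], [ 1, 2 ];    # ("a=1", "b=2")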