package install::http; # $Id$
use urpm::download;
use common;
# to be killed once callers are fixed
sub close() {
}
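# getFile($url, %options): fetch $url and hand back an open filehandle on the
# downloaded file, discarding the size returned by get_file_and_size()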
sub getFile {
    my ($url, %o_options) = @_;
    my ($_size, $fh) = get_file_and_size($url, %o_options) or return;
    $fh;
}
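# parse_http_url($url): split an http:// or ftp:// URL (optionally carrying
# user:password@ credentials and a port) into its ($host, $port, $path) captures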
sub parse_http_url {
    my ($url) = @_;
    $url =~ m,^(?:http|ftp)://(?:[^:/]+:[^:/]+\@)?([^/:@]+)(?::(\d+))?(/\S*)?$,;
}
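# get_file_and_size_($f, $url): resolve $f against $url; an absolute $f is
# rebased on the host (and port) of $url, a relative one is simply appended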
sub get_file_and_size_ {
    my ($f, $url) = @_;
    if ($f =~ m!^/!) {
        my ($host, $port, $_path) = parse_http_url($url);
        get_file_and_size("http://$host" . ($port ? ":$port" : '') . $f);
    } else {
        get_file_and_size("$url/$f");
    }
}
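# get_file_and_size($url, %options): download $url into the urpmi cache directory
# and return (size, filehandle), or undef on failure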
sub get_file_and_size {
    my ($url, %o_options) = @_;
    # can be used for ftp urls (with http proxy)
    my ($host) = parse_http_url($url);
    defined $host or return undef;
    my $urpm = $::o->{packages};
    if (!$urpm) {
        require install::pkgs;
        $urpm = install::pkgs::empty_packages($::o->{keep_unrequested_dependencies});
    }
    my $cachedir = $urpm->{cachedir} || '/root';
    my $file = $url;
    $file =~ s!.*/!$cachedir/!;
    unlink $file; # prevent "partial file" errors
    # honour the $PROXY/$PROXYPORT environment variables, defaulting to port 1080 if none is given
    if ($ENV{PROXY}) {
        my ($proxy, $port) = urpm::download::parse_http_proxy(join(':', $ENV{PROXY}, $ENV{PROXYPORT}))
            or die "bad proxy declaration\n";
        $proxy .= ":1080" unless $port;
        urpm::download::set_cmdline_proxy(http_proxy => "http://$proxy/");
    }
    # remember cwd (the pattern match also untaints it) so we can chdir back below
    (my $cwd) = getcwd() =~ /(.*)/;
    my $res = eval { urpm::download::sync_url($urpm, $url, %o_options, dir => $cachedir) };
    #- urpmi does not always reset cwd when dying or failing!
    #- do not stay in /mnt/var/cache/urpmi, it's evil to be in /mnt
    #- FIXME: fix urpmi
    chdir $cwd;
    if ($res) {
        open(my $f, '<', $file);
        (-s $file, $f);
    } else {
        log::l("retrieval of [$file] failed");
        undef;
    }
}
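# Minimal usage sketch (hypothetical caller; the URL below is a placeholder,
# not something this module defines):
#   my $fh = install::http::getFile("http://mirror.example.com/media/foo.rpm")
#       or die "download failed\n";
#   local $/; my $payload = <$fh>;
# Call get_file_and_size() directly when the file size is needed as well.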
1;