| author    | Thierry Vignaud <tv@mageia.org>           | 2012-03-27 17:15:08 +0000 |
|-----------|-------------------------------------------|---------------------------|
| committer | Thierry Vignaud <tv@mageia.org>           | 2012-03-27 17:15:08 +0000 |
| commit    | ada1534f6420d00de86437e57c921133c5c61ceb  |                           |
| tree      | 7db618a2d228e19ea651ae1078529344bec937e8  |                           |
| parent    | 412e8c970890f3412d8b67e1d42e96f786aeb211  |                           |
silent perl_checker
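This commit swaps the bare empty list `()` for `@{[]}` (a dereference of an empty anonymous array) in the false branch of ternaries, solely to keep perl_checker quiet. In list context both forms flatten to nothing, so the conditionally included command-line arguments and hash entries are unchanged at runtime. The following is a minimal, self-contained sketch of the idiom; the `%options` hash, the `@cmd_*` arrays, and the `$urpmi_root` value are hypothetical stand-ins, not urpmi code.

```perl
#!/usr/bin/perl
# Sketch of the "conditional list element" idiom touched by this commit.
# %options, @cmd_old/@cmd_new and $urpmi_root are made-up examples.
use strict;
use warnings;

my %options = (resume => 1, quiet => 0);

# Old spelling: a bare empty list in the false branch.
my @cmd_old = (
    '/usr/bin/wget',
    $options{resume} ? '--continue' : (),
    $options{quiet}  ? '-q'         : (),
);

# New spelling: @{[]} dereferences an empty anonymous array, which is
# also an empty list at runtime but keeps perl_checker quiet
# (the point of this commit).
my @cmd_new = (
    '/usr/bin/wget',
    $options{resume} ? '--continue' : @{[]},
    $options{quiet}  ? '-q'         : @{[]},
);

print "old: @cmd_old\n";    # old: /usr/bin/wget --continue
print "new: @cmd_new\n";    # new: /usr/bin/wget --continue

# The same idiom conditionally adds a key/value pair to a hash,
# as in the urpm.pm hunk below.
my $urpmi_root = '/some/chroot';
my %h = (
    root => $urpmi_root,
    $urpmi_root ? (urpmi_root => $urpmi_root) : @{[]},
);
print join(', ', map { "$_ => $h{$_}" } sort keys %h), "\n";
# root => /some/chroot, urpmi_root => /some/chroot
```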
| mode       | file             | lines changed |
|------------|------------------|---------------|
| -rw-r--r-- | urpm.pm          | 2             |
| -rw-r--r-- | urpm/cfg.pm      | 4             |
| -rw-r--r-- | urpm/download.pm | 54            |
| -rw-r--r-- | urpm/md5sum.pm   | 2             |
| -rw-r--r-- | urpm/mirrors.pm  | 2             |
| -rwxr-xr-x | urpmf            | 2             |
6 files changed, 33 insertions, 33 deletions
```diff
diff --git a/urpm.pm b/urpm.pm
--- a/urpm.pm
+++ b/urpm.pm
@@ -182,7 +182,7 @@ sub set_files {
     statedir => "$urpmi_root/var/lib/urpmi",
     cachedir => "$urpmi_root/var/cache/urpmi",
     root => $urpmi_root,
-    $urpmi_root ? (urpmi_root => $urpmi_root) : (),
+    $urpmi_root ? (urpmi_root => $urpmi_root) : @{[]},
     );
     $urpm->{$_} = $h{$_} foreach keys %h;
diff --git a/urpm/cfg.pm b/urpm/cfg.pm
index 9d82c06f..5e002b0d 100644
--- a/urpm/cfg.pm
+++ b/urpm/cfg.pm
@@ -141,7 +141,7 @@ sub load_config_raw {
         $err = N("medium `%s' is defined twice, aborting", $name);
         return;
     }
-    $block = { name => $name, $url ? (url => $url) : () };
+    $block = { name => $name, $url ? (url => $url) : @{[]} };
     } elsif (/^(hdlist
        |list
        |with_hdlist
@@ -219,7 +219,7 @@ sub dump_config {
     my %global = (name => '', %{$config->{global}});
-    dump_config_raw($file, [ %global ? \%global : (), @{$config->{media}} ]);
+    dump_config_raw($file, [ %global ? \%global : @{[]}, @{$config->{media}} ]);
 }
 sub dump_config_raw {
diff --git a/urpm/download.pm b/urpm/download.pm
index f4718036..18c63f42 100644
--- a/urpm/download.pm
+++ b/urpm/download.pm
@@ -324,16 +324,16 @@ sub sync_wget {
     my $wget_command = join(" ", map { "'$_'" }
     #- construction of the wget command-line
     "/usr/bin/wget",
-    ($options->{'limit-rate'} ? "--limit-rate=$options->{'limit-rate'}" : ()),
+    ($options->{'limit-rate'} ? "--limit-rate=$options->{'limit-rate'}" : @{[]}),
     ($options->{resume} ? "--continue" : "--force-clobber"),
-    ($options->{proxy} ? set_proxy({ type => "wget", proxy => $options->{proxy} }) : ()),
-    ($options->{retry} ? ('-t', $options->{retry}) : ()),
+    ($options->{proxy} ? set_proxy({ type => "wget", proxy => $options->{proxy} }) : @{[]}),
+    ($options->{retry} ? ('-t', $options->{retry}) : @{[]}),
     ($options->{callback} ? ("--progress=bar:force", "-o", "-") : $options->{quiet} ? "-q" : @{[]}),
     "--retr-symlinks",
-    ($options->{"no-certificate-check"} ? "--no-check-certificate" : ()),
+    ($options->{"no-certificate-check"} ? "--no-check-certificate" : @{[]}),
     "--timeout=$CONNECT_TIMEOUT",
-    (defined $options->{'wget-options'} ? split /\s+/, $options->{'wget-options'} : ()),
+    (defined $options->{'wget-options'} ? split /\s+/, $options->{'wget-options'} : @{[]}),
     '-P', $options->{dir},
     @_
     ) . " |";
@@ -414,15 +414,15 @@ sub sync_curl {
     #- prepare to get back size and time stamp of each file.
     my $cmd = join(" ", map { "'$_'" } "/usr/bin/curl",
     "-q", # don't read .curlrc; some toggle options might interfer
-    ($options->{'limit-rate'} ? ("--limit-rate", $options->{'limit-rate'}) : ()),
-    ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
-    ($options->{retry} ? ('--retry', $options->{retry}) : ()),
+    ($options->{'limit-rate'} ? ("--limit-rate", $options->{'limit-rate'}) : @{[]}),
+    ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : @{[]}),
+    ($options->{retry} ? ('--retry', $options->{retry}) : @{[]}),
     "--stderr", "-", # redirect everything to stdout
     "--disable-epsv",
     "--connect-timeout", $CONNECT_TIMEOUT,
     "-s", "-I",
     "--anyauth",
-    (defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : ()),
+    (defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : @{[]}),
     @ftp_files);
     $options->{debug} and $options->{debug}($cmd);
     open my $curl, "$cmd |";
@@ -474,19 +474,19 @@ sub sync_curl {
     my @l = (@ftp_files, @other_files);
     my $cmd = join(" ", map { "'$_'" } "/usr/bin/curl",
     "-q", # don't read .curlrc; some toggle options might interfer
-    ($options->{'limit-rate'} ? ("--limit-rate", $options->{'limit-rate'}) : ()),
-    ($options->{resume} ? ("--continue-at", "-") : ()),
-    ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : ()),
-    ($options->{retry} ? ('--retry', $options->{retry}) : ()),
+    ($options->{'limit-rate'} ? ("--limit-rate", $options->{'limit-rate'}) : @{[]}),
+    ($options->{resume} ? ("--continue-at", "-") : @{[]}),
+    ($options->{proxy} ? set_proxy({ type => "curl", proxy => $options->{proxy} }) : @{[]}),
+    ($options->{retry} ? ('--retry', $options->{retry}) : @{[]}),
     ($options->{quiet} ? "-s" : @{[]}),
-    ($options->{"no-certificate-check"} ? "-k" : ()),
+    ($options->{"no-certificate-check"} ? "-k" : @{[]}),
     $location_trusted ? "--location-trusted" : @{[]},
     "-R",
     "-f",
     "--disable-epsv",
     "--connect-timeout", $CONNECT_TIMEOUT,
     "--anyauth",
-    (defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : ()),
+    (defined $options->{'curl-options'} ? split /\s+/, $options->{'curl-options'} : @{[]}),
     "--stderr", "-", # redirect everything to stdout
     @all_files);
     $options->{debug} and $options->{debug}($cmd);
@@ -576,7 +576,7 @@ sub sync_rsync {
         ("--timeout=$CONNECT_TIMEOUT", "--contimeout=$CONNECT_TIMEOUT")),
     qw(--partial --no-whole-file --no-motd --copy-links),
-    (defined $options->{'rsync-options'} ? split /\s+/, $options->{'rsync-options'} : ()),
+    (defined $options->{'rsync-options'} ? split /\s+/, $options->{'rsync-options'} : @{[]}),
     "'$file' '$options->{dir}' 2>&1");
     $options->{debug} and $options->{debug}($cmd);
     open(my $rsync, "$cmd |");
@@ -649,7 +649,7 @@ sub sync_prozilla {
     my $proz_command = join(" ", map { "'$_'" }
     "/usr/bin/proz",
     "--no-curses",
-    (defined $options->{'prozilla-options'} ? split /\s+/, $options->{'prozilla-options'} : ()),
+    (defined $options->{'prozilla-options'} ? split /\s+/, $options->{'prozilla-options'} : @{[]}),
     @_
     );
     my $ret = system($proz_command);
@@ -675,7 +675,7 @@ sub sync_aria2 {
     my $stat_file = ($< ? $ENV{HOME} : '/root') . '/.aria2-adaptive-stats';
     my $aria2c_command = join(" ", map { "'$_'" }
-    "/usr/bin/aria2c", $options->{debug} ? ('--log', "$options->{dir}/.aria2.log") : (),
+    "/usr/bin/aria2c", $options->{debug} ? ('--log', "$options->{dir}/.aria2.log") : @{[]},
     "--auto-file-renaming=false",
     '--ftp-pasv',
     "--follow-metalink=mem",
@@ -685,15 +685,15 @@ sub sync_aria2 {
     '--lowest-speed-limit=20K', "--timeout", 3,
     '--metalink-servers=3', # maximum number of servers to use for one download
     '--uri-selector=adaptive', "--server-stat-if=$stat_file", "--server-stat-of=$stat_file",
-    $options->{is_versioned} ? () : '--max-file-not-found=3', # number of not found errors on different servers before aborting file download
+    $options->{is_versioned} ? @{[]} : '--max-file-not-found=3', # number of not found errors on different servers before aborting file download
     '--connect-timeout=6', # $CONNECT_TIMEOUT,
-    ) : (),
+    ) : @{[]},
     "-Z", "-j1",
-    ($options->{'limit-rate'} ? "--max-download-limit=" . $options->{'limit-rate'} : ()),
+    ($options->{'limit-rate'} ? "--max-download-limit=" . $options->{'limit-rate'} : @{[]}),
     ($options->{resume} ? "--continue" : "--allow-overwrite=true"),
-    ($options->{proxy} ? set_proxy({ type => "aria2", proxy => $options->{proxy} }) : ()),
-    ($options->{"no-certificate-check"} ? "--check-certificate=false" : ()),
-    (defined $options->{'aria2-options'} ? split /\s+/, $options->{'aria2-options'} : ()),
+    ($options->{proxy} ? set_proxy({ type => "aria2", proxy => $options->{proxy} }) : @{[]}),
+    ($options->{"no-certificate-check"} ? "--check-certificate=false" : @{[]}),
+    (defined $options->{'aria2-options'} ? split /\s+/, $options->{'aria2-options'} : @{[]}),
     _create_metalink_($urpm, $medium, $rel_files, $options));
     $options->{debug} and $options->{debug}($aria2c_command);
@@ -864,8 +864,8 @@ sub _all_options {
     dir => "$urpm->{cachedir}/partial",
     proxy => get_proxy_($urpm, $medium),
     metalink => $medium->{mirrorlist},
-    $medium->{"disable-certificate-check"} ? "no-certificate-check" : (),
-    $urpm->{debug} ? (debug => $urpm->{debug}) : (),
+    $medium->{"disable-certificate-check"} ? "no-certificate-check" : @{[]},
+    $urpm->{debug} ? (debug => $urpm->{debug}) : @{[]},
     %$options,
     );
     foreach my $cpt (qw(compress limit-rate retry wget-options curl-options rsync-options prozilla-options aria2-options metalink)) {
@@ -977,7 +977,7 @@ sub _sync_webfetch_raw {
         }
     }
     } elsif ($proto eq 'ssh') {
-    my @ssh_files = map { m!^ssh://([^/]*)(.*)! ? "$1:$2" : () } @$files;
+    my @ssh_files = map { m!^ssh://([^/]*)(.*)! ? "$1:$2" : @{[]} } @$files;
     sync_ssh($options, @ssh_files);
     } else {
     die N("unable to handle protocol: %s", $proto);
diff --git a/urpm/md5sum.pm b/urpm/md5sum.pm
index ed8eb6e7..906066fe 100644
--- a/urpm/md5sum.pm
+++ b/urpm/md5sum.pm
@@ -31,7 +31,7 @@ sub versioned_media_info_file {
     my ($urpm, $medium, $basename) = @_;
     my $md5sums = $medium->{parsed_md5sum} or $urpm->{log}("$medium->{name} has no md5sum"), return;
-    my @l = map { $md5sums->{$_} eq $md5sums->{$basename} && /^(\d{8}-\d{6})-\Q$basename\E$/ ? $1 : () } keys %$md5sums;
+    my @l = map { $md5sums->{$_} eq $md5sums->{$basename} && /^(\d{8}-\d{6})-\Q$basename\E$/ ? $1 : @{[]} } keys %$md5sums;
     if (@l == 0) {
     $urpm->{debug}("no versioned $basename for medium $medium->{name}") if $urpm->{debug};
diff --git a/urpm/mirrors.pm b/urpm/mirrors.pm
index 25b4e04d..a8b6de69 100644
--- a/urpm/mirrors.pm
+++ b/urpm/mirrors.pm
@@ -326,7 +326,7 @@ sub _product_id_mtime() { (stat('/etc/product.id'))[9] }
 sub parse_LDAP_namespace_structure {
     my ($s) = @_;
-    my %h = map { /(.*?)=(.*)/ ? ($1 => $2) : () } split(',', $s);
+    my %h = map { /(.*?)=(.*)/ ? ($1 => $2) : @{[]} } split(',', $s);
     \%h;
 }
diff --git a/urpmf b/urpmf
--- a/urpmf
+++ b/urpmf
@@ -260,7 +260,7 @@
 my %needed_media_info = map { $_ => 1 } grep {
     int(grep { $usedtags{$_} } @$l);
 } keys %tags_per_media_info;
-my @needed_xml_info = map { s/xml_info__// ? $_ : () } keys %needed_media_info;
+my @needed_xml_info = map { s/xml_info__// ? $_ : @{[]} } keys %needed_media_info;
 if (@needed_xml_info > 1) {
     # we don't handle parallel parsing of xml files, default to hdlist
     $needed_media_info{hdlist} = 1;
```