package urpm::media;

# $Id$

use urpm 'file_from_local_url';
use urpm::msg;
use urpm::util;
use urpm::removable;
use urpm::lock;
use MDV::Distribconf;

our @PER_MEDIA_OPT = qw(
    downloader
    hdlist
    ignore
    key-ids
    list
    media_info_dir
    name
    noreconfigure
    priority-upgrade
    removable
    static
    synthesis
    update
    url
    verify-rpm
    virtual
    with_hdlist
);

sub only_media_opts {
    my ($m) = @_;
    my %m = map { $_ => $m->{$_} } grep { defined $m->{$_} } @PER_MEDIA_OPT;
    \%m;
}

sub read_private_netrc {
    my ($urpm) = @_;

    my @words = split(/\s+/, scalar cat_($urpm->{private_netrc}));
    my @l;
    my $e;
    while (@words) {
        my $keyword = shift @words;
        if ($keyword eq 'machine') {
            push @l, $e = { machine => shift(@words) };
        } elsif ($keyword eq 'default') {
            push @l, $e = { default => '' };
        } elsif ($keyword eq 'login' || $keyword eq 'password' || $keyword eq 'account') {
            $e->{$keyword} = shift(@words);
        } else {
            $urpm->{error}("unknown netrc command $keyword");
        }
    }
    @l;
}

sub read_config_add_passwords {
    my ($urpm, $config) = @_;

    my @netrc = read_private_netrc($urpm) or return;
    foreach (@{$config->{media}}) {
        my $u = urpm::download::parse_url_with_login($_->{url}) or next;
        if (my ($e) = grep { ($_->{default} || $_->{machine} eq $u->{machine}) && $_->{login} eq $u->{login} } @netrc) {
            $_->{url} = sprintf('%s://%s:%s@%s%s', $u->{proto}, $u->{login}, $e->{password}, $u->{machine}, $u->{dir});
        } else {
            $urpm->{log}(sprintf('no password found for %s@%s', $u->{login}, $u->{machine}));
        }
    }
}

sub remove_passwords_and_write_private_netrc {
    my ($urpm, $config) = @_;

    my @l;
    foreach (@{$config->{media}}) {
        my $u = urpm::download::parse_url_with_login($_->{url}) or next;
        #- check whether a password is visible
        $u->{password} or next;

        push @l, $u;
        $_->{url} = sprintf('%s://%s@%s%s', $u->{proto}, $u->{login}, $u->{machine}, $u->{dir});
    }
    {
        my $fh = urpm::sys::open_safe($urpm, '>', $urpm->{private_netrc}) or return;
        foreach my $u (@l) {
            printf $fh "machine %s login %s password %s\n", $u->{machine}, $u->{login}, $u->{password};
        }
    }
    chmod 0600, $urpm->{private_netrc};
}

#- handle the deprecated way of saving passwords
sub recover_url_from_list {
    my ($urpm, $medium) = @_;

    #- /./ is the end-of-url marker in a list file (typically generated by a
    #-   find . -name "*.rpm" > list
    #- for an exportable list file).
    if (my @probe = map { m!^(.*)/\./! || m!^(.*)/[^/]*$! } cat_(statedir_list($urpm, $medium))) {
        $urpm->{log}("recovering url from " . statedir_list($urpm, $medium));
        ($medium->{url}) = sort { length($a) <=> length($b) } @probe;
        #- ensure urpmi.cfg is handled using only the partially hidden url + netrc,
        #- since the list file won't be generated anymore
        $urpm->{modified} = 1;
    }
}

#- Loads /etc/urpmi/urpmi.cfg and performs basic checks.
#- Does not handle the old format: <name> <url> [with <path_hdlist>]
#- options :
#-   - nocheck_access : don't check the presence of hdlist and other files
sub read_config {
    my ($urpm, $b_nocheck_access, $b_auto_correct) = @_;

    return if $urpm->{media}; #- media already loaded
    $urpm->{media} = [];

    my $config = urpm::cfg::load_config($urpm->{config})
        or $urpm->{fatal}(6, $urpm::cfg::err);

    #- global options
    if (my $global = $config->{global}) {
        foreach my $opt (keys %$global) {
            if (defined $global->{$opt} && !exists $urpm->{options}{$opt}) {
                $urpm->{options}{$opt} = $global->{$opt};
            }
        }
    }

    #- per-media options
    read_config_add_passwords($urpm, $config);
    foreach my $m (@{$config->{media}}) {
        my $medium = only_media_opts($m);
        if (!$medium->{url}) {
            #- recover the url the old deprecated way...
            #- only useful for migration, new urpmi.cfg will use netrc
            recover_url_from_list($urpm, $medium);
            $medium->{url} or $urpm->{error}("unable to find url in list file $medium->{name}, medium ignored");
        }
        add_existing_medium($urpm, $medium, $b_nocheck_access, $b_auto_correct);
    }

    eval { require urpm::ldap; urpm::ldap::load_ldap_media($urpm) };

    #- read MD5 sums (not in urpmi.cfg but in a separate file)
    foreach (@{$urpm->{media}}) {
        if (my $md5sum = urpm::md5sum::from_MD5SUM("$urpm->{statedir}/MD5SUM", statedir_hdlist_or_synthesis($urpm, $_, 's'))) {
            $_->{md5sum} = $md5sum;
        }
    }

    #- remember global options for write_config
    $urpm->{global_config} = $config->{global};
}

#- if invalid, set {ignore}
sub check_existing_medium {
    my ($urpm, $medium, $b_nocheck_access, $b_auto_correct) = @_;

    if ($medium->{virtual}) {
        #- a virtual medium needs to have a url available without using a list file.
        if ($medium->{list}) {
            $medium->{ignore} = 1;
            $urpm->{error}(N("virtual medium \"%s\" should not have defined hdlist or list file, medium ignored",
                             $medium->{name}));
        } elsif (!$medium->{url}) {
            $medium->{ignore} = 1;
            $urpm->{error}(N("virtual medium \"%s\" should have a clear url, medium ignored",
                             $medium->{name}));
        }
    } else {
        if ($medium->{hdlist}) {
            #- is this check really needed? keeping it just in case
            $medium->{hdlist} ne 'list' && $medium->{hdlist} ne 'pubkey'
                or $medium->{ignore} = 1, $urpm->{error}(N("invalid hdlist name"));
        }
        if (!$medium->{ignore} && !$medium->{list}) {
            unless (defined $medium->{url}) {
                $medium->{list} = "list.$medium->{name}";
                unless (-e statedir_list($urpm, $medium)) {
                    $medium->{ignore} = 1,
                      $urpm->{error}(N("unable to find list file for \"%s\", medium ignored", $medium->{name}));
                }
            }
        }
    }

    #- check the presence of hdlist and list files if necessary.
    if (!$b_nocheck_access && !$medium->{ignore}) {
        if ($medium->{virtual} && -r hdlist_or_synthesis_for_virtual_medium($medium, 's')) {
        } elsif (-r statedir_hdlist_or_synthesis($urpm, $medium, 's')) {
        } elsif (-r statedir_hdlist($urpm, $medium)) {
            $b_auto_correct and delete $medium->{synthesis};
            $urpm->{error}(N("\"synthesis\" should not be set (medium \"%s\")", $medium->{name}));
        } elsif (-r statedir_synthesis($urpm, $medium)) {
            $medium->{synthesis} = 1;
            $urpm->{error}(N("\"synthesis\" should be set (medium \"%s\")", $medium->{name}));
        } else {
            $medium->{ignore} = 1;
            $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
        }
        if ($medium->{list} && -r statedir_list($urpm, $medium)) {
        } elsif ($medium->{url}) {
            if ($medium->{list}) {
                $b_auto_correct and delete $medium->{list}; #- remove buggy list
                $urpm->{error}(N("unable to access list file of \"%s\"", $medium->{name}));
            }
        } else {
            $medium->{ignore} = 1;
            $urpm->{error}(N("unable to access list file of \"%s\", medium ignored", $medium->{name}));
        }
    }

    foreach my $field ('hdlist', 'list') {
        $medium->{$field} && $medium->{$field} ne '1' or next;
        if (grep { $_->{$field} eq $medium->{$field} } @{$urpm->{media}}) {
            $medium->{ignore} = 1;
            $urpm->{error}(
                $field eq 'hdlist'
                  ? N("medium \"%s\" trying to use an already used hdlist, medium ignored", $medium->{name})
                  : N("medium \"%s\" trying to use an already used list, medium ignored", $medium->{name}));
        }
    }
}

sub _migrate__with_hdlist {
    my ($medium) = @_;

    grep { $_ eq '..' } split('/', $medium->{with_hdlist}) and return;

    #- try to migrate to media_info_dir
    my $b = basename($medium->{with_hdlist});
    if ($b eq ($medium->{synthesis} ?
               'synthesis.hdlist.cz' : 'hdlist.cz')) {
        $medium->{media_info_dir} = dirname(delete $medium->{with_hdlist});
        1;
    } else {
        0;
    }
}

#- probe the medium to be used, taking the old medium into account too.
sub add_existing_medium {
    my ($urpm, $medium, $b_nocheck_access, $b_auto_correct) = @_;

    if (name2medium($urpm, $medium->{name})) {
        $urpm->{error}(N("trying to override existing medium \"%s\", skipping", $medium->{name}));
        return;
    }

    if ($medium->{with_hdlist} && _migrate__with_hdlist($medium)) {
        $urpm->{modified} = 1;
    }

    check_existing_medium($urpm, $medium, $b_nocheck_access, $b_auto_correct);

    #- probe the removable device.
    probe_removable_device($urpm, $medium);

    #- strip trailing slashes from the URL.
    $medium->{url} and $medium->{url} =~ s|(.*?)/*$|$1|;

    push @{$urpm->{media}}, $medium;
}

sub _set_synthesis_or_hdlist {
    my ($medium, $want_synthesis) = @_;
    $medium->{$want_synthesis ? 'synthesis' : 'hdlist'} = 1;
}

sub file_from_file_url {
    my ($url) = @_;
    $url =~ m!^(?:file:/)?(/.*)! && $1;
}

sub _synthesis_or_not {
    my ($medium, $prefer_synthesis) = @_;
    $medium->{synthesis} || !$medium->{hdlist} && $prefer_synthesis;
}

sub _url_with_hdlist_basename {
    my ($medium, $prefer_synthesis) = @_;
    $medium->{with_hdlist}
      ? basename($medium->{with_hdlist})
      : _synthesis_or_not($medium, $prefer_synthesis) ? 'synthesis.hdlist.cz' : 'hdlist.cz';
}

sub _hdlist_dir {
    my ($medium) = @_;
    my $base = file_from_local_url($medium->{url}) || $medium->{url};
    $medium->{with_hdlist}
      ? reduce_pathname("$base/$medium->{with_hdlist}/..")
      : $medium->{media_info_dir} && reduce_pathname("$base/$medium->{media_info_dir}");
}

sub _url_with_hdlist {
    my ($medium, $prefer_synthesis) = @_;
    my $base = file_from_local_url($medium->{url}) || $medium->{url};
    $medium->{with_hdlist}
      ? reduce_pathname("$base/$medium->{with_hdlist}")
      : _hdlist_dir($medium) . "/" . _url_with_hdlist_basename($medium, $prefer_synthesis);
}

sub hdlist_or_synthesis_for_virtual_medium {
    my ($medium, $prefer_synthesis) = @_;
    file_from_file_url($medium->{url}) && _url_with_hdlist($medium, $prefer_synthesis);
}

sub _hdlist {
    my ($medium) = @_;
    $medium->{hdlist} && $medium->{hdlist} ne '1'
      ? $medium->{hdlist}
      : $medium->{name} && "hdlist.$medium->{name}.cz";
}

sub statedir_hdlist_or_synthesis {
    my ($urpm, $medium, $prefer_synthesis) = @_;
    "$urpm->{statedir}/" . (_synthesis_or_not($medium, $prefer_synthesis) ? 'synthesis.' : '') . _hdlist($medium);
}

sub statedir_hdlist {
    my ($urpm, $medium) = @_;
    "$urpm->{statedir}/" . _hdlist($medium);
}

sub statedir_synthesis {
    my ($urpm, $medium) = @_;
    "$urpm->{statedir}/synthesis." . _hdlist($medium);
}

sub statedir_list {
    my ($urpm, $medium) = @_;
    $medium->{list} && "$urpm->{statedir}/$medium->{list}";
}

sub statedir_descriptions {
    my ($urpm, $medium) = @_;
    $medium->{name} && "$urpm->{statedir}/descriptions.$medium->{name}";
}

sub statedir_names {
    my ($urpm, $medium) = @_;
    $medium->{name} && "$urpm->{statedir}/names.$medium->{name}";
}

sub cachedir_with_hdlist {
    my ($urpm, $medium, $prefer_synthesis) = @_;
    _url_with_hdlist($medium, $prefer_synthesis) &&
      "$urpm->{cachedir}/partial/" .
        _url_with_hdlist_basename($medium, $prefer_synthesis);
}

sub cachedir_list {
    my ($urpm, $medium) = @_;
    $medium->{list} && "$urpm->{cachedir}/partial/$medium->{list}";
}

sub any_hdlist {
    my ($urpm, $medium) = @_;
    my $f = statedir_hdlist($urpm, $medium);
    if ($medium->{virtual} && !$medium->{synthesis}
        || !-e $f && file_from_local_url($medium->{url}) && !$medium->{synthesis} && !$medium->{hdlist}) {
        $f = _url_with_hdlist($medium, '');
    }
    -e $f && $f;
}

sub any_synthesis {
    my ($urpm, $medium) = @_;
    my $f = $medium->{virtual} && !$medium->{hdlist}
      ? _url_with_hdlist($medium, 's')
      : statedir_synthesis($urpm, $medium);
    -e $f && $f;
}

sub name2medium {
    my ($urpm, $name) = @_;
    my ($medium) = grep { $_->{name} eq $name } @{$urpm->{media}};
    $medium;
}

#- probe the device associated with a removable medium.
sub probe_removable_device {
    my ($urpm, $medium) = @_;

    if ($medium->{url} && $medium->{url} =~ /^removable/) {
        #- try to find the device name in the url scheme; this is deprecated,
        #- use the medium option "removable" instead
        if ($medium->{url} =~ /^removable_?([^_:]*)/) {
            $medium->{removable} ||= $1 && "/dev/$1";
        }
    } else {
        delete $medium->{removable};
        return;
    }

    #- try to find the device to open/close for the removable medium.
    if (my $dir = file_from_local_url($medium->{url})) {
        my %infos;
        my @mntpoints = urpm::sys::find_mntpoints($dir, \%infos);
        if (@mntpoints > 1) { #- the return value is suitable for a hash.
            $urpm->{log}(N("too many mount points for removable medium \"%s\"", $medium->{name}));
            $urpm->{log}(N("taking removable device as \"%s\"", join ',', map { $infos{$_}{device} } @mntpoints));
        }
        if (urpm::removable::is_iso($medium->{removable})) {
            $urpm->{log}(N("Medium \"%s\" is an ISO image, will be mounted on-the-fly", $medium->{name}));
        } elsif (@mntpoints) {
            if ($medium->{removable} && $medium->{removable} ne $infos{$mntpoints[-1]}{device}) {
                $urpm->{log}(N("using different removable device [%s] for \"%s\"",
                               $infos{$mntpoints[-1]}{device}, $medium->{name}));
            }
            $medium->{removable} = $infos{$mntpoints[-1]}{device};
        } else {
            $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
        }
    } else {
        $urpm->{error}(N("unable to retrieve pathname for removable medium \"%s\"", $medium->{name}));
    }
}

sub write_MD5SUM {
    my ($urpm) = @_;

    #- write the MD5SUM file
    my $fh = urpm::sys::open_safe($urpm, '>', "$urpm->{statedir}/MD5SUM") or return 0;
    foreach my $medium (grep { $_->{md5sum} } @{$urpm->{media}}) {
        my $s = basename(statedir_hdlist_or_synthesis($urpm, $medium, 's'));
        print $fh "$medium->{md5sum} $s\n";
    }
    $urpm->{log}(N("wrote %s", "$urpm->{statedir}/MD5SUM"));

    delete $urpm->{md5sum_modified};
}
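#- For illustration only: the MD5SUM file written above holds one
#- "<md5sum> <basename>" line per medium; the values below are hypothetical:
#-   0123456789abcdef0123456789abcdef synthesis.hdlist.main.cz
#-   fedcba9876543210fedcba9876543210 hdlist.contrib.cz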
#- Writes the urpmi.cfg file.
sub write_urpmi_cfg {
    my ($urpm) = @_;

    #- avoid trashing the existing configuration if it wasn't loaded
    $urpm->{media} or return;

    my $config = {
        #- global config options found in the config file, without the ones
        #- set from the command line
        global => $urpm->{global_config},
        media => [ map { only_media_opts($_) } grep { !$_->{external} } @{$urpm->{media}} ],
    };
    remove_passwords_and_write_private_netrc($urpm, $config);
    urpm::cfg::dump_config($urpm->{config}, $config)
        or $urpm->{fatal}(6, N("unable to write config file [%s]", $urpm->{config}));
    $urpm->{log}(N("wrote config file [%s]", $urpm->{config}));

    #- everything should be synced now.
    delete $urpm->{modified};
}

sub write_config {
    my ($urpm) = @_;
    write_urpmi_cfg($urpm);
    write_MD5SUM($urpm);
}

sub _tempignore {
    my ($medium, $ignore) = @_;
    $medium->{ignore} = $ignore;
}

#- read the urpmi.cfg file as well as the necessary synthesis files
#- options :
#-   root (deprecated, set $urpm->{root} directly instead)
#-   cmdline_skiplist
#-   nocheck_access (used by read_config)
#-
#-   callback (urpmf)
#-   need_hdlist (for urpmf: to be able to have info not available in synthesis)
#-   nodepslist (for urpmq: we don't need the hdlist/synthesis)
#-   no_skiplist (urpmf)
#-   no_second_pass (urpmf)
#-
#-   synthesis (use this synthesis file, and only this synthesis file)
#-
#-   parallel
#-   usedistrib (otherwise uses urpmi.cfg)
#-   media
#-   excludemedia
#-   sortmedia
#-
#-   update
#-   searchmedia
sub configure {
    my ($urpm, %options) = @_;

    clean($urpm);

    $options{parallel} && $options{usedistrib}
        and $urpm->{fatal}(1, N("Can't use parallel mode with use-distrib mode"));

    if ($options{parallel}) {
        require urpm::parallel;
        urpm::parallel::configure($urpm, $options{parallel});
        if (!$options{media} && $urpm->{parallel_handler}{media}) {
            $options{media} = $urpm->{parallel_handler}{media};
            $urpm->{log}->(N("using associated media for parallel mode: %s", $options{media}));
        }
    } else {
        #- nb: can't have both parallel and root
        $urpm->{root} = $options{root} if $options{root};
    }

    if ($urpm->{root} && ! -c "$urpm->{root}/dev/null") {
        mkdir "$urpm->{root}/dev";
        system("/bin/cp", "-a", '/dev/null', "$urpm->{root}/dev");
    }

    if ($options{synthesis}) {
        if ($options{synthesis} ne 'none') {
            #- synthesis takes precedence over the media, update options.
            $options{media} || $options{excludemedia} || $options{sortmedia} || $options{update} || $options{usedistrib} || $options{parallel}
                and $urpm->{fatal}(1, N("--synthesis cannot be used with --media, --excludemedia, --sortmedia, --update, --use-distrib or --parallel"));
            $urpm->parse_synthesis($options{synthesis});
            #- using a synthesis disables the splitting of transactions (too risky and not useful).
            $urpm->{options}{'split-length'} = 0;
        }
    } else {
        if ($options{usedistrib}) {
            $urpm->{media} = [];
            add_distrib_media($urpm, "Virtual", $options{usedistrib}, %options, 'virtual' => 1);
        } else {
            read_config($urpm, $options{nocheck_access}, 1);
            if (!$options{media} && $urpm->{options}{'default-media'}) {
                $options{media} = $urpm->{options}{'default-media'};
            }
        }
        if ($options{media}) {
            delete $_->{modified} foreach @{$urpm->{media} || []};
            select_media($urpm, split /,/, $options{media});
            foreach (@{$urpm->{media} || []}) {
                _tempignore($_, !$_->{modified});
            }
        }
        if ($options{searchmedia}) {
            foreach (select_media_by_name($urpm, [ split /,/, $options{searchmedia} ])) {
                #- ensure this medium is selected
                $_->{modified} = 1;
                _tempignore($_, 0);
                $_->{searchmedia} = 1;
            }
        }
        if ($options{excludemedia}) {
            delete $_->{modified} foreach @{$urpm->{media} || []};
            foreach (select_media_by_name($urpm, [ split /,/, $options{excludemedia} ])) {
                $_->{modified} = 1;
                #- this is only a local ignore that will not be saved.
                _tempignore($_, 1);
            }
        }
        if ($options{sortmedia}) {
            my @sorted_media = map { select_media_by_name($urpm, [$_]) } split(/,/, $options{sortmedia});
            my @remaining = difference2($urpm->{media}, \@sorted_media);
            $urpm->{media} = [ @sorted_media, @remaining ];
        }
        _parse_media($urpm, 0, \%options) if !$options{nodepslist};
    }
    #- determine packages to withdraw (from the skip.list file) only if something should be withdrawn.
    if (!$options{nodepslist}) {
        _compute_flags_for_skiplist($urpm, $options{cmdline_skiplist}) if !$options{no_skiplist};
        _compute_flags_for_instlist($urpm);
    }
}

sub _parse_media {
    my ($urpm, $is_second_pass, $options) = @_;

    my $need_second_pass;
    foreach (grep { !$_->{ignore} && (!$options->{update} || $_->{update}) } @{$urpm->{media} || []}) {
        our $currentmedia = $_; #- hack for urpmf

        delete @$_{qw(start end)};

        my $want_hdlist = $options->{need_hdlist} || $is_second_pass;
        if (!$want_hdlist && _parse_synthesis($urpm, $_, any_synthesis($urpm, $_), $options->{callback})) {
            #- cool
        } elsif (_parse_hdlist($urpm, $_, any_hdlist($urpm, $_), $options->{callback})) {
            $need_second_pass = 1 if !$is_second_pass && !$options->{no_second_pass};
        } else {
            $options->{need_hdlist}
                and $urpm->{error}(N("Note: no hdlist for medium \"%s\", unable to return any result for it", $_->{name}));
            _parse_synthesis($urpm, $_, any_synthesis($urpm, $_), $options->{callback});
        }
        unless ($_->{ignore}) {
            _check_after_reading_hdlist_or_synthesis($urpm, $_);
        }
        if ($_->{searchmedia}) {
            $urpm->{searchmedia} = 1;
            $urpm->{log}(N("Search start: %s end: %s", $_->{start}, $_->{end}));
        }
    }
    if ($need_second_pass) {
        require URPM::Build;
        $urpm->{log}(N("performing second pass to compute dependencies\n"));
        $urpm->unresolved_provides_clean;
        _parse_media($urpm, 1, $options);
    }
}

sub _compute_flags_for_skiplist {
    my ($urpm, $cmdline_skiplist) = @_;
    my %uniq;
    $urpm->compute_flags(
        urpm::sys::get_packages_list($urpm->{skiplist}, $cmdline_skiplist),
        skip => 1,
        callback => sub {
            my ($urpm, $pkg) = @_;
            $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
            $uniq{$pkg->fullname} = undef;
            $urpm->{log}(N("skipping package %s", scalar($pkg->fullname)));
        },
    );
}

sub _compute_flags_for_instlist {
    my ($urpm) = @_;
    my %uniq;
    $urpm->compute_flags(
        urpm::sys::get_packages_list($urpm->{instlist}),
        disable_obsolete => 1,
        callback => sub {
            my ($urpm, $pkg) = @_;
            $pkg->is_arch_compat && ! exists $uniq{$pkg->fullname} or return;
            $uniq{$pkg->fullname} = undef;
            $urpm->{log}(N("would install instead of upgrade package %s", scalar($pkg->fullname)));
        },
    );
}

#- add a new medium, and sync the config file accordingly.
#- returns the new medium's name (which might differ from the requested
#- name if index_name was specified).
#- options: ignore, index_name, nolock, hdlist, synthesis, update, virtual, media_info_dir
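#-
#- an illustrative call (sketch only; the medium name, URL and options below
#- are hypothetical examples, not defaults):
#-   add_medium($urpm, "main updates",
#-       "http://mirror.example.com/distrib/media/main/updates", undef,
#-       update => 1, media_info_dir => 'media_info');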
sub add_medium {
    my ($urpm, $name, $url, $with_hdlist, %options) = @_;

    #- make sure the configuration has been read.
    $urpm->{media} or die "caller should have used ->read_config or ->configure first";

    #- if a medium with that name has already been found, we have to exit now
    if (defined $options{index_name}) {
        my ($i, $basename) = ($options{index_name}, $name);
        while (1) {
            ++$i;
            $name = $basename . ($i == 1 ? '' : $i);
            last if !name2medium($urpm, $name);
        }
    } else {
        name2medium($urpm, $name) and $urpm->{fatal}(5, N("medium \"%s\" already exists", $name));
    }

    $url =~ s,/*$,,; #- strip trailing slashes from the URL.

    #- creating the medium info.
    my $medium = {
        name => $name,
        url => $url,
        modified => !$options{ignore},
    };
    foreach (qw(downloader update ignore hdlist synthesis media_info_dir)) {
        $medium->{$_} = $options{$_} if exists $options{$_};
    }

    if ($options{virtual}) {
        file_from_file_url($url) or $urpm->{fatal}(1, N("virtual medium needs to be local"));
        $medium->{virtual} = 1;
    } else {
        probe_removable_device($urpm, $medium);
    }

    if ($with_hdlist) {
        _set_synthesis_or_hdlist($medium, $with_hdlist =~ m!(?:^|/)synthesis\.!);
        $medium->{with_hdlist} = $with_hdlist;
        _migrate__with_hdlist($medium);
    }

    #- local media have priority; others are added at the end.
    my $inserted;
    my $ignore_text = $medium->{ignore} ? ' ' . N("(ignored by default)") : '';
    if (file_from_file_url($url)) {
        #- insert before the first remote medium
        @{$urpm->{media}} = map {
            if (!file_from_file_url($_->{url}) && !$inserted) {
                $inserted = 1;
                $urpm->{info}(N("adding medium \"%s\" before remote medium \"%s\"", $name, $_->{name}) . $ignore_text);
                $medium, $_;
            } else {
                $_;
            }
        } @{$urpm->{media}};
    }
    if (!$inserted) {
        $urpm->{info}(N("adding medium \"%s\"", $name) . $ignore_text);
        push @{$urpm->{media}}, $medium;
    }

    $urpm->{modified} = 1;

    $name;
}

#- add distribution media, according to the given url.
#- returns the list of names of the added media.
#- options :
#-   - initial_number : when adding several numbered media, start with this number
#-   - probe_with : force the use of synthesis/hdlist/rpms instead of using both synthesis & hdlist
#-   - ask_media : callback used to ask whether each medium should be added
#-   - only_updates : only add "update" media (used by rpmdrake)
#- other options are passed to add_medium(): ignore, nolock, virtual
sub add_distrib_media {
    my ($urpm, $name, $url, %options) = @_;

    #- make sure the configuration has been read.
    $urpm->{media} or die "caller should have used ->read_config or ->configure first";

    my $distribconf;

    if (my $dir = file_from_local_url($url)) {
        urpm::removable::try_mounting($urpm, $dir)
            or $urpm->{error}(N("unable to mount the distribution medium")), return ();
        $distribconf = MDV::Distribconf->new($dir, undef);
        $distribconf->load
            or $urpm->{error}(N("this location doesn't seem to contain any distribution")), return ();
    } else {
        unlink "$urpm->{cachedir}/partial/media.cfg";

        $distribconf = MDV::Distribconf->new($url, undef);
        $distribconf->settree('mandriva');

        $urpm->{log}(N("retrieving media.cfg file..."));
        if (urpm::download::sync($urpm, undef,
                                 [ reduce_pathname($distribconf->getfullpath(undef, 'infodir') . '/media.cfg') ],
                                 quiet => 1)) {
            $distribconf->parse_mediacfg("$urpm->{cachedir}/partial/media.cfg")
                or $urpm->{error}(N("unable to parse media.cfg")), return();
        } else {
            $urpm->{error}(N("...retrieving failed: %s", $@));
            $urpm->{error}(N("unable to access the distribution medium (no media.cfg file found)"));
            return ();
        }
    }

    #- cosmetic update of the name if it contains spaces.
    $name =~ /\s/ and $name .= ' ';

    my @newnames;
    #- at this point, we have found a media.cfg file, so parse it
    #- and create all the necessary media according to it.
    my $medium_index = $options{initial_number} || 1;

    foreach my $media ($distribconf->listmedia) {
        my $media_name = $distribconf->getvalue($media, 'name') || '';

        if (my $media_arch = $distribconf->getvalue($media, 'arch')) {
            if (!URPM::archscore($media_arch)) {
                $urpm->{log}(N("skipping non compatible media `%s' (for %s)", $media, $media_arch));
                next;
            }
        }
        my $add_by_default = !$distribconf->getvalue($media, 'noauto');
        if ($options{ask_media}) {
            $options{ask_media}->($media_name, $add_by_default) or next;
        } else {
            my $simple_rpms = !$distribconf->getvalue($media, 'debug_for')
                           && !$distribconf->getvalue($media, 'rpms');
            $add_by_default || $simple_rpms or next;
        }

        my $is_update_media = $distribconf->getvalue($media, 'updates_for');
        if ($options{only_updates}) {
            $is_update_media or next;
        }

        my $use_copied_hdlist = $urpm->{options}{use_copied_hdlist} || $distribconf->getvalue($media, 'use_copied_hdlist');
        my $with_hdlist = $use_copied_hdlist &&
          offset_pathname(
              $url,
              $distribconf->getpath($media, 'path'),
          ) . '/' .
          $distribconf->getpath($media, $options{probe_with} eq 'synthesis' ? 'synthesis' : 'hdlist');

        push @newnames, add_medium($urpm,
            $name ? "$media_name ($name$medium_index)" : $media_name,
            reduce_pathname($distribconf->getfullpath($media, 'path')),
            $with_hdlist,
            !$use_copied_hdlist ? (media_info_dir => 'media_info') : (),
            !$use_copied_hdlist && $options{probe_with} ? ($options{probe_with} => 1) : (),
            index_name => $name ? undef : 0,
            $add_by_default ? () : (ignore => 1),
            %options,
            # the following overrides %options
            update => $is_update_media ? 1 : undef,
        );
        ++$medium_index;
    }
    return @newnames;
}

#- deprecated, use select_media_by_name instead
sub select_media {
    my $urpm = shift;
    my $options = {};
    if (ref $_[0]) { $options = shift }
    foreach (select_media_by_name($urpm, [ @_ ], $options->{strict_match})) {
        #- select the medium by setting the modified flag, do not check ignore.
        $_->{modified} = 1;
    }
}

sub select_media_by_name {
    my ($urpm, $names, $b_strict_match) = @_;

    my %wanted = map { $_ => 1 } @$names;

    #- first the exact matches
    my @l = grep { delete $wanted{$_->{name}} } @{$urpm->{media}};

    #- check if some arguments don't correspond to a medium name.
    #- in that case, try to find the unique medium (or list the candidate
    #- media found).
    foreach (keys %wanted) {
        my $q = quotemeta;
        my (@found, @foundi);
        my $regex  = $b_strict_match ? qr/^$q$/  : qr/$q/;
        my $regexi = $b_strict_match ? qr/^$q$/i : qr/$q/i;
        foreach my $medium (@{$urpm->{media}}) {
            $medium->{name} =~ $regex  and push @found,  $medium;
            $medium->{name} =~ $regexi and push @foundi, $medium;
        }
        @found = @foundi if !@found;

        if (@found == 0) {
            $urpm->{error}(N("trying to select nonexistent medium \"%s\"", $_));
        } else {
            if (@found > 1) {
                $urpm->{log}(N("selecting multiple media: %s", join(", ", map { qq("$_->{name}") } @found)));
            }
            #- changed behaviour: select all occurrences by default.
            push @l, @found;
        }
    }
    @l;
}
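#- An illustrative note (the medium names are hypothetical): with media named
#- "Main" and "Main Updates", select_media_by_name($urpm, [ 'Main' ]) returns
#- only "Main" (an exact match wins), while select_media_by_name($urpm, [ 'Updates' ])
#- falls back to substring matching and selects "Main Updates".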
#- deprecated, use remove_media instead
sub remove_selected_media {
    my ($urpm) = @_;
    remove_media($urpm, [ grep { $_->{modified} } @{$urpm->{media}} ]);
}

sub remove_media {
    my ($urpm, $to_remove) = @_;

    foreach my $medium (@$to_remove) {
        $urpm->{info}(N("removing medium \"%s\"", $medium->{name}));

        #- mark to re-write the configuration.
        $urpm->{modified} = 1;

        #- remove the files associated with this medium.
        unlink grep { $_ } map { $_->($urpm, $medium) }
          \&statedir_hdlist, \&statedir_list, \&statedir_synthesis, \&statedir_descriptions, \&statedir_names;

        #- remove the proxy settings for this medium
        urpm::download::remove_proxy_media($medium->{name});
    }
    $urpm->{media} = [ difference2($urpm->{media}, $to_remove) ];
}

sub _probe_with_try_list {
    my ($urpm, $medium, $probe_with, $f) = @_;

    my $probe = sub {
        my ($synthesis, $media_info_dir) = @_;
        my $base = file_from_local_url($medium->{url}) || $medium->{url};
        my $url = reduce_pathname("$base/$media_info_dir") . '/' .
          ($synthesis ? 'synthesis.hdlist.cz' : 'hdlist.cz');
        $f->($url) or return;

        $urpm->{debug} and $urpm->{debug}("found hdlist/synthesis: $url");
        $medium->{media_info_dir} = $media_info_dir;
        if ($probe_with) {
            _set_synthesis_or_hdlist($medium, $synthesis);
        }
        1;
    };

    my $want_synthesis = !$probe_with || $probe_with eq 'synthesis';
    my @media_info_dirs = ('media_info', '.');

    foreach my $media_info_dir (@media_info_dirs) {
        if ($probe->($want_synthesis, $media_info_dir)) {
            return 1 if $probe_with;
            last;
        }
    }
    if ($medium->{media_info_dir}) {
        #- try to have both synthesis and hdlist :-)
        if (file_from_file_url($medium->{url}) && !$probe->(!$want_synthesis, $medium->{media_info_dir})) {
            #- sad, only one is available.
            _set_synthesis_or_hdlist($medium, $want_synthesis);
        }
        1;
    } else {
        foreach my $media_info_dir (@media_info_dirs) {
            $probe->(!$want_synthesis, $media_info_dir) and return 1;
        }
        '';
    }
}

sub may_reconfig_urpmi {
    my ($urpm, $medium) = @_;

    my $f;
    if (my $dir = file_from_local_url($medium->{url})) {
        $f = reduce_pathname("$dir/reconfig.urpmi");
    } else {
        unlink($f = "$urpm->{cachedir}/partial/reconfig.urpmi");
        urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/reconfig.urpmi") ], quiet => 1);
    }
    if (-s $f) {
        reconfig_urpmi($urpm, $f, $medium);
    }
    unlink $f if !file_from_local_url($medium->{url});
}

#- read a reconfiguration file for urpmi, and reconfigure the media accordingly
#- $rfile is the (local) reconfiguration file, $medium is the medium to reconfigure
#-
#- the format is similar to mod_rewrite's RewriteRule, i.e.:
#-   PATTERN REPLACEMENT [FLAG]
#- where FLAG can be L or N
#-
#- example of reconfig.urpmi:
#-   # this is an urpmi reconfiguration file
#-   /cooker /cooker/$ARCH
sub reconfig_urpmi {
    my ($urpm, $rfile, $medium) = @_;
    -r $rfile or return;

    $urpm->{log}(N("reconfiguring urpmi for media \"%s\"", $medium->{name}));

    my ($magic, @lines) = cat_($rfile);
    #- the first line of reconfig.urpmi must be magic, to be sure it's not an error file
    $magic =~ /^# this is an urpmi reconfiguration file/ or return undef;

    my @replacements;
    foreach (@lines) {
        chomp;
        s/^\s*//; s/#.*$//; s/\s*$//;
        $_ or next;
        my ($p, $r, $f) = split /\s+/, $_, 3;
        push @replacements, [ quotemeta $p, $r, $f || 1 ];
    }

    my $reconfigured = 0;
    my @reconfigurable = qw(url with_hdlist media_info_dir);

    my %orig = %$medium;

  URLS: foreach my $k (@reconfigurable) {
        foreach my $r (@replacements) {
            if ($medium->{$k} =~ s/$r->[0]/$r->[1]/) {
                $reconfigured = 1;
                #- Flags stolen from mod_rewrite: L(ast), N(ext)
                if ($r->[2] =~ /L/) {
                    last;
                } elsif ($r->[2] =~ /N/) { #- dangerous option
                    redo URLS;
                }
            }
        }
        #- check that the new url exists before committing the changes (local mirrors)
        my $file = file_from_local_url($medium->{$k});
        if ($file && !-e $file) {
            %$medium = %orig;
            $reconfigured = 0;
            $urpm->{log}(N("...reconfiguration failed"));
            return;
        }
    }

    if ($reconfigured) {
        $urpm->{log}(N("reconfiguration done"));
        $urpm->{modified} = 1;
    }
    $reconfigured;
}

sub _guess_hdlist_suffix {
    my ($url) = @_;
    $url =~
      m!\bmedia/(\w+)/*\Z! && $1;
}

sub _hdlist_suffix {
    my ($medium) = @_;
    $medium->{with_hdlist} =~ /hdlist(.*?)(?:\.src)?\.cz$/ ? $1 : '';
}

sub _parse_hdlist_or_synthesis__when_not_modified {
    my ($urpm, $medium) = @_;

    delete @$medium{qw(start end)};

    _parse_synthesis($urpm, $medium, any_synthesis($urpm, $medium))
      or _parse_hdlist($urpm, $medium, any_hdlist($urpm, $medium));

    _check_after_reading_hdlist_or_synthesis($urpm, $medium);
}

sub _parse_hdlist_or_synthesis__virtual {
    my ($urpm, $medium) = @_;

    delete $medium->{modified};
    $medium->{really_modified} = 1;
    $urpm->{md5sum_modified} = 1;

    _parse_hdlist_or_synthesis__when_not_modified($urpm, $medium);
}

#- the names.<medium name> file is used by external programs (namely for bash-completion)
sub generate_medium_names {
    my ($urpm, $medium) = @_;

    unlink statedir_names($urpm, $medium);

    if (my $fh = urpm::sys::open_safe($urpm, ">", statedir_names($urpm, $medium))) {
        foreach ($medium->{start} .. $medium->{end}) {
            if (defined $urpm->{depslist}[$_]) {
                print $fh $urpm->{depslist}[$_]->name . "\n";
            } else {
                $urpm->{error}(N("Error generating names file: dependency %d not found", $_));
            }
        }
    } else {
        $urpm->{error}(N("Error generating names file: Can't write to file (%s)", $!));
    }
}

sub _read_existing_synthesis_and_hdlist_if_same_time_and_msize {
    my ($urpm, $medium) = @_;

    same_size_and_mtime(cachedir_with_hdlist($urpm, $medium, 's'),
                        statedir_hdlist_or_synthesis($urpm, $medium, 's')) or return;

    _read_existing_synthesis_and_hdlist($urpm, $medium);
    1;
}

sub _read_existing_synthesis_and_hdlist_if_same_md5sum {
    my ($urpm, $medium, $retrieved_md5sum) = @_;

    #- if an existing hdlist or synthesis file has the same md5sum, we assume the
    #- files are the same.
    #- if the local md5sum is the same as the remote md5sum, there is no need to
    #- download the hdlist or synthesis file again.
    $retrieved_md5sum && $medium->{md5sum} eq $retrieved_md5sum or return;

    _read_existing_synthesis_and_hdlist($urpm, $medium);
    1;
}

sub _read_existing_synthesis_and_hdlist {
    my ($urpm, $medium) = @_;

    unlink cachedir_with_hdlist($urpm, $medium, 's');

    $urpm->{info}(N("medium \"%s\" is up-to-date", $medium->{name}));

    #- the medium is now considered not modified.
    $medium->{modified} = 0;

    #- XXX we could link the new hdlist to the old one.
    #- (However, links need to be managed. see bug #12391.)

    #- as previously done, just read the synthesis file here, this is enough.
    if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
        _parse_hdlist($urpm, $medium, statedir_hdlist($urpm, $medium));
        _check_after_reading_hdlist_or_synthesis($urpm, $medium);
    }
    1;
}

sub _parse_hdlist {
    my ($urpm, $medium, $hdlist_file, $o_callback) = @_;

    -e $hdlist_file or return;

    $urpm->{log}(N("examining hdlist file [%s]", $hdlist_file));
    ($medium->{start}, $medium->{end}) =
      $urpm->parse_hdlist($hdlist_file, packing => 1, $o_callback ? (callback => $o_callback) : @{[]});
}

sub _parse_synthesis {
    my ($urpm, $medium, $synthesis_file, $o_callback) = @_;

    -e $synthesis_file or return;

    $urpm->{log}(N("examining synthesis file [%s]", $synthesis_file));
    ($medium->{start}, $medium->{end}) =
      $urpm->parse_synthesis($synthesis_file, $o_callback ?
                             (callback => $o_callback) : @{[]});
}

sub _parse_hdlist_or_synthesis {
    my ($urpm, $medium, $hdlist_or, $prefer_synthesis) = @_;

    if (_synthesis_or_not($medium, $prefer_synthesis)) {
        _parse_synthesis($urpm, $medium, $hdlist_or);
    } else {
        _parse_hdlist($urpm, $medium, $hdlist_or);
    }
}

sub _build_hdlist_using_rpm_headers {
    my ($urpm, $medium) = @_;

    $urpm->{log}(N("building hdlist [%s]", statedir_hdlist($urpm, $medium)));
    #- finish the hdlist building operation.
    $urpm->build_hdlist(start  => $medium->{start},
                        end    => $medium->{end},
                        dir    => "$urpm->{cachedir}/headers",
                        hdlist => statedir_hdlist($urpm, $medium),
    );
}

sub _build_synthesis {
    my ($urpm, $medium) = @_;

    eval { $urpm->build_synthesis(
        start     => $medium->{start},
        end       => $medium->{end},
        synthesis => statedir_synthesis($urpm, $medium),
    ) };
    if (my $err = $@) {
        chomp($err);
        $urpm->{error}(N("Unable to build synthesis file for medium \"%s\". Your hdlist file may be corrupted.", $medium->{name}));
        $urpm->{error}($err);
        unlink statedir_synthesis($urpm, $medium);
    } else {
        $urpm->{log}(N("built hdlist synthesis file for medium \"%s\"", $medium->{name}));
    }
    #- keep in mind that we have a modified database; that is certain at this point.
    $urpm->{md5sum_modified} = 1;
}

sub is_valid_medium {
    my ($medium) = @_;
    defined $medium->{start} && defined $medium->{end};
}

sub _check_after_reading_hdlist_or_synthesis {
    my ($urpm, $medium) = @_;

    if (!is_valid_medium($medium)) {
        $urpm->{error}(N("problem reading hdlist or synthesis file of medium \"%s\"", $medium->{name}));
        $medium->{ignore} = 1;
    }
}

sub _get_list_or_pubkey__local {
    my ($urpm, $medium, $name) = @_;

    my $path = _hdlist_dir($medium) . "/$name" . _hdlist_suffix($medium);
    -e $path or $path = file_from_local_url($medium->{url}) . "/media_info/$name";
    if (-e $path) {
        $urpm->{log}(N("copying [%s] for medium \"%s\"...", $path, $medium->{name}));
        copy_and_own($path, "$urpm->{cachedir}/partial/$name")
            or $urpm->{error}(N("...copying failed")), return;
    }
    1;
}

sub _get_list_or_pubkey__remote {
    my ($urpm, $medium, $name) = @_;

    my $found;
    if (_hdlist_suffix($medium)) {
        my $local_name = $name . _hdlist_suffix($medium);

        if (urpm::download::sync($urpm, $medium, [_hdlist_dir($medium) . "/$local_name"], quiet => 1)) {
            rename("$urpm->{cachedir}/partial/$local_name", "$urpm->{cachedir}/partial/$name");
            $found = 1;
        }
    }
    if (!$found) {
        urpm::download::sync($urpm, $medium, [_hdlist_dir($medium) .
"/$name"], quiet => 1) or unlink "$urpm->{cachedir}/partial/$name"; } } sub get_descriptions_local { my ($urpm, $medium) = @_; unlink statedir_descriptions($urpm, $medium); my $dir = file_from_local_url($medium->{url}); my $description_file = "$dir/media_info/descriptions"; #- new default location -e $description_file or $description_file = "$dir/../descriptions"; -e $description_file or return; $urpm->{log}(N("copying description file of \"%s\"...", $medium->{name})); if (copy_and_own($description_file, statedir_descriptions($urpm, $medium))) { $urpm->{log}(N("...copying done")); } else { $urpm->{error}(N("...copying failed")); $medium->{ignore} = 1; } } sub get_descriptions_remote { my ($urpm, $medium) = @_; unlink "$urpm->{cachedir}/partial/descriptions"; if (-e statedir_descriptions($urpm, $medium)) { urpm::util::move(statedir_descriptions($urpm, $medium), "$urpm->{cachedir}/partial/descriptions"); } urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/media_info/descriptions") ], quiet => 1) or #- try older location urpm::download::sync($urpm, $medium, [ reduce_pathname("$medium->{url}/../descriptions") ], quiet => 1); if (-e "$urpm->{cachedir}/partial/descriptions") { urpm::util::move("$urpm->{cachedir}/partial/descriptions", statedir_descriptions($urpm, $medium)); } } sub get_hdlist_or_synthesis__local { my ($urpm, $medium, $callback) = @_; my $f = cachedir_with_hdlist($urpm, $medium, 's'); unlink $f; $urpm->{log}(N("copying [%s] for medium \"%s\"...", _url_with_hdlist($medium, 's'), $medium->{name})); $callback and $callback->('copy', $medium->{name}); if (copy_and_own(_url_with_hdlist($medium, 's'), $f)) { $callback and $callback->('done', $medium->{name}); $urpm->{log}(N("...copying done")); if (file_size($f) < 20) { $urpm->{error}(N("copy of [%s] failed (file is suspiciously small)", $f)); 0; } else { 1; } } else { $callback and $callback->('failed', $medium->{name}); #- force error, reported afterwards unlink $f; 0; } } sub get_hdlist_or_synthesis__remote { my ($urpm, $medium, $callback, $quiet) = @_; if (urpm::download::sync($urpm, $medium, [ _url_with_hdlist($medium, 's') ], quiet => $quiet, callback => $callback) && file_size(cachedir_with_hdlist($urpm, $medium, 's')) >= 20) { 1; } else { chomp(my $err = $@); $urpm->{error}(N("...retrieving failed: %s", $err)); 0; } } sub get_hdlist_or_synthesis_and_check_md5sum__local { my ($urpm, $medium, $retrieved_md5sum, $callback) = @_; get_hdlist_or_synthesis__local($urpm, $medium, $callback) or return; #- keep checking md5sum of file just copied ! (especially on nfs or removable device). if ($retrieved_md5sum) { $urpm->{log}(N("computing md5sum of copied source hdlist (or synthesis)")); urpm::md5sum::compute(cachedir_with_hdlist($urpm, $medium, 's')) eq $retrieved_md5sum or $urpm->{error}(N("copy of [%s] failed (md5sum mismatch)", _url_with_hdlist($medium, 's'))), return; } 1; } sub get_hdlist_or_synthesis_and_check_md5sum__remote { my ($urpm, $medium, $retrieved_md5sum, $callback, $quiet) = @_; get_hdlist_or_synthesis__remote($urpm, $medium, $callback, $quiet) or return; #- check downloaded file has right signature. 
    if ($retrieved_md5sum) {
        $urpm->{log}(N("computing md5sum of retrieved source hdlist (or synthesis)"));
        urpm::md5sum::compute(cachedir_with_hdlist($urpm, $medium, 's')) eq $retrieved_md5sum
            or $urpm->{error}(N("...retrieving failed: md5sum mismatch")), return;
    }
    1;
}

sub _read_rpms_from_dir {
    my ($urpm, $medium, $clean_cache) = @_;

    my $dir = file_from_local_url($medium->{url});

    require File::Glob;
    $medium->{rpm_files} = [ File::Glob::glob("$dir/*.rpm") ];

    #- check that we actually found some rpm files!
    if (!@{$medium->{rpm_files}}) {
        $urpm->{error}(N("no rpm files found from [%s]", $dir));
        $medium->{ignore} = 1;
        return;
    }

    #- we need to rebuild the hdlist from the rpm files.
    $urpm->{log}(N("reading rpm files from [%s]", $dir));

    my @unresolved_before = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
    $medium->{start} = @{$urpm->{depslist}};

    eval {
        $medium->{headers} = [ $urpm->parse_rpms_build_headers(
            dir     => "$urpm->{cachedir}/headers",
            rpms    => $medium->{rpm_files},
            clean   => $$clean_cache,
            packing => 1,
        ) ];
    };
    if ($@) {
        $urpm->{error}(N("unable to read rpm files from [%s]: %s", $dir, $@));
        delete $medium->{headers}; #- do not propagate these.
        return;
    }
    $medium->{end} = $#{$urpm->{depslist}};
    if ($medium->{start} > $medium->{end}) {
        #- an error occurred (provided there were input files.)
        delete $medium->{start};
        delete $medium->{end};
        $urpm->{fatal}(9, N("no rpms read"));
    }

    #- make sure the headers will not be removed for another medium.
    $$clean_cache = 0;

    my @unresolved = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
    @unresolved_before == @unresolved or $medium->{need_second_pass} = 1;

    delete $medium->{synthesis}; #- when building the hdlist ourselves, drop the synthesis property.
    1;
}

#- options: callback, force, nomd5sum, nopubkey, probe_with
sub _update_medium__parse_if_unmodified__local {
    my ($urpm, $medium, $clean_cache, $options) = @_;

    my $dir = file_from_local_url($medium->{url});
    if (!-d $dir) {
        #- the given directory does not exist; it may become accessible
        #- by mounting some other directory. Try to figure it out and mount
        #- everything that might be necessary.
        urpm::removable::try_mounting($urpm,
            $options->{probe_with} ne 'rpms' && _hdlist_dir($medium) ? _hdlist_dir($medium) : $dir,
            #- in the case of an iso image, pass its name
            urpm::removable::is_iso($medium->{removable}) && $medium->{removable},
        ) or $urpm->{error}(N("unable to access medium \"%s\", this could happen if you mounted manually the directory when creating the medium.", $medium->{name})), return;
    }

    #- try to probe for a possible with_hdlist parameter, unless
    #- it is already defined (and valid).
    if (!_hdlist_dir($medium) && $options->{probe_with} ne 'rpms') {
        _probe_with_try_list($urpm, $medium, $options->{probe_with}, sub {
            my ($url) = @_;
            -e $url or return;
            if (file_size($url) >= 20) {
                1;
            } else {
                $urpm->{error}(N("invalid hdlist file %s for medium \"%s\"", $url, $medium->{name}));
                0;
            }
        });
    }

    if ($medium->{virtual}) {
        #- syncing a virtual medium is very simple: just try to read the file in order to
        #- determine its type, once a with_hdlist has been found (this is mandatory).
        _parse_hdlist_or_synthesis__virtual($urpm, $medium);
        1;
    } elsif ($options->{probe_with} eq 'rpms' || !_hdlist_dir($medium)) {
        #- build the hdlist/synthesis from the rpms
        _read_rpms_from_dir($urpm, $medium, $clean_cache);
    } elsif (_hdlist_dir($medium)) {
        my ($retrieved_md5sum);

        if (!$options->{nomd5sum} && file_size(_hdlist_dir($medium) .
                                               '/MD5SUM') > 32) {
            $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm,
                _hdlist_dir($medium) . '/MD5SUM', _url_with_hdlist_basename($medium, 's'));
            if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force})) {
                _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
                    and return 'unmodified';
            }
        }

        if (get_hdlist_or_synthesis_and_check_md5sum__local($urpm, $medium, $retrieved_md5sum, $options->{callback})) {
            $medium->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;

            #- check if the files are equal... and that no copy is forced...
            if (!$options->{force}) {
                _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium)
                    and return 'unmodified';
            }
            1;
        } else {
            #- if copying the hdlist has failed, try to build it directly.
            if ($urpm->{options}{'build-hdlist-on-error'}) {
                #- no hdlist/synthesis available, try to build one from the rpms
                _read_rpms_from_dir($urpm, $medium, $clean_cache);
            } else {
                $urpm->{error}(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
                $medium->{ignore} = 1;
                '';
            }
        }
    }
}

#- options: callback, force, nomd5sum, nopubkey, probe_with, quiet
sub _update_medium__parse_if_unmodified__remote {
    my ($urpm, $medium, $options) = @_;
    my ($retrieved_md5sum);

    #- check whether a remote MD5SUM file is available.
    #- this will only be done if $with_hdlist is not empty, in order to use
    #- an existing hdlist or synthesis file, and to check whether the download was good.
    #- if no MD5SUM is available, do it as before...
    if (_hdlist_dir($medium)) {
        #- we can assume at this point that a basename exists, but it still needs
        #- to be checked for validity; nothing can be deduced if no MD5SUM
        #- file is present.
        unlink "$urpm->{cachedir}/partial/MD5SUM";

        if (!$options->{nomd5sum}
            && urpm::download::sync($urpm, $medium, [ reduce_pathname(_hdlist_dir($medium) . '/MD5SUM') ], quiet => 1)
            && file_size("$urpm->{cachedir}/partial/MD5SUM") > 32) {
            if (urpm::md5sum::on_local_medium($urpm, $medium, $options->{force} >= 2)) {
                $retrieved_md5sum = urpm::md5sum::from_MD5SUM__or_warn($urpm,
                    "$urpm->{cachedir}/partial/MD5SUM", _url_with_hdlist_basename($medium, 's'));
                _read_existing_synthesis_and_hdlist_if_same_md5sum($urpm, $medium, $retrieved_md5sum)
                    and return 'unmodified';
            }
        }
    }

    #- try to probe for a possible with_hdlist parameter, unless
    #- it is already defined (and valid).
    $urpm->{log}(N("retrieving source hdlist (or synthesis) of \"%s\"...", $medium->{name}));
    $options->{callback} and $options->{callback}('retrieve', $medium->{name});

    my $error = sub {
        my ($msg) = @_;
        $urpm->{error}($msg);
        unlink cachedir_with_hdlist($urpm, $medium, 's');
        $options->{callback} and $options->{callback}('failed', $medium->{name});
    };

    if (!_hdlist_dir($medium)) {
        my $err;
        _probe_with_try_list($urpm, $medium, $options->{probe_with}, sub {
            my ($url) = @_;
            my $f = "$urpm->{cachedir}/partial/" . basename($url);
            $options->{force} and unlink $f;
            if (urpm::download::sync($urpm, $medium, [ $url ],
                                     quiet => $options->{quiet}, callback => $options->{callback})
                && file_size($f) >= 20) {
                $urpm->{log}(N("found probed hdlist (or synthesis) as %s", $url));
                1;
            } else {
                chomp($err = $@);
                0;
            }
        }) or do {
            $error->(N("no hdlist file found for medium \"%s\"", $medium->{name}));
            $urpm->{error}(N("...retrieving failed: %s", $err));
            return;
        };
    } else {
        if ($options->{force}) {
            unlink cachedir_with_hdlist($urpm, $medium, 's');
        } else {
            #- for rsync, restore the previous local copy first, then sync (copy if needed) against it.
            my $previous_hdlist = statedir_hdlist_or_synthesis($urpm, $medium, 's');
            if (-e $previous_hdlist && urpm::protocol_from_url($medium->{url}) eq 'rsync') {
                copy_and_own(
                    $previous_hdlist,
                    cachedir_with_hdlist($urpm, $medium, 's'),
                ) or $error->(N("...copying failed")), return;
            }
        }
        if (get_hdlist_or_synthesis_and_check_md5sum__remote($urpm, $medium, $retrieved_md5sum,
                                                             $options->{callback}, $options->{quiet})) {
            $options->{callback} and $options->{callback}('done', $medium->{name});
            $medium->{md5sum} = $retrieved_md5sum if $retrieved_md5sum;

            if (!$options->{force}) {
                _read_existing_synthesis_and_hdlist_if_same_time_and_msize($urpm, $medium)
                    and return 'unmodified';
            }
        } else {
            $error->(N("unable to access hdlist file of \"%s\", medium ignored", $medium->{name}));
            $medium->{ignore} = 1;
            return;
        }
    }
    1;
}

sub _get_pubkey_and_descriptions {
    my ($urpm, $medium, $nopubkey) = @_;

    my $local = file_from_local_url($medium->{url});

    ($local ? \&get_descriptions_local : \&get_descriptions_remote)->($urpm, $medium);

    #- examine if a pubkey file is available.
    if (!$nopubkey && !$medium->{'key-ids'}) {
        ($local ? \&_get_list_or_pubkey__local : \&_get_list_or_pubkey__remote)->($urpm, $medium, 'pubkey');
    }
}

sub _read_cachedir_pubkey {
    my ($urpm, $medium, $b_wait_lock) = @_;

    -s "$urpm->{cachedir}/partial/pubkey" or return;

    $urpm->{log}(N("examining pubkey file of \"%s\"...", $medium->{name}));

    my $_rpm_lock = urpm::lock::rpm_db($urpm, 'exclusive', wait => $b_wait_lock);
    my $db = urpm::db_open_or_die($urpm, $urpm->{root}, 'rw');

    my %key_ids;
    URPM::import_pubkeys($db, "$urpm->{cachedir}/partial/pubkey", sub {
        my ($id, $imported) = @_;
        if ($id) {
            $key_ids{$id} = undef;
            $imported and $urpm->{log}(N("...imported key %s from pubkey file of \"%s\"",
                                         $id, $medium->{name}));
            $imported or $urpm->{debug}("pubkey $id already imported") if $urpm->{debug};
        } else {
            $urpm->{error}(N("unable to import pubkey file of \"%s\"", $medium->{name}));
        }
    });
    if (keys(%key_ids)) {
        $medium->{'key-ids'} = join(',', keys %key_ids);
    }
    unlink "$urpm->{cachedir}/partial/pubkey";
}

sub _write_rpm_list {
    my ($urpm, $medium) = @_;

    @{$medium->{rpm_files} || []} or return;

    $medium->{list} ||= "list.$medium->{name}";

    #- write list file.
    $urpm->{log}(N("writing list file for medium \"%s\"", $medium->{name}));
    my $listfh = urpm::sys::open_safe($urpm, '>', cachedir_list($urpm, $medium)) or return;
    print $listfh basename($_), "\n" foreach @{$medium->{rpm_files}};
    1;
}

#- options: callback, force, nomd5sum, probe_with, quiet
#- (from _update_medium__parse_if_unmodified__local and _update_medium__parse_if_unmodified__remote)
sub _update_medium_first_pass {
    my ($urpm, $medium, $clean_cache, %options) = @_;

    #- we should create the associated synthesis file if it does not already exist...
    file_size(statedir_synthesis($urpm, $medium)) >= 20
        or $medium->{must_build_synthesis} = 1;

    my @unresolved_before = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};

    unless ($medium->{modified}) {
        #- the medium is not modified, but to compute dependencies,
        #- we still need to read it and all synthesis will be written if
        #- an unresolved provides is found.
        #- to speed up the process, we only read the synthesis at the beginning.
        _parse_hdlist_or_synthesis__when_not_modified($urpm, $medium);
        compute_need_second_pass($urpm, $medium, \@unresolved_before);
        return 'unmodified';
    }

    #- always delete a remaining list file or pubkey file in cache.
    foreach (qw(list pubkey)) {
        unlink "$urpm->{cachedir}/partial/$_";
    }

    #- check for a reconfig.urpmi file (if not already reconfigured)
    if (!$medium->{noreconfigure}) {
        may_reconfig_urpmi($urpm, $medium);
    }

    {
        my $rc =
          file_from_local_url($medium->{url})
            ? _update_medium__parse_if_unmodified__local($urpm, $medium, $clean_cache, \%options)
            : _update_medium__parse_if_unmodified__remote($urpm, $medium, \%options);
        if (!$rc || $rc eq 'unmodified') {
            return $rc;
        }
    }

    #- build the list file according to the hdlist.
    if (!$medium->{headers} && !$medium->{virtual}
        && file_size(cachedir_with_hdlist($urpm, $medium, 's')) < 20) {
        $urpm->{error}(N("no hdlist file found for medium \"%s\"", $medium->{name}));
        return;
    }

    if (!$medium->{virtual}) {
        if ($medium->{headers}) {
            _write_rpm_list($urpm, $medium) or return;

            if (-e statedir_list($urpm, $medium)) {
                $urpm->{info}(N("updated medium \"%s\"", $medium->{name}));
            }
            unlink statedir_hdlist($urpm, $medium);
            unlink statedir_synthesis($urpm, $medium);
            unlink statedir_list($urpm, $medium);
            urpm::util::move(cachedir_list($urpm, $medium), statedir_list($urpm, $medium));
        } else {
            #- read the first-pass hdlist or synthesis; try to open it as a synthesis. If the file
            #- is larger than 1MB, it is probably an hdlist, otherwise a synthesis.
            #- anyway, if one try fails, try the other mode.
            $options{callback} and $options{callback}('parse', $medium->{name});
            _parse_hdlist_or_synthesis($urpm, $medium, cachedir_with_hdlist($urpm, $medium, 's'), 's');
            if (is_valid_medium($medium)) {
                $options{callback} && $options{callback}('done', $medium->{name});
            } else {
                $urpm->{error}(N("unable to parse hdlist file of \"%s\"", $medium->{name}));
                $options{callback} and $options{callback}('failed', $medium->{name});
                delete $medium->{md5sum};
                #- we have to read back the current synthesis file unmodified.
                if (!_parse_synthesis($urpm, $medium, statedir_synthesis($urpm, $medium))) {
                    $urpm->{error}(N("problem reading synthesis file of medium \"%s\"", $medium->{name}));
                    $medium->{ignore} = 1;
                }
                return;
            }

            if (-e statedir_hdlist_or_synthesis($urpm, $medium, 's')) {
                $urpm->{info}(N("updated medium \"%s\"", $medium->{name}));
            }
            unlink statedir_list($urpm, $medium);
            unlink statedir_hdlist($urpm, $medium);
            unlink statedir_synthesis($urpm, $medium);
            #- use the newly created file.
            urpm::util::move(cachedir_with_hdlist($urpm, $medium, 's'),
                             statedir_hdlist_or_synthesis($urpm, $medium, 's'));

            if ($medium->{list}) {
                delete $medium->{list};
                $urpm->{modified} = 1;
            }
        }

        #- make sure to rebuild the base files and clear the medium's modified state.
        $medium->{modified} = 0;
        $medium->{really_modified} = 1;
        $urpm->{md5sum_modified} = 1;

        #- and create the associated synthesis file.
        $medium->{must_build_synthesis} = !_synthesis_or_not($medium, 's');
    }

    compute_need_second_pass($urpm, $medium, \@unresolved_before);
    1;
}

sub compute_need_second_pass {
    my ($urpm, $medium, $unresolved_before) = @_;

    my @unresolved_after = grep { ! defined $urpm->{provides}{$_} } keys %{$urpm->{provides} || {}};
    if (@$unresolved_before != @unresolved_after) {
        $medium->{need_second_pass} = 1;
        $urpm->{debug} and $urpm->{debug}(sprintf qq(medium "%s" has unresolved dependencies: %s),
                                          $medium->{name},
                                          join(' ', difference2(\@unresolved_after, $unresolved_before)));
    }
}

sub _update_medium_first_pass_failed {
    my ($urpm, $medium) = @_;

    !$medium->{virtual} or return;

    #- an error occurred while updating the medium; we have to remove the temporary files.
    unlink(glob("$urpm->{cachedir}/partial/*"));
}

#- take care of modified media only, or of all of them if everything has to be recomputed.
sub _update_medium_second_pass {
    my ($urpm, $medium, $callback) = @_;

    $callback and $callback->('parse', $medium->{name});

    #- a modified medium is an invalid medium: we have to read back the previous hdlist
    #- or synthesis, which has not been modified by the first pass above.
    if ($medium->{headers} && !$medium->{modified}) {
        $urpm->{log}(N("reading headers from medium \"%s\"", $medium->{name}));
        ($medium->{start}, $medium->{end}) = $urpm->parse_headers(dir => "$urpm->{cachedir}/headers",
                                                                  headers => $medium->{headers},
        );
    } elsif (_parse_hdlist($urpm, $medium, any_hdlist($urpm, $medium))) {
        $medium->{must_build_synthesis} = 1;
    } else {
        _parse_synthesis($urpm, $medium, any_synthesis($urpm, $medium));
    }

    $callback && $callback->('done', $medium->{name});
}

sub _build_hdlist_synthesis {
    my ($urpm, $medium) = @_;

    if ($medium->{headers} && !$medium->{modified}) {
        _build_hdlist_using_rpm_headers($urpm, $medium);
        #- the synthesis needs to be created, since the medium has been built from rpm files.
        _build_synthesis($urpm, $medium);
    } elsif ($medium->{synthesis}) {
    } else {
        #- check if the synthesis file can be built.
        if ($medium->{must_build_synthesis} && !$medium->{modified} && !$medium->{virtual}) {
            _build_synthesis($urpm, $medium);
        }
    }
}

sub _update_media__handle_some_flags {
    my ($urpm, $forcekey, $all) = @_;

    foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
        $forcekey and delete $medium->{'key-ids'};

        if ($medium->{static}) {
            #- don't ever update static media
            $medium->{modified} = 0;
        } elsif ($all) {
            #- if we're rebuilding all media, mark them as modified (except removable ones)
            $medium->{modified} ||= $medium->{url} !~ m!^removable!;
        }
    }
}

#- Update the urpmi database w.r.t. the current configuration.
#- Takes care of modifications, and tries some tricks to bypass
#- the recomputation of base files.
#- Recognized options :
#-   all        : all media are being rebuilt
#-   callback   : UI callback
#-   forcekey   : force the retrieval of the pubkey
#-   force      : try to force rebuilding the base files
#-   noclean    : keep old files in the header cache directory
#-   nomd5sum   : don't verify the MD5SUM of retrieved files
#-   nopubkey   : don't use rpm pubkeys
#-   probe_with : probe synthesis or hdlist (or none)
#-   quiet      : download hdlists quietly
#-   wait_lock  : block until the lock can be acquired
sub update_media {
    my ($urpm, %options) = @_;

    $urpm->{media} or return; # verify that the configuration has been read

    $options{nopubkey} ||= $urpm->{options}{nopubkey};

    #- examine each medium to see if one of them needs to be updated.
    #- if this is the case and if not forced, try to use a pre-calculated
    #- hdlist file, else build it from the rpm files.
    clean($urpm);

    _update_media__handle_some_flags($urpm, $options{forcekey}, $options{all});

    my $clean_cache = !$options{noclean};

    my %updates_result;
    foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
        my $rc = _update_medium_first_pass($urpm, $medium, \$clean_cache, %options);
        $updates_result{$rc || 'error'}++;

        $rc or _update_medium_first_pass_failed($urpm, $medium);
    }
    $urpm->{debug} and $urpm->{debug}('update_medium: ' . join(' ', map { "$_=$updates_result{$_}" } keys %updates_result));

    if ($updates_result{1} == 0) {
        #- only errors/unmodified media, so leave now
        #- (this ensures a buggy newly added medium is not written to urpmi.cfg)
        return $updates_result{error} == 0;
    }

    #- some unresolved provides may force rebuilding all the syntheses,
    #- in which case a second pass will be necessary.
    my $need_second_pass = (grep { $_->{need_second_pass} } @{$urpm->{media}})
      #- a second pass is not useful if not a single medium is allowed to build a synthesis
      && (grep { !$_->{virtual} } @{$urpm->{media}})
      #- a second pass is not useful if only syntheses are available
      && (grep { !$_->{synthesis} } @{$urpm->{media}});

    if ($need_second_pass) {
        $urpm->{log}(N("performing second pass to compute dependencies\n"));
        $urpm->unresolved_provides_clean;
    }

    foreach my $medium (grep { !$_->{ignore} } @{$urpm->{media}}) {
        if ($need_second_pass) {
            #- the second pass consists of reading the syntheses or hdlists again.
            _update_medium_second_pass($urpm, $medium, $options{callback});
        }
        _build_hdlist_synthesis($urpm, $medium);

        if ($medium->{really_modified}) {
            _get_pubkey_and_descriptions($urpm, $medium, $options{nopubkey});
            _read_cachedir_pubkey($urpm, $medium, $options{wait_lock});
            generate_medium_names($urpm, $medium);
        }
    }

    if ($urpm->{modified}) {
        if ($options{noclean}) {
            #- clean the headers cache directory to remove everything that is no longer
            #- useful according to the depslist.
            urpm::remove_obsolete_headers_in_cache($urpm);
        }
        #- write the config files in any case
        write_config($urpm);
        urpm::download::dump_proxy_config();
    } elsif ($urpm->{md5sum_modified}) {
        #- NB: in the case of $urpm->{modified}, write_MD5SUM is called by write_config above
        write_MD5SUM($urpm);
    }

    $updates_result{error} == 0;
}

#- clean the parameters and the depslist computation zone.
sub clean {
    my ($urpm) = @_;

    $urpm->{depslist} = [];
    $urpm->{provides} = {};

    foreach (@{$urpm->{media} || []}) {
        delete $_->{start};
        delete $_->{end};
    }
}

1;
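
#- An illustrative usage sketch (comments only, nothing here is executed).
#- It assumes the usual urpm->new constructor from urpm.pm; the mirror URL,
#- media names and option values below are hypothetical examples:
#-
#-   my $urpm = urpm->new;
#-   urpm::media::configure($urpm);                 #- read urpmi.cfg and parse the syntheses
#-   urpm::media::add_distrib_media($urpm, 'distrib',
#-       'http://mirror.example.com/distrib', probe_with => 'synthesis');
#-   urpm::media::update_media($urpm, quiet => 1);  #- refresh hdlists/syntheses and rewrite the config files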