author    Michael Scherer <misc@mandriva.com>    2004-04-09 14:30:13 +0000
committer Michael Scherer <misc@mandriva.com>    2004-04-09 14:30:13 +0000
commit    39e18d9a5b4919ea773dd09a1d22da9243d4ab03 (patch)
tree      5f00555cac4bc355a28cd82540f0a7f2be454916
parent    7c3de26a2b5a74d657e618407e6340a623ed881a (diff)
- added support to fetch and build tarballs directly from a subversion repository
- added support for https servers
- test the return value of system and die on any error
- work around webservers using mod_speling and thus returning a webpage instead of a 404
- code cleanup
-rwxr-xr-x  rpmbuildupdate  141
1 file changed, 97 insertions(+), 44 deletions(-)
diff --git a/rpmbuildupdate b/rpmbuildupdate
index 347e4c0..bdca764 100755
--- a/rpmbuildupdate
+++ b/rpmbuildupdate
@@ -10,7 +10,10 @@
# for any purpose. It is provided "as is" without express or implied warranty.
# See the GNU General Public License for more details.
#
+# $Id$
+# TODO
+# do not hardcode the sudo urpmi command (so that --deps can be used on a cluster)
use strict;
use AppConfig;
@@ -18,51 +21,99 @@ use AppConfig;
my %config;
+sub system_die
+{
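+ # run a shell command and die with $message (or a default message) when it exits non-zero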
+ my ($command,$message) = @_;
+ $message = "$command failed" if not $message;
+ # do not forget: system() returns non-zero when the command fails
+ system($command) and die "$message";
+}
+
sub build_from_src
{
$_ = shift;
my $nv = shift;
- my $b = qx[basename $_];
+ my $b;
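+ # keep only the file name part, like basename(1) but without spawning a shell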
+ ( $b = $_ ) =~ s!.*/!!;
if($b =~ m|^(.*)-([^-]+)-([^-]+)\.[^\.]+\.rpm|)
{ &build($1, $nv, $_); }
}
+sub file_not_found
+{
+ my ($basename) = @_;
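+ # return 1 when the file is missing or is really an error page in disguise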
+ ( ! -f $basename ) && return 1;
+ # some webservers return an HTML page when a file is missing instead of letting wget fail
+ # see wget http://www.wesnoth.org/files/wesnoth-0.7.1.tar.bz2
+ # so if the fetched file is an HTML page, it is an error page and should be removed
+ `file $basename` =~ /HTML/i && do { system("rm -f $basename") ; return 1 ; };
+ return 0;
+}
+
+sub download
+{
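+ # -N only re-downloads when the remote file is newer, -q keeps wget quiet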
+ my $wget = "wget -N -q";
+ my ($url)=@_;
+ my $temp;
+ ( $temp = $url ) =~ s!.*/!!;
+ print "Trying to fetch $url...\n";
+ system("$wget $url;");
+ -f "$temp" && ( $temp !~ /.bz2$/ ) && system_die("bzme $temp","Cannot convert $temp");
+}
+
sub fetch
{
my ($url) = @_;
- my $basename = `basename $url`; chomp($basename);
- my ($temp, $turl);
- my $wget = "wget -N -q";
+ # if you add a handler here, do not forget to add it to the body of build()
+ return fetch_http($url) if ($url =~ m!^(ftp|https?)://! );
+ return fetch_svn($url) if ($url =~ m!^svns?://! );
+}
+
+sub fetch_svn
+{
+ my ($url) = @_;
+ my ($basename,$repos);
+
+ ( $basename = $url ) =~ s!.*/!!;
+ print $basename , "\n";
+ ( $repos = $url ) =~ s|/$basename$||;
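+ # the checkout goes over http(s): svn:// becomes http://, svns:// becomes https://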
+ $repos =~ s/^svn/http/;
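+ # tarball name is expected to end in rev<revision>.tar.bz2, e.g. foo-0.1rev123.tar.bz2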
+ $basename =~ /^(.*)-([^-]*rev)(\d+)\.tar\.bz2$/;
+ my ( $name,$prefix,$release ) = ($1,$2,$3);
+ my $dir="$ENV{TMP}/rpmbuildupdate-$$";
+ my $current_dir=`pwd`; chomp $current_dir;
+ mkdir $dir or die "Cannot create dir $dir";
+ chdir $dir or die "Cannot change dir to $dir";
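+ # check out exactly the revision encoded in the tarball name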
+ system_die("svn co -r $release $repos","svn checkout failed on $repos");
+ my $basedir ;
+ ( $basedir = $repos ) =~ s!.*/!!;
+
+ # FIXME: quite inelegant, should use a dedicated CPAN module.
+ my $complete_name = "$name-$prefix$release";
+ system_die("mv $basedir $complete_name");
+ system_die("find $complete_name -name '.svn' | xargs rm -Rf");
+ system_die("tar -cjf $complete_name.tar.bz2 $complete_name", "tar failed");
+ system_die("mv -f $complete_name.tar.bz2 $current_dir");
+ chdir $current_dir;
+}
+
+sub fetch_http
+{
+ my ($url) = @_;
+ my $basename;
+ ( $basename = $url ) =~ s!.*/!!;
+ my $turl;
system("rm -f $basename");
- print "Trying to fetch $url...\n";
- system("$wget $url");
-
- if (! -f $basename)
- {
- ($turl = $url) =~ s/bz2/gz/;
- $temp = `basename $turl`; chomp($temp);
- print "Trying to fetch $turl...\n";
- system("$wget $turl; [ -f $temp ] && bzme $temp");
- }
-
- if (! -f $basename)
- {
- ($turl = $url) =~ s/\.tar\.bz2/.tgz/;
- $temp = `basename $turl`; chomp($temp);
- print "Trying to fetch $turl...\n";
- system("$wget $turl; [ -f $temp ] && bzme $temp");
- }
-
- if (! -f $basename)
- {
- ($turl = $url) =~ s/\.tar\.bz2/.zip/;
- $temp = `basename $turl`; chomp($temp);
- print "Trying to fetch $turl...\n";
- system("$wget $turl; [ -f $temp ] && bzme $temp");
+ download($url);
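+ # fall back to other archive formats and let download() recompress them to bz2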
+ foreach ('.tar.gz', '.tgz', '.zip')
+ {
+ ($turl = $url) =~ s/\.tar\.bz2/$_/;
+ download($turl) if (file_not_found($basename));
}
- return (-f $basename);
+
+ return ! file_not_found($basename);
}
sub build
@@ -80,7 +131,7 @@ sub build
chomp($top);
chdir("$top/SOURCES") or die "Unable to chdir to $top/SOURCES";
- $rpm = "rpm --define \"_topdir $top\"";
+ $rpm = qq(rpm --define "_topdir $top");
print "===> Building $pkg $newversion\n";
@@ -105,7 +156,8 @@ sub build
}
else
{
- my $pkgrpm_basename = qx[basename $pkgrpm];
+ my $pkgrpm_basename;
+ ( $pkgrpm_basename = $pkgrpm ) =~ s!.*/!!;
if($pkgrpm_basename =~ m|^($pkge)-([^-]+)-([^-]+)\.[^\.]+\.rpm|)
{ $version = $2; $release = $3; $found = 1;}
}
@@ -118,12 +170,12 @@ sub build
if($config{src})
{
- system("$rpm -ivh $pkgrpm");
+ system_die("$rpm -ivh $pkgrpm");
wait;
}
elsif(!$config{nosource})
{
- system("/usr/sbin/urpmi --install-src --force ".$config{srpms}."/".$pkgrpm);
+ system_die("/usr/sbin/urpmi --install-src --force ".$config{srpms}."/".$pkgrpm);
wait;
}
@@ -144,7 +196,7 @@ sub build
$spec .= $_;
$spec =~ s/\%define(\s+)release(\s+)(.*)/\%define release $config{release}/;
$spec =~ s/Release:(\s+)(.*mdk)/Release:$1$config{release}/;
- push(@url, $2) if(/(Source[0-9]*):\s+((?:ftp|http):\S+)/i);
+ push(@url, $2) if(/(Source[0-9]*):\s+((?:ftp|svns?|https?):\S+)/i);
# For %vars !
$specvars{$1} = $2 if(/\%define\s+(.+?)\s+(.+)/g);
@@ -156,7 +208,7 @@ sub build
my $email;
my @l = getpwuid($<);
$email = $ENV{EMAIL} || $l[6]." <$l[0]\@mandrakesoft.com>";
- $spec .= "* ".qx[LC_TIME=C date '+%a %b %e %Y'|tr -d '\n']." ".$email.
+ $spec .= "* ".`LC_TIME=C date '+%a %b %e %Y'|tr -d '\n'`." ".$email.
" ".$newversion."-".$config{release}."\n";
$spec .= "- New release $newversion\n\n";
}
@@ -180,17 +232,18 @@ sub build
s/\%\{version\}/$newversion/gi;
}
- my $basename = `basename $_`; chomp($basename);
+ my $basename;
+ ( $basename = $_ ) =~ s!.*/!!;
system("rm -f ${top}/SOURCES/$basename");
# GNOME: add the major version to the URL automatically
# for example: ftp://ftp://ftp.gnome.org/pub/GNOME/sources/ORbit2/ORbit2-2.10.0.tar.bz2
# is rewritten in ftp://ftp.gnome.org/pub/GNOME/sources/ORbit2/2.10/ORbit2-2.10.0.tar.bz2
- if (m@ftp.gnome.org/pub/GNOME/sources/@)
+ if (m!ftp.gnome.org/pub/GNOME/sources/!)
{
(my $major = $newversion) =~ s/([^.]+\.[^.]+).*/$1/;
- s@(.*/)(.*)@$1$major/$2@;
+ s!(.*/)(.*)!$1$major/$2!;
}
# download from Fedora rpms
@@ -209,19 +262,19 @@ sub build
closedir(MP);
print "Trying from fedora($basename): $config{fedora}/$pkgrpmrh\n";
- system("cd ${top}/SOURCES; rpm2cpio $config{fedora}/$pkgrpmrh | cpio -id $basename");
+ system_die("cd ${top}/SOURCES; rpm2cpio $config{fedora}/$pkgrpmrh | cpio -id $basename","Rpm extraction failed");
wait;
if (! -f "${top}/SOURCES/$basename")
{
(my $bname = $basename) =~ s/bz2/gz/;
print "Trying from fedora($bname): $config{fedora}/$pkgrpmrh\n";
- system("cd ${top}/SOURCES; rpm2cpio $config{fedora}/$pkgrpmrh | cpio -id $bname; bzme $bname");
+ system("cd ${top}/SOURCES; rpm2cpio $config{fedora}/$pkgrpmrh | cpio -id $bname; bzme $bname","rpm recompression failed");
wait;
}
}
# download from sourceforge mirrors
- if (m@http://prdownloads.sourceforge.net@)
+ if (m!http://prdownloads.sourceforge.net!)
{
foreach my $site ("http://heanet.dl.sourceforge.net/sourceforge/",
"http://aleron.dl.sourceforge.net/sourceforge/",
@@ -231,8 +284,8 @@ sub build
"http://twtelecom.dl.sourceforge.net/sourceforge/",
)
{
- (my $dest = $_) =~ s@http://prdownloads.sourceforge.net/@$site@;
- last if (fetch($dest));
+ (my $dest = $_) =~ s!http://prdownloads.sourceforge.net/!$site!;
+ last if (fetch_http($dest));
}
}
# download specified url