#!/usr/bin/perl
#---------------------------------------------------------------
# Project : Linux-Mandrake
# Module : spec-helper
# File : compress_files
# Version : $Id$
# Author : Frederic Lepied
# Created On : Thu Feb 10 08:04:11 2000
# Purpose : compress man and info pages.
#---------------------------------------------------------------
use Cwd;
use File::Find;
################################################################################
# Returns the basename of the argument passed to it.
sub basename {
my $fn=shift;
$fn=~s:^.*/(.*?)$:$1:;
return $fn;
}
################################################################################
# Returns the directory name of the argument passed to it.
sub dirname {
my $fn=shift;
$fn=~s:^(.*)/.*?$:$1:;
return $fn;
}
################################################################################
# Run a command that may have a huge number of arguments, like xargs does.
# Pass in a reference to an array containing the arguments, and then other
# parameters that are the command and any parameters that should be passed to
# it each time.
sub xargs {
my $args=shift;
# The kernel can accept command lines up to 20k worth of characters.
my $command_max=20000;
# Figure out length of static portion of command.
my $static_length=0;
foreach (@_) {
$static_length+=length($_)+1;
}
my @collect=();
my $length=$static_length;
foreach (@$args) {
if (length($_) + 1 + $static_length > $command_max) {
error("This command is greater than the maximum command size allowed by the kernel, and cannot be split up further. What on earth are you doing? \"@_ $_\"");
}
$length+=length($_) + 1;
if ($length < $command_max) {
push @collect, $_;
}
else {
system(@_,@collect) if $#collect > -1;
@collect=($_);
$length=$static_length + length($_) + 1;
}
}
system(@_,@collect) if $#collect > -1;
}
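# Example: compress an arbitrarily long list of files in batches that fit
# within the kernel's command-line limit (this is how it is used further
# down for the man/info file lists):
#   xargs(\@files, "bzip2", "-9f");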
################################################################################
# Check if a file is a .so man page, for use by File::Find.
my @sofiles;
my @sodests;
sub find_so_man {
# The -s test is because a .so file tends to be small. We don't want
# to open every man page. 1024 is arbitrary.
if (! -f $_ || -s $_ > 1024) {
return;
}
# Test first line of file for the .so thing.
open (SOTEST,$_) || return;
my $l=<SOTEST>;
close SOTEST;
if ($l=~m/\.so\s+(.*)/) {
my $solink=$1;
# This test is here to prevent links like ... man8/../man8/foo.8
if (basename($File::Find::dir) eq dirname($solink)) {
$solink=basename($solink);
}
else {
$solink="../$solink";
}
push @sofiles,"$File::Find::dir/$_";
push @sodests,$solink;
}
}
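# A ".so" man page is a one-line roff stub such as ".so man8/foo.8" that
# merely includes another page. The conversion below replaces each stub
# with a symlink so the reference keeps working after compression.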
################################################################################
$RPM_BUILD_ROOT=$ENV{RPM_BUILD_ROOT};
chdir($RPM_BUILD_ROOT) || die "Can't cd to $ENV{RPM_BUILD_ROOT}: $!";
# Now the .so conversion.
@sofiles=@sodests=();
foreach $dir (qw{usr/man usr/X11R6/man usr/lib/perl5/man}) {
if (-e "$dir") {
find(\&find_so_man, "$dir");
}
}
foreach $sofile (@sofiles) {
my $sodest=shift(@sodests);
system "rm","-f",$sofile;
system "ln","-sf",$sodest,$sofile;
}
push @files, split(/\n/,`find usr/info usr/share/info usr/man usr/share/man usr/X11*/man usr/lib/perl5/man -type f ! -name "*.gz" -a ! -name "*.bz2" ! -name 'dir' ! -name 'whatis' 2>/dev/null || true`);
push @gz_files, split(/\n/,`find usr/info usr/share/info usr/man usr/share/man usr/X11*/man usr/lib/perl5/man -type f -name "*.gz" 2>/dev/null || true`);
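# If some pages ship already gzip'ed, decompress them first and re-exec
# this script so they are collected again and recompressed with bzip2.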
if (@gz_files) {
xargs(\@gz_files,"gzip","-d");
$? ? die "Decompression of a gzip'ed man/info file failed, fix this ASAP" : exec($0);
}
# Exclude files from compression.
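# For instance, EXCLUDE_FROM_COMPRESS="usr/share/man/man3" (a hypothetical
# value) would leave every file whose path matches that substring
# uncompressed.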
if (@files && defined($ENV{EXCLUDE_FROM_COMPRESS})) {
@new=();
foreach (@files) {
$ok=1;
foreach $x (split(' ',$ENV{EXCLUDE_FROM_COMPRESS})) {
if (/\Q$x\E/) {
$ok='';
last;
}
}
push @new,$_ if $ok;
}
@files=@new;
}
# Look for files with hard links. If we are going to compress both,
# we can preserve the hard link across the compression and save
# space in the end.
my @f=();
my %hardlinks;
my %seen;
foreach (@files) {
($dev, $inode, undef, $nlink)=stat($_);
if ($nlink > 1) {
if (! $seen{"$inode.$dev"}) {
$seen{"$inode.$dev"}=$_;
push @f, $_;
}
else {
# This is a hardlink.
$hardlinks{$_}=$seen{"$inode.$dev"};
}
}
else {
push @f, $_;
}
}
if (@f) {
# Strip any executable bits; compressed pages should not be executable.
xargs(\@f,"chmod","a-x");
xargs(\@f,"bzip2","-9f");
}
# Recreate the hard links between files that were hard-linked to each
# other before compression.
foreach (keys %hardlinks) {
# Remove old file.
system("rm","-f","$_");
# Make new hardlink.
system("ln","$hardlinks{$_}.bz2","$_.bz2");
}
# Fix up symlinks that were pointing to the uncompressed files.
open (FIND,"find usr/man -type l |");
while (<FIND>) {
chomp;
($directory)=m:(.*)/:;
$linkval=readlink($_);
if (! -e "$directory/$linkval" && -e "$directory/$linkval.bz2") {
system("rm","-f",$_);
system("ln","-sf","$linkval.bz2","$_.bz2");
} elsif (! -e "$directory/$linkval" && ! -e "$directory/$linkval.bz2" ) {
# Dangling link that points nowhere; just remove it.
unlink("$_");
}
}
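# Typical invocation (normally done automatically by spec-helper during an
# rpm build, with RPM_BUILD_ROOT already exported), shown here only as a
# hypothetical example:
#   RPM_BUILD_ROOT=/var/tmp/mypkg-buildroot ./compress_files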
# compress_files ends here