summaryrefslogtreecommitdiff
path: root/dh_compress
blob: caffc11e9f158b73f03deb2fde2a20a67c1deec9 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#!/usr/bin/perl -w
#
# Compresses files and makes sure that symlinks pointing to the 
# compressed files get fixed.

use Cwd;
# Search the package's own debian/ directory before the system-wide
# debhelper library directory, so a local copy of Dh_Lib can override it.
BEGIN { push @INC, "debian", "/usr/lib/debhelper" }
use Dh_Lib;
# init() comes from Dh_Lib; it presumably parses @ARGV/debhelper options
# and populates the global %dh used throughout below — see Dh_Lib.
init();

# Main loop: compress the appropriate files in each package's temporary
# build directory, preserving hard links and repairing symlinks that
# pointed at the now-compressed files.
foreach $PACKAGE (@{$dh{DOPACKAGES}}) {
	$TMP=tmpdir($PACKAGE);
	# Optional per-package config file (debian/<package>.compress).
	$compress=pkgfile($PACKAGE,"compress");

	# Run the file name gathering commands from within the directory
	# structure that will be effected.
	$olddir=getcwd();
	verbose_print("cd $TMP");
	chdir($TMP) || error("Can't cd to $TMP: $!");

	# Figure out what files to compress.
	@files=();
	# First of all, deal with any files specified right on the command line.
	if (($PACKAGE eq $dh{FIRSTPACKAGE} || $dh{PARAMS_ALL}) && @ARGV) {
		# BUGFIX: was "push @files,#ARGV;" -- the stray "#" commented
		# out the list (leaving a syntax error), so files given on the
		# command line were never added.
		push @files, @ARGV;
	}
	if ($compress) {
		# The config file is a sh script that outputs the files to be compressed
		# (typically using find).
		push @files, split(/\n/,`sh $olddir/$compress 2>/dev/null`);
	}
	else {
		# By default, fall back to what the policy manual says to compress.
		push @files, split(/\n/,`
			find usr/info usr/man usr/X11*/man -type f ! -name "*.gz" 2>/dev/null || true;
			find usr/doc -type f \\( -size +4k -or -name "changelog*" \\) \\
				! -name "*.htm*" ! -name "*.gif" ! -name "*.gz" \\
				! -name "copyright" 2>/dev/null || true
		`);
	}

	# Exclude files from compression, matching each -X pattern as a
	# plain (quoted) substring of the filename.
	if (@files && defined($dh{EXCLUDE}) && $dh{EXCLUDE}) {
		@new=();
		foreach (@files) {
			$ok=1;
			foreach $x (@{$dh{EXCLUDE}}) {
				if (/\Q$x\E/) {
					$ok='';
					last;
				}
			}
			push @new,$_ if $ok;
		}
		@files=@new;
	}
	
	# Look for files with hard links. If we are going to compress both,
	# we can preserve the hard link across the compression and save
	# space in the end.
	my @f=();
	my %hardlinks;
	# BUGFIX: %seen is now lexical, so it is reset for every package.
	# It used to be an implicit global, letting inode/dev pairs recorded
	# while processing one package misclassify files in later packages
	# as hardlinks of files that do not exist in the current tmpdir.
	my %seen;
	foreach (@files) {
		($dev, $inode, undef, $nlink)=stat($_);
		if ($nlink > 1) {
			if (! $seen{"$inode.$dev"}) {
				# First file seen with this inode; compress it normally.
				$seen{"$inode.$dev"}=$_;
				push @f, $_;
			}
			else {
				# This is a hardlink; recreate it after compression.
				$hardlinks{$_}=$seen{"$inode.$dev"};
			}
		}
		else {
			push @f, $_;
		}
	}

	if (@f) {
		# -9 for best compression, -f to force even if a .gz exists.
		doit("gzip","-9f",@f);
	}
	
	# Now change over any files we can that used to be hard links so
	# they are again.
	foreach (keys %hardlinks) {
		# Remove old file.
		doit("rm","-f","$_");
		# Make new hardlink.
		doit("ln","$hardlinks{$_}.gz","$_.gz");
	}

	verbose_print("cd $olddir");
	chdir($olddir);

	# Fix up symlinks that were pointing to the uncompressed files.
	open (FIND,"find $TMP -type l |") ||
		error("cannot run find: $!");
	while (<FIND>) {
		chomp;
		# Directory part of the symlink's own path; the target is
		# resolved relative to it when the link value is relative.
		($directory)=m:(.*)/:;
		$linkval=readlink($_);
		if (! -e "$directory/$linkval" && -e "$directory/$linkval.gz") {
			doit("rm","-f",$_);
			doit("ln","-sf","$linkval.gz","$_.gz");
		}
	}
	close FIND;
}