path: root/xt/git-http-backend.t
# Copyright (C) 2016-2020 all contributors <meta@public-inbox.org>
# License: AGPL-3.0+ <https://www.gnu.org/licenses/agpl-3.0.txt>
#
# Ensure buffering behavior in -httpd doesn't cause runaway memory use
# or data corruption
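#
# usage example (any sufficiently large clone works):
#   GIANT_GIT_DIR=/path/to/linux.git prove -bvw xt/git-http-backend.t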
use strict;
use warnings;
use Test::More;
use POSIX qw(setsid);
use PublicInbox::TestCommon;
use PublicInbox::Spawn qw(which);

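# xt/ tests are expensive extras; this one needs a big, real-world
# repository (e.g. a linux.git or git.git mirror) to make buffering
# problems visible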
my $git_dir = $ENV{GIANT_GIT_DIR};
plan skip_all => 'GIANT_GIT_DIR not defined' unless $git_dir;
require_mods(qw(BSD::Resource Plack::Util Plack::Builder
		HTTP::Date HTTP::Status Net::HTTP));
my $psgi = "./t/git-http-backend.psgi";
my ($tmpdir, $for_destroy) = tmpdir();
my $err = "$tmpdir/stderr.log";
my $out = "$tmpdir/stdout.log";
my $sock = tcp_server();
my $host = $sock->sockhost;
my $port = $sock->sockport;
my $td;

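# GET / on the test PSGI app returns the daemon's maximum RSS as a
# bare decimal integer (kilobytes, judging by the "K" notes below)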
my $get_maxrss = sub {
	my $http = Net::HTTP->new(Host => "$host:$port");
	ok($http, 'Net::HTTP object created for maxrss');
	$http->write_request(GET => '/');
	my ($code, $mess, %h) = $http->read_response_headers;
	is($code, 200, 'success reading maxrss');
	my $n = $http->read_entity_body(my $buf, 256);
	ok(defined $n, 'read response body');
	like($buf, qr/\A\d+\n\z/, 'got memory response');
	ok(int($buf) > 0, 'got non-zero memory response');
	int($buf);
};

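# spawn -httpd against the pre-bound socket on FD 3; -W0 disables
# worker processes so every request is served (and maxrss measured)
# in a single process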
{
	ok($sock, 'sock created');
	my $cmd = [ '-httpd', '-W0', "--stdout=$out", "--stderr=$err", $psgi ];
	$td = start_script($cmd, undef, { 3 => $sock });
}
my $mem_a = $get_maxrss->();

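# dumb HTTP client: request the largest packfile but never read the
# body, so the server must throttle instead of buffering the pack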
SKIP: {
	my $max = 0;
	my $pack;
	my $glob = "$git_dir/objects/pack/pack-*.pack";
	foreach my $f (glob($glob)) {
		my $n = -s $f;
		if ($n > $max) {
			$max = $n;
			$pack = $f;
		}
	}
	skip "no packs found in $git_dir" unless defined $pack;
	if ($pack !~ m!(/objects/pack/pack-[a-f0-9]{40}\.pack)\z!) {
		skip "bad pack name: $pack";
	}
	my $url = $1;
	my $http = Net::HTTP->new(Host => "$host:$port");
	ok($http, 'Net::HTTP object created');
	$http->write_request(GET => $url);
	my ($code, $mess, %h) = $http->read_response_headers;
	is($code, 200, 'got 200 success for pack');
	is($h{'Content-Length'}, $max, 'got expected Content-Length for pack');

	# no $http->read_entity_body, here, since we want to force buffering
	foreach my $i (1..3) {
		sleep 1;
		my $diff = $get_maxrss->() - $mem_a;
		note "${diff}K memory increase after $i seconds";
		ok($diff < 1024, 'no bloating caused by slow dumb client');
	}
}

SKIP: { # make sure Last-Modified + If-Modified-Since works with curl
	my $nr = 6;
	skip 'no description', $nr unless -f "$git_dir/description";
	my $mtime = (stat(_))[9];
	my $curl = which('curl');
	skip 'curl(1) not found', $nr unless $curl;
	my $url = "http://$host:$port/description";
	my $dst = "$tmpdir/desc";
	is(system($curl, qw(-RsSf), '-o', $dst, $url), 0, 'curl -R');
	is((stat($dst))[9], $mtime, 'curl used remote mtime');
	is(system($curl, qw(-sSf), '-z', $dst, '-o', "$dst.2", $url), 0,
		'curl -z noop');
	ok(!-e "$dst.2", 'no modification, nothing retrieved');
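	# backdate the local copy so the remote file appears newer to curl -z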
	utime(0, 0, $dst) or die "utime failed: $!";
	is(system($curl, qw(-sSfR), '-z', $dst, '-o', "$dst.2", $url), 0,
		'curl -z updates');
	ok(-e "$dst.2", 'faked modification, got new file retrieved');
}

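# smart HTTP clone: run git-clone in its own session so the entire
# process group can be paused via SIGSTOP to emulate a stalled client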
{
	my $c = fork;
	die "fork failed: $!" unless defined $c;
	if ($c == 0) {
		setsid();
		exec qw(git clone -q --mirror), "http://$host:$port/",
			"$tmpdir/mirror.git";
		die "Failed to start git clone: $!\n";
	}
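	# brief pause to let the child exec git-clone before we signal it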
	select(undef, undef, undef, 0.1);
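	# stop/resume the clone once per second; server memory must stay
	# bounded even while the client cannot read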
	foreach my $i (1..10) {
		is(kill('STOP', -$c), 1, 'signaled clone STOP');
		sleep 1;
		ok(kill('CONT', -$c), 'continued clone');
		my $diff = $get_maxrss->() - $mem_a;
		note "${diff}K memory increase after $i seconds";
		ok($diff < 2048, 'no bloating caused by slow smart client');
	}
	ok(kill('CONT', -$c), 'continued clone');
	is(waitpid($c, 0), $c, 'reaped wayward slow clone');
	is($?, 0, 'clone did not error out');
	note 'clone done, fsck-ing clone result...';
	is(system("git", "--git-dir=$tmpdir/mirror.git",
			qw(fsck --no-progress)), 0,
		'fsck did not report corruption');

	my $diff = $get_maxrss->() - $mem_a;
	note "${diff}K memory increase after smart clone";
	ok($diff < 2048, 'no bloating caused by slow smart client');
}

{
	ok($td->kill, 'killed httpd');
	$td->join;
}

done_testing();