From: Eric Wong <e@80x24.org>
To: meta@public-inbox.org
Subject: [PATCH 5/5] viewvcs: support streaming large blobs
Date: Thu, 31 Jan 2019 04:27:24 +0000
Message-Id: <20190131042724.2675-6-e@80x24.org>
In-Reply-To: <20190131042724.2675-1-e@80x24.org>
References: <20190131042724.2675-1-e@80x24.org>
MIME-Version: 1.0
Content-Transfer-Encoding: 8bit
List-Id: <meta.public-inbox.org>

Forking off git-cat-file here for streaming large blobs is reasonably
efficient, at least no worse than using git-http-backend for serving
clones.  So let our limiter framework deal with it.

git itself isn't great for large files, and AFAIK there are no
stable/widely-available mechanisms for reading smaller chunks of
giant blobs.

Tested with some giant GPU headers in the Linux kernel.
---
 lib/PublicInbox/ViewVCS.pm | 37 +++++++++++++++++++++++++++++++++----
 1 file changed, 33 insertions(+), 4 deletions(-)

diff --git a/lib/PublicInbox/ViewVCS.pm b/lib/PublicInbox/ViewVCS.pm
index 85edf22..63731e9 100644
--- a/lib/PublicInbox/ViewVCS.pm
+++ b/lib/PublicInbox/ViewVCS.pm
@@ -34,6 +34,7 @@ END { $hl = undef };
 my %QP_MAP = ( A => 'oid_a', B => 'oid_b', a => 'path_a', b => 'path_b' );
 my $max_size = 1024 * 1024; # TODO: configurable
 my $enc_utf8 = find_encoding('UTF-8');
+my $BIN_DETECT = 8000; # same as git
 
 sub html_page ($$$) {
 	my ($ctx, $code, $strref) = @_;
@@ -43,7 +44,33 @@ sub html_page ($$$) {
 		my ($nr, undef) = @_;
 		$nr == 1 ? $$strref : undef;
 	});
-	$wcb->($res);
+	$wcb ? $wcb->($res) : $res;
+}
+
+sub stream_large_blob ($$$$) {
+	my ($ctx, $res, $logref, $fn) = @_;
+	my ($git, $oid, $type, $size, $di) = @$res;
+	my $cmd = ['git', "--git-dir=$git->{git_dir}", 'cat-file', $type, $oid];
+	my $qsp = PublicInbox::Qspawn->new($cmd);
+	my @cl = ('Content-Length', $size);
+	my $env = $ctx->{env};
+	$env->{'qspawn.response'} = delete $ctx->{-wcb};
+	$qsp->psgi_return($env, undef, sub {
+		my ($r, $bref) = @_;
+		if (!defined $r) { # error
+			html_page($ctx, 500, $logref);
+		} elsif (index($$bref, "\0") >= 0) {
+			my $ct = 'application/octet-stream';
+			[200, ['Content-Type', $ct, @cl ] ];
+		} else {
+			my $n = bytes::length($$bref);
+			if ($n >= $BIN_DETECT || $n == $size) {
+				my $ct = 'text/plain; charset=UTF-8';
+				return [200, ['Content-Type', $ct, @cl] ];
+			}
+			undef; # bref keeps growing
+		}
+	});
 }
 
 sub solve_result {
@@ -65,9 +92,13 @@ sub solve_result {
 	$ref eq 'ARRAY' or return html_page($ctx, 500, \$log);
 
 	my ($git, $oid, $type, $size, $di) = @$res;
+	my $path = to_filename($di->{path_b} || $hints->{path_b} || 'blob');
+	my $raw_link = "(<a\nhref=$path>raw</a>)";
 	if ($size > $max_size) {
+		return stream_large_blob($ctx, $res, \$log, $fn) if defined $fn;
 		# TODO: stream the raw file if it's gigantic, at least
-		$log = '<pre><b>Too big to show</b></pre>' . $log;
+		$log = "<pre><b>Too big to show, download available</b>\n" .
+			"$oid $type $size bytes $raw_link</pre>" . $log;
 		return html_page($ctx, 500, \$log);
 	}
 
@@ -86,8 +117,6 @@ sub solve_result {
 		return delete($ctx->{-wcb})->([200, $h, [ $$blob ]]);
 	}
 
-	my $path = to_filename($di->{path_b} || $hints->{path_b} || 'blob');
-	my $raw_link = "(<a\nhref=$path>raw</a>)";
 	if ($binary) {
 		$log = "<pre>$oid $type $size bytes (binary)" .
 			" $raw_link</pre>" . $log;
-- 
EW