author     Eric Wong <e@80x24.org>  2019-01-31 04:10:29 +0000
committer  Eric Wong <e@80x24.org>  2019-01-31 04:10:29 +0000
commit     658ca41d321d10acf098c43291577e1fd1fae479 (patch)
tree       3f51181b5469fb812a7f50d0a8a4a418e8a0a83e /lib/PublicInbox/ViewVCS.pm
parent     1664112a5d2db906bdb89e4a64b02155183eda05 (diff)
Forking off git-cat-file here for streaming large blobs is
reasonably efficient, at least no worse than using
git-http-backend for serving clones.  So let our limiter
framework deal with it.

git itself isn't great for large files, and AFAIK there are no
stable, widely-available mechanisms for reading smaller chunks
of giant blobs from within git.
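
An object's size, however, can be queried cheaply up front without
reading its contents, which is how code like solve_result below can
compare $size against $max_size before deciding whether to inline or
stream. A sketch using stable git plumbing, assuming $git_dir, $oid
and $max_size are in scope and $oid is trusted:

	# git cat-file -s prints the object size in bytes
	chomp(my $size = qx(git --git-dir=$git_dir cat-file -s $oid));
	my $inline = $size <= $max_size; # small: slurp; big: stream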

Tested with some giant GPU headers in the Linux kernel.
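
The hunk below hardcodes $BIN_DETECT = 8000 to mirror git's own
heuristic: a blob is treated as binary when a NUL byte appears
within its first 8000 bytes. A standalone sketch of that check
(the looks_binary name is illustrative, not part of the patch):

	sub looks_binary {
		my ($bref) = @_; # scalar ref to the blob's first chunk
		# scan only the first 8000 bytes, as git does
		index(substr($$bref, 0, 8000), "\0") >= 0;
	}
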
Diffstat (limited to 'lib/PublicInbox/ViewVCS.pm')
 -rw-r--r--  lib/PublicInbox/ViewVCS.pm  37
 1 file changed, 33 insertions(+), 4 deletions(-)
diff --git a/lib/PublicInbox/ViewVCS.pm b/lib/PublicInbox/ViewVCS.pm
index 85edf22f..63731e92 100644
--- a/lib/PublicInbox/ViewVCS.pm
+++ b/lib/PublicInbox/ViewVCS.pm
@@ -34,6 +34,7 @@ END { $hl = undef };
 my %QP_MAP = ( A => 'oid_a', B => 'oid_b', a => 'path_a', b => 'path_b' );
 my $max_size = 1024 * 1024; # TODO: configurable
 my $enc_utf8 = find_encoding('UTF-8');
+my $BIN_DETECT = 8000; # same as git
 
 sub html_page ($$$) {
         my ($ctx, $code, $strref) = @_;
@@ -43,7 +44,33 @@ sub html_page ($$$) {
                 my ($nr, undef) =  @_;
                 $nr == 1 ? $$strref : undef;
         });
-        $wcb->($res);
+        $wcb ? $wcb->($res) : $res;
+}
+
+sub stream_large_blob ($$$$) {
+        my ($ctx, $res, $logref, $fn) = @_;
+        my ($git, $oid, $type, $size, $di) = @$res;
+        my $cmd = ['git', "--git-dir=$git->{git_dir}", 'cat-file', $type, $oid];
+        my $qsp = PublicInbox::Qspawn->new($cmd);
+        my @cl = ('Content-Length', $size);
+        my $env = $ctx->{env};
+        $env->{'qspawn.response'} = delete $ctx->{-wcb};
+        $qsp->psgi_return($env, undef, sub {
+                my ($r, $bref) = @_;
+                if (!defined $r) { # error
+                        html_page($ctx, 500, $logref);
+                } elsif (index($$bref, "\0") >= 0) {
+                        my $ct = 'application/octet-stream';
+                        [200, ['Content-Type', $ct, @cl ] ];
+                } else {
+                        my $n = bytes::length($$bref);
+                        if ($n >= $BIN_DETECT || $n == $size) {
+                                my $ct = 'text/plain; charset=UTF-8';
+                                return [200, ['Content-Type', $ct, @cl] ];
+                        }
+                        undef; # bref keeps growing
+                }
+        });
 }
 
 sub solve_result {
@@ -65,9 +92,13 @@ sub solve_result {
         $ref eq 'ARRAY' or return html_page($ctx, 500, \$log);
 
         my ($git, $oid, $type, $size, $di) = @$res;
+        my $path = to_filename($di->{path_b} || $hints->{path_b} || 'blob');
+        my $raw_link = "(<a\nhref=$path>raw</a>)";
         if ($size > $max_size) {
+                return stream_large_blob($ctx, $res, \$log, $fn) if defined $fn;
                 # TODO: stream the raw file if it's gigantic, at least
-                $log = '<pre><b>Too big to show</b></pre>' . $log;
+                $log = "<pre><b>Too big to show, download available</b>\n" .
+                        "$oid $type $size bytes $raw_link</pre>" . $log;
                 return html_page($ctx, 500, \$log);
         }
 
@@ -86,8 +117,6 @@ sub solve_result {
                 return delete($ctx->{-wcb})->([200, $h, [ $$blob ]]);
         }
 
-        my $path = to_filename($di->{path_b} || $hints->{path_b} || 'blob');
-        my $raw_link = "(<a\nhref=$path>raw</a>)";
         if ($binary) {
                 $log = "<pre>$oid $type $size bytes (binary)" .
                         " $raw_link</pre>" . $log;