We're recomputing the prefix length on every iteration of the ref
iterator. Precompute it for another speedup when iterating over 1
million refs:

  Benchmark 1: show-ref: single matching ref (revision = HEAD~)
    Time (mean ± σ):     100.3 ms ±   3.7 ms    [User: 97.3 ms, System: 2.8 ms]
    Range (min … max):    97.5 ms … 139.7 ms    1000 runs

  Benchmark 2: show-ref: single matching ref (revision = HEAD)
    Time (mean ± σ):      95.8 ms ±   3.4 ms    [User: 92.9 ms, System: 2.8 ms]
    Range (min … max):    93.0 ms … 121.9 ms    1000 runs

  Summary
    show-ref: single matching ref (revision = HEAD) ran
      1.05 ± 0.05 times faster than show-ref: single matching ref (revision = HEAD~)

Signed-off-by: Patrick Steinhardt
---
 refs/reftable-backend.c | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/refs/reftable-backend.c b/refs/reftable-backend.c
index 6c11c4a5e3..249a618b5a 100644
--- a/refs/reftable-backend.c
+++ b/refs/reftable-backend.c
@@ -346,6 +346,7 @@ struct reftable_ref_iterator {
 	struct object_id oid;
 
 	const char *prefix;
+	size_t prefix_len;
 	unsigned int flags;
 	int err;
 };
@@ -371,8 +372,8 @@ static int reftable_ref_iterator_advance(struct ref_iterator *ref_iterator)
 		if (!starts_with(iter->ref.refname, "refs/"))
 			continue;
 
-		if (iter->prefix &&
-		    strncmp(iter->prefix, iter->ref.refname, strlen(iter->prefix))) {
+		if (iter->prefix_len &&
+		    strncmp(iter->prefix, iter->ref.refname, iter->prefix_len)) {
 			iter->err = 1;
 			break;
 		}
@@ -481,6 +482,7 @@ static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_
 	iter = xcalloc(1, sizeof(*iter));
 	base_ref_iterator_init(&iter->base, &reftable_ref_iterator_vtable);
 	iter->prefix = prefix;
+	iter->prefix_len = prefix ? strlen(prefix) : 0;
 	iter->base.oid = &iter->oid;
 	iter->flags = flags;
 	iter->refs = refs;
--
2.44.0
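
For readers unfamiliar with the pattern, here is a minimal standalone sketch
of the same idea outside the reftable backend: compute strlen() once before
the loop and pass the cached length to strncmp() on every iteration. The
names (match_prefix, refnames, nr) are hypothetical and exist only for this
illustration; the patch above applies the idea inside
reftable_ref_iterator_advance() and ref_iterator_for_stack().

	/*
	 * Illustrative sketch only, not part of the patch: hoist strlen()
	 * out of a hot loop and reuse the precomputed length.
	 */
	#include <stdio.h>
	#include <string.h>

	static void match_prefix(const char *prefix, const char **refnames,
				 size_t nr)
	{
		/* Compute the prefix length once, not once per iteration. */
		size_t prefix_len = prefix ? strlen(prefix) : 0;

		for (size_t i = 0; i < nr; i++) {
			if (prefix_len &&
			    strncmp(prefix, refnames[i], prefix_len))
				continue;
			printf("%s\n", refnames[i]);
		}
	}

	int main(void)
	{
		const char *refs[] = { "refs/heads/main", "refs/tags/v1.0" };
		match_prefix("refs/heads/", refs, 2);
		return 0;
	}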