path: root/pack-check.c
author	Junio C Hamano <gitster@pobox.com>	2016-07-28 10:34:42 -0700
committer	Junio C Hamano <gitster@pobox.com>	2016-07-28 10:34:42 -0700
commit	ad2d77760434e1650c186c71fa04a8fdbd77266c (patch)
tree	e6f51ef4203047bda1d108bed7f3ca35196d8b4c /pack-check.c
parent	Merge branch 'nd/worktree-lock' (diff)
parent	fsck: use streaming interface for large blobs in pack (diff)
Merge branch 'nd/pack-ofs-4gb-limit'
"git pack-objects" and "git index-pack" mostly operate with off_t when talking about the offset of objects in a packfile, but there were a handful of places that used "unsigned long" to hold that value, leading to an unintended truncation. * nd/pack-ofs-4gb-limit: fsck: use streaming interface for large blobs in pack pack-objects: do not truncate result in-pack object size on 32-bit systems index-pack: correct "offset" type in unpack_entry_data() index-pack: report correct bad object offsets even if they are large index-pack: correct "len" type in unpack_data() sha1_file.c: use type off_t* for object_info->disk_sizep pack-objects: pass length to check_pack_crc() without truncation
Diffstat (limited to 'pack-check.c')
-rw-r--r--	pack-check.c	23
1 file changed, 21 insertions, 2 deletions
diff --git a/pack-check.c b/pack-check.c
index 1da89a41ce..d123846ea2 100644
--- a/pack-check.c
+++ b/pack-check.c
@@ -105,6 +105,8 @@ static int verify_packfile(struct packed_git *p,
 		void *data;
 		enum object_type type;
 		unsigned long size;
+		off_t curpos;
+		int data_valid;
 
 		if (p->index_version > 1) {
 			off_t offset = entries[i].offset;
@@ -116,8 +118,25 @@ static int verify_packfile(struct packed_git *p,
 					    sha1_to_hex(entries[i].sha1),
 					    p->pack_name, (uintmax_t)offset);
 		}
-		data = unpack_entry(p, entries[i].offset, &type, &size);
-		if (!data)
+
+		curpos = entries[i].offset;
+		type = unpack_object_header(p, w_curs, &curpos, &size);
+		unuse_pack(w_curs);
+
+		if (type == OBJ_BLOB && big_file_threshold <= size) {
+			/*
+			 * Let check_sha1_signature() check it with
+			 * the streaming interface; no point slurping
+			 * the data in-core only to discard.
+			 */
+			data = NULL;
+			data_valid = 0;
+		} else {
+			data = unpack_entry(p, entries[i].offset, &type, &size);
+			data_valid = 1;
+		}
+
+		if (data_valid && !data)
 			err = error("cannot unpack %s from %s at offset %"PRIuMAX"",
 				    sha1_to_hex(entries[i].sha1), p->pack_name,
 				    (uintmax_t)entries[i].offset);
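The in-code comment above defers large blobs to check_sha1_signature(), which verifies them through the streaming interface when it is handed data == NULL instead of an in-core buffer. As a rough, standalone illustration of that idea only (not git's implementation; OpenSSL's SHA-1 routines stand in for git's own git_SHA_CTX, and the file-path argument is hypothetical):

#include <stdio.h>
#include <openssl/sha.h>

/*
 * Sketch: hash a file in fixed-size chunks instead of reading the whole
 * thing into memory, the way a streaming object check avoids slurping a
 * huge blob in-core only to hash and discard it.
 */
static int hash_file_streaming(const char *path,
			       unsigned char out[SHA_DIGEST_LENGTH])
{
	unsigned char buf[8192];
	SHA_CTX ctx;
	size_t n;
	FILE *fp = fopen(path, "rb");

	if (!fp)
		return -1;
	SHA1_Init(&ctx);
	while ((n = fread(buf, 1, sizeof(buf), fp)) > 0)
		SHA1_Update(&ctx, buf, n);	/* constant memory, any size */
	SHA1_Final(out, &ctx);
	fclose(fp);
	return 0;
}

int main(int argc, char **argv)
{
	unsigned char sha1[SHA_DIGEST_LENGTH];
	int i;

	if (argc < 2 || hash_file_streaming(argv[1], sha1))
		return 1;
	for (i = 0; i < SHA_DIGEST_LENGTH; i++)
		printf("%02x", sha1[i]);
	printf("\n");
	return 0;
}

The point is that memory use stays bounded by the chunk buffer regardless of blob size, which is why verify_packfile() no longer needs to call unpack_entry() on blobs larger than big_file_threshold just to check their hashes.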