Merge branch 'jk/big-and-future-archive-tar'

"git archive" learned to handle files that are larger than 8GB and
commits far in the future than expressible by the traditional US-TAR
format.

* jk/big-and-future-archive-tar:
  archive-tar: drop return value
  archive-tar: write extended headers for far-future mtime
  archive-tar: write extended headers for file sizes >= 8GB
  t5000: test tar files that overflow ustar headers
  t9300: factor out portable "head -c" replacement
Junio C Hamano
2016-07-13 11:24:18 -07:00
6 changed files with 138 additions and 30 deletions
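
For context: ustar stores size and mtime as 12-byte octal fields, which max
out at octal 77777777777 (8GB minus one byte as a size; mid-year 2242 as a
timestamp). Past that, git archive now emits a pax extended header record
carrying the real value, which pax-aware tars apply transparently. A quick way
to see the mtime side in action (a sketch, assuming GNU tar and a throwaway
repo; the names and timestamp are illustrative):

git init demo && cd demo
echo content >file && git add file
GIT_COMMITTER_DATE="@68719476737 +0000" git commit -m future
git archive HEAD >future.tar
tar tvf future.tar   # GNU tar reads the pax mtime record and lists year 4147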

t/t5000-tar-tree.sh

@@ -319,4 +319,78 @@ test_expect_success 'catch non-matching pathspec' '
	test_must_fail git archive -v HEAD -- "*.abc" >/dev/null
'

# Pull the size and date of each entry in a tarfile using the system tar.
#
# We'll pull out only the year from the date; that avoids any question of
# timezones impacting the result (as long as we keep our test times away from a
# year boundary; our reference times are all in August).
#
# The output of tar_info is expected to be "<size> <year>", both in decimal. It
# ignores the return value of tar. We have to do this, because some of our test
# input is only partial (the real data is 64GB in some cases).
tar_info () {
	"$TAR" tvf "$1" |
	awk '{
		split($4, date, "-")
		print $3 " " date[1]
	}'
}
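
For reference, a GNU tar verbose listing line looks roughly like this (the
layout is an assumption, and it varies between tar implementations, which is
why the TAR_HUGE prereq below verifies that we can parse it):

-rw-rw-r-- root/root 68719476737 4147-08-01 19:18 huge

awk's $3 is the size column and $4 the date column, so splitting $4 on "-"
leaves the year in date[1].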

# See if our system tar can handle a tar file with huge sizes and dates far in
# the future, and that we can actually parse its output.
#
# The reference file was generated by GNU tar, and the magic time and size are
# both octal 01000000000001, which overflows normal ustar fields.
test_lazy_prereq TAR_HUGE '
	echo "68719476737 4147" >expect &&
	tar_info "$TEST_DIRECTORY"/t5000/huge-and-future.tar >actual &&
	test_cmp expect actual
'
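
Both expected values fall out of the magic number: octal 01000000000001 is
8^12 + 1. A quick check (a sketch; feeding a raw @epoch value to -d assumes
GNU date):

# a leading 0 makes shell arithmetic read the constant as octal
echo $((01000000000001))          # 68719476737, the size (64GB + 1)
TZ=UTC date -d @68719476737 +%Y   # 4147, the year when read as an mtime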

test_expect_success 'set up repository with huge blob' '
	obj_d=19 &&
	obj_f=f9c8273ec45a8938e6999cb59b3ff66739902a &&
	obj=${obj_d}${obj_f} &&
	mkdir -p .git/objects/$obj_d &&
	cp "$TEST_DIRECTORY"/t5000/$obj .git/objects/$obj_d/$obj_f &&
	rm -f .git/index &&
	git update-index --add --cacheinfo 100644,$obj,huge &&
	git commit -m huge
'
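
The test grafts a pre-made loose object into .git/objects rather than hashing
64GB of data on every run. Such a fixture can be produced once and shipped
with the tests, since a blob of zero bytes deflates to a tiny loose object (a
sketch of how one could be generated, not necessarily how the in-tree object
was made; expect it to take a while):

# stream 64GB + 1 bytes of NULs into a loose blob object
perl -e 'print "\0" x (1024 * 1024) for 1..65536; print "\0"' |
git hash-object -w --stdin   # prints the object ID (19f9c827... only if
                             # the content matches the in-tree fixture)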

# We expect git to die with SIGPIPE here (otherwise we would generate
# the whole 64GB); the shell reports that death as exit code 128 + 13.
test_expect_success 'generate tar with huge size' '
	{
		git archive HEAD
		echo $? >exit-code
	} | test_copy_bytes 4096 >huge.tar &&
	echo 141 >expect &&
	test_cmp expect exit-code
'
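
The expected exit code follows the usual shell convention for death by signal
(a quick check; kill -l taking a number is a bash-ism):

kill -l 13          # PIPE: SIGPIPE is signal 13 on typical systems
echo $((128 + 13))  # 141: shells report death-by-signal as 128 + signal number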

test_expect_success TAR_HUGE 'system tar can read our huge size' '
	echo 68719476737 >expect &&
	tar_info huge.tar | cut -d" " -f1 >actual &&
	test_cmp expect actual
'

test_expect_success 'set up repository with far-future commit' '
	rm -f .git/index &&
	echo content >file &&
	git add file &&
	GIT_COMMITTER_DATE="@68719476737 +0000" \
	git commit -m "tempori parendum"
'
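
The @<seconds> form hands git a raw epoch timestamp, so the setup can be
verified directly (a sketch):

git log -1 --format=%ct   # %ct is the raw committer timestamp: 68719476737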

test_expect_success 'generate tar with future mtime' '
	git archive HEAD >future.tar
'

test_expect_success TAR_HUGE 'system tar can read our future mtime' '
	echo 4147 >expect &&
	tar_info future.tar | cut -d" " -f2 >actual &&
	test_cmp expect actual
'
test_done

t/t5000/huge-and-future.tar (new binary file; contents not shown)

t/t9300-fast-import.sh

@@ -7,23 +7,6 @@ test_description='test git fast-import utility'
. ./test-lib.sh
. "$TEST_DIRECTORY"/diff-lib.sh ;# test-lib chdir's into trash

# Print $1 bytes from stdin to stdout.
#
# This could be written as "head -c $1", but IRIX "head" does not
# support the -c option.
head_c () {
	perl -e '
		my $len = $ARGV[1];
		while ($len > 0) {
			my $s;
			my $nread = sysread(STDIN, $s, $len);
			die "cannot read: $!" unless defined($nread);
			print $s;
			$len -= $nread;
		}
	' - "$1"
}

verify_packs () {
	for p in .git/objects/pack/*.pack
	do
@@ -2481,7 +2464,7 @@ test_expect_success PIPE 'R: copy using cat-file' '
	read blob_id type size <&3 &&
	echo "$blob_id $type $size" >response &&
	head_c $size >blob <&3 &&
	test_copy_bytes $size >blob <&3 &&
	read newline <&3 &&
	cat <<-EOF &&
@@ -2524,7 +2507,7 @@ test_expect_success PIPE 'R: print blob mid-commit' '
	EOF
	read blob_id type size <&3 &&
	head_c $size >actual <&3 &&
	test_copy_bytes $size >actual <&3 &&
	read newline <&3 &&
	echo
@@ -2559,7 +2542,7 @@ test_expect_success PIPE 'R: print staged blob within commit' '
	echo "cat-blob $to_get" &&
	read blob_id type size <&3 &&
	head_c $size >actual <&3 &&
	test_copy_bytes $size >actual <&3 &&
	read newline <&3 &&
	echo deleteall

t/test-lib-functions.sh

@@ -961,3 +961,18 @@ test_env () {
	done
	)
}

# Read up to "$1" bytes (or to EOF) from stdin and write them to stdout.
test_copy_bytes () {
	perl -e '
		my $len = $ARGV[1];
		while ($len > 0) {
			my $s;
			my $nread = sysread(STDIN, $s, $len);
			die "cannot read: $!" unless defined($nread);
			last unless $nread; # stop at EOF, as promised above
			print $s;
			$len -= $nread;
		}
	' - "$1"
}
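
Typical use mirrors the t5000 caller above: cap a stream that would otherwise
be enormous (a sketch):

# keep only the first 4KB; the writer dies with SIGPIPE once we stop reading
git archive HEAD | test_copy_bytes 4096 >sample.tar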