This repository has been archived by the owner on May 12, 2018. It is now read-only.

Commit

Merge branch 'refactor-tar' into 'master'
Refactor Repository#archive_repo for tar files

Use `IO.pipe` instead of `StringIO` to archive repositories to tar files.  This addresses issue #1.

See merge request !5
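
For readers skimming the diff, here is a minimal, self-contained sketch of the `IO.pipe` pattern the new code relies on: a forked child streams a tar archive into the write end of the pipe while the parent hands the read end to an external compressor, so the archive never has to be buffered in memory the way the old `StringIO` version required. The helper name `stream_tar_through`, the sample `hello.txt` entry, and the `gzip` command are illustrative assumptions, not part of the commit; like the commit itself, it assumes a Unix platform where `fork` is available.

```ruby
require 'rubygems/package'
require 'fileutils'

# Stream a tiny tar archive through an external compressor without
# materializing it in memory (hypothetical helper, not from the commit).
def stream_tar_through(pipe_cmd, file_path)
  rd_pipe, rw_pipe = IO.pipe

  writer_pid = fork do
    rd_pipe.close # the child only writes
    Gem::Package::TarWriter.new(rw_pipe) do |tar|
      tar.add_file_simple('hello.txt', 0o644, 5) { |f| f.write('hello') }
    end
    rw_pipe.close # signals EOF to whoever reads the other end
  end

  rw_pipe.close # the parent only reads
  FileUtils.mkdir_p(File.dirname(file_path))
  File.open(file_path, 'wb') do |archive_file|
    # The compressor reads the tar stream from the pipe and writes the result
    system(*pipe_cmd, in: rd_pipe, out: archive_file)
  end
  rd_pipe.close
  Process.waitpid(writer_pid)
end

stream_tar_through(%w(gzip -c), '/tmp/example.tar.gz')
```

Closing the parent's copy of the write end before reading is what lets the compressor see EOF as soon as the child finishes writing.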
dzaporozhets committed Sep 29, 2014
2 parents d305de1 + 0f4241e commit a4dd010
Showing 1 changed file with 36 additions and 57 deletions.
lib/gitlab_git/repository.rb: 93 changes (36 additions & 57 deletions)
@@ -813,33 +813,33 @@ def create_archive(ref_name, pipe_cmd, file_path)
         prefix = File.basename(name)
         extension = Pathname.new(file_path).extname
 
-        if extension == ".zip"
+        if extension == '.zip'
           create_zip_archive(ref_name, file_path, prefix)
         else
-          # Create a tarfile in memory
-          tarfile = tar_string_io(ref_name, prefix)
-
-          if extension == ".tar"
-            File.new(file_path, "wb").write(tarfile.read)
-          else
-            compress_tar(tarfile, file_path, pipe_cmd)
+          rd_pipe, rw_pipe = IO.pipe
+          tar_pid = fork do
+            # Send the tar file to the write pipe
+            rd_pipe.close
+            Gem::Package::TarWriter.new(rw_pipe) do |tar|
+              tar.mkdir(prefix, 33261)
+
+              populated_index(ref_name).each do |entry|
+                add_archive_entry(tar, prefix, entry)
+              end
+            end
+            rw_pipe.close
           end
-        end
-      end
 
-      # Return a StringIO with the contents of the repo's tar file
-      def tar_string_io(ref_name, prefix)
-        tarfile = StringIO.new
-        Gem::Package::TarWriter.new(tarfile) do |tar|
-          tar.mkdir(prefix, 33261)
+          # Use the other end of the pipe to compress with bzip2 or gzip
+          FileUtils.mkdir_p(Pathname.new(file_path).dirname)
+          archive_file = File.new(file_path, 'wb')
+          rw_pipe.close
+          system(*pipe_cmd, in: rd_pipe, out: archive_file)
 
-          populated_index(ref_name).each do |entry|
-            add_archive_entry(tar, prefix, entry)
-          end
+          Process.waitpid(tar_pid)
+          rd_pipe.close
+          archive_file.close
         end
-
-        tarfile.rewind
-        tarfile
       end
 
       # Create a zip file with the contents of the repo
@@ -854,22 +854,26 @@ def create_zip_archive(ref_name, archive_path, prefix)
       # Add a file or directory from the index to the given tar or zip file
       def add_archive_entry(archive, prefix, entry)
         prefixed_path = File.join(prefix, entry[:path])
-        content = rugged.lookup(entry[:oid]).content unless submodule?(entry)
 
-        # Create a file in the archive for each index entry
-        if archive.is_a?(Zip::File)
-          unless submodule?(entry)
+        if submodule?(entry)
+          # Create an empty directory for submodules
+          mask = case archive
+                 when Zip::File then 0755
+                 else '100755'.to_i(8)
+                 end
+          archive.mkdir(prefixed_path, mask)
+        else
+          blob = rugged.lookup(entry[:oid])
+          content = blob.content
+
+          # Write the blob contents to the archive
+          if archive.is_a?(Zip::File)
             archive.get_output_stream(prefixed_path) do |os|
               os.write(content)
             end
-          end
-        else
-          if submodule?(entry)
-            # Create directories for submodules
-            archive.mkdir(prefixed_path, 33261)
           else
-            # Write the blob contents to the file
-            archive.add_file(prefixed_path, entry[:mode]) do |tf|
+            archive.add_file_simple(prefixed_path,
+                                    entry[:mode], blob.size) do |tf|
               tf.write(content)
             end
           end
@@ -882,31 +886,6 @@ def submodule?(index_entry)
         index_entry[:mode] == 57344
       end
 
-      # Send the +tar_string+ StringIO to +pipe_cmd+ for bzip2 or gzip
-      # compression.
-      def compress_tar(tar_string, file_path, pipe_cmd)
-        # Write the in-memory tarfile to a pipe
-        rd_pipe, rw_pipe = IO.pipe
-        tar_pid = fork do
-          rd_pipe.close
-          rw_pipe.write(tar_string.read)
-          rw_pipe.close
-        end
-
-        # Use the other end of the pipe to compress with bzip2 or gzip
-        FileUtils.mkdir_p(Pathname.new(file_path).dirname)
-        archive_file = File.new(file_path, "wb")
-        rw_pipe.close
-        compress_pid = spawn(*pipe_cmd, in: rd_pipe, out: archive_file)
-        rd_pipe.close
-
-        Process.waitpid(tar_pid)
-        Process.waitpid(compress_pid)
-
-        archive_file.close
-        tar_string.close
-      end
-
       # Return a Rugged::Index that has read from the tree at +ref_name+
       def populated_index(ref_name)
         tree = rugged.lookup(rugged.rev_parse_oid(ref_name)).tree
