Mirror of https://github.com/github/grit.git
added a pure-ruby git library and converted the cat_file commands to use it
Parent: 9db28e5a65
Commit: f3a24ae371
@ -0,0 +1,32 @@
This is a listing of all the places I can find that Grit actually does a
'git' system call. My goal is to add native Ruby versions of all of them.

Completed
===========================
** lib/grit/blob.rb:36: @size ||= @repo.git.cat_file({:s => true}, id).chomp.to_i
** lib/grit/blob.rb:43: @data ||= @repo.git.cat_file({:p => true}, id)


Next to do
===========================
lib/grit/tree.rb:16: output = repo.git.ls_tree({}, treeish, *paths)
lib/grit/config.rb:9: @repo.git.config({}, key, value)
lib/grit/config.rb:40: @repo.git.config(:list => true).split(/\n/)
lib/grit/tag.rb:28: output = repo.git.for_each_ref(actual_options, "refs/tags")
lib/grit/head.rb:37: output = repo.git.for_each_ref(actual_options, HEAD_PREFIX)
lib/grit/head.rb:50: self.new($1, repo.git.rev_parse(options, 'HEAD'))


May not be fast enough
=============================
lib/grit/commit.rb:74: repo.git.rev_list({}, ref).strip.split("\n").size
lib/grit/commit.rb:92: output = repo.git.rev_list(actual_options, ref)
lib/grit/commit.rb:94: output = repo.git.rev_list(actual_options.merge(:all => true))
lib/grit/blob.rb:58: data = repo.git.blame({:p => true}, commit, '--', file)


More Difficult
===========================
lib/grit/commit.rb:39: @id_abbrev ||= @repo.git.rev_parse({:short => true}, self.id).chomp
lib/grit/commit.rb:150: text = repo.git.diff({:full_index => true}, *paths)
lib/grit/commit.rb:156: diff = @repo.git.show({:full_index => true, :pretty => 'raw'}, @id)
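For reference, a minimal sketch of what the two "Completed" entries above mean in practice (not part of this commit's diff; the repo path and the blob lookup are only illustrative): blob size and blob data are the call sites that now resolve through the pure-Ruby object reader instead of a `git cat-file` child process.

    repo = Grit::Repo.new("/path/to/repo")      # placeholder path
    blob = repo.tree / "README.txt"             # hypothetical lookup; any Grit::Blob works

    blob.size   # lib/grit/blob.rb:36 -> @repo.git.cat_file({:s => true}, id)
    blob.data   # lib/grit/blob.rb:43 -> @repo.git.cat_file({:p => true}, id)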
@ -11,10 +11,12 @@ require 'timeout'
require 'rubygems'
require 'mime/types'
require 'open4'
require 'digest/sha1'

# internal requires
require 'grit/lazy'
require 'grit/errors'
require 'grit/git-ruby'
require 'grit/git'
require 'grit/ref'
require 'grit/commit'
@ -26,6 +28,7 @@ require 'grit/config'
require 'grit/repo'
require 'grit/index'


module Grit
  class << self
    attr_accessor :debug
@ -0,0 +1,35 @@
require 'grit/git-ruby/repository'

module Grit

  module GitRuby

    attr_accessor :ruby_gitdir

    # (raw) allowed_options = [:max_count, :skip, :since, :all]
    def cat_file(options, ref)
      if options[:t]
        file_type(ref)
        return
      elsif options[:s]
        file_size(ref)
      elsif options[:p]
        ruby_git_dir.cat_file(ref)
      end
    end

    def file_size(ref)
      ruby_git_dir.cat_file_size(ref).to_s
    end

    def file_type(ref)
      ruby_git_dir.cat_file_type(ref)
    end

    def ruby_git_dir
      @ruby_gitdir ||= Repository.new(@git_dir)
    end

  end

end
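A hedged usage sketch of the module above (repository path and SHA are placeholders): because `Grit::Git` now includes `Grit::GitRuby` (see the `include Grit::GitRuby` hunk further down), these calls are answered by the pure-Ruby `Repository` behind `ruby_git_dir` instead of shelling out to git.

    git = Grit::Git.new("/path/to/repo/.git")
    sha = "4232d073306f01cf0b895864e5a5cfad7dd76fce"

    git.cat_file({:p => true}, sha)   # object contents via Repository#cat_file
    git.cat_file({:s => true}, sha)   # size as a string via Repository#cat_file_size
    git.file_type(sha)                # :blob / :tree / :commit via Repository#cat_file_type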
@ -0,0 +1,137 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
# Scott Chacon <schacon@gmail.com>
#
# provides native ruby access to git objects and pack files
#

require 'zlib'
require 'digest/sha1'

require 'grit/git-ruby/internal/raw_object'

module Grit
  module GitRuby
    module Internal
      class LooseObjectError < StandardError
      end

      class LooseStorage
        def initialize(directory)
          @directory = directory
        end

        def [](sha1)
          sha1 = sha1.unpack("H*")[0]
          begin
            path = @directory+'/'+sha1[0...2]+'/'+sha1[2..39]
            get_raw_object(File.read(path))
          rescue Errno::ENOENT
            nil
          end
        end

        def get_raw_object(buf)
          if buf.length < 2
            raise LooseObjectError, "object file too small"
          end

          if legacy_loose_object?(buf)
            content = Zlib::Inflate.inflate(buf)
            header, content = content.split(/\0/, 2)
            if !header || !content
              raise LooseObjectError, "invalid object header"
            end
            type, size = header.split(/ /, 2)
            if !%w(blob tree commit tag).include?(type) || size !~ /^\d+$/
              raise LooseObjectError, "invalid object header"
            end
            type = type.to_sym
            size = size.to_i
          else
            type, size, used = unpack_object_header_gently(buf)
            content = Zlib::Inflate.inflate(buf[used..-1])
          end
          raise LooseObjectError, "size mismatch" if content.length != size
          return RawObject.new(type, content)
        end

        # currently, I'm using the legacy format because it's easier to do
        # this function takes content and a type and writes out the loose object and returns a sha
        def put_raw_object(content, type)
          size = content.length.to_s
          LooseStorage.verify_header(type, size)

          header = "#{type} #{size}\0"
          store = header + content

          sha1 = Digest::SHA1.hexdigest(store)
          path = @directory+'/'+sha1[0...2]+'/'+sha1[2..40]

          if !File.exists?(path)
            content = Zlib::Deflate.deflate(store)

            FileUtils.mkdir_p(@directory+'/'+sha1[0...2])
            File.open(path, 'w') do |f|
              f.write content
            end
          end
          return sha1
        end

        # simply figure out the sha
        def self.calculate_sha(content, type)
          size = content.length.to_s
          verify_header(type, size)
          header = "#{type} #{size}\0"
          store = header + content

          Digest::SHA1.hexdigest(store)
        end

        def self.verify_header(type, size)
          if !%w(blob tree commit tag).include?(type) || size !~ /^\d+$/
            raise LooseObjectError, "invalid object header"
          end
        end

        # private
        def unpack_object_header_gently(buf)
          used = 0
          c = buf[used]
          used += 1

          type = (c >> 4) & 7;
          size = c & 15;
          shift = 4;
          while c & 0x80 != 0
            if buf.length <= used
              raise LooseObjectError, "object file too short"
            end
            c = buf[used]
            used += 1

            size += (c & 0x7f) << shift
            shift += 7
          end
          type = OBJ_TYPES[type]
          if ![:blob, :tree, :commit, :tag].include?(type)
            raise LooseObjectError, "invalid loose object type"
          end
          return [type, size, used]
        end
        private :unpack_object_header_gently

        def legacy_loose_object?(buf)
          word = (buf[0] << 8) + buf[1]
          buf[0] == 0x78 && word % 31 == 0
        end
        private :legacy_loose_object?
      end
    end
  end
end
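A small sketch of exercising `LooseStorage` on its own (the objects path is a placeholder, and `put_raw_object` assumes `fileutils` has been loaded for `FileUtils.mkdir_p`):

    require 'fileutils'

    storage = Grit::GitRuby::Internal::LooseStorage.new("/path/to/repo/.git/objects")

    sha_hex = storage.put_raw_object("hello\n", "blob")   # writes xx/yyyy... under objects/, returns the hex SHA
    raw     = storage[[sha_hex].pack("H*")]               # the [] lookup expects the 20-byte binary SHA
    raw.type     # => :blob
    raw.content  # => "hello\n"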
@ -0,0 +1,59 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
# Scott Chacon <schacon@gmail.com>
#
# provides native ruby access to git objects and pack files
#

begin
  require 'mmap'
rescue LoadError

module Grit
  module GitRuby
    module Internal
      class Mmap
        def initialize(file)
          @file = file
          @offset = nil
        end

        def unmap
          @file = nil
        end

        def [](*idx)
          idx = idx[0] if idx.length == 1
          case idx
          when Range
            offset = idx.first
            len = idx.last - idx.first + idx.exclude_end? ? 0 : 1
          when Fixnum
            offset = idx
            len = nil
          when Array
            offset, len = idx
          else
            raise RuntimeError, "invalid index param: #{idx.class}"
          end
          if @offset != offset
            @file.seek(offset)
          end
          @offset = offset + len ? len : 1
          if not len
            @file.read(1)[0]
          else
            @file.read(len)
          end
        end
      end
    end
  end
end

end # rescue LoadError
@ -0,0 +1,275 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
# Scott Chacon <schacon@gmail.com>
#
# provides native ruby access to git objects and pack files
#

require 'zlib'
require 'grit/git-ruby/internal/raw_object'
require 'grit/git-ruby/internal/mmap'

module Grit
  module GitRuby
    module Internal
      class PackFormatError < StandardError
      end

      class PackStorage
        OBJ_OFS_DELTA = 6
        OBJ_REF_DELTA = 7

        FanOutCount = 256
        SHA1Size = 20
        IdxOffsetSize = 4
        OffsetSize = 4
        OffsetStart = FanOutCount * IdxOffsetSize
        SHA1Start = OffsetStart + OffsetSize
        EntrySize = OffsetSize + SHA1Size

        def initialize(file)
          if file =~ /\.idx$/
            file = file[0...-3] + 'pack'
          end

          @name = file
          @packfile = File.open(file)
          @idxfile = File.open(file[0...-4]+'idx')
          @idx = Mmap.new(@idxfile)

          @offsets = [0]
          FanOutCount.times do |i|
            pos = @idx[i * IdxOffsetSize,IdxOffsetSize].unpack('N')[0]
            if pos < @offsets[i]
              raise PackFormatError, "pack #@name has discontinuous index #{i}"
            end
            @offsets << pos
          end

          @size = @offsets[-1]
        end

        # given an index file, list out the shas that its packfile contains
        def self.get_shas(index_file)
          @idxfile = File.open(index_file)
          @idx = Mmap.new(@idxfile)
          @offsets = [0]
          FanOutCount.times do |i|
            pos = @idx[i * IdxOffsetSize,IdxOffsetSize].unpack('N')[0]
            if pos < @offsets[i]
              raise PackFormatError, "pack #@name has discontinuous index #{i}"
            end
            @offsets << pos
          end

          @size = @offsets[-1]
          shas = []

          pos = SHA1Start
          @size.times do
            sha1 = @idx[pos,SHA1Size]
            pos += EntrySize
            shas << sha1.unpack("H*").first
          end
          shas
        end

        def name
          @name
        end

        def close
          @packfile.close
          @idx.unmap
          @idxfile.close
        end

        def [](sha1)
          offset = find_object(sha1)
          return nil if !offset
          return parse_object(offset)
        end

        def each_entry
          pos = OffsetStart
          @size.times do
            offset = @idx[pos,OffsetSize].unpack('N')[0]
            sha1 = @idx[pos+OffsetSize,SHA1Size]
            pos += EntrySize
            yield sha1, offset
          end
        end

        def each_sha1
          # unpacking the offset is quite expensive, so
          # we avoid using #each
          pos = SHA1Start
          @size.times do
            sha1 = @idx[pos,SHA1Size]
            pos += EntrySize
            yield sha1
          end
        end

        def find_object(sha1)
          slot = sha1[0]
          return nil if !slot
          first, last = @offsets[slot,2]
          while first < last
            mid = (first + last) / 2
            midsha1 = @idx[SHA1Start + mid * EntrySize,SHA1Size]
            cmp = midsha1 <=> sha1

            if cmp < 0
              first = mid + 1
            elsif cmp > 0
              last = mid
            else
              pos = OffsetStart + mid * EntrySize
              offset = @idx[pos,OffsetSize].unpack('N')[0]
              return offset
            end
          end

          nil
        end
        private :find_object

        def parse_object(offset)
          data, type = unpack_object(offset)
          RawObject.new(OBJ_TYPES[type], data)
        end
        protected :parse_object

        def unpack_object(offset)
          obj_offset = offset
          @packfile.seek(offset)

          c = @packfile.read(1)[0]
          size = c & 0xf
          type = (c >> 4) & 7
          shift = 4
          offset += 1
          while c & 0x80 != 0
            c = @packfile.read(1)[0]
            size |= ((c & 0x7f) << shift)
            shift += 7
            offset += 1
          end

          case type
          when OBJ_OFS_DELTA, OBJ_REF_DELTA
            data, type = unpack_deltified(type, offset, obj_offset, size)
          when OBJ_COMMIT, OBJ_TREE, OBJ_BLOB, OBJ_TAG
            data = unpack_compressed(offset, size)
          else
            raise PackFormatError, "invalid type #{type}"
          end
          [data, type]
        end
        private :unpack_object

        def unpack_deltified(type, offset, obj_offset, size)
          @packfile.seek(offset)
          data = @packfile.read(SHA1Size)

          if type == OBJ_OFS_DELTA
            i = 0
            c = data[i]
            base_offset = c & 0x7f
            while c & 0x80 != 0
              c = data[i += 1]
              base_offset += 1
              base_offset <<= 7
              base_offset |= c & 0x7f
            end
            base_offset = obj_offset - base_offset
            offset += i + 1
          else
            base_offset = find_object(data)
            offset += SHA1Size
          end

          base, type = unpack_object(base_offset)
          delta = unpack_compressed(offset, size)
          [patch_delta(base, delta), type]
        end
        private :unpack_deltified

        def unpack_compressed(offset, destsize)
          outdata = ""
          @packfile.seek(offset)
          zstr = Zlib::Inflate.new
          while outdata.size < destsize
            indata = @packfile.read(4096)
            if indata.size == 0
              raise PackFormatError, 'error reading pack data'
            end
            outdata += zstr.inflate(indata)
          end
          if outdata.size > destsize
            raise PackFormatError, 'error reading pack data'
          end
          zstr.close
          outdata
        end
        private :unpack_compressed

        def patch_delta(base, delta)
          src_size, pos = patch_delta_header_size(delta, 0)
          if src_size != base.size
            raise PackFormatError, 'invalid delta data'
          end

          dest_size, pos = patch_delta_header_size(delta, pos)
          dest = ""
          while pos < delta.size
            c = delta[pos]
            pos += 1
            if c & 0x80 != 0
              pos -= 1
              cp_off = cp_size = 0
              cp_off = delta[pos += 1] if c & 0x01 != 0
              cp_off |= delta[pos += 1] << 8 if c & 0x02 != 0
              cp_off |= delta[pos += 1] << 16 if c & 0x04 != 0
              cp_off |= delta[pos += 1] << 24 if c & 0x08 != 0
              cp_size = delta[pos += 1] if c & 0x10 != 0
              cp_size |= delta[pos += 1] << 8 if c & 0x20 != 0
              cp_size |= delta[pos += 1] << 16 if c & 0x40 != 0
              cp_size = 0x10000 if cp_size == 0
              pos += 1
              dest += base[cp_off,cp_size]
            elsif c != 0
              dest += delta[pos,c]
              pos += c
            else
              raise PackFormatError, 'invalid delta data'
            end
          end
          dest
        end
        private :patch_delta

        def patch_delta_header_size(delta, pos)
          size = 0
          shift = 0
          begin
            c = delta[pos]
            if c == nil
              raise PackFormatError, 'invalid delta header'
            end
            pos += 1
            size |= (c & 0x7f) << shift
            shift += 7
          end while c & 0x80 != 0
          [size, pos]
        end
        private :patch_delta_header_size
      end
    end
  end
end
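A brief sketch of reading from a packfile with `PackStorage` (the pack name is a placeholder); note that `#[]` takes the 20-byte binary SHA, which is also the form `Repository#get_raw_object_by_sha1` passes in:

    pack = Grit::GitRuby::Internal::PackStorage.new(
             "/path/to/repo/.git/objects/pack/pack-1234abcd.pack")

    bin_sha = ["4232d073306f01cf0b895864e5a5cfad7dd76fce"].pack("H*")
    if raw = pack[bin_sha]
      raw.type     # :commit, :tree, :blob or :tag (deltified entries are resolved via patch_delta)
      raw.content  # fully inflated object data
    end

    pack.each_sha1 { |s| puts s.unpack("H*")[0] }   # every object id stored in this pack
    pack.close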
@ -0,0 +1,37 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
#
# provides native ruby access to git objects and pack files
#

require 'digest/sha1'

module Grit
  module GitRuby
    module Internal
      OBJ_NONE = 0
      OBJ_COMMIT = 1
      OBJ_TREE = 2
      OBJ_BLOB = 3
      OBJ_TAG = 4

      OBJ_TYPES = [nil, :commit, :tree, :blob, :tag].freeze

      class RawObject
        attr_accessor :type, :content
        def initialize(type, content)
          @type = type
          @content = content
        end

        def sha1
          Digest::SHA1.digest("%s %d\0" % [@type, @content.length] + @content)
        end
      end
    end
  end
end
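As a quick sanity check on `RawObject#sha1` above: a git object id is the SHA-1 of the header `"<type> <size>\0"` followed by the content, so hashing a raw blob here should agree with `git hash-object`:

    require 'digest/sha1'

    raw = Grit::GitRuby::Internal::RawObject.new(:blob, "hello\n")
    raw.sha1.unpack("H*")[0]
    # => "ce013625030ba8dba906f756967f9e9ca394464a"  (same as `echo hello | git hash-object --stdin`)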
@ -0,0 +1,298 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
# Scott Chacon <schacon@gmail.com>
#
# provides native ruby access to git objects and pack files
#

# These classes translate the raw binary data kept in the sha encoded files
# into parsed data that can then be used in another fashion

module Grit
  module GitRuby

    # class for author/committer/tagger lines
    class UserInfo
      attr_accessor :name, :email, :date, :offset

      def initialize(str)
        m = /^(.*?) <(.*)> (\d+) ([+-])0*(\d+?)$/.match(str)
        if !m
          raise RuntimeError, "invalid %s header in commit" % str
        end
        @name = m[1]
        @email = m[2]
        @date = Time.at(Integer(m[3]))
        @offset = (m[4] == "-" ? -1 : 1)*Integer(m[5])
      end

      def to_s
        "%s <%s> %s %+05d" % [@name, @email, @date.to_i, @offset]
      end
    end

    # base class for all git objects (blob, tree, commit, tag)
    class Object
      attr_accessor :repository

      def Object.from_raw(rawobject, repository = nil)
        case rawobject.type
        when :blob
          return Blob.from_raw(rawobject, repository)
        when :tree
          return Tree.from_raw(rawobject, repository)
        when :commit
          return Commit.from_raw(rawobject, repository)
        when :tag
          return Tag.from_raw(rawobject, repository)
        else
          raise RuntimeError, "got invalid object-type"
        end
      end

      def initialize
        raise NotImplemented, "abstract class"
      end

      def type
        raise NotImplemented, "abstract class"
      end

      def raw_content
        raise NotImplemented, "abstract class"
      end

      def sha1
        Digest::SHA1.hexdigest("%s %d\0" % \
          [self.type, self.raw_content.length] + \
          self.raw_content)
      end
    end

    class Blob < Object
      attr_accessor :content

      def self.from_raw(rawobject, repository)
        new(rawobject.content)
      end

      def initialize(content, repository=nil)
        @content = content
        @repository = repository
      end

      def type
        :blob
      end

      def raw_content
        @content
      end
    end

    class DirectoryEntry
      S_IFMT = 00170000
      S_IFLNK = 0120000
      S_IFREG = 0100000
      S_IFDIR = 0040000

      attr_accessor :mode, :name, :sha1
      def initialize(buf)
        m = /^(\d+) (.*)\0(.{20})$/m.match(buf)
        if !m
          raise RuntimeError, "invalid directory entry"
        end
        @mode = 0
        m[1].each_byte do |i|
          @mode = (@mode << 3) | (i-'0'[0])
        end
        @name = m[2]
        @sha1 = m[3].unpack("H*")[0]

        if ![S_IFLNK, S_IFDIR, S_IFREG].include?(@mode & S_IFMT)
          raise RuntimeError, "unknown type for directory entry"
        end
      end

      def type
        case @mode & S_IFMT
        when S_IFLNK
          @type = :link
        when S_IFDIR
          @type = :directory
        when S_IFREG
          @type = :file
        else
          raise RuntimeError, "unknown type for directory entry"
        end
      end

      def type=(type)
        case @type
        when :link
          @mode = (@mode & ~S_IFMT) | S_IFLNK
        when :directory
          @mode = (@mode & ~S_IFMT) | S_IFDIR
        when :file
          @mode = (@mode & ~S_IFMT) | S_IFREG
        else
          raise RuntimeError, "invalid type"
        end
      end

      def format_type
        case type
        when :link
          'link'
        when :directory
          'tree'
        when :file
          'blob'
        end
      end

      def format_mode
        "%06o" % @mode
      end

      def raw
        "%o %s\0%s" % [@mode, @name, [@sha1].pack("H*")]
      end
    end

    class Tree < Object
      attr_accessor :entry

      def self.from_raw(rawobject, repository=nil)
        entries = []
        rawobject.content.scan(/\d+ .*?\0.{20}/m) do |raw|
          entries << DirectoryEntry.new(raw)
        end
        new(entries, repository)
      end

      def initialize(entries=[], repository = nil)
        @entry = entries
        @repository = repository
      end

      def type
        :tree
      end

      def raw_content
        # TODO: sort correctly
        #@entry.sort { |a,b| a.name <=> b.name }.
        @entry.collect { |e| [[e.format_mode, e.format_type, e.sha1].join(' '), e.name].join("\t") }.join("\n")
      end

      def actual_raw
        #@entry.collect { |e| e.raw.join(' '), e.name].join("\t") }.join("\n")
      end
    end

    class Commit < Object
      attr_accessor :author, :committer, :tree, :parent, :message

      def self.from_raw(rawobject, repository=nil)
        parent = []
        tree = author = committer = nil

        headers, message = rawobject.content.split(/\n\n/, 2)
        headers = headers.split(/\n/).map { |header| header.split(/ /, 2) }
        headers.each do |key, value|
          case key
          when "tree"
            tree = value
          when "parent"
            parent.push(value)
          when "author"
            author = UserInfo.new(value)
          when "committer"
            committer = UserInfo.new(value)
          else
            warn "unknown header '%s' in commit %s" % \
              [key, rawobject.sha1.unpack("H*")[0]]
          end
        end
        if not tree && author && committer
          raise RuntimeError, "incomplete raw commit object"
        end
        new(tree, parent, author, committer, message, repository)
      end

      def initialize(tree, parent, author, committer, message, repository=nil)
        @tree = tree
        @author = author
        @parent = parent
        @committer = committer
        @message = message
        @repository = repository
      end

      def type
        :commit
      end

      def raw_content
        "tree %s\n%sauthor %s\ncommitter %s\n\n" % [
          @tree,
          @parent.collect { |i| "parent %s\n" % i }.join,
          @author, @committer] + @message
      end
    end

    class Tag < Object
      attr_accessor :object, :type, :tag, :tagger, :message

      def self.from_raw(rawobject, repository=nil)
        headers, message = rawobject.content.split(/\n\n/, 2)
        headers = headers.split(/\n/).map { |header| header.split(/ /, 2) }
        headers.each do |key, value|
          case key
          when "object"
            object = value
          when "type"
            if !["blob", "tree", "commit", "tag"].include?(value)
              raise RuntimeError, "invalid type in tag"
            end
            type = value.to_sym
          when "tag"
            tag = value
          when "tagger"
            tagger = UserInfo.new(value)
          else
            warn "unknown header '%s' in tag" % \
              [key, rawobject.sha1.unpack("H*")[0]]
          end
          if not object && type && tag && tagger
            raise RuntimeError, "incomplete raw tag object"
          end
        end
        new(object, type, tag, tagger, repository)
      end

      def initialize(object, type, tag, tagger, repository=nil)
        @object = object
        @type = type
        @tag = tag
        @tagger = tagger
        @repository = repository
      end

      def raw_content
        "object %s\ntype %s\ntag %s\ntagger %s\n\n" % \
          [@object, @type, @tag, @tagger] + @message
      end

      def type
        :tag
      end
    end

  end
end
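A small sketch of `Object.from_raw` turning a raw commit into a parsed `Commit` (header values adapted from the `cat_file_commit_ruby` fixture added later in this diff):

    raw_commit = [
      "tree 77fc9894c0904279fde93adc9c0ba231515ce68a",
      "parent 30e367cef2203eba2b341dc9050993b06fd1e108",
      "author Tom Preston-Werner <tom@mojombo.com> 1208054148 -0700",
      "committer Tom Preston-Werner <tom@mojombo.com> 1208054148 -0700",
      "",
      "Merge branch 'master' of git://github.com/dustin/grit"
    ].join("\n")

    raw    = Grit::GitRuby::Internal::RawObject.new(:commit, raw_commit)
    commit = Grit::GitRuby::Object.from_raw(raw)

    commit.tree          # => "77fc9894c0904279fde93adc9c0ba231515ce68a"
    commit.parent        # => ["30e367cef2203eba2b341dc9050993b06fd1e108"]
    commit.author.name   # => "Tom Preston-Werner"
    commit.raw_content   # re-serializes the headers plus the message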
@ -0,0 +1,357 @@
#
# converted from the gitrb project
#
# authors:
# Matthias Lederhofer <matled@gmx.net>
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
# Scott Chacon <schacon@gmail.com>
#
# provides native ruby access to git objects and pack files
#

require 'grit/git-ruby/internal/raw_object'
require 'grit/git-ruby/internal/pack'
require 'grit/git-ruby/internal/loose'
require 'grit/git-ruby/object'

module Grit
  module GitRuby
    class Repository

      class NoSuchShaFound < StandardError
      end

      attr_accessor :cache_ls_tree, :use_cache

      def initialize(git_dir, use_cache = false)
        clear_cache()
        @use_cache = use_cache

        @git_dir = git_dir
      end

      def loose
        @loose ||= Grit::GitRuby::Internal::LooseStorage.new(git_path("objects"))
      end

      def packs
        @packs ||= initpacks
      end

      def show
        packs.each do |p|
          puts p.name
          puts
          p.each_sha1 do |s|
            puts "**#{p[s].type}**"
            if p[s].type.to_s == 'commit'
              puts s.unpack('H*')
              puts p[s].content
            end
          end
          puts
        end
      end

      def object(sha)
        o = get_raw_object_by_sha1(sha)
        c = Grit::GitRuby::Object.from_raw(o)
      end

      def cat_file_type(sha)
        get_raw_object_by_sha1(sha).type
      end

      def cat_file_size(sha)
        get_raw_object_by_sha1(sha).content.size
      end

      def cat_file(sha)
        o = get_raw_object_by_sha1(sha)
        object(sha).raw_content
      end

      def log(sha, options = {})
        @already_searched = {}
        @use_cache = true
        walk_log(sha, options)
      end

      def walk_log(sha, opts)
        return [] if @already_searched[sha] # to prevent rechecking branches
        @already_searched[sha] = true

        array = []
        if (sha)
          o = get_raw_object_by_sha1(sha)
          c = Grit::GitRuby::Object.from_raw(o)

          add_sha = true

          if opts[:since] && opts[:since].is_a?(Time) && (opts[:since] > c.committer.date)
            add_sha = false
          end
          if opts[:until] && opts[:until].is_a?(Time) && (opts[:until] < c.committer.date)
            add_sha = false
          end

          # follow all parents unless '--first-parent' is specified #
          subarray = []

          if !c.parent.first && opts[:path_limiter] # check for the last commit
            add_sha = false
          end

          c.parent.each do |psha|
            if psha && !files_changed?(c.tree, object(psha).tree, opts[:path_limiter])
              add_sha = false
            end
            subarray += walk_log(psha, opts)
            next if opts[:first_parent]
          end

          if (!opts[:max_count] || (array.size < opts[:max_count]))
            if add_sha
              output = "commit #{sha}\n"
              output += o.content + "\n\n"
              array << [sha, output]
            end
            array += subarray
          end

        end

        array
      end

      # returns true if the files in path_limiter were changed, or no path limiter
      def files_changed?(tree_sha1, tree_sha2, path_limiter = nil)
        if path_limiter
          mod = quick_diff(tree_sha1, tree_sha2)
          files = mod.map { |c| c.first }
          path_limiter.to_a.each do |filepath|
            if files.include?(filepath)
              return true
            end
          end
          return false
        end
        true
      end

      def last_commits(commit_sha, looking_for)
        # swap caching temporarily - we have to do this because the algorithm
        # that dumb scott used ls-tree's each tree twice, which is 90% of the
        # time this takes, so caching those hits halves the time this takes to run
        # but, it does take up memory, so if you don't want it, i clear it later
        old_use_cache = @use_cache
        @use_cache = true

        @already_searched = {}
        data = look_for_commits(commit_sha, looking_for)

        @use_cache = old_use_cache
        clear_cache if !old_use_cache

        data
      end

      def clear_cache
        @cache_ls_tree = {}
      end

      def look_for_commits(commit_sha, looking_for)
        return [] if @already_searched[commit_sha] # to prevent rechecking branches
        @already_searched[commit_sha] = true

        commit = object(commit_sha)
        tree_sha = commit.tree

        found_data = []

        # at the beginning of the branch
        if commit.parent.size == 0
          looking_for.each do |search|
            # prevents the rare case of multiple branch starting points with
            # files that have never changed
            if found_data.assoc(search)
              found_data << [search, commit_sha]
            end
          end
          return found_data
        end

        # go through the parents recursively, looking for somewhere this has been changed
        commit.parent.each do |pc|
          diff = quick_diff(tree_sha, object(pc).tree, '.', false)

          # remove anything found
          looking_for.each do |search|
            if match = diff.assoc(search)
              found_data << [search, commit_sha, match]
              looking_for.delete(search)
            end
          end

          if looking_for.size <= 0 # we're done
            return found_data
          end

          found_data += look_for_commits(pc, looking_for) # recurse into parent
        end

        ## TODO : find most recent commit with change in any parent
        found_data
      end

      def quick_diff(tree1, tree2, path = '.', recurse = true)
        # handle empty trees
        changed = []

        t1 = ls_tree(tree1) if tree1
        t2 = ls_tree(tree2) if tree2

        # finding files that are different
        t1['blob'].each do |file, hsh|
          t2_file = t2['blob'][file] rescue nil
          full = File.join(path, file)
          if !t2_file
            changed << [full, 'added', hsh[:sha], nil] # not in parent
          elsif (hsh[:sha] != t2_file[:sha])
            changed << [full, 'modified', hsh[:sha], t2_file[:sha]] # file changed
          end
        end if t1
        t2['blob'].each do |file, hsh|
          if !t1['blob'][file]
            changed << [File.join(path, file), 'removed', nil, hsh[:sha]]
          end if t1
        end if t2

        t1['tree'].each do |dir, hsh|
          t2_tree = t2['tree'][dir] rescue nil
          full = File.join(path, dir)
          if !t2_tree
            if recurse
              changed += quick_diff(hsh[:sha], nil, full)
            else
              changed << [full, 'added', hsh[:sha], nil] # not in parent
            end
          elsif (hsh[:sha] != t2_tree[:sha])
            if recurse
              changed += quick_diff(hsh[:sha], t2_tree[:sha], full)
            else
              changed << [full, 'modified', hsh[:sha], t2_tree[:sha]] # file changed
            end
          end
        end if t1
        t2['tree'].each do |dir, hsh|
          t1_tree = t2['tree'][dir] rescue nil
          full = File.join(path, dir)
          if !t1_tree
            if recurse
              changed += quick_diff(nil, hsh[:sha], full)
            else
              changed << [full, 'removed', nil, hsh[:sha]]
            end
          end
        end if t2

        changed
      end

      def ls_tree(sha)
        return @cache_ls_tree[sha] if @cache_ls_tree[sha] && @use_cache

        data = {'blob' => {}, 'tree' => {}}
        self.object(sha).entry.each do |e|
          data[e.format_type][e.name] = {:mode => e.format_mode, :sha => e.sha1}
        end
        @cache_ls_tree[sha] = data
      end

      def get_object_by_sha1(sha1)
        r = get_raw_object_by_sha1(sha1)
        return nil if !r
        Object.from_raw(r, self)
      end

      def put_raw_object(content, type)
        loose.put_raw_object(content, type)
      end

      def object_exists?(sha1)
        sha_hex = [sha1].pack("H*")
        return true if in_packs?(sha_hex)
        return true if in_loose?(sha_hex)
        initpacks
        return true if in_packs?(sha_hex) #maybe the object got packed in the meantime
        false
      end

      def in_packs?(sha_hex)
        # try packs
        packs.each do |pack|
          return true if pack[sha_hex]
        end
        false
      end

      def in_loose?(sha_hex)
        return true if loose[sha_hex]
        false
      end

      def get_raw_object_by_sha1(sha1)
        sha1 = [sha1].pack("H*")

        # try packs
        packs.each do |pack|
          o = pack[sha1]
          return o if o
        end

        # try loose storage
        o = loose[sha1]
        return o if o

        # try packs again, maybe the object got packed in the meantime
        initpacks
        packs.each do |pack|
          o = pack[sha1]
          return o if o
        end

        raise NoSuchShaFound
      end

      protected

      def git_path(path)
        return "#@git_dir/#{path}"
      end

      private

      def initpacks
        @packs.each do |pack|
          pack.close
        end if @packs

        @packs = []
        if File.exists?(git_path("objects/pack"))
          Dir.open(git_path("objects/pack/")) do |dir|
            dir.each do |entry|
              if entry =~ /\.pack$/i
                @packs << Grit::GitRuby::Internal::PackStorage.new(git_path("objects/pack/" \
                  + entry))
              end
            end
          end
        end
        @packs
      end

    end

  end
end
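And a sketch of driving `Repository` directly, which is what `ruby_git_dir` in the GitRuby module instantiates (path and SHA are placeholders); `get_raw_object_by_sha1` tries the packfiles, then loose storage, then re-scans the packs before raising `NoSuchShaFound`:

    repo = Grit::GitRuby::Repository.new("/path/to/repo/.git")
    sha  = "5e3ee1198672257164ce3fe31dea3e40848e68d5"

    repo.object_exists?(sha)          # => true/false, no exception
    repo.cat_file_type(sha)           # => :commit
    repo.cat_file(sha)                # parsed object, re-serialized by raw_content
    repo.log(sha, :max_count => 10)   # => [[sha, "commit <sha>\n<raw commit>\n\n"], ...]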
@ -8,6 +8,8 @@ end
module Grit

  class Git
    include Grit::GitRuby

    class GitTimeout < RuntimeError
      attr_reader :command, :bytes_read

@ -27,7 +29,7 @@ module Grit
    self.git_timeout = 5

    attr_accessor :git_dir, :bytes_read


    def initialize(git_dir)
      self.git_dir = git_dir
      self.bytes_read = 0
@ -61,6 +63,8 @@ module Grit
      response
    end



    def sh(command)
      pid, _, io, _ = Open4.popen4(command)
      ret = Timeout.timeout(self.class.git_timeout) { io.read }
@ -0,0 +1,5 @@
== 0.7.0 / 2008-01-07

* 1 major enhancement
  * First public release!
@ -0,0 +1,7 @@
tree 77fc9894c0904279fde93adc9c0ba231515ce68a
parent 30e367cef2203eba2b341dc9050993b06fd1e108
parent 420eac97a826bfac8724b6b0eef35c20922124b7
author Tom Preston-Werner <tom@mojombo.com> 1208054148 -0700
committer Tom Preston-Werner <tom@mojombo.com> 1208054148 -0700

Merge branch 'master' of git://github.com/dustin/grit
@ -0,0 +1,7 @@
100644 blob baaa47163a922b716898936f4ab032db4e08ae8a .gitignore
100644 blob 4232d073306f01cf0b895864e5a5cfad7dd76fce History.txt
100644 blob 22158f1075113476d332d6f5112cf948f38ae658 Manifest.txt
100644 blob dd53bb4983125be6a5b2cc7ac9e89d75804a6a73 README.txt
100644 blob fdbea19c6688404f2a65767d8f889a0acdb0b25a Rakefile
040000 tree a93f4c338734259d8ae577b16537ad6b99a90937 lib
040000 tree ce9b0e78f28d705665030d29d72515f86c28b2b3 test
@ -0,0 +1,41 @@
require File.dirname(__FILE__) + '/helper'

class TestRubyGit < Test::Unit::TestCase

  def setup
    @git = Git.new(File.join(File.dirname(__FILE__), *%w[dot_git]))
    @commit_sha = '5e3ee1198672257164ce3fe31dea3e40848e68d5'
    @tree_sha = 'cd7422af5a2e0fff3e94d6fb1a8fff03b2841881'
    @blob_sha = '4232d073306f01cf0b895864e5a5cfad7dd76fce'
  end

  def test_cat_file_contents_commit
    out = @git.cat_file({:p => true}, @commit_sha)
    assert_equal out, fixture('cat_file_commit_ruby')
  end

  def test_cat_file_contents_tree
    out = @git.cat_file({:p => true}, @tree_sha)
    assert_equal out, fixture('cat_file_tree_ruby').chomp
  end

  def test_cat_file_contents_blob
    out = @git.cat_file({:p => true}, @blob_sha)
    assert_equal out, fixture('cat_file_blob_ruby')
  end

  def test_cat_file_size
    out = @git.cat_file({:s => true}, @tree_sha)
    assert_equal '252', out
  end

  def test_file_type
    out = @git.file_type(@tree_sha).to_s
    assert_equal 'tree', out
    out = @git.file_type(@blob_sha).to_s
    assert_equal 'blob', out
    out = @git.file_type(@commit_sha).to_s
    assert_equal 'commit', out
  end

end