Mirror of https://github.com/microsoft/git.git

Merge with master.kernel.org:/pub/scm/git/git.git

Commit 79a9d8ea0d
@@ -82,6 +82,7 @@ git-ssh-push
git-ssh-upload
git-status
git-stripspace
git-symbolic-ref
git-tag
git-tar-tree
git-unpack-file
@@ -9,7 +9,7 @@ git-clone - Clones a repository.

SYNOPSIS
--------
'git clone' [-l] [-u <upload-pack>] [-q] <repository> <directory>
'git clone' [-l [-s]] [-q] [-n] [-u <upload-pack>] <repository> <directory>

DESCRIPTION
-----------
@@ -17,6 +17,7 @@ Clones a repository into a newly created directory.

OPTIONS
-------
--local::
-l::
	When the repository to clone from is on a local machine,
	this flag bypasses normal "git aware" transport

@@ -25,10 +26,22 @@ OPTIONS
	The files under .git/objects/ directory are hardlinked
	to save space when possible.

--shared::
-s::
	When the repository to clone is on the local machine,
	instead of using hard links automatically setup
	.git/objects/info/alternatives to share the objects
	with the source repository

--quiet::
-q::
	Operate quietly. This flag is passed to "rsync" and
	"git-clone-pack" commands when given.

-n::
	No checkout of HEAD is performed after the clone is complete.

--upload-pack <upload-pack>::
-u <upload-pack>::
	When given, and the repository to clone from is handled
	by 'git-clone-pack', '--exec=<upload-pack>' is passed to
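A minimal usage sketch of the options documented in the hunk above; the source path and target directory are made-up examples, not part of the patch:

    # Local clone that shares objects with the source repository instead of
    # hard-linking them (-s), and skips the initial checkout of HEAD (-n).
    git clone -l -s -n /path/to/source.git working-copy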
@@ -13,7 +13,7 @@ SYNOPSIS
	(-[c|d|o|i|s|u|k|m])\*
	[-x <pattern>|--exclude=<pattern>]
	[-X <file>|--exclude-from=<file>]
	[--exclude-per-directory=<file>]
	[--exclude-per-directory=<file>] [--] [<file>]\*

DESCRIPTION
-----------

@@ -77,6 +77,13 @@ OPTIONS
	K to be killed
	? other

--::
	Do not interpret any more arguments as options.

<file>::
	Files to show. If no files are given all files which match the other
	specified criteria are shown.

Output
------
show files just outputs the filename unless '--stage' is specified in
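A brief, hedged illustration of the '--' and <file> arguments added in the hunk above; the path names are invented for the example:

    # List untracked files, restricting output to the named paths;
    # '--' tells git-ls-files to stop interpreting arguments as options.
    git-ls-files --others -- Documentation Makefile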
Makefile (67 lines changed)
@@ -50,7 +50,7 @@

# DEFINES += -DUSE_STDEV

GIT_VERSION = 0.99.7.GIT
GIT_VERSION = 0.99.8.GIT

CFLAGS = -g -O2 -Wall
ALL_CFLAGS = $(CFLAGS) $(PLATFORM_DEFINES) $(DEFINES)
@ -103,25 +103,29 @@ SIMPLE_PROGRAMS = \
|
|||
|
||||
# ... and all the rest
|
||||
PROGRAMS = \
|
||||
git-apply$X git-cat-file$X git-checkout-index$X \
|
||||
git-clone-pack$X git-commit-tree$X git-convert-objects$X \
|
||||
git-diff-files$X git-diff-index$X git-diff-stages$X \
|
||||
git-diff-tree$X git-fetch-pack$X git-fsck-objects$X \
|
||||
git-hash-object$X git-init-db$X git-local-fetch$X \
|
||||
git-ls-files$X git-ls-tree$X git-merge-base$X \
|
||||
git-merge-index$X git-mktag$X git-pack-objects$X \
|
||||
git-patch-id$X git-peek-remote$X git-prune-packed$X \
|
||||
git-read-tree$X git-receive-pack$X git-rev-list$X \
|
||||
git-rev-parse$X git-send-pack$X git-show-branch$X \
|
||||
git-show-index$X git-ssh-fetch$X git-ssh-upload$X \
|
||||
git-tar-tree$X git-unpack-file$X git-unpack-objects$X \
|
||||
git-update-index$X git-update-server-info$X \
|
||||
git-upload-pack$X git-verify-pack$X git-write-tree$X \
|
||||
git-update-ref$X $(SIMPLE_PROGRAMS)
|
||||
git-apply$X git-cat-file$X \
|
||||
git-checkout-index$X git-clone-pack$X git-commit-tree$X \
|
||||
git-convert-objects$X git-diff-files$X \
|
||||
git-diff-index$X git-diff-stages$X \
|
||||
git-diff-tree$X git-fetch-pack$X git-fsck-objects$X \
|
||||
git-hash-object$X git-init-db$X \
|
||||
git-local-fetch$X git-ls-files$X git-ls-tree$X git-merge-base$X \
|
||||
git-merge-index$X git-mktag$X git-pack-objects$X git-patch-id$X \
|
||||
git-peek-remote$X git-prune-packed$X git-read-tree$X \
|
||||
git-receive-pack$X git-rev-list$X git-rev-parse$X \
|
||||
git-send-pack$X git-show-branch$X \
|
||||
git-show-index$X git-ssh-fetch$X \
|
||||
git-ssh-upload$X git-tar-tree$X git-unpack-file$X \
|
||||
git-unpack-objects$X git-update-index$X git-update-server-info$X \
|
||||
git-upload-pack$X git-verify-pack$X git-write-tree$X \
|
||||
git-update-ref$X git-symbolic-ref$X \
|
||||
$(SIMPLE_PROGRAMS)
|
||||
|
||||
# Backward compatibility -- to be removed after 1.0
|
||||
PROGRAMS += git-ssh-pull$X git-ssh-push$X
|
||||
|
||||
GIT_LIST_TWEAK =
|
||||
|
||||
PYMODULES = \
|
||||
gitMergeCommon.py
|
||||
|
||||
|
@ -131,6 +135,8 @@ endif
|
|||
|
||||
ifdef WITH_SEND_EMAIL
|
||||
SCRIPT_PERL += git-send-email.perl
|
||||
else
|
||||
GIT_LIST_TWEAK += -e '/^send-email$$/d'
|
||||
endif
|
||||
|
||||
LIB_FILE=libgit.a
|
||||
|
@ -181,6 +187,10 @@ endif
|
|||
ifneq (,$(findstring arm,$(shell uname -m)))
|
||||
ARM_SHA1 = YesPlease
|
||||
endif
|
||||
ifeq ($(shell uname -s),OpenBSD)
|
||||
NEEDS_LIBICONV = YesPlease
|
||||
PLATFORM_DEFINES += -I/usr/local/include -L/usr/local/lib
|
||||
endif
|
||||
|
||||
ifndef NO_CURL
|
||||
ifdef CURLDIR
|
||||
|
@ -206,18 +216,32 @@ endif
|
|||
ifndef NO_OPENSSL
|
||||
LIB_OBJS += epoch.o
|
||||
OPENSSL_LIBSSL = -lssl
|
||||
ifdef OPENSSLDIR
|
||||
# Again this may be problematic -- gcc does not always want -R.
|
||||
CFLAGS += -I$(OPENSSLDIR)/include
|
||||
OPENSSL_LINK = -L$(OPENSSLDIR)/lib -R$(OPENSSLDIR)/lib
|
||||
else
|
||||
OPENSSL_LINK =
|
||||
endif
|
||||
else
|
||||
DEFINES += '-DNO_OPENSSL'
|
||||
MOZILLA_SHA1 = 1
|
||||
OPENSSL_LIBSSL =
|
||||
endif
|
||||
ifdef NEEDS_SSL_WITH_CRYPTO
|
||||
LIB_4_CRYPTO = -lcrypto -lssl
|
||||
LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto -lssl
|
||||
else
|
||||
LIB_4_CRYPTO = -lcrypto
|
||||
LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto
|
||||
endif
|
||||
ifdef NEEDS_LIBICONV
|
||||
LIB_4_ICONV = -liconv
|
||||
ifdef ICONVDIR
|
||||
# Again this may be problematic -- gcc does not always want -R.
|
||||
CFLAGS += -I$(ICONVDIR)/include
|
||||
ICONV_LINK = -L$(ICONVDIR)/lib -R$(ICONVDIR)/lib
|
||||
else
|
||||
ICONV_LINK =
|
||||
endif
|
||||
LIB_4_ICONV = $(ICONV_LINK) -liconv
|
||||
else
|
||||
LIB_4_ICONV =
|
||||
endif
|
||||
|
@ -273,8 +297,13 @@ all:
|
|||
git: git.sh Makefile
|
||||
rm -f $@+ $@
|
||||
sed -e '1s|#!.*/sh|#!$(SHELL_PATH)|' \
|
||||
<<<<<<< Makefile
|
||||
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
|
||||
-e 's/@@X@@/$(X)/g' <$@.sh >$@+
|
||||
=======
|
||||
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
|
||||
$(GIT_LIST_TWEAK) <$@.sh >$@+
|
||||
>>>>>>> .merge_file_3QHyD4
|
||||
chmod +x $@+
|
||||
mv $@+ $@
|
||||
|
||||
|
|
apply.c (13 lines changed)
@ -723,6 +723,16 @@ static int parse_single_patch(char *line, unsigned long size, struct patch *patc
|
|||
return offset;
|
||||
}
|
||||
|
||||
static inline int metadata_changes(struct patch *patch)
|
||||
{
|
||||
return patch->is_rename > 0 ||
|
||||
patch->is_copy > 0 ||
|
||||
patch->is_new > 0 ||
|
||||
patch->is_delete ||
|
||||
(patch->old_mode && patch->new_mode &&
|
||||
patch->old_mode != patch->new_mode);
|
||||
}
|
||||
|
||||
static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
|
||||
{
|
||||
int hdrsize, patchsize;
|
||||
|
@ -733,6 +743,9 @@ static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
|
|||
|
||||
patchsize = parse_single_patch(buffer + offset + hdrsize, size - offset - hdrsize, patch);
|
||||
|
||||
if (!patchsize && !metadata_changes(patch))
|
||||
die("patch with only garbage at line %d", linenr);
|
||||
|
||||
return offset + hdrsize + patchsize;
|
||||
}
|
||||
|
||||
|
|
cache.h (5 lines changed)
@ -189,6 +189,7 @@ extern char *git_path(const char *fmt, ...) __attribute__((format (printf, 1, 2)
|
|||
extern char *sha1_file_name(const unsigned char *sha1);
|
||||
extern char *sha1_pack_name(const unsigned char *sha1);
|
||||
extern char *sha1_pack_index_name(const unsigned char *sha1);
|
||||
extern const unsigned char null_sha1[20];
|
||||
|
||||
int git_mkstemp(char *path, size_t n, const char *template);
|
||||
|
||||
|
@ -228,6 +229,10 @@ extern int has_pack_index(const unsigned char *sha1);
|
|||
extern int get_sha1(const char *str, unsigned char *sha1);
|
||||
extern int get_sha1_hex(const char *hex, unsigned char *sha1);
|
||||
extern char *sha1_to_hex(const unsigned char *sha1); /* static buffer result! */
|
||||
extern int read_ref(const char *filename, unsigned char *sha1);
|
||||
extern const char *resolve_ref(const char *path, unsigned char *sha1, int);
|
||||
extern int create_symref(const char *git_HEAD, const char *refs_heads_master);
|
||||
extern int validate_symref(const char *git_HEAD);
|
||||
|
||||
/* General helper functions */
|
||||
extern void usage(const char *err) NORETURN;
|
||||
|
|
|
@@ -1,3 +1,9 @@
git-core (0.99.8-0) unstable; urgency=low

  * GIT 0.99.8

 -- Junio C Hamano <junkio@cox.net>  Sun, 2 Oct 2005 12:54:26 -0700

git-core (0.99.7-0) unstable; urgency=low

  * GIT 0.99.7
|
|
@ -34,7 +34,6 @@ static void show_modified(int oldmode, int mode,
|
|||
|
||||
int main(int argc, const char **argv)
|
||||
{
|
||||
static const unsigned char null_sha1[20] = { 0, };
|
||||
const char **pathspec;
|
||||
const char *prefix = setup_git_directory();
|
||||
int entries, i;
|
||||
|
|
diff.c (3 lines changed)
@ -10,7 +10,6 @@
|
|||
#include "diffcore.h"
|
||||
|
||||
static const char *diff_opts = "-pu";
|
||||
static unsigned char null_sha1[20] = { 0, };
|
||||
|
||||
static int use_size_cache;
|
||||
|
||||
|
@ -414,7 +413,7 @@ void diff_free_filespec_data(struct diff_filespec *s)
|
|||
static void prep_temp_blob(struct diff_tempfile *temp,
|
||||
void *blob,
|
||||
unsigned long size,
|
||||
unsigned char *sha1,
|
||||
const unsigned char *sha1,
|
||||
int mode)
|
||||
{
|
||||
int fd;
|
||||
|
|
|
@ -402,25 +402,17 @@ static void fsck_object_dir(const char *path)
|
|||
|
||||
static int fsck_head_link(void)
|
||||
{
|
||||
int fd, count;
|
||||
char hex[40];
|
||||
unsigned char sha1[20];
|
||||
static char path[PATH_MAX], link[PATH_MAX];
|
||||
const char *git_dir = get_git_dir();
|
||||
const char *git_HEAD = strdup(git_path("HEAD"));
|
||||
const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 1);
|
||||
int pfxlen = strlen(git_HEAD) - 4; /* strip .../.git/ part */
|
||||
|
||||
snprintf(path, sizeof(path), "%s/HEAD", git_dir);
|
||||
if (readlink(path, link, sizeof(link)) < 0)
|
||||
return error("HEAD is not a symlink");
|
||||
if (strncmp("refs/heads/", link, 11))
|
||||
return error("HEAD points to something strange (%s)", link);
|
||||
fd = open(path, O_RDONLY);
|
||||
if (fd < 0)
|
||||
return error("HEAD: %s", strerror(errno));
|
||||
count = read(fd, hex, sizeof(hex));
|
||||
close(fd);
|
||||
if (count < 0)
|
||||
return error("HEAD: %s", strerror(errno));
|
||||
if (count < 40 || get_sha1_hex(hex, sha1))
|
||||
if (!git_refs_heads_master)
|
||||
return error("HEAD is not a symbolic ref");
|
||||
if (strncmp(git_refs_heads_master + pfxlen, "refs/heads/", 11))
|
||||
return error("HEAD points to something strange (%s)",
|
||||
git_refs_heads_master + pfxlen);
|
||||
if (!memcmp(null_sha1, sha1, 20))
|
||||
return error("HEAD: not a valid git pointer");
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -228,10 +228,12 @@ foreach my $ps (@psets) {
|
|||
# skip commits already in repo
|
||||
#
|
||||
if (ptag($ps->{id})) {
|
||||
$opt_v && print "Skipping already imported: $ps->{id}\n";
|
||||
$opt_v && print " * Skipping already imported: $ps->{id}\n";
|
||||
next;
|
||||
}
|
||||
|
||||
print " * Starting to work on $ps->{id}\n";
|
||||
|
||||
#
|
||||
# create the branch if needed
|
||||
#
|
||||
|
@ -675,6 +677,10 @@ sub find_parents {
|
|||
# that branch.
|
||||
#
|
||||
foreach my $branch (keys %branches) {
|
||||
|
||||
# check that we actually know about the branch
|
||||
next unless -e "$git_dir/refs/heads/$branch";
|
||||
|
||||
my $mergebase = `git-merge-base $branch $ps->{branch}`;
|
||||
die "Cannot find merge base for $branch and $ps->{branch}" if $?;
|
||||
chomp $mergebase;
|
||||
|
|
|
@ -38,7 +38,8 @@ bisect_start() {
|
|||
# Verify HEAD. If we were bisecting before this, reset to the
|
||||
# top-of-line master first!
|
||||
#
|
||||
head=$(readlink $GIT_DIR/HEAD) || die "Bad HEAD - I need a symlink"
|
||||
head=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD) ||
|
||||
die "Bad HEAD - I need a symbolic ref"
|
||||
case "$head" in
|
||||
refs/heads/bisect*)
|
||||
git checkout master || exit
|
||||
|
@ -46,7 +47,7 @@ bisect_start() {
|
|||
refs/heads/*)
|
||||
;;
|
||||
*)
|
||||
die "Bad HEAD - strange symlink"
|
||||
die "Bad HEAD - strange symbolic ref"
|
||||
;;
|
||||
esac
|
||||
|
||||
|
@ -135,7 +136,7 @@ bisect_next() {
|
|||
echo "$rev" > "$GIT_DIR/refs/heads/new-bisect"
|
||||
git checkout new-bisect || exit
|
||||
mv "$GIT_DIR/refs/heads/new-bisect" "$GIT_DIR/refs/heads/bisect" &&
|
||||
ln -sf refs/heads/bisect "$GIT_DIR/HEAD"
|
||||
GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD refs/heads/bisect
|
||||
git-show-branch "$rev"
|
||||
}
|
||||
|
||||
|
|
|
@ -14,7 +14,8 @@ If two arguments, create a new branch <branchname> based off of <start-point>.
|
|||
|
||||
delete_branch () {
|
||||
option="$1" branch_name="$2"
|
||||
headref=$(readlink "$GIT_DIR/HEAD" | sed -e 's|^refs/heads/||')
|
||||
headref=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD |
|
||||
sed -e 's|^refs/heads/||')
|
||||
case ",$headref," in
|
||||
",$branch_name,")
|
||||
die "Cannot delete the branch you are on." ;;
|
||||
|
@ -67,7 +68,8 @@ done
|
|||
|
||||
case "$#" in
|
||||
0)
|
||||
headref=$(readlink "$GIT_DIR/HEAD" | sed -e 's|^refs/heads/||')
|
||||
headref=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD |
|
||||
sed -e 's|^refs/heads/||')
|
||||
git-rev-parse --symbolic --all |
|
||||
sed -ne 's|^refs/heads/||p' |
|
||||
sort |
|
||||
|
|
|
@ -71,7 +71,8 @@ if [ "$?" -eq 0 ]; then
|
|||
echo $new > "$GIT_DIR/refs/heads/$newbranch"
|
||||
branch="$newbranch"
|
||||
fi
|
||||
[ "$branch" ] && ln -sf "refs/heads/$branch" "$GIT_DIR/HEAD"
|
||||
[ "$branch" ] &&
|
||||
GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD "refs/heads/$branch"
|
||||
rm -f "$GIT_DIR/MERGE_HEAD"
|
||||
else
|
||||
exit 1
|
||||
|
|
|
@ -153,15 +153,8 @@ if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
|
|||
fi >>.editmsg
|
||||
|
||||
PARENTS="-p HEAD"
|
||||
if [ ! -r "$GIT_DIR/HEAD" ]; then
|
||||
if [ -z "$(git-ls-files)" ]; then
|
||||
echo Nothing to commit 1>&2
|
||||
exit 1
|
||||
fi
|
||||
PARENTS=""
|
||||
current=
|
||||
else
|
||||
current=$(git-rev-parse --verify HEAD)
|
||||
if GIT_DIR="$GIT_DIR" git-rev-parse --verify HEAD >/dev/null 2>&1
|
||||
then
|
||||
if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
|
||||
PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
|
||||
fi
|
||||
|
@ -194,6 +187,12 @@ else
|
|||
export GIT_AUTHOR_EMAIL
|
||||
export GIT_AUTHOR_DATE
|
||||
fi
|
||||
else
|
||||
if [ -z "$(git-ls-files)" ]; then
|
||||
echo Nothing to commit 1>&2
|
||||
exit 1
|
||||
fi
|
||||
PARENTS=""
|
||||
fi
|
||||
git-status >>.editmsg
|
||||
if [ "$?" != "0" -a ! -f $GIT_DIR/MERGE_HEAD ]
|
||||
|
|
|
@ -510,7 +510,7 @@ unless($pid) {
|
|||
|
||||
my $state = 0;
|
||||
|
||||
my($patchset,$date,$author,$branch,$ancestor,$tag,$logmsg);
|
||||
my($patchset,$date,$author_name,$author_email,$branch,$ancestor,$tag,$logmsg);
|
||||
my(@old,@new);
|
||||
my $commit = sub {
|
||||
my $pid;
|
||||
|
@ -591,11 +591,11 @@ my $commit = sub {
|
|||
}
|
||||
|
||||
exec("env",
|
||||
"GIT_AUTHOR_NAME=$author",
|
||||
"GIT_AUTHOR_EMAIL=$author",
|
||||
"GIT_AUTHOR_NAME=$author_name",
|
||||
"GIT_AUTHOR_EMAIL=$author_email",
|
||||
"GIT_AUTHOR_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
|
||||
"GIT_COMMITTER_NAME=$author",
|
||||
"GIT_COMMITTER_EMAIL=$author",
|
||||
"GIT_COMMITTER_NAME=$author_name",
|
||||
"GIT_COMMITTER_EMAIL=$author_email",
|
||||
"GIT_COMMITTER_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
|
||||
"git-commit-tree", $tree,@par);
|
||||
die "Cannot exec git-commit-tree: $!\n";
|
||||
|
@ -638,7 +638,7 @@ my $commit = sub {
|
|||
print $out "object $cid\n".
|
||||
"type commit\n".
|
||||
"tag $xtag\n".
|
||||
"tagger $author <$author>\n"
|
||||
"tagger $author_name <$author_email>\n"
|
||||
or die "Cannot create tag object $xtag: $!\n";
|
||||
close($out)
|
||||
or die "Cannot create tag object $xtag: $!\n";
|
||||
|
@ -683,7 +683,11 @@ while(<CVS>) {
|
|||
$state=3;
|
||||
} elsif($state == 3 and s/^Author:\s+//) {
|
||||
s/\s+$//;
|
||||
$author = $_;
|
||||
if (/^(.*?)\s+<(.*)>/) {
|
||||
($author_name, $author_email) = ($1, $2);
|
||||
} else {
|
||||
$author_name = $author_email = $_;
|
||||
}
|
||||
$state = 4;
|
||||
} elsif($state == 4 and s/^Branch:\s+//) {
|
||||
s/\s+$//;
|
||||
|
|
git-fetch.sh (25 lines changed)
@ -5,6 +5,7 @@
|
|||
_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]'
|
||||
_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
|
||||
|
||||
tags=
|
||||
append=
|
||||
force=
|
||||
update_head_ok=
|
||||
|
@ -17,6 +18,9 @@ do
|
|||
-f|--f|--fo|--for|--forc|--force)
|
||||
force=t
|
||||
;;
|
||||
-t|--t|--ta|--tag|--tags)
|
||||
tags=t
|
||||
;;
|
||||
-u|--u|--up|--upd|--upda|--updat|--update|--update-|--update-h|\
|
||||
--update-he|--update-hea|--update-head|--update-head-|\
|
||||
--update-head-o|--update-head-ok)
|
||||
|
@ -158,7 +162,26 @@ case "$update_head_ok" in
|
|||
;;
|
||||
esac
|
||||
|
||||
for ref in $(get_remote_refs_for_fetch "$@")
|
||||
# If --tags (and later --heads or --all) is specified, then we are
|
||||
# not talking about defaults stored in Pull: line of remotes or
|
||||
# branches file, and just fetch those and refspecs explicitly given.
|
||||
# Otherwise we do what we always did.
|
||||
|
||||
reflist=$(get_remote_refs_for_fetch "$@")
|
||||
if test "$tags"
|
||||
then
|
||||
taglist=$(git-ls-remote --tags "$remote" | awk '{ print "."$2":"$2 }')
|
||||
if test "$#" -gt 1
|
||||
then
|
||||
# remote URL plus explicit refspecs; we need to merge them.
|
||||
reflist="$reflist $taglist"
|
||||
else
|
||||
# No explicit refspecs; fetch tags only.
|
||||
reflist=$taglist
|
||||
fi
|
||||
fi
|
||||
|
||||
for ref in $reflist
|
||||
do
|
||||
refs="$refs $ref"
|
||||
|
||||
|
|
|
@ -7,9 +7,6 @@ from sets import Set
|
|||
sys.path.append('@@GIT_PYTHON_PATH@@')
|
||||
from gitMergeCommon import *
|
||||
|
||||
# The actual merge code
|
||||
# ---------------------
|
||||
|
||||
originalIndexFile = os.environ.get('GIT_INDEX_FILE',
|
||||
os.environ.get('GIT_DIR', '.git') + '/index')
|
||||
temporaryIndexFile = os.environ.get('GIT_DIR', '.git') + \
|
||||
|
@ -21,11 +18,23 @@ def setupIndex(temporary):
|
|||
pass
|
||||
if temporary:
|
||||
newIndex = temporaryIndexFile
|
||||
os.environ
|
||||
else:
|
||||
newIndex = originalIndexFile
|
||||
os.environ['GIT_INDEX_FILE'] = newIndex
|
||||
|
||||
# This is a global variable which is used in a number of places but
|
||||
# only written to in the 'merge' function.
|
||||
|
||||
# cacheOnly == True => Don't leave any non-stage 0 entries in the cache and
|
||||
# don't update the working directory.
|
||||
# False => Leave unmerged entries in the cache and update
|
||||
# the working directory.
|
||||
|
||||
cacheOnly = False
|
||||
|
||||
# The entry point to the merge code
|
||||
# ---------------------------------
|
||||
|
||||
def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
|
||||
'''Merge the commits h1 and h2, return the resulting virtual
|
||||
commit object and a flag indicating the cleaness of the merge.'''
|
||||
|
@ -35,6 +44,7 @@ def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
|
|||
def infoMsg(*args):
|
||||
sys.stdout.write(' '*callDepth)
|
||||
printList(args)
|
||||
|
||||
infoMsg('Merging:')
|
||||
infoMsg(h1)
|
||||
infoMsg(h2)
|
||||
|
@ -46,27 +56,27 @@ def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
|
|||
infoMsg(x)
|
||||
sys.stdout.flush()
|
||||
|
||||
Ms = ca[0]
|
||||
mergedCA = ca[0]
|
||||
for h in ca[1:]:
|
||||
[Ms, ignore] = merge(Ms, h,
|
||||
'Temporary shared merge branch 1',
|
||||
'Temporary shared merge branch 2',
|
||||
graph, callDepth+1)
|
||||
assert(isinstance(Ms, Commit))
|
||||
[mergedCA, dummy] = merge(mergedCA, h,
|
||||
'Temporary shared merge branch 1',
|
||||
'Temporary shared merge branch 2',
|
||||
graph, callDepth+1)
|
||||
assert(isinstance(mergedCA, Commit))
|
||||
|
||||
global cacheOnly
|
||||
if callDepth == 0:
|
||||
setupIndex(False)
|
||||
cleanCache = False
|
||||
cacheOnly = False
|
||||
else:
|
||||
setupIndex(True)
|
||||
runProgram(['git-read-tree', h1.tree()])
|
||||
cleanCache = True
|
||||
cacheOnly = True
|
||||
|
||||
[shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), Ms.tree(),
|
||||
branch1Name, branch2Name,
|
||||
cleanCache)
|
||||
[shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), mergedCA.tree(),
|
||||
branch1Name, branch2Name)
|
||||
|
||||
if clean or cleanCache:
|
||||
if clean or cacheOnly:
|
||||
res = Commit(None, [h1, h2], tree=shaRes)
|
||||
graph.addNode(res)
|
||||
else:
|
||||
|
@ -89,49 +99,14 @@ def getFilesAndDirs(tree):
|
|||
|
||||
return [files, dirs]
|
||||
|
||||
class CacheEntry:
|
||||
def __init__(self, path):
|
||||
class Stage:
|
||||
def __init__(self):
|
||||
self.sha1 = None
|
||||
self.mode = None
|
||||
|
||||
self.stages = [Stage(), Stage(), Stage()]
|
||||
self.path = path
|
||||
# Those two global variables are used in a number of places but only
|
||||
# written to in 'mergeTrees' and 'uniquePath'. They keep track of
|
||||
# every file and directory in the two branches that are about to be
|
||||
# merged.
|
||||
currentFileSet = None
|
||||
currentDirectorySet = None
|
||||
|
||||
unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S)
|
||||
def unmergedCacheEntries():
|
||||
'''Create a dictionary mapping file names to CacheEntry
|
||||
objects. The dictionary contains one entry for every path with a
|
||||
non-zero stage entry.'''
|
||||
|
||||
lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0')
|
||||
lines.pop()
|
||||
|
||||
res = {}
|
||||
for l in lines:
|
||||
m = unmergedRE.match(l)
|
||||
if m:
|
||||
mode = int(m.group(1), 8)
|
||||
sha1 = m.group(2)
|
||||
stage = int(m.group(3)) - 1
|
||||
path = m.group(4)
|
||||
|
||||
if res.has_key(path):
|
||||
e = res[path]
|
||||
else:
|
||||
e = CacheEntry(path)
|
||||
res[path] = e
|
||||
|
||||
e.stages[stage].mode = mode
|
||||
e.stages[stage].sha1 = sha1
|
||||
else:
|
||||
die('Error: Merge program failed: Unexpected output from', \
|
||||
'git-ls-files:', l)
|
||||
return res
|
||||
|
||||
def mergeTrees(head, merge, common, branch1Name, branch2Name,
|
||||
cleanCache):
|
||||
def mergeTrees(head, merge, common, branch1Name, branch2Name):
|
||||
'''Merge the trees 'head' and 'merge' with the common ancestor
|
||||
'common'. The name of the head branch is 'branch1Name' and the name of
|
||||
the merge branch is 'branch2Name'. Return a tuple (tree, cleanMerge)
|
||||
|
@ -144,33 +119,38 @@ def mergeTrees(head, merge, common, branch1Name, branch2Name,
|
|||
print 'Already uptodate!'
|
||||
return [head, True]
|
||||
|
||||
if cleanCache:
|
||||
if cacheOnly:
|
||||
updateArg = '-i'
|
||||
else:
|
||||
updateArg = '-u'
|
||||
|
||||
[out, code] = runProgram(['git-read-tree', updateArg, '-m', common, head, merge], returnCode = True)
|
||||
[out, code] = runProgram(['git-read-tree', updateArg, '-m',
|
||||
common, head, merge], returnCode = True)
|
||||
if code != 0:
|
||||
die('git-read-tree:', out)
|
||||
|
||||
cleanMerge = True
|
||||
|
||||
[tree, code] = runProgram('git-write-tree', returnCode=True)
|
||||
tree = tree.rstrip()
|
||||
if code != 0:
|
||||
[files, dirs] = getFilesAndDirs(head)
|
||||
global currentFileSet, currentDirectorySet
|
||||
[currentFileSet, currentDirectorySet] = getFilesAndDirs(head)
|
||||
[filesM, dirsM] = getFilesAndDirs(merge)
|
||||
files.union_update(filesM)
|
||||
dirs.union_update(dirsM)
|
||||
|
||||
cleanMerge = True
|
||||
currentFileSet.union_update(filesM)
|
||||
currentDirectorySet.union_update(dirsM)
|
||||
|
||||
entries = unmergedCacheEntries()
|
||||
for name in entries:
|
||||
if not processEntry(entries[name], branch1Name, branch2Name,
|
||||
files, dirs, cleanCache):
|
||||
renamesHead = getRenames(head, common, head, merge, entries)
|
||||
renamesMerge = getRenames(merge, common, head, merge, entries)
|
||||
|
||||
cleanMerge = processRenames(renamesHead, renamesMerge,
|
||||
branch1Name, branch2Name)
|
||||
for entry in entries:
|
||||
if entry.processed:
|
||||
continue
|
||||
if not processEntry(entry, branch1Name, branch2Name):
|
||||
cleanMerge = False
|
||||
|
||||
if cleanMerge or cleanCache:
|
||||
if cleanMerge or cacheOnly:
|
||||
tree = runProgram('git-write-tree').rstrip()
|
||||
else:
|
||||
tree = None
|
||||
|
@ -179,84 +159,538 @@ def mergeTrees(head, merge, common, branch1Name, branch2Name,
|
|||
|
||||
return [tree, cleanMerge]
|
||||
|
||||
def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
||||
'''Merge one cache entry. 'files' is a Set with the files in both of
|
||||
the heads that we are going to merge. 'dirs' contains the
|
||||
corresponding data for directories. If 'cleanCache' is True no
|
||||
non-zero stages will be left in the cache for the path
|
||||
corresponding to the entry 'entry'.'''
|
||||
# Low level file merging, update and removal
|
||||
# ------------------------------------------
|
||||
|
||||
# cleanCache == True => Don't leave any non-stage 0 entries in the cache and
|
||||
# don't update the working directory
|
||||
# False => Leave unmerged entries and update the working directory
|
||||
def mergeFile(oPath, oSha, oMode, aPath, aSha, aMode, bPath, bSha, bMode,
|
||||
branch1Name, branch2Name):
|
||||
|
||||
# clean == True => non-conflict case
|
||||
# False => conflict case
|
||||
merge = False
|
||||
clean = True
|
||||
|
||||
# If cleanCache == False then the cache shouldn't be updated if clean == False
|
||||
if stat.S_IFMT(aMode) != stat.S_IFMT(bMode):
|
||||
clean = False
|
||||
if stat.S_ISREG(aMode):
|
||||
mode = aMode
|
||||
sha = aSha
|
||||
else:
|
||||
mode = bMode
|
||||
sha = bSha
|
||||
else:
|
||||
if aSha != oSha and bSha != oSha:
|
||||
merge = True
|
||||
|
||||
def updateFile(clean, sha, mode, path, onlyWd=False):
|
||||
updateCache = not onlyWd and (cleanCache or (not cleanCache and clean))
|
||||
updateWd = onlyWd or (not cleanCache and clean)
|
||||
if aMode == oMode:
|
||||
mode = bMode
|
||||
else:
|
||||
mode = aMode
|
||||
|
||||
if updateWd:
|
||||
prog = ['git-cat-file', 'blob', sha]
|
||||
if stat.S_ISREG(mode):
|
||||
if aSha == oSha:
|
||||
sha = bSha
|
||||
elif bSha == oSha:
|
||||
sha = aSha
|
||||
elif stat.S_ISREG(aMode):
|
||||
assert(stat.S_ISREG(bMode))
|
||||
|
||||
orig = runProgram(['git-unpack-file', oSha]).rstrip()
|
||||
src1 = runProgram(['git-unpack-file', aSha]).rstrip()
|
||||
src2 = runProgram(['git-unpack-file', bSha]).rstrip()
|
||||
[out, code] = runProgram(['merge',
|
||||
'-L', branch1Name + '/' + aPath,
|
||||
'-L', 'orig/' + oPath,
|
||||
'-L', branch2Name + '/' + bPath,
|
||||
src1, orig, src2], returnCode=True)
|
||||
|
||||
sha = runProgram(['git-hash-object', '-t', 'blob', '-w',
|
||||
src1]).rstrip()
|
||||
|
||||
os.unlink(orig)
|
||||
os.unlink(src1)
|
||||
os.unlink(src2)
|
||||
|
||||
clean = (code == 0)
|
||||
else:
|
||||
assert(stat.S_ISLNK(aMode) and stat.S_ISLNK(bMode))
|
||||
sha = aSha
|
||||
|
||||
if aSha != bSha:
|
||||
clean = False
|
||||
|
||||
return [sha, mode, clean, merge]
|
||||
|
||||
def updateFile(clean, sha, mode, path):
|
||||
updateCache = cacheOnly or clean
|
||||
updateWd = not cacheOnly
|
||||
|
||||
return updateFileExt(sha, mode, path, updateCache, updateWd)
|
||||
|
||||
def updateFileExt(sha, mode, path, updateCache, updateWd):
|
||||
if cacheOnly:
|
||||
updateWd = False
|
||||
|
||||
if updateWd:
|
||||
pathComponents = path.split('/')
|
||||
for x in xrange(1, len(pathComponents)):
|
||||
p = '/'.join(pathComponents[0:x])
|
||||
|
||||
try:
|
||||
createDir = not stat.S_ISDIR(os.lstat(p).st_mode)
|
||||
except:
|
||||
createDir = True
|
||||
|
||||
if createDir:
|
||||
try:
|
||||
os.unlink(path)
|
||||
except OSError:
|
||||
pass
|
||||
if mode & 0100:
|
||||
mode = 0777
|
||||
else:
|
||||
mode = 0666
|
||||
fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode)
|
||||
proc = subprocess.Popen(prog, stdout=fd)
|
||||
proc.wait()
|
||||
os.close(fd)
|
||||
elif stat.S_ISLNK(mode):
|
||||
linkTarget = runProgram(prog)
|
||||
os.symlink(linkTarget, path)
|
||||
else:
|
||||
assert(False)
|
||||
os.mkdir(p)
|
||||
except OSError, e:
|
||||
die("Couldn't create directory", p, e.strerror)
|
||||
|
||||
if updateWd and updateCache:
|
||||
runProgram(['git-update-index', '--add', '--', path])
|
||||
elif updateCache:
|
||||
runProgram(['git-update-index', '--add', '--cacheinfo',
|
||||
'0%o' % mode, sha, path])
|
||||
|
||||
def removeFile(clean, path):
|
||||
if cleanCache or (not cleanCache and clean):
|
||||
runProgram(['git-update-index', '--force-remove', '--', path])
|
||||
|
||||
if not cleanCache and clean:
|
||||
prog = ['git-cat-file', 'blob', sha]
|
||||
if stat.S_ISREG(mode):
|
||||
try:
|
||||
os.unlink(path)
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT and e.errno != errno.EISDIR:
|
||||
raise
|
||||
except OSError:
|
||||
pass
|
||||
if mode & 0100:
|
||||
mode = 0777
|
||||
else:
|
||||
mode = 0666
|
||||
fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode)
|
||||
proc = subprocess.Popen(prog, stdout=fd)
|
||||
proc.wait()
|
||||
os.close(fd)
|
||||
elif stat.S_ISLNK(mode):
|
||||
linkTarget = runProgram(prog)
|
||||
os.symlink(linkTarget, path)
|
||||
else:
|
||||
assert(False)
|
||||
|
||||
def uniquePath(path, branch):
|
||||
newPath = path + '_' + branch
|
||||
suffix = 0
|
||||
while newPath in files or newPath in dirs:
|
||||
suffix += 1
|
||||
newPath = path + '_' + branch + '_' + str(suffix)
|
||||
files.add(newPath)
|
||||
return newPath
|
||||
if updateWd and updateCache:
|
||||
runProgram(['git-update-index', '--add', '--', path])
|
||||
elif updateCache:
|
||||
runProgram(['git-update-index', '--add', '--cacheinfo',
|
||||
'0%o' % mode, sha, path])
|
||||
|
||||
debug('processing', entry.path, 'clean cache:', cleanCache)
|
||||
def removeFile(clean, path):
|
||||
updateCache = cacheOnly or clean
|
||||
updateWd = not cacheOnly
|
||||
|
||||
if updateCache:
|
||||
runProgram(['git-update-index', '--force-remove', '--', path])
|
||||
|
||||
if updateWd:
|
||||
try:
|
||||
os.unlink(path)
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT and e.errno != errno.EISDIR:
|
||||
raise
|
||||
|
||||
def uniquePath(path, branch):
|
||||
def fileExists(path):
|
||||
try:
|
||||
os.lstat(path)
|
||||
return True
|
||||
except OSError, e:
|
||||
if e.errno == errno.ENOENT:
|
||||
return False
|
||||
else:
|
||||
raise
|
||||
|
||||
newPath = path + '_' + branch
|
||||
suffix = 0
|
||||
while newPath in currentFileSet or \
|
||||
newPath in currentDirectorySet or \
|
||||
fileExists(newPath):
|
||||
suffix += 1
|
||||
newPath = path + '_' + branch + '_' + str(suffix)
|
||||
currentFileSet.add(newPath)
|
||||
return newPath
|
||||
|
||||
# Cache entry management
|
||||
# ----------------------
|
||||
|
||||
class CacheEntry:
|
||||
def __init__(self, path):
|
||||
class Stage:
|
||||
def __init__(self):
|
||||
self.sha1 = None
|
||||
self.mode = None
|
||||
|
||||
# Used for debugging only
|
||||
def __str__(self):
|
||||
if self.mode != None:
|
||||
m = '0%o' % self.mode
|
||||
else:
|
||||
m = 'None'
|
||||
|
||||
if self.sha1:
|
||||
sha1 = self.sha1
|
||||
else:
|
||||
sha1 = 'None'
|
||||
return 'sha1: ' + sha1 + ' mode: ' + m
|
||||
|
||||
self.stages = [Stage(), Stage(), Stage(), Stage()]
|
||||
self.path = path
|
||||
self.processed = False
|
||||
|
||||
def __str__(self):
|
||||
return 'path: ' + self.path + ' stages: ' + repr([str(x) for x in self.stages])
|
||||
|
||||
class CacheEntryContainer:
|
||||
def __init__(self):
|
||||
self.entries = {}
|
||||
|
||||
def add(self, entry):
|
||||
self.entries[entry.path] = entry
|
||||
|
||||
def get(self, path):
|
||||
return self.entries.get(path)
|
||||
|
||||
def __iter__(self):
|
||||
return self.entries.itervalues()
|
||||
|
||||
unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S)
|
||||
def unmergedCacheEntries():
|
||||
'''Create a dictionary mapping file names to CacheEntry
|
||||
objects. The dictionary contains one entry for every path with a
|
||||
non-zero stage entry.'''
|
||||
|
||||
lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0')
|
||||
lines.pop()
|
||||
|
||||
res = CacheEntryContainer()
|
||||
for l in lines:
|
||||
m = unmergedRE.match(l)
|
||||
if m:
|
||||
mode = int(m.group(1), 8)
|
||||
sha1 = m.group(2)
|
||||
stage = int(m.group(3))
|
||||
path = m.group(4)
|
||||
|
||||
e = res.get(path)
|
||||
if not e:
|
||||
e = CacheEntry(path)
|
||||
res.add(e)
|
||||
|
||||
e.stages[stage].mode = mode
|
||||
e.stages[stage].sha1 = sha1
|
||||
else:
|
||||
die('Error: Merge program failed: Unexpected output from',
|
||||
'git-ls-files:', l)
|
||||
return res
|
||||
|
||||
lsTreeRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)\n$', re.S)
|
||||
def getCacheEntry(path, origTree, aTree, bTree):
|
||||
'''Returns a CacheEntry object which doesn't have to correspond to
|
||||
a real cache entry in Git's index.'''
|
||||
|
||||
def parse(out):
|
||||
if out == '':
|
||||
return [None, None]
|
||||
else:
|
||||
m = lsTreeRE.match(out)
|
||||
if not m:
|
||||
die('Unexpected output from git-ls-tree:', out)
|
||||
elif m.group(2) == 'blob':
|
||||
return [m.group(3), int(m.group(1), 8)]
|
||||
else:
|
||||
return [None, None]
|
||||
|
||||
res = CacheEntry(path)
|
||||
|
||||
[oSha, oMode] = parse(runProgram(['git-ls-tree', origTree, '--', path]))
|
||||
[aSha, aMode] = parse(runProgram(['git-ls-tree', aTree, '--', path]))
|
||||
[bSha, bMode] = parse(runProgram(['git-ls-tree', bTree, '--', path]))
|
||||
|
||||
res.stages[1].sha1 = oSha
|
||||
res.stages[1].mode = oMode
|
||||
res.stages[2].sha1 = aSha
|
||||
res.stages[2].mode = aMode
|
||||
res.stages[3].sha1 = bSha
|
||||
res.stages[3].mode = bMode
|
||||
|
||||
return res
|
||||
|
||||
# Rename detection and handling
|
||||
# -----------------------------
|
||||
|
||||
class RenameEntry:
|
||||
def __init__(self,
|
||||
src, srcSha, srcMode, srcCacheEntry,
|
||||
dst, dstSha, dstMode, dstCacheEntry,
|
||||
score):
|
||||
self.srcName = src
|
||||
self.srcSha = srcSha
|
||||
self.srcMode = srcMode
|
||||
self.srcCacheEntry = srcCacheEntry
|
||||
self.dstName = dst
|
||||
self.dstSha = dstSha
|
||||
self.dstMode = dstMode
|
||||
self.dstCacheEntry = dstCacheEntry
|
||||
self.score = score
|
||||
|
||||
self.processed = False
|
||||
|
||||
class RenameEntryContainer:
|
||||
def __init__(self):
|
||||
self.entriesSrc = {}
|
||||
self.entriesDst = {}
|
||||
|
||||
def add(self, entry):
|
||||
self.entriesSrc[entry.srcName] = entry
|
||||
self.entriesDst[entry.dstName] = entry
|
||||
|
||||
def getSrc(self, path):
|
||||
return self.entriesSrc.get(path)
|
||||
|
||||
def getDst(self, path):
|
||||
return self.entriesDst.get(path)
|
||||
|
||||
def __iter__(self):
|
||||
return self.entriesSrc.itervalues()
|
||||
|
||||
parseDiffRenamesRE = re.compile('^:([0-7]+) ([0-7]+) ([0-9a-f]{40}) ([0-9a-f]{40}) R([0-9]*)$')
|
||||
def getRenames(tree, oTree, aTree, bTree, cacheEntries):
|
||||
'''Get information of all renames which occured between 'oTree' and
|
||||
'tree'. We need the three trees in the merge ('oTree', 'aTree' and
|
||||
'bTree') to be able to associate the correct cache entries with
|
||||
the rename information. 'tree' is always equal to either aTree or bTree.'''
|
||||
|
||||
assert(tree == aTree or tree == bTree)
|
||||
inp = runProgram(['git-diff-tree', '-M', '--diff-filter=R', '-r',
|
||||
'-z', oTree, tree])
|
||||
|
||||
ret = RenameEntryContainer()
|
||||
try:
|
||||
recs = inp.split("\0")
|
||||
recs.pop() # remove last entry (which is '')
|
||||
it = recs.__iter__()
|
||||
while True:
|
||||
rec = it.next()
|
||||
m = parseDiffRenamesRE.match(rec)
|
||||
|
||||
if not m:
|
||||
die('Unexpected output from git-diff-tree:', rec)
|
||||
|
||||
srcMode = int(m.group(1), 8)
|
||||
dstMode = int(m.group(2), 8)
|
||||
srcSha = m.group(3)
|
||||
dstSha = m.group(4)
|
||||
score = m.group(5)
|
||||
src = it.next()
|
||||
dst = it.next()
|
||||
|
||||
srcCacheEntry = cacheEntries.get(src)
|
||||
if not srcCacheEntry:
|
||||
srcCacheEntry = getCacheEntry(src, oTree, aTree, bTree)
|
||||
cacheEntries.add(srcCacheEntry)
|
||||
|
||||
dstCacheEntry = cacheEntries.get(dst)
|
||||
if not dstCacheEntry:
|
||||
dstCacheEntry = getCacheEntry(dst, oTree, aTree, bTree)
|
||||
cacheEntries.add(dstCacheEntry)
|
||||
|
||||
ret.add(RenameEntry(src, srcSha, srcMode, srcCacheEntry,
|
||||
dst, dstSha, dstMode, dstCacheEntry,
|
||||
score))
|
||||
except StopIteration:
|
||||
pass
|
||||
return ret
|
||||
|
||||
def fmtRename(src, dst):
|
||||
srcPath = src.split('/')
|
||||
dstPath = dst.split('/')
|
||||
path = []
|
||||
endIndex = min(len(srcPath), len(dstPath)) - 1
|
||||
for x in range(0, endIndex):
|
||||
if srcPath[x] == dstPath[x]:
|
||||
path.append(srcPath[x])
|
||||
else:
|
||||
endIndex = x
|
||||
break
|
||||
|
||||
if len(path) > 0:
|
||||
return '/'.join(path) + \
|
||||
'/{' + '/'.join(srcPath[endIndex:]) + ' => ' + \
|
||||
'/'.join(dstPath[endIndex:]) + '}'
|
||||
else:
|
||||
return src + ' => ' + dst
|
||||
|
||||
def processRenames(renamesA, renamesB, branchNameA, branchNameB):
|
||||
srcNames = Set()
|
||||
for x in renamesA:
|
||||
srcNames.add(x.srcName)
|
||||
for x in renamesB:
|
||||
srcNames.add(x.srcName)
|
||||
|
||||
cleanMerge = True
|
||||
for path in srcNames:
|
||||
if renamesA.getSrc(path):
|
||||
renames1 = renamesA
|
||||
renames2 = renamesB
|
||||
branchName1 = branchNameA
|
||||
branchName2 = branchNameB
|
||||
else:
|
||||
renames1 = renamesB
|
||||
renames2 = renamesA
|
||||
branchName1 = branchNameB
|
||||
branchName2 = branchNameA
|
||||
|
||||
ren1 = renames1.getSrc(path)
|
||||
ren2 = renames2.getSrc(path)
|
||||
|
||||
ren1.dstCacheEntry.processed = True
|
||||
ren1.srcCacheEntry.processed = True
|
||||
|
||||
if ren1.processed:
|
||||
continue
|
||||
|
||||
ren1.processed = True
|
||||
removeFile(True, ren1.srcName)
|
||||
if ren2:
|
||||
# Renamed in 1 and renamed in 2
|
||||
assert(ren1.srcName == ren2.srcName)
|
||||
ren2.dstCacheEntry.processed = True
|
||||
ren2.processed = True
|
||||
|
||||
if ren1.dstName != ren2.dstName:
|
||||
print 'CONFLICT (rename/rename): Rename', \
|
||||
fmtRename(path, ren1.dstName), 'in branch', branchName1, \
|
||||
'rename', fmtRename(path, ren2.dstName), 'in', branchName2
|
||||
cleanMerge = False
|
||||
|
||||
if ren1.dstName in currentDirectorySet:
|
||||
dstName1 = uniquePath(ren1.dstName, branchName1)
|
||||
print ren1.dstName, 'is a directory in', branchName2, \
|
||||
'adding as', dstName1, 'instead.'
|
||||
removeFile(False, ren1.dstName)
|
||||
else:
|
||||
dstName1 = ren1.dstName
|
||||
|
||||
if ren2.dstName in currentDirectorySet:
|
||||
dstName2 = uniquePath(ren2.dstName, branchName2)
|
||||
print ren2.dstName, 'is a directory in', branchName1, \
|
||||
'adding as', dstName2, 'instead.'
|
||||
removeFile(False, ren2.dstName)
|
||||
else:
|
||||
dstName2 = ren1.dstName
|
||||
|
||||
updateFile(False, ren1.dstSha, ren1.dstMode, dstName1)
|
||||
updateFile(False, ren2.dstSha, ren2.dstMode, dstName2)
|
||||
else:
|
||||
print 'Renaming', fmtRename(path, ren1.dstName)
|
||||
[resSha, resMode, clean, merge] = \
|
||||
mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode,
|
||||
ren1.dstName, ren1.dstSha, ren1.dstMode,
|
||||
ren2.dstName, ren2.dstSha, ren2.dstMode,
|
||||
branchName1, branchName2)
|
||||
|
||||
if merge:
|
||||
print 'Auto-merging', ren1.dstName
|
||||
|
||||
if not clean:
|
||||
print 'CONFLICT (content): merge conflict in', ren1.dstName
|
||||
cleanMerge = False
|
||||
|
||||
if not cacheOnly:
|
||||
updateFileExt(ren1.dstSha, ren1.dstMode, ren1.dstName,
|
||||
updateCache=True, updateWd=False)
|
||||
updateFile(clean, resSha, resMode, ren1.dstName)
|
||||
else:
|
||||
# Renamed in 1, maybe changed in 2
|
||||
if renamesA == renames1:
|
||||
stage = 3
|
||||
else:
|
||||
stage = 2
|
||||
|
||||
srcShaOtherBranch = ren1.srcCacheEntry.stages[stage].sha1
|
||||
srcModeOtherBranch = ren1.srcCacheEntry.stages[stage].mode
|
||||
|
||||
dstShaOtherBranch = ren1.dstCacheEntry.stages[stage].sha1
|
||||
dstModeOtherBranch = ren1.dstCacheEntry.stages[stage].mode
|
||||
|
||||
tryMerge = False
|
||||
|
||||
if ren1.dstName in currentDirectorySet:
|
||||
newPath = uniquePath(ren1.dstName, branchName1)
|
||||
print 'CONFLICT (rename/directory): Rename', \
|
||||
fmtRename(ren1.srcName, ren1.dstName), 'in', branchName1,\
|
||||
'directory', ren1.dstName, 'added in', branchName2
|
||||
print 'Renaming', ren1.srcName, 'to', newPath, 'instead'
|
||||
cleanMerge = False
|
||||
removeFile(False, ren1.dstName)
|
||||
updateFile(False, ren1.dstSha, ren1.dstMode, newPath)
|
||||
elif srcShaOtherBranch == None:
|
||||
print 'CONFLICT (rename/delete): Rename', \
|
||||
fmtRename(ren1.srcName, ren1.dstName), 'in', \
|
||||
branchName1, 'and deleted in', branchName2
|
||||
cleanMerge = False
|
||||
updateFile(False, ren1.dstSha, ren1.dstMode, ren1.dstName)
|
||||
elif dstShaOtherBranch:
|
||||
newPath = uniquePath(ren1.dstName, branchName2)
|
||||
print 'CONFLICT (rename/add): Rename', \
|
||||
fmtRename(ren1.srcName, ren1.dstName), 'in', \
|
||||
branchName1 + '.', ren1.dstName, 'added in', branchName2
|
||||
print 'Adding as', newPath, 'instead'
|
||||
updateFile(False, dstShaOtherBranch, dstModeOtherBranch, newPath)
|
||||
cleanMerge = False
|
||||
tryMerge = True
|
||||
elif renames2.getDst(ren1.dstName):
|
||||
dst2 = renames2.getDst(ren1.dstName)
|
||||
newPath1 = uniquePath(ren1.dstName, branchName1)
|
||||
newPath2 = uniquePath(dst2.dstName, branchName2)
|
||||
print 'CONFLICT (rename/rename): Rename', \
|
||||
fmtRename(ren1.srcName, ren1.dstName), 'in', \
|
||||
branchName1+'. Rename', \
|
||||
fmtRename(dst2.srcName, dst2.dstName), 'in', branchName2
|
||||
print 'Renaming', ren1.srcName, 'to', newPath1, 'and', \
|
||||
dst2.srcName, 'to', newPath2, 'instead'
|
||||
removeFile(False, ren1.dstName)
|
||||
updateFile(False, ren1.dstSha, ren1.dstMode, newPath1)
|
||||
updateFile(False, dst2.dstSha, dst2.dstMode, newPath2)
|
||||
dst2.processed = True
|
||||
cleanMerge = False
|
||||
else:
|
||||
tryMerge = True
|
||||
|
||||
if tryMerge:
|
||||
print 'Renaming', fmtRename(ren1.srcName, ren1.dstName)
|
||||
[resSha, resMode, clean, merge] = \
|
||||
mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode,
|
||||
ren1.dstName, ren1.dstSha, ren1.dstMode,
|
||||
ren1.srcName, srcShaOtherBranch, srcModeOtherBranch,
|
||||
branchName1, branchName2)
|
||||
|
||||
if merge:
|
||||
print 'Auto-merging', ren1.dstName
|
||||
|
||||
if not clean:
|
||||
print 'CONFLICT (rename/modify): Merge conflict in', ren1.dstName
|
||||
cleanMerge = False
|
||||
|
||||
if not cacheOnly:
|
||||
updateFileExt(ren1.dstSha, ren1.dstMode, ren1.dstName,
|
||||
updateCache=True, updateWd=False)
|
||||
updateFile(clean, resSha, resMode, ren1.dstName)
|
||||
|
||||
return cleanMerge
|
||||
|
||||
# Per entry merge function
|
||||
# ------------------------
|
||||
|
||||
def processEntry(entry, branch1Name, branch2Name):
|
||||
'''Merge one cache entry.'''
|
||||
|
||||
debug('processing', entry.path, 'clean cache:', cacheOnly)
|
||||
|
||||
cleanMerge = True
|
||||
|
||||
path = entry.path
|
||||
oSha = entry.stages[0].sha1
|
||||
oMode = entry.stages[0].mode
|
||||
aSha = entry.stages[1].sha1
|
||||
aMode = entry.stages[1].mode
|
||||
bSha = entry.stages[2].sha1
|
||||
bMode = entry.stages[2].mode
|
||||
oSha = entry.stages[1].sha1
|
||||
oMode = entry.stages[1].mode
|
||||
aSha = entry.stages[2].sha1
|
||||
aMode = entry.stages[2].mode
|
||||
bSha = entry.stages[3].sha1
|
||||
bMode = entry.stages[3].mode
|
||||
|
||||
assert(oSha == None or isSha(oSha))
|
||||
assert(aSha == None or isSha(aSha))
|
||||
|
@ -275,28 +709,26 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
|||
(not aSha and bSha == oSha):
|
||||
# Deleted in both or deleted in one and unchanged in the other
|
||||
if aSha:
|
||||
print 'Removing ' + path
|
||||
print 'Removing', path
|
||||
removeFile(True, path)
|
||||
else:
|
||||
# Deleted in one and changed in the other
|
||||
cleanMerge = False
|
||||
if not aSha:
|
||||
print 'CONFLICT (del/mod): "' + path + '" deleted in', \
|
||||
branch1Name, 'and modified in', branch2Name, \
|
||||
'. Version', branch2Name, ' of "' + path + \
|
||||
'" left in tree'
|
||||
print 'CONFLICT (delete/modify):', path, 'deleted in', \
|
||||
branch1Name, 'and modified in', branch2Name + '.', \
|
||||
'Version', branch2Name, 'of', path, 'left in tree.'
|
||||
mode = bMode
|
||||
sha = bSha
|
||||
else:
|
||||
print 'CONFLICT (mod/del): "' + path + '" deleted in', \
|
||||
branch2Name, 'and modified in', branch1Name + \
|
||||
'. Version', branch1Name, 'of "' + path + \
|
||||
'" left in tree'
|
||||
print 'CONFLICT (modify/delete):', path, 'deleted in', \
|
||||
branch2Name, 'and modified in', branch1Name + '.', \
|
||||
'Version', branch1Name, 'of', path, 'left in tree.'
|
||||
mode = aMode
|
||||
sha = aSha
|
||||
|
||||
updateFile(False, sha, mode, path)
|
||||
|
||||
|
||||
elif (not oSha and aSha and not bSha) or \
|
||||
(not oSha and not aSha and bSha):
|
||||
#
|
||||
|
@ -307,27 +739,26 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
|||
otherBranch = branch2Name
|
||||
mode = aMode
|
||||
sha = aSha
|
||||
conf = 'file/dir'
|
||||
conf = 'file/directory'
|
||||
else:
|
||||
addBranch = branch2Name
|
||||
otherBranch = branch1Name
|
||||
mode = bMode
|
||||
sha = bSha
|
||||
conf = 'dir/file'
|
||||
conf = 'directory/file'
|
||||
|
||||
if path in dirs:
|
||||
if path in currentDirectorySet:
|
||||
cleanMerge = False
|
||||
newPath = uniquePath(path, addBranch)
|
||||
print 'CONFLICT (' + conf + \
|
||||
'): There is a directory with name "' + path + '" in', \
|
||||
otherBranch + '. Adding "' + path + '" as "' + newPath + '"'
|
||||
print 'CONFLICT (' + conf + '):', \
|
||||
'There is a directory with name', path, 'in', \
|
||||
otherBranch + '. Adding', path, 'as', newPath
|
||||
|
||||
removeFile(False, path)
|
||||
path = newPath
|
||||
updateFile(False, sha, mode, newPath)
|
||||
else:
|
||||
print 'Adding "' + path + '"'
|
||||
|
||||
updateFile(True, sha, mode, path)
|
||||
print 'Adding', path
|
||||
updateFile(True, sha, mode, path)
|
||||
|
||||
elif not oSha and aSha and bSha:
|
||||
#
|
||||
|
@ -336,10 +767,9 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
|||
if aSha == bSha:
|
||||
if aMode != bMode:
|
||||
cleanMerge = False
|
||||
print 'CONFLICT: File "' + path + \
|
||||
'" added identically in both branches,', \
|
||||
'but permissions conflict', '0%o' % aMode, '->', \
|
||||
'0%o' % bMode
|
||||
print 'CONFLICT: File', path, \
|
||||
'added identically in both branches, but permissions', \
|
||||
'conflict', '0%o' % aMode, '->', '0%o' % bMode
|
||||
print 'CONFLICT: adding with permission:', '0%o' % aMode
|
||||
|
||||
updateFile(False, aSha, aMode, path)
|
||||
|
@ -350,8 +780,9 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
|||
cleanMerge = False
|
||||
newPath1 = uniquePath(path, branch1Name)
|
||||
newPath2 = uniquePath(path, branch2Name)
|
||||
print 'CONFLICT (add/add): File "' + path + \
|
||||
'" added non-identically in both branches.'
|
||||
print 'CONFLICT (add/add): File', path, \
|
||||
'added non-identically in both branches. Adding as', \
|
||||
newPath1, 'and', newPath2, 'instead.'
|
||||
removeFile(False, path)
|
||||
updateFile(False, aSha, aMode, newPath1)
|
||||
updateFile(False, bSha, bMode, newPath2)
|
||||
|
@ -360,39 +791,24 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
|
|||
#
|
||||
# case D: Modified in both, but differently.
|
||||
#
|
||||
print 'Auto-merging', path
|
||||
orig = runProgram(['git-unpack-file', oSha]).rstrip()
|
||||
src1 = runProgram(['git-unpack-file', aSha]).rstrip()
|
||||
src2 = runProgram(['git-unpack-file', bSha]).rstrip()
|
||||
[out, ret] = runProgram(['merge',
|
||||
'-L', branch1Name + '/' + path,
|
||||
'-L', 'orig/' + path,
|
||||
'-L', branch2Name + '/' + path,
|
||||
src1, orig, src2], returnCode=True)
|
||||
|
||||
if aMode == oMode:
|
||||
mode = bMode
|
||||
print 'Auto-merging', path
|
||||
[sha, mode, clean, dummy] = \
|
||||
mergeFile(path, oSha, oMode,
|
||||
path, aSha, aMode,
|
||||
path, bSha, bMode,
|
||||
branch1Name, branch2Name)
|
||||
if clean:
|
||||
updateFile(True, sha, mode, path)
|
||||
else:
|
||||
mode = aMode
|
||||
|
||||
sha = runProgram(['git-hash-object', '-t', 'blob', '-w',
|
||||
src1]).rstrip()
|
||||
|
||||
if ret != 0:
|
||||
cleanMerge = False
|
||||
print 'CONFLICT (content): Merge conflict in "' + path + '".'
|
||||
print 'CONFLICT (content): Merge conflict in', path
|
||||
|
||||
if cleanCache:
|
||||
if cacheOnly:
|
||||
updateFile(False, sha, mode, path)
|
||||
else:
|
||||
updateFile(True, aSha, aMode, path)
|
||||
updateFile(False, sha, mode, path, True)
|
||||
else:
|
||||
updateFile(True, sha, mode, path)
|
||||
|
||||
os.unlink(orig)
|
||||
os.unlink(src1)
|
||||
os.unlink(src2)
|
||||
updateFileExt(aSha, aMode, path,
|
||||
updateCache=True, updateWd=False)
|
||||
updateFileExt(sha, mode, path, updateCache=False, updateWd=True)
|
||||
else:
|
||||
die("ERROR: Fatal merge failure, shouldn't happen.")
|
||||
|
||||
|
@ -416,7 +832,7 @@ for nextArg in xrange(1, len(sys.argv)):
|
|||
try:
|
||||
h1 = firstBranch = sys.argv[nextArg + 1]
|
||||
h2 = secondBranch = sys.argv[nextArg + 2]
|
||||
except IndexError:
|
||||
except IndexError:
|
||||
usage()
|
||||
break
|
||||
|
||||
|
@ -428,8 +844,8 @@ try:
|
|||
|
||||
graph = buildGraph([h1, h2])
|
||||
|
||||
[res, clean] = merge(graph.shaMap[h1], graph.shaMap[h2],
|
||||
firstBranch, secondBranch, graph)
|
||||
[dummy, clean] = merge(graph.shaMap[h1], graph.shaMap[h2],
|
||||
firstBranch, secondBranch, graph)
|
||||
|
||||
print ''
|
||||
except:
|
||||
|
|
git-merge.sh (22 lines changed)
@ -123,10 +123,30 @@ case "$#,$common" in
|
|||
dropsave
|
||||
exit 0
|
||||
;;
|
||||
1,*)
|
||||
1,?*"$LF"?*)
|
||||
# We are not doing octopus and not fast forward. Need a
|
||||
# real merge.
|
||||
;;
|
||||
1,*)
|
||||
# We are not doing octopus, not fast forward, and have only
|
||||
# one common. See if it is really trivial.
|
||||
echo "Trying really trivial in-index merge..."
|
||||
git-update-index --refresh 2>/dev/null
|
||||
if git-read-tree --trivial -m -u $common $head "$1" &&
|
||||
result_tree=$(git-write-tree)
|
||||
then
|
||||
echo "Wonderful."
|
||||
result_commit=$(
|
||||
echo "$merge_msg" |
|
||||
git-commit-tree $result_tree -p HEAD -p "$1"
|
||||
) || exit
|
||||
git-update-ref HEAD $result_commit $head
|
||||
summary $result_commit
|
||||
dropsave
|
||||
exit 0
|
||||
fi
|
||||
echo "Nope."
|
||||
;;
|
||||
*)
|
||||
# An octopus. If we can reach all the remote we are up to date.
|
||||
up_to_date=t
|
||||
|
|
|
@ -13,10 +13,13 @@
|
|||
unset CDPATH
|
||||
|
||||
die() {
|
||||
echo "$@" >&2
|
||||
echo >&2 "$@"
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ -h "$GIT_DIR/HEAD" ] &&
|
||||
case "$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD 2>/dev/null)" in
|
||||
refs/*) : ;;
|
||||
*) false ;;
|
||||
esac &&
|
||||
[ -d "$GIT_DIR/refs" ] &&
|
||||
[ -d "$GIT_OBJECT_DIRECTORY/00" ]
|
||||
|
|
|
@ -31,15 +31,15 @@ report () {
|
|||
[ "$header" ]
|
||||
}
|
||||
|
||||
branch=`readlink "$GIT_DIR/HEAD"`
|
||||
branch=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD)
|
||||
case "$branch" in
|
||||
refs/heads/master) ;;
|
||||
*) echo "# On branch $branch" ;;
|
||||
esac
|
||||
|
||||
git-update-index --refresh >/dev/null 2>&1
|
||||
git-update-index -q --unmerged --refresh || exit
|
||||
|
||||
if test -f "$GIT_DIR/HEAD"
|
||||
if GIT_DIR="$GIT_DIR" git-rev-parse --verify HEAD >/dev/null 2>&1
|
||||
then
|
||||
git-diff-index -M --cached HEAD |
|
||||
sed 's/^://' |
|
||||
|
|
git.sh (55 lines changed)
@ -26,17 +26,50 @@ esac
|
|||
|
||||
echo "Usage: git COMMAND [OPTIONS] [TARGET]"
|
||||
if [ -n "$cmd" ]; then
|
||||
echo " git command '$cmd' not found: commands are:"
|
||||
else
|
||||
echo " git commands are:"
|
||||
echo "git command '$cmd' not found."
|
||||
fi
|
||||
echo "git commands are:"
|
||||
|
||||
cat <<\EOF
|
||||
add apply archimport bisect branch checkout cherry clone
|
||||
commit count-objects cvsimport diff fetch format-patch
|
||||
fsck-cache get-tar-commit-id init-db log ls-remote octopus
|
||||
pack-objects parse-remote patch-id prune pull push rebase
|
||||
relink rename repack request-pull reset resolve revert
|
||||
send-email shortlog show-branch status tag verify-tag
|
||||
whatchanged
|
||||
fmt <<\EOF | sed -e 's/^/ /'
|
||||
add
|
||||
apply
|
||||
archimport
|
||||
bisect
|
||||
branch
|
||||
checkout
|
||||
cherry
|
||||
clone
|
||||
commit
|
||||
count-objects
|
||||
cvsimport
|
||||
diff
|
||||
fetch
|
||||
format-patch
|
||||
fsck-objects
|
||||
get-tar-commit-id
|
||||
init-db
|
||||
log
|
||||
ls-remote
|
||||
octopus
|
||||
pack-objects
|
||||
parse-remote
|
||||
patch-id
|
||||
prune
|
||||
pull
|
||||
push
|
||||
rebase
|
||||
relink
|
||||
rename
|
||||
repack
|
||||
request-pull
|
||||
reset
|
||||
resolve
|
||||
revert
|
||||
send-email
|
||||
shortlog
|
||||
show-branch
|
||||
status
|
||||
tag
|
||||
verify-tag
|
||||
whatchanged
|
||||
EOF
|
||||
|
|
http-fetch.c (192 lines changed)
@ -1,6 +1,6 @@
|
|||
#include "cache.h"
|
||||
#include "commit.h"
|
||||
|
||||
#include "pack.h"
|
||||
#include "fetch.h"
|
||||
|
||||
#include <curl/curl.h>
|
||||
|
@ -13,8 +13,12 @@
|
|||
#define curl_global_init(a) do { /* nothing */ } while(0)
|
||||
#endif
|
||||
|
||||
#define PREV_BUF_SIZE 4096
|
||||
#define RANGE_HEADER_SIZE 30
|
||||
|
||||
static CURL *curl;
|
||||
static struct curl_slist *no_pragma_header;
|
||||
static struct curl_slist *no_range_header;
|
||||
static char curl_errorstr[CURL_ERROR_SIZE];
|
||||
|
||||
static char *initial_base;
|
||||
|
@ -87,12 +91,37 @@ void prefetch(unsigned char *sha1)
|
|||
{
|
||||
}
|
||||
|
||||
int relink_or_rename(char *old, char *new) {
|
||||
int ret;
|
||||
|
||||
ret = link(old, new);
|
||||
if (ret < 0) {
|
||||
/* Same Coda hack as in write_sha1_file(sha1_file.c) */
|
||||
ret = errno;
|
||||
if (ret == EXDEV && !rename(old, new))
|
||||
return 0;
|
||||
}
|
||||
unlink(old);
|
||||
if (ret) {
|
||||
if (ret != EEXIST)
|
||||
return ret;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int got_alternates = 0;
|
||||
|
||||
static int fetch_index(struct alt_base *repo, unsigned char *sha1)
|
||||
{
|
||||
char *filename;
|
||||
char *url;
|
||||
char tmpfile[PATH_MAX];
|
||||
int ret;
|
||||
long prev_posn = 0;
|
||||
char range[RANGE_HEADER_SIZE];
|
||||
struct curl_slist *range_header = NULL;
|
||||
CURLcode curl_result;
|
||||
|
||||
FILE *indexfile;
|
||||
|
||||
|
@@ -108,7 +137,8 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1)
repo->base, sha1_to_hex(sha1));
filename = sha1_pack_index_name(sha1);
indexfile = fopen(filename, "w");
snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
indexfile = fopen(tmpfile, "a");
if (!indexfile)
return error("Unable to open local file %s for pack index",
filename);

@@ -119,13 +149,36 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header);
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
if (curl_easy_perform(curl)) {
/* If there is data present from a previous transfer attempt,
resume where it left off */
prev_posn = ftell(indexfile);
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of index for pack %s at byte %ld\n",
sha1_to_hex(sha1), prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
fclose(indexfile);
return error("Unable to get pack index %s\n%s", url,
curl_errorstr);
}
fclose(indexfile);
ret = relink_or_rename(tmpfile, filename);
if (ret)
return error("unable to write index filename %s: %s",
filename, strerror(ret));
return 0;
}
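A minimal sketch of the resume pattern fetch_index() now uses: write into a ".temp" file opened in append mode, and if bytes from an earlier attempt are already there, send a "Range: bytes=N-" header and clear the header list again afterwards so later requests on the same handle are not affected. The URL, output path, and helper name below are illustrative assumptions, not names from this commit.

    #include <stdio.h>
    #include <curl/curl.h>

    static int fetch_resumable(const char *url, const char *path)
    {
    	char range[64];
    	struct curl_slist *range_header = NULL;
    	long prev_posn;
    	CURLcode rc;
    	FILE *out = fopen(path, "a");	/* append keeps earlier bytes */
    	CURL *curl = curl_easy_init();

    	if (!out || !curl)
    		return -1;
    	fseek(out, 0, SEEK_END);
    	prev_posn = ftell(out);
    	curl_easy_setopt(curl, CURLOPT_URL, url);
    	curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);
    	if (prev_posn > 0) {
    		/* ask only for the bytes we do not have yet */
    		snprintf(range, sizeof(range), "Range: bytes=%ld-", prev_posn);
    		range_header = curl_slist_append(range_header, range);
    		curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
    	}
    	rc = curl_easy_perform(curl);
    	/* reset the header list so later requests on this handle do not
    	   inherit the Range: header, like the no_range_header trick above */
    	curl_easy_setopt(curl, CURLOPT_HTTPHEADER, NULL);
    	curl_slist_free_all(range_header);
    	fclose(out);
    	curl_easy_cleanup(curl);
    	return rc == CURLE_OK ? 0 : -1;
    }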
@@ -306,6 +359,12 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
struct packed_git **lst;
FILE *packfile;
char *filename;
char tmpfile[PATH_MAX];
int ret;
long prev_posn = 0;
char range[RANGE_HEADER_SIZE];
struct curl_slist *range_header = NULL;
CURLcode curl_result;
if (fetch_indices(repo))
return -1;

@@ -325,7 +384,8 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
repo->base, sha1_to_hex(target->sha1));
filename = sha1_pack_name(target->sha1);
packfile = fopen(filename, "w");
snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
packfile = fopen(tmpfile, "a");
if (!packfile)
return error("Unable to open local file %s for pack",
filename);

@@ -336,7 +396,24 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header);
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
if (curl_easy_perform(curl)) {
/* If there is data present from a previous transfer attempt,
resume where it left off */
prev_posn = ftell(packfile);
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of pack %s at byte %ld\n",
sha1_to_hex(target->sha1), prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
fclose(packfile);
return error("Unable to get pack file %s\n%s", url,
curl_errorstr);

@@ -344,11 +421,18 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
fclose(packfile);
ret = relink_or_rename(tmpfile, filename);
if (ret)
return error("unable to write pack filename %s: %s",
filename, strerror(ret));
lst = &repo->packs;
while (*lst != target)
lst = &((*lst)->next);
*lst = (*lst)->next;
if (verify_pack(target, 0))
return -1;
install_packed_git(target);
return 0;
@@ -360,14 +444,33 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
char *filename = sha1_file_name(sha1);
unsigned char real_sha1[20];
char tmpfile[PATH_MAX];
char prevfile[PATH_MAX];
int ret;
char *url;
char *posn;
int prevlocal;
unsigned char prev_buf[PREV_BUF_SIZE];
ssize_t prev_read = 0;
long prev_posn = 0;
char range[RANGE_HEADER_SIZE];
struct curl_slist *range_header = NULL;
CURLcode curl_result;
snprintf(tmpfile, sizeof(tmpfile), "%s/obj_XXXXXX",
get_object_directory());
snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
snprintf(prevfile, sizeof(prevfile), "%s.prev", filename);
if (unlink(prevfile) && (errno != ENOENT))
return error("Failed to unlink %s (%s)",
prevfile, strerror(errno));
if (rename(tmpfile, prevfile) && (errno != ENOENT))
return error("Failed to rename %s to %s (%s)",
tmpfile, prevfile, strerror(errno));
local = open(tmpfile, O_WRONLY | O_CREAT | O_EXCL, 0666);
/* Note: if another instance starts now, it will turn our new
tmpfile into its prevfile. */
local = mkstemp(tmpfile);
if (local < 0)
return error("Couldn't create temporary file %s for %s: %s\n",
tmpfile, filename, strerror(errno));

@@ -396,8 +499,57 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_URL, url);
if (curl_easy_perform(curl)) {
unlink(filename);
/* If a previous temp file is present, process what was already
fetched. */
prevlocal = open(prevfile, O_RDONLY);
if (prevlocal != -1) {
do {
prev_read = read(prevlocal, prev_buf, PREV_BUF_SIZE);
if (prev_read>0) {
if (fwrite_sha1_file(prev_buf,
1,
prev_read,
NULL) == prev_read) {
prev_posn += prev_read;
} else {
prev_read = -1;
}
}
} while (prev_read > 0);
close(prevlocal);
}
unlink(prevfile);
/* Reset inflate/SHA1 if there was an error reading the previous temp
file; also rewind to the beginning of the local file. */
if (prev_read == -1) {
memset(&stream, 0, sizeof(stream));
inflateInit(&stream);
SHA1_Init(&c);
if (prev_posn>0) {
prev_posn = 0;
lseek(local, SEEK_SET, 0);
ftruncate(local, 0);
}
}
/* If we have successfully processed data from a previous fetch
attempt, only fetch the data we don't already have. */
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of object %s at byte %ld\n",
hex, prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
return error("%s", curl_errorstr);
}

@@ -413,20 +565,11 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
unlink(tmpfile);
return error("File %s has bad hash\n", hex);
}
ret = link(tmpfile, filename);
if (ret < 0) {
/* Same Coda hack as in write_sha1_file(sha1_file.c) */
ret = errno;
if (ret == EXDEV && !rename(tmpfile, filename))
goto out;
}
unlink(tmpfile);
if (ret) {
if (ret != EEXIST)
return error("unable to write sha1 filename %s: %s",
filename, strerror(ret));
}
out:
ret = relink_or_rename(tmpfile, filename);
if (ret)
return error("unable to write sha1 filename %s: %s",
filename, strerror(ret));
pull_say("got %s\n", hex);
return 0;
}

@@ -519,6 +662,7 @@ int main(int argc, char **argv)
curl = curl_easy_init();
no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:");
no_range_header = curl_slist_append(no_range_header, "Range:");
curl_ssl_verify = getenv("GIT_SSL_NO_VERIFY") ? 0 : 1;
curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, curl_ssl_verify);
2 index.c

@@ -29,7 +29,7 @@ int hold_index_file_for_update(struct cache_file *cf, const char *path)
signal(SIGINT, remove_lock_file_on_signal);
atexit(remove_lock_file);
}
return open(cf->lockfile, O_RDWR | O_CREAT | O_EXCL, 0600);
return open(cf->lockfile, O_RDWR | O_CREAT | O_EXCL, 0666);
}
int commit_index_file(struct cache_file *cf)
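With the lock file now created with mode 0666 (here and in the 0600-to-0666 change further below), the caller's umask decides the final permissions, e.g. umask 022 yields 0644 instead of a hard-wired owner-only file. A small illustration; the path is made up.

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/stat.h>
    #include <unistd.h>

    int main(void)
    {
    	struct stat st;
    	int fd = open("example.lock", O_RDWR | O_CREAT | O_EXCL, 0666);
    	if (fd < 0)
    		return 1;
    	if (!fstat(fd, &st))
    		printf("resulting mode: %04o\n", st.st_mode & 0777);
    	close(fd);
    	return 0;
    }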
10 init-db.c

@@ -166,6 +166,7 @@ static void create_default_files(const char *git_dir,
{
unsigned len = strlen(git_dir);
static char path[PATH_MAX];
unsigned char sha1[20];
if (len > sizeof(path)-50)
die("insane git directory %s", git_dir);

@@ -186,15 +187,14 @@ static void create_default_files(const char *git_dir,
/*
* Create the default symlink from ".git/HEAD" to the "master"
* branch
* branch, if it does not exist yet.
*/
strcpy(path + len, "HEAD");
if (symlink("refs/heads/master", path) < 0) {
if (errno != EEXIST) {
perror(path);
if (read_ref(path, sha1) < 0) {
if (create_symref(path, "refs/heads/master") < 0)
exit(1);
}
}
path[len] = 0;
copy_templates(path, len, template_path);
}
@@ -530,7 +530,7 @@ static void verify_pathspec(void)
static const char ls_files_usage[] =
"git-ls-files [-z] [-t] (--[cached|deleted|others|stage|unmerged|killed|modified])* "
"[ --ignored ] [--exclude=<pattern>] [--exclude-from=<file>] "
"[ --exclude-per-directory=<filename> ]";
"[ --exclude-per-directory=<filename> ] [--] [<file>]*";
int main(int argc, const char **argv)
{

@@ -544,6 +544,10 @@ int main(int argc, const char **argv)
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
if (!strcmp(arg, "--")) {
i++;
break;
}
if (!strcmp(arg, "-z")) {
line_terminator = 0;
continue;
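A small sketch of the "--" convention the option loop above adopts: once "--" is seen, every remaining argument is taken as a path even if it begins with a dash, which is what lets the new test below list a file literally named "--". The program and names here are illustrative.

    #include <stdio.h>
    #include <string.h>

    int main(int argc, char **argv)
    {
    	int i, seen_dashdash = 0;
    	for (i = 1; i < argc; i++) {
    		if (!seen_dashdash && !strcmp(argv[i], "--")) {
    			seen_dashdash = 1;	/* stop option parsing here */
    			continue;
    		}
    		if (!seen_dashdash && argv[i][0] == '-')
    			printf("option: %s\n", argv[i]);
    		else
    			printf("path:   %s\n", argv[i]);
    	}
    	return 0;
    }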
@@ -128,7 +128,7 @@ int main(int argc, char **argv)
unsigned long len = parse_email(map, size);
assert(len <= size);
sprintf(name, "%04d", ++nr);
fd = open(name, O_WRONLY | O_CREAT | O_EXCL, 0600);
fd = open(name, O_WRONLY | O_CREAT | O_EXCL, 0666);
if (fd < 0) {
perror(name);
exit(1);
14 read-cache.c

@@ -464,11 +464,15 @@ int read_cache(void)
errno = EBUSY;
if (active_cache)
return error("more than one cachefile");
return active_nr;
errno = ENOENT;
fd = open(get_index_file(), O_RDONLY);
if (fd < 0)
return (errno == ENOENT) ? 0 : error("open failed");
if (fd < 0) {
if (errno == ENOENT)
return 0;
die("index file open failed (%s)", strerror(errno));
}
size = 0; // avoid gcc warning
map = MAP_FAILED;

@@ -480,7 +484,7 @@ int read_cache(void)
}
close(fd);
if (map == MAP_FAILED)
return error("mmap failed");
die("index file mmap failed (%s)", strerror(errno));
hdr = map;
if (verify_hdr(hdr, size) < 0)

@@ -501,7 +505,7 @@ int read_cache(void)
unmap:
munmap(map, size);
errno = EINVAL;
return error("verify header failed");
die("index file corrupt");
}
#define WRITE_BUFFER_SIZE 8192
18 read-tree.c

@@ -13,6 +13,8 @@
static int merge = 0;
static int update = 0;
static int index_only = 0;
static int nontrivial_merge = 0;
static int trivial_merges_only = 0;
static int head_idx = -1;
static int merge_size = 0;

@@ -275,6 +277,9 @@ static int unpack_trees(merge_fn_t fn)
if (unpack_trees_rec(posns, len, "", fn, &indpos))
return -1;
if (trivial_merges_only && nontrivial_merge)
die("Merge requires file-level merging");
check_updates(active_cache, active_nr);
return 0;
}

@@ -460,6 +465,8 @@ static int threeway_merge(struct cache_entry **stages)
verify_uptodate(index);
}
nontrivial_merge = 1;
/* #2, #3, #4, #6, #7, #9, #11. */
count = 0;
if (!head_match || !remote_match) {

@@ -629,9 +636,9 @@ int main(int argc, char **argv)
continue;
}
if (!strcmp(arg, "--head")) {
head_idx = stage - 1;
fn = threeway_merge;
if (!strcmp(arg, "--trivial")) {
trivial_merges_only = 1;
continue;
}
/* "-m" stands for "merge", meaning we start in stage 1 */

@@ -657,7 +664,8 @@ int main(int argc, char **argv)
}
if ((update||index_only) && !merge)
usage(read_tree_usage);
if (merge && !fn) {
if (merge) {
if (stage < 2)
die("just how do you expect me to merge %d trees?", stage-1);
switch (stage - 1) {

@@ -674,9 +682,7 @@ int main(int argc, char **argv)
fn = threeway_merge;
break;
}
}
if (head_idx < 0) {
if (stage - 1 >= 3)
head_idx = stage - 2;
else
189 refs.c

@@ -2,19 +2,157 @@
#include "cache.h"
#include <errno.h>
#include <ctype.h>
static int read_ref(const char *refname, unsigned char *sha1)
/* We allow "recursive" symbolic refs. Only within reason, though */
#define MAXDEPTH 5
#ifndef USE_SYMLINK_HEAD
#define USE_SYMLINK_HEAD 1
#endif
int validate_symref(const char *path)
{
int ret = -1;
int fd = open(git_path("%s", refname), O_RDONLY);
struct stat st;
char *buf, buffer[256];
int len, fd;
if (fd >= 0) {
char buffer[60];
if (read(fd, buffer, sizeof(buffer)) >= 40)
ret = get_sha1_hex(buffer, sha1);
close(fd);
if (lstat(path, &st) < 0)
return -1;
/* Make sure it is a "refs/.." symlink */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5))
return 0;
return -1;
}
return ret;
/*
* Anything else, just open it and try to see if it is a symbolic ref.
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return -1;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
/*
* Is it a symbolic ref?
*/
if (len < 4 || memcmp("ref:", buffer, 4))
return -1;
buf = buffer + 4;
len -= 4;
while (len && isspace(*buf))
buf++, len--;
if (len >= 5 && !memcmp("refs/", buffer, 5))
return 0;
return -1;
}
const char *resolve_ref(const char *path, unsigned char *sha1, int reading)
{
int depth = MAXDEPTH, len;
char buffer[256];
for (;;) {
struct stat st;
char *buf;
int fd;
if (--depth < 0)
return NULL;
/* Special case: non-existing file.
* Not having the refs/heads/new-branch is OK
* if we are writing into it, so is .git/HEAD
* that points at refs/heads/master still to be
* born. It is NOT OK if we are resolving for
* reading.
*/
if (lstat(path, &st) < 0) {
if (reading || errno != ENOENT)
return NULL;
memset(sha1, 0, 20);
return path;
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5)) {
path = git_path("%.*s", len, buffer);
continue;
}
}
/*
* Anything else, just open it and try to use it as
* a ref
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return NULL;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
/*
* Is it a symbolic ref?
*/
if (len < 4 || memcmp("ref:", buffer, 4))
break;
buf = buffer + 4;
len -= 4;
while (len && isspace(*buf))
buf++, len--;
while (len && isspace(buf[len-1]))
buf[--len] = 0;
path = git_path("%.*s", len, buf);
}
if (len < 40 || get_sha1_hex(buffer, sha1))
return NULL;
return path;
}
int create_symref(const char *git_HEAD, const char *refs_heads_master)
{
#if USE_SYMLINK_HEAD
unlink(git_HEAD);
return symlink(refs_heads_master, git_HEAD);
#else
const char *lockpath;
char ref[1000];
int fd, len, written;
len = snprintf(ref, sizeof(ref), "ref: %s\n", refs_heads_master);
if (sizeof(ref) <= len) {
error("refname too long: %s", refs_heads_master);
return -1;
}
lockpath = mkpath("%s.lock", git_HEAD);
fd = open(lockpath, O_CREAT | O_EXCL | O_WRONLY, 0666);
written = write(fd, ref, len);
close(fd);
if (written != len) {
unlink(lockpath);
error("Unable to write to %s", lockpath);
return -2;
}
if (rename(lockpath, git_HEAD) < 0) {
unlink(lockpath);
error("Unable to create %s", git_HEAD);
return -3;
}
return 0;
#endif
}
int read_ref(const char *filename, unsigned char *sha1)
{
if (resolve_ref(filename, sha1, 1))
return 0;
return -1;
}
static int do_for_each_ref(const char *base, int (*fn)(const char *path, const unsigned char *sha1))

@@ -54,7 +192,7 @@ static int do_for_each_ref(const char *base, int (*fn)(const char *path, const u
break;
continue;
}
if (read_ref(path, sha1) < 0)
if (read_ref(git_path("%s", path), sha1) < 0)
continue;
if (!has_sha1_file(sha1))
continue;

@@ -71,7 +209,7 @@ static int do_for_each_ref(const char *base, int (*fn)(const char *path, const u
int head_ref(int (*fn)(const char *path, const unsigned char *sha1))
{
unsigned char sha1[20];
if (!read_ref("HEAD", sha1))
if (!read_ref(git_path("HEAD"), sha1))
return fn("HEAD", sha1);
return 0;
}

@@ -101,33 +239,14 @@ static char *ref_lock_file_name(const char *ref)
return ret;
}
static int read_ref_file(const char *filename, unsigned char *sha1) {
int fd = open(filename, O_RDONLY);
char hex[41];
if (fd < 0) {
return error("Couldn't open %s\n", filename);
}
if ((read(fd, hex, 41) < 41) ||
(hex[40] != '\n') ||
get_sha1_hex(hex, sha1)) {
error("Couldn't read a hash from %s\n", filename);
close(fd);
return -1;
}
close(fd);
return 0;
}
int get_ref_sha1(const char *ref, unsigned char *sha1)
{
char *filename;
int retval;
const char *filename;
if (check_ref_format(ref))
return -1;
filename = ref_file_name(ref);
retval = read_ref_file(filename, sha1);
free(filename);
return retval;
filename = git_path("refs/%s", ref);
return read_ref(filename, sha1);
}
static int lock_ref_file(const char *filename, const char *lock_filename,

@@ -140,7 +259,7 @@ static int lock_ref_file(const char *filename, const char *lock_filename,
return error("Couldn't open lock file for %s: %s",
filename, strerror(errno));
}
retval = read_ref_file(filename, current_sha1);
retval = read_ref(filename, current_sha1);
if (old_sha1) {
if (retval) {
close(fd);
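A standalone sketch of the textual symbolic-ref convention resolve_ref() implements: a ref file holds either 40 hex digits or a "ref: <path>" line pointing at another ref, and the chain is followed only to a small depth (MAXDEPTH is 5 above). The helper below is illustrative and simplifies git_path() to plain paths taken as given.

    #include <stdio.h>
    #include <string.h>

    static int read_ref_chain(const char *start, char sha1_hex[41])
    {
    	char path[256], buf[256];
    	int depth;

    	snprintf(path, sizeof(path), "%s", start);
    	for (depth = 0; depth < 5; depth++) {	/* same spirit as MAXDEPTH */
    		FILE *f = fopen(path, "r");
    		if (!f)
    			return -1;
    		if (!fgets(buf, sizeof(buf), f)) {
    			fclose(f);
    			return -1;
    		}
    		fclose(f);
    		if (strncmp(buf, "ref: ", 5)) {
    			/* not a symref: expect 40 hex digits */
    			buf[strcspn(buf, " \t\n")] = '\0';
    			if (strlen(buf) != 40)
    				return -1;
    			memcpy(sha1_hex, buf, 41);
    			return 0;
    		}
    		/* symref: follow the named ref (resolve_ref() would
    		   prepend $GIT_DIR via git_path() here) */
    		buf[strcspn(buf, "\n")] = '\0';
    		snprintf(path, sizeof(path), "%s", buf + 5);
    	}
    	return -1;	/* too many levels of indirection */
    }

    int main(int argc, char **argv)
    {
    	char hex[41];
    	if (argc > 1 && !read_ref_chain(argv[1], hex))
    		printf("%s\n", hex);
    	return 0;
    }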
16 setup.c

@@ -76,18 +76,20 @@ const char **get_pathspec(const char *prefix, const char **pathspec)
* Test it it looks like we're at the top
* level git directory. We want to see a
*
* - a HEAD symlink and a refs/ directory under ".git"
* - either a .git/objects/ directory _or_ the proper
* GIT_OBJECT_DIRECTORY environment variable
* - a refs/ directory under ".git"
* - either a HEAD symlink or a HEAD file that is formatted as
* a proper "ref:".
*/
static int is_toplevel_directory(void)
{
struct stat st;
return !lstat(".git/HEAD", &st) &&
S_ISLNK(st.st_mode) &&
!access(".git/refs/", X_OK) &&
(getenv(DB_ENVIRONMENT) || !access(".git/objects/", X_OK));
if (access(".git/refs/", X_OK) ||
access(getenv(DB_ENVIRONMENT) ?
getenv(DB_ENVIRONMENT) : ".git/objects/", X_OK) ||
validate_symref(".git/HEAD"))
return 0;
return 1;
}
const char *setup_git_directory(void)
@@ -20,6 +20,8 @@
#endif
#endif
const unsigned char null_sha1[20] = { 0, };
static unsigned int sha1_file_open_flag = O_NOATIME;
static unsigned hexval(char c)
17 sha1_name.c

@@ -119,21 +119,6 @@ static int get_short_sha1(const char *name, int len, unsigned char *sha1)
return -1;
}
static int get_sha1_file(const char *path, unsigned char *result)
{
char buffer[60];
int fd = open(path, O_RDONLY);
int len;
if (fd < 0)
return -1;
len = read(fd, buffer, sizeof(buffer));
close(fd);
if (len < 40)
return -1;
return get_sha1_hex(buffer, result);
}
static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
{
static const char *prefix[] = {

@@ -150,7 +135,7 @@ static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
for (p = prefix; *p; p++) {
char *pathname = git_path("%s/%.*s", *p, len, str);
if (!get_sha1_file(pathname, sha1))
if (!read_ref(pathname, sha1))
return 0;
}
@@ -349,6 +349,7 @@ int main(int ac, char **av)
int all_heads = 0, all_tags = 0;
int all_mask, all_revs, shown_merge_point;
char head_path[128];
const char *head_path_p;
int head_path_len;
unsigned char head_sha1[20];
int merge_base = 0;

@@ -430,11 +431,15 @@ int main(int ac, char **av)
if (0 <= extra)
join_revs(&list, &seen, num_rev, extra);
head_path_len = readlink(".git/HEAD", head_path, sizeof(head_path)-1);
if ((head_path_len < 0) || get_sha1("HEAD", head_sha1))
head_path_p = resolve_ref(git_path("HEAD"), head_sha1, 1);
if (head_path_p) {
head_path_len = strlen(head_path_p);
memcpy(head_path, head_path_p, head_path_len + 1);
}
else {
head_path_len = 0;
head_path[0] = 0;
else
head_path[head_path_len] = 0;
}
if (merge_base)
return show_merge_base(seen, num_rev);
@@ -0,0 +1,34 @@
#include "cache.h"
static const char git_symbolic_ref_usage[] =
"git-symbolic-ref name [ref]";
static int check_symref(const char *HEAD)
{
unsigned char sha1[20];
const char *git_HEAD = strdup(git_path("%s", HEAD));
const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 0);
if (git_refs_heads_master) {
/* we want to strip the .git/ part */
int pfxlen = strlen(git_HEAD) - strlen(HEAD);
puts(git_refs_heads_master + pfxlen);
}
else
die("No such ref: %s", HEAD);
}
int main(int argc, const char **argv)
{
setup_git_directory();
switch (argc) {
case 2:
check_symref(argv[1]);
break;
case 3:
create_symref(strdup(git_path("%s", argv[1])), argv[2]);
break;
default:
usage(git_symbolic_ref_usage);
}
return 0;
}
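An illustration of the prefix stripping in check_symref() above: resolve_ref() hands back a path that still carries the $GIT_DIR prefix, and the prefix length is recovered by comparing the lengths of the expanded and unexpanded names. The literal strings below are examples only.

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
    	const char *arg = "HEAD";
    	const char *expanded = ".git/HEAD";			/* like git_path("%s", arg) */
    	const char *resolved = ".git/refs/heads/master";	/* like a resolve_ref() result */
    	int pfxlen = strlen(expanded) - strlen(arg);		/* length of ".git/" */
    	puts(resolved + pfxlen);	/* prints "refs/heads/master" */
    	return 0;
    }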
@@ -5,6 +5,7 @@
#GIT_TEST_OPTS=--verbose --debug
SHELL_PATH ?= $(SHELL)
TAR ?= $(TAR)
T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
@@ -0,0 +1,69 @@
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano
#
test_description='git-ls-files test (-- to terminate the path list).
This test runs git-ls-files --others with the following on the
filesystem.
    path0       - a file
    -foo        - a file with a funny name.
    --          - another file with a funny name.
'
. ./test-lib.sh
test_expect_success \
    setup \
    'echo frotz >path0 &&
     echo frotz >./-foo &&
     echo frotz >./--'
test_expect_success \
    'git-ls-files without path restriction.' \
    'git-ls-files --others >output &&
     diff -u output - <<EOF
--
-foo
output
path0
EOF
'
test_expect_success \
    'git-ls-files with path restriction.' \
    'git-ls-files --others path0 >output &&
     diff -u output - <<EOF
path0
EOF
'
test_expect_success \
    'git-ls-files with path restriction with --.' \
    'git-ls-files --others -- path0 >output &&
     diff -u output - <<EOF
path0
EOF
'
test_expect_success \
    'git-ls-files with path restriction with -- --.' \
    'git-ls-files --others -- -- >output &&
     diff -u output - <<EOF
--
EOF
'
test_expect_success \
    'git-ls-files with no path restriction.' \
    'git-ls-files --others -- >output &&
     diff -u output - <<EOF
--
-foo
output
path0
EOF
'
test_done
@@ -50,7 +50,7 @@ test_expect_success \
test_expect_success \
    'validate file modification time' \
    'TZ=GMT tar tvf b.tar a/a |
    'TZ=GMT $TAR tvf b.tar a/a |
     awk \{print\ \$4,\ \(length\(\$5\)\<7\)\ ?\ \$5\":00\"\ :\ \$5\} \
     >b.mtime &&
     echo "2005-05-27 22:00:00" >expected.mtime &&

@@ -63,7 +63,7 @@ test_expect_success \
test_expect_success \
    'extract tar archive' \
    '(cd b && tar xf -) <b.tar'
    '(cd b && $TAR xf -) <b.tar'
test_expect_success \
    'validate filenames' \

@@ -80,7 +80,7 @@ test_expect_success \
test_expect_success \
    'extract tar archive with prefix' \
    '(cd c && tar xf -) <c.tar'
    '(cd c && $TAR xf -) <c.tar'
test_expect_success \
    'validate filenames with prefix' \
@@ -20,12 +20,12 @@ test_expect_success setup '
	commit=$(echo "Commit #$i" | git-commit-tree $tree -p $parent) &&
	parent=$commit || return 1
	done &&
	echo "$commit" >.git/HEAD &&
	git-update-ref HEAD "$commit" &&
	git-clone -l ./. victim &&
	cd victim &&
	git-log &&
	cd .. &&
	echo $zero >.git/HEAD &&
	git-update-ref HEAD "$zero" &&
	parent=$zero &&
	for i in $cnt
	do

@@ -33,7 +33,7 @@ test_expect_success setup '
	commit=$(echo "Rebase #$i" | git-commit-tree $tree -p $parent) &&
	parent=$commit || return 1
	done &&
	echo "$commit" >.git/HEAD &&
	git-update-ref HEAD "$commit" &&
	echo Rebase &&
	git-log'
@@ -7,20 +7,6 @@ test_description='Tests git-rev-list --bisect functionality'
. ./test-lib.sh
. ../t6000lib.sh # t6xxx specific functions
bc_expr()
{
bc <<EOF
scale=1
define abs(x) {
    if (x>=0) { return (x); } else { return (-x); }
}
define floor(x) {
    save=scale; scale=0; result=x/1; scale=save; return (result);
}
$*
EOF
}
# usage: test_bisection max-diff bisect-option head ^prune...
#
# e.g. test_bisection 1 --bisect l1 ^l0

@@ -35,8 +21,19 @@ test_bisection_diff()
	_head=$1
	shift 1
	_bisection_size=$(git-rev-list $_bisection "$@" | wc -l)
	[ -n "$_list_size" -a -n "$_bisection_size" ] || error "test_bisection_diff failed"
	test_expect_success "bisection diff $_bisect_option $_head $* <= $_max_diff" "[ $(bc_expr "floor(abs($_list_size/2)-$_bisection_size)") -le $_max_diff ]"
	[ -n "$_list_size" -a -n "$_bisection_size" ] ||
	error "test_bisection_diff failed"
	# Test if bisection size is close to half of list size within
	# tolerance.
	#
	_bisect_err=`expr $_list_size - $_bisection_size \* 2`
	test "$_bisect_err" -lt 0 && _bisect_err=`expr 0 - $_bisect_err`
	_bisect_err=`expr $_bisect_err / 2` ; # floor
	test_expect_success \
	"bisection diff $_bisect_option $_head $* <= $_max_diff" \
	'test $_bisect_err -le $_max_diff'
}
date >path0
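The expr arithmetic that replaces bc above computes |list_size - 2*bisection_size| / 2 with integer division, which equals floor(|list_size/2 - bisection_size|) for non-negative inputs, so the tolerance check is unchanged. A small check with made-up sizes:

    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
    	int list_size = 25, bisection_size = 11;	/* illustrative values */
    	/* same error term as the shell: |25 - 22| / 2 = 1,
    	   matching floor(|12.5 - 11|) = 1 from the old bc expression */
    	int err = abs(list_size - 2 * bisection_size) / 2;
    	printf("bisect error = %d\n", err);
    	return 0;
    }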
@@ -353,6 +353,8 @@ static void traverse_tree(void *buffer, unsigned long size,
if (size < namelen + 20 || sscanf(buffer, "%o", &mode) != 1)
die("corrupt 'tree' file");
if (S_ISDIR(mode) || S_ISREG(mode))
mode |= (mode & 0100) ? 0777 : 0666;
buffer = sha1 + 20;
size -= namelen + 20;
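An illustration of the mode normalization added above: group and other permission bits are synthesized from the owner-execute bit, so archived entries end up with either 0666 or 0777 permissions regardless of the exact mode stored in the tree. The sample modes below are arbitrary.

    #include <stdio.h>
    #include <sys/stat.h>

    int main(void)
    {
    	unsigned modes[] = { 0100644, 0100755, 040000 };
    	unsigned i;
    	for (i = 0; i < sizeof(modes) / sizeof(modes[0]); i++) {
    		unsigned mode = modes[i];
    		if (S_ISDIR(mode) || S_ISREG(mode))
    			mode |= (mode & 0100) ? 0777 : 0666;
    		/* e.g. 0100644 -> 0100666, 0100755 -> 0100777 */
    		printf("%07o -> %07o\n", modes[i], mode);
    	}
    	return 0;
    }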
@@ -13,7 +13,7 @@
* like "git-update-index *" and suddenly having all the object
* files be revision controlled.
*/
static int allow_add = 0, allow_remove = 0, allow_replace = 0, not_new = 0, quiet = 0, info_only = 0;
static int allow_add = 0, allow_remove = 0, allow_replace = 0, allow_unmerged = 0, not_new = 0, quiet = 0, info_only = 0;
static int force_remove;
/* Three functions to allow overloaded pointer return; see linux/err.h */

@@ -135,7 +135,7 @@ static struct cache_entry *refresh_entry(struct cache_entry *ce)
changed = ce_match_stat(ce, &st);
if (!changed)
return ce;
return NULL;
if (ce_modified(ce, &st))
return ERR_PTR(-EINVAL);

@@ -156,16 +156,20 @@ static int refresh_cache(void)
struct cache_entry *ce, *new;
ce = active_cache[i];
if (ce_stage(ce)) {
printf("%s: needs merge\n", ce->name);
has_errors = 1;
while ((i < active_nr) &&
! strcmp(active_cache[i]->name, ce->name))
i++;
i--;
if (allow_unmerged)
continue;
printf("%s: needs merge\n", ce->name);
has_errors = 1;
continue;
}
new = refresh_entry(ce);
if (!new)
continue;
if (IS_ERR(new)) {
if (not_new && PTR_ERR(new) == -ENOENT)
continue;

@@ -335,6 +339,10 @@ int main(int argc, const char **argv)
allow_remove = 1;
continue;
}
if (!strcmp(path, "--unmerged")) {
allow_unmerged = 1;
continue;
}
if (!strcmp(path, "--refresh")) {
has_errors |= refresh_cache();
continue;

@@ -383,9 +391,11 @@ int main(int argc, const char **argv)
update_one(buf.buf, prefix, prefix_length);
}
}
if (write_cache(newfd, active_cache, active_nr) ||
commit_index_file(&cache_file))
die("Unable to write new cachefile");
if (active_cache_changed) {
if (write_cache(newfd, active_cache, active_nr) ||
commit_index_file(&cache_file))
die("Unable to write new cachefile");
}
return has_errors ? 1 : 0;
}
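A self-contained sketch of the "overloaded pointer return" trick that refresh_entry() relies on (the linux/err.h style ERR_PTR/PTR_ERR/IS_ERR helpers referenced in the comment above): small negative errno values are encoded in the pointer itself, so a single return value can carry either a valid entry or an error code. The lookup() function and the 1000-byte threshold are illustrative.

    #include <errno.h>
    #include <stdio.h>

    static inline void *ERR_PTR(long err) { return (void *)err; }
    static inline long PTR_ERR(const void *ptr) { return (long)ptr; }
    static inline int IS_ERR(const void *ptr)
    {
    	/* errno values are small, so error pointers sit at the very
    	   top of the address space */
    	return (unsigned long)ptr > (unsigned long)-1000L;
    }

    static void *lookup(int ok)
    {
    	static int entry = 42;
    	return ok ? (void *)&entry : ERR_PTR(-ENOENT);
    }

    int main(void)
    {
    	void *p = lookup(0);
    	if (IS_ERR(p))
    		printf("error: %ld\n", PTR_ERR(p));
    	else
    		printf("value: %d\n", *(int *)p);
    	return 0;
    }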
49 update-ref.c

@@ -4,53 +4,6 @@
static const char git_update_ref_usage[] = "git-update-ref <refname> <value> [<oldval>]";
#define MAXDEPTH 5
static const char *resolve_ref(const char *path, unsigned char *sha1)
{
int depth = MAXDEPTH, len;
char buffer[256];
for (;;) {
struct stat st;
int fd;
if (--depth < 0)
return NULL;
/* Special case: non-existing file */
if (lstat(path, &st) < 0) {
if (errno != ENOENT)
return NULL;
memset(sha1, 0, 20);
return path;
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5)) {
path = git_path("%.*s", len, buffer);
continue;
}
}
/*
* Anything else, just open it and try to use it as
* a ref
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return NULL;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
break;
}
if (len < 40 || get_sha1_hex(buffer, sha1))
return NULL;
return path;
}
static int re_verify(const char *path, unsigned char *oldsha1, unsigned char *currsha1)
{
char buf[40];

@@ -84,7 +37,7 @@ int main(int argc, char **argv)
if (oldval && get_sha1(oldval, oldsha1) < 0)
die("%s: not a valid old SHA1", oldval);
path = resolve_ref(git_path("%s", refname), currsha1);
path = resolve_ref(git_path("%s", refname), currsha1, !!oldval);
if (!path)
die("No such ref: %s", refname);
4 usage.c

@@ -15,7 +15,7 @@ static void report(const char *prefix, const char *err, va_list params)
void usage(const char *err)
{
fprintf(stderr, "usage: %s\n", err);
exit(1);
exit(129);
}
void die(const char *err, ...)

@@ -25,7 +25,7 @@ void die(const char *err, ...)
va_start(params, err);
report("fatal: ", err, params);
va_end(params);
exit(1);
exit(128);
}
int error(const char *err, ...)
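With usage() and die() now exiting with 129 and 128 respectively, a caller can tell the two failure modes apart from the exit status alone. A hypothetical check; the command run below is only an example of a git program invoked with bad arguments.

    #include <stdio.h>
    #include <stdlib.h>
    #include <sys/wait.h>

    int main(void)
    {
    	int status = system("git-update-ref");	/* no arguments -> usage() */
    	if (WIFEXITED(status) && WEXITSTATUS(status) == 129)
    		printf("usage error\n");
    	else if (WIFEXITED(status) && WEXITSTATUS(status) == 128)
    		printf("fatal error from die()\n");
    	return 0;
    }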