#include "cache.h"
#include "sha1-array.h"
#include "sha1-lookup.h"
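
/*
 * Append a copy of "oid" to the array, growing the backing storage as
 * needed.  Appending invalidates any previous sorting.
 */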
void oid_array_append(struct oid_array *array, const struct object_id *oid)
{
ALLOC_GROW(array->oid, array->nr + 1, array->alloc);
oidcpy(&array->oid[array->nr++], oid);
array->sorted = 0;
}
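
/* QSORT() comparator: compare two object_ids handed over as void pointers. */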
static int void_hashcmp(const void *a, const void *b)
{
return oidcmp(a, b);
}
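
/* Sort the entries by object id and remember that the array is sorted. */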
static void oid_array_sort(struct oid_array *array)
{
QSORT(array->oid, array->nr, void_hashcmp);
array->sorted = 1;
}
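
/*
 * Adapter for sha1_pos(): given an index into the table, hand back the
 * raw hash bytes of that entry.
 */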
static const unsigned char *sha1_access(size_t index, void *table)
{
struct object_id *array = table;
return array[index].hash;
}
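
/*
 * Binary-search for "oid", sorting the array first if necessary.
 * Returns the index of the match, or a negative value if it is not
 * present (see sha1_pos() for the exact encoding).
 */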
int oid_array_lookup(struct oid_array *array, const struct object_id *oid)
{
if (!array->sorted)
oid_array_sort(array);
return sha1_pos(oid->hash, array->oid, array->nr, sha1_access);
}
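
/* Release the storage and reset the array to its empty, unsorted state. */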
void oid_array_clear(struct oid_array *array)
{
free(array->oid);
array->oid = NULL;
array->nr = 0;
array->alloc = 0;
array->sorted = 0;
}

/*
 * Historical note, from the commit that introduced the unique
 * iteration below ("receive-pack: eliminate duplicate .have refs",
 * by Jeff King):
 *
 * When receiving a push, we advertise ref tips from any alternate
 * repositories, in case that helps the client send a smaller pack.
 * Since these refs don't actually exist in the destination
 * repository, we don't transmit the real ref names, but instead use
 * the pseudo-ref ".have".
 *
 * If your alternate has a large number of duplicate refs (for
 * example, because it is aggregating objects from many related
 * repositories, some of which will have the same tags and branch
 * tips), then we will send each ".have $sha1" line multiple times.
 * This is a pointless waste of bandwidth, as we are simply repeating
 * the same fact to the client over and over.
 *
 * This patch eliminates duplicate .have refs early on.  It does so
 * efficiently by sorting the complete list and skipping duplicates.
 * This has the side effect of re-ordering the .have lines by
 * ascending sha1; this isn't a problem, though, as the original
 * order was meaningless.
 *
 * There is a similar .have system in fetch-pack, but it does not
 * suffer from the same problem.  For each alternate ref we consider
 * in fetch-pack, we actually open the object and mark it with the
 * SEEN flag, so duplicates are automatically culled.
 */
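/*
 * Call "fn" once for each distinct oid in the array, in sorted order,
 * passing "data" through.  Iteration stops early and returns fn's
 * value as soon as it is non-zero; otherwise 0 is returned.
 */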
int oid_array_for_each_unique(struct oid_array *array,
for_each_oid_fn fn,
void *data)
{
int i;
if (!array->sorted)
oid_array_sort(array);
for (i = 0; i < array->nr; i++) {
int ret;
if (i > 0 && !oidcmp(array->oid + i, array->oid + i - 1))
continue;
ret = fn(array->oid + i, data);
if (ret)
return ret;
}
return 0;
}
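
/*
 * Example usage (an illustrative sketch, not part of the API above):
 * collect object ids, then visit the de-duplicated set in sorted
 * order.  The callback matches the for_each_oid_fn signature used by
 * oid_array_for_each_unique(); the helper names print_oid and
 * example() are made up for this sketch, which is kept out of the
 * build with "#if 0".
 */
#if 0
static int print_oid(const struct object_id *oid, void *data)
{
	printf("%s\n", oid_to_hex(oid));
	return 0;	/* returning non-zero would stop the iteration */
}

static void example(void)
{
	struct oid_array array = OID_ARRAY_INIT;
	struct object_id oid = { { 0 } };

	/* append entries, possibly with duplicates */
	oid_array_append(&array, &oid);
	oid_array_append(&array, &oid);

	if (oid_array_lookup(&array, &oid) >= 0)
		printf("found it\n");

	/* visits each distinct oid exactly once, in sorted order */
	oid_array_for_each_unique(&array, print_oid, NULL);
	oid_array_clear(&array);
}
#endif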