Browse Source
In preparation for implementing narrow/partial clone, the object walking machinery has been taught a way to tell it to "filter" some objects from enumeration. * jh/object-filtering: rev-list: support --no-filter argument; list-objects-filter-options: support --no-filter; list-objects-filter-options: fix 'keword' typo in comment; pack-objects: add list-objects filtering; rev-list: add list-objects filtering support; list-objects: filter objects in traverse_commit_list; oidset: add iterator methods to oidset; oidmap: add oidmap iterator methods; dir: allow exclusions from blob in addition to file
![gitster@pobox.com](/assets/img/avatar_default.png)
20 changed files with 1735 additions and 53 deletions
@ -0,0 +1,92 @@
@@ -0,0 +1,92 @@
|
||||
#include "cache.h" |
||||
#include "commit.h" |
||||
#include "config.h" |
||||
#include "revision.h" |
||||
#include "argv-array.h" |
||||
#include "list-objects.h" |
||||
#include "list-objects-filter.h" |
||||
#include "list-objects-filter-options.h" |
||||
|
||||
/* |
||||
* Parse value of the argument to the "filter" keyword. |
||||
* On the command line this looks like: |
||||
* --filter=<arg> |
||||
* and in the pack protocol as: |
||||
* "filter" SP <arg> |
||||
* |
||||
* The filter keyword will be used by many commands. |
||||
* See Documentation/rev-list-options.txt for allowed values for <arg>. |
||||
* |
||||
* Capture the given arg as the "filter_spec". This can be forwarded to |
||||
* subordinate commands when necessary. We also "intern" the arg for |
||||
* the convenience of the current command. |
||||
*/ |
||||
int parse_list_objects_filter(struct list_objects_filter_options *filter_options, |
||||
const char *arg) |
||||
{ |
||||
const char *v0; |
||||
|
||||
if (filter_options->choice) |
||||
die(_("multiple object filter types cannot be combined")); |
||||
|
||||
filter_options->filter_spec = strdup(arg); |
||||
|
||||
if (!strcmp(arg, "blob:none")) { |
||||
filter_options->choice = LOFC_BLOB_NONE; |
||||
return 0; |
||||
} |
||||
|
||||
if (skip_prefix(arg, "blob:limit=", &v0)) { |
||||
if (!git_parse_ulong(v0, &filter_options->blob_limit_value)) |
||||
die(_("invalid filter-spec expression '%s'"), arg); |
||||
filter_options->choice = LOFC_BLOB_LIMIT; |
||||
return 0; |
||||
} |
||||
|
||||
if (skip_prefix(arg, "sparse:oid=", &v0)) { |
||||
struct object_context oc; |
||||
struct object_id sparse_oid; |
||||
|
||||
/* |
||||
* Try to parse <oid-expression> into an OID for the current |
||||
* command, but DO NOT complain if we don't have the blob or |
||||
* ref locally. |
||||
*/ |
||||
if (!get_oid_with_context(v0, GET_OID_BLOB, |
||||
&sparse_oid, &oc)) |
||||
filter_options->sparse_oid_value = oiddup(&sparse_oid); |
||||
filter_options->choice = LOFC_SPARSE_OID; |
||||
return 0; |
||||
} |
||||
|
||||
if (skip_prefix(arg, "sparse:path=", &v0)) { |
||||
filter_options->choice = LOFC_SPARSE_PATH; |
||||
filter_options->sparse_path_value = strdup(v0); |
||||
return 0; |
||||
} |
||||
|
||||
die(_("invalid filter-spec expression '%s'"), arg); |
||||
return 0; |
||||
} |
||||
|
||||
int opt_parse_list_objects_filter(const struct option *opt, |
||||
const char *arg, int unset) |
||||
{ |
||||
struct list_objects_filter_options *filter_options = opt->value; |
||||
|
||||
if (unset || !arg) { |
||||
list_objects_filter_release(filter_options); |
||||
return 0; |
||||
} |
||||
|
||||
return parse_list_objects_filter(filter_options, arg); |
||||
} |
||||
|
||||
void list_objects_filter_release( |
||||
struct list_objects_filter_options *filter_options) |
||||
{ |
||||
free(filter_options->filter_spec); |
||||
free(filter_options->sparse_oid_value); |
||||
free(filter_options->sparse_path_value); |
||||
memset(filter_options, 0, sizeof(*filter_options)); |
||||
} |
@ -0,0 +1,61 @@
@@ -0,0 +1,61 @@
|
||||
#ifndef LIST_OBJECTS_FILTER_OPTIONS_H
#define LIST_OBJECTS_FILTER_OPTIONS_H

#include "parse-options.h"

/*
 * The set of object filters understood by list-objects.
 * LOFC_DISABLED (the zero value) means no filtering was requested.
 */
enum list_objects_filter_choice {
	LOFC_DISABLED = 0,
	LOFC_BLOB_NONE,
	LOFC_BLOB_LIMIT,
	LOFC_SPARSE_OID,
	LOFC_SPARSE_PATH,
	LOFC__COUNT /* must be last */
};

struct list_objects_filter_options {
	/*
	 * 'filter_spec' is the raw argument value given on the command line
	 * or protocol request.  (The part after the "--keyword=".)  For
	 * commands that launch filtering sub-processes, this value should be
	 * passed to them verbatim, as received by the current process.
	 */
	char *filter_spec;

	/*
	 * 'choice' is determined by parsing the filter-spec and selects
	 * the filtering algorithm to use.
	 */
	enum list_objects_filter_choice choice;

	/*
	 * Parsed values (fields) from within the filter-spec.  These are
	 * choice-specific; only the field(s) relevant to 'choice' are
	 * meaningful.
	 */
	struct object_id *sparse_oid_value;
	char *sparse_path_value;
	unsigned long blob_limit_value;
};

/* Normalized command line arguments */
#define CL_ARG__FILTER "filter"

int parse_list_objects_filter(
	struct list_objects_filter_options *filter_options,
	const char *arg);

int opt_parse_list_objects_filter(const struct option *opt,
				  const char *arg, int unset);

#define OPT_PARSE_LIST_OBJECTS_FILTER(fo) \
	{ OPTION_CALLBACK, 0, CL_ARG__FILTER, fo, N_("args"), \
	  N_("object filtering"), 0, \
	  opt_parse_list_objects_filter }

void list_objects_filter_release(
	struct list_objects_filter_options *filter_options);

#endif /* LIST_OBJECTS_FILTER_OPTIONS_H */
@ -0,0 +1,401 @@
@@ -0,0 +1,401 @@
|
||||
#include "cache.h" |
||||
#include "dir.h" |
||||
#include "tag.h" |
||||
#include "commit.h" |
||||
#include "tree.h" |
||||
#include "blob.h" |
||||
#include "diff.h" |
||||
#include "tree-walk.h" |
||||
#include "revision.h" |
||||
#include "list-objects.h" |
||||
#include "list-objects-filter.h" |
||||
#include "list-objects-filter-options.h" |
||||
#include "oidset.h" |
||||
|
||||
/* Remember to update object flag allocation in object.h */ |
||||
/* |
||||
* FILTER_SHOWN_BUT_REVISIT -- we set this bit on tree objects |
||||
* that have been shown, but should be revisited if they appear |
||||
* in the traversal (until we mark it SEEN). This is a way to |
||||
* let us silently de-dup calls to show() in the caller. This |
||||
* is subtly different from the "revision.h:SHOWN" and the |
||||
* "sha1_name.c:ONELINE_SEEN" bits. And also different from |
||||
* the non-de-dup usage in pack-bitmap.c |
||||
*/ |
||||
#define FILTER_SHOWN_BUT_REVISIT (1<<21) |
||||
|
||||
/* |
||||
* A filter for list-objects to omit ALL blobs from the traversal. |
||||
* And to OPTIONALLY collect a list of the omitted OIDs. |
||||
*/ |
||||
struct filter_blobs_none_data { |
||||
struct oidset *omits; |
||||
}; |
||||
|
||||
static enum list_objects_filter_result filter_blobs_none( |
||||
enum list_objects_filter_situation filter_situation, |
||||
struct object *obj, |
||||
const char *pathname, |
||||
const char *filename, |
||||
void *filter_data_) |
||||
{ |
||||
struct filter_blobs_none_data *filter_data = filter_data_; |
||||
|
||||
switch (filter_situation) { |
||||
default: |
||||
die("unknown filter_situation"); |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BEGIN_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
/* always include all tree objects */ |
||||
return LOFR_MARK_SEEN | LOFR_DO_SHOW; |
||||
|
||||
case LOFS_END_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BLOB: |
||||
assert(obj->type == OBJ_BLOB); |
||||
assert((obj->flags & SEEN) == 0); |
||||
|
||||
if (filter_data->omits) |
||||
oidset_insert(filter_data->omits, &obj->oid); |
||||
return LOFR_MARK_SEEN; /* but not LOFR_DO_SHOW (hard omit) */ |
||||
} |
||||
} |
||||
|
||||
static void *filter_blobs_none__init( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn) |
||||
{ |
||||
struct filter_blobs_none_data *d = xcalloc(1, sizeof(*d)); |
||||
d->omits = omitted; |
||||
|
||||
*filter_fn = filter_blobs_none; |
||||
*filter_free_fn = free; |
||||
return d; |
||||
} |
||||
|
||||
/* |
||||
* A filter for list-objects to omit large blobs. |
||||
* And to OPTIONALLY collect a list of the omitted OIDs. |
||||
*/ |
||||
struct filter_blobs_limit_data { |
||||
struct oidset *omits; |
||||
unsigned long max_bytes; |
||||
}; |
||||
|
||||
static enum list_objects_filter_result filter_blobs_limit( |
||||
enum list_objects_filter_situation filter_situation, |
||||
struct object *obj, |
||||
const char *pathname, |
||||
const char *filename, |
||||
void *filter_data_) |
||||
{ |
||||
struct filter_blobs_limit_data *filter_data = filter_data_; |
||||
unsigned long object_length; |
||||
enum object_type t; |
||||
|
||||
switch (filter_situation) { |
||||
default: |
||||
die("unknown filter_situation"); |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BEGIN_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
/* always include all tree objects */ |
||||
return LOFR_MARK_SEEN | LOFR_DO_SHOW; |
||||
|
||||
case LOFS_END_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BLOB: |
||||
assert(obj->type == OBJ_BLOB); |
||||
assert((obj->flags & SEEN) == 0); |
||||
|
||||
t = sha1_object_info(obj->oid.hash, &object_length); |
||||
if (t != OBJ_BLOB) { /* probably OBJ_NONE */ |
||||
/* |
||||
* We DO NOT have the blob locally, so we cannot |
||||
* apply the size filter criteria. Be conservative |
||||
* and force show it (and let the caller deal with |
||||
* the ambiguity). |
||||
*/ |
||||
goto include_it; |
||||
} |
||||
|
||||
if (object_length < filter_data->max_bytes) |
||||
goto include_it; |
||||
|
||||
if (filter_data->omits) |
||||
oidset_insert(filter_data->omits, &obj->oid); |
||||
return LOFR_MARK_SEEN; /* but not LOFR_DO_SHOW (hard omit) */ |
||||
} |
||||
|
||||
include_it: |
||||
if (filter_data->omits) |
||||
oidset_remove(filter_data->omits, &obj->oid); |
||||
return LOFR_MARK_SEEN | LOFR_DO_SHOW; |
||||
} |
||||
|
||||
static void *filter_blobs_limit__init( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn) |
||||
{ |
||||
struct filter_blobs_limit_data *d = xcalloc(1, sizeof(*d)); |
||||
d->omits = omitted; |
||||
d->max_bytes = filter_options->blob_limit_value; |
||||
|
||||
*filter_fn = filter_blobs_limit; |
||||
*filter_free_fn = free; |
||||
return d; |
||||
} |
||||
|
||||
/* |
||||
* A filter driven by a sparse-checkout specification to only |
||||
* include blobs that a sparse checkout would populate. |
||||
* |
||||
* The sparse-checkout spec can be loaded from a blob with the |
||||
* given OID or from a local pathname. We allow an OID because |
||||
* the repo may be bare or we may be doing the filtering on the |
||||
* server. |
||||
*/ |
||||
struct frame { |
||||
/* |
||||
* defval is the usual default include/exclude value that |
||||
* should be inherited as we recurse into directories based |
||||
* upon pattern matching of the directory itself or of a |
||||
* containing directory. |
||||
*/ |
||||
int defval; |
||||
|
||||
/* |
||||
* 1 if the directory (recursively) contains any provisionally |
||||
* omitted objects. |
||||
* |
||||
* 0 if everything (recursively) contained in this directory |
||||
* has been explicitly included (SHOWN) in the result and |
||||
* the directory may be short-cut later in the traversal. |
||||
*/ |
||||
unsigned child_prov_omit : 1; |
||||
}; |
||||
|
||||
struct filter_sparse_data { |
||||
struct oidset *omits; |
||||
struct exclude_list el; |
||||
|
||||
size_t nr, alloc; |
||||
struct frame *array_frame; |
||||
}; |
||||
|
||||
static enum list_objects_filter_result filter_sparse( |
||||
enum list_objects_filter_situation filter_situation, |
||||
struct object *obj, |
||||
const char *pathname, |
||||
const char *filename, |
||||
void *filter_data_) |
||||
{ |
||||
struct filter_sparse_data *filter_data = filter_data_; |
||||
int val, dtype; |
||||
struct frame *frame; |
||||
|
||||
switch (filter_situation) { |
||||
default: |
||||
die("unknown filter_situation"); |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BEGIN_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
dtype = DT_DIR; |
||||
val = is_excluded_from_list(pathname, strlen(pathname), |
||||
filename, &dtype, &filter_data->el, |
||||
&the_index); |
||||
if (val < 0) |
||||
val = filter_data->array_frame[filter_data->nr].defval; |
||||
|
||||
ALLOC_GROW(filter_data->array_frame, filter_data->nr + 1, |
||||
filter_data->alloc); |
||||
filter_data->nr++; |
||||
filter_data->array_frame[filter_data->nr].defval = val; |
||||
filter_data->array_frame[filter_data->nr].child_prov_omit = 0; |
||||
|
||||
/* |
||||
* A directory with this tree OID may appear in multiple |
||||
* places in the tree. (Think of a directory move or copy, |
||||
* with no other changes, so the OID is the same, but the |
||||
* full pathnames of objects within this directory are new |
||||
* and may match is_excluded() patterns differently.) |
||||
* So we cannot mark this directory as SEEN (yet), since |
||||
* that will prevent process_tree() from revisiting this |
||||
* tree object with other pathname prefixes. |
||||
* |
||||
* Only _DO_SHOW the tree object the first time we visit |
||||
* this tree object. |
||||
* |
||||
* We always show all tree objects. A future optimization |
||||
* may want to attempt to narrow this. |
||||
*/ |
||||
if (obj->flags & FILTER_SHOWN_BUT_REVISIT) |
||||
return LOFR_ZERO; |
||||
obj->flags |= FILTER_SHOWN_BUT_REVISIT; |
||||
return LOFR_DO_SHOW; |
||||
|
||||
case LOFS_END_TREE: |
||||
assert(obj->type == OBJ_TREE); |
||||
assert(filter_data->nr > 0); |
||||
|
||||
frame = &filter_data->array_frame[filter_data->nr]; |
||||
filter_data->nr--; |
||||
|
||||
/* |
||||
* Tell our parent directory if any of our children were |
||||
* provisionally omitted. |
||||
*/ |
||||
filter_data->array_frame[filter_data->nr].child_prov_omit |= |
||||
frame->child_prov_omit; |
||||
|
||||
/* |
||||
* If there are NO provisionally omitted child objects (ALL child |
||||
* objects in this folder were INCLUDED), then we can mark the |
||||
* folder as SEEN (so we will not have to revisit it again). |
||||
*/ |
||||
if (!frame->child_prov_omit) |
||||
return LOFR_MARK_SEEN; |
||||
return LOFR_ZERO; |
||||
|
||||
case LOFS_BLOB: |
||||
assert(obj->type == OBJ_BLOB); |
||||
assert((obj->flags & SEEN) == 0); |
||||
|
||||
frame = &filter_data->array_frame[filter_data->nr]; |
||||
|
||||
dtype = DT_REG; |
||||
val = is_excluded_from_list(pathname, strlen(pathname), |
||||
filename, &dtype, &filter_data->el, |
||||
&the_index); |
||||
if (val < 0) |
||||
val = frame->defval; |
||||
if (val > 0) { |
||||
if (filter_data->omits) |
||||
oidset_remove(filter_data->omits, &obj->oid); |
||||
return LOFR_MARK_SEEN | LOFR_DO_SHOW; |
||||
} |
||||
|
||||
/* |
||||
* Provisionally omit it. We've already established that |
||||
* this pathname is not in the sparse-checkout specification |
||||
* with the CURRENT pathname, so we *WANT* to omit this blob. |
||||
* |
||||
* However, a pathname elsewhere in the tree may also |
||||
* reference this same blob, so we cannot reject it yet. |
||||
* Leave the LOFR_ bits unset so that if the blob appears |
||||
* again in the traversal, we will be asked again. |
||||
*/ |
||||
if (filter_data->omits) |
||||
oidset_insert(filter_data->omits, &obj->oid); |
||||
|
||||
/* |
||||
* Remember that at least 1 blob in this tree was |
||||
* provisionally omitted. This prevents us from short |
||||
* cutting the tree in future iterations. |
||||
*/ |
||||
frame->child_prov_omit = 1; |
||||
return LOFR_ZERO; |
||||
} |
||||
} |
||||
|
||||
|
||||
static void filter_sparse_free(void *filter_data) |
||||
{ |
||||
struct filter_sparse_data *d = filter_data; |
||||
/* TODO free contents of 'd' */ |
||||
free(d); |
||||
} |
||||
|
||||
static void *filter_sparse_oid__init( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn) |
||||
{ |
||||
struct filter_sparse_data *d = xcalloc(1, sizeof(*d)); |
||||
d->omits = omitted; |
||||
if (add_excludes_from_blob_to_list(filter_options->sparse_oid_value, |
||||
NULL, 0, &d->el) < 0) |
||||
die("could not load filter specification"); |
||||
|
||||
ALLOC_GROW(d->array_frame, d->nr + 1, d->alloc); |
||||
d->array_frame[d->nr].defval = 0; /* default to include */ |
||||
d->array_frame[d->nr].child_prov_omit = 0; |
||||
|
||||
*filter_fn = filter_sparse; |
||||
*filter_free_fn = filter_sparse_free; |
||||
return d; |
||||
} |
||||
|
||||
static void *filter_sparse_path__init( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn) |
||||
{ |
||||
struct filter_sparse_data *d = xcalloc(1, sizeof(*d)); |
||||
d->omits = omitted; |
||||
if (add_excludes_from_file_to_list(filter_options->sparse_path_value, |
||||
NULL, 0, &d->el, NULL) < 0) |
||||
die("could not load filter specification"); |
||||
|
||||
ALLOC_GROW(d->array_frame, d->nr + 1, d->alloc); |
||||
d->array_frame[d->nr].defval = 0; /* default to include */ |
||||
d->array_frame[d->nr].child_prov_omit = 0; |
||||
|
||||
*filter_fn = filter_sparse; |
||||
*filter_free_fn = filter_sparse_free; |
||||
return d; |
||||
} |
||||
|
||||
typedef void *(*filter_init_fn)( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn); |
||||
|
||||
/* |
||||
* Must match "enum list_objects_filter_choice". |
||||
*/ |
||||
static filter_init_fn s_filters[] = { |
||||
NULL, |
||||
filter_blobs_none__init, |
||||
filter_blobs_limit__init, |
||||
filter_sparse_oid__init, |
||||
filter_sparse_path__init, |
||||
}; |
||||
|
||||
void *list_objects_filter__init( |
||||
struct oidset *omitted, |
||||
struct list_objects_filter_options *filter_options, |
||||
filter_object_fn *filter_fn, |
||||
filter_free_fn *filter_free_fn) |
||||
{ |
||||
filter_init_fn init_fn; |
||||
|
||||
assert((sizeof(s_filters) / sizeof(s_filters[0])) == LOFC__COUNT); |
||||
|
||||
if (filter_options->choice >= LOFC__COUNT) |
||||
die("invalid list-objects filter choice: %d", |
||||
filter_options->choice); |
||||
|
||||
init_fn = s_filters[filter_options->choice]; |
||||
if (init_fn) |
||||
return init_fn(omitted, filter_options, |
||||
filter_fn, filter_free_fn); |
||||
*filter_fn = NULL; |
||||
*filter_free_fn = NULL; |
||||
return NULL; |
||||
} |
@ -0,0 +1,77 @@
@@ -0,0 +1,77 @@
|
||||
#ifndef LIST_OBJECTS_FILTER_H
#define LIST_OBJECTS_FILTER_H

/*
 * During list-object traversal we allow certain objects to be
 * filtered (omitted) from the result.  The active filter uses
 * these result values to guide list-objects.
 *
 * _ZERO      : Do nothing with the object at this time.  It may
 *              be revisited if it appears in another place in
 *              the tree or in another commit during the overall
 *              traversal.
 *
 * _MARK_SEEN : Mark this object as "SEEN" in the object flags.
 *              This will prevent it from being revisited during
 *              the remainder of the traversal.  This DOES NOT
 *              imply that it will be included in the results.
 *
 * _DO_SHOW   : Show this object in the results (call show() on it).
 *              In general, objects should only be shown once, but
 *              this result DOES NOT imply that we mark it SEEN.
 *
 * Most of the time, you want the combination (_MARK_SEEN | _DO_SHOW)
 * but they can be used independently, such as when sparse-checkout
 * pattern matching is being applied.
 *
 * A _MARK_SEEN without _DO_SHOW can be called a hard-omit -- the
 * object is not shown and will never be reconsidered (unless a
 * previous iteration has already shown it).
 *
 * A _DO_SHOW without _MARK_SEEN can be used, for example, to
 * include a directory, but then revisit it to selectively include
 * or omit objects within it.
 *
 * A _ZERO can be called a provisional-omit -- the object is NOT shown,
 * but *may* be revisited (if the object appears again in the traversal).
 * Therefore, it will be omitted from the results *unless* a later
 * iteration causes it to be shown.
 */
enum list_objects_filter_result {
	LOFR_ZERO = 0,
	LOFR_MARK_SEEN = 1<<0,
	LOFR_DO_SHOW = 1<<1,
};

/* The traversal position at which the filter is being consulted. */
enum list_objects_filter_situation {
	LOFS_BEGIN_TREE,
	LOFS_END_TREE,
	LOFS_BLOB
};

/* Per-object decision callback supplied by a concrete filter. */
typedef enum list_objects_filter_result (*filter_object_fn)(
	enum list_objects_filter_situation filter_situation,
	struct object *obj,
	const char *pathname,
	const char *filename,
	void *filter_data);

/* Destructor for a concrete filter's private data. */
typedef void (*filter_free_fn)(void *filter_data);

/*
 * Constructor for the set of defined list-objects filters.
 * Returns a generic "void *filter_data".
 *
 * The returned "filter_fn" will be used by traverse_commit_list()
 * to filter the results.
 *
 * The returned "filter_free_fn" is a destructor for the
 * filter_data.
 */
void *list_objects_filter__init(
	struct oidset *omitted,
	struct list_objects_filter_options *filter_options,
	filter_object_fn *filter_fn,
	filter_free_fn *filter_free_fn);

#endif /* LIST_OBJECTS_FILTER_H */
@ -0,0 +1,375 @@
@@ -0,0 +1,375 @@
|
||||
#!/bin/sh

test_description='git pack-objects using object filtering'

. ./test-lib.sh

# Test blob:none filter.

# NOTE: the original loop bodies were not &&-chained, so a failure of
# any command inside a loop was silently ignored; "|| return 1" makes
# each iteration fail the test, per t/README guidelines.

test_expect_success 'setup r1' '
	echo "{print \$1}" >print_1.awk &&
	echo "{print \$2}" >print_2.awk &&

	git init r1 &&
	for n in 1 2 3 4 5
	do
		echo "This is file: $n" >r1/file.$n &&
		git -C r1 add file.$n &&
		git -C r1 commit -m "$n" || return 1
	done
'

test_expect_success 'verify blob count in normal packfile' '
	git -C r1 ls-files -s file.1 file.2 file.3 file.4 file.5 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r1 pack-objects --rev --stdout >all.pack <<-EOF &&
	HEAD
	EOF
	git -C r1 index-pack ../all.pack &&
	git -C r1 verify-pack -v ../all.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify blob:none packfile has no blobs' '
	git -C r1 pack-objects --rev --stdout --filter=blob:none >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r1 index-pack ../filter.pack &&
	git -C r1 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	nr=$(wc -l <observed) &&
	test 0 -eq $nr
'

test_expect_success 'verify normal and blob:none packfiles have same commits/trees' '
	git -C r1 verify-pack -v ../all.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r1 verify-pack -v ../filter.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

# Test blob:limit=<n>[kmg] filter.
# We boundary test around the size parameter.  The filter is strictly less
# than the value, so size 500 and 1000 should have the same results, but
# 1001 should filter more.

test_expect_success 'setup r2' '
	git init r2 &&
	for n in 1000 10000
	do
		printf "%"$n"s" X >r2/large.$n &&
		git -C r2 add large.$n &&
		git -C r2 commit -m "$n" || return 1
	done
'

test_expect_success 'verify blob count in normal packfile' '
	git -C r2 ls-files -s large.1000 large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 pack-objects --rev --stdout >all.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../all.pack &&
	git -C r2 verify-pack -v ../all.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify blob:limit=500 omits all blobs' '
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=500 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	nr=$(wc -l <observed) &&
	test 0 -eq $nr
'

test_expect_success 'verify blob:limit=1000' '
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=1000 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	nr=$(wc -l <observed) &&
	test 0 -eq $nr
'

test_expect_success 'verify blob:limit=1001' '
	git -C r2 ls-files -s large.1000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=1001 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify blob:limit=10001' '
	git -C r2 ls-files -s large.1000 large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=10001 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify blob:limit=1k' '
	git -C r2 ls-files -s large.1000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=1k >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify blob:limit=1m' '
	git -C r2 ls-files -s large.1000 large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 pack-objects --rev --stdout --filter=blob:limit=1m >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r2 index-pack ../filter.pack &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify normal and blob:limit packfiles have same commits/trees' '
	git -C r2 verify-pack -v ../all.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r2 verify-pack -v ../filter.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

# Test sparse:path=<path> filter.
# Use a local file containing a sparse-checkout specification to filter
# out blobs not required for the corresponding sparse-checkout.  We do not
# require sparse-checkout to actually be enabled.

test_expect_success 'setup r3' '
	git init r3 &&
	mkdir r3/dir1 &&
	for n in sparse1 sparse2
	do
		echo "This is file: $n" >r3/$n &&
		git -C r3 add $n &&
		echo "This is file: dir1/$n" >r3/dir1/$n &&
		git -C r3 add dir1/$n || return 1
	done &&
	git -C r3 commit -m "sparse" &&
	echo dir1/ >pattern1 &&
	echo sparse1 >pattern2
'

test_expect_success 'verify blob count in normal packfile' '
	git -C r3 ls-files -s sparse1 sparse2 dir1/sparse1 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 pack-objects --rev --stdout >all.pack <<-EOF &&
	HEAD
	EOF
	git -C r3 index-pack ../all.pack &&
	git -C r3 verify-pack -v ../all.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify sparse:path=pattern1' '
	git -C r3 ls-files -s dir1/sparse1 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 pack-objects --rev --stdout --filter=sparse:path=../pattern1 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r3 index-pack ../filter.pack &&
	git -C r3 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify normal and sparse:path=pattern1 packfiles have same commits/trees' '
	git -C r3 verify-pack -v ../all.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r3 verify-pack -v ../filter.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify sparse:path=pattern2' '
	git -C r3 ls-files -s sparse1 dir1/sparse1 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 pack-objects --rev --stdout --filter=sparse:path=../pattern2 >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r3 index-pack ../filter.pack &&
	git -C r3 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify normal and sparse:path=pattern2 packfiles have same commits/trees' '
	git -C r3 verify-pack -v ../all.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r3 verify-pack -v ../filter.pack \
		| grep -E "commit|tree" \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

# Test sparse:oid=<oid-ish> filter.
# Like sparse:path, but we get the sparse-checkout specification from
# a blob rather than a file on disk.

test_expect_success 'setup r4' '
	git init r4 &&
	mkdir r4/dir1 &&
	for n in sparse1 sparse2
	do
		echo "This is file: $n" >r4/$n &&
		git -C r4 add $n &&
		echo "This is file: dir1/$n" >r4/dir1/$n &&
		git -C r4 add dir1/$n || return 1
	done &&
	echo dir1/ >r4/pattern &&
	git -C r4 add pattern &&
	git -C r4 commit -m "pattern"
'

test_expect_success 'verify blob count in normal packfile' '
	git -C r4 ls-files -s pattern sparse1 sparse2 dir1/sparse1 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r4 pack-objects --rev --stdout >all.pack <<-EOF &&
	HEAD
	EOF
	git -C r4 index-pack ../all.pack &&
	git -C r4 verify-pack -v ../all.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify sparse:oid=OID' '
	git -C r4 ls-files -s dir1/sparse1 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	oid=$(git -C r4 ls-files -s pattern | awk -f print_2.awk) &&
	git -C r4 pack-objects --rev --stdout --filter=sparse:oid=$oid >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r4 index-pack ../filter.pack &&
	git -C r4 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

test_expect_success 'verify sparse:oid=oid-ish' '
	git -C r4 ls-files -s dir1/sparse1 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r4 pack-objects --rev --stdout --filter=sparse:oid=master:pattern >filter.pack <<-EOF &&
	HEAD
	EOF
	git -C r4 index-pack ../filter.pack &&
	git -C r4 verify-pack -v ../filter.pack \
		| grep blob \
		| awk -f print_1.awk \
		| sort >observed &&
	test_cmp observed expected
'

# Delete some loose objects and use pack-objects, but WITHOUT any filtering.
# This models previously omitted objects that we did not receive.

test_expect_success 'setup r1 - delete loose blobs' '
	git -C r1 ls-files -s file.1 file.2 file.3 file.4 file.5 \
		| awk -f print_2.awk \
		| sort >expected &&
	for id in $(sed "s|..|&/|" expected)
	do
		rm r1/.git/objects/$id || return 1
	done
'

test_expect_success 'verify pack-objects fails w/ missing objects' '
	test_must_fail git -C r1 pack-objects --rev --stdout >miss.pack <<-EOF
	HEAD
	EOF
'

test_expect_success 'verify pack-objects fails w/ --missing=error' '
	test_must_fail git -C r1 pack-objects --rev --stdout --missing=error >miss.pack <<-EOF
	HEAD
	EOF
'

test_expect_success 'verify pack-objects w/ --missing=allow-any' '
	git -C r1 pack-objects --rev --stdout --missing=allow-any >miss.pack <<-EOF
	HEAD
	EOF
'

test_done
@ -0,0 +1,225 @@
@@ -0,0 +1,225 @@
|
||||
#!/bin/sh |
||||
|
||||
test_description='git rev-list using object filtering' |
||||
|
||||
. ./test-lib.sh |
||||
|
||||
# Test the blob:none filter. |
||||
|
||||
# Create a repo with five single-file commits, plus two tiny awk scripts
# used throughout to select the first (object id) or second (path/oid)
# column of git output.  Commands inside the loop are &&-chained so that
# a failure anywhere aborts the test instead of being silently ignored.
test_expect_success 'setup r1' '
	echo "{print \$1}" >print_1.awk &&
	echo "{print \$2}" >print_2.awk &&

	git init r1 &&
	for n in 1 2 3 4 5
	do
		echo "This is file: $n" >r1/file.$n &&
		git -C r1 add file.$n &&
		git -C r1 commit -m "$n" || return 1
	done
'
||||
|
||||
# blob:none must omit every blob; the omitted list (lines prefixed "~")
# should therefore equal the set of all five blob ids.
test_expect_success 'verify blob:none omits all 5 blobs' '
	git -C r1 ls-files -s file.1 file.2 file.3 file.4 file.5 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r1 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:none \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# Sanity check: the filtered traversal plus its omitted list must cover
# exactly the same objects as an unfiltered traversal.
test_expect_success 'verify emitted+omitted == all' '
	git -C r1 rev-list HEAD --objects \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r1 rev-list HEAD --objects --filter-print-omitted --filter=blob:none \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'
||||
|
||||
|
||||
# Test blob:limit=<n>[kmg] filter. |
||||
# We boundary test around the size parameter. The filter is strictly less than |
||||
# the value, so size 500 and 1000 should have the same results, but 1001 should |
||||
# filter more. |
||||
|
||||
# Create blobs of exactly 1000 and 10000 bytes (printf pads "X" with
# spaces to the requested width) for boundary-testing blob:limit=<n>.
# Loop body is &&-chained so a failed add/commit fails the test.
test_expect_success 'setup r2' '
	git init r2 &&
	for n in 1000 10000
	do
		printf "%"$n"s" X >r2/large.$n &&
		git -C r2 add large.$n &&
		git -C r2 commit -m "$n" || return 1
	done
'
||||
|
||||
# Both blobs are >= 500 bytes, so blob:limit=500 omits them both.
test_expect_success 'verify blob:limit=500 omits all blobs' '
	git -C r2 ls-files -s large.1000 large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:limit=500 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# Filtered output plus omitted list must cover the full object set.
test_expect_success 'verify emitted+omitted == all' '
	git -C r2 rev-list HEAD --objects \
		| awk -f print_1.awk \
		| sort >expected &&
	git -C r2 rev-list HEAD --objects --filter-print-omitted --filter=blob:limit=500 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# A 1000-byte blob is not strictly less than 1000, so it is still
# omitted; same result as limit=500.
test_expect_success 'verify blob:limit=1000' '
	git -C r2 ls-files -s large.1000 large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:limit=1000 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# At limit=1001 the 1000-byte blob is now included; only the 10000-byte
# blob is omitted.
test_expect_success 'verify blob:limit=1001' '
	git -C r2 ls-files -s large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:limit=1001 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# The "k" suffix (1k == 1024) behaves like limit=1024.
test_expect_success 'verify blob:limit=1k' '
	git -C r2 ls-files -s large.10000 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r2 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:limit=1k \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# With a 1m limit nothing is large enough to be omitted.
test_expect_success 'verify blob:limit=1m' '
	>expected &&
	git -C r2 rev-list HEAD --quiet --objects --filter-print-omitted --filter=blob:limit=1m \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'
||||
|
||||
# Test sparse:path=<path> filter. |
||||
# Use a local file containing a sparse-checkout specification to filter |
||||
# out blobs not required for the corresponding sparse-checkout. We do not |
||||
# require sparse-checkout to actually be enabled. |
||||
|
||||
# Create matching file names at the top level and under dir1/ so the
# sparse filters can distinguish path-based from name-based selection.
# Loop body is &&-chained so any failed write/add aborts the test.
test_expect_success 'setup r3' '
	git init r3 &&
	mkdir r3/dir1 &&
	for n in sparse1 sparse2
	do
		echo "This is file: $n" >r3/$n &&
		git -C r3 add $n &&
		echo "This is file: dir1/$n" >r3/dir1/$n &&
		git -C r3 add dir1/$n || return 1
	done &&
	git -C r3 commit -m "sparse" &&
	echo dir1/ >pattern1 &&
	echo sparse1 >pattern2
'
||||
|
||||
# pattern1 selects dir1/ only, so the two top-level blobs are omitted.
test_expect_success 'verify sparse:path=pattern1 omits top-level files' '
	git -C r3 ls-files -s sparse1 sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 rev-list HEAD --quiet --objects --filter-print-omitted --filter=sparse:path=../pattern1 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# pattern2 selects files named sparse1 anywhere, so both sparse2 blobs
# (top level and dir1/) are omitted.
test_expect_success 'verify sparse:path=pattern2 omits both sparse2 files' '
	git -C r3 ls-files -s sparse2 dir1/sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 rev-list HEAD --quiet --objects --filter-print-omitted --filter=sparse:path=../pattern2 \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'
||||
|
||||
# Test sparse:oid=<oid-ish> filter. |
||||
# Like sparse:path, but we get the sparse-checkout specification from |
||||
# a blob rather than a file on disk. |
||||
|
||||
# Commit the sparse specification itself so the sparse:oid filter can
# read it from a blob instead of from a file on disk.
test_expect_success 'setup r3 part 2' '
	printf "dir1/\n" >r3/pattern &&
	git -C r3 add pattern &&
	git -C r3 commit -m "pattern"
'
||||
|
||||
# Resolve the pattern blob to its raw oid and filter by it; the pattern
# blob and the two top-level sparse files fall outside dir1/ and are
# expected to be omitted.
test_expect_success 'verify sparse:oid=OID omits top-level files' '
	git -C r3 ls-files -s pattern sparse1 sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	oid=$(git -C r3 ls-files -s pattern | awk -f print_2.awk) &&
	git -C r3 rev-list HEAD --quiet --objects --filter-print-omitted --filter=sparse:oid=$oid \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'

# Same as above but name the blob with an oid-ish ("master:pattern")
# instead of a raw object id.
test_expect_success 'verify sparse:oid=oid-ish omits top-level files' '
	git -C r3 ls-files -s pattern sparse1 sparse2 \
		| awk -f print_2.awk \
		| sort >expected &&
	git -C r3 rev-list HEAD --quiet --objects --filter-print-omitted --filter=sparse:oid=master:pattern \
		| awk -f print_1.awk \
		| sed "s/~//" \
		| sort >observed &&
	test_cmp expected observed
'
||||
|
||||
# Delete some loose objects and use rev-list, but WITHOUT any filtering. |
||||
# This models previously omitted objects that we did not receive. |
||||
|
||||
# Delete the loose blobs (inserting "xy/" after the first two hex digits
# turns an oid into its loose-object path), then confirm that
# --missing=print reports exactly those ids ("?"-prefixed lines).
# The rm loop is &&-chained via || return 1 so a failed delete aborts.
test_expect_success 'rev-list W/ --missing=print' '
	git -C r1 ls-files -s file.1 file.2 file.3 file.4 file.5 \
		| awk -f print_2.awk \
		| sort >expected &&
	for id in $(sed "s|..|&/|" expected)
	do
		rm r1/.git/objects/$id || return 1
	done &&
	git -C r1 rev-list --quiet HEAD --missing=print --objects \
		| awk -f print_1.awk \
		| sed "s/?//" \
		| sort >observed &&
	test_cmp expected observed
'

# Without --missing, traversal over the damaged repo must fail.
test_expect_success 'rev-list W/O --missing fails' '
	test_must_fail git -C r1 rev-list --quiet --objects HEAD
'

# --missing=allow-any tolerates the deleted blobs.
test_expect_success 'rev-list W/ missing=allow-any' '
	git -C r1 rev-list --quiet --missing=allow-any --objects HEAD
'

test_done
Loading…
Reference in new issue