Merge branch 'ab/test-tool-leakfix'

Plug various memory leaks in test-tool commands.

* ab/test-tool-leakfix:
  test-tool delta: fix a memory leak
  test-tool ref-store: fix a memory leak
  test-tool bloom: fix memory leaks
  test-tool json-writer: fix memory leaks
  test-tool regex: call regfree(), fix memory leaks
  test-tool urlmatch-normalization: fix a memory leak
  test-tool {dump,scrap}-cache-tree: fix memory leaks
  test-tool path-utils: fix a memory leak
  test-tool test-hash: fix a memory leak
Junio C Hamano 2022-07-18 13:31:54 -07:00
commit f63ac61fbf
21 changed files with 77 additions and 24 deletions
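
Most of the patches below follow one of two shapes: pair every allocation with a matching release call on each exit path, or funnel all exits through a single cleanup label. The sketch here is a minimal, self-contained illustration of the cleanup-label shape (as used in the test-delta, test-regex and test-urlmatch-normalization changes); the function and file handling are illustrative only, not git code.

#include <stdio.h>
#include <stdlib.h>

/*
 * Read a whole file into a freshly allocated buffer.  Pointers start out
 * NULL so that free() is safe no matter which error path is taken, and
 * every failure jumps to the one cleanup label instead of returning early.
 */
static int read_file(const char *path, char **out, long *out_len)
{
	FILE *fp = NULL;
	char *buf = NULL;
	long len;
	int ret = -1;

	fp = fopen(path, "rb");
	if (!fp)
		goto cleanup;
	if (fseek(fp, 0, SEEK_END) != 0 || (len = ftell(fp)) < 0)
		goto cleanup;
	rewind(fp);

	buf = malloc(len + 1);
	if (!buf)
		goto cleanup;
	if (fread(buf, 1, len, fp) != (size_t)len)
		goto cleanup;
	buf[len] = '\0';

	*out = buf;
	*out_len = len;
	buf = NULL;	/* ownership handed to the caller */
	ret = 0;
cleanup:
	if (fp)
		fclose(fp);
	free(buf);	/* no-op on success and on pre-allocation failures */
	return ret;
}

int main(int argc, char **argv)
{
	char *data;
	long len;

	if (argc != 2 || read_file(argv[1], &data, &len))
		return 1;
	printf("read %ld bytes\n", len);
	free(data);
	return 0;
}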

t/helper/test-bloom.c

@@ -16,6 +16,7 @@ static void add_string_to_filter(const char *data, struct bloom_filter *filter)
}
printf("\n");
add_key_to_filter(&key, filter, &settings);
clear_bloom_key(&key);
}

static void print_bloom_filter(struct bloom_filter *filter) {
@@ -80,6 +81,7 @@ int cmd__bloom(int argc, const char **argv)
}

print_bloom_filter(&filter);
free(filter.data);
}

if (!strcmp(argv[1], "get_filter_for_commit")) {

t/helper/test-delta.c

@@ -20,8 +20,9 @@ int cmd__delta(int argc, const char **argv)
{
int fd;
struct stat st;
void *from_buf, *data_buf, *out_buf;
void *from_buf = NULL, *data_buf = NULL, *out_buf = NULL;
unsigned long from_size, data_size, out_size;
int ret = 1;

if (argc != 5 || (strcmp(argv[1], "-d") && strcmp(argv[1], "-p"))) {
fprintf(stderr, "usage: %s\n", usage_str);
@@ -38,21 +39,21 @@ int cmd__delta(int argc, const char **argv)
if (read_in_full(fd, from_buf, from_size) < 0) {
perror(argv[2]);
close(fd);
return 1;
goto cleanup;
}
close(fd);

fd = open(argv[3], O_RDONLY);
if (fd < 0 || fstat(fd, &st)) {
perror(argv[3]);
return 1;
goto cleanup;
}
data_size = st.st_size;
data_buf = xmalloc(data_size);
if (read_in_full(fd, data_buf, data_size) < 0) {
perror(argv[3]);
close(fd);
return 1;
goto cleanup;
}
close(fd);

@@ -66,14 +67,20 @@ int cmd__delta(int argc, const char **argv)
&out_size);
if (!out_buf) {
fprintf(stderr, "delta operation failed (returned NULL)\n");
return 1;
goto cleanup;
}

fd = open (argv[4], O_WRONLY|O_CREAT|O_TRUNC, 0666);
if (fd < 0 || write_in_full(fd, out_buf, out_size) < 0) {
perror(argv[4]);
return 1;
goto cleanup;
}

return 0;
ret = 0;
cleanup:
free(from_buf);
free(data_buf);
free(out_buf);

return ret;
}

t/helper/test-dump-cache-tree.c

@@ -59,11 +59,16 @@ int cmd__dump_cache_tree(int ac, const char **av)
{
struct index_state istate;
struct cache_tree *another = cache_tree();
int ret;

setup_git_directory();
if (read_cache() < 0)
die("unable to read index file");
istate = the_index;
istate.cache_tree = another;
cache_tree_update(&istate, WRITE_TREE_DRY_RUN);
return dump_cache_tree(active_cache_tree, another, "");
ret = dump_cache_tree(active_cache_tree, another, "");
cache_tree_free(&another);

return ret;
}

t/helper/test-hash.c

@@ -54,5 +54,6 @@ int cmd_hash_impl(int ac, const char **av, int algo)
fwrite(hash, 1, algop->rawsz, stdout);
else
puts(hash_to_hex_algop(hash, algop));
free(buffer);
return 0;
}

t/helper/test-json-writer.c

@@ -181,12 +181,18 @@ static struct json_writer nest1 = JSON_WRITER_INIT;

static void make_nest1(int pretty)
{
make_obj1(0);
make_arr1(0);

jw_object_begin(&nest1, pretty);
{
jw_object_sub_jw(&nest1, "obj1", &obj1);
jw_object_sub_jw(&nest1, "arr1", &arr1);
}
jw_end(&nest1);

jw_release(&obj1);
jw_release(&arr1);
}

static char *expect_inline1 =
@@ -313,6 +319,9 @@ static void make_mixed1(int pretty)
jw_object_sub_jw(&mixed1, "arr1", &arr1);
}
jw_end(&mixed1);

jw_release(&obj1);
jw_release(&arr1);
}

static void cmp(const char *test, const struct json_writer *jw, const char *exp)
@@ -325,8 +334,8 @@ static void cmp(const char *test, const struct json_writer *jw, const char *exp)
exit(1);
}

#define t(v) do { make_##v(0); cmp(#v, &v, expect_##v); } while (0)
#define p(v) do { make_##v(1); cmp(#v, &v, pretty_##v); } while (0)
#define t(v) do { make_##v(0); cmp(#v, &v, expect_##v); jw_release(&v); } while (0)
#define p(v) do { make_##v(1); cmp(#v, &v, pretty_##v); jw_release(&v); } while (0)

/*
* Run some basic regression tests with some known patterns.
@@ -381,7 +390,6 @@ static int unit_tests(void)

/* mixed forms */
t(mixed1);
jw_init(&mixed1);
p(mixed1);

return 0;
@@ -544,7 +552,7 @@ static int scripted(void)

printf("%s\n", jw.json.buf);

strbuf_release(&jw.json);
jw_release(&jw);
return 0;
}
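
For context, a short sketch of the json_writer lifecycle the changes above complete: every writer that is initialized or written into is eventually handed to jw_release() once its output has been consumed. This assumes git's "json-writer.h" as used in the hunks above; jw_object_string() is the scalar helper assumed here, and the fragment is meant to live alongside the helper code, which already pulls in the needed headers.

#include "json-writer.h"

static void emit_example(void)
{
	struct json_writer jw = JSON_WRITER_INIT;

	jw_object_begin(&jw, 1 /* pretty */);
	jw_object_string(&jw, "key", "value");
	jw_end(&jw);

	/* the rendered JSON lives in the embedded strbuf */
	printf("%s\n", jw.json.buf);

	jw_release(&jw);	/* mirrors the fix to scripted() above */
}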


t/helper/test-path-utils.c

@@ -296,9 +296,8 @@ int cmd__path_utils(int argc, const char **argv)
if (argc == 3 && !strcmp(argv[1], "normalize_path_copy")) {
char *buf = xmallocz(strlen(argv[2]));
int rv = normalize_path_copy(buf, argv[2]);
if (rv)
buf = "++failed++";
puts(buf);
puts(rv ? "++failed++" : buf);
free(buf);
return 0;
}

@@ -356,7 +355,10 @@ int cmd__path_utils(int argc, const char **argv)
int nongit_ok;
setup_git_directory_gently(&nongit_ok);
while (argc > 3) {
puts(prefix_path(prefix, prefix_len, argv[3]));
char *pfx = prefix_path(prefix, prefix_len, argv[3]);

puts(pfx);
free(pfx);
argc--;
argv++;
}
@@ -366,6 +368,7 @@
if (argc == 4 && !strcmp(argv[1], "strip_path_suffix")) {
char *prefix = strip_path_suffix(argv[2], argv[3]);
printf("%s\n", prefix ? prefix : "(null)");
free(prefix);
return 0;
}
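
The normalize_path_copy hunk above pairs the new free(buf) with a subtle prerequisite: the failure path must no longer repoint buf at the string literal "++failed++", or the final free() would receive a non-heap pointer and the real allocation would leak. Below is a self-contained illustration of that pattern; transform() is a made-up stand-in for any helper that returns 0 on success.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* toy stand-in for normalize_path_copy(): "fails" on empty input */
static int transform(char *dst, const char *src)
{
	if (!*src)
		return -1;
	strcpy(dst, src);
	return 0;
}

static void report(const char *arg)
{
	char *buf = malloc(strlen(arg) + 1);
	int rv = buf ? transform(buf, arg) : -1;

	puts(rv ? "++failed++" : buf);	/* report failure without reassigning buf */
	free(buf);			/* always the original allocation (or NULL) */
}

int main(int argc, char **argv)
{
	for (int i = 1; i < argc; i++)
		report(argv[i]);
	return 0;
}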


t/helper/test-ref-store.c

@@ -96,6 +96,7 @@ static const char **get_store(const char **argv, struct ref_store **refs)
die("no such worktree: %s", gitdir);

*refs = get_worktree_ref_store(*p);
free_worktrees(worktrees);
} else
die("unknown backend %s", argv[0]);


t/helper/test-regex.c

@@ -34,6 +34,7 @@ static int test_regex_bug(void)
if (m[0].rm_so == 3) /* matches '\n' when it should not */
die("regex bug confirmed: re-build git with NO_REGEX=1");

regfree(&r);
return 0;
}

@@ -94,18 +95,20 @@ int cmd__regex(int argc, const char **argv)
die("failed regcomp() for pattern '%s' (%s)", pat, errbuf);
}
if (!str)
return 0;
goto cleanup;

ret = regexec(&r, str, 1, m, 0);
if (ret) {
if (silent || ret == REG_NOMATCH)
return ret;
goto cleanup;

regerror(ret, &r, errbuf, sizeof(errbuf));
die("failed regexec() for subject '%s' (%s)", str, errbuf);
}

return 0;
cleanup:
regfree(&r);
return ret;
usage:
usage("\ttest-tool regex --bug\n"
"\ttest-tool regex [--silent] <pattern>\n"

t/helper/test-scrap-cache-tree.c

@@ -12,6 +12,7 @@ int cmd__scrap_cache_tree(int ac, const char **av)
hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
if (read_cache() < 0)
die("unable to read index file");
cache_tree_free(&active_cache_tree);
active_cache_tree = NULL;
if (write_locked_index(&the_index, &index_lock, COMMIT_LOCK))
die("unable to write index file");

t/helper/test-urlmatch-normalization.c

@@ -5,8 +5,9 @@
int cmd__urlmatch_normalization(int argc, const char **argv)
{
const char usage[] = "test-tool urlmatch-normalization [-p | -l] <url1> | <url1> <url2>";
char *url1, *url2;
char *url1 = NULL, *url2 = NULL;
int opt_p = 0, opt_l = 0;
int ret = 0;

/*
* For one url, succeed if url_normalize succeeds on it, fail otherwise.
@@ -39,7 +40,7 @@ int cmd__urlmatch_normalization(int argc, const char **argv)
printf("%s\n", url1);
if (opt_l)
printf("%u\n", (unsigned)info.url_len);
return 0;
goto cleanup;
}

if (opt_p || opt_l)
@@ -47,5 +48,9 @@ int cmd__urlmatch_normalization(int argc, const char **argv)

url1 = url_normalize(argv[1], NULL);
url2 = url_normalize(argv[2], NULL);
return (url1 && url2 && !strcmp(url1, url2)) ? 0 : 1;
ret = (url1 && url2 && !strcmp(url1, url2)) ? 0 : 1;
cleanup:
free(url1);
free(url2);
return ret;
}

t/t0015-hash.sh

@@ -1,8 +1,9 @@
#!/bin/sh

test_description='test basic hash implementation'
. ./test-lib.sh

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

test_expect_success 'test basic SHA-1 hash values' '
test-tool sha1 </dev/null >actual &&

t/t0019-json-writer.sh

@@ -1,6 +1,8 @@
#!/bin/sh

test_description='test json-writer JSON generation'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

test_expect_success 'unit test of json-writer routines' '

t/t0060-path-utils.sh

@@ -5,6 +5,7 @@

test_description='Test various path utilities'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

norm_path() {

t/t0090-cache-tree.sh

@@ -5,6 +5,8 @@ test_description="Test whether cache-tree is properly updated
Tests whether various commands properly update and/or rewrite the
cache-tree extension.
"

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

cmp_cache_tree () {

t/t0095-bloom.sh

@@ -67,7 +67,7 @@ test_expect_success 'compute bloom key for test string 2' '
test_cmp expect actual
'

test_expect_success 'get bloom filters for commit with no changes' '
test_expect_success !SANITIZE_LEAK 'get bloom filters for commit with no changes' '
git init &&
git commit --allow-empty -m "c0" &&
cat >expect <<-\EOF &&

t/t0110-urlmatch-normalization.sh

@@ -1,6 +1,8 @@
#!/bin/sh

test_description='urlmatch URL normalization'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

# The base name of the test url files

t/t5303-pack-corruption-resilience.sh

@@ -4,6 +4,8 @@
#

test_description='resilience to pack corruptions with redundant objects'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh

# Note: the test objects are created with knowledge of their pack encoding

t/t5308-pack-detect-duplicates.sh

@@ -1,6 +1,8 @@
#!/bin/sh

test_description='handling of duplicate objects in incoming packfiles'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-pack.sh


t/t5309-pack-delta-cycles.sh

@@ -1,6 +1,8 @@
#!/bin/sh

test_description='test index-pack handling of delta cycles in packfiles'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-pack.sh


t/t5316-pack-delta-depth.sh

@@ -6,6 +6,8 @@ test_description='git pack-object with "large" deltas
test_description='git pack-object with "large" deltas

'

TEST_PASSES_SANITIZE_LEAK=true
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-pack.sh


t/t7812-grep-icase-non-ascii.sh

@@ -2,6 +2,7 @@

test_description='grep icase on non-English locales'

TEST_PASSES_SANITIZE_LEAK=true
. ./lib-gettext.sh

doalarm () {