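This patch threads the originally requested URL (original_url) through wget's FTP retrieval path (ftp_loop, ftp_loop_internal, getftp, ftp_get_listing, ftp_retrieve_list, ftp_retrieve_dirs, ftp_retrieve_glob) so that, unless --trust-server-names is in effect, the local file name is derived from the URL the user asked for rather than from a server-supplied name. The core decision appears in the ftp_loop_internal() and ftp_loop() hunks as `opt.trustservernames || !original_url ? u : original_url` passed to url_file_name(). Below is a minimal standalone sketch of that naming decision; the names trust_server_names and name_source and the stubbed struct url are stand-ins for illustration only, not part of the patch.

#include <stdbool.h>
#include <stdio.h>

/* Stand-ins for wget's struct url and opt.trustservernames; illustrative only. */
struct url { const char *file; };
static bool trust_server_names = false;

/* Mirrors the selection added in ftp_loop_internal()/ftp_loop(): name the
   local file after the original URL unless --trust-server-names is given,
   or no original URL was handed down (direct, unredirected retrieval). */
static const struct url *
name_source (const struct url *u, const struct url *original_url)
{
  return (trust_server_names || !original_url) ? u : original_url;
}

int
main (void)
{
  struct url redirected = { "server-chosen-name" };
  struct url original = { "user-requested-name" };

  printf ("%s\n", name_source (&redirected, &original)->file); /* user-requested-name */
  trust_server_names = true;
  printf ("%s\n", name_source (&redirected, &original)->file); /* server-chosen-name */
  return 0;
}

The retr.c hunk at the end supplies original_url by passing orig_parsed from retrieve_url() into ftp_loop(), which is what makes the original URL available to the FTP code in the first place.
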
diff --git a/src/ftp.c b/src/ftp.c
index 2be2c76..345718f 100644
--- a/src/ftp.c
+++ b/src/ftp.c
@@ -234,14 +234,15 @@ print_length (wgint size, wgint start, bool authoritative)
logputs (LOG_VERBOSE, !authoritative ? _(" (unauthoritative)\n") : "\n");
}
-static uerr_t ftp_get_listing (struct url *, ccon *, struct fileinfo **);
+static uerr_t ftp_get_listing (struct url *, struct url *, ccon *, struct fileinfo **);
/* Retrieves a file with denoted parameters through opening an FTP
connection to the server. It always closes the data connection,
and closes the control connection in case of error. If warc_tmp
is non-NULL, the downloaded data will be written there as well. */
static uerr_t
-getftp (struct url *u, wgint passed_expected_bytes, wgint *qtyread,
+getftp (struct url *u, struct url *original_url,
+ wgint passed_expected_bytes, wgint *qtyread,
wgint restval, ccon *con, int count, FILE *warc_tmp)
{
int csock, dtsock, local_sock, res;
@@ -944,7 +945,7 @@ Error in server response, closing control connection.\n"));
bool exists = false;
uerr_t res;
struct fileinfo *f;
- res = ftp_get_listing (u, con, &f);
+ res = ftp_get_listing (u, original_url, con, &f);
/* Set the DO_RETR command flag again, because it gets unset when
calling ftp_get_listing() and would otherwise cause an assertion
failure earlier on when this function gets repeatedly called
@@ -1392,7 +1393,8 @@ Error in server response, closing control connection.\n"));
This loop either gets commands from con, or (if ON_YOUR_OWN is
set), makes them up to retrieve the file given by the URL. */
static uerr_t
-ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_file)
+ftp_loop_internal (struct url *u, struct url *original_url, struct fileinfo *f,
+ ccon *con, char **local_file)
{
int count, orig_lp;
wgint restval, len = 0, qtyread = 0;
@@ -1415,7 +1417,7 @@ ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_fi
else
{
/* URL-derived file. Consider "-O file" name. */
- con->target = url_file_name (u, NULL);
+ con->target = url_file_name (opt.trustservernames || !original_url ? u : original_url, NULL);
if (!opt.output_document)
locf = con->target;
else
@@ -1524,7 +1526,7 @@ ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_fi
/* If we are working on a WARC record, getftp should also write
to the warc_tmp file. */
- err = getftp (u, len, &qtyread, restval, con, count, warc_tmp);
+ err = getftp (u, original_url, len, &qtyread, restval, con, count, warc_tmp);
if (con->csock == -1)
con->st &= ~DONE_CWD;
@@ -1677,7 +1679,8 @@ Removing file due to --delete-after in ftp_loop_internal():\n"));
/* Return the directory listing in a reusable format. The directory
is specifed in u->dir. */
static uerr_t
-ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f)
+ftp_get_listing (struct url *u, struct url *original_url, ccon *con,
+ struct fileinfo **f)
{
uerr_t err;
char *uf; /* url file name */
@@ -1698,7 +1701,7 @@ ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f)
con->target = xstrdup (lf);
xfree (lf);
- err = ftp_loop_internal (u, NULL, con, NULL);
+ err = ftp_loop_internal (u, original_url, NULL, con, NULL);
lf = xstrdup (con->target);
xfree (con->target);
con->target = old_target;
@@ -1721,8 +1724,9 @@ ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f)
return err;
}
-static uerr_t ftp_retrieve_dirs (struct url *, struct fileinfo *, ccon *);
-static uerr_t ftp_retrieve_glob (struct url *, ccon *, int);
+static uerr_t ftp_retrieve_dirs (struct url *, struct url *,
+ struct fileinfo *, ccon *);
+static uerr_t ftp_retrieve_glob (struct url *, struct url *, ccon *, int);
static struct fileinfo *delelement (struct fileinfo *, struct fileinfo **);
static void freefileinfo (struct fileinfo *f);
@@ -1734,7 +1738,8 @@ static void freefileinfo (struct fileinfo *f);
If opt.recursive is set, after all files have been retrieved,
ftp_retrieve_dirs will be called to retrieve the directories. */
static uerr_t
-ftp_retrieve_list (struct url *u, struct fileinfo *f, ccon *con)
+ftp_retrieve_list (struct url *u, struct url *original_url,
+ struct fileinfo *f, ccon *con)
{
static int depth = 0;
uerr_t err;
@@ -1893,7 +1898,9 @@ Already have correct symlink %s -> %s\n\n"),
else /* opt.retr_symlinks */
{
if (dlthis)
- err = ftp_loop_internal (u, f, con, NULL);
+ {
+ err = ftp_loop_internal (u, original_url, f, con, NULL);
+ }
} /* opt.retr_symlinks */
break;
case FT_DIRECTORY:
@@ -1904,7 +1911,9 @@ Already have correct symlink %s -> %s\n\n"),
case FT_PLAINFILE:
/* Call the retrieve loop. */
if (dlthis)
- err = ftp_loop_internal (u, f, con, NULL);
+ {
+ err = ftp_loop_internal (u, original_url, f, con, NULL);
+ }
break;
case FT_UNKNOWN:
logprintf (LOG_NOTQUIET, _("%s: unknown/unsupported file type.\n"),
@@ -1969,7 +1978,7 @@ Already have correct symlink %s -> %s\n\n"),
/* We do not want to call ftp_retrieve_dirs here */
if (opt.recursive &&
!(opt.reclevel != INFINITE_RECURSION && depth >= opt.reclevel))
- err = ftp_retrieve_dirs (u, orig, con);
+ err = ftp_retrieve_dirs (u, original_url, orig, con);
else if (opt.recursive)
DEBUGP ((_("Will not retrieve dirs since depth is %d (max %d).\n"),
depth, opt.reclevel));
@@ -1982,7 +1991,8 @@ Already have correct symlink %s -> %s\n\n"),
ftp_retrieve_glob on each directory entry. The function knows
about excluded directories. */
static uerr_t
-ftp_retrieve_dirs (struct url *u, struct fileinfo *f, ccon *con)
+ftp_retrieve_dirs (struct url *u, struct url *original_url,
+ struct fileinfo *f, ccon *con)
{
char *container = NULL;
int container_size = 0;
@@ -2032,7 +2042,7 @@ Not descending to %s as it is excluded/not-included.\n"),
odir = xstrdup (u->dir); /* because url_set_dir will free
u->dir. */
url_set_dir (u, newdir);
- ftp_retrieve_glob (u, con, GLOB_GETALL);
+ ftp_retrieve_glob (u, original_url, con, GLOB_GETALL);
url_set_dir (u, odir);
xfree (odir);
@@ -2091,14 +2101,15 @@ is_invalid_entry (struct fileinfo *f)
GLOB_GLOBALL, use globbing; if it's GLOB_GETALL, download the whole
directory. */
static uerr_t
-ftp_retrieve_glob (struct url *u, ccon *con, int action)
+ftp_retrieve_glob (struct url *u, struct url *original_url,
+ ccon *con, int action)
{
struct fileinfo *f, *start;
uerr_t res;
con->cmd |= LEAVE_PENDING;
- res = ftp_get_listing (u, con, &start);
+ res = ftp_get_listing (u, original_url, con, &start);
if (res != RETROK)
return res;
/* First: weed out that do not conform the global rules given in
@@ -2194,7 +2205,7 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action)
if (start)
{
/* Just get everything. */
- res = ftp_retrieve_list (u, start, con);
+ res = ftp_retrieve_list (u, original_url, start, con);
}
else
{
@@ -2210,7 +2221,7 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action)
{
/* Let's try retrieving it anyway. */
con->st |= ON_YOUR_OWN;
- res = ftp_loop_internal (u, NULL, con, NULL);
+ res = ftp_loop_internal (u, original_url, NULL, con, NULL);
return res;
}
@@ -2230,8 +2241,8 @@ ftp_retrieve_glob (struct url *u, ccon *con, int action)
of URL. Inherently, its capabilities are limited on what can be
encoded into a URL. */
uerr_t
-ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy,
- bool recursive, bool glob)
+ftp_loop (struct url *u, struct url *original_url, char **local_file, int *dt,
+ struct url *proxy, bool recursive, bool glob)
{
ccon con; /* FTP connection */
uerr_t res;
@@ -2252,16 +2263,17 @@ ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy,
if (!*u->file && !recursive)
{
struct fileinfo *f;
- res = ftp_get_listing (u, &con, &f);
+ res = ftp_get_listing (u, original_url, &con, &f);
if (res == RETROK)
{
if (opt.htmlify && !opt.spider)
{
+ struct url *url_file = opt.trustservernames ? u : original_url;
char *filename = (opt.output_document
? xstrdup (opt.output_document)
: (con.target ? xstrdup (con.target)
- : url_file_name (u, NULL)));
+ : url_file_name (url_file, NULL)));
res = ftp_index (filename, u, f);
if (res == FTPOK && opt.verbose)
{
@@ -2306,11 +2318,13 @@ ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy,
/* ftp_retrieve_glob is a catch-all function that gets called
if we need globbing, time-stamping, recursion or preserve
permissions. Its third argument is just what we really need. */
- res = ftp_retrieve_glob (u, &con,
+ res = ftp_retrieve_glob (u, original_url, &con,
ispattern ? GLOB_GLOBALL : GLOB_GETONE);
}
else
- res = ftp_loop_internal (u, NULL, &con, local_file);
+ {
+ res = ftp_loop_internal (u, original_url, NULL, &con, local_file);
+ }
}
if (res == FTPOK)
res = RETROK;
diff --git a/src/ftp.h b/src/ftp.h
index be00d88..2abc9c0 100644
--- a/src/ftp.h
+++ b/src/ftp.h
@@ -129,7 +129,8 @@ enum wget_ftp_fstatus
};
struct fileinfo *ftp_parse_ls (const char *, const enum stype);
-uerr_t ftp_loop (struct url *, char **, int *, struct url *, bool, bool);
+uerr_t ftp_loop (struct url *, struct url *, char **, int *, struct url *,
+ bool, bool);
uerr_t ftp_index (const char *, struct url *, struct fileinfo *);
diff --git a/src/retr.c b/src/retr.c
index 66624dc..21fad56 100644
--- a/src/retr.c
+++ b/src/retr.c
@@ -794,7 +794,8 @@ retrieve_url (struct url * orig_parsed, const char *origurl, char **file,
if (redirection_count)
oldrec = glob = false;
- result = ftp_loop (u, &local_file, dt, proxy_url, recursive, glob);
+ result = ftp_loop (u, orig_parsed, &local_file, dt, proxy_url,
+ recursive, glob);
recursive = oldrec;
/* There is a possibility of having HTTP being redirected to