From 90030a49c7facfefeca8157255f213197343c340 Mon Sep 17 00:00:00 2001
From: Daniel Stenberg
Date: Tue, 14 Mar 2000 08:33:15 +0000
Subject: 6.5 cleanup commit

---
 config.h.in         |   6 +
 configure.in        |   2 +-
 include/curl/curl.h |   2 +-
 lib/Makefile.in     |   4 +-
 src/config.h.in     |   3 +
 src/hugehelp.c      | 379 ++++++++++++++++++++++++++++++++--------------------
 src/version.h       |   2 +-
 7 files changed, 251 insertions(+), 147 deletions(-)

diff --git a/config.h.in b/config.h.in
index 9c5e11013..6b1f924ae 100644
--- a/config.h.in
+++ b/config.h.in
@@ -37,6 +37,9 @@
 /* The number of bytes in a long long. */
 #undef SIZEOF_LONG_LONG
 
+/* Define if you have the RAND_status function. */
+#undef HAVE_RAND_STATUS
+
 /* Define if you have the closesocket function. */
 #undef HAVE_CLOSESOCKET
 
@@ -67,6 +70,9 @@
 /* Define if you have the select function. */
 #undef HAVE_SELECT
 
+/* Define if you have the setvbuf function. */
+#undef HAVE_SETVBUF
+
 /* Define if you have the socket function. */
 #undef HAVE_SOCKET
 
diff --git a/configure.in b/configure.in
index ad6eb5992..5a76508ce 100644
--- a/configure.in
+++ b/configure.in
@@ -2,7 +2,7 @@ dnl $Id$
 dnl Process this file with autoconf to produce a configure script.
 AC_INIT(lib/urldata.h)
 AM_CONFIG_HEADER(config.h src/config.h)
-AM_INIT_AUTOMAKE(curl,"6.5pre2")
+AM_INIT_AUTOMAKE(curl,"6.5")
 
 dnl Checks for programs.
 AC_PROG_CC
diff --git a/include/curl/curl.h b/include/curl/curl.h
index 192c51f88..6533a572a 100644
--- a/include/curl/curl.h
+++ b/include/curl/curl.h
@@ -418,7 +418,7 @@ char *curl_GetEnv(char *variable);
 char *curl_version(void);
 
 /* This is the version number */
-#define LIBCURL_VERSION "6.5pre1"
+#define LIBCURL_VERSION "6.5"
 
 /* linked-list structure for QUOTE */
 struct curl_slist {
diff --git a/lib/Makefile.in b/lib/Makefile.in
index 707e6227e..2b6f6c611 100644
--- a/lib/Makefile.in
+++ b/lib/Makefile.in
@@ -81,7 +81,7 @@ CFLAGS = -g #-Wall -pedantic
 
 INCLUDES = -I$(top_srcdir)/include
 
-libcurl_a_SOURCES = arpa_telnet.h file.c getpass.h netrc.h timeval.c base64.c file.h hostip.c progress.c timeval.h base64.h formdata.c hostip.h progress.h cookie.c formdata.h http.c sendf.c cookie.h ftp.c http.h sendf.h url.c dict.c ftp.h if2ip.c speedcheck.c url.h dict.h getdate.c if2ip.h speedcheck.h urldata.h download.c getdate.h ldap.c ssluse.c version.c download.h getenv.c ldap.h ssluse.h escape.c getenv.h mprintf.c telnet.c escape.h getpass.c netrc.c telnet.h
+libcurl_a_SOURCES = arpa_telnet.h file.c getpass.h netrc.h timeval.c base64.c file.h hostip.c progress.c timeval.h base64.h formdata.c hostip.h progress.h cookie.c formdata.h http.c sendf.c cookie.h ftp.c http.h sendf.h url.c dict.c ftp.h if2ip.c speedcheck.c url.h dict.h getdate.c if2ip.h speedcheck.h urldata.h download.c getdate.h ldap.c ssluse.c version.c download.h getenv.c ldap.h ssluse.h escape.c getenv.h mprintf.c telnet.c escape.h getpass.c netrc.c telnet.h writeout.c writeout.h
 
 mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
 CONFIG_HEADER = ../config.h ../src/config.h
@@ -97,7 +97,7 @@ libcurl_a_LIBADD =
 libcurl_a_OBJECTS = file.o timeval.o base64.o hostip.o progress.o \
 formdata.o cookie.o http.o sendf.o ftp.o url.o dict.o if2ip.o \
 speedcheck.o getdate.o download.o ldap.o ssluse.o version.o getenv.o \
-escape.o mprintf.o telnet.o getpass.o netrc.o
+escape.o mprintf.o telnet.o getpass.o netrc.o writeout.o
 AR = ar
 COMPILE = $(CC) $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
 CCLD = $(CC)
diff --git a/src/config.h.in b/src/config.h.in
index 2e210ede8..3250e1852 100644
--- a/src/config.h.in
+++ b/src/config.h.in
@@ -10,3 +10,6 @@
 
 /* Define if you have the <io.h> header file. */
 #undef HAVE_IO_H
+
+/* Define if you have strdup() */
+#undef HAVE_STRDUP
diff --git a/src/hugehelp.c b/src/hugehelp.c
index 5332776a2..04783b8eb 100644
--- a/src/hugehelp.c
+++ b/src/hugehelp.c
@@ -75,49 +75,59 @@ puts (
 " format of the file to read cookies from should be plain\n"
 " HTTP headers or the netscape cookie file format.\n"
 "\n"
+" NOTE that the file specified with -b/--cookie is only\n"
+" used as input. No cookies will be stored in the file.\n"
+" To store cookies, save the HTTP headers to a file using\n"
+" -D/--dump-header!\n"
+"\n"
 " -B/--ftp-ascii\n"
-" (FTP/LDAP) Use ASCII transfer when getting an FTP file\n"
-" or LDAP info. For FTP, this can also be enforced by\n"
+" (FTP/LDAP) Use ASCII transfer when getting an FTP file\n"
+" or LDAP info. For FTP, this can also be enforced by\n"
 " using an URL that ends with \";type=A\".\n"
 "\n"
 " -c/--continue\n"
-" Continue/Resume a previous file transfer. This\n"
-" instructs curl to continue appending data on the file\n"
-" where it was previously left, possibly because of a\n"
-" broken connection to the server. There must be a named\n"
-" physical file to append to for this to work. Note:\n"
-" Upload resume is depening on a command named SIZE not\n"
+" Continue/Resume a previous file transfer. This\n"
+" instructs curl to continue appending data on the file\n"
+" where it was previously left, possibly because of a\n"
+" broken connection to the server. There must be a named\n"
+" physical file to append to for this to work. Note:\n"
+" Upload resume is depening on a command named SIZE not\n"
 " always present in all ftp servers! Upload resume is for\n"
-" FTP only. HTTP resume is only possible with HTTP/1.1\n"
+" FTP only. HTTP resume is only possible with HTTP/1.1\n"
 " or later servers.\n"
 "\n"
 " -C/--continue-at <offset>\n"
-" Continue/Resume a previous file transfer at the given\n"
-" offset. The given offset is the exact number of bytes\n"
-" that will be skipped counted from the beginning of the\n"
+" Continue/Resume a previous file transfer at the given\n"
+" offset. The given offset is the exact number of bytes\n"
+" that will be skipped counted from the beginning of the\n"
 " source file before it is transfered to the destination.\n"
-" If used with uploads, the ftp server command SIZE will\n"
-" not be used by curl. Upload resume is for FTP only.\n"
-" HTTP resume is only possible with HTTP/1.1 or later\n"
+" If used with uploads, the ftp server command SIZE will\n"
+" not be used by curl. Upload resume is for FTP only.\n"
+" HTTP resume is only possible with HTTP/1.1 or later\n"
 " servers.\n"
 "\n"
 " -d/--data <data>\n"
-" (HTTP) Sends the specified data in a POST request to\n"
-" the HTTP server. Note that the data is sent exactly as\n"
+" (HTTP) Sends the specified data in a POST request to\n"
+" the HTTP server. Note that the data is sent exactly as\n"
 " specified with no extra processing. The data is\n"
-" expected to be \"url-encoded\". This will cause curl to\n"
-" pass the data to the server using the content-type\n"
+" expected to be \"url-encoded\". This will cause curl to\n"
+" pass the data to the server using the content-type\n"
 " application/x-www-form-urlencoded. Compare to -F.\n"
 "\n"
-" If you start the data with the letter @, the rest\n"
-" should be a file name to read the data from, or - if\n"
-" you want curl to read the data from stdin. The con­\n"
+" If you start the data with the letter @, the rest\n"
+" should be a file name to read the data from, or - if\n"
+" you want curl to read the data from stdin. The con­\n"
 " tents of the file must already be url-encoded.\n"
 "\n"
 " -D/--dump-header <file>\n"
-" (HTTP/FTP) Write the HTTP headers to this file. Write\n"
+" (HTTP/FTP) Write the HTTP headers to this file. Write\n"
 " the FTP file info to this file if -I/--head is used.\n"
 "\n"
+" This option is handy to use when you want to store the\n"
+" cookies that a HTTP site sends to you. The cookies\n"
+" could then be read in a second curl invoke by using the\n"
+" -b/--cookie option!\n"
+"\n"
 " -e/--referer <URL>\n"
 " (HTTP) Sends the \"Referer Page\" information to the HTTP\n"
 " server. Some badly done CGIs fail if it's not set. This\n"
@@ -153,7 +163,6 @@ puts (
 " /etc/passwd will be the input:\n"
 "\n"
 " curl -F password=@/etc/passwd www.mypasswords.com\n"
-"\n"
 " To read the file's content from stdin insted of a file,\n"
 " use - where the file name should've been.\n"
 "\n"
@@ -214,6 +223,7 @@ puts (
 " jobs from hanging for hours due to slow networks or\n"
 " links going down. This doesn't work properly in win32\n"
 " systems.\n"
+"\n"
 " -M/--manual\n"
 " Manual. Display the huge help text.\n"
 "\n"
@@ -233,11 +243,19 @@ puts (
 "\n"
 " machine host.domain.com user myself password secret\n"
 "\n"
+" -N/--no-buffer\n"
+" Disables the buffering of the output stream. In normal\n"
+" work situations, curl will use a standard buffered out­\n"
+" put stream that will have the effect that it will out­\n"
+" put the data in chunks, not necessarily exactly when\n"
+" the data arrives. Using this option will disable that\n"
+" buffering.\n"
+"\n"
 " -o/--output <file>\n"
-" Write output to <file> instead of stdout. If you are\n"
+" Write output to <file> instead of stdout. If you are\n"
 " using {} or [] to fetch multiple documents, you can use\n"
-" # in the specifier. That variable will be\n"
-" replaced with the current string for the URL being\n"
+" #[num] in the specifier. That variable will be\n"
+" replaced with the current string for the URL being\n"
 " fetched. Like in:\n"
 "\n"
 " curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
@@ -252,61 +270,71 @@
 " the path is cut off.)\n"
 "\n"
 " -P/--ftpport <address>\n"
-" (FTP) Reverses the initiator/listenor roles when con­\n"
-" necting with ftp. This switch makes Curl use the PORT\n"
-" command instead of PASV. In practice, PORT tells the\n"
+" (FTP) Reverses the initiator/listener roles when con­\n"
+" necting with ftp. This switch makes Curl use the PORT\n"
+" command instead of PASV. In practice, PORT tells the\n"
 " server to connect to the client's specified address and\n"
-" port, while PASV asks the server for an ip address and\n"
+" port, while PASV asks the server for an ip address and\n"
 " port to connect to. <address> should be one of:\n"
-" interface - i.e \"eth0\" to specify which interface's IP\n"
-" address you want to use (Unix only)\n"
-" IP address - i.e \"192.168.10.1\" to specify exact IP\n"
-" number\n"
-" host name - i.e \"my.host.domain\" to specify machine\n"
-" \"-\" - (any single-letter string) to make it pick\n"
-" the machine's default\n"
+"\n"
+" interface i.e \"eth0\" to specify which interface's IP\n"
+" address you want to use (Unix only)\n"
+"\n"
+" IP address i.e \"192.168.10.1\" to specify exact IP num­\n"
+" ber\n"
+"\n"
+" host name i.e \"my.host.domain\" to specify machine\n"
+"\n"
+" - (any single-letter string) to make it pick\n"
+" the machine's default\n"
+"\n"
 " -q If used as the first parameter on the command line, the\n"
-" $HOME/.curlrc file will not be read and used as a con­\n"
+" $HOME/.curlrc file will not be read and used as a con­\n"
 " fig file.\n"
 "\n"
 " -Q/--quote \n"
-" (FTP) Send an arbitrary command to the remote FTP\n"
-" server, by using the QUOTE command of the server. Not\n"
-" all servers support this command, and the set of QUOTE\n"
-" commands are server specific! Quote commands are sent\n"
-" BEFORE the transfer is taking place. To make commands\n"
-" take place after a successful transfer, prefix them\n"
+" (FTP) Send an arbitrary command to the remote FTP\n"
+" server, by using the QUOTE command of the server. Not\n"
+" all servers support this command, and the set of QUOTE\n"
+" commands are server specific! Quote commands are sent\n"
+" BEFORE the transfer is taking place. To make commands\n"
+" take place after a successful transfer, prefix them\n"
 " with a dash '-'. You may specify any amount of commands\n"
-" to be run before and after the transfer. If the server\n"
-" returns failure for one of the commands, the entire\n"
+" to be run before and after the transfer. If the server\n"
+" returns failure for one of the commands, the entire\n"
 " operation will be aborted.\n"
 "\n"
 " -r/--range <range>\n"
-" (HTTP/FTP) Retrieve a byte range (i.e a partial docu­\n"
-" ment) from a HTTP/1.1 or FTP server. Ranges can be\n"
+" (HTTP/FTP) Retrieve a byte range (i.e a partial docu­\n"
+" ment) from a HTTP/1.1 or FTP server. Ranges can be\n"
 " specified in a number of ways.\n"
-" 0-499 - specifies the first 500 bytes\n"
-" 500-999 - specifies the second 500 bytes\n"
-" -500 - specifies the last 500 bytes\n"
-" 9500- - specifies the bytes from offset 9500\n"
-" and forward\n"
-" 0-0,-1 - specifies the first and last byte\n"
-" only(*)(H)\n"
-" 500-700,600-799 - specifies 300 bytes from offset\n"
-" 500(H)\n"
-" 100-199,500-599 - specifies two separate 100 bytes\n"
-" ranges(*)(H)\n"
-"\n"
-" (*) = NOTE that this will cause the server to reply\n"
-" with a multipart response!\n"
-"\n"
-" You should also be aware that many HTTP/1.1 servers do\n"
-" not have this feature enabled, so that when you attempt\n"
-" to get a range, you'll instead get the whole document.\n"
-"\n"
-" FTP range downloads only support the simple syntax\n"
-" 'start-stop' (optionally with one of the numbers omit­\n"
-" ted). It depends on the non-RFC command SIZE.\n"
+"\n"
+" 0-499 specifies the first 500 bytes\n"
+"\n"
+" 500-999 specifies the second 500 bytes\n"
+"\n"
+" -500 specifies the last 500 bytes\n"
+"\n"
+" 9500 specifies the bytes from offset 9500 and for­\n"
+" ward\n"
+"\n"
+" 0-0,-1 specifies the first and last byte only(*)(H)\n"
+" 500-700,600-799\n"
+" specifies 300 bytes from offset 500(H)\n"
+"\n"
+" 100-199,500-599\n"
+" specifies two separate 100 bytes ranges(*)(H)\n"
+"\n"
+" (*) = NOTE that this will cause the server to reply with a\n"
+" multipart response!\n"
+"\n"
+" You should also be aware that many HTTP/1.1 servers do not\n"
+" have this feature enabled, so that when you attempt to get a\n"
+" range, you'll instead get the whole document.\n"
+"\n"
+" FTP range downloads only support the simple syntax 'start-\n"
+" stop' (optionally with one of the numbers omitted). It\n"
+" depends on the non-RFC command SIZE.\n"
 "\n"
 " -s/--silent\n"
 " Silent mode. Don't show progress meter or error mes­\n"
@@ -343,7 +371,6 @@ puts (
 " Specify user and password to use for Proxy authentica­\n"
 " tion. If no password is specified, curl will ask for it\n"
 " interactively.\n"
-"\n"
 " -v/--verbose\n"
 " Makes the fetching more verbose/talkative. Mostly\n"
 " usable for debugging. Lines starting with '>' means\n"
@@ -355,6 +382,66 @@ puts (
 " Displays the full version of curl, libcurl and other\n"
 " 3rd party libraries linked with the executable.\n"
 "\n"
+" -w/--write-out <format>\n"
+" Defines what to display after a completed and success­\n"
+" ful operation. The format is a string that may contain\n"
+" plain text mixed with any number of variables. The\n"
+" string can be specified as \"string\", to get read from a\n"
+" particular file you specify it \"@filename\" and to tell\n"
+" curl to read the format from stdin you write \"@-\".\n"
+"\n"
+" The variables present in the output format will be sub­\n"
+" stituted by the value or text that curl thinks fit, as\n"
+" described below. All variables are specified like\n"
+" %{variable_name} and to output a normal % you just\n"
+" write them like %%. You can output a newline by using\n"
+" \\n, a carrige return with \\r and a tab space with \\t.\n"
+"\n"
+" NOTE: The %-letter is a special letter in the\n"
+" win32-environment, where all occurrences of % must be\n"
+" doubled when using this option.\n"
+"\n"
+" Available variables are at this point:\n"
+"\n"
+" url_effective The URL that was fetched last. This is\n"
+" mostly meaningful if you've told curl to\n"
+" follow location: headers.\n"
+"\n"
+" http_code The numerical code that was found in the\n"
+" last retrieved HTTP(S) page.\n"
+"\n"
+" time_total The total time, in seconds, that the\n"
+" full operation lasted. The time will be\n"
+" displayed with millisecond resolution.\n"
+"\n"
+" time_namelookup\n"
+" The time, in seconds, it took from the\n"
+" start until the name resolving was com­\n"
+" pleted.\n"
+"\n"
+" time_connect The time, in seconds, it took from the\n"
+" start until the connect to the remote\n"
+" host (or proxy) was completed.\n"
+" time_pretransfer\n"
+" The time, in seconds, it took from the\n"
+" start until the file transfer is just\n"
+" about to begin. This includes all pre-\n"
+" transfer commands and negotiations that\n"
+" are specific to the particular proto­\n"
+" col(s) involved.\n"
+"\n"
+" size_download The total amount of bytes that were\n"
+" downloaded.\n"
+"\n"
+" size_upload The total amount of bytes that were\n"
+" uploaded.\n"
+"\n"
+" speed_download The average download speed that curl\n"
+" measured for the complete download.\n"
+"\n"
+" speed_upload The average upload speed that curl mea­\n"
+" sured for the complete download.\n"
+"\n"
 " -x/--proxy \n"
 " Use specified proxy. If the port number is not speci­\n"
 " fied, it is assumed at port 1080.\n"
@@ -368,29 +455,27 @@
 " (FTP) Specifies a custom FTP command to use instead of\n"
 " LIST when doing file lists with ftp.\n"
 "\n"
-" -y/--speed-time \n"
-" Speed Limit. If a download is slower than this given\n"
-" speed, in bytes per second, for Speed Time seconds it\n"
-" gets aborted. Speed Time is set with -Y and is 30 if\n"
-" not set.\n"
+" -y/--speed-time
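
Usage sketch, not part of the commit: the -w/--write-out option documented above could be exercised in the same spirit as the manual's other examples. The URL and the output file name below are placeholders.

  curl -w "fetched %{url_effective} in %{time_total} seconds\n" -o page.html http://www.example.com/

Here %{url_effective} and %{time_total} are two of the variables listed in the new manual text, and the \n in the format is translated to a newline by curl itself.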