tidy-up: use more example domains
Also make use of the example TLD: https://en.wikipedia.org/wiki/.example

Reviewed-by: Daniel Stenberg
Closes #11992
Parent: 65729f65c7
Commit: 1bc69df7b4
 docs/FAQ | 6 +++---
@@ -597,11 +597,11 @@ FAQ
 To specify a command line option that includes spaces, you might need to
 put the entire option within quotes. Like in:

-    curl -d " with spaces " url.com
+    curl -d " with spaces " example.com

 or perhaps

-    curl -d ' with spaces ' url.com
+    curl -d ' with spaces ' example.com

 Exactly what kind of quotes and how to do this is entirely up to the shell
 or command line interpreter that you are using. For most unix shells, you
@@ -820,7 +820,7 @@ FAQ
 To be able to use those characters as actual parts of the URL (without using
 them for the curl URL "globbing" system), use the -g/--globoff option:

-    curl -g 'www.site.com/weirdname[].html'
+    curl -g 'www.example.com/weirdname[].html'

 4.4 Why do I get downloaded data even though the web page does not exist?

@@ -247,21 +247,21 @@ Run in the `C:/Program Files/msh3/lib` directory, copy `curl.exe` to that
 directory, or copy `msquic.dll` and `msh3.dll` from that directory to the
 `curl.exe` directory. For example:

-    % C:\Program Files\msh3\lib> F:\curl\builds\libcurl-vc-x64-release-dll-ipv6-sspi-schannel-msh3\bin\curl.exe --http3 https://www.google.com
+    % C:\Program Files\msh3\lib> F:\curl\builds\libcurl-vc-x64-release-dll-ipv6-sspi-schannel-msh3\bin\curl.exe --http3 https://curl.se/

 # `--http3`

 Use only HTTP/3:

-    curl --http3-only https://nghttp2.org:4433/
+    curl --http3-only https://example.org:4433/

 Use HTTP/3 with fallback to HTTP/2 or HTTP/1.1 (see "HTTPS eyeballing" below):

-    curl --http3 https://nghttp2.org:4433/
+    curl --http3 https://example.org:4433/

 Upgrade via Alt-Svc:

-    curl --alt-svc altsvc.cache https://quic.aiortc.org/
+    curl --alt-svc altsvc.cache https://curl.se/

 See this [list of public HTTP/3 servers](https://bagder.github.io/HTTP3-test/)

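
Note: the documentation changed above covers the `--http3` and `--http3-only` command line options. For reference, a minimal libcurl sketch of the same request could look like the following. This is an illustration, not code from the curl repository; it assumes a libcurl built with HTTP/3 support, and the URL is just the example host used above.

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.org:4433/");
        /* ask for HTTP/3; recent libcurl versions allow fallback to
           HTTP/2 or HTTP/1.1 if the HTTP/3 attempt fails */
        curl_easy_setopt(curl, CURLOPT_HTTP_VERSION,
                         (long)CURL_HTTP_VERSION_3);
        CURLcode res = curl_easy_perform(curl);
        if(res != CURLE_OK)
          fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
        curl_easy_cleanup(curl);
      }
      return 0;
    }
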
@@ -83,11 +83,11 @@ Fetch two files and store them with their remote names:

 To ftp files using name and password, include them in the URL like:

-    curl ftp://name:passwd@machine.domain:port/full/path/to/file
+    curl ftp://name:passwd@ftp.server.example:port/full/path/to/file

 or specify them with the `-u` flag like

-    curl -u name:passwd ftp://machine.domain:port/full/path/to/file
+    curl -u name:passwd ftp://ftp.server.example:port/full/path/to/file

 ### FTPS

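
Note: the section changed above shows the two command line ways of passing FTP credentials. The libcurl equivalent of the `-u` form is `CURLOPT_USERPWD`; a minimal sketch, using the same hypothetical host and path, with error handling trimmed:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        /* same as: curl -u name:passwd ftp://ftp.server.example/full/path/to/file */
        curl_easy_setopt(curl, CURLOPT_URL,
                         "ftp://ftp.server.example/full/path/to/file");
        curl_easy_setopt(curl, CURLOPT_USERPWD, "name:passwd");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
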
@@ -113,11 +113,11 @@ matching public key file must be specified using the `--pubkey` option.
 Curl also supports user and password in HTTP URLs, thus you can pick a file
 like:

-    curl http://name:passwd@machine.domain/full/path/to/file
+    curl http://name:passwd@http.server.example/full/path/to/file

 or specify user and password separately like in

-    curl -u name:passwd http://machine.domain/full/path/to/file
+    curl -u name:passwd http://http.server.example/full/path/to/file

 HTTP offers many different methods of authentication and curl supports
 several: Basic, Digest, NTLM and Negotiate (SPNEGO). Without telling which
@@ -174,9 +174,9 @@ curl supports the `-u`, `-Q` and `--ftp-account` options that can be used to
 set up transfers through many FTP proxies. For example, a file can be uploaded
 to a remote FTP server using a Blue Coat FTP proxy with the options:

-    curl -u "username@ftp.server Proxy-Username:Remote-Pass"
+    curl -u "username@ftp.server.example Proxy-Username:Remote-Pass"
       --ftp-account Proxy-Password --upload-file local-file
-      ftp://my-ftp.proxy.server:21/remote/upload/path/
+      ftp://my-ftp.proxy.example:21/remote/upload/path/

 See the manual for your FTP proxy to determine the form it expects to set up
 transfers, and curl's `-v` option to see exactly what curl is sending.
@@ -447,7 +447,7 @@ path beginning with `/foo`.

 Example, get a page that wants my name passed in a cookie:

-    curl -b "name=Daniel" www.sillypage.com
+    curl -b "name=Daniel" www.example.com

 Curl also has the ability to use previously received cookies in following
 sessions. If you get cookies from a server and store them in a file in a
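
Note: the `-b "name=Daniel"` example above maps directly onto `CURLOPT_COOKIE` in libcurl. A minimal sketch, with a placeholder URL:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "http://www.example.com/");
        /* send this cookie with the request, like -b on the command line */
        curl_easy_setopt(curl, CURLOPT_COOKIE, "name=Daniel");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
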
@@ -524,12 +524,12 @@ for a specified time.
 To have curl abort the download if the speed is slower than 3000 bytes per
 second for 1 minute, run:

-    curl -Y 3000 -y 60 www.far-away-site.com
+    curl -Y 3000 -y 60 www.far-away.example.com

 This can be used in combination with the overall time limit, so that the above
 operation must be completed in whole within 30 minutes:

-    curl -m 1800 -Y 3000 -y 60 www.far-away-site.com
+    curl -m 1800 -Y 3000 -y 60 www.far-away.example.com

 Forcing curl not to transfer data faster than a given rate is also possible,
 which might be useful if you are using a limited bandwidth connection and you
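
Note: the `-Y`/`-y` pair above corresponds to `CURLOPT_LOW_SPEED_LIMIT` and `CURLOPT_LOW_SPEED_TIME` in libcurl, and `-m` to `CURLOPT_TIMEOUT`. A minimal sketch of the same limits, with a placeholder URL:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "http://www.far-away.example.com/");
        /* abort if the transfer is slower than 3000 bytes/sec for 60 seconds */
        curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, 3000L);
        curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, 60L);
        /* and give the whole operation at most 30 minutes */
        curl_easy_setopt(curl, CURLOPT_TIMEOUT, 1800L);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
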
@@ -538,11 +538,11 @@ do not want your transfer to use all of it (sometimes referred to as

 Make curl transfer data no faster than 10 kilobytes per second:

-    curl --limit-rate 10K www.far-away-site.com
+    curl --limit-rate 10K www.far-away.example.com

 or

-    curl --limit-rate 10240 www.far-away-site.com
+    curl --limit-rate 10240 www.far-away.example.com

 Or prevent curl from uploading data faster than 1 megabyte per second:

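
Note: `--limit-rate` corresponds to `CURLOPT_MAX_RECV_SPEED_LARGE` (and `CURLOPT_MAX_SEND_SPEED_LARGE` for uploads), which take `curl_off_t` values in bytes per second. A minimal sketch, with a placeholder URL:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "http://www.far-away.example.com/");
        /* do not download faster than 10240 bytes per second */
        curl_easy_setopt(curl, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t)10240);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
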
@@ -583,7 +583,7 @@ up to the first characters of each line are ignored.
 Prevent curl from reading the default file by using -q as the first command
 line parameter, like:

-    curl -q www.thatsite.com
+    curl -q www.example.org

 Force curl to get and display a local help page in case it is invoked without
 URL by making a config file similar to:
@@ -607,7 +607,7 @@ flag.
 Example, send the header `X-you-and-me: yes` to the server when getting a
 page:

-    curl -H "X-you-and-me: yes" www.love.com
+    curl -H "X-you-and-me: yes" love.example.com

 This can also be useful in case you want curl to send a different text in a
 header than it normally does. The `-H` header you specify then replaces the
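
Note: in libcurl, the `-H` behaviour shown above — adding a header, and also replacing or removing one of curl's own headers, as the next hunk describes — goes through `CURLOPT_HTTPHEADER`. A minimal sketch, with a placeholder URL:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        struct curl_slist *headers = NULL;
        headers = curl_slist_append(headers, "X-you-and-me: yes");
        /* a header name with no value, such as "Host:", removes that header */
        curl_easy_setopt(curl, CURLOPT_URL, "http://love.example.com/");
        curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
        curl_easy_perform(curl);
        curl_slist_free_all(headers);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
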
@@ -615,7 +615,7 @@ header curl would normally send. If you replace an internal header with an
 empty one, you prevent that header from being sent. To prevent the `Host:`
 header from being used:

-    curl -H "Host:" www.server.com
+    curl -H "Host:" server.example.com

 ## FTP and Path Names

@@ -651,7 +651,7 @@ to open another port and await another connection performed by the
 client. This is good if the client is behind a firewall that does not allow
 incoming connections.

-    curl ftp.download.com
+    curl ftp.example.com

 If the server, for example, is behind a firewall that does not allow
 connections on ports other than 21 (or if it just does not support the `PASV`
@@ -663,16 +663,16 @@ The `-P` flag to curl supports a few different options. Your machine may have
 several IP-addresses and/or network interfaces and curl allows you to select
 which of them to use. Default address can also be used:

-    curl -P - ftp.download.com
+    curl -P - ftp.example.com

 Download with `PORT` but use the IP address of our `le0` interface (this does
 not work on Windows):

-    curl -P le0 ftp.download.com
+    curl -P le0 ftp.example.com

 Download with `PORT` but use 192.168.0.10 as our IP address to use:

-    curl -P 192.168.0.10 ftp.download.com
+    curl -P 192.168.0.10 ftp.example.com

 ## Network Interface

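
Note: the `-P` option corresponds to `CURLOPT_FTPPORT`, which accepts the same kinds of argument: `-` for the default address, an interface name, or an IP address. A minimal sketch, with a placeholder host:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "ftp://ftp.example.com/");
        /* use active FTP (PORT) with the default address, like curl -P - */
        curl_easy_setopt(curl, CURLOPT_FTPPORT, "-");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
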
@@ -865,7 +865,7 @@ information from the previous transfer you want to extract.
 To display the amount of bytes downloaded together with some text and an
 ending newline:

-    curl -w 'We downloaded %{size_download} bytes\n' www.download.com
+    curl -w 'We downloaded %{size_download} bytes\n' www.example.com

 ## Kerberos FTP Transfer

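
Note: the `-w '%{size_download}'` example above has a libcurl counterpart in `curl_easy_getinfo()` with `CURLINFO_SIZE_DOWNLOAD_T`. A minimal sketch, with a placeholder URL:

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_off_t dl = 0;
        curl_easy_setopt(curl, CURLOPT_URL, "http://www.example.com/");
        if(curl_easy_perform(curl) == CURLE_OK &&
           curl_easy_getinfo(curl, CURLINFO_SIZE_DOWNLOAD_T, &dl) == CURLE_OK)
          printf("We downloaded %" CURL_FORMAT_CURL_OFF_T " bytes\n", dl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
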
@@ -7,11 +7,11 @@ Doing a "POST" publishes the post data to the topic and exits.

 Example subscribe:

-    curl mqtt://host/home/bedroom/temp
+    curl mqtt://host.home/bedroom/temp

 Example publish:

-    curl -d 75 mqtt://host/home/bedroom/dimmer
+    curl -d 75 mqtt://host.home/bedroom/dimmer

 ## What does curl deliver as a response to a subscribe

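
Note: the MQTT examples above also work through libcurl by using an `mqtt://` URL: a plain transfer subscribes, and supplying POST data publishes. A minimal publish sketch, assuming a libcurl built with MQTT support and the same hypothetical broker name:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        /* publish the value 75 to the topic, like: curl -d 75 mqtt://... */
        curl_easy_setopt(curl, CURLOPT_URL, "mqtt://host.home/bedroom/dimmer");
        curl_easy_setopt(curl, CURLOPT_POSTFIELDS, "75");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
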
@@ -555,7 +555,7 @@
 cookie file at the end of an operation:

     curl --cookie cookies.txt --cookie-jar newcookies.txt \
-      http://www.example.com
+      http://www.example.com

 # HTTPS

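
Note: the command above, reading one cookie file and writing another, corresponds to enabling the cookie engine in libcurl with `CURLOPT_COOKIEFILE` plus `CURLOPT_COOKIEJAR`. A minimal sketch, with a placeholder URL:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "http://www.example.com/");
        /* read cookies from this file (and switch on the cookie engine) */
        curl_easy_setopt(curl, CURLOPT_COOKIEFILE, "cookies.txt");
        /* write all known cookies to this file when the handle is cleaned up */
        curl_easy_setopt(curl, CURLOPT_COOKIEJAR, "newcookies.txt");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }
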
@@ -8,7 +8,7 @@ See-also: remote-name remote-name-all remote-header-name
 Category: important curl
 Example: -o file $URL
 Example: "http://{one,two}.example.com" -o "file_#1.txt"
-Example: "http://{site,host}.host[1-5].com" -o "#1_#2"
+Example: "http://{site,host}.host[1-5].example" -o "#1_#2"
 Example: -o file $URL -o file2 https://example.net
 Added: 4.0
 Multi: append
@@ -22,7 +22,7 @@ string for the URL being fetched. Like in:

 or use several variables like:

-    curl "http://{site,host}.host[1-5].com" -o "#1_#2"
+    curl "http://{site,host}.host[1-5].example" -o "#1_#2"

 You may use this option as many times as the number of URLs you have. For
 example, if you specify two URLs on the same command line, you can use it like
 lib/urlapi.c | 10 +++++-----
@@ -100,7 +100,7 @@ static void free_urlhandle(struct Curl_URL *u)

 /*
  * Find the separator at the end of the host name, or the '?' in cases like
- * http://www.url.com?id=2380
+ * http://www.example.com?id=2380
  */
 static const char *find_host_sep(const char *url)
 {
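
Note: the comment fixed above belongs to `find_host_sep()`, which has to treat either `/` or `?` as the end of the host part because URLs like `http://www.example.com?id=2380` omit the slash. A simplified, self-contained illustration of that idea (not the actual libcurl implementation):

    #include <string.h>

    /* Return a pointer to the character that ends the host part: the first
       '/' or '?' after the "scheme://" prefix, or the terminating zero byte
       when neither occurs. */
    static const char *host_end(const char *url)
    {
      const char *start = strstr(url, "://");
      const char *slash, *query;
      start = start ? start + 3 : url;     /* skip over "scheme://" if present */
      slash = strchr(start, '/');
      query = strchr(start, '?');
      if(!slash)
        return query ? query : start + strlen(start);
      if(!query)
        return slash;
      return (query < slash) ? query : slash;  /* whichever comes first */
    }
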
@@ -338,7 +338,7 @@ static char *concat_url(char *base, const char *relurl)
     pathsep = strchr(protsep, '/');
     if(pathsep) {
       /* When people use badly formatted URLs, such as
-         "http://www.url.com?dir=/home/daniel" we must not use the first
+         "http://www.example.com?dir=/home/daniel" we must not use the first
          slash, if there's a ?-letter before it! */
       char *sep = strchr(protsep, '?');
       if(sep && (sep < pathsep))
@@ -347,9 +347,9 @@ static char *concat_url(char *base, const char *relurl)
     }
     else {
       /* There was no slash. Now, since we might be operating on a badly
-         formatted URL, such as "http://www.url.com?id=2380" which doesn't
-         use a slash separator as it is supposed to, we need to check for a
-         ?-letter as well! */
+         formatted URL, such as "http://www.example.com?id=2380" which
+         doesn't use a slash separator as it is supposed to, we need to check
+         for a ?-letter as well! */
       pathsep = strchr(protsep, '?');
       if(pathsep)
         *pathsep = 0;
@@ -84,7 +84,7 @@ static void show_dir_errno(struct GlobalConfig *global, const char *name)
 /*
  * Create the needed directory hierarchy recursively in order to save
  * multi-GETs in file output, ie:
- * curl "http://my.site/dir[1-5]/file[1-5].txt" -o "dir#1/file#2.txt"
+ * curl "http://example.org/dir[1-5]/file[1-5].txt" -o "dir#1/file#2.txt"
  * should create all the dir* automagically
  */

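
Note: the comment above documents the job of curl's directory-hierarchy helper: create every directory in the output path so that globbed multi-GET downloads like `-o "dir#1/file#2.txt"` can be written. A simplified POSIX-only sketch of that approach (the real tool also handles Windows path separators and curl's own error reporting):

    #include <errno.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/stat.h>
    #include <sys/types.h>

    /* Create every missing directory leading up to the final path component
       of 'outfile', e.g. "dir1/sub/file.txt" creates "dir1" and "dir1/sub". */
    static int create_dir_hierarchy(const char *outfile)
    {
      char *copy = strdup(outfile);
      char *p;
      if(!copy)
        return -1;
      for(p = strchr(copy + 1, '/'); p; p = strchr(p + 1, '/')) {
        *p = '\0';                               /* cut at this separator */
        if(mkdir(copy, 0755) && errno != EEXIST) {
          free(copy);
          return -1;                             /* first hard failure wins */
        }
        *p = '/';                                /* restore and continue */
      }
      free(copy);
      return 0;
    }
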