Download files from the Web. Supports HTTP, HTTPS, and FTP.

# To download the contents of a URL to a file (named "foo" in this case):
wget https://example.com/foo

# To download a single web page and all its resources (scripts, stylesheets, images, etc.):
wget --page-requisites --convert-links https://example.com/somepage

# To download a full website, with 3-second intervals between requests:
wget --mirror --page-requisites --convert-links --wait=3 https://example.com
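
# A politer variant of the above: randomize the delay between requests and stay below the start directory (the URL is a placeholder):
wget --mirror --page-requisites --convert-links --wait=3 --random-wait --no-parent https://example.com/docs/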

# To download the contents of a URL via authenticated FTP:
wget --ftp-user=username --ftp-password=password ftp://example.com/path/to/file
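
# To avoid putting the password on the command line (and in your shell history), prompt for it instead:
wget --ask-password --ftp-user=username ftp://example.com/path/to/file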

# To limit download speed to 200 kB/s:
wget --limit-rate=200k https://example.com/somefile

# To continue an incomplete download:
wget -c https://example.com/somefile

# To retry a given number of times if the download doesn't succeed at first:
wget -t number_of_retries https://example.com/somefile

# To download a table of contents page and everything it links to (3 levels deep), converting links for local browsing and fetching only from the example domain:
wget -rkp -l3 -np -nH --cut-dirs=1 --domains=example.com https://example.com/toc/

# To download a website with a 20-second wait between requests, rate-limited to 20 kB/s, identifying as a browser:
wget --wait=20 --limit-rate=20K -r -p -U Mozilla https://example.com

# The same, with a fuller user agent (a complete Safari 5 user-agent string would go here, quoted):
wget --wait=20 --limit-rate=20K -r -p -U Mozilla/5.0 https://example.com

# Quick and dirty, with no rate limiting (use sparingly):
wget -r -p -U Mozilla https://example.com

# To crawl a site and check for broken links (404s) without saving anything:
wget --spider -nd -r <URL>
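
# To keep the crawl log and pull the failures out of it afterwards (the log name is arbitrary; the exact log layout varies by wget version):
wget --spider -nd -r -o spider.log <URL>
grep -B2 '404 Not Found' spider.log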

# To download every URL listed in a text file:
wget -i ~/Desktop/urls.txt
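
# The file is plain text with one URL per line, e.g.:
#   https://example.com/a.iso
#   https://example.com/b.iso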

# To save downloaded files into a given directory instead of the current one:
wget --directory-prefix=Downloads https://example.com/somefile

# To mirror a website:
wget -m https://example.com

# To grab all .mp3 links from a page (uses lynx to extract the links; the URL is a placeholder):
mp3=$(lynx -dump https://example.com | grep 'http://' | awk '/mp3/{print $2}')
for i in $mp3; do
    wget "$i"
done
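
# A simpler alternative using wget's own accept filter, assuming the .mp3 files are linked directly from the page:
wget -r -l1 -nd -A mp3 https://example.com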