;;; url.el --- Uniform Resource Locator retrieval tool -*- lexical-binding: t -*-

;; Copyright (C) 1996-1999, 2001, 2004-2014 Free Software Foundation,
;; Inc.

;; Author: Bill Perry <wmperry@gnu.org>
;; Keywords: comm, data, processes, hypermedia

;; This file is part of GNU Emacs.

;; GNU Emacs is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; GNU Emacs is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.

;;; Commentary:

;; Registered URI schemes: http://www.iana.org/assignments/uri-schemes

;;; Code:

(require 'mailcap)

(eval-when-compile
  (require 'mm-decode)
  (require 'mm-view))

(require 'url-vars)
(require 'url-cookie)
(require 'url-history)
(require 'url-expand)
(require 'url-privacy)
(require 'url-methods)
(require 'url-proxy)
(require 'url-parse)
(require 'url-util)

(defcustom url-configuration-directory
  (locate-user-emacs-file "url/" ".url/")
  "Directory used by the URL package for cookies, history, etc."
  :type 'directory
  :group 'url)
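
;; A sketch (not part of the original file) of pointing the URL package
;; at a different state directory; the directory name "url-data/" below
;; is arbitrary.  `url-do-setup' will then place the "cookies" and
;; "history" files inside it.
;;
;; (setq url-configuration-directory
;;       (locate-user-emacs-file "url-data/"))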

(defun url-do-setup ()
  "Set up the URL package.
This is to avoid conflict with user settings if URL is dumped with
Emacs."
  (unless url-setup-done

    ;; Make OS/2 happy
    ;;(push '("http" "80") tcp-binary-process-input-services)

    (mailcap-parse-mailcaps)
    (mailcap-parse-mimetypes)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    (setq url-cookie-file
          (or url-cookie-file
              (expand-file-name "cookies" url-configuration-directory)))

    (setq url-history-file
          (or url-history-file
              (expand-file-name "history" url-configuration-directory)))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (url-history-parse-history)
    (url-history-setup-save-timer)

    ;; Ditto for cookies
    (url-cookie-setup-save-timer)
    (url-cookie-parse-file url-cookie-file)

    ;; Read in proxy gateways
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
                        (or (getenv "NO_PROXY")
                            (getenv "no_PROXY")
                            (getenv "no_proxy")))))
      (if noproxy
          (setq url-proxy-services
                (cons (cons "no_proxy"
                            (concat "\\("
                                    (mapconcat
                                     (lambda (x)
                                       (cond
                                        ((= x ?,) "\\|")
                                        ((= x ? ) "")
                                        ((= x ?.) (regexp-quote "."))
                                        ((= x ?*) ".*")
                                        ((= x ??) ".")
                                        (t (char-to-string x))))
                                     noproxy "") "\\)"))
                      url-proxy-services))))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))
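
;; A sketch (not part of the original file) of what the no_proxy
;; translation above produces; the host names are made up.  With the
;; environment variable set to "localhost, *.example.com",
;; `url-proxy-services' gains the entry
;;
;;   ("no_proxy" . "\\(localhost\\|.*\\.example\\.com\\)")
;;
;; which `url-find-proxy-for-url' matches against each request's host.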

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Retrieval functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defvar url-redirect-buffer nil
  "New buffer into which the retrieval will take place.
Sometimes while retrieving a URL, the URL library needs to use another buffer
than the one returned initially by `url-retrieve'.  In this case, it sets this
variable in the original buffer as a forwarding pointer.")

(defvar url-retrieve-number-of-calls 0)
(autoload 'url-cache-prune-cache "url-cache")

;;;###autoload
(defun url-retrieve (url callback &optional cbargs silent inhibit-cookies)
  "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
URL is either a string or a parsed URL.  If it is a string
containing characters that are not valid in a URI, those
characters are percent-encoded; see `url-encode-url'.

CALLBACK is called when the object has been completely retrieved, with
the current buffer containing the object, and any MIME headers associated
with it.  It is called as (apply CALLBACK STATUS CBARGS).
STATUS is a plist representing what happened during the request,
with most recent events first, or an empty list if no events have
occurred.  Each pair is one of:

\(:redirect REDIRECTED-TO) - the request was redirected to this URL
\(:error (ERROR-SYMBOL . DATA)) - an error occurred.  The error can be
signaled with (signal ERROR-SYMBOL DATA).

Return the buffer URL will load into, or nil if the process has
already completed (i.e. URL was a mailto URL or similar; in this case
the callback is not called).

The variables `url-request-data', `url-request-method' and
`url-request-extra-headers' can be dynamically bound around the
request; dynamic binding of other variables doesn't necessarily
take effect.

If SILENT, then don't message progress reports and the like.
If INHIBIT-COOKIES, cookies will neither be stored nor sent to
the server.
If URL is a multibyte string, it will be encoded as utf-8 and
URL-encoded before it's used."
;;; XXX: There is code in Emacs that does dynamic binding
;;; of the following variables around url-retrieve:
;;; url-standalone-mode, url-gateway-unplugged, w3-honor-stylesheets,
;;; url-confirmation-func, url-cookie-multiple-line,
;;; url-cookie-{{,secure-}storage,confirmation}
;;; url-standalone-mode and url-gateway-unplugged should work as
;;; usual.  url-confirmation-func is only used in nnwarchive.el and
;;; webmail.el; the latter should be updated.  Is
;;; url-cookie-multiple-line needed anymore?  The other url-cookie-*
;;; are (for now) only used in synchronous retrievals.
  (url-retrieve-internal url callback (cons nil cbargs) silent
                         inhibit-cookies))
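
;; A minimal usage sketch (not part of the original file); the function
;; name `url--example-retrieve' is made up for illustration.
(defun url--example-retrieve (url)
  "Sketch: fetch URL asynchronously and report how the request ended."
  (url-retrieve url
                (lambda (status)
                  ;; STATUS is the plist described in `url-retrieve';
                  ;; an :error entry carries (ERROR-SYMBOL . DATA).
                  (if (plist-get status :error)
                      (message "Fetching %s failed: %S"
                               url (plist-get status :error))
                    ;; The buffer holds the MIME headers and the body.
                    (message "Fetched %s (%d bytes, headers included)"
                             url (buffer-size)))
                  (kill-buffer (current-buffer)))))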

(defun url-retrieve-internal (url callback cbargs &optional silent
                                  inhibit-cookies)
  "Internal function; external interface is `url-retrieve'.
CBARGS is the list of arguments that the callback function will
receive; its first element should be a plist specifying what has
happened so far during the request, as described in the docstring
of `url-retrieve' (if in doubt, specify nil).

If SILENT, don't message progress reports and the like.
If INHIBIT-COOKIES, cookies will neither be stored nor sent to
the server.
If URL is a multibyte string, it will be encoded as utf-8 and
URL-encoded before it's used."
  (url-do-setup)
  (url-gc-dead-buffers)
  (when (stringp url)
    (set-text-properties 0 (length url) nil url)
    (setq url (url-encode-url url)))
  (if (not (vectorp url))
      (setq url (url-generic-parse-url url)))
  (if (not (functionp callback))
      (error "Must provide a callback function to url-retrieve"))
  (unless (url-type url)
    (error "Bad url: %s" (url-recreate-url url)))
  (setf (url-silent url) silent)
  (setf (url-use-cookies url) (not inhibit-cookies))
  ;; Once in a while, remove old entries from the URL cache.
  (when (zerop (% url-retrieve-number-of-calls 1000))
    (condition-case error
        (url-cache-prune-cache)
      (file-error
       (message "Error when expiring the cache: %s" error))))
  (setq url-retrieve-number-of-calls (1+ url-retrieve-number-of-calls))
  (let ((loader (url-scheme-get-property (url-type url) 'loader))
        (url-using-proxy (if (url-host url)
                             (url-find-proxy-for-url url (url-host url))))
        (buffer nil)
        (asynch (url-scheme-get-property (url-type url) 'asynchronous-p)))
    (if url-using-proxy
        (setq asynch t
              loader 'url-proxy))
    (if asynch
        (let ((url-current-object url))
          (setq buffer (funcall loader url callback cbargs)))
      (setq buffer (funcall loader url))
      (if buffer
          (with-current-buffer buffer
            (apply callback cbargs))))
    (if url-history-track
        (url-history-update-url url (current-time)))
    buffer))

;;;###autoload
(defun url-retrieve-synchronously (url &optional silent inhibit-cookies)
  "Retrieve URL synchronously.
Return the buffer containing the data, or nil if there are no data
associated with it (the case for dired, info, or mailto URLs that need
no further processing).  URL is either a string or a parsed URL."
  (url-do-setup)

  (let ((retrieval-done nil)
        (asynch-buffer nil))
    (setq asynch-buffer
          (url-retrieve url (lambda (&rest ignored)
                              (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
                              (setq retrieval-done t
                                    asynch-buffer (current-buffer)))
                        nil silent inhibit-cookies))
    (if (null asynch-buffer)
        ;; We do not need to do anything, it was a mailto or something
        ;; similar that takes processing completely outside of the URL
        ;; package.
        nil
      (let ((proc (get-buffer-process asynch-buffer)))
        ;; If the access method was synchronous, `retrieval-done' should
        ;; hopefully already be set to t.  If it is nil, and `proc' is also
        ;; nil, it implies that the async process is not running in
        ;; asynch-buffer.  This happens e.g. for FTP files.  In such a case
        ;; url-file.el should probably set something like a `url-process'
        ;; buffer-local variable so we can find the exact process that we
        ;; should be waiting for.  In the mean time, we'll just wait for any
        ;; process output.
        (while (not retrieval-done)
          (url-debug 'retrieval
                     "Spinning in url-retrieve-synchronously: %S (%S)"
                     retrieval-done asynch-buffer)
          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
              (setq proc (get-buffer-process
                          (setq asynch-buffer
                                (buffer-local-value 'url-redirect-buffer
                                                    asynch-buffer))))
            (if (and proc (memq (process-status proc)
                                '(closed exit signal failed))
                     ;; Make sure another process hasn't been started.
                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
                ;; FIXME: It's not clear whether url-retrieve's callback is
                ;; guaranteed to be called or not.  It seems that url-http
                ;; decides sometimes consciously not to call it, so it's not
                ;; clear that it's a bug, but even then we need to decide how
                ;; url-http can then warn us that the download has completed.
                ;; In the mean time, we use this here workaround.
                ;; XXX: The callback must always be called.  Any
                ;; exception is a bug that should be fixed, not worked
                ;; around.
                (progn ;; Call delete-process so we run any sentinel now.
                  (delete-process proc)
                  (setq retrieval-done t)))
            ;; We used to use `sit-for' here, but in some cases it wouldn't
            ;; work because apparently pending keyboard input would always
            ;; interrupt it before it got a chance to handle process input.
            ;; `sleep-for' was tried but it led to other forms of
            ;; hanging.  --Stef
            (unless (or (with-local-quit
                          (accept-process-output proc))
                        (null proc))
              ;; accept-process-output returned nil, maybe because the process
              ;; exited (and may have been replaced with another).  If we got
              ;; a quit, just stop.
              (when quit-flag
                (delete-process proc))
              (setq proc (and (not quit-flag)
                              (get-buffer-process asynch-buffer)))))))
      asynch-buffer)))
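
;; A usage sketch (not part of the original file); the function name
;; `url--example-fetch-body' is made up for illustration.
(defun url--example-fetch-body (url)
  "Sketch: fetch URL synchronously and return the response body as a string."
  (let ((buffer (url-retrieve-synchronously url)))
    (when buffer
      (unwind-protect
          (with-current-buffer buffer
            ;; The buffer begins with the MIME headers; the body starts
            ;; after the first blank line.
            (goto-char (point-min))
            (when (re-search-forward "\r?\n\r?\n" nil t)
              (buffer-substring-no-properties (point) (point-max))))
        (kill-buffer buffer)))))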

;; url-mm-callback called from url-mm, which requires mm-decode.
(declare-function mm-dissect-buffer "mm-decode"
                  (&optional no-strict-mime loose-mime from))
(declare-function mm-display-part "mm-decode"
                  (handle &optional no-default force))

(defun url-mm-callback (&rest ignored)
  (let ((handle (mm-dissect-buffer t)))
    (url-mark-buffer-as-dead (current-buffer))
    (with-current-buffer
        (generate-new-buffer (url-recreate-url url-current-object))
      (if (eq (mm-display-part handle) 'external)
          (progn
            (set-process-sentinel
             ;; Fixme: this shouldn't have to know the form of the
             ;; undisplayer produced by `mm-display-part'.
             (get-buffer-process (cdr (mm-handle-undisplayer handle)))
             `(lambda (proc event)
                (mm-destroy-parts (quote ,handle))))
            (message "Viewing externally")
            (kill-buffer (current-buffer)))
        (display-buffer (current-buffer))
        (add-hook 'kill-buffer-hook
                  `(lambda () (mm-destroy-parts ',handle))
                  nil
                  t)))))

(defun url-mm-url (url)
  "Retrieve URL and pass to the appropriate viewing application."
  ;; These requires could advantageously be moved to url-mm-callback or
  ;; turned into autoloads, but I suspect that it would introduce some bugs
  ;; because loading those files from a process sentinel or filter may
  ;; result in some undesirable corner cases.
  (require 'mm-decode)
  (require 'mm-view)
  (url-retrieve url 'url-mm-callback nil))
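
;; A usage sketch (not part of the original file); the URL is arbitrary
;; and only illustrates the calling convention.
;;
;; (url-mm-url "https://www.gnu.org/graphics/heckert_gnu.small.png")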

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Miscellaneous
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar url-dead-buffer-list nil)

(defun url-mark-buffer-as-dead (buff)
  (push buff url-dead-buffer-list))

(defun url-gc-dead-buffers ()
  (let ((buff))
    (while (setq buff (pop url-dead-buffer-list))
      (if (buffer-live-p buff)
          (kill-buffer buff)))))

(cond
 ((fboundp 'display-warning)
  (defalias 'url-warn 'display-warning))
 ((fboundp 'warn)
  (defun url-warn (class message &optional level)
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  (defun url-warn (class message &optional level)
    (with-current-buffer (get-buffer-create "*URL-WARNINGS*")
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))
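
;; A usage sketch (not part of the original file); the class symbol and
;; message text are made up.
;;
;; (url-warn 'url "Unexpected response from server" :warning)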

(provide 'url)

;;; url.el ends here