;;; url.el --- Uniform Resource Locator retrieval tool

;; Copyright (C) 1996, 1997, 1998, 1999, 2001, 2004,
;;   2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.

;; Author: Bill Perry <wmperry@gnu.org>
;; Keywords: comm, data, processes, hypermedia

;; This file is part of GNU Emacs.
;;
;; GNU Emacs is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; GNU Emacs is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.

;;; Commentary:

;; Registered URI schemes: http://www.iana.org/assignments/uri-schemes

;;; Code:

(eval-when-compile (require 'cl))

(require 'mailcap)

(eval-when-compile
  (require 'mm-decode)
  (require 'mm-view))

(require 'url-vars)
(require 'url-cookie)
(require 'url-history)
(require 'url-expand)
(require 'url-privacy)
(require 'url-methods)
(require 'url-proxy)
(require 'url-parse)
(require 'url-util)


(defcustom url-configuration-directory
  (locate-user-emacs-file "url/" ".url/")
  "Directory used by the URL package for cookies, history, etc."
  :type 'directory
  :group 'url)
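
;; Illustrative sketch (not part of the original file): with default
;; settings this resolves to the "url/" subdirectory of
;; `user-emacs-directory', so derived paths look roughly like:
;;
;;   (expand-file-name "cookies" url-configuration-directory)
;;     ;; => "~/.emacs.d/url/cookies", or under the legacy "~/.url/"
;;     ;;    directory if that is what `locate-user-emacs-file' finds.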

(defun url-do-setup ()
  "Set up the URL package.
This is to avoid conflict with user settings if URL is dumped with
Emacs."
  (unless url-setup-done

    ;; Make OS/2 happy
    ;;(push '("http" "80") tcp-binary-process-input-services)

    (mailcap-parse-mailcaps)
    (mailcap-parse-mimetypes)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    (setq url-cookie-file
          (or url-cookie-file
              (expand-file-name "cookies" url-configuration-directory)))

    (setq url-history-file
          (or url-history-file
              (expand-file-name "history" url-configuration-directory)))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (url-history-parse-history)
    (url-history-setup-save-timer)

    ;; Ditto for cookies
    (url-cookie-setup-save-timer)
    (url-cookie-parse-file url-cookie-file)

    ;; Read in proxy gateways
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
                        (or (getenv "NO_PROXY")
                            (getenv "no_PROXY")
                            (getenv "no_proxy")))))
      (if noproxy
          (setq url-proxy-services
                (cons (cons "no_proxy"
                            (concat "\\("
                                    (mapconcat
                                     (lambda (x)
                                       (cond
                                        ((= x ?,) "\\|")
                                        ((= x ? ) "")
                                        ((= x ?.) (regexp-quote "."))
                                        ((= x ?*) ".*")
                                        ((= x ??) ".")
                                        (t (char-to-string x))))
                                     noproxy "") "\\)"))
                      url-proxy-services))))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))
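
;; Illustrative sketch (not from the original file): the character-wise
;; translation above turns a shell-style NO_PROXY value such as
;; "localhost, *.example.com" into a single alternation regexp, roughly:
;;
;;   ("no_proxy" . "\\(localhost\\|.*\\.example\\.com\\)")
;;
;; which `url-find-proxy-for-url' can then match against the host when
;; consulting `url-proxy-services'.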

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Retrieval functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defvar url-redirect-buffer nil
  "New buffer into which the retrieval will take place.
Sometimes while retrieving a URL, the URL library needs to use another buffer
than the one returned initially by `url-retrieve'.  In this case, it sets this
variable in the original buffer as a forwarding pointer.")

;;;###autoload
(defun url-retrieve (url callback &optional cbargs silent)
  "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
URL is either a string or a parsed URL.

CALLBACK is called when the object has been completely retrieved, with
the current buffer containing the object, and any MIME headers associated
with it.  It is called as (apply CALLBACK STATUS CBARGS).
STATUS is a list with an even number of elements representing
what happened during the request, with most recent events first,
or an empty list if no events have occurred.  Each pair is one of:

\(:redirect REDIRECTED-TO) - the request was redirected to this URL
\(:error (ERROR-SYMBOL . DATA)) - an error occurred.  The error can be
signaled with (signal ERROR-SYMBOL DATA).

Return the buffer URL will load into, or nil if the process has
already completed (i.e. URL was a mailto URL or similar; in this case
the callback is not called).

The variables `url-request-data', `url-request-method' and
`url-request-extra-headers' can be dynamically bound around the
request; dynamic binding of other variables doesn't necessarily
take effect.

If SILENT, don't display progress messages and the like."
;;; XXX: There is code in Emacs that does dynamic binding
;;; of the following variables around url-retrieve:
;;; url-standalone-mode, url-gateway-unplugged, w3-honor-stylesheets,
;;; url-confirmation-func, url-cookie-multiple-line,
;;; url-cookie-{{,secure-}storage,confirmation}
;;; url-standalone-mode and url-gateway-unplugged should work as
;;; usual.  url-confirmation-func is only used in nnwarchive.el and
;;; webmail.el; the latter should be updated.  Is
;;; url-cookie-multiple-line needed anymore?  The other url-cookie-*
;;; are (for now) only used in synchronous retrievals.
  (url-retrieve-internal url callback (cons nil cbargs) silent))
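
;; Illustrative usage sketch (not part of the original file); the URL
;; and the handling below only show one way of inspecting STATUS:
;;
;; (url-retrieve
;;  "http://www.example.com/"
;;  (lambda (status)
;;    (if (plist-get status :error)
;;        (message "Fetch failed: %S" (plist-get status :error))
;;      ;; The current buffer holds the MIME headers followed by the body.
;;      (message "Fetched %d characters" (buffer-size)))
;;    (kill-buffer (current-buffer))))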

(defun url-retrieve-internal (url callback cbargs &optional silent)
  "Internal function; external interface is `url-retrieve'.
CBARGS is what the callback will actually receive - the first item is
the list of events, as described in the docstring of `url-retrieve'.

If SILENT, don't display progress messages and the like."
  (url-do-setup)
  (url-gc-dead-buffers)
  (if (stringp url)
      (set-text-properties 0 (length url) nil url))
  (if (not (vectorp url))
      (setq url (url-generic-parse-url url)))
  (if (not (functionp callback))
      (error "Must provide a callback function to url-retrieve"))
  (unless (url-type url)
    (error "Bad url: %s" (url-recreate-url url)))
  (setf (url-silent url) silent)
  (let ((loader (url-scheme-get-property (url-type url) 'loader))
        (url-using-proxy (if (url-host url)
                             (url-find-proxy-for-url url (url-host url))))
        (buffer nil)
        (asynch (url-scheme-get-property (url-type url) 'asynchronous-p)))
    (if url-using-proxy
        (setq asynch t
              loader 'url-proxy))
    (if asynch
        (let ((url-current-object url))
          (setq buffer (funcall loader url callback cbargs)))
      (setq buffer (funcall loader url))
      (if buffer
          (with-current-buffer buffer
            (apply callback cbargs))))
    (if url-history-track
        (url-history-update-url url (current-time)))
    buffer))

;;;###autoload
(defun url-retrieve-synchronously (url)
  "Retrieve URL synchronously.
Return the buffer containing the data, or nil if there are no data
associated with it (the case for dired, info, or mailto URLs that need
no further processing).  URL is either a string or a parsed URL."
  (url-do-setup)

  (lexical-let ((retrieval-done nil)
                (asynch-buffer nil))
    (setq asynch-buffer
          (url-retrieve url (lambda (&rest ignored)
                              (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
                              (setq retrieval-done t
                                    asynch-buffer (current-buffer)))))
    (if (null asynch-buffer)
        ;; We do not need to do anything, it was a mailto or something
        ;; similar that takes processing completely outside of the URL
        ;; package.
        nil
      (let ((proc (get-buffer-process asynch-buffer)))
        ;; If the access method was synchronous, `retrieval-done' should
        ;; hopefully already be set to t.  If it is nil, and `proc' is also
        ;; nil, it implies that the async process is not running in
        ;; asynch-buffer.  This happens e.g. for FTP files.  In such a case
        ;; url-file.el should probably set something like a `url-process'
        ;; buffer-local variable so we can find the exact process that we
        ;; should be waiting for.  In the meantime, we'll just wait for any
        ;; process output.
        (while (not retrieval-done)
          (url-debug 'retrieval
                     "Spinning in url-retrieve-synchronously: %S (%S)"
                     retrieval-done asynch-buffer)
          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
              (setq proc (get-buffer-process
                          (setq asynch-buffer
                                (buffer-local-value 'url-redirect-buffer
                                                    asynch-buffer))))
            (if (and proc (memq (process-status proc)
                                '(closed exit signal failed))
                     ;; Make sure another process hasn't been started.
                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
                ;; FIXME: It's not clear whether url-retrieve's callback is
                ;; guaranteed to be called or not.  It seems that url-http
                ;; decides sometimes consciously not to call it, so it's not
                ;; clear that it's a bug, but even then we need to decide how
                ;; url-http can then warn us that the download has completed.
                ;; In the meantime, we use this workaround here.
                ;; XXX: The callback must always be called.  Any
                ;; exception is a bug that should be fixed, not worked
                ;; around.
                (progn ;; Call delete-process so we run any sentinel now.
                  (delete-process proc)
                  (setq retrieval-done t)))
            ;; We used to use `sit-for' here, but in some cases it wouldn't
            ;; work because apparently pending keyboard input would always
            ;; interrupt it before it got a chance to handle process input.
            ;; `sleep-for' was tried but it led to other forms of
            ;; hanging.  --Stef
            (unless (or (with-local-quit
                          (accept-process-output proc))
                        (null proc))
              ;; accept-process-output returned nil, maybe because the process
              ;; exited (and may have been replaced with another).  If we got
              ;; a quit, just stop.
              (when quit-flag
                (delete-process proc))
              (setq proc (and (not quit-flag)
                              (get-buffer-process asynch-buffer)))))))
      asynch-buffer)))
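
;; Illustrative usage sketch (not part of the original file):
;;
;; (let ((buffer (url-retrieve-synchronously "http://www.example.com/")))
;;   (when buffer
;;     (with-current-buffer buffer
;;       ;; The buffer contains the MIME headers followed by the body.
;;       (prog1 (buffer-string)
;;         (kill-buffer)))))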

(defun url-mm-callback (&rest ignored)
  (let ((handle (mm-dissect-buffer t)))
    (url-mark-buffer-as-dead (current-buffer))
    (with-current-buffer
        (generate-new-buffer (url-recreate-url url-current-object))
      (if (eq (mm-display-part handle) 'external)
          (progn
            (set-process-sentinel
             ;; Fixme: this shouldn't have to know the form of the
             ;; undisplayer produced by `mm-display-part'.
             (get-buffer-process (cdr (mm-handle-undisplayer handle)))
             `(lambda (proc event)
                (mm-destroy-parts (quote ,handle))))
            (message "Viewing externally")
            (kill-buffer (current-buffer)))
        (display-buffer (current-buffer))
        (add-hook 'kill-buffer-hook
                  `(lambda () (mm-destroy-parts ',handle))
                  nil
                  t)))))

(defun url-mm-url (url)
  "Retrieve URL and pass to the appropriate viewing application."
  ;; These requires could advantageously be moved to url-mm-callback or
  ;; turned into autoloads, but I suspect that it would introduce some bugs
  ;; because loading those files from a process sentinel or filter may
  ;; result in some undesirable corner cases.
  (require 'mm-decode)
  (require 'mm-view)
  (url-retrieve url 'url-mm-callback nil))
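
;; Illustrative usage sketch (not part of the original file):
;;
;; (url-mm-url "http://www.example.com/document.pdf")
;;
;; Depending on the mailcap configuration, the retrieved document is
;; displayed in an Emacs buffer or handed to an external viewer.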

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Miscellaneous
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar url-dead-buffer-list nil
  "List of buffers waiting to be killed by `url-gc-dead-buffers'.")

(defun url-mark-buffer-as-dead (buff)
  "Arrange for buffer BUFF to be killed by `url-gc-dead-buffers'."
  (push buff url-dead-buffer-list))

(defun url-gc-dead-buffers ()
  "Kill any live buffers queued in `url-dead-buffer-list'."
  (let ((buff))
    (while (setq buff (pop url-dead-buffer-list))
      (if (buffer-live-p buff)
          (kill-buffer buff)))))

(cond
 ((fboundp 'display-warning)
  (defalias 'url-warn 'display-warning))
 ((fboundp 'warn)
  (defun url-warn (class message &optional level)
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  (defun url-warn (class message &optional level)
    (with-current-buffer (get-buffer-create "*URL-WARNINGS*")
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))
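
;; Illustrative usage sketch (not part of the original file):
;;
;; (url-warn 'url "Unexpected response from server" 'warning)
;;
;; When `display-warning' is available, this goes through the standard
;; warnings mechanism; otherwise one of the fallbacks above is used.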

(provide 'url)

;; arch-tag: bc182f1f-d187-4f10-b961-47af2066579a
;;; url.el ends here