Diffstat (limited to 'lisp/url/url-http.el')
-rw-r--r-- | lisp/url/url-http.el | 208
1 file changed, 117 insertions, 91 deletions
diff --git a/lisp/url/url-http.el b/lisp/url/url-http.el
index 16c3a6a1e62..4e5d017036c 100644
--- a/lisp/url/url-http.el
+++ b/lisp/url/url-http.el
@@ -36,6 +36,7 @@
 (defvar url-current-object)
 (defvar url-http-after-change-function)
 (defvar url-http-chunked-counter)
+(defvar url-http-chunked-last-crlf-missing)
 (defvar url-http-chunked-length)
 (defvar url-http-chunked-start)
 (defvar url-http-connection-opened)
@@ -332,7 +333,10 @@ Use `url-http-referer' as the Referer-header (subject to `url-privacy-level')."
              (if (and using-proxy
                       ;; Bug#35969.
                       (not (equal "https" (url-type url-http-target-url))))
-                 (url-recreate-url url-http-target-url) real-fname))
+                 (let ((url (copy-sequence url-http-target-url)))
+                   (setf (url-host url) (puny-encode-domain (url-host url)))
+                   (url-recreate-url url))
+               real-fname))
             " HTTP/" url-http-version "\r\n"
             ;; Version of MIME we speak
             "MIME-Version: 1.0\r\n"
@@ -585,6 +589,13 @@ should be shown to the user."
   (url-http-debug "url-http-parse-headers called in (%s)" (buffer-name))
   (url-http-parse-response)
   (mail-narrow-to-head)
+  (when url-debug
+    (save-excursion
+      (goto-char (point-min))
+      (while (not (eobp))
+        (url-http-debug "Response: %s"
+                        (buffer-substring (point) (line-end-position)))
+        (forward-line 1))))
   ;;(narrow-to-region (point-min) url-http-end-of-headers)
   (let ((connection (mail-fetch-field "Connection")))
     ;; In HTTP 1.0, keep the connection only if there is a
@@ -1068,90 +1079,105 @@ the callback to be triggered."
 Cannot give a sophisticated percentage, but we need a different
 function to look for the special 0-length chunk that signifies
 the end of the document."
-  (save-excursion
-    (goto-char st)
-    (let ((read-next-chunk t)
-          (case-fold-search t)
-          (regexp nil)
-          (no-initial-crlf nil))
-      ;; We need to loop thru looking for more chunks even within
-      ;; one after-change-function call.
-      (while read-next-chunk
-        (setq no-initial-crlf (= 0 url-http-chunked-counter))
-        (if url-http-content-type
+  (if url-http-chunked-last-crlf-missing
+      (progn
+        (goto-char url-http-chunked-last-crlf-missing)
+        (if (not (looking-at "\r\n"))
+            (url-http-debug
+             "Still spinning for the terminator of last chunk...")
+          (url-http-debug "Saw the last CRLF.")
+          (delete-region (match-beginning 0) (match-end 0))
+          (when (url-http-parse-headers)
+            (url-http-activate-callback))))
+    (save-excursion
+      (goto-char st)
+      (let ((read-next-chunk t)
+            (case-fold-search t)
+            (regexp nil)
+            (no-initial-crlf nil))
+        ;; We need to loop thru looking for more chunks even within
+        ;; one after-change-function call.
+        (while read-next-chunk
+          (setq no-initial-crlf (= 0 url-http-chunked-counter))
+          (if url-http-content-type
+              (url-display-percentage nil
+                                      "Reading [%s]... chunk #%d"
+                                      url-http-content-type url-http-chunked-counter)
            (url-display-percentage nil
-                                    "Reading [%s]... chunk #%d"
-                                    url-http-content-type url-http-chunked-counter)
-          (url-display-percentage nil
-                                  "Reading... chunk #%d"
-                                  url-http-chunked-counter))
-        (url-http-debug "Reading chunk %d (%d %d %d)"
-                        url-http-chunked-counter st nd length)
-        (setq regexp (if no-initial-crlf
-                         "\\([0-9a-z]+\\).*\r?\n"
-                       "\r?\n\\([0-9a-z]+\\).*\r?\n"))
-
-        (if url-http-chunked-start
-            ;; We know how long the chunk is supposed to be, skip over
-            ;; leading crap if possible.
-            (if (> nd (+ url-http-chunked-start url-http-chunked-length))
-                (progn
-                  (url-http-debug "Got to the end of chunk #%d!"
-                                  url-http-chunked-counter)
-                  (goto-char (+ url-http-chunked-start
-                                url-http-chunked-length)))
-              (url-http-debug "Still need %d bytes to hit end of chunk"
-                              (- (+ url-http-chunked-start
-                                    url-http-chunked-length)
-                                 nd))
-              (setq read-next-chunk nil)))
-        (if (not read-next-chunk)
-            (url-http-debug "Still spinning for next chunk...")
-          (if no-initial-crlf (skip-chars-forward "\r\n"))
-          (if (not (looking-at regexp))
-              (progn
-                ;; Must not have received the entirety of the chunk header,
-                ;; need to spin some more.
-                (url-http-debug "Did not see start of chunk @ %d!" (point))
-                (setq read-next-chunk nil))
-            ;; The data we got may have started in the middle of the
-            ;; initial chunk header, so move back to the start of the
-            ;; line and re-compute.
-            (when (= url-http-chunked-counter 0)
-              (beginning-of-line)
-              (looking-at regexp))
-            (add-text-properties (match-beginning 0) (match-end 0)
-                                 (list 'chunked-encoding t
-                                       'face 'cursor
-                                       'invisible t))
-            (setq url-http-chunked-length (string-to-number (buffer-substring
-                                                             (match-beginning 1)
-                                                             (match-end 1))
-                                                            16)
-                  url-http-chunked-counter (1+ url-http-chunked-counter)
-                  url-http-chunked-start (set-marker
-                                          (or url-http-chunked-start
-                                              (make-marker))
-                                          (match-end 0)))
-            (delete-region (match-beginning 0) (match-end 0))
-            (url-http-debug "Saw start of chunk %d (length=%d, start=%d"
-                            url-http-chunked-counter url-http-chunked-length
-                            (marker-position url-http-chunked-start))
-            (if (= 0 url-http-chunked-length)
-                (progn
-                  ;; Found the end of the document! Wheee!
-                  (url-http-debug "Saw end of stream chunk!")
-                  (setq read-next-chunk nil)
-                  (url-display-percentage nil nil)
-                  ;; Every chunk, even the last 0-length one, is
-                  ;; terminated by CRLF. Skip it.
-                  (when (looking-at "\r?\n")
-                    (url-http-debug "Removing terminator of last chunk")
-                    (delete-region (match-beginning 0) (match-end 0)))
-                  (if (re-search-forward "^\r?\n" nil t)
-                      (url-http-debug "Saw end of trailers..."))
-                  (if (url-http-parse-headers)
-                      (url-http-activate-callback))))))))))
+                                   "Reading... chunk #%d"
+                                   url-http-chunked-counter))
+          (url-http-debug "Reading chunk %d (%d %d %d)"
+                          url-http-chunked-counter st nd length)
+          (setq regexp (if no-initial-crlf
+                           "\\([0-9a-z]+\\).*\r?\n"
+                         "\r?\n\\([0-9a-z]+\\).*\r?\n"))
+
+          (if url-http-chunked-start
+              ;; We know how long the chunk is supposed to be, skip over
+              ;; leading crap if possible.
+              (if (> nd (+ url-http-chunked-start url-http-chunked-length))
+                  (progn
+                    (url-http-debug "Got to the end of chunk #%d!"
+                                    url-http-chunked-counter)
+                    (goto-char (+ url-http-chunked-start
+                                  url-http-chunked-length)))
+                (url-http-debug "Still need %d bytes to hit end of chunk"
+                                (- (+ url-http-chunked-start
+                                      url-http-chunked-length)
+                                   nd))
+                (setq read-next-chunk nil)))
+          (if (not read-next-chunk)
+              (url-http-debug "Still spinning for next chunk...")
+            (if no-initial-crlf (skip-chars-forward "\r\n"))
+            (if (not (looking-at regexp))
+                (progn
+                  ;; Must not have received the entirety of the chunk header,
+                  ;; need to spin some more.
+                  (url-http-debug "Did not see start of chunk @ %d!" (point))
+                  (setq read-next-chunk nil))
+              ;; The data we got may have started in the middle of the
+              ;; initial chunk header, so move back to the start of the
+              ;; line and re-compute.
+              (when (= url-http-chunked-counter 0)
+                (beginning-of-line)
+                (looking-at regexp))
+              (add-text-properties (match-beginning 0) (match-end 0)
+                                   (list 'chunked-encoding t
+                                         'face 'cursor
+                                         'invisible t))
+              (setq url-http-chunked-length
+                    (string-to-number (buffer-substring (match-beginning 1)
+                                                        (match-end 1))
+                                      16)
+                    url-http-chunked-counter (1+ url-http-chunked-counter)
+                    url-http-chunked-start (set-marker
+                                            (or url-http-chunked-start
+                                                (make-marker))
+                                            (match-end 0)))
+              (delete-region (match-beginning 0) (match-end 0))
+              (url-http-debug "Saw start of chunk %d (length=%d, start=%d"
+                              url-http-chunked-counter url-http-chunked-length
+                              (marker-position url-http-chunked-start))
+              (if (= 0 url-http-chunked-length)
+                  (progn
+                    ;; Found the end of the document! Wheee!
+                    (url-http-debug "Saw end of stream chunk!")
+                    (setq read-next-chunk nil)
+                    (url-display-percentage nil nil)
+                    ;; Every chunk, even the last 0-length one, is
+                    ;; terminated by CRLF. Skip it.
+                    (if (not (looking-at "\r?\n"))
+                        (progn
+                          (url-http-debug
+                           "Spinning for the terminator of last chunk...")
+                          (setq url-http-chunked-last-crlf-missing
+                                (point)))
+                      (url-http-debug "Removing terminator of last chunk")
+                      (delete-region (match-beginning 0) (match-end 0))
+                      (when (re-search-forward "^\r?\n" nil t)
+                        (url-http-debug "Saw end of trailers..."))
+                      (when (url-http-parse-headers)
+                        (url-http-activate-callback)))))))))))

 (defun url-http-wait-for-headers-change-function (_st nd _length)
   ;; This will wait for the headers to arrive and then splice in the
@@ -1304,9 +1330,7 @@ The return value of this function is the retrieval buffer.
   (cl-check-type url url "Need a pre-parsed URL.")
   (let* (;; (host (url-host (or url-using-proxy url)))
          ;; (port (url-port (or url-using-proxy url)))
-         (nsm-noninteractive (or url-request-noninteractive
-                                 (and (boundp 'url-http-noninteractive)
-                                      url-http-noninteractive)))
+         (nsm-noninteractive (not (url-interactive-p)))
          ;; The following binding is needed in url-open-stream, which
          ;; is called from url-http-find-free-connection.
          (url-current-object url)
@@ -1337,6 +1361,7 @@ The return value of this function is the retrieval buffer.
                 url-http-after-change-function
                 url-http-response-version
                 url-http-response-status
+                url-http-chunked-last-crlf-missing
                 url-http-chunked-length
                 url-http-chunked-counter
                 url-http-chunked-start
@@ -1361,6 +1386,7 @@ The return value of this function is the retrieval buffer.
              url-http-noninteractive url-request-noninteractive
              url-http-data url-request-data
              url-http-process connection
+             url-http-chunked-last-crlf-missing nil
              url-http-chunked-length nil
              url-http-chunked-start nil
              url-http-chunked-counter 0
@@ -1407,10 +1433,10 @@ The return value of this function is the retrieval buffer.
              (and proxy-auth (concat "Proxy-Authorization: " proxy-auth "\r\n")))
            "\r\n")
-          (url-host url-current-object)
+          (puny-encode-domain (url-host url-current-object))
           (or (url-port url-current-object)
               url-https-default-port)
-          (url-host url-current-object))))
+          (puny-encode-domain (url-host url-current-object)))))

 (defun url-https-proxy-after-change-function (_st _nd _length)
   (let* ((process-buffer (current-buffer))
@@ -1432,12 +1458,12 @@ The return value of this function is the retrieval buffer.
         (condition-case e
             (let ((tls-connection (gnutls-negotiate
                                    :process proc
-                                   :hostname (url-host url-current-object)
+                                   :hostname (puny-encode-domain (url-host url-current-object))
                                    :verify-error nil)))
               ;; check certificate validity
               (setq tls-connection
                     (nsm-verify-connection tls-connection
-                                           (url-host url-current-object)
+                                           (puny-encode-domain (url-host url-current-object))
                                            (url-port url-current-object)))
               (with-current-buffer process-buffer (erase-buffer))
               (set-process-buffer tls-connection process-buffer)
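
Several hunks above route the request host through `puny-encode-domain' (the proxy request line, the CONNECT request, and the gnutls-negotiate/nsm-verify-connection calls), so that internationalized host names go out on the wire in their ASCII "punycode" form. A minimal sketch of what that conversion does, assuming puny.el from the Emacs distribution; the domains below are illustrative and not taken from the patch:

;; Illustrative only -- not part of the patch.
(require 'puny)
(puny-encode-domain "bücher.example")  ; => "xn--bcher-kva.example"
(puny-encode-domain "example.com")     ; => "example.com" (all-ASCII labels pass through)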
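
The hunk at -585 makes `url-http-parse-headers' echo each response header line through `url-http-debug' when `url-debug' is non-nil. A small usage sketch, assuming the usual url-util.el behavior of collecting debug output in the *URL-DEBUG* buffer; the URL is illustrative:

;; Sketch: enable URL debugging, make a request, then look in *URL-DEBUG*
;; for the new "Response: ..." lines, one per header the server sent back.
(require 'url)
(setq url-debug t)
(url-retrieve "https://example.com/"
              (lambda (_status) (message "retrieval finished")))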
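
The remaining hunks add `url-http-chunked-last-crlf-missing', which records the buffer position where the CRLF terminating the final zero-length chunk has not yet arrived; the request is only finished (headers parsed, callback activated) once a later call of the after-change function sees that CRLF at the recorded position. A sketch of the chunked framing involved, illustrative rather than taken from the patch:

;; A complete chunked body as it would appear in the process buffer:
;;   "4\r\nWiki\r\n0\r\n\r\n"
;; i.e. chunk size, CRLF, data, CRLF, then the 0-length chunk and its
;; terminating CRLF.  If the network delivers everything up to "0\r\n" but
;; not that last "\r\n", the new code stores (point) in
;; `url-http-chunked-last-crlf-missing' and re-checks it when more data
;; arrives.
(let ((body "4\r\nWiki\r\n0\r\n\r\n"))
  (string-suffix-p "0\r\n\r\n" body))  ; => t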