api.py
  1. # The public API for feedparser
  2. # Copyright 2010-2023 Kurt McKee <contactme@kurtmckee.org>
  3. # Copyright 2002-2008 Mark Pilgrim
  4. # All rights reserved.
  5. #
  6. # This file is a part of feedparser.
  7. #
  8. # Redistribution and use in source and binary forms, with or without modification,
  9. # are permitted provided that the following conditions are met:
  10. #
  11. # * Redistributions of source code must retain the above copyright notice,
  12. # this list of conditions and the following disclaimer.
  13. # * Redistributions in binary form must reproduce the above copyright notice,
  14. # this list of conditions and the following disclaimer in the documentation
  15. # and/or other materials provided with the distribution.
  16. #
  17. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
  18. # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  19. # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  20. # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
  21. # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  22. # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  23. # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  24. # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  25. # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  26. # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  27. # POSSIBILITY OF SUCH DAMAGE.
  28. import io
  29. import urllib.error
  30. import urllib.parse
  31. import xml.sax
  32. from .datetimes import registerDateHandler, _parse_date
  33. from .encodings import convert_to_utf8
  34. from .exceptions import *
  35. from .html import _BaseHTMLProcessor
  36. from . import http
  37. from . import mixin
  38. from .mixin import _FeedParserMixin
  39. from .parsers.loose import _LooseFeedParser
  40. from .parsers.strict import _StrictFeedParser
  41. from .sanitizer import replace_doctype
  42. from .sgml import *
  43. from .urls import convert_to_idn, make_safe_absolute_uri
  44. from .util import FeedParserDict
# List of preferred XML parsers, by SAX driver name. These will be tried first,
# but if they're not installed, Python will keep searching through its own list
# of pre-installed parsers until it finds one that supports everything we need.
PREFERRED_XML_PARSERS = ["drv_libxml2"]

# Gate for the strict (SAX-based) parsing pass; parse() falls back to the
# loose parser when this is false.
_XML_AVAILABLE = True

# Maps internal feed-version identifiers to human-readable version strings.
# The empty key is used when the version could not be determined.
SUPPORTED_VERSIONS = {
    '': 'unknown',
    'rss090': 'RSS 0.90',
    'rss091n': 'RSS 0.91 (Netscape)',
    'rss091u': 'RSS 0.91 (Userland)',
    'rss092': 'RSS 0.92',
    'rss093': 'RSS 0.93',
    'rss094': 'RSS 0.94',
    'rss20': 'RSS 2.0',
    'rss10': 'RSS 1.0',
    'rss': 'RSS (unknown version)',
    'atom01': 'Atom 0.1',
    'atom02': 'Atom 0.2',
    'atom03': 'Atom 0.3',
    'atom10': 'Atom 1.0',
    'atom': 'Atom (unknown version)',
    'cdf': 'CDF',
}
  68. def _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result):
  69. """URL, filename, or string --> stream
  70. This function lets you define parsers that take any input source
  71. (URL, pathname to local or network file, or actual data as a string)
  72. and deal with it in a uniform manner. Returned object is guaranteed
  73. to have all the basic stdio read methods (read, readline, readlines).
  74. Just .close() the object when you're done with it.
  75. If the etag argument is supplied, it will be used as the value of an
  76. If-None-Match request header.
  77. If the modified argument is supplied, it can be a tuple of 9 integers
  78. (as returned by gmtime() in the standard Python time module) or a date
  79. string in any format supported by feedparser. Regardless, it MUST
  80. be in GMT (Greenwich Mean Time). It will be reformatted into an
  81. RFC 1123-compliant date and used as the value of an If-Modified-Since
  82. request header.
  83. If the agent argument is supplied, it will be used as the value of a
  84. User-Agent request header.
  85. If the referrer argument is supplied, it will be used as the value of a
  86. Referer[sic] request header.
  87. If handlers is supplied, it is a list of handlers used to build a
  88. urllib2 opener.
  89. if request_headers is supplied it is a dictionary of HTTP request headers
  90. that will override the values generated by FeedParser.
  91. :return: A bytes object.
  92. """
  93. if hasattr(url_file_stream_or_string, 'read'):
  94. return url_file_stream_or_string.read()
  95. if isinstance(url_file_stream_or_string, str) \
  96. and urllib.parse.urlparse(url_file_stream_or_string)[0] in ('http', 'https', 'ftp', 'file', 'feed'):
  97. return http.get(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
  98. # try to open with native open function (if url_file_stream_or_string is a filename)
  99. try:
  100. with open(url_file_stream_or_string, 'rb') as f:
  101. data = f.read()
  102. except (IOError, UnicodeEncodeError, TypeError, ValueError):
  103. # if url_file_stream_or_string is a str object that
  104. # cannot be converted to the encoding returned by
  105. # sys.getfilesystemencoding(), a UnicodeEncodeError
  106. # will be thrown
  107. # If url_file_stream_or_string is a string that contains NULL
  108. # (such as an XML document encoded in UTF-32), TypeError will
  109. # be thrown.
  110. pass
  111. else:
  112. return data
  113. # treat url_file_stream_or_string as string
  114. if not isinstance(url_file_stream_or_string, bytes):
  115. return url_file_stream_or_string.encode('utf-8')
  116. return url_file_stream_or_string
# The concrete parser classes are composed dynamically from the mixin layers.
# LooseFeedParser drives _FeedParserMixin from the package's own HTML
# tokenizer (_BaseHTMLProcessor); StrictFeedParser drives it from a SAX
# XML parser via xml.sax.handler.ContentHandler.
LooseFeedParser = type(
'LooseFeedParser',
(_LooseFeedParser, _FeedParserMixin, _BaseHTMLProcessor, object),
{},
)
StrictFeedParser = type(
'StrictFeedParser',
(_StrictFeedParser, _FeedParserMixin, xml.sax.handler.ContentHandler, object),
{},
)
  127. def parse(url_file_stream_or_string, etag=None, modified=None, agent=None, referrer=None, handlers=None, request_headers=None, response_headers=None, resolve_relative_uris=None, sanitize_html=None):
  128. """Parse a feed from a URL, file, stream, or string.
  129. :param url_file_stream_or_string:
  130. File-like object, URL, file path, or string. Both byte and text strings
  131. are accepted. If necessary, encoding will be derived from the response
  132. headers or automatically detected.
  133. Note that strings may trigger network I/O or filesystem access
  134. depending on the value. Wrap an untrusted string in
  135. a :class:`io.StringIO` or :class:`io.BytesIO` to avoid this. Do not
  136. pass untrusted strings to this function.
  137. When a URL is not passed the feed location to use in relative URL
  138. resolution should be passed in the ``Content-Location`` response header
  139. (see ``response_headers`` below).
  140. :param str etag: HTTP ``ETag`` request header.
  141. :param modified: HTTP ``Last-Modified`` request header.
  142. :type modified: :class:`str`, :class:`time.struct_time` 9-tuple, or
  143. :class:`datetime.datetime`
  144. :param str agent: HTTP ``User-Agent`` request header, which defaults to
  145. the value of :data:`feedparser.USER_AGENT`.
  146. :param referrer: HTTP ``Referer`` [sic] request header.
  147. :param request_headers:
  148. A mapping of HTTP header name to HTTP header value to add to the
  149. request, overriding internally generated values.
  150. :type request_headers: :class:`dict` mapping :class:`str` to :class:`str`
  151. :param response_headers:
  152. A mapping of HTTP header name to HTTP header value. Multiple values may
  153. be joined with a comma. If a HTTP request was made, these headers
  154. override any matching headers in the response. Otherwise this specifies
  155. the entirety of the response headers.
  156. :type response_headers: :class:`dict` mapping :class:`str` to :class:`str`
  157. :param bool resolve_relative_uris:
  158. Should feedparser attempt to resolve relative URIs absolute ones within
  159. HTML content? Defaults to the value of
  160. :data:`feedparser.RESOLVE_RELATIVE_URIS`, which is ``True``.
  161. :param bool sanitize_html:
  162. Should feedparser skip HTML sanitization? Only disable this if you know
  163. what you are doing! Defaults to the value of
  164. :data:`feedparser.SANITIZE_HTML`, which is ``True``.
  165. :return: A :class:`FeedParserDict`.
  166. """
  167. if not agent or sanitize_html is None or resolve_relative_uris is None:
  168. import feedparser
  169. if not agent:
  170. agent = feedparser.USER_AGENT
  171. if sanitize_html is None:
  172. sanitize_html = feedparser.SANITIZE_HTML
  173. if resolve_relative_uris is None:
  174. resolve_relative_uris = feedparser.RESOLVE_RELATIVE_URIS
  175. result = FeedParserDict(
  176. bozo=False,
  177. entries=[],
  178. feed=FeedParserDict(),
  179. headers={},
  180. )
  181. try:
  182. data = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
  183. except urllib.error.URLError as error:
  184. result.update({
  185. 'bozo': True,
  186. 'bozo_exception': error,
  187. })
  188. return result
  189. if not data:
  190. return result
  191. # overwrite existing headers using response_headers
  192. result['headers'].update(response_headers or {})
  193. data = convert_to_utf8(result['headers'], data, result)
  194. use_strict_parser = result['encoding'] and True or False
  195. result['version'], data, entities = replace_doctype(data)
  196. # Ensure that baseuri is an absolute URI using an acceptable URI scheme.
  197. contentloc = result['headers'].get('content-location', '')
  198. href = result.get('href', '')
  199. baseuri = make_safe_absolute_uri(href, contentloc) or make_safe_absolute_uri(contentloc) or href
  200. baselang = result['headers'].get('content-language', None)
  201. if isinstance(baselang, bytes) and baselang is not None:
  202. baselang = baselang.decode('utf-8', 'ignore')
  203. if not _XML_AVAILABLE:
  204. use_strict_parser = 0
  205. if use_strict_parser:
  206. # initialize the SAX parser
  207. feedparser = StrictFeedParser(baseuri, baselang, 'utf-8')
  208. feedparser.resolve_relative_uris = resolve_relative_uris
  209. feedparser.sanitize_html = sanitize_html
  210. saxparser = xml.sax.make_parser(PREFERRED_XML_PARSERS)
  211. saxparser.setFeature(xml.sax.handler.feature_namespaces, 1)
  212. try:
  213. # disable downloading external doctype references, if possible
  214. saxparser.setFeature(xml.sax.handler.feature_external_ges, 0)
  215. except xml.sax.SAXNotSupportedException:
  216. pass
  217. saxparser.setContentHandler(feedparser)
  218. saxparser.setErrorHandler(feedparser)
  219. source = xml.sax.xmlreader.InputSource()
  220. source.setByteStream(io.BytesIO(data))
  221. try:
  222. saxparser.parse(source)
  223. except xml.sax.SAXException as e:
  224. result['bozo'] = 1
  225. result['bozo_exception'] = feedparser.exc or e
  226. use_strict_parser = 0
  227. if not use_strict_parser:
  228. feedparser = LooseFeedParser(baseuri, baselang, 'utf-8', entities)
  229. feedparser.resolve_relative_uris = resolve_relative_uris
  230. feedparser.sanitize_html = sanitize_html
  231. feedparser.feed(data.decode('utf-8', 'replace'))
  232. result['feed'] = feedparser.feeddata
  233. result['entries'] = feedparser.entries
  234. result['version'] = result['version'] or feedparser.version
  235. result['namespaces'] = feedparser.namespaces_in_use
  236. return result