fix linter

parent 41cd9b5e2a
commit 4899148310

5 changed files with 19 additions and 9 deletions
@@ -79,7 +79,9 @@ class BingSerpConverter(DocumentConverter):
             slug.extract()
 
         # Parse the algorithmic results
-        _markdownify = _CustomMarkdownify(keep_data_uris=kwargs.get("keep_data_uris", False))
+        _markdownify = _CustomMarkdownify(
+            keep_data_uris=kwargs.get("keep_data_uris", False)
+        )
         results = list()
         for result in soup.find_all(class_="b_algo"):
             if not hasattr(result, "find_all"):
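The option threaded through every hunk in this commit, keep_data_uris, controls whether inline data: URIs survive in the rendered Markdown. A minimal, self-contained sketch of the assumed semantics (render_img is a hypothetical helper, not markitdown's actual _CustomMarkdownify):

def render_img(src: str, alt: str = "", keep_data_uris: bool = False) -> str:
    # Assumed behavior: without keep_data_uris, the base64 payload is
    # truncated so large inline images don't bloat the Markdown output.
    if src.startswith("data:") and not keep_data_uris:
        src = src.split(",", 1)[0] + ",..."
    return f"![{alt}]({src})"

print(render_img("data:image/png;base64,iVBORw0KGgoAAAANSUhEUg", alt="logo"))
# ![logo](data:image/png;base64,...)
print(render_img("https://example.com/logo.png", alt="logo"))
# ![logo](https://example.com/logo.png) -- non-data URIs always pass through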
@@ -74,5 +74,5 @@ class DocxConverter(HtmlConverter):
         style_map = kwargs.get("style_map", None)
         return self._html_converter.convert_string(
             mammoth.convert_to_html(file_stream, style_map=style_map).value,
-            keep_data_uris=kwargs.get("keep_data_uris", False)
+            keep_data_uris=kwargs.get("keep_data_uris", False),
         )
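Keyword arguments passed to the top-level convert call are forwarded to whichever converter accepts the input, which is how keep_data_uris reaches DocxConverter.convert and, from there, HtmlConverter.convert_string. A hedged usage sketch (the file path is a placeholder):

from markitdown import MarkItDown

md = MarkItDown()
result = md.convert("report.docx", keep_data_uris=True)  # placeholder path
print(result.markdown[:200])  # rendered Markdown, data URIs preserved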
@@ -57,9 +57,13 @@ class HtmlConverter(DocumentConverter):
         webpage_text = ""
         keep_data_uris = kwargs.get("keep_data_uris", False)
         if body_elm:
-            webpage_text = _CustomMarkdownify(keep_data_uris=keep_data_uris).convert_soup(body_elm)
+            webpage_text = _CustomMarkdownify(
+                keep_data_uris=keep_data_uris
+            ).convert_soup(body_elm)
         else:
-            webpage_text = _CustomMarkdownify(keep_data_uris=keep_data_uris).convert_soup(soup)
+            webpage_text = _CustomMarkdownify(
+                keep_data_uris=keep_data_uris
+            ).convert_soup(soup)
 
         assert isinstance(webpage_text, str)
 
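The linter complaint these hunks address looks like a line-length rule: once indented inside the method, the one-line form overruns a typical 88-character budget (e.g. Black's default). A quick check:

line = '            webpage_text = _CustomMarkdownify(keep_data_uris=keep_data_uris).convert_soup(body_elm)'
print(len(line))  # 99 -- over an 88-character limit

The wrapped replacement keeps every line comfortably under the budget, which is presumably why the kwarg is also hoisted into the keep_data_uris local.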
@@ -171,7 +171,9 @@ class RssConverter(DocumentConverter):
         try:
             # using bs4 because many RSS feeds have HTML-styled content
             soup = BeautifulSoup(content, "html.parser")
-            return _CustomMarkdownify(keep_data_uris=self._kwargs.get("keep_data_uris", False)).convert_soup(soup)
+            return _CustomMarkdownify(
+                keep_data_uris=self._kwargs.get("keep_data_uris", False)
+            ).convert_soup(soup)
         except BaseException as _:
             return content
 
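RssConverter reads the option from self._kwargs rather than a parameter, presumably because _parse_content is a helper invoked deep inside feed parsing with no direct access to convert()'s arguments. A minimal sketch of that stash-on-self pattern (SketchRssConverter is hypothetical; the control flow is an assumption):

class SketchRssConverter:
    def convert(self, stream, **kwargs):
        self._kwargs = kwargs  # stash so helpers can see the caller's options
        return self._parse_content("<p>hello</p>")

    def _parse_content(self, content: str) -> str:
        keep = self._kwargs.get("keep_data_uris", False)
        return f"keep_data_uris={keep}: {content}"

print(SketchRssConverter().convert(None, keep_data_uris=True))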
@@ -76,11 +76,13 @@ class WikipediaConverter(DocumentConverter):
             main_title = title_elm.string
 
             # Convert the page
-            webpage_text = f"# {main_title}\n\n" + _CustomMarkdownify(keep_data_uris=kwargs.get("keep_data_uris", False)).convert_soup(
-                body_elm
-            )
+            webpage_text = f"# {main_title}\n\n" + _CustomMarkdownify(
+                keep_data_uris=kwargs.get("keep_data_uris", False)
+            ).convert_soup(body_elm)
         else:
-            webpage_text = _CustomMarkdownify(keep_data_uris=kwargs.get("keep_data_uris", False)).convert_soup(soup)
+            webpage_text = _CustomMarkdownify(
+                keep_data_uris=kwargs.get("keep_data_uris", False)
+            ).convert_soup(soup)
 
         return DocumentConverterResult(
             markdown=webpage_text,
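A hedged end-to-end sketch of what the Wikipedia branch produces (the URL is illustrative, and it assumes MarkItDown routes http(s) inputs to WikipediaConverter):

from markitdown import MarkItDown

md = MarkItDown()
result = md.convert("https://en.wikipedia.org/wiki/Markdown")
print(result.markdown.splitlines()[0])  # expected to start with "# Markdown"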