UnstructuredURLLoader: allow url failures, keep processing (#1954)

By default, UnstructuredURLLoader now continues processing the remaining
`urls` if it encounters an error for a particular url.

If failure of the entire loader is desired, as was previously the case,
use `continue_on_failure=False`.

E.g., this fails splendidly, courtesy of the 2nd url:

```python
from langchain.document_loaders import UnstructuredURLLoader
urls = [
    "https://www.understandingwar.org/backgrounder/russian-offensive-campaign-assessment-february-8-2023",
    "https://doesnotexistithinkprobablynotverynotlikely.io",
    "https://www.understandingwar.org/backgrounder/russian-offensive-campaign-assessment-february-9-2023",
]
loader = UnstructuredURLLoader(urls=urls, continue_on_failure=False)
data = loader.load()
```
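
By contrast, a minimal sketch of the new default behavior (same `urls` as above, assuming `unstructured` is installed): with `continue_on_failure=True` (the default), the error for the unreachable url is logged and the loader keeps processing the remaining urls.

```python
from langchain.document_loaders import UnstructuredURLLoader

urls = [
    "https://www.understandingwar.org/backgrounder/russian-offensive-campaign-assessment-february-8-2023",
    "https://doesnotexistithinkprobablynotverynotlikely.io",
    "https://www.understandingwar.org/backgrounder/russian-offensive-campaign-assessment-february-9-2023",
]

# continue_on_failure defaults to True, so the failure on the 2nd url is
# logged via logger.error and processing moves on to the 3rd url.
loader = UnstructuredURLLoader(urls=urls)
data = loader.load()  # completes instead of raising
```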

Issue: #1939
cragwolfe committed Mar 27, 2023
1 parent 6598bea commit 71e8eaf
Showing 1 changed file with 12 additions and 2 deletions.
langchain/document_loaders/url.py

```diff
@@ -1,14 +1,17 @@
 """Loader that uses unstructured to load HTML files."""
+import logging
 from typing import List
 
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
 
+logger = logging.getLogger(__file__)
+
 
 class UnstructuredURLLoader(BaseLoader):
     """Loader that uses unstructured to load HTML files."""
 
-    def __init__(self, urls: List[str]):
+    def __init__(self, urls: List[str], continue_on_failure: bool = True):
         """Initialize with file path."""
         try:
             import unstructured  # noqa:F401
@@ -18,14 +21,21 @@ def __init__(self, urls: List[str]):
                 "`pip install unstructured`"
             )
         self.urls = urls
+        self.continue_on_failure = continue_on_failure
 
     def load(self) -> List[Document]:
         """Load file."""
         from unstructured.partition.html import partition_html
 
         docs: List[Document] = list()
         for url in self.urls:
-            elements = partition_html(url=url)
+            try:
+                elements = partition_html(url=url)
+            except Exception as e:
+                if self.continue_on_failure:
+                    logger.error(f"Error fetching or processing {url}, exception: {e}")
+                else:
+                    raise e
             text = "\n\n".join([str(el) for el in elements])
             metadata = {"source": url}
             docs.append(Document(page_content=text, metadata=metadata))
```
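Since failures for individual urls are, by default, reported through the module logger rather than raised, here is a minimal sketch of surfacing those messages (assuming the application has not configured logging elsewhere):

```python
import logging

# The loader calls logger.error(...) for each url that fails; configure the
# root logger so those messages are emitted in a consistent format.
logging.basicConfig(format="%(levelname)s:%(name)s:%(message)s", level=logging.ERROR)
```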
