Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 0 additions & 61 deletions arango/collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -623,67 +623,6 @@ def response_handler(resp: Response) -> Cursor:

return self._execute(request, response_handler)

def export(
    self,
    limit: Optional[int] = None,
    count: bool = False,
    batch_size: Optional[int] = None,
    flush: bool = False,
    flush_wait: Optional[int] = None,
    ttl: Optional[Number] = None,
    filter_fields: Optional[Sequence[str]] = None,
    filter_type: str = "include",
) -> Result[Cursor]:
    """Export all documents in the collection using a server cursor.

    :param limit: Max number of documents fetched by the cursor.
    :type limit: int | None
    :param count: Include the document count in the server cursor.
    :type count: bool
    :param batch_size: Max number of documents in the batch fetched by
        the cursor in one round trip.
    :type batch_size: int | None
    :param flush: If set to True, flush the write-ahead log prior to the
        export. If set to False, documents in the write-ahead log during
        the export are not included in the result.
    :type flush: bool
    :param flush_wait: Max wait time in seconds for write-ahead log flush.
    :type flush_wait: int | None
    :param ttl: Time-to-live for the cursor on the server.
    :type ttl: int | float | None
    :param filter_fields: Document fields to filter with.
    :type filter_fields: [str] | None
    :param filter_type: Allowed values are "include" or "exclude".
    :type filter_type: str
    :return: Document cursor.
    :rtype: arango.cursor.Cursor
    :raise arango.exceptions.DocumentGetError: If export fails.
    """
    # "count" and "flush" are always sent; the rest only when supplied,
    # so the server applies its own defaults for omitted options.
    data: Json = {"count": count, "flush": flush}
    optional_params = {
        "flushWait": flush_wait,
        "batchSize": batch_size,
        "limit": limit,
        "ttl": ttl,
    }
    data.update(
        {key: value for key, value in optional_params.items() if value is not None}
    )
    if filter_fields is not None:
        data["restrict"] = {"fields": filter_fields, "type": filter_type}

    req = Request(
        method="post",
        endpoint="/_api/export",
        params={"collection": self.name},
        data=data,
    )

    def handle_response(resp: Response) -> Cursor:
        # Any non-2xx response is surfaced as a document retrieval error.
        if not resp.is_success:
            raise DocumentGetError(resp, req)
        return Cursor(self._conn, resp.body, "export")

    return self._execute(req, handle_response)

def find(
self, filters: Json, skip: Optional[int] = None, limit: Optional[int] = None
) -> Result[Cursor]:
Expand Down
1 change: 0 additions & 1 deletion docs/simple.rst
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ Here is an example of using ArangoDB's **simply queries**:
Here are all simple query (and other utility) methods available:

* :func:`arango.collection.Collection.all`
* :func:`arango.collection.Collection.export`
* :func:`arango.collection.Collection.find`
* :func:`arango.collection.Collection.find_near`
* :func:`arango.collection.Collection.find_in_range`
Expand Down
72 changes: 0 additions & 72 deletions tests/test_document.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import pytest

from arango.exceptions import (
CursorCloseError,
CursorNextError,
DocumentCountError,
DocumentDeleteError,
DocumentGetError,
Expand Down Expand Up @@ -1676,76 +1674,6 @@ def test_document_keys(col, bad_col, docs):
assert err.value.error_code in {11, 1228}


def test_document_export(col, bad_col, docs, cluster):
    """Exercise Collection.export() options against a single server.

    Covers flush/flush_wait, count, batch_size, ttl, field filtering,
    limits, error propagation on a bad database, and cursor closing.
    """
    if cluster:
        pytest.skip("Not tested in a cluster setup")

    # Set up test documents
    col.insert_many(docs)

    # Test export with flush set to True and flush_wait set to 1
    cursor = col.export(flush=True, flush_wait=1)
    assert clean_doc(cursor) == docs
    assert cursor.type == "export"

    # Test export with count
    cursor = col.export(flush=False, count=True)
    assert cursor.count() == len(docs)
    assert clean_doc(cursor) == docs

    # Test export with batch size
    cursor = col.export(flush=False, count=True, batch_size=1)
    assert cursor.count() == len(docs)
    assert clean_doc(cursor) == docs

    # Test export with time-to-live
    cursor = col.export(flush=False, count=True, ttl=10)
    assert cursor.count() == len(docs)
    assert clean_doc(cursor) == docs

    # Test export with filters
    cursor = col.export(
        count=True, flush=False, filter_fields=["text"], filter_type="exclude"
    )
    assert cursor.count() == len(docs)
    assert all("text" not in d for d in cursor)

    # Test export with a limit of 0
    cursor = col.export(flush=False, count=True, limit=0)
    assert cursor.count() == 0
    assert clean_doc(cursor) == []

    # Test export with a limit of 1
    cursor = col.export(flush=False, count=True, limit=1)
    assert cursor.count() == 1
    # Materialize once: the original iterated the cursor a second time after
    # list(cursor) had exhausted it, and left the membership check as a bare
    # (unasserted) expression, making it a silent no-op.
    exported = list(cursor)
    assert len(exported) == 1
    assert all(clean_doc(d) in docs for d in exported)

    # Test export with a limit of 3
    cursor = col.export(flush=False, count=True, limit=3)
    assert cursor.count() == 3
    exported = list(cursor)
    assert len(exported) == 3
    assert all(clean_doc(d) in docs for d in exported)

    # Test export with bad database
    with assert_raises(DocumentGetError):
        bad_col.export()

    # Test closing export cursor
    cursor = col.export(flush=False, count=True, batch_size=1)
    assert cursor.close(ignore_missing=False) is True
    assert cursor.close(ignore_missing=True) is False

    # The first batch is still cached client-side after the server-side close.
    assert clean_doc(cursor.next()) in docs
    with assert_raises(CursorNextError):
        cursor.next()
    with assert_raises(CursorCloseError):
        cursor.close(ignore_missing=False)

    # A cursor fully served in one batch has no server-side counterpart left.
    cursor = col.export(flush=False, count=True)
    assert cursor.close(ignore_missing=True) is None


def test_document_random(col, bad_col, docs):
# Set up test documents
col.import_bulk(docs)
Expand Down