
Commit

(tests): fixed the tests
Rambatino committed Feb 14, 2024
1 parent e697135 commit f57c5d6
Showing 4 changed files with 15 additions and 15 deletions.
10 changes: 5 additions & 5 deletions axiom/client.py
@@ -253,7 +253,7 @@ def query_legacy(
         self.logger.debug(f"query result: {result}")
         query_id = res.headers.get("X-Axiom-History-Query-Id")
         self.logger.info(f"received query result with query_id: {query_id}")
-        result.saved_query_id = query_id
+        result.query_id = query_id
         return result

     def apl_query(
@@ -276,16 +276,16 @@ def query(
         res = self.session.post(path, data=payload, params=params)
         result = Util.from_dict(
             (
-                TabularQueryResult
-                if opts.format == AplResultFormat.Tabular
-                else LegacyQueryResult
+                LegacyQueryResult
+                if opts is None or opts.format == AplResultFormat.Legacy
+                else TabularQueryResult
             ),
             res.json(),
         )
         self.logger.debug(f"apl query result: {result}")
         query_id = res.headers.get("X-Axiom-History-Query-Id")
         self.logger.info(f"received query result with query_id: {query_id}")
-        result.saved_query_id = query_id
+        result.query_id = query_id
         return result

     def df(
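
For context, a minimal usage sketch of the behaviour after this change, not part of the commit itself. It assumes Client is importable from the axiom package and can resolve credentials without constructor arguments, and "my-dataset" is a hypothetical dataset name. With no options passed, apl_query now deserializes into LegacyQueryResult, and the saved-query id (if any) is read from query_id rather than saved_query_id. Note that before this change the client assigned result.saved_query_id while the result dataclasses declared save_query_id, so the declared field was never populated; both sides now agree on query_id.

from axiom import Client

client = Client()  # assumption: token/org are resolved from the environment
res = client.apl_query("my-dataset")  # hypothetical dataset name used as the APL query, as in the tests
print(type(res).__name__)  # LegacyQueryResult, since no opts were passed
print(res.query_id)  # taken from the X-Axiom-History-Query-Id header; may be None
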
14 changes: 7 additions & 7 deletions axiom/query/result.py
@@ -145,10 +145,10 @@ class QueryLegacyResult:
     matches: List[Entry]
     # Buckets are the time series buckets.
     buckets: Timeseries
-    # save_query_id is the ID of the query that generated this result when it
+    # query_id is the ID of the query that generated this result when it
     # was saved on the server. This is only set when the query was sent with
     # the `save_as_kind` option specified.
-    save_query_id: Optional[str] = field(default=None)
+    query_id: Optional[str] = field(default=None)


 @dataclass
@@ -164,10 +164,10 @@ class LegacyQueryResult:
     buckets: Timeseries
     # Dataset names are the datasets that were used in the apl query.
     dataset_names: List[str] = field(default_factory=lambda: [])
-    # save_query_id is the ID of the apl query that generated this result when it
+    # query_id is the ID of the apl query that generated this result when it
     # was saved on the server. This is only set when the apl query was sent with
     # the `save_as_kind` option specified.
-    save_query_id: Optional[str] = field(default=None)
+    query_id: Optional[str] = field(default=None)


 @dataclass
@@ -294,14 +294,14 @@ class TabularQueryResult:
         request (Request): The request that generated this result.
         dataset_names (List[str]): The names of datasets included in the result.
         fields_meta_map (Dict[str, List[Any]]): Metadata for the fields in the result.
-        save_query_id (Optional[str]): The ID of the saved query that generated this result, if applicable.
+        query_id (Optional[str]): The ID of the saved query that generated this result, if applicable.
     """

     format: str
     status: QueryStatus
     tables: List[Table]
     request: Request
-    # save_query_id is the ID of the query that generated this result when it
+    # query_id is the ID of the query that generated this result when it
     # was saved on the server. This is only set when the query was sent with
     # the `save_as_kind` option specified.
-    save_query_id: Optional[str] = field(default=None)
+    query_id: Optional[str] = field(default=None)
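
This is a straight rename of a public dataclass field, so downstream code that still reads save_query_id will raise AttributeError against the new version. A hypothetical compatibility helper, not part of this commit, that tolerates both names:

from typing import Any, Optional

def saved_query_id_of(result: Any) -> Optional[str]:
    # Prefer the new field name, fall back to the pre-rename one.
    return getattr(result, "query_id", None) or getattr(result, "save_query_id", None)
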
2 changes: 1 addition & 1 deletion tests/test_client.py
@@ -155,7 +155,7 @@ def test_step004_query(self):
         )
         qr = self.client.query_legacy(self.dataset_name, q, opts)

-        self.assertIsNotNone(qr.save_query_id)
+        self.assertIsNotNone(qr.query_id)
         self.assertEqual(len(qr.matches), len(self.events))

     def test_step005_apl_query(self):
4 changes: 2 additions & 2 deletions tests/test_logger.py
@@ -32,14 +32,14 @@ def test_log(self):

         # this log shouldn't be ingested yet
         res = client.apl_query(dataset_name)
-        self.assertEqual(0, res.status.rowsExamined)
+        self.assertEqual(0, res.status.rows_examined)

         # flush events
         axiom_handler.flush()

         # now we should have a log
         res = client.apl_query(dataset_name)
-        self.assertEqual(1, res.status.rowsExamined)
+        self.assertEqual(1, res.status.rows_examined)

         # cleanup created dataset
         client.datasets.delete(dataset_name)
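
The test relies on events only becoming visible after the handler flushes. A hypothetical polling helper, not part of this commit, built on the same calls the test uses (client.apl_query and the newly snake_cased status.rows_examined):

import time

def wait_for_rows(client, dataset_name, expected, attempts=10, delay=1.0):
    # Re-query until the dataset reports at least `expected` examined rows.
    for _ in range(attempts):
        res = client.apl_query(dataset_name)
        if res.status.rows_examined >= expected:
            return res
        time.sleep(delay)
    raise TimeoutError(f"{dataset_name} did not report {expected} rows in time")
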
