[Monitor][Ingestion] Add check for logs input type #32591

Merged 1 commit on Oct 20, 2023
2 changes: 2 additions & 0 deletions sdk/monitor/azure-monitor-ingestion/CHANGELOG.md
@@ -10,6 +10,8 @@

### Other Changes

+- Add type validation for the `logs` parameter in the `upload` method. ([#32591](https://github.com/Azure/azure-sdk-for-python/pull/32591/))
+
## 1.0.2 (2023-06-15)

### Bugs Fixed
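A minimal sketch of the behavior this changelog entry describes, assuming placeholder endpoint, rule, and stream values (the client and `upload` signature are from the published `azure-monitor-ingestion` package):

```python
from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient

# Placeholder endpoint/rule/stream values for illustration only.
client = LogsIngestionClient(
    endpoint="https://example.ingest.monitor.azure.com",
    credential=DefaultAzureCredential(),
)

# A list of JSON objects is accepted and uploaded in gzipped chunks.
client.upload(rule_id="dcr-example", stream_name="Custom-Example", logs=[{"foo": "bar"}])

# With this change, a bare dict now fails fast with a ValueError
# instead of surfacing a less obvious error deeper in the call.
client.upload(rule_id="dcr-example", stream_name="Custom-Example", logs={"foo": "bar"})
```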
2 changes: 1 addition & 1 deletion sdk/monitor/azure-monitor-ingestion/TROUBLESHOOTING.md
@@ -90,7 +90,7 @@ When you send logs to Azure Monitor for ingestion, the request may succeed, but

* Verify that the custom table specified in the DCR exists in the Log Analytics workspace. Ensure that you provide the accurate name of the custom table to the upload method. Mismatched table names can lead to logs not being stored correctly.

-* Confirm that the logs you're sending adhere to the format expected by the DCR. The data should be in the form of a JSON object or array, structured according to the requirements specified in the DCR. Additionally, it's essential to encode the request body in UTF-8 to avoid any data transmission issues.
+* Confirm that the logs you're sending adhere to the format expected by the DCR. The data should be an array of JSON objects, structured according to the requirements specified in the DCR. Additionally, it's essential to encode the request body in UTF-8 to avoid any data transmission issues.

* Keep in mind that data ingestion may take some time, especially if you're sending data to a specific table for the first time. In such cases, allow up to 15 minutes for the data to be fully ingested and available for querying and analysis.

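As a concrete illustration of the payload shape the updated bullet describes, a sketch with hypothetical column names (your columns must match the stream declaration in your DCR):

```python
import json

# An array of JSON objects; each object becomes one row in the target table.
# "Time", "Computer", and "AdditionalContext" are placeholder column names.
logs = [
    {"Time": "2023-10-20T00:00:00Z", "Computer": "vm-1", "AdditionalContext": "context-1"},
    {"Time": "2023-10-20T00:00:30Z", "Computer": "vm-2", "AdditionalContext": "context-2"},
]

# The serialized request body must be UTF-8 encoded.
body = json.dumps(logs).encode("utf-8")
```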
@@ -6,6 +6,7 @@

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
+from collections.abc import Sequence
from io import IOBase
import logging
import sys
@@ -67,6 +68,9 @@ def upload(
            super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
            return

+        if not isinstance(logs, Sequence):
+            raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
+
        for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
            try:
                super()._upload(  # type: ignore
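For reference, the new check relies on plain `collections.abc.Sequence` semantics, nothing SDK-specific: a `dict` is rejected, while `list` and `tuple` pass. Note that `str` is also a `Sequence`, so a bare string would get past this particular check:

```python
from collections.abc import Sequence

isinstance([{"foo": "bar"}], Sequence)   # True  -> proceeds to gzip chunking
isinstance(({"foo": "bar"},), Sequence)  # True  -> tuples pass as well
isinstance({"foo": "bar"}, Sequence)     # False -> ValueError raised
isinstance("oops", Sequence)             # True  -> strings are sequences too
```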
@@ -6,6 +6,7 @@

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
+from collections.abc import Sequence
from io import IOBase
import logging
import sys
@@ -67,6 +68,9 @@ async def upload(
            await super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs)
            return

+        if not isinstance(logs, Sequence):
+            raise ValueError("The 'logs' parameter must be a list of JSON objects or an I/O stream that is readable.")
+
        for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)):
            try:
                await super()._upload(  # type: ignore
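The async client applies the same validation. A minimal usage sketch, again assuming placeholder endpoint, rule, and stream values:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.monitor.ingestion.aio import LogsIngestionClient

async def main():
    credential = DefaultAzureCredential()
    # Placeholder endpoint/rule/stream values for illustration only.
    async with LogsIngestionClient(
        endpoint="https://example.ingest.monitor.azure.com", credential=credential
    ) as client:
        # Same rule as the sync client: logs must be a sequence of JSON
        # objects (or a readable I/O stream).
        await client.upload(
            rule_id="dcr-example", stream_name="Custom-Example", logs=[{"foo": "bar"}]
        )
    await credential.close()

asyncio.run(main())
```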
@@ -100,7 +100,6 @@ def test_send_logs_gzip_file(self, monitor_info):
            client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=f)
        os.remove(temp_file)

-
    def test_abort_error_handler(self, monitor_info):
        client = self.create_client_from_credential(
            LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce'])
@@ -140,3 +139,11 @@ def on_error(e):
            on_error=on_error)

        assert on_error.called

+    def test_invalid_logs_format(self, monitor_info):
+        client = self.create_client_from_credential(
+            LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce'])
+
+        body = {"foo": "bar"}
+        with pytest.raises(ValueError):
+            client.upload(rule_id="rule", stream_name="stream", logs=body)
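For context on the `on_error` callback exercised by `test_abort_error_handler`: the handler receives a `LogsUploadError`, which carries the underlying exception and the chunk of logs that failed. A sketch, reusing a `client` constructed as in the earlier example and placeholder rule/stream names:

```python
from azure.monitor.ingestion import LogsUploadError

def on_error(error: LogsUploadError) -> None:
    # error.error is the underlying exception; error.failed_logs is the
    # chunk of logs that could not be uploaded.
    print(f"Upload failed: {error.error}; {len(error.failed_logs)} logs affected")

client.upload(rule_id="dcr-example", stream_name="Custom-Example",
              logs=[{"foo": "bar"}], on_error=on_error)
```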
@@ -122,13 +122,11 @@ async def test_send_logs_gzip_file(self, monitor_info):
        os.remove(temp_file)
        await credential.close()

-
    @pytest.mark.asyncio
    async def test_abort_error_handler(self, monitor_info):
        credential = self.get_credential(LogsIngestionClient, is_async=True)
        client = self.create_client_from_credential(
            LogsIngestionClient, credential, endpoint=monitor_info['dce'])
        body = [{"foo": "bar"}]

        class TestException(Exception):
            pass
@@ -166,3 +164,14 @@ async def on_error(e):

        assert on_error.called
        await credential.close()

+    @pytest.mark.asyncio
+    async def test_invalid_logs_format(self, monitor_info):
+        credential = self.get_credential(LogsIngestionClient, is_async=True)
+        client = self.create_client_from_credential(LogsIngestionClient, credential, endpoint=monitor_info['dce'])
+
+        body = {"foo": "bar"}
+        async with client:
+            with pytest.raises(ValueError):
+                await client.upload(rule_id="rule", stream_name="stream", logs=body)
+        await credential.close()