Skip to content

Commit

Permalink
Improve logs when Docker is not running (#3711)
Browse files Browse the repository at this point in the history
  • Loading branch information
bblommers committed Aug 28, 2021
1 parent 31e10e9 commit 027d05e
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 25 deletions.
39 changes: 20 additions & 19 deletions moto/awslambda/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -569,9 +569,9 @@ def _invoke_lambda(self, code, event=None, context=None):

container = exit_code = None
log_config = docker.types.LogConfig(type=docker.types.LogConfig.types.JSON)

with _DockerDataVolumeContext(self) as data_vol:
try:
self.docker_client.ping() # Verify Docker is running
run_kwargs = (
dict(links={"motoserver": "motoserver"})
if settings.TEST_SERVER_MODE
Expand Down Expand Up @@ -608,23 +608,7 @@ def _invoke_lambda(self, code, event=None, context=None):

output = output.decode("utf-8")

# Send output to "logs" backend
invoke_id = uuid.uuid4().hex
log_stream_name = "{date.year}/{date.month:02d}/{date.day:02d}/[{version}]{invoke_id}".format(
date=datetime.datetime.utcnow(),
version=self.version,
invoke_id=invoke_id,
)

self.logs_backend.create_log_stream(self.logs_group_name, log_stream_name)

log_events = [
{"timestamp": unix_time_millis(), "message": line}
for line in output.splitlines()
]
self.logs_backend.put_log_events(
self.logs_group_name, log_stream_name, log_events, None
)
self.save_logs(output)

# We only care about the response from the lambda
# Which is the last line of the output, according to https://github.com/lambci/docker-lambda/issues/25
Expand All @@ -636,7 +620,24 @@ def _invoke_lambda(self, code, event=None, context=None):
return resp, invocation_error, logs
except docker.errors.DockerException as e:
# Docker itself is probably not running - there will be no Lambda-logs to handle
return "error running docker: {}".format(e), True, ""
msg = "error running docker: {}".format(e)
self.save_logs(msg)
return msg, True, ""

def save_logs(self, output):
    """Persist Lambda invocation output to the CloudWatch Logs backend.

    Creates a fresh log stream — named after the current UTC date, the
    function version and a random invocation id — and stores every line
    of *output* as a separate log event in that stream.
    """
    # Stream names follow the AWS convention: YYYY/MM/DD/[version]<invoke-id>
    stream_name = "{date.year}/{date.month:02d}/{date.day:02d}/[{version}]{invoke_id}".format(
        date=datetime.datetime.utcnow(), version=self.version, invoke_id=uuid.uuid4().hex,
    )
    self.logs_backend.create_log_stream(self.logs_group_name, stream_name)
    # One event per output line; all events share (roughly) the same timestamp.
    events = []
    for line in output.splitlines():
        events.append({"timestamp": unix_time_millis(), "message": line})
    self.logs_backend.put_log_events(
        self.logs_group_name, stream_name, events, None
    )

def invoke(self, body, request_headers, response_headers):

Expand Down
14 changes: 8 additions & 6 deletions tests/test_awslambda/test_lambda.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,8 @@ def test_invoke_function_from_sns():
result = sns_conn.publish(TopicArn=topic_arn, Message=json.dumps({}))

start = time.time()
while (time.time() - start) < 30:
events = []
while (time.time() - start) < 10:
result = logs_conn.describe_log_streams(logGroupName="/aws/lambda/testFunction")
log_streams = result.get("logStreams")
if not log_streams:
Expand All @@ -375,13 +376,14 @@ def test_invoke_function_from_sns():
logGroupName="/aws/lambda/testFunction",
logStreamName=log_streams[0]["logStreamName"],
)
for event in result.get("events"):
events = result.get("events")
for event in events:
if event["message"] == "get_test_zip_file3 success":
return

time.sleep(1)

assert False, "Test Failed"
assert False, "Expected message not found in logs:" + str(events)


@mock_lambda
Expand Down Expand Up @@ -1306,7 +1308,7 @@ def test_invoke_function_from_sqs(key):
assert msg_showed_up, (
expected_msg
+ " was not found after sending an SQS message. All logs: "
+ all_logs
+ str(all_logs)
)


Expand Down Expand Up @@ -1355,7 +1357,7 @@ def test_invoke_function_from_dynamodb_put():
msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group)

assert msg_showed_up, (
expected_msg + " was not found after a DDB insert. All logs: " + all_logs
expected_msg + " was not found after a DDB insert. All logs: " + str(all_logs)
)


Expand Down Expand Up @@ -1422,7 +1424,7 @@ def wait_for_log_msg(expected_msg, log_group):
logs_conn = boto3.client("logs", region_name="us-east-1")
received_messages = []
start = time.time()
while (time.time() - start) < 10:
while (time.time() - start) < 30:
result = logs_conn.describe_log_streams(logGroupName=log_group)
log_streams = result.get("logStreams")
if not log_streams:
Expand Down

0 comments on commit 027d05e

Please sign in to comment.