Skip to content

Commit

Permalink
Merge branch 'main' into chore/wrapping-context
Browse files Browse the repository at this point in the history
  • Loading branch information
P403n1x87 committed May 14, 2024
2 parents 70dd837 + 5148d18 commit 1ada6e8
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 6 deletions.
10 changes: 8 additions & 2 deletions ddtrace/contrib/langchain/patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -731,7 +731,10 @@ def traced_lcel_runnable_sequence(langchain, pin, func, instance, args, kwargs):
inputs = None
final_output = None
try:
inputs = get_argument_value(args, kwargs, 0, "input")
try:
inputs = get_argument_value(args, kwargs, 0, "input")
except ArgumentError:
inputs = get_argument_value(args, kwargs, 0, "inputs")
if integration.is_pc_sampled_span(span):
if not isinstance(inputs, list):
inputs = [inputs]
Expand Down Expand Up @@ -775,7 +778,10 @@ async def traced_lcel_runnable_sequence_async(langchain, pin, func, instance, ar
inputs = None
final_output = None
try:
inputs = get_argument_value(args, kwargs, 0, "input")
try:
inputs = get_argument_value(args, kwargs, 0, "input")
except ArgumentError:
inputs = get_argument_value(args, kwargs, 0, "inputs")
if integration.is_pc_sampled_span(span):
if not isinstance(inputs, list):
inputs = [inputs]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
---
fixes:
- |
langchain: This fix resolves an issue where passing ``inputs`` as a keyword argument to a chain's ``batch()`` or ``abatch()`` call caused a crash in the tracing integration.
8 changes: 4 additions & 4 deletions tests/contrib/langchain/test_langchain_community.py
Original file line number Diff line number Diff line change
Expand Up @@ -1188,7 +1188,7 @@ def test_lcel_chain_batch(langchain_core, langchain_openai, request_vcr):
chain = {"topic": langchain_core.runnables.RunnablePassthrough()} | prompt | model | output_parser

with request_vcr.use_cassette("lcel_openai_chain_batch.yaml"):
chain.batch(["chickens", "pigs"])
chain.batch(inputs=["chickens", "pigs"])


@flaky(1735812000)
Expand All @@ -1205,7 +1205,7 @@ def test_lcel_chain_batch_311(langchain_core, langchain_openai, request_vcr):
chain = {"topic": langchain_core.runnables.RunnablePassthrough()} | prompt | model | output_parser

with request_vcr.use_cassette("lcel_openai_chain_batch_311.yaml"):
chain.batch(["chickens", "pigs"])
chain.batch(inputs=["chickens", "pigs"])


@flaky(1735812000)
Expand Down Expand Up @@ -1246,7 +1246,7 @@ async def test_lcel_chain_batch_async(langchain_core, langchain_openai, request_
chain = {"topic": langchain_core.runnables.RunnablePassthrough()} | prompt | model | output_parser

with request_vcr.use_cassette("lcel_openai_chain_batch_async.yaml"):
await chain.abatch(["chickens", "pigs"])
await chain.abatch(inputs=["chickens", "pigs"])


@pytest.mark.parametrize(
Expand Down Expand Up @@ -1542,7 +1542,7 @@ def test_llmobs_chain_batch(
chain = {"topic": langchain_core.runnables.RunnablePassthrough()} | prompt | model | output_parser

self._test_llmobs_chain_invoke(
generate_trace=lambda inputs: chain.batch(["chickens", "pigs"]),
generate_trace=lambda inputs: chain.batch(inputs=["chickens", "pigs"]),
request_vcr=request_vcr,
mock_llmobs_span_writer=mock_llmobs_span_writer,
mock_tracer=mock_tracer,
Expand Down

0 comments on commit 1ada6e8

Please sign in to comment.