@@ -1350,3 +1350,124 @@ async def test_non_streaming_does_not_pass_stream_param():
13501350 mock_client .messages .create .assert_called_once ()
13511351 _ , kwargs = mock_client .messages .create .call_args
13521352 assert "stream" not in kwargs


# --- Tests for system_instruction=None fix (#5318) ---


@pytest.mark.asyncio
async def test_generate_content_async_none_system_instruction_non_streaming():
  """When system_instruction is None, system should be NOT_GIVEN, not None.

  Regression test for #5318: AnthropicLlm.generate_content_async passes
  system=None to the Anthropic API when no system instruction is set
  (e.g. during event compaction via LlmEventSummarizer), which causes
  a 400 Bad Request from the Anthropic API.
  """
  llm = AnthropicLlm(model="claude-sonnet-4-20250514")

  # Minimal complete Message so response conversion succeeds.
  mock_message = anthropic_types.Message(
      id="msg_test_no_sys",
      content=[
          anthropic_types.TextBlock(text="Hello!", type="text", citations=None)
      ],
      model="claude-sonnet-4-20250514",
      role="assistant",
      stop_reason="end_turn",
      stop_sequence=None,
      type="message",
      usage=anthropic_types.Usage(
          input_tokens=5,
          output_tokens=2,
          cache_creation_input_tokens=0,
          cache_read_input_tokens=0,
          server_tool_use=None,
          service_tier=None,
      ),
  )

  mock_client = MagicMock()
  mock_client.messages.create = AsyncMock(return_value=mock_message)

  # Config with system_instruction=None (as happens during event compaction).
  llm_request = LlmRequest(
      model="claude-sonnet-4-20250514",
      contents=[Content(role="user", parts=[Part.from_text(text="Hi")])],
      config=types.GenerateContentConfig(
          system_instruction=None,
      ),
  )

  with mock.patch.object(llm, "_anthropic_client", mock_client):
    responses = [
        r async for r in llm.generate_content_async(llm_request, stream=False)
    ]

  assert len(responses) == 1
  mock_client.messages.create.assert_called_once()
  _, kwargs = mock_client.messages.create.call_args

  from anthropic import NOT_GIVEN

  # Use .get() so the test reports a clear assertion failure (rather than a
  # KeyError) if the implementation fixes #5318 by omitting the "system"
  # kwarg entirely — omission and an explicit NOT_GIVEN are both valid;
  # only system=None is the bug.
  assert kwargs.get("system", NOT_GIVEN) is NOT_GIVEN, (
      f"Expected system=NOT_GIVEN but got system={kwargs.get('system')!r}. "
      "Passing system=None causes Anthropic API 400 errors."
  )
1418+
1419+
@pytest.mark.asyncio
async def test_generate_content_async_none_system_instruction_streaming():
  """Streaming path should also omit system when system_instruction is None.

  Companion to the non-streaming regression test for #5318: the streaming
  code path must not pass system=None to the Anthropic API either.
  """
  llm = AnthropicLlm(model="claude-sonnet-4-20250514")

  # Minimal well-formed streaming event sequence: start, one text block
  # with a single delta, then message_delta carrying stop_reason/usage.
  events = [
      MagicMock(
          type="message_start",
          message=MagicMock(usage=MagicMock(input_tokens=5, output_tokens=0)),
      ),
      MagicMock(
          type="content_block_start",
          index=0,
          content_block=anthropic_types.TextBlock(text="", type="text"),
      ),
      MagicMock(
          type="content_block_delta",
          index=0,
          delta=anthropic_types.TextDelta(text="Hi", type="text_delta"),
      ),
      MagicMock(type="content_block_stop", index=0),
      MagicMock(
          type="message_delta",
          delta=MagicMock(stop_reason="end_turn"),
          usage=MagicMock(output_tokens=1),
      ),
      MagicMock(type="message_stop"),
  ]

  mock_client = MagicMock()
  mock_client.messages.create = AsyncMock(
      return_value=_make_mock_stream_events(events)
  )

  # Config with system_instruction=None.
  llm_request = LlmRequest(
      model="claude-sonnet-4-20250514",
      contents=[Content(role="user", parts=[Part.from_text(text="Hi")])],
      config=types.GenerateContentConfig(
          system_instruction=None,
      ),
  )

  with mock.patch.object(llm, "_anthropic_client", mock_client):
    _ = [r async for r in llm.generate_content_async(llm_request, stream=True)]

  mock_client.messages.create.assert_called_once()
  _, kwargs = mock_client.messages.create.call_args

  from anthropic import NOT_GIVEN

  # Use .get() so the test reports a clear assertion failure (rather than a
  # KeyError) if the implementation fixes #5318 by omitting the "system"
  # kwarg entirely — omission and an explicit NOT_GIVEN are both valid;
  # only system=None is the bug.
  assert kwargs.get("system", NOT_GIVEN) is NOT_GIVEN, (
      f"Expected system=NOT_GIVEN but got system={kwargs.get('system')!r}. "
      "Passing system=None causes Anthropic API 400 errors."
  )