From b9ed77cbbb470a508363490b2776b705cdd9a039 Mon Sep 17 00:00:00 2001
From: Zijie Tian
Date: Thu, 11 Dec 2025 05:31:06 +0800
Subject: [PATCH] [fix] Fix import error.

---
 nanovllm/kvcache/chunked_attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nanovllm/kvcache/chunked_attention.py b/nanovllm/kvcache/chunked_attention.py
index b06dd05..cc679d1 100644
--- a/nanovllm/kvcache/chunked_attention.py
+++ b/nanovllm/kvcache/chunked_attention.py
@@ -499,7 +499,7 @@ class ChunkedPrefillState:
 # Test function
 def _test_chunked_attention():
     """Test chunked attention correctness against full attention."""
-    from flash_attn import flash_attn_func
+    from flash_attn.flash_attn_interface import flash_attn_func
 
     torch.manual_seed(42)
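
Note on the change (editorial, not part of the patch): flash_attn_func is defined in
flash_attn/flash_attn_interface.py, and the package __init__ normally re-exports it at
the top level; importing directly from the submodule sidesteps whatever made the
top-level import fail here (the exact failure mode is not stated in the commit). A
minimal sketch of a version-tolerant import for the test is shown below, assuming both
package layouts may be encountered in practice:

    # Sketch only, not part of the committed fix:
    # prefer the submodule where flash_attn_func is defined, and fall back
    # to the package-level re-export if the submodule path is unavailable.
    try:
        from flash_attn.flash_attn_interface import flash_attn_func
    except ImportError:
        from flash_attn import flash_attn_func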