fix
This commit is contained in:
@@ -52,7 +52,7 @@ class LLMEngine:
|
||||
desc="Generating",
|
||||
dynamic_ncols=True,
|
||||
)
|
||||
if not isinstance(SamplingParams, list):
|
||||
if not isinstance(sampling_params, list):
|
||||
sampling_params = [sampling_params] * len(prompts)
|
||||
for prompt, sp in zip(prompts, sampling_params):
|
||||
self.add_request(prompt, sp)
|
||||
|
||||
@@ -170,7 +170,7 @@ class ModelRunner:
|
||||
context_lens = torch.zeros(max_bs, dtype=torch.int32)
|
||||
block_tables = torch.zeros(max_bs, max_num_blocks, dtype=torch.int32)
|
||||
outputs = torch.zeros(max_bs, hf_config.hidden_size)
|
||||
self.graph_bs = [1, 2, 4, 8, 16] + list(range(16, max_bs + 1, 16))
|
||||
self.graph_bs = [1, 2, 4, 8] + list(range(16, max_bs + 1, 16))
|
||||
self.graphs = {}
|
||||
self.graph_pool = None
|
||||
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
from contextlib import contextmanager
|
||||
from collections import defaultdict
|
||||
import torch
|
||||
|
||||
|
||||
class CUDATimer:
    """Accumulate GPU wall-clock time per named region using CUDA events."""

    def __init__(self):
        # name -> list of (start_event, end_event) pairs, one per recording.
        self.events = defaultdict(list)

    @contextmanager
    def record(self, name, enabled=True):
        """Time the enclosed region on the current CUDA stream.

        When ``enabled`` is False this is a pure no-op, so call sites can
        keep the ``with`` statement unconditionally.
        """
        if not enabled:
            yield
        else:
            start = torch.cuda.Event(enable_timing=True)
            end = torch.cuda.Event(enable_timing=True)
            self.events[name].append((start, end))
            start.record()
            try:
                yield
            finally:
                # Record the end event even if the timed region raises, so
                # log() never sees a half-finished (start, end) pair whose
                # elapsed_time() would fail.
                end.record()

    def log(self):
        """Return a summary like ``"name 12.34ms/5times"`` per region."""
        # elapsed_time() requires the recorded events to have completed.
        torch.cuda.synchronize()
        ret = []
        for name, events in self.events.items():
            total = 0
            # BUG FIX: the per-name call count is len(events); the original
            # used len(self.events), i.e. the number of distinct names.
            count = len(events)
            for start, end in events:
                total += start.elapsed_time(end)
            ret.append(f"{name} {total:.2f}ms/{count}times")
        return ", ".join(ret)
|
||||
Reference in New Issue
Block a user