rm torch.cuda.synchronize

pull/5258/head
yuehuayingxueluo 2024-01-09 15:53:04 +08:00 committed by FrankLeeeee
parent fab294c7f4
commit 10e3c9f923
1 changed file with 0 additions and 2 deletions

View File

@ -198,8 +198,6 @@ class RequestHandler:
if type in config_dict and config_dict[type] is not None:
logits = logit_processor(type, logits, config_dict[type])
torch.cuda.synchronize()
# calculate probs
probs = torch.softmax(logits, dim=-1, dtype=torch.float)
logprobs = torch.log_softmax(logits, dim=-1, dtype=torch.float)