mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
36 lines | 713 B
from typing import Any, Optional

from pydantic import BaseModel


class NumericIDGenerator:
    """Process-wide singleton that hands out monotonically increasing integer IDs."""

    _instance = None

    def __new__(cls):
        # Make it a singleton: create the shared instance on first use, reuse it afterwards.
        if cls._instance is None:
            cls._instance = super(NumericIDGenerator, cls).__new__(cls)
            cls._instance.current_id = 0
        return cls._instance

    def __call__(self):
        # Every call returns the next ID: 1, 2, 3, ...
        self.current_id += 1
        return self.current_id


# Module-level shared instance; every importer sees the same counter.
id_generator = NumericIDGenerator()


class ChatMessage(BaseModel):
    """A complete chat message: the speaker's role and its content."""

    role: str
    content: Any


class DeltaMessage(BaseModel):
    """An incremental (streamed) message fragment; either field may be absent in a chunk."""

    role: Optional[str] = None
    content: Optional[Any] = None


class ChatCompletionResponseStreamChoice(BaseModel):
    """One choice within a streamed chat-completion response chunk."""

    index: int
    message: DeltaMessage
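

# --- Illustrative usage (a minimal sketch, not part of the upstream file) ---
# Exercises only the names defined above: the singleton counter and the
# pydantic models for one choice of a streamed chat-completion chunk.
if __name__ == "__main__":
    # Instantiating the class again returns the very same object (singleton).
    assert NumericIDGenerator() is id_generator

    # Build one streamed choice carrying an incremental assistant message.
    chunk = ChatCompletionResponseStreamChoice(
        index=0,
        message=DeltaMessage(role="assistant", content="Hello"),
    )

    first_id = id_generator()  # 1 on the first call in this process
    print(first_id, chunk)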