Browse Source

[Inference] resolve rebase conflicts

fix
feat/online-serving
CjhHa1 8 months ago
parent
commit
7bbb28e48b
  1. 2
      colossalai/inference/core/engine.py
  2. 1
      colossalai/shardformer/layer/embedding.py

2
colossalai/inference/core/engine.py

@ -1,6 +1,6 @@
import time
from itertools import count
from typing import Dict, List, Optional, Tuple, Union, Iterable
from typing import Dict, List, Optional, Tuple, Union
import numpy as np
import torch

1
colossalai/shardformer/layer/embedding.py

@ -248,7 +248,6 @@ class VocabParallelEmbedding1D(PaddingParallelModule):
The initializer of weight, defaults to normal initializer.
The ``args`` and ``kwargs`` used in :class:``torch.nn.functional.embedding`` should contain:
::
max_norm (float, optional): If given, each embedding vector with norm larger than max_norm is
renormalized to have norm max_norm. Note: this will modify weight in-place.

Loading…
Cancel
Save