mirror of https://github.com/hpcaitech/ColossalAI
parent c064032865
commit 7bbb28e48b
@@ -1,6 +1,6 @@
 import time
 from itertools import count
-from typing import Dict, List, Optional, Tuple, Union, Iterable
+from typing import Dict, List, Optional, Tuple, Union

 import numpy as np
 import torch
@@ -248,7 +248,6 @@ class VocabParallelEmbedding1D(PaddingParallelModule):
 he initializer of weight, defaults to normal initializer.

 The ``args`` and ``kwargs`` used in :class:``torch.nn.functional.embedding`` should contain:
-::

 max_norm (float, optional): If given, each embedding vector with norm larger than max_norm is
 renormalized to have norm max_norm. Note: this will modify weight in-place.
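For context on the ``max_norm`` argument documented in the hunk above, here is a minimal sketch of its behavior in plain PyTorch (not ColossalAI's parallel embedding; the tensor names ``weight`` and ``indices`` are illustrative, not part of the ColossalAI API): ``torch.nn.functional.embedding`` renormalizes any looked-up row whose norm exceeds ``max_norm``, and does so in place on the weight tensor.

# Minimal sketch of the max_norm behavior described in the docstring above,
# using plain PyTorch rather than ColossalAI's VocabParallelEmbedding1D.
# `weight` and `indices` are illustrative names, not ColossalAI API.
import torch
import torch.nn.functional as F

weight = torch.randn(8, 4) * 5.0      # most rows will have L2 norm > 1.0
indices = torch.tensor([0, 3, 7])

norms_before = weight[indices].norm(dim=1).clone()
out = F.embedding(indices, weight, max_norm=1.0)
norms_after = weight[indices].norm(dim=1)

# Only the looked-up rows are renormalized, and `weight` itself is
# modified in place, exactly as the docstring's note warns.
print(norms_before)   # typically well above 1.0
print(norms_after)    # <= 1.0 for every looked-up row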