| File | Last commit | Date |
| --- | --- | --- |
| `__init__.py` | [shardformer] init shardformer code structure (#3731) | 2023-07-04 16:05:01 +08:00 |
| `auto_policy.py` | [lora] lora support hybrid parallel plugin (#5956) | 2024-08-02 10:36:58 +08:00 |
| `base_policy.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `bert.py` | [shardformer] delete xformers (#5859) | 2024-06-28 11:20:04 +08:00 |
| `blip2.py` | [Shardformer] add assert for num of attention heads divisible by tp_size (#5670) | 2024-04-29 18:47:47 +08:00 |
| `bloom.py` | [shardformer] delete xformers (#5859) | 2024-06-28 11:20:04 +08:00 |
| `chatglm2.py` | [ShardFormer] Add Ulysses Sequence Parallelism support for Command-R, Qwen2 and ChatGLM (#5897) | 2024-07-10 11:34:25 +08:00 |
| `command.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `deepseek.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `falcon.py` | [Shardformer] Add parallel output for shardformer models (bloom, falcon) (#5702) | 2024-05-21 11:07:13 +08:00 |
| `gpt2.py` | change 'xxx if xxx else None' to 'xxx or None' | 2024-06-18 03:32:42 +00:00 |
| `gptj.py` | [shardformer] upgrade transformers to 4.39.3 (#5815) | 2024-06-14 10:59:33 +08:00 |
| `llama.py` | [Hotfix] Fix llama fwd replacement bug (#6031) | 2024-08-23 15:44:27 +08:00 |
| `mistral.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `mixtral.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `opt.py` | [pre-commit.ci] auto fixes from pre-commit.com hooks | 2024-05-07 07:07:09 +00:00 |
| `qwen2.py` | [Feature] Zigzag Ring attention (#5905) | 2024-08-16 13:56:38 +08:00 |
| `sam.py` | [shardformer] delete xformers (#5859) | 2024-06-28 11:20:04 +08:00 |
| `t5.py` | [shardformer] Support the T5ForTokenClassification model (#5816) | 2024-06-27 16:40:38 +08:00 |
| `vit.py` | [Shardformer] add assert for num of attention heads divisible by tp_size (#5670) | 2024-04-29 18:47:47 +08:00 |
| `whisper.py` | [Shardformer] add assert for num of attention heads divisible by tp_size (#5670) | 2024-04-29 18:47:47 +08:00 |