mirror of https://github.com/hpcaitech/ColossalAI
[moe] fix mixtral forward default value (#5329)
parent b60be18dcc
commit 956b561b54
@@ -437,7 +437,7 @@ class MixtralPipelineForwards:
         use_cache: Optional[bool] = None,
         output_attentions: Optional[bool] = None,
         output_hidden_states: Optional[bool] = None,
-        output_router_logits: Optional[bool] = True,
+        output_router_logits: Optional[bool] = None,
         return_dict: Optional[bool] = None,
         stage_manager: Optional[PipelineStageManager] = None,
         hidden_states: Optional[torch.FloatTensor] = None,
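Changing the default from True to None lets the pipeline forward resolve the flag from the model config when the caller does not pass it, instead of unconditionally forcing router logits on. Below is a minimal sketch of that resolution pattern, assuming the standard Hugging Face transformers convention; the exact fallback code inside ColossalAI's forward is an assumption, not copied from the source:

    # Hedged sketch: resolve a tri-state Optional[bool] flag against the
    # model config. `self.config.output_router_logits` is the usual
    # transformers fallback; its use here is an assumption for illustration.
    output_router_logits = (
        output_router_logits
        if output_router_logits is not None
        else self.config.output_router_logits
    )

With a hard-coded default of True, a caller relying on the config default could never disable router logits without passing the argument explicitly; a default of None restores that fallback path.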