Skip to content

Commit 186737b

Browse files
committed
fix megatron: handle missing/non-dict "rope_scaling" in architecture config when building EAGLE config
Signed-off-by: h-guo18 <67671475+h-guo18@users.noreply.github.com>
1 parent 35590b6 commit 186737b

1 file changed

Lines changed: 3 additions & 6 deletions

File tree

modelopt/torch/speculative/plugins/megatron_eagle.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -107,12 +107,9 @@ def dict_to_config(
107107
config.position_embedding_type = architecture_config.get("position_embedding_type")
108108
config.rotary_percent = 1.0
109109
config.rotary_base = architecture_config.get("rope_theta")
110-
config.rope_scaling = "rope_scaling" in architecture_config
111-
config.rope_scaling_factor = (
112-
architecture_config.get("rope_scaling").get("factor")
113-
if "rope_scaling" in architecture_config
114-
else None
115-
)
110+
_rope_scaling_dict = architecture_config.get("rope_scaling", {})
111+
config.rope_scaling = isinstance(_rope_scaling_dict, dict) and "factor" in _rope_scaling_dict
112+
config.rope_scaling_factor = _rope_scaling_dict.get("factor") if config.rope_scaling else None
116113

117114
config.draft_vocab_size = architecture_config.get("draft_vocab_size")
118115
config.use_input_layernorm_in_first_layer = architecture_config.get(

0 commit comments

Comments (0)