src/speculators/convert/eagle/eagle3_converter.py (1 addition, 1 deletion)

@@ -145,7 +145,7 @@ def _create_transformer_config_from_eagle(
             vocab_size=eagle_config.get("target_vocab_size", 128000),
             hidden_size=eagle_config.get("hidden_size", 4096),
             intermediate_size=eagle_config.get("intermediate_size", 11008),
-            num_hidden_layers=1,
+            num_hidden_layers=eagle_config.get("num_hidden_layers", 1),
             num_attention_heads=eagle_config.get("num_attention_heads", 32),
             num_key_value_heads=eagle_config.get("num_key_value_heads", 8),
             hidden_act=eagle_config.get("hidden_act", "silu"),
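For illustration, a minimal sketch of the fallback semantics this change relies on; the two-layer config dict below is hypothetical:

# Hypothetical two-layer Eagle3 config dict, for illustration only.
eagle_config = {"hidden_size": 4096, "num_hidden_layers": 2}

# Previously the converter hardcoded num_hidden_layers=1; with this change
# the drafter's own depth is used, falling back to 1 for older configs
# that omit the key.
assert eagle_config.get("num_hidden_layers", 1) == 2
assert {}.get("num_hidden_layers", 1) == 1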
tests/unit/convert/test_eagle3_converter.py (38 additions, 0 deletions)

@@ -93,6 +93,44 @@ def test_config_max_position_embeddings_logic(
         # rope_theta comes from Eagle3 config, not verifier
         assert llama_config.rope_theta == 10000.0
 
+    @pytest.mark.sanity
+    @patch(
+        "speculators.convert.eagle.eagle3_converter.PretrainedConfig.get_config_dict"
+    )
+    def test_config_num_hidden_layers_from_config(
+        self, mock_get_config, sample_eagle3_config
+    ):
+        """Test that num_hidden_layers is taken from eagle_config when present."""
+        mock_get_config.return_value = ({}, None)
+        converter = Eagle3Converter()
+
+        # Add num_hidden_layers to the sample config
+        sample_eagle3_config["num_hidden_layers"] = 3
+
+        llama_config = converter._create_transformer_config_from_eagle(
+            sample_eagle3_config, "meta-llama/Llama-3.1-8B"
+        )
+        assert llama_config.num_hidden_layers == 3
+
+    @pytest.mark.sanity
+    @patch(
+        "speculators.convert.eagle.eagle3_converter.PretrainedConfig.get_config_dict"
+    )
+    def test_config_num_hidden_layers_default(
+        self, mock_get_config, sample_eagle3_config
+    ):
+        """Test that num_hidden_layers defaults to 1 when not in config."""
+        mock_get_config.return_value = ({}, None)
+        converter = Eagle3Converter()
+
+        # Remove num_hidden_layers if present
+        sample_eagle3_config.pop("num_hidden_layers", None)
+
+        llama_config = converter._create_transformer_config_from_eagle(
+            sample_eagle3_config, "meta-llama/Llama-3.1-8B"
+        )
+        assert llama_config.num_hidden_layers == 1
+
     @pytest.mark.sanity
     @patch(
         "speculators.convert.eagle.eagle3_converter.PretrainedConfig.get_config_dict"

Review comment (Collaborator), on test_config_num_hidden_layers_default:

Could we also add a test for converting an actual multilayer checkpoint? We could use https://huggingface.co/nm-testing/random-weights-llama3.1.8b-2layer-eagle3/tree/main
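A rough sketch of the multilayer conversion test suggested above, assuming hub access to the nm-testing checkpoint and to the gated meta-llama verifier repo, and assuming that checkpoint's config declares num_hidden_layers=2; the test name and its placement outside the existing test class are hypothetical:

import pytest
from transformers import PretrainedConfig

from speculators.convert.eagle.eagle3_converter import Eagle3Converter


@pytest.mark.sanity
def test_convert_multilayer_checkpoint_config():
    # Sketch only: fetch the real config for the two-layer Eagle3 checkpoint
    # suggested in review, then convert it without mocking get_config_dict.
    config_dict, _ = PretrainedConfig.get_config_dict(
        "nm-testing/random-weights-llama3.1.8b-2layer-eagle3"
    )
    converter = Eagle3Converter()
    llama_config = converter._create_transformer_config_from_eagle(
        config_dict, "meta-llama/Llama-3.1-8B"
    )
    # Assumes the checkpoint config reports two hidden layers.
    assert llama_config.num_hidden_layers == 2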