Skip to content

Commit 68d3bed

Browse files
committed
Modify code comment
Signed-off-by: Shinichi Hemmi <[email protected]>
1 parent 49dd3b0 commit 68d3bed

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

vllm/model_executor/models/plamo2.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ class LinearType(str, enum.Enum):
4949
Fp8Retain = "fp8-retain"
5050

5151

52-
# Just for type hinting and PlamoPreTrainedModel.config_class.
52+
# Only used for type hinting and PlamoPreTrainedModel.config_class.
5353
class PlamoConfig(PretrainedConfig): # type: ignore
5454
model_type: str = "plamo"
5555

@@ -798,15 +798,15 @@ def load_weights(self, weights: Iterable[Tuple[str, torch.Tensor]]):
798798
params_dict = dict(self.named_parameters())
799799
for name, loaded_weight in weights:
800800

801-
# Alignment team workaround: somehow when tie_word_embeddings=True,
802-
# `lm_head.weight` may be in the safetensor, which causing dict key
803-
# access error.
801+
# Having both tie_word_embeddings=True and lm_head.weight in the
802
# safetensor at the same time causes a dict key access error.
804803
if name == "lm_head.weight" and self.config.tie_word_embeddings:
805804
assert "lm_head.weight" not in params_dict
806805
continue
807806

808807
# Update the weight names to be compatible with the vllm version
809-
# of the model. Do not change the order of the replacements.
808+
# of the model.
809+
# Do not change the order of the replacements.
810810
replacements = {
811811
# Skip PlamoDecoderLayers.
812812
".layers.layers": ".layers",

0 commit comments

Comments (0)