1 parent 50e8de8 commit b182329
vllm_ascend/patch/worker/patch_common/patch_lora_embedding.py
@@ -12,6 +12,7 @@


 class AscendVocabParallelEmbeddingWithLoRA(VocabParallelEmbeddingWithLoRA):
+
     @classmethod
     def can_replace_layer(
         cls,
@@ -22,6 +23,7 @@ def can_replace_layer(
     ) -> bool:
         return type(source_layer) is AscendVocabParallelEmbedding


 # Patch for lora register_model issue after overriding VocabParallelEmbedding class (#2515)
 _all_lora_classes.add(AscendVocabParallelEmbeddingWithLoRA)
-vllm.lora.utils._all_lora_classes = _all_lora_classes
+vllm.lora.utils._all_lora_classes = _all_lora_classes
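
For context: vLLM picks a LoRA wrapper for each base layer by walking the classes registered in vllm.lora.utils._all_lora_classes and asking each one can_replace_layer(). The stock VocabParallelEmbeddingWithLoRA matches only the stock VocabParallelEmbedding, which no longer applies once vllm-ascend swaps the layer for AscendVocabParallelEmbedding, so the subclass above has to be added to the registry. A minimal sketch of that selection loop follows; pick_lora_wrapper is a hypothetical helper with a reduced signature, not vLLM's real entry point (which also takes lora_config, packed_modules_list, and so on).

from typing import Any, Optional, Type

import torch.nn as nn

import vllm.lora.utils


def pick_lora_wrapper(source_layer: nn.Module, **ctx: Any) -> Optional[Type]:
    # Walk the registered wrapper classes; each one decides via
    # can_replace_layer() whether it applies to this base layer.
    # ctx stands in for the extra arguments the real check expects
    # (lora_config, packed_modules_list, ...).
    for lora_cls in vllm.lora.utils._all_lora_classes:
        if lora_cls.can_replace_layer(source_layer=source_layer, **ctx):
            return lora_cls
    # Without the registration above, an AscendVocabParallelEmbedding layer
    # falls through to here and is left without LoRA support.
    return None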