
Commit

fix cpu test
dsikka committed Aug 4, 2024
1 parent 0ba00ab commit 4a86201
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions vllm/model_executor/layers/fused_moe/__init__.py
@@ -1,11 +1,8 @@
-from vllm.model_executor.layers.fused_moe.fused_moe_awq import (
-    fused_experts_awq)
 from vllm.model_executor.layers.fused_moe.layer import (FusedMoE,
                                                         FusedMoEMethodBase)
 from vllm.triton_utils import HAS_TRITON
 
 __all__ = [
-    "fused_experts_awq",
     "FusedMoE",
     "FusedMoEMethodBase",
 ]
@@ -15,9 +12,12 @@
     from vllm.model_executor.layers.fused_moe.fused_moe import (
         fused_experts, fused_moe, fused_topk, get_config_file_name,
         grouped_topk)
+    from vllm.model_executor.layers.fused_moe.fused_moe_awq import (
+        fused_experts_awq)
 
     __all__ += [
         "fused_moe",
+        "fused_experts_awq",
         "fused_experts",
         "fused_topk",
         "get_config_file_name",
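Taken together, the patch moves the fused_experts_awq import and its __all__ entry out of the unconditional top of the module and into the Triton-gated section, presumably because the AWQ fused-MoE kernel depends on Triton, which is not available in the CPU test environment. Below is a minimal sketch of the resulting __init__.py layout, assuming the indented hunk above sits inside an if HAS_TRITON: block (the guard line itself falls outside the shown diff context) and with the trailing exports elided as in the diff:

from vllm.model_executor.layers.fused_moe.layer import (FusedMoE,
                                                        FusedMoEMethodBase)
from vllm.triton_utils import HAS_TRITON

# Always importable, even on CPU-only builds without Triton.
__all__ = [
    "FusedMoE",
    "FusedMoEMethodBase",
]

if HAS_TRITON:
    # Triton-backed kernels, including the AWQ fused-MoE path, are only
    # imported and exported when Triton is actually available.
    from vllm.model_executor.layers.fused_moe.fused_moe import (
        fused_experts, fused_moe, fused_topk, get_config_file_name,
        grouped_topk)
    from vllm.model_executor.layers.fused_moe.fused_moe_awq import (
        fused_experts_awq)

    __all__ += [
        "fused_moe",
        "fused_experts_awq",
        "fused_experts",
        "fused_topk",
        "get_config_file_name",
        # ... remaining exports elided in the diff view
    ]

With this structure, importing vllm.model_executor.layers.fused_moe on a CPU-only build succeeds and simply omits the Triton-dependent symbols from __all__.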
