
Commit b1c782e

Fix typos in comment (unslothai#3557)

1 parent 56392a7 commit b1c782e

File tree: 2 files changed, +5 -5 lines changed

unsloth/kernels/fp8.py

Lines changed: 4 additions & 4 deletions
@@ -323,8 +323,8 @@ def forward(ctx, X, weight, weight_scale):
         assert block_size is not None, "block_size is not set"
         if triton.cdiv(m, block_size[0]) != p or triton.cdiv(n, block_size[1]) != q:
             if triton.cdiv(m, block_size[0]) == q and triton.cdiv(n, block_size[1]) == p:
-                # weights are tranposed during backward pass for training :)
-                # We tranpose weight scale to counter that. Note that transposing weight would cause issues with matmul with input X
+                # weights are transposed during backward pass for training :)
+                # We transpose weight scale to counter that. Note that transposing weight would cause issues with matmul with input X
                 weight_scale = weight_scale.T
             else:
                 raise ValueError(f"Weight shape {weight.shape} and scales shape {weight_scale.shape} is not compatible with block size {block_size}")
@@ -437,8 +437,8 @@ def forward(ctx, X, weight, weight_scale, bias=None):

         if triton.cdiv(m, bs_n) != p or triton.cdiv(n, bs_k) != q:
             if triton.cdiv(m, bs_n) == q and triton.cdiv(n, bs_k) == p:
-                # weights are tranposed during backward pass for training :)
-                # We tranpose weight scale to counter that. Note that transposing weight would cause issues with matmul with input X
+                # weights are transposed during backward pass for training :)
+                # We transpose weight scale to counter that. Note that transposing weight would cause issues with matmul with input X
                 weight_scale = weight_scale.T
             else:
                 raise ValueError(f"Weight shape {weight.shape} and scales shape {weight_scale.shape} is not compatible with block size {block_size}")

unsloth/models/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
 try:
     from .falcon_h1 import FastFalconH1Model
 except:
-    # transformers_version < 4.53.0 does not have falcon_h1 so silenty skip it for now
+    # transformers_version < 4.53.0 does not have falcon_h1 so silently skip it for now
     pass
 from .dpo import PatchDPOTrainer, PatchKTOTrainer
 from ._utils import is_bfloat16_supported, is_vLLM_available, __version__
