1 parent af074e5 commit a6dc025
src/compressed_tensors/quantization/lifecycle/forward.py
@@ -469,7 +469,6 @@ def _quantize(
     if global_scale is not None:
         scale = scale / global_scale
 
-    scale = maybe_convert_from_mxfp4_scale(args=args, scale=scale)
     scaled = x / scale
 
     if zero_point is not None:
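For context, the hunk touches the scaling step of _quantize: the input is divided by the scale (after the scale is optionally folded with a global scale), and a zero point is applied when present; this commit drops the extra maybe_convert_from_mxfp4_scale(...) conversion so the scale is used directly. Below is a minimal, self-contained sketch of that flow, not the library's actual implementation; the round-and-clamp step and the q_min/q_max bounds are assumptions added for illustration.

from typing import Optional

import torch


def quantize_sketch(
    x: torch.Tensor,
    scale: torch.Tensor,
    zero_point: Optional[torch.Tensor] = None,
    global_scale: Optional[torch.Tensor] = None,
    q_min: float = -128.0,
    q_max: float = 127.0,
) -> torch.Tensor:
    # Fold the optional global scale into the local scale, mirroring the
    # `scale = scale / global_scale` context line in the hunk above.
    if global_scale is not None:
        scale = scale / global_scale

    # With this commit, the scale is used directly; no intermediate
    # mxfp4 scale conversion is applied here.
    scaled = x / scale

    # Apply the zero point for asymmetric quantization schemes.
    if zero_point is not None:
        scaled = scaled + zero_point

    # Round and clamp to an assumed representable range (illustrative bounds).
    return torch.clamp(torch.round(scaled), q_min, q_max)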