Consolidate ZeroPointDomain.NONE & None zero point domains #1556
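In short, the plain Python literal None and the enum member ZeroPointDomain.NONE appear to have both meant "no zero point" in different places, and this PR standardizes on the enum member. A minimal migration sketch for downstream callers that still pass the literal (the helper below is hypothetical and not a torchao API; only the ZeroPointDomain import added in the first hunk of this diff is assumed):

    from torchao.quantization.quant_primitives import ZeroPointDomain

    def normalize_zero_point_domain(zero_point_domain):
        """Map the legacy `None` spelling onto the enum member.

        Hypothetical caller-side shim, not part of torchao."""
        if zero_point_domain is None:
            return ZeroPointDomain.NONE
        return zero_point_domain

    # Both spellings now resolve to the same enum member:
    assert normalize_zero_point_domain(None) is ZeroPointDomain.NONE
    assert normalize_zero_point_domain(ZeroPointDomain.NONE) is ZeroPointDomain.NONE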
@@ -21,6 +21,7 @@
 )
 from torchao.quantization.quant_primitives import (
     MappingType,
+    ZeroPointDomain,
 )

@@ -74,7 +75,7 @@ def test_block_size_calc_success(self):
             eps=torch.finfo(torch.float32).eps,
             scale_dtype=torch.float,
             zero_point_dtype=torch.int,
-            zero_point_domain=None,
+            zero_point_domain=ZeroPointDomain.NONE,
         )
         example_inputs = [
             torch.randn(10, 2048),

@@ -93,7 +94,7 @@ def test_block_size_calc_success(self):
             eps=torch.finfo(torch.float32).eps,
             scale_dtype=torch.float,
             zero_point_dtype=torch.int,
-            zero_point_domain=None,
+            zero_point_domain=ZeroPointDomain.NONE,
         )
         for example_input in example_inputs:
             obs(example_input)

@@ -108,7 +109,7 @@ def test_block_size_row_errors(self):
             eps=torch.finfo(torch.float32).eps,
             scale_dtype=torch.float,
             zero_point_dtype=torch.int,
-            zero_point_domain=None,
+            zero_point_domain=ZeroPointDomain.NONE,
         )
         example_inputs = [
             torch.randn(10, 2048),

@@ -127,7 +128,7 @@ def test_block_size_row_errors(self):
             eps=torch.finfo(torch.float32).eps,
             scale_dtype=torch.float,
             zero_point_dtype=torch.int,
-            zero_point_domain=None,
+            zero_point_domain=ZeroPointDomain.NONE,
         )
         example_inputs = [
             torch.randn(10, 2048),

@@ -155,7 +156,7 @@ def test_linear_observer_tensor(self, observe_weight: bool):
             eps=torch.finfo(torch.float32).eps,
             scale_dtype=torch.float,
             zero_point_dtype=torch.int,
-            zero_point_domain=None,
+            zero_point_domain=ZeroPointDomain.NONE,
         )
         if observe_weight:
             weight_observer = AffineQuantizedMinMaxObserver(

@@ -165,7 +166,7 @@ def test_linear_observer_tensor(self, observe_weight: bool):
                 eps=torch.finfo(torch.float32).eps,
                 scale_dtype=torch.float,
                 zero_point_dtype=torch.int,
-                zero_point_domain=None,
+                zero_point_domain=ZeroPointDomain.NONE,
             )
         else:
             weight_observer = None

@@ -199,7 +200,6 @@ def test_linear_observer_tensor(self, observe_weight: bool):
             input_scale.item(),
             max_val / max_fp8,
         )
-        self.assertIsNotNone(input_zero_point)
Review thread on the removed self.assertIsNotNone(input_zero_point) line (several comments are truncated in this view):

- is there a change of behavior when you change zero_point_domain from None to ZeroPointDomain.NONE?
- Yes, […]
- I see, so what is the meaning of […]
- Looks like some APIs/implementations were creating/expecting a […]
- Hi @jerryzh168, is it possible that some torchao users' code may be expecting a non-None […]
- I think that most usages of this function are internal to torchao, so it's okay to BC-break; you can add the label just to be sure.
- Thanks for your advice, @drisspg!
- I added a bc-breaking label, please also write a bc-breaking note similar to #1049.
- Thanks again, @jerryzh168! I added a note & rebased the PR.

         if observe_weight:
             weight_observer = linear.weight.weight_observer
@@ -210,7 +210,6 @@ def test_linear_observer_tensor(self, observe_weight: bool):
                 atol=5e-5,
                 rtol=0.0,
             )
-            self.assertIsNotNone(weight_zero_point)
         else:
             self.assertIsNone(linear.weight.weight_observer)
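The two deleted assertIsNotNone checks above reflect the behavior change discussed in the review thread: with zero_point_domain=ZeroPointDomain.NONE the observer now reports no zero point at all (None) rather than a dummy tensor. A caller that previously assumed a tensor was always present could guard for that as in the following sketch; the helper and the dummy tensors are illustrative only, not torchao API:

    import torch

    def dequantize_sketch(quantized, scale, zero_point):
        # Illustrative only: with ZeroPointDomain.NONE the zero point may now be
        # None, in which case dequantization is purely scale-based (symmetric).
        if zero_point is None:
            return quantized.to(scale.dtype) * scale
        return (quantized.to(scale.dtype) - zero_point) * scale

    q = torch.tensor([1, -2, 3], dtype=torch.int8)
    s = torch.tensor(0.5)
    print(dequantize_sketch(q, s, None))             # tensor([ 0.5000, -1.0000,  1.5000])
    print(dequantize_sketch(q, s, torch.tensor(1)))  # tensor([ 0.0000, -1.5000,  1.0000])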