astc_decoder: Fix LDR CEM1 endpoint calculation

Per the spec, L1 is clamped to 0xFF if it is greater than 0xFF. An oversight caused us to take the maximum of L1 and 0xFF rather than the minimum, so any sum below 0xFF was forced up to full brightness and any sum above it was left unclamped.
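
For reference, a minimal standalone sketch of the CEM 1 (LDR luminance, base+offset) decode, assuming v[0] and v[1] are the two unquantized 8-bit color values; the helper name is hypothetical:

    #include <algorithm>
    #include <cstdint>

    // CEM 1: the low endpoint L0 packs the top bits of both values;
    // the high endpoint L1 adds a 6-bit offset, clamped to the LDR maximum.
    void DecodeLuminanceBaseOffset(const uint32_t v[2], uint32_t& L0, uint32_t& L1) {
        L0 = (v[0] >> 2) | (v[1] & 0xC0);
        L1 = std::min(L0 + (v[1] & 0x3F), 0xFFU); // clamp down, never raise
    }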

Huge thanks to wwylele for finding this.

Co-Authored-By: Weiyi Wang <wwylele@gmail.com>
ameerj 2021-06-15 20:16:16 -04:00
parent f9bfeaa2bc
commit 5fc8393125
2 changed files with 2 additions and 2 deletions


@@ -763,7 +763,7 @@ void ComputeEndpoints(out uvec4 ep1, out uvec4 ep2, uint color_endpoint_mode) {
     case 1: {
         READ_UINT_VALUES(2)
         uint L0 = (v[0] >> 2) | (v[1] & 0xC0);
-        uint L1 = max(L0 + (v[1] & 0x3F), 0xFFU);
+        uint L1 = min(L0 + (v[1] & 0x3F), 0xFFU);
         ep1 = uvec4(0xFF, L0, L0, L0);
         ep2 = uvec4(0xFF, L1, L1, L1);
         break;


@@ -1217,7 +1217,7 @@ static void ComputeEndpoints(Pixel& ep1, Pixel& ep2, const u32*& colorValues,
     case 1: {
         READ_UINT_VALUES(2)
         u32 L0 = (v[0] >> 2) | (v[1] & 0xC0);
-        u32 L1 = std::max(L0 + (v[1] & 0x3F), 0xFFU);
+        u32 L1 = std::min(L0 + (v[1] & 0x3F), 0xFFU);
         ep1 = Pixel(0xFF, L0, L0, L0);
         ep2 = Pixel(0xFF, L1, L1, L1);
     } break;
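
To see the difference concretely, a hypothetical standalone check with inputs chosen so the base+offset sum overflows the 8-bit range:

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    int main() {
        const uint32_t v[2] = {0xFC, 0xFF};
        const uint32_t L0 = (v[0] >> 2) | (v[1] & 0xC0); // 0x3F | 0xC0 = 0xFF
        const uint32_t sum = L0 + (v[1] & 0x3F);         // 0xFF + 0x3F = 0x13E
        assert(std::max(sum, 0xFFU) == 0x13EU); // old code: left above the LDR range
        assert(std::min(sum, 0xFFU) == 0xFFU);  // fixed code: clamped to 0xFF
        return 0;
    }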