
rainbow/test_boolean: Use 1^x instead of 1-x to avoid conversion warning

Branch: kyber
Author: John M. Schanck, 4 years ago
Committed by: Kris Kwiatkowski
Commit: bd1fcaabde
9 changed files with 9 additions and 9 deletions
  1. crypto_sign/rainbowIIIc-classic/clean/blas_comm.c (+1, -1)
  2. crypto_sign/rainbowIIIc-cyclic-compressed/clean/blas_comm.c (+1, -1)
  3. crypto_sign/rainbowIIIc-cyclic/clean/blas_comm.c (+1, -1)
  4. crypto_sign/rainbowIa-classic/clean/blas_comm.c (+1, -1)
  5. crypto_sign/rainbowIa-cyclic-compressed/clean/blas_comm.c (+1, -1)
  6. crypto_sign/rainbowIa-cyclic/clean/blas_comm.c (+1, -1)
  7. crypto_sign/rainbowVc-classic/clean/blas_comm.c (+1, -1)
  8. crypto_sign/rainbowVc-cyclic-compressed/clean/blas_comm.c (+1, -1)
  9. crypto_sign/rainbowVc-cyclic/clean/blas_comm.c (+1, -1)
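
All nine changes are the same one-character substitution inside the constant-time Gaussian elimination: the predicate passed to gf256v_predicated_add becomes `1^is_nonzero(pivot)` instead of `1-is_nonzero(pivot)`. Both expressions yield the same 0/1 value (the is_nonzero helpers return 0 or 1), but after integer promotion `1 - x` is an int that, as far as the type system knows, may be negative, so narrowing it back into the uint8_t predicate parameter is presumably what produced the conversion warning; `1 ^ x` stays within 0-255. Below is a minimal, self-contained sketch of the pattern; the helper names and bodies are simplified stand-ins for illustration, not the PQClean implementations.

    #include <stddef.h>
    #include <stdint.h>

    /* Simplified stand-in for the gf256_is_nonzero / gf16_is_nonzero helpers:
     * returns 1 for any nonzero input, 0 otherwise, without branching. */
    static uint8_t is_nonzero(uint8_t a) {
        unsigned r = a;
        r |= r >> 4;
        r |= r >> 2;
        r |= r >> 1;
        return (uint8_t)(r & 1u);
    }

    /* Simplified stand-in for gf256v_predicated_add: XOR src into dst only
     * when predicate is 1, again without a data-dependent branch. */
    static void predicated_add(uint8_t *dst, uint8_t predicate,
                               const uint8_t *src, size_t len) {
        uint8_t mask = (uint8_t)(-(int)predicate); /* 0x00 or 0xFF */
        for (size_t k = 0; k < len; k++) {
            dst[k] = (uint8_t)(dst[k] ^ (src[k] & mask));
        }
    }

    /* One step of the pivot fix-up from the elimination loop: fold row aj
     * into row ai only while the pivot entry ai[i] is still zero. */
    void pivot_fixup(uint8_t *ai, const uint8_t *aj, size_t w, size_t i) {
        /* Both forms compute the same 0/1 predicate, but after promotion to int:
         *   1 - is_nonzero(ai[i])  may be negative as far as the compiler knows,
         *                          so narrowing it to the uint8_t parameter can
         *                          trip a conversion warning;
         *   1 ^ is_nonzero(ai[i])  stays in 0-255, so there is no narrowing issue. */
        predicated_add(ai, 1 ^ is_nonzero(ai[i]), aj, w);
    }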

crypto_sign/rainbowIIIc-classic/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWIIICCLASSIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWIIICCLASSIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWIIICCLASSIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWIIICCLASSIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWIIICCLASSIC_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];


crypto_sign/rainbowIIIc-cyclic-compressed/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWIIICCYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWIIICCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWIIICCYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWIIICCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWIIICCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];


crypto_sign/rainbowIIIc-cyclic/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWIIICCYCLIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWIIICCYCLIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWIIICCYCLIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWIIICCYCLIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWIIICCYCLIC_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];


crypto_sign/rainbowIa-classic/clean/blas_comm.c (+1, -1)

@@ -74,7 +74,7 @@ static unsigned int gf16mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsigne
 uint8_t *ai = mat + n_w_byte * i;
 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + n_w_byte * j;
-    PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf256v_predicated_add(ai + offset_byte, 1-PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
+    PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf256v_predicated_add(ai + offset_byte, 1^PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
 }
 uint8_t pivot = PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16v_get_ele(ai, i);
 r8 &= PQCLEAN_RAINBOWIACLASSIC_CLEAN_gf16_is_nonzero(pivot);


crypto_sign/rainbowIa-cyclic-compressed/clean/blas_comm.c (+1, -1)

@@ -74,7 +74,7 @@ static unsigned int gf16mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsigne
 uint8_t *ai = mat + n_w_byte * i;
 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + n_w_byte * j;
-    PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + offset_byte, 1-PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
+    PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + offset_byte, 1^PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
 }
 uint8_t pivot = PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16v_get_ele(ai, i);
 r8 &= PQCLEAN_RAINBOWIACYCLICCOMPRESSED_CLEAN_gf16_is_nonzero(pivot);


crypto_sign/rainbowIa-cyclic/clean/blas_comm.c (+1, -1)

@@ -74,7 +74,7 @@ static unsigned int gf16mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsigne
 uint8_t *ai = mat + n_w_byte * i;
 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + n_w_byte * j;
-    PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf256v_predicated_add(ai + offset_byte, 1-PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
+    PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf256v_predicated_add(ai + offset_byte, 1^PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16_is_nonzero(PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16v_get_ele(ai, i)), aj + offset_byte, n_w_byte - offset_byte);
 }
 uint8_t pivot = PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16v_get_ele(ai, i);
 r8 &= PQCLEAN_RAINBOWIACYCLIC_CLEAN_gf16_is_nonzero(pivot);


crypto_sign/rainbowVc-classic/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWVCCLASSIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWVCCLASSIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWVCCLASSIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWVCCLASSIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWVCCLASSIC_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];


crypto_sign/rainbowVc-cyclic-compressed/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWVCCYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWVCCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWVCCYCLICCOMPRESSED_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWVCCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWVCCYCLICCOMPRESSED_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];


crypto_sign/rainbowVc-cyclic/clean/blas_comm.c (+1, -1)

@@ -72,7 +72,7 @@ static unsigned int gf256mat_gauss_elim_ref(uint8_t *mat, unsigned int h, unsign

 for (unsigned int j = i + 1; j < h; j++) {
     uint8_t *aj = mat + w * j;
-    PQCLEAN_RAINBOWVCCYCLIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1-PQCLEAN_RAINBOWVCCYCLIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
+    PQCLEAN_RAINBOWVCCYCLIC_CLEAN_gf256v_predicated_add(ai + skip_len_align4, 1^PQCLEAN_RAINBOWVCCYCLIC_CLEAN_gf256_is_nonzero(ai[i]), aj + skip_len_align4, w - skip_len_align4);
 }
 r8 &= PQCLEAN_RAINBOWVCCYCLIC_CLEAN_gf256_is_nonzero(ai[i]);
 uint8_t pivot = ai[i];
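
Follow-up note (an assumption, since the commit message only says "conversion warning"): the behaviour matches GCC/Clang's -Wconversion, which objects to the possibly-negative `1 - x` being implicitly narrowed to the uint8_t predicate parameter but accepts `1 ^ x`. The sketch shown before the per-file diffs can be compiled in isolation with something like `cc -Wconversion -c sketch.c` (file name hypothetical), toggling between the two forms to observe the difference.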

