@@ -972,23 +972,69 @@ void crypto_inc(u8 *a, unsigned int size)
}
EXPORT_SYMBOL_GPL(crypto_inc);
-static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
+#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
+void __crypto_xor_unaligned(u8 *dst, const u8 *src, unsigned int len)
{
- for (; size; size--)
- *a++ ^= *b++;
-}
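+ /*
+  * XOR the buffers a word, u32, u16 or byte at a time, depending on how
+  * dst and src are aligned relative to each other, so that no unaligned
+  * accesses are ever issued.
+  */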
+ unsigned long delta = (unsigned long)dst ^ (unsigned long)src;
-void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
-{
- u32 *a = (u32 *)dst;
- u32 *b = (u32 *)src;
+ /* Handle relative misalignment */
+ if (delta % sizeof(unsigned long)) {
+ /* 1-byte relative misalignment? */
+ if (delta & 1) {
+ while (len--)
+ *dst++ ^= *src++;
+ return;
+ }
- for (; size >= 4; size -= 4)
- *a++ ^= *b++;
+ /* 2-byte relative misalignment? */
+ if ((delta & 2) || sizeof(unsigned long) == 4) {
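+ /*
+  * On 32-bit builds the u32 case further down can never be reached,
+  * so the sizeof() test lets the compiler discard it as dead code.
+  */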
+ if ((unsigned long)dst % __alignof__(u16) && len) {
+ *dst++ ^= *src++;
+ len--;
+ }
+ while (len >= 2) {
+ *(u16 *)dst ^= *(u16 *)src;
+ dst += 2, src += 2, len -= 2;
+ }
+ if (len)
+ *dst ^= *src;
+ return;
+ }
+
+ /*
+  * 4-byte relative misalignment? Only 64-bit builds get here: delta is
+  * a multiple of 4 but not of sizeof(unsigned long), so u32 is the
+  * widest access both pointers can share.
+  */
+ while ((unsigned long)dst % __alignof__(u32) && len) {
+ *dst++ ^= *src++;
+ len--;
+ }
+ while (len >= 4) {
+ *(u32 *)dst ^= *(u32 *)src;
+ dst += 4, src += 4, len -= 4;
+ }
+ while (len--)
+ *dst++ ^= *src++;
+ return;
+ }
+
+ /* No relative misalignment; use word accesses */
+
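+ /* Align dst first; src then shares the same word alignment. */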
+ while ((unsigned long)dst % __alignof__(unsigned long) && len) {
+ *dst++ ^= *src++;
+ len--;
+ }
+
+ while (len >= sizeof(unsigned long)) {
+ *(unsigned long *)dst ^= *(unsigned long *)src;
+ dst += sizeof(unsigned long);
+ src += sizeof(unsigned long);
+ len -= sizeof(unsigned long);
+ }
- crypto_xor_byte((u8 *)a, (u8 *)b, size);
+ while (len--)
+ *dst++ ^= *src++;
}
-EXPORT_SYMBOL_GPL(crypto_xor);
+EXPORT_SYMBOL_GPL(__crypto_xor_unaligned);
+#endif /* !CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
@@ -191,9 +191,29 @@ static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
return queue->qlen;
}
-/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
-void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
+
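+/*
+ * Slow path for crypto_xor(): handles buffers that are misaligned, either
+ * absolutely or relative to each other. Only built when the architecture
+ * has no efficient unaligned accesses.
+ */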
+void __crypto_xor_unaligned(u8 *dst, const u8 *src, unsigned int len);
+
+static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int len)
+{
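+ /*
+  * Fast path: the CPU handles unaligned word accesses efficiently, or
+  * both buffers happen to be word-aligned anyway.
+  */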
+ if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
+ (((unsigned long)dst | (unsigned long)src) %
+ __alignof__(unsigned long) == 0)) {
+ while (len >= sizeof(unsigned long)) {
+ *(unsigned long *)dst ^= *(unsigned long *)src;
+ dst += sizeof(unsigned long);
+ src += sizeof(unsigned long);
+ len -= sizeof(unsigned long);
+ }
+
+ while (len--)
+ *dst++ ^= *src++;
+ return;
+ }
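+ /* Not word-aligned: let the out-of-line helper deal with alignment. */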
+ __crypto_xor_unaligned(dst, src, len);
+}
int blkcipher_walk_done(struct blkcipher_desc *desc,
struct blkcipher_walk *walk, int err);