
/arch/s390/crypto/crc32-vx.c

https://gitlab.com/kush/linux
// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto-API module for CRC-32 algorithms implemented with the
 * z/Architecture Vector Extension Facility.
 *
 * Copyright IBM Corp. 2015
 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
 */
#define KMSG_COMPONENT	"crc32-vx"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/fpu/api.h>

#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)
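
/*
 * The vector routines only pay off for buffers of at least VX_MIN_LEN
 * bytes, and VECTOR LOAD MULTIPLE fetches are faster on 16-byte-aligned
 * data; the alignment mask above is used to split off unaligned head and
 * tail bytes, which are handled by the software fallback.
 */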

struct crc_ctx {
	u32 key;
};

struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);

/*
 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 *
 * Creates a function to perform a particular CRC-32 computation. Depending
 * on the message buffer, the hardware-accelerated or software implementation
 * is used.  Note that the message buffer is aligned to improve fetch
 * operations of VECTOR LOAD MULTIPLE instructions.
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		      \
	static u32 __pure ___fname(u32 crc,				      \
				   unsigned char const *data, size_t datalen) \
	{								      \
		struct kernel_fpu vxstate;				      \
		unsigned long prealign, aligned, remaining;		      \
									      \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		      \
			return ___crc32_sw(crc, data, datalen);		      \
									      \
		/* Process unaligned head bytes in software so the vector  */\
		/* loop starts on a 16-byte boundary.			    */\
		if ((unsigned long)data & VX_ALIGN_MASK) {		      \
			prealign = VX_ALIGNMENT -			      \
				   ((unsigned long)data & VX_ALIGN_MASK);     \
			datalen -= prealign;				      \
			crc = ___crc32_sw(crc, data, prealign);		      \
			data = (void *)((unsigned long)data + prealign);      \
		}							      \
									      \
		aligned = datalen & ~VX_ALIGN_MASK;			      \
		remaining = datalen & VX_ALIGN_MASK;			      \
									      \
		/* Save/restore the in-use vector registers around the	    */\
		/* hardware-accelerated part.				    */\
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW);		      \
		crc = ___crc32_vx(crc, data, aligned);			      \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW);		      \
									      \
		/* A tail shorter than one 16-byte block is again done	    */\
		/* in software.						    */\
		if (remaining)						      \
			crc = ___crc32_sw(crc, data + aligned, remaining);    \
									      \
		return crc;						      \
	}
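
/*
 * Generate the three variants: little-endian CRC-32, big-endian CRC-32,
 * and little-endian CRC-32C (Castagnoli).  Each pairs a vector-accelerated
 * routine with the matching software fallback from <linux/crc32.h>.
 */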
DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)
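
/*
 * Default seeds when no key has been set: plain CRC-32 starts from zero,
 * while CRC-32C conventionally starts from ~0 (all bits set).  The matching
 * cra_init callback is selected per algorithm in the shash_alg table below.
 */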
static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = 0;
	return 0;
}

static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = ~0;
	return 0;
}

static int crc32_vx_init(struct shash_desc *desc)
{
	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->crc = mctx->key;
	return 0;
}
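
/*
 * For these CRC shashes the optional "key" is simply the 32-bit seed for
 * subsequent computations; the little-endian algorithms read it as a
 * __le32, the big-endian one as a __be32.
 */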
static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			   unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = le32_to_cpu(*(__le32 *)newkey);
	return 0;
}

static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			     unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = be32_to_cpu(*(__be32 *)newkey);
	return 0;
}
static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
	return 0;
}

static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
	return 0;
}

static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
	return 0;
}
static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
	return 0;
}

static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
	return 0;
}

static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			     u8 *out)
{
	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
	return 0;
}
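
/*
 * The finup variants continue from the per-request state in the shash
 * descriptor, whereas the digest variants start from the tfm-wide seed set
 * by init/setkey.  Note that the func argument is unused by the FINUP and
 * DIGEST generators below; only UPDATE calls it directly.
 */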
#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data, \
				    unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)

#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)

#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
									      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
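
/*
 * All three algorithms register at priority 200, so they win over the
 * generic C implementations, and carry CRYPTO_ALG_OPTIONAL_KEY because a
 * key (seed) is not required for normal use.
 */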
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32_vx_setkey,
		.update		= crc32le_vx_update,
		.final		= crc32le_vx_final,
		.finup		= crc32le_vx_finup,
		.digest		= crc32le_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32be_vx_setkey,
		.update		= crc32be_vx_update,
		.final		= crc32be_vx_final,
		.finup		= crc32be_vx_finup,
		.digest		= crc32be_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32_vx_setkey,
		.update		= crc32c_vx_update,
		.final		= crc32c_vx_final,
		.finup		= crc32c_vx_finup,
		.digest		= crc32c_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};

static int __init crc_vx_mod_init(void)
{
	return crypto_register_shashes(crc32_vx_algs,
				       ARRAY_SIZE(crc32_vx_algs));
}

static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}
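
/*
 * module_cpu_feature_match() ties module loading to the z/Architecture
 * vector facility (VXRS): the init routine only runs on machines that
 * expose that CPU feature, and the generated alias lets the module
 * auto-load on them.
 */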
module_cpu_feature_match(VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");
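
For context, a minimal sketch of how kernel code might exercise the registered "crc32" shash through the crypto API. This is not part of the file above; the helper name crc32_vx_example() is hypothetical, and the crypto core picks crc32-vx automatically when the vector facility is present thanks to its higher priority.

#include <crypto/hash.h>
#include <linux/err.h>
#include <asm/unaligned.h>

/* Hypothetical example: one-shot little-endian CRC-32 over buf. */
static int crc32_vx_example(const u8 *buf, unsigned int len, u32 *result)
{
	struct crypto_shash *tfm;
	u8 digest[4];	/* CRC32_DIGEST_SIZE */
	int ret;

	tfm = crypto_alloc_shash("crc32", 0, 0);	/* cra_name from above */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* init + update + final in one step, using the default seed */
		ret = crypto_shash_digest(desc, buf, len, digest);
	}

	crypto_free_shash(tfm);
	if (!ret)
		*result = get_unaligned_le32(digest);	/* final stores a __le32 */
	return ret;
}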